| code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Listy where
import Data.Monoid
newtype Listy a =
Listy [a]
deriving (Eq, Show)
instance Monoid (Listy a) where
mempty = Listy []
mappend (Listy l) (Listy l') = Listy $ mappend l l'
| aniketd/learn.haskell | haskellbook/orphan-instance/Listy.hs | unlicense | 199 | 0 | 8 | 45 | 87 | 47 | 40 | 8 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Tutorial example based on
-- http://haskell-distributed-next.github.io/tutorials/ch-tutorial1.html
import Control.Concurrent (threadDelay)
import Control.Monad (forever)
import Control.Distributed.Process
import Control.Distributed.Process.Node
import Network.Transport.TCP (createTransport, defaultTCPParameters)
import Control.Concurrent.MVar
( MVar
, newMVar
, putMVar
, takeMVar
)
import qualified Control.Exception as Ex
import Control.Exception (throwIO)
import Control.Distributed.Process hiding (call, monitor)
import Control.Distributed.Process.Closure
import Control.Distributed.Process.Node
import Control.Distributed.Process.Platform hiding (__remoteTable, send)
-- import Control.Distributed.Process.Platform as Alt (monitor)
-- import Control.Distributed.Process.Platform.Test
import Control.Distributed.Process.Platform.Time
import Control.Distributed.Process.Platform.Timer
import Control.Distributed.Process.Platform.Supervisor hiding (start, shutdown)
import qualified Control.Distributed.Process.Platform.Supervisor as Supervisor
import Control.Distributed.Process.Platform.ManagedProcess.Client (shutdown)
import Control.Distributed.Process.Serializable()
import Control.Distributed.Static (staticLabel)
import Control.Monad (void, forM_, forM)
import Control.Rematch
( equalTo
, is
, isNot
, isNothing
, isJust
)
import Data.ByteString.Lazy (empty)
import Data.Maybe (catMaybes)
{-
#if !MIN_VERSION_base(4,6,0)
import Prelude hiding (catch)
#endif
-}
replyBack :: (ProcessId, String) -> Process ()
replyBack (sender, msg) = send sender msg
logMessage :: String -> Process ()
logMessage msg = say msg
-- ---------------------------------------------------------------------
-- Note: this TH stuff has to be before anything that refers to it
exitIgnore :: Process ()
exitIgnore = liftIO $ throwIO ChildInitIgnore
noOp :: Process ()
noOp = return ()
chatty :: ProcessId -> Process ()
chatty me = go 1
where
go :: Int -> Process ()
go 4 = do
logMessage "exiting"
return ()
go n = do
send me n
logMessage $ ":sent " ++ show n
sleepFor 2 Seconds
go (n + 1)
$(remotable [ 'exitIgnore
, 'noOp
, 'chatty
])
-- | This is very important, if you do not start the node with this
-- table the supervisor will start and then silently fail when you try
-- to run a closure.
myRemoteTable :: RemoteTable
myRemoteTable = Main.__remoteTable initRemoteTable
-- ---------------------------------------------------------------------
main :: IO ()
main = do
Right t <- createTransport "127.0.0.1" "10501" defaultTCPParameters
node <- newLocalNode t myRemoteTable
-- node <- newLocalNode t initRemoteTable
runProcess node $ do
self <- getSelfPid
r <- Supervisor.start restartAll [(defaultWorker $ RunClosure ($(mkClosure 'chatty) self))]
-- r <- Supervisor.start restartAll [(permChild $ RunClosure ($(mkClosure 'chatty) self))]
sleepFor 3 Seconds
reportAlive r
logMessage "started"
s <- statistics r
logMessage $ "stats:" ++ show s
reportAlive r
getMessagesUntilTimeout
logMessage "getMessagesUntilTimeout returned"
s2 <- statistics r
logMessage $ "stats:" ++ show s2
reportAlive r
return ()
-- A 1 second wait. Otherwise the main thread can terminate before
-- our messages reach the logging process or get flushed to stdio
threadDelay (1*1000000)
return ()
-- TODO: I suspect this has to be a handleMessage
getMessagesUntilTimeout :: Process ()
getMessagesUntilTimeout = do
mm <- expectTimeout (6*1000000) :: Process (Maybe Int)
case mm of
Nothing -> do
logMessage $ "getMessagesUntilTimeout:timed out"
return ()
Just m -> do
logMessage $ "getMessagesUntilTimeout:" ++ show m
getMessagesUntilTimeout
reportAlive :: ProcessId -> Process ()
reportAlive pid = do
alive <- isProcessAlive pid
logMessage $ "pid:" ++ show pid ++ " alive:" ++ show alive
-- ---------------------------------------------------------------------
defaultWorker :: ChildStart -> ChildSpec
defaultWorker clj =
ChildSpec
{
childKey = ""
, childType = Worker
, childRestart = Temporary
, childStop = TerminateImmediately
, childStart = clj
, childRegName = Nothing
}
permChild :: ChildStart -> ChildSpec
permChild clj =
(defaultWorker clj)
{
childKey = "perm-child"
, childRestart = Permanent
}
tempWorker :: ChildStart -> ChildSpec
tempWorker clj =
(defaultWorker clj)
{
childKey = "temp-worker"
, childRestart = Temporary
}
-- ---------------------------------------------------------------------
restartStrategy :: RestartStrategy
restartStrategy = -- restartAll
RestartAll {intensity = RestartLimit {maxR = maxRestarts 1,
maxT = seconds 1},
mode = RestartEach {order = LeftToRight}}
-- ---------------------------------------------------------------------
| alanz/cloud-haskell-play | src/simplesupervisor.hs | unlicense | 5,088 | 0 | 20 | 962 | 1,104 | 599 | 505 | 117 | 2 |
-- |Distributed Dot Product - Erlang Style
--
-- Backend implementation for DNA project.
-- Author: Braam Research, LLC
-- Copyright (C) 2014 Cambridge University
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ForeignFunctionInterface #-}
module Main where
import GHC.Generics (Generic)
import Data.Typeable
import Control.DeepSeq
import Control.Monad
import System.Posix.Files
import System.Environment (getArgs)
import Control.Concurrent (threadDelay)
import Control.Distributed.Process hiding (say)
import Control.Distributed.Process.Closure
--import Control.Distributed.Process.Backend.SimpleLocalnet
import Control.Distributed.Process.Node (initRemoteTable)
import Control.Distributed.Process.Platform (resolve)
import qualified Control.Distributed.Process.Platform.Service.SystemLog as Log
import qualified Control.Distributed.Process.Platform.UnsafePrimitives as Unsafe
import qualified Data.Vector.Storable as S
import Text.Printf
import Control.Distributed.Process.Debug
import qualified Control.Distributed.Process.Platform.Time as Time
import qualified Control.Distributed.Process.Platform.Timer as Timer
import Data.Binary
import System.IO
import Network.URI (URI(..), URIAuth(..), parseURI)
import DNA.Channel.File
import DNA.Message
import DNA.CmdOpts
import DNA.Common (startLogger, say, startTracing)
import DNA.SimpleLocalNetWithoutDiscovery
import Cfg (executableName, timePeriod, timePeriodPure, synchronizationPoint)
import Paths_ddp_erlang_style (version)
data PartialSum = PartialSum ProcessId Double deriving (Show,Typeable,Generic)
instance Binary PartialSum
newtype Result = Result Double deriving (Show,Typeable,Generic)
instance Binary Result
data Crash = Crash Bool deriving (Show, Typeable, Generic)
instance Binary Crash
-- Collects data from compute processes, looking for either partial sum results or monitoring messages.
-- Partial sums get accumulated.
dpSum :: [ProcessId] -> Double -> Process(Double)
dpSum [ ] sum = do
return sum
dpSum pids sum = do
receiveWait
[ match $ \(PartialSum pid s) -> dpSum (filter (/= pid) pids) (s+sum)
, match $ \(ProcessMonitorNotification _ pid _) -> dpSum (filter (/= pid) pids) sum
]
spawnCollector :: ProcessId -> Process ()
spawnCollector pid = do
synchronizationPoint
collectorPID <- getSelfPid
masterPID <- dnaSlaveHandleStart "collector" collectorPID
(DnaPidList computePids) <- expect
-- XXX The specification said the master monitors the compute nodes (failure of which is ignored)
-- XXX and the master monitors the collector (failure of which terminates the program)
-- Monitoring here remains to prevent the collector from entering an infinite loop waiting for a node that died.
-- Install monitors for the compute processes.
forM_ computePids $ \pid -> monitor pid
sum <- timePeriod "collection phase" $ dpSum computePids 0
send masterPID (Result sum)
traceMessage "trace message from collector."
data CompVec = CompVec ProcessId (S.Vector Double) deriving (Eq, Show, Typeable, Generic)
instance Binary CompVec where
put (CompVec pid vec) = put pid >> put vec
get = do { pid <- get; vec <- get; return (CompVec pid vec)}
instance NFData CompVec where
rnf (CompVec p v) = rnf p `seq` rnf v
spawnCChan :: Int64 -> (Int -> Double) -> ProcessId -> Process()
spawnCChan n f pid = do
myPid <- getSelfPid
let vec = S.generate (fromIntegral n) f
timePeriod "generating and sending precomputed vector" $ Unsafe.send pid (CompVec myPid $! vec)
spawnCompute :: (FilePath, Int64, Int64, Int64, ProcessId) -> Process ()
spawnCompute (file, chOffset, chSize, itemCount, collectorPID) = do
synchronizationPoint
getSelfPid >>= enableTrace
computePID <- getSelfPid
sayDebug $ printf "[Compute %s] : f:%s iC:%s cS:%s cO:%s coll:%s" (show computePID) file (show itemCount) (show chSize) (show chOffset) (show collectorPID)
masterPID <- dnaSlaveHandleStart "compute" computePID
-- Testing crash behaviour: we terminate before any computing actions take place
-- if we receive True in the Crash message.
(Crash crashEnabled) <- expect
when crashEnabled terminate -- terminate the process.
fChanPid <- spawnLocal (spawnFChan file chOffset chSize computePID)
cChanPid <- spawnLocal (spawnCChan chSize (\n -> 1.0) computePID)
(iov, cv) <- timePeriod "receiving vectors" $ do
(FileVec fChanPid iov) <- timePeriod "receiving read vector" expect
(CompVec cChanPid cv) <- timePeriod "receiving computed vector" expect
return (iov, cv)
--sayDebug $ printf "[Compute %s] : Value of iov: %s" (show computePID) (show iov)
sumOnComputeNode <- timePeriod "compute sends sum" $ do
let sumOnComputeNode = timePeriodPure "pure computation time" $ S.sum $ S.zipWith (*) iov cv
send collectorPID (PartialSum computePID sumOnComputeNode)
return sumOnComputeNode
sayDebug $ printf "[Compute] : sumOnComputeNode : %s at %s send to %s" (show sumOnComputeNode) (show computePID) (show collectorPID)
send masterPID (DnaFinished computePID)
remotable [ 'spawnCompute, 'spawnCollector]
-- |Monitoring of processes. We watch out for the collector process, ignoring all other failures.
masterMonitor :: ProcessId -> Process Double
masterMonitor collectorPid = do
-- If we just returned the value here, we could overflow the stack with a really big
-- number of processes (1e6+), so we return a value and dispatch it later.
maybeResult <- receiveWait
[ match $ \(ProcessMonitorNotification _ pid reason) ->
if pid == collectorPid
then do
sayDebug $ "Collector failure "++show reason++". Master process terminate."
terminate
else do
case reason of
DiedNormal -> return ()
_ -> sayDebug $ "[Coordinator] Compute node failure " ++ (show pid)++" reason "++show reason
return Nothing
, match $ \(Result sum) -> return (Just sum)
]
-- dispatch result.
case maybeResult of
Nothing -> masterMonitor collectorPid
Just r -> return r
master :: MasterOptions -> Backend -> [NodeId] -> Process ()
master masterOptions backend peers = do
case peers of
[] -> error "no peers found!"
_
| length peers < 2 -> error "too few peers."
| otherwise -> return ()
synchronizationPoint
startLogger peers
logPID <- Log.systemLog (liftIO . putStrLn) (return ()) Log.Debug return
Timer.sleep (Time.milliSeconds 100)
masterPID <- getSelfPid
say $ printf "[Master %s]" (show masterPID)
-- startTracing peers
-- enableTrace masterPID
-- traceMessage "trace message from master"
-- Set up scheduling variables
let allComputeNids = tail peers
let crashEnabled = masterOptsCrash masterOptions
let filePath = masterOptsFilename masterOptions
let nidToCrash = head allComputeNids
let chunkCount = fromIntegral $ length allComputeNids
fileStatus <- liftIO $ getFileStatus filePath
let itemCount = div (read $ show (fileSize fileStatus)) itemSize
liftIO . putStrLn $ "itemcount: " ++ (show itemCount)
let chunkOffsets = map (chunkOffset chunkCount itemCount) [1..chunkCount]
liftIO . putStrLn $ "Offsets: " ++ show chunkOffsets
liftIO . putStrLn $ "NodeIds: " ++ show allComputeNids
let chunkSizes = map (chunkSize chunkCount itemCount) [1..chunkCount]
liftIO . putStrLn $ "chunkSizes : " ++ show chunkSizes
-- Start collector process
let collectorNid = head peers
collectorPid <- dnaMasterStartSlave "collector" masterPID collectorNid ($(mkClosure 'spawnCollector) (masterPID))
-- enableTrace collectorPid
-- Start compute processes
computePids <- forM (zip3 allComputeNids chunkOffsets chunkSizes) $ \(computeNid,chO,chS) -> do
pid <- dnaMasterStartSlave "compute" masterPID computeNid ( $(mkClosure 'spawnCompute) (filePath, chO, chS, itemCount, collectorPid))
send pid (Crash (crashEnabled && computeNid == nidToCrash))
return pid
sum <- timePeriod "master waits for result" $ do
-- Send collector computePid's
send collectorPid (DnaPidList computePids)
sayDebug "--------------------------------------------------------------------------------------"
masterMonitor collectorPid
sayDebug $ printf "Result %s" (show sum)
terminateAllSlaves backend
main :: IO ()
main = do
putStrLn $ "ddp-elang-style version "++show version
dnaParseCommandLineAndRun rtable "ddp-erlang-style" master
where
rtable :: RemoteTable
rtable = __remoteTable initRemoteTable
| SKA-ScienceDataProcessor/RC | MS1/ddp-erlang-style/ddp-erlang-style.hs | apache-2.0 | 9,553 | 0 | 22 | 2,488 | 2,174 | 1,106 | 1,068 | 151 | 4 |
{-# LANGUAGE TupleSections, ParallelListComp, NoMonomorphismRestriction, RecursiveDo, ViewPatterns #-}
module Compile(compile) where
import Compile.Monad
import Context
import Control.Monad.Writer
import Data.Either
import Data.Function
import Data.Maybe
import ID
import My.Control.Monad
import My.Control.Monad.State
import My.Data.List
import My.Prelude
import PCode
import qualified Data.Map as M
import Syntax
import Data.Array
import My.Data.Tree
lookupName s = lift $ gets (lookupSymName s)
getSymVal s = lift $ gets (lookupSymVal s)
newVar = lift (state createSym)
intercept m = censor (const mempty) $ listen m
m !- s = fromMaybe s $ M.lookup s m
addLocals ls = modifying locals_ $ \m -> foldr (uncurry M.insert) m ls
globVal t s locs = case M.lookup s locs of
Just s' -> SymVal t s'
Nothing -> SymVal GValue s
branch (IntVal (fromInteger -> n)) alts | n>=0 && n<length (tail alts) = goto (tail alts!!n)
| otherwise = goto (head alts)
branch v alts = tell [Branch v alts] >> return NullVal
goto n = branch NullVal [n]
a ?>>= b = listen a >>= \(a,l) -> if null l || not (isBranch $ last l) then b a else return a
a ?>> b = a ?>>= const b
flattenable code = map (f . instr) code'
where f (Branch v alts) = Branch v (map (a!) alts)
f i = i
(bounds,instr,nexts,_) = navigate code
t = spanningTree 0 nexts ; code' = flatten t
a = array bounds (zip code' [0..])
compile args ret expr = runCompileT (compile' (fmap bindSym ret) expr)
(M.fromList [(s,s) | bv <- maybe id (:) ret args, s <- bindSyms bv])
>§ \(_,c) -> Code args (flattenable $ c++[Branch NullVal []]) ret
compile' dest (Symbol sym) = do
name <- lookupName sym
locs <- gets locals
let def = globVal Value sym locs
val = fromMaybe def (IntVal $< (readConstant =<< name))
case dest of
Just v | v/=sym -> tell [set v val] >> return def
_ -> return val
compile' dest (Group (Symbol id:args)) = do
gl <- getSymVal id
let compile = case gl of
Axiom a -> compileAxiom a
Builtin b -> compileBuiltin b
_ -> \d a -> compileCall d (Symbol id:a)
compile dest args
compile' dest (Group args) = compileCall dest args
schedule dests args = do
(vals,code) <- unzip $< sequence [intercept $ compile' dest arg | dest <- dests | arg <- args]
mapM_ tell (reverse $ sortBy (compare`on`length) code)
return vals
compileBy op dest args = do
vals <- schedule (repeat Nothing) args
dest <- maybe newVar return dest
tell [op dest vals]
return (SymVal Value dest)
compileBuiltin _ dest [] = compileValue dest (IntVal 0)
compileBuiltin b dest args = compileBy (Op b) dest args
compileCall = compileBuiltin BCall
compileAxiom XAlter _ forms = do
let (vars,exprs) = partitionEithers $ zipWith ($) (cycle [Left,Right]) forms
locs <- gets locals
let assocs = [(v,locs!-v) | Symbol v <- vars]
schedule (map (Just . snd) assocs) exprs
addLocals assocs
return NullVal
compileAxiom XBind _ args = case args of
[bVars] -> doBind bVars Nothing
[bVars,expr] -> do
v <- newVar
compile' (Just v) expr
doBind bVars (Just v)
where
doBind bVars val = do
bnd <- bindFromSyntax bVars
bnd' <- localizeBV bnd
tell [PCode.Bind bnd' val]
return NullVal
localizeBV (BindVar s sz pad subs) = do
s' <- newVar
addLocals [(s,s')]
subs' <- mapM (\(bv,n) -> liftM (,n) (localizeBV bv)) subs
return (BindVar s' sz pad subs')
compileAxiom XDo dest [] = return NullVal
compileAxiom XDo dest forms = do
let cs = reverse $ zipWith compile' (dest:repeat Nothing) (reverse forms)
last $< sequence cs
compileAxiom XChoose dest (cond:forms) = do
v <- maybe newVar return dest
rec
pushInfo (start,alts,end,dest)
start <- getPos
condVal <- compile' Nothing cond
branch condVal alts
let compileAlt alt = saving locals_ $ getPos ->> (compile' (Just v) alt ?>> goto end)
alts <- mapM compileAlt forms
end <- getPos
popInfo
return (SymVal Value v)
compileAxiom XReturn _ [arg] = withInfo $ \(_,_,end,dest) -> compile' dest arg ?>> goto end
compileAxiom XRestart _ [] = withInfo $ \(start,_,_,_) -> goto start
compileAxiom XRestart _ [arg] = withInfo $ \(_,alts,_,_) ->
compile' Nothing arg ?>>= \v -> branch v alts
compileAxiom XAddr dest [Symbol s] = gets locals >>= compileValue dest . globVal Address s
compileAxiom XSize dest [Symbol s] = compileValue dest (SymVal Size s)
compileAxiom XID dest [Symbol s] = compileValue dest (SymVal SymID s)
compileAxiom XVerb dest [Group (name:args),expr] = do
bv@BindVar { bindSym = sym } <- bindFromSyntax name
ret <- case bindSubs bv of
[] -> newVar >§ \ret -> bv { bindSym = ret }
(h,_):_ -> return h
code <- compileExpr args (Just ret) expr
lift $ modify $ exportSymVal sym (Verb code)
compile' dest (Symbol sym)
compileAxiom XVerb dest [Symbol s,Symbol a] = do
lift $ modify $ \env -> exportSymVal s (lookupSymVal a env) env
compile' dest (Symbol s)
compileAxiom XNoun dest [Symbol sym,size,init] = do
v <- newVar
codeSz <- compileExpr [] (Just $ symBind v) size
codeInit <- compileExpr [Symbol sym] Nothing init
lift $ modify $ exportSymVal sym $ Noun codeSz codeInit
compile' dest (Symbol sym)
compileAxiom XLang dest [Symbol sym] = do
[impSym,idSym] <- lift $ mapM (state . internSym) ["alpha/import","id"]
compile' dest (Group [Symbol impSym,Group [Symbol idSym,Symbol sym]])
compileAxiom a _ args = error $ "Couldn't compile axiom "++show a++" with args "++show args
compileExpr args ret expr = do
args <- mapM bindFromSyntax args
code <- lift $ compile args ret expr
return code
compileValue dest val = (>>return val) $ case dest of
Just v -> tell [set v val]
Nothing -> return ()
bindFromSyntax (Symbol v) = return $ symBind v
bindFromSyntax (Group (Symbol v:t)) = do
let fun (ns,l) (Symbol v) = lookupName v >>= \s ->
maybe (fun' ns l (Symbol v)) (\n -> return (n:ns,l)) (readConstant =<< s)
fun (ns,l) e = fun' ns l e
fun' ns l e = do
b <- bindFromSyntax e
return ([],(b,product ns):l)
(pads,subs) <- foldM fun ([],[]) $ reverse t
let size = foldl (<+>) (pad,0) $ [(n*a,n*b) | (BindVar _ (a,b) _ _,n) <- subs]
pad = if null pads then 0 else product pads
(a,b) <+> (a',b') = (a+a',b+b')
return $ BindVar v size pad subs
bindFromSyntax s = error $ "Invalid shape for bindVar : "++show s
| lih/Alpha | src/Compile.hs | bsd-2-clause | 6,515 | 1 | 17 | 1,528 | 3,033 | 1,493 | 1,540 | 158 | 4 |
module Control.Monad.IO.MonadIOException where
import Control.Monad.IO.Unwrappable
import Control.Monad.IO.Class
import Control.Exception
-- | Guarantees that an IO operation will be performed before and after executing
-- a MonadIOUnwrappable monad. The cleanup operation will be performed even if the
-- stack contains error monads that fail, or if an exception is raised.
bracketIO :: MonadIOUnwrappable m => IO a -- ^ The operation to perform initially.
-> (a -> IO b) -- ^ The cleanup that should always be performed
-> (a -> m c) -- ^ The monad transformer stack to execute.
-> m c
bracketIO init cleanup action = do
s <- unwrapState
r <- liftIO $ bracket init cleanup (\x -> unwrapMonadIO s (action x))
rewrapMonadIO s r
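-- Illustrative usage sketch (not part of the original module). Assuming a
-- transformer stack @m@ with a 'MonadIOUnwrappable' instance and System.IO in
-- scope, a hypothetical 'countLines' shows how 'bracketIO' keeps the handle
-- closed even if the inner action fails or throws:
--
-- > countLines :: MonadIOUnwrappable m => FilePath -> m Int
-- > countLines path =
-- >   bracketIO (openFile path ReadMode)                         -- acquire
-- >             hClose                                           -- always runs
-- >             (\h -> liftIO (length . lines <$> hGetContents h))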
| A1kmm/monadio-unwrappable | Control/Monad/IO/MonadIOException.hs | bsd-2-clause | 857 | 0 | 14 | 250 | 147 | 79 | 68 | 12 | 1 |
{-# LANGUAGE CPP, ForeignFunctionInterface, OverloadedStrings #-}
-- A simple tool that creates a number of "dead" connections to a
-- server. A dead connection is a connection that doesn't transmit
-- any data but stays connected. This tool is useful to simulate a
-- number of slow/idle connections to a server.
import Args (ljust, nonNegative, parseArgs, positive, theLast)
import EventSocket (connect, recv, sendAll)
import Control.Concurrent (forkIO)
import Control.Monad (forM_, forever)
import qualified Data.ByteString.Char8 as S
import Data.Function (on)
import Data.Monoid (Monoid(..), Last(..))
import Network.Socket (AddrInfo(..), SocketType(..), defaultHints, getAddrInfo,
socket, sClose, withSocketsDo)
import System.Console.GetOpt (ArgDescr(ReqArg), OptDescr(..))
import System.Environment (getArgs)
import System.Event.Thread (ensureIOManagerIsRunning, threadDelay)
import System.Posix.Resource (ResourceLimit(..), ResourceLimits(..),
Resource(..), setResourceLimit)
main = withSocketsDo $ do
(cfg, _) <- parseArgs defaultConfig defaultOptions =<< getArgs
let numConns = theLast cfgNumConns cfg
host = theLast cfgHost cfg
port = theLast cfgPort cfg
delay = theLast cfgDelay cfg * 1000
lim = ResourceLimit $ fromIntegral numConns + 50
myHints = defaultHints { addrSocketType = Stream }
ensureIOManagerIsRunning
setResourceLimit ResourceOpenFiles
ResourceLimits { softLimit = lim, hardLimit = lim }
addrinfos <- getAddrInfo (Just myHints) (Just host) (Just $ show port)
let addr = head addrinfos
putStrLn $ "Running " ++ show numConns ++ " threads to clobber " ++
host ++ ":" ++ show port ++ "..."
forM_ [0..numConns-1] $ \n -> forkIO . forever $ do
let myDelay = delay + n * 1037
sock <- socket (addrFamily addr) (addrSocketType addr)
(addrProtocol addr)
connect sock (addrAddress addr)
let sendLoop s
| S.null s = recvLoop
| otherwise = do
threadDelay myDelay
let len = (n `mod` (S.length request - 1)) + 1
let (h,t) = S.splitAt len s
sendAll sock h
sendLoop t
recvLoop = do
threadDelay myDelay
s <- recv sock 256
if S.null s
then sClose sock
else recvLoop
sendLoop request
putStrLn $ show numConns ++ " threads looping"
-- Block process forever.
--threadDelay maxBound
request = "GET / HTTP/1.1\r\nHost: www.test.com\r\n\r\n"
------------------------------------------------------------------------
-- Configuration
data Config = Config {
cfgNumConns :: Last Int
, cfgDelay :: Last Int
, cfgHost :: Last String
, cfgPort :: Last Int
}
defaultConfig :: Config
defaultConfig = Config
{ cfgNumConns = ljust 50
, cfgDelay = ljust 100
, cfgHost = ljust "localhost"
, cfgPort = ljust 3000
}
instance Monoid Config where
mempty = Config
{ cfgNumConns = mempty
, cfgDelay = mempty
, cfgHost = mempty
, cfgPort = mempty
}
mappend a b = Config
{ cfgNumConns = app cfgNumConns a b
, cfgDelay = app cfgDelay a b
, cfgHost = app cfgHost a b
, cfgPort = app cfgPort a b
}
where
app :: (Monoid b) => (a -> b) -> a -> a -> b
app = on mappend
defaultOptions :: [OptDescr (IO Config)]
defaultOptions = [
Option ['n'] ["connections"]
(ReqArg (nonNegative "number of connections" $ \n ->
mempty { cfgNumConns = n }) "N")
"number of connections"
, Option ['d'] ["delay"]
(ReqArg (nonNegative "delay between chunks" $ \d ->
mempty { cfgDelay = d }) "N")
"delay between chunks (ms)"
, Option ['h'] ["host"]
(ReqArg (\s -> return $ mempty { cfgHost = ljust s }) "HOST")
"server address"
, Option ['p'] ["port"]
(ReqArg (positive "server port" $ \n ->
mempty { cfgPort = n }) "N")
"server port"
]
| tibbe/event | benchmarks/DeadConn.hs | bsd-2-clause | 4,332 | 0 | 28 | 1,384 | 1,167 | 632 | 535 | 93 | 2 |
{-# OPTIONS_GHC -Wall #-}
module HW04.HW04 where
-- Exercise 1 -----------------------------------------
fun1' :: [Integer] -> Integer
fun1' = product . map (subtract 2) . filter even
fun2' :: Integer -> Integer
fun2' = sum . filter even . takeWhile (> 1) . iterate collatz
where
collatz n
| even n = n `div` 2
| otherwise = 3 * n + 1
-- Exercise 2 -----------------------------------------
data Tree a = Leaf
| Node Integer (Tree a) a (Tree a)
deriving (Show, Eq)
getHeight :: Tree a -> Integer
getHeight Leaf = -1
getHeight (Node h _ _ _) = h
foldTree :: [a] -> Tree a
foldTree = foldr insert Leaf
where
insert x Leaf = Node 0 Leaf x Leaf
insert x (Node _ left y right)
| hLeft < hRight =
let newLeft = insert x left
hNewLeft = getHeight newLeft
newHeight = max hNewLeft hRight + 1
in Node newHeight newLeft y right
| otherwise =
let newRight = insert x right
hNewRight = getHeight newRight
newHeight = max hLeft hNewRight + 1
in Node newHeight left y newRight
where
hLeft = getHeight left
hRight = getHeight right
-- Exercise 3 -----------------------------------------
xor :: [Bool] -> Bool
xor = odd . foldr (\x cnt -> if x then cnt + 1 else cnt) (0 :: Integer)
map' :: (a -> b) -> [a] -> [b]
map' f = foldr (\x xs -> f x : xs) []
-- Exercise 4 -----------------------------------------
cartProd :: [a] -> [b] -> [(a, b)]
cartProd xs ys = [(x, y) | x <- xs, y <- ys]
sieveSundaram :: Integer -> [Integer]
sieveSundaram n = map ((+1) . (*2)) $ filter (not . (`elem` crossed)) [1..n]
where
crossed = filter (<= n) . map transform . filter (uncurry (<=)) $ cartProd [1..n] [1..n]
transform (i, j) = i + j + 2 * i * j
| kemskems/cis194-spring13 | src/HW04/HW04.hs | bsd-3-clause | 1,846 | 0 | 13 | 541 | 737 | 388 | 349 | 41 | 2 |
{-# LANGUAGE PatternGuards #-}
module Idris.Delaborate (bugaddr, delab, delab', delabMV, delabTy, delabTy', pprintErr) where
-- Convert core TT back into high level syntax, primarily for display
-- purposes.
import Util.Pretty
import Idris.AbsSyntax
import Idris.Core.TT
import Idris.Core.Evaluate
import Idris.ErrReverse
import Data.List (intersperse)
import qualified Data.Text as T
import Debug.Trace
bugaddr = "https://github.com/idris-lang/Idris-dev/issues"
delab :: IState -> Term -> PTerm
delab i tm = delab' i tm False False
delabMV :: IState -> Term -> PTerm
delabMV i tm = delab' i tm False True
delabTy :: IState -> Name -> PTerm
delabTy i n
= case lookupTy n (tt_ctxt i) of
(ty:_) -> case lookupCtxt n (idris_implicits i) of
(imps:_) -> delabTy' i imps ty False False
_ -> delabTy' i [] ty False False
delab' :: IState -> Term -> Bool -> Bool -> PTerm
delab' i t f mvs = delabTy' i [] t f mvs
delabTy' :: IState -> [PArg] -- ^ implicit arguments to type, if any
-> Term
-> Bool -- ^ use full names
-> Bool -- ^ Don't treat metavariables specially
-> PTerm
delabTy' ist imps tm fullname mvs = de [] imps tm
where
un = fileFC "(val)"
de env _ (App f a) = deFn env f [a]
de env _ (V i) | i < length env = PRef un (snd (env!!i))
| otherwise = PRef un (sUN ("v" ++ show i ++ ""))
de env _ (P _ n _) | n == unitTy = PTrue un IsType
| n == unitCon = PTrue un IsTerm
| n == falseTy = PFalse un
| Just n' <- lookup n env = PRef un n'
| otherwise
= case lookup n (idris_metavars ist) of
Just (Just _, mi, _) -> mkMVApp n []
_ -> PRef un n
de env _ (Bind n (Lam ty) sc)
= PLam n (de env [] ty) (de ((n,n):env) [] sc)
de env ((PImp { argopts = opts }):is) (Bind n (Pi ty) sc)
= PPi (Imp opts Dynamic False) n (de env [] ty) (de ((n,n):env) is sc)
de env (PConstraint _ _ _ _:is) (Bind n (Pi ty) sc)
= PPi constraint n (de env [] ty) (de ((n,n):env) is sc)
de env (PTacImplicit _ _ _ tac _:is) (Bind n (Pi ty) sc)
= PPi (tacimpl tac) n (de env [] ty) (de ((n,n):env) is sc)
de env (plic:is) (Bind n (Pi ty) sc)
= PPi (Exp (argopts plic) Dynamic False)
n
(de env [] ty)
(de ((n,n):env) is sc)
de env [] (Bind n (Pi ty) sc)
= PPi expl n (de env [] ty) (de ((n,n):env) [] sc)
de env _ (Bind n (Let ty val) sc)
= PLet n (de env [] ty) (de env [] val) (de ((n,n):env) [] sc)
de env _ (Bind n (Hole ty) sc) = de ((n, sUN "[__]"):env) [] sc
de env _ (Bind n (Guess ty val) sc) = de ((n, sUN "[__]"):env) [] sc
de env plic (Bind n bb sc) = de ((n,n):env) [] sc
de env _ (Constant i) = PConstant i
de env _ Erased = Placeholder
de env _ Impossible = Placeholder
de env _ (TType i) = PType
dens x | fullname = x
dens ns@(NS n _) = case lookupCtxt n (idris_implicits ist) of
[_] -> n -- just one thing
[] -> n -- metavariables have no implicits
_ -> ns
dens n = n
deFn env (App f a) args = deFn env f (a:args)
deFn env (P _ n _) [l,r]
| n == pairTy = PPair un IsType (de env [] l) (de env [] r)
| n == eqCon = PRefl un (de env [] r)
| n == sUN "lazy" = de env [] r
deFn env (P _ n _) [ty, Bind x (Lam _) r]
| n == sUN "Exists"
= PDPair un IsType (PRef un x) (de env [] ty)
(de ((x,x):env) [] (instantiate (P Bound x ty) r))
deFn env (P _ n _) [_,_,l,r]
| n == pairCon = PPair un IsTerm (de env [] l) (de env [] r)
| n == eqTy = PEq un (de env [] l) (de env [] r)
| n == sUN "Ex_intro" = PDPair un IsTerm (de env [] l) Placeholder
(de env [] r)
deFn env f@(P _ n _) args
| n `elem` map snd env
= PApp un (de env [] f) (map pexp (map (de env []) args))
deFn env (P _ n _) args
| not mvs = case lookup n (idris_metavars ist) of
Just (Just _, mi, _) ->
mkMVApp n (drop mi (map (de env []) args))
_ -> mkPApp n (map (de env []) args)
| otherwise = mkPApp n (map (de env []) args)
deFn env f args = PApp un (de env [] f) (map pexp (map (de env []) args))
mkMVApp n []
= PMetavar n
mkMVApp n args
= PApp un (PMetavar n) (map pexp args)
mkPApp n args
| Just imps <- lookupCtxtExact n (idris_implicits ist)
= PApp un (PRef un n) (zipWith imp (imps ++ repeat (pexp undefined)) args)
| otherwise = PApp un (PRef un n) (map pexp args)
imp (PImp p m l n _) arg = PImp p m l n arg
imp (PExp p l n _) arg = PExp p l n arg
imp (PConstraint p l n _) arg = PConstraint p l n arg
imp (PTacImplicit p l n sc _) arg = PTacImplicit p l n sc arg
-- | How far to indent sub-errors
errorIndent :: Int
errorIndent = 8
-- | Actually indent a sub-error - no line at end because a newline can end
-- multiple layers of indent
indented :: Doc a -> Doc a
indented = nest errorIndent . (line <>)
pprintTerm :: IState -> PTerm -> Doc OutputAnnotation
pprintTerm ist = pprintTerm' ist []
pprintTerm' :: IState -> [(Name, Bool)] -> PTerm -> Doc OutputAnnotation
pprintTerm' ist bnd tm = pprintPTerm (ppOptionIst ist) bnd [] (idris_infixes ist) tm
pprintErr :: IState -> Err -> Doc OutputAnnotation
pprintErr i err = pprintErr' i (fmap (errReverse i) err)
pprintErr' i (Msg s) = text s
pprintErr' i (InternalMsg s) =
vsep [ text "INTERNAL ERROR:" <+> text s,
text "This is probably a bug, or a missing error message.",
text ("Please consider reporting at " ++ bugaddr)
]
pprintErr' i (CantUnify _ x y e sc s) =
text "Can't unify" <> indented (pprintTerm' i (map (\ (n, b) -> (n, False)) sc) (delab i x)) <$>
text "with" <> indented (pprintTerm' i (map (\ (n, b) -> (n, False)) sc) (delab i y)) <>
case e of
Msg "" -> empty
_ -> line <> line <> text "Specifically:" <>
indented (pprintErr' i e) <>
if (opt_errContext (idris_options i)) then showSc i sc else empty
pprintErr' i (CantConvert x y env) =
text "Can't convert" <>
indented (pprintTerm' i (map (\ (n, b) -> (n, False)) env) (delab i x)) <$>
text "with" <>
indented (pprintTerm' i (map (\ (n, b) -> (n, False)) env) (delab i y)) <>
if (opt_errContext (idris_options i)) then line <> showSc i env else empty
pprintErr' i (CantSolveGoal x env) =
text "Can't solve goal " <>
indented (pprintTerm' i (map (\ (n, b) -> (n, False)) env) (delab i x)) <>
if (opt_errContext (idris_options i)) then line <> showSc i env else empty
pprintErr' i (UnifyScope n out tm env) =
text "Can't unify" <> indented (annName n) <+>
text "with" <> indented (pprintTerm' i (map (\ (n, b) -> (n, False)) env) (delab i tm)) <+>
text "as" <> indented (annName out) <> text "is not in scope" <>
if (opt_errContext (idris_options i)) then line <> showSc i env else empty
pprintErr' i (CantInferType t) = text "Can't infer type for" <+> text t
pprintErr' i (NonFunctionType f ty) =
pprintTerm i (delab i f) <+>
text "does not have a function type" <+>
parens (pprintTerm i (delab i ty))
pprintErr' i (NotEquality tm ty) =
pprintTerm i (delab i tm) <+>
text "does not have an equality type" <+>
parens (pprintTerm i (delab i ty))
pprintErr' i (TooManyArguments f) = text "Too many arguments for" <+> annName f
pprintErr' i (CantIntroduce ty) =
text "Can't use lambda here: type is" <+> pprintTerm i (delab i ty)
pprintErr' i (InfiniteUnify x tm env) =
text "Unifying" <+> annName' x (showbasic x) <+> text "and" <+>
pprintTerm' i (map (\ (n, b) -> (n, False)) env) (delab i tm) <+>
text "would lead to infinite value" <>
if (opt_errContext (idris_options i)) then line <> showSc i env else empty
pprintErr' i (NotInjective p x y) =
text "Can't verify injectivity of" <+> pprintTerm i (delab i p) <+>
text " when unifying" <+> pprintTerm i (delab i x) <+> text "and" <+>
pprintTerm i (delab i y)
pprintErr' i (CantResolve c) = text "Can't resolve type class" <+> pprintTerm i (delab i c)
pprintErr' i (CantResolveAlts as) = text "Can't disambiguate name:" <+>
align (cat (punctuate (comma <> space) (map text as)))
pprintErr' i (NoTypeDecl n) = text "No type declaration for" <+> annName n
pprintErr' i (NoSuchVariable n) = text "No such variable" <+> annName n
pprintErr' i (IncompleteTerm t) = text "Incomplete term" <+> pprintTerm i (delab i t)
pprintErr' i UniverseError = text "Universe inconsistency"
pprintErr' i ProgramLineComment = text "Program line next to comment"
pprintErr' i (Inaccessible n) = annName n <+> text "is not an accessible pattern variable"
pprintErr' i (NonCollapsiblePostulate n) = text "The return type of postulate" <+>
annName n <+> text "is not collapsible"
pprintErr' i (AlreadyDefined n) = annName n<+>
text "is already defined"
pprintErr' i (ProofSearchFail e) = pprintErr' i e
pprintErr' i (NoRewriting tm) = text "rewrite did not change type" <+> pprintTerm i (delab i tm)
pprintErr' i (At f e) = annotate (AnnFC f) (text (show f)) <> colon <> pprintErr' i e
pprintErr' i (Elaborating s n e) = text "When elaborating" <+> text s <>
annName' n (showqual i n) <> colon <$>
pprintErr' i e
pprintErr' i (ElaboratingArg f x _ e)
| isUN x =
text "When elaborating argument" <+>
annotate (AnnBoundName x False) (text (showbasic x)) <+> --TODO check plicity
text "to" <+> whatIsName <> annName f <> colon <>
indented (pprintErr' i e)
| otherwise =
text "When elaborating an application of" <+> whatIsName <>
annName f <> colon <> indented (pprintErr' i e)
where whatIsName = let ctxt = tt_ctxt i
in if isTConName f ctxt
then text "type constructor" <> space
else if isConName f ctxt
then text "constructor" <> space
else if isFnName f ctxt
then text "function" <> space
else empty
pprintErr' i (ProviderError msg) = text ("Type provider error: " ++ msg)
pprintErr' i (LoadingFailed fn e) = text "Loading" <+> text fn <+> text "failed:" <+> pprintErr' i e
pprintErr' i (ReflectionError parts orig) =
let parts' = map (fillSep . map showPart) parts in
align (fillSep parts') <>
if (opt_origerr (idris_options i))
then line <> line <> text "Original error:" <$> indented (pprintErr' i orig)
else empty
where showPart :: ErrorReportPart -> Doc OutputAnnotation
showPart (TextPart str) = fillSep . map text . words $ str
showPart (NamePart n) = annName n
showPart (TermPart tm) = pprintTerm i (delab i tm)
showPart (SubReport rs) = indented . hsep . map showPart $ rs
pprintErr' i (ReflectionFailed msg err) =
text "When attempting to perform error reflection, the following internal error occurred:" <>
indented (pprintErr' i err) <>
text ("This is probably a bug. Please consider reporting it at " ++ bugaddr)
isUN :: Name -> Bool
isUN (UN n) = not $ T.isPrefixOf (T.pack "__") n -- TODO figure out why MNs are getting rewritten to UNs for top-level pattern-matching functions
isUN (NS n _) = isUN n
isUN _ = False
annName :: Name -> Doc OutputAnnotation
annName n = annName' n (showbasic n)
annName' :: Name -> String -> Doc OutputAnnotation
annName' n str = annotate (AnnName n Nothing Nothing Nothing) (text str)
showSc :: IState -> [(Name, Term)] -> Doc OutputAnnotation
showSc i [] = empty
showSc i xs = line <> line <> text "In context:" <>
indented (vsep (reverse (showSc' [] xs)))
where showSc' bnd [] = []
showSc' bnd ((n, ty):ctxt) =
let this = bindingOf n False <+> colon <+> pprintTerm' i bnd (delab i ty)
in this : showSc' ((n,False):bnd) ctxt
showqual :: IState -> Name -> String
showqual i n = showName (Just i) [] (ppOptionIst i) { ppopt_impl = False } False (dens n)
where
dens ns@(NS n _) = case lookupCtxt n (idris_implicits i) of
[_] -> n -- just one thing
_ -> ns
dens n = n
showbasic :: Name -> String
showbasic n@(UN _) = show n
showbasic (MN _ s) = str s
showbasic (NS n s) = showSep "." (map str (reverse s)) ++ "." ++ showbasic n
showbasic (SN s) = show s
| DanielWaterworth/Idris-dev | src/Idris/Delaborate.hs | bsd-3-clause | 12,960 | 0 | 18 | 3,948 | 5,541 | 2,750 | 2,791 | 246 | 33 |
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.Basics.Quantifiers
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Various combinations of quantifiers
-----------------------------------------------------------------------------
module TestSuite.Basics.Quantifiers(tests) where
import Control.Monad (void)
import Utils.SBVTestFramework
tests :: TestTree
tests = testGroup "Basics.Quantifiers" $ concatMap mkGoal goals ++ concatMap mkPred preds
where mkGoal (g, nm) = [ goldenCapturedIO ("quantified_sat" ++ "_" ++ nm) $ \rf -> void $ satWith z3{verbose=True, redirectVerbose=Just rf} g
, goldenCapturedIO ("quantified_prove" ++ "_" ++ nm) $ \rf -> void $ proveWith z3{verbose=True, redirectVerbose=Just rf} g
]
mkPred (p, nm) = [ goldenCapturedIO ("quantified_sat" ++ "_" ++ nm) $ \rf -> void $ satWith z3{verbose=True, redirectVerbose=Just rf} p
, goldenCapturedIO ("quantified_prove" ++ "_" ++ nm) $ \rf -> void $ proveWith z3{verbose=True, redirectVerbose=Just rf} p
]
qs = [(exists, "exists"), (forall, "forall")]
acts = [ (\x y -> x + (y - x) .== y , "thm")
, (\x y -> x .== y &&& x ./= y, "contradiction")
, (\x y -> x .== y + 1 , "satisfiable")
]
goals = [(t1 q1 q2 a, nq1 ++ nq2 ++ "_" ++ an ++ "_c") | (q1, nq1) <- qs
, (q2, nq2) <- qs
, (a, an) <- acts ]
preds = [(t2 q1 q2 a, nq1 ++ nq2 ++ "_" ++ an ++ "_p") | (q1, nq1) <- qs
, (q2, nq2) <- qs
, (a, an) <- acts ]
t1 :: (String -> Symbolic SWord8) -> (String -> Symbolic SWord8) -> (SWord8 -> SWord8 -> SBool) -> Goal
t1 q1 q2 act = q1 "x" >>= \x -> q2 "y" >>= \y -> constrain $ act x y
t2 :: (String -> Symbolic SWord8) -> (String -> Symbolic SWord8) -> (SWord8 -> SWord8 -> SBool) -> Predicate
t2 q1 q2 act = q1 "x" >>= \x -> q2 "y" >>= \y -> return $ act x y
| josefs/sbv | SBVTestSuite/TestSuite/Basics/Quantifiers.hs | bsd-3-clause | 2,396 | 0 | 14 | 865 | 761 | 419 | 342 | 23 | 1 |
{-
The C algorithm does not appear to give notable performance
improvements, at least when building tries based on /usr/dict on
little-endian 32-bit machines. The implementation also appears
somewhat buggy (cf test/TrieFile.hs) and using the FFI complicates
distribution.
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
{-# CFILES ByteStringInternal/indexOfDifference.c #-}
-}
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
----------------------------------------------------------------
-- ~ 2009.02.06
-- |
-- Module : Data.Trie.ByteStringInternal
-- Copyright : Copyright (c) 2008--2011 wren gayle romano
-- License : BSD3
-- Maintainer : wren@community.haskell.org
-- Stability : experimental
-- Portability : portable
--
-- Helper functions on 'ByteString's for "Data.Trie.Internal".
----------------------------------------------------------------
module Data.Trie.ByteStringInternal
( ByteString, ByteStringElem
, breakMaximalPrefix
) where
import qualified Data.ByteString as S
import Data.ByteString.Internal (ByteString(..), inlinePerformIO)
import Data.Word
import Foreign.ForeignPtr (ForeignPtr, withForeignPtr)
import Foreign.Ptr (Ptr, plusPtr)
import Foreign.Storable (Storable(..))
{-
#ifdef __USE_C_INTERNAL__
import Foreign.C.Types (CInt)
import Control.Monad (liftM)
#endif
-}
----------------------------------------------------------------
-- | Associated type of 'ByteString'
type ByteStringElem = Word8
----------------------------------------------------------------
-- | Returns the longest shared prefix and the two remaining suffixes
-- for a pair of strings.
--
-- > s == (\(pre,s',z') -> pre `append` s') (breakMaximalPrefix s z)
-- > z == (\(pre,s',z') -> pre `append` z') (breakMaximalPrefix s z)
breakMaximalPrefix :: ByteString -> ByteString
-> (ByteString, ByteString, ByteString)
breakMaximalPrefix
str1@(PS s1 off1 len1)
str2@(PS s2 off2 len2)
| len1 == 0 = (S.empty, S.empty, str2)
| len2 == 0 = (S.empty, str1, S.empty)
| otherwise = inlinePerformIO $
withForeignPtr s1 $ \p1 ->
withForeignPtr s2 $ \p2 -> do
i <- indexOfDifference
(p1 `ptrElemOff` off1)
(p2 `ptrElemOff` off2)
(min len1 len2)
let pre = if off1 + len1 < off2 + len2 -- share the smaller one
then newPS s1 off1 i
else newPS s2 off2 i
let s1' = newPS s1 (off1 + i) (len1 - i)
let s2' = newPS s2 (off2 + i) (len2 - i)
return $! (,,) !$ pre !$ s1' !$ s2'
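-- Illustrative examples (not part of the original module; the literals assume
-- OverloadedStrings or Data.ByteString.Char8.pack):
--
-- > breakMaximalPrefix "foobar" "foobaz" == ("fooba", "r", "z")
-- > breakMaximalPrefix "abc" "xyz"       == ("", "abc", "xyz")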
-- | C-style pointer addition, without the liberal type of 'plusPtr'.
ptrElemOff :: Storable a => Ptr a -> Int -> Ptr a
{-# INLINE ptrElemOff #-}
ptrElemOff p i =
p `plusPtr` (i * sizeOf (undefined `asTypeOf` inlinePerformIO (peek p)))
newPS :: ForeignPtr ByteStringElem -> Int -> Int -> ByteString
{-# INLINE newPS #-}
newPS s o l =
if l <= 0 then S.empty else PS s o l
-- | fix associativity bug
(!$) :: (a -> b) -> a -> b
{-# INLINE (!$) #-}
(!$) = ($!)
----------------------------------------------------------------
-- | Calculates the first index where values differ.
indexOfDifference :: Ptr ByteStringElem -> Ptr ByteStringElem -> Int -> IO Int
{-
#ifdef __USE_C_INTERNAL__
indexOfDifference p q i =
liftM fromIntegral $! c_indexOfDifference p q (fromIntegral i)
-- This could probably be not IO, but the wrapper requires that anyways...
foreign import ccall unsafe "ByteStringInternal/indexOfDifference.h indexOfDifference"
c_indexOfDifference :: Ptr ByteStringElem -> Ptr ByteStringElem -> CInt -> IO CInt
#else
-}
-- Use the naive algorithm which doesn't depend on architecture details
indexOfDifference p1 p2 limit = goByte 0
where
goByte n =
if n >= limit
then return limit
else do c1 <- peekElemOff p1 n
c2 <- peekElemOff p2 n
if c1 == c2
then goByte $! n+1
else return n
{-
#endif
-}
----------------------------------------------------------------
----------------------------------------------------------- fin.
| tkonolige/dbignore | bytestring-trie/src/Data/Trie/ByteStringInternal.hs | bsd-3-clause | 4,255 | 0 | 18 | 1,013 | 711 | 397 | 314 | 52 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.BuildPlanSpec where
import Stack.BuildPlan
import Control.Monad.Logger
import Control.Exception hiding (try)
import Control.Monad.Catch (try)
import Data.Monoid
import qualified Data.Map as Map
import qualified Data.Set as Set
import Network.HTTP.Conduit (Manager)
import System.Directory
import System.IO.Temp
import System.Environment
import Test.Hspec
import Stack.Config
import Stack.Types
import Stack.Types.StackT
data T = T
{ manager :: Manager
}
setup :: IO T
setup = do
manager <- newTLSManager
unsetEnv "STACK_YAML"
return T{..}
teardown :: T -> IO ()
teardown _ = return ()
main :: IO ()
main = hspec spec
spec :: Spec
spec = beforeAll setup $ afterAll teardown $ do
let logLevel = LevelDebug
let loadConfig' m = runStackLoggingT m logLevel False (loadConfig mempty)
let loadBuildConfigRest m = runStackLoggingT m logLevel False
let inTempDir action = do
currentDirectory <- getCurrentDirectory
withSystemTempDirectory "Stack_BuildPlanSpec" $ \tempDir -> do
let enterDir = setCurrentDirectory tempDir
let exitDir = setCurrentDirectory currentDirectory
bracket_ enterDir exitDir action
it "finds missing transitive dependencies #159" $ \T{..} -> inTempDir $ do
-- Note: this test is somewhat fragile, depending on packages on
-- Hackage remaining in a certain state. If it fails, confirm that
-- github still depends on failure.
writeFile "stack.yaml" "resolver: lts-2.9"
LoadConfig{..} <- loadConfig' manager
bconfig <- loadBuildConfigRest manager (lcLoadBuildConfig Nothing ThrowException)
runStackT manager logLevel bconfig False $ do
menv <- getMinimalEnvOverride
mbp <- loadMiniBuildPlan $ LTS 2 9
eres <- try $ resolveBuildPlan
menv
mbp
(const False)
(Map.fromList
[ ($(mkPackageName "github"), Set.empty)
])
case eres of
Left (UnknownPackages _ unknown _) -> do
case Map.lookup $(mkPackageName "github") unknown of
Nothing -> error "doesn't list github as unknown"
Just _ -> return ()
{- Currently not implemented, see: https://github.com/fpco/stack/issues/159#issuecomment-107809418
case Map.lookup $(mkPackageName "failure") unknown of
Nothing -> error "failure not listed"
Just _ -> return ()
-}
_ -> error $ "Unexpected result from resolveBuildPlan: " ++ show eres
return ()
describe "shadowMiniBuildPlan" $ do
let version = $(mkVersion "1.0.0") -- unimportant for this test
pn = either throw id . parsePackageNameFromString
mkMPI deps = MiniPackageInfo
{ mpiVersion = version
, mpiFlags = Map.empty
, mpiPackageDeps = Set.fromList $ map pn $ words deps
, mpiToolDeps = Set.empty
, mpiExes = Set.empty
, mpiHasLibrary = True
}
go x y = (pn x, mkMPI y)
resourcet = go "resourcet" ""
conduit = go "conduit" "resourcet"
conduitExtra = go "conduit-extra" "conduit"
text = go "text" ""
attoparsec = go "attoparsec" "text"
aeson = go "aeson" "text attoparsec"
mkMBP pkgs = MiniBuildPlan
{ mbpGhcVersion = version
, mbpPackages = Map.fromList pkgs
}
mbpAll = mkMBP [resourcet, conduit, conduitExtra, text, attoparsec, aeson]
test name input shadowed output extra =
it name $ const $
shadowMiniBuildPlan input (Set.fromList $ map pn $ words shadowed)
`shouldBe` (output, Map.fromList extra)
test "no shadowing" mbpAll "" mbpAll []
test "shadow something that isn't there" mbpAll "does-not-exist" mbpAll []
test "shadow a leaf" mbpAll "conduit-extra"
(mkMBP [resourcet, conduit, text, attoparsec, aeson])
[]
test "shadow direct dep" mbpAll "conduit"
(mkMBP [resourcet, text, attoparsec, aeson])
[conduitExtra]
test "shadow deep dep" mbpAll "resourcet"
(mkMBP [text, attoparsec, aeson])
[conduit, conduitExtra]
test "shadow deep dep and leaf" mbpAll "resourcet aeson"
(mkMBP [text, attoparsec])
[conduit, conduitExtra]
test "shadow deep dep and direct dep" mbpAll "resourcet conduit"
(mkMBP [text, attoparsec, aeson])
[conduitExtra]
| hesselink/stack | src/test/Stack/BuildPlanSpec.hs | bsd-3-clause | 5,018 | 0 | 25 | 1,670 | 1,130 | 577 | 553 | 104 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Text
-- Copyright : Duncan Coutts 2007
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This defines a 'Text' class which is a bit like the 'Read' and 'Show'
-- classes. The difference is that it uses a modern pretty printer and parser
-- system and the format is not expected to be Haskell concrete syntax but
-- rather the external human readable representation used by Cabal.
--
module Distribution.Text (
Text(..),
display,
simpleParse,
) where
import qualified Distribution.Compat.ReadP as Parse
import qualified Text.PrettyPrint as Disp
import Data.Version (Version(Version))
import qualified Data.Char as Char (isDigit, isAlphaNum, isSpace)
class Text a where
disp :: a -> Disp.Doc
parse :: Parse.ReadP r a
display :: Text a => a -> String
display = Disp.renderStyle style . disp
where style = Disp.Style {
Disp.mode = Disp.PageMode,
Disp.lineLength = 79,
Disp.ribbonsPerLine = 1.0
}
simpleParse :: Text a => String -> Maybe a
simpleParse str = case [ p | (p, s) <- Parse.readP_to_S parse str
, all Char.isSpace s ] of
[] -> Nothing
(p:_) -> Just p
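-- Illustrative usage (not part of the original module), tying 'display' and
-- 'simpleParse' to the 'Text Version' instance below:
--
-- > display (Version [1,2,3] []) == "1.2.3"
-- > (simpleParse "1.2.3" :: Maybe Version) == Just (Version [1,2,3] [])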
-- -----------------------------------------------------------------------------
-- Instances for types from the base package
instance Text Bool where
disp = Disp.text . show
parse = Parse.choice [ (Parse.string "True" Parse.+++
Parse.string "true") >> return True
, (Parse.string "False" Parse.+++
Parse.string "false") >> return False ]
instance Text Version where
disp (Version branch _tags) -- Do not display the tags
= Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int branch))
parse = do
branch <- Parse.sepBy1 digits (Parse.char '.')
tags <- Parse.many (Parse.char '-' >> Parse.munch1 Char.isAlphaNum)
return (Version branch tags)
where
digits = do
first <- Parse.satisfy Char.isDigit
if first == '0'
then return 0
else do rest <- Parse.munch Char.isDigit
return (read (first : rest))
| dcreager/cabal | Distribution/Text.hs | bsd-3-clause | 2,290 | 0 | 17 | 596 | 565 | 302 | 263 | 41 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS -Wall #-}
module IterX.IterX (
ResultX(..)
, IterX(..)
, Status(..)
, Failure
, Success
, failX
, doneX
, cxtFailure
, feed
, runIterX
, convStream
, unfoldConvStream
, iterXToStreamTrans
, delimitG
, delimitN
, DelState
) where
import IterX.Core
import IterX.Exception
import IterX.StreamTrans
import IterX.Unsafe
import Control.Applicative
import Control.Monad.State
import Control.Exception (throw)
import Data.MonoTraversable
import Data.Monoid
import Data.Sequences
----------------------------------------------------------------
data ResultX s m r =
DoneX r s
| FailX s String
| MoreX (s -> m (ResultX s m r))
deriving (Functor)
instance (Show s, Show r) => Show (ResultX s m r) where
show (DoneX r s) = "DoneX (" ++ show r ++ ") (" ++ show s ++ ")"
show (FailX s e) = "FailX (" ++ show s ++ ") (" ++ show e ++ ")"
show (MoreX _) = "MoreX"
data Status = EOF | HasMore
deriving (Eq, Show)
newtype IterX s m a = IterX {
runIter :: forall r.
s
-> Status
-> Failure s m r
-> Success s m a r
-> m (ResultX s m r)}
type Failure s m r = s -> Status -> String -> m (ResultX s m r)
type Success s m a r = s -> Status -> a -> m (ResultX s m r)
instance Functor (IterX s m) where
{-# INLINE fmap #-}
fmap = mapIter
instance Applicative (IterX s m) where
{-# INLINE pure #-}
pure = returnIter
{-# INLINE (<*>) #-}
(<*>) = apIter
instance Monad (IterX s m) where
{-# INLINE return #-}
return = returnIter
{-# INLINE (>>=) #-}
(>>=) = bindIter
fail = failIter
instance MonadTrans (IterX s) where
{-# INLINE lift #-}
lift m = IterX $ \s st _ onD -> m >>= onD s st
failX :: Monad m => s -> Status -> String -> m (ResultX s m a)
failX s _ err = return $ FailX s err
doneX :: Monad m => s -> Status -> a -> m (ResultX s m a)
doneX s _ a = return $ DoneX a s
-- | transform a failure continuation by adding extra context
cxtFailure :: String -> IterX s m a -> IterX s m a
cxtFailure cxt i = IterX $ \s st onF onD ->
let cxtF s' st' err = onF s' st' (cxt ++ ": " ++ err)
in runIter i s st cxtF onD
{-# INLINE cxtFailure #-}
returnIter :: a -> IterX s m a
returnIter a = IterX $ \s st _ onD -> onD s st a
{-# INLINE returnIter #-}
bindIter :: IterX s m a -> (a -> IterX s m b) -> IterX s m b
bindIter m f = IterX $ \s st onF onD -> runIter m s st onF
$ \s' st' a -> runIter (f a) s' st' onF onD
{-# INLINE bindIter #-}
failIter :: String -> IterX s m a
failIter err = IterX $ \s st onF _ -> onF s st err
{-# INLINE failIter #-}
mapIter :: (a -> b) -> IterX s m a -> IterX s m b
mapIter f m = IterX $ \s st onF onD -> runIter m s st onF $
\s' st' a -> onD s' st' (f a)
{-# INLINE mapIter #-}
apIter :: IterX s m (a -> b) -> IterX s m a -> IterX s m b
apIter f a = do
f' <- f
a' <- a
return (f' a')
{-# INLINE apIter #-}
----------------------------------------------------------------
-- run an iteratee with the given producer. Extra input after the iteratee
-- completes is discarded.
runIterX :: (Monad m)
=> Producer (StateT (ResultX s m a) m) s
-> IterX s m a
-> m (ResultX s m a)
runIterX gen i = do
let s0 = MoreX $ \s -> runIter i s HasMore failX doneX
foldG f s0 gen
where
f (MoreX k) s = k s
f (DoneX _ _) _ = throw (TerminateEarly "runIterX")
f r _ = return r
----------------------------------------------------------------
feed :: (Monad m, Monoid s) => ResultX s m r -> s -> m (ResultX s m r)
feed (MoreX k) s = k s
feed (DoneX r s0) s = return $ DoneX r (s0<>s)
feed f@FailX{} _ = return f
----------------------------------------------------------------
-- | Create a 'Transducer' from an 'IterX'
convStream :: Monad m
=> IterX e1 m e2
-> Transducer (StateT (ResultX e1 m e2) (GenT e2 m)) m e1 e2
convStream i = streamGM (f id) i0
where
i0 = MoreX $ \inp -> runIter i inp HasMore failX doneX
f acc (MoreX k) s = k s >>= \case
res@(MoreX _) -> return (res,acc [])
-- TODO: probably makes sense to unroll this at least once
DoneX e2 rest -> f (acc . (e2:)) i0 rest
FailX _ err -> throw $ IterFailure $ "convStream: " ++ err
f _acc _other _ = error "convStream: other case arrived?"
-- | Create a 'Transducer' from an 'IterX' generating function
unfoldConvStream :: Monad m
=> (st -> IterX e1 m (st,e2))
-> st
-> Transducer (StateT (ResultX e1 m (st,e2)) (GenT e2 m)) m
e1 e2
unfoldConvStream mkI st0 = streamGM (f id) (i0 st0)
where
i0 s = MoreX $ \inp -> runIter (mkI s) inp HasMore failX doneX
-- TODO: probably makes sense to unroll this at least once
f acc (MoreX k) s = k s >>= \case
res@(MoreX _) -> return (res,acc [])
DoneX !(st',!e2) rest -> f (acc . (e2:)) (i0 st') rest
FailX _ err -> throw $ IterFailure $ "unfoldConvStream: " ++ err
f _acc _other _ = error "unfoldConvStream: other case arrived?"
iterXToStreamTrans :: Monad m => IterX s m (s -> [b]) -> StreamTransM m s [b]
iterXToStreamTrans iter = StreamTransM $ \s ->
runIter iter s HasMore failX doneX >>= \case
MoreX k -> return (step k,[])
DoneX f s' -> finish s' f
FailX _s err -> throw $ IterFailure
$ "iterXToStreamTrans: " ++ err
where
finish s f = let f' = StreamTransM $ return . (f',) . f
in return (f',f s)
step k = StreamTransM $ k >=> \case
DoneX f s -> finish s f
MoreX k' -> return (step k',[])
FailX _s err -> throw $ IterFailure
$ "iterXToStreamTrans: " ++ err
-- | create a transducer from a 'delimited stream'.
delimitG :: Monad m
=> IterX inp m st
-> (st -> inp -> (Either st inp, [outp]))
-> Transducer (StateT (DelState inp m st) (GenT outp m)) m inp outp
delimitG iter0 f = streamGM g s0
where
s0 = StartDelimiter
g st e = case st of
ProcState s -> g' [] s e
StartDelimiter -> runIter0 [] e
ConsumeDelimiter k -> k e >>= procResult []
g' outp0 s e = case f s e of
(Left s', outp) -> return (ProcState s', outp0 ++ outp)
(Right nxt, outp) -> runIter0 (outp0++outp) nxt
runIter0 o inp = runIter iter0 inp HasMore failX doneX >>= procResult o
procResult outp res = case res of
DoneX s' r -> g' outp s' r
MoreX k' -> return $ (ConsumeDelimiter k', outp)
FailX _ err -> throw $ IterFailure $ "delimitG: " ++ err
{-# INLINEABLE delimitG #-}
type DelState inp m st = DelStateD (inp -> m (ResultX inp m st)) st
data DelStateD i s =
StartDelimiter
| ConsumeDelimiter !i
| ProcState !s
delimitN :: (IsSequence inp, Index inp ~ Int, Monad m)
=> IterX inp m Int
-> Transducer (StateT (DelState inp m Int) (GenT inp m)) m inp inp
delimitN iter = delimitG iter f
where
f !n inp =
let len = olength inp
in if len <= n
then (Left $! n-len, [inp])
else case unsafeSplitAt n inp of
(!h,t) -> (Right t,[h])
{-# INLINEABLE delimitN #-}
| JohnLato/iterx | src/IterX/IterX.hs | bsd-3-clause | 7,630 | 0 | 14 | 2,340 | 2,869 | 1,471 | 1,398 | 188 | 6 |
{-# LANGUAGE OverloadedStrings #-}
module Lib
(Token,Corpus,
Unigrams, Digrams, Trigrams,
unigrams, digrams, trigrams, clean,
fromUnigrams, fromDigrams, fromTrigrams
) where
import Control.Monad.Random
import Data.Char (isSpace)
import Data.List (foldl', zipWith4)
import qualified Data.Map.Strict as M
import qualified Data.Text as T
type Token = T.Text
type Corpus = [Token]
type Unigrams = M.Map Token [Token]
type Digrams = M.Map (Token,Token) [Token]
type Trigrams = M.Map (Token,Token,Token) [Token]
clean :: T.Text -> Corpus
clean = filter (not . junk) . T.split isSpace . T.map replacePunctuation
where
replacePunctuation c = if c `elem` ("…+„”*—-:;\"()[]»«_"::String)
then ' ' else c
junk e = e `elem` ["", ".", ",", "..", ",,"]
unigrams :: Corpus -> Unigrams
unigrams = let merge (t,u) m = M.insertWith (++) t [u] m
in ngrams merge pairs
digrams :: Corpus -> Digrams
digrams = let merge (t,u,v) m = M.insertWith (++) (t,u) [v] m
in ngrams merge triplets
trigrams :: Corpus -> Trigrams
trigrams = let merge (t,u,v,w) m = M.insertWith (++) (t,u,v) [w] m
in ngrams merge quadruplets
fromUnigrams :: (RandomGen g) => Integer -> Unigrams -> Rand g [Token]
fromUnigrams k m = fromNGrams (\t e -> [e,t]) (\(t:_) -> t) k m
fromDigrams :: (RandomGen g) => Integer -> Digrams -> Rand g [Token]
fromDigrams k m = fromNGrams (\(t,v) e -> [e,v,t]) (\(v:t:_) -> (t,v)) k m
fromTrigrams :: (RandomGen g) => Integer -> Trigrams -> Rand g [Token]
fromTrigrams k m = fromNGrams (\(t,v,u) e -> [e,u,v,t]) (\(u:v:t:_) -> (t,v,u)) k m
fromNGrams ::
(RandomGen g, Ord n) =>
(n -> Token -> [Token]) -> ([Token] -> n) -> Integer -> M.Map n [Token]
-> Rand g [Token]
fromNGrams initialBuildStep accDecompStep k m = do
(key,tokens) <- randomKV m
e <- randomEl tokens
build (k-1) (initialBuildStep key e)
where
build 0 acc = return (reverse acc)
build i acc = case M.lookup (accDecompStep acc) m of
Nothing -> return (reverse acc)
(Just ts) -> randomEl ts >>= build (i-1) . (:acc)
ngrams :: (a -> M.Map k v -> M.Map k v) -> ([b] -> [a]) -> [b] -> M.Map k v
ngrams add shift tokens = foldl' (flip add) M.empty (shift tokens)
pairs :: [a] -> [(a,a)]
pairs l = zipWith (,) l (tail l)
triplets :: [a] -> [(a,a,a)]
triplets l = zipWith3 (,,) l (tail l) (tail (tail l))
quadruplets :: [a] -> [(a,a,a,a)]
quadruplets l = zipWith4 (,,,) l (tail l) (tail (tail l)) (tail (tail (tail l)))
randomKV :: (RandomGen g) => M.Map k v -> Rand g (k,v)
randomKV m = getRandomR (0, M.size m - 1) >>= return . (M.assocs m !!)
randomEl :: (RandomGen g) => [a] -> Rand g a
randomEl l = getRandomR (0, length l - 1) >>= return . (l !!)
|
pzel/slowotok
|
src/Lib.hs
|
bsd-3-clause
| 2,764
| 0
| 14
| 615
| 1,433
| 789
| 644
| 60
| 3
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Blaze.ByteString.Builder
import qualified Data.ByteString.Lazy.Char8 as BSL8
import Data.Map
import Data.Maybe
import Data.OpenSRS.ToXML
import Data.OpenSRS.Types
import Data.Text hiding (unlines)
import Data.Time
import Test.Hspec
import Text.HTML.TagSoup
import Text.HTML.TagSoup.Tree
import Text.HTML.TagSoup.Manipulators
import Text.HTML.TagSoup.Pretty
import Text.XmlHtml
-- | API configuration to use in these (non-integration) tests.
testConfig :: SRSConfig
testConfig = SRSConfig "https://horizon.opensrs.net:55443" "janedoe" "0123456789abcdef" "127.0.0.1" True
testDomain1 :: IO Domain
testDomain1 = do
t <- getCurrentTime
let t' = addUTCTime ((86400 * 365) :: NominalDiffTime) t
return $ Domain "foo.com" True contacts (Just t) True (Just t) (Just "5534") True (Just t') nameservers
where
contacts = fromList [
("owner", testc),
("admin", testc),
("billing", testc),
("tech", testc)
]
testc = Contact (Just "Jane")
(Just "Doe")
(Just "Frobozz Pty Ltd")
(Just "jane.doe@frobozz.com.au")
(Just "+61.299999999")
Nothing
(Just "Frobozz Pty Ltd")
(Just "Level 50")
(Just "1 George Street")
(Just "Sydney")
(Just "NSW")
(Just "2000")
(Just "AU")
nameservers = [
Nameserver (Just "ns1.anchor.net.au") (Just "0") (Just "127.0.0.1"),
Nameserver (Just "ns2.anchor.net.au") (Just "0") (Just "127.0.0.2") ]
testDoc1 :: String
testDoc1 = unlines ["<!DOCTYPE OPS_envelope SYSTEM \"ops.dtd\">",
"<OPS_envelope>",
" <header>",
" <version>",
" 0.9",
" </version>",
" </header>",
" <body>",
" <data_block>",
" <dt_assoc>",
" <item key=\"protocol\">",
" XCP",
" </item>",
" <item key=\"action\">",
" SET",
" </item>",
" <item key=\"object\">",
" COOKIE",
" </item>",
" <item key=\"registrant_ip\">",
" 127.0.0.1",
" </item>",
" <item key=\"attributes\">",
" <dt_assoc>",
" <item key=\"domain\">",
" foo.com",
" </item>",
" <item key=\"reg_username\">",
" webmaster",
" </item>",
" <item key=\"reg_password\">",
" myLovelyHorse",
" </item>",
" </dt_assoc>",
" </item>",
" </dt_assoc>",
" </data_block>",
" </body>",
"</OPS_envelope>"]
stripStr :: String -> String
stripStr = unpack . strip . pack
reqXML :: SRSRequest -> String
reqXML = BSL8.unpack . toLazyByteString . render . requestXML
suite :: Spec
suite = do
describe "XML inspection" $ do
it "can get a string using a tag as a source" $
stripStr (getText (parseTags testDoc1) "<version>") `shouldBe` "0.9"
it "treats quotes in getText queries the same" $ do
stripStr (getText (parseTags testDoc1) "<item key='domain'>") `shouldBe` "foo.com"
stripStr (getText (parseTags testDoc1) "<item key=\"domain\">") `shouldBe` "foo.com"
it "can get items within tree" $ do
let xmlt = tagTree $ parseTags testDoc1
let items = flattenTree . kidsWith "item" $ topMatching "<item key='attributes'>" xmlt
stripStr (getText' items "<item key='domain'>") `shouldBe` "foo.com"
stripStr (getText' items "<item key='reg_password'>") `shouldBe` "myLovelyHorse"
it "can prettyprint TagSoup XML documents" $
prettyXML " " testDoc1 `shouldBe` testDoc1
describe "Domains" $ do
it "Can be marshalled into a registration request" $ do
d <- testDomain1
let req = RegisterDomain testConfig d False Nothing Nothing
False False True True 1
(fromJust $ makeUsername "janedoe")
(fromJust $ makePassword "imasecret")
NewRegistration Nothing
let rxml = reqXML req
rxml `shouldContain` "<OPS_envelope>"
it "Can be marshalled into an update request" $ do
d <- testDomain1
let req = UpdateDomain testConfig d
let rxml = reqXML req
rxml `shouldContain` "<OPS_envelope>"
main :: IO ()
main = hspec suite
|
anchor/haskell-opensrs
|
tests/XmlTests.hs
|
bsd-3-clause
| 5,323
| 0
| 19
| 2,230
| 1,011
| 539
| 472
| 120
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Graphics.HGL.Internals.Utilities
-- Copyright : (c) Alastair Reid, 1999-2003
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : internal
-- Portability : non-portable (requires concurrency)
--
-- A simple graphics library.
--
-----------------------------------------------------------------------------
-- #hide
module Graphics.HGL.Internals.Utilities(
bracket, bracket_,
safeTry,
E.Exception,
modMVar, modMVar_
) where
import qualified Control.Exception as E (bracket, try, Exception)
import Control.Concurrent( MVar, takeMVar, putMVar )
bracket :: IO a -> (a -> IO b) -> (a -> IO c) -> IO c
bracket = E.bracket
-- Not exactly the same type as GHC's bracket_
bracket_ :: IO a -> (a -> IO b) -> IO c -> IO c
bracket_ left right m = bracket left right (const m)
safeTry :: IO a -> IO (Either E.Exception a)
safeTry = E.try
----------------------------------------------------------------
-- Utilities
----------------------------------------------------------------
modMVar :: MVar a -> (a -> a) -> IO a
modMVar mv f = do
x <- takeMVar mv
putMVar mv (f x)
return x
modMVar_ :: MVar a -> (a -> a) -> IO ()
modMVar_ mv f = do
x <- takeMVar mv
putMVar mv (f x)
|
FranklinChen/hugs98-plus-Sep2006
|
packages/HGL/Graphics/HGL/Internals/Utilities.hs
|
bsd-3-clause
| 1,370
| 8
| 10
| 242
| 362
| 194
| 168
| 22
| 1
|
module Code29_Plan1 where
import Code28
jcode :: Int -> [Int]
jcode n = code (0,n)
bumpBy :: Int -> [Int] -> [Int]
bumpBy _ [] = []
bumpBy k [a] = [a+k]
bumpBy k (a:b:as) = (a+k) : b : bumpBy k as
bumpDn :: (Int, Int) -> [Int]
bumpDn (k,n) = bumpBy k [n-1,n-2 .. 1]
code :: (Int, Int) -> [Int]
code (_,1) = []
code (k,n) = code (k',n-1) □ bumpDn (k,n)
where k' = if odd n then k+1 else 1
|
sampou-org/pfad
|
Code/Code29_Plan1.hs
|
bsd-3-clause
| 477
| 0
| 8
| 172
| 280
| 156
| 124
| 14
| 2
|
module Tagger.IncompletionFinder where
-- from base
import Data.Maybe (isNothing)
import Control.Applicative ((<$>), (<*>))
import Control.Monad (mapM, liftM2)
-- from filepath
import System.FilePath ((</>))
-- from taglib
import qualified Sound.TagLib as T
-- from mtl
import Control.Monad.Trans (lift)
-- from transformers
import Control.Monad.Trans.Maybe (MaybeT(..), runMaybeT)
import Tagger.Types
import Tagger.Crawler (listArtists)
-- | Read ID3 Tags and set them on the Songs and Albums of an Artist,
-- | filter those with incomplete Tags.
getIncomplete :: [Artist] -> IO [Artist]
getIncomplete as = mapM tagArt as >>= return . filter incomp
-- | If any Tag is not set
incomp :: Artist -> Bool
incomp (Artist artName artAlbs) = or (map incompAlb artAlbs)
where incompAlb alb = or [ isNothing (albRelease alb)
, isNothing (albGenre alb)
, or (map incompTrk (albTracks alb))]
incompTrk trk = or [ isNothing (name trk)
, isNothing (rank trk)]
-- | Read Tags for each Album
tagArt :: Artist -> IO Artist
tagArt art = do albs <- mapM tagAlb (artAlbs art)
return $ art { artAlbs = albs }
-- | Read Tags for each Track and set Album Info from Tracks
tagAlb :: Album -> IO Album
tagAlb alb = do albTrkTags <- mapM tagTrk (albTracks alb)
return $ alb { albTracks = map fst albTrkTags
, albRelease = fst . head $ map snd albTrkTags
, albGenre = snd . head $ map snd albTrkTags }
-- TODO replace head with sensible thing
-- | Read Tag for one Track as well as ReleaseDate and Genre
tagTrk :: Track -> IO (Track, (Maybe Int, Maybe String))
tagTrk trk = do mbTrk <- runMaybeT tagTrk'
case mbTrk of
Nothing -> return (trk, (Nothing, Nothing))
Just x -> return x
where path = location trk </> file trk
tagTrk' = do tagFile <- MaybeT (T.open path)
tag <- MaybeT (T.tag tagFile)
liftM2 (,) (lift $ readTTags trk tag)
(lift $ readATags tag)
-- | Convert Tags into Song. Interpret empty strings and zeroes
-- | as missing information
readTTags :: Track -> T.Tag -> IO Track
readTTags track tag = do
name' <- smb <$> T.title tag
rank' <- (imb . fromInteger) <$> T.track tag
return $ track { name = name', rank = rank' }
-- | Read Album ID3 Tags
readATags :: T.Tag -> IO (Maybe Int, Maybe String)
readATags tag = (,) <$> ((imb . fromInteger) <$> T.year tag)
<*> (smb <$> T.genre tag)
-- | Interpret empty String as Nothing
smb :: String -> Maybe String
smb "" = Nothing
smb x = Just x
-- | Interpret zero as Nothing
imb :: Int -> Maybe Int
imb 0 = Nothing
imb x = Just x
|
rethab/tagger
|
src/Tagger/IncompletionFinder.hs
|
bsd-3-clause
| 3,001
| 0
| 13
| 987
| 854
| 449
| 405
| 51
| 2
|
module Main
where
import Control.Monad (liftM, when)
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
import Data.Version (showVersion)
import System.Environment (getArgs)
import System.Exit (ExitCode (..), exitFailure, exitSuccess, exitWith)
import System.IO (hPutStrLn, stderr, hSetBuffering, BufferMode(LineBuffering))
import GHC.ParMake.Common (maybeRead)
import GHC.ParMake.Util
import qualified GHC.ParMake.BuildPlan as BuildPlan
import qualified GHC.ParMake.Parse as Parse
import qualified GHC.ParMake.Engine as Engine
import qualified Paths_ghc_parmake (version)
-- Argument handling.
data Args = Args {
verbosity :: Verbosity,
printVersion :: Bool,
printUsage :: Bool,
numJobs :: Int,
ghcPath :: String,
ghcServerPath :: String,
extraDepends :: [String],
outputFilename :: Maybe String,
osuf :: String,
hisuf :: String
} deriving Show
defaultArgs :: Args
defaultArgs = Args {
verbosity = normal,
printVersion = False,
printUsage = False,
numJobs = 1,
ghcPath = "ghc",
ghcServerPath = "ghc-server",
extraDepends = [],
outputFilename = Nothing,
osuf = "o",
hisuf = "hi"
}
parseArgs :: [String] -> Args
parseArgs l = go l defaultArgs
where
parseNumJobs n = fromMaybe (fatal "The argument to '-j' must be an integer!")
(liftM abs $ maybeRead n)
parseVerbosity n = fromMaybe verbose (maybeRead n >>= intToVerbosity)
go [] acc = acc
go ("-V":_) acc = acc { printVersion = True }
go ("--help":_) acc = acc { printUsage = True }
go ("-j":n:as) acc = go as $ acc { numJobs = parseNumJobs n }
go (('-':'j':n:[]):as) acc = go as $ acc { numJobs = parseNumJobs [n] }
go (('-':'v':n:[]):as) acc = go as $
acc { verbosity = parseVerbosity [n] }
go (('-':'v':'v':n:[]):as) acc = go as $
acc { verbosity = parseVerbosity [n] }
go ("-v":as) acc = go as $ acc { verbosity = verbose }
go ("-optP-include":as) acc = handleOptPInclude as acc
go ("-o":n:as) acc = go as $ acc { outputFilename = Just n }
go ("-osuf":suf:as) acc = go as $ acc { osuf = suf }
go ("-hisuf":suf:as) acc = go as $ acc { hisuf = suf }
go ("--ghc-path":p:as) acc = go as $ acc { ghcPath = p }
go (a:as) acc
| "--ghc-path=" `isPrefixOf` a = let (o,p') = break (== '=') a in
go (o:(tail p'):as) acc
go ("--ghc-server-path":p:as) acc= go as $ acc { ghcServerPath = p }
go (a:as) acc | "--ghc-server-path=" `isPrefixOf` a
= let (o,p') = break (== '=') a in
go (o:(tail p'):as) acc
go (_:as) acc = go as acc
-- Add '-optP-include -optPmyfile' as extraDepends
handleOptPInclude [] _ = fatal "no path is given after -optP-include"
handleOptPInclude (optPfile:as) acc@Args { extraDepends = ds } =
case splitOffPrefix "-optP" optPfile of
Just path | not (null path) -> go as $ acc { extraDepends = path : ds }
| otherwise -> fatal $
"path given after -optP-include is empty"
_ -> fatal "missing -optP after -optP-include"
splitOffPrefix :: (Eq a) => [a] -> [a] -> Maybe [a]
splitOffPrefix p s = case splitAt (length p) s of
(p', r) | p' == p -> Just r
_ -> Nothing
-- | Processes a list of arguments, returning:
-- * the GHC arguments we want to use in parmake
-- * the files to be compiled
-- * the original GHC arguments with custom parmake arguments removed
-- (thus also contains files)
getGhcArgs :: [String] -> ([String],[String],[String])
getGhcArgs argv = let nonParmakeArgs = rmArgs argv
(args, files) = mkArgs nonParmakeArgs [] []
in (args, files, nonParmakeArgs)
where
pgmSuffixes = ["L", "P", "c", "m", "s", "a", "l", "dll", "F", "windres"]
optsWithArg = [ "-odir", "-hidir", "-ohi", "-stubdir", "-outputdir"
, "-tmpdir", "-osuf", "-hisuf", "-hcsuf"
, "-package", "-package-db", "-package-id", "-hide-package"
, "-ignore-package", "-package-name", "-package-conf", "-f"
, "-framework", "-framework-path"
, "-main-is", "-x"]
++ ["-pgm" ++ str | str <- pgmSuffixes ]
++ ["-opt" ++ str | str <- pgmSuffixes ]
eatOption [] as = ([], as)
eatOption (opt:arg:xs) as
-- Unlike 'ghc --make', 'ghc -c' for some reason does not include -hidir
-- in the interface search path.
| opt == "-hidir" = (xs, ('-':'i':arg):arg:opt:as)
| opt `elem` optsWithArg = (xs, arg:opt:as)
eatOption (x:xs) as = (xs, x:as)
-- Processes GHC args to create GHC-style args suitable for parmake,
-- and splitting the files apart.
mkArgs [] as fs = (reverse as, reverse fs)
mkArgs ("-o":_:xs) as fs = mkArgs xs as fs
mkArgs ("--make":xs) as fs = mkArgs xs as fs
mkArgs xs@(('-':_):_) as fs = let (xs', as') = eatOption xs as
in mkArgs xs' as' fs
mkArgs (x:xs) as fs = mkArgs xs as (x:fs)
-- Removes parmake args from a list of arguments.
rmArgs [] = []
-- Options not passed to GHC: -o, -j, -vv, --ghc-path, --make.
rmArgs ("-j":_:xs) = rmArgs xs
rmArgs (('-':'v':'v':_:[]):xs) = rmArgs xs
rmArgs ("--ghc-path":_:xs) = rmArgs xs
rmArgs (x:xs)
| "--ghc-path=" `isPrefixOf` x = rmArgs xs
rmArgs (arg:xs) = arg : rmArgs xs
usage :: IO ()
usage =
putStr $ "Usage: ghc-parmake [OPTIONS] FILES\n" ++
"A parallel wrapper around 'ghc --make'.\n\n" ++
"Options: \n" ++
"-j N - Run N jobs in parallel.\n" ++
"--ghc-path=PATH - Set the path to the ghc executable.\n" ++
"--ghc-server-path=PATH - Set the path to the ghc-server executable.\n" ++
"-vv[N] - Set verbosity to N (only for ghc-parmake). " ++
"N is 0-3, default 1.\n" ++
"-v[N] - Set verbosity to N " ++
"(both for GHC and ghc-parmake itself).\n" ++
"--help - Print usage information.\n" ++
"-V - Print version information.\n" ++
"\nOther options are passed to GHC unmodified.\n"
-- TODO: To fully emulate GHC's behaviour, we must know whether the input module
-- set contains a module Main.
--
-- Consider these two invocations:
--
-- $ ghc --make Module.hs Module0.hs
-- [1 of 2] Compiling Module ( Module.hs, t/Module.o )
-- [2 of 2] Compiling Module0 ( Module0.hs, t/Module0.o )
--
-- $ ghc --make Module.hs Main.hs
-- [1 of 2] Compiling Module ( Module.hs, t/Module.o )
-- [2 of 2] Compiling Main ( Main.hs, t/Main.o )
-- Linking Main ...
--
-- In the first case, the linking step is not performed since there is no module
-- called 'Main'.
--
-- Additionally, the module 'Main' can have an arbitrary source file name, not
-- necessary 'Main.hs'. This changes the name of the output executable:
--
-- $ ghc --make Module.hs MyProg.hs
-- [1 of 2] Compiling Module ( Module.hs, t/Module.o )
-- [2 of 2] Compiling Main ( MyProg.hs, t/Main.o )
-- Linking MyProg ...
--
-- We currently solve this problem by the final real GHC pass.
-- | All flags conflicting with `ghc -M`.
-- Obtained from the man page (listed in the same order as they appear there)
-- and ghc/Main.hs, `data PostLoadMode`:
-- All modes that are not `DoMkDependHS` (`-M`) are conflicting
-- (apart from `--make`).
flagsConflictingWithM :: [String]
flagsConflictingWithM =
-- "Help and verbosity options"
[ "?"
, "--supported-extensions"
, "--supported-languages"
, "--info"
, "--version"
, "--numeric-version"
, "--print-libdir"
-- -V and --help are not included here because this program uses them
-- "Which phases to run"
, "-E"
, "-C"
, "-S"
, "-c"
-- "Alternative modes of operation"
, "--interactive"
, "-e"
-- "Interface file options"
, "--show-iface"
-- Undocumented?
, "--abi-hash"
]
-- Program entry point.
main :: IO ()
main =
do -- Set stderr to line buffering to prevent interleaved GHC errors
hSetBuffering stderr LineBuffering
argv <- getArgs
let args = parseArgs argv
let (parmakeGhcArgs, files, nonParmakeArgs) = getGhcArgs argv
let v = verbosity $ args
when (printVersion args) $ putStrLn
("ghc-parmake " ++ showVersion Paths_ghc_parmake.version)
>> exitSuccess
when (printUsage args) $ usage >> exitSuccess
when (null $ ghcPath args) $ fatal "ghc path is invalid" >> exitFailure
-- Cases in which we just want to pass on all arguments to GHC and be
-- as transparent as possible:
--
-- * --numeric-version is used
-- (e.g. cabal does this to determine the GHC version)
-- * No input files are given
-- * An option conflicting with "-M" is given
let passToGhc = exitWith =<<
runProcess defaultOutputHooks Nothing (ghcPath args) nonParmakeArgs
when (any (`elem` parmakeGhcArgs) flagsConflictingWithM) $ passToGhc
-- We must not print this (or any other output) before handling the
-- skip-to-GHC cases above.
debug' v $ "Parsed args: " ++ show args
when (null files) $ passToGhc
debug' v "Running ghc -M (twice)..."
deps <- Parse.getModuleDeps v (ghcPath args) parmakeGhcArgs files
when (null deps) $ do
hPutStrLn stderr "ghc-parmake: no dependencies"
exitFailure
debug' v ("Parsed dependencies:\n" ++ show deps)
let settings = BuildPlan.Settings { BuildPlan.osuf = osuf args
, BuildPlan.hisuf = hisuf args }
plan = BuildPlan.new settings deps (extraDepends args)
debug' v ("Produced a build plan:\n" ++ show plan)
debug' v "Building..."
exitCode <- Engine.compile v plan (numJobs args) (ghcServerPath args)
(ghcPath args) parmakeGhcArgs files (outputFilename args)
when (exitCode /= ExitSuccess) $ exitWith exitCode
debug' v $ "Running final 'ghc --make' pass: "
++ ghcPath args ++ " " ++ unwords nonParmakeArgs
passToGhc
|
23Skidoo/ghc-parmake
|
GhcParmake.hs
|
bsd-3-clause
| 10,683
| 0
| 17
| 3,250
| 2,656
| 1,437
| 1,219
| 175
| 19
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
module GEC.KeyExchange.Pure
(
-- * Types
StsCtx, GecKeError(..), GenError, StsResult, mkCtx
-- * Aliases
, Message1, Message2, Message3, KeyMaterial
-- * Message construction
, initiate
, respond
, responseAck
, finish
-- * Helper information
, messageOneSize, messageTwoSize, messageThreeSize
) where
import Crypto.Random (GenError, CryptoRandomGen)
import Crypto.Classes (ctr, buildKey, IV(..))
import Crypto.Cipher.AES128 (AESKey128)
import Crypto.Curve25519.Pure as Curve
import Crypto.Ed25519.Pure as Ed
import qualified Crypto.Hash.SHA512 as SHA
import Control.Exception
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.Bits
import Data.Data
--------------------------------------------------------------------------------
-- Constants
messageOneSize, messageTwoSize, messageThreeSize :: Int
messageOneSize = pubKeySize
messageTwoSize = pubKeySize + sigSize
messageThreeSize = sigSize
pubKeySize, sigSize :: Int
pubKeySize = 32
sigSize = 64
-- Key encryption key material length (128 bit aes key + 64 bit salt)
kckLen :: Int
kckLen = 24
--------------------------------------------------------------------------------
-- Types
data StsCtx = STS0
{ meP :: Ed.PublicKey
, meQ :: Ed.PrivateKey
, themP :: Ed.PublicKey
}
| Init1
{ meP :: Ed.PublicKey
, meQ :: Ed.PrivateKey
, themP :: Ed.PublicKey
, ephemP :: Curve.PublicKey
, ephemQ :: Curve.PrivateKey
}
| Resp1
{ meP :: Ed.PublicKey
, meQ :: Ed.PrivateKey
, themP :: Ed.PublicKey
, ephemP :: Curve.PublicKey
, ephemQ :: Curve.PrivateKey
, themEphemP :: Curve.PublicKey
, theirKCK :: ByteString -> ByteString
, sharedSecret :: ByteString
}
mkCtx :: (Ed.PublicKey,Ed.PrivateKey) -> Ed.PublicKey -> StsCtx
mkCtx (meP,meQ) themP = STS0 {..}
data GecKeError = GeneratorError GenError
| InvalidInput
| InvalidContext
deriving (Eq, Ord, Show, Read, Data, Typeable)
instance Exception GecKeError
data Party = Initiator | Responder | Client
deriving (Enum)
type StsResult a = Either GecKeError a
type Message1 = ByteString
type Message2 = ByteString
type Message3 = ByteString
type KeyMaterial = ByteString
--------------------------------------------------------------------------------
-- Message Construction
initiate :: CryptoRandomGen g => g -> StsCtx -> StsResult (Message1,StsCtx,g)
initiate g (STS0 { .. }) =
case genKeyPair g of
Right (ephemQ,ephemP,g2) -> Right (Curve.exportPublic ephemP, Init1 { .. } , g2)
Left err -> Left err
initiate _ _ = Left InvalidContext
respond :: CryptoRandomGen g => g -> StsCtx -> Message1 -> StsResult (Message2,StsCtx,g)
respond g (STS0 {..}) msg =
case Curve.importPublic msg of
Nothing -> Left InvalidInput
Just themEphemP -> either Left (Right . buildMessage themEphemP) (genKeyPair g)
where
buildMessage themEphemP (ephemQ, ephemP, g2) =
let sharedSecret = makeShared ephemQ themEphemP
myKCK = e_kck $ kdf kckLen Responder sharedSecret
theirKCK = e_kck $ kdf kckLen Initiator sharedSecret
signData = Curve.exportPublic ephemP ## Curve.exportPublic themEphemP
Sig sig = Ed.sign signData meQ meP
encOf_sig = myKCK sig
in ( Curve.exportPublic ephemP ## encOf_sig , Resp1 { .. } , g2)
respond _ _ _ = Left InvalidContext
responseAck :: StsCtx -> Message2 -> Int -> StsResult (Message3, KeyMaterial)
responseAck (Init1 {..}) msg nrBytes
| B.length msg /= messageTwoSize = Left InvalidInput
| otherwise =
if Ed.valid signedData themP (Sig sig)
then return (responseMsg, keyMaterial)
else Left InvalidInput
where
-- Parse the incoming message and derive key material
(themEphemP,encData) = B.splitAt pubKeySize msg
sig = theirKCK encData
signedData = themEphemP ## Curve.exportPublic ephemP
sharedSecret = makeShared ephemQ (myJust $ Curve.importPublic themEphemP)
theirKCK = e_kck $ kdf kckLen Responder sharedSecret
myKCK = e_kck $ kdf kckLen Initiator sharedSecret
-- Now construct the response message
unsignedOutput = Curve.exportPublic ephemP ## themEphemP
(Sig outSig) = Ed.sign unsignedOutput meQ meP
responseMsg = myKCK outSig
-- Derive the client's key material
keyMaterial = kdf nrBytes Client sharedSecret
myJust (Just x) = x
myJust _ = error "Impossible: The Message2 bytestring is of proper length but pub key too small!"
responseAck _ _ _ = Left InvalidContext
finish :: StsCtx -> Message3 -> Int -> StsResult KeyMaterial
finish (Resp1 {..}) msg nrBytes
| B.length msg /= messageThreeSize = Left InvalidInput
| Ed.valid signedData themP (Sig sig) = return keyMaterial
| otherwise = Left InvalidInput
where
signedData = Curve.exportPublic themEphemP ## Curve.exportPublic ephemP
sig = theirKCK msg
keyMaterial = kdf nrBytes Client sharedSecret
finish _ _ _ = Left InvalidContext
--------------------------------------------------------------------------------
-- Utils
-- @kdf nrBytes p secret@ will derive a secret of byte length @nrBytes@
-- using additional data @p@ and shared secret @secret@.
--
-- The KDF algorithm is an iterated SHA512:
--
-- @
-- H( 0 || secret || PARTY ) || H( 1 || secret || PARTY ) || H ( 2 || secret || PARTY)
-- @
--
-- Where the counter (0,1..) is a 16 bit big endian number, secret is the
-- shared secret of 32 bytes, and PARTY is one byte (0 for Initiator, 1 for
-- Responder, 2 for Client key material).
kdf :: Int -> Party -> ByteString -> ByteString
kdf nrBytes p sec
| nrBytes > (2^(16 :: Integer) * 64) = error "Will not derive over 2^16 * 64 bytes from the secret key material with ~128 bits. If you wrote the code to do this intentionally then you should hire someone to write this bit of code for you - you're using it wrong!"
| otherwise = B.take nrBytes full
where
full = B.concat $ map sha512 [B.concat [p16 cnt, sec, party] | cnt <- [0..nrBlk-1]]
party = encodeParty p
p16 c = B.pack [fromIntegral $ (c `shiftR` 8) .&. 0xFF , fromIntegral $ c .&. 0xFF]
nrBlk = (nrBytes + blkSz - 1) `div` blkSz
blkSz = 512 `div` 8
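-- A minimal usage sketch (not part of the original module): deriving 32 bytes
-- of client key material from a placeholder all-zero shared secret. The
-- secret value and the binding name are purely illustrative; real callers
-- obtain the shared secret from the key exchange above.
_exampleClientKeyMaterial :: ByteString
_exampleClientKeyMaterial = kdf 32 Client placeholderSecret
  where
    -- 32-byte dummy secret, for illustration only
    placeholderSecret = B.replicate 32 0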
-- Encryption/decryption function that consumes key material of length kckLen and
-- produces a stream cipher.
e_kck :: ByteString -> (ByteString -> ByteString)
e_kck mat =
let (key,salt) = B.splitAt 16 mat
Just k = buildKey key :: Maybe AESKey128
iv = IV (salt ## B.replicate 8 0)
in fst . ctr k iv
encodeParty :: Party -> ByteString
encodeParty = B.pack . (:[]) . fromIntegral . fromEnum
(##) :: ByteString -> ByteString -> ByteString
(##) = B.append
sha512 :: ByteString -> ByteString
sha512 = SHA.hash
genKeyPair :: CryptoRandomGen g => g -> Either GecKeError (Curve.PrivateKey, Curve.PublicKey, g)
genKeyPair = either (Left . GeneratorError) Right . Curve.generateKeyPair
|
GaloisInc/gec
|
src/GEC/KeyExchange/Pure.hs
|
bsd-3-clause
| 7,774
| 0
| 13
| 2,218
| 1,829
| 1,002
| 827
| 136
| 3
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Database where
import Database.Persist.TH
share [mkPersist sqlSettings, mkSave "entityDefs", mkMigrate "migrateAll"] [persistLowerCase|
Character
username String
UniqueUsername username
userID String
UniqueUserID userID
accessToken String
refreshToken String
deriving Show
|]
|
Frefreak/Gideon
|
src/Database.hs
|
bsd-3-clause
| 606
| 0
| 7
| 178
| 46
| 29
| 17
| 9
| 0
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
module Duckling.CreditCardNumber.Types where
import Control.DeepSeq
import Data.Aeson
import Data.Hashable
import Data.Text (Text)
import qualified Data.Text as Text
import GHC.Generics
import Prelude
import Duckling.Resolve (Resolve(..))
data Issuer
= Visa
| Amex
| Discover
| Mastercard
| DinerClub
| Other
deriving (Eq, Generic, Hashable, Ord, Show, NFData)
instance ToJSON Issuer where
toJSON = String . Text.toLower . Text.pack . show
data CreditCardNumberData = CreditCardNumberData
{ number :: Text
, issuer :: Issuer
}
deriving (Eq, Generic, Hashable, Ord, Show, NFData)
instance Resolve CreditCardNumberData where
type ResolvedValue CreditCardNumberData = CreditCardNumberValue
resolve _ _ CreditCardNumberData {number, issuer} =
Just (CreditCardNumberValue number issuer, False)
data CreditCardNumberValue = CreditCardNumberValue
{ vNumber :: Text
, vIssuer :: Issuer
}
deriving (Eq, Ord, Show)
instance ToJSON CreditCardNumberValue where
toJSON (CreditCardNumberValue number issuer) =
object [ "value" .= number
, "issuer" .= issuer
]
|
facebookincubator/duckling
|
Duckling/CreditCardNumber/Types.hs
|
bsd-3-clause
| 1,527
| 0
| 9
| 274
| 328
| 190
| 138
| 41
| 0
|
{-# LANGUAGE BangPatterns #-}
module Clique where
import Graph (Graph, Vertex, isAdjacentG)
-- A Clique is a list of vertices and a (cached) size
type Clique = ([Vertex], Int)
emptyClique :: Clique
emptyClique = ([],0)
-- verification (of clique property, not of maximality)
-- True iff the given list of vertices forms a clique.
isClique :: Graph -> [Vertex] -> Bool
isClique bigG vertices =
and [isAdjacentG bigG u v | u <- vertices, v <- vertices, u /= v]
|
BlairArchibald/bones
|
apps/maxclique/src/Clique.hs
|
bsd-3-clause
| 466
| 0
| 8
| 96
| 118
| 69
| 49
| 8
| 1
|
{-# LANGUAGE LambdaCase, ScopedTypeVariables #-}
-- | A description of the platform we're compiling for.
--
module GHC.Platform (
PlatformMini(..),
PlatformWordSize(..),
Platform(..), platformArch, platformOS,
Arch(..),
OS(..),
ArmISA(..),
ArmISAExt(..),
ArmABI(..),
PPC_64ABI(..),
target32Bit,
isARM,
osElfTarget,
osMachOTarget,
osSubsectionsViaSymbols,
platformUsesFrameworks,
platformWordSizeInBytes,
platformWordSizeInBits,
PlatformMisc(..),
IntegerLibrary(..),
stringEncodeArch,
stringEncodeOS,
)
where
import Prelude -- See Note [Why do we import Prelude here?]
import GHC.Read
-- | Contains the bare-bones arch and os information. This isn't enough for
-- code gen, but useful for tasks where we can fall back upon the host
-- platform, as this is all we know about the host platform.
data PlatformMini
= PlatformMini
{ platformMini_arch :: Arch
, platformMini_os :: OS
}
deriving (Read, Show, Eq)
-- | Contains enough information for the native code generator to emit
-- code for this platform.
data Platform
= Platform {
platformMini :: PlatformMini,
-- Word size in bytes (i.e. normally 4 or 8,
-- for 32bit and 64bit platforms respectively)
platformWordSize :: PlatformWordSize,
platformUnregisterised :: Bool,
platformHasGnuNonexecStack :: Bool,
platformHasIdentDirective :: Bool,
platformHasSubsectionsViaSymbols :: Bool,
platformIsCrossCompiling :: Bool
}
deriving (Read, Show, Eq)
data PlatformWordSize
= PW4 -- ^ A 32-bit platform
| PW8 -- ^ A 64-bit platform
deriving (Eq)
instance Show PlatformWordSize where
show PW4 = "4"
show PW8 = "8"
instance Read PlatformWordSize where
readPrec = do
i :: Int <- readPrec
case i of
4 -> return PW4
8 -> return PW8
other -> fail ("Invalid PlatformWordSize: " ++ show other)
platformWordSizeInBytes :: Platform -> Int
platformWordSizeInBytes p =
case platformWordSize p of
PW4 -> 4
PW8 -> 8
platformWordSizeInBits :: Platform -> Int
platformWordSizeInBits p = platformWordSizeInBytes p * 8
-- | Legacy accessor
platformArch :: Platform -> Arch
platformArch = platformMini_arch . platformMini
-- | Legacy accessor
platformOS :: Platform -> OS
platformOS = platformMini_os . platformMini
-- | Architectures that the native code generator knows about.
-- TODO: It might be nice to extend these constructors with information
-- about what instruction set extensions an architecture might support.
--
data Arch
= ArchUnknown
| ArchX86
| ArchX86_64
| ArchPPC
| ArchPPC_64
{ ppc_64ABI :: PPC_64ABI
}
| ArchS390X
| ArchSPARC
| ArchSPARC64
| ArchARM
{ armISA :: ArmISA
, armISAExt :: [ArmISAExt]
, armABI :: ArmABI
}
| ArchARM64
| ArchAlpha
| ArchMipseb
| ArchMipsel
| ArchJavaScript
deriving (Read, Show, Eq)
-- Note [Platform Syntax]
-- ~~~~~~~~~~~~~~~~~~~~~~
-- There is a very loose encoding of platforms shared by many tools we are
-- encoding to here. GNU Config (http://git.savannah.gnu.org/cgit/config.git),
-- and LLVM's http://llvm.org/doxygen/classllvm_1_1Triple.html are perhaps the
-- most definitional parsers. The basic syntax is a list of '-'-separated
-- components. The Unix 'uname' command syntax is related but briefer.
--
-- Those two parsers are quite forgiving, and even the 'config.sub'
-- normalization is forgiving too. The "best" way to encode a platform is
-- therefore somewhat a matter of taste.
--
-- The 'stringEncode*' functions here convert each part of GHC's structured
-- notion of a platform into one dash-separated component.
-- | See Note [Platform Syntax].
stringEncodeArch :: Arch -> String
stringEncodeArch = \case
ArchUnknown -> "unknown"
ArchX86 -> "i386"
ArchX86_64 -> "x86_64"
ArchPPC -> "powerpc"
ArchPPC_64 { ppc_64ABI = abi } -> case abi of
ELF_V1 -> "powerpc64"
ELF_V2 -> "powerpc64le"
ArchS390X -> "s390x"
ArchSPARC -> "sparc"
ArchSPARC64 -> "sparc64"
ArchARM { armISA = isa, armISAExt = _, armABI = _ } -> "arm" ++ vsuf
where
vsuf = case isa of
ARMv5 -> "v5"
ARMv6 -> "v6"
ARMv7 -> "v7"
ArchARM64 -> "aarch64"
ArchAlpha -> "alpha"
ArchMipseb -> "mipseb"
ArchMipsel -> "mipsel"
ArchJavaScript -> "js"
isARM :: Arch -> Bool
isARM (ArchARM {}) = True
isARM ArchARM64 = True
isARM _ = False
-- | Operating systems that the native code generator knows about.
-- Having OSUnknown should produce a sensible default, but no promises.
data OS
= OSUnknown
| OSLinux
| OSDarwin
| OSSolaris2
| OSMinGW32
| OSFreeBSD
| OSDragonFly
| OSOpenBSD
| OSNetBSD
| OSKFreeBSD
| OSHaiku
| OSQNXNTO
| OSAIX
| OSHurd
deriving (Read, Show, Eq)
-- | See Note [Platform Syntax].
stringEncodeOS :: OS -> String
stringEncodeOS = \case
OSUnknown -> "unknown"
OSLinux -> "linux"
OSDarwin -> "darwin"
OSSolaris2 -> "solaris2"
OSMinGW32 -> "mingw32"
OSFreeBSD -> "freebsd"
OSDragonFly -> "dragonfly"
OSOpenBSD -> "openbsd"
OSNetBSD -> "netbsd"
OSKFreeBSD -> "kfreebsdgnu"
OSHaiku -> "haiku"
OSQNXNTO -> "nto-qnx"
OSAIX -> "aix"
OSHurd -> "hurd"
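-- A small illustrative helper (an assumption of this sketch, not part of the
-- original module): rendering a 'PlatformMini' in the loose dash-separated
-- syntax described in Note [Platform Syntax], e.g. "x86_64-linux" for
-- 'PlatformMini ArchX86_64 OSLinux'.
_stringEncodePlatformMini :: PlatformMini -> String
_stringEncodePlatformMini mini =
  stringEncodeArch (platformMini_arch mini) ++ "-" ++ stringEncodeOS (platformMini_os mini)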
-- | ARM Instruction Set Architecture, Extensions and ABI
--
data ArmISA
= ARMv5
| ARMv6
| ARMv7
deriving (Read, Show, Eq)
data ArmISAExt
= VFPv2
| VFPv3
| VFPv3D16
| NEON
| IWMMX2
deriving (Read, Show, Eq)
data ArmABI
= SOFT
| SOFTFP
| HARD
deriving (Read, Show, Eq)
-- | PowerPC 64-bit ABI
--
data PPC_64ABI
= ELF_V1
| ELF_V2
deriving (Read, Show, Eq)
-- | This predicate tells us whether the platform is 32-bit.
target32Bit :: Platform -> Bool
target32Bit p =
case platformWordSize p of
PW4 -> True
PW8 -> False
-- | This predicate tells us whether the OS supports ELF-like shared libraries.
osElfTarget :: OS -> Bool
osElfTarget OSLinux = True
osElfTarget OSFreeBSD = True
osElfTarget OSDragonFly = True
osElfTarget OSOpenBSD = True
osElfTarget OSNetBSD = True
osElfTarget OSSolaris2 = True
osElfTarget OSDarwin = False
osElfTarget OSMinGW32 = False
osElfTarget OSKFreeBSD = True
osElfTarget OSHaiku = True
osElfTarget OSQNXNTO = False
osElfTarget OSAIX = False
osElfTarget OSHurd = True
osElfTarget OSUnknown = False
-- Defaulting to False is safe; it means don't rely on any
-- ELF-specific functionality. It is important to have a default for
-- portability, otherwise we have to answer this question for every
-- new platform we compile on (even unreg).
-- | This predicate tells us whether the OS support Mach-O shared libraries.
osMachOTarget :: OS -> Bool
osMachOTarget OSDarwin = True
osMachOTarget _ = False
osUsesFrameworks :: OS -> Bool
osUsesFrameworks OSDarwin = True
osUsesFrameworks _ = False
platformUsesFrameworks :: Platform -> Bool
platformUsesFrameworks = osUsesFrameworks . platformOS
osSubsectionsViaSymbols :: OS -> Bool
osSubsectionsViaSymbols OSDarwin = True
osSubsectionsViaSymbols _ = False
-- | Platform-specific settings formerly hard-coded in Config.hs.
--
-- These should probably all be triaged as to whether they can be computed from
-- other settings or belong in another place (like 'Platform' above).
data PlatformMisc = PlatformMisc
{ -- TODO Recalculate string from richer info?
platformMisc_targetPlatformString :: String
, platformMisc_integerLibrary :: String
, platformMisc_integerLibraryType :: IntegerLibrary
, platformMisc_ghcWithInterpreter :: Bool
, platformMisc_ghcWithNativeCodeGen :: Bool
, platformMisc_ghcWithSMP :: Bool
, platformMisc_ghcRTSWays :: String
-- | Determines whether we will be compiling info tables that reside just
-- before the entry code, or with an indirection to the entry code. See
-- TABLES_NEXT_TO_CODE in includes/rts/storage/InfoTables.h.
, platformMisc_tablesNextToCode :: Bool
, platformMisc_leadingUnderscore :: Bool
, platformMisc_libFFI :: Bool
, platformMisc_ghcThreaded :: Bool
, platformMisc_ghcDebugged :: Bool
, platformMisc_ghcRtsWithLibdw :: Bool
, platformMisc_llvmTarget :: String
}
data IntegerLibrary
= IntegerGMP
| IntegerSimple
deriving (Read, Show, Eq)
|
sdiehl/ghc
|
libraries/ghc-boot/GHC/Platform.hs
|
bsd-3-clause
| 8,870
| 0
| 14
| 2,340
| 1,448
| 843
| 605
| 214
| 17
|
module Handler.Posts where
import Import
import Handler.Common
getPostsR :: Handler Html
getPostsR = do
blogPosts <- runDB $ selectList [] [Desc BlogPostId]
defaultLayout $ do
$(widgetFile "posts/index")
|
roggenkamps/steveroggenkamp.com
|
Handler/Posts.hs
|
bsd-3-clause
| 222
| 0
| 12
| 45
| 68
| 34
| 34
| -1
| -1
|
{-# LANGUAGE RebindableSyntax,
PostfixOperators,
ScopedTypeVariables,
DeriveGeneric,
GeneralizedNewtypeDeriving,
TypeFamilies,
TypeOperators,
ConstraintKinds,
DefaultSignatures,
MultiParamTypeClasses,
FlexibleContexts,
FlexibleInstances,
UndecidableInstances
#-}
-- TODO: remove the above pragma
module AERN2.AnalyticMV.Type
-- (
-- Analytic(..)
-- , UnitInterval (..)
-- , toUnitInterval
-- , fromUnitInterval
-- , lift1UI
-- , ana_derivative
-- )
where
import MixedTypesNumPrelude
import Debug.Trace
import Data.IORef
import System.IO.Unsafe
import Text.Printf
import qualified Data.Map as Map
import qualified Data.List as List
import Data.Function.Memoize
import AERN2.MP.Ball
-- import AERN2.MP.Dyadic
import AERN2.RealFun.Operations
-- import AERN2.Real
data PowS =
PowS
{
pows_x0 :: V MPBall
, pows_k :: Integer
, pows_A :: Integer
, pows_terms :: (MultiIndex -> MPBall)
}
deriving Show
type V a = [a]
type MultiIndex = V Integer
debug_print_PowS :: String -> PowS -> PowS
debug_print_PowS name pw =
pw { pows_terms = terms }
where
terms m = (trace msg . pows_terms pw) m
where
msg = "term of " ++ name ++ ": " ++ show m
type PowS_Sumup = Map.Map MultiIndex Integer
debug_sumup_PowS :: IORef PowS_Sumup -> PowS -> PowS
debug_sumup_PowS sumupRef pw =
pw { pows_terms = terms }
where
terms m = (logM . pows_terms pw) m
where
logM v =
unsafePerformIO $ do
modifyIORef sumupRef $ Map.insertWith (+) m 1
return v
{- POLYNOMIAL to PowS conversion -}
newtype P = P (Map.Map MultiIndex MPBall)
instance Show P where
show (P p) =
List.intercalate " + " $ map showTerm $ Map.toAscList p
where
showTerm (m, c) = show (c) ++ concat (map showV $ zip [1..] m)
showV (_i,0) = ""
showV (i,1) = printf "x%d" i
showV (i,n) = printf "x%d^%d" i n
{- P arithmetic -}
x_d_i_n :: Integer -> Integer -> Integer -> P
x_d_i_n d i n = P $ Map.singleton (m_d_i_n d i n) (mpBall 1)
constP :: Integer -> P
constP d = P $ Map.singleton (replicate d 0) (mpBall 1)
vars :: Integer -> [P]
vars d =
[ x_d_i_n d i 1 | i<-[1..d]]
_x1,_x2 :: P
[_x1, _x2] = vars 2
m_d_i_n :: Integer -> Integer -> Integer -> MultiIndex
m_d_i_n d i n =
take (i-1) (repeat 0) ++ [n] ++ (take (d-i) (repeat 0))
instance CanAddAsymmetric P P where
add (P p1) (P p2) = P $ Map.unionWith (+) p1 p2
instance CanNeg P where
negate (P p) = P (Map.map negate p)
instance CanAddAsymmetric P MPBall where
type AddType P MPBall = P
add (P p1) r2 = P $ Map.insertWith (+) m0000 r2 p1
where
d = length (fst $ Map.findMin p1)
m0000 = replicate d 0
instance CanMulAsymmetric MPBall P where
type MulType MPBall P = P
mul r1 (P p2) = P $ Map.map (* r1) p2
instance CanMulAsymmetric Rational P where
type MulType Rational P = P
mul r1 (P p2) = P $ Map.map (* r1) p2
instance CanMulAsymmetric Integer P where
type MulType Integer P = P
mul r1 (P p2) = P $ Map.map (* r1) p2
instance CanMulAsymmetric P P where
mul (P p1) (P p2) = foldl1 (+) pairs
where
pairs =
[
P (Map.fromList [(zipWith (+) m1 m2, c1 * c2)])
|
(m1,c1) <- (Map.toList p1) , (m2,c2) <- (Map.toList p2)
]
{- P translation to centre x0 -}
translate_P :: P -> V MPBall -> P
translate_P (P p) x0 =
foldl1 (+) $ map evalT $ Map.toList p
where
d = length x0
evalT (m,c)
| null varVals = c * (constP d)
| otherwise = c * (foldl1 (*) varVals)
where
varVals = map evalV $ filter (\(_,n,_) -> n > 0) $ zip3 [1..] m x0
evalV (i,n,x0i) =
foldl1 (*) $ replicate n $ xi + x0i
where
xi = x_d_i_n d i 1
type Analytic = V MPBall -> PowS
poly_Analytic :: P -> Analytic
poly_Analytic pp x0 =
PowS {
pows_x0 = x0,
pows_k = 1,
pows_A = snd $ integerBounds $ (maximum (map abs $ Map.elems tp)),
pows_terms = terms
}
where
(P tp) = translate_P pp x0
terms m =
case Map.lookup m tp of
Just c -> c
_ -> mpBall 0
{- Example Analytic functions -}
{-
Define sine using a 2-variable linear ODE.
y1' = y2
y2' = -y1
y1(0) = 0
y2(0) = 1
(apply (head $ ode_step_Analytic [_ode_sine_1, _ode_sine_2] (rational 0) [mpBall 0, mpBall 1]) [mpBall 0.1]) ? (bitsS 10)
-}
_ode_sine_1 :: Analytic
_ode_sine_1 =
poly_Analytic y2
-- debug_PowS "_ode_sine_1" . poly_Analytic y2
where
[_y1, y2] = vars 2
_ode_sine_2 :: Analytic
_ode_sine_2 =
poly_Analytic (-y1)
where
[y1, _y2] = vars 2
_solve_sine_1 :: (CanBeRational t) => Precision -> t -> IO MPBall
_solve_sine_1 p t =
do
sumupRef <- newIORef (Map.empty)
let _ode_sine_1' = debug_sumup_PowS sumupRef . _ode_sine_1
let result =
(head $ fst $ ode_Analytic [_ode_sine_1', _ode_sine_2] (rational 0) [mpBallP p 0, mpBallP p 1] (rational t))
print result
sumup <- readIORef sumupRef
mapM_ print $ Map.toAscList sumup
return result
-- usage: (apply _exp1_0 [mpBall 0.5]) ? (bitsS 100)
_exp1_0 :: PowS
_exp1_0 =
PowS {
pows_x0 = [mpBall 0],
pows_k = 1,
pows_A = 1,
pows_terms = terms
}
where
terms = memoFix aux
where
aux :: (([Integer] -> MPBall) -> [Integer] -> MPBall)
aux _trms [0] = mpBall 1
aux trms [m1] = (1/!(mpBall m1))*(trms [m1-1])
aux _ _ = error "unary _exp1_0 used illegally"
{- EVALUATION -}
sum1 :: PowS -> MPBall -> MPBall
sum1 f x1 =
updateRadius (+ (errorBound r)) c
where
p = getPrecision x1
a = pows_A f
k = pows_k f
(x0_1:_) = pows_x0 f
a_m m' = pows_terms f [m']
q = (abs (x1-x0_1)) * k
r_prelim = mpBall $ radius x1
r
| r_prelim !>! 0 = r_prelim
| otherwise = (mpBallP p 0.5)^!(integer $ getPrecision x1)
m
| r ?==? 0 || q ?==? 0 = 0
| otherwise =
max 0 $ (snd $ integerBounds $ (~!) $ (log ((1-q)*r/a))/(log q)) - 1
c = horn m (a_m m)
horn 0 y = y
horn i y = horn (i - 1) (y * (x1 - x0_1) + (a_m (i - 1)))
-- realLim :: (Integer -> MPBall) -> (Integer -> MPBall) -> MPBall
-- realLim xe_n err_n =
-- newCR "lim" [] makeQ
-- where
-- makeQ _me_src acc@(AccuracySG s _) = h 0
-- where
-- h k
-- | kthOk =
-- centreAsBall ((xe_n k) ? (acc + 1)) -- TODO: is this correct?
-- + (mpBall (0, (dyadic 0.5)^!(fromAccuracy s)))
-- | otherwise =
-- h (k + 1)
-- where
-- kthError :: MPBall
-- kthError = (err_n k) ? (acc + 2)
-- kthOk :: Bool
-- kthOk =
-- (kthError <= 0.5^!((fromAccuracy s) + 1)) == Just True
sigma :: PowS -> MPBall -> PowS
sigma f x1 =
f {
pows_x0 = x0_rest,
pows_A = a,
pows_terms = terms
}
where
(x0_1:x0_rest) = pows_x0 f
a = pows_A f
k = pows_k f
terms ms =
sum1 f1 x1
where
f1 =
f {
pows_x0 = [x0_1],
pows_A = a * k ^! (sum ms),
pows_terms = \[m0] -> pows_terms f (m0 : ms)
}
{-- EVALUATION --}
instance CanApply PowS (V MPBall) where
type ApplyType PowS (V MPBall) = MPBall
apply f x =
case x of
[] -> error "CanApply PowS does not support 0-dimensional PowS"
[x1] -> sum1 f x1
(x1 : xs) -> apply (sigma f x1) xs
apply0 :: PowS -> MPBall
apply0 f = (pows_terms f m0000)
where
d = length (pows_x0 f)
m0000 = replicate d 0
{-- ADDITION --}
-- assuming that both power series use the same centre
instance CanAddAsymmetric PowS PowS where
type AddType PowS PowS = PowS
add f1 f2 =
PowS {
pows_x0 = x0_1,
pows_k = max k1 k2,
pows_A = a1 + a2,
pows_terms = memoize terms
}
where
x0_1 = pows_x0 f1
a1 = pows_A f1
a2 = pows_A f2
k1 = pows_k f1
k2 = pows_k f2
terms1 = pows_terms f1
terms2 = pows_terms f2
terms m = terms1 m + terms2 m
{-- SCALING --}
-- assuming that both power series use the same centre
instance CanMulAsymmetric Rational PowS where
type MulType Rational PowS = PowS
mul q1 f2 =
PowS {
pows_x0 = x0,
pows_k = k,
pows_A = max 1 $ ceiling $ a*(abs q1),
pows_terms = memoize terms
}
where
x0 = pows_x0 f2
a = pows_A f2
k = pows_k f2
terms2 = pows_terms f2
terms m = q1 * terms2 m
instance CanMulAsymmetric Integer PowS where
type MulType Integer PowS = PowS
mul n1 f2 = mul (rational n1) f2
{-- MULTIPLICATION --}
-- assuming that both power series use the same centre
instance CanMulAsymmetric PowS PowS where
type MulType PowS PowS = PowS
mul f1 f2 =
PowS {
pows_x0 = x0_1,
pows_k = max k1 k2,
pows_A = a1 * a2,
pows_terms = memoize terms
}
where
x0_1 = pows_x0 f1
a1 = pows_A f1
a2 = pows_A f2
k1 = pows_k f1
k2 = pows_k f2
terms1 = pows_terms f1
terms2 = pows_terms f2
terms = aux [] []
where
aux m1 m2 [] = terms1 m1 * terms2 m2
aux m1 m2 (m:ms) =
sum $ [ aux (m1 ++ [i]) (m2 ++ [m-i]) ms | i <- [0..m] ]
{-- DERIVATIVE --}
deriv_powS :: PowS -> Integer -> PowS
deriv_powS f j =
PowS {
pows_x0 = x0,
pows_k = 2*k,
pows_A = a*k,
pows_terms = memoize terms
}
where
x0 = pows_x0 f
a = pows_A f
k = pows_k f
terms m =
(mj+1) * (pows_terms f m')
where
mj = m !! (j-1)
m' = take (j-1) m ++ [mj+1] ++ drop j m
{-- ODE one step --}
ode_step_powS :: V PowS -> Rational -> V MPBall -> V PowS
ode_step_powS f t0 y0 = map step_i [1..d]
-- works only with y0 = x0 (where x0 is the centre of f)
where
d = length f
m0000 = replicate d 0
p = getPrecision (head y0)
step_i i =
PowS {
pows_x0 = [mpBallP p t0],
pows_k = a*k,
pows_A = 1,
pows_terms = terms
}
where
-- x0 = pows_x0 (head f) -- all components of the field must have the same centre
a = maximum $ map pows_A f
k = maximum $ map pows_k f
terms [m] = apply0 (fim m)
terms _ = error "bad terms"
fi0 =
PowS {
pows_x0 = y0,
pows_k = 1,
pows_A = 1,
pows_terms = terms_i
}
where
terms_i mx
| mx == mx_i = mpBall 1
| mx == m0000 = y0 !! (i-1)
| otherwise = mpBall 0
mx_i = m_d_i_n d i 1
fimP1 m fim'' =
(1/!(m+1))*(foldl1 (+) [ (deriv_powS fim'' j) * fj | (j, fj) <- zip [1..d] f])
fim' _ 0 = fi0
fim' fim'' m = fimP1 (m-1) (fim'' (m-1))
fim = memoFix fim'
{- Multi-step ODE solving (polynomial only for now) -}
ode_step_Analytic :: V Analytic -> Rational -> V MPBall -> V PowS
ode_step_Analytic fA t0 y0 = ode_step_powS fPowS t0 y0
where
fPowS = map ($ y0) fA
ode_Analytic :: V Analytic -> Rational -> V MPBall -> Rational -> (V MPBall, [(Rational, V MPBall, V PowS)])
ode_Analytic fA t00 y00 tE = aux [] t00 y00
where
p = getPrecision $ head y00
aux prevSegs t0 y0
| tE <= t1 = (map (flip apply [mpBallP p tE]) seg, reverse $ (t0,y0,seg) : prevSegs)
| otherwise = aux ((t0,y0,seg):prevSegs) t1 y1
where
seg = ode_step_Analytic fA t0 y0
k = maximum $ map pows_k seg
h = 1/!(256*k)
t1 = t0 + h
y1 = map (flip apply [mpBallP p t1]) seg
|
michalkonecny/aern2
|
aern2-fun-univariate/src/AERN2/AnalyticMV/Type.hs
|
bsd-3-clause
| 11,182
| 0
| 19
| 3,299
| 4,099
| 2,162
| 1,937
| 303
| 3
|
module Frenetic.NetCore.Reduce
( reduce
, isEmptyPredicate
, dnf
, flattenDNF
, flattenConj
, isEmptyConj
) where
import Frenetic.Common
import Data.List (nub, partition)
import Data.Maybe
import qualified Data.Set as Set
import qualified Data.Map as M
import qualified Frenetic.CoFiniteSet as CFS
import Frenetic.NetCore.Types
import Frenetic.NetCore.Short
import Frenetic.NetCore.Util (Field (..))
import Frenetic.Pattern
import Debug.Trace
import Nettle.Ethernet.EthernetAddress (unpackEth64)
import Nettle.IPv4.IPAddress (IPAddressPrefix (..), IPAddress (..))
-- |Reduce the policy to produce a smaller, more readable policy
reduce = reducePo
reducePo :: Policy -> Policy
reducePo PoBottom = PoBottom
reducePo (PoBasic pr act) = if pr' == None || act == mempty
then PoBottom
else PoBasic pr' act' where
pr' = pr
act' = act
-- Note that because we use multiset forwarding semantics, we CANNOT do common
-- subexpression reduction on unions.
reducePo (PoUnion p1 p2) =
let p1' = reducePo p1
p2' = reducePo p2
in case (p1', p2') of
(PoBottom, _) -> p2'
(_, PoBottom) -> p1'
otherwise -> PoUnion p1' p2' where
reducePo (Restrict pol pred) = Restrict (reducePo pol) pred
-- mjr: I'm sure there's something we can do here
reducePo (Sequence pol1 pol2) = Sequence (reducePo pol1) (reducePo pol2)
reducePo (SendPackets chan) = SendPackets chan
isNonNegatedAtom pred = case pred of
DlSrc _ -> True
DlDst _ -> True
DlTyp _ -> True
DlVlan _ -> True
DlVlanPcp _ -> True
NwSrc _ -> True
NwDst _ -> True
NwProto _ -> True
NwTos _ -> True
TpSrcPort _ -> True
TpDstPort _ -> True
IngressPort _ -> True
Switch _ -> True
Or _ _ -> False
And _ _ -> False
Not _ -> True
Any -> True
None -> True
isAtom (Not Any) = False
isAtom (Not None) = False
isAtom (Not x) = isNonNegatedAtom x
isAtom x = isNonNegatedAtom x
isConjunction (Or _ _) = False
isConjunction (And pr1 pr2) = isConjunction pr1 && isConjunction pr2
isConjunction x = isAtom x
isConjOpt (Or _ _) = False
isConjOpt _ = True
flattenDNF :: Predicate -> [[Predicate]]
flattenDNF (Or pr1 pr2) = flattenDNF pr1 ++ flattenDNF pr2
flattenDNF conj = case flattenConj conj of
Just atoms -> if isEmptyConj atoms then [] else [atoms]
Nothing -> []
flattenConj :: Predicate -> Maybe [Predicate]
flattenConj (And pr1 pr2) = do
atoms1 <- flattenConj pr1
atoms2 <- flattenConj pr2
return (atoms1 ++ atoms2)
flattenConj None = Nothing
flattenConj Any = Just []
flattenConj atom = Just [atom]
atomKV :: Predicate -> (Field, CFS.CoFiniteSet Integer)
atomKV (DlSrc x) = (FDlSrc, CFS.singleton (fromIntegral (unpackEth64 x)))
atomKV (DlDst x) = (FDlDst, CFS.singleton (fromIntegral (unpackEth64 x)))
atomKV (DlTyp x) = (FDlTyp, CFS.singleton (fromIntegral x))
atomKV (DlVlan Nothing) = (FDlVlan, CFS.singleton 0xffff)
atomKV (DlVlan (Just x)) = (FDlVlan, CFS.singleton (fromIntegral x))
atomKV (DlVlanPcp x) = (FDlVlanPcp, CFS.singleton (fromIntegral x))
atomKV (NwSrc (IPAddressPrefix (IPAddress x) 32)) =
(FNwSrc, CFS.singleton (fromIntegral x))
atomKV (NwDst (IPAddressPrefix (IPAddress x) 32)) =
(FNwDst, CFS.singleton (fromIntegral x))
atomKV (NwProto x) = (FNwProto, CFS.singleton (fromIntegral x))
atomKV (NwTos x) = (FNwTos, CFS.singleton (fromIntegral x))
atomKV (TpSrcPort x) = (FTpSrc, CFS.singleton (fromIntegral x))
atomKV (TpDstPort x) = (FTpDst, CFS.singleton (fromIntegral x))
atomKV (IngressPort x) = (FInPort, CFS.singleton (fromIntegral x))
atomKV (Switch x) = (FSwitch, CFS.singleton (fromIntegral x))
atomKV (Not (DlSrc x)) = (FDlSrc, CFS.excludes (fromIntegral (unpackEth64 x)))
atomKV (Not (DlDst x)) = (FDlDst, CFS.excludes (fromIntegral (unpackEth64 x)))
atomKV (Not (DlTyp x)) = (FDlTyp, CFS.excludes (fromIntegral x))
atomKV (Not (DlVlan Nothing)) = (FDlVlan, CFS.excludes 0xffff)
atomKV (Not (DlVlan (Just x))) = (FDlVlan, CFS.excludes (fromIntegral x))
atomKV (Not (DlVlanPcp x)) = (FDlVlanPcp, CFS.excludes (fromIntegral x))
atomKV (Not (NwSrc (IPAddressPrefix (IPAddress x) 32))) =
(FNwSrc, CFS.excludes (fromIntegral x))
atomKV (Not (NwDst (IPAddressPrefix (IPAddress x) 32))) =
(FNwDst, CFS.excludes (fromIntegral x))
atomKV (Not (NwProto x)) = (FNwProto, CFS.excludes (fromIntegral x))
atomKV (Not (NwTos x)) = (FNwTos, CFS.excludes (fromIntegral x))
atomKV (Not (TpSrcPort x)) = (FTpSrc, CFS.excludes (fromIntegral x))
atomKV (Not (TpDstPort x)) = (FTpDst, CFS.excludes (fromIntegral x))
atomKV (Not (IngressPort x)) = (FInPort, CFS.excludes (fromIntegral x))
atomKV (Not (Switch x)) = (FSwitch, CFS.excludes (fromIntegral x))
atomKV _ = error "atomKV: not an atom"
isEmptyConj :: [Predicate] -> Bool
isEmptyConj atoms = loop M.empty atoms where
loop :: Map Field (CFS.CoFiniteSet Integer) -> [Predicate] -> Bool
loop _ [] = False
loop sets (atom : rest) =
let (k, v) = atomKV atom
in case M.lookup k sets of
Nothing -> loop (M.insert k v sets) rest
Just set -> case CFS.null (CFS.inter set v) of
True -> True
False -> loop (M.insert k (CFS.inter set v) sets) rest
isEmptyPredicate :: Predicate -> Bool
isEmptyPredicate pred = null (flattenDNF (dnf pred))
dnf :: Predicate -> Predicate
dnf pr = case pr of
Not Any -> None
Not None -> Any
Not (Not pr') -> dnf pr'
Not (Or pr1 pr2) -> dnf (And (Not pr1) (Not pr2))
Not (And pr1 pr2) -> dnf (Or (Not pr1) (Not pr2))
Or pr1 pr2 -> Or (dnf pr1) (dnf pr2)
And x (Or y z) ->
Or (dnf (And x y)) (dnf (And x z))
And (Or x y) z ->
Or (dnf (And x z)) (dnf (And y z))
And x y ->
let x' = dnf x
y' = dnf y in
if isConjunction x' && isConjunction y' then
And x' y'
else
dnf (And x' y')
otherwise ->
if isAtom pr then pr else error ("missing case in dnf " ++ show pr)
disjList (Or pr1 pr2) = Or (disjList pr1) (disjList pr2)
disjList x = case simplify (conjList x) of
[] -> None
x:xs -> foldl And x xs
conjList (And pr1 pr2) = conjList pr1 ++ conjList pr2
conjList x = [x]
isAny Any = True
isAny _ = False
isNone None = True
isNone _ = False
-- Simplifies a conjunction
simplify :: [Predicate] -> [Predicate]
simplify atomList = Set.toList result
where result = if None `Set.member` atoms then Set.empty else atoms
atoms = Set.fromList (filter isAny atomList)
|
frenetic-lang/netcore-1.0
|
src/Frenetic/NetCore/Reduce.hs
|
bsd-3-clause
| 6,396
| 0
| 20
| 1,315
| 2,767
| 1,406
| 1,361
| 160
| 18
|
module Card (
Card(..)
, createCard
, getPicture
, startFlipAnimation
, updateFlipAnimation
, stopFlipAnimation
) where
import LivePicture
import Graphics.Gloss
-- | Data which represents one Card that is clickable and can be rendered on the screen.
data Card = Card
{ front :: LivePicture -- | front face of the card
, back :: LivePicture -- | back face of the card
, isFlipped :: Bool -- | flag which indicates whether the card is flipped face up.
, isAnimating :: Bool -- | flag that indicates whether the card is currently animating (flipping from one face to the other)
, animationDuration :: Float -- | duration of the flip animation
, animationTimePassed :: Float -- | flip animation time passed
, cardId :: Int -- | card id used for differentiating between cards.
} deriving (Show)
-- | Default duration of the flip animation.
defaultAnimationDuration :: Float
defaultAnimationDuration = 0.8
-- | creates and returns a card based on given arguments.
createCard :: Picture -> Picture -> Int -> Int -> Position -> Int -> Card
createCard frontPicture backPicture width height (x, y) matchNumber = Card
{ front = LivePicture.create frontPicture width height (x ,y)
, back = LivePicture.create backPicture width height (x, y)
, isFlipped = False
, isAnimating = False
, animationDuration = defaultAnimationDuration
, animationTimePassed = 0
, cardId = matchNumber
}
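-- A minimal usage sketch (not part of the original module): building a card
-- from two solid-colour Gloss rectangles. The pictures, sizes, position and
-- card id below are placeholder values chosen for illustration, and it is
-- assumed here that 'Position' is the pair of Floats used by 'translate'.
_exampleCard :: Card
_exampleCard = createCard frontPic backPic 100 150 (0, 0) 1
  where
    frontPic = color red (rectangleSolid 100 150) -- face shown when flipped up
    backPic = color blue (rectangleSolid 100 150) -- face shown by default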
-- | gets a renderable Gloss picture of a given card.
getPicture :: Card -> Picture
getPicture card@(Card front back isFlipped isAnimating animationDuration animationTimePassed cardId) = if not isFlipped
then
if not isAnimating
then
picture back
else
reverseFlipPicture card
else
if not isAnimating
then
picture front
else
flipPicture card
-- | Scales the picture by the given factors around the given position: it translates that position to the coordinate system origin, scales there, and translates back, returning the scaled renderable picture.
-- | Used for the flip animation.
scaleAroundOrigin :: Position -> Float -> Float -> Picture -> Picture
scaleAroundOrigin (x, y) scaleFactorX scaleFactorY picture = (translate x y . scale scaleFactorX scaleFactorY . translate (-x) (-y)) $ picture
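-- A small worked example (illustrative only): scaling around the point
-- (100, 50) first translates that point to the origin, scales there, and
-- translates back, so the scaled picture stays anchored at (100, 50):
--
-- scaleAroundOrigin (100, 50) 0.5 1 pic
-- == translate 100 50 (scale 0.5 1 (translate (-100) (-50) pic))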
-- | Executes the flip animation (from back to front) based on the current Card state and its animation state.
flipPicture :: Card -> Picture
flipPicture (Card front back isFlipped isAnimating animationDuration animationTimePassed cardId)
| animationTimePassed < treshold = let scaleX = (treshold - animationTimePassed) / treshold in scaleAroundOrigin (position back) scaleX 1 (picture back)
| animationTimePassed > treshold && animationTimePassed < animationDuration = let scaleX = (animationTimePassed - treshold) / treshold in scaleAroundOrigin (position front) scaleX 1 (picture front)
| otherwise = picture front
where
treshold = animationDuration / 2
-- | Executes the flip animation (from front to back) based on the current Card state and its animation state.
reverseFlipPicture :: Card -> Picture
reverseFlipPicture (Card front back isFlipped isAnimating animationDuration animationTimePassed cardId)
| animationTimePassed < treshold = let scaleX = (treshold - animationTimePassed) / treshold in scaleAroundOrigin (position front) scaleX 1 (picture front)
| animationTimePassed > treshold && animationTimePassed < animationDuration = let scaleX = (animationTimePassed - treshold) / treshold in scaleAroundOrigin (position back) scaleX 1 (picture back)
| otherwise = picture back
where treshold = animationDuration / 2
-- | Used for starting the flip animation.
startFlipAnimation :: Card -> Card
startFlipAnimation card@(Card front back isFlipped _ _ _ _) = card { isFlipped = isFlipped', isAnimating = True}
where
isFlipped' = not isFlipped
-- | Used for updating current flip animation state.
updateFlipAnimation :: Float -> Card -> Card
updateFlipAnimation seconds card = card { animationTimePassed = animationTimePassed' }
where
animationTimePassed' = animationTimePassed card + seconds
-- | Used for stopping the flip animation.
stopFlipAnimation :: Card -> Card
stopFlipAnimation card = card { isAnimating = False, animationTimePassed = 0 }
|
stefanjanjic90/DroidThatYouAreLookingFor
|
Card.hs
|
bsd-3-clause
| 6,098
| 0
| 12
| 2,691
| 905
| 490
| 415
| 59
| 4
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
module Database.Relational.Schema.SQLite3Syscat.IndexList where
import Data.Int (Int64)
import Database.Record.TH (derivingShow)
import Database.Relational.Query.TH (defineTableTypesAndRecordDefault)
$(defineTableTypesAndRecordDefault
"pragma" "index_list"
[
-- pragma "main.index_list"
-- column type NULL
-- --------------------- ------------------- ------
-- seq integer No
("seq", [t|Int64|]),
-- name text No
("name", [t|String|]),
-- unique integer No
("unique", [t|Int64|])
]
[derivingShow])
|
yuga/haskell-relational-record-driver-sqlite3
|
src/Database/Relational/Schema/SQLite3Syscat/IndexList.hs
|
bsd-3-clause
| 709
| 0
| 9
| 197
| 108
| 75
| 33
| 13
| 0
|
{-# LANGUAGE TypeApplications #-}
module Test.Pos.Chain.Update.CborSpec
( spec
) where
import Universum
import Test.Hspec (Spec, describe)
import Pos.Chain.Update (ApplicationName (..), BlockVersion (..),
BlockVersionData (..), SoftforkRule (..),
SoftwareVersion (..))
import Test.Pos.Binary.Helpers (binaryTest)
import Test.Pos.Chain.Update.Arbitrary ()
spec :: Spec
spec = describe "Cbor Bi instances" $ do
binaryTest @ApplicationName
binaryTest @BlockVersion
binaryTest @BlockVersionData
binaryTest @SoftforkRule
binaryTest @SoftwareVersion
|
input-output-hk/pos-haskell-prototype
|
chain/test/Test/Pos/Chain/Update/CborSpec.hs
|
mit
| 676
| 0
| 9
| 187
| 154
| 91
| 63
| 17
| 1
|
{-# LANGUAGE OverloadedStrings, CPP #-}
module Bead.View.Content.Home.Data where
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Time
import Bead.View.Content hiding (userState)
import Bead.View.Content.SubmissionTable
type ActiveAssignment = (AssignmentKey, AssignmentDesc, SubmissionInfo)
activeAsgKey (key,_desc,_info) = key
activeAsgDesc (_key,desc,_info) = desc
activeAsgInfo (_key,_desc,info) = info
type StudentAssignments = Map Course [ActiveAssignment]
-- Returns True if the student is not registered in any course, otherwise False
isNotRegistered :: StudentAssignments -> Bool
isNotRegistered = Map.null
-- Returns all the ActiveAssignment lists grouped by their courses or groups
toActiveAssignmentList :: StudentAssignments -> [ (Course, [ActiveAssignment]) ]
toActiveAssignmentList = Map.toList
-- Returns a list of all the ActiveAssignments
toAllActiveAssignmentList :: StudentAssignments -> [ActiveAssignment]
toAllActiveAssignmentList = foldl (++) [] . map snd . toActiveAssignmentList
data HomePageData = HomePageData {
userState :: UserState
, hasCourses :: Bool -- True if the user has administrated courses
, hasGroups :: Bool -- True if the user has administrated groups
  , assignments :: StudentAssignments -- Empty map means that the user is not registered in any course
, sTables :: [SubmissionTableInfo]
    -- | The converter function that converts a given UTC time into the user's local timezone
, timeConverter :: UserTimeConverter
, submissionTableCtx :: SubmissionTableContext
, now :: UTCTime
}
administratedCourseMap = stcAdminCourses . submissionTableCtx
administratedGroupMap = stcAdminGroups . submissionTableCtx
courseTestScripts = stcCourseTestScriptInfos . submissionTableCtx
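-- A minimal sketch, not part of the original module: the aggregation in
-- 'toAllActiveAssignmentList' can equivalently be written with 'concatMap',
-- which may read more directly.
allActiveAssignments :: StudentAssignments -> [ActiveAssignment]
allActiveAssignments = concatMap snd . toActiveAssignmentList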
|
pgj/bead
|
src/Bead/View/Content/Home/Data.hs
|
bsd-3-clause
| 1,819
| 0
| 9
| 304
| 307
| 191
| 116
| 30
| 1
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
module Graphics.GL.Pal.Shape where
import Graphics.GL
import Control.Monad.Reader
import Graphics.GL.Pal.Types
import Graphics.GL.Pal.InferUniforms
import Graphics.GL.Pal.WithActions
import Graphics.GL.Pal.AssignAttribute
import Graphics.GL.Pal.ArrayBuffer
import Graphics.GL.Pal.Shader
import Data.Data
import Foreign
newVAO :: MonadIO m => m VertexArrayObject
newVAO = VertexArrayObject <$> overPtr (glGenVertexArrays 1)
-- | A shape is the combination of a VAO, a program,
-- and the collection of uniforms for that program.
makeShape :: (MonadIO m, Data u) => Geometry -> Program -> m (Shape u)
makeShape sGeometry@Geometry{..} sProgram = do
-- Setup a VAO
sVAO <- newVAO
withVAO sVAO $ do
withArrayBuffer geoPositions $ assignFloatAttribute sProgram "aPosition" GL_FLOAT 3
withArrayBuffer geoNormals $ assignFloatAttribute sProgram "aNormal" GL_FLOAT 3
withArrayBuffer geoTangents $ assignFloatAttribute sProgram "aTangent" GL_FLOAT 3
withArrayBuffer geoUVs $ assignFloatAttribute sProgram "aUV" GL_FLOAT 2
bindElementArrayBuffer geoIndices
sUniforms <- acquireUniforms sProgram
return Shape{..}
withShape :: MonadIO m => Shape t -> ReaderT (Shape t) m a -> m a
withShape shape@Shape{..} action = do
useProgram sProgram
withVAO sVAO (runReaderT action shape)
-- | Must be called from within withShape
drawShape :: (MonadReader (Shape u) m, MonadIO m) => m ()
drawShape = do
Shape{..} <- ask
let indexCount = geoIndexCount sGeometry
glDrawElements GL_TRIANGLES indexCount GL_UNSIGNED_INT nullPtr
drawShapeInstanced :: (MonadReader (Shape u) m, MonadIO m) => GLsizei -> m ()
drawShapeInstanced instanceCount = do
Shape{..} <- ask
let indexCount = geoIndexCount sGeometry
glDrawElementsInstanced GL_TRIANGLES indexCount GL_UNSIGNED_INT nullPtr instanceCount
drawShapeInstancedBaseInstance :: (MonadReader (Shape u) m, MonadIO m) => GLsizei -> GLuint -> m ()
drawShapeInstancedBaseInstance instanceCount baseInstance = do
Shape{..} <- ask
let indexCount = geoIndexCount sGeometry
glDrawElementsInstancedBaseInstance GL_TRIANGLES indexCount GL_UNSIGNED_INT nullPtr instanceCount baseInstance
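-- | A minimal usage sketch, not part of the original module: bind a shape's
-- program and VAO once and issue a single indexed draw call for it.
renderShapeOnce :: MonadIO m => Shape u -> m ()
renderShapeOnce shape = withShape shape drawShape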
|
lukexi/gl-pal
|
src/Graphics/GL/Pal/Shape.hs
|
bsd-3-clause
| 2,297
| 0
| 11
| 403
| 615
| 306
| 309
| 45
| 1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
--------------------------------------------------------------------------------
-- |
-- Module : Data.Comp.Multi.Generic
-- Copyright : (c) 2011 Patrick Bahr
-- License : BSD3
-- Maintainer : Patrick Bahr <paba@diku.dk>
-- Stability : experimental
-- Portability : non-portable (GHC Extensions)
--
-- This module defines type generic functions and recursive schemes
-- along the lines of the Uniplate library. All definitions are
-- generalised versions of those in "Data.Comp.Generic".
--
--------------------------------------------------------------------------------
module Data.Comp.Multi.Generic where
import Control.Monad
import Data.Comp.Multi.HFoldable
import Data.Comp.Multi.HFunctor
import Data.Comp.Multi.HTraversable
import Data.Comp.Multi.Sum
import Data.Comp.Multi.Term
import GHC.Exts
import Prelude
import Data.Maybe
-- | This function returns a list of all subterms of the given
-- term. This function is similar to Uniplate's @universe@ function.
subterms :: forall f . HFoldable f => Term f :=> [E (Term f)]
subterms t = build (f t)
where f :: Term f :=> (E (Term f) -> b -> b) -> b -> b
f t cons nil = E t `cons` hfoldl (\u s -> f s cons u) nil (unTerm t)
-- | This function returns a list of all subterms of the given term
-- that are constructed from a particular functor.
subterms' :: forall f g . (HFoldable f, g :<: f) => Term f :=> [E (g (Term f))]
subterms' (Term t) = build (f t)
where f :: f (Term f) :=> (E (g (Term f)) -> b -> b) -> b -> b
f t cons nil = let rest = hfoldl (\u (Term s) -> f s cons u) nil t
in case proj t of
Just t' -> E t' `cons` rest
Nothing -> rest
-- | This function transforms every subterm according to the given
-- function in a bottom-up manner. This function is similar to
-- Uniplate's @transform@ function.
transform :: forall f . (HFunctor f) => (Term f :-> Term f) -> Term f :-> Term f
transform f = run
where run :: Term f :-> Term f
run = f . Term . hfmap run . unTerm
-- | Monadic version of 'transform'.
transformM :: forall f m . (HTraversable f, Monad m) =>
NatM m (Term f) (Term f) -> NatM m (Term f) (Term f)
transformM f = run
where run :: NatM m (Term f) (Term f)
run t = f =<< liftM Term (hmapM run $ unTerm t)
query :: HFoldable f => (Term f :=> r) -> (r -> r -> r) -> Term f :=> r
-- query q c = run
-- where run i@(Term t) = foldl (\s x -> s `c` run x) (q i) t
query q c i@(Term t) = hfoldl (\s x -> s `c` query q c x) (q i) t
subs :: HFoldable f => Term f :=> [E (Term f)]
subs = query (\x-> [E x]) (++)
subs' :: (HFoldable f, g :<: f) => Term f :=> [E (g (Term f))]
subs' = mapMaybe pr . subs
where pr (E v) = fmap E (project v)
-- | This function computes the generic size of the given term,
-- i.e. its number of subterm occurrences.
size :: HFoldable f => Cxt h f a :=> Int
size (Hole {}) = 0
size (Term t) = hfoldl (\s x -> s + size x) 1 t
-- | This function computes the generic depth of the given term.
depth :: HFoldable f => Cxt h f a :=> Int
depth (Hole {}) = 0
depth (Term t) = 1 + hfoldl (\s x -> s `max` depth x) 0 t
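-- | A first-order analogue, added for illustration only and not part of the
-- original module: 'size' and 'depth' specialised to an ordinary rose tree,
-- showing the recursion scheme that the higher-order versions generalise.
data Rose a = Rose a [Rose a]

roseSize :: Rose a -> Int
roseSize (Rose _ ts) = 1 + sum (map roseSize ts)

roseDepth :: Rose a -> Int
roseDepth (Rose _ ts) = 1 + foldl (\d t -> d `max` roseDepth t) 0 ts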
|
spacekitteh/compdata
|
src/Data/Comp/Multi/Generic.hs
|
bsd-3-clause
| 3,518
| 0
| 15
| 897
| 1,084
| 574
| 510
| 50
| 2
|
{-# LANGUAGE OverloadedStrings, DoAndIfThenElse #-}
-- | Description : Low-level ZeroMQ communication wrapper.
--
-- The "ZeroMQ" module abstracts away the low-level 0MQ based interface with IPython, replacing it
-- instead with a Haskell Channel based interface. The `serveProfile` function takes an IPython
-- profile specification and returns the channel interface to use.
module IHaskell.IPython.ZeroMQ (
ZeroMQInterface(..),
ZeroMQStdin(..),
serveProfile,
serveStdin,
ZeroMQEphemeralPorts,
withEphemeralPorts,
) where
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.Aeson
import qualified Data.ByteString.Lazy as LBS
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as Char
import Data.Char
import Data.Digest.Pure.SHA as SHA
import Data.Monoid ((<>))
import qualified Data.Text.Encoding as Text
import System.ZMQ4 as ZMQ4 hiding (stdin)
import Text.Read (readMaybe)
import IHaskell.IPython.Types
import IHaskell.IPython.Message.Parser
import IHaskell.IPython.Message.Writer
-- | The channel interface to the ZeroMQ sockets. All communication is done via Messages, which are
-- encoded and decoded into a lower level form before being transmitted to IPython. These channels
-- should functionally serve as high-level sockets which speak Messages instead of ByteStrings.
data ZeroMQInterface =
Channels
{
-- | A channel populated with requests from the frontend.
shellRequestChannel :: Chan Message
-- | Writing to this channel causes a reply to be sent to the frontend.
, shellReplyChannel :: Chan Message
-- | This channel is a duplicate of the shell request channel, though using a different backend
-- socket.
, controlRequestChannel :: Chan Message
-- | This channel is a duplicate of the shell reply channel, though using a different backend
-- socket.
, controlReplyChannel :: Chan Message
-- | Writing to this channel sends an iopub message to the frontend.
, iopubChannel :: Chan Message
-- | Key used to sign messages.
, hmacKey :: ByteString
}
data ZeroMQStdin =
StdinChannel
{ stdinRequestChannel :: Chan Message
, stdinReplyChannel :: Chan Message
}
-- | Create new channels for a ZeroMQInterface
newZeroMQInterface :: ByteString -> IO ZeroMQInterface
newZeroMQInterface key = do
shellReqChan <- newChan
shellRepChan <- newChan
controlReqChan <- dupChan shellReqChan
controlRepChan <- dupChan shellRepChan
iopubChan <- newChan
return $! Channels
{ shellRequestChannel = shellReqChan
, shellReplyChannel = shellRepChan
, controlRequestChannel = controlReqChan
, controlReplyChannel = controlRepChan
, iopubChannel = iopubChan
, hmacKey = key
}
-- | Start responding on all ZeroMQ channels used to communicate with IPython via the provided
-- profile. Return a set of channels which can be used to communicate with IPython in a more
-- structured manner.
serveProfile :: Profile -- ^ The profile specifying which ports and transport mechanisms to use.
-> Bool -- ^ Print debug output
-> IO ZeroMQInterface -- ^ The Message-channel based interface to the sockets.
serveProfile profile debug = do
channels <- newZeroMQInterface (signatureKey profile)
-- Create the context in a separate thread that never finishes. If withContext or withSocket
-- complete, the context or socket become invalid.
forkIO $ withContext $ \context -> do
-- Serve on all sockets.
forkIO $ serveSocket context Rep (hbPort profile) $ heartbeat channels
forkIO $ serveSocket context Router (controlPort profile) $ control debug channels
forkIO $ serveSocket context Router (shellPort profile) $ shell debug channels
-- The context is reference counted in this thread only. Thus, the last serveSocket cannot be
-- asynchronous, because otherwise context would be garbage collectable - since it would only be
-- used in other threads. Thus, keep the last serveSocket in this thread.
serveSocket context Pub (iopubPort profile) $ iopub debug channels
return channels
-- | Describes ports used when creating an ephemeral ZeroMQ session. Used to generate the ipython
-- JSON config file.
data ZeroMQEphemeralPorts =
ZeroMQEphemeralPorts
{ ephHbPort :: !Port
, ephControlPort :: !Port
, ephShellPort :: !Port
, ephIOPubPort :: !Port
, ephSignatureKey :: !ByteString
}
instance ToJSON ZeroMQEphemeralPorts where
toJSON ports =
object
[ "ip" .= ("127.0.0.1" :: String)
, "transport" .= TCP
, "control_port" .= ephControlPort ports
, "hb_port" .= ephHbPort ports
, "shell_port" .= ephShellPort ports
, "iopub_port" .= ephIOPubPort ports
, "key" .= Text.decodeUtf8 (ephSignatureKey ports)
]
parsePort :: String -> Maybe Int
parsePort s = readMaybe num
where
num = reverse (takeWhile isNumber (reverse s))
bindLocalEphemeralPort :: Socket a -> IO Int
bindLocalEphemeralPort socket = do
bind socket $ "tcp://127.0.0.1:*"
endpointString <- lastEndpoint socket
case parsePort endpointString of
Nothing ->
fail $ "internalError: IHaskell.IPython.ZeroMQ.bindLocalEphemeralPort encountered a port index that could not be interpreted as an int."
Just endpointIndex ->
return endpointIndex
-- | Run session for communicating with an IPython instance on ephemerally allocated ZMQ4 sockets.
-- The sockets will be closed when the callback returns.
withEphemeralPorts :: ByteString -- ^ HMAC encryption key
-> Bool -- ^ Print debug output
-> (ZeroMQEphemeralPorts
-> ZeroMQInterface
-> IO a) -- ^ Callback that takes the interface to the sockets.
-> IO a
withEphemeralPorts key debug callback = do
channels <- newZeroMQInterface key
-- Create the ZMQ4 context
withContext $ \context -> do
-- Create the sockets to communicate with.
withSocket context Rep $ \heartbeatSocket -> do
withSocket context Router $ \controlportSocket -> do
withSocket context Router $ \shellportSocket -> do
withSocket context Pub $ \iopubSocket -> do
-- Bind each socket to a local port, getting the port chosen.
hbPort <- bindLocalEphemeralPort heartbeatSocket
controlPort <- bindLocalEphemeralPort controlportSocket
shellPort <- bindLocalEphemeralPort shellportSocket
iopubPort <- bindLocalEphemeralPort iopubSocket
-- Create object to store ephemeral ports
let ports = ZeroMQEphemeralPorts { ephHbPort = hbPort, ephControlPort = controlPort, ephShellPort = shellPort, ephIOPubPort = iopubPort, ephSignatureKey = key }
            -- Launch actions to listen and relay messages between the channels and the sockets.
_ <- forkIO $ forever $ heartbeat channels heartbeatSocket
_ <- forkIO $ forever $ control debug channels controlportSocket
_ <- forkIO $ forever $ shell debug channels shellportSocket
_ <- forkIO $ checkedIOpub debug channels iopubSocket
-- Run callback function; provide it with both ports and channels.
callback ports channels
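-- | A minimal usage sketch, not part of the original module (the function
-- name is hypothetical): run a session on ephemerally bound ports and print
-- the JSON connection description that a frontend would read.
exampleEphemeralSession :: ByteString -> IO ()
exampleEphemeralSession key =
  withEphemeralPorts key False $ \ports _channels ->
    LBS.putStr (encode ports)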
serveStdin :: Profile -> IO ZeroMQStdin
serveStdin profile = do
reqChannel <- newChan
repChannel <- newChan
-- Create the context in a separate thread that never finishes. If withContext or withSocket
-- complete, the context or socket become invalid.
forkIO $ withContext $ \context ->
-- Serve on all sockets.
serveSocket context Router (stdinPort profile) $ \socket -> do
-- Read the request from the interface channel and send it.
readChan reqChannel >>= sendMessage False (signatureKey profile) socket
-- Receive a response and write it to the interface channel.
receiveMessage False socket >>= writeChan repChannel
return $ StdinChannel reqChannel repChannel
-- | Serve on a given socket in a separate thread. Bind the socket in the given context and then
-- loop the provided action, which should listen on the socket and respond to any events.
serveSocket :: SocketType a => Context -> a -> Port -> (Socket a -> IO b) -> IO ()
serveSocket context socketType port action = void $
withSocket context socketType $ \socket -> do
bind socket $ "tcp://127.0.0.1:" ++ show port
forever $ action socket
-- | Listener on the heartbeat port. Echoes back any data it was sent.
heartbeat :: ZeroMQInterface -> Socket Rep -> IO ()
heartbeat _ socket = do
-- Read some data.
request <- receive socket
-- Send it back.
send socket [] request
-- | Listener on the shell port. Reads messages and writes them to the shell request channel. For
-- each message, reads a response from the shell reply channel of the interface and sends it back
-- to the frontend.
shell :: Bool -> ZeroMQInterface -> Socket Router -> IO ()
shell debug channels socket = do
-- Receive a message and write it to the interface channel.
receiveMessage debug socket >>= writeChan requestChannel
-- Read the reply from the interface channel and send it.
readChan replyChannel >>= sendMessage debug (hmacKey channels) socket
where
requestChannel = shellRequestChannel channels
replyChannel = shellReplyChannel channels
-- | Listener on the control port. Reads messages and writes them to the control request channel.
-- For each message, reads a response from the control reply channel of the interface and sends it back
-- to the frontend.
control :: Bool -> ZeroMQInterface -> Socket Router -> IO ()
control debug channels socket = do
-- Receive a message and write it to the interface channel.
receiveMessage debug socket >>= writeChan requestChannel
-- Read the reply from the interface channel and send it.
readChan replyChannel >>= sendMessage debug (hmacKey channels) socket
where
requestChannel = controlRequestChannel channels
replyChannel = controlReplyChannel channels
-- | Send messages via the iopub channel. This reads messages from the ZeroMQ iopub interface
-- channel and then writes the messages to the socket.
iopub :: Bool -> ZeroMQInterface -> Socket Pub -> IO ()
iopub debug channels socket =
readChan (iopubChannel channels) >>= sendMessage debug (hmacKey channels) socket
-- | Attempt to send a message along the socket, returning true if successful.
trySendMessage :: Sender a => String -> Bool -> ByteString -> Socket a -> Message -> IO Bool
trySendMessage nm debug hmacKey socket message = do
let zmqErrorHandler :: ZMQError -> IO Bool
zmqErrorHandler e
        -- Ignore errors if we cannot send. We may want to forward this to the thread that tried to put the
-- message in the Chan initially.
| errno e == 38 = return False
| otherwise = throwIO e
(sendMessage debug hmacKey socket message >> return True) `catch` zmqErrorHandler
-- | Send messages via the iopub channel. This reads messages from the ZeroMQ iopub interface
-- channel and then writes the messages to the socket. This is a checked implementation which will
-- stop if the socket is closed.
checkedIOpub :: Bool -> ZeroMQInterface -> Socket Pub -> IO ()
checkedIOpub debug channels socket = do
msg <- readChan (iopubChannel channels)
cont <- trySendMessage "io" debug (hmacKey channels) socket msg
when cont $
checkedIOpub debug channels socket
-- | Receive and parse a message from a socket.
receiveMessage :: Receiver a => Bool -> Socket a -> IO Message
receiveMessage debug socket = do
-- Read all identifiers until the identifier/message delimiter.
idents <- readUntil "<IDS|MSG>"
-- Ignore the signature for now.
void next
headerData <- next
parentHeader <- next
metadata <- next
content <- next
when debug $ do
putStr "Header: "
Char.putStrLn headerData
putStr "Content: "
Char.putStrLn content
let message = parseMessage idents headerData parentHeader metadata content
return message
where
-- Receive the next piece of data from the socket.
next = receive socket
-- Read data from the socket until we hit an ending string. Return all data as a list, which does
-- not include the ending string.
readUntil str = do
line <- next
if line /= str
then do
remaining <- readUntil str
return $ line : remaining
else return []
-- | Encode a message in the IPython ZeroMQ communication protocol and send it through the provided
-- socket. Sign it using HMAC with SHA-256 using the provided key.
sendMessage :: Sender a => Bool -> ByteString -> Socket a -> Message -> IO ()
sendMessage _ _ _ SendNothing = return ()
sendMessage debug hmacKey socket message = do
when debug $ do
putStr "Message: "
print message
putStr "Sent: "
print content
-- Send all pieces of the message.
mapM_ sendPiece idents
sendPiece "<IDS|MSG>"
sendPiece signature
sendPiece headStr
sendPiece parentHeaderStr
sendPiece metadata
-- Conclude transmission with content.
sendLast content
where
sendPiece = send socket [SendMore]
sendLast = send socket []
-- Encode to a strict bytestring.
encodeStrict :: ToJSON a => a -> ByteString
encodeStrict = LBS.toStrict . encode
-- Signature for the message using HMAC SHA-256.
signature :: ByteString
signature = hmac $ headStr <> parentHeaderStr <> metadata <> content
-- Compute the HMAC SHA-256 signature of a bytestring message.
hmac :: ByteString -> ByteString
hmac = Char.pack . SHA.showDigest . SHA.hmacSha256 (LBS.fromStrict hmacKey) . LBS.fromStrict
-- Pieces of the message.
head = header message
parentHeaderStr = maybe "{}" encodeStrict $ parentHeader head
idents = identifiers head
metadata = "{}"
content = encodeStrict message
headStr = encodeStrict head
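-- | A minimal sketch of the signing step in isolation, not part of the
-- original module: the signature sent on the wire is the hex-encoded
-- HMAC-SHA256, under the connection key, of the concatenated header,
-- parent-header, metadata and content parts, matching the scheme used
-- inside 'sendMessage' above.
signParts :: ByteString -> [ByteString] -> ByteString
signParts key parts =
  Char.pack . SHA.showDigest $
    SHA.hmacSha256 (LBS.fromStrict key) (LBS.fromStrict (mconcat parts))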
|
artuuge/IHaskell
|
ipython-kernel/src/IHaskell/IPython/ZeroMQ.hs
|
mit
| 14,212
| 0
| 31
| 3,316
| 2,477
| 1,249
| 1,228
| 221
| 2
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcBinds]{TcBinds}
-}
{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-}
module TcBinds ( tcLocalBinds, tcTopBinds, tcRecSelBinds,
tcHsBootSigs, tcPolyCheck,
PragFun, tcSpecPrags, tcVectDecls, mkPragFun,
TcSigInfo(..), TcSigFun,
instTcTySig, instTcTySigFromId, findScopedTyVars,
badBootDeclErr, mkExport ) where
import {-# SOURCE #-} TcMatches ( tcGRHSsPat, tcMatchesFun )
import {-# SOURCE #-} TcExpr ( tcMonoExpr )
import {-# SOURCE #-} TcPatSyn ( tcInferPatSynDecl, tcCheckPatSynDecl, tcPatSynBuilderBind )
import DynFlags
import HsSyn
import HscTypes( isHsBootOrSig )
import TcRnMonad
import TcEnv
import TcUnify
import TcSimplify
import TcEvidence
import TcHsType
import TcPat
import TcMType
import ConLike
import FamInstEnv( normaliseType )
import FamInst( tcGetFamInstEnvs )
import Type( pprSigmaTypeExtraCts )
import TyCon
import TcType
import TysPrim
import Id
import Var
import VarSet
import VarEnv( TidyEnv )
import Module
import Name
import NameSet
import NameEnv
import SrcLoc
import Bag
import ListSetOps
import ErrUtils
import Digraph
import Maybes
import Util
import BasicTypes
import Outputable
import FastString
import Type(mkStrLitTy)
import Class(classTyCon)
import PrelNames(ipClassName)
import TcValidity (checkValidType)
import Control.Monad
import Data.List (partition)
#include "HsVersions.h"
{-
************************************************************************
* *
\subsection{Type-checking bindings}
* *
************************************************************************
@tcBindsAndThen@ typechecks a @HsBinds@. The "and then" part is because
it needs to know something about the {\em usage} of the things bound,
so that it can create specialisations of them. So @tcBindsAndThen@
takes a function which, given an extended environment, E, typechecks
the scope of the bindings returning a typechecked thing and (most
important) an LIE. It is this LIE which is then used as the basis for
specialising the things bound.
@tcBindsAndThen@ also takes a "combiner" which glues together the
bindings and the "thing" to make a new "thing".
The real work is done by @tcBindWithSigsAndThen@.
Recursive and non-recursive binds are handled in essentially the same
way: because of uniques there are no scoping issues left. The only
difference is that non-recursive bindings can bind primitive values.
Even for non-recursive binding groups we add typings for each binder
to the LVE for the following reason. When each individual binding is
checked the type of its LHS is unified with that of its RHS; and
type-checking the LHS of course requires that the binder is in scope.
At the top-level the LIE is sure to contain nothing but constant
dictionaries, which we resolve at the module level.
Note [Polymorphic recursion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The game plan for polymorphic recursion in the code above is
* Bind any variable for which we have a type signature
to an Id with a polymorphic type. Then when type-checking
the RHSs we'll make a full polymorphic call.
This is fine, but if you aren't a bit careful you end up with a horrendous
amount of partial application and (worse) a huge space leak. For example:
f :: Eq a => [a] -> [a]
f xs = ...f...
If we don't take care, after typechecking we get
f = /\a -> \d::Eq a -> let f' = f a d
in
\ys:[a] -> ...f'...
Notice the stupid construction of (f a d), which is of course
identical to the function we're executing. In this case, the
polymorphic recursion isn't being used (but that's a very common case).
This can lead to a massive space leak, from the following top-level defn
(post-typechecking)
ff :: [Int] -> [Int]
ff = f Int dEqInt
Now (f dEqInt) evaluates to a lambda that has f' as a free variable; but
f' is another thunk which evaluates to the same thing... and you end
up with a chain of identical values all hung onto by the CAF ff.
ff = f Int dEqInt
= let f' = f Int dEqInt in \ys. ...f'...
= let f' = let f' = f Int dEqInt in \ys. ...f'...
in \ys. ...f'...
Etc.
NOTE: a bit of arity analysis would push the (f a d) inside the (\ys...),
which would make the space leak go away in this case
Solution: when typechecking the RHSs we always have in hand the
*monomorphic* Ids for each binding. So we just need to make sure that
if (Method f a d) shows up in the constraints emerging from (...f...)
we just use the monomorphic Id. We achieve this by adding monomorphic Ids
to the "givens" when simplifying constraints. That's what the "lies_avail"
is doing.
Then we get
f = /\a -> \d::Eq a -> letrec
fm = \ys:[a] -> ...fm...
in
fm
-}
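-- A self-contained illustration, not part of GHC, of why the signature
-- matters here: with a nested datatype the recursive call is at a different
-- type, so the binding is only typeable because the signature gives the
-- binder a polymorphic type up front (the situation the plan above handles).
data NestedList a = NLNil | NLCons a (NestedList [a])

nestedLen :: NestedList a -> Int
nestedLen NLNil = 0
nestedLen (NLCons _ xs) = 1 + nestedLen xs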
tcTopBinds :: HsValBinds Name -> TcM (TcGblEnv, TcLclEnv)
-- The TcGblEnv contains the new tcg_binds and tcg_spects
-- The TcLclEnv has an extended type envt for the new bindings
tcTopBinds (ValBindsOut binds sigs)
= do { -- Pattern synonym bindings populate the global environment
(binds', (tcg_env, tcl_env)) <- tcValBinds TopLevel binds sigs $
do { gbl <- getGblEnv
; lcl <- getLclEnv
; return (gbl, lcl) }
; specs <- tcImpPrags sigs -- SPECIALISE prags for imported Ids
; let { tcg_env' = tcg_env { tcg_binds = foldr (unionBags . snd)
(tcg_binds tcg_env)
binds'
, tcg_imp_specs = specs ++ tcg_imp_specs tcg_env } }
; return (tcg_env', tcl_env) }
-- The top level bindings are flattened into a giant
-- implicitly-mutually-recursive LHsBinds
tcTopBinds (ValBindsIn {}) = panic "tcTopBinds"
tcRecSelBinds :: HsValBinds Name -> TcM TcGblEnv
tcRecSelBinds (ValBindsOut binds sigs)
= tcExtendGlobalValEnv [sel_id | L _ (IdSig sel_id) <- sigs] $
do { (rec_sel_binds, tcg_env) <- discardWarnings (tcValBinds TopLevel binds sigs getGblEnv)
; let tcg_env'
| isHsBootOrSig (tcg_src tcg_env) = tcg_env
| otherwise = tcg_env { tcg_binds = foldr (unionBags . snd)
(tcg_binds tcg_env)
rec_sel_binds }
-- Do not add the code for record-selector bindings when
-- compiling hs-boot files
; return tcg_env' }
tcRecSelBinds (ValBindsIn {}) = panic "tcRecSelBinds"
tcHsBootSigs :: HsValBinds Name -> TcM [Id]
-- A hs-boot file has only one BindGroup, and it only has type
-- signatures in it. The renamer checked all this
tcHsBootSigs (ValBindsOut binds sigs)
= do { checkTc (null binds) badBootDeclErr
; concat <$> mapM (addLocM tc_boot_sig) (filter isTypeLSig sigs) }
where
tc_boot_sig (TypeSig lnames ty _) = mapM f lnames
where
f (L _ name) = do { sigma_ty <- tcHsSigType (FunSigCtxt name) ty
; return (mkVanillaGlobal name sigma_ty) }
-- Notice that we make GlobalIds, not LocalIds
tc_boot_sig s = pprPanic "tcHsBootSigs/tc_boot_sig" (ppr s)
tcHsBootSigs groups = pprPanic "tcHsBootSigs" (ppr groups)
badBootDeclErr :: MsgDoc
badBootDeclErr = ptext (sLit "Illegal declarations in an hs-boot file")
------------------------
tcLocalBinds :: HsLocalBinds Name -> TcM thing
-> TcM (HsLocalBinds TcId, thing)
tcLocalBinds EmptyLocalBinds thing_inside
= do { thing <- thing_inside
; return (EmptyLocalBinds, thing) }
tcLocalBinds (HsValBinds (ValBindsOut binds sigs)) thing_inside
= do { (binds', thing) <- tcValBinds NotTopLevel binds sigs thing_inside
; return (HsValBinds (ValBindsOut binds' sigs), thing) }
tcLocalBinds (HsValBinds (ValBindsIn {})) _ = panic "tcLocalBinds"
tcLocalBinds (HsIPBinds (IPBinds ip_binds _)) thing_inside
= do { ipClass <- tcLookupClass ipClassName
; (given_ips, ip_binds') <-
mapAndUnzipM (wrapLocSndM (tc_ip_bind ipClass)) ip_binds
-- If the binding binds ?x = E, we must now
-- discharge any ?x constraints in expr_lie
-- See Note [Implicit parameter untouchables]
; (ev_binds, result) <- checkConstraints (IPSkol ips)
[] given_ips thing_inside
; return (HsIPBinds (IPBinds ip_binds' ev_binds), result) }
where
ips = [ip | L _ (IPBind (Left (L _ ip)) _) <- ip_binds]
     -- I wonder if we should do these one at a time
-- Consider ?x = 4
-- ?y = ?x + 1
tc_ip_bind ipClass (IPBind (Left (L _ ip)) expr)
= do { ty <- newFlexiTyVarTy openTypeKind
; let p = mkStrLitTy $ hsIPNameFS ip
; ip_id <- newDict ipClass [ p, ty ]
; expr' <- tcMonoExpr expr ty
; let d = toDict ipClass p ty `fmap` expr'
; return (ip_id, (IPBind (Right ip_id) d)) }
tc_ip_bind _ (IPBind (Right {}) _) = panic "tc_ip_bind"
    -- Coerces a `t` into a dictionary for `IP "x" t`.
-- co : t -> IP "x" t
toDict ipClass x ty =
case unwrapNewTyCon_maybe (classTyCon ipClass) of
Just (_,_,ax) -> HsWrap $ mkWpCast $ mkTcSymCo $ mkTcUnbranchedAxInstCo Representational ax [x,ty]
Nothing -> panic "The dictionary for `IP` is not a newtype?"
{-
Note [Implicit parameter untouchables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We add the type variables in the types of the implicit parameters
as untouchables, not so much because we really must not unify them,
but rather because we otherwise end up with constraints like this
Num alpha, Implic { wanted = alpha ~ Int }
The constraint solver solves alpha~Int by unification, but then
doesn't float that solved constraint out (it's not an unsolved
wanted). Result disaster: the (Num alpha) is again solved, this
time by defaulting. No no no.
However [Oct 10] this is all handled automatically by the
untouchable-range idea.
Note [Placeholder PatSyn kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (Trac #9161)
{-# LANGUAGE PatternSynonyms, DataKinds #-}
pattern A = ()
b :: A
b = undefined
Here, the type signature for b mentions A. But A is a pattern
synonym, which is typechecked (for very good reasons; a view pattern
in the RHS may mention a value binding) as part of a group of
bindings. It is entirely reasonable to reject this, but to do so
we need A to be in the kind environment when kind-checking the signature for b.
Hence the tcExtendKindEnv2 patsyn_placeholder_kinds, which adds a binding
A -> AGlobal (AConLike (PatSynCon _|_))
to the environment. Then TcHsType.tcTyVar will find A in the kind environment,
and will give a 'wrongThingErr' as a result. But the lookup of A won't fail.
The _|_ (= panic "fakePatSynCon") works because the wrongThingErr call, in
tcTyVar, doesn't look inside the TcTyThing.
-}
tcValBinds :: TopLevelFlag
-> [(RecFlag, LHsBinds Name)] -> [LSig Name]
-> TcM thing
-> TcM ([(RecFlag, LHsBinds TcId)], thing)
tcValBinds top_lvl binds sigs thing_inside
= do { -- Typecheck the signature
; (poly_ids, sig_fn, nwc_tvs) <- tcExtendKindEnv2 patsyn_placeholder_kinds $
-- See Note [Placeholder PatSyn kinds]
tcTySigs sigs
; let prag_fn = mkPragFun sigs (foldr (unionBags . snd) emptyBag binds)
-- Extend the envt right away with all
-- the Ids declared with type signatures
-- Use tcExtendIdEnv3 to avoid extending the TcIdBinder stack
; tcExtendIdEnv3 [(idName id, id) | id <- poly_ids] (mkVarSet nwc_tvs) $ do
{ (binds', (extra_binds', thing)) <- tcBindGroups top_lvl sig_fn prag_fn binds $ do
{ thing <- thing_inside
-- See Note [Pattern synonym wrappers don't yield dependencies]
; patsyn_workers <- mapM tcPatSynBuilderBind patsyns
; let extra_binds = [ (NonRecursive, worker) | worker <- patsyn_workers ]
; return (extra_binds, thing) }
; return (binds' ++ extra_binds', thing) }}
where
patsyns
= [psb | (_, lbinds) <- binds, L _ (PatSynBind psb) <- bagToList lbinds]
patsyn_placeholder_kinds -- See Note [Placeholder PatSyn kinds]
= [(name, placeholder_patsyn_tything)| PSB{ psb_id = L _ name } <- patsyns ]
placeholder_patsyn_tything
= AGlobal $ AConLike $ PatSynCon $ panic "fakePatSynCon"
------------------------
tcBindGroups :: TopLevelFlag -> TcSigFun -> PragFun
-> [(RecFlag, LHsBinds Name)] -> TcM thing
-> TcM ([(RecFlag, LHsBinds TcId)], thing)
-- Typecheck a whole lot of value bindings,
-- one strongly-connected component at a time
-- Here a "strongly connected component" has the straightforward
-- meaning of a group of bindings that mention each other,
-- ignoring type signatures (that part comes later)
tcBindGroups _ _ _ [] thing_inside
= do { thing <- thing_inside
; return ([], thing) }
tcBindGroups top_lvl sig_fn prag_fn (group : groups) thing_inside
= do { (group', (groups', thing))
<- tc_group top_lvl sig_fn prag_fn group $
tcBindGroups top_lvl sig_fn prag_fn groups thing_inside
; return (group' ++ groups', thing) }
------------------------
tc_group :: forall thing.
TopLevelFlag -> TcSigFun -> PragFun
-> (RecFlag, LHsBinds Name) -> TcM thing
-> TcM ([(RecFlag, LHsBinds TcId)], thing)
-- Typecheck one strongly-connected component of the original program.
-- We get a list of groups back, because there may
-- be specialisations etc as well
tc_group top_lvl sig_fn prag_fn (NonRecursive, binds) thing_inside
-- A single non-recursive binding
-- We want to keep non-recursive things non-recursive
-- so that we desugar unlifted bindings correctly
= do { let bind = case bagToList binds of
[bind] -> bind
[] -> panic "tc_group: empty list of binds"
_ -> panic "tc_group: NonRecursive binds is not a singleton bag"
; (bind', thing) <- tc_single top_lvl sig_fn prag_fn bind thing_inside
; return ( [(NonRecursive, bind')], thing) }
tc_group top_lvl sig_fn prag_fn (Recursive, binds) thing_inside
= -- To maximise polymorphism, we do a new
-- strongly-connected-component analysis, this time omitting
-- any references to variables with type signatures.
-- (This used to be optional, but isn't now.)
do { traceTc "tc_group rec" (pprLHsBinds binds)
; when hasPatSyn $ recursivePatSynErr binds
; (binds1, thing) <- go sccs
; return ([(Recursive, binds1)], thing) }
-- Rec them all together
where
hasPatSyn = anyBag (isPatSyn . unLoc) binds
isPatSyn PatSynBind{} = True
isPatSyn _ = False
sccs :: [SCC (LHsBind Name)]
sccs = stronglyConnCompFromEdgedVertices (mkEdges sig_fn binds)
go :: [SCC (LHsBind Name)] -> TcM (LHsBinds TcId, thing)
go (scc:sccs) = do { (binds1, ids1, closed) <- tc_scc scc
; (binds2, thing) <- tcExtendLetEnv top_lvl closed ids1 $
go sccs
; return (binds1 `unionBags` binds2, thing) }
go [] = do { thing <- thing_inside; return (emptyBag, thing) }
tc_scc (AcyclicSCC bind) = tc_sub_group NonRecursive [bind]
tc_scc (CyclicSCC binds) = tc_sub_group Recursive binds
tc_sub_group = tcPolyBinds top_lvl sig_fn prag_fn Recursive
recursivePatSynErr :: OutputableBndr name => LHsBinds name -> TcM a
recursivePatSynErr binds
= failWithTc $
hang (ptext (sLit "Recursive pattern synonym definition with following bindings:"))
2 (vcat $ map pprLBind . bagToList $ binds)
where
pprLoc loc = parens (ptext (sLit "defined at") <+> ppr loc)
pprLBind (L loc bind) = pprWithCommas ppr (collectHsBindBinders bind) <+>
pprLoc loc
tc_single :: forall thing.
TopLevelFlag -> TcSigFun -> PragFun
-> LHsBind Name -> TcM thing
-> TcM (LHsBinds TcId, thing)
tc_single _top_lvl sig_fn _prag_fn (L _ (PatSynBind psb@PSB{ psb_id = L _ name })) thing_inside
= do { (pat_syn, aux_binds) <- tc_pat_syn_decl
; let tything = AConLike (PatSynCon pat_syn)
; thing <- tcExtendGlobalEnv [tything] thing_inside
; return (aux_binds, thing)
}
where
tc_pat_syn_decl = case sig_fn name of
Nothing -> tcInferPatSynDecl psb
Just (TcPatSynInfo tpsi) -> tcCheckPatSynDecl psb tpsi
Just _ -> panic "tc_single"
tc_single top_lvl sig_fn prag_fn lbind thing_inside
= do { (binds1, ids, closed) <- tcPolyBinds top_lvl sig_fn prag_fn
NonRecursive NonRecursive
[lbind]
; thing <- tcExtendLetEnv top_lvl closed ids thing_inside
; return (binds1, thing) }
-- | No signature or a partial signature
noCompleteSig :: Maybe TcSigInfo -> Bool
noCompleteSig Nothing = True
noCompleteSig (Just sig) = isPartialSig sig
------------------------
mkEdges :: TcSigFun -> LHsBinds Name -> [Node BKey (LHsBind Name)]
type BKey = Int -- Just number off the bindings
mkEdges sig_fn binds
= [ (bind, key, [key | n <- nameSetElems (bind_fvs (unLoc bind)),
Just key <- [lookupNameEnv key_map n], no_sig n ])
| (bind, key) <- keyd_binds
]
where
no_sig :: Name -> Bool
no_sig n = noCompleteSig (sig_fn n)
keyd_binds = bagToList binds `zip` [0::BKey ..]
key_map :: NameEnv BKey -- Which binding it comes from
key_map = mkNameEnv [(bndr, key) | (L _ bind, key) <- keyd_binds
, bndr <- collectHsBindBinders bind ]
------------------------
tcPolyBinds :: TopLevelFlag -> TcSigFun -> PragFun
-> RecFlag -- Whether the group is really recursive
-> RecFlag -- Whether it's recursive after breaking
-- dependencies based on type signatures
-> [LHsBind Name] -- None are PatSynBind
-> TcM (LHsBinds TcId, [TcId], TopLevelFlag)
-- Typechecks a single bunch of values bindings all together,
-- and generalises them. The bunch may be only part of a recursive
-- group, because we use type signatures to maximise polymorphism
--
-- Returns a list because the input may be a single non-recursive binding,
-- in which case the dependency order of the resulting bindings is
-- important.
--
-- Knows nothing about the scope of the bindings
-- None of the bindings are pattern synonyms
tcPolyBinds top_lvl sig_fn prag_fn rec_group rec_tc bind_list
= setSrcSpan loc $
recoverM (recoveryCode binder_names sig_fn) $ do
-- Set up main recover; take advantage of any type sigs
{ traceTc "------------------------------------------------" Outputable.empty
; traceTc "Bindings for {" (ppr binder_names)
; dflags <- getDynFlags
; type_env <- getLclTypeEnv
; let plan = decideGeneralisationPlan dflags type_env
binder_names bind_list sig_fn
; traceTc "Generalisation plan" (ppr plan)
; result@(tc_binds, poly_ids, _) <- case plan of
NoGen -> tcPolyNoGen rec_tc prag_fn sig_fn bind_list
InferGen mn cl -> tcPolyInfer rec_tc prag_fn sig_fn mn cl bind_list
CheckGen lbind sig -> tcPolyCheck rec_tc prag_fn sig lbind
-- Check whether strict bindings are ok
-- These must be non-recursive etc, and are not generalised
-- They desugar to a case expression in the end
; checkStrictBinds top_lvl rec_group bind_list tc_binds poly_ids
; traceTc "} End of bindings for" (vcat [ ppr binder_names, ppr rec_group
, vcat [ppr id <+> ppr (idType id) | id <- poly_ids]
])
; return result }
where
binder_names = collectHsBindListBinders bind_list
loc = foldr1 combineSrcSpans (map getLoc bind_list)
-- The mbinds have been dependency analysed and
-- may no longer be adjacent; so find the narrowest
-- span that includes them all
------------------
tcPolyNoGen -- No generalisation whatsoever
:: RecFlag -- Whether it's recursive after breaking
-- dependencies based on type signatures
-> PragFun -> TcSigFun
-> [LHsBind Name]
-> TcM (LHsBinds TcId, [TcId], TopLevelFlag)
tcPolyNoGen rec_tc prag_fn tc_sig_fn bind_list
= do { (binds', mono_infos) <- tcMonoBinds rec_tc tc_sig_fn
(LetGblBndr prag_fn)
bind_list
; mono_ids' <- mapM tc_mono_info mono_infos
; return (binds', mono_ids', NotTopLevel) }
where
tc_mono_info (name, _, mono_id)
= do { mono_ty' <- zonkTcType (idType mono_id)
-- Zonk, mainly to expose unboxed types to checkStrictBinds
; let mono_id' = setIdType mono_id mono_ty'
; _specs <- tcSpecPrags mono_id' (prag_fn name)
; return mono_id' }
-- NB: tcPrags generates error messages for
-- specialisation pragmas for non-overloaded sigs
-- Indeed that is why we call it here!
-- So we can safely ignore _specs
------------------
tcPolyCheck :: RecFlag -- Whether it's recursive after breaking
-- dependencies based on type signatures
-> PragFun -> TcSigInfo
-> LHsBind Name
-> TcM (LHsBinds TcId, [TcId], TopLevelFlag)
-- There is just one binding,
-- it binds a single variable,
-- it has a signature,
tcPolyCheck rec_tc prag_fn
sig@(TcSigInfo { sig_id = poly_id, sig_tvs = tvs_w_scoped
, sig_nwcs = sig_nwcs, sig_theta = theta
, sig_tau = tau, sig_loc = loc })
bind
= ASSERT( null sig_nwcs ) -- We should be in tcPolyInfer if there are wildcards
do { ev_vars <- newEvVars theta
; let skol_info = SigSkol (FunSigCtxt (idName poly_id)) (mkPhiTy theta tau)
prag_sigs = prag_fn (idName poly_id)
tvs = map snd tvs_w_scoped
; (ev_binds, (binds', [mono_info]))
<- setSrcSpan loc $
checkConstraints skol_info tvs ev_vars $
tcMonoBinds rec_tc (\_ -> Just sig) LetLclBndr [bind]
; spec_prags <- tcSpecPrags poly_id prag_sigs
; poly_id <- addInlinePrags poly_id prag_sigs
; let (_, _, mono_id) = mono_info
export = ABE { abe_wrap = idHsWrapper
, abe_poly = poly_id
, abe_mono = mono_id
, abe_prags = SpecPrags spec_prags }
abs_bind = L loc $ AbsBinds
{ abs_tvs = tvs
, abs_ev_vars = ev_vars, abs_ev_binds = ev_binds
, abs_exports = [export], abs_binds = binds' }
closed | isEmptyVarSet (tyVarsOfType (idType poly_id)) = TopLevel
| otherwise = NotTopLevel
; return (unitBag abs_bind, [poly_id], closed) }
tcPolyCheck _rec_tc _prag_fn sig _bind
= pprPanic "tcPolyCheck" (ppr sig)
------------------
tcPolyInfer
:: RecFlag -- Whether it's recursive after breaking
-- dependencies based on type signatures
-> PragFun -> TcSigFun
-> Bool -- True <=> apply the monomorphism restriction
-> Bool -- True <=> free vars have closed types
-> [LHsBind Name]
-> TcM (LHsBinds TcId, [TcId], TopLevelFlag)
tcPolyInfer rec_tc prag_fn tc_sig_fn mono closed bind_list
= do { (((binds', mono_infos), tclvl), wanted)
<- captureConstraints $
captureTcLevel $
tcMonoBinds rec_tc tc_sig_fn LetLclBndr bind_list
; let name_taus = [(name, idType mono_id) | (name, _, mono_id) <- mono_infos]
; traceTc "simplifyInfer call" (ppr name_taus $$ ppr wanted)
; (qtvs, givens, mr_bites, ev_binds)
<- simplifyInfer tclvl mono name_taus wanted
; inferred_theta <- zonkTcThetaType (map evVarPred givens)
; exports <- checkNoErrs $ mapM (mkExport prag_fn qtvs inferred_theta)
mono_infos
; loc <- getSrcSpanM
; let poly_ids = map abe_poly exports
final_closed | closed && not mr_bites = TopLevel
| otherwise = NotTopLevel
abs_bind = L loc $
AbsBinds { abs_tvs = qtvs
, abs_ev_vars = givens, abs_ev_binds = ev_binds
, abs_exports = exports, abs_binds = binds' }
; traceTc "Binding:" (ppr final_closed $$
ppr (poly_ids `zip` map idType poly_ids))
; return (unitBag abs_bind, poly_ids, final_closed) }
-- poly_ids are guaranteed zonked by mkExport
--------------
mkExport :: PragFun
-> [TyVar] -> TcThetaType -- Both already zonked
-> MonoBindInfo
-> TcM (ABExport Id)
-- Only called for generalisation plan InferGen, not by CheckGen or NoGen
--
-- mkExport generates exports with
-- zonked type variables,
-- zonked poly_ids
-- The former is just because no further unifications will change
-- the quantified type variables, so we can fix their final form
-- right now.
-- The latter is needed because the poly_ids are used to extend the
-- type environment; see the invariant on TcEnv.tcExtendIdEnv
-- Pre-condition: the qtvs and theta are already zonked
mkExport prag_fn qtvs inferred_theta (poly_name, mb_sig, mono_id)
= do { mono_ty <- zonkTcType (idType mono_id)
; poly_id <- case mb_sig of
Nothing -> mkInferredPolyId poly_name qtvs inferred_theta mono_ty
Just (TcPatSynInfo _) -> panic "mkExport"
Just sig | isPartialSig sig
-> do { final_theta <- completeTheta inferred_theta sig
; mkInferredPolyId poly_name qtvs final_theta mono_ty }
| otherwise
-> return (sig_id sig)
-- NB: poly_id has a zonked type
; poly_id <- addInlinePrags poly_id prag_sigs
; spec_prags <- tcSpecPrags poly_id prag_sigs
-- tcPrags requires a zonked poly_id
; let sel_poly_ty = mkSigmaTy qtvs inferred_theta mono_ty
; traceTc "mkExport: check sig"
(vcat [ ppr poly_name, ppr sel_poly_ty, ppr (idType poly_id) ])
-- Perform the impedence-matching and ambiguity check
-- right away. If it fails, we want to fail now (and recover
-- in tcPolyBinds). If we delay checking, we get an error cascade.
-- Remember we are in the tcPolyInfer case, so the type envt is
-- closed (unless we are doing NoMonoLocalBinds in which case all bets
-- are off)
-- See Note [Impedence matching]
; (wrap, wanted) <- addErrCtxtM (mk_bind_msg inferred True poly_name (idType poly_id)) $
captureConstraints $
tcSubType_NC sig_ctxt sel_poly_ty (idType poly_id)
; ev_binds <- simplifyTop wanted
; return (ABE { abe_wrap = mkWpLet (EvBinds ev_binds) <.> wrap
, abe_poly = poly_id
, abe_mono = mono_id
, abe_prags = SpecPrags spec_prags }) }
where
inferred = isNothing mb_sig
prag_sigs = prag_fn poly_name
sig_ctxt = InfSigCtxt poly_name
mkInferredPolyId :: Name -> [TyVar] -> TcThetaType -> TcType -> TcM Id
-- In the inference case (no signature) this stuff figures out
-- the right type variables and theta to quantify over
-- See Note [Validity of inferred types]
mkInferredPolyId poly_name qtvs theta mono_ty
= do { fam_envs <- tcGetFamInstEnvs
; let (_co, norm_mono_ty) = normaliseType fam_envs Nominal mono_ty
-- Unification may not have normalised the type,
-- (see Note [Lazy flattening] in TcFlatten) so do it
-- here to make it as uncomplicated as possible.
-- Example: f :: [F Int] -> Bool
-- should be rewritten to f :: [Char] -> Bool, if possible
my_tvs2 = closeOverKinds (growThetaTyVars theta (tyVarsOfType norm_mono_ty))
-- Include kind variables! Trac #7916
my_tvs = filter (`elemVarSet` my_tvs2) qtvs -- Maintain original order
my_theta = filter (quantifyPred my_tvs2) theta
inferred_poly_ty = mkSigmaTy my_tvs my_theta norm_mono_ty
; addErrCtxtM (mk_bind_msg True False poly_name inferred_poly_ty) $
checkValidType (InfSigCtxt poly_name) inferred_poly_ty
; return (mkLocalId poly_name inferred_poly_ty) }
mk_bind_msg :: Bool -> Bool -> Name -> TcType -> TidyEnv -> TcM (TidyEnv, SDoc)
mk_bind_msg inferred want_ambig poly_name poly_ty tidy_env
= do { (tidy_env', tidy_ty) <- zonkTidyTcType tidy_env poly_ty
; return (tidy_env', mk_msg tidy_ty) }
where
mk_msg ty = vcat [ ptext (sLit "When checking that") <+> quotes (ppr poly_name)
<+> ptext (sLit "has the") <+> what <+> ptext (sLit "type")
, nest 2 (ppr poly_name <+> dcolon <+> ppr ty)
, ppWhen want_ambig $
ptext (sLit "Probable cause: the inferred type is ambiguous") ]
what | inferred = ptext (sLit "inferred")
| otherwise = ptext (sLit "specified")
-- | Report the inferred constraints for an extra-constraints wildcard/hole as
-- an error message, unless the PartialTypeSignatures flag is enabled. In this
-- case, the extra inferred constraints are accepted without complaining.
-- Returns the annotated constraints combined with the inferred constraints.
completeTheta :: TcThetaType -> TcSigInfo -> TcM TcThetaType
completeTheta _ (TcPatSynInfo _)
= panic "Extra-constraints wildcard not supported in a pattern signature"
completeTheta inferred_theta
sig@(TcSigInfo { sig_id = poly_id
, sig_extra_cts = mb_extra_cts
, sig_theta = annotated_theta })
| Just loc <- mb_extra_cts
= do { annotated_theta <- zonkTcThetaType annotated_theta
; let inferred_diff = minusList inferred_theta annotated_theta
final_theta = annotated_theta ++ inferred_diff
; partial_sigs <- xoptM Opt_PartialTypeSignatures
; warn_partial_sigs <- woptM Opt_WarnPartialTypeSignatures
; msg <- mkLongErrAt loc (mk_msg inferred_diff partial_sigs) empty
; case partial_sigs of
True | warn_partial_sigs -> reportWarning $ makeIntoWarning msg
| otherwise -> return ()
False -> reportError msg
; return final_theta }
| otherwise
= zonkTcThetaType annotated_theta
-- No extra-constraints wildcard means no extra constraints will be added
-- to the context, so just return the possibly empty (zonked)
-- annotated_theta.
where
pts_hint = text "To use the inferred type, enable PartialTypeSignatures"
mk_msg inferred_diff suppress_hint
= vcat [ hang ((text "Found hole") <+> quotes (char '_'))
2 (text "with inferred constraints:")
<+> pprTheta inferred_diff
, if suppress_hint then empty else pts_hint
, typeSigCtxt (idName poly_id) sig ]
{-
Note [Validity of inferred types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to check inferred type for validity, in case it uses language
extensions that are not turned on. The principle is that if the user
simply adds the inferred type to the program source, it'll compile fine.
See #8883.
Examples that might fail:
- an inferred theta that requires type equalities e.g. (F a ~ G b)
or multi-parameter type classes
- an inferred type that includes unboxed tuples
However we don't do the ambiguity check (checkValidType omits it for
InfSigCtxt) because the impedence-matching stage, which follows
immediately, will do it and we don't want two error messages.
Moreover, because of the impedence matching stage, the ambiguity-check
suggestion of -XAllowAmbiguousTypes will not work.
Note [Impedence matching]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f 0 x = x
f n x = g [] (not x)
g [] y = f 10 y
g _ y = f 9 y
After typechecking we'll get
f_mono_ty :: a -> Bool -> Bool
g_mono_ty :: [b] -> Bool -> Bool
with constraints
(Eq a, Num a)
Note that f is polymorphic in 'a' and g in 'b'; and these are not linked.
The types we really want for f and g are
f :: forall a. (Eq a, Num a) => a -> Bool -> Bool
g :: forall b. [b] -> Bool -> Bool
We can get these by "impedence matching":
tuple :: forall a b. (Eq a, Num a) => (a -> Bool -> Bool, [b] -> Bool -> Bool)
tuple a b d1 d1 = let ...bind f_mono, g_mono in (f_mono, g_mono)
f a d1 d2 = case tuple a Any d1 d2 of (f, g) -> f
g b = case tuple Integer b dEqInteger dNumInteger of (f,g) -> g
Suppose the shared quantified tyvars are qtvs and constraints theta.
Then we want to check that
f's polytype is more polymorphic than forall qtvs. theta => f_mono_ty
and the proof is the impedence matcher.
Notice that the impedence matcher may do defaulting. See Trac #7173.
It also cleverly does an ambiguity check; for example, rejecting
f :: F a -> a
where F is a non-injective type function.
-}
type PragFun = Name -> [LSig Name]
mkPragFun :: [LSig Name] -> LHsBinds Name -> PragFun
mkPragFun sigs binds = \n -> lookupNameEnv prag_env n `orElse` []
where
prs = mapMaybe get_sig sigs
get_sig :: LSig Name -> Maybe (Located Name, LSig Name)
get_sig (L l (SpecSig nm ty inl)) = Just (nm, L l $ SpecSig nm ty (add_arity nm inl))
get_sig (L l (InlineSig nm inl)) = Just (nm, L l $ InlineSig nm (add_arity nm inl))
get_sig _ = Nothing
add_arity (L _ n) inl_prag -- Adjust inl_sat field to match visible arity of function
| Just ar <- lookupNameEnv ar_env n,
Inline <- inl_inline inl_prag = inl_prag { inl_sat = Just ar }
-- add arity only for real INLINE pragmas, not INLINABLE
| otherwise = inl_prag
prag_env :: NameEnv [LSig Name]
prag_env = foldl add emptyNameEnv prs
add env (L _ n,p) = extendNameEnv_Acc (:) singleton env n p
-- ar_env maps a local to the arity of its definition
ar_env :: NameEnv Arity
ar_env = foldrBag lhsBindArity emptyNameEnv binds
lhsBindArity :: LHsBind Name -> NameEnv Arity -> NameEnv Arity
lhsBindArity (L _ (FunBind { fun_id = id, fun_matches = ms })) env
= extendNameEnv env (unLoc id) (matchGroupArity ms)
lhsBindArity _ env = env -- PatBind/VarBind
------------------
tcSpecPrags :: Id -> [LSig Name]
-> TcM [LTcSpecPrag]
-- Add INLINE and SPECIALISE pragmas
-- INLINE prags are added to the (polymorphic) Id directly
-- SPECIALISE prags are passed to the desugarer via TcSpecPrags
-- Pre-condition: the poly_id is zonked
-- Reason: required by tcSubExp
tcSpecPrags poly_id prag_sigs
= do { traceTc "tcSpecPrags" (ppr poly_id <+> ppr spec_sigs)
; unless (null bad_sigs) warn_discarded_sigs
; pss <- mapAndRecoverM (wrapLocM (tcSpec poly_id)) spec_sigs
; return $ concatMap (\(L l ps) -> map (L l) ps) pss }
where
spec_sigs = filter isSpecLSig prag_sigs
bad_sigs = filter is_bad_sig prag_sigs
is_bad_sig s = not (isSpecLSig s || isInlineLSig s)
warn_discarded_sigs = warnPrags poly_id bad_sigs $
ptext (sLit "Discarding unexpected pragmas for")
--------------
tcSpec :: TcId -> Sig Name -> TcM [TcSpecPrag]
tcSpec poly_id prag@(SpecSig fun_name hs_tys inl)
-- The Name fun_name in the SpecSig may not be the same as that of the poly_id
-- Example: SPECIALISE for a class method: the Name in the SpecSig is
-- for the selector Id, but the poly_id is something like $cop
-- However we want to use fun_name in the error message, since that is
-- what the user wrote (Trac #8537)
= addErrCtxt (spec_ctxt prag) $
do { spec_tys <- mapM (tcHsSigType sig_ctxt) hs_tys
; warnIf (not (isOverloadedTy poly_ty || isInlinePragma inl))
(ptext (sLit "SPECIALISE pragma for non-overloaded function")
<+> quotes (ppr fun_name))
-- Note [SPECIALISE pragmas]
; wraps <- mapM (tcSubType sig_ctxt (idType poly_id)) spec_tys
; return [ (SpecPrag poly_id wrap inl) | wrap <- wraps ] }
where
name = idName poly_id
poly_ty = idType poly_id
sig_ctxt = FunSigCtxt name
spec_ctxt prag = hang (ptext (sLit "In the SPECIALISE pragma")) 2 (ppr prag)
tcSpec _ prag = pprPanic "tcSpec" (ppr prag)
--------------
tcImpPrags :: [LSig Name] -> TcM [LTcSpecPrag]
-- SPECIALISE pragmas for imported things
tcImpPrags prags
= do { this_mod <- getModule
; dflags <- getDynFlags
; if (not_specialising dflags) then
return []
else do
{ pss <- mapAndRecoverM (wrapLocM tcImpSpec)
[L loc (name,prag)
| (L loc prag@(SpecSig (L _ name) _ _)) <- prags
, not (nameIsLocalOrFrom this_mod name) ]
; return $ concatMap (\(L l ps) -> map (L l) ps) pss } }
where
-- Ignore SPECIALISE pragmas for imported things
-- when we aren't specialising, or when we aren't generating
-- code. The latter happens when Haddocking the base library;
    -- we don't want complaints about lack of INLINABLE pragmas
not_specialising dflags
| not (gopt Opt_Specialise dflags) = True
| otherwise = case hscTarget dflags of
HscNothing -> True
HscInterpreted -> True
_other -> False
tcImpSpec :: (Name, Sig Name) -> TcM [TcSpecPrag]
tcImpSpec (name, prag)
= do { id <- tcLookupId name
; unless (isAnyInlinePragma (idInlinePragma id))
(addWarnTc (impSpecErr name))
; tcSpec id prag }
impSpecErr :: Name -> SDoc
impSpecErr name
= hang (ptext (sLit "You cannot SPECIALISE") <+> quotes (ppr name))
2 (vcat [ ptext (sLit "because its definition has no INLINE/INLINABLE pragma")
, parens $ sep
[ ptext (sLit "or its defining module") <+> quotes (ppr mod)
, ptext (sLit "was compiled without -O")]])
where
mod = nameModule name
--------------
tcVectDecls :: [LVectDecl Name] -> TcM ([LVectDecl TcId])
tcVectDecls decls
= do { decls' <- mapM (wrapLocM tcVect) decls
; let ids = [lvectDeclName decl | decl <- decls', not $ lvectInstDecl decl]
dups = findDupsEq (==) ids
; mapM_ reportVectDups dups
; traceTcConstraints "End of tcVectDecls"
; return decls'
}
where
reportVectDups (first:_second:_more)
= addErrAt (getSrcSpan first) $
ptext (sLit "Duplicate vectorisation declarations for") <+> ppr first
reportVectDups _ = return ()
--------------
tcVect :: VectDecl Name -> TcM (VectDecl TcId)
-- FIXME: We can't typecheck the expression of a vectorisation declaration against the vectorised
-- type of the original definition as this requires internals of the vectoriser not available
-- during type checking. Instead, constrain the rhs of a vectorisation declaration to be a single
-- identifier (this is checked in 'rnHsVectDecl'). Fix this by enabling the use of 'vectType'
-- from the vectoriser here.
tcVect (HsVect s name rhs)
= addErrCtxt (vectCtxt name) $
do { var <- wrapLocM tcLookupId name
; let L rhs_loc (HsVar rhs_var_name) = rhs
; rhs_id <- tcLookupId rhs_var_name
; return $ HsVect s var (L rhs_loc (HsVar rhs_id))
}
{- OLD CODE:
-- turn the vectorisation declaration into a single non-recursive binding
; let bind = L loc $ mkTopFunBind name [mkSimpleMatch [] rhs]
sigFun = const Nothing
pragFun = mkPragFun [] (unitBag bind)
-- perform type inference (including generalisation)
; (binds, [id'], _) <- tcPolyInfer False True sigFun pragFun NonRecursive [bind]
; traceTc "tcVect inferred type" $ ppr (varType id')
; traceTc "tcVect bindings" $ ppr binds
-- add all bindings, including the type variable and dictionary bindings produced by type
-- generalisation to the right-hand side of the vectorisation declaration
; let [AbsBinds tvs evs _ evBinds actualBinds] = (map unLoc . bagToList) binds
; let [bind'] = bagToList actualBinds
MatchGroup
[L _ (Match _ _ (GRHSs [L _ (GRHS _ rhs')] _))]
_ = (fun_matches . unLoc) bind'
rhsWrapped = mkHsLams tvs evs (mkHsDictLet evBinds rhs')
-- We return the type-checked 'Id', to propagate the inferred signature
-- to the vectoriser - see "Note [Typechecked vectorisation pragmas]" in HsDecls
; return $ HsVect (L loc id') (Just rhsWrapped)
}
-}
tcVect (HsNoVect s name)
= addErrCtxt (vectCtxt name) $
do { var <- wrapLocM tcLookupId name
; return $ HsNoVect s var
}
tcVect (HsVectTypeIn _ isScalar lname rhs_name)
= addErrCtxt (vectCtxt lname) $
do { tycon <- tcLookupLocatedTyCon lname
; checkTc ( not isScalar -- either we have a non-SCALAR declaration
|| isJust rhs_name -- or we explicitly provide a vectorised type
|| tyConArity tycon == 0 -- otherwise the type constructor must be nullary
)
scalarTyConMustBeNullary
; rhs_tycon <- fmapMaybeM (tcLookupTyCon . unLoc) rhs_name
; return $ HsVectTypeOut isScalar tycon rhs_tycon
}
tcVect (HsVectTypeOut _ _ _)
= panic "TcBinds.tcVect: Unexpected 'HsVectTypeOut'"
tcVect (HsVectClassIn _ lname)
= addErrCtxt (vectCtxt lname) $
do { cls <- tcLookupLocatedClass lname
; return $ HsVectClassOut cls
}
tcVect (HsVectClassOut _)
= panic "TcBinds.tcVect: Unexpected 'HsVectClassOut'"
tcVect (HsVectInstIn linstTy)
= addErrCtxt (vectCtxt linstTy) $
do { (cls, tys) <- tcHsVectInst linstTy
; inst <- tcLookupInstance cls tys
; return $ HsVectInstOut inst
}
tcVect (HsVectInstOut _)
= panic "TcBinds.tcVect: Unexpected 'HsVectInstOut'"
vectCtxt :: Outputable thing => thing -> SDoc
vectCtxt thing = ptext (sLit "When checking the vectorisation declaration for") <+> ppr thing
scalarTyConMustBeNullary :: MsgDoc
scalarTyConMustBeNullary = ptext (sLit "VECTORISE SCALAR type constructor must be nullary")
--------------
-- If typechecking the binds fails, then return with each
-- signature-less binder given type (forall a.a), to minimise
-- subsequent error messages
recoveryCode :: [Name] -> TcSigFun -> TcM (LHsBinds TcId, [Id], TopLevelFlag)
recoveryCode binder_names sig_fn
= do { traceTc "tcBindsWithSigs: error recovery" (ppr binder_names)
; poly_ids <- mapM mk_dummy binder_names
; return (emptyBag, poly_ids, if all is_closed poly_ids
then TopLevel else NotTopLevel) }
where
mk_dummy name
| isJust (sig_fn name) = tcLookupId name -- Had signature; look it up
| otherwise = return (mkLocalId name forall_a_a) -- No signature
is_closed poly_id = isEmptyVarSet (tyVarsOfType (idType poly_id))
forall_a_a :: TcType
forall_a_a = mkForAllTy openAlphaTyVar (mkTyVarTy openAlphaTyVar)
{-
Note [SPECIALISE pragmas]
~~~~~~~~~~~~~~~~~~~~~~~~~
There is no point in a SPECIALISE pragma for a non-overloaded function:
reverse :: [a] -> [a]
{-# SPECIALISE reverse :: [Int] -> [Int] #-}
But SPECIALISE INLINE *can* make sense for GADTS:
data Arr e where
ArrInt :: !Int -> ByteArray# -> Arr Int
ArrPair :: !Int -> Arr e1 -> Arr e2 -> Arr (e1, e2)
(!:) :: Arr e -> Int -> e
{-# SPECIALISE INLINE (!:) :: Arr Int -> Int -> Int #-}
{-# SPECIALISE INLINE (!:) :: Arr (a, b) -> Int -> (a, b) #-}
(ArrInt _ ba) !: (I# i) = I# (indexIntArray# ba i)
(ArrPair _ a1 a2) !: i = (a1 !: i, a2 !: i)
When (!:) is specialised it becomes non-recursive, and can usefully
be inlined. Scary! So we only warn for SPECIALISE *without* INLINE
for a non-overloaded function.
************************************************************************
* *
\subsection{tcMonoBind}
* *
************************************************************************
@tcMonoBinds@ deals with a perhaps-recursive group of HsBinds.
The signatures have been dealt with already.
Note [Pattern bindings]
~~~~~~~~~~~~~~~~~~~~~~~
The rule for typing pattern bindings is this:
..sigs..
p = e
where 'p' binds v1..vn, and 'e' may mention v1..vn,
typechecks exactly like
..sigs..
x = e -- Inferred type
v1 = case x of p -> v1
..
vn = case x of p -> vn
Note that
(f :: forall a. a -> a) = id
should not typecheck because
case id of { (f :: forall a. a->a) -> f }
will not typecheck.
-}
tcMonoBinds :: RecFlag -- Whether the binding is recursive for typechecking purposes
-- i.e. the binders are mentioned in their RHSs, and
-- we are not rescued by a type signature
-> TcSigFun -> LetBndrSpec
-> [LHsBind Name]
-> TcM (LHsBinds TcId, [MonoBindInfo])
tcMonoBinds is_rec sig_fn no_gen
[ L b_loc (FunBind { fun_id = L nm_loc name, fun_infix = inf,
fun_matches = matches, bind_fvs = fvs })]
-- Single function binding,
| NonRecursive <- is_rec -- ...binder isn't mentioned in RHS
, Nothing <- sig_fn name -- ...with no type signature
= -- In this very special case we infer the type of the
-- right hand side first (it may have a higher-rank type)
-- and *then* make the monomorphic Id for the LHS
-- e.g. f = \(x::forall a. a->a) -> <body>
-- We want to infer a higher-rank type for f
setSrcSpan b_loc $
do { rhs_ty <- newFlexiTyVarTy openTypeKind
; mono_id <- newNoSigLetBndr no_gen name rhs_ty
; (co_fn, matches') <- tcExtendIdBndrs [TcIdBndr mono_id NotTopLevel] $
-- We extend the error context even for a non-recursive
-- function so that in type error messages we show the
-- type of the thing whose rhs we are type checking
tcMatchesFun name inf matches rhs_ty
; return (unitBag $ L b_loc $
FunBind { fun_id = L nm_loc mono_id, fun_infix = inf,
fun_matches = matches', bind_fvs = fvs,
fun_co_fn = co_fn, fun_tick = [] },
[(name, Nothing, mono_id)]) }
tcMonoBinds _ sig_fn no_gen binds
= do { tc_binds <- mapM (wrapLocM (tcLhs sig_fn no_gen)) binds
           -- Bring the monomorphic Ids into scope for the RHSs
; let mono_info = getMonoBindInfo tc_binds
rhs_id_env = [(name, mono_id) | (name, mb_sig, mono_id) <- mono_info
, noCompleteSig mb_sig ]
-- A monomorphic binding for each term variable that lacks
-- a type sig. (Ones with a sig are already in scope.)
; traceTc "tcMonoBinds" $ vcat [ ppr n <+> ppr id <+> ppr (idType id)
| (n,id) <- rhs_id_env]
; binds' <- tcExtendIdEnv2 rhs_id_env $
mapM (wrapLocM tcRhs) tc_binds
; return (listToBag binds', mono_info) }
------------------------
-- tcLhs typechecks the LHS of the bindings, to construct the environment in which
-- we typecheck the RHSs. Basically what we are doing is this: for each binder:
-- if there's a signature for it, use the instantiated signature type
-- otherwise invent a type variable
-- You see that quite directly in the FunBind case.
--
-- But there's a complication for pattern bindings:
-- data T = MkT (forall a. a->a)
-- MkT f = e
-- Here we can guess a type variable for the entire LHS (which will be refined to T)
-- but we want to get (f::forall a. a->a) as the RHS environment.
-- The simplest way to do this is to typecheck the pattern, and then look up the
-- bound mono-ids. Then we want to retain the typechecked pattern to avoid re-doing
-- it; hence the TcMonoBind data type in which the LHS is done but the RHS isn't
data TcMonoBind -- Half completed; LHS done, RHS not done
= TcFunBind MonoBindInfo SrcSpan Bool (MatchGroup Name (LHsExpr Name))
| TcPatBind [MonoBindInfo] (LPat TcId) (GRHSs Name (LHsExpr Name)) TcSigmaType
type MonoBindInfo = (Name, Maybe TcSigInfo, TcId)
-- Type signature (if any), and
-- the monomorphic bound things
tcLhs :: TcSigFun -> LetBndrSpec -> HsBind Name -> TcM TcMonoBind
tcLhs sig_fn no_gen (FunBind { fun_id = L nm_loc name, fun_infix = inf, fun_matches = matches })
| Just sig <- sig_fn name
= ASSERT2( case no_gen of { LetLclBndr -> True; LetGblBndr {} -> False }
, ppr name ) -- { f :: ty; f x = e } is always done via CheckGen
-- which gives rise to LetLclBndr. It wouldn't make
-- sense to have a *polymorphic* function Id at this point
do { mono_name <- newLocalName name
; let mono_id = mkLocalId mono_name (sig_tau sig)
; addErrCtxt (typeSigCtxt name sig) $
emitWildcardHoleConstraints (sig_nwcs sig)
; return (TcFunBind (name, Just sig, mono_id) nm_loc inf matches) }
| otherwise
= do { mono_ty <- newFlexiTyVarTy openTypeKind
; mono_id <- newNoSigLetBndr no_gen name mono_ty
; return (TcFunBind (name, Nothing, mono_id) nm_loc inf matches) }
  -- TODO: emit Hole Constraints for wildcards
tcLhs sig_fn no_gen (PatBind { pat_lhs = pat, pat_rhs = grhss })
= do { let tc_pat exp_ty = tcLetPat sig_fn no_gen pat exp_ty $
mapM lookup_info (collectPatBinders pat)
-- After typechecking the pattern, look up the binder
-- names, which the pattern has brought into scope.
lookup_info :: Name -> TcM MonoBindInfo
lookup_info name = do { mono_id <- tcLookupId name
; return (name, sig_fn name, mono_id) }
; ((pat', infos), pat_ty) <- addErrCtxt (patMonoBindsCtxt pat grhss) $
tcInfer tc_pat
; return (TcPatBind infos pat' grhss pat_ty) }
tcLhs _ _ other_bind = pprPanic "tcLhs" (ppr other_bind)
-- AbsBind, VarBind impossible
-------------------
tcRhs :: TcMonoBind -> TcM (HsBind TcId)
-- When we are doing pattern bindings, or multiple function bindings at a time
-- we *don't* bring any scoped type variables into scope
-- Why not? They are not completely rigid.
-- That's why we have the special case for a single FunBind in tcMonoBinds
tcRhs (TcFunBind (_, mb_sig, mono_id) loc inf matches)
= tcExtendIdBndrs [TcIdBndr mono_id NotTopLevel] $
tcExtendTyVarEnv2 tvsAndNwcs $
-- NotTopLevel: it's a monomorphic binding
do { traceTc "tcRhs: fun bind" (ppr mono_id $$ ppr (idType mono_id))
; (co_fn, matches') <- tcMatchesFun (idName mono_id) inf
matches (idType mono_id)
; return (FunBind { fun_id = L loc mono_id, fun_infix = inf
, fun_matches = matches'
, fun_co_fn = co_fn
, bind_fvs = placeHolderNamesTc
, fun_tick = [] }) }
where
tvsAndNwcs = maybe [] (\sig -> [(n, tv) | (Just n, tv) <- sig_tvs sig]
++ sig_nwcs sig) mb_sig
tcRhs (TcPatBind infos pat' grhss pat_ty)
= tcExtendIdBndrs [ TcIdBndr mono_id NotTopLevel | (_,_,mono_id) <- infos ] $
-- NotTopLevel: it's a monomorphic binding
do { traceTc "tcRhs: pat bind" (ppr pat' $$ ppr pat_ty)
; grhss' <- addErrCtxt (patMonoBindsCtxt pat' grhss) $
tcGRHSsPat grhss pat_ty
; return (PatBind { pat_lhs = pat', pat_rhs = grhss', pat_rhs_ty = pat_ty
, bind_fvs = placeHolderNamesTc
, pat_ticks = ([],[]) }) }
---------------------
getMonoBindInfo :: [Located TcMonoBind] -> [MonoBindInfo]
getMonoBindInfo tc_binds
= foldr (get_info . unLoc) [] tc_binds
where
get_info (TcFunBind info _ _ _) rest = info : rest
get_info (TcPatBind infos _ _ _) rest = infos ++ rest
{-
************************************************************************
* *
Signatures
* *
************************************************************************
Type signatures are tricky. See Note [Signature skolems] in TcType
@tcSigs@ checks the signatures for validity, and returns a list of
{\em freshly-instantiated} signatures. That is, the types are already
split up, and have fresh type variables installed. All non-type-signature
"RenamedSigs" are ignored.
The @TcSigInfo@ contains @TcTypes@ because they are unified with
the variable's type, and after that checked to see whether they've
been instantiated.
Note [Scoped tyvars]
~~~~~~~~~~~~~~~~~~~~
The -XScopedTypeVariables flag brings lexically-scoped type variables
into scope for any explicitly forall-quantified type variables:
f :: forall a. a -> a
f x = e
Then 'a' is in scope inside 'e'.
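For instance (a sketch, not taken from any particular library):
    f :: forall a. [a] -> [a]
    f xs = ys ++ ys
      where
        ys :: [a]        -- this 'a' is the one bound by f's signature
        ys = reverse xs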
However, we do *not* support this
  - For pattern bindings, e.g.
f :: forall a. a->a
(f,g) = e
Note [Signature skolems]
~~~~~~~~~~~~~~~~~~~~~~~~
When instantiating a type signature, we do so with either skolems or
SigTv meta-type variables depending on the use_skols boolean. This
variable is set True when we are typechecking a single function
binding; and False for pattern bindings and a group of several
function bindings.
Reason: in the latter cases, the "skolems" can be unified together,
so they aren't properly rigid in the type-refinement sense.
NB: unless we are doing H98, each function with a sig will be done
separately, even if it's mutually recursive, so use_skols will be True
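A hedged sketch of the pattern-binding case (the names are made up):
    f :: a -> a
    g :: b -> b
    (f, g) = e
Here the signatures are instantiated with SigTv meta variables rather than
skolems, because typechecking the pattern against the type of 'e' may need
to unify those variables; they are not rigid the way a lone FunBind's
signature variables are.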
Note [Only scoped tyvars are in the TyVarEnv]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We are careful to keep only the *lexically scoped* type variables in
the type environment. Why? After all, the renamer has ensured
that only legal occurrences occur, so we could put all type variables
into the type env.
But we want to check that two distinct lexically scoped type variables
do not map to the same internal type variable. So we need to know which
the lexically-scoped ones are... and at the moment we do that by putting
only the lexically scoped ones into the environment.
Note [Instantiate sig with fresh variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's vital to instantiate a type signature with fresh variables.
For example:
type T = forall a. [a] -> [a]
f :: T;
f = g where { g :: T; g = <rhs> }
We must not use the same 'a' from the defn of T at both places!!
(Instantiation is only necessary because of type synonyms. Otherwise,
it's all cool; each signature has distinct type variables from the renamer.)
Note [Fail eagerly on bad signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a type signature is wrong, fail immediately:
* the type sigs may bind type variables, so proceeding without them
can lead to a cascade of errors
* the type signature might be ambiguous, in which case checking
the code against the signature will give a very similar error
to the ambiguity error.
ToDo: this means we fall over if any type sig
is wrong (eg at the top level of the module),
which is over-conservative
-}
tcTySigs :: [LSig Name] -> TcM ([TcId], TcSigFun, [TcTyVar])
tcTySigs hs_sigs
= checkNoErrs $ -- See Note [Fail eagerly on bad signatures]
do { (ty_sigs_s, tyvarsl) <- unzip <$> mapAndRecoverM tcTySig hs_sigs
; let ty_sigs = concat ty_sigs_s
poly_ids = [id | TcSigInfo{ sig_id = id } <- ty_sigs]
env = mkNameEnv [(getName sig, sig) | sig <- ty_sigs]
; return (poly_ids, lookupNameEnv env, concat tyvarsl) }
tcTySig :: LSig Name -> TcM ([TcSigInfo], [TcTyVar])
tcTySig (L _ (IdSig id))
= do { sig <- instTcTySigFromId id
; return ([sig], []) }
tcTySig (L loc (TypeSig names@(L _ name1 : _) hs_ty wcs))
= setSrcSpan loc $
pushTcLevelM $
do { nwc_tvs <- mapM newWildcardVarMetaKind wcs -- Generate fresh meta vars for the wildcards
; sigma_ty <- tcExtendTyVarEnv nwc_tvs $ tcHsSigType (FunSigCtxt name1) hs_ty
; sigs <- mapM (instTcTySig hs_ty sigma_ty (extra_cts hs_ty) (zip wcs nwc_tvs))
(map unLoc names)
; return (sigs, nwc_tvs) }
where
extra_cts (L _ (HsForAllTy _ extra _ _ _)) = extra
extra_cts _ = Nothing
tcTySig (L loc (PatSynSig (L _ name) (_, qtvs) prov req ty))
= setSrcSpan loc $
do { traceTc "tcTySig {" $ ppr name $$ ppr qtvs $$ ppr prov $$ ppr req $$ ppr ty
; let ctxt = FunSigCtxt name
; tcHsTyVarBndrs qtvs $ \ qtvs' -> do
{ ty' <- tcHsSigType ctxt ty
; req' <- tcHsContext req
; prov' <- tcHsContext prov
; qtvs' <- mapM zonkQuantifiedTyVar qtvs'
; let (_, pat_ty) = tcSplitFunTys ty'
univ_set = tyVarsOfType pat_ty
(univ_tvs, ex_tvs) = partition (`elemVarSet` univ_set) qtvs'
; traceTc "tcTySig }" $ ppr (ex_tvs, prov') $$ ppr (univ_tvs, req') $$ ppr ty'
; let tpsi = TPSI{ patsig_name = name,
patsig_tau = ty',
patsig_ex = ex_tvs,
patsig_univ = univ_tvs,
patsig_prov = prov',
patsig_req = req' }
; return ([TcPatSynInfo tpsi], []) }}
tcTySig _ = return ([], [])
instTcTySigFromId :: Id -> TcM TcSigInfo
instTcTySigFromId id
= do { let loc = getSrcSpan id
; (tvs, theta, tau) <- tcInstType (tcInstSigTyVarsLoc loc)
(idType id)
; return (TcSigInfo { sig_id = id, sig_loc = loc
, sig_tvs = [(Nothing, tv) | tv <- tvs]
, sig_nwcs = []
, sig_theta = theta, sig_tau = tau
, sig_extra_cts = Nothing
, sig_partial = False }) }
instTcTySig :: LHsType Name -> TcType -- HsType and corresponding TcType
-> Maybe SrcSpan -- Just loc <=> an extra-constraints
-- wildcard is present at location loc.
-> [(Name, TcTyVar)] -> Name -> TcM TcSigInfo
instTcTySig hs_ty@(L loc _) sigma_ty extra_cts nwcs name
= do { (inst_tvs, theta, tau) <- tcInstType tcInstSigTyVars sigma_ty
; return (TcSigInfo { sig_id = mkLocalId name sigma_ty
, sig_loc = loc
, sig_tvs = findScopedTyVars hs_ty sigma_ty inst_tvs
, sig_nwcs = nwcs
, sig_theta = theta, sig_tau = tau
, sig_extra_cts = extra_cts
, sig_partial = isJust extra_cts || not (null nwcs) }) }
-------------------------------
data GeneralisationPlan
= NoGen -- No generalisation, no AbsBinds
| InferGen -- Implicit generalisation; there is an AbsBinds
Bool -- True <=> apply the MR; generalise only unconstrained type vars
Bool -- True <=> bindings mention only variables with closed types
-- See Note [Bindings with closed types] in TcRnTypes
| CheckGen (LHsBind Name) TcSigInfo
-- One binding with a signature
-- Explicit generalisation; there is an AbsBinds
-- A consequence of the no-AbsBinds choice (NoGen) is that there is
-- no "polymorphic Id" and "monmomorphic Id"; there is just the one
instance Outputable GeneralisationPlan where
ppr NoGen = ptext (sLit "NoGen")
ppr (InferGen b c) = ptext (sLit "InferGen") <+> ppr b <+> ppr c
ppr (CheckGen _ s) = ptext (sLit "CheckGen") <+> ppr s
decideGeneralisationPlan
:: DynFlags -> TcTypeEnv -> [Name]
-> [LHsBind Name] -> TcSigFun -> GeneralisationPlan
decideGeneralisationPlan dflags type_env bndr_names lbinds sig_fn
| strict_pat_binds = NoGen
| Just (lbind, sig) <- one_funbind_with_sig lbinds = CheckGen lbind sig
| mono_local_binds = NoGen
| otherwise = InferGen mono_restriction closed_flag
where
bndr_set = mkNameSet bndr_names
binds = map unLoc lbinds
strict_pat_binds = any isStrictHsBind binds
-- Strict patterns (top level bang or unboxed tuple) must not
-- be polymorphic, because we are going to force them
-- See Trac #4498, #8762
mono_restriction = xopt Opt_MonomorphismRestriction dflags
&& any restricted binds
is_closed_ns :: NameSet -> Bool -> Bool
is_closed_ns ns b = foldNameSet ((&&) . is_closed_id) b ns
-- ns are the Names referred to from the RHS of this bind
is_closed_id :: Name -> Bool
-- See Note [Bindings with closed types] in TcRnTypes
is_closed_id name
| name `elemNameSet` bndr_set
       = True   -- Ignore binders in this group, of course
| Just thing <- lookupNameEnv type_env name
= case thing of
ATcId { tct_closed = cl } -> isTopLevel cl -- This is the key line
ATyVar {} -> False -- In-scope type variables
AGlobal {} -> True -- are not closed!
_ -> pprPanic "is_closed_id" (ppr name)
| otherwise
= WARN( isInternalName name, ppr name ) True
-- The free-var set for a top level binding mentions
-- imported things too, so that we can report unused imports
-- These won't be in the local type env.
-- Ditto class method etc from the current module
closed_flag = foldr (is_closed_ns . bind_fvs) True binds
mono_local_binds = xopt Opt_MonoLocalBinds dflags
&& not closed_flag
no_sig n = noCompleteSig (sig_fn n)
-- With OutsideIn, all nested bindings are monomorphic
-- except a single function binding with a signature
one_funbind_with_sig [lbind@(L _ (FunBind { fun_id = v }))]
= case sig_fn (unLoc v) of
Nothing -> Nothing
Just sig | isPartialSig sig -> Nothing
Just sig | otherwise -> Just (lbind, sig)
one_funbind_with_sig _
= Nothing
  -- The Haskell 98 monomorphism restriction
restricted (PatBind {}) = True
restricted (VarBind { var_id = v }) = no_sig v
restricted (FunBind { fun_id = v, fun_matches = m }) = restricted_match m
&& no_sig (unLoc v)
restricted (PatSynBind {}) = panic "isRestrictedGroup/unrestricted PatSynBind"
restricted (AbsBinds {}) = panic "isRestrictedGroup/unrestricted AbsBinds"
restricted_match (MG { mg_alts = L _ (Match _ [] _ _) : _ }) = True
restricted_match _ = False
-- No args => like a pattern binding
-- Some args => a function binding
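     -- A hedged example of the distinction (bindings made up):
     --     plus = (+)          -- no args, no signature: restricted, MR applies
     --     plus' x y = x + y   -- has args: not restricted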
-------------------
checkStrictBinds :: TopLevelFlag -> RecFlag
-> [LHsBind Name]
-> LHsBinds TcId -> [Id]
-> TcM ()
-- Check that non-overloaded unlifted bindings are
-- a) non-recursive,
-- b) not top level,
-- c) not a multiple-binding group (more or less implied by (a))
checkStrictBinds top_lvl rec_group orig_binds tc_binds poly_ids
| unlifted_bndrs || any_strict_pat -- This binding group must be matched strictly
= do { checkTc (isNotTopLevel top_lvl)
(strictBindErr "Top-level" unlifted_bndrs orig_binds)
; checkTc (isNonRec rec_group)
(strictBindErr "Recursive" unlifted_bndrs orig_binds)
; checkTc (all is_monomorphic (bagToList tc_binds))
(polyBindErr orig_binds)
-- data Ptr a = Ptr Addr#
-- f x = let p@(Ptr y) = ... in ...
-- Here the binding for 'p' is polymorphic, but does
-- not mix with an unlifted binding for 'y'. You should
-- use a bang pattern. Trac #6078.
; checkTc (isSingleton orig_binds)
(strictBindErr "Multiple" unlifted_bndrs orig_binds)
-- Complain about a binding that looks lazy
-- e.g. let I# y = x in ...
-- Remember, in checkStrictBinds we are going to do strict
-- matching, so (for software engineering reasons) we insist
-- that the strictness is manifest on each binding
-- However, lone (unboxed) variables are ok
; checkTc (not any_pat_looks_lazy)
(unliftedMustBeBang orig_binds) }
| otherwise
= traceTc "csb2" (ppr poly_ids) >>
return ()
where
unlifted_bndrs = any is_unlifted poly_ids
any_strict_pat = any (isStrictHsBind . unLoc) orig_binds
any_pat_looks_lazy = any (looksLazyPatBind . unLoc) orig_binds
is_unlifted id = case tcSplitSigmaTy (idType id) of
(_, _, rho) -> isUnLiftedType rho
-- For the is_unlifted check, we need to look inside polymorphism
-- and overloading. E.g. x = (# 1, True #)
-- would get type forall a. Num a => (# a, Bool #)
-- and we want to reject that. See Trac #9140
is_monomorphic (L _ (AbsBinds { abs_tvs = tvs, abs_ev_vars = evs }))
= null tvs && null evs
is_monomorphic _ = True
unliftedMustBeBang :: [LHsBind Name] -> SDoc
unliftedMustBeBang binds
= hang (text "Pattern bindings containing unlifted types should use an outermost bang pattern:")
2 (vcat (map ppr binds))
polyBindErr :: [LHsBind Name] -> SDoc
polyBindErr binds
= hang (ptext (sLit "You can't mix polymorphic and unlifted bindings"))
2 (vcat [vcat (map ppr binds),
ptext (sLit "Probable fix: use a bang pattern")])
strictBindErr :: String -> Bool -> [LHsBind Name] -> SDoc
strictBindErr flavour unlifted_bndrs binds
= hang (text flavour <+> msg <+> ptext (sLit "aren't allowed:"))
2 (vcat (map ppr binds))
where
msg | unlifted_bndrs = ptext (sLit "bindings for unlifted types")
| otherwise = ptext (sLit "bang-pattern or unboxed-tuple bindings")
{-
Note [Binding scoped type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
************************************************************************
* *
\subsection[TcBinds-errors]{Error contexts and messages}
* *
************************************************************************
-}
-- This one is called on LHS, when pat and grhss are both Name
-- and on RHS, when pat is TcId and grhss is still Name
patMonoBindsCtxt :: (OutputableBndr id, Outputable body) => LPat id -> GRHSs Name body -> SDoc
patMonoBindsCtxt pat grhss
= hang (ptext (sLit "In a pattern binding:")) 2 (pprPatBind pat grhss)
typeSigCtxt :: Name -> TcSigInfo -> SDoc
typeSigCtxt _ (TcPatSynInfo _)
= panic "Should only be called with a TcSigInfo"
typeSigCtxt name (TcSigInfo { sig_id = _id, sig_tvs = tvs
, sig_theta = theta, sig_tau = tau
, sig_extra_cts = extra_cts })
= sep [ text "In" <+> pprUserTypeCtxt (FunSigCtxt name) <> colon
, nest 2 (pprSigmaTypeExtraCts (isJust extra_cts)
(mkSigmaTy (map snd tvs) theta tau)) ]
|
forked-upstream-packages-for-ghcjs/ghc
|
compiler/typecheck/TcBinds.hs
|
bsd-3-clause
| 69,666
| 1
| 21
| 20,531
| 12,657
| 6,612
| 6,045
| -1
| -1
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable, DeriveFunctor, DeriveFoldable,
DeriveTraversable #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- Note [Pass sensitive types]
-- in module PlaceHolder
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Abstract syntax of global declarations.
--
-- Definitions for: @SynDecl@ and @ConDecl@, @ClassDecl@,
-- @InstDecl@, @DefaultDecl@ and @ForeignDecl@.
module HsDecls (
-- * Toplevel declarations
HsDecl(..), LHsDecl, HsDataDefn(..),
-- ** Class or type declarations
TyClDecl(..), LTyClDecl,
TyClGroup(..), tyClGroupConcat, mkTyClGroup,
isClassDecl, isDataDecl, isSynDecl, tcdName,
isFamilyDecl, isTypeFamilyDecl, isDataFamilyDecl,
isOpenTypeFamilyInfo, isClosedTypeFamilyInfo,
tyFamInstDeclName, tyFamInstDeclLName,
countTyClDecls, pprTyClDeclFlavour,
tyClDeclLName, tyClDeclTyVars,
hsDeclHasCusk, famDeclHasCusk,
FamilyDecl(..), LFamilyDecl,
-- ** Instance declarations
InstDecl(..), LInstDecl, NewOrData(..), FamilyInfo(..),
TyFamInstDecl(..), LTyFamInstDecl, instDeclDataFamInsts,
DataFamInstDecl(..), LDataFamInstDecl, pprDataFamInstFlavour,
TyFamEqn(..), TyFamInstEqn, LTyFamInstEqn, TyFamDefltEqn, LTyFamDefltEqn,
HsTyPats,
LClsInstDecl, ClsInstDecl(..),
-- ** Standalone deriving declarations
DerivDecl(..), LDerivDecl,
-- ** @RULE@ declarations
LRuleDecls,RuleDecls(..),RuleDecl(..), LRuleDecl, RuleBndr(..),LRuleBndr,
collectRuleBndrSigTys,
flattenRuleDecls,
-- ** @VECTORISE@ declarations
VectDecl(..), LVectDecl,
lvectDeclName, lvectInstDecl,
-- ** @default@ declarations
DefaultDecl(..), LDefaultDecl,
-- ** Template haskell declaration splice
SpliceExplicitFlag(..),
SpliceDecl(..), LSpliceDecl,
-- ** Foreign function interface declarations
ForeignDecl(..), LForeignDecl, ForeignImport(..), ForeignExport(..),
noForeignImportCoercionYet, noForeignExportCoercionYet,
CImportSpec(..),
-- ** Data-constructor declarations
ConDecl(..), LConDecl, ResType(..),
HsConDeclDetails, hsConDeclArgTys,
-- ** Document comments
DocDecl(..), LDocDecl, docDeclDoc,
-- ** Deprecations
WarnDecl(..), LWarnDecl,
WarnDecls(..), LWarnDecls,
-- ** Annotations
AnnDecl(..), LAnnDecl,
AnnProvenance(..), annProvenanceName_maybe,
-- ** Role annotations
RoleAnnotDecl(..), LRoleAnnotDecl, roleAnnotDeclName,
-- * Grouping
HsGroup(..), emptyRdrGroup, emptyRnGroup, appendGroups
) where
-- friends:
import {-# SOURCE #-} HsExpr( LHsExpr, HsExpr, HsSplice, pprExpr, pprUntypedSplice )
-- Because Expr imports Decls via HsBracket
import HsBinds
import HsPat
import HsTypes
import HsDoc
import TyCon
import Name
import BasicTypes
import Coercion
import ForeignCall
import PlaceHolder ( PostTc,PostRn,PlaceHolder(..),DataId )
import NameSet
-- others:
import InstEnv
import Class
import Outputable
import Util
import SrcLoc
import FastString
import Bag
import Data.Data hiding (TyCon,Fixity)
#if __GLASGOW_HASKELL__ < 709
import Data.Foldable ( Foldable )
import Data.Traversable ( Traversable )
#endif
import Data.Maybe
{-
************************************************************************
* *
\subsection[HsDecl]{Declarations}
* *
************************************************************************
-}
type LHsDecl id = Located (HsDecl id)
-- ^ When in a list this may have
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi'
--
-- For details on above see note [Api annotations] in ApiAnnotation
-- | A Haskell Declaration
data HsDecl id
= TyClD (TyClDecl id) -- ^ A type or class declaration.
| InstD (InstDecl id) -- ^ An instance declaration.
| DerivD (DerivDecl id)
| ValD (HsBind id)
| SigD (Sig id)
| DefD (DefaultDecl id)
| ForD (ForeignDecl id)
| WarningD (WarnDecls id)
| AnnD (AnnDecl id)
| RuleD (RuleDecls id)
| VectD (VectDecl id)
| SpliceD (SpliceDecl id)
| DocD (DocDecl)
| QuasiQuoteD (HsQuasiQuote id)
| RoleAnnotD (RoleAnnotDecl id)
deriving (Typeable)
deriving instance (DataId id) => Data (HsDecl id)
-- NB: all top-level fixity decls are contained
-- EITHER in SigDs
-- OR     in the ClassDecls in TyClDs
--
-- The former covers
-- a) data constructors
-- b) class methods (but they can be also done in the
-- signatures of class decls)
-- c) imported functions (that have an IfacSig)
-- d) top level decls
--
-- The latter is for class methods only
-- | A 'HsDecl' is categorised into a 'HsGroup' before being
-- fed to the renamer.
data HsGroup id
= HsGroup {
hs_valds :: HsValBinds id,
hs_splcds :: [LSpliceDecl id],
hs_tyclds :: [TyClGroup id],
-- A list of mutually-recursive groups
-- No family-instances here; they are in hs_instds
-- Parser generates a singleton list;
-- renamer does dependency analysis
hs_instds :: [LInstDecl id],
-- Both class and family instance declarations in here
hs_derivds :: [LDerivDecl id],
hs_fixds :: [LFixitySig id],
-- Snaffled out of both top-level fixity signatures,
-- and those in class declarations
hs_defds :: [LDefaultDecl id],
hs_fords :: [LForeignDecl id],
hs_warnds :: [LWarnDecls id],
hs_annds :: [LAnnDecl id],
hs_ruleds :: [LRuleDecls id],
hs_vects :: [LVectDecl id],
hs_docs :: [LDocDecl]
} deriving (Typeable)
deriving instance (DataId id) => Data (HsGroup id)
emptyGroup, emptyRdrGroup, emptyRnGroup :: HsGroup a
emptyRdrGroup = emptyGroup { hs_valds = emptyValBindsIn }
emptyRnGroup = emptyGroup { hs_valds = emptyValBindsOut }
emptyGroup = HsGroup { hs_tyclds = [], hs_instds = [],
hs_derivds = [],
hs_fixds = [], hs_defds = [], hs_annds = [],
hs_fords = [], hs_warnds = [], hs_ruleds = [], hs_vects = [],
hs_valds = error "emptyGroup hs_valds: Can't happen",
hs_splcds = [],
hs_docs = [] }
appendGroups :: HsGroup a -> HsGroup a -> HsGroup a
appendGroups
HsGroup {
hs_valds = val_groups1,
hs_splcds = spliceds1,
hs_tyclds = tyclds1,
hs_instds = instds1,
hs_derivds = derivds1,
hs_fixds = fixds1,
hs_defds = defds1,
hs_annds = annds1,
hs_fords = fords1,
hs_warnds = warnds1,
hs_ruleds = rulds1,
hs_vects = vects1,
hs_docs = docs1 }
HsGroup {
hs_valds = val_groups2,
hs_splcds = spliceds2,
hs_tyclds = tyclds2,
hs_instds = instds2,
hs_derivds = derivds2,
hs_fixds = fixds2,
hs_defds = defds2,
hs_annds = annds2,
hs_fords = fords2,
hs_warnds = warnds2,
hs_ruleds = rulds2,
hs_vects = vects2,
hs_docs = docs2 }
=
HsGroup {
hs_valds = val_groups1 `plusHsValBinds` val_groups2,
hs_splcds = spliceds1 ++ spliceds2,
hs_tyclds = tyclds1 ++ tyclds2,
hs_instds = instds1 ++ instds2,
hs_derivds = derivds1 ++ derivds2,
hs_fixds = fixds1 ++ fixds2,
hs_annds = annds1 ++ annds2,
hs_defds = defds1 ++ defds2,
hs_fords = fords1 ++ fords2,
hs_warnds = warnds1 ++ warnds2,
hs_ruleds = rulds1 ++ rulds2,
hs_vects = vects1 ++ vects2,
hs_docs = docs1 ++ docs2 }
instance OutputableBndr name => Outputable (HsDecl name) where
ppr (TyClD dcl) = ppr dcl
ppr (ValD binds) = ppr binds
ppr (DefD def) = ppr def
ppr (InstD inst) = ppr inst
ppr (DerivD deriv) = ppr deriv
ppr (ForD fd) = ppr fd
ppr (SigD sd) = ppr sd
ppr (RuleD rd) = ppr rd
ppr (VectD vect) = ppr vect
ppr (WarningD wd) = ppr wd
ppr (AnnD ad) = ppr ad
ppr (SpliceD dd) = ppr dd
ppr (DocD doc) = ppr doc
ppr (QuasiQuoteD qq) = ppr qq
ppr (RoleAnnotD ra) = ppr ra
instance OutputableBndr name => Outputable (HsGroup name) where
ppr (HsGroup { hs_valds = val_decls,
hs_tyclds = tycl_decls,
hs_instds = inst_decls,
hs_derivds = deriv_decls,
hs_fixds = fix_decls,
hs_warnds = deprec_decls,
hs_annds = ann_decls,
hs_fords = foreign_decls,
hs_defds = default_decls,
hs_ruleds = rule_decls,
hs_vects = vect_decls })
= vcat_mb empty
[ppr_ds fix_decls, ppr_ds default_decls,
ppr_ds deprec_decls, ppr_ds ann_decls,
ppr_ds rule_decls,
ppr_ds vect_decls,
if isEmptyValBinds val_decls
then Nothing
else Just (ppr val_decls),
ppr_ds (tyClGroupConcat tycl_decls),
ppr_ds inst_decls,
ppr_ds deriv_decls,
ppr_ds foreign_decls]
where
ppr_ds :: Outputable a => [a] -> Maybe SDoc
ppr_ds [] = Nothing
ppr_ds ds = Just (vcat (map ppr ds))
vcat_mb :: SDoc -> [Maybe SDoc] -> SDoc
-- Concatenate vertically with white-space between non-blanks
vcat_mb _ [] = empty
vcat_mb gap (Nothing : ds) = vcat_mb gap ds
vcat_mb gap (Just d : ds) = gap $$ d $$ vcat_mb blankLine ds
data SpliceExplicitFlag = ExplicitSplice | -- <=> $(f x y)
ImplicitSplice -- <=> f x y, i.e. a naked top level expression
deriving (Data, Typeable)
type LSpliceDecl name = Located (SpliceDecl name)
data SpliceDecl id
= SpliceDecl -- Top level splice
(Located (HsSplice id))
SpliceExplicitFlag
deriving (Typeable)
deriving instance (DataId id) => Data (SpliceDecl id)
instance OutputableBndr name => Outputable (SpliceDecl name) where
ppr (SpliceDecl (L _ e) _) = pprUntypedSplice e
{-
************************************************************************
* *
\subsection[SynDecl]{@data@, @newtype@ or @type@ (synonym) type declaration}
* *
************************************************************************
--------------------------------
THE NAMING STORY
--------------------------------
Here is the story about the implicit names that go with type, class,
and instance decls. It's a bit tricky, so pay attention!
"Implicit" (or "system") binders
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Each data type decl defines
a worker name for each constructor
to-T and from-T convertors
Each class decl defines
a tycon for the class
a data constructor for that tycon
the worker for that constructor
a selector for each superclass
All have occurrence names that are derived uniquely from their parent
declaration.
None of these get separate definitions in an interface file; they are
fully defined by the data or class decl. But they may *occur* in
interface files, of course. Any such occurrence must haul in the
relevant type or class decl.
Plan of attack:
- Ensure they "point to" the parent data/class decl
when loading that decl from an interface file
(See RnHiFiles.getSysBinders)
- When typechecking the decl, we build the implicit TyCons and Ids.
When doing so we look them up in the name cache (RnEnv.lookupSysName),
to ensure correct module and provenance is set
These are the two places that we have to conjure up the magic derived
names. (The actual magic is in OccName.mkWorkerOcc, etc.)
Default methods
~~~~~~~~~~~~~~~
- Occurrence name is derived uniquely from the method name
E.g. $dmmax
- If there is a default method name at all, it's recorded in
the ClassOpSig (in HsBinds), in the DefMeth field.
(DefMeth is defined in Class.lhs)
Source-code class decls and interface-code class decls are treated subtly
differently, which has given me a great deal of confusion over the years.
Here's the deal. (We distinguish the two cases because source-code decls
have (Just binds) in the tcdMeths field, whereas interface decls have Nothing.)
In *source-code* class declarations:
- When parsing, every ClassOpSig gets a DefMeth with a suitable RdrName
This is done by RdrHsSyn.mkClassOpSigDM
- The renamer renames it to a Name
- During typechecking, we generate a binding for each $dm for
which there's a programmer-supplied default method:
class Foo a where
op1 :: <type>
op2 :: <type>
op1 = ...
We generate a binding for $dmop1 but not for $dmop2.
The Class for Foo has a NoDefMeth for op2 and a DefMeth for op1.
The Name for $dmop2 is simply discarded.
In *interface-file* class declarations:
- When parsing, we see if there's an explicit programmer-supplied default method
because there's an '=' sign to indicate it:
class Foo a where
op1 = :: <type> -- NB the '='
op2 :: <type>
We use this info to generate a DefMeth with a suitable RdrName for op1,
and a NoDefMeth for op2
- The interface file has a separate definition for $dmop1, with unfolding etc.
- The renamer renames it to a Name.
- The renamer treats $dmop1 as a free variable of the declaration, so that
the binding for $dmop1 will be sucked in. (See RnHsSyn.tyClDeclFVs)
This doesn't happen for source code class decls, because they *bind* the default method.
Dictionary functions
~~~~~~~~~~~~~~~~~~~~
Each instance declaration gives rise to one dictionary function binding.
The type checker makes up new source-code instance declarations
(e.g. from 'deriving' or generic default methods --- see
TcInstDcls.tcInstDecls1). So we can't generate the names for
dictionary functions in advance (we don't know how many we need).
On the other hand for interface-file instance declarations, the decl
specifies the name of the dictionary function, and it has a binding elsewhere
in the interface file:
instance {Eq Int} = dEqInt
dEqInt :: {Eq Int} <pragma info>
So again we treat source code and interface file code slightly differently.
Source code:
- Source code instance decls have a Nothing in the (Maybe name) field
(see data InstDecl below)
- The typechecker makes up a Local name for the dict fun for any source-code
instance decl, whether it comes from a source-code instance decl, or whether
the instance decl is derived from some other construct (e.g. 'deriving').
- The occurrence name it chooses is derived from the instance decl (just for
documentation really) --- e.g. dNumInt. Two dict funs may share a common
occurrence name, but will have different uniques. E.g.
instance Foo [Int] where ...
instance Foo [Bool] where ...
These might both be dFooList
- The CoreTidy phase externalises the name, and ensures the occurrence name is
unique (this isn't special to dict funs). So we'd get dFooList and dFooList1.
- We can take this relaxed approach (changing the occurrence name later)
because dict fun Ids are not captured in a TyCon or Class (unlike default
methods, say). Instead, they are kept separately in the InstEnv. This
makes it easy to adjust them after compiling a module. (Once we've finished
compiling that module, they don't change any more.)
Interface file code:
- The instance decl gives the dict fun name, so the InstDecl has a (Just name)
in the (Maybe name) field.
- RnHsSyn.instDeclFVs treats the dict fun name as free in the decl, so that we
suck in the dfun binding
-}
type LTyClDecl name = Located (TyClDecl name)
-- | A type or class declaration.
data TyClDecl name
= -- | @type/data family T :: *->*@
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnData',
-- 'ApiAnnotation.AnnFamily','ApiAnnotation.AnnWhere',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnDcolon',
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
FamDecl { tcdFam :: FamilyDecl name }
| -- | @type@ declaration
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnEqual',
-- For details on above see note [Api annotations] in ApiAnnotation
SynDecl { tcdLName :: Located name -- ^ Type constructor
, tcdTyVars :: LHsTyVarBndrs name -- ^ Type variables; for an associated type
-- these include outer binders
, tcdRhs :: LHsType name -- ^ RHS of type declaration
, tcdFVs :: PostRn name NameSet }
| -- | @data@ declaration
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnData',
-- 'ApiAnnotation.AnnFamily',
-- 'ApiAnnotation.AnnNewType',
-- 'ApiAnnotation.AnnNewType','ApiAnnotation.AnnWhere'
-- For details on above see note [Api annotations] in ApiAnnotation
DataDecl { tcdLName :: Located name -- ^ Type constructor
             , tcdTyVars :: LHsTyVarBndrs name -- ^ Type variables; for an associated type
-- these include outer binders
-- Eg class T a where
-- type F a :: *
-- type F a = a -> a
             -- Here the type decl for 'F' includes 'a'
-- in its tcdTyVars
, tcdDataDefn :: HsDataDefn name
, tcdFVs :: PostRn name NameSet }
| ClassDecl { tcdCtxt :: LHsContext name, -- ^ Context...
tcdLName :: Located name, -- ^ Name of the class
tcdTyVars :: LHsTyVarBndrs name, -- ^ Class type variables
tcdFDs :: [Located (FunDep (Located name))],
-- ^ Functional deps
tcdSigs :: [LSig name], -- ^ Methods' signatures
tcdMeths :: LHsBinds name, -- ^ Default methods
tcdATs :: [LFamilyDecl name], -- ^ Associated types;
tcdATDefs :: [LTyFamDefltEqn name], -- ^ Associated type defaults
tcdDocs :: [LDocDecl], -- ^ Haddock docs
tcdFVs :: PostRn name NameSet
}
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnClass',
-- 'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
-- - The tcdFDs will have 'ApiAnnotation.AnnVbar',
-- 'ApiAnnotation.AnnComma'
-- 'ApiAnnotation.AnnRarrow'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId id) => Data (TyClDecl id)
-- This is used in TcTyClsDecls to represent
-- strongly connected components of decls
-- No family instances in here
-- The role annotations must be grouped with their decls for the
-- type-checker to infer roles correctly
data TyClGroup name
= TyClGroup { group_tyclds :: [LTyClDecl name]
, group_roles :: [LRoleAnnotDecl name] }
deriving (Typeable)
deriving instance (DataId id) => Data (TyClGroup id)
tyClGroupConcat :: [TyClGroup name] -> [LTyClDecl name]
tyClGroupConcat = concatMap group_tyclds
mkTyClGroup :: [LTyClDecl name] -> TyClGroup name
mkTyClGroup decls = TyClGroup { group_tyclds = decls, group_roles = [] }
type LFamilyDecl name = Located (FamilyDecl name)
data FamilyDecl name = FamilyDecl
{ fdInfo :: FamilyInfo name -- type or data, closed or open
, fdLName :: Located name -- type constructor
, fdTyVars :: LHsTyVarBndrs name -- type variables
, fdKindSig :: Maybe (LHsKind name) } -- result kind
deriving( Typeable )
deriving instance (DataId id) => Data (FamilyDecl id)
data FamilyInfo name
= DataFamily
| OpenTypeFamily
-- this list might be empty, if we're in an hs-boot file and the user
-- said "type family Foo x where .."
| ClosedTypeFamily [LTyFamInstEqn name]
deriving( Typeable )
deriving instance (DataId name) => Data (FamilyInfo name)
{-
------------------------------
Simple classifiers
-}
-- | @True@ <=> argument is a @data@\/@newtype@
-- declaration.
isDataDecl :: TyClDecl name -> Bool
isDataDecl (DataDecl {}) = True
isDataDecl _other = False
-- | type or type instance declaration
isSynDecl :: TyClDecl name -> Bool
isSynDecl (SynDecl {}) = True
isSynDecl _other = False
-- | type class
isClassDecl :: TyClDecl name -> Bool
isClassDecl (ClassDecl {}) = True
isClassDecl _ = False
-- | type/data family declaration
isFamilyDecl :: TyClDecl name -> Bool
isFamilyDecl (FamDecl {}) = True
isFamilyDecl _other = False
-- | type family declaration
isTypeFamilyDecl :: TyClDecl name -> Bool
isTypeFamilyDecl (FamDecl (FamilyDecl { fdInfo = info })) = case info of
OpenTypeFamily -> True
ClosedTypeFamily {} -> True
_ -> False
isTypeFamilyDecl _ = False
-- | open type family info
isOpenTypeFamilyInfo :: FamilyInfo name -> Bool
isOpenTypeFamilyInfo OpenTypeFamily = True
isOpenTypeFamilyInfo _ = False
-- | closed type family info
isClosedTypeFamilyInfo :: FamilyInfo name -> Bool
isClosedTypeFamilyInfo (ClosedTypeFamily {}) = True
isClosedTypeFamilyInfo _ = False
-- | data family declaration
isDataFamilyDecl :: TyClDecl name -> Bool
isDataFamilyDecl (FamDecl (FamilyDecl { fdInfo = DataFamily })) = True
isDataFamilyDecl _other = False
-- Dealing with names
tyFamInstDeclName :: OutputableBndr name
=> TyFamInstDecl name -> name
tyFamInstDeclName = unLoc . tyFamInstDeclLName
tyFamInstDeclLName :: OutputableBndr name
=> TyFamInstDecl name -> Located name
tyFamInstDeclLName (TyFamInstDecl { tfid_eqn =
(L _ (TyFamEqn { tfe_tycon = ln })) })
= ln
tyClDeclLName :: TyClDecl name -> Located name
tyClDeclLName (FamDecl { tcdFam = FamilyDecl { fdLName = ln } }) = ln
tyClDeclLName decl = tcdLName decl
tcdName :: TyClDecl name -> name
tcdName = unLoc . tyClDeclLName
tyClDeclTyVars :: OutputableBndr name => TyClDecl name -> LHsTyVarBndrs name
tyClDeclTyVars (FamDecl { tcdFam = FamilyDecl { fdTyVars = tvs } }) = tvs
tyClDeclTyVars d = tcdTyVars d
countTyClDecls :: [TyClDecl name] -> (Int, Int, Int, Int, Int)
-- class, synonym decls, data, newtype, family decls
countTyClDecls decls
= (count isClassDecl decls,
count isSynDecl decls, -- excluding...
count isDataTy decls, -- ...family...
count isNewTy decls, -- ...instances
count isFamilyDecl decls)
where
isDataTy DataDecl{ tcdDataDefn = HsDataDefn { dd_ND = DataType } } = True
isDataTy _ = False
isNewTy DataDecl{ tcdDataDefn = HsDataDefn { dd_ND = NewType } } = True
isNewTy _ = False
-- | Does this declaration have a complete, user-supplied kind signature?
-- See Note [Complete user-supplied kind signatures]
hsDeclHasCusk :: TyClDecl name -> Bool
hsDeclHasCusk (FamDecl { tcdFam = fam_decl }) = famDeclHasCusk fam_decl
hsDeclHasCusk (SynDecl { tcdTyVars = tyvars, tcdRhs = rhs })
= hsTvbAllKinded tyvars && rhs_annotated rhs
where
rhs_annotated (L _ ty) = case ty of
HsParTy lty -> rhs_annotated lty
HsKindSig {} -> True
_ -> False
hsDeclHasCusk (DataDecl { tcdTyVars = tyvars }) = hsTvbAllKinded tyvars
hsDeclHasCusk (ClassDecl { tcdTyVars = tyvars }) = hsTvbAllKinded tyvars
-- | Does this family declaration have a complete, user-supplied kind signature?
famDeclHasCusk :: FamilyDecl name -> Bool
famDeclHasCusk (FamilyDecl { fdInfo = ClosedTypeFamily _
, fdTyVars = tyvars
, fdKindSig = m_sig })
= hsTvbAllKinded tyvars && isJust m_sig
famDeclHasCusk _ = True -- all open families have CUSKs!
{-
Note [Complete user-supplied kind signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We kind-check declarations differently if they have a complete, user-supplied
kind signature (CUSK). This is because we can safely generalise a CUSKed
declaration before checking all of the others, supporting polymorphic recursion.
See https://ghc.haskell.org/trac/ghc/wiki/GhcKinds/KindInference#Proposednewstrategy
and #9200 for lots of discussion of how we got here.
A declaration has a CUSK if we can know its complete kind without doing any
inference at all. Here are the rules:
- A class or datatype is said to have a CUSK if and only if all of its type
variables are annotated. Its result kind is, by construction, Constraint or *
respectively.
- A type synonym has a CUSK if and only if all of its type variables and its
RHS are annotated with kinds.
- A closed type family is said to have a CUSK if and only if all of its type
variables and its return type are annotated.
- An open type family always has a CUSK -- unannotated type variables (and return type) default to *.
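For illustration (a sketch; assumes -XKindSignatures, and the declarations are made up):
    data T1 (a :: *) (b :: * -> *) = MkT1 (b a)   -- every tyvar annotated: CUSK
    data T2 a b = MkT2 (b a)                      -- no CUSK; kinds must be inferred
    type family F (a :: *) :: * where F a = a     -- closed family with a CUSK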
-}
instance OutputableBndr name
=> Outputable (TyClDecl name) where
ppr (FamDecl { tcdFam = decl }) = ppr decl
ppr (SynDecl { tcdLName = ltycon, tcdTyVars = tyvars, tcdRhs = rhs })
= hang (ptext (sLit "type") <+>
pp_vanilla_decl_head ltycon tyvars [] <+> equals)
4 (ppr rhs)
ppr (DataDecl { tcdLName = ltycon, tcdTyVars = tyvars, tcdDataDefn = defn })
= pp_data_defn (pp_vanilla_decl_head ltycon tyvars) defn
ppr (ClassDecl {tcdCtxt = context, tcdLName = lclas, tcdTyVars = tyvars,
tcdFDs = fds,
tcdSigs = sigs, tcdMeths = methods,
tcdATs = ats, tcdATDefs = at_defs})
| null sigs && isEmptyBag methods && null ats && null at_defs -- No "where" part
= top_matter
| otherwise -- Laid out
= vcat [ top_matter <+> ptext (sLit "where")
, nest 2 $ pprDeclList (map ppr ats ++
map ppr_fam_deflt_eqn at_defs ++
pprLHsBindsForUser methods sigs) ]
where
top_matter = ptext (sLit "class")
<+> pp_vanilla_decl_head lclas tyvars (unLoc context)
<+> pprFundeps (map unLoc fds)
instance OutputableBndr name => Outputable (TyClGroup name) where
ppr (TyClGroup { group_tyclds = tyclds, group_roles = roles })
= ppr tyclds $$
ppr roles
instance (OutputableBndr name) => Outputable (FamilyDecl name) where
ppr (FamilyDecl { fdInfo = info, fdLName = ltycon,
fdTyVars = tyvars, fdKindSig = mb_kind})
= vcat [ pprFlavour info <+> pp_vanilla_decl_head ltycon tyvars [] <+> pp_kind <+> pp_where
, nest 2 $ pp_eqns ]
where
pp_kind = case mb_kind of
Nothing -> empty
Just kind -> dcolon <+> ppr kind
(pp_where, pp_eqns) = case info of
ClosedTypeFamily eqns -> ( ptext (sLit "where")
, if null eqns
then ptext (sLit "..")
else vcat $ map ppr_fam_inst_eqn eqns )
_ -> (empty, empty)
pprFlavour :: FamilyInfo name -> SDoc
pprFlavour DataFamily = ptext (sLit "data family")
pprFlavour OpenTypeFamily = ptext (sLit "type family")
pprFlavour (ClosedTypeFamily {}) = ptext (sLit "type family")
instance Outputable (FamilyInfo name) where
ppr = pprFlavour
pp_vanilla_decl_head :: OutputableBndr name
=> Located name
-> LHsTyVarBndrs name
-> HsContext name
-> SDoc
pp_vanilla_decl_head thing tyvars context
= hsep [pprHsContext context, pprPrefixOcc (unLoc thing), ppr tyvars]
pp_fam_inst_lhs :: OutputableBndr name
=> Located name
-> HsTyPats name
-> HsContext name
-> SDoc
pp_fam_inst_lhs thing (HsWB { hswb_cts = typats }) context -- explicit type patterns
= hsep [ pprHsContext context, pprPrefixOcc (unLoc thing)
, hsep (map (pprParendHsType.unLoc) typats)]
pprTyClDeclFlavour :: TyClDecl a -> SDoc
pprTyClDeclFlavour (ClassDecl {}) = ptext (sLit "class")
pprTyClDeclFlavour (SynDecl {}) = ptext (sLit "type")
pprTyClDeclFlavour (FamDecl { tcdFam = FamilyDecl { fdInfo = info }})
= pprFlavour info
pprTyClDeclFlavour (DataDecl { tcdDataDefn = HsDataDefn { dd_ND = nd } })
= ppr nd
{-
************************************************************************
* *
\subsection[ConDecl]{A data-constructor declaration}
* *
************************************************************************
-}
data HsDataDefn name -- The payload of a data type defn
-- Used *both* for vanilla data declarations,
-- *and* for data family instances
= -- | Declares a data type or newtype, giving its constructors
-- @
-- data/newtype T a = <constrs>
-- data/newtype instance T [a] = <constrs>
-- @
HsDataDefn { dd_ND :: NewOrData,
dd_ctxt :: LHsContext name, -- ^ Context
dd_cType :: Maybe (Located CType),
dd_kindSig:: Maybe (LHsKind name),
-- ^ Optional kind signature.
--
-- @(Just k)@ for a GADT-style @data@,
-- or @data instance@ decl, with explicit kind sig
--
-- Always @Nothing@ for H98-syntax decls
dd_cons :: [LConDecl name],
-- ^ Data constructors
--
-- For @data T a = T1 | T2 a@
-- the 'LConDecl's all have 'ResTyH98'.
-- For @data T a where { T1 :: T a }@
-- the 'LConDecls' all have 'ResTyGADT'.
dd_derivs :: Maybe (Located [LHsType name])
-- ^ Derivings; @Nothing@ => not specified,
-- @Just []@ => derive exactly what is asked
--
-- These "types" must be of form
-- @
-- forall ab. C ty1 ty2
-- @
-- Typically the foralls and ty args are empty, but they
-- are non-empty for the newtype-deriving case
--
-- - 'ApiAnnotation.AnnKeywordId' :
-- 'ApiAnnotation.AnnDeriving',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
}
deriving( Typeable )
deriving instance (DataId id) => Data (HsDataDefn id)
data NewOrData
= NewType -- ^ @newtype Blah ...@
| DataType -- ^ @data Blah ...@
deriving( Eq, Data, Typeable ) -- Needed because Demand derives Eq
type LConDecl name = Located (ConDecl name)
-- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi' when
-- in a GADT constructor list
-- For details on above see note [Api annotations] in ApiAnnotation
-- |
--
-- @
-- data T b = forall a. Eq a => MkT a b
-- MkT :: forall b a. Eq a => MkT a b
--
-- data T b where
-- MkT1 :: Int -> T Int
--
-- data T = Int `MkT` Int
-- | MkT2
--
-- data T a where
-- Int `MkT` Int :: T Int
-- @
--
-- - 'ApiAnnotation.AnnKeywordId's : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnDotdot','ApiAnnotation.AnnCLose',
-- 'ApiAnnotation.AnnEqual','ApiAnnotation.AnnVbar',
-- 'ApiAnnotation.AnnDarrow','ApiAnnotation.AnnDarrow',
-- 'ApiAnnotation.AnnForall','ApiAnnotation.AnnDot'
-- For details on above see note [Api annotations] in ApiAnnotation
data ConDecl name
= ConDecl
{ con_names :: [Located name]
-- ^ Constructor names. This is used for the DataCon itself, and for
-- the user-callable wrapper Id.
-- It is a list to deal with GADT constructors of the form
-- T1, T2, T3 :: <payload>
, con_explicit :: HsExplicitFlag
      -- ^ Is there a user-written forall? (cf. 'HsTypes.HsForAllTy')
, con_qvars :: LHsTyVarBndrs name
-- ^ Type variables. Depending on 'con_res' this describes the
-- following entities
--
-- - ResTyH98: the constructor's *existential* type variables
-- - ResTyGADT: *all* the constructor's quantified type variables
--
-- If con_explicit is Implicit, then con_qvars is irrelevant
-- until after renaming.
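      -- For illustration (a sketch; the declarations are made up):
      --     data T a = forall e. MkT e a          -- ResTyH98:  con_qvars binds just 'e'
      --     data T a where MkT :: e -> a -> T a   -- ResTyGADT: con_qvars binds both 'e' and 'a'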
, con_cxt :: LHsContext name
-- ^ The context. This /does not/ include the \"stupid theta\" which
-- lives only in the 'TyData' decl.
, con_details :: HsConDeclDetails name
-- ^ The main payload
, con_res :: ResType (LHsType name)
-- ^ Result type of the constructor
, con_doc :: Maybe LHsDocString
-- ^ A possible Haddock comment.
, con_old_rec :: Bool
-- ^ TEMPORARY field; True <=> user has employed now-deprecated syntax for
-- GADT-style record decl C { blah } :: T a b
-- Remove this when we no longer parse this stuff, and hence do not
    -- need to report deprecated use
} deriving (Typeable)
deriving instance (DataId name) => Data (ConDecl name)
type HsConDeclDetails name
= HsConDetails (LBangType name) (Located [LConDeclField name])
hsConDeclArgTys :: HsConDeclDetails name -> [LBangType name]
hsConDeclArgTys (PrefixCon tys) = tys
hsConDeclArgTys (InfixCon ty1 ty2) = [ty1,ty2]
hsConDeclArgTys (RecCon flds) = map (cd_fld_type . unLoc) (unLoc flds)
data ResType ty
= ResTyH98 -- Constructor was declared using Haskell 98 syntax
| ResTyGADT SrcSpan ty -- Constructor was declared using GADT-style syntax,
-- and here is its result type, and the SrcSpan
-- of the original sigtype, for API Annotations
deriving (Data, Typeable)
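-- For illustration (a sketch; the declarations are made up):
--     data T a = MkT a                          -- MkT's con_res is ResTyH98
--     data S a where { MkS :: Int -> S Int }    -- MkS's con_res is ResTyGADT _ (S Int)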
instance Outputable ty => Outputable (ResType ty) where
-- Debugging only
ppr ResTyH98 = ptext (sLit "ResTyH98")
ppr (ResTyGADT _ ty) = ptext (sLit "ResTyGADT") <+> ppr ty
pp_data_defn :: OutputableBndr name
=> (HsContext name -> SDoc) -- Printing the header
-> HsDataDefn name
-> SDoc
pp_data_defn pp_hdr (HsDataDefn { dd_ND = new_or_data, dd_ctxt = L _ context
, dd_kindSig = mb_sig
, dd_cons = condecls, dd_derivs = derivings })
| null condecls
= ppr new_or_data <+> pp_hdr context <+> pp_sig
| otherwise
= hang (ppr new_or_data <+> pp_hdr context <+> pp_sig)
2 (pp_condecls condecls $$ pp_derivings)
where
pp_sig = case mb_sig of
Nothing -> empty
Just kind -> dcolon <+> ppr kind
pp_derivings = case derivings of
Nothing -> empty
Just (L _ ds) -> hsep [ptext (sLit "deriving"),
parens (interpp'SP ds)]
instance OutputableBndr name => Outputable (HsDataDefn name) where
ppr d = pp_data_defn (\_ -> ptext (sLit "Naked HsDataDefn")) d
instance Outputable NewOrData where
ppr NewType = ptext (sLit "newtype")
ppr DataType = ptext (sLit "data")
pp_condecls :: OutputableBndr name => [LConDecl name] -> SDoc
pp_condecls cs@(L _ ConDecl{ con_res = ResTyGADT _ _ } : _) -- In GADT syntax
= hang (ptext (sLit "where")) 2 (vcat (map ppr cs))
pp_condecls cs -- In H98 syntax
= equals <+> sep (punctuate (ptext (sLit " |")) (map ppr cs))
instance (OutputableBndr name) => Outputable (ConDecl name) where
ppr = pprConDecl
pprConDecl :: OutputableBndr name => ConDecl name -> SDoc
pprConDecl (ConDecl { con_names = cons, con_explicit = expl, con_qvars = tvs
, con_cxt = cxt, con_details = details
, con_res = ResTyH98, con_doc = doc })
= sep [ppr_mbDoc doc, pprHsForAll expl tvs cxt, ppr_details details]
where
ppr_details (InfixCon t1 t2) = hsep [ppr t1, pprInfixOcc cons, ppr t2]
ppr_details (PrefixCon tys) = hsep (pprPrefixOcc cons
: map (pprParendHsType . unLoc) tys)
ppr_details (RecCon fields) = ppr_con_names cons
<+> pprConDeclFields (unLoc fields)
pprConDecl (ConDecl { con_names = cons, con_explicit = expl, con_qvars = tvs
, con_cxt = cxt, con_details = PrefixCon arg_tys
, con_res = ResTyGADT _ res_ty })
= ppr_con_names cons <+> dcolon <+>
sep [pprHsForAll expl tvs cxt, ppr (foldr mk_fun_ty res_ty arg_tys)]
where
mk_fun_ty a b = noLoc (HsFunTy a b)
pprConDecl (ConDecl { con_names = cons, con_explicit = expl, con_qvars = tvs
, con_cxt = cxt, con_details = RecCon fields
, con_res = ResTyGADT _ res_ty })
= sep [ppr_con_names cons <+> dcolon <+> pprHsForAll expl tvs cxt,
pprConDeclFields (unLoc fields) <+> arrow <+> ppr res_ty]
pprConDecl decl@(ConDecl { con_details = InfixCon ty1 ty2, con_res = ResTyGADT {} })
= pprConDecl (decl { con_details = PrefixCon [ty1,ty2] })
-- In GADT syntax we don't allow infix constructors
-- so if we ever trip over one (albeit I can't see how that
-- can happen) print it like a prefix one
ppr_con_names :: (OutputableBndr name) => [Located name] -> SDoc
ppr_con_names [x] = ppr x
ppr_con_names xs = interpp'SP xs
instance (Outputable name) => OutputableBndr [Located name] where
pprBndr _bs xs = cat $ punctuate comma (map ppr xs)
pprPrefixOcc [x] = ppr x
pprPrefixOcc xs = cat $ punctuate comma (map ppr xs)
pprInfixOcc [x] = ppr x
pprInfixOcc xs = cat $ punctuate comma (map ppr xs)
{-
************************************************************************
* *
Instance declarations
* *
************************************************************************
Note [Type family instance declarations in HsSyn]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The data type TyFamEqn represents one equation of a type family instance.
It is parameterised over its tfe_pats field:
* An ordinary type family instance declaration looks like this in source Haskell
type instance T [a] Int = a -> a
(or something similar for a closed family)
  It is represented by a TyFamInstEqn, with *type patterns* in the tfe_pats field.
* On the other hand, the *default instance* of an associated type looks like
this in source Haskell
class C a where
type T a b
type T a b = a -> b -- The default instance
  It is represented by a TyFamDefltEqn, with *type variables* in the tfe_pats field.
-}
----------------- Type synonym family instances -------------
type LTyFamInstEqn name = Located (TyFamInstEqn name)
-- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi'
-- when in a list
-- For details on above see note [Api annotations] in ApiAnnotation
type LTyFamDefltEqn name = Located (TyFamDefltEqn name)
type HsTyPats name = HsWithBndrs name [LHsType name]
-- ^ Type patterns (with kind and type bndrs)
-- See Note [Family instance declaration binders]
type TyFamInstEqn name = TyFamEqn name (HsTyPats name)
type TyFamDefltEqn name = TyFamEqn name (LHsTyVarBndrs name)
-- See Note [Type family instance declarations in HsSyn]
-- | One equation in a type family instance declaration
-- See Note [Type family instance declarations in HsSyn]
data TyFamEqn name pats
= TyFamEqn
{ tfe_tycon :: Located name
, tfe_pats :: pats
, tfe_rhs :: LHsType name }
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnEqual'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving( Typeable )
deriving instance (DataId name, Data pats) => Data (TyFamEqn name pats)
type LTyFamInstDecl name = Located (TyFamInstDecl name)
data TyFamInstDecl name
= TyFamInstDecl
{ tfid_eqn :: LTyFamInstEqn name
, tfid_fvs :: PostRn name NameSet }
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnInstance',
-- For details on above see note [Api annotations] in ApiAnnotation
deriving( Typeable )
deriving instance (DataId name) => Data (TyFamInstDecl name)
----------------- Data family instances -------------
type LDataFamInstDecl name = Located (DataFamInstDecl name)
data DataFamInstDecl name
= DataFamInstDecl
{ dfid_tycon :: Located name
, dfid_pats :: HsTyPats name -- LHS
, dfid_defn :: HsDataDefn name -- RHS
, dfid_fvs :: PostRn name NameSet } -- Free vars for
-- dependency analysis
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnData',
-- 'ApiAnnotation.AnnNewType','ApiAnnotation.AnnInstance',
-- 'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving( Typeable )
deriving instance (DataId name) => Data (DataFamInstDecl name)
----------------- Class instances -------------
type LClsInstDecl name = Located (ClsInstDecl name)
data ClsInstDecl name
= ClsInstDecl
{ cid_poly_ty :: LHsType name -- Context => Class Instance-type
-- Using a polytype means that the renamer conveniently
-- figures out the quantified type variables for us.
, cid_binds :: LHsBinds name -- Class methods
, cid_sigs :: [LSig name] -- User-supplied pragmatic info
, cid_tyfam_insts :: [LTyFamInstDecl name] -- Type family instances
, cid_datafam_insts :: [LDataFamInstDecl name] -- Data family instances
, cid_overlap_mode :: Maybe (Located OverlapMode)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose',
-- For details on above see note [Api annotations] in ApiAnnotation
}
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnInstance',
-- 'ApiAnnotation.AnnWhere',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose',
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId id) => Data (ClsInstDecl id)
----------------- Instances of all kinds -------------
type LInstDecl name = Located (InstDecl name)
data InstDecl name -- Both class and family instances
= ClsInstD
{ cid_inst :: ClsInstDecl name }
| DataFamInstD -- data family instance
{ dfid_inst :: DataFamInstDecl name }
| TyFamInstD -- type family instance
{ tfid_inst :: TyFamInstDecl name }
deriving (Typeable)
deriving instance (DataId id) => Data (InstDecl id)
{-
Note [Family instance declaration binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A {Ty|Data}FamInstDecl is a data/type family instance declaration;
the pats field holds the LHS patterns, and the tvs of the HsBSig
are fv(pat_tys), *including* ones that are already in scope
Eg class C s t where
type F t p :: *
instance C w (a,b) where
type F (a,b) x = x->a
The tcdTyVars of the F decl are {a,b,x}, even though the F decl
is nested inside the 'instance' decl.
However after the renamer, the uniques will match up:
instance C w7 (a8,b9) where
type F (a8,b9) x10 = x10->a8
so that we can compare the type pattern in the 'instance' decl and
in the associated 'type' decl
-}
instance (OutputableBndr name) => Outputable (TyFamInstDecl name) where
ppr = pprTyFamInstDecl TopLevel
pprTyFamInstDecl :: OutputableBndr name => TopLevelFlag -> TyFamInstDecl name -> SDoc
pprTyFamInstDecl top_lvl (TyFamInstDecl { tfid_eqn = eqn })
= ptext (sLit "type") <+> ppr_instance_keyword top_lvl <+> ppr_fam_inst_eqn eqn
ppr_instance_keyword :: TopLevelFlag -> SDoc
ppr_instance_keyword TopLevel = ptext (sLit "instance")
ppr_instance_keyword NotTopLevel = empty
ppr_fam_inst_eqn :: OutputableBndr name => LTyFamInstEqn name -> SDoc
ppr_fam_inst_eqn (L _ (TyFamEqn { tfe_tycon = tycon
, tfe_pats = pats
, tfe_rhs = rhs }))
= pp_fam_inst_lhs tycon pats [] <+> equals <+> ppr rhs
ppr_fam_deflt_eqn :: OutputableBndr name => LTyFamDefltEqn name -> SDoc
ppr_fam_deflt_eqn (L _ (TyFamEqn { tfe_tycon = tycon
, tfe_pats = tvs
, tfe_rhs = rhs }))
= pp_vanilla_decl_head tycon tvs [] <+> equals <+> ppr rhs
instance (OutputableBndr name) => Outputable (DataFamInstDecl name) where
ppr = pprDataFamInstDecl TopLevel
pprDataFamInstDecl :: OutputableBndr name => TopLevelFlag -> DataFamInstDecl name -> SDoc
pprDataFamInstDecl top_lvl (DataFamInstDecl { dfid_tycon = tycon
, dfid_pats = pats
, dfid_defn = defn })
= pp_data_defn pp_hdr defn
where
pp_hdr ctxt = ppr_instance_keyword top_lvl <+> pp_fam_inst_lhs tycon pats ctxt
pprDataFamInstFlavour :: DataFamInstDecl name -> SDoc
pprDataFamInstFlavour (DataFamInstDecl { dfid_defn = (HsDataDefn { dd_ND = nd }) })
= ppr nd
instance (OutputableBndr name) => Outputable (ClsInstDecl name) where
ppr (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = binds
, cid_sigs = sigs, cid_tyfam_insts = ats
, cid_overlap_mode = mbOverlap
, cid_datafam_insts = adts })
| null sigs, null ats, null adts, isEmptyBag binds -- No "where" part
= top_matter
| otherwise -- Laid out
= vcat [ top_matter <+> ptext (sLit "where")
, nest 2 $ pprDeclList $
map (pprTyFamInstDecl NotTopLevel . unLoc) ats ++
map (pprDataFamInstDecl NotTopLevel . unLoc) adts ++
pprLHsBindsForUser binds sigs ]
where
top_matter = ptext (sLit "instance") <+> ppOverlapPragma mbOverlap
<+> ppr inst_ty
ppOverlapPragma :: Maybe (Located OverlapMode) -> SDoc
ppOverlapPragma mb =
case mb of
Nothing -> empty
Just (L _ (NoOverlap _)) -> ptext (sLit "{-# NO_OVERLAP #-}")
Just (L _ (Overlappable _)) -> ptext (sLit "{-# OVERLAPPABLE #-}")
Just (L _ (Overlapping _)) -> ptext (sLit "{-# OVERLAPPING #-}")
Just (L _ (Overlaps _)) -> ptext (sLit "{-# OVERLAPS #-}")
Just (L _ (Incoherent _)) -> ptext (sLit "{-# INCOHERENT #-}")
instance (OutputableBndr name) => Outputable (InstDecl name) where
ppr (ClsInstD { cid_inst = decl }) = ppr decl
ppr (TyFamInstD { tfid_inst = decl }) = ppr decl
ppr (DataFamInstD { dfid_inst = decl }) = ppr decl
-- Extract the declarations of associated data types from an instance
instDeclDataFamInsts :: [LInstDecl name] -> [DataFamInstDecl name]
instDeclDataFamInsts inst_decls
= concatMap do_one inst_decls
where
do_one (L _ (ClsInstD { cid_inst = ClsInstDecl { cid_datafam_insts = fam_insts } }))
= map unLoc fam_insts
do_one (L _ (DataFamInstD { dfid_inst = fam_inst })) = [fam_inst]
do_one (L _ (TyFamInstD {})) = []
{-
************************************************************************
* *
\subsection[DerivDecl]{A stand-alone instance deriving declaration}
* *
************************************************************************
-}
type LDerivDecl name = Located (DerivDecl name)
data DerivDecl name = DerivDecl
{ deriv_type :: LHsType name
, deriv_overlap_mode :: Maybe (Located OverlapMode)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose',
-- 'ApiAnnotation.AnnDeriving',
-- 'ApiAnnotation.AnnInstance'
-- For details on above see note [Api annotations] in ApiAnnotation
}
deriving (Typeable)
deriving instance (DataId name) => Data (DerivDecl name)
instance (OutputableBndr name) => Outputable (DerivDecl name) where
ppr (DerivDecl ty o)
= hsep [ptext (sLit "deriving instance"), ppOverlapPragma o, ppr ty]
{-
************************************************************************
* *
\subsection[DefaultDecl]{A @default@ declaration}
* *
************************************************************************
There can only be one default declaration per module, but it is hard
for the parser to check that; we pass them all through in the abstract
syntax, and that restriction must be checked in the front end.
-}
type LDefaultDecl name = Located (DefaultDecl name)
data DefaultDecl name
= DefaultDecl [LHsType name]
-- ^ - 'ApiAnnotation.AnnKeywordId's : 'ApiAnnotation.AnnDefault',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId name) => Data (DefaultDecl name)
instance (OutputableBndr name)
=> Outputable (DefaultDecl name) where
ppr (DefaultDecl tys)
= ptext (sLit "default") <+> parens (interpp'SP tys)
{-
************************************************************************
* *
\subsection{Foreign function interface declaration}
* *
************************************************************************
-}
-- foreign declarations are distinguished as to whether they define or use a
-- Haskell name
--
-- * the Boolean value indicates whether the pre-standard deprecated syntax
-- has been used
--
type LForeignDecl name = Located (ForeignDecl name)
data ForeignDecl name
= ForeignImport (Located name) -- defines this name
(LHsType name) -- sig_ty
(PostTc name Coercion) -- rep_ty ~ sig_ty
ForeignImport
| ForeignExport (Located name) -- uses this name
(LHsType name) -- sig_ty
(PostTc name Coercion) -- sig_ty ~ rep_ty
ForeignExport
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnForeign',
-- 'ApiAnnotation.AnnImport','ApiAnnotation.AnnExport',
-- 'ApiAnnotation.AnnDcolon'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId name) => Data (ForeignDecl name)
{-
In both ForeignImport and ForeignExport:
sig_ty is the type given in the Haskell code
rep_ty is the representation for this type, i.e. with newtypes
coerced away and type functions evaluated.
Thus if the declaration is valid, then rep_ty will only use types
such as Int and IO that we know how to make foreign calls with.
-}
noForeignImportCoercionYet :: PlaceHolder
noForeignImportCoercionYet = PlaceHolder
noForeignExportCoercionYet :: PlaceHolder
noForeignExportCoercionYet = PlaceHolder
-- Specification of an imported external entity, depending on the calling
-- convention
--
data ForeignImport = -- import of a C entity
--
-- * the two strings specifying a header file or library
-- may be empty, which indicates the absence of a
-- header or object specification (both are not used
-- in the case of `CWrapper' and when `CFunction'
-- has a dynamic target)
--
-- * the calling convention is irrelevant for code
-- generation in the case of `CLabel', but is needed
-- for pretty printing
--
-- * `Safety' is irrelevant for `CLabel' and `CWrapper'
--
CImport (Located CCallConv) -- ccall or stdcall
(Located Safety) -- interruptible, safe or unsafe
(Maybe Header) -- name of C header
CImportSpec -- details of the C entity
(Located SourceText) -- original source text for
-- the C entity
deriving (Data, Typeable)
-- details of an external C entity
--
data CImportSpec = CLabel CLabelString -- import address of a C label
| CFunction CCallTarget -- static or dynamic function
| CWrapper -- wrapper to expose closures
-- (former f.e.d.)
deriving (Data, Typeable)
-- specification of an externally exported entity, depending on the calling
-- convention
--
data ForeignExport = CExport (Located CExportSpec) -- contains the calling
-- convention
(Located SourceText) -- original source text for
-- the C entity
deriving (Data, Typeable)
-- pretty printing of foreign declarations
--
instance OutputableBndr name => Outputable (ForeignDecl name) where
ppr (ForeignImport n ty _ fimport) =
hang (ptext (sLit "foreign import") <+> ppr fimport <+> ppr n)
2 (dcolon <+> ppr ty)
ppr (ForeignExport n ty _ fexport) =
hang (ptext (sLit "foreign export") <+> ppr fexport <+> ppr n)
2 (dcolon <+> ppr ty)
instance Outputable ForeignImport where
ppr (CImport cconv safety mHeader spec _) =
ppr cconv <+> ppr safety <+>
char '"' <> pprCEntity spec <> char '"'
where
pp_hdr = case mHeader of
Nothing -> empty
Just (Header header) -> ftext header
pprCEntity (CLabel lbl) =
ptext (sLit "static") <+> pp_hdr <+> char '&' <> ppr lbl
pprCEntity (CFunction (StaticTarget lbl _ isFun)) =
ptext (sLit "static")
<+> pp_hdr
<+> (if isFun then empty else ptext (sLit "value"))
<+> ppr lbl
pprCEntity (CFunction (DynamicTarget)) =
ptext (sLit "dynamic")
pprCEntity (CWrapper) = ptext (sLit "wrapper")
instance Outputable ForeignExport where
ppr (CExport (L _ (CExportStatic lbl cconv)) _) =
ppr cconv <+> char '"' <> ppr lbl <> char '"'
{-
************************************************************************
* *
\subsection{Transformation rules}
* *
************************************************************************
-}
type LRuleDecls name = Located (RuleDecls name)
-- Note [Pragma source text] in BasicTypes
data RuleDecls name = HsRules { rds_src :: SourceText
, rds_rules :: [LRuleDecl name] }
deriving (Typeable)
deriving instance (DataId name) => Data (RuleDecls name)
type LRuleDecl name = Located (RuleDecl name)
data RuleDecl name
= HsRule -- Source rule
(Located RuleName) -- Rule name
Activation
[LRuleBndr name] -- Forall'd vars; after typechecking this
-- includes tyvars
(Located (HsExpr name)) -- LHS
(PostRn name NameSet) -- Free-vars from the LHS
(Located (HsExpr name)) -- RHS
(PostRn name NameSet) -- Free-vars from the RHS
-- ^
-- - 'ApiAnnotation.AnnKeywordId' :
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnTilde',
-- 'ApiAnnotation.AnnVal',
-- 'ApiAnnotation.AnnClose',
-- 'ApiAnnotation.AnnForall','ApiAnnotation.AnnDot',
-- 'ApiAnnotation.AnnEqual',
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId name) => Data (RuleDecl name)
flattenRuleDecls :: [LRuleDecls name] -> [LRuleDecl name]
flattenRuleDecls decls = concatMap (rds_rules . unLoc) decls
type LRuleBndr name = Located (RuleBndr name)
data RuleBndr name
= RuleBndr (Located name)
| RuleBndrSig (Located name) (HsWithBndrs name (LHsType name))
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnDcolon','ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId name) => Data (RuleBndr name)
collectRuleBndrSigTys :: [RuleBndr name] -> [HsWithBndrs name (LHsType name)]
collectRuleBndrSigTys bndrs = [ty | RuleBndrSig _ ty <- bndrs]
instance OutputableBndr name => Outputable (RuleDecls name) where
ppr (HsRules _ rules) = ppr rules
instance OutputableBndr name => Outputable (RuleDecl name) where
ppr (HsRule name act ns lhs _fv_lhs rhs _fv_rhs)
= sep [text "{-# RULES" <+> doubleQuotes (ftext $ unLoc name)
<+> ppr act,
nest 4 (pp_forall <+> pprExpr (unLoc lhs)),
nest 4 (equals <+> pprExpr (unLoc rhs) <+> text "#-}") ]
where
pp_forall | null ns = empty
| otherwise = forAllLit <+> fsep (map ppr ns) <> dot
instance OutputableBndr name => Outputable (RuleBndr name) where
ppr (RuleBndr name) = ppr name
ppr (RuleBndrSig name ty) = ppr name <> dcolon <> ppr ty
{-
************************************************************************
* *
\subsection{Vectorisation declarations}
* *
************************************************************************
A vectorisation pragma, one of
{-# VECTORISE f = closure1 g (scalar_map g) #-}
{-# VECTORISE SCALAR f #-}
{-# NOVECTORISE f #-}
{-# VECTORISE type T = ty #-}
{-# VECTORISE SCALAR type T #-}
-}
type LVectDecl name = Located (VectDecl name)
data VectDecl name
= HsVect
SourceText -- Note [Pragma source text] in BasicTypes
(Located name)
(LHsExpr name)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnEqual','ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
| HsNoVect
SourceText -- Note [Pragma source text] in BasicTypes
(Located name)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
| HsVectTypeIn -- pre type-checking
SourceText -- Note [Pragma source text] in BasicTypes
Bool -- 'TRUE' => SCALAR declaration
(Located name)
(Maybe (Located name)) -- 'Nothing' => no right-hand side
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnType','ApiAnnotation.AnnClose',
-- 'ApiAnnotation.AnnEqual'
-- For details on above see note [Api annotations] in ApiAnnotation
| HsVectTypeOut -- post type-checking
Bool -- 'TRUE' => SCALAR declaration
TyCon
(Maybe TyCon) -- 'Nothing' => no right-hand side
| HsVectClassIn -- pre type-checking
SourceText -- Note [Pragma source text] in BasicTypes
(Located name)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClass','ApiAnnotation.AnnClose',
-- For details on above see note [Api annotations] in ApiAnnotation
| HsVectClassOut -- post type-checking
Class
| HsVectInstIn -- pre type-checking (always SCALAR) !!!FIXME: should be superfluous now
(LHsType name)
| HsVectInstOut -- post type-checking (always SCALAR) !!!FIXME: should be superfluous now
ClsInst
deriving (Typeable)
deriving instance (DataId name) => Data (VectDecl name)
lvectDeclName :: NamedThing name => LVectDecl name -> Name
lvectDeclName (L _ (HsVect _ (L _ name) _)) = getName name
lvectDeclName (L _ (HsNoVect _ (L _ name))) = getName name
lvectDeclName (L _ (HsVectTypeIn _ _ (L _ name) _)) = getName name
lvectDeclName (L _ (HsVectTypeOut _ tycon _)) = getName tycon
lvectDeclName (L _ (HsVectClassIn _ (L _ name))) = getName name
lvectDeclName (L _ (HsVectClassOut cls)) = getName cls
lvectDeclName (L _ (HsVectInstIn _))
= panic "HsDecls.lvectDeclName: HsVectInstIn"
lvectDeclName (L _ (HsVectInstOut _))
= panic "HsDecls.lvectDeclName: HsVectInstOut"
lvectInstDecl :: LVectDecl name -> Bool
lvectInstDecl (L _ (HsVectInstIn _)) = True
lvectInstDecl (L _ (HsVectInstOut _)) = True
lvectInstDecl _ = False
instance OutputableBndr name => Outputable (VectDecl name) where
ppr (HsVect _ v rhs)
= sep [text "{-# VECTORISE" <+> ppr v,
nest 4 $
pprExpr (unLoc rhs) <+> text "#-}" ]
ppr (HsNoVect _ v)
= sep [text "{-# NOVECTORISE" <+> ppr v <+> text "#-}" ]
ppr (HsVectTypeIn _ False t Nothing)
= sep [text "{-# VECTORISE type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeIn _ False t (Just t'))
= sep [text "{-# VECTORISE type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectTypeIn _ True t Nothing)
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeIn _ True t (Just t'))
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectTypeOut False t Nothing)
= sep [text "{-# VECTORISE type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeOut False t (Just t'))
= sep [text "{-# VECTORISE type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectTypeOut True t Nothing)
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeOut True t (Just t'))
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectClassIn _ c)
= sep [text "{-# VECTORISE class" <+> ppr c <+> text "#-}" ]
ppr (HsVectClassOut c)
= sep [text "{-# VECTORISE class" <+> ppr c <+> text "#-}" ]
ppr (HsVectInstIn ty)
= sep [text "{-# VECTORISE SCALAR instance" <+> ppr ty <+> text "#-}" ]
ppr (HsVectInstOut i)
= sep [text "{-# VECTORISE SCALAR instance" <+> ppr i <+> text "#-}" ]
{-
************************************************************************
* *
\subsection[DocDecl]{Document comments}
* *
************************************************************************
-}
type LDocDecl = Located (DocDecl)
data DocDecl
= DocCommentNext HsDocString
| DocCommentPrev HsDocString
| DocCommentNamed String HsDocString
| DocGroup Int HsDocString
deriving (Data, Typeable)
-- Okay, I need to reconstruct the document comments, but for now:
instance Outputable DocDecl where
ppr _ = text "<document comment>"
docDeclDoc :: DocDecl -> HsDocString
docDeclDoc (DocCommentNext d) = d
docDeclDoc (DocCommentPrev d) = d
docDeclDoc (DocCommentNamed _ d) = d
docDeclDoc (DocGroup _ d) = d
{-
************************************************************************
* *
\subsection[DeprecDecl]{Deprecations}
* *
************************************************************************
We use exported entities for things to deprecate.
-}
type LWarnDecls name = Located (WarnDecls name)
-- Note [Pragma source text] in BasicTypes
data WarnDecls name = Warnings { wd_src :: SourceText
, wd_warnings :: [LWarnDecl name]
}
deriving (Data, Typeable)
type LWarnDecl name = Located (WarnDecl name)
data WarnDecl name = Warning [Located name] WarningTxt
deriving (Data, Typeable)
instance OutputableBndr name => Outputable (WarnDecls name) where
ppr (Warnings _ decls) = ppr decls
instance OutputableBndr name => Outputable (WarnDecl name) where
ppr (Warning thing txt)
= hsep [text "{-# DEPRECATED", ppr thing, doubleQuotes (ppr txt), text "#-}"]
{-
************************************************************************
* *
\subsection[AnnDecl]{Annotations}
* *
************************************************************************
-}
type LAnnDecl name = Located (AnnDecl name)
data AnnDecl name = HsAnnotation
SourceText -- Note [Pragma source text] in BasicTypes
(AnnProvenance name) (Located (HsExpr name))
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnType'
-- 'ApiAnnotation.AnnModule'
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId name) => Data (AnnDecl name)
instance (OutputableBndr name) => Outputable (AnnDecl name) where
ppr (HsAnnotation _ provenance expr)
= hsep [text "{-#", pprAnnProvenance provenance, pprExpr (unLoc expr), text "#-}"]
data AnnProvenance name = ValueAnnProvenance (Located name)
| TypeAnnProvenance (Located name)
| ModuleAnnProvenance
deriving (Data, Typeable, Functor)
deriving instance Foldable AnnProvenance
deriving instance Traversable AnnProvenance
annProvenanceName_maybe :: AnnProvenance name -> Maybe name
annProvenanceName_maybe (ValueAnnProvenance (L _ name)) = Just name
annProvenanceName_maybe (TypeAnnProvenance (L _ name)) = Just name
annProvenanceName_maybe ModuleAnnProvenance = Nothing
pprAnnProvenance :: OutputableBndr name => AnnProvenance name -> SDoc
pprAnnProvenance ModuleAnnProvenance = ptext (sLit "ANN module")
pprAnnProvenance (ValueAnnProvenance (L _ name))
= ptext (sLit "ANN") <+> ppr name
pprAnnProvenance (TypeAnnProvenance (L _ name))
= ptext (sLit "ANN type") <+> ppr name
{-
************************************************************************
* *
\subsection[RoleAnnot]{Role annotations}
* *
************************************************************************
-}
type LRoleAnnotDecl name = Located (RoleAnnotDecl name)
-- See #8185 for more info about why role annotations are
-- top-level declarations
data RoleAnnotDecl name
= RoleAnnotDecl (Located name) -- type constructor
[Located (Maybe Role)] -- optional annotations
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnRole'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Data, Typeable)
instance OutputableBndr name => Outputable (RoleAnnotDecl name) where
ppr (RoleAnnotDecl ltycon roles)
= ptext (sLit "type role") <+> ppr ltycon <+>
hsep (map (pp_role . unLoc) roles)
where
pp_role Nothing = underscore
pp_role (Just r) = ppr r
roleAnnotDeclName :: RoleAnnotDecl name -> name
roleAnnotDeclName (RoleAnnotDecl (L _ name) _) = name
|
DavidAlphaFox/ghc
|
compiler/hsSyn/HsDecls.hs
|
bsd-3-clause
| 70,445
| 0
| 16
| 21,104
| 12,186
| 6,626
| 5,560
| 875
| 6
|
{-# LANGUAGE RecordWildCards, ScopedTypeVariables #-}
module Graphics.UI.Threepenny.Widgets (
-- * Synopsis
-- | Widgets are reusable building blocks for a graphical user interface.
-- This module collects useful widgets that are designed to work
-- with functional reactive programming (FRP).
--
-- For more details and information on how to write your own widgets, see the
-- <https://github.com/HeinrichApfelmus/threepenny-gui/blob/master/doc/design-widgets.md widget design guide>.
-- * Tidings
Tidings, rumors, facts, tidings,
-- * Widgets
-- ** Input widgets
TextEntry, entry, userText,
-- ** ListBox
ListBox, listBox, userSelection,
) where
import Control.Monad (void, when)
import qualified Data.Map as Map
import qualified Graphics.UI.Threepenny.Attributes as UI
import qualified Graphics.UI.Threepenny.Events as UI
import qualified Graphics.UI.Threepenny.Elements as UI
import Graphics.UI.Threepenny.Core
import Reactive.Threepenny
{-----------------------------------------------------------------------------
Input widgets
------------------------------------------------------------------------------}
-- | A single-line text entry.
data TextEntry = TextEntry
{ _elementTE :: Element
, _userTE :: Tidings String
}
instance Widget TextEntry where getElement = _elementTE
-- | User changes to the text value.
userText :: TextEntry -> Tidings String
userText = _userTE
-- | Create a single-line text entry.
entry
:: Behavior String -- ^ Display value when the element does not have focus.
-> UI TextEntry
entry bValue = do -- single text entry
input <- UI.input
bEditing <- stepper False $ and <$>
unions [True <$ UI.focus input, False <$ UI.blur input]
window <- askWindow
liftIOLater $ onChange bValue $ \s -> runUI window $ do
editing <- liftIO $ currentValue bEditing
when (not editing) $ void $ element input # set value s
let _elementTE = input
_userTE = tidings bValue $ UI.valueChange input
return TextEntry {..}
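-- A minimal usage sketch (added for illustration; 'namedEntry' is a made-up
-- name, not part of this module): build a text entry that displays the given
-- behavior and hand back its underlying element for layout.
namedEntry :: Behavior String -> UI Element
namedEntry bName = do
    te <- entry bName
    return (getElement te)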
{-----------------------------------------------------------------------------
List box
------------------------------------------------------------------------------}
-- | A list of values. The user can select entries.
data ListBox a = ListBox
{ _elementLB :: Element
, _selectionLB :: Tidings (Maybe a)
}
instance Widget (ListBox a) where getElement = _elementLB
-- | User changes to the current selection (possibly empty).
userSelection :: ListBox a -> Tidings (Maybe a)
userSelection = _selectionLB
-- | Create a 'ListBox'.
listBox :: forall a. Ord a
=> Behavior [a] -- ^ list of items
-> Behavior (Maybe a) -- ^ selected item
-> Behavior (a -> UI Element) -- ^ display for an item
-> UI (ListBox a)
listBox bitems bsel bdisplay = do
list <- UI.select
-- animate output items
element list # sink items (map <$> bdisplay <*> bitems)
-- animate output selection
let bindices :: Behavior (Map.Map a Int)
bindices = (Map.fromList . flip zip [0..]) <$> bitems
bindex = lookupIndex <$> bindices <*> bsel
lookupIndex indices Nothing = Nothing
lookupIndex indices (Just sel) = Map.lookup sel indices
element list # sink UI.selection bindex
-- changing the display won't change the current selection
-- eDisplay <- changes display
-- sink listBox [ selection :== stepper (-1) $ bSelection <@ eDisplay ]
-- user selection
let bindices2 :: Behavior (Map.Map Int a)
bindices2 = Map.fromList . zip [0..] <$> bitems
_selectionLB = tidings bsel $
lookupIndex <$> bindices2 <@> UI.selectionChange list
_elementLB = list
return ListBox {..}
-- Write-only attribute that replaces an element's child elements with the
-- given list of items, wrapping each item in an <option> element.
items = mkWriteAttr $ \i x -> void $ do
    return x # set children [] #+ map (\i -> UI.option #+ [i]) i
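-- A minimal usage sketch (added for illustration; 'fruitBox' is a made-up
-- name): a static list box over three strings with no initial selection,
-- rendering each item with UI.string.
fruitBox :: UI (ListBox String)
fruitBox = listBox (pure ["apple", "banana", "cherry"])
                   (pure Nothing)
                   (pure UI.string)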
|
duplode/threepenny-gui
|
src/Graphics/UI/Threepenny/Widgets.hs
|
bsd-3-clause
| 3,958
| 0
| 17
| 916
| 829
| 445
| 384
| 60
| 2
|
module AddOneParameter.A2 where
import AddOneParameter.C2
import AddOneParameter.D2
sumSq xs = sum (map (sq sq_f) xs) + sumSquares xs + sumSquares1 xs
sq_f_2 = 2
main = sumSq [1..4]
|
RefactoringTools/HaRe
|
test/testdata/AddOneParameter/A2.expected.hs
|
bsd-3-clause
| 189
| 0
| 11
| 35
| 73
| 38
| 35
| 6
| 1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
-- | This module builds Docker (OpenContainer) images.
module Stack.Image
(stageContainerImageArtifacts, createContainerImageFromStage,
imgCmdName, imgDockerCmdName, imgOptsFromMonoid,
imgDockerOptsFromMonoid, imgOptsParser, imgDockerOptsParser)
where
import Control.Applicative
import Control.Exception.Lifted hiding (finally)
import Control.Monad
import Control.Monad.Catch hiding (bracket)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader
import Control.Monad.Trans.Control
import Data.Char (toLower)
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Data.Typeable
import Options.Applicative
import Path
import Path.IO
import Stack.Constants
import Stack.Types
import Stack.Types.Internal
import System.FilePath (dropTrailingPathSeparator)
import System.Process
type Build e m = (HasBuildConfig e, HasConfig e, HasEnvConfig e, HasTerminal e, MonadBaseControl IO m, MonadCatch m, MonadIO m, MonadLogger m, MonadReader e m)
type Assemble e m = (HasConfig e, HasTerminal e, MonadBaseControl IO m, MonadCatch m, MonadIO m, MonadLogger m, MonadMask m, MonadReader e m)
-- | Stages the executables & additional content in a staging
-- directory under '.stack-work'
stageContainerImageArtifacts :: Build e m
=> m ()
stageContainerImageArtifacts = do
imageDir <- imageStagingDir <$> getWorkingDir
createTree imageDir
stageExesInDir imageDir
syncAddContentToDir imageDir
-- | Builds a Docker (OpenContainer) image extending the `base` image
-- specified in the project's stack.yaml. The new image will then be
-- extended with an ENTRYPOINT specified for each `entrypoint` listed
-- in the config file.
createContainerImageFromStage :: Assemble e m
=> m ()
createContainerImageFromStage = do
imageDir <- imageStagingDir <$> getWorkingDir
createDockerImage imageDir
extendDockerImageWithEntrypoint imageDir
-- | Stage all the Package executables in the usr/local/bin
-- subdirectory of a temp directory.
stageExesInDir :: Build e m => Path Abs Dir -> m ()
stageExesInDir dir = do
srcBinPath <-
(</> $(mkRelDir "bin")) <$>
installationRootLocal
let destBinPath = dir </>
$(mkRelDir "usr/local/bin")
createTree destBinPath
copyDirectoryRecursive srcBinPath destBinPath
-- | Add any additional files into the temp directory, respecting the
-- (Source, Destination) mapping.
syncAddContentToDir :: Build e m => Path Abs Dir -> m ()
syncAddContentToDir dir = do
config <- asks getConfig
bconfig <- asks getBuildConfig
let imgAdd = maybe Map.empty imgDockerAdd (imgDocker (configImage config))
forM_
(Map.toList imgAdd)
(\(source,dest) ->
do sourcePath <- parseRelDir source
destPath <- parseAbsDir dest
let destFullPath = dir </> dropRoot destPath
createTree destFullPath
copyDirectoryRecursive
(bcRoot bconfig </> sourcePath)
destFullPath)
-- | Derive an image name from the project directory.
imageName :: Path Abs Dir -> String
imageName = map toLower . dropTrailingPathSeparator . toFilePath . dirname
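-- Illustrative check (added; 'exampleImageName' is a made-up name): a project
-- rooted at "/home/user/My-Project/" yields the image name "my-project".
exampleImageName :: Maybe String
exampleImageName = imageName <$> parseAbsDir "/home/user/My-Project/"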
-- | Create a general purpose docker image from the temporary
-- directory of executables & static content.
createDockerImage :: Assemble e m => Path Abs Dir -> m ()
createDockerImage dir = do
config <- asks getConfig
let dockerConfig = imgDocker (configImage config)
case imgDockerBase =<< dockerConfig of
Nothing -> throwM StackImageDockerBaseUnspecifiedException
Just base -> do
liftIO
(do writeFile
(toFilePath
(dir </>
$(mkRelFile "Dockerfile")))
(unlines ["FROM " ++ base, "ADD ./ /"])
callProcess
"docker"
[ "build"
, "-t"
, fromMaybe
(imageName (parent (parent dir)))
(imgDockerImageName =<< dockerConfig)
, toFilePath dir])
-- | Extend the general purpose docker image with entrypoints (if
-- specified).
extendDockerImageWithEntrypoint :: Assemble e m => Path Abs Dir -> m ()
extendDockerImageWithEntrypoint dir = do
config <- asks getConfig
let dockerConfig = imgDocker (configImage config)
let dockerImageName = fromMaybe
(imageName (parent (parent dir)))
(imgDockerImageName =<< dockerConfig)
let imgEntrypoints = maybe Nothing imgDockerEntrypoints dockerConfig
case imgEntrypoints of
Nothing -> return ()
Just eps -> do
forM_
eps
(\ep ->
liftIO
(do writeFile
(toFilePath
(dir </>
$(mkRelFile "Dockerfile")))
(unlines
[ "FROM " ++ dockerImageName
, "ENTRYPOINT [\"/usr/local/bin/" ++
ep ++ "\"]"
, "CMD []"])
callProcess
"docker"
[ "build"
, "-t"
, dockerImageName ++ "-" ++ ep
, toFilePath dir]))
-- | The command name for dealing with images.
imgCmdName :: String
imgCmdName = "image"
-- | The command name for building a docker container.
imgDockerCmdName :: String
imgDockerCmdName = "container"
-- | A parser for ImageOptsMonoid.
imgOptsParser :: Parser ImageOptsMonoid
imgOptsParser = ImageOptsMonoid <$>
optional
(subparser
(command
imgDockerCmdName
(info
imgDockerOptsParser
(progDesc "Create a container image (EXPERIMENTAL)"))))
-- | A parser for ImageDockerOptsMonoid.
imgDockerOptsParser :: Parser ImageDockerOptsMonoid
imgDockerOptsParser = ImageDockerOptsMonoid <$>
optional
(option
str
(long (imgDockerCmdName ++ "-" ++ T.unpack imgDockerBaseArgName) <>
metavar "NAME" <>
help "Docker base image name")) <*>
pure Nothing <*>
pure Nothing <*>
pure Nothing
-- | Convert image opts monoid to image options.
imgOptsFromMonoid :: ImageOptsMonoid -> ImageOpts
imgOptsFromMonoid ImageOptsMonoid{..} = ImageOpts
{ imgDocker = imgDockerOptsFromMonoid <$> imgMonoidDocker
}
-- | Convert Docker image opts monoid to Docker image options.
imgDockerOptsFromMonoid :: ImageDockerOptsMonoid -> ImageDockerOpts
imgDockerOptsFromMonoid ImageDockerOptsMonoid{..} = ImageDockerOpts
{ imgDockerBase = emptyToNothing imgDockerMonoidBase
, imgDockerEntrypoints = emptyToNothing imgDockerMonoidEntrypoints
, imgDockerAdd = fromMaybe Map.empty imgDockerMonoidAdd
, imgDockerImageName = emptyToNothing imgDockerMonoidImageName
}
where emptyToNothing Nothing = Nothing
emptyToNothing (Just s)
| null s =
Nothing
| otherwise =
Just s
-- | Stack image exceptions.
data StackImageException =
StackImageDockerBaseUnspecifiedException
deriving (Typeable)
instance Exception StackImageException
instance Show StackImageException where
show StackImageDockerBaseUnspecifiedException = "You must specify a base docker image on which to place your haskell executables."
|
robstewart57/stack
|
src/Stack/Image.hs
|
bsd-3-clause
| 8,388
| 0
| 26
| 2,695
| 1,525
| 783
| 742
| 171
| 2
|
-- !!! ds014 -- character and string literals
-- !!! really should add ALL weird forms...
module ShouldCompile where
a = 'a'
b = "b"
c = a:b
d = b ++ b
b1 = "" -- examples from the Haskell report
b2 = "\&" -- the same thing
b3 = "\SO\&H" ++ "\137\&9"
a000 = '\NUL'
a001 = '\SOH'
a002 = '\STX'
a003 = '\ETX'
a004 = '\EOT'
a005 = '\ENQ'
a006 = '\ACK'
a007 = '\BEL'
a010 = '\BS'
a011 = '\HT'
a012 = '\LF'
a013 = '\VT'
a014 = '\FF'
a015 = '\CR'
a016 = '\SO'
a017 = '\SI'
a020 = '\DLE'
a021 = '\DC1'
a022 = '\DC2'
a023 = '\DC3'
a024 = '\DC4'
a025 = '\NAK'
a026 = '\SYN'
a027 = '\ETB'
a030 = '\CAN'
a031 = '\EM'
a032 = '\SUB'
a033 = '\ESC'
a034 = '\FS'
a035 = '\GS'
a036 = '\RS'
a037 = '\US'
a040 = '\SP'
a042 = '"'
a047 = '\''
a134 = '\\'
a177 = '\DEL'
ascii = "\NUL\SOH\STX\ETX\EOT\ENQ\ACK\BEL\
\\BS\HT\LF\VT\FF\CR\SO\SI\
\\DLE\DC1\DC2\DC3\DC4\NAK\SYN\ETB\
\\CAN\EM\SUB\ESC\FS\GS\RS\US\
\\SP!\"#$%&'\
\()*+,-./\
\01234567\
\89:;<=>?\
\@ABCDEFG\
\HIJKLMNO\
\PQRSTUVW\
\XYZ[\\]^_\
\`abcdefg\
\hijklmno\
\pqrstuvw\
\xyz{|}~\DEL"
na200 = '\o200'
na250 = '\o250'
na300 = '\o300'
na350 = '\o350'
na377 = '\o377'
eightbit = "\o200\o250\o300\o350\o377"
|
ezyang/ghc
|
testsuite/tests/deSugar/should_compile/ds014.hs
|
bsd-3-clause
| 1,284
| 0
| 5
| 344
| 275
| 166
| 109
| 52
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module JsonUtils where
import qualified Data.Text as T
import qualified Data.HashMap.Strict as HM
import Data.Aeson
import Data.List (foldl')
mapObject :: (Object -> Object) -> Value -> Value
mapObject f (Object m) = Object (f m)
mapObject _ v = error $ "Not an object " ++ show v
delete :: [T.Text] -> Value -> Value
delete [] = error "delete []"
delete ["*"] = error "delete [\"*\"]"
delete [k] = mapObject $ HM.delete k
delete ("*":ks) = mapObject $ HM.map (delete ks)
delete (k:ks) = mapObject $ HM.adjust (delete ks) k
merge :: Value -> Value -> Value
merge (Object m1) (Object m2) = Object $ HM.unionWith merge m2 m1
merge v1 v2 = error $ "Cannot merge " ++ show v1 ++ " with " ++ show v2
merges :: [Value] -> Value
merges = foldl' merge (Object HM.empty)
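-- Small illustrative values (added; not part of the original module): merging
-- two objects with disjoint keys unions them, and 'delete' removes a key path.
exampleMerge :: Value
exampleMerge = merges [ object ["a" .= (1 :: Int)]
                      , object ["b" .= (2 :: Int)] ]

exampleDelete :: Value
exampleDelete = delete ["a"] exampleMerge
-- exampleDelete == object ["b" .= (2 :: Int)]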
|
nomeata/gipeda
|
src/JsonUtils.hs
|
mit
| 823
| 0
| 8
| 170
| 340
| 178
| 162
| 20
| 1
|
{-
Pentagon numbers
Problem 44
Pentagonal numbers are generated by the formula, Pn=n(3n−1)/2. The first ten pentagonal numbers are:
1, 5, 12, 22, 35, 51, 70, 92, 117, 145, ...
It can be seen that P4 + P7 = 22 + 70 = 92 = P8. However, their difference, 70 − 22 = 48, is not pentagonal.
Find the pair of pentagonal numbers, Pj and Pk, for which their sum and difference are pentagonal and D = |Pk − Pj| is minimised; what is the value of D?
-}
import Data.List
import Math.NumberTheory.Powers.Squares
maxn = 10000
pairs :: [Int] -> [(Int,Int)]
pairs s = [(x,y) | (x:xt) <- tails s, y <- xt]
pent :: Int -> Int
pent n = n*(3*n-1) `div` 2
is_pent :: Int -> Bool
is_pent x = (isSquare p) && ((mod q 6) == 5)
where
p = (24*x) + 1
q = integerSquareRoot p
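-- Sanity check (added for illustration): inverting Pn = n(3n-1)/2 gives
-- n = (1 + sqrt(24*Pn + 1)) / 6, so x is pentagonal exactly when 24x+1 is a
-- perfect square whose root is congruent to 5 mod 6, which is what is_pent
-- tests. The first hundred pentagonal numbers pass and nearby values fail:
check_is_pent :: Bool
check_is_pent = all is_pent (map pent [1..100])
             && not (any is_pent [2,3,4,6,48])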
-- check a pair
check :: (Int,Int) -> Bool
check (a,b) = and $ map (is_pent . abs) [a+b,a-b]
solve =
map (\(a,b) -> abs (a-b))
$ filter check
$ pairs
$ map pent [1..maxn]
main = do
print $ pairs [1,2,3,4,5]
print $ take 10 $ map pent [1..maxn]
print $ take 10 $ pairs $ map pent [1..maxn]
print solve
{-
It would be useful to be able to generate the pairs incrementally
and in ascending order of D.
Runtime is about 4 seconds at maxn=10k, but never finishes for maxn=100k.
Tried using a set instead of "is_pent", but it was slower.
So I got lucky, but don't know how to make it better.
-}
|
bertdouglas/euler-haskell
|
001-050/44a.hs
|
mit
| 1,388
| 0
| 13
| 312
| 395
| 213
| 182
| 23
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Day9 (day9, day9', run, Expr(..), parseInput) where
import Data.Function (fix)
import Text.Parsec ((<|>) , Parsec , ParseError)
import qualified Text.Parsec as P
data Expr =
CompressExpr
Int -- Characters to repeat
Int -- Number of times to repeat
Int -- Length of marker (number of characters)
| LiteralExpr Int -- Length of data
deriving (Eq, Ord, Show)
type Compressed = [Expr]
parseInput :: String -> Either ParseError Compressed
parseInput = P.parse (P.many1 (P.try parseCompressExpr <|> parseLiteralExpr)) ""
parseCompressExpr :: Parsec String () Expr
parseCompressExpr = do
start <- P.sourceColumn <$> P.getPosition
take <- read <$> (P.char '(' *> P.many1 P.digit <* P.char 'x')
repeat <- read <$> (P.many1 P.digit <* P.char ')')
end <- P.sourceColumn <$> P.getPosition
return $ CompressExpr take repeat (end - start)
parseLiteralExpr :: Parsec String () Expr
parseLiteralExpr = LiteralExpr . length <$> P.many1 P.upper
lengthDecompressedV1 :: Compressed -> Int
lengthDecompressedV1 = lengthDecompressed sumExprs
where
sumExprs = sum . map exprLength
exprLength (CompressExpr _ _ l) = l
exprLength (LiteralExpr l) = l
lengthDecompressedV2 :: Compressed -> Int
lengthDecompressedV2 = fix lengthDecompressed
-- First argument is function to use for recursing to process duplicated data
lengthDecompressed :: (Compressed -> Int) -> Compressed -> Int
lengthDecompressed recur ((CompressExpr t n _) : ds) =
let (repeated, rest) = takeCharacters t ds in
(n * (recur repeated)) + (lengthDecompressed recur rest)
lengthDecompressed recur ((LiteralExpr n) : ds) = n + (lengthDecompressed recur ds)
lengthDecompressed _ [] = 0
takeCharacters :: Int -> Compressed -> (Compressed, Compressed)
takeCharacters 0 ds = ([], ds)
takeCharacters n (r@(LiteralExpr k) : ds)
| n < k = ([LiteralExpr n], (LiteralExpr (k - n)) : ds)
| n >= k = let (a, b) = takeCharacters (n - k) ds in (r : a, b)
takeCharacters n (c@(CompressExpr _ _ l) : ds)
| n < l = undefined -- maybe should be ([LiteralExpr n], (LiteralExpr (n - l)) : ds)
| n >= l = let (a, b) = takeCharacters (n - l) ds in (c : a, b)
-- Final, top-level exports
day9 :: String -> Int
day9 = either (const (-1)) lengthDecompressedV1 . parseInput
day9' :: String -> Int
day9' = either (const (-1)) lengthDecompressedV2 . parseInput
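-- Illustrative check (added; uses the example string from the puzzle
-- statement): version 1 treats nested markers as plain data, version 2
-- expands them, so the two lengths differ.
exampleLengths :: (Int, Int)
exampleLengths = (day9 s, day9' s) -- == (18, 20)
  where s = "X(8x2)(3x3)ABCY"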
-- Input
run :: IO ()
run = do
putStrLn "Day 9 results: "
input <- readFile "inputs/day9.txt"
putStrLn $ " " ++ show (day9 input)
putStrLn $ " " ++ show (day9' input)
|
brianshourd/adventOfCode2016
|
src/Day9.hs
|
mit
| 2,605
| 0
| 13
| 529
| 940
| 491
| 449
| 55
| 2
|
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE RankNTypes #-}
module Graphics.Oedel.Widget where
import Data.Monoid
import Graphics.Oedel.Style
import Control.Reactive
import Control.Applicative
-- | Identifies an input of type @b@ within an environment of type @a@.
newtype Input a b = Input {
-- | Reads an input from an environment. If the input is not available,
-- 'Nothing' will be returned.
readEnv :: a -> Maybe b }
instance Functor (Input a) where
fmap f (Input i) = Input $ (f <$>) . i
instance Applicative (Input a) where
pure x = Input $ const $ Just x
(<*>) (Input f) (Input x) = Input $ \e -> f e <*> x e
instance Monad (Input a) where
return = pure
(>>=) (Input x) f = Input $ \e -> x e >>= \v ->
let Input y = f v
in y e
-- | Identifies an output of type @b@ within an environment of type @a@.
newtype Output a b = Output {
-- | Constructs an environment containing only the given output, set
-- to the given value.
putEnv :: b -> a }
instance Monoid a => Monoid (Output a b) where
mempty = Output $ const mempty
mappend (Output x) (Output y) = Output $ \v -> x v <> y v
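-- A small illustrative pair (added; the names are made up): with an
-- environment that is just an association list, an 'Input' looks a key up and
-- an 'Output' produces the singleton environment carrying that key.
lookupInput :: String -> Input [(String, String)] String
lookupInput key = Input (lookup key)

singletonOutput :: String -> Output [(String, String)] String
singletonOutput key = Output (\v -> [(key, v)])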
-- | Composes 'Input' with a behavior-like applicative. This is meant to
-- be used with layout functions suffixed with "Dyn", allowing a layout
-- to vary based on a behavior embedded within an environment.
data InputDyn f a b = InputDyn (Input a (f b))
instance Functor f => Functor (InputDyn f a) where
fmap f (InputDyn x) = InputDyn $ (f <$>) <$> x
instance Applicative f => Applicative (InputDyn f a) where
pure x = InputDyn $ pure $ pure x
(<*>) (InputDyn f) (InputDyn x) = InputDyn $ (<*>) <$> f <*> x
-- | Converts an 'Input' for a behavior into a 'InputDyn', for use in
-- layout functions suffixed with "Dyn".
dyn :: Input a (f b) -> InputDyn f a b
dyn = InputDyn
-- | Inverse of 'dyn'.
undyn :: InputDyn f a b -> Input a (f b)
undyn (InputDyn inp) = inp
-- | @w a@ is a description of an interactive figure within an environment of
-- type @a@. Widgets can read from and write to their environment, and widgets
-- with the same environment type can be composed as figures.
class ReactiveState e => Widget e w | w -> e where
-- | Decorates a widget to, upon instantiation, read the given input,
-- instantiate it with the current time, and then write it to the given
-- output.
declare :: (Monoid a) => Output a b -> Input a (Moment e b) -> w a -> w a
-- | @w@ is a widget type that allows dynamic switching.
class Widget e w => WidgetSwitch e w where
-- | Constructs a widget whose contents dynamically switch between
-- widgets. The initial widget is given, along with an event that selects
-- other widgets.
frame :: (Monoid a) => w a -> Input a (e (w a)) -> w a
-- | @w@ is a widget type that allows the construction of buttons
-- that can be styled with a description of type @p@.
class (Widget e w, Style p) => WidgetButton p e w where
-- | Constructs a button widget enclosing the given widget.
-- The given output event will occur when the button is pressed.
button :: (?style :: p, Monoid a) => Output a (e ()) -> w a -> w a
-- | @w@ is a widget type that allows the construction of text boxes that
-- can be styled with a description of type @p@.
class (Widget e w, Style p) => WidgetTextBox p e w where
-- | Constructs a text box. The given output behavior will provide the
-- text box contents.
textBox :: (?style :: p, Monoid a) => Output a (I e String) -> w a
-- | @w@ is a widget type that allows the construction of option selectors that
-- can be styled with a description of type @p@.
class (Widget e w, Style p) => WidgetOption p o e w | w -> o where
-- | Constructs an option selector (combo box or radio buttons) displaying
-- the given options. The given output behavior will provide the currently
-- selected option.
options :: (?style :: p, Monoid a) => Output a (I e b) -> [(o, b)] -> w a
-- | @w@ is a widget type that allows the construction of check boxes that
-- can be styled with a description of type @p@.
class (Widget e w, Style p) => WidgetCheckBox p e w where
-- | Constructs a check box. The given output behavior will provide the
-- current checked state.
checkBox :: (?style :: p, Monoid a) => Output a (I e Bool) -> w a
|
dzamkov/Oedel
|
src/Graphics/Oedel/Widget.hs
|
mit
| 4,489
| 0
| 14
| 1,131
| 1,061
| 568
| 493
| -1
| -1
|
{-|
Module : Data.Foldable.Unicode.Betahaxx
Unicode synonyms for @Data.Foldable.elem@ and @Data.Foldable.notElem@ that have
RULES specializing to @Data.Set.member@ and @Data.Set.notMember@ (which is
mainly what I want to use this for.) However, with current GHC, we always have
to require Ord for this to work. Ord isn't usually too much to expect anyway.
-}
module Data.Foldable.Unicode.Betahaxx ( (∈), (∋), (∉), (∌) ) where
import Prelude (Ord, Bool, flip)
import Data.Set (member, notMember)
import Data.Foldable (Foldable, elem, notElem)
(∈) :: (Foldable f, Ord a) => a -> f a -> Bool
(∈) = elem
(∋) :: (Foldable f, Ord a) => f a -> a -> Bool
(∋) = flip elem
(∉) :: (Foldable f, Ord a) => a -> f a -> Bool
(∉) = notElem
(∌) :: (Foldable f, Ord a) => f a -> a -> Bool
(∌) = flip notElem
infix 4 ∈, ∋, ∉, ∌
{-# NOINLINE (∈) #-}
{-# NOINLINE (∋) #-}
{-# NOINLINE (∉) #-}
{-# NOINLINE (∌) #-}
{-# RULES "∈/Set" (∈) = member #-}
{-# RULES "∋/Set" (∋) = flip member #-}
{-# RULES "∉/Set" (∉) = notMember #-}
{-# RULES "∌/Set" (∌) = flip notMember #-}
|
betaveros/betahaxx
|
Data/Foldable/Unicode/Betahaxx.hs
|
mit
| 1,105
| 0
| 8
| 201
| 262
| 162
| 100
| 21
| 1
|
import ParserTests
import Test.Tasty (defaultMain, testGroup)
import Test.Tasty.HUnit (assertEqual, testCase)
main :: IO ()
main = defaultMain $ testGroup "All unit tests" [ParserTests.run]
|
andybalaam/pepper
|
old/pepper2/tests/Tests.hs
|
mit
| 191
| 0
| 8
| 24
| 60
| 34
| 26
| 5
| 1
|
module Main where
import Control.Monad
import System.Environment
import Text.ParserCombinators.Parsec hiding (spaces)
main :: IO ()
main = do args <- getArgs
putStrLn (readExpr (args !! 0))
symbol :: Parser Char
symbol = oneOf "!$%&|*+-/:<=>?@^_~#"
readExpr :: String -> String
readExpr input = case parse parseExpr "lisp" input of
Left err -> "No match: " ++ show err
Right val -> "Found val"
spaces :: Parser ()
spaces = skipMany1 space
data LispVal = Atom String
| List [LispVal]
| DottedList [LispVal] LispVal
| Number Integer
| String String
| Bool Bool
parseString :: Parser LispVal
parseString = do char '"'
x <- many (noneOf "\"")
char '"'
return $ String x
parseAtom :: Parser LispVal
parseAtom = do first <- letter <|> symbol
rest <- many (letter <|> digit <|> symbol)
let atom = [first] ++ rest
               return $ case atom of
                          "#t" -> Bool True
                          "#f" -> Bool False
                          _    -> Atom atom
parseNumber :: Parser LispVal
parseNumber = liftM (Number . read) $ many1 digit
parseList :: Parser LispVal
parseList = liftM List $ sepBy parseExpr spaces
parseDottedList :: Parser LispVal
parseDottedList = do
head <- endBy parseExpr spaces
tail <- char '.' >> spaces >> parseExpr
return $ DottedList head tail
parseQuoted :: Parser LispVal
parseQuoted = do
char '\''
x <- parseExpr
return $ List [Atom "quote", x]
parseExpr :: Parser LispVal
parseExpr = parseAtom
<|> parseString
<|> parseNumber
<|> parseQuoted
<|> do char '('
x <- (try parseList) <|> parseDottedList
char ')'
return x
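-- Illustrative checks (added; not part of the original listing): each of these
-- inputs parses successfully, so readExpr reports "Found val" for all of them.
exampleResults :: [String]
exampleResults = map readExpr ["#t", "25", "\"hello\"", "(a b c)"]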
|
tismith/tlisp
|
write-yourself-a-scheme/listings/listing3.4.hs
|
mit
| 1,813
| 0
| 11
| 617
| 554
| 269
| 285
| 56
| 3
|
{-|
Module : Control.Flower.Applicative.Lazy
Description : Combinators for directional lazy applicative functors
-}
module Control.Flower.Applicative.Lazy (
ap,
lift2, lift3,
(<*), (*>),
(<$*), (*$>),
(<$**), (**$>)
) where
import Prelude hiding ((<*), (*>))
import Control.Applicative hiding ((<*), (*>))
{- $setup
>>> import Control.Flower.Apply
>>> let x = Just 3
>>> let y = Just 4
>>> let f = (+2)
>>> let g = (*2)
>>> let h = (+)
-}
{-| A simple alias for 'Prelude.<*>'
>>> ap (Just (+1)) (Just 4)
Just 5
-}
ap :: Applicative f => f (a -> b) -> f a -> f b
ap = (<*>)
{-| Right-associative, left-flowing applicative operator
>>> Just (+1) <* Just 4
Just 5
-}
infixr 4 <*
(<*) :: Applicative f => f (a -> b) -> f a -> f b
(<*) = ap
{-| Left-associative, right-flowing applicative operator
>>> Just 4 *> Just (+1)
Just 5
-}
infixl 4 *>
(*>) :: Applicative f => f a -> f (a -> b) -> f b
(*>) = flip ap
{-| An alias for 'Control.Applicative.lift2', updating with unified "lift" naming
>>> lift2 (+) (Just 4) (Just 1)
Just 5
-}
lift2 :: Applicative f => (a -> b -> c) -> f a -> f b -> f c
lift2 = liftA2
{-| Right-associative, left-flowing 'lift2' operator
>>> (+) <$* Just 4 |< Just 1
Just 5
-}
infixr 4 <$*
(<$*) :: Applicative f => (a -> b -> c) -> f a -> f b -> f c
(<$*) = lift2
{-| Left-associative, right-flowing 'lift2' operator
>>> Just 4 >| Just 1 *$> (+)
Just 5
-}
infixl 4 *$>
(*$>) :: Applicative f => f a -> (a -> b -> c) -> f b -> f c
(*$>) = flip lift2
{-| An alias for 'Control.Applicative.lift3', updating with unified "lift" naming
>>> lift3 (\x y z -> x * y * z) (Just 4) (Just 3) (Just 2)
Just 24
-}
lift3 :: Applicative f => (a -> b -> c -> d) -> f a -> f b -> f c -> f d
lift3 = liftA3
{-| Right-associative, left-flowing 'lift3' operator
>>> (\x y z -> x * y * z) <$** Just 4 |< Just 3 |< Just 2
Just 24
-}
infixr 4 <$**
(<$**) :: Applicative f => (a -> b -> c -> d) -> f a -> f b -> f c -> f d
f <$** x = \y z -> lift3 f x y z -- AKA f <$ a <* b <* c
{-| Left-associative, right-flowing 'lift3' operator
>>> Just 2 >| Just 3 >| Just 4 **$> \x y z -> x * y * z
Just 24
-}
infixl 4 **$>
(**$>) :: Applicative f => f a -> (a -> b -> c -> d) -> f b -> f c -> f d
x **$> f = \y z -> lift3 f x y z
|
expede/flower
|
src/Control/Flower/Applicative/Lazy.hs
|
mit
| 2,264
| 0
| 11
| 544
| 647
| 355
| 292
| 32
| 1
|
{-|
Module : PostgREST.Middleware
Description : Sets CORS policy. Also the PostgreSQL GUCs, role, search_path and pre-request function.
-}
{-# LANGUAGE RecordWildCards #-}
module PostgREST.Middleware
( runPgLocals
, pgrstFormat
, pgrstMiddleware
, defaultCorsPolicy
, corsPolicy
, optionalRollback
) where
import qualified Data.Aeson as JSON
import qualified Data.ByteString.Char8 as BS
import qualified Data.CaseInsensitive as CI
import qualified Data.HashMap.Strict as M
import qualified Data.Text as T
import qualified Hasql.Decoders as HD
import qualified Hasql.DynamicStatements.Snippet as H hiding
(sql)
import qualified Hasql.DynamicStatements.Statement as H
import qualified Hasql.Transaction as H
import qualified Network.HTTP.Types.Header as HTTP
import qualified Network.Wai as Wai
import qualified Network.Wai.Logger as Wai
import qualified Network.Wai.Middleware.Cors as Wai
import qualified Network.Wai.Middleware.Gzip as Wai
import qualified Network.Wai.Middleware.RequestLogger as Wai
import qualified Network.Wai.Middleware.Static as Wai
import Data.Function (id)
import Data.List (lookup)
import Data.Scientific (FPFormat (..), formatScientific,
isInteger)
import Network.HTTP.Types.Status (Status, status400, status500,
statusCode)
import System.IO.Unsafe (unsafePerformIO)
import System.Log.FastLogger (toLogStr)
import PostgREST.Config (AppConfig (..), LogLevel (..))
import PostgREST.Error (Error, errorResponseFor)
import PostgREST.GucHeader (addHeadersIfNotIncluded)
import PostgREST.Query.SqlFragment (fromQi, intercalateSnippet,
unknownEncoder)
import PostgREST.Request.ApiRequest (ApiRequest (..), Target (..))
import PostgREST.Request.Preferences
import Protolude hiding (head, toS)
import Protolude.Conv (toS)
-- | Runs local(transaction scoped) GUCs for every request, plus the pre-request function
runPgLocals :: AppConfig -> M.HashMap Text JSON.Value ->
(ApiRequest -> ExceptT Error H.Transaction Wai.Response) ->
ApiRequest -> ByteString -> ExceptT Error H.Transaction Wai.Response
runPgLocals conf claims app req jsonDbS = do
lift $ H.statement mempty $ H.dynamicallyParameterized
("select " <> intercalateSnippet ", " (searchPathSql : roleSql ++ claimsSql ++ [methodSql, pathSql] ++ headersSql ++ cookiesSql ++ appSettingsSql ++ specSql))
HD.noResult (configDbPreparedStatements conf)
lift $ traverse_ H.sql preReqSql
app req
where
methodSql = setConfigLocal mempty ("request.method", iMethod req)
pathSql = setConfigLocal mempty ("request.path", iPath req)
headersSql = setConfigLocal "request.header." <$> iHeaders req
cookiesSql = setConfigLocal "request.cookie." <$> iCookies req
claimsWithRole =
let anon = JSON.String . toS $ configDbAnonRole conf in -- role claim defaults to anon if not specified in jwt
M.union claims (M.singleton "role" anon)
claimsSql = setConfigLocal "request.jwt.claim." <$> [(toS c, toS $ unquoted v) | (c,v) <- M.toList claimsWithRole]
roleSql = maybeToList $ (\x -> setConfigLocal mempty ("role", toS $ unquoted x)) <$> M.lookup "role" claimsWithRole
appSettingsSql = setConfigLocal mempty <$> (join bimap toS <$> configAppSettings conf)
searchPathSql =
let schemas = T.intercalate ", " (iSchema req : configDbExtraSearchPath conf) in
setConfigLocal mempty ("search_path", toS schemas)
preReqSql = (\f -> "select " <> fromQi f <> "();") <$> configDbPreRequest conf
specSql = case iTarget req of
TargetProc{tpIsRootSpec=True} -> [setConfigLocal mempty ("request.spec", jsonDbS)]
_ -> mempty
-- | Do a pg set_config(setting, value, true) call. This is equivalent to a SET LOCAL.
setConfigLocal :: ByteString -> (ByteString, ByteString) -> H.Snippet
setConfigLocal prefix (k, v) =
"set_config(" <> unknownEncoder (prefix <> k) <> ", " <> unknownEncoder v <> ", true)"
-- | Log in apache format. Only requests that have a status greater than minStatus are logged.
-- | There's no way to filter logs in the apache format on wai-extra: https://hackage.haskell.org/package/wai-extra-3.0.29.2/docs/Network-Wai-Middleware-RequestLogger.html#t:OutputFormat.
-- | So here we copy wai-logger apacheLogStr function: https://github.com/kazu-yamamoto/logger/blob/a4f51b909a099c51af7a3f75cf16e19a06f9e257/wai-logger/Network/Wai/Logger/Apache.hs#L45
-- | TODO: Add the ability to filter apache logs on wai-extra and remove this function.
pgrstFormat :: Status -> Wai.OutputFormatter
pgrstFormat minStatus date req status responseSize =
if status < minStatus
then mempty
else toLogStr (getSourceFromSocket req)
<> " - - ["
<> toLogStr date
<> "] \""
<> toLogStr (Wai.requestMethod req)
<> " "
<> toLogStr (Wai.rawPathInfo req <> Wai.rawQueryString req)
<> " "
<> toLogStr (show (Wai.httpVersion req)::Text)
<> "\" "
<> toLogStr (show (statusCode status)::Text)
<> " "
<> toLogStr (maybe "-" show responseSize::Text)
<> " \""
<> toLogStr (fromMaybe mempty $ Wai.requestHeaderReferer req)
<> "\" \""
<> toLogStr (fromMaybe mempty $ Wai.requestHeaderUserAgent req)
<> "\"\n"
where
getSourceFromSocket = BS.pack . Wai.showSockAddr . Wai.remoteHost
pgrstMiddleware :: LogLevel -> Wai.Application -> Wai.Application
pgrstMiddleware logLevel =
logger
. Wai.cors corsPolicy
. Wai.staticPolicy (Wai.only [("favicon.ico", "static/favicon.ico")])
where
logger = case logLevel of
LogCrit -> id
LogError -> unsafePerformIO $ Wai.mkRequestLogger Wai.def { Wai.outputFormat = Wai.CustomOutputFormat $ pgrstFormat status500}
LogWarn -> unsafePerformIO $ Wai.mkRequestLogger Wai.def { Wai.outputFormat = Wai.CustomOutputFormat $ pgrstFormat status400}
LogInfo -> Wai.logStdout
defaultCorsPolicy :: Wai.CorsResourcePolicy
defaultCorsPolicy = Wai.CorsResourcePolicy Nothing
["GET", "POST", "PATCH", "PUT", "DELETE", "OPTIONS"] ["Authorization"] Nothing
(Just $ 60*60*24) False False True
-- | CORS policy to be used by the Wai Cors middleware
corsPolicy :: Wai.Request -> Maybe Wai.CorsResourcePolicy
corsPolicy req = case lookup "origin" headers of
Just origin -> Just defaultCorsPolicy {
Wai.corsOrigins = Just ([origin], True)
, Wai.corsRequestHeaders = "Authentication" : accHeaders
, Wai.corsExposedHeaders = Just [
"Content-Encoding", "Content-Location", "Content-Range", "Content-Type"
, "Date", "Location", "Server", "Transfer-Encoding", "Range-Unit"
]
}
Nothing -> Nothing
where
headers = Wai.requestHeaders req
accHeaders = case lookup "access-control-request-headers" headers of
Just hdrs -> map (CI.mk . toS . T.strip . toS) $ BS.split ',' hdrs
Nothing -> []
unquoted :: JSON.Value -> Text
unquoted (JSON.String t) = t
unquoted (JSON.Number n) =
toS $ formatScientific Fixed (if isInteger n then Just 0 else Nothing) n
unquoted (JSON.Bool b) = show b
unquoted v = toS $ JSON.encode v
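-- For example, unquoted (JSON.String "admin") is "admin" and unquoted (JSON.Bool True)
-- is "True"; numbers are rendered unquoted and any other value falls back to its JSON
-- encoding.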
-- | Set a transaction to eventually roll back if requested and set respective
-- headers on the response.
optionalRollback
:: AppConfig
-> ApiRequest
-> ExceptT Error H.Transaction Wai.Response
-> ExceptT Error H.Transaction Wai.Response
optionalRollback AppConfig{..} ApiRequest{..} transaction = do
resp <- catchError transaction $ return . errorResponseFor
when (shouldRollback || (configDbTxRollbackAll && not shouldCommit))
(lift H.condemn)
return $ Wai.mapResponseHeaders preferenceApplied resp
where
shouldCommit =
configDbTxAllowOverride && iPreferTransaction == Just Commit
shouldRollback =
configDbTxAllowOverride && iPreferTransaction == Just Rollback
preferenceApplied
| shouldCommit =
addHeadersIfNotIncluded
[(HTTP.hPreferenceApplied, BS.pack (show Commit))]
| shouldRollback =
addHeadersIfNotIncluded
[(HTTP.hPreferenceApplied, BS.pack (show Rollback))]
| otherwise =
identity
|
steve-chavez/postgrest
|
src/PostgREST/Middleware.hs
|
mit
| 8,519
| 0
| 25
| 1,925
| 2,027
| 1,098
| 929
| 150
| 4
|
module Main where
import Antiqua.Loading.TMXLoader
import qualified Antiqua.Data.NonEmpty as NE
import Antiqua.Geometry.Line
main :: IO ()
main = do
  let ne = NE.NonEmpty 1 [2, 3, 4, 5, 6]
  print ne
  print (NE.reverse ne)
  loadTmx "holophote.tmx"
  return ()
|
olive/antiqua-prime
|
src/Main.hs
|
mit
| 279
| 0
| 11
| 61
| 109
| 59
| 50
| 11
| 1
|
mySort :: Ord a => [a] -> [a]
mySort (a:as) = let smallerOrEqual = [s | s <- as, s <= a]
                    bigger = [s | s <- as, s > a]
                in mySort smallerOrEqual ++ [a] ++ mySort bigger
mySort [] = []
mySortComp :: Ord a => (a -> a -> Ordering) -> [a] -> [a]
mySortComp p (a:as) = let smallerOrEqual = [s | s <- as, p s a /= GT]
                          bigger = [s | s <- as, p s a == GT]
                      in mySortComp p smallerOrEqual ++ [a] ++ mySortComp p bigger
mySortComp _ [] = []
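-- For example: mySort [3,1,2] == [1,2,3]
--              mySortComp compare [3,1,2] == [1,2,3]
--              mySortComp (flip compare) [3,1,2] == [3,2,1]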
|
diminishedprime/.org
|
reading-list/seven_languages_in_seven_weeks/haskell/day_2.hs
|
mit
| 919
| 0
| 13
| 536
| 310
| 158
| 152
| 16
| 5
|
module Game.Keyboard (
Keyboard,
initKeyboard,
handleKeyEvent,
isKeyDown
) where
import Data.Set (Set)
import qualified Data.Set as Set
import Graphics.UI.GLUT (Key(..), KeyState(..))
newtype Keyboard = Keyboard (Set Key) deriving (Show)
initKeyboard :: Keyboard
initKeyboard = Keyboard Set.empty
handleKeyEvent :: Key -> KeyState -> Keyboard -> Keyboard
handleKeyEvent k s = keyHandlerForState s k
keyHandlerForState :: KeyState -> Key -> Keyboard -> Keyboard
keyHandlerForState Up = removeKey
keyHandlerForState Down = addKey
removeKey :: Key -> Keyboard -> Keyboard
removeKey k (Keyboard s) = Keyboard $ Set.delete k s
addKey :: Key -> Keyboard -> Keyboard
addKey k (Keyboard s) = Keyboard $ Set.insert k s
isKeyDown :: Keyboard -> Key -> Bool
isKeyDown (Keyboard s) k = Set.member k s
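-- For example:
-- isKeyDown (handleKeyEvent (Char 'w') Down initKeyboard) (Char 'w') == True
-- isKeyDown (handleKeyEvent (Char 'w') Up initKeyboard) (Char 'w') == False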
|
sgrif/haskell-game
|
Game/Keyboard.hs
|
mit
| 809
| 0
| 7
| 138
| 277
| 151
| 126
| 22
| 1
|
module Job.Dispatch
( dispatchJob
) where
import Import
import Job.Activation
import Job.StepAchieved
import Job.Tracking
import Job.Welcome
-- | Dispatch jobs to their respective handlers.
dispatchJob :: Entity Job -> HandlerT App IO ()
dispatchJob (Entity jobId job) = do
master <- getYesod
-- Update the job attempt.
runDB $ update jobId [JobAttempt +=. 1]
-- Dispatch the action to the correct handler.
case jobAction job of
SendActiviatonMail -> sendActivationMail jobId $ jobValue job
SendWelcomeMail -> sendWelcomeMail jobId $ jobValue job
SendStepAchievedMail -> sendStepAchievedMail jobId $ jobValue job
-- | Send tracking if Google Analytics is enabled.
case appAnalytics $ appSettings master of
Nothing -> return ()
Just uaCode -> sendGoogleAnalyticsTracking uaCode (jobAction job) (jobValue job)
|
Tehnix/campaigns
|
Job/Dispatch.hs
|
mit
| 860
| 0
| 12
| 167
| 212
| 104
| 108
| 18
| 4
|
-- School of Haskell "Basics of Haskell" Chapter 3
-- https://www.schoolofhaskell.com/school/starting-with-haskell/basics-of-haskell/3-pure-functions-laziness-io
-- Exercise 3
-- Rewrite the previous exercise to take the input string from the user.
putQStrLn' :: String -> IO ()
putQStrLn' str = do
putStr "\""
putStr str
putStr "\""
putStr "\n"
main :: IO ()
main = do
putStrLn "Enter your name:"
str <- getLine
putQStrLn' ("Hello, " ++ str)
|
chsm/code
|
hs/soh/basics/03-03.hs
|
mit
| 468
| 0
| 9
| 89
| 92
| 42
| 50
| 11
| 1
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module MMM.HsMMM where
import MMM.Core.FuncComb((^^^), dirac, grd)
import MMM.OOP.Language.Syntax as OOP
import MMM.Util.Pretty
import qualified Data.Map as M
import Control.Monad.State
--------------------------------------------------------------------------------
-- * Embedded Haskell with MMM support.
--------------------------------------
-- ** Types
data HsTyAtom
= HsTyMMM HsTy HsTy HsTy HsTy
| HsTyVar String
| HsTyInt
| HsTyFloat
| HsTyBool
deriving (Eq, Show)
instance Pretty HsTyAtom where
pretty HsTyInt = t "Integer"
pretty HsTyFloat = t "Float"
pretty HsTyBool = t "Bool"
pretty (HsTyVar id) = pretty id
pretty (HsTyMMM m s i o) = t "MMM" <+> pretty m <+> pretty s <+> pretty i <+> pretty o
data HsTy
= HsTyNil
| HsTy1 HsTyAtom
| HsTyList HsTy
| HsTyProd [HsTy]
| HsTySum [HsTy]
| HsTyFun HsTy HsTy
| HsTyApp HsTy HsTy
| HsTyCtx HsTy HsTy
deriving (Eq, Show)
instance Pretty HsTy where
pretty (HsTy1 ty) = pretty ty
pretty (HsTyNil) = t "()"
pretty (HsTyList l) = brackets (pretty l)
pretty (HsTyProd l) = pprAssocLeftWith (\d a -> parens $ d <> comma <+> pretty a) l
pretty (HsTySum l) = pprAssocLeftWith (\d a -> t "Either" <+> parens d <+> parens (pretty a)) l
pretty (HsTyFun a b) = pretty a <+> t "->" <+> parens (pretty b)
pretty (HsTyCtx a b) = pretty a <+> t "=>" <+> parens (pretty b)
pretty (HsTyApp a b) = parens $ pretty a <+> pretty b
hstyFromType :: (IsVar id) => OOP.Type id -> HsTy
hstyFromType (OOP.Simple OOP.TyVoid) = HsTyNil
hstyFromType (OOP.Simple ts) = HsTy1 $ hstyatomFromTySimple ts
hstyFromType (OOP.Object id) = HsTy1 $ HsTyVar (varName id)
hstyFromType (OOP.ListOf a) = HsTyList $ hstyFromType a
hstyatomFromTySimple :: OOP.TySimple -> HsTyAtom
hstyatomFromTySimple TyInt = HsTyInt
hstyatomFromTySimple TyFloat = HsTyFloat
hstyatomFromTySimple TyBool = HsTyBool
hstyCtxStrongVar :: String -> HsTy -> HsTy
hstyCtxStrongVar s = HsTyCtx (HsTyApp (HsTy1 $ HsTyVar "Strong") (HsTy1 $ HsTyVar s))
--------------------------------------
-- ** Top-level declarations
type HsModule = [HsStmt]
hsmoduleGetName :: HsModule -> String
hsmoduleGetName hsm
= let
l = grd (==[]) $ filter isSModDecl hsm
in either (const "DEFAULT") (\((HsSModDecl s):_) -> s) l
where
isSModDecl (HsSModDecl _) = True
isSModDecl _ = False
data HsStmt
= HsSDecl HsDecl
| HsSTyDecl String HsTy
| HsSPragma HsPragma
| HsSImport String (Maybe String)
| HsSModDecl String
deriving (Eq, Show)
instance Ord HsStmt where
compare (HsSModDecl _) _ = GT
compare (HsSImport _ _) (HsSModDecl _) = LT
compare (HsSImport _ _) _ = GT
compare (HsSTyDecl _ _) (HsSModDecl _) = LT
compare (HsSTyDecl _ _) (HsSImport _ _) = LT
compare (HsSTyDecl _ _) _ = GT
compare _ _ = EQ
hsStmtIsLineP :: HsStmt -> Bool
hsStmtIsLineP (HsSPragma p) = hspragmaIsLine p
hsStmtIsLineP _ = False
instance Pretty HsStmt where
pretty (HsSDecl d) = pretty d
pretty (HsSPragma p) = pretty p
pretty (HsSTyDecl s ty)
= t "type" <+> t s <+> t "=" <+> pretty ty
pretty (HsSImport s Nothing)
= t "import" <+> t s
pretty (HsSImport s (Just q))
= t "import" <+> t "qualified" <+> t s <+> t "as" <+> t q
pretty (HsSModDecl s)
= t "module" <+> t s <+> t "where" <+> t "\n\n"
data HsPragma
= HsPLine Int String
deriving (Eq, Show)
hspragmaIsLine :: HsPragma -> Bool
hspragmaIsLine (HsPLine _ _) = True
instance Pretty HsPragma where
pretty (HsPLine i f) = t "{-#" <+> t "LINE" <+> pretty i <+> t "\"" <> t f <> t "\"" <+> t "#-}"
data HsDecl
= HsDecl
{ hsDeclName :: String
, hsDeclType :: Maybe HsTy
, hsDeclDef :: HsExp
, hsDeclAux :: [HsDecl]
}
| HsDeclComment HsDecl String
deriving (Eq, Show)
instance Pretty HsDecl where
pretty (HsDeclComment d c)
= comment c $+$ pretty d
where
comment c = vcat (map ((t "--" <+>) . pretty) $ lines c)
pretty (HsDecl n ty d auxs)
= maybe empty ((t n <+>) . (t "::" <+>) . pretty) ty
$+$ t n <+> t "=" <+> pretty d
$+$ case auxs of
[] -> empty
_ -> nest 2 (t "where" $+$ nest 2 (vcat . map pretty $ auxs))
$+$ t "\n"
hsdeclSimpl :: String -> HsExp -> HsDecl
hsdeclSimpl s e = HsDecl s Nothing e []
hsdeclSimplTy :: String -> HsTy -> HsExp -> HsDecl
hsdeclSimplTy s ty e = HsDecl s (Just ty) e []
--------------------------------------
-- ** Expressions
data HsExp
= HsEId
| HsEEta
| HsEBang
| HsEMMM HsMMM
| HsEVar String
| HsEProj Int Int
| HsEInj Int Int
| HsESplit HsExp HsExp
| HsEEith HsExp HsExp
| HsEProd [HsExp]
| HsESum [HsExp]
| HsEComp HsExp HsExp
| HsEKlei HsExp HsExp
| HsEOOP (OOP.Exp String)
| HsELam [String] HsExp
deriving (Eq, Show)
instance Pretty HsExp where
pretty HsEBang = t "bang"
pretty HsEId = t "id"
pretty HsEEta = t "return"
pretty (HsEMMM m) = pretty m
pretty (HsEVar v) = pretty v
pretty (HsEProj 1 _) = t "id"
pretty (HsEProj 2 i) = t "p" <> pretty i
pretty (HsEProj c i) = pretty $ buildBinary c i (HsEProj 2)
pretty (HsEInj 2 i) = t "i" <> pretty i
pretty (HsEInj c i) = pretty $ buildBinary c i (HsEInj 2)
pretty (HsESplit a b) = parens $ t "split" <+> pretty a <+> pretty b
pretty (HsEEith a b) = parens $ t "either" <+> pretty a <+> pretty b
pretty (HsEProd as) = pprAssocLeftWith (\r a -> parens $ r <+> t "><" <+> pretty a) as
pretty (HsESum as) = pprAssocLeftWith (\r a -> parens $ r <+> t "-|-" <+> pretty a) as
pretty (HsEComp f g) = parens $ pretty f <+> t "." <+> pretty g
pretty (HsEKlei m n) = parens $ pretty m <+> t ".!" <+> pretty n
pretty (HsEOOP e) = pretty e
pretty (HsELam [] e) = parens $ t "const" <+> pretty e
pretty (HsELam v e)
= parens $ t "\\" <+> (pprAssocLeftWith (\r a -> parens $ r <+> t "," <+> pretty a) v)
<+> t "->"
<+> pretty e
buildBinary :: Int -> Int -> (Int -> HsExp) -> HsExp
buildBinary max 1 f = foldl1 HsEComp $ replicate (max - 1) (f 1)
buildBinary max i f
| max > i = let aux = foldl1 HsEComp $ replicate (max - i) (f 1)
in HsEComp (f 2) aux
| otherwise = f 2
hsexpLiftMMM :: HsExp -> HsExp
hsexpLiftMMM = HsEMMM . HsMLift
--------------------------------------
-- ** Mealy Machines
data HsMMM
= HsMLift HsExp
| HsMVar String
| HsMExtL HsMMM
| HsMExtR HsMMM
| HsMRunm HsMMM
| HsMRunmI HsMMM
| HsMRunm1 HsMMM
| HsMKlei HsMMM HsMMM
| HsMCond HsExp HsMMM HsMMM
| HsMGetSt
| HsMCopy
deriving (Eq, Show)
instance Pretty HsMMM where
pretty (HsMLift f) = parens $ t "f2m" <+> pretty f
pretty (HsMVar v) = pretty v
pretty (HsMExtL m) = parens $ t "extl" <+> pretty m
pretty (HsMExtR m) = parens $ t "extr" <+> pretty m
pretty (HsMRunm m) = parens $ t "runm" <+> pretty m
pretty (HsMRunmI m) = parens $ t "runm_" <+> pretty m
pretty (HsMRunm1 m) = parens $ t "runm1" <+> pretty m
pretty (HsMKlei m n)= pretty m $+$ t ".!" <+> pretty n
pretty (HsMCond c m n) = parens $ t "mcond" $+$ pretty c
$+$ parens (pretty m)
$+$ parens (pretty n)
pretty HsMGetSt = t "getst"
pretty HsMCopy = t "copy"
|
VictorCMiraldo/mmm
|
MMM/HsMMM.hs
|
mit
| 7,630
| 0
| 18
| 2,100
| 2,986
| 1,492
| 1,494
| 194
| 2
|
type Birds = Int
type Pole = (Birds, Birds)
--landLeft :: Birds -> Pole -> Pole
--landLeft n (left, right) = (left + n, right)
--
--landRight :: Birds -> Pole -> Pole
--landRight n (left, right) = (left, right + n)
landLeft :: Birds -> Pole -> Maybe Pole
landLeft n (left,right)
| abs ((left + n) - right) < 4 = Just (left + n, right)
| otherwise = Nothing
landRight :: Birds -> Pole -> Maybe Pole
landRight n (left,right)
| abs (left - (right + n)) < 4 = Just (left, right + n)
| otherwise = Nothing
banana :: Pole -> Maybe Pole
banana _ = Nothing
routine :: Maybe Pole
routine = do
start <- return (0, 0)
first <- landLeft 2 start
second <- landRight 2 first
landLeft 1 second
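-- For example, routine evaluates to Just (3, 2); threading any step through banana
-- would instead make the whole computation Nothing.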
|
CreaturePhil/cis194
|
walktheline.hs
|
mit
| 756
| 2
| 13
| 214
| 263
| 137
| 126
| 18
| 1
|
import Crypto.Enigma
main :: IO ()
main = do
msg <- getLine
putStrLn $ enigma conf state msg
where
(conf, state) = intToSettingDefault 0
|
kc1212/enigma-hs
|
bin/Main.hs
|
mit
| 160
| 0
| 8
| 47
| 59
| 29
| 30
| 6
| 1
|
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : $Header$
Description : Comorphism from OWL 2 to CASL_Dl
Copyright : (c) Francisc-Nicolae Bungiu, Felix Gabriel Mance
License : GPLv2 or higher, see LICENSE.txt
Maintainer : f.bungiu@jacobs-university.de
Stability : provisional
Portability : non-portable (via Logic.Logic)
-}
module OWL2.OWL22CASL (OWL22CASL (..)) where
import Logic.Logic as Logic
import Logic.Comorphism
import Common.AS_Annotation
import Common.Result
import Common.Id
import Common.IRI
import Control.Monad
import qualified Data.Set as Set
import qualified Data.Map as Map
import qualified Data.List as List
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Rel as Rel
-- the DL with the initial signature for OWL
import CASL_DL.PredefinedCASLAxioms
-- OWL = domain
import OWL2.Logic_OWL2
import OWL2.AS as AS
import OWL2.Parse
import OWL2.Print
import OWL2.ProfilesAndSublogics
import OWL2.ManchesterPrint ()
import OWL2.Morphism
import OWL2.Symbols
import qualified OWL2.Sign as OS
import qualified OWL2.Sublogic as SL
-- CASL_DL = codomain
import CASL.Logic_CASL
import CASL.AS_Basic_CASL
import CASL.Sign
import CASL.Morphism
import CASL.Induction
import CASL.Sublogic
-- import OWL2.ManchesterParser
import Common.ProofTree
import Data.Maybe
import Text.ParserCombinators.Parsec
data OWL22CASL = OWL22CASL deriving Show
instance Language OWL22CASL
instance Comorphism
OWL22CASL -- comorphism
OWL2 -- lid domain
ProfSub -- sublogics domain
OntologyDocument -- Basic spec domain
Axiom -- sentence domain
SymbItems -- symbol items domain
SymbMapItems -- symbol map items domain
OS.Sign -- signature domain
OWLMorphism -- morphism domain
Entity -- symbol domain
RawSymb -- rawsymbol domain
ProofTree -- proof tree codomain
CASL -- lid codomain
CASL_Sublogics -- sublogics codomain
CASLBasicSpec -- Basic spec codomain
CASLFORMULA -- sentence codomain
SYMB_ITEMS -- symbol items codomain
SYMB_MAP_ITEMS -- symbol map items codomain
CASLSign -- signature codomain
CASLMor -- morphism codomain
Symbol -- symbol codomain
RawSymbol -- rawsymbol codomain
ProofTree -- proof tree domain
where
sourceLogic OWL22CASL = OWL2
sourceSublogic OWL22CASL = topS
targetLogic OWL22CASL = CASL
mapSublogic OWL22CASL _ = Just $ cFol
{ cons_features = emptyMapConsFeature }
map_theory OWL22CASL = mapTheory
map_morphism OWL22CASL = mapMorphism
map_symbol OWL22CASL _ = mapSymbol
isInclusionComorphism OWL22CASL = True
has_model_expansion OWL22CASL = True
-- s = emptySign ()
objectPropPred :: PredType
objectPropPred = PredType [thing, thing]
dataPropPred :: PredType
dataPropPred = PredType [thing, dataS]
indiConst :: OpType
indiConst = OpType Total [] thing
uriToIdM :: IRI -> Result Id
uriToIdM = return . uriToCaslId
tokDecl :: Token -> VAR_DECL
tokDecl = flip mkVarDecl thing
tokDataDecl :: Token -> VAR_DECL
tokDataDecl = flip mkVarDecl dataS
nameDecl :: Int -> SORT -> VAR_DECL
nameDecl = mkVarDecl . mkNName
thingDecl :: Int -> VAR_DECL
thingDecl = flip nameDecl thing
dataDecl :: Int -> VAR_DECL
dataDecl = flip nameDecl dataS
qualThing :: Int -> TERM f
qualThing = toQualVar . thingDecl
qualData :: Int -> TERM f
qualData = toQualVar . dataDecl
implConj :: [FORMULA f] -> FORMULA f -> FORMULA f
implConj = mkImpl . conjunct
mkNC :: [FORMULA f] -> FORMULA f
mkNC = mkNeg . conjunct
mkEqVar :: VAR_DECL -> TERM f -> FORMULA f
mkEqVar = mkStEq . toQualVar
mkFEI :: [VAR_DECL] -> [VAR_DECL] -> FORMULA f -> FORMULA f -> FORMULA f
mkFEI l1 l2 f = mkForall l1 . mkExist l2 . mkImpl f
mkFIE :: [Int] -> [FORMULA f] -> Int -> Int -> FORMULA f
mkFIE l1 l2 x y = mkVDecl l1 $ implConj l2 $ mkEqVar (thingDecl x) $ qualThing y
mkFI :: [VAR_DECL] -> [VAR_DECL] -> FORMULA f -> FORMULA f -> FORMULA f
mkFI l1 l2 f1 = (mkForall l1) . (mkImpl (mkExist l2 f1))
mkRI :: [Int] -> Int -> FORMULA f -> FORMULA f
mkRI l x so = mkVDecl l $ mkImpl (mkMember (qualThing x) thing) so
mkThingVar :: VAR -> TERM f
mkThingVar v = Qual_var v thing nullRange
mkEqDecl :: Int -> TERM f -> FORMULA f
mkEqDecl i = mkEqVar (thingDecl i)
mkVDecl :: [Int] -> FORMULA f -> FORMULA f
mkVDecl = mkForall . map thingDecl
mkVDataDecl :: [Int] -> FORMULA f -> FORMULA f
mkVDataDecl = mkForall . map dataDecl
mk1VDecl :: FORMULA f -> FORMULA f
mk1VDecl = mkVDecl [1]
mkPred :: PredType -> [TERM f] -> PRED_NAME -> FORMULA f
mkPred c tl u = mkPredication (mkQualPred u $ toPRED_TYPE c) tl
mkMember :: TERM f -> SORT -> FORMULA f
mkMember t s = Membership t s nullRange
-- | Get all distinct pairs for commutative operations
comPairs :: [t] -> [t1] -> [(t, t1)]
comPairs [] [] = []
comPairs _ [] = []
comPairs [] _ = []
comPairs (a : as) (_ : bs) = mkPairs a bs ++ comPairs as bs
mkPairs :: t -> [s] -> [(t, s)]
mkPairs a = map (\ b -> (a, b))
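-- For example: mkPairs 1 [2,3] == [(1,2),(1,3)]
-- comPairs [1,2,3] [1,2,3] == [(1,2),(1,3),(2,3)]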
data VarOrIndi = OVar Int | OIndi IRI
deriving (Show, Eq, Ord)
-- | Mapping of OWL morphisms to CASL morphisms
mapMorphism :: OWLMorphism -> Result CASLMor
mapMorphism oMor = do
cdm <- mapSign $ osource oMor
ccd <- mapSign $ otarget oMor
let emap = mmaps oMor
preds = Map.foldWithKey (\ (Entity _ ty u1) u2 -> let
i1 = uriToCaslId u1
i2 = uriToCaslId u2
in case ty of
Class -> Map.insert (i1, conceptPred) i2
ObjectProperty -> Map.insert (i1, objectPropPred) i2
DataProperty -> Map.insert (i1, dataPropPred) i2
_ -> id) Map.empty emap
ops = Map.foldWithKey (\ (Entity _ ty u1) u2 -> case ty of
NamedIndividual -> Map.insert (uriToCaslId u1, indiConst)
(uriToCaslId u2, Total)
_ -> id) Map.empty emap
return (embedMorphism () cdm ccd)
{ op_map = ops
, pred_map = preds }
mapSymbol :: Entity -> Set.Set Symbol
mapSymbol (Entity _ ty iRi) = let
syN = Set.singleton . Symbol (uriToCaslId iRi)
in case ty of
Class -> syN $ PredAsItemType conceptPred
ObjectProperty -> syN $ PredAsItemType objectPropPred
DataProperty -> syN $ PredAsItemType dataPropPred
NamedIndividual -> syN $ OpAsItemType indiConst
AnnotationProperty -> Set.empty
Datatype -> Set.empty
mapSign :: OS.Sign -> Result CASLSign
mapSign sig =
let conc = OS.concepts sig
cvrt = map uriToCaslId . Set.toList
tMp k = MapSet.fromList . map (\ u -> (u, [k]))
cPreds = thing : nothing : cvrt conc
oPreds = cvrt $ OS.objectProperties sig
dPreds = cvrt $ OS.dataProperties sig
aPreds = foldr MapSet.union MapSet.empty
[ tMp conceptPred cPreds
, tMp objectPropPred oPreds
, tMp dataPropPred dPreds ]
in return $ uniteCASLSign predefSign2
(emptySign ())
{ predMap = aPreds
, opMap = tMp indiConst . cvrt $ OS.individuals sig
}
loadDataInformation :: ProfSub -> CASLSign
loadDataInformation sl = let
dts = Set.map uriToCaslId $ SL.datatype $ sublogic sl
eSig x = (emptySign ()) { sortRel =
Rel.fromList [(x, dataS)]}
sigs = Set.toList $
Set.map (\x -> Map.findWithDefault (eSig x) x datatypeSigns) dts
in foldl uniteCASLSign (emptySign ()) sigs
mapTheory :: (OS.Sign, [Named Axiom]) -> Result (CASLSign, [Named CASLFORMULA])
mapTheory (owlSig, owlSens) = let
sl = sublogicOfTheo OWL2 (owlSig, map sentence owlSens)
in do
cSig <- mapSign owlSig
let pSig = loadDataInformation sl
{- dTypes = (emptySign ()) {sortRel = Rel.transClosure . Rel.fromSet
. Set.map (\ d -> (uriToCaslId d, dataS))
. Set.union predefIRIs $ OS.datatypes owlSig} -}
(cSens, nSigs) <- foldM (\ (x, y) z -> do
(sen, y') <- mapSentence z
return (sen ++ x, y ++ y')) -- uniteCASLSign sig y))
([], []) owlSens
return (foldl1 uniteCASLSign $ [cSig,pSig] ++ nSigs, -- , dTypes],
predefinedAxioms ++ (reverse cSens))
-- | mapping of OWL to CASL_DL formulae
mapSentence :: Named Axiom -> Result ([Named CASLFORMULA], [CASLSign])
mapSentence inSen = do
(outAx, outSigs) <- mapAxioms $ sentence inSen
return (map (flip mapNamed inSen . const) outAx, outSigs)
mapVar :: VarOrIndi -> Result (TERM ())
mapVar v = case v of
OVar n -> return $ qualThing n
OIndi i -> mapIndivURI i
-- | Mapping of Class URIs
mapClassURI :: Class -> Token -> Result CASLFORMULA
mapClassURI c t = fmap (mkPred conceptPred [mkThingVar t]) $ uriToIdM c
-- | Mapping of Individual URIs
mapIndivURI :: Individual -> Result (TERM ())
mapIndivURI uriI = do
ur <- uriToIdM uriI
return $ mkAppl (mkQualOp ur (Op_type Total [] thing nullRange)) []
mapNNInt :: NNInt -> TERM ()
mapNNInt int = let NNInt uInt = int in foldr1 joinDigits $ map mkDigit uInt
mapIntLit :: IntLit -> TERM ()
mapIntLit int =
let cInt = mapNNInt $ absInt int
in if isNegInt int then negateInt $ upcast cInt integer
else upcast cInt integer
mapDecLit :: DecLit -> TERM ()
mapDecLit dec =
let ip = truncDec dec
np = absInt ip
fp = fracDec dec
n = mkDecimal (mapNNInt np) (mapNNInt fp)
in if isNegInt ip then negateFloat n else n
mapFloatLit :: FloatLit -> TERM ()
mapFloatLit f =
let fb = floatBase f
ex = floatExp f
in mkFloat (mapDecLit fb) (mapIntLit ex)
mapNrLit :: Literal -> TERM ()
mapNrLit l = case l of
NumberLit f
| isFloatInt f -> mapIntLit $ truncDec $ floatBase f
| isFloatDec f -> mapDecLit $ floatBase f
| otherwise -> mapFloatLit f
_ -> error "not number literal"
mapLiteral :: Literal -> Result (TERM ())
mapLiteral lit = return $ case lit of
Literal l ty -> Sorted_term (case ty of
Untyped _ -> foldr consChar emptyStringTerm l
Typed dt -> case getDatatypeCat dt of
OWL2Number -> let p = parse literal "" l in case p of
Right nr -> mapNrLit nr
_ -> error "cannot parse number literal"
OWL2Bool -> case l of
"true" -> trueT
_ -> falseT
_ -> foldr consChar emptyStringTerm l) dataS nullRange
_ -> mapNrLit lit
-- | Mapping of data properties
mapDataProp :: DataPropertyExpression -> Int -> Int
-> Result CASLFORMULA
mapDataProp dp a b = fmap (mkPred dataPropPred [qualThing a, qualData b])
$ uriToIdM dp
-- | Mapping of obj props
mapObjProp :: ObjectPropertyExpression -> Int -> Int
-> Result CASLFORMULA
mapObjProp ob a b = case ob of
ObjectProp u -> fmap (mkPred objectPropPred $ map qualThing [a, b])
$ uriToIdM u
ObjectInverseOf u -> mapObjProp u b a
-- | Mapping of obj props with Individuals
mapObjPropI :: ObjectPropertyExpression -> VarOrIndi -> VarOrIndi
-> Result CASLFORMULA
mapObjPropI ob lP rP = case ob of
ObjectProp u -> do
l <- mapVar lP
r <- mapVar rP
fmap (mkPred objectPropPred [l, r]) $ uriToIdM u
ObjectInverseOf u -> mapObjPropI u rP lP
-- | mapping of individual list
mapComIndivList :: SameOrDifferent -> Maybe Individual
-> [Individual] -> Result [CASLFORMULA]
mapComIndivList sod mol inds = do
fs <- mapM mapIndivURI inds
tps <- case mol of
Nothing -> return $ comPairs fs fs
Just ol -> do
f <- mapIndivURI ol
return $ mkPairs f fs
return $ map (\ (x, y) -> case sod of
Same -> mkStEq x y
Different -> mkNeg $ mkStEq x y) tps
{- | Mapping along DataPropsList for creation of pairs for commutative
operations. -}
mapComDataPropsList :: Maybe DataPropertyExpression
-> [DataPropertyExpression] -> Int -> Int
-> Result [(CASLFORMULA, CASLFORMULA)]
mapComDataPropsList md props a b = do
fs <- mapM (\ x -> mapDataProp x a b) props
case md of
Nothing -> return $ comPairs fs fs
Just dp -> fmap (`mkPairs` fs) $ mapDataProp dp a b
{- | Mapping along ObjectPropsList for creation of pairs for commutative
operations. -}
mapComObjectPropsList :: Maybe ObjectPropertyExpression
-> [ObjectPropertyExpression] -> Int -> Int
-> Result [(CASLFORMULA, CASLFORMULA)]
mapComObjectPropsList mol props a b = do
fs <- mapM (\ x -> mapObjProp x a b) props
case mol of
Nothing -> return $ comPairs fs fs
Just ol -> fmap (`mkPairs` fs) $ mapObjProp ol a b
mapDataRangeAux :: DataRange -> CASLTERM -> Result (CASLFORMULA, [CASLSign])
mapDataRangeAux dr i = case dr of
DataType d fl -> do
let dt = mkMember i $ uriToCaslId d
(sens, s) <- mapAndUnzipM (mapFacet i) fl
return (conjunct $ dt : sens, concat s)
DataComplementOf drc -> do
(sens, s) <- mapDataRangeAux drc i
return (mkNeg sens, s)
DataJunction jt drl -> do
(jl, sl) <- mapAndUnzipM ((\ v r -> mapDataRangeAux r v) i) drl
--let usig = uniteL sl
return $ case jt of
IntersectionOf -> (conjunct jl, concat sl)
UnionOf -> (disjunct jl, concat sl)
DataOneOf cs -> do
ls <- mapM mapLiteral cs
return (disjunct $ map (mkStEq i) ls, [])
-- | mapping of Data Range
mapDataRange :: DataRange -> Int -> Result (CASLFORMULA, [CASLSign])
mapDataRange dr = mapDataRangeAux dr . qualData
mkFacetPred :: TERM f -> ConstrainingFacet -> TERM f -> (FORMULA f, Id)
mkFacetPred lit f var =
let cf = mkInfix $ fromCF f
in (mkPred dataPred [var, lit] cf, cf)
mapFacet :: CASLTERM -> (ConstrainingFacet, RestrictionValue)
-> Result (CASLFORMULA, [CASLSign])
mapFacet var (f, r) = do
con <- mapLiteral r
let (fp, cf) = mkFacetPred con f var
return (fp,
[(emptySign ()) {predMap = MapSet.fromList [(cf, [dataPred])]}])
cardProps :: Bool
-> Either ObjectPropertyExpression DataPropertyExpression -> Int
-> [Int] -> Result [CASLFORMULA]
cardProps b prop var vLst =
if b then let Left ope = prop in mapM (mapObjProp ope var) vLst
else let Right dpe = prop in mapM (mapDataProp dpe var) vLst
mapCard :: Bool -> CardinalityType -> Int
-> Either ObjectPropertyExpression DataPropertyExpression
-> Maybe (Either ClassExpression DataRange) -> Int
-> Result (FORMULA (), [CASLSign])
mapCard b ct n prop d var = do
let vlst = map (var +) [1 .. n]
vlstM = vlst ++ [n + var + 1]
vlstE = [n + var + 1]
(dOut, s) <- case d of
Nothing -> return ([], [])
Just y ->
if b then let Left ce = y in mapAndUnzipM
(mapDescription ce) vlst
else let Right dr = y in mapAndUnzipM (mapDataRange dr) vlst
(eOut, s') <- case d of
Nothing -> return ([], [])
Just y ->
if b then let Left ce = y in mapAndUnzipM
(mapDescription ce) vlstM
else let Right dr = y in mapAndUnzipM (mapDataRange dr) vlstM
(fOut, s'') <- case d of
Nothing -> return ([], [])
Just y ->
if b then let Left ce = y in mapAndUnzipM
(mapDescription ce) vlstE
else let Right dr = y in mapAndUnzipM (mapDataRange dr) vlstE
let dlst = map (\ (x, y) -> mkNeg $ mkStEq (qualThing x) $ qualThing y)
$ comPairs vlst vlst
dlstM = map (\ (x, y) -> mkStEq (qualThing x) $ qualThing y)
$ comPairs vlstM vlstM
qVars = map thingDecl vlst
qVarsM = map thingDecl vlstM
qVarsE = map thingDecl vlstE
oProps <- cardProps b prop var vlst
oPropsM <- cardProps b prop var vlstM
oPropsE <- cardProps b prop var vlstE
let minLst = conjunct $ dlst ++ oProps ++ dOut
maxLst = mkImpl (conjunct $ oPropsM ++ eOut)
$ disjunct dlstM
exactLst' = mkImpl (conjunct $ oPropsE ++ fOut) $ disjunct dlstM
senAux = conjunct [minLst, mkForall qVarsE exactLst']
exactLst = if null qVars then senAux else mkExist qVars senAux
ts = concat $ s ++ s' ++ s''
return $ case ct of
MinCardinality -> (mkExist qVars minLst, ts)
MaxCardinality -> (mkForall qVarsM maxLst, ts)
ExactCardinality -> (exactLst, ts)
-- | mapping of OWL2 Descriptions
mapDescription :: ClassExpression -> Int ->
Result (CASLFORMULA, [CASLSign])
mapDescription desc var = case desc of
Expression u -> do
c <- mapClassURI u $ mkNName var
return (c, [])
ObjectJunction ty ds -> do
(els, s) <- mapAndUnzipM (flip mapDescription var) ds
return ((case ty of
UnionOf -> disjunct
IntersectionOf -> conjunct)
els, concat s)
ObjectComplementOf d -> do
(els, s) <- mapDescription d var
return (mkNeg els, s)
ObjectOneOf is -> do
il <- mapM mapIndivURI is
return (disjunct $ map (mkStEq $ qualThing var) il, [])
ObjectValuesFrom ty o d -> let n = var + 1 in do
oprop0 <- mapObjProp o var n
(desc0, s) <- mapDescription d n
return $ case ty of
SomeValuesFrom -> (mkExist [thingDecl n] $ conjunct [oprop0, desc0],
s)
AllValuesFrom -> (mkVDecl [n] $ mkImpl oprop0 desc0,
s)
ObjectHasSelf o -> do
op <- mapObjProp o var var
return (op, [])
ObjectHasValue o i -> do
op <- mapObjPropI o (OVar var) (OIndi i)
return (op, [])
ObjectCardinality (Cardinality ct n oprop d) -> mapCard True ct n
(Left oprop) (fmap Left d) var
DataValuesFrom ty dpe dr -> let n = var + 1 in do
oprop0 <- mapDataProp (head dpe) var n
(desc0, s) <- mapDataRange dr n
--let ts = niteCASLSign cSig s
return $ case ty of
SomeValuesFrom -> (mkExist [dataDecl n] $ conjunct [oprop0, desc0],
s)
AllValuesFrom -> (mkVDataDecl [n] $ mkImpl oprop0 desc0, s)
DataHasValue dpe c -> do
con <- mapLiteral c
return (mkPred dataPropPred [qualThing var, con]
$ uriToCaslId dpe, [])
DataCardinality (Cardinality ct n dpe dr) -> mapCard False ct n
(Right dpe) (fmap Right dr) var
-- | Mapping of a list of descriptions
mapDescriptionList :: Int -> [ClassExpression]
-> Result ([CASLFORMULA], [CASLSign])
mapDescriptionList n lst = do
(els, s) <- mapAndUnzipM (uncurry $ mapDescription)
$ zip lst $ replicate (length lst) n
return (els, concat s)
-- | Mapping of a list of pairs of descriptions
mapDescriptionListP :: Int -> [(ClassExpression, ClassExpression)]
-> Result ([(CASLFORMULA, CASLFORMULA)], [CASLSign])
mapDescriptionListP n lst = do
let (l, r) = unzip lst
([lls, rls], s) <- mapAndUnzipM (mapDescriptionList n) [l, r]
return (zip lls rls, concat s)
mapCharact :: ObjectPropertyExpression -> Character
-> Result CASLFORMULA
mapCharact ope c = case c of
Functional -> do
so1 <- mapObjProp ope 1 2
so2 <- mapObjProp ope 1 3
return $ mkFIE [1, 2, 3] [so1, so2] 2 3
InverseFunctional -> do
so1 <- mapObjProp ope 1 3
so2 <- mapObjProp ope 2 3
return $ mkFIE [1, 2, 3] [so1, so2] 1 2
Reflexive -> do
so <- mapObjProp ope 1 1
return $ mkRI [1] 1 so
Irreflexive -> do
so <- mapObjProp ope 1 1
return $ mkRI [1] 1 $ mkNeg so
Symmetric -> do
so1 <- mapObjProp ope 1 2
so2 <- mapObjProp ope 2 1
return $ mkVDecl [1, 2] $ mkImpl so1 so2
Asymmetric -> do
so1 <- mapObjProp ope 1 2
so2 <- mapObjProp ope 2 1
return $ mkVDecl [1, 2] $ mkImpl so1 $ mkNeg so2
Antisymmetric -> do
so1 <- mapObjProp ope 1 2
so2 <- mapObjProp ope 2 1
return $ mkFIE [1, 2] [so1, so2] 1 2
Transitive -> do
so1 <- mapObjProp ope 1 2
so2 <- mapObjProp ope 2 3
so3 <- mapObjProp ope 1 3
return $ mkVDecl [1, 2, 3] $ implConj [so1, so2] so3
-- | Mapping of ObjectSubPropertyChain
mapSubObjPropChain :: [ObjectPropertyExpression]
-> ObjectPropertyExpression -> Result CASLFORMULA
mapSubObjPropChain props oP = do
let (_, vars) = unzip $ zip (oP:props) [1 ..]
-- because we need n+1 vars for a chain of n roles
oProps <- mapM (\ (z, x) -> mapObjProp z x (x+1)) $
zip props vars
ooP <- mapObjProp oP 1 (head $ reverse vars)
return $ mkVDecl vars $ implConj oProps ooP
-- | Mapping of subobj properties
mapSubObjProp :: ObjectPropertyExpression
-> ObjectPropertyExpression -> Int -> Result CASLFORMULA
mapSubObjProp e1 e2 a = do
let b = a + 1
l <- mapObjProp e1 a b
r <- mapObjProp e2 a b
return $ mkForallRange (map thingDecl [a, b]) (mkImpl l r) nullRange
mkEDPairs :: [Int] -> Maybe AS.Relation -> [(FORMULA f, FORMULA f)]
-> Result ([FORMULA f], [CASLSign])
mkEDPairs il mr pairs = do
let ls = map (\ (x, y) -> mkVDecl il
$ case fromMaybe (error "expected EDRelation") mr of
EDRelation Equivalent -> mkEqv x y
EDRelation Disjoint -> mkNC [x, y]
_ -> error "expected EDRelation") pairs
return (ls, [])
mkEDPairs' :: [Int] -> Maybe AS.Relation -> [(FORMULA f, FORMULA f)]
-> Result ([FORMULA f], [CASLSign])
mkEDPairs' [i1, i2] mr pairs = do
let ls = map (\ (x, y) -> mkVDecl [i1] $ mkVDataDecl [i2]
$ case fromMaybe (error "expected EDRelation") mr of
EDRelation Equivalent -> mkEqv x y
EDRelation Disjoint -> mkNC [x, y]
_ -> error "expected EDRelation") pairs
return (ls, [])
mkEDPairs' _ _ _ = error "wrong call of mkEDPairs'"
keyDecl :: Int -> [Int] -> [VAR_DECL]
keyDecl h il = map thingDecl (take h il) ++ map dataDecl (drop h il)
mapKey :: ClassExpression -> [FORMULA ()] -> [FORMULA ()]
-> Int -> [Int] -> Int -> Result (FORMULA (), [CASLSign])
mapKey ce pl npl p i h = do
(nce, s) <- mapDescription ce 1
(c3, _) <- mapDescription ce p
let un = mkForall [thingDecl p] $ implConj (c3 : npl)
$ mkStEq (qualThing p) $ qualThing 1
return (mkForall [thingDecl 1] $ mkImpl nce
$ mkExist (keyDecl h i) $ conjunct $ pl ++ [un], s)
-- mapAxioms :: Axiom -> Result ([CASLFORMULA], [CASLSign])
-- mapAxioms (PlainAxiom ex fb) = case fb of
-- ListFrameBit rel lfb -> mapListFrameBit ex rel lfb
-- AnnFrameBit ans afb -> mapAnnFrameBit ex ans afb
swrlVariableToVar :: IRI -> VAR_DECL
swrlVariableToVar iri = (flip mkVarDecl) thing $
case List.stripPrefix "urn:swrl#" (showIRI iri) of
Nothing -> idToSimpleId . uriToCaslId $ iri
Just var -> genToken var
mapAxioms :: Axiom -> Result([CASLFORMULA], [CASLSign])
mapAxioms axiom = case axiom of
Declaration _ _ -> return ([], [])
ClassAxiom clAxiom -> case clAxiom of
SubClassOf _ sub sup -> do
(domT, s1) <- mapDescription sub 1
(codT, s2) <- mapDescriptionList 1 [sup]
return (map (mk1VDecl . mkImpl domT) codT,
s1 ++ s2)
EquivalentClasses _ cel -> do
(els, _) <- mapDescriptionListP 1 $ comPairs cel cel
mkEDPairs [1] (Just $ EDRelation Equivalent) els
DisjointClasses _ cel -> do
(els, _) <- mapDescriptionListP 1 $ comPairs cel cel
mkEDPairs [1] (Just $ EDRelation Disjoint) els
DisjointUnion _ clIri clsl -> do
(decrs, s1) <- mapDescriptionList 1 clsl
(decrsS, s2) <- mapDescriptionListP 1 $ comPairs clsl clsl
let decrsP = map (\ (x, y) -> conjunct [x, y]) decrsS
mcls <- mapClassURI clIri $ mkNName 1
return ([mk1VDecl $ mkEqv mcls $ conjunct
[disjunct decrs, mkNC decrsP]], s1 ++ s2)
ObjectPropertyAxiom opAxiom -> case opAxiom of
SubObjectPropertyOf _ subOpExpr supOpExpr -> case subOpExpr of
SubObjPropExpr_obj opExpr -> do
os <- mapM (\ (o1, o2) -> mapSubObjProp o1 o2 3)
$ mkPairs opExpr [supOpExpr]
return (os, [])
SubObjPropExpr_exprchain opExprs -> do
os <- mapSubObjPropChain opExprs supOpExpr
return ([os], [])
EquivalentObjectProperties _ opExprs -> do
pairs <- mapComObjectPropsList Nothing opExprs 1 2
mkEDPairs [1, 2] (Just $ EDRelation Equivalent) pairs
DisjointObjectProperties _ opExprs -> do
pairs <- mapComObjectPropsList Nothing opExprs 1 2
mkEDPairs [1, 2] (Just $ EDRelation Disjoint) pairs
InverseObjectProperties _ opExpr1 opExpr2 -> do
os1 <- mapM (\o1 -> mapObjProp o1 1 2) [opExpr2]
o2 <- mapObjProp opExpr1 2 1
return (map (mkVDecl [1, 2] . mkEqv o2) os1, [])
ObjectPropertyDomain _ opExpr clExpr -> do
tobjP <- mapObjProp opExpr 1 2
(tdsc, s) <- mapAndUnzipM (\c -> mapDescription c 1) [clExpr]
let vars = (mkNName 1, mkNName 2)
return (map (mkFI [tokDecl $ fst vars] [tokDecl $ snd vars] tobjP) tdsc,
concat s)
ObjectPropertyRange _ opExpr clExpr -> do
tobjP <- mapObjProp opExpr 1 2
(tdsc, s) <- mapAndUnzipM (\c -> mapDescription c 2) [clExpr]
let vars = (mkNName 2, mkNName 1)
return (map (mkFI [tokDecl $ fst vars] [tokDecl $ snd vars] tobjP) tdsc,
concat s)
FunctionalObjectProperty _ opExpr -> do
cl <- mapM (mapCharact opExpr) [Functional]
return (cl, [])
InverseFunctionalObjectProperty _ opExpr -> do
cl <- mapM (mapCharact opExpr) [InverseFunctional]
return (cl, [])
ReflexiveObjectProperty _ opExpr -> do
cl <- mapM (mapCharact opExpr) [Reflexive]
return (cl, [])
IrreflexiveObjectProperty _ opExpr -> do
cl <- mapM (mapCharact opExpr) [Irreflexive]
return (cl, [])
SymmetricObjectProperty _ opExpr -> do
cl <- mapM (mapCharact opExpr) [Symmetric]
return (cl, [])
AsymmetricObjectProperty _ opExpr -> do
cl <- mapM (mapCharact opExpr) [Asymmetric]
return (cl, [])
TransitiveObjectProperty _ opExpr -> do
cl <- mapM (mapCharact opExpr) [Transitive]
return (cl, [])
DataPropertyAxiom dpAxiom -> case dpAxiom of
SubDataPropertyOf _ subDpExpr supDpExpr -> do
os1 <- mapM (\ o1 -> mapDataProp o1 1 2) [supDpExpr]
o2 <- mapDataProp subDpExpr 1 2 -- was 2 1
return (map (mkForall [thingDecl 1, dataDecl 2]
. mkImpl o2) os1, [])
EquivalentDataProperties _ dpExprs -> do
pairs <- mapComDataPropsList Nothing dpExprs 1 2
mkEDPairs' [1, 2] (Just $ EDRelation Equivalent) pairs
DisjointDataProperties _ dpExprs -> do
pairs <- mapComDataPropsList Nothing dpExprs 1 2
mkEDPairs' [1, 2] (Just $ EDRelation Disjoint) pairs
DataPropertyDomain _ dpExpr clExpr -> do
(els, s) <- mapAndUnzipM (\ c -> mapDescription c 1) [clExpr]
oEx <- mapDataProp dpExpr 1 2
let vars = (mkNName 1, mkNName 2)
return (map (mkFI [tokDecl $ fst vars]
[mkVarDecl (snd vars) dataS] oEx) els, concat s)
DataPropertyRange _ dpExpr dr -> do
oEx <- mapDataProp dpExpr 1 2
(odes, s) <- mapAndUnzipM (\r -> mapDataRange r 2) [dr]
let vars = (mkNName 1, mkNName 2)
return (map (mkFEI [tokDecl $ fst vars]
[tokDataDecl $ snd vars] oEx) odes, concat s)
FunctionalDataProperty _ dpExpr -> do
so1 <- mapDataProp dpExpr 1 2
so2 <- mapDataProp dpExpr 1 3
return ([mkForall (thingDecl 1 : map dataDecl [2, 3]) $ implConj
[so1, so2] $ mkEqVar (dataDecl 2) $ qualData 3], [])
DatatypeDefinition _ dt dr -> do
(odes, s) <- mapDataRange dr 2
return ([mkVDataDecl [2] $ mkEqv odes $ mkMember
(qualData 2) $ uriToCaslId dt], s)
HasKey _ ce opl dpl -> do
let lo = length opl
ld = length dpl
uptoOP = [2 .. lo + 1]
uptoDP = [lo + 2 .. lo + ld + 1]
tl = lo + ld + 2
ol <- mapM (\ (n, o) -> mapObjProp o 1 n) $ zip uptoOP opl
nol <- mapM (\ (n, o) -> mapObjProp o tl n) $ zip uptoOP opl
dl <- mapM (\ (n, d) -> mapDataProp d 1 n) $ zip uptoDP dpl
ndl <- mapM (\ (n, d) -> mapDataProp d tl n) $ zip uptoDP dpl
(keys, s) <-
mapKey ce (ol ++ dl) (nol ++ ndl) tl (uptoOP ++ uptoDP) lo
return ([keys], s)
Assertion assertion -> case assertion of
SameIndividual _ inds -> do
let (mi, rest) = case inds of
(iri:r) -> (Just iri, r)
_ -> (Nothing, inds)
fs <- mapComIndivList Same mi rest
return (fs, [])
DifferentIndividuals _ inds -> do
let (mi, rest) = case inds of
(iri:r) -> (Just iri, r)
_ -> (Nothing, inds)
fs <- mapComIndivList Different mi rest
return (fs, [])
ClassAssertion _ ce iIri -> do
(els, s) <- mapAndUnzipM (\c -> mapDescription c 1) [ce]
inD <- mapIndivURI iIri
let els' = map (substitute (mkNName 1) thing inD) els
return ( els', concat s)
ObjectPropertyAssertion _ op si ti -> do
oPropH <- mapObjPropI op (OIndi si) (OIndi ti)
return ([oPropH], [])
NegativeObjectPropertyAssertion _ op si ti -> do
oPropH <- mapObjPropI op (OIndi si) (OIndi ti)
let oProp = Negation oPropH nullRange
return ([oProp], [])
DataPropertyAssertion _ dp si tv -> do
inS <- mapIndivURI si
inT <- mapLiteral tv
oProp <- mapDataProp dp 1 2
return ([mkForall [thingDecl 1, dataDecl 2] $ implConj
[mkEqDecl 1 inS, mkEqVar (dataDecl 2) $ upcast inT dataS] oProp],
[])
NegativeDataPropertyAssertion _ dp si tv -> do
inS <- mapIndivURI si
inT <- mapLiteral tv
oPropH <- mapDataProp dp 1 2
let oProp = Negation oPropH nullRange
return ([mkForall [thingDecl 1, dataDecl 2] $ implConj
[mkEqDecl 1 inS, mkEqVar (dataDecl 2) $ upcast inT dataS] oProp],
[])
AnnotationAxiom _ -> return ([], [])
Rule rule -> case rule of
DLSafeRule _ b h ->
let vars = Set.toList . Set.unions $ getVariablesFromAtom <$> (b ++ h)
names = swrlVariableToVar <$> vars
f (s, sig, startVal) at = do
(sentences', sig', offsetValue) <- atomToSentence startVal at
return (s ++ sentences', sig ++ sig', offsetValue)
g startVal atoms = foldM f ([], [], startVal) atoms
in do
(antecedentSen, sig1, offsetValue) <- g 1 b
let antecedent = conjunct antecedentSen
(consequentSen, sig2, lastVar) <- g offsetValue h
let consequent = conjunct consequentSen
let impl = mkImpl antecedent consequent
return $ ([mkForall (names ++ map thingDecl [1..lastVar - 1]) impl], sig1 ++ sig2)
DGRule _ _ _ -> fail "Translating DGRules is not supported yet!"
DGAxiom _ _ _ _ _ -> fail "Translating DGAxioms is not supported yet!"
iArgToTerm :: IndividualArg -> Result(TERM ())
iArgToTerm arg = case arg of
IVar v -> return . toQualVar . swrlVariableToVar $ v
IArg iri -> mapIndivURI iri
iArgToVarOrIndi :: IndividualArg -> VarOrIndi
iArgToVarOrIndi arg = case arg of
IVar v -> OIndi v
IArg iri -> OIndi iri
iArgToIRI :: IndividualArg -> IRI
iArgToIRI arg = case arg of
IVar var -> var
IArg ind -> ind
dArgToTerm :: DataArg -> Result (TERM ())
dArgToTerm arg = case arg of
DVar var -> return . toQualVar . tokDataDecl . uriToTok $ var
DArg lit -> mapLiteral lit
atomToSentence :: Int -> Atom -> Result ([CASLFORMULA], [CASLSign], Int)
atomToSentence startVar atom = case atom of
ClassAtom clExpr iarg -> do
(el, sigs) <- mapDescription clExpr startVar
inD <- iArgToTerm iarg
let el' = substitute (mkNName startVar) thing inD el
return ([el'], sigs, startVar)
DataRangeAtom dr darg -> do
dt <- dArgToTerm darg
(odes, s) <- mapDataRangeAux dr dt
return ([substitute (mkNName 1) thing dt odes], s, startVar)
ObjectPropertyAtom opExpr iarg1 iarg2 -> do
let si = iArgToVarOrIndi iarg1
ti = iArgToVarOrIndi iarg2
oPropH <- mapObjPropI opExpr si ti
return ([oPropH], [], startVar)
DataPropertyAtom dpExpr iarg darg -> do
let a = 1
b = 2
inS <- iArgToTerm iarg
inT <- dArgToTerm darg
oProp <- mapDataProp dpExpr a b
return ([mkForall [thingDecl a, dataDecl b] $ implConj
[mkEqDecl a inS, mkEqVar (dataDecl b) $ upcast inT dataS] oProp],
[], startVar)
BuiltInAtom iri args -> do
prdArgs <- mapM dArgToTerm args
let predtype = PredType $ map (const thing) args
prd = mkPred predtype prdArgs (uriToId iri)
return ([prd], [], startVar)
SameIndividualAtom iarg1 iarg2 -> do
fs <- mapComIndivList Same (Just $ iArgToIRI iarg1) [iArgToIRI iarg2]
return (fs, [], startVar)
DifferentIndividualsAtom iarg1 iarg2 -> do
fs <- mapComIndivList Different (Just $ iArgToIRI iarg1) [iArgToIRI iarg2]
return (fs, [], startVar)
_ -> fail $ "Couldn't translate unknown atom '" ++ show atom ++ "'!"
|
spechub/Hets
|
OWL2/OWL22CASL.hs
|
gpl-2.0
| 34,475
| 0
| 23
| 10,559
| 12,152
| 6,032
| 6,120
| 751
| 40
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances,
MultiParamTypeClasses, DeriveDataTypeable, OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Pane.Errors
-- Copyright : 2007-2011 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL
--
-- Maintainer : maintainer@leksah.org
-- Stability : provisional
-- Portability :
--
-- | A pane which displays a list of errors
--
-----------------------------------------------------------------------------
module IDE.Pane.Errors (
IDEErrors
, ErrorsState
, fillErrorList
, addErrorToList
, selectError
, getErrors
, selectMatchingErrors
) where
import Graphics.UI.Gtk
import Data.Typeable (Typeable)
import IDE.Core.State
import IDE.ImportTool
(addResolveMenuItems, resolveErrors)
import Data.List (elemIndex)
import IDE.LogRef (showSourceSpan)
import Control.Monad.IO.Class (MonadIO(..))
import IDE.Utils.GUIUtils (getDarkState, treeViewContextMenu, __)
import Data.Text (Text)
import Control.Monad (foldM_, unless, void, when, forM_)
import qualified Data.Text as T
(intercalate, lines, takeWhile, length, drop)
import Data.IORef (writeIORef, readIORef, newIORef, IORef)
import Data.Maybe (isNothing)
import qualified Data.Foldable as F (toList)
import qualified Data.Sequence as Seq (null, elemIndexL)
-- | The representation of the Errors pane
data IDEErrors = IDEErrors {
scrolledView :: ScrolledWindow
, treeView :: TreeView
, errorStore :: ListStore ErrColumn
, autoClose :: IORef Bool -- ^ If the pane was only displayed to show current error
} deriving Typeable
-- | The data for a single row in the Errors pane
data ErrColumn = ErrColumn {
logRef :: LogRef,
background :: Text}
-- | The additional state used when recovering the pane
data ErrorsState = ErrorsState
deriving (Eq,Ord,Read,Show,Typeable)
instance Pane IDEErrors IDEM
where
primPaneName _ = __ "Errors"
getTopWidget = castToWidget . scrolledView
paneId _b = "*Errors"
instance RecoverablePane IDEErrors ErrorsState IDEM where
saveState _p = return (Just ErrorsState)
recoverState pp ErrorsState = do nb <- getNotebook pp
buildPane pp nb builder
builder = builder'
-- | Builds an 'IDEErrors' pane together with a list of
-- event 'Connections'
builder' :: PanePath ->
Notebook ->
Window ->
IDEM (Maybe IDEErrors, Connections)
builder' _pp _nb _windows = reifyIDE $ \ ideR -> do
errorStore <- listStoreNew []
treeView <- treeViewNew
treeViewSetModel treeView errorStore
rendererA <- cellRendererTextNew
colA <- treeViewColumnNew
treeViewColumnSetTitle colA (__ "Description")
treeViewColumnSetSizing colA TreeViewColumnAutosize
treeViewColumnSetResizable colA True
treeViewColumnSetReorderable colA True
treeViewAppendColumn treeView colA
cellLayoutPackStart colA rendererA False
cellLayoutSetAttributes colA rendererA errorStore
$ \row -> [cellText := removeIndentation (refDescription (logRef row)),
cellTextBackground := background row ]
rendererB <- cellRendererTextNew
colB <- treeViewColumnNew
treeViewColumnSetTitle colB (__ "Location")
treeViewColumnSetSizing colB TreeViewColumnAutosize
treeViewColumnSetResizable colB True
treeViewColumnSetReorderable colB True
treeViewAppendColumn treeView colB
cellLayoutPackStart colB rendererB False
cellLayoutSetAttributes colB rendererB errorStore
$ \row -> [ cellText := showSourceSpan (logRef row),
cellTextBackground := background row ]
treeViewSetHeadersVisible treeView False
selB <- treeViewGetSelection treeView
treeSelectionSetMode selB SelectionMultiple
scrolledView <- scrolledWindowNew Nothing Nothing
scrolledWindowSetShadowType scrolledView ShadowIn
containerAdd scrolledView treeView
scrolledWindowSetPolicy scrolledView PolicyAutomatic PolicyAutomatic
autoClose <- newIORef False
let pane = IDEErrors {..}
cid1 <- after treeView focusInEvent $ do
liftIO $ reflectIDE (makeActive pane) ideR
return True
(cid2, cid3) <- treeViewContextMenu treeView $ errorsContextMenu ideR errorStore treeView
cid4 <- treeView `on` rowActivated $ errorsSelect ideR errorStore
reflectIDE (fillErrorList' pane) ideR
return (Just pane, map ConnectC [cid1, cid2, cid3, cid4])
-- | Removes the unnecessary indentation
removeIndentation :: Text -> Text
removeIndentation t = T.intercalate "\n" $ map (T.drop minIndent) l
where
l = T.lines t
minIndent = minimum $ map (T.length . T.takeWhile (== ' ')) l
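-- For example, removeIndentation "    foo\n      bar" is "foo\n  bar".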
-- | Get the Errors pane
getErrors :: Maybe PanePath -> IDEM IDEErrors
getErrors Nothing = forceGetPane (Right "*Errors")
getErrors (Just pp) = forceGetPane (Left pp)
-- | Repopulates the Errors pane
fillErrorList :: Bool -- ^ Whether to display the Errors pane
-> IDEAction
fillErrorList False = getPane >>= maybe (return ()) fillErrorList'
fillErrorList True = getErrors Nothing >>= \ p -> fillErrorList' p >> displayPane p False
-- | Fills the pane with the error list from the IDE state
fillErrorList' :: IDEErrors -> IDEAction
fillErrorList' pane = do
refs <- readIDE errorRefs
ac <- liftIO $ readIORef (autoClose pane)
when (Seq.null refs && ac) . void $ closePane pane
isDark <- getDarkState
liftIO $ do
let store = errorStore pane
listStoreClear store
forM_ (zip (F.toList refs) [0..]) $ \ (lr, index) ->
listStoreInsert store index $ ErrColumn lr (
(if even index then fst else snd) $
(if isDark then fst else snd) $
case logRefType lr of
WarningRef -> (("#282000", "#201900"), ("#FFF1DE", "#FFF5E8"))
LintRef -> (("#003000", "#002800"), ("#DBFFDB", "#EDFFED"))
_ -> (("#380000", "#280000"), ("#FFDEDE", "#FFEBEB")))
-- | Add any LogRef to the Errors pane at a given index
addErrorToList :: Bool -- ^ Whether to display the pane
-> Int -- ^ The index to insert at
-> LogRef
-> IDEAction
addErrorToList False index lr = getPane >>= maybe (return ()) (addErrorToList' index lr)
addErrorToList True index lr = getErrors Nothing >>= \ p -> addErrorToList' index lr p >> displayPane p False
-- | Add a 'LogRef' at a specific index to the Errors pane
addErrorToList' :: Int -> LogRef -> IDEErrors -> IDEAction
addErrorToList' index lr pane = do
-- refs <- readIDE errorRefs
ac <- liftIO $ readIORef (autoClose pane)
-- when (null refs && ac) . void $ closePane pane
isDark <- getDarkState
liftIO $ do
let store = errorStore pane
-- listStoreClear store
-- forM_ (zip (toList refs) [0..]) $ \ (lr, index) ->
listStoreInsert store index $ ErrColumn lr (
(if even index then fst else snd) $
(if isDark then fst else snd) $
case logRefType lr of
WarningRef -> (("#282000", "#201900"), ("#FFF1DE", "#FFF5E8"))
LintRef -> (("#003000", "#002800"), ("#DBFFDB", "#EDFFED"))
_ -> (("#380000", "#280000"), ("#FFDEDE", "#FFEBEB")))
-- | Get the currently selected error
getSelectedError :: TreeView
-> ListStore ErrColumn
-> IO (Maybe LogRef)
getSelectedError treeView store = do
treeSelection <- treeViewGetSelection treeView
paths <- treeSelectionGetSelectedRows treeSelection
case paths of
[a]:r -> do
val <- listStoreGetValue store a
return (Just (logRef val))
_ -> return Nothing
-- | Select an error in the Errors pane
selectError :: Maybe LogRef -- ^ When @Nothing@, the first error in the list is selected
-> IDEAction
selectError mbLogRef = do
(mbPane :: Maybe IDEErrors) <- getPane
errorRefs' <- readIDE errorRefs
errors <- getErrors Nothing
when (isNothing mbPane) $ do
liftIO $ writeIORef (autoClose errors) True
displayPane errors False
liftIO $ do
selection <- treeViewGetSelection (treeView errors)
case mbLogRef of
Nothing -> do
size <- listStoreGetSize (errorStore errors)
unless (size == 0) $
treeViewScrollToCell (treeView errors) (Just [0]) Nothing Nothing
treeSelectionUnselectAll selection
Just lr -> case lr `Seq.elemIndexL` errorRefs' of
Nothing -> return ()
Just ind -> do
treeViewScrollToCell (treeView errors) (Just [ind]) Nothing Nothing
treeSelectionSelectPath selection [ind]
-- | Constructs the context menu for the Errors pane
errorsContextMenu :: IDERef
-> ListStore ErrColumn
-> TreeView
-> Menu
-> IO ()
errorsContextMenu ideR store treeView theMenu = do
mbSel <- getSelectedError treeView store
item0 <- menuItemNewWithLabel (__ "Resolve Errors")
item0 `on` menuItemActivate $ reflectIDE resolveErrors ideR
menuShellAppend theMenu item0
case mbSel of
Just sel -> addResolveMenuItems ideR theMenu sel
Nothing -> return ()
-- | Highlight an error referred to by the 'TreePath' in the given 'TreeViewColumn'
errorsSelect :: IDERef
-> ListStore ErrColumn
-> TreePath
-> TreeViewColumn
-> IO ()
errorsSelect ideR store [index] _ = do
ref <- listStoreGetValue store index
reflectIDE (setCurrentError (Just (logRef ref))) ideR
errorsSelect _ _ _ _ = return ()
-- | Select the matching errors for a 'SrcSpan' in the Errors
-- pane, or none at all
selectMatchingErrors :: Maybe SrcSpan -- ^ When @Nothing@, unselects any errors in the pane
-> IDEM ()
selectMatchingErrors mbSpan = do
mbErrors <- getPane
case mbErrors of
Nothing -> return ()
Just pane ->
liftIO $ do
treeSel <- treeViewGetSelection (treeView pane)
case mbSpan of
Nothing -> treeSelectionUnselectAll treeSel
Just (SrcSpan file lStart cStart lEnd cEnd) -> do
size <- listStoreGetSize (errorStore pane)
foldM_ (\ haveScrolled n -> do
mbIter <- treeModelGetIter (errorStore pane) [n]
case mbIter of
Nothing -> return False
Just iter -> do
ErrColumn {logRef = ref@LogRef{..}} <- listStoreGetValue (errorStore pane) n
isSelected <- treeSelectionIterIsSelected treeSel iter
let shouldBeSel = file == logRefFullFilePath ref
&& (lStart, cStart) <= (srcSpanEndLine logRefSrcSpan,
srcSpanEndColumn logRefSrcSpan)
&& (lEnd, cEnd) >= (srcSpanStartLine logRefSrcSpan,
srcSpanStartColumn logRefSrcSpan)
when (isSelected && not shouldBeSel) $ treeSelectionUnselectIter treeSel iter
when (not isSelected && shouldBeSel) $ do
unless haveScrolled $ treeViewScrollToCell (treeView pane) (Just [n]) Nothing Nothing
treeSelectionSelectIter treeSel iter
return $ haveScrolled || shouldBeSel)
False (take size [0..])
|
cocreature/leksah
|
src/IDE/Pane/Errors.hs
|
gpl-2.0
| 12,287
| 0
| 36
| 3,764
| 2,839
| 1,426
| 1,413
| 228
| 5
|
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) 2010-2015 Puneeth Chaganti <punchagan@gmail.com>
and John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Writers.Org
Copyright : Copyright (C) 2010-2015 Puneeth Chaganti and John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : Puneeth Chaganti <punchagan@gmail.com>
Stability : alpha
Portability : portable
Conversion of 'Pandoc' documents to Emacs Org-Mode.
Org-Mode: <http://orgmode.org>
-}
module Text.Pandoc.Writers.Org ( writeOrg) where
import Text.Pandoc.Definition
import Text.Pandoc.Options
import Text.Pandoc.Shared
import Text.Pandoc.Writers.Shared
import Text.Pandoc.Pretty
import Text.Pandoc.Templates (renderTemplate')
import Data.List ( intersect, intersperse, transpose )
import Control.Monad.State
data WriterState =
WriterState { stNotes :: [[Block]]
, stLinks :: Bool
, stImages :: Bool
, stHasMath :: Bool
, stOptions :: WriterOptions
}
-- | Convert Pandoc to Org.
writeOrg :: WriterOptions -> Pandoc -> String
writeOrg opts document =
let st = WriterState { stNotes = [], stLinks = False,
stImages = False, stHasMath = False,
stOptions = opts }
in evalState (pandocToOrg document) st
-- | Return Org representation of document.
pandocToOrg :: Pandoc -> State WriterState String
pandocToOrg (Pandoc meta blocks) = do
opts <- liftM stOptions get
let colwidth = if writerWrapText opts == WrapAuto
then Just $ writerColumns opts
else Nothing
metadata <- metaToJSON opts
(fmap (render colwidth) . blockListToOrg)
(fmap (render colwidth) . inlineListToOrg)
meta
body <- blockListToOrg blocks
notes <- liftM (reverse . stNotes) get >>= notesToOrg
-- note that the notes may contain refs, so we do them first
hasMath <- liftM stHasMath get
let main = render colwidth $ foldl ($+$) empty $ [body, notes]
let context = defField "body" main
$ defField "math" hasMath
$ metadata
if writerStandalone opts
then return $ renderTemplate' (writerTemplate opts) context
else return main
-- | Return Org representation of notes.
notesToOrg :: [[Block]] -> State WriterState Doc
notesToOrg notes =
mapM (\(num, note) -> noteToOrg num note) (zip [1..] notes) >>=
return . vsep
-- | Return Org representation of a note.
noteToOrg :: Int -> [Block] -> State WriterState Doc
noteToOrg num note = do
contents <- blockListToOrg note
let marker = "[" ++ show num ++ "] "
return $ hang (length marker) (text marker) contents
-- | Escape special characters for Org.
escapeString :: String -> String
escapeString = escapeStringUsing $
[ ('\x2014',"---")
, ('\x2013',"--")
, ('\x2019',"'")
, ('\x2026',"...")
] ++ backslashEscapes "^_"
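-- For example, escapeString "a_b^2" gives "a\_b\^2", and Unicode en and em dashes
-- become "--" and "---" respectively.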
isRawFormat :: Format -> Bool
isRawFormat f =
f == Format "latex" || f == Format "tex" || f == Format "org"
-- | Convert Pandoc block element to Org.
blockToOrg :: Block -- ^ Block element
-> State WriterState Doc
blockToOrg Null = return empty
blockToOrg (Div attrs bs) = do
contents <- blockListToOrg bs
let startTag = tagWithAttrs "div" attrs
let endTag = text "</div>"
return $ blankline $$ "#+BEGIN_HTML" $$
nest 2 startTag $$ "#+END_HTML" $$ blankline $$
contents $$ blankline $$ "#+BEGIN_HTML" $$
nest 2 endTag $$ "#+END_HTML" $$ blankline
blockToOrg (Plain inlines) = inlineListToOrg inlines
-- title beginning with fig: indicates that the image is a figure
blockToOrg (Para [Image attr txt (src,'f':'i':'g':':':tit)]) = do
capt <- if null txt
then return empty
else (\c -> "#+CAPTION: " <> c <> blankline) `fmap`
inlineListToOrg txt
img <- inlineToOrg (Image attr txt (src,tit))
return $ capt <> img
blockToOrg (Para inlines) = do
contents <- inlineListToOrg inlines
return $ contents <> blankline
blockToOrg (RawBlock "html" str) =
return $ blankline $$ "#+BEGIN_HTML" $$
nest 2 (text str) $$ "#+END_HTML" $$ blankline
blockToOrg (RawBlock f str) | isRawFormat f =
return $ text str
blockToOrg (RawBlock _ _) = return empty
blockToOrg HorizontalRule = return $ blankline $$ "--------------" $$ blankline
blockToOrg (Header level _ inlines) = do
contents <- inlineListToOrg inlines
let headerStr = text $ if level > 999 then " " else replicate level '*'
return $ headerStr <> " " <> contents <> blankline
blockToOrg (CodeBlock (_,classes,_) str) = do
opts <- stOptions <$> get
let tabstop = writerTabStop opts
let at = classes `intersect` ["asymptote", "C", "clojure", "css", "ditaa",
"dot", "emacs-lisp", "gnuplot", "haskell", "js", "latex",
"ledger", "lisp", "matlab", "mscgen", "ocaml", "octave",
"oz", "perl", "plantuml", "python", "R", "ruby", "sass",
"scheme", "screen", "sh", "sql", "sqlite"]
let (beg, end) = case at of
[] -> ("#+BEGIN_EXAMPLE", "#+END_EXAMPLE")
(x:_) -> ("#+BEGIN_SRC " ++ x, "#+END_SRC")
return $ text beg $$ nest tabstop (text str) $$ text end $$ blankline
blockToOrg (BlockQuote blocks) = do
contents <- blockListToOrg blocks
return $ blankline $$ "#+BEGIN_QUOTE" $$
nest 2 contents $$ "#+END_QUOTE" $$ blankline
blockToOrg (Table caption' _ _ headers rows) = do
caption'' <- inlineListToOrg caption'
let caption = if null caption'
then empty
else ("#+CAPTION: " <> caption'')
headers' <- mapM blockListToOrg headers
rawRows <- mapM (mapM blockListToOrg) rows
let numChars = maximum . map offset
-- FIXME: width is not being used.
let widthsInChars =
map ((+2) . numChars) $ transpose (headers' : rawRows)
-- FIXME: Org doesn't allow blocks with height more than 1.
let hpipeBlocks blocks = hcat [beg, middle, end]
where h = maximum (1 : map height blocks)
sep' = lblock 3 $ vcat (map text $ replicate h " | ")
beg = lblock 2 $ vcat (map text $ replicate h "| ")
end = lblock 2 $ vcat (map text $ replicate h " |")
middle = hcat $ intersperse sep' blocks
let makeRow = hpipeBlocks . zipWith lblock widthsInChars
let head' = makeRow headers'
rows' <- mapM (\row -> do cols <- mapM blockListToOrg row
return $ makeRow cols) rows
let border ch = char '|' <> char ch <>
(hcat $ intersperse (char ch <> char '+' <> char ch) $
map (\l -> text $ replicate l ch) widthsInChars) <>
char ch <> char '|'
let body = vcat rows'
let head'' = if all null headers
then empty
else head' $$ border '-'
return $ head'' $$ body $$ caption $$ blankline
blockToOrg (BulletList items) = do
contents <- mapM bulletListItemToOrg items
-- ensure that sublists have preceding blank line
return $ blankline $+$ vcat contents $$ blankline
blockToOrg (OrderedList (start, _, delim) items) = do
let delim' = case delim of
TwoParens -> OneParen
x -> x
let markers = take (length items) $ orderedListMarkers
(start, Decimal, delim')
let maxMarkerLength = maximum $ map length markers
let markers' = map (\m -> let s = maxMarkerLength - length m
in m ++ replicate s ' ') markers
contents <- mapM (\(item, num) -> orderedListItemToOrg item num) $
zip markers' items
-- ensure that sublists have preceding blank line
return $ blankline $$ vcat contents $$ blankline
blockToOrg (DefinitionList items) = do
contents <- mapM definitionListItemToOrg items
return $ vcat contents $$ blankline
-- | Convert bullet list item (list of blocks) to Org.
bulletListItemToOrg :: [Block] -> State WriterState Doc
bulletListItemToOrg items = do
contents <- blockListToOrg items
return $ hang 3 "- " (contents <> cr)
-- | Convert ordered list item (a list of blocks) to Org.
orderedListItemToOrg :: String -- ^ marker for list item
-> [Block] -- ^ list item (list of blocks)
-> State WriterState Doc
orderedListItemToOrg marker items = do
contents <- blockListToOrg items
return $ hang (length marker + 1) (text marker <> space) (contents <> cr)
-- | Convert definition list item (label, list of blocks) to Org.
definitionListItemToOrg :: ([Inline], [[Block]]) -> State WriterState Doc
definitionListItemToOrg (label, defs) = do
label' <- inlineListToOrg label
contents <- liftM vcat $ mapM blockListToOrg defs
return $ hang 3 "- " $ label' <> " :: " <> (contents <> cr)
-- | Convert list of Pandoc block elements to Org.
blockListToOrg :: [Block] -- ^ List of block elements
-> State WriterState Doc
blockListToOrg blocks = mapM blockToOrg blocks >>= return . vcat
-- | Convert list of Pandoc inline elements to Org.
inlineListToOrg :: [Inline] -> State WriterState Doc
inlineListToOrg lst = mapM inlineToOrg lst >>= return . hcat
-- | Convert Pandoc inline element to Org.
inlineToOrg :: Inline -> State WriterState Doc
inlineToOrg (Span (uid, [], []) []) =
return $ "<<" <> text uid <> ">>"
inlineToOrg (Span _ lst) =
inlineListToOrg lst
inlineToOrg (Emph lst) = do
contents <- inlineListToOrg lst
return $ "/" <> contents <> "/"
inlineToOrg (Strong lst) = do
contents <- inlineListToOrg lst
return $ "*" <> contents <> "*"
inlineToOrg (Strikeout lst) = do
contents <- inlineListToOrg lst
return $ "+" <> contents <> "+"
inlineToOrg (Superscript lst) = do
contents <- inlineListToOrg lst
return $ "^{" <> contents <> "}"
inlineToOrg (Subscript lst) = do
contents <- inlineListToOrg lst
return $ "_{" <> contents <> "}"
inlineToOrg (SmallCaps lst) = inlineListToOrg lst
inlineToOrg (Quoted SingleQuote lst) = do
contents <- inlineListToOrg lst
return $ "'" <> contents <> "'"
inlineToOrg (Quoted DoubleQuote lst) = do
contents <- inlineListToOrg lst
return $ "\"" <> contents <> "\""
inlineToOrg (Cite _ lst) = inlineListToOrg lst
inlineToOrg (Code _ str) = return $ "=" <> text str <> "="
inlineToOrg (Str str) = return $ text $ escapeString str
inlineToOrg (Math t str) = do
modify $ \st -> st{ stHasMath = True }
return $ if t == InlineMath
then "$" <> text str <> "$"
else "$$" <> text str <> "$$"
inlineToOrg (RawInline f str) | isRawFormat f =
return $ text str
inlineToOrg (RawInline _ _) = return empty
inlineToOrg (LineBreak) = return (text "\\\\" <> cr)
inlineToOrg Space = return space
inlineToOrg SoftBreak = do
wrapText <- gets (writerWrapText . stOptions)
case wrapText of
WrapPreserve -> return cr
WrapAuto -> return space
WrapNone -> return space
inlineToOrg (Link _ txt (src, _)) = do
case txt of
[Str x] | escapeURI x == src -> -- autolink
do modify $ \s -> s{ stLinks = True }
return $ "[[" <> text x <> "]]"
_ -> do contents <- inlineListToOrg txt
modify $ \s -> s{ stLinks = True }
return $ "[[" <> text src <> "][" <> contents <> "]]"
inlineToOrg (Image _ _ (source, _)) = do
modify $ \s -> s{ stImages = True }
return $ "[[" <> text source <> "]]"
inlineToOrg (Note contents) = do
-- add to notes in state
notes <- get >>= (return . stNotes)
modify $ \st -> st { stNotes = contents:notes }
let ref = show $ (length notes) + 1
return $ " [" <> text ref <> "]"
|
janschulz/pandoc
|
src/Text/Pandoc/Writers/Org.hs
|
gpl-2.0
| 12,500
| 3
| 20
| 3,291
| 3,669
| 1,830
| 1,839
| 239
| 7
|
{-# LANGUAGE OverloadedStrings #-}
module NetworkAccessor where
import Network.HTTP.Client
import Network.HTTP.Simple
import Network.HTTP.Types.Header
import Data.ByteString.Lazy.Char8 as C8
-- | Fetch the given URL with a fixed User-Agent header and return the
-- response body as a 'String'.
getResponseJSON :: String -> IO String
getResponseJSON url = do
  req <- parseRequest url
  resp <- httpLBS (req {requestHeaders = [(hUserAgent, "Haskell")]})
  return $ C8.unpack $ responseBody resp
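-- Usage sketch (the URL below is hypothetical):
--
-- > main :: IO ()
-- > main = getResponseJSON "https://example.com/data.json" >>= putStrLn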
|
ivanmoore/seedy
|
src/NetworkAccessor.hs
|
gpl-3.0
| 421
| 0
| 13
| 67
| 119
| 66
| 53
| 12
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Data.Packed.Static.Shapes
-- Copyright : (c) Reiner Pope 2008
-- License : GPL-style
--
-- Maintainer : Reiner Pope <reiner.pope@gmail.com>
-- Stability : experimental
-- Portability : portable
--
-- Shape-based functionality, common for matrices and vectors
--
-----------------------------------------------------------------------------
{-# LANGUAGE UndecidableInstances #-}
module Data.Packed.Static.Shapes (
Unknown,
ShapedContainer(..),
atShape,
shapeOf,
forgetShapeU,
unsafeWrap,
) where
import qualified Numeric.LinearAlgebra as H
-- | Uninhabited type. Represents unknown lengths.
-- Instances of 'ShapedContainer' use 'Unknown'
-- for the 'UnknownShape' type.
data Unknown
class ShapedContainer a where
-- | Less-typed, hmatrix representation
type Unwrapped a :: * -> *
-- | Convert to hmatrix representation
unWrap :: a s t -> Unwrapped a t
-- | Convert from hmatrix representation
wrapU :: Unwrapped a t -> a (UnknownShape a) t
-- | standard \'unknown\' shape. For vectors, @Unknown@; for matrices, @(Unknown,Unknown)@.
type UnknownShape a
-- | Coerce the static shape. Unsafe; the user
-- of this function has an obligation to prove that
-- the object's dynamic shape is the same as that
-- represented by s'.
unsafeReshape :: a s t -> a s' t
-- | For type hints.
--
-- @\> constant (5::Double) `atShape` d4
-- [$vec| 5.0, 5.0, 5.0, 5.0 |] :: Vector D4 Double@
--
-- Implementation:
--
-- @atShape = const@.
atShape :: a s t -> s -> a s t
atShape = const
-- | For type hints.
--
-- @\> constant (5::Double) `atShape` shapeOf [$vec|1|]
-- [$vec| 5.0 |]@
--
-- Implementation:
--
-- @shapeOf _ = undefined@
shapeOf :: a s t -> s
shapeOf _ = undefined
-- | @unsafeWrap = unsafeReshape . wrapU@.
unsafeWrap :: ShapedContainer a => Unwrapped a t -> a s t
unsafeWrap = unsafeReshape . wrapU
-- | Changes the static shape to the UnknownShape.
-- Dynamic representation is unchanged.
forgetShapeU :: ShapedContainer a => a s t -> a (UnknownShape a) t
forgetShapeU = unsafeReshape
------- instances
liftH f = unsafeWrap . f . unWrap
liftH2 f a b = unsafeWrap $ f (unWrap a) (unWrap b)
liftH2' f a b = f (unWrap a) (unWrap b)
instance (ShapedContainer a, H.Container (Unwrapped a) e) => H.Container (a n) e where
toComplex = uncurry $ liftH2 $ curry H.toComplex
fromComplex m = let (a,b) = H.fromComplex $ unWrap m in (unsafeWrap a, unsafeWrap b)
comp = liftH H.comp
conj = liftH H.conj
real = liftH H.real
complex = liftH H.complex
instance (ShapedContainer a, H.Linear (Unwrapped a) e) => H.Linear (a n) e where
scale e = liftH (H.scale e)
addConstant e = liftH (H.addConstant e)
add = liftH2 H.add
sub = liftH2 H.sub
mul = liftH2 H.mul
divide = liftH2 H.divide
scaleRecip e = liftH (H.scaleRecip e)
equal = liftH2' H.equal
instance (ShapedContainer a, Eq (Unwrapped a t)) => Eq (a s t) where
(==) = liftH2' (==)
instance (ShapedContainer a, Show (a n e), Num (Unwrapped a e)) => Num (a n e) where
(+) = liftH2 (+)
(*) = liftH2 (*)
(-) = liftH2 (-)
negate = liftH negate
abs = liftH abs
signum = liftH signum
fromInteger = error "fromInteger: Data.Packed.Static.Common"
instance (ShapedContainer a, Show (a n e), Fractional (Unwrapped a e)) => Fractional (a n e) where
(/) = liftH2 (/)
recip = liftH recip
fromRational = error "fromRational: Data.Packed.Static.Common"
instance (ShapedContainer a, Show (a n e), Floating (Unwrapped a e)) => Floating (a n e) where
pi = error "pi: Data.Packed.Static.Common"
exp = liftH exp
sqrt = liftH sqrt
log = liftH log
(**) = liftH2 (**)
logBase = liftH2 logBase
sin = liftH sin
tan = liftH tan
cos = liftH cos
asin = liftH asin
atan = liftH atan
acos = liftH acos
sinh = liftH sinh
tanh = liftH tanh
cosh = liftH cosh
asinh = liftH asinh
atanh = liftH atanh
acosh = liftH acosh
instance (ShapedContainer a, H.Normed (Unwrapped a e)) => H.Normed (a n e) where
pnorm p = H.pnorm p . unWrap
|
reinerp/hmatrix-static
|
Data/Packed/Static/Shapes.hs
|
gpl-3.0
| 4,338
| 0
| 11
| 1,072
| 1,188
| 637
| 551
| -1
| -1
|
module Hkl.Transformation
( Transformation (..)
, apply
, unapply
)where
{-
Copyright : Copyright (C) 2014-2015 Synchrotron Soleil
License : GPL3+
Maintainer : picca@synchrotron-soleil.fr
Stability : Experimental
Portability: GHC only?
-}
import Prelude hiding (sqrt, sin, cos, (+), (-), (*), (**), (/))
import qualified Prelude
import Numeric.LinearAlgebra (fromLists, Vector, Matrix,
ident, scalar, fromList,
(@>), (<>), inv)
import Numeric.Units.Dimensional.Prelude (_0, (-), (/~),
Angle, sin, cos, one)
import Hkl.Lattice
-- A Transformation which can be applied to a Vector of Double
data Transformation = NoTransformation -- Doesn't transform the vector at all
| Rotation [Double] (Angle Double)
| UB Lattice
| Holder [Transformation]
crossprod :: Vector Double -> Matrix Double
crossprod axis = fromLists [[ 0, -z, y],
[ z, 0, -x],
[-y, x, 0]]
where
x = axis @> 0
y = axis @> 1
z = axis @> 2
-- apply a transformation
apply :: Transformation -> Vector Double -> Vector Double
apply NoTransformation v = v
apply (Rotation axis angle) v = (ident 3 Prelude.+ s Prelude.* q Prelude.+ c Prelude.* (q <> q)) <> v
where
ax = fromList axis
c = scalar (1 Prelude.- cos angle /~ one)
s = scalar (sin angle /~ one)
q = crossprod ax
apply (UB lattice) v = busing lattice <> v
apply (Holder t) v = foldr apply v t
-- unapply a transformation
unapply :: Vector Double -> Transformation -> Vector Double
unapply v NoTransformation = v
unapply v (Rotation axis angle) = apply (Rotation axis (_0 - angle)) v
unapply v (UB lattice) = inv (busing lattice) <> v
unapply v (Holder t) = foldl unapply v t
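-- Example (illustrative; '*~' and 'degree' come from the dimensional
-- package and are not imported above):
--
-- > apply (Rotation [0, 0, 1] (90 *~ degree)) (fromList [1, 0, 0])
--
-- rotates the x axis about z and yields (approximately) the y axis
-- fromList [0.0, 1.0, 0.0].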
|
klauer/hkl
|
contrib/Hkl/Transformation.hs
|
gpl-3.0
| 1,922
| 2
| 11
| 604
| 593
| 335
| 258
| 37
| 1
|
import Test.QuickCheck
import Data.List
data NestedList a = Element a | List [NestedList a] deriving (Show)
myFlatten :: NestedList a -> [a]
myFlatten (Element a) = [a]
myFlatten (List []) = []
myFlatten (List x) = foldl1' (++) (map myFlatten x)
testMyFlatten :: [Int] -> Bool
testMyFlatten x =
(length (myFlatten (List [])) == 0)
&& (myFlatten (List (map Element x)) == x)
&& (myFlatten
(List [Element 1, List [Element 2,
List [Element 3, Element 4], Element 5]])
== [1,2,3,4,5])
main = quickCheck testMyFlatten
|
CmdrMoozy/haskell99
|
007.hs
|
gpl-3.0
| 527
| 10
| 15
| 98
| 283
| 148
| 135
| 16
| 1
|
module CCTK.Code.Arithmetic (
encode,
decode,
encodeFixed,
decodeFixed
) where
import Control.Applicative
import Data.Array as A
import Data.List (scanl')
import qualified Data.Map.Strict as M
encodeFixed :: [a] -> Int -> Integer -> [a]
encodeFixed code = go [] where
go acc 0 _ = acc
go acc l x = let (q,r) = x `quotRem` size' in go ((cache ! fromInteger r) : acc) (l-1) q
cache = listArray (0, size-1) code
size = length code
size' = toInteger size
decodeFixed :: Ord a => [a] -> [a] -> Maybe Integer
decodeFixed code = go 0 where
go acc [] = Just acc
go acc (x:xs) = case M.lookup x cache of
Nothing -> Nothing
Just v -> go (acc*size + v) xs
cache = M.fromList (zip code [0..])
size = toInteger (length code)
encode :: [a] -> Integer -> [a]
encode code x = encodeFixed code level (x - base) where
(base, level) = last . takeWhile ((x >=) . fst) $ zip sums [0..]
sums = scanl' (+) 0 powers
powers = iterate (size*) 1
size = toInteger (length code)
decode :: Ord a => [a] -> [a] -> Maybe Integer
decode code xs = (base+) <$> decodeFixed code xs where
size = toInteger (length code)
level = toInteger (length xs)
base = geosum size level
geosum 1 n = fromIntegral n
geosum r n = (r^n - 1) `quot` (r - 1)
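-- Round-trip sanity check (illustrative; values worked out by hand):
--
-- > encode "ab" 3 == "aa"
-- > decode "ab" "aa" == Just 3
--
-- In general decode code (encode code n) == Just n for n >= 0, provided the
-- symbols of the code are pairwise distinct.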
|
maugier/cctk
|
src/CCTK/Code/Arithmetic.hs
|
gpl-3.0
| 1,312
| 0
| 14
| 342
| 624
| 332
| 292
| 37
| 3
|
module Src.Week6.TestHW06 where
import Data.Aeson
import qualified Data.Text as T
import qualified Data.ByteString.Lazy.Char8 as B
inputFile :: FilePath
inputFile = "Src/Week6/markets.json"
outputFile :: FilePath
outputFile = "Src/Week6/outMart.json"
|
urbanslug/cs194
|
Src/Week6/TestHW06.hs
|
gpl-3.0
| 256
| 0
| 4
| 32
| 50
| 34
| 16
| 8
| 1
|
{-# LANGUAGE ScopedTypeVariables, TypeSynonymInstances #-}
module Backup
where
import Prelude hiding (catch)
import Util (epoch, decode', expandUser, safeWriteFileWith)
import Config
import Supervisor
import Process
import Process.Stats (Message (..))
import qualified Process.Stats as Stats
import qualified Process.Log as Log
import qualified Channel as Ch
import qualified Process.KeyStore as KS
import qualified Process.HashStore as HS
import qualified Process.HashStoreTypes as HST
import qualified Process.BlobStore as BS
import qualified Process.External as Ext
import qualified Process.Index as Idx
import Data.ByteString.Char8 (pack, unpack)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as B8
import Data.ByteString (ByteString)
import System.FilePath
import System.Directory
import System.Posix.Files
import System.Unix.Directory (withTemporaryDirectory)
import System.Time
import System.Time.Utils (clockTimeToEpoch)
import System.Posix.Directory hiding (removeDirectory)
import qualified Stat as S
import System.Posix.Types
import System.IO
import qualified Codec.Archive.Tar as Tar
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Map (Map)
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import Control.Monad.Trans
import Control.Monad.State
import Control.Monad
import Control.Arrow (second, first)
import GHC.ST
import Data.BloomFilter
import Data.BloomFilter.Hash (cheapHashes, Hashable(..))
import Data.UUID
import Data.Conduit hiding (Stop)
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Extra as CE
import Data.Function
import Data.List hiding (group)
import Data.Maybe
import Data.Ord
import Crypto.Simple (newMasterKey, readMasterKey)
import Data.Serialize (Serialize(..), encode)
import qualified Data.Serialize as Ser
import qualified Data.BTree.BTree as T
import qualified Data.BTree.Types as Ty
import qualified Data.BTree.KVBackend.Files as Back
import qualified Data.BTree.KVBackend.CachedExternal as CachedExt
import qualified Data.BTree.Cache.STM as C
instance Serialize ClockTime where
put (TOD a b) = Ser.put (a, b)
get = do (a, b) <- Ser.get
return $ TOD a b
data Snapshot =
Snapshot
{ timestamp :: ClockTime
, reference :: ByteString}
deriving (Eq)
instance Serialize Snapshot where
put (Snapshot t r) = Ser.put (t, r)
get = do (t, r) <- Ser.get
return $ Snapshot t r
maxBlob = 2 * (1024 ^ 2)
localIndex dir = Idx.index dir $ Back.evalFilesKV dir
remoteIndex extCh dir =
Idx.index dir $ Back.evalFilesKV dir . CachedExt.evalCachedExternal extCh
backend statCh base mbbufdir = do
masterKey <- either error id `fmap` (readMasterKey $ base </> "conf" </> "key")
mod <- expandUser backendModule
let extP = Ext.external statCh masterKey mbbufdir mod $
defaultConfigP { name = "external",
channelSize = 10 }
return $ replicateB 5 extP -- TODO: parameterise
stats = return $ Stats.stats 100000 30 defaultConfigP { name = "stats" }
snapP base = localIndex (base </> "snap") $
defaultConfigP { name = "snapshots" }
init base = do
createDirectoryIfMissing True $ base </> "conf"
newMasterKey $ base </> "conf" </> "key"
listall base = do
with (snapP base) $ \ch -> do
xs <- sendReply ch $ Idx.ToList
forM_ (filter (not . B.isPrefixOf (pack "__") . fst) xs) $
\(repo, snaps :: Map.Map Int Snapshot) -> do
putStrLn $ unpack repo ++ ":"
forM_ (reverse $ Map.toList snaps) $ \(num, snap) ->
putStrLn $ " " ++ show num ++ ": " ++ show (timestamp snap)
recordSnapshot snapCh extCh name dir = do
tarball <- tar dir
now <- getClockTime
id <- encode `fmap` liftIO uuid
let snapshot = Snapshot now id
send extCh $ Ext.Put id tarball
send snapCh $ Idx.Modify_
(\_ m -> Map.insert ((1 :: Int) + (fst $ Map.findMax m)) snapshot m)
key
$ Map.singleton 1 snapshot
where
key = pack name
seal base = do
let pri = base </> "pri"
sec = base </> "sec"
snap = base </> "snap"
statP <- stats
with statP $ \statCh -> do
extP <- backend statCh base Nothing
with extP $ \extCh -> do
with (snapP base) $ \snapCh -> do
let record name repo = recordSnapshot snapCh extCh name $ sec </> repo
send statCh $ Say "Saving index"
goSnapshot statCh extCh sec "pidx" $ pri </> "idx"
send statCh $ ClearGoal
send statCh $ Say "Transferring tarballs"
-- Pri index
record "__pidx" "pidx"
-- Sec index
record "__sidx" "idx"
-- Seal
snapshots <- tar snap
send extCh $ Ext.Put (pack "snapshots") snapshots
recover base = do
let pri = base </> "pri"
sec = base </> "sec"
pidx = pri </> "idx"
pidxrepo = sec </> "pidx"
sidx = sec </> "idx"
snap = base </> "snap"
forM_ [pri, sec, snap] $ \dir -> do
ex <- doesDirectoryExist dir
when ex $ removeDirectoryRecursive dir
createDirectoryIfMissing True dir
withTemporaryDirectory "hindsight" $ \tmp -> do
statP <- stats
with statP $ \statCh -> do
extP <- backend statCh base $ Just tmp
with extP $ \extCh -> do
snapshots <- sendReply extCh $ Ext.Get $ pack "snapshots"
untar statCh snap snapshots
with (snapP base) $ \snapCh -> do
(mbsidx :: Maybe (Map Int Snapshot)) <- sendReply snapCh $ Idx.Lookup $ pack "__sidx"
let sidxid = maybe (error "No secondary index") (reference.snd.Map.findMax) mbsidx
sidxtar <- sendReply extCh $ Ext.Get sidxid
untar statCh sidx sidxtar
mbpidx <- sendReply snapCh $ Idx.Lookup $ pack "__pidx"
let pidxid = maybe (error "No primary index") (reference.snd.Map.findMax) mbpidx
pidxtar <- sendReply extCh $ Ext.Get pidxid
untar statCh pidxrepo pidxtar
send statCh $ Say "Downloading index"
goCheckout statCh extCh sec "pidx" Nothing pidx
snapshot base name path = do
let pri = base </> "pri"
sec = base </> "sec"
repo = name ++ "~head"
statP <- stats
with statP $ \statCh -> do
extP <- backend statCh base Nothing
with extP $ \extCh -> do
send statCh $ Say "Taking snapshot"
goSnapshot statCh extCh pri repo path
send statCh $ Say "Saving internal state"
goSnapshot statCh extCh sec repo $ pri </> repo
with (snapP base) $ \snapCh ->
recordSnapshot snapCh extCh name $ sec </> repo
tar dir = do
entries <- Tar.pack dir ["."]
-- putStrLn $ Tar.entryPath $ head entries
return $ B.concat $ BL.toChunks $ Tar.write entries
untar statCh path tarball = do
-- send statCh $ Say $ "Unpacking tarball to " ++ path
Tar.unpack path $ Tar.read $ BL.fromChunks [tarball]
list rec = inspect $ const $ goList rec
listdir = inspect $ const goListDir
search = inspect $ const goSearch
checkout rec noData base name version mbdir dest =
inspect (\e s k base repo mbdir ->
goCheckout' rec noData k s e base repo mbdir dest) base name version mbdir
deleteSnapshot base repo version = do
with (snapP base) $ \snapCh -> do
x <- sendReply snapCh $ Idx.Lookup key :: IO (Maybe (Map.Map Int Snapshot))
case x of
Nothing -> putStrLn $ "Not found: " ++ repo
Just m | version `Map.member` m ->
do send snapCh $ Idx.Insert key $ Map.delete version m
flushChannel snapCh
putStrLn $ "Deleted: " ++ repo ++ "~" ++ show version
| otherwise -> putStrLn "what"
where
key = pack repo
withCacheDirectory base f = do
let cache = base </> "cache"
createDirectoryIfMissing True cache
forkIO $ cleaner cache
f cache
where
cleaner cache = return ()
-- cleaner path = forever $ do threadDelay $ 5 * 10^6
-- all <- getDirectoryContents path
-- `catch` \(e :: IOError) -> return []
-- let fs = filter (not . (extSeparator `elem`)) all
-- mapM_ removeFile (map (path </>) fs)
-- `catch` \(e :: IOError) -> return ()
inspect go base name version mbdir = do
let pri = base </> "pri"
sec = base </> "sec"
snap = base </> "snap"
with (snapP base) $ \snapCh -> do
mbsnaps <- sendReply snapCh $ Idx.Lookup $ pack name :: IO (Maybe (Map.Map Int Snapshot))
case mbsnaps of
Nothing -> putStrLn $ "No snapshot named " ++ name
Just snaps -> do
if not (version `Map.member` snaps) || version < 0
then putStrLn $ "No such snapshot version"
else do
let snap = snaps Map.! version
snapref = reference snap
repo = name ++ "~" ++ show (clockTimeToEpoch $ timestamp snap)
withCacheDirectory base $ \tmp -> do
statP <- stats
with statP $ \statCh -> do
extP <- backend statCh base $ Just tmp
with extP $ \extCh -> do
initDir (sec </> repo) $ \d -> do
tarball <- sendReply extCh $ Ext.Get snapref
untar statCh d tarball
initDir (pri </> repo) $ \d -> do
goCheckout statCh extCh sec repo (Just "root") d
cache <- newMVar Set.empty
let hiP = localIndex (sec </> "idx") $
defaultConfigP { name = "hash index" }
kiP = localIndex (sec </> repo) $
defaultConfigP { name = "key index (" ++ repo ++ ")" }
bsP eCh = BS.blobStore "" maxBlob eCh $
defaultConfigP { name = "blobstore (" ++ repo ++ ")" }
hsP hCh bCh = HS.hashStore cache statCh hCh bCh $
defaultConfigP { name = "hashstore (" ++ repo ++ ")" }
ksP hCh bCh = KS.keyStore "" hCh bCh $
defaultConfigP { name = "keystore (" ++ repo ++ ")" }
with hiP $ \hiCh ->
with kiP $ \kiCh ->
with (ksP kiCh -|- hsP hiCh -|- bsP extCh) $ \ksCh -> do
let kiP = remoteIndex ksCh (pri </> repo) $
defaultConfigP { name = "key index (" ++ repo ++ ")" }
with kiP $ \(kiCh :: Channel KIMessage) -> do
go extCh statCh kiCh pri repo mbdir
where
initDir d k = do
ex <- doesDirectoryExist d
unless ex $ do
createDirectoryIfMissing True d
k d
goSnapshot statCh extCh base repo path = do
let idx = base </> "idx"
repodir = base </> repo
irollback = idx </> "rollback"
rrollback = repodir </> "rollback"
mapM_ (createDirectoryIfMissing True)
[idx, repodir]
(wsup, rsup) <- Ch.newUnboundedChannelP
let spawn' = spawn $ Just rsup
hiP = localIndex idx $
defaultConfigP { name = "hash index" }
hiCh <- spawn' hiP
HS.recover irollback statCh hiCh extCh -- Recover from crash if necessary
cache <- newMVar Set.empty
let kiP = localIndex repodir $
defaultConfigP { name = "key index (" ++ repo ++ ")" }
bsP eCh = BS.blobStore irollback maxBlob eCh $
defaultConfigP { name = "blobstore (" ++ repo ++ ")" }
hsP hCh bCh = HS.hashStore cache statCh hCh bCh $
defaultConfigP { name = "hashstore (" ++ repo ++ ")" }
ksP hCh bCh = KS.keyStore rrollback hCh bCh $
defaultConfigP { name = "keystore (" ++ repo ++ ")" }
kiCh <- spawn' kiP
KS.recover rrollback statCh kiCh hiCh
-- take "locks"
mapM_ (createDirectoryIfMissing True)
[irollback, rrollback]
-- let's do this
  ksCh <- spawn' $ replicateB 3 -- TODO: parameterise
(ksP kiCh -|- hsP hiCh -|- bsP extCh)
-- removeMissing kiCh
send statCh $ Say " Calculating size"
totSize <- runResourceT $ traverse statCh path $$ sumFileSize
send statCh $ Say " Transferring"
send statCh $ SetGoal $ fromIntegral totSize
-- start cleaner
pid <- forkIO $ rollbackCleaner rrollback ksCh kiCh hiCh
-- insert keys!
runResourceT $ traverse statCh path $= CE.group 1024 $$
CL.mapM_ (sendFiles rrollback ksCh)
-- stop cleaner
killThread pid
flush ksCh kiCh hiCh
-- release "locks"
mapM_ removeDirectoryRecursive
[irollback, rrollback]
Ch.sendP wsup Stop
where
flush ksCh kiCh hiCh = do
flushChannel ksCh -- Flush from chain to trees and externally
flushChannel kiCh -- Flush key tree to disk
flushChannel hiCh -- Flush hash tree to disk
rollbackCleaner dir ksCh kiCh hiCh = forever $ do
threadDelay $ fromIntegral $ (10^6) * flushInterval
go
where
go = do
now <- epoch
flush ksCh kiCh hiCh
files <- filter (not . (elem '.')) `fmap` getDirectoryContents dir
forM_ files $ \file -> do
let path = dir </> file
mt <- modificationTime `fmap` getFileStatus path
when (fromEnum mt < fromIntegral now) $ do
removeFile path
toKey = pack . makeRelative path
sendFiles dir ksCh lst = do
uid <- show `fmap` uuid
safeWriteFileWith id (dir </> uid) $ encode $ map (toKey.fst) lst
mapM_ (sendFile ksCh) lst
sendFile ksCh (file, stat) = do
let modtime = modificationTime stat
rep <- liftIO newEmptyTMVarIO
liftIO $ send ksCh $
KS.Insert
(Just $ encode $ fromEnum modtime)
(toKey file)
(encode `fmap` S.readPosixFile (Just stat) file)
(if isRegularFile stat then KS.File file
else KS.None)
rep
void $ liftIO $ forkIO $
do x <- atomically $ readTMVar rep
case x of
Left _ -> return ()
Right ins -> unless ins $ do
now <- getClockTime
send statCh $ Stats.Completed now $ fromIntegral $ fileSize stat
removeMissing kiCh = do
void $ join $ sendReply kiCh $ Idx.Mapi_ $ \file _ -> do
(void . getSymbolicLinkStatus) (path </> B8.unpack file)
`catch` \(e :: SomeException) -> do
send statCh $ SetMessage $ B8.unpack file
send kiCh $ Idx.Delete file
sumFileSize = CL.fold (\n (_, stat) -> n + fileSize stat) 0
traverse statCh path =
Source
{ sourcePull = do
stream <- liftIO $ openDirStream path
let state = [(path, stream)]
pull state
, sourceClose = return ()
}
where
src state = Source (pull state) (return ())
pull state = do
res <- pull0 state
return $ case res of
StateClosed -> Closed
StateOpen state' val -> Open (src state') val
pull0 [] = return StateClosed
pull0 all @ ((path, stream) : rest) = do
-- liftIO $ print path
entry <- liftIO $ readDirStream stream
case entry of
".." -> pull0 all
"." -> pull0 all
"" -> do
liftIO $ closeDirStream stream
pull0 rest
_ -> do
let path' = path </> entry
send statCh $ SetMessage path'
estat <- liftIO $ try $ getSymbolicLinkStatus path'
case estat of
Left (e :: SomeException) -> do
liftIO $ Log.warning "Traverse" $
"Skipping file: " ++ path ++ " -- " ++ show e
pull0 all
Right stat
| isRegularFile stat -> return $ StateOpen all (path', stat)
| isDirectory stat -> do
estream' <- liftIO $ try $ openDirStream path'
case estream' of
Left (e :: SomeException) -> do
liftIO $ Log.warning "Traverse" $
"Skipping dir: " ++ path' ++ " -- " ++ show e
pull0 all
Right stream' ->
return $ StateOpen ((path', stream') : all) (path', stat)
| isSymbolicLink stat -> return $ StateOpen all (path', stat)
| otherwise -> pull0 all
type KIMessage = Idx.Message ByteString (Maybe ByteString, HS.ID, [HS.ID])
goInspect recursive go mbterm kiCh = do
case mbterm of
Just x' -> do
if x /= "" then do
mbv <- sendReply kiCh $ Idx.Lookup key
case mbv of
Just v -> do
typ <- S.fileType `fmap` go (key, v)
when (typ == S.Directory) goDir
Nothing -> putStrLn "No such file or directory"
else goDir
where
goDir = do
kvs <- sendReply kiCh $ Idx.Search search
mapM_ go kvs
x = if length x' > 0 && last x' == '/'
then Data.List.init x'
else x'
search =
if recursive
then searchRec
else listDirSearch x
searchRec (min, max) =
min <= key' && key' <= max ||
key' `B.isPrefixOf` min
key = pack x
key' = if null x then B.empty else pack (x ++ "/")
Nothing -> do
kvs <- sendReply kiCh $ Idx.ToList
mapM_ go kvs
goCheckout statCh extCh base repo mbdir dest =
with (localIndex (base </> repo) defaultConfigP { name = "hash index" }) $ \kiCh ->
goCheckout' True False kiCh statCh extCh base repo mbdir dest
goCheckout' rec noData (kiCh :: Channel KIMessage) statCh extCh base repo mbterm dest = do
(wsup, rsup) <- Ch.newUnboundedChannelP
let spawn' = spawn $ Just rsup
hiP = localIndex (base </> "idx") $
defaultConfigP { name = "hash index" }
hiCh <- spawn' hiP
cache <- newMVar Set.empty
let bsP eCh = BS.blobStore "" maxBlob eCh $
defaultConfigP { name = "blobstore (" ++ repo ++ ")" }
hsP hCh bCh = HS.hashStore cache statCh hCh bCh $
defaultConfigP { name = "hashstore (" ++ repo ++ ")" }
-- ksP hCh bCh = KS.keyStore hCh bCh $
-- defaultConfigP { name = "keystore (" ++ repo ++ ")" }
  hsCh <- spawn' $ replicateB 3 -- TODO: parameterise
(hsP hiCh -|- bsP extCh)
restore dest kiCh hsCh
Ch.sendP wsup Stop
where
restore dest kiCh hsCh =
goInspect rec unpackOneSafe mbterm kiCh
where
unpackOneSafe x = do
ei <- try $ unpackOne x
case ei of
Left (e :: SomeException) -> do
Log.warning "Checkout" $ show e
return undefined
Right v -> return v
unpackOne (key, (_, metahash, hashes)) = do
hFlush stdout
let path = dest </> B8.unpack key
dir = takeDirectory path
      -- Create the dir and preserve its timestamps if it already exists
createDirectoryIfMissing True dir
dirStat <- S.readPosixFile Nothing dir
-- Get meta data
Just metachunk <- sendReply hsCh $ HS.Lookup metahash
let stat = decode' "Metachunk" metachunk :: S.PosixFile
-- Create posix file with correct permissions
case S.fileType stat of
S.File -> do
h <- openFile path WriteMode
unless noData $ do
forM_ hashes $ \hash -> do
Just chunk <- sendReply hsCh $ HS.Lookup hash
B.hPut h chunk
hClose h
when noData $ do
setFileSize path $ fileSize $
S.getFileStatus $ S.fileStatus stat
-- restore timestamps
S.updatePosixFile stat path
_ -> S.createPosixFile stat path
-- Restore timestamps of parent dir
S.updatePosixFile dirStat dir
send statCh $ SetMessage path
return stat
goList rec statCh kiCh base repo mbterm = do
kvs <-
case mbterm of
Just x ->
sendReply kiCh $ Idx.Search search
where
search (min, max) =
min <= key && key <= max || key `B.isPrefixOf` min
key = pack x
Nothing -> sendReply kiCh $ Idx.ToList
sendBlock statCh Quiet
forM_ (sort $ map fst kvs) $ \file ->
putStrLn $ B.unpack file
goListDir statCh kiCh base repo term = do
kvs <- sendReply kiCh $ Idx.Search $ listDirSearch term
sendBlock statCh Quiet
forM_ (sort $ map fst kvs) $ \file ->
putStrLn $ B.unpack file
-- Search predicate used to restrict the key index to entries directly
-- under 'term' (keys extending term ++ "/" with no further '/').
listDirSearch term = search
where
search (min, max)
| min == max = hasPrefix min && B.notElem '/' (stripPrefix min)
| min <= key && key <= max = True
| hasPrefix min = not (hasPrefix max && dir min == dir max)
| min > key = False
| max < key = False
| otherwise = error $ "min: " ++ show min ++ ", max: " ++ show max
stripPrefix = B.drop (B.length key)
hasPrefix = B.isPrefixOf key
dir = B.takeWhile (/= '/') . stripPrefix
key = if null term then B.empty else pack $ term ++ "/"
goSearch statCh kiCh base repo term = do
kvs <- sendReply kiCh $ Idx.Search $ \(min, max) ->
min /= max || (pack term `B.isInfixOf` min)
sendBlock statCh Quiet
forM_ (sort $ map fst kvs) $ \file ->
putStrLn $ B.unpack file
bloomStat base name version = do
let pri = base </> "pri"
sec = base </> "sec"
snap = base </> "snap"
with (snapP base) $ \snapCh -> do
mbsnaps <- sendReply snapCh $ Idx.Lookup $ pack name :: IO (Maybe (Map.Map Int Snapshot))
case mbsnaps of
Nothing -> putStrLn $ "No snapshot named " ++ name
Just snaps -> do
if not (version `Map.member` snaps) || version < 0
then putStrLn $ "No such snapshot version"
else do
let snap = snaps Map.! version
snapref = reference snap
repo = name ++ "~" ++ show (clockTimeToEpoch $ timestamp snap)
c <- C.sizedParam 128 $ Back.evalFilesKV $ pri </> repo
root <- decode' "bloomStat" `fmap` B.readFile (pri </> repo </> "root")
print root
p <- T.makeParam 128 (Just $ root) c
_ :: Maybe (Maybe ByteString, HST.ID, [HST.ID]) <- T.execTree p $ T.lookup B.empty
putStrLn "foldli"
let mbf = newMB (cheapHashes 10) (16 * 2^20)
ls <- map snd `fmap` (T.execTree p $ T.toList)
print $ length ls
let bf = runST $ unsafeFreezeMB =<<
(mbf >>= \bf -> mapM_ (insertMB bf . encode) ls >> return bf)
B.writeFile (pri </> repo </> "bloom") $
encode $ bitArrayB $ bf
|
br0ns/hindsight
|
src/Backup.hs
|
gpl-3.0
| 22,912
| 3
| 46
| 7,510
| 7,408
| 3,647
| 3,761
| 523
| 8
|
-- | Convenience functions to launch mintette.
module RSCoin.Mintette.Launcher
( ContextArgument (..)
, dumpStorageStatistics
, launchMintetteReal
, mintetteWrapperReal
, addToBank
) where
import Control.Monad.Catch (bracket)
import Control.Monad.Trans (MonadIO (liftIO))
import qualified Data.Text.IO as TIO
import Formatting (int, sformat, stext, (%))
import RSCoin.Core (ContextArgument (..), RealMode, SecretKey,
mintetteLoggerName, runRealModeUntrusted)
import qualified RSCoin.Core.Communication as CC
import RSCoin.Core.Types (Mintette)
import RSCoin.Mintette.Acidic (GetPeriodId (..), closeState, getStatistics,
openMemState, openState)
import RSCoin.Mintette.AcidState (State, query)
import RSCoin.Mintette.Env (RuntimeEnv)
import RSCoin.Mintette.Server (serve)
mintetteWrapperReal :: Bool
-> Maybe FilePath
-> ContextArgument
-> (State -> RealMode a)
-> IO a
mintetteWrapperReal deleteIfExists dbPath ca action = do
let openAction = maybe openMemState (openState deleteIfExists) dbPath
runRealModeUntrusted mintetteLoggerName ca . bracket openAction closeState $
action
launchMintetteReal
    :: Bool -> Int -> RuntimeEnv -> Maybe FilePath -> ContextArgument -> IO ()
launchMintetteReal deleteIfExists port env dbPath ctxArg =
mintetteWrapperReal deleteIfExists dbPath ctxArg $ \st -> serve port st env
addToBank :: ContextArgument -> SecretKey -> Mintette -> IO ()
addToBank ctxArg mintetteSK mintette = do
runRealModeUntrusted mintetteLoggerName ctxArg $ CC.addMintetteUsingPermission mintetteSK mintette
dumpStorageStatistics :: Bool -> FilePath -> ContextArgument -> IO ()
dumpStorageStatistics deleteIfExists dbPath ctxArg =
mintetteWrapperReal deleteIfExists (Just dbPath) ctxArg impl
where
impl st = do
pId <- query st GetPeriodId
liftIO . TIO.putStrLn .
sformat ("Storage statistics (period id is " % int % "):\n" % stext)
pId =<< getStatistics st
|
input-output-hk/rscoin-haskell
|
src/RSCoin/Mintette/Launcher.hs
|
gpl-3.0
| 2,325
| 0
| 15
| 685
| 522
| 286
| 236
| 43
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.Products.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Finds approved products that match a query, or all approved products if
-- there is no query.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.products.list@.
module Network.Google.Resource.AndroidEnterprise.Products.List
(
-- * REST Resource
ProductsListResource
-- * Creating a Request
, productsList
, ProductsList
-- * Request Lenses
, plXgafv
, plUploadProtocol
, plEnterpriseId
, plAccessToken
, plToken
, plUploadType
, plQuery
, plLanguage
, plApproved
, plMaxResults
, plCallback
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.products.list@ method which the
-- 'ProductsList' request conforms to.
type ProductsListResource =
"androidenterprise" :>
"v1" :>
"enterprises" :>
Capture "enterpriseId" Text :>
"products" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "token" Text :>
QueryParam "uploadType" Text :>
QueryParam "query" Text :>
QueryParam "language" Text :>
QueryParam "approved" Bool :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ProductsListResponse
-- | Finds approved products that match a query, or all approved products if
-- there is no query.
--
-- /See:/ 'productsList' smart constructor.
data ProductsList =
ProductsList'
{ _plXgafv :: !(Maybe Xgafv)
, _plUploadProtocol :: !(Maybe Text)
, _plEnterpriseId :: !Text
, _plAccessToken :: !(Maybe Text)
, _plToken :: !(Maybe Text)
, _plUploadType :: !(Maybe Text)
, _plQuery :: !(Maybe Text)
, _plLanguage :: !(Maybe Text)
, _plApproved :: !(Maybe Bool)
, _plMaxResults :: !(Maybe (Textual Word32))
, _plCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProductsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plXgafv'
--
-- * 'plUploadProtocol'
--
-- * 'plEnterpriseId'
--
-- * 'plAccessToken'
--
-- * 'plToken'
--
-- * 'plUploadType'
--
-- * 'plQuery'
--
-- * 'plLanguage'
--
-- * 'plApproved'
--
-- * 'plMaxResults'
--
-- * 'plCallback'
productsList
:: Text -- ^ 'plEnterpriseId'
-> ProductsList
productsList pPlEnterpriseId_ =
ProductsList'
{ _plXgafv = Nothing
, _plUploadProtocol = Nothing
, _plEnterpriseId = pPlEnterpriseId_
, _plAccessToken = Nothing
, _plToken = Nothing
, _plUploadType = Nothing
, _plQuery = Nothing
, _plLanguage = Nothing
, _plApproved = Nothing
, _plMaxResults = Nothing
, _plCallback = Nothing
}
-- | V1 error format.
plXgafv :: Lens' ProductsList (Maybe Xgafv)
plXgafv = lens _plXgafv (\ s a -> s{_plXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plUploadProtocol :: Lens' ProductsList (Maybe Text)
plUploadProtocol
= lens _plUploadProtocol
(\ s a -> s{_plUploadProtocol = a})
-- | The ID of the enterprise.
plEnterpriseId :: Lens' ProductsList Text
plEnterpriseId
= lens _plEnterpriseId
(\ s a -> s{_plEnterpriseId = a})
-- | OAuth access token.
plAccessToken :: Lens' ProductsList (Maybe Text)
plAccessToken
= lens _plAccessToken
(\ s a -> s{_plAccessToken = a})
-- | Defines the token of the page to return, usually taken from
-- TokenPagination. This can only be used if token paging is enabled.
plToken :: Lens' ProductsList (Maybe Text)
plToken = lens _plToken (\ s a -> s{_plToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plUploadType :: Lens' ProductsList (Maybe Text)
plUploadType
= lens _plUploadType (\ s a -> s{_plUploadType = a})
-- | The search query as typed in the Google Play store search box. If
-- omitted, all approved apps will be returned (using the pagination
-- parameters), including apps that are not available in the store (e.g.
-- unpublished apps).
plQuery :: Lens' ProductsList (Maybe Text)
plQuery = lens _plQuery (\ s a -> s{_plQuery = a})
-- | The BCP47 tag for the user\'s preferred language (e.g. \"en-US\",
-- \"de\"). Results are returned in the language best matching the
-- preferred language.
plLanguage :: Lens' ProductsList (Maybe Text)
plLanguage
= lens _plLanguage (\ s a -> s{_plLanguage = a})
-- | Specifies whether to search among all products (false) or among only
-- products that have been approved (true). Only \"true\" is supported, and
-- should be specified.
plApproved :: Lens' ProductsList (Maybe Bool)
plApproved
= lens _plApproved (\ s a -> s{_plApproved = a})
-- | Defines how many results the list operation should return. The default
-- number depends on the resource collection.
plMaxResults :: Lens' ProductsList (Maybe Word32)
plMaxResults
= lens _plMaxResults (\ s a -> s{_plMaxResults = a})
. mapping _Coerce
-- | JSONP
plCallback :: Lens' ProductsList (Maybe Text)
plCallback
= lens _plCallback (\ s a -> s{_plCallback = a})
instance GoogleRequest ProductsList where
type Rs ProductsList = ProductsListResponse
type Scopes ProductsList =
'["https://www.googleapis.com/auth/androidenterprise"]
requestClient ProductsList'{..}
= go _plEnterpriseId _plXgafv _plUploadProtocol
_plAccessToken
_plToken
_plUploadType
_plQuery
_plLanguage
_plApproved
_plMaxResults
_plCallback
(Just AltJSON)
androidEnterpriseService
where go
= buildClient (Proxy :: Proxy ProductsListResource)
mempty
|
brendanhay/gogol
|
gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/Products/List.hs
|
mpl-2.0
| 6,974
| 0
| 23
| 1,771
| 1,136
| 656
| 480
| 153
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.VideoFormats.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets one video format by ID.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.videoFormats.get@.
module Network.Google.Resource.DFAReporting.VideoFormats.Get
(
-- * REST Resource
VideoFormatsGetResource
-- * Creating a Request
, videoFormatsGet
, VideoFormatsGet
-- * Request Lenses
, vfgXgafv
, vfgUploadProtocol
, vfgAccessToken
, vfgUploadType
, vfgProFileId
, vfgId
, vfgCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.videoFormats.get@ method which the
-- 'VideoFormatsGet' request conforms to.
type VideoFormatsGetResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"videoFormats" :>
Capture "id" (Textual Int32) :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] VideoFormat
-- | Gets one video format by ID.
--
-- /See:/ 'videoFormatsGet' smart constructor.
data VideoFormatsGet =
VideoFormatsGet'
{ _vfgXgafv :: !(Maybe Xgafv)
, _vfgUploadProtocol :: !(Maybe Text)
, _vfgAccessToken :: !(Maybe Text)
, _vfgUploadType :: !(Maybe Text)
, _vfgProFileId :: !(Textual Int64)
, _vfgId :: !(Textual Int32)
, _vfgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'VideoFormatsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vfgXgafv'
--
-- * 'vfgUploadProtocol'
--
-- * 'vfgAccessToken'
--
-- * 'vfgUploadType'
--
-- * 'vfgProFileId'
--
-- * 'vfgId'
--
-- * 'vfgCallback'
videoFormatsGet
:: Int64 -- ^ 'vfgProFileId'
-> Int32 -- ^ 'vfgId'
-> VideoFormatsGet
videoFormatsGet pVfgProFileId_ pVfgId_ =
VideoFormatsGet'
{ _vfgXgafv = Nothing
, _vfgUploadProtocol = Nothing
, _vfgAccessToken = Nothing
, _vfgUploadType = Nothing
, _vfgProFileId = _Coerce # pVfgProFileId_
, _vfgId = _Coerce # pVfgId_
, _vfgCallback = Nothing
}
-- | V1 error format.
vfgXgafv :: Lens' VideoFormatsGet (Maybe Xgafv)
vfgXgafv = lens _vfgXgafv (\ s a -> s{_vfgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
vfgUploadProtocol :: Lens' VideoFormatsGet (Maybe Text)
vfgUploadProtocol
= lens _vfgUploadProtocol
(\ s a -> s{_vfgUploadProtocol = a})
-- | OAuth access token.
vfgAccessToken :: Lens' VideoFormatsGet (Maybe Text)
vfgAccessToken
= lens _vfgAccessToken
(\ s a -> s{_vfgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
vfgUploadType :: Lens' VideoFormatsGet (Maybe Text)
vfgUploadType
= lens _vfgUploadType
(\ s a -> s{_vfgUploadType = a})
-- | User profile ID associated with this request.
vfgProFileId :: Lens' VideoFormatsGet Int64
vfgProFileId
= lens _vfgProFileId (\ s a -> s{_vfgProFileId = a})
. _Coerce
-- | Video format ID.
vfgId :: Lens' VideoFormatsGet Int32
vfgId
= lens _vfgId (\ s a -> s{_vfgId = a}) . _Coerce
-- | JSONP
vfgCallback :: Lens' VideoFormatsGet (Maybe Text)
vfgCallback
= lens _vfgCallback (\ s a -> s{_vfgCallback = a})
instance GoogleRequest VideoFormatsGet where
type Rs VideoFormatsGet = VideoFormat
type Scopes VideoFormatsGet =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient VideoFormatsGet'{..}
= go _vfgProFileId _vfgId _vfgXgafv
_vfgUploadProtocol
_vfgAccessToken
_vfgUploadType
_vfgCallback
(Just AltJSON)
dFAReportingService
where go
= buildClient
(Proxy :: Proxy VideoFormatsGetResource)
mempty
|
brendanhay/gogol
|
gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/VideoFormats/Get.hs
|
mpl-2.0
| 4,963
| 0
| 19
| 1,227
| 821
| 474
| 347
| 116
| 1
|
--import MoofParse
--import MoofLexer
--import PostIndent
--import System.IO
--import IndentParse
--import System.Environment(getArgs)
--
--main = do
-- args <- getArgs
-- filestf <- readFile (args !! 0)
-- let tokens = moofScanTokens filestf
-- print tokens
-- print ""
-- print ""
--
-- let parse_tree = indentParse tokens
-- print parse_tree
--
--import MoofParse
import MoofLexer
import qualified PostIndent as PI
--import System.IO
--import qualified IndentParse as IP
--import Test.HUnit
file_list = [ "Test/Indent/func_Call.mf",
"Test/Indent/func.mf",
"Test/Indent/if.mf",
"Test/Indent/multiLine.mf"]
outputPretifier (PI.PToken _ str _) = str ++ " "
outputPretifier PI.L_Indent = "["
outputPretifier PI.R_Indent = "]"
outputPretifier PI.Endl = '\n' : []
main :: IO ()
main = mapM_ processFile file_list
  where
    processFile file_name = do
      print file_name
      filestf <- readFile file_name
      let outputStr1 = (file_name ++ "\n" ++ filestf)
      let tokens = moofScanTokens filestf
      let itoks = PI.moofIParse tokens
      case itoks of
        Right x -> putStrLn (concatMap outputPretifier x)
        Left y -> print y
|
mmath10/Moof
|
src/Main.hs
|
lgpl-3.0
| 1,191
| 0
| 18
| 265
| 260
| 139
| 121
| 22
| 2
|
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
-- * Tagless Typed Interpreters: extensibility
module ExtF where
import Intro2 as F hiding (main)
-- We extend the final representation of the language with a new
-- expression form: multiplication
class MulSYM repr where
mul :: repr -> repr -> repr
-- An extended sample expression
tfm1 = add (lit 7) (neg (mul (lit 1) (lit 2)))
-- We can even use a previously defined unextended expression (tf1)
-- as a part of the extended expression.
-- We can indeed mix-and-match
tfm2 = mul (lit 7) tf1
-- * //
-- We extend the specific interpreters thusly
instance MulSYM Int where
mul e1 e2 = e1 * e2
-- The definition of eval stays the same. Why?
-- * The extension _automatically_ kicks in
tfm1_eval = eval tfm1
-- 5
tfm2_eval = eval tfm2
-- 35
-- We `extend' the view interpreter just as well
instance MulSYM String where
mul e1 e2 = "(" ++ e1 ++ " * " ++ e2 ++ ")"
tfm1_view = view tfm1
-- "(7 + (-(1 * 2)))"
tfm2_view = view tfm2
-- "(7 * (8 + (-(1 + 2))))"
-- * //
-- We can put the original, unextended expressions (F.tf1 from Intro2.hs)
-- and the extended ones (which we have just defined)
-- into the same list
tfl1 = [F.tf1] -- old expression
tfl2 = tfm1 : tfm2 : tfl1 -- add extended expressions
-- The inferred type of tfl2 is insightful:
-- *ExtF> :t tfl1
-- tfl1 :: (ExpSYM repr) => [repr]
-- *ExtF> :t tfl2
-- tfl2 :: (ExpSYM repr, MulSYM repr) => [repr]
tfl2_eval = map eval tfl2
-- [5,35,5]
tfl2_view = map view tfl2
-- ["(7 + (-(1 * 2)))","(7 * (8 + (-(1 + 2))))","(8 + (-(1 + 2)))"]
main = do
print tfm1_eval
print tfm2_eval
print tfm1_view
print tfm2_view
print tfl2_eval
print tfl2_view
|
egaburov/funstuff
|
Haskell/tytag/codes/ExtF.hs
|
apache-2.0
| 1,780
| 0
| 11
| 396
| 298
| 164
| 134
| 27
| 1
|
-- | Utilities for stress testing DNA code
module DNA.Interpreter.Testing where
import Control.Monad
import Control.Monad.Reader
-- import Control.Monad.IO.Class
-- import Control.Concurrent (threadDelay)
import Control.Distributed.Process
-- import System.Random
import DNA.Interpreter.Types
import DNA.Types
import System.IO.Unsafe
import Data.IORef
{-
-- | Crash process
crashMaybe :: MonadProcess m => Double -> m ()
crashMaybe pCrash = do
roll <- liftIO randomIO
me <- liftP getSelfPid
when (roll < pCrash) $ do
liftIO $ threadDelay (0*1000)
liftIO $ putStrLn $ show me ++ " CRASH!"
error "Ooops crashed"
-}
-- | Global flag (created with 'unsafePerformIO') recording whether the crash
-- has already been triggered, so that 'crashMaybeWorker' fires at most once.
ref :: IORef Bool
ref = unsafePerformIO $ newIORef False
{-# NOINLINE ref #-}
-- | Crash process
crashMaybeWorker :: Double -> DnaMonad ()
crashMaybeWorker _ = do
n <- envRank `fmap` ask
f <- liftIO $ readIORef ref
me <- liftP getSelfPid
when (not f && n==0) $ do
liftIO $ writeIORef ref True
liftIO $ putStrLn $ show me ++ " CRASH!"
error "Ooops crashed"
|
SKA-ScienceDataProcessor/RC
|
MS3/lib/DNA/Interpreter/Testing.hs
|
apache-2.0
| 1,056
| 0
| 12
| 238
| 192
| 101
| 91
| 20
| 1
|
-- Copyright (c) 2010 - Seweryn Dynerowicz
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Algebra.Optimum
( llo1
, rlo1
, llo2
, rlo2
, lloStd
, rloStd
, lloMem
, rloMem
, lloStdDepth
, rloStdDepth
, lloMemDepth
, rloMemDepth
) where
import Algebra.Semiring
import Algebra.Matrix
data LatticeStep = Lt | Gt | Eq | Pa
instance Show LatticeStep where
show Lt = "<"
show Gt = ">"
show Eq = "≈"
show Pa = "|"
norChar :: (Semiring s) => s -> s -> LatticeStep
norChar a b | (nor a b) && (nor b a) = Eq
norChar a b | (nor a b) = Lt
norChar a b | (nor b a) = Gt
norChar a b | otherwise = Pa
-- Iterated solving of X = AX + B
llo1 :: (Semiring s) => s -> s -> s -> s
llo1 a b x = if (x == updated) then x else llo1 a b updated
where updated = add (mul a x) b
-- Iterated solving of X = XA + B
rlo1 :: (Semiring s) => s -> s -> s -> s
rlo1 a b x = if (x == updated) then x else rlo1 a b updated
where updated = add (mul x a) b
lloDepth1 :: (Semiring s) => s -> s -> s -> [LatticeStep]
lloDepth1 a b x = if (x == updated) then [] else (norChar updated x) : lloDepth1 a b updated
where updated = add (mul a x) b
rloDepth1 :: (Semiring s) => s -> s -> s -> [LatticeStep]
rloDepth1 a b x = if (x == updated) then [] else (norChar updated x) : rloDepth1 a b updated
where updated = add (mul x a) b
llo2 :: (Semiring s) => s -> s -> s
llo2 a x = if (x == updated) then x else llo2 a updated
where updated = add (mul a x) x
rlo2 :: (Semiring s) => s -> s -> s
rlo2 a x = if (x == updated) then x else rlo2 a updated
where updated = add (mul x a) x
lloDepth2 :: (Semiring s) => s -> s -> [LatticeStep]
lloDepth2 a x = if (x == updated) then [] else (norChar updated x) : lloDepth2 a updated
where updated = add (mul a x) x
rloDepth2 :: (Semiring s) => s -> s -> [LatticeStep]
rloDepth2 a x = if (x == updated) then [] else (norChar updated x) : rloDepth2 a updated
where updated = add (mul x a) x
-- Left closure: iterated solving of X = AX + 1, starting from the unit
lloStd :: (Semiring s) => s -> s
lloStd a = llo1 a unit unit
-- Right closure: iterated solving of X = XA + 1, starting from the unit
rloStd :: (Semiring s) => s -> s
rloStd a = rlo1 a unit unit
-- As lloStd, but iterating X = AX + X
lloMem :: (Semiring s) => s -> s
lloMem a = llo2 a unit
-- As rloStd, but iterating X = XA + X
rloMem :: (Semiring s) => s -> s
rloMem a = rlo2 a unit
lloStdDepth :: (Semiring s) => s -> [LatticeStep]
lloStdDepth a = lloDepth1 a unit unit
rloStdDepth :: (Semiring s) => s -> [LatticeStep]
rloStdDepth a = rloDepth1 a unit unit
lloMemDepth :: (Semiring s) => s -> [LatticeStep]
lloMemDepth a = lloDepth2 a unit
rloMemDepth :: (Semiring s) => s -> [LatticeStep]
rloMemDepth a = rloDepth2 a unit
|
sdynerow/SemiringsLibrary
|
Algebra/Optimum.hs
|
apache-2.0
| 3,010
| 0
| 10
| 671
| 1,190
| 633
| 557
| 66
| 2
|
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QTextLength.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:20
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QTextLength (
QqTextLength(..)
,QqTextLength_nf(..)
,rawValue
,qTextLength_delete
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Gui.QTextLength
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
class QqTextLength x1 where
qTextLength :: x1 -> IO (QTextLength ())
instance QqTextLength (()) where
qTextLength ()
= withQTextLengthResult $
qtc_QTextLength
foreign import ccall "qtc_QTextLength" qtc_QTextLength :: IO (Ptr (TQTextLength ()))
instance QqTextLength ((QTextLength t1)) where
qTextLength (x1)
= withQTextLengthResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextLength1 cobj_x1
foreign import ccall "qtc_QTextLength1" qtc_QTextLength1 :: Ptr (TQTextLength t1) -> IO (Ptr (TQTextLength ()))
instance QqTextLength ((QTextLengthType, Double)) where
qTextLength (x1, x2)
= withQTextLengthResult $
qtc_QTextLength2 (toCLong $ qEnum_toInt x1) (toCDouble x2)
foreign import ccall "qtc_QTextLength2" qtc_QTextLength2 :: CLong -> CDouble -> IO (Ptr (TQTextLength ()))
class QqTextLength_nf x1 where
qTextLength_nf :: x1 -> IO (QTextLength ())
instance QqTextLength_nf (()) where
qTextLength_nf ()
= withObjectRefResult $
qtc_QTextLength
instance QqTextLength_nf ((QTextLength t1)) where
qTextLength_nf (x1)
= withObjectRefResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextLength1 cobj_x1
instance QqTextLength_nf ((QTextLengthType, Double)) where
qTextLength_nf (x1, x2)
= withObjectRefResult $
qtc_QTextLength2 (toCLong $ qEnum_toInt x1) (toCDouble x2)
rawValue :: QTextLength a -> (()) -> IO (Double)
rawValue x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextLength_rawValue cobj_x0
foreign import ccall "qtc_QTextLength_rawValue" qtc_QTextLength_rawValue :: Ptr (TQTextLength a) -> IO CDouble
instance Qqtype (QTextLength a) (()) (IO (QTextLengthType)) where
qtype x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextLength_type cobj_x0
foreign import ccall "qtc_QTextLength_type" qtc_QTextLength_type :: Ptr (TQTextLength a) -> IO CLong
instance Qvalue (QTextLength a) ((Double)) (IO (Double)) where
value x0 (x1)
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextLength_value cobj_x0 (toCDouble x1)
foreign import ccall "qtc_QTextLength_value" qtc_QTextLength_value :: Ptr (TQTextLength a) -> CDouble -> IO CDouble
qTextLength_delete :: QTextLength a -> IO ()
qTextLength_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextLength_delete cobj_x0
foreign import ccall "qtc_QTextLength_delete" qtc_QTextLength_delete :: Ptr (TQTextLength a) -> IO ()
|
uduki/hsQt
|
Qtc/Gui/QTextLength.hs
|
bsd-2-clause
| 3,200
| 0
| 13
| 512
| 872
| 461
| 411
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Servant.Client.MultipartFormData
( ToMultipartFormData (..)
, MultipartFormDataReqBody
) where
import Control.Exception
import Control.Monad
import Control.Monad.Reader.Class
import Control.Monad.IO.Class
import Control.Monad.Error.Class
import Data.ByteString.Lazy hiding (pack, filter, map, null, elem)
import Data.Proxy
import Data.String.Conversions
import Data.Typeable (Typeable)
import Network.HTTP.Client hiding (Proxy, path)
import qualified Network.HTTP.Client as Client
import Network.HTTP.Client.MultipartFormData
import Network.HTTP.Media
import Network.HTTP.Types
import qualified Network.HTTP.Types as H
import qualified Network.HTTP.Types.Header as HTTP
import Servant.API
import Servant.Common.BaseUrl
import Servant.Client
import Servant.Common.Req
-- | A type that can be converted to a multipart/form-data value.
class ToMultipartFormData a where
-- | Convert a Haskell value to a multipart/form-data-friendly intermediate type.
toMultipartFormData :: a -> [Part]
-- | A request body to be encoded as multipart/form-data from a value of type @a@.
data MultipartFormDataReqBody a
deriving (Typeable)
instance (ToMultipartFormData b, MimeUnrender ct a, cts' ~ (ct ': cts)
) => HasClient (MultipartFormDataReqBody b :> Post cts' a) where
type Client (MultipartFormDataReqBody b :> Post cts' a)
= b -> ClientM a
clientWithRoute Proxy req reqData =
let reqToRequest' req' baseurl' = do
requestWithoutBody <- reqToRequest req' baseurl'
formDataBody (toMultipartFormData reqData) requestWithoutBody
in snd <$> performRequestCT' reqToRequest' (Proxy :: Proxy ct) H.methodPost req
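-- The following usage sketch is an editorial addition, not part of the
-- original module. The record, field names, endpoint path and response type
-- are hypothetical; 'partBS' and 'partLBS' come from
-- Network.HTTP.Client.MultipartFormData, and 'client' from Servant.Client:
--
-- > data UploadReq = UploadReq { urName :: Text, urPayload :: ByteString }
-- >
-- > instance ToMultipartFormData UploadReq where
-- >   toMultipartFormData r =
-- >     [ partBS  "name"    (encodeUtf8 (urName r))
-- >     , partLBS "payload" (fromStrict (urPayload r))
-- >     ]
-- >
-- > type UploadAPI =
-- >   "upload" :> MultipartFormDataReqBody UploadReq :> Post '[JSON] UploadResp
-- >
-- > uploadClient :: UploadReq -> ClientM UploadResp
-- > uploadClient = client (Proxy :: Proxy UploadAPI)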
-- copied `performRequest` from servant-0.7.1, then modified so it takes a variant of `reqToRequest`
-- as an argument.
performRequest' :: (Req -> BaseUrl -> IO Request)
-> Method -> Req -> Manager -> BaseUrl
-> ClientM ( Int, ByteString, MediaType
, [HTTP.Header], Response ByteString)
performRequest' reqToRequest' reqMethod req manager reqHost = do
partialRequest <- liftIO $ reqToRequest' req reqHost
let request = partialRequest { Client.method = reqMethod
}
eResponse <- liftIO $ catchConnectionError $ Client.httpLbs request manager
case eResponse of
Left err ->
throwError . ConnectionError $ SomeException err
Right response -> do
let status = Client.responseStatus response
body = Client.responseBody response
hdrs = Client.responseHeaders response
status_code = statusCode status
ct <- case lookup "Content-Type" $ Client.responseHeaders response of
Nothing -> pure $ "application"//"octet-stream"
Just t -> case parseAccept t of
Nothing -> throwError $ InvalidContentTypeHeader (cs t) body
Just t' -> pure t'
unless (status_code >= 200 && status_code < 300) $
throwError $ FailureResponse status ct body
return (status_code, body, ct, hdrs, response)
-- copied `performRequestCT` from servant-0.7.1, then modified so it takes a variant of `reqToRequest`
-- as an argument.
performRequestCT' :: MimeUnrender ct result =>
(Req -> BaseUrl -> IO Request) ->
Proxy ct -> Method -> Req
-> ClientM ([HTTP.Header], result)
performRequestCT' reqToRequest' ct reqMethod req = do
let acceptCT = contentType ct
ClientEnv manager reqHost <- ask
(_status, respBody, respCT, hdrs, _response) <-
performRequest' reqToRequest' reqMethod (req { reqAccept = [acceptCT] }) manager reqHost
unless (matches respCT acceptCT) $ throwError $ UnsupportedContentType respCT respBody
case mimeUnrender ct respBody of
Left err -> throwError $ DecodeFailure err respCT respBody
Right val -> return (hdrs, val)
|
mseri/fbmessenger-api-hs
|
src/Servant/Client/MultipartFormData.hs
|
bsd-3-clause
| 4,305
| 0
| 22
| 1,058
| 974
| 519
| 455
| -1
| -1
|
-- (c) The University of Glasgow 2006
-- (c) The GRASP/AQUA Project, Glasgow University, 1998
--
-- Type - public interface
{-# LANGUAGE CPP, FlexibleContexts #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -Wno-incomplete-record-updates #-}
-- | Main functions for manipulating types and type-related things
module Type (
-- Note some of this is just re-exports from TyCon..
-- * Main data types representing Types
-- $type_classification
-- $representation_types
TyThing(..), Type, ArgFlag(..), AnonArgFlag(..), ForallVisFlag(..),
KindOrType, PredType, ThetaType,
Var, TyVar, isTyVar, TyCoVar, TyCoBinder, TyCoVarBinder, TyVarBinder,
KnotTied,
-- ** Constructing and deconstructing types
mkTyVarTy, mkTyVarTys, getTyVar, getTyVar_maybe, repGetTyVar_maybe,
getCastedTyVar_maybe, tyVarKind, varType,
mkAppTy, mkAppTys, splitAppTy, splitAppTys, repSplitAppTys,
splitAppTy_maybe, repSplitAppTy_maybe, tcRepSplitAppTy_maybe,
mkVisFunTy, mkInvisFunTy, mkVisFunTys, mkInvisFunTys,
splitFunTy, splitFunTy_maybe,
splitFunTys, funResultTy, funArgTy,
mkTyConApp, mkTyConTy,
tyConAppTyCon_maybe, tyConAppTyConPicky_maybe,
tyConAppArgs_maybe, tyConAppTyCon, tyConAppArgs,
splitTyConApp_maybe, splitTyConApp, tyConAppArgN, nextRole,
tcSplitTyConApp_maybe,
splitListTyConApp_maybe,
repSplitTyConApp_maybe,
mkForAllTy, mkForAllTys, mkTyCoInvForAllTys,
mkSpecForAllTy, mkSpecForAllTys,
mkVisForAllTys, mkTyCoInvForAllTy,
mkInvForAllTy, mkInvForAllTys,
splitForAllTys, splitForAllTysSameVis,
splitForAllVarBndrs,
splitForAllTy_maybe, splitForAllTy,
splitForAllTy_ty_maybe, splitForAllTy_co_maybe,
splitPiTy_maybe, splitPiTy, splitPiTys,
mkTyConBindersPreferAnon,
mkPiTy, mkPiTys,
mkLamType, mkLamTypes,
piResultTy, piResultTys,
applyTysX, dropForAlls,
mkFamilyTyConApp,
mkNumLitTy, isNumLitTy,
mkStrLitTy, isStrLitTy,
isLitTy,
isPredTy,
getRuntimeRep_maybe, kindRep_maybe, kindRep,
mkCastTy, mkCoercionTy, splitCastTy_maybe,
discardCast,
userTypeError_maybe, pprUserTypeErrorTy,
coAxNthLHS,
stripCoercionTy,
splitPiTysInvisible, splitPiTysInvisibleN,
invisibleTyBndrCount,
filterOutInvisibleTypes, filterOutInferredTypes,
partitionInvisibleTypes, partitionInvisibles,
tyConArgFlags, appTyArgFlags,
synTyConResKind,
modifyJoinResTy, setJoinResTy,
-- ** Analyzing types
TyCoMapper(..), mapType, mapCoercion,
-- (Newtypes)
newTyConInstRhs,
-- ** Binders
sameVis,
mkTyCoVarBinder, mkTyCoVarBinders,
mkTyVarBinders,
mkAnonBinder,
isAnonTyCoBinder,
binderVar, binderVars, binderType, binderArgFlag,
tyCoBinderType, tyCoBinderVar_maybe,
tyBinderType,
binderRelevantType_maybe,
isVisibleArgFlag, isInvisibleArgFlag, isVisibleBinder,
isInvisibleBinder, isNamedBinder,
tyConBindersTyCoBinders,
-- ** Common type constructors
funTyCon,
-- ** Predicates on types
isTyVarTy, isFunTy, isCoercionTy,
isCoercionTy_maybe, isForAllTy,
isForAllTy_ty, isForAllTy_co,
isPiTy, isTauTy, isFamFreeTy,
isCoVarType,
isValidJoinPointType,
tyConAppNeedsKindSig,
-- *** Levity and boxity
isLiftedType_maybe,
isLiftedTypeKind, isUnliftedTypeKind,
isLiftedRuntimeRep, isUnliftedRuntimeRep,
isUnliftedType, mightBeUnliftedType, isUnboxedTupleType, isUnboxedSumType,
isAlgType, isDataFamilyAppType,
isPrimitiveType, isStrictType,
isRuntimeRepTy, isRuntimeRepVar, isRuntimeRepKindedTy,
dropRuntimeRepArgs,
getRuntimeRep,
-- * Main data types representing Kinds
Kind,
-- ** Finding the kind of a type
typeKind, tcTypeKind, isTypeLevPoly, resultIsLevPoly,
tcIsLiftedTypeKind, tcIsConstraintKind, tcReturnsConstraintKind,
tcIsRuntimeTypeKind,
-- ** Common Kind
liftedTypeKind,
-- * Type free variables
tyCoFVsOfType, tyCoFVsBndr, tyCoFVsVarBndr, tyCoFVsVarBndrs,
tyCoVarsOfType, tyCoVarsOfTypes,
tyCoVarsOfTypeDSet,
coVarsOfType,
coVarsOfTypes,
closeOverKindsDSet, closeOverKindsFV, closeOverKindsList,
closeOverKinds,
noFreeVarsOfType,
splitVisVarsOfType, splitVisVarsOfTypes,
expandTypeSynonyms,
typeSize, occCheckExpand,
-- * Well-scoped lists of variables
scopedSort, tyCoVarsOfTypeWellScoped,
tyCoVarsOfTypesWellScoped,
-- * Type comparison
eqType, eqTypeX, eqTypes, nonDetCmpType, nonDetCmpTypes, nonDetCmpTypeX,
nonDetCmpTypesX, nonDetCmpTc,
eqVarBndrs,
-- * Forcing evaluation of types
seqType, seqTypes,
-- * Other views onto Types
coreView, tcView,
tyConsOfType,
-- * Main type substitution data types
TvSubstEnv, -- Representation widely visible
TCvSubst(..), -- Representation visible to a few friends
-- ** Manipulating type substitutions
emptyTvSubstEnv, emptyTCvSubst, mkEmptyTCvSubst,
mkTCvSubst, zipTvSubst, mkTvSubstPrs,
zipTCvSubst,
notElemTCvSubst,
getTvSubstEnv, setTvSubstEnv,
zapTCvSubst, getTCvInScope, getTCvSubstRangeFVs,
extendTCvInScope, extendTCvInScopeList, extendTCvInScopeSet,
extendTCvSubst, extendCvSubst,
extendTvSubst, extendTvSubstBinderAndInScope,
extendTvSubstList, extendTvSubstAndInScope,
extendTCvSubstList,
extendTvSubstWithClone,
extendTCvSubstWithClone,
isInScope, composeTCvSubstEnv, composeTCvSubst, zipTyEnv, zipCoEnv,
isEmptyTCvSubst, unionTCvSubst,
-- ** Performing substitution on types and kinds
substTy, substTys, substTyWith, substTysWith, substTheta,
substTyAddInScope,
substTyUnchecked, substTysUnchecked, substThetaUnchecked,
substTyWithUnchecked,
substCoUnchecked, substCoWithUnchecked,
substTyVarBndr, substTyVarBndrs, substTyVar, substTyVars,
substVarBndr, substVarBndrs,
cloneTyVarBndr, cloneTyVarBndrs, lookupTyVar,
-- * Tidying type related things up for printing
tidyType, tidyTypes,
tidyOpenType, tidyOpenTypes,
tidyOpenKind,
tidyVarBndr, tidyVarBndrs, tidyFreeTyCoVars,
tidyOpenTyCoVar, tidyOpenTyCoVars,
tidyTyCoVarOcc,
tidyTopType,
tidyKind,
tidyTyCoVarBinder, tidyTyCoVarBinders,
-- * Kinds
isConstraintKindCon,
classifiesTypeWithValues,
isKindLevPoly
) where
#include "HsVersions.h"
import GhcPrelude
import BasicTypes
-- We import the representation and primitive functions from TyCoRep.
-- Many things are reexported, but not the representation!
import TyCoRep
import TyCoSubst
import TyCoTidy
import TyCoFVs
-- friends:
import Var
import VarEnv
import VarSet
import UniqSet
import TyCon
import TysPrim
import {-# SOURCE #-} TysWiredIn ( listTyCon, typeNatKind
, typeSymbolKind, liftedTypeKind
, constraintKind )
import PrelNames
import CoAxiom
import {-# SOURCE #-} Coercion( mkNomReflCo, mkGReflCo, mkReflCo
, mkTyConAppCo, mkAppCo, mkCoVarCo, mkAxiomRuleCo
, mkForAllCo, mkFunCo, mkAxiomInstCo, mkUnivCo
, mkSymCo, mkTransCo, mkNthCo, mkLRCo, mkInstCo
, mkKindCo, mkSubCo, mkFunCo, mkAxiomInstCo
, decomposePiCos, coercionKind, coercionLKind
, coercionRKind, coercionType
, isReflexiveCo, seqCo )
-- others
import Util
import FV
import Outputable
import FastString
import Pair
import ListSetOps
import Unique ( nonDetCmpUnique )
import Maybes ( orElse )
import Data.Maybe ( isJust )
import Control.Monad ( guard )
-- $type_classification
-- #type_classification#
--
-- Types are one of:
--
-- [Unboxed] Iff its representation is other than a pointer
-- Unboxed types are also unlifted.
--
-- [Lifted] Iff it has bottom as an element.
-- Closures always have lifted types: i.e. any
-- let-bound identifier in Core must have a lifted
-- type. Operationally, a lifted object is one that
-- can be entered.
-- Only lifted types may be unified with a type variable.
--
-- [Algebraic] Iff it is a type with one or more constructors, whether
-- declared with @data@ or @newtype@.
-- An algebraic type is one that can be deconstructed
-- with a case expression. This is /not/ the same as
-- lifted types, because we also include unboxed
-- tuples in this classification.
--
-- [Data] Iff it is a type declared with @data@, or a boxed tuple.
--
-- [Primitive] Iff it is a built-in type that can't be expressed in Haskell.
--
-- Currently, all primitive types are unlifted, but that's not necessarily
-- the case: for example, @Int@ could be primitive.
--
-- Some primitive types are unboxed, such as @Int#@, whereas some are boxed
-- but unlifted (such as @ByteArray#@). The only primitive types that we
-- classify as algebraic are the unboxed tuples.
--
-- Some examples of type classifications that may make this a bit clearer are:
--
-- @
-- Type primitive boxed lifted algebraic
-- -----------------------------------------------------------------------------
-- Int# Yes No No No
-- ByteArray# Yes Yes No No
-- (\# a, b \#) Yes No No Yes
-- (\# a | b \#) Yes No No Yes
-- ( a, b ) No Yes Yes Yes
-- [a] No Yes Yes Yes
-- @
-- $representation_types
-- A /source type/ is a type that is a separate type as far as the type checker is
-- concerned, but which has a more low-level representation as far as Core-to-Core
-- passes and the rest of the back end is concerned.
--
-- You don't normally have to worry about this, as the utility functions in
-- this module will automatically convert a source type into a representation
-- type whenever one is spotted, to the best of their abilities. If you don't want this
-- to happen, use the equivalent functions from the "TcType" module.
{-
************************************************************************
* *
Type representation
* *
************************************************************************
Note [coreView vs tcView]
~~~~~~~~~~~~~~~~~~~~~~~~~
So far as the typechecker is concerned, 'Constraint' and 'TYPE
LiftedRep' are distinct kinds.
But in Core these two are treated as identical.
We implement this by making 'coreView' convert 'Constraint' to 'TYPE
LiftedRep' on the fly. The function tcView (used in the type checker)
does not do this.
See also #11715, which tracks removing this inconsistency.
-}
-- | Gives the typechecker view of a type. This unwraps synonyms but
-- leaves 'Constraint' alone. c.f. coreView, which turns Constraint into
-- TYPE LiftedRep. Returns Nothing if no unwrapping happens.
-- See also Note [coreView vs tcView]
{-# INLINE tcView #-}
tcView :: Type -> Maybe Type
tcView (TyConApp tc tys) | Just (tenv, rhs, tys') <- expandSynTyCon_maybe tc tys
= Just (mkAppTys (substTy (mkTvSubstPrs tenv) rhs) tys')
-- The free vars of 'rhs' should all be bound by 'tenv', so it's
-- ok to use 'substTy' here.
-- See also Note [The substitution invariant] in TyCoSubst.
-- It's important to use mkAppTys, rather than (foldl AppTy),
-- because the function part might well return a
-- partially-applied type constructor; indeed, usually will!
tcView _ = Nothing
{-# INLINE coreView #-}
coreView :: Type -> Maybe Type
-- ^ This function strips off the /top layer only/ of a type synonym
-- application (if any) to give its underlying representation type.
-- Returns Nothing if there is nothing to look through.
-- This function considers 'Constraint' to be a synonym of @TYPE LiftedRep@.
--
-- By being non-recursive and inlined, this case analysis gets efficiently
-- joined onto the case analysis that the caller is already doing
coreView ty@(TyConApp tc tys)
| Just (tenv, rhs, tys') <- expandSynTyCon_maybe tc tys
= Just (mkAppTys (substTy (mkTvSubstPrs tenv) rhs) tys')
-- This equation is exactly like tcView
-- At the Core level, Constraint = Type
-- See Note [coreView vs tcView]
| isConstraintKindCon tc
= ASSERT2( null tys, ppr ty )
Just liftedTypeKind
coreView _ = Nothing
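-- A small added illustration of Note [coreView vs tcView] (not part of the
-- original source): both views unwrap an ordinary synonym, e.g. for
--   type T = Int
-- both return Just Int, but only coreView rewrites the Constraint kind:
--   coreView constraintKind  ==  Just liftedTypeKind
--   tcView   constraintKind  ==  Nothing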
-----------------------------------------------
expandTypeSynonyms :: Type -> Type
-- ^ Expand out all type synonyms. Actually, it'd suffice to expand out
-- just the ones that discard type variables (e.g. type Funny a = Int)
-- But we don't know which those are currently, so we just expand all.
--
-- 'expandTypeSynonyms' only expands out type synonyms mentioned in the type,
-- not in the kinds of any TyCon or TyVar mentioned in the type.
--
-- Keep this synchronized with 'synonymTyConsOfType'
expandTypeSynonyms ty
= go (mkEmptyTCvSubst in_scope) ty
where
in_scope = mkInScopeSet (tyCoVarsOfType ty)
go subst (TyConApp tc tys)
| Just (tenv, rhs, tys') <- expandSynTyCon_maybe tc expanded_tys
= let subst' = mkTvSubst in_scope (mkVarEnv tenv)
-- Make a fresh substitution; rhs has nothing to
-- do with anything that has happened so far
-- NB: if you make changes here, be sure to build an
-- /idempotent/ substitution, even in the nested case
-- type T a b = a -> b
-- type S x y = T y x
-- (#11665)
in mkAppTys (go subst' rhs) tys'
| otherwise
= TyConApp tc expanded_tys
where
expanded_tys = (map (go subst) tys)
go _ (LitTy l) = LitTy l
go subst (TyVarTy tv) = substTyVar subst tv
go subst (AppTy t1 t2) = mkAppTy (go subst t1) (go subst t2)
go subst ty@(FunTy _ arg res)
= ty { ft_arg = go subst arg, ft_res = go subst res }
go subst (ForAllTy (Bndr tv vis) t)
= let (subst', tv') = substVarBndrUsing go subst tv in
ForAllTy (Bndr tv' vis) (go subst' t)
go subst (CastTy ty co) = mkCastTy (go subst ty) (go_co subst co)
go subst (CoercionTy co) = mkCoercionTy (go_co subst co)
go_mco _ MRefl = MRefl
go_mco subst (MCo co) = MCo (go_co subst co)
go_co subst (Refl ty)
= mkNomReflCo (go subst ty)
go_co subst (GRefl r ty mco)
= mkGReflCo r (go subst ty) (go_mco subst mco)
-- NB: coercions are always expanded upon creation
go_co subst (TyConAppCo r tc args)
= mkTyConAppCo r tc (map (go_co subst) args)
go_co subst (AppCo co arg)
= mkAppCo (go_co subst co) (go_co subst arg)
go_co subst (ForAllCo tv kind_co co)
= let (subst', tv', kind_co') = go_cobndr subst tv kind_co in
mkForAllCo tv' kind_co' (go_co subst' co)
go_co subst (FunCo r co1 co2)
= mkFunCo r (go_co subst co1) (go_co subst co2)
go_co subst (CoVarCo cv)
= substCoVar subst cv
go_co subst (AxiomInstCo ax ind args)
= mkAxiomInstCo ax ind (map (go_co subst) args)
go_co subst (UnivCo p r t1 t2)
= mkUnivCo (go_prov subst p) r (go subst t1) (go subst t2)
go_co subst (SymCo co)
= mkSymCo (go_co subst co)
go_co subst (TransCo co1 co2)
= mkTransCo (go_co subst co1) (go_co subst co2)
go_co subst (NthCo r n co)
= mkNthCo r n (go_co subst co)
go_co subst (LRCo lr co)
= mkLRCo lr (go_co subst co)
go_co subst (InstCo co arg)
= mkInstCo (go_co subst co) (go_co subst arg)
go_co subst (KindCo co)
= mkKindCo (go_co subst co)
go_co subst (SubCo co)
= mkSubCo (go_co subst co)
go_co subst (AxiomRuleCo ax cs)
= AxiomRuleCo ax (map (go_co subst) cs)
go_co _ (HoleCo h)
= pprPanic "expandTypeSynonyms hit a hole" (ppr h)
go_prov _ UnsafeCoerceProv = UnsafeCoerceProv
go_prov subst (PhantomProv co) = PhantomProv (go_co subst co)
go_prov subst (ProofIrrelProv co) = ProofIrrelProv (go_co subst co)
go_prov _ p@(PluginProv _) = p
-- the "False" and "const" are to accommodate the type of
-- substForAllCoBndrUsing, which is general enough to
-- handle coercion optimization (which sometimes swaps the
-- order of a coercion)
go_cobndr subst = substForAllCoBndrUsing False (go_co subst) subst
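-- A worked example for the nested-synonym case above (added for clarity,
-- not in the original source): with
--   type T a b = a -> b
--   type S x y = T y x
-- we get
--   expandTypeSynonyms (S Int Bool)  ==  Bool -> Int
-- i.e. both layers are expanded, using a fresh substitution for the rhs of
-- each synonym as described in the comments on 'go'.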
-- | Extract the RuntimeRep classifier of a type from its kind. For example,
-- @kindRep * = LiftedRep@; panics if this is not possible.
-- Treats * and Constraint as the same
kindRep :: HasDebugCallStack => Kind -> Type
kindRep k = case kindRep_maybe k of
Just r -> r
Nothing -> pprPanic "kindRep" (ppr k)
-- | Given a kind (TYPE rr), extract its RuntimeRep classifier rr.
-- For example, @kindRep_maybe * = Just LiftedRep@
-- Returns 'Nothing' if the kind is not of form (TYPE rr)
-- Treats * and Constraint as the same
kindRep_maybe :: HasDebugCallStack => Kind -> Maybe Type
kindRep_maybe kind
| Just kind' <- coreView kind = kindRep_maybe kind'
| TyConApp tc [arg] <- kind
, tc `hasKey` tYPETyConKey = Just arg
| otherwise = Nothing
-- | This version considers Constraint to be the same as *. Returns True
-- if the argument is equivalent to Type/Constraint and False otherwise.
-- See Note [Kind Constraint and kind Type]
isLiftedTypeKind :: Kind -> Bool
isLiftedTypeKind kind
= case kindRep_maybe kind of
Just rep -> isLiftedRuntimeRep rep
Nothing -> False
isLiftedRuntimeRep :: Type -> Bool
-- isLiftedRuntimeRep is true of LiftedRep :: RuntimeRep
-- False of type variables (a :: RuntimeRep)
-- and of other reps e.g. (IntRep :: RuntimeRep)
isLiftedRuntimeRep rep
| Just rep' <- coreView rep = isLiftedRuntimeRep rep'
| TyConApp rr_tc args <- rep
, rr_tc `hasKey` liftedRepDataConKey = ASSERT( null args ) True
| otherwise = False
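-- Added illustration (not in the original source), using kinds imported above:
--   isLiftedTypeKind liftedTypeKind  ==  True   -- Type, i.e. TYPE LiftedRep
--   isLiftedTypeKind constraintKind  ==  True   -- Constraint, via coreView
--   isLiftedRuntimeRep (TyVarTy r)   ==  False  -- for any (r :: RuntimeRep)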
-- | Returns True if the kind classifies unlifted types and False otherwise.
-- Note that this returns False for levity-polymorphic kinds, which may
-- be specialized to a kind that classifies unlifted types.
isUnliftedTypeKind :: Kind -> Bool
isUnliftedTypeKind kind
= case kindRep_maybe kind of
Just rep -> isUnliftedRuntimeRep rep
Nothing -> False
isUnliftedRuntimeRep :: Type -> Bool
-- True of definitely-unlifted RuntimeReps
-- False of (LiftedRep :: RuntimeRep)
-- and of variables (a :: RuntimeRep)
isUnliftedRuntimeRep rep
| Just rep' <- coreView rep = isUnliftedRuntimeRep rep'
| TyConApp rr_tc _ <- rep -- NB: args might be non-empty
-- e.g. TupleRep [r1, .., rn]
= isPromotedDataCon rr_tc && not (rr_tc `hasKey` liftedRepDataConKey)
-- Avoid searching all the unlifted RuntimeRep type cons
-- In the RuntimeRep data type, only LiftedRep is lifted
-- But be careful of type families (F tys) :: RuntimeRep
| otherwise {- Variables, applications -}
= False
-- | Is this the type 'RuntimeRep'?
isRuntimeRepTy :: Type -> Bool
isRuntimeRepTy ty | Just ty' <- coreView ty = isRuntimeRepTy ty'
isRuntimeRepTy (TyConApp tc args)
| tc `hasKey` runtimeRepTyConKey = ASSERT( null args ) True
isRuntimeRepTy _ = False
-- | Is a tyvar of type 'RuntimeRep'?
isRuntimeRepVar :: TyVar -> Bool
isRuntimeRepVar = isRuntimeRepTy . tyVarKind
{-
************************************************************************
* *
Analyzing types
* *
************************************************************************
These functions do a map-like operation over types, performing some operation
on all variables and binding sites. Primarily used for zonking.
Note [Efficiency for mapCoercion ForAllCo case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As noted in Note [Forall coercions] in TyCoRep, a ForAllCo is a bit redundant.
It stores a TyCoVar and a Coercion, where the kind of the TyCoVar always matches
the left-hand kind of the coercion. This is convenient lots of the time, but
not when mapping a function over a coercion.
The problem is that tcm_tybinder will affect the TyCoVar's kind and
mapCoercion will affect the Coercion, and we hope that the results will be
the same. Even if they are the same (which should generally happen with
correct algorithms), then there is an efficiency issue. In particular,
this problem seems to make what should be a linear algorithm into a potentially
exponential one. But it's only going to be bad in the case where there's
lots of foralls in the kinds of other foralls. Like this:
forall a : (forall b : (forall c : ...). ...). ...
This construction seems unlikely. So we'll do the inefficient, easy way
for now.
Note [Specialising mappers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
These INLINABLE pragmas are indispensable. mapType/mapCoercion are used
to implement zonking, and it's vital that they get specialised to the TcM
monad. This specialisation happens automatically (that is, without a
SPECIALISE pragma) as long as the definitions are INLINABLE. For example,
this one change made a 20% allocation difference in perf/compiler/T5030.
-}
-- | This describes how a "map" operation over a type/coercion should behave
data TyCoMapper env m
= TyCoMapper
{ tcm_tyvar :: env -> TyVar -> m Type
, tcm_covar :: env -> CoVar -> m Coercion
, tcm_hole :: env -> CoercionHole -> m Coercion
-- ^ What to do with coercion holes.
-- See Note [Coercion holes] in TyCoRep.
, tcm_tycobinder :: env -> TyCoVar -> ArgFlag -> m (env, TyCoVar)
-- ^ The returned env is used in the extended scope
, tcm_tycon :: TyCon -> m TyCon
-- ^ This is used only for TcTyCons
-- a) To zonk TcTyCons
-- b) To turn TcTyCons into TyCons.
-- See Note [Type checking recursive type and class declarations]
-- in TcTyClsDecls
}
{-# INLINABLE mapType #-} -- See Note [Specialising mappers]
mapType :: Monad m => TyCoMapper env m -> env -> Type -> m Type
mapType mapper@(TyCoMapper { tcm_tyvar = tyvar
, tcm_tycobinder = tycobinder
, tcm_tycon = tycon })
env ty
= go ty
where
go (TyVarTy tv) = tyvar env tv
go (AppTy t1 t2) = mkAppTy <$> go t1 <*> go t2
go ty@(LitTy {}) = return ty
go (CastTy ty co) = mkCastTy <$> go ty <*> mapCoercion mapper env co
go (CoercionTy co) = CoercionTy <$> mapCoercion mapper env co
go ty@(FunTy _ arg res)
= do { arg' <- go arg; res' <- go res
; return (ty { ft_arg = arg', ft_res = res' }) }
go ty@(TyConApp tc tys)
| isTcTyCon tc
= do { tc' <- tycon tc
; mkTyConApp tc' <$> mapM go tys }
-- Not a TcTyCon
| null tys -- Avoid allocation in this very
= return ty -- common case (E.g. Int, LiftedRep etc)
| otherwise
= mkTyConApp tc <$> mapM go tys
go (ForAllTy (Bndr tv vis) inner)
= do { (env', tv') <- tycobinder env tv vis
; inner' <- mapType mapper env' inner
; return $ ForAllTy (Bndr tv' vis) inner' }
{-# INLINABLE mapCoercion #-} -- See Note [Specialising mappers]
mapCoercion :: Monad m
=> TyCoMapper env m -> env -> Coercion -> m Coercion
mapCoercion mapper@(TyCoMapper { tcm_covar = covar
, tcm_hole = cohole
, tcm_tycobinder = tycobinder
, tcm_tycon = tycon })
env co
= go co
where
go_mco MRefl = return MRefl
go_mco (MCo co) = MCo <$> (go co)
go (Refl ty) = Refl <$> mapType mapper env ty
go (GRefl r ty mco) = mkGReflCo r <$> mapType mapper env ty <*> (go_mco mco)
go (TyConAppCo r tc args)
= do { tc' <- if isTcTyCon tc
then tycon tc
else return tc
; mkTyConAppCo r tc' <$> mapM go args }
go (AppCo c1 c2) = mkAppCo <$> go c1 <*> go c2
go (ForAllCo tv kind_co co)
= do { kind_co' <- go kind_co
; (env', tv') <- tycobinder env tv Inferred
; co' <- mapCoercion mapper env' co
; return $ mkForAllCo tv' kind_co' co' }
-- See Note [Efficiency for mapCoercion ForAllCo case]
go (FunCo r c1 c2) = mkFunCo r <$> go c1 <*> go c2
go (CoVarCo cv) = covar env cv
go (AxiomInstCo ax i args)
= mkAxiomInstCo ax i <$> mapM go args
go (HoleCo hole) = cohole env hole
go (UnivCo p r t1 t2)
= mkUnivCo <$> go_prov p <*> pure r
<*> mapType mapper env t1 <*> mapType mapper env t2
go (SymCo co) = mkSymCo <$> go co
go (TransCo c1 c2) = mkTransCo <$> go c1 <*> go c2
go (AxiomRuleCo r cos) = AxiomRuleCo r <$> mapM go cos
go (NthCo r i co) = mkNthCo r i <$> go co
go (LRCo lr co) = mkLRCo lr <$> go co
go (InstCo co arg) = mkInstCo <$> go co <*> go arg
go (KindCo co) = mkKindCo <$> go co
go (SubCo co) = mkSubCo <$> go co
go_prov UnsafeCoerceProv = return UnsafeCoerceProv
go_prov (PhantomProv co) = PhantomProv <$> go co
go_prov (ProofIrrelProv co) = ProofIrrelProv <$> go co
go_prov p@(PluginProv _) = return p
{-
************************************************************************
* *
\subsection{Constructor-specific functions}
* *
************************************************************************
---------------------------------------------------------------------
TyVarTy
~~~~~~~
-}
-- | Attempts to obtain the type variable underlying a 'Type', and panics with the
-- given message if this is not a type variable type. See also 'getTyVar_maybe'
getTyVar :: String -> Type -> TyVar
getTyVar msg ty = case getTyVar_maybe ty of
Just tv -> tv
Nothing -> panic ("getTyVar: " ++ msg)
isTyVarTy :: Type -> Bool
isTyVarTy ty = isJust (getTyVar_maybe ty)
-- | Attempts to obtain the type variable underlying a 'Type'
getTyVar_maybe :: Type -> Maybe TyVar
getTyVar_maybe ty | Just ty' <- coreView ty = getTyVar_maybe ty'
| otherwise = repGetTyVar_maybe ty
-- | If the type is a tyvar, possibly under a cast, returns it, along
-- with the coercion. Thus, the co is :: kind tv ~N kind ty
getCastedTyVar_maybe :: Type -> Maybe (TyVar, CoercionN)
getCastedTyVar_maybe ty | Just ty' <- coreView ty = getCastedTyVar_maybe ty'
getCastedTyVar_maybe (CastTy (TyVarTy tv) co) = Just (tv, co)
getCastedTyVar_maybe (TyVarTy tv)
= Just (tv, mkReflCo Nominal (tyVarKind tv))
getCastedTyVar_maybe _ = Nothing
-- | Attempts to obtain the type variable underlying a 'Type', without
-- any expansion
repGetTyVar_maybe :: Type -> Maybe TyVar
repGetTyVar_maybe (TyVarTy tv) = Just tv
repGetTyVar_maybe _ = Nothing
{-
---------------------------------------------------------------------
AppTy
~~~~~
We need to be pretty careful with AppTy to make sure we obey the
invariant that a TyConApp is always visibly so. mkAppTy maintains the
invariant: use it.
Note [Decomposing fat arrow c=>t]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Can we unify (a b) with (Eq a => ty)? If we do so, we end up with
a partial application like ((=>) Eq a) which doesn't make sense in
source Haskell. In contrast, we *can* unify (a b) with (t1 -> t2).
Here's an example (#9858) of how you might do it:
i :: (Typeable a, Typeable b) => Proxy (a b) -> TypeRep
i p = typeRep p
j = i (Proxy :: Proxy (Eq Int => Int))
The type (Proxy (Eq Int => Int)) is only accepted with -XImpredicativeTypes,
but suppose we want that. But then in the call to 'i', we end
up decomposing (Eq Int => Int), and we definitely don't want that.
This really only applies to the type checker; in Core, '=>' and '->'
are the same, as are 'Constraint' and '*'. But for now I've put
the test in repSplitAppTy_maybe, which applies throughout, because
the other calls to splitAppTy are in Unify, which is also used by
the type checker (e.g. when matching type-function equations).
-}
-- | Applies a type to another, as in e.g. @k a@
mkAppTy :: Type -> Type -> Type
-- See Note [Respecting definitional equality], invariant (EQ1).
mkAppTy (CastTy fun_ty co) arg_ty
| ([arg_co], res_co) <- decomposePiCos co (coercionKind co) [arg_ty]
= (fun_ty `mkAppTy` (arg_ty `mkCastTy` arg_co)) `mkCastTy` res_co
mkAppTy (TyConApp tc tys) ty2 = mkTyConApp tc (tys ++ [ty2])
mkAppTy ty1 ty2 = AppTy ty1 ty2
-- Note that the TyConApp could be an
-- under-saturated type synonym. GHC allows that; e.g.
-- type Foo k = k a -> k a
-- type Id x = x
-- foo :: Foo Id -> Foo Id
--
-- Here Id is partially applied in the type sig for Foo,
-- but once the type synonyms are expanded all is well
--
-- Moreover in TcHsTypes.tcInferApps we build up a type
-- (T t1 t2 t3) one argument at a time, thus forming
-- (T t1), (T t1 t2), etc
mkAppTys :: Type -> [Type] -> Type
mkAppTys ty1 [] = ty1
mkAppTys (CastTy fun_ty co) arg_tys  -- much more efficient than nested mkAppTy
-- Why do this? See (EQ1) of
-- Note [Respecting definitional equality]
-- in TyCoRep
= foldl' AppTy ((mkAppTys fun_ty casted_arg_tys) `mkCastTy` res_co) leftovers
where
(arg_cos, res_co) = decomposePiCos co (coercionKind co) arg_tys
(args_to_cast, leftovers) = splitAtList arg_cos arg_tys
casted_arg_tys = zipWith mkCastTy args_to_cast arg_cos
mkAppTys (TyConApp tc tys1) tys2 = mkTyConApp tc (tys1 ++ tys2)
mkAppTys ty1 tys2 = foldl' AppTy ty1 tys2
-------------
splitAppTy_maybe :: Type -> Maybe (Type, Type)
-- ^ Attempt to take a type application apart, whether it is a
-- function, type constructor, or plain type application. Note
-- that type family applications are NEVER unsaturated by this!
splitAppTy_maybe ty | Just ty' <- coreView ty
= splitAppTy_maybe ty'
splitAppTy_maybe ty = repSplitAppTy_maybe ty
-------------
repSplitAppTy_maybe :: HasDebugCallStack => Type -> Maybe (Type,Type)
-- ^ Does the AppTy split as in 'splitAppTy_maybe', but assumes that
-- any Core view stuff is already done
repSplitAppTy_maybe (FunTy _ ty1 ty2)
= Just (TyConApp funTyCon [rep1, rep2, ty1], ty2)
where
rep1 = getRuntimeRep ty1
rep2 = getRuntimeRep ty2
repSplitAppTy_maybe (AppTy ty1 ty2)
= Just (ty1, ty2)
repSplitAppTy_maybe (TyConApp tc tys)
| not (mustBeSaturated tc) || tys `lengthExceeds` tyConArity tc
, Just (tys', ty') <- snocView tys
= Just (TyConApp tc tys', ty') -- Never create unsaturated type family apps!
repSplitAppTy_maybe _other = Nothing
-- This one doesn't break apart (c => t).
-- See Note [Decomposing fat arrow c=>t]
-- Defined here to avoid module loops between Unify and TcType.
tcRepSplitAppTy_maybe :: Type -> Maybe (Type,Type)
-- ^ Does the AppTy split as in 'tcSplitAppTy_maybe', but assumes that
-- any coreView stuff is already done. Refuses to look through (c => t)
tcRepSplitAppTy_maybe (FunTy { ft_af = af, ft_arg = ty1, ft_res = ty2 })
| InvisArg <- af
= Nothing -- See Note [Decomposing fat arrow c=>t]
| otherwise
= Just (TyConApp funTyCon [rep1, rep2, ty1], ty2)
where
rep1 = getRuntimeRep ty1
rep2 = getRuntimeRep ty2
tcRepSplitAppTy_maybe (AppTy ty1 ty2) = Just (ty1, ty2)
tcRepSplitAppTy_maybe (TyConApp tc tys)
| not (mustBeSaturated tc) || tys `lengthExceeds` tyConArity tc
, Just (tys', ty') <- snocView tys
= Just (TyConApp tc tys', ty') -- Never create unsaturated type family apps!
tcRepSplitAppTy_maybe _other = Nothing
-------------
splitAppTy :: Type -> (Type, Type)
-- ^ Attempts to take a type application apart, as in 'splitAppTy_maybe',
-- and panics if this is not possible
splitAppTy ty = case splitAppTy_maybe ty of
Just pr -> pr
Nothing -> panic "splitAppTy"
-------------
splitAppTys :: Type -> (Type, [Type])
-- ^ Recursively splits a type as far as is possible, leaving a residual
-- type being applied to and the type arguments applied to it. Never fails,
-- even if that means returning an empty list of type applications.
splitAppTys ty = split ty ty []
where
split orig_ty ty args | Just ty' <- coreView ty = split orig_ty ty' args
split _ (AppTy ty arg) args = split ty ty (arg:args)
split _ (TyConApp tc tc_args) args
= let -- keep type families saturated
n | mustBeSaturated tc = tyConArity tc
| otherwise = 0
(tc_args1, tc_args2) = splitAt n tc_args
in
(TyConApp tc tc_args1, tc_args2 ++ args)
split _ (FunTy _ ty1 ty2) args
= ASSERT( null args )
(TyConApp funTyCon [], [rep1, rep2, ty1, ty2])
where
rep1 = getRuntimeRep ty1
rep2 = getRuntimeRep ty2
split orig_ty _ args = (orig_ty, args)
-- | Like 'splitAppTys', but doesn't look through type synonyms
repSplitAppTys :: HasDebugCallStack => Type -> (Type, [Type])
repSplitAppTys ty = split ty []
where
split (AppTy ty arg) args = split ty (arg:args)
split (TyConApp tc tc_args) args
= let n | mustBeSaturated tc = tyConArity tc
| otherwise = 0
(tc_args1, tc_args2) = splitAt n tc_args
in
(TyConApp tc tc_args1, tc_args2 ++ args)
split (FunTy _ ty1 ty2) args
= ASSERT( null args )
(TyConApp funTyCon [], [rep1, rep2, ty1, ty2])
where
rep1 = getRuntimeRep ty1
rep2 = getRuntimeRep ty2
split ty args = (ty, args)
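-- Added illustration (not in the original source): for an under-saturated
-- constructor the arguments are peeled off,
--   splitAppTys (Maybe Int)  ==  (Maybe, [Int])
-- whereas a type family F of arity 2 is kept saturated,
--   splitAppTys (F a b c)    ==  (F a b, [c])
-- repSplitAppTys behaves the same, but does not look through synonyms first.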
{-
LitTy
~~~~~
-}
mkNumLitTy :: Integer -> Type
mkNumLitTy n = LitTy (NumTyLit n)
-- | Is this a numeric literal. We also look through type synonyms.
isNumLitTy :: Type -> Maybe Integer
isNumLitTy ty | Just ty1 <- coreView ty = isNumLitTy ty1
isNumLitTy (LitTy (NumTyLit n)) = Just n
isNumLitTy _ = Nothing
mkStrLitTy :: FastString -> Type
mkStrLitTy s = LitTy (StrTyLit s)
-- | Is this a symbol literal. We also look through type synonyms.
isStrLitTy :: Type -> Maybe FastString
isStrLitTy ty | Just ty1 <- coreView ty = isStrLitTy ty1
isStrLitTy (LitTy (StrTyLit s)) = Just s
isStrLitTy _ = Nothing
-- | Is this a type literal (symbol or numeric).
isLitTy :: Type -> Maybe TyLit
isLitTy ty | Just ty1 <- coreView ty = isLitTy ty1
isLitTy (LitTy l) = Just l
isLitTy _ = Nothing
-- | Is this type a custom user error?
-- If so, give us the kind and the error message.
userTypeError_maybe :: Type -> Maybe Type
userTypeError_maybe t
= do { (tc, _kind : msg : _) <- splitTyConApp_maybe t
-- There may be more than 2 arguments, if the type error is
-- used as a type constructor (e.g. at kind `Type -> Type`).
; guard (tyConName tc == errorMessageTypeErrorFamName)
; return msg }
-- | Render a type corresponding to a user type error into a SDoc.
pprUserTypeErrorTy :: Type -> SDoc
pprUserTypeErrorTy ty =
case splitTyConApp_maybe ty of
-- Text "Something"
Just (tc,[txt])
| tyConName tc == typeErrorTextDataConName
, Just str <- isStrLitTy txt -> ftext str
-- ShowType t
Just (tc,[_k,t])
| tyConName tc == typeErrorShowTypeDataConName -> ppr t
-- t1 :<>: t2
Just (tc,[t1,t2])
| tyConName tc == typeErrorAppendDataConName ->
pprUserTypeErrorTy t1 <> pprUserTypeErrorTy t2
-- t1 :$$: t2
Just (tc,[t1,t2])
| tyConName tc == typeErrorVAppendDataConName ->
pprUserTypeErrorTy t1 $$ pprUserTypeErrorTy t2
-- An unevaluated type function
_ -> ppr ty
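-- Added illustration (not in the original source): the custom error type
--   TypeError (Text "Expected " :<>: ShowType Int :$$: Text "but got a list")
-- renders as the two-line SDoc
--   Expected Int
--   but got a list
-- since :<>: is mapped to <> and :$$: to $$ above.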
{-
---------------------------------------------------------------------
FunTy
~~~~~
Note [Representation of function types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Functions (e.g. Int -> Char) can be thought of as being applications
of funTyCon (known in Haskell surface syntax as (->)),
(->) :: forall (r1 :: RuntimeRep) (r2 :: RuntimeRep)
(a :: TYPE r1) (b :: TYPE r2).
a -> b -> Type
However, for efficiency's sake we represent saturated applications of (->)
with FunTy. For instance, the type,
(->) r1 r2 a b
is equivalent to,
FunTy (Anon a) b
Note how the RuntimeReps are implied in the FunTy representation. For this
reason we must be careful when reconstructing the TyConApp representation (see,
for instance, splitTyConApp_maybe).
In the compiler we maintain the invariant that all saturated applications of
(->) are represented with FunTy.
See #11714.
-}
splitFunTy :: Type -> (Type, Type)
-- ^ Attempts to extract the argument and result types from a type, and
-- panics if that is not possible. See also 'splitFunTy_maybe'
splitFunTy ty | Just ty' <- coreView ty = splitFunTy ty'
splitFunTy (FunTy _ arg res) = (arg, res)
splitFunTy other = pprPanic "splitFunTy" (ppr other)
splitFunTy_maybe :: Type -> Maybe (Type, Type)
-- ^ Attempts to extract the argument and result types from a type
splitFunTy_maybe ty | Just ty' <- coreView ty = splitFunTy_maybe ty'
splitFunTy_maybe (FunTy _ arg res) = Just (arg, res)
splitFunTy_maybe _ = Nothing
splitFunTys :: Type -> ([Type], Type)
splitFunTys ty = split [] ty ty
where
split args orig_ty ty | Just ty' <- coreView ty = split args orig_ty ty'
split args _ (FunTy _ arg res) = split (arg:args) res res
split args orig_ty _ = (reverse args, orig_ty)
funResultTy :: Type -> Type
-- ^ Extract the function result type and panic if that is not possible
funResultTy ty | Just ty' <- coreView ty = funResultTy ty'
funResultTy (FunTy { ft_res = res }) = res
funResultTy ty = pprPanic "funResultTy" (ppr ty)
funArgTy :: Type -> Type
-- ^ Extract the function argument type and panic if that is not possible
funArgTy ty | Just ty' <- coreView ty = funArgTy ty'
funArgTy (FunTy { ft_arg = arg }) = arg
funArgTy ty = pprPanic "funArgTy" (ppr ty)
-- | Just like 'piResultTys' but for a single argument.
-- Try not to iterate 'piResultTy', because it's inefficient to substitute
-- one variable at a time; instead use 'piResultTys'.
piResultTy :: HasDebugCallStack => Type -> Type -> Type
piResultTy ty arg = case piResultTy_maybe ty arg of
Just res -> res
Nothing -> pprPanic "piResultTy" (ppr ty $$ ppr arg)
piResultTy_maybe :: Type -> Type -> Maybe Type
-- We don't need a 'tc' version, because
-- this function behaves the same for Type and Constraint
piResultTy_maybe ty arg
| Just ty' <- coreView ty = piResultTy_maybe ty' arg
| FunTy { ft_res = res } <- ty
= Just res
| ForAllTy (Bndr tv _) res <- ty
= let empty_subst = mkEmptyTCvSubst $ mkInScopeSet $
tyCoVarsOfTypes [arg,res]
in Just (substTy (extendTCvSubst empty_subst tv arg) res)
| otherwise
= Nothing
-- | (piResultTys f_ty [ty1, .., tyn]) gives the type of (f ty1 .. tyn)
-- where f :: f_ty
-- 'piResultTys' is interesting because:
-- 1. 'f_ty' may have more for-alls than there are args
-- 2. Less obviously, it may have fewer for-alls
-- For case 2. think of:
-- piResultTys (forall a.a) [forall b.b, Int]
-- This really can happen, but only (I think) in situations involving
-- undefined. For example:
-- undefined :: forall a. a
-- Term: undefined @(forall b. b->b) @Int
-- This term should have type (Int -> Int), but notice that
-- there are more type args than foralls in 'undefined's type.
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
-- This is a heavily used function (e.g. from typeKind),
-- so we pay attention to efficiency, especially in the special case
-- where there are no for-alls so we are just dropping arrows from
-- a function type/kind.
piResultTys :: HasDebugCallStack => Type -> [Type] -> Type
piResultTys ty [] = ty
piResultTys ty orig_args@(arg:args)
| Just ty' <- coreView ty
= piResultTys ty' orig_args
| FunTy { ft_res = res } <- ty
= piResultTys res args
| ForAllTy (Bndr tv _) res <- ty
= go (extendTCvSubst init_subst tv arg) res args
| otherwise
= pprPanic "piResultTys1" (ppr ty $$ ppr orig_args)
where
init_subst = mkEmptyTCvSubst $ mkInScopeSet (tyCoVarsOfTypes (ty:orig_args))
go :: TCvSubst -> Type -> [Type] -> Type
go subst ty [] = substTyUnchecked subst ty
go subst ty all_args@(arg:args)
| Just ty' <- coreView ty
= go subst ty' all_args
| FunTy { ft_res = res } <- ty
= go subst res args
| ForAllTy (Bndr tv _) res <- ty
= go (extendTCvSubst subst tv arg) res args
| not (isEmptyTCvSubst subst) -- See Note [Care with kind instantiation]
= go init_subst
(substTy subst ty)
all_args
| otherwise
= -- We have not run out of arguments, but the function doesn't
-- have the right kind to apply to them; so panic.
-- Without the explicit isEmptyVarEnv test, an ill-kinded type
-- would give an infinite loop, which is very unhelpful
-- c.f. #15473
pprPanic "piResultTys2" (ppr ty $$ ppr orig_args $$ ppr all_args)
applyTysX :: [TyVar] -> Type -> [Type] -> Type
-- applyTysX beta-reduces (/\tvs. body_ty) arg_tys
-- Assumes that (/\tvs. body_ty) is closed
applyTysX tvs body_ty arg_tys
= ASSERT2( arg_tys `lengthAtLeast` n_tvs, pp_stuff )
ASSERT2( tyCoVarsOfType body_ty `subVarSet` mkVarSet tvs, pp_stuff )
mkAppTys (substTyWith tvs (take n_tvs arg_tys) body_ty)
(drop n_tvs arg_tys)
where
pp_stuff = vcat [ppr tvs, ppr body_ty, ppr arg_tys]
n_tvs = length tvs
{- Note [Care with kind instantiation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
T :: forall k. k
and we are finding the kind of
T (forall b. b -> b) * Int
Then
T (forall b. b->b) :: k[ k :-> forall b. b->b]
:: forall b. b -> b
So
T (forall b. b->b) * :: (b -> b)[ b :-> *]
:: * -> *
In other words we must instantiate the forall!
Similarly (#15428)
S :: forall k f. k -> f k
and we are finding the kind of
S * (* ->) Int Bool
We have
S * (* ->) :: (k -> f k)[ k :-> *, f :-> (* ->)]
:: * -> * -> *
So again we must instantiate.
The same thing happens in GHC.CoreToIface.toIfaceAppArgsX.
---------------------------------------------------------------------
TyConApp
~~~~~~~~
-}
-- | A key function: builds a 'TyConApp' or 'FunTy' as appropriate to
-- its arguments. Applies its arguments to the constructor from left to right.
mkTyConApp :: TyCon -> [Type] -> Type
mkTyConApp tycon tys
| isFunTyCon tycon
, [_rep1,_rep2,ty1,ty2] <- tys
= FunTy { ft_af = VisArg, ft_arg = ty1, ft_res = ty2 }
-- The FunTyCon (->) is always a visible one
| otherwise
= TyConApp tycon tys
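-- Added illustration of the FunTy invariant (not in the original source;
-- intTy, boolTy and liftedRepTy name wired-in types only for exposition):
--   mkTyConApp funTyCon [liftedRepTy, liftedRepTy, intTy, boolTy]
--     ==  FunTy { ft_af = VisArg, ft_arg = intTy, ft_res = boolTy }
-- so a saturated (->) application is never represented as a TyConApp.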
-- splitTyConApp "looks through" synonyms, because they don't
-- mean a distinct type, but all other type-constructor applications
-- including functions are returned as Just ..
-- | Retrieve the tycon heading this type, if there is one. Does /not/
-- look through synonyms.
tyConAppTyConPicky_maybe :: Type -> Maybe TyCon
tyConAppTyConPicky_maybe (TyConApp tc _) = Just tc
tyConAppTyConPicky_maybe (FunTy {}) = Just funTyCon
tyConAppTyConPicky_maybe _ = Nothing
-- | The same as @fst . splitTyConApp@
tyConAppTyCon_maybe :: Type -> Maybe TyCon
tyConAppTyCon_maybe ty | Just ty' <- coreView ty = tyConAppTyCon_maybe ty'
tyConAppTyCon_maybe (TyConApp tc _) = Just tc
tyConAppTyCon_maybe (FunTy {}) = Just funTyCon
tyConAppTyCon_maybe _ = Nothing
tyConAppTyCon :: Type -> TyCon
tyConAppTyCon ty = tyConAppTyCon_maybe ty `orElse` pprPanic "tyConAppTyCon" (ppr ty)
-- | The same as @snd . splitTyConApp@
tyConAppArgs_maybe :: Type -> Maybe [Type]
tyConAppArgs_maybe ty | Just ty' <- coreView ty = tyConAppArgs_maybe ty'
tyConAppArgs_maybe (TyConApp _ tys) = Just tys
tyConAppArgs_maybe (FunTy _ arg res)
| Just rep1 <- getRuntimeRep_maybe arg
, Just rep2 <- getRuntimeRep_maybe res
= Just [rep1, rep2, arg, res]
tyConAppArgs_maybe _ = Nothing
tyConAppArgs :: Type -> [Type]
tyConAppArgs ty = tyConAppArgs_maybe ty `orElse` pprPanic "tyConAppArgs" (ppr ty)
tyConAppArgN :: Int -> Type -> Type
-- Executing Nth
tyConAppArgN n ty
= case tyConAppArgs_maybe ty of
Just tys -> ASSERT2( tys `lengthExceeds` n, ppr n <+> ppr tys ) tys `getNth` n
Nothing -> pprPanic "tyConAppArgN" (ppr n <+> ppr ty)
-- | Attempts to tease a type apart into a type constructor and the application
-- of a number of arguments to that constructor. Panics if that is not possible.
-- See also 'splitTyConApp_maybe'
splitTyConApp :: Type -> (TyCon, [Type])
splitTyConApp ty = case splitTyConApp_maybe ty of
Just stuff -> stuff
Nothing -> pprPanic "splitTyConApp" (ppr ty)
-- | Attempts to tease a type apart into a type constructor and the application
-- of a number of arguments to that constructor
splitTyConApp_maybe :: HasDebugCallStack => Type -> Maybe (TyCon, [Type])
splitTyConApp_maybe ty | Just ty' <- coreView ty = splitTyConApp_maybe ty'
splitTyConApp_maybe ty = repSplitTyConApp_maybe ty
-- | Split a type constructor application into its type constructor and
-- applied types. Note that this may fail in the case of a 'FunTy' with an
-- argument of unknown kind (e.g. @FunTy (a :: k) Int@), since the kind of
-- @a@ isn't of the form @TYPE rep@. Consequently, you may need to zonk your
-- type before using this function.
--
-- If you only need the 'TyCon', consider using 'tcTyConAppTyCon_maybe'.
tcSplitTyConApp_maybe :: HasCallStack => Type -> Maybe (TyCon, [Type])
-- Defined here to avoid module loops between Unify and TcType.
tcSplitTyConApp_maybe ty | Just ty' <- tcView ty = tcSplitTyConApp_maybe ty'
tcSplitTyConApp_maybe ty = repSplitTyConApp_maybe ty
-------------------
repSplitTyConApp_maybe :: HasDebugCallStack => Type -> Maybe (TyCon, [Type])
-- ^ Like 'splitTyConApp_maybe', but doesn't look through synonyms. This
-- assumes the synonyms have already been dealt with.
--
-- Moreover, for a FunTy, it only succeeds if the argument types
-- have enough info to extract the runtime-rep arguments that
-- the funTyCon requires. This will usually be true;
-- but may be temporarily false during canonicalization:
-- see Note [FunTy and decomposing tycon applications] in TcCanonical
--
repSplitTyConApp_maybe (TyConApp tc tys) = Just (tc, tys)
repSplitTyConApp_maybe (FunTy _ arg res)
| Just arg_rep <- getRuntimeRep_maybe arg
, Just res_rep <- getRuntimeRep_maybe res
= Just (funTyCon, [arg_rep, res_rep, arg, res])
repSplitTyConApp_maybe _ = Nothing
-------------------
-- | Attempts to tease a list type apart and gives the type of the elements if
-- successful (looks through type synonyms)
splitListTyConApp_maybe :: Type -> Maybe Type
splitListTyConApp_maybe ty = case splitTyConApp_maybe ty of
Just (tc,[e]) | tc == listTyCon -> Just e
_other -> Nothing
nextRole :: Type -> Role
nextRole ty
| Just (tc, tys) <- splitTyConApp_maybe ty
, let num_tys = length tys
, num_tys < tyConArity tc
= tyConRoles tc `getNth` num_tys
| otherwise
= Nominal
newTyConInstRhs :: TyCon -> [Type] -> Type
-- ^ Unwrap one 'layer' of newtype on a type constructor and its
-- arguments, using an eta-reduced version of the @newtype@ if possible.
-- This requires tys to have at least @newTyConInstArity tycon@ elements.
newTyConInstRhs tycon tys
= ASSERT2( tvs `leLength` tys, ppr tycon $$ ppr tys $$ ppr tvs )
applyTysX tvs rhs tys
where
(tvs, rhs) = newTyConEtadRhs tycon
{-
---------------------------------------------------------------------
CastTy
~~~~~~
A casted type has its *kind* casted into something new.
-}
splitCastTy_maybe :: Type -> Maybe (Type, Coercion)
splitCastTy_maybe ty | Just ty' <- coreView ty = splitCastTy_maybe ty'
splitCastTy_maybe (CastTy ty co) = Just (ty, co)
splitCastTy_maybe _ = Nothing
-- | Make a 'CastTy'. The Coercion must be nominal. Checks the
-- Coercion for reflexivity, dropping it if it's reflexive.
-- See Note [Respecting definitional equality] in TyCoRep
mkCastTy :: Type -> Coercion -> Type
mkCastTy ty co | isReflexiveCo co = ty -- (EQ2) from the Note
-- NB: Do the slow check here. This is important to keep the splitXXX
-- functions working properly. Otherwise, we may end up with something
-- like (((->) |> something_reflexive_but_not_obviously_so) biz baz)
-- which fails under splitFunTy_maybe. This happened with the cheaper check
-- in test dependent/should_compile/dynamic-paper.
mkCastTy (CastTy ty co1) co2
-- (EQ3) from the Note
= mkCastTy ty (co1 `mkTransCo` co2)
-- call mkCastTy again for the reflexivity check
mkCastTy (ForAllTy (Bndr tv vis) inner_ty) co
-- (EQ4) from the Note
| isTyVar tv
, let fvs = tyCoVarsOfCo co
= -- have to make sure that pushing the co in doesn't capture the bound var!
if tv `elemVarSet` fvs
then let empty_subst = mkEmptyTCvSubst (mkInScopeSet fvs)
(subst, tv') = substVarBndr empty_subst tv
in ForAllTy (Bndr tv' vis) (substTy subst inner_ty `mkCastTy` co)
else ForAllTy (Bndr tv vis) (inner_ty `mkCastTy` co)
mkCastTy ty co = CastTy ty co
tyConBindersTyCoBinders :: [TyConBinder] -> [TyCoBinder]
-- Return the tyConBinders in TyCoBinder form
tyConBindersTyCoBinders = map to_tyb
where
to_tyb (Bndr tv (NamedTCB vis)) = Named (Bndr tv vis)
to_tyb (Bndr tv (AnonTCB af)) = Anon af (varType tv)
-- | Drop the cast on a type, if any. If there is no
-- cast, just return the original type. This is rarely what
-- you want. The CastTy data constructor (in TyCoRep) has the
-- invariant that another CastTy is not inside. See the
-- data constructor for a full description of this invariant.
-- Since CastTy cannot be nested, the result of discardCast
-- cannot be a CastTy.
discardCast :: Type -> Type
discardCast (CastTy ty _) = ASSERT(not (isCastTy ty)) ty
where
isCastTy CastTy{} = True
isCastTy _ = False
discardCast ty = ty
{-
--------------------------------------------------------------------
CoercionTy
~~~~~~~~~~
CoercionTy allows us to inject coercions into types. A CoercionTy
should appear only in the right-hand side of an application.
-}
mkCoercionTy :: Coercion -> Type
mkCoercionTy = CoercionTy
isCoercionTy :: Type -> Bool
isCoercionTy (CoercionTy _) = True
isCoercionTy _ = False
isCoercionTy_maybe :: Type -> Maybe Coercion
isCoercionTy_maybe (CoercionTy co) = Just co
isCoercionTy_maybe _ = Nothing
stripCoercionTy :: Type -> Coercion
stripCoercionTy (CoercionTy co) = co
stripCoercionTy ty = pprPanic "stripCoercionTy" (ppr ty)
{-
---------------------------------------------------------------------
SynTy
~~~~~
Notes on type synonyms
~~~~~~~~~~~~~~~~~~~~~~
The various "split" functions (splitFunTy, splitRhoTy, splitForAllTy) try
to return type synonyms wherever possible. Thus
type Foo a = a -> a
we want
splitFunTys (a -> Foo a) = ([a], Foo a)
not ([a], a -> a)
The reason is that we then get better (shorter) type signatures in
interfaces. Notably this plays a role in tcTySigs in TcBinds.hs.
---------------------------------------------------------------------
ForAllTy
~~~~~~~~
-}
-- | Make a dependent forall over an 'Inferred' variable
mkTyCoInvForAllTy :: TyCoVar -> Type -> Type
mkTyCoInvForAllTy tv ty
| isCoVar tv
, not (tv `elemVarSet` tyCoVarsOfType ty)
= mkVisFunTy (varType tv) ty
| otherwise
= ForAllTy (Bndr tv Inferred) ty
-- | Like 'mkTyCoInvForAllTy', but tv should be a tyvar
mkInvForAllTy :: TyVar -> Type -> Type
mkInvForAllTy tv ty = ASSERT( isTyVar tv )
ForAllTy (Bndr tv Inferred) ty
-- | Like 'mkForAllTys', but assumes all variables are dependent and
-- 'Inferred', a common case
mkTyCoInvForAllTys :: [TyCoVar] -> Type -> Type
mkTyCoInvForAllTys tvs ty = foldr mkTyCoInvForAllTy ty tvs
-- | Like 'mkTyCoInvForAllTys', but tvs should be a list of tyvar
mkInvForAllTys :: [TyVar] -> Type -> Type
mkInvForAllTys tvs ty = foldr mkInvForAllTy ty tvs
-- | Like 'mkForAllTy', but assumes the variable is dependent and 'Specified',
-- a common case
mkSpecForAllTy :: TyVar -> Type -> Type
mkSpecForAllTy tv ty = ASSERT( isTyVar tv )
-- covar is always Inferred, so input should be tyvar
ForAllTy (Bndr tv Specified) ty
-- | Like 'mkForAllTys', but assumes all variables are dependent and
-- 'Specified', a common case
mkSpecForAllTys :: [TyVar] -> Type -> Type
mkSpecForAllTys tvs ty = foldr mkSpecForAllTy ty tvs
-- | Like mkForAllTys, but assumes all variables are dependent and visible
mkVisForAllTys :: [TyVar] -> Type -> Type
mkVisForAllTys tvs = ASSERT( all isTyVar tvs )
-- covar is always Inferred, so all inputs should be tyvar
mkForAllTys [ Bndr tv Required | tv <- tvs ]
mkLamType :: Var -> Type -> Type
-- ^ Makes a @(->)@ type or an implicit forall type, depending
-- on whether it is given a type variable or a term variable.
-- This is used, for example, when producing the type of a lambda.
-- Always uses Inferred binders.
mkLamTypes :: [Var] -> Type -> Type
-- ^ 'mkLamType' for multiple type or value arguments
mkLamType v body_ty
| isTyVar v
= ForAllTy (Bndr v Inferred) body_ty
| isCoVar v
, v `elemVarSet` tyCoVarsOfType body_ty
= ForAllTy (Bndr v Required) body_ty
| isPredTy arg_ty -- See Note [mkLamType: dictionary arguments]
= mkInvisFunTy arg_ty body_ty
| otherwise
= mkVisFunTy arg_ty body_ty
where
arg_ty = varType v
mkLamTypes vs ty = foldr mkLamType ty vs
{- Note [mkLamType: dictionary arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have (\ (d :: Ord a). blah), we want to give it type
(Ord a => blah_ty)
with a fat arrow; that is, using mkInvisFunTy, not mkVisFunTy.
Why? After all, we are in Core, where (=>) and (->) behave the same.
Yes, but the /specialiser/ does treat dictionary arguments specially.
Suppose we do w/w on 'foo' in module A, thus (#11272, #6056)
foo :: Ord a => Int -> blah
foo a d x = case x of I# x' -> $wfoo @a d x'
$wfoo :: Ord a => Int# -> blah
Now in module B we see (foo @Int dOrdInt). The specialiser will
specialise this to $sfoo, where
$sfoo :: Int -> blah
$sfoo x = case x of I# x' -> $wfoo @Int dOrdInt x'
Now we /must/ also specialise $wfoo! But it wasn't user-written,
and has a type built with mkLamTypes.
Conclusion: the easiest thing is to make mkLamType build
(c => ty)
when the argument is a predicate type. See TyCoRep
Note [Types for coercions, predicates, and evidence]
-}
-- | Given a list of type-level vars and the free vars of a result kind,
-- makes TyCoBinders, preferring anonymous binders
-- if the variable is, in fact, not dependent.
-- e.g. mkTyConBindersPreferAnon [(k:*),(b:k),(c:k)] (k->k)
-- We want (k:*) Named, (b:k) Anon, (c:k) Anon
--
-- All non-coercion binders are /visible/.
mkTyConBindersPreferAnon :: [TyVar] -- ^ binders
-> TyCoVarSet -- ^ free variables of result
-> [TyConBinder]
mkTyConBindersPreferAnon vars inner_tkvs = ASSERT( all isTyVar vars)
fst (go vars)
where
go :: [TyVar] -> ([TyConBinder], VarSet) -- also returns the free vars
go [] = ([], inner_tkvs)
go (v:vs) | v `elemVarSet` fvs
= ( Bndr v (NamedTCB Required) : binders
, fvs `delVarSet` v `unionVarSet` kind_vars )
| otherwise
= ( Bndr v (AnonTCB VisArg) : binders
, fvs `unionVarSet` kind_vars )
where
(binders, fvs) = go vs
kind_vars = tyCoVarsOfType $ tyVarKind v
-- | Take a ForAllTy apart, returning the list of tycovars and the result type.
-- This always succeeds, even if it returns only an empty list. Note that the
-- result type returned may have free variables that were bound by a forall.
splitForAllTys :: Type -> ([TyCoVar], Type)
splitForAllTys ty = split ty ty []
where
split orig_ty ty tvs | Just ty' <- coreView ty = split orig_ty ty' tvs
split _ (ForAllTy (Bndr tv _) ty) tvs = split ty ty (tv:tvs)
split orig_ty _ tvs = (reverse tvs, orig_ty)
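-- Added illustration (not in the original source):
--   splitForAllTys (forall a b. a -> b)  ==  ([a, b], a -> b)
-- Synonyms wrapping the foralls are looked through via coreView, and binders
-- of every visibility are collected.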
-- | Like 'splitForAllTys', but only splits a 'ForAllTy' if
-- @'sameVis' argf supplied_argf@ is 'True', where @argf@ is the visibility
-- of the @ForAllTy@'s binder and @supplied_argf@ is the visibility provided
-- as an argument to this function.
splitForAllTysSameVis :: ArgFlag -> Type -> ([TyCoVar], Type)
splitForAllTysSameVis supplied_argf ty = split ty ty []
where
split orig_ty ty tvs | Just ty' <- coreView ty = split orig_ty ty' tvs
split _ (ForAllTy (Bndr tv argf) ty) tvs
| argf `sameVis` supplied_argf = split ty ty (tv:tvs)
split orig_ty _ tvs = (reverse tvs, orig_ty)
-- | Like splitForAllTys, but split only for tyvars.
-- This always succeeds, even if it returns only an empty list. Note that the
-- result type returned may have free variables that were bound by a forall.
splitTyVarForAllTys :: Type -> ([TyVar], Type)
splitTyVarForAllTys ty = split ty ty []
where
split orig_ty ty tvs | Just ty' <- coreView ty = split orig_ty ty' tvs
split _ (ForAllTy (Bndr tv _) ty) tvs | isTyVar tv = split ty ty (tv:tvs)
split orig_ty _ tvs = (reverse tvs, orig_ty)
-- | Checks whether this is a proper forall (with a named binder)
isForAllTy :: Type -> Bool
isForAllTy ty | Just ty' <- coreView ty = isForAllTy ty'
isForAllTy (ForAllTy {}) = True
isForAllTy _ = False
-- | Like `isForAllTy`, but returns True only if it is a tyvar binder
isForAllTy_ty :: Type -> Bool
isForAllTy_ty ty | Just ty' <- coreView ty = isForAllTy_ty ty'
isForAllTy_ty (ForAllTy (Bndr tv _) _) | isTyVar tv = True
isForAllTy_ty _ = False
-- | Like `isForAllTy`, but returns True only if it is a covar binder
isForAllTy_co :: Type -> Bool
isForAllTy_co ty | Just ty' <- coreView ty = isForAllTy_co ty'
isForAllTy_co (ForAllTy (Bndr tv _) _) | isCoVar tv = True
isForAllTy_co _ = False
-- | Is this a function or forall?
isPiTy :: Type -> Bool
isPiTy ty | Just ty' <- coreView ty = isPiTy ty'
isPiTy (ForAllTy {}) = True
isPiTy (FunTy {}) = True
isPiTy _ = False
-- | Is this a function?
isFunTy :: Type -> Bool
isFunTy ty | Just ty' <- coreView ty = isFunTy ty'
isFunTy (FunTy {}) = True
isFunTy _ = False
-- | Take a forall type apart, or panics if that is not possible.
splitForAllTy :: Type -> (TyCoVar, Type)
splitForAllTy ty
| Just answer <- splitForAllTy_maybe ty = answer
| otherwise = pprPanic "splitForAllTy" (ppr ty)
-- | Drops all ForAllTys
dropForAlls :: Type -> Type
dropForAlls ty = go ty
where
go ty | Just ty' <- coreView ty = go ty'
go (ForAllTy _ res) = go res
go res = res
-- | Attempts to take a forall type apart, but only if it's a proper forall,
-- with a named binder
splitForAllTy_maybe :: Type -> Maybe (TyCoVar, Type)
splitForAllTy_maybe ty = go ty
where
go ty | Just ty' <- coreView ty = go ty'
go (ForAllTy (Bndr tv _) ty) = Just (tv, ty)
go _ = Nothing
-- | Like splitForAllTy_maybe, but only returns Just if it is a tyvar binder.
splitForAllTy_ty_maybe :: Type -> Maybe (TyCoVar, Type)
splitForAllTy_ty_maybe ty = go ty
where
go ty | Just ty' <- coreView ty = go ty'
go (ForAllTy (Bndr tv _) ty) | isTyVar tv = Just (tv, ty)
go _ = Nothing
-- | Like splitForAllTy_maybe, but only returns Just if it is a covar binder.
splitForAllTy_co_maybe :: Type -> Maybe (TyCoVar, Type)
splitForAllTy_co_maybe ty = go ty
where
go ty | Just ty' <- coreView ty = go ty'
go (ForAllTy (Bndr tv _) ty) | isCoVar tv = Just (tv, ty)
go _ = Nothing
-- | Attempts to take a forall type apart; works with proper foralls and
-- functions
splitPiTy_maybe :: Type -> Maybe (TyCoBinder, Type)
splitPiTy_maybe ty = go ty
where
go ty | Just ty' <- coreView ty = go ty'
go (ForAllTy bndr ty) = Just (Named bndr, ty)
go (FunTy { ft_af = af, ft_arg = arg, ft_res = res})
= Just (Anon af arg, res)
go _ = Nothing
-- | Takes a forall type apart, or panics
splitPiTy :: Type -> (TyCoBinder, Type)
splitPiTy ty
| Just answer <- splitPiTy_maybe ty = answer
| otherwise = pprPanic "splitPiTy" (ppr ty)
-- | Split off all TyCoBinders to a type, splitting both proper foralls
-- and functions
splitPiTys :: Type -> ([TyCoBinder], Type)
splitPiTys ty = split ty ty []
where
split orig_ty ty bs | Just ty' <- coreView ty = split orig_ty ty' bs
split _ (ForAllTy b res) bs = split res res (Named b : bs)
split _ (FunTy { ft_af = af, ft_arg = arg, ft_res = res }) bs
= split res res (Anon af arg : bs)
split orig_ty _ bs = (reverse bs, orig_ty)
-- | Like 'splitPiTys' but split off only /named/ binders
-- and returns TyCoVarBinders rather than TyCoBinders
splitForAllVarBndrs :: Type -> ([TyCoVarBinder], Type)
splitForAllVarBndrs ty = split ty ty []
where
split orig_ty ty bs | Just ty' <- coreView ty = split orig_ty ty' bs
split _ (ForAllTy b res) bs = split res res (b:bs)
split orig_ty _ bs = (reverse bs, orig_ty)
{-# INLINE splitForAllVarBndrs #-}
invisibleTyBndrCount :: Type -> Int
-- Returns the number of leading invisible forall'd binders in the type
-- Includes invisible predicate arguments; e.g. for
--    forall {k}. (k ~ *) => k -> k
-- returns 2 not 1
invisibleTyBndrCount ty = length (fst (splitPiTysInvisible ty))
-- Like splitPiTys, but returns only *invisible* binders, including constraints
-- Stops at the first visible binder
splitPiTysInvisible :: Type -> ([TyCoBinder], Type)
splitPiTysInvisible ty = split ty ty []
where
split orig_ty ty bs
| Just ty' <- coreView ty = split orig_ty ty' bs
split _ (ForAllTy b res) bs
| Bndr _ vis <- b
, isInvisibleArgFlag vis = split res res (Named b : bs)
split _ (FunTy { ft_af = InvisArg, ft_arg = arg, ft_res = res }) bs
= split res res (Anon InvisArg arg : bs)
split orig_ty _ bs = (reverse bs, orig_ty)
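-- Illustration (added for exposition; not part of the original source):
-- for (forall a. Eq a => a -> a) the invisible prefix is the binder for 'a'
-- together with the (Eq a) constraint, so informally
--   splitPiTysInvisible (forall a. Eq a => a -> a)
--     ==  ([Named a, Anon InvisArg (Eq a)], a -> a)
-- and splitting stops at the first visible arrow.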
splitPiTysInvisibleN :: Int -> Type -> ([TyCoBinder], Type)
-- Same as splitPiTysInvisible, but stop when
-- - you have found 'n' TyCoBinders,
-- - or you run out of invisible binders
splitPiTysInvisibleN n ty = split n ty ty []
where
split n orig_ty ty bs
| n == 0 = (reverse bs, orig_ty)
| Just ty' <- coreView ty = split n orig_ty ty' bs
| ForAllTy b res <- ty
, Bndr _ vis <- b
, isInvisibleArgFlag vis = split (n-1) res res (Named b : bs)
| FunTy { ft_af = InvisArg, ft_arg = arg, ft_res = res } <- ty
= split (n-1) res res (Anon InvisArg arg : bs)
| otherwise = (reverse bs, orig_ty)
-- | Given a 'TyCon' and a list of argument types, filter out any invisible
-- (i.e., 'Inferred' or 'Specified') arguments.
filterOutInvisibleTypes :: TyCon -> [Type] -> [Type]
filterOutInvisibleTypes tc tys = snd $ partitionInvisibleTypes tc tys
-- | Given a 'TyCon' and a list of argument types, filter out any 'Inferred'
-- arguments.
filterOutInferredTypes :: TyCon -> [Type] -> [Type]
filterOutInferredTypes tc tys =
filterByList (map (/= Inferred) $ tyConArgFlags tc tys) tys
-- | Given a 'TyCon' and a list of argument types, partition the arguments
-- into:
--
-- 1. 'Inferred' or 'Specified' (i.e., invisible) arguments and
--
-- 2. 'Required' (i.e., visible) arguments
partitionInvisibleTypes :: TyCon -> [Type] -> ([Type], [Type])
partitionInvisibleTypes tc tys =
partitionByList (map isInvisibleArgFlag $ tyConArgFlags tc tys) tys
-- | Given a list of things paired with their visibilities, partition the
-- things into (invisible things, visible things).
partitionInvisibles :: [(a, ArgFlag)] -> ([a], [a])
partitionInvisibles = partitionWith pick_invis
where
pick_invis :: (a, ArgFlag) -> Either a a
pick_invis (thing, vis) | isInvisibleArgFlag vis = Left thing
| otherwise = Right thing
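-- Illustration (added for exposition; not part of the original source), using
-- the example from 'appTyArgFlags' below: for f :: forall a. a -> Type the
-- visibilities of (f Type Bool) are [Specified, Required], so
--   partitionInvisibles [(Type, Specified), (Bool, Required)] == ([Type], [Bool])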
-- | Given a 'TyCon' and a list of argument types to which the 'TyCon' is
-- applied, determine each argument's visibility
-- ('Inferred', 'Specified', or 'Required').
--
-- Wrinkle: consider the following scenario:
--
-- > T :: forall k. k -> k
-- > tyConArgFlags T [forall m. m -> m -> m, S, R, Q]
--
-- After substituting, we get
--
-- > T (forall m. m -> m -> m) :: (forall m. m -> m -> m) -> forall n. n -> n -> n
--
-- Thus, the first argument is invisible, @S@ is visible, @R@ is invisible again,
-- and @Q@ is visible.
tyConArgFlags :: TyCon -> [Type] -> [ArgFlag]
tyConArgFlags tc = fun_kind_arg_flags (tyConKind tc)
-- | Given a 'Type' and a list of argument types to which the 'Type' is
-- applied, determine each argument's visibility
-- ('Inferred', 'Specified', or 'Required').
--
-- Most of the time, the arguments will be 'Required', but not always. Consider
-- @f :: forall a. a -> Type@. In @f Type Bool@, the first argument (@Type@) is
-- 'Specified' and the second argument (@Bool@) is 'Required'. It is precisely
-- this sort of higher-rank situation in which 'appTyArgFlags' comes in handy,
-- since @f Type Bool@ would be represented in Core using 'AppTy's.
-- (See also #15792).
appTyArgFlags :: Type -> [Type] -> [ArgFlag]
appTyArgFlags ty = fun_kind_arg_flags (typeKind ty)
-- | Given a function kind and a list of argument types (where each argument's
-- kind aligns with the corresponding position in the argument kind), determine
-- each argument's visibility ('Inferred', 'Specified', or 'Required').
fun_kind_arg_flags :: Kind -> [Type] -> [ArgFlag]
fun_kind_arg_flags = go emptyTCvSubst
where
go subst ki arg_tys
| Just ki' <- coreView ki = go subst ki' arg_tys
go _ _ [] = []
go subst (ForAllTy (Bndr tv argf) res_ki) (arg_ty:arg_tys)
= argf : go subst' res_ki arg_tys
where
subst' = extendTvSubst subst tv arg_ty
go subst (TyVarTy tv) arg_tys
| Just ki <- lookupTyVar subst tv = go subst ki arg_tys
-- This FunTy case is important to handle kinds with nested foralls, such
-- as this kind (inspired by #16518):
--
-- forall {k1} k2. k1 -> k2 -> forall k3. k3 -> Type
--
-- Here, we want to get the following ArgFlags:
--
-- [Inferred, Specified, Required, Required, Specified, Required]
-- forall {k1}. forall k2. k1 -> k2 -> forall k3. k3 -> Type
go subst (FunTy{ft_af = af, ft_res = res_ki}) (_:arg_tys)
= argf : go subst res_ki arg_tys
where
argf = case af of
VisArg -> Required
InvisArg -> Inferred
go _ _ arg_tys = map (const Required) arg_tys
-- something is ill-kinded. But this can happen
-- when printing errors. Assume everything is Required.
-- @isTauTy@ tests if a type has no foralls
isTauTy :: Type -> Bool
isTauTy ty | Just ty' <- coreView ty = isTauTy ty'
isTauTy (TyVarTy _) = True
isTauTy (LitTy {}) = True
isTauTy (TyConApp tc tys) = all isTauTy tys && isTauTyCon tc
isTauTy (AppTy a b) = isTauTy a && isTauTy b
isTauTy (FunTy _ a b) = isTauTy a && isTauTy b
isTauTy (ForAllTy {}) = False
isTauTy (CastTy ty _) = isTauTy ty
isTauTy (CoercionTy _) = False -- Not sure about this
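-- Illustration (added for exposition; not part of the original source):
--   isTauTy (Int -> Bool)       == True
--   isTauTy (forall a. a -> a)  == False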
{-
%************************************************************************
%* *
TyCoBinders
%* *
%************************************************************************
-}
-- | Make an anonymous binder
mkAnonBinder :: AnonArgFlag -> Type -> TyCoBinder
mkAnonBinder = Anon
-- | Does this binder bind a variable that is /not/ erased? Returns
-- 'True' for anonymous binders.
isAnonTyCoBinder :: TyCoBinder -> Bool
isAnonTyCoBinder (Named {}) = False
isAnonTyCoBinder (Anon {}) = True
tyCoBinderVar_maybe :: TyCoBinder -> Maybe TyCoVar
tyCoBinderVar_maybe (Named tv) = Just $ binderVar tv
tyCoBinderVar_maybe _ = Nothing
tyCoBinderType :: TyCoBinder -> Type
tyCoBinderType (Named tvb) = binderType tvb
tyCoBinderType (Anon _ ty) = ty
tyBinderType :: TyBinder -> Type
tyBinderType (Named (Bndr tv _))
= ASSERT( isTyVar tv )
tyVarKind tv
tyBinderType (Anon _ ty) = ty
-- | Extract a relevant type, if there is one.
binderRelevantType_maybe :: TyCoBinder -> Maybe Type
binderRelevantType_maybe (Named {}) = Nothing
binderRelevantType_maybe (Anon _ ty) = Just ty
------------- Closing over kinds -----------------
-- | Add the kind variables free in the kinds of the tyvars in the given set.
-- Returns a non-deterministic set.
closeOverKinds :: TyVarSet -> TyVarSet
closeOverKinds = fvVarSet . closeOverKindsFV . nonDetEltsUniqSet
-- It's OK to use nonDetEltsUniqSet here because we immediately forget
-- about the ordering by returning a set.
-- | Given a list of tyvars returns a deterministic FV computation that
-- returns the given tyvars with the kind variables free in the kinds of the
-- given tyvars.
closeOverKindsFV :: [TyVar] -> FV
closeOverKindsFV tvs =
mapUnionFV (tyCoFVsOfType . tyVarKind) tvs `unionFV` mkFVs tvs
-- | Add the kind variables free in the kinds of the tyvars in the given set.
-- Returns a deterministically ordered list.
closeOverKindsList :: [TyVar] -> [TyVar]
closeOverKindsList tvs = fvVarList $ closeOverKindsFV tvs
-- | Add the kind variables free in the kinds of the tyvars in the given set.
-- Returns a deterministic set.
closeOverKindsDSet :: DTyVarSet -> DTyVarSet
closeOverKindsDSet = fvDVarSet . closeOverKindsFV . dVarSetElems
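-- Illustration (added for exposition; not part of the original source):
-- if (a :: k) then closeOverKinds {a} contains both 'a' and 'k', because the
-- kind variable 'k' occurs free in the kind of 'a'.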
{-
************************************************************************
* *
\subsection{Type families}
* *
************************************************************************
-}
mkFamilyTyConApp :: TyCon -> [Type] -> Type
-- ^ Given a family instance TyCon and its arg types, return the
-- corresponding family type. E.g:
--
-- > data family T a
-- > data instance T (Maybe b) = MkT b
--
-- Where the instance tycon is :RTL, so:
--
-- > mkFamilyTyConApp :RTL Int = T (Maybe Int)
mkFamilyTyConApp tc tys
| Just (fam_tc, fam_tys) <- tyConFamInst_maybe tc
, let tvs = tyConTyVars tc
fam_subst = ASSERT2( tvs `equalLength` tys, ppr tc <+> ppr tys )
zipTvSubst tvs tys
= mkTyConApp fam_tc (substTys fam_subst fam_tys)
| otherwise
= mkTyConApp tc tys
-- | Get the type on the LHS of a coercion induced by a type/data
-- family instance.
coAxNthLHS :: CoAxiom br -> Int -> Type
coAxNthLHS ax ind =
mkTyConApp (coAxiomTyCon ax) (coAxBranchLHS (coAxiomNthBranch ax ind))
isFamFreeTy :: Type -> Bool
isFamFreeTy ty | Just ty' <- coreView ty = isFamFreeTy ty'
isFamFreeTy (TyVarTy _) = True
isFamFreeTy (LitTy {}) = True
isFamFreeTy (TyConApp tc tys) = all isFamFreeTy tys && isFamFreeTyCon tc
isFamFreeTy (AppTy a b) = isFamFreeTy a && isFamFreeTy b
isFamFreeTy (FunTy _ a b) = isFamFreeTy a && isFamFreeTy b
isFamFreeTy (ForAllTy _ ty) = isFamFreeTy ty
isFamFreeTy (CastTy ty _) = isFamFreeTy ty
isFamFreeTy (CoercionTy _) = False -- Not sure about this
-- | Does this type classify a core (unlifted) Coercion?
-- At either role nominal or representational
-- (t1 ~# t2) or (t1 ~R# t2)
-- See Note [Types for coercions, predicates, and evidence] in TyCoRep
isCoVarType :: Type -> Bool
-- ToDo: should we check saturation?
isCoVarType ty
| Just tc <- tyConAppTyCon_maybe ty
= tc `hasKey` eqPrimTyConKey || tc `hasKey` eqReprPrimTyConKey
| otherwise
= False
{-
************************************************************************
* *
\subsection{Liftedness}
* *
************************************************************************
-}
-- | Returns Just True if this type is surely lifted, Just False
-- if it is surely unlifted, Nothing if we can't be sure (i.e., it is
-- levity polymorphic), and panics if the kind does not have the shape
-- TYPE r.
isLiftedType_maybe :: HasDebugCallStack => Type -> Maybe Bool
isLiftedType_maybe ty = go (getRuntimeRep ty)
where
go rr | Just rr' <- coreView rr = go rr'
| isLiftedRuntimeRep rr = Just True
| TyConApp {} <- rr = Just False -- Everything else is unlifted
| otherwise = Nothing -- levity polymorphic
-- | See "Type#type_classification" for what an unlifted type is.
-- Panics on levity polymorphic types; See 'mightBeUnliftedType' for
-- a more approximate predicate that behaves better in the presence of
-- levity polymorphism.
isUnliftedType :: HasDebugCallStack => Type -> Bool
-- isUnliftedType returns True for forall'd unlifted types:
-- x :: forall a. Int#
-- I found bindings like these were getting floated to the top level.
-- They are pretty bogus types, mind you. It would be better never to
-- construct them
isUnliftedType ty
= not (isLiftedType_maybe ty `orElse`
pprPanic "isUnliftedType" (ppr ty <+> dcolon <+> ppr (typeKind ty)))
-- | Returns:
--
-- * 'False' if the type is /guaranteed/ lifted or
-- * 'True' if it is unlifted, OR we aren't sure (e.g. in a levity-polymorphic case)
mightBeUnliftedType :: Type -> Bool
mightBeUnliftedType ty
= case isLiftedType_maybe ty of
Just is_lifted -> not is_lifted
Nothing -> True
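-- Illustration (added for exposition; not part of the original source):
--   isLiftedType_maybe Int            == Just True   -- Int  :: TYPE 'LiftedRep
--   isLiftedType_maybe Int#           == Just False  -- Int# :: TYPE 'IntRep
--   isLiftedType_maybe (a :: TYPE r)  == Nothing     -- levity polymorphic
-- and correspondingly mightBeUnliftedType answers False, True and True.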
-- | Is this a type of kind RuntimeRep? (e.g. LiftedRep)
isRuntimeRepKindedTy :: Type -> Bool
isRuntimeRepKindedTy = isRuntimeRepTy . typeKind
-- | Drops prefix of RuntimeRep constructors in 'TyConApp's. Useful for e.g.
-- dropping 'LiftedRep arguments of unboxed tuple TyCon applications:
--
-- dropRuntimeRepArgs [ 'LiftedRep, 'IntRep
-- , String, Int# ] == [String, Int#]
--
dropRuntimeRepArgs :: [Type] -> [Type]
dropRuntimeRepArgs = dropWhile isRuntimeRepKindedTy
-- | Extract the RuntimeRep classifier of a type. For instance,
-- @getRuntimeRep_maybe Int = LiftedRep@. Returns 'Nothing' if this is not
-- possible.
getRuntimeRep_maybe :: HasDebugCallStack
=> Type -> Maybe Type
getRuntimeRep_maybe = kindRep_maybe . typeKind
-- | Extract the RuntimeRep classifier of a type. For instance,
-- @getRuntimeRep_maybe Int = LiftedRep@. Panics if this is not possible.
getRuntimeRep :: HasDebugCallStack => Type -> Type
getRuntimeRep ty
= case getRuntimeRep_maybe ty of
Just r -> r
Nothing -> pprPanic "getRuntimeRep" (ppr ty <+> dcolon <+> ppr (typeKind ty))
isUnboxedTupleType :: Type -> Bool
isUnboxedTupleType ty
= tyConAppTyCon (getRuntimeRep ty) `hasKey` tupleRepDataConKey
-- NB: Do not use typePrimRep, as that can't tell the difference between
-- unboxed tuples and unboxed sums
isUnboxedSumType :: Type -> Bool
isUnboxedSumType ty
= tyConAppTyCon (getRuntimeRep ty) `hasKey` sumRepDataConKey
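-- Illustration (added for exposition; not part of the original source):
--   isUnboxedTupleType (# Int, Bool #)   == True
--   isUnboxedSumType   (# Int | Bool #)  == True
--   isUnboxedTupleType [Int]             == False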
-- | See "Type#type_classification" for what an algebraic type is.
-- Should only be applied to /types/, as opposed to e.g. partially
-- saturated type constructors
isAlgType :: Type -> Bool
isAlgType ty
= case splitTyConApp_maybe ty of
Just (tc, ty_args) -> ASSERT( ty_args `lengthIs` tyConArity tc )
isAlgTyCon tc
_other -> False
-- | Check whether a type is a data family type
isDataFamilyAppType :: Type -> Bool
isDataFamilyAppType ty = case tyConAppTyCon_maybe ty of
Just tc -> isDataFamilyTyCon tc
_ -> False
-- | Computes whether an argument (or let right hand side) should
-- be computed strictly or lazily, based only on its type.
-- Currently, it's just 'isUnliftedType'. Panics on levity-polymorphic types.
isStrictType :: HasDebugCallStack => Type -> Bool
isStrictType = isUnliftedType
isPrimitiveType :: Type -> Bool
-- ^ Returns true of types that are opaque to Haskell.
isPrimitiveType ty = case splitTyConApp_maybe ty of
Just (tc, ty_args) -> ASSERT( ty_args `lengthIs` tyConArity tc )
isPrimTyCon tc
_ -> False
{-
************************************************************************
* *
\subsection{Join points}
* *
************************************************************************
-}
-- | Determine whether a type could be the type of a join point of given total
-- arity, according to the polymorphism rule. A join point cannot be polymorphic
-- in its return type, since given
-- join j @a @b x y z = e1 in e2,
-- the types of e1 and e2 must be the same, and a and b are not in scope for e2.
-- (See Note [The polymorphism rule of join points] in CoreSyn.) Returns False
-- also if the type simply doesn't have enough arguments.
--
-- Note that we need to know how many arguments (type *and* value) the putative
-- join point takes; for instance, if
-- j :: forall a. a -> Int
-- then j could be a binary join point returning an Int, but it could *not* be a
-- unary join point returning a -> Int.
--
-- TODO: See Note [Excess polymorphism and join points]
isValidJoinPointType :: JoinArity -> Type -> Bool
isValidJoinPointType arity ty
= valid_under emptyVarSet arity ty
where
valid_under tvs arity ty
| arity == 0
= isEmptyVarSet (tvs `intersectVarSet` tyCoVarsOfType ty)
| Just (t, ty') <- splitForAllTy_maybe ty
= valid_under (tvs `extendVarSet` t) (arity-1) ty'
| Just (_, res_ty) <- splitFunTy_maybe ty
= valid_under tvs (arity-1) res_ty
| otherwise
= False
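-- Illustration (added for exposition; not part of the original source), making
-- the example from the comment above concrete:
--   isValidJoinPointType 2 (forall a. a -> Int)  == True   -- binary join point
--   isValidJoinPointType 1 (forall a. a -> Int)  == False  -- 'a' escapes into (a -> Int)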
{- Note [Excess polymorphism and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In principle, if a function would be a join point except that it fails
the polymorphism rule (see Note [The polymorphism rule of join points] in
CoreSyn), it can still be made a join point with some effort. This is because
all tail calls must return the same type (they return to the same context!), and
thus if the return type depends on an argument, that argument must always be the
same.
For instance, consider:
let f :: forall a. a -> Char -> [a]
f @a x c = ... f @a y 'a' ...
in ... f @Int 1 'b' ... f @Int 2 'c' ...
(where the calls are tail calls). `f` fails the polymorphism rule because its
return type is [a], where [a] is bound. But since the type argument is always
'Int', we can rewrite it as:
let f' :: Int -> Char -> [Int]
f' x c = ... f' y 'a' ...
in ... f' 1 'b' ... f 2 'c' ...
and now we can make f' a join point:
join f' :: Int -> Char -> [Int]
f' x c = ... jump f' y 'a' ...
in ... jump f' 1 'b' ... jump f' 2 'c' ...
It's not clear that this comes up often, however. TODO: Measure how often and
add this analysis if necessary. See #14620.
************************************************************************
* *
\subsection{Sequencing on types}
* *
************************************************************************
-}
seqType :: Type -> ()
seqType (LitTy n) = n `seq` ()
seqType (TyVarTy tv) = tv `seq` ()
seqType (AppTy t1 t2) = seqType t1 `seq` seqType t2
seqType (FunTy _ t1 t2) = seqType t1 `seq` seqType t2
seqType (TyConApp tc tys) = tc `seq` seqTypes tys
seqType (ForAllTy (Bndr tv _) ty) = seqType (varType tv) `seq` seqType ty
seqType (CastTy ty co) = seqType ty `seq` seqCo co
seqType (CoercionTy co) = seqCo co
seqTypes :: [Type] -> ()
seqTypes [] = ()
seqTypes (ty:tys) = seqType ty `seq` seqTypes tys
{-
************************************************************************
* *
Comparison for types
(We don't use instances so that we know where it happens)
* *
************************************************************************
Note [Equality on AppTys]
~~~~~~~~~~~~~~~~~~~~~~~~~
In our cast-ignoring equality, we want to say that the following two
are equal:
(Maybe |> co) (Int |> co') ~? Maybe Int
But the left is an AppTy while the right is a TyConApp. The solution is
to use repSplitAppTy_maybe to break up the TyConApp into its pieces and
then continue. Easy to do, but also easy to forget to do.
-}
eqType :: Type -> Type -> Bool
-- ^ Type equality on source types. Does not look through @newtypes@ or
-- 'PredType's, but it does look through type synonyms.
-- This first checks that the kinds of the types are equal and then
-- checks whether the types are equal, ignoring casts and coercions.
-- (The kind check is a recursive call, but since all kinds have type
-- @Type@, there is no need to check the types of kinds.)
-- See also Note [Non-trivial definitional equality] in TyCoRep.
eqType t1 t2 = isEqual $ nonDetCmpType t1 t2
-- It's OK to use nonDetCmpType here and eqType is deterministic,
-- nonDetCmpType does equality deterministically
-- | Compare types with respect to a (presumably) non-empty 'RnEnv2'.
eqTypeX :: RnEnv2 -> Type -> Type -> Bool
eqTypeX env t1 t2 = isEqual $ nonDetCmpTypeX env t1 t2
-- It's OK to use nonDetCmpType here and eqTypeX is deterministic,
-- nonDetCmpTypeX does equality deterministically
-- | Type equality on lists of types, looking through type synonyms
-- but not newtypes.
eqTypes :: [Type] -> [Type] -> Bool
eqTypes tys1 tys2 = isEqual $ nonDetCmpTypes tys1 tys2
-- It's OK to use nonDetCmpType here and eqTypes is deterministic,
-- nonDetCmpTypes does equality deterministically
eqVarBndrs :: RnEnv2 -> [Var] -> [Var] -> Maybe RnEnv2
-- Check that the var lists are the same length
-- and have matching kinds; if so, extend the RnEnv2
-- Returns Nothing if they don't match
eqVarBndrs env [] []
= Just env
eqVarBndrs env (tv1:tvs1) (tv2:tvs2)
| eqTypeX env (varType tv1) (varType tv2)
= eqVarBndrs (rnBndr2 env tv1 tv2) tvs1 tvs2
eqVarBndrs _ _ _= Nothing
-- Now here comes the real worker
{-
Note [nonDetCmpType nondeterminism]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
nonDetCmpType is implemented in terms of nonDetCmpTypeX. nonDetCmpTypeX
uses nonDetCmpTc which compares TyCons by their Unique value. Using Uniques for
ordering leads to nondeterminism. We hit the same problem in the TyVarTy case,
comparing type variables is nondeterministic, note the call to nonDetCmpVar in
nonDetCmpTypeX.
See Note [Unique Determinism] for more details.
-}
nonDetCmpType :: Type -> Type -> Ordering
nonDetCmpType t1 t2
-- we know k1 and k2 have the same kind, because they both have kind *.
= nonDetCmpTypeX rn_env t1 t2
where
rn_env = mkRnEnv2 (mkInScopeSet (tyCoVarsOfTypes [t1, t2]))
nonDetCmpTypes :: [Type] -> [Type] -> Ordering
nonDetCmpTypes ts1 ts2 = nonDetCmpTypesX rn_env ts1 ts2
where
rn_env = mkRnEnv2 (mkInScopeSet (tyCoVarsOfTypes (ts1 ++ ts2)))
-- | An ordering relation between two 'Type's (known below as @t1 :: k1@
-- and @t2 :: k2@)
data TypeOrdering = TLT -- ^ @t1 < t2@
| TEQ -- ^ @t1 ~ t2@ and there are no casts in either,
-- therefore we can conclude @k1 ~ k2@
| TEQX -- ^ @t1 ~ t2@ yet one of the types contains a cast so
-- they may differ in kind.
| TGT -- ^ @t1 > t2@
deriving (Eq, Ord, Enum, Bounded)
nonDetCmpTypeX :: RnEnv2 -> Type -> Type -> Ordering -- Main workhorse
-- See Note [Non-trivial definitional equality] in TyCoRep
nonDetCmpTypeX env orig_t1 orig_t2 =
case go env orig_t1 orig_t2 of
-- If there are casts then we also need to do a comparison of the kinds of
-- the types being compared
TEQX -> toOrdering $ go env k1 k2
ty_ordering -> toOrdering ty_ordering
where
k1 = typeKind orig_t1
k2 = typeKind orig_t2
toOrdering :: TypeOrdering -> Ordering
toOrdering TLT = LT
toOrdering TEQ = EQ
toOrdering TEQX = EQ
toOrdering TGT = GT
liftOrdering :: Ordering -> TypeOrdering
liftOrdering LT = TLT
liftOrdering EQ = TEQ
liftOrdering GT = TGT
thenCmpTy :: TypeOrdering -> TypeOrdering -> TypeOrdering
thenCmpTy TEQ rel = rel
thenCmpTy TEQX rel = hasCast rel
thenCmpTy rel _ = rel
hasCast :: TypeOrdering -> TypeOrdering
hasCast TEQ = TEQX
hasCast rel = rel
-- Returns both the resulting ordering relation between the two types
-- and whether either contains a cast.
go :: RnEnv2 -> Type -> Type -> TypeOrdering
go env t1 t2
| Just t1' <- coreView t1 = go env t1' t2
| Just t2' <- coreView t2 = go env t1 t2'
go env (TyVarTy tv1) (TyVarTy tv2)
= liftOrdering $ rnOccL env tv1 `nonDetCmpVar` rnOccR env tv2
go env (ForAllTy (Bndr tv1 _) t1) (ForAllTy (Bndr tv2 _) t2)
= go env (varType tv1) (varType tv2)
`thenCmpTy` go (rnBndr2 env tv1 tv2) t1 t2
-- See Note [Equality on AppTys]
go env (AppTy s1 t1) ty2
| Just (s2, t2) <- repSplitAppTy_maybe ty2
= go env s1 s2 `thenCmpTy` go env t1 t2
go env ty1 (AppTy s2 t2)
| Just (s1, t1) <- repSplitAppTy_maybe ty1
= go env s1 s2 `thenCmpTy` go env t1 t2
go env (FunTy _ s1 t1) (FunTy _ s2 t2)
= go env s1 s2 `thenCmpTy` go env t1 t2
go env (TyConApp tc1 tys1) (TyConApp tc2 tys2)
= liftOrdering (tc1 `nonDetCmpTc` tc2) `thenCmpTy` gos env tys1 tys2
go _ (LitTy l1) (LitTy l2) = liftOrdering (compare l1 l2)
go env (CastTy t1 _) t2 = hasCast $ go env t1 t2
go env t1 (CastTy t2 _) = hasCast $ go env t1 t2
go _ (CoercionTy {}) (CoercionTy {}) = TEQ
        -- Deal with the rest: TyVarTy < CoercionTy < AppTy < LitTy < TyConApp < FunTy < ForAllTy
go _ ty1 ty2
= liftOrdering $ (get_rank ty1) `compare` (get_rank ty2)
where get_rank :: Type -> Int
get_rank (CastTy {})
= pprPanic "nonDetCmpTypeX.get_rank" (ppr [ty1,ty2])
get_rank (TyVarTy {}) = 0
get_rank (CoercionTy {}) = 1
get_rank (AppTy {}) = 3
get_rank (LitTy {}) = 4
get_rank (TyConApp {}) = 5
get_rank (FunTy {}) = 6
get_rank (ForAllTy {}) = 7
gos :: RnEnv2 -> [Type] -> [Type] -> TypeOrdering
gos _ [] [] = TEQ
gos _ [] _ = TLT
gos _ _ [] = TGT
gos env (ty1:tys1) (ty2:tys2) = go env ty1 ty2 `thenCmpTy` gos env tys1 tys2
-------------
nonDetCmpTypesX :: RnEnv2 -> [Type] -> [Type] -> Ordering
nonDetCmpTypesX _ [] [] = EQ
nonDetCmpTypesX env (t1:tys1) (t2:tys2) = nonDetCmpTypeX env t1 t2
`thenCmp`
nonDetCmpTypesX env tys1 tys2
nonDetCmpTypesX _ [] _ = LT
nonDetCmpTypesX _ _ [] = GT
-------------
-- | Compare two 'TyCon's. NB: This should /never/ see 'Constraint' (as
-- recognized by Kind.isConstraintKindCon) which is considered a synonym for
-- 'Type' in Core.
-- See Note [Kind Constraint and kind Type] in Kind.
-- See Note [nonDetCmpType nondeterminism]
nonDetCmpTc :: TyCon -> TyCon -> Ordering
nonDetCmpTc tc1 tc2
= ASSERT( not (isConstraintKindCon tc1) && not (isConstraintKindCon tc2) )
u1 `nonDetCmpUnique` u2
where
u1 = tyConUnique tc1
u2 = tyConUnique tc2
{-
************************************************************************
* *
The kind of a type
* *
************************************************************************
Note [typeKind vs tcTypeKind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We have two functions to get the kind of a type
* typeKind ignores the distinction between Constraint and *
* tcTypeKind respects the distinction between Constraint and *
tcTypeKind is used by the type inference engine, for which Constraint
and * are different; after that we use typeKind.
See also Note [coreView vs tcView]
Note [Kinding rules for types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In typeKind we consider Constraint and (TYPE LiftedRep) to be identical.
We then have
t1 : TYPE rep1
t2 : TYPE rep2
(FUN) ----------------
t1 -> t2 : Type
ty : TYPE rep
`a` is not free in rep
(FORALL) -----------------------
forall a. ty : TYPE rep
In tcTypeKind we consider Constraint and (TYPE LiftedRep) to be distinct:
t1 : TYPE rep1
t2 : TYPE rep2
(FUN) ----------------
t1 -> t2 : Type
t1 : Constraint
t2 : TYPE rep
(PRED1) ----------------
t1 => t2 : Type
t1 : Constraint
t2 : Constraint
(PRED2) ---------------------
t1 => t2 : Constraint
ty : TYPE rep
`a` is not free in rep
(FORALL1) -----------------------
forall a. ty : TYPE rep
ty : Constraint
(FORALL2) -------------------------
forall a. ty : Constraint
Note that:
* The only way we distinguish '->' from '=>' is by the fact
that the argument is a PredTy. Both are FunTys
Note [Phantom type variables in kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
type K (r :: RuntimeRep) = Type -- Note 'r' is unused
data T r :: K r -- T :: forall r -> K r
foo :: forall r. T r
The body of the forall in foo's type has kind (K r), and
normally it would make no sense to have
forall r. (ty :: K r)
because the kind of the forall would escape the binding
of 'r'. But in this case it's fine because (K r) expands
to Type, so we explicitly /permit/ the type
forall r. T r
To accommodate such a type, in typeKind (forall a.ty) we use
occCheckExpand to expand any type synonyms in the kind of 'ty'
to eliminate 'a'. See kinding rule (FORALL) in
Note [Kinding rules for types]
And in TcValidity.checkEscapingKind, we also use
occCheckExpand, for the same reason.
-}
-----------------------------
typeKind :: HasDebugCallStack => Type -> Kind
-- No need to expand synonyms
typeKind (TyConApp tc tys) = piResultTys (tyConKind tc) tys
typeKind (LitTy l) = typeLiteralKind l
typeKind (FunTy {}) = liftedTypeKind
typeKind (TyVarTy tyvar) = tyVarKind tyvar
typeKind (CastTy _ty co) = coercionRKind co
typeKind (CoercionTy co) = coercionType co
typeKind (AppTy fun arg)
= go fun [arg]
where
-- Accumulate the type arguments, so we can call piResultTys,
-- rather than a succession of calls to piResultTy (which is
-- asymptotically costly as the number of arguments increases)
go (AppTy fun arg) args = go fun (arg:args)
go fun args = piResultTys (typeKind fun) args
typeKind ty@(ForAllTy {})
= case occCheckExpand tvs body_kind of
-- We must make sure tv does not occur in kind
-- As it is already out of scope!
-- See Note [Phantom type variables in kinds]
Just k' -> k'
Nothing -> pprPanic "typeKind"
(ppr ty $$ ppr tvs $$ ppr body <+> dcolon <+> ppr body_kind)
where
(tvs, body) = splitTyVarForAllTys ty
body_kind = typeKind body
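-- Illustration (added for exposition; not part of the original source):
--   typeKind (TyConApp Maybe [Int]) == piResultTys (tyConKind Maybe) [Int] == Type
-- and for nested applications the AppTy case gathers all arguments first, so
--   typeKind (AppTy (AppTy f a) b)  == piResultTys (typeKind f) [a, b]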
---------------------------------------------
-- Utilities to be used in Unify, which uses "tc" functions
---------------------------------------------
tcTypeKind :: HasDebugCallStack => Type -> Kind
-- No need to expand synonyms
tcTypeKind (TyConApp tc tys) = piResultTys (tyConKind tc) tys
tcTypeKind (LitTy l) = typeLiteralKind l
tcTypeKind (TyVarTy tyvar) = tyVarKind tyvar
tcTypeKind (CastTy _ty co) = coercionRKind co
tcTypeKind (CoercionTy co) = coercionType co
tcTypeKind (FunTy { ft_af = af, ft_res = res })
| InvisArg <- af
, tcIsConstraintKind (tcTypeKind res)
= constraintKind -- Eq a => Ord a :: Constraint
| otherwise -- Eq a => a -> a :: TYPE LiftedRep
= liftedTypeKind -- Eq a => Array# Int :: Type LiftedRep (not TYPE PtrRep)
tcTypeKind (AppTy fun arg)
= go fun [arg]
where
-- Accumulate the type arguments, so we can call piResultTys,
-- rather than a succession of calls to piResultTy (which is
-- asymptotically costly as the number of arguments increases)
go (AppTy fun arg) args = go fun (arg:args)
go fun args = piResultTys (tcTypeKind fun) args
tcTypeKind ty@(ForAllTy {})
| tcIsConstraintKind body_kind
= constraintKind
| otherwise
= case occCheckExpand tvs body_kind of
-- We must make sure tv does not occur in kind
-- As it is already out of scope!
-- See Note [Phantom type variables in kinds]
Just k' -> k'
Nothing -> pprPanic "tcTypeKind"
(ppr ty $$ ppr tvs $$ ppr body <+> dcolon <+> ppr body_kind)
where
(tvs, body) = splitTyVarForAllTys ty
body_kind = tcTypeKind body
isPredTy :: HasDebugCallStack => Type -> Bool
-- See Note [Types for coercions, predicates, and evidence] in TyCoRep
isPredTy ty = tcIsConstraintKind (tcTypeKind ty)
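-- Illustration (added for exposition; not part of the original source):
--   isPredTy (Eq a)        == True   -- kind Constraint
--   isPredTy (Int -> Int)  == False  -- kind Type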
-- tcIsConstraintKind stuff only makes sense in the typechecker
-- After that Constraint = Type
-- See Note [coreView vs tcView]
-- Defined here because it is used in isPredTy and tcRepSplitAppTy_maybe (sigh)
tcIsConstraintKind :: Kind -> Bool
tcIsConstraintKind ty
| Just (tc, args) <- tcSplitTyConApp_maybe ty -- Note: tcSplit here
, isConstraintKindCon tc
= ASSERT2( null args, ppr ty ) True
| otherwise
= False
-- | Is this kind equivalent to @*@?
--
-- This considers 'Constraint' to be distinct from @*@. For a version that
-- treats them as the same type, see 'isLiftedTypeKind'.
tcIsLiftedTypeKind :: Kind -> Bool
tcIsLiftedTypeKind ty
| Just (tc, [arg]) <- tcSplitTyConApp_maybe ty -- Note: tcSplit here
, tc `hasKey` tYPETyConKey
= isLiftedRuntimeRep arg
| otherwise
= False
-- | Is this kind equivalent to @TYPE r@ (for some unknown r)?
--
-- This considers 'Constraint' to be distinct from @*@.
tcIsRuntimeTypeKind :: Kind -> Bool
tcIsRuntimeTypeKind ty
| Just (tc, _) <- tcSplitTyConApp_maybe ty -- Note: tcSplit here
, tc `hasKey` tYPETyConKey
= True
| otherwise
= False
tcReturnsConstraintKind :: Kind -> Bool
-- True <=> the Kind ultimately returns a Constraint
-- E.g. * -> Constraint
-- forall k. k -> Constraint
tcReturnsConstraintKind kind
| Just kind' <- tcView kind = tcReturnsConstraintKind kind'
tcReturnsConstraintKind (ForAllTy _ ty) = tcReturnsConstraintKind ty
tcReturnsConstraintKind (FunTy { ft_res = ty }) = tcReturnsConstraintKind ty
tcReturnsConstraintKind (TyConApp tc _) = isConstraintKindCon tc
tcReturnsConstraintKind _ = False
--------------------------
typeLiteralKind :: TyLit -> Kind
typeLiteralKind (NumTyLit {}) = typeNatKind
typeLiteralKind (StrTyLit {}) = typeSymbolKind
-- | Returns True if a type is levity polymorphic. Should be the same
-- as (isKindLevPoly . typeKind) but much faster.
-- Precondition: The type has kind (TYPE blah)
isTypeLevPoly :: Type -> Bool
isTypeLevPoly = go
where
go ty@(TyVarTy {}) = check_kind ty
go ty@(AppTy {}) = check_kind ty
go ty@(TyConApp tc _) | not (isTcLevPoly tc) = False
| otherwise = check_kind ty
go (ForAllTy _ ty) = go ty
go (FunTy {}) = False
go (LitTy {}) = False
go ty@(CastTy {}) = check_kind ty
go ty@(CoercionTy {}) = pprPanic "isTypeLevPoly co" (ppr ty)
check_kind = isKindLevPoly . typeKind
-- | Looking past all pi-types, is the end result potentially levity polymorphic?
-- Example: True for (forall r (a :: TYPE r). String -> a)
-- Example: False for (forall r1 r2 (a :: TYPE r1) (b :: TYPE r2). a -> b -> Type)
resultIsLevPoly :: Type -> Bool
resultIsLevPoly = isTypeLevPoly . snd . splitPiTys
{- **********************************************************************
* *
Occurs check expansion
%* *
%********************************************************************* -}
{- Note [Occurs check expansion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(occurCheckExpand tv xi) expands synonyms in xi just enough to get rid
of occurrences of tv outside type function arguments, if that is
possible; otherwise, it returns Nothing.
For example, suppose we have
type F a b = [a]
Then
occCheckExpand b (F Int b) = Just [Int]
but
occCheckExpand a (F a Int) = Nothing
We don't promise to do the absolute minimum amount of expanding
necessary, but we try not to do expansions we don't need to. We
prefer doing inner expansions first. For example,
type F a b = (a, Int, a, [a])
type G b = Char
We have
occCheckExpand b (F (G b)) = Just (F Char)
even though we could also expand F to get rid of b.
-}
occCheckExpand :: [Var] -> Type -> Maybe Type
-- See Note [Occurs check expansion]
-- We may have needed to do some type synonym unfolding in order to
-- get rid of the variable (or forall), so we also return the unfolded
-- version of the type, which is guaranteed to be syntactically free
-- of the given type variable. If the type is already syntactically
-- free of the variable, then the same type is returned.
occCheckExpand vs_to_avoid ty
| null vs_to_avoid -- Efficient shortcut
= Just ty -- Can happen, eg. CoreUtils.mkSingleAltCase
| otherwise
= go (mkVarSet vs_to_avoid, emptyVarEnv) ty
where
go :: (VarSet, VarEnv TyCoVar) -> Type -> Maybe Type
-- The VarSet is the set of variables we are trying to avoid
-- The VarEnv carries mappings necessary
-- because of kind expansion
go cxt@(as, env) (TyVarTy tv')
| tv' `elemVarSet` as = Nothing
| Just tv'' <- lookupVarEnv env tv' = return (mkTyVarTy tv'')
| otherwise = do { tv'' <- go_var cxt tv'
; return (mkTyVarTy tv'') }
go _ ty@(LitTy {}) = return ty
go cxt (AppTy ty1 ty2) = do { ty1' <- go cxt ty1
; ty2' <- go cxt ty2
; return (mkAppTy ty1' ty2') }
go cxt ty@(FunTy _ ty1 ty2)
= do { ty1' <- go cxt ty1
; ty2' <- go cxt ty2
; return (ty { ft_arg = ty1', ft_res = ty2' }) }
go cxt@(as, env) (ForAllTy (Bndr tv vis) body_ty)
= do { ki' <- go cxt (varType tv)
; let tv' = setVarType tv ki'
env' = extendVarEnv env tv tv'
as' = as `delVarSet` tv
; body' <- go (as', env') body_ty
; return (ForAllTy (Bndr tv' vis) body') }
-- For a type constructor application, first try expanding away the
-- offending variable from the arguments. If that doesn't work, next
-- see if the type constructor is a type synonym, and if so, expand
-- it and try again.
go cxt ty@(TyConApp tc tys)
= case mapM (go cxt) tys of
Just tys' -> return (mkTyConApp tc tys')
Nothing | Just ty' <- tcView ty -> go cxt ty'
| otherwise -> Nothing
-- Failing that, try to expand a synonym
go cxt (CastTy ty co) = do { ty' <- go cxt ty
; co' <- go_co cxt co
; return (mkCastTy ty' co') }
go cxt (CoercionTy co) = do { co' <- go_co cxt co
; return (mkCoercionTy co') }
------------------
go_var cxt v = do { k' <- go cxt (varType v)
; return (setVarType v k') }
-- Works for TyVar and CoVar
-- See Note [Occurrence checking: look inside kinds]
------------------
go_mco _ MRefl = return MRefl
go_mco ctx (MCo co) = MCo <$> go_co ctx co
------------------
go_co cxt (Refl ty) = do { ty' <- go cxt ty
; return (mkNomReflCo ty') }
go_co cxt (GRefl r ty mco) = do { mco' <- go_mco cxt mco
; ty' <- go cxt ty
; return (mkGReflCo r ty' mco') }
-- Note: Coercions do not contain type synonyms
go_co cxt (TyConAppCo r tc args) = do { args' <- mapM (go_co cxt) args
; return (mkTyConAppCo r tc args') }
go_co cxt (AppCo co arg) = do { co' <- go_co cxt co
; arg' <- go_co cxt arg
; return (mkAppCo co' arg') }
go_co cxt@(as, env) (ForAllCo tv kind_co body_co)
= do { kind_co' <- go_co cxt kind_co
; let tv' = setVarType tv $
coercionLKind kind_co'
env' = extendVarEnv env tv tv'
as' = as `delVarSet` tv
; body' <- go_co (as', env') body_co
; return (ForAllCo tv' kind_co' body') }
go_co cxt (FunCo r co1 co2) = do { co1' <- go_co cxt co1
; co2' <- go_co cxt co2
; return (mkFunCo r co1' co2') }
go_co cxt@(as,env) (CoVarCo c)
| c `elemVarSet` as = Nothing
| Just c' <- lookupVarEnv env c = return (mkCoVarCo c')
| otherwise = do { c' <- go_var cxt c
; return (mkCoVarCo c') }
go_co cxt (HoleCo h) = do { c' <- go_var cxt (ch_co_var h)
; return (HoleCo (h { ch_co_var = c' })) }
go_co cxt (AxiomInstCo ax ind args) = do { args' <- mapM (go_co cxt) args
; return (mkAxiomInstCo ax ind args') }
go_co cxt (UnivCo p r ty1 ty2) = do { p' <- go_prov cxt p
; ty1' <- go cxt ty1
; ty2' <- go cxt ty2
; return (mkUnivCo p' r ty1' ty2') }
go_co cxt (SymCo co) = do { co' <- go_co cxt co
; return (mkSymCo co') }
go_co cxt (TransCo co1 co2) = do { co1' <- go_co cxt co1
; co2' <- go_co cxt co2
; return (mkTransCo co1' co2') }
go_co cxt (NthCo r n co) = do { co' <- go_co cxt co
; return (mkNthCo r n co') }
go_co cxt (LRCo lr co) = do { co' <- go_co cxt co
; return (mkLRCo lr co') }
go_co cxt (InstCo co arg) = do { co' <- go_co cxt co
; arg' <- go_co cxt arg
; return (mkInstCo co' arg') }
go_co cxt (KindCo co) = do { co' <- go_co cxt co
; return (mkKindCo co') }
go_co cxt (SubCo co) = do { co' <- go_co cxt co
; return (mkSubCo co') }
go_co cxt (AxiomRuleCo ax cs) = do { cs' <- mapM (go_co cxt) cs
; return (mkAxiomRuleCo ax cs') }
------------------
go_prov _ UnsafeCoerceProv = return UnsafeCoerceProv
go_prov cxt (PhantomProv co) = PhantomProv <$> go_co cxt co
go_prov cxt (ProofIrrelProv co) = ProofIrrelProv <$> go_co cxt co
go_prov _ p@(PluginProv _) = return p
{-
%************************************************************************
%* *
Miscellaneous functions
%* *
%************************************************************************
-}
-- | All type constructors occurring in the type; looking through type
-- synonyms, but not newtypes.
-- When it finds a Class, it returns the class TyCon.
tyConsOfType :: Type -> UniqSet TyCon
tyConsOfType ty
= go ty
where
go :: Type -> UniqSet TyCon -- The UniqSet does duplicate elim
go ty | Just ty' <- coreView ty = go ty'
go (TyVarTy {}) = emptyUniqSet
go (LitTy {}) = emptyUniqSet
go (TyConApp tc tys) = go_tc tc `unionUniqSets` go_s tys
go (AppTy a b) = go a `unionUniqSets` go b
go (FunTy _ a b) = go a `unionUniqSets` go b `unionUniqSets` go_tc funTyCon
go (ForAllTy (Bndr tv _) ty) = go ty `unionUniqSets` go (varType tv)
go (CastTy ty co) = go ty `unionUniqSets` go_co co
go (CoercionTy co) = go_co co
go_co (Refl ty) = go ty
go_co (GRefl _ ty mco) = go ty `unionUniqSets` go_mco mco
go_co (TyConAppCo _ tc args) = go_tc tc `unionUniqSets` go_cos args
go_co (AppCo co arg) = go_co co `unionUniqSets` go_co arg
go_co (ForAllCo _ kind_co co) = go_co kind_co `unionUniqSets` go_co co
go_co (FunCo _ co1 co2) = go_co co1 `unionUniqSets` go_co co2
go_co (AxiomInstCo ax _ args) = go_ax ax `unionUniqSets` go_cos args
go_co (UnivCo p _ t1 t2) = go_prov p `unionUniqSets` go t1 `unionUniqSets` go t2
go_co (CoVarCo {}) = emptyUniqSet
go_co (HoleCo {}) = emptyUniqSet
go_co (SymCo co) = go_co co
go_co (TransCo co1 co2) = go_co co1 `unionUniqSets` go_co co2
go_co (NthCo _ _ co) = go_co co
go_co (LRCo _ co) = go_co co
go_co (InstCo co arg) = go_co co `unionUniqSets` go_co arg
go_co (KindCo co) = go_co co
go_co (SubCo co) = go_co co
go_co (AxiomRuleCo _ cs) = go_cos cs
go_mco MRefl = emptyUniqSet
go_mco (MCo co) = go_co co
go_prov UnsafeCoerceProv = emptyUniqSet
go_prov (PhantomProv co) = go_co co
go_prov (ProofIrrelProv co) = go_co co
go_prov (PluginProv _) = emptyUniqSet
-- this last case can happen from the tyConsOfType used from
-- checkTauTvUpdate
go_s tys = foldr (unionUniqSets . go) emptyUniqSet tys
go_cos cos = foldr (unionUniqSets . go_co) emptyUniqSet cos
go_tc tc = unitUniqSet tc
go_ax ax = go_tc $ coAxiomTyCon ax
-- | Find the result 'Kind' of a type synonym,
-- after applying it to its 'arity' number of type variables
-- Actually this function works fine on data types too,
-- but they'd always return '*', so we never need to ask
synTyConResKind :: TyCon -> Kind
synTyConResKind tycon = piResultTys (tyConKind tycon) (mkTyVarTys (tyConTyVars tycon))
-- | Retrieve the free variables in this type, splitting them based
-- on whether they are used visibly or invisibly. Invisible ones come
-- first.
splitVisVarsOfType :: Type -> Pair TyCoVarSet
splitVisVarsOfType orig_ty = Pair invis_vars vis_vars
where
Pair invis_vars1 vis_vars = go orig_ty
invis_vars = invis_vars1 `minusVarSet` vis_vars
go (TyVarTy tv) = Pair (tyCoVarsOfType $ tyVarKind tv) (unitVarSet tv)
go (AppTy t1 t2) = go t1 `mappend` go t2
go (TyConApp tc tys) = go_tc tc tys
go (FunTy _ t1 t2) = go t1 `mappend` go t2
go (ForAllTy (Bndr tv _) ty)
= ((`delVarSet` tv) <$> go ty) `mappend`
(invisible (tyCoVarsOfType $ varType tv))
go (LitTy {}) = mempty
go (CastTy ty co) = go ty `mappend` invisible (tyCoVarsOfCo co)
go (CoercionTy co) = invisible $ tyCoVarsOfCo co
invisible vs = Pair vs emptyVarSet
go_tc tc tys = let (invis, vis) = partitionInvisibleTypes tc tys in
invisible (tyCoVarsOfTypes invis) `mappend` foldMap go vis
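-- Illustration (added for exposition; not part of the original source):
-- for a type variable (a :: k) we get, informally,
--   splitVisVarsOfType (a :: k)  ==  Pair {k} {a}
-- i.e. 'k' occurs only invisibly (in a kind) while 'a' itself occurs visibly.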
splitVisVarsOfTypes :: [Type] -> Pair TyCoVarSet
splitVisVarsOfTypes = foldMap splitVisVarsOfType
modifyJoinResTy :: Int -- Number of binders to skip
-> (Type -> Type) -- Function to apply to result type
-> Type -- Type of join point
-> Type -- New type
-- INVARIANT: If any of the first n binders are foralls, those tyvars cannot
-- appear in the original result type. See isValidJoinPointType.
modifyJoinResTy orig_ar f orig_ty
= go orig_ar orig_ty
where
go 0 ty = f ty
go n ty | Just (arg_bndr, res_ty) <- splitPiTy_maybe ty
= mkPiTy arg_bndr (go (n-1) res_ty)
| otherwise
= pprPanic "modifyJoinResTy" (ppr orig_ar <+> ppr orig_ty)
setJoinResTy :: Int -- Number of binders to skip
-> Type -- New result type
-> Type -- Type of join point
-> Type -- New type
-- INVARIANT: Same as for modifyJoinResTy
setJoinResTy ar new_res_ty ty
= modifyJoinResTy ar (const new_res_ty) ty
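-- Illustration (added for exposition; not part of the original source):
--   setJoinResTy 2 Bool (Int -> Char -> [Int])  ==  Int -> Char -> Bool
-- i.e. the first two binders are kept and only the result type is replaced.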
{-
************************************************************************
* *
Functions over Kinds
* *
************************************************************************
Note [Kind Constraint and kind Type]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The kind Constraint is the kind of classes and other type constraints.
The special thing about types of kind Constraint is that
* They are displayed with double arrow:
f :: Ord a => a -> a
* They are implicitly instantiated at call sites; so the type inference
engine inserts an extra argument of type (Ord a) at every call site
to f.
However, once type inference is over, there is *no* distinction between
Constraint and Type. Indeed we can have coercions between the two. Consider
class C a where
op :: a -> a
For this single-method class we may generate a newtype, which in turn
generates an axiom witnessing
C a ~ (a -> a)
so on the left we have Constraint, and on the right we have Type.
See #7451.
Bottom line: although 'Type' and 'Constraint' are distinct TyCons, with
distinct uniques, they are treated as equal at all times except
during type inference.
-}
isConstraintKindCon :: TyCon -> Bool
isConstraintKindCon tc = tyConUnique tc == constraintKindTyConKey
-- | Tests whether the given kind (which should look like @TYPE x@)
-- is something other than a constructor tree (that is, constructors at every node).
-- E.g. True of TYPE k, TYPE (F Int)
-- False of TYPE 'LiftedRep
isKindLevPoly :: Kind -> Bool
isKindLevPoly k = ASSERT2( isLiftedTypeKind k || _is_type, ppr k )
-- the isLiftedTypeKind check is necessary b/c of Constraint
go k
where
go ty | Just ty' <- coreView ty = go ty'
go TyVarTy{} = True
go AppTy{} = True -- it can't be a TyConApp
go (TyConApp tc tys) = isFamilyTyCon tc || any go tys
go ForAllTy{} = True
go (FunTy _ t1 t2) = go t1 || go t2
go LitTy{} = False
go CastTy{} = True
go CoercionTy{} = True
_is_type = classifiesTypeWithValues k
-----------------------------------------
-- Subkinding
-- The tc variants are used during type-checking, where ConstraintKind
-- is distinct from all other kinds
-- After type-checking (in core), Constraint and liftedTypeKind are
-- indistinguishable
-- | Does this classify a type allowed to have values? Responds True to things
-- like *, #, TYPE Lifted, TYPE v, Constraint.
classifiesTypeWithValues :: Kind -> Bool
-- ^ True of any sub-kind of OpenTypeKind
classifiesTypeWithValues k = isJust (kindRep_maybe k)
{-
%************************************************************************
%* *
Pretty-printing
%* *
%************************************************************************
Most pretty-printing is either in TyCoRep or GHC.Iface.Type.
-}
-- | Does a 'TyCon' (that is applied to some number of arguments) need to be
-- ascribed with an explicit kind signature to resolve ambiguity if rendered as
-- a source-syntax type?
-- (See @Note [When does a tycon application need an explicit kind signature?]@
-- for a full explanation of what this function checks for.)
tyConAppNeedsKindSig
:: Bool -- ^ Should specified binders count towards injective positions in
-- the kind of the TyCon? (If you're using visible kind
           -- applications, then you want True here.)
-> TyCon
-> Int -- ^ The number of args the 'TyCon' is applied to.
-> Bool -- ^ Does @T t_1 ... t_n@ need a kind signature? (Where @n@ is the
-- number of arguments)
tyConAppNeedsKindSig spec_inj_pos tc n_args
| LT <- listLengthCmp tc_binders n_args
= False
| otherwise
= let (dropped_binders, remaining_binders)
= splitAt n_args tc_binders
result_kind = mkTyConKind remaining_binders tc_res_kind
result_vars = tyCoVarsOfType result_kind
dropped_vars = fvVarSet $
mapUnionFV injective_vars_of_binder dropped_binders
in not (subVarSet result_vars dropped_vars)
where
tc_binders = tyConBinders tc
tc_res_kind = tyConResKind tc
-- Returns the variables that would be fixed by knowing a TyConBinder. See
-- Note [When does a tycon application need an explicit kind signature?]
-- for a more detailed explanation of what this function does.
injective_vars_of_binder :: TyConBinder -> FV
injective_vars_of_binder (Bndr tv vis) =
case vis of
AnonTCB VisArg -> injectiveVarsOfType False -- conservative choice
(varType tv)
NamedTCB argf | source_of_injectivity argf
-> unitFV tv `unionFV`
injectiveVarsOfType False (varType tv)
_ -> emptyFV
source_of_injectivity Required = True
source_of_injectivity Specified = spec_inj_pos
source_of_injectivity Inferred = False
{-
Note [When does a tycon application need an explicit kind signature?]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are a couple of places in GHC where we convert Core Types into forms that
more closely resemble user-written syntax. These include:
1. Template Haskell Type reification (see, for instance, TcSplice.reify_tc_app)
2. Converting Types to LHsTypes (in GHC.Hs.Utils.typeToLHsType, or in Haddock)
This conversion presents a challenge: how do we ensure that the resulting type
has enough kind information so as not to be ambiguous? To better motivate this
question, consider the following Core type:
-- Foo :: Type -> Type
type Foo = Proxy Type
There is nothing ambiguous about the RHS of Foo in Core. But if we were to,
say, reify it into a TH Type, then it's tempting to just drop the invisible
Type argument and simply return `Proxy`. But now we've lost crucial kind
information: we don't know if we're dealing with `Proxy Type` or `Proxy Bool`
or `Proxy Int` or something else! We've inadvertently introduced ambiguity.
Unlike in other situations in GHC, we can't just turn on
-fprint-explicit-kinds, as we need to produce something which has the same
structure as a source-syntax type. Moreover, we can't rely on visible kind
application, since the first kind argument to Proxy is inferred, not specified.
Our solution is to annotate certain tycons with their kinds whenever they
appear in applied form in order to resolve the ambiguity. For instance, we
would reify the RHS of Foo like so:
type Foo = (Proxy :: Type -> Type)
We need to devise an algorithm that determines precisely which tycons need
these explicit kind signatures. We certainly don't want to annotate _every_
tycon with a kind signature, or else we might end up with horribly bloated
types like the following:
(Either :: Type -> Type -> Type) (Int :: Type) (Char :: Type)
We only want to annotate tycons that absolutely require kind signatures in
order to resolve some sort of ambiguity, and nothing more.
Suppose we have a tycon application (T ty_1 ... ty_n). Why might this type
require a kind signature? It might require it when we need to fill in any of
T's omitted arguments. By "omitted argument", we mean one that is dropped when
reifying ty_1 ... ty_n. Sometimes, the omitted arguments are inferred and
specified arguments (e.g., TH reification in TcSplice), and sometimes the
omitted arguments are only the inferred ones (e.g., in GHC.Hs.Utils.typeToLHsType,
which reifies specified arguments through visible kind application).
Regardless, the key idea is that _some_ arguments are going to be omitted after
reification, and the only mechanism we have at our disposal for filling them in
is through explicit kind signatures.
What do we mean by "fill in"? Let's consider this small example:
T :: forall {k}. Type -> (k -> Type) -> k
Moreover, we have this application of T:
T @{j} Int aty
When we reify this type, we omit the inferred argument @{j}. Is it fixed by the
other (non-inferred) arguments? Yes! If we know the kind of (aty :: blah), then
we'll generate an equality constraint (kappa -> Type) ~ blah and, assuming we can
solve it, that will fix `kappa`. (Here, `kappa` is the unification variable
that we instantiate `k` with.)
Therefore, for any application of a tycon T to some arguments, the Question We
Must Answer is:
* Given the first n arguments of T, do the kinds of the non-omitted arguments
fill in the omitted arguments?
(This is still a bit hand-wavey, but we'll refine this question incrementally
as we explain more of the machinery underlying this process.)
Answering this question is precisely the role that the `injectiveVarsOfType`
and `injective_vars_of_binder` functions exist to serve. If an omitted argument
`a` appears in the set returned by `injectiveVarsOfType ty`, then knowing
`ty` determines (i.e., fills in) `a`. (More on `injective_vars_of_binder` in a
bit.)
More formally, if
`a` is in `injectiveVarsOfType ty`
and S1(ty) ~ S2(ty),
then S1(a) ~ S2(a),
where S1 and S2 are arbitrary substitutions.
For example, if `F` is a non-injective type family, then
injectiveVarsOfType(Either c (Maybe (a, F b c))) = {a, c}
Now that we know what this function does, here is a second attempt at the
Question We Must Answer:
* Given the first n arguments of T (ty_1 ... ty_n), consider the binders
of T that are instantiated by non-omitted arguments. Do the injective
variables of these binders fill in the remainder of T's kind?
Alright, we're getting closer. Next, we need to clarify what the injective
variables of a tycon binder are. This is the role that the
`injective_vars_of_binder` function serves. Here is what this function does for
each form of tycon binder:
* Anonymous binders are injective positions. For example, in the promoted data
constructor '(:):
'(:) :: forall a. a -> [a] -> [a]
The second and third tyvar binders (of kinds `a` and `[a]`) are both
anonymous, so if we had '(:) 'True '[], then the kinds of 'True and
'[] would contribute to the kind of '(:) 'True '[]. Therefore,
injective_vars_of_binder(_ :: a) = injectiveVarsOfType(a) = {a}.
(Similarly, injective_vars_of_binder(_ :: [a]) = {a}.)
* Named binders:
- Inferred binders are never injective positions. For example, in this data
type:
data Proxy a
Proxy :: forall {k}. k -> Type
If we had Proxy 'True, then the kind of 'True would not contribute to the
kind of Proxy 'True. Therefore,
injective_vars_of_binder(forall {k}. ...) = {}.
- Required binders are injective positions. For example, in this data type:
data Wurble k (a :: k) :: k
Wurble :: forall k -> k -> k
The first tyvar binder (of kind `forall k`) has required visibility, so if
we had Wurble (Maybe a) Nothing, then the kind of Maybe a would
contribute to the kind of Wurble (Maybe a) Nothing. Hence,
injective_vars_of_binder(forall a -> ...) = {a}.
- Specified binders /might/ be injective positions, depending on how you
approach things. Continuing the '(:) example:
'(:) :: forall a. a -> [a] -> [a]
Normally, the (forall a. ...) tyvar binder wouldn't contribute to the kind
of '(:) 'True '[], since it's not explicitly instantiated by the user. But
if visible kind application is enabled, then this is possible, since the
user can write '(:) @Bool 'True '[]. (In that case,
injective_vars_of_binder(forall a. ...) = {a}.)
There are some situations where using visible kind application is appropriate
(e.g., GHC.Hs.Utils.typeToLHsType) and others where it is not (e.g., TH
reification), so the `injective_vars_of_binder` function is parametrized by
a Bool which decides if specified binders should be counted towards
injective positions or not.
Now that we've defined injective_vars_of_binder, we can refine the Question We
Must Answer once more:
* Given the first n arguments of T (ty_1 ... ty_n), consider the binders
of T that are instantiated by non-omitted arguments. For each such binder
b_i, take the union of all injective_vars_of_binder(b_i). Is this set a
superset of the free variables of the remainder of T's kind?
If the answer to this question is "no", then (T ty_1 ... ty_n) needs an
explicit kind signature, since T's kind has kind variables leftover that
aren't fixed by the non-omitted arguments.
One last sticking point: what does "the remainder of T's kind" mean? You might
be tempted to think that it corresponds to all of the arguments in the kind of
T that would normally be instantiated by omitted arguments. But this isn't
quite right, strictly speaking. Consider the following (silly) example:
S :: forall {k}. Type -> Type
And suppose we have this application of S:
S Int Bool
The Int argument would be omitted, and
injective_vars_of_binder(_ :: Type) = {}. This is not a superset of {k}, which
might suggest that (S Bool) needs an explicit kind signature. But
(S Bool :: Type) doesn't actually fix `k`! This is because the kind signature
only affects the /result/ of the application, not all of the individual
arguments. So adding a kind signature here won't make a difference. Therefore,
the fourth (and final) iteration of the Question We Must Answer is:
* Given the first n arguments of T (ty_1 ... ty_n), consider the binders
of T that are instantiated by non-omitted arguments. For each such binder
b_i, take the union of all injective_vars_of_binder(b_i). Is this set a
superset of the free variables of the kind of (T ty_1 ... ty_n)?
Phew, that was a lot of work!
How can we be sure that this is correct? That is, how can we be sure that, in
the event that we leave off a kind annotation, one could infer the kind of the
tycon application from its arguments? It's essentially a proof by induction: if
we can infer the kinds of every subtree of a type, then the whole tycon
application will have an inferrable kind--unless, of course, the remainder of
the tycon application's kind has uninstantiated kind variables.
What happens if T is oversaturated? That is, if T's kind has fewer than n
arguments, in the case that the concrete application instantiates a result
kind variable with an arrow kind? If we run out of arguments, we do not attach
a kind annotation. This should be a rare case, indeed. Here is an example:
data T1 :: k1 -> k2 -> *
data T2 :: k1 -> k2 -> *
type family G (a :: k) :: k
type instance G T1 = T2
type instance F Char = (G T1 Bool :: (* -> *) -> *) -- F from above
Here G's kind is (forall k. k -> k), and the desugared RHS of that last
instance of F is (G (* -> (* -> *) -> *) (T1 * (* -> *)) Bool). According to
the algorithm above, there are 3 arguments to G so we should peel off 3
arguments in G's kind. But G's kind has only two arguments. This is the
rare special case, and we choose not to annotate the application of G with
a kind signature. After all, we needn't do this, since that instance would
be reified as:
type instance F Char = G (T1 :: * -> (* -> *) -> *) Bool
So the kind of G isn't ambiguous anymore due to the explicit kind annotation
on its argument. See #8953 and test th/T8953.
-}
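{- A small, self-contained sketch of the idea above. The module name and
   bindings are invented purely for illustration; they are not part of GHC:

     {-# LANGUAGE KindSignatures #-}
     module KindSigSketch where

     import Data.Kind  (Type)
     import Data.Proxy (Proxy)

     -- Proxy :: forall {k}. k -> Type. The inferred {k} binder is not an
     -- injective position, so it is the *kind of the argument* that fixes k
     -- in each use below.
     p1 :: Proxy Maybe                    -- k ~ (Type -> Type), read off Maybe
     p1 = undefined

     p2 :: Proxy (Proxy :: Type -> Type)  -- the argument's kind annotation is
     p2 = undefined                       -- what fixes the outer k here
-}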
|
sdiehl/ghc
|
compiler/types/Type.hs
|
bsd-3-clause
| 128,297
| 0
| 15
| 34,988
| 22,628
| 11,738
| 10,890
| 1,431
| 31
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Language.Nix.Identifier ( Identifier, ident, quote, needsQuoting ) where
import Control.DeepSeq.Generics
import Data.Default ( Default(..) )  -- assumed provider of the 'Default'/'def' used below
import Data.String
import Internal.PrettyPrinting ( Pretty(..), text )
import GHC.Generics ( Generic )
import Internal.Lens
import Text.Regex.Posix
-- | Identifiers in Nix are essentially strings. Reasonable people restrict
-- themselves to identifiers of the form @[a-zA-Z\_][a-zA-Z0-9\_\'\-]*@,
-- because these don't need quoting. The @Identifier@ type is an instance of
-- the 'IsString' class for convenience. The methods of the 'Pretty' class can
-- be used to pretty-print an identifier with proper quoting.
--
-- >>> let i = Identifier "test" in (i, pPrint i)
-- (Identifier "test",test)
-- >>> let i = Identifier "foo.bar" in (i, pPrint i)
-- (Identifier "foo.bar","foo.bar")
newtype Identifier = Identifier String
deriving (Show, Eq, IsString, Generic)
instance Default Identifier where def = Identifier ""
instance Pretty Identifier where
pPrint i = text (i ^. ident . to quote)
instance Ord Identifier where
compare (Identifier a) (Identifier b) = compare a b
instance NFData Identifier where rnf = genericRnf
-- | Checks whether a given string would need quoting when interpreted as an
-- identifier.
needsQuoting :: String -> Bool
needsQuoting str = not (str =~ grammar)
where grammar :: String -- TODO: should be a compiled regular expression
grammar = "^[a-zA-Z\\_][a-zA-Z0-9\\_\\'\\-]*$"
-- | Lens that allows conversion from/to the standard 'String' type. The setter
-- does not evaluate its input, so it's safe to use with 'undefined'.
--
-- >>> putStrLn $ Identifier "abc.def" ^. ident
-- abc.def
--
-- >>> pPrint $ undefined & ident .~ "abcdef"
-- abcdef
ident :: Lens' Identifier String
ident f (Identifier str) = Identifier `fmap` f str
-- | Helper function to quote a given identifier string if necessary.
--
-- >>> putStrLn (quote "abc")
-- abc
--
-- >>> putStrLn (quote "abc.def")
-- "abc.def"
quote :: String -> String
quote s = if needsQuoting s then show s else s
|
psibi/cabal2nix
|
src/Language/Nix/Identifier.hs
|
bsd-3-clause
| 2,117
| 0
| 9
| 358
| 329
| 193
| 136
| -1
| -1
|
module Insomnia.Typecheck.ConstructImportDefinitions (constructImportDefinitions) where
import Data.Monoid (Monoid(..), (<>), Endo(..))
import qualified Unbound.Generics.LocallyNameless as U
import Insomnia.Common.Stochasticity
import Insomnia.Identifier (Path(..), lastOfPath)
import Insomnia.Expr (QVar(..), Expr(..))
import Insomnia.Types (TypeConstructor(..), Type(..), TypePath(..))
import Insomnia.TypeDefn (TypeAlias(..))
import Insomnia.ModuleType (TypeSigDecl(..))
import Insomnia.Module
import Insomnia.Typecheck.Env
import Insomnia.Typecheck.SelfSig
type Decls = Endo [Decl]
singleDecl :: Decl -> Decls
singleDecl d = Endo (d:)
constructImportDefinitions :: SelfSig -> Stochasticity -> TC Decls
constructImportDefinitions (ValueSelfSig q ty rest) stoch = do
d <- importValue q ty stoch
ds <- constructImportDefinitions rest stoch
return (d <> ds)
constructImportDefinitions (TypeSelfSig tp tsd rest) stoch = do
d <- importType tp tsd
ds <- constructImportDefinitions rest stoch
return (d <> ds)
constructImportDefinitions (SubmoduleSelfSig p _ rest) stoch = do
d <- importSubmodule p
ds <- constructImportDefinitions rest stoch
return (d <> ds)
constructImportDefinitions (GenerativeSelfSig p _ rest) stoch = do
d <- importSubmodule p
ds <- constructImportDefinitions rest stoch
return (d <> ds)
constructImportDefinitions UnitSelfSig _ = return mempty
importSubmodule :: Path -> TC Decls
importSubmodule pSub = do
let f = lastOfPath pSub
return $ singleDecl $ SubmoduleDefn f (ModuleId pSub)
importValue :: QVar -> Type -> Stochasticity -> TC Decls
importValue q@(QVar _ f) ty stoch = do
let
-- sig f : T
-- val f = p.f
dSig = singleDecl $ ValueDecl f $ SigDecl DeterministicParam ty
dVal = singleDecl
$ ValueDecl f
$ case stoch of
DeterministicParam -> ParameterDecl (Q q)
RandomVariable -> SampleDecl (Return (Q q))
return (dSig <> dVal)
importType :: TypePath -> TypeSigDecl -> TC Decls
importType tp@(TypePath _ f) tsd = do
-- TODO: for polymorphic types this doesn't kindcheck.
let gcon = TCGlobal tp
manifestAlias = ManifestTypeAlias (U.bind [] (TC gcon))
-- if this is an alias for an abstract type or for another
-- manifest alias, just alias it. If this is an alias for a
-- data type, make a datatype copy; if it's an alias for a
-- datatype copy, propagate the copy.
alias = case tsd of
AbstractTypeSigDecl _k ->
manifestAlias
AliasTypeSigDecl (ManifestTypeAlias _rhs) ->
manifestAlias
AliasTypeSigDecl copy@(DataCopyTypeAlias {}) ->
copy
ManifestTypeSigDecl defn ->
DataCopyTypeAlias tp defn
return $ singleDecl $ TypeAliasDefn f alias
|
lambdageek/insomnia
|
src/Insomnia/Typecheck/ConstructImportDefinitions.hs
|
bsd-3-clause
| 2,788
| 0
| 18
| 584
| 810
| 421
| 389
| 61
| 4
|
import qualified Data.ByteString.Char8 as C
import Control.Monad (forever,when)
import System.Console.GetOpt
import System.Environment
import System.Exit
import System.Random
import System.IO
import MBE.Markov
import MDB.LHashMap
data Opts = Opts {
dbFile :: FilePath,
outFile :: FilePath,
showHelp :: Bool
}
defaults :: Opts
defaults = Opts {
dbFile = "default.db",
outFile = "/dev/stdout",
showHelp = False
}
header = "Usage: hmctg [opt... ]"
main :: IO ()
main = do
(opts,_) <- getArgs >>= parseOpts
when (showHelp opts) showUsageAndExit
db <- importDB (dbFile opts)
let output = outFile opts
if output == "/dev/stdout"
then forever $ do
gen <- newStdGen
C.putStrLn $ recChain db gen
else withFile output WriteMode (\handle -> forever $ do
gen <- newStdGen
C.hPutStrLn handle $ recChain db gen)
options :: [OptDescr (Opts -> Opts)]
options = [
Option "d" ["database"]
(ReqArg (\arg opt -> opt {dbFile = arg}) "INPUT") "Which database to process. (Defaults to default.db)",
Option "o" ["output"]
(ReqArg (\arg opt -> opt {outFile = arg}) "OUTPUT") "Which file to output to. (Defaults to stdout)",
Option "h?" ["help"]
(NoArg (\opt -> opt {showHelp = True})) "Show help and exit."
]
parseOpts :: [String] -> IO (Opts, [String])
parseOpts argv =
case getOpt Permute options argv of
(o,n,[] ) -> return (foldl (flip id) defaults o, n)
(_,_,errs) -> ioError (userError (concat errs ++ showUsage))
showUsage :: String
showUsage = usageInfo header options
showUsageAndExit :: IO ()
showUsageAndExit = do
putStrLn showUsage
exitSuccess
|
c-14/hmctg
|
src/hmctg.hs
|
bsd-3-clause
| 1,908
| 0
| 16
| 665
| 555
| 300
| 255
| 48
| 2
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.DE.Rules
( rules
) where
import Prelude
import qualified Data.Text as Text
import Duckling.Dimensions.Types
import Duckling.Duration.Helpers (isGrain)
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Ordinal.Types (OrdinalData(..))
import Duckling.Regex.Types (GroupMatch(..))
import Duckling.Time.Computed
import Duckling.Time.Helpers
import Duckling.Time.HolidayHelpers
import Duckling.Time.Types (TimeData(..))
import Duckling.Types
import qualified Duckling.Ordinal.Types as TOrdinal
import qualified Duckling.Time.Types as TTime
import qualified Duckling.TimeGrain.Types as TG
ruleInstants :: [Rule]
ruleInstants = mkRuleInstants
[ ( "now" , TG.Second, 0,
"(genau)? ?jetzt|diesen moment|in diesem moment|gerade eben" )
, ( "today" , TG.Day , 0,
"heute|(um diese zeit|zu dieser zeit|um diesen zeitpunkt|zu diesem zeitpunkt)" )
, ( "tomorrow" , TG.Day , 1, "morgen" )
, ( "yesterday" , TG.Day , -1, "gestern" )
, ( "after tomorrow" , TG.Day , 2, "(ü)bermorgen" )
, ( "before yesterday", TG.Day , -2, "vorgestern" )
, ( "3 days ago" , TG.Day , -3, "vorvorgestern" )
, ( "EOM|End of month", TG.Month , 1, "(das )?ende des monats?" )
, ( "EOY|End of year" , TG.Year , 1,
"(das )?(EOY|jahr(es)? ?ende|ende (des )?jahr(es)?)" )
]
ruleDaysOfWeek :: [Rule]
ruleDaysOfWeek = mkRuleDaysOfWeek
[ ( "Montag" , "montags?|mo\\.?" )
, ( "Dienstag" , "die?nstags?|di\\.?" )
, ( "Mittwoch" , "mittwochs?|mi\\.?" )
, ( "Donnerstag", "donn?erstag|do\\.?" )
, ( "Freitag" , "freitags?|fr\\.?" )
, ( "Samstag" , "samstags?|sonnabends?|sa\\.?" )
, ( "Sonntag" , "sonntags?|so\\." )
]
ruleMonths :: [Rule]
ruleMonths = mkRuleMonths
[ ( "Januar" , "januar|jan\\.?" )
, ( "Februar" , "februar|feb\\.?" )
, ( "Marz" , "m(ä)rz|m(ä)r\\.?" )
, ( "April" , "april|apr\\.?" )
, ( "Mai" , "mai\\.?" )
, ( "Juni" , "juni|jun\\.?" )
, ( "Juli" , "juli|jul\\.?" )
, ( "August" , "august|aug\\.?" )
, ( "September", "september|sept?\\.?" )
, ( "Oktober" , "oktober|okt\\.?" )
, ( "November" , "november|nov\\.?" )
, ( "Dezember" , "dezember|dez\\.?" )
]
ruleSeasons :: [Rule]
ruleSeasons = mkRuleSeasons
[ ( "sommer" , "sommer" , monthDay 6 21, monthDay 9 23 )
, ( "herbst" , "herbst" , monthDay 9 23, monthDay 12 21 )
, ( "winter" , "winter" , monthDay 12 21, monthDay 3 20 )
, ( "fruhling", "fr(ü)h(ling|jahr)", monthDay 3 20, monthDay 6 21 )
]
ruleHolidays :: [Rule]
ruleHolidays = mkRuleHolidays
[ ( "Neujahr" , "neujahr(s?tag)?"
, monthDay 1 1 )
, ( "Valentinstag" , "valentin'?stag"
, monthDay 2 14 )
, ( "Schweizer Bundesfeiertag"
, "schweiz(er)? (bundes)?feiertag|bundes feiertag"
, monthDay 8 1 )
, ( "Tag der Deutschen Einheit" , "tag (der)? deutsc?hen? einheit"
, monthDay 10 3 )
, ( "Oesterreichischer Nationalfeiertag"
, "((ö)sterreichischer?)? nationalfeiertag|national feiertag"
, monthDay 10 26 )
, ( "Halloween" , "hall?owe?en?"
, monthDay 10 31 )
, ( "Allerheiligen" , "allerheiligen?|aller heiligen?"
, monthDay 11 1 )
, ( "Nikolaus" , "nikolaus(tag)?|nikolaus tag|nikolo"
, monthDay 12 6 )
, ( "Heiligabend" , "heilig(er)? abend"
, monthDay 12 24 )
, ( "Weihnachten" , "weih?nacht(en|stag)?"
, monthDay 12 25 )
, ( "Silvester" , "silvester"
, monthDay 12 31 )
, ( "Muttertag" , "mutt?ertag|mutt?er (tag)?"
, nthDOWOfMonth 2 7 5 )
, ( "Vatertag" , "vatt?er( ?tag)?"
, nthDOWOfMonth 3 7 6 )
]
ruleComputedHolidays :: [Rule]
ruleComputedHolidays = mkRuleHolidays
[ ( "Christi Himmelfahrt", "(christi\\s+)?himmelfahrt(stag)?"
, cycleNthAfter False TG.Day 39 easterSunday )
, ( "Aschermittwoch", "ascher?(tag|mittwoch)"
, cycleNthAfter False TG.Day (-46) easterSunday )
, ( "Aschura", "asc?hura(\\-?tag)?"
, cycleNthAfter False TG.Day 9 muharram )
, ( "Bhai Dooj", "bhai(ya)?\\s+d(u|oo)j|bhau\\-beej|bhai\\s+(tika|phonta)"
, cycleNthAfter False TG.Day 4 dhanteras )
, ( "Chhath", "chhathi?|chhath (parv|puja)|dala (chhath|puja)|surya shashthi"
, cycleNthAfter False TG.Day 8 dhanteras )
, ( "Boghi", "boghi|bogi\\s+pandigai"
, cycleNthAfter False TG.Day (-1) thaiPongal )
, ( "Chinesisches Neujahr", "chinesische(s|r)\\s+(neujahr(s(tag|fest))?|frühlingsfest)"
, chineseNewYear )
, ( "Aschermontag"
, "(orthodoxer?\\s+)?(ascher|reiner?\\s+|sauberer?\\s+)montag"
, cycleNthAfter False TG.Day (-48) orthodoxEaster )
, ( "Corpus Christi", "corpus\\s+christi|fronleichnam"
, cycleNthAfter False TG.Day 60 easterSunday )
, ( "Dhanteras", "dhanatrayodashi|dhanteras|dhanvantari\\s+trayodashi"
, dhanteras )
, ( "Diwali", "deepavali|diwali|lakshmi\\s+puja"
, cycleNthAfter False TG.Day 2 dhanteras )
, ( "Durga Ashtami", "(durga|maha)(\\s+a)?shtami"
, cycleNthAfter False TG.Day 7 navaratri )
, ( "Ostermontag", "ostermontag"
, cycleNthAfter False TG.Day 1 easterSunday )
, ( "Ostersonntag", "ostersonntag", easterSunday )
, ( "Eid al-Adha", "bakr[\\-\\s]e?id|e?id [au]l\\-adha|opferfest"
, eidalAdha )
, ( "Eid al-Fitr", "eid al\\-fitr", eidalFitr )
, ( "Govardhan Puja", "govardhan\\s+puja|annak(u|oo)t"
, cycleNthAfter False TG.Day 3 dhanteras )
, ( "Karfreitag", "(kar|stiller\\s+|hoher\\s+)freitag"
, cycleNthAfter False TG.Day (-2) easterSunday )
, ( "Guru Gobind Singh Jayanti"
, "guru\\s+(gobind|govind)\\s+singh\\s+(Geburtstag|jayanti)"
, guruGobindSinghJayanti )
, ( "Holi", "(rangwali )?holi|dhuleti|dhulandi|phagwah"
, cycleNthAfter False TG.Day 39 vasantPanchami )
, ( "Holika Dahan", "holika dahan|kamudu pyre|chhoti holi"
, cycleNthAfter False TG.Day 38 vasantPanchami )
, ( "Karsamstag"
, "(kar|stiller\\s+)samstag|karsonnabend"
, cycleNthAfter False TG.Day (-1) easterSunday )
, ( "Islamisches Neujahr", "(arabisches|hijri|islamisches) neujahr|amun jadid|muharram"
, muharram )
, ( "Isra and Mi'raj"
, "isra and mi'raj|aufstieg des propheten|(die\\s+)?nachtreise|aufstieg\\s+in\\s+den\\s+himmel"
, cycleNthAfter False TG.Day 26 rajab
)
, ( "Jumu'atul-Wida", "jumu'atul\\-widaa?'?|jamat[\\-\\s]ul[\\-\\s]vida"
, predNthAfter (-1) (dayOfWeek 5) eidalFitr )
, ( "Kaanum Pongal", "(kaanum|kanni)\\s+pongal"
, cycleNthAfter False TG.Day 2 thaiPongal )
, ( "Lag BaOmer", "lag (b|l)[a']omer", lagBaOmer )
, ( "Vaisakhi", "mesadi|[bv]aisakhi|vaisakhadi|vasakhi|vaishakhi", vaisakhi)
, ( "Lailat al-Qadr"
, "la[iy]lat al[\\-\\s][qk]adr|(die)? nacht der (bestimmung|allmacht)"
, cycleNthAfter False TG.Day 26 ramadan )
, ( "Lazarus-Samstag", "lazarus(\\-|\\s+)samstag"
, cycleNthAfter False TG.Day (-8) orthodoxEaster )
, ( "Maha Navami", "maha\\s+navami", cycleNthAfter False TG.Day 8 navaratri )
, ( "Maha Saptami", "maha\\s+saptami", cycleNthAfter False TG.Day 6 navaratri )
, ( "Mattu Pongal", "maa?ttu\\s+pongal"
, cycleNthAfter False TG.Day 1 thaiPongal )
, ( "Gründonnerstag"
, "(grün|hoher\\s+|heiliger\\s+|weißer\\s+|palm)donnerstag"
, cycleNthAfter False TG.Day (-3) easterSunday )
, ( "Maulid an-Nabī"
, "Maulid\\s+an\\-Nabī|mawlid(\\s+al\\-nab(awi|i\\s+al\\-sharif))?|mevli[dt]|mulud|geburtstag des propheten( muhammad)?"
, mawlid )
, ( "Naraka Chaturdashi"
, "naraka? (nivaran )?chaturdashi|(kali|roop) chaudas|choti diwali"
, cycleNthAfter False TG.Day 1 dhanteras )
, ( "Orthodoxer Ostermontag", "orthodoxer\\s+ostermontag"
, cycleNthAfter False TG.Day 1 orthodoxEaster )
, ( "Orthodoxer Ostersonntag", "orthodoxer\\s+ostersonntag"
, orthodoxEaster )
, ( "Orthodoxer Karsamstag", "orthodoxer\\s+karsamstag"
, cycleNthAfter False TG.Day (-1) orthodoxEaster )
, ( "Orthodoxer Karfreitag", "orthodoxer\\s+karfreitag"
, cycleNthAfter False TG.Day (-2) orthodoxEaster )
, ( "Orthodoxer Palmsonntag", "orthodoxer\\s+palmsonntag"
, cycleNthAfter False TG.Day (-7) orthodoxEaster )
, ( "Palmsonntag", "palmsonntag"
, cycleNthAfter False TG.Day (-7) easterSunday )
, ( "Pfingsten", "pfingsten|pentecost"
, cycleNthAfter False TG.Day 49 easterSunday )
, ( "Purim", "purim", purim )
, ( "Raksha Bandhan", "raksha(\\s+)?bandhan|rakhi", rakshaBandhan )
, ( "Pargat Diwas", "pargat diwas|(maharishi )?valmiki jayanti", pargatDiwas )
, ( "Mahavir Jayanti", "(mahavir|mahaveer) (jayanti|janma kalyanak)"
, mahavirJayanti )
, ( "Maha Shivaratri", "maha(\\s+)?shivaratri", mahaShivaRatri)
, ( "Dayananda Saraswati Jayanti","((maharishi|swami) )?(dayananda )?saraswati jayanti", saraswatiJayanti )
, ( "Karva Chauth", "karva\\s+chauth|karaka\\s+chaturthi", karvaChauth)
, ( "Krishna Janmashtami", "(krishna )?janmashtami|gokulashtami", krishnaJanmashtami )
, ( "Schmini Azeret", "sc?he?mini\\s+at?zeret"
, cycleNthAfter False TG.Day 21 roshHashana )
, ( "Fastnacht", "fastnacht(sdienstag)?|mardi gras"
, cycleNthAfter False TG.Day (-47) easterSunday )
, ( "Shushan Purim", "shushan\\s+purim", cycleNthAfter False TG.Day 1 purim )
, ( "Simchat Torah", "simc?hat\\s+torah"
, cycleNthAfter False TG.Day 22 roshHashana )
, ( "Thai Pongal"
, "(thai )?pongal|pongal pandigai|(makara? |magha )?sankranth?i|maghi"
, thaiPongal )
, ( "Thiru Onam", "(thiru(v|\\s+))?onam", thiruOnam )
, ( "Tisha B'Av", "tisha b'av", tishaBAv )
, ( "Dreifaltigkeitssonntag",
"trinitatis(fest)?|(dreifaltigkeits|goldener\\s+)sonntag|drei(faltigkeit|einigkeit)(sfest)?"
, cycleNthAfter False TG.Day 56 easterSunday )
, ( "Vasant Panchami", "[bv]asant\\s+panchami", vasantPanchami )
, ( "Vijayadashami", "dasara|duss(eh|he)ra|vijayadashami"
, cycleNthAfter False TG.Day 9 navaratri )
, ( "Tu biSchevat", "tu b[i']sc?he?vat", tuBishvat )
, ( "Vesak", "v(e|ai)sak(ha)?|buddha(\\-?tag|\\s+purnima)|wesakfest", vesak )
, ( "Jom Ha'atzmaut", "[yj]om ha'?atzmaut", yomHaatzmaut )
, ( "Jom HaShoah"
, "[yj]om hashoah|[yj]om hazikaron lashoah ve-lag'vurah|holocaust\\-?gedenktag"
, cycleNthAfter False TG.Day 12 passover )
, ( "Jom Kippur", "[yj]om\\s+kippur", cycleNthAfter False TG.Day 9 roshHashana )
, ( "Pfingstmontag", "pfingstmontag|(pentecost|whit)\\s+montag"
, cycleNthAfter False TG.Day 50 easterSunday )
, ( "Rabindra Jayanti", "rabindra(nath)?\\s+jayanti", rabindraJayanti )
, ("Guru Ravidass Jayanti", "guru\\s+ravidass?\\s+(geburtstag|jayanti)"
, ravidassJayanti )
]
ruleComputedHolidays' :: [Rule]
ruleComputedHolidays' = mkRuleHolidays'
[ ( "Global Youth Service-Tag", "global youth service[\\-\\s]?tag|gysd"
, let start = globalYouthServiceDay
end = cycleNthAfter False TG.Day 2 globalYouthServiceDay
in interval TTime.Open start end )
, ( "Große Fastenzeit", "große\\s+fastenzeit"
, let start = cycleNthAfter False TG.Day (-48) orthodoxEaster
end = cycleNthAfter False TG.Day (-9) orthodoxEaster
in interval TTime.Open start end )
, ( "Chanukka", "c?hann?ukk?ah?"
, let start = chanukah
end = cycleNthAfter False TG.Day 7 chanukah
in interval TTime.Open start end )
, ( "Fastenzeit", "fastenzeit"
, let start = cycleNthAfter False TG.Day (-46) easterSunday
end = cycleNthAfter False TG.Day (-1) easterSunday
in interval TTime.Open start end )
, ( "Navaratri", "durga\\s+puja|durgotsava|nava?rath?ri"
, let start = navaratri
end = cycleNthAfter False TG.Day 9 navaratri
in interval TTime.Open start end )
, ( "Pessach", "passover|pess?a[ck]?h|pascha|Passah?"
, let start = passover
end = cycleNthAfter False TG.Day 8 passover
in interval TTime.Open start end )
, ( "Ramadan", "rama[dt]h?an|ramzaa?n"
, let start = ramadan
end = cycleNthAfter False TG.Day (-1) eidalFitr
in interval TTime.Open start end )
, ( "Rosch haSchana", "rosch ha\\-?schanah?"
, let start = roshHashana
end = cycleNthAfter False TG.Day 2 roshHashana
in interval TTime.Open start end )
, ( "Schawuot", "sc?ha[vw]u'?oth?|shovuos"
, let start = cycleNthAfter False TG.Day 50 passover
end = cycleNthAfter False TG.Day 52 passover
in interval TTime.Open start end )
, ( "Sukkot", "Laubhüttenfest|su[ck]{2}o[st]"
, let start = cycleNthAfter False TG.Day 14 roshHashana
end = cycleNthAfter False TG.Day 22 roshHashana
in interval TTime.Open start end )
-- Does not account for leap years, so every 365 days.
, ( "Parsi Neujahr", "parsi neujahr|jamshedi navroz"
, predEveryNDaysFrom 365 (2020, 8, 16)
)
, ( "Earth Hour", "earth hour|stunde der erde"
, computeEarthHour )
, ( "Königstag", "königstag|koningsdag"
, computeKingsDay )
]
ruleRelativeMinutesTotillbeforeIntegerHourofday :: Rule
ruleRelativeMinutesTotillbeforeIntegerHourofday = Rule
{ name = "relative minutes to|till|before <integer> (hour-of-day)"
, pattern =
[ Predicate $ isIntegerBetween 1 59
, regex "vor"
, Predicate isAnHourOfDay
]
, prod = \tokens -> case tokens of
(token:_:Token Time td:_) -> do
n <- getIntValue token
Token Time <$> minutesBefore n td
_ -> Nothing
}
ruleQuarterTotillbeforeIntegerHourofday :: Rule
ruleQuarterTotillbeforeIntegerHourofday = Rule
{ name = "quarter to|till|before <integer> (hour-of-day)"
, pattern =
[regex "vie?rtel vor"
, Predicate isAnHourOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> Token Time <$> minutesBefore 15 td
_ -> Nothing
}
ruleHalfTotillbeforeIntegerHourofday :: Rule
ruleHalfTotillbeforeIntegerHourofday = Rule
{ name = "half to|till|before <integer> (hour-of-day)"
, pattern =
[ regex "halbe? vor"
, Predicate isAnHourOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> Token Time <$> minutesBefore 30 td
_ -> Nothing
}
ruleTheOrdinalCycleOfTime :: Rule
ruleTheOrdinalCycleOfTime = Rule
{ name = "the <ordinal> <cycle> of <time>"
, pattern =
[ regex "der|die|das"
, dimension Ordinal
, dimension TimeGrain
, regex "im|in|von"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Ordinal od:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter True grain (TOrdinal.value od - 1) td
_ -> Nothing
}
ruleNthTimeOfTime2 :: Rule
ruleNthTimeOfTime2 = Rule
{ name = "nth <time> of <time>"
, pattern =
[ regex "der|die|das"
, dimension Ordinal
, dimension Time
, regex "im"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:
Token Ordinal OrdinalData{TOrdinal.value = v}:
Token Time td1:
_:
Token Time td2:
_) -> Token Time . predNth (v - 1) False <$> intersect td2 td1
_ -> Nothing
}
ruleLastTime :: Rule
ruleLastTime = Rule
{ name = "last <time>"
, pattern =
[ regex "letzten?|letztes"
, Predicate isOkWithThisNext
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth (-1) False td
_ -> Nothing
}
ruleDatetimeDatetimeInterval :: Rule
ruleDatetimeDatetimeInterval = Rule
{ name = "<datetime> - <datetime> (interval)"
, pattern =
[ Predicate isNotLatent
, regex "\\-|bis( zum)?|auf( den)?"
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleDateDateInterval :: Rule
ruleDateDateInterval = Rule
{ name = "dd.(mm.)? - dd.mm.(yy[yy]?)? (interval)"
, pattern =
[ regex "(?:vo[nm]\\s+)?(10|20|30|31|[012]?[1-9])\\.?((?<=\\.)(?:10|11|12|0?[1-9])(?:\\.?))?"
, regex "\\-|/|bis( zum)?|auf( den)?"
, regex "(10|20|30|31|[012]?[1-9])\\.(10|11|12|0?[1-9])\\.?((?<=\\.)\\d{2,4})?"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (d1:"":_)):
_:
Token RegexMatch (GroupMatch (d2:m2:"":_)):
_) -> do
d1 <- parseInt d1
d2 <- parseInt d2
m2 <- parseInt m2
Token Time <$> interval TTime.Closed (monthDay m2 d1) (monthDay m2 d2)
(Token RegexMatch (GroupMatch (d1:"":_)):
_:
Token RegexMatch (GroupMatch (d2:m2:y:_)):
_) -> do
d1 <- parseInt d1
d2 <- parseInt d2
m2 <- parseInt m2
y <- parseInt y
Token Time <$> interval TTime.Closed (yearMonthDay y m2 d1) (yearMonthDay y m2 d2)
(Token RegexMatch (GroupMatch (d1:m1:_)):
_:
Token RegexMatch (GroupMatch (d2:m2:"":_)):
_) -> do
d1 <- parseInt d1
d2 <- parseInt d2
m1 <- parseInt m1
m2 <- parseInt m2
Token Time <$> interval TTime.Closed (monthDay m1 d1) (monthDay m2 d2)
(Token RegexMatch (GroupMatch (d1:m1:_)):
_:
Token RegexMatch (GroupMatch (d2:m2:y:_)):
_) -> do
d1 <- parseInt d1
d2 <- parseInt d2
m1 <- parseInt m1
m2 <- parseInt m2
y <- parseInt y
Token Time <$> interval TTime.Closed (yearMonthDay y m1 d1) (yearMonthDay y m2 d2)
_ -> Nothing
}
ruleEvening :: Rule
ruleEvening = Rule
{ name = "evening"
, pattern =
[ regex "abends?"
]
, prod = \_ ->
let from = hour False 18
to = hour False 0
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleTheDayofmonthNonOrdinal :: Rule
ruleTheDayofmonthNonOrdinal = Rule
{ name = "the <day-of-month> (non ordinal)"
, pattern =
[ regex "der"
, Predicate $ isIntegerBetween 1 31
]
, prod = \tokens -> case tokens of
(_:token:_) -> do
v <- getIntValue token
tt $ dayOfMonth v
_ -> Nothing
}
ruleInDuration :: Rule
ruleInDuration = Rule
{ name = "in <duration>"
, pattern =
[ regex "in"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleLastCycleOfTime :: Rule
ruleLastCycleOfTime = Rule
{ name = "last <cycle> of <time>"
, pattern =
[ regex "letzte(r|n|s)?"
, dimension TimeGrain
, regex "um|im"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleLastOf grain td
_ -> Nothing
}
ruleFromDatetimeDatetimeInterval :: Rule
ruleFromDatetimeDatetimeInterval = Rule
{ name = "from <datetime> - <datetime> (interval)"
, pattern =
[ regex "vo[nm]"
, dimension Time
, regex "\\-|bis( zum)?|auf( den)?"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleRelativeMinutesAfterpastIntegerHourofday :: Rule
ruleRelativeMinutesAfterpastIntegerHourofday = Rule
{ name = "relative minutes after|past <integer> (hour-of-day)"
, pattern =
[ Predicate $ isIntegerBetween 1 59
, regex "nach"
, Predicate isAnHourOfDay
]
, prod = \tokens -> case tokens of
(token:
_:
Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
_) -> do
n <- getIntValue token
tt $ hourMinute is12H hours n
_ -> Nothing
}
ruleQuarterAfterpastIntegerHourofday :: Rule
ruleQuarterAfterpastIntegerHourofday = Rule
{ name = "quarter after|past <integer> (hour-of-day)"
, pattern =
[ regex "vie?rtel nach"
, Predicate isAnHourOfDay
]
, prod = \tokens -> case tokens of
(_:
Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
_) -> tt $ hourMinute is12H hours 15
_ -> Nothing
}
ruleHalfAfterpastIntegerHourofday :: Rule
ruleHalfAfterpastIntegerHourofday = Rule
{ name = "half after|past <integer> (hour-of-day)"
, pattern =
[ regex "halbe? nach"
, Predicate isAnHourOfDay
]
, prod = \tokens -> case tokens of
(_:
Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
_) -> tt $ hourMinute is12H hours 30
_ -> Nothing
}
ruleMonthDdddInterval :: Rule
ruleMonthDdddInterval = Rule
{ name = "<month> dd-dd (interval)"
, pattern =
[ regex "([012]?\\d|30|31)(ter|\\.)?"
, regex "\\-|bis( zum)?|auf( den)?"
, regex "([012]?\\d|30|31)(ter|\\.)?"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (m1:_)):
_:
Token RegexMatch (GroupMatch (m2:_)):
Token Time td:
_) -> do
v1 <- parseInt m1
v2 <- parseInt m2
from <- intersect (dayOfMonth v1) td
to <- intersect (dayOfMonth v2) td
Token Time <$> interval TTime.Closed from to
_ -> Nothing
}
ruleTheCycleAfterTime :: Rule
ruleTheCycleAfterTime = Rule
{ name = "the <cycle> after <time>"
, pattern =
[ regex "der"
, dimension TimeGrain
, regex "nach"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter False grain 1 td
_ -> Nothing
}
ruleTheCycleBeforeTime :: Rule
ruleTheCycleBeforeTime = Rule
{ name = "the <cycle> before <time>"
, pattern =
[ regex "der"
, dimension TimeGrain
, regex "vor"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter False grain (-1) td
_ -> Nothing
}
ruleYearLatent2 :: Rule
ruleYearLatent2 = Rule
{ name = "year (latent)"
, pattern =
[ Predicate $ isIntegerBetween 2101 10000
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt . mkLatent $ year v
_ -> Nothing
}
ruleTimeAfterNext :: Rule
ruleTimeAfterNext = Rule
{ name = "<time> after next"
, pattern =
[ dimension Time
, regex "nach dem n(ä)chsten"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
tt $ predNth 1 True td
_ -> Nothing
}
ruleTheIdesOfNamedmonth :: Rule
ruleTheIdesOfNamedmonth = Rule
{ name = "the ides of <named-month>"
, pattern =
[ regex "die iden (des?)"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(_:Token Time td@TimeData {TTime.form = Just (TTime.Month m)}:_) ->
Token Time <$>
intersect (dayOfMonth $ if elem m [3, 5, 7, 10] then 15 else 13) td
_ -> Nothing
}
ruleNoon :: Rule
ruleNoon = Rule
{ name = "noon"
, pattern =
[ regex "mittags?|zw(ö)lf (uhr)?"
]
, prod = \_ -> tt $ hour False 12
}
ruleThisnextDayofweek :: Rule
ruleThisnextDayofweek = Rule
{ name = "this|next <day-of-week>"
, pattern =
[ regex "diese(n|r)|kommenden|n(ä)chsten"
, Predicate isADayOfWeek
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 0 True td
_ -> Nothing
}
ruleBetweenTimeofdayAndTimeofdayInterval :: Rule
ruleBetweenTimeofdayAndTimeofdayInterval = Rule
{ name = "between <time-of-day> and <time-of-day> (interval)"
, pattern =
[ regex "zwischen"
, Predicate isATimeOfDay
, regex "und"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleNextCycle :: Rule
ruleNextCycle = Rule
{ name = "next <cycle>"
, pattern =
[ regex "n(ä)chste(r|n|s)?|kommende(r|n|s)?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 1
_ -> Nothing
}
ruleAfterNextCycle :: Rule
ruleAfterNextCycle = Rule
{ name = "after next <cycle>"
, pattern =
[ regex "(ü)ber ?n(ä)chste[ns]?"
, dimension TimeGrain
]
, prod = \case
(_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 2
_ -> Nothing
}
ruleTimeofdayApproximately :: Rule
ruleTimeofdayApproximately = Rule
{ name = "<time-of-day> approximately"
, pattern =
[ Predicate isATimeOfDay
, regex "ca\\.?|circa|zirka|ungef(ä)hr|(in )?etwa"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleOnDate :: Rule
ruleOnDate = Rule
{ name = "on <date>"
, pattern =
[ regex "am"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:x:_) -> Just x
_ -> Nothing
}
ruleDurationFromNow :: Rule
ruleDurationFromNow = Rule
{ name = "<duration> from now"
, pattern =
[ dimension Duration
, regex "ab (heute|jetzt)"
]
, prod = \tokens -> case tokens of
(Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleLunch :: Rule
ruleLunch = Rule
{ name = "lunch"
, pattern =
[ regex "(am |zu )?mittags?"
]
, prod = \_ ->
let from = hour False 12
to = hour False 14
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleLastCycle :: Rule
ruleLastCycle = Rule
{ name = "last <cycle>"
, pattern =
[ regex "letzte(r|n|s)?|vergangene(r|n|s)?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt . cycleNth grain $ - 1
_ -> Nothing
}
ruleAfternoon :: Rule
ruleAfternoon = Rule
{ name = "afternoon"
, pattern =
[ regex "nach ?mittags?"
]
, prod = \_ ->
let from = hour False 12
to = hour False 19
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleTimeBeforeLast :: Rule
ruleTimeBeforeLast = Rule
{ name = "<time> before last"
, pattern =
[ regex "vorletzten?|vor ?letztes?"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth (-2) False td
_ -> Nothing
}
ruleNamedmonthDayofmonthOrdinal :: Rule
ruleNamedmonthDayofmonthOrdinal = Rule
{ name = "<named-month> <day-of-month> (ordinal)"
, pattern =
[ Predicate isAMonth
, Predicate isDOMOrdinal
]
, prod = \tokens -> case tokens of
(Token Time td:token:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleInduringThePartofday :: Rule
ruleInduringThePartofday = Rule
{ name = "in|during the <part-of-day>"
, pattern =
[ regex "(in|an|am|w(ä)h?rend)( der| dem| des)?"
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ notLatent td
_ -> Nothing
}
ruleHourofdayIntegerAsRelativeMinutes :: Rule
ruleHourofdayIntegerAsRelativeMinutes = Rule
{ name = "<hour-of-day> <integer> (as relative minutes)"
, pattern =
[ Predicate $ and . sequence [isNotLatent, isAnHourOfDay]
, Predicate $ isIntegerBetween 1 59
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute is12H hours n
_ -> Nothing
}
ruleHourofdayQuarter :: Rule
ruleHourofdayQuarter = Rule
{ name = "<hour-of-day> <quarter> (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "vie?rtel"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:_) ->
tt $ hourMinute is12H hours 15
_ -> Nothing
}
ruleHourofdayHalf :: Rule
ruleHourofdayHalf = Rule
{ name = "<hour-of-day> <half> (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, regex "halbe?"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:_) ->
tt $ hourMinute is12H hours 30
_ -> Nothing
}
ruleDayofmonthordinalNamedmonth :: Rule
ruleDayofmonthordinalNamedmonth = Rule
{ name = "<day-of-month>(ordinal) <named-month>"
, pattern =
[ Predicate isDOMOrdinal
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleIntersectBy :: Rule
ruleIntersectBy = Rule
{ name = "intersect by ','"
, pattern =
[ Predicate isNotLatent
, regex ",( den|r)?"
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleNthTimeAfterTime :: Rule
ruleNthTimeAfterTime = Rule
{ name = "nth <time> after <time>"
, pattern =
[ dimension Ordinal
, dimension Time
, regex "nach"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal OrdinalData{TOrdinal.value = v}:
Token Time td1:
_:
Token Time td2:
_) -> tt $ predNthAfter (v - 1) td1 td2
_ -> Nothing
}
ruleMmdd :: Rule
ruleMmdd = Rule
{ name = "mm/dd"
, pattern =
[ regex "(?:am\\s+)?([012]?[1-9]|10|20|30|31)\\.(10|11|12|0?[1-9])\\.?"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (m1:m2:_)):_) -> do
d <- parseInt m1
m <- parseInt m2
tt $ monthDay m d
_ -> Nothing
}
ruleAfterDuration :: Rule
ruleAfterDuration = Rule
{ name = "after <duration>"
, pattern =
[ regex "nach"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleTimeofdayLatent :: Rule
ruleTimeofdayLatent = Rule
{ name = "time-of-day (latent)"
, pattern =
[ Predicate $ isIntegerBetween 0 23
]
, prod = \tokens -> case tokens of
(token:_) -> do
n <- getIntValue token
tt . mkLatent $ hour (n < 12) n
_ -> Nothing
}
ruleFromTimeofdayTimeofdayInterval :: Rule
ruleFromTimeofdayTimeofdayInterval = Rule
{ name = "from <time-of-day> - <time-of-day> (interval)"
, pattern =
[ regex "(von|nach|ab|fr(ü)hestens (um)?)"
, Predicate isATimeOfDay
, regex "((noch|aber|jedoch)? vor)|\\-|bis"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleExactlyTimeofday :: Rule
ruleExactlyTimeofday = Rule
{ name = "exactly <time-of-day>"
, pattern =
[ regex "genau|exakt|p(ü)nktlich|punkt( um)?"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleBetweenDatetimeAndDatetimeInterval :: Rule
ruleBetweenDatetimeAndDatetimeInterval = Rule
{ name = "between <datetime> and <datetime> (interval)"
, pattern =
[ regex "zwischen"
, dimension Time
, regex "und"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleDurationAgo :: Rule
ruleDurationAgo = Rule
{ name = "<duration> ago"
, pattern =
[ regex "vor"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt $ durationAgo dd
_ -> Nothing
}
ruleByTheEndOfTime :: Rule
ruleByTheEndOfTime = Rule
{ name = "by the end of <time>"
, pattern =
[ regex "bis (zum)? ende (von)?|(noch)? vor"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> Token Time <$> interval TTime.Closed td now
_ -> Nothing
}
ruleAfterWork :: Rule
ruleAfterWork = Rule
{ name = "after work"
, pattern =
[ regex "nach (der)? arbeit|(am)? feier ?abend"
]
, prod = \_ -> do
td2 <- interval TTime.Open (hour False 17) (hour False 21)
Token Time . partOfDay <$> intersect today td2
}
ruleLastNCycle :: Rule
ruleLastNCycle = Rule
{ name = "last n <cycle>"
, pattern =
[ regex "letzten?|vergangenen?"
, Predicate $ isIntegerBetween 1 9999
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:token:Token TimeGrain grain:_) -> do
n <- getIntValue token
tt $ cycleN True grain (- n)
_ -> Nothing
}
ruleTimeofdaySharp :: Rule
ruleTimeofdaySharp = Rule
{ name = "<time-of-day> sharp"
, pattern =
[ Predicate isATimeOfDay
, regex "genau|exakt|p(ü)nktlich|punkt( um)?"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleWithinDuration :: Rule
ruleWithinDuration = Rule
{ name = "within <duration>"
, pattern =
[ regex "binnen|innerhalb( von)?"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) -> Token Time <$>
interval TTime.Open now (inDuration dd)
_ -> Nothing
}
ruleMidnighteodendOfDay :: Rule
ruleMidnighteodendOfDay = Rule
{ name = "midnight|EOD|end of day"
, pattern =
[ regex "mitternacht|EOD|tagesende|ende (des)? tag(es)?"
]
, prod = \_ -> tt $ hour False 0
}
ruleDayofmonthNonOrdinalNamedmonth :: Rule
ruleDayofmonthNonOrdinalNamedmonth = Rule
{ name = "<day-of-month> (non ordinal) <named-month>"
, pattern =
[ Predicate isDOMInteger
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleIntersect :: Rule
ruleIntersect = Rule
{ name = "intersect"
, pattern =
[ Predicate isNotLatent
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleDayOfWeekIntersectDuration :: Rule
ruleDayOfWeekIntersectDuration = Rule
{ name = "<day-of-week> in <duration>"
, pattern =
[ Predicate isADayOfWeek
, regex "(in|vor)"
, dimension Duration
]
, prod = \case
(Token Time td:Token RegexMatch (GroupMatch (match:_)):Token Duration dd:_) ->
case Text.toLower match of
"vor" -> Token Time <$> intersect td (durationIntervalAgo dd)
_ -> Token Time <$> intersect td (inDurationInterval dd)
_ -> Nothing
}
ruleAboutTimeofday :: Rule
ruleAboutTimeofday = Rule
{ name = "about <time-of-day>"
, pattern =
[ regex "so( um)?|(so |um |so um )?circa|zirka|ca\\.?|ungef(ä)hr|(etwa|gegen)( so| um| so um)?"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleUntilTimeofday :: Rule
ruleUntilTimeofday = Rule
{ name = "until <time-of-day>"
, pattern =
[ regex "vor|bis( zu[rm]?)?|sp(ä)testens?"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ withDirection TTime.Before td
_ -> Nothing
}
ruleUntilTimeofdayPostfix :: Rule
ruleUntilTimeofdayPostfix = Rule
{ name = "<time-of-day> until"
, pattern =
[ dimension Time
, regex "sp(ä)testens"
]
, prod = \tokens -> case tokens of
(Token Time td:_:_) -> tt $ withDirection TTime.Before td
_ -> Nothing
}
ruleAtTimeofday :: Rule
ruleAtTimeofday = Rule
{ name = "at <time-of-day>"
, pattern =
[ regex "um|@"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleNthTimeOfTime :: Rule
ruleNthTimeOfTime = Rule
{ name = "nth <time> of <time>"
, pattern =
[ dimension Ordinal
, dimension Time
, regex "im"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal OrdinalData{TOrdinal.value = v}:
Token Time td1:
_:
Token Time td2:
_) -> Token Time . predNth (v - 1) False <$> intersect td2 td1
_ -> Nothing
}
ruleTimePartofday :: Rule
ruleTimePartofday = Rule
{ name = "<time> <part-of-day>"
, pattern =
[ dimension Time
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleWeekend :: Rule
ruleWeekend = Rule
{ name = "week-end"
, pattern =
[ regex "wochen ?ende?"
]
, prod = \_ -> tt $ mkOkForThisNext weekend
}
ruleNthTimeAfterTime2 :: Rule
ruleNthTimeAfterTime2 = Rule
{ name = "nth <time> after <time>"
, pattern =
[ regex "der|das"
, dimension Ordinal
, dimension Time
, regex "nach"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:
Token Ordinal OrdinalData{TOrdinal.value = v}:
Token Time td1:
_:
Token Time td2:
_) -> tt $ predNthAfter (v - 1) td1 td2
_ -> Nothing
}
ruleNextTime :: Rule
ruleNextTime = Rule
{ name = "next <time>"
, pattern =
[ regex "(n(ä)chste|kommende)[ns]?"
, Predicate $ and . sequence [isNotLatent, isOkWithThisNext]
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 0 True td
_ -> Nothing
}
ruleOrdinalQuarterYear :: Rule
ruleOrdinalQuarterYear = Rule
{ name = "<ordinal> quarter <year>"
, pattern =
[ dimension Ordinal
, Predicate $ isGrain TG.Quarter
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal od:_:Token Time td:_) ->
tt $ cycleNthAfter False TG.Quarter (TOrdinal.value od - 1) td
_ -> Nothing
}
ruleYyyymmdd :: Rule
ruleYyyymmdd = Rule
{ name = "yyyy-mm-dd"
, pattern =
[ regex "(\\d{2,4})-(1[0-2]|0?[1-9])-(3[01]|[12]\\d|0?[1-9])"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (m1:m2:m3:_)):_) -> do
y <- parseInt m1
m <- parseInt m2
d <- parseInt m3
tt $ yearMonthDay y m d
_ -> Nothing
}
ruleTheOrdinalCycleAfterTime :: Rule
ruleTheOrdinalCycleAfterTime = Rule
{ name = "the <ordinal> <cycle> after <time>"
, pattern =
[ regex "the"
, dimension Ordinal
, dimension TimeGrain
, regex "nach"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Ordinal od:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter True grain (TOrdinal.value od - 1) td
_ -> Nothing
}
ruleIntersectByOfFromS :: Rule
ruleIntersectByOfFromS = Rule
{ name = "intersect by 'of', 'from', 's"
, pattern =
[ Predicate isNotLatent
, regex "von|der|im"
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleNextNCycle :: Rule
ruleNextNCycle = Rule
{ name = "next n <cycle>"
, pattern =
[ regex "n(ä)chsten?|kommenden?"
, Predicate $ isIntegerBetween 1 9999
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:token:Token TimeGrain grain:_) -> do
v <- getIntValue token
tt $ cycleN True grain v
_ -> Nothing
}
ruleADuration :: Rule
ruleADuration = Rule
{ name = "a <duration>"
, pattern =
[ regex "(in )?eine?(r|n)?"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleMorning :: Rule
ruleMorning = Rule
{ name = "morning"
, pattern =
[ regex "morgens|(in der )?fr(ü)h|vor ?mittags?|am morgen"
]
, prod = \_ ->
let from = hour False 3
to = hour False 12
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleThisPartofday :: Rule
ruleThisPartofday = Rule
{ name = "this <part-of-day>"
, pattern =
[ regex "diesen?|dieses|heute"
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> Token Time . partOfDay <$> intersect today td
_ -> Nothing
}
ruleThisCycle :: Rule
ruleThisCycle = Rule
{ name = "this <cycle>"
, pattern =
[ regex "diese(r|n|s)?|kommende(r|n|s)?"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) ->
tt $ cycleNth grain 0
_ -> Nothing
}
ruleThisTime :: Rule
ruleThisTime = Rule
{ name = "this <time>"
, pattern =
[ regex "diese(n|r|s)?|(im )?laufenden"
, Predicate isOkWithThisNext
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 0 False td
_ -> Nothing
}
ruleDurationHence :: Rule
ruleDurationHence = Rule
{ name = "<duration> hence"
, pattern =
[ dimension Duration
, regex "hence"
]
, prod = \tokens -> case tokens of
(Token Duration dd:_) ->
tt $ inDuration dd
_ -> Nothing
}
ruleDayofmonthNonOrdinalOfNamedmonth :: Rule
ruleDayofmonthNonOrdinalOfNamedmonth = Rule
{ name = "<day-of-month> (non ordinal) of <named-month>"
, pattern =
[ Predicate isDOMInteger
, regex "vom|von"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:_:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleAfterLunch :: Rule
ruleAfterLunch = Rule
{ name = "after lunch"
, pattern =
[ regex "nach dem mittagessen|nachmittags?"
]
, prod = \_ -> do
td2 <- interval TTime.Open (hour False 13) (hour False 17)
Token Time . partOfDay <$> intersect today td2
}
ruleOnANamedday :: Rule
ruleOnANamedday = Rule
{ name = "on a named-day"
, pattern =
[ regex "an einem"
, Predicate isADayOfWeek
]
, prod = \tokens -> case tokens of
(_:x:_) -> Just x
_ -> Nothing
}
ruleYearLatent :: Rule
ruleYearLatent = Rule
{ name = "year (latent)"
, pattern =
[ Predicate $
or . sequence [isIntegerBetween (- 10000) 0, isIntegerBetween 25 999]
]
, prod = \tokens -> case tokens of
(token:_) -> do
y <- getIntValue token
tt . mkLatent $ year y
_ -> Nothing
}
ruleAfterTimeofday :: Rule
ruleAfterTimeofday = Rule
{ name = "after <time-of-day>"
, pattern =
[ regex "nach|ab|fr(ü)he?stens"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ withDirection TTime.After td
_ -> Nothing
}
ruleAfterTimeofdayPostfix :: Rule
ruleAfterTimeofdayPostfix = Rule
{ name = "<time-of-day> after"
, pattern =
[ dimension Time
, regex "fr(ü)he?stens"
]
, prod = \tokens -> case tokens of
(Token Time td:_:_) -> tt $ withDirection TTime.After td
_ -> Nothing
}
ruleNight :: Rule
ruleNight = Rule
{ name = "night"
, pattern =
[ regex "nachts?"
]
, prod = \_ ->
let from = hour False 0
to = hour False 4
in Token Time . mkLatent . partOfDay <$>
interval TTime.Open from to
}
ruleDayofmonthOrdinal :: Rule
ruleDayofmonthOrdinal = Rule
{ name = "<day-of-month> (ordinal)"
, pattern =
[ Predicate isDOMOrdinal
]
, prod = \tokens -> case tokens of
(Token Ordinal OrdinalData{TOrdinal.value = v}:_) ->
tt $ dayOfMonth v
_ -> Nothing
}
ruleHalfIntegerGermanStyleHourofday :: Rule
ruleHalfIntegerGermanStyleHourofday = Rule
{ name = "half <integer> (german style hour-of-day)"
, pattern =
[ regex "halb"
, Predicate isAnHourOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> Token Time <$> minutesBefore 30 td
_ -> Nothing
}
ruleOrdinalCycleAfterTime :: Rule
ruleOrdinalCycleAfterTime = Rule
{ name = "<ordinal> <cycle> after <time>"
, pattern =
[ dimension Ordinal
, dimension TimeGrain
, regex "nach"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal od:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter True grain (TOrdinal.value od - 1) td
_ -> Nothing
}
ruleOrdinalCycleOfTime :: Rule
ruleOrdinalCycleOfTime = Rule
{ name = "<ordinal> <cycle> of <time>"
, pattern =
[ dimension Ordinal
, dimension TimeGrain
, regex "im|in|von"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Ordinal od:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleNthAfter True grain (TOrdinal.value od - 1) td
_ -> Nothing
}
ruleAfterNextTime :: Rule
ruleAfterNextTime = Rule
{ name = "after next <time>"
, pattern =
[ regex "(ü)ber ?n(ä)chste[ns]?"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ predNth 1 True td
_ -> Nothing
}
ruleHhmm :: Rule
ruleHhmm = Rule
{ name = "hh:mm"
, pattern =
[ regex "((?:[01]?\\d)|(?:2[0-3]))[:.h]([0-5]\\d)(?:uhr|h)?"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (m1:m2:_)):_) -> do
h <- parseInt m1
m <- parseInt m2
tt $ hourMinute False h m
_ -> Nothing
}
ruleTonight :: Rule
ruleTonight = Rule
{ name = "tonight"
, pattern =
[ regex "heute? (am)? abends?"
]
, prod = \_ -> do
td2 <- interval TTime.Open (hour False 18) (hour False 0)
Token Time . partOfDay <$> intersect today td2
}
ruleYear :: Rule
ruleYear = Rule
{ name = "year"
, pattern =
[ Predicate $ isIntegerBetween 1000 2100
]
, prod = \tokens -> case tokens of
(token:_) -> do
y <- getIntValue token
tt $ year y
_ -> Nothing
}
ruleNamedmonthDayofmonthNonOrdinal :: Rule
ruleNamedmonthDayofmonthNonOrdinal = Rule
{ name = "<named-month> <day-of-month> (non ordinal)"
, pattern =
[ Predicate isAMonth
, Predicate isDOMInteger
]
, prod = \tokens -> case tokens of
(Token Time td:token:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleHhmmMilitary :: Rule
ruleHhmmMilitary = Rule
{ name = "hhmm (military)"
, pattern =
[ regex "((?:[01]?\\d)|(?:2[0-3]))([0-5]\\d)"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (h:m:_)):_) -> do
hh <- parseInt h
mm <- parseInt m
tt . mkLatent $ hourMinute False hh mm
_ -> Nothing
}
ruleAbsorptionOfAfterNamedDay :: Rule
ruleAbsorptionOfAfterNamedDay = Rule
{ name = "absorption of , after named day"
, pattern =
[ Predicate isADayOfWeek
, regex ","
]
, prod = \tokens -> case tokens of
(x:_) -> Just x
_ -> Nothing
}
ruleLastDayofweekOfTime :: Rule
ruleLastDayofweekOfTime = Rule
{ name = "last <day-of-week> of <time>"
, pattern =
[ regex "letzte(r|n|s)?"
, Predicate isADayOfWeek
, regex "[ui]m"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
tt $ predLastOf td1 td2
_ -> Nothing
}
ruleHhmmMilitaryAmpm :: Rule
ruleHhmmMilitaryAmpm = Rule
{ name = "hhmm (military) am|pm"
, pattern =
[ regex "((?:1[012]|0?\\d))([0-5]\\d)"
, regex "([ap])\\.?m\\.?(?:[\\s'\"-_{}\\[\\]()]|$)"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (hh:mm:_)):Token RegexMatch (GroupMatch (ap:_)):_) -> do
h <- parseInt hh
m <- parseInt mm
tt . timeOfDayAMPM (Text.toLower ap == "a") $ hourMinute True h m
_ -> Nothing
}
ruleTimeofdayTimeofdayInterval :: Rule
ruleTimeofdayTimeofdayInterval = Rule
{ name = "<time-of-day> - <time-of-day> (interval)"
, pattern =
[ Predicate $ and . sequence [isNotLatent, isATimeOfDay]
, regex "\\-|bis"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleTimeofdayTimeofdayInterval2 :: Rule
ruleTimeofdayTimeofdayInterval2 = Rule
{ name = "<time-of-day> - <time-of-day> (interval)"
, pattern =
[ Predicate isATimeOfDay
, regex "\\-|/|bis"
, Predicate $ and . sequence [isNotLatent, isATimeOfDay]
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleDurationAfterTime :: Rule
ruleDurationAfterTime = Rule
{ name = "<duration> after <time>"
, pattern =
[ dimension Duration
, regex "nach"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Duration dd:_:Token Time td:_) ->
tt $ durationAfter dd td
_ -> Nothing
}
ruleOrdinalQuarter :: Rule
ruleOrdinalQuarter = Rule
{ name = "<ordinal> quarter"
, pattern =
[ dimension Ordinal
, Predicate $ isGrain TG.Quarter
]
, prod = \tokens -> case tokens of
(Token Ordinal OrdinalData{TOrdinal.value = v}:_) -> tt .
cycleNthAfter False TG.Quarter (v - 1) $ cycleNth TG.Year 0
_ -> Nothing
}
ruleTheDayofmonthOrdinal :: Rule
ruleTheDayofmonthOrdinal = Rule
{ name = "the <day-of-month> (ordinal)"
, pattern =
[ regex "der"
, Predicate isDOMOrdinal
]
, prod = \tokens -> case tokens of
(_:Token Ordinal OrdinalData{TOrdinal.value = v}:_) ->
tt $ dayOfMonth v
_ -> Nothing
}
ruleDurationBeforeTime :: Rule
ruleDurationBeforeTime = Rule
{ name = "<duration> before <time>"
, pattern =
[ dimension Duration
, regex "vor"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Duration dd:_:Token Time td:_) ->
tt $ durationBefore dd td
_ -> Nothing
}
rulePartofdayOfTime :: Rule
rulePartofdayOfTime = Rule
{ name = "<part-of-day> of <time>"
, pattern =
[ Predicate isAPartOfDay
, regex "des|von|vom|am"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleMmddyyyy :: Rule
ruleMmddyyyy = Rule
{ name = "mm/dd/yyyy"
, pattern =
[ regex "([012]?[1-9]|10|20|30|31)\\.(0?[1-9]|10|11|12)\\.(\\d{2,4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (m1:m2:m3:_)):_) -> do
y <- parseInt m3
m <- parseInt m2
d <- parseInt m1
tt $ yearMonthDay y m d
_ -> Nothing
}
ruleTimeofdayOclock :: Rule
ruleTimeofdayOclock = Rule
{ name = "<time-of-day> o'clock"
, pattern =
[ Predicate isATimeOfDay
, regex "uhr|h(?:[\\s'\"-_{}\\[\\]()]|$)"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
tt $ notLatent td
_ -> Nothing
}
ruleDayofmonthordinalNamedmonthYear :: Rule
ruleDayofmonthordinalNamedmonthYear = Rule
{ name = "<day-of-month>(ordinal) <named-month> year"
, pattern =
[ Predicate isDOMOrdinal
, Predicate isAMonth
, regex "(\\d{2,4})"
]
, prod = \tokens -> case tokens of
(token:
Token Time td:
Token RegexMatch (GroupMatch (match:_)):
_) -> do
n <- parseInt match
dom <- intersectDOM td token
Token Time <$> intersect dom (year n)
_ -> Nothing
}
ruleTimezone :: Rule
ruleTimezone = Rule
{ name = "<time> timezone"
, pattern =
[ Predicate $ and . sequence [isNotLatent, isATimeOfDay]
, regex "\\b(YEKT|YEKST|YAKT|YAKST|WITA|WIT|WIB|WGT|WGST|WFT|WET|WEST|WAT|WAST|VUT|VLAT|VLAST|VET|UZT|UYT|UYST|UTC|ULAT|TVT|TMT|TLT|TKT|TJT|TFT|TAHT|SST|SRT|SGT|SCT|SBT|SAST|SAMT|RET|PYT|PYST|PWT|PST|PONT|PMST|PMDT|PKT|PHT|PHOT|PGT|PETT|PETST|PET|PDT|OMST|OMSST|NZST|NZDT|NUT|NST|NPT|NOVT|NOVST|NFT|NDT|NCT|MYT|MVT|MUT|MST|MSK|MSD|MMT|MHT|MDT|MAWT|MART|MAGT|MAGST|LINT|LHST|LHDT|KUYT|KST|KRAT|KRAST|KGT|JST|IST|IRST|IRKT|IRKST|IRDT|IOT|IDT|ICT|HOVT|HKT|GYT|GST|GMT|GILT|GFT|GET|GAMT|GALT|FNT|FKT|FKST|FJT|FJST|EST|EGT|EGST|EET|EEST|EDT|ECT|EAT|EAST|EASST|DAVT|ChST|CXT|CVT|CST|COT|CLT|CLST|CKT|CHAST|CHADT|CET|CEST|CDT|CCT|CAT|CAST|BTT|BST|BRT|BRST|BOT|BNT|AZT|AZST|AZOT|AZOST|AWST|AWDT|AST|ART|AQTT|ANAT|ANAST|AMT|AMST|ALMT|AKST|AKDT|AFT|AEST|AEDT|ADT|ACST|ACDT)\\b"
]
, prod = \tokens -> case tokens of
(Token Time td:
Token RegexMatch (GroupMatch (tz:_)):
_) -> Token Time <$> inTimezone (Text.toUpper tz) td
_ -> Nothing
}
rules :: [Rule]
rules =
[ ruleADuration
, ruleAboutTimeofday
, ruleAbsorptionOfAfterNamedDay
, ruleAfterDuration
, ruleAfterLunch
, ruleAfterNextTime
, ruleAfterTimeofday
, ruleAfterTimeofdayPostfix
, ruleAfterWork
, ruleAfternoon
, ruleAtTimeofday
, ruleBetweenDatetimeAndDatetimeInterval
, ruleBetweenTimeofdayAndTimeofdayInterval
, ruleByTheEndOfTime
, ruleDatetimeDatetimeInterval
, ruleDateDateInterval
, ruleDayofmonthNonOrdinalNamedmonth
, ruleDayofmonthNonOrdinalOfNamedmonth
, ruleDayofmonthOrdinal
, ruleDayofmonthordinalNamedmonth
, ruleDayofmonthordinalNamedmonthYear
, ruleDurationAfterTime
, ruleDurationAgo
, ruleDurationBeforeTime
, ruleDurationFromNow
, ruleDurationHence
, ruleEvening
, ruleExactlyTimeofday
, ruleFromDatetimeDatetimeInterval
, ruleFromTimeofdayTimeofdayInterval
, ruleHalfIntegerGermanStyleHourofday
, ruleHhmm
, ruleHhmmMilitary
, ruleHhmmMilitaryAmpm
, ruleHourofdayIntegerAsRelativeMinutes
, ruleInDuration
, ruleInduringThePartofday
, ruleIntersect
, ruleIntersectBy
, ruleIntersectByOfFromS
, ruleDayOfWeekIntersectDuration
, ruleLastCycle
, ruleLastCycleOfTime
, ruleLastDayofweekOfTime
, ruleLastNCycle
, ruleLastTime
, ruleLunch
, ruleMidnighteodendOfDay
, ruleMmdd
, ruleMmddyyyy
, ruleMonthDdddInterval
, ruleMorning
, ruleNamedmonthDayofmonthNonOrdinal
, ruleNamedmonthDayofmonthOrdinal
, ruleNextCycle
, ruleAfterNextCycle
, ruleNextNCycle
, ruleNextTime
, ruleNight
, ruleNoon
, ruleNthTimeAfterTime
, ruleNthTimeAfterTime2
, ruleNthTimeOfTime
, ruleNthTimeOfTime2
, ruleOnANamedday
, ruleOnDate
, ruleOrdinalCycleAfterTime
, ruleOrdinalCycleOfTime
, ruleOrdinalQuarter
, ruleOrdinalQuarterYear
, rulePartofdayOfTime
, ruleRelativeMinutesAfterpastIntegerHourofday
, ruleRelativeMinutesTotillbeforeIntegerHourofday
, ruleTheCycleAfterTime
, ruleTheCycleBeforeTime
, ruleTheDayofmonthNonOrdinal
, ruleTheDayofmonthOrdinal
, ruleTheIdesOfNamedmonth
, ruleTheOrdinalCycleAfterTime
, ruleTheOrdinalCycleOfTime
, ruleThisCycle
, ruleThisPartofday
, ruleThisTime
, ruleThisnextDayofweek
, ruleTimeAfterNext
, ruleTimeBeforeLast
, ruleTimePartofday
, ruleTimeofdayApproximately
, ruleTimeofdayLatent
, ruleTimeofdayOclock
, ruleTimeofdaySharp
, ruleTimeofdayTimeofdayInterval
, ruleTimeofdayTimeofdayInterval2
, ruleTonight
, ruleUntilTimeofday
, ruleUntilTimeofdayPostfix
, ruleWeekend
, ruleWithinDuration
, ruleYear
, ruleYearLatent
, ruleYearLatent2
, ruleYyyymmdd
, ruleQuarterTotillbeforeIntegerHourofday
, ruleHalfTotillbeforeIntegerHourofday
, ruleQuarterAfterpastIntegerHourofday
, ruleHalfAfterpastIntegerHourofday
, ruleHourofdayQuarter
, ruleHourofdayHalf
, ruleTimezone
]
++ ruleInstants
++ ruleDaysOfWeek
++ ruleMonths
++ ruleSeasons
++ ruleHolidays
++ ruleComputedHolidays
++ ruleComputedHolidays'
|
facebookincubator/duckling
|
Duckling/Time/DE/Rules.hs
|
bsd-3-clause
| 56,000
| 0
| 23
| 14,209
| 15,316
| 8,322
| 6,994
| 1,581
| 5
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- Load information on package sources
module Stack.Build.Source
( loadSourceMap
, SourceMap
, PackageSource (..)
, localFlags
, loadLocals
) where
import Network.HTTP.Client.Conduit (HasHttpManager)
import Control.Applicative ((<|>), (<$>), (<*>))
import Control.Exception (catch)
import Control.Monad
import Control.Monad.Catch (MonadCatch)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Resource
import Crypto.Hash (Digest, SHA256)
import Crypto.Hash.Conduit (sinkHash)
import Data.Byteable (toBytes)
import qualified Data.ByteString as S
import Data.Conduit (($$), ZipSink (..))
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import Data.Either
import qualified Data.Foldable as F
import Data.Function
import qualified Data.HashSet as HashSet
import Data.List
import qualified Data.Map as Map
import Data.Map.Strict (Map)
import Data.Maybe
import Data.Monoid ((<>), Any (..), mconcat)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Path
import Prelude hiding (writeFile)
import Stack.Build.Cache
import Stack.Build.Types
import Stack.BuildPlan (loadMiniBuildPlan,
shadowMiniBuildPlan)
import Stack.Constants (wiredInPackages)
import Stack.Package
import Stack.PackageIndex
import Stack.Types
import System.Directory hiding (findExecutable, findFiles)
import System.IO (withBinaryFile, IOMode (ReadMode))
import System.IO.Error (isDoesNotExistError)
type SourceMap = Map PackageName PackageSource
-- | Where the package's source is located: local directory or package index
data PackageSource
= PSLocal LocalPackage
| PSUpstream Version InstallLocation (Map FlagName Bool)
-- ^ Upstream packages could be installed in either local or snapshot
-- databases; this is what 'InstallLocation' specifies.
deriving Show
instance PackageInstallInfo PackageSource where
piiVersion (PSLocal lp) = packageVersion $ lpPackage lp
piiVersion (PSUpstream v _ _) = v
piiLocation (PSLocal _) = Local
piiLocation (PSUpstream _ loc _) = loc
loadSourceMap :: (MonadIO m, MonadCatch m, MonadReader env m, HasBuildConfig env, MonadBaseControl IO m, HasHttpManager env, MonadLogger m, HasEnvConfig env)
=> BuildOpts
-> m ( MiniBuildPlan
, [LocalPackage]
, Set PackageName -- non-local targets
, SourceMap
)
loadSourceMap bopts = do
bconfig <- asks getBuildConfig
mbp0 <- case bcResolver bconfig of
ResolverSnapshot snapName -> do
$logDebug $ "Checking resolver: " <> renderSnapName snapName
loadMiniBuildPlan snapName
ResolverGhc ghc -> return MiniBuildPlan
{ mbpGhcVersion = fromMajorVersion ghc
, mbpPackages = Map.empty
}
menv <- getMinimalEnvOverride
caches <- getPackageCaches menv
let latestVersion = Map.fromList $ map toTuple $ Map.keys caches
(locals, extraNames, extraIdents) <- loadLocals bopts latestVersion
let
-- loadLocals returns PackageName (foo) and PackageIdentifier (bar-1.2.3) targets separately;
-- here we combine them into nonLocalTargets. This is one of the
-- return values of this function.
nonLocalTargets :: Set PackageName
nonLocalTargets = extraNames <> Set.map packageIdentifierName extraIdents
-- Extend extra-deps to encompass targets requested on the command line
-- that are not in the snapshot.
extraDeps0 = extendExtraDeps
(bcExtraDeps bconfig)
mbp0
latestVersion
extraNames
extraIdents
let shadowed = Set.fromList (map (packageName . lpPackage) locals)
<> Map.keysSet extraDeps0
(mbp, extraDeps1) = shadowMiniBuildPlan mbp0 shadowed
-- Add the extra deps from the stack.yaml file to the deps grabbed from
-- the snapshot
extraDeps2 = Map.union
(Map.map (\v -> (v, Map.empty)) extraDeps0)
(Map.map (\mpi -> (mpiVersion mpi, mpiFlags mpi)) extraDeps1)
-- Overwrite any flag settings with those from the config file
extraDeps3 = Map.mapWithKey
(\n (v, f) -> PSUpstream v Local $ fromMaybe f $ Map.lookup n $ bcFlags bconfig)
extraDeps2
let sourceMap = Map.unions
[ Map.fromList $ flip map locals $ \lp ->
let p = lpPackage lp
in (packageName p, PSLocal lp)
, extraDeps3
, flip fmap (mbpPackages mbp) $ \mpi ->
(PSUpstream (mpiVersion mpi) Snap (mpiFlags mpi))
] `Map.difference` Map.fromList (map (, ()) (HashSet.toList wiredInPackages))
let unknown = Set.difference nonLocalTargets $ Map.keysSet sourceMap
unless (Set.null unknown) $ do
let toEither name =
case Map.lookup name latestVersion of
Nothing -> Left name
Just version -> Right (name, version)
eithers = map toEither $ Set.toList unknown
(unknown', notInIndex) = partitionEithers eithers
throwM $ UnknownTargets
(Set.fromList unknown')
(Map.fromList notInIndex)
(bcStackYaml bconfig)
return (mbp, locals, nonLocalTargets, sourceMap)
-- | 'loadLocals' combines two pieces of information:
--
-- 1. Targets, i.e. arguments passed to stack such as @foo@ and @bar@ in the @stack foo bar@ invocation
--
-- 2. Local packages listed in @stack.yaml@
--
-- It returns:
--
-- 1. For every local package, a 'LocalPackage' structure
--
-- 2. If a target does not correspond to a local package but is a valid
-- 'PackageName' or 'PackageIdentifier', it is returned as such.
--
-- NOTE: as the function is written right now, it may "drop" targets if
-- they correspond to existing directories not listed in stack.yaml. This
-- may be a bug.
loadLocals :: forall m env .
(MonadReader env m, HasBuildConfig env, MonadIO m, MonadLogger m, MonadThrow m, MonadCatch m,HasEnvConfig env)
=> BuildOpts
-> Map PackageName Version
-> m ([LocalPackage], Set PackageName, Set PackageIdentifier)
loadLocals bopts latestVersion = do
targets <- mapM parseTarget $
case boptsTargets bopts of
[] -> ["."]
x -> x
-- Group targets by their kind
(dirs, names, idents) <-
case partitionEithers targets of
([], targets') -> return $ partitionTargetSpecs targets'
(bad, _) -> throwM $ Couldn'tParseTargets bad
econfig <- asks getEnvConfig
bconfig <- asks getBuildConfig
-- Iterate over local packages declared in stack.yaml and turn them
-- into LocalPackage structures. The targets affect whether these
-- packages will be marked as wanted.
lps <- forM (Map.toList $ bcPackages bconfig) $ \(dir, validWanted) -> do
cabalfp <- getCabalFileName dir
name <- parsePackageNameFromFilePath cabalfp
let wanted = validWanted && isWanted dirs names dir name
config = PackageConfig
{ packageConfigEnableTests = False
, packageConfigEnableBenchmarks = False
, packageConfigFlags = localFlags (boptsFlags bopts) bconfig name
, packageConfigGhcVersion = envConfigGhcVersion econfig
, packageConfigPlatform = configPlatform $ getConfig bconfig
}
configFinal = config
{ packageConfigEnableTests =
case boptsFinalAction bopts of
DoTests _ -> wanted
_ -> False
, packageConfigEnableBenchmarks = wanted && boptsFinalAction bopts == DoBenchmarks
}
pkg <- readPackage config cabalfp
pkgFinal <- readPackage configFinal cabalfp
when (packageName pkg /= name) $ throwM
$ MismatchedCabalName cabalfp (packageName pkg)
mbuildCache <- tryGetBuildCache dir
files <- getPackageFiles (packageFiles pkg) AllFiles cabalfp
(isDirty, newBuildCache) <- checkBuildCache
(fromMaybe Map.empty mbuildCache)
(map toFilePath $ Set.toList files)
return LocalPackage
{ lpPackage = pkg
, lpPackageFinal = pkgFinal
, lpWanted = wanted
, lpFiles = files
, lpDirtyFiles = isDirty
, lpNewBuildCache = newBuildCache
, lpCabalFile = cabalfp
, lpDir = dir
, lpComponents = fromMaybe Set.empty $ Map.lookup name names
}
let known = Set.fromList $ map (packageName . lpPackage) lps
unknown = Set.difference (Map.keysSet names) known
return (lps, unknown, idents)
where
-- Attempt to parse a TargetSpec based on its textual form and on
-- whether it is a name of an existing directory.
--
-- If a TargetSpec is not recognized, return it verbatim as Left.
parseTarget :: Text -> m (Either Text TargetSpec)
parseTarget t = do
let s = T.unpack t
isDir <- liftIO $ doesDirectoryExist s
if isDir
then liftM (Right . TSDir) $ liftIO (canonicalizePath s) >>= parseAbsDir
else return
$ maybe (Left t) Right
$ (flip TSName Set.empty <$> parsePackageNameFromString s)
<|> (TSIdent <$> parsePackageIdentifierFromString s)
<|> (do
t' <- T.stripSuffix ":latest" t
name <- parsePackageNameFromString $ T.unpack t'
version <- Map.lookup name latestVersion
Just $ TSIdent $ PackageIdentifier name version)
<|> (do
let (name', rest) = T.break (== ':') t
component <- T.stripPrefix ":" rest
name <- parsePackageNameFromString $ T.unpack name'
Just $ TSName name $ Set.singleton component)
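    -- For example (illustrative note added here): "foo" parses as a bare
    -- package name, "foo-1.2.3" as a package identifier, "foo:latest" picks
    -- the newest version known to the index, and "foo:test" names the "test"
    -- component of package foo.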
isWanted dirs names dir name =
name `Map.member` names ||
any (`isParentOf` dir) dirs ||
any (== dir) dirs
data TargetSpec
= TSName PackageName (Set Text)
| TSIdent PackageIdentifier
| TSDir (Path Abs Dir)
partitionTargetSpecs :: [TargetSpec] -> ([Path Abs Dir], Map PackageName (Set Text), Set PackageIdentifier)
partitionTargetSpecs =
loop id Map.empty Set.empty
where
loop dirs names idents ts0 =
case ts0 of
[] -> (dirs [], names, idents)
TSName name comps:ts -> loop
dirs
(Map.insertWith Set.union name comps names)
idents
ts
TSIdent ident:ts -> loop dirs names (Set.insert ident idents) ts
TSDir dir:ts -> loop (dirs . (dir:)) names idents ts
-- | All flags for a local package
localFlags :: (Map (Maybe PackageName) (Map FlagName Bool))
-> BuildConfig
-> PackageName
-> Map FlagName Bool
localFlags boptsflags bconfig name = Map.unions
[ fromMaybe Map.empty $ Map.lookup (Just name) $ boptsflags
, fromMaybe Map.empty $ Map.lookup Nothing $ boptsflags
, fromMaybe Map.empty $ Map.lookup name $ bcFlags bconfig
]
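-- An illustrative helper added as a sketch (not part of the original module):
-- it spells out the precedence encoded by 'Map.unions' above, where the
-- leftmost map wins for a duplicated key, so per-package command-line flags
-- shadow global command-line flags, which in turn shadow stack.yaml flags.
_flagPrecedence :: Map FlagName Bool -- ^ per-package command-line flags
                -> Map FlagName Bool -- ^ global command-line flags
                -> Map FlagName Bool -- ^ flags from stack.yaml
                -> Map FlagName Bool
_flagPrecedence perPackageCli globalCli fromStackYaml =
    Map.unions [perPackageCli, globalCli, fromStackYaml]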
-- | Add in necessary packages to extra dependencies
--
-- See https://github.com/commercialhaskell/stack/issues/272 for the requirements of this function
extendExtraDeps :: Map PackageName Version -- ^ original extra deps
-> MiniBuildPlan
-> Map PackageName Version -- ^ latest versions in indices
-> Set PackageName -- ^ extra package names desired
-> Set PackageIdentifier -- ^ extra package identifiers desired
-> Map PackageName Version -- ^ new extradeps
extendExtraDeps extraDeps0 mbp latestVersion extraNames extraIdents =
F.foldl' addIdent
(F.foldl' addName extraDeps0 extraNames)
extraIdents
where
snapshot = fmap mpiVersion $ mbpPackages mbp
addName m name =
case Map.lookup name m <|> Map.lookup name snapshot of
            -- already exists in snapshot or extra-deps
Just _ -> m
Nothing ->
case Map.lookup name latestVersion of
-- use the latest version in the index
Just v -> Map.insert name v m
-- does not exist, will be reported as an error
Nothing -> m
addIdent m (PackageIdentifier name version) =
case Map.lookup name snapshot of
-- the version matches what's in the snapshot, so just use the snapshot version
Just version' | version == version' -> m
_ -> Map.insert name version m
-- | Compare the current filesystem state to the cached information, and
-- determine (1) if the files are dirty, and (2) the new cache values.
checkBuildCache :: MonadIO m
=> Map FilePath FileCacheInfo -- ^ old cache
-> [FilePath] -- ^ files in package
-> m (Bool, Map FilePath FileCacheInfo)
checkBuildCache oldCache files = liftIO $ do
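    -- Each file contributes an (Any, Map) pair; 'mconcat' ORs the per-file
    -- dirty flags via the 'Any' monoid and takes the union of the per-file
    -- cache entries.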
(Any isDirty, m) <- fmap mconcat $ mapM go files
return (isDirty, m)
where
go fp = do
mmodTime <- getModTimeMaybe fp
case mmodTime of
Nothing -> return (Any False, Map.empty)
Just modTime' -> do
(isDirty, newFci) <-
case Map.lookup fp oldCache of
Just fci
| fciModTime fci == modTime' -> return (False, fci)
| otherwise -> do
newFci <- calcFci modTime' fp
let isDirty =
fciSize fci /= fciSize newFci ||
fciHash fci /= fciHash newFci
return (isDirty, newFci)
Nothing -> do
newFci <- calcFci modTime' fp
return (True, newFci)
return (Any isDirty, Map.singleton fp newFci)
getModTimeMaybe fp =
liftIO
(catch
(liftM
(Just . modTime)
(getModificationTime fp))
(\e ->
if isDoesNotExistError e
then return Nothing
else throwM e))
calcFci modTime' fp =
withBinaryFile fp ReadMode $ \h -> do
(size, digest) <- CB.sourceHandle h $$ getZipSink
((,)
<$> ZipSink (CL.fold
(\x y -> x + fromIntegral (S.length y))
0)
<*> ZipSink sinkHash)
return FileCacheInfo
{ fciModTime = modTime'
, fciSize = size
, fciHash = toBytes (digest :: Digest SHA256)
}
|
cocreature/stack
|
src/Stack/Build/Source.hs
|
bsd-3-clause
| 16,180
| 0
| 27
| 5,704
| 3,575
| 1,863
| 1,712
| 295
| 5
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
module AlphaHeavy.QuickFIX.GetMessageField where
import Control.Applicative
import Control.Exception (throwIO)
import Data.ByteString (ByteString)
import Data.Int
import Data.Time
import GHC.Generics
import AlphaHeavy.FIX as FIX
import AlphaHeavy.QuickFIX.Foreign
import AlphaHeavy.QuickFIX.Types
class GetMessageField a where
getMessageField :: QuickFIXMessagePtr -> Int -> IO a
instance GetMessageField Bool where
getMessageField msg fid =
getBoolField msg (fromIntegral fid)
instance GetMessageField Char where
getMessageField msg fid =
getCharField msg (fromIntegral fid)
instance GetMessageField Int where
getMessageField msg fid =
fromIntegral <$> getIntField msg (fromIntegral fid)
instance GetMessageField Int32 where
getMessageField msg fid =
getIntField msg (fromIntegral fid)
instance GetMessageField Int64 where
getMessageField msg fid =
fromIntegral <$> getIntField msg (fromIntegral fid)
instance GetMessageField Float where
getMessageField msg fid =
realToFrac <$> getDoubleField msg (fromIntegral fid)
instance GetMessageField Double where
getMessageField msg fid =
getDoubleField msg (fromIntegral fid)
instance GetMessageField String where
getMessageField msg fid =
getStringFieldCPS msg (fromIntegral fid)
instance GetMessageField ByteString where
getMessageField = error "no bytestring support yet"
instance GetMessageField Data.Time.UTCTime where
getMessageField msg fid = do
txt <- getMessageField msg fid
case parseTime defaultTimeLocale "%Y%m%d-%H:%M:%S" txt of
Just val -> return $! val
Nothing -> throwIO . IncorrectTagValue fid $ txt
instance GetMessageField Decimal where
getMessageField msg fid =
read <$> getMessageField msg fid
instance GetMessageField Exchange where
getMessageField msg fid = do
val <- getMessageField msg fid
return $! case val of
"O" -> Exchange_NASDAQ
"N" -> Exchange_NYSE
"SMART" -> Exchange_SMART
_ -> Exchange_OTHER val
instance GetMessageField (U1 x) where
getMessageField _ _ =
return U1
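-- | A small usage sketch added for illustration (not part of the original
-- module): read FIX tag 52 (SendingTime) via the 'UTCTime' instance above.
readSendingTime :: QuickFIXMessagePtr -> IO UTCTime
readSendingTime msg = getMessageField msg 52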
|
alphaHeavy/quickfix-hs
|
src/AlphaHeavy/QuickFIX/GetMessageField.hs
|
bsd-3-clause
| 2,205
| 0
| 13
| 404
| 553
| 277
| 276
| 61
| 0
|
{-# LANGUAGE TemplateHaskell #-}
module EFA.Test.Solver where
import qualified Data.Set as S
import qualified Data.List as L
import Data.Graph.Inductive
import Test.QuickCheck
import Test.QuickCheck.All
import Debug.Trace
import EFA.Topology.RandomTopology
import EFA.Topology.Topology
import EFA.Solver.Equation
import EFA.Solver.Horn
import EFA.Solver.IsVar
import EFA.Solver.DirEquation
import EFA.Interpreter.Arith
import EFA.Equation.Env
import EFA.Utility
-- | Given x and eta environments, the number of solved (directed) equations should equal
-- twice the number of edges in the graph, that is, every power position has been calculated.
-- This is a good example of how the various functions are used together.
prop_solver :: Int -> Gen Bool
prop_solver seed = do
ratio <- choose (2.0, 5.0)
let g = randomTopology 0 50 ratio
terms = map give [ PowerIdx 0 0 0 1 ]
xenvts = envToEqTerms (randomXEnv 0 1 g)
eenvts = envToEqTerms (randomEtaEnv 17 1 g)
ts = terms ++ xenvts ++ eenvts ++ mkEdgeEq g ++ mkNodeEq g
isV = isVar g ts
(given, nov, givExt, rest) = splitTerms isV ts
ss = givExt ++ rest
ho = hornOrder isV ss
dirs = directEquations isV ho
noEdges = length (edges g)
      -- For every edge there is one x, plus all PowerIdx minus one, because one PowerIdx is given.
return $ length dirs == noEdges + (2*noEdges - 1)
prop_orderOfEqs :: Int -> Gen Bool
prop_orderOfEqs seed = do
ratio <- choose (2.0, 6.0)
let g = randomTopology seed 50 ratio
terms = map give [ PowerIdx 0 0 0 1 ]
xenvts = envToEqTerms (randomXEnv 0 1 g)
eenvts = envToEqTerms (randomEtaEnv 17 1 g)
ts = terms ++ xenvts ++ eenvts ++ mkEdgeEq g ++ mkNodeEq g
isV = isVar g ts
(given, nov, givExt, rest) = splitTerms isV ts
ss = givExt ++ rest
ho = hornOrder isV ss
dirs = directEquations isV ho
dirsets = L.scanl S.union S.empty $ map (mkVarSet isV) dirs -- For _:a:b:_, b includes a
atMostOneMore (s, t) = S.size (s S.\\ t) <= 1
return $ all atMostOneMore (pairs dirsets)
runTests = $quickCheckAll
|
energyflowanalysis/efa-2.1
|
attic/test/EFA/Test/Solver.hs
|
bsd-3-clause
| 2,135
| 0
| 13
| 499
| 632
| 336
| 296
| 49
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.HTTP2.Types (
-- * Constant
frameHeaderLength
, maxPayloadLength
-- * SettingsList
, SettingsKeyId(..)
, checkSettingsList
, fromSettingsKeyId
, SettingsValue
, SettingsList
, toSettingsKeyId
-- * Settings
, Settings(..)
, defaultSettings
, updateSettings
-- * Error
, HTTP2Error(..)
, errorCodeId
-- * Error code
, ErrorCode
, ErrorCodeId(..)
, fromErrorCodeId
, toErrorCodeId
-- * Frame type
, FrameType
, minFrameType
, maxFrameType
, FrameTypeId(..)
, fromFrameTypeId
, toFrameTypeId
-- * Frame
, Frame(..)
, FrameHeader(..)
, FramePayload(..)
, framePayloadToFrameTypeId
, isPaddingDefined
-- * Stream identifier
, StreamId
, isControl
, isRequest
, isResponse
, testExclusive
, setExclusive
, clearExclusive
-- * Flags
, FrameFlags
, defaultFlags
, testEndStream
, testAck
, testEndHeader
, testPadded
, testPriority
, setEndStream
, setAck
, setEndHeader
, setPadded
, setPriority
-- * Window
, WindowSize
, defaultInitialWindowSize
, maxWindowSize
, isWindowOverflow
-- * Misc
, recommendedConcurrency
-- * Types
, HeaderBlockFragment
, Weight
, Priority(..)
, defaultPriority
, highestPriority
, Padding
) where
import qualified Control.Exception as E
import Data.Bits (setBit, testBit, clearBit)
import Data.ByteString (ByteString)
import Data.Maybe (mapMaybe)
import Data.Typeable
import Data.Word (Word8, Word16, Word32)
----------------------------------------------------------------
-- | The length of HTTP/2 frame header.
--
-- >>> frameHeaderLength
-- 9
frameHeaderLength :: Int
frameHeaderLength = 9
----------------------------------------------------------------
type ErrorCode = Word32
data ErrorCodeId = NoError
| ProtocolError
| InternalError
| FlowControlError
| SettingsTimeout
| StreamClosed
| FrameSizeError
| RefusedStream
| Cancel
| CompressionError
| ConnectError
| EnhanceYourCalm
| InadequateSecurity
| HTTP11Required
-- our extensions
| UnknownErrorCode ErrorCode
deriving (Show, Read, Eq, Ord)
-- |
--
-- >>> fromErrorCodeId NoError
-- 0
-- >>> fromErrorCodeId InadequateSecurity
-- 12
fromErrorCodeId :: ErrorCodeId -> ErrorCode
fromErrorCodeId NoError = 0x0
fromErrorCodeId ProtocolError = 0x1
fromErrorCodeId InternalError = 0x2
fromErrorCodeId FlowControlError = 0x3
fromErrorCodeId SettingsTimeout = 0x4
fromErrorCodeId StreamClosed = 0x5
fromErrorCodeId FrameSizeError = 0x6
fromErrorCodeId RefusedStream = 0x7
fromErrorCodeId Cancel = 0x8
fromErrorCodeId CompressionError = 0x9
fromErrorCodeId ConnectError = 0xa
fromErrorCodeId EnhanceYourCalm = 0xb
fromErrorCodeId InadequateSecurity = 0xc
fromErrorCodeId HTTP11Required = 0xd
fromErrorCodeId (UnknownErrorCode w) = w
-- |
--
-- >>> toErrorCodeId 0
-- NoError
-- >>> toErrorCodeId 0xc
-- InadequateSecurity
-- >>> toErrorCodeId 0xe
-- UnknownErrorCode 14
toErrorCodeId :: ErrorCode -> ErrorCodeId
toErrorCodeId 0x0 = NoError
toErrorCodeId 0x1 = ProtocolError
toErrorCodeId 0x2 = InternalError
toErrorCodeId 0x3 = FlowControlError
toErrorCodeId 0x4 = SettingsTimeout
toErrorCodeId 0x5 = StreamClosed
toErrorCodeId 0x6 = FrameSizeError
toErrorCodeId 0x7 = RefusedStream
toErrorCodeId 0x8 = Cancel
toErrorCodeId 0x9 = CompressionError
toErrorCodeId 0xa = ConnectError
toErrorCodeId 0xb = EnhanceYourCalm
toErrorCodeId 0xc = InadequateSecurity
toErrorCodeId 0xd = HTTP11Required
toErrorCodeId w = UnknownErrorCode w
----------------------------------------------------------------
-- | The connection error or the stream error.
data HTTP2Error = ConnectionError ErrorCodeId ByteString
| StreamError ErrorCodeId StreamId
deriving (Eq, Show, Typeable, Read)
instance E.Exception HTTP2Error
-- | Obtaining 'ErrorCodeId' from 'HTTP2Error'.
errorCodeId :: HTTP2Error -> ErrorCodeId
errorCodeId (ConnectionError err _) = err
errorCodeId (StreamError err _) = err
----------------------------------------------------------------
data SettingsKeyId = SettingsHeaderTableSize
| SettingsEnablePush
| SettingsMaxConcurrentStreams
| SettingsInitialWindowSize
| SettingsMaxFrameSize -- this means payload size
| SettingsMaxHeaderBlockSize
deriving (Show, Read, Eq, Ord, Enum, Bounded)
type SettingsValue = Int -- Word32
-- |
--
-- >>> fromSettingsKeyId SettingsHeaderTableSize
-- 1
-- >>> fromSettingsKeyId SettingsMaxHeaderBlockSize
-- 6
fromSettingsKeyId :: SettingsKeyId -> Word16
fromSettingsKeyId x = fromIntegral (fromEnum x) + 1
minSettingsKeyId :: Word16
minSettingsKeyId = fromIntegral $ fromEnum (minBound :: SettingsKeyId)
maxSettingsKeyId :: Word16
maxSettingsKeyId = fromIntegral $ fromEnum (maxBound :: SettingsKeyId)
-- |
--
-- >>> toSettingsKeyId 0
-- Nothing
-- >>> toSettingsKeyId 1
-- Just SettingsHeaderTableSize
-- >>> toSettingsKeyId 6
-- Just SettingsMaxHeaderBlockSize
-- >>> toSettingsKeyId 7
-- Nothing
toSettingsKeyId :: Word16 -> Maybe SettingsKeyId
toSettingsKeyId x
| minSettingsKeyId <= n && n <= maxSettingsKeyId = Just . toEnum . fromIntegral $ n
| otherwise = Nothing
where
n = x - 1
----------------------------------------------------------------
-- | Settings containing raw values.
type SettingsList = [(SettingsKeyId,SettingsValue)]
-- | Checking 'SettingsList' and reporting an error if any.
--
-- >>> checkSettingsList [(SettingsEnablePush,2)]
-- Just (ConnectionError ProtocolError "enable push must be 0 or 1")
checkSettingsList :: SettingsList -> Maybe HTTP2Error
checkSettingsList settings = case mapMaybe checkSettingsValue settings of
[] -> Nothing
(x:_) -> Just x
checkSettingsValue :: (SettingsKeyId,SettingsValue) -> Maybe HTTP2Error
checkSettingsValue (SettingsEnablePush,v)
| v /= 0 && v /= 1 = Just $ ConnectionError ProtocolError "enable push must be 0 or 1"
checkSettingsValue (SettingsInitialWindowSize,v)
  | v > 2147483647 = Just $ ConnectionError FlowControlError "Window size must be less than or equal to 2147483647"
checkSettingsValue (SettingsMaxFrameSize,v)
| v < 16384 || v > 16777215 = Just $ ConnectionError ProtocolError "Max frame size must be in between 16384 and 16777215"
checkSettingsValue _ = Nothing
----------------------------------------------------------------
-- | Cooked version of settings. This is suitable for storing in an HTTP/2 context.
data Settings = Settings {
headerTableSize :: Int
, enablePush :: Bool
, maxConcurrentStreams :: Maybe Int
, initialWindowSize :: WindowSize
, maxFrameSize :: Int
, maxHeaderBlockSize :: Maybe Int
} deriving (Show)
-- | The default settings.
--
-- >>> defaultSettings
-- Settings {headerTableSize = 4096, enablePush = True, maxConcurrentStreams = Nothing, initialWindowSize = 65535, maxFrameSize = 16384, maxHeaderBlockSize = Nothing}
defaultSettings :: Settings
defaultSettings = Settings {
headerTableSize = 4096
, enablePush = True
, maxConcurrentStreams = Nothing
, initialWindowSize = defaultInitialWindowSize
, maxFrameSize = 16384
, maxHeaderBlockSize = Nothing
}
-- | Updating settings.
--
-- >>> updateSettings defaultSettings [(SettingsEnablePush,0),(SettingsMaxHeaderBlockSize,200)]
-- Settings {headerTableSize = 4096, enablePush = False, maxConcurrentStreams = Nothing, initialWindowSize = 65535, maxFrameSize = 16384, maxHeaderBlockSize = Just 200}
updateSettings :: Settings -> SettingsList -> Settings
updateSettings settings kvs = foldr update settings kvs
where
update (SettingsHeaderTableSize,x) def = def { headerTableSize = x }
-- fixme: x should be 0 or 1
update (SettingsEnablePush,x) def = def { enablePush = x > 0 }
update (SettingsMaxConcurrentStreams,x) def = def { maxConcurrentStreams = Just x }
update (SettingsInitialWindowSize,x) def = def { initialWindowSize = x }
update (SettingsMaxFrameSize,x) def = def { maxFrameSize = x }
update (SettingsMaxHeaderBlockSize,x) def = def { maxHeaderBlockSize = Just x }
type WindowSize = Int
-- | The default initial window size.
--
-- >>> defaultInitialWindowSize
-- 65535
defaultInitialWindowSize :: WindowSize
defaultInitialWindowSize = 65535
-- | The maximum window size.
--
-- >>> maxWindowSize
-- 2147483647
maxWindowSize :: WindowSize
maxWindowSize = 2147483647
-- | Checking if a window size exceeds the maximum window size.
--
-- >>> isWindowOverflow 10
-- False
-- >>> isWindowOverflow maxWindowSize
-- False
-- >>> isWindowOverflow (maxWindowSize + 1)
-- True
isWindowOverflow :: WindowSize -> Bool
isWindowOverflow w = testBit w 31
-- | Default concurrency.
--
-- >>> recommendedConcurrency
-- 100
recommendedConcurrency :: Int
recommendedConcurrency = 100
----------------------------------------------------------------
type Weight = Int
data Priority = Priority {
exclusive :: Bool
, streamDependency :: StreamId
, weight :: Weight
} deriving (Show, Read, Eq)
-- | Default priority which depends on stream 0.
--
-- >>> defaultPriority
-- Priority {exclusive = False, streamDependency = 0, weight = 16}
defaultPriority :: Priority
defaultPriority = Priority False 0 16
-- | Highest priority which depends on stream 0.
--
-- >>> highestPriority
-- Priority {exclusive = False, streamDependency = 0, weight = 256}
highestPriority :: Priority
highestPriority = Priority False 0 256
----------------------------------------------------------------
type FrameType = Word8
minFrameType :: FrameType
minFrameType = 0
maxFrameType :: FrameType
maxFrameType = 9
-- Valid frame types
data FrameTypeId = FrameData
| FrameHeaders
| FramePriority
| FrameRSTStream
| FrameSettings
| FramePushPromise
| FramePing
| FrameGoAway
| FrameWindowUpdate
| FrameContinuation
| FrameUnknown FrameType
deriving (Show, Eq, Ord)
-- |
--
-- >>> fromFrameTypeId FrameData
-- 0
-- >>> fromFrameTypeId FrameContinuation
-- 9
-- >>> fromFrameTypeId (FrameUnknown 10)
-- 10
fromFrameTypeId :: FrameTypeId -> FrameType
fromFrameTypeId FrameData = 0
fromFrameTypeId FrameHeaders = 1
fromFrameTypeId FramePriority = 2
fromFrameTypeId FrameRSTStream = 3
fromFrameTypeId FrameSettings = 4
fromFrameTypeId FramePushPromise = 5
fromFrameTypeId FramePing = 6
fromFrameTypeId FrameGoAway = 7
fromFrameTypeId FrameWindowUpdate = 8
fromFrameTypeId FrameContinuation = 9
fromFrameTypeId (FrameUnknown x) = x
-- |
--
-- >>> toFrameTypeId 0
-- FrameData
-- >>> toFrameTypeId 9
-- FrameContinuation
-- >>> toFrameTypeId 10
-- FrameUnknown 10
toFrameTypeId :: FrameType -> FrameTypeId
toFrameTypeId 0 = FrameData
toFrameTypeId 1 = FrameHeaders
toFrameTypeId 2 = FramePriority
toFrameTypeId 3 = FrameRSTStream
toFrameTypeId 4 = FrameSettings
toFrameTypeId 5 = FramePushPromise
toFrameTypeId 6 = FramePing
toFrameTypeId 7 = FrameGoAway
toFrameTypeId 8 = FrameWindowUpdate
toFrameTypeId 9 = FrameContinuation
toFrameTypeId x = FrameUnknown x
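-- A round-trip sanity check for the two conversions above; this helper is an
-- illustrative sketch added here and is not part of the original module.
prop_frameTypeIdRoundTrip :: FrameType -> Bool
prop_frameTypeIdRoundTrip w = fromFrameTypeId (toFrameTypeId w) == w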
----------------------------------------------------------------
-- | The maximum length of HTTP/2 payload.
--
-- >>> maxPayloadLength
-- 16384
maxPayloadLength :: Int
maxPayloadLength = 2^(14::Int)
----------------------------------------------------------------
-- Flags
type FrameFlags = Word8
-- |
-- >>> defaultFlags
-- 0
defaultFlags :: FrameFlags
defaultFlags = 0
-- |
-- >>> testEndStream 0x1
-- True
testEndStream :: FrameFlags -> Bool
testEndStream x = x `testBit` 0
-- |
-- >>> testAck 0x1
-- True
testAck :: FrameFlags -> Bool
testAck x = x `testBit` 0 -- fixme: is this intentional in the spec?
-- |
-- >>> testEndHeader 0x4
-- True
testEndHeader :: FrameFlags -> Bool
testEndHeader x = x `testBit` 2
-- |
-- >>> testPadded 0x8
-- True
testPadded :: FrameFlags -> Bool
testPadded x = x `testBit` 3
-- |
-- >>> testPriority 0x20
-- True
testPriority :: FrameFlags -> Bool
testPriority x = x `testBit` 5
-- |
-- >>> setEndStream 0
-- 1
setEndStream :: FrameFlags -> FrameFlags
setEndStream x = x `setBit` 0
-- |
-- >>> setAck 0
-- 1
setAck :: FrameFlags -> FrameFlags
setAck x = x `setBit` 0 -- fixme: is this intentional in the spec?
-- |
-- >>> setEndHeader 0
-- 4
setEndHeader :: FrameFlags -> FrameFlags
setEndHeader x = x `setBit` 2
-- |
-- >>> setPadded 0
-- 8
setPadded :: FrameFlags -> FrameFlags
setPadded x = x `setBit` 3
-- |
-- >>> setPriority 0
-- 32
setPriority :: FrameFlags -> FrameFlags
setPriority x = x `setBit` 5
----------------------------------------------------------------
type StreamId = Int
-- |
-- >>> isControl 0
-- True
-- >>> isControl 1
-- False
isControl :: StreamId -> Bool
isControl 0 = True
isControl _ = False
-- |
-- >>> isRequest 0
-- False
-- >>> isRequest 1
-- True
isRequest :: StreamId -> Bool
isRequest = odd
-- |
-- >>> isResponse 0
-- False
-- >>> isResponse 2
-- True
isResponse :: StreamId -> Bool
isResponse 0 = False
isResponse n = even n
testExclusive :: Int -> Bool
testExclusive n = n `testBit` 31
setExclusive :: Int -> Int
setExclusive n = n `setBit` 31
clearExclusive :: Int -> Int
clearExclusive n = n `clearBit` 31
----------------------------------------------------------------
type HeaderBlockFragment = ByteString
type Padding = ByteString
----------------------------------------------------------------
-- | The data type for HTTP/2 frames.
data Frame = Frame
{ frameHeader :: FrameHeader
, framePayload :: FramePayload
} deriving (Show, Read, Eq)
-- | The data type for HTTP/2 frame headers.
data FrameHeader = FrameHeader
{ payloadLength :: Int
, flags :: FrameFlags
, streamId :: StreamId
} deriving (Show, Read, Eq)
-- | The data type for HTTP/2 frame payloads.
data FramePayload =
DataFrame ByteString
| HeadersFrame (Maybe Priority) HeaderBlockFragment
| PriorityFrame Priority
| RSTStreamFrame ErrorCodeId
| SettingsFrame SettingsList
| PushPromiseFrame StreamId HeaderBlockFragment
| PingFrame ByteString
| GoAwayFrame StreamId ErrorCodeId ByteString
| WindowUpdateFrame WindowSize
| ContinuationFrame HeaderBlockFragment
| UnknownFrame FrameType ByteString
deriving (Show, Read, Eq)
----------------------------------------------------------------
-- | Getting 'FrameType' from 'FramePayload'.
--
-- >>> framePayloadToFrameTypeId (DataFrame "body")
-- FrameData
framePayloadToFrameTypeId :: FramePayload -> FrameTypeId
framePayloadToFrameTypeId (DataFrame _) = FrameData
framePayloadToFrameTypeId (HeadersFrame _ _) = FrameHeaders
framePayloadToFrameTypeId (PriorityFrame _) = FramePriority
framePayloadToFrameTypeId (RSTStreamFrame _) = FrameRSTStream
framePayloadToFrameTypeId (SettingsFrame _) = FrameSettings
framePayloadToFrameTypeId (PushPromiseFrame _ _) = FramePushPromise
framePayloadToFrameTypeId (PingFrame _) = FramePing
framePayloadToFrameTypeId (GoAwayFrame _ _ _) = FrameGoAway
framePayloadToFrameTypeId (WindowUpdateFrame _) = FrameWindowUpdate
framePayloadToFrameTypeId (ContinuationFrame _) = FrameContinuation
framePayloadToFrameTypeId (UnknownFrame w8 _) = FrameUnknown w8
----------------------------------------------------------------
-- | Checking if padding is defined in this frame type.
isPaddingDefined :: FramePayload -> Bool
isPaddingDefined (DataFrame _) = True
isPaddingDefined (HeadersFrame _ _) = True
isPaddingDefined (PriorityFrame _) = False
isPaddingDefined (RSTStreamFrame _) = False
isPaddingDefined (SettingsFrame _) = False
isPaddingDefined (PushPromiseFrame _ _) = True
isPaddingDefined (PingFrame _) = False
isPaddingDefined (GoAwayFrame _ _ _) = False
isPaddingDefined (WindowUpdateFrame _) = False
isPaddingDefined (ContinuationFrame _) = False
isPaddingDefined (UnknownFrame _ _) = False
|
bergmark/http2
|
Network/HTTP2/Types.hs
|
bsd-3-clause
| 16,457
| 0
| 10
| 3,310
| 2,977
| 1,737
| 1,240
| 329
| 6
|
module ClearScene
( clearScene
) where
import Data.Set
import Class.GameScene as GS
import GlobalValue
import KeyBind
data ClearScene = ClearScene Int
instance GameScene ClearScene where
update (GV {keyset = key}) scene
| member A key = return $ GS.EndScene
| member QUIT key = return $ GS.EndScene
| otherwise = return $ GS.Replace scene
clearScene :: Int -> IO ClearScene
clearScene score = return $ ClearScene score
|
c000/PaperPuppet
|
src/ClearScene.hs
|
bsd-3-clause
| 452
| 0
| 10
| 100
| 148
| 76
| 72
| 14
| 1
|
module Text.EscapeCodes where
import Data.Char
import Data.Maybe
data Attribute = Normal | Bold | Underline | Blink | Reverse | Invisible
deriving (Show, Eq)
data Color = Black | Red | Green | Yellow | Blue | Magenta | Cyan | White
deriving (Show, Eq, Enum, Bounded)
data EscapeCode = FormatAttribute Attribute
| FormatForeground Color
| FormatBackground Color
| FormatUnknown Int
deriving (Show, Eq)
escapeChar :: Char
escapeChar = chr 27
parseEscapeCodes :: String -> [Either Char EscapeCode]
parseEscapeCodes x = f 0 x
where
f 0 (c1:c2:cs) | c1 == escapeChar && c2 == '[' = f 1 cs
f 1 (c:cs) | isDigit c = Right (g $ read a) : f 2 b
where (a,b) = span isDigit (c:cs)
f 2 (';':cs) = f 1 cs
f 2 ('m':cs) = f 0 cs
        -- defensive coding: never fail on an unexpected pattern,
        -- always continue producing output
f _ (c:cs) = Left c : f 0 cs
f _ [] = []
g :: Int -> EscapeCode
g x | x >= 30 && x <= 37 = FormatForeground $ toEnum (x - 30)
| x >= 40 && x <= 47 = FormatBackground $ toEnum (x - 40)
| otherwise = case lookup x attribs of
Nothing -> FormatUnknown x
Just y -> FormatAttribute y
attribs = [(0, Normal),
(1, Bold),
(4, Underline),
(5, Blink),
(7, Reverse),
(8, Invisible)]
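-- A small usage sketch added for illustration (not part of the original
-- module): parsing the ANSI sequence for red text followed by "hi".
exampleParse :: [Either Char EscapeCode]
exampleParse = parseEscapeCodes "\ESC[31mhi"
-- exampleParse == [Right (FormatForeground Red), Left 'h', Left 'i']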
getColor :: Color -> (Int,Int,Int)
getColor x = case lookup x colors of
Nothing -> (0,0,0)
Just t -> t
where
colors = [(Black, (0,0,0)),
(Red, (h,0,0)),
(Green, (0,h,0)),
(White, (f,f,f))]
h = 0x80
f = 0xff
|
ndmitchell/guihaskell
|
Text/EscapeCodes.hs
|
bsd-3-clause
| 1,960
| 0
| 12
| 863
| 713
| 390
| 323
| 45
| 7
|
module BasicConstant where
import Alias
import BasicData
import Data.Bits
aFile,aAndBFile,hFile,gAndHFile,edge,backrank,eighthRank,firstRank,kingside,queenside :: BitBoard
initWKingsideRook,initWQueensideRook,initBKingsideRook,initBQueensideRook,initRooks :: BitBoard
initWKing,initBKing,initKingsAndRooks,initKingsideRooks,initQueensideRooks :: BitBoard
initWKRBQR,initWQRBKR,lightSquares,darkSquares :: BitBoard
aFile = 0x8080808080808080
aAndBFile = 0xc0c0c0c0c0c0c0c0
hFile = 0x0101010101010101
gAndHFile = 0x0303030303030303
edge = 0xff818181818181ff
backrank = 0xff000000000000ff
eighthRank = 0xff00000000000000
firstRank = 0x00000000000000ff
kingside = 0x0f0f0f0f0f0f0f0f
queenside = 0xf0f0f0f0f0f0f0f0
initWKingsideRook = 0x0000000000000001
initWQueensideRook = 0x0000000000000080
initBKingsideRook = 0x0100000000000000
initBQueensideRook = 0x8000000000000000
initRooks = 0x8100000000000081
initWKing = 0x0000000000000008
initBKing = 0x0800000000000000
initKingsAndRooks = 0x8900000000000089
initKingsideRooks = initWKingsideRook .|. initBKingsideRook
initQueensideRooks = initWQueensideRook .|. initBQueensideRook
initWKRBQR = initWKingsideRook .|. initBQueensideRook
initWQRBKR = initWQueensideRook .|. initBKingsideRook
lightSquares = 0x5555555555555555
darkSquares = complement lightSquares
pawnMaterial,knightMaterial,bishopMaterial,rookMaterial,queenMaterial,kingMaterial :: PieceValue
pawnMaterial = 100
knightMaterial = 320
bishopMaterial = 330
rookMaterial = 500
queenMaterial = 900
kingMaterial = 20000
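-- An illustrative definition added as a sketch (not in the original module):
-- the light- and dark-square masks together cover the whole board.
fullBoard :: BitBoard
fullBoard = lightSquares .|. darkSquares -- complement 0, i.e. all 64 bits set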
|
syanidar/Sophy
|
src/Foundation/BasicConstant.hs
|
bsd-3-clause
| 1,883
| 0
| 5
| 485
| 259
| 174
| 85
| 39
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE UndecidableInstances #-}
-- Front end for imperative programs
module Language.Embedded.Imperative.Frontend where
import Prelude hiding (break)
import Data.Array.IO
import Data.IORef
import Data.Typeable
import System.IO.Unsafe
import Control.Monad.Operational.Higher
import System.IO.Fake
import Language.Embedded.Expression
import Language.Embedded.Imperative.CMD
import Language.Embedded.Imperative.Args
import Language.Embedded.Imperative.Frontend.General
--------------------------------------------------------------------------------
-- * References
--------------------------------------------------------------------------------
-- | Create an uninitialized reference
newRef :: (pred a, RefCMD :<: instr) =>
ProgramT instr (Param2 exp pred) m (Ref a)
newRef = newNamedRef "r"
-- | Create an uninitialized named reference
--
-- The provided base name may be appended with a unique identifier to avoid name
-- collisions.
newNamedRef :: (pred a, RefCMD :<: instr)
=> String -- ^ Base name
-> ProgramT instr (Param2 exp pred) m (Ref a)
newNamedRef = singleInj . NewRef
-- | Create an initialized reference
initRef :: (pred a, RefCMD :<: instr)
=> exp a -- ^ Initial value
-> ProgramT instr (Param2 exp pred) m (Ref a)
initRef = initNamedRef "r"
-- | Create an initialized named reference
--
-- The provided base name may be appended with a unique identifier to avoid name
-- collisions.
initNamedRef :: (pred a, RefCMD :<: instr)
=> String -- ^ Base name
-> exp a -- ^ Initial value
-> ProgramT instr (Param2 exp pred) m (Ref a)
initNamedRef base a = singleInj (InitRef base a)
-- | Get the contents of a reference
getRef :: (pred a, FreeExp exp, FreePred exp a, RefCMD :<: instr, Monad m) =>
Ref a -> ProgramT instr (Param2 exp pred) m (exp a)
getRef = fmap valToExp . singleInj . GetRef
-- | Set the contents of a reference
setRef :: (pred a, RefCMD :<: instr) =>
Ref a -> exp a -> ProgramT instr (Param2 exp pred) m ()
setRef r = singleInj . SetRef r
-- | Modify the contents of a reference
modifyRef :: (pred a, FreeExp exp, FreePred exp a, RefCMD :<: instr, Monad m) =>
Ref a -> (exp a -> exp a) -> ProgramT instr (Param2 exp pred) m ()
modifyRef r f = setRef r . f =<< unsafeFreezeRef r
-- | Freeze the contents of a reference (only safe if the reference is not updated
-- as long as the resulting value is alive)
unsafeFreezeRef
:: (pred a, FreeExp exp, FreePred exp a, RefCMD :<: instr, Monad m)
=> Ref a -> ProgramT instr (Param2 exp pred) m (exp a)
unsafeFreezeRef = fmap valToExp . singleInj . UnsafeFreezeRef
-- | Read the value of a reference without returning in the monad
--
-- WARNING: Don't use this function unless you really know what you are doing.
-- It is almost always better to use 'unsafeFreezeRef' instead.
--
-- 'veryUnsafeFreezeRef' behaves predictably when doing code generation, but it
-- can give strange results when running in 'IO', as explained here:
--
-- <http://fun-discoveries.blogspot.se/2015/09/strictness-can-fix-non-termination.html>
veryUnsafeFreezeRef :: (FreeExp exp, FreePred exp a) => Ref a -> exp a
veryUnsafeFreezeRef (RefRun r) = constExp $! unsafePerformIO $! readIORef r
veryUnsafeFreezeRef (RefComp v) = varExp v
--------------------------------------------------------------------------------
-- * Arrays
--------------------------------------------------------------------------------
-- | Create an uninitialized array
newArr :: (pred a, Integral i, Ix i, ArrCMD :<: instr)
=> exp i -- ^ Length
-> ProgramT instr (Param2 exp pred) m (Arr i a)
newArr = newNamedArr "a"
-- | Create an uninitialized named array
--
-- The provided base name may be appended with a unique identifier to avoid name
-- collisions.
newNamedArr :: (pred a, Integral i, Ix i, ArrCMD :<: instr)
=> String -- ^ Base name
-> exp i -- ^ Length
-> ProgramT instr (Param2 exp pred) m (Arr i a)
newNamedArr base len = singleInj (NewArr base len)
-- | Create an array and initialize it with a constant list
constArr :: (pred a, Integral i, Ix i, ArrCMD :<: instr)
=> [a] -- ^ Initial contents
-> ProgramT instr (Param2 exp pred) m (Arr i a)
constArr = constNamedArr "a"
-- | Create a named array and initialize it with a constant list
--
-- The provided base name may be appended with a unique identifier to avoid name
-- collisions.
constNamedArr :: (pred a, Integral i, Ix i, ArrCMD :<: instr)
=> String -- ^ Base name
-> [a] -- ^ Initial contents
-> ProgramT instr (Param2 exp pred) m (Arr i a)
constNamedArr base init = singleInj (ConstArr base init)
-- | Get an element of an array
getArr
:: ( pred a
, FreeExp exp
, FreePred exp a
, Integral i
, Ix i
, ArrCMD :<: instr
, Monad m
)
=> Arr i a -> exp i -> ProgramT instr (Param2 exp pred) m (exp a)
getArr arr i = fmap valToExp $ singleInj $ GetArr arr i
-- | Set an element of an array
setArr :: (pred a, Integral i, Ix i, ArrCMD :<: instr) =>
Arr i a -> exp i -> exp a -> ProgramT instr (Param2 exp pred) m ()
setArr arr i a = singleInj (SetArr arr i a)
-- | Copy the contents of an array to another array. The number of elements to
-- copy must not be greater than the number of allocated elements in either
-- array.
copyArr :: (pred a, Integral i, Ix i, ArrCMD :<: instr)
=> (Arr i a, exp i) -- ^ (destination,offset)
    -> (Arr i a, exp i)  -- ^ (source,offset)
-> exp i -- ^ Number of elements
-> ProgramT instr (Param2 exp pred) m ()
copyArr arr1 arr2 len = singleInj $ CopyArr arr1 arr2 len
-- | Freeze a mutable array to an immutable one. This involves copying the array
-- to a newly allocated one.
freezeArr :: (pred a, Integral i, Ix i, Num (exp i), ArrCMD :<: instr, Monad m)
=> Arr i a
-> exp i -- ^ Length of new array
-> ProgramT instr (Param2 exp pred) m (IArr i a)
freezeArr arr n = do
arr2 <- newArr n
copyArr (arr2,0) (arr,0) n
unsafeFreezeArr arr2
-- | Freeze a mutable array to an immutable one without making a copy. This is
-- generally only safe if the mutable array is not updated as long as the
-- immutable array is alive.
unsafeFreezeArr :: (pred a, Integral i, Ix i, ArrCMD :<: instr) =>
Arr i a -> ProgramT instr (Param2 exp pred) m (IArr i a)
unsafeFreezeArr arr = singleInj $ UnsafeFreezeArr arr
-- | Thaw an immutable array to a mutable one. This involves copying the array
-- to a newly allocated one.
thawArr :: (pred a, Integral i, Ix i, Num (exp i), ArrCMD :<: instr, Monad m)
=> IArr i a
-> exp i -- ^ Number of elements to copy
-> ProgramT instr (Param2 exp pred) m (Arr i a)
thawArr arr n = do
arr2 <- unsafeThawArr arr
arr3 <- newArr n
copyArr (arr3,0) (arr2,0) n
return arr3
-- | Thaw an immutable array to a mutable one without making a copy. This is
-- generally only safe if the mutable array is not updated as long as the
-- immutable array is alive.
unsafeThawArr :: (pred a, Integral i, Ix i, ArrCMD :<: instr) =>
IArr i a -> ProgramT instr (Param2 exp pred) m (Arr i a)
unsafeThawArr arr = singleInj $ UnsafeThawArr arr
--------------------------------------------------------------------------------
-- * Control flow
--------------------------------------------------------------------------------
-- | Conditional statement
iff :: (ControlCMD :<: instr)
=> exp Bool -- ^ Condition
-> ProgramT instr (Param2 exp pred) m () -- ^ True branch
-> ProgramT instr (Param2 exp pred) m () -- ^ False branch
-> ProgramT instr (Param2 exp pred) m ()
iff b t f = singleInj $ If b t f
-- | Conditional statement that returns an expression
ifE
:: ( pred a
, FreeExp exp
, FreePred exp a
, ControlCMD :<: instr
, RefCMD :<: instr
, Monad m
)
=> exp Bool -- ^ Condition
-> ProgramT instr (Param2 exp pred) m (exp a) -- ^ True branch
-> ProgramT instr (Param2 exp pred) m (exp a) -- ^ False branch
-> ProgramT instr (Param2 exp pred) m (exp a)
ifE b t f = do
r <- newRef
iff b (t >>= setRef r) (f >>= setRef r)
getRef r
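-- A usage sketch for 'ifE' (added for illustration): pick the larger of two
-- expressions. The comparison operator @.>.@ is assumed to come from the
-- chosen expression language; it is not defined in this module.
--
-- > maxE a b = ifE (a .>. b) (return a) (return b)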
-- | While loop
while :: (ControlCMD :<: instr)
=> ProgramT instr (Param2 exp pred) m (exp Bool) -- ^ Continue condition
-> ProgramT instr (Param2 exp pred) m () -- ^ Loop body
-> ProgramT instr (Param2 exp pred) m ()
while b t = singleInj $ While b t
-- | For loop
for
:: ( FreeExp exp
, ControlCMD :<: instr
, Integral n
, pred n
, FreePred exp n
)
=> IxRange (exp n) -- ^ Index range
-> (exp n -> ProgramT instr (Param2 exp pred) m ()) -- ^ Loop body
-> ProgramT instr (Param2 exp pred) m ()
for range body = singleInj $ For range (body . valToExp)
-- | Break out from a loop
break :: (ControlCMD :<: instr) => ProgramT instr (Param2 exp pred) m ()
break = singleInj Break
-- | Assertion
assert :: (ControlCMD :<: instr)
=> exp Bool -- ^ Expression that should be true
-> String -- ^ Message in case of failure
-> ProgramT instr (Param2 exp pred) m ()
assert cond msg = singleInj $ Assert cond msg
--------------------------------------------------------------------------------
-- * Pointer operations
--------------------------------------------------------------------------------
-- | Swap two pointers
--
-- This is generally an unsafe operation. E.g. it can be used to make a
-- reference to a data structure escape the scope of the data.
--
-- The 'IsPointer' class ensures that the operation is only possible for types
-- that are represented as pointers in C.
unsafeSwap :: (IsPointer a, PtrCMD :<: instr) =>
a -> a -> ProgramT instr (Param2 exp pred) m ()
unsafeSwap a b = singleInj $ SwapPtr a b
--------------------------------------------------------------------------------
-- * File handling
--------------------------------------------------------------------------------
-- | Open a file
fopen :: (FileCMD :<: instr) =>
FilePath -> IOMode -> ProgramT instr (Param2 exp pred) m Handle
fopen file = singleInj . FOpen file
-- | Close a file
fclose :: (FileCMD :<: instr) => Handle -> ProgramT instr (Param2 exp pred) m ()
fclose = singleInj . FClose
-- | Check for end of file
feof :: (FreeExp exp, FreePred exp Bool, FileCMD :<: instr, Monad m) =>
Handle -> ProgramT instr (Param2 exp pred) m (exp Bool)
feof = fmap valToExp . singleInj . FEof
class PrintfType r
where
type PrintfExp r :: * -> *
fprf :: Handle -> String -> [PrintfArg (PrintfExp r)] -> r
instance (FileCMD :<: instr, a ~ ()) =>
PrintfType (ProgramT instr (Param2 exp pred) m a)
where
type PrintfExp (ProgramT instr (Param2 exp pred) m a) = exp
fprf h form as = singleInj $ FPrintf h form (reverse as)
instance (Formattable a, PrintfType r, exp ~ PrintfExp r) =>
PrintfType (exp a -> r)
where
type PrintfExp (exp a -> r) = exp
fprf h form as = \a -> fprf h form (PrintfArg a : as)
-- | Print to a handle. Accepts a variable number of arguments.
fprintf :: PrintfType r => Handle -> String -> r
fprintf h format = fprf h format []
-- | Put a single value to a handle
fput :: forall instr exp pred a m
. (Formattable a, FreePred exp a, FileCMD :<: instr)
=> Handle
-> String -- ^ Prefix
-> exp a -- ^ Expression to print
-> String -- ^ Suffix
-> ProgramT instr (Param2 exp pred) m ()
fput hdl prefix a suffix =
fprintf hdl (prefix ++ formatSpecPrint (Proxy :: Proxy a) ++ suffix) a
-- | Get a single value from a handle
fget
:: ( Formattable a
, pred a
, FreeExp exp
, FreePred exp a
, FileCMD :<: instr
, Monad m
)
=> Handle -> ProgramT instr (Param2 exp pred) m (exp a)
fget = fmap valToExp . singleInj . FGet
-- | Print to @stdout@. Accepts a variable number of arguments.
printf :: PrintfType r => String -> r
printf = fprintf stdout
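-- A usage sketch (added for illustration): printing a labelled value. The
-- @%d@ specifier is an assumption about the 'Formattable' instance of the
-- argument's type and is not defined in this module.
--
-- > reportSum n = printf "sum = %d\n" n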
--------------------------------------------------------------------------------
-- * C-specific commands
--------------------------------------------------------------------------------
-- | Create a null pointer
newPtr :: (pred a, C_CMD :<: instr) => ProgramT instr (Param2 exp pred) m (Ptr a)
newPtr = newNamedPtr "p"
-- | Create a named null pointer
--
-- The provided base name may be appended with a unique identifier to avoid name
-- collisions.
newNamedPtr :: (pred a, C_CMD :<: instr)
=> String -- ^ Base name
-> ProgramT instr (Param2 exp pred) m (Ptr a)
newNamedPtr = singleInj . NewPtr
-- | Cast a pointer to an array
ptrToArr :: (C_CMD :<: instr) => Ptr a -> ProgramT instr (Param2 exp pred) m (Arr i a)
ptrToArr = singleInj . PtrToArr
-- | Create a pointer to an abstract object. The only thing one can do with such
-- objects is to pass them to 'callFun' or 'callProc'.
newObject :: (C_CMD :<: instr)
=> String -- ^ Object type
-> Bool -- ^ Pointed?
-> ProgramT instr (Param2 exp pred) m Object
newObject t p = newNamedObject "obj" t p
-- | Create a pointer to a named abstract object. The only thing one can do with
-- such objects is to pass them to 'callFun' or 'callProc'.
--
-- The provided base name may be appended with a unique identifier to avoid name
-- collisions.
newNamedObject :: (C_CMD :<: instr)
=> String -- ^ Base name
-> String -- ^ Object type
-> Bool -- ^ Pointed?
-> ProgramT instr (Param2 exp pred) m Object
newNamedObject base t p = singleInj $ NewObject base t p
-- | Add an @#include@ statement to the generated code
addInclude :: (C_CMD :<: instr) => String -> ProgramT instr (Param2 exp pred) m ()
addInclude = singleInj . AddInclude
-- | Add a global definition to the generated code
--
-- Can be used conveniently as follows:
--
-- > {-# LANGUAGE QuasiQuotes #-}
-- >
-- > import Language.Embedded.Imperative
-- > import Language.C.Quote.C
-- >
-- > prog = do
-- > ...
-- > addDefinition myCFunction
-- > ...
-- > where
-- > myCFunction = [cedecl|
-- > void my_C_function( ... )
-- > {
-- > // C code
-- > // goes here
-- > }
-- > |]
addDefinition :: (C_CMD :<: instr) => Definition -> ProgramT instr (Param2 exp pred) m ()
addDefinition = singleInj . AddDefinition
-- | Declare an external function
addExternFun :: (pred res, C_CMD :<: instr)
=> String -- ^ Function name
-> proxy res -- ^ Proxy for result type
-> [FunArg exp pred] -- ^ Arguments (only used to determine types)
-> ProgramT instr (Param2 exp pred) m ()
addExternFun fun res args = singleInj $ AddExternFun fun res args
-- | Declare an external procedure
addExternProc :: (C_CMD :<: instr)
=> String -- ^ Procedure name
-> [FunArg exp pred] -- ^ Arguments (only used to determine types)
-> ProgramT instr (Param2 exp pred) m ()
addExternProc proc args = singleInj $ AddExternProc proc args
-- | Call a function
callFun :: (pred a, FreeExp exp, FreePred exp a, C_CMD :<: instr, Monad m)
=> String -- ^ Function name
-> [FunArg exp pred] -- ^ Arguments
-> ProgramT instr (Param2 exp pred) m (exp a)
callFun fun as = fmap valToExp $ singleInj $ CallFun fun as
-- | Call a procedure
callProc :: (C_CMD :<: instr)
=> String -- ^ Procedure name
-> [FunArg exp pred] -- ^ Arguments
-> ProgramT instr (Param2 exp pred) m ()
callProc fun as = singleInj $ CallProc (Nothing :: Maybe Object) fun as
-- | Call a procedure and assign its result
callProcAssign :: (Assignable obj, C_CMD :<: instr)
=> obj -- ^ Object to which the result should be assigned
-> String -- ^ Procedure name
-> [FunArg exp pred] -- ^ Arguments
-> ProgramT instr (Param2 exp pred) m ()
callProcAssign obj fun as = singleInj $ CallProc (Just obj) fun as
-- The reason for having both `callProc` and `callProcAssign` instead of a
-- single one with a `Maybe obj` is that the caller would have to resolve the
-- overloading when passing `Nothing` (as currently done in `callProc`).
-- | Declare and call an external function
externFun :: forall instr m exp pred res
. (pred res, FreeExp exp, FreePred exp res, C_CMD :<: instr, Monad m)
=> String -- ^ Function name
-> [FunArg exp pred] -- ^ Arguments
-> ProgramT instr (Param2 exp pred) m (exp res)
externFun fun args = do
addExternFun fun (Proxy :: Proxy res) args
callFun fun args
-- | Declare and call an external procedure
externProc :: (C_CMD :<: instr, Monad m)
=> String -- ^ Procedure name
-> [FunArg exp pred] -- ^ Arguments
-> ProgramT instr (Param2 exp pred) m ()
externProc proc args = do
addExternProc proc args
callProc proc args
-- | Generate code into another translation unit
inModule :: (C_CMD :<: instr)
=> String
-> ProgramT instr (Param2 exp pred) m ()
-> ProgramT instr (Param2 exp pred) m ()
inModule mod prog = singleInj $ InModule mod prog
-- | Get current time as number of seconds passed today
getTime
:: (pred Double, FreeExp exp, FreePred exp Double, C_CMD :<: instr, Monad m)
=> ProgramT instr (Param2 exp pred) m (exp Double)
getTime = do
addInclude "<sys/time.h>"
addInclude "<sys/resource.h>"
addDefinition getTimeDef
callFun "get_time" []
where
getTimeDef = [cedecl|
double get_time()
{
struct timeval t;
struct timezone tzp;
gettimeofday(&t, &tzp);
return t.tv_sec + t.tv_usec*1e-6;
}
|]
-- From http://stackoverflow.com/questions/2349776/how-can-i-benchmark-c-code-easily
--------------------------------------------------------------------------------
-- * Arguments
--------------------------------------------------------------------------------
-- | Value argument
valArg :: pred a => exp a -> FunArg exp pred
valArg = ValArg
-- | Reference argument
refArg :: (pred a, Arg RefArg pred) => Ref a -> FunArg exp pred
refArg = FunArg . RefArg
-- | Mutable array argument
arrArg :: (pred a, Arg ArrArg pred) => Arr i a -> FunArg exp pred
arrArg = FunArg . ArrArg
-- | Immutable array argument
iarrArg :: (pred a, Arg IArrArg pred) => IArr i a -> FunArg exp pred
iarrArg = FunArg . IArrArg
-- | Pointer argument
ptrArg :: (pred a, Arg PtrArg pred) => Ptr a -> FunArg exp pred
ptrArg = FunArg . PtrArg
-- | Abstract object argument
objArg :: Object -> FunArg exp pred
objArg = FunArg . ObjArg
-- | Constant string argument
strArg :: String -> FunArg exp pred
strArg = FunArg . StrArg
-- | Named constant argument
constArg
:: String -- ^ Type
-> String -- ^ Named constant
-> FunArg exp pred
constArg t n = FunArg $ ConstArg t n
-- | Modifier that takes the address of another argument
addr :: FunArg exp pred -> FunArg exp pred
addr = AddrArg
-- | Modifier that dereferences another argument
deref :: FunArg exp pred -> FunArg exp pred
deref = DerefArg
-- | Add an offset to another argument
offset :: Integral i => FunArg exp pred -> exp i -> FunArg exp pred
offset = OffsetArg
-- The `Integral` constraint isn't needed, but it makes sense, since the
-- intention of `offset` is to add an offset to a pointer.
--------------------------------------------------------------------------------
-- * Running programs
--------------------------------------------------------------------------------
-- | Run a program in 'IO'. Note that not all instructions are supported for
-- running in 'IO'. For example, calls to external C functions are not
-- supported.
runIO :: (EvalExp exp, InterpBi instr IO (Param1 pred), HBifunctor instr) =>
Program instr (Param2 exp pred) a -> IO a
runIO = interpretBi (return . evalExp)
-- | Like 'runIO' but with explicit input/output connected to @stdin@/@stdout@
captureIO :: (EvalExp exp, InterpBi instr IO (Param1 pred), HBifunctor instr)
=> Program instr (Param2 exp pred) a -- ^ Program to run
-> String -- ^ Input to send to @stdin@
-> IO String -- ^ Result from @stdout@
captureIO = fakeIO . runIO
|
kmate/imperative-edsl
|
src/Language/Embedded/Imperative/Frontend.hs
|
bsd-3-clause
| 20,035
| 0
| 13
| 4,618
| 5,268
| 2,752
| 2,516
| -1
| -1
|
{-# LANGUAGE
EmptyDataDecls
, ExistentialQuantification
, FlexibleInstances
, TemplateHaskell
, UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Data.ClassFile.Desc.Typed
( module Data.ClassFile.Desc
, Int (..)
, Long (..)
, Float (..)
, Double (..)
, ReturnAddress ()
, Reference (..)
, Void (..)
, FieldDesc ()
, ParameterDesc ()
, ReturnDesc ()
) where
import Control.Monad
import Data.ClassFile.Desc
import Data.List
import Data.Word
import GHC.Exts (maxTupleSize)
import Language.Haskell.TH
import Prelude hiding (Double, Float, Int)
data Int = B | S | I | C | Z
data Long = J
data Float = F
data Double = D
data ReturnAddress
data Reference = L String
| forall a. ComponentType a => A a
data Void = V
instance Desc Int where
desc B = "B"
desc S = "S"
desc I = "I"
desc C = "C"
desc Z = "Z"
instance Desc Long where
  desc = const "J"
stackSize _ = 2
instance Desc Float where
desc = const "F"
instance Desc Double where
desc = const "D"
stackSize _ = 2
instance Desc Reference where
descs (L x) = showChar 'L' . showString x . showChar ';'
descs (A x) = showChar '[' . descs x
instance Desc Void where
desc _ = "V"
stackSize _ = 0
instance Desc () where
descs _ = id
stackSize _ = 0
instance FieldType a => Desc [a] where
descs = undefined
stackSize = foldl' ((. stackSize) . (+)) 0
class Friend a
instance Friend Int
instance Friend Long
instance Friend Float
instance Friend Double
instance Friend Reference
instance Friend ()
instance Friend Void
instance (FieldType a, Desc [a]) => Friend [a]
class Desc a => FieldType a
instance FieldType Int
instance FieldType Long
instance FieldType Float
instance FieldType Double
instance FieldType Reference
class FieldType a => ComponentType a
instance FieldType a => ComponentType a
class (Friend a, FieldType a) => FieldDesc a
instance (Friend a, FieldType a) => FieldDesc a
class (Friend a, Desc a) => ParameterDesc a
instance ParameterDesc Int
instance ParameterDesc Long
instance ParameterDesc Float
instance ParameterDesc Double
instance ParameterDesc Reference
instance ParameterDesc ()
$(do
let n = min 255 maxTupleSize
names <- replicateM n $ newName "a"
liftM concat $ forM [2 .. n] $ \i -> do
let names' = take i names
tvs = map varT names'
ctxt = cxt . map (\tv -> classP ''FieldType [tv]) $ tvs
tupleTyp = foldl' appT (tupleT i) tvs
let
p = tupP . map varP $ names'
es = map varE names'
descTyp = appT (conT ''Desc) tupleTyp
descsDec = funD 'descs [clause [p] (normalB g) []]
where
xs = map (\e -> [| descs $e |]) es
g = foldr1 (\a b -> [| $a . $b |]) xs
stackSizeDec = funD 'stackSize [clause [p] (normalB g) []]
where
xs = map (\e -> [| stackSize $e |]) es
g = foldl1' (\a b -> [| ($a :: Word16) + $b |]) xs
parameterDescTyp = appT (conT ''ParameterDesc) tupleTyp
friendTyp = appT (conT ''Friend) tupleTyp
sequence [ instanceD ctxt descTyp [descsDec, stackSizeDec]
, instanceD (cxt []) friendTyp []
, instanceD ctxt parameterDescTyp []
])
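-- For illustration (a hedged reading of the splice above, not code copied
-- from its output), the i = 2 case produces instances roughly like:
--
-- > instance (FieldType a1, FieldType a2) => Desc (a1, a2) where
-- >   descs (a1, a2)     = descs a1 . descs a2
-- >   stackSize (a1, a2) = (stackSize a1 :: Word16) + stackSize a2
-- > instance Friend (a1, a2)
-- > instance (FieldType a1, FieldType a2) => ParameterDesc (a1, a2)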
class (Friend a, Desc a) => ReturnDesc a
instance ReturnDesc Int
instance ReturnDesc Long
instance ReturnDesc Float
instance ReturnDesc Double
instance ReturnDesc Reference
instance ReturnDesc Void
|
sonyandy/tnt
|
Data/ClassFile/Desc/Typed.hs
|
bsd-3-clause
| 3,492
| 0
| 22
| 913
| 1,257
| 653
| 604
| -1
| -1
|
module System.Build.Access.Tag where
class Tag r where
tag ::
[(String, String, String)]
-> r
-> r
getTag ::
r
-> [(String, String, String)]
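-- An illustrative sketch: the record type below is hypothetical and not part
-- of this package; 'tag' overwrites the stored triples, 'getTag' reads them.
data ExampleTask = ExampleTask { taskTags :: [(String, String, String)] }

instance Tag ExampleTask where
  tag ts t = t { taskTags = ts }
  getTag = taskTags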
|
tonymorris/lastik
|
System/Build/Access/Tag.hs
|
bsd-3-clause
| 172
| 0
| 9
| 54
| 63
| 38
| 25
| 9
| 0
|
{-# LANGUAGE InstanceSigs #-} -- Because i love it
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleInstances #-} -- IsoFunctorStageOne (MumuIso Maybe x)
{-# LANGUAGE MultiParamTypeClasses #-} -- IsoFunctorStageTwo
{-# LANGUAGE TypeFamilies #-}
{-| Non-polymorphic semi-isomorphisms.
-}
module Sky.Isomorphism.MumuIsoFunctor where
import Prelude hiding (id, (.))
import Control.Category -- yay
import Data.Tuple (swap)
import Control.Monad ((<=<))
-- Note: Isomorphisms should be instances of Category.
----------------------------------------------------------------------------------------------------
class (Category i, Monad (MonadT i)) => SemiIsomorphism i where
type MonadT i :: * -> *
--type MonadT i = m
packSemiIsomorphism :: (a -> (MonadT i) b, b -> (MonadT i) a) -> i a b
unpackSemiIsomorphism :: i a b -> (a -> (MonadT i) b, b -> (MonadT i) a)
-- Derived
toSemiIsomorphism :: (a -> (MonadT i) b) -> (b -> (MonadT i) a) -> i a b
toSemiIsomorphism amb bma = packSemiIsomorphism (amb, bma)
applySemiIsomorphism :: i a b -> a -> (MonadT i) b
applySemiIsomorphism = fst . unpackSemiIsomorphism
unapplySemiIsomorphism :: i a b -> b -> (MonadT i) a
unapplySemiIsomorphism = snd . unpackSemiIsomorphism
revertSemiIsomorphism :: i a b -> i b a
revertSemiIsomorphism = packSemiIsomorphism . swap . unpackSemiIsomorphism
convertSemiIsomorphism :: (SemiIsomorphism j, MonadT j ~ MonadT i) => j a b -> i a b
convertSemiIsomorphism = packSemiIsomorphism . unpackSemiIsomorphism
----------------------------------------------------------------------------------------------------
-- Operators and stuff
iso :: forall i a b. (SemiIsomorphism i) => (a -> (MonadT i) b) -> (b -> (MonadT i) a) -> i a b
iso = toSemiIsomorphism
apply :: forall i a b. (SemiIsomorphism i) => i a b -> a -> (MonadT i) b
apply = applySemiIsomorphism
unapply :: forall i a b. (SemiIsomorphism i) => i a b -> b -> (MonadT i) a
unapply = unapplySemiIsomorphism
convert :: forall i j a b. (SemiIsomorphism i, SemiIsomorphism j, MonadT i ~ MonadT j) => i a b -> j a b
convert = convertSemiIsomorphism
----------------------------------------------------------------------------------------------------
-- Simple semi-isomorphism
newtype MumuIso m a b = MumuIso { _rawMumuIso :: (a -> m b, b -> m a) }
instance Monad m => Category (MumuIso m) where
id = MumuIso (return, return)
(MumuIso (applyF, unapplyF)) . (MumuIso (applyG, unapplyG)) =
MumuIso ((applyF <=< applyG), (unapplyG <=< unapplyF))
instance (Monad m) => SemiIsomorphism (MumuIso m) where
type MonadT (MumuIso m) = m
packSemiIsomorphism = MumuIso
unpackSemiIsomorphism = _rawMumuIso
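-- An illustrative sketch (not part of the original module): a partial
-- isomorphism between Strings and Ints in the Maybe monad, built with 'iso'
-- and used via 'apply' / 'unapply':
--
-- > apply readShowIso "42" == Just 42
-- > unapply readShowIso 7 == Just "7"
readShowIso :: MumuIso Maybe String Int
readShowIso = iso parse render
  where
    parse s = case reads s of
        [(n, "")] -> Just n
        _         -> Nothing
    render = Just . show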
----------------------------------------------------------------------------------------------------
-- Semi-Isomorphisms can be used as IsoFunctors!
class IsoFunctorStageOne f where
isomap1 :: forall a b. MumuIso Maybe a b -> f a -> f b
instance IsoFunctorStageOne (MumuIso Maybe x) where
isomap1 :: forall a b. MumuIso Maybe a b -> MumuIso Maybe x a -> MumuIso Maybe x b
isomap1 ab xa = ab . xa
----------------------------------------------------------------------------------------------------
-- But we'd like to make it work with any monad m
class (Monad (MonadStageTwo f)) => IsoFunctorStageTwo f where
type MonadStageTwo f :: * -> *
isomap2 :: forall a b. MumuIso (MonadStageTwo f) a b -> f a -> f b
instance (Monad m) => IsoFunctorStageTwo (MumuIso m x) where
type MonadStageTwo (MumuIso m x) = m
isomap2 :: forall a b. MumuIso m a b -> MumuIso m x a -> MumuIso m x b
isomap2 ab xa = ab . xa
----------------------------------------------------------------------------------------------------
-- Now make it work for _ANY SEMIISOMORPHISM_
class (Monad (MonadStageThree f)) => IsoFunctorStageThree f where
type MonadStageThree f :: * -> *
isomap3 :: forall a b i. (SemiIsomorphism i, MonadT i ~ MonadStageThree f) => i a b -> f a -> f b
instance (Monad m) => IsoFunctorStageThree (MumuIso m x) where
type MonadStageThree (MumuIso m x) = m
isomap3 :: forall a b i. (SemiIsomorphism i, MonadT i ~ m) => i a b -> MumuIso m x a -> MumuIso m x b
isomap3 ab xa = (convert ab) . xa
----------------------------------------------------------------------------------------------------
-- Can we make it work with _ANY 2 SEMIISOMORPHISMS_ ?
-- Same as IsoFunctorStageThree, redefined to avoid conflicts
class IsoFunctorStageFour f where
type MonadStageFour f :: * -> *
isomap4 :: forall a b i. (SemiIsomorphism i, MonadT i ~ MonadStageFour f) => i a b -> f a -> f b
instance (SemiIsomorphism j) => IsoFunctorStageFour (j x) where
type MonadStageFour (j x) = MonadT j
isomap4 :: forall a b i. (SemiIsomorphism i, MonadT i ~ MonadT j) => i a b -> j x a -> j x b
isomap4 ab xa = (convert ab) . xa
|
xicesky/sky-haskell-playground
|
src/Sky/Isomorphism/MumuIsoFunctor.hs
|
bsd-3-clause
| 4,955
| 0
| 13
| 971
| 1,520
| 807
| 713
| -1
| -1
|
{-# LANGUAGE ScopedTypeVariables #-}
--vchan library
import VChanUtil
import System.IO
import Data.Binary
import Data.ByteString (ByteString, cons, empty)
import Data.Bits
import Control.Monad
data EvidencePiece = M0 M0Rep
| M1 M1Rep
| M2 M2Rep deriving (Eq, Ord, Show)
type M0Rep = ByteString
type M1Rep = ByteString
type M2Rep = ByteString
instance Binary EvidencePiece where
put (M0 req) = do put (0::Word8);
put req;
put(M1 quote) = do put (1::Word8);
put quote;
put(M2 res)= do put(2::Word8);
put res;
get = do t<- get :: Get Word8
case t of
0 -> do req <- get
return (M0 req)
1 -> do quote <- get
return (M1 quote)
2 -> do res <- get
return (M2 res)
data EvidenceDescriptor = D0 | D1 | D2 deriving(Eq, Ord) --for now
instance Binary EvidenceDescriptor where
put D0 = put (0::Word8)
put D1 = put (1::Word8)
put D2 = put (2::Word8)
get = do t<- get :: Get Word8
case t of
0 -> return D0
1 -> return D1
2 -> return D2
instance Show EvidenceDescriptor where
show D0 = "Measurement #0"
show D1 = "Measurement #1"
show D2 = "Measurement #2"
prompt:: IO Int
prompt= loop
where loop = do putStrLn "Which Domain ID is the Attester?"
input <- getLine
case reads input of
[(id,_)] -> return id
_ -> do putStrLn "Error: Please Enter a Number."
loop
main :: IO ()
main = do
measurerID <- prompt
chan <- server_init measurerID
forever $ process chan
return ()
process :: LibXenVChan -> IO ()
process chan = do
ctrlWait chan
ed :: EvidenceDescriptor <- receive chan
let ep = measure ed
send chan ep
return ()
measure :: EvidenceDescriptor -> EvidencePiece
measure ed = case ed of
D0 -> M0 m0Val
D1 -> M1 m1Val
D2 -> M2 m2Val
m0Val :: M0Rep
m0Val = cons (bit 0) empty
m1Val :: M1Rep
m1Val = cons (bit 1) empty
m2Val :: M2Rep
m2Val = cons (bit 2) empty
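-- Illustrative sketch (not part of the original demo): the tag-byte
-- encodings above should round-trip through Data.Binary's encode/decode.
roundTripOk :: Bool
roundTripOk = decode (encode (M1 m1Val)) == M1 m1Val
           && decode (encode D2) == D2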
|
armoredsoftware/protocol
|
demos/demo2/Measurer.hs
|
bsd-3-clause
| 2,428
| 0
| 15
| 1,007
| 779
| 387
| 392
| 74
| 3
|
module TestAlg where
import Control.Applicative
import Control.Monad.Identity
import Generics.Types
import qualified Generics.Morphism.Para as Para
import qualified Container.Tree.Abstract as F
type Tree = FixA Id (F.Tree () Int)
type Alg r = Para.Alg (F.Tree () Int) r
type EndoAlg = Para.Endo (F.Tree () Int)
-- constructors
leaf :: Tree
leaf = (In . Id) F.Leaf
branch :: Int -> Tree -> Tree -> Tree
branch i a b = (In . Id) (F.Branch () i a b)
single :: Int -> Tree
single a = branch a leaf leaf
tri :: Int -> Int -> Int -> Tree
tri a b c = branch b (single a) (single c)
-- repmin
-- minAlg :: Alg Int
-- minAlg = Para.Psi $ \a ->
-- case fst a of
-- F.Leaf -> maxBound
-- F.Branch _ v l r -> minimum [v, l, r]
-- repAlg :: Alg (Int -> Tree)
-- repAlg = Para.Psi $ \a x ->
-- case fst a of
-- F.Leaf -> In (Id (F.Leaf))
-- F.Branch k _ l r -> In (Id (F.Branch k x (l x) (r x)))
-- repMinAlg :: Alg Tree
-- repMinAlg = repAlg <*> minAlg
-- two repmins.
-- runRepMinAsPara :: Tree -> Tree
-- runRepMinAsPara = runIdentity . Para.paraMA repMinAlg
-- runRepMinAsEndo :: Tree -> Tree
-- runRepMinAsEndo = runIdentity . Para.endoMA (Para.toEndo repMinAlg)
-- test tree.
myT :: Tree
myT =
branch 40 (branch 6 (tri 1 2 3)
(tri 8 9 10))
(branch 86 (tri 81 82 83)
(tri 88 89 90))
|
sebastiaanvisser/islay
|
src/TestAlg.hs
|
bsd-3-clause
| 1,401
| 0
| 9
| 382
| 356
| 200
| 156
| 23
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Data.Text (Text)
import Data.Void (Void)
import Dhall.Core (Expr)
import Dhall.CsvToDhall
( defaultConversion
, dhallFromCsv
, resolveSchemaExpr
, typeCheckSchemaExpr
)
import Dhall.Src (Src)
import System.FilePath (dropExtension, replaceExtension, takeBaseName)
import Test.Tasty (TestTree)
import Test.Tasty.Silver (findByExtension)
import qualified Data.Csv
import qualified Data.Text as Text
import qualified Data.Text.IO
import qualified Dhall.Core as D
import qualified Dhall.Csv
import qualified Dhall.Csv.Util
import qualified Dhall.CsvToDhall as CsvToDhall
import qualified GHC.IO.Encoding
import qualified Test.Tasty
import qualified Test.Tasty.Silver as Silver
main :: IO ()
main = do
GHC.IO.Encoding.setLocaleEncoding GHC.IO.Encoding.utf8
testTree <- goldenTests
Test.Tasty.defaultMain testTree
goldenTests :: IO TestTree
goldenTests = do
dhallToCsvTree <- dhallToCsvGolden
csvToDhallTree <- csvToDhallGolden
noHeaderCsvToDhallTree <- noHeaderCsvToDhallGolden
return $ Test.Tasty.testGroup "dhall-csv"
[ dhallToCsvTree
, csvToDhallTree
, noHeaderCsvToDhallTree
]
dhallToCsvGolden :: IO TestTree
dhallToCsvGolden = do
dhallFiles <- findByExtension [".dhall"] "./tasty/data/dhall-to-csv"
return $ Test.Tasty.testGroup "dhall-to-csv"
[ Silver.goldenVsAction
(takeBaseName dhallFile)
csvFile
(Dhall.Csv.codeToValue Nothing =<< Data.Text.IO.readFile dhallFile)
Dhall.Csv.Util.encodeCsvDefault
| dhallFile <- dhallFiles
, let csvFile = replaceExtension dhallFile ".csv"
]
csvToDhallGolden :: IO TestTree
csvToDhallGolden = do
csvFiles <- findByExtension [".csv"] "./tasty/data/csv-to-dhall"
return $ Test.Tasty.testGroup "csv-to-dhall"
[ Silver.goldenVsAction
(takeBaseName csvFile)
dhallFile
(getSchemaAndCsv csvFile True schema)
(showExpressionOrError . (uncurry (dhallFromCsv defaultConversion)))
| csvFile <- csvFiles
, let dhallFile = replaceExtension csvFile ".dhall"
, let schema = Text.pack $ (dropExtension csvFile) ++ "_schema.dhall"
]
noHeaderCsvToDhallGolden :: IO TestTree
noHeaderCsvToDhallGolden = do
csvFiles <- findByExtension [".csv"] "./tasty/data/no-header-csv-to-dhall"
    return $ Test.Tasty.testGroup "no-header-csv-to-dhall"
[ Silver.goldenVsAction
(takeBaseName csvFile)
dhallFile
(getSchemaAndCsv csvFile False schema)
(showExpressionOrError . (uncurry (dhallFromCsv defaultConversion)))
| csvFile <- csvFiles
, let dhallFile = replaceExtension csvFile ".dhall"
, let schema = Text.pack $ (dropExtension csvFile) ++ "_schema.dhall"
]
textToCsv :: Bool -> Text -> IO [Data.Csv.NamedRecord]
textToCsv hasHeader txt =
case Dhall.Csv.Util.decodeCsvDefault hasHeader txt of
Left err -> fail err
Right csv -> return csv
getSchemaAndCsv :: FilePath -> Bool -> Text -> IO (Expr Src Void, [Data.Csv.NamedRecord])
getSchemaAndCsv csvFile hasHeader schema = do
finalSchema <- typeCheckSchemaExpr id =<< resolveSchemaExpr schema
csv <- textToCsv hasHeader =<< Data.Text.IO.readFile csvFile
return (finalSchema, csv)
showExpressionOrError :: Either CsvToDhall.CompileError (Expr Src Void) -> Text
showExpressionOrError (Left err) = Text.pack $ show err
showExpressionOrError (Right expr) = (D.pretty expr) <> "\n"
|
Gabriel439/Haskell-Dhall-Library
|
dhall-csv/tasty/Main.hs
|
bsd-3-clause
| 3,648
| 0
| 17
| 813
| 914
| 485
| 429
| 86
| 2
|
data X a where
I :: Int -> X Int
|
itchyny/vim-haskell-indent
|
test/datatype/gadt.out.hs
|
mit
| 35
| 0
| 7
| 12
| 18
| 10
| 8
| -1
| -1
|
module Handler.Home where
import Import
-- This is a handler function for the GET request method on the HomeR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
getHomeR :: Handler Html
getHomeR = do
allPosts <- runDB $ selectList [] [Desc BlogPostId]
defaultLayout $ do
$(widgetFile "posts/index")
|
MaxGabriel/YesodScreencast
|
Handler/Home.hs
|
cc0-1.0
| 545
| 0
| 12
| 106
| 70
| 38
| 32
| -1
| -1
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE LambdaCase #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Workspaces
-- Copyright : 2007-2011 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL
--
-- Maintainer : maintainer@leksah.org
-- Stability : provisional
-- Portability :
--
-- | Represents a workspace, a work unit, which can be composed of multiple packages
--
-----------------------------------------------------------------------------
module IDE.Workspaces (
workspaceNew
, workspaceOpen
, workspaceTry
, workspaceOpenThis
, workspaceClose
, workspaceClean
, workspaceMake
, workspaceActivatePackage
, workspaceAddPackage
, workspaceAddPackage'
, workspaceRemovePackage
, workspacePackageNew
, workspacePackageClone
, workspaceTryQuiet
, workspaceNewHere
, packageTry
, packageTryQuiet
, backgroundMake
, makePackage
, fileOpen
, fileOpen'
) where
import IDE.Core.State
import Graphics.UI.Editor.Parameters
(Parameter(..), (<<<-), paraName, emptyParams)
import Control.Monad (filterM, void, unless, when, liftM)
import Data.Maybe (isJust, fromJust, catMaybes)
import IDE.Utils.GUIUtils
(chooseFile, chooseSaveFile, __)
import System.FilePath
(takeFileName, (</>), isAbsolute, dropFileName, makeRelative,
dropExtension, takeBaseName, addExtension, takeExtension,
takeDirectory)
import Text.PrinterParser
(readFields,
writeFields,
readParser,
stringParser,
intParser,
mkFieldS,
FieldDescriptionS(..))
import qualified Text.PrettyPrint as PP (text)
import Graphics.UI.Gtk
(dialogSetDefaultResponse, windowWindowPosition, widgetDestroy,
dialogRun, messageDialogNew, dialogAddButton, Window(..),
widgetHide, DialogFlags(..))
import IDE.Pane.PackageEditor (packageNew', packageClone, choosePackageFile, standardSetup)
import Data.List (delete)
import IDE.Package
(getModuleTemplate, getPackageDescriptionAndPath, activatePackage,
     deactivatePackage, idePackageFromPath)
import System.Directory
(doesDirectoryExist, getDirectoryContents, getHomeDirectory,
createDirectoryIfMissing, doesFileExist)
import System.Time (getClockTime)
import Graphics.UI.Gtk.Windows.MessageDialog
(ButtonsType(..), MessageType(..))
import Graphics.UI.Gtk.Windows.Dialog (ResponseId(..))
import qualified Control.Exception as Exc (SomeException(..), throw, Exception)
import qualified Data.Map as Map (empty)
import IDE.Pane.SourceBuffer
(belongsToWorkspace, IDEBuffer(..), maybeActiveBuf, fileOpenThis,
fileCheckAll, belongsToPackages')
import System.Glib.Attributes (AttrOp(..), set)
import Graphics.UI.Gtk.General.Enums (WindowPosition(..))
import Control.Applicative ((<$>))
import IDE.Build
import IDE.Utils.FileUtils(myCanonicalizePath)
import Control.Monad.Trans.Reader (ask)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans.Class (lift)
import qualified Data.Set as Set (toList)
import Distribution.PackageDescription (hsSourceDirs)
import IDE.Command.VCS.Common.Workspaces as VCSWS
import qualified VCSWrapper.Common as VCS
import qualified VCSGui.Common as VCSGUI
import qualified IDE.Workspaces.Writer as Writer
import System.Log.Logger (debugM)
import IDE.Pane.Log (showDefaultLogLaunch', getLog)
import IDE.LogRef (logOutputDefault)
import Data.Foldable (forM_)
import Data.Text (Text)
import qualified Data.Text as T (unpack, pack)
import Data.Monoid ((<>))
import qualified Text.Printf as S (printf)
import Text.Printf (PrintfType)
import qualified Data.Text.IO as T (writeFile)
import Graphics.UI.Gtk.Selectors.FileChooserDialog
(fileChooserDialogNew)
import Graphics.UI.Gtk.Selectors.FileChooser
(fileChooserGetFilename, fileChooserSetCurrentFolder,
FileChooserAction(..))
import Graphics.UI.Gtk.Abstract.Widget (widgetShow)
import Control.Exception (SomeException(..), catch)
printf :: PrintfType r => Text -> r
printf = S.printf . T.unpack
-- | Constructs a new workspace and makes it the current workspace
workspaceNew :: IDEAction
workspaceNew = do
window <- getMainWindow
mbFile <- liftIO $ chooseSaveFile window (__ "New file for workspace") Nothing
forM_ mbFile workspaceNewHere
workspaceNewHere :: FilePath -> IDEAction
workspaceNewHere filePath =
let realPath = if takeExtension filePath == leksahWorkspaceFileExtension
then filePath
else addExtension filePath leksahWorkspaceFileExtension
in do
dir <- liftIO $ myCanonicalizePath $ dropFileName realPath
let cPath = dir </> takeFileName realPath
newWorkspace = emptyWorkspace {
wsName = T.pack $ takeBaseName cPath,
wsFile = cPath}
liftIO $ writeFields cPath newWorkspace Writer.workspaceDescr
workspaceOpenThis False (Just cPath)
return ()
workspaceOpen :: IDEAction
workspaceOpen = do
window <- getMainWindow
mbFilePath <- liftIO $ chooseWorkspaceFile window
workspaceOpenThis True mbFilePath
return ()
workspaceTryQuiet :: WorkspaceAction -> IDEAction
workspaceTryQuiet f = do
maybeWorkspace <- readIDE workspace
case maybeWorkspace of
Just ws -> runWorkspace f ws
Nothing -> ideMessage Normal (__ "No workspace open")
workspaceTry :: WorkspaceAction -> IDEAction
workspaceTry f = do
maybeWorkspace <- readIDE workspace
case maybeWorkspace of
Just ws -> runWorkspace f ws
Nothing -> do
mainWindow <- getMainWindow
defaultWorkspace <- liftIO $ (</> "leksah.lkshw") <$> getHomeDirectory
resp <- liftIO $ do
defaultExists <- doesFileExist defaultWorkspace
md <- messageDialogNew (Just mainWindow) [DialogModal] MessageQuestion ButtonsCancel (
__ "You need to have a workspace open for this to work. "
<> __ "Choose ~/leksah.lkshw to "
<> __ (if defaultExists then "open workspace " else "create a workspace ")
<> T.pack defaultWorkspace)
dialogAddButton md (__ "_New Workspace") (ResponseUser 1)
dialogAddButton md (__ "_Open Workspace") (ResponseUser 2)
dialogAddButton md ("~/leksah.lkshw" :: Text) (ResponseUser 3)
dialogSetDefaultResponse md (ResponseUser 3)
set md [ windowWindowPosition := WinPosCenterOnParent ]
resp <- dialogRun md
widgetHide md
return resp
case resp of
ResponseUser 1 -> do
workspaceNew
postAsyncIDE $ workspaceTryQuiet f
ResponseUser 2 -> do
workspaceOpen
postAsyncIDE $ workspaceTryQuiet f
ResponseUser 3 -> do
defaultExists <- liftIO $ doesFileExist defaultWorkspace
if defaultExists
then workspaceOpenThis True (Just defaultWorkspace)
else workspaceNewHere defaultWorkspace
postAsyncIDE $ workspaceTryQuiet f
_ -> return ()
chooseWorkspaceFile :: Window -> IO (Maybe FilePath)
chooseWorkspaceFile window = chooseFile window (__ "Select leksah workspace file (.lkshw)") Nothing [("Leksah Workspace Files", ["*.lkshw"])]
workspaceOpenThis :: Bool -> Maybe FilePath -> IDEAction
workspaceOpenThis askForSession mbFilePath =
case mbFilePath of
Nothing -> return ()
Just filePath -> do
liftIO . debugM "leksah" $ "workspaceOpenThis " ++ show askForSession ++ " " ++ filePath
let spath = dropExtension filePath ++ leksahSessionFileExtension
workspaceClose
exists <- liftIO $ doesFileExist spath
wantToLoadSession <-
if exists && askForSession
then do
window <- getMainWindow
liftIO $ do
md <- messageDialogNew (Just window) [] MessageQuestion ButtonsNone
$ __ "There are session settings stored with this workspace."
dialogAddButton md (__ "_Ignore Session") ResponseCancel
dialogAddButton md (__ "_Load Session") ResponseYes
dialogSetDefaultResponse md ResponseYes
set md [ windowWindowPosition := WinPosCenterOnParent ]
rid <- dialogRun md
widgetDestroy md
case rid of
ResponseYes -> return True
otherwise -> return False
else return False
if wantToLoadSession
then void (triggerEventIDE (LoadSession spath))
else do
ideR <- ask
catchIDE (do
workspace <- readWorkspace filePath
Writer.setWorkspace (Just workspace {wsFile = filePath})
VCSWS.onWorkspaceOpen workspace)
(\ (e :: Exc.SomeException) -> reflectIDE
(ideMessage Normal (T.pack $ printf (__ "Can't load workspace file %s\n%s") filePath (show e))) ideR)
-- | Closes a workspace
workspaceClose :: IDEAction
workspaceClose = do
liftIO $ debugM "leksah" "workspaceClose"
oldWorkspace <- readIDE workspace
case oldWorkspace of
Nothing -> return ()
Just ws -> do
VCSWS.onWorkspaceClose
let oldActivePackFile = wsActivePackFile ws
prefs <- readIDE prefs
when (saveSessionOnClose prefs) $
triggerEventIDE_ (SaveSession (dropExtension (wsFile ws) ++ leksahSessionFileExtension))
addRecentlyUsedWorkspace (wsFile ws)
Writer.setWorkspace Nothing
when (isJust oldActivePackFile) $ do
triggerEventIDE (Sensitivity [(SensitivityProjectActive, False),
(SensitivityWorkspaceOpen, False)])
return ()
return ()
return ()
workspacePackageNew :: WorkspaceAction
workspacePackageNew = do
ws <- ask
let path = dropFileName (wsFile ws)
lift $ packageNew' path logOutputDefault (\isNew fp -> do
window <- getMainWindow
workspaceTry $ void (workspaceAddPackage' fp)
when isNew $ do
mbPack <- idePackageFromPath logOutputDefault fp
constructAndOpenMainModules mbPack
void (triggerEventIDE UpdateWorkspaceInfo))
workspacePackageClone :: WorkspaceAction
workspacePackageClone = do
ws <- ask
let path = dropFileName (wsFile ws)
lift $ packageClone path logOutputDefault (\fp -> do
window <- getMainWindow
workspaceTry $ void (workspaceAddPackage' fp)
void (triggerEventIDE UpdateWorkspaceInfo))
constructAndOpenMainModules :: Maybe IDEPackage -> IDEAction
constructAndOpenMainModules Nothing = return ()
constructAndOpenMainModules (Just idePackage) =
forM_ (ipdMain idePackage) $ \(target, bi, isTest) -> do
mbPD <- getPackageDescriptionAndPath
case mbPD of
Just (pd,_) ->
case hsSourceDirs bi of
path:_ -> do
liftIO $ createDirectoryIfMissing True path
alreadyExists <- liftIO $ doesFileExist (path </> target)
unless alreadyExists $ do
template <- liftIO $ getModuleTemplate (if isTest then "testmain" else "main") pd "Main" "" ""
liftIO $ T.writeFile (path </> target) template
fileOpenThis (path </> target)
_ -> return ()
Nothing -> ideMessage Normal (__ "No package description")
workspaceAddPackage :: WorkspaceAction
workspaceAddPackage = do
ws <- ask
let path = dropFileName (wsFile ws)
window <- lift getMainWindow
mbFilePath <- liftIO $ choosePackageFile window (Just path)
case mbFilePath of
Nothing -> return ()
Just fp -> do
void (workspaceAddPackage' fp)
lift $ void (triggerEventIDE UpdateWorkspaceInfo)
workspaceAddPackage' :: FilePath -> WorkspaceM (Maybe IDEPackage)
workspaceAddPackage' fp = do
ws <- ask
cfp <- liftIO $ myCanonicalizePath fp
mbPack <- lift $ idePackageFromPath logOutputDefault cfp
case mbPack of
Just pack -> do
unless (cfp `elem` map ipdCabalFile (wsPackages ws)) $ lift $
Writer.writeWorkspace $ ws {wsPackages = pack : wsPackages ws,
wsActivePackFile = Just (ipdCabalFile pack),
wsActiveExe = Nothing}
return (Just pack)
Nothing -> return Nothing
packageTryQuiet :: PackageAction -> IDEAction
packageTryQuiet f = do
maybePackage <- readIDE activePack
case maybePackage of
Just p -> workspaceTryQuiet $ runPackage f p
Nothing -> ideMessage Normal (__ "No active package")
packageTry :: PackageAction -> IDEAction
packageTry f = workspaceTry $ do
maybePackage <- lift $ readIDE activePack
case maybePackage of
Just p -> runPackage f p
Nothing -> do
window <- lift getMainWindow
resp <- liftIO $ do
md <- messageDialogNew (Just window) [] MessageQuestion ButtonsCancel
(__ "You need to have an active package for this to work.")
dialogAddButton md (__ "_New Package") (ResponseUser 1)
dialogAddButton md (__ "_Add Package") (ResponseUser 2)
dialogSetDefaultResponse md (ResponseUser 2)
set md [ windowWindowPosition := WinPosCenterOnParent ]
resp <- dialogRun md
widgetHide md
return resp
case resp of
ResponseUser 1 -> do
workspacePackageNew
lift $ postAsyncIDE $ packageTryQuiet f
ResponseUser 2 -> do
workspaceAddPackage
lift $ postAsyncIDE $ packageTryQuiet f
_ -> return ()
workspaceRemovePackage :: IDEPackage -> WorkspaceAction
workspaceRemovePackage pack = do
ws <- ask
when (pack `elem` wsPackages ws) $ lift $
Writer.writeWorkspace ws {wsPackages = delete pack (wsPackages ws)}
return ()
workspaceActivatePackage :: IDEPackage -> Maybe Text -> WorkspaceAction
workspaceActivatePackage pack exe = do
ws <- ask
let activePath = takeDirectory $ ipdCabalFile pack
lift $ activatePackage (Just activePath) (Just pack) exe
when (pack `elem` wsPackages ws) $ lift $ do
Writer.writeWorkspace ws {wsActivePackFile = Just (ipdCabalFile pack)
,wsActiveExe = exe}
return ()
return ()
readWorkspace :: FilePath -> IDEM Workspace
readWorkspace fp = do
liftIO $ debugM "leksah" "readWorkspace"
ws <- liftIO $ readFields fp Writer.workspaceDescr emptyWorkspace
ws' <- liftIO $ makePathsAbsolute ws fp
packages <- mapM (idePackageFromPath logOutputDefault) (wsPackagesFiles ws')
--TODO set package vcs here
return ws'{ wsPackages = catMaybes packages}
makePathsAbsolute :: Workspace -> FilePath -> IO Workspace
makePathsAbsolute ws bp = do
wsFile' <- myCanonicalizePath bp
wsActivePackFile' <- case wsActivePackFile ws of
Nothing -> return Nothing
Just fp -> do
fp' <- makeAbsolute (dropFileName wsFile') fp
return (Just fp')
wsPackagesFiles' <- mapM (makeAbsolute (dropFileName wsFile')) (wsPackagesFiles ws)
return ws {wsActivePackFile = wsActivePackFile', wsFile = wsFile', wsPackagesFiles = wsPackagesFiles'}
where
makeAbsolute basePath relativePath =
myCanonicalizePath
(if isAbsolute relativePath
then relativePath
else basePath </> relativePath)
emptyWorkspace = Workspace {
wsVersion = Writer.workspaceVersion
, wsSaveTime = ""
, wsName = ""
, wsFile = ""
, wsPackages = []
, wsPackagesFiles = []
, wsActivePackFile = Nothing
, wsActiveExe = Nothing
, wsNobuildPack = []
, packageVcsConf = Map.empty
}
addRecentlyUsedWorkspace :: FilePath -> IDEAction
addRecentlyUsedWorkspace fp = do
state <- readIDE currentState
unless (isStartingOrClosing state) $ do
recentWorkspaces' <- readIDE recentWorkspaces
unless (fp `elem` recentWorkspaces') $
modifyIDE_ (\ide -> ide{recentWorkspaces = take 12 (fp : recentWorkspaces')})
triggerEventIDE UpdateRecent
return ()
removeRecentlyUsedWorkspace :: FilePath -> IDEAction
removeRecentlyUsedWorkspace fp = do
state <- readIDE currentState
unless (isStartingOrClosing state) $ do
recentWorkspaces' <- readIDE recentWorkspaces
when (fp `elem` recentWorkspaces') $
modifyIDE_ (\ide -> ide{recentWorkspaces = filter (/= fp) recentWorkspaces'})
triggerEventIDE UpdateRecent
return ()
------------------------
-- Workspace make
workspaceClean :: WorkspaceAction
workspaceClean = do
ws <- ask
settings <- lift $ do
prefs' <- readIDE prefs
return (defaultMakeSettings prefs')
makePackages settings (wsPackages ws) MoClean MoClean moNoOp
buildSteps :: Bool -> IDEM [MakeOp]
buildSteps runTests = do
debug <- isJust <$> readIDE debugState
return $ case (runTests, debug) of
(True, True) -> [MoBuild,MoDocu]
(True, False) -> [MoBuild,MoDocu,MoTest,MoCopy,MoRegister]
(False, True) -> [MoBuild]
(False, False) -> [MoBuild,MoCopy,MoRegister]
workspaceMake :: WorkspaceAction
workspaceMake = do
ws <- ask
settings <- lift $ do
prefs' <- readIDE prefs
return ((defaultMakeSettings prefs'){
msMakeMode = True,
msBackgroundBuild = False})
build <- lift . buildSteps $ msRunUnitTests settings
let steps = MoComposed (MoConfigure : build)
makePackages settings (wsPackages ws) steps steps MoMetaInfo
backgroundMake :: IDEAction
backgroundMake = catchIDE (do
ideR <- ask
prefs <- readIDE prefs
debug <- isJust <$> readIDE debugState
modifiedPacks <- if saveAllBeforeBuild prefs
then fileCheckAll belongsToPackages'
else return []
let isModified = not (null modifiedPacks)
when isModified $ do
let settings = defaultMakeSettings prefs
steps <- buildSteps $ msRunUnitTests settings
workspaceTryQuiet $ if debug || msSingleBuildWithoutLinking settings && not (msMakeMode settings)
then makePackages settings modifiedPacks (MoComposed steps) (MoComposed []) moNoOp
else makePackages settings modifiedPacks (MoComposed steps)
(MoComposed (MoConfigure:steps)) MoMetaInfo
)
(\(e :: Exc.SomeException) -> sysMessage Normal (T.pack $ show e))
makePackage :: PackageAction
makePackage = do
p <- ask
liftIDE $ do
getLog >>= liftIO . bringPaneToFront
showDefaultLogLaunch'
prefs' <- readIDE prefs
mbWs <- readIDE workspace
let settings = (defaultMakeSettings prefs'){msBackgroundBuild = False}
case mbWs of
Nothing -> sysMessage Normal (__ "No workspace for build.")
Just ws -> do
debug <- isJust <$> readIDE debugState
steps <- buildSteps $ msRunUnitTests settings
if debug || msSingleBuildWithoutLinking settings && not (msMakeMode settings)
then runWorkspace
(makePackages settings [p] (MoComposed steps) (MoComposed []) moNoOp) ws
else
runWorkspace
(makePackages settings [p]
(MoComposed steps)
(MoComposed (MoConfigure:steps))
MoMetaInfo) ws
fileOpen :: IDEAction
fileOpen = do
window <- getMainWindow
prefs <- readIDE prefs
mbBuf <- maybeActiveBuf
mbFileName <- liftIO $ do
dialog <- fileChooserDialogNew
(Just $ __ "Open File")
(Just window)
FileChooserActionOpen
[("gtk-cancel"
,ResponseCancel)
,("gtk-open"
,ResponseAccept)]
case mbBuf >>= fileName of
Just fn -> void (fileChooserSetCurrentFolder dialog (dropFileName fn))
Nothing -> return ()
widgetShow dialog
response <- dialogRun dialog
case response of
ResponseAccept -> do
f <- fileChooserGetFilename dialog
widgetDestroy dialog
return f
ResponseCancel -> do
widgetDestroy dialog
return Nothing
ResponseDeleteEvent-> do
widgetDestroy dialog
return Nothing
_ -> return Nothing
forM_ mbFileName fileOpen'
fileOpen' :: FilePath -> IDEAction
fileOpen' fp = do
window <- getMainWindow
knownFile <- belongsToWorkspace fp
unless knownFile $
if takeExtension fp == ".cabal"
then do
resp <- liftIO $ do
md <- messageDialogNew
(Just window) []
MessageQuestion
ButtonsNone
(__ "Would you like to add the package " <> T.pack fp
<> __ " to the workspace so that it can be built by Leksah?")
dialogAddButton md (__ "_Add " <> T.pack (takeFileName fp)) (ResponseUser 1)
dialogAddButton md (__ "Just _open " <> T.pack (takeFileName fp)) (ResponseUser 2)
dialogSetDefaultResponse md (ResponseUser 1)
resp <- dialogRun md
widgetDestroy md
return resp
case resp of
ResponseUser 1 -> workspaceTry $ do
workspaceAddPackage' fp
return ()
_ -> return ()
else liftIO (findCabalFile fp) >>= \case
Nothing -> return ()
Just cabalFile -> do
resp <- liftIO $ do
md <- messageDialogNew
(Just window) []
MessageQuestion
ButtonsNone
(__ "The file " <> T.pack fp
<> __ " seems to belong to the package " <> T.pack cabalFile
<> __ " would you like to add " <> T.pack (takeFileName cabalFile)
<> __ " to your workspace?")
dialogAddButton md (__ "_Add " <> T.pack (takeFileName cabalFile)) (ResponseUser 1)
dialogAddButton md (__ "Just _open " <> T.pack (takeFileName fp)) (ResponseUser 2)
dialogSetDefaultResponse md (ResponseUser 1)
resp <- dialogRun md
widgetDestroy md
return resp
case resp of
ResponseUser 1 -> workspaceTry $ do
workspaceAddPackage' cabalFile
return ()
_ -> return ()
fileOpenThis fp
where
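        -- Walk up the directory hierarchy starting from fp, returning the
        -- first .cabal file found; gives up at the filesystem root or on any
        -- IO exception.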
findCabalFile :: FilePath -> IO (Maybe FilePath)
findCabalFile fp = (do
let dir = takeDirectory fp
contents <- getDirectoryContents dir
files <- filterM (\f -> not <$> doesDirectoryExist (dir </> f)) contents
let cabal = filter ((== ".cabal") . takeExtension) files
case cabal of
(c:_) -> return . Just $ dir </> c
_ | fp == dir -> return Nothing
| otherwise -> findCabalFile dir
) `catch` (\(_ :: SomeException) -> return Nothing)
|
jaccokrijnen/leksah
|
src/IDE/Workspaces.hs
|
gpl-2.0
| 24,955
| 0
| 29
| 8,201
| 6,067
| 3,029
| 3,038
| 540
| 7
|
module Network.Haskoin.Block.Tests (tests) where
import Data.Either (fromRight)
import Data.String (fromString)
import Data.String.Conversions (cs)
import Network.Haskoin.Block
import Network.Haskoin.Test
import Network.Haskoin.Transaction
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck
tests :: [Test]
tests =
[ testGroup
"Block hash tests"
[ testProperty "decode . encode block hash" $
forAll arbitraryBlockHash $ \h ->
hexToBlockHash (blockHashToHex h) == Just h
, testProperty "From string block hash" $
forAll arbitraryBlockHash $ \h ->
fromString (cs $ blockHashToHex h) == h
]
, testGroup
"Merkle trees"
        [ testProperty "Width of tree at maximum height = 1" testTreeWidth
, testProperty "Width of tree at height 0 is # txns" testBaseWidth
, testProperty "extract . build partial merkle tree" $
forAll
(listOf1 ((,) <$> arbitraryTxHash <*> arbitrary))
buildExtractTree
]
]
{- Merkle Trees -}
testTreeWidth :: Int -> Property
testTreeWidth i = i /= 0 ==> calcTreeWidth (abs i) (calcTreeHeight $ abs i) == 1
testBaseWidth :: Int -> Property
testBaseWidth i = i /= 0 ==> calcTreeWidth (abs i) 0 == abs i
buildExtractTree :: [(TxHash, Bool)] -> Bool
buildExtractTree txs =
r == buildMerkleRoot (map fst txs) && m == map fst (filter snd txs)
where
(f, h) = buildPartialMerkle txs
(r, m) =
fromRight (error "Could not extract matches from Merkle tree") $
extractMatches f h (length txs)
|
xenog/haskoin
|
test/bitcoin/Network/Haskoin/Block/Tests.hs
|
unlicense
| 1,818
| 0
| 15
| 602
| 447
| 237
| 210
| -1
| -1
|
#! /usr/bin/env runhaskell
-----------------------------------------------------------------------------
-- |
-- Module :
-- Copyright : (c) 2012 Boyun Tang
-- License : BSD-style
-- Maintainer : tangboyun@hotmail.com
-- Stability : experimental
-- Portability : ghc
--
--
--
-----------------------------------------------------------------------------
import Distribution.Simple
main = defaultMain
|
tangboyun/bio-seq-gb
|
Setup.hs
|
bsd-3-clause
| 402
| 0
| 4
| 43
| 24
| 19
| 5
| 2
| 1
|