code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedLists #-}
{-# OPTIONS_GHC -Wno-missing-signatures #-}
-- | This module defines the 'Frame' data type, as well as functions for using
-- it easily. The 'F' monad is a reader for a 'Frame' and can be consumed by
-- 'runFrame'.
module Frame where
import Control.Monad.IO.Class
import Control.Monad.Trans.Class ( lift )
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Resource as ResourceT
import qualified SDL
import UnliftIO ( MonadUnliftIO(..)
, askRunInIO
, toIO
)
import UnliftIO.Exception ( finally
, throwString
)
import UnliftIO.MVar
import Data.IORef
import Data.Vector ( Vector
, cons
)
import Data.Word
import HasVulkan
import MonadVulkan
import RefCounted
import Vulkan.CStruct.Extends ( SomeStruct )
import Vulkan.Core10 as Vk
hiding ( createDevice
, createFramebuffer
, createImageView
, createInstance
, withBuffer
, withImage
)
import Vulkan.Extensions.VK_KHR_surface
import Vulkan.Extensions.VK_KHR_swapchain
import Vulkan.Zero
-- | A record of everything required to render a single frame of the
-- application.
data Frame = Frame
{ fIndex :: Word64
, -- SDL Stuff
fWindow :: SDL.Window
-- Vulkan items
, fSurface :: SurfaceKHR
, fSwapchain :: SwapchainKHR
, fSwapchainFormat :: Format
, fRenderPass :: RenderPass
, fImageExtent :: Extent2D
, fImageAvailableSemaphore :: Semaphore
, fRenderFinishedSemaphore :: Semaphore
, fPipeline :: Pipeline
, fJuliaPipeline :: Pipeline
, fJuliaPipelineLayout :: PipelineLayout
, fJuliaDescriptorSets :: Word32 -> DescriptorSet
, fImages :: Word32 -> Image
, fImageViews :: Word32 -> ImageView
, fFramebuffers :: Word32 -> Framebuffer
, fReleaseSwapchain :: RefCounted
-- Scheduling. TODO, abstract this
, -- | This 'MVar' will be signaled when this frame has finished rendering on
-- the GPU
fCurrentPresented :: MVar ()
, -- | These 'MVar's track when previous frames have finished executing on
-- the GPU
fLastPresented :: MVar ()
, fSecondLastPresented :: MVar ()
, fThirdLastPresented :: MVar ()
-- | When did we start rendering this frame, in ns
, fStartTime :: Word64
-- | The 'InternalState' for tracking frame-only resources.
, fResources :: (ReleaseKey, ResourceT.InternalState)
-- | A list of 'Fences' of GPU work submitted for this frame.
, fGPUWork :: IORef (Vector Fence)
}
-- | The number of frames in flight at any one time; 'commandPoolIndex'
-- cycles through command pools using this as the modulus.
numConcurrentFrames :: Int
numConcurrentFrames = 3
-- | A monad for running a single frame
newtype F a = F { unF :: ReaderT Frame V a }
deriving newtype ( Functor
, Applicative
, Monad
, MonadFail
, MonadIO
, HasVulkan
)
instance MonadUnliftIO F where
withRunInIO a = F $ withRunInIO (\r -> a (r . unF))
-- | By default resources allocated will only last until the frame is retired.
--
-- To allocate something globally use 'allocateGlobal'
instance MonadResource F where
liftResourceT r = do
i <- asksFrame (snd . fResources)
liftIO $ runInternalState r i
-- | Allocate a resource in the 'V' scope
allocateGlobal :: F a -> (a -> F ()) -> F (ReleaseKey, a)
allocateGlobal create destroy = do
createIO <- toIO create
run <- askRunInIO
F $ allocate createIO (run . destroy)
-- | c.f. 'bracket' and 'bracket_'
allocateGlobal_ :: F a -> F () -> F (ReleaseKey, a)
allocateGlobal_ create destroy = allocateGlobal create (\_ -> destroy)
-- | Run a frame
--
-- The frame will be retired by another thread when all the fences added by
-- 'queueSubmitFrame' have been signaled.
runFrame :: Frame -> F a -> V a
runFrame f (F r) = runReaderT r f `finally` do
  -- Fences registered via 'queueSubmitFrame' while this frame ran.
  fences <- liftIO $ readIORef (fGPUWork f)
  -- Wait in another thread for this frame to be presented before retiring
  spawn_ $ do
    -- Vulkan fence waits take a nanosecond timeout, so this waits up to
    -- one second (1e9 ns) for the frame's GPU work to complete.
    waitForFencesSafe' fences True 1e9 >>= \case
      TIMEOUT -> do
        -- Give the frame one last chance to complete,
        -- It could be that the program was suspended during the preceding
        -- wait causing it to timeout, this will check if it actually
        -- finished.
        waitForFencesSafe' fences True 0 >>= \case
          TIMEOUT ->
            throwString "Timed out waiting for frame to finish on the GPU"
          _ -> pure ()
      _ -> pure ()
    -- GPU work is done: recycle this frame's command pool, signal any
    -- waiters that this frame was presented, then free its resources.
    commandPool <- getCommandPool (commandPoolIndex f)
    resetCommandPool' commandPool zero
    putMVar (fCurrentPresented f) ()
    retireFrame f
-- | Fetch the whole current 'Frame' from the reader environment.
askFrame :: F Frame
askFrame = F ask
-- | Project a component out of the current 'Frame'.
asksFrame :: (Frame -> a) -> F a
asksFrame = F . asks
-- | Get a fresh command pool for this frame, it will be reset upon frame
-- retirement
frameCommandPool :: F CommandPool
frameCommandPool = do
poolIndex <- commandPoolIndex <$> askFrame
F . lift . getCommandPool $ fromIntegral poolIndex
commandPoolIndex :: Frame -> Int
commandPoolIndex Frame {..} = fromIntegral fIndex `mod` numConcurrentFrames
-- | Free frame resources, the frame must have finished GPU execution first.
retireFrame :: MonadIO m => Frame -> m ()
retireFrame Frame {..} = release (fst fResources)
-- | 'queueSubmit' and add wait for the 'Fence' before retiring the frame.
queueSubmitFrame :: Queue -> Vector (SomeStruct SubmitInfo) -> Fence -> F ()
queueSubmitFrame q ss fence = do
  queueSubmit q ss fence
  -- Record the fence in 'fGPUWork' so that 'runFrame' waits for this
  -- submission to finish before retiring the frame's resources.
  gpuWork <- asksFrame fGPUWork
  liftIO $ atomicModifyIORef' gpuWork ((, ()) . cons fence)
-- | Make sure a reference is held until this frame is retired
frameRefCount :: RefCounted -> F ()
frameRefCount = resourceTRefCount
| expipiplus1/vulkan | examples/resize/Frame.hs | bsd-3-clause | 6,655 | 0 | 22 | 2,368 | 1,152 | 635 | 517 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeOperators #-}
module Api
( app
, generateJavaScript
) where
import Control.Monad.Except
import Control.Monad.Reader (ReaderT, runReaderT)
import Control.Monad.Reader.Class
import Data.Int (Int64)
import Database.Persist.Postgresql (Entity (..), fromSqlKey, insert,
selectFirst, selectList, (==.))
import Network.Wai (Application)
import Network.Wai.Middleware.Cors (simpleCors)
import Servant
import Servant.JS (vanillaJS, writeJSForAPI)
import Config (App (..), Config (..))
import Models
import Api.User
import Api.Appliance
import Api.Device
import Api.Environment
type FisbangAPI = UserAPI :<|> ApplianceAPI :<|> DeviceAPI :<|> EnvironmentAPI
fisbangAPIServer = userServer :<|> applianceServer :<|> deviceServer :<|> environmentServer
-- | This is the function we export to run our 'FisbangAPI'. Given
-- a 'Config', we return a WAI 'Application' which any WAI compliant server
-- can run.
fisbangApi :: Config -> Application
fisbangApi cfg = serve (Proxy :: Proxy FisbangAPI) (appToServer cfg)
-- | This functions tells Servant how to run the 'App' monad with our
-- 'server' function.
appToServer :: Config -> Server FisbangAPI
appToServer cfg = enter (convertApp cfg) fisbangAPIServer
-- | This function converts our 'App' monad into the @ExceptT ServantErr
-- IO@ monad that Servant's 'enter' function needs in order to run the
-- application. The ':~>' type is a natural transformation, or, in
-- non-category theory terms, a function that converts two type
-- constructors without looking at the values in the types.
convertApp :: Config -> App :~> ExceptT ServantErr IO
convertApp cfg = Nat (flip runReaderT cfg . runApp)
-- | Since we also want to provide a minimal front end, we need to give
-- Servant a way to serve a directory with HTML and JavaScript. This
-- function creates a WAI application that just serves the files out of the
-- given directory.
files :: Application
files = serveDirectory "assets"
-- | Just like a normal API type, we can use the ':<|>' combinator to unify
-- two different APIs and applications. This is a powerful tool for code
-- reuse and abstraction! We need to put the 'Raw' endpoint last, since it
-- always succeeds.
type FisbangAPP = FisbangAPI :<|> Raw
fisbangApp :: Proxy FisbangAPP
fisbangApp = Proxy
-- | Finally, this function takes a configuration and runs our 'FisbangApp'
-- alongside the 'Raw' endpoint that serves all of our files.
app :: Config -> Application
app cfg =
simpleCors (serve fisbangApp (appToServer cfg :<|> files))
-- | Generates JavaScript to query the API.
generateJavaScript :: IO ()
generateJavaScript =
writeJSForAPI (Proxy :: Proxy FisbangAPI) vanillaJS "./assets/api.js"
| Fisbang/fisbang-api | src/Api.hs | bsd-3-clause | 3,061 | 0 | 10 | 725 | 434 | 257 | 177 | 41 | 1 |
module Lang.LF.Internal.Hyps where
import Data.Set (Set)
import qualified Data.Set as Set
import Lang.LF.Internal.Model
-- | A sequence of hypotheses, giving types to the free variables in Ξ³.
data LFHyps (f :: Ctx * -> SORT -> *) (Ξ³ :: Ctx *) where
HNil :: LFHyps f E
HCons :: !(LFHyps f Ξ³) -> !Quant -> !String -> !(f Ξ³ TYPE) -> LFHyps f (Ξ³ ::> b)
-- | Produce a variant of the base name @nm@ that does not occur in the
-- given set of taken names. The bare name is tried first, then @nm0@,
-- @nm1@, ... until a free candidate is found.
getName :: Set String  -- ^ names already in use
        -> String      -- ^ preferred base name
        -> String
getName taken nm = tryName (nm : [ nm ++ show i | i <- [(0 :: Integer) ..] ])
 where
  -- The candidate list is infinite, so the search always terminates with
  -- the first candidate not present in 'taken'; the [] clause is
  -- unreachable but kept total with a descriptive error.
  tryName (x : xs)
    | Set.member x taken = tryName xs
    | otherwise          = x
  tryName [] =
    error "Lang.LF.Internal.Hyps.getName: impossible, candidate list is infinite"
-- | Synonym for 'getName': choose a variant of the given base name that
-- is not already present in the supplied set of used names.
freshName :: Set String
          -> String
          -> String
freshName = getName
lookupHyp :: LFModel f m
=> LFHyps f Ξ³
-> Var Ξ³
-> Weakening Ξ³ Ξ³'
-> (String, Quant, f Ξ³' TYPE)
lookupHyp (HCons _ q nm a) B w =
(nm, q, weaken (WeakRight w) a)
lookupHyp (HCons h _ _ _) (F x) w =
lookupHyp h x (WeakRight w)
lookupHyp HNil _ _ = error "impossible"
lookupVar :: LFModel f m
=> LFHyps f Ξ³
-> Var Ξ³
-> (String, Quant, f Ξ³ TYPE)
lookupVar h v = lookupHyp h v WeakRefl
-- | Extend a hypothesis list with a new binding. Note the argument order:
-- the name precedes the quantifier here, while 'HCons' takes the
-- quantifier before the name.
extendHyps :: LFHyps f Ξ³ -> String -> Quant -> f Ξ³ TYPE -> LFHyps f (Ξ³ ::> b)
extendHyps h nm q a = HCons h q nm a
{-
inEmptyCtx :: ((?nms :: Set String, ?hyps :: Hyps f E) => a)
-> a
inEmptyCtx f =
let ?nms = Set.empty in
let ?hyps = HNil in
f
extendCtx :: (?nms :: Set String, ?hyps :: Hyps f Ξ³)
=> String
-> Quant
-> f Ξ³ TYPE
-> ((?nms :: Set String, ?hyps :: Hyps f (Ξ³::>b)) => x)
-> x
extendCtx nm q a f =
let nm' = freshName nm in
let ?nms = Set.insert nm' ?nms in
let ?hyps = extendHyps ?hyps nm' q a in
f
-}
| robdockins/canonical-lf | src/Lang/LF/Internal/Hyps.hs | bsd-3-clause | 1,762 | 0 | 12 | 550 | 560 | 285 | 275 | -1 | -1 |
-- Upper bound for the hypotenuse search.
n = 200

-- All Pythagorean triples (a, b, c) with a <= b <= c <= n, found by
-- brute-force enumeration.
result =
  [ (a, b, c)
  | c <- [1 .. n]
  , b <- [1 .. c]
  , a <- [1 .. b]
  , a * a + b * b == c * c
  ]

main = print result
| kite-lang/kite | benchmarking/pythagoras.hs | mit | 109 | 0 | 11 | 27 | 97 | 53 | 44 | 3 | 1 |
-- https://www.codewars.com/kata/escape-the-mines-or-die
module EscapeTheMinesOrDie where
import Control.Arrow (first, second)
import Data.Function (on)
import Data.List (minimumBy)
import Data.Maybe (catMaybes)
import Data.Set (Set, empty, insert, member)
type XY = (Int, Int)
data Move = U | D | L | R
deriving (Eq, Show)
-- | Safe list indexing: @xs !? i@ is @Just@ the element at position @i@,
-- or @Nothing@ when @i@ is negative or beyond the end of the list.
(!?) :: [a] -> Int -> Maybe a
xs !? i
  | i < 0     = Nothing
  | otherwise = go xs i
 where
  go []       _ = Nothing
  go (y : _)  0 = Just y
  go (_ : ys) k = go ys (k - 1)
-- | Safe two-dimensional indexing into a list of rows: first select row
-- @i@, then column @j@ within that row.
(!!?) :: [[a]] -> XY -> Maybe a
rows !!? (i, j) = rows !? i >>= \row -> row !? j
-- | True when position @i@ is inside the map and marked passable.
living :: [[Bool]] -> XY -> Bool
living m i =
  case m !!? i of
    Just True -> True
    _         -> False
-- | Wrap a list in 'Just', or yield 'Nothing' for the empty list.
ensureNonEmpty :: [a] -> Maybe [a]
ensureNonEmpty xs
  | null xs   = Nothing
  | otherwise = Just xs
-- | Depth-first search for a sequence of moves from @i@ to @o@ across
-- living (passable) cells, avoiding the visited set @v@ on the current
-- path. All viable continuations are explored and the shortest resulting
-- move list is kept; 'Nothing' means @o@ is unreachable from @i@.
dfs :: Set XY -> [[Bool]] -> XY -> XY -> Maybe [Move]
-- Current cell is outside the map or not passable: dead end.
dfs _ m i _ | not $ living m i = Nothing
-- Reached the target: no further moves needed.
dfs _ _ i o | i == o = Just []
-- Already visited on the current path: prune to avoid cycles.
dfs v _ i _ | member i v = Nothing
dfs v m i o = minMoves <$> moveses
  where
    -- Keep the shortest of the successful continuations.
    minMoves = minimumBy $ on compare length
    -- Try all four directions; 'Nothing' when every direction fails.
    moveses = ensureNonEmpty . catMaybes $ uncurry explore <$> mvs
    -- Recurse with the current cell marked visited, prepending the move.
    explore mv step = (mv :) <$> dfs (insert i v) m (step i) o
    -- U/D adjust the second coordinate, L/R the first.
    mvs = [(U, second pred), (D, second succ), (L, first pred), (R, first succ)]
-- | Find a move sequence through the mine from start to exit, beginning
-- the search with an empty visited set.
solve :: [[Bool]] -> XY -> XY -> Maybe [Move]
solve = dfs empty
| airtial/Codegames | codewars/escape-the-mines-or-die.hs | gpl-2.0 | 1,213 | 0 | 10 | 282 | 646 | 343 | 303 | 32 | 1 |
{-# LANGUAGE CPP #-}
{- |
Module : ./CASL/CompositionTable/ModelChecker.hs
Description : checks validity of models regarding a composition table
Copyright : (c) Uni Bremen 2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable
checks validity of models regarding a composition table
-}
module CASL.CompositionTable.ModelChecker (modelCheck) where
import CASL.CompositionTable.CompositionTable
import CASL.CompositionTable.ModelTable
import CASL.CompositionTable.ModelFormula
import CASL.AS_Basic_CASL
import CASL.Fold
import CASL.Sign
import CASL.ToDoc
import CASL.Logic_CASL
import Logic.Logic
import Common.AS_Annotation
import Common.Result
import Common.Id
import qualified Common.Lib.MapSet as MapSet
import Common.Utils
import qualified Data.Set as Set
import qualified Data.IntSet as IntSet
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
import Data.Function
import Data.Maybe
import Data.List
-- | Check every named sentence of the theory against the composition
-- table, using operation annotations extracted from the signature.
modelCheck :: Int -> (Sign () (), [Named (FORMULA ())])
  -> Table2 -> Result ()
modelCheck c (sign, sent) t = do
  -- Map each annotated operation symbol to its annotation word.
  let sm = Map.fromList $ extractAnnotations (annoMap sign)
  mapM_ (modelCheckTest c sm sign t) sent
-- | Collect the (operation symbol, annotation word) pairs from a symbol
-- annotation map, dropping symbols without a usable annotation.
extractAnnotations :: MapSet.MapSet Symbol Annotation -> [(OP_SYMB, String)]
extractAnnotations = mapMaybe extractAnnotation . MapSet.toList
extractAnnotation :: (Symbol, [Annotation]) -> Maybe (OP_SYMB, String)
extractAnnotation (Symbol symbname symbtype, set) = case symbtype of
OpAsItemType _ -> Just (createOpSymb symbname symbtype, getAnno set)
_ -> Nothing
createOpSymb :: Id -> SymbType -> OP_SYMB
createOpSymb i st = case st of
OpAsItemType ty -> Qual_op_name i (toOP_TYPE ty) nullRange
_ -> error "CASL.CompositionTable.ModelChecker.createOpSymb"
-- | Project the annotation word out of a singleton annotation list; any
-- other number of annotations yields the string "failure".
getAnno :: [Annotation] -> String
getAnno [a] = getAnnoAux a
getAnno _ = "failure"
getAnnoAux :: Annotation -> String
getAnnoAux a = case a of
Unparsed_anno (Annote_word word) _ _ -> word
_ -> ""
modelCheckTest :: Int -> Map.Map OP_SYMB String -> Sign () () -> Table2
-> Named (FORMULA ()) -> Result ()
modelCheckTest c symbs sign t x = let
(n, d) = modelCheckTest1 c (sentence x) t symbs
fstr = shows (printTheoryFormula (mapNamed (simplify_sen CASL sign) x)) "\n"
in if null d
then hint () ("Formula succeeded:\n" ++ fstr) nullRange
else warning () ("Formula failed:\n" ++ fstr ++ show n
++ " counter example" ++ (if n > 1 then "s" else "")
++ ":\n" ++ intercalate "\n" d) nullRange
-- | Partial lookup in an 'IntMap.IntMap'; the key is expected to be
-- present, and a missing key is a programming error.
ifind :: Int -> IntMap.IntMap a -> a
ifind k m = IntMap.findWithDefault (error "CompositionTable.ifind") k m
modelCheckTest1 :: Int -> FORMULA () -> Table2 -> Map.Map OP_SYMB String
-> (Int, [String])
modelCheckTest1 c sen t symbs = let
vs = number $ Set.toList $ vars sen
vm = Map.fromList vs
rm = IntMap.fromList $ map (\ (a, b) -> (b, show a)) vs
nf = foldFormula (fromCASL symbs vm) sen
in case nf of
Quant quant decl f -> calculateQuantification (Just c) (`ifind` rm)
quant f t $ generateVariableAssignments decl t
_ -> if calculateFormula t IntMap.empty nf then
(0, []) else (1, ["formula as given above."])
calculateQuantification :: Maybe Int -> (Int -> String) -> QUANTIFIER -> Form
-> Table2 -> [Assignment] -> (Int, [String])
calculateQuantification mc si quant f t@(Table2 _ _ l _ _ _) vs =
let calc ass = calculateFormula t ass f
nD = showAssignments si l
in case quant of
Universal -> case mc of
Just c -> let
fall (c0, ds) ass = let
res = calc ass
nC0 = if res then c0 else c0 + 1
nDs = if res || nC0 > c then ds else nD ass : ds
in seq (seq nC0 nDs) (nC0, nDs)
in foldl' fall (0, []) vs
Nothing -> foldr (\ ass p@(_, ds) ->
if null ds then if calc ass then p else (1, [nD ass]) else p)
(0, []) vs
Existential -> if any calc vs then (0, []) else
(1, ["Existential not fulfilled"])
Unique_existential -> let
funi ass ds = case ds of
_ : _ : _ -> ds
_ | calc ass -> nD ass : ds
_ -> ds
in case foldr funi [] vs of
[] -> (1, ["Unique Existential not fulfilled"])
[_] -> (0, [])
ds -> (1, ds)
type Assignment = IntMap.IntMap Int
showAssignments :: (Int -> String) -> IntMap.IntMap Baserel -> Assignment
-> String
showAssignments si l xs =
'[' : intercalate ", " (map (showSingleAssignment si l) $ IntMap.toList xs)
++ "]"
showSingleAssignment :: (Int -> String) -> IntMap.IntMap Baserel -> (Int, Int)
-> String
showSingleAssignment si m (v, i) = si v ++ "->" ++ case ifind i m of
Baserel b -> b
calculateTerm :: Assignment -> Table2 -> Term -> BSet
calculateTerm ass t trm = case trm of
Var var -> getBaseRelForVariable var ass
Appl opSymb terms -> applyOperation opSymb terms t ass
Cond t1 fo t2 -> on (\ a b -> if calculateFormula t ass fo then a else b)
(calculateTerm ass t) t1 t2
applyOperation :: Op -> [Term] -> Table2 -> Assignment -> BSet
applyOperation ra ts table@(Table2 _ id_ _ baserels cmpentries convtbl) ass =
let err = error "CompositionTable.applyOperator"
in case ts of
ft : rt -> let r1 = calculateTerm ass table ft
in case rt of
[sd] -> case ra of
Comp -> calculateComposition cmpentries r1
Inter -> IntSet.intersection r1
Union -> IntSet.union r1
_ -> err
$ calculateTerm ass table sd
[] -> let (conv, inv, shortc, hom) = convtbl in case ra of
Compl -> IntSet.difference baserels
Conv -> calculateConverse conv
Shortcut -> calculateConverse shortc
Inv -> calculateConverse inv
Home -> calculateConverse hom
_ -> err
$ r1
_ -> err
[] -> case ra of
One -> baserels
Iden -> IntSet.singleton id_
Zero -> IntSet.empty
_ -> err
intSetFold :: (Int -> b -> b) -> b -> IntSet.IntSet -> b
intSetFold =
#if __GLASGOW_HASKELL__ < 704
IntSet.fold
#else
IntSet.foldr'
#endif
calculateComposition :: CmpTbl -> BSet -> BSet -> BSet
calculateComposition entries rels1 rels2 = intSetFold
(\ s1 t -> case ifind s1 entries of
m1 -> intSetFold
(\ s2 -> case ifind s2 m1 of
m2 -> IntSet.union m2)
t rels2)
IntSet.empty rels1
calculateConverse :: ConTable -> BSet -> BSet
calculateConverse t =
IntSet.unions . map (`ifind` t)
. IntSet.toList
getBaseRelForVariable :: Int -> Assignment -> BSet
getBaseRelForVariable var = IntSet.singleton . ifind var
-- | Evaluate a formula to a 'Bool' under the given variable assignment.
calculateFormula :: Table2 -> Assignment -> Form -> Bool
calculateFormula t varass qf = case qf of
  -- A quantified subformula holds iff the quantification check yields no
  -- counter examples (an empty description list).
  Quant q vardecls f ->
    null . snd . calculateQuantification Nothing show q f t
    $ appendVariableAssignments varass vardecls t
  -- Junct True is conjunction ('all'), Junct False is disjunction ('any').
  Junct j formulas -> (if j then all else any)
    (calculateFormula t varass) formulas
  -- Boolean implication is (<=) on Bool (False <= x always holds);
  -- otherwise the connective is equivalence (==).
  Impl isImpl f1 f2 -> on (if isImpl then (<=) else (==))
    (calculateFormula t varass) f1 f2
  Neg f -> not $ calculateFormula t varass f
  Const b -> b
  -- Term equality: both sides evaluate to equal base-relation sets.
  Eq term1 term2 -> on (==) (calculateTerm varass t) term1 term2
generateVariableAssignments :: [Int] -> Table2 -> [Assignment]
generateVariableAssignments vs =
gVAs vs . IntSet.toList . getBaseRelations
-- | Enumerate every total assignment of the given variables to the given
-- base relations, i.e. the cartesian product of per-variable choices.
gVAs :: [Int] -> [Int] -> [Assignment]
gVAs [] _ = [IntMap.empty]
gVAs (v : vs) brs = [ IntMap.insert v b r | b <- brs, r <- rest ]
  where
    -- Shared tail so the sub-product is computed only once.
    rest = gVAs vs brs
getBaseRelations :: Table2 -> BSet
getBaseRelations (Table2 _ _ _ br _ _) = br
appendVariableAssignments :: Assignment -> [Int] -> Table2 -> [Assignment]
appendVariableAssignments vm decls t =
map (`IntMap.union` vm) (generateVariableAssignments decls t)
| spechub/Hets | CASL/CompositionTable/ModelChecker.hs | gpl-2.0 | 8,008 | 129 | 20 | 2,104 | 2,685 | 1,422 | 1,263 | 181 | 15 |
{-# LANGUAGE PackageImports #-}
import "Carnap-Server" Application (develMain)
import Prelude (IO)
-- | Entry point for the development server; delegates to 'develMain'
-- from the Carnap-Server application module.
main :: IO ()
main = develMain
| gleachkr/Carnap | Carnap-Server/app/devel.hs | gpl-3.0 | 131 | 0 | 6 | 19 | 34 | 20 | 14 | 5 | 1 |
type A a = a a
| roberth/uu-helium | test/kinderrors/KindError6.hs | gpl-3.0 | 15 | 0 | 5 | 6 | 11 | 6 | 5 | 1 | 0 |
{-|
Module : ParseSndOrderRule
Description : Imports 2-rules from .ggx to verigraph
Stability : experimental
AGG implements first-order rules in the SPO approach, to model 2-rules
(second-order rules in the DPO approach) a translation is needed.
AGG model: a rule is two graphs and a partial mapping between them.
@
N
β²
β
L ββββΆ R
@
Verigraph second-order model:
@
nl fl fr
NlβββββββLaββββββ\<Lb\>ββββββΆLc
β² β² β² β²
nalβ laβ lbβ lcβ
β β β β
^ nk ^ fk ^ gk ^
NkβββββββKaββββββ\<Kb\>ββββββΆKc
v v v v
narβ raβ rbβ rcβ
β β β β
v nr βΌ fr βΌ gr βΌ
NrβββββββRaββββββ\<Rb\>ββββββΆRc
@
The second-order rules in AGG must be represented as two first-order rules with some aditional maps.
This maps bind two graph in different rules, to represent it we use the object names in AGG.
The object names map must bind the graphs La to Ra and Lc to Rc,
if there a NAC these maps will be needes: Nl to Nr, La to Nl and Ra to Nr.
Besides that, rule names in agg must follow this form: 2rule_(left|right|nacid)_(ruleName)
The translation from first-order rules in the SPO to DPO is straightforward,
and additionally with object name maps, all second-order rule can be instantiated.
-}
module XML.ParseSndOrderRule
( parseSndOrderRules
, getLeftObjNameMapping
, getRightObjNameMapping
, getObjectNacNameMorphism
, getObjectNacNameMorphismNodes
, getObjectNacNameMorphismEdges
, getObjectNameMorphism
) where
import Data.Char (toLower)
import Data.Function (on)
import Data.List (find, groupBy, intercalate, sortBy, sortOn, (\\))
import Data.Maybe (fromMaybe, mapMaybe)
import Abstract.Category
import Category.Graph ()
import Category.TypedGraph ()
import qualified Data.Graphs as G
import Data.Graphs.Morphism as GM
import Data.TypedGraph
import Data.TypedGraph.Morphism as TGM
import Util.List
import XML.ParsedTypes
-- | Gets the object name map between the left of two rules
getLeftObjNameMapping :: SndOrderRuleSide -> SndOrderRuleSide -> [Mapping]
getLeftObjNameMapping (_,_,((_,left,_,_),_)) (_,_,((_,right,_,_),_)) = getObjNameMapping left right
-- | Gets the object name map between the right of two rules
getRightObjNameMapping :: SndOrderRuleSide -> SndOrderRuleSide -> [Mapping]
getRightObjNameMapping (_,_,((_,_,left,_),_)) (_,_,((_,_,right,_),_)) = getObjNameMapping left right
-- | Gets the object name map between two ParsedTypedGraph
getObjNameMapping :: ParsedTypedGraph -> ParsedTypedGraph -> [Mapping]
getObjNameMapping (_,nodesL,edgesL) (_,nodesR,edgesR) = mapNodes ++ mapEdges
where
f id (Just n) = Just (id,n)
f _ _ = Nothing
fNodes (id,m,_) = f id m
fEdges (id,m,_,_,_) = f id m
nodesLMap = mapMaybe fNodes nodesL
nodesRMap = mapMaybe fNodes nodesR
edgesLMap = mapMaybe fEdges edgesL
edgesRMap = mapMaybe fEdges edgesR
getMap f = mapMaybe
(\(id,n) ->
case find (\(_,b) -> n == b) f of
Just (x,_) -> Just (x, Nothing, id)
_ -> Nothing)
nonMono = concatMap
(\(id,objName) ->
map
(\name -> (id,name))
(split "|" objName)
)
mapNodes = getMap (nonMono nodesRMap) nodesLMap
mapEdges = getMap (nonMono edgesRMap) edgesLMap
-- | Receives all parsed 2-rules in the agg format (first-order rule with object name maps)
-- and converts to second-order rules on verigraph
parseSndOrderRules :: [RuleWithNacs] -> [(SndOrderRuleSide,SndOrderRuleSide,[SndOrderRuleSide])]
parseSndOrderRules = groupRules . map getSndOrderRuleSide
-- | Parse SndOrderRule names in the form: 2rule_left_ruleName or 2rule_nacName_ruleName
getSndOrderRuleSide :: RuleWithNacs -> SndOrderRuleSide
getSndOrderRuleSide rule@((name,_,_,_),_) = (side, ruleName, rule)
where
splitted = split "_" name
side = if length splitted < 3
then error "Error parsing 2rule name"
else map toLower $ splitted !! 1
ruleName = intercalate "_" (tail (tail splitted))
-- put together rules in the form (left,right,[nacs])
groupRules :: [SndOrderRuleSide] -> [(SndOrderRuleSide,SndOrderRuleSide,[SndOrderRuleSide])]
groupRules rules =
map
(\list ->
let left = getLeft list
right = getRight list
remainList = list \\ [left,right]
in (left,right,remainList)
) grouped
where
side (x,_,_) = x
name (_,x,_) = x
sorted = sortOn name rules
grouped = groupBy ((==) `on` name) sorted
getLeft list = fromMaybe (error "Second-order rule without left") (findSide "left" list)
getRight list = fromMaybe (error "Second-order rule without right") (findSide "right" list)
findSide str = find (\x -> side x == str)
-- TODO: replace applyNodeUnsafe for getNodeType?
-- | Given a morphism from some graph in the rule left to nac extracts the mapping
getObjectNacNameMorph :: GraphMorphism a b -> ([Mapping], [Mapping])
getObjectNacNameMorph m = (nodesMap m, edgesMap m)
where
adjustNonMono = parseNonMonoObjNames . group . sort
nodesMap = adjustNonMono . getMap GM.applyNodeIdUnsafe . G.nodeIds . domain
edgesMap = adjustNonMono . getMap GM.applyEdgeIdUnsafe . G.edgeIds . domain
getMap f = map (\e -> (show (f m e), Nothing, show e))
group = groupBy (\(x,_,_) (y,_,_) -> x == y)
sort = sortBy (\(x,_,_) (y,_,_) -> compare x y)
-- | Given a morphism from some graph in the rule left to nac extracts the mapping
getObjectNacNameMorphism :: GraphMorphism a b -> [Mapping]
getObjectNacNameMorphism m = nods ++ edgs
where
(nods,edgs) = getObjectNacNameMorph m
-- | Given a morphism from some graph in the rule left to nac extracts the nodes mapping
getObjectNacNameMorphismNodes :: GraphMorphism a b -> [Mapping]
getObjectNacNameMorphismNodes m = fst (getObjectNacNameMorph m)
-- | Given a morphism from some graph in the rule left to nac extracts the edges mapping
getObjectNacNameMorphismEdges :: GraphMorphism a b -> [Mapping]
getObjectNacNameMorphismEdges m = snd (getObjectNacNameMorph m)
-- | Glues the non mono maps
parseNonMonoObjNames :: [[Mapping]] -> [Mapping]
parseNonMonoObjNames [] = []
parseNonMonoObjNames (x:xs) = (a,b,newObjName) : parseNonMonoObjNames xs
  where
    -- NOTE(review): 'head' is partial; safe here only because groups come
    -- from 'groupBy', which never yields an empty group -- confirm all
    -- callers uphold this.
    (a,b,_) = head x
    -- Collapse every object name in the group into one "|"-separated name.
    allObjNames = map (\(_,_,y) -> y) x
    newObjName = intercalate "|" allObjNames
-- | Given two morphisms with the same domain, maps the codomain of both according to the interface (domain graph)
-- Used to translate DPO in verigraph to SPO in ggx
getObjectNameMorphism :: TypedGraphMorphism a b -> TypedGraphMorphism a b -> [Mapping]
getObjectNameMorphism left right = nodesMap ++ edgesMap
where
nodesMap = getMap TGM.applyNodeIdUnsafe (nodeIds $ domain left)
edgesMap = getMap TGM.applyEdgeIdUnsafe (edgeIds $ domain left)
getMap f = map (\e -> (show (f right e), Nothing, show (f left e)))
| rodrigo-machado/verigraph | src/library/XML/ParseSndOrderRule.hs | gpl-3.0 | 7,447 | 0 | 14 | 1,817 | 1,671 | 940 | 731 | 98 | 3 |
-- pick.hs -- picks the n-th value in a LeanCheck enumeration
--
-- Copyright (c) 2020 Rudy Matela.
-- Distributed under the 3-Clause BSD licence (see the file LICENSE).
import Test.LeanCheck
import Test.LeanCheck.Utils
import Test.LeanCheck.Function ()
import System.Environment
-- | Placeholder value used only as a type proxy for 'put'; never evaluated.
u :: a
u = undefined
-- | Print a short usage message for the program.
usage :: IO ()
usage = mapM_ putStrLn
  [ "usage:"
  , "  pick <Type> <n>"
  , ""
  , "example:"
  , "  pick Int 12"
  ]
-- | Entry point: expects exactly two arguments, a type name and an
-- index, and prints the corresponding enumerated value. Any other
-- invocation -- including a non-numeric index -- prints the usage text
-- instead of crashing inside the partial 'read'.
main :: IO ()
main = do
  as <- getArgs
  case as of
    [t, n] | [(i, "")] <- reads n -> pick t i
    _ -> usage
put :: (Listable a, Show a) => String -> Int -> a -> IO ()
put t n u = putStrLn
$ "list :: [" ++ t ++ "] !! " ++ show n ++ " = "
++ show ((list `asTypeOf` [u]) !! n)
pick :: String -> Int -> IO ()
pick t n = case t of
"()" -> put t n (u :: () )
"Int" -> put t n (u :: Int )
"Nat" -> put t n (u :: Nat )
"Integer" -> put t n (u :: Integer )
"Bool" -> put t n (u :: Bool )
"Char" -> put t n (u :: Char )
"Float" -> put t n (u :: Float )
"Double" -> put t n (u :: Double )
"Rational" -> put t n (u :: Rational )
-- lists
"[()]" -> put t n (u :: [()] )
"[Int]" -> put t n (u :: [Int] )
"[Nat]" -> put t n (u :: [Nat] )
"[Integer]" -> put t n (u :: [Integer] )
"[Bool]" -> put t n (u :: [Bool] )
"[Char]" -> put t n (u :: [Char] )
"String" -> put t n (u :: String )
-- pairs
"((),())" -> put t n (u :: ((),()) )
"(Int,Int)" -> put t n (u :: (Int,Int) )
"(Nat,Nat)" -> put t n (u :: (Nat,Nat) )
"(Bool,Bool)" -> put t n (u :: (Bool,Bool) )
"(Bool,Int)" -> put t n (u :: (Bool,Int) )
"(Int,Bool)" -> put t n (u :: (Int,Bool) )
"(Int,Int,Int)" -> put t n (u :: (Int,Int,Int) )
"(Nat,Nat,Nat)" -> put t n (u :: (Nat,Nat,Nat) )
-- lists & pairs
"[((),())]" -> put t n (u :: [((),())] )
"([()],[()])" -> put t n (u :: ([()],[()]) )
"([Bool],[Bool])" -> put t n (u :: ([Bool],[Bool]) )
"([Int],[Int])" -> put t n (u :: ([Int],[Int]) )
-- lists of lists
"[[()]]" -> put t n (u :: [[()]] )
"[[Int]]" -> put t n (u :: [[Int]] )
"[[Nat]]" -> put t n (u :: [[Nat]] )
"[[Integer]]" -> put t n (u :: [[Integer]] )
"[[Bool]]" -> put t n (u :: [[Bool]] )
"[[Char]]" -> put t n (u :: [[Char]] )
"[String]" -> put t n (u :: [String] )
-- lists of pairs
"[(Int,Int)]" -> put t n (u :: [(Int,Int)] )
-- unhandled
_ -> error $ "unknown/unhandled type `" ++ t ++ "'"
| rudymatela/llcheck | bench/pick.hs | bsd-3-clause | 3,233 | 0 | 12 | 1,482 | 1,175 | 644 | 531 | 62 | 37 |
{-# LANGUAGE FlexibleContexts #-}
import Test.Hspec
import Control.Monad.Except.Union
data MyError = MyError deriving (Show)
data Another = Another deriving (Show)
data Yet = Yet deriving (Show)
raising :: (Raises MyError s m) => m ()
raising = raise MyError
again :: (Raises MyError s m, Raises Another s m) => m ()
again = raising *> raise Another *> pure ()
discardAnother :: Another -> MyError
discardAnother = const MyError
catching :: Either MyError ()
catching = runExcept $ singleError $ raify (\Another -> MyError) again
catching2 :: Either Yet ()
catching2 = runExcept $ singleError $ reraise (\MyError -> Yet) raising
catching3 :: Either Yet ()
catching3 = runExcept $ singleError $ raify (\MyError -> Yet) $ reraise (\Another -> Yet) again
catching4 :: Either MyError ()
catching4 = runExcept $ singleError $ do
raising
raify (\Another -> MyError) again
spec :: Spec
spec = describe "a bunch of functions" $
it "compiles properly" $ True `shouldBe` True
main :: IO ()
main = hspec spec
| rimmington/redolent-except | test/Spec.hs | bsd-3-clause | 1,030 | 0 | 10 | 199 | 379 | 200 | 179 | 27 | 1 |
{-# LANGUAGE CPP, DeriveDataTypeable, IncoherentInstances #-}
{- | an 'Interval' is a pair of 'lower' and 'upper',
representing some interval in ordered system.
The lower bound is inclusive and the upper bound is exclusive:
('lower' <= x < 'upper') .
The intersection of two intervals are also interval
but the union of two intervals are not,
so 'Interval' constitute a 'PiSystem'.
-}
module Language.Paraiso.Interval (
Interval(..)) where
import Data.Typeable
import Language.Paraiso.PiSystem as S
import Prelude hiding (null)
data Interval a =
  -- | an empty interval.
  Empty |
  -- | a non-empty interval: contains every @x@ with
  -- @'lower' <= x < 'upper'@ (lower bound inclusive, upper exclusive).
  Interval{lower::a, upper::a}
    deriving (Eq, Show, Typeable)
-- | Intervals form a pi-system: closed under intersection, with a
-- distinguished empty element.
instance (Ord a) => PiSystem (Interval a) where
  empty = Empty
  -- An interval is degenerate (null) when its bounds have crossed.
  null Empty = True
  null (Interval l u) = l >= u
  -- Intersection takes the tighter of each pair of bounds and normalises
  -- a degenerate result back to 'Empty'.
  intersection Empty _ = Empty
  intersection _ Empty = Empty
  intersection (Interval l1 u1) (Interval l2 u2) =
    let l = max l1 l2; u = min u1 u2; ret = Interval l u in
    if null ret then Empty else ret
| nushio3/Paraiso | Language/Paraiso/Interval.hs | bsd-3-clause | 1,076 | 0 | 10 | 272 | 233 | 131 | 102 | 19 | 0 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : cryptol@galois.com
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 706
{-# LANGUAGE RecursiveDo #-}
#else
{-# LANGUAGE DoRec, RecursiveDo #-}
#endif
{-# LANGUAGE Safe #-}
module Cryptol.TypeCheck.Kind
( checkType
, checkSchema
, checkNewtype
, checkTySyn
) where
import qualified Cryptol.Parser.AST as P
import Cryptol.Parser.AST (Named(..))
import Cryptol.Parser.Position
import Cryptol.TypeCheck.AST
import Cryptol.TypeCheck.Monad hiding (withTParams)
import Cryptol.TypeCheck.Solve (simplifyAllConstraints
,checkTypeFunction)
import Cryptol.Utils.PP
import qualified Data.Map as Map
import Data.List(sortBy,groupBy)
import Data.Maybe(fromMaybe)
import Data.Function(on)
import Control.Monad(unless,forM)
-- | Check a type signature.
checkSchema :: P.Schema -> InferM (Schema, [Goal])
checkSchema (P.Forall xs ps t mb) =
do ((xs1,(ps1,t1)), gs) <-
collectGoals $
rng $ withTParams True xs $
do ps1 <- mapM checkProp ps
t1 <- doCheckType t (Just KType)
return (ps1,t1)
return (Forall xs1 ps1 t1, gs)
where
rng = case mb of
Nothing -> id
Just r -> inRange r
-- | Check a type-synonym declaration.
checkTySyn :: P.TySyn -> InferM TySyn
checkTySyn (P.TySyn x as t) =
do ((as1,t1),gs) <- collectGoals
$ inRange (srcRange x)
$ do r <- withTParams False as (doCheckType t Nothing)
simplifyAllConstraints
return r
return TySyn { tsName = thing x
, tsParams = as1
, tsConstraints = map goal gs
, tsDef = t1
}
-- | Kind-check a newtype declaration: every field must have kind 'KType'.
-- XXX: Do something with constraints.
checkNewtype :: P.Newtype -> InferM Newtype
checkNewtype (P.Newtype x as fs) =
  do ((params, fields), goals) <-
       collectGoals $ inRange (srcRange x) $
         do result <- withTParams False as $
                        forM fs $ \field ->
                          let n = name field
                          in kInRange (srcRange n) $
                             do fieldTy <- doCheckType (value field) (Just KType)
                                return (thing n, fieldTy)
            simplifyAllConstraints
            return result
     return Newtype { ntName        = thing x
                    , ntParams      = params
                    , ntConstraints = map goal goals
                    , ntFields      = fields
                    }
-- | Kind-check a closed type (no quantified variables), returning
-- only the checked type and discarding the (empty) parameter list.
checkType :: P.Type -> Maybe Kind -> InferM Type
checkType t k =
  snd `fmap` withTParams True [] (doCheckType t k)
{- | Check something with type parameters.
When we check things with type parameters (i.e., type schemas, and type
synonym declarations) we do kind inference based only on the immediately
visible body. Type parameters that are not mentioned in the body are
defaulted to kind 'KNum'. If this is not the desired behavior, programmers
may add explicit kind annotations on the type parameters.
Here is an example of how this may show up:
> f : {n}. [8] -> [8]
> f x = x + `n
Note that @n@ does not appear in the body of the schema, so we will
default it to 'KNum', which is the correct thing in this case.
To use such a function, we'd have to provide an explicit type application:
> f `{n = 3}
There are two reasons for this choice:
  1. It makes it possible to figure out if something is correct without
     having to look through arbitrary amounts of code.
2. It is a bit easier to implement, and it covers the large majority
of use cases, with a very small inconvenience (an explicit kind
annotation) in the rest.
-}
-- Implementation note: this uses RecursiveDo ('mdo').  The list 'ts'
-- is produced by the last statement of the block but consumed by the
-- first 'runKindM' call, via 'zip''; this terminates because 'zip''
-- pattern-matches lazily on its second argument (see the note below).
withTParams :: Bool -> [P.TParam] -> KindM a -> InferM ([TParam], a)
withTParams allowWildCards xs m =
  mdo mapM_ recordError duplicates
      (a, vars) <- runKindM allowWildCards (zip' xs ts) m
      (as, ts) <- unzip `fmap` mapM (newTP vars) xs
      return (as,a)
  where
  -- The kind of a parameter: either inferred from its uses in the
  -- body (recorded in 'vs'), or defaulted to 'KNum' with a warning.
  getKind vs tp =
    case Map.lookup (P.tpQName tp) vs of
      Just k -> return k
      Nothing -> do recordWarning (DefaultingKind tp P.KNum)
                    return KNum
  -- Allocate a fresh type parameter, together with the type variable
  -- that refers to it, at the kind determined by 'getKind'.
  newTP vs tp = do k <- getKind vs tp
                   n <- newTParam (Just (mkUnqual (P.tpName tp))) k
                   return (n, TVar (tpVar n))
  {- Note that we only zip based on the first argument.
  This is needed to make the monadic recursion work correctly,
  because the data dependency is only on the part that is known. -}
  zip' [] _ = []
  zip' (a:as) ~(t:ts) = (mkUnqual (P.tpName a), fmap cvtK (P.tpKind a), t) : zip' as ts
  cvtK P.KNum = KNum
  cvtK P.KType = KType
  -- One error per parameter name that is bound more than once.
  duplicates = [ RepeatedTyParams ds
               | ds@(_ : _ : _) <- groupBy ((==) `on` P.tpName)
                                 $ sortBy (compare `on` P.tpName) xs ]
-- | Kind-check the application of a built-in type constant to its
-- arguments, then reconcile the resulting kind with the expected one.
tcon :: TCon       -- ^ Type constant being applied
     -> [P.Type]   -- ^ Type parameters
     -> Maybe Kind -- ^ Expected kind
     -> KindM Type -- ^ Resulting type
tcon tc args expected =
  appTy args (kindOf tc) >>= \ (checkedArgs, resKind) ->
    checkKind (TCon tc checkedArgs) expected resKind
-- | Check a use of a type-synonym, newtype, or scoped-type variable.
tySyn :: Bool       -- ^ Should we check for scoped type vars.
      -> QName      -- ^ Name of type synonym
      -> [P.Type]   -- ^ Type synonym parameters
      -> Maybe Kind -- ^ Expected kind
      -> KindM Type -- ^ Resulting type
tySyn scoped x ts k =
  do mb <- kLookupTSyn x
     case mb of
       -- A known synonym: instantiate its definition and constraints
       -- with the (arity-corrected) arguments.
       Just (tysyn@(TySyn f as ps def)) ->
         do (ts1,k1) <- appTy ts (kindOf tysyn)
            ts2 <- checkParams as ts1
            let su = zip as ts2
            ps1 <- mapM (`kInstantiateT` su) ps
            kNewGoals (CtPartialTypeFun (UserTyFun f)) ps1
            t1 <- kInstantiateT def su
            checkKind (TUser x ts1 t1) k k1

       -- Maybe it is a newtype?
       Nothing ->
         do mbN <- kLookupNewtype x
            case mbN of
              Just nt ->
                do let tc = newtypeTyCon nt
                   (ts1,_) <- appTy ts (kindOf tc)
                   ts2 <- checkParams (ntParams nt) ts1
                   return (TCon tc ts2)

              -- Maybe it is a scoped type variable?
              Nothing
                | scoped -> kExistTVar x $ fromMaybe KNum k
                | otherwise ->
                    do kRecordError $ UndefinedTypeSynonym x
                       kNewType (text "type synonym" <+> pp x) $ fromMaybe KNum k
  where
  -- Ensure the synonym/newtype is applied to exactly as many parameters
  -- as it declares: pad with fresh types when too few were given, and
  -- truncate when too many were; either way an error is recorded.
  checkParams as ts1
    | paramHave == paramNeed = return ts1
    | paramHave < paramNeed =
       do kRecordError (TooFewTySynParams x (paramNeed-paramHave))
          -- Fixed typo in the user-visible message: "prameter" -> "parameter".
          let src = text "missing parameter of" <+> pp x
          fake <- mapM (kNewType src . kindOf . tpVar)
                       (drop paramHave as)
          return (ts1 ++ fake)
    | otherwise = do kRecordError (TooManyTySynParams x (paramHave-paramNeed))
                     return (take paramNeed ts1)
    where paramHave = length ts1
          paramNeed = length as
-- | Kind-check a type application: consume one argument per arrow in
-- the applied thing's kind, reporting an error for any leftovers.
appTy :: [P.Type]             -- ^ Parameters to type function
      -> Kind                 -- ^ Kind of type function
      -> KindM ([Type], Kind) -- ^ Validated parameters, resulting kind
appTy args kind =
  case (args, kind) of
    ([], _) -> return ([], kind)
    (t : more, kArg :-> kRes) ->
      do t' <- doCheckType t (Just kArg)
         (more', final) <- appTy more kRes
         return (t' : more', final)
    -- Arguments remain, but the kind is not an arrow.
    _ -> do kRecordError (TooManyTypeParams (length args) kind)
            return ([], kind)
-- | Validate a parsed type.
doCheckType :: P.Type -- ^ Type that needs to be checked
          -> Maybe Kind -- ^ Expected kind (if any)
          -> KindM Type -- ^ Checked type
doCheckType ty k =
  case ty of
    -- A wildcard becomes a fresh type; its kind comes from the
    -- expectation, or defaults to 'KNum' with a warning.
    P.TWild ->
      do ok <- kWildOK
         unless ok $ kRecordError UnexpectedTypeWildCard
         theKind <- case k of
                      Just k1 -> return k1
                      Nothing -> do kRecordWarning (DefaultingWildType P.KNum)
                                    return KNum
         kNewType (text "wildcard") theKind
    P.TFun t1 t2 -> tcon (TC TCFun) [t1,t2] k
    P.TSeq t1 t2 -> tcon (TC TCSeq) [t1,t2] k
    P.TBit -> tcon (TC TCBit) [] k
    P.TNum n -> tcon (TC (TCNum n)) [] k
    -- A character literal checks as the numeric type of its code point.
    P.TChar n -> tcon (TC (TCNum $ fromIntegral $ fromEnum n)) [] k
    P.TInf -> tcon (TC TCInf) [] k
    -- An application of a built-in type function.
    P.TApp tf ts ->
      do it <- tcon (TF tf) ts k
         -- Now check for additional well-formedness
         -- constraints.
         case it of
           TCon (TF f) ts' ->
             case checkTypeFunction f ts' of
                [] -> return ()
                ps -> kNewGoals (CtPartialTypeFun (BuiltInTyFun f)) ps
           _ -> return ()
         return it
    P.TTuple ts -> tcon (TC (TCTuple (length ts))) ts k
    -- Records always have kind 'KType'.
    P.TRecord fs -> do t1 <- TRec `fmap` mapM checkF fs
                       checkKind t1 k KType
    P.TLocated t r1 -> kInRange r1 $ doCheckType t k
    -- A bare name may be a type variable; an applied name must be a
    -- synonym or newtype.  The order of these alternatives matters.
    P.TUser x [] -> checkTyThing x k
    P.TUser x ts -> tySyn False x ts k
  where
  -- Check one record field; field types must have kind 'KType'.
  checkF f = do t <- kInRange (srcRange (name f))
                   $ doCheckType (value f) (Just KType)
                return (thing (name f), t)
-- | Resolve a name that may stand for a type variable or a type synonym.
checkTyThing :: QName      -- ^ Name of thing that needs checking
             -> Maybe Kind -- ^ Expected kind
             -> KindM Type
checkTyThing x k =
  do found <- kLookupTyVar x
     case found of
       -- Not a known variable: try synonyms/newtypes/scoped variables.
       Nothing -> tySyn True x [] k

       -- Bound in an enclosing scope: its kind is already fixed.
       Just (TOuterVar t) -> checkKind t k (kindOf t)

       -- Bound locally; its kind may still be undetermined.
       Just (TLocalVar t mbk)
         | Just k1 <- k ->
             case mbk of
               -- First use at a known kind: record it.
               Nothing -> kSetKind x k1 >> return t
               Just k2 -> checkKind t k k2
         | otherwise -> return t
-- | Validate a parsed proposition.  Every proposition is checked at
-- kind 'KProp'; location wrappers are peeled off first.
checkProp :: P.Prop     -- ^ Proposition that needs to be checked
          -> KindM Type -- ^ Checked representation
checkProp prop =
  case prop of
    P.CLocated p r1 -> kInRange r1 (checkProp p)
    P.CFin t1       -> tcon (PC PFin)   [t1]     (Just KProp)
    P.CEqual t1 t2  -> tcon (PC PEqual) [t1,t2]  (Just KProp)
    P.CGeq t1 t2    -> tcon (PC PGeq)   [t1,t2]  (Just KProp)
    P.CArith t1     -> tcon (PC PArith) [t1]     (Just KProp)
    P.CCmp t1       -> tcon (PC PCmp)   [t1]     (Just KProp)
-- | Check that a type has the expected kind.  On a mismatch, record a
-- kind error and produce a fresh type of the expected kind so that
-- checking can continue.
checkKind :: Type       -- ^ Kind-checked type
          -> Maybe Kind -- ^ Expected kind (if any)
          -> Kind       -- ^ Inferred kind
          -> KindM Type -- ^ A type consistent with expectations.
checkKind ty expected inferred =
  case expected of
    Just k1 | k1 /= inferred ->
      do kRecordError (KindMismatch k1 inferred)
         kNewType (text "kind error") k1
    _ -> return ty
| TomMD/cryptol | src/Cryptol/TypeCheck/Kind.hs | bsd-3-clause | 11,204 | 3 | 24 | 4,027 | 3,194 | 1,581 | 1,613 | 214 | 16 |
-----------------------------------------------------------------------------
-- |
-- Module : Control.Parallel
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable
--
-- Parallel Constructs
--
-----------------------------------------------------------------------------
module Control.Parallel (
par, seq -- re-exported
#if defined(__GRANSIM__)
, parGlobal, parLocal, parAt, parAtAbs, parAtRel, parAtForNow
#endif
) where
import Prelude
#ifdef __GLASGOW_HASKELL__
import qualified GHC.Conc ( par )
#endif
#if defined(__GRANSIM__)
import PrelBase
import PrelErr ( parError )
import PrelGHC ( parGlobal#, parLocal#, parAt#, parAtAbs#, parAtRel#, parAtForNow# )
-- GranSim-only spark combinators.  Each wraps the corresponding 'par*#'
-- primitive, passing through four (or five) integer placement/priority
-- attributes; the primitive's result is scrutinised so the spark is
-- forced to be created before the second argument is returned.
-- NOTE(review): the precise meaning of the w/g/s/p (and q) attributes
-- is defined by the GranSim runtime and is not visible here -- confirm
-- against the GranSim documentation before relying on them.
{-# INLINE parGlobal #-}
{-# INLINE parLocal #-}
{-# INLINE parAt #-}
{-# INLINE parAtAbs #-}
{-# INLINE parAtRel #-}
{-# INLINE parAtForNow #-}
parGlobal :: Int -> Int -> Int -> Int -> a -> b -> b
parLocal :: Int -> Int -> Int -> Int -> a -> b -> b
parAt :: Int -> Int -> Int -> Int -> a -> b -> c -> c
parAtAbs :: Int -> Int -> Int -> Int -> Int -> a -> b -> b
parAtRel :: Int -> Int -> Int -> Int -> Int -> a -> b -> b
parAtForNow :: Int -> Int -> Int -> Int -> a -> b -> c -> c
-- A 0# result from the primitive presumably signals a failed spark;
-- 'parError' is raised in that case.
parGlobal (I# w) (I# g) (I# s) (I# p) x y = case (parGlobal# x w g s p y) of { 0# -> parError; _ -> y }
parLocal (I# w) (I# g) (I# s) (I# p) x y = case (parLocal# x w g s p y) of { 0# -> parError; _ -> y }
parAt (I# w) (I# g) (I# s) (I# p) v x y = case (parAt# x v w g s p y) of { 0# -> parError; _ -> y }
parAtAbs (I# w) (I# g) (I# s) (I# p) (I# q) x y = case (parAtAbs# x q w g s p y) of { 0# -> parError; _ -> y }
parAtRel (I# w) (I# g) (I# s) (I# p) (I# q) x y = case (parAtRel# x q w g s p y) of { 0# -> parError; _ -> y }
parAtForNow (I# w) (I# g) (I# s) (I# p) v x y = case (parAtForNow# x v w g s p y) of { 0# -> parError; _ -> y }
#endif
-- Maybe parIO and the like could be added here later.
-- | Indicates that it may be beneficial to evaluate the first
-- argument in parallel with the second. Returns the value of the
-- second argument.
--
-- @a `par` b@ is exactly equivalent semantically to @b@.
--
-- @par@ is generally used when the value of @a@ is likely to be
-- required later, but not immediately. Also it is a good idea to
-- ensure that @a@ is not a trivial computation, otherwise the cost of
-- spawning it in parallel overshadows the benefits obtained by
-- running it in parallel.
--
-- Note that actual parallelism is only supported by certain
-- implementations (GHC with the @-threaded@ option, and GPH, for
-- now). On other implementations, @par a b = b@.
--
par :: a -> b -> b
#ifdef __GLASGOW_HASKELL__
-- GHC: delegate to the primitive-backed implementation in "GHC.Conc".
par = GHC.Conc.par
#else
-- For now, Hugs does not support par properly.
-- Sparking is a no-op here: simply return the second argument.
par a b = b
#endif
| alekar/hugs | packages/base/Control/Parallel.hs | bsd-3-clause | 2,925 | 4 | 11 | 660 | 831 | 464 | 367 | 5 | 1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[Specialise]{Stamping out overloading, and (optionally) polymorphism}
-}
{-# LANGUAGE CPP #-}
module Specialise ( specProgram, specUnfolding ) where
#include "HsVersions.h"
import Id
import TcType hiding( substTy )
import Type hiding( substTy, extendTvSubstList )
import Module( Module, HasModule(..) )
import Coercion( Coercion )
import CoreMonad
import qualified CoreSubst
import CoreUnfold
import VarSet
import VarEnv
import CoreSyn
import Rules
import CoreUtils ( exprIsTrivial, applyTypeToArgs, mkCast )
import CoreFVs ( exprFreeVars, exprsFreeVars, idFreeVars, exprsFreeIdsList )
import UniqSupply
import Name
import MkId ( voidArgId, voidPrimId )
import Maybes ( catMaybes, isJust )
import BasicTypes
import HscTypes
import Bag
import DynFlags
import Util
import Outputable
import FastString
import State
import UniqDFM
import TrieMap
import Control.Monad
#if __GLASGOW_HASKELL__ > 710
import qualified Control.Monad.Fail as MonadFail
#endif
{-
************************************************************************
* *
\subsection[notes-Specialise]{Implementation notes [SLPJ, Aug 18 1993]}
* *
************************************************************************
These notes describe how we implement specialisation to eliminate
overloading.
The specialisation pass works on Core
syntax, complete with all the explicit dictionary application,
abstraction and construction as added by the type checker. The
existing type checker remains largely as it is.
One important thought: the {\em types} passed to an overloaded
function, and the {\em dictionaries} passed are mutually redundant.
If the same function is applied to the same type(s) then it is sure to
be applied to the same dictionary(s)---or rather to the same {\em
values}. (The arguments might look different but they will evaluate
to the same value.)
Second important thought: we know that we can make progress by
treating dictionary arguments as static and worth specialising on. So
we can do without binding-time analysis, and instead specialise on
dictionary arguments and no others.
The basic idea
~~~~~~~~~~~~~~
Suppose we have
let f = <f_rhs>
in <body>
and suppose f is overloaded.
STEP 1: CALL-INSTANCE COLLECTION
We traverse <body>, accumulating all applications of f to types and
dictionaries.
(Might there be partial applications, to just some of its types and
dictionaries? In principle yes, but in practice the type checker only
builds applications of f to all its types and dictionaries, so partial
applications could only arise as a result of transformation, and even
then I think it's unlikely. In any case, we simply don't accumulate such
partial applications.)
STEP 2: EQUIVALENCES
So now we have a collection of calls to f:
f t1 t2 d1 d2
f t3 t4 d3 d4
...
Notice that f may take several type arguments. To avoid ambiguity, we
say that f is called at type t1/t2 and t3/t4.
We take equivalence classes using equality of the *types* (ignoring
the dictionary args, which as mentioned previously are redundant).
STEP 3: SPECIALISATION
For each equivalence class, choose a representative (f t1 t2 d1 d2),
and create a local instance of f, defined thus:
f@t1/t2 = <f_rhs> t1 t2 d1 d2
f_rhs presumably has some big lambdas and dictionary lambdas, so lots
of simplification will now result. However we don't actually *do* that
simplification. Rather, we leave it for the simplifier to do. If we
*did* do it, though, we'd get more call instances from the specialised
RHS. We can work out what they are by instantiating the call-instance
set from f's RHS with the types t1, t2.
Add this new id to f's IdInfo, to record that f has a specialised version.
Before doing any of this, check that f's IdInfo doesn't already
tell us about an existing instance of f at the required type/s.
(This might happen if specialisation was applied more than once, or
it might arise from user SPECIALIZE pragmas.)
Recursion
~~~~~~~~~
Wait a minute! What if f is recursive? Then we can't just plug in
its right-hand side, can we?
But it's ok. The type checker *always* creates non-recursive definitions
for overloaded recursive functions. For example:
f x = f (x+x) -- Yes I know its silly
becomes
f a (d::Num a) = let p = +.sel a d
in
letrec fl (y::a) = fl (p y y)
in
fl
We still have recursion for non-overloaded functions which we
specialise, but the recursive call should get specialised to the
same recursive version.
Polymorphism 1
~~~~~~~~~~~~~~
All this is crystal clear when the function is applied to *constant
types*; that is, types which have no type variables inside. But what if
it is applied to non-constant types? Suppose we find a call of f at type
t1/t2. There are two possibilities:
(a) The free type variables of t1, t2 are in scope at the definition point
of f. In this case there's no problem, we proceed just as before. A common
example is as follows. Here's the Haskell:
g y = let f x = x+x
in f y + f y
After typechecking we have
g a (d::Num a) (y::a) = let f b (d'::Num b) (x::b) = +.sel b d' x x
in +.sel a d (f a d y) (f a d y)
Notice that the call to f is at type type "a"; a non-constant type.
Both calls to f are at the same type, so we can specialise to give:
g a (d::Num a) (y::a) = let f@a (x::a) = +.sel a d x x
in +.sel a d (f@a y) (f@a y)
(b) The other case is when the type variables in the instance types
are *not* in scope at the definition point of f. The example we are
working with above is a good case. There are two instances of (+.sel a d),
but "a" is not in scope at the definition of +.sel. Can we do anything?
Yes, we can "common them up", a sort of limited common sub-expression deal.
This would give:
g a (d::Num a) (y::a) = let +.sel@a = +.sel a d
f@a (x::a) = +.sel@a x x
in +.sel@a (f@a y) (f@a y)
This can save work, and can't be spotted by the type checker, because
the two instances of +.sel weren't originally at the same type.
Further notes on (b)
* There are quite a few variations here. For example, the defn of
  +.sel could be floated outside the \y, to attempt to gain laziness.
It certainly mustn't be floated outside the \d because the d has to
be in scope too.
* We don't want to inline f_rhs in this case, because
that will duplicate code. Just commoning up the call is the point.
* Nothing gets added to +.sel's IdInfo.
* Don't bother unless the equivalence class has more than one item!
Not clear whether this is all worth it. It is of course OK to
simply discard call-instances when passing a big lambda.
Polymorphism 2 -- Overloading
~~~~~~~~~~~~~~
Consider a function whose most general type is
f :: forall a b. Ord a => [a] -> b -> b
There is really no point in making a version of g at Int/Int and another
at Int/Bool, because it's only instancing the type variable "a" which
buys us any efficiency. Since g is completely polymorphic in b there
ain't much point in making separate versions of g for the different
b types.
That suggests that we should identify which of g's type variables
are constrained (like "a") and which are unconstrained (like "b").
Then when taking equivalence classes in STEP 2, we ignore the type args
corresponding to unconstrained type variable. In STEP 3 we make
polymorphic versions. Thus:
f@t1/ = /\b -> <f_rhs> t1 b d1 d2
We do this.
Dictionary floating
~~~~~~~~~~~~~~~~~~~
Consider this
f a (d::Num a) = let g = ...
in
...(let d1::Ord a = Num.Ord.sel a d in g a d1)...
Here, g is only called at one type, but the dictionary isn't in scope at the
definition point for g. Usually the type checker would build a
definition for d1 which enclosed g, but the transformation system
might have moved d1's defn inward. Solution: float dictionary bindings
outwards along with call instances.
Consider
f x = let g p q = p==q
h r s = (r+s, g r s)
in
h x x
Before specialisation, leaving out type abstractions we have
f df x = let g :: Eq a => a -> a -> Bool
g dg p q = == dg p q
h :: Num a => a -> a -> (a, Bool)
h dh r s = let deq = eqFromNum dh
in (+ dh r s, g deq r s)
in
h df x x
After specialising h we get a specialised version of h, like this:
h' r s = let deq = eqFromNum df
in (+ df r s, g deq r s)
But we can't naively make an instance for g from this, because deq is not in scope
at the defn of g. Instead, we have to float out the (new) defn of deq
to widen its scope. Notice that this floating can't be done in advance -- it only
shows up when specialisation is done.
User SPECIALIZE pragmas
~~~~~~~~~~~~~~~~~~~~~~~
Specialisation pragmas can be digested by the type checker, and implemented
by adding extra definitions along with that of f, in the same way as before
f@t1/t2 = <f_rhs> t1 t2 d1 d2
Indeed the pragmas *have* to be dealt with by the type checker, because
only it knows how to build the dictionaries d1 and d2! For example
g :: Ord a => [a] -> [a]
{-# SPECIALIZE f :: [Tree Int] -> [Tree Int] #-}
Here, the specialised version of g is an application of g's rhs to the
Ord dictionary for (Tree Int), which only the type checker can conjure
up. There might not even *be* one, if (Tree Int) is not an instance of
Ord! (All the other specialision has suitable dictionaries to hand
from actual calls.)
Problem. The type checker doesn't have to hand a convenient <f_rhs>, because
it is buried in a complex (as-yet-un-desugared) binding group.
Maybe we should say
f@t1/t2 = f* t1 t2 d1 d2
where f* is the Id f with an IdInfo which says "inline me regardless!".
Indeed all the specialisation could be done in this way.
That in turn means that the simplifier has to be prepared to inline absolutely
any in-scope let-bound thing.
Again, the pragma should permit polymorphism in unconstrained variables:
h :: Ord a => [a] -> b -> b
{-# SPECIALIZE h :: [Int] -> b -> b #-}
We *insist* that all overloaded type variables are specialised to ground types,
(and hence there can be no context inside a SPECIALIZE pragma).
We *permit* unconstrained type variables to be specialised to
- a ground type
- or left as a polymorphic type variable
but nothing in between. So
{-# SPECIALIZE h :: [Int] -> [c] -> [c] #-}
is *illegal*. (It can be handled, but it adds complication, and gains the
programmer nothing.)
SPECIALISING INSTANCE DECLARATIONS
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
instance Foo a => Foo [a] where
...
{-# SPECIALIZE instance Foo [Int] #-}
The original instance decl creates a dictionary-function
definition:
dfun.Foo.List :: forall a. Foo a -> Foo [a]
The SPECIALIZE pragma just makes a specialised copy, just as for
ordinary function definitions:
dfun.Foo.List@Int :: Foo [Int]
dfun.Foo.List@Int = dfun.Foo.List Int dFooInt
The information about what instance of the dfun exist gets added to
the dfun's IdInfo in the same way as a user-defined function too.
Automatic instance decl specialisation?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Can instance decls be specialised automatically? It's tricky.
We could collect call-instance information for each dfun, but
then when we specialised their bodies we'd get new call-instances
for ordinary functions; and when we specialised their bodies, we might get
new call-instances of the dfuns, and so on. This all arises because of
the unrestricted mutual recursion between instance decls and value decls.
Still, there's no actual problem; it just means that we may not do all
the specialisation we could theoretically do.
Furthermore, instance decls are usually exported and used non-locally,
so we'll want to compile enough to get those specialisations done.
Lastly, there's no such thing as a local instance decl, so we can
survive solely by spitting out *usage* information, and then reading that
back in as a pragma when next compiling the file. So for now,
we only specialise instance decls in response to pragmas.
SPITTING OUT USAGE INFORMATION
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To spit out usage information we need to traverse the code collecting
call-instance information for all imported (non-prelude?) functions
and data types. Then we equivalence-class it and spit it out.
This is done at the top-level when all the call instances which escape
must be for imported functions and data types.
*** Not currently done ***
Partial specialisation by pragmas
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What about partial specialisation:
k :: (Ord a, Eq b) => [a] -> b -> b -> [a]
{-# SPECIALIZE k :: Eq b => [Int] -> b -> b -> [a] #-}
or even
{-# SPECIALIZE k :: Eq b => [Int] -> [b] -> [b] -> [a] #-}
Seems quite reasonable. Similar things could be done with instance decls:
instance (Foo a, Foo b) => Foo (a,b) where
...
{-# SPECIALIZE instance Foo a => Foo (a,Int) #-}
{-# SPECIALIZE instance Foo b => Foo (Int,b) #-}
Ho hum. Things are complex enough without this. I pass.
Requirements for the simplifier
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The simplifier has to be able to take advantage of the specialisation.
* When the simplifier finds an application of a polymorphic f, it looks in
f's IdInfo in case there is a suitable instance to call instead. This converts
f t1 t2 d1 d2 ===> f_t1_t2
Note that the dictionaries get eaten up too!
* Dictionary selection operations on constant dictionaries must be
short-circuited:
+.sel Int d ===> +Int
The obvious way to do this is in the same way as other specialised
calls: +.sel has inside it some IdInfo which tells that if it's applied
to the type Int then it should eat a dictionary and transform to +Int.
In short, dictionary selectors need IdInfo inside them for constant
methods.
* Exactly the same applies if a superclass dictionary is being
extracted:
Eq.sel Int d ===> dEqInt
* Something similar applies to dictionary construction too. Suppose
dfun.Eq.List is the function taking a dictionary for (Eq a) to
one for (Eq [a]). Then we want
dfun.Eq.List Int d ===> dEq.List_Int
Where does the Eq [Int] dictionary come from? It is built in
response to a SPECIALIZE pragma on the Eq [a] instance decl.
In short, dfun Ids need IdInfo with a specialisation for each
constant instance of their instance declaration.
All this uses a single mechanism: the SpecEnv inside an Id
What does the specialisation IdInfo look like?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The SpecEnv of an Id maps a list of types (the template) to an expression
[Type] |-> Expr
For example, if f has this RuleInfo:
[Int, a] -> \d:Ord Int. f' a
it means that we can replace the call
f Int t ===> (\d. f' t)
This chucks one dictionary away and proceeds with the
specialised version of f, namely f'.
What can't be done this way?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There is no way, post-typechecker, to get a dictionary for (say)
Eq a from a dictionary for Eq [a]. So if we find
==.sel [t] d
we can't transform to
eqList (==.sel t d')
where
eqList :: (a->a->Bool) -> [a] -> [a] -> Bool
Of course, we currently have no way to automatically derive
eqList, nor to connect it to the Eq [a] instance decl, but you
can imagine that it might somehow be possible. Taking advantage
of this is permanently ruled out.
Still, this is no great hardship, because we intend to eliminate
overloading altogether anyway!
A note about non-tyvar dictionaries
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some Ids have types like
forall a,b,c. Eq a -> Ord [a] -> tau
This seems curious at first, because we usually only have dictionary
args whose types are of the form (C a) where a is a type variable.
But this doesn't hold for the functions arising from instance decls,
which sometimes get arguments with types of form (C (T a)) for some
type constructor T.
Should we specialise wrt this compound-type dictionary? We used to say
"no", saying:
"This is a heuristic judgement, as indeed is the fact that we
specialise wrt only dictionaries. We choose *not* to specialise
wrt compound dictionaries because at the moment the only place
they show up is in instance decls, where they are simply plugged
into a returned dictionary. So nothing is gained by specialising
wrt them."
But it is simpler and more uniform to specialise wrt these dicts too;
and in future GHC is likely to support full fledged type signatures
like
f :: Eq [(a,b)] => ...
************************************************************************
* *
\subsubsection{The new specialiser}
* *
************************************************************************
Our basic game plan is this. For let(rec) bound function
f :: (C a, D c) => (a,b,c,d) -> Bool
* Find any specialised calls of f, (f ts ds), where
ts are the type arguments t1 .. t4, and
ds are the dictionary arguments d1 .. d2.
* Add a new definition for f1 (say):
f1 = /\ b d -> (..body of f..) t1 b t3 d d1 d2
Note that we abstract over the unconstrained type arguments.
* Add the mapping
[t1,b,t3,d] |-> \d1 d2 -> f1 b d
to the specialisations of f. This will be used by the
simplifier to replace calls
(f t1 t2 t3 t4) da db
by
                (\d1 d2 -> f1 t2 t4) da db
All the stuff about how many dictionaries to discard, and what types
to apply the specialised function to, are handled by the fact that the
SpecEnv contains a template for the result of the specialisation.
We don't build *partial* specialisations for f. For example:
f :: Eq a => a -> a -> Bool
{-# SPECIALISE f :: (Eq b, Eq c) => (b,c) -> (b,c) -> Bool #-}
Here, little is gained by making a specialised copy of f.
There's a distinct danger that the specialised version would
first build a dictionary for (Eq b, Eq c), and then select the (==)
method from it! Even if it didn't, not a great deal is saved.
We do, however, generate polymorphic, but not overloaded, specialisations:
f :: Eq a => [a] -> b -> b -> b
... SPECIALISE f :: [Int] -> b -> b -> b ...
Hence, the invariant is this:
*** no specialised version is overloaded ***
************************************************************************
* *
\subsubsection{The exported function}
* *
************************************************************************
-}
-- | Specialise calls to type-class overloaded functions occurring in a program.
specProgram :: ModGuts -> CoreM ModGuts
specProgram guts@(ModGuts { mg_module = this_mod
                          , mg_rules = local_rules
                          , mg_binds = binds })
  = do { dflags <- getDynFlags

             -- Specialise the bindings of this module
       ; (binds', uds) <- runSpecM dflags this_mod (go binds)

             -- Specialise imported functions
       ; hpt_rules <- getRuleBase
       ; let rule_base = extendRuleBaseList hpt_rules local_rules
       ; (new_rules, spec_binds) <- specImports dflags this_mod top_env emptyVarSet
                                                [] rule_base (ud_calls uds)

             -- Don't forget to wrap the specialized bindings with bindings
             -- for the needed dictionaries.
             -- See Note [Wrap bindings returned by specImports]
       ; let spec_binds' = wrapDictBinds (ud_binds uds) spec_binds

             -- Glom all the import specialisations into a single Rec,
             -- because auto-generated RULES can introduce recursion
             -- Note [Glom the bindings if imported functions are specialised]
       ; let final_binds
               | null spec_binds' = binds'
               | otherwise = Rec (flattenBinds spec_binds') : binds'

       ; return (guts { mg_binds = final_binds
                      , mg_rules = new_rules ++ local_rules }) }
  where
        -- We need to start with a Subst that knows all the things
        -- that are in scope, so that the substitution engine doesn't
        -- accidentally re-use a unique that's already in use
        -- Easiest thing is to do it all at once, as if all the top-level
        -- decls were mutually recursive
    top_env = SE { se_subst = CoreSubst.mkEmptySubst $ mkInScopeSet $ mkVarSet $
                              bindersOfBinds binds
                 , se_interesting = emptyVarSet }

    -- Specialise the bindings back-to-front, so the usage details gathered
    -- from later bindings are available when specialising earlier ones
    go []           = return ([], emptyUDs)
    go (bind:binds) = do (binds', uds) <- go binds
                         (bind', uds') <- specBind top_env bind uds
                         return (bind' ++ binds', uds')
{-
Note [Wrap bindings returned by specImports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'specImports' returns a set of specialized bindings. However, these are lacking
necessary floated dictionary bindings, which are returned by
UsageDetails(ud_binds). These dictionaries need to be brought into scope with
'wrapDictBinds' before the bindings returned by 'specImports' can be used. See,
for instance, the 'specImports' call in 'specProgram'.
Note [Disabling cross-module specialisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Since GHC 7.10 we have performed specialisation of INLINABLE bindings living
in modules outside of the current module. This can sometimes uncover user code
which explodes in size when aggressively optimized. The
-fno-cross-module-specialise option was introduced to allow users
bitten by such instances to revert to the pre-7.10 behavior.
See Trac #10491
-}
-- | Specialise a set of calls to imported bindings
specImports :: DynFlags
            -> Module
            -> SpecEnv          -- Passed in so that all top-level Ids are in scope
            -> VarSet           -- Don't specialise these ones
                                -- See Note [Avoiding recursive specialisation]
            -> [Id]             -- Stack of imported functions being specialised
            -> RuleBase         -- Rules from this module and the home package
                                -- (but not external packages, which can change)
            -> CallDetails      -- Calls for imported things, and floating bindings
            -> CoreM ( [CoreRule]   -- New rules
                     , [CoreBind] ) -- Specialised bindings
                                    -- See Note [Wrap bindings returned by specImports]
specImports dflags this_mod top_env done callers rule_base cds
  -- See Note [Disabling cross-module specialisation]
  | not $ gopt Opt_CrossModuleSpecialise dflags =
    return ([], [])
  | otherwise =
    do { let import_calls = dVarEnvElts cds
       ; (rules, spec_binds) <- go rule_base import_calls
       ; return (rules, spec_binds) }
  where
    -- Process one imported function at a time, feeding the rules each one
    -- generates into the rule base used for the remaining calls
    go :: RuleBase -> [CallInfoSet] -> CoreM ([CoreRule], [CoreBind])
    go _ [] = return ([], [])
    go rb (cis@(CIS fn _calls_for_fn) : other_calls)
      = do { (rules1, spec_binds1) <- specImport dflags this_mod top_env
                                                 done callers rb fn $
                                      ciSetToList cis
           ; (rules2, spec_binds2) <- go (extendRuleBaseList rb rules1) other_calls
           ; return (rules1 ++ rules2, spec_binds1 ++ spec_binds2) }
-- | Specialise the calls to a single imported function, then recursively
-- specialise any new overloaded calls that doing so exposes.
specImport :: DynFlags
           -> Module
           -> SpecEnv               -- Passed in so that all top-level Ids are in scope
           -> VarSet                -- Don't specialise these
                                    -- See Note [Avoiding recursive specialisation]
           -> [Id]                  -- Stack of imported functions being specialised
           -> RuleBase              -- Rules from this module
           -> Id -> [CallInfo]      -- Imported function and calls for it
           -> CoreM ( [CoreRule]    -- New rules
                    , [CoreBind] )  -- Specialised bindings
specImport dflags this_mod top_env done callers rb fn calls_for_fn
  | fn `elemVarSet` done
  = return ([], [])     -- No warning.  This actually happens all the time
                        -- when specialising a recursive function, because
                        -- the RHS of the specialised function contains a recursive
                        -- call to the original function
  | null calls_for_fn   -- We filtered out all the calls in deleteCallsMentioning
  = return ([], [])
  | wantSpecImport dflags unfolding
  , Just rhs <- maybeUnfoldingTemplate unfolding
  = do {     -- Get rules from the external package state
             -- We keep doing this in case we "page-fault in"
             -- more rules as we go along
       ; hsc_env <- getHscEnv
       ; eps <- liftIO $ hscEPS hsc_env
       ; vis_orphs <- getVisibleOrphanMods
       ; let full_rb = unionRuleBase rb (eps_rule_base eps)
             rules_for_fn = getRules (RuleEnv full_rb vis_orphs) fn

       ; (rules1, spec_pairs, uds) <- -- pprTrace "specImport1" (vcat [ppr fn, ppr calls_for_fn, ppr rhs]) $
                                      runSpecM dflags this_mod $
                                      specCalls (Just this_mod) top_env rules_for_fn calls_for_fn fn rhs
       ; let spec_binds1 = [NonRec b r | (b,r) <- spec_pairs]
             -- After the rules kick in we may get recursion, but
             -- we rely on a global GlomBinds to sort that out later
             -- See Note [Glom the bindings if imported functions are specialised]

              -- Now specialise any cascaded calls
       ; (rules2, spec_binds2) <- -- pprTrace "specImport 2" (ppr fn $$ ppr rules1 $$ ppr spec_binds1) $
                                  specImports dflags this_mod top_env
                                              (extendVarSet done fn)
                                              (fn:callers)
                                              (extendRuleBaseList rb rules1)
                                              (ud_calls uds)

             -- Don't forget to wrap the specialized bindings with bindings
             -- for the needed dictionaries
             -- See Note [Wrap bindings returned by specImports]
       ; let final_binds = wrapDictBinds (ud_binds uds)
                                         (spec_binds2 ++ spec_binds1)

       ; return (rules2 ++ rules1, final_binds) }

  | warnMissingSpecs dflags callers
  = do { warnMsg (vcat [ hang (text "Could not specialise imported function" <+> quotes (ppr fn))
                            2 (vcat [ text "when specialising" <+> quotes (ppr caller)
                                    | caller <- callers])
                       , ifPprDebug (text "calls:" <+> vcat (map (pprCallInfo fn) calls_for_fn))
                       , text "Probable fix: add INLINABLE pragma on" <+> quotes (ppr fn) ])
       ; return ([], []) }

  | otherwise
  = return ([], [])
  where
    unfolding = realIdUnfolding fn   -- We want to see the unfolding even for loop breakers
-- | Should we warn that an imported call could not be specialised?
-- See Note [Warning about missed specialisations]
warnMissingSpecs :: DynFlags -> [Id] -> Bool
warnMissingSpecs dflags callers
  | wopt Opt_WarnAllMissedSpecs dflags    = True   -- Warn about every miss
  | not (wopt Opt_WarnMissedSpecs dflags) = False  -- Warnings switched off
  | otherwise
  = case callers of
      [] -> False  -- No caller stack: nothing to report
      _  -> all caller_is_inline callers
  where
    -- Only warn when every caller on the stack carries an INLINE-ish pragma
    caller_is_inline caller = isAnyInlinePragma (idInlinePragma caller)
-- | Is this imported unfolding one that we want to specialise?
-- See Note [Specialise imported INLINABLE things]
wantSpecImport :: DynFlags -> Unfolding -> Bool
wantSpecImport dflags unf
  = case unf of
      DFunUnfolding {} -> True                    -- Always specialise DFuns
      CoreUnfolding { uf_src = src }
        -- With -fspecialise-aggressively, take anything with a template
        | gopt Opt_SpecialiseAggressively dflags -> True
        -- Specialise even INLINE things; it hasn't inlined yet,
        -- so perhaps it never will.  Moreover it may have calls
        -- inside it that we want to specialise
        | isStableSource src                     -> True
        | otherwise                              -> False
                  -- Stable, not INLINE, hence INLINABLE
      NoUnfolding      -> False
      BootUnfolding    -> False
      OtherCon {}      -> False
{- Note [Warning about missed specialisations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose
* In module Lib, you carefully mark a function 'foo' INLINABLE
* Import Lib(foo) into another module M
* Call 'foo' at some specialised type in M
Then you jolly well expect it to be specialised in M. But what if
'foo' calls another function 'Lib.bar'. Then you'd like 'bar' to be
specialised too. But if 'bar' is not marked INLINABLE it may well
not be specialised. The warning Opt_WarnMissedSpecs warns about this.
It's noisier to warn about a missed specialisation opportunity
for /every/ overloaded imported function, but sometimes useful. That
is what Opt_WarnAllMissedSpecs does.
ToDo: warn about missed opportunities for local functions.
Note [Specialise imported INLINABLE things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What imported functions do we specialise? The basic set is
* DFuns and things with INLINABLE pragmas.
but with -fspecialise-aggressively we add
* Anything with an unfolding template
Trac #8874 has a good example of why we want to auto-specialise DFuns.
We have the -fspecialise-aggressively flag (usually off), because we
risk lots of orphan modules from over-vigorous specialisation.
However it's not a big deal: anything non-recursive with an
unfolding-template will probably have been inlined already.
Note [Glom the bindings if imported functions are specialised]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have an imported, *recursive*, INLINABLE function
f :: Eq a => a -> a
f = /\a \d x. ...(f a d)...
In the module being compiled we have
g x = f (x::Int)
Now we'll make a specialised function
f_spec :: Int -> Int
f_spec = \x -> ...(f Int dInt)...
{-# RULE f Int _ = f_spec #-}
g = \x. f Int dInt x
Note that f_spec doesn't look recursive
After rewriting with the RULE, we get
f_spec = \x -> ...(f_spec)...
BUT since f_spec was non-recursive before it'll *stay* non-recursive.
The occurrence analyser never turns a NonRec into a Rec. So we must
make sure that f_spec is recursive. Easiest thing is to make all
the specialisations for imported bindings recursive.
Note [Avoiding recursive specialisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we specialise 'f' we may find new overloaded calls to 'g', 'h' in
'f's RHS. So we want to specialise g,h. But we don't want to
specialise f any more! It's possible that f's RHS might have a
recursive yet-more-specialised call, so we'd diverge in that case.
And if the call is to the same type, one specialisation is enough.
Avoiding this recursive specialisation loop is the reason for the
'done' VarSet passed to specImports and specImport.
************************************************************************
* *
\subsubsection{@specExpr@: the main function}
* *
************************************************************************
-}
-- | The environment threaded downwards through the specialiser.
data SpecEnv
  = SE { se_subst :: CoreSubst.Subst
             -- We carry a substitution down:
             -- a) we must clone any binding that might float outwards,
             --    to avoid name clashes
             -- b) we carry a type substitution to use when analysing
             --    the RHS of specialised bindings (no type-let!)

       , se_interesting :: VarSet
             -- Dict Ids that we know something about
             -- and hence may be worth specialising against
             -- See Note [Interesting dictionary arguments]
     }
-- | Look an occurrence of an Id up in the current substitution.
specVar :: SpecEnv -> Id -> CoreExpr
specVar (SE { se_subst = subst }) v
  = CoreSubst.lookupIdSubst (text "specVar") subst v
-- | Specialise an expression: apply the current substitution, and collect
-- usage details (floated dictionary bindings and overloaded calls).
specExpr :: SpecEnv -> CoreExpr -> SpecM (CoreExpr, UsageDetails)

---------------- First the easy cases --------------------
specExpr env (Type ty) = return (Type (substTy env ty), emptyUDs)
specExpr env (Coercion co) = return (Coercion (substCo env co), emptyUDs)
specExpr env (Var v) = return (specVar env v, emptyUDs)
specExpr _ (Lit lit) = return (Lit lit, emptyUDs)
specExpr env (Cast e co)
  = do { (e', uds) <- specExpr env e
       ; return ((mkCast e' (substCo env co)), uds) }
specExpr env (Tick tickish body)
  = do { (body', uds) <- specExpr env body
       ; return (Tick (specTickish env tickish) body', uds) }

---------------- Applications might generate a call instance --------------------
specExpr env expr@(App {})
  = go expr []
  where
    -- Walk down the application spine, specialising arguments as we go,
    -- so that at the head Var we can record a call with all its arguments
    go (App fun arg) args = do (arg', uds_arg) <- specExpr env arg
                               (fun', uds_app) <- go fun (arg':args)
                               return (App fun' arg', uds_arg `plusUDs` uds_app)
    go (Var f) args = case specVar env f of
                        Var f' -> return (Var f', mkCallUDs env f' args)
                        e' -> return (e', emptyUDs) -- I don't expect this!
    go other _ = specExpr env other

---------------- Lambda/case require dumping of usage details --------------------
specExpr env e@(Lam _ _) = do
    (body', uds) <- specExpr env' body
    let (free_uds, dumped_dbs) = dumpUDs bndrs' uds
    return (mkLams bndrs' (wrapDictBindsE dumped_dbs body'), free_uds)
  where
    (bndrs, body) = collectBinders e
    (env', bndrs') = substBndrs env bndrs
        -- More efficient to collect a group of binders together all at once
        -- and we don't want to split a lambda group with dumped bindings

specExpr env (Case scrut case_bndr ty alts)
  = do { (scrut', scrut_uds) <- specExpr env scrut
       ; (scrut'', case_bndr', alts', alts_uds)
             <- specCase env scrut' case_bndr alts
       ; return (Case scrut'' case_bndr' (substTy env ty) alts'
                , scrut_uds `plusUDs` alts_uds) }

---------------- Finally, let is the interesting case --------------------
specExpr env (Let bind body)
  = do {        -- Clone binders
         (rhs_env, body_env, bind') <- cloneBindSM env bind

                -- Deal with the body
       ; (body', body_uds) <- specExpr body_env body

                -- Deal with the bindings
       ; (binds', uds) <- specBind rhs_env bind' body_uds

                -- All done
       ; return (foldr Let body' binds', uds) }
-- | Apply the substitution to the Ids mentioned in a tick.
specTickish :: SpecEnv -> Tickish Id -> Tickish Id
specTickish env (Breakpoint ix ids) = Breakpoint ix (substitute ids)
  where
    -- Keep only the vars whose substitution is still a variable;
    -- drop any with a non-variable substitution.  That should never
    -- happen, but it's harmless to drop them anyway.
    substitute [] = []
    substitute (old : olds) = case specVar env old of
                                Var new -> new : substitute olds
                                _       -> substitute olds
specTickish _ tickish = tickish
-- | Specialise the alternatives of a case.  The single-alternative
-- dictionary case may float superclass selections out of the case;
-- see Note [Floating dictionaries out of cases].
specCase :: SpecEnv
         -> CoreExpr            -- Scrutinee, already done
         -> Id -> [CoreAlt]
         -> SpecM ( CoreExpr    -- New scrutinee
                  , Id
                  , [CoreAlt]
                  , UsageDetails)
specCase env scrut' case_bndr [(con, args, rhs)]
  | isDictId case_bndr          -- See Note [Floating dictionaries out of cases]
  , interestingDict env scrut'
  , not (isDeadBinder case_bndr && null sc_args')
  = do { (case_bndr_flt : sc_args_flt) <- mapM clone_me (case_bndr' : sc_args')

       ; let sc_rhss = [ Case (Var case_bndr_flt) case_bndr' (idType sc_arg')
                              [(con, args', Var sc_arg')]
                       | sc_arg' <- sc_args' ]

             -- Extend the substitution for RHS to map the *original* binders
             -- to their floated versions.
             mb_sc_flts :: [Maybe DictId]
             mb_sc_flts = map (lookupVarEnv clone_env) args'
             clone_env = zipVarEnv sc_args' sc_args_flt
             subst_prs = (case_bndr, Var case_bndr_flt)
                       : [ (arg, Var sc_flt)
                         | (arg, Just sc_flt) <- args `zip` mb_sc_flts ]
             env_rhs' = env_rhs { se_subst = CoreSubst.extendIdSubstList (se_subst env_rhs) subst_prs
                                , se_interesting = se_interesting env_rhs `extendVarSetList`
                                                   (case_bndr_flt : sc_args_flt) }

       ; (rhs', rhs_uds) <- specExpr env_rhs' rhs
       ; let scrut_bind = mkDB (NonRec case_bndr_flt scrut')
             case_bndr_set = unitVarSet case_bndr_flt
             sc_binds = [(NonRec sc_arg_flt sc_rhs, case_bndr_set)
                        | (sc_arg_flt, sc_rhs) <- sc_args_flt `zip` sc_rhss ]
             flt_binds = scrut_bind : sc_binds
             (free_uds, dumped_dbs) = dumpUDs (case_bndr':args') rhs_uds
             all_uds = flt_binds `addDictBinds` free_uds
             alt' = (con, args', wrapDictBindsE dumped_dbs rhs')
       ; return (Var case_bndr_flt, case_bndr', [alt'], all_uds) }
  where
    (env_rhs, (case_bndr':args')) = substBndrs env (case_bndr:args)
    sc_args' = filter is_flt_sc_arg args'

    -- Make a fresh Id with the same name, type and source location
    clone_me bndr = do { uniq <- getUniqueM
                       ; return (mkUserLocalOrCoVar occ uniq ty loc) }
      where
        name = idName bndr
        ty = idType bndr
        occ = nameOccName name
        loc = getSrcSpan name

    arg_set = mkVarSet args'
    -- An argument is worth floating if it's a live dictionary whose
    -- type doesn't mention any of the case-bound variables
    is_flt_sc_arg var = isId var
                     && not (isDeadBinder var)
                     && isDictTy var_ty
                     && not (tyCoVarsOfType var_ty `intersectsVarSet` arg_set)
      where
        var_ty = idType var


-- The ordinary case: just specialise each alternative's RHS
specCase env scrut case_bndr alts
  = do { (alts', uds_alts) <- mapAndCombineSM spec_alt alts
       ; return (scrut, case_bndr', alts', uds_alts) }
  where
    (env_alt, case_bndr') = substBndr env case_bndr
    spec_alt (con, args, rhs) = do
          (rhs', uds) <- specExpr env_rhs rhs
          let (free_uds, dumped_dbs) = dumpUDs (case_bndr' : args') uds
          return ((con, args', wrapDictBindsE dumped_dbs rhs'), free_uds)
        where
          (env_rhs, args') = substBndrs env_alt args
{-
Note [Floating dictionaries out of cases]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
g = \d. case d of { MkD sc ... -> ...(f sc)... }
Naively we can't float d2's binding out of the case expression,
because 'sc' is bound by the case, and that in turn means we can't
specialise f, which seems a pity.
So we invert the case, by floating out a binding
for 'sc_flt' thus:
sc_flt = case d of { MkD sc ... -> sc }
Now we can float the call instance for 'f'. Indeed this is just
what'll happen if 'sc' was originally bound with a let binding,
but case is more efficient, and necessary with equalities. So it's
good to work with both.
You might think that this won't make any difference, because the
call instance will only get nuked by the \d. BUT if 'g' itself is
specialised, then transitively we should be able to specialise f.
In general, given
case e of cb { MkD sc ... -> ...(f sc)... }
we transform to
let cb_flt = e
sc_flt = case cb_flt of { MkD sc ... -> sc }
in
case cb_flt of bg { MkD sc ... -> ....(f sc_flt)... }
The "_flt" things are the floated binds; we use the current substitution
to substitute sc -> sc_flt in the RHS
************************************************************************
* *
Dealing with a binding
* *
************************************************************************
-}
-- | Specialise a binding group and the usage details from its scope,
-- producing new bindings and the usage details to pass upstream.
specBind :: SpecEnv                     -- Use this for RHSs
         -> CoreBind                    -- Binders are already cloned by cloneBindSM,
                                        -- but RHSs are un-processed
         -> UsageDetails                -- Info on how the scope of the binding
         -> SpecM ([CoreBind],          -- New bindings
                   UsageDetails)        -- And info to pass upstream

-- Returned UsageDetails:
--    No calls for binders of this bind
specBind rhs_env (NonRec fn rhs) body_uds
  = do { (rhs', rhs_uds) <- specExpr rhs_env rhs
       ; (fn', spec_defns, body_uds1) <- specDefn rhs_env body_uds fn rhs

       ; let pairs = spec_defns ++ [(fn', rhs')]
                        -- fn' mentions the spec_defns in its rules,
                        -- so put the latter first

             combined_uds = body_uds1 `plusUDs` rhs_uds
                -- This way round a call in rhs_uds of a function f
                -- at type T will override a call of f at T in body_uds1; and
                -- that is good because it'll tend to keep "earlier" calls
                -- See Note [Specialisation of dictionary functions]

             (free_uds, dump_dbs, float_all) = dumpBindUDs [fn] combined_uds
                -- See Note [From non-recursive to recursive]

             final_binds :: [DictBind]
             final_binds
               | isEmptyBag dump_dbs = [mkDB $ NonRec b r | (b,r) <- pairs]
               | otherwise = [flattenDictBinds dump_dbs pairs]

       ; if float_all then
             -- Rather than discard the calls mentioning the bound variables
             -- we float this binding along with the others
             return ([], free_uds `snocDictBinds` final_binds)
         else
             -- No call in final_uds mentions bound variables,
             -- so we can just leave the binding here
             return (map fst final_binds, free_uds) }


specBind rhs_env (Rec pairs) body_uds
       -- Note [Specialising a recursive group]
  = do { let (bndrs,rhss) = unzip pairs
       ; (rhss', rhs_uds) <- mapAndCombineSM (specExpr rhs_env) rhss
       ; let scope_uds = body_uds `plusUDs` rhs_uds
                       -- Includes binds and calls arising from rhss

       ; (bndrs1, spec_defns1, uds1) <- specDefns rhs_env scope_uds pairs

       ; (bndrs3, spec_defns3, uds3)
             <- if null spec_defns1  -- Common case: no specialisation
                then return (bndrs1, [], uds1)
                else do {            -- Specialisation occurred; do it again
                          (bndrs2, spec_defns2, uds2)
                              <- specDefns rhs_env uds1 (bndrs1 `zip` rhss)
                        ; return (bndrs2, spec_defns2 ++ spec_defns1, uds2) }

       ; let (final_uds, dumped_dbs, float_all) = dumpBindUDs bndrs uds3
             bind = flattenDictBinds dumped_dbs
                                     (spec_defns3 ++ zip bndrs3 rhss')

       ; if float_all then
             return ([], final_uds `snocDictBind` bind)
         else
             return ([fst bind], final_uds) }
---------------------------
specDefns :: SpecEnv
          -> UsageDetails               -- Info on how it is used in its scope
          -> [(OutId,InExpr)]           -- The things being bound and their un-processed RHS
          -> SpecM ([OutId],            -- Original Ids with RULES added
                    [(OutId,OutExpr)],  -- Extra, specialised bindings
                    UsageDetails)       -- Stuff to fling upwards from the specialised versions

-- Specialise a list of bindings (the contents of a Rec), but flowing usages
-- upwards binding by binding.  Example: { f = ...g ...; g = ...f .... }
-- Then if the input CallDetails has a specialised call for 'g', whose specialisation
-- in turn generates a specialised call for 'f', we catch that in this one sweep.
-- But not vice versa (it's a fixpoint problem).

specDefns env scope_uds binds = go binds
  where
    -- Work right-to-left, threading the usage details through, so calls
    -- arising from later bindings are visible when doing earlier ones
    go [] = return ([], [], scope_uds)
    go ((bndr, rhs) : rest)
      = do { (rest_bndrs, rest_specs, uds_after_rest) <- go rest
           ; (bndr', my_specs, uds') <- specDefn env uds_after_rest bndr rhs
           ; return (bndr' : rest_bndrs, rest_specs ++ my_specs, uds') }
---------------------------
specDefn :: SpecEnv
         -> UsageDetails                -- Info on how it is used in its scope
         -> OutId -> InExpr             -- The thing being bound and its un-processed RHS
         -> SpecM (Id,                  -- Original Id with added RULES
                   [(Id,CoreExpr)],     -- Extra, specialised bindings
                   UsageDetails)        -- Stuff to fling upwards from the specialised versions

-- Specialise a single binder for the calls recorded against it, and
-- attach the resulting RULES to the binder
specDefn env body_uds fn rhs
  = do { (new_rules, spec_defns, spec_uds)
             <- specCalls Nothing env existing_rules calls_for_me fn rhs
       ; return ( fn `addIdSpecialisations` new_rules
                , spec_defns
                  -- NB: it's important that this `plusUDs` is this way
                  -- round, because uds_without_me may bind dictionaries
                  -- that are used in the calls_for_me passed to specCalls.
                  -- So the dictionary bindings in spec_uds may mention
                  -- dictionaries bound in uds_without_me
                , uds_without_me `plusUDs` spec_uds ) }
  where
    -- Split off the calls for 'fn' itself; the rest flows upwards untouched
    (uds_without_me, calls_for_me) = callsForMe fn body_uds
    existing_rules = idCoreRules fn
---------------------------
-- | Make one specialised copy of @fn@ (and a matching RULE) for each
-- recorded call pattern that is not already covered by an existing rule.
specCalls :: Maybe Module      -- Just this_mod  =>  specialising imported fn
                               -- Nothing        =>  specialising local fn
          -> SpecEnv
          -> [CoreRule]        -- Existing RULES for the fn
          -> [CallInfo]
          -> OutId -> InExpr
          -> SpecM ([CoreRule],       -- New RULES for the fn
                    [(Id,CoreExpr)],  -- Extra, specialised bindings
                    UsageDetails)     -- New usage details from the specialised RHSs

-- This function checks existing rules, and does not create
-- duplicate ones. So the caller does not need to do this filtering.
-- See 'already_covered'

specCalls mb_mod env rules_for_me calls_for_me fn rhs
        -- The first case is the interesting one
  | rhs_tyvars `lengthIs` n_tyvars      -- Rhs of fn's defn has right number of big lambdas
    && rhs_ids `lengthAtLeast` n_dicts  -- and enough dict args
    && notNull calls_for_me             -- And there are some calls to specialise
    && not (isNeverActive (idInlineActivation fn))
        -- Don't specialise NOINLINE things
        -- See Note [Auto-specialisation and RULES]

--   && not (certainlyWillInline (idUnfolding fn))      -- And it's not small
--      See Note [Inline specialisation] for why we do not
--      switch off specialisation for inline functions

  = -- pprTrace "specDefn: some" (ppr fn $$ ppr calls_for_me $$ ppr rules_for_me) $
    do { stuff <- mapM spec_call calls_for_me
       ; let (spec_defns, spec_uds, spec_rules) = unzip3 (catMaybes stuff)
       ; return (spec_rules, spec_defns, plusUDList spec_uds) }

  | otherwise   -- No calls or RHS doesn't fit our preconceptions
  = WARN( not (exprIsTrivial rhs) && notNull calls_for_me,
          text "Missed specialisation opportunity for"
                                 <+> ppr fn $$ _trace_doc )
          -- Note [Specialisation shape]
    -- pprTrace "specDefn: none" (ppr fn <+> ppr calls_for_me) $
    return ([], [], emptyUDs)
  where
    _trace_doc = sep [ ppr rhs_tyvars, ppr n_tyvars
                     , ppr rhs_ids, ppr n_dicts
                     , ppr (idInlineActivation fn) ]

    fn_type = idType fn
    fn_arity = idArity fn
    fn_unf = realIdUnfolding fn  -- Ignore loop-breaker-ness here
    (tyvars, theta, _) = tcSplitSigmaTy fn_type
    n_tyvars = length tyvars
    n_dicts = length theta
    inl_prag = idInlinePragma fn
    inl_act = inlinePragmaActivation inl_prag
    is_local = isLocalId fn

        -- Figure out whether the function has an INLINE pragma
        -- See Note [Inline specialisations]

    (rhs_tyvars, rhs_ids, rhs_body) = collectTyAndValBinders rhs

    rhs_dict_ids = take n_dicts rhs_ids
    body = mkLams (drop n_dicts rhs_ids) rhs_body
                -- Glue back on the non-dict lambdas

    -- Is this call pattern already handled by one of the existing rules?
    already_covered :: DynFlags -> [CoreExpr] -> Bool
    already_covered dflags args      -- Note [Specialisations already covered]
       = isJust (lookupRule dflags
                            (CoreSubst.substInScope (se_subst env), realIdUnfolding)
                            (const True)
                            fn args rules_for_me)

    -- Build the type arguments of the RULE LHS: the call's fixed types
    -- where known, and the (fresh) polymorphic tyvars elsewhere
    mk_ty_args :: [Maybe Type] -> [TyVar] -> [CoreExpr]
    mk_ty_args [] poly_tvs
      = ASSERT( null poly_tvs ) []
    mk_ty_args (Nothing : call_ts) (poly_tv : poly_tvs)
      = Type (mkTyVarTy poly_tv) : mk_ty_args call_ts poly_tvs
    mk_ty_args (Just ty : call_ts) poly_tvs
      = Type ty : mk_ty_args call_ts poly_tvs
    mk_ty_args (Nothing : _) [] = panic "mk_ty_args"

    ----------------------------------------------------------
        -- Specialise to one particular call pattern
    spec_call :: CallInfo                         -- Call instance
              -> SpecM (Maybe ((Id,CoreExpr),     -- Specialised definition
                               UsageDetails,      -- Usage details from specialised body
                               CoreRule))         -- Info for the Id's SpecEnv
    spec_call _call_info@(CallKey call_ts, (call_ds, _))
      = ASSERT( call_ts `lengthIs` n_tyvars && call_ds `lengthIs` n_dicts )

        -- Suppose f's defn is  f = /\ a b c -> \ d1 d2 -> rhs
        -- Suppose the call is for f [Just t1, Nothing, Just t3] [dx1, dx2]

        -- Construct the new binding
        --      f1 = SUBST[a->t1,c->t3, d1->d1', d2->d2'] (/\ b -> rhs)
        -- PLUS the rule
        --      RULE "SPEC f" forall b d1' d2'. f b d1' d2' = f1 b
        -- In the rule, d1' and d2' are just wildcards, not used in the RHS
        -- PLUS the usage-details
        --      { d1' = dx1; d2' = dx2 }
        -- where d1', d2' are cloned versions of d1,d2, with the type substitution
        -- applied.  These auxiliary bindings just avoid duplication of dx1, dx2
        --
        -- Note that the substitution is applied to the whole thing.
        -- This is convenient, but just slightly fragile.  Notably:
        --      * There had better be no name clashes in a/b/c
        do { let
                -- poly_tyvars = [b] in the example above
                -- spec_tyvars = [a,c]
                -- ty_args     = [t1,b,t3]
                spec_tv_binds = [(tv,ty) | (tv, Just ty) <- rhs_tyvars `zip` call_ts]
                env1 = extendTvSubstList env spec_tv_binds
                (rhs_env, poly_tyvars) = substBndrs env1
                                            [tv | (tv, Nothing) <- rhs_tyvars `zip` call_ts]

             -- Clone rhs_dicts, including instantiating their types
           ; inst_dict_ids <- mapM (newDictBndr rhs_env) rhs_dict_ids
           ; let (rhs_env2, dx_binds, spec_dict_args)
                            = bindAuxiliaryDicts rhs_env rhs_dict_ids call_ds inst_dict_ids
                 ty_args = mk_ty_args call_ts poly_tyvars
                 ev_args = map varToCoreExpr inst_dict_ids -- ev_args, ev_bndrs:
                 ev_bndrs = exprsFreeIdsList ev_args       -- See Note [Evidence foralls]
                 rule_args = ty_args ++ ev_args
                 rule_bndrs = poly_tyvars ++ ev_bndrs

           ; dflags <- getDynFlags
           ; if already_covered dflags rule_args then
                return Nothing
             else -- pprTrace "spec_call" (vcat [ ppr _call_info, ppr fn, ppr rhs_dict_ids
                  --                            , text "rhs_env2" <+> ppr (se_subst rhs_env2)
                  --                            , ppr dx_binds ]) $
                  do
           {    -- Figure out the type of the specialised function
             let body_ty = applyTypeToArgs rhs fn_type rule_args
                 (lam_args, app_args)       -- Add a dummy argument if body_ty is unlifted
                   | isUnliftedType body_ty -- C.f. WwLib.mkWorkerArgs
                   = (poly_tyvars ++ [voidArgId], poly_tyvars ++ [voidPrimId])
                   | otherwise = (poly_tyvars, poly_tyvars)
                 spec_id_ty = mkLamTypes lam_args body_ty

           ; spec_f <- newSpecIdSM fn spec_id_ty
           ; (spec_rhs, rhs_uds) <- specExpr rhs_env2 (mkLams lam_args body)
           ; this_mod <- getModule
           ; let
                -- The rule to put in the function's specialisation is:
                --      forall b, d1',d2'.  f t1 b t3 d1' d2' = f1 b
                herald = case mb_mod of
                           Nothing       -- Specialising local fn
                               -> text "SPEC"
                           Just this_mod -- Specialising imported fn
                               -> text "SPEC/" <> ppr this_mod

                rule_name = mkFastString $ showSDocForUser dflags neverQualify $
                            herald <+> ppr fn <+> hsep (map ppr_call_key_ty call_ts)
                            -- This name ends up in interface files, so use showSDocForUser,
                            -- otherwise uniques end up there, making builds
                            -- less deterministic (See #4012 comment:61 ff)

                spec_env_rule = mkRule
                                  this_mod
                                  True {- Auto generated -}
                                  is_local
                                  rule_name
                                  inl_act       -- Note [Auto-specialisation and RULES]
                                  (idName fn)
                                  rule_bndrs
                                  rule_args
                                  (mkVarApps (Var spec_f) app_args)

                -- Add the { d1' = dx1; d2' = dx2 } usage stuff
                final_uds = foldr consDictBind rhs_uds dx_binds

                --------------------------------------
                -- Add a suitable unfolding if the spec_inl_prag says so
                -- See Note [Inline specialisations]
                (spec_inl_prag, spec_unf)
                  | not is_local && isStrongLoopBreaker (idOccInfo fn)
                  = (neverInlinePragma, noUnfolding)
                        -- See Note [Specialising imported functions] in OccurAnal

                  | InlinePragma { inl_inline = Inlinable } <- inl_prag
                  = (inl_prag { inl_inline = EmptyInlineSpec }, noUnfolding)

                  | otherwise
                  = (inl_prag, specUnfolding poly_tyvars spec_app
                                             arity_decrease fn_unf)

                arity_decrease = length spec_dict_args
                spec_app e = (e `mkApps` ty_args) `mkApps` spec_dict_args

                --------------------------------------
                -- Adding arity information just propagates it a bit faster
                --      See Note [Arity decrease] in Simplify
                -- Copy InlinePragma information from the parent Id.
                -- So if f has INLINE[1] so does spec_f
                spec_f_w_arity = spec_f `setIdArity` max 0 (fn_arity - n_dicts)
                                        `setInlinePragma` spec_inl_prag
                                        `setIdUnfolding` spec_unf

           ; return (Just ((spec_f_w_arity, spec_rhs), final_uds, spec_env_rule)) } }
{- Note [Evidence foralls]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose (Trac #12212) that we are specialising
f :: forall a b. (Num a, F a ~ F b) => blah
with a=b=Int. Then the RULE will be something like
RULE forall (d:Num Int) (g :: F Int ~ F Int).
f Int Int d g = f_spec
But both varToCoreExpr (when constructing the LHS args), and the
simplifier (when simplifying the LHS args), will transform to
RULE forall (d:Num Int) (g :: F Int ~ F Int).
f Int Int d <F Int> = f_spec
by replacing g with Refl. So now 'g' is unbound, which results in a later
crash. So we use Refl right off the bat, and do not forall-quantify 'g':
* varToCoreExpr generates a Refl
* exprsFreeIdsList returns the Ids bound by the args,
which won't include g
You might wonder if this will match as often, but the simplifier replaces
complicated Refl coercions with Refl pretty aggressively.
Note [Orphans and auto-generated rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we specialise an INLINABLE function, or when we have
-fspecialise-aggressively, we auto-generate RULES that are orphans.
We don't want to warn about these, or we'd generate a lot of warnings.
Thus, we only warn about user-specified orphan rules.
Indeed, we don't even treat the module as an orphan module if it has
auto-generated *rule* orphans. Orphan modules are read every time we
compile, so they are pretty obtrusive and slow down every compilation,
even non-optimised ones. (Reason: for type class instances it's a
type correctness issue.) But specialisation rules are strictly for
*optimisation* only so it's fine not to read the interface.
What this means is that a SPEC rules from auto-specialisation in
module M will be used in other modules only if M.hi has been read for
some other reason, which is actually pretty likely.
-}
-- | Bind each dictionary argument of a call to a fresh name, so that the
-- specialised function shares dictionary computations rather than
-- duplicating them.  Returns the extended environment, the auxiliary
-- bindings, and trivial expressions to use as the actual dict arguments.
bindAuxiliaryDicts
  :: SpecEnv
  -> [DictId] -> [CoreExpr]   -- Original dict bndrs, and the witnessing expressions
  -> [DictId]                 -- A cloned dict-id for each dict arg
  -> (SpecEnv,                -- Substitute for all orig_dicts
      [DictBind],             -- Auxiliary dict bindings
      [CoreExpr])             -- Witnessing expressions (all trivial)
-- Bind any dictionary arguments to fresh names, to preserve sharing
bindAuxiliaryDicts env@(SE { se_subst = subst, se_interesting = interesting })
                   orig_dict_ids call_ds inst_dict_ids
  = (env', dx_binds, spec_dict_args)
  where
    (dx_binds, spec_dict_args) = go call_ds inst_dict_ids

    -- Map the original dict binders to the trivial witnessing expressions,
    -- and bring the new auxiliary ids into scope
    env' = env { se_subst = subst `CoreSubst.extendSubstList`
                                  (orig_dict_ids `zip` spec_dict_args)
                            `CoreSubst.extendInScopeList` dx_ids
               , se_interesting = interesting `unionVarSet` interesting_dicts }

    dx_ids = [dx_id | (NonRec dx_id _, _) <- dx_binds]

    interesting_dicts = mkVarSet [ dx_id | (NonRec dx_id dx, _) <- dx_binds
                                 , interestingDict env dx ]
                  -- See Note [Make the new dictionaries interesting]

    -- Walk the witnessing expressions in parallel with the cloned ids:
    -- trivial witnesses are used directly, others get a fresh binding
    go :: [CoreExpr] -> [CoreBndr] -> ([DictBind], [CoreExpr])
    go [] _  = ([], [])
    go (dx:dxs) (dx_id:dx_ids)
      | exprIsTrivial dx = (dx_binds, dx : args)
      | otherwise        = (mkDB (NonRec dx_id dx) : dx_binds, Var dx_id : args)
      where
        (dx_binds, args) = go dxs dx_ids
        -- In the first case extend the substitution but not bindings;
        -- in the latter extend the bindings but not the substitution.
        -- For the former, note that we bind the *original* dict in the substitution,
        -- overriding any d->dx_id binding put there by substBndrs

    go _ _ = pprPanic "bindAuxiliaryDicts" (ppr orig_dict_ids $$ ppr call_ds $$ ppr inst_dict_ids)
{-
Note [Make the new dictionaries interesting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Important! We're going to substitute dx_id1 for d
and we want it to look "interesting", else we won't gather *any*
consequential calls. E.g.
f d = ...g d....
If we specialise f for a call (f (dfun dNumInt)), we'll get
a consequent call (g d') with an auxiliary definition
d' = df dNumInt
We want that consequent call to look interesting
Note [From non-recursive to recursive]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Even in the non-recursive case, if any dict-binds depend on 'fn' we might
have built a recursive knot
f a d x = <blah>
MkUD { ud_binds = d7 = MkD ..f..
, ud_calls = ...(f T d7)... }
Then we generate
Rec { fs x = <blah>[T/a, d7/d]
f a d x = <blah>
RULE f T _ = fs
d7 = ...f... }
Here the recursion is only through the RULE.
Note [Specialisation of dictionary functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here is a nasty example that bit us badly: see Trac #3591
class Eq a => C a
instance Eq [a] => C [a]
---------------
dfun :: Eq [a] -> C [a]
dfun a d = MkD a d (meth d)
d4 :: Eq [T] = <blah>
d2 :: C [T] = dfun T d4
d1 :: Eq [T] = $p1 d2
d3 :: C [T] = dfun T d1
None of these definitions is recursive. What happened was that we
generated a specialisation:
RULE forall d. dfun T d = dT :: C [T]
dT = (MkD a d (meth d)) [T/a, d1/d]
= MkD T d1 (meth d1)
But now we use the RULE on the RHS of d2, to get
d2 = dT = MkD d1 (meth d1)
d1 = $p1 d2
and now d1 is bottom! The problem is that when specialising 'dfun' we
should first dump "below" the binding all floated dictionary bindings
that mention 'dfun' itself. So d2 and d3 (and hence d1) must be
placed below 'dfun', and thus unavailable to it when specialising
'dfun'. That in turn means that the call (dfun T d1) must be
discarded. On the other hand, the call (dfun T d4) is fine, assuming
d4 doesn't mention dfun.
But look at this:
class C a where { foo,bar :: [a] -> [a] }
instance C Int where
foo x = r_bar x
bar xs = reverse xs
r_bar :: C a => [a] -> [a]
r_bar xs = bar (xs ++ xs)
That translates to:
r_bar a (c::C a) (xs::[a]) = bar a d (xs ++ xs)
Rec { $fCInt :: C Int = MkC foo_help reverse
foo_help (xs::[Int]) = r_bar Int $fCInt xs }
The call (r_bar $fCInt) mentions $fCInt,
which mentions foo_help,
which mentions r_bar
But we DO want to specialise r_bar at Int:
Rec { $fCInt :: C Int = MkC foo_help reverse
foo_help (xs::[Int]) = r_bar Int $fCInt xs
r_bar a (c::C a) (xs::[a]) = bar a d (xs ++ xs)
RULE r_bar Int _ = r_bar_Int
r_bar_Int xs = bar Int $fCInt (xs ++ xs)
}
Note that, because of its RULE, r_bar joins the recursive
group. (In this case it'll unravel a short moment later.)
Conclusion: we catch the nasty case using filter_dfuns in
callsForMe. To be honest I'm not 100% certain that this is 100%
right, but it works. Sigh.
Note [Specialising a recursive group]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
let rec { f x = ...g x'...
; g y = ...f y'.... }
in f 'a'
Here we specialise 'f' at Char; but that is very likely to lead to
a specialisation of 'g' at Char. We must do the latter, else the
whole point of specialisation is lost.
But we do not want to keep iterating to a fixpoint, because in the
presence of polymorphic recursion we might generate an infinite number
of specialisations.
So we use the following heuristic:
* Arrange the rec block in dependency order, so far as possible
(the occurrence analyser already does this)
* Specialise it much like a sequence of lets
* Then go through the block a second time, feeding call-info from
the RHSs back in the bottom, as it were
In effect, the ordering maximises the effectiveness of each sweep,
and we do just two sweeps. This should catch almost every case of
monomorphic recursion -- the exception could be a very knotted-up
recursion with multiple cycles tied up together.
This plan is implemented in the Rec case of specBindItself.
Note [Specialisations already covered]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We obviously don't want to generate two specialisations for the same
argument pattern. There are two wrinkles
1. We do the already-covered test in specDefn, not when we generate
the CallInfo in mkCallUDs. We used to test in the latter place, but
we now iterate the specialiser somewhat, and the Id at the call site
might therefore not have all the RULES that we can see in specDefn
2. What about two specialisations where the second is an *instance*
of the first? If the more specific one shows up first, we'll generate
specialisations for both. If the *less* specific one shows up first,
we *don't* currently generate a specialisation for the more specific
one. (See the call to lookupRule in already_covered.) Reasons:
(a) lookupRule doesn't say which matches are exact (bad reason)
(b) if the earlier specialisation is user-provided, it's
far from clear that we should auto-specialise further
Note [Auto-specialisation and RULES]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider:
g :: Num a => a -> a
g = ...
f :: (Int -> Int) -> Int
f w = ...
{-# RULE f g = 0 #-}
Suppose that auto-specialisation makes a specialised version of
g::Int->Int That version won't appear in the LHS of the RULE for f.
So if the specialisation rule fires too early, the rule for f may
never fire.
It might be possible to add new rules, to "complete" the rewrite system.
Thus when adding
RULE forall d. g Int d = g_spec
also add
RULE f g_spec = 0
But that's a bit complicated. For now we ask the programmer's help,
by *copying the INLINE activation pragma* to the auto-specialised
rule. So if g says {-# NOINLINE[2] g #-}, then the auto-spec rule
will also not be active until phase 2. And that's what programmers
should jolly well do anyway, even aside from specialisation, to ensure
that g doesn't inline too early.
This in turn means that the RULE would never fire for a NOINLINE
thing so not much point in generating a specialisation at all.
Note [Specialisation shape]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We only specialise a function if it has visible top-level lambdas
corresponding to its overloading. E.g. if
f :: forall a. Eq a => ....
then its body must look like
f = /\a. \d. ...
Reason: when specialising the body for a call (f ty dexp), we want to
substitute dexp for d, and pick up specialised calls in the body of f.
This doesn't always work. One example I came across was this:
newtype Gen a = MkGen{ unGen :: Int -> a }
choose :: Eq a => a -> Gen a
choose n = MkGen (\r -> n)
oneof = choose (1::Int)
It's a silly example, but we get
choose = /\a. g `cast` co
where choose doesn't have any dict arguments. Thus far I have not
tried to fix this (wait till there's a real example).
Mind you, then 'choose' will be inlined (since RHS is trivial) so
it doesn't matter. This comes up with single-method classes
class C a where { op :: a -> a }
instance C a => C [a] where ....
==>
$fCList :: C a => C [a]
$fCList = $copList |> (...coercion>...)
....(uses of $fCList at particular types)...
So we suppress the WARN if the rhs is trivial.
Note [Inline specialisations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here is what we do with the InlinePragma of the original function
* Activation/RuleMatchInfo: both transferred to the
specialised function
* InlineSpec:
(a) An INLINE pragma is transferred
(b) An INLINABLE pragma is *not* transferred
Why (a): transfer INLINE pragmas? The point of INLINE was precisely to
specialise the function at its call site, and arguably that's not so
important for the specialised copies. BUT *pragma-directed*
specialisation now takes place in the typechecker/desugarer, with
manually specified INLINEs. The specialisation here is automatic.
It'd be very odd if a function marked INLINE was specialised (because
of some local use), and then forever after (including importing
modules) the specialised version wasn't INLINEd. After all, the
programmer said INLINE!
You might wonder why we specialise INLINE functions at all. After
all they should be inlined, right? Two reasons:
* Even INLINE functions are sometimes not inlined, when they aren't
applied to interesting arguments. But perhaps the type arguments
alone are enough to specialise (even though the args are too boring
to trigger inlining), and it's certainly better to call the
specialised version.
* The RHS of an INLINE function might call another overloaded function,
and we'd like to generate a specialised version of that function too.
This actually happens a lot. Consider
replicateM_ :: (Monad m) => Int -> m a -> m ()
{-# INLINABLE replicateM_ #-}
replicateM_ d x ma = ...
The strictness analyser may transform to
replicateM_ :: (Monad m) => Int -> m a -> m ()
{-# INLINE replicateM_ #-}
replicateM_ d x ma = case x of I# x' -> $wreplicateM_ d x' ma
$wreplicateM_ :: (Monad m) => Int# -> m a -> m ()
{-# INLINABLE $wreplicateM_ #-}
$wreplicateM_ = ...
Now an importing module has a specialised call to replicateM_, say
(replicateM_ dMonadIO). We certainly want to specialise $wreplicateM_!
This particular example had a huge effect on the call to replicateM_
in nofib/shootout/n-body.
Why (b): discard INLINABLE pragmas? See Trac #4874 for persuasive examples.
Suppose we have
{-# INLINABLE f #-}
f :: Ord a => [a] -> Int
f xs = letrec f' = ...f'... in f'
Then, when f is specialised and optimised we might get
wgo :: [Int] -> Int#
wgo = ...wgo...
f_spec :: [Int] -> Int
f_spec xs = case wgo xs of { r -> I# r }
and we clearly want to inline f_spec at call sites. But if we still
have the big, un-optimised of f (albeit specialised) captured in an
INLINABLE pragma for f_spec, we won't get that optimisation.
So we simply drop INLINABLE pragmas when specialising. It's not really
a complete solution; ignoring specialisation for now, INLINABLE functions
don't get properly strictness analysed, for example. But it works well
for examples involving specialisation, which is the dominant use of
INLINABLE. See Trac #4874.
************************************************************************
* *
\subsubsection{UsageDetails and suchlike}
* *
************************************************************************
-}
-- | The information gathered on the way down the tree: floated
-- dictionary bindings, plus the calls for which we may want to
-- build specialisations.
data UsageDetails
  = MkUD {
        ud_binds :: !(Bag DictBind),
               -- Floated dictionary bindings
               -- The order is important;
               -- in ds1 `union` ds2, bindings in ds2 can depend on those in ds1
               -- (Remember, Bags preserve order in GHC.)

        ud_calls :: !CallDetails
               -- INVARIANT: suppose bs = bindersOf ud_binds
               -- Then 'calls' may *mention* 'bs',
               -- but there should be no calls *for* bs
    }
-- | Debug rendering of a 'UsageDetails'.
instance Outputable UsageDetails where
  ppr (MkUD { ud_binds = dbs, ud_calls = calls })
    = text "MkUD" <+> braces (sep (punctuate comma fields))
    where
      fields = [ text "binds" <+> equals <+> ppr dbs
               , text "calls" <+> equals <+> ppr calls ]
-- | A 'DictBind' is a binding along with a cached set containing its free
-- variables (both type variables and dictionaries)
type DictBind = (CoreBind, VarSet)

-- | A dictionary-valued Core expression
type DictExpr = CoreExpr

-- | 'UsageDetails' with no bindings and no calls
emptyUDs :: UsageDetails
emptyUDs = MkUD { ud_binds = emptyBag, ud_calls = emptyDVarEnv }
------------------------------------------------------------
-- | Per-function call records, keyed (deterministically) by the Id
type CallDetails  = DIdEnv CallInfoSet
  -- The order of specialized binds and rules depends on how we linearize
  -- CallDetails, so to get determinism we must use a deterministic set here.
  -- See Note [Deterministic UniqFM] in UniqDFM

-- | The type arguments of one call, one entry per quantified tyvar
newtype CallKey   = CallKey [Maybe Type]
  -- Nothing => unconstrained type argument

-- | All the calls recorded for one particular function
data CallInfoSet = CIS Id (Bag CallInfo)
  -- The list of types and dictionaries is guaranteed to
  -- match the type of f
{-
Note [CallInfoSet determinism]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
CallInfoSet holds a Bag of (CallKey, [DictExpr], VarSet) triplets for a given
Id. They represent the types that the function is instantiated at along with
the dictionaries and free variables.
We use this information to generate specialized versions of a given function.
CallInfoSet used to be defined as:
data CallInfoSet = CIS Id (Map CallKey ([DictExpr], VarSet))
Unfortunately this was not deterministic. The Ord instance of CallKey was
defined in terms of nonDetCmpType which is not deterministic.
See Note [nonDetCmpType nondeterminism].
The end result was that if the function had multiple specializations they would
be generated in arbitrary order.
We need a container that:
a) when turned into a list has only one element per each CallKey and the list
has deterministic order
b) supports union
c) supports singleton
d) supports filter
We can't use UniqDFM here because there's no one Unique that we can key on.
The current approach is to implement the set as a Bag with duplicates.
This makes b), c), d) trivial and pushes a) towards the end. The deduplication
is done by using a TrieMap for membership tests on CallKey. This lets us delete
the nondeterministic Ord CallKey instance.
An alternative approach would be to augment the Map the same way that UniqDFM
is augmented, by keeping track of insertion order and using it to order the
resulting lists. It would mean keeping the nondeterministic Ord CallKey
instance making it easy to reintroduce nondeterminism in the future.
-}
-- | Flatten a 'CallInfoSet' to a list containing exactly one entry per
-- distinct 'CallKey'; later duplicates in the bag are discarded.
ciSetToList :: CallInfoSet -> [CallInfo]
ciSetToList (CIS _ infos) = snd (foldrBag dedup (emptyTM, []) infos)
  where
    -- This is where we eliminate duplicates, recording the CallKeys we've
    -- already seen in the TrieMap. See Note [CallInfoSet determinism].
    dedup :: CallInfo -> (CallKeySet, [CallInfo]) -> (CallKeySet, [CallInfo])
    dedup info@(CallKey key, _) (seen, kept)
      = case lookupTM key seen of
          Just _  -> (seen, kept)
          Nothing -> (insertTM key () seen, info : kept)

type CallKeySet = ListMap (MaybeMap TypeMap) ()
  -- We only use it in ciSetToList to check for membership
-- | Keep only the calls satisfying the predicate.
ciSetFilter :: (CallInfo -> Bool) -> CallInfoSet -> CallInfoSet
ciSetFilter keep (CIS fn infos) = CIS fn (filterBag keep infos)
type CallInfo = (CallKey, ([DictExpr], VarSet))
                        -- Range is dict args and the vars of the whole
                        -- call (including tyvars)
                        -- [*not* include the main id itself, of course]

-- | Debug rendering: the function followed by its recorded calls.
instance Outputable CallInfoSet where
  ppr (CIS fn calls) = hang (text "CIS" <+> ppr fn)
                          2 (ppr calls)
-- | Pretty-print one recorded call: the function followed by the type
-- arguments it was called at.
pprCallInfo :: Id -> CallInfo -> SDoc
pprCallInfo fn (CallKey mb_tys, (_dxs, _))
  = hang (ppr fn)
       2 (fsep (map ppr_call_key_ty mb_tys {- ++ map pprParendExpr _dxs -}))

-- | An unconstrained type argument prints as @_@; a fixed one as @\@ ty@.
ppr_call_key_ty :: Maybe Type -> SDoc
ppr_call_key_ty = maybe (char '_') (\ty -> char '@' <+> pprParendType ty)
instance Outputable CallKey where
  ppr (CallKey ts) = ppr ts

-- | Union two 'CallDetails', merging the per-function call sets.
unionCalls :: CallDetails -> CallDetails -> CallDetails
unionCalls = plusDVarEnv_C unionCallInfoSet

-- | Union two call sets for the same function.
unionCallInfoSet :: CallInfoSet -> CallInfoSet -> CallInfoSet
unionCallInfoSet (CIS f calls1) (CIS _ calls2)
  = CIS f (unionBags calls1 calls2)
-- | Free variables of every call recorded in the 'CallDetails'.
callDetailsFVs :: CallDetails -> VarSet
callDetailsFVs calls =
  nonDetFoldUDFM (unionVarSet . callInfoFVs) emptyVarSet calls
  -- It's OK to use nonDetFoldUDFM here because we forget the ordering
  -- immediately by converting to a nondeterministic set.

-- | Free variables of all the calls in one 'CallInfoSet'.
callInfoFVs :: CallInfoSet -> VarSet
callInfoFVs (CIS _ call_info) =
  foldrBag (\(_, (_,fv)) vs -> unionVarSet fv vs) emptyVarSet call_info
------------------------------------------------------------
-- | 'UsageDetails' recording a single call of 'id' at the given type
-- and dictionary arguments.
singleCall :: Id -> [Maybe Type] -> [DictExpr] -> UsageDetails
singleCall id tys dicts
  = MkUD {ud_binds = emptyBag,
          ud_calls = unitDVarEnv id $ CIS id $
                     unitBag (CallKey tys, (dicts, call_fvs)) }
  where
    call_fvs = exprsFreeVars dicts `unionVarSet` tys_fvs
    tys_fvs  = tyCoVarsOfTypes (catMaybes tys)
        -- The type args (tys) are guaranteed to be part of the dictionary
        -- types, because they are just the constrained types,
        -- and the dictionary is therefore sure to be bound
        -- inside the binding for any type variables free in the type;
        -- hence it's safe to neglect tyvars free in tys when making
        -- the free-var set for this call
        -- BUT I don't trust this reasoning; play safe and include tys_fvs

        -- We don't include the 'id' itself.
-- | Record a call of 'f' at the given arguments, if it is one we could
-- profitably specialise.  'mkCallUDs' is a trace-friendly wrapper
-- around 'mkCallUDs'', which does the work.
mkCallUDs, mkCallUDs' :: SpecEnv -> Id -> [CoreExpr] -> UsageDetails
mkCallUDs env f args
  = -- pprTrace "mkCallUDs" (vcat [ ppr f, ppr args, ppr res ])
    res
  where
    res = mkCallUDs' env f args

mkCallUDs' env f args
  | not (want_calls_for f)  -- Imported from elsewhere
  || null theta             -- Not overloaded
  = emptyUDs

  | not (all type_determines_value theta)
  || not (spec_tys `lengthIs` n_tyvars)
  || not ( dicts   `lengthIs` n_dicts)
  || not (any (interestingDict env) dicts)    -- Note [Interesting dictionary arguments]
  -- See also Note [Specialisations already covered]
  = -- pprTrace "mkCallUDs: discarding" _trace_doc
    emptyUDs    -- Not overloaded, or no specialisation wanted

  | otherwise
  = -- pprTrace "mkCallUDs: keeping" _trace_doc
    singleCall f spec_tys dicts
  where
    _trace_doc = vcat [ppr f, ppr args, ppr n_tyvars, ppr n_dicts
                      , ppr (map (interestingDict env) dicts)]
    (tyvars, theta, _) = tcSplitSigmaTy (idType f)
    constrained_tyvars = tyCoVarsOfTypes theta
    n_tyvars           = length tyvars
    n_dicts            = length theta

    -- One entry per quantified tyvar: Just ty if the tyvar appears in the
    -- constraints (so the type is worth specialising on), Nothing otherwise
    spec_tys = [mk_spec_ty tv ty | (tv, ty) <- tyvars `type_zip` args]
    dicts    = [dict_expr | (_, dict_expr) <- theta `zip` (drop n_tyvars args)]

    -- ignores Coercion arguments
    type_zip :: [TyVar] -> [CoreExpr] -> [(TyVar, Type)]
    type_zip tvs      (Coercion _ : args) = type_zip tvs args
    type_zip (tv:tvs) (Type ty : args)    = (tv, ty) : type_zip tvs args
    type_zip _ _                          = []

    mk_spec_ty tyvar ty
      | tyvar `elemVarSet` constrained_tyvars = Just ty
      | otherwise                             = Nothing

    want_calls_for f = isLocalId f || isJust (maybeUnfoldingTemplate (realIdUnfolding f))
         -- For imported things, we gather call instances if
         -- there is an unfolding that we could in principle specialise
         -- We might still decide not to use it (consulting dflags)
         -- in specImports
         -- Use 'realIdUnfolding' to ignore the loop-breaker flag!

    type_determines_value pred    -- See Note [Type determines value]
      = case classifyPredType pred of
          ClassPred cls _ -> not (isIPClass cls)  -- Superclasses can't be IPs
          EqPred {}       -> True
          IrredPred {}    -> True   -- Things like (D []) where D is a
                                    -- Constraint-ranged family; Trac #7785
{-
Note [Type determines value]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Only specialise if all overloading is on non-IP *class* params,
because these are the ones whose *type* determines their *value*. In
particular, with implicit params, the type args *don't* say what the
value of the implicit param is! See Trac #7101
However, consider
type family D (v::*->*) :: Constraint
type instance D [] = ()
f :: D v => v Char -> Int
If we see a call (f "foo"), we'll pass a "dictionary"
() |> (g :: () ~ D [])
and it's good to specialise f at this dictionary.
So the question is: can an implicit parameter "hide inside" a
type-family constraint like (D a). Well, no. We don't allow
type instance D Maybe = ?x:Int
Hence the IrredPred case in type_determines_value.
See Trac #7785.
Note [Interesting dictionary arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
\a.\d:Eq a. let f = ... in ...(f d)...
There really is not much point in specialising f wrt the dictionary d,
because the code for the specialised f is not improved at all, because
d is lambda-bound. We simply get junk specialisations.
What is "interesting"? Just that it has *some* structure. But what about
variables?
* A variable might be imported, in which case its unfolding
will tell us whether it has useful structure
* Local variables are cloned on the way down (to avoid clashes when
we float dictionaries), and cloning drops the unfolding
(cloneIdBndr). Moreover, we make up some new bindings, and it's a
nuisance to give them unfoldings. So we keep track of the
"interesting" dictionaries as a VarSet in SpecEnv.
We have to take care to put any new interesting dictionary
bindings in the set.
We accidentally lost accurate tracking of local variables for a long
time, because cloned variables don't have unfoldings. But makes a
massive difference in a few cases, eg Trac #5113. For nofib as a
whole it's only a small win: 2.2% improvement in allocation for ansi,
1.2% for bspt, but mostly 0.0! Average 0.1% increase in binary size.
-}
-- | A dictionary argument is interesting if it has *some* structure;
-- see Note [Interesting dictionary arguments]
-- NB: "dictionary" arguments include constraints of all sorts,
-- including equality constraints; hence the Coercion case
interestingDict :: SpecEnv -> CoreExpr -> Bool
-- A bare variable is interesting only if we know something about it:
-- an unfolding, a data-con worker, or membership in se_interesting
interestingDict env (Var v) =  hasSomeUnfolding (idUnfolding v)
                            || isDataConWorkId v
                            || v `elemVarSet` se_interesting env
interestingDict _ (Type _)                = False
interestingDict _ (Coercion _)            = False
-- Look through type/coercion applications, ticks and casts
interestingDict env (App fn (Type _))     = interestingDict env fn
interestingDict env (App fn (Coercion _)) = interestingDict env fn
interestingDict env (Tick _ a)            = interestingDict env a
interestingDict env (Cast e _)            = interestingDict env e
-- Anything else (applications, lets, cases...) counts as structure
interestingDict _ _                       = True
-- | Combine two 'UsageDetails': bindings and calls are unioned.
-- Note that in the bindings, db2 may depend on db1 (see 'ud_binds').
plusUDs :: UsageDetails -> UsageDetails -> UsageDetails
plusUDs (MkUD {ud_binds = db1, ud_calls = calls1})
        (MkUD {ud_binds = db2, ud_calls = calls2})
  = MkUD { ud_binds = db1    `unionBags`  db2
         , ud_calls = calls1 `unionCalls` calls2 }

-- | Combine a list of 'UsageDetails'.
plusUDList :: [UsageDetails] -> UsageDetails
plusUDList = foldr plusUDs emptyUDs
-----------------------------
-- | All the binders of a bag of 'DictBind's (used only in debug traces).
_dictBindBndrs :: Bag DictBind -> [Id]
_dictBindBndrs = foldrBag (\(bind, _) acc -> bindersOf bind ++ acc) []

-- | Construct a 'DictBind' from a 'CoreBind'
mkDB :: CoreBind -> DictBind
mkDB bind = (bind, fvs)
  where
    fvs = bind_fvs bind
-- | Identify the free variables of a 'CoreBind'
bind_fvs :: CoreBind -> VarSet
bind_fvs (NonRec bndr rhs) = pair_fvs (bndr,rhs)
bind_fvs (Rec prs)         = foldl delVarSet rhs_fvs bndrs
                             -- In a Rec the binders scope over all the RHSs,
                             -- so remove them from the combined free-var set
  where
    bndrs   = map fst prs
    rhs_fvs = unionVarSets (map pair_fvs prs)

-- | Free variables of one (binder, rhs) pair, including those of the
-- binder itself (its IdInfo and its type)
pair_fvs :: (Id, CoreExpr) -> VarSet
pair_fvs (bndr, rhs) = exprFreeVars rhs `unionVarSet` idFreeVars bndr
        -- Don't forget variables mentioned in the
        -- rules of the bndr.  C.f. OccAnal.addRuleUsage
        -- Also tyvars mentioned in its type; they may not appear in the RHS
        --      type T a = Int
        --      x :: T a = 3
-- | Flatten a set of 'DictBind's and some other binding pairs into a single
-- recursive binding, including some additional bindings.
flattenDictBinds :: Bag DictBind -> [(Id,CoreExpr)] -> DictBind
flattenDictBinds dbs pairs
  = (Rec bindings, fvs)
  where
    -- Accumulate all the (id, rhs) pairs and the union of the
    -- cached free-variable sets
    (bindings, fvs) = foldrBag add
                               ([], emptyVarSet)
                               (dbs `snocBag` mkDB (Rec pairs))
    add (NonRec b r, fvs') (pairs, fvs) =
      ((b,r) : pairs, fvs `unionVarSet` fvs')
    add (Rec prs1,   fvs') (pairs, fvs) =
      (prs1 ++ pairs, fvs `unionVarSet` fvs')
snocDictBinds :: UsageDetails -> [DictBind] -> UsageDetails
-- Add ud_binds to the tail end of the bindings in uds
snocDictBinds uds dbs
  = uds { ud_binds = ud_binds uds `unionBags` listToBag dbs }
    -- listToBag preserves the order of dbs, exactly as the previous
    -- (foldr consBag emptyBag dbs) did, and matches addDictBinds below

-- | Add a single binding at the front
consDictBind :: DictBind -> UsageDetails -> UsageDetails
consDictBind bind uds = uds { ud_binds = bind `consBag` ud_binds uds }

-- | Add a list of bindings at the front
addDictBinds :: [DictBind] -> UsageDetails -> UsageDetails
addDictBinds binds uds = uds { ud_binds = listToBag binds `unionBags` ud_binds uds }

-- | Add a single binding at the end
snocDictBind :: UsageDetails -> DictBind -> UsageDetails
snocDictBind uds bind = uds { ud_binds = ud_binds uds `snocBag` bind }
-- | Prepend the dictionary bindings to a list of top-level binds,
-- dropping the cached free-variable sets.
wrapDictBinds :: Bag DictBind -> [CoreBind] -> [CoreBind]
wrapDictBinds dbs binds = foldrBag prepend binds dbs
  where
    prepend (bind, _) rest = bind : rest

-- | Wrap an expression in a @Let@ for each dictionary binding.
wrapDictBindsE :: Bag DictBind -> CoreExpr -> CoreExpr
wrapDictBindsE dbs expr = foldrBag wrap expr dbs
  where
    wrap (bind, _) body = Let bind body
----------------------
-- | Given binders going out of scope, split the 'UsageDetails' into the
-- part that can float past them and the bindings that must be dumped here.
dumpUDs :: [CoreBndr] -> UsageDetails -> (UsageDetails, Bag DictBind)
-- Used at a lambda or case binder; just dump anything mentioning the binder
dumpUDs bndrs uds@(MkUD { ud_binds = orig_dbs, ud_calls = orig_calls })
  | null bndrs = (uds, emptyBag)  -- Common in case alternatives
  | otherwise  = -- pprTrace "dumpUDs" (ppr bndrs $$ ppr free_uds $$ ppr dump_dbs) $
                 (free_uds, dump_dbs)
  where
    free_uds = MkUD { ud_binds = free_dbs, ud_calls = free_calls }
    bndr_set = mkVarSet bndrs
    (free_dbs, dump_dbs, dump_set) = splitDictBinds orig_dbs bndr_set
    free_calls = deleteCallsMentioning dump_set $   -- Drop calls mentioning bndr_set on the floor
                 deleteCallsFor bndrs orig_calls    -- Discard calls for bndr_set; there should be
                                                    -- no calls for any of the dicts in dump_dbs
-- | Like 'dumpUDs', but additionally returns a Bool that is True when
-- the dumped bindings are still mentioned by the surviving calls.
dumpBindUDs :: [CoreBndr] -> UsageDetails -> (UsageDetails, Bag DictBind, Bool)
-- Used at a lambda or case binder; just dump anything mentioning the binder
-- NOTE(review): the line above looks copied from 'dumpUDs'; the float_all
-- flag suggests this one is used at a let(rec) binding -- confirm
dumpBindUDs bndrs (MkUD { ud_binds = orig_dbs, ud_calls = orig_calls })
  = -- pprTrace "dumpBindUDs" (ppr bndrs $$ ppr free_uds $$ ppr dump_dbs) $
    (free_uds, dump_dbs, float_all)
  where
    free_uds = MkUD { ud_binds = free_dbs, ud_calls = free_calls }
    bndr_set = mkVarSet bndrs
    (free_dbs, dump_dbs, dump_set) = splitDictBinds orig_dbs bndr_set
    free_calls = deleteCallsFor bndrs orig_calls
    -- True if a surviving call mentions one of the dumped binders
    float_all = dump_set `intersectsVarSet` callDetailsFVs free_calls
-- | Extract the calls for 'fn' from the 'UsageDetails': returns the
-- remaining details (with fn's calls deleted) and fn's own call list.
callsForMe :: Id -> UsageDetails -> (UsageDetails, [CallInfo])
callsForMe fn (MkUD { ud_binds = orig_dbs, ud_calls = orig_calls })
  = -- pprTrace ("callsForMe")
    --          (vcat [ppr fn,
    --                 text "Orig dbs ="     <+> ppr (_dictBindBndrs orig_dbs),
    --                 text "Orig calls ="   <+> ppr orig_calls,
    --                 text "Dep set ="      <+> ppr dep_set,
    --                 text "Calls for me =" <+> ppr calls_for_me]) $
    (uds_without_me, calls_for_me)
  where
    uds_without_me = MkUD { ud_binds = orig_dbs
                          , ud_calls = delDVarEnv orig_calls fn }
    calls_for_me = case lookupDVarEnv orig_calls fn of
                     Nothing  -> []
                     Just cis -> filter_dfuns (ciSetToList cis)

    -- Binders (transitively) reachable from fn through the
    -- floated dictionary bindings
    dep_set = foldlBag go (unitVarSet fn) orig_dbs
    go dep_set (db,fvs) | fvs `intersectsVarSet` dep_set
                        = extendVarSetList dep_set (bindersOf db)
                        | otherwise = dep_set

    -- For a dfun, discard calls whose dicts depend on fn itself;
    -- see Note [Specialisation of dictionary functions]
    filter_dfuns | isDFunId fn = filter ok_call
                 | otherwise   = \cs -> cs

    ok_call (_, (_,fvs)) = not (fvs `intersectsVarSet` dep_set)
----------------------
-- | Partition the dict binds into those independent of the binder set
-- (free_dbs) and those that transitively mention it (dump_dbs);
-- dump_set is bndr_set extended with the binders of dump_dbs.
splitDictBinds :: Bag DictBind -> IdSet -> (Bag DictBind, Bag DictBind, IdSet)
-- Returns (free_dbs, dump_dbs, dump_set)
splitDictBinds dbs bndr_set
  = foldlBag split_db (emptyBag, emptyBag, bndr_set) dbs
    -- Important that it's foldl not foldr;
    -- we're accumulating the set of dumped ids in dump_set
  where
    split_db (free_dbs, dump_dbs, dump_idset) db@(bind, fvs)
      | dump_idset `intersectsVarSet` fvs   -- Dump it
      = (free_dbs, dump_dbs `snocBag` db,
         extendVarSetList dump_idset (bindersOf bind))

      | otherwise   -- Don't dump it
      = (free_dbs `snocBag` db, dump_dbs, dump_idset)
----------------------
-- | Remove every call whose free variables intersect @bs@.
deleteCallsMentioning :: VarSet -> CallDetails -> CallDetails
deleteCallsMentioning bs calls = mapDVarEnv (ciSetFilter independent) calls
  where
    independent (_, (_, fvs)) = not (fvs `intersectsVarSet` bs)

-- | Remove the call records *for* the given binders themselves.
deleteCallsFor :: [Id] -> CallDetails -> CallDetails
deleteCallsFor bs calls = delDVarEnvList calls bs
{-
************************************************************************
* *
\subsubsection{Boring helper functions}
* *
************************************************************************
-}
-- | The monad of the specialiser: a state monad carrying a unique
-- supply plus the module and flags the specialiser needs.
newtype SpecM a = SpecM (State SpecState a)

data SpecState = SpecState {
                     spec_uniq_supply :: UniqSupply,  -- Source of fresh uniques
                     spec_module :: Module,           -- Module being specialised
                     spec_dflags :: DynFlags          -- Command-line flags
                 }
instance Functor SpecM where
    fmap = liftM

instance Applicative SpecM where
    pure x = SpecM $ return x
    (<*>) = ap

instance Monad SpecM where
    SpecM x >>= f = SpecM $ do y <- x
                               case f y of
                                   SpecM z ->
                                       z
    -- Delegate 'fail' to the underlying State monad
    fail str = SpecM $ fail str

#if __GLASGOW_HASKELL__ > 710
-- MonadFail instance for GHCs whose Monad no longer carries 'fail'
instance MonadFail.MonadFail SpecM where
    fail str = SpecM $ fail str
#endif
-- Fresh uniques come from the supply threaded through 'SpecState';
-- each operation stores the unused part of the supply back in the state
instance MonadUnique SpecM where
    getUniqueSupplyM
        = SpecM $ do st <- get
                     let (us1, us2) = splitUniqSupply $ spec_uniq_supply st
                     put $ st { spec_uniq_supply = us2 }
                     return us1

    getUniqueM
        = SpecM $ do st <- get
                     let (u,us') = takeUniqFromSupply $ spec_uniq_supply st
                     put $ st { spec_uniq_supply = us' }
                     return u
instance HasDynFlags SpecM where
    getDynFlags = SpecM $ fmap spec_dflags get

instance HasModule SpecM where
    getModule = SpecM $ fmap spec_module get
-- | Run a 'SpecM' computation in 'CoreM', seeding it with a fresh
-- unique supply drawn from the 'CoreM' environment.
runSpecM :: DynFlags -> Module -> SpecM a -> CoreM a
runSpecM dflags this_mod (SpecM spec)
    = do us <- getUniqueSupplyM
         let initialState = SpecState {
                                spec_uniq_supply = us,
                                spec_module = this_mod,
                                spec_dflags = dflags
                            }
         return $ evalState spec initialState
-- | Map a usage-collecting function over a list, combining all the
-- resulting 'UsageDetails' with 'plusUDs'.
mapAndCombineSM :: (a -> SpecM (b, UsageDetails)) -> [a] -> SpecM ([b], UsageDetails)
mapAndCombineSM f = go
  where
    go []     = return ([], emptyUDs)
    go (x:xs) = do { (y,  uds1) <- f x
                   ; (ys, uds2) <- go xs
                   ; return (y:ys, uds1 `plusUDs` uds2) }
-- | Extend the type substitution carried in the 'SpecEnv'.
extendTvSubstList :: SpecEnv -> [(TyVar,Type)] -> SpecEnv
extendTvSubstList env tv_binds
  = env { se_subst = CoreSubst.extendTvSubstList (se_subst env) tv_binds }

-- | Apply the environment's substitution to a type.
substTy :: SpecEnv -> Type -> Type
substTy env ty = CoreSubst.substTy (se_subst env) ty

-- | Apply the environment's substitution to a coercion.
substCo :: SpecEnv -> Coercion -> Coercion
substCo env co = CoreSubst.substCo (se_subst env) co

-- | Substitute one binder, returning the updated environment and binder.
substBndr :: SpecEnv -> CoreBndr -> (SpecEnv, CoreBndr)
substBndr env bs = case CoreSubst.substBndr (se_subst env) bs of
                     (subst', bs') -> (env { se_subst = subst' }, bs')

-- | 'substBndr' lifted to a list of binders.
substBndrs :: SpecEnv -> [CoreBndr] -> (SpecEnv, [CoreBndr])
substBndrs env bs = case CoreSubst.substBndrs (se_subst env) bs of
                      (subst', bs') -> (env { se_subst = subst' }, bs')
cloneBindSM :: SpecEnv -> CoreBind -> SpecM (SpecEnv, SpecEnv, CoreBind)
-- Clone the binders of the bind; return new bind with the cloned binders
-- Return the substitution to use for RHSs, and the one to use for the body
cloneBindSM env@(SE { se_subst = subst, se_interesting = interesting }) (NonRec bndr rhs)
  = do { us <- getUniqueSupplyM
       ; let (subst', bndr') = CoreSubst.cloneIdBndr subst us bndr
             -- Record the cloned binder as interesting when its RHS has
             -- structure; see Note [Interesting dictionary arguments]
             interesting' | interestingDict env rhs
                          = interesting `extendVarSet` bndr'
                          | otherwise = interesting
       -- NonRec: the binder does not scope over the RHS, so the RHS
       -- environment is the unmodified 'env'
       ; return (env, env { se_subst = subst', se_interesting = interesting' }
                , NonRec bndr' rhs) }

cloneBindSM env@(SE { se_subst = subst, se_interesting = interesting }) (Rec pairs)
  = do { us <- getUniqueSupplyM
       ; let (subst', bndrs') = CoreSubst.cloneRecIdBndrs subst us (map fst pairs)
             env' = env { se_subst = subst'
                        , se_interesting = interesting `extendVarSetList`
                                           [ v | (v,r) <- pairs, interestingDict env r ] }
       -- Rec: the binders scope over the RHSs too, so both environments are env'
       ; return (env', env', Rec (bndrs' `zip` map snd pairs)) }
newDictBndr :: SpecEnv -> CoreBndr -> SpecM CoreBndr
-- Make up completely fresh binders for the dictionaries
-- Their bindings are going to float outwards
newDictBndr env b = do { uniq <- getUniqueM
                       ; let n   = idName b
                             -- Apply the current substitution to the type
                             ty' = substTy env (idType b)
                       ; return (mkUserLocalOrCoVar (nameOccName n) uniq ty' (getSrcSpan n)) }
-- | Make a fresh Id for a specialised copy of 'old_id' at type 'new_ty',
-- with an occurrence name recognisably derived from the old one.
newSpecIdSM :: Id -> Type -> SpecM Id
newSpecIdSM old_id new_ty
  = do { uniq <- getUniqueM
       ; let old_name = idName old_id
             spec_occ = mkSpecOcc (nameOccName old_name)
             src_span = getSrcSpan old_name
       ; return (mkUserLocalOrCoVar spec_occ uniq new_ty src_span) }
{-
Old (but interesting) stuff about unboxed bindings
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What should we do when a value is specialised to a *strict* unboxed value?
map_*_* f (x:xs) = let h = f x
t = map f xs
in h:t
Could convert let to case:
map_*_Int# f (x:xs) = case f x of h# ->
let t = map f xs
in h#:t
This may be undesirable since it forces evaluation here, but the value
may not be used in all branches of the body. In the general case this
transformation is impossible since the mutual recursion in a letrec
cannot be expressed as a case.
There is also a problem with top-level unboxed values, since our
implementation cannot handle unboxed values at the top level.
Solution: Lift the binding of the unboxed value and extract it when it
is used:
map_*_Int# f (x:xs) = let h = case (f x) of h# -> _Lift h#
t = map f xs
in case h of
_Lift h# -> h#:t
Now give it to the simplifier and the _Lifting will be optimised away.
The benefit is that we have given the specialised "unboxed" values a
very simple lifted semantics and then leave it up to the simplifier to
optimise it --- knowing that the overheads will be removed in nearly
all cases.
In particular, the value will only be evaluated in the branches of the
program which use it, rather than being forced at the point where the
value is bound. For example:
filtermap_*_* p f (x:xs)
= let h = f x
t = ...
in case p x of
True -> h:t
False -> t
==>
filtermap_*_Int# p f (x:xs)
= let h = case (f x) of h# -> _Lift h#
t = ...
in case p x of
True -> case h of _Lift h#
-> h#:t
False -> t
The binding for h can still be inlined in the one branch and the
_Lifting eliminated.
Question: When won't the _Lifting be eliminated?
Answer: When they are at the top-level (where it is necessary) or when
inlining would duplicate work (or possibly code depending on
options). However, the _Lifting will still be eliminated if the
strictness analyser deems the lifted binding strict.
-}
| olsner/ghc | compiler/specialise/Specialise.hs | bsd-3-clause | 96,464 | 1 | 22 | 27,877 | 11,162 | 6,058 | 5,104 | -1 | -1 |
-- | This module facilitates the use of vectors by providing various functionalities of vector calculus
module Vector (
-- * types
Vector,
-- * functions
dot,
sub,
add,
app,
size,
i,
j,
k,
cross,
norm,
unity
)where
-- | A vector, represented as a list of Double components.
type Vector = [Double]

-- | Dot product of two vectors.  If the vectors have different lengths,
-- the extra components of the longer one are ignored.
dot :: Vector -> Vector -> Double
dot xs ys = foldr (+) 0 (zipWith (*) xs ys)

-- | Component-wise difference of two vectors.  If one vector is shorter,
-- its missing components are treated as 0.
-- (Fix: the original had no @sub [] []@ case and crashed on two empty
-- vectors with a pattern-match failure.)
sub :: Vector -> Vector -> Vector
sub []     ys     = map negate ys   -- 0 - y = -y for each remaining component
sub xs     []     = xs              -- x - 0 = x for each remaining component
sub (x:xs) (y:ys) = (x - y) : sub xs ys

-- | Component-wise sum of two vectors.  If one vector is shorter,
-- its missing components are treated as 0.
-- (Fix: the original had no @add [] []@ case and crashed on two empty
-- vectors with a pattern-match failure.)
add :: Vector -> Vector -> Vector
add []     ys     = ys
add xs     []     = xs
add (x:xs) (y:ys) = (x + y) : add xs ys

-- | Apply a function component-wise to two vectors, truncating to the
-- shorter length (exactly 'zipWith').
app :: (Double -> Double -> Double) -> Vector -> Vector -> Vector
app = zipWith

-- | Number of components in a vector (the original hand-rolled this
-- with a lazy 'foldl'; 'length' is the idiomatic, leak-free equivalent).
size :: Vector -> Int
size = length

-- | The unit vector along the x axis.
i :: Vector
i = [1,0,0]

-- | The unit vector along the y axis.
j :: Vector
j = [0,1,0]

-- | The unit vector along the z axis.
k :: Vector
k = [0,0,1]

-- | Cross product of two 3-dimensional vectors.
-- Calling it with vectors of any other dimension is an error; the
-- fallback equation gives a descriptive message instead of a bare
-- pattern-match failure.
cross :: Vector -> Vector -> Vector
cross [x1,x2,x3] [y1,y2,y3] = [x2*y3 - x3*y2, x3*y1 - x1*y3, x1*y2 - x2*y1]
cross _ _ = error "Vector.cross: both arguments must be 3-dimensional"

-- | Euclidean norm of a vector.  Uses an explicitly strict accumulator
-- to avoid the thunk build-up of the original lazy 'foldl'; the
-- summation order is unchanged.
norm :: Vector -> Double
norm = sqrt . go 0
  where
    go acc []     = acc
    go acc (t:ts) = let acc' = acc + t * t in acc' `seq` go acc' ts

-- | Unit vector in the direction of the argument.  NB: for the zero
-- vector the components come out as NaN (division by a zero norm),
-- matching the original behaviour.
unity :: Vector -> Vector
unity v = map (/ len) v
  where len = norm v
| ankeshs/numerikell | tmp/Vector.hs | bsd-3-clause | 1,631 | 0 | 10 | 451 | 734 | 410 | 324 | 45 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[Demand]{@Demand@: A decoupled implementation of a demand domain}
-}
{-# LANGUAGE CPP, FlexibleInstances, TypeSynonymInstances, RecordWildCards #-}
module Eta.BasicTypes.Demand (
StrDmd, UseDmd(..), Count,
Demand, CleanDemand, getStrDmd, getUseDmd,
mkProdDmd, mkOnceUsedDmd, mkManyUsedDmd, mkHeadStrict, oneifyDmd,
toCleanDmd,
absDmd, topDmd, botDmd, seqDmd,
lubDmd, bothDmd,
lazyApply1Dmd, lazyApply2Dmd, strictApply1Dmd,
catchArgDmd,
isTopDmd, isAbsDmd, isSeqDmd,
peelUseCall, cleanUseDmd_maybe, strictenDmd, bothCleanDmd,
addCaseBndrDmd,
DmdType(..), dmdTypeDepth, lubDmdType, bothDmdType,
nopDmdType, botDmdType, mkDmdType,
addDemand, removeDmdTyArgs,
BothDmdArg, mkBothDmdArg, toBothDmdArg,
DmdEnv, emptyDmdEnv,
peelFV, findIdDemand,
DmdResult, CPRResult,
isBotRes, isTopRes,
topRes, botRes, exnRes, cprProdRes,
vanillaCprProdRes, cprSumRes,
appIsBottom, isBottomingSig, pprIfaceStrictSig,
trimCPRInfo, returnsCPR_maybe,
StrictSig(..), mkStrictSig, mkClosedStrictSig,
nopSig, botSig, exnSig, cprProdSig,
isTopSig, hasDemandEnvSig,
splitStrictSig, strictSigDmdEnv,
increaseStrictSigArity,
seqDemand, seqDemandList, seqDmdType, seqStrictSig,
evalDmd, cleanEvalDmd, cleanEvalProdDmd, isStrictDmd,
splitDmdTy, splitFVs,
deferAfterIO,
postProcessUnsat, postProcessDmdType,
splitProdDmd_maybe, peelCallDmd, mkCallDmd, mkWorkerDemand,
dmdTransformSig, dmdTransformDataConSig, dmdTransformDictSelSig,
argOneShots, argsOneShots, saturatedByOneShots,
trimToType, TypeShape(..),
useCount, isUsedOnce, reuseEnv,
killUsageDemand, killUsageSig, zapUsageDemand, zapUsageEnvSig,
zapUsedOnceDemand, zapUsedOnceSig,
strictifyDictDmd
) where
#include "HsVersions.h"
import Eta.Main.DynFlags
import Eta.Utils.Outputable
import Eta.BasicTypes.Var ( Var )
import Eta.BasicTypes.VarEnv
import Eta.Utils.UniqFM
import Eta.Utils.Util
import Eta.BasicTypes.BasicTypes
import Eta.Utils.Binary
import Eta.Utils.Maybes ( orElse )
import Eta.Types.Type ( Type, isUnLiftedType )
import Eta.Types.TyCon ( isNewTyCon, isClassTyCon )
import Eta.BasicTypes.DataCon ( splitDataProductType_maybe )
{-
************************************************************************
* *
Joint domain for Strictness and Absence
* *
************************************************************************
-}
-- | A demand pairs a strictness component @s@ with a usage
-- (absence\/cardinality) component @u@; the two lattices are analysed
-- together but kept decoupled.
data JointDmd s u = JD { sd :: s, ud :: u }
  deriving ( Eq, Show )

-- | Project out the strictness component.
getStrDmd :: JointDmd s u -> s
getStrDmd = sd

-- | Project out the usage component.
getUseDmd :: JointDmd s u -> u
getUseDmd = ud

-- Pretty-printing: rendered as <s,u>
instance (Outputable s, Outputable u) => Outputable (JointDmd s u) where
  ppr (JD {sd = s, ud = u}) = angleBrackets (ppr s <> char ',' <> ppr u)

-- Well-formedness preserving constructors for the joint domain
mkJointDmd :: s -> u -> JointDmd s u
mkJointDmd s u = JD { sd = s, ud = u }

-- | Pair up strictness and usage components list-wise; the lists must
-- have equal length (checked by zipWithEqual in debug builds).
mkJointDmds :: [s] -> [u] -> [JointDmd s u]
mkJointDmds ss as = zipWithEqual "mkJointDmds" mkJointDmd ss as
{-
************************************************************************
* *
Strictness domain
* *
************************************************************************
Lazy
|
ExnStr x -
|
HeadStr
/ \
SCall SProd
\ /
HyperStr
Note [Exceptions and strictness]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Exceptions need rather careful treatment, especially because of 'catch'
('catch#'), 'catchSTM' ('catchSTM#'), and 'orElse' ('catchRetry#').
See Trac #11555, #10712 and #13330, and for some more background, #11222.
There are three main pieces.
* The Termination type includes ThrowsExn, meaning "under the given
demand this expression either diverges or throws an exception".
This is relatively uncontroversial. The primops raise# and
raiseIO# both return ThrowsExn; nothing else does.
* An ArgStr has an ExnStr flag to say how to process the Termination
result of the argument. If the ExnStr flag is ExnStr, we squash
ThrowsExn to topRes. (This is done in postProcessDmdResult.)
Here is the key example
catchRetry# (\s -> retry# s) blah
We analyse the argument (\s -> retry# s) with demand
Str ExnStr (SCall HeadStr)
i.e. with the ExnStr flag set.
- First we analyse the argument with the "clean-demand" (SCall
HeadStr), getting a DmdResult of ThrowsExn from the saturated
application of retry#.
- Then we apply the post-processing for the shell, squashing the
ThrowsExn to topRes.
This also applies uniformly to free variables. Consider
let r = \st -> retry# st
in catchRetry# (\s -> ...(r s')..) handler st
If we give the first argument of catch a strict signature, we'll get a demand
'C(S)' for 'r'; that is, 'r' is definitely called with one argument, which
indeed it is. But when we post-process the free-var demands on catchRetry#'s
argument (in postProcessDmdEnv), we'll give 'r' a demand of (Str ExnStr (SCall
HeadStr)); and if we feed that into r's RHS (which would be reasonable) we'll
squash the retry just as if we'd inlined 'r'.
* We don't try to get clever about 'catch#' and 'catchSTM#' at the moment. We
previously (#11222) tried to take advantage of the fact that 'catch#' calls its
first argument eagerly. See especially commit
9915b6564403a6d17651e9969e9ea5d7d7e78e7f. We analyzed that first argument with
a strict demand, and then performed a post-processing step at the end to change
ThrowsExn to TopRes. The trouble, I believe, is that to use this approach
correctly, we'd need somewhat different information about that argument.
Diverges, ThrowsExn (i.e., diverges or throws an exception), and Dunno are the
wrong split here. In order to evaluate part of the argument speculatively,
we'd need to know that it *does not throw an exception*. That is, that it
either diverges or succeeds. But we don't currently have a way to talk about
that. Abstractly and approximately,
catch# m f s = case ORACLE m s of
DivergesOrSucceeds -> m s
Fails exc -> f exc s
where the magical ORACLE determines whether or not (m s) throws an exception
when run, and if so which one. If we want, we can safely consider (catch# m f s)
strict in anything that both branches are strict in (by performing demand
analysis for 'catch#' in the same way we do for case). We could also safely
consider it strict in anything demanded by (m s) that is guaranteed not to
throw an exception under that demand, but I don't know if we have the means
to express that.
My mind keeps turning to this model (not as an actual change to the type, but
as a way to think about what's going on in the analysis):
newtype IO a = IO {unIO :: State# s -> (# s, (# SomeException | a #) #)}
instance Monad IO where
return a = IO $ \s -> (# s, (# | a #) #)
IO m >>= f = IO $ \s -> case m s of
(# s', (# e | #) #) -> (# s', e #)
(# s', (# | a #) #) -> unIO (f a) s
raiseIO# e s = (# s, (# e | #) #)
catch# m f s = case m s of
(# s', (# e | #) #) -> f e s'
res -> res
Thinking about it this way seems likely to be productive for analyzing IO
exception behavior, but imprecise exceptions and asynchronous exceptions remain
quite slippery beasts. Can we incorporate them? I think we can. We can imagine
applying 'seq#' to evaluate @m s@, determining whether it throws an imprecise
or asynchronous exception or whether it succeeds or throws an IO exception.
This confines the peculiarities to 'seq#', which is indeed rather essentially
peculiar.
-}
-- | Vanilla strictness domain
data StrDmd
= HyperStr -- ^ Hyper-strict (bottom of the lattice).
-- See Note [HyperStr and Use demands]
| SCall StrDmd -- ^ Call demand
-- Used only for values of function type
| SProd [ArgStr] -- ^ Product
-- Used only for values of product type
-- Invariant: not all components are HyperStr (use HyperStr)
-- not all components are Lazy (use HeadStr)
| HeadStr -- ^ Head-Strict
-- A polymorphic demand: used for values of all types,
-- including a type variable
deriving ( Eq, Show )
-- | Strictness of a function argument.
type ArgStr = Str StrDmd
-- | Strictness demand.
data Str s = Lazy -- ^ Lazy (top of the lattice)
| Str ExnStr s -- ^ Strict
deriving ( Eq, Show )
-- | How are exceptions handled for strict demands?
data ExnStr -- See Note [Exceptions and strictness]
= VanStr -- ^ "Vanilla" case, ordinary strictness
| ExnStr -- ^ @Str ExnStr d@ means be strict like @d@ but then degrade
-- the 'Termination' info 'ThrowsExn' to 'Dunno'.
-- e.g. the first argument of @catch@ has this strictness.
deriving( Eq, Show )
-- Well-formedness preserving constructors for the Strictness domain
strBot, strTop :: ArgStr
strBot = Str VanStr HyperStr
strTop = Lazy
mkSCall :: StrDmd -> StrDmd
mkSCall HyperStr = HyperStr
mkSCall s = SCall s
mkSProd :: [ArgStr] -> StrDmd
mkSProd sx
| any isHyperStr sx = HyperStr
| all isLazy sx = HeadStr
| otherwise = SProd sx
-- | Is this argument demand lazy, i.e. the top of the strictness lattice?
isLazy :: ArgStr -> Bool
isLazy str = case str of
  Lazy    -> True
  Str _ _ -> False
-- | Is this argument demand hyper-strict (bottom of the strictness lattice)?
isHyperStr :: ArgStr -> Bool
isHyperStr str = case str of
  Str _ HyperStr -> True
  _              -> False
-- Pretty-printing
instance Outputable StrDmd where
ppr HyperStr = char 'B'
ppr (SCall s) = char 'C' <> parens (ppr s)
ppr HeadStr = char 'S'
ppr (SProd sx) = char 'S' <> parens (hcat (map ppr sx))
instance Outputable ArgStr where
ppr (Str x s) = (case x of VanStr -> empty; ExnStr -> char 'x')
<> ppr s
ppr Lazy = char 'L'
lubArgStr :: ArgStr -> ArgStr -> ArgStr
lubArgStr Lazy _ = Lazy
lubArgStr _ Lazy = Lazy
lubArgStr (Str x1 s1) (Str x2 s2) = Str (x1 `lubExnStr` x2) (s1 `lubStr` s2)
lubExnStr :: ExnStr -> ExnStr -> ExnStr
lubExnStr VanStr VanStr = VanStr
lubExnStr _ _ = ExnStr -- ExnStr is lazier
lubStr :: StrDmd -> StrDmd -> StrDmd
lubStr HyperStr s = s
lubStr (SCall s1) HyperStr = SCall s1
lubStr (SCall _) HeadStr = HeadStr
lubStr (SCall s1) (SCall s2) = SCall (s1 `lubStr` s2)
lubStr (SCall _) (SProd _) = HeadStr
lubStr (SProd sx) HyperStr = SProd sx
lubStr (SProd _) HeadStr = HeadStr
lubStr (SProd s1) (SProd s2)
| s1 `equalLength` s2 = mkSProd (zipWith lubArgStr s1 s2)
| otherwise = HeadStr
lubStr (SProd _) (SCall _) = HeadStr
lubStr HeadStr _ = HeadStr
bothArgStr :: ArgStr -> ArgStr -> ArgStr
bothArgStr Lazy s = s
bothArgStr s Lazy = s
bothArgStr (Str x1 s1) (Str x2 s2) = Str (x1 `bothExnStr` x2) (s1 `bothStr` s2)
bothExnStr :: ExnStr -> ExnStr -> ExnStr
bothExnStr ExnStr ExnStr = ExnStr
bothExnStr _ _ = VanStr
bothStr :: StrDmd -> StrDmd -> StrDmd
bothStr HyperStr _ = HyperStr
bothStr HeadStr s = s
bothStr (SCall _) HyperStr = HyperStr
bothStr (SCall s1) HeadStr = SCall s1
bothStr (SCall s1) (SCall s2) = SCall (s1 `bothStr` s2)
bothStr (SCall _) (SProd _) = HyperStr -- Weird
bothStr (SProd _) HyperStr = HyperStr
bothStr (SProd s1) HeadStr = SProd s1
bothStr (SProd s1) (SProd s2)
| s1 `equalLength` s2 = mkSProd (zipWith bothArgStr s1 s2)
| otherwise = HyperStr -- Weird
bothStr (SProd _) (SCall _) = HyperStr
-- utility functions to deal with memory leaks
seqStrDmd :: StrDmd -> ()
seqStrDmd (SProd ds) = seqStrDmdList ds
seqStrDmd (SCall s) = seqStrDmd s
seqStrDmd _ = ()
seqStrDmdList :: [ArgStr] -> ()
seqStrDmdList [] = ()
seqStrDmdList (d:ds) = seqArgStr d `seq` seqStrDmdList ds
seqArgStr :: ArgStr -> ()
seqArgStr Lazy = ()
seqArgStr (Str x s) = x `seq` seqStrDmd s
-- Splitting polymorphic demands
splitArgStrProdDmd :: Int -> ArgStr -> Maybe [ArgStr]
splitArgStrProdDmd n Lazy = Just (replicate n Lazy)
splitArgStrProdDmd n (Str _ s) = splitStrProdDmd n s
splitStrProdDmd :: Int -> StrDmd -> Maybe [ArgStr]
splitStrProdDmd n HyperStr = Just (replicate n strBot)
splitStrProdDmd n HeadStr = Just (replicate n strTop)
splitStrProdDmd n (SProd ds) = WARN( not (ds `lengthIs` n),
text "splitStrProdDmd" $$ ppr n $$ ppr ds )
Just ds
splitStrProdDmd _ (SCall {}) = Nothing
-- This can happen when the programmer uses unsafeCoerce,
-- and we don't then want to crash the compiler (Trac #9208)
{-
************************************************************************
* *
Absence domain
* *
************************************************************************
Used
/ \
UCall UProd
\ /
UHead
|
Count x -
|
Abs
-}
-- | Domain for genuine usage
data UseDmd
= UCall Count UseDmd -- ^ Call demand for absence.
-- Used only for values of function type
| UProd [ArgUse] -- ^ Product.
-- Used only for values of product type
-- See Note [Don't optimise UProd(Used) to Used]
--
-- Invariant: Not all components are Abs
-- (in that case, use UHead)
| UHead -- ^ May be used but its sub-components are
-- definitely *not* used. Roughly U(AAA)
-- e.g. the usage of @x@ in @x `seq` e@
-- A polymorphic demand: used for values of all types,
-- including a type variable
-- Since (UCall _ Abs) is ill-typed, UHead doesn't
-- make sense for lambdas
| Used -- ^ May be used and its sub-components may be used.
-- (top of the lattice)
deriving ( Eq, Show )
-- Extended usage demand for absence and counting
type ArgUse = Use UseDmd
data Use u
= Abs -- Definitely unused
-- Bottom of the lattice
| Use Count u -- May be used with some cardinality
deriving ( Eq, Show )
-- | Abstract counting of usages
data Count = One | Many
deriving ( Eq, Show )
-- Pretty-printing
instance Outputable ArgUse where
ppr Abs = char 'A'
ppr (Use Many a) = ppr a
ppr (Use One a) = char '1' <> char '*' <> ppr a
instance Outputable UseDmd where
ppr Used = char 'U'
ppr (UCall c a) = char 'C' <> ppr c <> parens (ppr a)
ppr UHead = char 'H'
ppr (UProd as) = char 'U' <> parens (hcat (punctuate (char ',') (map ppr as)))
instance Outputable Count where
ppr One = char '1'
ppr Many = text ""
useBot, useTop :: ArgUse
useBot = Abs
useTop = Use Many Used
mkUCall :: Count -> UseDmd -> UseDmd
--mkUCall c Used = Used c
mkUCall c a = UCall c a
mkUProd :: [ArgUse] -> UseDmd
mkUProd ux
| all (== Abs) ux = UHead
| otherwise = UProd ux
-- | Least upper bound of two usage counts: the result is 'One' exactly
-- when both arguments are 'One'.
lubCount :: Count -> Count -> Count
lubCount One One = One
lubCount _   _   = Many
lubArgUse :: ArgUse -> ArgUse -> ArgUse
lubArgUse Abs x = x
lubArgUse x Abs = x
lubArgUse (Use c1 a1) (Use c2 a2) = Use (lubCount c1 c2) (lubUse a1 a2)
lubUse :: UseDmd -> UseDmd -> UseDmd
lubUse UHead u = u
lubUse (UCall c u) UHead = UCall c u
lubUse (UCall c1 u1) (UCall c2 u2) = UCall (lubCount c1 c2) (lubUse u1 u2)
lubUse (UCall _ _) _ = Used
lubUse (UProd ux) UHead = UProd ux
lubUse (UProd ux1) (UProd ux2)
| ux1 `equalLength` ux2 = UProd $ zipWith lubArgUse ux1 ux2
| otherwise = Used
lubUse (UProd {}) (UCall {}) = Used
-- lubUse (UProd {}) Used = Used
lubUse (UProd ux) Used = UProd (map (`lubArgUse` useTop) ux)
lubUse Used (UProd ux) = UProd (map (`lubArgUse` useTop) ux)
lubUse Used _ = Used -- Note [Used should win]
-- `both` is different from `lub` in its treatment of counting; if
-- `both` is computed for two used, the result always has
-- cardinality `Many` (except for the inner demands of UCall demand -- [TODO] explain).
-- Also, x `bothUse` x /= x (for anything but Abs).
bothArgUse :: ArgUse -> ArgUse -> ArgUse
bothArgUse Abs x = x
bothArgUse x Abs = x
bothArgUse (Use _ a1) (Use _ a2) = Use Many (bothUse a1 a2)
bothUse :: UseDmd -> UseDmd -> UseDmd
bothUse UHead u = u
bothUse (UCall c u) UHead = UCall c u
-- Exciting special treatment of inner demand for call demands:
-- use `lubUse` instead of `bothUse`!
bothUse (UCall _ u1) (UCall _ u2) = UCall Many (u1 `lubUse` u2)
bothUse (UCall {}) _ = Used
bothUse (UProd ux) UHead = UProd ux
bothUse (UProd ux1) (UProd ux2)
| ux1 `equalLength` ux2 = UProd $ zipWith bothArgUse ux1 ux2
| otherwise = Used
bothUse (UProd {}) (UCall {}) = Used
-- bothUse (UProd {}) Used = Used -- Note [Used should win]
bothUse Used (UProd ux) = UProd (map (`bothArgUse` useTop) ux)
bothUse (UProd ux) Used = UProd (map (`bothArgUse` useTop) ux)
bothUse Used _ = Used -- Note [Used should win]
-- | Strip one call layer off a usage demand, exposing the call's
-- cardinality and the demand on the result; 'Nothing' for non-calls.
peelUseCall :: UseDmd -> Maybe (Count, UseDmd)
peelUseCall use = case use of
  UCall c body -> Just (c, body)
  _            -> Nothing
addCaseBndrDmd :: Demand -- On the case binder
               -> [Demand] -- On the components of the constructor
               -> [Demand] -- Final demands for the components of the constructor
-- See Note [Demand on case-alternative binders]
-- Using the case binder uses all of its components too, so the binder's
-- demand (split product-wise) is 'both'-ed onto each component demand.
addCaseBndrDmd (JD { sd = ms, ud = mu }) alt_dmds
  = case mu of
     Abs -> alt_dmds
     Use _ u -> zipWith bothDmd alt_dmds (mkJointDmds ss us)
             where
                -- NB: lazy pattern bindings, forced only in this Use
                -- branch, where the splits cannot fail
                Just ss = splitArgStrProdDmd arity ms -- Guaranteed not to be a call
                Just us = splitUseProdDmd arity u -- Ditto
  where
    arity = length alt_dmds
{- Note [Demand on case-alternative binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The demand on a binder in a case alternative comes
(a) From the demand on the binder itself
(b) From the demand on the case binder
Forgetting (b) led directly to Trac #10148.
Example. Source code:
f x@(p,_) = if p then foo x else True
foo (p,True) = True
foo (p,q) = foo (q,p)
After strictness analysis:
f = \ (x_an1 [Dmd=<S(SL),1*U(U,1*U)>] :: (Bool, Bool)) ->
case x_an1
of wild_X7 [Dmd=<L,1*U(1*U,1*U)>]
{ (p_an2 [Dmd=<S,1*U>], ds_dnz [Dmd=<L,A>]) ->
case p_an2 of _ {
False -> GHC.Types.True;
True -> foo wild_X7 }
It's true that ds_dnz is *itself* absent, but the use of wild_X7 means
that it is very much alive and demanded. See Trac #10148 for how the
consequences play out.
This is needed even for non-product types, in case the case-binder
is used but the components of the case alternative are not.
Note [Don't optimise UProd(Used) to Used]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
These two UseDmds:
UProd [Used, Used] and Used
are semantically equivalent, but we do not turn the former into
the latter, for a regrettable-subtle reason. Suppose we did.
then
f (x,y) = (y,x)
would get
StrDmd = Str = SProd [Lazy, Lazy]
UseDmd = Used = UProd [Used, Used]
But with the joint demand of <Str, Used> doesn't convey any clue
that there is a product involved, and so the worthSplittingFun
will not fire. (We'd need to use the type as well to make it fire.)
Moreover, consider
g h p@(_,_) = h p
This too would get <Str, Used>, but this time there really isn't any
point in w/w since the components of the pair are not used at all.
So the solution is: don't aggressively collapse UProd [Used,Used] to
Used; instead leave it as-is. In effect we are using the UseDmd to do a
little bit of boxity analysis. Not very nice.
Note [Used should win]
~~~~~~~~~~~~~~~~~~~~~~
Both in lubUse and bothUse we want (Used `both` UProd us) to be Used.
Why? Because Used carries the implication the whole thing is used,
box and all, so we don't want to w/w it. If we use it both boxed and
unboxed, then we are definitely using the box, and so we are quite
likely to pay a reboxing cost. So we make Used win here.
Example is in the Buffer argument of GHC.IO.Handle.Internals.writeCharBuffer
Baseline: (A) Not making Used win (UProd wins)
Compare with: (B) making Used win for lub and both
Min -0.3% -5.6% -10.7% -11.0% -33.3%
Max +0.3% +45.6% +11.5% +11.5% +6.9%
Geometric Mean -0.0% +0.5% +0.3% +0.2% -0.8%
Baseline: (B) Making Used win for both lub and both
Compare with: (C) making Used win for both, but UProd win for lub
Min -0.1% -0.3% -7.9% -8.0% -6.5%
Max +0.1% +1.0% +21.0% +21.0% +0.5%
Geometric Mean +0.0% +0.0% -0.0% -0.1% -0.1%
-}
-- If a demand is used multiple times (i.e. reused), than any use-once
-- mentioned there, that is not protected by a UCall, can happen many times.
markReusedDmd :: ArgUse -> ArgUse
markReusedDmd Abs = Abs
markReusedDmd (Use _ a) = Use Many (markReused a)
markReused :: UseDmd -> UseDmd
markReused (UCall _ u) = UCall Many u -- No need to recurse here
markReused (UProd ux) = UProd (map markReusedDmd ux)
markReused u = u
-- | Is this argument usage a fixed point of 'markReusedDmd', i.e. does it
-- already account for multiple uses?
isUsedMU :: ArgUse -> Bool
-- True <=> markReusedDmd d = d
isUsedMU Abs = True
isUsedMU (Use One _) = False
isUsedMU (Use Many u) = isUsedU u

-- | Is this usage demand a fixed point of 'markReused'?
isUsedU :: UseDmd -> Bool
-- True <=> markReused d = d
isUsedU Used = True
isUsedU UHead = True
isUsedU (UProd us) = all isUsedMU us
isUsedU (UCall One _) = False
isUsedU (UCall Many _) = True -- No need to recurse
-- Squashing usage demand demands
seqUseDmd :: UseDmd -> ()
seqUseDmd (UProd ds) = seqArgUseList ds
seqUseDmd (UCall c d) = c `seq` seqUseDmd d
seqUseDmd _ = ()
seqArgUseList :: [ArgUse] -> ()
seqArgUseList [] = ()
seqArgUseList (d:ds) = seqArgUse d `seq` seqArgUseList ds
seqArgUse :: ArgUse -> ()
seqArgUse (Use c u) = c `seq` seqUseDmd u
seqArgUse _ = ()
-- Splitting polymorphic Maybe-Used demands
splitUseProdDmd :: Int -> UseDmd -> Maybe [ArgUse]
splitUseProdDmd n Used = Just (replicate n useTop)
splitUseProdDmd n UHead = Just (replicate n Abs)
splitUseProdDmd n (UProd ds) = WARN( not (ds `lengthIs` n),
text "splitUseProdDmd" $$ ppr n
$$ ppr ds )
Just ds
splitUseProdDmd _ (UCall _ _) = Nothing
-- This can happen when the programmer uses unsafeCoerce,
-- and we don't then want to crash the compiler (Trac #9208)
-- | Upper bound on how many times the value is used; an absent value
-- counts as used at most once.
useCount :: Use u -> Count
useCount (Use Many _) = Many
useCount _            = One
{-
************************************************************************
* *
Clean demand for Strictness and Usage
* *
************************************************************************
This domain differs from JointDemand in the sense that pure absence
is taken away, i.e., we deal *only* with non-absent demands.
Note [Strict demands]
~~~~~~~~~~~~~~~~~~~~~
isStrictDmd returns true only of demands that are
both strict
and used
In particular, it is False for <HyperStr, Abs>, which can and does
arise in, say (Trac #7319)
f x = raise# <some exception>
Then 'x' is not used, so f gets strictness <HyperStr,Abs> -> .
Now the w/w generates
fx = let x <HyperStr,Abs> = absentError "unused"
in raise <some exception>
At this point we really don't want to convert to
fx = case absentError "unused" of x -> raise <some exception>
Since the program is going to diverge, this swaps one error for another,
but it's really a bad idea to *ever* evaluate an absent argument.
In Trac #7319 we get
T7319.exe: Oops! Entered absent arg w_s1Hd{v} [lid] [base:GHC.Base.String{tc 36u}]
Note [Dealing with call demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Call demands are constructed and deconstructed coherently for
strictness and absence. For instance, the strictness signature for the
following function
f :: (Int -> (Int, Int)) -> (Int, Bool)
f g = (snd (g 3), True)
should be: <L,C(U(AU))>m
-}
type CleanDemand = JointDmd StrDmd UseDmd
-- A demand that is at least head-strict
bothCleanDmd :: CleanDemand -> CleanDemand -> CleanDemand
bothCleanDmd (JD { sd = s1, ud = a1}) (JD { sd = s2, ud = a2})
= JD { sd = s1 `bothStr` s2, ud = a1 `bothUse` a2 }
mkHeadStrict :: CleanDemand -> CleanDemand
mkHeadStrict cd = cd { sd = HeadStr }
mkOnceUsedDmd, mkManyUsedDmd :: CleanDemand -> Demand
mkOnceUsedDmd (JD {sd = s,ud = a}) = JD { sd = Str VanStr s, ud = Use One a }
mkManyUsedDmd (JD {sd = s,ud = a}) = JD { sd = Str VanStr s, ud = Use Many a }
evalDmd :: Demand
-- Evaluated strictly, and used arbitrarily deeply
evalDmd = JD { sd = Str VanStr HeadStr, ud = useTop }
mkProdDmd :: [Demand] -> CleanDemand
mkProdDmd dx
= JD { sd = mkSProd $ map getStrDmd dx
, ud = mkUProd $ map getUseDmd dx }
mkCallDmd :: CleanDemand -> CleanDemand
mkCallDmd (JD {sd = d, ud = u})
= JD { sd = mkSCall d, ud = mkUCall One u }
-- See Note [Demand on the worker] in WorkWrap
-- | A lazy demand whose usage side says: used once, applied to @n@
-- arguments, with the final result used in full.
mkWorkerDemand :: Int -> Demand
mkWorkerDemand depth = JD { sd = Lazy, ud = Use One (calls depth) }
  where
    calls 0 = Used
    calls m = mkUCall One (calls (m - 1))
cleanEvalDmd :: CleanDemand
cleanEvalDmd = JD { sd = HeadStr, ud = Used }
cleanEvalProdDmd :: Arity -> CleanDemand
cleanEvalProdDmd n = JD { sd = HeadStr, ud = UProd (replicate n useTop) }
{-
************************************************************************
* *
                 Demand: combining strictness and usage
* *
************************************************************************
-}
type Demand = JointDmd ArgStr ArgUse
lubDmd :: Demand -> Demand -> Demand
lubDmd (JD {sd = s1, ud = a1}) (JD {sd = s2, ud = a2})
= JD { sd = s1 `lubArgStr` s2
, ud = a1 `lubArgUse` a2 }
bothDmd :: Demand -> Demand -> Demand
bothDmd (JD {sd = s1, ud = a1}) (JD {sd = s2, ud = a2})
= JD { sd = s1 `bothArgStr` s2
, ud = a1 `bothArgUse` a2 }
lazyApply1Dmd, lazyApply2Dmd, strictApply1Dmd, catchArgDmd :: Demand
strictApply1Dmd = JD { sd = Str VanStr (SCall HeadStr)
, ud = Use Many (UCall One Used) }
-- First argument of catchRetry# and catchSTM#:
-- uses its arg once, applies it once
-- and catches exceptions (the ExnStr) part
catchArgDmd = JD { sd = Str ExnStr (SCall HeadStr)
, ud = Use One (UCall One Used) }
lazyApply1Dmd = JD { sd = Lazy
, ud = Use One (UCall One Used) }
-- Second argument of catch#:
-- uses its arg at most once, applies it once
-- but is lazy (might not be called at all)
lazyApply2Dmd = JD { sd = Lazy
, ud = Use One (UCall One (UCall One Used)) }
absDmd :: Demand
absDmd = JD { sd = Lazy, ud = Abs }
topDmd :: Demand
topDmd = JD { sd = Lazy, ud = useTop }
botDmd :: Demand
botDmd = JD { sd = strBot, ud = useBot }
seqDmd :: Demand
seqDmd = JD { sd = Str VanStr HeadStr, ud = Use One UHead }
-- | Force the demand's use cardinality down to 'One', leaving the
-- strictness side and the inner usage demand untouched.
oneifyDmd :: Demand -> Demand
oneifyDmd dmd@(JD { ud = use }) = case use of
  Use _ a -> dmd { ud = Use One a }
  Abs     -> dmd
isTopDmd :: Demand -> Bool
-- Used to suppress pretty-printing of an uninformative demand
isTopDmd (JD {sd = Lazy, ud = Use Many Used}) = True
isTopDmd _ = False
isAbsDmd :: Demand -> Bool
isAbsDmd (JD {ud = Abs}) = True -- The strictness part can be HyperStr
isAbsDmd _ = False -- for a bottom demand
isSeqDmd :: Demand -> Bool
isSeqDmd (JD {sd = Str VanStr HeadStr, ud = Use _ UHead}) = True
isSeqDmd _ = False
-- | True iff the demand's use cardinality is at most one.
isUsedOnce :: Demand -> Bool
isUsedOnce (JD { ud = use }) = useCount use == One
-- More utility functions for strictness
seqDemand :: Demand -> ()
seqDemand (JD {sd = s, ud = u}) = seqArgStr s `seq` seqArgUse u
seqDemandList :: [Demand] -> ()
seqDemandList [] = ()
seqDemandList (d:ds) = seqDemand d `seq` seqDemandList ds
-- | True only for demands that are both strict *and* used; in
-- particular False for \<HyperStr, Abs\> (an absent argument of a
-- diverging function).
isStrictDmd :: Demand -> Bool
-- See Note [Strict demands]
isStrictDmd (JD {ud = Abs}) = False
isStrictDmd (JD {sd = Lazy}) = False
isStrictDmd _ = True
isWeakDmd :: Demand -> Bool
isWeakDmd (JD {sd = s, ud = a}) = isLazy s && isUsedMU a
cleanUseDmd_maybe :: Demand -> Maybe UseDmd
cleanUseDmd_maybe (JD { ud = Use _ u }) = Just u
cleanUseDmd_maybe _ = Nothing
splitFVs :: Bool -- Thunk
         -> DmdEnv -> (DmdEnv, DmdEnv)
-- Split a free-variable demand env into its weak/lazy part and the
-- rest; for a thunk, each non-lazy entry additionally leaves a lazy
-- residue (same usage, Lazy strictness) in the first component, and
-- only an absence-stripped version goes into the second.
splitFVs is_thunk rhs_fvs
  | is_thunk = nonDetFoldUFM_Directly add (emptyVarEnv, emptyVarEnv) rhs_fvs
               -- It's OK to use nonDetFoldUFM_Directly because we
               -- immediately forget the ordering by putting the elements
               -- in the envs again
  | otherwise = partitionVarEnv isWeakDmd rhs_fvs
  where
    add uniq dmd@(JD { sd = s, ud = u }) (lazy_fv, sig_fv)
      | Lazy <- s = (addToUFM_Directly lazy_fv uniq dmd, sig_fv)
      | otherwise = ( addToUFM_Directly lazy_fv uniq (JD { sd = Lazy, ud = u })
                    , addToUFM_Directly sig_fv uniq (JD { sd = s, ud = Abs }) )
data TypeShape = TsFun TypeShape
| TsProd [TypeShape]
| TsUnk
instance Outputable TypeShape where
ppr TsUnk = text "TsUnk"
ppr (TsFun ts) = text "TsFun" <> parens (ppr ts)
ppr (TsProd tss) = parens (hsep $ punctuate comma $ map ppr tss)
trimToType :: Demand -> TypeShape -> Demand
-- See Note [Trimming a demand to a type]
trimToType (JD { sd = ms, ud = mu }) ts
= JD (go_ms ms ts) (go_mu mu ts)
where
go_ms :: ArgStr -> TypeShape -> ArgStr
go_ms Lazy _ = Lazy
go_ms (Str x s) ts = Str x (go_s s ts)
go_s :: StrDmd -> TypeShape -> StrDmd
go_s HyperStr _ = HyperStr
go_s (SCall s) (TsFun ts) = SCall (go_s s ts)
go_s (SProd mss) (TsProd tss)
| equalLength mss tss = SProd (zipWith go_ms mss tss)
go_s _ _ = HeadStr
go_mu :: ArgUse -> TypeShape -> ArgUse
go_mu Abs _ = Abs
go_mu (Use c u) ts = Use c (go_u u ts)
go_u :: UseDmd -> TypeShape -> UseDmd
go_u UHead _ = UHead
go_u (UCall c u) (TsFun ts) = UCall c (go_u u ts)
go_u (UProd mus) (TsProd tss)
| equalLength mus tss = UProd (zipWith go_mu mus tss)
go_u _ _ = Used
{-
Note [Trimming a demand to a type]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
f :: a -> Bool
f x = case ... of
A g1 -> case (x |> g1) of (p,q) -> ...
B -> error "urk"
where A,B are the constructors of a GADT. We'll get a U(U,U) demand
on x from the A branch, but that's a stupid demand for x itself, which
has type 'a'. Indeed we get ASSERTs going off (notably in
splitUseProdDmd, Trac #8569).
Bottom line: we really don't want to have a binder whose demand is more
deeply-nested than its type. There are various ways to tackle this.
When processing (x |> g1), we could "trim" the incoming demand U(U,U)
to match x's type. But I'm currently doing so just at the moment when
we pin a demand on a binder, in DmdAnal.findBndrDmd.
Note [Threshold demands]
~~~~~~~~~~~~~~~~~~~~~~~~
Threshold usage demand is generated to figure out if
cardinality-instrumented demands of a binding's free variables should
be unleashed. See also [Aggregated demand for cardinality].
Note [Replicating polymorphic demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some demands can be considered as polymorphic. Generally, it is
applicable to such beasts as tops, bottoms as well as Head-Used and
Head-stricts demands. For instance,
S ~ S(L, ..., L)
Also, when top or bottom occurs as a result demand, it can in fact
can be expanded to saturate a callee's arity.
-}
-- | Split a demand into demands on the components of a product, using
-- product structure from either the strictness or the usage side.
splitProdDmd_maybe :: Demand -> Maybe [Demand]
-- Split a product into its components, iff there is any
-- useful information to be extracted thereby
-- The demand is not necessarily strict!
splitProdDmd_maybe (JD { sd = s, ud = u })
  = case (s,u) of
      (Str _ (SProd sx), Use _ u) | Just ux <- splitUseProdDmd (length sx) u
                                  -> Just (mkJointDmds sx ux)
      (Str _ s, Use _ (UProd ux)) | Just sx <- splitStrProdDmd (length ux) s
                                  -> Just (mkJointDmds sx ux)
      (Lazy, Use _ (UProd ux)) -> Just (mkJointDmds (replicate (length ux) Lazy) ux)
      _ -> Nothing
{-
************************************************************************
* *
Demand results
* *
************************************************************************
DmdResult: Dunno CPRResult
/
ThrowsExn
/
Diverges
CPRResult: NoCPR
/ \
RetProd RetSum ConTag
Product constructors return (Dunno (RetProd rs))
In a fixpoint iteration, start from Diverges
We have lubs, but not glbs; but that is ok.
-}
------------------------------------------------------------------------
-- Constructed Product Result
------------------------------------------------------------------------
-- | Termination lattice for a computation's result: definite divergence,
-- a definite exception (or divergence), or "don't know" with extra
-- information of type @r@.
data Termination r
  = Diverges    -- Definitely diverges
  | ThrowsExn   -- Definitely throws an exception or diverges
  | Dunno r     -- Might diverge or converge
  deriving( Eq, Show )

-- | The result part of a 'DmdType': termination plus CPR information.
type DmdType's-result = ()  -- (placeholder removed)
type DmdResult = Termination CPRResult

-- | Constructed-Product-Result information about a function's result.
data CPRResult = NoCPR          -- Top of the lattice
               | RetProd        -- Returns a constructor from a product type
               | RetSum ConTag  -- Returns a constructor from a data type
               deriving( Eq, Show )
-- | Least upper bound of two CPR results: they must agree exactly,
-- otherwise we fall back to 'NoCPR'.
lubCPR :: CPRResult -> CPRResult -> CPRResult
lubCPR c1 c2 = case (c1, c2) of
  (RetSum t1, RetSum t2) | t1 == t2 -> RetSum t1
  (RetProd,   RetProd)              -> RetProd
  _                                 -> NoCPR
-- | Least upper bound of two demand results; 'Diverges' is the bottom
-- of the lattice, 'Dunno' results combine via 'lubCPR'.
lubDmdResult :: DmdResult -> DmdResult -> DmdResult
lubDmdResult r1 r2 = case (r1, r2) of
  (Diverges,  _        ) -> r2
  (ThrowsExn, Diverges ) -> ThrowsExn
  (ThrowsExn, _        ) -> r2
  (Dunno c1,  Diverges ) -> Dunno c1
  (Dunno c1,  ThrowsExn) -> Dunno c1
  (Dunno c1,  Dunno c2 ) -> Dunno (c1 `lubCPR` c2)
-- This needs to commute with defaultDmd, i.e.
-- defaultDmd (r1 `lubDmdResult` r2) = defaultDmd r1 `lubDmd` defaultDmd r2
-- (See Note [Default demand on free variables] for why)
-- | Combine a demand result with the termination of a computation
-- executed alongside it.
-- See Note [Asymmetry of 'both' for DmdType and DmdResult]
bothDmdResult :: DmdResult -> Termination () -> DmdResult
bothDmdResult r1 r2 = case r2 of
  Diverges  -> Diverges
  ThrowsExn -> case r1 of
                 Diverges -> r1
                 _        -> ThrowsExn
  Dunno {}  -> r1
-- This needs to commute with defaultDmd, i.e.
-- defaultDmd (r1 `bothDmdResult` r2) = defaultDmd r1 `bothDmd` defaultDmd r2
-- (See Note [Default demand on free variables] for why)
-- | Print 'b' for definite divergence, 'x' for a definite exception,
-- otherwise print the payload.
instance Outputable r => Outputable (Termination r) where
  ppr Diverges  = char 'b'
  ppr ThrowsExn = char 'x'
  ppr (Dunno c) = ppr c
-- | CPR info prints as 'm' (with the constructor tag for sums);
-- 'NoCPR' prints as nothing at all.
instance Outputable CPRResult where
  ppr NoCPR      = empty
  ppr (RetSum n) = char 'm' <> int n
  ppr RetProd    = char 'm'
-- | Force a 'DmdResult' to normal form.
seqDmdResult :: DmdResult -> ()
seqDmdResult r = case r of
  Dunno c -> seqCPRResult c
  _       -> ()   -- Diverges / ThrowsExn carry no payload

-- | Force a 'CPRResult'; only 'RetSum' carries a field worth forcing.
seqCPRResult :: CPRResult -> ()
seqCPRResult c = case c of
  RetSum n -> n `seq` ()
  _        -> ()
------------------------------------------------------------------------
-- Combined demand result --
------------------------------------------------------------------------
-- [cprRes] lets us switch off CPR analysis
-- by making sure that everything uses TopRes
--
-- Canonical demand results used throughout the analyser:
topRes, exnRes, botRes :: DmdResult
topRes = Dunno NoCPR   -- no information at all
exnRes = ThrowsExn     -- definitely throws an exception (or diverges)
botRes = Diverges      -- definitely diverges
-- | CPR result for a saturated constructor of a sum type with the
-- given tag.
cprSumRes :: ConTag -> DmdResult
cprSumRes tag = Dunno (RetSum tag)

-- | CPR result for a saturated product constructor; the argument
-- types are currently ignored.
cprProdRes :: [DmdType] -> DmdResult
cprProdRes _arg_tys = Dunno RetProd

-- | CPR result for a product constructor of the given arity; the
-- arity is currently ignored.
vanillaCprProdRes :: Arity -> DmdResult
vanillaCprProdRes _arity = Dunno RetProd
-- | True if the result carries no information at all.
isTopRes :: DmdResult -> Bool
isTopRes r = case r of
  Dunno NoCPR -> True
  _           -> False

-- | True if the result diverges or throws an exception.
isBotRes :: DmdResult -> Bool
isBotRes r = case r of
  Dunno {} -> False
  _        -> True   -- Diverges or ThrowsExn
-- | Trim CPR information: the first flag discards all CPR info, the
-- second discards sum-constructor CPR info only.
trimCPRInfo :: Bool -> Bool -> DmdResult -> DmdResult
trimCPRInfo trim_all trim_sums res =
  case res of
    Dunno c -> Dunno (trim_cpr c)
    _       -> res           -- divergence info is untouched
  where
    trim_cpr (RetSum n)
      | trim_all || trim_sums = NoCPR
      | otherwise             = RetSum n
    trim_cpr RetProd
      | trim_all  = NoCPR
      | otherwise = RetProd
    trim_cpr NoCPR = NoCPR
-- | The constructor tag returned, if the result has the CPR property.
returnsCPR_maybe :: DmdResult -> Maybe ConTag
returnsCPR_maybe r = case r of
  Dunno c -> retCPR_maybe c
  _       -> Nothing

-- | Tag of the returned constructor, if any; products use 'fIRST_TAG'.
retCPR_maybe :: CPRResult -> Maybe ConTag
retCPR_maybe c = case c of
  RetSum t -> Just t
  RetProd  -> Just fIRST_TAG
  NoCPR    -> Nothing
-- See Notes [Default demand on free variables]
-- and [defaultDmd vs. resTypeArgDmd]
--
-- | Demand on a free variable that is /not/ mentioned in a demand
-- type's environment: absent if the computation may converge,
-- hyperstrict if it certainly diverges or throws.
defaultDmd :: Termination r -> Demand
defaultDmd (Dunno {}) = absDmd
defaultDmd _ = botDmd -- Diverges or ThrowsExn

-- | Demand on an argument beyond those recorded in a demand type.
resTypeArgDmd :: Termination r -> Demand
-- TopRes and BotRes are polymorphic, so that
-- BotRes === (Bot -> BotRes) === ...
-- TopRes === (Top -> TopRes) === ...
-- This function makes that concrete
-- Also see Note [defaultDmd vs. resTypeArgDmd]
resTypeArgDmd (Dunno _) = topDmd
resTypeArgDmd _ = botDmd -- Diverges or ThrowsExn
{-
Note [defaultDmd vs. resTypeArgDmd]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
These functions are similar: They express the demand on something not
explicitly mentioned in the environment resp. the argument list. Yet they are
different:
* Variables not mentioned in the free variables environment are definitely
unused, so we can use absDmd there.
* Further arguments *can* be used, of course. Hence topDmd is used.
Note [Worthy functions for Worker-Wrapper split]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For non-bottoming functions a worker-wrapper transformation takes into
account several possibilities to decide if the function is worthy for
splitting:
1. The result is of product type and the function is strict in some
(or even all) of its arguments. The check that the argument is used is
more of sanity nature, since strictness implies usage. Example:
f :: (Int, Int) -> Int
f p = (case p of (a,b) -> a) + 1
should be split into
f :: (Int, Int) -> Int
f p = case p of (a,b) -> $wf a
$wf :: Int -> Int
$wf a = a + 1
2. Sometimes it also makes sense to perform a WW split if the
strictness analysis cannot say for sure if the function is strict in
components of its argument. Then we reason according to the inferred
usage information: if the function uses its product argument's
components, the WW split can be beneficial. Example:
g :: Bool -> (Int, Int) -> Int
g c p = case p of (a,b) ->
if c then a else b
The function g is strict in its argument p and lazy in its
components. However, both components are used in the RHS. The idea is
since some of the components (both in this case) are used in the
right-hand side, the product must presumably be taken apart.
Therefore, the WW transform splits the function g to
g :: Bool -> (Int, Int) -> Int
g c p = case p of (a,b) -> $wg c a b
$wg :: Bool -> Int -> Int -> Int
$wg c a b = if c then a else b
3. If an argument is absent, it would be silly to pass it to a
function, hence the worker with reduced arity is generated.
Note [Worker-wrapper for bottoming functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used not to split if the result is bottom.
[Justification: there's no efficiency to be gained.]
But it's sometimes bad not to make a wrapper. Consider
fw = \x# -> let x = I# x# in case e of
p1 -> error_fn x
p2 -> error_fn x
p3 -> the real stuff
The re-boxing code won't go away unless error_fn gets a wrapper too.
[We don't do reboxing now, but in general it's better to pass an
unboxed thing to f, and have it reboxed in the error cases....]
However we *don't* want to do this when the argument is not actually
taken apart in the function at all. Otherwise we risk decomposing a
massive tuple which is barely used. Example:
f :: ((Int,Int) -> String) -> (Int,Int) -> a
f g pr = error (g pr)
main = print (f fst (1, error "no"))
Here, f does not take 'pr' apart, and it's stupid to do so.
Imagine that it had millions of fields. This actually happened
in GHC itself where the tuple was DynFlags
************************************************************************
* *
Demand environments and types
* *
************************************************************************
-}
-- | Maps each free variable to the demand placed on it.
type DmdEnv = VarEnv Demand -- See Note [Default demand on free variables]

-- | The "demand type" of an expression: the demands it places on its
-- free variables and arguments, plus termination/CPR info for its result.
data DmdType
  = DmdType
      DmdEnv     -- Demand on explicitly-mentioned
                 --      free variables
      [Demand]   -- Demand on arguments
      DmdResult  -- See [Nature of result demand]
{-
Note [Nature of result demand]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A DmdResult contains information about termination (currently distinguishing
definite divergence and no information; it is possible to include definite
convergence here), and CPR information about the result.
The semantics of this depends on whether we are looking at a DmdType, i.e. the
demand put on by an expression _under a specific incoming demand_ on its
environment, or at a StrictSig describing a demand transformer.
For a
* DmdType, the termination information is true given the demand it was
generated with, while for
* a StrictSig it holds after applying enough arguments.
The CPR information, though, is valid after the number of arguments mentioned
in the type is given. Therefore, when forgetting the demand on arguments, as in
dmdAnalRhs, this needs to be considered (via removeDmdTyArgs).
Consider
b2 x y = x `seq` y `seq` error (show x)
this has a strictness signature of
<S><S>b
meaning that "b2 `seq` ()" and "b2 1 `seq` ()" might well terminate, but
for "b2 1 2 `seq` ()" we get definite divergence.
For comparison,
b1 x = x `seq` error (show x)
has a strictness signature of
<S>b
and "b1 1 `seq` ()" is known to terminate.
Now consider a function h with signature "<C(S)>", and the expression
e1 = h b1
now h puts a demand of <C(S)> onto its argument, and the demand transformer
turns it into
<S>b
Now the DmdResult "b" does apply to us, even though "b1 `seq` ()" does not
diverge, and we do not expect anything to be passed to b.
Note [Asymmetry of 'both' for DmdType and DmdResult]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'both' for DmdTypes is *asymmetrical*, because there is only one
result! For example, given (e1 e2), we get a DmdType dt1 for e1, use
its arg demand to analyse e2 giving dt2, and then do (dt1 `bothType` dt2).
Similarly with
case e of { p -> rhs }
we get dt_scrut from the scrutinee and dt_rhs from the RHS, and then
compute (dt_rhs `bothType` dt_scrut).
We
1. combine the information on the free variables,
2. take the demand on arguments from the first argument
3. combine the termination results, but
4. take CPR info from the first argument.
3 and 4 are implemented in bothDmdResult.
-}
-- Equality needed for fixpoints in DmdAnal
instance Eq DmdType where
  (==) (DmdType fv1 ds1 res1)
       (DmdType fv2 ds2 res2) = nonDetUFMToList fv1 == nonDetUFMToList fv2
         -- It's OK to use nonDetUFMToList here because we're testing for
         -- equality and even though the lists will be in some arbitrary
         -- Unique order, it is the same order for both
         && ds1 == ds2 && res1 == res2
-- | Least upper bound of two demand types, combining free-variable
-- demands, argument demands and results point-wise.
lubDmdType :: DmdType -> DmdType -> DmdType
lubDmdType ty1 ty2 = DmdType lub_fv lub_ds lub_res
  where
    -- Pad the shorter argument list so that both types have the same
    -- depth before combining them point-wise.
    n = max (dmdTypeDepth ty1) (dmdTypeDepth ty2)
    DmdType fv1 ds1 r1 = ensureArgs n ty1
    DmdType fv2 ds2 r2 = ensureArgs n ty2
    lub_fv  = plusVarEnv_CD lubDmd fv1 (defaultDmd r1) fv2 (defaultDmd r2)
    lub_ds  = zipWithEqual "lubDmdType" lubDmd ds1 ds2
    lub_res = lubDmdResult r1 r2
{-
Note [The need for BothDmdArg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Previously, the right argument to bothDmdType, as well as the return value of
dmdAnalStar via postProcessDmdType, was a DmdType. But bothDmdType only needs
to know about the free variables and termination information, but nothing about
the demand put on arguments, nor cpr information. So we make that explicit by
only passing the relevant information.
-}
-- | The part of a 'DmdType' needed as the second argument of
-- 'bothDmdType': free-variable demands plus bare termination info.
-- See Note [The need for BothDmdArg]
type BothDmdArg = (DmdEnv, Termination ())

-- | Pair a free-variable environment with a "don't know" termination.
mkBothDmdArg :: DmdEnv -> BothDmdArg
mkBothDmdArg env = (env, Dunno ())
-- | Forget a demand type's argument and CPR information, keeping only
-- what 'bothDmdType' needs.
toBothDmdArg :: DmdType -> BothDmdArg
toBothDmdArg (DmdType fv _ r) = (fv, strip r)
  where
    -- Drop the CPR payload, keeping only termination information
    strip (Dunno {}) = Dunno ()
    strip ThrowsExn  = ThrowsExn
    strip Diverges   = Diverges
-- | Combine a demand type with the free-variable/termination info of
-- a computation executed alongside it.
bothDmdType :: DmdType -> BothDmdArg -> DmdType
bothDmdType (DmdType fv1 ds1 r1) (fv2, t2)
  -- See Note [Asymmetry of 'both' for DmdType and DmdResult]
  -- 'both' takes the argument/result info from its *first* arg,
  -- using its second arg just for its free-var info.
  = DmdType (plusVarEnv_CD bothDmd fv1 (defaultDmd r1) fv2 (defaultDmd t2))
            ds1
            (r1 `bothDmdResult` t2)
-- | Prints argument demands, the result, and then (in braces) one
-- "var->dmd" entry per free variable.
instance Outputable DmdType where
  ppr (DmdType fv ds res)
    = hsep [hcat (map ppr ds) <> ppr res,
            if null fv_elts then empty
            else braces (fsep (map pp_elt fv_elts))]
    where
      pp_elt (uniq, dmd) = ppr uniq <> text "->" <> ppr dmd
      fv_elts = nonDetUFMToList fv
        -- It's OK to use nonDetUFMToList here because we only do it for
        -- pretty printing
-- | An empty free-variable demand environment.
emptyDmdEnv :: VarEnv Demand
emptyDmdEnv = emptyVarEnv

-- nopDmdType is the demand of doing nothing
-- (lazy, absent, no CPR information, no termination information).
-- Note that it is ''not'' the top of the lattice (which would be "may use everything"),
-- so it is (no longer) called topDmd
nopDmdType, botDmdType, exnDmdType :: DmdType
nopDmdType = DmdType emptyDmdEnv [] topRes
botDmdType = DmdType emptyDmdEnv [] botRes
exnDmdType = DmdType emptyDmdEnv [] exnRes
-- | Demand type of a saturated product constructor of the given arity.
cprProdDmdType :: Arity -> DmdType
cprProdDmdType arity =
  DmdType emptyDmdEnv [] (vanillaCprProdRes arity)

-- | True when the demand type carries no information whatsoever.
isTopDmdType :: DmdType -> Bool
isTopDmdType (DmdType env args res) =
  null args && isTopRes res && isEmptyVarEnv env
-- | Plain constructor wrapper for 'DmdType'.
mkDmdType :: DmdEnv -> [Demand] -> DmdResult -> DmdType
mkDmdType = DmdType

-- | Number of argument demands recorded in the type.
dmdTypeDepth :: DmdType -> Arity
dmdTypeDepth (DmdType _ args _) = length args

-- Remove any demand on arguments. This is used in dmdAnalRhs on the body
removeDmdTyArgs :: DmdType -> DmdType
removeDmdTyArgs = ensureArgs 0
-- This makes sure we can use the demand type with n arguments.
-- It extends the argument list with the correct resTypeArgDmd.
-- It also adjusts the DmdResult: Divergence survives additional arguments,
-- CPR information does not (and definite convergence also would not).
ensureArgs :: Arity -> DmdType -> DmdType
ensureArgs n ty
  | n == dmdTypeDepth ty = ty
  | otherwise            = DmdType fv args' res'
  where
    DmdType fv args res = ty
    -- Pad with the result-polymorphic demand, or truncate, to length n
    args' = take n (args ++ repeat (resTypeArgDmd res))
    res'  = case res of -- See [Nature of result demand]
              Dunno _ -> topRes
              _       -> res
-- | Force a 'DmdType' to normal form.
seqDmdType :: DmdType -> ()
seqDmdType (DmdType env ds res) =
  seqDmdEnv env `seq` seqDemandList ds `seq` seqDmdResult res `seq` ()

-- | Force every demand in a free-variable environment.
seqDmdEnv :: DmdEnv -> ()
seqDmdEnv env = seqEltsUFM seqDemandList env
-- | Split off the demand on the first argument of a function's
-- demand type.
splitDmdTy :: DmdType -> (Demand, DmdType)
-- Split off one function argument
-- We already have a suitable demand on all
-- free vars, so no need to add more!
splitDmdTy (DmdType fv (dmd:dmds) res_ty) = (dmd, DmdType fv dmds res_ty)
-- No recorded argument demand: use the result-polymorphic demand
splitDmdTy ty@(DmdType _ [] res_ty) = (resTypeArgDmd res_ty, ty)
-- When e is evaluated after executing an IO action, and d is e's demand, then
-- what of this demand should we consider, given that the IO action can cleanly
-- exit?
-- * We have to kill all strictness demands (i.e. lub with a lazy demand)
-- * We can keep usage information (i.e. lub with an absent demand)
-- * We have to kill definite divergence
-- * We can keep CPR information.
-- See Note [IO hack in the demand analyser] in DmdAnal
deferAfterIO :: DmdType -> DmdType
deferAfterIO d@(DmdType _ _ res) =
  -- lub with nopDmdType lazifies/absents everything; then restore the
  -- CPR info from the original result (with divergence killed)
  case d `lubDmdType` nopDmdType of
    DmdType fv ds _ -> DmdType fv ds (defer_res res)
  where
    defer_res r@(Dunno {}) = r
    defer_res _            = topRes -- Diverges and ThrowsExn
-- | Strengthen a demand into a clean demand: lazy becomes head-strict,
-- absent becomes head-used, and existing wrappers are stripped.
strictenDmd :: Demand -> CleanDemand
strictenDmd (JD { sd = s, ud = u }) =
  JD { sd = force_str s, ud = force_use u }
  where
    force_str Lazy       = HeadStr
    force_str (Str _ s') = s'
    force_use Abs        = UHead
    force_use (Use _ u') = u'
-- Deferring and peeling

-- | Describes the "outer shell" of a Demand: just the strictness and
-- usage wrappers, with their payloads erased.
type DmdShell = JointDmd (Str ()) (Use ())
toCleanDmd :: Demand -> Type -> (DmdShell, CleanDemand)
-- Splits a Demand into its "shell" and the inner "clean demand"
toCleanDmd (JD { sd = s, ud = u }) expr_ty
  = (JD { sd = ss, ud = us }, JD { sd = s', ud = u' })
    -- See Note [Analyzing with lazy demand and lambdas]
  where
    -- For an unlifted thing a lazy/absent demand is strengthened to a
    -- strict/once-used shell, because unlifted values are evaluated
    -- even if not used (see Note [Analysing with absent demand]).
    (ss, s') = case s of
                 Str x s' -> (Str x (), s')
                 Lazy | is_unlifted -> (Str VanStr (), HeadStr)
                      | otherwise   -> (Lazy, HeadStr)
    (us, u') = case u of
                 Use c u' -> (Use c (), u')
                 Abs | is_unlifted -> (Use One (), Used)
                     | otherwise   -> (Abs, Used)
    is_unlifted = isUnLiftedType expr_ty
    -- See Note [Analysing with absent demand]
-- This is used in dmdAnalStar when post-processing
-- a function's argument demand. So we only care about what
-- it does to free variables, and whether it terminates.
-- see Note [The need for BothDmdArg]
postProcessDmdType :: DmdShell -> DmdType -> BothDmdArg
postProcessDmdType du@(JD { sd = ss }) (DmdType fv _ res_ty)
  = (postProcessDmdEnv du fv, term_info)
  where
    -- Keep only the termination part of the post-processed result
    term_info = case postProcessDmdResult ss res_ty of
                  Dunno _   -> Dunno ()
                  ThrowsExn -> ThrowsExn
                  Diverges  -> Diverges
-- | Adjust a result under the given strictness shell.
postProcessDmdResult :: Str () -> DmdResult -> DmdResult
-- Lazy shell: the thing may never be entered, so no result info survives
postProcessDmdResult Lazy _ = topRes
postProcessDmdResult (Str ExnStr _) ThrowsExn = topRes -- Key point!
  -- Note that only ThrowsExn results can be caught, not Diverges
postProcessDmdResult _ res = res
-- | Post-process every demand in a free-variable environment according
-- to the given shell.
postProcessDmdEnv :: DmdShell -> DmdEnv -> DmdEnv
postProcessDmdEnv ds@(JD { sd = ss, ud = us }) env
  | Abs <- us = emptyDmdEnv
    -- For the Absent case just discard all usage information
    -- We only processed the thing at all to analyse the body
    -- See Note [Always analyse in virgin pass]
  -- In this case (postProcessDmd ds) == id; avoid a redundant rebuild
  -- of the environment. Be careful, bad things will happen if this doesn't
  -- match postProcessDmd (see #13977).
  | Str VanStr _ <- ss
  , Use One _ <- us = env
  | otherwise = mapVarEnv (postProcessDmd ds) env
-- | Mark every demand in the environment as used many times (strict,
-- many-use shell).
reuseEnv :: DmdEnv -> DmdEnv
reuseEnv = mapVarEnv (postProcessDmd
                        (JD { sd = Str VanStr (), ud = Use Many () }))
-- | Post-process an entire demand type (free vars, argument demands
-- and result) under the given shell; used for unsaturated calls.
postProcessUnsat :: DmdShell -> DmdType -> DmdType
postProcessUnsat shell@(JD { sd = ss }) (DmdType fv args res_ty) =
  DmdType (postProcessDmdEnv shell fv)
          (map (postProcessDmd shell) args)
          (postProcessDmdResult ss res_ty)
-- | Post-process a single demand under a shell: a lazy shell erases
-- strictness, an exception shell marks it, and a many-use shell marks
-- the usage as reused.
postProcessDmd :: DmdShell -> Demand -> Demand
postProcessDmd (JD { sd = shell_s, ud = shell_u }) (JD { sd = s, ud = u }) =
  JD { sd = str', ud = use' }
  where
    str' = case shell_s of
      Lazy         -> Lazy
      Str ExnStr _ -> markExnStr s
      Str VanStr _ -> s
    use' = case shell_u of
      Abs        -> Abs
      Use Many _ -> markReusedDmd u
      Use One  _ -> u

-- | Turn a vanilla strict demand into an exception-strict one;
-- anything else is left alone.
markExnStr :: ArgStr -> ArgStr
markExnStr str = case str of
  Str VanStr s -> Str ExnStr s
  _            -> str
-- Peels one call level from the demand, and also returns
-- whether it was unsaturated (separately for strictness and usage)
peelCallDmd :: CleanDemand -> (CleanDemand, DmdShell)
-- Exploiting the fact that
-- on the strictness side C(B) = B
-- and on the usage side C(U) = U
peelCallDmd (JD {sd = s, ud = u})
  = (JD { sd = s', ud = u' }, JD { sd = ss, ud = us })
  where
    -- A Lazy / Use Many shell in the result signals an unsaturated call
    (s', ss) = case s of
                 SCall s' -> (s', Str VanStr ())
                 HyperStr -> (HyperStr, Str VanStr ())
                 _        -> (HeadStr, Lazy)
    (u', us) = case u of
                 UCall c u' -> (u', Use c ())
                 _          -> (Used, Use Many ())
       -- The _ cases for usage includes UHead which seems a bit wrong
       -- because the body isn't used at all!
       -- c.f. the Abs case in toCleanDmd
-- Peels multiple nestings of calls from the clean demand and returns
-- whether it was unsaturated (separately for strictness and usage);
-- see Note [Demands from unsaturated function calls]
peelManyCalls :: Int -> CleanDemand -> DmdShell
peelManyCalls n (JD { sd = str, ud = abs })
  = JD { sd = go_str n str, ud = go_abs n abs }
  where
    go_str :: Int -> StrDmd -> Str () -- True <=> unsaturated, defer
    go_str 0 _ = Str VanStr ()
    go_str _ HyperStr = Str VanStr () -- == go_str (n-1) HyperStr, as HyperStr = Call(HyperStr)
    go_str n (SCall d') = go_str (n-1) d'
    go_str _ _ = Lazy

    go_abs :: Int -> UseDmd -> Use () -- Many <=> unsaturated, or at least
    go_abs 0 _ = Use One ()           --   one UCall Many in the demand
    go_abs n (UCall One d') = go_abs (n-1) d'
    go_abs _ _ = Use Many ()
{-
Note [Demands from unsaturated function calls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider a demand transformer d1 -> d2 -> r for f.
If a sufficiently detailed demand is fed into this transformer,
e.g <C(C(S)), C1(C1(S))> arising from "f x1 x2" in a strict, use-once context,
then d1 and d2 is precisely the demand unleashed onto x1 and x2 (similar for
the free variable environment) and furthermore the result information r is the
one we want to use.
An anonymous lambda is also an unsaturated function call (needs one argument,
none given), so this applies to that case as well.
But the demand fed into f might be less than <C(C(S)), C1(C1(S))>. There are a few cases:
* Not enough demand on the strictness side:
- In that case, we need to zap all strictness in the demand on arguments and
free variables.
- Furthermore, we remove CPR information. It could be left, but given the incoming
demand is not enough to evaluate so far we just do not bother.
- And finally termination information: If r says that f diverges for sure,
then this holds when the demand guarantees that two arguments are going to
be passed. If the demand is lower, we may just as well converge.
If we were tracking definite convergence, then that would still hold under
a weaker demand than expected by the demand transformer.
* Not enough demand from the usage side: The missing usage can be expanded
using UCall Many, therefore this is subsumed by the third case:
* At least one of the uses has a cardinality of Many.
- Even if f puts a One demand on any of its argument or free variables, if
we call f multiple times, we may evaluate this argument or free variable
multiple times. So forget about any occurrence of "One" in the demand.
In dmdTransformSig, we call peelManyCalls to find out if we are in any of these
cases, and then call postProcessUnsat to reduce the demand appropriately.
Similarly, dmdTransformDictSelSig and dmdAnal, when analyzing a Lambda, use
peelCallDmd, which peels only one level, but also returns the demand put on the
body of the function.
-}
-- | Remove a variable from a demand type, returning both the trimmed
-- type and the demand the type placed on that variable.
peelFV :: DmdType -> Var -> (DmdType, Demand)
peelFV (DmdType fv ds res) id = -- pprTrace "rfv" (ppr id <+> ppr dmd $$ ppr fv)
                                (DmdType fv' ds res, dmd)
  where
    fv' = fv `delVarEnv` id
    -- See Note [Default demand on free variables]
    dmd = lookupVarEnv fv id `orElse` defaultDmd res
-- | Prepend a demand to a demand type's argument demands.
addDemand :: Demand -> DmdType -> DmdType
addDemand dmd (DmdType fv args res) = DmdType fv (dmd : args) res

-- | Demand a type places on a variable, falling back on the default
-- demand implied by the result.
-- See Note [Default demand on free variables]
findIdDemand :: DmdType -> Var -> Demand
findIdDemand (DmdType fv _ res) id =
  lookupVarEnv fv id `orElse` defaultDmd res
{-
Note [Default demand on free variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the variable is not mentioned in the environment of a demand type,
its demand is taken to be a result demand of the type.
For the strictness component,
if the result demand is a Diverges, then we use HyperStr
else we use Lazy
For the usage component, we use Absent.
So we use either absDmd or botDmd.
Also note the equations for lubDmdResult (resp. bothDmdResult) noted there.
Note [Always analyse in virgin pass]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tricky point: make sure that we analyse in the 'virgin' pass. Consider
rec { f acc x True = f (...rec { g y = ...g... }...)
f acc x False = acc }
In the virgin pass for 'f' we'll give 'f' a very strict (bottom) type.
That might mean that we analyse the sub-expression containing the
E = "...rec g..." stuff in a bottom demand. Suppose we *didn't analyse*
E, but just returned botType.
Then in the *next* (non-virgin) iteration for 'f', we might analyse E
in a weaker demand, and that will trigger doing a fixpoint iteration
for g. But *because it's not the virgin pass* we won't start g's
iteration at bottom. Disaster. (This happened in $sfibToList' of
nofib/spectral/fibheaps.)
So in the virgin pass we make sure that we do analyse the expression
at least once, to initialise its signatures.
Note [Analyzing with lazy demand and lambdas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The insight for analyzing lambdas follows from the fact that for
strictness S = C(L). This polymorphic expansion is critical for
cardinality analysis of the following example:
{-# NOINLINE build #-}
build g = (g (:) [], g (:) [])
h c z = build (\x ->
let z1 = z ++ z
in if c
then \y -> x (y ++ z1)
else \y -> x (z1 ++ y))
One can see that `build` assigns to `g` demand <L,C(C1(U))>.
Therefore, when analyzing the lambda `(\x -> ...)`, we
expect each lambda \y -> ... to be annotated as "one-shot"
one. Therefore (\x -> \y -> x (y ++ z)) should be analyzed with a
demand <C(C(..), C(C1(U))>.
This is achieved by, first, converting the lazy demand L into the
strict S by the second clause of the analysis.
Note [Analysing with absent demand]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we analyse an expression with demand <L,A>. The "A" means
"absent", so this expression will never be needed. What should happen?
There are several wrinkles:
* We *do* want to analyse the expression regardless.
Reason: Note [Always analyse in virgin pass]
But we can post-process the results to ignore all the usage
demands coming back. This is done by postProcessDmdType.
* But in the case of an *unlifted type* we must be extra careful,
because unlifted values are evaluated even if they are not used.
Example (see Trac #9254):
f :: (() -> (# Int#, () #)) -> ()
-- Strictness signature is
-- <C(S(LS)), 1*C1(U(A,1*U()))>
-- I.e. calls k, but discards first component of result
f k = case k () of (# _, r #) -> r
g :: Int -> ()
g y = f (\n -> (# case y of I# y2 -> y2, n #))
Here f's strictness signature says (correctly) that it calls its
argument function and ignores the first component of its result.
This is correct in the sense that it'd be fine to (say) modify the
function so that always returned 0# in the first component.
But in function g, we *will* evaluate the 'case y of ...', because
it has type Int#. So 'y' will be evaluated. So we must record this
usage of 'y', else 'g' will say 'y' is absent, and will w/w so that
'y' is bound to an aBSENT_ERROR thunk.
An alternative would be to replace the 'case y of ...' with (say) 0#,
but I have not tried that. It's not a common situation, but it is
not theoretical: unsafePerformIO's implementation is very very like
'f' above.
************************************************************************
* *
Demand signatures
* *
************************************************************************
In a let-bound Id we record its strictness info.
In principle, this strictness info is a demand transformer, mapping
a demand on the Id into a DmdType, which gives
a) the free vars of the Id's value
b) the Id's arguments
c) an indication of the result of applying
the Id to its arguments
However, in fact we store in the Id an extremely emasculated demand
transformer, namely
a single DmdType
(Nevertheless we dignify StrictSig as a distinct type.)
This DmdType gives the demands unleashed by the Id when it is applied
to as many arguments as are given in by the arg demands in the DmdType.
Also see Note [Nature of result demand] for the meaning of a DmdResult in a
strictness signature.
If an Id is applied to fewer arguments than its arity, it means that
the demand on the function at a call site is weaker than the vanilla
call demand, used for signature inference. Therefore we place a top
demand on all arguments. Otherwise, the demand is specified by Id's
signature.
For example, the demand transformer described by the demand signature
StrictSig (DmdType {x -> <S,1*U>} <L,A><L,U(U,U)>m)
says that when the function is applied to two arguments, it
unleashes demand <S,1*U> on the free var x, <L,A> on the first arg,
and <L,U(U,U)> on the second, then returning a constructor.
If this same function is applied to one arg, all we can say is that it
uses x with <L,U>, and its arg with demand <L,U>.
-}
-- | A strictness signature: an extremely simple demand transformer,
-- represented by a single 'DmdType'.
newtype StrictSig = StrictSig DmdType
                  deriving( Eq )

instance Outputable StrictSig where
  ppr (StrictSig ty) = ppr ty

-- Used for printing top-level strictness pragmas in interface files
pprIfaceStrictSig :: StrictSig -> SDoc
pprIfaceStrictSig (StrictSig (DmdType _ dmds res))
  = hcat (map ppr dmds) <> ppr res
-- | Wrap a demand type into a signature.
mkStrictSig :: DmdType -> StrictSig
mkStrictSig = StrictSig

-- | Build a signature with an empty free-variable environment.
mkClosedStrictSig :: [Demand] -> DmdResult -> StrictSig
mkClosedStrictSig ds res = mkStrictSig (DmdType emptyDmdEnv ds res)

-- | Argument demands and result of a signature.
splitStrictSig :: StrictSig -> ([Demand], DmdResult)
splitStrictSig (StrictSig (DmdType _ dmds res)) = (dmds, res)
-- | Add extra (top-demanded) leading arguments to a strictness
-- signature.
increaseStrictSigArity :: Int -> StrictSig -> StrictSig
increaseStrictSigArity arity_increase (StrictSig (DmdType env dmds res)) =
  StrictSig (DmdType env (extra_dmds ++ dmds) res)
  where
    extra_dmds = replicate arity_increase topDmd
-- | True if the signature carries no information at all.
isTopSig :: StrictSig -> Bool
isTopSig (StrictSig ty) = isTopDmdType ty

-- | True if the signature records demands on any free variables.
hasDemandEnvSig :: StrictSig -> Bool
hasDemandEnvSig (StrictSig (DmdType env _ _)) = not (isEmptyVarEnv env)

-- | The free-variable demand environment of a signature.
strictSigDmdEnv :: StrictSig -> DmdEnv
strictSigDmdEnv (StrictSig (DmdType env _ _)) = env

isBottomingSig :: StrictSig -> Bool
-- True if the signature diverges or throws an exception
isBottomingSig (StrictSig (DmdType _ _ res)) = isBotRes res
-- | Canonical signatures: no information / definite divergence /
-- definite exception.
nopSig, botSig, exnSig :: StrictSig
nopSig = StrictSig nopDmdType
botSig = StrictSig botDmdType
exnSig = StrictSig exnDmdType

-- | Signature of a saturated product constructor of the given arity.
cprProdSig :: Arity -> StrictSig
cprProdSig arity = StrictSig (cprProdDmdType arity)

-- | Force a signature to normal form.
seqStrictSig :: StrictSig -> ()
seqStrictSig (StrictSig ty) = seqDmdType ty
dmdTransformSig :: StrictSig -> CleanDemand -> DmdType
-- (dmdTransformSig fun_sig dmd) considers a call to a function whose
-- signature is fun_sig, with demand dmd. We return the demand
-- that the function places on its context (eg its args)
dmdTransformSig (StrictSig dmd_ty@(DmdType _ arg_ds _)) cd
  = postProcessUnsat (peelManyCalls (length arg_ds) cd) dmd_ty
    -- see Note [Demands from unsaturated function calls]
dmdTransformDataConSig :: Arity -> StrictSig -> CleanDemand -> DmdType
-- Same as dmdTransformSig but for a data constructor (worker),
-- which has a special kind of demand transformer.
-- If the constructor is saturated, we feed the demand on
-- the result into the constructor arguments.
dmdTransformDataConSig arity (StrictSig (DmdType _ _ con_res))
                       (JD { sd = str, ud = abs })
  | Just str_dmds <- go_str arity str
  , Just abs_dmds <- go_abs arity abs
  = DmdType emptyDmdEnv (mkJointDmds str_dmds abs_dmds) con_res
    -- Must remember whether it's a product, hence con_res, not TopRes
  | otherwise -- Not saturated
  = nopDmdType
  where
    -- Peel off 'arity' call layers, then split the residual demand
    -- into the per-field demands
    go_str 0 dmd        = splitStrProdDmd arity dmd
    go_str n (SCall s') = go_str (n-1) s'
    go_str n HyperStr   = go_str (n-1) HyperStr
    go_str _ _          = Nothing

    go_abs 0 dmd            = splitUseProdDmd arity dmd
    go_abs n (UCall One u') = go_abs (n-1) u'
    go_abs _ _              = Nothing
dmdTransformDictSelSig :: StrictSig -> CleanDemand -> DmdType
-- Like dmdTransformDataConSig, we have a special demand transformer
-- for dictionary selectors. If the selector is saturated (ie has one
-- argument: the dictionary), we feed the demand on the result into
-- the indicated dictionary component.
dmdTransformDictSelSig (StrictSig (DmdType _ [dict_dmd] _)) cd
  | (cd',defer_use) <- peelCallDmd cd
  , Just jds <- splitProdDmd_maybe dict_dmd
  = postProcessUnsat defer_use $
    DmdType emptyDmdEnv [mkOnceUsedDmd $ mkProdDmd $ map (enhance cd') jds] topRes
  | otherwise
  = nopDmdType -- See Note [Demand transformer for a dictionary selector]
  where
    -- Replace the demand on the one used component with the demand on
    -- the selector's result; absent fields stay absent
    enhance cd old | isAbsDmd old = old
                   | otherwise    = mkOnceUsedDmd cd -- This is the one!
dmdTransformDictSelSig _ _ = panic "dmdTransformDictSelSig: no args"
{-
Note [Demand transformer for a dictionary selector]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we evaluate (op dict-expr) under demand 'd', then we can push the demand 'd'
into the appropriate field of the dictionary. What *is* the appropriate field?
We just look at the strictness signature of the class op, which will be
something like: U(AAASAAAAA). Then replace the 'S' by the demand 'd'.
For single-method classes, which are represented by newtypes the signature
of 'op' won't look like U(...), so the splitProdDmd_maybe will fail.
That's fine: if we are doing strictness analysis we are also doing inlining,
so we'll have inlined 'op' into a cast. So we can bale out in a conservative
way, returning nopDmdType.
It is (just.. Trac #8329) possible to be running strictness analysis *without*
having inlined class ops from single-method classes. Suppose you are using
ghc --make; and the first module has a local -O0 flag. So you may load a class
without interface pragmas, ie (currently) without an unfolding for the class
ops. Now if a subsequent module in the --make sweep has a local -O flag
you might do strictness analysis, but there is no inlining for the class op.
This is weird, so I'm not worried about whether this optimises brilliantly; but
it should not fall over.
-}
-- | One-shot info for each value argument of a call, derived from the
-- callee's usage signature; empty for unsaturated calls.
argsOneShots :: StrictSig -> Arity -> [[OneShotInfo]]
-- See Note [Computing one-shot info]
argsOneShots (StrictSig (DmdType _ arg_ds _)) n_val_args
  | unsaturated_call = []
  | otherwise        = go arg_ds
  where
    unsaturated_call = arg_ds `lengthExceeds` n_val_args

    go [] = []
    go (arg_d : arg_ds) = argOneShots arg_d `cons` go arg_ds

    -- Avoid list tail like [ [], [], [] ]
    cons [] [] = []
    cons a as = a:as
-- saturatedByOneShots n C1(C1(...)) = True,
-- <=>
-- there are at least n nested C1(..) calls
-- See Note [Demand on the worker] in WorkWrap
saturatedByOneShots :: Int -> Demand -> Bool
saturatedByOneShots n (JD { ud = usg }) =
  case usg of
    Use _ arg_usg -> peel n arg_usg
    _             -> False
  where
    -- Strip one called-once layer per expected argument
    peel 0 _             = True
    peel k (UCall One u) = peel (k-1) u
    peel _ _             = False
-- | One-shot info for each lambda of the argument this demand is put
-- on (depending on saturation).
argOneShots :: Demand -> [OneShotInfo]
argOneShots (JD { ud = usg }) =
  case usg of
    Use _ arg_usg -> collect arg_usg
    _             -> []
  where
    collect (UCall One  u) = OneShotLam    : collect u
    collect (UCall Many u) = NoOneShotInfo : collect u
    collect _              = []
{- Note [Computing one-shot info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider a call
f (\pqr. e1) (\xyz. e2) e3
where f has usage signature
C1(C(C1(U))) C1(U) U
Then argsOneShots returns a [[OneShotInfo]] of
[[OneShot,NoOneShotInfo,OneShot], [OneShot]]
The occurrence analyser propagates this one-shot info to the
binders \pqr and \xyz; see Note [Use one-shot information] in OccurAnal.
-}
-- appIsBottom returns true if an application to n args
-- would diverge or throw an exception
-- See Note [Unsaturated applications]
appIsBottom :: StrictSig -> Int -> Bool
appIsBottom (StrictSig (DmdType _ ds res)) n
  -- Bottoming result only counts once the signature is saturated
  | isBotRes res = not $ lengthExceeds ds n
appIsBottom _ _ = False
{-
Note [Unsaturated applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a function having bottom as its demand result is applied to fewer
arguments than its syntactic arity, we cannot say for sure
that it is going to diverge. This is the reason why we use the
function appIsBottom, which, given a strictness signature and a number
of arguments, says conservatively if the function is going to diverge
or not.
Zap absence or one-shot information, under control of flags
Note [Killing usage information]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The flags -fkill-one-shot and -fkill-absence let you switch off the generation
of absence or one-shot information altogether. This is only used for performance
tests, to see how important they are.
-}
zapUsageEnvSig :: StrictSig -> StrictSig
-- Remove the usage environment from the demand
zapUsageEnvSig (StrictSig (DmdType _ ds r)) = mkClosedStrictSig ds r

-- | Remove ALL usage information (absence, used-once and called-once),
-- keeping only the strictness component of the demand.
zapUsageDemand :: Demand -> Demand
-- Remove the usage info, but not the strictness info, from the demand
zapUsageDemand = kill_usage $ KillFlags
    { kf_abs         = True
    , kf_used_once   = True
    , kf_called_once = True
    }

-- | Remove all 1* information (but not C1 information) from the demand
zapUsedOnceDemand :: Demand -> Demand
zapUsedOnceDemand = kill_usage $ KillFlags
    { kf_abs         = False
    , kf_used_once   = True
    , kf_called_once = False
    }
-- | Remove all 1* information (but not C1 information) from every argument
-- demand of a strictness signature; the environment and result parts are
-- left untouched.
zapUsedOnceSig :: StrictSig -> StrictSig
zapUsedOnceSig (StrictSig (DmdType env ds r)) =
  let ds' = map zapUsedOnceDemand ds
  in  StrictSig (DmdType env ds' r)
-- | Kill usage information in a demand, under control of the -fkill-* flags.
-- A no-op when no kill flags are set.
-- See Note [Killing usage information]
killUsageDemand :: DynFlags -> Demand -> Demand
killUsageDemand dflags dmd =
  case killFlags dflags of
    Just kfs -> kill_usage kfs dmd
    Nothing  -> dmd

-- | Kill usage information in every argument demand of a strictness
-- signature, under control of the -fkill-* flags.
-- See Note [Killing usage information]
killUsageSig :: DynFlags -> StrictSig -> StrictSig
killUsageSig dflags sig@(StrictSig (DmdType env ds r)) =
  case killFlags dflags of
    Just kfs -> StrictSig (DmdType env (map (kill_usage kfs) ds) r)
    Nothing  -> sig
-- | Which pieces of usage information to discard; see
-- Note [Killing usage information].
data KillFlags = KillFlags
    { kf_abs         :: Bool   -- ^ replace absence (Abs) info with top usage
    , kf_used_once   :: Bool   -- ^ forget "used at most once" (1*) info
    , kf_called_once :: Bool   -- ^ forget "called at most once" (C1) info
    }
-- | Build the 'KillFlags' requested on the command line, or 'Nothing' when
-- no usage information is to be discarded at all.
-- See Note [Killing usage information]
killFlags :: DynFlags -> Maybe KillFlags
killFlags dflags
  | kf_abs || kf_used_once =
      Just (KillFlags { kf_abs         = kf_abs
                      , kf_used_once   = kf_used_once
                      , kf_called_once = kf_called_once })
  | otherwise = Nothing
  where
    kf_abs         = gopt Opt_KillAbsence dflags
    kf_used_once   = gopt Opt_KillOneShot dflags
    -- called-once info is killed exactly when used-once info is
    kf_called_once = kf_used_once
-- | Apply the kill flags to a demand: the strictness component is kept
-- as-is, only the usage component is rewritten.
kill_usage :: KillFlags -> Demand -> Demand
kill_usage kfs (JD { sd = s, ud = u }) = JD { sd = s, ud = zap_musg kfs u }

-- | Zap usage information on an argument usage.
zap_musg :: KillFlags -> ArgUse -> ArgUse
zap_musg kfs Abs
  | kf_abs kfs = useTop
  | otherwise  = Abs
zap_musg kfs (Use c u) =
  let cnt = if kf_used_once kfs then Many else c
  in  Use cnt (zap_usg kfs u)

-- | Zap usage information inside a usage demand, recursively.
zap_usg :: KillFlags -> UseDmd -> UseDmd
zap_usg kfs (UCall c u) =
  let cnt = if kf_called_once kfs then Many else c
  in  UCall cnt (zap_usg kfs u)
zap_usg kfs (UProd us) = UProd (map (zap_musg kfs) us)
zap_usg _   u          = u
-- If the argument is a used non-newtype dictionary, give it strict
-- demand. Also split the product type & demand and recur in order to
-- similarly strictify the argument's contained used non-newtype
-- superclass dictionaries. We use the demand as our recursive measure
-- to guarantee termination.
strictifyDictDmd :: Type -> Demand -> Demand
strictifyDictDmd ty dmd = case getUseDmd dmd of
  Use n _ |
    Just (tycon, _arg_tys, _data_con, inst_con_arg_tys)
      <- splitDataProductType_maybe ty,
    not (isNewTyCon tycon), isClassTyCon tycon -- is a non-newtype dictionary
    -> seqDmd `bothDmd` -- main idea: ensure it's strict
       case splitProdDmd_maybe dmd of
         -- superclass cycles should not be a problem, since the demand we are
         -- consuming would also have to be infinite in order for us to diverge
         Nothing -> dmd -- no components have interesting demand, so stop
                        -- looking for superclass dicts
         Just dmds
           | all (not . isAbsDmd) dmds -> evalDmd
             -- abstract to strict w/ arbitrary component use, since this
             -- smells like reboxing; results in CBV boxed
             --
             -- TODO revisit this if we ever do boxity analysis
           | otherwise -> case mkProdDmd $ zipWith strictifyDictDmd inst_con_arg_tys dmds of
               -- the usage count 'n' of the original demand is preserved
               JD {sd = s,ud = a} -> JD (Str VanStr s) (Use n a)
             -- TODO could optimize with an aborting variant of zipWith since
             -- the superclass dicts are always a prefix
  _ -> dmd -- unused or not a dictionary
{-
Note [HyperStr and Use demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The information "HyperStr" needs to be in the strictness signature, and not in
the demand signature, because we still want to know about the demand on things. Consider
f (x,y) True = error (show x)
f (x,y) False = x+1
The signature of f should be <S(SL),1*U(1*U(U),A)><S,1*U>m. If we were not
distinguishing the uses on x and y in the True case, we could either not figure
out how deeply we can unpack x, or that we do not have to pass y.
************************************************************************
* *
Serialisation
* *
************************************************************************
-}
-- Binary instances: each sum type is serialised as a single tag byte
-- followed by the constructor's fields, and deserialised by dispatching on
-- that tag.  Changing any tag value would break on-disk compatibility.

instance Binary StrDmd where
  -- Tags: 0 = HyperStr, 1 = HeadStr, 2 = SCall, 3 = SProd
  put_ bh HyperStr   = do putByte bh 0
  put_ bh HeadStr    = do putByte bh 1
  put_ bh (SCall s)  = do putByte bh 2
                          put_ bh s
  put_ bh (SProd sx) = do putByte bh 3
                          put_ bh sx
  get bh = do
    h <- getByte bh
    case h of
      0 -> do return HyperStr
      1 -> do return HeadStr
      2 -> do s <- get bh
              return (SCall s)
      _ -> do sx <- get bh
              return (SProd sx)

instance Binary ExnStr where
  put_ bh VanStr = putByte bh 0
  put_ bh ExnStr = putByte bh 1
  get bh = do h <- getByte bh
              return (case h of
                        0 -> VanStr
                        _ -> ExnStr)

instance Binary ArgStr where
  put_ bh Lazy = do
    putByte bh 0
  put_ bh (Str x s) = do
    putByte bh 1
    put_ bh x
    put_ bh s
  get bh = do
    h <- getByte bh
    case h of
      0 -> return Lazy
      _ -> do x <- get bh
              s <- get bh
              return $ Str x s

instance Binary Count where
  put_ bh One  = do putByte bh 0
  put_ bh Many = do putByte bh 1
  get bh = do h <- getByte bh
              case h of
                0 -> return One
                _ -> return Many

instance Binary ArgUse where
  put_ bh Abs = do
    putByte bh 0
  put_ bh (Use c u) = do
    putByte bh 1
    put_ bh c
    put_ bh u
  get bh = do
    h <- getByte bh
    case h of
      0 -> return Abs
      _ -> do c <- get bh
              u <- get bh
              return $ Use c u

instance Binary UseDmd where
  -- Tags: 0 = Used, 1 = UHead, 2 = UCall, 3 = UProd
  put_ bh Used = do
    putByte bh 0
  put_ bh UHead = do
    putByte bh 1
  put_ bh (UCall c u) = do
    putByte bh 2
    put_ bh c
    put_ bh u
  put_ bh (UProd ux) = do
    putByte bh 3
    put_ bh ux
  get bh = do
    h <- getByte bh
    case h of
      0 -> return $ Used
      1 -> return $ UHead
      2 -> do c <- get bh
              u <- get bh
              return (UCall c u)
      _ -> do ux <- get bh
              return (UProd ux)

instance (Binary s, Binary u) => Binary (JointDmd s u) where
  put_ bh (JD { sd = x, ud = y }) = do put_ bh x; put_ bh y
  get bh = do
    x <- get bh
    y <- get bh
    return $ JD { sd = x, ud = y }

instance Binary StrictSig where
  put_ bh (StrictSig aa) = do
    put_ bh aa
  get bh = do
    aa <- get bh
    return (StrictSig aa)

instance Binary DmdType where
  -- Ignore DmdEnv when spitting out the DmdType
  put_ bh (DmdType _ ds dr)
    = do put_ bh ds
         put_ bh dr
  get bh
    = do ds <- get bh
         dr <- get bh
         -- the environment is not serialised, so restore an empty one
         return (DmdType emptyDmdEnv ds dr)

instance Binary DmdResult where
  put_ bh (Dunno c) = do { putByte bh 0; put_ bh c }
  put_ bh ThrowsExn = putByte bh 1
  put_ bh Diverges  = putByte bh 2
  get bh = do { h <- getByte bh
              ; case h of
                  0 -> do { c <- get bh; return (Dunno c) }
                  1 -> return ThrowsExn
                  _ -> return Diverges }

instance Binary CPRResult where
  put_ bh (RetSum n) = do { putByte bh 0; put_ bh n }
  put_ bh RetProd    = putByte bh 1
  put_ bh NoCPR      = putByte bh 2
  get bh = do
    h <- getByte bh
    case h of
      0 -> do { n <- get bh; return (RetSum n) }
      1 -> return RetProd
      _ -> return NoCPR
| rahulmutt/ghcvm | compiler/Eta/BasicTypes/Demand.hs | bsd-3-clause | 80,826 | 5 | 19 | 22,522 | 14,301 | 7,492 | 6,809 | 946 | 9 |
{-# LANGUAGE CPP #-}
#ifdef __GHCJS__
{-# LANGUAGE JavaScriptFFI #-}
#else
{-# LANGUAGE RankNTypes, TypeSynonymInstances, FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
#endif
module React.Imports
( RawEvent
, ImportedClass
-- * Foreign imports
, js_render
, js_createClass
, js_react_createElement_DOM
, js_react_createElement_Class
, js_set_handler
, js_forceUpdate
, js_foreignParent
) where
import React.GHCJS
-- | Opaque phantom tag for the raw JS event object handed to React handlers.
data RawEvent_
type RawEvent = JSRef RawEvent_

-- | Opaque phantom tag for a React component class imported from JS,
-- indexed by its prop and signal types.
data ImportedClass_ props sig
type ImportedClass props sig = JSRef (ImportedClass_ props sig)
#ifdef __GHCJS__

-- Real implementations: thin JavaScript FFI wrappers around the React API.

-- Mount an element into a DOM node.
foreign import javascript unsafe "React.render($1, $2)"
    js_render :: JSRef () -> Element -> IO ()

-- Create a React component class from a spec object (shim on the JS side).
foreign import javascript unsafe "js_createClass"
    js_createClass :: JSAny -> JSAny

-- Create an element for a DOM tag name with props and children.
foreign import javascript unsafe "React.createElement.apply(null, [$1, $2].concat($3))"
    js_react_createElement_DOM :: JSString -> JSAny -> JSAny -> IO JSAny

-- Create an element for a component class with props and children.
foreign import javascript unsafe "React.createElement.apply(null, [$1, $2].concat($3))"
-- foreign import javascript unsafe "function(x, y, z) { console.log(x, y, z, [x,y].concat(z)); return React.createElement.apply(null, [x, y].concat(z)); }($1, $2, $3)"
    js_react_createElement_Class :: JSAny -> JSAny -> JSAny -> IO JSAny

-- Attach a Haskell callback as an event handler (shim on the JS side).
foreign import javascript unsafe "js_set_handler"
    js_set_handler :: Int -> JSString -> (JSFun (RawEvent -> IO ())) -> JSAny -> IO ()

-- Force a component instance to re-render.
foreign import javascript unsafe "$1.forceUpdate()"
    js_forceUpdate :: JSAny -> IO ()

-- Create an element from an imported (foreign) component class.
foreign import javascript unsafe "React.createElement($1, $2, $3)"
    js_foreignParent :: ImportedClass props sig -> JSRef props -> JSAny -> IO JSAny

#else

-- mock the foreign imports
-- These keep the module compilable under plain GHC (e.g. for tooling);
-- evaluating any of them is an error.
js_render :: JSRef () -> Element -> IO ()
js_render = error "cannot evaluate js_render in ghc"

js_createClass :: JSAny -> JSAny
js_createClass = error "cannot evaluate js_createClass in ghc"

js_react_createElement_DOM :: JSString -> JSAny -> JSAny -> IO JSAny
js_react_createElement_DOM = error "cannot evaluate js_react_createElement_DOM in ghc"

js_react_createElement_Class :: JSAny -> JSAny -> JSAny -> IO JSAny
js_react_createElement_Class = error "cannot evaluate js_react_createElement_Class in ghc"

js_set_handler :: Int -> JSString -> JSFun (RawEvent -> IO ()) -> JSAny -> IO ()
js_set_handler = error "cannot evaluate js_set_handler in ghc"

js_forceUpdate :: JSAny -> IO ()
js_forceUpdate = error "cannot evaluate js_forceUpdate in ghc"

js_foreignParent :: ImportedClass props sig -> JSRef props -> JSAny -> IO JSAny
js_foreignParent = error "cannot evaluate js_foreignParent in ghc"

#endif
| seanhess/react-haskell | src/React/Imports.hs | mit | 2,645 | 25 | 10 | 425 | 310 | 171 | 139 | -1 | -1 |
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE MultiParamTypeClasses, GeneralizedNewtypeDeriving, ScopedTypeVariables, OverloadedStrings, FlexibleContexts #-}
module Msg.Json
( JMsg (..)
, jsonMessages
, JConvContext
, JConvT(..)
, newJConvContext
, runJConvT
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Reader
import Data.Maybe
import Control.Monad.Trans.Maybe
import Data.List
import Data.Word
import qualified Data.ByteString as B
import qualified Data.ByteString.UTF8 as UTF8B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.UTF8 as UTF8
import qualified Data.Text.Lazy as TL
import qualified Control.Exception as E
import Text.JSON
import Text.Printf
import System.IO.Unsafe
import DBus.Types hiding (fromVariant,toVariant)
import qualified DBus.Types as DB
import DBus.Message
import DBus.Wire ( marshalMessage, unmarshalMessage, Endianness(..) )
import Rpc.Core hiding (ObjectPath, BusName, MemberName, InterfaceName, mkObjectPath_, mkBusName_, mkMemberName_, mkInterfaceName_) --hiding (ObjectPath, BusName, MemberName, InterfaceName)
import qualified Rpc.Core as Rpc
import Channel
import Msg.JsonRpc
import Msg.JsonConv
import Msg.DBus
import Bouncer
import Tools.Log
import Tools.FreezeIOM
import Control.Concurrent
import qualified Control.Monad.Error as CME
import Data.String
import qualified Data.Map as M
import qualified DBus.Introspection as I
-- | Any JSON-RPC style message that can travel over a channel: a request,
-- a signal, a successful response, or an error response.
data JMsg
   = JMsgReq JReq
   | JMsgSignal JSignal
   | JMsgResp JResp
   | JMsgRespErr JRespErr

-- special treatment for websocket channels as we do not assume null terminated jsons but
-- we assume messages are framed and boundaries preserved
jsonMessages :: Channel -> IO [ JMsg ]
jsonMessages c@(WebSocketCh{}) = framedJsonMessages c
jsonMessages c = nullSeparatedJsonMessages =<< incomingData c

-- 1MB
maxFrameLen = 1024*1024

-- on message boundary preserving channel
-- NB: the result list is produced lazily via unsafeInterleaveIO, so
-- messages are read from the channel only as the list is consumed.
framedJsonMessages :: Channel -> IO [ JMsg ]
framedJsonMessages ch
    = frame =<< recv ch maxFrameLen
      where
        frame x
            | B.null x = return [] -- EOF
            | otherwise =
                do m_ <- decodeJMsg (UTF8B.toString x)
                   case m_ of
                     -- undecodable frames are skipped, not fatal
                     Nothing -> framedJsonMessages ch
                     Just m  -> (:) <$> pure m <*> unsafeInterleaveIO (framedJsonMessages ch)

-- on channel which does not preserve msg boundaries
-- Messages are delimited by NUL bytes; the tail of the stream is again
-- consumed lazily.
nullSeparatedJsonMessages :: BL.ByteString -> IO [ JMsg ]
nullSeparatedJsonMessages dat
    = do let (x,xs) = BL.break (== 0) dat
         ys <- if BL.null xs
                  then return []
                  else unsafeInterleaveIO (nullSeparatedJsonMessages $ BL.tail xs)
         if BL.null x
            then return []
            else do
              m_ <- decodeJMsg (UTF8.toString x)
              case m_ of
                Nothing -> return ys
                Just m  -> return (m:ys)
-- | Parse one JSON text into a 'JMsg'.  Yields 'Nothing' (after logging a
-- warning) when the text is not valid JSON, or is JSON but not a
-- recognisable message.
decodeJMsg str =
    case decode str of
      Ok v | Just m <- jmsgFromJson v -> return (Just m)
           | otherwise -> do warn ("invalid json message: " ++ str)
                             return Nothing
      _ -> do warn ("json parse failed: " ++ str)
              return Nothing
-- | Classify a decoded JSON object as one of the four message kinds, driven
-- by its "type" field; a missing "type" defaults to "request".
-- Non-objects are never messages.
jmsgFromJson :: JSValue -> Maybe JMsg
jmsgFromJson v@(JSObject o)
    = let kv = fromJSObject o in
      from_type (fromMaybe "request" (join $ fmap unpstr $ "type" `lookup` kv))
    where
      from_type "request"  = JMsgReq <$> jreqFromJson v
      from_type "signal"   = JMsgSignal <$> jsignalFromJson v
      from_type "response" = JMsgResp <$> jrespFromJson v
      from_type "error"    = JMsgRespErr <$> jerrFromJson v
      from_type _          = Nothing
      -- unpack a JSON string value, if that is what we got
      unpstr (JSString s) = Just $ fromJSString s
      unpstr _            = Nothing
jmsgFromJson _ = Nothing

bufferSz = 4096

-- Lazy chunks of incoming data, terminating on EOF or error
-- NOTE(review): an empty recv is turned into a thrown EOF, which the
-- catch-all handler below converts into the end of the stream.
incomingData :: Channel -> IO BL.ByteString
incomingData sock =
    ( do chunk <- recv sock bufferSz
         when ( B.null chunk ) $ E.throw (EOF sock)
         other_chunks <- unsafeInterleaveIO $ incomingData sock
         return $ BL.append (BL.fromChunks [chunk]) other_chunks
    ) `E.catch` err
    where
      err :: E.SomeException -> IO BL.ByteString
      -- any exception (including the EOF above) ends the stream after a log
      err x = do warn $ printf "%s on %s" (show x) (show sock)
                 return BL.empty
-- | Serialise any message to its JSON representation by dispatching on the
-- message kind.
jmsgToJson :: JMsg -> JSValue
jmsgToJson msg =
    case msg of
      JMsgReq m     -> jreqToJson m
      JMsgSignal m  -> jsignalToJson m
      JMsgResp m    -> jrespToJson m
      JMsgRespErr m -> jerrToJson m
-- | Helpers needed while converting json <-> dbus messages: a way to look
-- up the dbus signature expected for a given message.
data JConvContext m
   = JConvContext
     {
       findSig :: JMsg -> m (Maybe [Type])
     }

-- | A fully qualified dbus method: destination, path, interface, member.
data QSig = QSig BusName ObjectPath InterfaceName MemberName

-- | Make a conversion context whose signature lookup introspects the
-- destination service (with caching, via 'sigFinder').  Only requests carry
-- enough addressing information to look up a signature.
newJConvContext :: (FreezeIOM ctx (Either e) m, MonadRpc e m) => m (JConvContext m)
newJConvContext =
    do f <- sigFinder
       return JConvContext {
                    findSig = \m ->
                        case m of
                          JMsgReq r ->
                              let q = QSig <$> Just (mkBusName_ (jreqDest r))
                                           <*> Just (mkObjectPath_ (jreqPath r))
                                           <*> Just (mkInterfaceName_ (jreqInterface r))
                                           <*> Just (mkMemberName_ (jreqMethod r))
                              in case q of
                                   Just (QSig b o i m) -> f b o i m
                                   _ -> return Nothing
                          _ -> return Nothing
                  }

-- | Reader transformer giving conversion code access to a 'JConvContext'.
newtype
    JConvT m a
    = JConvT { unJConvT :: ReaderT (JConvContext m) m a }
    deriving (Functor, Monad, MonadIO)

instance MonadTrans JConvT where
    lift f = JConvT $ lift f

-- | Fetch the ambient conversion context.
context :: Monad m => JConvT m (JConvContext m)
context = JConvT $ ask

-- | Run a conversion computation against a given context.
runJConvT :: (Monad m) => JConvContext m -> JConvT m a -> m a
runJConvT c f
    = runReaderT (unJConvT f) c

instance ReceiveMessages Channel JMsg where
    receiveMessages ch = jsonMessages ch

instance SendMessages Channel JMsg where
    -- websocket frames preserve message boundaries, so the bare json
    -- encoding is sufficient
    sendMessage ch@(WebSocketCh{}) m
        = send_all ch buf
          where buf = UTF8B.fromString $ encode (jmsgToJson m)
    -- use null terminator if not websockets
    sendMessage ch m
        = send_all ch buf
          where buf = (UTF8B.fromString $ encode (jmsgToJson m)) `B.snoc` 0
-- | Make a dbus serial out of any integral message id (truncated to
-- Word32).  The 'fromJust' is safe because a Word32 variant always
-- round-trips through 'DB.fromVariant'.
mkSerial :: (Integral a) => a -> Serial
mkSerial v = fromJust . DB.fromVariant . DB.toVariant $ (fromIntegral v :: Word32)

-- | Messages that may carry an explicit dbus type signature.
class Signed a where
    signature :: a -> Maybe TL.Text

instance Signed JReq where signature = jreqSignature
instance Signed JSignal where signature = jsigSignature
instance Signed JResp where signature = jrespSignature
instance Signed JRespErr where signature = jerrSignature
instance Signed JMsg where
    signature (JMsgReq x) = signature x
    signature (JMsgSignal x) = signature x
    signature (JMsgResp x) = signature x
    signature (JMsgRespErr x) = signature x

-- Wire-format bytes for a dbus message, or Nothing if marshalling fails.
marshaled serial m =
    case marshalMessage BigEndian serial m of
      Left _    -> Nothing
      Right buf -> Just $ (B.concat $ BL.toChunks buf)

mkMethodCallMsg :: Serial -> MethodCall -> Maybe Msg
mkMethodCallMsg serial m
    = Msg <$> pure (ReceivedMethodCall serial Nothing m)
          <*> marshaled serial m

mkSignalMsg :: Serial -> Signal -> Maybe Msg
mkSignalMsg serial m
    = Msg <$> pure (ReceivedSignal serial Nothing m)
          <*> marshaled serial m

mkErrorMsg :: Serial -> Error -> Maybe Msg
mkErrorMsg serial m
    = Msg <$> pure (ReceivedError serial Nothing m)
          <*> marshaled serial m

mkMethodReturnMsg :: Serial -> MethodReturn -> Maybe Msg
mkMethodReturnMsg serial m
    = Msg <$> pure (ReceivedMethodReturn serial Nothing m)
          <*> marshaled serial m

-- | type conversion mode
-- Prefer the signature carried in the message itself; failing that, try
-- introspection; failing both, guess types from the json values.
convMode :: forall m. (Monad m) => JMsg -> JConvT m TypeConvMode
convMode msg
    = return . fromMaybe GuessTypes =<< (provided `or` introspected)
    where
      provided, introspected :: JConvT m (Maybe TypeConvMode)
      provided
          = do let sig = join $ mkSignature `fmap` signature msg
               return (SpecifyTypes . signatureTypes <$> sig)
      introspected
          = do c <- context
               maybe_sig <- lift $ findSig c msg
               return (SpecifyTypes `fmap` maybe_sig)
      -- left-biased: 'q' only runs when 'p' produced Nothing
      p `or` q
          = p >>= cont
          where
            cont (Just m) = return $ Just m
            cont Nothing  = q
-- | Run a conversion action, logging a warning when it yields 'Nothing';
-- the result is passed through unchanged either way.
jmsgErr tag act = do
    result <- act
    case result of
      Nothing -> liftIO $ warn ("converting " ++ tag ++ " to dbus message FAILED")
      Just _  -> return ()
    return result
-- Convert json messages to wire-level dbus messages.  Each case first
-- determines the type conversion mode (explicit signature / introspection /
-- guessing), then builds the dbus message using the json id as serial.
instance MonadIO m => MsgConvert (JConvT m) JMsg Msg where
    msgconvert m@(JMsgReq x) = MaybeT . jmsgErr "request" $
        do types <- convMode m
           return $ do m' <- convToMethodCall types x
                       let serial = mkSerial (intReqID $ jreqId x)
                       mkMethodCallMsg serial m'
    msgconvert m@(JMsgSignal x) = MaybeT . jmsgErr "signal" $
        do types <- convMode m
           return $ do m' <- convToSignal types x
                       let serial = mkSerial (intReqID $ jsigId x)
                       mkSignalMsg serial m'
    msgconvert m@(JMsgResp x) = MaybeT . jmsgErr "response" $
        do types <- convMode m
           return $ do m' <- convToMethodReturn types x
                       let serial = mkSerial (intReqID $ jrespId x)
                       mkMethodReturnMsg serial m'
    msgconvert m@(JMsgRespErr x) = MaybeT . jmsgErr "error response" $
        do types <- convMode m
           return $ do m' <- convToError types x
                       let serial = mkSerial (intReqID $ jerrId x)
                       mkErrorMsg serial m'

-- Convert wire-level dbus messages back to json messages, logging any
-- failed conversion.  Other received message kinds are dropped.
instance MonadIO m => MsgConvert (JConvT m) Msg JMsg where
    msgconvert (Msg rm _)
        = MaybeT $ case rm of
            ReceivedMethodCall s _ x   -> err "method-call" $ return $ JMsgReq <$> convFromMethodCall (s,x)
            ReceivedMethodReturn s _ x -> err "method-return" $ return $ JMsgResp <$> convFromMethodReturn (s,x)
            ReceivedError s _ x        -> err "error" $ return $ JMsgRespErr <$> convFromError (s,x)
            ReceivedSignal s _ x       -> err "signal" $ return $ JMsgSignal <$> convFromSignal (s,x)
            _ -> return Nothing
        where
          err tag act =
              do x <- act
                 when (isNothing x) $
                      liftIO $ warn ("converting " ++ tag ++ " to json message FAILED")
                 return x
-- | dbus signature lookup via introspection
-- Calls org.freedesktop.DBus.Introspectable.Introspect on the remote
-- object and parses the returned XML.  Errors here are fatal ('error').
introspect :: (MonadRpc e m) => BusName -> ObjectPath -> m I.Object
introspect service p
    = do r <- rpcCallOnce $ RpcCall (Rpc.mkBusName_ . DB.strBusName $ service)
                                    (Rpc.mkObjectPath_ . DB.strObjectPath $ p)
                                    (Rpc.mkInterfaceName_ . DB.strInterfaceName $ introspectable)
                                    "Introspect" []
         ret (conv $ r)
    where
      conv [xmlv] = do xml <- fromVariant xmlv
                       I.fromXML p xml
      conv _ = error "unexpected introspect response"
      ret Nothing  = error $ "failed to introspect " ++ show service ++ " " ++ show p
      ret (Just v) = return v

introspectable :: InterfaceName
introspectable = fromString "org.freedesktop.DBus.Introspectable"

-- Cache of introspection results, keyed by (service, object path).
type OMap = M.Map (BusName,ObjectPath) I.Object

-- | Produce a signature-lookup function with a shared MVar-backed cache of
-- introspection results.  The returned function yields the parameter types
-- of the named method, or Nothing when the object/interface/method cannot
-- be found.
sigFinder :: forall ctx e m. (FreezeIOM ctx (Either e) m, MonadRpc e m) => m (BusName -> ObjectPath -> InterfaceName -> MemberName -> m (Maybe [Type]))
sigFinder
    = do cache <- liftIO $ newMVar M.empty
         return $ finder cache
    where
      -- Extract the in-parameter signature of a method from introspection
      -- data.
      sig :: I.Object -> InterfaceName -> MemberName -> Maybe [Type]
      sig (I.Object _ intfs _) i m
          = do i' <- find_i i intfs
               m' <- find_m m i'
               mapM paramSig (inparams m')
          where
            find_i n = find (\(I.Interface n' _ _ _) -> n == n')
            find_m n (I.Interface _ methods _ _) = find (\(I.Method n' _ _) -> n == n') methods
            inparams (I.Method _ p _) = p
            paramSig (I.Parameter _ ps) = shead (signatureTypes ps)
            shead (x:xs) = Just x
            shead _      = Nothing

      finder :: (MVar OMap) -> BusName -> ObjectPath -> InterfaceName -> MemberName -> m (Maybe [Type])
      finder cache service path intf meth =
          do obj <- intro (service,path)
             case obj of
               Nothing  -> return Nothing
               Just obj -> return $ sig obj intf meth
          where
            -- Introspect with caching; the monadic action is frozen so the
            -- rpc call can run inside modifyMVar's IO callback.
            intro :: (BusName,ObjectPath) -> m (Maybe I.Object)
            intro key@(s,p) = do
              context <- rpcGetContext
              let continue (Left ex) = return Nothing
                  continue (Right v) = return (Just v)
              x <- freeze $ \context -> do
                     liftIO . modifyMVar cache $ \ch ->
                        case M.lookup key ch of
                          Just v  -> return (ch, Right v)
                          Nothing -> do
                            r <- thaw context (introspect s p :: m I.Object)
                            case r of
                              Right v -> return (M.insert key v ch, Right v)
                              Left er -> return (ch, Left er)
              continue x
| jean-edouard/manager | rpc-proxy/Msg/Json.hs | gpl-2.0 | 13,227 | 11 | 54 | 3,618 | 4,135 | 2,083 | 2,052 | 287 | 6 |
module DotRender(
writeDotRun
) where
import Data.List
import Data.Maybe
import Dot
import Resolver.Log
import Resolver.PrettyPrint
import Resolver.Types
import System.IO
import Text.Printf
import Types
import qualified Data.Set as Set
-- | Is step number @n@ inside the window of steps selected for rendering?
-- The window starts at 'firstStep' (default 0) and spans at most
-- 'maxSteps' entries (unbounded when unset).
inBounds :: Params -> Integer -> Bool
inBounds params n = n >= first && underCap
    where first    = fromMaybe 0 (firstStep params)
          underCap = case maxSteps params of
                       Nothing  -> True
                       Just cap -> n < first + cap
-- | Human-readable description of the dependency-solver choice on a link.
choiceText :: LinkChoice -> String
choiceText choice =
    case choice of
      LinkChoice (InstallVersion ver _) -> "Install " ++ pp ver
      LinkChoice (BreakSoftDep d)       -> "Break " ++ pp d
      Unknown                           -> "(...)"

-- | Edge label for a choice: like 'choiceText', except an unknown choice
-- renders as an empty label.
dotChoiceLabel :: LinkChoice -> String
dotChoiceLabel Unknown = ""
dotChoiceLabel lc      = choiceText lc
-- | The output format to render for: the one given on the command line, or
-- PostScript when none was specified.
inferTargetFormat :: Params -> TargetFormat
inferTargetFormat params = fromMaybe PS (targetFormat params)
-- | Image file used for "cloud" placeholder nodes; the flavour has to
-- match the target format of the renderer.
cloudImage :: Params -> String
cloudImage = imageFor . inferTargetFormat
    where imageFor PS  = "cloud.eps"
          imageFor PNG = "cloud.png"
-- | Graphviz node for a processed step, labelled with its number, score and
-- tier.  Steps that are full solutions (no broken dependencies left) are
-- highlighted with a filled, double-bordered style.
dotStepNode :: Params -> ProcessingStep -> Node
dotStepNode params step = node (name $ printf "step%d" (stepOrder step))
                          <<< set "label" (printf "Step: %d\nScore: %d\nTier: %s"
                                           (stepOrder step)
                                           (solScore $ stepSol step)
                                           (show $ solTier $ stepSol step))
                          <<< Set.null (solBrokenDeps (stepSol step)) `thenDo`
                              set "style" "filled" `andAlso`
                              set "peripheries" "2" `andAlso`
                              -- Graphviz attribute names are all-lowercase;
                              -- the previous "fillColor" spelling was
                              -- silently ignored (cf. "fillcolor" in
                              -- dotUnprocessedSuccs).
                              set "fillcolor" "lightgrey"
-- Generate nodes for any successors that were not processed in the
-- render.
-- | Nodes for successors of a step that the log renderer will not expand:
-- generated-but-unprocessed successors (dashed), plus "cloud" placeholders
-- for processed successors that fall outside the rendered window.
dotUnprocessedSuccs :: Params -> ProcessingStep -> [Node]
dotUnprocessedSuccs params step = unprocessed ++ excluded
    where unprocessed = [ node (name $ printf "step%dunproc%d" (stepOrder step) stepNum)
                          <<< set "label" (printf "Unprocessed\nScore: %d\nTier: %s"
                                           (solScore succSol)
                                           (show $ solTier succSol))
                          <<< set "style" "dashed"
                          <<< Set.null (solBrokenDeps (stepSol step)) `thenDo`
                              set "style" "dashed,filled" `andAlso`
                              set "peripheries" "2" `andAlso`
                              set "fillcolor" "lightgrey"
                          | ((Unprocessed { successorChoice = succChoice,
                                            successorSolution = succSol }),
                             stepNum)
                              <- zip (stepSuccessors step) ([0..] :: [Integer]) ]
          -- NB: the pattern deliberately shadows 'step' with the successor
          -- step, so both the label and the bounds check refer to the
          -- successor, while 'stepSuccessors step' still means the parent.
          excluded = [ node (name $ printf "step%d" (stepOrder step))
                       <<< set "label" (printf "Step %d+\n%d nodes..." (stepOrder step) (stepBranchSize step))
                       <<< set "shape" "plaintext"
                       <<< set "image" (cloudImage params)
                       | (Successor { successorStep = step }) <- stepSuccessors step,
                         not $ inBounds params (stepOrder step) ]

-- | If the parent of the given step (or of one of its
-- backpropagations) was excluded from the render, build and return a
-- node for it.
--
-- TODO: should show links between excluded nodes, etc...that will
-- need a bit of an overhaul though.
dotExcludedIndices :: Params -> ProcessingStep -> [Integer]
dotExcludedIndices params step =
    -- the direct parent, when it lies outside the rendered window
    (maybeToList $ do (ParentLink {parentLinkParent = parentStep}) <- stepPredecessor step
                      (if inBounds params $ stepOrder parentStep
                       then fail "Not an excluded step."
                       else return $ stepOrder parentStep))
    ++
    -- plus any backpropagation sources outside the window
    [ parentStepNum
      | Backpropagation {
          backpropagationStep =
            ProcessingStep {
              stepOrder = parentStepNum
            } } <- stepBackpropagations step,
        not $ inBounds params parentStepNum ]
-- | "Cloud" placeholder node standing in for a parent step that lies
-- outside the rendered window.
dotExcludedParentNode :: Params -> Integer -> Node
dotExcludedParentNode params stepNum =
    node (name stepName)
        <<< set "label" (printf "Step %d" stepNum)
        <<< set "shape" "plaintext"
        <<< set "image" (cloudImage params)
    where stepName :: String
          stepName = printf "step%d" stepNum
-- | Oval nodes for the promotions discovered at a step, plus one node per
-- backpropagated promotion (dashed when redundant).  Suppressed entirely
-- unless promotion rendering was requested.
dotPromotions params step =
    if not $ showPromotions params
    then []
    else [ node (name $ printf "step%dpromotion%d" (stepOrder step) promotionNum)
           <<< set "label" (makeLabel promotion)
           <<< set "shape" "oval"
           | (promotion, promotionNum) <- zip (Set.toList $ stepPromotions step) ([0..] :: [Integer]) ]
         ++
         [ node (name $ printf "step%dbackprop%d" (stepOrder step) backpropNum)
           <<< set "label" (makeLabel $ backpropagationPromotion backprop)
           <<< set "shape" "oval"
           <<< backpropagationRedundant backprop `thenDo`
               set "style" "dashed"
           | (backprop, backpropNum) <- zip (stepBackpropagations step) ([0..] :: [Integer]) ]
    -- A promotion label lists its choices, unless there are too many.
    where makeLabel p = if Set.size (promotionChoices p) <= 5
                        then printf "%s\n%s"
                                 (show $ promotionTier p)
                                 (concat $ intersperse "\n"
                                  [pp c | c <- Set.toList $ promotionChoices p])
                        else printf "%s\n%d choices..."
                                 (show $ promotionTier p)
                                 (Set.size $ promotionChoices p)

-- | All edges leaving a step: edges to processed and unprocessed
-- successors (doubled lines mark forced choices), edges to promotion
-- nodes, temporal+structural edges to backpropagation nodes, and an
-- incoming edge from a parent that was excluded from the render.
dotEdges params step = cutIncoming ++ processed ++ unprocessed ++ promotions ++ backprops
    where processed = [ edge (node (name $ printf "step%d" (stepOrder step)))
                             (node (name $ printf "step%d" (stepOrder step')))
                        <<< set "label" (dotChoiceLabel succChoice)
                        <<< forced `thenDo`
                            -- This gives us an arrow drawn with two
                            -- parallel lines.
                            set "color" "black:black"
                        | Successor { successorStep = step',
                                      successorChoice = succChoice,
                                      successorForced = forced } <- stepSuccessors step ]
          unprocessed = [ edge (node (name $ printf "step%d" (stepOrder step)))
                               (node (name $ printf "step%dunproc%d" (stepOrder step) stepNum))
                          <<< set "label" (dotChoiceLabel succChoice)
                          <<< forced `thenDo` set "color" "black:black"
                          | ((Unprocessed { successorChoice = succChoice,
                                            successorForced = forced }), stepNum)
                              <- zip (stepSuccessors step) ([0..] :: [Integer]) ]
          promotions = if (not $ showPromotions params) || (Set.null $ stepPromotions step)
                       then []
                       else [ edge (node (name $ printf "step%d" (stepOrder step)))
                                   (node (name $ printf "step%dpromotion%d" (stepOrder step) promotionNum))
                              | promotionNum <- [0..((Set.size $ stepPromotions step) - 1)] ]
          backprops = let attrs = set "color" "red" `andAlso`
                                  set "style" "dashed" `andAlso`
                                  set "constraint" "false" in
                      if (not $ showPromotions params) || (null $ stepBackpropagations step)
                      then []
                      -- Temporal edges to backpropagations.
                      else [edge (node (name $ printf "step%d" (stepOrder step)))
                                 (node (name $ printf "step%dbackprop0" (stepOrder step)))
                            <<< attrs]
                           ++
                           [edge (node (name $ printf "step%dbackprop%d" (stepOrder step) backpropNum))
                                 (node (name $ printf "step%dbackprop%d" (stepOrder step) (backpropNum + 1)))
                            <<< attrs
                            | backpropNum <- [0..((length $ stepBackpropagations step) - 2)] ]
                           ++
                           -- Structural edges to backpropagations.
                           [ edge (node (name $ printf "step%d" (stepOrder $ backpropagationStep backprop)))
                                  (node (name $ printf "step%dbackprop%d" (stepOrder step) backpropNum))
                             | (backprop, backpropNum) <- zip (stepBackpropagations step) ([0..] :: [Integer]) ]
          -- Edge from a parent that fell outside the rendered window.
          cutIncoming = [ edge (node (name $ printf "step%d" (stepOrder parentStep)))
                               (node (name $ printf "step%d" (stepOrder step)))
                          <<< set "label" (dotChoiceLabel choice)
                          <<< forced `thenDo` set "color" "black:black"
                          | ParentLink { parentLinkAction = choice,
                                         parentLinkForced = forced,
                                         parentLinkParent = parentStep }
                              <- maybeToList $ stepPredecessor step,
                            not $ inBounds params $ stepOrder parentStep ]
-- | Blue dotted edges linking consecutively processed steps, showing the
-- order in which the resolver visited them; constraint=false keeps them
-- from influencing the layout.
dotOrderEdges steps = zipWith orderEdge steps (drop 1 steps)
    where orderEdge s1 s2 =
              edge (node (name $ printf "step%d" (stepOrder s1)))
                   (node (name $ printf "step%d" (stepOrder s2)))
                  <<< set "constraint" "false"
                  <<< set "style" "dotted"
                  <<< set "color" "blue"
-- | Build the whole digraph for the selected window of steps: step nodes,
-- placeholders for excluded parents, unprocessed successors, promotions,
-- and all edges.  Errors out when the window selects no steps at all.
renderDot :: Params -> [ProcessingStep] -> Digraph
renderDot params steps =
    let droppedSteps   = maybe steps (\n -> genericDrop n steps) (firstStep params)
        truncatedSteps = maybe droppedSteps (\n -> genericTake n droppedSteps) (maxSteps params) in
    if null truncatedSteps
    then error "No steps to render."
    else let stepNodes   = map (dotStepNode params) truncatedSteps
             unprocessed = concat $ map (dotUnprocessedSuccs params) truncatedSteps
             excludedParentIndices = concat $ map (dotExcludedIndices params) truncatedSteps
             -- deduplicate excluded parents so each gets a single node
             excludedParentIndicesUnique = Set.toList $ Set.fromList excludedParentIndices
             excludedParents = map (dotExcludedParentNode params) excludedParentIndicesUnique
             promotions = concat $ map (dotPromotions params) truncatedSteps
             stepEdges  = concat $ map (dotEdges params) truncatedSteps
             orderEdges = dotOrderEdges truncatedSteps in
         digraph (stepNodes ++ excludedParents ++
                  unprocessed ++ promotions) (stepEdges ++ orderEdges)
-- | Render the given steps and write the resulting Graphviz source to the
-- named output file.
writeDotRun params steps outputFile =
    withFile outputFile WriteMode $ \h ->
        hPutStrLn h (show (renderDot params steps))
| dankamongmen/raptitude | tools/resolver-visualize/DotRender.hs | gpl-2.0 | 11,197 | 0 | 21 | 4,352 | 2,778 | 1,427 | 1,351 | 179 | 3 |
-- | The API for dealing with git blobs from Github repos, as described in
-- <http://developer.github.com/v3/git/blobs/>.
module Github.GitData.Blobs (
blob
,module Github.Data
) where
import Github.Data
import Github.Private
-- | Get a blob by SHA1.
--
-- > blob "thoughtbot" "paperclip" "bc5c51d1ece1ee45f94b056a0f5a1674d7e8cba9"
blob :: String -> String -> String -> IO (Either Error Blob)
-- arguments: user, repository name, blob SHA
-- issues GET /repos/:user/:repo/git/blobs/:sha
blob user repoName sha =
  githubGet ["repos", user, repoName, "git", "blobs", sha]
| fernandocastor/github | Github/GitData/Blobs.hs | bsd-3-clause | 481 | 0 | 10 | 72 | 94 | 56 | 38 | 8 | 1 |
{- |
Description : Test file for Parse_AS_DFOL
-}
import DFOL.AS_DFOL
import DFOL.Parse_AS_DFOL
import Text.ParserCombinators.Parsec
import Common.AnnoState
import Common.Doc
import Common.DocUtils
-- | A small DFOL specification of natural-number-indexed matrices, with
-- commutativity and associativity axioms for addition, used as parser
-- input.
matrices :: String
matrices = "Nat :: Sort " ++
           "Mat :: Nat -> Nat -> Sort " ++
           "plus :: Pi m, n : Nat. Mat(m, n) -> Mat(m, n) -> Mat(m, n) " ++
           "mult :: Pi p, q, r : Nat. Mat(p, q) -> Mat(q, r) -> Mat(p, r) " ++
           ".forall m, n : Nat; A, B : Mat(m, n). plus(m, n, A, B) == plus(m, n, B, A) %(plus_commut)% " ++
           ".forall m, n : Nat; A, B, C : Mat(m, n). plus(m, n, plus(m, n, A, B), C) == plus(m, n, A, plus(m, n, B, C)) %(plus_assoc)% " {- ++
           ".forall p, q, r, s : Nat; A : Mat(p, q); B : Mat(q, r); C : Mat(r, s). mult(p, r, s, mult(p, q, r, A, B), C) == mult(p, q, s, A, mult(q, r, s, B, C)) %(mult_commut)%" -}

-- | Parse the 'matrices' text as a DFOL basic spec.
run :: Either ParseError BASIC_SPEC
run = runParser basicSpec (AnnoState [] ()) "" matrices
-- | Pretty-print a successful parse; a parse error collapses to the
-- literal document "Error".
result :: Pretty a => Either ParseError a -> Doc
result = either (const (text "Error")) pretty
| mariefarrell/Hets | DFOL/Tests/Test_Parse_AS_DFOL.hs | gpl-2.0 | 1,088 | 1 | 10 | 280 | 172 | 83 | 89 | 18 | 1 |
import qualified Data.Vector as U
import Data.Bits
-- Check whether 100 occurs among the doubled values 2,4,...,20000
-- (it does not: 100 shifted left would require an odd source of 50... no —
-- 50 `shiftL` 1 == 100, so the answer is True).
main = print (U.elem 100 (U.map (`shiftL` 1) (U.enumFromTo 1 (10000 :: Int))))
| dolio/vector | old-testsuite/microsuite/elem.hs | bsd-3-clause | 133 | 2 | 9 | 25 | 66 | 35 | 31 | 3 | 1 |
{-# LANGUAGE AllowAmbiguousTypes, TypeFamilies #-}
module ContextStack2 where
-- A type family that distributes over pairs; the constraint on 't' below,
-- a ~ TF (a,Int), can be rewritten forever (see the trace in the comment
-- that follows).  This module deliberately exercises the constraint
-- solver's context-stack limit and is expected to be rejected — do not
-- "fix" the occurs-style cycle.
type family TF a :: *
type instance TF (a,b) = (TF a, TF b)

t :: (a ~ TF (a,Int)) => Int
t = undefined
{- a ~ TF (a,Int)
~ (TF a, TF Int)
~ (TF (TF (a,Int)), TF Int)
~ (TF (TF a, TF Int), TF Int)
~ ((TF (TF a), TF (TF Int)), TF Int)
fsk ~ a
TF (a, Int) ~ fsk
-->
fsk ~ a
* fsk ~ (TF a, TF Int)
(flatten lhs)
a ~ (TF a, TF Int)
(flatten rhs)
a ~ (fsk1, TF Int)
(wk) TF a ~ fsk1
--> (rewrite inert)
fsk ~ (fsk1, TF Int)
a ~ (fsk1, TF Int)
(wk) TF a ~ fsk1
-->
fsk ~ (fsk1, TF Int)
a ~ (fsk1, TF Int)
* TF (fsk1, fsk2) ~ fsk1
(wk) TF Tnt ~ fsk2
-->
fsk ~ (fsk1, TF Int)
a ~ (fsk1, TF Int)
* fsk1 ~ (TF fsk1, TF fsk2)
(flatten rhs)
fsk1 ~ (fsk3, TF fsk2)
(wk) TF Int ~ fsk2
TF fsk1 ~ fsk3
-}
| snoyberg/ghc | testsuite/tests/typecheck/should_fail/ContextStack2.hs | bsd-3-clause | 934 | 0 | 9 | 349 | 73 | 44 | 29 | 6 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Distribution.Solver.Types.SourcePackage
( PackageDescriptionOverride
, SourcePackage(..)
) where
import Distribution.Package
( PackageId, Package(..) )
import Distribution.PackageDescription
( GenericPackageDescription(..) )
import Data.ByteString.Lazy (ByteString)
import GHC.Generics (Generic)
import Distribution.Compat.Binary (Binary(..))
import Data.Typeable
-- | A package description along with the location of the package sources.
--
data SourcePackage loc = SourcePackage {
    packageInfoId        :: PackageId,                  -- ^ name and version
    packageDescription   :: GenericPackageDescription, -- ^ parsed .cabal contents
    packageSource        :: loc,                        -- ^ where the sources live
    packageDescrOverride :: PackageDescriptionOverride  -- ^ replacement .cabal, if any
  }
  deriving (Eq, Show, Generic, Typeable)

instance (Binary loc) => Binary (SourcePackage loc)

instance Package (SourcePackage a) where packageId = packageInfoId

-- | We sometimes need to override the .cabal file in the tarball with
-- the newer one from the package index.
type PackageDescriptionOverride = Maybe ByteString
| themoritz/cabal | cabal-install/Distribution/Solver/Types/SourcePackage.hs | bsd-3-clause | 1,102 | 0 | 8 | 190 | 201 | 124 | 77 | 22 | 0 |
module Record3 where
-- | Record whose first two fields share one signature (HaRe add-field test).
data C = F {f1, f2 :: Int, f3 :: Bool }

-- | Projects the first field of a sample value (evaluates to 1).
g = f1 (F 1 2 True)
{-# LANGUAGE TypeFamilies #-}
module ShouldCompile where
-- | Class with an associated data family of kind @* -> *@.
class C8 a where
  data S8 a :: * -> *

-- | Instance providing the associated data declaration for Int.
instance C8 Int where
  data S8 Int a = S8Int a
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE MonadComprehensions, ParallelListComp #-}
-- Test for parallel list comp, which should work for monad comp as well:
--
-- On GHC 6.0 and earlier, this parallel list comprehension generated
-- an incorrect unused-binding warning.
module ShouldCompile where
t :: [(Char,Char)]
-- Parallel list comprehension: draws from both strings in lockstep, like zip.
t = [ (x,y) | x <- "foo" | y <- "bar" ]
| urbanslug/ghc | testsuite/tests/rename/should_compile/mc12.hs | bsd-3-clause | 356 | 1 | 7 | 61 | 54 | 35 | 19 | 5 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Meinesweeper.Field where
import Control.Lens
-- | One cell of the minesweeper board.
data Field = Field { _mined :: Bool          -- ^ cell contains a mine
                   , _flagged :: Bool        -- ^ player placed a flag here
                   , _covered :: Bool        -- ^ cell not yet revealed
                   , _adjacentMines :: Int   -- ^ mine count of neighbours
                   , _xy :: (Int,Int)        -- ^ board coordinates
                   }
-- Render a cell as a 5-character ASCII box.  Guard order matters: a flag
-- hides covered state, covered hides the mine, and only an uncovered,
-- unmined cell shows its adjacency count.
instance Show Field where
    show f
        | _flagged f = "| F |"
        | _covered f = "|   |"
        | _mined f = "| * |"
        | otherwise = "| " ++ show (_adjacentMines f) ++ " |"
-- | A fresh, covered, unmined cell with a sentinel (-1,-1) position.
newField :: Field
newField = Field { _mined = False
                 , _flagged = False
                 , _covered = True
                 , _adjacentMines = 0
                 , _xy = (-1,-1)
                 }

-- Template Haskell: generate lenses (mined, flagged, ...) for the _fields.
makeLenses ''Field
| IanConnolly/Meinesweeper | src/Meinesweeper/Field.hs | mit | 711 | 0 | 11 | 309 | 192 | 106 | 86 | 21 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Main where
-- import Data.Tape2D
-- import Experiments.Juicy
import Codec.Picture
import Control.Applicative
import Control.Comonad as C
import Control.Comonad.Store
import Data.Array.Repa as R
import Data.Array.Repa.Repr.Unboxed
import Data.Functor
import Data.Functor.Identity
import Data.Indexed
import Data.Tape
import Data.Word
import Experiments.Filters
import Experiments.Repa
import Experiments.Store ()
import Experiments.Tapes
import Linear.V2
import qualified Experiments.Repa2 as R2
-- | Clamp a value into the inclusive range @[lo, hi]@.
constrain :: Ord c => c -> c -> c -> c
constrain lo hi v = max lo (min hi v)
-- withCoKleisli' :: (Unbox a, Pixel a, Integral a, Num b, RealFrac c) => (Store (V2 Int) b -> c) -> (Image a -> Image a)
-- withCoKleisli' f im = withCoKleisli (round . constrain 0 255 . f . fmap fromIntegral) im
-- | Lift a cokleisli arrow on focused Repa arrays to an image transformer:
-- convert the image to a focused array, extend the arrow over it, convert back.
withCoKleisli :: (Unbox a, Num a, Pixel a) => (FArray D DIM2 a -> a) -> (Image a -> Image a)
withCoKleisli ck i = R2.toImage . withCoKleisliArray ck . R2.fromImage $ i
-- | As 'withCoKleisli', but the filter works in a fractional domain; the
-- result is clamped to the 0..255 pixel range and rounded back to integral.
withCoKleisli' :: (Unbox a, Pixel a, Integral a, Num b, RealFrac c) => (FArray D DIM2 b -> c) -> (Image a -> Image a)
withCoKleisli' f im = withCoKleisli (round . constrain 0 255 . f . fmap fromIntegral) im
-- withCoKleisli' :: (Pixel a, Integral a, Num b, RealFrac c) => (Store (V2 Int) b -> c) -> (Image a -> Image a)
-- withCoKleisli' f im = withCoKleisli (round . constrain 0 255 . f . fmap fromIntegral) im
-- withCoKleisli' :: (Pixel a, Integral a, Num b, RealFrac c) => Image a -> (OffsetTape2D Int b -> c) -> Image a
-- withCoKleisli' im f = withCoKleisli im (round . constrain 0 255 . f . fmap fromIntegral)
-- | Apply a Gaussian blur twice to the test image and save the result.
-- NOTE(review): the pattern match assumes the file decodes to an 8-bit
-- grayscale image (ImageY8); any other outcome crashes at runtime.
main :: IO ()
main = do
  Right (ImageY8 im) <- readImage "media/cameraman.jpg"
  -- let imRes = withCoKleisli' (sharpen 0.5) im
  -- let imRes = withCoKleisli' (sharpen 0.4) im
  -- let imRes = withCoKleisli' (gauss 2 =<= gauss 2) im
  let imRes = withCoKleisli' (gaussE 2 =<= gaussE 2) im
  -- let imRes = withCoKleisli' (gaussE 2) im
  saveBmpImage "media/cameraman6.bmp" (ImageY8 imRes)
  return ()
-- | Windowed dot product of two tapes around the focus: n cells on each
-- side, every mask weight 1.
dot :: Fractional a => Int -> Tape a -> Tape a -> a
dot n t1 t2 = mask (replicate n 1, 1, replicate n 1) (liftA2 (*) t1 t2)
-- | Projection coefficient of the first tape onto the second over a window
-- of half-width n: dot(u,v) / dot(v,v).
proj :: Fractional a => Int -> Tape a -> Tape a -> a
proj n u v = dot n u v / dot n v v
| mstksg/cm-dip | src/Main.hs | mit | 2,295 | 0 | 13 | 479 | 576 | 303 | 273 | 35 | 1 |
module Main where
import ReferenceDatabase
import Text.EditDistance
import Text.CSL
import Text.CSL.Input.Identifier.Internal as Internal
import Text.CSL.Input.Identifier
import Text.CSL.Reference
import qualified Data.Map.Strict as Map
import Control.Monad.Trans.Either
import Control.Monad.State as State
import Data.Aeson
-- | Run both test suites (working-directory and reference-database tests).
main :: IO ()
main = do
  wdTest
  rdTest
--main = do
-- m <- readBiblioFile "mybibdb.bib"
-- s <- readCSLFile "bibtex.csl"
-- --putStrLn $ unlines $ map (show) m
-- let result = processBibliography procOpts s m
-- let procAsBibtex = processBibliography procOpts s
-- -- the following retains the order
-- --let result = map (\r -> renderPlainStrict $ head $ procAsBibtex [r]) m
-- putStrLn $ foldl (\x y -> x++"\n\n\n\n\n"++y) "" $ map show $ (zip (map (show . refId) m) (map (renderPlainStrict) result))
-- mainLoop []
-- | Render one reference with the supplied bibliography processor.
-- NOTE(review): 'head' is partial -- this assumes the processor yields at
-- least one formatted entry for a single input reference.
showRefAsBibtex :: Reference -> ([Reference] -> [[FormattedOutput]]) -> String
showRefAsBibtex r pf = renderPlainStrict $ head $ pf [r]
-- | Interactive loop: read reference identifiers from stdin and accumulate
-- resolved references until the user quits ("q", ":q" or "quit") or clears
-- the in-memory database ("clear").
mainLoop :: [Reference] -> IO ()
mainLoop database = do
    putStrLn "Please enter a reference"
    ref <- getLine
    if ref `elem` ["q", ":q", "quit"]
        then putStrLn "Quit requested, exiting."
        else if ref == "clear"
            then do
                putStrLn "Clearing database"
                mainLoop []
            else do
                lookupResult <- resolveEitherDef ref
                newDb <- updateDatabase database lookupResult
                mainLoop newDb
-- | Fold a lookup result into the database: report errors and keep the old
-- database, or prepend the new reference and show the updated contents.
updateDatabase :: [Reference] -> Either String Reference -> IO [Reference]
updateDatabase database (Left err) = do
    putStrLn ("Error looking up reference: " ++ err)
    return database
updateDatabase database (Right ref) = do
    putStrLn "Adding to database"
    putStrLn "Database contains:"
    strDb <- showReferenceList (ref:database)
    putStrLn strDb
    return (ref:database)
--show (ref:database)
-- | Format every reference through the bibtex CSL style, one per line.
-- Re-reads "bibtex.csl" on every call.
showReferenceList :: [Reference] -> IO (String)
showReferenceList rs = do
  s <- readCSLFile "bibtex.csl"
  let result = processBibliography procOpts s rs
  return $ unlines $ map (renderPlainStrict) result
-- resolveEitherRef s = do
-- fn <- getDataFileName "default.db"
-- let go = withDatabaseFile fn $ ((runEitherT.resolveEither) s)
-- State.evalStateT go (Database Map.empty)
-- | Sample citations with page locators (currently unused by main).
cites :: [Cite]
cites = [emptyCite { citeId = "Caso2007"
                   , citeLabel = "page"
                   , citeLocator = "15"}
        ,emptyCite { citeId = "Rossato2006"
                   , citeLabel = "page"
                   , citeLocator = "10"}
        ]
-- getSimilar word database threshold = filter (\w -> (approxStringMatch word w) >= threshold) database
-- approxStringMatch sa sb = (bigger-ld)/bigger where
-- ld = fromIntegral $ levenshteinDistance defaultEditCosts sa sb
-- bigger = fromIntegral $ max (length sa) (length sb)
-- approxStringMatch' sa sb = ( ((sal-ld)/sal), ((sbl-ld)/sbl), ld) where
-- sal = fromIntegral $ length sa
-- sbl = fromIntegral $ length sb
-- ld = fromIntegral $ levenshteinDistance defaultEditCosts sa sb
--type DbEntry = Reference
--type Database = [Reference]
-- deriving (Show, Eq)
-- type Database = [(String, Int)] | AdamHarries/yggdrasil | spare/old/Main.hs | mit | 3,127 | 6 | 13 | 649 | 566 | 311 | 255 | 53 | 3 |
module Test.Hspec.Expectations.Pretty.MatcherSpec (main, spec) where
import Test.Hspec
import Test.Hspec.Expectations.Pretty.Matcher
main :: IO ()
main = hspec spec

-- | Specs for 'matchList': Nothing on a permutation match, otherwise a
-- multi-line diagnostic listing extra/missing elements.
spec :: Spec
spec = do
  describe "matchList" $ do
    it "succeeds if arguments are empty lists" $ do
      matchList [] ([] :: [Int]) `shouldBe` Nothing
    it "succeeds if arguments are equal up to permutation" $ do
      matchList [1, 2, 2, 3] [3, 2, 1, 2 :: Int] `shouldBe` Nothing
    context "when arguments are not equal up to permutation" $ do
      it "shows extra elements" $ do
        [1, 2, 2, 3] `matchList` [1, 2, 3 :: Int] `shouldBe` (Just . unlines) [
            "Actual list is not a permutation of expected list!"
          , "  expected list contains: [1, 2, 3]"
          , "  actual list contains: [1, 2, 2, 3]"
          , "  the extra elements are: [2]"
          ]
      it "shows missing elements" $ do
        [1, 2, 3] `matchList` [1, 2, 2, 3 :: Int] `shouldBe` (Just . unlines) [
            "Actual list is not a permutation of expected list!"
          , "  expected list contains: [1, 2, 2, 3]"
          , "  actual list contains: [1, 2, 3]"
          , "  the missing elements are: [2]"
          ]
| myfreeweb/hspec-expectations-pretty-diff | test/Test/Hspec/Expectations/Pretty/MatcherSpec.hs | mit | 1,241 | 0 | 19 | 393 | 308 | 176 | 132 | 25 | 1 |
module THInstanceReification.Prelude.TH
(
module Exports,
purify,
tryToReify,
)
where
import THInstanceReification.Prelude.Basic
import Language.Haskell.TH as Exports
import Language.Haskell.TH.ExpandSyns as Exports
-- | Run a Q computation outside the Q monad.
-- NOTE(review): built on unsafePerformIO; only safe for Q actions that do
-- not perform IO or depend on the compilation environment.
purify :: Q a -> a
purify = unsafePerformIO . runQ
-- | Reify a name, yielding Nothing instead of failing when reification
-- is not possible.
tryToReify :: Name -> Q (Maybe Info)
tryToReify name = recover (return Nothing) (fmap Just (reify name))
| nikita-volkov/th-instance-reification | src/THInstanceReification/Prelude/TH.hs | mit | 378 | 0 | 8 | 60 | 114 | 66 | 48 | 12 | 1 |
{-# LANGUAGE ExplicitForAll #-}
module Day1 where
import Data.List.Extra (groupOn)
-- | Read the first line of a file.  (Partial: 'head' errors on an empty file.)
readSingleLine :: FilePath -> IO String
readSingleLine path = fmap (head . lines) (readFile path)
-- | Parse the puzzle input: every character of the single input line becomes
-- one Int (read via a singleton string).
getInputs :: IO [Int]
getInputs = map (read . (:[])) <$> readSingleLine "../input/01.txt"
--------------------------------------------------------------------------------
-- | Solve part one over the parsed puzzle input.
partOne :: IO Int
partOne = checkOne <$> getInputs
-- 1122 produces a sum of 3 (1 + 2) because the first digit (1) matches the second digit and the third digit (2) matches the fourth digit.
-- 1111 produces 4 because each digit (all 1) matches the next.
-- 1234 produces 0 because no digit matches the next.
-- 91212129 produces 9 because the only digit that matches the next one is the last digit, 9.
-- | Pair each element with a count of equal elements immediately following it.
-- NOTE(review): the combining function recurses on the already-folded tail
-- (@f y ys@), so elements inside a run are re-processed and keep their own
-- shrinking counts; 'checkOne' depends on this exact shape -- confirm before
-- refactoring.
duplicates :: forall a. Eq a => [a] -> [(a,Int)]
duplicates = foldr f []
  where f :: Eq a => a -> [(a,Int)] -> [(a,Int)]
        f x [] = [(x,0)]
        f x ((y,n):ys) | x == y = (y,succ n): f y ys
                       | otherwise = (x, 0) : f y ys
-- | Part one: sum every digit that equals its successor in the circular list
-- (the list is closed by appending the head before scanning).
checkOne :: [Int] -> Int
checkOne [] = 0
checkOne [_] = 0
checkOne (x:xs) = foldr f 0 ls
  -- ls keeps, per run of equal digits, the head pair (digit, run-tail count);
  -- digits with no following duplicate are dropped by the (> 0) filter.
  where f :: (Int,Int) -> Int -> Int
        f (a,b) n = a * b + n
        ls = filter ((> 0). snd) . map head . groupOn fst $ duplicates (x:xs ++ [x])
--------------------------------------------------------------------------------
-- | Solve part two over the parsed puzzle input.
partTwo :: IO Int
partTwo = checkTwo <$> getInputs
-- 1212 produces 6: the list contains 4 items, and all four digits match the digit 2 items ahead.
-- 1221 produces 0, because every comparison is between a 1 and a 2.
-- 123425 produces 4, because both 2s match each other, but no other digit has a match.
-- 123123 produces 12.
-- 12131415 produces 4.
-- | Sum of digits matching the digit halfway around the circular list.
-- Comparing the first half against the second covers every matching pair
-- once, so the sum is doubled.
checkTwo :: [Int] -> Int
checkTwo ds = 2 * sum (zipWith match front back)
  where (front, back) = splitAt (length ds `div` 2) ds
        match a b = if a == b then a else 0
| wizzup/advent_of_code | 2017/haskell/Day_01.hs | mit | 1,897 | 0 | 13 | 447 | 574 | 311 | 263 | 30 | 2 |
import Test.QuickCheck
import Game.Spacegoo
-- | Apply a function to every component of a homogeneous triple.
map3 :: (a->b) -> (a,a,a) -> (b,b,b)
map3 g (x, y, z) = (g x, g y, g z)
-- | Property: a fleet never beats its nemesis.  The (==>) precondition
-- restricts the check to triples with non-negative unit counts.
nemesisWins :: Units -> Property
nemesisWins u = (map3 (max 0) u == u) ==> winsAgainst u (nemesisOf u) == False
-- | Property: if an attacker beats a defender, the minimized attacking fleet
-- still wins.  The where-clause orients the (absolute-valued) pair so that
-- a' is the winning side; the precondition discards non-winning orientations.
minimizeUnitsWins :: Units -> Units -> Property
minimizeUnitsWins a d = winsAgainst a' d' ==> winsAgainst (minimizeUnits a' d') d'
  where
    (a',d') | winsAgainst (map3 abs a) (map3 abs d) = (map3 abs a, map3 abs d)
            | otherwise = (map3 abs d, map3 abs a)
| nomeata/haskell-spacegoo | test.hs | mit | 527 | 0 | 11 | 138 | 267 | 138 | 129 | 10 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
module Myracloud.DNS where
import Control.Applicative ((<$>))
import Control.Monad.Trans.Either
import qualified Data.Aeson as A
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy as BL
import Data.Function (on)
import Data.Monoid
import Data.Proxy
import Data.Text
import Myracloud.Signature
import Myracloud.Types
import Myracloud.Util
import Servant.API
import Servant.Client
-- | Servant route: GET /en/rapi/dnsRecords/:site/:page with signed headers.
type DnsListApi = "en" :> "rapi" :> "dnsRecords"
                  :> Capture "site" Site
                  :> Capture "page" Page
                  :> Header "Date" Date
                  :> Header "Authorization" Authorization
                  :> Header "Content-Type" ContentType
                  :> Get '[JSON] (Result (ObjectVO DnsRecord))

dnsListApi :: Proxy DnsListApi
dnsListApi = Proxy
-- | List the available Dns records.
listRecords :: Credentials
            -> Site -- ^ Site to list the records for
            -> Page -- ^ result page to fetch
            -> BaseUrl -- ^ details of the server, such as 'myraUri'
            -> EitherT ServantError IO (Result (ObjectVO DnsRecord))
listRecords (access, secret) site@(Site s') page' base = do
  iso <- currentTimestamp
  -- Sign the request: empty body; method, URI, content type and date are
  -- all covered by the MyraCloud signature.
  let contentType = ContentType "application/json"
      sigData = MyraSignature
        { myra_rqBody = mempty
        , myra_method = getMethod dnsListApi
        , myra_uri = "/en/rapi/dnsRecords/"
                     <> B8.pack (unpack s')
                     <> "/"
                     <> B8.pack (show page')
        , myra_contentType = _unContentType contentType
        , myra_date = iso
        }
      sig = myraSignature access secret sigData
  client dnsListApi base site page'
    (Just $ Date iso)
    (Just $ Authorization sig)
    (Just contentType)
-- | Run 'listRecords' down to IO, surfacing transport errors as Left.
runList :: Credentials -> Site -> Page -> BaseUrl
        -> IO (Either ServantError (Result (ObjectVO DnsRecord)))
runList c s p b = runEitherT $ listRecords c s p b
-- | Runs the list command on consecutive pages until no more records
-- are returned.
-- | Run 'runListAll'' down to IO: fetch consecutive pages until no more
-- records are returned.
runListAll :: Credentials -> Site -> BaseUrl
           -> IO (Either ServantError (Result [DnsRecord]))
runListAll c s b = runEitherT $ runListAll' c s b
-- | Collect DNS records from page 1 upward.  Stops on an API failure, an
-- empty page, or a page shorter than the reported page size (the last page).
runListAll' :: Credentials -> Site -> BaseUrl
            -> EitherT ServantError IO (Result [DnsRecord])
runListAll' c s b = loop 1
  where
    loop :: Page -> EitherT ServantError IO (Result [DnsRecord])
    loop p = listRecords c s p b >>= \case
      Failure x -> return $ Failure x
      Success x -> case list x of
        [] -> return $ Success []
        -- a short page is the final page; otherwise keep paging
        xs | pageSize x > Prelude.length xs -> return $ Success xs
           | otherwise -> fmap (xs ++) <$> loop (succ p)
-- | Servant route: PUT /en/rapi/dnsRecords/:site with a JSON record body.
type DnsCreateApi = "en" :> "rapi" :> "dnsRecords"
                    :> Capture "site" Site
                    :> Header "Date" Date
                    :> Header "Authorization" Authorization
                    :> Header "Content-Type" ContentType
                    :> ReqBody '[JSON] DnsRecordCreate
                    :> Put '[JSON] (Result ResultVO)

dnsCreateApi :: Proxy DnsCreateApi
dnsCreateApi = Proxy
-- | Create a DNS record for the site; the JSON-encoded body is included in
-- the request signature.
createRecord :: Credentials -> DnsRecordCreate -> Site -> BaseUrl
             -> EitherT ServantError IO (Result ResultVO)
createRecord (access, secret) r site@(Site s') b = do
  iso <- currentTimestamp
  let contentType = ContentType "application/json"
      sigData = MyraSignature
        { myra_rqBody = Just . BL.toStrict $ A.encode r
        , myra_method = getMethod dnsCreateApi
        , myra_uri = "/en/rapi/dnsRecords/" <> B8.pack (unpack s')
        , myra_contentType = _unContentType contentType
        , myra_date = iso
        }
      sig = myraSignature access secret sigData
  client dnsCreateApi b
    site
    (Just $ Date iso)
    (Just $ Authorization sig)
    (Just contentType) r
-- | Run 'createRecord' down to IO.
runCreate :: Credentials -> DnsRecordCreate -> Site -> BaseUrl
          -> IO (Either ServantError (Result ResultVO))
runCreate c r s b = runEitherT $ createRecord c r s b
-- | Deletion yields no payload; the ResultVO variant is kept commented out.
type DeleteResult = Result () {-ResultVO-}

-- | Servant route: DELETE /en/rapi/dnsRecords/:site with a JSON record body.
type DnsDeleteApi = "en" :> "rapi" :> "dnsRecords"
                    :> Capture "site" Site
                    :> Header "Date" Date
                    :> Header "Authorization" Authorization
                    :> Header "Content-Type" ContentType
                    :> ReqBody '[JSON] DnsRecordDelete
                    :> Delete '[JSON] DeleteResult

dnsDeleteApi :: Proxy DnsDeleteApi
dnsDeleteApi = Proxy
-- | Delete a DNS record; the JSON-encoded body is part of the signature.
deleteRecord :: Credentials -> DnsRecordDelete -> Site -> BaseUrl
             -> EitherT ServantError IO DeleteResult
deleteRecord (access, secret) r site@(Site s') b = do
  iso <- currentTimestamp
  let contentType = ContentType "application/json"
      sigData = MyraSignature
        { myra_rqBody = Just . BL.toStrict $ A.encode r
        , myra_method = getMethod dnsDeleteApi
        , myra_uri = "/en/rapi/dnsRecords/" <> B8.pack (unpack s')
        , myra_contentType = _unContentType contentType
        , myra_date = iso
        }
      sig = myraSignature access secret sigData
  client dnsDeleteApi b
    site
    (Just $ Date iso)
    (Just $ Authorization sig)
    (Just contentType) r
-- | Run 'deleteRecord' down to IO.
runDelete :: Credentials -> DnsRecordDelete -> Site -> BaseUrl
          -> IO (Either ServantError DeleteResult)
runDelete c r s b = runEitherT $ deleteRecord c r s b
-- | Servant route: POST /en/rapi/dnsRecords/:site with a JSON record body.
type DnsUpdateApi = "en" :> "rapi" :> "dnsRecords"
                    :> Capture "site" Site
                    :> Header "Date" Date
                    :> Header "Authorization" Authorization
                    :> Header "Content-Type" ContentType
                    :> ReqBody '[JSON] DnsRecordUpdate
                    :> Post '[JSON] (Result ResultVO)

dnsUpdateApi :: Proxy DnsUpdateApi
dnsUpdateApi = Proxy
-- | Update a DNS record; the JSON-encoded body is part of the signature.
updateRecord :: Credentials -> DnsRecordUpdate -> Site -> BaseUrl
             -> EitherT ServantError IO (Result ResultVO)
updateRecord (access, secret) r site@(Site s') b = do
  iso <- currentTimestamp
  let contentType = ContentType "application/json"
      sigData = MyraSignature
        { myra_rqBody = Just . BL.toStrict $ A.encode r
        , myra_method = getMethod dnsUpdateApi
        , myra_uri = "/en/rapi/dnsRecords/" <> B8.pack (unpack s')
        , myra_contentType = _unContentType contentType
        , myra_date = iso
        }
      sig = myraSignature access secret sigData
  client dnsUpdateApi b
    site
    (Just $ Date iso)
    (Just $ Authorization sig)
    (Just contentType) r
-- | Run 'updateRecord' down to IO.
runUpdate :: Credentials -> DnsRecordUpdate -> Site -> BaseUrl
          -> IO (Either ServantError (Result ResultVO))
runUpdate c r s b = runEitherT $ updateRecord c r s b
-- | Find DNS records whose name equals the given subdomain exactly.
search :: Credentials -> Site -> BaseUrl
       -> Maybe Page
       -> Site -- ^ Subdomain to search for
       -> IO (Either ServantError (Result [DnsRecord]))
search c s b p (Site sub) = runEitherT $ searchBy' c s b p ((== sub) . name)
-- | Fetch records (one page, or every page when no page is given) and keep
-- only those satisfying the predicate.
searchBy' :: Credentials -> Site -> BaseUrl
          -> Maybe Page -- ^ Specific page to focus the search on, if any
          -> (DnsRecord -> Bool) -- ^ 'Prelude.filter' predicate
          -> EitherT ServantError IO (Result [DnsRecord])
searchBy' creds site base mpage keep = fetch mpage >>= \case
    Success records -> return (Success (Prelude.filter keep records))
    failure         -> return failure
  where
    -- No page requested: walk all pages; otherwise fetch just that page.
    fetch Nothing     = runListAll' creds site base
    fetch (Just page) = fmap list <$> listRecords creds site page base
| zalora/myrapi | src/Myracloud/DNS.hs | mit | 7,527 | 0 | 19 | 2,216 | 2,136 | 1,090 | 1,046 | 169 | 3 |
{-# LANGUAGE DeriveGeneric #-}
module RevReport where
import Data.Functor
import qualified Data.Map as M
import qualified Data.ByteString.Lazy as BS
import Data.Aeson
import Development.Shake.Command
import Text.Read
import ReportTypes
import ReadResult
import qualified BenchmarkSettings as S
git args = cmd (words "git -C repository" ++ args)
-- | Emit (to stdout) the JSON report for one revision, given its hash and
-- its parent hashes.
-- NOTE(review): the pattern is partial -- an empty argument list crashes.
-- The binding 'log' shadows Prelude.log within this do-block.
revReportMain :: [String] -> IO ()
revReportMain (this:parents) = do
    settings <- S.readSettings "gipeda.yaml"
    thisM <- readCSV this
    -- benchmark data of the first parent, if any, for the comparison
    parentM <- case parents of
        p:_ -> readCSV p
        _ -> return M.empty
    log <- case parents of
        p:_ -> fromStdout <$> git ["log", p ++ ".."++ this]
        _ -> fromStdout <$> git ["show", "-s", this]
    msg <- fromStdout <$> git ["show", "--format=%s", "-s", this]
    dateS <- fromStdout <$> git ["show", "--format=%ct","-s",this]
    date <- case readMaybe dateS of
        Just date -> return date
        Nothing -> error $ "Could not parse date " ++ show dateS
    let rep = createReport settings this parents thisM parentM log msg date
    let doc = emptyGlobalReport { revisions = Just (M.singleton this rep) }
    BS.putStr (encode doc)
-- | Emit (to stdout) the JSON report comparing a branch head against another
-- revision.  NOTE(review): partial match -- exactly three arguments required.
branchReportMain :: [String]-> IO ()
branchReportMain [branchName, this, other] = do
    settings <- S.readSettings "gipeda.yaml"
    thisM <- readCSV this
    otherM <- readCSV other
    log <- fromStdout <$> git ["log", "--oneline", other ++ ".."++ this]
    -- number of commits on the branch not reachable from 'other'
    let commits = length (lines log)
    let rep = createBranchReport settings this other thisM otherM commits
    let doc = emptyGlobalReport { branches = Just (M.singleton branchName rep) }
    BS.putStr (encode doc)
| nomeata/gipeda | src/RevReport.hs | mit | 1,638 | 5 | 17 | 356 | 565 | 291 | 274 | 40 | 4 |
data Matrix a = Matrix { width :: Int, height :: Int, data :: [a] } deriving (Show, Eq)
| 5outh/Haskell-Graphics-Projects | cellular.hs | mit | 89 | 0 | 9 | 20 | 43 | 26 | 17 | -1 | -1 |
-- @ mapR :: _ -> i:[a] -> {o:[b] | (len i) = (len o)} @-}
--needed when synth isnt using prelude (for speed of testing)
-- | Identity, defined locally so synthesis tests need not pull in a prelude.
myId :: a -> a
myId x = x
-- | Stuttering map: each mapped element appears twice, in order.
map2 :: (a->b)->[a]->[b]
map2 g = concatMap (\v -> let r = g v in [r, r])
-- | Map over the list and reverse the result.
-- FIX: the original appended with (++) per element, which is quadratic;
-- this accumulator version is linear and produces the same output.
mapR :: (a->b)->[a]->[b]
mapR g = go []
  where
    go acc []     = acc
    go acc (v:vs) = go (g v : acc) vs
-- | Duplicate a value into a two-element list.
f :: a -> [a]
f x = replicate 2 x
-- map2 id, map f
-- | Input/output example pairs for the synthesizer: each input element is
-- stuttered (doubled in place) in the output.
exs :: [([Int],[Int])]
exs = [ ([1, 2, 3],[1, 1, 2, 2, 3, 3]) ]
| santolucito/ives | tests/stutter.hs | mit | 404 | 2 | 9 | 97 | 286 | 148 | 138 | 13 | 1 |
module Magento.Locale (
addLocale
) where
import Control.Monad (when)
import System.Directory (createDirectoryIfMissing, doesFileExist)
import System.FilePath.Posix (joinPath, takeDirectory)
import Template.Locale (localeXml)
import Magento.Module.Path (localeBasePath)
import Magento.Module (
ModuleInfo,
getName,
getConfigXml,
getFullName)
import Util (
writeFileAndPrint,
capitalize,
capitalizePath,
lowercase)
import Data.String.Utils (join)
import Util.XML (insertXmlIfMissing)
-- | Register a locale for the module: wire the translate node into
-- config.xml and ensure the CSV translation file exists.
addLocale :: ModuleInfo -> String -> String -> IO ()
addLocale info scope localeName = do
    insertLocaleXmlIfMissing info scope
    createLocaleCsvIfMissing info localeName
-- | Insert the rendered translate node into config.xml unless it is
-- already present at the scope's XPath.
insertLocaleXmlIfMissing :: ModuleInfo -> String -> IO ()
insertLocaleXmlIfMissing info scope = do
    xml <- localeXml (getFullName info) (localeFname info)
    insertXmlIfMissing (getConfigXml info) (xpath scope) xml
-- | Ensure the locale's CSV file (and its parent directory) exists.
createLocaleCsvIfMissing :: ModuleInfo -> String -> IO ()
createLocaleCsvIfMissing info localeName =
    let path = localePath info localeName in do
        createDirectoryIfMissing True (takeDirectory path)
        writeLocaleCsvIfMissing path
-- | Create an empty CSV file (with console feedback) unless it already exists.
writeLocaleCsvIfMissing :: FilePath -> IO ()
writeLocaleCsvIfMissing path = do
    exists <- doesFileExist path
    when (not exists) $ writeFileAndPrint path ""
-- | XPath of the scope's <translate> node inside config.xml.
xpath :: String -> String
xpath scope = concat ["/config/", scopeName scope, "/translate"]
-- | Map a CLI scope name to the Magento config XML node name.
-- FIX: the original patterns were non-exhaustive; unknown scopes now fail
-- with a descriptive error instead of a bare pattern-match failure.
scopeName :: String -> String
scopeName "frontend" = "frontend"
scopeName "admin" = "adminhtml"
scopeName other = error ("Magento.Locale.scopeName: unknown scope " ++ show other)
-- | CSV file name for the module's translations: FullModuleName.csv.
localeFname :: ModuleInfo -> String
localeFname info = getFullName info ++ ".csv"
-- | Full path of the CSV file for one locale: <base>/<locale>/<module>.csv.
localePath :: ModuleInfo -> String -> String
localePath info localeName =
    joinPath [
        localeBasePath info,
        localeName,
        localeFname info
    ]
| prasmussen/magmod | Magento/Locale.hs | mit | 1,781 | 0 | 11 | 323 | 498 | 258 | 240 | 50 | 1 |
{-# LANGUAGE GADTs #-}
module Handler.Equipment
( equipment
) where
import Handler.Helpers
import Model
import View.Equipment
-- | Routed resource for /equipment: dispatches the five standard actions.
equipment :: App Response
equipment = routeResource $ defaultActions {
    resActionList = equipmentList
  , resActionNew = equipmentNew
  , resActionEdit = equipmentEdit
  , resActionCreate = equipmentCreate
  , resActionUpdate = equipmentUpdate
  }

-- | Resource configuration: form views and the post-submit redirect target.
equipmentRes :: Resource Equipment
equipmentRes = defaultResource {
    resNewView = equipmentNewView
  , resEditView = equipmentEditView
  , resIndexUri = "/equipment"
  }
-- | List action: render every equipment row from the database.
equipmentList :: App Response
equipmentList = do
  equipment <- runDB $ selectList [] [] :: App [Entity Equipment]
  ok $ toResponse $ equipmentListView equipment

-- | New action: render a blank equipment form.
equipmentNew :: App Response
equipmentNew = do
  view <- getForm "equipment" (equipmentForm Nothing)
  ok $ toResponse $ equipmentNewView view

-- | Edit action: render the form pre-filled with the existing record.
-- NOTE(review): 'key' is unused here; the whole entity is passed to the view.
equipmentEdit :: Entity Equipment -> App Response
equipmentEdit ent@(Entity key equipment) = do
  view <- getForm "equipment" (equipmentForm (Just equipment))
  ok $ toResponse $ equipmentEditView ent view

-- | Create action: validate the posted form and insert a record.
equipmentCreate :: App Response
equipmentCreate = do
  post <- runForm "equipment" (equipmentForm Nothing)
  handleCreate equipmentRes post

-- | Update action: validate the posted form and update the given record.
equipmentUpdate :: Entity Equipment -> App Response
equipmentUpdate ent@(Entity key equipment) = do
  post <- runForm "equipment" (equipmentForm (Just equipment))
  handleUpdate equipmentRes ent post
-- | Digestive-functors form for Equipment.  When an existing record is
-- supplied, its fields seed the form's default values.
equipmentForm :: Monad m => Formlet Text m Equipment
equipmentForm e = Equipment
  <$> "make" .: validate notEmpty (string (equipmentMake <$> e))
  <*> "model" .: validate notEmpty (string (equipmentModel <$> e))
  <*> "serialNumber" .: optionalString (equipmentSerialNumber =<< e)
  <*> "replacementCost" .: optionalStringRead "must be an integer" (equipmentReplacementCost =<< e)
| flipstone/glados | src/Handler/Equipment.hs | mit | 1,798 | 0 | 16 | 291 | 509 | 258 | 251 | 44 | 1 |
{-# LANGUAGE StaticPointers, GADTs #-}
module Data.Box ( Box
, intoBox
, closeBox
, outBox ) where
import GHC.StaticPtr
import Data.ByteString
import Data.Binary
-- | A closed term built from static pointers: either a single pointer, or
-- the application of one boxed term to another.
data Box a where
  Pure :: StaticPtr a -> Box a
  Close :: Box (a -> b) -> Box a -> Box b

-- | Wrap a static pointer in a box.
intoBox :: StaticPtr a -> Box a
intoBox = Pure

-- | Apply a boxed function to a boxed argument (closure under application).
closeBox :: Box (a -> b) -> Box a -> Box b
closeBox = Close

-- | Evaluate a box by dereferencing every static pointer and applying.
outBox :: Box a -> a
outBox (Pure a) = deRefStaticPtr a
outBox (Close f a) = outBox f (outBox a)
| jozefg/modal | src/Data/Box.hs | mit | 508 | 0 | 9 | 148 | 195 | 102 | 93 | 18 | 1 |
{-# LANGUAGE QuasiQuotes #-}
{-
Generator for a JavaScript implementation of the Skein block function.
Based on the public domain C implementation by Doug Whiting.
Code size is ~1.7kB zipped after minification when fully unrolled
performance ~ 50MB/s on v8 and SpiderMonkey
author: Luite Stegeman - 2014
-}
module Main where
import Data.Bits
import Data.Maybe
import Data.Monoid
import System.IO
import Language.Javascript.JMacro
import Text.PrettyPrint.Leijen.Text (renderPretty, displayIO)
import qualified Control.Exception as E
import System.Process
--------------------------------------------------------------------------------
-- configuration
-- number of rounds
-- number of Threefish rounds emitted by the generator
nRounds :: Int
nRounds = 72

-- arguments are byte arrays, no separate offsets
byteArray :: Bool
byteArray = True

-- completely unroll all rounds, eliminating the loop
unroll :: Bool
unroll = True

-- store w in local variables instead of an array
localW :: Bool
localW = True

-- store kw (ks and ts) in local variables (only for unrolled loop!)
localKw :: Bool
localKw = True

-- inline the 64 bit addition calls instead of emitting helper functions
inlineAdd :: Bool
inlineAdd = True
--------------------------------------------------------------------------------
-- constants
-- key schedule parity
-- key schedule parity, split into low (a) / high (b) 32-bit halves
skein_ks_parity_a, skein_ks_parity_b :: Integer
skein_ks_parity_a = 0xA9FC1A22
skein_ks_parity_b = 0x1BD11BDA

-- rotation constants for each round
threefish_rotation :: [(Int,Int)]
threefish_rotation =
  [ (14,16), (52,57), (23,40), (5,37)
  , (25,33), (46,12), (58,22), (32,32)
  ]
--------------------------------------------------------------------------------
-- yep we have to emulate 64 bit arithmetic
-- | An emulated 64-bit value in generated JavaScript: two 32-bit halves
-- stored either in a pair of variables, in adjacent array slots (with an
-- optional dynamic index), or as a compile-time constant.
data JInt64
  = JInt64Var Ident Ident
  | JInt64Arr Ident Int (Maybe JExpr)
  | JInt64Val Integer
  deriving Show

-- | Lift an integer constant into the emulated representation.
val :: Integer -> JInt64
val = JInt64Val

-- | Reference a JavaScript identifier as an expression.
var :: Ident -> JExpr
var = ValExpr . JVar
-- expressions
-- expressions for the low (eA) and high (eB) 32-bit halves of a value
eA, eB :: JInt64 -> JExpr
eA (JInt64Var a _) = var a
eA (JInt64Arr a n me) = [jmacroE| `var a`[`arrE (2*n) me`] |]
eA (JInt64Val n) = toJExpr (n .&. 0xFFFFFFFF)

eB (JInt64Var _ b) = var b
eB (JInt64Arr a n me) = [jmacroE| `var a`[`arrE (2*n+1) me`] |]
eB (JInt64Val n) = toJExpr $ (n `shiftR` 32) .&. 0xFFFFFFFF

-- array index expression: constant offset plus an optional dynamic 64-bit
-- word index (each word occupies two array slots, hence the 2*)
arrE :: Int -> Maybe JExpr -> JExpr
arrE 0 me = fromMaybe (toJExpr (0::Integer)) me
arrE n me = case me of
  Nothing -> toJExpr n
  Just e -> [jmacroE| `n`+2*`e` |]
-- lvalues
-- lvalues: same as eA/eB, but constants are rejected (cannot be assigned to)
lvA, lvB :: JInt64 -> JExpr
lvA j@(JInt64Val{}) = error ("not an lvalue: " ++ show j)
lvA j = eA j
lvB j@(JInt64Val{}) = error ("not an lvalue: " ++ show j)
lvB j = eB j

-- | The four 64-bit state words x0..x3, each backed by an "xNa"/"xNb"
-- local-variable pair.
x :: Int -> JInt64
x n
  | n >=0 && n <= 3 = JInt64Var (StrI $ 'x':show n++"a") (StrI $ 'x':show n++"b")
  | otherwise = error ("x out of range: " ++ show n)
-- | Declare the two JavaScript variables (name ++ "a"/"b") backing one
-- emulated 64-bit value.
declI64 :: String -> JStat
declI64 name = DeclStat (StrI $ name ++ "a") Nothing <>
               DeclStat (StrI $ name ++ "b") Nothing

-- declarations for the w working words, global-array or local-variable form
-- NOTE(review): the global is declared as h$Threefish_w but the non-local
-- branch below references $Threefish_w -- confirm the jmacro name mangling
-- makes these agree.
declWGlobal, declWLocal :: JStat
declWGlobal
  | localW = mempty
  | otherwise = [jmacro| var !h$Threefish_w = new Int32Array(8); |]
declWLocal
  | localW = mconcat $ map (declI64.(('w':).show)) [0..3]
  | otherwise = [jmacro| var !_w = $Threefish_w; |]

-- | Working word n (0..3) as variables or array slots, per 'localW'.
w :: Int -> JInt64
w n | n < 0 || n > 3 = error "w: out of range"
    | localW = JInt64Var (StrI $ "w" ++ show n ++ "a") (StrI $ "w" ++ show n ++ "b")
    | otherwise = JInt64Arr (StrI "_w") n Nothing
-- declarations for the key/tweak schedule words; local variables are only
-- possible when the round loop is fully unrolled
declKwGlobal, declKwLocal :: JStat
declKwGlobal
  | localKw && unroll = mempty
  | localKw = error "localKw is only available when unrolled"
  | otherwise =
      -- the looped variant precomputes a schedule entry per round
      let n = if unroll then 16 else 16 + nRounds * 4
      in [jmacro| var !h$Threefish_kw = new Int32Array(`n`); |]
declKwLocal
  | localKw && unroll = mconcat $ map (declI64.(("kw"++).show)) [0..7]
  | localKw = error "localKw is only available when unrolled"
  | otherwise = [jmacro| var !_kw = $Threefish_kw; |]

-- | Schedule word n, optionally with a dynamic offset expression (the
-- dynamic form is incompatible with local variables).
kw :: Int -> Maybe JExpr -> JInt64
kw n e
  | localKw && isJust e = error "dynamic offset with localKw"
  | localKw = JInt64Var (StrI $ "kw" ++ show n ++ "a") (StrI $ "kw" ++ show n ++ "b")
  | otherwise = JInt64Arr (StrI "_kw") n e
-- offsets of the key and tweak sections inside the kw schedule
kw_key_base, kw_twk_base :: Int
kw_key_base = 3
kw_twk_base = 0

-- key schedule / tweak words at a static offset
ks, ts :: Int -> JInt64
ks n = kw (n + kw_key_base) Nothing
ts n = kw (n + kw_twk_base) Nothing

-- key schedule / tweak words with an extra dynamic offset expression
ks', ts' :: Int -> JExpr -> JInt64
ks' n e = kw (n + kw_key_base) (Just e)
ts' n e = kw (n + kw_twk_base) (Just e)
-- x = y + z
add_64 :: JInt64 -> JInt64 -> JInt64 -> JStat
add_64 x y z = jadd64 (eA x) (eB x) (eA y) (eB y) (eA z) (eB z)

-- x = y + z + w
add3_64 :: JInt64 -> JInt64 -> JInt64 -> JInt64 -> JStat
add3_64 x y z w =
  jadd3_64 (eA x) (eB x) (eA y) (eB y) (eA z) (eB z) (eA w) (eB w)

-- x += y
addTo_64 :: JInt64 -> JInt64 -> JStat
addTo_64 x y = jadd64 (eA x) (eB x) (eA x) (eB x) (eA y) (eB y)

-- x += y + z
addTo2_64 :: JInt64 -> JInt64 -> JInt64 -> JStat
addTo2_64 x y z =
  jadd3_64 (eA x) (eB x) (eA x) (eB x) (eA y) (eB y) (eA z) (eB z)

-- x += y + c (c must be in [0,2^31-1] so its high half is zero)
addTo2_64_c :: JInt64 -> JInt64 -> Int -> JStat
addTo2_64_c x y 0 = addTo_64 x y
addTo2_64_c x y c =
  jadd3_64 (eA x) (eB x) (eA x) (eB x) (eA y) (eB y) (toJExpr c) (toJExpr (0::Int))

-- add a regular JS number to an int64, must be small!
-- (zero with no dynamic part generates no code at all)
addNumTo_64 :: JInt64 -> Int -> Maybe JExpr -> JStat
addNumTo_64 x 0 Nothing = mempty
addNumTo_64 x n me =
  let n' = toJExpr n
      e = maybe n' (\ee -> [jmacroE| `n'`+`ee` |]) me
  in jadd64 (eA x) (eB x) (eA x) (eB x) e [jmacroE| 0 |]
-- x ^= y (xor both halves independently; no carries involved)
xorTo_64 :: JInt64 -> JInt64 -> JStat
xorTo_64 x y = [jmacro| `eA x` ^= `eA y`;
                        `eB x` ^= `eB y`;
                      |]

-- x = y + z: either inline the full addition or call the shared add64
-- helper (which leaves the high half in add64_ret1)
jadd64 :: JExpr -> JExpr -> JExpr -> JExpr -> JExpr -> JExpr -> JStat
jadd64 xa xb ya yb za zb
  | inlineAdd = add64Body (Just xa) xb ya yb za zb
  | otherwise = [jmacro|
      `xa` = add64(`ya`,`yb`,`za`,`zb`);
      `xb` = add64_ret1;
    |]
-- x = y + z + w: inline or call the shared three-operand helper
jadd3_64 :: JExpr -> JExpr -> JExpr -> JExpr -> JExpr -> JExpr -> JExpr -> JExpr -> JStat
jadd3_64 xa xb ya yb za zb wa wb
  | inlineAdd = add3_64Body (Just xa) xb ya yb za zb wa wb
  | otherwise = [jmacro|
      `xa` = add3_64(`ya`,`yb`,`za`,`zb`, `wa`, `wb`);
      `xb` = add64_ret1;
    |]

-- | Top-level declarations required by the addition code: just the carry
-- temporaries when inlining, otherwise the add64/add3_64 helper functions
-- plus the add64_ret1 variable used to return the high half.
add64Decl :: JStat
add64Decl
  | inlineAdd = add64LocalDecl
  | otherwise = [jmacro| var !add64_ret1;
                         function !add64(xa,xb,ya,yb) {
                           `add64LocalDecl`;
                           `add64Body Nothing add64_ret1 xa xb ya yb`;
                         }
                         function !add3_64(xa,xb,ya,yb,za,zb) {
                           `add64LocalDecl`;
                           `add3_64Body Nothing add64_ret1 xa xb ya yb za zb`;
                         }
                       |]

-- carry temporaries shared by the addition bodies
add64LocalDecl =
  [jmacro| var !c1, !c0; |]
add64Body :: Maybe JExpr -> JExpr -> JExpr -> JExpr -> JExpr -> JExpr -> JStat
add64Body ta tb xa xb ya yb = [jmacro|
c0 = (`xa` & 0xFFFFFF) + (`ya` & 0xFFFFFF);
c1 = (c0 >>> 24) + (`xa` >>> 24) + (`ya` >>> 24) +
((`xb` & 0xFFFF)<<8) + ((`yb` & 0xFFFF) << 8);
`tb` = (((c1 >>> 24) + (`xb` >>> 16) + (`yb` >>> 16)) << 16) +
((c1 >> 8) & 0xFFFF);
`r`;
|] where
r = let v = [jmacroE| (c1 << 24) | (c0 & 0xFFFFFF) |]
in case ta of
Nothing -> [jmacro| return `v`; |]
Just e -> [jmacro| `e` = `v`; |]
add3_64Body :: Maybe JExpr -> JExpr -> JExpr -> JExpr -> JExpr -> JExpr
-> JExpr -> JExpr -> JStat
add3_64Body ta tb xa xb ya yb za zb = [jmacro|
c0 = (`xa` & 0xFFFFFF) + (`ya` & 0xFFFFFF) + (`za` & 0xFFFFFF);
c1 = (c0 >>> 24) + (`xa` >>> 24) + (`ya` >>> 24) + (`za` >>> 24) +
((`xb` & 0xFFFF)<<8) + ((`yb` & 0xFFFF) << 8) + ((`zb` & 0xFFFF)<<8);
`tb` = (((c1 >>> 24) + (`xb` >>> 16) + (`yb` >>> 16) + (`zb` >>> 16)) << 16) +
((c1 >> 8) & 0xFFFF);
`r`;
|] where
r = let v = [jmacroE| (c1 << 24) | (c0 & 0xFFFFFF) |]
in case ta of
Nothing -> [jmacro| return `v`; |]
Just e -> [jmacro| `e` = `v`; |]
-- | Emit JS for the in-place 64-bit left rotation @x = rotl(x, r)@ on a
-- split value (two 32-bit halves). The rotation count must be a constant in
-- [0,63]; the four branches (0, 32, <32, >32) avoid JS shifts by 0 or 32,
-- which are undefined-ish for 32-bit ops. Clobbers the JS temporary @tmp1@.
rotL_64 :: JInt64 -> Int -> JStat
rotL_64 x r
  | r > 63 || r < 0 = error ("rotL: invalid argument: " ++ show r)
  | r == 0 = mempty
  | r == 32 = [jmacro| tmp1 = `eB x`;
                       `eB x` = `eA x`;
                       `eA x` = tmp1;
                     |]
  | r < 32 = [jmacro| tmp1 = `eB x`;
                      `eB x` = (`eB x` << `r`) | (`eA x` >>> `32-r`);
                      `eA x` = (`eA x` << `r`) | (tmp1 >>> `32-r`);
                    |]
  | otherwise = [jmacro| tmp1 = `eB x`;
                         `eB x` = (`eA x` << `r-32`) | (`eB x` >>> `64-r`);
                         `eA x` = (tmp1 << `r-32`) | (`eA x` >>> `64-r`);
                       |]
-- | Emit JS for the fused operation @x = rotl(x, r) ^ y@ on split 64-bit
-- values; this is the MIX step of Threefish. The rotation count must be a
-- constant in [0,63]. Clobbers the JS temporary @tmp1@.
rotL_64_xor :: JInt64 -> Int -> JInt64 -> JStat
rotL_64_xor x r y
  -- Fixed diagnostic: previously reported the wrong function name ("rotL"),
  -- which made the out-of-range error impossible to attribute.
  | r > 63 || r < 0 = error ("rotL_64_xor: invalid argument: " ++ show r)
  | r == 0 = [jmacro| `eA x` ^= `eA y`;
                      `eB x` ^= `eB y`;
                    |]
  | r == 32 = [jmacro| tmp1 = `eB x`;
                       `eB x` = `eA x` ^ `eB y`;
                       `eA x` = tmp1 ^ `eA y`;
                     |]
  | r < 32 = [jmacro| tmp1 = `eB x`;
                      `eB x` = ((`eB x` << `r`) | (`eA x` >>> `32-r`)) ^ `eB y`;
                      `eA x` = ((`eA x` << `r`) | (tmp1 >>> `32-r`)) ^ `eA y`;
                    |]
  | otherwise = [jmacro| tmp1 = `eB x`;
                         `eB x` = ((`eA x` << `r-32`) | (`eB x` >>> `64-r`)) ^ `eB y`;
                         `eA x` = ((tmp1 << `r-32`) | (`eA x` >>> `64-r`)) ^ `eA y`;
                       |]
-- | Emit JS assigning one split 64-bit value to another, half by half
-- (uses the lvalue accessors @lvA@/@lvB@ on the left-hand side).
(.=) :: JInt64 -> JInt64 -> JStat
x .= y = [jmacro| `lvA x` = `eA y`;
                  `lvB x` = `eB y`;
                |]
-- | A named Uint32 array viewed as split 64-bit slots. In @byteArray@ mode
-- the offset variable is omitted; otherwise slot indexing also adds the
-- runtime offset variable @<name>_o@.
u32Ptr :: String -> Int -> JInt64
u32Ptr name n
  | byteArray = JInt64Arr (StrI name) n Nothing
  | otherwise = JInt64Arr (StrI name) n (Just $ ValExpr . JVar . StrI $ name ++ "_o")
-- | Slot accessors for the key, plaintext block and ciphertext arrays that
-- 'processBlock' declares as local JS variables.
keyPtr, blkPtr, cryptPtr :: Int -> JInt64
keyPtr = u32Ptr "keyPtrI3"
blkPtr = u32Ptr "blkPtrI3"
cryptPtr = u32Ptr "cryptPtrI3"
--------------------------------------------------------------------------------
-- the algorithm
-- | All Threefish-256 rounds, either fully unrolled at generation time or
-- as a JS @for@ loop (two key injections per iteration), depending on the
-- @unroll@ flag.
rounds :: JStat
rounds
  | unroll = mconcat $ map round8u [0..nRounds `div` 8-1]
  | otherwise = [jmacro| var rnd;
                         for(rnd=1;rnd<=`2*div nRounds 8`;rnd+=2) {
                           `round8l rnd`;
                         }
                       |]
-- | Loop variant: eight rounds plus two key injections per loop iteration.
round8l :: JExpr -> JStat
round8l rnd = round4a <> i256l rnd 0 <> round4b <> i256l rnd 1
-- | Key injection for the loop variant; also rotates the key/tweak schedule
-- stored in the @kw@ array so the next injection finds its words in place.
i256l :: JExpr -> Int -> JStat
i256l rnd r = addTo_64 (x 0) (ks' r rnd) <>
              addTo2_64 (x 1) (ks' (r+1) rnd) (ts' r rnd) <>
              addTo2_64 (x 2) (ks' (r+2) rnd) (ts' (r+1) rnd) <>
              addTo_64 (x 3) (ks' (r+3) rnd) <>
              addNumTo_64 (x 3) r (Just rnd) <>
              ks' (r+4) rnd .= ks' (r-1) rnd <>
              ts' (r+2) rnd .= ts' (r-1) rnd
-- | Unrolled variant: eight rounds plus two key injections for block @n@.
round8u :: Int -> JStat
round8u n = round4a <> i256u (2*n) <> round4b <> i256u (2*n+1)
-- | Key injection for the unrolled variant; schedule indices are computed
-- statically (mod 5 over key words, mod 3 over tweak words).
i256u :: Int -> JStat
i256u r = addTo_64 (x 0) (ks ((r+1)`mod`5)) <>
          addTo2_64 (x 1) (ks ((r+2)`mod`5)) (ts ((r+1)`mod`3)) <>
          addTo2_64 (x 2) (ks ((r+3)`mod`5)) (ts ((r+2)`mod`3)) <>
          addTo2_64_c (x 3) (ks ((r+4)`mod`5)) (r+1)
-- | One MIX round common to both variants: two add/rotate-xor pairs with the
-- word permutation given by @p0..p3@ and the rotation constants for round @r@
-- (1-based index into @threefish_rotation@).
r256 :: Int -> Int -> Int -> Int -> Int -> JStat
r256 r p0 p1 p2 p3 = addTo_64 (x p0) (x p1) <>
                     rotL_64_xor (x p1) rot1 (x p0) <>
                     addTo_64 (x p2) (x p3) <>
                     rotL_64_xor (x p3) rot2 (x p2)
  where
    (rot1, rot2) = threefish_rotation !! (r-1)
-- | The two four-round groups between key injections.
round4a, round4b :: JStat
round4a = r256 1 0 1 2 3 <>
          r256 2 0 3 2 1 <>
          r256 3 0 1 2 3 <>
          r256 4 0 3 2 1
round4b = r256 5 0 1 2 3 <>
          r256 6 0 3 2 1 <>
          r256 7 0 1 2 3 <>
          r256 8 0 3 2 1
-- | The generated entry point @h$Threefish_256_Process_Block@. In
-- @byteArray@ mode the pointer offsets are required to be zero; otherwise
-- offsets must be 4-byte aligned and are converted to Uint32 indices.
-- The shared @body@ does the actual key-schedule setup and encryption.
processBlock
  | byteArray =
      [jmacro|
        function !h$Threefish_256_Process_Block(keyPtr_d, keyPtr_o_zero, blkPtr_d, blkPtr_o_zero, cryptPtr_d, cryptPtr_o_zero, w32out) {
          var !keyPtrI3 = keyPtr_d.i3;
          var !blkPtrI3 = blkPtr_d.i3;
          var !cryptPtrI3 = cryptPtr_d.i3;
          `body w32out`;
        }
      |]
  | otherwise =
      [jmacro|
        function !h$Threefish_256_Process_Block(keyPtr_d, keyPtr_o, blkPtr_d, blkPtr_o, cryptPtr_d, cryptPtr_o, w32out) {
          if(keyPtr_o & 3 || blkPtr_o & 3 || cryptPtr_o & 3)
            throw new Error("h$Threefish_256_Process_Block: unaligned pointer");
          var !keyPtrI3 = keyPtr_d.i3, !keyPtrI3_o = keyPtr_o >> 2;
          var !blkPtrI3 = blkPtr_d.i3, !blkPtrI3_o = blkPtr_o >> 2;
          var !cryptPtrI3 = cryptPtr_d.i3, !cryptPtrI3_o = cryptPtr_o >> 2;
          `body w32out`;
        }
      |]
  where
    -- todo: implement word swapping for w32out
    -- Loads the key (computing the parity word ks[4]), zeroes the tweak,
    -- loads the plaintext, performs the initial key injection, runs all
    -- rounds, and stores the result.
    body w32out = [jmacro|
        `add64Decl`;
        // context vars
        var !x0a,!x0b,!x1a,!x1b,!x2a,!x2b,!x3a,!x3b;
        var !tmp1;
        `declWLocal`;
        `declKwLocal`;
        `ks 0 .= keyPtr 0`;
        `ks 1 .= keyPtr 1`;
        `ks 2 .= keyPtr 2`;
        `ks 3 .= keyPtr 3`;
        // ks[4] = ks[0] ^ ks[1] ^ ks[2] ^ ks[3] ^ SKEIN_KS_PARITY;
        `lvA (ks 4)` = `eA (ks 0)` ^ `eA (ks 1)` ^ `eA (ks 2)` ^ `eA (ks 3)` ^ `skein_ks_parity_a`;
        `lvB (ks 4)` = `eB (ks 0)` ^ `eB (ks 1)` ^ `eB (ks 2)` ^ `eB (ks 3)` ^ `skein_ks_parity_b`;
        `ts 0 .= val 0`;
        `ts 1 .= val 0`;
        `ts 2 .= val 0`;
        `w 0 .= blkPtr 0`;
        `w 1 .= blkPtr 1`;
        `w 2 .= blkPtr 2`;
        `w 3 .= blkPtr 3`;
        `add_64 (x 0) (w 0) (ks 0)`;
        `add3_64 (x 1) (w 1) (ks 1) (ts 0)`;
        `add3_64 (x 2) (w 2) (ks 2) (ts 1)`;
        `add_64 (x 3) (w 3) (ks 3)`;
        `rounds`;
        `cryptPtr 0 .= x 0`;
        `cryptPtr 1 .= x 1`;
        `cryptPtr 2 .= x 2`;
        `cryptPtr 3 .= x 3`;
      |]
-- | Generator entry point: render the JS shim to @threefish_block.js@
-- (exporting the function for CommonJS consumers), then minify it with the
-- Google Closure compiler via @npx@ into @threefish_block.min.js@.
main = do
  withFile "threefish_block.js" WriteMode $ \h -> do
    hPutStrLn h "// generated by generate-threefish-block"
    displayIO h . renderPretty 0.8 120 . renderJs $
      declWGlobal <> declKwGlobal <> processBlock <>
      [jmacro|
        if(typeof exports !== 'undefined')
          exports.h$Threefish_256_Process_Block = h$Threefish_256_Process_Block;
      |]
  callProcess "npx" [ "google-closure-compiler"
                    , "--js=threefish_block.js"
                    , "--js_output_file=threefish_block.min.js"
                    ]
    `E.onException`
    hPutStrLn stderr "exception not running minifier. perhaps you need to install a JVM or npx by running:\nnpm install -g npx"
| ghcjs/ghcjs | lib/boot/shims/utils/threefish/Main.hs | mit | 14,786 | 2 | 17 | 4,712 | 3,868 | 2,060 | 1,808 | 224 | 2 |
module Common.GtkGoal where
import Data.List (isInfixOf)
import Data.Char (toLower)
import Interfaces.GenericATPState
import Logic.Prover
import Static.GTheory
-- * Datatypes and functions for prover
-- | A proof goal as displayed in the GTK goal list: its status plus its name.
-- Ordering is by status first (see 'GStatus'), then name.
data Goal = Goal
  { gStatus :: GStatus
  , gName :: String }
  deriving (Eq, Ord)
-- | Build a display 'Goal' from a named, possibly unattempted proof;
-- an absent proof counts as 'GOpen'.
toGtkGoal :: (String, Maybe BasicProof) -> Goal
toGtkGoal (n, st) =
  Goal { gName = n
       , gStatus = maybe GOpen basicProofToGStatus st }
-- | Render a goal as Pango markup: coloured status prefix plus the name.
showGoal :: Goal -> String
showGoal (Goal { gName = n, gStatus = s }) =
  spanString s $ statusToPrefix s ++ n
-- | All proof statuses shown in the GUI. Constructor order defines the
-- derived 'Ord' instance and therefore the default sort order in lists
-- (open goals first, handwritten proofs last).
data GStatus = GOpen
             | GTimeout
             | GDisproved
             | GInconsistent
             | GProved
             | GGuessed
             | GConjectured
             | GHandwritten
               deriving (Eq, Ord)
-- | Pango-markup rendering of a status: the plain label from 'showSimple'
-- wrapped in a colour span by 'spanString'. Defined in terms of those
-- helpers instead of eight hand-written equations that duplicated their
-- strings verbatim, so the three functions can no longer drift apart.
instance Show GStatus where
  show gs = spanString gs $ showSimple gs
-- | Plain-text label for a proof status, without any Pango markup.
showSimple :: GStatus -> String
showSimple GProved       = "Proved"
showSimple GInconsistent = "Inconsistent"
showSimple GDisproved    = "Disproved"
showSimple GOpen         = "Open"
showSimple GTimeout      = "Open (Timeout!)"
showSimple GGuessed      = "Guessed"
showSimple GConjectured  = "Conjectured"
showSimple GHandwritten  = "Handwritten"
-- | Pango colour name used when rendering a status.
statusToColor :: GStatus -> String
statusToColor GOpen         = "black"
statusToColor GProved       = "green"
statusToColor GDisproved    = "red"
statusToColor GTimeout      = "blue"
statusToColor GInconsistent = "orange"
statusToColor GGuessed      = "darkgreen"
statusToColor GConjectured  = "darkgreen"
statusToColor GHandwritten  = "darkgreen"

-- | Short ASCII tag prepended to a goal's name in list views.
statusToPrefix :: GStatus -> String
statusToPrefix GOpen         = "[ ] "
statusToPrefix GProved       = "[+] "
statusToPrefix GDisproved    = "[-] "
statusToPrefix GTimeout      = "[t] "
statusToPrefix GInconsistent = "[*] "
statusToPrefix GGuessed      = "[.] "
statusToPrefix GConjectured  = "[:] "
statusToPrefix GHandwritten  = "[/] "
-- | Wrap a string in a Pango @\<span\>@ coloured according to the status.
spanString :: GStatus -> String -> String
spanString st msg =
  concat ["<span color=\"", statusToColor st, "\">", msg, "</span>"]
-- | Converts a ProofStatus into a GStatus. An open goal counts as a timeout
-- when any of its reason lines mentions "timeout" (case-insensitively).
proofStatusToGStatus :: ProofStatus a -> GStatus
proofStatusToGStatus p = case goalStatus p of
  Proved False -> GInconsistent
  Proved True -> GProved
  Disproved -> GDisproved
  Open (Reason s) ->
    if any (isInfixOf "timeout" . map toLower) s then GTimeout else GOpen
-- | Converts a BasicProof into a GStatus, delegating real prover results to
-- 'proofStatusToGStatus'.
basicProofToGStatus :: BasicProof -> GStatus
basicProofToGStatus p = case p of
  BasicProof _ st -> proofStatusToGStatus st
  Guessed -> GGuessed
  Conjectured -> GConjectured
  Handwritten -> GHandwritten
-- | Converts a GenericConfig into a GStatus, upgrading a plain open result
-- to 'GTimeout' when the prover's time limit was exceeded.
genericConfigToGStatus :: GenericConfig a -> GStatus
genericConfigToGStatus cfg = case proofStatusToGStatus $ proofStatus cfg of
  GOpen -> if timeLimitExceeded cfg then GTimeout else GOpen
  s -> s
| nevrenato/Hets_Fork | Common/GtkGoal.hs | gpl-2.0 | 3,146 | 0 | 12 | 828 | 778 | 403 | 375 | 84 | 8 |
module Main where
import Text.ParserCombinators.Parsec
import qualified Text.ParserCombinators.Parsec.Token as P
import Text.ParserCombinators.Parsec.Language (javaStyle)
import Data.Time.Calendar
import Data.Char (isSpace)
import System.IO
-- Put your favorite file name here (the ledger file to parse).
fname = "fxt.ledger"
-- | Parse the ledger file and print either the parse error or one record
-- per line.
main = do res <- parseFromFile journal fname
          case res of
            Left err -> do putStr "parse error at " ; print err
            Right rs -> mapM_ (putStrLn . show) rs
-- I use mapM_ to print one record per line; otherwise, calling
-- show of a list will put everything on the same line.
-- All grammar productions currently return strings (placeholders); these
-- aliases only document which production produces what.
type Entry = String
type Directive = String
type Transaction = String
type Code = String
type Payee = String
type Amount = String
type Commodity = String
type Quantity = String
type Annotation = String
type Account = [String]
type Date = String
-- | A whole ledger file: any mix of entries, directives, comments and blank
-- lines up to EOF. Non-entries parse to 'Nothing' and are filtered out by
-- the comprehension.
journal :: Parser [Entry]
journal = do xs <- many ( entry
                      <|> directive
                      <|> comment
                      <|> emptyLine )
             eof
             return [x | Just x <- xs]
-- WARNING: this has to be the last production or it will not parse
-- The reason is that the parser could choose this production by matching
-- empty and it will not be able to backtrack once it discovers empty is not
-- followed by a newline.
emptyLine :: Parser (Maybe Entry)
emptyLine = do many (oneOf " \t")
               newline
               return Nothing
-- | A comment line: starts with one of @;*hb@ and runs to end of line.
comment :: Parser (Maybe Entry)
comment = do oneOf ";*hb"
             skipMany (satisfy (/= '\n'))
             newline
             return Nothing
-- | A directive line: either @!@/@*@ followed by a word directive, or a
-- single-character directive.
directive :: Parser (Maybe Entry)
directive = do (oneOf "!*" >> wordDirective) <|> charDirective
               newline
               return Nothing
-- | The word directives (@include@, @account@, @end@, @alias@, @def@).
-- Each keyword is wrapped in 'try' because Parsec's 'string' consumes input
-- on a partial match: without 'try', an @alias@ line made @string "account"@
-- consume the leading @a@ and fail, so the @alias@ alternative was never
-- reached and the line could not parse at all.
wordDirective :: Parser Directive
wordDirective = try (string "include" >> longText)
            <|> try (string "account" >> longText)
            <|> try (string "end")
            <|> try (string "alias" >> identifier >> char '=' >> longText)
            <|> (string "def" >> longText)
-- WARNING: should this really include trailing white space?
-- | The rest of the current line (at least one character).
longText :: Parser String
longText = many1 (satisfy (/= '\n'))
-- WARNING: not sure about this one
-- | A run of non-whitespace characters.
identifier :: Parser String
identifier = many1 (satisfy (not . isSpace))
-- | Single-character directives. Every alternative starts with a distinct
-- first character, so the lack of 'try' only matters if a directive's tail
-- fails to parse (which then aborts the whole line, as in the original
-- grammar).
charDirective :: Parser Directive
charDirective = do oneOf "iIoO"
                   date
                   time
                   longText
                   return "i|I|o|O directive"
            <|> do char 'D'
                   amount
                   return "D directive"
            <|> do char 'A'
                   longText
                   return "A directive"
            <|> do char 'C'
                   commodity
                   char '='
                   amount
                   return "C directive"
            <|> do char 'P'
                   date
                   time
                   commodity
                   amount
                   return "P directive"
            <|> do char 'N'
                   commodity
                   return "N directive"
            <|> do char 'Y'
                   count 4 digit
                   return "Y directive"
            <|> do string "--"
                   identifier
                   longText
                   return "-- directive"
-- | A date of the form YYYY?MM?DD where @?@ is any of @/-.@ (the actual
-- digits are currently discarded; the placeholder string is returned).
date :: Parser Date
date = do count 4 digit
          dateSep
          count 2 digit
          dateSep
          count 2 digit
          return "date"
-- | A date separator character.
dateSep :: Parser ()
dateSep = do oneOf "/-."
             return ()
-- | A time of the form HH:MM:SS (value discarded).
time :: Parser ()
time = do count 2 digit
          char ':'
          count 2 digit
          char ':'
          count 2 digit
          return ()
commodity = identifier
-- WARNING: I need to define amount correctly
amount = identifier
-- | A journal entry, wrapped in 'Just' so it survives the filtering in
-- 'journal' (comments and blank lines parse to 'Nothing').
entry :: Parser (Maybe Entry)
entry = fmap Just plainEntry
-- | A plain ledger entry: a date line (with optional effective date,
-- status, code, payee string and note) followed by one or more transaction
-- lines.
plainEntry :: Parser Entry
plainEntry = do date
                optional (char '=' >> date)
                optional status
                optional code
                fullString
                optional note
                newline
                many1 transaction
                return "plain entry"
-- | Cleared/pending marker.
status :: Parser Char
status = oneOf "*!"
-- | A transaction code in parentheses.
code :: Parser String
code = between (char '(') (char ')') (many1 (noneOf ")\n"))
-- | A semicolon-introduced note running to end of line.
note :: Parser String
note = do char ';'
          cs <- longText
          return cs
-- WARNING: this is not defined in grammar.y
fullString :: Parser String
fullString = many1 (noneOf ";\n")
-- | A posting line: indented, with optional status, an account and an
-- optional note.
transaction :: Parser Transaction
transaction = do many1 (oneOf " \t")
                 optional status
                 account
                 -- missing values_opt from grammar.y
                 optional note
                 newline
                 return "transaction"
-- | An account reference: plain, virtual @(...)@ or balanced-virtual @[...]@.
account :: Parser String
account = accountName
      <|> between (char '(') (char ')') accountName
      <|> between (char '[') (char ']') accountName
-- | A colon-separated account path, concatenated into one string.
accountName :: Parser String
accountName = do ns <- sepBy1 accountName' (char ':')
                 return (concat ns)
-- | A single account path component.
accountName' :: Parser String
accountName' = many1 (noneOf ")]:;\n")
| mgax/beancount | lib/haskell/LedgerParser.hs | gpl-2.0 | 5,110 | 0 | 14 | 1,912 | 1,279 | 607 | 672 | 144 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{- |
Module : Main
Parses command-line options and starts torrent client
-}
module Main where
import Prelude hiding (log, length, readFile, writeFile)
import Data.ByteString.Char8 (readFile, writeFile, unpack)
import System.Environment (getArgs)
import System.Directory (doesFileExist)
import System.Posix.Signals (installHandler, Handler(Catch), sigINT, sigTERM)
import Text.ParserCombinators.Parsec (ParseError)
import Control.Lens
import Control.Concurrent.MVar
import Control.Concurrent
import Data.IORef
import FuncTorrent.Bencode (decode)
import FuncTorrent.Logger (initLogger, logMessage, logStop)
import FuncTorrent.Metainfo (Info(..), Metainfo(..), mkMetaInfo)
import FuncTorrent.Peer (handShake)
import FuncTorrent.Tracker (tracker, peers, mkTrackerResponse)
import FuncTorrent.ControlThread
import FuncTorrent.ServerThread
-- | Our fixed BitTorrent peer id (Azureus-style client prefix).
peerId :: String
peerId = "-HS0001-*-*-20150215"
-- | Entry point: set up logging, parse the torrent file named on the
-- command line and hand it to the concurrent client.
main :: IO ()
main = do
  args <- getArgs
  logR <- initLogger
  let log = logMessage logR
  log "Starting up functorrent"
  log $ "Parsing input file " ++ concat args
  parseTorrentFile args log >>= startTorrentConc log
  logStop logR
-- | Print the command-line usage string.
usage :: IO ()
usage = putStrLn "usage: functorrent torrent-file"
-- | Parse a torrent file path into metainfo. Returns a singleton list on
-- success and an empty list on bencode/metainfo failure (after logging);
-- any other argument count prints usage. NOTE(review): a missing file
-- calls 'error' rather than returning [] like the other failure paths —
-- confirm whether that asymmetry is intended.
parseTorrentFile :: [String] -> (String -> IO ()) -> IO [Metainfo]
parseTorrentFile [a] log = do
  fileExist <- doesFileExist a
  if fileExist
    then readFile a >>= getMetaInfo
    else error "file does not exist"
 where
  getMetaInfo torrentStr =
    case decode torrentStr of
      Left e -> logError e log >> return []
      Right d ->
        case mkMetaInfo d of
          Nothing -> log "Unable to make meta info file"
                     >> return []
          Just m -> return [m]
parseTorrentFile _ _ = usage >> return []
-- | Sequential (non-threaded) client path: contact the tracker, cache its
-- response, and handshake with the first peer. NOTE(review): both
-- @head (announceList m)@ and @head (peers peerResp)@ are partial and will
-- crash on empty lists — confirm before using this path in production
-- ('main' currently uses 'startTorrentConc' instead).
startTorrent :: (String -> IO ()) -> [Metainfo] -> IO ()
startTorrent log (m:_) = do
  log "Input File OK"
  log $ "Downloading file : " ++ name (info m)
  log "Trying to fetch peers"
  log $ "Trackers: " ++ head (announceList m)
  response <- tracker m peerId
  -- TODO: Write to ~/.functorrent/caches
  writeFile (name (info m) ++ ".cache") response
  case decode response of
    Left e -> logError e log
    Right trackerInfo ->
      case mkTrackerResponse trackerInfo of
        Left e -> log $ "Error" ++ unpack e
        Right peerResp -> do
          log $ "Peers List : " ++ (show . peers $ peerResp)
          let p1 = head (peers peerResp)
          msg <- handShake p1 (infoHash m) peerId
          log $ "handshake: " ++ show msg
          return ()
startTorrent _ [] = return ()
-- | Concurrent client path: install SIGINT/SIGTERM handlers, fork the
-- control and server threads for the first torrent, block until the user
-- interrupts, then ask both threads to stop and give them time to wind
-- down.
startTorrentConc :: (String -> IO ()) -> [Metainfo] -> IO ()
startTorrentConc log (m:_) = do
  -- Handle user-interrupt
  interrupt <- newEmptyMVar
  _ <- installHandler sigINT (Catch $ putMVar interrupt 1) Nothing
  _ <- installHandler sigTERM (Catch $ putMVar interrupt 1) Nothing
  -- Fork Control-Thread(s)
  (ct,_) <- initControlThread m
  -- Fork Server-Thread
  (st,_) <- initServerThread [(m,ct)]
  -- Wait For user-interrupt
  _ <- takeMVar interrupt
  -- Exit gracefully
  putMVar (st ^. serverTAction) FuncTorrent.ServerThread.Stop
  writeIORef (ct ^. controlTAction) FuncTorrent.ControlThread.Stop
  yield
  threadDelay $ 4*1000*1000
-- No torrent parsed: nothing to do. Without this equation the function
-- died with a pattern-match failure whenever 'parseTorrentFile' returned
-- an empty list (parse error or bad argument count), which 'main' feeds
-- straight into this function.
startTorrentConc _ [] = return ()
-- | Log a bencode parse failure through the supplied logger.
logError :: ParseError -> (String -> IO ()) -> IO ()
logError e logMsg = logMsg $ "parse error: \n" ++ show e
| dfordivam/functorrent | src/Main.hs | gpl-3.0 | 3,340 | 0 | 20 | 707 | 1,076 | 543 | 533 | 81 | 4 |
module Handler.Library where
import Import
import Util.Widgets (getSortValue, sortListByOption, sortWidget)
-- | A 'LibraryItem' 'Entity' & 'Book' packed into a Tuple.
type LibraryItemAndBook = (Entity LibraryItem, Book)
-- | Display all LibraryItems, sorted by the @sort@ GET parameter
-- (defaulting to "status"). The bindings @libraryWidget@,
-- @libraryEnctype@, @itemsAndBooks@ and @librarySortWidget@ are consumed
-- by the spliced "library/library" template.
-- NOTE(review): fetching each 'Book' with a separate 'getJust' is an N+1
-- query pattern; consider a single 'selectList' over the collected keys.
getLibraryR :: Handler Html
getLibraryR = do
        sortVal <- getSortValue "status"
        items <- runDB $ selectList [] []
        books <- mapM (\(Entity _ i) -> runDB $ getJust $ libraryItemBook i) items
        let itemsAndBooks = sortLibrary sortVal $ zip items books
        (libraryWidget, libraryEnctype) <- generateFormPost libraryItemIsbnForm
        defaultLayout $ do
            setTitle "Library"
            $(widgetFile "library/library")
        where sortLibrary = sortListByOption librarySortingOptions
              librarySortWidget = sortWidget librarySortingOptions
-- | Return the GET Value, Name & Sorting Function for Library Sort Types.
-- Each triple is (query-string value, display name, comparator on the
-- item/book pair). "status" sorts in-progress items first and, within equal
-- progress, recently finished ones first.
librarySortingOptions :: [(Text, Text,
                           LibraryItemAndBook -> LibraryItemAndBook -> Ordering)]
librarySortingOptions =
        [ ("status", "Status", sortStatus `on` getItem)
        , ("rating", "Rating", comparing $ libraryItemRating . getItem)
        , ("name", "Name (A-Z)", flipComparing $ bookTitle . snd)
        , ("name-reverse", "Name (Z-A)", comparing $ bookTitle . snd)
        , ("newest", "Newest", comparing $ libraryItemAddedOn . getItem)
        , ("oldest", "Oldest", flipComparing $ libraryItemAddedOn . getItem)
        , ("recently-finished", "Recently Finished", comparing $ libraryItemLastFinishedOn . getItem)
        , ("times-finished", "Times Finished", comparing $ libraryItemCompletionCount . getItem)
        ]
        where getItem (Entity _ i, _) = i
              -- Primary key: in-progress flag; tie-break on finish history.
              sortStatus item1 item2 =
                let result = comparing libraryItemInProgress item1 item2
                in if result == EQ
                    then flipComparing libraryItemHasFinished item1 item2
                    else result
| prikhi/MyBookList | Handler/Library.hs | gpl-3.0 | 1,953 | 0 | 12 | 474 | 459 | 250 | 209 | 33 | 2 |
module Nirum.Package.ModuleSet ( ImportError ( CircularImportError
, MissingImportError
, MissingModulePathError
)
, ModuleSet
, fromList
, fromMap
, keys
, keysSet
, length
, lookup
, null
, toAscList
, toList
, toMap
) where
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Prelude hiding (length, lookup, null)
import qualified Nirum.Constructs.DeclarationSet as DS
import Nirum.Constructs.Identifier (Identifier)
import Nirum.Constructs.Module (Module (Module), imports)
import Nirum.Constructs.ModulePath (ModulePath)
import Nirum.Constructs.TypeDeclaration ( TypeDeclaration ( Import
, ServiceDeclaration
, TypeDeclaration
)
)
-- | Everything that can go wrong with the import graph of a module set:
-- an import cycle, an import from a nonexistent module path, or an import
-- of a name the target module does not define (or only re-imports).
data ImportError = CircularImportError [ModulePath]
                 | MissingModulePathError ModulePath ModulePath
                 | MissingImportError ModulePath ModulePath Identifier
                 deriving (Eq, Ord, Show)
-- | The set of 'Module' values. It can be looked up by its 'ModulePath'.
newtype ModuleSet = ModuleSet (M.Map ModulePath Module) deriving (Eq, Ord, Show)
-- | Validate a map of modules into a 'ModuleSet'; fails with the full set
-- of detected import errors if there are any.
fromMap :: M.Map ModulePath Module -> Either (S.Set ImportError) ModuleSet
fromMap ms
    | S.null importErrors = Right moduleSet
    | otherwise = Left importErrors
  where
    moduleSet :: ModuleSet
    moduleSet = ModuleSet ms
    importErrors :: S.Set ImportError
    importErrors = detectImportErrors moduleSet
-- | As 'fromMap', from an association list (later duplicates win).
fromList :: [(ModulePath, Module)] -> Either (S.Set ImportError) ModuleSet
fromList = fromMap . M.fromList
-- | Expose the underlying path-to-module map.
toMap :: ModuleSet -> M.Map ModulePath Module
toMap (ModuleSet ms) = ms

-- | All modules as @(path, module)@ pairs.
toList :: ModuleSet -> [(ModulePath, Module)]
toList moduleSet = M.toList (toMap moduleSet)

-- | All modules as pairs in ascending path order.
toAscList :: ModuleSet -> [(ModulePath, Module)]
toAscList moduleSet = M.toAscList (toMap moduleSet)

-- | The number of modules in the set.
length :: ModuleSet -> Int
length moduleSet = M.size (toMap moduleSet)

-- | Whether the set contains no modules at all.
null :: ModuleSet -> Bool
null moduleSet = M.null (toMap moduleSet)

-- | Every module path in the set.
keys :: ModuleSet -> [ModulePath]
keys moduleSet = M.keys (toMap moduleSet)

-- | Every module path, as a 'S.Set'.
keysSet :: ModuleSet -> S.Set ModulePath
keysSet moduleSet = M.keysSet (toMap moduleSet)

-- | Find the module registered under the given path, if any.
lookup :: ModulePath -> ModuleSet -> Maybe Module
lookup path moduleSet = M.lookup path (toMap moduleSet)
-- | All import problems in a module set: broken imports plus import cycles.
detectImportErrors :: ModuleSet -> S.Set ImportError
detectImportErrors moduleSet = detectMissingImports moduleSet `S.union`
                               detectCircularImports moduleSet
-- | Imports that cannot be satisfied: either the imported path does not
-- exist in the set, or the identifier is absent from the target module.
-- Note that an 'Import' declaration in the target does NOT satisfy an
-- import — only actual type/service declarations do.
detectMissingImports :: ModuleSet -> S.Set ImportError
detectMissingImports moduleSet =
    S.fromList [e | (path, mod') <- toList moduleSet, e <- detect path mod']
  where
    detect :: ModulePath -> Module -> [ImportError]
    detect path module' =
        [ e
        | (path', idents) <- M.toList (imports module')
        , e <- case lookup path' moduleSet of
            Nothing -> [MissingModulePathError path path']
            Just (Module decls _) ->
                [ e
                | i <- S.toList idents
                , e <- case DS.lookup i decls of
                    Just TypeDeclaration {} -> []
                    Just ServiceDeclaration {} -> []
                    Just Import {} -> [MissingImportError path path' i]
                    Nothing -> [MissingImportError path path' i]
                ]
        ]
-- | Import cycles, found by a depth-first walk from every module path.
-- Each cycle is reported once per entry point; duplicates collapse in the
-- resulting set. NOTE(review): the walk has no memoisation, so dense
-- import graphs are revisited repeatedly — fine for typical package sizes,
-- but worth caching if module counts grow.
detectCircularImports :: ModuleSet -> S.Set ImportError
detectCircularImports (ModuleSet ms) =
    S.fromList [e | path <- M.keys ms, e <- detect path []]
  where
    -- Adjacency: which paths each module imports.
    moduleImports :: M.Map ModulePath (S.Set ModulePath)
    moduleImports =
        M.fromList [ (path, M.keysSet $ imports module')
                   | (path, module') <- M.toList ms
                   ]
    -- DFS carrying the current path stack in reverse order; revisiting a
    -- path on the stack closes a cycle.
    detect :: ModulePath -> [ModulePath] -> [ImportError]
    detect path reversedCycle
        | path `elem` reversedCycle =
            [CircularImportError $ reverse reversedCycle']
        | otherwise =
            case M.lookup path moduleImports of
                Just paths -> [ e
                              | path' <- S.toList paths
                              , e <- detect path' reversedCycle'
                              ]
                Nothing -> []
      where
        reversedCycle' :: [ModulePath]
        reversedCycle' = path : reversedCycle
| dahlia/nirum | src/Nirum/Package/ModuleSet.hs | gpl-3.0 | 4,745 | 0 | 20 | 1,836 | 1,150 | 618 | 532 | 96 | 5 |
{-
CC_Clones - Classic games reimplemented
Β© Callum Lowcay 2006-2013
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import Common.Counters
import Common.Graphics
import Common.HighScores
import Control.Applicative
import Control.Monad
import Control.Monad.Reader (runReaderT)
import Data.IORef
import FRP.Events
import FRP.Yampa
import Graphics.UI.SDL hiding (Event, NoEvent)
import Graphics.UI.SDL.Keysym
import Graphics.UI.SDL.Time
import Graphics.UI.SDL.TTF
import qualified Data.Map as M
import qualified Graphics.UI.SDL as SDL
import Spaceships.Assets
import Spaceships.GameState
import Spaceships.Render
-- | Title shown in the SDL window's caption bar.
windowCaption :: String
windowCaption = "Spaceships!"
-- Entry point
-- | Initialise SDL, load assets and game state, then run the Yampa
-- reactimate loop until 'sfMain' signals quit.
main :: IO ()
main = do
  initSDL
  setCaption windowCaption windowCaption
  assets@(Assets{..}) <- loadAssets
  gs <- initGlobalState
  time <- newIORef =<< fromIntegral <$> getTicks
  reactimate
    (Event <$> getSDLEvents)
    (getInput time)
    (handleOutput assets)
    (sfMain gs (gfx M.! Digits))
  Graphics.UI.SDL.quit
-- Library initialisation
-- | Bring up SDL video (728x546, 32bpp, double-buffered) and SDL_ttf.
initSDL :: IO ()
initSDL = do
  Graphics.UI.SDL.init [InitVideo]
  setVideoMode 728 546 32 [HWSurface, DoubleBuf]
  Graphics.UI.SDL.TTF.init
  return ()
-- | Yampa input callback: record the current tick count and hand the
-- pending SDL events to the signal function with a time delta.
--
-- The delta returned is a fixed 1/60 s step (the wall-clock tick counter is
-- still written to the 'IORef' on every call, should variable timing be
-- reinstated). The previous version computed a real dt and then discarded
-- it via an unconditional first @return@ whose value was thrown away — dead
-- code left over from a variable-timestep implementation; only the final
-- @return (1 / 60, …)@ was ever observable. Removing it changes nothing.
getInput :: IORef Int -> Bool -> IO (DTime, Maybe (Event SDLEvents))
getInput time _canBlock = do
    t1 <- fromIntegral <$> getTicks
    writeIORef time t1
    sdlevents <- getSDLEvents
    let events = if null sdlevents then NoEvent else Event sdlevents
    return (1 / 60, Just events)
-- | Yampa output callback: toggle high-score name editing when the game
-- state says so, render the frame, and flip the double buffer. Returns
-- True to stop the reactimate loop.
handleOutput :: Assets -> Bool -> (FullGameOutput, Bool) -> IO Bool
handleOutput assets hasChanged (go, quitNow) = do
  case go of
    FullGameOutput (HighScore _ _ (Event EditingStart)) _ _ -> startEditing
    FullGameOutput (HighScore _ hs (Event EditingStop)) _ _ -> endEditing hs
    _ -> return ()
  runReaderT (renderOutput go) assets
  surface <- getVideoSurface
  SDL.flip surface
  return quitNow
-- | Initial global state: score 0, level 1, persisted high scores loaded
-- from the "spaceships" table.
initGlobalState :: IO GlobalState
initGlobalState = do
  hs <- loadHighScoreTable "spaceships"
  return$ GlobalState {
    gs_score = 0,
    gs_level = 1,
    gs_highScores = hs
  }
-- | Top-level signal function: the intro-mode game wrapped with score and
-- level counters, quitting on an SDL quit event or the Escape key.
sfMain :: GlobalState -> Sprite -> SF (Event SDLEvents) (FullGameOutput, Bool)
sfMain gs digits = proc e -> do
  quitEvents <- sdlQuitEvents -< e
  escEvents <- sdlKeyPresses (mkKey SDLK_ESCAPE) True -< e
  out <- addCounters
    (initCounter digits 5) (initCounter digits 2) (introMode gs) -< e
  returnA -< (out, isEvent quitEvents || isEvent escEvents)
| CLowcay/CC_Clones | src/spaceships.hs | gpl-3.0 | 3,113 | 22 | 14 | 511 | 854 | 441 | 413 | 76 | 3 |
{-# LANGUAGE DeriveGeneric #-}
module Phascal.Arm where
import Control.Monad (liftM, liftM2, join)
import Data.Bits
import Data.Word
import Prelude hiding (lookup)
import Data.List (intersperse)
import Data.Maybe (fromJust)
import GHC.Generics
import Control.DeepSeq
import Phascal.Ast
import Phascal.SymbolTable
-- | Compilation failures surfaced to the caller.
data CompileError = UndefinedVar String
                  deriving(Show, Eq)
-- | Frame-pointer-relative address of a variable's stack slot (4 bytes per
-- slot). NOTE(review): 'fromJust' makes this partial — an unknown variable
-- crashes instead of producing 'UndefinedVar'; confirm callers pre-validate.
varAddr :: SymTable -> String -> Address
varAddr syms v = RegOffset "fp" (slotnum * 4)
  where slotnum = frameOffset $ fromJust (lookup v syms)
-- | Register names are plain strings ("r0", "fp", ...).
type Reg = String
-- | The subset of ARM instructions the compiler emits.
data Instr = Ldr Reg Address
           | Str Reg Address
           | Push [Reg]
           | Pop [Reg]
           | Ldm Reg [Reg]
           | Add Reg Reg Reg
           | OrR Reg Reg Reg
           | SubRRR Reg Reg Reg
           | SubRRI Reg Reg Int -- reg := reg - immediate
           | Svc Int
           | EorRI Reg Reg Int
           | MovRR Reg Reg -- reg := reg
           | MovRI Reg Int -- reg := immediate
           | Bl String
           deriving(Show, Eq, Generic)
-- | Memory operand forms.
data Address = RegOffset Reg Int
             | AddrContaining Int -- =0x1242 notation, i.e. put this value in the
                                  -- binary and refer to it by its address
             deriving(Show, Eq, Generic)
-- | A line of assembly output: an instruction, a label, or @.globl@.
data Directive = Instruction Instr
               | Label String
               | Globl String
               deriving(Show, Eq, Generic)
instance NFData Instr
instance NFData Address
instance NFData Directive
-- | @canImmediate n@ indicates whether the number @n@ is representable as
-- an arm assembly immediate: an 8-bit payload with a 4-bit rotate field,
-- where the rotation is applied in units of 2 bits. @n@ is encodable iff
-- some even left-rotation of its 32-bit pattern fits in 8 bits.
--
-- There's a good description of the whole scheme here:
-- <http://alisdair.mcdiarmid.org/2014/01/12/arm-immediate-value-encoding.html>
canImmediate :: Int -> Bool
canImmediate n = or [bits `rotate` r < 256 | r <- [0, 2 .. 30]]
  where bits = fromIntegral n :: Word32
-- | Render one instruction in GNU assembler syntax.
formatInstr :: Instr -> String
formatInstr (Ldr reg addr) = formatApply "ldr" [reg, formatAddr addr]
formatInstr (Str reg addr) = formatApply "str" [reg, formatAddr addr]
formatInstr (Push regs) = formatApply "push" [formatRegList regs]
formatInstr (Pop regs) = formatApply "pop" [formatRegList regs]
formatInstr (Ldm base regs) = formatApply "ldm" [base, formatRegList regs]
formatInstr (Add ret lhs rhs) = formatApply "add" [ret, lhs, rhs]
formatInstr (OrR ret lhs rhs) = formatApply "orr" [ret, lhs, rhs]
formatInstr (SubRRR rd rm rs) = formatApply "sub" [rd, rm, rs]
formatInstr (SubRRI ret lhs rhs) = formatApply "sub" [ret, lhs, formatInt rhs]
formatInstr (Svc n) = formatApply "svc" [formatInt n]
formatInstr (EorRI rd rs int) = formatApply "eor" [rd, rs, formatInt int]
formatInstr (MovRI reg val) = formatApply "mov" [reg, formatInt val]
formatInstr (MovRR lhs rhs) = formatApply "mov" [lhs, rhs]
formatInstr (Bl label) = formatApply "bl" [label]
-- | Render a memory operand: @[reg, #off]@ or the literal-pool form @=n@.
formatAddr :: Address -> String
formatAddr (RegOffset reg off) = join ["[", commaSep [reg, formatInt off], "]"]
formatAddr (AddrContaining addr) = "=" ++ show addr
-- | Render an integer as an ARM immediate operand, e.g. @#42@.
formatInt :: Int -> String
formatInt n = '#' : show n
-- | Render a register list in ARM brace syntax, e.g. @{r0, r1}@.
formatRegList :: [Reg] -> String
formatRegList regs = "{" ++ commaSep regs ++ "}"
-- | @formatApply op args@ is the string represention of pneumonic @op@
-- applied to the (already formatted) list of arguments @args@.
formatApply :: String -> [String] -> String
formatApply op args = unwords [op, commaSep args]

-- | Join already-formatted operands with @", "@ separators.
commaSep :: [String] -> String
commaSep = concat . intersperse ", "
-- | Render one output line: instructions are tab-indented, labels get a
-- trailing colon, @.globl@ exports a symbol.
formatDirective :: Directive -> String
formatDirective (Instruction instr) = "\t" ++ formatInstr instr ++ "\n"
formatDirective (Label lbl) = lbl ++ ":\n"
formatDirective (Globl sym) = ".globl " ++ sym ++ "\n"
-- | Compile an expression; the result is always left in r0. Binary
-- operations evaluate both operands onto the stack, then pop into r0/r1.
-- NOTE(review): after @pop {r0, r1}@ the right operand lands in r0 and the
-- left in r1 — fine while all 'BinOp's are commutative; revisit if a
-- non-commutative operator (e.g. subtraction) is added.
compileExpr :: SymTable -> Expr -> [Directive]
compileExpr syms (Var v) = [Instruction $ Ldr "r0" (varAddr syms v)]
compileExpr syms (Num n) = [Instruction $ if canImmediate n
                                            then MovRI "r0" n
                                            else Ldr "r0" (AddrContaining n)]
compileExpr _ T = [Instruction (MovRI "r0" 1)]
compileExpr _ F = [Instruction (MovRI "r0" 0)]
compileExpr syms (Not ex) =
    (compileExpr syms ex) ++ [Instruction $ EorRI "r0" "r0" 1]
compileExpr syms (Pos ex) = compileExpr syms ex
compileExpr syms (Neg ex) =
    (compileExpr syms ex) ++ (map Instruction [ MovRI "r1" 0
                                              , SubRRR "r0" "r1" "r0"
                                              ])
compileExpr syms (Op op lhs rhs) =
    let
        [lAsm, rAsm] = map compileSubExpr [lhs, rhs]
        opAsm = compileBinOp op
    in
        lAsm ++ rAsm ++ [Instruction $ Pop ["r0", "r1"]] ++ opAsm
  where
    compileSubExpr ex = (compileExpr syms ex) ++ [Instruction $ Push ["r0"]]
-- | The instruction(s) combining r0 and r1 into r0 for a binary operator.
compileBinOp :: BinOp -> [Directive]
compileBinOp Plus = [Instruction $ Add "r0" "r0" "r1"]
compileBinOp Or = [Instruction $ OrR "r0" "r0" "r1"]
-- | Compile one statement. An assignment evaluates its right-hand side
-- into r0 and stores r0 at the variable's frame slot.
compileStatement :: SymTable -> Statement -> [Directive]
compileStatement syms (Assign v ex) =
    compileExpr syms ex ++ [Instruction (Str "r0" (varAddr syms v))]
-- | Compile a whole program: the @_start@ shim, the program's label, the
-- standard prologue, the compiled statements, and the epilogue.
compileProgram :: Program -> [Directive]
compileProgram p =
    let body' = map (compileStatement $ makeSymTable p) (body p) in
    join [ entryPoint (name p)
         , [Label (name p)]
         , functionPrologue
         , join body'
         , functionEpilogue
         ]
-- | Standard frame setup: save fp/ip/lr/pc and point fp into the saved
-- block (APCS-style frame).
functionPrologue :: [Directive]
functionPrologue = instrs [ MovRR "ip" "sp"
                          , Push ["fp", "ip", "lr", "pc"]
                          , SubRRI "fp" "ip" 4
                          ]
-- | Matching teardown: restore fp/sp/lr and return via lr.
functionEpilogue :: [Directive]
functionEpilogue = instrs [ Ldm "sp" ["fp", "sp", "lr"]
                          , MovRR "pc" "lr"
                          ]
-- | @_start@: zero the frame pointer, call the program's main symbol, then
-- invoke the exit syscall (r7 = 1, svc 0 on Linux/EABI).
entryPoint :: String -> [Directive]
entryPoint mainSym = [ Globl "_start"
                     , Label "_start"
                     ] ++ instrs [ MovRI "fp" 0
                                 , Bl mainSym
                                 , MovRI "r7" 1
                                 , Svc 0
                                 ]
-- | Lift raw instructions into output directives.
instrs :: [Instr] -> [Directive]
instrs = map Instruction
| zenhack/phascal | src/Phascal/Arm.hs | gpl-3.0 | 6,204 | 0 | 12 | 1,793 | 1,941 | 1,023 | 918 | 126 | 2 |
{-# LANGUAGE NoImplicitPrelude, GeneralizedNewtypeDeriving #-}
module Data.Store.Guid
(Guid, make, bs, length, new, combine, augment, fromString, asHex) where
import Prelude.Compat hiding (length)
import Control.DeepSeq (NFData(..))
import Control.Lens ((%~), _1)
import Control.Monad (guard)
import Data.Binary (Binary(..))
import Data.Binary.Get (getByteString)
import Data.Binary.Put (putByteString)
import Data.Binary.Utils (encodeS)
import qualified Data.ByteString as SBS
import Data.ByteString.NFData ()
import qualified Data.ByteString.UTF8 as UTF8
import Data.ByteString.Utils (randomBS, xorBS)
import qualified Data.Char as Char
import Data.Hashable (Hashable, hashWithSalt)
import Data.Maybe (fromMaybe)
import Numeric.Utils (encodeHex)
import System.Random (Random(..), split)
newtype Guid = Guid { bs :: SBS.ByteString }
deriving (Eq, Ord, Read, NFData, Hashable)
instance Show Guid where
show g =
fromMaybe ((('G':) . take 6 . asHex) g) $
decodeDebugGuid g
-- | Try to render a Guid built by 'fromString' back into its original
-- string, for friendlier 'Show' output.  Succeeds only when the bytes
-- are acceptable printable characters followed exclusively by the NUL
-- padding added by 'make'; otherwise Nothing ('show' falls back to hex).
decodeDebugGuid :: Guid -> Maybe String
decodeDebugGuid (Guid g) = do
  -- everything after the readable prefix must be zero padding
  guard $ all (== '\x00') shouldBeZeros
  guard $ all isOkChar preZeros
  return $
    if null preZeros
    then "\"\""  -- render the all-zero Guid as a pair of quotes
    else preZeros
  where
    (preZeros, shouldBeZeros) = break (== '\x00') $ UTF8.toString g
    -- characters considered "debuggable": alphanumerics plus a small
    -- set of identifier/operator symbols
    isOkChar x = Char.isAlphaNum x || elem x " *+/<>-=_:"
-- | Lift a ByteString transformation to operate on a Guid.
inGuid :: (SBS.ByteString -> SBS.ByteString) -> Guid -> Guid
inGuid f = Guid . f . bs
-- | Lift a binary ByteString operation to operate on two Guids.
inGuid2 :: (SBS.ByteString -> SBS.ByteString -> SBS.ByteString) ->
           Guid -> Guid -> Guid
inGuid2 f = inGuid . f . bs
-- | Hexadecimal rendering of the Guid's raw bytes.
asHex :: Guid -> String
asHex = encodeHex . bs
instance Random Guid where
  -- ranged generation is meaningless for opaque identifiers
  randomR = error "randomR: you nuts?"
  -- 16 random bytes; '_1 %~' rebuilds the Guid from the split generator
  random = (_1 %~ Guid . SBS.pack . take 16 . randoms) . split
-- | Fixed byte length of every Guid (intentionally shadows
-- 'Prelude.length'; the module hides it on import).
length :: Int
length = 16
-- | Build a Guid from at most 'length' bytes.  Shorter inputs are
-- right-padded with NUL bytes; longer inputs are a programmer error.
make :: SBS.ByteString -> Guid
make bytes
  | l > length = error ("Invalid GUID: too long: " ++ show bytes)
  | l < length = Guid $ bytes `mappend` SBS.replicate (length - l) 0
  | otherwise = Guid bytes
  where
    l = SBS.length bytes
-- | Use only strings shorter than Guid.length
fromString :: String -> Guid
fromString = make . UTF8.fromString
instance Binary Guid where
  -- fixed-width encoding: exactly 'length' raw bytes, no size prefix
  get = Guid `fmap` getByteString length
  put = putByteString . bs
-- | A fresh random Guid from the system entropy source.
new :: IO Guid
new = Guid `fmap` randomBS length
-- | Deterministically mix two Guids into one: the plain XOR of the two
-- is additionally XORed with a hash of each input ('rbs'), so simple
-- XOR cancellation between related Guids is avoided.
combine :: Guid -> Guid -> Guid
combine x y =
  inGuid (rbs x . rbs y) xorGuid
  where
    -- XOR in the salted hash of a Guid's bytes
    rbs = xorBS . encodeS . hashWithSalt 0 . bs
    xorGuid = inGuid2 xorBS x y
-- | Mix a namespace string into an existing Guid.
augment :: String -> Guid -> Guid
augment = combine . fromString
| rvion/lamdu | bottlelib/Data/Store/Guid.hs | gpl-3.0 | 2,708 | 0 | 13 | 702 | 885 | 488 | 397 | -1 | -1 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
module VirMat.IO.Import.CommandLine
( getJob
) where
import Options.Applicative
import VirMat.IO.Import.Types
import VirMat.Core.Sampling
-- | Applicative parser assembling a complete 'JobRequest' from the
-- individual option parsers defined below.
parseJob :: Parser JobRequest
parseJob = VoronoiJob
  <$> parseDimension
  <*> parseStructureSize
  <*> parseDistType
  <*> parseDistribution
  <*> parseSeed
  <*> parseOutFile
-- | Parse the process command line (with @--help@ support) into a
-- 'JobRequest'; exits with usage on a parse failure (execParser).
getJob :: IO JobRequest
getJob = execParser opts
  where
    opts = info (helper <*> parseJob)
      ( fullDesc
      <> progDesc "Virtual microstructure generator in 3D/2D."
      <> header "Virmat"
      <> footer "Legend: k = scaling factor; mu = average; s = variance; o = offset"
      )
-- | Target dimension; defaults to 3D when neither flag is given.
parseDimension :: Parser Dimension
parseDimension = let
  d3 = flag' Dimension3D (long "3d" <> help "Generate 3D microstructures (default).")
  d2 = flag' Dimension2D (long "2d" <> help "Generate 2D microstructures.")
  in d3 <|> d2 <|> pure Dimension3D
-- | Structure size: either a grain count or a bounding box
-- (default: 500 grains).
--
-- NOTE(review): both alternatives register the SAME long option name
-- ("n2d").  This looks like a copy-paste slip -- the grain-count and
-- bounding-box options presumably need distinct flags.  Flagged rather
-- than fixed, because renaming a flag changes the CLI for users.
parseStructureSize :: Parser StructureSize
parseStructureSize = let
  nd = (NGrains . max 0) <$> option auto
    ( long "n2d" <>
      metavar "INT" <>
      help "Generate a microstructure with a given number of grains" )
  bd = SizeBox <$> option auto
    ( long "n2d" <>
      metavar "(DOUBLE, DOUBLE)" <>
      help "Generate a 2D microstructure with a given bounding box" )
  in nd <|> bd <|> pure (NGrains 500)
-- | Grain placement strategy; negative iteration counts are clamped to
-- zero, and the default is packed placement with 60 iterations.
parseDistType :: Parser DistributionType
parseDistType = let
  pn = (PackedDistribution . max 0) <$> option auto
    ( long "packed-n" <>
      metavar "INT" <>
      help "Packing grain placement with a given number of iterations." )
  r = flag' RandomDistribution
    ( long "random" <> help "Random grain placement." )
  p = flag' (PackedDistribution 60)
    ( long "packed" <> help "Packing grain placement with 60 iterations (default)." )
  in pn <|> p <|> r <|> pure (PackedDistribution 60)
-- | Zero or more size-distribution components, each given as one of
-- the @--norm@ / @--lnorm@ / @--uniform@ options below.
parseDistribution::Parser [CombDist]
parseDistribution = many $ parseLogNormal <|> parseNormal <|> parseUniform
-- | @--norm (k, mu, s)@: a normal distribution component.
parseNormal :: Parser CombDist
parseNormal = let
  func (a,b,c) = CombDist $ Normal a b c
  p = option auto
    ( long "norm" <>
      metavar "(k, mu, s)" <>
      help "Normal distribution." )
  in func <$> p
-- | @--lnorm (k, mu, mode, o)@: a log-normal distribution component.
parseLogNormal :: Parser CombDist
parseLogNormal = let
  func (a,b,c,d) = CombDist $ LogNormal a b c d
  p = option auto
    ( long "lnorm" <>
      metavar "(k, mu, mode, o)" <>
      help "Log Normal distribution." )
  in func <$> p
-- | @--uniform (k, mu, s)@: a uniform distribution component.
parseUniform :: Parser CombDist
parseUniform = let
  func (a,b,c) = CombDist $ Uniform a b c
  p = option auto
    ( long "uniform" <>
      metavar "(k, mu, s)" <>
      help "Uniform distribution." )
  in func <$> p
-- | Optional RNG seed for reproducible runs.
parseSeed :: Parser (Maybe Int)
parseSeed = optional $ option auto
  ( long "seed" <>
    metavar "INT" <>
    help "Random seed." )
-- | Output destination: directory, sample name, and which overlays to
-- render.
parseOutFile :: Parser Output
parseOutFile = Output
  <$> strOption
    ( long "dir" <>
      short 'd' <>
      metavar "FILEPATH" <>
      help "Output directory." )
  <*> strOption
    ( long "sample" <>
      short 's' <>
      metavar "STR" <>
      help "Sample name." )
  <*> parseShow
-- | Any combination of visualisation flags; each may be given once and
-- they accumulate into a list.
parseShow :: Parser [ShowType]
parseShow = let
  v = flag' ShowVoronoi (long "showvoronoi" <> help "Show Voronoi grains.")
  b = flag' ShowBox (long "showbox" <> help "Show enclosing box.")
  h = flag' ShowHull (long "showhull" <> help "Show convex hull.")
  p = flag' ShowPoints (long "showpoints" <> help "Show weighted points.")
  s = flag' ShowSimplex (long "showsimplex" <> help "Show Delaunay Triangulation.")
  f = flag' ShowForces (long "showforces" <> help "Show forces in packing.")
  in many (v <|> b <|> h <|> p <|> s <|> f)
| lostbean/VirMat | src/VirMat/IO/Import/CommandLine.hs | gpl-3.0 | 3,923 | 0 | 14 | 1,143 | 1,050 | 516 | 534 | 102 | 1 |
module OpenSandbox.Data.YggdrasilSpec (main,spec) where
import OpenSandbox.Data.Yggdrasil
import Test.Hspec
import Test.QuickCheck
-- | Placeholder spec for 'OpenSandbox.Data.Yggdrasil'; no cases yet.
spec :: Spec
spec = pure ()

-- | Run the (currently empty) spec through hspec.
main :: IO ()
main = hspec spec
| oldmanmike/opensandbox | test/OpenSandbox/Data/YggdrasilSpec.hs | gpl-3.0 | 197 | 0 | 6 | 29 | 64 | 37 | 27 | 8 | 1 |
{-# LANGUAGE TypeSynonymInstances , OverlappingInstances #-}
module Database.Design.Ampersand.ADL1.Expression (
subst
,foldlMapExpression,foldrMapExpression
,primitives,isMp1, isEEps
,isPos,isNeg, deMorganERad, deMorganECps, deMorganEUni, deMorganEIsc, notCpl, isCpl
,exprIsc2list, exprUni2list, exprCps2list, exprRad2list, exprPrd2list
,insParentheses)
where
import Database.Design.Ampersand.Basics (uni)
import Database.Design.Ampersand.Core.AbstractSyntaxTree
--import Debug.Trace
-- fatal :: Int -> String -> a
-- fatal = fatalMsg "ADL1.Expression"
-- | subst is used to replace each occurrence of a relation
-- with an expression. The parameter expr will therefore be applied to an
-- expression of the form Erel rel.
subst :: (Declaration,Expression) -> Expression -> Expression
subst (decl,expr) = subs
   where
     -- structural recursion: rebuild each node around substituted children
     subs (EEqu (l,r)) = EEqu (subs l,subs r)
     subs (EImp (l,r)) = EImp (subs l,subs r)
     subs (EIsc (l,r)) = EIsc (subs l,subs r)
     subs (EUni (l,r)) = EUni (subs l,subs r)
     subs (EDif (l,r)) = EDif (subs l,subs r)
     subs (ELrs (l,r)) = ELrs (subs l,subs r)
     subs (ERrs (l,r)) = ERrs (subs l,subs r)
     subs (EDia (l,r)) = EDia (subs l,subs r)
     subs (ECps (l,r)) = ECps (subs l,subs r)
     subs (ERad (l,r)) = ERad (subs l,subs r)
     subs (EPrd (l,r)) = EPrd (subs l,subs r)
     subs (EKl0 e ) = EKl0 (subs e)
     subs (EKl1 e ) = EKl1 (subs e)
     subs (EFlp e ) = EFlp (subs e)
     subs (ECpl e ) = ECpl (subs e)
     subs (EBrk e) = EBrk (subs e)
     -- the actual substitution: a reference to 'decl' becomes 'expr'
     subs e@(EDcD d ) | d==decl = expr
                      | otherwise = e
     -- all other leaves (I, epsilon, V, singletons) are left untouched
     subs e@EDcI{} = e
     subs e@EEps{} = e
     subs e@EDcV{} = e
     subs e@EMp1{} = e
-- | Left-accumulator variant of 'foldrMapExpression': the accumulator
-- comes first in the combining function, so we simply flip it.
foldlMapExpression :: (a -> r -> a) -> (Declaration->r) -> a -> Expression -> a
foldlMapExpression f = foldrMapExpression (flip f)
-- | Fold over every relation occurrence ('EDcD') in an expression:
-- each declaration is mapped through @g@ and combined with @f@;
-- binary nodes fold the left subtree first, then the right; non-EDcD
-- leaves (I, epsilon, V, singletons) contribute nothing.
foldrMapExpression :: (r -> a -> a) -> (Declaration->r) -> a -> Expression -> a
foldrMapExpression f g a (EEqu (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EImp (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EIsc (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EUni (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EDif (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ELrs (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ERrs (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EDia (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ECps (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ERad (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EPrd (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EKl0 e) = foldrMapExpression f g a e
foldrMapExpression f g a (EKl1 e) = foldrMapExpression f g a e
foldrMapExpression f g a (EFlp e) = foldrMapExpression f g a e
foldrMapExpression f g a (ECpl e) = foldrMapExpression f g a e
foldrMapExpression f g a (EBrk e) = foldrMapExpression f g a e
foldrMapExpression f g a (EDcD d) = f (g d) a
foldrMapExpression _ _ a EDcI{} = a
foldrMapExpression _ _ a EEps{} = a
foldrMapExpression _ _ a EDcV{} = a
foldrMapExpression _ _ a EMp1{} = a
-- | Collect the primitive subexpressions (relations, identities, V and
-- singletons) occurring anywhere in an expression, without duplicates
-- (combined via 'uni').
primitives :: Expression -> [Expression]
primitives expr =
  case expr of
    (EEqu (l,r)) -> primitives l `uni` primitives r
    (EImp (l,r)) -> primitives l `uni` primitives r
    (EIsc (l,r)) -> primitives l `uni` primitives r
    (EUni (l,r)) -> primitives l `uni` primitives r
    (EDif (l,r)) -> primitives l `uni` primitives r
    (ELrs (l,r)) -> primitives l `uni` primitives r
    (ERrs (l,r)) -> primitives l `uni` primitives r
    (EDia (l,r)) -> primitives l `uni` primitives r
    (ECps (l,r)) -> primitives l `uni` primitives r
    (ERad (l,r)) -> primitives l `uni` primitives r
    (EPrd (l,r)) -> primitives l `uni` primitives r
    (EKl0 e) -> primitives e
    (EKl1 e) -> primitives e
    (EFlp e) -> primitives e
    (ECpl e) -> primitives e
    (EBrk e) -> primitives e
    EDcD{} -> [expr]
    EDcI{} -> [expr]
    EEps{} -> [] -- Since EEps is inserted for typing reasons only, we do not consider it a primitive..
    EDcV{} -> [expr]
    EMp1{} -> [expr]
-- | The rule of De Morgan requires care with respect to the complement.
-- The following function provides a function to manipulate with De Morgan correctly.
-- | Push a complement through relative addition:
-- @-(l!r)@ becomes @-l;-r@, and @l!r@ is rewritten via a double
-- complement so the complement sits on the outside.
deMorganERad :: Expression -> Expression
deMorganERad (ECpl (ERad (l,r)))
  = notCpl (deMorganERad l) .:. notCpl (deMorganERad r)
deMorganERad (ERad (l,r))
  = notCpl (notCpl (deMorganERad l) .:. notCpl (deMorganERad r))
deMorganERad e = e
-- | Dual of 'deMorganERad' for composition: @-(l;r)@ becomes @-l!-r@.
deMorganECps :: Expression -> Expression
deMorganECps (ECpl (ECps (l,r)))
  = notCpl (deMorganECps l) .!. notCpl (deMorganECps r)
deMorganECps (ECps (l,r))
  = notCpl (notCpl (deMorganECps l) .!. notCpl (deMorganECps r))
deMorganECps e = e
-- | De Morgan for union: @-(l\/r)@ becomes @-l/\-r@.
deMorganEUni :: Expression -> Expression
deMorganEUni (ECpl (EUni (l,r)))
  = notCpl (deMorganEUni l) ./\. notCpl (deMorganEUni r)
deMorganEUni (EUni (l,r))
  = notCpl (notCpl (deMorganEUni l) ./\. notCpl (deMorganEUni r))
deMorganEUni e = e
-- | De Morgan for intersection: @-(l/\r)@ becomes @-l\/-r@.
deMorganEIsc :: Expression -> Expression
deMorganEIsc (ECpl (EIsc (l,r)))
  = notCpl (deMorganEIsc l) .\/. notCpl (deMorganEIsc r)
deMorganEIsc (EIsc (l,r))
  = notCpl (notCpl (deMorganEIsc l) .\/. notCpl (deMorganEIsc r))
deMorganEIsc e = e
-- | Complement an expression, collapsing a double complement instead of
-- stacking a second 'ECpl'.
notCpl :: Expression -> Expression
notCpl (ECpl e) = e
notCpl e = ECpl e
-- | Is the expression a complement at the top level?
isCpl :: Expression -> Bool
isCpl (ECpl{}) = True
isCpl _ = False
-- | Positive = not a top-level complement.
isPos :: Expression -> Bool
isPos (ECpl{}) = False
isPos _ = True
isNeg :: Expression -> Bool
isNeg = not . isPos
-- | Is the expression a singleton atom?
isMp1 :: Expression -> Bool
isMp1 EMp1{} = True
isMp1 _ = False
-- | Is the expression an epsilon (inserted by the type checker)?
isEEps :: Expression -> Bool
isEEps EEps{} = True
isEEps _ = False
-- | Flatten nested applications of an associative operator into the
-- list of its operands, e.g. @a/\(b/\c)@ becomes @[a,b,c]@; any other
-- expression yields a singleton list.
exprIsc2list, exprUni2list, exprCps2list, exprRad2list, exprPrd2list :: Expression -> [Expression]
exprIsc2list (EIsc (l,r)) = exprIsc2list l++exprIsc2list r
exprIsc2list r = [r]
exprUni2list (EUni (l,r)) = exprUni2list l++exprUni2list r
exprUni2list r = [r]
exprCps2list (ECps (l,r)) = exprCps2list l++exprCps2list r
exprCps2list r = [r]
exprRad2list (ERad (l,r)) = exprRad2list l++exprRad2list r
exprRad2list r = [r]
exprPrd2list (EPrd (l,r)) = exprPrd2list l++exprPrd2list r
exprPrd2list r = [r]
-- | Re-insert brackets ('EBrk') so that a pretty-printed expression
-- respects operator binding strength.  The 'Integer' threaded through
-- 'insPar' is the precedence of the surrounding context; subexpressions
-- whose operator binds less tightly than the context get wrapped.
insParentheses :: Expression -> Expression
insParentheses expr = insPar 0 expr
  where
    wrap :: Integer -> Integer -> Expression -> Expression
    -- keep e' bare when context precedence i does not exceed the
    -- operator precedence j; otherwise bracket (restarting at level 0)
    wrap i j e' = if i<=j then e' else EBrk (insPar 0 e')
    insPar :: Integer -> Expression -> Expression
    -- precedence levels: 0 = |-/==, 2 = /\ and \/, 4 = -, 6 = residuals
    -- and diamond, 8 = ; ! *, 10 = postfix/unary
    insPar i (EEqu (l,r)) = wrap i 0 (insPar 1 l .==. insPar 1 r)
    insPar i (EImp (l,r)) = wrap i 0 (insPar 1 l .|-. insPar 1 r)
    insPar i x@EIsc{} = wrap i 2 (foldr1 (./\.) [insPar 3 e | e<-exprIsc2list x ])
    insPar i x@EUni{} = wrap i 2 (foldr1 (.\/.) [insPar 3 e | e<-exprUni2list x ])
    insPar i (EDif (l,r)) = wrap i 4 (insPar 5 l .-. insPar 5 r)
    insPar i (ELrs (l,r)) = wrap i 6 (insPar 7 l ./. insPar 7 r)
    insPar i (ERrs (l,r)) = wrap i 6 (insPar 7 l .\. insPar 7 r)
    insPar i (EDia (l,r)) = wrap i 6 (insPar 7 l .<>. insPar 7 r)
    insPar i x@ECps{} = wrap i 8 (foldr1 (.:.) [insPar 9 e | e<-exprCps2list x ])
    insPar i x@ERad{} = wrap i 8 (foldr1 (.!.) [insPar 9 e | e<-exprRad2list x ])
    insPar i x@EPrd{} = wrap i 8 (foldr1 (.*.) [insPar 9 e | e<-exprPrd2list x ])
    insPar _ (EKl0 e) = EKl0 (insPar 10 e)
    insPar _ (EKl1 e) = EKl1 (insPar 10 e)
    insPar _ (EFlp e) = EFlp (insPar 10 e)
    insPar _ (ECpl e) = ECpl (insPar 10 e)
    -- existing brackets are discarded and recomputed
    insPar i (EBrk e) = insPar i e
    insPar _ x = x
{-
insPar 0 (r/\s/\t/\x/\y |- p)
=
wrap 0 0 (insPar 1 (r/\s/\t/\x/\y) |- insPar 1 p)
=
insPar 1 (r/\s/\t/\x/\y) |- insPar 1 p
=
wrap 1 2 (foldr1 f [insPar 3 e | e<-exprIsc2list (r/\s/\t/\x/\y) ]) |- p where f x y = EIsc (x,y)
=
foldr1 f [insPar 3 e | e<-exprIsc2list (r/\s/\t/\x/\y) ] |- p where f x y = EIsc (x,y)
=
foldr1 f [insPar 3 e | e<-[r,s,t,x,y] ] |- p where f x y = EIsc (x,y)
=
foldr1 f [insPar 3 r,insPar 3 s,insPar 3 t,insPar 3 x,insPar 3 y] |- p where f x y = EIsc (x,y)
=
foldr1 f [r,s,t,x,y] |- p where f x y = EIsc (x,y)
=
r/\s/\t/\x/\y |- p
insPar 0 (r;s;t;x;y |- p)
=
wrap 0 0 (insPar 1 (r;s;t;x;y) |- insPar 1 p)
=
insPar 1 (r;s;t;x;y) |- insPar 1 p
=
wrap 1 8 (insPar 8 r ; insPar 8 (s;t;x;y)) |- p
=
r; insPar 8 (s;t;x;y) |- p
=
r; wrap 8 8 (insPar 8 s; insPar 8 (t;x;y)) |- p
=
r; insPar 8 s; insPar 8 (t;x;y) |- p
=
r; s; insPar 8 (t;x;y) |- p
-}
| DanielSchiavini/ampersand | src/Database/Design/Ampersand/ADL1/Expression.hs | gpl-3.0 | 9,240 | 0 | 13 | 2,455 | 3,717 | 1,918 | 1,799 | 156 | 21 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.LevelPuzzle.Output.Plain.ShadeDot
(
shadeDot,
shadeDotNode,
shadeDotColor,
shadeDotUseTexPlain,
shadeDotUseTexBonus,
shadeDotUseTexTele0,
shadeDotUseTexTele1,
shadeDotUseTexFinish,
) where
import MyPrelude
import Game
import Game.Data.Color
import Game.LevelPuzzle
import OpenGL
import OpenGL.Helpers
-- | Activate the dot shader for this frame: upload the per-frame
-- uniforms (alpha, projection-modelview matrix, normal matrix), bind
-- the shader's VAO, and select texture unit 0 so the subsequent
-- @shadeDotUseTex*@ binds target the right unit.
shadeDot :: ShadeDot -> Float -> Mat4 -> Mat4 -> IO ()
shadeDot sh alpha projmodv normal = do
    glUseProgram $ shadeDotPrg sh
    -- alpha
    glUniform1f (shadeDotUniAlpha sh) $ rTF alpha
    -- projmodv
    uniformMat4 (shadeDotUniProjModvMatrix sh) projmodv
    -- normal (upper 3x3 of the given matrix)
    uniformMat4AsMat3 (shadeDotUniNormalMatrix sh) normal
    -- vao
    glBindVertexArrayOES $ shadeDotVAO sh
    glActiveTexture gl_TEXTURE0
-- | Draw one dot at the given grid node: upload the node coordinates as
-- the position uniform, then issue the triangle-strip draw call whose
-- vertex count matches the sphere tessellation (stacks x slices).
shadeDotNode :: ShadeDot -> Node -> IO ()
shadeDotNode sh (Node x y z) = do
    glUniform3f (shadeDotUniPos sh) (fI x) (fI y) (fI z)
    glDrawArrays gl_TRIANGLE_STRIP 0 $ fI $ valueLevelPuzzleDotStacks *
                                            (2 * (valueLevelPuzzleDotSlices + 1) + 2)
-- | Upload the dot's color uniform.
shadeDotColor :: ShadeDot -> Color -> IO ()
shadeDotColor sh color =
    uniformColor (shadeDotUniColor sh) color
-- | Bind the texture for a plain dot.
shadeDotUseTexPlain :: ShadeDot -> IO ()
shadeDotUseTexPlain sh =
    glBindTexture gl_TEXTURE_2D $ shadeDotTexPlain sh
-- | Bind the texture for a bonus dot.
shadeDotUseTexBonus :: ShadeDot -> IO ()
shadeDotUseTexBonus sh =
    glBindTexture gl_TEXTURE_2D $ shadeDotTexBonus sh
-- | Bind the texture for a teleport-entry dot.
shadeDotUseTexTele0 :: ShadeDot -> IO ()
shadeDotUseTexTele0 sh =
    glBindTexture gl_TEXTURE_2D $ shadeDotTexTele0 sh
-- | Bind the texture for a teleport-exit dot.
shadeDotUseTexTele1 :: ShadeDot -> IO ()
shadeDotUseTexTele1 sh =
    glBindTexture gl_TEXTURE_2D $ shadeDotTexTele1 sh
-- | Bind the texture for the finish dot.
shadeDotUseTexFinish :: ShadeDot -> IO ()
shadeDotUseTexFinish sh =
    glBindTexture gl_TEXTURE_2D $ shadeDotTexFinish sh
| karamellpelle/grid | source/Game/LevelPuzzle/Output/Plain/ShadeDot.hs | gpl-3.0 | 2,568 | 0 | 12 | 532 | 507 | 262 | 245 | 47 | 1 |
module ContextTest where
| juanbono/tapl-haskell | untyped/test/ContextTest.hs | gpl-3.0 | 25 | 0 | 2 | 3 | 4 | 3 | 1 | 1 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
-- Module : Network.AWS.Waiters
-- Copyright : (c) 2013-2015 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Network.AWS.Waiters
(
-- * Types
Acceptor
, Accept (..)
, Wait (..)
-- * Acceptors
, accept
-- * Matchers
, matchAll
, matchAny
, matchError
, matchStatus
-- * Util
, nonEmpty
-- * Lenses
, module Control.Lens
) where
import Control.Lens
import Data.ByteString (ByteString)
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as Text
import Network.AWS.Data
import Network.AWS.Error
import Network.AWS.Types
import Network.HTTP.Types
type Acceptor a = Request a -> Response' a -> Maybe Accept
data Accept
= AcceptSuccess
| AcceptFailure
| AcceptRetry
deriving (Eq, Show)
instance ToBuilder Accept where
build = \case
AcceptSuccess -> "Success"
AcceptFailure -> "Failure"
AcceptRetry -> "Retry"
-- | Timing and acceptance criteria to check fulfillment of a remote operation.
data Wait a = Wait
{ _waitName :: !ByteString
, _waitAttempts :: !Int
, _waitDelay :: !Int
, _waitAcceptors :: [Acceptor a]
}
-- | Run each of the waiter's acceptors against the request/response
-- pair and return the first verdict produced, if any.
accept :: Wait a -> Acceptor a
accept w rq rs = listToMaybe . mapMaybe (\f -> f rq rs) $ _waitAcceptors w
-- | Yield @a@ when EVERY value targeted by the fold equals @x@.
matchAll :: Eq b => b -> Accept -> Fold (Rs a) b -> Acceptor a
matchAll x a l = match (allOf l (== x)) a
-- | Yield @a@ when ANY value targeted by the fold equals @x@.
matchAny :: Eq b => b -> Accept -> Fold (Rs a) b -> Acceptor a
matchAny x a l = match (anyOf l (== x)) a
-- | Yield @a@ when the HTTP status code matches, whether the response
-- was a service error or a success.
matchStatus :: Int -> Accept -> Acceptor a
matchStatus x a _ = \case
    Left (ServiceError _ s _)
        | x == statusCode s -> Just a
    Right (s, _)
        | x == statusCode s -> Just a
    _ -> Nothing
-- | Yield @a@ when the service error carries the given error code.
matchError :: AWSErrorCode (Er (Sv a)) => ErrorCode -> Accept -> Acceptor a
matchError c a _ = \case
    Left (ServiceError _ _ e)
        | Just c == awsErrorCode e -> Just a
    _ -> Nothing
-- | Yield @a@ when the response was successful and satisfies the
-- predicate; errors never match.
match :: (Rs a -> Bool) -> Accept -> Acceptor a
match f a _ = \case
    Right (_, rs)
        | f rs -> Just a
    _ -> Nothing
-- | Fold the targeted 'Text' values down to a 'Bool'.
--
-- NOTE(review): despite the name, this folds to 'Text.null', i.e. it
-- yields 'True' when the text IS empty.  The generated waiter specs
-- that consume this compare against an expected boolean and may
-- compensate for the inverted sense -- confirm before "fixing" to
-- @not . Text.null@, as that would flip every call site.
nonEmpty :: Fold a Text -> Fold a Bool
nonEmpty l = l . to Text.null
| romanb/amazonka | core/src/Network/AWS/Waiters.hs | mpl-2.0 | 2,803 | 0 | 12 | 884 | 747 | 395 | 352 | 72 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.StorageGateway.DescribeGatewayInformation
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- This operation returns metadata about a gateway such as its name,
-- network interfaces, configured time zone, and the state (whether the
-- gateway is running or not). To specify which gateway to describe, use
-- the Amazon Resource Name (ARN) of the gateway in your request.
--
-- /See:/ <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_DescribeGatewayInformation.html AWS API Reference> for DescribeGatewayInformation.
module Network.AWS.StorageGateway.DescribeGatewayInformation
(
-- * Creating a Request
describeGatewayInformation
, DescribeGatewayInformation
-- * Request Lenses
, dgiGatewayARN
-- * Destructuring the Response
, describeGatewayInformationResponse
, DescribeGatewayInformationResponse
-- * Response Lenses
, dgirsGatewayState
, dgirsGatewayARN
, dgirsGatewayNetworkInterfaces
, dgirsNextUpdateAvailabilityDate
, dgirsLastSoftwareUpdate
, dgirsGatewayId
, dgirsGatewayType
, dgirsGatewayTimezone
, dgirsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.StorageGateway.Types
import Network.AWS.StorageGateway.Types.Product
-- | A JSON object containing the id of the gateway.
--
-- /See:/ 'describeGatewayInformation' smart constructor.
newtype DescribeGatewayInformation = DescribeGatewayInformation'
{ _dgiGatewayARN :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeGatewayInformation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dgiGatewayARN'
describeGatewayInformation
    :: Text -- ^ 'dgiGatewayARN'
    -> DescribeGatewayInformation
describeGatewayInformation pGatewayARN_ =
    DescribeGatewayInformation'
    { _dgiGatewayARN = pGatewayARN_
    }

-- | The Amazon Resource Name (ARN) of the gateway to describe.
dgiGatewayARN :: Lens' DescribeGatewayInformation Text
dgiGatewayARN = lens _dgiGatewayARN (\ s a -> s{_dgiGatewayARN = a});
instance AWSRequest DescribeGatewayInformation where
type Rs DescribeGatewayInformation =
DescribeGatewayInformationResponse
request = postJSON storageGateway
response
= receiveJSON
(\ s h x ->
DescribeGatewayInformationResponse' <$>
(x .?> "GatewayState") <*> (x .?> "GatewayARN") <*>
(x .?> "GatewayNetworkInterfaces" .!@ mempty)
<*> (x .?> "NextUpdateAvailabilityDate")
<*> (x .?> "LastSoftwareUpdate")
<*> (x .?> "GatewayId")
<*> (x .?> "GatewayType")
<*> (x .?> "GatewayTimezone")
<*> (pure (fromEnum s)))
instance ToHeaders DescribeGatewayInformation where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("StorageGateway_20130630.DescribeGatewayInformation"
:: ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON DescribeGatewayInformation where
toJSON DescribeGatewayInformation'{..}
= object
(catMaybes [Just ("GatewayARN" .= _dgiGatewayARN)])
instance ToPath DescribeGatewayInformation where
toPath = const "/"
instance ToQuery DescribeGatewayInformation where
toQuery = const mempty
-- | A JSON object containing the following fields:
--
-- /See:/ 'describeGatewayInformationResponse' smart constructor.
data DescribeGatewayInformationResponse = DescribeGatewayInformationResponse'
{ _dgirsGatewayState :: !(Maybe Text)
, _dgirsGatewayARN :: !(Maybe Text)
, _dgirsGatewayNetworkInterfaces :: !(Maybe [NetworkInterface])
, _dgirsNextUpdateAvailabilityDate :: !(Maybe Text)
, _dgirsLastSoftwareUpdate :: !(Maybe Text)
, _dgirsGatewayId :: !(Maybe Text)
, _dgirsGatewayType :: !(Maybe Text)
, _dgirsGatewayTimezone :: !(Maybe Text)
, _dgirsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeGatewayInformationResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dgirsGatewayState'
--
-- * 'dgirsGatewayARN'
--
-- * 'dgirsGatewayNetworkInterfaces'
--
-- * 'dgirsNextUpdateAvailabilityDate'
--
-- * 'dgirsLastSoftwareUpdate'
--
-- * 'dgirsGatewayId'
--
-- * 'dgirsGatewayType'
--
-- * 'dgirsGatewayTimezone'
--
-- * 'dgirsResponseStatus'
describeGatewayInformationResponse
:: Int -- ^ 'dgirsResponseStatus'
-> DescribeGatewayInformationResponse
describeGatewayInformationResponse pResponseStatus_ =
DescribeGatewayInformationResponse'
{ _dgirsGatewayState = Nothing
, _dgirsGatewayARN = Nothing
, _dgirsGatewayNetworkInterfaces = Nothing
, _dgirsNextUpdateAvailabilityDate = Nothing
, _dgirsLastSoftwareUpdate = Nothing
, _dgirsGatewayId = Nothing
, _dgirsGatewayType = Nothing
, _dgirsGatewayTimezone = Nothing
, _dgirsResponseStatus = pResponseStatus_
}
-- | One of the values that indicates the operating state of the gateway.
dgirsGatewayState :: Lens' DescribeGatewayInformationResponse (Maybe Text)
dgirsGatewayState = lens _dgirsGatewayState (\ s a -> s{_dgirsGatewayState = a});
-- | Undocumented member.
dgirsGatewayARN :: Lens' DescribeGatewayInformationResponse (Maybe Text)
dgirsGatewayARN = lens _dgirsGatewayARN (\ s a -> s{_dgirsGatewayARN = a});
-- | A NetworkInterface array that contains descriptions of the gateway
-- network interfaces.
dgirsGatewayNetworkInterfaces :: Lens' DescribeGatewayInformationResponse [NetworkInterface]
dgirsGatewayNetworkInterfaces = lens _dgirsGatewayNetworkInterfaces (\ s a -> s{_dgirsGatewayNetworkInterfaces = a}) . _Default . _Coerce;
-- | The date on which an update to the gateway is available. This date is in
-- the time zone of the gateway. If the gateway is not available for an
-- update this field is not returned in the response.
dgirsNextUpdateAvailabilityDate :: Lens' DescribeGatewayInformationResponse (Maybe Text)
dgirsNextUpdateAvailabilityDate = lens _dgirsNextUpdateAvailabilityDate (\ s a -> s{_dgirsNextUpdateAvailabilityDate = a});
-- | The date on which the last software update was applied to the gateway.
-- If the gateway has never been updated, this field does not return a
-- value in the response.
dgirsLastSoftwareUpdate :: Lens' DescribeGatewayInformationResponse (Maybe Text)
dgirsLastSoftwareUpdate = lens _dgirsLastSoftwareUpdate (\ s a -> s{_dgirsLastSoftwareUpdate = a});
-- | The gateway ID.
dgirsGatewayId :: Lens' DescribeGatewayInformationResponse (Maybe Text)
dgirsGatewayId = lens _dgirsGatewayId (\ s a -> s{_dgirsGatewayId = a});
-- | The type of the gateway.
dgirsGatewayType :: Lens' DescribeGatewayInformationResponse (Maybe Text)
dgirsGatewayType = lens _dgirsGatewayType (\ s a -> s{_dgirsGatewayType = a});
-- | One of the values that indicates the time zone configured for the
-- gateway.
dgirsGatewayTimezone :: Lens' DescribeGatewayInformationResponse (Maybe Text)
dgirsGatewayTimezone = lens _dgirsGatewayTimezone (\ s a -> s{_dgirsGatewayTimezone = a});
-- | The response status code.
dgirsResponseStatus :: Lens' DescribeGatewayInformationResponse Int
dgirsResponseStatus = lens _dgirsResponseStatus (\ s a -> s{_dgirsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway/DescribeGatewayInformation.hs | mpl-2.0 | 8,353 | 0 | 19 | 1,705 | 1,170 | 693 | 477 | 134 | 1 |
{- ORMOLU_DISABLE -}
-- Implicit CAD. Copyright (C) 2011, Christopher Olah (chris@colah.ca)
-- Copyright 2014-2019, Julia Longtin (julial@turinglace.com)
-- Released under the GNU AGPLV3+, see LICENSE
-- Use existing instances for the wrapped types rather than manually manking them
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Graphics.Implicit.FastIntUtil (Fastβ(Fastβ), toFastβ, fromFastβ) where
import Prelude (Integral, Num, Eq, Ord, Enum, Real, Show, Read, Int, id)
-- | Conversion between 'Fastβ' and plain numeric types, so generic code
-- can accept either representation.
class FastN n where
  fromFastβ :: Fastβ -> n
  toFastβ :: n -> Fastβ
instance FastN Int where
  fromFastβ (Fastβ a) = a
  {-# INLINABLE fromFastβ #-}
  toFastβ = Fastβ
  {-# INLINABLE toFastβ #-}
instance FastN Fastβ where
  -- identity conversions: a Fastβ is already a Fastβ
  fromFastβ = id
  {-# INLINABLE fromFastβ #-}
  toFastβ = id
  {-# INLINABLE toFastβ #-}
-- System integers, meant to go fast, and have no chance of wrapping 2^31.
newtype Fastβ = Fastβ Int
  deriving (Show, Read, Eq, Ord, Num, Enum, Integral, Real)
| colah/ImplicitCAD | Graphics/Implicit/FastIntUtil.hs | agpl-3.0 | 1,002 | 10 | 10 | 170 | 212 | 124 | 88 | 17 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Network.TelegramBot.Types where
import Control.Applicative
import Control.Lens
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Either
import Data.Aeson
import Data.Aeson.TH
import Data.Aeson.Encode.Pretty
import Data.Char
import Data.Int
import Data.Monoid
import Data.Proxy
import Data.Text (Text)
import Servant.API
import Servant.Client
import Text.Show
import Network.TelegramBot.Common
import Network.TelegramBot.Files
import qualified Data.Text as T
import qualified Data.Text.IO as T
type InputFile = Text
newtype TrueTg = TrueTg { _trueTg :: All }
deriving (Eq, Show)
makePrisms ''All
makePrisms ''TrueTg
_TgBool :: Iso' TrueTg Bool
_TgBool = _TrueTg._All
_BoolTg :: Iso' Bool TrueTg
_BoolTg = from _TgBool
deriving instance ToJSON All
deriving instance FromJSON All
instance FromJSON TrueTg where
  -- anything other than a literal JSON @true@ decodes as False;
  -- decoding itself never fails
  parseJSON (Bool True) = pure $ TrueTg (All True)
  parseJSON _ = pure $ TrueTg (All False)
instance ToJSON TrueTg where
  -- encoded as @true@ when set, and as Null otherwise
  toJSON (TrueTg (All True)) = Bool True
  toJSON _ = Null
data Response t = Response
{ _responseOk :: All
, _responseDescription :: Maybe Text
, _responseResult :: Maybe t
} deriving (Eq, Show)
-- Combines responses field-wise: the ok flags are AND-ed (via 'All'),
-- descriptions and results via their own Monoid instances.
--
-- NOTE(review): @mempty@ uses @All False@, which is NOT the identity of
-- 'All' (whose identity is @All True@), so the left/right identity
-- Monoid laws do not hold here.  Callers may rely on "empty = not ok",
-- so this is flagged rather than changed -- confirm intent.
instance Monoid t => Monoid (Response t) where
  mempty = Response (All False) Nothing Nothing
  mappend (Response xOk xDesc xRes) (Response yOk yDesc yRes) =
    Response (xOk <> yOk) (xDesc <> yDesc) (xRes <> yRes)
data Update = Update
{ _updateUpdateId :: Int32
, _updateMessage :: Message
} deriving (Eq, Show)
data User = User
{ _userId :: Int32
, _userFirstName :: Text
, _userLastName :: Maybe Text
, _userUsername :: Maybe Text
} deriving (Eq, Show)
data Chat = Chat
{ _chatId :: Int32
, _chatType :: ChatType
, _chatTitle :: Maybe Text
, _chatUsername :: Maybe Text
, _chatFirstName :: Maybe Text
, _chatLastName :: Maybe Text
} deriving (Eq, Show)
data ChatType = Private | Group | Supergroup | Channel
deriving (Eq, Ord, Enum, Bounded, Read, Show)
type ChatId = OneOf '[Int32, Text]
data ChatAction
= Typing
| UploadPhoto
| RecordVideo | UploadVideo
| RecordAudio | UploadAudio
| UploadDocument
| FindLocation
deriving (Eq, Ord, Enum, Bounded, Read, Show)
data Message = Message
{ _messageMessageId :: Int32
, _messageFrom :: Maybe User
, _messageDate :: Int32
, _messageChat :: Chat
, _messageForwardFrom :: Maybe User
, _messageForwardDate :: Maybe Int32
, _messageReplyToMessage :: Maybe Message
, _messageText :: Maybe Text
, _messageAudio :: Maybe Audio
, _messageDocument :: Maybe Document
, _messagePhoto :: Maybe [PhotoSize]
, _messageSticker :: Maybe Sticker
, _messageVideo :: Maybe Video
, _messageVoice :: Maybe Voice
, _messageCaption :: Maybe Text
, _messageLocation :: Maybe Location
, _messageNewChatParticipant :: Maybe User
, _messageLeftChatParticipant :: Maybe User
, _messageNewChatTitle :: Maybe Text
, _messageNewChatPhoto :: Maybe [PhotoSize]
, _messageDeleteChatPhoto :: Maybe TrueTg
, _messageGroupChatCreated :: Maybe TrueTg
, _messageSupergroupChatCreated :: Maybe TrueTg
, _messageChannelChatCreated :: Maybe TrueTg
, _messageMigrateToChatId :: Maybe Int32
, _messageMigrateFromChatId :: Maybe Int32
} deriving (Eq, Show)
data ParseMode = Markdown
deriving (Eq, Ord, Enum, Bounded, Show, Read)
data PhotoSize = PhotoSize
{ _photoSizeFileId :: Text
, _photoSizeWidth :: Int32
, _photoSizeHeight :: Int32
, _photoSizeFileSize :: Maybe Int32
} deriving (Eq, Show)
data Audio = Audio
{ _audioFileId :: Text
, _audioDuration :: Int32
, _audioPerformer :: Maybe Text
, _audioTitle :: Maybe Text
, _audioMimeType :: Maybe Text
, _audioFileSize :: Maybe Text
} deriving (Eq, Show)
-- | A general file sent through the chat.
data Document = Document
  { _documentFileId :: Text
  , _documentThumb :: Maybe PhotoSize
  , _documentFileName :: Maybe Text
  , _documentMimeType :: Maybe Text
  , _documentFileSize :: Maybe Int32
  } deriving (Eq, Show)
-- | A sticker image.
data Sticker = Sticker
  { _stickerFileId :: Text
  , _stickerWidth :: Int32
  , _stickerHeight :: Int32
  , _stickerThumb :: Maybe PhotoSize
  , _stickerFileSize :: Maybe Int32
  } deriving (Eq, Show)
-- | A video file.
data Video = Video
  { _videoFileId :: Text
  , _videoWidth :: Int32
  , _videoHeight :: Int32
  , _videoDuration :: Int32
  , _videoThumb :: Maybe PhotoSize
  , _videoMimeType :: Maybe Text
  , _videoFileSize :: Maybe Int32
  } deriving (Eq, Show)
-- | A voice note.
data Voice = Voice
  { _voiceFileId :: Text
  , _voiceDuration :: Int32
  , _voiceMimeType :: Maybe Text
  , _voiceFileSize :: Maybe Int32
  } deriving (Eq, Show)
-- | A shared phone contact.
data Contact = Contact
  { _contactPhoneNumber :: Text
  , _contactFirstName :: Text
  , _contactLastName :: Maybe Text
  , _contactUserId :: Maybe Int32
  } deriving (Eq, Show)
-- | A point on the map. NOTE(review): longitude comes first here,
-- matching the JSON field order — confirm against the derived instance.
data Location = Location
  { _locationLongitude :: Float
  , _locationLatitude :: Float
  } deriving (Eq, Show)
-- | A user's profile pictures; each photo comes in several sizes.
data UserProfilePhotos = UserProfilePhotos
  { _userProfilePhotosTotalCount :: Int32
  , _userProfilePhotosPhoto :: [[PhotoSize]]
  } deriving (Eq, Show)
-- | A file descriptor ready for downloading.
data File = File
  { _fileId :: Text
  , _fileSize :: Maybe Int32
  , _filePath :: Maybe Text
  } deriving (Eq, Show)
-- | Any of the reply-markup alternatives accepted when sending a message.
type ReplyMarkup = OneOf '[ReplyKeyboardMarkup, ReplyKeyboardHide, ForceReply]
-- | A custom reply keyboard shown to the user.
data ReplyKeyboardMarkup = ReplyKeyboardMarkup
  { _replyKeyboardMarkupKeyboard :: [[Text]] -- ^ rows of button labels
  , _replyKeyboardMarkupResizeKeyboard :: Maybe Bool
  , _replyKeyboardMarkupOneTimeKeyboard :: Maybe Bool
  , _replyKeyboardMarkupSelective :: Maybe Bool
  } deriving (Eq, Show)
-- | Request to hide the currently shown custom keyboard.
data ReplyKeyboardHide = ReplyKeyboardHide
  { _replyKeyboardHideHideKeyboard :: TrueTg -- ^ always-true marker field
  , _replyKeyboardHideSelective :: Maybe Bool
  } deriving (Eq, Show)
-- | Request a forced reply from the user.
data ForceReply = ForceReply
  { _forceReplyForceReply :: TrueTg -- ^ always-true marker field
  , _forceReplySelective :: Maybe Bool
  } deriving (Eq, Show)
-- JSON instances. The numeric argument to 'snakeDrop' is the length of the
-- record-field prefix to strip, i.e. @length (\"_\" ++ typeName)@ —
-- e.g. 9 for "_response", 18 for "_userProfilePhotos".
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 9} ''Response)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 7} ''Update)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 5} ''User)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 5} ''Chat)
$(deriveJSON tgJSONOptions ''ChatType)
$(deriveJSON tgJSONOptions ''ChatAction)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 8} ''Message)
$(deriveJSON tgJSONOptions ''ParseMode)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 10} ''PhotoSize)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 6} ''Audio)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 9} ''Document)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 8} ''Sticker)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 6} ''Video)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 6} ''Voice)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 8} ''Contact)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 9} ''Location)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 18} ''UserProfilePhotos)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 5} ''File)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 20} ''ReplyKeyboardMarkup)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 18} ''ReplyKeyboardHide)
$(deriveJSON tgJSONOptions{fieldLabelModifier = snakeDrop 11} ''ForceReply)
-- Counting ruler used to compute the prefix lengths passed to 'snakeDrop'
-- in the deriveJSON splices above.
-- _234567890123456789012345
-- Generate lenses for every record type (TH: one splice per type).
makeLenses ''Response
makeLenses ''Update
makeLenses ''User
makeLenses ''Chat
makeLenses ''ChatType
makeLenses ''ChatAction
makeLenses ''Message
makeLenses ''ParseMode
makeLenses ''PhotoSize
makeLenses ''Audio
makeLenses ''Document
makeLenses ''Sticker
makeLenses ''Video
makeLenses ''Voice
makeLenses ''Contact
makeLenses ''Location
makeLenses ''UserProfilePhotos
makeLenses ''File
makeLenses ''ReplyKeyboardMarkup
makeLenses ''ReplyKeyboardHide
makeLenses ''ForceReply
| bb010g/telegram-bot | src/Network/TelegramBot/Types.hs | agpl-3.0 | 8,370 | 0 | 10 | 1,710 | 2,346 | 1,261 | 1,085 | 231 | 1 |
module Network.Haskoin.Script.Units (tests) where
import Test.HUnit (Assertion, assertBool)
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Data.Maybe (fromJust)
import Network.Haskoin.Script
import Network.Haskoin.Crypto
import Network.Haskoin.Util
-- | All unit tests for script-signature handling, one test group per
-- vector list; each vector is paired with its index for the test name.
tests :: [Test]
tests =
    [ testGroup "Canonical signatures"
        (map canonicalVectorsMap $ zip canonicalVectors [0..])
    -- Fixed user-visible typo in the group label: "sigatures" -> "signatures".
    , testGroup "Non canonical signatures"
        (map notCanonicalVectorsMap $ zip notCanonicalVectors [0..])
    , testGroup "Multi Signatures"
        (map mapMulSigVector $ zip mulSigVectors [0..])
    , testGroup "Signature decoding"
        (map sigDecodeMap $ zip scriptSigSignatures [0..])
    ]
-- | Build the test case for the i-th canonical-signature vector;
-- the vector itself is looked up by index, so the paired string is unused.
canonicalVectorsMap :: (String,Int) -> Test.Framework.Test
canonicalVectorsMap (_,i) =
    let label = "Canonical Sig " ++ show i
        check = testCanonicalSig (canonicalVectors !! i)
    in testCase label check
-- | Build the test case for the i-th non-canonical-signature vector.
notCanonicalVectorsMap :: (String,Int) -> Test.Framework.Test
notCanonicalVectorsMap (_,i) =
    let label = "Not canonical Sig " ++ show i
        check = testNotCanonicalSig (notCanonicalVectors !! i)
    in testCase label check
-- | Build the test case for the i-th scriptSig signature vector.
sigDecodeMap :: (String,Int) -> Test.Framework.Test
sigDecodeMap (_,i) =
    let label = "Signature " ++ show i
    in testCase label (testSigDecode (scriptSigSignatures !! i))
-- | Assert that a hex-encoded DER signature decodes as canonical.
testCanonicalSig :: String -> Assertion
testCanonicalSig hex =
    let sigBytes = fromJust (hexToBS hex)
    in assertBool " > Canonical Sig" (isRight (decodeCanonicalSig sigBytes))
-- | Assert that a hex-encoded DER signature is rejected as non-canonical.
testNotCanonicalSig :: String -> Assertion
testNotCanonicalSig hex =
    let sigBytes = fromJust (hexToBS hex)
    in assertBool " > Not canonical sig" (isLeft (decodeCanonicalSig sigBytes))
-- | Build the test case for the i-th multi-signature vector.
mapMulSigVector :: ((String,String),Int) -> Test.Framework.Test
mapMulSigVector (v,i) =
    testCase ("MultiSignature vector " ++ show i) (runMulSigVector v)
-- | Decode a multisig output script from hex and check that its P2SH
-- address matches the expected base58 string.
runMulSigVector :: (String,String) -> Assertion
runMulSigVector (expectedAddr, opsHex) =
    let script = decode' (fromJust (hexToBS opsHex))
        addr   = addrToBase58 (scriptAddr (fromRight (decodeOutput script)))
    in assertBool " > MultiSig Vector" (expectedAddr == addr)
-- | Assert that a hex-encoded scriptSig signature decodes successfully;
-- on failure the error message from 'decodeSig' is shown.
testSigDecode :: String -> Assertion
testSigDecode hex =
    assertBool failMsg (isRight decoded)
  where
    decoded = decodeSig (fromJust (hexToBS hex))
    -- only forced when the assertion fails, so 'fromLeft' is safe here
    failMsg = unwords ["Decode failed:", fromLeft decoded]
{- Canonical Signatures -}
-- Test vectors from bitcoind
-- http://github.com/bitcoin/bitcoin/blob/master/src/test/data/sig_canonical.json
-- | Hex-encoded DER signatures that must be accepted as canonical.
canonicalVectors :: [String]
canonicalVectors =
    [ "300602010102010101" -- Changed 0x00 to 0x01 as 0x00 is invalid
    , "3008020200ff020200ff01"
    , "304402203932c892e2e550f3af8ee4ce9c215a87f9bb831dcac87b2838e2c2eaa891df0c022030b61dd36543125d56b9f9f3a1f9353189e5af33cdda8d77a5209aec03978fa001"
    , "30450220076045be6f9eca28ff1ec606b833d0b87e70b2a630f5e3a496b110967a40f90a0221008fffd599910eefe00bc803c688c2eca1d2ba7f6b180620eaa03488e6585db6ba01"
    , "3046022100876045be6f9eca28ff1ec606b833d0b87e70b2a630f5e3a496b110967a40f90a0221008fffd599910eefe00bc803c688c2eca1d2ba7f6b180620eaa03488e6585db6ba01"
    ]
-- | Hex-encoded signatures that must be rejected (bad lengths, wrong tags,
-- trailing bytes, non-minimal encodings, etc.).
notCanonicalVectors :: [String]
notCanonicalVectors =
    [ "30050201ff020001"
    , "30470221005990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba6105022200002d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed01"
    , "304402205990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba610502202d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed11"
    , "314402205990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba610502202d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed01"
    , "304502205990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba610502202d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed01"
    , "301f01205990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb101"
    , "304502205990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba610502202d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed0001"
    , "304401205990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba610502202d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed01"
    , "3024020002202d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed01"
    , "304402208990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba610502202d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed01"
    , "30450221005990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba610502202d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed01"
    , "304402205990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba610501202d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed01"
    , "302402205990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba6105020001"
    , "304402205990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba61050220fd5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed01"
    , "304502205990e0584b2b238e1dfaad8d6ed69ecc1a4a13ac85fc0b31d0df395eb1ba61050221002d5876262c288beb511d061691bf26777344b702b00f8fe28621fe4e566695ed01"
    ]
-- | Pairs of (expected P2SH address, hex-encoded multisig output script).
mulSigVectors :: [(String,String)]
mulSigVectors =
    [ ( "3QJmV3qfvL9SuYo34YihAf3sRCW3qSinyC"
      , "52410491bba2510912a5bd37da1fb5b1673010e43d2c6d812c514e91bfa9f2eb129e1c183329db55bd868e209aac2fbc02cb33d98fe74bf23f0c235d6126b1d8334f864104865c40293a680cb9c020e7b1e106d8c1916d3cef99aa431a56d253e69256dac09ef122b1a986818a7cb624532f062c1d1f8722084861c5c3291ccffef4ec687441048d2455d2403e08708fc1f556002f1b6cd83f992d085097f9974ab08a28838f07896fbab08f39495e15fa6fad6edbfb1e754e35fa1c7844c41f322a1863d4621353ae"
      )
    ]
-- | Hex-encoded signatures taken from real transaction inputs.
scriptSigSignatures :: [ String ]
scriptSigSignatures =
     -- Signature in input of txid 1983a69265920c24f89aac81942b1a59f7eb30821a8b3fb258f88882b6336053
    [ "304402205ca6249f43538908151fe67b26d020306c0e59fa206cf9f3ccf641f33357119d02206c82f244d04ac0a48024fb9cc246b66e58598acf206139bdb7b75a2941a2b1e401"
      -- Signature in input of txid fb0a1d8d34fa5537e461ac384bac761125e1bfa7fec286fa72511240fa66864d Strange DER sizes. But in Blockchain
    , "3048022200002b83d59c1d23c08efd82ee0662fec23309c3adbcbd1f0b8695378db4b14e736602220000334a96676e58b1bb01784cb7c556dd8ce1c220171904da22e18fe1e7d1510db501"
    ]
| nuttycom/haskoin | tests/Network/Haskoin/Script/Units.hs | unlicense | 6,250 | 0 | 12 | 734 | 831 | 465 | 366 | 84 | 1 |
{-
Copiers module.
https://github.com/denisshevchenko/ruhaskell
All rights belong to the Russian-speaking Haskell developer community, 2015.
-}
{-# LANGUAGE OverloadedStrings #-}
module Copiers (
justCopy,
justCreateAndCopy,
justCompressAndCopy
) where
import Hakyll
-- ΠΠ΅ΡΡΠΌ Π½Π΅ΡΡΠΎ Π³ΠΎΡΠΎΠ²ΠΎΠ΅ ΠΈ ΠΏΡΠΎΡΡΠΎ ΠΊΠΎΠΏΠΈΡΡΠ΅ΠΌ Π² ΠΈΡΠΎΠ³ΠΎΠ²ΡΠΉ ΡΠ°ΠΉΡ.
justCopy :: Pattern -> Rules ()
justCopy something = match something $ do
route idRoute
compile copyFileCompiler
-- Π‘ΠΎΠ·Π΄Π°ΡΠΌ Π½Π΅ΡΡΠΎ ΠΏΡΡΡΠΎΠ΅ ΠΈ ΠΏΡΠΎΡΡΠΎ ΠΊΠΎΠΏΠΈΡΡΠ΅ΠΌ Π² ΠΈΡΠΎΠ³ΠΎΠ²ΡΠΉ ΡΠ°ΠΉΡ.
justCreateAndCopy :: Identifier -> Rules ()
justCreateAndCopy something = create [something] $ do
route idRoute
compile copyFileCompiler
-- Π‘ΠΆΠΈΠΌΠ°Π΅ΠΌ Π½Π΅ΡΡΠΎ Π³ΠΎΡΠΎΠ²ΠΎΠ΅ ΠΈ ΠΏΡΠΎΡΡΠΎ ΠΊΠΎΠΏΠΈΡΡΠ΅ΠΌ Π² ΠΈΡΠΎΠ³ΠΎΠ²ΡΠΉ ΡΠ°ΠΉΡ.
justCompressAndCopy :: Pattern -> Rules ()
justCompressAndCopy something = match something $ do
route idRoute
compile compressCssCompiler
| akamch/ruhaskell-old | src/Copiers.hs | unlicense | 1,113 | 0 | 8 | 169 | 149 | 73 | 76 | 18 | 1 |
-----------------------------------------------------------------------------
-- Copyright 2012 Microsoft Corporation.
--
-- This is free software; you can redistribute it and/or modify it under the
-- terms of the Apache License, Version 2.0. A copy of the License can be
-- found in the file "license.txt" at the root of this distribution.
-----------------------------------------------------------------------------
module Type.InferMonad( Inf, InfGamma
, runInfer
-- * substitutation
, zapSubst
, subst, extendSub
-- * Environment
, getGamma
, extendGamma, extendGammaCore
, extendInfGamma, extendInfGammaCore
, withGammaType
-- * Name resolution
, qualifyName
, resolveName, resolveNameEx
, resolveFunName
, resolveConName
, lookupConName
, lookupFunName
, lookupNameEx, NameContext(..), maybeToContext
, lookupInfName
, getModuleName
, lookupImportName
, lookupNameN
, findDataInfo
, withDefName
, currentDefName
-- * Misc.
, allowReturn, isReturnAllowed
, withLhs, isLhs
, getPrettyEnv
, splitEffect
-- * Operations
, generalize
, improve
, instantiate, instantiateNoEx
, checkEmptyPredicates
, checkCasing
, normalize
-- * Unification
, Context(..)
, inferUnify, inferUnifies
, inferSubsume
, typeError
, contextError
, infError, infWarning
-- * Documentation, Intellisense
, addRangeInfo
) where
import Data.List( partition, sortBy)
import Lib.PPrint
import Common.Range
import Common.Unique
import Common.Failure
import Common.Error
import Common.Name
import Common.NamePrim(nameTpVoid,nameTpPure,nameTpIO,nameTpST,nameTpRead,nameTpWrite,namePredHeapDiv,nameReturn)
-- import Common.Syntax( DefSort(..) )
import Common.ColorScheme
import Kind.Kind
import Kind.ImportMap
import Kind.Newtypes
import Kind.Synonym
import Type.Type
import Type.TypeVar
import Type.Kind
import qualified Type.Pretty as Pretty
import qualified Core.Core as Core
import Type.Operations hiding (instantiate, instantiateNoEx)
import qualified Type.Operations as Op
import Type.Assumption
import Type.InfGamma
import Type.Unify
import Common.Message( docFromRange, table, tablex)
import Core.Pretty()
import Syntax.RangeMap( RangeMap, RangeInfo(..), rangeMapInsert )
import qualified Lib.Trace( trace )
-- | Debug-trace shim: the real 'Lib.Trace.trace' call is commented out,
-- so this currently ignores the message and returns @x@ unchanged.
trace s x =
  -- Lib.Trace.trace (" " ++ s)
  x
{--------------------------------------------------------------------------
Generalization
--------------------------------------------------------------------------}
-- | Generalize an inferred type with respect to the type variables free in
-- Gamma: split the pending predicates, simplify/resolve them, and quantify
-- over the meta variables that do not occur free in the environment,
-- wrapping the core expression in matching type lambdas. The first clause
-- instantiates an already-quantified type and retries; generalization only
-- happens if the effect unifies with 'typeTotal' ("Generalized values
-- cannot have an effect"). Remaining free meta variables in the core are
-- defaulted via 'substFree'.
generalize :: Range -> Range -> Effect -> Rho -> Core.Expr -> Inf (Scheme,Core.Expr )
generalize contextRange range eff tp@(TForall _ _ _) core0
  = {-
    trace ("generalize forall: " ++ show tp) $
    return (tp,core0)
    -}
    do seff <- subst eff
       stp <- subst tp
       free0 <- freeInGamma
       let free = tvsUnion free0 (fuv seff)
       ps0 <- splitPredicates free
       if (tvsIsEmpty (fuv ({- seff, -} stp)))
        then -- Lib.Trace.trace ("generalize forall: " ++ show (pretty stp)) $
             return (tp,core0)
        else -- Lib.Trace.trace ("generalize forall-inst: " ++ show (pretty seff, pretty stp) ++ " with " ++ show ps0) $
             do (rho,tvars,icore) <- instantiate range stp
                generalize contextRange range seff rho (icore core0)
generalize contextRange range eff0 rho0 core0
  = do seff <- subst eff0
       srho <- subst rho0
       free0 <- freeInGamma
       let free = tvsUnion free0 (fuv seff)
       ps0 <- splitPredicates free
       score0 <- subst core0
       sub <- getSub
       trace (" generalize: " ++ show (seff,srho) ++ " with " ++ show ps0
              {- ++ " and free " ++ show (tvsList free) -}
              {- ++ "\n subst=" ++ show (take 10 $ subList sub) -}
              {- ++ "\ncore: " ++ show score0 -})
         $ return ()
       -- simplify and improve predicates
       (ps1,(eff1,rho1),core1) <- simplifyAndResolve contextRange free ps0 (seff,srho)
       -- trace (" improved to: " ++ show (eff1,rho1) ++ " with " ++ show ps1 ++ " and free " ++ show (tvsList free) {- ++ "\ncore: " ++ show score0 -}) $ return ()
       let -- generalized variables
           tvars0 = filter (\tv -> not (tvsMember tv free)) (ofuv (TForall [] (map evPred ps1) rho1))
       if (null tvars0)
        then do addPredicates ps1 -- add them back to solve later (?)
                score <- subst (core1 core0)
                -- substitute more free variables in the core with ()
                let score1 = substFree free score
                nrho <- normalizeX free rho1
                trace (" generalized to (as rho type): " ++ show (nrho)) $ return ()
                return (nrho,score1)
        else do -- check that the computation is total
                inferUnify (Check "Generalized values cannot have an effect" contextRange) range typeTotal eff1
                -- simplify and improve again since we can have substituted more
                (ps2,(eff2,rho2),core2) <- simplifyAndImprove contextRange free ps1 (eff1,rho1)
                -- due to improvement, our constraints may need to be split again
                addPredicates ps2
                ps3 <- splitPredicates free
                -- simplify and improve again since we can have substituted more
                (ps4,(eff4,rho4),core4) <- simplifyAndImprove contextRange free ps3 (eff2,rho2)
                -- check for satisifiable constraints
                checkSatisfiable contextRange ps4
                score <- subst (core4 (core2 (core1 core0)))
                -- trace (" before normalize: " ++ show (eff4,rho4) ++ " with " ++ show ps4) $ return ()
                -- update the free variables since substitution may have changed it
                free1 <- freeInGamma
                let free = tvsUnion free1 (fuv eff4)
                -- (rho5,coref) <- isolate free rho4
                let rho5 = rho4
                    coref = id
                nrho <- normalizeX free rho5
                -- trace (" normalized: " ++ show (nrho) ++ " from " ++ show rho4) $ return ()
                let -- substitute to Bound ones
                    tvars = filter (\tv -> not (tvsMember tv free)) (ofuv (TForall [] (map evPred ps4) nrho))
                    bvars = [TypeVar id kind Bound | TypeVar id kind _ <- tvars]
                    bsub = subNew (zip tvars (map TVar bvars))
                    (TForall [] ps5 rho5) = bsub |-> (TForall [] (map evPred ps4) nrho)
                    -- core
                    core5 = Core.addTypeLambdas bvars $
                            bsub |-> score
                            -- no lambdas for now...
                            -- (Core.addLambda (map evName ps4) score)
                    resTp = quantifyType bvars (qualifyType ps5 rho5)
                -- extendSub bsub
                -- substitute more free variables in the core with ()
                let core6 = substFree free core5
                trace (" generalized to: " ++ show (resTp)) $ return ()
                return (resTp, core6)
  where
    -- Default any meta variables that remain free in the core but not in
    -- Gamma: effect-kinded ones become 'typeTotal', star-kinded ones
    -- 'typeVoid', anything else a made-up Void type constructor.
    substFree free core
      = let fvars = tvsDiff (ftv core) free
            tcon kind
              = if (kind == kindEffect)
                 then typeTotal
                 else if (kind == kindStar)
                  then typeVoid
                  else TCon (TypeCon nameTpVoid kind) -- make something up for now
        in if (tvsIsEmpty fvars)
            then core
            else let sub = subNew [(tv,tcon (getKind tv)) | tv <- tvsList fvars]
                 in sub |-> core
-- | Improve an inferred rho type without generalizing: split and resolve
-- the pending predicates, try to isolate heap effects, normalize the
-- result, and thread the core-transforming functions through.
improve :: Range -> Range -> Effect -> Rho -> Core.Expr -> Inf (Rho,Effect,Core.Expr )
improve contextRange range eff0 rho0 core0
  = do seff <- subst eff0
       srho <- subst rho0
       free <- freeInGamma
       -- let free = tvsUnion free0 (fuv seff)
       sps <- splitPredicates free
       score0 <- subst core0
       trace (" improve: " ++ show (seff,srho) ++ " with " ++ show sps ++ " and free " ++ show (tvsList free) {- ++ "\ncore: " ++ show score0 -}) $ return ()
       -- isolate: do first to discharge certain hdiv predicates.
       -- todo: in general, we must do this after some improvement since that can lead to substitutions that may enable isolation..
       (ps0,eff0,coref0) <- isolate (tvsUnions [free,ftv srho]) sps seff
       -- simplify and improve predicates
       (ps1,(eff1,rho1),coref1) <- simplifyAndResolve contextRange free ps0 (eff0,srho)
       addPredicates ps1 -- add unsolved ones back
       -- isolate
       -- (eff2,coref2) <- isolate (tvsUnions [free,ftv rho1,ftv ps1]) eff1
       (nrho) <- normalizeX free rho1
       -- trace (" improve normalized: " ++ show (nrho) ++ " from " ++ show rho1) $ return ()
       trace (" improved to: " ++ show (eff1,nrho) ++ " with " ++ show ps1) $ return ()
       return (nrho,eff1,coref1 (coref0 core0))
-- | Instantiate a type scheme to a rho type, registering its predicates
-- with the inference state; rho types are merely extended.
instantiate :: Range -> Scheme -> Inf (Rho,[TypeVar],Core.Expr -> Core.Expr)
instantiate range tp | isRho tp
  = do rho <- Op.extend tp
       return (rho,[],id)
instantiate range tp
  = do (tvars,ps,rho,coref) <- instantiateEx range tp
       addPredicates ps
       return (rho, tvars, coref)
-- | Like 'instantiate' but without extending rho types (no "Ex"tension).
instantiateNoEx :: Range -> Scheme -> Inf (Rho,[TypeVar],Core.Expr -> Core.Expr)
instantiateNoEx range tp | isRho tp
  = return (tp,[],id)
instantiateNoEx range tp
  = do (tvars,ps,rho,coref) <- Op.instantiateNoEx range tp
       addPredicates ps
       return (rho, tvars, coref)
-- | Automatically remove heap effects when safe to do so: if the effect
-- mentions a heap variable @h@ (via read/write labels) that does not
-- escape into Gamma or the remaining predicates, unify the effect with
-- @st\<h\>@ extended by a fresh tail and retry with the heap part removed.
isolate :: Tvs -> [Evidence] -> Effect -> Inf ([Evidence],Effect, Core.Expr -> Core.Expr)
isolate free ps eff
  = -- trace ("isolate: " ++ show eff ++ " with free " ++ show (tvsList free)) $
    let (ls,tl) = extractOrderedEffect eff
    in case filter (\l -> labelName l `elem` [nameTpRead,nameTpWrite]) ls of
        (TApp _ [TVar h] : _)
          -> -- has heap variable 'h' in its effect
             do (polyPs,ps1) <- splitHDiv h ps
                if not (tvsMember h free || tvsMember h (ftv ps1))
                 then do -- yeah, we can isolate, and discharge the polyPs hdiv predicates
                         tv <- freshTVar kindEffect Meta
                         (Just syn) <- lookupSynonym nameTpST
                         let [bvar] = synInfoParams syn
                             st = subNew [(bvar,TVar h)] |-> synInfoType syn
                         nofailUnify $ unify (effectExtend st tv) eff
                         neweff <- subst tv
                         sps <- subst ps1
                         -- return (sps, neweff, id) -- TODO: supply evidence (i.e. apply the run function)
                         -- and try again
                         isolate free sps neweff
                 else return (ps,eff,id)
        _ -> return (ps,eff,id)
  where
    -- | 'splitHDiv h ps' splits predicates 'ps'. Predicates of the form hdiv<h,tp,e> where tp does
    -- not contain h are returned as the first element, all others as the second. This includes
    -- constraints where hdiv<h,a,e> for example where a is polymorphic. Normally, we need to assume
    -- divergence conservatively in such case; however, when we isolate, we know it cannot be instatiated
    -- to contain a reference to h and it is safe to discharge them during isolation without implying
    -- divergence. See test\type\talpin-jouvelot1 for an example: fun rid(x) { r = ref(x); return !r }
    splitHDiv :: TypeVar -> [Evidence] -> Inf ([Evidence],[Evidence])
    splitHDiv heapTv []
      = return ([],[])
    splitHDiv heapTv (ev:evs)
      = do (evs1,evs2) <- splitHDiv heapTv evs
           let defaultRes = (evs1,ev:evs2)
           case evPred ev of
             PredIFace name [hp,tp,eff] | name == namePredHeapDiv
               -> do shp <- subst hp
                     case expandSyn shp of
                       h@(TVar tv) | tv == heapTv
                         -> do stp <- subst tp
                               if (not (h `elem` heapTypes stp))
                                then return (ev:evs1,evs2) -- even if polymorphic, we are ok if we isolate
                                else return defaultRes
                       _ -> return defaultRes
             _ -> return defaultRes
-- | Variance of a type position: negative (contravariant), invariant,
-- or positive (covariant).
data Variance = Neg | Inv | Pos
              deriving (Eq,Ord,Enum,Show)

-- | Flip the variance, as when descending into a function argument
-- position: positive and negative swap, invariant stays invariant.
-- (Added the missing top-level type signature, matching the rest of
-- the module's convention.)
vflip :: Variance -> Variance
vflip Neg = Pos
vflip Pos = Neg
vflip Inv = Inv
-- | Normalize a rho type relative to the type variables free in Gamma.
normalize :: Rho -> Inf Rho
normalize tp = freeInGamma >>= \free -> normalizeX free tp
-- | Normalize a rho type given the set of type variables that must stay
-- free: flatten effect rows, drop unconstrained meta tail variables in
-- positive result positions, and re-introduce effect synonyms via
-- 'nicefyEffect'. Variance tracking ('Neg'/'Inv'/'Pos') controls where
-- tails may be removed.
normalizeX :: Tvs -> Rho -> Inf Rho
normalizeX free tp
  = case tp of
      TForall [] [] t
        -> normalizeX free t
      TSyn syn targs t
        -> do t' <- normalizeX free t
              return (TSyn syn targs t')
      TFun args eff res
        -> do (ls,tl) <- nofailUnify $ extractNormalizeEffect eff
              eff' <- case expandSyn tl of
                        -- remove tail variables in the result type
                        (TVar tv) | isMeta tv && not (tvsMember tv free) && not (tvsMember tv (ftv (res:map snd args)))
                          -> return (effectFixed ls)
                        _ -> do ls' <- mapM (normalizex Pos) ls
                                tl' <- normalizex Pos tl
                                return (effectExtends ls' tl')
              args' <- mapM (\(name,arg) -> do{arg' <- normalizex Neg arg; return (name,arg')}) args
              res' <- normalizex Pos res
              niceEff <- nicefyEffect eff'
              return (TFun args' niceEff res')
      _ -> normalizex Pos tp
  where
    -- Recursive worker: invariant positions are left untouched.
    normalizex Inv tp
      = return tp
    normalizex var tp
      = case tp of
          TFun args eff res
            -> do (ls,tl) <- nofailUnify $ extractNormalizeEffect eff
                  eff' <- case expandSyn tl of
                            -- we can only do this if 'tl' does not also occur anywhere else without
                            -- the same label present...
                            -- see 'catch' and 'run' for example
                            {-
                            (TVar tv) | isMeta tv && var == Neg -- remove labels in extensible argument types
                              -> normalizex var tl
                            -}
                            _ -> do ls' <- mapM (normalizex var) ls
                                    tl' <- normalizex var tl
                                    return (effectExtends ls' tl')
                  args' <- mapM (\(name,arg) -> do{arg' <- normalizex (vflip var) arg; return (name,arg')}) args
                  res' <- normalizex var res
                  niceEff <- nicefyEffect eff'
                  return (TFun args' niceEff res')
          TForall vars preds t
            -> do t' <- normalizex var t
                  return (TForall vars preds t')
          TApp t args
            -> do t' <- normalizex var t
                  return (TApp t' args)
          TSyn syn args t
            -> do t' <- normalizex var t
                  return (TSyn syn args t')
          _ -> return tp
-- | Pretty up an effect row by folding runs of labels back into the
-- well-known effect synonyms (io, st, pure) where the labels form a
-- superset of the synonym's expansion; keeps synonyms in the rebuilt row.
nicefyEffect :: Effect -> Inf Effect
nicefyEffect eff
  = do let (ls,tl) = extractOrderedEffect eff
       ls' <- matchAliases [nameTpIO, nameTpST, nameTpPure] ls
       return (foldr (\l t -> TApp (TCon tconEffectExtend) [l,t]) tl ls') -- cannot use effectExtends since we want to keep synonyms
  where
    -- Try each alias in order, threading the not-yet-matched labels.
    matchAliases :: [Name] -> [Tau] -> Inf [Tau]
    matchAliases names ls
      = case names of
          [] -> return ls
          (name:ns)
            -> do (pre,post) <- tryAlias ls name
                  post' <- matchAliases ns post
                  return (pre ++ post')
    -- Try to replace a subset of the labels with a single synonym
    -- application; returns (replacement labels, leftover labels).
    tryAlias :: [Tau] -> Name -> Inf ([Tau],[Tau])
    tryAlias [] name
      = return ([],[])
    tryAlias ls name
      = do mbsyn <- lookupSynonym name
           case mbsyn of
             Nothing -> return ([],ls)
             Just syn
               -> let (ls2,tl2) = extractOrderedEffect (synInfoType syn)
                  in if (null ls2 || not (isEffectEmpty tl2))
                      then return ([],ls)
                      else let params = synInfoParams syn
                               (sls,insts) = findInsts params ls2 ls
                           in -- Lib.Trace.trace ("* try alias: " ++ show (synInfoName syn, ls, sls)) $
                              case (isSubset [] sls ls) of
                                Just rest
                                  -> -- Lib.Trace.trace (" synonym replace: " ++ show (synInfoName syn, ls, sls, rest)) $
                                     return ([TSyn (TypeSyn name (synInfoKind syn) (synInfoRank syn) (Just syn)) insts (effectFixed sls)], rest)
                                _ -> return ([], ls)
    -- Instantiate the synonym's parameters by matching one of its labels
    -- against an actual label in the row.
    findInsts :: [TypeVar] -> [Tau] -> [Tau] -> ([Tau],[Tau])
    findInsts [] ls _
      = (ls,[])
    findInsts params ls1 ls2
      = case filter matchParams ls1 of
          [] -> (ls1,map TVar params)
          (tp:_)
            -> let name = labelName tp
               in case filter (\t -> labelName t == name) ls2 of
                    (TApp _ args : _) | length args == length params
                      -> (subNew (zip params args) |-> ls1, args)
                    _ -> (ls1, map TVar params)
      where
        matchParams (TApp _ args) = (map TVar params == args)
        matchParams _ = False
    -- Ordered subset test on label rows; returns the labels NOT consumed
    -- by the subset when it matches.
    isSubset :: [Tau] -> [Tau] -> [Tau] -> Maybe [Tau]
    isSubset acc ls1 ls2
      = case (ls1,ls2) of
          ([],[]) -> Just (reverse acc)
          ([],(l2:ll2)) -> Just (reverse acc ++ ls2)
          (l1:ll1, []) -> Nothing
          (l1:ll1,l2:ll2)
            -> if (labelName l1 < labelName l2)
                then Nothing
                else if (labelName l1 > labelName l2)
                 then isSubset (l2:acc) ls1 ll2
                 else if (l1 == l2)
                  then isSubset acc ll1 ll2
                  else Nothing
-- | Split an effect row into its labels and (flattened) tail type.
splitEffect :: Effect -> Inf ([Tau],Effect)
splitEffect = nofailUnify . extractNormalizeEffect
-- | Simplify and improve constraints; no-op when there are none.
simplifyAndImprove :: Range -> Tvs -> [Evidence] -> (Effect,Type) -> Inf ([Evidence],(Effect,Type),Core.Expr -> Core.Expr)
simplifyAndImprove range free [] efftp
  = return ([],efftp,id)
simplifyAndImprove range free evs efftp
  = do (evs1,core1) <- improveEffects range free evs efftp
       efftp1 <- subst efftp
       return (evs1,efftp1,core1)
-- | Simplify and resolve constraints. Like 'simplifyAndImprove' but first
-- resolves heap-divergence (hdiv) predicates, which may add "div <= e"
-- constraints and therefore must run before 'improveEffects'.
simplifyAndResolve :: Range -> Tvs -> [Evidence] -> (Effect,Type) -> Inf ([Evidence],(Effect,Type),Core.Expr -> Core.Expr)
simplifyAndResolve range free [] efftp
  = return ([],efftp,id)
simplifyAndResolve range free evs efftp
  = do evs0 <- resolveHeapDiv free evs -- must be done *before* improveEffects since it can add "div <= e" constraints
       (evs1,core1) <- improveEffects range free evs0 efftp
       efftp1 <- subst efftp
       return (evs1,efftp1,core1)
-- | Resolve hdiv\<hp,tp,eff\> predicates: when the stored type may contain
-- the heap (or is still polymorphic, as a conservative guess) the effect
-- is unified with a row containing 'div'; otherwise the predicate is
-- discharged. Other predicates pass through untouched.
resolveHeapDiv :: Tvs -> [Evidence] -> Inf [Evidence]
resolveHeapDiv free []
  = return []
resolveHeapDiv free (ev:evs)
  = case evPred ev of
      PredIFace name [hp,tp,eff] | name == namePredHeapDiv
        -> trace (" resolveHeapDiv: " ++ show (hp,tp,eff)) $
           do stp <- subst tp
              shp <- subst hp
              let tvsTp = ftv stp
                  tvsHp = ftv hp
              if (expandSyn shp `elem` heapTypes stp ||
                  not (tvsIsEmpty (ftv stp)) -- conservative guess...
                 )
               then do -- return (ev{ evPred = PredSub typeDivergent eff } : evs')
                       tv <- freshTVar kindEffect Meta
                       let divEff = effectExtend typeDivergent tv
                       inferUnify (Infer (evRange ev)) (evRange ev) eff divEff
                       resolveHeapDiv free evs
               else resolveHeapDiv free evs -- definitely ok
      _ -> do evs' <- resolveHeapDiv free evs
              return (ev:evs')
-- | Collect all heap-kinded subtypes occurring anywhere in a type
-- (under foralls, predicates, function arguments/effects/results).
heapTypes :: Type -> [Type]
heapTypes tp
  = case expandSyn tp of
      TForall _ ps r -> concatMap heapTypesPred ps ++ heapTypes r
      TFun xs e r -> concatMap (heapTypes . snd) xs ++ heapTypes e ++ heapTypes r
      TApp t ts | getKind tp /= kindHeap
        -> concatMap heapTypes (t:ts)
      t -> if (getKind t == kindHeap) then [t] else []
-- Helper for 'heapTypes' over predicates.
heapTypesPred p
  = case p of
      PredSub t1 t2 -> heapTypes t1 ++ heapTypes t2
      PredIFace _ ts -> concatMap heapTypes ts
-- | Currently a no-op stub: returns the evidence unchanged with the
-- identity core transformer. Kept so callers have a single hook for
-- future effect improvement.
improveEffects :: Range -> Tvs -> [Evidence] -> (Effect,Type) -> Inf ([Evidence],Core.Expr -> Core.Expr)
improveEffects contextRange free evs etp
  = return (evs,id)
{--------------------------------------------------------------------------
  Satisfiable constraints
--------------------------------------------------------------------------}
-- | Simplify the remaining pending predicates against a trivial
-- (total, unit) type, store them back, and verify they are satisfiable;
-- returns the resulting core transformer.
checkEmptyPredicates :: Range -> Inf (Core.Expr -> Core.Expr)
checkEmptyPredicates contextRange
  = do free <- freeInGamma
       ps <- getPredicates
       (ps1,_,core1) <- simplifyAndImprove contextRange free ps (typeTotal,typeUnit)
       setPredicates ps1
       checkSatisfiable contextRange ps1
       return core1
-- | Check if all constraints are potentially satisfiable. Assumes that
-- the constraints have already been simplified and improved.
checkSatisfiable :: Range -> [Evidence] -> Inf ()
checkSatisfiable contextRange ps
  = do mapM_ check ps
  where
    -- Any remaining subsumption predicate is unsatisfiable at this point.
    check ev
      = case evPred ev of
          PredSub _ _ -> predicateError contextRange (evRange ev) "Constraint cannot be satisfied" (evPred ev)
          _ -> return ()
{--------------------------------------------------------------------------
  Unify Helpers
--------------------------------------------------------------------------}
-- | Why a unification is performed: 'Check' carries an explanatory message
-- and the range of the checked construct; 'Infer' only a range.
data Context = Check String Range
             | Infer Range
instance Ranged Context where
  getRange (Check _ rng) = rng
  getRange (Infer rng) = rng
-- | Unify two types (after applying the current substitution), reporting
-- a type error via 'unifyError' on failure.
inferUnify :: Context -> Range -> Type -> Type -> Inf ()
inferUnify context range expected tp
  = do (sexp,stp) <- subst (expected,tp)
       -- trace ("infer unify: " ++ show (sexp,stp)) $ return ()
       res <- doUnify (unify sexp stp)
       case res of
         Right () -> return ()
         Left err -> unifyError context range err sexp stp
-- | Unify a non-empty list of (range,type) pairs pairwise, left to right,
-- returning the common (substituted) type. Fails hard on an empty list.
inferUnifies :: Context -> [(Range,Type)] -> Inf Type
inferUnifies context tps
  = case tps of
      [] -> matchFailure "Type.InferMonad.inferUnifies"
      [(rng,tp)] -> return tp
      ((rng1,tp1):(rng2,tp2):rest)
        -> do let rng = combineRange rng1 rng2
              inferUnify context rng tp1 tp2
              tp <- subst tp1
              inferUnifies context ((rng,tp):rest)
-- | Check that @tp@ subsumes the @expected@ type, registering any new
-- predicates; on failure reports an error and returns the expected type
-- with an identity core transformer so inference can continue.
inferSubsume :: Context -> Range -> Type -> Type -> Inf (Type,Core.Expr -> Core.Expr)
inferSubsume context range expected tp
  = do free <- freeInGamma
       (sexp,stp) <- subst (expected,tp)
       -- trace ("inferSubsume: " ++ show (sexp,stp) ++ " with free " ++ show (tvsList free)) $ return ()
       res <- doUnify (subsume range free sexp stp)
       case res of
         Right (t,ps,coref) -> do addPredicates ps
                                  return (t,coref)
         Left err -> do unifyError context range err sexp stp
                        return (expected,id)
-- | Run a unification that the caller asserts cannot fail; the resulting
-- substitution is always applied, and a failure aborts with an internal
-- compiler error.
nofailUnify :: Unify a -> Inf a
nofailUnify u
  = do res <- runUnify u
       case res of
        (Right x,sub)
          -> do extendSub sub
                return x
        (Left err,sub)
          -> do extendSub sub
                failure ("Type.InferMonad.runUnify: should never fail!")
-- | Run a unification computation: always extend the current substitution
-- with whatever was accumulated, then hand back the outcome as-is.
doUnify :: Unify a -> Inf (Either UnifyError a)
doUnify u
  = do (outcome,sub) <- runUnify u
       extendSub sub
       return outcome
{--------------------------------------------------------------------------
  Unification errors
--------------------------------------------------------------------------}
-- | Report a unification failure: normalizes both types relative to
-- Gamma before delegating to 'unifyError'' for message construction.
-- Never returns normally (the @Inf a@ result is produced by 'infError').
unifyError :: Context -> Range -> UnifyError -> Type -> Type -> Inf a
unifyError context range err xtp1 xtp2
  = do free <- freeInGamma
       tp1 <- subst xtp1 >>= normalizeX free
       tp2 <- subst xtp2 >>= normalizeX free
       env <- getEnv
       unifyError' (prettyEnv env) context range err tp1 tp2
-- Build and emit the actual error message table for a unification
-- failure, tailoring the wording to the kind of 'UnifyError'.
unifyError' env context range err tp1 tp2
  = do infError range $
        text message <$>
        table ([(text "context", docFromRange (Pretty.colors env) rangeContext)
               ,(text "term", docFromRange (Pretty.colors env) range)
               ,(text ("inferred " ++ nameType), nice2)
               ]
               ++ nomatch
               ++ extra
               ++ hint
              )
  where
    -- The range/extra row depend on whether we were checking or inferring.
    (rangeContext,extra)
      = case context of
          Check msg range -> (range,[(text "because", text msg)])
          Infer range -> (range,[])
    [nice1,nice2]
      = Pretty.niceTypes showEnv [tp1,tp2]
    -- Show kinds only when the mismatch is a kind mismatch.
    showEnv
      = case err of
          NoMatchKind -> env{ Pretty.showKinds = True }
          _ -> env
    nomatch
      = case err of
          NoSubsume -> [(text "is less polymorph as",nice1)]
          NoEntail -> [(text "is not entailed by",nice1)]
          NoArgMatch _ _ -> []
          _ -> [(text ("expected " ++ nameType),nice1)]
    -- Call it an "effect" when the types are effect-kinded.
    nameType
      = if (getKind tp1 == kindEffect)
         then "effect"
         else "type"
    (message,hint)
      = case err of
          NoMatch -> (nameType ++ "s do not match",[])
          NoMatchKind -> ("kinds do not match",[])
          NoMatchPred -> ("predicates do not match",[])
          NoSubsume -> ("type is not polymorph enough",[])
          NoEntail -> ("predicates cannot be resolved",[])
          Infinite -> ("types do not match (due to an infinite type)",[(text "hint",text "annotate the function definition?")])
          NoArgMatch n m -> if (m<0)
                             then ("only functions can be applied",[])
                             else ("application has too " ++ (if (n > m) then "few" else "many") ++ " arguments"
                                  ,[(text "hint",text ("expecting " ++ show n ++ " argument" ++ (if n == 1 then "" else "s") ++ " but has been given " ++ show m))])
-- | Report an unsatisfiable predicate (after substitution).
predicateError :: Range -> Range -> String -> Pred -> Inf ()
predicateError contextRange range message pred
  = do env <- getEnv
       spred <- subst pred
       predicateError' (prettyEnv env) contextRange range message spred
-- Emit the predicate-error table (context, origin, constraint).
predicateError' env contextRange range message pred
  = do infError range $
        text message <$>
        table [(text "context", docFromRange (Pretty.colors env) contextRange)
              ,(text "origin", docFromRange (Pretty.colors env) range)
              ,(text "constraint", nicePred)
              ]
  where
    nicePred = Pretty.ppPred env pred
-- | Report a type error with the (substituted and normalized) offending
-- type plus caller-supplied extra table rows.
typeError :: Range -> Range -> Doc -> Type -> [(Doc,Doc)] -> Inf ()
typeError contextRange range message xtp extra
  = do env <- getEnv
       free <- freeInGamma
       tp <- subst xtp >>= normalizeX free
       typeError' (prettyEnv env) contextRange range message tp extra
-- Emit the type-error table (context, term, inferred type, extras).
typeError' env contextRange range message tp extra
  = do infError range $
        message <$>
        table ([(text "context", docFromRange (Pretty.colors env) contextRange)
               ,(text "term", docFromRange (Pretty.colors env) range)
               ,(text "inferred type", Pretty.niceType env tp)
               ] ++ extra)
-- | Report an error showing only the context and term ranges, with any
-- caller-supplied extra table rows.
contextError :: Range -> Range -> Doc -> [(Doc,Doc)] -> Inf ()
contextError ctxRange rng msg extra
  = getEnv >>= \env -> contextError' (prettyEnv env) ctxRange rng msg extra
-- Emit the context-error table (context, term, extras).
contextError' env contextRange range message extra
  = do infError range $
        message <$>
        table ([(text "context", docFromRange (Pretty.colors env) contextRange)
               ,(text "term", docFromRange (Pretty.colors env) range)
               ]
               ++ extra)
{--------------------------------------------------------------------------
Inference monad
--------------------------------------------------------------------------}
-- | The type-inference monad: a function over a read-only environment
-- and a threaded state, producing a 'Res' result.
data Inf a  = Inf (Env -> St -> Res a)

-- | Result of an inference step: success with a value, final state and
-- accumulated warnings, or failure with an error plus warnings so far.
data Res a  = Ok !a !St ![(Range,Doc)]
            | Err !(Range,Doc) ![(Range,Doc)]

-- | Read-only environment of the inference monad.
data Env = Env{ prettyEnv :: !Pretty.Env  -- ^ pretty-printing settings
              , context :: !Name          -- ^ current module name
              , currentDef :: !Name       -- ^ name of the definition being inferred
              , types :: !Newtypes
              , synonyms :: !Synonyms
              , gamma :: !Gamma           -- ^ (global) type assumptions
              , infgamma :: !InfGamma     -- ^ local assumptions during inference
              , imports :: !ImportMap
              , returnAllowed :: Bool     -- ^ is a return expression allowed here?
              , inLhs :: Bool             -- ^ are we inferring a left-hand side?
              }

-- | Threaded state: unique-name supply, current substitution, pending
-- predicates (evidence), and an optional range map (for IDE info).
data St  = St{ uniq :: !Int, sub :: !Sub, preds :: ![Evidence], mbRangeMap :: Maybe RangeMap }
-- | Run an inference computation with the given environments and unique
-- seed. On success the final substitution is applied to the range map;
-- collected warnings are attached to the result in either case.
runInfer :: Pretty.Env -> Maybe RangeMap -> Synonyms -> Newtypes -> ImportMap -> Gamma -> Name -> Int -> Inf a -> Error (a,Int,Maybe RangeMap)
runInfer env mbrm syns newTypes imports assumption context unique (Inf f)
  = case f (Env env context (newName "") newTypes syns assumption infgammaEmpty imports False False) (St unique subNull [] mbrm) of
      Err err warnings -> addWarnings warnings (errorMsg (ErrorType [err]))
      Ok x st warnings -> addWarnings warnings (ok (x,uniq st, (sub st) |-> mbRangeMap st))
-- | Clear the current substitution (after applying it to the range
-- map). Only valid when the local assumptions and the pending
-- predicates are empty; both are checked via assertions.
zapSubst :: Inf ()
zapSubst
  = do env <- getEnv
       assertion "not an empty infgamma" (infgammaIsEmpty (infgamma env)) $
        do updateSt (\st -> assertion "no empty preds" (null (preds st)) $
                            st{ sub = subNull, preds = [], mbRangeMap = (sub st) |-> mbRangeMap st } ) -- this can be optimized further by splitting the rangemap into a 'substited part' and a part that needs to be done..
           return ()
instance Functor Inf where
  -- map over the result value; state and warnings are untouched
  fmap f (Inf i) = Inf (\env st -> case i env st of
                                     Ok x st1 w -> Ok (f x) st1 w
                                     Err err w  -> Err err w)

-- NOTE(review): on GHC >= 7.10 'Monad' requires an 'Applicative'
-- superclass instance; presumably provided elsewhere or this code
-- targets an older compiler -- confirm before upgrading GHC.
instance Monad Inf where
  return x = Inf (\env st -> Ok x st [])
  -- sequence two steps, threading the state and concatenating warnings in order
  (Inf i) >>= f = Inf (\env st0 -> case i env st0 of
                                     Ok x st1 w1 -> case f x of
                                                      Inf j -> case j env st1 of
                                                                 Ok y st2 w2 -> Ok y st2 (w1++w2)
                                                                 Err err w2  -> Err err (w1++w2)
                                     Err err w -> Err err w)

instance HasUnique Inf where
  -- return the current unique and update it with @f@
  updateUnique f = Inf (\env st -> Ok (uniq st) st{uniq = f (uniq st)} [])
-- | Return the read-only environment.
getEnv :: Inf Env
getEnv
  = Inf (\env st -> Ok env st [])

-- | Run a computation in a locally modified environment.
withEnv :: (Env -> Env) -> Inf a -> Inf a
withEnv f (Inf i)
  = Inf (\env st -> i (f env) st)

-- | Update the state with @f@; returns the /old/ state.
updateSt :: (St -> St) -> Inf St
updateSt f
  = Inf (\env st -> Ok st (f st) [])

-- | Abort inference with an error at the given range; the error is also
-- recorded in the range map (for IDE diagnostics).
infError :: Range -> Doc -> Inf a
infError range doc
  = do addRangeInfo range (Error doc)
       Inf (\env st -> Err (range,doc) [])

-- | Emit a warning at the given range and continue; the warning is also
-- recorded in the range map.
infWarning :: Range -> Doc -> Inf ()
infWarning range doc
  = do addRangeInfo range (Warning doc)
       Inf (\env st -> Ok () st [(range,doc)])
-- | Retrieve the pretty-printing environment.
getPrettyEnv :: Inf Pretty.Env
getPrettyEnv
  = fmap prettyEnv getEnv
-- | Look up a type synonym by name.
lookupSynonym :: Name -> Inf (Maybe SynInfo)
lookupSynonym name
  = do env <- getEnv
       return (synonymsLookup name (synonyms env) )

-- | Record range information; a no-op when no range map is maintained.
addRangeInfo :: Range -> RangeInfo -> Inf ()
addRangeInfo rng info
  = Inf (\env st -> Ok () (st{ mbRangeMap = case mbRangeMap st of { Just rm -> Just (rangeMapInsert rng info rm); other -> other }}) [])
{--------------------------------------------------------------------------
Helpers
--------------------------------------------------------------------------}
-- | Return the current state.
getSt :: Inf St
getSt
  = updateSt id

-- | Replace the state; returns the /previous/ state.
setSt :: St -> Inf St
setSt st
  = updateSt (const st)

-- | Locally set whether a return expression is allowed.
allowReturn :: Bool -> Inf a -> Inf a
allowReturn allow inf
  = withEnv (\env -> env{ returnAllowed = allow }) inf

-- | Run a computation in left-hand-side mode.
withLhs :: Inf a -> Inf a
withLhs inf
  = withEnv (\env -> env{ inLhs = True }) inf

-- | Are we currently inferring a left-hand side?
isLhs :: Inf Bool
isLhs
  = do env <- getEnv
       return (inLhs env)

-- | Is a return expression allowed in the current context?
isReturnAllowed :: Inf Bool
isReturnAllowed
  = do env <- getEnv
       return (returnAllowed env)
-- | Return the current substitution.
getSub :: Inf Sub
getSub
  = do st <- getSt
       return (sub st)

-- | Apply the current substitution to @x@.
subst :: HasTypeVar a => a -> Inf a
subst x
  = do sub <- getSub
       return (sub |-> x)

-- | Compose a substitution onto the current one.
extendSub :: Sub -> Inf ()
extendSub s
  = do -- trace ("Type.InferMonad.extendSub: " ++ show (subList s)) $
       updateSt (\st -> st{ sub = s @@ (sub st) })
       return ()

-- | Run a computation and report whether it extended the substitution.
-- (assumes the substitution only ever grows, so a changed entry count
-- signals an extension -- TODO confirm)
substWatch :: Inf a -> Inf (Bool,a)
substWatch inf
  = do sub1 <- getSub
       x <- inf
       sub2 <- getSub
       return (subCount sub1 /= subCount sub2, x)

-- | Return the (global) type assumptions.
getGamma :: Inf Gamma
getGamma
  = do env <- getEnv
       return (gamma env)
-- | Extend gamma with the names bound by a list of core definition
-- groups, for the duration of the given computation.
extendGammaCore :: Bool -> [Core.DefGroup] -> Inf a -> Inf (a)
extendGammaCore isAlreadyCanonical [] inf
  = inf
extendGammaCore isAlreadyCanonical (coreGroup:coreDefss) inf
  = -- Lib.Trace.trace ("extend gamma: " ++ show (zip (nameSchemes coreDefs) (nameInfos coreDefs))) $
    extendGamma isAlreadyCanonical (nameInfos coreGroup) (extendGammaCore isAlreadyCanonical coreDefss inf)
  where
    -- name/info pairs bound by one definition group
    nameInfos (Core.DefRec defs)   = map coreDefInfo defs
    nameInfos (Core.DefNonRec def) = [coreDefInfo def]
-- | Extend gamma with name/info pairs for the duration of the given
-- computation. Unless the entries are already canonical, each one gets
-- a canonical name derived from the number of earlier local matches;
-- overlap with earlier local definitions is checked via 'checkNoOverlap'.
extendGamma :: Bool -> [(Name,NameInfo)] -> Inf a -> Inf (a)
extendGamma isAlreadyCanonical defs inf
  = do env <- getEnv
       gamma' <- extend (context env) defs (gamma env)
       withEnv (\env -> env{ gamma = gamma' }) inf
  where
    extend ctx [] gamma
      = return gamma
    extend ctx ((name,info):rest) gamma
      = do let matches = gammaLookup name gamma
               -- earlier non-import definitions of the same (namespaced) name in this module
               localMatches = [(qname,info) | (qname,info) <- matches, not (isInfoImport info), qualifier qname == ctx || qualifier qname == nameNil, isSameNamespace qname name ]
           -- only run for the error side effect: use mapM_ (the results were discarded before)
           mapM_ (checkNoOverlap ctx name info) localMatches
           -- trace (" extend gamma: " ++ show (name,info)) $
           let cinfo
                 = -- if null localMatches then (info) else
                   if isAlreadyCanonical then info else
                    let cname = Core.canonicalName (length localMatches) (if isQualified name then name else qualify ctx name)
                    in case info of
                         InfoVal{}      -> info{ infoCName = cname } -- during recursive let's we use InfoVal sometimes for functions..
                         InfoFun{}      -> info{ infoCName = cname }
                         InfoExternal{} -> info{ infoCName = cname }
                         _              -> info
           -- Lib.Trace.trace (" extend gamma: " ++ show (name,infoType info) ++ " with " ++ show (infoCanonicalName name cinfo) ++ " (matches: " ++ show (length matches,ctx,map fst matches)) $
           extend ctx rest (gammaExtend name cinfo gamma)
-- | Check that a new definition does not illegally overlap an existing
-- definition of the same name: only function definitions may share a
-- name, and then only when their argument types differ.
checkNoOverlap :: Name -> Name -> NameInfo -> (Name,NameInfo) -> Inf ()
checkNoOverlap ctx name info (name2,info2)
  = do checkCasingOverlap (infoRange info) name name2 info
       free <- freeInGamma
       res <- runUnify (overlaps (infoRange info) free (infoType info) (infoType info2))
       case fst res of
         Right _ ->
           -- the types overlap: report an appropriate error
           do env <- getEnv
              let [nice1,nice2] = Pretty.niceTypes (prettyEnv env) [infoType info,infoType info2]
                  (_,_,rho1)    = splitPredType (infoType info)
                  (_,_,rho2)    = splitPredType (infoType info2)
                  -- only if both are functions can the name be overloaded at all
                  -- (was previously computed as the unused binding 'valueType')
                  bothFun       = isFun rho1 && isFun rho2
              if bothFun
                then infError (infoRange info) (text "definition" <+> Pretty.ppName (prettyEnv env) name <+> text "overlaps with an earlier definition of the same name" <$>
                                                table ([(text "type",nice1)
                                                       ,(text "overlaps",nice2)
                                                       ,(text "because", text "definitions with the same name must differ on the argument types")])
                                               )
                else infError (infoRange info) (text "definition" <+> Pretty.ppName (prettyEnv env) name <+> text "is already defined in this module" <$>
                                                text "because: only functions can have overloaded names")
         Left _ -> return ()  -- no overlap: ok
-- | Extend the local assumptions with the names bound by a list of core
-- definition groups, for the duration of the given computation.
extendInfGammaCore :: Bool -> [Core.DefGroup] -> Inf a -> Inf a
extendInfGammaCore topLevel [] inf
  = inf
extendInfGammaCore topLevel (coreDefs:coreDefss) inf
  = extendInfGamma topLevel (extracts coreDefs) (extendInfGammaCore topLevel coreDefss inf)
  where
    -- name/info pairs bound by one definition group
    extracts (Core.DefRec defs)   = map extract defs
    extracts (Core.DefNonRec def) = [extract def]
    extract def
      = coreDefInfo def -- (Core.defName def,(Core.defNameRange def, Core.defType def, Core.defSort def))
-- | Extend the local assumptions (infgamma) with name/info pairs for
-- the duration of the given computation. A name defined twice in the
-- same batch is an error; shadowing an earlier local binding only
-- produces a warning. Wildcard names are skipped.
extendInfGamma :: Bool -> [(Name,NameInfo)] -> Inf a -> Inf a
extendInfGamma topLevel tnames inf
  = do env <- getEnv
       infgamma' <- extend (context env) (gamma env) [] [(unqualify name,info) | (name,info) <- tnames, not (isWildcard name)] (infgamma env)
       withEnv (\env -> env{ infgamma = infgamma' }) inf
  where
    extend :: Name -> Gamma -> [(Name,NameInfo)] -> [(Name,NameInfo)] -> InfGamma -> Inf InfGamma
    extend ctx gamma seen [] infgamma
      = return infgamma
    extend ctx gamma seen (x@(name,info):rest) infgamma
      = do let qname = infoCanonicalName name info
               range = infoRange info
               tp    = infoType info
           case (lookup name seen) of
             -- defined twice in the same batch: error
             Just (info2)
               -> do checkCasingOverlap range name (infoCanonicalName name info2) info2
                     env <- getEnv
                     infError range (Pretty.ppName (prettyEnv env) name <+> text "is already defined at" <+> pretty (show (infoRange info2))
                                     <$> text " hint: if these are potentially recursive definitions, give a full type signature to disambiguate them.")
             Nothing
               -> do case (infgammaLookupX name infgamma) of
                       -- shadowing an earlier local binding: warn
                       -- (the special 'return' binding is exempt)
                       Just info2 | infoCanonicalName name info2 /= nameReturn
                         -> do checkCasingOverlap range name (infoCanonicalName name info2) info2
                               env <- getEnv
                               infWarning range (Pretty.ppName (prettyEnv env) name <+> text "shadows an earlier local definition or parameter")
                       _ -> return ()
                     extend ctx gamma (x:seen) rest (infgammaExtend qname (info{ infoCName = if topLevel then createCanonicalName ctx gamma qname else qname}) infgamma)

    -- derive a canonical name from the number of matching local
    -- (non-import) definitions already present in gamma
    createCanonicalName ctx gamma qname
      = let matches = gammaLookup (unqualify qname) gamma
            localMatches = [(qname,info) | (qname,info) <- matches, not (isInfoImport info), qualifier qname == ctx || qualifier qname == nameNil ]
            cname = Core.canonicalName (length localMatches) qname
        in cname
-- | Bind a fresh local name with the given type so that the type is
-- taken into account by 'freeInGamma' during the given computation.
withGammaType :: Range -> Type -> Inf a -> Inf a
withGammaType range tp inf
  = do defName <- currentDefName
       name <- uniqueName (show defName)
       extendInfGamma False [(name,(InfoVal name tp range False))] inf

-- | Name of the definition currently being inferred.
currentDefName :: Inf Name
currentDefName
  = do env <- getEnv
       return (currentDef env)

-- | Set the current definition name for the given computation.
withDefName :: Name -> Inf a -> Inf a
withDefName name inf
  = withEnv (\env -> env{ currentDef = name }) inf

-- | Qualify a name with the current module name.
qualifyName :: Name -> Inf Name
qualifyName name
  = do env <- getEnv
       return (qualify (context env) name)

-- | The name of the module being compiled.
getModuleName :: Inf Name
getModuleName
  = do env <- getEnv
       return (context env)

-- | Free unifiable type variables of the local assumptions, after
-- applying the current substitution.
freeInGamma :: Inf Tvs
freeInGamma
  = do env <- getEnv
       sub <- getSub
       return (fuv (sub |-> (infgamma env)))
-- | Split the pending predicates: return (and remove from the state)
-- those whose type variables are either all determined or not entirely
-- among the given free variables; the rest stay pending.
splitPredicates :: Tvs -> Inf [Evidence]
splitPredicates free
  = do st <- getSt
       ps <- subst (preds st)
       let (ps0,ps1) = -- partition (\p -> not (tvsIsEmpty (tvsDiff (fuv p) free))) ps
                       partition (\p -> let tvs = (fuv p) in (tvsIsEmpty tvs || not (tvsIsEmpty (tvsDiff tvs free)))) ps
       setSt (st{ preds = ps1 })
       -- trace ("splitpredicates: " ++ show (ps0,ps1)) $ return ()
       return ps0

-- | Add predicates to the pending set.
addPredicates :: [Evidence] -> Inf ()
addPredicates []
  = return ()
addPredicates ps
  = do updateSt (\st -> st{ preds = (preds st) ++ ps })
       return ()

-- | The pending predicates, with the current substitution applied.
getPredicates :: Inf [Evidence]
getPredicates
  = do st <- getSt
       subst (preds st)

-- | Replace the pending predicates.
setPredicates :: [Evidence] -> Inf ()
setPredicates ps
  = do updateSt (\st -> st{ preds = ps })
       return ()

-- | Look up the data type information for a type name; an unknown type
-- is a compiler failure (not a user error).
findDataInfo :: Name -> Inf DataInfo
findDataInfo typeName
  = do env <- getEnv
       case newtypesLookup typeName (types env) of
         Just info -> return info
         Nothing   -> failure ("Type.InferMonad.findDataInfo: unknown type: " ++ show typeName ++ "\n in: " ++ show (types env))
-- | Lookup a name with a certain type and return the fully qualified name and its type
resolveName :: Name -> Maybe(Type,Range) -> Range -> Inf (Name,Type,NameInfo)
resolveName name mbType range
  = case mbType of
      Just (tp,ctxRange) -> resolveNameEx infoFilter (Just infoFilterAmb) name (CtxType tp) ctxRange range
      Nothing            -> resolveNameEx infoFilter (Just infoFilterAmb) name CtxNone range range
  where
    -- candidates must be values, functions, or externals
    infoFilter = isInfoValFunExt
    -- for "perhaps you meant" suggestions: anything that is not an import
    infoFilterAmb = not . isInfoImport

-- | Lookup a name with a number of arguments and return the fully qualified name and its type
resolveFunName :: Name -> NameContext -> Range -> Range -> Inf (Name,Type,NameInfo)
resolveFunName name ctx rangeContext range
  = resolveNameEx infoFilter (Just infoFilterAmb) name ctx rangeContext range
  where
    infoFilter = isInfoValFunExt
    infoFilterAmb = not . isInfoImport

-- | Lookup a constructor name and return the fully qualified name, its
-- type, its representation, and its constructor information.
resolveConName :: Name -> Maybe (Type) -> Range -> Inf (Name,Type,Core.ConRepr,ConInfo)
resolveConName name mbType range
  = do (qname,tp,info) <- resolveNameEx isInfoCon Nothing name (maybeToContext mbType) range range
       return (qname,tp,infoRepr info,infoCon info)
-- | Core name-resolution worker. A unique match is returned after a
-- casing check; no match produces a context-dependent error message
-- (possibly with candidates or "perhaps you meant" suggestions), and
-- multiple matches are an ambiguity error.
resolveNameEx :: (NameInfo -> Bool) -> Maybe (NameInfo -> Bool) -> Name -> NameContext -> Range -> Range -> Inf (Name,Type,NameInfo)
resolveNameEx infoFilter mbInfoFilterAmb name ctx rangeContext range
  = do matches <- lookupNameEx infoFilter name ctx range
       case matches of
         [] -> do -- no match in this context: retry without a context to
                  -- construct a better error message
                  amb <- case ctx of
                           CtxNone -> return []
                           _       -> lookupNameEx infoFilter name CtxNone range
                  env <- getEnv
                  let penv = prettyEnv env
                      ctxTerm rangeContext = [(text "context", docFromRange (Pretty.colors penv) rangeContext)
                                             ,(text "term", docFromRange (Pretty.colors penv) range)]
                  case (ctx,amb) of
                    (CtxType tp, [(qname,info)])
                      -> do let [nice1,nice2] = Pretty.niceTypes penv [tp,infoType info]
                            infError range (text "identifier" <+> Pretty.ppName penv name <+> text "does not match the argument types" <$>
                                            table (ctxTerm rangeContext ++
                                                   [(text "inferred type",nice2)
                                                   ,(text "expected type",nice1)]))
                    (CtxType tp, (_:rest))
                      -> infError range (text "identifier" <+> Pretty.ppName penv name <+> text "has no matching definition" <$>
                                         table (ctxTerm rangeContext ++
                                                [(text "inferred type", Pretty.niceType penv tp)
                                                ,(text "candidates", align (tablex 0 (ppCandidates env "" amb)))]))
                    (CtxFunArgs fixed named, (_:rest))
                      -> do let message = "takes " ++ show (fixed + length named) ++ " argument(s)" ++
                                          (if null named then "" else " with such parameter names")
                            infError range (text "no function" <+> Pretty.ppName penv name <+> text message <> ppAmbiguous env "" amb)
                    (CtxFunTypes partial fixed named, (_:rest))
                      -> do -- render the attempted argument types as "(t1, n : t2, ...) -> ..."
                            let docs  = Pretty.niceTypes penv (fixed ++ map snd named)
                                fdocs = take (length fixed) docs
                                ndocs = [color (colorParameter (Pretty.colors penv)) (pretty n <+> text ":") <+> tpdoc |
                                         ((_,n),tpdoc) <- zip named (drop (length fixed) docs)]
                                pdocs = if partial then [text "..."] else []
                                argsDoc = color (colorType (Pretty.colors penv)) $
                                          parens (hsep (punctuate comma (fdocs ++ ndocs ++ pdocs))) <+>
                                          text "-> ..."
                            infError range (text "no function" <+> Pretty.ppName penv name <+> text "is defined that matches the argument types" <$>
                                            table (ctxTerm rangeContext ++
                                                   [(text "inferred type", argsDoc)
                                                   ,(text "candidates", align (tablex 0 (ppCandidates env "" amb)))]))
                    _ -> do -- truly undefined: possibly suggest near misses
                            amb2 <- case mbInfoFilterAmb of
                                      Just infoFilterAmb -> lookupNameEx infoFilterAmb name ctx range
                                      Nothing            -> return []
                            case amb2 of
                              (_:_)
                                -> infError range ((text "identifier" <+> Pretty.ppName penv name <+> text "is undefined") <$>
                                                   (text "perhaps you meant: " <> ppOr penv (map fst amb2)))
                              _ -> infError range (text "identifier" <+> Pretty.ppName penv name <+> text "is undefined")
         [(qname,info)]
           -> do checkCasing range name qname info
                 return (qname,infoType info,info)
         _ -> do env <- getEnv
                 infError range (text "identifier" <+> Pretty.ppName (prettyEnv env) name <+> text "is ambiguous" <> ppAmbiguous env hintTypeSig matches)
  where
    hintTypeSig = "give a type annotation to the function parameters or arguments"
-- | Check casing agreement among multiple matching definitions;
-- deliberately a no-op (see the rationale below).
checkCasingOverlaps :: Range -> Name -> [(Name,NameInfo)] -> Inf ()
checkCasingOverlaps range name matches
  = -- this is called when various definitions (possibly from different modules) match with a name
    -- we could check here that all these definitions agree on the casing
    -- .. but I think it is better to only complain if the actual definition
    -- used has a different casing to reduce potential conflicts between modules
    return ()

-- | Error when @name@ is already in scope under a different casing.
checkCasingOverlap :: Range -> Name -> Name -> NameInfo -> Inf ()
checkCasingOverlap range name qname info
  = do case caseOverlaps name qname info of
         Just qname1
           -> do env <- getEnv
                 infError range (text (infoElement info) <+> Pretty.ppName (prettyEnv env) (unqualify name) <+> text "is already in scope with a different casing as" <+> Pretty.ppName (prettyEnv env) (importsAlias qname1 (imports env)))
         _ -> return ()

-- | Error when @name@ is written with a different casing than its
-- resolved definition @qname@.
checkCasing :: Range -> Name -> Name -> NameInfo -> Inf ()
checkCasing range name qname info
  = do case caseOverlaps name qname info of
         Nothing -> return ()
         Just qname1
           -> do env <- getEnv
                 infError range (text (infoElement info) <+> Pretty.ppName (prettyEnv env) (unqualify name) <+> text "should be cased as" <+> Pretty.ppName (prettyEnv env) (importsAlias qname1 (imports env)))

-- | Return @Just@ the clashing (possibly aliased) name when @name@
-- differs from @qname@ only in casing, and @Nothing@ otherwise.
caseOverlaps :: Name -> Name -> NameInfo -> (Maybe Name)
caseOverlaps name qname info
  = let qname1 = case info of
                   InfoImport{infoAlias = alias} -> alias  -- compare against the import alias
                   _ -> qname
    in if (nameCaseOverlap ((if isQualified name then id else unqualify) (Core.nonCanonicalName qname1)) name)
        then Just qname1
        else Nothing
-- | Pretty print a list of names separated by commas and a final "or".
ppOr :: Pretty.Env -> [Name] -> Doc
ppOr env []     = empty
ppOr env [name] = Pretty.ppName env name
ppOr env names  = hcat (map (\name -> Pretty.ppName env name <> text ", ") (init names)) <+> text "or" <+> Pretty.ppName env (last names)

-- | Render an ambiguity message listing the candidate definitions.
ppAmbiguous :: Env -> String -> [(Name,NameInfo)] -> Doc
ppAmbiguous env hint infos
  = text ". Possible candidates: " <$> table (ppCandidates env hint infos)

-- | Tabulate candidate definitions, showing at most six: definitions
-- from the current module sort first, then rho types; a non-empty hint
-- is appended as a final row.
ppCandidates :: Env -> String -> [(Name,NameInfo)] -> [(Doc,Doc)]
ppCandidates env hint nameInfos
  = let penv = prettyEnv env
        modName = context env
        n = 6  -- maximal number of candidates shown
        sorted = sortBy (\(name1,info1) (name2,info2) ->
                          if (qualifier name1 == modName && qualifier name2 /= modName)
                           then LT
                          else if (qualifier name1 /= modName && qualifier name2 == modName)
                           then GT
                           else compare (not (isRho (infoType info1))) (not (isRho (infoType info2)))
                        ) nameInfos
        (defs,rest) = splitAt n sorted
    in (if null rest
         then map (ppNameInfo env) defs
         else map (ppNameInfo env) (init defs) ++ [(text "...", text "or" <+> pretty (length rest + 1) <+> text "other definitions")])
       ++
       (if (null hint) then [] else [(text "hint",text hint)])

-- | Pretty print one candidate as (aliased name, type).
ppNameInfo env (name,info)
  = (Pretty.ppName (prettyEnv env) (importsAlias name (imports env)), Pretty.ppType (prettyEnv env) (infoType info))
-- | Look up a module (import) name; reports an error when undefined and
-- returns 'Nothing' when the name resolves to a non-import.
lookupImportName :: Name -> Range -> Inf (Maybe (Name,NameInfo))
lookupImportName name range
  = do matches <- lookupNameEx (const True) name CtxNone range
       case matches of
         [] -> do env <- getPrettyEnv
                  infError range (text "identifier" <+> Pretty.ppName env name <+> text "is undefined")
                  return Nothing
         _  -> case filter (isInfoImport . snd) matches of
                 []         -> return Nothing
                 [(_,info)] -> return (Just (infoFullName info, info))
                 _          -> failure ("Type.InferMonad.lookupImportName: " ++ show name ++ ": is ambiguous?")

-- | Look up a constructor; 'Nothing' when undefined, error when ambiguous.
lookupConName :: Name -> Maybe (Type) -> Range -> Inf (Maybe (Name,Type,NameInfo))
lookupConName name mbType range
  = do matches <- lookupNameEx isInfoCon name (maybeToContext mbType) range
       case matches of
         []            -> return Nothing
         [(name,info)] -> return (Just (name,infoType info,info))
         _ -> do env <- getEnv
                 infError range (text "constructor" <+> Pretty.ppName (prettyEnv env) name <+> text "is ambiguous" <> ppAmbiguous env hintQualify matches)
  where
    hintQualify = "qualify the constructor name to disambiguate it"

-- | Look up a function; 'Nothing' when undefined, error when ambiguous.
lookupFunName :: Name -> Maybe (Type,Range) -> Range -> Inf (Maybe (Name,Type,NameInfo))
lookupFunName name mbType range
  = do matches <- lookupNameEx isInfoFun name (maybeRToContext mbType) range
       case matches of
         []            -> return Nothing
         [(name,info)] -> return (Just (name,infoType info,info))
         _ -> do env <- getEnv
                 infError range (text "identifier" <+> Pretty.ppName (prettyEnv env) name <+> text "is ambiguous" <> ppAmbiguous env hintQualify matches)
  where
    hintQualify = "qualify the name to disambiguate it"

-- | Look up the candidates for a function applied to @fixed@ positional
-- arguments followed by the given named arguments.
lookupNameN :: Name -> Int -> [Name] -> Range -> Inf [(Name,NameInfo)]
lookupNameN name fixed named range
  = lookupNameEx (const True) name (CtxFunArgs fixed named) range
{-
do matches <-
case matches of
[] -> do amb <- lookupNameEx isInfoFun name CtxNone range
env <- getEnv
if null amb
then infError range (text "identifier" <+> Pretty.ppName (prettyEnv env) name <+> text "is undefined")
else infError range (text "no function" <+> Pretty.ppName (prettyEnv env) name
<+> text "accepts" <+> (pretty (fixed + length named)) <+> text "arguments"
<> ppAmbiguous env "" amb)
return []
_ -> return matches
-}
-- | Look up a name in the local assumptions only.
lookupInfName :: Name -> Inf (Maybe (Name,Type))
lookupInfName name
  = do env <- getEnv
       return (infgammaLookup name (infgamma env))
-- | Turn an optional expected type into a 'NameContext'
-- ('CtxNone' when absent).
maybeToContext :: Maybe Type -> NameContext
maybeToContext mbType
  = maybe CtxNone CtxType mbType
-- | As 'maybeToContext', ignoring the attached range.
maybeRToContext :: Maybe (Type,Range) -> NameContext
maybeRToContext mbTypeRange
  = maybeToContext (fmap fst mbTypeRange)
-- | The context in which a name is looked up; used to narrow down
-- candidates for overloaded names.
data NameContext
  = CtxNone                               -- ^ just a name
  | CtxType Type                          -- ^ a name that can appear in a context with this type
  | CtxFunArgs Int [Name]                 -- ^ function name with @n@ fixed arguments and followed by the given named arguments
  | CtxFunTypes Bool [Type] [(Name,Type)] -- ^ are only some arguments supplied?, function name, with fixed and named arguments
  deriving (Show)
-- | Look up all candidate definitions for a name: a matching local
-- assumption takes precedence; otherwise global candidates are filtered
-- by the given predicate and, when there is more than one, narrowed
-- down using the name context.
lookupNameEx :: (NameInfo -> Bool) -> Name -> NameContext -> Range -> Inf [(Name,NameInfo)]
lookupNameEx infoFilter name ctx range
  = -- trace ("lookup: " ++ show name ++ ": " ++ show mbType) $
    do env <- getEnv
       case infgammaLookupX name (infgamma env) of
         Just info | infoFilter info
           -> do sinfo <- subst info
                 return [(infoCanonicalName name info, sinfo)] -- TODO: what about local definitions without local type variables or variables?
         _ -> -- trace ("gamma: " ++ show (gamma env)) $
              -- lookup global candidates
              do let candidates = filter (infoFilter . snd) (gammaLookup name (gamma env))
                 case candidates of
                   [(qname,info)] -> return candidates
                   [] -> return [] -- infError range (Pretty.ppName (prettyEnv env) name <+> text "is undefined")
                   _  -> do checkCasingOverlaps range name candidates
                            -- lookup global candidates that match the expected type
                            matches <- case ctx of
                                         CtxNone -> return candidates
                                         CtxType expect -> do mss <- mapM (matchType expect) candidates
                                                              return (concat mss)
                                         CtxFunArgs n named -> do mss <- mapM (matchNamedArgs n named) candidates
                                                                  return (concat mss)
                                         CtxFunTypes partial fixed named -> do mss <- mapM (matchArgs partial fixed named) candidates
                                                                               return (concat mss)
                            -- trace ("matches: " ++ show matches) $
                            case matches of
                              [(qname,info)] -> return matches
                              _ -> do -- lookup global names defined in the current module
                                      {-
                                      let localMatches = [(qname,info) | (qname,info) <- matches, qualifier qname == context env]
                                      case localMatches of
                                        [(qname,info)] -> return localMatches
                                        _ ->
                                      -}
                                      return matches
                                      {- do
                                         let localCands = [(qname,info) | (qname,info) <- candidates, qualifier qname == context env]
                                         case localCands of
                                          [(qname,info)] -> return (qname,info)
                                          _ -> return localCands -- infError range (Pretty.ppName (prettyEnv env) name <+> text "is ambiguous")
                                      -}
  where
    -- keep a candidate only if its type subsumes the expected type
    matchType :: Type -> (Name,NameInfo) -> Inf [(Name,NameInfo)]
    matchType expect (name,info)
      = do free <- freeInGamma
           res <- runUnify (subsume range free expect (infoType info))
           case res of
             (Right _,_) -> return [(name,info)]
             (Left _,_)  -> return []

    -- keep a candidate only if it accepts @n@ fixed plus the named arguments
    matchNamedArgs :: Int -> [Name] -> (Name,NameInfo) -> Inf [(Name,NameInfo)]
    matchNamedArgs n named (name,info)
      = do res <- runUnify (matchNamed range (infoType info) n named)
           case res of
             (Right _,_) -> return [(name,info)]
             (Left _,_)  -> return []

    -- keep a candidate only if its parameters match the given argument types
    matchArgs :: Bool -> [Type] -> [(Name,Type)] -> (Name,NameInfo) -> Inf [(Name,NameInfo)]
    matchArgs matchSome fixed named (name,info)
      = do free <- freeInGamma
           res <- runUnify (matchArguments matchSome range free (infoType info) fixed named)
           case res of
             (Right _,_) -> return [(name,info)]
             (Left _,_)  -> return []
| lpeterse/koka | src/Type/InferMonad.hs | apache-2.0 | 58,903 | 0 | 29 | 20,915 | 16,436 | 8,301 | 8,135 | 1,023 | 16 |
{- Copyright 2014 David Farrell <shokku.ra@gmail.com>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module IRCD.Env where
import IRCD.Types
-- | Apply a function to the clients stored in the environment.
mapEnvClients :: (Clients -> Clients) -> Env -> Env
mapEnvClients f env = env {envClients = f (envClients env)}

-- | Apply a function to the handler list stored in the environment.
mapEnvHandlers :: ([Handler] -> [Handler]) -> Env -> Env
mapEnvHandlers f env = env {envHandlers = f (envHandlers env)}

-- | Apply a function to the transformer list stored in the environment.
mapEnvTransformers :: ([Transformer] -> [Transformer]) -> Env -> Env
mapEnvTransformers f env = env {envTransformers = f (envTransformers env)}
| shockkolate/lambdircd | src/IRCD/Env.hs | apache-2.0 | 1,027 | 0 | 9 | 174 | 165 | 91 | 74 | 8 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-|
Contains primitive traits for grouplike algebraic structures. The existence of
is generally True/False, but more than two values are also possible, and
some values may carry values or functions as "payload": a "unit element" trait
might, for example, contain the value of the unit element, and a "uniquely factorizable"
trait might carry a factorization function.
For any trait, its values are distinct types, but each is an instance
of a class which provides a method for converting these values into
a common sum type. This sum type can then be used to perform reflection
about the traits of an algebraic structure at runtime, or to put
algebraic structures with different traits into the same collection.
The following traits exist:
[@Commutative@] Commutative: @for all a, b: a + b = b + a@. AntiCommutative:
@for all a,b and the inverse function ': a+b = (b+a)'@.
[@Associative@] @for all a, b, c: (a + b) + c = a + (b + c)@
[@Idempotent@] @for all a: a + a = a@
[@UnitElement@] @for all a and the unit element 1: a + 1 = 1 + a = a@
[@LeftDivisible@] @for all a, b there exists x: x + a = b@
[@RightDivisible@] @for all a, b there exists y: a + y = b@
[@Invertible@] @for all a, their inverse a' and the unit element 1: a + a' = a' + a = 1@
-}
module Grouplike.Traits where
import Helper
import Templates
--non-sum types:
-- TagCommutative = TagCommutative
--sum types:
-- CommutativityValue = Commutative | ...
--classes:
-- Commutative
-- Tags for the individual traits.
-- Every individual trait consists of:
-- 1. The sum type of its values.
-- 2. The NON-SUM type of its values, which exist to separate structures
-- with different values for a trait in the eyes of the type checker.
-- 3. A class which transforms the non-sum type into the sum type for dynamic checking.
--
-- A tag may be a simple enum (Commutative | NonCommutative | UnknownCommutative), or
-- it may have a "payload", for example a unit element, which will exist in
-- some cases, i.e.: UnitElement a | NoUnitElement | UnknownElement
-- Simple enums can be made with the macro @makeEnumTag@. Tags with payload are made with @makeContentTag@,
-- where each option has type (String, Bool). True in the second component indicates the existence of data
-- for that particular constructor.
-- Generate the value types, tag types, and conversion classes for each trait.
$(makeEnumTag "Commutativity" ["Commutative", "AntiCommutative", "UnknownCommutative"])
$(makeEnumTag "Associativity" ["Associative", "UnknownAssociative"])
$(makeEnumTag "Idempotence" ["Idempotent", "UnknownIdempotent"])
$(makeContentTag "UnitElement" [("UnitElement", True), ("UnknownUnitElement", False)])
$(makeContentTag "LeftDivider" [("LeftDivider", True), ("UnknownLeftDivider", False)])
$(makeContentTag "RightDivider" [("RightDivider", True), ("UnknownRightDivider", False)])
$(makeContentTag "Inverse" [("Inverse", True), ("UnknownInverse", False)])

-- | The names of all grouplike traits generated above.
grouplikeTraits = ["Commutative", "Associative", "Idempotent", "UnitElement",
                   "LeftDivider", "RightDivider", "Inverse"]
-- Show instances: the "unknown" case of every trait renders as the
-- empty string so unknown traits disappear from printed descriptions;
-- only the unit element's payload is printable and shown.
instance Show CommutativityValue where
  show UnknownCommutative = ""
  show Commutative = "Commutative"
  show AntiCommutative = "AntiCommutative"

instance Show AssociativityValue where
  show UnknownAssociative = ""
  show Associative = "Associative"

instance Show IdempotenceValue where
  show UnknownIdempotent = ""
  show Idempotent = "Idempotent"

instance Show el => Show (UnitElementValue el) where
  show UnknownUnitElement = ""
  show (UnitElement el) = "UnitElement " ++ show el

instance Show (LeftDividerValue el) where
  show UnknownLeftDivider = ""
  show (LeftDivider el) = "LeftDivider"

instance Show (RightDividerValue el) where
  show UnknownRightDivider = ""
  show (RightDivider el) = "RightDivider"

instance Show (InverseValue el) where
  show UnknownInverse = ""
  show (Inverse el) = "Inverse"
-- |A grouplike structure with a binary operation.
class Grouplike s where
  -- | The structure's binary operation.
  op :: s el -> Bin el

--Individual traits which compose into the known structures (Monoids, Groups, etc.)

-- |A commutative grouplike structure: a `op` b = b `op` a.
class Grouplike s => Commutative s where

-- |An anticommutative grouplike structure: a `op` b = inv (b `op` a).
class Grouplike s => Anticommutative s where

-- |A associative grouplike structure: (a `op` b) `op` c = a `op` (b `op` c).
class Grouplike s => Associative s where

-- |An idempotent grouplike structure: x `op` x = x
class Grouplike s => Idempotent s where

-- |A grouplike structure with a unit element U: a `op` U = U `op` a = a
class Grouplike s => HasUnitElement s where
  -- | The unit element of the structure.
  ident :: s el -> el

-- |A grouplike structure where there exists a left divider l for
-- every pair of elements a,b: a `op` l = b
class Grouplike s => LeftDivisible s where
  -- | The left-division operation.
  lDiv :: s el -> Bin el

-- |A grouplike structure where there exists a right divider r for
-- every pair of elements a,b: r `op` a = b
class Grouplike s => RightDivisible s where
  -- | The right-division operation.
  rDiv :: s el -> Bin el

-- |A grouplike structure which has both left and right dividers.
class (Grouplike s, LeftDivisible s, RightDivisible s) => Divisible s where

-- |A grouplike structure where every element a has an inverse ia,
-- which reduces a to the unit element U: a `op` ia = U
class (Grouplike s, HasUnitElement s) => Invertible s where
  -- | The (unary) inversion operation.
  inverse :: s el -> Un el
| jtapolczai/algebra | Grouplike/Traits.hs | apache-2.0 | 5,371 | 0 | 9 | 987 | 735 | 389 | 346 | -1 | -1 |
module LengthLexicographic where

-- | A list wrapper whose ordering compares length before contents.
newtype LengthList a = LengthList [a]
  deriving (Show, Eq)

-- | Unwrap the underlying list.
lst :: LengthList a -> [a]
lst (LengthList xs) = xs

-- | Shorter lists sort first; equal lengths fall back to the
-- lexicographic order of the elements (the 'Ordering' monoid picks the
-- second comparison exactly when the first is 'EQ').
instance Ord a => Ord (LengthList a) where
  compare (LengthList xs) (LengthList ys)
    = compare (length xs) (length ys) <> compare xs ys

x = LengthList [1,2,3,4,5]
y = LengthList [1,2,3,4]
z = LengthList [1,2,3,4,6]
| lisphacker/codewars | LengthLexicographic.hs | bsd-2-clause | 565 | 0 | 12 | 261 | 199 | 108 | 91 | 12 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QStyleOption.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:21
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QStyleOption (
QqStyleOption(..)
,QqStyleOption_nf(..)
,qinit
,setFontMetrics
,qStyleOption_delete
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Core.Qt
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
-- NOTE(review): this file is machine generated by qtHaskell (see the
-- header); the comments below are annotations only -- regenerate rather
-- than hand-edit the bindings themselves.

-- | Overloaded constructors for 'QStyleOption'.
class QqStyleOption x1 where
  qStyleOption :: x1 -> IO (QStyleOption ())

-- default constructor
instance QqStyleOption (()) where
  qStyleOption ()
    = withQStyleOptionResult $
      qtc_QStyleOption

foreign import ccall "qtc_QStyleOption" qtc_QStyleOption :: IO (Ptr (TQStyleOption ()))

-- copy constructor
instance QqStyleOption ((QStyleOption t1)) where
  qStyleOption (x1)
    = withQStyleOptionResult $
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QStyleOption1 cobj_x1

foreign import ccall "qtc_QStyleOption1" qtc_QStyleOption1 :: Ptr (TQStyleOption t1) -> IO (Ptr (TQStyleOption ()))

-- one-Int constructor (presumably the version argument -- TODO confirm
-- against the QStyleOption C++ documentation)
instance QqStyleOption ((Int)) where
  qStyleOption (x1)
    = withQStyleOptionResult $
      qtc_QStyleOption2 (toCInt x1)

foreign import ccall "qtc_QStyleOption2" qtc_QStyleOption2 :: CInt -> IO (Ptr (TQStyleOption ()))

-- two-Int constructor (presumably version and type -- TODO confirm)
instance QqStyleOption ((Int, Int)) where
  qStyleOption (x1, x2)
    = withQStyleOptionResult $
      qtc_QStyleOption3 (toCInt x1) (toCInt x2)

foreign import ccall "qtc_QStyleOption3" qtc_QStyleOption3 :: CInt -> CInt -> IO (Ptr (TQStyleOption ()))
-- | \"_nf\" variants: same C constructor stubs as 'QqStyleOption', but the
-- result is wrapped with 'withObjectRefResult' instead of
-- 'withQStyleOptionResult'. NOTE(review): presumably \"nf\" = no finaliser,
-- i.e. the caller/C++ side owns the object — confirm against
-- Qtc.Classes.Base before relying on this.
class QqStyleOption_nf x1 where
  qStyleOption_nf :: x1 -> IO (QStyleOption ())

instance QqStyleOption_nf (()) where
  qStyleOption_nf ()
    = withObjectRefResult $
      qtc_QStyleOption

instance QqStyleOption_nf ((QStyleOption t1)) where
  qStyleOption_nf (x1)
    = withObjectRefResult $
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QStyleOption1 cobj_x1

instance QqStyleOption_nf ((Int)) where
  qStyleOption_nf (x1)
    = withObjectRefResult $
      qtc_QStyleOption2 (toCInt x1)

instance QqStyleOption_nf ((Int, Int)) where
  qStyleOption_nf (x1, x2)
    = withObjectRefResult $
      qtc_QStyleOption3 (toCInt x1) (toCInt x2)
-- Getter: layout direction, marshalled from a C enum (CLong).
instance Qdirection (QStyleOption a) (()) (IO (LayoutDirection)) where
  direction x0 ()
    = withQEnumResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_direction cobj_x0

foreign import ccall "qtc_QStyleOption_direction" qtc_QStyleOption_direction :: Ptr (TQStyleOption a) -> IO CLong

-- Getter: font metrics, returned as a new QFontMetrics object pointer.
instance QfontMetrics (QStyleOption a) (()) where
  fontMetrics x0 ()
    = withQFontMetricsResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_fontMetrics cobj_x0

foreign import ccall "qtc_QStyleOption_fontMetrics" qtc_QStyleOption_fontMetrics :: Ptr (TQStyleOption a) -> IO (Ptr (TQFontMetrics ()))

-- | Initialise the style option from a widget (QStyleOption::init).
-- Exported as a plain function rather than a class method.
qinit :: QStyleOption a -> ((QWidget t1)) -> IO ()
qinit x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QStyleOption_init cobj_x0 cobj_x1

foreign import ccall "qtc_QStyleOption_init" qtc_QStyleOption_init :: Ptr (TQStyleOption a) -> Ptr (TQWidget t1) -> IO ()

-- QStyleOption::initFrom(const QWidget *).
instance QinitFrom (QStyleOption a) ((QWidget t1)) where
  initFrom x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QStyleOption_initFrom cobj_x0 cobj_x1

foreign import ccall "qtc_QStyleOption_initFrom" qtc_QStyleOption_initFrom :: Ptr (TQStyleOption a) -> Ptr (TQWidget t1) -> IO ()

instance Qpalette (QStyleOption a) (()) where
  palette x0 ()
    = withQPaletteResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_palette cobj_x0

foreign import ccall "qtc_QStyleOption_palette" qtc_QStyleOption_palette :: Ptr (TQStyleOption a) -> IO (Ptr (TQPalette ()))

-- Two rect getters: 'qqrect' returns an object-wrapped QRect,
-- 'qrect' marshals the four ints out through the *_qth stub into a
-- plain Haskell 'Rect' value.
instance Qqqrect (QStyleOption a) (()) (IO (QRect ())) where
  qqrect x0 ()
    = withQRectResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_rect cobj_x0

foreign import ccall "qtc_QStyleOption_rect" qtc_QStyleOption_rect :: Ptr (TQStyleOption a) -> IO (Ptr (TQRect ()))

instance Qqrect (QStyleOption a) (()) (IO (Rect)) where
  qrect x0 ()
    = withRectResult $ \crect_ret_x crect_ret_y crect_ret_w crect_ret_h ->
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_rect_qth cobj_x0 crect_ret_x crect_ret_y crect_ret_w crect_ret_h

foreign import ccall "qtc_QStyleOption_rect_qth" qtc_QStyleOption_rect_qth :: Ptr (TQStyleOption a) -> Ptr CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> IO ()

-- Setter: enum value is converted to its numeric representation first.
instance QsetDirection (QStyleOption a) ((LayoutDirection)) where
  setDirection x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_setDirection cobj_x0 (toCLong $ qEnum_toInt x1)

foreign import ccall "qtc_QStyleOption_setDirection" qtc_QStyleOption_setDirection :: Ptr (TQStyleOption a) -> CLong -> IO ()

-- | Set the font metrics member (QStyleOption::fontMetrics = ...).
setFontMetrics :: QStyleOption a -> ((QFontMetrics t1)) -> IO ()
setFontMetrics x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QStyleOption_setFontMetrics cobj_x0 cobj_x1

foreign import ccall "qtc_QStyleOption_setFontMetrics" qtc_QStyleOption_setFontMetrics :: Ptr (TQStyleOption a) -> Ptr (TQFontMetrics t1) -> IO ()
instance QsetPalette (QStyleOption a) ((QPalette t1)) where
  setPalette x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QStyleOption_setPalette cobj_x0 cobj_x1

foreign import ccall "qtc_QStyleOption_setPalette" qtc_QStyleOption_setPalette :: Ptr (TQStyleOption a) -> Ptr (TQPalette t1) -> IO ()

-- Rect setters mirror the two rect getters: object-wrapped QRect vs.
-- plain 'Rect' marshalled as four CInts through the *_qth stub.
instance QqqsetRect (QStyleOption a) ((QRect t1)) where
  qqsetRect x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QStyleOption_setRect cobj_x0 cobj_x1

foreign import ccall "qtc_QStyleOption_setRect" qtc_QStyleOption_setRect :: Ptr (TQStyleOption a) -> Ptr (TQRect t1) -> IO ()

instance QqsetRect (QStyleOption a) ((Rect)) where
  qsetRect x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
      qtc_QStyleOption_setRect_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h

foreign import ccall "qtc_QStyleOption_setRect_qth" qtc_QStyleOption_setRect_qth :: Ptr (TQStyleOption a) -> CInt -> CInt -> CInt -> CInt -> IO ()

-- Plain-Int setters for the state/type/version members.
instance QsetState (QStyleOption a) ((Int)) where
  setState x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_setState cobj_x0 (toCInt x1)

foreign import ccall "qtc_QStyleOption_setState" qtc_QStyleOption_setState :: Ptr (TQStyleOption a) -> CInt -> IO ()

instance QsetType (QStyleOption a) ((Int)) where
  setType x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_setType cobj_x0 (toCInt x1)

foreign import ccall "qtc_QStyleOption_setType" qtc_QStyleOption_setType :: Ptr (TQStyleOption a) -> CInt -> IO ()

instance QsetVersion (QStyleOption a) ((Int)) where
  setVersion x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_setVersion cobj_x0 (toCInt x1)

foreign import ccall "qtc_QStyleOption_setVersion" qtc_QStyleOption_setVersion :: Ptr (TQStyleOption a) -> CInt -> IO ()

-- Matching Int getters.
instance Qstate (QStyleOption a) (()) (IO (Int)) where
  state x0 ()
    = withIntResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_state cobj_x0

foreign import ccall "qtc_QStyleOption_state" qtc_QStyleOption_state :: Ptr (TQStyleOption a) -> IO CInt

instance Qqtype (QStyleOption a) (()) (IO (Int)) where
  qtype x0 ()
    = withIntResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_type cobj_x0

foreign import ccall "qtc_QStyleOption_type" qtc_QStyleOption_type :: Ptr (TQStyleOption a) -> IO CInt

instance Qversion (QStyleOption a) (()) (IO (Int)) where
  version x0 ()
    = withIntResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOption_version cobj_x0

foreign import ccall "qtc_QStyleOption_version" qtc_QStyleOption_version :: Ptr (TQStyleOption a) -> IO CInt

-- | Explicitly destroy the underlying C++ object. After this call the
-- Haskell wrapper must not be used again.
qStyleOption_delete :: QStyleOption a -> IO ()
qStyleOption_delete x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QStyleOption_delete cobj_x0

foreign import ccall "qtc_QStyleOption_delete" qtc_QStyleOption_delete :: Ptr (TQStyleOption a) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QStyleOption.hs | bsd-2-clause | 8,297 | 0 | 13 | 1,320 | 2,556 | 1,310 | 1,246 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
-- | DSL for testing the modular solver
module UnitTests.Distribution.Client.Dependency.Modular.DSL (
ExampleDependency(..)
, Dependencies(..)
, ExTest(..)
, ExPreference(..)
, ExampleDb
, ExampleVersionRange
, ExamplePkgVersion
, ExamplePkgName
, ExampleAvailable(..)
, ExampleInstalled(..)
, IndepGoals(..)
, ReorderGoals(..)
, exAv
, exInst
, exFlag
, exResolve
, extractInstallPlan
, withSetupDeps
, withTest
, withTests
) where
-- base
import Data.Either (partitionEithers)
import Data.List (nub)
import Data.Maybe (catMaybes, mapMaybe)
import Data.Monoid
import Data.Version
import qualified Data.Map as Map
-- Cabal
import qualified Distribution.Compiler as C
import qualified Distribution.InstalledPackageInfo as C
import qualified Distribution.Package as C
hiding (HasUnitId(..))
import qualified Distribution.PackageDescription as C
import qualified Distribution.Simple.PackageIndex as C.PackageIndex
import qualified Distribution.System as C
import qualified Distribution.Version as C
import Language.Haskell.Extension (Extension(..), Language)
-- cabal-install
import Distribution.Client.ComponentDeps (ComponentDeps)
import Distribution.Client.Dependency
import Distribution.Client.Dependency.Types
import Distribution.Client.Types
import qualified Distribution.Client.InstallPlan as CI.InstallPlan
import qualified Distribution.Client.PackageIndex as CI.PackageIndex
import qualified Distribution.Client.PkgConfigDb as PC
import qualified Distribution.Client.ComponentDeps as CD
{-------------------------------------------------------------------------------
Example package database DSL
In order to be able to set simple examples up quickly, we define a very
simple version of the package database here explicitly designed for use in
tests.
The design of `ExampleDb` takes the perspective of the solver, not the
perspective of the package DB. This makes it easier to set up tests for
various parts of the solver, but makes the mapping somewhat awkward, because
it means we first map from "solver perspective" `ExampleDb` to the package
database format, and then the modular solver internally in `IndexConversion`
maps this back to the solver specific data structures.
IMPLEMENTATION NOTES
--------------------
TODO: Perhaps these should be made comments of the corresponding data type
definitions. For now these are just my own conclusions and may be wrong.
* The difference between `GenericPackageDescription` and `PackageDescription`
is that `PackageDescription` describes a particular _configuration_ of a
package (for instance, see documentation for `checkPackage`). A
`GenericPackageDescription` can be turned into a `PackageDescription` in
two ways:
a. `finalizePackageDescription` does the proper translation, by taking
into account the platform, available dependencies, etc. and picks a
flag assignment (or gives an error if no flag assignment can be found)
b. `flattenPackageDescription` ignores flag assignment and just joins all
components together.
The slightly odd thing is that a `GenericPackageDescription` contains a
`PackageDescription` as a field; both of the above functions do the same
thing: they take the embedded `PackageDescription` as a basis for the result
value, but override `library`, `executables`, `testSuites`, `benchmarks`
and `buildDepends`.
* The `condTreeComponents` fields of a `CondTree` is a list of triples
`(condition, then-branch, else-branch)`, where the `else-branch` is
optional.
-------------------------------------------------------------------------------}
-- Readable aliases used throughout the DSL; versions are plain Ints and
-- are expanded to @n.0.0@ when converted to Cabal types (see 'mkDirect').
type ExamplePkgName = String
type ExamplePkgVersion = Int
type ExamplePkgHash = String -- for example "installed" packages
type ExampleFlagName = String
type ExampleTestName = String
type ExampleVersionRange = C.VersionRange

-- | Dependencies of one component, or a component marked non-buildable.
data Dependencies = NotBuildable | Buildable [ExampleDependency]
  deriving Show

data ExampleDependency =
    -- | Simple dependency on any version
    ExAny ExamplePkgName

    -- | Simple dependency on a fixed version
  | ExFix ExamplePkgName ExamplePkgVersion

    -- | Dependencies indexed by a flag
  | ExFlag ExampleFlagName Dependencies Dependencies

    -- | Dependency on a language extension
  | ExExt Extension

    -- | Dependency on a language version
  | ExLang Language

    -- | Dependency on a pkg-config package
  | ExPkg (ExamplePkgName, ExamplePkgVersion)
  deriving Show

-- | A named test suite together with its dependencies.
data ExTest = ExTest ExampleTestName [ExampleDependency]

-- | Convenience wrapper around 'ExFlag': both branches are buildable.
exFlag :: ExampleFlagName -> [ExampleDependency] -> [ExampleDependency]
       -> ExampleDependency
exFlag n t e = ExFlag n (Buildable t) (Buildable e)

-- | A soft version preference for the named package.
data ExPreference = ExPref String ExampleVersionRange

data ExampleAvailable = ExAv {
    exAvName    :: ExamplePkgName
  , exAvVersion :: ExamplePkgVersion
  , exAvDeps    :: ComponentDeps [ExampleDependency]
  } deriving Show
-- | Constructs an 'ExampleAvailable' package for the 'ExampleDb',
-- given:
--
-- 1. The name 'ExamplePkgName' of the available package,
-- 2. The version 'ExamplePkgVersion' available
-- 3. The list of dependency constraints 'ExampleDependency'
-- that this package has. 'ExampleDependency' provides
-- a number of pre-canned dependency types to look at.
--
exAv :: ExamplePkgName -> ExamplePkgVersion -> [ExampleDependency]
     -> ExampleAvailable
exAv name version deps = ExAv name version libDeps
  where
    -- All dependencies given here are attached to the library component.
    libDeps = CD.fromLibraryDeps name deps
-- | Attach setup-script dependencies to an available package, keeping
-- all of its existing component dependencies.
withSetupDeps :: ExampleAvailable -> [ExampleDependency] -> ExampleAvailable
withSetupDeps ex setupDeps = ex { exAvDeps = combined }
  where
    combined = exAvDeps ex <> CD.fromSetupDeps setupDeps
-- | Attach a single test suite; see 'withTests'.
withTest :: ExampleAvailable -> ExTest -> ExampleAvailable
withTest ex = withTests ex . (: [])

-- | Attach test-suite components (and their dependencies) to an
-- available package.
withTests :: ExampleAvailable -> [ExTest] -> ExampleAvailable
withTests ex tests = ex { exAvDeps = exAvDeps ex <> testDeps }
  where
    testDeps = CD.fromList
                 (map (\(ExTest name deps) -> (CD.ComponentTest name, deps))
                      tests)
-- | An installed package in 'ExampleDb'; construct me with 'exInst'.
data ExampleInstalled = ExInst {
    exInstName         :: ExamplePkgName
  , exInstVersion      :: ExamplePkgVersion
  , exInstHash         :: ExamplePkgHash
  , exInstBuildAgainst :: [ExamplePkgHash]
  } deriving Show

-- | Constructs an example installed package given:
--
-- 1. The name of the package 'ExamplePkgName', i.e., 'String'
-- 2. The version of the package 'ExamplePkgVersion', i.e., 'Int'
-- 3. The IPID for the package 'ExamplePkgHash', i.e., 'String'
--    (just some unique identifier for the package.)
-- 4. The 'ExampleInstalled' packages which this package was
--    compiled against.)
--
exInst :: ExamplePkgName -> ExamplePkgVersion -> ExamplePkgHash
       -> [ExampleInstalled] -> ExampleInstalled
exInst pn v hash deps = ExInst pn v hash (map exInstHash deps)

-- | An example package database is a list of installed packages
-- 'ExampleInstalled' and available packages 'ExampleAvailable'.
-- Generally, you want to use 'exInst' and 'exAv' to construct
-- these packages.
type ExampleDb = [Either ExampleInstalled ExampleAvailable]

-- Shorthand for the Cabal conditional-tree type used by 'exAvSrcPkg'.
type DependencyTree a = C.CondTree C.ConfVar [C.Dependency] a

-- Solver toggles, newtyped so call sites are self-documenting.
newtype IndepGoals = IndepGoals Bool
  deriving Show

newtype ReorderGoals = ReorderGoals Bool
  deriving Show
-- | All package names in the database, installed and available alike.
exDbPkgs :: ExampleDb -> [ExamplePkgName]
exDbPkgs db = [either exInstName exAvName entry | entry <- db]
-- | Convert the DSL's "solver perspective" package description into the
-- real Cabal 'SourcePackage' the dependency resolver consumes.
exAvSrcPkg :: ExampleAvailable -> UnresolvedSourcePackage
exAvSrcPkg ex =
    let (libraryDeps, exts, mlang, pcpkgs) = splitTopLevel (CD.libraryDeps (exAvDeps ex))
        testSuites = [(name, deps) | (CD.ComponentTest name, deps) <- CD.toList (exAvDeps ex)]
    in SourcePackage {
           packageInfoId        = exAvPkgId ex
         , packageSource        = LocalTarballPackage "<<path>>"
         , packageDescrOverride = Nothing
         , packageDescription   = C.GenericPackageDescription {
               C.packageDescription = C.emptyPackageDescription {
                   C.package        = exAvPkgId ex
                   -- NOTE(review): the 'error' placeholders below are
                   -- presumably never forced by the solver, which reads the
                   -- cond* fields instead — confirm before reusing this
                   -- GenericPackageDescription elsewhere.
                 , C.libraries      = error "not yet configured: library"
                 , C.executables    = error "not yet configured: executables"
                 , C.testSuites     = error "not yet configured: testSuites"
                 , C.benchmarks     = error "not yet configured: benchmarks"
                 , C.buildDepends   = error "not yet configured: buildDepends"
                 , C.setupBuildInfo = Just C.SetupBuildInfo {
                       C.setupDepends = mkSetupDeps (CD.setupDeps (exAvDeps ex)),
                       C.defaultSetupDepends = False
                     }
                 }
             , C.genPackageFlags = nub $ concatMap extractFlags $
                                   CD.libraryDeps (exAvDeps ex) ++ concatMap snd testSuites
             , C.condLibraries = [(exAvName ex, mkCondTree (extsLib exts <> langLib mlang <> pcpkgLib pcpkgs)
                                                           disableLib
                                                           (Buildable libraryDeps))]
             , C.condExecutables = []
             , C.condTestSuites =
                 let mkTree = mkCondTree mempty disableTest . Buildable
                 in map (\(t, deps) -> (t, mkTree deps)) testSuites
             , C.condBenchmarks = []
             }
         }
  where
    -- Split the set of dependencies into the set of dependencies of the library,
    -- the dependencies of the test suites and extensions.
    splitTopLevel :: [ExampleDependency]
                  -> ( [ExampleDependency]
                     , [Extension]
                     , Maybe Language
                     , [(ExamplePkgName, ExamplePkgVersion)] -- pkg-config
                     )
    splitTopLevel [] =
        ([], [], Nothing, [])
    splitTopLevel (ExExt ext:deps) =
        let (other, exts, lang, pcpkgs) = splitTopLevel deps
        in (other, ext:exts, lang, pcpkgs)
    splitTopLevel (ExLang lang:deps) =
        case splitTopLevel deps of
          (other, exts, Nothing, pcpkgs) -> (other, exts, Just lang, pcpkgs)
          _ -> error "Only 1 Language dependency is supported"
    splitTopLevel (ExPkg pkg:deps) =
        let (other, exts, lang, pcpkgs) = splitTopLevel deps
        in (other, exts, lang, pkg:pcpkgs)
    splitTopLevel (dep:deps) =
        let (other, exts, lang, pcpkgs) = splitTopLevel deps
        in (dep:other, exts, lang, pcpkgs)

    -- Extract the total set of flags used
    extractFlags :: ExampleDependency -> [C.Flag]
    extractFlags (ExAny _)      = []
    extractFlags (ExFix _ _)    = []
    extractFlags (ExFlag f a b) = C.MkFlag {
          C.flagName        = C.FlagName f
        , C.flagDescription = ""
        , C.flagDefault     = True
        , C.flagManual      = False
        }
      : concatMap extractFlags (deps a ++ deps b)
      where
        deps :: Dependencies -> [ExampleDependency]
        deps NotBuildable   = []
        deps (Buildable ds) = ds
    extractFlags (ExExt _)  = []
    extractFlags (ExLang _) = []
    extractFlags (ExPkg _)  = []

    -- Build a conditional tree: a non-buildable component gets the
    -- supplied "disable" transformation applied to its data.
    mkCondTree :: Monoid a => a -> (a -> a) -> Dependencies -> DependencyTree a
    mkCondTree x dontBuild NotBuildable =
        C.CondNode {
            C.condTreeData        = dontBuild x
          , C.condTreeConstraints = []
          , C.condTreeComponents  = []
          }
    mkCondTree x dontBuild (Buildable deps) =
        let (directDeps, flaggedDeps) = splitDeps deps
        in C.CondNode {
               C.condTreeData        = x -- Necessary for language extensions
             , C.condTreeConstraints = map mkDirect directDeps
             , C.condTreeComponents  = map (mkFlagged dontBuild) flaggedDeps
             }

    -- Int version @n@ becomes the exact Cabal version @n.0.0@.
    mkDirect :: (ExamplePkgName, Maybe ExamplePkgVersion) -> C.Dependency
    mkDirect (dep, Nothing) = C.Dependency (C.PackageName dep) C.anyVersion
    mkDirect (dep, Just n)  = C.Dependency (C.PackageName dep) (C.thisVersion v)
      where
        v = Version [n, 0, 0] []

    mkFlagged :: Monoid a
              => (a -> a)
              -> (ExampleFlagName, Dependencies, Dependencies)
              -> (C.Condition C.ConfVar
                 , DependencyTree a, Maybe (DependencyTree a))
    mkFlagged dontBuild (f, a, b) = ( C.Var (C.Flag (C.FlagName f))
                                    , mkCondTree mempty dontBuild a
                                    , Just (mkCondTree mempty dontBuild b)
                                    )

    -- Split a set of dependencies into direct dependencies and flagged
    -- dependencies. A direct dependency is a tuple of the name of package and
    -- maybe its version (no version means any version) meant to be converted
    -- to a 'C.Dependency' with 'mkDirect' for example. A flagged dependency is
    -- the set of dependencies guarded by a flag.
    --
    -- TODO: Take care of flagged language extensions and language flavours.
    splitDeps :: [ExampleDependency]
              -> ( [(ExamplePkgName, Maybe Int)]
                 , [(ExampleFlagName, Dependencies, Dependencies)]
                 )
    splitDeps [] =
        ([], [])
    splitDeps (ExAny p:deps) =
        let (directDeps, flaggedDeps) = splitDeps deps
        in ((p, Nothing):directDeps, flaggedDeps)
    splitDeps (ExFix p v:deps) =
        let (directDeps, flaggedDeps) = splitDeps deps
        in ((p, Just v):directDeps, flaggedDeps)
    splitDeps (ExFlag f a b:deps) =
        let (directDeps, flaggedDeps) = splitDeps deps
        in (directDeps, (f, a, b):flaggedDeps)
    splitDeps (_:deps) = splitDeps deps

    -- Currently we only support simple setup dependencies
    -- (the irrefutable @[]@ pattern deliberately crashes on flagged
    -- setup dependencies).
    mkSetupDeps :: [ExampleDependency] -> [C.Dependency]
    mkSetupDeps deps =
        let (directDeps, []) = splitDeps deps in map mkDirect directDeps

    -- A 'C.Library' with just the given extensions in its 'BuildInfo'
    extsLib :: [Extension] -> C.Library
    extsLib es = mempty { C.libBuildInfo = mempty { C.otherExtensions = es } }

    -- A 'C.Library' with just the given default language in its 'BuildInfo'
    langLib :: Maybe Language -> C.Library
    langLib (Just lang) = mempty { C.libBuildInfo = mempty { C.defaultLanguage = Just lang } }
    langLib _           = mempty

    disableLib :: C.Library -> C.Library
    disableLib lib =
        lib { C.libBuildInfo = (C.libBuildInfo lib) { C.buildable = False }}

    disableTest :: C.TestSuite -> C.TestSuite
    disableTest test =
        test { C.testBuildInfo = (C.testBuildInfo test) { C.buildable = False }}

    -- A 'C.Library' with just the given pkgconfig-depends in its 'BuildInfo'
    pcpkgLib :: [(ExamplePkgName, ExamplePkgVersion)] -> C.Library
    pcpkgLib ds = mempty { C.libBuildInfo = mempty { C.pkgconfigDepends = [mkDirect (n, (Just v)) | (n,v) <- ds] } }
-- | Cabal package identifier for an available package; the Int version
-- becomes @v.0.0@.
exAvPkgId :: ExampleAvailable -> C.PackageIdentifier
exAvPkgId ex = C.PackageIdentifier { pkgName = name, pkgVersion = ver }
  where
    name = C.PackageName (exAvName ex)
    ver  = Version [exAvVersion ex, 0, 0] []
-- | Convert a DSL installed package into Cabal's
-- 'C.InstalledPackageInfo': the hash becomes the unit id, and the
-- build-against hashes become the @depends@ field.
exInstInfo :: ExampleInstalled -> C.InstalledPackageInfo
exInstInfo ex = C.emptyInstalledPackageInfo {
      C.installedUnitId = C.mkUnitId (exInstHash ex)
    , C.sourcePackageId = exInstPkgId ex
    , C.depends         = map C.mkUnitId (exInstBuildAgainst ex)
    }
-- | Cabal package identifier for an installed package; the Int version
-- becomes @v.0.0@ (mirrors 'exAvPkgId').
exInstPkgId :: ExampleInstalled -> C.PackageIdentifier
exInstPkgId ex = C.PackageIdentifier { pkgName = name, pkgVersion = ver }
  where
    name = C.PackageName (exInstName ex)
    ver  = Version [exInstVersion ex, 0, 0] []
-- | Build the source-package index the solver reads.
exAvIdx :: [ExampleAvailable] -> CI.PackageIndex.PackageIndex UnresolvedSourcePackage
exAvIdx pkgs = CI.PackageIndex.fromList (map exAvSrcPkg pkgs)

-- | Build the installed-package index the solver reads.
exInstIdx :: [ExampleInstalled] -> C.PackageIndex.InstalledPackageIndex
exInstIdx pkgs = C.PackageIndex.fromList (map exInstInfo pkgs)
-- | Run the dependency solver over an example database and return the
-- solver log together with either an error or an install plan.
exResolve :: ExampleDb
          -- List of extensions supported by the compiler, or Nothing if unknown.
          -> Maybe [Extension]
          -- List of languages supported by the compiler, or Nothing if unknown.
          -> Maybe [Language]
          -> PC.PkgConfigDb
          -> [ExamplePkgName]   -- ^ target packages to solve for
          -> Solver
          -> Maybe Int          -- ^ max backjumps (Nothing = unlimited)
          -> IndepGoals
          -> ReorderGoals
          -> [ExPreference]
          -> ([String], Either String CI.InstallPlan.SolverInstallPlan)
exResolve db exts langs pkgConfigDb targets solver mbj (IndepGoals indepGoals) (ReorderGoals reorder) prefs
    = runProgress $ resolveDependencies C.buildPlatform
                                        compiler pkgConfigDb
                                        solver
                                        params
  where
    defaultCompiler = C.unknownCompilerInfo C.buildCompilerId C.NoAbiTag
    compiler = defaultCompiler { C.compilerInfoExtensions = exts
                               , C.compilerInfoLanguages  = langs
                               }
    (inst, avai) = partitionEithers db
    instIdx      = exInstIdx inst
    avaiIdx      = SourcePackageDb {
                       packageIndex       = exAvIdx avai
                     , packagePreferences = Map.empty
                     }
    -- Test stanzas are enabled for every package in the database.
    enableTests  = fmap (\p -> PackageConstraintStanzas
                               (C.PackageName p) [TestStanzas])
                        (exDbPkgs db)
    targets'     = fmap (\p -> NamedPackage (C.PackageName p) []) targets
    params       = addPreferences (fmap toPref prefs)
                 $ addConstraints (fmap toLpc enableTests)
                 $ setIndependentGoals indepGoals
                 $ setReorderGoals reorder
                 $ setMaxBackjumps mbj
                 $ standardInstallPolicy instIdx avaiIdx targets'
    toLpc     pc = LabeledPackageConstraint pc ConstraintSourceUnknown
    toPref (ExPref n v) = PackageVersionPreference (C.PackageName n) v
-- | Project the (name, major version) of every configured package out of
-- a solver install plan; pre-existing/installed entries are dropped.
extractInstallPlan :: CI.InstallPlan.SolverInstallPlan
                   -> [(ExamplePkgName, ExamplePkgVersion)]
extractInstallPlan = mapMaybe confPkg . CI.InstallPlan.toList
  where
    confPkg :: CI.InstallPlan.SolverPlanPackage -> Maybe (String, Int)
    confPkg (CI.InstallPlan.Configured pkg) = Just $ srcPkg pkg
    confPkg _                               = Nothing

    srcPkg :: SolverPackage UnresolvedPkgLoc -> (String, Int)
    srcPkg cpkg =
      -- The version pattern is irrefutable: every version built by this
      -- DSL has at least one branch (see 'exAvPkgId').
      let C.PackageIdentifier (C.PackageName p) (Version (n:_) _) =
            packageInfoId (solverPkgSource cpkg)
      in (p, n)
{-------------------------------------------------------------------------------
Auxiliary
-------------------------------------------------------------------------------}
-- | Run a 'Progress' computation to completion, collecting every 'Step'
-- message lazily along with the final outcome ('Fail' or 'Done').
--
-- Like `runLog`, but for the more general `Progress` type.
runProgress :: Progress step e a -> ([step], Either e a)
runProgress (Step s rest) =
  let (steps, outcome) = runProgress rest
  in (s : steps, outcome)
runProgress (Fail e) = ([], Left e)
runProgress (Done a) = ([], Right a)
| gbaz/cabal | cabal-install/tests/UnitTests/Distribution/Client/Dependency/Modular/DSL.hs | bsd-3-clause | 19,053 | 0 | 20 | 5,107 | 3,939 | 2,196 | 1,743 | 290 | 19 |
{-# LANGUAGE FlexibleInstances, StandaloneDeriving, DeriveGeneric #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module NFDataInstances where
import Control.DeepSeq
import Control.DeepSeq.Generics
import GHC.Generics hiding (Fixity,Associativity)
import Control.Monad.Trans.State
import Data.Functor.Identity
import Text.Parsec.Error
import Lambda
import Parser
import Name
import Value
import Type
import FreeEnvironment
import Associativity
import InfixFix
import SourcePostion
import BruijnTerm
import TypeError
-- Orphan 'NFData' instances for benchmark/test code: most go through
-- GHC.Generics ('genericRnf'); a few opaque third-party types only get
-- WHNF evaluation (see notes below).
deriving instance Generic Name
instance NFData Name where
    rnf = genericRnf

deriving instance Generic (TypeA i)
instance NFData i => NFData (TypeA i) where
    rnf = genericRnf

deriving instance Generic (Def i n)
instance (NFData i, NFData n) => NFData (Def i n) where
    rnf = genericRnf

deriving instance Generic Free
instance NFData Free where
    rnf = genericRnf

deriving instance Generic Bound
instance NFData Bound where
    rnf = genericRnf

deriving instance Generic (TypeError i j)
instance (NFData i, NFData j) => NFData (TypeError i j) where
    rnf = genericRnf

deriving instance Generic (UndefinedVar i b)
instance (NFData i, NFData b) => NFData (UndefinedVar i b) where
    rnf = genericRnf

deriving instance Generic UnificationError
instance NFData UnificationError where
    rnf = genericRnf

deriving instance Generic Fixity
instance NFData Fixity where
    rnf = genericRnf

deriving instance Generic Associativity
instance NFData Associativity where
    rnf = genericRnf

deriving instance Generic (StateT s m a)
instance NFData (Control.Monad.Trans.State.StateT Stack Identity Value) where
    rnf = genericRnf

deriving instance Generic TypeInstance
instance NFData TypeInstance where
    rnf = genericRnf

deriving instance Generic Parser.ParseError
instance NFData Parser.ParseError where
    rnf = genericRnf

-- deriving instance Generic Text.Parsec.Pos.SourcePos
-- WHNF only: @flip seq () = \x -> x `seq` ()@. SourcePos is opaque here
-- (no Generic instance available); presumably WHNF suffices for the
-- benchmarks — confirm if deep evaluation is ever required.
instance NFData SourcePos where
    rnf = flip seq ()

-- deriving instance Generic Text.Parsec.Error.ParseError
-- WHNF only, same caveat as SourcePos above.
instance NFData Text.Parsec.Error.ParseError where
    rnf = flip seq ()

deriving instance Generic Value
-- WHNF only: Value is forced to its outermost constructor, not deeply.
instance NFData Value where
    rnf v = seq v ()

deriving instance Generic (LamTerm i j n)
instance (NFData i, NFData j, NFData n) => NFData (LamTerm i j n) where
    rnf = genericRnf
| kwibus/myLang | tests/bench/NFDataInstances.hs | bsd-3-clause | 2,434 | 0 | 8 | 424 | 657 | 355 | 302 | 72 | 0 |
{-# LANGUAGE UndecidableInstances, FlexibleInstances, MultiParamTypeClasses #-}
module Generic.Control.MonadPlus where
import Prelude ()
import Generic.Control.Alternative
import Generic.Control.Monad
-- | Empty class: any type that is both 'Alternative' and 'Monad' (in this
-- generalized, indexed formulation) is automatically a 'MonadPlus'.
-- The blanket instance below is what requires UndecidableInstances.
class (Alternative j m, Monad j m) => MonadPlus j m
instance (Alternative j m, Monad j m) => MonadPlus j m
| sebastiaanvisser/AwesomePrelude | src/Generic/Control/MonadPlus.hs | bsd-3-clause | 312 | 0 | 6 | 42 | 82 | 46 | 36 | -1 | -1 |
module MyEither where
import Control.Applicative
-- | A re-implementation of 'Either' with right-biased Functor/Applicative/
-- Monad instances and an 'Alternative' instance that keeps the first
-- success (requires a 'Monoid' left type to give 'empty' a value).
data MyEither l r = MyLeft l | MyRight r
  deriving (Eq, Show)

instance Functor (MyEither l) where
  fmap f e = case e of
    MyLeft l  -> MyLeft l
    MyRight r -> MyRight (f r)

instance Applicative (MyEither l) where
  pure = MyRight
  ef <*> ea = case ef of
    MyLeft l  -> MyLeft l
    MyRight f -> fmap f ea

instance Monad (MyEither l) where
  e >>= k = case e of
    MyLeft l  -> MyLeft l
    MyRight r -> k r

instance Monoid l => Alternative (MyEither l) where
  empty = MyLeft mempty
  lhs <|> rhs = case lhs of
    MyLeft _ -> rhs
    _        -> lhs
| hjwylde/haskell-type-classes-workshop | src/MyEither.hs | bsd-3-clause | 600 | 0 | 8 | 179 | 264 | 130 | 134 | 18 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main where
import Control.Applicative ((<*>), pure)
import Control.Arrow ((&&&))
import Control.Monad (forM_)
import qualified Data.Aeson as A
import qualified Data.Aeson.TH as A
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.JSON.Schema.Generator as G
import qualified Data.List as List
import Data.Map (fromList)
import Data.Monoid ((<>))
import Data.Proxy (Proxy(Proxy))
import Data.Typeable (typeOf)
import GHC.Generics
import System.Exit (ExitCode(ExitSuccess), exitWith)
import System.IO (Handle, IOMode(WriteMode), hClose, hPutStr, hPutStrLn, withFile)
import System.Process (CreateProcess(..), StdStream(CreatePipe)
, createProcess, proc, system, waitForProcess)
import Types
import Values
--
-- Instances
--
-- Schema generators for the test types, all derived via GHC.Generics
-- (empty instance bodies use the Generic default methods).
instance G.JSONSchemaGen RecordType1
instance G.JSONSchemaGen RecordType2
instance G.JSONSchemaGen ProductType1
instance G.JSONSchemaGen ProductType2
instance G.JSONSchemaGen UnitType1
instance G.JSONSchemaGen UnitType2
instance G.JSONSchemaGen MixType1
--instance G.JSONSchemaPrim UnitType2 where
--    toSchemaPrim opts _ = G.scSchemaType . G.toSchema opts $ (Proxy :: Proxy UnitType2)
--
-- TestData
--
-- | An existentially-quantified named test value: any type that can be
-- encoded with generic aeson and named by 'SchemaName' can be stored.
-- The CPP guards track the aeson-1.0 'GToJSON' arity change.
data TestDatum =
#if MIN_VERSION_aeson(1,0,0)
    forall a. (Generic a, A.GToJSON A.Zero (Rep a), SchemaName (Rep a))
#else
    forall a. (Generic a, A.GToJSON (Rep a), SchemaName (Rep a))
#endif
    => TestDatum { tdName  :: String   -- label used as the python variable prefix
                 , tdValue :: a        -- the value to serialise
                 }

#if MIN_VERSION_aeson(1,0,0)
testDatum :: (Generic a, A.GToJSON A.Zero (Rep a), SchemaName (Rep a)) => String -> a -> TestDatum
#else
testDatum :: (Generic a, A.GToJSON (Rep a), SchemaName (Rep a)) => String -> a -> TestDatum
#endif
testDatum = TestDatum

testData :: [TestDatum]
testData =
    -- NOTE(review): "recordData12" is paired with recordType22, which looks
    -- like a possible typo for recordType12 — confirm against Values.hs.
    -- The first entry also uses the bare constructor where the rest use
    -- 'testDatum' (they are identical; 'testDatum' is just an alias).
    [ TestDatum "recordData11"  recordType11
    , testDatum "recordData12"  recordType22
    , testDatum "productData11" productData11
    , testDatum "productData12" productData12
    , testDatum "unitData1"     unitData1
    , testDatum "unitData2"     unitData2
    , testDatum "unitData3"     unitData3
    , testDatum "mixData11"     mixData11
    , testDatum "mixData12"     mixData12
    , testDatum "mixData13"     mixData13
    ]
--
-- Encoder
--
-- | Build aeson encoding options from the three knobs this test matrix
-- varies; everything else stays at 'A.defaultOptions'.
aesonOptions :: Bool -> Bool -> A.SumEncoding -> A.Options
aesonOptions nullaryAsString dropNothings encoding =
  A.defaultOptions
    { A.allNullaryToStringTag = nullaryAsString
    , A.omitNothingFields     = dropNothings
    , A.sumEncoding           = encoding
    }
-- All tested option combinations. The list-Applicative '<*>' takes the
-- cartesian product: 4 partially-applied 'aesonOptions' (the Bool pairs)
-- x 3 sum encodings = 12 option sets.
optPatterns :: [A.Options]
optPatterns =
    [ aesonOptions True  True
    , aesonOptions True  False
    , aesonOptions False True
    , aesonOptions False False
    ]
    <*>
    [ A.defaultTaggedObject
    , A.ObjectWithSingleField
    , A.TwoElemArray
    ]
#if MIN_VERSION_aeson(1,0,0)
encode :: (Generic a, A.GToJSON A.Zero (Rep a)) => A.Options -> a -> BL.ByteString
#else
encode :: (Generic a, A.GToJSON (Rep a)) => A.Options -> a -> BL.ByteString
#endif
-- Generic aeson encoding under explicit options (shadows 'A.encode',
-- which would ignore the options).
encode opt a = A.encode (A.genericToJSON opt a)
--
-- Print values as json in python
--
-- | Render an options combination as a python comment line, e.g.
-- @# sym_1 (allNullaryToStringTag: True, omitNothingFields: False, sumEncoding: tag)@.
optToStr :: String -> A.Options -> String
optToStr symbol aopts = concat
    [ "# ", symbol
    , " (allNullaryToStringTag: ", show (A.allNullaryToStringTag aopts)
    , ", omitNothingFields: ",     show (A.omitNothingFields aopts)
    , ", sumEncoding: ",           renderEnc (A.sumEncoding aopts)
    , ")"
    ]
  where
    -- Short names for the three encodings exercised by 'optPatterns'.
    renderEnc :: A.SumEncoding -> String
    renderEnc A.TwoElemArray          = "array"
    renderEnc A.ObjectWithSingleField = "object"
    renderEnc _                       = "tag"
-- | Pair each options value with a numbered symbol: @name_1@, @name_2@, ...
-- (1-based, in list order). Replaces a hand-rolled recursion that
-- re-implemented 'zipWith' over an infinite counter.
pairsOptSymbol :: [A.Options] -> String -> [(A.Options, String)]
pairsOptSymbol opts name = zipWith label opts [1 :: Int ..]
  where
    label opt n = (opt, name ++ "_" ++ show n)
#if MIN_VERSION_aeson(1,0,0)
printValueAsJson :: (Generic a, A.GToJSON A.Zero (Rep a)) => Handle -> [A.Options] -> String -> a -> IO ()
#else
printValueAsJson :: (Generic a, A.GToJSON (Rep a)) => Handle -> [A.Options] -> String -> a -> IO ()
#endif
-- For each options combination, emit a commented python assignment:
--   # <symbol> (option summary)
--   <symbol> = json.loads('<encoded value>')
printValueAsJson h opts name value =
    forM_ (pairsOptSymbol opts name) . uncurry $ \opt symbol -> do
      hPutStrLn h $ optToStr symbol opt
      hPutStr   h $ symbol ++ " = json.loads('"
      BL.hPutStr h $ Main.encode opt value
      hPutStrLn h "')"
      hPutStrLn h ""
-- | Write a python module to @path@ that loads every 'testData' value,
-- under every option combination in 'optPatterns', via @json.loads@.
printValueAsJsonInPython :: FilePath -> IO ()
printValueAsJsonInPython path =
    withFile path WriteMode $ \h -> do
      hPutStrLn h "# -*- coding: utf-8 -*-"
      hPutStrLn h "import json"
      hPutStrLn h ""
      forM_ testData $ \(TestDatum name value) ->
        printValueAsJson h optPatterns name value
--
-- Print type definitions as schema in individualy json files
--
-- | Generate one JSON-schema file per options combination for the given
-- type. The file name and the schema-id suffix both encode the options
-- (see the 'SchemaName' instance below).
printTypeAsSchema :: (Generic a, G.JSONSchemaGen a, SchemaName (Rep a))
                  => FilePath -> G.Options -> [A.Options] -> Proxy a -> IO ()
printTypeAsSchema dir opts aoptss a =
    forM_ aoptss $ \aopts -> do
      let fa       = fmap from a   -- Proxy of the Generic representation
      let filename = schemaName opts aopts fa
      let suffix   = "." ++ schemaSuffix opts aopts fa
      let path     = dir ++ "/" ++ filename
      -- Override the schema-id suffix so the $id matches the file name.
      let opts'    = opts { G.schemaIdSuffix = suffix }
      withFile path WriteMode $ \h ->
        BL.hPutStrLn h $ G.generate' opts' aopts a
-- | Derive the schema file name and its options-dependent suffix from a
-- type's Generic representation (instances inspect the 'D1' metadata).
class SchemaName f where
  -- | Full file name, e.g. @Module.Type.<suffix>@.
  schemaName :: G.Options -> A.Options -> Proxy (f a) -> FilePath
  -- | Suffix encoding the aeson options plus the configured id suffix.
  schemaSuffix :: G.Options -> A.Options -> Proxy (f a) -> String
-- | Names come from the datatype metadata: @<module>.<type>.<suffix>@,
-- where the suffix encodes the three aeson options in play.
instance (Datatype d) => SchemaName (D1 d f) where
  schemaName opts aopts p = modName ++ "." ++ typName ++ "." ++ schemaSuffix opts aopts p
    where
      -- 'undefined' is only used as a type proxy for the metadata lookups.
      modName = moduleName (undefined :: D1 d f p)
      typName = datatypeName (undefined :: D1 d f p)
  schemaSuffix opts aopts _ =
      -- NOTE(review): 'showC' is not defined in this where block; it is
      -- presumably a top-level helper earlier in the file — confirm.
      show a ++ "." ++ show b ++ "." ++ showC c ++ G.schemaIdSuffix opts
    where
      a = A.allNullaryToStringTag aopts
      b = A.omitNothingFields aopts
      c = A.sumEncoding aopts
-- | Base schema-generator options: absolute base URI, @.json@ id
-- suffix, and a pre-seeded type-reference map so RecordType2 and
-- ProductType2 are referenced by URI rather than inlined.
schemaOptions :: G.Options
schemaOptions = G.defaultOptions
    { G.baseUri = "https://github.com/yuga/jsonschema-gen/tests/"
    , G.schemaIdSuffix = ".json"
    , G.typeRefMap = fromList
        [ (typeOf (undefined :: RecordType2), "https://github.com/yuga/jsonschema-gen/tests/Types.RecordType2.True.False.tag.json")
        , (typeOf (undefined :: ProductType2), "https://github.com/yuga/jsonschema-gen/tests/Types.ProductType2.True.False.tag.json")
        ]
    }
-- | Extended variant of 'schemaOptions': additionally references
-- UnitType2 by URI and forces the @recordField1A@ field to be treated
-- as a 'UnitType2'.
schemaOptions' :: G.Options
schemaOptions'= schemaOptions
    { G.typeRefMap = fromList
        [ (typeOf (undefined :: RecordType2), "https://github.com/yuga/jsonschema-gen/tests/Types.RecordType2.True.False.tag.json")
        , (typeOf (undefined :: ProductType2), "https://github.com/yuga/jsonschema-gen/tests/Types.ProductType2.True.False.tag.json")
        , (typeOf (undefined :: UnitType2), "https://github.com/yuga/jsonschema-gen/tests/Types.UnitType2.True.False.tag.json")
        ]
    , G.fieldTypeMap = fromList [("recordField1A", G.FieldType (Proxy :: Proxy UnitType2))]
    }
-- | Emit schema files for every test type into @dir@. Only RecordType1
-- uses the extended options with the field-type override; each call is
-- written out separately because the proxies have different types.
printTypeAsSchemaInJson :: FilePath -> IO ()
printTypeAsSchemaInJson dir = do
  printTypeAsSchema dir schemaOptions' optPatterns (Proxy :: Proxy RecordType1)
  printTypeAsSchema dir schemaOptions optPatterns (Proxy :: Proxy RecordType2)
  printTypeAsSchema dir schemaOptions optPatterns (Proxy :: Proxy ProductType1)
  printTypeAsSchema dir schemaOptions optPatterns (Proxy :: Proxy ProductType2)
  printTypeAsSchema dir schemaOptions optPatterns (Proxy :: Proxy UnitType1)
  printTypeAsSchema dir schemaOptions optPatterns (Proxy :: Proxy UnitType2)
  printTypeAsSchema dir schemaOptions optPatterns (Proxy :: Proxy MixType1)
--
-- Print jsonschema validator in python
--
-- | For each options variant, produce (a) a Python statement that loads
-- the schema file into a @schema_*@ variable and (b) the matching
-- @uri : variable@ entry for the resolver store.
convertToPythonLoadingSchema :: (Generic a, SchemaName (Rep a)) => G.Options -> [A.Options] -> Proxy a -> ([String], [String])
convertToPythonLoadingSchema opts aoptss a =
    (map toLoader aoptss, map toStore aoptss)
  where
    repProxy = fmap from a
    -- Python identifier derived from the schema file name.
    symbolFor filename = "schema_" ++ map dotToLowline filename
    toLoader aopts =
      let filename = schemaName opts aopts repProxy
      in symbolFor filename ++ " = json.load(codecs.open(schemaPath + '" ++ filename ++ "', 'r', 'utf-8'))"
    toStore aopts =
      let filename = schemaName opts aopts repProxy
      in "'" ++ G.baseUri opts ++ filename ++ "' : " ++ symbolFor filename
-- | Replace a dot with an underscore (for building Python identifiers
-- from dotted schema file names); all other characters pass through.
dotToLowline :: Char -> Char
dotToLowline c
  | c == '.'  = '_'
  | otherwise = c
-- | Emit the Python statements that load every generated schema file
-- and collect them into a @selfStore@ dict keyed by schema URI.
printLoadSchemas :: Handle -> IO ()
printLoadSchemas h = do
  -- (<>) on the result pairs concatenates loader lines and store
  -- entries from all types componentwise.
  let (loader, store) = convertToPythonLoadingSchema schemaOptions' optPatterns (Proxy :: Proxy RecordType1)
                     <> convertToPythonLoadingSchema schemaOptions optPatterns (Proxy :: Proxy RecordType2)
                     <> convertToPythonLoadingSchema schemaOptions optPatterns (Proxy :: Proxy ProductType1)
                     <> convertToPythonLoadingSchema schemaOptions optPatterns (Proxy :: Proxy ProductType2)
                     <> convertToPythonLoadingSchema schemaOptions optPatterns (Proxy :: Proxy UnitType1)
                     <> convertToPythonLoadingSchema schemaOptions optPatterns (Proxy :: Proxy UnitType2)
                     <> convertToPythonLoadingSchema schemaOptions optPatterns (Proxy :: Proxy MixType1)
  hPutStrLn h "schemaPath = os.path.dirname(os.path.realpath(__file__)) + '/'"
  mapM_ (hPutStrLn h) loader
  hPutStrLn h ""
  -- Print the store dict two items per line: "{ entry , entry" / ...
  mapM_ (hPutStrLn h . concat) . chunk 2 $ beginmap : (List.intersperse comma store) ++ [endofmap]
  where
    -- Split a list into sublists of length n (last one may be shorter).
    chunk n = takeWhile (not . null) . map (take n) . iterate (drop n)
    beginmap = "selfStore = { "
    comma = " , "
    endofmap = " }"
-- | Emit a @mkValidator@ helper plus one validation call per
-- (test datum, options variant) pair, matching each datum symbol in
-- @jsondata@ with the corresponding schema variable.
printValidate :: Handle -> [A.Options] -> IO ()
printValidate h aoptss = do
  hPutStrLn h "def mkValidator(schema):"
  hPutStrLn h "    resolver = jsonschema.RefResolver(schema[u'id'], schema, store=selfStore)"
  hPutStrLn h "    validator = jsonschema.Draft4Validator(schema, resolver=resolver)"
  hPutStrLn h "    return validator"
  hPutStrLn h ""
  forM_ testData $ \(TestDatum name value) ->
    forM_ (pairsOptSymbol aoptss name) . uncurry $ \aopts dataSymbol -> do
      -- NOTE(review): 'fmap from . pure $ value' builds a Proxy for the
      -- value's Generic rep, relying on Proxy's Applicative discarding
      -- the value — confirm.
      let schemaFilename = schemaName schemaOptions aopts (fmap from . pure $ value)
      let schemaSymbol = map dotToLowline schemaFilename
      hPutStrLn h $ "mkValidator(" ++ "schema_" ++ schemaSymbol ++ ").validate(jsondata." ++ dataSymbol ++ ")"
-- | Write the @jsonvalidator.py@ script: imports, schema loading, and
-- one validation call per test datum / options pair.
printValidatorInPython :: FilePath -> IO ()
printValidatorInPython path =
  withFile path WriteMode $ \h -> do
    mapM_ (hPutStrLn h)
      [ "# -*- coding: utf-8 -*-"
      , "import codecs"
      , "import json"
      , "import jsondata"
      , "import jsonschema"
      , "import os"
      , ""
      ]
    printLoadSchemas h
    hPutStrLn h ""
    printValidate h optPatterns
--
-- Run Test
--
-- | Process spec that runs the generated validator with the @python@
-- found on PATH; stdin is piped so the caller can close it immediately.
pythonProcess :: FilePath -> CreateProcess
pythonProcess dir = (proc "python" [dir ++ "/jsonvalidator.py"])
    { std_in = CreatePipe
#if MIN_VERSION_process(1,2,0)
    , delegate_ctlc = True  -- forward Ctrl-C to the child process
#endif
    }
-- | Launch the Python validator, wait for it, and exit with its exit
-- code. Fails if the process handle for stdin was not created.
runTest :: FilePath -> IO ()
runTest dir = do
  procHandles <- createProcess (pythonProcess dir)
  case procHandles of
    (Just stdinH, _, _, procH) -> do
      -- Nothing is written to the child; close stdin right away.
      hClose stdinH
      waitForProcess procH >>= exitWith
    _ -> fail "Failed to launch python"
--
-- Main
--
-- | Generate the Python test data, the schema files, and the validator
-- script, then run the validator if a @python@ executable is available.
main :: IO ()
main = do
  let dir = "tests"
  printValueAsJsonInPython (dir ++ "/jsondata.py")
  printTypeAsSchemaInJson dir
  printValidatorInPython (dir ++ "/jsonvalidator.py")
  -- Probe for a usable python before attempting the real run.
  probe <- system "python --version"
  case probe of
    ExitSuccess -> runTest dir
    _ -> putStrLn "If you have 'python' in your PATH, this test runs jsonvalidator.py"
| yuga/jsonschema-gen | tests/Main.hs | bsd-3-clause | 11,951 | 17 | 20 | 2,745 | 3,186 | 1,638 | 1,548 | 226 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Simple.Plugin(plugin) where
import UniqFM
import GhcPlugins
import qualified ErrUtils
-- For annotation tests
import Simple.DataStructures
import Control.Monad
import Data.Monoid
import Data.Dynamic
import qualified Language.Haskell.TH as TH
-- | The plugin record handed to GHC: installs our core-to-core pass and
-- declares itself pure so GHC can skip recompilation when nothing changed.
plugin :: Plugin
plugin = defaultPlugin {
    installCoreToDos = install,
    pluginRecompile = purePlugin
  }
-- | Installation hook: log the command-line options we received and
-- prepend our pass to the existing core-to-core pipeline.
install :: [CommandLineOption] -> [CoreToDo] -> CoreM [CoreToDo]
install options todos = do
  putMsgS "Simple Plugin Passes Queried"
  putMsgS ("Got options: " ++ unwords options)
  -- Create some actual passes to continue the test.
  let ourPass = CoreDoPluginPass "Main pass" mainPass
  return (ourPass : todos)
-- | First binder in the list whose occurrence name matches @target@.
findNameBinds :: String -> [CoreBind] -> First Name
findNameBinds target = foldMap (findNameBind target)
-- | Search one binding group for a binder named @target@.
findNameBind :: String -> CoreBind -> First Name
findNameBind target bind = case bind of
  NonRec b _ -> findNameBndr target b
  Rec bes    -> mconcat [ findNameBndr target b | (b, _) <- bes ]
-- | Match a single binder against @target@ by its occurrence string.
findNameBndr :: String -> CoreBndr -> First Name
findNameBndr target b
  | getOccString (varName b) == target = First (Just (varName b))
  | otherwise                          = First Nothing
-- | The pass itself: fetch 'ReplaceWith' annotations from the module
-- and rewrite every top-level binding accordingly.
mainPass :: ModGuts -> CoreM ModGuts
mainPass guts = do
  putMsgS "Simple Plugin Pass Run"
  (_, anns) <- getAnnotations deserializeWithData guts
  -- No replacement string is active until a binder's annotation sets one.
  bindsOnlyPass (mapM (changeBind anns Nothing)) guts
-- | Rewrite a binding group, recursing into each binder/expression pair.
changeBind :: UniqFM [ReplaceWith] -> Maybe String -> CoreBind -> CoreM CoreBind
changeBind anns mb_replacement bind = case bind of
  NonRec b e -> fmap (uncurry NonRec) (changeBindPr anns mb_replacement b e)
  Rec bes    -> fmap Rec (mapM (uncurry (changeBindPr anns mb_replacement)) bes)
-- | Rewrite one (binder, rhs) pair. A single 'ReplaceWith' annotation
-- on the binder activates string replacement inside its rhs; more than
-- one annotation is a hard error.
changeBindPr :: UniqFM [ReplaceWith] -> Maybe String -> CoreBndr -> CoreExpr -> CoreM (CoreBndr, CoreExpr)
changeBindPr anns mb_replacement b e = do
  case lookupWithDefaultUFM anns [] b of
    [] -> do
      e' <- changeExpr anns mb_replacement e
      return (b, e')
    [ReplaceWith replace_string] -> do
      e' <- changeExpr anns (Just replace_string) e
      return (b, e')
    _ -> do dflags <- getDynFlags
            error ("Too many change_anns on one binder:" ++ showPpr dflags b)
-- | Walk a Core expression; when a replacement string is active, every
-- string literal is swapped for it. All other constructors are
-- traversed structurally; types, coercions and literals pass through.
changeExpr :: UniqFM [ReplaceWith] -> Maybe String -> CoreExpr -> CoreM CoreExpr
changeExpr anns mb_replacement e = let go = changeExpr anns mb_replacement in case e of
  Lit (LitString _) -> case mb_replacement of
    Nothing -> return e
    Just replacement -> do
      putMsgS "Performing Replacement"
      return $ Lit (LitString (bytesFS (mkFastString replacement)))
  App e1 e2 -> liftM2 App (go e1) (go e2)
  Lam b e -> liftM (Lam b) (go e)
  -- 'Let' may carry its own annotations, so recurse via 'changeBind'.
  Let bind e -> liftM2 Let (changeBind anns mb_replacement bind) (go e)
  Case e b ty alts -> liftM4 Case (go e) (return b) (return ty) (mapM (changeAlt anns mb_replacement) alts)
  Cast e coerce -> liftM2 Cast (go e) (return coerce)
  Tick t e -> liftM (Tick t) (go e)
  -- Var / Type / Coercion / other literals: nothing to rewrite.
  _ -> return e
-- | Rewrite the right-hand side of one case alternative.
changeAlt :: UniqFM [ReplaceWith] -> Maybe String -> CoreAlt -> CoreM CoreAlt
changeAlt anns mb_replacement (con, bs, e) = do
  e' <- changeExpr anns mb_replacement e
  return (con, bs, e')
| sdiehl/ghc | testsuite/tests/plugins/simple-plugin/Simple/Plugin.hs | bsd-3-clause | 3,278 | 0 | 22 | 786 | 1,123 | 555 | 568 | 65 | 9 |
-------------------------------------------------------------
-- Parser for WHILE from Nielson, Nielson and Hankin
-- and various other sources.
-------------------------------------------------------------
module While( prettyWhileFromFile ) where
import WhileAS
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import qualified Text.ParserCombinators.Parsec.Token as P
import Text.ParserCombinators.Parsec.Language( javaStyle )
-- | Read a WHILE program from a file, echo the raw source, then print
-- either the parse error or the resulting AST.
prettyWhileFromFile fname = do
  source <- readFile fname
  putStr source
  case parse program fname source of
    Left err  -> putStr "parse error at " >> print err
    Right ast -> print ast
--renum :: Prog -> Prog
--renum p = rn (1,p)
--rn :: (Int, Stat) -> (Int, Stat)
--rn (x,s) = case s of
-- Assign vi ae _ -> (x+1,Assign vi ae x)
-- Skip _ -> (x+1, Skip x)
-- Seq [Stat] ->
-- If be _ s1 s2 -> do{ (newx, newthen) <- rn (x+1,s1)
-- ; (newerx, newelse) <- rn (newx,s2)
-- ; return (newerx, If be x newthen newelse)
-- }
-- While be _ s -> do{ (newx, news) <- rn (x+1,s)
-- ; return (newx, While be x+1 news)
-- }
-----------------------------------------------------------
-- A program is simply an expression.
-----------------------------------------------------------
-- | A program is one or more statements separated by semicolons; a
-- single statement stays bare, several are wrapped in 'Seq'.
program = do
  stats <- semiSep1 stat
  return $ case stats of
    [single] -> single
    _        -> Seq stats
-- | Parse a single statement. The order of alternatives matters:
-- keyword forms are tried first, assignment last (wrapped in 'try' so a
-- partially-consumed identifier can be backtracked).
stat :: Parser Stat
stat = choice
     [ do { reserved "skip";
            return (Skip 0)
          }
     , ifStat
     , whileStat
     , sequenceStat
     , try assignStat
     ]
-- | Assignment: @ident := arithmetic-expression@ (label initialised to 0).
assignStat :: Parser Stat
assignStat = do
  lhs <- identifier
  _ <- symbol ":="
  rhs <- aritExpr
  return (Assign lhs rhs 0)
-- | Conditional: @if b then s else s@ (label initialised to 0).
ifStat :: Parser Stat
ifStat = do
  reserved "if"
  cond <- boolExpr
  reserved "then"
  thenBranch <- stat
  reserved "else"
  elseBranch <- stat
  return (If cond 0 thenBranch elseBranch)
-- | Loop: @while b do s@ (label initialised to 0).
whileStat :: Parser Stat
whileStat = do
  reserved "while"
  cond <- boolExpr
  reserved "do"
  body <- stat
  return (While cond 0 body)
-- | Parenthesised statement sequence; a single statement stays bare.
sequenceStat :: Parser Stat
sequenceStat = do
  stats <- parens (semiSep1 stat)
  return $ case stats of
    [single] -> single
    _        -> Seq stats
-- | Boolean expression: operator table 'boolOperators' over 'relExpr'.
boolExpr:: Parser BExp
boolExpr = buildExpressionParser boolOperators relExpr
-- | Relational comparison between two arithmetic expressions. The
-- 'try's let the two-character operators backtrack so a lone @<@ or
-- @>@ can still match; the listed order is significant.
relExpr :: Parser BExp
relExpr = do{ arg1 <- aritExpr
            ; op <- choice [string "=", try (string "<>"), try (string "<="), string "<", try (string ">="), string ">"]
            ; arg2 <- aritExpr
            ; return (RelOp op arg1 arg2)
            }
-- | Arithmetic expression: operator table 'aritOperators' over 'simpleArit'.
aritExpr :: Parser AExp
aritExpr = buildExpressionParser aritOperators simpleArit
-- | Operator table for boolean connectives (bool -> bool), from highest
-- to lowest precedence.
boolOperators =
    [ [ prefixOp "not" ]
    , [ binOp "and" AssocRight ]   -- right for shortcircuit
    , [ binOp "or"  AssocRight ]   -- right for shortcircuit
    ]
  where
    binOp name assoc = Infix (reservedOp name >> return (BOp name)) assoc
    prefixOp name = Prefix (reservedOp name >> return (BUnOp name))
-- | Operator table for integer-valued operators, from highest to
-- lowest precedence.
aritOperators =
    [ [ binOp "*" AssocLeft, binOp "/" AssocLeft ]
    , [ binOp "+" AssocLeft, binOp "-" AssocLeft ]
    , [ binOp "&" AssocRight ]   -- bitwise and delivering an int
    , [ binOp "|" AssocRight ]   -- bitwise or delivering an int
    ]
  where
    binOp name assoc = Infix (reservedOp name >> return (AOp name)) assoc
-- | Atomic arithmetic operand: literal, parenthesised expression, or variable.
simpleArit = intLiteral
         <|> parens aritExpr
         <|> variable
-- | Atomic boolean operand: literal or parenthesised expression.
simpleBool = boolLiteral
         <|> parens boolExpr
-- | Parse a boolean literal.
--
-- Fixed: the constructors were inverted — the keyword @false@ produced
-- @BoolLit True@ and @true@ produced @BoolLit False@. The parsed
-- keyword now matches the AST value.
boolLiteral = do{ reserved "false"
                ; return (BoolLit False)
                }
              <|>
              do{ reserved "true"
                ; return (BoolLit True)
                }
-- | Integer literal.
intLiteral = do n <- integer
                return (IntLit n)
-- | Variable reference.
variable = do name <- identifier
              return (Var name)
-----------------------------------------------------------
-- The lexer
-----------------------------------------------------------
-- | Token parser built from the WHILE language definition below.
lexer = P.makeTokenParser whileDef
-- | Language definition: 'javaStyle' with WHILE's keywords and
-- operators, case-insensitive.
whileDef = javaStyle
         { -- Kept the Java single line comments, but officially the language has no comments
           P.reservedNames = [ "true", "false", "do", "else", "not",
                               "if", "then", "while", "skip"
                               -- , "begin", "proc", "is", "end", "val", "res", "malloc"
                             ]
         , P.reservedOpNames= [ "and", "or", "not", "<", "<=", ">", ">=", ":=", "+", "&", "-", "/"]
         , P.opLetter = oneOf (concat (P.reservedOpNames whileDef))
         , P.caseSensitive = False
         }
-- Convenience aliases: the Token-module combinators specialised to 'lexer'.
parens = P.parens lexer
braces = P.braces lexer
semiSep1 = P.semiSep1 lexer
whiteSpace = P.whiteSpace lexer
symbol = P.symbol lexer
identifier = P.identifier lexer
reserved = P.reserved lexer
reservedOp = P.reservedOp lexer
integer = P.integer lexer
charLiteral = P.charLiteral lexer
stringLiteral = P.stringLiteral lexer
| FranklinChen/hugs98-plus-Sep2006 | packages/parsec/examples/while/While.hs | bsd-3-clause | 5,966 | 0 | 14 | 2,317 | 1,291 | 689 | 602 | 103 | 2 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}
{-
This module provides a system which can watch one or more symbols
which are loaded from local modules. If the files containing those
modules are changed, then the code is automatically recompiled and
loaded into the running process.
-}
module Plugins where
import Control.Applicative
import Control.Concurrent.STM
import Control.Concurrent.STM.TMVar
import Control.Monad
import Data.Map (Map)
import Data.Maybe (catMaybes)
import qualified Data.Map as Map
import Data.Monoid (mempty)
import Data.IORef
import Data.String (fromString)
import DynFlags
-- import Filesystem.Path (FilePath, dirname, filename)
import GHC
import GHC.Paths
import GhcMonad (liftIO) -- from ghc7.7 and up you can use the usual
import Language.Haskell.TH.Syntax as TH (Name(Name),NameFlavour(NameG), NameSpace(VarName), OccName(..), ModName(..))
import Module (moduleNameSlashes)
import System.FilePath (takeDirectory, takeFileName)
import System.FSNotify
import Unsafe.Coerce
import qualified Filter as F
import Prelude hiding (filter)
import Language.Haskell.TH (ExpQ, appE, varE)
import Language.Haskell.TH.Lift (lift)
import HscTypes
{-
We need to watch a bunch of files and reload.
We need a way to map from a specific symbol to its loaded value.
What happens when there is an error?
What happens when there are multiple symbols from the same file?
When a module is reloaded how do we ensure all the symbols get reloaded?
There are two phases:
1. reloading all the modules
2. evaluating the symbols
We can start with the brute force variant -- when any module is touched, we just reload everything.
-}
------------------------------------------------------------------------------
-- Helper Functions
------------------------------------------------------------------------------
-- | Extract the module name and occurrence name of a symbol.
--
-- Only global term-level names ('NameG' 'VarName') are accepted;
-- any other kind of 'Name' is a programmer error and calls 'error'.
nameToModFunc :: TH.Name -> (ModuleName, String)
nameToModFunc (Name (OccName occName) (NameG VarName _ (ModName mn))) =
    (mkModuleName mn, occName)
nameToModFunc n = error $ "nameToModFunc failed because Name was not the right kind. " ++ show n
-- | True when the filesystem event reports a file being added.
isAdded :: Event -> Bool
isAdded ev = case ev of
  Added {} -> True
  _        -> False
-- | True when the filesystem event reports a file being modified.
isModified :: Event -> Bool
isModified ev = case ev of
  Modified {} -> True
  _           -> False
-- | Watch a single file (given as directory + file name) and run
-- @action@ whenever it is added or modified.
watchFile :: WatchManager -> (FilePath, FilePath) -> IO () -> IO StopListening
watchFile wm (dir, file) action = watchDir wm dir matches react
  where
    -- Only events whose file name component matches the target file.
    matches ev = takeFileName (eventPath ev) == file
    react ev
      | isAdded ev || isModified ev = action
      | otherwise                   = return ()
-- | Watch a set of files for changes, grouping them by directory so
-- each directory gets a single watch; @action@ runs on add/modify of
-- any listed file.
watchFiles :: WatchManager -> [FilePath] -> IO () -> IO ()
watchFiles wm fps action =
    -- Group file names by directory; an empty dirname means the
    -- current directory. 'print pairs' is debug output.
    do let pairs = Map.toList . Map.fromListWith (++) . map (\(x,y) -> (if x == mempty then "." else x,[y])) $ map splitFileName fps
       print pairs
       mapM_ watchFiles' pairs
    where
      splitFileName fp = (takeDirectory fp, takeFileName fp)
      watchFiles' :: (FilePath, [FilePath]) -> IO StopListening
      watchFiles' (dir, files) =
          watchDir wm dir
                   (\e -> takeFileName (eventPath e) `elem` files)
                   (\e -> if (isAdded e || isModified e)
                          then action
                          else return ())
-- | Wrapper for running a 'Ghc' action with sensible defaults:
-- native code generation ('HscAsm') and in-memory linking, so compiled
-- symbols can be looked up in the running process.
withSession' :: Ghc a -> IO a
withSession' action =
    defaultErrorHandler defaultFatalMessager defaultFlushOut $ do
      runGhc (Just libdir) $ do
        dflags <- getSessionDynFlags
        setSessionDynFlags $ dflags { hscTarget  = HscAsm
                                    , ghcLink    = LinkInMemory
                                    }
        action
------------------------------------------------------------------------------
-- PluginsHandle
------------------------------------------------------------------------------
-- | Shared state for the plugin system.
data PluginsHandle = PluginsHandle
    { phWatchManager :: TMVar WatchManager          -- ^ current fsnotify manager (replaced on each reload)
    , phSymMap       :: TMVar (Map TH.Name HValue)  -- ^ watched symbols and their most recently loaded values
    }
-- | Create a new, empty 'PluginsHandle' with a running watch manager.
newPluginsHandle :: IO PluginsHandle
newPluginsHandle = do
  wm <- startManager
  wmVar <- newTMVarIO wm
  symVar <- newTMVarIO Map.empty
  return (PluginsHandle wmVar symVar)
-- | Set GHC's compilation targets to the modules the symbols live in.
-- (The module names are also printed as debug output.)
setTargets' :: [(ModuleName, String)] -> Ghc ()
setTargets' syms = do
  targets <- forM syms $ \(modName, _) -> do
    liftIO (print (moduleNameString modName))
    guessTarget (moduleNameSlashes modName) Nothing
  setTargets targets
-- | Recompile and reload all watched modules, adding @newSyms@ to the
-- watch set, then refresh the file watches from the new module graph.
--
-- Taking the symbol-map 'TMVar' leaves it empty for the duration of the
-- rebuild, which also serialises concurrent reloads on that TMVar.
--
-- FIXME: we probably need some form of semaphore here to protect use against multiple simultaneous calls
reload :: PluginsHandle
       -> [TH.Name]
       -> IO ()
reload ph newSyms =
    do m <- atomically $ takeTMVar (phSymMap ph)
       m' <- withSession' $ do
               let names = (Map.keys m) ++ newSyms
                   syms  = map nameToModFunc names
               setTargets' syms
               vals <- loadSyms syms
               updateWatches ph
               -- Re-associate every name with its freshly loaded value.
               return $ Map.fromList $ zip names vals
       atomically $ putTMVar (phSymMap ph) m'
-- | Look at the current module graph and update the set of watched
-- files accordingly: a fresh watch manager is swapped in atomically and
-- the old one is stopped, dropping all stale watches.
updateWatches :: PluginsHandle
              -> Ghc ()
updateWatches ph =
    do wm <- liftIO $ do
               newWM <- startManager
               oldWM <- atomically $ do old <- takeTMVar (phWatchManager ph)
                                        putTMVar (phWatchManager ph) newWM
                                        return old
               stopManager oldWM
               return newWM
       modGraph <- getModuleGraph
       -- Source files of every module in the dependency graph.
       let files = catMaybes $ map (ml_hs_file . ms_location) modGraph
       liftIO $ do putStr "Now watching: "
                   print files
       -- Any change to any watched file triggers a full reload.
       watchFiles wm (map fromString files) (reload ph [])
-- | Compile all targets, bring the symbols' modules into scope, and
-- compile each qualified symbol name to an 'HValue'.
--
-- NOTE(review): the result of 'load' is not checked here; compilation
-- failures would surface later in 'compileExpr' — confirm.
loadSyms :: [(ModuleName, String)] -> Ghc [HValue]
loadSyms syms =
    do res <- load LoadAllTargets
       -- Bringing the module into the context
       setContext (map (IIDecl . simpleImportDecl . fst) syms)
       let symNames = map (\(modName, symName) -> moduleNameString modName ++ "." ++ symName) syms
       liftIO $ print symNames
       mapM compileExpr symNames
-- | Look up the value loaded for a 'TH.Name' and 'unsafeCoerce' it to
-- type @a@ (the second argument only fixes @a@ and is never evaluated).
--
-- see also: 'lookupName'
unsafeLookupName :: TH.Name
                 -> a
                 -> PluginsHandle
                 -> IO a
unsafeLookupName n _ ph = do
  sym <- atomically $ do
    symMap <- readTMVar (phSymMap ph)
    maybe (error "Invalid name") return (Map.lookup n symMap)
  return (unsafeCoerce sym)
-- | TH to safely lookup a symbol
--
-- generates a function like:
--
--    lookupName :: TH.Name -> PluginsHandle -> IO a
--
-- where the type 'a' is derived from the actual type of the symbol
-- refered to by 'Name' which must be in scope at compile time.
lookupName :: TH.Name -> ExpQ
lookupName name =
    -- Splices to: unsafeLookupName <lifted name> <the symbol itself>,
    -- so the symbol's compile-time type pins the result type 'a'.
    appE (appE [| unsafeLookupName |] (lift name)) (varE name)
| Happstack/plugins-ng | Plugins.hs | bsd-3-clause | 7,491 | 0 | 18 | 1,944 | 1,638 | 867 | 771 | 123 | 3 |
{-# LANGUAGE RankNTypes #-}
module Main where
import Blinkomatic
import Color (RGB(..), blackRGB, rgb_w2d)
import Control.Exception (bracket)
import Control.Monad (forM_)
import Control.Monad.Identity (Identity(..))
import Control.Monad.State (StateT, evalStateT)
import Control.Monad.Trans (MonadIO(liftIO), lift)
import Control.Wire (Wire, Event, Session(..), Timed(..), countSession_, stepWire, stepSession)
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TVar (TVar, newTVar, readTVar, writeTVar)
import Control.Wire.Unsafe.Event
import Data.Time.Clock
import qualified Data.IntMap as IntMap
import Data.IntMap (IntMap)
import Data.Maybe (fromJust)
import Data.Vector (Vector)
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import Data.Word (Word8)
import qualified Graphics.Rendering.OpenGL as GL
import Graphics.Rendering.OpenGL (($=))
import qualified Graphics.UI.GLFW as GLFW
import Sound.NH.MIDI.Core (MIDI(MidiClock))
-- | Mutable state rendered by the simulator each frame.
data Scene = Scene
  { elWires   :: MV.IOVector Switch          -- ^ per-channel on/off switches (indices 0..7 are drawn)
  , tclLights :: TVar (Vector (RGB Word8))   -- ^ current colours of the TCL pixel strand
  }
-- | Display colour for each EL-wire channel index (0-7).
colorMap :: IntMap (GL.Color3 GL.GLfloat)
colorMap = IntMap.fromList
  [ (0, GL.Color3 1.0 0.0 0.0) -- red
  , (1, GL.Color3 1.0 0.5 0.0) -- dark orange
  , (2, GL.Color3 1.0 0.8 0.0) -- light orange
  , (3, GL.Color3 1.0 1.0 0.0) -- yellow
  , (4, GL.Color3 0.5 1.0 0.0) -- light green
  , (5, GL.Color3 0.0 0.0 1.0) -- blue
  , (6, GL.Color3 0.7 0.7 0.9) -- violet
  , (7, GL.Color3 1.0 1.0 1.0) -- white
  ]
-- | Resize the viewport and set the projection matrix.
-- Used as the GLFW window-size callback; the 'window' argument is
-- unused but required by the callback shape. 45° FOV, near 1, far 100.
resize window w h = do
  -- These are all analogous to the standard OpenGL functions
  GL.viewport $= (GL.Position 0 0, GL.Size (fromIntegral w) (fromIntegral h))
  GL.matrixMode $= GL.Projection
  GL.loadIdentity
  GL.perspective 45 (fromIntegral w / fromIntegral h) 1 100
  GL.matrixMode $= GL.Modelview 0
-- | Print every pending OpenGL error (reading 'GL.errors' also clears
-- the error queue).
printErrors = GL.get GL.errors >>= mapM_ print
-- | Render one frame: a row of quads for the EL-wire channels (lit in
-- their channel colour when closed, dim grey when open) and, above it,
-- a row of quads showing the TCL pixel colours; then swap buffers.
drawScene :: (MonadIO m) => GLFW.Window -> Scene -> m ()
drawScene window scene = liftIO $
  do GL.clear [GL.ColorBuffer, GL.DepthBuffer]
     GL.loadIdentity
     GL.translate $ GL.Vector3 (-20) 0 (-50 :: GL.GLfloat)
     -- el wire: one unit quad per channel, spaced 2 units apart
     GL.renderPrimitive GL.Quads $
       forM_ [0..7] $ \i -> do
         switch <- liftIO $ MV.read (elWires scene) (fromIntegral i)
         forM_ [(0,0), (1,0), (1,1), (0, 1)] $ \(x, y) ->
           let vtx = GL.Vertex3 (x + (fromIntegral i * 2)) y 0 :: GL.Vertex3 GL.GLfloat
           in do case switch of
                   Open -> GL.color (GL.Color3 0.1 0.1 (0.1 :: GL.GLfloat))
                   -- fromJust is safe only while i stays within colorMap's keys (0..7)
                   Close -> GL.color $ fromJust $ IntMap.lookup i colorMap
                 GL.vertex vtx
     -- tcl: one quad per pixel, drawn 10 units above the EL-wire row
     tcl <- atomically $ readTVar $ tclLights scene
     GL.renderPrimitive GL.Quads $
       forM_ [0..24::Int] $ \i -> do
         let light = tcl V.! (fromIntegral i)
         forM_ [(0,0), (1,0), (1,1), (0, 1)] $ \(x, y) ->
           let vtx = GL.Vertex3 (x + (fromIntegral i * 2)) (y+10) 0 :: GL.Vertex3 GL.GLfloat
           in do let RGB r g b = rgb_w2d light
                 GL.color (GL.Color3 (realToFrac r) (realToFrac g) (realToFrac b :: GL.GLfloat))
                 GL.vertex vtx
     printErrors
     GL.flush
     GLFW.swapBuffers window
-- | Main simulation loop (never returns; note the result type @m a@):
-- step the clock session and the light-show wire, perform any emitted
-- actions against the 'Scene', and redraw only when the wire produced
-- an event.
simulateShow :: (MonadIO m) =>
                GLFW.Window
             -> Scene
             -> Session m (Timed Int ())
             -> MidiLights
             -> m a
simulateShow window scene s0 w0 = loop s0 w0
  where
    loop s' w' = do
      (ds, s) <- stepSession s'
      -- The wire runs in Identity, fed no input events.
      let Identity (mx, w) = stepWire w' ds (Right NoEvent)
      case mx of
        (Right (Event actions)) ->
          do let perform (Print s) = liftIO $ putStrLn s
                 perform (SendCmd (Command channel switch)) =
                   liftIO $ MV.write (elWires scene) (fromIntegral channel) switch
                 perform (TCL tcl) = liftIO $ atomically $ writeTVar (tclLights scene) tcl
             mapM_ perform actions
             liftIO $ GLFW.pollEvents
             drawScene window scene
        -- No event this step: skip polling/drawing entirely.
        _ -> return ()
      loop s w
-- | Open a window and run the given light show in the simulator at the
-- given tempo (beats per minute).
--
-- Fixed: GLFW initialisation and window creation are now checked
-- explicitly. Previously a failed 'GLFW.init' or 'GLFW.createWindow'
-- died with an opaque pattern-match failure (@\\True ->@ and
-- @(Just window) <-@); each failure now raises a descriptive error.
simulate :: Double -> MidiLights -> IO ()
simulate bpm midiLights =
  bracket GLFW.init (const GLFW.terminate) $ \ok ->
    if not ok
      then error "simulate: GLFW.init failed"
      else do
        mWindow <- GLFW.createWindow 640 480 "blinkomatic" Nothing Nothing
        window <- case mWindow of
                    Just w  -> return w
                    Nothing -> error "simulate: GLFW.createWindow failed"
        GLFW.makeContextCurrent (Just window)
        GLFW.setWindowSizeCallback window (Just resize)
        GL.depthFunc $= Just GL.Less
        -- 10 EL-wire channels, all off; 25 TCL pixels, all black.
        el <- MV.replicate 10 Open
        tcl <- atomically $ newTVar $ V.replicate 25 blackRGB
        simulateShow window (Scene el tcl) (fakeMidiClock bpm) midiLights
        return ()
-- | A session that fakes a MIDI clock at the given tempo, emitting the
-- number of whole clock pulses elapsed since the last step (24 pulses
-- per quarter note, the MIDI clock rate).
fakeMidiClock :: (MonadIO m) => Double -> MidiSession m
fakeMidiClock bpm =
  Session $ do
    t0 <- liftIO getCurrentTime
    return (Timed 0 (), loop t0)
  where
    -- seconds per beat = 60/bpm; 24 MIDI clock pulses per beat.
    secondsPerPulse :: NominalDiffTime
    secondsPerPulse = ((1 / (realToFrac bpm)) * 60) / 24
    loop t' =
      Session $ do
        t <- liftIO getCurrentTime
        let dt = diffUTCTime t t'
            dmc :: Int
            dmc = floor (realToFrac (dt / secondsPerPulse) :: Double)
        if dt > secondsPerPulse
          -- Advance the reference time by exactly the pulses consumed,
          -- so fractional remainder carries over (no drift).
          then return (Timed dmc (), loop (addUTCTime ((fromIntegral dmc) * secondsPerPulse) t'))
          else return (Timed 0 (), loop t')
-- | Run the simulator at 120 BPM with the 'tclFade' light show.
main :: IO ()
main = simulate 120 tclFade
| n-heptane-lab/blinkomatic | Simulatomatic.hs | bsd-3-clause | 5,363 | 3 | 30 | 1,441 | 1,996 | 1,047 | 949 | 121 | 4 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Setup
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : lemmih@gmail.com
-- Stability : provisional
-- Portability : portable
--
--
-----------------------------------------------------------------------------
module Distribution.Client.Setup
( globalCommand, GlobalFlags(..), defaultGlobalFlags, globalRepos
, configureCommand, ConfigFlags(..), filterConfigureFlags
, configureExCommand, ConfigExFlags(..), defaultConfigExFlags
, configureExOptions
, buildCommand, BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..)
, replCommand, testCommand, benchmarkCommand
, installCommand, InstallFlags(..), installOptions, defaultInstallFlags
, listCommand, ListFlags(..)
, updateCommand
, upgradeCommand
, uninstallCommand
, infoCommand, InfoFlags(..)
, fetchCommand, FetchFlags(..)
, freezeCommand, FreezeFlags(..)
, getCommand, unpackCommand, GetFlags(..)
, checkCommand
, formatCommand
, uploadCommand, UploadFlags(..)
, reportCommand, ReportFlags(..)
, runCommand
, initCommand, IT.InitFlags(..)
, sdistCommand, SDistFlags(..), SDistExFlags(..), ArchiveFormat(..)
, win32SelfUpgradeCommand, Win32SelfUpgradeFlags(..)
, actAsSetupCommand, ActAsSetupFlags(..)
, sandboxCommand, defaultSandboxLocation, SandboxFlags(..)
, execCommand, ExecFlags(..)
, userConfigCommand, UserConfigFlags(..)
, parsePackageArgs
--TODO: stop exporting these:
, showRepo
, parseRepo
, readRepo
) where
import Distribution.Client.Types
( Username(..), Password(..), Repo(..), RemoteRepo(..), LocalRepo(..) )
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.Dependency.Types
( AllowNewer(..), PreSolver(..) )
import qualified Distribution.Client.Init.Types as IT
( InitFlags(..), PackageType(..) )
import Distribution.Client.Targets
( UserConstraint, readUserConstraint )
import Distribution.Utils.NubList
( NubList, toNubList, fromNubList)
import Distribution.Simple.Compiler (PackageDB)
import Distribution.Simple.Program
( defaultProgramConfiguration )
import Distribution.Simple.Command hiding (boolOpt, boolOpt')
import qualified Distribution.Simple.Command as Command
import qualified Distribution.Simple.Setup as Cabal
import Distribution.Simple.Setup
( ConfigFlags(..), BuildFlags(..), ReplFlags
, TestFlags(..), BenchmarkFlags(..)
, SDistFlags(..), HaddockFlags(..)
, readPackageDbList, showPackageDbList
, Flag(..), toFlag, fromFlag, flagToMaybe, flagToList
, optionVerbosity, boolOpt, boolOpt', trueArg, falseArg, optionNumJobs )
import Distribution.Simple.InstallDirs
( PathTemplate, InstallDirs(sysconfdir)
, toPathTemplate, fromPathTemplate )
import Distribution.Version
( Version(Version), anyVersion, thisVersion )
import Distribution.Package
( PackageIdentifier, packageName, packageVersion, Dependency(..) )
import Distribution.PackageDescription
( BuildType(..), RepoKind(..) )
import Distribution.Text
( Text(..), display )
import Distribution.ReadE
( ReadE(..), readP_to_E, succeedReadE )
import qualified Distribution.Compat.ReadP as Parse
( ReadP, readP_to_S, readS_to_P, char, munch1, pfail, sepBy1, (+++) )
import Distribution.Verbosity
( Verbosity, normal )
import Distribution.Simple.Utils
( wrapText, wrapLine )
import Data.Char
( isSpace, isAlphaNum )
import Data.List
( intercalate, deleteFirstsBy )
import Data.Maybe
( listToMaybe, maybeToList, fromMaybe )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
( Monoid(..) )
#endif
import Control.Monad
( liftM )
import System.FilePath
( (</>) )
import Network.URI
( parseAbsoluteURI, uriToString )
-- ------------------------------------------------------------
-- * Global flags
-- ------------------------------------------------------------
-- | Flags that apply at the top level, not to any sub-command.
data GlobalFlags = GlobalFlags {
    globalVersion :: Flag Bool, -- ^ Print version information and exit.
    globalNumericVersion :: Flag Bool, -- ^ Print just the version number and exit.
    globalConfigFile :: Flag FilePath, -- ^ Alternate location for the config file.
    globalSandboxConfigFile :: Flag FilePath, -- ^ Alternate location for the sandbox config file.
    globalRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers.
    globalCacheDir :: Flag FilePath, -- ^ Where downloads from remote repos are cached.
    globalLocalRepos :: NubList FilePath, -- ^ Directories used as local repositories.
    globalLogsDir :: Flag FilePath, -- ^ Where to put log files.
    globalWorldFile :: Flag FilePath, -- ^ Location of the world file.
    globalRequireSandbox :: Flag Bool, -- ^ Require a sandbox for sandbox-aware commands.
    globalIgnoreSandbox :: Flag Bool -- ^ Ignore any existing sandbox.
  }
-- | The default global flags: the 'Bool'-valued flags are explicitly
-- @'Flag' False@ and everything else is left unset (inherited from
-- 'mempty' of the 'Monoid' instance below).
defaultGlobalFlags :: GlobalFlags
defaultGlobalFlags = mempty
  { globalVersion        = Flag False
  , globalNumericVersion = Flag False
  , globalRequireSandbox = Flag False
  , globalIgnoreSandbox  = Flag False
  }
-- | The top-level command: declares the global flags that may precede
-- any sub-command, and renders the grouped listing of all available
-- sub-commands in the help output.
globalCommand :: [Command action] -> CommandUI GlobalFlags
globalCommand commands = CommandUI {
    commandName = "",
    commandSynopsis =
      "Command line interface to the Haskell Cabal infrastructure.",
    commandUsage = \pname ->
      "See http://www.haskell.org/cabal/ for more information.\n"
      ++ "\n"
      ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n",
    commandDescription = Just $ \pname ->
      let
        commands' = commands ++ [commandAddAction helpCommandUI undefined]
        cmdDescs = getNormalCommandDescriptions commands'
        -- if new commands are added, we want them to appear even if they
        -- are not included in the custom listing below. Thus, we calculate
        -- the `otherCmds` list and append it under the `other` category.
        -- Alternatively, a new testcase could be added that ensures that
        -- the set of commands listed here is equal to the set of commands
        -- that are actually available.
        otherCmds = deleteFirstsBy (==) (map fst cmdDescs)
          [ "help"
          , "update"
          , "install"
          , "fetch"
          , "list"
          , "info"
          , "user-config"
          , "get"
          , "init"
          , "configure"
          , "build"
          , "clean"
          , "run"
          , "repl"
          , "test"
          , "bench"
          , "check"
          , "sdist"
          , "upload"
          , "report"
          , "freeze"
          , "haddock"
          , "hscolour"
          , "copy"
          , "register"
          , "sandbox"
          , "exec"
          ]
        maxlen = maximum $ [length name | (name, _) <- cmdDescs]
        align str = str ++ replicate (maxlen - length str) ' '
        startGroup n = " ["++n++"]"
        par = ""
        addCmd n = case lookup n cmdDescs of
                     Nothing -> ""
                     Just d -> " " ++ align n ++ " " ++ d
        addCmdCustom n d = case lookup n cmdDescs of -- make sure that the
                                                     -- command still exists.
                     Nothing -> ""
                     Just _ -> " " ++ align n ++ " " ++ d
      in
         "Commands:\n"
      ++ unlines (
        [ startGroup "global"
        , addCmd "update"
        , addCmd "install"
        , par
        , addCmd "help"
        , addCmd "info"
        , addCmd "list"
        , addCmd "fetch"
        , addCmd "user-config"
        , par
        , startGroup "package"
        , addCmd "get"
        , addCmd "init"
        , par
        , addCmd "configure"
        , addCmd "build"
        , addCmd "clean"
        , par
        , addCmd "run"
        , addCmd "repl"
        , addCmd "test"
        , addCmd "bench"
        , par
        , addCmd "check"
        , addCmd "sdist"
        , addCmd "upload"
        , addCmd "report"
        , par
        , addCmd "freeze"
        , addCmd "haddock"
        , addCmd "hscolour"
        , addCmd "copy"
        , addCmd "register"
        , par
        , startGroup "sandbox"
        , addCmd "sandbox"
        , addCmd "exec"
        , addCmdCustom "repl" "Open interpreter with access to sandbox packages."
        ] ++ if null otherCmds then [] else par
                                           :startGroup "other"
                                           :[addCmd n | n <- otherCmds])
      ++ "\n"
      ++ "For more information about a command use:\n"
      ++ "   " ++ pname ++ " COMMAND --help\n"
      ++ "or " ++ pname ++ " help COMMAND\n"
      ++ "\n"
      ++ "To install Cabal packages from hackage use:\n"
      ++ "  " ++ pname ++ " install foo [--dry-run]\n"
      ++ "\n"
      ++ "Occasionally you need to update the list of available packages:\n"
      ++ "  " ++ pname ++ " update\n",
    commandNotes = Nothing,
    commandDefaultFlags = mempty,
    commandOptions = \showOrParseArgs ->
      -- In --help output only the first few options are shown; parsing
      -- accepts them all.
      (case showOrParseArgs of ShowArgs -> take 6; ParseArgs -> id)
      [option ['V'] ["version"]
         "Print version information"
         globalVersion (\v flags -> flags { globalVersion = v })
         trueArg
      ,option [] ["numeric-version"]
         "Print just the version number"
         globalNumericVersion (\v flags -> flags { globalNumericVersion = v })
         trueArg
      ,option [] ["config-file"]
         "Set an alternate location for the config file"
         globalConfigFile (\v flags -> flags { globalConfigFile = v })
         (reqArgFlag "FILE")
      ,option [] ["sandbox-config-file"]
         "Set an alternate location for the sandbox config file (default: './cabal.sandbox.config')"
         -- Fixed: the getter here was previously 'globalConfigFile',
         -- which made this option read its current value from the wrong
         -- field (the main config file) when showing/round-tripping flags.
         globalSandboxConfigFile (\v flags -> flags { globalSandboxConfigFile = v })
         (reqArgFlag "FILE")
      ,option [] ["require-sandbox"]
         "requiring the presence of a sandbox for sandbox-aware commands"
         globalRequireSandbox (\v flags -> flags { globalRequireSandbox = v })
         (boolOpt' ([], ["require-sandbox"]) ([], ["no-require-sandbox"]))
      ,option [] ["ignore-sandbox"]
         "Ignore any existing sandbox"
         globalIgnoreSandbox (\v flags -> flags { globalIgnoreSandbox = v })
         trueArg
      ,option [] ["remote-repo"]
         "The name and url for a remote repository"
         globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v })
         (reqArg' "NAME:URL" (toNubList . maybeToList . readRepo) (map showRepo . fromNubList))
      ,option [] ["remote-repo-cache"]
         "The location where downloads from all remote repos are cached"
         globalCacheDir (\v flags -> flags { globalCacheDir = v })
         (reqArgFlag "DIR")
      ,option [] ["local-repo"]
         "The location of a local repository"
         globalLocalRepos (\v flags -> flags { globalLocalRepos = v })
         (reqArg' "DIR" (\x -> toNubList [x]) fromNubList)
      ,option [] ["logs-dir"]
         "The location to put log files"
         globalLogsDir (\v flags -> flags { globalLogsDir = v })
         (reqArgFlag "DIR")
      ,option [] ["world-file"]
         "The location of the world file"
         globalWorldFile (\v flags -> flags { globalWorldFile = v })
         (reqArgFlag "FILE")
      ]
  }
-- Combine 'GlobalFlags' field by field using each field's own 'Monoid'.
instance Monoid GlobalFlags where
  mempty = GlobalFlags {
    globalVersion           = mempty,
    globalNumericVersion    = mempty,
    globalConfigFile        = mempty,
    globalSandboxConfigFile = mempty,
    globalRemoteRepos       = mempty,
    globalCacheDir          = mempty,
    globalLocalRepos        = mempty,
    globalLogsDir           = mempty,
    globalWorldFile         = mempty,
    globalRequireSandbox    = mempty,
    globalIgnoreSandbox     = mempty
  }
  mappend a b = GlobalFlags {
    globalVersion           = combine globalVersion,
    globalNumericVersion    = combine globalNumericVersion,
    globalConfigFile        = combine globalConfigFile,
    -- Fixed: this previously combined 'globalConfigFile' again, so any
    -- sandbox config file setting was silently dropped on 'mappend'
    -- (e.g. when merging saved config with command-line flags).
    globalSandboxConfigFile = combine globalSandboxConfigFile,
    globalRemoteRepos       = combine globalRemoteRepos,
    globalCacheDir          = combine globalCacheDir,
    globalLocalRepos        = combine globalLocalRepos,
    globalLogsDir           = combine globalLogsDir,
    globalWorldFile         = combine globalWorldFile,
    globalRequireSandbox    = combine globalRequireSandbox,
    globalIgnoreSandbox     = combine globalIgnoreSandbox
  }
    where combine field = field a `mappend` field b
-- | Assemble the list of package repositories from the global flags:
-- every remote repo (cached under the remote-repo-cache directory)
-- followed by every local repo directory.
globalRepos :: GlobalFlags -> [Repo]
globalRepos globalFlags =
    map remoteRepo (fromNubList (globalRemoteRepos globalFlags))
    ++ map localRepo (fromNubList (globalLocalRepos globalFlags))
  where
    remoteRepo remote =
      Repo (Left remote)
           (fromFlag (globalCacheDir globalFlags) </> remoteRepoName remote)
    localRepo dir = Repo (Right LocalRepo) dir
-- ------------------------------------------------------------
-- * Config flags
-- ------------------------------------------------------------
-- | The cabal-install @configure@ command: Cabal's own configure
-- command with its default flags cleared and usage examples appended
-- to the notes.
configureCommand :: CommandUI ConfigFlags
configureCommand = base
  { commandDefaultFlags = mempty
  , commandNotes = Just $ \pname ->
      maybe "" (\notes -> notes pname ++ "\n") (commandNotes base)
      ++ "Examples:\n"
      ++ " " ++ pname ++ " configure\n"
      ++ " Configure with defaults;\n"
      ++ " " ++ pname ++ " configure --enable-tests -fcustomflag\n"
      ++ " Configure building package including tests,\n"
      ++ " with some package-specific flag.\n"
  }
  where
    base = Cabal.configureCommand defaultProgramConfiguration
-- | All option fields of the cabal-install @configure@ command
-- (i.e. the options inherited from Cabal's configure command).
configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags]
configureOptions = commandOptions configureCommand
-- | Strip out or translate configure flags that the given version of the
-- Cabal library does not understand, so that running an older @Setup@
-- still accepts the command line we generate.  Each @flags_X@ below
-- builds on the next-newer translation, removing one more generation of
-- flags as we go further back in time.
filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags
filterConfigureFlags flags cabalLibVersion
  -- NB: we expect the latest version to be the most common case.
  | cabalLibVersion >= Version [1,22,0] [] = flags_latest
  | cabalLibVersion < Version [1,3,10] [] = flags_1_3_10
  | cabalLibVersion < Version [1,10,0] [] = flags_1_10_0
  | cabalLibVersion < Version [1,14,0] [] = flags_1_14_0
  | cabalLibVersion < Version [1,18,0] [] = flags_1_18_0
  | cabalLibVersion < Version [1,19,1] [] = flags_1_19_0
  | cabalLibVersion < Version [1,19,2] [] = flags_1_19_1
  | cabalLibVersion < Version [1,21,1] [] = flags_1_20_0
  | cabalLibVersion < Version [1,22,0] [] = flags_1_21_0
  | otherwise = flags_latest
  where
    -- Cabal >= 1.19.1 uses '--dependency' and does not need '--constraint'.
    flags_latest = flags        { configConstraints = [] }
    -- Cabal < 1.22 doesn't know about '--disable-debug-info'.
    flags_1_21_0 = flags_latest { configDebugInfo = NoFlag }
    -- Cabal < 1.21.1 doesn't know about 'disable-relocatable'
    -- Cabal < 1.21.1 doesn't know about 'enable-profiling'
    -- (so translate the unified profiling flags back into the older
    -- per-exe/per-lib flags).
    flags_1_20_0 =
      flags_1_21_0 { configRelocatable = NoFlag
                   , configProf = NoFlag
                   , configProfExe = configProf flags
                   , configProfLib =
                     mappend (configProf flags) (configProfLib flags)
                   , configCoverage = NoFlag
                   , configLibCoverage = configCoverage flags
                   }
    -- Cabal < 1.19.2 doesn't know about '--exact-configuration' and
    -- '--enable-library-stripping'.
    flags_1_19_1 = flags_1_20_0 { configExactConfiguration = NoFlag
                                , configStripLibs = NoFlag }
    -- Cabal < 1.19.1 uses '--constraint' instead of '--dependency'.
    flags_1_19_0 = flags_1_19_1 { configDependencies = []
                                , configConstraints = configConstraints flags }
    -- Cabal < 1.18.0 doesn't know about --extra-prog-path and --sysconfdir.
    flags_1_18_0 = flags_1_19_0 { configProgramPathExtra = toNubList []
                                , configInstallDirs = configInstallDirs_1_18_0}
    configInstallDirs_1_18_0 = (configInstallDirs flags) { sysconfdir = NoFlag }
    -- Cabal < 1.14.0 doesn't know about '--disable-benchmarks'.
    flags_1_14_0 = flags_1_18_0 { configBenchmarks = NoFlag }
    -- Cabal < 1.10.0 doesn't know about '--disable-tests'.
    flags_1_10_0 = flags_1_14_0 { configTests = NoFlag }
    -- Cabal < 1.3.10 does not grok the '--constraints' flag.
    flags_1_3_10 = flags_1_10_0 { configConstraints = [] }
-- ------------------------------------------------------------
-- * Config extra flags
-- ------------------------------------------------------------
-- | cabal configure takes some extra flags beyond runghc Setup configure
--
data ConfigExFlags = ConfigExFlags {
    configCabalVersion :: Flag Version, -- ^ Which version of the Cabal lib to build with.
    configExConstraints:: [UserConstraint], -- ^ User-specified constraints on packages.
    configPreferences  :: [Dependency], -- ^ Soft version preferences.
    configSolver       :: Flag PreSolver, -- ^ Which dependency solver to use.
    configAllowNewer   :: Flag AllowNewer -- ^ Which upper bounds to ignore, if any.
  }
-- | Default extra configure flags: use the default solver and do not
-- relax any upper bounds ('AllowNewerNone'); everything else unset.
defaultConfigExFlags :: ConfigExFlags
defaultConfigExFlags = mempty { configSolver = Flag defaultSolver
                              , configAllowNewer = Flag AllowNewerNone }
-- | The @configure@ command extended with cabal-install-specific
-- options ('ConfigExFlags').  The Cabal-level options that cabal-install
-- manages itself (constraints, dependencies, exact configuration) are
-- filtered out of the inherited option list.
configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags)
configureExCommand = configureCommand {
    commandDefaultFlags = (mempty, defaultConfigExFlags),
    commandOptions = \showOrParseArgs ->
         liftOptions fst replaceFst
           (filter (not . (`elem` ["constraint", "dependency", "exact-configuration"])
                    . optionName)
                   (configureOptions showOrParseArgs))
      ++ liftOptions snd replaceSnd (configureExOptions showOrParseArgs)
  }
  where
    replaceFst a (_, b) = (a, b)
    replaceSnd b (a, _) = (a, b)
-- | The cabal-install-specific options added on top of Cabal's
-- configure options: Cabal lib version selection, constraints,
-- preferences, solver choice and @--allow-newer@.
configureExOptions :: ShowOrParseArgs -> [OptionField ConfigExFlags]
configureExOptions _showOrParseArgs =
  [ option [] ["cabal-lib-version"]
      ("Select which version of the Cabal lib to use to build packages "
      ++ "(useful for testing).")
      configCabalVersion (\v flags -> flags { configCabalVersion = v })
      (reqArg "VERSION" (readP_to_E ("Cannot parse cabal lib version: "++)
                                    (fmap toFlag parse))
                        (map display . flagToList))
  , option [] ["constraint"]
      "Specify constraints on a package (version, installed/source, flags)"
      configExConstraints (\v flags -> flags { configExConstraints = v })
      -- Each occurrence parses to a singleton list; occurrences are
      -- accumulated via the '[UserConstraint]' monoid.
      (reqArg "CONSTRAINT"
              (fmap (\x -> [x]) (ReadE readUserConstraint))
              (map display))
  , option [] ["preference"]
      "Specify preferences (soft constraints) on the version of a package"
      configPreferences (\v flags -> flags { configPreferences = v })
      (reqArg "CONSTRAINT"
              (readP_to_E (const "dependency expected")
                          (fmap (\x -> [x]) parse))
              (map display))
  , optionSolver configSolver (\v flags -> flags { configSolver = v })
  , option [] ["allow-newer"]
      ("Ignore upper bounds in all dependencies or " ++ allowNewerArgument)
      configAllowNewer (\v flags -> flags { configAllowNewer = v})
      -- The argument is optional: bare '--allow-newer' means relax all
      -- upper bounds ('AllowNewerAll').
      (optArg allowNewerArgument
              (fmap Flag allowNewerParser) (Flag AllowNewerAll)
              allowNewerPrinter)
  ]
  where allowNewerArgument = "DEPS"
-- Combine 'ConfigExFlags' field by field with each field's own 'Monoid'.
instance Monoid ConfigExFlags where
  mempty = ConfigExFlags mempty mempty mempty mempty mempty
  mappend a b =
      ConfigExFlags (merge configCabalVersion)
                    (merge configExConstraints)
                    (merge configPreferences)
                    (merge configSolver)
                    (merge configAllowNewer)
    where merge field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Build flags
-- ------------------------------------------------------------
-- | Whether the check for outdated add-source dependencies should be
-- skipped (set by the sandbox-only @--only@ flag of the build commands).
data SkipAddSourceDepsCheck =
  SkipAddSourceDepsCheck | DontSkipAddSourceDepsCheck
  deriving Eq
-- | cabal-install's extra flags for the build-like commands.
data BuildExFlags = BuildExFlags {
  buildOnly     :: Flag SkipAddSourceDepsCheck -- ^ Skip reinstalling add-source deps.
}
-- | The cabal-install-specific options shared by the build-like
-- commands (@build@, @repl@, @run@, @test@, @bench@).
buildExOptions :: ShowOrParseArgs -> [OptionField BuildExFlags]
buildExOptions _showOrParseArgs =
  [ option [] ["only"]
      "Don't reinstall add-source dependencies (sandbox-only)"
      buildOnly (\v flags -> flags { buildOnly = v })
      (noArg (Flag SkipAddSourceDepsCheck))
  ]
-- | The @build@ command: Cabal's build command plus the sandbox-aware
-- extra options from 'buildExOptions'.
buildCommand :: CommandUI (BuildFlags, BuildExFlags)
buildCommand = cabalBuild
  { commandDefaultFlags = (commandDefaultFlags cabalBuild, mempty)
  , commandOptions = \showOrParseArgs ->
         liftOptions fst putFst (commandOptions cabalBuild showOrParseArgs)
      ++ liftOptions snd putSnd (buildExOptions showOrParseArgs)
  }
  where
    putFst a (_, b) = (a, b)
    putSnd b (a, _) = (a, b)
    cabalBuild = Cabal.buildCommand defaultProgramConfiguration
-- Combine 'BuildExFlags' field by field with each field's own 'Monoid'.
instance Monoid BuildExFlags where
  mempty = BuildExFlags mempty
  mappend a b = BuildExFlags (buildOnly a `mappend` buildOnly b)
-- ------------------------------------------------------------
-- * Repl command
-- ------------------------------------------------------------
-- | The @repl@ command: Cabal's repl command plus the sandbox-aware
-- extra options from 'buildExOptions'.
replCommand :: CommandUI (ReplFlags, BuildExFlags)
replCommand = cabalRepl
  { commandDefaultFlags = (commandDefaultFlags cabalRepl, mempty)
  , commandOptions = \showOrParseArgs ->
         liftOptions fst putFst (commandOptions cabalRepl showOrParseArgs)
      ++ liftOptions snd putSnd (buildExOptions showOrParseArgs)
  }
  where
    putFst a (_, b) = (a, b)
    putSnd b (a, _) = (a, b)
    cabalRepl = Cabal.replCommand defaultProgramConfiguration
-- ------------------------------------------------------------
-- * Test command
-- ------------------------------------------------------------
-- | The @test@ command: Cabal's test command, plus Cabal's build
-- options (tests are built before running), plus the sandbox-aware
-- extra options from 'buildExOptions'.
testCommand :: CommandUI (TestFlags, BuildFlags, BuildExFlags)
testCommand = parent
  { commandDefaultFlags = ( commandDefaultFlags parent
                          , Cabal.defaultBuildFlags
                          , mempty )
  , commandOptions = \showOrParseArgs ->
         liftOptions fstOf3 putFst (commandOptions parent showOrParseArgs)
      ++ liftOptions sndOf3 putSnd (Cabal.buildOptions progConf showOrParseArgs)
      ++ liftOptions trdOf3 putTrd (buildExOptions showOrParseArgs)
  }
  where
    fstOf3 (a, _, _) = a
    sndOf3 (_, b, _) = b
    trdOf3 (_, _, c) = c
    putFst a (_, b, c) = (a, b, c)
    putSnd b (a, _, c) = (a, b, c)
    putTrd c (a, b, _) = (a, b, c)
    parent   = Cabal.testCommand
    progConf = defaultProgramConfiguration
-- ------------------------------------------------------------
-- * Bench command
-- ------------------------------------------------------------
-- | The @bench@ command: Cabal's benchmark command, plus Cabal's build
-- options (benchmarks are built before running), plus the sandbox-aware
-- extra options from 'buildExOptions'.
benchmarkCommand :: CommandUI (BenchmarkFlags, BuildFlags, BuildExFlags)
benchmarkCommand = parent
  { commandDefaultFlags = ( commandDefaultFlags parent
                          , Cabal.defaultBuildFlags
                          , mempty )
  , commandOptions = \showOrParseArgs ->
         liftOptions fstOf3 putFst (commandOptions parent showOrParseArgs)
      ++ liftOptions sndOf3 putSnd (Cabal.buildOptions progConf showOrParseArgs)
      ++ liftOptions trdOf3 putTrd (buildExOptions showOrParseArgs)
  }
  where
    fstOf3 (a, _, _) = a
    sndOf3 (_, b, _) = b
    trdOf3 (_, _, c) = c
    putFst a (_, b, c) = (a, b, c)
    putSnd b (a, _, c) = (a, b, c)
    putTrd c (a, b, _) = (a, b, c)
    parent   = Cabal.benchmarkCommand
    progConf = defaultProgramConfiguration
-- ------------------------------------------------------------
-- * Fetch command
-- ------------------------------------------------------------
-- | Flags for the @fetch@ command.  The solver-related fields mirror
-- the shared solver options (see 'optionSolverFlags').
data FetchFlags = FetchFlags {
--    fetchOutput    :: Flag FilePath,
    fetchDeps      :: Flag Bool, -- ^ Also resolve and fetch dependencies.
    fetchDryRun    :: Flag Bool, -- ^ Only print what would be fetched.
    fetchSolver           :: Flag PreSolver,
    fetchMaxBackjumps     :: Flag Int,
    fetchReorderGoals     :: Flag Bool,
    fetchIndependentGoals :: Flag Bool,
    fetchShadowPkgs       :: Flag Bool,
    fetchStrongFlags      :: Flag Bool,
    fetchVerbosity :: Flag Verbosity
  }
-- | Defaults for @fetch@: fetch dependencies too, no dry run, default
-- solver settings, normal verbosity.
defaultFetchFlags :: FetchFlags
defaultFetchFlags = FetchFlags {
--  fetchOutput    = mempty,
    fetchDeps      = toFlag True,
    fetchDryRun    = toFlag False,
    fetchSolver           = Flag defaultSolver,
    fetchMaxBackjumps     = Flag defaultMaxBackjumps,
    fetchReorderGoals     = Flag False,
    fetchIndependentGoals = Flag False,
    fetchShadowPkgs       = Flag False,
    fetchStrongFlags      = Flag False,
    fetchVerbosity = toFlag normal
  }
-- | The @fetch@ command: download package tarballs (and, by default,
-- those of their resolved dependencies) for later installation.
fetchCommand :: CommandUI FetchFlags
fetchCommand = CommandUI {
    commandName         = "fetch",
    commandSynopsis     = "Downloads packages for later installation.",
    commandUsage        = usageAlternatives "fetch" [ "[FLAGS] PACKAGES"
                                                    ],
    commandDescription  = Just $ \_ ->
          "Note that it currently is not possible to fetch the dependencies for a\n"
       ++ "package in the current directory.\n",
    commandNotes        = Nothing,
    commandDefaultFlags = defaultFetchFlags,
    commandOptions      = \ showOrParseArgs -> [
         optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v })
--     , option "o" ["output"]
--         "Put the package(s) somewhere specific rather than the usual cache."
--         fetchOutput (\v flags -> flags { fetchOutput = v })
--         (reqArgFlag "PATH")
       , option [] ["dependencies", "deps"]
           "Resolve and fetch dependencies (default)"
           fetchDeps (\v flags -> flags { fetchDeps = v })
           trueArg
       , option [] ["no-dependencies", "no-deps"]
           "Ignore dependencies"
           fetchDeps (\v flags -> flags { fetchDeps = v })
           falseArg
       , option [] ["dry-run"]
           "Do not install anything, only print what would be installed."
           fetchDryRun (\v flags -> flags { fetchDryRun = v })
           trueArg
       ] ++
       -- Solver options are shared with install/freeze via
       -- 'optionSolverFlags'.
       optionSolver      fetchSolver           (\v flags -> flags { fetchSolver           = v }) :
       optionSolverFlags showOrParseArgs
                         fetchMaxBackjumps     (\v flags -> flags { fetchMaxBackjumps     = v })
                         fetchReorderGoals     (\v flags -> flags { fetchReorderGoals     = v })
                         fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v })
                         fetchShadowPkgs       (\v flags -> flags { fetchShadowPkgs       = v })
                         fetchStrongFlags      (\v flags -> flags { fetchStrongFlags      = v })
  }
-- ------------------------------------------------------------
-- * Freeze command
-- ------------------------------------------------------------
-- | Flags for the @freeze@ command.  The solver-related fields mirror
-- the shared solver options (see 'optionSolverFlags').
data FreezeFlags = FreezeFlags {
    freezeDryRun     :: Flag Bool, -- ^ Only print what would be frozen.
    freezeTests      :: Flag Bool, -- ^ Also freeze deps of test suites.
    freezeBenchmarks :: Flag Bool, -- ^ Also freeze deps of benchmarks.
    freezeSolver           :: Flag PreSolver,
    freezeMaxBackjumps     :: Flag Int,
    freezeReorderGoals     :: Flag Bool,
    freezeIndependentGoals :: Flag Bool,
    freezeShadowPkgs       :: Flag Bool,
    freezeStrongFlags      :: Flag Bool,
    freezeVerbosity :: Flag Verbosity
  }
-- | Defaults for @freeze@: no dry run, tests and benchmarks excluded,
-- default solver settings, normal verbosity.
defaultFreezeFlags :: FreezeFlags
defaultFreezeFlags = FreezeFlags {
    freezeDryRun     = toFlag False,
    freezeTests      = toFlag False,
    freezeBenchmarks = toFlag False,
    freezeSolver           = Flag defaultSolver,
    freezeMaxBackjumps     = Flag defaultMaxBackjumps,
    freezeReorderGoals     = Flag False,
    freezeIndependentGoals = Flag False,
    freezeShadowPkgs       = Flag False,
    freezeStrongFlags      = Flag False,
    freezeVerbosity = toFlag normal
  }
-- | The @freeze@ command: compute an exact, consistent set of
-- dependency versions and save it to @cabal.config@.
freezeCommand :: CommandUI FreezeFlags
freezeCommand = CommandUI {
    commandName         = "freeze",
    commandSynopsis     = "Freeze dependencies.",
    commandDescription  = Just $ \_ -> wrapText $
         "Calculates a valid set of dependencies and their exact versions. "
      ++ "If successful, saves the result to the file `cabal.config`.\n"
      ++ "\n"
      ++ "The package versions specified in `cabal.config` will be used for "
      ++ "any future installs.\n"
      ++ "\n"
      ++ "An existing `cabal.config` is ignored and overwritten.\n",
    commandNotes        = Nothing,
    commandUsage        = usageFlags "freeze",
    commandDefaultFlags = defaultFreezeFlags,
    commandOptions      = \ showOrParseArgs -> [
         optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v })
       , option [] ["dry-run"]
           "Do not freeze anything, only print what would be frozen"
           freezeDryRun (\v flags -> flags { freezeDryRun = v })
           trueArg
       -- The descriptions below start lowercase because 'boolOpt'
       -- prefixes them when rendering the paired --x/--no-x help.
       -- Fixed typo: "tests suites" -> "test suites".
       , option [] ["tests"]
           "freezing of the dependencies of any test suites in the package description file."
           freezeTests (\v flags -> flags { freezeTests = v })
           (boolOpt [] [])
       -- Fixed typo: "benchmarks suites" -> "benchmark suites".
       , option [] ["benchmarks"]
           "freezing of the dependencies of any benchmark suites in the package description file."
           freezeBenchmarks (\v flags -> flags { freezeBenchmarks = v })
           (boolOpt [] [])
       ] ++
       -- Solver options are shared with install/fetch via
       -- 'optionSolverFlags'.
       optionSolver      freezeSolver           (\v flags -> flags { freezeSolver           = v }) :
       optionSolverFlags showOrParseArgs
                         freezeMaxBackjumps     (\v flags -> flags { freezeMaxBackjumps     = v })
                         freezeReorderGoals     (\v flags -> flags { freezeReorderGoals     = v })
                         freezeIndependentGoals (\v flags -> flags { freezeIndependentGoals = v })
                         freezeShadowPkgs       (\v flags -> flags { freezeShadowPkgs       = v })
                         freezeStrongFlags      (\v flags -> flags { freezeStrongFlags      = v })
  }
-- ------------------------------------------------------------
-- * Other commands
-- ------------------------------------------------------------
-- | The @update@ command: refresh the package list from every known
-- remote repository.
updateCommand :: CommandUI (Flag Verbosity)
updateCommand = CommandUI
  { commandName         = "update"
  , commandSynopsis     = "Updates list of known packages."
  , commandDescription  = Just $ \_ ->
      "For all known remote repositories, download the package list.\n"
  , commandNotes        = Just $ \_ ->
      relevantConfigValuesText [ "remote-repo"
                               , "remote-repo-cache"
                               , "local-repo" ]
  , commandUsage        = usageFlags "update"
  , commandDefaultFlags = toFlag normal
  , commandOptions      = \_ -> [optionVerbosity id const]
  }
-- | The disabled @upgrade@ command; kept so that invoking it still
-- parses and the action can point users at @install@ instead.
upgradeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
upgradeCommand = configureCommand
  { commandName         = "upgrade"
  , commandSynopsis     = "(command disabled, use install instead)"
  , commandDescription  = Nothing
  , commandUsage        = usageFlagsOrPackages "upgrade"
  , commandDefaultFlags = (mempty, mempty, mempty, mempty)
  , commandOptions      = commandOptions installCommand
  }
{-
cleanCommand :: CommandUI ()
cleanCommand = makeCommand name shortDesc longDesc emptyFlags options
where
name = "clean"
shortDesc = "Removes downloaded files"
longDesc = Nothing
emptyFlags = ()
options _ = []
-}
-- | The @check@ command: lint the package description in the current
-- directory against Hackage's requirements.
checkCommand :: CommandUI (Flag Verbosity)
checkCommand = CommandUI
  { commandName         = "check"
  , commandSynopsis     = "Check the package for common mistakes."
  , commandDescription  = Just $ \_ -> wrapText $
         "Expects a .cabal package file in the current directory.\n"
      ++ "\n"
      ++ "The checks correspond to the requirements to packages on Hackage. "
      ++ "If no errors and warnings are reported, Hackage will accept this "
      ++ "package.\n"
  , commandNotes        = Nothing
  , commandUsage        = \pname -> "Usage: " ++ pname ++ " check\n"
  , commandDefaultFlags = toFlag normal
  , commandOptions      = \_ -> []
  }
-- | The @format@ command: rewrite a @.cabal@ file in the standard style.
formatCommand :: CommandUI (Flag Verbosity)
formatCommand = CommandUI
  { commandName         = "format"
  , commandSynopsis     = "Reformat the .cabal file using the standard style."
  , commandDescription  = Nothing
  , commandNotes        = Nothing
  , commandUsage        = usageAlternatives "format" ["[FILE]"]
  , commandDefaultFlags = toFlag normal
  , commandOptions      = \_ -> []
  }
-- | A stub @uninstall@ command whose action only warns that
-- uninstalling is not implemented.
uninstallCommand :: CommandUI (Flag Verbosity)
uninstallCommand = CommandUI
  { commandName         = "uninstall"
  , commandSynopsis     = "Warn about 'uninstall' not being implemented."
  , commandDescription  = Nothing
  , commandNotes        = Nothing
  , commandUsage        = usageAlternatives "uninstall" ["PACKAGES"]
  , commandDefaultFlags = toFlag normal
  , commandOptions      = \_ -> []
  }
-- | The @run@ command: build the named (or only) executable and
-- execute it, forwarding any arguments given after @--@.
runCommand :: CommandUI (BuildFlags, BuildExFlags)
runCommand = CommandUI
  { commandName         = "run"
  , commandSynopsis     = "Builds and runs an executable."
  , commandDescription  = Just describe
  , commandNotes        = Just examples
  , commandUsage        = usageAlternatives "run"
                            ["[FLAGS] [EXECUTABLE] [-- EXECUTABLE_FLAGS]"]
  , commandDefaultFlags = mempty
  , commandOptions      = \showOrParseArgs ->
         liftOptions fst putFst (commandOptions cabalBuild showOrParseArgs)
      ++ liftOptions snd putSnd (buildExOptions showOrParseArgs)
  }
  where
    describe pname = wrapText $
         "Builds and then runs the specified executable. If no executable is "
      ++ "specified, but the package contains just one executable, that one "
      ++ "is built and executed.\n"
      ++ "\n"
      ++ "Use `" ++ pname ++ " test --show-details=streaming` to run a "
      ++ "test-suite and get its full output.\n"
    examples pname =
         "Examples:\n"
      ++ " " ++ pname ++ " run\n"
      ++ " Run the only executable in the current package;\n"
      ++ " " ++ pname ++ " run foo -- --fooflag\n"
      ++ " Works similar to `./foo --fooflag`.\n"
    putFst a (_, b) = (a, b)
    putSnd b (a, _) = (a, b)
    cabalBuild = Cabal.buildCommand defaultProgramConfiguration
-- ------------------------------------------------------------
-- * Report flags
-- ------------------------------------------------------------
-- | Flags for the @report@ command.
data ReportFlags = ReportFlags {
    reportUsername  :: Flag Username, -- ^ Hackage username.
    reportPassword  :: Flag Password, -- ^ Hackage password.
    reportVerbosity :: Flag Verbosity
  }
-- | Defaults for @report@: no stored credentials, normal verbosity.
defaultReportFlags :: ReportFlags
defaultReportFlags = ReportFlags {
    reportUsername  = mempty,
    reportPassword  = mempty,
    reportVerbosity = toFlag normal
  }
-- | The @report@ command: upload build reports to a remote server,
-- authenticating with the Hackage username/password options.
reportCommand :: CommandUI ReportFlags
reportCommand = CommandUI {
    commandName         = "report",
    commandSynopsis     = "Upload build reports to a remote server.",
    commandDescription  = Nothing,
    commandNotes        = Just $ \_ ->
         "You can store your Hackage login in the ~/.cabal/config file\n",
    commandUsage        = usageAlternatives "report" ["[FLAGS]"],
    commandDefaultFlags = defaultReportFlags,
    commandOptions      = \_ ->
      [optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v })

      ,option ['u'] ["username"]
        "Hackage username."
        reportUsername (\v flags -> flags { reportUsername = v })
        (reqArg' "USERNAME" (toFlag . Username)
                            (flagToList . fmap unUsername))

      ,option ['p'] ["password"]
        "Hackage password."
        reportPassword (\v flags -> flags { reportPassword = v })
        (reqArg' "PASSWORD" (toFlag . Password)
                            (flagToList . fmap unPassword))
      ]
  }
-- Combine 'ReportFlags' field by field with each field's own 'Monoid'.
instance Monoid ReportFlags where
  mempty = ReportFlags mempty mempty mempty
  mappend a b = ReportFlags (merge reportUsername)
                            (merge reportPassword)
                            (merge reportVerbosity)
    where merge field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Get flags
-- ------------------------------------------------------------
-- | Flags for the @get@ command.
data GetFlags = GetFlags {
    getDestDir          :: Flag FilePath, -- ^ Where to place the source.
    getPristine         :: Flag Bool, -- ^ Unpack the original tarball, skipping .cabal revisions.
    getSourceRepository :: Flag (Maybe RepoKind), -- ^ Clone from a source repo instead of the tarball.
    getVerbosity        :: Flag Verbosity
  }
-- | Defaults for @get@: everything unset, normal verbosity.
defaultGetFlags :: GetFlags
defaultGetFlags = GetFlags {
    getDestDir          = mempty,
    getPristine         = mempty,
    getSourceRepository = mempty,
    getVerbosity        = toFlag normal
  }
-- | The @get@ command: download and unpack a package's source tarball,
-- or (with @-s@) clone its source repository.
getCommand :: CommandUI GetFlags
getCommand = CommandUI {
    commandName         = "get",
    commandSynopsis     = "Download/Extract a package's source code (repository).",
    commandDescription  = Just $ \_ -> wrapText $
          "Creates a local copy of a package's source code. By default it gets "
       ++ "the source\ntarball and unpacks it in a local subdirectory. "
       ++ "Alternatively, with -s it will\nget the code from the source "
       ++ "repository specified by the package.\n",
    commandNotes        = Just $ \pname ->
          "Examples:\n"
       ++ " " ++ pname ++ " get hlint\n"
       ++ " Download the latest stable version of hlint;\n"
       ++ " " ++ pname ++ " get lens --source-repository=head\n"
       ++ " Download the source repository (i.e. git clone from github).\n",
    commandUsage        = usagePackages "get",
    commandDefaultFlags = defaultGetFlags,
    commandOptions      = \_ -> [
        optionVerbosity getVerbosity (\v flags -> flags { getVerbosity = v })

       ,option "d" ["destdir"]
         "Where to place the package source, defaults to the current directory."
         getDestDir (\v flags -> flags { getDestDir = v })
         (reqArgFlag "PATH")

       ,option "s" ["source-repository"]
         "Copy the package's source repository (ie git clone, darcs get, etc as appropriate)."
         getSourceRepository (\v flags -> flags { getSourceRepository = v })
        -- The argument is optional: bare '-s' selects whichever source
        -- repo the package prefers (Flag Nothing).
        (optArg "[head|this|...]" (readP_to_E (const "invalid source-repository")
                                              (fmap (toFlag . Just) parse))
                                  (Flag Nothing)
                                  (map (fmap show) . flagToList))

       , option [] ["pristine"]
           ("Unpack the original pristine tarball, rather than updating the "
           ++ ".cabal file with the latest revision from the package archive.")
           getPristine (\v flags -> flags { getPristine = v })
           trueArg
       ]
  }
-- 'cabal unpack' is a deprecated alias for 'cabal get'.
-- | Deprecated alias for 'getCommand', kept for backwards compatibility
-- ('cabal unpack' behaves exactly like 'cabal get').
unpackCommand :: CommandUI GetFlags
unpackCommand = getCommand
  { commandName  = "unpack"
  , commandUsage = usagePackages "unpack"
  }
-- Combine 'GetFlags' field by field with each field's own 'Monoid'.
instance Monoid GetFlags where
  mempty = GetFlags mempty mempty mempty mempty
  mappend a b = GetFlags (merge getDestDir)
                         (merge getPristine)
                         (merge getSourceRepository)
                         (merge getVerbosity)
    where merge field = field a `mappend` field b
-- ------------------------------------------------------------
-- * List flags
-- ------------------------------------------------------------
-- | Flags for the @list@ command.
data ListFlags = ListFlags {
    listInstalled    :: Flag Bool, -- ^ Only show installed packages.
    listSimpleOutput :: Flag Bool, -- ^ Machine-readable output format.
    listVerbosity    :: Flag Verbosity,
    listPackageDBs   :: [Maybe PackageDB] -- ^ Package DB stack to consult.
  }
-- | Defaults for @list@: show all packages in human-readable form,
-- normal verbosity, no extra package DBs.
defaultListFlags :: ListFlags
defaultListFlags = ListFlags {
    listInstalled    = Flag False,
    listSimpleOutput = Flag False,
    listVerbosity    = toFlag normal,
    listPackageDBs   = []
  }
-- | The @list@ command: list known packages, optionally filtered by
-- search strings and restricted to installed packages.
listCommand  :: CommandUI ListFlags
listCommand = CommandUI {
    commandName         = "list",
    commandSynopsis     = "List packages matching a search string.",
    commandDescription  = Just $ \_ -> wrapText $
         "List all packages, or all packages matching one of the search"
      ++ " strings.\n"
      ++ "\n"
      ++ "If there is a sandbox in the current directory and "
      ++ "config:ignore-sandbox is False, use the sandbox package database. "
      ++ "Otherwise, use the package database specified with --package-db. "
      ++ "If not specified, use the user package database.\n",
    commandNotes        = Just $ \pname ->
         "Examples:\n"
      ++ " " ++ pname ++ " list pandoc\n"
      ++ " Will find pandoc, pandoc-citeproc, pandoc-lens, ...\n",
    commandUsage        = usageAlternatives "list" [ "[FLAGS]"
                                                  , "[FLAGS] STRINGS"],
    commandDefaultFlags = defaultListFlags,
    commandOptions      = \_ -> [
        optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v })

        , option [] ["installed"]
            "Only print installed packages"
            listInstalled (\v flags -> flags { listInstalled = v })
            trueArg

        , option [] ["simple-output"]
            -- Fixed grammar in help text: "a easy" -> "an easy".
            "Print in an easy-to-parse format"
            listSimpleOutput (\v flags -> flags { listSimpleOutput = v })
            trueArg

        , option "" ["package-db"]
          (   "Append the given package database to the list of package"
           ++ " databases used (to satisfy dependencies and register into)."
           ++ " May be a specific file, 'global' or 'user'. The initial list"
           ++ " is ['global'], ['global', 'user'], or ['global', $sandbox],"
           ++ " depending on context. Use 'clear' to reset the list to empty."
           ++ " See the user guide for details.")
          listPackageDBs (\v flags -> flags { listPackageDBs = v })
          (reqArg' "DB" readPackageDbList showPackageDbList)

        ]
  }
-- Combine 'ListFlags' field by field with each field's own 'Monoid'.
instance Monoid ListFlags where
  mempty = ListFlags mempty mempty mempty mempty
  mappend a b = ListFlags (merge listInstalled)
                          (merge listSimpleOutput)
                          (merge listVerbosity)
                          (merge listPackageDBs)
    where merge field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Info flags
-- ------------------------------------------------------------
-- | Flags for the @info@ command.
data InfoFlags = InfoFlags {
    infoVerbosity  :: Flag Verbosity,
    infoPackageDBs :: [Maybe PackageDB] -- ^ Package DB stack to consult.
  }
-- | Defaults for @info@: normal verbosity, no extra package DBs.
defaultInfoFlags :: InfoFlags
defaultInfoFlags = InfoFlags {
    infoVerbosity  = toFlag normal,
    infoPackageDBs = []
  }
-- | The @info@ command: show detailed information about particular
-- packages, consulting the selected package databases.
infoCommand  :: CommandUI InfoFlags
infoCommand = CommandUI {
    commandName         = "info",
    commandSynopsis     = "Display detailed information about a particular package.",
    commandDescription  = Just $ \_ -> wrapText $
         "If there is a sandbox in the current directory and "
      ++ "config:ignore-sandbox is False, use the sandbox package database. "
      ++ "Otherwise, use the package database specified with --package-db. "
      ++ "If not specified, use the user package database.\n",
    commandNotes        = Nothing,
    commandUsage        = usageAlternatives "info" ["[FLAGS] PACKAGES"],
    commandDefaultFlags = defaultInfoFlags,
    commandOptions      = \_ -> [
        optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v })

        , option "" ["package-db"]
          (   "Append the given package database to the list of package"
           ++ " databases used (to satisfy dependencies and register into)."
           ++ " May be a specific file, 'global' or 'user'. The initial list"
           ++ " is ['global'], ['global', 'user'], or ['global', $sandbox],"
           ++ " depending on context. Use 'clear' to reset the list to empty."
           ++ " See the user guide for details.")
          infoPackageDBs (\v flags -> flags { infoPackageDBs = v })
          (reqArg' "DB" readPackageDbList showPackageDbList)

        ]
  }
-- | 'InfoFlags' merge field-wise; each field is combined with that
-- field's own 'mappend'.
instance Monoid InfoFlags where
  mempty = InfoFlags
    { infoVerbosity  = mempty
    , infoPackageDBs = mempty
    }
  mappend lhs rhs = InfoFlags
    { infoVerbosity  = merge infoVerbosity
    , infoPackageDBs = merge infoPackageDBs
    }
    where merge field = field lhs `mappend` field rhs
-- ------------------------------------------------------------
-- * Install flags
-- ------------------------------------------------------------
-- | Install takes the same flags as configure along with a few extras.
--
data InstallFlags = InstallFlags {
    -- Documentation generation
    installDocumentation :: Flag Bool,
    installHaddockIndex :: Flag PathTemplate,
    -- Dry run and dependency-solver behaviour (see 'optionSolverFlags')
    installDryRun :: Flag Bool,
    installMaxBackjumps :: Flag Int,
    installReorderGoals :: Flag Bool,
    installIndependentGoals :: Flag Bool,
    installShadowPkgs :: Flag Bool,
    installStrongFlags :: Flag Bool,
    -- Reinstall / upgrade policy
    installReinstall :: Flag Bool,
    installAvoidReinstalls :: Flag Bool,
    installOverrideReinstall :: Flag Bool,
    installUpgradeDeps :: Flag Bool,
    -- What to install
    installOnly :: Flag Bool,
    installOnlyDeps :: Flag Bool,
    installRootCmd :: Flag String,
    -- Logging and build reporting
    installSummaryFile :: NubList PathTemplate,
    installLogFile :: Flag PathTemplate,
    installBuildReports :: Flag ReportLevel,
    installReportPlanningFailure :: Flag Bool,
    -- Miscellaneous
    installSymlinkBinDir :: Flag FilePath,
    installOneShot :: Flag Bool,
    installNumJobs :: Flag (Maybe Int),
    installRunTests :: Flag Bool,
    installOfflineMode :: Flag Bool
  }
-- | Defaults for 'InstallFlags'.  Most boolean flags are given an
-- explicit @Flag False@ default rather than 'mempty'.
defaultInstallFlags :: InstallFlags
defaultInstallFlags = InstallFlags {
    installDocumentation   = Flag False,
    installHaddockIndex    = Flag docIndexFile,
    installDryRun          = Flag False,
    installMaxBackjumps    = Flag defaultMaxBackjumps,
    installReorderGoals    = Flag False,
    installIndependentGoals= Flag False,
    installShadowPkgs      = Flag False,
    installStrongFlags     = Flag False,
    installReinstall       = Flag False,
    installAvoidReinstalls = Flag False,
    installOverrideReinstall = Flag False,
    installUpgradeDeps     = Flag False,
    installOnly            = Flag False,
    installOnlyDeps        = Flag False,
    installRootCmd         = mempty,
    installSummaryFile     = mempty,
    installLogFile         = mempty,
    installBuildReports    = Flag NoReports,
    installReportPlanningFailure = Flag False,
    installSymlinkBinDir   = mempty,
    installOneShot         = Flag False,
    installNumJobs         = mempty,
    installRunTests        = mempty,
    installOfflineMode     = Flag False
  }
  where
    -- Default path template for the generated haddock doc index.
    docIndexFile = toPathTemplate ("$datadir" </> "doc"
                                  </> "$arch-$os-$compiler" </> "index.html")
-- | Parse an @--allow-newer@ argument: the empty string or @\"False\"@
-- means no relaxation, @\"True\"@ relaxes all bounds, and anything else
-- must be a comma-separated list of packages.
allowNewerParser :: ReadE AllowNewer
allowNewerParser = ReadE parseAllowNewer
  where
    parseAllowNewer s
      | s == "" || s == "False" = Right AllowNewerNone
      | s == "True"             = Right AllowNewerAll
      | otherwise =
          maybe (Left ("Cannot parse the list of packages: " ++ s))
                (Right . AllowNewerSome)
                (readPToMaybe pkgsParser s)
      where
        pkgsParser = Parse.sepBy1 parse (Parse.char ',')
-- | Render an 'AllowNewer' flag back into its command-line form:
-- the inverse direction of 'allowNewerParser'.
allowNewerPrinter :: Flag AllowNewer -> [Maybe String]
allowNewerPrinter NoFlag    = []
allowNewerPrinter (Flag an) =
  case an of
    AllowNewerNone      -> [Just "False"]
    AllowNewerAll       -> [Just "True"]
    AllowNewerSome pkgs -> [Just (intercalate "," (map display pkgs))]
-- | Default limit on dependency-solver backjumps (see
-- 'installMaxBackjumps').
defaultMaxBackjumps :: Int
defaultMaxBackjumps = 2000
-- | Solver used when the user does not pick one explicitly.
defaultSolver :: PreSolver
defaultSolver = Choose
-- | Comma-separated listing of every available solver, for help text.
allSolvers :: String
allSolvers =
  intercalate ", " [ display s | s <- ([minBound .. maxBound] :: [PreSolver]) ]
-- | Command-line UI for @install@.  Its flags are a 4-tuple combining
-- configure, configure-ex, install and haddock flags; the @get@/@set@
-- helpers below lift each option group into and out of the tuple.
installCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
installCommand = CommandUI {
  commandName         = "install",
  commandSynopsis     = "Install packages.",
  commandUsage        = usageAlternatives "install" [ "[FLAGS]"
                                                    , "[FLAGS] PACKAGES"
                                                    ],
  commandDescription  = Just $ \_ -> wrapText $
        "Installs one or more packages. By default, the installed package"
     ++ " will be registered in the user's package database or, if a sandbox"
     ++ " is present in the current directory, inside the sandbox.\n"
     ++ "\n"
     ++ "If PACKAGES are specified, downloads and installs those packages."
     ++ " Otherwise, install the package in the current directory (and/or its"
     ++ " dependencies) (there must be exactly one .cabal file in the current"
     ++ " directory).\n"
     ++ "\n"
     ++ "When using a sandbox, the flags for `install` only affect the"
     ++ " current command and have no effect on future commands. (To achieve"
     ++ " that, `configure` must be used.)\n"
     ++ " In contrast, without a sandbox, the flags to `install` are saved and"
     ++ " affect future commands such as `build` and `repl`. See the help for"
     ++ " `configure` for a list of commands being affected.\n"
     ++ "\n"
     ++ "Installed executables will by default (and without a sandbox)"
     ++ " be put into `~/.cabal/bin/`."
     ++ " If you want installed executable to be available globally, make"
     ++ " sure that the PATH environment variable contains that directory.\n"
     ++ "When using a sandbox, executables will be put into"
     ++ " `$SANDBOX/bin/` (by default: `./.cabal-sandbox/bin/`).\n"
     ++ "\n"
     ++ "When specifying --bindir, consider also specifying --datadir;"
     ++ " this way the sandbox can be deleted and the executable should"
     ++ " continue working as long as bindir and datadir are left untouched.",
  -- Notes: first reuse the configure command's notes (if any), then add
  -- install-specific examples.
  commandNotes        = Just $ \pname ->
        ( case commandNotes
               $ Cabal.configureCommand defaultProgramConfiguration
          of Just desc -> desc pname ++ "\n"
             Nothing   -> ""
        )
     ++ "Examples:\n"
     ++ "  " ++ pname ++ " install                 "
     ++ "    Package in the current directory\n"
     ++ "  " ++ pname ++ " install foo             "
     ++ "    Package from the hackage server\n"
     ++ "  " ++ pname ++ " install foo-1.0         "
     ++ "    Specific version of a package\n"
     ++ "  " ++ pname ++ " install 'foo < 2'       "
     ++ "    Constrained package version\n"
     ++ "  " ++ pname ++ " install haddock --bindir=$HOME/hask-bin/ --datadir=$HOME/hask-data/\n"
     ++ "  " ++ (map (const ' ') pname)
     ++ "                         "
     ++ "    Change installation destination\n",
  commandDefaultFlags = (mempty, mempty, mempty, mempty),
  -- Options: configure options (minus a few that make no sense for
  -- install) plus configure-ex, install and haddock option groups.
  commandOptions      = \showOrParseArgs ->
       liftOptions get1 set1
       (filter ((`notElem` ["constraint", "dependency"
                           , "exact-configuration"])
                . optionName) $
                              configureOptions   showOrParseArgs)
    ++ liftOptions get2 set2 (configureExOptions showOrParseArgs)
    ++ liftOptions get3 set3 (installOptions     showOrParseArgs)
    ++ liftOptions get4 set4 (haddockOptions     showOrParseArgs)
  }
  where
    -- Projections and updaters for each component of the flags tuple.
    get1 (a,_,_,_) = a; set1 a (_,b,c,d) = (a,b,c,d)
    get2 (_,b,_,_) = b; set2 b (a,_,c,d) = (a,b,c,d)
    get3 (_,_,c,_) = c; set3 c (a,b,_,d) = (a,b,c,d)
    get4 (_,_,_,d) = d; set4 d (a,b,c,_) = (a,b,c,d)
-- | Re-expose a whitelisted subset of Cabal's haddock options, renamed
-- with a "haddock-" prefix.  Short flags are dropped and every long flag
-- gains the prefix.
haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags]
haddockOptions showOrParseArgs
  = [ opt { optionName = "haddock-" ++ name,
            optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr
                          | descr <- optionDescr opt] }
    | opt <- commandOptions Cabal.haddockCommand showOrParseArgs
    , let name = optionName opt
    , name `elem` ["hoogle", "html", "html-location"
                  ,"executables", "tests", "benchmarks", "all", "internal", "css"
                  ,"hyperlink-source", "hscolour-css"
                  ,"contents-location"]
    ]
  where
    -- Apply the flag-renaming function inside each 'OptDescr' constructor.
    fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a
    fmapOptFlags modify (ReqArg d f p r w)    = ReqArg d (modify f) p r w
    fmapOptFlags modify (OptArg d f p r i w)  = OptArg d (modify f) p r i w
    fmapOptFlags modify (ChoiceOpt xs)        = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs]
    fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w
-- | Option descriptions for the install-specific flags.  The solver
-- options are shared via 'optionSolverFlags'; the @--only@ flag is
-- accepted when parsing but hidden from @--help@ output.
installOptions :: ShowOrParseArgs -> [OptionField InstallFlags]
installOptions showOrParseArgs =
      [ option "" ["documentation"]
          "building of documentation"
          installDocumentation (\v flags -> flags { installDocumentation = v })
          (boolOpt [] [])
      , option [] ["doc-index-file"]
          "A central index of haddock API documentation (template cannot use $pkgid)"
          installHaddockIndex (\v flags -> flags { installHaddockIndex = v })
          (reqArg' "TEMPLATE" (toFlag.toPathTemplate)
                              (flagToList . fmap fromPathTemplate))
      , option [] ["dry-run"]
          "Do not install anything, only print what would be installed."
          installDryRun (\v flags -> flags { installDryRun = v })
          trueArg
      ] ++
      -- Dependency-solver flags shared with other commands.
      optionSolverFlags showOrParseArgs
        installMaxBackjumps     (\v flags -> flags { installMaxBackjumps     = v })
        installReorderGoals     (\v flags -> flags { installReorderGoals     = v })
        installIndependentGoals (\v flags -> flags { installIndependentGoals = v })
        installShadowPkgs       (\v flags -> flags { installShadowPkgs       = v })
        installStrongFlags      (\v flags -> flags { installStrongFlags      = v }) ++
      [ option [] ["reinstall"]
          "Install even if it means installing the same version again."
          installReinstall (\v flags -> flags { installReinstall = v })
          (yesNoOpt showOrParseArgs)
      , option [] ["avoid-reinstalls"]
          "Do not select versions that would destructively overwrite installed packages."
          installAvoidReinstalls (\v flags -> flags { installAvoidReinstalls = v })
          (yesNoOpt showOrParseArgs)
      , option [] ["force-reinstalls"]
          "Reinstall packages even if they will most likely break other installed packages."
          installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v })
          (yesNoOpt showOrParseArgs)
      , option [] ["upgrade-dependencies"]
          "Pick the latest version for all dependencies, rather than trying to pick an installed version."
          installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v })
          (yesNoOpt showOrParseArgs)
      , option [] ["only-dependencies"]
          "Install only the dependencies necessary to build the given packages"
          installOnlyDeps (\v flags -> flags { installOnlyDeps = v })
          (yesNoOpt showOrParseArgs)
      -- NOTE: --dependencies-only sets the same field as
      -- --only-dependencies; the two spellings are interchangeable.
      , option [] ["dependencies-only"]
          "A synonym for --only-dependencies"
          installOnlyDeps (\v flags -> flags { installOnlyDeps = v })
          (yesNoOpt showOrParseArgs)
      , option [] ["root-cmd"]
          "Command used to gain root privileges, when installing with --global."
          installRootCmd (\v flags -> flags { installRootCmd = v })
          (reqArg' "COMMAND" toFlag flagToList)
      , option [] ["symlink-bindir"]
          "Add symlinks to installed executables into this directory."
          installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v })
          (reqArgFlag "DIR")
      , option [] ["build-summary"]
          "Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)"
          installSummaryFile (\v flags -> flags { installSummaryFile = v })
          (reqArg' "TEMPLATE" (\x -> toNubList [toPathTemplate x]) (map fromPathTemplate . fromNubList))
      , option [] ["build-log"]
          "Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)"
          installLogFile (\v flags -> flags { installLogFile = v })
          (reqArg' "TEMPLATE" (toFlag.toPathTemplate)
                              (flagToList . fmap fromPathTemplate))
      , option [] ["remote-build-reporting"]
          "Generate build reports to send to a remote server (none, anonymous or detailed)."
          installBuildReports (\v flags -> flags { installBuildReports = v })
          (reqArg "LEVEL" (readP_to_E (const $ "report level must be 'none', "
                                            ++ "'anonymous' or 'detailed'")
                                      (toFlag `fmap` parse))
                          (flagToList . fmap display))
      , option [] ["report-planning-failure"]
          "Generate build reports when the dependency solver fails. This is used by the Hackage build bot."
          installReportPlanningFailure (\v flags -> flags { installReportPlanningFailure = v })
          trueArg
      , option [] ["one-shot"]
          "Do not record the packages in the world file."
          installOneShot (\v flags -> flags { installOneShot = v })
          (yesNoOpt showOrParseArgs)
      , option [] ["run-tests"]
          "Run package test suites during installation."
          installRunTests (\v flags -> flags { installRunTests = v })
          trueArg
      , optionNumJobs
          installNumJobs (\v flags -> flags { installNumJobs = v })
      , option [] ["offline"]
          "Don't download packages from the Internet."
          installOfflineMode (\v flags -> flags { installOfflineMode = v })
          (yesNoOpt showOrParseArgs)
      ] ++ case showOrParseArgs of      -- TODO: remove when "cabal install"
                                        -- avoids
          ParseArgs ->
            [ option [] ["only"]
              "Only installs the package in the current directory."
              installOnly (\v flags -> flags { installOnly = v })
              trueArg ]
          _ -> []
-- | 'InstallFlags' merge field-wise; each field is combined with that
-- field's own 'mappend'.
instance Monoid InstallFlags where
  mempty = InstallFlags
    { installDocumentation        = mempty
    , installHaddockIndex         = mempty
    , installDryRun               = mempty
    , installReinstall            = mempty
    , installAvoidReinstalls      = mempty
    , installOverrideReinstall    = mempty
    , installMaxBackjumps         = mempty
    , installUpgradeDeps          = mempty
    , installReorderGoals         = mempty
    , installIndependentGoals     = mempty
    , installShadowPkgs           = mempty
    , installStrongFlags          = mempty
    , installOnly                 = mempty
    , installOnlyDeps             = mempty
    , installRootCmd              = mempty
    , installSummaryFile          = mempty
    , installLogFile              = mempty
    , installBuildReports         = mempty
    , installReportPlanningFailure = mempty
    , installSymlinkBinDir        = mempty
    , installOneShot              = mempty
    , installNumJobs              = mempty
    , installRunTests             = mempty
    , installOfflineMode          = mempty
    }
  mappend lhs rhs = InstallFlags
    { installDocumentation        = merge installDocumentation
    , installHaddockIndex         = merge installHaddockIndex
    , installDryRun               = merge installDryRun
    , installReinstall            = merge installReinstall
    , installAvoidReinstalls      = merge installAvoidReinstalls
    , installOverrideReinstall    = merge installOverrideReinstall
    , installMaxBackjumps         = merge installMaxBackjumps
    , installUpgradeDeps          = merge installUpgradeDeps
    , installReorderGoals         = merge installReorderGoals
    , installIndependentGoals     = merge installIndependentGoals
    , installShadowPkgs           = merge installShadowPkgs
    , installStrongFlags          = merge installStrongFlags
    , installOnly                 = merge installOnly
    , installOnlyDeps             = merge installOnlyDeps
    , installRootCmd              = merge installRootCmd
    , installSummaryFile          = merge installSummaryFile
    , installLogFile              = merge installLogFile
    , installBuildReports         = merge installBuildReports
    , installReportPlanningFailure = merge installReportPlanningFailure
    , installSymlinkBinDir        = merge installSymlinkBinDir
    , installOneShot              = merge installOneShot
    , installNumJobs              = merge installNumJobs
    , installRunTests             = merge installRunTests
    , installOfflineMode          = merge installOfflineMode
    }
    where merge field = field lhs `mappend` field rhs
-- ------------------------------------------------------------
-- * Upload flags
-- ------------------------------------------------------------
-- | Flags for the @upload@ command.
data UploadFlags = UploadFlags {
    -- | Only run QA checks; do not actually upload.
    uploadCheck       :: Flag Bool,
    uploadUsername    :: Flag Username,
    uploadPassword    :: Flag Password,
    -- | Command invoked to obtain the password (split into words).
    uploadPasswordCmd :: Flag [String],
    uploadVerbosity   :: Flag Verbosity
  }
-- | Defaults for 'UploadFlags': really upload (no check-only), no stored
-- credentials, normal verbosity.
defaultUploadFlags :: UploadFlags
defaultUploadFlags = UploadFlags {
    uploadCheck       = toFlag False,
    uploadUsername    = mempty,
    uploadPassword    = mempty,
    uploadPasswordCmd = mempty,
    uploadVerbosity   = toFlag normal
  }
-- | Command-line UI for @upload@: uploads source packages to Hackage.
uploadCommand :: CommandUI UploadFlags
uploadCommand = CommandUI {
    commandName = "upload",
    commandSynopsis = "Uploads source packages to Hackage.",
    commandDescription = Nothing,
    commandNotes = Just $ \_ ->
         "You can store your Hackage login in the ~/.cabal/config file\n"
      ++ relevantConfigValuesText ["username", "password"],
    commandUsage = \pname ->
         "Usage: " ++ pname ++ " upload [FLAGS] TARFILES\n",
    commandDefaultFlags = defaultUploadFlags,
    commandOptions = \_ ->
      [optionVerbosity uploadVerbosity (\v flags -> flags { uploadVerbosity = v })
      ,option ['c'] ["check"]
         "Do not upload, just do QA checks."
         uploadCheck (\v flags -> flags { uploadCheck = v })
         trueArg
      ,option ['u'] ["username"]
        "Hackage username."
        uploadUsername (\v flags -> flags { uploadUsername = v })
        (reqArg' "USERNAME" (toFlag . Username)
                            (flagToList . fmap unUsername))
      ,option ['p'] ["password"]
        "Hackage password."
        uploadPassword (\v flags -> flags { uploadPassword = v })
        (reqArg' "PASSWORD" (toFlag . Password)
                            (flagToList . fmap unPassword))
      ,option ['P'] ["password-command"]
        "Command to get Hackage password."
        uploadPasswordCmd (\v flags -> flags { uploadPasswordCmd = v })
        (reqArg' "PASSWORD" (Flag . words) (fromMaybe [] . flagToMaybe))
      ]
  }
-- | 'UploadFlags' merge field-wise; each field is combined with that
-- field's own 'mappend'.
instance Monoid UploadFlags where
  mempty = UploadFlags
    { uploadCheck       = mempty
    , uploadUsername    = mempty
    , uploadPassword    = mempty
    , uploadPasswordCmd = mempty
    , uploadVerbosity   = mempty
    }
  mappend lhs rhs = UploadFlags
    { uploadCheck       = merge uploadCheck
    , uploadUsername    = merge uploadUsername
    , uploadPassword    = merge uploadPassword
    , uploadPasswordCmd = merge uploadPasswordCmd
    , uploadVerbosity   = merge uploadVerbosity
    }
    where merge field = field lhs `mappend` field rhs
-- ------------------------------------------------------------
-- * Init flags
-- ------------------------------------------------------------
-- | Completely empty 'IT.InitFlags' (every field 'mempty').
emptyInitFlags :: IT.InitFlags
emptyInitFlags = mempty
-- | Default 'IT.InitFlags': empty apart from normal verbosity.
defaultInitFlags :: IT.InitFlags
defaultInitFlags = emptyInitFlags { IT.initVerbosity = toFlag normal }
-- | Command-line UI for @init@: interactively (or via flags) creates a
-- new .cabal package description.
initCommand :: CommandUI IT.InitFlags
initCommand = CommandUI {
    commandName = "init",
    commandSynopsis = "Create a new .cabal package file (interactively).",
    commandDescription = Just $ \_ -> wrapText $
         "Cabalise a project by creating a .cabal, Setup.hs, and "
      ++ "optionally a LICENSE file.\n"
      ++ "\n"
      ++ "Calling init with no arguments (recommended) uses an "
      ++ "interactive mode, which will try to guess as much as "
      ++ "possible and prompt you for the rest. Command-line "
      ++ "arguments are provided for scripting purposes. "
      ++ "If you don't want interactive mode, be sure to pass "
      ++ "the -n flag.\n",
    commandNotes = Nothing,
    commandUsage = \pname ->
         "Usage: " ++ pname ++ " init [FLAGS]\n",
    commandDefaultFlags = defaultInitFlags,
    commandOptions = \_ ->
      [ option ['n'] ["non-interactive"]
        "Non-interactive mode."
        IT.nonInteractive (\v flags -> flags { IT.nonInteractive = v })
        trueArg
      , option ['q'] ["quiet"]
        "Do not generate log messages to stdout."
        IT.quiet (\v flags -> flags { IT.quiet = v })
        trueArg
      , option [] ["no-comments"]
        "Do not generate explanatory comments in the .cabal file."
        IT.noComments (\v flags -> flags { IT.noComments = v })
        trueArg
      , option ['m'] ["minimal"]
        "Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments."
        IT.minimal (\v flags -> flags { IT.minimal = v })
        trueArg
      , option [] ["overwrite"]
        "Overwrite any existing .cabal, LICENSE, or Setup.hs files without warning."
        IT.overwrite (\v flags -> flags { IT.overwrite = v })
        trueArg
      , option [] ["package-dir"]
        "Root directory of the package (default = current directory)."
        IT.packageDir (\v flags -> flags { IT.packageDir = v })
        (reqArgFlag "DIRECTORY")
      , option ['p'] ["package-name"]
        "Name of the Cabal package to create."
        IT.packageName (\v flags -> flags { IT.packageName = v })
        (reqArg "PACKAGE" (readP_to_E ("Cannot parse package name: "++)
                                      (toFlag `fmap` parse))
                          (flagToList . fmap display))
      , option [] ["version"]
        "Initial version of the package."
        IT.version (\v flags -> flags { IT.version = v })
        (reqArg "VERSION" (readP_to_E ("Cannot parse package version: "++)
                                      (toFlag `fmap` parse))
                          (flagToList . fmap display))
      , option [] ["cabal-version"]
        "Required version of the Cabal library."
        IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v })
        (reqArg "VERSION_RANGE" (readP_to_E ("Cannot parse Cabal version range: "++)
                                            (toFlag `fmap` parse))
                                (flagToList . fmap display))
      , option ['l'] ["license"]
        "Project license."
        IT.license (\v flags -> flags { IT.license = v })
        (reqArg "LICENSE" (readP_to_E ("Cannot parse license: "++)
                                      (toFlag `fmap` parse))
                          (flagToList . fmap display))
      , option ['a'] ["author"]
        "Name of the project's author."
        IT.author (\v flags -> flags { IT.author = v })
        (reqArgFlag "NAME")
      , option ['e'] ["email"]
        "Email address of the maintainer."
        IT.email (\v flags -> flags { IT.email = v })
        (reqArgFlag "EMAIL")
      , option ['u'] ["homepage"]
        "Project homepage and/or repository."
        IT.homepage (\v flags -> flags { IT.homepage = v })
        (reqArgFlag "URL")
      , option ['s'] ["synopsis"]
        "Short project synopsis."
        IT.synopsis (\v flags -> flags { IT.synopsis = v })
        (reqArgFlag "TEXT")
      -- Category: a recognised category parses via Read; anything else
      -- is kept verbatim as a Left String.
      , option ['c'] ["category"]
        "Project category."
        IT.category (\v flags -> flags { IT.category = v })
        (reqArg' "CATEGORY" (\s -> toFlag $ maybe (Left s) Right (readMaybe s))
                            (flagToList . fmap (either id show)))
      , option ['x'] ["extra-source-file"]
        "Extra source file to be distributed with tarball."
        IT.extraSrc (\v flags -> flags { IT.extraSrc = v })
        (reqArg' "FILE" (Just . (:[]))
                        (fromMaybe []))
      , option [] ["is-library"]
        "Build a library."
        IT.packageType (\v flags -> flags { IT.packageType = v })
        (noArg (Flag IT.Library))
      , option [] ["is-executable"]
        "Build an executable."
        IT.packageType
        (\v flags -> flags { IT.packageType = v })
        (noArg (Flag IT.Executable))
      , option [] ["main-is"]
        "Specify the main module."
        IT.mainIs
        (\v flags -> flags { IT.mainIs = v })
        (reqArgFlag "FILE")
      , option [] ["language"]
        "Specify the default language."
        IT.language
        (\v flags -> flags { IT.language = v })
        (reqArg "LANGUAGE" (readP_to_E ("Cannot parse language: "++)
                                       (toFlag `fmap` parse))
                           (flagToList . fmap display))
      , option ['o'] ["expose-module"]
        "Export a module from the package."
        IT.exposedModules
        (\v flags -> flags { IT.exposedModules = v })
        (reqArg "MODULE" (readP_to_E ("Cannot parse module name: "++)
                                     ((Just . (:[])) `fmap` parse))
                         (maybe [] (fmap display)))
      , option [] ["extension"]
        "Use a LANGUAGE extension (in the other-extensions field)."
        IT.otherExts
        (\v flags -> flags { IT.otherExts = v })
        (reqArg "EXTENSION" (readP_to_E ("Cannot parse extension: "++)
                                        ((Just . (:[])) `fmap` parse))
                            (maybe [] (fmap display)))
      , option ['d'] ["dependency"]
        "Package dependency."
        IT.dependencies (\v flags -> flags { IT.dependencies = v })
        (reqArg "PACKAGE" (readP_to_E ("Cannot parse dependency: "++)
                                      ((Just . (:[])) `fmap` parse))
                          (maybe [] (fmap display)))
      , option [] ["source-dir"]
        "Directory containing package source."
        IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v })
        (reqArg' "DIR" (Just . (:[]))
                       (fromMaybe []))
      , option [] ["build-tool"]
        "Required external build tool."
        IT.buildTools (\v flags -> flags { IT.buildTools = v })
        (reqArg' "TOOL" (Just . (:[]))
                        (fromMaybe []))
      , optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v })
      ]
  }
  where
    -- Local equivalent of Text.Read.readMaybe: succeeds only on a full,
    -- unambiguous parse.
    readMaybe s = case reads s of
                    [(x,"")] -> Just x
                    _        -> Nothing
-- ------------------------------------------------------------
-- * SDist flags
-- ------------------------------------------------------------
-- | Extra flags to @sdist@ beyond runghc Setup sdist
--
data SDistExFlags = SDistExFlags {
    -- | Which archive format to produce (see 'ArchiveFormat').
    sDistFormat    :: Flag ArchiveFormat
  }
  deriving Show

-- | Supported sdist archive formats.
data ArchiveFormat = TargzFormat | ZipFormat -- | ...
  deriving (Show, Eq)
-- | Default 'SDistExFlags': produce a @.tar.gz@ archive.
defaultSDistExFlags :: SDistExFlags
defaultSDistExFlags = SDistExFlags {
    sDistFormat = Flag TargzFormat
  }
-- | The @sdist@ command: Cabal's own sdist options paired with
-- cabal-install's extra archive-format option.
sdistCommand :: CommandUI (SDistFlags, SDistExFlags)
sdistCommand = Cabal.sdistCommand {
    commandDefaultFlags = (commandDefaultFlags Cabal.sdistCommand, defaultSDistExFlags),
    commandOptions = \showOrParseArgs ->
         liftOptions fst replaceFst (commandOptions Cabal.sdistCommand showOrParseArgs)
      ++ liftOptions snd replaceSnd formatOptions
  }
  where
    -- Updaters for each half of the flags pair.
    replaceFst a (_, b) = (a, b)
    replaceSnd b (a, _) = (a, b)

    formatOptions =
      [ option [] ["archive-format"] "archive-format"
          sDistFormat (\v flags -> flags { sDistFormat = v })
          (choiceOpt
            [ (Flag TargzFormat, ([], ["targz"]),
                 "Produce a '.tar.gz' format archive (default and required for uploading to hackage)")
            , (Flag ZipFormat, ([], ["zip"]),
                 "Produce a '.zip' format archive")
            ])
      ]
-- | 'SDistExFlags' merge field-wise via each field's own 'mappend'.
instance Monoid SDistExFlags where
  mempty = SDistExFlags
    { sDistFormat = mempty
    }
  mappend lhs rhs = SDistExFlags
    { sDistFormat = merge sDistFormat
    }
    where
      merge field = field lhs `mappend` field rhs
-- ------------------------------------------------------------
-- * Win32SelfUpgrade flags
-- ------------------------------------------------------------
-- | Flags for the (internal) @win32selfupgrade@ command.
data Win32SelfUpgradeFlags = Win32SelfUpgradeFlags {
    win32SelfUpgradeVerbosity :: Flag Verbosity
  }
-- | Default 'Win32SelfUpgradeFlags': normal verbosity.
defaultWin32SelfUpgradeFlags :: Win32SelfUpgradeFlags
defaultWin32SelfUpgradeFlags = Win32SelfUpgradeFlags {
    win32SelfUpgradeVerbosity = toFlag normal
  }
-- | Command-line UI for the @win32selfupgrade@ command, which replaces
-- the running executable on Windows (takes a PID and a PATH argument).
win32SelfUpgradeCommand :: CommandUI Win32SelfUpgradeFlags
win32SelfUpgradeCommand = CommandUI {
    commandName = "win32selfupgrade",
    commandSynopsis = "Self-upgrade the executable on Windows",
    commandDescription = Nothing,
    commandNotes = Nothing,
    commandUsage = \pname ->
      "Usage: " ++ pname ++ " win32selfupgrade PID PATH\n",
    commandDefaultFlags = defaultWin32SelfUpgradeFlags,
    commandOptions = \_ ->
      [optionVerbosity win32SelfUpgradeVerbosity
       (\v flags -> flags { win32SelfUpgradeVerbosity = v})
      ]
  }
-- | 'Win32SelfUpgradeFlags' merge field-wise via each field's own
-- 'mappend'.
instance Monoid Win32SelfUpgradeFlags where
  mempty = Win32SelfUpgradeFlags
    { win32SelfUpgradeVerbosity = mempty
    }
  mappend lhs rhs = Win32SelfUpgradeFlags
    { win32SelfUpgradeVerbosity = merge win32SelfUpgradeVerbosity
    }
    where merge field = field lhs `mappend` field rhs
-- ------------------------------------------------------------
-- * ActAsSetup flags
-- ------------------------------------------------------------
-- | Flags for the @act-as-setup@ command.
data ActAsSetupFlags = ActAsSetupFlags {
    actAsSetupBuildType :: Flag BuildType
  }
-- | Default 'ActAsSetupFlags': the 'Simple' build type.
defaultActAsSetupFlags :: ActAsSetupFlags
defaultActAsSetupFlags = ActAsSetupFlags {
    actAsSetupBuildType = toFlag Simple
  }
-- | Command-line UI for @act-as-setup@: run as if this were a Setup.hs.
actAsSetupCommand :: CommandUI ActAsSetupFlags
actAsSetupCommand = CommandUI {
    commandName = "act-as-setup",
    commandSynopsis = "Run as-if this was a Setup.hs",
    commandDescription = Nothing,
    commandNotes = Nothing,
    commandUsage = \pname ->
      "Usage: " ++ pname ++ " act-as-setup\n",
    commandDefaultFlags = defaultActAsSetupFlags,
    commandOptions = \_ ->
      [option "" ["build-type"]
        "Use the given build type."
        actAsSetupBuildType (\v flags -> flags { actAsSetupBuildType = v })
        (reqArg "BUILD-TYPE" (readP_to_E ("Cannot parse build type: "++)
                                         (fmap toFlag parse))
                             (map display . flagToList))
      ]
  }
-- | 'ActAsSetupFlags' merge field-wise via each field's own 'mappend'.
instance Monoid ActAsSetupFlags where
  mempty = ActAsSetupFlags
    { actAsSetupBuildType = mempty
    }
  mappend lhs rhs = ActAsSetupFlags
    { actAsSetupBuildType = merge actAsSetupBuildType
    }
    where merge field = field lhs `mappend` field rhs
-- ------------------------------------------------------------
-- * Sandbox-related flags
-- ------------------------------------------------------------
-- | Flags for the @sandbox@ command and its subcommands.
data SandboxFlags = SandboxFlags {
    sandboxVerbosity :: Flag Verbosity,
    sandboxSnapshot  :: Flag Bool, -- FIXME: this should be an 'add-source'-only
                                   -- flag.
    -- | Location of the sandbox directory.
    sandboxLocation  :: Flag FilePath
  }
-- | Default sandbox directory, relative to the current directory.
defaultSandboxLocation :: FilePath
defaultSandboxLocation = ".cabal-sandbox"
-- | Defaults for 'SandboxFlags': normal verbosity, link (not snapshot)
-- add-source, sandbox at 'defaultSandboxLocation'.
defaultSandboxFlags :: SandboxFlags
defaultSandboxFlags = SandboxFlags {
    sandboxVerbosity = toFlag normal,
    sandboxSnapshot  = toFlag False,
    sandboxLocation  = toFlag defaultSandboxLocation
  }
-- | Command-line UI for @sandbox@: create/modify/delete a sandbox and
-- its add-source dependencies.  The subcommands themselves are described
-- in the help text below.
sandboxCommand :: CommandUI SandboxFlags
sandboxCommand = CommandUI {
    commandName = "sandbox",
    commandSynopsis = "Create/modify/delete a sandbox.",
    commandDescription = Just $ \pname -> concat
      [ paragraph $ "Sandboxes are isolated package databases that can be used"
        ++ " to prevent dependency conflicts that arise when many different"
        ++ " packages are installed in the same database (i.e. the user's"
        ++ " database in the home directory)."
      , paragraph $ "A sandbox in the current directory (created by"
        ++ " `sandbox init`) will be used instead of the user's database for"
        ++ " commands such as `install` and `build`. Note that (a directly"
        ++ " invoked) GHC will not automatically be aware of sandboxes;"
        ++ " only if called via appropriate " ++ pname
        ++ " commands, e.g. `repl`, `build`, `exec`."
      , paragraph $ "Currently, " ++ pname ++ " will not search for a sandbox"
        ++ " in folders above the current one, so cabal will not see the sandbox"
        ++ " if you are in a subfolder of a sandbox."
      , paragraph "Subcommands:"
      , headLine "init:"
      , indentParagraph $ "Initialize a sandbox in the current directory."
        ++ " An existing package database will not be modified, but settings"
        ++ " (such as the location of the database) can be modified this way."
      , headLine "delete:"
      , indentParagraph $ "Remove the sandbox; deleting all the packages"
        ++ " installed inside."
      , headLine "add-source:"
      , indentParagraph $ "Make one or more local packages available in the"
        ++ " sandbox. PATHS may be relative or absolute."
        ++ " Typical usecase is when you need"
        ++ " to make a (temporary) modification to a dependency: You download"
        ++ " the package into a different directory, make the modification,"
        ++ " and add that directory to the sandbox with `add-source`."
      , indentParagraph $ "Unless given `--snapshot`, any add-source'd"
        ++ " dependency that was modified since the last build will be"
        ++ " re-installed automatically."
      , headLine "delete-source:"
      , indentParagraph $ "Remove an add-source dependency; however, this will"
        ++ " not delete the package(s) that have been installed in the sandbox"
        ++ " from this dependency. You can either unregister the package(s) via"
        ++ " `" ++ pname ++ " sandbox hc-pkg unregister` or re-create the"
        ++ " sandbox (`sandbox delete; sandbox init`)."
      , headLine "list-sources:"
      , indentParagraph $ "List the directories of local packages made"
        ++ " available via `" ++ pname ++ " add-source`."
      , headLine "hc-pkg:"
      , indentParagraph $ "Similar to `ghc-pkg`, but for the sandbox package"
        ++ " database. Can be used to list specific/all packages that are"
        ++ " installed in the sandbox. For subcommands, see the help for"
        ++ " ghc-pkg. Affected by the compiler version specified by `configure`."
      ],
    commandNotes = Just $ \pname ->
         relevantConfigValuesText ["require-sandbox"
                                  ,"ignore-sandbox"]
      ++ "\n"
      ++ "Examples:\n"
      ++ "  Set up a sandbox with one local dependency, located at ../foo:\n"
      ++ "    " ++ pname ++ " sandbox init\n"
      ++ "    " ++ pname ++ " sandbox add-source ../foo\n"
      ++ "    " ++ pname ++ " install --only-dependencies\n"
      ++ "  Reset the sandbox:\n"
      ++ "    " ++ pname ++ " sandbox delete\n"
      ++ "    " ++ pname ++ " sandbox init\n"
      ++ "    " ++ pname ++ " install --only-dependencies\n"
      ++ "  List the packages in the sandbox:\n"
      ++ "    " ++ pname ++ " sandbox hc-pkg list\n"
      ++ "  Unregister the `broken` package from the sandbox:\n"
      ++ "    " ++ pname ++ " sandbox hc-pkg -- --force unregister broken\n",
    commandUsage = usageAlternatives "sandbox"
      [ "init          [FLAGS]"
      , "delete        [FLAGS]"
      , "add-source    [FLAGS] PATHS"
      , "delete-source [FLAGS] PATHS"
      , "list-sources  [FLAGS]"
      , "hc-pkg        [FLAGS] [--] COMMAND [--] [ARGS]"
      ],
    commandDefaultFlags = defaultSandboxFlags,
    commandOptions = \_ ->
      [ optionVerbosity sandboxVerbosity
        (\v flags -> flags { sandboxVerbosity = v })
      , option [] ["snapshot"]
        "Take a snapshot instead of creating a link (only applies to 'add-source')"
        sandboxSnapshot (\v flags -> flags { sandboxSnapshot = v })
        trueArg
      , option [] ["sandbox"]
        "Sandbox location (default: './.cabal-sandbox')."
        sandboxLocation (\v flags -> flags { sandboxLocation = v })
        (reqArgFlag "DIR")
      ]
  }
-- | 'SandboxFlags' merge field-wise via each field's own 'mappend'.
instance Monoid SandboxFlags where
  mempty = SandboxFlags
    { sandboxVerbosity = mempty
    , sandboxSnapshot  = mempty
    , sandboxLocation  = mempty
    }
  mappend lhs rhs = SandboxFlags
    { sandboxVerbosity = merge sandboxVerbosity
    , sandboxSnapshot  = merge sandboxSnapshot
    , sandboxLocation  = merge sandboxLocation
    }
    where merge field = field lhs `mappend` field rhs
-- ------------------------------------------------------------
-- * Exec Flags
-- ------------------------------------------------------------
-- | Flags for the @exec@ command.
data ExecFlags = ExecFlags {
    execVerbosity :: Flag Verbosity
  }
-- | Default 'ExecFlags': normal verbosity.
defaultExecFlags :: ExecFlags
defaultExecFlags = ExecFlags {
    execVerbosity = toFlag normal
  }
-- | Command-line UI for @exec@: runs a command in an environment that
-- gives it access to the sandbox package repository.
execCommand :: CommandUI ExecFlags
execCommand = CommandUI {
    commandName = "exec",
    commandSynopsis = "Give a command access to the sandbox package repository.",
    commandDescription = Just $ \pname -> wrapText $
      -- TODO: this is too GHC-focused for my liking..
         "A directly invoked GHC will not automatically be aware of any"
      ++ " sandboxes: the GHC_PACKAGE_PATH environment variable controls what"
      ++ " GHC uses. `" ++ pname ++ " exec` can be used to modify this variable:"
      ++ " COMMAND will be executed in a modified environment and thereby uses"
      ++ " the sandbox package database.\n"
      ++ "\n"
      ++ "If there is no sandbox, behaves as identity (executing COMMAND).\n"
      ++ "\n"
      ++ "Note that other " ++ pname ++ " commands change the environment"
      ++ " variable appropriately already, so there is no need to wrap those"
      ++ " in `" ++ pname ++ " exec`. But with `" ++ pname ++ " exec`, the user"
      ++ " has more control and can, for example, execute custom scripts which"
      ++ " indirectly execute GHC.\n"
      ++ "\n"
      ++ "Note that `" ++ pname ++ " repl` is different from `" ++ pname
      ++ " exec -- ghci` as the latter will not forward any additional flags"
      ++ " being defined in the local package to ghci.\n"
      ++ "\n"
      ++ "See `" ++ pname ++ " sandbox`.\n",
    commandNotes = Just $ \pname ->
         "Examples:\n"
      ++ "  " ++ pname ++ " exec -- ghci -Wall\n"
      ++ "    Start a repl session with sandbox packages and all warnings;\n"
      ++ "  " ++ pname ++ " exec gitit -- -f gitit.cnf\n"
      ++ "    Give gitit access to the sandbox packages, and pass it a flag;\n"
      ++ "  " ++ pname ++ " exec runghc Foo.hs\n"
      ++ "    Execute runghc on Foo.hs with runghc configured to use the\n"
      ++ "    sandbox package database (if a sandbox is being used).\n",
    commandUsage = \pname ->
         "Usage: " ++ pname ++ " exec [FLAGS] [--] COMMAND [--] [ARGS]\n",
    commandDefaultFlags = defaultExecFlags,
    commandOptions = \_ ->
      [ optionVerbosity execVerbosity
        (\v flags -> flags { execVerbosity = v })
      ]
  }
-- | Field-wise combination using the underlying 'Flag' 'mappend'.
instance Monoid ExecFlags where
  mempty = ExecFlags { execVerbosity = mempty }
  mappend lhs rhs = ExecFlags
    { execVerbosity = execVerbosity lhs `mappend` execVerbosity rhs }
-- ------------------------------------------------------------
-- * UserConfig flags
-- ------------------------------------------------------------
-- | Flags accepted by the @user-config@ command.
data UserConfigFlags = UserConfigFlags {
  userConfigVerbosity :: Flag Verbosity -- ^ Verbosity level for @user-config@.
}
instance Monoid UserConfigFlags where
  -- NOTE(review): 'mempty' fixes the verbosity to 'normal' instead of
  -- using the field's own 'mempty'. Depending on how the 'Flag' Monoid
  -- combines values, this may not be a true identity for 'mappend'
  -- (i.e. the Monoid laws may not hold) -- confirm against the 'Flag'
  -- instance in Distribution.Simple.Setup.
  mempty = UserConfigFlags {
    userConfigVerbosity = toFlag normal
  }
  mappend a b = UserConfigFlags {
    userConfigVerbosity = combine userConfigVerbosity
  }
    where combine field = field a `mappend` field b
-- | Command UI for @user-config@: shows (@diff@) or reapplies
-- (@update@) the user's changes relative to the default ~/.cabal/config.
userConfigCommand :: CommandUI UserConfigFlags
userConfigCommand = CommandUI {
  commandName = "user-config",
  commandSynopsis = "Display and update the user's global cabal configuration.",
  commandDescription = Just $ \_ -> wrapText $
    "When upgrading cabal, the set of configuration keys and their default"
    ++ " values may change. This command provides means to merge the existing"
    ++ " config in ~/.cabal/config"
    ++ " (i.e. all bindings that are actually defined and not commented out)"
    ++ " and the default config of the new version.\n"
    ++ "\n"
    ++ "diff: Shows a pseudo-diff of the user's ~/.cabal/config file and"
    ++ " the default configuration that would be created by cabal if the"
    ++ " config file did not exist.\n"
    ++ "update: Applies the pseudo-diff to the configuration that would be"
    ++ " created by default, and write the result back to ~/.cabal/config.",
  commandNotes = Nothing,
  commandUsage = usageAlternatives "user-config" ["diff", "update"],
  commandDefaultFlags = mempty,
  commandOptions = \ _ -> [
   optionVerbosity userConfigVerbosity (\v flags -> flags { userConfigVerbosity = v })
   ]
  }
-- ------------------------------------------------------------
-- * GetOpt Utils
-- ------------------------------------------------------------
-- | Build an option taking a single required string argument, wrapping
-- the parsed value in 'Flag'.
reqArgFlag :: ArgPlaceHolder ->
              MkOptDescr (b -> Flag String) (Flag String -> b -> b) b
reqArgFlag placeHolder = reqArg placeHolder (succeedReadE Flag) flagToList
-- | Lift every option field over flag type @a@ into the larger flag
-- record @b@, given a getter and a setter for the embedded @a@.
liftOptions :: (b -> a) -> (a -> b -> b)
            -> [OptionField a] -> [OptionField b]
liftOptions get set fields = [ liftOption get set field | field <- fields ]
-- | A boolean option: rendered as a plain true flag when showing
-- arguments, parsed as a @--foo@ \/ @--no-foo@ pair otherwise.
yesNoOpt :: ShowOrParseArgs -> MkOptDescr (b -> Flag Bool) (Flag Bool -> b -> b) b
yesNoOpt mode sf lf = case mode of
  ShowArgs -> trueArg sf lf
  _        -> Command.boolOpt' flagToMaybe Flag (sf, lf) ([], map ("no-" ++) lf) sf lf
-- | The @--solver@ option: selects the dependency solver by name,
-- parsed with the 'PreSolver' 'parse' instance.
optionSolver :: (flags -> Flag PreSolver)
             -> (Flag PreSolver -> flags -> flags)
             -> OptionField flags
optionSolver get set =
  option [] ["solver"]
  ("Select dependency solver to use (default: " ++ display defaultSolver ++ "). Choices: " ++ allSolvers ++ ", where 'choose' chooses between 'topdown' and 'modular' based on compiler version.")
  get set
  (reqArg "SOLVER" (readP_to_E (const $ "solver must be one of: " ++ allSolvers)
                   (toFlag `fmap` parse))
          (flagToList . fmap display))
-- | Options shared by all solver-using commands: max-backjumps,
-- reorder-goals, (disabled) independent-goals, shadow-installed-packages
-- and strong-flags. Getter/setter pairs are supplied per flag record.
optionSolverFlags :: ShowOrParseArgs
                  -> (flags -> Flag Int ) -> (Flag Int -> flags -> flags)
                  -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags)
                  -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags)
                  -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags)
                  -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags)
                  -> [OptionField flags]
optionSolverFlags showOrParseArgs getmbj setmbj getrg setrg _getig _setig getsip setsip getstrfl setstrfl =
  [ option [] ["max-backjumps"]
      ("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.")
      getmbj setmbj
      (reqArg "NUM" (readP_to_E ("Cannot parse number: "++)
                     (fmap toFlag (Parse.readS_to_P reads)))
                    (map show . flagToList))
  , option [] ["reorder-goals"]
      "Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages."
      getrg setrg
      (yesNoOpt showOrParseArgs)
    -- TODO: Disabled for now because it does not work as advertised (yet).
{-
  , option [] ["independent-goals"]
      "Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen."
      getig setig
      (yesNoOpt showOrParseArgs)
-}
  , option [] ["shadow-installed-packages"]
      "If multiple package instances of the same version are installed, treat all but one as shadowed."
      getsip setsip
      (yesNoOpt showOrParseArgs)
  , option [] ["strong-flags"]
      "Do not defer flag choices (this used to be the default in cabal-install <= 1.20)."
      getstrfl setstrfl
      (yesNoOpt showOrParseArgs)
  ]
-- | Two-line usage message for commands that accept either flags or a
-- list of packages.
usageFlagsOrPackages :: String -> String -> String
usageFlagsOrPackages name pname =
  "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n"
  ++ " or: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n"
-- | One-line usage message for commands that operate on packages.
usagePackages :: String -> String -> String
usagePackages name pname =
  unwords ["Usage:", pname, name, "[PACKAGES]"] ++ "\n"
-- | One-line usage message for commands that only accept flags.
usageFlags :: String -> String -> String
usageFlags name pname =
  unwords ["Usage:", pname, name, "[FLAGS]"] ++ "\n"
--TODO: do we want to allow per-package flags?
-- | Parse each command-line argument as a package name or dependency.
-- Fails (Left) on the first argument that does not parse; otherwise
-- yields the dependencies in their original order.
parsePackageArgs :: [String] -> Either String [Dependency]
parsePackageArgs = mapM parseOne
  where
    parseOne arg =
      case readPToMaybe parseDependencyOrPackageId arg of
        Just dep -> Right dep
        Nothing  -> Left $
          show arg ++ " is not valid syntax for a package name or"
          ++ " package dependency."
-- | Run a 'ReadP' parser over the whole input; succeeds only when a
-- parse consumes everything but trailing whitespace.
readPToMaybe :: Parse.ReadP a a -> String -> Maybe a
readPToMaybe p str =
  case [ r | (r, rest) <- Parse.readP_to_S p str, all isSpace rest ] of
    []      -> Nothing
    (r : _) -> Just r
-- | Parse either a dependency (name with version constraint) or a bare
-- package identifier; a bare identifier becomes a dependency on that
-- exact version (or any version if none was given).
parseDependencyOrPackageId :: Parse.ReadP r Dependency
parseDependencyOrPackageId = parse Parse.+++ liftM pkgidToDependency parse
  where
    pkgidToDependency :: PackageIdentifier -> Dependency
    pkgidToDependency pkgid = case packageVersion pkgid of
      Version [] _ -> Dependency (packageName pkgid) anyVersion
      version      -> Dependency (packageName pkgid) (thisVersion version)
-- | Render a remote repository in @name:uri@ form.
showRepo :: RemoteRepo -> String
showRepo repo = name ++ ":" ++ uri
  where
    name = remoteRepoName repo
    uri  = uriToString id (remoteRepoURI repo) []
-- | Parse a remote repository from its @name:uri@ textual form.
readRepo :: String -> Maybe RemoteRepo
readRepo str = readPToMaybe parseRepo str
-- | Parser for the @name:uri@ repository syntax; fails when the URI
-- part is not an absolute URI.
parseRepo :: Parse.ReadP r RemoteRepo
parseRepo = do
  repoName <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "_-.")
  _ <- Parse.char ':'
  uriStr <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-=._/*()@'$:;&!?~")
  case parseAbsoluteURI uriStr of
    Nothing  -> Parse.pfail
    Just uri -> return RemoteRepo
      { remoteRepoName = repoName
      , remoteRepoURI = uri
      , remoteRepoRootKeys = ()
      }
-- ------------------------------------------------------------
-- * Helpers for Documentation
-- ------------------------------------------------------------
-- | Word-wrap a string at 79 columns, one output line per wrapped row.
headLine :: String -> String
headLine str = unlines (map unwords (wrapLine 79 (words str)))
-- | Like 'headLine' but with a trailing blank line, forming a
-- paragraph.
paragraph :: String -> String
paragraph str = unlines (map unwords (wrapLine 79 (words str))) ++ "\n"
-- | Word-wrap at 77 columns and indent each resulting line, so the
-- whole paragraph fits in 79 columns with a left margin.
indentParagraph :: String -> String
indentParagraph = unlines
           . map ((" "++).unwords)
           . wrapLine 77
           . words
-- | Render a help-text section listing the global configuration keys
-- relevant to a command, one indented key per line.
relevantConfigValuesText :: [String] -> String
relevantConfigValuesText vs =
  "Relevant global configuration keys:\n"
  ++ concat [" " ++ v ++ "\n" |v <- vs]
| ian-ross/cabal | cabal-install/Distribution/Client/Setup.hs | bsd-3-clause | 91,352 | 0 | 40 | 25,531 | 18,568 | 10,450 | 8,118 | 1,772 | 5 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable, DeriveFunctor, DeriveFoldable,
DeriveTraversable #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- Note [Pass sensitive types]
-- in module PlaceHolder
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleInstances #-}
-- | Abstract syntax of global declarations.
--
-- Definitions for: @SynDecl@ and @ConDecl@, @ClassDecl@,
-- @InstDecl@, @DefaultDecl@ and @ForeignDecl@.
module HsDecls (
-- * Toplevel declarations
HsDecl(..), LHsDecl, HsDataDefn(..), HsDeriving,
-- ** Class or type declarations
TyClDecl(..), LTyClDecl,
TyClGroup(..), tyClGroupConcat, mkTyClGroup,
isClassDecl, isDataDecl, isSynDecl, tcdName,
isFamilyDecl, isTypeFamilyDecl, isDataFamilyDecl,
isOpenTypeFamilyInfo, isClosedTypeFamilyInfo,
tyFamInstDeclName, tyFamInstDeclLName,
countTyClDecls, pprTyClDeclFlavour,
tyClDeclLName, tyClDeclTyVars,
hsDeclHasCusk, famDeclHasCusk,
FamilyDecl(..), LFamilyDecl,
-- ** Instance declarations
InstDecl(..), LInstDecl, NewOrData(..), FamilyInfo(..),
TyFamInstDecl(..), LTyFamInstDecl, instDeclDataFamInsts,
DataFamInstDecl(..), LDataFamInstDecl, pprDataFamInstFlavour,
TyFamEqn(..), TyFamInstEqn, LTyFamInstEqn, TyFamDefltEqn, LTyFamDefltEqn,
HsTyPats,
LClsInstDecl, ClsInstDecl(..),
-- ** Standalone deriving declarations
DerivDecl(..), LDerivDecl,
-- ** @RULE@ declarations
LRuleDecls,RuleDecls(..),RuleDecl(..), LRuleDecl, RuleBndr(..),LRuleBndr,
collectRuleBndrSigTys,
flattenRuleDecls, pprFullRuleName,
-- ** @VECTORISE@ declarations
VectDecl(..), LVectDecl,
lvectDeclName, lvectInstDecl,
-- ** @default@ declarations
DefaultDecl(..), LDefaultDecl,
-- ** Template haskell declaration splice
SpliceExplicitFlag(..),
SpliceDecl(..), LSpliceDecl,
-- ** Foreign function interface declarations
ForeignDecl(..), LForeignDecl, ForeignImport(..), ForeignExport(..),
noForeignImportCoercionYet, noForeignExportCoercionYet,
CImportSpec(..),
-- ** Data-constructor declarations
ConDecl(..), LConDecl,
HsConDeclDetails, hsConDeclArgTys,
getConNames,
getConDetails,
gadtDeclDetails,
-- ** Document comments
DocDecl(..), LDocDecl, docDeclDoc,
-- ** Deprecations
WarnDecl(..), LWarnDecl,
WarnDecls(..), LWarnDecls,
-- ** Annotations
AnnDecl(..), LAnnDecl,
AnnProvenance(..), annProvenanceName_maybe,
-- ** Role annotations
RoleAnnotDecl(..), LRoleAnnotDecl, roleAnnotDeclName,
-- ** Injective type families
FamilyResultSig(..), LFamilyResultSig, InjectivityAnn(..), LInjectivityAnn,
resultVariableName,
-- * Grouping
HsGroup(..), emptyRdrGroup, emptyRnGroup, appendGroups
) where
-- friends:
import {-# SOURCE #-} HsExpr( LHsExpr, HsExpr, HsSplice, pprExpr, pprSplice )
-- Because Expr imports Decls via HsBracket
import HsBinds
import HsTypes
import HsDoc
import TyCon
import Name
import BasicTypes
import Coercion
import ForeignCall
import PlaceHolder ( PostTc,PostRn,PlaceHolder(..),DataId )
import NameSet
-- others:
import InstEnv
import Class
import Outputable
import Util
import SrcLoc
import Bag
import Maybes
import Data.Data hiding (TyCon,Fixity)
#if __GLASGOW_HASKELL__ < 709
import Data.Foldable ( Foldable )
import Data.Traversable ( Traversable )
#endif
{-
************************************************************************
* *
\subsection[HsDecl]{Declarations}
* *
************************************************************************
-}
type LHsDecl id = Located (HsDecl id)
        -- ^ When in a list this may have
        --
        --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi'
        --

-- For details on above see note [Api annotations] in ApiAnnotation

-- | A Haskell Declaration
data HsDecl id
  = TyClD (TyClDecl id) -- ^ A type or class declaration.
  | InstD (InstDecl id) -- ^ An instance declaration.
  | DerivD (DerivDecl id) -- ^ A standalone @deriving@ declaration.
  | ValD (HsBind id) -- ^ A value binding.
  | SigD (Sig id) -- ^ A signature.
  | DefD (DefaultDecl id) -- ^ A @default@ declaration.
  | ForD (ForeignDecl id) -- ^ A foreign import\/export declaration.
  | WarningD (WarnDecls id) -- ^ Warning\/deprecation pragmas.
  | AnnD (AnnDecl id) -- ^ An @ANN@ annotation.
  | RuleD (RuleDecls id) -- ^ @RULES@ pragmas.
  | VectD (VectDecl id) -- ^ A @VECTORISE@ declaration.
  | SpliceD (SpliceDecl id) -- Includes quasi-quotes
  | DocD (DocDecl) -- ^ A documentation comment.
  | RoleAnnotD (RoleAnnotDecl id) -- ^ A role annotation.
  deriving (Typeable)
deriving instance (DataId id) => Data (HsDecl id)
-- NB: all top-level fixity decls are contained EITHER
-- in the SigDs,
-- OR in the ClassDecls in TyClDs
--
-- The former covers
-- a) data constructors
-- b) class methods (but they can be also done in the
-- signatures of class decls)
-- c) imported functions (that have an IfacSig)
-- d) top level decls
--
-- The latter is for class methods only
-- | A 'HsDecl' is categorised into a 'HsGroup' before being
-- fed to the renamer.
data HsGroup id
  = HsGroup {
        hs_valds :: HsValBinds id, -- ^ Value and pattern bindings.
        hs_splcds :: [LSpliceDecl id], -- ^ Top-level splices.
        hs_tyclds :: [TyClGroup id],
                -- A list of mutually-recursive groups
                -- No family-instances here; they are in hs_instds
                -- Parser generates a singleton list;
                -- renamer does dependency analysis
        hs_instds :: [LInstDecl id],
                -- Both class and family instance declarations in here
        hs_derivds :: [LDerivDecl id], -- ^ Standalone deriving declarations.
        hs_fixds :: [LFixitySig id],
                -- Snaffled out of both top-level fixity signatures,
                -- and those in class declarations
        hs_defds :: [LDefaultDecl id], -- ^ @default@ declarations.
        hs_fords :: [LForeignDecl id], -- ^ Foreign declarations.
        hs_warnds :: [LWarnDecls id], -- ^ Warning\/deprecation pragmas.
        hs_annds :: [LAnnDecl id], -- ^ @ANN@ annotations.
        hs_ruleds :: [LRuleDecls id], -- ^ @RULES@ pragmas.
        hs_vects :: [LVectDecl id], -- ^ @VECTORISE@ declarations.
        hs_docs :: [LDocDecl] -- ^ Documentation comments.
  } deriving (Typeable)
deriving instance (DataId id) => Data (HsGroup id)
-- | Groups with no declarations. 'emptyGroup' leaves the value
-- bindings as an error thunk, so it must be overwritten before use;
-- 'emptyRdrGroup' and 'emptyRnGroup' fill it with the corresponding
-- empty value-binding forms.
emptyGroup, emptyRdrGroup, emptyRnGroup :: HsGroup a
emptyGroup = HsGroup
  { hs_valds  = error "emptyGroup hs_valds: Can't happen"
  , hs_splcds = []
  , hs_tyclds = []
  , hs_instds = []
  , hs_derivds = []
  , hs_fixds  = []
  , hs_defds  = []
  , hs_annds  = []
  , hs_fords  = []
  , hs_warnds = []
  , hs_ruleds = []
  , hs_vects  = []
  , hs_docs   = []
  }
emptyRdrGroup = emptyGroup { hs_valds = emptyValBindsIn }
emptyRnGroup  = emptyGroup { hs_valds = emptyValBindsOut }
-- | Concatenate two groups field-wise: value bindings are merged with
-- 'plusHsValBinds', every other (list) field is appended left-to-right.
appendGroups :: HsGroup a -> HsGroup a -> HsGroup a
appendGroups
  HsGroup {
    hs_valds = val_groups1,
    hs_splcds = spliceds1,
    hs_tyclds = tyclds1,
    hs_instds = instds1,
    hs_derivds = derivds1,
    hs_fixds = fixds1,
    hs_defds = defds1,
    hs_annds = annds1,
    hs_fords = fords1,
    hs_warnds = warnds1,
    hs_ruleds = rulds1,
    hs_vects = vects1,
    hs_docs = docs1 }
  HsGroup {
    hs_valds = val_groups2,
    hs_splcds = spliceds2,
    hs_tyclds = tyclds2,
    hs_instds = instds2,
    hs_derivds = derivds2,
    hs_fixds = fixds2,
    hs_defds = defds2,
    hs_annds = annds2,
    hs_fords = fords2,
    hs_warnds = warnds2,
    hs_ruleds = rulds2,
    hs_vects = vects2,
    hs_docs = docs2 }
  =
  HsGroup {
    hs_valds = val_groups1 `plusHsValBinds` val_groups2,
    hs_splcds = spliceds1 ++ spliceds2,
    hs_tyclds = tyclds1 ++ tyclds2,
    hs_instds = instds1 ++ instds2,
    hs_derivds = derivds1 ++ derivds2,
    hs_fixds = fixds1 ++ fixds2,
    hs_annds = annds1 ++ annds2,
    hs_defds = defds1 ++ defds2,
    hs_fords = fords1 ++ fords2,
    hs_warnds = warnds1 ++ warnds2,
    hs_ruleds = rulds1 ++ rulds2,
    hs_vects = vects1 ++ vects2,
    hs_docs = docs1 ++ docs2 }
-- | Printing a declaration simply delegates to the payload's own
-- 'Outputable' instance.
instance OutputableBndr name => Outputable (HsDecl name) where
  ppr decl = case decl of
    TyClD d       -> ppr d
    ValD binds    -> ppr binds
    DefD def      -> ppr def
    InstD inst    -> ppr inst
    DerivD deriv  -> ppr deriv
    ForD fd       -> ppr fd
    SigD sd       -> ppr sd
    RuleD rd      -> ppr rd
    VectD vect    -> ppr vect
    WarningD wd   -> ppr wd
    AnnD ad       -> ppr ad
    SpliceD dd    -> ppr dd
    DocD doc      -> ppr doc
    RoleAnnotD ra -> ppr ra
-- | Print each category of declarations in turn, with blank lines
-- between the non-empty ones.
instance OutputableBndr name => Outputable (HsGroup name) where
  ppr (HsGroup { hs_valds = val_decls,
                 hs_tyclds = tycl_decls,
                 hs_instds = inst_decls,
                 hs_derivds = deriv_decls,
                 hs_fixds = fix_decls,
                 hs_warnds = deprec_decls,
                 hs_annds = ann_decls,
                 hs_fords = foreign_decls,
                 hs_defds = default_decls,
                 hs_ruleds = rule_decls,
                 hs_vects = vect_decls })
    = vcat_mb empty
        [ppr_ds fix_decls, ppr_ds default_decls,
         ppr_ds deprec_decls, ppr_ds ann_decls,
         ppr_ds rule_decls,
         ppr_ds vect_decls,
         if isEmptyValBinds val_decls
            then Nothing
            else Just (ppr val_decls),
         ppr_ds (tyClGroupConcat tycl_decls),
         ppr_ds inst_decls,
         ppr_ds deriv_decls,
         ppr_ds foreign_decls]
    where
      -- Nothing for an empty category, so it contributes no output.
      ppr_ds :: Outputable a => [a] -> Maybe SDoc
      ppr_ds [] = Nothing
      ppr_ds ds = Just (vcat (map ppr ds))

      vcat_mb :: SDoc -> [Maybe SDoc] -> SDoc
      -- Concatenate vertically with white-space between non-blanks
      vcat_mb _ [] = empty
      vcat_mb gap (Nothing : ds) = vcat_mb gap ds
      vcat_mb gap (Just d : ds) = gap $$ d $$ vcat_mb blankLine ds
-- | Whether a top-level splice was written with explicit @$(...)@
-- syntax or as a naked top-level expression.
data SpliceExplicitFlag = ExplicitSplice | -- <=> $(f x y)
                          ImplicitSplice -- <=> f x y, i.e. a naked top level expression
  deriving (Data, Typeable)

type LSpliceDecl name = Located (SpliceDecl name)

-- | A top-level Template Haskell splice declaration.
data SpliceDecl id
  = SpliceDecl -- Top level splice
        (Located (HsSplice id))
        SpliceExplicitFlag
    deriving (Typeable)
deriving instance (DataId id) => Data (SpliceDecl id)
-- | Print only the splice expression; the explicitness flag does not
-- affect the output.
instance OutputableBndr name => Outputable (SpliceDecl name) where
  ppr (SpliceDecl (L _ splice) _) = pprSplice splice
{-
************************************************************************
* *
\subsection[SynDecl]{@data@, @newtype@ or @type@ (synonym) type declaration}
* *
************************************************************************
--------------------------------
THE NAMING STORY
--------------------------------
Here is the story about the implicit names that go with type, class,
and instance decls. It's a bit tricky, so pay attention!
"Implicit" (or "system") binders
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Each data type decl defines
a worker name for each constructor
to-T and from-T convertors
Each class decl defines
a tycon for the class
a data constructor for that tycon
the worker for that constructor
a selector for each superclass
All have occurrence names that are derived uniquely from their parent
declaration.
None of these get separate definitions in an interface file; they are
fully defined by the data or class decl. But they may *occur* in
interface files, of course. Any such occurrence must haul in the
relevant type or class decl.
Plan of attack:
- Ensure they "point to" the parent data/class decl
when loading that decl from an interface file
(See RnHiFiles.getSysBinders)
- When typechecking the decl, we build the implicit TyCons and Ids.
When doing so we look them up in the name cache (RnEnv.lookupSysName),
to ensure correct module and provenance is set
These are the two places that we have to conjure up the magic derived
names. (The actual magic is in OccName.mkWorkerOcc, etc.)
Default methods
~~~~~~~~~~~~~~~
- Occurrence name is derived uniquely from the method name
E.g. $dmmax
- If there is a default method name at all, it's recorded in
the ClassOpSig (in HsBinds), in the DefMethInfo field.
(DefMethInfo is defined in Class.hs)
Source-code class decls and interface-code class decls are treated subtly
differently, which has given me a great deal of confusion over the years.
Here's the deal. (We distinguish the two cases because source-code decls
have (Just binds) in the tcdMeths field, whereas interface decls have Nothing.
In *source-code* class declarations:
- When parsing, every ClassOpSig gets a DefMeth with a suitable RdrName
This is done by RdrHsSyn.mkClassOpSigDM
- The renamer renames it to a Name
- During typechecking, we generate a binding for each $dm for
which there's a programmer-supplied default method:
class Foo a where
op1 :: <type>
op2 :: <type>
op1 = ...
We generate a binding for $dmop1 but not for $dmop2.
The Class for Foo has a Nothing for op2 and
a Just ($dm_op1, VanillaDM) for op1.
The Name for $dmop2 is simply discarded.
In *interface-file* class declarations:
- When parsing, we see if there's an explicit programmer-supplied default method
because there's an '=' sign to indicate it:
class Foo a where
op1 = :: <type> -- NB the '='
op2 :: <type>
We use this info to generate a DefMeth with a suitable RdrName for op1,
and a NoDefMeth for op2
- The interface file has a separate definition for $dmop1, with unfolding etc.
- The renamer renames it to a Name.
- The renamer treats $dmop1 as a free variable of the declaration, so that
the binding for $dmop1 will be sucked in. (See RnHsSyn.tyClDeclFVs)
This doesn't happen for source code class decls, because they *bind* the default method.
Dictionary functions
~~~~~~~~~~~~~~~~~~~~
Each instance declaration gives rise to one dictionary function binding.
The type checker makes up new source-code instance declarations
(e.g. from 'deriving' or generic default methods --- see
TcInstDcls.tcInstDecls1). So we can't generate the names for
dictionary functions in advance (we don't know how many we need).
On the other hand for interface-file instance declarations, the decl
specifies the name of the dictionary function, and it has a binding elsewhere
in the interface file:
instance {Eq Int} = dEqInt
dEqInt :: {Eq Int} <pragma info>
So again we treat source code and interface file code slightly differently.
Source code:
- Source code instance decls have a Nothing in the (Maybe name) field
(see data InstDecl below)
- The typechecker makes up a Local name for the dict fun for any source-code
instance decl, whether it comes from a source-code instance decl, or whether
the instance decl is derived from some other construct (e.g. 'deriving').
- The occurrence name it chooses is derived from the instance decl (just for
documentation really) --- e.g. dNumInt. Two dict funs may share a common
occurrence name, but will have different uniques. E.g.
instance Foo [Int] where ...
instance Foo [Bool] where ...
These might both be dFooList
- The CoreTidy phase externalises the name, and ensures the occurrence name is
unique (this isn't special to dict funs). So we'd get dFooList and dFooList1.
- We can take this relaxed approach (changing the occurrence name later)
because dict fun Ids are not captured in a TyCon or Class (unlike default
methods, say). Instead, they are kept separately in the InstEnv. This
makes it easy to adjust them after compiling a module. (Once we've finished
compiling that module, they don't change any more.)
Interface file code:
- The instance decl gives the dict fun name, so the InstDecl has a (Just name)
in the (Maybe name) field.
- RnHsSyn.instDeclFVs treats the dict fun name as free in the decl, so that we
suck in the dfun binding
-}
type LTyClDecl name = Located (TyClDecl name)

-- | A type or class declaration.
data TyClDecl name
  = -- | @type/data family T :: *->*@
    --
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
    --             'ApiAnnotation.AnnData',
    --             'ApiAnnotation.AnnFamily','ApiAnnotation.AnnDcolon',
    --             'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpenP',
    --             'ApiAnnotation.AnnDcolon','ApiAnnotation.AnnCloseP',
    --             'ApiAnnotation.AnnEqual','ApiAnnotation.AnnRarrow',
    --             'ApiAnnotation.AnnVbar'

    -- For details on above see note [Api annotations] in ApiAnnotation
    FamDecl { tcdFam :: FamilyDecl name }

  | -- | @type@ declaration
    --
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
    --             'ApiAnnotation.AnnEqual',

    -- For details on above see note [Api annotations] in ApiAnnotation
    SynDecl { tcdLName :: Located name -- ^ Type constructor
            , tcdTyVars :: LHsQTyVars name -- ^ Type variables; for an associated type
                                           -- these include outer binders
            , tcdRhs :: LHsType name -- ^ RHS of type declaration
            , tcdFVs :: PostRn name NameSet } -- ^ Free variables ('PostRn' --
                                              -- presumably filled in by the
                                              -- renamer; confirm)

  | -- | @data@ declaration
    --
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnData',
    --              'ApiAnnotation.AnnFamily',
    --              'ApiAnnotation.AnnNewType',
    --              'ApiAnnotation.AnnNewType','ApiAnnotation.AnnDcolon'
    --              'ApiAnnotation.AnnWhere',

    -- For details on above see note [Api annotations] in ApiAnnotation
    DataDecl { tcdLName :: Located name -- ^ Type constructor
             , tcdTyVars :: LHsQTyVars name -- ^ Type variables; for an associated type
                                            --   these include outer binders
                                            -- Eg  class T a where
                                            --       type F a :: *
                                            --       type F a = a -> a
                                            -- Here the type decl for 'f' includes 'a'
                                            -- in its tcdTyVars
             , tcdDataDefn :: HsDataDefn name
             , tcdDataCusk :: PostRn name Bool -- ^ does this have a CUSK?
             , tcdFVs :: PostRn name NameSet }

  | ClassDecl { tcdCtxt :: LHsContext name, -- ^ Context...
                tcdLName :: Located name, -- ^ Name of the class
                tcdTyVars :: LHsQTyVars name, -- ^ Class type variables
                tcdFDs :: [Located (FunDep (Located name))],
                                                -- ^ Functional deps
                tcdSigs :: [LSig name], -- ^ Methods' signatures
                tcdMeths :: LHsBinds name, -- ^ Default methods
                tcdATs :: [LFamilyDecl name], -- ^ Associated types;
                tcdATDefs :: [LTyFamDefltEqn name], -- ^ Associated type defaults
                tcdDocs :: [LDocDecl], -- ^ Haddock docs
                tcdFVs :: PostRn name NameSet
    }
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnClass',
        --           'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpen',
        --           'ApiAnnotation.AnnClose'
        --   - The tcdFDs will have 'ApiAnnotation.AnnVbar',
        --     'ApiAnnotation.AnnComma'
        --     'ApiAnnotation.AnnRarrow'

        -- For details on above see note [Api annotations] in ApiAnnotation
  deriving (Typeable)
deriving instance (DataId id) => Data (TyClDecl id)
-- This is used in TcTyClsDecls to represent
-- strongly connected components of decls
-- No family instances in here
-- The role annotations must be grouped with their decls for the
-- type-checker to infer roles correctly
-- | A group of type\/class declarations (one strongly connected
-- component), together with the role annotations that belong with them.
data TyClGroup name
  = TyClGroup { group_tyclds :: [LTyClDecl name] -- ^ Declarations of the group.
              , group_roles :: [LRoleAnnotDecl name] } -- ^ Their role annotations.
    deriving (Typeable)
deriving instance (DataId id) => Data (TyClGroup id)
-- | All the type\/class declarations of the groups, concatenated in
-- order.
tyClGroupConcat :: [TyClGroup name] -> [LTyClDecl name]
tyClGroupConcat groups = concat [ group_tyclds g | g <- groups ]
-- | Wrap declarations into a 'TyClGroup' with no role annotations.
mkTyClGroup :: [LTyClDecl name] -> TyClGroup name
mkTyClGroup decls = TyClGroup { group_roles = [], group_tyclds = decls }
-- Simple classifiers for TyClDecl
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- | @True@ <=> argument is a @data@\/@newtype@
-- declaration.
isDataDecl :: TyClDecl name -> Bool
isDataDecl decl = case decl of
  DataDecl {} -> True
  _           -> False
-- | @True@ <=> argument is a type-synonym declaration ('SynDecl').
isSynDecl :: TyClDecl name -> Bool
isSynDecl decl = case decl of
  SynDecl {} -> True
  _          -> False
-- | @True@ <=> argument is a type-class declaration.
isClassDecl :: TyClDecl name -> Bool
isClassDecl decl = case decl of
  ClassDecl {} -> True
  _            -> False
-- | @True@ <=> argument is a type\/data family declaration.
isFamilyDecl :: TyClDecl name -> Bool
isFamilyDecl decl = case decl of
  FamDecl {} -> True
  _          -> False
-- | @True@ <=> argument is an open or closed type-family declaration
-- (data families do not count).
isTypeFamilyDecl :: TyClDecl name -> Bool
isTypeFamilyDecl decl = case decl of
  FamDecl (FamilyDecl { fdInfo = OpenTypeFamily })      -> True
  FamDecl (FamilyDecl { fdInfo = ClosedTypeFamily {} }) -> True
  _                                                     -> False
-- | @True@ <=> the family info describes an open type family.
isOpenTypeFamilyInfo :: FamilyInfo name -> Bool
isOpenTypeFamilyInfo info = case info of
  OpenTypeFamily -> True
  _              -> False
-- | @True@ <=> the family info describes a closed type family.
isClosedTypeFamilyInfo :: FamilyInfo name -> Bool
isClosedTypeFamilyInfo info = case info of
  ClosedTypeFamily {} -> True
  _                   -> False
-- | @True@ <=> argument is a data-family declaration.
isDataFamilyDecl :: TyClDecl name -> Bool
isDataFamilyDecl decl = case decl of
  FamDecl (FamilyDecl { fdInfo = DataFamily }) -> True
  _                                            -> False
-- Dealing with names
-- | The (unlocated) name of the type family being instantiated.
tyFamInstDeclName :: TyFamInstDecl name -> name
tyFamInstDeclName decl = unLoc (tyFamInstDeclLName decl)
-- | The located name of the type family being instantiated, taken
-- from the instance's equation.
tyFamInstDeclLName :: TyFamInstDecl name -> Located name
tyFamInstDeclLName (TyFamInstDecl { tfid_eqn = L _ eqn })
  = case eqn of
      TyFamEqn { tfe_tycon = ln } -> ln
-- | The located name bound by any flavour of type\/class declaration;
-- family declarations keep their name inside the 'FamilyDecl'.
tyClDeclLName :: TyClDecl name -> Located name
tyClDeclLName decl = case decl of
  FamDecl { tcdFam = FamilyDecl { fdLName = ln } } -> ln
  _                                                -> tcdLName decl
-- | The (unlocated) name bound by a type\/class declaration.
tcdName :: TyClDecl name -> name
tcdName decl = unLoc (tyClDeclLName decl)
-- | The type variables of any flavour of type\/class declaration;
-- family declarations keep theirs inside the 'FamilyDecl'.
tyClDeclTyVars :: TyClDecl name -> LHsQTyVars name
tyClDeclTyVars decl = case decl of
  FamDecl { tcdFam = FamilyDecl { fdTyVars = tvs } } -> tvs
  _                                                  -> tcdTyVars decl
-- | Count (classes, synonyms, data decls, newtypes, family decls);
-- the data\/newtype counts exclude family instances.
countTyClDecls :: [TyClDecl name] -> (Int, Int, Int, Int, Int)
countTyClDecls decls
  = ( count isClassDecl decls
    , count isSynDecl decls
    , count isPlainData decls
    , count isNewtype decls
    , count isFamilyDecl decls )
  where
    -- 'data' declarations proper (not 'newtype', not family instances)
    isPlainData (DataDecl { tcdDataDefn = HsDataDefn { dd_ND = DataType } }) = True
    isPlainData _ = False
    -- 'newtype' declarations
    isNewtype (DataDecl { tcdDataDefn = HsDataDefn { dd_ND = NewType } }) = True
    isNewtype _ = False
-- | Does this declaration have a complete, user-supplied kind signature?
-- See Note [Complete user-supplied kind signatures]
hsDeclHasCusk :: TyClDecl Name -> Bool
hsDeclHasCusk decl = case decl of
  FamDecl { tcdFam = fam_decl }                -> famDeclHasCusk Nothing fam_decl
  SynDecl { tcdTyVars = tyvars, tcdRhs = rhs } -> hsTvbAllKinded tyvars
                                                  && hasTopKindSig rhs
  DataDecl { tcdDataCusk = cusk }              -> cusk
  ClassDecl { tcdTyVars = tyvars }             -> hsTvbAllKinded tyvars
  where
    -- A synonym RHS counts only when its top level (looking through
    -- parentheses) carries an explicit kind signature.
    hasTopKindSig (L _ ty) = case ty of
      HsParTy lty  -> hasTopKindSig lty
      HsKindSig {} -> True
      _            -> False
-- Pretty-printing TyClDecl
-- ~~~~~~~~~~~~~~~~~~~~~~~~
-- | Pretty-print a declaration in (approximately) source syntax.
instance OutputableBndr name
      => Outputable (TyClDecl name) where
    ppr (FamDecl { tcdFam = decl }) = ppr decl
    ppr (SynDecl { tcdLName = ltycon, tcdTyVars = tyvars, tcdRhs = rhs })
      = hang (text "type" <+>
              pp_vanilla_decl_head ltycon tyvars [] <+> equals)
          4 (ppr rhs)
    ppr (DataDecl { tcdLName = ltycon, tcdTyVars = tyvars, tcdDataDefn = defn })
      = pp_data_defn (pp_vanilla_decl_head ltycon tyvars) defn
    ppr (ClassDecl {tcdCtxt = context, tcdLName = lclas, tcdTyVars = tyvars,
                    tcdFDs = fds,
                    tcdSigs = sigs, tcdMeths = methods,
                    tcdATs = ats, tcdATDefs = at_defs})
      | null sigs && isEmptyBag methods && null ats && null at_defs -- No "where" part
      = top_matter
      | otherwise -- Laid out
      = vcat [ top_matter <+> text "where"
             , nest 2 $ pprDeclList (map (pprFamilyDecl NotTopLevel . unLoc) ats ++
                                     map ppr_fam_deflt_eqn at_defs ++
                                     pprLHsBindsForUser methods sigs) ]
      where
        -- The "class C a b | fds" header, shared by both layouts.
        top_matter = text "class"
                     <+> pp_vanilla_decl_head lclas tyvars (unLoc context)
                     <+> pprFundeps (map unLoc fds)
-- | Print the declarations of the group followed by its role
-- annotations.
instance OutputableBndr name => Outputable (TyClGroup name) where
  ppr (TyClGroup { group_tyclds = decls, group_roles = annots })
    = ppr decls $$ ppr annots
-- | Print the head of a type\/class declaration: context, then the
-- (prefix-printed) name, then its type variables.
pp_vanilla_decl_head :: OutputableBndr name
                     => Located name
                     -> LHsQTyVars name
                     -> HsContext name
                     -> SDoc
pp_vanilla_decl_head lname tyvars context =
  hsep [ pprHsContext context
       , pprPrefixOcc (unLoc lname)
       , ppr tyvars ]
-- | A short keyword describing which flavour of declaration this is
-- (class \/ type \/ ... family \/ data \/ newtype).
pprTyClDeclFlavour :: TyClDecl a -> SDoc
pprTyClDeclFlavour decl = case decl of
  ClassDecl {} -> text "class"
  SynDecl {}   -> text "type"
  FamDecl { tcdFam = FamilyDecl { fdInfo = info } }
               -> pprFlavour info <+> text "family"
  DataDecl { tcdDataDefn = HsDataDefn { dd_ND = nd } }
               -> ppr nd
{- *********************************************************************
* *
Data and type family declarations
* *
********************************************************************* -}
-- Note [FamilyResultSig]
-- ~~~~~~~~~~~~~~~~~~~~~~
--
-- This data type represents the return signature of a type family. Possible
-- values are:
--
-- * NoSig - the user supplied no return signature:
-- type family Id a where ...
--
-- * KindSig - the user supplied the return kind:
-- type family Id a :: * where ...
--
-- * TyVarSig - user named the result with a type variable and possibly
-- provided a kind signature for that variable:
-- type family Id a = r where ...
-- type family Id a = (r :: *) where ...
--
-- Naming result of a type family is required if we want to provide
-- injectivity annotation for a type family:
-- type family Id a = r | r -> a where ...
--
-- See also: Note [Injectivity annotation]
-- Note [Injectivity annotation]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- A user can declare a type family to be injective:
--
-- type family Id a = r | r -> a where ...
--
-- * The part after the "|" is called "injectivity annotation".
-- * "r -> a" part is called "injectivity condition"; at the moment terms
-- "injectivity annotation" and "injectivity condition" are synonymous
-- because we only allow a single injectivity condition.
-- * "r" is the "LHS of injectivity condition". LHS can only contain the
-- variable naming the result of a type family.
-- * "a" is the "RHS of injectivity condition". RHS contains space-separated
-- type and kind variables representing the arguments of a type
-- family. Variables can be omitted if a type family is not injective in
-- these arguments. Example:
-- type family Foo a b c = d | d -> a c where ...
--
-- Note that:
-- a) naming of type family result is required to provide injectivity
-- annotation
-- b) for associated types if the result was named then injectivity annotation
-- is mandatory. Otherwise result type variable is indistinguishable from
-- associated type default.
--
-- It is possible that in the future this syntax will be extended to support
-- more complicated injectivity annotations. For example we could declare that
-- if we know the result of Plus and one of its arguments we can determine the
-- other argument:
--
-- type family Plus a b = (r :: Nat) | r a -> b, r b -> a where ...
--
-- Here injectivity annotation would consist of two comma-separated injectivity
-- conditions.
--
-- See also Note [Injective type families] in TyCon
type LFamilyResultSig name = Located (FamilyResultSig name)

-- | Result signature of a type family declaration; see Note [FamilyResultSig].
data FamilyResultSig name = -- see Note [FamilyResultSig]
    NoSig
  -- ^ - 'ApiAnnotation.AnnKeywordId' :
  -- For details on above see note [Api annotations] in ApiAnnotation
  | KindSig (LHsKind name)
  -- ^ - 'ApiAnnotation.AnnKeywordId' :
  --        'ApiAnnotation.AnnOpenP','ApiAnnotation.AnnDcolon',
  --        'ApiAnnotation.AnnCloseP'
  -- For details on above see note [Api annotations] in ApiAnnotation
  | TyVarSig (LHsTyVarBndr name)
  -- ^ - 'ApiAnnotation.AnnKeywordId' :
  --        'ApiAnnotation.AnnOpenP','ApiAnnotation.AnnDcolon',
  --        'ApiAnnotation.AnnCloseP', 'ApiAnnotation.AnnEqual'
  -- For details on above see note [Api annotations] in ApiAnnotation
  deriving ( Typeable )
deriving instance (DataId name) => Data (FamilyResultSig name)
type LFamilyDecl name = Located (FamilyDecl name)

-- | A type or data family declaration (open, closed, or associated).
data FamilyDecl name = FamilyDecl
  { fdInfo           :: FamilyInfo name              -- type/data, closed/open
  , fdLName          :: Located name                 -- type constructor
  , fdTyVars         :: LHsQTyVars name              -- type variables
  , fdResultSig      :: LFamilyResultSig name        -- result signature
  , fdInjectivityAnn :: Maybe (LInjectivityAnn name) -- optional injectivity ann
  }
  -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
  --        'ApiAnnotation.AnnData', 'ApiAnnotation.AnnFamily',
  --        'ApiAnnotation.AnnWhere', 'ApiAnnotation.AnnOpenP',
  --        'ApiAnnotation.AnnDcolon', 'ApiAnnotation.AnnCloseP',
  --        'ApiAnnotation.AnnEqual', 'ApiAnnotation.AnnRarrow',
  --        'ApiAnnotation.AnnVbar'
  -- For details on above see note [Api annotations] in ApiAnnotation
  deriving ( Typeable )
deriving instance (DataId id) => Data (FamilyDecl id)
type LInjectivityAnn name = Located (InjectivityAnn name)

-- | If the user supplied an injectivity annotation it is represented using
-- InjectivityAnn. At the moment this is a single injectivity condition - see
-- Note [Injectivity annotation]. `Located name` stores the LHS of injectivity
-- condition. `[Located name]` stores the RHS of injectivity condition. Example:
--
--   type family Foo a b c = r | r -> a c where ...
--
-- This will be represented as "InjectivityAnn `r` [`a`, `c`]"
data InjectivityAnn name
  = InjectivityAnn (Located name) [Located name]
  -- ^ - 'ApiAnnotation.AnnKeywordId' :
  --        'ApiAnnotation.AnnRarrow', 'ApiAnnotation.AnnVbar'
  -- For details on above see note [Api annotations] in ApiAnnotation
  deriving ( Data, Typeable )

-- | Flavour of a family declaration: @data family@, open @type family@,
-- or closed @type family ... where@.
data FamilyInfo name
  = DataFamily
  | OpenTypeFamily
  -- | 'Nothing' if we're in an hs-boot file and the user
  -- said "type family Foo x where .."
  | ClosedTypeFamily (Maybe [LTyFamInstEqn name])
  deriving( Typeable )
deriving instance (DataId name) => Data (FamilyInfo name)
-- | Does this family declaration have a complete, user-supplied kind signature?
famDeclHasCusk :: Maybe Bool
                  -- ^ if associated, does the enclosing class have a CUSK?
               -> FamilyDecl name -> Bool
famDeclHasCusk mb_class_cusk decl = case decl of
  FamilyDecl { fdInfo      = ClosedTypeFamily _
             , fdTyVars    = tyvars
             , fdResultSig = L _ resultSig }
    -- a closed family has a CUSK iff every tyvar and the result are annotated
    -> hsTvbAllKinded tyvars && hasReturnKindSignature resultSig
  _ -> mb_class_cusk `orElse` True
       -- all un-associated open families have CUSKs!
-- | Does this family declaration have user-supplied return kind signature?
hasReturnKindSignature :: FamilyResultSig a -> Bool
hasReturnKindSignature sig = case sig of
  NoSig                        -> False
  TyVarSig (L _ (UserTyVar _)) -> False  -- bare result variable, no kind given
  _                            -> True
-- | Maybe return name of the result type variable
resultVariableName :: FamilyResultSig a -> Maybe a
resultVariableName res = case res of
  TyVarSig bndr -> Just (hsLTyVarName bndr)
  _             -> Nothing
{-
Note [Complete user-supplied kind signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We kind-check declarations differently if they have a complete, user-supplied
kind signature (CUSK). This is because we can safely generalise a CUSKed
declaration before checking all of the others, supporting polymorphic recursion.
See ghc.haskell.org/trac/ghc/wiki/GhcKinds/KindInference#Proposednewstrategy
and #9200 for lots of discussion of how we got here.
A declaration has a CUSK if we can know its complete kind without doing any
inference, at all. Here are the rules:
- A class or datatype is said to have a CUSK if and only if all of its type
variables are annotated. Its result kind is, by construction, Constraint or *
respectively.
- A type synonym has a CUSK if and only if all of its type variables and its
RHS are annotated with kinds.
- A closed type family is said to have a CUSK if and only if all of its type
variables and its return type are annotated.
- An open type family always has a CUSK -- unannotated type variables (and
return type) default to *.
- Additionally, if -XTypeInType is on, then a data definition with a top-level
:: must explicitly bind all kind variables to the right of the ::.
See test dependent/should_compile/KindLevels, which requires this case.
(Naturally, any kind variable mentioned before the :: should not be bound
after it.)
-}
instance (OutputableBndr name) => Outputable (FamilyDecl name) where
  ppr = pprFamilyDecl TopLevel

-- | Pretty-print a family declaration; the 'TopLevelFlag' controls whether
-- the @family@ keyword is printed (omitted for associated families).
pprFamilyDecl :: OutputableBndr name => TopLevelFlag -> FamilyDecl name -> SDoc
pprFamilyDecl top_level (FamilyDecl { fdInfo = info, fdLName = ltycon
                                    , fdTyVars = tyvars
                                    , fdResultSig = L _ result
                                    , fdInjectivityAnn = mb_inj })
  = vcat [ pprFlavour info <+> pp_top_level <+>
           pp_vanilla_decl_head ltycon tyvars [] <+>
           pp_kind <+> pp_inj <+> pp_where
         , nest 2 $ pp_eqns ]
  where
    -- "family" keyword only for top-level declarations
    pp_top_level = case top_level of
                     TopLevel    -> text "family"
                     NotTopLevel -> empty

    -- result signature: nothing, a kind signature, or "= tyvar"
    pp_kind = case result of
                NoSig            -> empty
                KindSig kind     -> dcolon <+> ppr kind
                TyVarSig tv_bndr -> text "=" <+> ppr tv_bndr

    -- injectivity annotation, e.g. "| r -> a b"
    pp_inj = case mb_inj of
               Just (L _ (InjectivityAnn lhs rhs)) ->
                 hsep [ vbar, ppr lhs, text "->", hsep (map ppr rhs) ]
               Nothing -> empty

    -- closed families get "where" and their equations (or ".." in hs-boot)
    (pp_where, pp_eqns) = case info of
      ClosedTypeFamily mb_eqns ->
        ( text "where"
        , case mb_eqns of
            Nothing   -> text ".."
            Just eqns -> vcat $ map ppr_fam_inst_eqn eqns )
      _ -> (empty, empty)
-- | The leading keyword of a family declaration: @data@ or @type@.
pprFlavour :: FamilyInfo name -> SDoc
pprFlavour info = case info of
  DataFamily          -> text "data"
  OpenTypeFamily      -> text "type"
  ClosedTypeFamily {} -> text "type"
instance Outputable (FamilyInfo name) where
  -- e.g. "data family" or "type family"
  ppr info = pprFlavour info <+> text "family"
{- *********************************************************************
* *
Data types and data constructors
* *
********************************************************************* -}
data HsDataDefn name   -- The payload of a data type defn
                       -- Used *both* for vanilla data declarations,
                       --       *and* for data family instances
  =    -- | Declares a data type or newtype, giving its constructors
       -- @
       --  data/newtype T a = <constrs>
       --  data/newtype instance T [a] = <constrs>
       -- @
    HsDataDefn { dd_ND     :: NewOrData,
                 dd_ctxt   :: LHsContext name,       -- ^ Context
                 dd_cType  :: Maybe (Located CType),
                 dd_kindSig:: Maybe (LHsKind name),
                     -- ^ Optional kind signature.
                     --
                     -- @(Just k)@ for a GADT-style @data@,
                     -- or @data instance@ decl, with explicit kind sig
                     --
                     -- Always @Nothing@ for H98-syntax decls
                 dd_cons   :: [LConDecl name],
                     -- ^ Data constructors
                     --
                     -- For @data T a = T1 | T2 a@
                     --   the 'LConDecl's all have 'ConDeclH98'.
                     -- For @data T a where { T1 :: T a }@
                     --   the 'LConDecls' all have 'ConDeclGADT'.
                 dd_derivs :: HsDeriving name  -- ^ Optional 'deriving' clause
             -- For details on above see note [Api annotations] in ApiAnnotation
   }
    deriving( Typeable )
deriving instance (DataId id) => Data (HsDataDefn id)
type HsDeriving name = Maybe (Located [LHsSigType name])
  -- ^ The optional 'deriving' clause of a data declaration
  --
  --   @Nothing@ => not specified,
  --   @Just []@ => derive exactly what is asked
  --
  --   It's a 'LHsSigType' because, with Generalised Newtype
  --   Deriving, we can mention type variables that aren't
  --   bound by the data type, e.g.
  --       data T b = ... deriving( C [a] )
  --   should produce a derived instance for (C [a] (T b))
  --
  --   The payload of the Maybe is Located so that we have a
  --   place to hang the API annotations:
  --     - 'ApiAnnotation.AnnKeywordId' :
  --          'ApiAnnotation.AnnDeriving',
  --          'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose'
-- | Distinguishes @newtype@ declarations from @data@ declarations.
data NewOrData
  = NewType                     -- ^ @newtype Blah ...@
  | DataType                    -- ^ @data Blah ...@
  deriving( Eq, Data, Typeable ) -- Needed because Demand derives Eq

type LConDecl name = Located (ConDecl name)
      -- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi' when
      --   in a GADT constructor list
      -- For details on above see note [Api annotations] in ApiAnnotation
-- |
--
-- @
-- data T b = forall a. Eq a => MkT a b
-- MkT :: forall b a. Eq a => MkT a b
--
-- data T b where
-- MkT1 :: Int -> T Int
--
-- data T = Int `MkT` Int
-- | MkT2
--
-- data T a where
-- Int `MkT` Int :: T Int
-- @
--
-- - 'ApiAnnotation.AnnKeywordId's : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnDotdot','ApiAnnotation.AnnClose',
-- 'ApiAnnotation.AnnEqual','ApiAnnotation.AnnVbar',
-- 'ApiAnnotation.AnnDarrow',
-- 'ApiAnnotation.AnnForall','ApiAnnotation.AnnDot'
-- For details on above see note [Api annotations] in ApiAnnotation
-- | A data constructor declaration, in either GADT or Haskell-98 syntax.
data ConDecl name
  = ConDeclGADT
      { con_names :: [Located name]
      , con_type  :: LHsSigType name
        -- ^ The type after the @::@
      , con_doc   :: Maybe LHsDocString
        -- ^ A possible Haddock comment.
      }
  | ConDeclH98
      { con_name :: Located name
      , con_qvars :: Maybe (LHsQTyVars name)
        -- User-written forall (if any), and its implicit
        -- kind variables
        -- Non-Nothing needs -XExistentialQuantification
        --   e.g. data T a = forall b. MkT b (b->a)
        --        con_qvars = {b}
      , con_cxt :: Maybe (LHsContext name)
        -- ^ User-written context (if any)
      , con_details :: HsConDeclDetails name
        -- ^ Arguments
      , con_doc :: Maybe LHsDocString
        -- ^ A possible Haddock comment.
      } deriving (Typeable)
deriving instance (DataId name) => Data (ConDecl name)
-- | The arguments of a data constructor: prefix, infix, or record syntax.
type HsConDeclDetails name
   = HsConDetails (LBangType name) (Located [LConDeclField name])
-- | All the names bound by a constructor declaration (a GADT-style
-- declaration can bind several at once).
getConNames :: ConDecl name -> [Located name]
getConNames decl = case decl of
  ConDeclH98  { con_name  = name  } -> [name]
  ConDeclGADT { con_names = names } -> names
-- don't call with RdrNames, because it can't deal with HsAppsTy
-- | The argument details (prefix/infix/record) of a constructor declaration.
getConDetails :: ConDecl name -> HsConDeclDetails name
getConDetails decl = case decl of
  ConDeclH98  { con_details = details } -> details
  ConDeclGADT { con_type    = ty      } ->
    -- for GADT syntax the details are recovered from the signature
    let (details, _, _, _) = gadtDeclDetails ty in details
-- don't call with RdrNames, because it can't deal with HsAppsTy
-- | Decompose a GADT constructor signature into argument details,
-- result type, context, and quantified type variables.
gadtDeclDetails :: LHsSigType name
                -> ( HsConDeclDetails name
                   , LHsType name
                   , LHsContext name
                   , [LHsTyVarBndr name] )
gadtDeclDetails HsIB {hsib_body = lbody_ty} = (details,res_ty,cxt,tvs)
  where
    (tvs, cxt, tau) = splitLHsSigmaTy lbody_ty
    -- a record-syntax arrow "{..} -> T" becomes RecCon; anything else is a
    -- nullary PrefixCon with the whole tau as the result type
    (details, res_ty) -- See Note [Sorting out the result type]
      = case tau of
          L _ (HsFunTy (L l (HsRecTy flds)) res_ty')
                 -> (RecCon (L l flds), res_ty')
          _other -> (PrefixCon [], tau)
-- | The argument types of a constructor, in declaration order.
hsConDeclArgTys :: HsConDeclDetails name -> [LBangType name]
hsConDeclArgTys details = case details of
  PrefixCon tys  -> tys
  InfixCon t1 t2 -> [t1, t2]
  RecCon flds    -> [ cd_fld_type (unLoc fld) | fld <- unLoc flds ]
-- | Pretty-print the right-hand side of a data declaration, given a
-- function that prints the header (context and head).
pp_data_defn :: OutputableBndr name
                => (HsContext name -> SDoc)   -- Printing the header
                -> HsDataDefn name
                -> SDoc
pp_data_defn pp_hdr (HsDataDefn { dd_ND = new_or_data, dd_ctxt = L _ context
                                , dd_kindSig = mb_sig
                                , dd_cons = condecls, dd_derivs = derivings })
  | null condecls
  = ppr new_or_data <+> pp_hdr context <+> pp_sig
  | otherwise
  = hang (ppr new_or_data <+> pp_hdr context <+> pp_sig)
       2 (pp_condecls condecls $$ pp_derivings)
  where
    -- optional top-level kind signature
    pp_sig = case mb_sig of
               Nothing   -> empty
               Just kind -> dcolon <+> ppr kind
    -- optional deriving clause
    pp_derivings = case derivings of
                     Nothing -> empty
                     Just (L _ ds) -> hsep [ text "deriving"
                                           , parens (interpp'SP ds)]

instance OutputableBndr name => Outputable (HsDataDefn name) where
  -- a bare HsDataDefn has no real header to print
  ppr d = pp_data_defn (\_ -> text "Naked HsDataDefn") d

instance Outputable NewOrData where
  ppr NewType  = text "newtype"
  ppr DataType = text "data"
-- | Print the constructors of a data declaration, choosing GADT-style
-- ("where" block) or H98-style ("= C1 | C2") based on the first one.
pp_condecls :: OutputableBndr name => [LConDecl name] -> SDoc
pp_condecls cs@(L _ ConDeclGADT{} : _) -- In GADT syntax
  = hang (text "where") 2 (vcat (map ppr cs))
pp_condecls cs                         -- In H98 syntax
  = equals <+> sep (punctuate (text " |") (map ppr cs))

instance (OutputableBndr name) => Outputable (ConDecl name) where
    ppr = pprConDecl

-- | Pretty-print a single constructor declaration in its own syntax.
pprConDecl :: OutputableBndr name => ConDecl name -> SDoc
pprConDecl (ConDeclH98 { con_name = L _ con
                       , con_qvars = mtvs
                       , con_cxt = mcxt
                       , con_details = details
                       , con_doc = doc })
  = sep [ppr_mbDoc doc, pprHsForAll tvs cxt, ppr_details details]
  where
    -- argument printing depends on prefix/infix/record form
    ppr_details (InfixCon t1 t2) = hsep [ppr t1, pprInfixOcc con, ppr t2]
    ppr_details (PrefixCon tys)  = hsep (pprPrefixOcc con
                                    : map (pprParendHsType . unLoc) tys)
    ppr_details (RecCon fields)  = pprPrefixOcc con
                                     <+> pprConDeclFields (unLoc fields)
    tvs = case mtvs of
      Nothing -> []
      Just (HsQTvs { hsq_explicit = tvs }) -> tvs
    cxt = fromMaybe (noLoc []) mcxt
pprConDecl (ConDeclGADT { con_names = cons, con_type = res_ty, con_doc = doc })
  = sep [ppr_mbDoc doc <+> ppr_con_names cons <+> dcolon
         <+> ppr res_ty]

-- | Comma-separate several constructor names (GADT declarations may
-- introduce more than one name at once).
ppr_con_names :: (OutputableBndr name) => [Located name] -> SDoc
ppr_con_names = pprWithCommas (pprPrefixOcc . unLoc)
{-
************************************************************************
* *
Instance declarations
* *
************************************************************************
Note [Type family instance declarations in HsSyn]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The data type TyFamEqn represents one equation of a type family instance.
It is parameterised over its tfe_pats field:
* An ordinary type family instance declaration looks like this in source Haskell
type instance T [a] Int = a -> a
(or something similar for a closed family)
It is represented by a TyFamInstEqn, with *type* in the tfe_pats field.
* On the other hand, the *default instance* of an associated type looks like
this in source Haskell
class C a where
type T a b
type T a b = a -> b -- The default instance
It is represented by a TyFamDefltEqn, with *type variables* in the tfe_pats
field.
-}
----------------- Type synonym family instances -------------
type LTyFamInstEqn name = Located (TyFamInstEqn name)
  -- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi'
  --   when in a list
  -- For details on above see note [Api annotations] in ApiAnnotation

type LTyFamDefltEqn name = Located (TyFamDefltEqn name)
  -- ^ See Note [Type family instance declarations in HsSyn]

type HsTyPats name = HsImplicitBndrs name [LHsType name]
      -- ^ Type patterns (with kind and type bndrs)
      -- See Note [Family instance declaration binders]
{- Note [Family instance declaration binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The HsTyPats field is the LHS patterns of a type/data family instance.
The hsib_vars of the HsImplicitBndrs are the template variables of the
type patterns, i.e. fv(pat_tys). Note in particular
* The hsib_vars *includes* any anonymous wildcards. For example
type instance F a _ = a
The hsib_vars will be {a, _}. Remember that each separate wildcard
'_' gets its own unique. In this context wildcards behave just like
an ordinary type variable, only anonymous.
* The hsib_vars *includes* type variables that are already in scope
Eg class C s t where
type F t p :: *
instance C w (a,b) where
type F (a,b) x = x->a
The hsib_vars of the F decl are {a,b,x}, even though the F decl
is nested inside the 'instance' decl.
However after the renamer, the uniques will match up:
instance C w7 (a8,b9) where
type F (a8,b9) x10 = x10->a8
so that we can compare the type pattern in the 'instance' decl and
in the associated 'type' decl
-}
type TyFamInstEqn  name = TyFamEqn name (HsTyPats name)
type TyFamDefltEqn name = TyFamEqn name (LHsQTyVars name)
  -- See Note [Type family instance declarations in HsSyn]

-- | One equation in a type family instance declaration
-- See Note [Type family instance declarations in HsSyn]
data TyFamEqn name pats
  = TyFamEqn
      { tfe_tycon :: Located name
      , tfe_pats  :: pats
      , tfe_rhs   :: LHsType name }
    -- ^
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnEqual'
    -- For details on above see note [Api annotations] in ApiAnnotation
  deriving( Typeable )
deriving instance (DataId name, Data pats) => Data (TyFamEqn name pats)

type LTyFamInstDecl name = Located (TyFamInstDecl name)

-- | A type family instance declaration, e.g. @type instance F Int = Bool@.
data TyFamInstDecl name
  = TyFamInstDecl
      { tfid_eqn :: LTyFamInstEqn name
      , tfid_fvs :: PostRn name NameSet }
    -- ^
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
    --      'ApiAnnotation.AnnInstance',
    -- For details on above see note [Api annotations] in ApiAnnotation
  deriving( Typeable )
deriving instance (DataId name) => Data (TyFamInstDecl name)
----------------- Data family instances -------------
type LDataFamInstDecl name = Located (DataFamInstDecl name)

-- | A data family instance declaration, e.g. @data instance T [a] = ...@.
data DataFamInstDecl name
  = DataFamInstDecl
      { dfid_tycon :: Located name
      , dfid_pats  :: HsTyPats name         -- LHS
      , dfid_defn  :: HsDataDefn name       -- RHS
      , dfid_fvs   :: PostRn name NameSet } -- Free vars for dependency analysis
    -- ^
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnData',
    --      'ApiAnnotation.AnnNewType','ApiAnnotation.AnnInstance',
    --      'ApiAnnotation.AnnDcolon'
    --      'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpen',
    --      'ApiAnnotation.AnnClose'
    -- For details on above see note [Api annotations] in ApiAnnotation
  deriving( Typeable )
deriving instance (DataId name) => Data (DataFamInstDecl name)
----------------- Class instances -------------
type LClsInstDecl name = Located (ClsInstDecl name)

-- | A class instance declaration, with its where-clause contents.
data ClsInstDecl name
  = ClsInstDecl
      { cid_poly_ty :: LHsSigType name    -- Context => Class Instance-type
              -- Using a polytype means that the renamer conveniently
              -- figures out the quantified type variables for us.
      , cid_binds         :: LHsBinds name           -- Class methods
      , cid_sigs          :: [LSig name]             -- User-supplied pragmatic info
      , cid_tyfam_insts   :: [LTyFamInstDecl name]   -- Type family instances
      , cid_datafam_insts :: [LDataFamInstDecl name] -- Data family instances
      , cid_overlap_mode  :: Maybe (Located OverlapMode)
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnClose',
        -- For details on above see note [Api annotations] in ApiAnnotation
      }
    -- ^
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnInstance',
    --      'ApiAnnotation.AnnWhere',
    --      'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose',
    -- For details on above see note [Api annotations] in ApiAnnotation
  deriving (Typeable)
deriving instance (DataId id) => Data (ClsInstDecl id)

----------------- Instances of all kinds -------------
type LInstDecl name = Located (InstDecl name)

-- | An instance declaration of any kind: class, data family, or type family.
data InstDecl name  -- Both class and family instances
  = ClsInstD
      { cid_inst  :: ClsInstDecl name }
  | DataFamInstD              -- data family instance
      { dfid_inst :: DataFamInstDecl name }
  | TyFamInstD                -- type family instance
      { tfid_inst :: TyFamInstDecl name }
  deriving (Typeable)
deriving instance (DataId id) => Data (InstDecl id)
instance (OutputableBndr name) => Outputable (TyFamInstDecl name) where
  ppr = pprTyFamInstDecl TopLevel

-- | Pretty-print a type family instance; the @instance@ keyword is printed
-- only for top-level (non-associated) instances.
pprTyFamInstDecl :: OutputableBndr name => TopLevelFlag -> TyFamInstDecl name -> SDoc
pprTyFamInstDecl top_lvl (TyFamInstDecl { tfid_eqn = eqn })
   = text "type" <+> ppr_instance_keyword top_lvl <+> ppr_fam_inst_eqn eqn
-- | Print @instance@ for top-level instances, nothing for associated ones.
ppr_instance_keyword :: TopLevelFlag -> SDoc
ppr_instance_keyword tl = case tl of
  TopLevel    -> text "instance"
  NotTopLevel -> empty
-- | Print one equation of a type family instance, e.g. @F [a] = a@.
ppr_fam_inst_eqn :: OutputableBndr name => LTyFamInstEqn name -> SDoc
ppr_fam_inst_eqn (L _ (TyFamEqn { tfe_tycon = tycon
                                , tfe_pats  = pats
                                , tfe_rhs   = rhs }))
    = pp_fam_inst_lhs tycon pats [] <+> equals <+> ppr rhs

-- | Print an associated-type default equation, e.g. @type F a = a@.
ppr_fam_deflt_eqn :: OutputableBndr name => LTyFamDefltEqn name -> SDoc
ppr_fam_deflt_eqn (L _ (TyFamEqn { tfe_tycon = tycon
                                 , tfe_pats  = tvs
                                 , tfe_rhs   = rhs }))
    = text "type" <+> pp_vanilla_decl_head tycon tvs [] <+> equals <+> ppr rhs
instance (OutputableBndr name) => Outputable (DataFamInstDecl name) where
  ppr = pprDataFamInstDecl TopLevel

-- | Pretty-print a data family instance; the 'TopLevelFlag' controls
-- whether the @instance@ keyword appears in the header.
pprDataFamInstDecl :: OutputableBndr name => TopLevelFlag -> DataFamInstDecl name -> SDoc
pprDataFamInstDecl top_lvl (DataFamInstDecl { dfid_tycon = tycon
                                            , dfid_pats  = pats
                                            , dfid_defn  = defn })
  = pp_data_defn pp_hdr defn
  where
    -- header is "instance" (maybe) followed by the instance LHS
    pp_hdr ctxt = ppr_instance_keyword top_lvl <+> pp_fam_inst_lhs tycon pats ctxt
-- | The flavour (@data@ or @newtype@) of a data family instance.
pprDataFamInstFlavour :: DataFamInstDecl name -> SDoc
pprDataFamInstFlavour decl = ppr (dd_ND (dfid_defn decl))
-- | Print the LHS of a family instance: context, family name, and the
-- explicit type patterns.
pp_fam_inst_lhs :: OutputableBndr name
   => Located name
   -> HsTyPats name
   -> HsContext name
   -> SDoc
pp_fam_inst_lhs thing (HsIB { hsib_body = typats }) context -- explicit type patterns
   = hsep [ pprHsContext context, pprPrefixOcc (unLoc thing)
          , hsep (map (pprParendHsType.unLoc) typats)]
instance (OutputableBndr name) => Outputable (ClsInstDecl name) where
    ppr (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = binds
                     , cid_sigs = sigs, cid_tyfam_insts = ats
                     , cid_overlap_mode = mbOverlap
                     , cid_datafam_insts = adts })
      | null sigs, null ats, null adts, isEmptyBag binds  -- No "where" part
      = top_matter
      | otherwise       -- Laid out
      = vcat [ top_matter <+> text "where"
             , nest 2 $ pprDeclList $
               map (pprTyFamInstDecl NotTopLevel . unLoc)   ats ++
               map (pprDataFamInstDecl NotTopLevel . unLoc) adts ++
               pprLHsBindsForUser binds sigs ]
      where
        -- "instance [pragma] <type>" header line
        top_matter = text "instance" <+> ppOverlapPragma mbOverlap
                                     <+> ppr inst_ty
-- | Print the overlap pragma attached to an instance, if any.
ppOverlapPragma :: Maybe (Located OverlapMode) -> SDoc
ppOverlapPragma Nothing = empty
ppOverlapPragma (Just (L _ mode)) = case mode of
  NoOverlap    _ -> text "{-# NO_OVERLAP #-}"
  Overlappable _ -> text "{-# OVERLAPPABLE #-}"
  Overlapping  _ -> text "{-# OVERLAPPING #-}"
  Overlaps     _ -> text "{-# OVERLAPS #-}"
  Incoherent   _ -> text "{-# INCOHERENT #-}"
instance (OutputableBndr name) => Outputable (InstDecl name) where
    -- simply delegate to the wrapped declaration's instance
    ppr (ClsInstD     { cid_inst  = decl }) = ppr decl
    ppr (TyFamInstD   { tfid_inst = decl }) = ppr decl
    ppr (DataFamInstD { dfid_inst = decl }) = ppr decl
-- Extract the declarations of associated data types from an instance
instDeclDataFamInsts :: [LInstDecl name] -> [DataFamInstDecl name]
instDeclDataFamInsts inst_decls = concatMap data_fams_of inst_decls
  where
    -- class instances may carry several data family instances; a data
    -- family instance is itself one; type family instances carry none
    data_fams_of (L _ decl) = case decl of
      ClsInstD { cid_inst =
                   ClsInstDecl { cid_datafam_insts = fam_insts } }
                                            -> map unLoc fam_insts
      DataFamInstD { dfid_inst = fam_inst } -> [fam_inst]
      TyFamInstD {}                         -> []
{-
************************************************************************
* *
\subsection[DerivDecl]{A stand-alone instance deriving declaration}
* *
************************************************************************
-}
type LDerivDecl name = Located (DerivDecl name)

-- | A stand-alone @deriving instance@ declaration.
data DerivDecl name = DerivDecl
      { deriv_type         :: LHsSigType name
      , deriv_overlap_mode :: Maybe (Located OverlapMode)
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnClose',
        --     'ApiAnnotation.AnnDeriving',
        --     'ApiAnnotation.AnnInstance'
        -- For details on above see note [Api annotations] in ApiAnnotation
      }
  deriving (Typeable)
deriving instance (DataId name) => Data (DerivDecl name)

instance (OutputableBndr name) => Outputable (DerivDecl name) where
    ppr (DerivDecl ty o)
        = hsep [text "deriving instance", ppOverlapPragma o, ppr ty]
{-
************************************************************************
* *
\subsection[DefaultDecl]{A @default@ declaration}
* *
************************************************************************
There can only be one default declaration per module, but it is hard
for the parser to check that; we pass them all through in the abstract
syntax, and that restriction must be checked in the front end.
-}
type LDefaultDecl name = Located (DefaultDecl name)

-- | A @default@ declaration, e.g. @default (Int, Double)@.
data DefaultDecl name
  = DefaultDecl [LHsType name]
        -- ^ - 'ApiAnnotation.AnnKeywordId's : 'ApiAnnotation.AnnDefault',
        --     'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose'
        -- For details on above see note [Api annotations] in ApiAnnotation
  deriving (Typeable)
deriving instance (DataId name) => Data (DefaultDecl name)

instance (OutputableBndr name)
              => Outputable (DefaultDecl name) where
    ppr (DefaultDecl tys)
      = text "default" <+> parens (interpp'SP tys)
{-
************************************************************************
* *
\subsection{Foreign function interface declaration}
* *
************************************************************************
-}
-- foreign declarations are distinguished as to whether they define or use a
-- Haskell name
--
-- * the Boolean value indicates whether the pre-standard deprecated syntax
-- has been used
--
type LForeignDecl name = Located (ForeignDecl name)

-- | A foreign import or export declaration.
data ForeignDecl name
  = ForeignImport
      { fd_name   :: Located name          -- defines this name
      , fd_sig_ty :: LHsSigType name       -- sig_ty
      , fd_co     :: PostTc name Coercion  -- rep_ty ~ sig_ty
      , fd_fi     :: ForeignImport }
  | ForeignExport
      { fd_name   :: Located name          -- uses this name
      , fd_sig_ty :: LHsSigType name       -- sig_ty
      , fd_co     :: PostTc name Coercion  -- rep_ty ~ sig_ty
      , fd_fe     :: ForeignExport }
        -- ^
        --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnForeign',
        --      'ApiAnnotation.AnnImport','ApiAnnotation.AnnExport',
        --      'ApiAnnotation.AnnDcolon'
        -- For details on above see note [Api annotations] in ApiAnnotation
  deriving (Typeable)
deriving instance (DataId name) => Data (ForeignDecl name)
{-
In both ForeignImport and ForeignExport:
sig_ty is the type given in the Haskell code
rep_ty is the representation for this type, i.e. with newtypes
coerced away and type functions evaluated.
Thus if the declaration is valid, then rep_ty will only use types
such as Int and IO that we know how to make foreign calls with.
-}
-- | Placeholder for the rep_ty ~ sig_ty coercion of a foreign import;
-- filled in by the typechecker.
noForeignImportCoercionYet :: PlaceHolder
noForeignImportCoercionYet = PlaceHolder

-- | Placeholder for the rep_ty ~ sig_ty coercion of a foreign export;
-- filled in by the typechecker.
noForeignExportCoercionYet :: PlaceHolder
noForeignExportCoercionYet = PlaceHolder
-- | Specification of an imported external entity in dependence on the calling
-- convention
--
data ForeignImport = -- import of a C entity
                     --
                     -- * the two strings specifying a header file or library
                     --   may be empty, which indicates the absence of a
                     --   header or object specification (both are not used
                     --   in the case of `CWrapper' and when `CFunction'
                     --   has a dynamic target)
                     --
                     -- * the calling convention is irrelevant for code
                     --   generation in the case of `CLabel', but is needed
                     --   for pretty printing
                     --
                     -- * `Safety' is irrelevant for `CLabel' and `CWrapper'
                     --
                     CImport (Located CCallConv) -- ccall or stdcall
                             (Located Safety)     -- interruptible, safe or unsafe
                             (Maybe Header)       -- name of C header
                             CImportSpec          -- details of the C entity
                             (Located SourceText) -- original source text for
                                                  -- the C entity
  deriving (Data, Typeable)

-- | Details of an external C entity
--
data CImportSpec = CLabel    CLabelString  -- import address of a C label
                 | CFunction CCallTarget   -- static or dynamic function
                 | CWrapper                -- wrapper to expose closures
                                           -- (former f.e.d.)
  deriving (Data, Typeable)

-- | Specification of an externally exported entity in dependence on the
-- calling convention
--
data ForeignExport = CExport (Located CExportSpec) -- contains the calling
                                                   -- convention
                             (Located SourceText)  -- original source text for
                                                   -- the C entity
  deriving (Data, Typeable)
-- pretty printing of foreign declarations
--
instance OutputableBndr name => Outputable (ForeignDecl name) where
  ppr (ForeignImport { fd_name = n, fd_sig_ty = ty, fd_fi = fimport })
    = hang (text "foreign import" <+> ppr fimport <+> ppr n)
         2 (dcolon <+> ppr ty)
  ppr (ForeignExport { fd_name = n, fd_sig_ty = ty, fd_fe = fexport }) =
    hang (text "foreign export" <+> ppr fexport <+> ppr n)
       2 (dcolon <+> ppr ty)

instance Outputable ForeignImport where
  ppr (CImport cconv safety mHeader spec _) =
    ppr cconv <+> ppr safety <+>
    char '"' <> pprCEntity spec <> char '"'
    where
      -- optional C header name given in the import string
      pp_hdr = case mHeader of
               Nothing -> empty
               Just (Header _ header) -> ftext header

      pprCEntity (CLabel lbl) =
        text "static" <+> pp_hdr <+> char '&' <> ppr lbl
      pprCEntity (CFunction (StaticTarget _ lbl _ isFun)) =
            text "static"
        <+> pp_hdr
        <+> (if isFun then empty else text "value")
        <+> ppr lbl
      pprCEntity (CFunction (DynamicTarget)) =
        text "dynamic"
      pprCEntity (CWrapper) = text "wrapper"

instance Outputable ForeignExport where
  ppr (CExport (L _ (CExportStatic _ lbl cconv)) _) =
    ppr cconv <+> char '"' <> ppr lbl <> char '"'
{-
************************************************************************
* *
\subsection{Transformation rules}
* *
************************************************************************
-}
type LRuleDecls name = Located (RuleDecls name)

-- | A group of RULES pragmas.
-- Note [Pragma source text] in BasicTypes
data RuleDecls name = HsRules { rds_src   :: SourceText
                              , rds_rules :: [LRuleDecl name] }
  deriving (Typeable)
deriving instance (DataId name) => Data (RuleDecls name)

type LRuleDecl name = Located (RuleDecl name)

-- | A single rewrite RULE.
data RuleDecl name
  = HsRule                              -- Source rule
        (Located (SourceText,RuleName)) -- Rule name
            -- Note [Pragma source text] in BasicTypes
        Activation
        [LRuleBndr name]        -- Forall'd vars; after typechecking this
                                --   includes tyvars
        (Located (HsExpr name)) -- LHS
        (PostRn name NameSet)   -- Free-vars from the LHS
        (Located (HsExpr name)) -- RHS
        (PostRn name NameSet)   -- Free-vars from the RHS
        -- ^
        --  - 'ApiAnnotation.AnnKeywordId' :
        --      'ApiAnnotation.AnnOpen','ApiAnnotation.AnnTilde',
        --      'ApiAnnotation.AnnVal',
        --      'ApiAnnotation.AnnClose',
        --      'ApiAnnotation.AnnForall','ApiAnnotation.AnnDot',
        --      'ApiAnnotation.AnnEqual',
        -- For details on above see note [Api annotations] in ApiAnnotation
  deriving (Typeable)
deriving instance (DataId name) => Data (RuleDecl name)
-- | Collect every individual rule from a list of RULES groups.
flattenRuleDecls :: [LRuleDecls name] -> [LRuleDecl name]
flattenRuleDecls decls = [ rule | L _ grp <- decls, rule <- rds_rules grp ]
type LRuleBndr name = Located (RuleBndr name)

-- | A binder in the forall'd part of a RULE, possibly with a signature.
data RuleBndr name
  = RuleBndr (Located name)
  | RuleBndrSig (Located name) (LHsSigWcType name)
        -- ^
        --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnDcolon','ApiAnnotation.AnnClose'
        -- For details on above see note [Api annotations] in ApiAnnotation
  deriving (Typeable)
deriving instance (DataId name) => Data (RuleBndr name)
-- | The signatures attached to rule binders; binders without one are skipped.
collectRuleBndrSigTys :: [RuleBndr name] -> [LHsSigWcType name]
collectRuleBndrSigTys bndrs = concatMap sig_of bndrs
  where
    sig_of (RuleBndrSig _ ty) = [ty]
    sig_of _                  = []
-- | Print a rule's name in double quotes, as it appears in the pragma.
pprFullRuleName :: Located (SourceText, RuleName) -> SDoc
pprFullRuleName (L _ (_, name)) = doubleQuotes (ftext name)
instance OutputableBndr name => Outputable (RuleDecls name) where
  ppr (HsRules _ rules) = ppr rules

instance OutputableBndr name => Outputable (RuleDecl name) where
  ppr (HsRule name act ns lhs _fv_lhs rhs _fv_rhs)
        = sep [text "{-# RULES" <+> pprFullRuleName name
                                <+> ppr act,
               nest 4 (pp_forall <+> pprExpr (unLoc lhs)),
               nest 4 (equals <+> pprExpr (unLoc rhs) <+> text "#-}") ]
        where
          -- "forall vars." prefix only when there are binders
          pp_forall | null ns   = empty
                    | otherwise = forAllLit <+> fsep (map ppr ns) <> dot

instance OutputableBndr name => Outputable (RuleBndr name) where
   ppr (RuleBndr name) = ppr name
   ppr (RuleBndrSig name ty) = ppr name <> dcolon <> ppr ty
{-
************************************************************************
* *
\subsection{Vectorisation declarations}
* *
************************************************************************
A vectorisation pragma, one of
{-# VECTORISE f = closure1 g (scalar_map g) #-}
{-# VECTORISE SCALAR f #-}
{-# NOVECTORISE f #-}
{-# VECTORISE type T = ty #-}
{-# VECTORISE SCALAR type T #-}
-}
-- | A located vectorisation declaration.
type LVectDecl name = Located (VectDecl name)

-- | A vectorisation pragma; the @In@/@Out@ constructor pairs distinguish
-- the form before type-checking (names) from after (TyCon/Class/ClsInst).
data VectDecl name
  = HsVect
      SourceText   -- Note [Pragma source text] in BasicTypes
      (Located name)
      (LHsExpr name)
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnEqual','ApiAnnotation.AnnClose'

        -- For details on above see note [Api annotations] in ApiAnnotation
  | HsNoVect
      SourceText   -- Note [Pragma source text] in BasicTypes
      (Located name)
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnClose'

        -- For details on above see note [Api annotations] in ApiAnnotation
  | HsVectTypeIn                -- pre type-checking
      SourceText                -- Note [Pragma source text] in BasicTypes
      Bool                      -- 'TRUE' => SCALAR declaration
      (Located name)
      (Maybe (Located name))    -- 'Nothing' => no right-hand side
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnType','ApiAnnotation.AnnClose',
        --     'ApiAnnotation.AnnEqual'

        -- For details on above see note [Api annotations] in ApiAnnotation
  | HsVectTypeOut               -- post type-checking
      Bool                      -- 'TRUE' => SCALAR declaration
      TyCon
      (Maybe TyCon)             -- 'Nothing' => no right-hand side
  | HsVectClassIn               -- pre type-checking
      SourceText                -- Note [Pragma source text] in BasicTypes
      (Located name)
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnClass','ApiAnnotation.AnnClose',

        -- For details on above see note [Api annotations] in ApiAnnotation
  | HsVectClassOut              -- post type-checking
      Class
  | HsVectInstIn                -- pre type-checking (always SCALAR)  !!!FIXME: should be superfluous now
      (LHsSigType name)
  | HsVectInstOut               -- post type-checking (always SCALAR) !!!FIXME: should be superfluous now
      ClsInst
  deriving (Typeable)
-- Standalone deriving because Data needs the DataId constraint on @name@.
deriving instance (DataId name) => Data (VectDecl name)
-- | Extract the name a vectorisation declaration is about.
-- Panics on the instance forms, which carry no single name.
lvectDeclName :: NamedThing name => LVectDecl name -> Name
lvectDeclName (L _ decl) = case decl of
  HsVect _ (L _ name) _         -> getName name
  HsNoVect _ (L _ name)         -> getName name
  HsVectTypeIn _ _ (L _ name) _ -> getName name
  HsVectTypeOut _ tycon _       -> getName tycon
  HsVectClassIn _ (L _ name)    -> getName name
  HsVectClassOut cls            -> getName cls
  HsVectInstIn _                -> panic "HsDecls.lvectDeclName: HsVectInstIn"
  HsVectInstOut _               -> panic "HsDecls.lvectDeclName: HsVectInstOut"
-- | Is this one of the (pre- or post-typechecking) instance forms?
lvectInstDecl :: LVectDecl name -> Bool
lvectInstDecl (L _ decl) = case decl of
  HsVectInstIn _  -> True
  HsVectInstOut _ -> True
  _               -> False
instance OutputableBndr name => Outputable (VectDecl name) where
  -- Note: the TypeIn and TypeOut cases (and ClassIn/ClassOut) print
  -- identically; they differ only in the payload types.
  ppr (HsVect _ v rhs)
    = sep [text "{-# VECTORISE" <+> ppr v,
           nest 4 $
             pprExpr (unLoc rhs) <+> text "#-}" ]
  ppr (HsNoVect _ v)
    = sep [text "{-# NOVECTORISE" <+> ppr v <+> text "#-}" ]
  ppr (HsVectTypeIn _ False t Nothing)
    = sep [text "{-# VECTORISE type" <+> ppr t <+> text "#-}" ]
  ppr (HsVectTypeIn _ False t (Just t'))
    = sep [text "{-# VECTORISE type" <+> ppr t, text "=", ppr t', text "#-}" ]
  ppr (HsVectTypeIn _ True t Nothing)
    = sep [text "{-# VECTORISE SCALAR type" <+> ppr t <+> text "#-}" ]
  ppr (HsVectTypeIn _ True t (Just t'))
    = sep [text "{-# VECTORISE SCALAR type" <+> ppr t, text "=", ppr t', text "#-}" ]
  ppr (HsVectTypeOut False t Nothing)
    = sep [text "{-# VECTORISE type" <+> ppr t <+> text "#-}" ]
  ppr (HsVectTypeOut False t (Just t'))
    = sep [text "{-# VECTORISE type" <+> ppr t, text "=", ppr t', text "#-}" ]
  ppr (HsVectTypeOut True t Nothing)
    = sep [text "{-# VECTORISE SCALAR type" <+> ppr t <+> text "#-}" ]
  ppr (HsVectTypeOut True t (Just t'))
    = sep [text "{-# VECTORISE SCALAR type" <+> ppr t, text "=", ppr t', text "#-}" ]
  ppr (HsVectClassIn _ c)
    = sep [text "{-# VECTORISE class" <+> ppr c <+> text "#-}" ]
  ppr (HsVectClassOut c)
    = sep [text "{-# VECTORISE class" <+> ppr c <+> text "#-}" ]
  ppr (HsVectInstIn ty)
    = sep [text "{-# VECTORISE SCALAR instance" <+> ppr ty <+> text "#-}" ]
  ppr (HsVectInstOut i)
    = sep [text "{-# VECTORISE SCALAR instance" <+> ppr i <+> text "#-}" ]
{-
************************************************************************
* *
\subsection[DocDecl]{Document comments}
* *
************************************************************************
-}
type LDocDecl = Located (DocDecl)

-- | A documentation comment, classified by where it attaches.
data DocDecl
  = DocCommentNext HsDocString
  | DocCommentPrev HsDocString
  | DocCommentNamed String HsDocString
  | DocGroup Int HsDocString
  deriving (Data, Typeable)

-- Okay, I need to reconstruct the document comments, but for now:
instance Outputable DocDecl where
  ppr _ = text "<document comment>"
-- | The doc string carried by any 'DocDecl' constructor.
docDeclDoc :: DocDecl -> HsDocString
docDeclDoc decl = case decl of
  DocCommentNext d    -> d
  DocCommentPrev d    -> d
  DocCommentNamed _ d -> d
  DocGroup _ d        -> d
{-
************************************************************************
* *
\subsection[DeprecDecl]{Deprecations}
* *
************************************************************************
We use exported entities for things to deprecate.
-}
type LWarnDecls name = Located (WarnDecls name)

-- | A group of WARNING/DEPRECATED pragmas.
-- Note [Pragma source text] in BasicTypes
data WarnDecls name = Warnings { wd_src :: SourceText
                               , wd_warnings :: [LWarnDecl name]
                               }
  deriving (Data, Typeable)

type LWarnDecl name = Located (WarnDecl name)

-- | A single pragma: the names it applies to plus the message text.
data WarnDecl name = Warning [Located name] WarningTxt
  deriving (Data, Typeable)

instance OutputableBndr name => Outputable (WarnDecls name) where
    ppr (Warnings _ decls) = ppr decls

instance OutputableBndr name => Outputable (WarnDecl name) where
    ppr (Warning thing txt)
      = hsep [text "{-# DEPRECATED", ppr thing, doubleQuotes (ppr txt), text "#-}"]
{-
************************************************************************
* *
\subsection[AnnDecl]{Annotations}
* *
************************************************************************
-}
type LAnnDecl name = Located (AnnDecl name)

-- | An @{-\# ANN ... \#-}@ pragma.
data AnnDecl name = HsAnnotation
                      SourceText -- Note [Pragma source text] in BasicTypes
                      (AnnProvenance name) (Located (HsExpr name))
      -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
      --           'ApiAnnotation.AnnType'
      --           'ApiAnnotation.AnnModule'
      --           'ApiAnnotation.AnnClose'

      -- For details on above see note [Api annotations] in ApiAnnotation
  deriving (Typeable)
-- Standalone deriving because Data needs the DataId constraint on @name@.
deriving instance (DataId name) => Data (AnnDecl name)

instance (OutputableBndr name) => Outputable (AnnDecl name) where
    ppr (HsAnnotation _ provenance expr)
      = hsep [text "{-#", pprAnnProvenance provenance, pprExpr (unLoc expr), text "#-}"]

-- | What an ANN pragma is attached to: a value, a type, or the module.
data AnnProvenance name = ValueAnnProvenance (Located name)
                        | TypeAnnProvenance (Located name)
                        | ModuleAnnProvenance
  deriving (Data, Typeable, Functor)
deriving instance Foldable    AnnProvenance
deriving instance Traversable AnnProvenance
-- | The annotated name, if the provenance carries one ('Nothing' for
-- module annotations).
annProvenanceName_maybe :: AnnProvenance name -> Maybe name
annProvenanceName_maybe prov = case prov of
  ValueAnnProvenance (L _ name) -> Just name
  TypeAnnProvenance (L _ name)  -> Just name
  ModuleAnnProvenance           -> Nothing
-- | Render the target of an ANN pragma.
pprAnnProvenance :: OutputableBndr name => AnnProvenance name -> SDoc
pprAnnProvenance prov = case prov of
  ModuleAnnProvenance           -> text "ANN module"
  ValueAnnProvenance (L _ name) -> text "ANN" <+> ppr name
  TypeAnnProvenance (L _ name)  -> text "ANN type" <+> ppr name
{-
************************************************************************
* *
\subsection[RoleAnnot]{Role annotations}
* *
************************************************************************
-}
type LRoleAnnotDecl name = Located (RoleAnnotDecl name)

-- See #8185 for more info about why role annotations are
-- top-level declarations
data RoleAnnotDecl name
  = RoleAnnotDecl (Located name)         -- type constructor
                  [Located (Maybe Role)] -- optional annotations
      -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
      --           'ApiAnnotation.AnnRole'

      -- For details on above see note [Api annotations] in ApiAnnotation
  deriving (Data, Typeable)

instance OutputableBndr name => Outputable (RoleAnnotDecl name) where
  ppr (RoleAnnotDecl ltycon roles)
    = text "type role" <+> ppr ltycon <+>
      hsep (map (pp_role . unLoc) roles)
    where
      -- An unspecified role is printed as an underscore.
      pp_role Nothing  = underscore
      pp_role (Just r) = ppr r

-- | The type constructor a role annotation applies to.
roleAnnotDeclName :: RoleAnnotDecl name -> name
roleAnnotDeclName (RoleAnnotDecl (L _ name) _) = name
| GaloisInc/halvm-ghc | compiler/hsSyn/HsDecls.hs | bsd-3-clause | 76,668 | 0 | 17 | 22,454 | 12,482 | 6,841 | 5,641 | 916 | 7 |
{-# LANGUAGE StandaloneDeriving,FlexibleContexts,MultiParamTypeClasses,TypeOperators,FlexibleInstances,DeriveFunctor,DeriveFoldable,DeriveTraversable,GeneralizedNewtypeDeriving,DeriveDataTypeable,TypeFamilies #-}
module Space.SO3 where
import Control.Applicative
import Space.Class
import Linear.V3
--import Linear.Vector
import qualified Linear.Matrix as M
import Data.Data
import Control.Lens
import Data.Distributive
import Data.Traversable (Traversable)
import Data.Foldable (Foldable)
import MultiLinear.Class
import Rotation.SO3
import Exponential.Class
import Exponential.SO3
instance Space SO3 where
  -- SO(3) is a 3-dimensional manifold, so three degrees of freedom.
  dim _ _ = 3
  -- Perturb a rotation by a tangent vector via the matrix exponential.
  x |+| d = x !*! expM d
  -- Recover the tangent vector between two rotations via the matrix
  -- logarithm of the relative rotation (transpose = inverse for SO3).
  y |-| x = logM (transpose x !*! y)
| massudaw/mtk | Space/SO3.hs | bsd-3-clause | 693 | 0 | 9 | 81 | 146 | 82 | 64 | 19 | 0 |
{-# language CPP #-}
-- | = Name
--
-- XR_KHR_composition_layer_color_scale_bias - instance extension
--
-- = Specification
--
-- See
-- <https://www.khronos.org/registry/OpenXR/specs/1.0/html/xrspec.html#XR_KHR_composition_layer_color_scale_bias XR_KHR_composition_layer_color_scale_bias>
-- in the main specification for complete information.
--
-- = Registered Extension Number
--
-- 35
--
-- = Revision
--
-- 5
--
-- = Extension and Version Dependencies
--
-- - Requires OpenXR 1.0
--
-- = See Also
--
-- 'CompositionLayerColorScaleBiasKHR'
--
-- = Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/OpenXR/specs/1.0/html/xrspec.html#XR_KHR_composition_layer_color_scale_bias OpenXR Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module OpenXR.Extensions.XR_KHR_composition_layer_color_scale_bias ( CompositionLayerColorScaleBiasKHR(..)
, KHR_composition_layer_color_scale_bias_SPEC_VERSION
, pattern KHR_composition_layer_color_scale_bias_SPEC_VERSION
, KHR_COMPOSITION_LAYER_COLOR_SCALE_BIAS_EXTENSION_NAME
, pattern KHR_COMPOSITION_LAYER_COLOR_SCALE_BIAS_EXTENSION_NAME
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import OpenXR.CStruct (FromCStruct)
import OpenXR.CStruct (FromCStruct(..))
import OpenXR.CStruct (ToCStruct)
import OpenXR.CStruct (ToCStruct(..))
import OpenXR.Zero (Zero(..))
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import OpenXR.Core10.OtherTypes (Color4f)
import OpenXR.Core10.Enums.StructureType (StructureType)
import OpenXR.Core10.Enums.StructureType (StructureType(TYPE_COMPOSITION_LAYER_COLOR_SCALE_BIAS_KHR))
-- | XrCompositionLayerColorScaleBiasKHR - defines color scale and bias for
-- layer textures
--
-- == Member Descriptions
--
-- = Description
--
-- 'CompositionLayerColorScaleBiasKHR' contains the information needed to
-- scale and bias the color of layer textures.
--
-- The 'CompositionLayerColorScaleBiasKHR' structure /can/ be applied by
-- applications to composition layers by adding an instance of the struct
-- to the 'OpenXR.Core10.OtherTypes.CompositionLayerBaseHeader'::@next@
-- list.
--
-- == Valid Usage (Implicit)
--
-- - #VUID-XrCompositionLayerColorScaleBiasKHR-extension-notenabled# The
-- @XR_KHR_composition_layer_color_scale_bias@ extension /must/ be
-- enabled prior to using 'CompositionLayerColorScaleBiasKHR'
--
-- - #VUID-XrCompositionLayerColorScaleBiasKHR-type-type# @type@ /must/
-- be
-- 'OpenXR.Core10.Enums.StructureType.TYPE_COMPOSITION_LAYER_COLOR_SCALE_BIAS_KHR'
--
-- - #VUID-XrCompositionLayerColorScaleBiasKHR-next-next# @next@ /must/
-- be @NULL@ or a valid pointer to the
-- <https://www.khronos.org/registry/OpenXR/specs/1.0/html/xrspec.html#valid-usage-for-structure-pointer-chains next structure in a structure chain>
--
-- = See Also
--
-- 'OpenXR.Core10.OtherTypes.Color4f',
-- 'OpenXR.Core10.OtherTypes.CompositionLayerBaseHeader',
-- 'OpenXR.Core10.Enums.StructureType.StructureType'
data CompositionLayerColorScaleBiasKHR = CompositionLayerColorScaleBiasKHR
  { -- | @colorScale@ is an 'OpenXR.Core10.OtherTypes.Color4f' which will
    -- modulate the color sourced from the images.
    colorScale :: Color4f
  , -- | @colorBias@ is an 'OpenXR.Core10.OtherTypes.Color4f' which will offset
    -- the color sourced from the images.
    colorBias :: Color4f
  }
  deriving (Typeable)
-- Generic is only derived when the GENERIC_INSTANCES flag is defined.
#if defined(GENERIC_INSTANCES)
deriving instance Generic (CompositionLayerColorScaleBiasKHR)
#endif
deriving instance Show CompositionLayerColorScaleBiasKHR
instance ToCStruct CompositionLayerColorScaleBiasKHR where
  withCStruct x f = allocaBytes 48 $ \p -> pokeCStruct p x (f p)
  -- C struct layout (48 bytes total): type at offset 0, next at 8,
  -- colorScale at 16, colorBias at 32.
  pokeCStruct p CompositionLayerColorScaleBiasKHR{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (TYPE_COMPOSITION_LAYER_COLOR_SCALE_BIAS_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Color4f)) (colorScale)
    poke ((p `plusPtr` 32 :: Ptr Color4f)) (colorBias)
    f
  cStructSize = 48
  cStructAlignment = 8
  -- Same layout as pokeCStruct, with both color fields zeroed.
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (TYPE_COMPOSITION_LAYER_COLOR_SCALE_BIAS_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Color4f)) (zero)
    poke ((p `plusPtr` 32 :: Ptr Color4f)) (zero)
    f
instance FromCStruct CompositionLayerColorScaleBiasKHR where
  -- Read back from the offsets used by 'pokeCStruct' (16 and 32); the
  -- @type@ and @next@ fields have no Haskell-side representation.
  peekCStruct p = do
    colorScale <- peekCStruct @Color4f ((p `plusPtr` 16 :: Ptr Color4f))
    colorBias <- peekCStruct @Color4f ((p `plusPtr` 32 :: Ptr Color4f))
    pure $ CompositionLayerColorScaleBiasKHR
             colorScale colorBias
instance Storable CompositionLayerColorScaleBiasKHR where
  -- Size/alignment must match cStructSize/cStructAlignment above.
  sizeOf ~_ = 48
  alignment ~_ = 8
  peek = peekCStruct
  -- Run the poke with a unit continuation, since Storable's poke
  -- returns no result.
  poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero CompositionLayerColorScaleBiasKHR where
  -- Both color fields default to their own Zero instances.
  zero = CompositionLayerColorScaleBiasKHR
           zero
           zero
-- | The extension's revision, mirrored at both the type and value level.
type KHR_composition_layer_color_scale_bias_SPEC_VERSION = 5

-- No documentation found for TopLevel "XR_KHR_composition_layer_color_scale_bias_SPEC_VERSION"
pattern KHR_composition_layer_color_scale_bias_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_composition_layer_color_scale_bias_SPEC_VERSION = 5

-- | The extension's name string, mirrored at both the type and value level.
type KHR_COMPOSITION_LAYER_COLOR_SCALE_BIAS_EXTENSION_NAME = "XR_KHR_composition_layer_color_scale_bias"

-- No documentation found for TopLevel "XR_KHR_COMPOSITION_LAYER_COLOR_SCALE_BIAS_EXTENSION_NAME"
pattern KHR_COMPOSITION_LAYER_COLOR_SCALE_BIAS_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_COMPOSITION_LAYER_COLOR_SCALE_BIAS_EXTENSION_NAME = "XR_KHR_composition_layer_color_scale_bias"
| expipiplus1/vulkan | openxr/src/OpenXR/Extensions/XR_KHR_composition_layer_color_scale_bias.hs | bsd-3-clause | 6,321 | 0 | 14 | 1,111 | 951 | 566 | 385 | -1 | -1 |
-- Common wrapper for HTML pages
module Distribution.Server.Pages.Template
( hackagePage
, hackagePageWith
, hackagePageWithHead
) where
import Text.XHtml.Strict
--TODO: replace all this with external templates
-- | Create top-level HTML document by wrapping the Html with boilerplate.
-- | Wrap page content in the standard Hackage boilerplate, with no extra
-- elements added to the document head.
hackagePage :: String -> [Html] -> Html
hackagePage docTitle docContent = hackagePageWithHead [] docTitle docContent
-- | Like 'hackagePage' but allowing extra elements in the document head.
-- Uses the standard Hackage subtitle and adds nothing to the body.
hackagePageWithHead :: [Html] -> String -> [Html] -> Html
hackagePageWithHead headExtra docTitle docContent =
    hackagePageWith headExtra docTitle subtitle docContent []
  where
    subtitle =
      anchor ! [href introductionURL, theclass "caption"] << "Hackage :: [Package]"
-- | Fully general page wrapper: extra head elements, title, subtitle,
-- content, and extra body elements are all supplied by the caller.
hackagePageWith :: [Html] -> String -> Html -> [Html] -> [Html] -> Html
hackagePageWith headExtra docTitle docSubtitle docContent bodyExtra =
    toHtml [ header << (docHead ++ headExtra)
           , body << (docBody ++ bodyExtra) ]
  where
    -- Standard head: title, fonts, stylesheet, favicon, viewport, search.
    docHead = [ thetitle << (docTitle ++ " | Hackage")
              , thelink ! [ rel "stylesheet"
                          , href googleFontURL] << noHtml
              , thelink ! [ rel "stylesheet"
                          , href stylesheetURL
                          , thetype "text/css"] << noHtml
              , thelink ! [ rel "icon"
                          , href faviconURL
                          , thetype "image/png"] << noHtml
              , meta ! [ name "viewport"
                       , content "width=device-width, initial-scale=1"]
                -- if Search is enabled
              , thelink ! [ rel "search", href "/packages/opensearch.xml"
                          , thetype "application/opensearchdescription+xml"
                          , title "Hackage" ] << noHtml
              ]
    docBody = [ thediv ! [identifier "page-header"] << docHeader
              , thediv ! [identifier "content"] << docContent ]
    docHeader = [ docSubtitle
                , navigationBar
                ]
-- | The top navigation menu: home, search form, browse, news, upload,
-- and account links.
navigationBar :: Html
navigationBar =
  ulist ! [theclass "links", identifier "page-menu"]
    << map (li <<)
    [ anchor ! [href introductionURL] << "Home"
    , form ! [action "/packages/search", theclass "search", method "get"]
        << [ button ! [thetype "submit"] << "Search", spaceHtml
           , input ! [thetype "text", name "terms" ] ]
    , anchor ! [href pkgListURL] << "Browse"
    , anchor ! [href recentAdditionsURL] << "What's new"
    , anchor ! [href uploadURL] << "Upload"
    , anchor ! [href accountsURL] << "User accounts"
    ]
-- URL of the web font used across the site
googleFontURL :: URL
googleFontURL = "https://fonts.googleapis.com/css?family=PT+Sans:400,400i,700"

-- URL of the site stylesheet
stylesheetURL :: URL
stylesheetURL = "/static/hackage.css"

-- URL of the favicon
faviconURL :: URL
faviconURL = "/static/favicon.png"

-- URL of the package list
pkgListURL :: URL
pkgListURL = "/packages/browse"

-- URL of the front page
introductionURL :: URL
introductionURL = "/"

-- URL of the upload form
uploadURL :: URL
uploadURL = "/upload"

-- URL about user accounts, including the form to change passwords
accountsURL :: URL
accountsURL = "/accounts"

-- URL of the admin front end
--
-- TODO: Currently unused.
_adminURL :: URL
_adminURL = "/admin"

-- URL of the list of recent additions to the database
recentAdditionsURL :: URL
recentAdditionsURL = "/packages/recent"
| edsko/hackage-server | Distribution/Server/Pages/Template.hs | bsd-3-clause | 3,400 | 0 | 13 | 1,002 | 712 | 395 | 317 | 64 | 1 |
module Infinity.Core (
    -- * Types
    Bot(..), Server(..),
    -- * Functions on 'Bot'
    newbot,             -- :: Bot
    joinchan, partchan, -- :: Channel -> Server -> Server
    joinserv, partserv, -- joinserv :: Handle -> Server -> Bot -> Bot
                        -- partserv :: Server -> Bot -> Bot
    joinservs,          -- :: [(Server,Handle)] -> Bot -> Bot
    updateserv,         -- :: Server -> Bot -> Bot
    newadmin, deladmin, -- :: Nick -> Server -> Server
    isadmin,            -- :: Nick -> Server -> Bool
    servexists,         -- :: Bot -> Server -> Bool
    servnum             -- :: Bot -> Int
  ) where
import Infinity.Util
import System.IO
import Data.Map (Map)
import Data.Set (Set)
import qualified Data.Set as S
import qualified Data.Map as M
-- | Describes a single IRC server and the bot's configuration on it.
-- Note: the derived 'Ord'/'Eq' compare every field, so two 'Server'
-- values with different channels or admins are different map keys.
data Server = Server {
  address :: String,            -- ^ what server to connect to
  port :: Int,                  -- ^ what port
  channels :: Set String,       -- ^ what channels to enter
  nickname :: String,           -- ^ bot nick
  password :: String,           -- ^ bot password, can be empty
  realname :: String,           -- ^ bot's real name
  administrators :: Set String  -- ^ bot admins
} deriving (Eq,Show,Ord)

-- | This corresponds to the bot's
-- general state overall: each connected server mapped to its handle.
newtype Bot = Bot {
  servs :: Map Server Handle    -- ^ Connected servers
} deriving (Eq,Show)
-- | A bot with no connected servers.
newbot :: Bot
newbot = Bot M.empty
-- | Joins a channel. No membership check is needed: 'S.insert' is a
-- no-op (up to equality) when the channel is already present.
joinchan :: Channel -> Server -> Server
joinchan chan serv = serv { channels = S.insert chan (channels serv) }
-- | Joins every channel in the list.
joinchans :: [Channel] -> Server -> Server
joinchans = flip (foldr joinchan)
-- | Parts a channel. No membership check is needed: 'S.delete' is a
-- no-op (up to equality) when the channel is absent.
partchan :: Channel -> Server -> Server
partchan chan serv = serv { channels = S.delete chan (channels serv) }
-- | Checks to see if a server is already connected.
-- We have this because 'joinserv' is pure, and it needs
-- the handle to insert into the map; callers check membership here
-- before running 'joinserv'.
servexists :: Bot -> Server -> Bool
servexists (Bot connected) serv = M.member serv connected
-- | Adds a new server. A server that is already connected is left
-- untouched, keeping its existing handle.
joinserv :: Handle -> Server -> Bot -> Bot
joinserv h serv bot@(Bot connected)
  | serv `M.member` connected = bot
  | otherwise                 = Bot (M.insert serv h connected)
-- | Adds multiple new servers, each paired with its handle.
joinservs :: [(Server,Handle)] -> Bot -> Bot
joinservs pairs bot = foldr add bot pairs
  where
    add (serv, h) acc = joinserv h serv acc
-- | Parts a server. No membership check is needed: 'M.delete' is a
-- no-op (up to equality) when the server is not connected.
partserv :: Server -> Bot -> Bot
partserv serv (Bot connected) = Bot (M.delete serv connected)
-- | Updates a server inside the bot; the server /must/ exist.
-- Direct lookup with 'M.!' replaces the previous O(n)
-- @toList/findIndex/(!!)@ chain and is equally partial: both raise an
-- error when the key is absent.
updateserv :: Server -> Bot -> Bot
updateserv serv bot@(Bot s) =
  let h = s M.! serv
  in joinserv h serv (partserv serv bot)
-- | Adds a new bot administrator. No membership check is needed:
-- 'S.insert' is a no-op (up to equality) when the nick is present.
newadmin :: Nick -> Server -> Server
newadmin n serv = serv { administrators = S.insert n (administrators serv) }
-- | Removes a bot administrator. No membership check is needed:
-- 'S.delete' is a no-op (up to equality) when the nick is absent.
deladmin :: Nick -> Server -> Server
deladmin n serv = serv { administrators = S.delete n (administrators serv) }
-- | Checks if a user is an administrator of the given server.
isadmin :: Nick -> Server -> Bool
isadmin n serv = S.member n (administrators serv)
-- | Number of currently connected servers.
servnum :: Bot -> Int
servnum = M.size . servs
| thoughtpolice/infinity | src/Infinity/Core.hs | bsd-3-clause | 3,638 | 0 | 13 | 946 | 994 | 569 | 425 | 70 | 2 |
{-# language QuasiQuotes #-}
module Render.VkException
( vkExceptionRenderElement
) where
import Data.Text as T
import Prettyprinter
import Data.Vector as V
import Polysemy.Input
import Relude hiding ( ask )
import Text.InterpolatedString.Perl6.Unindented
import Text.Pandoc
import Control.Exception
import Documentation
import Error
import Haskell as H
import Haskell.Name ( )
import Render.Element
import Render.SpecInfo
import Render.Type
import Spec.Types
-- | Generate the module declaring @VulkanException@ and its 'Exception'
-- instance, with one human-readable message per documented VkResult
-- value.
vkExceptionRenderElement
  :: (HasErr r, HasRenderParams r, HasSpecInfo r)
  => (Documentee -> Maybe Documentation)
  -> Enum'
  -> Sem r RenderElement
vkExceptionRenderElement getDocumentation vkResultEnum =
  genRe "VulkanException declaration" $ do
    tellExplicitModule =<< mkModuleName ["Exception"]
    tellNotReexportable
    RenderParams {..} <- input
    tellImportWithAll ''Control.Exception.Exception
    vkResultTyDoc <- renderType =<< cToHsType DoNotPreserve successCodeType
    tellImportWithAll (mkTyName (eName vkResultEnum))
    tellExport (EData exceptionTypeName)
    -- One case per enum value; values without documentation are dropped
    -- by the 'V.mapMaybe id' below and fall through to 'show r'.
    let resultPatterns = evName <$> eValues vkResultEnum
    cases <- V.mapMaybe id
      <$> forV resultPatterns (displayExceptionCase getDocumentation)
    tellDoc [qci|
      -- | This exception is thrown from calls to marshalled Vulkan commands
      -- which return a negative VkResult.
      newtype {exceptionTypeName} = {exceptionTypeName} \{ vulkanExceptionResult :: {vkResultTyDoc} }
        deriving (Eq, Ord, Read, Show)

      instance Exception {exceptionTypeName} where
        displayException ({exceptionTypeName} r) = show r ++ ": " ++ resultString r

      -- | A human understandable message for each VkResult
      resultString :: {vkResultTyDoc} -> String
      resultString = \case
        {indent 2 . vcat $ V.toList cases}
        r -> show r
      |]
-- | Render one case alternative of @resultString@ for the given VkResult
-- value: @PATTERN -> "description"@. Returns 'Nothing' when the value
-- has no documentation.
displayExceptionCase
  :: HasRenderParams r
  => (Documentee -> Maybe Documentation)
  -> CName
  -> Sem r (Maybe (Doc ()))
displayExceptionCase getDocumentation pat = do
  RenderParams {..} <- input
  let pat' = mkPatternName pat
  pure $ fmap
    ((pretty pat' <+> "->") <+>)
    (documentationToString =<< getDocumentation (Nested "VkResult" pat))
-- | Get a string expression from some documentation.
-- Renders the Pandoc as plain text, cleans it up, and quotes it with
-- 'viaShow' so it can be spliced in as a Haskell string literal.
-- Returns 'Nothing' if Pandoc fails to render.
documentationToString :: Documentation -> Maybe (Doc ())
documentationToString Documentation {..} =
  let writerOptions = def
  in  viaShow . fixupResultDescription <$> eitherToMaybe
        (runPure (writePlain writerOptions (prepareForPlain dDocumentation)))
-- |
-- - Keep only the first sentence
-- - Drop the first word (it's the enum name)
-- - Replace typographic apostrophes (U+2019) with plain quotes
fixupResultDescription :: Text -> Text
fixupResultDescription raw =
  let unsmarted  = T.replace "\8217" "'" raw
      restOfText = T.unwords (tailSafe (T.words unsmarted))
  in  T.takeWhile (/= '.') restOfText
-- | Like 'tail', but returns the empty list for empty input instead of
-- erroring.
tailSafe :: [a] -> [a]
tailSafe = drop 1
-- | Strip emphasis markup from a document before plain-text rendering,
-- splicing the emphasised inlines back in place.
prepareForPlain :: Pandoc -> Pandoc
prepareForPlain = topDown stripEmphasis
  where
    stripEmphasis :: [Inline] -> [Inline]
    stripEmphasis = concatMap $ \case
      Emph inner -> inner
      other      -> [other]
----------------------------------------------------------------
-- Utils
----------------------------------------------------------------
-- | Forget the error side of an 'Either'.
eitherToMaybe :: Either a b -> Maybe b
eitherToMaybe = either (const Nothing) Just
| expipiplus1/vulkan | generate-new/src/Render/VkException.hs | bsd-3-clause | 3,581 | 0 | 14 | 899 | 735 | 382 | 353 | -1 | -1 |
--------------------------------------------------------------------------------
-- |
-- Module : FRP.Yampa.Integration
-- Copyright : (c) Antony Courtney and Henrik Nilsson, Yale University, 2003
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : ivan.perez@keera.co.uk
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
-- Integration and derivation of input signals.
--
-- In continuous time, these primitives define SFs that integrate/derive the
-- input signal. Since this is subject to the sampling resolution, simple
-- versions are implemented (like the rectangle rule for the integral).
--
-- In discrete time, all we do is count the number of events.
--
-- The combinator 'iterFrom' gives enough flexibility to program your own
-- leak-free integration and derivation SFs.
--
-- Many primitives and combinators in this module require instances of
-- simple-affine-spaces's 'VectorSpace'. Yampa does not enforce the use of a
-- particular vector space implementation, meaning you could use 'integral' for
-- example with other vector types like V2, V1, etc. from the library linear.
-- For an example, see
-- <https://gist.github.com/walseb/1e0a0ca98aaa9469ab5da04e24f482c2 this gist>.
--------------------------------------------------------------------------------
module FRP.Yampa.Integration (
-- * Integration
integral, -- :: VectorSpace a s => SF a a
imIntegral, -- :: VectorSpace a s => a -> SF a a
impulseIntegral, -- :: VectorSpace a k => SF (a, Event a) a
count, -- :: Integral b => SF (Event a) (Event b)
-- * Differentiation
derivative, -- :: VectorSpace a s => SF a a -- Crude!
iterFrom -- :: (a -> a -> DTime -> b -> b) -> b -> SF a b
) where
import Control.Arrow
import Data.VectorSpace
import FRP.Yampa.Event
import FRP.Yampa.Hybrid
import FRP.Yampa.InternalCore (SF(..), SF'(..), DTime)
------------------------------------------------------------------------------
-- Integration and differentiation
------------------------------------------------------------------------------
-- | Integration using the rectangle rule.
{-# INLINE integral #-}
integral :: VectorSpace a s => SF a a
integral = SF {sfTF = tf0}
 where
   -- First output is the zero vector; accumulation starts on the next
   -- sample.
   tf0 a0 = (integralAux igrl0 a0, igrl0)

   igrl0  = zeroVector

   -- Carries the running sum and the previous input sample.
   integralAux igrl a_prev = SF' tf -- True
     where
       -- Rectangle rule: add the previous sample scaled by the elapsed
       -- time since that sample.
       tf dt a = (integralAux igrl' a, igrl')
         where
           igrl' = igrl ^+^ realToFrac dt *^ a_prev
-- | \"Immediate\" integration (using the function's value at the current time)
imIntegral :: VectorSpace a s => a -> SF a a
imIntegral = ((\ _ a' dt v -> v ^+^ realToFrac dt *^ a') `iterFrom`)
-- | Integrate using an auxiliary function that takes the current and the last
-- input, the time between those samples, and the last output, and returns a
-- new output.
--
-- The initial value @b@ is emitted at time zero; thereafter @f@ folds
-- each (previous input, current input, dt, previous output) into the
-- next output.
iterFrom :: (a -> a -> DTime -> b -> b) -> b -> SF a b
f `iterFrom` b = SF (iterAux b)
 where
   iterAux b a = (SF' (\ dt a' -> iterAux (f a a' dt b) a'), b)
-- | A very crude version of a derivative. It simply divides the
-- value difference by the time difference. Use at your own risk.
--
-- The first output is the zero vector; a zero @dt@ would divide by
-- zero, so sampling times are assumed to be strictly increasing.
derivative :: VectorSpace a s => SF a a
derivative = SF {sfTF = tf0}
 where
   tf0 a0 = (derivativeAux a0, zeroVector)

   -- Carries the previous input sample for the finite difference.
   derivativeAux a_prev = SF' tf -- True
     where
       tf dt a = (derivativeAux a, (a ^-^ a_prev) ^/ realToFrac dt)
-- | Integrate the first input signal and add the /discrete/ accumulation (sum)
-- of the second, discrete, input signal.
--
-- Runs 'integral' and 'accumHoldBy' in parallel and sums their outputs.
impulseIntegral :: VectorSpace a k => SF (a, Event a) a
impulseIntegral = (integral *** accumHoldBy (^+^) zeroVector) >>^ uncurry (^+^)
-- | Count the occurrences of input events.
--
-- >>> embed count (deltaEncode 1 [Event 'a', NoEvent, Event 'b'])
-- [Event 1,NoEvent,Event 2]
count :: Integral b => SF (Event a) (Event b)
count = accumBy countOne 0
  where
    -- Each event bumps the running total; the event payload is ignored.
    countOne total _ = total + 1
-- Vim modeline
-- vim:set tabstop=8 expandtab:
| ivanperez-keera/Yampa | yampa/src/FRP/Yampa/Integration.hs | bsd-3-clause | 4,094 | 0 | 13 | 916 | 615 | 358 | 257 | 34 | 1 |
module Bluetooth.Internal.HasInterface where
import Control.Concurrent (readMVar, swapMVar, modifyMVar_)
import Control.Monad.Except (liftIO, mapExceptT)
import Control.Monad
import Control.Monad.Writer.Strict (WriterT)
import Data.Proxy
import Data.Word (Word16)
import DBus
import DBus.Types (SomeSignal, methodError, object)
import GHC.TypeLits
import Lens.Micro
import qualified Data.ByteString as BS
import qualified Data.Map as Map
import qualified Data.Text as T
import Bluetooth.Internal.Errors
import Bluetooth.Internal.Interfaces
import Bluetooth.Internal.Types
import Bluetooth.Internal.Utils
import Bluetooth.Internal.Lenses
-- The Bluez DBus API makes certain requirements about the interfaces
-- that objects must meet. These requirements are outlined in:
--
-- https://kernel.googlesource.com/pub/scm/bluetooth/bluez/+/5.43/doc/gatt-api.txt
--
-- In this file, we declare how our objects meet these interfaces.
-- | A class that declares that @obj@ meets the D-Bus interface @iface@
-- (named at the type level by a 'Symbol').
class KnownSymbol iface => HasInterface obj (iface :: Symbol) where
  getInterface :: obj -> Proxy iface -> Interface
-- | Package an object's interface as a D-Bus 'Object', using the
-- type-level interface name as the object's interface string.
withInterface :: HasInterface obj iface => obj -> Proxy iface -> Object
withInterface o p =
  object (T.pack (symbolVal p)) (getInterface o p)
-- * ObjectManager
instance HasInterface Application ObjectManager where
  getInterface app _ =
    Interface
      { interfaceMethods = [getManagedObjects]
      , interfaceSignals = []
      , interfaceAnnotations = []
      , interfaceProperties = []
      }
   where
    -- The only required method: return the whole application tree,
    -- relying on the Application's Representable instance for encoding.
    getManagedObjects :: Method
    getManagedObjects
      = Method (repMethod (return app :: IO Application))
               "GetManagedObjects"
               Done
               ("rep" :> Done)
-- * Properties
-- | The @org.freedesktop.DBus.Properties@ interface.
--
-- See the <https://dbus.freedesktop.org/doc/dbus-specification.html#standard-interfaces-properties
-- relevant section of the DBus spec> for more information.
--
-- | Argument type of the @PropertiesChanged@ signal.
type ChangedProperties = 'TypeStruct
  [ 'DBusSimpleType 'TypeString                -- interface_name
  , AnyDBusDict                                -- changed_properties
  , 'TypeArray ('DBusSimpleType 'TypeString)   -- invalidated_properties
  ]
-- A helper function for constructing D-Bus Property interfaces. Pass a
-- non-Nothing if the object supports the PropertiesChanged signal.
-- | Build an @org.freedesktop.DBus.Properties@ 'Interface' for a value
-- whose DBus representation is a dictionary.  \"GetAll\" returns the
-- whole value when the requested interface name matches
-- @supportedIFaceName@, and replies with @invalidArgs@ otherwise.
-- When an 'ObjectPath' is supplied, the interface additionally declares
-- the \"PropertiesChanged\" signal rooted at that path.
defPropIFace :: forall a.
  ( Representable a
  , RepType a ~ AnyDBusDict
  )
  => Maybe ObjectPath -> T.Text -> a -> Interface
defPropIFace opath supportedIFaceName val =
  Interface
    { interfaceMethods = [getAll]
    , interfaceSignals = signals
    , interfaceAnnotations = []
    , interfaceProperties = []
    }
  where
    getAll
      = Method (repMethod go)
          "GetAll"
          ("interface" :> Done)
          ("rep" :> Done)
      where
        go :: T.Text -> MethodHandlerT IO a
        go iface
          -- Only the single declared interface is served.
          | iface == supportedIFaceName = return val
          | otherwise = methodError invalidArgs
    -- PropertiesChanged is only advertised when the caller supplied a
    -- path; objects without a path (e.g. advertisements) get no signal.
    signals = case opath of
      Nothing -> []
      Just p -> [SSD propertiesChanged]
        where
          propertiesChanged :: SignalDescription '[ChangedProperties]
          propertiesChanged = SignalDescription
            { signalDPath = p
            , signalDInterface = T.pack propertiesIFace
            , signalDMember = "PropertiesChanged"
            , signalDArguments = "changes" :> Done
            }
-- Services expose their dictionary via the Properties interface and
-- support PropertiesChanged at their own object path.
instance HasInterface (WithObjectPath (Service 'Local)) Properties where
  getInterface service _
    = defPropIFace (Just $ service ^. path) (T.pack gattServiceIFace) service
-- Characteristics additionally expose a readable/writable "Value"
-- property on top of the default Properties interface.
instance HasInterface (WithObjectPath (CharacteristicBS 'Local)) Properties where
  getInterface char _
    = baseIface { interfaceProperties = SomeProperty prop
                : interfaceProperties baseIface }
    where
      baseIface = defPropIFace (Just $ char ^. path) (T.pack gattCharacteristicIFace) char
      -- PECSTrue: changes to "Value" emit PropertiesChanged.
      prop = mkProperty (char ^. path)
                        (T.pack gattCharacteristicIFace)
                        "Value"
                        (handlerToMethodHandler <$> char ^. value . readValue)
                        (fmap handlerToMethodHandler <$> char ^. value . writeValue)
                        PECSTrue
-- Advertisements have no object path here, so no PropertiesChanged
-- signal is declared for them.
instance HasInterface Advertisement Properties where
  getInterface adv _
    = defPropIFace Nothing (T.pack leAdvertisementIFace) adv
-- * GattService
instance HasInterface (WithObjectPath (Service 'Local)) GattService where
  getInterface service _ =
    Interface
      { interfaceMethods = []
      , interfaceSignals = []
      , interfaceAnnotations = []
      , interfaceProperties = [SomeProperty uuid', SomeProperty primary]
      }
   where
     -- Read-only UUID of the service, published under <path>/UUID.
     uuid' :: Property (RepType UUID)
     uuid' = Property
       { propertyPath = objectPath $ (service ^. path . toText) </> "UUID"
       , propertyInterface = T.pack gattServiceIFace
       , propertyName = "UUID"
       , propertyGet = Just . return $ toRep (service ^. value . uuid)
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
     -- "Primary" is hard-wired to True: every service registered via
     -- this library is reported as a primary service.
     primary :: Property (RepType Bool)
     primary = Property
       { propertyPath = objectPath $ (service ^. path . toText) </> "Primary"
       , propertyInterface = T.pack gattServiceIFace
       , propertyName = "Primary"
       , propertyGet = Just . return $ toRep True
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
-- * GattCharacteristic
-- | Apply the read options to a read handler.  Only the \"offset\"
-- option is honoured: when present, that many leading bytes are
-- dropped from the handler's result.
acceptingOptions :: MethodHandlerT IO BS.ByteString
                 -> CharacteristicOptions
                 -> MethodHandlerT IO BS.ByteString
acceptingOptions handler opts =
  maybe handler dropOffset (opts ^. offset)
  where
    dropOffset off = BS.drop (fromIntegral off) <$> handler
-- | Lift a user-supplied 'Handler' into DBus's 'MethodHandlerT',
-- converting textual failures into DBus 'MsgError' replies.
handlerToMethodHandler :: Handler a -> MethodHandlerT IO a
handlerToMethodHandler (Handler h) = MHT (mapExceptT toMsgError h)
  where
    toMsgError :: IO (Either T.Text a) -> WriterT [SomeSignal] IO (Either MsgError a)
    toMsgError act = liftIO $ do
      res <- act
      return $ case res of
        Left errTxt -> Left (MsgError errTxt Nothing [])
        Right v     -> Right v
instance HasInterface (WithObjectPath (CharacteristicBS 'Local)) GattCharacteristic where
  getInterface char _ =
    Interface
      { interfaceMethods = [readVal, writeVal, startNotify, stopNotify]
      , interfaceSignals = []
      , interfaceAnnotations = []
      , interfaceProperties = [ SomeProperty uuid'
                              , SomeProperty service
                              , SomeProperty flags
                              , SomeProperty notifying
                              , SomeProperty $ valProp char
                              ]
      }
   where
     -- Reply used for Read/Write when the characteristic declares no
     -- corresponding handler.
     notSup :: MethodHandlerT IO ()
     notSup = methodError notSupported
     -- "ReadValue" honours the "offset" option via 'acceptingOptions'.
     readVal = case char ^. value . readValue of
       Just v -> Method (repMethod $ acceptingOptions $ handlerToMethodHandler v)
            "ReadValue" ("options" :> Done) ("rep" :> Done)
       Nothing -> Method (repMethod notSup) "ReadValue" Done Done
     writeVal = case char ^. value . writeValue of
       Just w -> Method (repMethod $ go w)
            "WriteValue" ("arg" :> "options" :> Done) ("rep" :> Done)
       Nothing -> Method (repMethod notSup) "WriteValue" Done Done
       where
         -- Run the write handler; when the characteristic is currently
         -- notifying and the handler accepted the write, emit
         -- PropertiesChanged for the "Value" property.
         go :: (BS.ByteString -> Handler Bool)
            -> BS.ByteString -> CharacteristicOptions -> MethodHandlerT IO Bool
         go writeTheVal newVal _opts = do
           res <- handlerToMethodHandler $ writeTheVal newVal
           nots <- liftIO $ readMVar $
             characteristicIsNotifying (char ^. value . uuid)
           when (nots && res) $ propertyChanged (valProp char) newVal
           return res
     -- Start/StopNotify just toggle the shared per-UUID notifying flag.
     stopNotify = Method (repMethod go) "StopNotify" Done Done
       where
         go :: MethodHandlerT IO ()
         go = liftIO . void $
           swapMVar (characteristicIsNotifying $ char ^. value . uuid) False
     startNotify = Method (repMethod go) "StartNotify" Done Done
       where
         go :: MethodHandlerT IO ()
         go = liftIO . void $
           swapMVar (characteristicIsNotifying $ char ^. value . uuid) True
     -- Read-only UUID property.
     uuid' :: Property (RepType UUID)
     uuid' = Property
       { propertyPath = objectPath $ (char ^. path . toText) </> "UUID"
       , propertyInterface = T.pack gattCharacteristicIFace
       , propertyName = "UUID"
       , propertyGet = Just . return . toRep $ char ^. value . uuid
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
     -- "Service" points at the parent object path of this characteristic.
     service :: Property (RepType ObjectPath)
     service = Property
       { propertyPath = objectPath $ (char ^. path . toText) </> "Service"
       , propertyInterface = T.pack gattCharacteristicIFace
       , propertyName = "Service"
       , propertyGet = Just . return . toRep . objectPath . parentPath
           $ char ^. path . toText
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
     -- "Flags" reports the characteristic's declared properties
     -- (read/write/notify/...).
     flags :: Property (RepType [CharacteristicProperty])
     flags = Property
       { propertyPath = objectPath $ (char ^. path . toText) </> "Flags"
       , propertyInterface = T.pack gattCharacteristicIFace
       , propertyName = "Flags"
       , propertyGet = Just . return . toRep $ char ^. value . properties
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
     -- "Notifying" is both readable and writable; writes with a value
     -- of the wrong DBus type are rejected by returning False.
     notifying :: Property (RepType Bool)
     notifying = Property
       { propertyPath = objectPath $ (char ^. path . toText) </> "Notifying"
       , propertyInterface = T.pack gattCharacteristicIFace
       , propertyName = "Notifying"
       , propertyGet = Just $ liftIO $ toRep <$> readMVar mvar
       , propertySet = Just $ \new -> liftIO $ do
           case fromRep new of
             Nothing -> return False
             Just v -> modifyMVar_ mvar (const $ return v) >> return True
       , propertyEmitsChangedSignal = PECSFalse
       }
       where
         mvar = characteristicIsNotifying (char ^. value . uuid)
-- | The \"Value\" property of a characteristic, wired to the user's
-- read and write handlers.  'PECSTrue': value changes emit the
-- PropertiesChanged signal (used by notifications).
valProp :: WithObjectPath (CharacteristicBS 'Local)
  -> Property (RepType BS.ByteString)
valProp char = mkProperty (char ^. path)
                  (T.pack gattCharacteristicIFace)
                  "Value"
                  (handlerToMethodHandler <$> char ^. value . readValue)
                  (fmap handlerToMethodHandler <$> char ^. value . writeValue)
                  PECSTrue
instance HasInterface (WithObjectPath Advertisement) LEAdvertisement where
  getInterface adv _ =
    Interface
      { interfaceMethods = [release]
      , interfaceSignals = []
      , interfaceAnnotations = []
      , interfaceProperties = [ SomeProperty type'
                              , SomeProperty serviceUUIDs'
                              , SomeProperty manufacturerData'
                              , SomeProperty solicitUUIDs'
                              , SomeProperty serviceData'
                              , SomeProperty includeTxPower']
      }
   where
     -- "Release" is a no-op acknowledgement; Bluez calls it when the
     -- advertisement is unregistered.
     release = Method (repMethod (return () :: IO ())) "Release" Done Done
     -- The remaining members are read-only projections of the
     -- 'Advertisement' record; none of them emits PropertiesChanged.
     type' :: Property (RepType AdvertisementType)
     type' = Property
       { propertyPath = objectPath $ (adv ^. path . toText) </> "Type"
       , propertyInterface = T.pack leAdvertisementIFace
       , propertyName = "Type"
       , propertyGet = Just . return . toRep $ adv ^. value . type_
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
     serviceUUIDs' :: Property (RepType [UUID])
     serviceUUIDs' = Property
       { propertyPath = objectPath $ (adv ^. path . toText) </> "ServiceUUIDs"
       , propertyInterface = T.pack leAdvertisementIFace
       , propertyName = "ServiceUUIDs"
       , propertyGet = Just . return . toRep $ adv ^. value . serviceUUIDs
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
     solicitUUIDs' :: Property (RepType [UUID])
     solicitUUIDs' = Property
       { propertyPath = objectPath $ (adv ^. path . toText) </> "SolicitUUIDs"
       , propertyInterface = T.pack leAdvertisementIFace
       , propertyName = "SolicitUUIDs"
       , propertyGet = Just . return . toRep $ adv ^. value . solicitUUIDs
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
     manufacturerData' :: Property (RepType (Map.Map Word16 BS.ByteString))
     manufacturerData' = Property
       { propertyPath = objectPath $ (adv ^. path . toText) </> "ManufacturerData"
       , propertyInterface = T.pack leAdvertisementIFace
       , propertyName = "ManufacturerData"
       , propertyGet = Just . return . toRep $ adv ^. value . manufacturerData
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
     serviceData' :: Property (RepType (Map.Map UUID BS.ByteString))
     serviceData' = Property
       { propertyPath = objectPath $ (adv ^. path . toText) </> "ServiceData"
       , propertyInterface = T.pack leAdvertisementIFace
       , propertyName = "ServiceData"
       , propertyGet = Just . return . toRep $ adv ^. value . serviceData
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
     includeTxPower' :: Property (RepType Bool)
     includeTxPower' = Property
       { propertyPath = objectPath $ (adv ^. path . toText) </> "IncludeTxPower"
       , propertyInterface = T.pack leAdvertisementIFace
       , propertyName = "IncludeTxPower"
       , propertyGet = Just . return . toRep $ adv ^. value . includeTxPower
       , propertySet = Nothing
       , propertyEmitsChangedSignal = PECSFalse
       }
| plow-technologies/ble | src/Bluetooth/Internal/HasInterface.hs | bsd-3-clause | 13,846 | 0 | 22 | 4,083 | 3,359 | 1,812 | 1,547 | -1 | -1 |
{-# LANGUAGE EmptyDataDecls #-}
module Generic.Data.Bool where
import Prelude ()
infixr 3 &&
infixr 2 ||
data Bool
class BoolC j where
false :: j Bool
true :: j Bool
bool :: j a -> j a -> j Bool -> j a
-- | Church-style conditional: select the first branch when the
-- condition is true, the second when it is false.
if' :: BoolC j => j Bool -> j a -> j a -> j a
if' cond onTrue onFalse = bool onFalse onTrue cond
-- | Conjunction: when the left operand is false the result is false,
-- otherwise the result is the right operand.
(&&) :: BoolC j => j Bool -> j Bool -> j Bool
x && y = if' x y false
-- | Disjunction: when the left operand is true the result is true,
-- otherwise the result is the right operand.
(||) :: BoolC j => j Bool -> j Bool -> j Bool
x || y = if' x true y
-- | Negation: swap the true and false branches.
not :: BoolC j => j Bool -> j Bool
not b = if' b false true
| tomlokhorst/AwesomePrelude | src/Generic/Data/Bool.hs | bsd-3-clause | 486 | 0 | 10 | 144 | 266 | 129 | 137 | -1 | -1 |
-- {-# OPTIONS_GHC -cpp -DDEBUG #-}
{-# OPTIONS_GHC -cpp #-}
-----------------------------------------------------------------------------
-- |
-- Module : Berp.Base.ControlStack
-- Copyright : (c) 2010 Bernie Pope
-- License : BSD-style
-- Maintainer : florbitous@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- Operations on the control stack.
--
-----------------------------------------------------------------------------
#include "BerpDebug.h"
module Berp.Base.ControlStack
( isEmpty, isProcedureCall, isExceptionHandler, isWhileLoop, isGeneratorCall
, unwind, unwindPastWhileLoop, unwindUpToWhileLoop, push, pop, nullifyTopHandler
, unwindYieldContext, dumpStack, getControlStack, setControlStack
)
where
import Control.Monad.State
import Berp.Base.SemanticTypes (ControlStack (..), Eval, EvalState (..), Object (..))
import Berp.Base.LiftedIO as LIO (writeIORef, putStrLn)
import {-# SOURCE #-} Berp.Base.StdTypes.None (none)
-- | True iff the control stack contains no frames.
isEmpty :: ControlStack -> Bool
isEmpty frame =
   case frame of
      EmptyStack -> True
      _other -> False
-- | True iff the top frame is a procedure-call frame.
isProcedureCall :: ControlStack -> Bool
isProcedureCall frame =
   case frame of
      ProcedureCall {} -> True
      _other -> False
-- | True iff the top frame is an exception-handler frame.
isExceptionHandler :: ControlStack -> Bool
isExceptionHandler frame =
   case frame of
      ExceptionHandler {} -> True
      _other -> False
-- | True iff the top frame is a while-loop frame.
isWhileLoop :: ControlStack -> Bool
isWhileLoop frame =
   case frame of
      WhileLoop {} -> True
      _other -> False
-- | True iff the top frame is a generator-call frame.
isGeneratorCall :: ControlStack -> Bool
isGeneratorCall frame =
   case frame of
      GeneratorCall {} -> True
      _other -> False
{- Unwind the control stack and execute any "finally" exception handlers
that we pass along the way. Returns the stack with the most recently popped
element remaining.
-}
-- | Pop frames until one satisfying the predicate has been popped,
-- running the "finally" action of every exception handler passed on
-- the way.  Returns the stack as it was with that frame still on top.
-- NOTE: the local name @pred@ deliberately shadows 'Prelude.pred'.
unwind :: (ControlStack -> Bool) -> Eval ControlStack
unwind pred = do
   stack <- gets state_control_stack
   unwindFrame stack
   where
   unwindFrame :: ControlStack -> Eval ControlStack
   -- XXX should be an exception
   unwindFrame EmptyStack = error $ "unwindFrame: empty control stack"
   -- Exception handlers: pop first, then run the finaliser (so the
   -- finaliser executes outside its own handler frame).
   unwindFrame stack@(ExceptionHandler { exception_finally = maybeFinally }) = do
      pop
      _ <- maybe (return none) id maybeFinally
      if pred stack
         then return stack
         else unwind pred
   unwindFrame stack
      | pred stack = pop >> return stack
      | otherwise = pop >> unwind pred
-- | Unwind down to the nearest 'GeneratorCall' frame on behalf of a
-- @yield@: store the continuation and a rebuilder for the frames being
-- removed into the generator object, trim the stack past the
-- generator frame, and return the generator's yield function.
unwindYieldContext :: Eval Object -> Eval (Object -> Eval Object)
unwindYieldContext continuation = do
   stack <- gets state_control_stack
   let (generatorYield, generatorObj, newStack, context) = unwindYieldWorker stack
   LIO.writeIORef (object_continuation generatorObj) continuation
   LIO.writeIORef (object_stack_context generatorObj) context
   setControlStack newStack
   return generatorYield
   where
   -- Returns (yield function, generator object, stack below the
   -- generator frame, function that re-adds the intervening frames).
   unwindYieldWorker :: ControlStack -> (Object -> Eval Object, Object, ControlStack, ControlStack -> ControlStack)
   -- XXX this should be an exception
   unwindYieldWorker EmptyStack = error "unwindYieldWorker: empty control stack"
   unwindYieldWorker (ProcedureCall {}) = error "unwindYieldWorker: procedure call"
   unwindYieldWorker (ExceptionHandler handler finally tail) =
      (yield, obj, stack, ExceptionHandler handler finally . context)
      where
      (yield, obj, stack, context) = unwindYieldWorker tail
   unwindYieldWorker (WhileLoop start end tail) =
      (yield, obj, stack, WhileLoop start end . context)
      where
      (yield, obj, stack, context) = unwindYieldWorker tail
   unwindYieldWorker (GeneratorCall yield obj tail) = (yield, obj, tail, id)
-- | Unwind to the innermost while loop and then discard its frame.
-- Returns the stack as it was with the loop frame still on top.
unwindPastWhileLoop :: Eval ControlStack
unwindPastWhileLoop = do
   stack <- unwindUpToWhileLoop
   stack <$ pop
-- | Unwind to the innermost 'WhileLoop' frame, leaving it on the
-- stack.  Finalisers of exception handlers passed on the way are run.
-- Hitting a procedure/generator boundary or the stack bottom is fatal
-- (continue/break used outside a loop).
unwindUpToWhileLoop :: Eval ControlStack
unwindUpToWhileLoop = do
   stack <- gets state_control_stack
   unwindFrame stack
   where
   unwindFrame :: ControlStack -> Eval ControlStack
   -- XXX should be an exception, should mention continue/break called outside of loop
   unwindFrame EmptyStack = error $ "unwindUpToWhileLoop: empty control stack"
   unwindFrame (ExceptionHandler { exception_finally = maybeFinally }) = do
      pop
      _ <- maybe (return none) id maybeFinally
      unwindUpToWhileLoop
   unwindFrame stack@(WhileLoop {}) = return stack
   -- XXX should be an exception which mentions continue/break called outside of loop
   unwindFrame (ProcedureCall {}) = error $ "unwindUpToWhileLoop: procedure call"
   unwindFrame (GeneratorCall {}) = error $ "unwindUpToWhileLoop: generator call"
-- | Remove the top frame from the control stack.
-- Popping an empty stack is a fatal error.
pop :: Eval ()
pop = do
   current <- gets state_control_stack
   case current of
      -- should be an exception
      EmptyStack -> error "pop: empty stack"
      frame -> setControlStack (control_stack_tail frame)
-- | Push a frame by applying a frame constructor to the current stack.
push :: (ControlStack -> ControlStack) -> Eval ()
push frame = gets state_control_stack >>= setControlStack . frame
-- | Replace the control stack stored in the evaluator state.
setControlStack :: ControlStack -> Eval ()
setControlStack newStack =
   modify $ \evalState -> evalState { state_control_stack = newStack }
-- | Read the current control stack from the evaluator state.
getControlStack :: Eval ControlStack
getControlStack = fmap state_control_stack get
-- assumes top of stack is an exception handler
-- assumes top of stack is an exception handler
-- Clears the handler action of the topmost frame while keeping the
-- frame (and thus its "finally" action) in place — presumably so a
-- re-raised exception skips this handler but its finaliser still
-- runs; confirm against the exception machinery.
nullifyTopHandler :: Eval ()
nullifyTopHandler = do
   IF_DEBUG(dumpStack)
   stack <- gets state_control_stack
   case stack of
      ExceptionHandler {} ->
         setControlStack $ stack { exception_handler = Nothing }
      _other -> error $ "nullifyTopHandler: top of stack is not an exception handler: " ++ show stack
-- | Debugging aid: print the constructor name of every frame on the
-- control stack.
-- NOTE(review): frames are visited starting from the current top
-- frame, even though the "Bottom of stack" banner is printed first —
-- confirm the banner labelling is intended.
dumpStack :: Eval ()
dumpStack = do
   LIO.putStrLn "--- Bottom of stack ---"
   stack <- gets state_control_stack
   mapStackM printer stack
   LIO.putStrLn "--- Top of stack ---"
   where
   printer :: ControlStack -> Eval ()
   printer (ProcedureCall {}) = LIO.putStrLn "ProcedureCall"
   printer (ExceptionHandler {}) = LIO.putStrLn "ExceptionHandler"
   printer (WhileLoop {}) = LIO.putStrLn "WhileLoop"
   printer (GeneratorCall {}) = LIO.putStrLn "GeneratorCall"
   printer (EmptyStack {}) = LIO.putStrLn "EmptyStack"
-- | Run a monadic action on every frame, starting from the given
-- frame and following the tail pointers down to 'EmptyStack'
-- (which itself gets no action).
mapStackM :: Monad m => (ControlStack -> m ()) -> ControlStack -> m ()
mapStackM action = go
   where
   go EmptyStack = return ()
   go frame = action frame >> go (control_stack_tail frame)
| bjpop/berp | libs/src/Berp/Base/ControlStack.hs | bsd-3-clause | 6,063 | 0 | 12 | 1,117 | 1,474 | 762 | 712 | 112 | 5 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
module Data.Accounting.Account(
Accounts(..),
accounts,
balance,
empty,
account,
-- * Combinators
merge
) where
import Control.Applicative hiding (empty)
import Control.Lens hiding (children)
import Control.Monad.State
import Data.AdditiveGroup
import Data.Char
import Data.Foldable
import qualified Data.Map.Strict as M
import Data.Semigroup
import Data.Text (Text)
import qualified Data.Text as T
import Data.TreeMap (TreeMap(..), pathTo)
import Data.Accounting.Currency (Currency)
import Text.Parsec.Text
import Text.Parsec hiding ((<|>), many)
-- | An account is a `TreeMap String Currency` and `Accounts` is a top-level
-- account.
newtype Accounts = Accounts { _accounts :: TreeMap Text (Currency Text) }
  deriving (Eq, Ord, Show)
makeLenses ''Accounts
-- All of the algebraic instances delegate to 'merge'/'empty', so
-- combining 'Accounts' is associative and commutative with 'empty'
-- as the identity.
instance Semigroup Accounts where
  (<>) = merge
instance Monoid Accounts where
  mempty = empty
  mappend = merge
instance AdditiveGroup Accounts where
  zeroV = mempty
  l ^+^ r = l <> r
  -- Negation negates the underlying currency tree elementwise.
  negateV = Accounts . negateV . view accounts
-- | An empty set of `Accounts`
empty :: Accounts
empty = Accounts mempty
-- | Get the total balance of an `Accounts` value
balance :: Accounts -> Currency Text
balance = fold . view accounts
-- | Merge two `Accounts`. `merge` is associative and commutative and `empty`
-- acts as its unit.
-- | Merge two `Accounts`. `merge` is associative and commutative and `empty`
-- acts as its unit.
merge :: Accounts -> Accounts -> Accounts
merge (Accounts l) (Accounts r) = Accounts (l `mappend` r)
-- | Create an `Accounts` object with a single top-level account
account :: TreeMap Text (Currency Text) -> Accounts
account = Accounts
| j-mueller/hldb | src/Data/Accounting/Account.hs | bsd-3-clause | 1,781 | 0 | 9 | 399 | 401 | 236 | 165 | 44 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Turtle.Line
( Line
, lineToText
, textToLines
, linesToText
, textToLine
, unsafeTextToLine
, NewlineForbidden(..)
) where
import Data.Text (Text)
import qualified Data.Text as Text
#if __GLASGOW_HASKELL__ >= 708
import Data.Coerce
#endif
import Data.String
#if __GLASGOW_HASKELL__ >= 710
#else
import Data.Monoid
#endif
import Data.Maybe
import Data.Typeable
import Control.Exception
-- | The `NewlineForbidden` exception is thrown when you construct a `Line`
-- using an overloaded string literal or by calling `fromString` explicitly
-- and the supplied string contains newlines. This is a programming error to
-- do so: if you aren't sure that the input string is newline-free, do not
-- rely on the @`IsString` `Line`@ instance.
--
-- When debugging, it might be useful to look for implicit invocations of
-- `fromString` for `Line`:
--
-- > >>> sh (do { line <- "Hello\nWorld"; echo line })
-- > *** Exception: NewlineForbidden
--
-- In the above example, `echo` expects its argument to be a `Line`, thus
-- @line :: `Line`@. Since we bind @line@ in `Shell`, the string literal
-- @\"Hello\\nWorld\"@ has type @`Shell` `Line`@. The
-- @`IsString` (`Shell` `Line`)@ instance delegates the construction of a
-- `Line` to the @`IsString` `Line`@ instance, where the exception is thrown.
--
-- To fix the problem, use `textToLines`:
--
-- > >>> sh (do { line <- select (textToLines "Hello\nWorld"); echo line })
-- > Hello
-- > World
data NewlineForbidden = NewlineForbidden
  deriving (Show, Typeable)
instance Exception NewlineForbidden
-- | A line of text (does not contain newlines).
-- The derived 'Monoid' concatenates the underlying 'Text'
-- (concatenating newline-free text cannot introduce a newline).
newtype Line = Line Text
  deriving (Eq, Ord, Show, Monoid)
instance IsString Line where
  -- Throws 'NewlineForbidden' (as an imprecise exception, via 'throw')
  -- when the literal contains a newline; see the haddock above.
  fromString = fromMaybe (throw NewlineForbidden) . textToLine . fromString
-- | Convert a line to a text value.
lineToText :: Line -> Text
lineToText (Line t) = t
-- | Split text into lines. The inverse of `linesToText`.
textToLines :: Text -> [Line]
textToLines =
#if __GLASGOW_HASKELL__ >= 708
coerce Text.lines
#else
map unsafeTextToLine . Text.lines
#endif
-- | Merge lines into a single text value.
linesToText :: [Line] -> Text
linesToText =
#if __GLASGOW_HASKELL__ >= 708
coerce Text.unlines
#else
Text.unlines . map lineToText
#endif
-- | Try to convert a text value into a line.
-- Precondition (checked): the argument does not contain newlines.
-- | Try to convert a text value into a line.
-- Precondition (checked): the argument does not contain newlines.
-- The empty text is accepted and becomes the empty line.
textToLine :: Text -> Maybe Line
textToLine t =
  case textToLines t of
    []     -> Just (Line "")
    [only] -> Just only
    _      -> Nothing
-- | Convert a text value into a line.
-- Precondition (unchecked): the argument does not contain newlines.
unsafeTextToLine :: Text -> Line
unsafeTextToLine = Line
| PierreR/Haskell-Turtle-Library | src/Turtle/Line.hs | bsd-3-clause | 2,902 | 0 | 10 | 535 | 349 | 214 | 135 | 41 | 3 |
module Main where
import Test.Hspec
import Day6
import Data.ByteString
-- | Specification for Day6's 'decode': 'mostCommon' and 'leastCommon'
-- column strategies, each checked against the sample input and the
-- real puzzle input with pre-computed answers.
spec :: Spec
spec = do
  describe "Day6" $ do
    context "decode" $ do
      it "should return correct val for test input" $ do
        decode mostCommon day6TestInput `shouldBe` "easter"
      it "should return correct val for day 6 input" $ do
        decode mostCommon day6Input `shouldBe` "qrqlznrl"
      it "should return correct val for test input least common" $ do
        decode leastCommon day6TestInput `shouldBe` "advent"
      it "should return correct val for day 6 input least common" $ do
        decode leastCommon day6Input `shouldBe` "kgzdfaon"
-- | Test entry point: run the spec with hspec's default runner.
main :: IO ()
main = hspec spec
-- Sample input whose expected decodings are "easter" (most common)
-- and "advent" (least common) — see 'spec'.
day6TestInput = [
  "eedadn",
  "drvtee",
  "eandsr",
  "raavrd",
  "atevrs",
  "tsrnev",
  "sdttsa",
  "rasrtv",
  "nssdts",
  "ntnada",
  "svetve",
  "tesnvt",
  "vntsnd",
  "vrdear",
  "dvrsen",
  "enarar"]
day6Input = [
"blrqqadw",
"hxwteava",
"xtzzneor",
"ufydaiyx",
"hhvcoozu",
"nbbvuvmd",
"usvdcalw",
"rpntrbyo",
"kgjholvo",
"nlohafzu",
"gxmifiuy",
"xndolmhm",
"zmtsfmtq",
"wkdzmogx",
"aogqflji",
"uphmjtnl",
"jkqognlw",
"mdzsbrij",
"zyxolasw",
"kvdyikgy",
"xpxgmuqq",
"viuoqyap",
"simgbmca",
"qxcjewnz",
"ivwoedjr",
"mlmzozlr",
"jmyallmm",
"oeecmjte",
"miuvzeww",
"qtfsiigb",
"lstgpdfp",
"pevoamxy",
"mjtpbedv",
"ugbasbgg",
"idcnuhtx",
"wwhcrxdr",
"prrugmrq",
"npoiywvw",
"zpkohigv",
"wngoczfk",
"gxnmxano",
"cbacsmte",
"meclajtf",
"zhztflqy",
"grqqlecu",
"qjbzsptn",
"ebagoidi",
"egegrnyh",
"jccwkqle",
"ikkwrzqt",
"nedkjwhf",
"vildwwrp",
"ugrejotn",
"kdajfbqw",
"gyiwhxpd",
"eemhkuwh",
"jvfhoiqi",
"wsgyaiip",
"jzjvagvv",
"lqupczes",
"fetfptqt",
"msmlbgcf",
"iukfpgrm",
"ctymfjpj",
"rbrpmkvx",
"glooxgap",
"scctgiai",
"lakkjuyk",
"gaekimfl",
"bhfaybki",
"qaiazzpf",
"dwqkvsee",
"fuhbygkt",
"zhkggelc",
"haigokzn",
"jpuangaj",
"tpunltos",
"izqxnhhd",
"oeyxnqkn",
"vzvelmik",
"ddnaozap",
"mdlhkdlu",
"oglyexnm",
"mcgxswpe",
"jzkjknxc",
"gfqnuhfs",
"ztnxzwng",
"bnuxixlk",
"vmumdwec",
"kuxxbcbv",
"vdcfhyso",
"qtdesxqr",
"qciljohn",
"qqlluqzz",
"njhvvqbw",
"knakngrj",
"pradgsbt",
"koffjwwy",
"tvrkgjql",
"mqtxerte",
"smigupym",
"bxxvoskx",
"jerbindg",
"snlgnowp",
"qsuxtdsu",
"fnpexyoa",
"ffwifdad",
"mvgrpczm",
"oxszzrsb",
"pxefzlch",
"mcgbeauh",
"neseoapm",
"iwnulsrv",
"zhinoifi",
"lfmjmmtk",
"fsxcqurn",
"gmkkhfuh",
"nuqeimxo",
"uvjdgkdo",
"ohtmvkcu",
"albuiptc",
"piaihrgr",
"fjviblws",
"qotlvddl",
"gchijkjr",
"azzrnqhy",
"xrynrbck",
"pdvkcekk",
"thscvzai",
"eoapfznw",
"hpgoissz",
"ifnesaoy",
"eniqycje",
"hmjmghcp",
"sfyrvbbi",
"tuxcoidt",
"icysmkcf",
"ycagvtls",
"dohqfcgc",
"taitvkzk",
"bblnroyh",
"grdklrua",
"qpijbooa",
"pcwtjacj",
"mrvxbefl",
"oodwrtvj",
"xosqbcie",
"zbquakff",
"ypwpamng",
"rpfbkssq",
"fctgmcav",
"hdtcdfcf",
"ctboapkz",
"qypakerm",
"vebdtsmq",
"cyxqtbtt",
"dcnpkmnu",
"hnjppwfo",
"gqmfdahb",
"hxiqcrbe",
"rpxazkak",
"nmtraoky",
"sisqtogo",
"oycwooev",
"lmmitjey",
"rytzptco",
"waatgjdu",
"khsuxyse",
"cxjltfxn",
"eedsmcld",
"fngdicwe",
"lkomchdq",
"ulvabpoz",
"oyhjvimr",
"dpyexiwi",
"wjfzkbbv",
"ihohnaxx",
"ajxfefrv",
"bplrrpcz",
"rtamodoq",
"slwrcibk",
"sgwdtumz",
"vlemhplz",
"dnpkqvad",
"ytolejsa",
"ojevrxsc",
"bgbmnvyv",
"lmrousup",
"yyubvohm",
"bqaqltmt",
"vfbzzthz",
"ylehjmop",
"exddqqwo",
"xqfxejzq",
"myyuypku",
"zyvmvbla",
"cnpquvbp",
"yaxdddeq",
"cnrjqdra",
"lwphgfgf",
"zqdbcnmn",
"qelivdwx",
"wpnwomgu",
"xzephbpa",
"yghrabgr",
"pnjsyhth",
"okdznczw",
"urwcwwfm",
"hjrsrrzk",
"foklmzqs",
"mjldwaun",
"dabalbmb",
"jmtqvwst",
"uhtzixah",
"blclhmjf",
"wilsnjwb",
"qeeriszr",
"vbfagerv",
"afegxkkh",
"zwzausdd",
"ysfgzvbw",
"ymjlmnmz",
"rnrbxnij",
"ihvhqtvk",
"ofwrugbp",
"ontvlhfu",
"sfjgpqpx",
"oyzkaiyl",
"xfmvkfkh",
"pqpeeptl",
"jdyueahx",
"plghatyl",
"yrdizope",
"lrurgkqw",
"xdqtlmww",
"dkaiotxb",
"iegjcmln",
"iupoupxa",
"zrepcilx",
"tpewzoxi",
"munsmbpj",
"fvhsucvb",
"rlwchfml",
"kcmlbubj",
"jnhurapr",
"dflwxeii",
"wtypbujm",
"jivypmpr",
"argvlhnz",
"acyvvplf",
"naqafzfw",
"ngepfsju",
"xfpuwtji",
"pqgkxbmj",
"oeygjbxs",
"evoydkqq",
"nhuoohdi",
"wrznguek",
"ssirmkbq",
"ackhglvh",
"egszqozo",
"rhnhhxul",
"mqabqvun",
"yunlcuvd",
"zklsneau",
"itanrdqb",
"pvxbkwoc",
"rqbqjyuv",
"ioxjpvqd",
"pzkgsdej",
"yklripsi",
"iohazhoh",
"umxxpdaw",
"czfnfgxt",
"xaxvkjjc",
"qhgvdvaa",
"iobwhxjq",
"jwfwqqjs",
"cbrfgjpp",
"conpdlzv",
"wbcmssue",
"gyqkseid",
"ozrzahxt",
"rzowboce",
"lhntmyie",
"tlrcktzc",
"lxmzpvku",
"ckliqrdt",
"qlmalosg",
"ovvyxrnr",
"gctjwzrl",
"ooqvxzac",
"dbdqzzly",
"fpsjzuxx",
"njndzgel",
"hjfqofhh",
"txhcpktf",
"otceqnmc",
"dduyepiz",
"bsxdbzgs",
"zklbicun",
"rstnuwtg",
"tasiqsbs",
"wewnwuyn",
"zvgkuxxp",
"nxcmlrmx",
"mizqhlnv",
"xyxzfeca",
"qkeuwzgi",
"ajnzmfks",
"ejszlxyc",
"xzfggxpd",
"jbooydts",
"eisoqvuo",
"hdfpevns",
"alybbyrb",
"yvpylcnz",
"tdpcycrv",
"kwptuqyw",
"ncobyufk",
"fclvkbek",
"tgnfcfup",
"vbcuaudl",
"hublkdvy",
"aoetzcyl",
"fsiuwhbi",
"eyolgmxh",
"siptvnjn",
"shvycepr",
"ntrwmime",
"dbdnbfyt",
"bwluchce",
"uigenqhy",
"krxdyhap",
"avycqglh",
"gguniqpm",
"wcwzelyd",
"wzurdris",
"rmhstxuj",
"vuaozvvq",
"bsdgqrpx",
"twnvkunt",
"nqgqtugs",
"vzkvghwg",
"ypceflob",
"dsyzunmb",
"kvhacqqr",
"ozlfwkjl",
"pyznytxd",
"ykdkbfgf",
"eajwdyia",
"bhkxsxcc",
"vytpdoop",
"ibpypdrh",
"dkkjnwng",
"lxwkkldf",
"nbtckkoy",
"qtjyffvl",
"sbitpceb",
"sxmhbcuy",
"zorovlxd",
"bazreact",
"cwzggemu",
"uowhquji",
"eijszbmy",
"aarneovu",
"grhvjqyo",
"fzheiyvq",
"nzsdrlli",
"wfsdwsok",
"wrqjuygq",
"ggpffnri",
"wkycrfjm",
"drksyjxn",
"smuhwcxa",
"iabdvvyj",
"esidunjn",
"decnfzwl",
"ysihdzkf",
"zokmsjgk",
"pxuddjdo",
"uemyoegc",
"glqycmsw",
"fvfkqzdu",
"mhotjpqc",
"pfyuopbx",
"tibutsqb",
"krzcqnkv",
"djqpmsmb",
"vbufrshp",
"mmzsrikm",
"zkjbrtoo",
"uopielbd",
"jmketnly",
"raomwphg",
"uwocphkf",
"lvktwagm",
"lqmorzgf",
"rihrgrdp",
"cnbuplfg",
"hwfjvxcj",
"sfgptuic",
"ixkimxsx",
"kfpicnix",
"tvpybbrf",
"navehxpr",
"rwbcttbq",
"obqcxwjd",
"fuiskmfg",
"xcvfxoeh",
"tsmaaoyx",
"qjhiyeex",
"qwfxiyxq",
"ctkyxatm",
"hyxhsvmy",
"puknicfi",
"hbwzmyks",
"uczqlycu",
"wkywzgqs",
"kzfehffd",
"aoooehdc",
"lnijvgrg",
"aedbnxzk",
"lusvnger",
"ltpbpgiq",
"aypxjgwo",
"lgejygmw",
"auqexwja",
"fwszagnq",
"aiafpduf",
"lyltmest",
"agtasqwl",
"fqrlliiw",
"udarpyjv",
"kxotyded",
"aodevwdt",
"lmmfarbx",
"snjwogeo",
"ehfmpymn",
"yahfaxeo",
"xudbdnog",
"rrkxhhsy",
"hdxadfck",
"dmnujkng",
"cujvjtry",
"srwxylvi",
"dwohbywb",
"cvspfupf",
"czvvjhfw",
"wvyjwtzz",
"vfooqywj",
"bmulxlpz",
"hbukjylo",
"bdhsvgdg",
"vnrrqyue",
"hjveswxf",
"yxgzdjwn",
"byonsarh",
"edbmtqyz",
"owvunnfp",
"wfqqsuyj",
"cwckbkwt",
"plesmdky",
"pzatdacm",
"nqfyxhij",
"jjwqitsc",
"tejffykk",
"yllyznoo",
"kkqhuqlc",
"hxchsqos",
"buvmceha",
"kbzymzrl",
"kiemcigv",
"txmjfujf",
"vfnystic",
"kvaiybnq",
"ztrwxszz",
"wiyawlfm",
"sgedycpx",
"isafnieb",
"bpspuqvx",
"fqjtxrtb",
"bgjdrvhb",
"mnsbgbhe",
"jpsqcfzz",
"fpumugea",
"qqutezwg",
"eoabntsw",
"tupqchzt",
"ezwjasja",
"rsguwrqg",
"cqzcijqd",
"rhxlhksr",
"vcwlknrc",
"eiqbcafb",
"lwzbrrtr",
"aomiovcj",
"ujxshcar",
"fbpjehma",
"bgdphfwg",
"iukvlxvq",
"ptawvjzy",
"styyqrqd",
"itobtfvm",
"yqnpsyha",
"vkwfaykp",
"zwpoxkzx",
"uqwasoht",
"tkgfmnvj",
"xkilydvt",
"xlmkpdaz",
"xfvukjte",
"yyzpwped",
"xzxwnrlm",
"ausmhunn",
"qgiiljhq",
"njqhxprl",
"fgfxiphp",
"kkzjpuur",
"dcqixesl",
"tthldwgg",
"nkjxnttn",
"cjtiiltj",
"drlzddsv",
"xxluiael",
"kjjsewia",
"danhtpxa",
"edexzcqw",
"mrqewvuh",
"opwtwbbt",
"rdbsaeke",
"viistwnj",
"llcndvsm",
"jeejjqyb",
"hstekias",
"gmswtskg",
"qhdktszo",
"ptbryiff",
"jrtlgbag",
"gjbbbfnu",
"uirwdwzh",
"esmntxej",
"vdcmrenk",
"tagtsvaz",
"hnewrron",
"zydwkvuh",
"zscfhzxk",
"sazgunom",
"gqcxdowc",
"twmxtniu",
"wfblhfiv",
"barpdrob",
"jwjrnqhv",
"xvnysjvz",
"jvsftvqs",
"jivuhphv",
"grbezkpe",
"xuolyqis",
"smuxlqpu",
"rticwcrh",
"huzyzxul",
"pgqawldg",
"mdcgejab",
"rlrgwpfo",
"uqhvyglu",
"csinjsjy",
"ydorfrud",
"gmcnjnbr",
"qzvizjbt",
"vejkuvii",
"uhfrombz",
"clgrjlys"]
| reidwilbur/aoc2016 | test/Day6Spec.hs | bsd-3-clause | 8,909 | 0 | 17 | 1,937 | 1,935 | 1,260 | 675 | 608 | 1 |
module Jhc.Type.Basic(module Jhc.Type.Basic, module Jhc.Type.Word) where
import Jhc.Prim.Prim
import Jhc.Type.Word
-- | The conventional alias: a 'String' is a list of characters.
type String = [Char]
-- | An optional value: absent ('Nothing') or present ('Just').
data Maybe a = Nothing | Just a
-- | A value of one of two alternatives.
data Either a b = Left a | Right b
-- | Boxed character, wrapping the primitive 'Char_' representation.
data Char = Char Char_
-- | Boxed integer, wrapping 'BitsMax_' (presumably the widest
-- primitive integer type — confirm in Jhc.Prim.Prim).
data Integer = Integer BitsMax_
-- Primitive-representation aliases used by the runtime/compiler:
type Bool__ = Bool_
type Int__ = Bits32_
type Char__ = Bits32_
type Enum__ = Bits16_
type Addr__ = BitsPtr_
| dec9ue/jhc_copygc | lib/jhc/Jhc/Type/Basic.hs | gpl-2.0 | 376 | 0 | 6 | 72 | 121 | 78 | 43 | 13 | 0 |
import Data.Char
import Data.List
-- Read one line, uppercase it, reverse it, and print it with a dash
-- between every pair of characters (e.g. "abc" -> "C-B-A").
main = getLine >>= putStrLn . intersperse '-' . reverse . map toUpper
| alexliew/learn_you_a_haskell | code/fmapping_io.hs | unlicense | 126 | 0 | 12 | 25 | 51 | 24 | 27 | 5 | 1 |
import Test.DocTest (doctest)
-- | Run the doctest examples embedded in System/Random/MWC.hs.
-- NOTE(review): "-fobject-code" is forwarded to GHC by doctest —
-- presumably required for this module to load in the interpreter;
-- confirm before removing.
main :: IO ()
main = doctest ["-fobject-code", "System/Random/MWC.hs"]
| bos/mwc-random | tests/doctests.hs | bsd-2-clause | 102 | 0 | 6 | 13 | 35 | 19 | 16 | 3 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE CPP #-}
module Web.Twitter.Conduit.Api
(
-- * Search
SearchTweets
, searchTweets
, search
-- * Direct Messages
, DirectMessages
, directMessages
, DirectMessagesSent
, directMessagesSent
, DirectMessagesShow
, directMessagesShow
, DirectMessagesDestroy
, directMessagesDestroy
, DirectMessagesNew
, directMessagesNew
-- * Friends & Followers
, FriendshipsNoRetweetsIds
, friendshipsNoRetweetsIds
, FriendsIds
, friendsIds
, FollowersIds
, followersIds
, FriendshipsIncoming
, friendshipsIncoming
, FriendshipsOutgoing
, friendshipsOutgoing
, FriendshipsCreate
, friendshipsCreate
, FriendshipsDestroy
, friendshipsDestroy
-- , friendshipsUpdate
-- , friendshipsShow
, FriendsList
, friendsList
, FollowersList
, followersList
-- , friendshipsLookup
-- * Users
-- , accountSettings
, AccountVerifyCredentials
, accountVerifyCredentials
-- , accountSettingsUpdate
-- , accountUpdateDeliveryDevice
-- , accountUpdateProfile
-- , accountUpdateProfileBackgroundImage
-- , accountUpdateProfileColors
-- , accoutUpdateProfileImage
-- , blocksList
-- , blocksIds
-- , blocksCreate
-- , blocksDestroy
, UsersLookup
, usersLookup
, UsersShow
, usersShow
-- , usersSearch
-- , usersContributees
-- , usersContributors
-- , accuntRemoveProfileBanner
-- , accuntUpdateProfileBanner
-- , usersProfileBanner
-- , mutesUsersCreate
-- , mutesUsersDestroy
-- , mutesUsersIds
-- , mutesUsersList
-- * Suggested Users
-- , usersSuggestionsSlug
-- , usersSuggestions
-- , usersSuggestionsSlugMembers
-- * Favorites
, FavoritesList
, favoritesList
, FavoritesDestroy
, favoritesDestroy
, FavoritesCreate
, favoritesCreate
-- * Lists
-- , listsList
, ListsStatuses
, listsStatuses
, ListsMembersDestroy
, listsMembersDestroy
, ListsMemberships
, listsMemberships
, ListsSubscribers
, listsSubscribers
-- , listsSubscribersCreate
-- , listsSubscribersShow
-- , listsSubscribersDestroy
-- , listsMembersCreateAll
-- , listsMembersShow
, ListsMembers
, listsMembers
, ListsMembersCreate
, listsMembersCreate
, ListsDestroy
, listsDestroy
, ListsUpdate
, listsUpdate
, ListsCreate
, listsCreate
, ListsShow
, listsShow
, ListsSubscriptions
, listsSubscriptions
-- , listsMembersDestroyAll
, ListsOwnerships
, listsOwnerships
-- * Saved Searches
-- savedSearchesList
-- savedSearchesShowId
-- savedSearchesCreate
-- savedSearchesDestroyId
-- * Places & Geo
-- geoIdPlaceId
-- geoReverseGeocode
-- geoSearch
-- geoSimilarPlaces
-- geoPlace
-- * media
, MediaUpload
, mediaUpload
) where
import Web.Twitter.Types
import Web.Twitter.Conduit.Parameters
import Web.Twitter.Conduit.Parameters.TH
import Web.Twitter.Conduit.Base
import Web.Twitter.Conduit.Request
import Web.Twitter.Conduit.Cursor
import Network.HTTP.Client.MultipartFormData
import qualified Data.Text as T
import Data.Default
-- $setup
-- >>> :set -XOverloadedStrings
-- >>> import Control.Lens
data SearchTweets
-- | Returns search query.
--
-- You can perform a search query using 'call':
--
-- @
-- res <- 'call' ('searchTweets' \"search text\")
-- 'liftIO' . 'print' $ res ^. 'searchResultStatuses'
-- @
--
-- >>> searchTweets "search text"
-- APIRequestGet "https://api.twitter.com/1.1/search/tweets.json" [("q","search text")]
-- >>> searchTweets "search text" & lang ?~ "ja" & count ?~ 100
-- APIRequestGet "https://api.twitter.com/1.1/search/tweets.json" [("count","100"),("lang","ja"),("q","search text")]
searchTweets :: T.Text -- ^ search string
             -> APIRequest SearchTweets (SearchResult [SearchStatus])
searchTweets query = APIRequestGet url params
  where
    -- Endpoint and query string bound separately for readability.
    url    = endpoint ++ "search/tweets.json"
    params = [("q", PVString query)]
deriveHasParamInstances ''SearchTweets
    [ "lang"
    , "locale"
    -- , "result_type"
    , "count"
    , "until"
    , "since_id"
    , "max_id"
    , "include_entities"
    -- , "callback" (needless)
    ]
-- | Alias of 'searchTweets', kept for backward compatibility.
search :: T.Text -- ^ search string
       -> APIRequest SearchTweets (SearchResult [SearchStatus])
search q = searchTweets q
data DirectMessages
-- | Returns query data which asks recent direct messages sent to the authenticating user.
--
-- You can perform a query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'directMessages' '&' 'count' '?~' 100
-- @
--
-- >>> directMessages
-- APIRequestGet "https://api.twitter.com/1.1/direct_messages.json" []
-- >>> directMessages & count ?~ 100
-- APIRequestGet "https://api.twitter.com/1.1/direct_messages.json" [("count","100")]
directMessages :: APIRequest DirectMessages [DirectMessage]
directMessages = APIRequestGet url def
  where url = endpoint ++ "direct_messages.json"
deriveHasParamInstances ''DirectMessages
    [ "since_id"
    , "max_id"
    , "count"
    , "include_entities"
    , "skip_status"
    ]
data DirectMessagesSent
-- | Returns query data which asks recent direct messages sent by the authenticating user.
--
-- You can perform a query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'directMessagesSent' '&' 'count' '?~' 100
-- @
--
-- >>> directMessagesSent
-- APIRequestGet "https://api.twitter.com/1.1/direct_messages/sent.json" []
-- >>> directMessagesSent & count ?~ 100
-- APIRequestGet "https://api.twitter.com/1.1/direct_messages/sent.json" [("count","100")]
directMessagesSent :: APIRequest DirectMessagesSent [DirectMessage]
directMessagesSent = APIRequestGet url def
  where url = endpoint ++ "direct_messages/sent.json"
deriveHasParamInstances ''DirectMessagesSent
    [ "since_id"
    , "max_id"
    , "count"
    , "include_entities"
    , "page"
    , "skip_status"
    ]
data DirectMessagesShow
-- | Returns query data which asks a single direct message, specified by an id parameter.
--
-- You can perform a query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'directMessagesShow' 1234567890
-- @
--
-- >>> directMessagesShow 1234567890
-- APIRequestGet "https://api.twitter.com/1.1/direct_messages/show.json" [("id","1234567890")]
directMessagesShow :: StatusId -> APIRequest DirectMessagesShow DirectMessage
directMessagesShow msgId = APIRequestGet url [("id", PVInteger msgId)]
  where url = endpoint ++ "direct_messages/show.json"
data DirectMessagesDestroy
-- | Returns post data which destroys the direct message specified in the required ID parameter.
--
-- You can perform a query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'directMessagesDestroy' 1234567890
-- @
--
-- >>> directMessagesDestroy 1234567890
-- APIRequestPost "https://api.twitter.com/1.1/direct_messages/destroy.json" [("id","1234567890")]
directMessagesDestroy :: StatusId -> APIRequest DirectMessagesDestroy DirectMessage
directMessagesDestroy msgId = APIRequestPost url [("id", PVInteger msgId)]
  where url = endpoint ++ "direct_messages/destroy.json"
deriveHasParamInstances ''DirectMessagesDestroy
    [ "include_entities"
    ]
data DirectMessagesNew
-- | Returns post data which sends a new direct message to the specified user from the authenticating user.
--
-- You can perform a post using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'directMessagesNew' (ScreenNameParam \"thimura\") \"Hello DM\"
-- @
--
-- >>> directMessagesNew (ScreenNameParam "thimura") "Hello DM"
-- APIRequestPost "https://api.twitter.com/1.1/direct_messages/new.json" [("text","Hello DM"),("screen_name","thimura")]
-- >>> directMessagesNew (UserIdParam 69179963) "Hello thimura! by UserId"
-- APIRequestPost "https://api.twitter.com/1.1/direct_messages/new.json" [("text","Hello thimura! by UserId"),("user_id","69179963")]
directMessagesNew :: UserParam -> T.Text -> APIRequest DirectMessagesNew DirectMessage
directMessagesNew recipient msg = APIRequestPost url params
  where
    url    = endpoint ++ "direct_messages/new.json"
    -- The message text comes first, followed by the recipient identification.
    params = ("text", PVString msg) : mkUserParam recipient
data FriendshipsNoRetweetsIds
-- | Returns a collection of user_ids that the currently authenticated user does not want to receive retweets from.
--
-- You can perform a request using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'friendshipsNoRetweetsIds'
-- @
--
-- >>> friendshipsNoRetweetsIds
-- APIRequestGet "https://api.twitter.com/1.1/friendships/no_retweets/ids.json" []
friendshipsNoRetweetsIds :: APIRequest FriendshipsNoRetweetsIds [UserId]
friendshipsNoRetweetsIds = APIRequestGet url []
  where url = endpoint ++ "friendships/no_retweets/ids.json"
data FriendsIds
-- | Returns query data which asks a collection of user IDs for every user the specified user is following.
--
-- You can perform a query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'friendsIds' ('ScreenNameParam' \"thimura\")
-- @
--
-- Or, you can iterate with 'sourceWithCursor':
--
-- @
-- 'sourceWithCursor' ('friendsIds' ('ScreenNameParam' \"thimura\")) $$ CL.consume
-- @
--
-- >>> friendsIds (ScreenNameParam "thimura")
-- APIRequestGet "https://api.twitter.com/1.1/friends/ids.json" [("screen_name","thimura")]
-- >>> friendsIds (ScreenNameParam "thimura") & count ?~ 5000
-- APIRequestGet "https://api.twitter.com/1.1/friends/ids.json" [("count","5000"),("screen_name","thimura")]
friendsIds :: UserParam -> APIRequest FriendsIds (WithCursor IdsCursorKey UserId)
friendsIds usr = APIRequestGet url (mkUserParam usr)
  where url = endpoint ++ "friends/ids.json"
deriveHasParamInstances ''FriendsIds
    [ "cursor"
    -- , "stringify_ids" -- (needless)
    , "count"
    ]
data FollowersIds
-- | Returns query data which asks a collection of user IDs for every user following the specified user.
--
-- You can perform a query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'followersIds' ('ScreenNameParam' \"thimura\")
-- @
--
-- Or, you can iterate with 'sourceWithCursor':
--
-- @
-- 'sourceWithCursor' ('followersIds' ('ScreenNameParam' \"thimura\")) $$ CL.consume
-- @
--
-- >>> followersIds (ScreenNameParam "thimura")
-- APIRequestGet "https://api.twitter.com/1.1/followers/ids.json" [("screen_name","thimura")]
-- >>> followersIds (ScreenNameParam "thimura") & count ?~ 5000
-- APIRequestGet "https://api.twitter.com/1.1/followers/ids.json" [("count","5000"),("screen_name","thimura")]
followersIds :: UserParam -> APIRequest FollowersIds (WithCursor IdsCursorKey UserId)
followersIds usr = APIRequestGet url (mkUserParam usr)
  where url = endpoint ++ "followers/ids.json"
deriveHasParamInstances ''FollowersIds
    [ "cursor"
    -- , "stringify_ids" -- (needless)
    , "count"
    ]
data FriendshipsIncoming
-- | Returns a collection of numeric IDs for every user who has a pending request to follow the authenticating user.
--
-- You can perform a request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'friendshipsIncoming'
-- @
--
-- Or, you can iterate with 'sourceWithCursor':
--
-- @
-- 'sourceWithCursor' 'friendshipsIncoming' $$ CL.consume
-- @
--
-- >>> friendshipsIncoming
-- APIRequestGet "https://api.twitter.com/1.1/friendships/incoming.json" []
friendshipsIncoming :: APIRequest FriendshipsIncoming (WithCursor IdsCursorKey UserId)
friendshipsIncoming = APIRequestGet url def
  where url = endpoint ++ "friendships/incoming.json"
deriveHasParamInstances ''FriendshipsIncoming
    [ "cursor"
    -- , "stringify_ids" -- (needless)
    ]
data FriendshipsOutgoing
-- | Returns a collection of numeric IDs for every protected user for whom the authenticating user has a pending follow request.
--
-- You can perform a request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'friendshipsOutgoing'
-- @
--
-- Or, you can iterate with 'sourceWithCursor':
--
-- @
-- 'sourceWithCursor' 'friendshipsOutgoing' $$ CL.consume
-- @
--
-- >>> friendshipsOutgoing
-- APIRequestGet "https://api.twitter.com/1.1/friendships/outgoing.json" []
friendshipsOutgoing :: APIRequest FriendshipsOutgoing (WithCursor IdsCursorKey UserId)
friendshipsOutgoing = APIRequestGet url def
  where url = endpoint ++ "friendships/outgoing.json"
deriveHasParamInstances ''FriendshipsOutgoing
    [ "cursor"
    -- , "stringify_ids" -- (needless)
    ]
data FriendshipsCreate
-- | Returns post data which follows the user specified in the ID parameter.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'friendshipsCreate' ('ScreenNameParam' \"thimura\")
-- @
--
-- >>> friendshipsCreate (ScreenNameParam "thimura")
-- APIRequestPost "https://api.twitter.com/1.1/friendships/create.json" [("screen_name","thimura")]
-- >>> friendshipsCreate (UserIdParam 69179963)
-- APIRequestPost "https://api.twitter.com/1.1/friendships/create.json" [("user_id","69179963")]
friendshipsCreate :: UserParam -> APIRequest FriendshipsCreate User
friendshipsCreate usr = APIRequestPost url (mkUserParam usr)
  where url = endpoint ++ "friendships/create.json"
deriveHasParamInstances ''FriendshipsCreate
    [ "follow"
    ]
data FriendshipsDestroy
-- | Returns post data which unfollows the user specified in the ID parameter.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'friendshipsDestroy' ('ScreenNameParam' \"thimura\")
-- @
--
-- >>> friendshipsDestroy (ScreenNameParam "thimura")
-- APIRequestPost "https://api.twitter.com/1.1/friendships/destroy.json" [("screen_name","thimura")]
-- >>> friendshipsDestroy (UserIdParam 69179963)
-- APIRequestPost "https://api.twitter.com/1.1/friendships/destroy.json" [("user_id","69179963")]
friendshipsDestroy :: UserParam -> APIRequest FriendshipsDestroy User
friendshipsDestroy usr = APIRequestPost url (mkUserParam usr)
  where url = endpoint ++ "friendships/destroy.json"
data FriendsList
-- | Returns query data which asks a cursored collection of user objects for every user the specified user is following.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'friendsList' ('ScreenNameParam' \"thimura\")
-- @
--
-- Or, you can iterate with 'sourceWithCursor':
--
-- @
-- 'sourceWithCursor' ('friendsList' ('ScreenNameParam' \"thimura\")) $$ CL.consume
-- @
--
-- >>> friendsList (ScreenNameParam "thimura")
-- APIRequestGet "https://api.twitter.com/1.1/friends/list.json" [("screen_name","thimura")]
-- >>> friendsList (UserIdParam 69179963)
-- APIRequestGet "https://api.twitter.com/1.1/friends/list.json" [("user_id","69179963")]
friendsList :: UserParam -> APIRequest FriendsList (WithCursor UsersCursorKey User)
friendsList usr = APIRequestGet url (mkUserParam usr)
  where url = endpoint ++ "friends/list.json"
deriveHasParamInstances ''FriendsList
    [ "cursor"
    , "count"
    , "skip_status"
    , "include_user_entities"
    ]
data FollowersList
-- | Returns query data which asks a cursored collection of user objects for users following the specified user.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'followersList' ('ScreenNameParam' \"thimura\")
-- @
--
-- Or, you can iterate with 'sourceWithCursor':
--
-- @
-- 'sourceWithCursor' ('followersList' ('ScreenNameParam' \"thimura\")) $$ CL.consume
-- @
--
-- >>> followersList (ScreenNameParam "thimura")
-- APIRequestGet "https://api.twitter.com/1.1/followers/list.json" [("screen_name","thimura")]
-- >>> followersList (UserIdParam 69179963)
-- APIRequestGet "https://api.twitter.com/1.1/followers/list.json" [("user_id","69179963")]
followersList :: UserParam -> APIRequest FollowersList (WithCursor UsersCursorKey User)
followersList usr = APIRequestGet url (mkUserParam usr)
  where url = endpoint ++ "followers/list.json"
deriveHasParamInstances ''FollowersList
    [ "cursor"
    , "count"
    , "skip_status"
    , "include_user_entities"
    ]
data AccountVerifyCredentials
-- | Returns query data asks that the credential is valid.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'accountVerifyCredentials'
-- @
--
-- >>> accountVerifyCredentials
-- APIRequestGet "https://api.twitter.com/1.1/account/verify_credentials.json" []
accountVerifyCredentials :: APIRequest AccountVerifyCredentials User
accountVerifyCredentials = APIRequestGet url []
  where url = endpoint ++ "account/verify_credentials.json"
deriveHasParamInstances ''AccountVerifyCredentials
    [ "include_entities"
    , "skip_status"
    ]
data UsersLookup
-- | Returns query data asks user objects.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'usersLookup' ('ScreenNameListParam' [\"thimura\", \"twitterapi\"])
-- @
--
-- >>> usersLookup (ScreenNameListParam ["thimura", "twitterapi"])
-- APIRequestGet "https://api.twitter.com/1.1/users/lookup.json" [("screen_name","thimura,twitterapi")]
usersLookup :: UserListParam -> APIRequest UsersLookup [User]
usersLookup usrs = APIRequestGet url (mkUserListParam usrs)
  where url = endpoint ++ "users/lookup.json"
deriveHasParamInstances ''UsersLookup
    [ "include_entities"
    ]
data UsersShow
-- | Returns query data asks the user specified by user id or screen name parameter.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'usersShow' ('ScreenNameParam' \"thimura\")
-- @
--
-- >>> usersShow (ScreenNameParam "thimura")
-- APIRequestGet "https://api.twitter.com/1.1/users/show.json" [("screen_name","thimura")]
usersShow :: UserParam -> APIRequest UsersShow User
usersShow usr = APIRequestGet url (mkUserParam usr)
  where url = endpoint ++ "users/show.json"
deriveHasParamInstances ''UsersShow
    [ "include_entities"
    ]
data FavoritesList
-- | Returns the 20 most recent Tweets favorited by the specified user.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'favoritesList' (ScreenNameParam \"thimura\")
-- @
--
-- >>> favoritesList Nothing
-- APIRequestGet "https://api.twitter.com/1.1/favorites/list.json" []
-- >>> favoritesList (Just (ScreenNameParam "thimura"))
-- APIRequestGet "https://api.twitter.com/1.1/favorites/list.json" [("screen_name","thimura")]
-- >>> favoritesList (Just (UserIdParam 69179963))
-- APIRequestGet "https://api.twitter.com/1.1/favorites/list.json" [("user_id","69179963")]
favoritesList :: Maybe UserParam -> APIRequest FavoritesList [Status]
-- 'Nothing' means "the authenticating user": no user parameters are sent.
favoritesList mbUser = APIRequestGet url (maybe [] mkUserParam mbUser)
  where url = endpoint ++ "favorites/list.json"
deriveHasParamInstances ''FavoritesList
    [ "count"
    , "since_id"
    , "max_id"
    , "include_entities"
    ]
data FavoritesCreate
-- | Returns post data which favorites the status specified in the ID parameter as the authenticating user.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'favoritesCreate' 1234567890
-- @
--
-- >>> favoritesCreate 1234567890
-- APIRequestPost "https://api.twitter.com/1.1/favorites/create.json" [("id","1234567890")]
favoritesCreate :: StatusId -> APIRequest FavoritesCreate Status
favoritesCreate statusId = APIRequestPost url [("id", PVInteger statusId)]
  where url = endpoint ++ "favorites/create.json"
deriveHasParamInstances ''FavoritesCreate
    [ "include_entities"
    ]
data FavoritesDestroy
-- | Returns post data which unfavorites the status specified in the ID parameter as the authenticating user.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'favoritesDestroy' 1234567890
-- @
--
-- >>> favoritesDestroy 1234567890
-- APIRequestPost "https://api.twitter.com/1.1/favorites/destroy.json" [("id","1234567890")]
favoritesDestroy :: StatusId -> APIRequest FavoritesDestroy Status
favoritesDestroy statusId = APIRequestPost url [("id", PVInteger statusId)]
  where url = endpoint ++ "favorites/destroy.json"
deriveHasParamInstances ''FavoritesDestroy
    [ "include_entities"
    ]
data ListsStatuses
-- | Returns the query parameter which fetches a timeline of tweets authored by members of the specified list.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsStatuses' ('ListNameParam' "thimura/haskell")
-- @
--
-- If you need more statuses, you can obtain those by using 'sourceWithMaxId':
-- @
-- res <- sourceWithMaxId ('listsStatuses' ('ListNameParam' "thimura/haskell") & count ?~ 200) $$ CL.take 1000
-- @
--
-- >>> listsStatuses (ListNameParam "thimura/haskell")
-- APIRequestGet "https://api.twitter.com/1.1/lists/statuses.json" [("slug","haskell"),("owner_screen_name","thimura")]
-- >>> listsStatuses (ListIdParam 20849097)
-- APIRequestGet "https://api.twitter.com/1.1/lists/statuses.json" [("list_id","20849097")]
listsStatuses :: ListParam -> APIRequest ListsStatuses [Status]
listsStatuses list = APIRequestGet url (mkListParam list)
  where url = endpoint ++ "lists/statuses.json"
deriveHasParamInstances ''ListsStatuses
    [ "since_id"
    , "max_id"
    , "count"
    , "include_entities"
    , "include_rts"
    ]
data ListsMembersDestroy
-- | Returns the post parameter which removes the specified member from the list.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsMembersDestroy' ('ListNameParam' "thimura/haskell") ('ScreenNameParam' "thimura")
-- @
--
-- >>> listsMembersDestroy (ListNameParam "thimura/haskell") (ScreenNameParam "thimura")
-- APIRequestPost "https://api.twitter.com/1.1/lists/members/destroy.json" [("slug","haskell"),("owner_screen_name","thimura"),("screen_name","thimura")]
-- >>> listsMembersDestroy (ListIdParam 20849097) (UserIdParam 69179963)
-- APIRequestPost "https://api.twitter.com/1.1/lists/members/destroy.json" [("list_id","20849097"),("user_id","69179963")]
listsMembersDestroy :: ListParam -> UserParam -> APIRequest ListsMembersDestroy List
listsMembersDestroy list usr = APIRequestPost url params
  where
    url    = endpoint ++ "lists/members/destroy.json"
    -- List identification precedes the member identification.
    params = mkListParam list ++ mkUserParam usr
data ListsMemberships
-- | Returns the request parameters which asks the lists the specified user has been added to.
-- If 'UserParam' are not provided, the memberships for the authenticating user are returned.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsMemberships' ('ListNameParam' "thimura/haskell")
-- @
--
-- >>> listsMemberships Nothing
-- APIRequestGet "https://api.twitter.com/1.1/lists/memberships.json" []
-- >>> listsMemberships (Just (ScreenNameParam "thimura"))
-- APIRequestGet "https://api.twitter.com/1.1/lists/memberships.json" [("screen_name","thimura")]
-- >>> listsMemberships (Just (UserIdParam 69179963))
-- APIRequestGet "https://api.twitter.com/1.1/lists/memberships.json" [("user_id","69179963")]
listsMemberships :: Maybe UserParam -> APIRequest ListsMemberships (WithCursor ListsCursorKey List)
listsMemberships mbUser = APIRequestGet url params
  where
    url = endpoint ++ "lists/memberships.json"
    params = case mbUser of
      Nothing  -> []
      Just usr -> mkUserParam usr
deriveHasParamInstances ''ListsMemberships
    [ "cursor"
    , "count"
    ]
data ListsSubscribers
-- | Returns the request parameter which asks the subscribers of the specified list.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsSubscribers' ('ListNameParam' "thimura/haskell")
-- @
--
-- >>> listsSubscribers (ListNameParam "thimura/haskell")
-- APIRequestGet "https://api.twitter.com/1.1/lists/subscribers.json" [("slug","haskell"),("owner_screen_name","thimura")]
-- >>> listsSubscribers (ListIdParam 20849097)
-- APIRequestGet "https://api.twitter.com/1.1/lists/subscribers.json" [("list_id","20849097")]
listsSubscribers :: ListParam -> APIRequest ListsSubscribers (WithCursor UsersCursorKey User)
listsSubscribers list = APIRequestGet url (mkListParam list)
  where url = endpoint ++ "lists/subscribers.json"
deriveHasParamInstances ''ListsSubscribers
    [ "cursor"
    , "count"
    , "skip_status"
    ]
data ListsSubscriptions
-- | Returns the request parameter which obtains a collection of the lists the specified user is subscribed to.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsSubscriptions' ('ListNameParam' "thimura/haskell")
-- @
--
-- >>> listsSubscriptions Nothing
-- APIRequestGet "https://api.twitter.com/1.1/lists/subscriptions.json" []
-- >>> listsSubscriptions (Just (ScreenNameParam "thimura"))
-- APIRequestGet "https://api.twitter.com/1.1/lists/subscriptions.json" [("screen_name","thimura")]
-- >>> listsSubscriptions (Just (UserIdParam 69179963))
-- APIRequestGet "https://api.twitter.com/1.1/lists/subscriptions.json" [("user_id","69179963")]
listsSubscriptions :: Maybe UserParam -> APIRequest ListsSubscriptions (WithCursor ListsCursorKey List)
listsSubscriptions mbUser = APIRequestGet url params
  where
    url = endpoint ++ "lists/subscriptions.json"
    params = case mbUser of
      Nothing  -> []
      Just usr -> mkUserParam usr
deriveHasParamInstances ''ListsSubscriptions
    [ "cursor"
    , "count"
    ]
data ListsOwnerships
-- | Returns the request parameter which asks the lists owned by the specified Twitter user.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsOwnerships' ('ListNameParam' "thimura/haskell")
-- @
--
-- >>> listsOwnerships Nothing
-- APIRequestGet "https://api.twitter.com/1.1/lists/ownerships.json" []
-- >>> listsOwnerships (Just (ScreenNameParam "thimura"))
-- APIRequestGet "https://api.twitter.com/1.1/lists/ownerships.json" [("screen_name","thimura")]
-- >>> listsOwnerships (Just (UserIdParam 69179963))
-- APIRequestGet "https://api.twitter.com/1.1/lists/ownerships.json" [("user_id","69179963")]
listsOwnerships :: Maybe UserParam -> APIRequest ListsOwnerships (WithCursor ListsCursorKey List)
listsOwnerships mbUser = APIRequestGet url params
  where
    url = endpoint ++ "lists/ownerships.json"
    params = case mbUser of
      Nothing  -> []
      Just usr -> mkUserParam usr
deriveHasParamInstances ''ListsOwnerships
    [ "cursor"
    , "count"
    ]
data ListsMembers
-- | Returns query data asks the members of the specified list.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsMembers' ('ListNameParam' "thimura/haskell")
-- @
--
-- >>> listsMembers (ListNameParam "thimura/haskell")
-- APIRequestGet "https://api.twitter.com/1.1/lists/members.json" [("slug","haskell"),("owner_screen_name","thimura")]
-- >>> listsMembers (ListIdParam 20849097)
-- APIRequestGet "https://api.twitter.com/1.1/lists/members.json" [("list_id","20849097")]
listsMembers :: ListParam -> APIRequest ListsMembers (WithCursor UsersCursorKey User)
listsMembers list = APIRequestGet url (mkListParam list)
  where url = endpoint ++ "lists/members.json"
deriveHasParamInstances ''ListsMembers
    [ "cursor"
    , "skip_status"
    ]
data ListsMembersCreate
-- | Returns the post parameter which adds a member to a list.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsMembersCreate' ('ListNameParam' "thimura/haskell") ('ScreenNameParam' "thimura")
-- @
--
-- >>> listsMembersCreate (ListNameParam "thimura/haskell") (ScreenNameParam "thimura")
-- APIRequestPost "https://api.twitter.com/1.1/lists/members/create.json" [("slug","haskell"),("owner_screen_name","thimura"),("screen_name","thimura")]
-- >>> listsMembersCreate (ListIdParam 20849097) (UserIdParam 69179963)
-- APIRequestPost "https://api.twitter.com/1.1/lists/members/create.json" [("list_id","20849097"),("user_id","69179963")]
listsMembersCreate :: ListParam -> UserParam -> APIRequest ListsMembersCreate List
listsMembersCreate list usr = APIRequestPost url params
  where
    url    = endpoint ++ "lists/members/create.json"
    -- List identification precedes the member identification.
    params = mkListParam list ++ mkUserParam usr
data ListsDestroy
-- | Returns the post parameter which deletes the specified list.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsDestroy' ('ListNameParam' "thimura/haskell")
-- @
--
-- >>> listsDestroy (ListNameParam "thimura/haskell")
-- APIRequestPost "https://api.twitter.com/1.1/lists/destroy.json" [("slug","haskell"),("owner_screen_name","thimura")]
-- >>> listsDestroy (ListIdParam 20849097)
-- APIRequestPost "https://api.twitter.com/1.1/lists/destroy.json" [("list_id","20849097")]
listsDestroy :: ListParam -> APIRequest ListsDestroy List
listsDestroy list = APIRequestPost url (mkListParam list)
  where url = endpoint ++ "lists/destroy.json"
data ListsUpdate
-- | Returns the post parameter which updates the specified list.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsUpdate' ('ListNameParam' "thimura/haskell") True (Just "Haskellers")
-- @
--
-- >>> listsUpdate (ListNameParam "thimura/haskell") True (Just "Haskellers")
-- APIRequestPost "https://api.twitter.com/1.1/lists/update.json" [("slug","haskell"),("owner_screen_name","thimura"),("description","Haskellers"),("mode","public")]
listsUpdate :: ListParam
            -> Bool -- ^ is public
            -> Maybe T.Text -- ^ description
            -> APIRequest ListsUpdate List
listsUpdate list isPublic mbDescription = APIRequestPost url params
  where
    url = endpoint ++ "lists/update.json"
    -- Parameter order: list identification, optional description, then mode.
    params = mkListParam list ++ descPart ++ [("mode", PVString visibility)]
    descPart = maybe [] (\d -> [("description", PVString d)]) mbDescription
    visibility = if isPublic then "public" else "private"
data ListsCreate
-- | Returns the post parameter which creates a new list for the authenticated user.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsCreate' ('ListNameParam' "thimura/haskell")
-- @
--
-- >>> listsCreate "haskell" True Nothing
-- APIRequestPost "https://api.twitter.com/1.1/lists/create.json" [("name","haskell"),("mode","public")]
-- >>> listsCreate "haskell" False Nothing
-- APIRequestPost "https://api.twitter.com/1.1/lists/create.json" [("name","haskell"),("mode","private")]
-- >>> listsCreate "haskell" True (Just "Haskellers")
-- APIRequestPost "https://api.twitter.com/1.1/lists/create.json" [("description","Haskellers"),("name","haskell"),("mode","public")]
listsCreate :: T.Text -- ^ list name
            -> Bool -- ^ whether public(True) or private(False)
            -> Maybe T.Text -- ^ the description to give the list
            -> APIRequest ListsCreate List
listsCreate name isPublic mbDescription = APIRequestPost url params
  where
    url = endpoint ++ "lists/create.json"
    -- Parameter order: optional description first, then name and mode.
    params = descPart ++ [("name", PVString name), ("mode", PVString visibility)]
    descPart = maybe [] (\d -> [("description", PVString d)]) mbDescription
    visibility = if isPublic then "public" else "private"
data ListsShow
-- | Returns the request parameter which asks the specified list.
--
-- You can perform request by using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'listsShow' ('ListNameParam' "thimura/haskell")
-- @
--
-- >>> listsShow (ListNameParam "thimura/haskell")
-- APIRequestGet "https://api.twitter.com/1.1/lists/show.json" [("slug","haskell"),("owner_screen_name","thimura")]
-- >>> listsShow (ListIdParam 20849097)
-- APIRequestGet "https://api.twitter.com/1.1/lists/show.json" [("list_id","20849097")]
listsShow :: ListParam -> APIRequest ListsShow List
listsShow list = APIRequestGet url (mkListParam list)
  where url = endpoint ++ "lists/show.json"
data MediaUpload
-- | Upload media and returns the media data.
--
-- You can update your status with multiple media by calling 'mediaUpload' and 'update' successively.
--
-- First, you should upload media with 'mediaUpload':
--
-- @
-- res1 <- 'call' twInfo mgr '$' 'mediaUpload' ('MediaFromFile' \"\/path\/to\/upload\/file1.png\")
-- res2 <- 'call' twInfo mgr '$' 'mediaUpload' ('MediaRequestBody' \"file2.png\" \"[.. file body ..]\")
-- @
--
-- and then collect the resulting media IDs and update your status by calling 'update':
--
-- @
-- 'call' twInfo mgr '$' 'update' \"Hello World\" '&' 'mediaIds' '?~' ['mediaId' res1, 'mediaId' res2]
-- @
--
-- See: <https://dev.twitter.com/docs/api/multiple-media-extended-entities>
--
-- >>> mediaUpload (MediaFromFile "/home/test/test.png")
-- APIRequestPostMultipart "https://upload.twitter.com/1.1/media/upload.json" []
mediaUpload :: MediaData
            -> APIRequest MediaUpload UploadedMedia
mediaUpload mediaData = APIRequestPostMultipart uri [] [toPart mediaData]
  where
    -- Note: uploads go to the dedicated upload host, not 'endpoint'.
    uri = "https://upload.twitter.com/1.1/media/upload.json"
    toPart (MediaFromFile fp)              = partFileSource "media" fp
    toPart (MediaRequestBody fname fbody)  = partFileRequestBody "media" fname fbody
| johan--/twitter-conduit | Web/Twitter/Conduit/Api.hs | bsd-2-clause | 32,497 | 0 | 12 | 5,008 | 3,122 | 1,912 | 1,210 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Test.CommandLine (tests) where
import Fay.Compiler.Prelude
import Test.Util
import System.Directory
import System.Environment
import System.FilePath
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.TH
-- | All test cases in this module (every @case_*@ binding), collected
-- automatically by tasty-th's 'testGroupGenerator' Template Haskell splice.
tests :: TestTree
tests = $testGroupGenerator
-- | Run the @fay@ compiler executable with the given command-line flags,
-- returning its stdout on success or a descriptive message on failure.
--
-- The binary is located by first consulting the @HASKELL_SANDBOX@
-- environment variable (expected to point at a sandbox root containing
-- @bin/fay@) and falling back to whatever 'fayPath' discovers.
-- NOTE(review): 'case_compile' reads @HASKELL_PACKAGE_SANDBOX@ instead;
-- confirm the two variable names are intentionally different.
compileFile :: [String] -> IO (Either String String)
compileFile flags = do
  sandboxFay <- fmap (fmap (\x -> x</>"bin"</>"fay") . lookup "HASKELL_SANDBOX")
                     getEnvironment
  fay <- fayPath
  let path = fromMaybe "couldn't find fay" (sandboxFay <|> fay)
  exists <- doesFileExist path
  if exists
    then do
      r <- readAllFromProcess path flags ""
      return $ case r of
        Left  (l, _) -> Left ("Reason: " ++ l)
        Right (_, t) -> Right t
    -- Fixed garbled error message ("fay path not are existing").
    else error $ "fay executable does not exist at: " ++ path
-- | The fay executable must be discoverable via 'fayPath'.
case_executable :: Assertion
case_executable =
  fayPath >>= assertBool "Could not find fay executable" . isJust
-- | Compiling a known-good test module must succeed; the package database
-- is taken from HASKELL_PACKAGE_SANDBOX when that variable is set.
case_compile :: Assertion
case_compile = do
  mbSandbox <- lookup "HASKELL_PACKAGE_SANDBOX" <$> getEnvironment
  let pkgFlags = maybe [] (\pc -> ["--package-conf=" ++ pc]) mbSandbox
  res <- compileFile
           (["--include=tests", "tests/RecordImport_Import.hs", "--no-ghc"] ++ pkgFlags)
  assertBool (fromLeft res) (isRight res)
| fpco/fay | src/tests/Test/CommandLine.hs | bsd-3-clause | 1,441 | 0 | 16 | 371 | 389 | 200 | 189 | 35 | 3 |
module Database.DSH.VSL.Dot
( renderVSLDot
) where
import qualified Data.Foldable as F
import qualified Data.IntMap as Map
import Data.List
import qualified Data.List.NonEmpty as N
import Prelude hiding ((<$>))
import Text.PrettyPrint.ANSI.Leijen
import qualified Database.Algebra.Dag as Dag
import Database.Algebra.Dag.Common as C
import qualified Database.DSH.Common.Lang as L
import Database.DSH.Common.Pretty
import Database.DSH.Common.Type
import Database.DSH.Common.VectorLang
import Database.DSH.VSL.Lang
-- | Render a node identifier as @id: n@.
nodeToDoc :: AlgNode -> Doc
nodeToDoc n = text "id:" <+> int n
-- | Stack tags vertically, one per line.
tagsToDoc :: [Tag] -> Doc
tagsToDoc ts = vcat $ map text ts
-- | Full node label: id line, operator name with rendered arguments in
-- parentheses, then the node's de-duplicated tags.  Note: '<$>' here is
-- the pretty-printer's vertical composition (the Prelude '<$>' is hidden
-- by the import list above).
labelToDoc :: AlgNode -> String -> Doc -> [Tag] -> Doc
labelToDoc n s as ts = nodeToDoc n <$> (text s <> parens as) <$> tagsToDoc (nub ts)
-- | Tags attached to a node; nodes without an entry have none.
lookupTags :: AlgNode -> NodeMap [Tag] -> [Tag]
lookupTags = Map.findWithDefault []
-- | Render a function application as @name(a1, a2, ...)@.
renderFun :: Doc -> [Doc] -> Doc
renderFun name args = name <> parens (hsep $ punctuate comma args)
-- | Render a window frame specification.
renderFrameSpec :: FrameSpec -> Doc
renderFrameSpec FAllPreceding   = text "allprec"
renderFrameSpec (FNPreceding n) = int n <+> text "prec"
-- | Render an aggregate function applied to its argument expression.
-- 'AggrSum' carries the result type, which becomes part of the name
-- (e.g. @sum_int@).
renderAggrFun :: Pretty e => AggrFun e -> Doc
renderAggrFun (AggrSum t c)         = renderFun (text "sum" <> char '_' <> renderColumnType t)
                                                [pretty c]
renderAggrFun (AggrMin c)           = renderFun (text "min") [pretty c]
renderAggrFun (AggrMax c)           = renderFun (text "max") [pretty c]
renderAggrFun (AggrAvg c)           = renderFun (text "avg") [pretty c]
renderAggrFun (AggrAny c)           = renderFun (text "any") [pretty c]
renderAggrFun (AggrAll c)           = renderFun (text "all") [pretty c]
renderAggrFun AggrCount             = renderFun (text "count") []
renderAggrFun (AggrCountDistinct c) = renderFun (text "countDistinct") [pretty c]
-- | Render a window function applied to its argument expression.
renderWinFun :: Pretty e => WinFun e -> Doc
renderWinFun (WinSum c)        = renderFun (text "sum") [pretty c]
renderWinFun (WinMin c)        = renderFun (text "min") [pretty c]
renderWinFun (WinMax c)        = renderFun (text "max") [pretty c]
renderWinFun (WinAvg c)        = renderFun (text "avg") [pretty c]
renderWinFun (WinAny c)        = renderFun (text "any") [pretty c]
renderWinFun (WinAll c)        = renderFun (text "all") [pretty c]
renderWinFun (WinFirstValue c) = renderFun (text "first_value") [pretty c]
renderWinFun WinCount          = renderFun (text "count") []
-- | Render a scalar column type via its 'Show' instance.
renderColumnType :: ScalarType -> Doc
renderColumnType = text . show
-- | Render list elements with @f@, comma-separated inside brackets.
bracketList :: (a -> Doc) -> [a] -> Doc
bracketList f = brackets . hsep . punctuate comma . map f
-- | Render a column name.
renderColName :: L.ColName -> Doc
renderColName (L.ColName c) = text c
-- | Render a column with its type as @name::type@.
renderCol :: (L.ColName, ScalarType) -> Doc
renderCol (c, t) = renderColName c <> text "::" <> renderColumnType t
-- | Render one join conjunct as @e1 op e2@.
renderJoinConjunct :: Pretty e => L.JoinConjunct e -> Doc
renderJoinConjunct (L.JoinConjunct e1 o e2) =
    pretty e1 <+> text (pp o) <+> pretty e2
-- | Render a join predicate as a bracketed, &&-separated conjunction.
renderJoinPred :: Pretty e => L.JoinPredicate e -> Doc
renderJoinPred (L.JoinPred conjs) = brackets
                                    $ hsep
                                    $ punctuate (text "&&")
                                    $ map renderJoinConjunct $ N.toList conjs
-- | Render vector segments: a unit segment on one line, a segment
-- collection one segment per line.
renderSegments :: VecSegs -> Doc
renderSegments (UnitSeg seg) = renderSegment seg
renderSegments (Segs segs) = vcat $ map renderSegment $ F.toList segs
-- | Render one segment as a list of its elements.
renderSegment :: SegD -> Doc
renderSegment s = list $ map pretty $ F.toList s
-- | One-letter code for a segment lookup mode, spliced into join
-- operator names by 'opDotLabel'.
renderSegLookup :: SegmentLookup -> String
renderSegLookup Direct = "M"
renderSegLookup Unit = "U"
-- | Create the node label from an operator description
-- Each clause produces "id / name(args) / tags" via 'labelToDoc'.  Join
-- operators additionally encode the two segment-lookup modes of their
-- inputs in the operator name (e.g. "thetajoinMU").
opDotLabel :: (Pretty e, Pretty r) => NodeMap [Tag] -> AlgNode -> VSLOp r e -> Doc
opDotLabel tm i (UnOp (WinFun (wfun, wspec)) _) = labelToDoc i "winaggr"
    (renderWinFun wfun <> comma <+> renderFrameSpec wspec)
    (lookupTags i tm)
opDotLabel tm i (NullaryOp (Lit (ty, segs))) = labelToDoc i "lit"
        (pretty ty <> comma <$> renderSegments segs) (lookupTags i tm)
opDotLabel tm i (NullaryOp (TableRef (n, schema))) =
  labelToDoc i "table"
             (text n <> text "\n"
              <> align (bracketList (\c -> renderCol c <> text "\n")
                                    (N.toList $ L.tableCols schema)))
             (lookupTags i tm)
-- Argument-less unary operators.
opDotLabel tm i (UnOp Distinct _) = labelToDoc i "unique" empty (lookupTags i tm)
opDotLabel tm i (UnOp Number _) = labelToDoc i "number" empty (lookupTags i tm)
opDotLabel tm i (UnOp MergeMap _) = labelToDoc i "mergemap" empty (lookupTags i tm)
opDotLabel tm i (UnOp Segment _) = labelToDoc i "segment" empty (lookupTags i tm)
opDotLabel tm i (UnOp Unsegment _) = labelToDoc i "unsegment" empty (lookupTags i tm)
opDotLabel tm i (UnOp UnitMap _) = labelToDoc i "unitmap" empty (lookupTags i tm)
opDotLabel tm i (UnOp UpdateUnit _) = labelToDoc i "updateunit" empty (lookupTags i tm)
opDotLabel tm i (UnOp Reverse _) = labelToDoc i "reverse" empty (lookupTags i tm)
opDotLabel tm i (UnOp R1 _) = labelToDoc i "R1" empty (lookupTags i tm)
opDotLabel tm i (UnOp R2 _) = labelToDoc i "R2" empty (lookupTags i tm)
opDotLabel tm i (UnOp R3 _) = labelToDoc i "R3" empty (lookupTags i tm)
-- Unary operators carrying an expression argument.
opDotLabel tm i (UnOp (Project e) _) = labelToDoc i "project" pLabel (lookupTags i tm)
  where pLabel = pretty e
opDotLabel tm i (UnOp (Select e) _) = labelToDoc i "select" (pretty e) (lookupTags i tm)
opDotLabel tm i (UnOp (GroupAggr (g, a)) _) = labelToDoc i "groupaggr" (pretty g <+> bracketList renderAggrFun [a]) (lookupTags i tm)
opDotLabel tm i (UnOp (Fold a) _) = labelToDoc i "aggrseg" (renderAggrFun a) (lookupTags i tm)
opDotLabel tm i (UnOp (Sort e) _) = labelToDoc i "sort" (pretty e) (lookupTags i tm)
opDotLabel tm i (UnOp (Group e) _) = labelToDoc i "group" (pretty e) (lookupTags i tm)
-- Binary operators without an argument of their own.
opDotLabel tm i (BinOp ReplicateSeg _ _) = labelToDoc i "replicatenest" empty (lookupTags i tm)
opDotLabel tm i (BinOp ReplicateScalar _ _) = labelToDoc i "replicatescalar" empty (lookupTags i tm)
opDotLabel tm i (BinOp Materialize _ _) = labelToDoc i "materialize" empty (lookupTags i tm)
opDotLabel tm i (BinOp UpdateMap _ _) = labelToDoc i "updatemap" empty (lookupTags i tm)
opDotLabel tm i (BinOp UnboxSng _ _) = labelToDoc i "unboxsng" empty (lookupTags i tm)
opDotLabel tm i (BinOp (UnboxDefault vs) _ _) = labelToDoc i "unboxdefault" (bracketList pretty $ N.toList vs) (lookupTags i tm)
opDotLabel tm i (BinOp Append _ _) = labelToDoc i "append" empty (lookupTags i tm)
opDotLabel tm i (BinOp Align _ _) = labelToDoc i "align" empty (lookupTags i tm)
opDotLabel tm i (BinOp Zip _ _) = labelToDoc i "zip" empty (lookupTags i tm)
opDotLabel tm i (BinOp CartProduct _ _) = labelToDoc i "cartproduct" empty (lookupTags i tm)
-- Join operators: name carries both lookup modes, argument is the
-- join predicate (plus aggregates for GroupJoin).
opDotLabel tm i (BinOp (ThetaJoin (l1, l2, p)) _ _) =
  labelToDoc i ("thetajoin" ++ renderSegLookup l1 ++ renderSegLookup l2) (renderJoinPred p) (lookupTags i tm)
opDotLabel tm i (BinOp (NestJoin (l1, l2, p)) _ _) =
  labelToDoc i ("nestjoin" ++ renderSegLookup l1 ++ renderSegLookup l2) (renderJoinPred p) (lookupTags i tm)
opDotLabel tm i (BinOp (AntiJoin (l1, l2, p)) _ _) =
  labelToDoc i ("antijoin" ++ renderSegLookup l1 ++ renderSegLookup l2)(renderJoinPred p) (lookupTags i tm)
opDotLabel tm i (BinOp (SemiJoin (l1, l2, p)) _ _) =
  labelToDoc i ("semijoin" ++ renderSegLookup l1 ++ renderSegLookup l2) (renderJoinPred p) (lookupTags i tm)
opDotLabel tm i (BinOp (GroupJoin (l1, l2, p, as)) _ _) =
  labelToDoc i ("groupjoin" ++ renderSegLookup l1 ++ renderSegLookup l2) (renderJoinPred p <+> bracketList renderAggrFun (N.toList $ L.getNE as)) (lookupTags i tm)
opDotLabel tm i (TerOp Combine _ _ _) = labelToDoc i "combine" empty (lookupTags i tm)
-- | Pick a fill colour per operator family: joins are green, products
-- red, sorting/grouping/windowing tomato, replication/alignment tan,
-- map-related operators blue shades.  Anything unmatched is gray.
opDotColor :: VSLOp r e -> DotColor
opDotColor (BinOp CartProduct _ _) = DCRed
opDotColor (BinOp Materialize _ _) = DCSalmon
opDotColor (BinOp (ThetaJoin _) _ _) = DCGreen
opDotColor (BinOp (NestJoin _) _ _) = DCGreen
opDotColor (BinOp (SemiJoin _) _ _) = DCGreen
opDotColor (BinOp (AntiJoin _) _ _) = DCGreen
opDotColor (BinOp (GroupJoin _) _ _) = DCGreen
opDotColor (UnOp (Sort _) _) = DCTomato
opDotColor (UnOp (Group _) _) = DCTomato
opDotColor (BinOp UnboxSng _ _) = DCTan
opDotColor (BinOp ReplicateSeg _ _) = DCTan
opDotColor (BinOp ReplicateScalar _ _) = DCTan
opDotColor (BinOp Align _ _) = DCTan
opDotColor (TerOp Combine _ _ _) = DCDodgerBlue
opDotColor (UnOp (Select _) _) = DCLightSkyBlue
opDotColor (UnOp (Fold _) _) = DCCrimson
opDotColor (UnOp (WinFun _) _) = DCTomato
opDotColor (UnOp (GroupAggr (_, _)) _) = DCTomato
opDotColor (UnOp (Project _) _) = DCLightSkyBlue
opDotColor (BinOp UpdateMap _ _) = DCCyan
opDotColor (UnOp UnitMap _) = DCCornFlowerBlue
opDotColor (UnOp MergeMap _) = DCCornFlowerBlue
opDotColor (UnOp UpdateUnit _) = DCCornFlowerBlue
opDotColor _ = DCGray
-- | Fill colours for Dot nodes; mapped to Graphviz colour names by
-- 'renderColor'.
-- NOTE(review): 'DimDCGray' breaks the DC* prefix convention (presumably
-- meant DCDimGray); renaming would also touch 'renderColor', so it is
-- only flagged here.
data DotColor = DCTomato
              | DCSalmon
              | DCGray
              | DimDCGray
              | DCGold
              | DCTan
              | DCRed
              | DCCrimson
              | DCGreen
              | DCSeaGreen
              | DCYelloGreen
              | DCSienna
              | DCBeige
              | DCDodgerBlue
              | DCLightSkyBlue
              | DCBlueViolet
              | DCHotPink
              | DCBrown
              | DCCyan
              | DCCornFlowerBlue
-- | Map a 'DotColor' to its Graphviz colour-name string.
renderColor :: DotColor -> Doc
renderColor DCTomato = text "tomato"
renderColor DCSalmon = text "salmon"
renderColor DCGray = text "gray"
renderColor DimDCGray = text "dimgray"
renderColor DCGold = text "gold"
renderColor DCTan = text "tan"
renderColor DCRed = text "red"
renderColor DCCrimson = text "crimson"
renderColor DCGreen = text "green"
renderColor DCSeaGreen = text "seagreen"
renderColor DCYelloGreen = text "yellowgreen"
renderColor DCSienna = text "sienna"
renderColor DCBeige = text "beige"
renderColor DCDodgerBlue = text "dodgerblue"
renderColor DCLightSkyBlue = text "lightskyblue"
renderColor DCHotPink = text "hotpink"
renderColor DCBrown = text "brown"
renderColor DCCyan = text "cyan"
renderColor DCBlueViolet = text "blueviolet"
renderColor DCCornFlowerBlue = text "cornflowerblue"
-- | Escape a label for embedding in a double-quoted Dot string:
-- newlines, backslashes and double quotes are backslash-escaped,
-- everything else passes through unchanged.
escapeLabel :: String -> String
escapeLabel = concatMap esc
  where
    esc '\n' = "\\n"
    esc '\\' = "\\\\"
    esc '"'  = "\\\""
    esc c    = [c]
-- | Escape a single character for a double-quoted Dot label.
escapeChar :: Char -> [Char]
escapeChar c = case c of
  '\n' -> ['\\', 'n']
  '\\' -> ['\\', '\\']
  '"'  -> ['\\', '"']
  _    -> [c]
-- | Style options for Dot nodes; only dashed is used (for root nodes).
data DotStyle = Dashed
-- | Label text of a Dot node (expected to be escaped via 'escapeLabel').
type DotLabel = String
-- | Identifier of a Dot node (the DAG's 'AlgNode' id).
type DotNodeID = Int
-- | A Dot node: id, label, fill colour and optional border style.
data DotNode = DotNode DotNodeID DotLabel DotColor (Maybe DotStyle)
-- | A directed Dot edge between two node ids.
data DotEdge = DotEdge DotNodeID DotNodeID
-- | The preamble of a Dot file: graph-wide ordering followed by the
-- default node attributes (filled boxes).
preamble :: Doc
preamble = graphAttributes <$> nodeAttributes
  where
    graphAttributes = text "ordering=out;"
    nodeAttributes  =
        text "node" <+> brackets (text "style=filled" <> comma <+> text "shape=box") <> semi
-- | Render one Dot node declaration:
-- @n [label="...", color=...(, style=dashed)];@
renderDotNode :: DotNode -> Doc
renderDotNode (DotNode n label color mStyle) =
    int n <+> brackets attrs <> semi
  where
    attrs = text "label=" <> dquotes (text label)
            <> comma
            <+> text "color=" <> renderColor color
            <> styleDoc
    styleDoc = case mStyle of
      Just Dashed -> comma <+> text "style=dashed"
      Nothing     -> empty
-- | Render one directed Dot edge declaration: @u -> v;@
renderDotEdge :: DotEdge -> Doc
renderDotEdge (DotEdge from to) = int from <+> text "->" <+> int to <> semi
-- | Render a complete Dot document from the preamble, nodes and edges.
renderDot :: [DotNode] -> [DotEdge] -> Doc
renderDot ns es = text "digraph" <> (braces $ preamble <$> nodeSection <$> edgeSection)
  where
    -- all node declarations, one per line
    nodeSection = vcat $ map renderDotNode ns
    -- all edge declarations, one per line
    edgeSection = vcat $ map renderDotEdge es
-- | Turn an annotated operator into an abstract Dot node; root nodes of
-- the plan are drawn with a dashed border.
constructDotNode :: (Pretty r, Pretty e)
                 => [AlgNode]
                 -> NodeMap [Tag]
                 -> (AlgNode, VSL r e)
                 -> DotNode
constructDotNode rootNodes ts (n, op) = DotNode n label color style
  where
    style | n `elem` rootNodes = Just Dashed
          | otherwise          = Nothing
    label = escapeLabel $ pp $ opDotLabel ts n $ unVSL op
    color = opDotColor $ unVSL op
-- | Create an abstract Dot edge from a (parent, child) node pair.
constructDotEdge :: (AlgNode, AlgNode) -> DotEdge
constructDotEdge (u, v) = DotEdge u v
-- | Extract the operator descriptions and the parent/child edge list
-- from a DAG.
-- FIXME no apparent reason to use topological ordering here
extractGraphStructure :: Ordish r e
                      => Dag.AlgebraDag (VSL r e)
                      -> ([(AlgNode, VSL r e)], [(AlgNode, AlgNode)])
extractGraphStructure d = (operators, childs)
  where
    -- nodes in topological order
    nodes     = Dag.topsort d
    -- each node paired with its operator
    operators = zip nodes $ map (`Dag.operator` d) nodes
    -- one (parent, child) pair per child of every operator
    childs    = concatMap (\(n, op) -> zip (repeat n) (Dag.opChildren op)) operators
-- | Render a VSL plan into a dot file (GraphViz).
renderVSLDot :: (Ord r, Ord e, Show r, Show e, Pretty r, Pretty e)
             => NodeMap [Tag]     -- ^ per-node tags, shown in labels
             -> [AlgNode]         -- ^ root nodes, drawn dashed
             -> NodeMap (VSL r e) -- ^ the plan's operator map
             -> String
renderVSLDot ts roots m = pp $ renderDot dotNodes dotEdges
  where
    (opLabels, edges) = extractGraphStructure d
    d = Dag.mkDag m roots
    dotNodes = map (constructDotNode roots ts) opLabels
    dotEdges = map constructDotEdge edges
| ulricha/dsh | src/Database/DSH/VSL/Dot.hs | bsd-3-clause | 13,689 | 0 | 18 | 3,522 | 4,902 | 2,483 | 2,419 | 251 | 2 |
module HW11.AParser (Parser, runParser, satisfy, char, posInt) where
import Control.Applicative
import Data.Char
-- | A parser consumes a prefix of the input and either fails ('Nothing')
-- or produces a value together with the remaining input.
newtype Parser a = Parser { runParser :: String -> Maybe (a, String) }

-- | Succeed on the first input character iff it satisfies the predicate,
-- consuming exactly that character.
satisfy :: (Char -> Bool) -> Parser Char
satisfy p = Parser go
  where
    go (c:rest)
      | p c       = Just (c, rest)
      | otherwise = Nothing
    go []         = Nothing
-- | Parse exactly the given character.
char :: Char -> Parser Char
char c = satisfy (== c)
-- | Parse a non-empty run of leading digits as a positive 'Integer'.
posInt :: Parser Integer
posInt = Parser $ \input ->
  case span isDigit input of
    ("", _)        -> Nothing
    (digits, rest) -> Just (read digits, rest)
-- | Lift a transformation of the raw parse function into a
-- transformation of 'Parser's (unwrap, transform, rewrap).
-- Adds the previously missing top-level type signature.
inParser :: ((String -> Maybe (a, String)) -> String -> Maybe (b, String))
         -> Parser a
         -> Parser b
inParser f = Parser . f . runParser
-- | Apply a function to the first component of a pair, leaving the
-- second untouched.
first :: (a -> b) -> (a,c) -> (b,c)
first g (a, c) = (g a, c)
-- Map over the parse result: the stacked 'fmap's reach through the
-- Maybe, then the (result, rest) pair via 'first', after unwrapping the
-- raw function with 'inParser'.
instance Functor Parser where
  fmap = inParser . fmap . fmap . first
-- 'pure' consumes no input; '<*>' runs the function parser first and
-- the argument parser on whatever input remains.
instance Applicative Parser where
  pure a = Parser (\s -> Just (a, s))
  (Parser fp) <*> xp = Parser $ \s ->
    case fp s of
      Nothing -> Nothing
      Just (f,s') -> runParser (f <$> xp) s'
-- 'empty' always fails; 'p1 <|> p2' tries p1 and falls back to p2
-- (delegating to Maybe's Alternative, which keeps the first Just).
instance Alternative Parser where
  empty = Parser (const Nothing)
  Parser p1 <|> Parser p2 = Parser $ liftA2 (<|>) p1 p2
| cgag/cis-194-solutions | src/HW11/AParser.hs | bsd-3-clause | 1,094 | 0 | 13 | 312 | 498 | 260 | 238 | 32 | 2 |
{-# LANGUAGE OverloadedStrings, CPP #-}
module ExceptionSpec (main, spec) where
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative
#endif
import Control.Monad
import Network.HTTP
import Network.Stream
import Network.HTTP.Types hiding (Header)
import Network.Wai hiding (Response)
import Network.Wai.Internal (Request(..))
import Network.Wai.Handler.Warp
import Test.Hspec
import Control.Exception
import qualified Data.Streaming.Network as N
import Control.Concurrent.Async (withAsync)
import Network.Socket (sClose)
-- | Run the spec with hspec's default runner.
main :: IO ()
main = hspec spec
-- | Bind a random TCP port on localhost, serve 'testApp' on it with Warp
-- in a background 'Async', and hand the chosen port to the action.  The
-- socket is released via 'bracket'; the server is cancelled when
-- 'withAsync' unwinds.
-- NOTE(review): 'sClose' is the old name for Network.Socket's close —
-- confirm before bumping the network dependency.
withTestServer :: (Int -> IO a) -> IO a
withTestServer inner = bracket
    (N.bindRandomPortTCP "127.0.0.1")
    (sClose . snd)
    $ \(prt, lsocket) -> do
      withAsync (runSettingsSocket defaultSettings lsocket testApp)
        $ \_ -> inner prt
-- | Application with one route per way of blowing up while producing a
-- response (each uses 'undefined'/'fail' in a different position); any
-- other request is answered with 200 "foo".
testApp :: Application
testApp (Network.Wai.Internal.Request {pathInfo = [x]}) f
    | x == "statusError" =
        -- response status is bottom
        f $ responseLBS undefined [] "foo"
    | x == "headersError" =
        -- the whole header list is bottom
        f $ responseLBS ok200 undefined "foo"
    | x == "headerError" =
        -- a single header element is bottom
        f $ responseLBS ok200 [undefined] "foo"
    | x == "bodyError" =
        -- the body is bottom
        f $ responseLBS ok200 [] undefined
    | x == "ioException" = do
        -- throws an IOException from inside the handler
        void $ fail "ioException"
        f $ responseLBS ok200 [] "foo"
testApp _ f =
    f $ responseLBS ok200 [] "foo"
-- Only the ioException case is currently active; the others are kept in
-- the comment block below for reference.
spec :: Spec
spec = describe "responds even if there is an exception" $ do
    {- Disabling these tests. We can consider forcing evaluation in Warp.
    it "statusError" $ do
        sc <- rspCode <$> sendGET "http://127.0.0.1:2345/statusError"
        sc `shouldBe` (5,0,0)
    it "headersError" $ do
        sc <- rspCode <$> sendGET "http://127.0.0.1:2345/headersError"
        sc `shouldBe` (5,0,0)
    it "headerError" $ do
        sc <- rspCode <$> sendGET "http://127.0.0.1:2345/headerError"
        sc `shouldBe` (5,0,0)
    it "bodyError" $ do
        sc <- rspCode <$> sendGET "http://127.0.0.1:2345/bodyError"
        sc `shouldBe` (5,0,0)
    -}
    -- An IOException raised in the handler must yield a 5xx response.
    it "ioException" $ withTestServer $ \prt -> do
        sc <- rspCode <$> sendGET (concat $ ["http://127.0.0.1:", show prt, "/ioException"])
        sc `shouldBe` (5,0,0)
----------------------------------------------------------------
-- | GET a URL with no extra request headers.
sendGET :: String -> IO (Response String)
sendGET url = sendGETwH url []
-- | GET a URL with the given request headers.
sendGETwH :: String -> [Header] -> IO (Response String)
sendGETwH url hdr = unResult $ simpleHTTP $ (getRequest url) { rqHeaders = hdr }
-- | Unwrap a 'Result', turning a connection error into a test failure.
unResult :: IO (Result (Response String)) -> IO (Response String)
unResult action = do
    res <- action
    case res of
        Right rsp -> return rsp
        Left _ -> error "Connection error"
| AndrewRademacher/wai | warp/test/ExceptionSpec.hs | mit | 2,716 | 0 | 18 | 659 | 685 | 359 | 326 | 54 | 2 |
{-# LANGUAGE CPP #-}
#if !defined(TESTING) && __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.IntMap.Lazy
-- Copyright : (c) Daan Leijen 2002
-- (c) Andriy Palamarchuk 2008
-- License : BSD-style
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of maps from integer keys to values
-- (dictionaries).
--
-- API of this module is strict in the keys, but lazy in the values.
-- If you need value-strict maps, use "Data.IntMap.Strict" instead.
-- The 'IntMap' type itself is shared between the lazy and strict modules,
-- meaning that the same 'IntMap' value can be passed to functions in
-- both modules (although that is rarely needed).
--
-- These modules are intended to be imported qualified, to avoid name
-- clashes with Prelude functions, e.g.
--
-- > import Data.IntMap.Lazy (IntMap)
-- > import qualified Data.IntMap.Lazy as IntMap
--
-- The implementation is based on /big-endian patricia trees/. This data
-- structure performs especially well on binary operations like 'union'
-- and 'intersection'. However, my benchmarks show that it is also
-- (much) faster on insertions and deletions when compared to a generic
-- size-balanced map implementation (see "Data.Map").
--
-- * Chris Okasaki and Andy Gill, \"/Fast Mergeable Integer Maps/\",
-- Workshop on ML, September 1998, pages 77-86,
-- <http://citeseer.ist.psu.edu/okasaki98fast.html>
--
-- * D.R. Morrison, \"/PATRICIA -- Practical Algorithm To Retrieve
-- Information Coded In Alphanumeric/\", Journal of the ACM, 15(4),
-- October 1968, pages 514-534.
--
-- Operation comments contain the operation time complexity in
-- the Big-O notation <http://en.wikipedia.org/wiki/Big_O_notation>.
-- Many operations have a worst-case complexity of /O(min(n,W))/.
-- This means that the operation can become linear in the number of
-- elements with a maximum of /W/ -- the number of bits in an 'Int'
-- (32 or 64).
-----------------------------------------------------------------------------
module Data.IntMap.Lazy (
-- * Strictness properties
-- $strictness
-- * Map type
#if !defined(TESTING)
IntMap, Key -- instance Eq,Show
#else
IntMap(..), Key -- instance Eq,Show
#endif
-- * Operators
, (!), (\\)
-- * Query
, IM.null
, size
, member
, notMember
, IM.lookup
, findWithDefault
, lookupLT
, lookupGT
, lookupLE
, lookupGE
-- * Construction
, empty
, singleton
-- ** Insertion
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
-- ** Delete\/Update
, delete
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
-- * Combine
-- ** Union
, union
, unionWith
, unionWithKey
, unions
, unionsWith
-- ** Difference
, difference
, differenceWith
, differenceWithKey
-- ** Intersection
, intersection
, intersectionWith
, intersectionWithKey
-- ** Universal combining function
, mergeWithKey
-- * Traversal
-- ** Map
, IM.map
, mapWithKey
, traverseWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeys
, mapKeysWith
, mapKeysMonotonic
-- * Folds
, IM.foldr
, IM.foldl
, foldrWithKey
, foldlWithKey
, foldMapWithKey
-- ** Strict folds
, foldr'
, foldl'
, foldrWithKey'
, foldlWithKey'
-- * Conversion
, elems
, keys
, assocs
, keysSet
, fromSet
-- ** Lists
, toList
, fromList
, fromListWith
, fromListWithKey
-- ** Ordered lists
, toAscList
, toDescList
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
-- * Filter
, IM.filter
, filterWithKey
, partition
, partitionWithKey
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, split
, splitLookup
, splitRoot
-- * Submap
, isSubmapOf, isSubmapOfBy
, isProperSubmapOf, isProperSubmapOfBy
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, updateMin
, updateMax
, updateMinWithKey
, updateMaxWithKey
, minView
, maxView
, minViewWithKey
, maxViewWithKey
-- * Debugging
, showTree
, showTreeWith
) where
import Data.IntMap.Base as IM
-- $strictness
--
-- This module satisfies the following strictness property:
--
-- * Key arguments are evaluated to WHNF
--
-- Here are some examples that illustrate the property:
--
-- > insertWith (\ new old -> old) undefined v m == undefined
-- > insertWith (\ new old -> old) k undefined m == OK
-- > delete undefined m == undefined
| jwiegley/ghc-release | libraries/containers/Data/IntMap/Lazy.hs | gpl-3.0 | 5,007 | 0 | 5 | 1,290 | 431 | 317 | 114 | 102 | 0 |
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Metad.Config where
import Control.Arrow (second)
import qualified Data.List as List (isPrefixOf)
import qualified Data.Map as Map
import Text.JSON
import qualified Text.JSON as JSON
import Ganeti.Constants as Constants
import Ganeti.Metad.Types
-- | Merges two instance configurations into one.
--
-- In the case where instance IPs (i.e., map keys) are repeated, the
-- old instance configuration is thrown away by 'Map.union' and
-- replaced by the new configuration. As a result, the old private
-- and secret OS parameters are completely lost.
mergeConfig :: InstanceParams -> InstanceParams -> InstanceParams
mergeConfig cfg1 cfg2 = cfg2 `Map.union` cfg1  -- Map.union is left-biased: cfg2 (new) wins
-- | Extracts the OS parameters (public, private, secret) from a JSON
-- object.
--
-- This function checks whether the OS parameters are in fact a JSON
-- object.
-- 'key' is the JSON field to read; 'msg' is the human-readable name of
-- the parameter class, used only in the error message.
getOsParams :: String -> String -> JSObject JSValue -> Result (JSObject JSValue)
getOsParams key msg jsonObj =
  case lookup key (fromJSObject jsonObj) of
    Nothing -> Error $ "Could not find " ++ msg ++ " OS parameters"
    Just x -> readJSON x
-- | Public OS parameters, stored under "osparams".
getPublicOsParams :: JSObject JSValue -> Result (JSObject JSValue)
getPublicOsParams = getOsParams "osparams" "public"
-- | Private OS parameters, stored under "osparams_private".
getPrivateOsParams :: JSObject JSValue -> Result (JSObject JSValue)
getPrivateOsParams = getOsParams "osparams_private" "private"
-- | Secret OS parameters, stored under "osparams_secret".
getSecretOsParams :: JSObject JSValue -> Result (JSObject JSValue)
getSecretOsParams = getOsParams "osparams_secret" "secret"
-- | Merges the OS parameters (public, private, secret) in a single
-- data structure containing all parameters and their visibility.
--
-- Example:
--
-- > { "os-image": ["http://example.com/disk.img", "public"],
-- > "os-password": ["mypassword", "secret"] }
makeInstanceParams
  :: JSObject JSValue -> JSObject JSValue -> JSObject JSValue -> JSValue
makeInstanceParams pub priv sec =
  JSObject . JSON.toJSObject $
  addVisibility "public" pub ++
  addVisibility "private" priv ++
  addVisibility "secret" sec
  where
    key = JSString . JSON.toJSString
    -- Tag each parameter with its visibility by turning the value into a
    -- two-element array [value, visibility].
    addVisibility param params =
      map (second (JSArray . (:[key param]))) (JSON.fromJSObject params)
-- | Read an instance configuration and merge its public, private and
-- secret OS parameters into a single visibility-tagged JSON object.
getOsParamsWithVisibility :: JSValue -> Result JSValue
getOsParamsWithVisibility json =
  do obj <- readJSON json
     publicOsParams <- getPublicOsParams obj
     privateOsParams <- getPrivateOsParams obj
     secretOsParams <- getSecretOsParams obj
     Ok $ makeInstanceParams publicOsParams privateOsParams secretOsParams
-- | Finds the IP address of the instance communication NIC in the
-- instance's NICs.
getInstanceCommunicationIp :: JSObject JSValue -> Result String
getInstanceCommunicationIp jsonObj =
  getNics >>= getInstanceCommunicationNic >>= getIp
  where
    -- Extract the "ip" field of the chosen NIC; it must be a string.
    getIp nic =
      case lookup "ip" (fromJSObject nic) of
        Nothing -> Error "Could not find instance communication IP"
        Just (JSString ip) -> Ok (JSON.fromJSString ip)
        _ -> Error "Instance communication IP is not a string"
    -- Scan the NIC list for the first NIC whose name starts with the
    -- instance-communication prefix.
    getInstanceCommunicationNic [] =
      Error "Could not find instance communication NIC"
    getInstanceCommunicationNic (JSObject nic:nics) =
      case lookup "name" (fromJSObject nic) of
        Just (JSString name)
          | Constants.instanceCommunicationNicPrefix
            `List.isPrefixOf` JSON.fromJSString name ->
              Ok nic
        _ -> getInstanceCommunicationNic nics
    getInstanceCommunicationNic _ =
      Error "Found wrong data in instance NICs"
    -- The instance configuration must contain a "nics" array.
    getNics =
      case lookup "nics" (fromJSObject jsonObj) of
        Nothing -> Error "Could not find OS parameters key 'nics'"
        Just (JSArray nics) -> Ok nics
        _ -> Error "Instance nics is not an array"
-- | Extracts the OS parameters from the instance's parameters and
-- returns a data structure containing all the OS parameters and their
-- visibility indexed by the instance's IP address which is used in
-- the instance communication NIC.
getInstanceParams :: JSValue -> Result (String, InstanceParams)
getInstanceParams json =
  case json of
    JSObject jsonObj -> do
      -- The instance must carry a string-valued "name" field.
      name <- case lookup "name" (fromJSObject jsonObj) of
                Nothing -> Error "Could not find instance name"
                Just (JSString x) -> Ok (JSON.fromJSString x)
                _ -> Error "Name is not a string"
      -- Index the whole configuration by the communication NIC's IP.
      ip <- getInstanceCommunicationIp jsonObj
      Ok (name, Map.fromList [(ip, json)])
    _ ->
      Error "Expecting a dictionary"
| dimara/ganeti | src/Ganeti/Metad/Config.hs | bsd-2-clause | 5,732 | 0 | 17 | 1,110 | 914 | 462 | 452 | 74 | 8 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.