| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -fglasgow-exts #-}
{-# LANGUAGE BangPatterns #-}
module HEP.ModelScan.MSSMScan.Read where
import Data.List
import HEP.ModelScan.MSSMScan.Model
import HEP.Physics.MSSM.OutputPhys
import HEP.ModelScan.MSSMScan.Pattern
-- import HROOT
import qualified Data.ListLike as LL
import qualified Data.Iteratee.ListLike as Iter
import qualified Data.Map as M
import Control.Monad.IO.Class
class PrettyPrintable a where
pretty_print :: a -> IO ()
instance PrettyPrintable PatternOccurrenceList where
pretty_print (PO lst) = mapM_ putStrLn lst'
where lst' = map formatting lst
formatting x = show (fst x) ++ ":" ++ show (snd x)
type ModelCountIO a = Iter.Iteratee [FullModel a] IO
print_fullmodel fullmodel = show (idnum fullmodel) ++ " : " ++
show (inputparam fullmodel) ++ " | " ++
show (outputphys fullmodel)
prettyprint :: (Model a) => FullModel a -> IO ()
prettyprint x = putStrLn $ "id =" ++ show (idnum x) ++ " : "
++ show (inputparam x)
++ " : " ++ show ( take 7 $ (sortmassassoc.makeassocmass.outputphys) x )
assoc_sort :: (Model a) => (Int,(ModelInput a,OutputPhys)) -> FullModel a
assoc_sort (id1,(input1,output1)) = FullModel id1 input1 output1
fullassoc rparityassoc nonsmassoc
where fullassoc = map fst $ (sortmassassoc.makeassocmass) output1
rparityassoc = filter isrparityodd fullassoc
nonsmassoc = filter isNonSM fullassoc
--- tidyup sort
tidyup_1st_2nd_gen list =
let (lst1,lst2) = break isFstOrSndGen list
in if lst2 == []
then list
else let (headtype, headtypelst) = headcheck $ head lst2
(lst1',lst2') = span (flip elem headtypelst) lst2
in lst1 ++ headtype : tidyup_1st_2nd_gen lst2'
headcheck x | x `elem` kind_SupL = (SupL , kind_SupL)
| x `elem` kind_SdownL = (SdownL, kind_SdownL)
| x `elem` kind_SupR = (SupR , kind_SupR)
| x `elem` kind_SdownR = (SdownR, kind_SdownR)
| x `elem` kind_SleptonL = (SelectronL, kind_SleptonL)
| x `elem` kind_SleptonR = (SelectronR, kind_SleptonR)
| x `elem` kind_Sneutrino = (SeneutrinoL, kind_Sneutrino)
| otherwise = (undefined,undefined)
fstOrSndGen = [SupL,SupR,SdownL,SdownR,SstrangeL,SstrangeR,ScharmL,ScharmR
,SelectronL,SelectronR,SmuonL,SmuonR,SeneutrinoL,SmuneutrinoL]
isFstOrSndGen x = elem x fstOrSndGen
kind_SupL = [SupL,ScharmL]
kind_SdownL = [SdownL,SstrangeL]
kind_SupR = [SupR,ScharmR]
kind_SdownR = [SdownR,SstrangeR]
kind_SleptonL = [SelectronL,SmuonL]
kind_SleptonR = [SelectronR,SmuonR]
kind_Sneutrino = [SeneutrinoL,SmuneutrinoL]
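-- An illustrative example of the tidy-up (added here as a sketch; the
-- constructor names are those used above from HEP.ModelScan.MSSMScan.Model):
-- each run of same-kind 1st/2nd-generation states is replaced by the
-- first-generation representative chosen by headcheck (SupL for
-- {SupL,ScharmL}, SelectronL for {SelectronL,SmuonL}, and so on), e.g.
--
--   tidyup_1st_2nd_gen [Neutralino1, ScharmL, SupL, Gluino]
--     == [Neutralino1, SupL, Gluino]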
--- cut functions.
applycut :: (Model a) => [(OutputPhys -> Bool)]
-> [(FullModel a-> Bool) ]
-> FullModel a
-> Bool
applycut cuts compcuts x = let boollst1 = map (\f->f ph) cuts
boollst2 = map (\f->f x) compcuts
in and boollst1
&& and boollst2
where ph = outputphys x
---- pattern ordering
priority :: M.Map MassType Int
priority = M.fromList [ (Neutralino1,1), (Neutralino2,2), (Chargino1,3)
, (Stau1,4), (Gluino,5), (HeavyHiggs,6), (AHiggs,7)
, (CHiggs,8), (Stop1,9), (SelectronR,9) ]
massorder x y = let lookupresult = (M.lookup x priority, M.lookup y priority)
in case lookupresult of
(Nothing,Nothing) -> compare x y
(Nothing,Just _) -> LT
(Just _, Nothing) -> GT
(Just a, Just b) -> compare b a
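-- Illustrative behaviour of massorder (added as a sketch; note the flipped
-- 'compare b a' above): types present in 'priority' sort by descending
-- priority value, and a type absent from the map sorts before one that is
-- present, e.g.
--   massorder Neutralino1 Gluino == GT   -- compare 5 1
--   massorder SdownL      Gluino == LT   -- SdownL has no priority entry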
patternorder (x1:x2:xs) (y1:y2:ys) = let x1y1order = flip massorder x1 y1
x2y2order = flip massorder x2 y2
in case x1y1order of
EQ -> x2y2order
otherwise -> x1y1order
patternorder _ _ = EQ
patternoccorder (patt1,occ1) (patt2,occ2) =
let patorder = patternorder patt1 patt2
in case patorder of
EQ -> compare occ2 occ1
_ -> patorder
| wavewave/MSSMScan | src/HEP/ModelScan/MSSMScan/Read.hs | bsd-2-clause | 4,374 | 0 | 14 | 1,413 | 1,365 | 745 | 620 | 86 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Handler.Admin where
import Import
import Yesod.Auth
{-
profileForm :: Html -> MForm App App (FormResult User, Widget)
profileForm = renderDivs $ User
<$> areq textField "Name" Nothing
<*> areq textField "Email address" Nothing
<*> areq textField "Access Level" Nothing
-}
getAdminProfileR :: Handler RepHtml
getAdminProfileR = do
credentials <- requireAuthId
defaultLayout $ do
$(widgetFile "adminprofile")
| madebyjeffrey/socrsite | Handler/Admin.hs | bsd-2-clause | 483 | 0 | 12 | 93 | 53 | 28 | 25 | -1 | -1 |
module Main where
import Control.Monad.Reader
import Control.Monad (when)
import Data.Configurator
import Data.Traversable (traverse)
import Data.Either
import qualified Data.Foldable as DF
import Data.Maybe
import qualified Data.Map as M
import qualified Data.Aeson as A
import Options.Applicative
import Safe (headMay)
import Text.Printf (printf)
import Network.ImageTrove.MainBruker
main = imageTroveMain
| carlohamalainen/imagetrove-uploader | MainBruker.hs | bsd-2-clause | 411 | 0 | 5 | 48 | 103 | 67 | 36 | 15 | 1 |
module Test.Day5 where
import Day5 as D5
import Test.Tasty
import Test.Tasty.HUnit
day5 :: TestTree
day5 = testGroup "Doesn't He Have Intern-Elves For This?" [part1, part2]
part1 :: TestTree
part1 = testGroup "Part 1" [p1Tests, p1Puzzle]
p1Tests :: TestTree
p1Tests = testGroup "Test Cases" $
[
testCase "Example 1" $ True @?= D5.nice "ugknbfddgicrmopn"
, testCase "Example 2" $ True @?= D5.nice "aaa"
, testCase "Example 3" $ False @?= D5.nice "jchzalrnumimnmhp"
, testCase "Example 4" $ False @?= D5.nice "haegwjzuvuyypxyu"
, testCase "Example 5" $ False @?= D5.nice "dvszwmarrgswjxmb"
]
p1Puzzle :: TestTree
p1Puzzle = testCaseSteps "Puzzle" $ \_ -> do
strings <- fmap lines $ readFile "input/day5.txt"
255 @?= length (filter nice strings)
part2 :: TestTree
part2 = testGroup "Part 2" [p2Tests, p2Puzzle]
p2Tests :: TestTree
p2Tests = testGroup "Test Cases" $
[
testCase "Example 1" $ True @?= D5.nice2 "qjhvhtzxzqqjkmpb"
, testCase "Example 2" $ True @?= D5.nice2 "xxyxx"
, testCase "Example 3" $ False @?= D5.nice2 "aaa"
, testCase "Example 4" $ False @?= D5.nice2 "uurcxstgmygtbstg"
, testCase "Example 5" $ False @?= D5.nice2 "ieodomkazucvgmuy"
]
p2Puzzle :: TestTree
p2Puzzle = testCaseSteps "Puzzle" $ \_ -> do
strings <- fmap lines $ readFile "input/day5.txt"
55 @?= length (filter nice2 strings)
| taylor1791/adventofcode | 2015/test/Test/Day5.hs | bsd-2-clause | 1,357 | 0 | 12 | 253 | 423 | 213 | 210 | 34 | 1 |
--
-- Copyright (c) 2009 - 2010 Brendan Hickey - http://bhickey.net
-- New BSD License (see http://www.opensource.org/licenses/bsd-license.php)
--
module Data.Heap.Skew
(SkewHeap, head, tail, merge, singleton, empty, null, fromList, toList, insert)
where
import Prelude hiding (head, tail, null)
data (Ord a) => SkewHeap a =
SkewLeaf
| SkewHeap a (SkewHeap a) (SkewHeap a) deriving (Eq, Ord)
empty :: (Ord a) => SkewHeap a
empty = SkewLeaf
null :: (Ord a) => SkewHeap a -> Bool
null SkewLeaf = True
null _ = False
singleton :: (Ord a) => a -> SkewHeap a
singleton n = SkewHeap n SkewLeaf SkewLeaf
insert :: (Ord a) => a -> SkewHeap a -> SkewHeap a
insert a h = merge h (singleton a)
merge :: (Ord a) => SkewHeap a -> SkewHeap a -> SkewHeap a
merge SkewLeaf n = n
merge n SkewLeaf = n
merge h1 h2 = foldl1 assemble $ reverse $ listMerge head (cutRight h1) (cutRight h2)
listMerge :: (Ord b) => (a -> b) -> [a] -> [a] -> [a]
listMerge _ [] s = s
listMerge _ f [] = f
listMerge c f@(h1:t1) s@(h2:t2) =
if c h1 <= c h2
then h1 : listMerge c t1 s
else h2 : listMerge c f t2
cutRight :: (Ord a) => SkewHeap a -> [SkewHeap a]
cutRight SkewLeaf = []
cutRight (SkewHeap a l r) = SkewHeap a l SkewLeaf : cutRight r
-- assumes h1 >= h2, merge relies on this
assemble :: (Ord a) => SkewHeap a -> SkewHeap a -> SkewHeap a
assemble h1 (SkewHeap a l SkewLeaf) = SkewHeap a h1 l
assemble _ _ = error "invalid heap assembly"
head :: (Ord a) => SkewHeap a -> a
head SkewLeaf = error "head of empty heap"
head (SkewHeap a _ _) = a
tail :: (Ord a) => SkewHeap a -> SkewHeap a
tail SkewLeaf = error "tail of empty heap"
tail (SkewHeap _ l r) = merge l r
toList :: (Ord a) => SkewHeap a -> [a]
toList SkewLeaf = []
toList (SkewHeap n l r) = n : toList (merge l r)
fromList :: (Ord a) => [a] -> SkewHeap a
fromList [] = SkewLeaf
fromList l = mergeList (map singleton l)
where mergeList [a] = a
mergeList x = mergeList (mergePairs x)
mergePairs (a:b:c) = merge a b : mergePairs c
mergePairs x = x
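-- A minimal usage sketch (added for illustration; these are the module's own
-- exports read as a min-heap, and the results shown are what that reading
-- implies rather than output copied from a test run):
--
--   >>> toList (fromList [5, 1, 4, 2, 3 :: Int])
--   [1,2,3,4,5]
--
--   >>> head (insert 0 (fromList [5, 1, 4 :: Int]))
--   0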
| bhickey/TreeStructures | Data/Heap/Skew.hs | bsd-3-clause | 2,079 | 0 | 10 | 501 | 922 | 476 | 446 | 48 | 3 |
module Examples.Example5 where
import System.FilePath
import System.Directory
import System.Posix
import Control.Monad
import Graphics.UI.VE
import ErrVal
import Examples.Utils(testE)
type UserCode = Int
type RoleCode = Int
data EnvStruct = EnvStruct {
user :: UserCode,
role :: RoleCode
}
userCodeVE, roleCodeVE :: VE (IOE FilePath) Int
userCodeVE = EnumVE (IOE (dirContents "users"))
roleCodeVE = EnumVE (IOE (dirContents "roles"))
envStructVE :: VE (IOE FilePath) EnvStruct
envStructVE = mapVE toStruct fromStruct
( label "user" userCodeVE
.*. label "role" roleCodeVE
)
where
toStruct (a,b) = eVal (EnvStruct a b)
fromStruct (EnvStruct a b) = (a,b)
dirContents :: String -> FilePath -> IO [String]
dirContents subdir root = do
putStrLn ("Reading enums from " ++ dir)
dirExists <- fileExist dir
if dirExists then filterM isRegFile =<< getDirectoryContents dir
else return []
where
isRegFile fp = fmap isRegularFile (getFileStatus (combine dir fp))
dir = combine root subdir
test = testE envStructVE "/tmp"
| timbod7/veditor | demo/Examples/Example5.hs | bsd-3-clause | 1,113 | 0 | 11 | 250 | 357 | 188 | 169 | 31 | 2 |
module HasOffers.API.Affiliate.Offer
where
import Data.Text
import GHC.Generics
import Data.Aeson
import Control.Applicative
import Network.HTTP.Client
import qualified Data.ByteString.Char8 as BS
import HasOffers.API.Common
--------------------------------------------------------------------------------
acceptOfferTermsAndConditions params =
Call "Affiliate_Offer"
"acceptOfferTermsAndConditions"
"POST"
[ Param "offer_id" True $ getParam params 0
]
findAll params =
Call "Affiliate_Offer"
"findAll"
"GET"
[ Param "filters" False $ getParam params 0
, Param "sort" False $ getParam params 1
, Param "limit" False $ getParam params 2
, Param "page" False $ getParam params 3
, Param "fields" False $ getParam params 4
, Param "contain" False $ getParam params 5
]
findAllFeaturedOfferIds params =
Call "Affiliate_Offer"
"findAllFeaturedOfferIds"
"POST"
[
]
findByCreativeType params =
Call "Affiliate_Offer"
"findByCreativeType"
"POST"
[ Param "type" False $ getParam params 0
]
findById params =
Call "Affiliate_Offer"
"findById"
"GET"
[ Param "id" True $ getParam params 0
, Param "fields" False $ getParam params 1
]
findMyApprovedOffers params =
Call "Affiliate_Offer"
"findMyApprovedOffers"
"GET"
[ Param "filters" False $ getParam params 0
, Param "sort" False $ getParam params 1
, Param "limit" False $ getParam params 2
, Param "page" False $ getParam params 3
, Param "fields" False $ getParam params 4
, Param "contain" False $ getParam params 5
]
findMyOffers params =
Call "Affiliate_Offer"
"findMyOffers"
"GET"
[ Param "filters" False $ getParam params 0
, Param "sort" False $ getParam params 1
, Param "limit" False $ getParam params 2
, Param "page" False $ getParam params 3
, Param "fields" False $ getParam params 4
, Param "contain" False $ getParam params 5
]
generateTrackingLink params =
Call "Affiliate_Offer"
"generateTrackingLink"
"POST"
[ Param "offer_id" True $ getParam params 0
, Param "params" False $ getParam params 1
, Param "options" False $ getParam params 2
]
getApprovalQuestions params =
Call "Affiliate_Offer"
"getApprovalQuestions"
"GET"
[ Param "offer_id" True $ getParam params 0
]
getCategories params =
Call "Affiliate_Offer"
"getCategories"
"GET"
[ Param "ids" True $ getParam params 0
]
getPayoutDetails params =
Call "Affiliate_Offer"
"getPayoutDetailss"
"GET"
[ Param "offer_id" True $ getParam params 0
]
getPixels params =
Call "Affiliate_Offer"
"getPixels"
"GET"
[ Param "id" True $ getParam params 0
, Param "status" False $ getParam params 1
]
getTargetCountries params =
Call "Affiliate_Offer"
"getTargetCountries"
"GET"
[ Param "ids" True $ getParam params 0
]
getThumbnail params =
Call "Affiliate_Offer"
"getThumbnail"
"GET"
[ Param "ids" True $ getParam params 0
]
requestOfferAccess params =
Call "Affiliate_Offer"
"requestOfferAccess"
"POST"
[ Param "offer_id" True $ getParam params 0
, Param "answers" False $ getParam params 1
]
| kelecorix/api-hasoffers | src/HasOffers/API/Affiliate/Offer.hs | bsd-3-clause | 3,614 | 0 | 8 | 1,132 | 885 | 427 | 458 | 103 | 1 |
module Exercises where
addOneIfOdd :: (Integral a) => a -> a
addOneIfOdd n = case odd n of
True -> f n
False -> n
where f = (\x -> x + 1)
addFive :: Integer -> Integer -> Integer
addFive = (\x y -> (if x > y then y else x) + 5) :: Integer -> Integer -> Integer
mflip :: (t1 -> t2 -> a) -> t2 -> t1 -> a
mflip f x y = f y x
functionC :: (Ord a) => a -> a -> a
functionC x y =
case comparison of
True -> x
False -> y
where comparison = x > y
-- Exercises: Case Practice
ifEvenAdd2 :: (Integral a) => a -> a
ifEvenAdd2 n =
case isEven of
True -> n + 2
False -> n
where isEven = even n
nums :: (Num a, Num a1, Ord a) => a -> a1
nums x =
case compare x 0 of
LT -> -1
EQ -> 0
GT -> 1
-- Exercises: Artful Dodgy
dodgy :: (Num a) => a -> a -> a
dodgy x y = x + y * 10
oneIsOne :: (Num a) => a -> a
oneIsOne = dodgy 1
oneIsTwo :: (Num a) => a -> a
oneIsTwo = (flip dodgy) 2
-- Chapter Exercises: Let's write code
tensDigit :: Integral a => a -> a
tensDigit x = d
where xLast = x `div` 10
d = xLast `mod` 10
tensDigit' :: Integral a => a -> a
tensDigit' x = d
where (xLast,_) = x `divMod` 10
(_,d) = xLast `divMod` 10
hunsD :: Integral a => a -> a
hunsD x = d
where (xLast,_) = x `divMod` 100
(_,d) = xLast `divMod` 10
foldBold1 :: a -> a -> Bool -> a
foldBold1 x y flag
| flag == True = x
| otherwise = y
foldBold2 :: a -> a -> Bool -> a
foldBold2 x y flag =
case flag of
True -> x
False -> y
g :: (a -> b) -> (a, c) -> (b, c)
g f (a, c) = (f a, c)
| dsaenztagarro/haskellbook | src/chapter7/Exercises.hs | bsd-3-clause | 1,546 | 0 | 10 | 465 | 779 | 419 | 360 | 57 | 3 |
module Main where
import Test.Framework
import Tests.Compiler
import Tests.Property
main :: IO ()
main = defaultMain [ compilerTests
, propertyTests
]
| deadfoxygrandpa/Elm | tests/hs/CompilerTest.hs | bsd-3-clause | 193 | 0 | 6 | 63 | 43 | 25 | 18 | 7 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1998
\section[Literal]{@Literal@: Machine literals (unboxed, of course)}
-}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module Literal
(
-- * Main data type
Literal(..) -- Exported to ParseIface
-- ** Creating Literals
, mkMachInt, mkMachWord
, mkMachInt64, mkMachWord64
, mkMachFloat, mkMachDouble
, mkMachChar, mkMachString
, mkLitInteger
-- ** Operations on Literals
, literalType
, absentLiteralOf
, pprLiteral
-- ** Predicates on Literals and their contents
, litIsDupable, litIsTrivial, litIsLifted
, inIntRange, inWordRange, tARGET_MAX_INT, inCharRange
, isZeroLit
, litFitsInChar
, litValue
-- ** Coercions
, word2IntLit, int2WordLit
, narrow8IntLit, narrow16IntLit, narrow32IntLit
, narrow8WordLit, narrow16WordLit, narrow32WordLit
, char2IntLit, int2CharLit
, float2IntLit, int2FloatLit, double2IntLit, int2DoubleLit
, nullAddrLit, float2DoubleLit, double2FloatLit
) where
#include "HsVersions.h"
import TysPrim
import PrelNames
import Type
import TyCon
import Outputable
import FastString
import BasicTypes
import Binary
import Constants
import DynFlags
import UniqFM
import Util
import Data.ByteString (ByteString)
import Data.Int
import Data.Word
import Data.Char
import Data.Data ( Data )
import Numeric ( fromRat )
{-
************************************************************************
* *
\subsection{Literals}
* *
************************************************************************
-}
-- | So-called 'Literal's are one of:
--
-- * An unboxed (/machine/) literal ('MachInt', 'MachFloat', etc.),
-- which is presumed to be surrounded by appropriate constructors
-- (@Int#@, etc.), so that the overall thing makes sense.
--
-- * The literal derived from the label mentioned in a \"foreign label\"
-- declaration ('MachLabel')
data Literal
= ------------------
-- First the primitive guys
MachChar Char -- ^ @Char#@ - at least 31 bits. Create with 'mkMachChar'
| MachStr ByteString -- ^ A string-literal: stored and emitted
-- UTF-8 encoded, we'll arrange to decode it
-- at runtime. Also emitted with a @'\0'@
-- terminator. Create with 'mkMachString'
| MachNullAddr -- ^ The @NULL@ pointer, the only pointer value
-- that can be represented as a Literal. Create
-- with 'nullAddrLit'
| MachInt Integer -- ^ @Int#@ - at least @WORD_SIZE_IN_BITS@ bits. Create with 'mkMachInt'
| MachInt64 Integer -- ^ @Int64#@ - at least 64 bits. Create with 'mkMachInt64'
| MachWord Integer -- ^ @Word#@ - at least @WORD_SIZE_IN_BITS@ bits. Create with 'mkMachWord'
| MachWord64 Integer -- ^ @Word64#@ - at least 64 bits. Create with 'mkMachWord64'
| MachFloat Rational -- ^ @Float#@. Create with 'mkMachFloat'
| MachDouble Rational -- ^ @Double#@. Create with 'mkMachDouble'
| MachLabel FastString
(Maybe Int)
FunctionOrData
-- ^ A label literal. Parameters:
--
-- 1) The name of the symbol mentioned in the declaration
--
-- 2) The size (in bytes) of the arguments
-- the label expects. Only applicable with
-- @stdcall@ labels. @Just x@ => @\<x\>@ will
-- be appended to label name when emitting assembly.
| LitInteger Integer Type -- ^ Integer literals
-- See Note [Integer literals]
deriving Data
{-
Note [Integer literals]
~~~~~~~~~~~~~~~~~~~~~~~
An Integer literal is represented using, well, an Integer, to make it
easier to write RULEs for them. They also contain the Integer type, so
that e.g. literalType can return the right Type for them.
They only get converted into real Core,
mkInteger [c1, c2, .., cn]
during the CorePrep phase, although TidyPgm looks ahead at what the
core will be, so that it can see whether it involves CAFs.
When we initially build an Integer literal, notably when
deserialising it from an interface file (see the Binary instance
below), we don't have convenient access to the mkInteger Id. So we
just use an error thunk, and fill in the real Id when we do tcIfaceLit
in TcIface.
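An added illustration (not verbatim GHC output): a large source literal
such as 18446744073709551616 is carried around as

    LitInteger 18446744073709551616 ty   -- ty: the Integer type

all the way to CorePrep, which then rewrites it into the corresponding
mkInteger call.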
Binary instance
-}
instance Binary Literal where
put_ bh (MachChar aa) = do putByte bh 0; put_ bh aa
put_ bh (MachStr ab) = do putByte bh 1; put_ bh ab
put_ bh (MachNullAddr) = do putByte bh 2
put_ bh (MachInt ad) = do putByte bh 3; put_ bh ad
put_ bh (MachInt64 ae) = do putByte bh 4; put_ bh ae
put_ bh (MachWord af) = do putByte bh 5; put_ bh af
put_ bh (MachWord64 ag) = do putByte bh 6; put_ bh ag
put_ bh (MachFloat ah) = do putByte bh 7; put_ bh ah
put_ bh (MachDouble ai) = do putByte bh 8; put_ bh ai
put_ bh (MachLabel aj mb fod)
= do putByte bh 9
put_ bh aj
put_ bh mb
put_ bh fod
put_ bh (LitInteger i _) = do putByte bh 10; put_ bh i
get bh = do
h <- getByte bh
case h of
0 -> do
aa <- get bh
return (MachChar aa)
1 -> do
ab <- get bh
return (MachStr ab)
2 -> do
return (MachNullAddr)
3 -> do
ad <- get bh
return (MachInt ad)
4 -> do
ae <- get bh
return (MachInt64 ae)
5 -> do
af <- get bh
return (MachWord af)
6 -> do
ag <- get bh
return (MachWord64 ag)
7 -> do
ah <- get bh
return (MachFloat ah)
8 -> do
ai <- get bh
return (MachDouble ai)
9 -> do
aj <- get bh
mb <- get bh
fod <- get bh
return (MachLabel aj mb fod)
_ -> do
i <- get bh
-- See Note [Integer literals]
return $ mkLitInteger i (panic "Evaluated the place holder for mkInteger")
instance Outputable Literal where
ppr lit = pprLiteral (\d -> d) lit
instance Eq Literal where
a == b = case (a `compare` b) of { EQ -> True; _ -> False }
a /= b = case (a `compare` b) of { EQ -> False; _ -> True }
instance Ord Literal where
a <= b = case (a `compare` b) of { LT -> True; EQ -> True; GT -> False }
a < b = case (a `compare` b) of { LT -> True; EQ -> False; GT -> False }
a >= b = case (a `compare` b) of { LT -> False; EQ -> True; GT -> True }
a > b = case (a `compare` b) of { LT -> False; EQ -> False; GT -> True }
compare a b = cmpLit a b
{-
Construction
~~~~~~~~~~~~
-}
-- | Creates a 'Literal' of type @Int#@
mkMachInt :: DynFlags -> Integer -> Literal
mkMachInt dflags x = ASSERT2( inIntRange dflags x, integer x )
MachInt x
-- | Creates a 'Literal' of type @Word#@
mkMachWord :: DynFlags -> Integer -> Literal
mkMachWord dflags x = ASSERT2( inWordRange dflags x, integer x )
MachWord x
-- | Creates a 'Literal' of type @Int64#@
mkMachInt64 :: Integer -> Literal
mkMachInt64 x = MachInt64 x
-- | Creates a 'Literal' of type @Word64#@
mkMachWord64 :: Integer -> Literal
mkMachWord64 x = MachWord64 x
-- | Creates a 'Literal' of type @Float#@
mkMachFloat :: Rational -> Literal
mkMachFloat = MachFloat
-- | Creates a 'Literal' of type @Double#@
mkMachDouble :: Rational -> Literal
mkMachDouble = MachDouble
-- | Creates a 'Literal' of type @Char#@
mkMachChar :: Char -> Literal
mkMachChar = MachChar
-- | Creates a 'Literal' of type @Addr#@, which is appropriate for passing to
-- e.g. some of the \"error\" functions in GHC.Err such as @GHC.Err.runtimeError@
mkMachString :: String -> Literal
-- stored UTF-8 encoded
mkMachString s = MachStr (fastStringToByteString $ mkFastString s)
mkLitInteger :: Integer -> Type -> Literal
mkLitInteger = LitInteger
inIntRange, inWordRange :: DynFlags -> Integer -> Bool
inIntRange dflags x = x >= tARGET_MIN_INT dflags && x <= tARGET_MAX_INT dflags
inWordRange dflags x = x >= 0 && x <= tARGET_MAX_WORD dflags
inCharRange :: Char -> Bool
inCharRange c = c >= '\0' && c <= chr tARGET_MAX_CHAR
-- | Tests whether the literal represents a zero of whatever type it is
isZeroLit :: Literal -> Bool
isZeroLit (MachInt 0) = True
isZeroLit (MachInt64 0) = True
isZeroLit (MachWord 0) = True
isZeroLit (MachWord64 0) = True
isZeroLit (MachFloat 0) = True
isZeroLit (MachDouble 0) = True
isZeroLit _ = False
-- | Returns the 'Integer' contained in the 'Literal', for when that makes
-- sense, i.e. for 'Char', 'Int', 'Word' and 'LitInteger'.
litValue :: Literal -> Integer
litValue (MachChar c) = toInteger $ ord c
litValue (MachInt i) = i
litValue (MachInt64 i) = i
litValue (MachWord i) = i
litValue (MachWord64 i) = i
litValue (LitInteger i _) = i
litValue l = pprPanic "litValue" (ppr l)
{-
Coercions
~~~~~~~~~
-}
narrow8IntLit, narrow16IntLit, narrow32IntLit,
narrow8WordLit, narrow16WordLit, narrow32WordLit,
char2IntLit, int2CharLit,
float2IntLit, int2FloatLit, double2IntLit, int2DoubleLit,
float2DoubleLit, double2FloatLit
:: Literal -> Literal
word2IntLit, int2WordLit :: DynFlags -> Literal -> Literal
word2IntLit dflags (MachWord w)
| w > tARGET_MAX_INT dflags = MachInt (w - tARGET_MAX_WORD dflags - 1)
| otherwise = MachInt w
word2IntLit _ l = pprPanic "word2IntLit" (ppr l)
int2WordLit dflags (MachInt i)
| i < 0 = MachWord (1 + tARGET_MAX_WORD dflags + i) -- (-1) ---> tARGET_MAX_WORD
| otherwise = MachWord i
int2WordLit _ l = pprPanic "int2WordLit" (ppr l)
narrow8IntLit (MachInt i) = MachInt (toInteger (fromInteger i :: Int8))
narrow8IntLit l = pprPanic "narrow8IntLit" (ppr l)
narrow16IntLit (MachInt i) = MachInt (toInteger (fromInteger i :: Int16))
narrow16IntLit l = pprPanic "narrow16IntLit" (ppr l)
narrow32IntLit (MachInt i) = MachInt (toInteger (fromInteger i :: Int32))
narrow32IntLit l = pprPanic "narrow32IntLit" (ppr l)
narrow8WordLit (MachWord w) = MachWord (toInteger (fromInteger w :: Word8))
narrow8WordLit l = pprPanic "narrow8WordLit" (ppr l)
narrow16WordLit (MachWord w) = MachWord (toInteger (fromInteger w :: Word16))
narrow16WordLit l = pprPanic "narrow16WordLit" (ppr l)
narrow32WordLit (MachWord w) = MachWord (toInteger (fromInteger w :: Word32))
narrow32WordLit l = pprPanic "narrow32WordLit" (ppr l)
char2IntLit (MachChar c) = MachInt (toInteger (ord c))
char2IntLit l = pprPanic "char2IntLit" (ppr l)
int2CharLit (MachInt i) = MachChar (chr (fromInteger i))
int2CharLit l = pprPanic "int2CharLit" (ppr l)
float2IntLit (MachFloat f) = MachInt (truncate f)
float2IntLit l = pprPanic "float2IntLit" (ppr l)
int2FloatLit (MachInt i) = MachFloat (fromInteger i)
int2FloatLit l = pprPanic "int2FloatLit" (ppr l)
double2IntLit (MachDouble f) = MachInt (truncate f)
double2IntLit l = pprPanic "double2IntLit" (ppr l)
int2DoubleLit (MachInt i) = MachDouble (fromInteger i)
int2DoubleLit l = pprPanic "int2DoubleLit" (ppr l)
float2DoubleLit (MachFloat f) = MachDouble f
float2DoubleLit l = pprPanic "float2DoubleLit" (ppr l)
double2FloatLit (MachDouble d) = MachFloat d
double2FloatLit l = pprPanic "double2FloatLit" (ppr l)
nullAddrLit :: Literal
nullAddrLit = MachNullAddr
{-
Predicates
~~~~~~~~~~
-}
-- | True if there is absolutely no penalty to duplicating the literal.
-- False principally of strings.
--
-- "Why?", you say? I'm glad you asked. Well, for one duplicating strings would
-- blow up code sizes. Not only this, it's also unsafe.
--
-- Consider a program that wants to traverse a string. One way it might do this
-- is to first compute the Addr# pointing to the end of the string, and then,
-- starting from the beginning, bump a pointer using eqAddr# to determine the
-- end. For instance,
--
-- @
-- -- Given pointers to the start and end of a string, count how many zeros
-- -- the string contains.
-- countZeros :: Addr# -> Addr# -> Int
-- countZeros start end = go start 0
-- where
-- go off n
-- | off `addrEq#` end = n
-- | otherwise = go (off `plusAddr#` 1) n'
-- where n' | isTrue# (indexInt8OffAddr# off 0# ==# 0#) = n + 1
-- | otherwise = n
-- @
--
-- Consider what happens if we considered strings to be trivial (and therefore
-- duplicable) and emitted a call like @countZeros "hello"# ("hello"#
-- `plusAddr`# 5)@. The beginning and end pointers do not belong to the same
-- string, meaning that an iteration like the above would blow up terribly.
-- This is what happened in #12757.
--
-- Ultimately the solution here is to make primitive strings a bit more
-- structured, ensuring that the compiler can't inline in ways that will break
-- user code. One approach to this is described in #8472.
litIsTrivial :: Literal -> Bool
-- c.f. CoreUtils.exprIsTrivial
litIsTrivial (MachStr _) = False
litIsTrivial (LitInteger {}) = False
litIsTrivial _ = True
-- | True if code space does not go bad if we duplicate this literal
-- Currently we treat it just like 'litIsTrivial'
litIsDupable :: DynFlags -> Literal -> Bool
-- c.f. CoreUtils.exprIsDupable
litIsDupable _ (MachStr _) = False
litIsDupable dflags (LitInteger i _) = inIntRange dflags i
litIsDupable _ _ = True
litFitsInChar :: Literal -> Bool
litFitsInChar (MachInt i) = i >= toInteger (ord minBound)
&& i <= toInteger (ord maxBound)
litFitsInChar _ = False
litIsLifted :: Literal -> Bool
litIsLifted (LitInteger {}) = True
litIsLifted _ = False
{-
Types
~~~~~
-}
-- | Find the Haskell 'Type' the literal occupies
literalType :: Literal -> Type
literalType MachNullAddr = addrPrimTy
literalType (MachChar _) = charPrimTy
literalType (MachStr _) = addrPrimTy
literalType (MachInt _) = intPrimTy
literalType (MachWord _) = wordPrimTy
literalType (MachInt64 _) = int64PrimTy
literalType (MachWord64 _) = word64PrimTy
literalType (MachFloat _) = floatPrimTy
literalType (MachDouble _) = doublePrimTy
literalType (MachLabel _ _ _) = addrPrimTy
literalType (LitInteger _ t) = t
absentLiteralOf :: TyCon -> Maybe Literal
-- Return a literal of the appropriate primtive
-- TyCon, to use as a placeholder when it doesn't matter
absentLiteralOf tc = lookupUFM absent_lits (tyConName tc)
absent_lits :: UniqFM Literal
absent_lits = listToUFM [ (addrPrimTyConKey, MachNullAddr)
, (charPrimTyConKey, MachChar 'x')
, (intPrimTyConKey, MachInt 0)
, (int64PrimTyConKey, MachInt64 0)
, (floatPrimTyConKey, MachFloat 0)
, (doublePrimTyConKey, MachDouble 0)
, (wordPrimTyConKey, MachWord 0)
, (word64PrimTyConKey, MachWord64 0) ]
{-
Comparison
~~~~~~~~~~
-}
cmpLit :: Literal -> Literal -> Ordering
cmpLit (MachChar a) (MachChar b) = a `compare` b
cmpLit (MachStr a) (MachStr b) = a `compare` b
cmpLit (MachNullAddr) (MachNullAddr) = EQ
cmpLit (MachInt a) (MachInt b) = a `compare` b
cmpLit (MachWord a) (MachWord b) = a `compare` b
cmpLit (MachInt64 a) (MachInt64 b) = a `compare` b
cmpLit (MachWord64 a) (MachWord64 b) = a `compare` b
cmpLit (MachFloat a) (MachFloat b) = a `compare` b
cmpLit (MachDouble a) (MachDouble b) = a `compare` b
cmpLit (MachLabel a _ _) (MachLabel b _ _) = a `compare` b
cmpLit (LitInteger a _) (LitInteger b _) = a `compare` b
cmpLit lit1 lit2 | litTag lit1 < litTag lit2 = LT
| otherwise = GT
litTag :: Literal -> Int
litTag (MachChar _) = 1
litTag (MachStr _) = 2
litTag (MachNullAddr) = 3
litTag (MachInt _) = 4
litTag (MachWord _) = 5
litTag (MachInt64 _) = 6
litTag (MachWord64 _) = 7
litTag (MachFloat _) = 8
litTag (MachDouble _) = 9
litTag (MachLabel _ _ _) = 10
litTag (LitInteger {}) = 11
{-
Printing
~~~~~~~~
* See Note [Printing of literals in Core]
-}
pprLiteral :: (SDoc -> SDoc) -> Literal -> SDoc
pprLiteral _ (MachChar c) = pprPrimChar c
pprLiteral _ (MachStr s) = pprHsBytes s
pprLiteral _ (MachNullAddr) = text "__NULL"
pprLiteral _ (MachInt i) = pprPrimInt i
pprLiteral _ (MachInt64 i) = pprPrimInt64 i
pprLiteral _ (MachWord w) = pprPrimWord w
pprLiteral _ (MachWord64 w) = pprPrimWord64 w
pprLiteral _ (MachFloat f) = float (fromRat f) <> primFloatSuffix
pprLiteral _ (MachDouble d) = double (fromRat d) <> primDoubleSuffix
pprLiteral add_par (LitInteger i _) = pprIntegerVal add_par i
pprLiteral add_par (MachLabel l mb fod) = add_par (text "__label" <+> b <+> ppr fod)
where b = case mb of
Nothing -> pprHsString l
Just x -> doubleQuotes (text (unpackFS l ++ '@':show x))
pprIntegerVal :: (SDoc -> SDoc) -> Integer -> SDoc
-- See Note [Printing of literals in Core].
pprIntegerVal add_par i | i < 0 = add_par (integer i)
| otherwise = integer i
{-
Note [Printing of literals in Core]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The function `add_par` is used to wrap parenthesis around negative integers
(`LitInteger`) and labels (`MachLabel`), if they occur in a context requiring
an atomic thing (for example function application).
Although not all Core literals would be valid Haskell, we are trying to stay
as close as possible to Haskell syntax in the printing of Core, to make it
easier for a Haskell user to read Core.
To that end:
* We do print parenthesis around negative `LitInteger`, because we print
`LitInteger` using plain number literals (no prefix or suffix), and plain
number literals in Haskell require parenthesis in contexts like function
application (i.e. `1 - -1` is not valid Haskell).
* We don't print parenthesis around other (negative) literals, because they
aren't needed in GHC/Haskell either (i.e. `1# -# -1#` is accepted by GHC's
parser).
  Literal         Output          Output if context requires
                                  an atom (if different)
  ------------    ------------    ---------------------------
  MachChar        'a'#
  MachStr         "aaa"#
  MachNullAddr    "__NULL"
  MachInt         -1#
  MachInt64       -1L#
  MachWord        1##
  MachWord64      1L##
  MachFloat       -1.0#
  MachDouble      -1.0##
  LitInteger      -1              (-1)
  MachLabel       "__label" ...   ("__label" ...)
-}
| mettekou/ghc | compiler/basicTypes/Literal.hs | bsd-3-clause | 19,887 | 0 | 16 | 5,992 | 4,250 | 2,208 | 2,042 | 288 | 2 |
{-
(c) The University of Glasgow 2006-2008
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-}
{-# LANGUAGE CPP, NondecreasingIndentation #-}
{-# LANGUAGE MultiWayIf #-}
-- | Module for constructing @ModIface@ values (interface files),
-- writing them to disk and comparing two versions to see if
-- recompilation is required.
module GHC.Iface.Utils (
mkPartialIface,
mkFullIface,
mkIfaceTc,
writeIfaceFile, -- Write the interface file
checkOldIface, -- See if recompilation is required, by
-- comparing version information
RecompileRequired(..), recompileRequired,
mkIfaceExports,
coAxiomToIfaceDecl,
tyThingToIfaceDecl -- Converting things to their Iface equivalents
) where
{-
-----------------------------------------------
Recompilation checking
-----------------------------------------------
A complete description of how recompilation checking works can be
found in the wiki commentary:
https://gitlab.haskell.org/ghc/ghc/wikis/commentary/compiler/recompilation-avoidance
Please read the above page for a top-down description of how this all
works. Notes below cover specific issues related to the implementation.
Basic idea:
* In the mi_usages information in an interface, we record the
fingerprint of each free variable of the module
* In mkIface, we compute the fingerprint of each exported thing A.f.
For each external thing that A.f refers to, we include the fingerprint
of the external reference when computing the fingerprint of A.f. So
if anything that A.f depends on changes, then A.f's fingerprint will
change.
Also record any dependent files added with
* addDependentFile
* #include
* -optP-include
* In checkOldIface we compare the mi_usages for the module with
the actual fingerprint for each thing recorded in mi_usages
-}
#include "HsVersions.h"
import GhcPrelude
import GHC.Iface.Syntax
import BinFingerprint
import GHC.Iface.Load
import GHC.CoreToIface
import FlagChecker
import DsUsage ( mkUsageInfo, mkUsedNames, mkDependencies )
import Id
import Annotations
import CoreSyn
import Class
import TyCon
import CoAxiom
import ConLike
import DataCon
import Type
import TcType
import InstEnv
import FamInstEnv
import TcRnMonad
import GHC.Hs
import HscTypes
import Finder
import DynFlags
import VarEnv
import Var
import Name
import Avail
import RdrName
import NameEnv
import NameSet
import Module
import GHC.Iface.Binary
import ErrUtils
import Digraph
import SrcLoc
import Outputable
import BasicTypes hiding ( SuccessFlag(..) )
import Unique
import Util hiding ( eqListBy )
import FastString
import Maybes
import Binary
import Fingerprint
import Exception
import UniqSet
import Packages
import ExtractDocs
import Control.Monad
import Data.Function
import Data.List
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Ord
import Data.IORef
import System.Directory
import System.FilePath
import Plugins ( PluginRecompile(..), PluginWithArgs(..), LoadedPlugin(..),
pluginRecompile', plugins )
--Qualified import so we can define a Semigroup instance
-- but it doesn't clash with Outputable.<>
import qualified Data.Semigroup
{-
************************************************************************
* *
\subsection{Completing an interface}
* *
************************************************************************
-}
mkPartialIface :: HscEnv
-> ModDetails
-> ModGuts
-> PartialModIface
mkPartialIface hsc_env mod_details
ModGuts{ mg_module = this_mod
, mg_hsc_src = hsc_src
, mg_usages = usages
, mg_used_th = used_th
, mg_deps = deps
, mg_rdr_env = rdr_env
, mg_fix_env = fix_env
, mg_warns = warns
, mg_hpc_info = hpc_info
, mg_safe_haskell = safe_mode
, mg_trust_pkg = self_trust
, mg_doc_hdr = doc_hdr
, mg_decl_docs = decl_docs
, mg_arg_docs = arg_docs
}
= mkIface_ hsc_env this_mod hsc_src used_th deps rdr_env fix_env warns hpc_info self_trust
safe_mode usages doc_hdr decl_docs arg_docs mod_details
-- | Fully instantiate an interface.
-- Adds fingerprints and potentially code generator produced information.
mkFullIface :: HscEnv -> PartialModIface -> Maybe NameSet -> IO ModIface
mkFullIface hsc_env partial_iface mb_non_cafs = do
let decls
| gopt Opt_OmitInterfacePragmas (hsc_dflags hsc_env)
= mi_decls partial_iface
| otherwise
= updateDeclCafInfos (mi_decls partial_iface) mb_non_cafs
full_iface <-
{-# SCC "addFingerprints" #-}
addFingerprints hsc_env partial_iface{ mi_decls = decls }
-- Debug printing
dumpIfSet_dyn (hsc_dflags hsc_env) Opt_D_dump_hi "FINAL INTERFACE" FormatText (pprModIface full_iface)
return full_iface
updateDeclCafInfos :: [IfaceDecl] -> Maybe NameSet -> [IfaceDecl]
updateDeclCafInfos decls Nothing = decls
updateDeclCafInfos decls (Just non_cafs) = map update_decl decls
where
update_decl decl
| IfaceId nm ty details id_info <- decl
, elemNameSet nm non_cafs
= IfaceId nm ty details $
case id_info of
NoInfo -> HasInfo [HsNoCafRefs]
HasInfo infos -> HasInfo (HsNoCafRefs : infos)
| otherwise
= decl
-- | Make an interface from the results of typechecking only. Useful
-- for non-optimising compilation, or where we aren't generating any
-- object code at all ('HscNothing').
mkIfaceTc :: HscEnv
-> SafeHaskellMode -- The safe haskell mode
-> ModDetails -- gotten from mkBootModDetails, probably
-> TcGblEnv -- Usages, deprecations, etc
-> IO ModIface
mkIfaceTc hsc_env safe_mode mod_details
tc_result@TcGblEnv{ tcg_mod = this_mod,
tcg_src = hsc_src,
tcg_imports = imports,
tcg_rdr_env = rdr_env,
tcg_fix_env = fix_env,
tcg_merged = merged,
tcg_warns = warns,
tcg_hpc = other_hpc_info,
tcg_th_splice_used = tc_splice_used,
tcg_dependent_files = dependent_files
}
= do
let used_names = mkUsedNames tc_result
let pluginModules =
map lpModule (cachedPlugins (hsc_dflags hsc_env))
deps <- mkDependencies
(thisInstalledUnitId (hsc_dflags hsc_env))
(map mi_module pluginModules) tc_result
let hpc_info = emptyHpcInfo other_hpc_info
used_th <- readIORef tc_splice_used
dep_files <- (readIORef dependent_files)
-- Do NOT use semantic module here; this_mod in mkUsageInfo
-- is used solely to decide if we should record a dependency
-- or not. When we instantiate a signature, the semantic
-- module is something we want to record dependencies for,
-- but if you pass that in here, we'll decide it's the local
-- module and does not need to be recorded as a dependency.
-- See Note [Identity versus semantic module]
usages <- mkUsageInfo hsc_env this_mod (imp_mods imports) used_names
dep_files merged pluginModules
let (doc_hdr', doc_map, arg_map) = extractDocs tc_result
let partial_iface = mkIface_ hsc_env
this_mod hsc_src
used_th deps rdr_env
fix_env warns hpc_info
(imp_trust_own_pkg imports) safe_mode usages
doc_hdr' doc_map arg_map
mod_details
mkFullIface hsc_env partial_iface Nothing
mkIface_ :: HscEnv -> Module -> HscSource
-> Bool -> Dependencies -> GlobalRdrEnv
-> NameEnv FixItem -> Warnings -> HpcInfo
-> Bool
-> SafeHaskellMode
-> [Usage]
-> Maybe HsDocString
-> DeclDocMap
-> ArgDocMap
-> ModDetails
-> PartialModIface
mkIface_ hsc_env
this_mod hsc_src used_th deps rdr_env fix_env src_warns
hpc_info pkg_trust_req safe_mode usages
doc_hdr decl_docs arg_docs
ModDetails{ md_insts = insts,
md_fam_insts = fam_insts,
md_rules = rules,
md_anns = anns,
md_types = type_env,
md_exports = exports,
md_complete_sigs = complete_sigs }
-- NB: notice that mkIface does not look at the bindings
-- only at the TypeEnv. The previous Tidy phase has
-- put exactly the info into the TypeEnv that we want
-- to expose in the interface
= do
let semantic_mod = canonicalizeHomeModule (hsc_dflags hsc_env) (moduleName this_mod)
entities = typeEnvElts type_env
decls = [ tyThingToIfaceDecl entity
| entity <- entities,
let name = getName entity,
not (isImplicitTyThing entity),
-- No implicit Ids and class tycons in the interface file
not (isWiredInName name),
-- Nor wired-in things; the compiler knows about them anyhow
nameIsLocalOrFrom semantic_mod name ]
-- Sigh: see Note [Root-main Id] in TcRnDriver
-- NB: ABSOLUTELY need to check against semantic_mod,
-- because all of the names in an hsig p[H=<H>]:H
-- are going to be for <H>, not the former id!
-- See Note [Identity versus semantic module]
fixities = sortBy (comparing fst)
[(occ,fix) | FixItem occ fix <- nameEnvElts fix_env]
-- The order of fixities returned from nameEnvElts is not
-- deterministic, so we sort by OccName to canonicalize it.
-- See Note [Deterministic UniqFM] in UniqDFM for more details.
warns = src_warns
iface_rules = map coreRuleToIfaceRule rules
iface_insts = map instanceToIfaceInst $ fixSafeInstances safe_mode insts
iface_fam_insts = map famInstToIfaceFamInst fam_insts
trust_info = setSafeMode safe_mode
annotations = map mkIfaceAnnotation anns
icomplete_sigs = map mkIfaceCompleteSig complete_sigs
ModIface {
mi_module = this_mod,
-- Need to record this because it depends on the -instantiated-with flag
-- which could change
mi_sig_of = if semantic_mod == this_mod
then Nothing
else Just semantic_mod,
mi_hsc_src = hsc_src,
mi_deps = deps,
mi_usages = usages,
mi_exports = mkIfaceExports exports,
-- Sort these lexicographically, so that
-- the result is stable across compilations
mi_insts = sortBy cmp_inst iface_insts,
mi_fam_insts = sortBy cmp_fam_inst iface_fam_insts,
mi_rules = sortBy cmp_rule iface_rules,
mi_fixities = fixities,
mi_warns = warns,
mi_anns = annotations,
mi_globals = maybeGlobalRdrEnv rdr_env,
mi_used_th = used_th,
mi_decls = decls,
mi_hpc = isHpcUsed hpc_info,
mi_trust = trust_info,
mi_trust_pkg = pkg_trust_req,
mi_complete_sigs = icomplete_sigs,
mi_doc_hdr = doc_hdr,
mi_decl_docs = decl_docs,
mi_arg_docs = arg_docs,
mi_final_exts = () }
where
cmp_rule = comparing ifRuleName
-- Compare these lexicographically by OccName, *not* by unique,
-- because the latter is not stable across compilations:
cmp_inst = comparing (nameOccName . ifDFun)
cmp_fam_inst = comparing (nameOccName . ifFamInstTcName)
dflags = hsc_dflags hsc_env
-- We only fill in mi_globals if the module was compiled to byte
-- code. Otherwise, the compiler may not have retained all the
-- top-level bindings and they won't be in the TypeEnv (see
-- Desugar.addExportFlagsAndRules). The mi_globals field is used
-- by GHCi to decide whether the module has its full top-level
-- scope available. (#5534)
maybeGlobalRdrEnv :: GlobalRdrEnv -> Maybe GlobalRdrEnv
maybeGlobalRdrEnv rdr_env
| targetRetainsAllBindings (hscTarget dflags) = Just rdr_env
| otherwise = Nothing
ifFamInstTcName = ifFamInstFam
-----------------------------
writeIfaceFile :: DynFlags -> FilePath -> ModIface -> IO ()
writeIfaceFile dflags hi_file_path new_iface
= do createDirectoryIfMissing True (takeDirectory hi_file_path)
writeBinIface dflags hi_file_path new_iface
-- -----------------------------------------------------------------------------
-- Look up parents and versions of Names
-- This is like a global version of the mi_hash_fn field in each ModIface.
-- Given a Name, it finds the ModIface, and then uses mi_hash_fn to get
-- the parent and version info.
mkHashFun
:: HscEnv -- needed to look up versions
-> ExternalPackageState -- ditto
-> (Name -> IO Fingerprint)
mkHashFun hsc_env eps name
| isHoleModule orig_mod
= lookup (mkModule (thisPackage dflags) (moduleName orig_mod))
| otherwise
= lookup orig_mod
where
dflags = hsc_dflags hsc_env
hpt = hsc_HPT hsc_env
pit = eps_PIT eps
occ = nameOccName name
orig_mod = nameModule name
lookup mod = do
MASSERT2( isExternalName name, ppr name )
iface <- case lookupIfaceByModule hpt pit mod of
Just iface -> return iface
Nothing -> do
-- This can occur when we're writing out ifaces for
-- requirements; we didn't do any /real/ typechecking
-- so there's no guarantee everything is loaded.
-- Kind of a heinous hack.
iface <- initIfaceLoad hsc_env . withException
$ loadInterface (text "lookupVers2") mod ImportBySystem
return iface
return $ snd (mi_hash_fn (mi_final_exts iface) occ `orElse`
pprPanic "lookupVers1" (ppr mod <+> ppr occ))
-- ---------------------------------------------------------------------------
-- Compute fingerprints for the interface
{-
Note [Fingerprinting IfaceDecls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The general idea here is that we first examine the 'IfaceDecl's and determine
the recursive groups of them. We then walk these groups in dependency order,
serializing each contained 'IfaceDecl' to a "Binary" buffer which we then
hash using MD5 to produce a fingerprint for the group.
However, the serialization that we use is a bit funny: we override the @putName@
operation with our own which serializes the hash of a 'Name' instead of the
'Name' itself. This ensures that the fingerprint of a decl changes if anything
in its transitive closure changes. This trick is why we must be careful about
traversing in dependency order: we need to ensure that we have hashes for
everything referenced by the decl which we are fingerprinting.
Moreover, we need to be careful to distinguish between serialization of binding
Names (e.g. the ifName field of a IfaceDecl) and non-binding (e.g. the ifInstCls
field of a IfaceClsInst): only in the non-binding case should we include the
fingerprint; in the binding case we shouldn't since it is merely the name of the
thing that we are currently fingerprinting.
-}
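-- A much-simplified sketch of the scheme above (added for illustration only;
-- 'hashGroup', 'lookupHashOf' and 'extendEnv' are made-up names, and the real
-- code is 'addFingerprints' below):
--
--   fingerprintInDepOrder env []           = env
--   fingerprintInDepOrder env (grp : grps) =
--     let h = hashGroup (lookupHashOf env) grp   -- Names serialized as hashes
--     in  fingerprintInDepOrder (extendEnv env grp h) grps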
-- | Add fingerprints for top-level declarations to a 'ModIface'.
--
-- See Note [Fingerprinting IfaceDecls]
addFingerprints
:: HscEnv
-> PartialModIface
-> IO ModIface
addFingerprints hsc_env iface0
= do
eps <- hscEPS hsc_env
let
decls = mi_decls iface0
warn_fn = mkIfaceWarnCache (mi_warns iface0)
fix_fn = mkIfaceFixCache (mi_fixities iface0)
-- The ABI of a declaration represents everything that is made
-- visible about the declaration that a client can depend on.
-- see IfaceDeclABI below.
declABI :: IfaceDecl -> IfaceDeclABI
-- TODO: I'm not sure if this should be semantic_mod or this_mod.
-- See also Note [Identity versus semantic module]
declABI decl = (this_mod, decl, extras)
where extras = declExtras fix_fn ann_fn non_orph_rules non_orph_insts
non_orph_fis top_lvl_name_env decl
-- This is used for looking up the Name of a default method
-- from its OccName. See Note [default method Name]
top_lvl_name_env =
mkOccEnv [ (nameOccName nm, nm)
| IfaceId { ifName = nm } <- decls ]
-- Dependency edges between declarations in the current module.
-- This is computed by finding the free external names of each
-- declaration, including IfaceDeclExtras (things that a
-- declaration implicitly depends on).
edges :: [ Node Unique IfaceDeclABI ]
edges = [ DigraphNode abi (getUnique (getOccName decl)) out
| decl <- decls
, let abi = declABI decl
, let out = localOccs $ freeNamesDeclABI abi
]
name_module n = ASSERT2( isExternalName n, ppr n ) nameModule n
localOccs =
map (getUnique . getParent . getOccName)
-- NB: names always use semantic module, so
-- filtering must be on the semantic module!
-- See Note [Identity versus semantic module]
. filter ((== semantic_mod) . name_module)
. nonDetEltsUniqSet
-- It's OK to use nonDetEltsUFM as localOccs is only
-- used to construct the edges and
-- stronglyConnCompFromEdgedVertices is deterministic
-- even with non-deterministic order of edges as
-- explained in Note [Deterministic SCC] in Digraph.
where getParent :: OccName -> OccName
getParent occ = lookupOccEnv parent_map occ `orElse` occ
-- maps OccNames to their parents in the current module.
-- e.g. a reference to a constructor must be turned into a reference
-- to the TyCon for the purposes of calculating dependencies.
parent_map :: OccEnv OccName
parent_map = foldl' extend emptyOccEnv decls
where extend env d =
extendOccEnvList env [ (b,n) | b <- ifaceDeclImplicitBndrs d ]
where n = getOccName d
-- Strongly-connected groups of declarations, in dependency order
groups :: [SCC IfaceDeclABI]
groups = stronglyConnCompFromEdgedVerticesUniq edges
global_hash_fn = mkHashFun hsc_env eps
-- How to output Names when generating the data to fingerprint.
-- Here we want to output the fingerprint for each top-level
-- Name, whether it comes from the current module or another
-- module. In this way, the fingerprint for a declaration will
-- change if the fingerprint for anything it refers to (transitively)
-- changes.
mk_put_name :: OccEnv (OccName,Fingerprint)
-> BinHandle -> Name -> IO ()
mk_put_name local_env bh name
| isWiredInName name = putNameLiterally bh name
-- wired-in names don't have fingerprints
| otherwise
= ASSERT2( isExternalName name, ppr name )
let hash | nameModule name /= semantic_mod = global_hash_fn name
-- Get it from the REAL interface!!
-- This will trigger when we compile an hsig file
-- and we know a backing impl for it.
-- See Note [Identity versus semantic module]
| semantic_mod /= this_mod
, not (isHoleModule semantic_mod) = global_hash_fn name
| otherwise = return (snd (lookupOccEnv local_env (getOccName name)
`orElse` pprPanic "urk! lookup local fingerprint"
(ppr name $$ ppr local_env)))
-- This panic indicates that we got the dependency
-- analysis wrong, because we needed a fingerprint for
-- an entity that wasn't in the environment. To debug
-- it, turn the panic into a trace, uncomment the
-- pprTraces below, run the compile again, and inspect
-- the output and the generated .hi file with
-- --show-iface.
in hash >>= put_ bh
-- take a strongly-connected group of declarations and compute
-- its fingerprint.
fingerprint_group :: (OccEnv (OccName,Fingerprint),
[(Fingerprint,IfaceDecl)])
-> SCC IfaceDeclABI
-> IO (OccEnv (OccName,Fingerprint),
[(Fingerprint,IfaceDecl)])
fingerprint_group (local_env, decls_w_hashes) (AcyclicSCC abi)
= do let hash_fn = mk_put_name local_env
decl = abiDecl abi
--pprTrace "fingerprinting" (ppr (ifName decl) ) $ do
hash <- computeFingerprint hash_fn abi
env' <- extend_hash_env local_env (hash,decl)
return (env', (hash,decl) : decls_w_hashes)
fingerprint_group (local_env, decls_w_hashes) (CyclicSCC abis)
= do let decls = map abiDecl abis
local_env1 <- foldM extend_hash_env local_env
(zip (repeat fingerprint0) decls)
let hash_fn = mk_put_name local_env1
-- pprTrace "fingerprinting" (ppr (map ifName decls) ) $ do
let stable_abis = sortBy cmp_abiNames abis
-- put the cycle in a canonical order
hash <- computeFingerprint hash_fn stable_abis
let pairs = zip (repeat hash) decls
local_env2 <- foldM extend_hash_env local_env pairs
return (local_env2, pairs ++ decls_w_hashes)
-- we have fingerprinted the whole declaration, but we now need
-- to assign fingerprints to all the OccNames that it binds, to
-- use when referencing those OccNames in later declarations.
--
extend_hash_env :: OccEnv (OccName,Fingerprint)
-> (Fingerprint,IfaceDecl)
-> IO (OccEnv (OccName,Fingerprint))
extend_hash_env env0 (hash,d) = do
return (foldr (\(b,fp) env -> extendOccEnv env b (b,fp)) env0
(ifaceDeclFingerprints hash d))
--
(local_env, decls_w_hashes) <-
foldM fingerprint_group (emptyOccEnv, []) groups
-- when calculating fingerprints, we always need to use canonical
-- ordering for lists of things. In particular, the mi_deps has various
-- lists of modules and suchlike, so put these all in canonical order:
let sorted_deps = sortDependencies (mi_deps iface0)
-- The export hash of a module depends on the orphan hashes of the
-- orphan modules below us in the dependency tree. This is the way
-- that changes in orphans get propagated all the way up the
-- dependency tree.
--
-- Note [A bad dep_orphs optimization]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- In a previous version of this code, we filtered out orphan modules which
-- were not from the home package, justifying it by saying that "we'd
-- pick up the ABI hashes of the external module instead". This is wrong.
-- Suppose that we have:
--
-- module External where
-- instance Show (a -> b)
--
-- module Home1 where
-- import External
--
-- module Home2 where
-- import Home1
--
-- The export hash of Home1 needs to reflect the orphan instances of
-- External. It's true that Home1 will get rebuilt if the orphans
-- of External, but we also need to make sure Home2 gets rebuilt
-- as well. See #12733 for more details.
let orph_mods
= filter (/= this_mod) -- Note [Do not update EPS with your own hi-boot]
$ dep_orphs sorted_deps
dep_orphan_hashes <- getOrphanHashes hsc_env orph_mods
-- Note [Do not update EPS with your own hi-boot]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- (See also #10182). When your hs-boot file includes an orphan
-- instance declaration, you may find that the dep_orphs of a module you
-- import contains reference to yourself. DO NOT actually load this module
-- or add it to the orphan hashes: you're going to provide the orphan
-- instances yourself, no need to consult hs-boot; if you do load the
-- interface into EPS, you will see a duplicate orphan instance.
orphan_hash <- computeFingerprint (mk_put_name local_env)
(map ifDFun orph_insts, orph_rules, orph_fis)
-- the export list hash doesn't depend on the fingerprints of
-- the Names it mentions, only the Names themselves, hence putNameLiterally.
export_hash <- computeFingerprint putNameLiterally
(mi_exports iface0,
orphan_hash,
dep_orphan_hashes,
dep_pkgs (mi_deps iface0),
-- See Note [Export hash depends on non-orphan family instances]
dep_finsts (mi_deps iface0),
-- dep_pkgs: see "Package Version Changes" on
-- wiki/commentary/compiler/recompilation-avoidance
mi_trust iface0)
-- Make sure change of Safe Haskell mode causes recomp.
-- Note [Export hash depends on non-orphan family instances]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Suppose we have:
--
-- module A where
-- type instance F Int = Bool
--
-- module B where
-- import A
--
-- module C where
-- import B
--
-- The family instance consistency check for C depends on the dep_finsts of
-- B. If we rename module A to A2, when the dep_finsts of B changes, we need
-- to make sure that C gets rebuilt. Effectively, the dep_finsts are part of
-- the exports of B, because C always considers them when checking
-- consistency.
--
-- A full discussion is in #12723.
--
-- We do NOT need to hash dep_orphs, because this is implied by
-- dep_orphan_hashes, and we do not need to hash ordinary class instances,
-- because there is no eager consistency check as there is with type families
-- (also we didn't store it anywhere!)
--
-- put the declarations in a canonical order, sorted by OccName
let sorted_decls = Map.elems $ Map.fromList $
[(getOccName d, e) | e@(_, d) <- decls_w_hashes]
-- the flag hash depends on:
-- - (some of) dflags
-- it returns two hashes, one that shouldn't change
-- the abi hash and one that should
flag_hash <- fingerprintDynFlags dflags this_mod putNameLiterally
opt_hash <- fingerprintOptFlags dflags putNameLiterally
hpc_hash <- fingerprintHpcFlags dflags putNameLiterally
plugin_hash <- fingerprintPlugins hsc_env
-- the ABI hash depends on:
-- - decls
-- - export list
-- - orphans
-- - deprecations
-- - flag abi hash
mod_hash <- computeFingerprint putNameLiterally
(map fst sorted_decls,
export_hash, -- includes orphan_hash
mi_warns iface0)
-- The interface hash depends on:
-- - the ABI hash, plus
-- - the module level annotations,
-- - usages
-- - deps (home and external packages, dependent files)
-- - hpc
iface_hash <- computeFingerprint putNameLiterally
(mod_hash,
ann_fn (mkVarOcc "module"), -- See mkIfaceAnnCache
mi_usages iface0,
sorted_deps,
mi_hpc iface0)
let
final_iface_exts = ModIfaceBackend
{ mi_iface_hash = iface_hash
, mi_mod_hash = mod_hash
, mi_flag_hash = flag_hash
, mi_opt_hash = opt_hash
, mi_hpc_hash = hpc_hash
, mi_plugin_hash = plugin_hash
, mi_orphan = not ( all ifRuleAuto orph_rules
-- See Note [Orphans and auto-generated rules]
&& null orph_insts
&& null orph_fis)
, mi_finsts = not (null (mi_fam_insts iface0))
, mi_exp_hash = export_hash
, mi_orphan_hash = orphan_hash
, mi_warn_fn = warn_fn
, mi_fix_fn = fix_fn
, mi_hash_fn = lookupOccEnv local_env
}
final_iface = iface0 { mi_decls = sorted_decls, mi_final_exts = final_iface_exts }
--
return final_iface
where
this_mod = mi_module iface0
semantic_mod = mi_semantic_module iface0
dflags = hsc_dflags hsc_env
(non_orph_insts, orph_insts) = mkOrphMap ifInstOrph (mi_insts iface0)
(non_orph_rules, orph_rules) = mkOrphMap ifRuleOrph (mi_rules iface0)
(non_orph_fis, orph_fis) = mkOrphMap ifFamInstOrph (mi_fam_insts iface0)
ann_fn = mkIfaceAnnCache (mi_anns iface0)
-- | Retrieve the orphan hashes 'mi_orphan_hash' for a list of modules
-- (in particular, the orphan modules which are transitively imported by the
-- current module).
--
-- Q: Why do we need the hash at all, doesn't the list of transitively
-- imported orphan modules suffice?
--
-- A: If one of our transitive imports adds a new orphan instance, our
-- export hash must change so that modules which import us rebuild. If we just
-- hashed the [Module], the hash would not change even when a new instance was
-- added to a module that already had an orphan instance.
--
-- Q: Why don't we just hash the orphan hashes of our direct dependencies?
-- Why the full transitive closure?
--
-- A: Suppose we have these modules:
--
-- module A where
-- instance Show (a -> b) where
-- module B where
-- import A -- **
-- module C where
-- import A
-- import B
--
-- Whether or not we add or remove the import to A in B affects the
-- orphan hash of B. But it shouldn't really affect the orphan hash
-- of C. If we hashed only direct dependencies, there would be no
-- way to tell that the net effect was a wash, and we'd be forced
-- to recompile C and everything else.
getOrphanHashes :: HscEnv -> [Module] -> IO [Fingerprint]
getOrphanHashes hsc_env mods = do
eps <- hscEPS hsc_env
let
hpt = hsc_HPT hsc_env
pit = eps_PIT eps
get_orph_hash mod =
case lookupIfaceByModule hpt pit mod of
Just iface -> return (mi_orphan_hash (mi_final_exts iface))
Nothing -> do -- similar to 'mkHashFun'
iface <- initIfaceLoad hsc_env . withException
$ loadInterface (text "getOrphanHashes") mod ImportBySystem
return (mi_orphan_hash (mi_final_exts iface))
--
mapM get_orph_hash mods
sortDependencies :: Dependencies -> Dependencies
sortDependencies d
= Deps { dep_mods = sortBy (compare `on` (moduleNameFS.fst)) (dep_mods d),
dep_pkgs = sortBy (compare `on` fst) (dep_pkgs d),
dep_orphs = sortBy stableModuleCmp (dep_orphs d),
dep_finsts = sortBy stableModuleCmp (dep_finsts d),
dep_plgins = sortBy (compare `on` moduleNameFS) (dep_plgins d) }
-- | Creates cached lookup for the 'mi_anns' field of ModIface
-- Hackily, we use "module" as the OccName for any module-level annotations
mkIfaceAnnCache :: [IfaceAnnotation] -> OccName -> [AnnPayload]
mkIfaceAnnCache anns
= \n -> lookupOccEnv env n `orElse` []
where
pair (IfaceAnnotation target value) =
(case target of
NamedTarget occn -> occn
ModuleTarget _ -> mkVarOcc "module"
, [value])
-- flipping (++), so the first argument is always short
env = mkOccEnv_C (flip (++)) (map pair anns)
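-- For example (illustrative note, not part of the original source): with one
-- annotation on an identifier 'foo' and one module-level annotation, the
-- cached function returns the 'foo' payloads for (mkVarOcc "foo"), the
-- module-level payloads for (mkVarOcc "module"), and [] for any other OccName.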
{-
************************************************************************
* *
The ABI of an IfaceDecl
* *
************************************************************************
Note [The ABI of an IfaceDecl]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The ABI of a declaration consists of:
(a) the full name of the identifier (inc. module and package,
because these are used to construct the symbol name by which
the identifier is known externally).
(b) the declaration itself, as exposed to clients. That is, the
definition of an Id is included in the fingerprint only if
it is made available as an unfolding in the interface.
(c) the fixity of the identifier (if it exists)
(d) for Ids: rules
(e) for classes: instances, fixity & rules for methods
(f) for datatypes: instances, fixity & rules for constrs
Items (c)-(f) are not stored in the IfaceDecl, but instead appear
elsewhere in the interface file. But they are *fingerprinted* with
the declaration itself. This is done by grouping (c)-(f) in IfaceDeclExtras,
and fingerprinting that as part of the declaration.
-}
type IfaceDeclABI = (Module, IfaceDecl, IfaceDeclExtras)
data IfaceDeclExtras
= IfaceIdExtras IfaceIdExtras
| IfaceDataExtras
(Maybe Fixity) -- Fixity of the tycon itself (if it exists)
[IfaceInstABI] -- Local class and family instances of this tycon
-- See Note [Orphans] in InstEnv
[AnnPayload] -- Annotations of the type itself
[IfaceIdExtras] -- For each constructor: fixity, RULES and annotations
| IfaceClassExtras
(Maybe Fixity) -- Fixity of the class itself (if it exists)
[IfaceInstABI] -- Local instances of this class *or*
-- of its associated data types
-- See Note [Orphans] in InstEnv
[AnnPayload] -- Annotations of the type itself
[IfaceIdExtras] -- For each class method: fixity, RULES and annotations
[IfExtName] -- Default methods. If a module
-- mentions a class, then it can
-- instantiate the class and thereby
-- use the default methods, so we must
-- include these in the fingerprint of
-- a class.
| IfaceSynonymExtras (Maybe Fixity) [AnnPayload]
| IfaceFamilyExtras (Maybe Fixity) [IfaceInstABI] [AnnPayload]
| IfaceOtherDeclExtras
data IfaceIdExtras
= IdExtras
(Maybe Fixity) -- Fixity of the Id (if it exists)
[IfaceRule] -- Rules for the Id
[AnnPayload] -- Annotations for the Id
-- When hashing a class or family instance, we hash only the
-- DFunId or CoAxiom, because that depends on all the
-- information about the instance.
--
type IfaceInstABI = IfExtName -- Name of DFunId or CoAxiom that is evidence for the instance
abiDecl :: IfaceDeclABI -> IfaceDecl
abiDecl (_, decl, _) = decl
cmp_abiNames :: IfaceDeclABI -> IfaceDeclABI -> Ordering
cmp_abiNames abi1 abi2 = getOccName (abiDecl abi1) `compare`
getOccName (abiDecl abi2)
freeNamesDeclABI :: IfaceDeclABI -> NameSet
freeNamesDeclABI (_mod, decl, extras) =
freeNamesIfDecl decl `unionNameSet` freeNamesDeclExtras extras
freeNamesDeclExtras :: IfaceDeclExtras -> NameSet
freeNamesDeclExtras (IfaceIdExtras id_extras)
= freeNamesIdExtras id_extras
freeNamesDeclExtras (IfaceDataExtras _ insts _ subs)
= unionNameSets (mkNameSet insts : map freeNamesIdExtras subs)
freeNamesDeclExtras (IfaceClassExtras _ insts _ subs defms)
= unionNameSets $
mkNameSet insts : mkNameSet defms : map freeNamesIdExtras subs
freeNamesDeclExtras (IfaceSynonymExtras _ _)
= emptyNameSet
freeNamesDeclExtras (IfaceFamilyExtras _ insts _)
= mkNameSet insts
freeNamesDeclExtras IfaceOtherDeclExtras
= emptyNameSet
freeNamesIdExtras :: IfaceIdExtras -> NameSet
freeNamesIdExtras (IdExtras _ rules _) = unionNameSets (map freeNamesIfRule rules)
instance Outputable IfaceDeclExtras where
ppr IfaceOtherDeclExtras = Outputable.empty
ppr (IfaceIdExtras extras) = ppr_id_extras extras
ppr (IfaceSynonymExtras fix anns) = vcat [ppr fix, ppr anns]
ppr (IfaceFamilyExtras fix finsts anns) = vcat [ppr fix, ppr finsts, ppr anns]
ppr (IfaceDataExtras fix insts anns stuff) = vcat [ppr fix, ppr_insts insts, ppr anns,
ppr_id_extras_s stuff]
ppr (IfaceClassExtras fix insts anns stuff defms) =
vcat [ppr fix, ppr_insts insts, ppr anns,
ppr_id_extras_s stuff, ppr defms]
ppr_insts :: [IfaceInstABI] -> SDoc
ppr_insts _ = text "<insts>"
ppr_id_extras_s :: [IfaceIdExtras] -> SDoc
ppr_id_extras_s stuff = vcat (map ppr_id_extras stuff)
ppr_id_extras :: IfaceIdExtras -> SDoc
ppr_id_extras (IdExtras fix rules anns) = ppr fix $$ vcat (map ppr rules) $$ vcat (map ppr anns)
-- This instance is used only to compute fingerprints
instance Binary IfaceDeclExtras where
get _bh = panic "no get for IfaceDeclExtras"
put_ bh (IfaceIdExtras extras) = do
putByte bh 1; put_ bh extras
put_ bh (IfaceDataExtras fix insts anns cons) = do
putByte bh 2; put_ bh fix; put_ bh insts; put_ bh anns; put_ bh cons
put_ bh (IfaceClassExtras fix insts anns methods defms) = do
putByte bh 3
put_ bh fix
put_ bh insts
put_ bh anns
put_ bh methods
put_ bh defms
put_ bh (IfaceSynonymExtras fix anns) = do
putByte bh 4; put_ bh fix; put_ bh anns
put_ bh (IfaceFamilyExtras fix finsts anns) = do
putByte bh 5; put_ bh fix; put_ bh finsts; put_ bh anns
put_ bh IfaceOtherDeclExtras = putByte bh 6
instance Binary IfaceIdExtras where
get _bh = panic "no get for IfaceIdExtras"
put_ bh (IdExtras fix rules anns)= do { put_ bh fix; put_ bh rules; put_ bh anns }
declExtras :: (OccName -> Maybe Fixity)
-> (OccName -> [AnnPayload])
-> OccEnv [IfaceRule]
-> OccEnv [IfaceClsInst]
-> OccEnv [IfaceFamInst]
-> OccEnv IfExtName -- lookup default method names
-> IfaceDecl
-> IfaceDeclExtras
declExtras fix_fn ann_fn rule_env inst_env fi_env dm_env decl
= case decl of
IfaceId{} -> IfaceIdExtras (id_extras n)
IfaceData{ifCons=cons} ->
IfaceDataExtras (fix_fn n)
(map ifFamInstAxiom (lookupOccEnvL fi_env n) ++
map ifDFun (lookupOccEnvL inst_env n))
(ann_fn n)
(map (id_extras . occName . ifConName) (visibleIfConDecls cons))
IfaceClass{ifBody = IfConcreteClass { ifSigs=sigs, ifATs=ats }} ->
IfaceClassExtras (fix_fn n) insts (ann_fn n) meths defms
where
insts = (map ifDFun $ (concatMap at_extras ats)
++ lookupOccEnvL inst_env n)
-- Include instances of the associated types
-- as well as instances of the class (#5147)
meths = [id_extras (getOccName op) | IfaceClassOp op _ _ <- sigs]
-- Names of all the default methods (see Note [default method Name])
defms = [ dmName
| IfaceClassOp bndr _ (Just _) <- sigs
, let dmOcc = mkDefaultMethodOcc (nameOccName bndr)
, Just dmName <- [lookupOccEnv dm_env dmOcc] ]
IfaceSynonym{} -> IfaceSynonymExtras (fix_fn n)
(ann_fn n)
IfaceFamily{} -> IfaceFamilyExtras (fix_fn n)
(map ifFamInstAxiom (lookupOccEnvL fi_env n))
(ann_fn n)
_other -> IfaceOtherDeclExtras
where
n = getOccName decl
id_extras occ = IdExtras (fix_fn occ) (lookupOccEnvL rule_env occ) (ann_fn occ)
at_extras (IfaceAT decl _) = lookupOccEnvL inst_env (getOccName decl)
{- Note [default method Name] (see also #15970)
The Names for the default methods aren't available in Iface syntax.
* We originally start with a DefMethInfo from the class, which contains a
  Name for the default method
* We turn that into Iface syntax as a DefMethSpec which lacks a Name
entirely. Why? Because the Name can be derived from the method name
(in GHC.IfaceToCore), so doesn't need to be serialised into the interface
file.
But now we have to get the Name back, because the class declaration's
fingerprint needs to depend on it (this was the bug in #15970). This
is done in a slightly convoluted way:
* First, in addFingerprints we build a map that maps OccNames to Names
* We pass that map to declExtras which laboriously looks up in the map
(using the derived occurrence name) to recover the Name we have just
thrown away.
-}
lookupOccEnvL :: OccEnv [v] -> OccName -> [v]
lookupOccEnvL env k = lookupOccEnv env k `orElse` []
{-
-- for testing: use the md5sum command to generate fingerprints and
-- compare the results against our built-in version.
fp' <- oldMD5 dflags bh
if fp /= fp' then pprPanic "computeFingerprint" (ppr fp <+> ppr fp')
else return fp
oldMD5 dflags bh = do
tmp <- newTempName dflags CurrentModule "bin"
writeBinMem bh tmp
tmp2 <- newTempName dflags CurrentModule "md5"
let cmd = "md5sum " ++ tmp ++ " >" ++ tmp2
r <- system cmd
case r of
ExitFailure _ -> throwGhcExceptionIO (PhaseFailed cmd r)
ExitSuccess -> do
hash_str <- readFile tmp2
return $! readHexFingerprint hash_str
-}
----------------------
-- mkOrphMap partitions instance decls or rules into
-- (a) an OccEnv for ones that are not orphans,
-- mapping the local OccName to a list of its decls
-- (b) a list of orphan decls
mkOrphMap :: (decl -> IsOrphan) -- Extract orphan status from decl
-> [decl] -- Sorted into canonical order
-> (OccEnv [decl], -- Non-orphan decls associated with their key;
-- each sublist in canonical order
[decl]) -- Orphan decls; in canonical order
mkOrphMap get_key decls
= foldl' go (emptyOccEnv, []) decls
where
go (non_orphs, orphs) d
| NotOrphan occ <- get_key d
= (extendOccEnv_Acc (:) singleton non_orphs occ d, orphs)
| otherwise = (non_orphs, d:orphs)
{-
************************************************************************
* *
COMPLETE Pragmas
* *
************************************************************************
-}
mkIfaceCompleteSig :: CompleteMatch -> IfaceCompleteMatch
mkIfaceCompleteSig (CompleteMatch cls tc) = IfaceCompleteMatch cls tc
{-
************************************************************************
* *
Keeping track of what we've slurped, and fingerprints
* *
************************************************************************
-}
mkIfaceAnnotation :: Annotation -> IfaceAnnotation
mkIfaceAnnotation (Annotation { ann_target = target, ann_value = payload })
= IfaceAnnotation {
ifAnnotatedTarget = fmap nameOccName target,
ifAnnotatedValue = payload
}
mkIfaceExports :: [AvailInfo] -> [IfaceExport] -- Sort to make canonical
mkIfaceExports exports
= sortBy stableAvailCmp (map sort_subs exports)
where
sort_subs :: AvailInfo -> AvailInfo
sort_subs (Avail n) = Avail n
sort_subs (AvailTC n [] fs) = AvailTC n [] (sort_flds fs)
sort_subs (AvailTC n (m:ms) fs)
| n==m = AvailTC n (m:sortBy stableNameCmp ms) (sort_flds fs)
| otherwise = AvailTC n (sortBy stableNameCmp (m:ms)) (sort_flds fs)
-- Maintain the AvailTC Invariant
sort_flds = sortBy (stableNameCmp `on` flSelector)
{-
Note [Original module]
~~~~~~~~~~~~~~~~~~~~~
Consider this:
module X where { data family T }
module Y( T(..) ) where { import X; data instance T Int = MkT Int }
The exported Avail from Y will look like
X.T{X.T, Y.MkT}
That is, in Y,
- only MkT is brought into scope by the data instance;
- but the parent (used for grouping and naming in T(..) exports) is X.T
- and in this case we export X.T too
In the result of mkIfaceExports, the names are grouped by defining module,
so we may need to split up a single Avail into multiple ones.
Note [Internal used_names]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Most of the used_names are External Names, but we can have Internal
Names too: see Note [Binders in Template Haskell] in Convert, and
#5362 for an example. Such Names are always:
  - for locally-defined things, for which we don't gather usage info,
    so we can just ignore them in ent_map
  - System Names, hence the assert, just as a double check.
************************************************************************
* *
Load the old interface file for this module (unless
we have it already), and check whether it is up to date
* *
************************************************************************
-}
data RecompileRequired
= UpToDate
-- ^ everything is up to date, recompilation is not required
| MustCompile
-- ^ The .hs file has been touched, or the .o/.hi file does not exist
| RecompBecause String
-- ^ The .o/.hi files are up to date, but something else has changed
-- to force recompilation; the String says what (one-line summary)
deriving Eq
instance Semigroup RecompileRequired where
UpToDate <> r = r
mc <> _ = mc
instance Monoid RecompileRequired where
mempty = UpToDate
recompileRequired :: RecompileRequired -> Bool
recompileRequired UpToDate = False
recompileRequired _ = True
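-- The Semigroup and Monoid instances above let independent checks be combined
-- with 'mconcat': the first result that demands recompilation wins. A small
-- illustrative sketch (not part of the original module; the name is invented):
recompMconcatExample :: RecompileRequired
recompMconcatExample = mconcat [UpToDate, RecompBecause "flags changed", MustCompile]
-- recompMconcatExample == RecompBecause "flags changed"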
-- | Top level function to check if the version of an old interface file
-- is equivalent to the current source file the user asked us to compile.
-- If the same, we can avoid recompilation. We return a tuple where the
-- first element is a bool saying if we should recompile the object file
-- and the second is maybe the interface file, where Nothing means to
-- rebuild the interface file and not use the existing one.
checkOldIface
:: HscEnv
-> ModSummary
-> SourceModified
-> Maybe ModIface -- Old interface from compilation manager, if any
-> IO (RecompileRequired, Maybe ModIface)
checkOldIface hsc_env mod_summary source_modified maybe_iface
= do let dflags = hsc_dflags hsc_env
showPass dflags $
"Checking old interface for " ++
(showPpr dflags $ ms_mod mod_summary) ++
" (use -ddump-hi-diffs for more details)"
initIfaceCheck (text "checkOldIface") hsc_env $
check_old_iface hsc_env mod_summary source_modified maybe_iface
check_old_iface
:: HscEnv
-> ModSummary
-> SourceModified
-> Maybe ModIface
-> IfG (RecompileRequired, Maybe ModIface)
check_old_iface hsc_env mod_summary src_modified maybe_iface
= let dflags = hsc_dflags hsc_env
getIface =
case maybe_iface of
Just _ -> do
traceIf (text "We already have the old interface for" <+>
ppr (ms_mod mod_summary))
return maybe_iface
Nothing -> loadIface
loadIface = do
let iface_path = msHiFilePath mod_summary
read_result <- readIface (ms_mod mod_summary) iface_path
case read_result of
Failed err -> do
traceIf (text "FYI: cannot read old interface file:" $$ nest 4 err)
traceHiDiffs (text "Old interface file was invalid:" $$ nest 4 err)
return Nothing
Succeeded iface -> do
traceIf (text "Read the interface file" <+> text iface_path)
return $ Just iface
src_changed
| gopt Opt_ForceRecomp (hsc_dflags hsc_env) = True
| SourceModified <- src_modified = True
| otherwise = False
in do
when src_changed $
traceHiDiffs (nest 4 $ text "Source file changed or recompilation check turned off")
case src_changed of
-- If the source has changed and we're in interactive mode,
-- avoid reading an interface; just return the one we might
-- have been supplied with.
True | not (isObjectTarget $ hscTarget dflags) ->
return (MustCompile, maybe_iface)
-- Try and read the old interface for the current module
-- from the .hi file left from the last time we compiled it
True -> do
maybe_iface' <- getIface
return (MustCompile, maybe_iface')
False -> do
maybe_iface' <- getIface
case maybe_iface' of
-- We can't retrieve the iface
Nothing -> return (MustCompile, Nothing)
-- We have got the old iface; check its versions
-- even in the SourceUnmodifiedAndStable case we
-- should check versions because some packages
-- might have changed or gone away.
Just iface -> checkVersions hsc_env mod_summary iface
-- | Check if a module is still the same 'version'.
--
-- This function is called in the recompilation checker after we have
-- determined that the module M being checked hasn't had any changes
-- to its source file since we last compiled M. So at this point in general
-- two things may have changed that mean we should recompile M:
-- * The interface exported by a dependency of M has changed.
-- * The compiler flags specified this time for M have changed
-- in a manner that is significant for recompilation.
-- We return not just if we should recompile the object file but also
-- if we should rebuild the interface file.
checkVersions :: HscEnv
-> ModSummary
-> ModIface -- Old interface
-> IfG (RecompileRequired, Maybe ModIface)
checkVersions hsc_env mod_summary iface
= do { traceHiDiffs (text "Considering whether compilation is required for" <+>
ppr (mi_module iface) <> colon)
-- readIface will have verified that the InstalledUnitId matches,
-- but we ALSO must make sure the instantiation matches up. See
-- test case bkpcabal04!
; if moduleUnitId (mi_module iface) /= thisPackage (hsc_dflags hsc_env)
then return (RecompBecause "-this-unit-id changed", Nothing) else do {
; recomp <- checkFlagHash hsc_env iface
; if recompileRequired recomp then return (recomp, Nothing) else do {
; recomp <- checkOptimHash hsc_env iface
; if recompileRequired recomp then return (recomp, Nothing) else do {
; recomp <- checkHpcHash hsc_env iface
; if recompileRequired recomp then return (recomp, Nothing) else do {
; recomp <- checkMergedSignatures mod_summary iface
; if recompileRequired recomp then return (recomp, Nothing) else do {
; recomp <- checkHsig mod_summary iface
; if recompileRequired recomp then return (recomp, Nothing) else do {
; recomp <- checkHie mod_summary
; if recompileRequired recomp then return (recomp, Nothing) else do {
; recomp <- checkDependencies hsc_env mod_summary iface
; if recompileRequired recomp then return (recomp, Just iface) else do {
; recomp <- checkPlugins hsc_env iface
; if recompileRequired recomp then return (recomp, Nothing) else do {
-- Source code unchanged and no errors yet... carry on
--
-- First put the dependent-module info, read from the old
-- interface, into the envt, so that when we look for
-- interfaces we look for the right one (.hi or .hi-boot)
--
-- It's just temporary because either the usage check will succeed
-- (in which case we are done with this module) or it'll fail (in which
-- case we'll compile the module from scratch anyhow).
--
-- We do this regardless of compilation mode, although in --make mode
-- all the dependent modules should be in the HPT already, so it's
-- quite redundant
; updateEps_ $ \eps -> eps { eps_is_boot = mod_deps }
; recomp <- checkList [checkModUsage this_pkg u | u <- mi_usages iface]
; return (recomp, Just iface)
}}}}}}}}}}
where
this_pkg = thisPackage (hsc_dflags hsc_env)
-- This is a bit of a hack really
mod_deps :: ModuleNameEnv (ModuleName, IsBootInterface)
mod_deps = mkModDeps (dep_mods (mi_deps iface))
-- | Check if any plugins are requesting recompilation
checkPlugins :: HscEnv -> ModIface -> IfG RecompileRequired
checkPlugins hsc iface = liftIO $ do
new_fingerprint <- fingerprintPlugins hsc
let old_fingerprint = mi_plugin_hash (mi_final_exts iface)
pr <- mconcat <$> mapM pluginRecompile' (plugins (hsc_dflags hsc))
return $
pluginRecompileToRecompileRequired old_fingerprint new_fingerprint pr
fingerprintPlugins :: HscEnv -> IO Fingerprint
fingerprintPlugins hsc_env = do
fingerprintPlugins' $ plugins (hsc_dflags hsc_env)
fingerprintPlugins' :: [PluginWithArgs] -> IO Fingerprint
fingerprintPlugins' plugins = do
res <- mconcat <$> mapM pluginRecompile' plugins
return $ case res of
NoForceRecompile -> fingerprintString "NoForceRecompile"
ForceRecompile -> fingerprintString "ForceRecompile"
-- is the chance of collision worth worrying about?
-- An alternative is to fingerprintFingerprints [fingerprintString
-- "maybeRecompile", fp]
(MaybeRecompile fp) -> fp
pluginRecompileToRecompileRequired
:: Fingerprint -> Fingerprint -> PluginRecompile -> RecompileRequired
pluginRecompileToRecompileRequired old_fp new_fp pr
| old_fp == new_fp =
case pr of
NoForceRecompile -> UpToDate
-- we already checked the fingerprint above so a mismatch is not possible
-- here, remember that: `fingerprint (MaybeRecomp x) == x`.
MaybeRecompile _ -> UpToDate
-- when we have an impure plugin in the stack we have to unconditionally
-- recompile since it might integrate all sorts of crazy IO results into
-- its compilation output.
ForceRecompile -> RecompBecause "Impure plugin forced recompilation"
| old_fp `elem` magic_fingerprints ||
new_fp `elem` magic_fingerprints
-- The fingerprints do not match, and either the old or the new one is a
-- magic fingerprint. This happens when non-pure plugins are added for the
-- first time or when we go from one recompilation strategy to another:
-- (force -> no-force, maybe-recomp -> no-force, no-force -> maybe-recomp etc.)
--
-- For example, when we go from ForceRecomp to NoForceRecomp
-- recompilation is triggered since the old impure plugins could have
-- changed the build output which is now back to normal.
= RecompBecause "Plugins changed"
| otherwise =
let reason = "Plugin fingerprint changed" in
case pr of
-- even though a plugin is forcing recompilation, the fingerprint changed,
-- which would cause recompilation anyway, so we report the fingerprint
-- change instead.
ForceRecompile -> RecompBecause reason
_ -> RecompBecause reason
where
magic_fingerprints =
[ fingerprintString "NoForceRecompile"
, fingerprintString "ForceRecompile"
]
-- | Check if an hsig file needs recompilation because its
-- implementing module has changed.
checkHsig :: ModSummary -> ModIface -> IfG RecompileRequired
checkHsig mod_summary iface = do
dflags <- getDynFlags
let outer_mod = ms_mod mod_summary
inner_mod = canonicalizeHomeModule dflags (moduleName outer_mod)
MASSERT( moduleUnitId outer_mod == thisPackage dflags )
case inner_mod == mi_semantic_module iface of
True -> up_to_date (text "implementing module unchanged")
False -> return (RecompBecause "implementing module changed")
-- | Check if @.hie@ file is out of date or missing.
checkHie :: ModSummary -> IfG RecompileRequired
checkHie mod_summary = do
dflags <- getDynFlags
let hie_date_opt = ms_hie_date mod_summary
hs_date = ms_hs_date mod_summary
pure $ case gopt Opt_WriteHie dflags of
False -> UpToDate
True -> case hie_date_opt of
Nothing -> RecompBecause "HIE file is missing"
Just hie_date
| hie_date < hs_date
-> RecompBecause "HIE file is out of date"
| otherwise
-> UpToDate
-- | Check the flags haven't changed
checkFlagHash :: HscEnv -> ModIface -> IfG RecompileRequired
checkFlagHash hsc_env iface = do
let old_hash = mi_flag_hash (mi_final_exts iface)
new_hash <- liftIO $ fingerprintDynFlags (hsc_dflags hsc_env)
(mi_module iface)
putNameLiterally
case old_hash == new_hash of
True -> up_to_date (text "Module flags unchanged")
False -> out_of_date_hash "flags changed"
(text " Module flags have changed")
old_hash new_hash
-- | Check the optimisation flags haven't changed
checkOptimHash :: HscEnv -> ModIface -> IfG RecompileRequired
checkOptimHash hsc_env iface = do
let old_hash = mi_opt_hash (mi_final_exts iface)
new_hash <- liftIO $ fingerprintOptFlags (hsc_dflags hsc_env)
putNameLiterally
if | old_hash == new_hash
-> up_to_date (text "Optimisation flags unchanged")
| gopt Opt_IgnoreOptimChanges (hsc_dflags hsc_env)
-> up_to_date (text "Optimisation flags changed; ignoring")
| otherwise
-> out_of_date_hash "Optimisation flags changed"
(text " Optimisation flags have changed")
old_hash new_hash
-- | Check the HPC flags haven't changed
checkHpcHash :: HscEnv -> ModIface -> IfG RecompileRequired
checkHpcHash hsc_env iface = do
let old_hash = mi_hpc_hash (mi_final_exts iface)
new_hash <- liftIO $ fingerprintHpcFlags (hsc_dflags hsc_env)
putNameLiterally
if | old_hash == new_hash
-> up_to_date (text "HPC flags unchanged")
| gopt Opt_IgnoreHpcChanges (hsc_dflags hsc_env)
-> up_to_date (text "HPC flags changed; ignoring")
| otherwise
-> out_of_date_hash "HPC flags changed"
(text " HPC flags have changed")
old_hash new_hash
-- Check that the set of signatures we are merging in matches.
-- If the -unit-id flags change, this can change too.
checkMergedSignatures :: ModSummary -> ModIface -> IfG RecompileRequired
checkMergedSignatures mod_summary iface = do
dflags <- getDynFlags
let old_merged = sort [ mod | UsageMergedRequirement{ usg_mod = mod } <- mi_usages iface ]
new_merged = case Map.lookup (ms_mod_name mod_summary)
(requirementContext (pkgState dflags)) of
Nothing -> []
Just r -> sort $ map (indefModuleToModule dflags) r
if old_merged == new_merged
then up_to_date (text "signatures to merge in unchanged" $$ ppr new_merged)
else return (RecompBecause "signatures to merge in changed")
-- If the direct imports of this module are resolved to targets that
-- are not among the dependencies of the previous interface file,
-- then we definitely need to recompile. This catches cases like
-- - an exposed package has been upgraded
-- - we are compiling with different package flags
-- - a home module that was shadowing a package module has been removed
-- - a new home module has been added that shadows a package module
-- See bug #1372.
--
-- In addition, we also check if the union of dependencies of the imported
-- modules has any difference to the previous set of dependencies. We would need
-- to recompile in that case also since the `mi_deps` field of ModIface needs
-- to be updated to match that information. This is one of the invariants
-- of interface files (see https://gitlab.haskell.org/ghc/ghc/wikis/commentary/compiler/recompilation-avoidance#interface-file-invariants).
-- See bug #16511.
--
-- Returns (RecompBecause <textual reason>) if recompilation is required.
checkDependencies :: HscEnv -> ModSummary -> ModIface -> IfG RecompileRequired
checkDependencies hsc_env summary iface
= do
checkList $
[ checkList (map dep_missing (ms_imps summary ++ ms_srcimps summary))
, do
(recomp, mnames_seen) <- runUntilRecompRequired $ map
checkForNewHomeDependency
(ms_home_imps summary)
case recomp of
UpToDate -> do
let
seen_home_deps = Set.unions $ map Set.fromList mnames_seen
checkIfAllOldHomeDependenciesAreSeen seen_home_deps
_ -> return recomp]
where
prev_dep_mods = dep_mods (mi_deps iface)
prev_dep_plgn = dep_plgins (mi_deps iface)
prev_dep_pkgs = dep_pkgs (mi_deps iface)
this_pkg = thisPackage (hsc_dflags hsc_env)
dep_missing (mb_pkg, L _ mod) = do
find_res <- liftIO $ findImportedModule hsc_env mod (mb_pkg)
let reason = moduleNameString mod ++ " changed"
case find_res of
Found _ mod
| pkg == this_pkg
-> if moduleName mod `notElem` map fst prev_dep_mods ++ prev_dep_plgn
then do traceHiDiffs $
text "imported module " <> quotes (ppr mod) <>
text " not among previous dependencies"
return (RecompBecause reason)
else
return UpToDate
| otherwise
-> if toInstalledUnitId pkg `notElem` (map fst prev_dep_pkgs)
then do traceHiDiffs $
text "imported module " <> quotes (ppr mod) <>
text " is from package " <> quotes (ppr pkg) <>
text ", which is not among previous dependencies"
return (RecompBecause reason)
else
return UpToDate
where pkg = moduleUnitId mod
_otherwise -> return (RecompBecause reason)
old_deps = Set.fromList $ map fst $ filter (not . snd) prev_dep_mods
isOldHomeDeps = flip Set.member old_deps
checkForNewHomeDependency (L _ mname) = do
let
mod = mkModule this_pkg mname
str_mname = moduleNameString mname
reason = str_mname ++ " changed"
-- We only want to look at home modules to check if any new home dependency
-- pops in, so here we skip modules that are not home. Checking
-- membership in old home dependencies suffices because the `dep_missing`
-- check already verified that all imported home modules are present there.
if not (isOldHomeDeps mname)
then return (UpToDate, [])
else do
mb_result <- getFromModIface "need mi_deps for" mod $ \imported_iface -> do
let mnames = mname:(map fst $ filter (not . snd) $
dep_mods $ mi_deps imported_iface)
case find (not . isOldHomeDeps) mnames of
Nothing -> return (UpToDate, mnames)
Just new_dep_mname -> do
traceHiDiffs $
text "imported home module " <> quotes (ppr mod) <>
text " has a new dependency " <> quotes (ppr new_dep_mname)
return (RecompBecause reason, [])
return $ fromMaybe (MustCompile, []) mb_result
-- Performs all recompilation checks in the list until a check that yields
-- recompile required is encountered. Returns the list of the results of
-- all UpToDate checks.
runUntilRecompRequired [] = return (UpToDate, [])
runUntilRecompRequired (check:checks) = do
(recompile, value) <- check
if recompileRequired recompile
then return (recompile, [])
else do
(recomp, values) <- runUntilRecompRequired checks
return (recomp, value:values)
checkIfAllOldHomeDependenciesAreSeen seen_deps = do
let unseen_old_deps = Set.difference
old_deps
seen_deps
if not (null unseen_old_deps)
then do
let missing_dep = Set.elemAt 0 unseen_old_deps
traceHiDiffs $
text "missing old home dependency " <> quotes (ppr missing_dep)
return $ RecompBecause "missing old dependency"
else return UpToDate
needInterface :: Module -> (ModIface -> IfG RecompileRequired)
-> IfG RecompileRequired
needInterface mod continue
= do
mb_recomp <- getFromModIface
"need version info for"
mod
continue
case mb_recomp of
Nothing -> return MustCompile
Just recomp -> return recomp
getFromModIface :: String -> Module -> (ModIface -> IfG a)
-> IfG (Maybe a)
getFromModIface doc_msg mod getter
= do -- Load the imported interface if possible
let doc_str = sep [text doc_msg, ppr mod]
traceHiDiffs (text "Checking innterface for module" <+> ppr mod)
mb_iface <- loadInterface doc_str mod ImportBySystem
-- Load the interface, but don't complain on failure;
-- Instead, get an Either back which we can test
case mb_iface of
Failed _ -> do
traceHiDiffs (sep [text "Couldn't load interface for module",
ppr mod])
return Nothing
-- Couldn't find or parse a module mentioned in the
-- old interface file. Don't complain: it might
-- just be that the current module doesn't need that
-- import and it's been deleted
Succeeded iface -> Just <$> getter iface
-- | Given the usage information extracted from the old
-- M.hi file for the module being compiled, figure out
-- whether M needs to be recompiled.
checkModUsage :: UnitId -> Usage -> IfG RecompileRequired
checkModUsage _this_pkg UsagePackageModule{
usg_mod = mod,
usg_mod_hash = old_mod_hash }
= needInterface mod $ \iface -> do
let reason = moduleNameString (moduleName mod) ++ " changed"
checkModuleFingerprint reason old_mod_hash (mi_mod_hash (mi_final_exts iface))
-- We only track the ABI hash of package modules, rather than
-- individual entity usages, so if the ABI hash changes we must
-- recompile. This is safe but may entail more recompilation when
-- a dependent package has changed.
checkModUsage _ UsageMergedRequirement{ usg_mod = mod, usg_mod_hash = old_mod_hash }
= needInterface mod $ \iface -> do
let reason = moduleNameString (moduleName mod) ++ " changed (raw)"
checkModuleFingerprint reason old_mod_hash (mi_mod_hash (mi_final_exts iface))
checkModUsage this_pkg UsageHomeModule{
usg_mod_name = mod_name,
usg_mod_hash = old_mod_hash,
usg_exports = maybe_old_export_hash,
usg_entities = old_decl_hash }
= do
let mod = mkModule this_pkg mod_name
needInterface mod $ \iface -> do
let
new_mod_hash = mi_mod_hash (mi_final_exts iface)
new_decl_hash = mi_hash_fn (mi_final_exts iface)
new_export_hash = mi_exp_hash (mi_final_exts iface)
reason = moduleNameString mod_name ++ " changed"
-- CHECK MODULE
recompile <- checkModuleFingerprint reason old_mod_hash new_mod_hash
if not (recompileRequired recompile)
then return UpToDate
else do
-- CHECK EXPORT LIST
checkMaybeHash reason maybe_old_export_hash new_export_hash
(text " Export list changed") $ do
-- CHECK ITEMS ONE BY ONE
recompile <- checkList [ checkEntityUsage reason new_decl_hash u
| u <- old_decl_hash]
if recompileRequired recompile
then return recompile -- This one failed, so just bail out now
else up_to_date (text " Great! The bits I use are up to date")
checkModUsage _this_pkg UsageFile{ usg_file_path = file,
usg_file_hash = old_hash } =
liftIO $
handleIO handle $ do
new_hash <- getFileHash file
if (old_hash /= new_hash)
then return recomp
else return UpToDate
where
recomp = RecompBecause (file ++ " changed")
handle =
#if defined(DEBUG)
\e -> pprTrace "UsageFile" (text (show e)) $ return recomp
#else
\_ -> return recomp -- if we can't find the file, just recompile, don't fail
#endif
------------------------
checkModuleFingerprint :: String -> Fingerprint -> Fingerprint
-> IfG RecompileRequired
checkModuleFingerprint reason old_mod_hash new_mod_hash
| new_mod_hash == old_mod_hash
= up_to_date (text "Module fingerprint unchanged")
| otherwise
= out_of_date_hash reason (text " Module fingerprint has changed")
old_mod_hash new_mod_hash
------------------------
checkMaybeHash :: String -> Maybe Fingerprint -> Fingerprint -> SDoc
-> IfG RecompileRequired -> IfG RecompileRequired
checkMaybeHash reason maybe_old_hash new_hash doc continue
| Just hash <- maybe_old_hash, hash /= new_hash
= out_of_date_hash reason doc hash new_hash
| otherwise
= continue
------------------------
checkEntityUsage :: String
-> (OccName -> Maybe (OccName, Fingerprint))
-> (OccName, Fingerprint)
-> IfG RecompileRequired
checkEntityUsage reason new_hash (name,old_hash)
= case new_hash name of
Nothing -> -- We used it before, but it ain't there now
out_of_date reason (sep [text "No longer exported:", ppr name])
Just (_, new_hash) -- It's there, but is it up to date?
| new_hash == old_hash -> do traceHiDiffs (text " Up to date" <+> ppr name <+> parens (ppr new_hash))
return UpToDate
| otherwise -> out_of_date_hash reason (text " Out of date:" <+> ppr name)
old_hash new_hash
up_to_date :: SDoc -> IfG RecompileRequired
up_to_date msg = traceHiDiffs msg >> return UpToDate
out_of_date :: String -> SDoc -> IfG RecompileRequired
out_of_date reason msg = traceHiDiffs msg >> return (RecompBecause reason)
out_of_date_hash :: String -> SDoc -> Fingerprint -> Fingerprint -> IfG RecompileRequired
out_of_date_hash reason msg old_hash new_hash
= out_of_date reason (hsep [msg, ppr old_hash, text "->", ppr new_hash])
----------------------
checkList :: [IfG RecompileRequired] -> IfG RecompileRequired
-- This helper is used in two places
checkList [] = return UpToDate
checkList (check:checks) = do recompile <- check
if recompileRequired recompile
then return recompile
else checkList checks
{-
************************************************************************
* *
Converting things to their Iface equivalents
* *
************************************************************************
-}
tyThingToIfaceDecl :: TyThing -> IfaceDecl
tyThingToIfaceDecl (AnId id) = idToIfaceDecl id
tyThingToIfaceDecl (ATyCon tycon) = snd (tyConToIfaceDecl emptyTidyEnv tycon)
tyThingToIfaceDecl (ACoAxiom ax) = coAxiomToIfaceDecl ax
tyThingToIfaceDecl (AConLike cl) = case cl of
RealDataCon dc -> dataConToIfaceDecl dc -- for ppr purposes only
PatSynCon ps -> patSynToIfaceDecl ps
--------------------------
idToIfaceDecl :: Id -> IfaceDecl
-- The Id is already tidied, so that locally-bound names
-- (lambdas, for-alls) already have non-clashing OccNames
-- We can't tidy it here, locally, because it may have
-- free variables in its type or IdInfo
idToIfaceDecl id
= IfaceId { ifName = getName id,
ifType = toIfaceType (idType id),
ifIdDetails = toIfaceIdDetails (idDetails id),
ifIdInfo = toIfaceIdInfo (idInfo id) }
--------------------------
dataConToIfaceDecl :: DataCon -> IfaceDecl
dataConToIfaceDecl dataCon
= IfaceId { ifName = getName dataCon,
ifType = toIfaceType (dataConUserType dataCon),
ifIdDetails = IfVanillaId,
ifIdInfo = NoInfo }
--------------------------
coAxiomToIfaceDecl :: CoAxiom br -> IfaceDecl
-- We *do* tidy Axioms, because they are not (and cannot
-- conveniently be) built in tidy form
coAxiomToIfaceDecl ax@(CoAxiom { co_ax_tc = tycon, co_ax_branches = branches
, co_ax_role = role })
= IfaceAxiom { ifName = getName ax
, ifTyCon = toIfaceTyCon tycon
, ifRole = role
, ifAxBranches = map (coAxBranchToIfaceBranch tycon
(map coAxBranchLHS branch_list))
branch_list }
where
branch_list = fromBranches branches
-- 2nd parameter is the list of branch LHSs, in case of a closed type family,
-- for conversion from incompatible branches to incompatible indices.
-- For an open type family the list should be empty.
-- See Note [Storing compatibility] in CoAxiom
coAxBranchToIfaceBranch :: TyCon -> [[Type]] -> CoAxBranch -> IfaceAxBranch
coAxBranchToIfaceBranch tc lhs_s
(CoAxBranch { cab_tvs = tvs, cab_cvs = cvs
, cab_eta_tvs = eta_tvs
, cab_lhs = lhs, cab_roles = roles
, cab_rhs = rhs, cab_incomps = incomps })
= IfaceAxBranch { ifaxbTyVars = toIfaceTvBndrs tvs
, ifaxbCoVars = map toIfaceIdBndr cvs
, ifaxbEtaTyVars = toIfaceTvBndrs eta_tvs
, ifaxbLHS = toIfaceTcArgs tc lhs
, ifaxbRoles = roles
, ifaxbRHS = toIfaceType rhs
, ifaxbIncomps = iface_incomps }
where
iface_incomps = map (expectJust "iface_incomps"
. flip findIndex lhs_s
. eqTypes
. coAxBranchLHS) incomps
-----------------
tyConToIfaceDecl :: TidyEnv -> TyCon -> (TidyEnv, IfaceDecl)
-- We *do* tidy TyCons, because they are not (and cannot
-- conveniently be) built in tidy form
-- The returned TidyEnv is the one after tidying the tyConTyVars
tyConToIfaceDecl env tycon
| Just clas <- tyConClass_maybe tycon
= classToIfaceDecl env clas
| Just syn_rhs <- synTyConRhs_maybe tycon
= ( tc_env1
, IfaceSynonym { ifName = getName tycon,
ifRoles = tyConRoles tycon,
ifSynRhs = if_syn_type syn_rhs,
ifBinders = if_binders,
ifResKind = if_res_kind
})
| Just fam_flav <- famTyConFlav_maybe tycon
= ( tc_env1
, IfaceFamily { ifName = getName tycon,
ifResVar = if_res_var,
ifFamFlav = to_if_fam_flav fam_flav,
ifBinders = if_binders,
ifResKind = if_res_kind,
ifFamInj = tyConInjectivityInfo tycon
})
| isAlgTyCon tycon
= ( tc_env1
, IfaceData { ifName = getName tycon,
ifBinders = if_binders,
ifResKind = if_res_kind,
ifCType = tyConCType tycon,
ifRoles = tyConRoles tycon,
ifCtxt = tidyToIfaceContext tc_env1 (tyConStupidTheta tycon),
ifCons = ifaceConDecls (algTyConRhs tycon),
ifGadtSyntax = isGadtSyntaxTyCon tycon,
ifParent = parent })
| otherwise -- FunTyCon, PrimTyCon, promoted TyCon/DataCon
-- We only convert these TyCons to IfaceTyCons when we are
-- just about to pretty-print them, not because we are going
-- to put them into interface files
= ( env
, IfaceData { ifName = getName tycon,
ifBinders = if_binders,
ifResKind = if_res_kind,
ifCType = Nothing,
ifRoles = tyConRoles tycon,
ifCtxt = [],
ifCons = IfDataTyCon [],
ifGadtSyntax = False,
ifParent = IfNoParent })
where
-- NOTE: Not all TyCons have `tyConTyVars` field. Forcing this when `tycon`
-- is one of these TyCons (FunTyCon, PrimTyCon, PromotedDataCon) will cause
-- an error.
(tc_env1, tc_binders) = tidyTyConBinders env (tyConBinders tycon)
tc_tyvars = binderVars tc_binders
if_binders = toIfaceTyCoVarBinders tc_binders
-- No tidying of the binders; they are already tidy
if_res_kind = tidyToIfaceType tc_env1 (tyConResKind tycon)
if_syn_type ty = tidyToIfaceType tc_env1 ty
if_res_var = getOccFS `fmap` tyConFamilyResVar_maybe tycon
parent = case tyConFamInstSig_maybe tycon of
Just (tc, ty, ax) -> IfDataInstance (coAxiomName ax)
(toIfaceTyCon tc)
(tidyToIfaceTcArgs tc_env1 tc ty)
Nothing -> IfNoParent
to_if_fam_flav OpenSynFamilyTyCon = IfaceOpenSynFamilyTyCon
to_if_fam_flav AbstractClosedSynFamilyTyCon = IfaceAbstractClosedSynFamilyTyCon
to_if_fam_flav (DataFamilyTyCon {}) = IfaceDataFamilyTyCon
to_if_fam_flav (BuiltInSynFamTyCon {}) = IfaceBuiltInSynFamTyCon
to_if_fam_flav (ClosedSynFamilyTyCon Nothing) = IfaceClosedSynFamilyTyCon Nothing
to_if_fam_flav (ClosedSynFamilyTyCon (Just ax))
= IfaceClosedSynFamilyTyCon (Just (axn, ibr))
where defs = fromBranches $ coAxiomBranches ax
lhss = map coAxBranchLHS defs
ibr = map (coAxBranchToIfaceBranch tycon lhss) defs
axn = coAxiomName ax
ifaceConDecls (NewTyCon { data_con = con }) = IfNewTyCon (ifaceConDecl con)
ifaceConDecls (DataTyCon { data_cons = cons }) = IfDataTyCon (map ifaceConDecl cons)
ifaceConDecls (TupleTyCon { data_con = con }) = IfDataTyCon [ifaceConDecl con]
ifaceConDecls (SumTyCon { data_cons = cons }) = IfDataTyCon (map ifaceConDecl cons)
ifaceConDecls AbstractTyCon = IfAbstractTyCon
-- The AbstractTyCon case happens when a TyCon has been trimmed
-- during tidying.
-- Furthermore, tyThingToIfaceDecl is also used in TcRnDriver
-- for GHCi, when browsing a module, in which case the
-- AbstractTyCon and TupleTyCon cases are perfectly sensible.
-- (Tuple declarations are not serialised into interface files.)
ifaceConDecl data_con
= IfCon { ifConName = dataConName data_con,
ifConInfix = dataConIsInfix data_con,
ifConWrapper = isJust (dataConWrapId_maybe data_con),
ifConExTCvs = map toIfaceBndr ex_tvs',
ifConUserTvBinders = map toIfaceForAllBndr user_bndrs',
ifConEqSpec = map (to_eq_spec . eqSpecPair) eq_spec,
ifConCtxt = tidyToIfaceContext con_env2 theta,
ifConArgTys = map (tidyToIfaceType con_env2) arg_tys,
ifConFields = dataConFieldLabels data_con,
ifConStricts = map (toIfaceBang con_env2)
(dataConImplBangs data_con),
ifConSrcStricts = map toIfaceSrcBang
(dataConSrcBangs data_con)}
where
(univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _)
= dataConFullSig data_con
user_bndrs = dataConUserTyVarBinders data_con
-- Tidy the univ_tvs of the data constructor to be identical
-- to the tyConTyVars of the type constructor. This means
-- (a) we don't need to redundantly put them into the interface file
-- (b) when pretty-printing an Iface data declaration in H98-style syntax,
-- we know that the type variables will line up
-- The latter (b) is important because we pretty-print type constructors
-- by converting to Iface syntax and pretty-printing that
con_env1 = (fst tc_env1, mkVarEnv (zipEqual "ifaceConDecl" univ_tvs tc_tyvars))
-- A bit grimy, perhaps, but it's simple!
(con_env2, ex_tvs') = tidyVarBndrs con_env1 ex_tvs
user_bndrs' = map (tidyUserTyCoVarBinder con_env2) user_bndrs
to_eq_spec (tv,ty) = (tidyTyVar con_env2 tv, tidyToIfaceType con_env2 ty)
-- By this point, we have tidied every universal and existential
-- tyvar. Because of the dcUserTyCoVarBinders invariant
-- (see Note [DataCon user type variable binders]), *every*
-- user-written tyvar must be contained in the substitution that
-- tidying produced. Therefore, tidying the user-written tyvars is a
-- simple matter of looking up each variable in the substitution,
-- which tidyTyCoVarOcc accomplishes.
tidyUserTyCoVarBinder :: TidyEnv -> TyCoVarBinder -> TyCoVarBinder
tidyUserTyCoVarBinder env (Bndr tv vis) =
Bndr (tidyTyCoVarOcc env tv) vis
classToIfaceDecl :: TidyEnv -> Class -> (TidyEnv, IfaceDecl)
classToIfaceDecl env clas
= ( env1
, IfaceClass { ifName = getName tycon,
ifRoles = tyConRoles (classTyCon clas),
ifBinders = toIfaceTyCoVarBinders tc_binders,
ifBody = body,
ifFDs = map toIfaceFD clas_fds })
where
(_, clas_fds, sc_theta, _, clas_ats, op_stuff)
= classExtraBigSig clas
tycon = classTyCon clas
body | isAbstractTyCon tycon = IfAbstractClass
| otherwise
= IfConcreteClass {
ifClassCtxt = tidyToIfaceContext env1 sc_theta,
ifATs = map toIfaceAT clas_ats,
ifSigs = map toIfaceClassOp op_stuff,
ifMinDef = fmap getOccFS (classMinimalDef clas)
}
(env1, tc_binders) = tidyTyConBinders env (tyConBinders tycon)
toIfaceAT :: ClassATItem -> IfaceAT
toIfaceAT (ATI tc def)
= IfaceAT if_decl (fmap (tidyToIfaceType env2 . fst) def)
where
(env2, if_decl) = tyConToIfaceDecl env1 tc
toIfaceClassOp (sel_id, def_meth)
= ASSERT( sel_tyvars == binderVars tc_binders )
IfaceClassOp (getName sel_id)
(tidyToIfaceType env1 op_ty)
(fmap toDmSpec def_meth)
where
-- Be careful when splitting the type, because of things
-- like class Foo a where
-- op :: (?x :: String) => a -> a
-- and class Baz a where
-- op :: (Ord a) => a -> a
(sel_tyvars, rho_ty) = splitForAllTys (idType sel_id)
op_ty = funResultTy rho_ty
toDmSpec :: (Name, DefMethSpec Type) -> DefMethSpec IfaceType
toDmSpec (_, VanillaDM) = VanillaDM
toDmSpec (_, GenericDM dm_ty) = GenericDM (tidyToIfaceType env1 dm_ty)
toIfaceFD (tvs1, tvs2) = (map (tidyTyVar env1) tvs1
,map (tidyTyVar env1) tvs2)
--------------------------
tidyTyConBinder :: TidyEnv -> TyConBinder -> (TidyEnv, TyConBinder)
-- If the type variable "binder" is in scope, don't re-bind it
-- In a class decl, for example, the ATD binders mention
-- (and must mention) the class tyvars
tidyTyConBinder env@(_, subst) tvb@(Bndr tv vis)
= case lookupVarEnv subst tv of
Just tv' -> (env, Bndr tv' vis)
Nothing -> tidyTyCoVarBinder env tvb
tidyTyConBinders :: TidyEnv -> [TyConBinder] -> (TidyEnv, [TyConBinder])
tidyTyConBinders = mapAccumL tidyTyConBinder
tidyTyVar :: TidyEnv -> TyVar -> FastString
tidyTyVar (_, subst) tv = toIfaceTyVar (lookupVarEnv subst tv `orElse` tv)
--------------------------
instanceToIfaceInst :: ClsInst -> IfaceClsInst
instanceToIfaceInst (ClsInst { is_dfun = dfun_id, is_flag = oflag
, is_cls_nm = cls_name, is_cls = cls
, is_tcs = mb_tcs
, is_orphan = orph })
= ASSERT( cls_name == className cls )
IfaceClsInst { ifDFun = dfun_name,
ifOFlag = oflag,
ifInstCls = cls_name,
ifInstTys = map do_rough mb_tcs,
ifInstOrph = orph }
where
do_rough Nothing = Nothing
do_rough (Just n) = Just (toIfaceTyCon_name n)
dfun_name = idName dfun_id
--------------------------
famInstToIfaceFamInst :: FamInst -> IfaceFamInst
famInstToIfaceFamInst (FamInst { fi_axiom = axiom,
fi_fam = fam,
fi_tcs = roughs })
= IfaceFamInst { ifFamInstAxiom = coAxiomName axiom
, ifFamInstFam = fam
, ifFamInstTys = map do_rough roughs
, ifFamInstOrph = orph }
where
do_rough Nothing = Nothing
do_rough (Just n) = Just (toIfaceTyCon_name n)
fam_decl = tyConName $ coAxiomTyCon axiom
mod = ASSERT( isExternalName (coAxiomName axiom) )
nameModule (coAxiomName axiom)
is_local name = nameIsLocalOrFrom mod name
lhs_names = filterNameSet is_local (orphNamesOfCoCon axiom)
orph | is_local fam_decl
= NotOrphan (nameOccName fam_decl)
| otherwise
= chooseOrphanAnchor lhs_names
--------------------------
coreRuleToIfaceRule :: CoreRule -> IfaceRule
coreRuleToIfaceRule (BuiltinRule { ru_fn = fn})
= pprTrace "toHsRule: builtin" (ppr fn) $
bogusIfaceRule fn
coreRuleToIfaceRule (Rule { ru_name = name, ru_fn = fn,
ru_act = act, ru_bndrs = bndrs,
ru_args = args, ru_rhs = rhs,
ru_orphan = orph, ru_auto = auto })
= IfaceRule { ifRuleName = name, ifActivation = act,
ifRuleBndrs = map toIfaceBndr bndrs,
ifRuleHead = fn,
ifRuleArgs = map do_arg args,
ifRuleRhs = toIfaceExpr rhs,
ifRuleAuto = auto,
ifRuleOrph = orph }
where
-- For type args we must remove synonyms from the outermost
-- level. Reason: so that when we read it back in we'll
-- construct the same ru_rough field as we have right now;
-- see tcIfaceRule
do_arg (Type ty) = IfaceType (toIfaceType (deNoteType ty))
do_arg (Coercion co) = IfaceCo (toIfaceCoercion co)
do_arg arg = toIfaceExpr arg
bogusIfaceRule :: Name -> IfaceRule
bogusIfaceRule id_name
= IfaceRule { ifRuleName = fsLit "bogus", ifActivation = NeverActive,
ifRuleBndrs = [], ifRuleHead = id_name, ifRuleArgs = [],
ifRuleRhs = IfaceExt id_name, ifRuleOrph = IsOrphan,
ifRuleAuto = True }
|
sdiehl/ghc
|
compiler/GHC/Iface/Utils.hs
|
bsd-3-clause
| 89,366
| 9
| 39
| 27,303
| 14,549
| 7,624
| 6,925
| -1
| -1
|
module Music where
import Data.Binary (encode)
import qualified Data.ByteString.Lazy as BS
(ByteString, concat, putStr)
import Data.Function
import Data.Int (Int16)
import Data.List (unfoldr)
import System.Environment
import System.Random
import Synth
import Playback
-- |Plays a fading note with given waveform.
fadingNote :: Int -> (Time -> Signal) -> Double -> Signal
fadingNote n wave fadeSpeed = amp (fade fadeSpeed) (wave (midiNoteToFreq n))
-- |Plays a note with a nice timbre. Mixes slowly fading square wave with rapidly fading sine.
niceNote :: Int -> Signal
niceNote n = mix voice1 voice2
where
voice1 = volume 0.3 $ fadingNote n square 1.9
voice2 = volume 0.7 $ fadingNote n sine 4.0
-- |A warm, pad-like synth voice: mixes slightly detuned sine and square waves around the given base frequency.
warmSynth :: Double -> Frequency -> Signal
warmSynth delta freq' =
mixMany
[ volume 0.5 (mix (sine freq) (sine (freq * (1 + delta))))
, volume 0.4 (mix (sine (2 * freq)) (sine (2 * freq * (1 + delta))))
, volume 0.05 (mix (square freq) (square (freq * (1 + delta))))
]
where
freq = freq' * 0.5
-- * Instruments!
-- |Calculates an oscillation frequency for a MIDI note number, in an equally tempered scale.
midiNoteToFreq :: Int -> Frequency
midiNoteToFreq n = f0 * (aF ** (fromIntegral n - midiA4))
where
aF = 2 ** (1.0 / 12.0)
f0 = 440.0 -- A-4 in an ETS is 440 Hz.
midiA4 = 69 -- A-4 in MIDI is 69.
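-- Worked example (illustrative sketch, not part of the original module):
-- MIDI note 69 is concert A at exactly 440 Hz, and moving up 12 semitones
-- doubles the frequency (up to floating-point rounding).
concertA, concertA5 :: Frequency
concertA  = midiNoteToFreq 69   -- 440.0
concertA5 = midiNoteToFreq 81   -- ~880.0, one octave above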
noteToMidiNote :: Int -> Octave -> Int
noteToMidiNote t o = t + 12 * o
type Instrument = Int -> Signal
signalToInstrument :: (Frequency -> Signal) -> Instrument
signalToInstrument = (. midiNoteToFreq)
type Octave = Int
tonC = 0
tonCis = 1
tonD = 2
tonDis = 3
tonE = 4
tonF = 5
tonFis = 6
tonG = 7
tonGis = 8
tonA = 9
tonAis = 10
tonH = 11
tonC :: Int
tonCis :: Int
tonD :: Int
tonDis :: Int
tonE :: Int
tonF :: Int
tonFis :: Int
tonG :: Int
tonGis :: Int
tonA :: Int
tonAis :: Int
tonH :: Int
c, cis, d, dis, e, f, fis, g, gis, a, ais, b :: Octave
-> Time
-> Instrument
-> Music
c o t = Play t $ noteToMidiNote 0 o
cis o t = Play t $ noteToMidiNote 1 o
d o t = Play t $ noteToMidiNote 2 o
dis o t = Play t $ noteToMidiNote 3 o
e o t = Play t $ noteToMidiNote 4 o
f o t = Play t $ noteToMidiNote 5 o
fis o t = Play t $ noteToMidiNote 6 o
g o t = Play t $ noteToMidiNote 7 o
gis o t = Play t $ noteToMidiNote 8 o
a o t = Play t $ noteToMidiNote 9 o
ais o t = Play t $ noteToMidiNote 10 o
b o t = Play t $ noteToMidiNote 11 o
bd :: Instrument
bd n t = clip (-1) 1 (sine (midiNoteToFreq n * fade 10 t')) t'
where
t' = t * 8
clp :: Instrument
clp _ = square 8
hht :: Instrument
hht _ = clip (-0.3) 0.3 (amp (fade 10) noise)
hht2 :: Instrument
hht2 _ = clip (-0.3) 0.3 (amp (fade 100) noise)
sinc :: Instrument
sinc =
signalToInstrument
(\freq t ->
let x = freq * 2 * t
in sin (pi * x) / x)
wmm :: Instrument
wmm n = clip (-1) 1 (amp (unfade 15) (sine (midiNoteToFreq n)))
wrrrt :: Instrument
wrrrt _ = volume 0.4 (mix (square 9.01) (square 9))
brrst :: Instrument
brrst _ = volume 0.3 (square 4 . mix (const 2) (sine 3))
guuop :: Instrument
guuop n = sine (fromIntegral n) . mix (const 10) (sine 5)
---------------------------------------------------------------------------
data Music
= Rest !Time
| Play !Time
!Int
!Instrument
| Vol !Amplitude
!Music
| Music :|: Music
| Music :>: Music
infixl 2 :|:
infixr 3 :>:
transposeMusic :: Int -> Music -> Music
transposeMusic n m =
case m of
mL :|: mR -> transposeMusic n mL :|: transposeMusic n mR
m1 :>: m2 -> transposeMusic n m1 :>: transposeMusic n m2
Vol v m' -> Vol v (transposeMusic n m')
Play t note i -> Play t (note + n) i
Rest t -> Rest t
renderMusic :: Music -> Signal
renderMusic score t =
case score of
mL :|: mR -> mix (renderMusic mL) (renderMusic mR) t
m1 :>: m2 ->
if t < musicDuration m1
then renderMusic m1 t
else renderMusic m2 (t - musicDuration m1)
Play t' n i ->
if t < t'
then i n t
else silence t
Vol v m -> volume v (renderMusic m) t
Rest _ -> silence t
musicDuration :: Music -> Time
musicDuration m =
case m of
mL :|: mR -> (max `on` musicDuration) mL mR
m1 :>: m2 -> ((+) `on` musicDuration) m1 m2
Vol _ m' -> musicDuration m'
Play t _n _i -> t
Rest t -> t
rendr :: Music -> (Time, Signal)
rendr m = (musicDuration m, renderMusic m)
bpm :: Time -> Music -> Music
bpm b = tempo (b / 60)
tempo :: Time -> Music -> Music
tempo t m =
case m of
mL :|: mR -> tempo t mL :|: tempo t mR
m1 :>: m2 -> tempo t m1 :>: tempo t m2
Vol v m' -> Vol v (tempo t m')
Play t' n i -> Play (t' / t) n i
Rest t' -> Rest (t' / t)
-- * Music fun
times :: Int -> Music -> Music
times 1 m = m
times n m = m :>: times (n - 1) m
bassDrum :: Music
bassDrum = a 4 1 bd :>: Rest (3 / 8)
hihats :: Music
hihats = Rest (2 / 8) :>: a 4 1 hht :>: Rest (1 / 8)
simpleBeat :: Music
simpleBeat = bassDrum :|: hihats
cliqs :: Music
cliqs =
a 4 0.5 sinc :>:
Rest (1 / 16) :>:
a 5 0.5 sinc :>:
Rest (1 / 16) :>:
d 4 0.5 sinc :>: Rest (1 / 16) :>: e 5 0.5 sinc :>: Rest (1 / 16)
majorChord :: Music -> Music
majorChord m = m :|: transposeMusic 4 m :|: transposeMusic 7 m
minorChord :: Music -> Music
minorChord m = m :|: transposeMusic 3 m :|: transposeMusic 7 m
distributeOver :: Int -> Int -> Music -> Music -> Music
distributeOver beats bars beatM barM =
distributed (replicate beats beatM) (replicate (bars - beats) barM)
distributed :: [Music] -> [Music] -> Music
distributed beats bars = foldr1 (:>:) segment
where
segment = eucl (map pure beats) (map pure bars)
-- | Euclidean algorithm generates all traditional rhythms given a number of
-- rests and a co-prime number of hits.
eucl :: [[a]] -> [[a]] -> [a]
eucl xs ys
| null ys = head xs
| otherwise =
let xs' = zipWith (++) xs ys
ys'FromXs = drop (length ys) xs
ys'FromYs = drop (length xs) ys
ys' =
if null ys'FromXs
then ys'FromYs
else ys'FromXs
in eucl xs' ys'
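-- Quick sanity check (illustrative sketch, not part of the original module):
-- distributing 3 hits over 8 steps with 'eucl' yields the familiar tresillo
-- pattern E(3,8). Chars are used instead of Music so the result is easy to
-- eyeball; 'distributeOver 3 8 hit rest' sequences hit/rest in the same order.
tresilloExample :: String
tresilloExample = eucl (map pure "xxx") (map pure ".....")
-- tresilloExample == "x..x.x.."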
|
sheyll/haskell-kurs-audio-demo
|
src/Music.hs
|
bsd-3-clause
| 6,222
| 0
| 15
| 1,715
| 2,579
| 1,329
| 1,250
| 201
| 7
|
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE StandaloneDeriving #-}
module Data.Conduit.Kafka where
import Control.Lens
import Data.Default (Default, def)
import Data.ByteString as BS (ByteString)
import Control.Concurrent (threadDelay)
import Control.Monad (forever)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Trans.Resource (MonadResource)
import Data.Conduit ( Producer(..)
, Consumer(..)
, bracketP
, yield
, await
)
import Haskakafka ( Kafka
, KafkaError (..)
, KafkaMessage (..)
, KafkaProduceMessage (..)
, KafkaProducePartition (..)
, KafkaTopic
, KafkaOffset(..)
, newKafka
, newKafkaTopic
, addBrokers
, startConsuming
, stopConsuming
, consumeMessage
, produceMessage
, drainOutQueue
)
import Haskakafka.InternalRdKafkaEnum ( RdKafkaTypeT(..)
, RdKafkaRespErrT(..)
)
deriving instance Show KafkaOffset
deriving instance Eq KafkaOffset
data KafkaSettings = KafkaSettings { _brokers :: !String
, _topic :: !String
, _partition :: !Int
, _offset :: !KafkaOffset
, _timeout :: !Int
, _configOverrides :: [(String, String)]
, _topicOverrides :: [(String, String)]
} deriving (Show, Eq)
makeLenses ''KafkaSettings
instance Default KafkaSettings where
def = KafkaSettings { _brokers = "0.0.0.0"
, _topic = "default_topic"
, _partition = 0
, _offset = KafkaOffsetBeginning
, _timeout = -1 -- no timeout
, _configOverrides = [("socket.timeout.ms", "50000")]
, _topicOverrides = [("request.timeout.ms", "50000")]
}
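-- Settings are usually built from 'def' with the lenses generated above.
-- Illustrative sketch (the broker address and topic name are made-up values):
exampleSettings :: KafkaSettings
exampleSettings =
  def & brokers .~ "localhost:9092"
      & topic   .~ "example_topic"
      & timeout .~ 5000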
kafkaSource :: forall m. (MonadResource m, MonadIO m) => KafkaSettings -> Producer m KafkaMessage
kafkaSource ks = bracketP init fini consume
where
init :: IO (Kafka, KafkaTopic)
init = do
kafka <- newKafka RdKafkaConsumer (ks^.configOverrides)
addBrokers kafka (ks^.brokers)
topic <- newKafkaTopic kafka (ks^.topic) (ks^.topicOverrides)
startConsuming topic (ks^.partition) (ks^.offset)
return (kafka, topic)
fini :: (Kafka, KafkaTopic) -> IO ()
fini (_kafka, topic) = liftIO $ stopConsuming topic (ks^.partition)
consume :: (MonadIO m) => (Kafka, KafkaTopic) -> Producer m KafkaMessage
consume (k, t) = do
r <- liftIO $ consumeMessage t (ks^.partition) (ks^.timeout)
case r of
Left _error -> do
case _error of
KafkaResponseError RdKafkaRespErrPartitionEof -> do
liftIO $ threadDelay $ 1000 * 1000
consume(k, t)
otherwise -> do
liftIO $ print . show $ _error
return ()
Right m -> do
yield m
consume (k, t)
kafkaSink :: (MonadResource m, MonadIO m) => KafkaSettings -> Consumer BS.ByteString m (Maybe KafkaError)
kafkaSink ks = bracketP init fini produce
where
init :: IO (Kafka, KafkaTopic)
init = do
kafka <- newKafka RdKafkaProducer (ks^.configOverrides)
addBrokers kafka (ks^.brokers)
topic <- newKafkaTopic kafka (ks^.topic) (ks^.topicOverrides)
return (kafka, topic)
fini :: (Kafka, KafkaTopic) -> IO ()
fini (_kafka, _) = liftIO $ drainOutQueue _kafka
produce :: (MonadIO m) => (Kafka, KafkaTopic) -> Consumer BS.ByteString m (Maybe KafkaError)
produce (_, _topic) = forever $ do
_msg <- await
case _msg of
Just msg -> liftIO $ produceMessage _topic (KafkaSpecifiedPartition (ks^.partition)) (KafkaProduceMessage msg)
Nothing -> return $ Just (KafkaError "empty stream")
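-- Rough usage sketch, kept as a comment because it needs imports this module
-- does not bring in ('runResourceT' from Control.Monad.Trans.Resource, the
-- '($$)' connect operator from Data.Conduit, and Data.Conduit.List as CL);
-- 'messagePayload' is assumed to be Haskakafka's accessor for the message body:
--
--   printTopic :: IO ()
--   printTopic = runResourceT $
--     kafkaSource def $$ CL.mapM_ (liftIO . print . messagePayload)
--
-- i.e. stream every message of the default topic and print each payload.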
|
Atidot/kafka-conduit
|
src/Data/Conduit/Kafka.hs
|
bsd-3-clause
| 4,632
| 0
| 21
| 1,910
| 1,122
| 617
| 505
| 104
| 3
|
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable, DeriveFunctor, ViewPatterns #-}
{-# LANGUAGE RecordWildCards, OverloadedStrings, PatternGuards, ScopedTypeVariables #-}
-- | Types used to generate the input.
module Input.Item(
Sig(..), Ctx(..), Ty(..), prettySig,
Item(..), itemName,
Target(..), targetExpandURL, TargetId(..),
splitIPackage, splitIModule,
hseToSig, hseToItem, item_test,
unHTMLTarget
) where
import Numeric
import Control.Applicative
import Data.Tuple.Extra
import Language.Haskell.Exts
import Data.Char
import Data.List.Extra
import Data.Maybe
import Data.Ix
import Data.Binary
import Foreign.Storable
import Control.DeepSeq
import Data.Data
import General.Util
import General.Str
import General.IString
import Prelude
import qualified Data.Aeson as J
import Data.Aeson.Types
import Test.QuickCheck
---------------------------------------------------------------------
-- TYPES
data Sig n = Sig {sigCtx :: [Ctx n], sigTy :: [Ty n]} deriving (Show,Eq,Ord,Typeable,Data,Functor) -- list of -> types
data Ctx n = Ctx n n deriving (Show,Eq,Ord,Typeable,Data,Functor) -- context, second will usually be a free variable
data Ty n = TCon n [Ty n] | TVar n [Ty n] deriving (Show,Eq,Ord,Typeable,Data,Functor) -- type application, vectorised, all symbols may occur at multiple kinds
instance NFData n => NFData (Sig n) where rnf (Sig x y) = rnf x `seq` rnf y
instance NFData n => NFData (Ctx n) where rnf (Ctx x y) = rnf x `seq` rnf y
instance NFData n => NFData (Ty n) where
rnf (TCon x y) = rnf x `seq` rnf y
rnf (TVar x y) = rnf x `seq` rnf y
instance Binary n => Binary (Sig n) where
put (Sig a b) = put a >> put b
get = liftA2 Sig get get
instance Binary n => Binary (Ctx n) where
put (Ctx a b) = put a >> put b
get = liftA2 Ctx get get
instance Binary n => Binary (Ty n) where
put (TCon x y) = put (0 :: Word8) >> put x >> put y
put (TVar x y) = put (1 :: Word8) >> put x >> put y
get = do i :: Word8 <- get; liftA2 (case i of 0 -> TCon; 1 -> TVar) get get
prettySig :: Sig String -> String
prettySig Sig{..} =
    (if length sigCtx > 1 then "(" ++ ctx ++ ") => "
     else if null sigCtx then "" else ctx ++ " => ") ++
intercalate " -> " (map f sigTy)
where
ctx = intercalate ", " [a ++ " " ++ b | Ctx a b <- sigCtx]
f (TVar x xs) = f $ TCon x xs
f (TCon x []) = x
f (TCon x xs) = "(" ++ unwords (x : map f xs) ++ ")"
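-- Worked examples of the pretty-printer above (added for illustration; the
-- values are hypothetical):
--
--   prettySig (Sig [] [TCon "Maybe" [TVar "a" []], TCon "Int" []])
--     == "(Maybe a) -> Int"
--
--   prettySig (Sig [Ctx "Eq" "a", Ctx "Ord" "b"] [TVar "a" [], TVar "b" [], TCon "Bool" []])
--     == "(Eq a, Ord b) => a -> b -> Bool"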
---------------------------------------------------------------------
-- ITEMS
data Item
= IPackage PkgName
| IModule ModName
| IName Str
| ISignature (Sig IString)
| IAlias Str [IString] (Sig IString)
| IInstance (Sig IString)
deriving (Show,Eq,Ord,Typeable,Data)
instance NFData Item where
rnf (IPackage x) = rnf x
rnf (IModule x) = rnf x
rnf (IName x) = x `seq` ()
rnf (ISignature x) = rnf x
rnf (IAlias a b c) = rnf (a,b,c)
rnf (IInstance a) = rnf a
itemName :: Item -> Maybe Str
itemName (IPackage x) = Just x
itemName (IModule x) = Just x
itemName (IName x) = Just x
itemName (ISignature _) = Nothing
itemName (IAlias x _ _) = Just x
itemName (IInstance _) = Nothing
---------------------------------------------------------------------
-- DATABASE
newtype TargetId = TargetId Word32 deriving (Eq,Ord,Storable,NFData,Ix,Typeable)
instance Show TargetId where
show (TargetId x) = showHex x ""
-- | A location of documentation.
data Target = Target
{targetURL :: URL -- ^ URL where this thing is located
,targetPackage :: Maybe (String, URL) -- ^ Name and URL of the package it is in (Nothing if it is a package)
,targetModule :: Maybe (String, URL) -- ^ Name and URL of the module it is in (Nothing if it is a package or module)
,targetType :: String -- ^ One of package, module or empty string
,targetItem :: String -- ^ HTML span of the item, using @\<s0\>@ for the name and @\<s1\>@ onwards for arguments
,targetDocs :: String -- ^ HTML documentation to show, a sequence of block level elements
} deriving (Show,Eq,Ord)
instance NFData Target where
rnf (Target a b c d e f) = rnf a `seq` rnf b `seq` rnf c `seq` rnf d `seq` rnf e `seq` rnf f
instance ToJSON Target where
toJSON (Target a b c d e f) = object [
("url", toJSON a),
("package", maybeNamedURL b),
("module", maybeNamedURL c),
("type", toJSON d),
("item", toJSON e),
("docs", toJSON f)
]
where
maybeNamedURL m = maybe emptyObject namedURL m
namedURL (name, url) = object [("name", toJSON name), ("url", toJSON url)]
instance FromJSON Target where
parseJSON = withObject "Target" $ \o ->
Target <$> o .: "url"
<*> o `namedUrl` "package"
<*> o `namedUrl` "module"
<*> o .: "type"
<*> o .: "item"
<*> o .: "docs"
where namedUrl o' n = do
mObj <- o' .: n
if null mObj then pure Nothing
else do
pkName <- mObj .: "name"
pkUrl <- mObj .: "url"
pure $ Just (pkName, pkUrl)
instance Arbitrary Target where
arbitrary = Target <$> a
<*> mNurl
<*> mNurl
<*> a
<*> a
<*> a
where a = arbitrary
mNurl = do
oneof [pure Nothing
, Just <$> liftA2 (,) a a]
targetExpandURL :: Target -> Target
targetExpandURL t@Target{..} = t{targetURL = url, targetModule = second (const mod) <$> targetModule}
where
pkg = maybe "" snd targetPackage
mod = maybe pkg (plus pkg . snd) targetModule
url = plus mod targetURL
plus a b | b == "" = ""
| ':':_ <- dropWhile isAsciiLower b = b -- match http: etc
| otherwise = a ++ b
unHTMLTarget :: Target -> Target
unHTMLTarget t@Target {..} = t{targetItem=unHTML targetItem, targetDocs=unHTML targetDocs}
splitIPackage, splitIModule :: [(a, Item)] -> [(Str, [(a, Item)])]
splitIPackage = splitUsing $ \x -> case snd x of IPackage x -> Just x; _ -> Nothing
splitIModule = splitUsing $ \x -> case snd x of IModule x -> Just x; _ -> Nothing
splitUsing :: (a -> Maybe Str) -> [a] -> [(Str, [a])]
splitUsing f = repeatedly $ \(x:xs) ->
let (a,b) = break (isJust . f) xs
in ((fromMaybe mempty $ f x, x:a), b)
item_test :: IO ()
item_test = testing "Input.Item.Target JSON (encode . decode = id) " $ do
quickCheck $ \(t :: Target) -> case J.eitherDecode $ J.encode t of
        (Left _)   -> False
        (Right t') -> t == t'
---------------------------------------------------------------------
-- HSE CONVERSION
hseToSig :: Type a -> Sig String
hseToSig = tyForall
where
-- forall at the top is different
tyForall (TyParen _ x) = tyForall x
tyForall (TyForall _ _ c t) | Sig cs ts <- tyForall t =
Sig (maybe [] (concatMap ctx . fromContext) c ++ cs) ts
tyForall x = Sig [] $ tyFun x
tyFun (TyParen _ x) = tyFun x
tyFun (TyFun _ a b) = ty a : tyFun b
tyFun x = [ty x]
ty (TyForall _ _ _ x) = TCon "\\/" [ty x]
ty x@TyFun{} = TCon "->" $ tyFun x
ty (TyTuple an box ts) = TCon (fromQName $ Special an $ TupleCon an box $ length ts - 1) (map ty ts)
ty (TyList _ x) = TCon "[]" [ty x]
ty (TyParArray _ x) = TCon "[::]" [ty x]
ty (TyApp _ x y) = case ty x of
TCon a b -> TCon a (b ++ [ty y])
TVar a b -> TVar a (b ++ [ty y])
ty (TyVar _ x) = TVar (fromName x) []
ty (TyCon _ x) = TCon (fromQName x) []
ty (TyInfix an a (UnpromotedName _ b) c) = ty $ let ap = TyApp an in TyCon an b `ap` a `ap` c
ty (TyKind _ x _) = ty x
ty (TyBang _ _ _ x) = ty x
ty (TyParen _ x) = ty x
ty _ = TVar "_" []
ctx (ParenA _ x) = ctx x
ctx (TypeA _ x) = ctxTy x
ctx _ = []
ctxTy (TyInfix an a (UnpromotedName _ con) b) = ctxTy $ TyApp an (TyApp an (TyCon an con) a) b
ctxTy (fromTyApps -> TyCon _ con:TyVar _ var:_) = [Ctx (fromQName con) (fromName var)]
ctxTy _ = []
fromTyApps (TyApp _ x y) = fromTyApps x ++ [y]
fromTyApps x = [x]
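-- For orientation (illustrative): the source type "Eq a => a -> [a] -> Bool"
-- is converted by hseToSig into
--
--   Sig [Ctx "Eq" "a"] [TVar "a" [], TCon "[]" [TVar "a" []], TCon "Bool" []]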
hseToItem :: Decl a -> [Item]
hseToItem (TypeSig _ names ty) = ISignature (toIString . strPack <$> hseToSig ty) : map (IName . strPack . fromName) names
hseToItem (TypeDecl _ (fromDeclHead -> (name, bind)) rhs) = [IAlias (strPack $ fromName name) (map (toIString . strPack . fromName . fromTyVarBind) bind) (toIString . strPack <$> hseToSig rhs)]
hseToItem (InstDecl an _ (fromIParen -> IRule _ _ ctx (fromInstHead -> (name, args))) _) = [IInstance $ fmap (toIString . strPack) $ hseToSig $ TyForall an Nothing ctx $ applyType (TyCon an name) args]
hseToItem x = map (IName . strPack) $ declNames x
|
ndmitchell/hoogle
|
src/Input/Item.hs
|
bsd-3-clause
| 8,920
| 0
| 19
| 2,491
| 3,589
| 1,858
| 1,731
| 185
| 23
|
module Module5.Task13 where
main' :: IO ()
main' = do
putStr $ "What is your name?\nName: "
name <- getLine
if null name
then main'
else putStrLn $ "Hi, " ++ name ++ "!"
|
dstarcev/stepic-haskell
|
src/Module5/Task13.hs
|
bsd-3-clause
| 185
| 0
| 10
| 49
| 61
| 31
| 30
| 8
| 2
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.Numeral.ID.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Numeral.ID.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "ID Tests"
[ makeCorpusTest [This Numeral] corpus
]
|
rfranek/duckling
|
tests/Duckling/Numeral/ID/Tests.hs
|
bsd-3-clause
| 600
| 0
| 9
| 96
| 80
| 51
| 29
| 11
| 1
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Aws.DynamoDB.Commands.UpdateTable
( UpdateTable(..)
, UpdateTableResponse(..)
, updateTable
) where
import Aws.Core
import Aws.DynamoDB.Core
import Control.Applicative
import Data.Aeson
import qualified Data.Text as T
import qualified Test.QuickCheck as QC
data UpdateTable
= UpdateTable
{
provisionedThroughPut :: ProvisionedThroughput {- Yes -}
, tableName :: TableName {- Yes -}
}
deriving (Show, Eq)
instance ToJSON UpdateTable where
toJSON (UpdateTable a b) =
    object[
      "ProvisionedThroughput" .= a
, "TableName" .= b
]
instance FromJSON UpdateTable where
parseJSON (Object v) = UpdateTable <$>
                         v .: "ProvisionedThroughput" <*>
                         v .: "TableName"
instance QC.Arbitrary UpdateTable where
arbitrary = UpdateTable <$>
QC.arbitrary <*>
QC.arbitrary
data UpdateTableResponse
= UpdateTableResponse {
utrTableDescription :: Maybe TableDescription
}deriving (Show,Eq)
instance ToJSON UpdateTableResponse where
toJSON (UpdateTableResponse a) =
object[
"TableDescription" .= a
]
instance FromJSON UpdateTableResponse where
parseJSON (Object v) = UpdateTableResponse <$>
v .:? "TableDescription"
instance QC.Arbitrary UpdateTableResponse where
arbitrary = UpdateTableResponse <$> QC.arbitrary
updateTable :: ProvisionedThroughput -> TableName -> UpdateTable
updateTable a b = UpdateTable a b
instance SignQuery UpdateTable where
type ServiceConfiguration UpdateTable = DdbConfiguration
signQuery a@UpdateTable {..} = ddbSignQuery DdbQuery
{ ddbqMethod = Post
, ddbqRequest = ""
, ddbqQuery = []
, ddbqCommand = "DynamoDB_20120810.UpdateTable"
, ddbqBody = Just $ toJSON $ a
}
data UpdateTableResult = UpdateTableResult{
tableDescription :: Maybe TableDescription
}deriving(Show, Eq)
instance ToJSON UpdateTableResult where
toJSON (UpdateTableResult a) = object[
"TableDescription" .= a
]
instance FromJSON UpdateTableResult where
parseJSON (Object v) = UpdateTableResult <$>
                         v .:? "TableDescription"
instance ResponseConsumer UpdateTable UpdateTableResponse where
type ResponseMetadata UpdateTableResponse = DdbMetadata
responseConsumer _ mref = ddbResponseConsumer mref $ \rsp -> cnv <$> jsonConsumer rsp
where
cnv (UpdateTableResult a) = UpdateTableResponse a
instance Transaction UpdateTable UpdateTableResponse
instance AsMemoryResponse UpdateTableResponse where
type MemoryResponse UpdateTableResponse = UpdateTableResponse
loadToMemory = return
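-- Illustrative wire format (not part of the original module): a request built
-- with updateTable encodes, via the ToJSON instance above, to a body of the
-- shape expected by the DynamoDB_20120810.UpdateTable action, e.g.
--
--   { "ProvisionedThroughput": {"ReadCapacityUnits": 5, "WriteCapacityUnits": 5}
--   , "TableName": "my-table"
--   }
--
-- (the nested ProvisionedThroughput encoding comes from its ToJSON instance in
-- Aws.DynamoDB.Core; the capacity field names shown here are assumptions).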
|
ywata/dynamodb
|
Aws/DynamoDB/Commands/UpdateTable.hs
|
bsd-3-clause
| 3,019
| 0
| 10
| 825
| 616
| 336
| 280
| 72
| 1
|
{-# LANGUAGE ViewPatterns,
FlexibleContexts, FlexibleInstances, TypeSynonymInstances
#-}
module Insomnia.ToF.Module where
import Control.Applicative ((<$>))
import Control.Lens
import Control.Monad.Reader
import Control.Monad.Except (MonadError(..))
import Data.Monoid (Monoid(..), (<>), Endo(..))
import qualified Data.Map as M
import qualified Unbound.Generics.LocallyNameless as U
import qualified FOmega.Syntax as F
import qualified FOmega.SemanticSig as F
import qualified FOmega.MatchSigs as F
import qualified FOmega.SubSig as F
import Insomnia.Common.ModuleKind
import Insomnia.Common.Telescope
import Insomnia.Identifier
import Insomnia.Types
import Insomnia.TypeDefn
import Insomnia.ValueConstructor
import Insomnia.ModuleType
import Insomnia.Module
import Insomnia.Expr
import Insomnia.ToF.Env
import Insomnia.ToF.Summary
import Insomnia.ToF.Type
import Insomnia.ToF.Expr
import Insomnia.ToF.ModuleType
import Insomnia.ToF.Builtins
---------------------------------------- Modules
-- The translation of moduleExpr takes a 'Maybe Path' which is the name
-- of the module provided that it is defined at the toplevel, or else a simple structure
-- within another named module. This is a (gross) hack so that we can write things like
--
-- @@@
-- module Foo {
-- module Bar = assume { sig x : ... }
-- }
-- @@@
--
-- And try to find "Foo.Bar.x" in the list of known builtin primitives.
--
moduleExpr :: ToF m => Maybe Path -> ModuleExpr -> m (F.AbstractSig, F.Term)
moduleExpr modPath mdl_ =
case mdl_ of
ModuleStruct mk mdl -> do
ans@(sigStr, m) <- structure modPath mk mdl
case mk of
ModuleMK -> return ans
ModelMK -> let sig = F.ModelSem sigStr
s = F.AbstractSig $ U.bind [] sig
in return (s, m)
ModuleSeal me mt -> sealing me mt
ModuleAssume mty -> moduleAssume modPath mty
ModuleId p -> do
(sig, m) <- modulePath p
return (F.AbstractSig $ U.bind [] sig, m)
ModuleFun bnd ->
U.lunbind bnd $ \(tele, bodyMe) ->
moduleFunctor tele bodyMe
ModuleApp pfun pargs -> moduleApp pfun pargs
ModelLocal lcl bdy mt ->
modelLocal lcl bdy mt
ModelObserve mdl obss ->
modelObserve mdl obss
ModuleUnpack e modTy ->
moduleUnpack e modTy
moduleAssume :: ToF m
=> Maybe Path
-> ModuleType
-> m (F.AbstractSig, F.Term)
moduleAssume modPath_ modTy = do
case looksLikeBuiltin modPath_ modTy of
Just builtins -> makeBuiltinsModule builtins
Nothing -> do
absSig <- moduleType modTy
ty <- F.embedAbstractSig absSig
return (absSig, F.Assume ty)
structure :: ToF m
=> Maybe Path
-> ModuleKind
-> Module
-> m (F.AbstractSig, F.Term)
structure modPath mk (Module decls) = do
declarations modPath mk decls $ \(summary@(tvks,sig), fields, termHole) -> do
let semSig = F.ModSem sig
ty <- F.embedSemanticSig semSig
let r = F.Record fields
m = retMK mk $ F.packs (map (F.TV . fst) tvks) r (tvks, ty)
return (mkAbstractModuleSig summary, appEndo termHole m)
where
retMK :: ModuleKind -> F.Term -> F.Term
retMK ModuleMK = id
retMK ModelMK = F.Return
-- | 〚let { decls } in M : S〛 Unlike the F-ing modules calculus
-- where "let B in M" is explained as "{ B ; X = M}.X", because we're
-- in a monadic language in the model fragment, this is a primitive
-- construct. Suppose 〚 {decls} 〛 = e₁ : Dist ∃αs.Σ₁ and 〚S〛= ∃βs.Σ₂
-- and 〚Γ,αs,X:Σ₁⊢ M[X.ℓs/ℓs]〛= e₂ : Dist ∃γs.Σ₃ then
-- the local module translates as:
--
-- let Y ~ e₁ in unpack αs,X = Y in
-- let Z ~ e₂ in unpack γs,W = Z in
-- return (pack τs (f W) as ∃βs.Σ₂)
--
-- where αs,γs⊢ Σ₃ ≤ ∃βs.Σ₂ ↑ τs ⇝ f is the signature sealing coercion;
-- all the locals are fresh; and the [X.ℓs/ℓs] means to put in
-- projections from X for all the declarations in decls that appear in
-- e₂
--
-- The big picture is: we have two "monads", the distribution monad
-- and the existential packing "monad", so we use the elim forms to
-- take them both apart, and then return/pack the resulting modules.
modelLocal :: ToF m => Module -> ModuleExpr -> ModuleType -> m (F.AbstractSig, F.Term)
modelLocal lcl_ body_ mt_ = do
ascribedSig <- moduleType mt_
let (Module lclDecls) = lcl_
declarations Nothing ModelMK lclDecls $ \(_lclSummary, _lclFields, lclTermHole) -> do
(F.AbstractSig bodySigBnd, bodyTerm) <- moduleExpr Nothing body_
U.lunbind bodySigBnd $ \(gammas,bodySig) -> do
(taus, coer) <- do
(sig2, taus) <- F.matchSubst bodySig ascribedSig
coercion <- F.sigSubtyping bodySig sig2
return (taus, coercion)
z <- U.lfresh (U.string2Name "z")
w <- U.lfresh (U.string2Name "w")
packsAnnotation <- do
let (F.AbstractSig bnd) = ascribedSig
U.lunbind bnd $ \ (betas, s) -> do
ty <- F.embedSemanticSig s
return (betas, ty)
let
finalOut = F.packs taus (F.applyCoercion coer (F.V w)) packsAnnotation
unpackedGammas <- F.unpacks (map fst gammas) w (F.V z) $ finalOut
let
withE2 = F.Let $ U.bind (z, U.embed bodyTerm) unpackedGammas
localTerm = appEndo lclTermHole withE2
return (ascribedSig, localTerm)
-- | In the F-ing modules paper, (M:>S) is syntactic sugar, and only
-- (X :> S) is primitive. But if we expand out the sugar and apply
-- some commuting conversions, we get something in nice form and we
-- choose to implement that nice form.
--
--
-- 〚(M :> S)〛 = 〚({ X = M ; X' = (X :> S) }.X')〛
-- = unpack (αs, y) = 〚{ X = M ; X' = (X :> S)}〛in pack (αs, y.lX')
-- = unpack (αs, y) = (unpack (βs, z1) = 〚M〛in unpack (γs, z2) = 〚(X :> S)〛[z1 / X] in pack (βs++γs, { lX = z1 ; lX' = z2 })) in pack (αs, y.lX')
-- = unpack (βs, z1) = 〚M〛in unpack (γs, z2) = 〚(X :> S)〛[z1/X] in unpack (αs,y) = pack (βs++γs, { lX = X ; lX' = z2 }) in pack (αs, y.lX')
-- = unpack (βs, z1) = 〚M〛 in unpack (γs, z2) = 〚(X :> S)〛[z1/X] in pack (βs++γs, z2)
-- = unpack (βs, z1) = 〚M〛 in unpack (γs, z2) = pack (τs, f z1) in pack (βs++γs, z2) where Σ₁ ≤ Ξ ↑ τs ⇝ f where Σ₁ is the type of z1 and Ξ is 〚S〛
-- = unpack (βs, z1) = 〚M〛 in pack (βs++τs, f z1)
--
-- In other words, elaborate M and S and construct the coercion f and
-- discover the sealed types τs, then pack anything that M abstracted
-- together with anything that S seals. (The one missing bit is the
-- type annotation on the "pack" term, but it's easy. Suppose Ξ is
-- ∃δs.Σ₂, then the result has type ∃βs,δs.Σ₂)
sealing :: ToF m => ModuleExpr -> ModuleType -> m (F.AbstractSig, F.Term)
sealing me mt = do
xi@(F.AbstractSig xiBnd) <- moduleType mt
(F.AbstractSig sigBnd, m) <- moduleExpr Nothing me
U.lunbind sigBnd $ \(betas, sigma) -> do
(taus, coer) <- do
(sig2, taus) <- F.matchSubst sigma xi
coercion <- F.sigSubtyping sigma sig2
return (taus, coercion)
z1 <- U.lfresh (U.s2n "z")
let
packedTys = (map (F.TV . fst) betas)
++ taus
(xi', bdy) <- U.lunbind xiBnd $ \(deltas,sigma2) -> do
sigma2emb <- F.embedSemanticSig sigma2
let bdy = F.packs packedTys (F.applyCoercion coer $ F.V z1) (betas++deltas, sigma2emb)
xi' = F.AbstractSig $ U.bind (betas++deltas) sigma2
return (xi', bdy)
term <- F.unpacks (map fst betas) z1 m bdy
return (xi', term)
-- | (X1 : S1, ... Xn : Sn) -> { ... Xs ... }
-- translates to Λα1s,...αns.λX1:Σ1,...,Xn:Σn. mbody : ∀αs.Σ1→⋯Σn→Ξ
-- where 〚Si〛= ∃αi.Σi and 〚{... Xs ... }〛= mbody : Ξ
moduleFunctor :: ToF m
=> (Telescope (FunctorArgument ModuleType))
-> ModuleExpr
-> m (F.AbstractSig, F.Term)
moduleFunctor teleArgs bodyMe =
withFunctorArguments teleArgs $ \(tvks, argSigs) -> do
(resultAbs, mbody) <- moduleExpr Nothing bodyMe
let funSig = F.SemanticFunctor (map snd argSigs) resultAbs
s = F.FunctorSem $ U.bind tvks funSig
args <- forM argSigs $ \(v,argSig) -> do
argTy <- F.embedSemanticSig argSig
return (v, argTy)
let fnc = F.pLams' tvks $ F.lams args mbody
return (F.AbstractSig $ U.bind [] s,
fnc)
-- | p (p1, .... pn) becomes m [τ1s,…,τNs] (f1 m1) ⋯ (fn mn) : Ξ[τs/αs]
-- where 〚p〛 = m : ∀αs.Σ1′→⋯→Σn′→Ξ and 〚pi〛 = mi : Σi and (Σ1,…,Σn)≤∃αs.(Σ1′,…,Σn′) ↑ τs ⇝ fs
moduleApp :: ToF m
=> Path
-> [Path]
-> m (F.AbstractSig, F.Term)
moduleApp pfn pargs = do
(semFn, mfn) <- modulePath pfn
(argSigs, margs) <- mapAndUnzipM modulePath pargs
case semFn of
F.FunctorSem bnd ->
U.lunbind bnd $ \(tvks, F.SemanticFunctor paramSigs sigResult) -> do
let alphas = map fst tvks
(paramSigs', taus) <- F.matchSubsts argSigs (alphas, paramSigs)
coercions <- zipWithM F.sigSubtyping argSigs paramSigs'
let
m = (F.pApps mfn taus) `F.apps` (zipWith F.applyCoercion coercions margs)
s = U.substs (zip alphas taus) sigResult
return (s, m)
_ -> throwError "internal failure: ToF.moduleApp expected a functor"
modelObserve :: ToF m
=> ModuleExpr
-> [ObservationClause]
-> m (F.AbstractSig, F.Term)
modelObserve me obss = do
-- 〚M' = observe M where f is Q〛
--
-- Suppose 〚M〛: Dist {fs : τs} where f : {gs : σs}
-- and 〚Q〛: {gs : σs}
--
-- let
-- prior = 〚M〛
-- obs = 〚Q〛
-- kernel = λ x : {fs : τs} . x.f
-- in posterior [{fs:τs}] [{gs:σs}] kernel obs prior
--
(sig, prior) <- moduleExpr Nothing me
disttp <- F.embedAbstractSig sig
sigTps <- case disttp of
F.TExist {} -> throwError "internal error: ToF.modelObserve of an observed model with abstract types"
F.TDist (F.TRecord sigTps) -> return sigTps
_ -> throwError "internal error: ToF.modelObserve expected to see a distribution over models"
hole <- observationClauses sigTps obss
let posterior = hole prior
return (sig, posterior)
observationClauses :: ToF m
=> [(F.Field, F.Type)]
-> [ObservationClause]
-> m (F.Term -> F.Term)
observationClauses _sig [] = return id
observationClauses sigTp (obs:obss) = do
holeInner <- observationClause sigTp obs
holeOuter <- observationClauses sigTp obss
return (holeOuter . holeInner)
-- | observationClause {fs : τs} "where f is Q" ≙ λprior . posterior kernel mobs prior
-- where kernel = "λx:{fs:τs} . x.f"
-- and mobs = 〚Q〛
observationClause :: ToF m
=> [(F.Field, F.Type)]
-> ObservationClause
-> m (F.Term -> F.Term)
observationClause sigTp (ObservationClause f obsMe) = do
(_, mobs) <- moduleExpr Nothing obsMe
let recordTp = F.TRecord sigTp
projTp <- case F.selectField sigTp (F.FUser f) of
Just (F.TDist t) -> return t
Just _ -> throwError ("internal error: expected the model to have a submodel " ++ show f
++ ", but it's not even a distribution")
Nothing -> throwError ("internal error: expected model to have a submodel " ++ show f)
let
kernel = let
vx = U.s2n "x"
x = F.V vx
in F.Lam $ U.bind (vx, U.embed recordTp) $
F.Proj x (F.FUser f)
term = \prior ->
F.apps (F.pApps (F.V $ U.s2n "__BOOT.posterior")
[ recordTp , projTp ])
[ kernel
, mobs
, prior
]
return term
moduleUnpack :: ToF m => Expr -> ModuleType -> m (F.AbstractSig, F.Term)
moduleUnpack e modTy = do
m <- expr e
absSig <- moduleType modTy
return (absSig, m)
-- | Translation of declarations.
-- This is a bit different from how F-ing modules does it in order to avoid producing quite so many
-- administrative redices, at the expense of being slightly more complex.
--
-- So the idea is that each declaration is going to produce two
-- things: A term with a hole and a description of the extra variable
-- bindings that it introduces in the scope of the hole.
--
-- For example, a type alias "type β = τ" will produce the term
-- let xβ = ↓[τ] in • and the description {user field β : [τ]} ⇝ {user field β = xβ}
-- The idea is that the "SigSummary" part of the description is the abstract semantic signature of the
-- final structure in which this declaration appears, and the field↦term part is the record value that
-- will be produced.
--
-- For nested submodules we'll have to do a bit more work in order to
-- extrude the scope of the existential variables (ie, the term with
-- the hole is an "unpacks" instead of a "let"), but it's the same
-- idea.
--
-- For value bindings we go in two steps: the signature (which was inserted by the type checker if omitted)
-- just extends the SigSummary, while the actual definition extends the record.
-- TODO: (This gets mutually recursive functions wrong. Need a letrec form in fomega)
declarations :: ToF m
=> Maybe Path
-> ModuleKind
-> [Decl]
-> (ModSummary -> m ans)
-> m ans
declarations _ _mk [] kont = kont mempty
declarations modPath mk (d:ds) kont = let
kont1 out1 = declarations modPath mk ds $ \outs -> kont $ out1 <> outs
in case d of
ValueDecl f vd -> valueDecl mk f vd kont1
SubmoduleDefn f me -> submoduleDefn modPath mk f me kont1
SampleModuleDefn f me -> do
when (mk /= ModelMK) $
throwError "internal error: ToF.declarations SampleModuleDecl in a module"
sampleModuleDefn f me kont1
TypeAliasDefn f al -> typeAliasDefn mk f al kont1
ImportDecl {} ->
throwError "internal error: ToF.declarations ImportDecl should have been desugared by the Insomnia typechecker"
{- importDecl p kont1 -}
TypeDefn f td -> typeDefn f (U.s2n f) td kont1
typeAliasDefn :: ToF m
=> ModuleKind
-> Field
-> TypeAlias
-> (ModSummary -> m ans)
-> m ans
typeAliasDefn _mk f (ManifestTypeAlias bnd) kont =
U.lunbind bnd $ \ (tvks, rhs) -> do
(tlam, tK) <- withTyVars tvks $ \tvks' -> do
(rhs', kcod) <- type' rhs
return (F.tLams tvks' rhs', F.kArrs (map snd tvks') kcod)
let tsig = F.TypeSem tlam tK
tc = U.s2n f :: TyConName
xc = U.s2n f :: F.Var
mr <- F.typeSemTerm tlam tK
let
mhole = Endo $ F.Let . U.bind (xc, U.embed mr)
thisOne = ((mempty, [(F.FUser f, tsig)]),
[(F.FUser f, F.V xc)],
mhole)
local (tyConEnv %~ M.insert tc tsig) $
kont thisOne
typeAliasDefn _mk f (DataCopyTypeAlias (TypePath pdefn fdefn) defn) kont = do
-- Add a new name for an existing generative type. Since the
-- abstract type variable is already in scope (since it was lifted
-- out to scope over all the modules where the type is visible), we
-- just need to alias the original type's datatype variable, and to
-- add all the constructors to the environment.
--
-- ie, suppose we had:
-- module M { data D = D1 | D2 }
-- module N { datatype D = data M.D }
-- module P { ... N.D1 ... }
-- we will get
-- unpack δ, M = { ... } in
-- let N = { D = M.D } in
-- let P = { ... N.D.dataIn.D1 ... } where N.D has a type that mentions δ
let rootLookup modId = do
ma <- view (modEnv . at modId)
case ma of
Nothing -> throwError "unexpected failure in ToF.typeAliasDefn - unbound module identifier"
Just (sig, x) -> return (sig, F.V x)
(dataSig, mpath) <- followUserPathAnything rootLookup (ProjP pdefn fdefn)
let tc = U.s2n f :: TyConName
xc = U.s2n f :: F.Var
  -- map each constructor to a projection from the corresponding
-- constructor field of the defining datatype.
conVs <- case defn of
EnumDefn {} -> return mempty
DataDefn bnd ->
U.lunbind bnd $ \(_tvks, cdefs) ->
return $ flip map cdefs $ \(ConstructorDef cname _) ->
let fcon = F.FCon (U.name2String cname)
in (cname, (xc, fcon))
let mhole = Endo (F.Let . U.bind (xc, U.embed mpath))
thisOne = ((mempty, [(F.FUser f, dataSig)]), [(F.FUser f, F.V xc)], mhole)
conEnv = M.fromList conVs
local (tyConEnv %~ M.insert tc dataSig)
$ local (valConEnv %~ M.union conEnv)
$ kont thisOne
submoduleDefn :: ToF m
=> Maybe Path
-> ModuleKind
-> Field
-> ModuleExpr
-> ((SigSummary, [(F.Field, F.Term)], Endo F.Term) -> m ans)
-> m ans
submoduleDefn modPath _mk f me kont = do
let modId = U.s2n f
(F.AbstractSig bnd, msub) <- moduleExpr (flip ProjP f <$> modPath) me
U.lunbind bnd $ \(tvks, modsig) -> do
xv <- U.lfresh (U.s2n f)
U.avoid [U.AnyName xv] $ local (modEnv %~ M.insert modId (modsig, xv)) $ do
let tvs = map fst tvks
(munp, avd) <- F.unpacksM tvs xv
let m = Endo $ munp msub
thisOne = ((tvks, [(F.FUser f, modsig)]),
[(F.FUser f, F.V xv)],
m)
U.avoid avd $ kont thisOne
sampleModuleDefn :: ToF m
=> Field
-> ModuleExpr
-> (ModSummary -> m ans)
-> m ans
sampleModuleDefn f me kont = do
let modId = U.s2n f
(F.AbstractSig bndMdl, msub) <- moduleExpr Nothing me
bnd <- U.lunbind bndMdl $ \(tvNull, semMdl) ->
case (tvNull, semMdl) of
        ([], F.ModelSem (F.AbstractSig bnd)) -> return bnd
        _ -> throwError "internal error: ToF.sampleModuleDefn expected a model with no applicative tyvars"
U.lunbind bnd $ \(tvks, modSig) -> do
let xv = U.s2n f
local (modEnv %~ M.insert modId (modSig, xv)) $ do
(munp, avd) <- F.unpacksM (map fst tvks) xv
let m = Endo $ F.LetSample . U.bind (xv, U.embed msub) . munp (F.V xv)
thisOne = ((tvks, [(F.FUser f, modSig)]),
[(F.FUser f, F.V xv)],
m)
U.avoid avd $ kont thisOne
valueDecl :: ToF m
=> ModuleKind
-> Field
-> ValueDecl
-> (ModSummary -> m ans)
-> m ans
valueDecl mk f vd kont =
let v = U.s2n f :: Var
in case vd of
SigDecl _stoch ty -> do
(ty', _k) <- type' ty
let vsig = F.ValSem ty'
xv = U.s2n f :: F.Var
let thisOne = ((mempty, [(F.FUser f, vsig)]),
mempty,
mempty)
local (valEnv %~ M.insert v (xv, StructureTermVar vsig))
$ U.avoid [U.AnyName v]
$ kont thisOne
FunDecl (Function eg) -> do
g <- case eg of
Left {} -> throwError "internal error: expected annotated function"
Right g -> return g
mt <- view (valEnv . at v)
(xv, semTy, _ty) <- case mt of
Just (xv, StructureTermVar sem) -> do
semTy <- F.embedSemanticSig sem
ty <- matchSemValRecord sem
return (xv, semTy, ty)
_ -> throwError "internal error: ToF.valueDecl FunDecl did not find type declaration for field"
m <- -- tyVarsAbstract ty $ \_tvks _ty' ->
generalize g $ \tvks _prenex e -> do
m_ <- expr e
return $ F.pLams tvks m_
let
mr = F.valSemTerm m
mhole = Endo $ F.LetRec . U.bind (U.rec [(xv, U.embed semTy, U.embed mr)])
thisOne = (mempty,
[(F.FUser f, F.V xv)],
mhole)
kont thisOne
SampleDecl e -> do
when (mk /= ModelMK) $
throwError "internal error: ToF.valueDecl SampleDecl in a module"
simpleValueBinding F.LetSample f v e kont
ParameterDecl e -> do
when (mk /= ModuleMK) $
throwError "internal error: ToF.valueDecl ParameterDecl in a model"
simpleValueBinding F.Let f v e kont
ValDecl {} -> throwError ("internal error: unexpected ValDecl in ToF.valueDecl;"
++" Insomnia typechecker should have converted into a SampleDecl or a ParameterDecl")
TabulatedSampleDecl tabfun -> do
when (mk /= ModelMK) $
throwError "internal error: ToF.valueDecl TabulatedSampleDecl in a module"
tabledSampleDecl f v tabfun kont
simpleValueBinding :: ToF m
=> (U.Bind (F.Var, U.Embed F.Term) F.Term -> F.Term)
-> Field
-> Var
-> Expr
-> (ModSummary -> m ans)
-> m ans
simpleValueBinding mkValueBinding f v e kont = do
mt <- view (valEnv . at v)
(xv, _prov) <- case mt of
    Nothing -> throwError "internal error: ToF.simpleValueBinding did not find a type declaration for field"
    Just xty -> return xty
m <- expr e
let
mhole body =
mkValueBinding $ U.bind (xv, U.embed m)
$ F.Let $ U.bind (xv, U.embed $ F.valSemTerm $ F.V xv)
$ body
thisOne = (mempty,
[(F.FUser f, F.V xv)],
Endo mhole)
kont thisOne
tabledSampleDecl :: ToF m
=> Field
-> Var
-> TabulatedFun
-> (ModSummary -> m ans)
-> m ans
tabledSampleDecl f v tf kont = do
(v', mhole) <- letTabFun v tf (\v' mhole -> return (v', mhole))
let
mval = F.Let . U.bind (v', U.embed $ F.valSemTerm $ F.V v')
thisOne = (mempty,
[(F.FUser f, F.V v')],
Endo mhole <> Endo mval)
kont thisOne
generalize :: (U.Alpha a, ToF m) =>
Generalization a -> ([(F.TyVar, F.Kind)] -> PrenexCoercion -> a -> m r) -> m r
generalize (Generalization bnd prenexCoercion) kont =
U.lunbind bnd $ \(tvks, body) ->
withTyVars tvks $ \tvks' ->
kont tvks' prenexCoercion body
matchSemValRecord :: MonadError String m => F.SemanticSig -> m F.Type
matchSemValRecord (F.ValSem t) = return t
matchSemValRecord _ = throwError "internal error: expected a semantic object of a value binding"
tyVarsAbstract :: ToF m => F.Type -> ([(F.TyVar, F.Kind)] -> F.Type -> m r) -> m r
tyVarsAbstract t_ kont_ = tyVarsAbstract' t_ (\tvks -> kont_ (appEndo tvks []))
where
tyVarsAbstract' :: ToF m => F.Type -> (Endo [(F.TyVar, F.Kind)] -> F.Type -> m r) -> m r
tyVarsAbstract' t kont =
case t of
F.TForall bnd ->
U.lunbind bnd $ \((tv', U.unembed -> k), t') -> do
let tv = (U.s2n $ U.name2String tv') :: TyVar
id {- U.avoid [U.AnyName tv] -}
$ local (tyVarEnv %~ M.insert tv (tv', k))
$ tyVarsAbstract' t' $ \tvks t'' ->
kont (Endo ((tv', k):) <> tvks) t''
_ -> kont mempty t
modulePath :: ToF m => Path
-> m (F.SemanticSig, F.Term)
modulePath = let
rootLookup modId = do
ma <- view (modEnv . at modId)
case ma of
Nothing -> throwError "unexpected failure in ToF.modulePath - unbound module identifier"
Just (sig, x) -> return (sig, F.V x)
in
followUserPathAnything rootLookup
|
lambdageek/insomnia
|
src/Insomnia/ToF/Module.hs
|
bsd-3-clause
| 23,272
| 0
| 24
| 6,523
| 6,523
| 3,300
| 3,223
| 438
| 10
|
import qualified Data.ByteString.Lazy as L
import System.Random (randomRs, RandomGen, Random, mkStdGen, newStdGen)
import Data.Maybe
import Data.Word
import Data.Binary (encode)
import Data.Time.Clock (getCurrentTime, diffUTCTime)
import System.IO
import System.Console.CmdArgs
import Contract.Types
import Contract.Symbols (symbolToCode)
import Contract.Protocol (encodeFileHeader, encodeTick)
import qualified DataGeneration.CmdArgs as A
getRandomRateDeltas :: (RandomGen g) => g -> [Rate]
getRandomRateDeltas g = [deltas !! x | x <- indexes]
where
indexes = randomRs (0, upperBound) g
upperBound = (length deltas) - 1
-- smaller rate changes occur more often
deltas :: [Rate]
deltas =
(replicate 8 0) ++
(replicate 5 (-1)) ++ (replicate 5 1) ++
(replicate 3 (-2)) ++ (replicate 3 2) ++
(replicate 2 (-3)) ++ (replicate 2 3) ++
[4, (-4), 5, (-5)]
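-- The list above has 8 + 10 + 6 + 4 + 4 = 32 entries, so a delta of 0 is drawn
-- with probability 8/32 = 25%, +/-1 with 10/32, +/-2 with 6/32, +/-3 with 4/32,
-- and each of +/-4 and +/-5 with 1/32 (comment added for clarity).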
getTicks :: Tick -> [Rate] -> TimeOffset -> [Tick]
getTicks prevTick rateDeltas timeInterval = x : getTicks x (tail rateDeltas) timeInterval
where
x = getDeltaTick prevTick (head rateDeltas)
getDeltaTick currentTick rateDelta =
let nextTimeOffset = (+timeInterval) $ timeOffset currentTick
nextRate = (+rateDelta) $ rate currentTick
in Tick nextTimeOffset nextRate
main = do
args <- cmdArgs A.arguments
putStrLn $ "Generating for: " ++ show args ++ "\n"
startTime <- getCurrentTime
let fileName = (A.directory args) ++ "/" ++ (A.symbol args) ++ ".bin"
let header = encodeFileHeader $
Header (fromJust . symbolToCode $ A.symbol args)
(A.time args)
(A.interval args)
(A.points args)
let encodedTicks = map encodeTick $ take (fromIntegral $ A.points args) $
getTicks (Tick (A.time args) $ A.firstRate args)
(getRandomRateDeltas $ mkStdGen (A.random args))
(A.interval args)
L.writeFile fileName header
L.appendFile fileName $ L.concat encodedTicks
endTime <- getCurrentTime
putStrLn $ "Took: " ++ show (diffUTCTime endTime startTime)
|
thlorenz/Pricetory
|
src/DataGeneration/RandomDataGenerator.hs
|
bsd-3-clause
| 2,327
| 0
| 17
| 715
| 734
| 384
| 350
| 48
| 1
|
{-# LANGUAGE
FlexibleContexts
, FlexibleInstances
, ScopedTypeVariables
, TypeOperators
#-}
-- | Test if a data type is an enumeration (only zero-argument
-- constructors) generically using "GHC.Generics".
module Generics.Generic.IsEnum
( isEnum
, GIsEnum (..)
) where
import Data.Proxy
import GHC.Generics
-- | Class for testing if the functors from "GHC.Generics" are
-- enumerations. You generally don't need to give any custom
-- instances. Just call 'isEnum'.
class GIsEnum f where
gIsEnum :: Proxy (f a) -> Bool
instance GIsEnum V1 where
gIsEnum _ = False
instance GIsEnum (K1 i a) where
gIsEnum _ = False
instance GIsEnum U1 where
gIsEnum _ = True
instance GIsEnum Par1 where
gIsEnum _ = False
-- should be K1 R
instance GIsEnum (Rec1 f) where
gIsEnum _ = False
instance (GIsEnum f, GIsEnum g) => GIsEnum (f :+: g) where
gIsEnum _ = gIsEnum (Proxy :: Proxy (f a)) && gIsEnum (Proxy :: Proxy (g a))
instance (GIsEnum f, GIsEnum g) => GIsEnum (f :*: g) where
gIsEnum _ = False
instance GIsEnum f => GIsEnum (M1 C c f) where
gIsEnum _ = gIsEnum (Proxy :: Proxy (f a))
instance GIsEnum (M1 S c a) where
gIsEnum _ = False
instance GIsEnum f => GIsEnum (M1 D c f) where
gIsEnum _ = gIsEnum (Proxy :: Proxy (f a))
-- instance GIsEnum (f :.: g) where
-- | Generically test if a data type is an enumeration.
isEnum :: forall a. (Generic a, GIsEnum (Rep a)) => Proxy a -> Bool
isEnum _ = gIsEnum (Proxy :: Proxy ((Rep a) a))
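-- Illustrative usage (not part of the original module; the types are
-- hypothetical and need DeriveGeneric):
--
--   data Colour = Red | Green | Blue deriving (Generic)
--   data Point  = Point Int Int      deriving (Generic)
--
--   isEnum (Proxy :: Proxy Colour)  -- True  (only nullary constructors)
--   isEnum (Proxy :: Proxy Point)   -- False (Point carries fields)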
|
silkapp/generic-aeson
|
src/Generics/Generic/IsEnum.hs
|
bsd-3-clause
| 1,479
| 0
| 11
| 315
| 473
| 247
| 226
| 34
| 1
|
module Main where
{-----------------------------------------------------------------------------
reactive-banana-wx
Example: Very simple arithmetic
------------------------------------------------------------------------------}
import Control.Applicative (liftA2)
import Data.Maybe
import Graphics.UI.WX hiding (Event)
import Reactive.Banana
import Reactive.Banana.WX
{-----------------------------------------------------------------------------
Main
------------------------------------------------------------------------------}
main :: IO ()
main = start $ do
f <- frame [text := "Arithmetic"]
input1 <- entry f []
input2 <- entry f []
output <- staticText f []
set f [layout := margin 10 $ row 10
[widget input1, label "+", widget input2
, label "=", minsize (sz 40 20) $ widget output]]
let networkDescription :: MomentIO ()
networkDescription = do
binput1 <- behaviorText input1 ""
binput2 <- behaviorText input2 ""
let
result :: Behavior (Maybe Int)
result = f <$> binput1 <*> binput2
where
f x y = liftA2 (+) (readNumber x) (readNumber y)
readNumber s = listToMaybe [x | (x,"") <- reads s]
showNumber = maybe "--" show
sink output [text :== showNumber <$> result]
network <- compile networkDescription
actuate network
|
codygman/test-reactive-banana-wx
|
app/Main.hs
|
bsd-3-clause
| 1,411
| 0
| 20
| 368
| 367
| 180
| 187
| 27
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Options.Applicative
import Vindinium
import Bot
import Data.String (fromString)
import Data.Text (pack, unpack)
data Cmd = Training Settings (Maybe Int) (Maybe Board)
| Arena Settings
deriving (Show, Eq)
cmdSettings :: Cmd -> Settings
cmdSettings (Training s _ _) = s
cmdSettings (Arena s) = s
settings :: Parser Settings
settings = Settings <$> (Key <$> argument (str >>= (return . pack)) (metavar "KEY"))
<*> (fromString <$> strOption (long "url" <> value "http://vindinium.org"))
trainingCmd :: Parser Cmd
trainingCmd = Training <$> settings
<*> optional (option auto (long "turns"))
<*> pure Nothing
arenaCmd :: Parser Cmd
arenaCmd = Arena <$> settings
cmd :: Parser Cmd
cmd = subparser
( command "training" (info trainingCmd
( progDesc "Run bot in training mode" ))
<> command "arena" (info arenaCmd
(progDesc "Run bot in arena mode" ))
)
runCmd :: Cmd -> IO ()
runCmd c = do
s <- runVindinium (cmdSettings c) $ do
case c of
(Training _ t b) -> playTraining t b bot
(Arena _) -> playArena bot
putStrLn $ "Game finished: " ++ unpack (stateViewUrl s)
main :: IO ()
main =
execParser opts >>= runCmd
where
opts = info (cmd <**> helper) idm
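-- Example invocations (illustrative; the executable name depends on the cabal
-- file):
--
--   my-bot training MYSECRETKEY --turns 50
--   my-bot arena MYSECRETKEY --url http://vindinium.org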
|
Herzult/vindinium-starter-haskell
|
src/Main.hs
|
mit
| 1,390
| 0
| 15
| 382
| 473
| 242
| 231
| 39
| 2
|
module Tests.Readers.Docx (tests) where
import Text.Pandoc.Options
import Text.Pandoc.Readers.Native
import Text.Pandoc.Definition
import Tests.Helpers
import Test.Framework
import Test.HUnit (assertBool)
import Test.Framework.Providers.HUnit
import qualified Data.ByteString.Lazy as B
import Text.Pandoc.Readers.Docx
import Text.Pandoc.Writers.Native (writeNative)
import qualified Data.Map as M
import Text.Pandoc.MediaBag (MediaBag, lookupMedia, mediaDirectory)
import Codec.Archive.Zip
-- We define a wrapper around pandoc that doesn't normalize in the
-- tests. Since we do our own normalization, we want to make sure
-- we're doing it right.
data NoNormPandoc = NoNormPandoc {unNoNorm :: Pandoc}
deriving Show
noNorm :: Pandoc -> NoNormPandoc
noNorm = NoNormPandoc
instance ToString NoNormPandoc where
toString d = writeNative def{ writerStandalone = s } $ toPandoc d
where s = case d of
NoNormPandoc (Pandoc (Meta m) _)
| M.null m -> False
| otherwise -> True
instance ToPandoc NoNormPandoc where
toPandoc = unNoNorm
compareOutput :: ReaderOptions
-> FilePath
-> FilePath
-> IO (NoNormPandoc, NoNormPandoc)
compareOutput opts docxFile nativeFile = do
df <- B.readFile docxFile
nf <- Prelude.readFile nativeFile
let (p, _) = readDocx opts df
return $ (noNorm p, noNorm (readNative nf))
testCompareWithOptsIO :: ReaderOptions -> String -> FilePath -> FilePath -> IO Test
testCompareWithOptsIO opts name docxFile nativeFile = do
(dp, np) <- compareOutput opts docxFile nativeFile
return $ test id name (dp, np)
testCompareWithOpts :: ReaderOptions -> String -> FilePath -> FilePath -> Test
testCompareWithOpts opts name docxFile nativeFile =
buildTest $ testCompareWithOptsIO opts name docxFile nativeFile
testCompare :: String -> FilePath -> FilePath -> Test
testCompare = testCompareWithOpts def
getMedia :: FilePath -> FilePath -> IO (Maybe B.ByteString)
getMedia archivePath mediaPath = do
zf <- B.readFile archivePath >>= return . toArchive
return $ findEntryByPath ("word/" ++ mediaPath) zf >>= (Just . fromEntry)
compareMediaPathIO :: FilePath -> MediaBag -> FilePath -> IO Bool
compareMediaPathIO mediaPath mediaBag docxPath = do
docxMedia <- getMedia docxPath mediaPath
let mbBS = case lookupMedia mediaPath mediaBag of
Just (_, bs) -> bs
Nothing -> error ("couldn't find " ++
mediaPath ++
" in media bag")
docxBS = case docxMedia of
Just bs -> bs
Nothing -> error ("couldn't find " ++
mediaPath ++
" in media bag")
return $ mbBS == docxBS
compareMediaBagIO :: FilePath -> IO Bool
compareMediaBagIO docxFile = do
df <- B.readFile docxFile
let (_, mb) = readDocx def df
bools <- mapM
(\(fp, _, _) -> compareMediaPathIO fp mb docxFile)
(mediaDirectory mb)
return $ and bools
testMediaBagIO :: String -> FilePath -> IO Test
testMediaBagIO name docxFile = do
outcome <- compareMediaBagIO docxFile
return $ testCase name (assertBool
("Media didn't match media bag in file " ++ docxFile)
outcome)
testMediaBag :: String -> FilePath -> Test
testMediaBag name docxFile = buildTest $ testMediaBagIO name docxFile
tests :: [Test]
tests = [ testGroup "inlines"
[ testCompare
"font formatting"
"docx/inline_formatting.docx"
"docx/inline_formatting.native"
, testCompare
"font formatting with character styles"
"docx/char_styles.docx"
"docx/char_styles.native"
, testCompare
"hyperlinks"
"docx/links.docx"
"docx/links.native"
, testCompare
"inline image"
"docx/image.docx"
"docx/image_no_embed.native"
, testCompare
"inline image in links"
"docx/inline_images.docx"
"docx/inline_images.native"
, testCompare
"handling unicode input"
"docx/unicode.docx"
"docx/unicode.native"
, testCompare
"literal tabs"
"docx/tabs.docx"
"docx/tabs.native"
, testCompare
"normalizing inlines"
"docx/normalize.docx"
"docx/normalize.native"
, testCompare
"normalizing inlines deep inside blocks"
"docx/deep_normalize.docx"
"docx/deep_normalize.native"
, testCompare
"move trailing spaces outside of formatting"
"docx/trailing_spaces_in_formatting.docx"
"docx/trailing_spaces_in_formatting.native"
, testCompare
"inline code (with VerbatimChar style)"
"docx/inline_code.docx"
"docx/inline_code.native"
]
, testGroup "blocks"
[ testCompare
"headers"
"docx/headers.docx"
"docx/headers.native"
, testCompare
"headers already having auto identifiers"
"docx/already_auto_ident.docx"
"docx/already_auto_ident.native"
, testCompare
"numbered headers automatically made into list"
"docx/numbered_header.docx"
"docx/numbered_header.native"
, testCompare
"i18n blocks (headers and blockquotes)"
"docx/i18n_blocks.docx"
"docx/i18n_blocks.native"
, testCompare
"lists"
"docx/lists.docx"
"docx/lists.native"
, testCompare
"definition lists"
"docx/definition_list.docx"
"docx/definition_list.native"
, testCompare
"footnotes and endnotes"
"docx/notes.docx"
"docx/notes.native"
, testCompare
"blockquotes (parsing indent as blockquote)"
"docx/block_quotes.docx"
"docx/block_quotes_parse_indent.native"
, testCompare
"hanging indents"
"docx/hanging_indent.docx"
"docx/hanging_indent.native"
, testCompare
"tables"
"docx/tables.docx"
"docx/tables.native"
, testCompare
"code block"
"docx/codeblock.docx"
"docx/codeblock.native"
, testCompare
"dropcap paragraphs"
"docx/drop_cap.docx"
"docx/drop_cap.native"
]
, testGroup "track changes"
[ testCompare
"insertion (default)"
"docx/track_changes_insertion.docx"
"docx/track_changes_insertion_accept.native"
, testCompareWithOpts def{readerTrackChanges=AcceptChanges}
"insert insertion (accept)"
"docx/track_changes_insertion.docx"
"docx/track_changes_insertion_accept.native"
, testCompareWithOpts def{readerTrackChanges=RejectChanges}
"remove insertion (reject)"
"docx/track_changes_insertion.docx"
"docx/track_changes_insertion_reject.native"
, testCompare
"deletion (default)"
"docx/track_changes_deletion.docx"
"docx/track_changes_deletion_accept.native"
, testCompareWithOpts def{readerTrackChanges=AcceptChanges}
"remove deletion (accept)"
"docx/track_changes_deletion.docx"
"docx/track_changes_deletion_accept.native"
, testCompareWithOpts def{readerTrackChanges=RejectChanges}
"insert deletion (reject)"
"docx/track_changes_deletion.docx"
"docx/track_changes_deletion_reject.native"
, testCompareWithOpts def{readerTrackChanges=AllChanges}
"keep insertion (all)"
"docx/track_changes_deletion.docx"
"docx/track_changes_deletion_all.native"
, testCompareWithOpts def{readerTrackChanges=AllChanges}
"keep deletion (all)"
"docx/track_changes_deletion.docx"
"docx/track_changes_deletion_all.native"
]
, testGroup "media"
[ testMediaBag
"image extraction"
"docx/image.docx"
]
, testGroup "metadata"
[ testCompareWithOpts def{readerStandalone=True}
"metadata fields"
"docx/metadata.docx"
"docx/metadata.native"
, testCompareWithOpts def{readerStandalone=True}
"stop recording metadata with normal text"
"docx/metadata_after_normal.docx"
"docx/metadata_after_normal.native"
]
]
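-- Adding a new comparison test only needs a .docx fixture and its expected
-- native output dropped into the matching testGroup above, e.g. (illustrative)
--
--   , testCompare
--     "my new feature"
--     "docx/my_new_feature.docx"
--     "docx/my_new_feature.native"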
|
rgaiacs/pandoc
|
tests/Tests/Readers/Docx.hs
|
gpl-2.0
| 8,934
| 0
| 16
| 2,854
| 1,413
| 732
| 681
| 219
| 3
|
module RightSection where
test :: [Int]
test = filter (False ==) [1..10]
|
roberth/uu-helium
|
test/typeerrors/Examples/RightSection.hs
|
gpl-3.0
| 74
| 0
| 6
| 13
| 31
| 19
| 12
| 3
| 1
|
{-# LANGUAGE NoImplicitPrelude, DeriveDataTypeable, DeriveGeneric, OverloadedStrings, TemplateHaskell #-}
module Lamdu.Infer
( makeScheme
, TypeVars(..)
, Dependencies(..), depsGlobalTypes, depsNominals, depSchemes
, infer, inferFromNom, inferApply
, Scope, emptyScope, Scope.scopeToTypeMap, Scope.insertTypeOf, Scope.skolems, Scope.skolemScopeVars
, Payload(..), plScope, plType
, M.Context, M.initialContext
, M.InferCtx(..), M.inferCtx, Infer
, freshInferredVarName
, freshInferredVar
, applyNominal
) where
import AST (Tree, Ann(..), annotations)
import AST.Term.Nominal (ToNom(..))
import AST.Term.Row (RowExtend(..))
import Control.DeepSeq (NFData(..))
import qualified Control.Lens as Lens
import Control.Lens.Operators
import Control.Lens.Tuple
import Data.Binary (Binary)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Semigroup (Semigroup(..))
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Lamdu.Calc.Term (Val)
import qualified Lamdu.Calc.Term as V
import Lamdu.Calc.Type (Type)
import qualified Lamdu.Calc.Type as T
import Lamdu.Calc.Type.Nominal (Nominal(..), nomParams, nomType)
import Lamdu.Calc.Type.Scheme (Scheme)
import Lamdu.Calc.Type.Vars (TypeVars(..))
import qualified Lamdu.Calc.Type.Vars as TV
import qualified Lamdu.Infer.Error as Err
import Lamdu.Infer.Internal.Monad (Infer)
import qualified Lamdu.Infer.Internal.Monad as M
import Lamdu.Infer.Internal.Scheme (makeScheme)
import qualified Lamdu.Infer.Internal.Scheme as Scheme
import Lamdu.Infer.Internal.Scope (Scope, emptyScope, SkolemScope)
import qualified Lamdu.Infer.Internal.Scope as Scope
import Lamdu.Infer.Internal.Subst (CanSubst(..))
import qualified Lamdu.Infer.Internal.Subst as Subst
import Lamdu.Infer.Internal.Unify (unifyUnsafe)
import Prelude.Compat
data Payload = Payload
{ _plType :: Type
, _plScope :: Scope
} deriving (Generic, Typeable, Show)
instance NFData Payload
instance Binary Payload
Lens.makeLenses ''Payload
instance TV.Free Payload where
free (Payload typ scope) =
TV.free typ <> TV.free scope
instance CanSubst Payload where
apply s (Payload typ scope) =
Payload (Subst.apply s typ) (Subst.apply s scope)
data Dependencies = Deps
{ _depsGlobalTypes :: Map V.Var Scheme
, _depsNominals :: Map T.NominalId Nominal
} deriving (Generic, Show, Eq, Ord)
instance NFData Dependencies
instance Binary Dependencies
Lens.makeLenses ''Dependencies
instance Semigroup Dependencies where
Deps t0 n0 <> Deps t1 n1 = Deps (t0 <> t1) (n0 <> n1)
instance Monoid Dependencies where
mempty = Deps Map.empty Map.empty
mappend = (<>)
depSchemes :: Lens.Traversal' Dependencies Scheme
depSchemes f (Deps globals nominals) =
Deps
<$> traverse f globals
<*> (traverse . nomType) f nominals
inferSubst :: Dependencies -> Scope -> Val a -> Infer (Scope, Val (Payload, a))
inferSubst deps rootScope val =
do
prevSubst <- M.getSubst
let rootScope' = Subst.apply prevSubst rootScope
(inferredVal, s) <- M.listenSubst $ inferInternal mkPayload deps rootScope' val
pure (rootScope', inferredVal & annotations . _1 %~ Subst.apply s)
where
mkPayload typ scope dat = (Payload typ scope, dat)
-- All accessed global IDs are supposed to be extracted from the
-- expression to build this global scope. This is slightly hacky but
-- much faster than a polymorphic monad underlying the InferCtx monad
-- allowing global access.
-- Use loadInfer for a safer interface
infer :: Dependencies -> Scope -> Val a -> Infer (Val (Payload, a))
infer deps scope val =
do
((scope', val'), results) <- M.listenNoTell $ inferSubst deps scope val
M.tell $ results & M.subst %~ Subst.intersect (TV.free scope')
pure val'
data CompositeHasTag = HasTag | DoesNotHaveTag | MayHaveTag T.RowVar
hasTag :: T.Tag -> T.Row -> CompositeHasTag
hasTag _ T.REmpty = DoesNotHaveTag
hasTag _ (T.RVar v) = MayHaveTag v
hasTag tag (T.RExtend t _ r)
| tag == t = HasTag
| otherwise = hasTag tag r
type InferHandler a b =
(Scope -> a -> Infer (Type, Tree (Ann b) V.Term)) -> Scope ->
M.Infer (Tree V.Term (Ann b), Type)
{-# INLINE freshInferredVar #-}
freshInferredVar :: (M.VarKind t, Monad m) => Scope -> String -> M.InferCtx m t
freshInferredVar = M.freshInferredVar . Scope.skolems
{-# INLINE freshInferredVarName #-}
freshInferredVarName :: (M.VarKind t, Monad m) => Scope -> String -> M.InferCtx m (T.Var t)
freshInferredVarName = M.freshInferredVarName . Scope.skolems
-- The "redundant" lambda tells GHC the argument saturation needed for
-- inlining
{-# ANN module ("HLint: ignore Redundant lambda" :: String) #-}
{-# INLINE inferLeaf #-}
inferLeaf :: Dependencies -> V.Leaf -> InferHandler a b
inferLeaf deps leaf = \_go locals ->
case leaf of
V.LHole -> freshInferredVar locals "h"
V.LVar n ->
case Scope.lookupTypeOf n locals of
Just t -> pure t
Nothing ->
case Map.lookup n (deps ^. depsGlobalTypes) of
Just s -> Scheme.instantiate (Scope.skolems locals) s
Nothing -> M.throwError $ Err.UnboundVariable n
V.LLiteral (V.PrimVal p _) -> pure $ T.TInst p Map.empty
V.LRecEmpty -> pure $ T.TRecord T.REmpty
V.LAbsurd -> freshInferredVar locals "a" <&> T.TFun (T.TVariant T.REmpty)
V.LFromNom n -> inferFromNom (deps ^. depsNominals) n locals
<&> (,) (V.BLeaf leaf)
{-# INLINE inferAbs #-}
inferAbs :: Tree (V.Lam V.Var V.Term) (Ann a) -> InferHandler (Val a) b
inferAbs (V.Lam n e) =
\go locals ->
do
tv <- freshInferredVar locals "a"
let locals' = Scope.insertTypeOf n tv locals
((t1, e'), s1) <- M.listenSubst $ go locals' e
pure (V.BLam (V.Lam n e'), T.TFun (Subst.apply s1 tv) t1)
{-# INLINE inferApply #-}
inferApply :: Tree (V.Apply V.Term) (Ann a) -> InferHandler (Val a) b
inferApply (V.Apply e1 e2) =
\go locals ->
do
((p1_t1, e1'), p1_s) <- M.listenSubst $ go locals e1
let p1 = Subst.apply p1_s
((p2_t2, e2'), p2_s) <- M.listenSubst $ go (p1 locals) e2
let p2_t1 = Subst.apply p2_s p1_t1
p2_tv <- freshInferredVar locals "a"
((), p3_s) <- M.listenSubst $ unifyUnsafe p2_t1 (T.TFun p2_t2 p2_tv)
let p3_tv = Subst.apply p3_s p2_tv
pure (V.BApp (V.Apply e1' e2'), p3_tv)
{-# INLINE inferGetField #-}
inferGetField :: V.GetField a -> InferHandler a b
inferGetField (V.GetField e name) = \go locals ->
do
(p1_t, e') <- go locals e
p1_tv <- freshInferredVar locals "a"
p1_tvRecName <- freshInferredVarName locals "r"
M.tellRowConstraint p1_tvRecName name
((), p2_s) <-
M.listenSubst $ unifyUnsafe p1_t $
T.TRecord $ T.RExtend name p1_tv $ TV.lift p1_tvRecName
let p2_tv = Subst.apply p2_s p1_tv
pure (V.BGetField (V.GetField e' name), p2_tv)
{-# INLINE inferInject #-}
inferInject :: V.Inject a -> InferHandler a b
inferInject (V.Inject name e) = \go locals ->
do
(t, e') <- go locals e
tvVariantName <- freshInferredVarName locals "s"
M.tellRowConstraint tvVariantName name
pure
( V.BInject (V.Inject name e')
, T.TVariant $ T.RExtend name t $ TV.lift tvVariantName
)
{-# INLINE inferCase #-}
inferCase :: Tree (RowExtend T.Tag V.Term V.Term) (Ann a) -> InferHandler (Val a) b
inferCase (RowExtend name m mm) = \go locals ->
do
((p1_tm, m'), p1_s) <- M.listenSubst $ go locals m
let p1 = Subst.apply p1_s
-- p1
((p2_tmm, mm'), p2_s) <- M.listenSubst $ go (p1 locals) mm
let p2 = Subst.apply p2_s
p2_tm = p2 p1_tm
-- p2
p2_tv <- freshInferredVar locals "a"
p2_tvRes <- freshInferredVar locals "res"
-- type(match) `unify` a->res
((), p3_s) <-
M.listenSubst $ unifyUnsafe p2_tm $ T.TFun p2_tv p2_tvRes
let p3 = Subst.apply p3_s
p3_tv = p3 p2_tv
p3_tvRes = p3 p2_tvRes
p3_tmm = p3 p2_tmm
-- p3
-- new variant type var "s":
tvVariantName <- freshInferredVarName locals "s"
M.tellRowConstraint tvVariantName name
let p3_tvVariant = TV.lift tvVariantName
-- type(mismatch) `unify` [ s ]->res
((), p4_s) <-
M.listenSubst $ unifyUnsafe p3_tmm $
T.TFun (T.TVariant p3_tvVariant) p3_tvRes
let p4 :: CanSubst a => a -> a
p4 = Subst.apply p4_s
p4_tvVariant = p4 p3_tvVariant
p4_tvRes = p4 p3_tvRes
p4_tv = p4 p3_tv
-- p4
pure
( V.BCase (RowExtend name m' mm')
, T.TFun (T.TVariant (T.RExtend name p4_tv p4_tvVariant)) p4_tvRes
)
{-# INLINE inferRecExtend #-}
inferRecExtend :: Tree (RowExtend T.Tag V.Term V.Term) (Ann a) -> InferHandler (Val a) b
inferRecExtend (RowExtend name e1 e2) = \go locals ->
do
((t1, e1'), s1) <- M.listenSubst $ go locals e1
((t2, e2'), s2) <- M.listenSubst $ go (Subst.apply s1 locals) e2
(rest, s3) <-
M.listenSubst $
case t2 of
T.TRecord x ->
-- In case t2 is already inferred as a TRecord,
-- verify it doesn't already have this field,
-- and avoid unnecessary unify from other case
case hasTag name x of
HasTag -> M.throwError $ Err.DuplicateField name x
DoesNotHaveTag -> pure x
MayHaveTag var -> x <$ M.tellRowConstraint var name
_ -> do
tv <- freshInferredVarName locals "r"
M.tellRowConstraint tv name
let tve = TV.lift tv
((), s) <- M.listenSubst $ unifyUnsafe t2 $ T.TRecord tve
pure $ Subst.apply s tve
let t1' = Subst.apply s3 $ Subst.apply s2 t1
pure
( V.BRecExtend (RowExtend name e1' e2')
, T.TRecord $ T.RExtend name t1' rest
)
getNominal :: Map T.NominalId Nominal -> T.NominalId -> M.Infer Nominal
getNominal nominals name =
case Map.lookup name nominals of
Nothing -> M.throwError $ Err.MissingNominal name
Just nominal -> pure nominal
-- errorizes if the map mismatches the map in the Nominal
applyNominal :: Map T.ParamId Type -> Nominal -> Scheme
applyNominal m (Nominal params scheme) =
Subst.apply subst scheme
where
subst = mempty { Subst.substTypes = Map.mapKeys (`find` params) m }
find k =
fromMaybe (error "Nominal.instantiate with wrong param map") .
Map.lookup k
nomTypes :: SkolemScope -> Map T.NominalId Nominal -> T.NominalId -> M.Infer (Type, Scheme)
nomTypes outerSkolemsScope nominals name =
do
nominal <- getNominal nominals name
p1_paramVals <-
nominal ^. nomParams
& Map.keysSet & Map.fromSet (const (M.freshInferredVar outerSkolemsScope "n"))
& sequenceA
pure (T.TInst name p1_paramVals, applyNominal p1_paramVals nominal)
{-# INLINE inferFromNom #-}
inferFromNom ::
Map T.NominalId Nominal -> T.NominalId -> Scope ->
M.InferCtx (Either Err.Error) Type
inferFromNom nominals n locals =
do
(outerType, innerScheme) <-
nomTypes (Scope.skolems locals) nominals n
innerType <- Scheme.instantiate (Scope.skolems locals) innerScheme
T.TFun outerType innerType & pure
{-# INLINE inferToNom #-}
inferToNom :: Map T.NominalId Nominal -> Tree (ToNom T.NominalId V.Term) k -> InferHandler (Tree k V.Term) a
inferToNom nominals (ToNom name val) = \go locals ->
do
(p1_outerType, p1_innerScheme) <- nomTypes (Scope.skolems locals) nominals name
((skolemRenames, p1_innerType), instantiateResults) <-
M.listen $ Scheme.instantiateWithRenames (Scope.skolems locals) p1_innerScheme
let skolems = TV.renameDest skolemRenames
M.addSkolems skolems $ M._constraints instantiateResults
(p1_t, val') <- go (Scope.insertSkolems skolems locals) val
((), p2_s) <- M.listenSubst $ unifyUnsafe p1_t p1_innerType
let p2_outerType = Subst.apply p2_s p1_outerType
pure
( V.BToNom (ToNom name val')
, p2_outerType
)
inferInternal ::
(Type -> Scope -> a -> b) ->
Dependencies -> Scope -> Val a -> Infer (Val b)
inferInternal f deps =
(fmap . fmap) snd . go
where
go locals (Ann pl body) =
( case body of
V.BLeaf leaf -> inferLeaf deps leaf
V.BLam lam -> inferAbs lam
V.BApp app -> inferApply app
V.BGetField getField -> inferGetField getField
V.BInject inject -> inferInject inject
V.BCase case_ -> inferCase case_
V.BRecExtend recExtend -> inferRecExtend recExtend
V.BToNom nom -> inferToNom (deps ^. depsNominals) nom
) go locals
<&> \(body', typ) -> (typ, Ann (f typ locals pl) body')
|
Peaker/Algorithm-W-Step-By-Step
|
src/Lamdu/Infer.hs
|
gpl-3.0
| 13,418
| 0
| 19
| 3,644
| 4,293
| 2,201
| 2,092
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Test.Prelude.Function (tests) where
import Data.Function
import Stg.Marshal
import Stg.Parser.QuasiQuoter
import qualified Stg.Prelude as Stg
import Test.Machine.Evaluate.TestTemplates.MarshalledValue
import Test.Orphans ()
import Test.QuickCheck.Modifiers
import Test.Tasty
tests :: TestTree
tests = testGroup "Function"
[ testId
, testConst
, testCompose
, testFix ]
testId :: TestTree
testId = marshalledValueTest defSpec
{ testName = "id"
, sourceSpec = \(x :: Integer) -> MarshalSourceSpec
{ resultVar = "main"
, expectedValue = x
, source = mconcat
[ toStg "x" x
, Stg.id
, [stg| main = \ => id x |] ]}}
testConst :: TestTree
testConst = marshalledValueTest defSpec
{ testName = "const"
, sourceSpec = \(x :: Integer, y :: Integer) -> MarshalSourceSpec
{ resultVar = "main"
, expectedValue = x
, source = mconcat
[ toStg "x" x
, toStg "y" y
, Stg.const
, [stg| main = \ => const x y |] ]}}
testCompose :: TestTree
testCompose = marshalledValueTest defSpec
{ testName = "compose (.)"
, sourceSpec = \x -> MarshalSourceSpec
{ resultVar = "main"
, expectedValue = ((*3) . (+2)) x
, source = mconcat
[ toStg "x" (x :: Integer)
, toStg "two" (2 :: Integer)
, toStg "three" (3 :: Integer)
, Stg.add
, Stg.mul
, Stg.compose
, Stg.const
, [stg|
plus2 = \x -> add x two;
times3 = \x -> mul x three;
plus2times3 = \ -> compose times3 plus2;
main = \ => plus2times3 x |] ]}}
testFix :: TestTree
testFix = marshalledValueTest defSpec
{ testName = "fix"
, sourceSpec = \(NonNegative (n :: Integer)) -> MarshalSourceSpec
{ resultVar = "main"
, expectedValue =
let fac' = \rec m -> if m == 0 then 1 else m * rec (m-1)
fac = fix fac'
in fac n
, source = mconcat
[ toStg "n" n
, toStg "zero" (0 :: Integer)
, toStg "one" (1 :: Integer)
, Stg.sub
, Stg.mul
, Stg.fix
, Stg.leq_Int
, Stg.const
, [stg|
fac' = \rec m -> case leq_Int m zero of
True -> one;
False -> case sub m one of
mMinusOne -> case rec mMinusOne of
recMMinusOne -> mul m recMMinusOne;
badBool -> Error_fac' badBool;
fac = \ => fix fac';
main = \ => fac n |] ]}}
|
quchen/stg
|
test/Testsuite/Test/Prelude/Function.hs
|
bsd-3-clause
| 2,875
| 0
| 20
| 1,126
| 605
| 364
| 241
| 73
| 2
|
{-# LANGUAGE DeriveDataTypeable #-}
module Control.Parallel.HdpH.Internal.Type.Msg where
import Control.Parallel.HdpH.Internal.Type.Par -- (Task)
import Control.Parallel.HdpH.Internal.Type.GRef (TaskRef)
import Control.Parallel.HdpH.Internal.Location (NodeId)
import Control.DeepSeq (NFData, rnf)
-- import Data.Serialize (Serialize)
import Data.Binary (Binary)
import qualified Data.Binary (put, get)
import Data.Word (Word8)
-----------------------------------------------------------------------------
-- HdpH messages (peer to peer)
-- 11 different types of messages dealing with fishing and pushing sparks;
-- the parameter 'm' abstracts the type of sparks
data Msg m = PUSH -- eagerly pushing work
(Task m) -- task
| FISH -- looking for work
!NodeId -- fishing node
| SCHEDULE -- reply to FISH sender (when there is work)
(Task m) -- spark
!NodeId -- sender
| NOWORK -- reply to FISH sender (when there is no work)
| REQ
TaskRef -- The globalized spark pointer
!Int -- sequence number of task
!NodeId -- the node it would move from
!NodeId -- the node it has been scheduled to
| AUTH
TaskRef -- Spark to SCHEDULE onwards
!NodeId -- fishing node to send SCHEDULE to
| DENIED
!NodeId -- fishing node to return NOWORK to
| ACK -- notify the supervising node that a spark has been scheduled here
TaskRef -- The globalized spark pointer
!Int -- sequence number of task
!NodeId -- the node receiving the spark, just a sanity check
| DEADNODE -- a node has died (propagated from the transport layer)
!NodeId -- which node has died
| OBSOLETE -- obsolete task copy (old sequence number)
!NodeId -- fishing node waiting for guarded spark (to receive NOWORK)
| HEARTBEAT -- keep-alive heartbeat message
-- Show instance (mainly for debugging)
instance Show (Msg m) where
showsPrec _ (PUSH _spark) = showString "PUSH(_)"
showsPrec _ (FISH fisher) = showString "FISH(" . shows fisher .
showString ")"
showsPrec _ (SCHEDULE _spark sender) = showString "SCHEDULE(_," .
shows sender . showString ")"
showsPrec _ (NOWORK) = showString "NOWORK"
showsPrec _ (REQ _sparkHandle thisSeq from to) = showString "REQ(_," .
shows thisSeq . showString "," .
shows from . showString "," .
shows to . showString ")"
showsPrec _ (AUTH _sparkHandle to) = showString "AUTH(_," .
shows to . showString ")"
showsPrec _ (DENIED to) = showString "DENIED(" .
shows to . showString ")"
showsPrec _ (ACK _gspark _seqN newNode) = showString "ACK(_," .
shows newNode .
showString ")"
showsPrec _ (DEADNODE deadNode) = showString "DEADNODE(" .
shows deadNode . showString ")"
showsPrec _ (OBSOLETE fisher) = showString "OBSOLETE(" .
shows fisher . showString ")"
showsPrec _ (HEARTBEAT) = showString "HEARTBEAT"
instance NFData (Msg m) where
rnf (PUSH spark) = rnf spark
rnf (FISH fisher) = rnf fisher
rnf (SCHEDULE spark sender) = rnf spark `seq` rnf sender
rnf (NOWORK) = ()
rnf (REQ sparkHandle seqN from to) = rnf sparkHandle `seq` rnf seqN `seq` rnf from `seq` rnf to
rnf (AUTH sparkHandle to) = rnf sparkHandle `seq` rnf to
rnf (DENIED to) = rnf to
rnf (ACK sparkHandle seqN newNode) = rnf sparkHandle `seq` rnf seqN `seq` rnf newNode
rnf (DEADNODE deadNode) = rnf deadNode
rnf (OBSOLETE fisher) = rnf fisher
rnf (HEARTBEAT) = ()
instance Binary (Msg m) where
put (PUSH spark) = Data.Binary.put (0 :: Word8) >>
Data.Binary.put spark
put (FISH fisher) = Data.Binary.put (1 :: Word8) >>
Data.Binary.put fisher
put (SCHEDULE spark sender) = Data.Binary.put (2 :: Word8) >>
Data.Binary.put spark >>
Data.Binary.put sender
put (NOWORK) = Data.Binary.put (3 :: Word8)
put (REQ sparkHandle seqN from to) = Data.Binary.put (4 :: Word8) >>
Data.Binary.put sparkHandle >>
Data.Binary.put seqN >>
Data.Binary.put from >>
Data.Binary.put to
put (AUTH sparkHandle to) = Data.Binary.put (5 :: Word8) >>
Data.Binary.put sparkHandle >>
Data.Binary.put to
put (DENIED fishingNode) = Data.Binary.put (6 :: Word8) >>
Data.Binary.put fishingNode
put (ACK sparkHandle seqN fishingNode) = Data.Binary.put (7 :: Word8) >>
Data.Binary.put sparkHandle >>
Data.Binary.put seqN >>
Data.Binary.put fishingNode
put (DEADNODE deadNode) = Data.Binary.put (8 :: Word8) >>
Data.Binary.put deadNode
put (OBSOLETE fisher) = Data.Binary.put (9 :: Word8) >>
Data.Binary.put fisher
put (HEARTBEAT) = Data.Binary.put (10 :: Word8)
get = do tag <- Data.Binary.get
case tag :: Word8 of
0 -> do spark <- Data.Binary.get
return $ PUSH spark
1 -> do fisher <- Data.Binary.get
return $ FISH fisher
2 -> do spark <- Data.Binary.get
sender <- Data.Binary.get
return $ SCHEDULE spark sender
3 -> do return $ NOWORK
4 -> do sparkHandle <- Data.Binary.get
seqN <- Data.Binary.get
from <- Data.Binary.get
to <- Data.Binary.get
return $ REQ sparkHandle seqN from to
5 -> do sparkHandle <- Data.Binary.get
to <- Data.Binary.get
return $ AUTH sparkHandle to
6 -> do fishingNode <- Data.Binary.get
return $ DENIED fishingNode
7 -> do sparkHandle <- Data.Binary.get
seqN <- Data.Binary.get
fishingNode <- Data.Binary.get
return $ ACK sparkHandle seqN fishingNode
8 -> do deadNode <- Data.Binary.get
return $ DEADNODE deadNode
9 -> do fisher <- Data.Binary.get
return $ OBSOLETE fisher
10 -> do return $ HEARTBEAT
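-- Added note (a minimal sketch, not part of the original module): the Word8
-- tag written by 'put' selects the matching branch in 'get', so decoding an
-- encoded message rebuilds the same constructor. Assuming the 'Binary'
-- instances for 'Task m', 'TaskRef' and 'NodeId' that this instance already
-- relies on are in scope:
--
-- > import Data.Binary (decode, encode)
-- >
-- > roundtrip :: Msg m -> Msg m
-- > roundtrip = decode . encode
-- >
-- > -- e.g. roundtrip (FISH someNode) yields FISH someNode again
-- > -- (someNode is a hypothetical NodeId, used only for illustration).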
|
robstewart57/hdph-rs
|
src/Control/Parallel/HdpH/Internal/Type/Msg.hs
|
bsd-3-clause
| 7,419
| 0
| 14
| 3,081
| 1,696
| 869
| 827
| 150
| 0
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
module Main where
import Language.Hakaru.Evaluation.ConstantPropagation
import Language.Hakaru.Syntax.TypeCheck
import Language.Hakaru.Syntax.AST.Transforms (expandTransformations)
import Language.Hakaru.Syntax.ANF (normalize)
import Language.Hakaru.Syntax.CSE (cse)
import Language.Hakaru.Syntax.Prune (prune)
import Language.Hakaru.Syntax.Uniquify (uniquify)
import Language.Hakaru.Syntax.Hoist (hoist)
import Language.Hakaru.Summary
import Language.Hakaru.Command
import Language.Hakaru.CodeGen.Wrapper
import Language.Hakaru.CodeGen.CodeGenMonad
import Language.Hakaru.CodeGen.AST
import Language.Hakaru.CodeGen.Pretty
import Control.Monad.Reader
import Data.Monoid
import Data.Maybe (isJust)
import Data.Text hiding (any,map,filter,foldr)
import qualified Data.Text.IO as IO
import Text.PrettyPrint (render)
import Options.Applicative
import System.IO
import System.Process
import System.Exit
import Prelude hiding (concat)
data Options =
Options { debug :: Bool
, optimize :: Maybe Int
, summaryOpt :: Bool
, make :: Maybe String
, asFunc :: Maybe String
, fileIn :: String
, fileOut :: Maybe String
, par :: Bool -- turns on simd and sharedMem
, noWeightsOpt :: Bool
, showProbInLogOpt :: Bool
, garbageCollector :: Bool
-- , logProbs :: Bool
} deriving Show
main :: IO ()
main = do
opts <- parseOpts
prog <- readFromFile (fileIn opts)
runReaderT (compileHakaru prog) opts
options :: Parser Options
options = Options
<$> switch ( long "debug"
<> short 'D'
<> help "Prints Hakaru src, Hakaru AST, C AST, C src" )
<*> (optional $ option auto ( short 'O'
<> metavar "{0,1,2}"
<> help "perform Hakaru AST optimizations. optimization levels 0,1,2." ))
<*> switch ( long "summary"
<> short 'S'
<> help "Performs summarization optimization" )
<*> (optional $ strOption ( long "make"
<> short 'm'
<> help "Compiles generated C code with the compiler ARG"))
<*> (optional $ strOption ( long "as-function"
<> short 'F'
<> help "Compiles to a sampling C function with the name ARG" ))
<*> strArgument ( metavar "INPUT"
<> help "Program to be compiled")
<*> (optional $ strOption ( short 'o'
<> metavar "OUTPUT"
<> help "output FILE"))
<*> switch ( short 'j'
<> help "Generates multithreaded and simd parallel programs using OpenMP directives")
<*> switch ( long "no-weights"
<> short 'w'
<> help "Don't print the weights")
<*> switch ( long "show-prob-log"
<> help "Shows prob types as 'exp(<log-domain-value>)' instead of '<value>'")
<*> switch ( long "garbage-collector"
<> short 'g'
<> help "Use Boehm Garbage Collector")
-- <*> switch ( long "-no-log-space-probs"
-- <> help "Do not log `prob` types; WARNING this is more likely to underflow.")
parseOpts :: IO Options
parseOpts = execParser $ info (helper <*> options)
$ fullDesc <> progDesc "Compile Hakaru to C"
compileHakaru :: Text -> ReaderT Options IO ()
compileHakaru prog = ask >>= \config -> lift $ do
prog' <- parseAndInferWithDebug (debug config) prog
case prog' of
Left err -> IO.hPutStrLn stderr err
Right (TypedAST typ ast) -> do
astS <- case summaryOpt config of
True -> summary (expandTransformations ast)
False -> return (expandTransformations ast)
let ast' = TypedAST typ $ foldr id astS (abtPasses $ optimize config)
outPath = case fileOut config of
(Just f) -> f
Nothing -> "-"
codeGen = wrapProgram ast'
(asFunc config)
(PrintConfig { noWeights = noWeightsOpt config
, showProbInLog = showProbInLogOpt config })
codeGenConfig = emptyCG { sharedMem = par config
, simd = par config
, managedMem = garbageCollector config}
cast = CAST $ runCodeGenWith codeGen codeGenConfig
output = pack . render . pretty $ cast
when (debug config) $ do
putErrorLn $ hrule "Hakaru Type"
putErrorLn . pack . show $ typ
putErrorLn $ hrule "Hakaru AST"
putErrorLn $ pack $ show ast
when (isJust . optimize $ config) $ do
putErrorLn $ hrule "Hakaru AST'"
putErrorLn $ pack $ show ast'
putErrorLn $ hrule "C AST"
putErrorLn $ pack $ show cast
putErrorLn $ hrule "Fin"
case make config of
Nothing -> writeToFile outPath output
Just cc -> makeFile cc (fileOut config) (unpack output) config
where hrule s = concat [ "\n<=======================| "
, s
," |=======================>\n"]
abtPasses Nothing = []
abtPasses (Just 0) = [ constantPropagation ]
abtPasses (Just 1) = [ constantPropagation
, uniquify
, prune
, cse
, hoist
, uniquify ]
abtPasses (Just 2) = abtPasses (Just 1) ++ [normalize]
abtPasses _ = error "the only optimization levels are 0, 1, and 2"
putErrorLn :: Text -> IO ()
putErrorLn = IO.hPutStrLn stderr
makeFile :: String -> Maybe String -> String -> Options -> IO ()
makeFile cc mout prog opts =
do let p = proc cc $ ["-pedantic"
,"-std=c99"
,"-lm"
,"-xc"
,"-"]
++ (case mout of
Nothing -> []
Just o -> ["-o" ++ o])
++ (if par opts then ["-fopenmp"] else [])
(Just inH, _, _, pH) <- createProcess p { std_in = CreatePipe
, std_out = CreatePipe }
hPutStrLn inH prog
hClose inH
exit <- waitForProcess pH
case exit of
ExitSuccess -> return ()
_ -> error $ cc ++ " returned exit code: " ++ show exit
|
zachsully/hakaru
|
commands/HKC.hs
|
bsd-3-clause
| 6,829
| 0
| 22
| 2,555
| 1,611
| 832
| 779
| 152
| 9
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
-- |
-- Module: $HEADER$
-- Description: Abstract API for DHT implementations.
-- Copyright: (c) 2015, Jan Šipr, Matej Kollár, Peter Trško
-- License: BSD3
--
-- Stability: experimental
-- Portability: NoImplicitPrelude
--
-- Abstract API for DHT implementations.
module Data.DHT
( module Data.DHT.Core
)
where
import Data.DHT.Core
|
FPBrno/dht-api
|
src/Data/DHT.hs
|
bsd-3-clause
| 451
| 0
| 5
| 86
| 33
| 26
| 7
| 5
| 0
|
{-# LANGUAGE NoMonomorphismRestriction #-}
module TAC.Emit where
import qualified TAC.Data as T
import qualified JVM.Type as J
program :: T.Program -> [ J.Statement ]
program stmts = ( do s <- stmts ; statement s ) ++ [ J.Halt ]
statement :: T.Statement -> [ J.Statement ]
statement s = case s of
T.Constant i c -> number c ++ number i ++ [ J.Store ]
T.Add i j k -> operation J.Add i j k
T.Mul i j k -> operation J.Mul i j k
number 0 = [ J.Push 0 ]
number k = replicate k ( J.Push 1 )
++ replicate (k-1) ( J.Add )
operation op i j k =
number j ++ [ J.Load ]
++ number k ++ [ J.Load ]
++ [ op ]
++ number i ++ [ J.Store ]
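-- Worked example (added note, not part of the original module): a constant k
-- is built by pushing 1 k times and folding with Add (k-1 times), so
--
-- > statement (T.Constant 1 2)
-- >   == [ J.Push 1, J.Push 1, J.Add   -- number 2 (argument c)
-- >      , J.Push 1                    -- number 1 (argument i)
-- >      , J.Store ]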
|
Erdwolf/autotool-bonn
|
src/TAC/Emit.hs
|
gpl-2.0
| 667
| 0
| 12
| 184
| 308
| 159
| 149
| 19
| 3
|
-------------------------------------------------------------------------------
-- |
-- Module : Main
-- Copyright : (c) University of Kansas
-- License : BSD3
-- Stability : experimental
--
-- TBD.
-------------------------------------------------------------------------------
module Main where
import System.Hardware.Haskino
prog :: Arduino ()
prog = loop $
do
--digitalWrite 2 True
_ <- digitalRead 2
return ()
main :: IO ()
main = withArduino True "/dev/cu.usbmodem1421" prog
-- main = compileProgram prog "listPack.ino"
-- main = putStrLn $ show newEvenOdd
|
ku-fpg/kansas-amber
|
firmware/Timing/Host/Test.hs
|
bsd-3-clause
| 632
| 0
| 9
| 141
| 81
| 47
| 34
| 9
| 1
|
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE CPP #-}
#ifdef DEBUG_CONFLICT_SETS
{-# LANGUAGE ImplicitParams #-}
#endif
module Distribution.Solver.Modular.Dependency (
-- * Variables
Var(..)
, simplifyVar
, varPI
, showVar
-- * Conflict sets
, ConflictSet
, CS.showCS
-- * Constrained instances
, CI(..)
, merge
-- * Flagged dependencies
, FlaggedDeps
, FlaggedDep(..)
, Dep(..)
, showDep
, flattenFlaggedDeps
, QualifyOptions(..)
, qualifyDeps
, unqualifyDeps
-- ** Setting/forgetting components
, forgetCompOpenGoal
, setCompFlaggedDeps
-- * Reverse dependency map
, RevDepMap
-- * Goals
, Goal(..)
, GoalReason(..)
, QGoalReason
, ResetVar(..)
, goalVarToConflictSet
, varToConflictSet
, goalReasonToVars
-- * Open goals
, OpenGoal(..)
, close
) where
import Prelude hiding (pi)
import Data.Map (Map)
import qualified Data.List as L
import Language.Haskell.Extension (Extension(..), Language(..))
import Distribution.Text
import Distribution.Solver.Modular.ConflictSet (ConflictSet)
import Distribution.Solver.Modular.Flag
import Distribution.Solver.Modular.Package
import Distribution.Solver.Modular.Var
import Distribution.Solver.Modular.Version
import qualified Distribution.Solver.Modular.ConflictSet as CS
import Distribution.Solver.Types.ComponentDeps (Component(..))
#ifdef DEBUG_CONFLICT_SETS
import GHC.Stack (CallStack)
#endif
{-------------------------------------------------------------------------------
Constrained instances
-------------------------------------------------------------------------------}
-- | Constrained instance. If the choice has already been made, this is
-- a fixed instance, and we record the package name for which the choice
-- is for convenience. Otherwise, it is a list of version ranges paired with
-- the goals / variables that introduced them.
data CI qpn = Fixed I (Var qpn) | Constrained [VROrigin qpn]
deriving (Eq, Show, Functor)
showCI :: CI QPN -> String
showCI (Fixed i _) = "==" ++ showI i
showCI (Constrained vr) = showVR (collapse vr)
-- | Merge constrained instances. We currently adopt a lazy strategy for
-- merging, i.e., we only perform actual checking if one of the two choices
-- is fixed. If the merge fails, we return a conflict set indicating the
-- variables responsible for the failure, as well as the two conflicting
-- fragments.
--
-- Note that while there may be more than one conflicting pair of version
-- ranges, we only return the first we find.
--
-- TODO: Different pairs might have different conflict sets. We're
-- obviously interested to return a conflict that has a "better" conflict
-- set in the sense that it contains variables that allow us to backjump
-- further. We might apply some heuristics here, such as to change the
-- order in which we check the constraints.
merge ::
#ifdef DEBUG_CONFLICT_SETS
(?loc :: CallStack) =>
#endif
Ord qpn => CI qpn -> CI qpn -> Either (ConflictSet qpn, (CI qpn, CI qpn)) (CI qpn)
merge c@(Fixed i g1) d@(Fixed j g2)
| i == j = Right c
| otherwise = Left (CS.union (varToConflictSet g1) (varToConflictSet g2), (c, d))
merge c@(Fixed (I v _) g1) (Constrained rs) = go rs -- I tried "reverse rs" here, but it seems to slow things down ...
where
go [] = Right c
go (d@(vr, g2) : vrs)
| checkVR vr v = go vrs
| otherwise = Left (CS.union (varToConflictSet g1) (varToConflictSet g2), (c, Constrained [d]))
merge c@(Constrained _) d@(Fixed _ _) = merge d c
merge (Constrained rs) (Constrained ss) = Right (Constrained (rs ++ ss))
{-------------------------------------------------------------------------------
Flagged dependencies
-------------------------------------------------------------------------------}
-- | Flagged dependencies
--
-- 'FlaggedDeps' is the modular solver's view of a package's dependencies:
-- rather than having the dependencies indexed by component, each dependency
-- defines what component it is in.
--
-- However, top-level goals are also modelled as dependencies, but of course
-- these don't actually belong in any component of any package. Therefore, we
-- parameterize 'FlaggedDeps' and derived datatypes with a type argument that
-- specifies whether or not we have a component: we only ever instantiate this
-- type argument with @()@ for top-level goals, or 'Component' for everything
-- else (we could express this as a kind at the type-level, but that would
-- require a very recent GHC).
--
-- Note however, crucially, that independent of the type parameters, the list
-- of dependencies underneath a flag choice or stanza choices _always_ uses
-- Component as the type argument. This is important: when we pick a value for
-- a flag, we _must_ know what component the new dependencies belong to, or
-- else we won't be able to construct fine-grained reverse dependencies.
type FlaggedDeps comp qpn = [FlaggedDep comp qpn]
-- | Flagged dependencies can either be plain dependency constraints,
-- or flag-dependent dependency trees.
data FlaggedDep comp qpn =
-- | Dependencies which are conditional on a flag choice.
Flagged (FN qpn) FInfo (TrueFlaggedDeps qpn) (FalseFlaggedDeps qpn)
-- | Dependencies which are conditional on whether or not a stanza
-- (e.g., a test suite or benchmark) is enabled.
| Stanza (SN qpn) (TrueFlaggedDeps qpn)
-- | Dependencies which are always enabled, for the component
-- 'comp' (or requested by the user, if comp is @()@).
| Simple (Dep qpn) comp
deriving (Eq, Show)
-- | Conservatively flatten out flagged dependencies
--
-- NOTE: We do not filter out duplicates.
flattenFlaggedDeps :: FlaggedDeps Component qpn -> [(Dep qpn, Component)]
flattenFlaggedDeps = concatMap aux
where
aux :: FlaggedDep Component qpn -> [(Dep qpn, Component)]
aux (Flagged _ _ t f) = flattenFlaggedDeps t ++ flattenFlaggedDeps f
aux (Stanza _ t) = flattenFlaggedDeps t
aux (Simple d c) = [(d, c)]
type TrueFlaggedDeps qpn = FlaggedDeps Component qpn
type FalseFlaggedDeps qpn = FlaggedDeps Component qpn
-- | A dependency (constraint) associates a package name with a
-- constrained instance.
--
-- 'Dep' intentionally has no 'Functor' instance because the type variable
-- is used both to record the dependencies as well as who's doing the
-- depending; having a 'Functor' instance makes bugs where we don't distinguish
-- these two far too likely. (By rights 'Dep' ought to have two type variables.)
data Dep qpn = Dep qpn (CI qpn) -- dependency on a package
| Ext Extension -- dependency on a language extension
| Lang Language -- dependency on a language version
| Pkg PN VR -- dependency on a pkg-config package
deriving (Eq, Show)
showDep :: Dep QPN -> String
showDep (Dep qpn (Fixed i v) ) =
(if P qpn /= v then showVar v ++ " => " else "") ++
showQPN qpn ++ "==" ++ showI i
showDep (Dep qpn (Constrained [(vr, v)])) =
showVar v ++ " => " ++ showQPN qpn ++ showVR vr
showDep (Dep qpn ci ) =
showQPN qpn ++ showCI ci
showDep (Ext ext) = "requires " ++ display ext
showDep (Lang lang) = "requires " ++ display lang
showDep (Pkg pn vr) = "requires pkg-config package "
++ display pn ++ display vr
++ ", not found in the pkg-config database"
-- | Options for goal qualification (used in 'qualifyDeps')
--
-- See also 'defaultQualifyOptions'
data QualifyOptions = QO {
-- | Do we have a version of base relying on another version of base?
qoBaseShim :: Bool
-- | Should dependencies of the setup script be treated as independent?
, qoSetupIndependent :: Bool
}
deriving Show
-- | Apply built-in rules for package qualifiers
--
-- Although the behaviour of 'qualifyDeps' depends on the 'QualifyOptions',
-- it is important that these 'QualifyOptions' are _static_. Qualification
-- does NOT depend on flag assignment; in other words, it behaves the same no
-- matter which choices the solver makes (modulo the global 'QualifyOptions');
-- we rely on this in 'linkDeps' (see comment there).
--
-- NOTE: It's the _dependencies_ of a package that may or may not be independent
-- from the package itself. Package flag choices must of course be consistent.
qualifyDeps :: QualifyOptions -> QPN -> FlaggedDeps Component PN -> FlaggedDeps Component QPN
qualifyDeps QO{..} (Q pp@(PP ns q) pn) = go
where
go :: FlaggedDeps Component PN -> FlaggedDeps Component QPN
go = map go1
go1 :: FlaggedDep Component PN -> FlaggedDep Component QPN
go1 (Flagged fn nfo t f) = Flagged (fmap (Q pp) fn) nfo (go t) (go f)
go1 (Stanza sn t) = Stanza (fmap (Q pp) sn) (go t)
go1 (Simple dep comp) = Simple (goD dep comp) comp
-- Suppose package B has a setup dependency on package A.
-- This will be recorded as something like
--
-- > Dep "A" (Constrained [(AnyVersion, Goal (P "B") reason])
--
-- Observe that when we qualify this dependency, we need to turn that
-- @"A"@ into @"B-setup.A"@, but we should not apply that same qualifier
-- to the goal or the goal reason chain.
goD :: Dep PN -> Component -> Dep QPN
goD (Ext ext) _ = Ext ext
goD (Lang lang) _ = Lang lang
goD (Pkg pkn vr) _ = Pkg pkn vr
goD (Dep dep ci) comp
| qBase dep = Dep (Q (PP ns (Base pn)) dep) (fmap (Q pp) ci)
| qSetup comp = Dep (Q (PP ns (Setup pn)) dep) (fmap (Q pp) ci)
| otherwise = Dep (Q (PP ns inheritedQ) dep) (fmap (Q pp) ci)
-- If P has a setup dependency on Q, and Q has a regular dependency on R, then
-- we say that the 'Setup' qualifier is inherited: P has an (indirect) setup
-- dependency on R. We do not do this for the base qualifier however.
--
-- The inherited qualifier is only used for regular dependencies; for setup
-- and base dependencies we override the existing qualifier. See #3160 for
-- a detailed discussion.
inheritedQ :: Qualifier
inheritedQ = case q of
Setup _ -> q
Unqualified -> q
Base _ -> Unqualified
-- Should we qualify this goal with the 'Base' package path?
qBase :: PN -> Bool
qBase dep = qoBaseShim && unPackageName dep == "base"
-- Should we qualify this goal with the 'Setup' package path?
qSetup :: Component -> Bool
qSetup comp = qoSetupIndependent && comp == ComponentSetup
-- | Remove qualifiers from set of dependencies
--
-- This is used during link validation: when we link package @Q.A@ to @Q'.A@,
-- then all dependencies @Q.B@ need to be linked to @Q'.B@. In order to compute
-- what to link these dependencies to, we need to requalify @Q.B@ to become
-- @Q'.B@; we do this by first removing all qualifiers and then calling
-- 'qualifyDeps' again.
unqualifyDeps :: FlaggedDeps comp QPN -> FlaggedDeps comp PN
unqualifyDeps = go
where
go :: FlaggedDeps comp QPN -> FlaggedDeps comp PN
go = map go1
go1 :: FlaggedDep comp QPN -> FlaggedDep comp PN
go1 (Flagged fn nfo t f) = Flagged (fmap unq fn) nfo (go t) (go f)
go1 (Stanza sn t) = Stanza (fmap unq sn) (go t)
go1 (Simple dep comp) = Simple (goD dep) comp
goD :: Dep QPN -> Dep PN
goD (Dep qpn ci) = Dep (unq qpn) (fmap unq ci)
goD (Ext ext) = Ext ext
goD (Lang lang) = Lang lang
goD (Pkg pn vr) = Pkg pn vr
unq :: QPN -> PN
unq (Q _ pn) = pn
{-------------------------------------------------------------------------------
Setting/forgetting the Component
-------------------------------------------------------------------------------}
forgetCompOpenGoal :: OpenGoal Component -> OpenGoal ()
forgetCompOpenGoal = mapCompOpenGoal $ const ()
setCompFlaggedDeps :: Component -> FlaggedDeps () qpn -> FlaggedDeps Component qpn
setCompFlaggedDeps = mapCompFlaggedDeps . const
{-------------------------------------------------------------------------------
Auxiliary: Mapping over the Component goal
We don't export these, because the only type instantiations for 'a' and 'b'
here should be () or Component. (We could express this at the type level
if we relied on newer versions of GHC.)
-------------------------------------------------------------------------------}
mapCompOpenGoal :: (a -> b) -> OpenGoal a -> OpenGoal b
mapCompOpenGoal g (OpenGoal d gr) = OpenGoal (mapCompFlaggedDep g d) gr
mapCompFlaggedDeps :: (a -> b) -> FlaggedDeps a qpn -> FlaggedDeps b qpn
mapCompFlaggedDeps = L.map . mapCompFlaggedDep
mapCompFlaggedDep :: (a -> b) -> FlaggedDep a qpn -> FlaggedDep b qpn
mapCompFlaggedDep _ (Flagged fn nfo t f) = Flagged fn nfo t f
mapCompFlaggedDep _ (Stanza sn t ) = Stanza sn t
mapCompFlaggedDep g (Simple pn a ) = Simple pn (g a)
{-------------------------------------------------------------------------------
Reverse dependency map
-------------------------------------------------------------------------------}
-- | A map containing reverse dependencies between qualified
-- package names.
type RevDepMap = Map QPN [(Component, QPN)]
{-------------------------------------------------------------------------------
Goals
-------------------------------------------------------------------------------}
-- | A goal is just a solver variable paired with a reason.
-- The reason is only used for tracing.
data Goal qpn = Goal (Var qpn) (GoalReason qpn)
deriving (Eq, Show, Functor)
-- | Reason why a goal is being added to a goal set.
data GoalReason qpn =
UserGoal
| PDependency (PI qpn)
| FDependency (FN qpn) Bool
| SDependency (SN qpn)
deriving (Eq, Show, Functor)
type QGoalReason = GoalReason QPN
class ResetVar f where
resetVar :: Var qpn -> f qpn -> f qpn
instance ResetVar CI where
resetVar v (Fixed i _) = Fixed i v
resetVar v (Constrained vrs) = Constrained (L.map (\ (x, y) -> (x, resetVar v y)) vrs)
instance ResetVar Dep where
resetVar v (Dep qpn ci) = Dep qpn (resetVar v ci)
resetVar _ (Ext ext) = Ext ext
resetVar _ (Lang lang) = Lang lang
resetVar _ (Pkg pn vr) = Pkg pn vr
instance ResetVar Var where
resetVar = const
-- | Compute a singleton conflict set from a goal, containing just
-- the goal variable.
--
-- NOTE: This is just a call to 'varToConflictSet' under the hood;
-- the 'GoalReason' is ignored.
goalVarToConflictSet :: Goal qpn -> ConflictSet qpn
goalVarToConflictSet (Goal g _gr) = varToConflictSet g
-- | Compute a singleton conflict set from a 'Var'
varToConflictSet :: Var qpn -> ConflictSet qpn
varToConflictSet = CS.singleton
-- | A goal reason is mostly just a variable paired with the
-- decision we made for that variable (except for user goals,
-- where we cannot really point to a solver variable). This
-- function drops the decision and recovers the list of
-- variables (which will be empty or contain one element).
--
goalReasonToVars :: GoalReason qpn -> [Var qpn]
goalReasonToVars UserGoal = []
goalReasonToVars (PDependency (PI qpn _)) = [P qpn]
goalReasonToVars (FDependency qfn _) = [F qfn]
goalReasonToVars (SDependency qsn) = [S qsn]
{-------------------------------------------------------------------------------
Open goals
-------------------------------------------------------------------------------}
-- | For open goals as they occur during the build phase, we need to store
-- additional information about flags.
data OpenGoal comp = OpenGoal (FlaggedDep comp QPN) QGoalReason
deriving (Eq, Show)
-- | Closes a goal, i.e., removes all the extraneous information that we
-- need only during the build phase.
close :: OpenGoal comp -> Goal QPN
close (OpenGoal (Simple (Dep qpn _) _) gr) = Goal (P qpn) gr
close (OpenGoal (Simple (Ext _) _) _ ) =
error "Distribution.Solver.Modular.Dependency.close: called on Ext goal"
close (OpenGoal (Simple (Lang _) _) _ ) =
error "Distribution.Solver.Modular.Dependency.close: called on Lang goal"
close (OpenGoal (Simple (Pkg _ _) _) _ ) =
error "Distribution.Solver.Modular.Dependency.close: called on Pkg goal"
close (OpenGoal (Flagged qfn _ _ _ ) gr) = Goal (F qfn) gr
close (OpenGoal (Stanza qsn _) gr) = Goal (S qsn) gr
{-------------------------------------------------------------------------------
Version ranges paired with origins
-------------------------------------------------------------------------------}
type VROrigin qpn = (VR, Var qpn)
-- | Helper function to collapse a list of version ranges with origins into
-- a single, simplified, version range.
collapse :: [VROrigin qpn] -> VR
collapse = simplifyVR . L.foldr ((.&&.) . fst) anyVR
|
headprogrammingczar/cabal
|
cabal-install/Distribution/Solver/Modular/Dependency.hs
|
bsd-3-clause
| 16,846
| 0
| 14
| 3,510
| 3,425
| 1,840
| 1,585
| 195
| 8
|
-- |
-- Module : Control.Applicative.Lift
-- Copyright : (c) Ross Paterson 2010
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : ross@soi.city.ac.uk
-- Stability : experimental
-- Portability : portable
--
-- Adding a new kind of pure computation to an applicative functor.
module Control.Applicative.Lift (
Lift(..), unLift,
-- * Collecting errors
Errors, failure
) where
import Control.Applicative
import Data.Foldable (Foldable(foldMap))
import Data.Functor.Constant
import Data.Monoid (Monoid(mappend))
import Data.Traversable (Traversable(traverse))
-- | Applicative functor formed by adding pure computations to a given
-- applicative functor.
data Lift f a = Pure a | Other (f a)
instance (Functor f) => Functor (Lift f) where
fmap f (Pure x) = Pure (f x)
fmap f (Other y) = Other (fmap f y)
instance (Foldable f) => Foldable (Lift f) where
foldMap f (Pure x) = f x
foldMap f (Other y) = foldMap f y
instance (Traversable f) => Traversable (Lift f) where
traverse f (Pure x) = Pure <$> f x
traverse f (Other y) = Other <$> traverse f y
-- | A combination is 'Pure' only if both parts are.
instance (Applicative f) => Applicative (Lift f) where
pure = Pure
Pure f <*> Pure x = Pure (f x)
Pure f <*> Other y = Other (f <$> y)
Other f <*> Pure x = Other (($ x) <$> f)
Other f <*> Other y = Other (f <*> y)
-- | A combination is 'Pure' only if either part is.
instance Alternative f => Alternative (Lift f) where
empty = Other empty
Pure x <|> _ = Pure x
Other _ <|> Pure y = Pure y
Other x <|> Other y = Other (x <|> y)
-- | Projection to the other functor.
unLift :: Applicative f => Lift f a -> f a
unLift (Pure x) = pure x
unLift (Other e) = e
-- | An applicative functor that collects a monoid (e.g. lists) of errors.
-- A sequence of computations fails if any of its components do, but
-- unlike monads made with 'ErrorT' from "Control.Monad.Trans.Error",
-- these computations continue after an error, collecting all the errors.
type Errors e = Lift (Constant e)
-- | Report an error.
failure :: Monoid e => e -> Errors e a
failure e = Other (Constant e)
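-- A small usage sketch (added note, not part of the original module): both
-- failures below are collected rather than short-circuited, because '<*>'
-- combines the 'Constant' payloads with 'mappend'.
--
-- > ex :: Errors [String] (Int, Int)
-- > ex = (,) <$> failure ["no x"] <*> failure ["no y"]
-- >
-- > -- unLift ex == Constant ["no x", "no y"]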
|
technogeeky/d-A
|
src/Control/Applicative/Lift.hs
|
gpl-3.0
| 2,183
| 0
| 9
| 490
| 671
| 347
| 324
| 35
| 1
|
-- In this example, add type constructor 'Tree' to the export list.
module C3() where
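-- Sketch of the intended result (added note, not part of the original test
-- input): after the refactoring, the header above would read
--   module C3(Tree) where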
data Tree a = Leaf a | Branch (Tree a) (Tree a)
sumTree:: (Num a) => Tree a -> a
sumTree (Leaf x ) = x
sumTree (Branch left right) = sumTree left + sumTree right
myFringe:: Tree a -> [a]
myFringe (Leaf x ) = [x]
myFringe (Branch left right) = myFringe left
class SameOrNot a where
isSame :: a -> a -> Bool
isNotSame :: a -> a -> Bool
instance SameOrNot Int where
isSame a b = a == b
isNotSame a b = a /= b
|
kmate/HaRe
|
old/testing/addToExport/C3.hs
|
bsd-3-clause
| 545
| 0
| 8
| 156
| 220
| 114
| 106
| 14
| 1
|
{-
This program converts Haskell source to HTML. Apart from the Haskell source
files, it requires the cross reference information produced by running
tstModules xrefs <files>
on the complete program.
-}
import HsLexerPass1
import System(getArgs)
import Unlit(readHaskellFile)
import MUtils
import RefsTypes
import HLex2html
import PathUtils(normf)
-- fix the hard coded path?
main =
do args <- getArgs
fms <- mapFst normf # (readIO =<< readFile "hi/ModuleSourceFiles.hv")
case args of
["file",m] -> putStrLn . maybe "?" id . lookup m . map swap $ fms
["html",m] -> module2html m fms
["f2html",f] -> file2html (normf f) fms
["all2html"] -> all2html fms
["modules"] -> putStr . unlines . map (\(f,m)->m++" "++f) $ fms
["files"] -> putStr . unlines . map fst $ fms
_ -> fail "Usage: hs2html (file <module> | html <module> | f2html <file> | files | modules | all2html)"
readl s = map read . lines $ s
readModule = readRefs >#< lexHaskellFile
readRefs :: Module -> IO Refs
readRefs m = readl # readFile ("hi/"++m++".refs")
lexHaskellFile f = lexerPass0 # readHaskellFile Nothing f
all2html fms = mapM_ one2html fms
where
one2html (f,m) = writeFile h.hlex2html (f,fms)=<<readModule (m,f)
where h = "hi/"++m++".html"
module2html m fms =
case [f|(f,m')<-fms,m'==m] of
f:_ -> haskell2html (f, fms) (m, f)
_ -> fail "Unknown module"
file2html f fms =
case [m|(f',m)<-fms,f'==f] of
m:_ -> haskell2html (f, fms) (m,f)
_ -> fail "Unknown source file"
haskell2html ffm mf = putStrLn.hlex2html ffm=<<readModule mf
|
forste/haReFork
|
tools/hs2html/hs2html.hs
|
bsd-3-clause
| 1,615
| 8
| 16
| 353
| 579
| 302
| 277
| 35
| 7
|
module Zoo where
type Foo = Int
{-@ bob :: Foo String @-}
bob = 10 :: Int
|
mightymoose/liquidhaskell
|
tests/crash/BadSyn3.hs
|
bsd-3-clause
| 77
| 0
| 4
| 21
| 19
| 13
| 6
| 3
| 1
|
module Vec0 () where
import Language.Haskell.Liquid.Prelude
-- import Data.List
import Data.Vector hiding (map, concat, zipWith, filter, foldr, foldl, (++))
propVec = (vs ! 3) == 3
where xs = [1,2,3,4] :: [Int]
vs = fromList xs
|
ssaavedra/liquidhaskell
|
tests/pos/vector00.hs
|
bsd-3-clause
| 256
| 0
| 7
| 64
| 95
| 61
| 34
| 6
| 1
|
{-# LANGUAGE CPP #-}
module RegAlloc.Graph.TrivColorable (
trivColorable,
)
where
#include "HsVersions.h"
import RegClass
import Reg
import GraphBase
import UniqFM
import Platform
import Panic
-- trivColorable ---------------------------------------------------------------
-- trivColorable function for the graph coloring allocator
--
-- This gets hammered by scanGraph during register allocation,
-- so needs to be fairly efficient.
--
-- NOTE: This only works for architectures with just RcInteger and RcDouble
-- (which are disjoint), i.e. x86, x86_64 and ppc
--
-- The number of allocatable regs is hard coded in here so we can do
-- a fast comparison in trivColorable.
--
-- It's ok if these numbers are _less_ than the actual number of free
-- regs, but they can't be more or the register conflict
-- graph won't color.
--
-- If the graph doesn't color then the allocator will panic, but it won't
-- generate bad object code or anything nasty like that.
--
-- There is an allocatableRegsInClass :: RegClass -> Int, but doing
-- the unboxing is too slow for us here.
-- TODO: Is that still true? Could we use allocatableRegsInClass
-- without losing performance now?
--
-- Look at includes/stg/MachRegs.h to get the numbers.
--
-- Disjoint registers ----------------------------------------------------------
--
-- The definition has been unfolded into individual cases for speed.
-- Each architecture has a different register setup, so we use a
-- different regSqueeze function for each.
--
accSqueeze
:: Int
-> Int
-> (reg -> Int)
-> UniqFM reg
-> Int
accSqueeze count maxCount squeeze ufm = acc count (eltsUFM ufm)
where acc count [] = count
acc count _ | count >= maxCount = count
acc count (r:rs) = acc (count + squeeze r) rs
{- Note [accSqueeze]
~~~~~~~~~~~~~~~~~~~~
BL 2007/09
Doing a nice fold over the UniqSet makes trivColorable use
32% of total compile time and 42% of total alloc when compiling SHA1.hs from darcs.
Therefore the UniqFM is made non-abstract and we use custom fold.
MS 2010/04
When converting UniqFM to use Data.IntMap, the fold cannot use UniqFM internal
representation any more. But it is imperative that the accSqueeze stops
the folding if the count gets greater than or equal to maxCount. We thus convert
UniqFM to a (lazy) list, do the fold and stop if necessary, which was
the most efficient variant tried. Benchmark compiling 10-times SHA1.hs follows.
(original = previous implementation, folding = fold of the whole UFM,
lazyFold = the current implementation,
hackFold = using internal representation of Data.IntMap)
original folding hackFold lazyFold
-O -fasm (used everywhere) 31.509s 30.387s 30.791s 30.603s
100.00% 96.44% 97.72% 97.12%
-fregs-graph 67.938s 74.875s 62.673s 64.679s
100.00% 110.21% 92.25% 95.20%
-fregs-iterative 89.761s 143.913s 81.075s 86.912s
100.00% 160.33% 90.32% 96.83%
-fnew-codegen 38.225s 37.142s 37.551s 37.119s
100.00% 97.17% 98.24% 97.11%
-fnew-codegen -fregs-graph 91.786s 91.51s 87.368s 86.88s
100.00% 99.70% 95.19% 94.65%
-fnew-codegen -fregs-iterative 206.72s 343.632s 194.694s 208.677s
100.00% 166.23% 94.18% 100.95%
-}
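{- Added note (illustration only, not part of the original module): a small
   evaluation trace of the early exit described above, with squeeze = const 1
   and maxCount = 2:

     accSqueeze 0 2 (const 1) ufm     -- suppose eltsUFM ufm = r1 : r2 : r3 : ...
       = acc 0 (r1 : r2 : r3 : ...)
       = acc 1 (r2 : r3 : ...)
       = acc 2 (r3 : ...)             -- count >= maxCount, so stop here
       = 2                            -- r3 and everything after it is never squeezed
-}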
trivColorable
:: Platform
-> (RegClass -> VirtualReg -> Int)
-> (RegClass -> RealReg -> Int)
-> Triv VirtualReg RegClass RealReg
trivColorable platform virtualRegSqueeze realRegSqueeze RcInteger conflicts exclusions
| let cALLOCATABLE_REGS_INTEGER
= (case platformArch platform of
ArchX86 -> 3
ArchX86_64 -> 5
ArchPPC -> 16
ArchSPARC -> 14
ArchPPC_64 _ -> panic "trivColorable ArchPPC_64"
ArchARM _ _ _ -> panic "trivColorable ArchARM"
ArchARM64 -> panic "trivColorable ArchARM64"
ArchAlpha -> panic "trivColorable ArchAlpha"
ArchMipseb -> panic "trivColorable ArchMipseb"
ArchMipsel -> panic "trivColorable ArchMipsel"
ArchJavaScript-> panic "trivColorable ArchJavaScript"
ArchUnknown -> panic "trivColorable ArchUnknown")
, count2 <- accSqueeze 0 cALLOCATABLE_REGS_INTEGER
(virtualRegSqueeze RcInteger)
conflicts
, count3 <- accSqueeze count2 cALLOCATABLE_REGS_INTEGER
(realRegSqueeze RcInteger)
exclusions
= count3 < cALLOCATABLE_REGS_INTEGER
trivColorable platform virtualRegSqueeze realRegSqueeze RcFloat conflicts exclusions
| let cALLOCATABLE_REGS_FLOAT
= (case platformArch platform of
ArchX86 -> 0
ArchX86_64 -> 0
ArchPPC -> 0
ArchSPARC -> 22
ArchPPC_64 _ -> panic "trivColorable ArchPPC_64"
ArchARM _ _ _ -> panic "trivColorable ArchARM"
ArchARM64 -> panic "trivColorable ArchARM64"
ArchAlpha -> panic "trivColorable ArchAlpha"
ArchMipseb -> panic "trivColorable ArchMipseb"
ArchMipsel -> panic "trivColorable ArchMipsel"
ArchJavaScript-> panic "trivColorable ArchJavaScript"
ArchUnknown -> panic "trivColorable ArchUnknown")
, count2 <- accSqueeze 0 cALLOCATABLE_REGS_FLOAT
(virtualRegSqueeze RcFloat)
conflicts
, count3 <- accSqueeze count2 cALLOCATABLE_REGS_FLOAT
(realRegSqueeze RcFloat)
exclusions
= count3 < cALLOCATABLE_REGS_FLOAT
trivColorable platform virtualRegSqueeze realRegSqueeze RcDouble conflicts exclusions
| let cALLOCATABLE_REGS_DOUBLE
= (case platformArch platform of
ArchX86 -> 6
ArchX86_64 -> 0
ArchPPC -> 26
ArchSPARC -> 11
ArchPPC_64 _ -> panic "trivColorable ArchPPC_64"
ArchARM _ _ _ -> panic "trivColorable ArchARM"
ArchARM64 -> panic "trivColorable ArchARM64"
ArchAlpha -> panic "trivColorable ArchAlpha"
ArchMipseb -> panic "trivColorable ArchMipseb"
ArchMipsel -> panic "trivColorable ArchMipsel"
ArchJavaScript-> panic "trivColorable ArchJavaScript"
ArchUnknown -> panic "trivColorable ArchUnknown")
, count2 <- accSqueeze 0 cALLOCATABLE_REGS_DOUBLE
(virtualRegSqueeze RcDouble)
conflicts
, count3 <- accSqueeze count2 cALLOCATABLE_REGS_DOUBLE
(realRegSqueeze RcDouble)
exclusions
= count3 < cALLOCATABLE_REGS_DOUBLE
trivColorable platform virtualRegSqueeze realRegSqueeze RcDoubleSSE conflicts exclusions
| let cALLOCATABLE_REGS_SSE
= (case platformArch platform of
ArchX86 -> 8
ArchX86_64 -> 10
ArchPPC -> 0
ArchSPARC -> 0
ArchPPC_64 _ -> panic "trivColorable ArchPPC_64"
ArchARM _ _ _ -> panic "trivColorable ArchARM"
ArchARM64 -> panic "trivColorable ArchARM64"
ArchAlpha -> panic "trivColorable ArchAlpha"
ArchMipseb -> panic "trivColorable ArchMipseb"
ArchMipsel -> panic "trivColorable ArchMipsel"
ArchJavaScript-> panic "trivColorable ArchJavaScript"
ArchUnknown -> panic "trivColorable ArchUnknown")
, count2 <- accSqueeze 0 cALLOCATABLE_REGS_SSE
(virtualRegSqueeze RcDoubleSSE)
conflicts
, count3 <- accSqueeze count2 cALLOCATABLE_REGS_SSE
(realRegSqueeze RcDoubleSSE)
exclusions
= count3 < cALLOCATABLE_REGS_SSE
-- Specification Code ----------------------------------------------------------
--
-- The trivColorable function for each particular architecture should
-- implement the following function, but faster.
--
{-
trivColorable :: RegClass -> UniqSet Reg -> UniqSet Reg -> Bool
trivColorable classN conflicts exclusions
= let
acc :: Reg -> (Int, Int) -> (Int, Int)
acc r (cd, cf)
= case regClass r of
RcInteger -> (cd+1, cf)
RcFloat -> (cd, cf+1)
_ -> panic "Regs.trivColorable: reg class not handled"
tmp = foldUniqSet acc (0, 0) conflicts
(countInt, countFloat) = foldUniqSet acc tmp exclusions
squeese = worst countInt classN RcInteger
+ worst countFloat classN RcFloat
in squeese < allocatableRegsInClass classN
-- | Worst case displacement
-- node N of classN has n neighbors of class C.
--
-- We currently only have RcInteger and RcDouble, which don't conflict at all.
-- This is a bit boring compared to what's in RegArchX86.
--
worst :: Int -> RegClass -> RegClass -> Int
worst n classN classC
= case classN of
RcInteger
-> case classC of
RcInteger -> min n (allocatableRegsInClass RcInteger)
RcFloat -> 0
RcDouble
-> case classC of
RcFloat -> min n (allocatableRegsInClass RcFloat)
RcInteger -> 0
-- allocatableRegs is allMachRegNos with the fixed-use regs removed.
-- i.e., these are the regs for which we are prepared to allow the
-- register allocator to attempt to map VRegs to.
allocatableRegs :: [RegNo]
allocatableRegs
= let isFree i = freeReg i
in filter isFree allMachRegNos
-- | The number of regs in each class.
-- We go via top level CAFs to ensure that we're not recomputing
-- the length of these lists each time the fn is called.
allocatableRegsInClass :: RegClass -> Int
allocatableRegsInClass cls
= case cls of
RcInteger -> allocatableRegsInteger
RcFloat -> allocatableRegsFloat
allocatableRegsInteger :: Int
allocatableRegsInteger
= length $ filter (\r -> regClass r == RcInteger)
$ map RealReg allocatableRegs
allocatableRegsFloat :: Int
allocatableRegsFloat
= length $ filter (\r -> regClass r == RcFloat)
$ map RealReg allocatableRegs
-}
|
acowley/ghc
|
compiler/nativeGen/RegAlloc/Graph/TrivColorable.hs
|
bsd-3-clause
| 11,932
| 0
| 15
| 4,602
| 1,024
| 510
| 514
| 112
| 45
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="de-DE">
<title>Selenium add-on</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
msrader/zap-extensions
|
src/org/zaproxy/zap/extension/selenium/resources/help_de_DE/helpset_de_DE.hs
|
apache-2.0
| 961
| 79
| 67
| 157
| 413
| 209
| 204
| -1
| -1
|
{-# LANGUAGE MagicHash #-}
module T13413 where
import GHC.Exts
fillBlock2 :: (Int# -> Int# -> IO ())
-> Int# -> Int# -> IO ()
fillBlock2 write x0 y0
= fillBlock y0 x0
where
{-# INLINE fillBlock #-}
fillBlock y ix
| 1# <- y >=# y0
= return ()
| otherwise
= do write ix x0
fillBlock (y +# 1#) ix
|
ezyang/ghc
|
testsuite/tests/simplCore/should_compile/T13413.hs
|
bsd-3-clause
| 371
| 0
| 12
| 138
| 129
| 63
| 66
| 14
| 1
|
{-# LANGUAGE PolyKinds, TypeFamilies #-}
module T7073 where
class Foo a where
type Bar a
type Bar a = Int
|
urbanslug/ghc
|
testsuite/tests/polykinds/T7073.hs
|
bsd-3-clause
| 123
| 0
| 6
| 37
| 27
| 16
| 11
| 5
| 0
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE StandaloneDeriving #-}
module T7947 where
import Data.Data
import Data.Typeable
import T7947a
import qualified T7947b as B
deriving instance Typeable A
deriving instance Typeable B.B
deriving instance Data A
deriving instance Data B.B
|
urbanslug/ghc
|
testsuite/tests/deriving/should_compile/T7947.hs
|
bsd-3-clause
| 289
| 0
| 6
| 43
| 61
| 36
| 25
| 11
| 0
|
{-# LANGUAGE FlexibleInstances, FunctionalDependencies, MultiParamTypeClasses #-}
module T9612 where
import Data.Monoid
import Control.Monad.Trans.Writer.Lazy( Writer, WriterT )
import Data.Functor.Identity( Identity )
class (Monoid w, Monad m) => MonadWriter w m | m -> w where
writer :: (a,w) -> m a
tell :: w -> m ()
listen :: m a -> m (a, w)
pass :: m (a, w -> w) -> m a
f ::(Eq a) => a -> (Int, a) -> Writer [(Int, a)] (Int, a)
f y (n,x) {- | y == x = return (n+1, x)
| otherwise = -}
= do tell (n,x)
return (1,y)
instance (Monoid w, Monad m) => MonadWriter w (WriterT w m) where
|
siddhanathan/ghc
|
testsuite/tests/typecheck/should_fail/T9612.hs
|
bsd-3-clause
| 636
| 0
| 10
| 163
| 269
| 150
| 119
| 15
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -Wno-overlapping-patterns -Wno-incomplete-patterns -Wno-incomplete-uni-patterns -Wno-incomplete-record-updates #-}
-- | Some simplification rules for t'BasicOp'.
module Futhark.Optimise.Simplify.Rules.BasicOp
( basicOpRules,
)
where
import Control.Monad
import Data.List (find, foldl', isSuffixOf, sort)
import Data.List.NonEmpty (NonEmpty (..))
import Futhark.Analysis.PrimExp.Convert
import qualified Futhark.Analysis.SymbolTable as ST
import Futhark.Construct
import Futhark.IR
import Futhark.IR.Prop.Aliases
import Futhark.Optimise.Simplify.Rule
import Futhark.Optimise.Simplify.Rules.Loop
import Futhark.Optimise.Simplify.Rules.Simple
import Futhark.Util
isCt1 :: SubExp -> Bool
isCt1 (Constant v) = oneIsh v
isCt1 _ = False
isCt0 :: SubExp -> Bool
isCt0 (Constant v) = zeroIsh v
isCt0 _ = False
data ConcatArg
= ArgArrayLit [SubExp]
| ArgReplicate [SubExp] SubExp
| ArgVar VName
toConcatArg :: ST.SymbolTable rep -> VName -> (ConcatArg, Certs)
toConcatArg vtable v =
case ST.lookupBasicOp v vtable of
Just (ArrayLit ses _, cs) ->
(ArgArrayLit ses, cs)
Just (Replicate shape se, cs) ->
(ArgReplicate [shapeSize 0 shape] se, cs)
_ ->
(ArgVar v, mempty)
fromConcatArg ::
MonadBuilder m =>
Type ->
(ConcatArg, Certs) ->
m VName
fromConcatArg t (ArgArrayLit ses, cs) =
certifying cs $ letExp "concat_lit" $ BasicOp $ ArrayLit ses $ rowType t
fromConcatArg elem_type (ArgReplicate ws se, cs) = do
let elem_shape = arrayShape elem_type
certifying cs $ do
w <- letSubExp "concat_rep_w" =<< toExp (sum $ map pe64 ws)
letExp "concat_rep" $ BasicOp $ Replicate (setDim 0 elem_shape w) se
fromConcatArg _ (ArgVar v, _) =
pure v
fuseConcatArg ::
[(ConcatArg, Certs)] ->
(ConcatArg, Certs) ->
[(ConcatArg, Certs)]
fuseConcatArg xs (ArgArrayLit [], _) =
xs
fuseConcatArg xs (ArgReplicate [w] se, cs)
| isCt0 w =
xs
| isCt1 w =
fuseConcatArg xs (ArgArrayLit [se], cs)
fuseConcatArg ((ArgArrayLit x_ses, x_cs) : xs) (ArgArrayLit y_ses, y_cs) =
(ArgArrayLit (x_ses ++ y_ses), x_cs <> y_cs) : xs
fuseConcatArg ((ArgReplicate x_ws x_se, x_cs) : xs) (ArgReplicate y_ws y_se, y_cs)
| x_se == y_se =
(ArgReplicate (x_ws ++ y_ws) x_se, x_cs <> y_cs) : xs
fuseConcatArg xs y =
y : xs
simplifyConcat :: BuilderOps rep => BottomUpRuleBasicOp rep
-- concat@1(transpose(x),transpose(y)) == transpose(concat@0(x,y))
simplifyConcat (vtable, _) pat _ (Concat i (x :| xs) new_d)
| Just r <- arrayRank <$> ST.lookupType x vtable,
let perm = [i] ++ [0 .. i -1] ++ [i + 1 .. r -1],
Just (x', x_cs) <- transposedBy perm x,
Just (xs', xs_cs) <- unzip <$> mapM (transposedBy perm) xs = Simplify $ do
concat_rearrange <-
certifying (x_cs <> mconcat xs_cs) $
letExp "concat_rearrange" $ BasicOp $ Concat 0 (x' :| xs') new_d
letBind pat $ BasicOp $ Rearrange perm concat_rearrange
where
transposedBy perm1 v =
case ST.lookupExp v vtable of
Just (BasicOp (Rearrange perm2 v'), vcs)
| perm1 == perm2 -> Just (v', vcs)
_ -> Nothing
-- Removing a concatenation that involves only a single array. This
-- may be produced as a result of other simplification rules.
simplifyConcat _ pat aux (Concat _ (x :| []) _) =
Simplify $
-- Still need a copy because Concat produces a fresh array.
auxing aux $ letBind pat $ BasicOp $ Copy x
-- concat xs (concat ys zs) == concat xs ys zs
simplifyConcat (vtable, _) pat (StmAux cs attrs _) (Concat i (x :| xs) new_d)
| x' /= x || concat xs' /= xs =
Simplify $
certifying (cs <> x_cs <> mconcat xs_cs) $
attributing attrs $
letBind pat $
BasicOp $ Concat i (x' :| zs ++ concat xs') new_d
where
(x' : zs, x_cs) = isConcat x
(xs', xs_cs) = unzip $ map isConcat xs
isConcat v = case ST.lookupBasicOp v vtable of
Just (Concat j (y :| ys) _, v_cs) | j == i -> (y : ys, v_cs)
_ -> ([v], mempty)
-- Fusing arguments to the concat when possible. Only done when
-- concatenating along the outer dimension for now.
simplifyConcat (vtable, _) pat aux (Concat 0 (x :| xs) outer_w)
| -- We produce the to-be-concatenated arrays in reverse order, so
-- reverse them back.
y : ys <-
forSingleArray $
reverse $
foldl' fuseConcatArg mempty $
map (toConcatArg vtable) $ x : xs,
length xs /= length ys =
Simplify $ do
elem_type <- lookupType x
y' <- fromConcatArg elem_type y
ys' <- mapM (fromConcatArg elem_type) ys
auxing aux $ letBind pat $ BasicOp $ Concat 0 (y' :| ys') outer_w
where
-- If we fuse so much that there is only a single input left, then
-- it must have the right size.
forSingleArray [(ArgReplicate _ v, cs)] =
[(ArgReplicate [outer_w] v, cs)]
forSingleArray ys = ys
simplifyConcat _ _ _ _ = Skip
ruleBasicOp :: BuilderOps rep => TopDownRuleBasicOp rep
ruleBasicOp vtable pat aux op
| Just (op', cs) <- applySimpleRules defOf seType op =
Simplify $ certifying (cs <> stmAuxCerts aux) $ letBind pat $ BasicOp op'
where
defOf = (`ST.lookupExp` vtable)
seType (Var v) = ST.lookupType v vtable
seType (Constant v) = Just $ Prim $ primValueType v
ruleBasicOp vtable pat _ (Update _ src _ (Var v))
| Just (BasicOp Scratch {}, _) <- ST.lookupExp v vtable =
Simplify $ letBind pat $ BasicOp $ SubExp $ Var src
-- If we are writing a single-element slice from some array, and the
-- element of that array can be computed as a PrimExp based on the
-- index, let's just write that instead.
ruleBasicOp vtable pat aux (Update safety src (Slice [DimSlice i n s]) (Var v))
| isCt1 n,
isCt1 s,
Just (ST.Indexed cs e) <- ST.index v [intConst Int64 0] vtable =
Simplify $ do
e' <- toSubExp "update_elem" e
auxing aux . certifying cs $
letBind pat $ BasicOp $ Update safety src (Slice [DimFix i]) e'
ruleBasicOp vtable pat _ (Update _ dest destis (Var v))
| Just (e, _) <- ST.lookupExp v vtable,
arrayFrom e =
Simplify $ letBind pat $ BasicOp $ SubExp $ Var dest
where
arrayFrom (BasicOp (Copy copy_v))
| Just (e', _) <- ST.lookupExp copy_v vtable =
arrayFrom e'
arrayFrom (BasicOp (Index src srcis)) =
src == dest && destis == srcis
arrayFrom (BasicOp (Replicate v_shape v_se))
| Just (Replicate dest_shape dest_se, _) <- ST.lookupBasicOp dest vtable,
v_se == dest_se,
shapeDims v_shape `isSuffixOf` shapeDims dest_shape =
True
arrayFrom _ =
False
ruleBasicOp vtable pat _ (Update _ dest is se)
| Just dest_t <- ST.lookupType dest vtable,
isFullSlice (arrayShape dest_t) is = Simplify $
case se of
Var v | not $ null $ sliceDims is -> do
v_reshaped <-
letExp (baseString v ++ "_reshaped") $
BasicOp $ Reshape (map DimNew $ arrayDims dest_t) v
letBind pat $ BasicOp $ Copy v_reshaped
_ -> letBind pat $ BasicOp $ ArrayLit [se] $ rowType dest_t
ruleBasicOp vtable pat (StmAux cs1 attrs _) (Update safety1 dest1 is1 (Var v1))
| Just (Update safety2 dest2 is2 se2, cs2) <- ST.lookupBasicOp v1 vtable,
Just (Copy v3, cs3) <- ST.lookupBasicOp dest2 vtable,
Just (Index v4 is4, cs4) <- ST.lookupBasicOp v3 vtable,
is4 == is1,
v4 == dest1 =
Simplify $
certifying (cs1 <> cs2 <> cs3 <> cs4) $ do
is5 <- subExpSlice $ sliceSlice (primExpSlice is1) (primExpSlice is2)
attributing attrs $ letBind pat $ BasicOp $ Update (max safety1 safety2) dest1 is5 se2
ruleBasicOp vtable pat _ (CmpOp (CmpEq t) se1 se2)
| Just m <- simplifyWith se1 se2 = Simplify m
| Just m <- simplifyWith se2 se1 = Simplify m
where
simplifyWith (Var v) x
| Just stm <- ST.lookupStm v vtable,
If p tbranch fbranch _ <- stmExp stm,
Just (y, z) <-
returns v (stmPat stm) tbranch fbranch,
not $ boundInBody tbranch `namesIntersect` freeIn y,
not $ boundInBody fbranch `namesIntersect` freeIn z = Just $ do
eq_x_y <-
letSubExp "eq_x_y" $ BasicOp $ CmpOp (CmpEq t) x y
eq_x_z <-
letSubExp "eq_x_z" $ BasicOp $ CmpOp (CmpEq t) x z
p_and_eq_x_y <-
letSubExp "p_and_eq_x_y" $ BasicOp $ BinOp LogAnd p eq_x_y
not_p <-
letSubExp "not_p" $ BasicOp $ UnOp Not p
not_p_and_eq_x_z <-
letSubExp "p_and_eq_x_y" $ BasicOp $ BinOp LogAnd not_p eq_x_z
letBind pat $
BasicOp $ BinOp LogOr p_and_eq_x_y not_p_and_eq_x_z
simplifyWith _ _ =
Nothing
returns v ifpat tbranch fbranch =
fmap snd . find ((== v) . patElemName . fst) $
zip (patElems ifpat) $
zip (map resSubExp (bodyResult tbranch)) (map resSubExp (bodyResult fbranch))
ruleBasicOp _ pat _ (Replicate (Shape []) se@Constant {}) =
Simplify $ letBind pat $ BasicOp $ SubExp se
ruleBasicOp _ pat _ (Replicate _ se)
| [Acc {}] <- patTypes pat =
Simplify $ letBind pat $ BasicOp $ SubExp se
ruleBasicOp _ pat _ (Replicate (Shape []) (Var v)) = Simplify $ do
v_t <- lookupType v
letBind pat $
BasicOp $
if primType v_t
then SubExp $ Var v
else Copy v
ruleBasicOp vtable pat _ (Replicate shape (Var v))
| Just (BasicOp (Replicate shape2 se), cs) <- ST.lookupExp v vtable =
Simplify $ certifying cs $ letBind pat $ BasicOp $ Replicate (shape <> shape2) se
ruleBasicOp _ pat _ (ArrayLit (se : ses) _)
| all (== se) ses =
Simplify $
let n = constant (fromIntegral (length ses) + 1 :: Int64)
in letBind pat $ BasicOp $ Replicate (Shape [n]) se
ruleBasicOp vtable pat aux (Index idd slice)
| Just inds <- sliceIndices slice,
Just (BasicOp (Reshape newshape idd2), idd_cs) <- ST.lookupExp idd vtable,
length newshape == length inds =
Simplify $
case shapeCoercion newshape of
Just _ ->
certifying idd_cs $
auxing aux $
letBind pat $ BasicOp $ Index idd2 slice
Nothing -> do
-- Linearise indices and map to old index space.
oldshape <- arrayDims <$> lookupType idd2
let new_inds =
reshapeIndex
(map pe64 oldshape)
(map pe64 $ newDims newshape)
(map pe64 inds)
new_inds' <-
mapM (toSubExp "new_index") new_inds
certifying idd_cs . auxing aux $
letBind pat $ BasicOp $ Index idd2 $ Slice $ map DimFix new_inds'
-- Copying an iota is pointless; just make it an iota instead.
ruleBasicOp vtable pat aux (Copy v)
| Just (Iota n x s it, v_cs) <- ST.lookupBasicOp v vtable =
Simplify . certifying v_cs . auxing aux $
letBind pat $ BasicOp $ Iota n x s it
-- Handle identity permutation.
ruleBasicOp _ pat _ (Rearrange perm v)
| sort perm == perm =
Simplify $ letBind pat $ BasicOp $ SubExp $ Var v
ruleBasicOp vtable pat aux (Rearrange perm v)
| Just (BasicOp (Rearrange perm2 e), v_cs) <- ST.lookupExp v vtable =
-- Rearranging a rearranging: compose the permutations.
Simplify . certifying v_cs . auxing aux $
letBind pat $ BasicOp $ Rearrange (perm `rearrangeCompose` perm2) e
ruleBasicOp vtable pat aux (Rearrange perm v)
| Just (BasicOp (Rotate offsets v2), v_cs) <- ST.lookupExp v vtable,
Just (BasicOp (Rearrange perm3 v3), v2_cs) <- ST.lookupExp v2 vtable = Simplify $ do
let offsets' = rearrangeShape (rearrangeInverse perm3) offsets
rearrange_rotate <- letExp "rearrange_rotate" $ BasicOp $ Rotate offsets' v3
certifying (v_cs <> v2_cs) $
auxing aux $
letBind pat $ BasicOp $ Rearrange (perm `rearrangeCompose` perm3) rearrange_rotate
-- Rearranging a replicate where the outer dimension is left untouched.
ruleBasicOp vtable pat aux (Rearrange perm v1)
| Just (BasicOp (Replicate dims (Var v2)), v1_cs) <- ST.lookupExp v1 vtable,
num_dims <- shapeRank dims,
(rep_perm, rest_perm) <- splitAt num_dims perm,
not $ null rest_perm,
rep_perm == [0 .. length rep_perm -1] =
Simplify $
certifying v1_cs $
auxing aux $ do
v <-
letSubExp "rearrange_replicate" $
BasicOp $ Rearrange (map (subtract num_dims) rest_perm) v2
letBind pat $ BasicOp $ Replicate dims v
-- A zero-rotation is identity.
ruleBasicOp _ pat _ (Rotate offsets v)
| all isCt0 offsets = Simplify $ letBind pat $ BasicOp $ SubExp $ Var v
ruleBasicOp vtable pat aux (Rotate offsets v)
| Just (BasicOp (Rearrange perm v2), v_cs) <- ST.lookupExp v vtable,
Just (BasicOp (Rotate offsets2 v3), v2_cs) <- ST.lookupExp v2 vtable = Simplify $ do
let offsets2' = rearrangeShape (rearrangeInverse perm) offsets2
addOffsets x y = letSubExp "summed_offset" $ BasicOp $ BinOp (Add Int64 OverflowWrap) x y
offsets' <- zipWithM addOffsets offsets offsets2'
rotate_rearrange <-
auxing aux $ letExp "rotate_rearrange" $ BasicOp $ Rearrange perm v3
certifying (v_cs <> v2_cs) $
letBind pat $ BasicOp $ Rotate offsets' rotate_rearrange
-- Combining Rotates.
ruleBasicOp vtable pat aux (Rotate offsets1 v)
| Just (BasicOp (Rotate offsets2 v2), v_cs) <- ST.lookupExp v vtable = Simplify $ do
offsets <- zipWithM add offsets1 offsets2
certifying v_cs $
auxing aux $
letBind pat $ BasicOp $ Rotate offsets v2
where
add x y = letSubExp "offset" $ BasicOp $ BinOp (Add Int64 OverflowWrap) x y
-- If we see an Update with a scalar where the value to be written is
-- the result of indexing some other array, then we convert it into an
-- Update with a slice of that array. This matters when the arrays
-- are far away (on the GPU, say), because it avoids a copy of the
-- scalar to and from the host.
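-- An illustration with hypothetical names: the pair of statements
--   let x = ys[j]; zs[i] = x
-- becomes, roughly,
--   let x' = ys[j:j+1]; zs[i:i+1] = x'
-- so the element travels as a one-element slice rather than as a scalar.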
ruleBasicOp vtable pat aux (Update safety arr_x (Slice slice_x) (Var v))
| Just _ <- sliceIndices (Slice slice_x),
Just (Index arr_y (Slice slice_y), cs_y) <- ST.lookupBasicOp v vtable,
ST.available arr_y vtable,
-- XXX: we should check for proper aliasing here instead.
arr_y /= arr_x,
Just (slice_x_bef, DimFix i, []) <- focusNth (length slice_x - 1) slice_x,
Just (slice_y_bef, DimFix j, []) <- focusNth (length slice_y - 1) slice_y = Simplify $ do
let slice_x' = Slice $ slice_x_bef ++ [DimSlice i (intConst Int64 1) (intConst Int64 1)]
slice_y' = Slice $ slice_y_bef ++ [DimSlice j (intConst Int64 1) (intConst Int64 1)]
v' <- letExp (baseString v ++ "_slice") $ BasicOp $ Index arr_y slice_y'
certifying cs_y . auxing aux $
letBind pat $ BasicOp $ Update safety arr_x slice_x' $ Var v'
-- Simplify away 0<=i when 'i' is from a loop of form 'for i < n'.
ruleBasicOp vtable pat aux (CmpOp CmpSle {} x y)
| Constant (IntValue (Int64Value 0)) <- x,
Var v <- y,
Just _ <- ST.lookupLoopVar v vtable =
Simplify $ auxing aux $ letBind pat $ BasicOp $ SubExp $ constant True
-- Simplify away i<n when 'i' is from a loop of form 'for i < n'.
ruleBasicOp vtable pat aux (CmpOp CmpSlt {} x y)
| Var v <- x,
Just n <- ST.lookupLoopVar v vtable,
n == y =
Simplify $ auxing aux $ letBind pat $ BasicOp $ SubExp $ constant True
-- Simplify away x<0 when 'x' has been used as array size.
ruleBasicOp vtable pat aux (CmpOp CmpSlt {} (Var x) y)
| isCt0 y,
maybe False ST.entryIsSize $ ST.lookup x vtable =
Simplify $ auxing aux $ letBind pat $ BasicOp $ SubExp $ constant False
-- Remove certificates for variables whose definition already contains
-- that certificate.
ruleBasicOp vtable pat aux (SubExp (Var v))
| cs <- unCerts $ stmAuxCerts aux,
not $ null cs,
Just v_cs <- unCerts . stmCerts <$> ST.lookupStm v vtable,
cs' <- filter (`notElem` v_cs) cs,
cs' /= cs =
Simplify . certifying (Certs cs') $
letBind pat $ BasicOp $ SubExp $ Var v
-- Remove UpdateAccs that contribute the neutral value, which is
-- always a no-op.
ruleBasicOp vtable pat aux (UpdateAcc acc _ vs)
| Pat [pe] <- pat,
Acc token _ _ _ <- patElemType pe,
Just (_, _, Just (_, ne)) <- ST.entryAccInput =<< ST.lookup token vtable,
vs == ne =
Simplify . auxing aux $ letBind pat $ BasicOp $ SubExp $ Var acc
-- Manifest of a copy can be simplified to manifesting the original
-- array, if it is still available.
ruleBasicOp vtable pat aux (Manifest perm v1)
| Just (Copy v2, cs) <- ST.lookupBasicOp v1 vtable,
ST.available v2 vtable =
Simplify . auxing aux . certifying cs $
letBind pat $ BasicOp $ Manifest perm v2
ruleBasicOp _ _ _ _ =
Skip
topDownRules :: BuilderOps rep => [TopDownRule rep]
topDownRules =
[ RuleBasicOp ruleBasicOp
]
bottomUpRules :: BuilderOps rep => [BottomUpRule rep]
bottomUpRules =
[ RuleBasicOp simplifyConcat
]
-- | A set of simplification rules for t'BasicOp's. Includes rules
-- from "Futhark.Optimise.Simplify.Rules.Simple".
basicOpRules :: (BuilderOps rep, Aliased rep) => RuleBook rep
basicOpRules = ruleBook topDownRules bottomUpRules <> loopRules
|
diku-dk/futhark
|
src/Futhark/Optimise/Simplify/Rules/BasicOp.hs
|
isc
| 17,076
| 0
| 19
| 4,156
| 6,165
| 2,994
| 3,171
| 346
| 9
|
module Main where
import Geometry
import Drawing
main = drawPicture myPicture
myPicture points = drawTriangle (a,b,c)
& drawLabels [a,b,c] ["A","B","C"]
& messages [ "angle(ABC)=" ++ shownum (angle a b c)
, "angle(BCA)=" ++ shownum (angle b c a)
, "angle(CAB)=" ++ shownum (angle c a b)
]
where [a,b,c] = take 3 points
|
alphalambda/k12math
|
contrib/MHills/GeometryLessons/code/student/lesson4a.hs
|
mit
| 461
| 0
| 11
| 195
| 154
| 83
| 71
| 10
| 1
|
{-# LANGUAGE TemplateHaskell, OverloadedStrings #-}
module Data.NGH.Formats.Tests.Fasta
( tests ) where
import Test.Framework.TH
import Test.HUnit
import Test.Framework.Providers.HUnit
import Data.NGH.Formats.Fasta (writeSeq)
import qualified Data.ByteString.Lazy.Char8 as L8
tests = $(testGroupGenerator)
case_write = (length $ L8.lines $ formatted) @?= 3
where
formatted = writeSeq 4 "header" "actgact"
|
luispedro/NGH
|
Data/NGH/Formats/Tests/Fasta.hs
|
mit
| 427
| 0
| 9
| 66
| 100
| 63
| 37
| 11
| 1
|
{-# LANGUAGE ViewPatterns #-}
module JSImages where
import qualified CodeWorld as CW
import Graphics.Gloss hiding ((.*.),(.+.),(.-.))
import Graphics.Gloss.Data.Display
--lava = makeImage 100 100 "lava"
--nitro1 = makeImage 81 62 "nitro1"
--nitro2 = makeImage 81 62 "nitro2"
--nitro3 = makeImage 81 62 "nitro3"
--nitro4 = makeImage 81 62 "nitro4"
--puff = makeImage 60 40 "puff"
--bar1 = makeImage 9 12 "bar1"
--bar2 = makeImage 9 12 "bar2"
--bar3 = makeImage 9 12 "bar3"
--bar4 = makeImage 9 12 "bar4"
--p1 = makeImage 60 60 "p1"
--p2 = makeImage 60 60 "p2"
--p3 = makeImage 60 60 "p3"
--p4 = makeImage 60 60 "p4"
--c1 = makeImage 60 100 "c1"
--c2 = makeImage 60 100 "c2"
--c3 = makeImage 60 100 "c3"
--c4 = makeImage 60 100 "c4"
--n1 = makeImage 19 29 "n1"
--n2 = makeImage 19 29 "n2"
--n3 = makeImage 19 29 "n3"
--n4 = makeImage 19 29 "n4"
--btt = makeImage 11 11 "btt"
--mrt = makeImage 11 11 "mrt"
--f0 = makeImage 60 120 "f0"
--f1 = makeImage 70 120 "f1"
--f2 = makeImage 60 120 "f2"
--f3 = makeImage 60 120 "f3"
--f4 = makeImage 60 120 "f4"
--f5 = makeImage 60 120 "f5"
--f6 = makeImage 60 120 "f6"
--f7 = makeImage 60 120 "f7"
--f8 = makeImage 60 120 "f8"
--f9 = makeImage 60 120 "f9"
--fa = makeImage 52 120 "fa"
--fb = makeImage 54 120 "fb"
--fc = makeImage 52 120 "fc"
--fd = makeImage 52 120 "fd"
--fe = makeImage 48 120 "fe"
--ff = makeImage 48 120 "ff"
--fg = makeImage 52 120 "fg"
--fh = makeImage 52 120 "fh"
--fi = makeImage 45 120 "fi"
--fj = makeImage 52 120 "fj"
--fk = makeImage 52 120 "fk"
--fl = makeImage 56 120 "fl"
--fm = makeImage 86 120 "fm"
--fn = makeImage 52 120 "fn"
--fo = makeImage 52 120 "fo"
--fp = makeImage 48 120 "fp"
--fq = makeImage 52 120 "fq"
--fr = makeImage 52 120 "fr"
--fs = makeImage 48 120 "fs"
--ft = makeImage 44 120 "ft"
--fu = makeImage 52 120 "fu"
--fv = makeImage 50 120 "fv"
--fw = makeImage 82 120 "fw"
--fx = makeImage 52 120 "fx"
--fy = makeImage 50 120 "fy"
--fz = makeImage 44 120 "fz"
--timer = makeImage 660 590 "timer"
fixedscreenx,fixedscreeny :: Int
fixedscreenx = 1024
fixedscreeny = 768
placeImageTopLeft :: Float -> Int -> Int -> Picture -> Picture
placeImageTopLeft tamanho (realToFrac -> sizex) (realToFrac -> sizey) pic = Translate (sizex' / 2) (-sizey' / 2) $ Scale scalex scaley pic
where
scalex = tamanho / sizex
scaley = tamanho / sizey
sizex' = scalex * sizex
sizey' = scaley * sizey
placeImageCenter :: Float -> Int -> Int -> Picture -> Picture
placeImageCenter tamanho (realToFrac -> sizex) (realToFrac -> sizey) pic = Scale scalex scaley pic
where
scalex = tamanho / sizex
scaley = tamanho / sizey
loadImages :: Float -> Display -> IO [(String,Picture)]
loadImages tamanho screen@(Display screenx screeny) = do
lava <- loadImageById "lava"
nitro1 <- loadImageById "nitro1"
nitro2 <- loadImageById "nitro2"
nitro3 <- loadImageById "nitro3"
nitro4 <- loadImageById "nitro4"
puff <- loadImageById "puff"
bar1 <- loadImageById "bar1"
bar2 <- loadImageById "bar2"
bar3 <- loadImageById "bar3"
bar4 <- loadImageById "bar4"
p1 <- loadImageById "p1"
p2 <- loadImageById "p2"
p3 <- loadImageById "p3"
p4 <- loadImageById "p4"
c1 <- loadImageById "c1"
c2 <- loadImageById "c2"
c3 <- loadImageById "c3"
c4 <- loadImageById "c4"
n1 <- loadImageById "n1"
n2 <- loadImageById "n2"
n3 <- loadImageById "n3"
n4 <- loadImageById "n4"
btt <- loadImageById "btt"
mrt <- loadImageById "mrt"
f0 <- loadImageById "f0"
f1 <- loadImageById "f1"
f2 <- loadImageById "f2"
f3 <- loadImageById "f3"
f4 <- loadImageById "f4"
f5 <- loadImageById "f5"
f6 <- loadImageById "f6"
f7 <- loadImageById "f7"
f8 <- loadImageById "f8"
f9 <- loadImageById "f9"
fa <- loadImageById "fa"
fb <- loadImageById "fb"
fc <- loadImageById "fc"
fd <- loadImageById "fd"
fe <- loadImageById "fe"
ff <- loadImageById "ff"
fg <- loadImageById "fg"
fh <- loadImageById "fh"
fi <- loadImageById "fi"
fj <- loadImageById "fj"
fk <- loadImageById "fk"
fl <- loadImageById "fl"
fm <- loadImageById "fm"
fn <- loadImageById "fn"
fo <- loadImageById "fo"
fp <- loadImageById "fp"
fq <- loadImageById "fq"
fr <- loadImageById "fr"
fs <- loadImageById "fs"
ft <- loadImageById "ft"
fu <- loadImageById "fu"
fv <- loadImageById "fv"
fw <- loadImageById "fw"
fx <- loadImageById "fx"
fy <- loadImageById "fy"
fz <- loadImageById "fz"
let imgs = [("lava",placeImageTopLeft tamanho 100 100 lava)
,("nitro1",placeImageCenter tamanho 81 62 nitro1)
,("nitro2",placeImageCenter tamanho 81 62 nitro2)
,("nitro3",placeImageCenter tamanho 81 62 nitro3)
,("nitro4",placeImageCenter tamanho 81 62 nitro4)
,("puff" ,placeImageCenter tamanho 60 40 puff)
,("bar1",bar1)
,("bar2",bar2)
,("bar3",bar3)
,("bar4",bar4)
--,("timer",Translate (scalenunox 14) (scalenunoy 14) $ Scale s s timer)
,("1",n1)
,("2",n2)
,("3",n3)
,("4",n4)
,("btt",btt)
,("mrt",mrt)
,("p1",Scale 0.5 0.5 p1)
,("p2",Scale 0.5 0.5 p2)
,("p3",Scale 0.5 0.5 p3)
,("p4",Scale 0.5 0.5 p4)
,("c1",Rotate 90 $ Scale 0.5 0.5 $ placeImageCenter tamanho 60 100 c1)
,("c2",Rotate 90 $ Scale 0.5 0.5 $ placeImageCenter tamanho 60 100 c2)
,("c3",Rotate 90 $ Scale 0.5 0.5 $ placeImageCenter tamanho 60 100 c3)
,("c4",Rotate 90 $ Scale 0.5 0.5 $ placeImageCenter tamanho 60 100 c4)
,("f0",f0)
,("f1",f1)
,("f2",f2)
,("f3",f3)
,("f4",f4)
,("f5",f5)
,("f6",f6)
,("f7",f7)
,("f8",f8)
,("f9",f9)
,("fa",fa)
,("fb",fb)
,("fc",fc)
,("fd",fd)
,("fe",fe)
,("ff",ff)
,("fg",fg)
,("fh",fh)
,("fi",fi)
,("fj",fj)
,("fk",fk)
,("fl",fl)
,("fm",fm)
,("fn",fn)
,("fo",fo)
,("fp",fp)
,("fq",fq)
,("fr",fr)
,("fs",fs)
,("ft",ft)
,("fu",fu)
,("fv",fv)
,("fw",fw)
,("fx",fx)
,("fy",fy)
,("fz",fz)
]
return imgs
|
hpacheco/HAAP
|
examples/plab/oracle/JSImages.hs
|
mit
| 7,257
| 0
| 14
| 2,601
| 1,698
| 918
| 780
| 141
| 1
|
{-# LANGUAGE MonadComprehensions #-}
{-# LANGUAGE ParallelListComp #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module OlshausenOnStreams where
import qualified Data.AER.DVS128 as DVS
import Control.Monad.Random
import Control.Monad
import Control.Parallel.Strategies
import Control.Lens
import Data.Thyme.Clock
import Data.Thyme.Time
import Data.Thyme.Format
import qualified Data.Vector as V
import qualified Data.Vector.Algorithms.Merge as V
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Unboxed as U
import Data.Function
import Data.List
import Data.Word7
import Data.Foldable
import Data.Binary
import Data.AffineSpace
import Data.DTW
import Data.List.Ordered
import qualified Numeric.AD as AD
import Linear
import System.Locale
import System.Directory
import OlshausenOnStreams.ArtificialData
import OlshausenOnStreams.Plotting
import Debug.Trace
import Math.GaussianQuadratureIntegration
type Event a = V4 a
type Events a = V.Vector (Event a)
type Patch a = Events a
type Patches a = [Events a]
type Phi a = Events a
type Phis a = [Events a]
time, posX, posY, pol :: Lens' (Event a) a
time = _x
posX = _y
posY = _z
pol = _w
fromDVSEvent :: Fractional a => DVS.Event DVS.Address -> Event a
fromDVSEvent (DVS.Event (DVS.Address p x y) t) =
V4 (toSeconds t) (fromIntegral x) (fromIntegral y) (if p == DVS.U then 1 else -1)
createRandomEvent :: (Num a, Random a, MonadRandom m) => m (Event a)
createRandomEvent = V4 <$> getRandomR (0,1) <*> getRandomR (0,127) <*> getRandomR (0,127) <*> getRandomR (0,1)
-- | create random eventstream of length n
createRandomEvents :: (U.Unbox a, Ord a, Num a, Random a, MonadRandom m) => Int -> m (Events a)
createRandomEvents n = sortEvents <$> G.replicateM n createRandomEvent
pickRandomEvents :: (G.Vector v a, MonadRandom m) => Int -> v a -> m (v a)
pickRandomEvents n es = do
offset <- getRandomR (0, G.length es - n - 1)
return $ G.slice offset n es
-- | extract space time cubes from an event stream
-- TODO: this could be much more efficient by using the fact that the
-- streams are sorted on time, therefore doing a binary search on that
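-- Put differently, the cube is the sub-stream of events with x0 <= x < x1,
-- y0 <= y < y1 and t0 <= t < t1, exactly as the filter below spells out.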
extractSTC ::
NominalDiffTime
-> NominalDiffTime
-> Word7 -> Word7 -> Word7 -> Word7
-> [DVS.Event DVS.Address]
-> [DVS.Event DVS.Address]
extractSTC t0 t1 x0 x1 y0 y1 es = filter cond es
where cond (DVS.qview -> (_,x,y,t)) = x >= x0 && x < x1 &&
y >= y0 && y < y1 &&
t >= t0 && t < t1
extractRandomSTC ::
MonadRandom m =>
NominalDiffTime
-> Word7
-> Word7
-> [DVS.Event DVS.Address]
-> m [DVS.Event DVS.Address]
extractRandomSTC st sx sy es = do
let h = head es
let l = last es
rt <- getRandomR (DVS.timestamp h, DVS.timestamp l - st)
rx <- Word7 <$> getRandomR (0, 127 - unWord7 sx)
ry <- Word7 <$> getRandomR (0, 127 - unWord7 sy)
return $ extractSTC rt st rx sx ry sy es
{-scaleSTC :: (Monad m, Num a) => a -> a -> a -> a -> m (Event a) -> m (Event a)-}
{-scaleSTC ft fx fy fp stc = [ Event (ft * t) (fx * x) (fy * y) (fp * p) | (Event t x y p) <- stc ]-}
-- | TODO fix value NaNs ... hacked for now
normalizeSTC ::
(Floating a, Monad f, Foldable f) => f (Event a) -> f (Event a)
normalizeSTC es = fmap (set pol 1) [ (e - m) / s | e <- es ]
where m = mean es
s = stdDev es
onNormalizedSTC ::
(Floating b, Monad m, Monad m1, Foldable m1) =>
m1 b -> (m1 b -> m b) -> m b
onNormalizedSTC stc f = unnormalize . f . normalize' $ stc
where normalize' es = [ (e - m) / s | e <- es ]
unnormalize es = [ (e * s) + m | e <- es ]
m = mean stc
s = stdDev stc
reconstructEvents :: (Ord a, Num a) => [Event a] -> [Events a] -> Events a
reconstructEvents as φs = mergeEvents $ zipWith (\a φ -> V.map (\e -> a * e) φ) as φs
{-reconstructEvents as φs = mergeEvents $ [ [ a * e | e <- φ ] | a <- as | φ <- φs ]-}
-- | sort events based on timestamp
sortEvents :: (Ord a) => Events a -> Events a
sortEvents = G.modify (V.sortBy (compare `on` (^. time)))
{-concatEvents :: Ord a => [Events a] -> Events a-}
{-concatEvents es = sortEvents $ V.concat es-}
-- | merge multiple sorted event streams
mergeEvents :: Ord a => [Events a] -> Events a
mergeEvents = G.fromList . mergeAllBy (compare `on` (view time)) . map G.toList . sortHeads
where sortHeads = sortBy (compare `on` (view time . G.head))
mean :: (Fractional a, Foldable t) => t a -> a
mean xs = sum xs / (fromIntegral $ length xs)
stdDev :: (Floating a, Monad t, Foldable t) => t a -> a
stdDev xs = sqrt ((1 / (fromIntegral $ length xs)) * sum [ (x - m)^^(2::Int) | x <- xs ])
where m = mean xs
-- | super mega generic type 0o
{-eventDTW :: (Floating a, Ord a, G.Vector v e, e ~ Event a, Item (v e) ~ Event a, DataSet (v e)) -}
{- => v e -> v e -> Result a-}
eventDTW :: (Ord a, Floating a) => Events a -> Events a -> Result a
eventDTW = fastDtw qd reduceEventStream 1
-- type REvent s = Event (AD.Reverse s Float)
-- type REvents s = Events (AD.Reverse s Float)
--
-- {-# SPECIALIZE eventDTW :: forall s. Reifies s AD.Tape => Events (AD.Reverse s Float) -> Events (AD.Reverse s Float) -> Result (AD.Reverse s Float) #-}
--
-- | cut the eventstream in half by skipping every second event
reduceEventStream :: Events a -> Events a
reduceEventStream v = G.backpermute v (G.fromList [0,2..G.length v - 1])
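-- For illustration: a stream [e0, e1, e2, e3, e4] reduces to [e0, e2, e4].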
sparseness :: Floating a => a -> a -> a
sparseness σ a = s (a / σ) where s x = log (1+x*x)
logit :: Floating a => a -> a
logit x = log (1+x*x)
-- | TODO give parameters as ... parameters
errorFunction :: (Floating a, Ord a)
=> Events a -> [Events a] -> [Event a] -> a
errorFunction patch φs as = r -- + b
where bos = 6.96
λ = 100
σ = 0.316
r = λ * cost (eventDTW patch (reconstructEvents as φs))
b = bos * sum [ sum (sparseness σ <$> a) | a <- as ]
doTheTimeWarp :: forall a. (Ord a, Num a, Floating a)
=> Events a -> [Events a] -> [Event a] -> [[Event a]]
doTheTimeWarp patch φs as = fromFlatList <$> AD.gradientDescent go (toFlatList as)
where go :: forall t. (AD.Scalar t ~ a, AD.Mode t, Floating t, Ord t) => V.Vector t -> t
go ts = errorFunction (AD.auto <$$> patch) (AD.auto <$$$> φs) (fromFlatList ts)
toFlatList :: [Event a] -> V.Vector a
toFlatList = V.concat . fmap (V.fromList . toList)
fromFlatList :: V.Vector a -> [Event a]
fromFlatList v | V.null v = []
| otherwise = toV h : fromFlatList t
where (h,t) = V.splitAt 4 v
toV (toList -> [a,b,c,d]) = V4 a b c d
toV _ = error "shouldn't happen"
{-fromFlatList [] = []-}
{-fromFlatList (a:b:c:d:xs) = (V4 a b c d) : fromFlatList xs-}
{-fromFlatList _ = error "shouldn't be used on lists that aren't of quadruples"-}
doTheTimeWarpAgain :: (Floating a, Ord a)
=> Events a -> [Events a] -> [Event a] -> [Event a]
doTheTimeWarpAgain patch φs = go . doTheTimeWarp patch φs
where go (a:b:xs) | errDiff a b < 0.01 = b
| otherwise = go (b:xs)
go _ = error "shouldn't happen"
errFun = errorFunction patch φs
errDiff a b = abs (errFun a - errFun b)
{--- | calculate as per patch, outer list is per patch, inner is as -}
{-fitAs :: MonadRandom m-}
{- => [Patch Float] -> [Phi Float] -> m [[Event Float]]-}
{-fitAs patches φs = mapM (\patch -> fitSingleAs patch φs) patches-}
{-fitSingleAs ::-}
{- (Floating a, Ord a, Random a, MonadRandom m) =>-}
{- Events a -> [Events a] -> [a] -> m [Event a]-}
{-fitSingleAs patch φs as = do-}
{- -- do gradient descent to find "perfect" as for this patch-}
{- return $ doTheTimeWarpAgain patch φs as-}
-- | calculate the "vector" along which the phi has to be pushed to get
-- closer to the patch
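-- Concretely, for every index x of φ on the DTW path, the direction is the
-- mean of (patch V.! y - φ V.! x) over all patch indices y paired with x.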
getPushDirections ::
(Floating t1, Ord t1) =>
Phi t1 -> Patch t1 -> V.Vector (Event t1)
getPushDirections φ patch = V.fromList dirs
where gs = groupBy ((==) `on` fst) . reverse $ dtwPath
gs' = [ (fst . head $ g, map snd g) | g <- gs ]
dirs = [ sum [ ((patch V.! y) - (φ V.! x) ) / genericLength ys | y <- ys] | (x,ys) <- gs' ]
(Result _ dtwPath) = eventDTW φ patch
updatePhi :: (Floating a, Ord a) => a -> Events a -> Events a -> Events a
updatePhi η φ patch = V.zipWith (+) φ (V.map ((η*c) *^) pds)
where pds = getPushDirections φ patch
c = min 1 (1 / (cost $ eventDTW φ patch))
oneIteration' ::
(Show a, Floating a, Ord a, Random a, MonadRandom m, NFData a) =>
[Patch a] -> [Phi a] -> m [Phi a]
oneIteration' patches φs = do
let numPatches = length patches
-- generate some random as
randomAs <- replicateM (length patches) $ replicateM (length φs) getRandom
-- calculate push directions for all patches
let scaledDirections = foldl1' (zipWith (V.zipWith (\a b -> a + b ^/ (fromIntegral numPatches))))
$ withStrategy (parList rdeepseq)
$ zipWith (oneIterationPatch φs) patches randomAs
{-traceM $ "finalDirections: " ++ show scaledDirections-}
-- apply updates
let updatedφs = zipWith (V.zipWith (+)) φs scaledDirections
{-traceM $ "updatedφs: " ++ show updatedφs-}
-- normalize φs again -> done
{-return $ map normalizeSTC updatedφs-}
return updatedφs
oneIterationPatch :: (Ord a, Floating a) => [Phi a] -> Patch a -> [V4 a] -> [V.Vector (V4 a)]
oneIterationPatch φs patch randomAs = scaledDirections
-- find as that best represent the given patch
where fittedAs = doTheTimeWarpAgain patch φs randomAs
-- scale phis with the given as
fittedφs = zipWith (\a φ -> (a*) <$> φ) fittedAs φs
{-fittedφs = normalizedφs-}
-- at this point the phis are normalized and scaled according to
-- the 'best' as, that is they match the normalized patches as
-- best as possible while just scaling in t,x,y and p.
pushDirections = map (\φ -> getPushDirections φ patch) fittedφs
-- push directions are scaled with the kai factor:
kaiFactor = map (\φ -> min 1 ( 1 / (cost $ eventDTW φ patch))) fittedφs
scaledDirections = zipWith (\f d -> V.map (f *^) d) kaiFactor pushDirections
instance Random a => Random (V4 a) where
randomR (V4 lx ly lz lw, V4 hx hy hz hw)
= runRand (V4 <$> getRandomR (lx,hx) <*> getRandomR (ly,hy) <*> getRandomR (lz,hz) <*> getRandomR (lw,hw))
random = runRand (V4 <$> getRandom <*> getRandom <*> getRandom <*> getRandom)
iterateNM :: Int -> (a -> IO a) -> a -> IO [a]
iterateNM 0 _ _ = return []
iterateNM n f x = do
tStart <- getCurrentTime
x' <- f x
tEnd <- getCurrentTime
traceM $ "iteration " ++ show n ++ " took " ++ show (toMicroseconds (tEnd .-. tStart)) ++ "µs"
xs' <- iterateNM (n-1) f x'
return $ x' : xs'
test :: IO ()
test = do
traceM "running"
traceM $ "gauss: " ++ show gaussIntegral3d
t <- formatTime defaultTimeLocale "%F_%T" <$> getCurrentTime
let dn = "data/test_" ++ t ++ "/"
createDirectoryIfMissing True dn
let numPhis = 8
sizePhis = 16
iterations = 500
-- create random patches
{-stcs <- replicateM 2 (sortEvents . V.fromList <$> randomPlane 128) :: IO [Events Float]-}
a <- normalizeSTC . sortEvents . V.fromList <$> plane (V3 0 0 0) (V3 0 1 1) 128 :: IO (Events Float)
b <- normalizeSTC . sortEvents . V.fromList <$> plane (V3 0 0 0) (V3 0 1 (-1)) 128 :: IO (Events Float)
let stcs = [a,b]
encodeFile (dn ++ "stcs.bin") (toList <$> stcs)
-- generate initial random phis
phis <- map normalizeSTC <$> (replicateM numPhis $ createRandomEvents sizePhis) :: IO [Events Float]
-- update φs many times
phis' <- iterateNM iterations (oneIteration' stcs) phis :: IO [[Events Float]]
encodeFile (dn ++ "phis.bin") (toList <$$> phis')
-- show phis
_ <- multiplotEvents . map normalizeSTC $ [a,b] ++ last phis'
-- write images
traceM "writing images"
forM_ (zip [0::Int ..] phis') $ \(i,p) -> do
let fn = dn ++ "it-" ++ show i ++ ".png"
multiPlotFile fn . map normalizeSTC $ stcs ++ p
return ()
infixl 4 <$$>
(<$$>) :: (Functor f, Functor g) => (a -> b) -> f (g a) -> f (g b)
f <$$> x = fmap (fmap f) x
infixl 4 <$$$>
(<$$$>) :: (Functor f, Functor g, Functor h) => (a -> b) -> f (g (h a)) -> f (g (h b))
f <$$$> x = fmap (fmap (fmap f)) x
gaussIntegral3d :: Float
gaussIntegral3d = nIntegrate128 (\z -> nIntegrate128 (\y -> nIntegrate128 (\x -> gauss (V3 x y z) identity) (-5) 5) (-5) 5) (-5) 5
gauss x a = exp ( Linear.dot (negate x *! a) x)
{-# INLINABLE gauss #-}
monteCarlo2d :: IO Float
monteCarlo2d = do
let n = 1000000
lTrue <- length . filter (\(V2 x y) -> y < gauss (V1 x) identity) <$> replicateM n (V2 <$> getRandomR (-5,5) <*> getRandomR (0,1) :: IO (V2 Float))
{-bs <- U.filter id <$> U.replicateM n $ do-}
{- (V2 x y) <- -}
{- return $ y < gauss (V1 x) identity-}
{-let lTrue = fromIntegral . U.length $ bs-}
return $ 10 * (fromIntegral lTrue / fromIntegral n)
{-rs <- U.replicateM n $ V2 <$> getRandomR (-5,5) -}
{- <*> getRandomR ( 0,1) :: IO (V2 Float]-}
{-let area = 10 * 1-}
{-let ratio = genericLength (filter (\(V2 x w) -> w < gauss (V1 x) identity) rs) / fromIntegral n-}
{-return $ area * ratio-}
{-monteCarlo n = do-}
{- rs <- replicateM n $ V4 <$> getRandomR (-5,5) -}
{- <*> getRandomR (-5,5)-}
{- <*> getRandomR (-5,5)-}
{- <*> getRandomR ( 0,1) :: IO [V4 Float]-}
{- let bs = map (\(V4 x y z w) -> w < gauss (V3 x y z) identity) rs-}
{- area = 10 * 10 * 10 * 1-}
{- let ratio = genericLength (filter id bs) / genericLength bs-}
{- return $ area * ratio-}
|
fhaust/aer-utils
|
src/OlshausenOnStreams.hs
|
mit
| 14,555
| 0
| 21
| 3,871
| 4,676
| 2,434
| 2,242
| 230
| 2
|
{-# htermination keysFM_GE :: (Ord a, Ord k) => FiniteMap (a,k) b -> (a,k) -> [(a,k)] #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_keysFM_GE_12.hs
|
mit
| 107
| 0
| 3
| 19
| 5
| 3
| 2
| 1
| 0
|
{-# LANGUAGE ViewPatterns #-}
module Unison.Codebase.Reflog where
import Data.Text (Text)
import qualified Data.Text as Text
import Unison.Codebase.Branch (Hash)
import qualified Unison.Codebase.Causal as Causal
import qualified Unison.Hash as Hash
data Entry =
Entry
{ from :: Hash
, to :: Hash
, reason :: Text
}
fromText :: Text -> Maybe Entry
fromText t =
case Text.words t of
(Hash.fromBase32Hex -> Just old) : (Hash.fromBase32Hex -> Just new) : (Text.unwords -> reason) ->
Just $ Entry (Causal.RawHash old) (Causal.RawHash new) reason
_ -> Nothing
toText :: Entry -> Text
toText (Entry old new reason) =
Text.unwords [ Hash.base32Hex . Causal.unRawHash $ old
, Hash.base32Hex . Causal.unRawHash $ new
, reason ]
|
unisonweb/platform
|
parser-typechecker/src/Unison/Codebase/Reflog.hs
|
mit
| 790
| 0
| 12
| 182
| 255
| 143
| 112
| 23
| 2
|
{-# LANGUAGE OverloadedStrings #-}
module FS (
collect
, fuzzySort
, socketPath
) where
import Control.Applicative ((<$>), (<*>))
import Control.Concurrent.STM (STM, TMVar, atomically, putTMVar,
takeTMVar)
import Control.Monad (liftM, (>=>))
import Data.List (isPrefixOf, isSuffixOf)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import Data.Vector (Vector, filterM, mapM, mapM_)
import qualified Data.Vector as V
import Data.Vector.Algorithms.Intro (sortBy)
import Prelude hiding (mapM, mapM_)
import System.Directory (doesDirectoryExist,
getDirectoryContents,
getUserDocumentsDirectory)
import System.FilePath ((</>))
modifyTMVar :: TMVar a -> (a -> a) -> STM ()
modifyTMVar v f = takeTMVar v >>= putTMVar v . f
-- | Recursively collects every file under `base` into the TMVar.
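-- A minimal usage sketch (hypothetical names; newTMVar comes from
-- Control.Concurrent.STM, which is only partially imported here):
--   files <- atomically (newTMVar V.empty)
--   collect files "/home/user/projects"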
collect :: TMVar (Vector Text) -> Text -> IO ()
collect cs base = do
fs <- qualify =<< contentsOf base
atomically $ modifyTMVar cs $ flip (<>) fs
directories fs
where qualify = mapM $ return . ((base <> "/") <>)
directories = filterM (doesDirectoryExist . T.unpack) >=> mapM_ (collect cs)
contentsOf :: Text -> IO (Vector Text)
contentsOf = contentsOf' >=> validate >=> pack
where contentsOf' = liftM V.fromList . getDirectoryContents . T.unpack
validFile = (&&) <$> (not . isPrefixOf ".") <*> (not . isSuffixOf ".pyc")
validate = filterM $ return . validFile
pack = mapM $ return . T.pack
fuzzyMatch :: String -> Text -> (Bool, Double, Text)
fuzzyMatch s p = fuzzyMatch' s p 0
fuzzyMatch' :: String -> Text -> Double -> (Bool, Double, Text)
fuzzyMatch' [] p n = (True, n, p)
fuzzyMatch' (x:xs) p n = maybe (False, n, p) match $ x `elemIndex` p
where match i = let (b', n', _) = fuzzyMatch' xs (T.drop i p) (n + fromIntegral i)
in (b', n', p)
elemIndex = T.findIndex . (==)
fuzzySort :: String -> Vector Text -> Vector Text
fuzzySort [] xs = xs
fuzzySort s xs = V.map ext $ V.modify (sortBy cmp) $ V.filter flt $ V.map (fuzzyMatch s) xs
where cmp (_, n, _) (_, n', _) = n `compare` n'
flt (b, _, _) = b
ext (_, _, t) = t
socketPath :: IO FilePath
socketPath = liftM (</> ".ff.socket") getUserDocumentsDirectory
|
Bogdanp/ff
|
src/FS.hs
|
mit
| 2,688
| 0
| 14
| 910
| 886
| 492
| 394
| 52
| 1
|
module Paths_HaskellPie (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
version :: Version
version = Version {versionBranch = [0,0,0], versionTags = []}
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/home/rewrite/.cabal/bin"
libdir = "/home/rewrite/.cabal/lib/x86_64-linux-ghc-7.8.3/HaskellPie-0.0.0"
datadir = "/home/rewrite/.cabal/share/x86_64-linux-ghc-7.8.3/HaskellPie-0.0.0"
libexecdir = "/home/rewrite/.cabal/libexec"
sysconfdir = "/home/rewrite/.cabal/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "HaskellPie_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "HaskellPie_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "HaskellPie_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "HaskellPie_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "HaskellPie_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
|
cirquit/HaskellPie
|
HaskellPie/dist/build/autogen/Paths_HaskellPie.hs
|
mit
| 1,367
| 0
| 10
| 182
| 368
| 211
| 157
| 28
| 1
|
module Hangman.RandomWord
( pickRandomWord
) where
import System.Random (randomRIO)
import Data.Char (isUpper)
type WordList = [String]
type WordFilter = (String -> Bool)
pickRandomWord :: IO String
pickRandomWord =
allWords >>= randomWord
allWords :: IO WordList
allWords =
readFile "data/dict.txt" >>= return . gameWords . lines
randomWord :: WordList -> IO String
randomWord wl = do
idx <- randomRIO (lowerBounds, upperBounds)
return $ wl !! idx
where
lowerBounds = 0
upperBounds = (length wl) - 1
gameWords :: WordList -> WordList
gameWords wl =
foldr filter wl gameFilters
gameFilters :: [WordFilter]
gameFilters = [not . properNoun, inRange [5..9]]
inRange :: [Int] -> WordFilter
inRange r = (`elem` r) . length
properNoun :: WordFilter
properNoun [] = False
properNoun (x:_) = isUpper x
|
joshuaclayton/hangman
|
src/Hangman/RandomWord.hs
|
mit
| 841
| 0
| 9
| 168
| 285
| 156
| 129
| 28
| 1
|
{-# LANGUAGE LambdaCase #-}
module EventSource where
import EventSourceHelper
import Data.Functor (($>))
import qualified Data.Set as Set
-- Types:
--- A query is a fold over a list of events.
---
--- data QueryT f e a where
--- instance Functor f => Functor (QueryT f a)
--- instance Functor f => Profunctor (QueryT f)
--- instance Applicative f => Applicative (QueryT f e)
---
--- runQ :: QueryT f e a -> [e] -> f a
--- A command is a query that returns an event to be appended.
---
--- newtype CommandT f e = CommandT (QueryT f e e)
---
--- runC :: Functor f => CommandT f e -> [e] -> f [e]
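---
--- A rough mental model, purely as an illustrative guess (the real
--- definitions live in EventSourceHelper):
---
--- newtype QueryT f e a = QueryT { runQ :: [e] -> f a }
--- type Query e a = QueryT Identity e a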
-- Domain:
type UserId = String
type GroupId = String
data Event = GroupCreated GroupId
| GroupDeleted GroupId
| UserCreated UserId
| UserDeleted UserId
| UserAddedToGroup UserId GroupId
| UserRemovedFromGroup UserId GroupId
deriving (Show, Eq)
data Error = GroupDoesNotExist
| UserDoesNotExist
| DuplicateGroupId
| DuplicateUserId
| UserIsAlreadyMember
| UserIsNotAMember
| GroupIsNotEmpty
deriving Show
type Result = Either Error
-- Signatures:
groupExists :: GroupId -> Query Event Bool
userExists :: UserId -> Query Event Bool
isMemberOfGroup :: UserId -> GroupId -> Query Event Bool
isMemberOfGroup' :: UserId -> GroupId -> QueryT Result Event Bool
findUsersInGroup :: GroupId -> Query Event (Set.Set UserId)
isGroupEmpty :: GroupId -> Query Event Bool
createGroup :: GroupId -> CommandT Result Event
deleteGroup :: GroupId -> CommandT Result Event
createUser :: UserId -> CommandT Result Event
addUserToGroup :: UserId -> GroupId -> CommandT Result Event
removeUserFromGroup :: UserId -> GroupId -> CommandT Result Event
-- Implementation:
groupExists gid = GroupCreated gid `lastHappenedAfter` GroupDeleted gid
userExists uid = UserCreated uid `lastHappenedAfter` UserDeleted uid
isMemberOfGroup uid gid = UserAddedToGroup uid gid `lastHappenedAfter` UserRemovedFromGroup uid gid
isMemberOfGroup' uid gid =
ensure (userExists uid) UserDoesNotExist *>
ensure (groupExists gid) GroupDoesNotExist *>
query (uid `isMemberOfGroup` gid)
findUsersInGroup gid = fold1 $ \case
  UserAddedToGroup uid gid' | gid == gid' -> Set.insert uid
  UserRemovedFromGroup uid gid' | gid == gid' -> Set.delete uid
_ -> id
isGroupEmpty gid = Set.null <$> findUsersInGroup gid
createGroup gid =
ensure (not <$> groupExists gid) DuplicateGroupId $>
[GroupCreated gid]
deleteGroup gid =
  ensure (groupExists gid) GroupDoesNotExist *>
ensure (isGroupEmpty gid) GroupIsNotEmpty $>
[GroupDeleted gid]
createUser uid =
ensure (not <$> userExists uid) DuplicateUserId $>
[UserCreated uid]
addUserToGroup uid gid =
ensure (userExists uid) UserDoesNotExist *>
ensure (groupExists gid) GroupDoesNotExist *>
ensure (not <$> uid `isMemberOfGroup` gid) UserIsAlreadyMember $>
[UserAddedToGroup uid gid]
removeUserFromGroup uid gid =
ensure (userExists uid) UserDoesNotExist *>
ensure (groupExists gid) GroupDoesNotExist *>
ensure (uid `isMemberOfGroup` gid) UserIsNotAMember $>
[UserRemovedFromGroup uid gid]
scenario = runTX
[ createGroup "foo"
, createUser "bar"
, addUserToGroup "bar" "foo"
, createUser "baz"
, addUserToGroup "baz" "foo"
, removeUserFromGroup "bar" "foo"
, deleteGroup "foo"
]
|
srijs/haskell-eventsource
|
EventSource.hs
|
mit
| 3,391
| 0
| 12
| 696
| 832
| 434
| 398
| 74
| 3
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE CPP #-}
#if MIN_VERSION_base(4,8,1)
#define HAS_SOURCE_LOCATIONS
{-# LANGUAGE ImplicitParams #-}
#endif
module Test.Hspec.Core.SpecSpec (main, spec) where
import Prelude ()
import Helper
#ifdef HAS_SOURCE_LOCATIONS
import GHC.SrcLoc
import GHC.Stack
#endif
import Test.Hspec.Core.Spec (Item(..), Result(..))
import qualified Test.Hspec.Core.Runner as H
import Test.Hspec.Core.Spec (Tree(..), runSpecM)
import qualified Test.Hspec.Core.Spec as H
main :: IO ()
main = hspec spec
runSpec :: H.Spec -> IO [String]
runSpec = captureLines . H.hspecResult
spec :: Spec
spec = do
describe "describe" $ do
it "can be nested" $ do
[Node foo [Node bar [Leaf _]]] <- runSpecM $ do
H.describe "foo" $ do
H.describe "bar" $ do
H.it "baz" True
(foo, bar) `shouldBe` ("foo", "bar")
context "when no description is given" $ do
it "uses a default description" $ do
[Node d _] <- runSpecM (H.describe "" (pure ()))
d `shouldBe` "(no description given)"
describe "it" $ do
it "takes a description of a desired behavior" $ do
[Leaf item] <- runSpecM (H.it "whatever" True)
itemRequirement item `shouldBe` "whatever"
it "takes an example of that behavior" $ do
[Leaf item] <- runSpecM (H.it "whatever" True)
itemExample item defaultParams ($ ()) noOpProgressCallback `shouldReturn` Success
#ifdef HAS_SOURCE_LOCATIONS
it "adds source locations" $ do
[Leaf item] <- runSpecM (H.it "foo" True)
let [(_, loc)] = (getCallStack ?loc)
location = H.Location (srcLocFile loc) (pred $ srcLocStartLine loc) 32 H.ExactLocation
itemLocation item `shouldBe` Just location
#endif
context "when no description is given" $ do
it "uses a default description" $ do
[Leaf item] <- runSpecM (H.it "" True)
itemRequirement item `shouldBe` "(unspecified behavior)"
describe "pending" $ do
it "specifies a pending example" $ do
r <- runSpec $ do
H.it "foo" H.pending
r `shouldSatisfy` any (== " # PENDING: No reason given")
describe "pendingWith" $ do
it "specifies a pending example with a reason for why it's pending" $ do
r <- runSpec $ do
H.it "foo" $ do
H.pendingWith "for some reason"
r `shouldSatisfy` any (== " # PENDING: for some reason")
describe "parallel" $ do
it "marks examples for parallel execution" $ do
[Leaf item] <- runSpecM . H.parallel $ H.it "whatever" True
itemIsParallelizable item `shouldBe` True
it "is applied recursively" $ do
[Node _ [Node _ [Leaf item]]] <- runSpecM . H.parallel $ do
H.describe "foo" $ do
H.describe "bar" $ do
H.it "baz" True
itemIsParallelizable item `shouldBe` True
|
beni55/hspec
|
hspec-core/test/Test/Hspec/Core/SpecSpec.hs
|
mit
| 2,896
| 0
| 25
| 762
| 907
| 451
| 456
| 58
| 1
|
module Language.LambdaSpec where
import Test.Hspec
import Language.Lambda
import Language.Lambda.HspecUtils
spec :: Spec
spec = do
describe "evalString" $ do
it "evaluates simple strings" $ do
eval "x" `shouldBe` Right (Var "x")
eval "\\x. x" `shouldBe` Right (Abs "x" (Var "x"))
eval "f y" `shouldBe` Right (App (Var "f") (Var "y"))
it "reduces simple applications" $
eval "(\\x .x) y" `shouldBe` Right (Var "y")
it "reduces applications with nested redexes" $
eval "(\\f x. f x) (\\y. y)" `shouldBe` Right (Abs "x" (Var "x"))
describe "uniques" $ do
let alphabet = reverse ['a'..'z']
len = length alphabet
it "starts with plain alphabet" $
take len uniques `shouldBe` map (:[]) alphabet
it "adds index afterwards" $
take len (drop len uniques) `shouldBe` map (:['0']) alphabet
|
sgillespie/lambda-calculus
|
test/Language/LambdaSpec.hs
|
mit
| 870
| 0
| 18
| 214
| 309
| 152
| 157
| 22
| 1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.BitFunctor2.Platform.Blockchain.Consensus ( checkConsensus
, generateConsensus
, ConsensusError
) where
import Network.BitFunctor2.Platform.Blockchain.Types
import qualified Network.BitFunctor2.Platform.Blockchain.Types as BC
import Network.BitFunctor2.Platform.Blockchain.Consensus.Types
import qualified Network.BitFunctor.Identifiable as Id
import Network.BitFunctor.Crypto.Hash
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.ByteString.Base64 as BS64
import Data.ByteArray (convert)
import Control.Monad.Reader
import Control.Monad.Except
import Control.Monad.Trans.Maybe
import qualified Data.Bool as B
import Data.Foldable (maximumBy)
import Data.Ord (Ordering (..))
data ConsensusError = ConsensusError T.Text
deriving (Eq, Show)
data ConsensusContext = ConsensusContext {
cctxNewBlockInfo :: NewBlockFullInfo,
cctxBlockHeight :: Int,
cctxTips :: [BlockchainBlockInfo]
} deriving (Eq, Show)
newtype ConsensusComputation a = ConsensusComputation {
unConsensusComputation :: ReaderT ConsensusContext (Except ConsensusError) a
} deriving ( Monad
, Functor
, Applicative
, MonadReader ConsensusContext
, MonadError ConsensusError
)
-- looks similar to maybeToExceptT, but it is not
maybeToEffect :: Monad m => m (Maybe a) -> m a -> m a
maybeToEffect x eff = x >>= maybe eff return
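-- For illustration: the fallback effect runs only when the first action
-- yields Nothing, e.g. (with err any error value)
--   maybeToEffect (return (Just 3)) (throwError err)  behaves as  return 3
--   maybeToEffect (return Nothing)  (throwError err)  behaves as  throwError err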
-- | The 'extractConsensusContext' function extracts the data required for
-- consensus algorithm operation.
-- This is, basically, the information about the block to achieve consensus on,
-- plus some algorithm-specific information from the Blockchain
extractConsensusContext :: Blockchain b => NewBlockFullInfo -> b ConsensusContext
extractConsensusContext nbfi = do
let prevId = blockMetaPrevId . blockMeta $ nbfiBlock nbfi
h <- maybe (return 0) -- default case – nbfi contains genesis block
(\x -> maybeToEffect (liftM (fmap (+1)) (BC.height x))
(throwError $ BC.Other "No height for prev block with ID"))
prevId
-- https://stackoverflow.com/questions/20293006/how-to-use-maybe-monad-inside-another-monad
-- https://stackoverflow.com/questions/16064143/maybe-monad-inside-stack-of-transformers
-- https://en.wikibooks.org/wiki/Haskell/Monad_transformers#A_simple_monad_transformer:_MaybeT
-- http://hackage.haskell.org/package/transformers-0.5.4.0/docs/Control-Monad-Trans-Maybe.html
-- !! https://stackoverflow.com/questions/32579133/simplest-non-trivial-monad-transformer-example-for-dummies-iomaybe
-- http://hackage.haskell.org/package/transformers-0.5.4.0/docs/Control-Monad-Trans-Class.html#v:lift
tHs <- BC.tips
tBi <- mapM (\bid -> do
height <- maybeToEffect (BC.height bid)
(throwError $ BC.Other "No height for block with ID")
block <- maybeToEffect (BC.block bid)
(throwError $ BC.Other "No block for ID")
return $ BlockchainBlockInfo bid height block) tHs
return $ ConsensusContext { cctxNewBlockInfo = nbfi
, cctxBlockHeight = h
, cctxTips = tBi
}
runConsensus :: ConsensusComputation a -> ConsensusContext -> Either ConsensusError a
runConsensus c ctx = runExcept $ runReaderT (unConsensusComputation c) ctx
checkConsensus :: Blockchain b => NewBlockFullInfo -> b (Either ConsensusError BlockchainBlockInfo)
checkConsensus nbfi = extractConsensusContext nbfi >>= \ctx -> return $ runConsensus c ctx
where c :: ConsensusComputation BlockchainBlockInfo
c = validatePoW >>= selectTip
validatePoW = do
h <- asks cctxBlockHeight
b <- asks $ nbfiBlock . cctxNewBlockInfo
bId <- asks $ nbfiId . cctxNewBlockInfo
declaredNonce <- asks $ powNonce . blockConsensus . nbfiBlock . cctxNewBlockInfo
B.bool (throwError $ ConsensusError "Wrong PoW nonce")
(return $ BlockchainBlockInfo bId h b)
(checkProofOfWork h b declaredNonce)
selectTip newBlock = do
choice <- asks cctxTips
return $ maximumBy (\b0 b1 -> compare (bbiHeight b0) (bbiHeight b1))
(newBlock:choice)
generateConsensus :: Blockchain b => NewBlockFullInfo -> b (Either ConsensusError ConsensusData)
generateConsensus nbfi = do
ctx <- extractConsensusContext nbfi
return $ runConsensus consensusGenerator ctx
where consensusGenerator :: ConsensusComputation ConsensusData
consensusGenerator = do
h <- asks cctxBlockHeight
b <- asks $ nbfiBlock . cctxNewBlockInfo
return $ ConsensusData { powNonce = proofOfWork h b }
-- this PoW algorithm is from
-- https://github.com/adjoint-io/nanochain/blob/master/src/Nanochain.hs
proofOfWork :: Int -> Block -> Int
proofOfWork height block = proofOfWorkWithBaseNonce height block 0
checkProofOfWork :: Int -> Block -> Int -> Bool
checkProofOfWork height block nonce = proofOfWorkWithBaseNonce height block nonce == nonce
proofOfWorkWithBaseNonce :: Int -> Block -> Int -> Int
proofOfWorkWithBaseNonce height block nonce = calcNonce nonce
where dbits = round $ logBase (2 :: Float) $ fromIntegral height
prefix = T.pack $ replicate dbits '0'
calcNonce n | prefix' == prefix = n
| otherwise = calcNonce $ n + 1
where hash' = Id.id $ Id.ByBinary block { blockConsensus = ConsensusData {powNonce = n} }
prefix' = T.take dbits . TE.decodeUtf8 . BS64.encode $ convert hash'
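-- Worked example of the difficulty rule above (illustrative numbers): at
-- height 16, dbits = round (logBase 2 16) = 4, so a nonce is accepted only
-- when the base-64 rendering of the block hash starts with "0000".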
|
BitFunctor/bitfunctor
|
src/Network/BitFunctor2/Platform/Blockchain/Consensus.hs
|
mit
| 6,033
| 0
| 18
| 1,483
| 1,251
| 663
| 588
| 94
| 1
|
{-# LANGUAGE OverloadedStrings, FlexibleContexts #-}
module Discussion.Lexer (lex) where
--------------------------------------------------------------------------------
import Prelude hiding (lex)
import Text.Parsec
import Text.Parsec.String
-- import Text.Parsec.Text
import Text.Parsec.Pos
import Control.Applicative hiding (many, (<|>))
import Discussion.Data
import Discussion.Bool
--------------------------------------------------------------------------------
lex :: String -> Either ParseError [Token]
lex src = joinEOS <$> (parseLines . lines $ src)
--------------------------------------------------------------------------------
-- Concatenate the list of per-line token sequences, inserting EOS at the
-- appropriate places: if the next line's tokens contain Symbol "=", append an
-- EOS to the current line's tokens, and also append an EOS after the last line.
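-- For example, using the token constructors from Discussion.Data:
--   joinEOS [[Word "f", Symbol "="], [Word "g", Symbol "=", Number 1]]
--     == [Word "f", Symbol "=", EOS, Word "g", Symbol "=", Number 1, EOS]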
joinEOS :: [[Token]] -> [Token]
joinEOS [] = []
joinEOS (toks:[]) = toks ++ [EOS]
joinEOS (toks1:toks2:contTokss) =
if Symbol "=" `elem` toks2
then joinEOS $ (toks1 ++ [EOS] ++ toks2) : contTokss
else joinEOS $ (toks1 ++ toks2) : contTokss
--------------------------------------------------------------------------------
parseLines :: [String] -> Either ParseError [[Token]]
parseLines = mapM parseLine
parseLine :: String -> Either ParseError [Token]
parseLine = parse pTokens ""
--------------------------------------------------------------------------------
-- Extract the sequence of tokens.
-- If parsing a token fails partway through, without the stream having been
-- fully consumed, the parse of the whole token sequence fails.
pTokens :: Parser [Token]
pTokens = many pToken <* eof
pToken :: Parser Token
pToken = token' $ choice [pWord
, pNumber
, pSymbol
, pBackquote
, pLBrace
, pRBrace
, pLParen
, pRParen]
--------------------------------------------------------------------------------
-- /\w(\w|\d)*/
pWord :: Parser Token
pWord = Word <$> ((:) <$> letter <*> many alphaNum)
pNumber :: Parser Token
pNumber = Number . read <$> many1 digit
--------------------------------------------------------------------------------
-- /[!#$%&'*+,-.\/:;<=>?@\\^_|~]+/
pSymbol :: Parser Token
pSymbol = Symbol <$> (many1 . oneOf $ "!#$%&'*+,-./:;<=>?@\\^_|~")
--------------------------------------------------------------------------------
pBackquote :: Parser Token
pBackquote = char '`' *> return Backquote
pLBrace :: Parser Token
pLBrace = char '{' *> return LBrace
pRBrace :: Parser Token
pRBrace = char '}' *> return RBrace
pLParen :: Parser Token
pLParen = char '(' *> return LParen
pRParen :: Parser Token
pRParen = char ')' *> return RParen
--------------------------------------------------------------------------------
-- trim leading and trailing whitespace
token' :: Parser a -> Parser a
token' p = spaces *> p <* spaces
|
todays-mitsui/discussion
|
src/Discussion/Lexer.hs
|
mit
| 2,996
| 0
| 11
| 499
| 606
| 336
| 270
| 51
| 2
|
{-# LANGUAGE QuasiQuotes #-}
{- |
Module      : Language.Egison.Math.Normalize
Licence : MIT
This module implements the normalization of polynomials. Normalization rules
for particular mathematical functions (such as sqrt and sin/cos) are defined
in Rewrite.hs.
-}
module Language.Egison.Math.Normalize
( mathNormalize'
, termsGcd
, mathDivideTerm
) where
import Control.Egison
import Language.Egison.Math.Expr
mathNormalize' :: ScalarData -> ScalarData
mathNormalize' = mathDivide . mathRemoveZero . mathFold . mathRemoveZeroSymbol
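-- Reading the pipeline right to left: drop symbols with exponent zero, merge
-- repeated symbols and like terms, drop terms whose coefficient is zero, and
-- finally cancel the common factor of numerator and denominator.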
termsGcd :: [TermExpr] -> TermExpr
termsGcd ts@(_:_) =
foldl1 (\(Term a xs) (Term b ys) -> Term (gcd a b) (monoGcd xs ys)) ts
where
monoGcd :: Monomial -> Monomial -> Monomial
monoGcd [] _ = []
monoGcd ((x, n):xs) ys =
case f (x, n) ys of
(_, 0) -> monoGcd xs ys
(z, m) -> (z, m) : monoGcd xs ys
f :: (SymbolExpr, Integer) -> Monomial -> (SymbolExpr, Integer)
f (x, _) [] = (x, 0)
f (Quote x, n) ((Quote y, m):ys)
| x == y = (Quote x, min n m)
| x == mathNegate y = (Quote x, min n m)
| otherwise = f (Quote x, n) ys
f (x, n) ((y, m):ys)
| x == y = (x, min n m)
| otherwise = f (x, n) ys
mathDivide :: ScalarData -> ScalarData
mathDivide mExpr@(Div (Plus _) (Plus [])) = mExpr
mathDivide mExpr@(Div (Plus []) (Plus _)) = mExpr
mathDivide (Div (Plus ts1) (Plus ts2)) =
let z@(Term c zs) = termsGcd (ts1 ++ ts2) in
case ts2 of
[Term a _] | a < 0 -> Div (Plus (map (`mathDivideTerm` Term (-c) zs) ts1))
(Plus (map (`mathDivideTerm` Term (-c) zs) ts2))
_ -> Div (Plus (map (`mathDivideTerm` z) ts1))
(Plus (map (`mathDivideTerm` z) ts2))
mathDivideTerm :: TermExpr -> TermExpr -> TermExpr
mathDivideTerm (Term a xs) (Term b ys) =
let (sgn, zs) = divMonomial xs ys in
Term (sgn * div a b) zs
where
divMonomial :: Monomial -> Monomial -> (Integer, Monomial)
divMonomial xs [] = (1, xs)
divMonomial xs ((y, m):ys) =
match dfs (y, xs) (Pair SymbolM (Multiset (Pair SymbolM Eql)))
-- Because we've applied |mathFold|, we can only divide the first matching monomial
[ [mc| (quote $s, ($x & negQuote #s, $n) : $xss) ->
let (sgn, xs') = divMonomial xss ys in
let sgn' = if even m then 1 else -1 in
if n == m then (sgn * sgn', xs')
else (sgn * sgn', (x, n - m) : xs') |]
, [mc| (_, (#y, $n) : $xss) ->
let (sgn, xs') = divMonomial xss ys in
if n == m then (sgn, xs') else (sgn, (y, n - m) : xs') |]
, [mc| _ -> divMonomial xs ys |]
]
mathRemoveZeroSymbol :: ScalarData -> ScalarData
mathRemoveZeroSymbol (Div (Plus ts1) (Plus ts2)) =
let ts1' = map (\(Term a xs) -> Term a (filter p xs)) ts1
ts2' = map (\(Term a xs) -> Term a (filter p xs)) ts2
in Div (Plus ts1') (Plus ts2')
where
p (_, 0) = False
p _ = True
mathRemoveZero :: ScalarData -> ScalarData
mathRemoveZero (Div (Plus ts1) (Plus ts2)) =
let ts1' = filter (\(Term a _) -> a /= 0) ts1 in
let ts2' = filter (\(Term a _) -> a /= 0) ts2 in
case ts1' of
[] -> Div (Plus []) (Plus [Term 1 []])
_ -> Div (Plus ts1') (Plus ts2')
mathFold :: ScalarData -> ScalarData
mathFold = mathTermFold . mathSymbolFold
-- x^2 y x -> x^3 y
mathSymbolFold :: ScalarData -> ScalarData
mathSymbolFold (Div (Plus ts1) (Plus ts2)) = Div (Plus (map f ts1)) (Plus (map f ts2))
where
f :: TermExpr -> TermExpr
f (Term a xs) =
let (sgn, ys) = g xs in Term (sgn * a) ys
g :: Monomial -> (Integer, Monomial)
g [] = (1, [])
g ((x, m):xs) =
match dfs (x, xs) (Pair SymbolM (Multiset (Pair SymbolM Eql)))
[ [mc| (quote $s, (negQuote #s, $n) : $xs) ->
let (sgn, ys) = g ((x, m + n) : xs) in
if even n then (sgn, ys) else (- sgn, ys) |]
, [mc| (_, (#x, $n) : $xs) -> g ((x, m + n) : xs) |]
, [mc| _ -> let (sgn', ys) = g xs in (sgn', (x, m):ys) |]
]
-- x^2 y + x^2 y -> 2 x^2 y
mathTermFold :: ScalarData -> ScalarData
mathTermFold (Div (Plus ts1) (Plus ts2)) = Div (Plus (f ts1)) (Plus (f ts2))
where
f :: [TermExpr] -> [TermExpr]
f [] = []
f (t:ts) =
match dfs (t, ts) (Pair TermM (Multiset TermM))
[ [mc| (term $a $xs, term $b (equalMonomial $sgn #xs) : $tss) ->
f (Term (sgn * a + b) xs : tss) |]
, [mc| _ -> t : f ts |]
]
|
egison/egison
|
hs-src/Language/Egison/Math/Normalize.hs
|
mit
| 4,487
| 0
| 19
| 1,314
| 1,713
| 917
| 796
| 84
| 5
|
import qualified Data.List as DL
numUniques :: (Eq a) =>[a] ->Int
numUniques = length . DL.nub
|
Bolt64/my_code
|
haskell/modules.hs
|
mit
| 96
| 0
| 7
| 17
| 41
| 24
| 17
| 3
| 1
|
module AppLogger where
log :: FilePath -> String -> IO ()
log filePath str = appendFile filePath $ str ++ "\n"
|
toddmohney/tweeter-bot
|
src/AppLogger.hs
|
mit
| 115
| 0
| 8
| 25
| 43
| 22
| 21
| 3
| 1
|
--------------------------------------------------------------------------------
-- |
-- Module : XMonad.Config.Components
-- Copyright : (c) whythat <yuri.zhykin@gmail.com>
-- License : BSD
--
-- Maintainer : whythat <yuri.zhykin@gmail.com>
-- Stability : unstable
-- Portability : unportable
--
-- This module provides a way to specify clearly which software components
-- are to be used as defaults (e.g. terminal, screen locker, web browser,
-- file browser, etc.)
--
--------------------------------------------------------------------------------
module Components (getComponentTable) where
import System.IO.Unsafe (unsafePerformIO)
import System.Directory (getHomeDirectory, doesFileExist)
import System.FilePath.Posix (joinPath)
import Control.DeepSeq (force)
import Data.Char (isSpace)
import Data.Maybe (fromMaybe)
--------------------------------------------------------------------------------
-- main functionality --
--------------------------------------------------------------------------------
-- main function
-- returns a function that takes a `String` key and in turn returns the
-- corresponding component name, using data read from the configuration file
-- or from the default table `defaultComponents`
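-- Illustrative behaviour, assuming "~/.comprc" is a hypothetical path and the
-- file is absent, so only the defaults apply:
--   getComponentTable "~/.comprc" "terminal"   == "x-terminal-emulator"
--   getComponentTable "~/.comprc" "webbrowser" == ""   -- no default entry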
getComponentTable :: FilePath -> (String -> String)
getComponentTable fname =
let ctbl = force $ map parseCompEntry $ lines $ rawData fname
in \c -> fromMaybe (fromMaybe "" (lookup c defaultComponents)) (lookup c ctbl)
-- parsing algorithm to obtain key-value pairs for the component table
parseCompEntry :: String -> (String, String)
parseCompEntry line =
let ws = break (== '=') line
res@(key, val) = (trim (fst ws), trim (drop 1 (snd ws)))
  in if not (null val) && head val == '\'' && last val == '\'' then res else ("", "")
-- default table that contains <component, component-name> pairs
-- TODO: make it more generic if possible
defaultComponents :: [(String, String)]
defaultComponents =
[ ("terminal", "x-terminal-emulator")
, ("filebrowser", "xdg-open /home/$USERNAME") -- dirty but simple
]
-- unsafe IO action that reads data from the configuration file if it exists
-- and otherwise returns an empty `String` (the safety of this rests on the
-- assumption that every computation using this value handles the
-- empty-string case)
rawData :: FilePath -> String
rawData comprc = unsafePerformIO $
getFullPath comprc >>= \x ->
doesFileExist x >>= \c ->
if c then readFile x else return ""
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- utils & helpers --
--------------------------------------------------------------------------------
-- expand path that makes use of ~ relative notation
getFullPath :: FilePath -> IO FilePath
getFullPath s = getHomeDirectory >>= (\x -> return $ fullPath x s)
-- pure helper for `getFullPath` that simply concatenates paths if needed
fullPath :: FilePath -> FilePath -> FilePath
fullPath home_path s | head s == '~' = joinPath [home_path, drop 2 s]
| otherwise = s
-- remove leading and trailing spaces from a string
trim :: String -> String
trim = f . f where f = reverse . dropWhile isSpace
--------------------------------------------------------------------------------
|
yurizhykin/.xmonad
|
lib/Components.hs
|
mit
| 3,521
| 0
| 14
| 693
| 552
| 314
| 238
| 32
| 2
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Email where
import Network.Mail.SMTP
import Data.Text
import Data.List
import Data.Yaml
import GHC.Generics
import Data.Map
import Control.Monad.Error
data User = User {
jmbag :: String,
lastname :: String,
firstname:: String,
age :: Int,
email :: String,
role :: String,
location :: String } deriving (Show, Read, Ord, Eq)
recipientInfo = ["username", "location", "mailAdress", "name", "role"]
recipient1 = ["msaric", "Zagreb, Croatia", "msaric@hotmail.com", "Mario Saric", "proffessor"]
recipient2 = ["mpetricevic", "Zagreb, Croatia", "mpetricevic@fer.hr", "Marin Petricevic", "TA"]
recipient3 = ["dlozic", "Karlovac, Croatia", "dlozic@fer.hr", "David Lozic", "student"]
recipient4 = ["dsaric", "Varazdin, Croatia", "dsaric@fer.hr", "Doria Saric", "student"]
user1 = createMap recipient1
user2 = createMap recipient2
user3 = createMap recipient3
user4 = createMap recipient4
-- | An alias for the template contents
type Template = Text
-- | Some configuration object mirroring a file.
-- | Define your own structure and use Maybe X for
-- | optional fields.
data Configuration = Configuration { host :: String,
port :: String,
senderAdress :: String,
username :: String,
password :: String
} deriving (Show, Generic)
instance FromJSON Configuration
-- | Reads the e-mail configuration object from a file, using some
-- | default path to config file.
readConfig :: IO Configuration
readConfig = do
configFile <- decodeFile "./src/info.yml" :: IO (Maybe Configuration)
return $ case configFile of Just x -> x
Nothing -> error "Error reading configuration file"
-- | Parses an expression and returns either a result or an error
-- | message, if the parsing fails. If a variable is undefined, you
-- | can either return an error or replace it with an empty string.
-- | You can use a more elaborate type than String for the Map values,
-- | e.g. something that differentiates between Strings and Bools.
compileTemplate :: Template -> Map String String -> Text
compileTemplate message varsMap | patternFields == [] = message
| otherwise = "error"
where patternFields = toBeReplaced message
handlePattern :: Text -> Map String String -> Text
handlePattern p varsMap
| isExpression p = evaluateExpression p varsMap
| otherwise = case Data.Map.lookup (unpack p) varsMap of Nothing -> pack ""
Just x -> pack x
evaluateExpression :: Text -> Map String String -> Text
evaluateExpression expr mapx = if ((tellMeFunction $ extractCondition ifx) mapx) then ((handlePattern $ extractIfAction ifx) mapx)
else ((handlePattern $ extractElseAction elsex) mapx)
where ifx = (ifthenelse expr) !! 0
elsex = (ifthenelse expr) !! 1
createMap :: [String] -> Map String String
createMap maildata = fromList $ Data.List.zip recipientInfo maildata
-- IF-THEN-ELSE for example
isExpression :: Text -> Bool
isExpression txt
| Data.Text.head txt == '@' = True
| otherwise = False
-- | Sends an e-mail with given text to list of e-mail addresses
-- | using given configuration. Throws appropriate error upon failure.
--sendMail :: Configuration -> Text -> [String] -> IO ()
-- {% @if (condition) action -> condition
extractCondition :: Text -> Text
extractCondition txt = Data.Text.tail $ Data.Text.dropWhile (/= '(') (Data.Text.takeWhile (/= ')') txt)
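-- e.g. (illustrative): extractCondition "@if (isProf) Dear professor" == "isProf",
-- and extractIfAction on the same input yields "Dear professor".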
-- {% @if (condition) action -> action
extractIfAction :: Text -> Text
extractIfAction txt = strip $ Data.Text.tail $ Data.Text.dropWhile (/= ')') txt
extractElseAction :: Text -> Text
extractElseAction expr = strip $ Data.Text.unwords $ Data.List.tail $ Data.Text.words expr
ifthenelse :: Text -> [Text]
ifthenelse expr = Data.List.init $ Data.List.tail $ Data.Text.splitOn "@" expr
tellMeFunction arg
| arg == "isProf" = isProf
| arg == "isStudent" = isStudent
| arg == "isTA" = isTA
whatRole :: Map String String -> String
whatRole user = case Data.Map.lookup "role" user of Nothing -> error "No info about the role provided for this user."
Just x -> x
isProf :: Map String String -> Bool
isProf user = whatRole user == "professor"
isTA :: Map String String -> Bool
isTA user = whatRole user == "TA"
isStudent :: Map String String -> Bool
isStudent user = whatRole user == "student"
-- Helper function that finds all the expressions between {% and %}
-- extracts parts of the template that need to be replaced... or have if-then-else conditions
toBeReplaced :: Text -> [Text]
toBeReplaced message = Data.List.tail $ Data.List.map (\x -> strip $ (splitOn closeSign x) !! 0) (splitOn openingSign message)
where openingSign = pack "{%"
closeSign = pack "%}"
|
cromulen/puh-project
|
src/Email.hs
|
mit
| 5,278
| 0
| 12
| 1,360
| 1,210
| 656
| 554
| 85
| 2
|
{-# LANGUAGE DeriveDataTypeable #-}
module Algebraic.Nested.Type
where
import Autolib.ToDoc hiding ( empty )
import Autolib.Reader
import qualified Autolib.Set as S
import Autolib.Size
import Autolib.Depth
import Autolib.Choose
import Data.Typeable
example :: Type Integer
example = read "{ 2, {}, {3, {4}}}"
data Type a = Make ( S.Set ( Item a ))
deriving ( Eq, Ord, Typeable )
instance ( Ord a, ToDoc a ) => ToDoc ( Type a ) where
toDoc ( Make xs )
= braces $ fsep $ punctuate comma
$ map toDoc $ S.setToList xs
instance ( Ord a, Reader a ) => Reader ( Type a ) where
reader = my_braces $ do
xs <- Autolib.Reader.sepBy reader my_comma
return $ Make $ S.mkSet xs
instance Size ( Type a ) where
size ( Make xs ) = sum $ map size $ S.setToList xs
full_size ( Make xs ) = succ $ sum $ map full_item_size $ S.setToList xs
top_length ( Make xs ) = S.cardinality xs
instance Depth ( Type a ) where
depth ( Make xs ) = 1 + maximum ( 0 : map depth ( S.setToList xs ) )
flatten ( Make xs ) = concat $ map flatten_item $ S.setToList xs
-----------------------------------------------------------------------
data Item a = Unit a | Packed ( Type a )
deriving ( Eq, Ord, Typeable )
instance ( Ord a, ToDoc a ) => ToDoc ( Item a ) where
toDoc ( Unit a ) = toDoc a
toDoc ( Packed p ) = toDoc p
instance ( Ord a, Reader a ) => Reader ( Item a ) where
reader
= do n <- reader ; return $ Packed n
<|> do i <- reader ; return $ Unit i
instance Size ( Item a ) where
size ( Unit a ) = 1
size ( Packed t ) = 1 + size t
full_item_size i = case i of
Unit a -> 1
Packed t -> 1 + full_size t
flatten_item i = case i of
Unit a -> [ a ]
Packed t -> flatten t
instance Depth ( Item a ) where
depth ( Unit a ) = 0
depth ( Packed t ) = depth t
|
marcellussiegburg/autotool
|
collection/src/Algebraic/Nested/Type.hs
|
gpl-2.0
| 1,870
| 0
| 13
| 519
| 769
| 386
| 383
| 49
| 2
|
module DistanceTables where
import qualified Data.Vector as V
import Control.Parallel.Strategies
import Data.List (nub, transpose)
import Data.Ix (range)
import Piece
import Board
import R
-- | Returns the first element that is immediately repeated; assumes the list is
-- infinite or contains such a duplicate (used below to detect the fixed point of
-- the distance-table iteration).
getFirstDupe :: Eq a => [a] -> a
getFirstDupe (x:y:z)
| x == y = x
| otherwise = getFirstDupe $ y:z
generateDistenceTableObst :: [Location] -> Color -> Rank -> V.Vector Integer
generateDistenceTableObst obst color piece = do
let startingTable = placeObst (V.update (emptyTable) $ V.fromList [(translatePairToVector(8,8), 0)]) obst
let accum = 0
let validMoves = [(8,8)]
getFirstDupe $ generateDistenceTable' validMoves startingTable (accum + 1) color piece
smallRing :: [Location] -> Piece -> V.Vector Integer
smallRing obst piece = do
let startingTable = placeObst (V.update (emptyTable) $ V.fromList [(translatePairToVector(8,8), 0)]) obst
let accum = 0
let validMoves = [(8,8)]
let shortDist = head $ generateDistenceTable' validMoves startingTable (accum + 1) (color piece) (rank piece)
applyToChessBoard (location piece) shortDist
bigRing :: V.Vector Integer -> Integer -> V.Vector Integer
bigRing dTable dist = V.map substituteDist dTable
where substituteDist x = if (x == dist) then 1 else 0
oval :: V.Vector Integer -> Integer -> V.Vector Integer
oval sumTbl len = bigRing sumTbl len
nextj_all_table smallR bigR ovl = V.map and3 (V.zip3 smallR bigR ovl)
where and3 = (\(x,y,z) -> if (x==1&&y==1&&z==1) then 1 else 0)
nextj_all smallR bigR ovl = getNext_jLocations $ nextj_all_table smallR bigR ovl
generateDistenceTable :: Color -> Rank -> V.Vector Integer
generateDistenceTable color piece = do
let startingTable = V.update (emptyTable) $ V.fromList [(translatePairToVector(8,8), 0)]
let accum = 0
let validMoves = [(8,8)]
getFirstDupe $ generateDistenceTable' validMoves startingTable (accum + 1) color piece
generateDistenceTable' ::
(Eq b, Num b) => [Location] -> V.Vector b -> b -> Color -> Rank -> [V.Vector b]
generateDistenceTable' validMoves lastMove accum color piece = do
let newPieces = map (makeChessDistancePiece color piece ) validMoves
--let validMoves2 = nub.concat $ map (genaricBoardMovments distanceBoard) newPieces
let validMoves2 = nub.concat $ map (advancedBoardMovments lastMove) newPieces
let filteredValidMoves = filter (\x -> lastMove V.! (translatePairToVector x) == (-1)) validMoves2
let vecUpdatePairs2 = zip (map translatePairToVector filteredValidMoves) (repeat accum)
let newMove = V.update lastMove $ V.fromList vecUpdatePairs2
newMove : (generateDistenceTable' validMoves2 newMove (accum + 1) color piece )
applyToChessBoard :: Location -> V.Vector a -> V.Vector a
applyToChessBoard locat@(x0, y0) dTable = V.fromList [dTable V.! translatePairToVector(x) | x <- offsetBoard]
where offsetBoard = [(x,y) | y <- [y0..7+y0], x <- [x0..7+x0]]
-- where offsetBoard = [(x,y) | y <- [(7+x0), (6+x0) .. x0], x <- [(7+x0), (6 + x0) ..x0]]
appliedDistenceTable piece obstList = do
applyToChessBoard (location piece) $ generateDistenceTableObst (map (\x -> (xLocat - (fst x) + 8, yLocat - (snd x) + 8) ) obstList) (color piece) (rank piece)
where xLocat = fst $ location piece
yLocat = snd $ location piece
mapx_p table destination = table V.! (translateChessPairToVector destination)
sumTable :: (Num c, Ord c) => V.Vector c -> V.Vector c -> V.Vector c
sumTable x y = V.zipWith mixVectors x y
mixVectors :: (Num a, Ord a) =>a -> a -> a
mixVectors x y
| x < 0 = x
| y < 0 = y
| otherwise = x + y
--indexToChessLocation :: Integer -> Location
indexToChessLocation index = (8-(index `mod` 8),8-(index `div` 8))
locationOnChessboard :: Location -> String
locationOnChessboard (x,y)
| x == 8 = "a" ++ (show y)
| x == 7 = "b" ++ (show y)
| x == 6 = "c" ++ (show y)
| x == 5 = "d" ++ (show y)
| x == 4 = "e" ++ (show y)
| x == 3 = "f" ++ (show y)
| x == 2 = "g" ++ (show y)
| x == 1 = "h" ++ (show y)
| otherwise = "xx"
trajectoryToString traj = do
let locats = map locationOnChessboard traj
concat ["a("++l++")" | l <- locats]
trajectoryToDotString [] = ""
trajectoryToDotString (x:[]) = ""
trajectoryToDotString (x:xs) = " " ++ (locationOnChessboard x) ++ " -> " ++ (locationOnChessboard (head xs)) ++ "\n" ++ trajectoryToDotString xs
getIndexOfnonZero table = filter (>=0) $ getIndexOfnonZero' table 0
getIndexOfnonZero' table index
| V.length table > index = (if table V.! index /= 0 then index else -1) : getIndexOfnonZero' table (index + 1)
| otherwise = []
getIndexOfNLength table nLength = filter (>=0) $ getIndexOfNLength' nLength table 0
getIndexOfNLength' n table index
| V.length table > index = (if table V.! index == n then index else -1) : getIndexOfNLength' n table (index + 1)
| otherwise = []
getNLengthLocations table n = map indexToChessLocation $ getIndexOfNLength table n
getNext_jLocations table = map indexToChessLocation $ getIndexOfnonZero table
buildTrajectoryBundle loopCount piece destination obsticals
| (location piece) == destination = [[location piece]]
| loopCount >= 124 = []
| rank piece == Pawn = bJTforP piece destination obsticals
| otherwise = do
let x = piece
let cbx = appliedDistenceTable x obsticals
if mapx_p cbx destination < (0) --is destination reachable?
then []
else do
let y = moveChessPieceUnchecked x destination
let cby = appliedDistenceTable y obsticals
let smallRingX = smallRing obsticals x
let bigRingX = bigRing cbx loopCount
let ovalX = oval (sumTable cbx cby) (mapx_p cbx destination)
let next_j = nextj_all smallRingX bigRingX ovalX
let currentList = [[location piece]]
bJT' currentList (loopCount + 1) piece destination obsticals cbx ovalX next_j
bJT = buildTrajectoryBundle
bJT' _ 124 _ _ _ _ _ _ = []
bJT' lastList _ _ _ _ _ _ [] = lastList
bJT' lastList loopCount piece destination obsticals cbx oval current_j
| location piece == destination = lastList
| otherwise = do
let smallRingX = smallRing obsticals piece
let bigRingX = bigRing cbx loopCount
let next_j = nextj_all smallRingX bigRingX oval
let movesList = concat $ [bJT 1 (moveChessPiece piece x) destination obsticals | x <- current_j]
let finalBundles = [l ++ j | j <- movesList , l <- lastList]
filter (verifyDestination destination) finalBundles
--TODO: Why doesn't bJT' work for pawns? Need to look into this.
bJTforP piece destination obsticals = do
if mapx_p (appliedDistenceTable piece obsticals) destination < (0) --is destination unreachable?
then [[]]
else do
let yMod = snd (location piece) - snd destination
if yMod > 0 then do
let xRange = repeat $ fst destination
[ zip xRange $ reverse [snd destination .. snd (location piece)] ]
else do
let xRange = repeat $ fst destination
[ zip xRange $ [snd (location piece) .. snd destination] ]
builtAcceptableTrajectoriesBundle loopCount piece destination obsticals maxLength = do
let x = piece
let cbx = appliedDistenceTable x obsticals
let y = moveChessPieceUnchecked x destination
let cby = appliedDistenceTable y obsticals
let distanceSum = sumTable cbx cby
let minDistence = V.minimum distanceSum
let maxDistence = V.maximum distanceSum
bAJT' loopCount piece destination obsticals maxLength x y cbx cby distanceSum minDistence maxDistence
bAJT = builtAcceptableTrajectoriesBundle
bAJT' loopCount piece destination obsticals maxLength x y cbx cby distanceSum minDistence maxDistence
| maxLength < minDistence = [[]]
| maxLength == minDistence = bJT loopCount piece destination obsticals
-- | maxLength > maxDistence = bAJT' loopCount piece destination obsticals maxDistence x y cbx cby distanceSum minDistence maxDistence
| otherwise = do
let acceptableLengths = drop 1 [minDistence .. maxLength]
let acceptableMidpoints = filter (\x -> x /= (location piece) && x /= destination) $ concat [getNLengthLocations distanceSum n | n <- acceptableLengths]
let midPointBundles = concat [combineBundles piece midPoint destination obsticals | midPoint <- acceptableMidpoints]
let shortestBundles = bJT 1 piece destination obsticals
let finalBundles = shortestBundles ++ midPointBundles
filter (verifyDestination destination) finalBundles
combineBundles piece midPoint destination obsticals = do
let bundleToMidpoint = map init $ filter (verifyDestination midPoint) $ bJT 1 piece midPoint obsticals
let bundleFromMidpoint = filter (verifyDestination destination) $ bJT 1 (moveChessPieceUnchecked piece midPoint) destination obsticals
[i ++ j | i <- bundleToMidpoint, j <- bundleFromMidpoint]
verifyDestination destination [] = False
verifyDestination destination x = (last x) == destination
|
joshuaunderwood7/HaskeLinGeom
|
DistanceTables.hs
|
gpl-3.0
| 9,293
| 0
| 19
| 2,187
| 3,255
| 1,604
| 1,651
| 158
| 3
|
{-# LANGUAGE NoImplicitPrelude #-}
module Lamdu.Infer.Internal.Subst
( HasVar(..), CompositeHasVar
, Subst(..), intersect
, CanSubst(..)
, fromRenames
) where
import Prelude.Compat hiding (null, lookup)
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Map.Utils as MapUtils
import Data.Maybe (fromMaybe)
import Data.Set (Set)
import Lamdu.Expr.Scheme (Scheme(..))
import Lamdu.Expr.Type (Type)
import qualified Lamdu.Expr.Type as T
import Lamdu.Expr.TypeVars (TypeVars(..))
import qualified Lamdu.Expr.TypeVars as TypeVars
import Text.PrettyPrint (Doc, nest, text, vcat, (<>), ($+$), (<+>))
import Text.PrettyPrint.HughesPJClass (Pretty(..))
type SubSubst t = Map (T.Var t) t
data Subst = Subst
{ substTypes :: SubSubst Type
, substRecordTypes :: SubSubst T.Product
, substSumTypes :: SubSubst T.Sum
} deriving Show
pPrintMap :: (Pretty k, Pretty v) => Map k v -> Doc
pPrintMap =
vcat . map prettyPair . Map.toList
where
prettyPair (k, v) = pPrint k <+> text ", " <+> pPrint v
instance Pretty Subst where
pPrint (Subst t r s) =
text "Subst:"
$+$ nest 4
( vcat
[ pPrintMap t
, pPrintMap r
, pPrintMap s
]
)
null :: Subst -> Bool
null (Subst t r s) = Map.null t && Map.null r && Map.null s
unionDisjoint :: (Pretty a, Pretty k, Ord k) => Map k a -> Map k a -> Map k a
unionDisjoint m1 m2 =
Map.unionWithKey collision m1 m2
where
collision k v0 v1 =
error $ show $ vcat
[ text "Given non-disjoint maps! Key=" <> pPrint k
, text " V0=" <> pPrint v0
, text " V1=" <> pPrint v1
, text " in " <> pPrint (Map.toList m1)
, text " vs " <> pPrint (Map.toList m2)
]
instance Monoid Subst where
mempty = Subst Map.empty Map.empty Map.empty
mappend subst0@(Subst t0 r0 s0) subst1@(Subst t1 r1 s1)
| null subst1 = subst0
| otherwise =
Subst
(t1 `unionDisjoint` Map.map (apply subst1) t0)
(r1 `unionDisjoint` Map.map (apply subst1) r0)
(s1 `unionDisjoint` Map.map (apply subst1) s0)
intersectMapSet :: Ord k => Set k -> Map k a -> Map k a
intersectMapSet s m = Map.intersection m $ Map.fromSet (const ()) s
intersect :: TypeVars -> Subst -> Subst
intersect (TypeVars tvs rtvs stvs) (Subst ts rs ss) =
Subst (intersectMapSet tvs ts) (intersectMapSet rtvs rs) (intersectMapSet stvs ss)
class TypeVars.Free a => CanSubst a where
apply :: Subst -> a -> a
class (TypeVars.VarKind t, CanSubst t) => HasVar t where
new :: T.Var t -> t -> Subst
lookup :: T.Var t -> Subst -> Maybe t
class TypeVars.CompositeVarKind p => CompositeHasVar p where
compositeNew :: SubSubst (T.Composite p) -> Subst
compositeGet :: Subst -> SubSubst (T.Composite p)
instance CompositeHasVar p => CanSubst (T.Composite p) where
apply _ T.CEmpty = T.CEmpty
apply s (T.CVar n) = fromMaybe (T.CVar n) $ lookup n s
apply s (T.CExtend n t r) = T.CExtend n (apply s t) (apply s r)
instance CanSubst Type where
apply s (T.TVar n) = fromMaybe (T.TVar n) $ lookup n s
apply s (T.TInst n p) = T.TInst n $ apply s <$> p
apply s (T.TFun t1 t2) = T.TFun (apply s t1) (apply s t2)
apply s (T.TRecord r) = T.TRecord $ apply s r
apply s (T.TSum r) = T.TSum $ apply s r
apply _ (T.TPrim p) = T.TPrim p
remove :: TypeVars -> Subst -> Subst
remove (TypeVars tvs rtvs stvs) (Subst subT subR subS) =
Subst
(MapUtils.differenceSet subT tvs)
(MapUtils.differenceSet subR rtvs)
(MapUtils.differenceSet subS stvs)
instance CanSubst Scheme where
apply s (Scheme forAll constraints typ) =
Scheme forAll
            -- One need not apply subst on constraints because those are on forAll vars
constraints
(apply cleanS typ)
where
cleanS = remove forAll s
instance HasVar Type where
{-# INLINE new #-}
new tv t = mempty { substTypes = Map.singleton tv t }
{-# INLINE lookup #-}
lookup tv s = Map.lookup tv (substTypes s)
instance CompositeHasVar T.ProductTag where
{-# INLINE compositeGet #-}
compositeGet = substRecordTypes
{-# INLINE compositeNew #-}
compositeNew v = mempty { substRecordTypes = v }
instance CompositeHasVar T.SumTag where
{-# INLINE compositeGet #-}
compositeGet = substSumTypes
{-# INLINE compositeNew #-}
compositeNew v = mempty { substSumTypes = v }
instance CompositeHasVar p => HasVar (T.Composite p) where
{-# INLINE new #-}
new tv t = compositeNew $ Map.singleton tv t
{-# INLINE lookup #-}
lookup tv t = Map.lookup tv (compositeGet t)
{-# INLINE fromRenames #-}
fromRenames :: TypeVars.Renames -> Subst
fromRenames (TypeVars.Renames tvRenames prodRenames sumRenames) =
Subst
(fmap TypeVars.lift tvRenames)
(fmap TypeVars.lift prodRenames)
(fmap TypeVars.lift sumRenames)
|
da-x/Algorithm-W-Step-By-Step
|
Lamdu/Infer/Internal/Subst.hs
|
gpl-3.0
| 5,083
| 0
| 14
| 1,382
| 1,813
| 940
| 873
| 121
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-|
Module : Data.Valet.Utils.Renderers
Description : Some pre-built renderers for Valet.
Copyright : (c) Leonard Monnier, 2015
License : GPL-3
Maintainer : leonard.monnier@gmail.com
Stability : experimental
Portability : portable
-}
module Data.Valet.Utils.Renderers
( Vame(..)
, Errors
) where
import Data.Monoid
import qualified Data.Map.Strict as M
import qualified Data.Text as T
{-|
View, analysis, modifications and error renderer.
This renderer is an empty container which can then be filled in with a concrete
implementation of each of its components.
-}
data Vame v a m e b = Vame
{ _view :: v
, _analysis :: a
, _modif :: m
, _error :: e
, _value :: b
}
instance (Monoid v, Monoid a, Monoid m, Monoid e, Monoid b) =>
Monoid (Vame v a m e b) where
mempty = Vame mempty mempty mempty mempty mempty
mappend (Vame v1 a1 m1 e1 x1) (Vame v2 a2 m2 e2 x2) =
Vame (v1 <> v2) (a1 <> a2) (m1 <> m2) (e1 <> e2) (x1 <> x2)
{-|
Simple renderer for errors as a strict Map.
All the errors of a given 'Value' will be reported under the same
key of the Map.
-}
type Errors = M.Map T.Text [T.Text]
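-- A small illustration (ours, not part of the library): combining two error maps
-- that report on the same field keeps both messages under that field's key.
exampleErrors :: Errors
exampleErrors =
    M.unionWith (++)
        (M.singleton (T.pack "age") [T.pack "must be a number"])
        (M.singleton (T.pack "age") [T.pack "must be positive"])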
|
momomimachli/Valet
|
src/Data/Valet/Utils/Renderers.hs
|
gpl-3.0
| 1,221
| 0
| 8
| 301
| 265
| 155
| 110
| 19
| 0
|
module Types where
import Options
--- this type is just used for parsing command line options
--- they are processed into an `App`, which gets passed around
--- to lower-level functions.
data KjOptions = KjOptions { optListOnly :: Bool
, optDetailOnly :: Bool
, optCatFile :: Bool
, optAutoRestart :: Bool
, optVerbose :: Bool
}
instance Options KjOptions where
defineOptions =
pure KjOptions
<*> mkViewOpt "list" "Print all available scripts (in machine readable format)"
<*> mkViewOpt "detail" "Print all available scripts (with docstring if available)"
<*> mkViewOpt "cat" "display contents of script"
<*> mkViewOpt "auto-restart" "automatically restart script when it terminates"
<*> mkViewOpt "verbose" "run in verbose mode"
where mkViewOpt long desc =
defineOption optionType_bool
(\o -> o { optionLongFlags = [long],
optionShortFlags = [head long],
optionDescription = desc,
optionDefault = False })
data RunMode = RunMode_List
| RunMode_Detail
| RunMode_Cat
| RunMode_Execute
data App = App
{ _app_runMode :: RunMode
, _app_autoRestart :: Bool
, _app_verbose :: Bool
, _app_args :: [String]
}
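-- A minimal sketch (ours; the real conversion lives elsewhere in kj) of how a
-- parsed KjOptions value plus the remaining command-line arguments could be
-- processed into the App mentioned above.
mkApp :: KjOptions -> [String] -> App
mkApp opts args = App
  { _app_runMode     = runMode
  , _app_autoRestart = optAutoRestart opts
  , _app_verbose     = optVerbose opts
  , _app_args        = args
  }
  where
    runMode
      | optListOnly opts   = RunMode_List
      | optDetailOnly opts = RunMode_Detail
      | optCatFile opts    = RunMode_Cat
      | otherwise          = RunMode_Execute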
|
steventlamb/kj
|
src/Types.hs
|
gpl-3.0
| 1,440
| 0
| 14
| 504
| 222
| 129
| 93
| 30
| 0
|
{-# LANGUAGE OverloadedStrings #-}
module Types where
import Data.Tree
import Data.Aeson
--todo: move this to DirTree or someplace else
type DirTree = Tree FilePath
type Name = String
data Version = Version String | UnknownVersion
instance Show Version where
show (UnknownVersion) = "UnknownVersion"
show (Version a) = a
data Plugin = Plugin Name Version
deriving(Show)
instance ToJSON Plugin where
toJSON (Plugin n v) = object ["name" .= n, "version" .= (show v)]
data Website = Website {
getWebsiteType :: WebsiteType,
getVersion :: Version,
getPlugins :: [Plugin],
getDirTree :: DirTree
}
instance Show Website where
show (Website websiteType version plugins (Node path _ )) = show (path, websiteType, version, plugins)
instance ToJSON Website where
toJSON (Website t v ps td) = object ["path" .= rootLabel td, "type" .= show t, "version" .= show v, "plugins" .= ps]
type Conditions = [[FilePath]]
data WebsiteType = Wordpress | Drupal | UnknownType
deriving (Show)
-- todo: should this code be under settings?
instance FromJSON WebsiteType where
parseJSON "wordpress" = return Wordpress
parseJSON "drupal" = return Drupal
parseJSON _ = return UnknownType
|
MarkArts/wubstuts
|
src/Types.hs
|
gpl-3.0
| 1,213
| 0
| 10
| 231
| 370
| 202
| 168
| 30
| 0
|
module StackTypes where
--
-- * Part 1: A Rank-Based Type System for the Stack Language
--
-- ** The abstract syntax
--
type Prog = [Cmd]
data Cmd = Push Int
| Pop Int
| Add
| Mul
| Dup
| Inc
| Swap
deriving(Eq,Show)
type Stack = [Int]
type Rank = Int
type CmdRank = (Int,Int)
-- ** The semantic function that yields the semantics of a program
--
prog :: Prog -> Stack -> Maybe Stack
prog [] s = Just s
prog (c:cs) s = cmd c s >>= prog cs
-- ** The semantic function that yields the semantics of a command
--
cmd :: Cmd -> Stack -> Maybe Stack
cmd (Push n) s = Just (n:s)
cmd (Pop k) s = Just (drop k s)
cmd Add (n:k:s) = Just (n + k:s)
cmd Mul (n:k:s) = Just (n * k:s)
cmd Dup (n:s) = Just (n:n:s)
cmd Inc (n:s) = Just (n + 1:s)
cmd Swap (n:k:s) = Just (k:n:s)
cmd _ _ = Nothing
-- | 1. Define the function rankC that maps each stack operation to its rank
--
rankC :: Cmd -> CmdRank
rankC (Push _) = (0, 1)
rankC (Pop i) = (i, 0)
rankC (Add) = (2, 1)
rankC (Mul) = (2, 1)
rankC (Dup) = (1, 2)
rankC (Inc) = (1, 1)
rankC (Swap) = (2, 2)
-- | 2. Define the auxiliary function rankP that computes the rank of programs
--
rankP :: Prog -> Maybe Rank
rankP [] = Just 0
rankP progs = rankProgramHelper progs 0
rankProgramHelper :: Prog -> Rank -> Maybe Rank
rankProgramHelper [] progs_rank = Just progs_rank
rankProgramHelper (progs_head:progs_tail) progs_rank =
    let (current_pops, current_pushes) = rankC progs_head in
        if current_pops <= progs_rank then rankProgramHelper progs_tail (progs_rank + current_pushes - current_pops)
else Nothing
-- | 3. Define the semantic function semStatTC for evaluating stack programs
--
semStatTC :: Prog -> Maybe Stack
semStatTC prog_list =
if (rankP prog_list) /= Nothing then prog prog_list []
else Nothing
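-- | A tiny worked example (ours, not part of the assignment): this program has
-- rank Just 1, so it passes the static check and evaluates to a singleton stack.
exampleProg :: Prog
exampleProg = [Push 2, Push 3, Add]
-- semStatTC exampleProg == Just [5]
-- semStatTC [Add] == Nothing, because Add needs two operands on an empty stack.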
-- | EXTRA CREDIT
--
prog' = undefined
-- * Part 2: Runtime Stack
--
-- * Part 3: Static and Dynamic Scope
--
|
caperren/Archives
|
OSU Coursework/CS 381 - Programming Language Fundamentals/Homework 4/StackRank.perrenc.hs
|
gpl-3.0
| 2,017
| 0
| 11
| 527
| 726
| 396
| 330
| 47
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.KMS.GetKeyPolicy
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Retrieves a policy attached to the specified key.
--
-- <http://docs.aws.amazon.com/kms/latest/APIReference/API_GetKeyPolicy.html>
module Network.AWS.KMS.GetKeyPolicy
(
-- * Request
GetKeyPolicy
-- ** Request constructor
, getKeyPolicy
-- ** Request lenses
, gkpKeyId
, gkpPolicyName
-- * Response
, GetKeyPolicyResponse
-- ** Response constructor
, getKeyPolicyResponse
-- ** Response lenses
, gkprPolicy
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.KMS.Types
import qualified GHC.Exts
data GetKeyPolicy = GetKeyPolicy
{ _gkpKeyId :: Text
, _gkpPolicyName :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'GetKeyPolicy' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gkpKeyId' @::@ 'Text'
--
-- * 'gkpPolicyName' @::@ 'Text'
--
getKeyPolicy :: Text -- ^ 'gkpKeyId'
-> Text -- ^ 'gkpPolicyName'
-> GetKeyPolicy
getKeyPolicy p1 p2 = GetKeyPolicy
{ _gkpKeyId = p1
, _gkpPolicyName = p2
}
-- | A unique identifier for the customer master key. This value can be a globally
-- unique identifier or the fully specified ARN to a key. Key ARN Example -
-- arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012 Globally Unique Key ID Example - 12345678-1234-1234-123456789012
--
gkpKeyId :: Lens' GetKeyPolicy Text
gkpKeyId = lens _gkpKeyId (\s a -> s { _gkpKeyId = a })
-- | String that contains the name of the policy. Currently, this must be
-- "default". Policy names can be discovered by calling 'ListKeyPolicies'.
gkpPolicyName :: Lens' GetKeyPolicy Text
gkpPolicyName = lens _gkpPolicyName (\s a -> s { _gkpPolicyName = a })
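-- For instance (our illustration, not part of the generated binding), a request
-- for the default policy of a key identified by its globally unique ID:
--
-- > getKeyPolicy "12345678-1234-1234-1234-123456789012" "default"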
newtype GetKeyPolicyResponse = GetKeyPolicyResponse
{ _gkprPolicy :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'GetKeyPolicyResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gkprPolicy' @::@ 'Maybe' 'Text'
--
getKeyPolicyResponse :: GetKeyPolicyResponse
getKeyPolicyResponse = GetKeyPolicyResponse
{ _gkprPolicy = Nothing
}
-- | A policy document in JSON format.
gkprPolicy :: Lens' GetKeyPolicyResponse (Maybe Text)
gkprPolicy = lens _gkprPolicy (\s a -> s { _gkprPolicy = a })
instance ToPath GetKeyPolicy where
toPath = const "/"
instance ToQuery GetKeyPolicy where
toQuery = const mempty
instance ToHeaders GetKeyPolicy
instance ToJSON GetKeyPolicy where
toJSON GetKeyPolicy{..} = object
[ "KeyId" .= _gkpKeyId
, "PolicyName" .= _gkpPolicyName
]
instance AWSRequest GetKeyPolicy where
type Sv GetKeyPolicy = KMS
type Rs GetKeyPolicy = GetKeyPolicyResponse
request = post "GetKeyPolicy"
response = jsonResponse
instance FromJSON GetKeyPolicyResponse where
parseJSON = withObject "GetKeyPolicyResponse" $ \o -> GetKeyPolicyResponse
<$> o .:? "Policy"
|
dysinger/amazonka
|
amazonka-kms/gen/Network/AWS/KMS/GetKeyPolicy.hs
|
mpl-2.0
| 3,991
| 0
| 9
| 893
| 514
| 312
| 202
| 62
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.Devices.SetState
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Sets whether a device\'s access to Google services is enabled or
-- disabled. The device state takes effect only if enforcing EMM policies
-- on Android devices is enabled in the Google Admin Console. Otherwise,
-- the device state is ignored and all devices are allowed access to Google
-- services. This is only supported for Google-managed users.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.devices.setState@.
module Network.Google.Resource.AndroidEnterprise.Devices.SetState
(
-- * REST Resource
DevicesSetStateResource
-- * Creating a Request
, devicesSetState
, DevicesSetState
-- * Request Lenses
, dssEnterpriseId
, dssPayload
, dssUserId
, dssDeviceId
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.devices.setState@ method which the
-- 'DevicesSetState' request conforms to.
type DevicesSetStateResource =
"androidenterprise" :>
"v1" :>
"enterprises" :>
Capture "enterpriseId" Text :>
"users" :>
Capture "userId" Text :>
"devices" :>
Capture "deviceId" Text :>
"state" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] DeviceState :>
Put '[JSON] DeviceState
-- | Sets whether a device\'s access to Google services is enabled or
-- disabled. The device state takes effect only if enforcing EMM policies
-- on Android devices is enabled in the Google Admin Console. Otherwise,
-- the device state is ignored and all devices are allowed access to Google
-- services. This is only supported for Google-managed users.
--
-- /See:/ 'devicesSetState' smart constructor.
data DevicesSetState = DevicesSetState'
{ _dssEnterpriseId :: !Text
, _dssPayload :: !DeviceState
, _dssUserId :: !Text
, _dssDeviceId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'DevicesSetState' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dssEnterpriseId'
--
-- * 'dssPayload'
--
-- * 'dssUserId'
--
-- * 'dssDeviceId'
devicesSetState
:: Text -- ^ 'dssEnterpriseId'
-> DeviceState -- ^ 'dssPayload'
-> Text -- ^ 'dssUserId'
-> Text -- ^ 'dssDeviceId'
-> DevicesSetState
devicesSetState pDssEnterpriseId_ pDssPayload_ pDssUserId_ pDssDeviceId_ =
DevicesSetState'
{ _dssEnterpriseId = pDssEnterpriseId_
, _dssPayload = pDssPayload_
, _dssUserId = pDssUserId_
, _dssDeviceId = pDssDeviceId_
}
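-- For example (our illustration; @ds@ stands for a 'DeviceState' payload built
-- elsewhere), a request enabling or disabling a device's access:
--
-- > devicesSetState "myEnterpriseId" ds "myUserId" "myDeviceId"
--
-- The individual fields can still be adjusted afterwards via the lenses below.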
-- | The ID of the enterprise.
dssEnterpriseId :: Lens' DevicesSetState Text
dssEnterpriseId
= lens _dssEnterpriseId
(\ s a -> s{_dssEnterpriseId = a})
-- | Multipart request metadata.
dssPayload :: Lens' DevicesSetState DeviceState
dssPayload
= lens _dssPayload (\ s a -> s{_dssPayload = a})
-- | The ID of the user.
dssUserId :: Lens' DevicesSetState Text
dssUserId
= lens _dssUserId (\ s a -> s{_dssUserId = a})
-- | The ID of the device.
dssDeviceId :: Lens' DevicesSetState Text
dssDeviceId
= lens _dssDeviceId (\ s a -> s{_dssDeviceId = a})
instance GoogleRequest DevicesSetState where
type Rs DevicesSetState = DeviceState
type Scopes DevicesSetState =
'["https://www.googleapis.com/auth/androidenterprise"]
requestClient DevicesSetState'{..}
= go _dssEnterpriseId _dssUserId _dssDeviceId
(Just AltJSON)
_dssPayload
androidEnterpriseService
where go
= buildClient
(Proxy :: Proxy DevicesSetStateResource)
mempty
|
rueshyna/gogol
|
gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/Devices/SetState.hs
|
mpl-2.0
| 4,675
| 0
| 18
| 1,132
| 552
| 330
| 222
| 88
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BinaryAuthorization.Systempolicy.GetPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the current system policy in the specified location.
--
-- /See:/ <https://cloud.google.com/binary-authorization/ Binary Authorization API Reference> for @binaryauthorization.systempolicy.getPolicy@.
module Network.Google.Resource.BinaryAuthorization.Systempolicy.GetPolicy
(
-- * REST Resource
SystempolicyGetPolicyResource
-- * Creating a Request
, systempolicyGetPolicy
, SystempolicyGetPolicy
-- * Request Lenses
, sgpXgafv
, sgpUploadProtocol
, sgpAccessToken
, sgpUploadType
, sgpName
, sgpCallback
) where
import Network.Google.BinaryAuthorization.Types
import Network.Google.Prelude
-- | A resource alias for @binaryauthorization.systempolicy.getPolicy@ method which the
-- 'SystempolicyGetPolicy' request conforms to.
type SystempolicyGetPolicyResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Policy
-- | Gets the current system policy in the specified location.
--
-- /See:/ 'systempolicyGetPolicy' smart constructor.
data SystempolicyGetPolicy =
SystempolicyGetPolicy'
{ _sgpXgafv :: !(Maybe Xgafv)
, _sgpUploadProtocol :: !(Maybe Text)
, _sgpAccessToken :: !(Maybe Text)
, _sgpUploadType :: !(Maybe Text)
, _sgpName :: !Text
, _sgpCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SystempolicyGetPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sgpXgafv'
--
-- * 'sgpUploadProtocol'
--
-- * 'sgpAccessToken'
--
-- * 'sgpUploadType'
--
-- * 'sgpName'
--
-- * 'sgpCallback'
systempolicyGetPolicy
:: Text -- ^ 'sgpName'
-> SystempolicyGetPolicy
systempolicyGetPolicy pSgpName_ =
SystempolicyGetPolicy'
{ _sgpXgafv = Nothing
, _sgpUploadProtocol = Nothing
, _sgpAccessToken = Nothing
, _sgpUploadType = Nothing
, _sgpName = pSgpName_
, _sgpCallback = Nothing
}
-- | V1 error format.
sgpXgafv :: Lens' SystempolicyGetPolicy (Maybe Xgafv)
sgpXgafv = lens _sgpXgafv (\ s a -> s{_sgpXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
sgpUploadProtocol :: Lens' SystempolicyGetPolicy (Maybe Text)
sgpUploadProtocol
= lens _sgpUploadProtocol
(\ s a -> s{_sgpUploadProtocol = a})
-- | OAuth access token.
sgpAccessToken :: Lens' SystempolicyGetPolicy (Maybe Text)
sgpAccessToken
= lens _sgpAccessToken
(\ s a -> s{_sgpAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
sgpUploadType :: Lens' SystempolicyGetPolicy (Maybe Text)
sgpUploadType
= lens _sgpUploadType
(\ s a -> s{_sgpUploadType = a})
-- | Required. The resource name, in the format \`locations\/*\/policy\`.
-- Note that the system policy is not associated with a project.
sgpName :: Lens' SystempolicyGetPolicy Text
sgpName = lens _sgpName (\ s a -> s{_sgpName = a})
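-- For example (our illustration; the location name is made up), a request using
-- the resource-name format documented above:
--
-- > systempolicyGetPolicy "locations/us-west1/policy"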
-- | JSONP
sgpCallback :: Lens' SystempolicyGetPolicy (Maybe Text)
sgpCallback
= lens _sgpCallback (\ s a -> s{_sgpCallback = a})
instance GoogleRequest SystempolicyGetPolicy where
type Rs SystempolicyGetPolicy = Policy
type Scopes SystempolicyGetPolicy =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient SystempolicyGetPolicy'{..}
= go _sgpName _sgpXgafv _sgpUploadProtocol
_sgpAccessToken
_sgpUploadType
_sgpCallback
(Just AltJSON)
binaryAuthorizationService
where go
= buildClient
(Proxy :: Proxy SystempolicyGetPolicyResource)
mempty
|
brendanhay/gogol
|
gogol-binaryauthorization/gen/Network/Google/Resource/BinaryAuthorization/Systempolicy/GetPolicy.hs
|
mpl-2.0
| 4,734
| 0
| 15
| 1,044
| 695
| 406
| 289
| 100
| 1
|
-- This program displays information about the platform
import ViperVM.Platform.Configuration
import ViperVM.Platform.Memory
import ViperVM.Platform.Platform
import ViperVM.Runtime.Logger
import ViperVM.Backends.Common.Buffer
import Data.Traversable (traverse)
import Data.Foldable (forM_)
import Data.Maybe (catMaybes)
import Control.Applicative ( (<$>) )
import Text.Printf (printf)
main :: IO ()
main = do
let
logger = stdOutLogger . filterLevel LogDebug
config = Configuration {
libraryOpenCL = "/usr/lib/libOpenCL.so",
eventHandler = \e -> logger (CustomLog (show e))
}
putStrLn "Initializing platform..."
platform <- initPlatform config
putStrLn "Querying platform infos..."
putStrLn (platformInfo platform)
putStrLn "Allocate a buffer in each memory..."
let mems = memories platform
f (AllocSuccess x) = Just x
f AllocError = Nothing
buffers <- catMaybes <$> traverse (\x -> f <$> bufferAllocate 1024 x) mems
putStrLn (printf "%d buffers have been allocated" (length buffers))
putStrLn "Release buffers..."
forM_ buffers bufferRelease
|
hsyl20/HViperVM
|
apps/PlatformTest.hs
|
lgpl-3.0
| 1,135
| 0
| 17
| 222
| 299
| 153
| 146
| 29
| 2
|
-- This file is part of "Loopless Functional Algorithms".
-- Copyright (c) 2005 Jamie Snape, Oxford University Computing Laboratory.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- https://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module LooplessMixallBinary where
import List (unfoldr)
data Rose a = Node a ([Rose a],[Rose a]) | Splice (Int,Int) ([Rose a],[Rose a])
mixall = unfoldr step . prolog
prolog = fst . shareSpines'
shareSpines' xss = pair reverse (foldr addPair' ([],[]) xss)
where addPair' xs (psl,psr) = if null xs then (psl,psr)
else (Node (xs,False,p,q) rs:psl,Node (sx,even (length xs),p,q) rs:psr)
where sx = reverse xs
(p,q) = shape psr
rs = shareSpines' xss
shape [] = (0,0)
shape (Node (x:xs,swap,p,q) rs:psr) = if swap then (p,q+1)
else (p+q,1)
pair f (x,y) = (f x,f y)
consSplice (Splice (p,q) rs) ps = if p==0 && q==0 then ps
else Splice (p,q) rs:ps
step [] = Nothing
step (Node (x:xs,swap,p,q) rs:ps) = if null xs then Just (x,consSplice sp ps)
else Just (x,consSplice sp (Node (xs,not swap,p,q) rs:ps))
where sp = if swap then Splice (p+q,0) rs
else Splice (p,q) rs
step (Splice (p,q) (t:tr,b:br):ps) = if p==0 then step (b:consSplice (Splice (p,q-1) (tr,br)) ps)
else step (t:consSplice (Splice (p-1,q) (tr,br)) ps)
|
snape/LooplessFunctionalAlgorithms
|
LooplessMixallBinary.hs
|
apache-2.0
| 2,295
| 0
| 14
| 874
| 716
| 401
| 315
| 24
| 4
|
import System.IO
import System.Exit
import Test.HUnit
import Json
main :: IO ()
main = do
counts <- runTestTT (test [jsonTests])
if (errors counts + failures counts == 0)
then exitSuccess
else exitFailure
|
FlaPer87/haskell-marconiclient
|
tests/TestSuite.hs
|
apache-2.0
| 232
| 0
| 11
| 59
| 81
| 42
| 39
| 10
| 2
|
module Network.TShot where
import Network.TShot.Types
import Network.TShot.Parser
import Network.TShot.Database
import Network.TShot.Remote
|
crab2313/tshot
|
Network/TShot.hs
|
artistic-2.0
| 141
| 0
| 4
| 12
| 30
| 20
| 10
| 5
| 0
|
import Data.List (maximumBy)
import Data.Function (on)
splitByComma :: String -> (Int, Int)
splitByComma s = (read a, read (tail b))
where (a, b) = span (\x -> x /= ',') s
readInput :: IO [(Int, Int)]
readInput = readFile "input/p099_base_exp.txt" >>= (return . map splitByComma . words)
-- a^b is maximal exactly when b * log a is maximal, so compare logarithms
-- instead of computing the enormous powers directly.
solve :: [(Int, Int)] -> Int
solve xs = fst $ maximumBy (compare `on` value) (zip [1 .. ] xs)
  where value (_, (a, b)) = fromIntegral b * log (fromIntegral a)
main = readInput >>= (print . solve)
|
foreverbell/project-euler-solutions
|
src/99.hs
|
bsd-3-clause
| 503
| 0
| 11
| 98
| 252
| 139
| 113
| 11
| 1
|
module Chat.Bot.TicTacToe where
import Chat.Bot.Answer.TicTacToe
import Chat.Bot.Misc.TicTacToe
import Chat.Data
import Data.List
-- |
-- LEVEL: Hard
--
-- USAGE: /ttt [POSITION]
--
-- EXAMPLE:
-- > /ttt NW
-- O........
-- > /ttt E
-- O....X...
--
-- This bot is used to play a game of tic-tac-toe.
-- This has been inspired by the following challenge.
-- It's worth trying to implement this in your language of choice. :)
--
-- http://blog.tmorris.net/posts/understanding-practical-api-design-static-typing-and-functional-programming/
-- |
-- Determine if the Position is already taken.
--
-- >>> canMove [(NW, O), (NE, X)] NW
-- False
-- >>> canMove [(NW, O), (NE, X)] E
-- True
--
-- HINTS:
-- any :: (a -> Bool) -> [a] -> Bool
--
canMove :: Board -> Position -> Bool
canMove b p =
notImplemented "TicTacToe.canMove" (canMoveAnswer b p)
-- |
-- If you want to save yourself some time calculating this, the following is a useful trick.
--
-- http://mathworld.wolfram.com/MagicSquare.html
--
-- >>> hasWon [(NW, O), (N, X), (NE, O), (W, X), (E, O), (M, X), (S, O), (SE, X), (SW, O)] O
-- False
-- >>> hasWon [(NW, O), (E, X), (W, O), (S, X), (SW, O)] X
-- False
-- >>> hasWon [(NW, O), (E, X), (W, O), (S, X), (SW, O)] O
-- True
--
-- HINTS:
-- permutations :: [a] -> [[a]]
-- any :: (a -> Bool) -> [a] -> Bool
-- map :: (a -> b) -> [a] -> [b]
-- filter :: (a -> Bool) -> [a] -> [a]
--
hasWon :: Board -> Player -> Bool
hasWon b p =
notImplemented "TicTacToe.hasWon" (hasWonAnswer b p)
-- |
-- >>> newTTT ~> NW
-- O........
-- >>> newTTT ~> NW ~> E ~> W ~> S ~> SW
-- O..O.XOX.
-- >>> newTTT ~> NW ~> N ~> NE ~> W ~> E ~> M ~> S ~> SE ~> SW
-- OXOXXOOOX
-- >>> newTTT ~> NW ~> E ~> W ~> S ~> SW ~> SW
-- O..O.XOX.
-- >>> newTTT ~> NW ~> NW
-- O........
--
-- HINTS:
-- length :: [a] -> Int
-- This will need to call `canMove` and `hasWon`
--
move :: Game -> Position -> Result
move g pos =
notImplemented "TicTacToe.move" (moveAnswer g pos)
-- See Misc/TicTacToe.hs
{-
data Position = NW | N | NE | W | M | E | SW | S | SE
deriving (Bounded, Enum, Eq, Ord, Show)
data Player = O | X
deriving Eq
type Move = (Position, Player)
type Board = [(Position, Player)]
data Game = Game Board Player
deriving (Eq, Show)
data Result = InProgress Game | Draw Board | Won Player Board
deriving Eq
-}
-- | A silly DSL function for making the examples easy to follow/run
(~>) :: Result -> Position -> Result
(~>) r p = case r of
InProgress g -> move g p
r -> r
|
charleso/haskell-in-haste
|
src/Chat/Bot/TicTacToe.hs
|
bsd-3-clause
| 2,532
| 0
| 8
| 570
| 254
| 166
| 88
| 18
| 2
|
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-|
Module : Numeric.AERN.IVP.Solver.Events.SplitNearEvents
Description : hybrid system simulation
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Hybrid system simulation with splitting based on event localisation.
-}
module Numeric.AERN.IVP.Solver.Events.SplitNearEvents
(
solveHybridIVP_UsingPicardAndEventTree_SplitNearEvents
)
where
import Numeric.AERN.IVP.Solver.Events.Locate
import Numeric.AERN.IVP.Solver.Events.EventTree
import Numeric.AERN.IVP.Solver.Picard.UncertainValue
import Numeric.AERN.IVP.Specification.Hybrid
import Numeric.AERN.IVP.Specification.ODE
import Numeric.AERN.IVP.Solver.Bisection
import Numeric.AERN.RmToRn.Domain
import Numeric.AERN.RmToRn.New
import Numeric.AERN.RmToRn.Evaluation
import Numeric.AERN.RmToRn.Integration
import Numeric.AERN.RmToRn.Differentiation
import qualified Numeric.AERN.RealArithmetic.RefinementOrderRounding as ArithInOut
import Numeric.AERN.RealArithmetic.RefinementOrderRounding ((<+>|))
import Numeric.AERN.RealArithmetic.Measures
import Numeric.AERN.RealArithmetic.ExactOps (zero)
--import qualified Numeric.AERN.RealArithmetic.NumericOrderRounding as ArithUpDn
import qualified Numeric.AERN.NumericOrder as NumOrd
import Numeric.AERN.NumericOrder.Operators
import qualified Numeric.AERN.RefinementOrder as RefOrd
import Numeric.AERN.Basics.SizeLimits
import Numeric.AERN.Basics.Consistency
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.List as List
import Data.Maybe (catMaybes)
--import Control.Monad (liftM2)
import Numeric.AERN.Misc.Debug
_ = unsafePrint
solveHybridIVP_UsingPicardAndEventTree_SplitNearEvents ::
(CanAddVariables f,
CanRenameVariables f,
CanAdjustDomains f,
CanEvaluate f,
CanCompose f,
CanChangeSizeLimits f,
CanPartiallyEvaluate f,
HasProjections f,
HasConstFns f,
RefOrd.IntervalLike f,
HasAntiConsistency f,
NumOrd.RefinementRoundedLattice f,
RefOrd.PartialComparison f,
RoundedIntegration f,
RoundedFakeDerivative f,
ArithInOut.RoundedAdd f,
ArithInOut.RoundedSubtr f,
ArithInOut.RoundedMultiply f,
ArithInOut.RoundedMixedDivide f Int,
ArithInOut.RoundedMixedAdd f (Domain f),
ArithInOut.RoundedMixedMultiply f (Domain f),
ArithInOut.RoundedAbs f,
ArithInOut.RoundedReal (Domain f),
RefOrd.IntervalLike (Domain f),
HasAntiConsistency (Domain f),
Domain f ~ Imprecision (Domain f),
solvingInfoODESegment ~ (Maybe ([f],[f]), (Domain f, Maybe [Domain f])),
solvingInfoODE ~ BisectionInfo solvingInfoODESegment (solvingInfoODESegment, Maybe (Domain f)),
solvingInfoEvents ~ (Domain f, Maybe (HybridSystemUncertainState (Domain f)), EventInfo f),
Show f, Show (Domain f), Show (Var f), Show (SizeLimits f),
Eq (Var f))
=>
SizeLimits f {-^ size limits for all function -} ->
SizeLimitsChangeEffort f ->
PartialEvaluationEffortIndicator f ->
CompositionEffortIndicator f ->
EvaluationEffortIndicator f ->
IntegrationEffortIndicator f ->
FakeDerivativeEffortIndicator f ->
RefOrd.PartialCompareEffortIndicator f ->
ArithInOut.AddEffortIndicator f ->
ArithInOut.MultEffortIndicator f ->
ArithInOut.AbsEffortIndicator f ->
NumOrd.MinmaxInOutEffortIndicator f ->
ArithInOut.MixedDivEffortIndicator f Int ->
ArithInOut.MixedAddEffortIndicator f (Domain f) ->
ArithInOut.MixedMultEffortIndicator f (Domain f) ->
ArithInOut.RoundedRealEffortIndicator (Domain f) ->
Domain f {-^ event localisation min step size @s@ -} ->
Domain f {-^ event localisation max step size @s@ -} ->
Int {-^ maximum number of nodes in an event tree -} ->
Domain f {-^ initial widening @delta@ -} ->
Int {-^ @m@ -} ->
Var f {-^ @t0@ - the initial time variable -} ->
Domain f {-^ ode solving min step size @s@ -} ->
Domain f {-^ ode solving max step size @s@ -} ->
Imprecision (Domain f) {-^ split improvement threshold @eps@ -} ->
HybridIVP f
->
(
Maybe (HybridSystemUncertainState (Domain f))
,
[(
Domain f
-- end time of this segment (including the event resolution sub-segment)
,
Maybe (HybridSystemUncertainState (Domain f))
,
Map.Map HybSysMode
(
solvingInfoODE,
Maybe (HybridSystemUncertainState (Domain f)),
Maybe solvingInfoEvents
)
)
]
)
solveHybridIVP_UsingPicardAndEventTree_SplitNearEvents
sizeLimits effSizeLims effPEval effCompose effEval effInteg effDeriv effInclFn
effAddFn effMultFn effAbsFn effMinmaxFn
effDivFnInt effAddFnDom effMultFnDom effDom
locMinStepSize locMaxStepSize maxNodes
delta m t0Var odeMinStepSize odeMaxStepSize splitImprovementThreshold
hybivpG
=
solve hybivpG
where
solve hybivp =
solveHybridIVP_SplitNearEvents
solveHybridNoSplitting
solveODEWithSplitting
effEval effPEval effDom
locMinStepSize locMaxStepSize
hybivp
solveODEWithSplitting =
solveODEIVPUncertainValueExactTime_UsingPicard_Bisect
shouldWrap shouldShrinkWrap
sizeLimits effSizeLims effCompose effEval effInteg effDeriv effInclFn
effAddFn effMultFn effAbsFn effMinmaxFn
effDivFnInt effAddFnDom effMultFnDom effDom
delta m odeMinStepSize odeMaxStepSize splitImprovementThreshold
where
shouldWrap = True
shouldShrinkWrap = False
solveHybridNoSplitting hybivp =
(maybeFinalStateWithInvariants, (tEnd, maybeFinalStateWithInvariants, eventInfo))
where
tEnd = hybivp_tEnd hybivp
maybeFinalStateWithInvariants =
checkEmpty $
fmap filterInvariants maybeFinalState
where
checkEmpty (Just finalState)
| Map.null finalState =
error $
"mode invariant failed on a value passed between two segments" ++
"; maybeFinalState = " ++ show maybeFinalState
checkEmpty r = r
filterInvariants st =
Map.mapMaybeWithKey filterInvariantsVec st
where
filterInvariantsVec mode vec =
invariant vec
where
Just invariant =
Map.lookup mode modeInvariants
modeInvariants = hybsys_modeInvariants $ hybivp_system hybivp
[(_, eventInfo)] = modeEventInfoList
(maybeFinalState, modeEventInfoList) =
solveHybridIVP_UsingPicardAndEventTree
sizeLimits effPEval effCompose effEval effInteg effInclFn effAddFn effMultFn effAddFnDom effDom
maxNodes
delta m
t0Var
hybivp
solveHybridIVP_SplitNearEvents ::
(CanAddVariables f,
CanEvaluate f,
CanPartiallyEvaluate f,
CanCompose f,
CanAdjustDomains f,
HasProjections f,
HasConstFns f,
RefOrd.PartialComparison f,
RoundedIntegration f,
RefOrd.IntervalLike f,
ArithInOut.RoundedAdd f,
ArithInOut.RoundedMixedAdd f (Domain f),
ArithInOut.RoundedReal (Domain f),
RefOrd.IntervalLike(Domain f),
HasAntiConsistency (Domain f),
Domain f ~ Imprecision (Domain f),
Show f, Show (Domain f), Show (Var f), Show (SizeLimits f),
Show solvingInfoODESegmentOther,
solvingInfoODESegment ~ (Maybe ([f],[f]), solvingInfoODESegmentOther),
solvingInfoODE ~ BisectionInfo solvingInfoODESegment (solvingInfoODESegment, prec)
)
=>
(HybridIVP f -> (Maybe (HybridSystemUncertainState (Domain f)), solvingInfoEvents))
-- ^ solver to use on small segments that may contain events
->
(ODEIVP f -> (Maybe [Domain f], solvingInfoODE))
-- ^ solver to use on large segments before event localisation
->
EvaluationEffortIndicator f
->
PartialEvaluationEffortIndicator f
->
ArithInOut.RoundedRealEffortIndicator (Domain f)
->
Domain f -- ^ minimum segment length
->
Domain f -- ^ maximum segment length
->
(HybridIVP f) -- ^ problem to solve
->
(
Maybe (HybridSystemUncertainState (Domain f))
,
[(
Domain f
-- ^ end time of this segment (including the event resolution sub-segment)
,
Maybe (HybridSystemUncertainState (Domain f))
-- ^ state at the end time of this segment (if simulation has not failed)
,
Map.Map HybSysMode
(
solvingInfoODE,
Maybe (HybridSystemUncertainState (Domain f)),
Maybe solvingInfoEvents
)
-- ^ solving information (typically including an enclosure of all solutions)
)
]
)
solveHybridIVP_SplitNearEvents
solveHybridNoSplitting
solveODEWithSplitting
effEval _effPEval effDom
minStepSize _maxStepSize
(hybivpG :: HybridIVP f)
=
(finalState, segments)
{-
overview:
(1) apply solveODEWithSplitting over T for each initial mode/value combination
(2) for each computed enclosure, locate the first event on T, obtaining:
(maybe) interval T_mode \subseteq T where first event must occur + set of possible event types
(3) compute (maybe) T_e as follows: the left endpoint is the leftmost point of all T_mode,
the right endpoint is the rightmost point of all T_mode that transitively overlap with the
left-most T_mode.
(4)
(a) if we have T_ev \subseteq T, set t_R = \rightendpoint{T_ev}
and apply solveHybridNoSplitting on T_e to compute value A_R at t_R
(b) if we do not have any event, return [segment info]
(5) if t_R < \rightendpoint{T},
recursively apply this computation on the interval [t_R, \rightendpoint{T}]
-}
where
(_, finalState, _) = last segments
segments = splitSolve hybivpG
splitSolve hybivp =
(tEventR, stateAtTEventR, simulationInfoModeMap) : rest
where
effJoinMeet = ArithInOut.rrEffortJoinMeet sampleD effDom
effMinmax = ArithInOut.rrEffortMinmaxInOut sampleD effDom
-- effAdd = ArithInOut.fldEffortAdd sampleD $ ArithInOut.rrEffortField sampleD effDom
sampleD = tEventR
rest =
case (stateAtTEventR, tEventR <? tEnd) of
-- solving up to tEventR has not failed and there is more to solve:
(Just state, Just True) -> splitSolve (hybivpRest state)
_ -> []
where
hybivpRest midState =
hybivp
{
hybivp_tStart = tEventR,
hybivp_initialStateEnclosure = midState
}
stateAtTEventR =
case states of
[] -> Nothing
_ -> Just $ foldl1 (mergeHybridStates effJoinMeet) states
where
states = catMaybes $ map getState $ Map.elems simulationInfoModeMap
getState (_, state, _) = state
simulationInfoModeMap = Map.mapWithKey processEvents firstDipModeMap
processEvents mode (noEventsSolution, locateDipResult) =
case locateDipResult of
LDResNone ->
(noEventsSolutionUpTo tEventR, noEventsStateAt tEventR, Nothing)
LDResSome _certainty (tEventL, _) _possibleEvents
| ((tEventR <=? tEventL) == Just True)
-- an event was located but it could not happen before tEventR
-> (noEventsSolutionUpTo tEventR, noEventsStateAt tEventR, Nothing)
| otherwise
-- call solveHybridIVP_UsingPicardAndEventTree over (tEventL, tEventR)
->
(noEventsSolutionUpTo tEventL, stateAfterEvents, maybeSolvingInfo)
where
(stateAfterEvents, maybeSolvingInfo) = solveEvents tEventL
where
noEventsSolutionUpTo t =
-- cut off noEventsSolution at tEventR:
-- unsafePrint
-- (
-- "noEventsSolutionUpToR:"
-- ++ "\n tStart = " ++ show tStart
-- ++ "\n tEnd = " ++ show tEnd
-- ++ "\n tEventR = " ++ show tEventR
-- ++ "\n noEventsSolution =\n"
-- ++ showBisectionInfo (\indent info -> indent ++ show info) (\indent info -> indent) " " noEventsSolution
-- ) $
bisectionInfoTrimAt
effDom trimInfo removeInfo
noEventsSolution (tStart, tEnd) t
where
removeInfo (_, otherInfo) = (Nothing, otherInfo)
trimInfo (Nothing, otherInfo) = (Nothing, otherInfo)
trimInfo (Just (fns, midVals), otherInfo) =
(Just (trimmedFns, midVals), otherInfo)
where
trimmedFns =
map trimFn fns
trimFn fn =
-- unsafePrint
-- (
-- "solveHybridIVP_UsingPicardAndEventTree: trimInfo:"
-- ++ "\n sizeLimits of fn = " ++ show (getSizeLimits fn)
-- ++ "\n sizeLimits of trimmedFn = " ++ show (getSizeLimits trimmedFn)
-- )
trimmedFn
where
trimmedFn = adjustDomain fn tVar newTDom
newTDom = NumOrd.minOutEff effMinmax tDom t
Just tDom = lookupVar dombox tVar
dombox = getDomainBox fn
noEventsStateAt :: Domain f -> Maybe (HybridSystemUncertainState (Domain f))
noEventsStateAt t =
case valuesVariants of
[] -> Nothing
_ -> Just $ Map.singleton mode values
where
values =
foldl1 (zipWith (<\/>)) valuesVariants
where
(<\/>) = RefOrd.joinOutEff effJoinMeet
valuesVariants = catMaybes valuesMaybeVariants
[valuesMaybeVariants] =
bisectionInfoEvalFn effDom evalFnsAtTEventsR noEventsSolution (tStart, tEnd) t
evalFnsAtTEventsR (Just (fns,_), _) = Just $ map evalFnAtTEventsR fns
evalFnsAtTEventsR _ = Nothing
evalFnAtTEventsR fn = evalAtPointOutEff effEval boxD fn
where
boxD = insertVar tVar t boxFn
boxFn = getDomainBox fn
solveEvents tEventL =
case noEventsStateAt tEventL of
Nothing -> (Nothing, Nothing)
Just midState -> solveEventsFromState midState
where
solveEventsFromState midState =
case finalState2 of
Just _ ->
(finalState2, Just solvingInfo)
_ ->
(Nothing, Nothing)
where
(finalState2, solvingInfo) = solveHybridNoSplitting (hybivpEventRegion midState)
hybivpEventRegion midState =
hybivp
{
hybivp_tStart = tEventL,
hybivp_tEnd = tEventR,
hybivp_initialStateEnclosure = midState
}
tEventR :: Domain f
tEventR =
keepAddingIntersectingDomsAndReturnR leftmostDomR doms
        -- compute an intersection-transitive closure of all doms in dipInfos, starting from leftmostDom
where
keepAddingIntersectingDomsAndReturnR dR domsLeft =
case intersectingDoms of
[] -> dR
_ -> keepAddingIntersectingDomsAndReturnR newR nonintersectingDoms
where
(intersectingDoms, nonintersectingDoms) =
List.partition intersectsDom domsLeft
where
intersectsDom (dL, _) = (dL <? dR) /= Just False
newR = foldl pickTheRightOne dR (map snd intersectingDoms)
where
pickTheRightOne d1 d2
| (d1 >? d2) == Just True = d1
| otherwise = d2
(_, leftmostDomR) =
foldr1 pickTheLeftOne ((tEnd, tEnd) : doms)
where
pickTheLeftOne d1@(d1L,_) d2@(d2L, _)
| (d1L <? d2L) == Just True = d1
| otherwise = d2
doms =
map getLDResDom $ filter (not . isLDResNone) $ map snd $ Map.elems firstDipModeMap
-- firstDipModeMap ::
-- (
-- solvingInfoODESegment ~ (Maybe [f], solvingInfoODESegmentOther),
-- solvingInfoODE ~ (BisectionInfo solvingInfoODESegment (solvingInfoODESegment, prec))
-- )
-- =>
-- Map.Map HybSysMode (solvingInfoODE, LocateDipResult (Domain f) HybSysEventKind)
firstDipModeMap =
Map.mapWithKey locate noEventsSolutionModeMap
where
locate mode noEventsSolution@(bisectionInfo) =
(noEventsSolution, dipInformation)
where
dipInformation =
locateFirstDipAmongMultipleFns
minStepSize
eventsNotRuledOutOnDom
invariantCertainlyViolatedOnDom
invariantIndecisiveThroughoutDom
(tStart, tEnd)
eventsNotRuledOutOnDom d =
Set.fromList $ map fst $ filter maybeActiveOnD eventSpecList
where
maybeActiveOnD (_, (_, _, _, pruneUsingTheGuard)) =
case checkConditionOnBisectedFunction guardIsExcluded d of
Just True -> False
_ -> True
where
guardIsExcluded valueVec =
case pruneUsingTheGuard d valueVec of
Nothing -> Just True
_ -> Nothing
invariantCertainlyViolatedOnDom d =
case checkConditionOnBisectedFunction invariantIsFalse d of
Just True -> True
_ -> False
where
invariantIsFalse valueVec =
case modeInvariant valueVec of
Nothing -> Just True
_ -> Nothing
invariantIndecisiveThroughoutDom _d =
False -- TODO
eventSpecList = Map.toList eventSpecMap
eventSpecMap = hybsys_eventSpecification hybsys mode
Just modeInvariant =
Map.lookup mode modeInvariants
checkConditionOnBisectedFunction valueCondition dom =
bisectionInfoCheckCondition effDom condition bisectionInfo (tStart, tEnd) dom
where
condition (Nothing, _) = Nothing
condition (Just (fns,_), _) =
valueCondition $ map eval fns
eval fn = evalAtPointOutEff effEval boxD fn
where
boxD = insertVar tVar dom boxFn
boxFn = getDomainBox fn
-- makeDetectionInfo (_, _, _, pruneGuard) =
-- ()
-- (otherConditionOnDom, dipFnPositiveOnDom, dipFnNegativeOnDom, dipFnEnclosesZeroOnDom)
-- where
-- otherConditionOnDom =
-- checkConditionOnBisectedFunction id otherCond
-- dipFnNegativeOnDom =
-- checkConditionOnBisectedFunction makeDipFnAsList (\[x] -> x <? (zero x))
-- dipFnPositiveOnDom =
-- checkConditionOnBisectedFunction makeDipFnAsList (\[x] -> (zero x) <? x)
-- dipFnEnclosesZeroOnDom dom =
-- liftM2 (&&)
-- (checkConditionOnBisectedFunction makeDipFnLEAsList leqZero dom)
-- (checkConditionOnBisectedFunction makeDipFnREAsList geqZero dom)
-- where
-- leqZero [x]=
-- x <=? (zero x)
-- geqZero [x]=
-- (zero x) <=? x
-- makeDipFnAsList :: [f] -> [f]
-- makeDipFnAsList fns = [makeDipFn fns]
-- makeDipFnLEAsList fns = [dipFnLE]
-- where
-- (dipFnLE, _) =
-- RefOrd.getEndpointsOut $
-- eliminateAllVarsButT $ makeDipFn fns
-- makeDipFnREAsList fns = [dipFnRE]
-- where
-- (_, dipFnRE) =
-- RefOrd.getEndpointsOut $
-- eliminateAllVarsButT $ makeDipFn fns
-- eliminateAllVarsButT fn =
-- pEvalAtPointOutEff effPEval domboxNoT fn
-- where
-- domboxNoT = removeVar tVar dombox
-- dombox = getDomainBox fn
---- noEventsSolutionModeMap ::
---- Map.Map HybSysMode (BisectionInfo solvingInfoODESegment (solvingInfoODESegment, prec))
noEventsSolutionModeMap =
Map.mapWithKey solve initialStateModeMap
where
solve mode initialValues =
snd $ solveODEWithSplitting (odeivp mode initialValues)
odeivp :: HybSysMode -> [Domain f] -> ODEIVP f
odeivp mode initialValues =
ODEIVP
{
odeivp_description = "ODE for " ++ show mode,
odeivp_componentNames = componentNames,
odeivp_intersectDomain = modeInvariant,
odeivp_field = field,
odeivp_tVar = tVar,
odeivp_tStart = tStart,
odeivp_tEnd = tEnd,
odeivp_makeInitialValueFnVec = makeInitValueFnVec,
odeivp_t0End = tStart,
odeivp_maybeExactValuesAtTEnd = Nothing,
odeivp_valuePlotExtents = error "odeivp_valuePlotExtents deliberately not set",
odeivp_enclosureRangeWidthLimit = (zero tStart) <+>| (100000 :: Int)
}
where
makeInitValueFnVec = makeFnVecFromInitialValues componentNames initialValues
Just field = Map.lookup mode modeFields
Just modeInvariant = Map.lookup mode modeInvariants
tVar = hybivp_tVar hybivp
tStart = hybivp_tStart hybivp
tEnd = hybivp_tEnd hybivp
-- tStepEnd = -- min(tEnd, tStart + maxStepSize)
-- NumOrd.minOutEff effMinmax tEnd tStartPlusMaxStep
-- where
-- (tStartPlusMaxStep, _) =
-- let ?addInOutEffort = effAdd in
-- RefOrd.getEndpointsOut $
-- tStart <+> maxStepSize
-- tDom = RefOrd.fromEndpointsOut (tStart, tEnd)
initialStateModeMap = hybivp_initialStateEnclosure hybivp
hybsys = hybivp_system hybivp
componentNames = hybsys_componentNames hybsys
modeFields = hybsys_modeFields hybsys
modeInvariants = hybsys_modeInvariants hybsys
|
michalkonecny/aern
|
aern-ivp/src/Numeric/AERN/IVP/Solver/Events/SplitNearEvents.hs
|
bsd-3-clause
| 24,808
| 0
| 41
| 9,274
| 3,774
| 2,022
| 1,752
| 374
| 15
|
{-|
Module: Data.Astro.Moon.MoonDetails
Description: Moon Details
Copyright: Alexander Ignatyev, 2016
Moon Details.
-}
module Data.Astro.Moon.MoonDetails
(
MoonDetails(..)
, MoonDistanceUnits(..)
, j2010MoonDetails
, mduToKm
)
where
import Data.Astro.Types (DecimalDegrees)
import Data.Astro.Time.Epoch (j2010)
import Data.Astro.Time.JulianDate (JulianDate(..))
-- | Details of the Moon's orbit at the epoch
data MoonDetails = MoonDetails {
mdEpoch :: JulianDate -- ^ the epoch
, mdL :: DecimalDegrees -- ^ mean longitude at the epoch
, mdP :: DecimalDegrees -- ^ mean longitude of the perigee at the epoch
, mdN :: DecimalDegrees -- ^ mean longitude of the node at the epoch
, mdI :: DecimalDegrees -- ^ inclination of the orbit
, mdE :: Double -- ^ eccentricity of the orbit
, mdA :: Double -- ^ semi-major axis of the orbit
, mdBigTheta :: DecimalDegrees -- ^ angular diameter at the distance `mdA` from the Earth
, mdPi :: DecimalDegrees -- ^ parallax at distance `mdA` from the Earth
} deriving (Show)
-- | Moon distance units, 1 MDU = semi-major axis of the Moon's orbit
newtype MoonDistanceUnits = MDU Double deriving (Show)
j2010MoonDetails :: MoonDetails
j2010MoonDetails = MoonDetails j2010 91.929336 130.143076 291.682547 5.145396 0.0549 384401 0.5181 0.9507
-- | Convert MoonDistanceUnits to km
mduToKm :: MoonDistanceUnits -> Double
mduToKm (MDU p) = p * (mdA j2010MoonDetails)
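-- A minimal usage sketch (not part of the original module): converting a
-- distance given in Moon distance units to kilometres, and reading the epoch
-- of the orbital elements. 'exampleDistanceKm' and 'exampleEpoch' are
-- illustrative names only.
exampleDistanceKm :: Double
exampleDistanceKm = mduToKm (MDU 1.2)   -- 1.2 * 384401 = 461281.2 km

exampleEpoch :: JulianDate
exampleEpoch = mdEpoch j2010MoonDetails -- the J2010.0 epoch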
|
Alexander-Ignatyev/astro
|
src/Data/Astro/Moon/MoonDetails.hs
|
bsd-3-clause
| 1,441
| 0
| 8
| 289
| 224
| 143
| 81
| 24
| 1
|
module Logo.Builtins.IO (ioBuiltins) where
import qualified Data.Map as M
import Control.Applicative ((<$>))
import Control.Monad.Trans (lift, liftIO)
import System.Random (randomIO)
import Logo.Types
turtleIO :: IO a -> LogoEvaluator a
turtleIO = lift . liftIO
pr, random :: [LogoToken] -> LogoEvaluator LogoToken
ioBuiltins :: M.Map String LogoFunctionDef
ioBuiltins = M.fromList
[ ("pr", LogoFunctionDef 1 pr)
, ("random", LogoFunctionDef 1 random)
]
pr [t] = turtleIO $ do
putStrLn (show t)
return $ StrLiteral ""
pr _ = error "Invalid arguments to pr"
random [NumLiteral n] = turtleIO $ (NumLiteral . fromIntegral . (round :: Double -> Integer) . (* n) <$> randomIO)
random _ = error "Invalid arguments to random"
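-- A hedged usage sketch (not part of the original module): looking up a
-- builtin by name and reading its declared arity. This assumes that
-- 'LogoFunctionDef' has exactly the two fields used above (the arity and the
-- evaluator function); 'arityOf' is an illustrative name only.
arityOf :: String -> Maybe Int
arityOf name = fmap (\(LogoFunctionDef n _) -> n) (M.lookup name ioBuiltins)
-- e.g. arityOf "random" == Just 1, arityOf "unknown" == Nothing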
|
deepakjois/hs-logo
|
src/Logo/Builtins/IO.hs
|
bsd-3-clause
| 748
| 0
| 11
| 137
| 262
| 144
| 118
| 19
| 1
|
#!/usr/bin/env runhaskell
{-# LANGUAGE OverloadedStrings
#-}
import Aws.SSSP.App
main = web
|
airbnb/sssp
|
sssp.hs
|
bsd-3-clause
| 96
| 0
| 4
| 15
| 14
| 9
| 5
| 3
| 1
|
import Test.Hspec
import qualified Yesod.Content.PDFSpec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Yesod.Content.PDF" Yesod.Content.PDFSpec.spec
|
alexkyllo/yesod-content-pdf
|
test/Spec.hs
|
bsd-3-clause
| 172
| 0
| 8
| 26
| 53
| 29
| 24
| 7
| 1
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Language.PiSigma.Lexer
( Parser
, angles
, braces
, brackets
, charLiteral
, colon
, comma
, commaSep
, commaSep1
, decimal
, dot
, float
, hexadecimal
, identifier
, integer
, locate
, location
, locReserved
, locReservedOp
, locSymbol
, lexeme
, natural
, naturalOrFloat
, octal
, operator
, parens
, reserved
, reservedOp
, semi
, semiSep
, semiSep1
, squares
, stringLiteral
, symbol
, tokArr
, tokForce
, tokLam
, tokLift
, whiteSpace )
where
import Control.Applicative
import Control.Monad.Identity
import Data.Char
import Text.Parsec.Prim
( Parsec
, Stream (..)
, (<?>)
, getPosition
)
import qualified Text.Parsec.Token
as Token
import Text.ParserCombinators.Parsec
( SourcePos
, choice
, sourceColumn
, sourceLine
, sourceName )
import Text.ParserCombinators.Parsec.Char
import Language.PiSigma.Syntax
( Loc (..) )
import qualified Language.PiSigma.Util.String.Parser
as Parser
instance (Monad m) => Stream Parser.String m Char where
uncons = return . Parser.uncons
type Parser = Parsec Parser.String ()
nonIdentStr :: String
nonIdentStr = [ '('
, ')'
, '['
, ']'
, '{'
, '}' ]
opLetterStr :: String
opLetterStr = [ '!'
, '*'
, ','
, '-'
, ':'
, ';'
, '='
, '>'
, '\\'
, '^'
, '|'
, '♭'
, '♯'
, 'λ'
, '→'
, '∞' ]
-- * The lexical definition of PiSigma. Used to make token parsers.
pisigmaDef :: (Monad m) => Token.GenLanguageDef Parser.String u m
pisigmaDef = Token.LanguageDef
{ Token.commentStart = "{-"
, Token.commentEnd = "-}"
, Token.commentLine = "--"
, Token.nestedComments = True
, Token.identStart = satisfy $ \ c -> not (isSpace c)
&& not (c `elem` nonIdentStr)
&& not (c `elem` opLetterStr)
&& not (isControl c)
&& not (isDigit c)
, Token.identLetter = satisfy $ \ c -> not (isSpace c)
&& not (c `elem` nonIdentStr)
&& not (c `elem` opLetterStr)
&& not (isControl c)
, Token.opStart = oneOf ""
, Token.opLetter = oneOf opLetterStr
, Token.reservedNames = [ "Type"
, "case"
, "in"
, "let"
, "of"
, "split"
, "with"
, "Rec"
, "fold"
, "unfold"
, "as"]
, Token.reservedOpNames = [ "!"
, "*"
, ","
, "->"
, ":"
, ";"
, "="
, "\\"
, "^"
, "|"
, "♭"
, "♯"
, "λ"
, "→"
, "∞" ]
, Token.caseSensitive = True
}
-- * The PiSigma token parser, generated from the lexical definition.
tokenParser :: Token.GenTokenParser Parser.String () Identity
tokenParser = Token.makeTokenParser pisigmaDef
-- * PiSigma parser combinators.
angles :: Parser a -> Parser a
angles = Token.angles tokenParser
braces :: Parser a -> Parser a
braces = Token.braces tokenParser
brackets :: Parser a -> Parser a
brackets = Token.brackets tokenParser
charLiteral :: Parser Char
charLiteral = Token.charLiteral tokenParser
colon :: Parser String
colon = Token.colon tokenParser
comma :: Parser String
comma = Token.comma tokenParser
commaSep :: Parser a -> Parser [a]
commaSep = Token.commaSep tokenParser
commaSep1 :: Parser a -> Parser [a]
commaSep1 = Token.commaSep1 tokenParser
decimal :: Parser Integer
decimal = Token.decimal tokenParser
dot :: Parser String
dot = Token.dot tokenParser
float :: Parser Double
float = Token.float tokenParser
hexadecimal :: Parser Integer
hexadecimal = Token.hexadecimal tokenParser
identifier :: Parser String
identifier = Token.identifier tokenParser
integer :: Parser Integer
integer = Token.integer tokenParser
lexeme :: Parser a -> Parser a
lexeme = Token.lexeme tokenParser
natural :: Parser Integer
natural = Token.natural tokenParser
naturalOrFloat :: Parser (Either Integer Double)
naturalOrFloat = Token.naturalOrFloat tokenParser
octal :: Parser Integer
octal = Token.octal tokenParser
operator :: Parser String
operator = Token.operator tokenParser
parens :: Parser a -> Parser a
parens = Token.parens tokenParser
reserved :: String -> Parser ()
reserved = Token.reserved tokenParser
reservedOp :: String -> Parser ()
reservedOp = Token.reservedOp tokenParser
semi :: Parser String
semi = Token.semi tokenParser
semiSep :: Parser a -> Parser [a]
semiSep = Token.semiSep tokenParser
semiSep1 :: Parser a -> Parser [a]
semiSep1 = Token.semiSep1 tokenParser
squares :: Parser a -> Parser a
squares = Token.squares tokenParser
stringLiteral :: Parser String
stringLiteral = Token.stringLiteral tokenParser
symbol :: String -> Parser String
symbol = Token.symbol tokenParser
whiteSpace :: Parser ()
whiteSpace = Token.whiteSpace tokenParser
-- * Derived parser combinators
location :: Parser Loc
location = sourcePosToLoc <$> getPosition
locate :: Parser a -> Parser Loc
locate = (location <*)
sourcePosToLoc :: SourcePos -> Loc
sourcePosToLoc p = Loc (sourceName p) (sourceLine p) (sourceColumn p)
locReserved :: String -> Parser Loc
locReserved = locate . reserved
locReservedOp :: String -> Parser Loc
locReservedOp = locate . reservedOp
locSymbol :: String -> Parser Loc
locSymbol xs = locate (symbol xs) <?> show xs
tokArr :: Parser Loc
tokArr = locate (choice [ reservedOp "->"
, reservedOp "→"
] <?> "->")
tokForce :: Parser Loc
tokForce = locate (choice [ reservedOp "!"
, reservedOp "♭"
] <?> "!")
tokLam :: Parser Loc
tokLam = locate (choice [ reservedOp "\\"
, reservedOp "λ"
] <?> "\\")
tokLift :: Parser Loc
tokLift = locate (choice [ reservedOp "^"
, reservedOp "∞"
] <?> "^")
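-- A small illustrative combinator (not part of the original module): parse the
-- reserved word "let" followed by an identifier, returning the keyword's
-- source location together with the bound name. 'letBinder' is a hypothetical
-- helper built only from the combinators exported above.
letBinder :: Parser (Loc, String)
letBinder = (,) <$> locReserved "let" <*> identifier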
|
zlizta/PiSigma
|
src/Language/PiSigma/Lexer.hs
|
bsd-3-clause
| 7,988
| 0
| 15
| 3,518
| 1,692
| 931
| 761
| 224
| 1
|
{-# LANGUAGE RankNTypes, ScopedTypeVariables, GADTs #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- | A simpler, non-transformer version of this package's
-- "Control.Monad.Operational"\'s 'Program' type, using 'Free'
-- directly.
module Control.Monad.Operational.Simple
( module Control.Operational.Class
, Program(..)
, interpret
, fromProgram
, ProgramView(..)
, view
) where
import Control.Applicative
import Control.Monad.Free
import Control.Operational.Class
import Control.Operational.Instruction
import Data.Functor.Coyoneda
newtype Program instr a =
Program { -- | Interpret the program as a 'Free' monad.
toFree :: Free (Coyoneda instr) a
} deriving (Functor, Applicative, Monad)
instance Operational instr (Program instr) where
singleton = Program . liftF . liftInstr
-- | Interpret a 'Program' by translating each instruction to a
-- 'Monad' action. Does not use 'view'.
interpret :: forall m instr a. (Functor m, Monad m) =>
(forall x. instr x -> m x)
-> Program instr a
-> m a
interpret evalI = retract . hoistFree (liftEvalI evalI) . toFree
-- | Lift a 'Program' to any 'Operational' instance at least as
-- powerful as 'Monad'.
fromProgram
:: (Operational instr m, Functor m, Monad m) => Program instr a -> m a
fromProgram = interpret singleton
data ProgramView instr a where
Return :: a -> ProgramView instr a
(:>>=) :: instr a -> (a -> Program instr b) -> ProgramView instr b
view :: Program instr a -> ProgramView instr a
view = eval . toFree
where eval (Pure a) = Return a
eval (Free (Coyoneda f i)) = i :>>= (Program . f)
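-- A hedged usage sketch (not part of the original module): a tiny teletype
-- instruction set, a program built with 'singleton', and an interpreter into
-- IO via 'interpret'. 'Teletype', 'echo' and 'runTeletype' are illustrative
-- names, not part of this package's API.
data Teletype a where
  ReadLine  :: Teletype String
  WriteLine :: String -> Teletype ()

echo :: Program Teletype ()
echo = singleton ReadLine >>= singleton . WriteLine

runTeletype :: Program Teletype a -> IO a
runTeletype = interpret step
  where
    step :: Teletype x -> IO x
    step ReadLine      = getLine
    step (WriteLine s) = putStrLn s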
|
sacundim/free-operational
|
Control/Monad/Operational/Simple.hs
|
bsd-3-clause
| 1,739
| 0
| 11
| 382
| 419
| 235
| 184
| 36
| 2
|
-- |
-- Module : Kospi.Data
-- Copyright : Jared Tobin 2012
-- License : BSD3
--
-- Maintainer : jared@jtobin.ca
-- Stability : experimental
-- Portability : unknown
{-# OPTIONS_GHC -Wall #-}
module Kospi.Data
( -- * Data types
Quote(..)
, PQ(..)
) where
import Data.ByteString as B hiding (take)
import qualified Data.ByteString.Char8 as B8
import Data.Function (on)
import Data.Time
import System.Locale (defaultTimeLocale)
-- | A Quote containing only the information we're interested in.
data Quote = Quote { pktTime :: {-# UNPACK #-} !UTCTime
, acceptTime :: {-# UNPACK #-} !UTCTime
, issueCode :: {-# UNPACK #-} !ByteString
, bid5 :: {-# UNPACK #-} !PQ
, bid4 :: {-# UNPACK #-} !PQ
, bid3 :: {-# UNPACK #-} !PQ
, bid2 :: {-# UNPACK #-} !PQ
, bid1 :: {-# UNPACK #-} !PQ
, ask1 :: {-# UNPACK #-} !PQ
, ask2 :: {-# UNPACK #-} !PQ
, ask3 :: {-# UNPACK #-} !PQ
, ask4 :: {-# UNPACK #-} !PQ
, ask5 :: {-# UNPACK #-} !PQ } deriving Eq
-- | Rank Quotes by accept time at the exchange.
instance Ord Quote where
q0 `compare` q1 = let byTime = compare `on` acceptTime
in q0 `byTime` q1
-- | Show Quotes according to spec.
instance Show Quote where
show q = showTimeToPrecision 4 (pktTime q)
++ " " ++ showTimeToPrecision 2 (acceptTime q)
++ " " ++ B8.unpack (issueCode q)
++ " " ++ show (bid5 q)
++ " " ++ show (bid4 q)
++ " " ++ show (bid3 q)
++ " " ++ show (bid2 q)
++ " " ++ show (bid1 q)
++ " " ++ show (ask1 q)
++ " " ++ show (ask2 q)
++ " " ++ show (ask3 q)
++ " " ++ show (ask4 q)
++ " " ++ show (ask5 q)
-- | Price/quantity information for a given quote.
data PQ = PQ {-# UNPACK #-} !Int {-# UNPACK #-} !Int deriving Eq
-- | Show price/quantity information according to spec.
instance Show PQ where show (PQ a b) = show b ++ "@" ++ show a
-- | Pretty-print timestamps according to a custom spec.
showTimeToPrecision :: FormatTime t => Int -> t -> String
showTimeToPrecision n t = formatTime defaultTimeLocale "%F %H:%M:%S" t
++ "." ++ ms ++ " " ++ tz
where ms = take n $ formatTime defaultTimeLocale "%q" t
tz = formatTime defaultTimeLocale "%Z" t
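-- A small usage sketch (not part of the original module): the 'Show' instance
-- for 'PQ' renders quantity@price, so a level of 150 lots at price 2671
-- prints as "150@2671". 'examplePQ' is an illustrative name only.
examplePQ :: String
examplePQ = show (PQ 2671 150)   -- == "150@2671"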
|
jtobin/prompt-pcap
|
Kospi/Data.hs
|
bsd-3-clause
| 2,681
| 0
| 32
| 1,033
| 622
| 339
| 283
| 47
| 1
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
module Web.Stripe.Client
( StripeConfig(..)
, SecretKey(..)
, StripeVersion(..)
, StripeResponseCode(..)
, StripeFailure(..)
, StripeError(..)
, StripeErrorCode(..)
, StripeRequest(..)
, Stripe
, StripeT(StripeT)
, defaultConfig
, runStripeT
, baseSReq
, query
, queryData
, query_
{- Re-Export -}
, StdMethod(..)
) where
import Control.Applicative
import Control.Arrow ((***))
import Control.Exception as EX
import Control.Monad (MonadPlus, join, liftM, mzero)
import Control.Monad.Error (Error, ErrorT, MonadError, MonadIO,
noMsg, runErrorT, strMsg, throwError)
import Control.Monad.State (MonadState, StateT, get, runStateT)
import Control.Monad.Trans (MonadTrans, lift, liftIO)
import Data.Aeson (FromJSON (..), Value (..), decode',
eitherDecode', (.:), (.:?))
import Data.Aeson.Types (parseMaybe)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString.Lazy as BL
import Data.Char (toLower)
import qualified Data.HashMap.Lazy as HML
import Data.Text (Text)
import qualified Data.Text as T
import Network.HTTP.Conduit
import Network.HTTP.Types
import Web.Stripe.Utils (textToByteString)
------------------------
-- General Data Types --
------------------------
-- | Configuration for the 'StripeT' monad transformer.
data StripeConfig = StripeConfig
{ stripeSecretKey :: SecretKey
, stripeCAFile :: FilePath
, stripeVersion :: StripeVersion
} deriving Show
-- | A key used when authenticating to the Stripe API.
newtype SecretKey = SecretKey { unSecretKey :: T.Text } deriving Show
-- | This represents the possible successes that a connection to the Stripe
-- API can encounter. For specificity, a success can be represented by other
-- error codes, and so the same is true in this data type.
--
-- Please consult the official Stripe REST API documentation on error codes
-- at <https://stripe.com/docs/api#errors> for more information.
data StripeResponseCode = OK | Unknown Int deriving (Show, Eq)
-- | This represents the possible failures that a connection to the Stripe API
-- can encounter.
--
-- Please consult the official Stripe REST API documentation on error codes
-- at <https://stripe.com/docs/api#errors> for more information.
data StripeFailure
= BadRequest (Maybe StripeError)
| Unauthorized (Maybe StripeError)
| NotFound (Maybe StripeError)
| PaymentRequired (Maybe StripeError)
| InternalServerError (Maybe StripeError)
| BadGateway (Maybe StripeError)
| ServiceUnavailable (Maybe StripeError)
| GatewayTimeout (Maybe StripeError)
| HttpFailure (Maybe Text)
| OtherFailure (Maybe Text)
deriving (Show, Eq)
-- | Describes a 'StripeFailure' in more detail, categorizing the error and
-- providing additional information about it. At minimum, this is a message,
-- and for 'CardError', this is a message, an even more precise code
-- ('StripeErrorCode'), and potentially a parameter that helps suggest where an
-- error message should be displayed.
--
-- In case the appropriate error could not be determined from the specified
-- type, 'UnknownError' will be returned with the supplied type and message.
--
-- Please consult the official Stripe REST API documentation on error codes
-- at <https://stripe.com/docs/api#errors> for more information.
data StripeError
= InvalidRequestError Text
| APIError Text
| CardError Text StripeErrorCode (Maybe Text) -- message, code, params
| UnknownError Text Text -- type, message
deriving (Show, Eq)
-- | Attempts to describe a 'CardError' in more detail, classifying in what
-- specific way it failed.
--
-- Please consult the official Stripe REST API documentation on error codes
-- at <https://stripe.com/docs/api#errors> for more information.
data StripeErrorCode
= InvalidNumber
| IncorrectNumber
| InvalidExpiryMonth
| InvalidExpiryYear
| InvalidCVC
| ExpiredCard
| InvalidAmount
| IncorrectCVC
| CardDeclined
| Missing
| DuplicateTransaction
| ProcessingError
| UnknownErrorCode Text -- ^ Could not be matched; text gives error name.
deriving (Show, Eq)
-- | Represents a request to the Stripe API, providing the fields necessary to
-- specify a Stripe resource. Typically 'baseSReq' is the desired starting
-- point, as it provides sensible defaults that can be overridden as needed.
data StripeRequest = StripeRequest
{ sMethod :: StdMethod
, sDestination :: [Text]
, sData :: [(B.ByteString, B.ByteString)]
, sQString :: [(String, String)]
} deriving Show
-- | Stripe Version
-- Represents Stripe API Versions
data StripeVersion = V20110915d
| OtherVersion String -- ^ "Format: 2011-09-15-d"
instance Show StripeVersion where
show V20110915d = "2011-09-15-d"
show (OtherVersion x) = x
------------------
-- Stripe Monad --
------------------
-- | A convenience specialization of the 'StripeT' monad transformer in which
-- the underlying monad is IO.
type Stripe a = StripeT IO a
-- | Defines the monad transformer under which all Stripe REST API resource
-- calls take place.
newtype StripeT m a = StripeT
{ unStripeT :: StateT StripeConfig (ErrorT StripeFailure m) a
} deriving ( Functor, Monad, MonadIO, MonadPlus
, MonadError StripeFailure
, MonadState StripeConfig
, Alternative
, Applicative
)
instance MonadTrans StripeT where
lift = StripeT . lift . lift
-- | Runs the 'StripeT' monad transformer with a given 'StripeConfig'. This will
-- handle all of the authorization dance steps necessary to utilize the
-- Stripe API.
--
-- Its use is demonstrated in other functions, such as 'query'.
runStripeT :: MonadIO m => StripeConfig -> StripeT m a -> m (Either StripeFailure a)
runStripeT cfg m =
runErrorT . liftM fst . (`runStateT` cfg) . unStripeT $ m
--------------
-- Querying --
--------------
-- | Provides a default 'StripeConfig'. Essentially, this inserts the 'SecretKey', but
-- leaves other fields blank. This is especially relevant due to the current
-- CA file check bug.
defaultConfig :: SecretKey -> StripeConfig
defaultConfig k = StripeConfig k "" V20110915d
-- | The basic 'StripeRequest' environment upon which all other Stripe API requests
-- will be built. Standard usage involves overriding one or more of the
-- fields. E.g., for a request to \"https://api.stripe.com/v1/coupons\",
-- one would have:
--
-- > baseSReq { sDestination = ["coupons"] }
baseSReq :: StripeRequest
baseSReq = StripeRequest
{ sMethod = GET
, sDestination = []
, sData = []
, sQString = []
}
-- | Queries the Stripe API. This returns the response body along with the
-- 'StripeResponseCode' undecoded. Use 'query' to try to decode it into a 'JSON'
-- type. E.g.,
--
-- > let conf = defaultConfig (SecretKey "key")
-- >
-- > runStripeT conf $
-- > query' baseSReq { sDestination = ["charges"] }
query' :: MonadIO m => StripeRequest -> StripeT m (StripeResponseCode, BL.ByteString)
query' sReq = do
cfg <- get
req' <- maybe (throwError $ strMsg "Error Preparing the Request") return (prepRq cfg sReq)
let req = req' {checkStatus = \_ _ _ -> Nothing, responseTimeout = Just 10000000}
-- _TODO we should be able to pass in a manager rather than using the default manager
rsp' <- liftIO (EX.catch (fmap Right $ withManager $ httpLbs req) (return . Left))
case rsp' of
Left err -> throwError (HttpFailure $ Just (T.pack (show (err :: HttpException))))
Right rsp -> do
code <- toCode (responseStatus rsp) (responseBody rsp)
return (code, responseBody rsp)
-- | Queries the Stripe API and attempts to parse the results into a data type
-- that is an instance of 'JSON'. This is primarily for internal use by other
-- Stripe submodules, which supply the request values accordingly. However,
-- it can also be used directly. E.g.,
--
-- > let conf = defaultConfig (SecretKey "key")
-- >
-- > runStripeT conf $
-- > query baseSReq { sDestination = ["charges"] }
query :: (MonadIO m, FromJSON a) => StripeRequest -> StripeT m (StripeResponseCode, a)
query req = query' req >>= \(code, ans) ->
either (throwError . strMsg .
(\msg -> "JSON parse error: " ++ msg ++ ". json: " ++ show ans))
(return . (code, )) $ eitherDecode' ans
-- | same as `query` but pulls out the value inside a data field and returns that
queryData :: (MonadIO m, FromJSON a) => StripeRequest -> StripeT m (StripeResponseCode, a)
queryData req = query' req >>= \(code, ans) -> do
val <- either (throwError . strMsg . ("JSON parse error: " ++)) return $ eitherDecode' ans
case val of
Object o -> do
objVal <- maybe (throwError $ strMsg "no data in json" ) return $
HML.lookup "data" o
obj <- maybe (throwError $ strMsg "parsed JSON didn't contain object") return $
parseMaybe parseJSON objVal
return (code, obj)
_ -> throwError $ strMsg "JSON was not object"
-- | Acts just like 'query', but on success, throws away the response. Errors
-- contacting the Stripe API will still be reported.
query_ :: MonadIO m => StripeRequest -> StripeT m ()
query_ req = query' req >> return ()
setUserAgent :: C8.ByteString -> Request -> Request
setUserAgent ua req = req { requestHeaders = ("User-Agent", ua) : filteredHeaders }
where
filteredHeaders = filter ((/= "User-Agent") . fst) $ requestHeaders req
-- | Transforms a 'StripeRequest' into a more general 'URI', which can be used to
-- make an authenticated query to the Stripe server.
-- _TODO there is lots of sloppy Text <-> String stuff here.. should fix
prepRq :: StripeConfig -> StripeRequest -> Maybe Request
prepRq StripeConfig{..} StripeRequest{..} =
flip fmap mReq $ \req -> applyBasicAuth k p $ (addBodyUa req)
{ queryString = renderQuery False qs
, requestHeaders = [ ("Stripe-Version", C8.pack . show $ stripeVersion) ]
, method = renderStdMethod sMethod
}
where
k = textToByteString $ unSecretKey stripeSecretKey
p = textToByteString ""
addBodyUa = urlEncodedBody sData . setUserAgent "hs-string/0.2 http-conduit"
mReq = parseUrl . T.unpack $ T.concat [
"https://api.stripe.com:443/v1/"
, T.intercalate "/" sDestination ]
qs = map (C8.pack *** Just . C8.pack) sQString
--------------------
-- Error Handling --
--------------------
-- | Given an HTTP status code and the response body as input, this function
-- determines whether or not the status code represents an error as
-- per Stripe\'s REST API documentation. If it does, 'StripeFailure' is thrown as
-- an error. Otherwise, 'StripeResponseCode' is returned, representing the status
-- of the request.
--
-- If an error is encountered, this function will attempt to decode the
-- response body with 'errorMsg' to retrieve (and return) an explanation with
-- the 'StripeFailure'.
toCode :: Monad m => Status -> BL.ByteString -> StripeT m StripeResponseCode
toCode c body = case statusCode c of
-- Successes
200 -> return OK
-- Failures
400 -> throwError $ BadRequest e
401 -> throwError $ Unauthorized e
404 -> throwError $ NotFound e
402 -> throwError $ PaymentRequired e
500 -> throwError $ InternalServerError e
502 -> throwError $ BadGateway e
503 -> throwError $ ServiceUnavailable e
504 -> throwError $ GatewayTimeout e
-- Unknown; assume success
i -> return $ Unknown i
where e = errorMsg body
-- | Converts a 'String'-represented error code into the 'StripeErrorCode' data
-- type to more descriptively classify errors.
--
-- If the string does not represent a known error code, 'UnknownErrorCode'
-- will be returned with the raw text representing the error code.
toCECode :: T.Text -> StripeErrorCode
toCECode c = case T.map toLower c of
"invalid_number" -> InvalidNumber
"incorrect_number" -> IncorrectNumber
"invalid_expiry_month" -> InvalidExpiryMonth
"invalid_expiry_year" -> InvalidExpiryYear
"invalid_cvc" -> InvalidCVC
"expired_card" -> ExpiredCard
"invalid_amount" -> InvalidAmount
"incorrect_cvc" -> IncorrectCVC
"card_declined" -> CardDeclined
"missing" -> Missing
"duplicate_transaction" -> DuplicateTransaction
"processing_error" -> ProcessingError
_ -> UnknownErrorCode c
-- | This function attempts to decode the contents of a response body as JSON
-- and retrieve an error message in an \"error\" field. E.g.,
--
-- >>> errorMsg "{\"error\":\"Oh no, an error!\"}"
-- Just "Oh no, an error!"
errorMsg :: BL.ByteString -> Maybe StripeError
errorMsg bs = join . fmap getErrorVal $ decode' bs
where
getErrorVal (Object o) = maybe Nothing (parseMaybe parseJSON) (HML.lookup "error" o)
getErrorVal _ = Nothing
-- | Attempts to parse error information provided with each error by the Stripe
-- API. In the parsing, the error is classified as a specific 'StripeError' and
-- any useful data, such as a message explaining the error, is extracted
-- accordingly.
instance FromJSON StripeError where
parseJSON (Object err) = do
type_ <- err .: "type"
msg <- err .: "message"
case T.map toLower type_ of
"invalid_request_error" -> return $ InvalidRequestError msg
"api_error" -> return $ APIError msg
"card_error" -> do
code <- err .: "code"
param <- err .:? "param"
return $ CardError msg (toCECode code) param
_ -> return $ UnknownError type_ msg
parseJSON _ = mzero
-- | Defines the behavior for more general error messages that can be thrown
-- with 'noMsg' and 'strMsg' in combination with 'throwError'.
instance Error StripeFailure where
noMsg = OtherFailure Nothing
strMsg = OtherFailure . Just . T.pack
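-- A hedged usage sketch (not part of the original module): listing charges
-- with the helpers above. The secret key is a placeholder and the response is
-- decoded to a plain aeson 'Value' rather than a dedicated charge type;
-- 'listCharges' is an illustrative name only.
listCharges :: IO (Either StripeFailure (StripeResponseCode, Value))
listCharges =
    runStripeT (defaultConfig (SecretKey "sk_test_placeholder")) $
        query baseSReq { sDestination = ["charges"] }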
|
michaelschade/hs-stripe
|
src/Web/Stripe/Client.hs
|
bsd-3-clause
| 14,781
| 0
| 19
| 3,645
| 2,565
| 1,443
| 1,122
| -1
| -1
|
-- |
-- Module : Network.SMTP.Auth
-- License : BSD-style
--
-- Maintainer : Nicolas DI PRIMA <nicolas@di-prima.fr>
-- Stability : experimental
-- Portability : unknown
--
module Network.SMTP.Auth
(
clientAuth
, serverAuthPlain
) where
import Network.SMTP.Types
import qualified Data.ByteString.Base64 as B64 (encode, decode)
import qualified Data.ByteString.Char8 as BC
-- | identify a user with the PLAIN method
clientPlain :: UserName -> Password -> BC.ByteString
clientPlain user passwd =
B64.encode $ BC.intercalate (BC.pack "\0")
[ user
, passwd
]
-- | auth to a server
clientAuth :: AuthType
-> BC.ByteString
-> UserName
-> Password
-> BC.ByteString
clientAuth PLAIN _ u p = clientPlain u p
clientAuth LOGIN _ _ _ = undefined
clientAuth CRAM_MD5 _ _ _ = undefined
-- | auth a client
serverAuthPlain :: BC.ByteString
-> Either String (UserName, Password)
serverAuthPlain buff =
case B64.decode buff of
Left err -> Left err
Right bs ->
let (login, pw) = BC.span (\c -> c /= '\0') bs
in Right (login, BC.drop 1 pw)
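-- A hedged round-trip sketch (not part of the original module), assuming
-- 'UserName' and 'Password' from Network.SMTP.Types are ByteString synonyms:
-- whatever 'clientPlain' encodes, 'serverAuthPlain' should decode back.
authRoundTrip :: Either String (UserName, Password)
authRoundTrip = serverAuthPlain (clientPlain (BC.pack "alice") (BC.pack "s3cret"))
-- expected: Right ("alice", "s3cret")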
|
NicolasDP/maild
|
Network/SMTP/Auth.hs
|
bsd-3-clause
| 1,231
| 0
| 15
| 376
| 295
| 163
| 132
| 28
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
import Data.Text (Text)
import Data.Aeson
import Data.Aeson.Named
data TBanana = TBanana
{ tshape :: Field "banana-shape" Text
, tsize :: Field "banana size" (Maybe Int)
, tname :: Field "banana's name" Text
} deriving Show
deriveToJSONFields ''TBanana
b = TBanana "foo" (Just 2) "bar"
|
mxswd/named-aeson
|
examples/Banana.hs
|
bsd-3-clause
| 299
| 0
| 11
| 58
| 99
| 53
| 46
| -1
| -1
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1997-1998
\section[BasicTypes]{Miscellaneous types}
This module defines a miscellaneous collection of very simple
types that
\begin{itemize}
\item have no other obvious home
\item don't depend on any other complicated types
\item are used in more than one "part" of the compiler
\end{itemize}
-}
{-# LANGUAGE DeriveDataTypeable #-}
module BasicTypes(
Version, bumpVersion, initialVersion,
ConTag, ConTagZ, fIRST_TAG,
Arity, RepArity,
Alignment,
FunctionOrData(..),
WarningTxt(..), StringLiteral(..),
Fixity(..), FixityDirection(..),
defaultFixity, maxPrecedence, minPrecedence,
negateFixity, funTyFixity,
compareFixity,
RecFlag(..), isRec, isNonRec, boolToRecFlag,
Origin(..), isGenerated,
RuleName, pprRuleName,
TopLevelFlag(..), isTopLevel, isNotTopLevel,
DerivStrategy(..),
OverlapFlag(..), OverlapMode(..), setOverlapModeMaybe,
hasOverlappingFlag, hasOverlappableFlag, hasIncoherentFlag,
Boxity(..), isBoxed,
TupleSort(..), tupleSortBoxity, boxityTupleSort,
tupleParens,
sumParens, pprAlternative,
-- ** The OneShotInfo type
OneShotInfo(..),
noOneShotInfo, hasNoOneShotInfo, isOneShotInfo,
bestOneShot, worstOneShot,
OccInfo(..), seqOccInfo, zapFragileOcc, isOneOcc,
isDeadOcc, isStrongLoopBreaker, isWeakLoopBreaker, isNoOcc,
strongLoopBreaker, weakLoopBreaker,
InsideLam, insideLam, notInsideLam,
OneBranch, oneBranch, notOneBranch,
InterestingCxt,
EP(..),
DefMethSpec(..),
SwapFlag(..), flipSwap, unSwap, isSwapped,
CompilerPhase(..), PhaseNum,
Activation(..), isActive, isActiveIn, competesWith,
isNeverActive, isAlwaysActive, isEarlyActive,
RuleMatchInfo(..), isConLike, isFunLike,
InlineSpec(..), isEmptyInlineSpec,
InlinePragma(..), defaultInlinePragma, alwaysInlinePragma,
neverInlinePragma, dfunInlinePragma,
isDefaultInlinePragma,
isInlinePragma, isInlinablePragma, isAnyInlinePragma,
inlinePragmaSpec, inlinePragmaSat,
inlinePragmaActivation, inlinePragmaRuleMatchInfo,
setInlinePragmaActivation, setInlinePragmaRuleMatchInfo,
SuccessFlag(..), succeeded, failed, successIf,
FractionalLit(..), negateFractionalLit, integralFractionalLit,
SourceText,
IntWithInf, infinity, treatZeroAsInf, mkIntWithInf, intGtLimit
) where
import FastString
import Outputable
import SrcLoc ( Located,unLoc )
import StaticFlags( opt_PprStyle_Debug )
import Data.Data hiding (Fixity)
import Data.Function (on)
{-
************************************************************************
* *
\subsection[Arity]{Arity}
* *
************************************************************************
-}
-- | The number of value arguments that can be applied to a value before it does
-- "real work". So:
-- fib 100 has arity 0
-- \x -> fib x has arity 1
-- See also Note [Definition of arity] in CoreArity
type Arity = Int
-- | Representation Arity
--
-- The number of represented arguments that can be applied to a value before it does
-- "real work". So:
-- fib 100 has representation arity 0
-- \x -> fib x has representation arity 1
-- \(# x, y #) -> fib (x + y) has representation arity 2
type RepArity = Int
{-
************************************************************************
* *
Constructor tags
* *
************************************************************************
-}
-- | Constructor Tag
--
-- Type of the tags associated with each constructor possibility or superclass
-- selector
type ConTag = Int
-- | A *zero-indexed* constructor tag
type ConTagZ = Int
fIRST_TAG :: ConTag
-- ^ Tags are allocated from here for real constructors
-- or for superclass selectors
fIRST_TAG = 1
{-
************************************************************************
* *
\subsection[Alignment]{Alignment}
* *
************************************************************************
-}
type Alignment = Int -- align to next N-byte boundary (N must be a power of 2).
{-
************************************************************************
* *
One-shot information
* *
************************************************************************
-}
-- | If the 'Id' is a lambda-bound variable then it may have lambda-bound
-- variable info. Sometimes we know whether the lambda binding this variable
-- is a \"one-shot\" lambda; that is, whether it is applied at most once.
--
-- This information may be useful in optimisation, as computations may
-- safely be floated inside such a lambda without risk of duplicating
-- work.
data OneShotInfo
= NoOneShotInfo -- ^ No information
| ProbOneShot -- ^ The lambda is probably applied at most once
-- See Note [Computing one-shot info, and ProbOneShot] in Demand
| OneShotLam -- ^ The lambda is applied at most once.
deriving (Eq)
-- | It is always safe to assume that an 'Id' has no lambda-bound variable information
noOneShotInfo :: OneShotInfo
noOneShotInfo = NoOneShotInfo
isOneShotInfo, hasNoOneShotInfo :: OneShotInfo -> Bool
isOneShotInfo OneShotLam = True
isOneShotInfo _ = False
hasNoOneShotInfo NoOneShotInfo = True
hasNoOneShotInfo _ = False
worstOneShot, bestOneShot :: OneShotInfo -> OneShotInfo -> OneShotInfo
worstOneShot NoOneShotInfo _ = NoOneShotInfo
worstOneShot ProbOneShot NoOneShotInfo = NoOneShotInfo
worstOneShot ProbOneShot _ = ProbOneShot
worstOneShot OneShotLam os = os
bestOneShot NoOneShotInfo os = os
bestOneShot ProbOneShot OneShotLam = OneShotLam
bestOneShot ProbOneShot _ = ProbOneShot
bestOneShot OneShotLam _ = OneShotLam
pprOneShotInfo :: OneShotInfo -> SDoc
pprOneShotInfo NoOneShotInfo = empty
pprOneShotInfo ProbOneShot = text "ProbOneShot"
pprOneShotInfo OneShotLam = text "OneShot"
instance Outputable OneShotInfo where
ppr = pprOneShotInfo
{-
************************************************************************
* *
Swap flag
* *
************************************************************************
-}
data SwapFlag
= NotSwapped -- Args are: actual, expected
| IsSwapped -- Args are: expected, actual
instance Outputable SwapFlag where
ppr IsSwapped = text "Is-swapped"
ppr NotSwapped = text "Not-swapped"
flipSwap :: SwapFlag -> SwapFlag
flipSwap IsSwapped = NotSwapped
flipSwap NotSwapped = IsSwapped
isSwapped :: SwapFlag -> Bool
isSwapped IsSwapped = True
isSwapped NotSwapped = False
unSwap :: SwapFlag -> (a->a->b) -> a -> a -> b
unSwap NotSwapped f a b = f a b
unSwap IsSwapped f a b = f b a
{-
************************************************************************
* *
\subsection[FunctionOrData]{FunctionOrData}
* *
************************************************************************
-}
data FunctionOrData = IsFunction | IsData
deriving (Eq, Ord, Data)
instance Outputable FunctionOrData where
ppr IsFunction = text "(function)"
ppr IsData = text "(data)"
{-
************************************************************************
* *
\subsection[Version]{Module and identifier version numbers}
* *
************************************************************************
-}
type Version = Int
bumpVersion :: Version -> Version
bumpVersion v = v+1
initialVersion :: Version
initialVersion = 1
{-
************************************************************************
* *
Deprecations
* *
************************************************************************
-}
-- | A String Literal in the source, including its original raw format for use by
-- source to source manipulation tools.
data StringLiteral = StringLiteral
{ sl_st :: SourceText, -- literal raw source.
-- See Note [Literal source text]
sl_fs :: FastString -- literal string value
} deriving Data
instance Eq StringLiteral where
(StringLiteral _ a) == (StringLiteral _ b) = a == b
-- | Warning Text
--
-- reason/explanation from a WARNING or DEPRECATED pragma
data WarningTxt = WarningTxt (Located SourceText)
[Located StringLiteral]
| DeprecatedTxt (Located SourceText)
[Located StringLiteral]
deriving (Eq, Data)
instance Outputable WarningTxt where
ppr (WarningTxt _ ws)
= doubleQuotes (vcat (map (ftext . sl_fs . unLoc) ws))
ppr (DeprecatedTxt _ ds)
= text "Deprecated:" <+>
doubleQuotes (vcat (map (ftext . sl_fs . unLoc) ds))
{-
************************************************************************
* *
Rules
* *
************************************************************************
-}
type RuleName = FastString
pprRuleName :: RuleName -> SDoc
pprRuleName rn = doubleQuotes (ftext rn)
{-
************************************************************************
* *
\subsection[Fixity]{Fixity info}
* *
************************************************************************
-}
------------------------
data Fixity = Fixity SourceText Int FixityDirection
-- Note [Pragma source text]
deriving Data
instance Outputable Fixity where
ppr (Fixity _ prec dir) = hcat [ppr dir, space, int prec]
instance Eq Fixity where -- Used to determine if two fixities conflict
(Fixity _ p1 dir1) == (Fixity _ p2 dir2) = p1==p2 && dir1 == dir2
------------------------
data FixityDirection = InfixL | InfixR | InfixN
deriving (Eq, Data)
instance Outputable FixityDirection where
ppr InfixL = text "infixl"
ppr InfixR = text "infixr"
ppr InfixN = text "infix"
------------------------
maxPrecedence, minPrecedence :: Int
maxPrecedence = 9
minPrecedence = 0
defaultFixity :: Fixity
defaultFixity = Fixity (show maxPrecedence) maxPrecedence InfixL
negateFixity, funTyFixity :: Fixity
-- Wired-in fixities
negateFixity = Fixity "6" 6 InfixL -- Fixity of unary negate
funTyFixity = Fixity "0" 0 InfixR -- Fixity of '->'
{-
Consider
\begin{verbatim}
a `op1` b `op2` c
\end{verbatim}
@(compareFixity op1 op2)@ tells which way to arrange application, or
whether there's an error.
-}
compareFixity :: Fixity -> Fixity
-> (Bool, -- Error please
Bool) -- Associate to the right: a op1 (b op2 c)
compareFixity (Fixity _ prec1 dir1) (Fixity _ prec2 dir2)
= case prec1 `compare` prec2 of
GT -> left
LT -> right
EQ -> case (dir1, dir2) of
(InfixR, InfixR) -> right
(InfixL, InfixL) -> left
_ -> error_please
where
right = (False, True)
left = (False, False)
error_please = (True, False)
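-- A small worked example (not part of the original module): in @a + b * c@,
-- (+) is infixl 6 and (*) is infixl 7, so 'compareFixity' reports no error
-- and associates to the right, giving a + (b * c).
-- 'exampleCompareFixity' is an illustrative name only.
exampleCompareFixity :: (Bool, Bool)
exampleCompareFixity = compareFixity (Fixity "6" 6 InfixL) (Fixity "7" 7 InfixL)
-- == (False, True), i.e. no error, associate to the right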
{-
************************************************************************
* *
\subsection[Top-level/local]{Top-level/not-top level flag}
* *
************************************************************************
-}
data TopLevelFlag
= TopLevel
| NotTopLevel
isTopLevel, isNotTopLevel :: TopLevelFlag -> Bool
isNotTopLevel NotTopLevel = True
isNotTopLevel TopLevel = False
isTopLevel TopLevel = True
isTopLevel NotTopLevel = False
instance Outputable TopLevelFlag where
ppr TopLevel = text "<TopLevel>"
ppr NotTopLevel = text "<NotTopLevel>"
{-
************************************************************************
* *
Boxity flag
* *
************************************************************************
-}
data Boxity
= Boxed
| Unboxed
deriving( Eq, Data )
isBoxed :: Boxity -> Bool
isBoxed Boxed = True
isBoxed Unboxed = False
instance Outputable Boxity where
ppr Boxed = text "Boxed"
ppr Unboxed = text "Unboxed"
{-
************************************************************************
* *
Recursive/Non-Recursive flag
* *
************************************************************************
-}
-- | Recursivity Flag
data RecFlag = Recursive
| NonRecursive
deriving( Eq, Data )
isRec :: RecFlag -> Bool
isRec Recursive = True
isRec NonRecursive = False
isNonRec :: RecFlag -> Bool
isNonRec Recursive = False
isNonRec NonRecursive = True
boolToRecFlag :: Bool -> RecFlag
boolToRecFlag True = Recursive
boolToRecFlag False = NonRecursive
instance Outputable RecFlag where
ppr Recursive = text "Recursive"
ppr NonRecursive = text "NonRecursive"
{-
************************************************************************
* *
Code origin
* *
************************************************************************
-}
data Origin = FromSource
| Generated
deriving( Eq, Data )
isGenerated :: Origin -> Bool
isGenerated Generated = True
isGenerated FromSource = False
instance Outputable Origin where
ppr FromSource = text "FromSource"
ppr Generated = text "Generated"
{-
************************************************************************
* *
Deriving strategies
* *
************************************************************************
-}
-- | Which technique the user explicitly requested when deriving an instance.
data DerivStrategy
-- See Note [Deriving strategies] in TcDeriv
= DerivStock -- ^ GHC's \"standard\" strategy, which is to implement a
-- custom instance for the data type. This only works for
-- certain types that GHC knows about (e.g., 'Eq', 'Show',
-- 'Functor' when @-XDeriveFunctor@ is enabled, etc.)
| DerivAnyclass -- ^ @-XDeriveAnyClass@
| DerivNewtype -- ^ @-XGeneralizedNewtypeDeriving@
deriving (Eq, Data)
instance Outputable DerivStrategy where
ppr DerivStock = text "stock"
ppr DerivAnyclass = text "anyclass"
ppr DerivNewtype = text "newtype"
{-
************************************************************************
* *
Instance overlap flag
* *
************************************************************************
-}
-- | The semantics allowed for overlapping instances for a particular
-- instance. See Note [Safe Haskell isSafeOverlap] (in `InstEnv.hs`) for an
-- explanation of the `isSafeOverlap` field.
--
-- - 'ApiAnnotation.AnnKeywordId' :
-- 'ApiAnnotation.AnnOpen' @'\{-\# OVERLAPPABLE'@ or
-- @'\{-\# OVERLAPPING'@ or
-- @'\{-\# OVERLAPS'@ or
-- @'\{-\# INCOHERENT'@,
-- 'ApiAnnotation.AnnClose' @`\#-\}`@,
-- For details on above see note [Api annotations] in ApiAnnotation
data OverlapFlag = OverlapFlag
{ overlapMode :: OverlapMode
, isSafeOverlap :: Bool
} deriving (Eq, Data)
setOverlapModeMaybe :: OverlapFlag -> Maybe OverlapMode -> OverlapFlag
setOverlapModeMaybe f Nothing = f
setOverlapModeMaybe f (Just m) = f { overlapMode = m }
hasIncoherentFlag :: OverlapMode -> Bool
hasIncoherentFlag mode =
case mode of
Incoherent _ -> True
_ -> False
hasOverlappableFlag :: OverlapMode -> Bool
hasOverlappableFlag mode =
case mode of
Overlappable _ -> True
Overlaps _ -> True
Incoherent _ -> True
_ -> False
hasOverlappingFlag :: OverlapMode -> Bool
hasOverlappingFlag mode =
case mode of
Overlapping _ -> True
Overlaps _ -> True
Incoherent _ -> True
_ -> False
data OverlapMode -- See Note [Rules for instance lookup] in InstEnv
= NoOverlap SourceText
-- See Note [Pragma source text]
-- ^ This instance must not overlap another `NoOverlap` instance.
-- However, it may be overlapped by `Overlapping` instances,
-- and it may overlap `Overlappable` instances.
| Overlappable SourceText
-- See Note [Pragma source text]
-- ^ Silently ignore this instance if you find a
-- more specific one that matches the constraint
-- you are trying to resolve
--
-- Example: constraint (Foo [Int])
-- instance Foo [Int]
-- instance {-# OVERLAPPABLE #-} Foo [a]
--
-- Since the second instance has the Overlappable flag,
-- the first instance will be chosen (otherwise
-- it's ambiguous which to choose)
| Overlapping SourceText
-- See Note [Pragma source text]
-- ^ Silently ignore any more general instances that may be
-- used to solve the constraint.
--
-- Example: constraint (Foo [Int])
-- instance {-# OVERLAPPING #-} Foo [Int]
-- instance Foo [a]
--
-- Since the first instance has the Overlapping flag,
-- the second---more general---instance will be ignored (otherwise
-- it is ambiguous which to choose)
| Overlaps SourceText
-- See Note [Pragma source text]
-- ^ Equivalent to having both `Overlapping` and `Overlappable` flags.
| Incoherent SourceText
-- See Note [Pragma source text]
-- ^ Behave like Overlappable and Overlapping, and in addition pick
-- an arbitrary one if there are multiple matching candidates, and
-- don't worry about later instantiation
--
-- Example: constraint (Foo [b])
-- instance {-# INCOHERENT -} Foo [Int]
-- instance Foo [a]
-- Without the Incoherent flag, we'd complain that
-- instantiating 'b' would change which instance
-- was chosen. See also note [Incoherent instances] in InstEnv
deriving (Eq, Data)
instance Outputable OverlapFlag where
ppr flag = ppr (overlapMode flag) <+> pprSafeOverlap (isSafeOverlap flag)
instance Outputable OverlapMode where
ppr (NoOverlap _) = empty
ppr (Overlappable _) = text "[overlappable]"
ppr (Overlapping _) = text "[overlapping]"
ppr (Overlaps _) = text "[overlap ok]"
ppr (Incoherent _) = text "[incoherent]"
pprSafeOverlap :: Bool -> SDoc
pprSafeOverlap True = text "[safe]"
pprSafeOverlap False = empty
{-
************************************************************************
* *
Tuples
* *
************************************************************************
-}
data TupleSort
= BoxedTuple
| UnboxedTuple
| ConstraintTuple
deriving( Eq, Data )
tupleSortBoxity :: TupleSort -> Boxity
tupleSortBoxity BoxedTuple = Boxed
tupleSortBoxity UnboxedTuple = Unboxed
tupleSortBoxity ConstraintTuple = Boxed
boxityTupleSort :: Boxity -> TupleSort
boxityTupleSort Boxed = BoxedTuple
boxityTupleSort Unboxed = UnboxedTuple
tupleParens :: TupleSort -> SDoc -> SDoc
tupleParens BoxedTuple p = parens p
tupleParens UnboxedTuple p = text "(#" <+> p <+> ptext (sLit "#)")
tupleParens ConstraintTuple p -- In debug-style write (% Eq a, Ord b %)
| opt_PprStyle_Debug = text "(%" <+> p <+> ptext (sLit "%)")
| otherwise = parens p
{-
************************************************************************
* *
Sums
* *
************************************************************************
-}
sumParens :: SDoc -> SDoc
sumParens p = ptext (sLit "(#") <+> p <+> ptext (sLit "#)")
-- | Pretty print an alternative in an unboxed sum e.g. "| a | |".
pprAlternative :: (a -> SDoc) -- ^ The pretty printing function to use
-> a -- ^ The things to be pretty printed
-> ConTag -- ^ Alternative (one-based)
-> Arity -- ^ Arity
-> SDoc -- ^ 'SDoc' where the alternative has been pretty
-- printed and finally packed into a paragraph.
pprAlternative pp x alt arity =
fsep (replicate (alt - 1) vbar ++ [pp x] ++ replicate (arity - alt - 1) vbar)
{-
************************************************************************
* *
\subsection[Generic]{Generic flag}
* *
************************************************************************
This is the "Embedding-Projection pair" datatype, it contains
two pieces of code (normally either RenamedExpr's or Id's)
If we have a such a pair (EP from to), the idea is that 'from' and 'to'
represents functions of type
from :: T -> Tring
to :: Tring -> T
And we should have
to (from x) = x
T and Tring are arbitrary, but typically T is the 'main' type while
Tring is the 'representation' type. (This just helps us remember
whether to use 'from' or 'to'.)
-}
-- | Embedding Projection pair
data EP a = EP { fromEP :: a, -- :: T -> Tring
toEP :: a } -- :: Tring -> T
{-
Embedding-projection pairs are used in several places:
First of all, each type constructor has an EP associated with it, the
code in EP converts (datatype T) from T to Tring and back again.
Secondly, when we are filling in Generic methods (in the typechecker,
tcMethodBinds), we are constructing bimaps by induction on the structure
of the type of the method signature.
************************************************************************
* *
\subsection{Occurrence information}
* *
************************************************************************
This data type is used exclusively by the simplifier, but it appears in a
SubstResult, which is currently defined in VarEnv, which is pretty near
the base of the module hierarchy. So it seemed simpler to put the
defn of OccInfo here, safely at the bottom
-}
-- | identifier Occurrence Information
data OccInfo
= NoOccInfo -- ^ There are many occurrences, or unknown occurrences
| IAmDead -- ^ Marks unused variables. Sometimes useful for
-- lambda and case-bound variables.
| OneOcc
!InsideLam
!OneBranch
!InterestingCxt -- ^ Occurs exactly once, not inside a rule
-- | This identifier breaks a loop of mutually recursive functions. The field
-- marks whether it is only a loop breaker due to a reference in a rule
| IAmALoopBreaker -- Note [LoopBreaker OccInfo]
!RulesOnly
deriving (Eq)
type RulesOnly = Bool
{-
Note [LoopBreaker OccInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~~
IAmALoopBreaker True <=> A "weak" or rules-only loop breaker
Do not preInlineUnconditionally
IAmALoopBreaker False <=> A "strong" loop breaker
Do not inline at all
See OccurAnal Note [Weak loop breakers]
-}
isNoOcc :: OccInfo -> Bool
isNoOcc NoOccInfo = True
isNoOcc _ = False
seqOccInfo :: OccInfo -> ()
seqOccInfo occ = occ `seq` ()
-----------------
-- | Interesting Context
type InterestingCxt = Bool -- True <=> Function: is applied
-- Data value: scrutinised by a case with
-- at least one non-DEFAULT branch
-----------------
-- | Inside Lambda
type InsideLam = Bool -- True <=> Occurs inside a non-linear lambda
-- Substituting a redex for this occurrence is
-- dangerous because it might duplicate work.
insideLam, notInsideLam :: InsideLam
insideLam = True
notInsideLam = False
-----------------
type OneBranch = Bool -- True <=> Occurs in only one case branch
-- so no code-duplication issue to worry about
oneBranch, notOneBranch :: OneBranch
oneBranch = True
notOneBranch = False
strongLoopBreaker, weakLoopBreaker :: OccInfo
strongLoopBreaker = IAmALoopBreaker False
weakLoopBreaker = IAmALoopBreaker True
isWeakLoopBreaker :: OccInfo -> Bool
isWeakLoopBreaker (IAmALoopBreaker _) = True
isWeakLoopBreaker _ = False
isStrongLoopBreaker :: OccInfo -> Bool
isStrongLoopBreaker (IAmALoopBreaker False) = True -- Loop-breaker that breaks a non-rule cycle
isStrongLoopBreaker _ = False
isDeadOcc :: OccInfo -> Bool
isDeadOcc IAmDead = True
isDeadOcc _ = False
isOneOcc :: OccInfo -> Bool
isOneOcc (OneOcc {}) = True
isOneOcc _ = False
zapFragileOcc :: OccInfo -> OccInfo
zapFragileOcc (OneOcc {}) = NoOccInfo
zapFragileOcc occ = occ
instance Outputable OccInfo where
-- only used for debugging; never parsed. KSW 1999-07
ppr NoOccInfo = empty
ppr (IAmALoopBreaker ro) = text "LoopBreaker" <> if ro then char '!' else empty
ppr IAmDead = text "Dead"
ppr (OneOcc inside_lam one_branch int_cxt)
= text "Once" <> pp_lam <> pp_br <> pp_args
where
pp_lam | inside_lam = char 'L'
| otherwise = empty
pp_br | one_branch = empty
| otherwise = char '*'
pp_args | int_cxt = char '!'
| otherwise = empty
{-
************************************************************************
* *
Default method specification
* *
************************************************************************
The DefMethSpec enumeration just indicates what sort of default method
is used for a class. It is generated from source code, and present in
interface files; it is converted to Class.DefMethInfo before being put in a
Class object.
-}
-- | Default Method Specification
data DefMethSpec ty
= VanillaDM -- Default method given with polymorphic code
| GenericDM ty -- Default method given with code of this type
instance Outputable (DefMethSpec ty) where
ppr VanillaDM = text "{- Has default method -}"
ppr (GenericDM {}) = text "{- Has generic default method -}"
{-
************************************************************************
* *
\subsection{Success flag}
* *
************************************************************************
-}
data SuccessFlag = Succeeded | Failed
instance Outputable SuccessFlag where
ppr Succeeded = text "Succeeded"
ppr Failed = text "Failed"
successIf :: Bool -> SuccessFlag
successIf True = Succeeded
successIf False = Failed
succeeded, failed :: SuccessFlag -> Bool
succeeded Succeeded = True
succeeded Failed = False
failed Succeeded = False
failed Failed = True
{-
************************************************************************
* *
\subsection{Source Text}
* *
************************************************************************
Keeping Source Text for source to source conversions
Note [Pragma source text]
~~~~~~~~~~~~~~~~~~~~~~~~~
The lexer does a case-insensitive match for pragmas, as well as
accepting both UK and US spelling variants.
So
{-# SPECIALISE #-}
{-# SPECIALIZE #-}
{-# Specialize #-}
will all generate ITspec_prag token for the start of the pragma.
In order to be able to do source to source conversions, the original
source text for the token needs to be preserved, hence the
`SourceText` field.
So the lexer will then generate
ITspec_prag "{ -# SPECIALISE"
ITspec_prag "{ -# SPECIALIZE"
ITspec_prag "{ -# Specialize"
for the cases above.
[without the space between '{' and '-', otherwise this comment won't parse]
Note [Literal source text]
~~~~~~~~~~~~~~~~~~~~~~~~~~
The lexer/parser converts literals from their original source text
versions to an appropriate internal representation. This is a problem
for tools doing source to source conversions, so the original source
text is stored in literals where this can occur.
Motivating examples for HsLit
HsChar '\n' == '\x20`
HsCharPrim '\x41`# == `A`
HsString "\x20\x41" == " A"
HsStringPrim "\x20"# == " "#
HsInt 001 == 1
HsIntPrim 002# == 2#
HsWordPrim 003## == 3##
HsInt64Prim 004## == 4##
HsWord64Prim 005## == 5##
HsInteger 006 == 6
For OverLitVal
HsIntegral 003 == 0x003
HsIsString "\x41nd" == "And"
-}
type SourceText = String -- Note [Literal source text],[Pragma source text]
{-
************************************************************************
* *
\subsection{Activation}
* *
************************************************************************
When a rule or inlining is active
-}
-- | Phase Number
type PhaseNum = Int -- Compilation phase
-- Phases decrease towards zero
-- Zero is the last phase
data CompilerPhase
= Phase PhaseNum
| InitialPhase -- The first phase -- number = infinity!
instance Outputable CompilerPhase where
ppr (Phase n) = int n
ppr InitialPhase = text "InitialPhase"
-- See note [Pragma source text]
data Activation = NeverActive
| AlwaysActive
| ActiveBefore SourceText PhaseNum
-- Active only *strictly before* this phase
| ActiveAfter SourceText PhaseNum
-- Active in this phase and later
deriving( Eq, Data )
-- Eq used in comparing rules in HsDecls
-- | Rule Match Information
data RuleMatchInfo = ConLike -- See Note [CONLIKE pragma]
| FunLike
deriving( Eq, Data, Show )
-- Show needed for Lexer.x
data InlinePragma -- Note [InlinePragma]
= InlinePragma
{ inl_src :: SourceText -- Note [Pragma source text]
, inl_inline :: InlineSpec
, inl_sat :: Maybe Arity -- Just n <=> Inline only when applied to n
-- explicit (non-type, non-dictionary) args
-- That is, inl_sat describes the number of *source-code*
-- arguments the thing must be applied to. We add on the
-- number of implicit, dictionary arguments when making
-- the InlineRule, and don't look at inl_sat further
, inl_act :: Activation -- Says during which phases inlining is allowed
, inl_rule :: RuleMatchInfo -- Should the function be treated like a constructor?
} deriving( Eq, Data )
-- | Inline Specification
data InlineSpec -- What the user's INLINE pragma looked like
= Inline
| Inlinable
| NoInline
| EmptyInlineSpec -- Used in a place-holder InlinePragma in SpecPrag or IdInfo,
-- where there isn't any real inline pragma at all
deriving( Eq, Data, Show )
-- Show needed for Lexer.x
{-
Note [InlinePragma]
~~~~~~~~~~~~~~~~~~~
This data type mirrors what you can write in an INLINE or NOINLINE pragma in
the source program.
If you write nothing at all, you get defaultInlinePragma:
inl_inline = EmptyInlineSpec
inl_act = AlwaysActive
inl_rule = FunLike
It's not possible to get that combination by *writing* something, so
if an Id has defaultInlinePragma it means the user didn't specify anything.
If inl_inline = Inline or Inlineable, then the Id should have an InlineRule unfolding.
If you want to know where InlinePragmas take effect: Look in DsBinds.makeCorePair
Note [CONLIKE pragma]
~~~~~~~~~~~~~~~~~~~~~
The ConLike constructor of a RuleMatchInfo is aimed at the following.
Consider first
{-# RULE "r/cons" forall a as. r (a:as) = f (a+1) #-}
g b bs = let x = b:bs in ..x...x...(r x)...
Now, the rule applies to the (r x) term, because GHC "looks through"
the definition of 'x' to see that it is (b:bs).
Now consider
{-# RULE "r/f" forall v. r (f v) = f (v+1) #-}
g v = let x = f v in ..x...x...(r x)...
Normally the (r x) would *not* match the rule, because GHC would be
scared about duplicating the redex (f v), so it does not "look
through" the bindings.
However the CONLIKE modifier says to treat 'f' like a constructor in
this situation, and "look through" the unfolding for x. So (r x)
fires, yielding (f (v+1)).
This is all controlled with a user-visible pragma:
{-# NOINLINE CONLIKE [1] f #-}
The main effects of CONLIKE are:
- The occurrence analyser (OccAnal) and simplifier (Simplify) treat
CONLIKE things like constructors, by ANF-ing them
- New function coreUtils.exprIsExpandable is like exprIsCheap, but
additionally spots applications of CONLIKE functions
- A CoreUnfolding has a field that caches exprIsExpandable
- The rule matcher consults this field. See
Note [Expanding variables] in Rules.hs.
-}
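-- A sketch (not in the original file) of how two common pragmas would map onto
-- this representation, assuming SourceText is simply the literal pragma text:
--
--   {-# INLINE [2] f #-}  ~  defaultInlinePragma { inl_inline = Inline
--                                                 , inl_act    = ActiveAfter "[2]" 2 }
--   {-# NOINLINE g #-}    ~  defaultInlinePragma { inl_inline = NoInline
--                                                 , inl_act    = NeverActive }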
isConLike :: RuleMatchInfo -> Bool
isConLike ConLike = True
isConLike _ = False
isFunLike :: RuleMatchInfo -> Bool
isFunLike FunLike = True
isFunLike _ = False
isEmptyInlineSpec :: InlineSpec -> Bool
isEmptyInlineSpec EmptyInlineSpec = True
isEmptyInlineSpec _ = False
defaultInlinePragma, alwaysInlinePragma, neverInlinePragma, dfunInlinePragma
:: InlinePragma
defaultInlinePragma = InlinePragma { inl_src = "{-# INLINE"
, inl_act = AlwaysActive
, inl_rule = FunLike
, inl_inline = EmptyInlineSpec
, inl_sat = Nothing }
alwaysInlinePragma = defaultInlinePragma { inl_inline = Inline }
neverInlinePragma = defaultInlinePragma { inl_act = NeverActive }
inlinePragmaSpec :: InlinePragma -> InlineSpec
inlinePragmaSpec = inl_inline
-- A DFun has an always-active inline activation so that
-- exprIsConApp_maybe can "see" its unfolding
-- (However, its actual Unfolding is a DFunUnfolding, which is
-- never inlined other than via exprIsConApp_maybe.)
dfunInlinePragma = defaultInlinePragma { inl_act = AlwaysActive
, inl_rule = ConLike }
isDefaultInlinePragma :: InlinePragma -> Bool
isDefaultInlinePragma (InlinePragma { inl_act = activation
, inl_rule = match_info
, inl_inline = inline })
= isEmptyInlineSpec inline && isAlwaysActive activation && isFunLike match_info
isInlinePragma :: InlinePragma -> Bool
isInlinePragma prag = case inl_inline prag of
Inline -> True
_ -> False
isInlinablePragma :: InlinePragma -> Bool
isInlinablePragma prag = case inl_inline prag of
Inlinable -> True
_ -> False
isAnyInlinePragma :: InlinePragma -> Bool
-- INLINE or INLINABLE
isAnyInlinePragma prag = case inl_inline prag of
Inline -> True
Inlinable -> True
_ -> False
inlinePragmaSat :: InlinePragma -> Maybe Arity
inlinePragmaSat = inl_sat
inlinePragmaActivation :: InlinePragma -> Activation
inlinePragmaActivation (InlinePragma { inl_act = activation }) = activation
inlinePragmaRuleMatchInfo :: InlinePragma -> RuleMatchInfo
inlinePragmaRuleMatchInfo (InlinePragma { inl_rule = info }) = info
setInlinePragmaActivation :: InlinePragma -> Activation -> InlinePragma
setInlinePragmaActivation prag activation = prag { inl_act = activation }
setInlinePragmaRuleMatchInfo :: InlinePragma -> RuleMatchInfo -> InlinePragma
setInlinePragmaRuleMatchInfo prag info = prag { inl_rule = info }
instance Outputable Activation where
ppr AlwaysActive = brackets (text "ALWAYS")
ppr NeverActive = brackets (text "NEVER")
ppr (ActiveBefore _ n) = brackets (char '~' <> int n)
ppr (ActiveAfter _ n) = brackets (int n)
instance Outputable RuleMatchInfo where
ppr ConLike = text "CONLIKE"
ppr FunLike = text "FUNLIKE"
instance Outputable InlineSpec where
ppr Inline = text "INLINE"
ppr NoInline = text "NOINLINE"
ppr Inlinable = text "INLINABLE"
ppr EmptyInlineSpec = empty
instance Outputable InlinePragma where
ppr (InlinePragma { inl_inline = inline, inl_act = activation
, inl_rule = info, inl_sat = mb_arity })
= ppr inline <> pp_act inline activation <+> pp_sat <+> pp_info
where
pp_act Inline AlwaysActive = empty
pp_act NoInline NeverActive = empty
pp_act _ act = ppr act
pp_sat | Just ar <- mb_arity = parens (text "sat-args=" <> int ar)
| otherwise = empty
pp_info | isFunLike info = empty
| otherwise = ppr info
isActive :: CompilerPhase -> Activation -> Bool
isActive InitialPhase AlwaysActive = True
isActive InitialPhase (ActiveBefore {}) = True
isActive InitialPhase _ = False
isActive (Phase p) act = isActiveIn p act
isActiveIn :: PhaseNum -> Activation -> Bool
isActiveIn _ NeverActive = False
isActiveIn _ AlwaysActive = True
isActiveIn p (ActiveAfter _ n) = p <= n
isActiveIn p (ActiveBefore _ n) = p > n
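-- Worked examples (a sketch; 'src' stands for an arbitrary SourceText, and
-- remember that phases count down 2, 1, 0):
--   isActiveIn 1 (ActiveAfter  src 2)  == True    -- phase 2 has already been reached
--   isActiveIn 3 (ActiveAfter  src 2)  == False   -- phase 2 not reached yet
--   isActiveIn 1 (ActiveBefore src 2)  == False   -- only active *strictly before* phase 2
--   isActiveIn 3 (ActiveBefore src 2)  == True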
competesWith :: Activation -> Activation -> Bool
-- See Note [Activation competition]
competesWith NeverActive _ = False
competesWith _ NeverActive = False
competesWith AlwaysActive _ = True
competesWith (ActiveBefore {}) AlwaysActive = True
competesWith (ActiveBefore {}) (ActiveBefore {}) = True
competesWith (ActiveBefore _ a) (ActiveAfter _ b) = a < b
competesWith (ActiveAfter {}) AlwaysActive = False
competesWith (ActiveAfter {}) (ActiveBefore {}) = False
competesWith (ActiveAfter _ a) (ActiveAfter _ b) = a >= b
{- Note [Activation competition]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Sometimes a RULE and an inlining may compete, or two RULES.
See Note [Rules and inlining/other rules] in Desugar.
We say that act1 "competes with" act2 iff
act1 is active in the phase when act2 *becomes* active
NB: remember that phases count *down*: 2, 1, 0!
It's too conservative to ensure that the two are never simultaneously
active. For example, a rule might be always active, and an inlining
might switch on in phase 2. We could switch off the rule, but leaving it
active does no harm.
-}
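-- A few consequences of the definition of competesWith above (sketch; 'src' is
-- a dummy SourceText):
--   competesWith AlwaysActive          (ActiveAfter src 2)  == True
--   competesWith (ActiveAfter  src 1)  (ActiveAfter src 2)  == False   -- 1 >= 2 is False
--   competesWith (ActiveBefore src 2)  (ActiveAfter src 1)  == False   -- 2 <  1 is False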
isNeverActive, isAlwaysActive, isEarlyActive :: Activation -> Bool
isNeverActive NeverActive = True
isNeverActive _ = False
isAlwaysActive AlwaysActive = True
isAlwaysActive _ = False
isEarlyActive AlwaysActive = True
isEarlyActive (ActiveBefore {}) = True
isEarlyActive _ = False
-- | Fractional Literal
--
-- Used (instead of Rational) to represent exactly the floating point literal that we
-- encountered in the user's source program. This allows us to pretty-print exactly what
-- the user wrote, which is important e.g. for floating point numbers that cannot be
-- represented as Doubles (we used to go via Double for pretty-printing). See also #2245.
data FractionalLit
= FL { fl_text :: String -- How the value was written in the source
, fl_value :: Rational -- Numeric value of the literal
}
deriving (Data, Show)
-- The Show instance is required for the derived Lexer.x:Token instance when DEBUG is on
negateFractionalLit :: FractionalLit -> FractionalLit
negateFractionalLit (FL { fl_text = '-':text, fl_value = value }) = FL { fl_text = text, fl_value = negate value }
negateFractionalLit (FL { fl_text = text, fl_value = value }) = FL { fl_text = '-':text, fl_value = negate value }
integralFractionalLit :: Integer -> FractionalLit
integralFractionalLit i = FL { fl_text = show i, fl_value = fromInteger i }
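-- Example behaviour (a sketch only; the text field is manipulated by hand so
-- that pretty-printing reproduces what the user wrote):
--   negateFractionalLit (FL "1.5"  (3/2))    gives  FL "-1.5" (-3/2)
--   negateFractionalLit (FL "-1.5" (-3/2))   gives  FL "1.5"  (3/2)
--   integralFractionalLit 7                  gives  FL "7" 7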
-- Comparison operations are needed when grouping literals
-- for compiling pattern-matching (module MatchLit)
instance Eq FractionalLit where
(==) = (==) `on` fl_value
instance Ord FractionalLit where
compare = compare `on` fl_value
instance Outputable FractionalLit where
ppr = text . fl_text
{-
************************************************************************
* *
IntWithInf
* *
************************************************************************
Represents an integer or positive infinity
-}
-- | An integer or infinity
data IntWithInf = Int {-# UNPACK #-} !Int
| Infinity
deriving Eq
-- | A representation of infinity
infinity :: IntWithInf
infinity = Infinity
instance Ord IntWithInf where
compare Infinity Infinity = EQ
compare (Int _) Infinity = LT
compare Infinity (Int _) = GT
compare (Int a) (Int b) = a `compare` b
instance Outputable IntWithInf where
ppr Infinity = char '∞'
ppr (Int n) = int n
instance Num IntWithInf where
(+) = plusWithInf
(*) = mulWithInf
abs Infinity = Infinity
abs (Int n) = Int (abs n)
signum Infinity = Int 1
signum (Int n) = Int (signum n)
fromInteger = Int . fromInteger
(-) = panic "subtracting IntWithInfs"
intGtLimit :: Int -> IntWithInf -> Bool
intGtLimit _ Infinity = False
intGtLimit n (Int m) = n > m
-- | Add two 'IntWithInf's
plusWithInf :: IntWithInf -> IntWithInf -> IntWithInf
plusWithInf Infinity _ = Infinity
plusWithInf _ Infinity = Infinity
plusWithInf (Int a) (Int b) = Int (a + b)
-- | Multiply two 'IntWithInf's
mulWithInf :: IntWithInf -> IntWithInf -> IntWithInf
mulWithInf Infinity _ = Infinity
mulWithInf _ Infinity = Infinity
mulWithInf (Int a) (Int b) = Int (a * b)
-- | Turn a positive number into an 'IntWithInf', where 0 represents infinity
treatZeroAsInf :: Int -> IntWithInf
treatZeroAsInf 0 = Infinity
treatZeroAsInf n = Int n
-- | Inject any integer into an 'IntWithInf'
mkIntWithInf :: Int -> IntWithInf
mkIntWithInf = Int
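-- Sample evaluations (sketch only):
--   Int 3 + Infinity       ==> Infinity
--   Int 4 * Int 5          ==> Int 20
--   treatZeroAsInf 0       ==> Infinity    -- "no limit" is encoded as 0
--   intGtLimit 5 (Int 3)   ==> True
--   intGtLimit 5 Infinity  ==> False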
|
snoyberg/ghc
|
compiler/basicTypes/BasicTypes.hs
|
bsd-3-clause
| 45,043
| 0
| 14
| 13,564
| 5,878
| 3,272
| 2,606
| 563
| 5
|
module Module5.Task19 where
import Control.Monad.Writer (Writer, execWriter, writer)
-- system code
shopping1 :: Shopping
shopping1 = do
purchase "Jeans" 19200
purchase "Water" 180
purchase "Lettuce" 328
-- solution code
type Shopping = Writer [(String, Integer)] ()
purchase :: String -> Integer -> Shopping
purchase item price = writer ((), [(item, price)])
total :: Shopping -> Integer
total = sum . map snd . execWriter
items :: Shopping -> [String]
items = map fst . execWriter
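-- Sanity check (not part of the graded solution): with shopping1 defined above,
--   total shopping1  ==  19708                              -- 19200 + 180 + 328
--   items shopping1  ==  ["Jeans", "Water", "Lettuce"]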
|
dstarcev/stepic-haskell
|
src/Module5/Task19.hs
|
bsd-3-clause
| 508
| 0
| 8
| 94
| 182
| 101
| 81
| 14
| 1
|
{-# LANGUAGE RecursiveDo #-}
module Widgets
( centreText
, renderButton
, button
, toggleButton
) where
import Reflex
import Reflex.Gloss.Scene
import Graphics.Gloss
smallText :: String -> Picture
smallText str = scale 0.1 0.1 $ text str
centreText :: String -> Picture
centreText str = translate (-x) (-y) $ smallText str where
x = fromIntegral (length str) * charWidth * 0.5
y = charHeight * 0.5
(charWidth, charHeight) = (6, 10)
renderButton :: Color -> Bool -> Bool -> String -> Vector -> Picture
renderButton c isHovering isPressing str (sx, sy) = mconcat
[ color fill $ rectangleSolid sx sy
, centreText str
, if isHovering then rectangleWire sx sy else mempty
] where
fill = if isHovering && isPressing then dark c else c
coloredButton :: Reflex t => Behavior t Color -> Behavior t String -> Behavior t Vector -> Scene t (Event t ())
coloredButton col str size = do
target <- targetRect size
(click, pressing) <- clicked LeftButton target
render $ renderButton <$> col <*> hovering target
<*> pressing <*> str <*> size
return click
button :: Reflex t => Behavior t String -> Behavior t Vector -> Scene t (Event t ())
button = coloredButton (pure $ light azure)
toggleButton :: Reflex t => Behavior t String -> Behavior t Vector -> Event t Bool -> Scene t (Dynamic t Bool, Event t Bool)
toggleButton str size setter = do
rec
click <- coloredButton (currentColor <$> current down) str size
let clickDown = tag (not <$> current down) click
down <- holdDyn False $ leftmost [clickDown, setter]
return (down, clickDown)
where
currentColor d = if d then orange else light azure
|
Saulzar/reflex-gloss-scene
|
examples/Widgets.hs
|
bsd-3-clause
| 1,713
| 0
| 15
| 409
| 633
| 316
| 317
| 39
| 3
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE QuasiQuotes #-}
module Area where
import Ivory.Language
import Ivory.Compile.C.CmdlineFrontend
[ivory|
struct val {
field :: Stored Uint32
}
|]
val :: MemArea ('Struct "val")
val = area "value" (Just (istruct [field .= ival 0]))
cval :: ConstMemArea ('Struct "val")
cval = constArea "cval" (istruct [field .= ival 10])
getVal :: Def ('[] ':-> Uint32)
getVal = proc "getVal" $ body $ do
ret =<< deref (addrOf val ~> field)
setVal :: Def ('[Uint32] ':-> ())
setVal = proc "setVal" $ \ n -> body $ do
store (addrOf val ~> field) n
retVoid
cmodule :: Module
cmodule = package "Area" $ do
incl getVal
incl setVal
defMemArea val
defConstMemArea cval
defStruct (Proxy :: Proxy "val")
main :: IO ()
main = runCompiler [cmodule] [] initialOpts { outDir = Nothing, constFold = True }
|
GaloisInc/ivory
|
ivory-examples/examples/Area.hs
|
bsd-3-clause
| 949
| 0
| 13
| 182
| 338
| 174
| 164
| 29
| 1
|
-- | A class for tree types and representations of selections on tree types, as well as functions for converting between text and tree selections.
module Language.GroteTrap.Trees (
-- * Paths and navigation
Path, root,
Nav, up, into, down, left, right, sibling,
-- * Tree types
Tree(..), depth, selectDepth, flatten, follow, child,
-- * Tree selections
Selectable(..), TreeSelection,
select, allSelections, selectionToRange, rangeToSelection, posToPath, isValidRange,
-- * Suggesting and fixing
suggestBy, suggest, repairBy, repair
) where
import Language.GroteTrap.Range
import Language.GroteTrap.Util
import Control.Monad (liftM)
import Data.List (sortBy, findIndex)
import Data.Maybe (isJust)
import Data.Ord (comparing)
------------------------------------
-- Paths and navigation
------------------------------------
-- | A path in a tree. Each integer denotes the selection of a child; these indices are 0-relative.
type Path = [Int]
-- | @root@ is the empty path.
root :: Path
root = []
-- | Navigation transforms one path to another.
type Nav = Path -> Path
-- | Move up to parent node. Moving up from root has no effect.
up :: Nav
up [] = []
up path = init path
-- | Move down into the nth child node. If @n@ is negative, the leftmost child is selected.
into :: Int -> Nav
into i = (++ [i `max` 0])
-- | Move down into first child node.
down :: Nav
down = into 0
-- | Move left one sibling.
left :: Nav
left = sibling (-1)
-- | Move right one sibling.
right :: Nav
right = sibling 1
-- | Move @n@ siblings to the right. @n@ can be negative. If the new child index becomes negative, the leftmost child is selected.
sibling :: Int -> Nav
sibling 0 [] = []
sibling _ [] = error "the root has no siblings"
sibling d p = into (last p + d) (up p)
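-- Navigation examples (a sketch of how the combinators compose):
--   down root           == [0]
--   into 1 (down root)  == [0,1]
--   right [0,1]         == [0,2]
--   up [0,2]            == [0]
--   left [0,0]          == [0,0]   -- clamped at the leftmost child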
------------------------------------
-- Parents and children
------------------------------------
-- | Tree types.
class Tree p where
-- | Yields this tree's subtrees.
children :: p -> [p]
-- | Pre-order depth-first traversal.
flatten :: Tree t => t -> [t]
flatten t = t : concatMap flatten (children t)
-- | Follows a path in a tree, returning the result in a monad.
follow :: (Monad m, Tree t) => t -> Path -> m t
follow parent [] = return parent
follow parent (t:ts) = do
c <- child parent t
follow c ts
-- | Moves down into a child.
child :: (Monad m, Tree t) => t -> Int -> m t
child t i
| i >= 0 && i < length cs = return (cs !! i)
| otherwise = fail ("child " ++ show i ++ " does not exist")
where cs = children t
-- | Yields the depth of the tree.
depth :: Tree t => t -> Int
depth t
  | null depths = 1
  | otherwise   = 1 + maximum depths
  where depths = map depth (children t)
-- | Yields all ancestors at the specified depth.
selectDepth :: Tree t => Int -> t -> [t]
selectDepth 0 t = [t]
selectDepth d t = concatMap (selectDepth (d - 1)) (children t)
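-- Illustration with a hypothetical rose tree (not part of this module):
--
--   data Rose = Rose Int [Rose]
--   instance Tree Rose where children (Rose _ cs) = cs
--
--   t = Rose 1 [Rose 2 [Rose 4 []], Rose 3 []]
--
--   depth t                                   == 3
--   map (\(Rose n _) -> n) (flatten t)        == [1,2,4,3]
--   map (\(Rose n _) -> n) (selectDepth 1 t)  == [2,3]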
------------------------------------
-- Tree selections
------------------------------------
-- | Selection in a tree. The path indicates the left side of the selection; the int tells how many siblings to the right are included in the selection.
type TreeSelection = (Path, Int)
-- | Selectable trees.
class Tree t => Selectable t where
-- | Tells whether complete subranges of children may be selected in this tree node. If not, valid TreeSelections in this tree always have a second element @0@.
allowSubranges :: t -> Bool
-- | Enumerates all possible selections of a tree.
allSelections :: Selectable a => a -> [TreeSelection]
allSelections p = (root, 0) : subranges ++ recurse where
subranges
| allowSubranges p =
[ ([from], to - from)
| from <- [0 .. length cs - 2]
, to <- [from + 1 .. length cs - 1]
, from > 0 || to < length cs - 1
]
| otherwise = []
cs = children p
recurse = concat $ zipWith label cs [0 ..]
label c i = map (rt i) (allSelections c)
rt i (path, offset) = (i : path, offset)
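-- Sketch: for a node p with three leaf children and allowSubranges p == True,
--   allSelections p ==
--     [ ([],  0)                          -- the node itself
--     , ([0], 1), ([1], 1)                -- proper subranges (the full range 0..2 is excluded)
--     , ([0], 0), ([1], 0), ([2], 0) ]    -- each child on its own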
-- | Selects part of a tree.
select :: (Monad m, Tree t) => t -> TreeSelection -> m [t]
select t (path, offset) = (sequence . map (follow t) . take (offset + 1) . iterate right) path
-- | Computes the range of a valid selection.
selectionToRange :: (Monad m, Tree a, Ranged a) => a -> TreeSelection -> m Range
selectionToRange parent (path, offset) = do
from <- follow parent path
to <- follow parent (sibling offset path)
return (begin from, end to)
-- | Converts a specified range to a corresponding selection and returns it in a monad.
rangeToSelection :: (Monad m, Selectable a, Ranged a) => a -> Range -> m TreeSelection
rangeToSelection p (b, e)
-- If the range matches that of the root, we're done.
| range p == (b, e) =
return (root, 0)
| otherwise =
-- Find the children whose ranges contain b and e.
let cs = children p
ri pos = findIndex (inRange pos . range) cs
in case (ri b, ri e) of
(Just l, Just r) ->
if l == r
-- b and e are contained by the same child!
-- Recurse into child and prepend child index.
then liftM (\(path, offset) -> (l : path, offset)) $
rangeToSelection (cs !! l) (b, e)
else if allowSubranges p && begin (cs !! l) == b && end (cs !! r) == e
-- b is the beginning of l, and e is the end
-- of r: a selection of a range of children.
-- Note that r - l > 0; else it would've been
-- caught by the previous test.
-- This also means that there are many ways
-- to select a single node: either select it
-- directly, or select all its children.
then return ([l], r - l)
-- All other cases are bad.
else fail "text selection does not have corresponding tree selection"
-- Either position is not contained
-- within any child. Can't be valid.
_ -> fail "text selection does not have corresponding tree selection"
-- | Returns the path to the deepest descendant whose range contains the specified position.
posToPath :: (Monad m, Tree a, Ranged a) => a -> Pos -> m Path
posToPath p pos = case break (inRange pos . range) (children p) of
(_, []) -> if pos `inRange` range p
then return root
else fail ("tree does not contain position " ++ show pos)
(no, c:_) -> liftM (length no :) (posToPath c pos)
-- | Tells whether the text selection corresponds to a tree selection.
isValidRange :: (Ranged a, Selectable a) => a -> Range -> Bool
isValidRange p = isJust . rangeToSelection p
------------------------------------
-- Suggesting and fixing
------------------------------------
-- | Yields all possible selections, ordered by distance to the specified range, closest first.
suggestBy :: (Selectable a, Ranged a) => (Range -> Range -> Int) -> a -> Range -> [TreeSelection]
suggestBy cost p r = sortBy (comparing distance) (allSelections p) where
distance = cost r . fromError . selectionToRange p
-- | @suggest@ uses 'distRange' as cost function.
suggest :: (Selectable a, Ranged a) => a -> Range -> [TreeSelection]
suggest = suggestBy distRange
-- | Takes @suggestBy@'s first suggestion and yields its range.
repairBy :: (Ranged a, Selectable a) => (Range -> Range -> Int) -> a -> Range -> Range
repairBy cost p = fromError . selectionToRange p . head . suggestBy cost p
-- | @repair@ uses 'distRange' as cost function.
repair :: (Ranged a, Selectable a) => a -> Range -> Range
repair = repairBy distRange
|
MedeaMelana/GroteTrap
|
Language/GroteTrap/Trees.hs
|
bsd-3-clause
| 7,725
| 0
| 19
| 1,982
| 2,022
| 1,085
| 937
| 110
| 4
|
-- |
-- Module: $Header$
-- Description: Utilities for parsing command line options.
-- Copyright: (c) 2018-2020 Peter Trško
-- License: BSD3
--
-- Maintainer: peter.trsko@gmail.com
-- Stability: experimental
-- Portability: GHC specific language extensions.
--
-- Utilities for parsing command line options.
module CommandWrapper.Core.Options.Optparse
(
-- * Generic API
parse
, subcommandParse
-- ** Helper Functions
, splitArguments
, splitArguments'
, execParserPure
, handleParseResult
)
where
import Prelude ((+), fromIntegral)
import Control.Applicative (pure)
import Data.Bool ((&&))
import Data.Either (Either(Left, Right))
import Data.Eq ((/=), (==))
import Data.Foldable (length)
import Data.Function (($), (.))
import Data.Functor (Functor, (<$>))
import qualified Data.List as List (span)
import Data.Maybe (listToMaybe)
import Data.Monoid (Endo, (<>))
import Data.String (String)
import Data.Word (Word)
import System.Environment (getArgs)
import System.Exit (ExitCode(ExitFailure), exitWith)
import System.IO (IO, stderr)
import Control.Comonad (Comonad, extract)
import Data.Verbosity (Verbosity)
import qualified Data.Verbosity as Verbosity (Verbosity(Normal))
import Data.Output.Colour (ColourOutput)
import qualified Data.Output.Colour as ColourOutput (ColourOutput(Auto))
import qualified Options.Applicative as Options
( ParserFailure(ParserFailure, execFailure)
, ParserInfo
, ParserPrefs
, ParserResult(CompletionInvoked, Failure, Success)
, parserFailure
)
import qualified Options.Applicative.Common as Options (runParserInfo)
import qualified Options.Applicative.Internal as Options (runP)
import CommandWrapper.Core.Environment.AppNames (AppNames(AppNames, usedName))
import CommandWrapper.Core.Environment.Params
( Params(Params, colour, name, subcommand, verbosity)
)
import CommandWrapper.Core.Message (dieFailedToParseOptions)
parse
:: forall globalMode mode config
. (Functor mode, Comonad globalMode)
=> AppNames
-> Options.ParserPrefs
-> Options.ParserInfo (globalMode (Endo config))
-> (forall a. globalMode (Endo a) -> Endo (mode a))
-> (forall a. Endo config -> [String] -> IO (Endo (mode a)))
-> IO (Endo (mode config))
parse appNames parserPrefs parserInfo fromGlobalMode parseArguments = do
(globalOptions, arguments) <- splitArguments <$> getArgs
globalMode <- handleParseResult' appNames (execParserPure' globalOptions)
let updateConfig = extract globalMode
updateCommand <- parseArguments updateConfig arguments
pure $ fromGlobalMode globalMode <> updateCommand
where
execParserPure'
:: [String]
-> Options.ParserResult (globalMode (Endo config))
execParserPure' = execParserPure parserPrefs parserInfo
handleParseResult' AppNames{usedName} =
-- TODO: We should at least respect `NO_COLOR`.
handleParseResult usedName Verbosity.Normal ColourOutput.Auto
subcommandParse
:: Params
-> Options.ParserPrefs
-> Options.ParserInfo (Endo (mode config))
-> [String]
-- ^ Command line arguments. Usually obtained by 'getArgs'.
-> IO (Endo (mode config))
subcommandParse params parserPrefs parserInfo =
handleParseResult' params . execParserPure'
where
execParserPure' = execParserPure parserPrefs parserInfo
handleParseResult' Params{colour, name, subcommand, verbosity} =
handleParseResult (name <> " " <> subcommand) verbosity colour
-- | Split arguments into global options and the rest.
--
-- @
-- COMMAND_WRAPPER [GLOBAL_OPTIONS] [[--] SUBCOMMAND [SUBCOMMAND_ARGUMENTS]]
-- @
--
-- >>> splitArguments ["-i", "--help"]
-- (["-i", "--help"], [])
-- >>> splitArguments ["-i", "help", "build"]
-- (["-i"], ["help", "build"])
-- >>> splitArguments ["-i", "--", "help", "build"]
-- (["-i"], ["help", "build"])
-- >>> splitArguments ["-i", "--", "--foo"]
-- (["-i"], ["--foo"])
splitArguments
:: [String]
-> ([String], [String])
splitArguments args = (globalOptions, subcommandAndItsArguments)
where
(globalOptions, _, subcommandAndItsArguments) = splitArguments' args
-- | Split arguments into global options and the rest.
--
-- @
-- COMMAND_WRAPPER [GLOBAL_OPTIONS] [[--] SUBCOMMAND [SUBCOMMAND_ARGUMENTS]]
-- @
--
-- >>> splitArguments' ["-i", "--help"]
-- (["-i", "--help"], 2, [])
-- >>> splitArguments' ["-i", "help", "build"]
-- (["-i"], 1, ["help", "build"])
-- >>> splitArguments' ["-i", "--", "help", "build"]
-- (["-i"], 2, ["help", "build"])
-- >>> splitArguments' ["-i", "--", "--foo"]
-- (["-i"], 2, ["--foo"])
--
-- >>> let arguments = ["-i", "--", "help", "build"]
-- >>> let (_, n, subcommandAndItsArguments) = splitArguments' arguments
-- >>> drop n arguments == subcommandAndItsArguments
-- True
splitArguments'
:: [String]
-> ([String], Word, [String])
splitArguments' args = case subcommandAndItsArguments' of
"--" : subcommandAndItsArguments ->
(globalOptions, n + 1, subcommandAndItsArguments)
subcommandAndItsArguments ->
(globalOptions, n, subcommandAndItsArguments)
where
(globalOptions, subcommandAndItsArguments') =
List.span (\arg -> arg /= "--" && listToMaybe arg == pure '-') args
n = fromIntegral (length globalOptions)
-- | Variant of 'Options.Applicative.execParserPure' that doesn't provide shell
-- completion.
execParserPure
:: Options.ParserPrefs
-- ^ Global preferences for this parser
-> Options.ParserInfo a
-- ^ Description of the program to run
-> [String]
-- ^ Program arguments
-> Options.ParserResult a
execParserPure pprefs pinfo args =
case Options.runP (Options.runParserInfo pinfo args) pprefs of
(Right r, _) ->
Options.Success r
(Left err, ctx) ->
Options.Failure (Options.parserFailure pprefs pinfo err ctx)
handleParseResult
:: String
-> Verbosity
-> ColourOutput
-> Options.ParserResult a
-> IO a
handleParseResult command verbosity colour = \case
Options.Success a ->
pure a
Options.Failure Options.ParserFailure{Options.execFailure} ->
let (err, _, _) = execFailure command
in dieFailedToParseOptions command verbosity colour stderr err
Options.CompletionInvoked _ ->
exitWith (ExitFailure 1) -- TODO: This is an impossible case.
|
trskop/command-wrapper
|
command-wrapper-core/src/CommandWrapper/Core/Options/Optparse.hs
|
bsd-3-clause
| 6,355
| 0
| 20
| 1,146
| 1,360
| 791
| 569
| -1
| -1
|
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_external_semaphore_win32 - device extension
--
-- == VK_KHR_external_semaphore_win32
--
-- [__Name String__]
-- @VK_KHR_external_semaphore_win32@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 79
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_external_semaphore@
--
-- [__Contact__]
--
-- - James Jones
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_external_semaphore_win32] @cubanismo%0A<<Here describe the issue or question you have about the VK_KHR_external_semaphore_win32 extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2016-10-21
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - James Jones, NVIDIA
--
-- - Jeff Juliano, NVIDIA
--
-- - Carsten Rohde, NVIDIA
--
-- == Description
--
-- An application using external memory may wish to synchronize access to
-- that memory using semaphores. This extension enables an application to
-- export semaphore payload to and import semaphore payload from Windows
-- handles.
--
-- == New Commands
--
-- - 'getSemaphoreWin32HandleKHR'
--
-- - 'importSemaphoreWin32HandleKHR'
--
-- == New Structures
--
-- - 'ImportSemaphoreWin32HandleInfoKHR'
--
-- - 'SemaphoreGetWin32HandleInfoKHR'
--
-- - Extending 'Vulkan.Core10.QueueSemaphore.SemaphoreCreateInfo':
--
-- - 'ExportSemaphoreWin32HandleInfoKHR'
--
-- - Extending 'Vulkan.Core10.Queue.SubmitInfo':
--
-- - 'D3D12FenceSubmitInfoKHR'
--
-- == New Enum Constants
--
-- - 'KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME'
--
-- - 'KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR'
--
-- == Issues
--
-- 1) Do applications need to call @CloseHandle@() on the values returned
-- from 'getSemaphoreWin32HandleKHR' when @handleType@ is
-- 'Vulkan.Extensions.VK_KHR_external_semaphore_capabilities.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR'?
--
-- __RESOLVED__: Yes, unless it is passed back in to another driver
-- instance to import the object. A successful get call transfers ownership
-- of the handle to the application. Destroying the semaphore object will
-- not destroy the handle or the handle’s reference to the underlying
-- semaphore resource.
--
-- 2) Should the language regarding KMT\/Windows 7 handles be moved to a
-- separate extension so that it can be deprecated over time?
--
-- __RESOLVED__: No. Support for them can be deprecated by drivers if they
-- choose, by no longer returning them in the supported handle types of the
-- instance level queries.
--
-- 3) Should applications be allowed to specify additional object
-- attributes for shared handles?
--
-- __RESOLVED__: Yes. Applications will be allowed to provide similar
-- attributes to those they would to any other handle creation API.
--
-- 4) How do applications communicate the desired fence values to use with
-- @D3D12_FENCE@-based Vulkan semaphores?
--
-- __RESOLVED__: There are a couple of options. The values for the signaled
-- and reset states could be communicated up front when creating the object
-- and remain static for the life of the Vulkan semaphore, or they could be
-- specified using auxiliary structures when submitting semaphore signal
-- and wait operations, similar to what is done with the keyed mutex
-- extensions. The latter is more flexible and consistent with the keyed
-- mutex usage, but the former is a much simpler API.
--
-- Since Vulkan tends to favor flexibility and consistency over simplicity,
-- a new structure specifying D3D12 fence acquire and release values is
-- added to the 'Vulkan.Core10.Queue.queueSubmit' function.
--
-- == Version History
--
-- - Revision 1, 2016-10-21 (James Jones)
--
-- - Initial revision
--
-- == See Also
--
-- 'D3D12FenceSubmitInfoKHR', 'ExportSemaphoreWin32HandleInfoKHR',
-- 'ImportSemaphoreWin32HandleInfoKHR', 'SemaphoreGetWin32HandleInfoKHR',
-- 'getSemaphoreWin32HandleKHR', 'importSemaphoreWin32HandleKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_external_semaphore_win32 Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_external_semaphore_win32 ( getSemaphoreWin32HandleKHR
, importSemaphoreWin32HandleKHR
, ImportSemaphoreWin32HandleInfoKHR(..)
, ExportSemaphoreWin32HandleInfoKHR(..)
, D3D12FenceSubmitInfoKHR(..)
, SemaphoreGetWin32HandleInfoKHR(..)
, KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION
, pattern KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION
, KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME
, pattern KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME
, HANDLE
, DWORD
, LPCWSTR
, SECURITY_ATTRIBUTES
) where
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Data.Vector (generateM)
import qualified Data.Vector (imapM_)
import qualified Data.Vector (length)
import qualified Data.Vector (null)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import Data.Word (Word32)
import Data.Word (Word64)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.CStruct.Utils (advancePtrBytes)
import Vulkan.Extensions.VK_NV_external_memory_win32 (DWORD)
import Vulkan.Core10.Handles (Device)
import Vulkan.Core10.Handles (Device(..))
import Vulkan.Core10.Handles (Device(Device))
import Vulkan.Dynamic (DeviceCmds(pVkGetSemaphoreWin32HandleKHR))
import Vulkan.Dynamic (DeviceCmds(pVkImportSemaphoreWin32HandleKHR))
import Vulkan.Core10.Handles (Device_T)
import Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits (ExternalSemaphoreHandleTypeFlagBits)
import Vulkan.Extensions.VK_NV_external_memory_win32 (HANDLE)
import Vulkan.Extensions.VK_KHR_external_memory_win32 (LPCWSTR)
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.Extensions.VK_NV_external_memory_win32 (SECURITY_ATTRIBUTES)
import Vulkan.Core10.Handles (Semaphore)
import Vulkan.Core11.Enums.SemaphoreImportFlagBits (SemaphoreImportFlags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
import Vulkan.Extensions.VK_NV_external_memory_win32 (DWORD)
import Vulkan.Extensions.VK_NV_external_memory_win32 (HANDLE)
import Vulkan.Extensions.VK_KHR_external_memory_win32 (LPCWSTR)
import Vulkan.Extensions.VK_NV_external_memory_win32 (SECURITY_ATTRIBUTES)
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetSemaphoreWin32HandleKHR
:: FunPtr (Ptr Device_T -> Ptr SemaphoreGetWin32HandleInfoKHR -> Ptr HANDLE -> IO Result) -> Ptr Device_T -> Ptr SemaphoreGetWin32HandleInfoKHR -> Ptr HANDLE -> IO Result
-- | vkGetSemaphoreWin32HandleKHR - Get a Windows HANDLE for a semaphore
--
-- = Description
--
-- For handle types defined as NT handles, the handles returned by
-- 'getSemaphoreWin32HandleKHR' are owned by the application. To avoid
-- leaking resources, the application /must/ release ownership of them
-- using the @CloseHandle@ system call when they are no longer needed.
--
-- Exporting a Windows handle from a semaphore /may/ have side effects
-- depending on the transference of the specified handle type, as described
-- in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore Payloads>.
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_TOO_MANY_OBJECTS'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_external_semaphore_win32 VK_KHR_external_semaphore_win32>,
-- 'Vulkan.Core10.Handles.Device', 'SemaphoreGetWin32HandleInfoKHR'
getSemaphoreWin32HandleKHR :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that created the semaphore being
-- exported.
--
-- #VUID-vkGetSemaphoreWin32HandleKHR-device-parameter# @device@ /must/ be
-- a valid 'Vulkan.Core10.Handles.Device' handle
Device
-> -- | @pGetWin32HandleInfo@ is a pointer to a 'SemaphoreGetWin32HandleInfoKHR'
-- structure containing parameters of the export operation.
--
-- #VUID-vkGetSemaphoreWin32HandleKHR-pGetWin32HandleInfo-parameter#
-- @pGetWin32HandleInfo@ /must/ be a valid pointer to a valid
-- 'SemaphoreGetWin32HandleInfoKHR' structure
SemaphoreGetWin32HandleInfoKHR
-> io (HANDLE)
getSemaphoreWin32HandleKHR device getWin32HandleInfo = liftIO . evalContT $ do
let vkGetSemaphoreWin32HandleKHRPtr = pVkGetSemaphoreWin32HandleKHR (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkGetSemaphoreWin32HandleKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetSemaphoreWin32HandleKHR is null" Nothing Nothing
let vkGetSemaphoreWin32HandleKHR' = mkVkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHRPtr
pGetWin32HandleInfo <- ContT $ withCStruct (getWin32HandleInfo)
pPHandle <- ContT $ bracket (callocBytes @HANDLE 8) free
r <- lift $ traceAroundEvent "vkGetSemaphoreWin32HandleKHR" (vkGetSemaphoreWin32HandleKHR' (deviceHandle (device)) pGetWin32HandleInfo (pPHandle))
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
pHandle <- lift $ peek @HANDLE pPHandle
pure $ (pHandle)
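-- A hypothetical call site (sketch only; it assumes the record fields of
-- 'SemaphoreGetWin32HandleInfoKHR' mirror those of the import struct below,
-- i.e. 'semaphore' and 'handleType', and that 'sem' was created with the
-- appropriate export info):
--
--   win32Handle <- getSemaphoreWin32HandleKHR device zero
--     { semaphore  = sem
--     , handleType = EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT
--     }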
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkImportSemaphoreWin32HandleKHR
:: FunPtr (Ptr Device_T -> Ptr ImportSemaphoreWin32HandleInfoKHR -> IO Result) -> Ptr Device_T -> Ptr ImportSemaphoreWin32HandleInfoKHR -> IO Result
-- | vkImportSemaphoreWin32HandleKHR - Import a semaphore from a Windows
-- HANDLE
--
-- = Description
--
-- Importing a semaphore payload from Windows handles does not transfer
-- ownership of the handle to the Vulkan implementation. For handle types
-- defined as NT handles, the application /must/ release ownership using
-- the @CloseHandle@ system call when the handle is no longer needed.
--
-- Applications /can/ import the same semaphore payload into multiple
-- instances of Vulkan, into the same instance from which it was exported,
-- and multiple times into a given Vulkan instance.
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_INVALID_EXTERNAL_HANDLE'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_external_semaphore_win32 VK_KHR_external_semaphore_win32>,
-- 'Vulkan.Core10.Handles.Device', 'ImportSemaphoreWin32HandleInfoKHR'
importSemaphoreWin32HandleKHR :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that created the semaphore.
--
-- #VUID-vkImportSemaphoreWin32HandleKHR-device-parameter# @device@ /must/
-- be a valid 'Vulkan.Core10.Handles.Device' handle
Device
-> -- | @pImportSemaphoreWin32HandleInfo@ is a pointer to a
-- 'ImportSemaphoreWin32HandleInfoKHR' structure specifying the semaphore
-- and import parameters.
--
-- #VUID-vkImportSemaphoreWin32HandleKHR-pImportSemaphoreWin32HandleInfo-parameter#
-- @pImportSemaphoreWin32HandleInfo@ /must/ be a valid pointer to a valid
-- 'ImportSemaphoreWin32HandleInfoKHR' structure
ImportSemaphoreWin32HandleInfoKHR
-> io ()
importSemaphoreWin32HandleKHR device importSemaphoreWin32HandleInfo = liftIO . evalContT $ do
let vkImportSemaphoreWin32HandleKHRPtr = pVkImportSemaphoreWin32HandleKHR (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkImportSemaphoreWin32HandleKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkImportSemaphoreWin32HandleKHR is null" Nothing Nothing
let vkImportSemaphoreWin32HandleKHR' = mkVkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHRPtr
pImportSemaphoreWin32HandleInfo <- ContT $ withCStruct (importSemaphoreWin32HandleInfo)
r <- lift $ traceAroundEvent "vkImportSemaphoreWin32HandleKHR" (vkImportSemaphoreWin32HandleKHR' (deviceHandle (device)) pImportSemaphoreWin32HandleInfo)
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
-- | VkImportSemaphoreWin32HandleInfoKHR - Structure specifying Windows
-- handle to import to a semaphore
--
-- = Description
--
-- The handle types supported by @handleType@ are:
--
-- +---------------------------------------------------------------------------------------------------------------+------------------+---------------------+
-- | Handle Type | Transference | Permanence |
-- | | | Supported |
-- +===============================================================================================================+==================+=====================+
-- | 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT' | Reference | Temporary,Permanent |
-- +---------------------------------------------------------------------------------------------------------------+------------------+---------------------+
-- | 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT' | Reference | Temporary,Permanent |
-- +---------------------------------------------------------------------------------------------------------------+------------------+---------------------+
-- | 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT' | Reference | Temporary,Permanent |
-- +---------------------------------------------------------------------------------------------------------------+------------------+---------------------+
--
-- Handle Types Supported by 'ImportSemaphoreWin32HandleInfoKHR'
--
-- == Valid Usage
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01140#
-- @handleType@ /must/ be a value included in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphore-handletypes-win32 Handle Types Supported by >
-- table
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01466# If
-- @handleType@ is not
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT'
-- or
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT',
-- @name@ /must/ be @NULL@
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01467# If
-- @handle@ is @NULL@, @name@ /must/ name a valid synchronization
-- primitive of the type specified by @handleType@
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01468# If
-- @name@ is @NULL@, @handle@ /must/ be a valid handle of the type
-- specified by @handleType@
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01469# If @handle@
-- is not @NULL@, @name@ /must/ be @NULL@
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01542# If @handle@
-- is not @NULL@, it /must/ obey any requirements listed for
-- @handleType@ in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#external-semaphore-handle-types-compatibility external semaphore handle types compatibility>
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-name-01543# If @name@ is
-- not @NULL@, it /must/ obey any requirements listed for @handleType@
-- in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#external-semaphore-handle-types-compatibility external semaphore handle types compatibility>
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-03261# If
-- @handleType@ is
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT'
-- or
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT',
-- the 'Vulkan.Core10.QueueSemaphore.SemaphoreCreateInfo'::@flags@
-- field /must/ match that of the semaphore from which @handle@ or
-- @name@ was exported
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-03262# If
-- @handleType@ is
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT'
-- or
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT',
-- the
-- 'Vulkan.Core12.Promoted_From_VK_KHR_timeline_semaphore.SemaphoreTypeCreateInfo'::@semaphoreType@
-- field /must/ match that of the semaphore from which @handle@ or
-- @name@ was exported
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-03322# If @flags@
-- contains
-- 'Vulkan.Core11.Enums.SemaphoreImportFlagBits.SEMAPHORE_IMPORT_TEMPORARY_BIT',
-- the
-- 'Vulkan.Core12.Promoted_From_VK_KHR_timeline_semaphore.SemaphoreTypeCreateInfo'::@semaphoreType@
-- field of the semaphore from which @handle@ or @name@ was exported
-- /must/ not be
-- 'Vulkan.Core12.Enums.SemaphoreType.SEMAPHORE_TYPE_TIMELINE'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-sType-sType# @sType@
-- /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR'
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-pNext-pNext# @pNext@
-- /must/ be @NULL@
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-semaphore-parameter#
-- @semaphore@ /must/ be a valid 'Vulkan.Core10.Handles.Semaphore'
-- handle
--
-- - #VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-parameter# @flags@
-- /must/ be a valid combination of
-- 'Vulkan.Core11.Enums.SemaphoreImportFlagBits.SemaphoreImportFlagBits'
-- values
--
-- == Host Synchronization
--
-- - Host access to @semaphore@ /must/ be externally synchronized
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_external_semaphore_win32 VK_KHR_external_semaphore_win32>,
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits',
-- 'Vulkan.Core10.Handles.Semaphore',
-- 'Vulkan.Core11.Enums.SemaphoreImportFlagBits.SemaphoreImportFlags',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'importSemaphoreWin32HandleKHR'
data ImportSemaphoreWin32HandleInfoKHR = ImportSemaphoreWin32HandleInfoKHR
{ -- | @semaphore@ is the semaphore into which the payload will be imported.
semaphore :: Semaphore
, -- | @flags@ is a bitmask of
-- 'Vulkan.Core11.Enums.SemaphoreImportFlagBits.SemaphoreImportFlagBits'
-- specifying additional parameters for the semaphore payload import
-- operation.
flags :: SemaphoreImportFlags
, -- | @handleType@ is a
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
-- value specifying the type of @handle@.
handleType :: ExternalSemaphoreHandleTypeFlagBits
, -- | @handle@ is @NULL@ or the external handle to import.
handle :: HANDLE
, -- | @name@ is @NULL@ or a null-terminated UTF-16 string naming the
-- underlying synchronization primitive to import.
name :: LPCWSTR
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (ImportSemaphoreWin32HandleInfoKHR)
#endif
deriving instance Show ImportSemaphoreWin32HandleInfoKHR
instance ToCStruct ImportSemaphoreWin32HandleInfoKHR where
withCStruct x f = allocaBytes 48 $ \p -> pokeCStruct p x (f p)
pokeCStruct p ImportSemaphoreWin32HandleInfoKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Semaphore)) (semaphore)
poke ((p `plusPtr` 24 :: Ptr SemaphoreImportFlags)) (flags)
poke ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (handleType)
poke ((p `plusPtr` 32 :: Ptr HANDLE)) (handle)
poke ((p `plusPtr` 40 :: Ptr LPCWSTR)) (name)
f
cStructSize = 48
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Semaphore)) (zero)
poke ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (zero)
f
instance FromCStruct ImportSemaphoreWin32HandleInfoKHR where
peekCStruct p = do
semaphore <- peek @Semaphore ((p `plusPtr` 16 :: Ptr Semaphore))
flags <- peek @SemaphoreImportFlags ((p `plusPtr` 24 :: Ptr SemaphoreImportFlags))
handleType <- peek @ExternalSemaphoreHandleTypeFlagBits ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits))
handle <- peek @HANDLE ((p `plusPtr` 32 :: Ptr HANDLE))
name <- peek @LPCWSTR ((p `plusPtr` 40 :: Ptr LPCWSTR))
pure $ ImportSemaphoreWin32HandleInfoKHR
semaphore flags handleType handle name
instance Storable ImportSemaphoreWin32HandleInfoKHR where
sizeOf ~_ = 48
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero ImportSemaphoreWin32HandleInfoKHR where
zero = ImportSemaphoreWin32HandleInfoKHR
zero
zero
zero
zero
zero
-- | VkExportSemaphoreWin32HandleInfoKHR - Structure specifying additional
-- attributes of Windows handles exported from a semaphore
--
-- = Description
--
-- If
-- 'Vulkan.Core11.Promoted_From_VK_KHR_external_semaphore.ExportSemaphoreCreateInfo'
-- is not included in the same @pNext@ chain, this structure is ignored.
--
-- If
-- 'Vulkan.Core11.Promoted_From_VK_KHR_external_semaphore.ExportSemaphoreCreateInfo'
-- is included in the @pNext@ chain of
-- 'Vulkan.Core10.QueueSemaphore.SemaphoreCreateInfo' with a Windows
-- @handleType@, but either 'ExportSemaphoreWin32HandleInfoKHR' is not
-- included in the @pNext@ chain, or if it is but @pAttributes@ is set to
-- @NULL@, default security descriptor values will be used, and child
-- processes created by the application will not inherit the handle, as
-- described in the MSDN documentation for “Synchronization Object Security
-- and Access Rights”1. Further, if the structure is not present, the
-- access rights used depend on the handle type.
--
-- For handles of the following types:
--
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT'
--
-- The implementation /must/ ensure the access rights allow both signal and
-- wait operations on the semaphore.
--
-- For handles of the following types:
--
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT'
--
-- The access rights /must/ be:
--
-- @GENERIC_ALL@
--
-- [1]
-- <https://docs.microsoft.com/en-us/windows/win32/sync/synchronization-object-security-and-access-rights>
--
-- == Valid Usage
--
-- - #VUID-VkExportSemaphoreWin32HandleInfoKHR-handleTypes-01125# If
-- 'Vulkan.Core11.Promoted_From_VK_KHR_external_semaphore.ExportSemaphoreCreateInfo'::@handleTypes@
-- does not include
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT'
-- or
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT',
-- 'ExportSemaphoreWin32HandleInfoKHR' /must/ not be included in the
-- @pNext@ chain of 'Vulkan.Core10.QueueSemaphore.SemaphoreCreateInfo'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkExportSemaphoreWin32HandleInfoKHR-sType-sType# @sType@
-- /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR'
--
-- - #VUID-VkExportSemaphoreWin32HandleInfoKHR-pAttributes-parameter# If
-- @pAttributes@ is not @NULL@, @pAttributes@ /must/ be a valid pointer
-- to a valid
-- 'Vulkan.Extensions.VK_NV_external_memory_win32.SECURITY_ATTRIBUTES'
-- value
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_external_semaphore_win32 VK_KHR_external_semaphore_win32>,
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data ExportSemaphoreWin32HandleInfoKHR = ExportSemaphoreWin32HandleInfoKHR
{ -- | @pAttributes@ is a pointer to a Windows
-- 'Vulkan.Extensions.VK_NV_external_memory_win32.SECURITY_ATTRIBUTES'
-- structure specifying security attributes of the handle.
attributes :: Ptr SECURITY_ATTRIBUTES
, -- | @dwAccess@ is a 'Vulkan.Extensions.VK_NV_external_memory_win32.DWORD'
-- specifying access rights of the handle.
dwAccess :: DWORD
, -- | @name@ is a null-terminated UTF-16 string to associate with the
-- underlying synchronization primitive referenced by NT handles exported
-- from the created semaphore.
name :: LPCWSTR
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (ExportSemaphoreWin32HandleInfoKHR)
#endif
deriving instance Show ExportSemaphoreWin32HandleInfoKHR
instance ToCStruct ExportSemaphoreWin32HandleInfoKHR where
withCStruct x f = allocaBytes 40 $ \p -> pokeCStruct p x (f p)
pokeCStruct p ExportSemaphoreWin32HandleInfoKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr (Ptr SECURITY_ATTRIBUTES))) (attributes)
poke ((p `plusPtr` 24 :: Ptr DWORD)) (dwAccess)
poke ((p `plusPtr` 32 :: Ptr LPCWSTR)) (name)
f
cStructSize = 40
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 24 :: Ptr DWORD)) (zero)
poke ((p `plusPtr` 32 :: Ptr LPCWSTR)) (zero)
f
instance FromCStruct ExportSemaphoreWin32HandleInfoKHR where
peekCStruct p = do
pAttributes <- peek @(Ptr SECURITY_ATTRIBUTES) ((p `plusPtr` 16 :: Ptr (Ptr SECURITY_ATTRIBUTES)))
dwAccess <- peek @DWORD ((p `plusPtr` 24 :: Ptr DWORD))
name <- peek @LPCWSTR ((p `plusPtr` 32 :: Ptr LPCWSTR))
pure $ ExportSemaphoreWin32HandleInfoKHR
pAttributes dwAccess name
instance Storable ExportSemaphoreWin32HandleInfoKHR where
sizeOf ~_ = 40
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero ExportSemaphoreWin32HandleInfoKHR where
zero = ExportSemaphoreWin32HandleInfoKHR
zero
zero
zero
-- | VkD3D12FenceSubmitInfoKHR - Structure specifying values for Direct3D 12
-- fence-backed semaphores
--
-- = Description
--
-- If the semaphore in 'Vulkan.Core10.Queue.SubmitInfo'::@pWaitSemaphores@
-- or 'Vulkan.Core10.Queue.SubmitInfo'::@pSignalSemaphores@ corresponding
-- to an entry in @pWaitSemaphoreValues@ or @pSignalSemaphoreValues@
-- respectively does not currently have a
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphores-payloads payload>
-- referring to a Direct3D 12 fence, the implementation /must/ ignore the
-- value in the @pWaitSemaphoreValues@ or @pSignalSemaphoreValues@ entry.
--
-- Note
--
-- As the introduction of the external semaphore handle type
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT'
-- predates that of timeline semaphores, support for importing semaphore
-- payloads from external handles of that type into semaphores created
-- (implicitly or explicitly) with a
-- 'Vulkan.Core12.Enums.SemaphoreType.SemaphoreType' of
-- 'Vulkan.Core12.Enums.SemaphoreType.SEMAPHORE_TYPE_BINARY' is preserved
-- for backwards compatibility. However, applications /should/ prefer
-- importing such handle types into semaphores created with a
-- 'Vulkan.Core12.Enums.SemaphoreType.SemaphoreType' of
-- 'Vulkan.Core12.Enums.SemaphoreType.SEMAPHORE_TYPE_TIMELINE', and use the
-- 'Vulkan.Core12.Promoted_From_VK_KHR_timeline_semaphore.TimelineSemaphoreSubmitInfo'
-- structure instead of the 'D3D12FenceSubmitInfoKHR' structure to specify
-- the values to use when waiting for and signaling such semaphores.
--
-- == Valid Usage
--
-- - #VUID-VkD3D12FenceSubmitInfoKHR-waitSemaphoreValuesCount-00079#
-- @waitSemaphoreValuesCount@ /must/ be the same value as
-- 'Vulkan.Core10.Queue.SubmitInfo'::@waitSemaphoreCount@, where
-- 'Vulkan.Core10.Queue.SubmitInfo' is in the @pNext@ chain of this
-- 'D3D12FenceSubmitInfoKHR' structure
--
-- - #VUID-VkD3D12FenceSubmitInfoKHR-signalSemaphoreValuesCount-00080#
-- @signalSemaphoreValuesCount@ /must/ be the same value as
-- 'Vulkan.Core10.Queue.SubmitInfo'::@signalSemaphoreCount@, where
-- 'Vulkan.Core10.Queue.SubmitInfo' is in the @pNext@ chain of this
-- 'D3D12FenceSubmitInfoKHR' structure
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkD3D12FenceSubmitInfoKHR-sType-sType# @sType@ /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR'
--
-- - #VUID-VkD3D12FenceSubmitInfoKHR-pWaitSemaphoreValues-parameter# If
-- @waitSemaphoreValuesCount@ is not @0@, and @pWaitSemaphoreValues@ is
-- not @NULL@, @pWaitSemaphoreValues@ /must/ be a valid pointer to an
-- array of @waitSemaphoreValuesCount@ @uint64_t@ values
--
-- - #VUID-VkD3D12FenceSubmitInfoKHR-pSignalSemaphoreValues-parameter# If
-- @signalSemaphoreValuesCount@ is not @0@, and
-- @pSignalSemaphoreValues@ is not @NULL@, @pSignalSemaphoreValues@
-- /must/ be a valid pointer to an array of
-- @signalSemaphoreValuesCount@ @uint64_t@ values
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_external_semaphore_win32 VK_KHR_external_semaphore_win32>,
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data D3D12FenceSubmitInfoKHR = D3D12FenceSubmitInfoKHR
{ -- | @waitSemaphoreValuesCount@ is the number of semaphore wait values
-- specified in @pWaitSemaphoreValues@.
waitSemaphoreValuesCount :: Word32
, -- | @pWaitSemaphoreValues@ is a pointer to an array of
-- @waitSemaphoreValuesCount@ values for the corresponding semaphores in
-- 'Vulkan.Core10.Queue.SubmitInfo'::@pWaitSemaphores@ to wait for.
waitSemaphoreValues :: Vector Word64
, -- | @signalSemaphoreValuesCount@ is the number of semaphore signal values
-- specified in @pSignalSemaphoreValues@.
signalSemaphoreValuesCount :: Word32
, -- | @pSignalSemaphoreValues@ is a pointer to an array of
-- @signalSemaphoreValuesCount@ values for the corresponding semaphores in
-- 'Vulkan.Core10.Queue.SubmitInfo'::@pSignalSemaphores@ to set when
-- signaled.
signalSemaphoreValues :: Vector Word64
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (D3D12FenceSubmitInfoKHR)
#endif
deriving instance Show D3D12FenceSubmitInfoKHR
instance ToCStruct D3D12FenceSubmitInfoKHR where
withCStruct x f = allocaBytes 48 $ \p -> pokeCStruct p x (f p)
pokeCStruct p D3D12FenceSubmitInfoKHR{..} f = evalContT $ do
lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR)
lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
let pWaitSemaphoreValuesLength = Data.Vector.length $ (waitSemaphoreValues)
waitSemaphoreValuesCount'' <- lift $ if (waitSemaphoreValuesCount) == 0
then pure $ fromIntegral pWaitSemaphoreValuesLength
else do
unless (fromIntegral pWaitSemaphoreValuesLength == (waitSemaphoreValuesCount) || pWaitSemaphoreValuesLength == 0) $
throwIO $ IOError Nothing InvalidArgument "" "pWaitSemaphoreValues must be empty or have 'waitSemaphoreValuesCount' elements" Nothing Nothing
pure (waitSemaphoreValuesCount)
lift $ poke ((p `plusPtr` 16 :: Ptr Word32)) (waitSemaphoreValuesCount'')
pWaitSemaphoreValues'' <- if Data.Vector.null (waitSemaphoreValues)
then pure nullPtr
else do
pPWaitSemaphoreValues <- ContT $ allocaBytes @Word64 (((Data.Vector.length (waitSemaphoreValues))) * 8)
lift $ Data.Vector.imapM_ (\i e -> poke (pPWaitSemaphoreValues `plusPtr` (8 * (i)) :: Ptr Word64) (e)) ((waitSemaphoreValues))
pure $ pPWaitSemaphoreValues
lift $ poke ((p `plusPtr` 24 :: Ptr (Ptr Word64))) pWaitSemaphoreValues''
let pSignalSemaphoreValuesLength = Data.Vector.length $ (signalSemaphoreValues)
signalSemaphoreValuesCount'' <- lift $ if (signalSemaphoreValuesCount) == 0
then pure $ fromIntegral pSignalSemaphoreValuesLength
else do
unless (fromIntegral pSignalSemaphoreValuesLength == (signalSemaphoreValuesCount) || pSignalSemaphoreValuesLength == 0) $
throwIO $ IOError Nothing InvalidArgument "" "pSignalSemaphoreValues must be empty or have 'signalSemaphoreValuesCount' elements" Nothing Nothing
pure (signalSemaphoreValuesCount)
lift $ poke ((p `plusPtr` 32 :: Ptr Word32)) (signalSemaphoreValuesCount'')
pSignalSemaphoreValues'' <- if Data.Vector.null (signalSemaphoreValues)
then pure nullPtr
else do
pPSignalSemaphoreValues <- ContT $ allocaBytes @Word64 (((Data.Vector.length (signalSemaphoreValues))) * 8)
lift $ Data.Vector.imapM_ (\i e -> poke (pPSignalSemaphoreValues `plusPtr` (8 * (i)) :: Ptr Word64) (e)) ((signalSemaphoreValues))
pure $ pPSignalSemaphoreValues
lift $ poke ((p `plusPtr` 40 :: Ptr (Ptr Word64))) pSignalSemaphoreValues''
lift $ f
cStructSize = 48
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
f
instance FromCStruct D3D12FenceSubmitInfoKHR where
peekCStruct p = do
waitSemaphoreValuesCount <- peek @Word32 ((p `plusPtr` 16 :: Ptr Word32))
pWaitSemaphoreValues <- peek @(Ptr Word64) ((p `plusPtr` 24 :: Ptr (Ptr Word64)))
let pWaitSemaphoreValuesLength = if pWaitSemaphoreValues == nullPtr then 0 else (fromIntegral waitSemaphoreValuesCount)
pWaitSemaphoreValues' <- generateM pWaitSemaphoreValuesLength (\i -> peek @Word64 ((pWaitSemaphoreValues `advancePtrBytes` (8 * (i)) :: Ptr Word64)))
signalSemaphoreValuesCount <- peek @Word32 ((p `plusPtr` 32 :: Ptr Word32))
pSignalSemaphoreValues <- peek @(Ptr Word64) ((p `plusPtr` 40 :: Ptr (Ptr Word64)))
let pSignalSemaphoreValuesLength = if pSignalSemaphoreValues == nullPtr then 0 else (fromIntegral signalSemaphoreValuesCount)
pSignalSemaphoreValues' <- generateM pSignalSemaphoreValuesLength (\i -> peek @Word64 ((pSignalSemaphoreValues `advancePtrBytes` (8 * (i)) :: Ptr Word64)))
pure $ D3D12FenceSubmitInfoKHR
waitSemaphoreValuesCount pWaitSemaphoreValues' signalSemaphoreValuesCount pSignalSemaphoreValues'
instance Zero D3D12FenceSubmitInfoKHR where
zero = D3D12FenceSubmitInfoKHR
zero
mempty
zero
mempty
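
-- A minimal sketch (not part of the generated bindings) of filling in a
-- 'D3D12FenceSubmitInfoKHR' before chaining it onto a
-- 'Vulkan.Core10.Queue.SubmitInfo' via its @pNext@ chain. The fence values are
-- hypothetical, and both count fields are left at @0@ so that 'pokeCStruct'
-- infers them from the vector lengths. As the Note above says, new code should
-- generally prefer
-- 'Vulkan.Core12.Promoted_From_VK_KHR_timeline_semaphore.TimelineSemaphoreSubmitInfo';
-- this sketch covers only the legacy binary-semaphore path:
--
-- > fenceValues :: D3D12FenceSubmitInfoKHR
-- > fenceValues = D3D12FenceSubmitInfoKHR
-- >   { waitSemaphoreValuesCount   = 0                         -- inferred from the wait vector
-- >   , waitSemaphoreValues        = Data.Vector.fromList [1]  -- hypothetical value to wait for
-- >   , signalSemaphoreValuesCount = 0                         -- inferred from the signal vector
-- >   , signalSemaphoreValues      = Data.Vector.fromList [2]  -- hypothetical value to signal
-- >   }
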
-- | VkSemaphoreGetWin32HandleInfoKHR - Structure describing a Win32 handle
-- semaphore export operation
--
-- = Description
--
-- The properties of the handle returned depend on the value of
-- @handleType@. See
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
-- for a description of the properties of the defined external semaphore
-- handle types.
--
-- == Valid Usage
--
-- - #VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01126#
-- @handleType@ /must/ have been included in
-- 'Vulkan.Core11.Promoted_From_VK_KHR_external_semaphore.ExportSemaphoreCreateInfo'::@handleTypes@
-- when the @semaphore@’s current payload was created
--
-- - #VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01127# If
-- @handleType@ is defined as an NT handle,
-- 'getSemaphoreWin32HandleKHR' /must/ be called no more than once for
-- each valid unique combination of @semaphore@ and @handleType@
--
-- - #VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-01128# @semaphore@
-- /must/ not currently have its payload replaced by an imported
-- payload as described below in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore Payloads>
-- unless that imported payload’s handle type was included in
-- 'Vulkan.Core11.Promoted_From_VK_KHR_external_semaphore_capabilities.ExternalSemaphoreProperties'::@exportFromImportedHandleTypes@
-- for @handleType@
--
-- - #VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01129# If
-- @handleType@ refers to a handle type with copy payload transference
-- semantics, as defined below in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore Payloads>,
-- there /must/ be no queue waiting on @semaphore@
--
-- - #VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01130# If
-- @handleType@ refers to a handle type with copy payload transference
-- semantics, @semaphore@ /must/ be signaled, or have an associated
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphores-signaling semaphore signal operation>
-- pending execution
--
-- - #VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01131#
-- @handleType@ /must/ be defined as an NT handle or a global share
-- handle
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkSemaphoreGetWin32HandleInfoKHR-sType-sType# @sType@ /must/
-- be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR'
--
-- - #VUID-VkSemaphoreGetWin32HandleInfoKHR-pNext-pNext# @pNext@ /must/
-- be @NULL@
--
-- - #VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-parameter#
-- @semaphore@ /must/ be a valid 'Vulkan.Core10.Handles.Semaphore'
-- handle
--
-- - #VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-parameter#
-- @handleType@ /must/ be a valid
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
-- value
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_external_semaphore_win32 VK_KHR_external_semaphore_win32>,
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits',
-- 'Vulkan.Core10.Handles.Semaphore',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'getSemaphoreWin32HandleKHR'
data SemaphoreGetWin32HandleInfoKHR = SemaphoreGetWin32HandleInfoKHR
{ -- | @semaphore@ is the semaphore from which state will be exported.
semaphore :: Semaphore
, -- | @handleType@ is a
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
-- value specifying the type of handle requested.
handleType :: ExternalSemaphoreHandleTypeFlagBits
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (SemaphoreGetWin32HandleInfoKHR)
#endif
deriving instance Show SemaphoreGetWin32HandleInfoKHR
instance ToCStruct SemaphoreGetWin32HandleInfoKHR where
withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
pokeCStruct p SemaphoreGetWin32HandleInfoKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Semaphore)) (semaphore)
poke ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (handleType)
f
cStructSize = 32
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Semaphore)) (zero)
poke ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (zero)
f
instance FromCStruct SemaphoreGetWin32HandleInfoKHR where
peekCStruct p = do
semaphore <- peek @Semaphore ((p `plusPtr` 16 :: Ptr Semaphore))
handleType <- peek @ExternalSemaphoreHandleTypeFlagBits ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits))
pure $ SemaphoreGetWin32HandleInfoKHR
semaphore handleType
instance Storable SemaphoreGetWin32HandleInfoKHR where
sizeOf ~_ = 32
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero SemaphoreGetWin32HandleInfoKHR where
zero = SemaphoreGetWin32HandleInfoKHR
zero
zero
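
-- A usage sketch (not part of the generated bindings): building a
-- 'SemaphoreGetWin32HandleInfoKHR' to pass to 'getSemaphoreWin32HandleKHR'.
-- The @sem@ argument stands for a previously created, exportable semaphore and
-- is assumed rather than defined here; the D3D12 fence handle type is chosen
-- purely for illustration:
--
-- > d3d12GetInfo :: Semaphore -> SemaphoreGetWin32HandleInfoKHR
-- > d3d12GetInfo sem = SemaphoreGetWin32HandleInfoKHR
-- >   { semaphore  = sem
-- >   , handleType = EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT
-- >   }
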
type KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION"
pattern KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION = 1
type KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME = "VK_KHR_external_semaphore_win32"
-- No documentation found for TopLevel "VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME"
pattern KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME = "VK_KHR_external_semaphore_win32"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_KHR_external_semaphore_win32.hs | bsd-3-clause | 46,052 | 0 | 22 | 7,922 | 5,420 | 3,217 | 2,203 | -1 | -1 |
{-|
Module      : Data.Maybe.Utils
Description : Utility functions for "Data.Maybe"
Copyright   : (c) Michael Klein, 2016
License     : BSD3
Maintainer  : lambdamichael(at)gmail.com
-}
module Data.Maybe.Utils where
-- | @justIfTrue b x == if b then Just x else Nothing@
justIfTrue :: Bool -> a -> Maybe a
justIfTrue True x = Just x
justIfTrue _ _ = Nothing
-- | 'justIfTrue' with its arguments flipped
justIf :: a -> Bool -> Maybe a
justIf = flip justIfTrue
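
-- A usage sketch (an addition, not part of the original module): 'justIf'
-- composes naturally with 'Data.Maybe.mapMaybe' to filter a list while
-- keeping the surviving values.
--
-- >>> import Data.Maybe (mapMaybe)
-- >>> mapMaybe (\x -> x `justIf` even x) [1..6]
-- [2,4,6]
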
| michaeljklein/git-details | src/Data/Maybe/Utils.hs | bsd-3-clause | 461 | 0 | 7 | 95 | 73 | 39 | 34 | 6 | 1 |
module Data.Geo.GPX.Lens.BoundsL where
import Data.Geo.GPX.Type.Bounds
import Data.Lens.Common
class BoundsL a where
boundsL :: Lens a (Maybe Bounds)
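
-- A hypothetical usage sketch (not part of the original module): an instance
-- exposes its bounds through the standard "Data.Lens.Common" accessors, e.g.
--
-- > getBounds :: BoundsL a => a -> Maybe Bounds
-- > getBounds = getL boundsL
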
| tonymorris/geo-gpx | src/Data/Geo/GPX/Lens/BoundsL.hs | bsd-3-clause | 155 | 0 | 9 | 22 | 48 | 29 | 19 | 5 | 0 |
{- |
Space-efficient tabulation combinators.
Matrices are always filled in a triangular shape because a subword
with indices i and j fulfils i<=j. The triangular shape makes it
possible to use a space-efficient packed array for one-dimensional
nonterminals.
For two-dimensional nonterminals, the same logic is applied, although
there is yet another source of space waste: overlapping subwords.
A better packing representation has yet to be found for this case.
-}
module ADP.Multi.TabulationTriangle where
import Data.Array
import ADP.Multi.Parser
-- | Two-dimensional tabulation for one-dim. parsers
-- using a packed one-dimensional array
table1' :: Array Int a -> Parser a b -> Parser a b
table1' z q =
let (_,n) = bounds z
arr = array (0,(n+1)*(n+2) `div` 2)
[(adr (i,j), q z [i,j])
| i <- [0..n], j <- [i..n] ]
adr (i,j) = i + (j*(j+1)) `div` 2
in \ _ [i,j] ->
if i <= j
then arr!adr (i,j)
else error "invalid subword"
-- | Two-dimensional tabulation for one-dim. parsers
-- using a packed one-dimensional array
table1 :: Array Int a -> RichParser a b -> RichParser a b
table1 z (info,q) = (info, table1' z q)
-- | Four-dimensional tabulation for two-dim. parsers
-- using a packed two-dimensional array
table2' :: Array Int a -> Parser a b -> Parser a b
table2' z q =
let (_,n) = bounds z
arr = array ((0,0),((n+1)*(n+2) `div` 2,(n+1)*(n+2) `div` 2))
[ ((adr (i,j),adr (k,l)), q z [i,j,k,l])
| i <- [0..n], j <- [i..n]
, k <- [0..n], l <- [k..n] ]
adr (i,j) = n*i - (i*(i-1)) `div` 2 + j
in \ _ [i,j,k,l] ->
if i <= j && k <= l
then arr!(adr (i,j), adr (k,l))
else error "invalid subword(s)"
-- | Four-dimensional tabulation for two-dim. parsers
-- using a packed two-dimensional array
table2 :: Array Int a -> RichParser a b -> RichParser a b
table2 z (info,q) = (info, table2' z q)
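
-- A small worked example (an addition, not in the original module) of the
-- packed triangular addressing used by 'table1''. For @n = 3@ the subwords
-- (i,j) with i <= j map injectively onto the @(n+1)*(n+2) `div` 2 = 10@ cells
-- of the flat array via @adr (i,j) = i + (j*(j+1)) `div` 2@:
--
-- > (0,0)->0  (0,1)->1  (1,1)->2  (0,2)->3  (1,2)->4
-- > (2,2)->5  (0,3)->6  (1,3)->7  (2,3)->8  (3,3)->9
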
| adp-multi/adp-multi | src/ADP/Multi/TabulationTriangle.hs | bsd-3-clause | 2,012 | 0 | 15 | 537 | 723 | 401 | 322 | 30 | 2 |