| code (string, 2 to 1.05M chars) | repo_name (string, 5 to 101 chars) | path (string, 4 to 991 chars) | language (string, 3 classes) | license (string, 5 classes) | size (int64, 2 to 1.05M) |
|---|---|---|---|---|---|
-- Statistics for an Athletic Association
-- http://www.codewars.com/kata/55b3425df71c1201a800009c/
module Codewars.G964.Stat where
import Data.List (sort, intercalate)
import Data.List.Split (split, dropDelims, oneOf)
import Text.Printf (printf)
stat :: String -> String
stat "" = ""
stat results = present . map ((sum . zipWith (*) [3600, 60, 1]) . map (\s -> read s :: Double) . split (dropDelims $ oneOf "|")) . split (dropDelims $ oneOf ",") $ results
where present xs = "Range: " ++ (formatT . range $ xs) ++ " Average: " ++ (formatT . mean $ xs) ++ " Median: " ++ (formatT . median $ xs)
range xs = maximum xs - minimum xs
mean xs = sum xs / (fromIntegral . length $ xs)
median xs | odd n = head $ drop (n `div` 2) xs'
| even n = mean $ take 2 $ drop i xs'
where i = (length xs' `div` 2) - 1
xs' = sort xs
n = length xs
formatT x = intercalate "|" . map (printf "%02d") $ [h, m, s]
where t = truncate x :: Int
h = t `div` 3600
m = (t `div` 60) `mod` 60
s = t `mod` 60
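-- A minimal usage sketch (hypothetical input; values worked out by hand from the definitions above):
-- stat "0|00|10, 0|00|20" == "Range: 00|00|10 Average: 00|00|15 Median: 00|00|15"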
| gafiatulin/codewars | src/6 kyu/Stat.hs | Haskell | mit | 1,191 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module System.Etc.Resolver.Cli.CommandTest where
import RIO
import qualified RIO.Set as Set
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (assertBool, assertEqual, assertFailure, testCase)
import System.Etc
with_command_option_tests :: TestTree
with_command_option_tests = testGroup
"option input"
[ testCase "entry accepts short" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"long\": \"greeting\""
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
(cmd, config) <- resolveCommandCliPure spec "program" ["test", "-g", "hello cli"]
assertEqual "invalid command output" "test" cmd
case getAllConfigSources ["greeting"] config of
Nothing -> assertFailure ("expecting to get entries for greeting\n" <> show config)
Just aSet -> assertBool ("expecting to see entry from env; got " <> show aSet)
(Set.member (Cli "hello cli") aSet)
, testCase "entry accepts long" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"long\": \"greeting\""
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
(cmd, config) <- resolveCommandCliPure spec
"program"
["test", "--greeting", "hello cli"]
assertEqual "invalid command output" "test" cmd
case getAllConfigSources ["greeting"] config of
Nothing -> assertFailure ("expecting to get entries for greeting\n" <> show config)
Just aSet -> assertBool ("expecting to see entry from env; got " <> show aSet)
(Set.member (Cli "hello cli") aSet)
, testCase "entry gets validated with a type" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"number\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"long\": \"greeting\""
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
case resolveCommandCliPure spec "program" ["test", "--greeting", "hello cli"] of
Left err -> case fromException err of
Just CliEvalExited{} -> return ()
_ -> assertFailure ("Expecting type validation to work on cli; got " <> show err)
Right _ -> assertFailure "Expecting type validation to work on cli"
, testCase "entry with required false does not barf" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test1\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"long\": \"greeting\""
, " , \"required\": false"
, " , \"commands\": [\"test1\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
(cmd, config) <- resolveCommandCliPure spec "program" ["test1"]
assertEqual "invalid command output" "test1" cmd
case getConfigValue ["greeting"] config of
Just aSet ->
assertFailure ("expecting to have no entry for greeting; got\n" <> show aSet)
(_ :: Maybe ()) -> return ()
, testCase "entry with required fails when option not given" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"long\": \"greeting\""
, " , \"required\": true"
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
case resolveCommandCliPure spec "program" ["test"] of
Left err -> case fromException err of
Just CliEvalExited{} -> return ()
_ ->
assertFailure ("Expecting required validation to work on cli; got " <> show err)
Right _ -> assertFailure "Expecting required option to fail cli resolving"
]
with_command_argument_tests :: TestTree
with_command_argument_tests = testGroup
"argument input"
[ testCase "entry gets validated with a type" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"number\""
, " , \"cli\": {"
, " \"input\": \"argument\""
, " , \"metavar\": \"GREETING\""
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
case resolveCommandCliPure spec "program" ["test", "hello cli"] of
Left err -> case fromException err of
Just CliEvalExited{} -> return ()
_ -> assertFailure ("Expecting type validation to work on cli; got " <> show err)
Right _ -> assertFailure "Expecting type validation to work on cli"
, testCase "entry with required false does not barf" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"argument\""
, " , \"metavar\": \"GREETING\""
, " , \"required\": false"
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
(cmd, config) <- resolveCommandCliPure spec "program" ["test"]
assertEqual "invalid command output" "test" cmd
case getConfigValue ["greeting"] config of
(Nothing :: Maybe ()) -> return ()
Just aSet ->
assertFailure ("expecting to have no entry for greeting; got\n" <> show aSet)
, testCase "entry with required fails when argument not given" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"argument\""
, " , \"metavar\": \"GREETING\""
, " , \"required\": true"
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
case resolveCommandCliPure spec "program" ["test"] of
Left err -> case fromException err of
Just CliEvalExited{} -> return ()
_ ->
assertFailure ("Expecting required validation to work on cli; got " <> show err)
Right _ -> assertFailure "Expecting required argument to fail cli resolving"
, testCase "supports same cli input on multiple arguments" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}"
, " , \"other\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"metavar\": \"GREETING\""
, " , \"required\": false"
, " , \"commands\": [\"test\", \"other\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
(cmd1, config1) <- resolveCommandCliPure spec "program" ["test", "-g", "hello"]
(cmd2, config2) <- resolveCommandCliPure spec "program" ["other", "-g", "hello"]
assertEqual "" "test" cmd1
assertEqual "" "other" cmd2
assertEqual "" config1 config2
]
with_command :: TestTree
with_command =
testGroup "when command given" [with_command_option_tests, with_command_argument_tests]
without_command :: TestTree
without_command = testCase "fails when command not given" $ do
let input = mconcat
[ "{ \"etc/cli\": {"
, " \"desc\": \"\""
, " , \"header\": \"\""
, " , \"commands\": {"
, " \"test\": {\"header\": \"\", \"desc\": \"\"}}}"
, ", \"etc/entries\": {"
, " \"greeting\": {"
, " \"etc/spec\": {"
, " \"type\": \"string\""
, " , \"cli\": {"
, " \"input\": \"option\""
, " , \"short\": \"g\""
, " , \"metavar\": \"GREETING\""
, " , \"required\": true"
, " , \"commands\": [\"test\"]"
, "}}}}}"
]
(spec :: ConfigSpec Text) <- parseConfigSpec input
case resolveCommandCliPure spec "program" [] of
Left err -> case fromException err of
Just CliEvalExited{} -> return ()
_ -> assertFailure ("Expecting sub-command to be required; got " <> show err)
Right _ -> assertFailure "Expecting sub-command to be required; it wasn't"
tests :: TestTree
tests = testGroup "command" [with_command, without_command]
| roman/Haskell-etc | etc/test/System/Etc/Resolver/Cli/CommandTest.hs | Haskell | mit | 11,878 |
{-# LANGUAGE Arrows, NoMonomorphismRestriction, RebindableSyntax #-}
module System.ArrowVHDL.Circuit.Defaults where
import Control.Category
import Prelude hiding (id, (.))
import qualified Data.Bits as B -- (shiftL, shiftR, xor, (.&.))
import System.ArrowVHDL.Circuit
import System.ArrowVHDL.Circuit.Grid
import System.ArrowVHDL.Circuit.Arrow
import System.ArrowVHDL.Circuit.Auxillary
import System.ArrowVHDL.Circuit.Descriptor
import System.ArrowVHDL.Circuit.Graphs
import System.ArrowVHDL.Circuit.Show
type KeyChunk = Int
type ValChunk = Int
type Key = (KeyChunk, KeyChunk, KeyChunk, KeyChunk)
type KeyHalf = (KeyChunk, KeyChunk)
type Value = (ValChunk, ValChunk)
-- xor :: Bool -> Bool -> Bool
-- xor x y | x == True && y == False = True
-- | x == False && y == True = True
-- | otherwise = False
oneNodeCircuit :: String -> CircuitDescriptor
oneNodeCircuit s = emptyCircuit { nodeDesc = emptyNodeDesc { label = s } }
aId :: (Arrow a) => Grid a b b
aId
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "ID"
, sinks = mkPins 1
, sources = mkPins 1
}
, cycles = 1
, space = 1
}
$ arr id
aConst :: (Arrow a, Show b) => b -> Grid a c b
aConst x
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "CONST_" ++ (show x)
, sinks = mkPins 1 -- a sink is needed for the rewire-function to work properly (TODO: is this ok?)
, sources = mkPins 1
}
, cycles = 0
, space = 1
}
$ arr (const x)
(.&.) :: Bool -> Bool -> Bool
True .&. True = True
_ .&. _ = False
(.|.) :: Bool -> Bool -> Bool
False .|. False = False
_ .|. _ = True
xor :: Bool -> Bool -> Bool
xor True False = True
xor False True = True
xor _ _ = False
-- shiftL8 :: (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool))))))))
-- -> Int
-- -> (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool))))))))
-- shiftL8 (x1, (x2, (x3, (x4, (x5, (x6, (x7, (x8)))))))) i
-- | i == 0
-- = (x1, (x2, (x3, (x4, (x5, (x6, (x7, (x8))))))))
-- | i == 1
-- = (x2, (x3, (x4, (x5, (x6, (x7, (x8, (False))))))))
-- | i == 2
-- = (x3, (x4, (x5, (x6, (x7, (x8, (False, (False))))))))
-- | i == 3
-- = (x4, (x5, (x6, (x7, (x8, (False, (False, (False))))))))
-- | i == 4
-- = (x5, (x6, (x7, (x8, (False, (False, (False, (False))))))))
-- | i == 5
-- = (x6, (x7, (x8, (False, (False, (False, (False, (False))))))))
-- | i == 6
-- = (x7, (x8, (False, (False, (False, (False, (False, (False))))))))
-- | i == 7
-- = (x8, (False, (False, (False, (False, (False, (False, (False))))))))
-- | i == 8
-- = (False, (False, (False, (False, (False, (False, (False, (False))))))))
-- shiftL = shiftL8
-- shiftR8 :: (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool))))))))
-- -> Int
-- -> (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool))))))))
-- shiftR8 (x1, (x2, (x3, (x4, (x5, (x6, (x7, (x8)))))))) i
-- | i == 0
-- = (x1, (x2, (x3, (x4, (x5, (x6, (x7, (x8))))))))
-- | i == 1
-- = (False, (x1, (x2, (x3, (x4, (x5, (x6, (x7))))))))
-- | i == 2
-- = (False, (False, (x1, (x2, (x3, (x4, (x5, (x6))))))))
-- | i == 3
-- = (False, (False, (False, (x1, (x2, (x3, (x4, (x5))))))))
-- | i == 4
-- = (False, (False, (False, (False, (x1, (x2, (x3, (x4))))))))
-- | i == 5
-- = (False, (False, (False, (False, (False, (x1, (x2, (x3))))))))
-- | i == 6
-- = (False, (False, (False, (False, (False, (False, (x1, (x2))))))))
-- | i == 7
-- = (False, (False, (False, (False, (False, (False, (False, (x1))))))))
-- | i == 8
-- = (False, (False, (False, (False, (False, (False, (False, (False))))))))
-- shiftR = shiftR8
-- aAnd :: (Arrow a, Bits b) => Grid a (b, b) (b)
aAnd :: (Arrow a) => Grid a (Bool, Bool) (Bool)
aAnd
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "AND"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (uncurry (.&.))
-- aOr :: (Arrow a, Bits b) => Grid a (b, b) (b) -- :: (Arrow a) => Grid a (Bool, Bool) (Bool)
aOr :: (Arrow a) => Grid a (Bool, Bool) (Bool)
aOr
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "OR"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (uncurry (.|.))
aNot :: (Arrow a) => Grid a (Bool) (Bool)
aNot
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "NOT"
, sinks = mkPins 1
, sources = mkPins 1
}
, cycles = 1
, space = 2
}
$ arr (not)
aBXor :: (Arrow a, B.Bits b) => Grid a (b, b) (b) -- :: (Arrow a) => Grid a (Bool, Bool) (Bool)
aBXor
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "XOR"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (uncurry B.xor)
aXor :: (Arrow a) => Grid a (Bool, Bool) (Bool)
aXor
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "XOR"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (uncurry xor)
-- aFst :: (Arrow a, Bits b) => Grid a (b, c) (b)
aFst :: (Arrow a) => Grid a (b, c) (b)
aFst
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "FST"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (fst)
-- aSnd :: (Arrow a, Bits c) => Grid a (b, c) (c)
aSnd :: (Arrow a) => Grid a (b, c) (c)
aSnd
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SND"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (snd)
aShiftL :: (Arrow a, B.Bits b) => Grid a (b, Int) (b)
-- aShiftL :: (Arrow a) => Grid a ((Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool))))))), Int) (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool)))))))
aShiftL
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SHIFTL"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 6
}
$ arr (uncurry B.shiftL)
aShiftR :: (Arrow a, B.Bits b) => Grid a (b, Int) (b)
-- aShiftR :: (Arrow a) => Grid a ((Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool))))))), Int) (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool)))))))
aShiftR
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SHIFTR"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 6
}
$ arr (uncurry B.shiftR)
aAdd :: (Arrow a, Num b) => Grid a (b, b) (b)
aAdd
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "ADD"
, sinks = mkPins 2
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (uncurry (+))
aFlip :: (Arrow a) => Grid a (b, c) (c, b)
aFlip
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "FLIP"
, sinks = mkPins 2
, sources = mkPins 2
}
, cycles = 1
, space = 4
}
$ arr (\(x, y) -> (y, x))
aSwapSnd :: (Arrow a) => Grid a ((b, c), d) ((b, d), c)
aSwapSnd
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SWPSND"
, sinks = mkPins 2
, sources = mkPins 2
}
, cycles = 1
, space = 6
}
$ arr (\((x, y), z) -> ((x, z), y))
aAssocRight = a_ABc2aBC
aAssocLeft = a_aBC2ABc
a_ABc2aBC :: (Arrow a) => Grid a ((b, c), d) (b, (c, d))
a_ABc2aBC
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "ABc2aBC"
, sinks = mkPins 2
, sources = mkPins 2
}
, cycles = 1
, space = 6
}
$ arr (\((x, y), z) -> (x, (y, z)))
a_aBC2ABc :: (Arrow a) => Grid a (b, (c, d)) ((b, c), d)
a_aBC2ABc
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "aBC2ABc"
, sinks = mkPins 2
, sources = mkPins 2
}
, cycles = 1
, space = 6
}
$ arr (\(x, (y, z)) -> ((x, y), z))
-- | 'aDistr' applies distributivity to an expression:
-- (x,(a,b)) -> ((x,a), (x,b))
-- aDistr :: (Arrow a, Bits b, Bits c, Bits d) => Grid a (b, (c, d)) ((b, c), (b, d))
aDistr :: (Arrow a) => Grid a (b, (c, d)) ((b, c), (b, d))
aDistr
= aDup
>>> second aFst *** second aSnd
-- | 'aDdistr' is the reverse of the 'aDistr' operation
-- aDdistr :: (Arrow a, Bits b, Bits c, Bits d, Bits e) => Grid a ((b, c), (d, e)) ((b, d), (c, e))
aDdistr :: (Arrow a) => Grid a ((b, c), (d, e)) ((b, d), (c, e))
aDdistr
= aSwapSnd
>>> a_aBC2ABc *** aId
>>> a_ABc2aBC
>>> aId *** aFlip
aShiftL4 :: (Arrow a, B.Bits b) => Grid a b b
-- aShiftL4 :: (Arrow a) => Grid a (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool))))))) (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool)))))))
aShiftL4
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SHIFTL4"
, sinks = mkPins 1
, sources = mkPins 1
}
, cycles = 1
, space = 6
}
$ arr (flip B.shiftL 4)
aShiftR5 :: (Arrow a, B.Bits b) => Grid a b b
-- aShiftR5 :: (Arrow a) => Grid a (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool))))))) (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, (Bool, Bool)))))))
aShiftR5
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "SHIFTR5"
, sinks = mkPins 1
, sources = mkPins 1
}
, cycles = 1
, space = 6
}
$ arr (flip B.shiftR 5)
-- aShiftL4addKey :: (Arrow a) => Grid a (ValChunk, KeyChunk) Int
-- aShiftL4addKey
-- = first aShiftL4
-- >>> aAdd
-- aShiftR5addKey :: (Arrow a) => Grid a (ValChunk, KeyChunk) Int
-- aShiftR5addKey
-- = first aShiftR5
-- >>> aAdd
--- NOTE: A nice problem showed up here:
-- since arr ... >>> aAdd is used further down, and arr ... has the type Arrow a,
-- while aAdd has the type Grid a, there is a type mismatch... either aAdd has to be
-- broken down to Arrow a, or arr ... has to be lifted into a Grid beforehand :)
--
-- Either way, nice ;)
--aXorMagic :: (Arrow a) => Grid a ValChunk Int
aXorMagic
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "ADDMAGIC"
, sinks = mkPins 1
, sources = mkPins 1
}
, cycles = 1
, space = 4
}
$ arr (\x -> (x, 2654435769)) >>> aBXor
--aDup :: (Arrow a) => Grid a b (b, b)
aDup
= augment
emptyCircuit
{ nodeDesc = emptyNodeDesc
{ label = "DUP"
, sinks = mkPins 1
, sources = mkPins 2
}
, cycles = 1
, space = 4
}
$ arr (\(x) -> (x, x))
aRegister :: (Arrow a) => Grid a b b
aRegister
= augment
( mkRegister $ emptyNodeDesc
{ sinks = mkPins 1
, sources = mkPins 1
}
)
$ arr id
-- aL_headtail :: (Arrow a) => Grid a ([b]) (b, [b])
-- aL_headtail
-- = augment
-- emptyCircuit
-- { nodeDesc = emptyNodeDesc
-- { label = "listHEADTAIL"
-- , sinks = mkPins 1
-- , sources = mkPins 2
-- }
-- , cycles = 2
-- , space = 16
-- }
-- $ arr (\(x:xs) -> (x, xs))
| frosch03/arrowVHDL | src/System/ArrowVHDL/Circuit/Defaults.hs | Haskell | cc0-1.0 | 12,752 |
module ProjectEuler.A268681 (a268681) where
import Data.List (nub)
import Tables.A007318 (a007318_row)
a268681 :: Integer -> Integer
a268681 n = sum $ filter squareFree $ nub $ concatMap a007318_row [0..n-1] where
squareFree k = all (\d -> k `mod` d /= 0) $ map (^2) [2..20]
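-- Worked example (hand-computed from the definition above, not quoted from OEIS):
-- for n = 5 the distinct entries of Pascal's triangle rows 0..4 are [1,2,3,4,6];
-- 4 is divisible by 2^2, so a268681 5 == 1 + 2 + 3 + 6 == 12.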
| peterokagey/haskellOEIS | src/ProjectEuler/A268681.hs | Haskell | apache-2.0 | 280 |
module Helpers.CostasLikeArrays (countPermutationsUpToDihedralSymmetry, distinctDirections, distinctDistances) where
import Data.List (elemIndex, nub)
import Data.Maybe (mapMaybe)
import Data.Ratio ((%))
import Helpers.Subsets (eachPair)
type Permutation = [Int]
distinctDistances :: Permutation -> Int
distinctDistances permutation = length $ nub $ map distanceSquare $ eachPair $ zip [0..] permutation where
distanceSquare ((x1, y1), (x2, y2)) = (x1 - x2)^2 + (y1 - y2)^2
distinctDirections :: Permutation -> Int
distinctDirections permutation = length $ nub $ map direction $ eachPair $ zip [0..] permutation
-- direction :: ((Int, Int), (Int, Int)) -> Ratio Int
direction ((x1, y1), (x2, y2)) = recip ratio `min` ratio where
ratio = abs $ (x1 - x2) % (y1 - y2)
quarterTurn :: Int -> Permutation -> Permutation
quarterTurn n permutation = mapMaybe (`elemIndex` permutation) [0..n-1]
horizontalSymmetries :: Int -> [Permutation] -> [Permutation]
horizontalSymmetries n = concatMap flips where
flips permutation = [permutation, flipped] where
flipped = map (n-1-) permutation
verticalSymmetries :: [Permutation] -> [Permutation]
verticalSymmetries = concatMap flips where
flips permutation = [permutation, flipped] where
flipped = reverse permutation
-- There's surely a more elegant way to do this.
rotationalSymmetries :: Int -> [Permutation] -> [Permutation]
rotationalSymmetries n = concatMap turns where
turns permutation = [permutation, quarterTurn n permutation]
canonicalRepresentative :: Int -> Permutation -> Permutation
canonicalRepresentative n permutation = minimum $ rotationalSymmetries n $ horizontalSymmetries n $ verticalSymmetries [permutation]
countPermutationsUpToDihedralSymmetry :: Int -> [Permutation] -> Int
countPermutationsUpToDihedralSymmetry n permutations = length $ nub $ map (canonicalRepresentative n) permutations
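-- Usage sketch (hand-checked against the definitions above): [2,1,0] is the reversal of
-- [0,1,2], so both reduce to the same canonical representative and
-- countPermutationsUpToDihedralSymmetry 3 [[0,1,2], [2,1,0]] == 1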
| peterokagey/haskellOEIS | src/Helpers/CostasLikeArrays.hs | Haskell | apache-2.0 | 1,882 |
-- Show expressions in prefix notation
module OperationExtension1 where
import DataBase
import DataExtension
instance Show Lit
where
show (Lit i) = "Lit " ++ show i
instance (Exp x, Exp y, Show x, Show y) => Show (Add x y)
where
show (Add x y) = "Add (" ++ show x ++ ") (" ++ show y ++ ")"
instance (Exp x, Show x) => Show (Neg x)
where
show (Neg x) = "Neg (" ++ show x ++ ")"
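-- Usage sketch (assuming the Lit/Add/Neg constructors exported by DataBase/DataExtension):
-- show (Add (Lit 1) (Neg (Lit 2))) == "Add (Lit 1) (Neg (Lit 2))"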
| egaburov/funstuff | Haskell/tytag/xproblem_src/samples/expressions/Haskell/OpenDatatype1/OperationExtension1.hs | Haskell | apache-2.0 | 392 |
module Time where
import Data.IORef (readIORef, IORef, newIORef, modifyIORef')
import Control.Monad
import qualified Graphics.UI.GLUT as GLUT
import Concurrency (writeIORef)
type FloatType = Float
type Time = FloatType
type DTime = FloatType
type TimeIORef = IORef Time
newTimeIORef :: IO TimeIORef
newTimeIORef = newIORef =<< elapsedTime
elapsedTime :: IO Time
elapsedTime = do
ms <- GLUT.get GLUT.elapsedTime
return $ fromIntegral ms / 1000
newTimeDelta :: TimeIORef -> IO DTime
newTimeDelta t = do
currentTime <- elapsedTime
lastTime <- writeIORef t currentTime
return $ currentTime - lastTime
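-- Note: GLUT.elapsedTime reports milliseconds, so elapsedTime above maps e.g. 1500 ms to 1.5 s.
-- newTimeDelta presumably returns the seconds since the previously stored time, assuming the
-- custom Concurrency.writeIORef swaps in the new value and returns the old one.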
| epeld/zatacka | old/Time.hs | Haskell | apache-2.0 | 628 |
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.Version
-- Copyright : (c) Simon Marlow 2003
-- License : BSD-like
--
-- Maintainer : haddock@projects.haskell.org
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
module Haddock.Version (
projectName, projectVersion, projectUrl
) where
import Paths_haddock_internal ( version )
import Data.Version ( showVersion )
projectName, projectUrl :: String
projectName = "Haddock"
projectUrl = "http://www.haskell.org/haddock/"
projectVersion :: String
projectVersion = showVersion version
| ghcjs/haddock-internal | src/Haddock/Version.hs | Haskell | bsd-2-clause | 705 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
-- |
-- Module : FRP.Animas.Vector3
-- Copyright : (c) Antony Courtney and Henrik Nilsson, Yale University, 2003
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : nilsson@cs.yale.edu
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
-- 3D vector abstraction (R^3).
--
-- ToDo: Deriving Show, or provide dedicated show instance?
module FRP.Animas.Vector3 (
Vector3,
vector3,
vector3X,
vector3Y,
vector3Z,
vector3XYZ,
vector3Spherical,
vector3Rho,
vector3Theta,
vector3Phi,
vector3RhoThetaPhi,
vector3Rotate
) where
import FRP.Animas.VectorSpace
import FRP.Animas.Forceable
-- | 3-dimensional vector
data RealFloat a => Vector3 a = Vector3 !a !a !a deriving (Eq, Show)
-- | Construct a 3 dimensional vector
vector3 :: RealFloat a => a -- ^ X magnitude
-> a -- ^ Y magnitude
-> a -- ^ Z magnitude
-> Vector3 a -- ^ Vector
vector3 x y z = Vector3 x y z
-- | X magnitude of the vector
vector3X :: RealFloat a => Vector3 a -> a
vector3X (Vector3 x _ _) = x
-- | Y magnitude of the vector
vector3Y :: RealFloat a => Vector3 a -> a
vector3Y (Vector3 _ y _) = y
-- | Z magnitude of the vector
vector3Z :: RealFloat a => Vector3 a -> a
vector3Z (Vector3 _ _ z) = z
-- | Ordered pair of magnitudes of the vector
vector3XYZ :: RealFloat a => Vector3 a
-> (a, a, a) -- ^ (X, Y, Z)
vector3XYZ (Vector3 x y z) = (x, y, z)
-- | Spherical coordinates to vector
vector3Spherical :: RealFloat a => a -- ^ magnitude
-> a -- ^ Theta-direction
-> a -- ^ Phi-direction
-> Vector3 a
vector3Spherical rho theta phi =
Vector3 (rhoSinPhi * cos theta) (rhoSinPhi * sin theta) (rho * cos phi)
where
rhoSinPhi = rho * sin phi
-- | Magnitude of a vector
vector3Rho :: RealFloat a => Vector3 a -> a
vector3Rho (Vector3 x y z) = sqrt (x * x + y * y + z * z)
-- | Theta-direction of a vector
vector3Theta :: RealFloat a => Vector3 a -> a
vector3Theta (Vector3 x y _) = atan2 y x
-- | Phi-direction of a vector
vector3Phi :: RealFloat a => Vector3 a -> a
vector3Phi v@(Vector3 _ _ z) = acos (z / vector3Rho v)
-- | Magnitude and directions of a vector as an ordered triple
vector3RhoThetaPhi :: RealFloat a => Vector3 a
-> (a, a, a) -- ^ (Rho, Theta, Phi)
vector3RhoThetaPhi (Vector3 x y z) = (rho, theta, phi)
where
rho = sqrt (x * x + y * y + z * z)
theta = atan2 y x
phi = acos (z / rho)
instance RealFloat a => VectorSpace (Vector3 a) a where
zeroVector = Vector3 0 0 0
a *^ (Vector3 x y z) = Vector3 (a * x) (a * y) (a * z)
(Vector3 x y z) ^/ a = Vector3 (x / a) (y / a) (z / a)
negateVector (Vector3 x y z) = (Vector3 (-x) (-y) (-z))
(Vector3 x1 y1 z1) ^+^ (Vector3 x2 y2 z2) = Vector3 (x1+x2) (y1+y2) (z1+z2)
(Vector3 x1 y1 z1) ^-^ (Vector3 x2 y2 z2) = Vector3 (x1-x2) (y1-y2) (z1-z2)
(Vector3 x1 y1 z1) `dot` (Vector3 x2 y2 z2) = x1 * x2 + y1 * y2 + z1 * z2
-- | Rotate a vector
vector3Rotate :: RealFloat a =>
a -- ^ Difference of theta
-> a -- ^ Difference of phi
-> Vector3 a -- ^ Initial vector
-> Vector3 a -- ^ Rotated vector
vector3Rotate theta' phi' v =
vector3Spherical (vector3Rho v)
(vector3Theta v + theta')
(vector3Phi v + phi')
instance RealFloat a => Forceable (Vector3 a) where
force = id
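-- Usage sketch (values follow directly from the definitions above):
-- vector3Rho (vector3 3 4 0) == 5.0
-- vector3XYZ (vector3 1 2 3) == (1.0, 2.0, 3.0)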
| eamsden/Animas | src/FRP/Animas/Vector3.hs | Haskell | bsd-3-clause | 3,576 |
{-# LANGUAGE LambdaCase, OverloadedStrings #-}
-- | Bash script evaluation.
module Bash.Config.Eval
( Eval(..)
, interpret
) where
import Control.Applicative
import Control.Monad.Reader.Class
import Control.Monad.State.Class
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid hiding (Last)
import Bash.Config.Cond
import Bash.Config.Expand
import Bash.Config.Types
import Bash.Config.Word
-- | Interpret a script or function, returning the resulting environment
-- variables and function definitions. Any variables or functions missing
-- are assumed to be unknown.
interpret :: Eval a => a -> Env -> Either String Env
interpret a = fmap snd . runBash (eval a) Clean
-- | Evaluate with a dirty status.
dirty :: Eval a => a -> Bash ExitStatus
dirty a = Nothing <$ local (const Dirty) (eval a)
-- | Execute in a subshell. Environment changes during the subshell execution
-- will not affect the outside environment.
subshell :: Eval a => a -> Bash ExitStatus
subshell a = do
env <- get
r <- eval a
put env
return r
-- | Execute an assignment builtin.
assignBuiltin :: Word -> [Either Assign Word] -> Bash ExitStatus
assignBuiltin b args = Nothing <$ case Map.lookup b assignBuiltins of
Nothing -> return ()
Just f -> mapM_ f args
where
assignBuiltins = Map.fromList $
[ ("alias" , \_ -> return ())
, ("declare" , perform )
, ("export" , perform )
, ("local" , unassign)
, ("readonly", unassign)
, ("typeset" , perform )
]
perform (Left a) = () <$ eval a
perform _ = return ()
unassign (Left (Assign n _ _)) = unset n
unassign (Right w) = unset (toString w)
-- | Execute a simple command.
command :: String -> [String] -> Bash ExitStatus
command name args = do
defined <- gets functions
let allCommands = builtins <> fmap (const . eval) defined
case Map.lookup name allCommands of
Nothing -> return Nothing
Just f -> f args
-- | Execute a function definition.
functionDef :: Word -> Function -> Bash ExitStatus
functionDef w f = Just True <$ define name f
<|> Nothing <$ undefine name
where
name = toString w
-- | Interpreter builtins. These are commands that the interpreter knows
-- how to execute. Any command not in this map is assumed to be user-defined,
-- or external.
--
-- The implemented builtins are @test@, @[@, @true@, and @false@. Most shell
-- builtins are assumed to have unpredictable effects and will cause the
-- interpreter to fail. However, some shell builtins, such as
-- @break@, @continue@, @pwd@, etc. are assumed to be safe. Builtins that
-- could take an assignment as a parameter are implemented separately.
builtins :: Map String ([String] -> Bash ExitStatus)
builtins = Map.fromList $
-- implemented builtins
[ ("test" , return . test )
, ("[" , return . test_)
, ("true" , \_ -> return (Just True) )
, ("false", \_ -> return (Just False))
]
-- unsafe builtins
++ map (\name -> (name, \_ -> unimplemented name))
[ ".", "builtin", "caller", "enable", "exec", "exit", "let"
, "logout", "mapfile", "read", "readarray", "return", "source"
, "trap", "unset", "unalias"
]
-- | Executable commands.
class Eval a where
-- | Execute a command, and return its return value.
eval :: a -> Bash ExitStatus
instance Eval a => Eval [a] where
eval [] = return (Just True)
eval cs = last <$> mapM eval cs
instance Eval Script where
eval (Script l) = eval l
instance Eval Command where
eval (Simple c) = eval c
eval (Shell c) = eval c
eval (FunctionDef w f) = functionDef w f
eval Coproc = unimplemented "coproc"
instance Eval List where
eval (List cs) = eval cs
instance Eval AndOr where
eval (Last p ) = eval p
eval (And p cs) = eval p >>= \case
Nothing -> dirty cs
Just False -> return (Just False)
Just True -> eval cs
eval (Or p cs) = eval p >>= \case
Nothing -> dirty cs
Just False -> eval cs
Just True -> return (Just True)
instance Eval Pipeline where
eval (Pipeline b cs) = bang $ case cs of
[] -> return (Just True)
[c] -> eval c
_ -> subshell cs
where
bang = if b then invert else id
invert = fmap (fmap not)
instance Eval SimpleCommand where
eval (SimpleCommand as ws) = optional (expandWordList ws) >>= \case
Nothing -> return Nothing
Just [] -> eval as
Just (c:args) -> command c args
eval (AssignCommand b args) = assignBuiltin b args
instance Eval Assign where
eval (Assign name op a) = Just True <$ (assign name =<< expandValue a)
<|> Nothing <$ unset name
where
assign = case op of
Equals -> set
PlusEquals -> augment
instance Eval Function where
eval (Function body) = eval body
instance Eval ShellCommand where
eval (Subshell l ) = subshell l
eval (Group l ) = eval l
eval (Arith s ) = unimplemented $ "((" ++ s ++ "))"
eval (Cond ws ) = cond ws
eval (For _ _ l ) = dirty l
eval (ArithFor s _) = unimplemented $ "for ((" ++ s ++ "))"
eval (Select _ _ l) = dirty l
eval (Case _ cs ) = eval cs
eval (If p t f ) = eval p >>= \case
Nothing -> dirty t >> dirty f
Just r -> eval $ if r then t else f
eval (Until p l ) = dirty p >> dirty l
eval (While p l ) = dirty p >> dirty l
instance Eval CaseClause where
eval (CaseClause _ l _) = dirty l
| knrafto/bash-config | src/Bash/Config/Eval.hs | Haskell | bsd-3-clause | 5,812 |
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
module Silvi
( module Silvi.Encode
, module Silvi.Random
, module Silvi.Types
) where
import Silvi.Encode
import Silvi.Random
import Silvi.Types
| chessai/silvi | src/Silvi.hs | Haskell | bsd-3-clause | 203 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.NV.FramebufferMultisampleCoverage
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.NV.FramebufferMultisampleCoverage (
-- * Extension Support
glGetNVFramebufferMultisampleCoverage,
gl_NV_framebuffer_multisample_coverage,
-- * Enums
pattern GL_MAX_MULTISAMPLE_COVERAGE_MODES_NV,
pattern GL_MULTISAMPLE_COVERAGE_MODES_NV,
pattern GL_RENDERBUFFER_COLOR_SAMPLES_NV,
pattern GL_RENDERBUFFER_COVERAGE_SAMPLES_NV,
-- * Functions
glRenderbufferStorageMultisampleCoverageNV
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/NV/FramebufferMultisampleCoverage.hs | Haskell | bsd-3-clause | 959 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : SMSAero.Types
-- Copyright : (c) 2016, GetShopTV
-- License : BSD3
-- Maintainer : nickolay@getshoptv.com
-- Stability : experimental
--
-- This module defines types used in SMSAero API.
module SMSAero.Types (
SMSAeroAuth(..),
Signature(..),
MessageId(..),
MessageBody(..),
Group(..),
Phone(..),
SMSAeroDate(..),
SendType(..),
DigitalChannel(..),
Name(..),
BirthDate(..),
ChannelName,
) where
import Control.Applicative (empty)
import Data.Aeson
import Data.Int (Int64)
import Data.Monoid
import Data.Time (UTCTime)
import Data.Time.Calendar (Day)
import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds, posixSecondsToUTCTime)
import Data.Text (Text)
import qualified Data.Text as Text
import Web.HttpApiData
-- | SMSAero sender's signature. This is used for the "from" field.
newtype Signature = Signature { getSignature :: Text } deriving (Eq, Show, FromJSON, ToJSON, ToHttpApiData, FromHttpApiData)
-- | SMSAero sent message id.
newtype MessageId = MessageId Int64
deriving (Eq, Show, Ord, FromJSON, ToJSON, ToHttpApiData, FromHttpApiData
#if MIN_VERSION_aeson(1,0,0)
, ToJSONKey, FromJSONKey
#endif
)
-- | SMSAero message body.
newtype MessageBody = MessageBody Text deriving (Eq, Show, FromJSON, ToJSON, ToHttpApiData, FromHttpApiData)
-- | SMSAero group name.
newtype Group = Group Text deriving (Eq, Show, FromJSON, ToJSON, ToHttpApiData, FromHttpApiData)
-- | SMSAero channel name.
type ChannelName = Text
-- | SMSAero authentication data.
data SMSAeroAuth = SMSAeroAuth
{ authUser :: Text -- ^ Username.
, authPassword :: Text -- ^ MD5 hash of a password.
}
instance FromJSON SMSAeroAuth where
parseJSON (Object o) = SMSAeroAuth
<$> o .: "user"
<*> o .: "password"
parseJSON _ = empty
instance ToJSON SMSAeroAuth where
toJSON SMSAeroAuth{..} = object
[ "user" .= authUser
, "password" .= authPassword ]
-- | Phone number.
newtype Phone = Phone { getPhone :: Int64 } deriving (Eq, Show, ToHttpApiData, FromHttpApiData)
-- | Date. Textually @SMSAeroDate@ is represented as a number of seconds since 01 Jan 1970.
newtype SMSAeroDate = SMSAeroDate { getSMSAeroDate :: UTCTime } deriving (Eq, Show)
instance ToHttpApiData SMSAeroDate where
toQueryParam (SMSAeroDate dt) = Text.pack (show (utcTimeToPOSIXSeconds dt))
instance FromHttpApiData SMSAeroDate where
parseQueryParam s = do
n <- fromInteger <$> parseQueryParam s
return (SMSAeroDate (posixSecondsToUTCTime n))
-- | Send type. This is used to describe the send channel; it defaults to @FreeSignatureExceptMTC@.
-- Textually @SendType@ is represented as a number from 1 to 6, excluding 5.
data SendType
= PaidSignature -- ^ Paid literal signature for all operators.
| FreeSignatureExceptMTC -- ^ Free literal signature for all operators except MTS.
| FreeSignature -- ^ Free literal signature for all operators.
| InfoSignature -- ^ Infosignature for all operators.
| International -- ^ International delivery (for RU and KZ operators).
deriving (Eq, Show, Bounded, Enum)
-- | Digital send channel. Textually represented as '1' if the parameter is present.
data DigitalChannel = DigitalChannel
instance ToHttpApiData DigitalChannel where
toQueryParam _ = "1"
instance FromHttpApiData DigitalChannel where
parseQueryParam "1" = Right DigitalChannel
parseQueryParam x = Left ("expected 1 for digital channel (but got " <> x <> ")")
instance ToHttpApiData SendType where
toQueryParam PaidSignature = "1"
toQueryParam FreeSignatureExceptMTC = "2"
toQueryParam FreeSignature = "3"
toQueryParam InfoSignature = "4"
toQueryParam International = "6"
instance FromHttpApiData SendType where
parseQueryParam = parseBoundedQueryParam
-- | Subscriber's name.
newtype Name = Name Text deriving (Eq, Show, ToHttpApiData, FromHttpApiData)
-- | Subscriber's birth date. Textually represented in %Y-%m-%d format.
newtype BirthDate = BirthDate Day deriving (Eq, Show, ToHttpApiData, FromHttpApiData)
| GetShopTV/smsaero | src/SMSAero/Types.hs | Haskell | bsd-3-clause | 4,219 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
import Control.Applicative
import Control.Exception
import Control.Monad
import Data.List
import Data.Traversable (for)
import Distribution.PackageDescription
import Distribution.Simple
import Distribution.Simple.BuildPaths
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Setup
import Distribution.Simple.Utils
import Distribution.Simple.Program
import qualified Distribution.Verbosity as Verbosity
import System.Directory
import System.FilePath
import System.Info
main :: IO ()
main = defaultMainWithHooks simpleUserHooks
{ confHook = customConfHook
, buildHook = customBuildHook
, copyHook = customCopyHook
, cleanHook = customCleanHook
, hookedPrograms = hookedPrograms simpleUserHooks
++ [ makeProgram ]
}
customConfHook :: (GenericPackageDescription, HookedBuildInfo) -> ConfigFlags
-> IO LocalBuildInfo
customConfHook (pkg, pbi) flags = do
(_, includeDir, _) <- libvoyeurPaths
let addIncludeDirs = (onLocalLibBuildInfo . onIncludeDirs) (++ [".", includeDir])
addLibs = if os == "darwin"
then id
else (onLocalLibBuildInfo . onLdOptions) (++ ["-lbsd"])
lbi <- confHook simpleUserHooks (pkg, pbi) flags
return $ (addLibs . addIncludeDirs) lbi
customBuildHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> BuildFlags -> IO ()
customBuildHook pkg lbi usrHooks flags = do
putStrLn "Building libvoyeur..."
(libvoyeurDir, _, libDir) <- libvoyeurPaths
let verbosity = fromFlag (buildVerbosity flags)
runMake = runDbProgram verbosity makeProgram (withPrograms lbi)
inDir libvoyeurDir $
runMake []
buildHook simpleUserHooks pkg lbi usrHooks flags
notice verbosity "Relinking libvoyeur.."
let libObjs = map (libObjPath libDir) [ "voyeur"
, "net"
, "env"
, "event"
, "util"
]
componentLibs = concatMap componentLibNames $ componentsConfigs lbi
addStaticObjectFile libName objName = runAr ["r", libName, objName]
runAr = runDbProgram verbosity arProgram (withPrograms lbi)
forM_ componentLibs $ \componentLib -> do
when (withVanillaLib lbi) $
let libName = buildDir lbi </> mkLibName componentLib
in mapM_ (addStaticObjectFile libName) libObjs
when (withProfLib lbi) $
let libName = buildDir lbi </> mkProfLibName componentLib
in mapM_ (addStaticObjectFile libName) libObjs
when (withSharedLib lbi) $
let libName = buildDir lbi </> mkSharedLibName buildCompilerId componentLib
in mapM_ (addStaticObjectFile libName) libObjs
customCopyHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> CopyFlags -> IO ()
customCopyHook pkg lbi hooks flags = do
let verb = fromFlagOrDefault Verbosity.normal $ copyVerbosity flags
copyHook simpleUserHooks pkg lbi hooks flags
putStrLn "Installing libvoyeur helper libraries..."
let helperLibs = [ "exec", "exit", "open", "close" ]
helperLibFiles = map (("libvoyeur-" ++) . (<.> dllExtension)) helperLibs
helperLibDir = datadir (absoluteInstallDirs pkg lbi NoCopyDest)
(_, _, libDir) <- libvoyeurPaths
copyFiles verb helperLibDir $ map (libDir,) helperLibFiles
customCleanHook :: PackageDescription -> () -> UserHooks -> CleanFlags -> IO ()
customCleanHook pkg v hooks flags = do
putStrLn "Cleaning libvoyeur..."
let verb = fromFlagOrDefault Verbosity.normal $ cleanVerbosity flags
pgmConf <- configureProgram verb (simpleProgram "make") defaultProgramConfiguration
(libvoyeurDir, _, _) <- libvoyeurPaths
inDir libvoyeurDir $
runDbProgram verb makeProgram pgmConf ["clean"]
cleanHook simpleUserHooks pkg v hooks flags
libvoyeurPaths :: IO (FilePath, FilePath, FilePath)
libvoyeurPaths = do
curDir <- getCurrentDirectory
return (curDir </> "libvoyeur",
curDir </> "libvoyeur" </> "include",
curDir </> "libvoyeur" </> "build")
componentLibNames :: (ComponentName, ComponentLocalBuildInfo, [ComponentName]) -> [LibraryName]
componentLibNames (_, LibComponentLocalBuildInfo {..}, _) = componentLibraries
componentLibNames _ = []
makeProgram :: Program
makeProgram = simpleProgram "make"
libObjPath :: FilePath -> FilePath -> FilePath
libObjPath dir name = dir </> name <.> objExtension
inDir :: FilePath -> IO a -> IO a
inDir dir act = do
curDir <- getCurrentDirectory
bracket_ (setCurrentDirectory dir)
(setCurrentDirectory curDir)
act
type Lifter a b = (a -> a) -> b -> b
onLocalPkgDescr :: Lifter PackageDescription LocalBuildInfo
onLocalPkgDescr f lbi = lbi { localPkgDescr = f (localPkgDescr lbi) }
onLibrary :: Lifter Library PackageDescription
onLibrary f lpd = lpd { library = f <$> library lpd }
onLibBuildInfo :: Lifter BuildInfo Library
onLibBuildInfo f lib = lib { libBuildInfo = f (libBuildInfo lib) }
onLocalLibBuildInfo :: Lifter BuildInfo LocalBuildInfo
onLocalLibBuildInfo = onLocalPkgDescr . onLibrary . onLibBuildInfo
onIncludeDirs :: Lifter [FilePath] BuildInfo
onIncludeDirs f libbi = libbi { includeDirs = f (includeDirs libbi) }
onLdOptions :: Lifter [FilePath] BuildInfo
onLdOptions f libbi = libbi { ldOptions = f (ldOptions libbi) }
onPkgDescr :: Lifter PackageDescription GenericPackageDescription
onPkgDescr f gpd = gpd { packageDescription = f (packageDescription gpd) }
onExtraSrcFiles :: Lifter [FilePath] PackageDescription
onExtraSrcFiles f pd = pd { extraSrcFiles = f (extraSrcFiles pd) }
| sethfowler/hslibvoyeur | Setup.hs | Haskell | bsd-3-clause | 5,766 |
-- | This module exports the types used to create flag writes.
module Data.Factual.Write.Flag
(
-- * Flag type
Flag(..)
-- * Problem type
, Problem(..)
-- * Required modules
, module Data.Factual.Shared.Table
) where
import Data.Factual.Write
import Data.Factual.Shared.Table
import Data.Maybe (fromJust)
import Data.List.Utils (join)
import Data.Factual.Utils
import qualified Data.Map as M
-- | A Problem represents what is wrong with the row being flagged
data Problem = Duplicate
| Nonexistent
| Inaccurate
| Inappropriate
| Spam
| Other
deriving (Eq, Show)
-- | The Flag type represents a Write to be made to the API which flags a
-- row as having some kind of problem. The table and factualId identify the
-- problematic row, while the problem indicates the type of issue the row
-- has. The user is specified as a string. Other fields such as comment and
-- reference are optional. The debug flag is used to write in debug mode.
data Flag = Flag { table :: Table
, factualId :: String
, problem :: Problem
, user :: String
, comment :: Maybe String
, dataJSON :: Maybe String
, fields :: Maybe [String]
, reference :: Maybe String
} deriving (Eq, Show)
-- The Flag type is a member of the Write typeclass so it can be sent as a post
-- request to the API.
instance Write Flag where
path flag = (show $ table flag) ++ "/" ++ (factualId flag) ++ "/flag"
params _ = M.empty
body flag = M.fromList [ ("problem", show $ problem flag)
, ("user", user flag)
, commentPair flag
, dataPair flag
, fieldsPair flag
, referencePair flag ]
-- The following functions are helpers for the body function
commentPair :: Flag -> (String, String)
commentPair flag
| comment flag == Nothing = ("comment", "")
| otherwise = ("comment", fromJust $ comment flag)
dataPair :: Flag -> (String, String)
dataPair flag
| dataJSON flag == Nothing = ("data", "")
| otherwise = ("data", fromJust $ dataJSON flag)
fieldsPair :: Flag -> (String, String)
fieldsPair flag
| fields flag == Nothing = ("fields", "")
| otherwise = ("fields", arrayString)
where arrayString = "[" ++ (join "," $ fromJust $ fields flag) ++ "]"
referencePair :: Flag -> (String, String)
referencePair flag
| reference flag == Nothing = ("reference", "")
| otherwise = ("reference", fromJust $ reference flag)
| rudyl313/factual-haskell-driver | Data/Factual/Write/Flag.hs | Haskell | bsd-3-clause | 2,737 |
module Language.SequentCore.Driver.Flags (
SeqFlags(..), SeqDumpFlag(..), SeqGeneralFlag(..),
FloatOutSwitches(..), FinalPassSwitches(..), ContifySwitches(..),
sgopt, sgopt_set, sgopt_unset,
sdopt, sdopt_set, sdopt_unset,
parseSeqFlags
) where
import CmdLineParser
import FastString
import MonadUtils
import Outputable
import Panic
import SrcLoc
import Control.Monad
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
parseSeqFlags :: MonadIO m => [String]
-> m (SeqFlags, [String], [String])
parseSeqFlags args = do
let ((leftover, errs, warns), sflags)
= runCmdLine (processArgs seqFlags (map noLoc args))
defaultSeqFlags
unless (null errs) $ liftIO $
throwGhcExceptionIO $ errorsToGhcException errs
return (sflags, map unLoc leftover, map unLoc warns)
data SeqDumpFlag
= Opt_D_dump_llf
| Opt_D_dump_seq_xlate
| Opt_D_dump_seq_pipeline
| Opt_D_dump_cfy_stats
deriving (Eq, Ord, Enum)
data SeqGeneralFlag
= Opt_EnableSeqSimpl -- ^ Use Sequent Core simplifier (Language.SequentCore.Simpl)
| Opt_EnableSeqFloatOut -- ^ Use Sequent Core implementation of Float Out (Language.SequentCore.FloatOut)
| Opt_EnableSeqSpecConstr -- ^ Use Sequent Core implementation of SpecConstr (Language.SequentCore.SpecConstr)
| Opt_EnableContify -- ^ Use contification pass (aggressive mode)
| Opt_CombineSeqPasses -- ^ Avoid churning between Core and Sequent Core
-- TODO Contify more often so that there is nothing to gain by going back and forth
| Opt_ContifyBetweenSeqPasses -- ^ Contify (gently) between consecutive Sequent Core passes
| Opt_Contify_Simpl -- ^ Run (Sequent Core) simplifier after full contification
| Opt_CoreSimplAtEnd -- ^ Run the original simplifier at the very end of the pipeline
| Opt_SeqSimplAtEnd -- ^ Run the Sequent Core simplifier at the very end of the pipeline
| Opt_ProtectLastValArg
| Opt_IgnoreRealWorld
| Opt_FloatNullaryJoins -- ^ Always allowed to float a nullary join point
| Opt_LLF -- ^ Enable the late lambda lift pass
| Opt_LLF_AbsUnsat -- ^ allowed to abstract undersaturated applied let-bound variables?
| Opt_LLF_AbsSat -- ^ allowed to abstract saturated applied let-bound variables?
| Opt_LLF_AbsOversat -- ^ allowed to abstract oversaturated applied let-bound variables?
| Opt_LLF_CreatePAPs -- ^ allowed to float function bindings that occur unapplied
| Opt_LLF_Simpl -- ^ follow the late lambda lift with a simplification pass?
| Opt_LLF_Stabilize
| Opt_LLF_UseStr -- ^ use strictness in the late lambda float
| Opt_LLF_OneShot
deriving (Eq, Ord, Enum)
data SeqFlags = SeqFlags {
seqDumpFlags :: IntSet,
seqGeneralFlags :: IntSet,
lateFloatNonRecLam :: Maybe Int, -- ^ Limit on # abstracted variables for *late* non-recursive function floating (Nothing => all, Just 0 => none)
lateFloatRecLam :: Maybe Int, -- ^ Limit on # abstracted variables for *late* recursive function floating
lateFloatIfInClo :: Maybe Int, -- ^ Limit on # abstracted variables for floating a binding that occurs in a closure
lateFloatCloGrowth :: Maybe Int, -- ^ Limit on # additional free variables for closures in which the function occurs
lateFloatCloGrowthInLam :: Maybe Int
}
defaultSeqFlags :: SeqFlags
defaultSeqFlags =
SeqFlags {
seqDumpFlags = IntSet.empty,
seqGeneralFlags = IntSet.fromList (map fromEnum defaultGeneralFlags),
lateFloatNonRecLam = Just 10,
lateFloatRecLam = Just 6,
lateFloatIfInClo = Nothing,
lateFloatCloGrowth = Just 0,
lateFloatCloGrowthInLam = Just 0
}
defaultGeneralFlags :: [SeqGeneralFlag]
defaultGeneralFlags = [ Opt_LLF_AbsUnsat, Opt_LLF_UseStr, Opt_LLF_OneShot,
Opt_LLF_Simpl, Opt_LLF_Stabilize ]
-- | Test whether a 'SeqGeneralFlag' is set
sgopt :: SeqGeneralFlag -> SeqFlags -> Bool
sgopt f sflags = fromEnum f `IntSet.member` seqGeneralFlags sflags
-- | Set a 'SeqGeneralFlag'
sgopt_set :: SeqFlags -> SeqGeneralFlag -> SeqFlags
sgopt_set sfs f = sfs{ seqGeneralFlags = IntSet.insert (fromEnum f) (seqGeneralFlags sfs) }
-- | Unset a 'SeqGeneralFlag'
sgopt_unset :: SeqFlags -> SeqGeneralFlag -> SeqFlags
sgopt_unset sfs f = sfs{ seqGeneralFlags = IntSet.delete (fromEnum f) (seqGeneralFlags sfs) }
-- | Test whether a 'SeqDumpFlag' is set
sdopt :: SeqDumpFlag -> SeqFlags -> Bool
sdopt f sflags = fromEnum f `IntSet.member` seqDumpFlags sflags
-- | Set a 'SeqDumpFlag'
sdopt_set :: SeqFlags -> SeqDumpFlag -> SeqFlags
sdopt_set sfs f = sfs{ seqDumpFlags = IntSet.insert (fromEnum f) (seqDumpFlags sfs) }
-- | Unset a 'SeqDumpFlag'
sdopt_unset :: SeqFlags -> SeqDumpFlag -> SeqFlags
sdopt_unset sfs f = sfs{ seqDumpFlags = IntSet.delete (fromEnum f) (seqDumpFlags sfs) }
seqFlags :: [Flag (CmdLineP SeqFlags)]
seqFlags = [
Flag "ddump-llf" (setDumpFlag Opt_D_dump_llf)
, Flag "ddump-seq-xlate" (setDumpFlag Opt_D_dump_seq_xlate)
, Flag "ddump-seq-pipeline" (setDumpFlag Opt_D_dump_seq_pipeline)
, Flag "ddump-cfy-stats" (setDumpFlag Opt_D_dump_cfy_stats)
, Flag "fllf-nonrec-lam-limit" (intSuffix (\n f -> f{ lateFloatNonRecLam = Just n }))
, Flag "fllf-nonrec-lam-any" (noArg (\f -> f{ lateFloatNonRecLam = Nothing }))
, Flag "fno-llf-nonrec-lam" (noArg (\f -> f{ lateFloatNonRecLam = Just 0 }))
, Flag "fllf-rec-lam-limit" (intSuffix (\n f -> f{ lateFloatRecLam = Just n }))
, Flag "fllf-rec-lam-any" (noArg (\f -> f{ lateFloatRecLam = Nothing }))
, Flag "fno-llf-rec-lam" (noArg (\f -> f{ lateFloatRecLam = Just 0 }))
, Flag "fllf-clo-growth-limit" (intSuffix (\n f -> f{ lateFloatCloGrowth = Just n }))
, Flag "fllf-clo-growth-any" (noArg (\f -> f{ lateFloatCloGrowth = Nothing }))
, Flag "fno-llf-clo-growth" (noArg (\f -> f{ lateFloatCloGrowth = Just 0 }))
, Flag "fllf-in-clo-limit" (intSuffix (\n f -> f{ lateFloatIfInClo = Just n }))
, Flag "fllf-in-clo-any" (noArg (\f -> f{ lateFloatIfInClo = Nothing }))
, Flag "fno-llf-in-clo" (noArg (\f -> f{ lateFloatIfInClo = Just 0 }))
, Flag "fllf-clo-growth-in-lam-limit" (intSuffix (\n f -> f{ lateFloatCloGrowthInLam = Just n }))
, Flag "fllf-clo-growth-in-lam-any" (noArg (\f -> f{ lateFloatCloGrowthInLam = Nothing }))
, Flag "fno-llf-clo-growth-in-lam" (noArg (\f -> f{ lateFloatCloGrowthInLam = Just 0 }))
]
++ map (mkFlag turnOn "f" setGeneralFlag ) sFlags
++ map (mkFlag turnOff "fno-" unSetGeneralFlag) sFlags
type TurnOnFlag = Bool -- True <=> we are turning the flag on
-- False <=> we are turning the flag off
turnOn :: TurnOnFlag; turnOn = True
turnOff :: TurnOnFlag; turnOff = False
type FlagSpec flag
= ( String -- Flag in string form
, flag -- Flag in internal form
, TurnOnFlag -> DynP ()) -- Extra action to run when the flag is found
-- Typically, emit a warning or error
mkFlag :: TurnOnFlag -- ^ True <=> it should be turned on
-> String -- ^ The flag prefix
-> (flag -> DynP ()) -- ^ What to do when the flag is found
-> FlagSpec flag -- ^ Specification of this particular flag
-> Flag (CmdLineP SeqFlags)
mkFlag turn_on flagPrefix f (name, flag, extra_action)
= Flag (flagPrefix ++ name) (NoArg (f flag >> extra_action turn_on))
nop :: TurnOnFlag -> DynP ()
nop _ = return ()
sFlags :: [FlagSpec SeqGeneralFlag]
sFlags = [
( "seq-simpl", Opt_EnableSeqSimpl, nop),
( "seq-full-laziness", Opt_EnableSeqFloatOut, nop),
( "seq-spec-constr", Opt_EnableSeqSpecConstr, nop),
( "seq-contification", Opt_EnableContify, nop),
( "seq-combine-passes", Opt_CombineSeqPasses, nop),
( "seq-contify-between", Opt_ContifyBetweenSeqPasses, nop),
( "seq-contification-simpl", Opt_Contify_Simpl, nop),
( "seq-core-simpl-at-end", Opt_CoreSimplAtEnd, nop),
( "seq-simpl-at-end", Opt_SeqSimplAtEnd, nop),
( "llf", Opt_LLF, nop),
( "llf-abstract-undersat", Opt_LLF_AbsUnsat, nop),
( "llf-abstract-sat", Opt_LLF_AbsSat, nop),
( "llf-abstract-oversat", Opt_LLF_AbsOversat, nop),
( "llf-create-PAPs", Opt_LLF_CreatePAPs, nop),
( "llf-simpl", Opt_LLF_Simpl, nop),
( "llf-stabilize", Opt_LLF_Stabilize, nop),
( "llf-use-strictness", Opt_LLF_UseStr, nop),
( "llf-oneshot", Opt_LLF_OneShot, nop),
( "float-nullary-joins", Opt_FloatNullaryJoins, nop)
]
type DynP = EwM (CmdLineP SeqFlags)
noArg :: (SeqFlags -> SeqFlags) -> OptKind (CmdLineP SeqFlags)
noArg fn = NoArg (upd fn)
intSuffix :: (Int -> SeqFlags -> SeqFlags) -> OptKind (CmdLineP SeqFlags)
intSuffix fn = IntSuffix (\n -> upd (fn n))
upd :: (SeqFlags -> SeqFlags) -> DynP ()
upd f = liftEwM (do dflags <- getCmdLineState
putCmdLineState $! f dflags)
setDumpFlag :: SeqDumpFlag -> OptKind (CmdLineP SeqFlags)
setDumpFlag dump_flag = NoArg (setDumpFlag' dump_flag)
--------------------------
setGeneralFlag, unSetGeneralFlag :: SeqGeneralFlag -> DynP ()
setGeneralFlag f = upd (setGeneralFlag' f)
unSetGeneralFlag f = upd (unSetGeneralFlag' f)
setGeneralFlag' :: SeqGeneralFlag -> SeqFlags -> SeqFlags
setGeneralFlag' f dflags = sgopt_set dflags f
unSetGeneralFlag' :: SeqGeneralFlag -> SeqFlags -> SeqFlags
unSetGeneralFlag' f dflags = sgopt_unset dflags f
setDumpFlag' :: SeqDumpFlag -> DynP ()
setDumpFlag' dump_flag = upd (\dfs -> sdopt_set dfs dump_flag)
--------------------------
-- These two datatypes are copied from CoreMonad in the wip/llf branch. Defined
-- here so that both Driver and FloatOut can use them.
data FloatOutSwitches = FloatOutSwitches {
floatOutLambdas :: Maybe Int,
-- ^ Just n <=> float lambdas to top level, if doing so will
-- abstract over n or fewer value variables Nothing <=> float all
-- lambdas to top level, regardless of how many free variables Just
-- 0 is the vanilla case: float a lambda iff it has no free vars
floatOutConstants :: Bool,
-- ^ True <=> float constants to top level, even if they do not
-- escape a lambda
floatOutPartialApplications :: Bool,
-- ^ True <=> float out partial applications based on arity
-- information.
finalPass_ :: Maybe FinalPassSwitches
-- ^ Nothing <=> not the final pass, behave like normal
}
data FinalPassSwitches = FinalPassSwitches
{ fps_rec :: !(Maybe Int)
-- ^ used as floatOutLambdas for recursive lambdas
, fps_absUnsatVar :: !Bool
-- ^ abstract over undersaturated applied variables?
, fps_absSatVar :: !Bool
-- ^ abstract over exactly saturated applied variables? Doing so might lose some fast entries
, fps_absOversatVar :: !Bool
-- ^ abstracting over oversaturated applied variables?
, fps_createPAPs :: !Bool
-- ^ allowed to float functions occurring unapplied
, fps_cloGrowth :: !(Maybe Int)
-- ^ limits the number of free variables added to closures using the floated function
, fps_ifInClo :: !(Maybe Int)
-- ^ limits the number of abstracted variables allowed if the binder occurs in a closure
, fps_stabilizeFirst :: !Bool
-- ^ stabilizes an unstable unfolding before floating things out of
-- it, since floating out precludes specialization at the call-site
, fps_cloGrowthInLam :: !(Maybe Int)
-- ^ disallow the floating of a binding if it occurs in closure that
-- is allocated inside a lambda
, fps_trace :: !Bool
, fps_strictness :: !Bool
, fps_oneShot :: !Bool
}
instance Outputable FloatOutSwitches where
ppr = pprFloatOutSwitches
pprFloatOutSwitches :: FloatOutSwitches -> SDoc
pprFloatOutSwitches sw
= ptext (sLit "FOS") <+> (braces $
sep $ punctuate comma $
[ ptext (sLit "Lam =") <+> ppr (floatOutLambdas sw)
, ptext (sLit "Consts =") <+> ppr (floatOutConstants sw)
, ptext (sLit "PAPs =") <+> ppr (floatOutPartialApplications sw)
, ptext (sLit "Late =") <+> ppr (finalPass_ sw)])
instance Outputable FinalPassSwitches where
ppr = pprFinalPassSwitches
pprFinalPassSwitches :: FinalPassSwitches -> SDoc
pprFinalPassSwitches sw = sep $ punctuate comma $
[ ptext (sLit "Rec =") <+> ppr (fps_rec sw)
, ptext (sLit "AbsUnsatVar =") <+> ppr (fps_absUnsatVar sw)
, ptext (sLit "AbsSatVar =") <+> ppr (fps_absSatVar sw)
, ptext (sLit "AbsOversatVar =") <+> ppr (fps_absOversatVar sw)
, ptext (sLit "ClosureGrowth =") <+> ppr (fps_cloGrowth sw)
, ptext (sLit "ClosureGrowthInLam =") <+> ppr (fps_cloGrowthInLam sw)
, ptext (sLit "StabilizeFirst =") <+> ppr (fps_stabilizeFirst sw)
]
data ContifySwitches = ContifySwitches {
cs_gentle :: Bool
-- ^ True <=> minimal effort, as happens automatically after translation
}
instance Outputable ContifySwitches where
ppr = pprContifySwitches
pprContifySwitches :: ContifySwitches -> SDoc
pprContifySwitches sw
= text "ContifySwitches" <+> braces (text "Gentle =" <+> ppr (cs_gentle sw))
|
lukemaurer/sequent-core
|
src/Language/SequentCore/Driver/Flags.hs
|
Haskell
|
bsd-3-clause
| 13,520
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS -fno-warn-name-shadowing #-}
module Snap.Snaplet.Fay (
Fay
, initFay
, fayServe
, fayax
, toFayax
, fromFayax
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Reader
import Control.Monad.State.Class
import Control.Monad.Trans.Writer
import qualified Data.Aeson as A
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy as BL
import qualified Data.Configurator as C
import Data.Data
import Data.List
import Data.Maybe
import Data.String
import Fay.Convert
import Snap.Core
import Snap.Snaplet
import Snap.Util.FileServe
import System.Directory
import System.FilePath
import Paths_snaplet_fay
import Snap.Snaplet.Fay.Internal
-- | Snaplet initialization
initFay :: SnapletInit b Fay
initFay = makeSnaplet "fay" description datadir $ do
config <- getSnapletUserConfig
fp <- getSnapletFilePath
(opts, errs) <- runWriterT $ do
compileModeStr <- logErr "Must specify compileMode" $ C.lookup config "compileMode"
compileMode <- case compileModeStr of
Just x -> logErr "Invalid compileMode" . return $ compileModeFromString x
Nothing -> return Nothing
verbose <- logErr "Must specify verbose" $ C.lookup config "verbose"
prettyPrint <- logErr "Must specify prettyPrint" $ C.lookup config "prettyPrint"
includeDirs <- logErr "Must specify includeDirs" $ C.lookup config "includeDirs"
let inc = maybe [] (split ',') includeDirs
inc' <- liftIO $ mapM canonicalizePath inc
packages <- logErr "Must specify packages" $ C.lookup config "packages"
let packs = maybe [] (split ',') packages
return (verbose, compileMode, prettyPrint, inc', packs)
let fay = case opts of
(Just verbose, Just compileMode, Just prettyPrint, includeDirs, packages) ->
Fay
{ snapletFilePath = fp
, verbose = verbose
, compileMode = compileMode
, prettyPrint = prettyPrint
, _includeDirs = fp : includeDirs
, packages = packages
}
_ -> error $ intercalate "\n" errs
-- Make sure snaplet/fay, snaplet/fay/src, snaplet/fay/js are present.
liftIO $ mapM_ createDirUnlessExists [fp, srcDir fay, destDir fay]
when (Production == compileMode fay) (liftIO $ compileAll fay)
return fay
where
-- TODO Use split package
split :: Eq a => a -> [a] -> [[a]]
split _ [] = []
split a as = takeWhile (/= a) as : split a (drop 1 $ dropWhile (/= a) as)
createDirUnlessExists fp = do
dirExists <- doesDirectoryExist fp
unless dirExists $ createDirectory fp
datadir = Just $ liftM (++ "/resources") getDataDir
description = "Automatic (re)compilation and serving of Fay files"
logErr :: MonadIO m => t -> IO (Maybe a) -> WriterT [t] m (Maybe a)
logErr err m = do
res <- liftIO m
when (isNothing res) (tell [err])
return res
compileModeFromString :: String -> Maybe CompileMode
compileModeFromString "Development" = Just Development
compileModeFromString "Production" = Just Production
compileModeFromString _ = Nothing
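-- Illustrative snaplet configuration (snaplets/fay/devel.cfg style) that
-- the lookups in 'initFay' expect; the values shown are assumptions for
-- illustration, not shipped defaults:
--
-- compileMode = "Development"
-- verbose = true
-- prettyPrint = false
-- includeDirs = ""
-- packages = ""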
-- | Serves the compiled Fay scripts using the chosen compile mode.
fayServe :: Handler b Fay ()
fayServe = do
modifyResponse . setContentType $ "text/javascript;charset=utf-8"
get >>= compileWithMode . compileMode
-- | Send and receive JSON.
--
-- Automatically decodes a JSON request into a Fay record which is
-- passed to `g`. The handler `g` should then return a Fay record (of
-- a possibly separate type) which is encoded and passed back as a
-- JSON response.
--
-- If you only want to send JSON and handle the input manually, use 'toFayax'.
-- If you want to receive JSON and handle the response manually, use 'fromFayax'.
fayax :: (Data f1, Read f1, Show f2) => (f1 -> Handler h1 h2 f2) -> Handler h1 h2 ()
fayax g = do
res <- decode
case res of
Left body -> send500 $ Just body
Right res -> toFayax . g $ res
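-- Illustrative usage sketch (commented out; the types 'GetUser',
-- 'UserInfo', 'App' and the helper 'lookupUser' are hypothetical, not
-- part of this snaplet):
--
-- data GetUser = GetUser Int deriving (Read, Data, Typeable)
-- data UserInfo = UserInfo String deriving (Show)
--
-- userHandler :: Handler App App ()
-- userHandler = fayax $ \(GetUser uid) -> lookupUser uid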
-- | fayax only sending JSON.
toFayax :: Show f2 => Handler h1 h2 f2 -> Handler h1 h2 ()
toFayax g = do
modifyResponse . setContentType $ "text/json;charset=utf-8"
writeLBS . A.encode . showToFay =<< g
-- | fayax only receiving JSON.
fromFayax :: (Data f1, Read f1) => (f1 -> Handler h1 h2 ()) -> Handler h1 h2 ()
fromFayax g = do
res <- decode
case res of
Left body -> send500 $ Just body
Right res -> g res
-- | Read the request input and convert to a Fay value.
decode :: (Data f1, Read f1) => Handler h1 h2 (Either ByteString f1)
decode = do
body <- readRequestBody 1024 -- Nothing will break by abusing this :)!
res <- return $ A.decode body >>= readFromFay
return $ case res of
    Nothing -> Left . BS.concat . BL.toChunks $ "Could not decode " `BL.append` body
Just x -> Right x
-- | Compiles according to the specified mode.
compileWithMode :: CompileMode -> Handler b Fay ()
compileWithMode Development = do
cfg <- get
uri <- (srcDir cfg </>) . toHsName . filename . BS.unpack . rqURI <$> getRequest
res <- liftIO (compileFile cfg uri)
case res of
Success s -> writeBS $ fromString s
NotFound -> send404 Nothing
Error err -> send500 . Just . BS.pack $ err
-- Production compilation has already been done.
compileWithMode Production = get >>= serveDirectory . destDir
-- | Respond with Not Found
send404 :: Maybe ByteString -> Handler a b ()
send404 msg = do
modifyResponse $ setResponseStatus 404 "Not Found"
writeBS $ fromMaybe "Not Found" msg
finishWith =<< getResponse
-- | Respond with Internal Server Error
send500 :: Maybe ByteString -> Handler a b ()
send500 msg = do
modifyResponse $ setResponseStatus 500 "Internal Server Error"
writeBS $ fromMaybe "Internal Server Error" msg
finishWith =<< getResponse
|
bergmark/snaplet-fay
|
src/Snap/Snaplet/Fay.hs
|
Haskell
|
bsd-3-clause
| 6,274
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Kiosk.Backend.Data ( DataTemplateEntry (..)
, DataTemplateEntryKey (..)
, DataTemplate (..)
, TicketId (..)
, TemplateItem (..)
, dataTemplateEntryKey
, dataTemplateEntryValue
, decodeUUID
, getListOfSortedTemplateItems
, fromDataTemplateEntryToCsv
, fromDataTemplateEntryToS3Csv
, fromDataTemplateEntryToXlsxWorksheet
) where
import Kiosk.Backend.Data.DataTemplate
import Kiosk.Backend.Data.DataTemplateEntry
import Kiosk.Backend.Data.DataTemplateEntryKey
|
plow-technologies/cobalt-kiosk-data-template
|
src/Kiosk/Backend/Data.hs
|
Haskell
|
bsd-3-clause
| 959
|
module Day2 where
import Test.Hspec
import Utils
import qualified Text.Megaparsec.String as P
import qualified Text.Megaparsec as P
-- Input DSL
data Instruction = U | D | L | R deriving (Show)
-- Parsing
parser :: P.Parser [[Instruction]]
parser = P.sepBy (P.many (parserInstruction)) (P.string "\n")
parserInstruction :: P.Parser Instruction
parserInstruction = (U <$ P.string "U") P.<|>
(D <$ P.string "D") P.<|>
(L <$ P.string "L") P.<|>
(R <$ P.string "R")
-- Problem DSL
data KeyPad = KeyPad [[Char]] deriving (Show)
data Status = Status KeyPad (Int, Int) deriving (Show)
makeKeyPad s coord = Status (KeyPad (lines s)) coord
validCase k@(KeyPad s) (x, y) = y >= 0 && y < length s && x >= 0 && x < length (s !! y) && getKeyPad k (x, y) /= ' '
getKeyPad keyPad@(KeyPad s) (x, y) = s !! y !! x
getStatus (Status keyPad coord) = getKeyPad keyPad coord
moveKeyPad i (Status keyPad (x, y)) = Status keyPad (if validCase keyPad newCoord then newCoord else (x, y))
where newCoord = case i of
U -> (x, y - 1)
D -> (x, y + 1)
L -> (x - 1, y)
R -> (x + 1, y)
keyPad = makeKeyPad "123\n\
\456\n\
\789" (1, 1)
keyPad' = makeKeyPad " 1 \n\
\ 234 \n\
\56789\n\
\ ABC \n\
\ D " (0, 2)
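-- Worked example (illustrative bindings, not part of the original
-- solution): on the square keypad the start position (1, 1) is the '5'
-- key, and a single U instruction moves to '2'.
exampleStart :: Char
exampleStart = getStatus keyPad                   -- '5'
exampleAfterUp :: Char
exampleAfterUp = getStatus (moveKeyPad U keyPad)  -- '2'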
-- utils
foldInstruction :: Status -> [Instruction] -> Status
foldInstruction keyPad xs = foldl (flip moveKeyPad) keyPad xs
genericDay :: Status -> [[Instruction]] -> [Char]
genericDay keypad code = map getStatus (tail (scanl foldInstruction keypad code))
-- FIRST problem
day code = genericDay keyPad code
-- SECOND problem
day' code = genericDay keyPad' code
-- tests and data
testData = [[U, L],
[R, R, D, D, D],
[L, U, R, D, L],
[U, U, U, U, D]
]
test = hspec $ do
describe "firstProblem" $ do
it "works" $ do
day testData `shouldBe` "1985"
--day 1 `shouldBe` (2 :: Int)
describe "secondProblem" $ do
it "works" $ do
day' testData `shouldBe` "5DB3"
describe "finally" $ do
it "works" $ do
day <$> content `shouldReturn` "47978"
day' <$> content `shouldReturn` "659AD"
fileContent = readFile "content/day2"
content = parse parser <$> fileContent
|
guibou/AdventOfCode2016
|
src/Day2.hs
|
Haskell
|
bsd-3-clause
| 2,376
|
module Handler.RemoveDeck where
import Kerchief.Prelude
import Prelude hiding (putStrLn)
import System.Directory (removeFile)
import System.FilePath ((</>))
import Kerchief (Kerchief, getDecksDir)
import Utils (askYesNo, getDirectoryContents')
handleRemoveDeck :: [String] -> Kerchief ()
handleRemoveDeck ["--help"] = printRemoveDeckUsage
handleRemoveDeck [name] = handleRemoveDeck' name
handleRemoveDeck _ = printRemoveDeckUsage
handleRemoveDeck' :: String -> Kerchief ()
handleRemoveDeck' name = do
decksDir <- getDecksDir
decks <- io (getDirectoryContents' decksDir)
if name `elem` decks
then askYesNo ("Are you sure you want to remove deck \"" ++ name ++ "\"? ")
(do
io $ removeFile (decksDir </> name)
putStrLn $ "Deck \"" ++ name ++ "\" removed.")
(putStrLn $ "Deck \"" ++ name ++ "\" not removed.")
else putStrLn $ "Deck \"" ++ name ++ "\" doesn't exist. See \"decks\"."
printRemoveDeckUsage :: Kerchief ()
printRemoveDeckUsage = putStrLn "TODO"
|
mitchellwrosen/kerchief
|
src/Handler/RemoveDeck.hs
|
Haskell
|
bsd-3-clause
| 1,119
|
{-|
Description: helpers for matching requests
contains various matching utilities
-}
{-# LANGUAGE TupleSections #-}
module Web.Respond.Request where
import Network.Wai
import qualified Data.ByteString.Lazy as LBS
import Control.Applicative ((<$>))
import Control.Monad.IO.Class (liftIO)
import qualified Network.HTTP.Media as Media
import Data.Maybe (fromMaybe)
import Web.Respond.Types
import Web.Respond.Monad
import Web.Respond.Response
-- * extracting the request body
-- | gets the body as a lazy ByteString using lazy IO (see 'lazyRequestBody')
getBodyLazy :: MonadRespond m => m LBS.ByteString
getBodyLazy = getRequest >>= liftIO . lazyRequestBody
-- | gets the body as a lazy ByteString using /strict/ IO (see 'strictRequestBody')
getBodyStrict :: MonadRespond m => m LBS.ByteString
getBodyStrict = getRequest >>= liftIO . strictRequestBody
-- ** extraction using FromBody
-- | use a FromBody instance to parse the body. uses 'getBodyLazy' to
-- lazily load the body data.
extractBodyLazy :: (ReportableError e, FromBody e a, MonadRespond m) => m (Either e a)
extractBodyLazy = fromBody <$> getBodyLazy
-- | uses a FromBody instance to parse the body. uses 'getBodyStrict' to
-- load the body strictly.
extractBodyStrict :: (ReportableError e, FromBody e a, MonadRespond m) => m (Either e a)
extractBodyStrict = fromBody <$> getBodyStrict
-- | extracts the body using 'extractBodyLazy'. runs the inner action only
-- if the body could be loaded and parsed using the FromBody instance;
-- otherwise responds with the reportable error by calling
-- 'handleBodyParseFailure'.
withRequiredBody :: (ReportableError e, FromBody e a, MonadRespond m) => (a -> m ResponseReceived) -> m ResponseReceived
withRequiredBody action = extractBodyLazy >>= either handleBodyParseFailure action
-- | extracts the body using 'extractBodyStrict'. runs the inner action only
-- if the body could be loaded and parsed using the FromBody instance;
-- otherwise responds with the reportable error by calling
-- 'handleBodyParseFailure'.
withRequiredBody' :: (ReportableError e, FromBody e a, MonadRespond m) => (a -> m ResponseReceived) -> m ResponseReceived
withRequiredBody' action = extractBodyStrict >>= either handleBodyParseFailure action
-- * authentication and authorization
-- | authenticate uses the result of the authentication action (if it
-- succssfully produced a result) to run the inner action function.
-- otherwise, it uses 'handleAuthFailed'.
authenticate :: (MonadRespond m, ReportableError e) => m (Either e a) -> (a -> m ResponseReceived) -> m ResponseReceived
authenticate auth inner = auth >>= either handleAuthFailed inner
-- | reauthenticate tries to use a prior authentication value to run the
-- inner action; if it's not available, it falls back to 'authenticate' to
-- apply the auth action and run the inner action.
reauthenticate :: (MonadRespond m, ReportableError e) => Maybe a -> m (Either e a) -> (a -> m ResponseReceived) -> m ResponseReceived
reauthenticate prior auth inner = maybe (authenticate auth inner) inner prior
-- | if given an error report value, respond immediately using
-- 'handleAccessDenied'. otherwise, run the inner route.
authorize :: (ReportableError e, MonadRespond m) => Maybe e -> m ResponseReceived -> m ResponseReceived
authorize check inner = maybe inner handleAccessDenied check
-- | if the bool is true, run the inner action. otherwise, respond with
-- 'handleAccessDenied' using the report.
authorizeBool :: (ReportableError e, MonadRespond m) => e -> Bool -> m ResponseReceived -> m ResponseReceived
authorizeBool report allowed inner
| allowed = inner
| otherwise = handleAccessDenied report
-- | authorize using an Either; if it's Left, fail using
-- 'handleAccessDenied' on the contained ReportableError. if it's Right,
-- run the inner action using the contained value.
authorizeE :: (ReportableError e, MonadRespond m) => Either e a -> (a -> m ResponseReceived) -> m ResponseReceived
authorizeE check inner = either handleAccessDenied inner check
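-- Illustrative usage sketch (commented out; 'checkApiKey', 'forbidden'
-- and 'isAdmin' are hypothetical names, not part of this library):
--
-- protectedRoute :: MonadRespond m => m ResponseReceived -> m ResponseReceived
-- protectedRoute inner =
--     authenticate checkApiKey $ \user ->
--         authorizeBool forbidden (isAdmin user) inner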
-- * content negotiation
-- | selects action by accept header
routeAccept :: MonadRespond m
=> m a -- ^ default action - do this if nothing matches
-> [(Media.MediaType, m a)] -- ^ actions to perform for each accepted media type
-> m a -- ^ chosen action
routeAccept def mapped = getAcceptHeader >>= fromMaybe def . Media.mapAcceptMedia mapped
-- | defends the inner routes by first checking the Accept header and
-- failing if it cannot accept any media type in the list
checkAccepts :: MonadRespond m => [Media.MediaType] -> m ResponseReceived -> m ResponseReceived
checkAccepts list action = getAcceptHeader >>= maybe handleUnacceptableResponse (const action) . Media.matchAccept list
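-- Illustrative usage sketch (commented out; 'sendJson' and 'sendHtml'
-- are hypothetical handlers, not part of this library):
--
-- negotiated :: MonadRespond m => m ResponseReceived
-- negotiated = checkAccepts ["application/json", "text/html"] $
--     routeAccept sendJson
--         [ ("application/json", sendJson)
--         , ("text/html",        sendHtml) ]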
|
raptros/respond
|
src/Web/Respond/Request.hs
|
Haskell
|
bsd-3-clause
| 4,720
|
{-# LANGUAGE DeriveDataTypeable #-}
import qualified Data.ByteString.Lazy.Char8 as B
import System.Console.CmdArgs
import System.Exit
import Text.WikiEngine
import qualified Text.Blaze.Renderer.Utf8 as RenderUtf8 (renderHtml)
import qualified Text.Blaze.Renderer.Pretty as RenderPretty (renderHtml)
import qualified Data.ByteString.Lazy as L
renderCfg = defaultRenderCfg { rcfgCodeRenderType = CodeRenderSimple }
doMain (Render input pretty) = do
content <- readFile input
let wikiblocks =
case parseDocument content of
Right blocks -> blocks
Left errors -> error ("error parsing wiki content: " ++ show errors)
if pretty
then putStrLn $ RenderPretty.renderHtml $ renderAsHtml renderCfg wikiblocks
else L.putStrLn $ RenderUtf8.renderHtml $ renderAsHtml renderCfg wikiblocks
doMain (Raw input) = do
content <- readFile input
let wikiblocks =
case parseDocument content of
Right blocks -> blocks
Left errors -> error ("error parsing wiki content: " ++ show errors)
mapM_ (putStrLn . show) wikiblocks
doMain (Validate input) = do
content <- readFile input
case parseDocument content of
Right _ -> exitSuccess
Left _ -> exitFailure
data Opts =
Render { input :: FilePath, pretty :: Bool }
| Raw { input :: FilePath }
| Validate { input :: FilePath }
deriving (Show,Data,Typeable)
render = Render { input = def &= typFile, pretty = def }
raw = Raw { input = def &= typFile }
validate = Validate { input = def &= typFile }
mode = cmdArgsMode $ modes [raw,validate,render]
main = cmdArgsRun mode >>= doMain
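-- Illustrative invocations (assuming the built executable is called
-- "wikihtml"; cmdargs derives the mode and flag names from the record
-- definitions above):
--
-- $ wikihtml render --input=page.wiki --pretty
-- $ wikihtml validate --input=page.wiki && echo valid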
|
vincenthz/wikiengine
|
Wikihtml.hs
|
Haskell
|
bsd-3-clause
| 1,567
|
import Common.Numbers.Numbers (powMod)
main = print $ 1 + (28433 * (powMod 2 (7830457 :: Int) modulo) `mod` modulo) where
modulo = 10^10 :: Integer
|
foreverbell/project-euler-solutions
|
src/97.hs
|
Haskell
|
bsd-3-clause
| 154
|
{-# LANGUAGE PatternGuards, ViewPatterns, CPP, ScopedTypeVariables #-}
module General.Util(
PkgName, ModName,
URL,
pretty, parseMode, applyType, applyFun1, unapplyFun, fromName, fromQName, fromTyVarBind, declNames, isTypeSig,
fromDeclHead, fromContext, fromIParen, fromInstHead,
tarballReadFiles,
isUpper1, isAlpha1,
joinPair,
testing, testEq,
showUTCTime,
strict,
withs,
escapeHTML, unescapeHTML, unHTML,
escapeURL,
takeSortOn,
Average, toAverage, fromAverage,
inRanges,
parseTrailingVersion,
trimVersion,
exitFail,
prettyTable,
getStatsPeakAllocBytes, getStatsCurrentLiveBytes, getStatsDebug,
hackagePackageURL, hackageModuleURL, hackageDeclURL, ghcModuleURL,
minimum', maximum',
general_util_test
) where
import Language.Haskell.Exts
import Control.Applicative
import Data.List.Extra
import Data.Char
import Data.Either.Extra
import Data.Semigroup
import Data.Tuple.Extra
import Control.Monad.Extra
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Map as Map
import Data.Ix
import Numeric.Extra
import Codec.Compression.GZip as GZip
import Codec.Archive.Tar as Tar
import Data.Time.Clock
import Data.Time.Format
import Control.DeepSeq
import Control.Exception.Extra
import Test.QuickCheck
import Data.Version
import Data.Int
import System.IO
import System.Exit
import System.Mem
import GHC.Stats
import General.Str
import Prelude
import qualified Network.HTTP.Types.URI as URI
import qualified Data.ByteString.UTF8 as UTF8
type PkgName = Str
type ModName = Str
-- | A URL, complete with a @https:@ prefix.
type URL = String
#if __GLASGOW_HASKELL__ >= 802
#define RTS_STATS 1
#endif
showMb :: (Show a, Integral a) => a -> String
#if RTS_STATS
showMb x = show (x `div` (1024*1024)) ++ "Mb"
#else
showMb x = show x ++ "Mb"
#endif
#if RTS_STATS
withRTSStats :: (RTSStats -> a) -> IO (Maybe a)
withRTSStats f = ifM getRTSStatsEnabled (Just . f <$> getRTSStats) (pure Nothing)
#else
withGCStats :: (GCStats -> a) -> IO (Maybe a)
withGCStats f = ifM getGCStatsEnabled (Just . f <$> getGCStats) (pure Nothing)
#endif
getStatsCurrentLiveBytes :: IO (Maybe String)
getStatsCurrentLiveBytes = do
performGC
#if RTS_STATS
withRTSStats $ showMb . gcdetails_live_bytes . gc
#else
withGCStats $ showMb . currentBytesUsed
#endif
getStatsPeakAllocBytes :: IO (Maybe String)
getStatsPeakAllocBytes = do
#if RTS_STATS
withRTSStats $ showMb . max_mem_in_use_bytes
#else
withGCStats $ showMb . peakMegabytesAllocated
#endif
getStatsDebug :: IO (Maybe String)
getStatsDebug = do
let dump = replace ", " "\n" . takeWhile (/= '}') . drop1 . dropWhile (/= '{') . show
#if RTS_STATS
withRTSStats dump
#else
withGCStats dump
#endif
exitFail :: String -> IO ()
exitFail msg = do
hPutStrLn stderr msg
exitFailure
pretty :: Pretty a => a -> String
pretty = prettyPrintWithMode defaultMode{layout=PPNoLayout}
parseMode :: ParseMode
parseMode = defaultParseMode{extensions=map EnableExtension es}
where es = [ConstraintKinds,EmptyDataDecls,TypeOperators,ExplicitForAll,GADTs,KindSignatures,MultiParamTypeClasses
,TypeFamilies,FlexibleContexts,FunctionalDependencies,ImplicitParams,MagicHash,UnboxedTuples
,ParallelArrays,UnicodeSyntax,DataKinds,PolyKinds,PatternSynonyms]
applyType :: Type a -> [Type a] -> Type a
applyType x (t:ts) = applyType (TyApp (ann t) x t) ts
applyType x [] = x
applyFun1 :: [Type a] -> Type a
applyFun1 [x] = x
applyFun1 (x:xs) = TyFun (ann x) x $ applyFun1 xs
unapplyFun :: Type a -> [Type a]
unapplyFun (TyFun _ x y) = x : unapplyFun y
unapplyFun x = [x]
fromName :: Name a -> String
fromName (Ident _ x) = x
fromName (Symbol _ x) = x
fromQName :: QName a -> String
fromQName (Qual _ _ x) = fromName x
fromQName (UnQual _ x) = fromName x
fromQName (Special _ UnitCon{}) = "()"
fromQName (Special _ ListCon{}) = "[]"
fromQName (Special _ FunCon{}) = "->"
fromQName (Special _ (TupleCon _ box n)) = "(" ++ h ++ replicate n ',' ++ h ++ ")"
where h = ['#' | box == Unboxed]
fromQName (Special _ UnboxedSingleCon{}) = "(##)"
fromQName (Special _ Cons{}) = ":"
fromContext :: Context a -> [Asst a]
fromContext (CxSingle _ x) = [x]
fromContext (CxTuple _ xs) = xs
fromContext _ = []
fromIParen :: InstRule a -> InstRule a
fromIParen (IParen _ x) = fromIParen x
fromIParen x = x
fromTyVarBind :: TyVarBind a -> Name a
fromTyVarBind (KindedVar _ x _) = x
fromTyVarBind (UnkindedVar _ x) = x
fromDeclHead :: DeclHead a -> (Name a, [TyVarBind a])
fromDeclHead (DHead _ n) = (n, [])
fromDeclHead (DHInfix _ x n) = (n, [x])
fromDeclHead (DHParen _ x) = fromDeclHead x
fromDeclHead (DHApp _ dh x) = second (++[x]) $ fromDeclHead dh
fromInstHead :: InstHead a -> (QName a, [Type a])
fromInstHead (IHCon _ n) = (n, [])
fromInstHead (IHInfix _ x n) = (n, [x])
fromInstHead (IHParen _ x) = fromInstHead x
fromInstHead (IHApp _ ih x) = second (++[x]) $ fromInstHead ih
declNames :: Decl a -> [String]
declNames x = map fromName $ case x of
TypeDecl _ hd _ -> f hd
DataDecl _ _ _ hd _ _ -> f hd
GDataDecl _ _ _ hd _ _ _ -> f hd
TypeFamDecl _ hd _ _ -> f hd
DataFamDecl _ _ hd _ -> f hd
ClassDecl _ _ hd _ _ -> f hd
TypeSig _ names _ -> names
PatSynSig _ names _ _ _ _ _ -> names
_ -> []
where f x = [fst $ fromDeclHead x]
isTypeSig :: Decl a -> Bool
isTypeSig TypeSig{} = True
isTypeSig PatSynSig{} = True
isTypeSig _ = False
tarballReadFiles :: FilePath -> IO [(FilePath, LBS.ByteString)]
tarballReadFiles file = f . Tar.read . GZip.decompress <$> LBS.readFile file
where
f (Next e rest) | NormalFile body _ <- entryContent e = (entryPath e, body) : f rest
f (Next _ rest) = f rest
f Done = []
f (Fail e) = error $ "tarballReadFiles on " ++ file ++ ", " ++ show e
innerTextHTML :: String -> String
innerTextHTML ('<':xs) = innerTextHTML $ drop1 $ dropWhile (/= '>') xs
innerTextHTML (x:xs) = x : innerTextHTML xs
innerTextHTML [] = []
unHTML :: String -> String
unHTML = unescapeHTML . innerTextHTML
escapeURL :: String -> String
escapeURL = UTF8.toString . URI.urlEncode True . UTF8.fromString
isUpper1 (x:xs) = isUpper x
isUpper1 _ = False
isAlpha1 (x:xs) = isAlpha x
isAlpha1 [] = False
splitPair :: String -> String -> (String, String)
splitPair x y | (a,stripPrefix x -> Just b) <- breakOn x y = (a,b)
| otherwise = error $ "splitPair does not contain separator " ++ show x ++ " in " ++ show y
joinPair :: [a] -> ([a], [a]) -> [a]
joinPair sep (a,b) = a ++ sep ++ b
testing_, testing :: String -> IO () -> IO ()
testing_ name act = do putStr $ "Test " ++ name ++ " "; act
testing name act = do testing_ name act; putStrLn ""
testEq :: (Show a, Eq a) => a -> a -> IO ()
testEq a b | a == b = putStr "."
| otherwise = errorIO $ "Expected equal, but " ++ show a ++ " /= " ++ show b
showUTCTime :: String -> UTCTime -> String
showUTCTime = formatTime defaultTimeLocale
withs :: [(a -> r) -> r] -> ([a] -> r) -> r
withs [] act = act []
withs (f:fs) act = f $ \a -> withs fs $ \as -> act $ a:as
prettyTable :: Int -> String -> [(String, Double)] -> [String]
prettyTable dp units xs =
( padR len units ++ "\tPercent\tName") :
[ padL len (showDP dp b) ++ "\t" ++ padL 7 (showDP 1 (100 * b / tot) ++ "%") ++ "\t" ++ a
| (a,b) <- ("Total", tot) : sortOn (negate . snd) xs]
where
tot = sum $ map snd xs
len = length units `max` length (showDP dp tot)
padL n s = replicate (n - length s) ' ' ++ s
padR n s = s ++ replicate (n - length s) ' '
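-- Illustrative call (this binding is not part of hoogle): a small timing
-- table with one line per entry plus a computed Total row, largest
-- entries first.
prettyTable_example :: [String]
prettyTable_example = prettyTable 1 "Secs" [("parse", 1.5), ("index", 0.5)]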
-- ensure that no value escapes in a thunk from the value
strict :: NFData a => IO a -> IO a
strict act = do
res <- try_ act
case res of
Left e -> do msg <- showException e; evaluate $ rnf msg; errorIO msg
Right v -> evaluate $ force v
data Average a = Average !a {-# UNPACK #-} !Int deriving Show -- a / b
toAverage :: a -> Average a
toAverage x = Average x 1
fromAverage :: Fractional a => Average a -> a
fromAverage (Average a b) = a / fromIntegral b
instance Num a => Semigroup (Average a) where
Average x1 x2 <> Average y1 y2 = Average (x1+y1) (x2+y2)
instance Num a => Monoid (Average a) where
mempty = Average 0 0
mappend = (<>)
data TakeSort k v = More !Int !(Map.Map k [v])
| Full !k !(Map.Map k [v])
-- | @takeSortOn op n == take n . sortOn op@
takeSortOn :: Ord k => (a -> k) -> Int -> [a] -> [a]
takeSortOn op n xs
| n <= 0 = []
| otherwise = concatMap reverse $ Map.elems $ getMap $ foldl' add (More n Map.empty) xs
where
getMap (More _ mp) = mp
getMap (Full _ mp) = mp
add (More n mp) x = (if n <= 1 then full else More (n-1)) $ Map.insertWith (++) (op x) [x] mp
add o@(Full mx mp) x = let k = op x in if k >= mx then o else full $ Map.insertWith (++) k [x] $ delMax mp
full mp = Full (fst $ Map.findMax mp) mp
delMax mp | Just ((k,_:vs), mp) <- Map.maxViewWithKey mp = if null vs then mp else Map.insert k vs mp
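-- Worked example (this binding is not part of hoogle): the two shortest
-- strings, matching @take 2 (sortOn length ...)@.
takeSortOn_example :: [String]
takeSortOn_example = takeSortOn length 2 ["abc", "a", "de", "fghi"]  -- ["a","de"]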
-- See https://ghc.haskell.org/trac/ghc/ticket/10830 - they broke maximumBy
maximumBy' :: (a -> a -> Ordering) -> [a] -> a
maximumBy' cmp = foldl1' $ \x y -> if cmp x y == GT then x else y
maximum' :: Ord a => [a] -> a
maximum' = maximumBy' compare
minimumBy' :: (a -> a -> Ordering) -> [a] -> a
minimumBy' cmp = foldl1' $ \x y -> if cmp x y == LT then x else y
minimum' :: Ord a => [a] -> a
minimum' = minimumBy' compare
hackagePackageURL :: PkgName -> URL
hackagePackageURL x = "https://hackage.haskell.org/package/" ++ strUnpack x
hackageModuleURL :: ModName -> URL
hackageModuleURL x = "/docs/" ++ ghcModuleURL x
ghcModuleURL :: ModName -> URL
ghcModuleURL x = replace "." "-" (strUnpack x) ++ ".html"
hackageDeclURL :: Bool -> String -> URL
hackageDeclURL typesig x = "#" ++ (if typesig then "v" else "t") ++ ":" ++ concatMap f x
where
f x | isLegal x = [x]
| otherwise = "-" ++ show (ord x) ++ "-"
-- isLegal is from haddock-api:Haddock.Utils; we need to use
-- the same escaping strategy here in order for fragment links
-- to work
isLegal ':' = True
isLegal '_' = True
isLegal '.' = True
isLegal c = isAscii c && isAlphaNum c
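-- Worked example (this binding is not part of hoogle): the anchor for the
-- (>>=) value, with non-alphanumeric characters escaped.
hackageDeclURL_example :: URL
hackageDeclURL_example = hackageDeclURL True ">>="  -- "#v:-62--62--61-"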
trimVersion :: Int -> Version -> Version
trimVersion i v = v{versionBranch = take i $ versionBranch v}
parseTrailingVersion :: String -> (String, [Int])
parseTrailingVersion = (reverse *** reverse) . f . reverse
where
f xs | (ver@(_:_),sep:xs) <- span isDigit xs
, sep == '-' || sep == '.'
, (a, b) <- f xs
= (a, Prelude.read (reverse ver) : b)
f xs = (xs, [])
-- | Equivalent to any (`inRange` x) xs, but more efficient
inRanges :: Ix a => [(a,a)] -> (a -> Bool)
inRanges xs = \x -> maybe False (`inRange` x) $ Map.lookupLE x mp
where
mp = foldl' add Map.empty xs
merge (l1,u1) (l2,u2) = (min l1 l2, max u1 u2)
overlap x1 x2 = x1 `inRange` fst x2 || x2 `inRange` fst x1
add mp x
| Just x2 <- Map.lookupLE (fst x) mp, overlap x x2 = add (Map.delete (fst x2) mp) (merge x x2)
| Just x2 <- Map.lookupGE (fst x) mp, overlap x x2 = add (Map.delete (fst x2) mp) (merge x x2)
| otherwise = uncurry Map.insert x mp
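-- Worked example (this binding is not part of hoogle): 5 and 12 fall
-- inside the ranges below, 8 does not.
inRanges_example :: [Bool]
inRanges_example = map (inRanges [(1, 6), (10, 15)]) [5, 8, 12 :: Int]  -- [True,False,True]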
general_util_test :: IO ()
general_util_test = do
testing "General.Util.splitPair" $ do
let a === b = if a == b then putChar '.' else errorIO $ show (a,b)
splitPair ":" "module:foo:bar" === ("module","foo:bar")
do x <- try_ $ evaluate $ rnf $ splitPair "-" "module:foo"; isLeft x === True
splitPair "-" "module-" === ("module","")
testing_ "General.Util.inRanges" $ do
quickCheck $ \(x :: Int8) xs -> inRanges xs x == any (`inRange` x) xs
testing "General.Util.parseTrailingVersion" $ do
let a === b = if a == b then putChar '.' else errorIO $ show (a,b)
parseTrailingVersion "shake-0.15.2" === ("shake",[0,15,2])
parseTrailingVersion "test-of-stuff1" === ("test-of-stuff1",[])
|
ndmitchell/hoogle
|
src/General/Util.hs
|
Haskell
|
bsd-3-clause
| 12,086
|
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances, FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE UndecidableInstances #-}
module Web.Zwaluw.Regular
( mkRouters
, Routers
, RouterList(..)
-- * Re-exported from Generics.Regular
, deriveAll
, PF
) where
import Web.Zwaluw.Core
import Generics.Regular
infixr :&
-- | The type of the list of routers generated for type @r@.
type Routers r = RouterList (PF r) r
-- | Creates the routers for type @r@, one for each constructor. For example:
--
-- @Z rHome :& Z rUserOverview :& Z rUserDetail :& Z rArticle = mkRouters@
mkRouters :: (MkRouters (PF r), Regular r) => Routers r
mkRouters = mkRouters' to (Just . from)
data family RouterList f r
class MkRouters (f :: * -> *) where
mkRouters' :: (f r -> r) -> (r -> Maybe (f r)) -> RouterList f r
data instance RouterList (C c f) r = Z (forall t. Router (RouterLhs f r t) (r :- t))
instance MkRouter f => MkRouters (C c f) where
mkRouters' addLR matchLR = Z $ pure (hdMap (addLR . C) . mkP) (fmap mkS . hdTraverse (fmap unC . matchLR))
data instance RouterList (f :+: g) r = RouterList f r :& RouterList g r
instance (MkRouters f, MkRouters g) => MkRouters (f :+: g) where
mkRouters' addLR matchLR = mkRouters' (addLR . L) (matchL matchLR)
:& mkRouters' (addLR . R) (matchR matchLR)
where
matchL :: (r -> Maybe ((f :+: g) r)) -> r -> Maybe (f r)
matchL frm r = case frm r of
Just (L f) -> Just f
_ -> Nothing
matchR :: (r -> Maybe ((f :+: g) r)) -> r -> Maybe (g r)
matchR frm r = case frm r of
Just (R f) -> Just f
_ -> Nothing
type family RouterLhs (f :: * -> *) (r :: *) (t :: *) :: *
class MkRouter (f :: * -> *) where
mkP :: RouterLhs f r t -> (f r :- t)
mkS :: (f r :- t) -> RouterLhs f r t
type instance RouterLhs U r t = t
instance MkRouter U where
mkP t = U :- t
mkS (U :- r) = r
type instance RouterLhs (K a) r t = a :- t
instance MkRouter (K a) where
mkP (a :- t) = K a :- t
mkS (K a :- t) = a :- t
type instance RouterLhs I r t = r :- t
instance MkRouter I where
mkP (r :- t) = I r :- t
mkS (I r :- t) = r :- t
type instance RouterLhs (f :*: g) r t = RouterLhs f r (RouterLhs g r t)
instance (MkRouter f, MkRouter g) => MkRouter (f :*: g) where
mkP t = (f :*: g) :- t''
where
f :- t' = mkP t
g :- t'' = mkP t'
mkS ((f :*: g) :- t) = mkS (f :- mkS (g :- t))
|
MedeaMelana/Zwaluw
|
Web/Zwaluw/Regular.hs
|
Haskell
|
bsd-3-clause
| 2,485
|
{-# LANGUAGE BangPatterns, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
module Database.Riak.Protocol.GetBucketRequest (GetBucketRequest(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data GetBucketRequest = GetBucketRequest{bucket :: !P'.ByteString}
deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
instance P'.Mergeable GetBucketRequest where
mergeAppend (GetBucketRequest x'1) (GetBucketRequest y'1) = GetBucketRequest (P'.mergeAppend x'1 y'1)
instance P'.Default GetBucketRequest where
defaultValue = GetBucketRequest P'.defaultValue
instance P'.Wire GetBucketRequest where
wireSize ft' self'@(GetBucketRequest x'1)
= case ft' of
10 -> calc'Size
11 -> P'.prependMessageSize calc'Size
_ -> P'.wireSizeErr ft' self'
where
calc'Size = (P'.wireSizeReq 1 12 x'1)
wirePut ft' self'@(GetBucketRequest x'1)
= case ft' of
10 -> put'Fields
11 -> do
P'.putSize (P'.wireSize 10 self')
put'Fields
_ -> P'.wirePutErr ft' self'
where
put'Fields
= do
P'.wirePutReq 10 12 x'1
wireGet ft'
= case ft' of
10 -> P'.getBareMessageWith update'Self
11 -> P'.getMessageWith update'Self
_ -> P'.wireGetErr ft'
where
update'Self wire'Tag old'Self
= case wire'Tag of
10 -> Prelude'.fmap (\ !new'Field -> old'Self{bucket = new'Field}) (P'.wireGet 12)
_ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> GetBucketRequest) GetBucketRequest where
getVal m' f' = f' m'
instance P'.GPB GetBucketRequest
instance P'.ReflectDescriptor GetBucketRequest where
getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList [10]) (P'.fromDistinctAscList [10])
reflectDescriptorInfo _
= Prelude'.read
"DescriptorInfo {descName = ProtoName {protobufName = FIName \".Protocol.GetBucketRequest\", haskellPrefix = [MName \"Database\",MName \"Riak\"], parentModule = [MName \"Protocol\"], baseName = MName \"GetBucketRequest\"}, descFilePath = [\"Database\",\"Riak\",\"Protocol\",\"GetBucketRequest.hs\"], isGroup = False, fields = fromList [FieldInfo {fieldName = ProtoFName {protobufName' = FIName \".Protocol.GetBucketRequest.bucket\", haskellPrefix' = [MName \"Database\",MName \"Riak\"], parentModule' = [MName \"Protocol\",MName \"GetBucketRequest\"], baseName' = FName \"bucket\"}, fieldNumber = FieldId {getFieldId = 1}, wireTag = WireTag {getWireTag = 10}, packedTag = Nothing, wireTagLength = 1, isPacked = False, isRequired = True, canRepeat = False, mightPack = False, typeCode = FieldType {getFieldType = 12}, typeName = Nothing, hsRawDefault = Nothing, hsDefault = Nothing}], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False}"
|
iand675/hiker
|
Database/Riak/Protocol/GetBucketRequest.hs
|
Haskell
|
bsd-3-clause
| 3,116
|
{-# language CPP #-}
{-# language LambdaCase #-}
-- No documentation found for Chapter "Exception"
module OpenXR.Exception (OpenXrException(..)) where
import GHC.Exception.Type (Exception(..))
import OpenXR.Core10.Enums.Result (Result)
import OpenXR.Core10.Enums.Result (Result(..))
-- | This exception is thrown from calls to marshalled OpenXR commands
-- which return a negative 'Result'.
newtype OpenXrException = OpenXrException { vulkanExceptionResult :: Result }
deriving (Eq, Ord, Read, Show)
instance Exception OpenXrException where
displayException (OpenXrException r) = show r ++ ": " ++ resultString r
-- | A human understandable message for each 'Result'
resultString :: Result -> String
resultString = \case
r -> show r
|
expipiplus1/vulkan
|
openxr/src/OpenXR/Exception.hs
|
Haskell
|
bsd-3-clause
| 719
|
{-# LANGUAGE ForeignFunctionInterface, CPP #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.ShadingLanguageInclude
-- Copyright : (c) Sven Panne 2014
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- All raw functions and tokens from the ARB_shading_language_include extension,
-- see <http://www.opengl.org/registry/specs/ARB/shading_language_include.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.ShadingLanguageInclude (
-- * Functions
glNamedString,
glDeleteNamedString,
glCompileShaderInclude,
glIsNamedString,
glGetNamedString,
glGetNamedStringiv,
-- * Tokens
gl_SHADER_INCLUDE,
gl_NAMED_STRING_LENGTH,
gl_NAMED_STRING_TYPE
) where
import Foreign.C.Types
import Foreign.Ptr
import Graphics.Rendering.OpenGL.Raw.Core31.Types
import Graphics.Rendering.OpenGL.Raw.Extensions
--------------------------------------------------------------------------------
#include "HsOpenGLRaw.h"
extensionNameString :: String
extensionNameString = "GL_ARB_shading_language_include"
EXTENSION_ENTRY(dyn_glNamedString,ptr_glNamedString,"glNamedString",glNamedString,GLenum -> GLint -> Ptr GLchar -> GLint -> Ptr GLchar -> IO ())
EXTENSION_ENTRY(dyn_glDeleteNamedString,ptr_glDeleteNamedString,"glDeleteNamedString",glDeleteNamedString,GLint -> Ptr GLchar -> IO ())
EXTENSION_ENTRY(dyn_glCompileShaderInclude,ptr_glCompileShaderInclude,"glCompileShaderInclude",glCompileShaderInclude,GLuint -> GLsizei -> Ptr (Ptr GLchar) -> Ptr GLint -> IO ())
EXTENSION_ENTRY(dyn_glIsNamedString,ptr_glIsNamedString,"glIsNamedString",glIsNamedString,GLint -> Ptr GLchar -> IO GLboolean)
EXTENSION_ENTRY(dyn_glGetNamedString,ptr_glGetNamedString,"glGetNamedString",glGetNamedString,GLint -> Ptr GLchar -> GLsizei -> Ptr GLint -> Ptr GLchar -> IO ())
EXTENSION_ENTRY(dyn_glGetNamedStringiv,ptr_glGetNamedStringiv,"glGetNamedStringiv",glGetNamedStringiv,GLint -> Ptr GLchar -> GLenum -> Ptr GLint -> IO ())
gl_SHADER_INCLUDE :: GLenum
gl_SHADER_INCLUDE = 0x8DAE
gl_NAMED_STRING_LENGTH :: GLenum
gl_NAMED_STRING_LENGTH = 0x8DE9
gl_NAMED_STRING_TYPE :: GLenum
gl_NAMED_STRING_TYPE = 0x8DEA
|
mfpi/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/ARB/ShadingLanguageInclude.hs
|
Haskell
|
bsd-3-clause
| 2,366
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# OPTIONS_GHC -Wall #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett 2014
-- License : BSD3
-- Maintainer: Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Hask.Power where
import Hask.Core
import Hask.Rel
import Hask.Rep
import qualified Prelude
infixr 0 ⋔
type (⋔) = Power
class (Category ((~>) :: i -> i -> *), hom ~ Hom) => Powered (hom :: j -> j -> i) where
type Power :: i -> j -> j
flipped :: forall (a :: j) (u :: i) (b :: j) (a' :: j) (u' :: i) (b' :: j).
Iso (hom a (Power u b)) (hom a' (Power u' b')) (u `Hom` hom a b) (u' `Hom` hom a' b')
flip :: Powered hom => hom a (Power u b) ~> Hom u (hom a b)
flip = get flipped
unflip :: Powered hom => Hom u (hom a b) ~> hom a (Power u b)
unflip = beget flipped
-- flippedInternal :: forall (a :: i) (u :: i) (b :: i). CCC (Hom :: i -> i -> *) => Iso' ((b^u)^a) ((b^a)^u)
--flippedInternal = dimap (curry $ curry $ apply . first apply . associate (fmap1 swap))
-- (curry $ curry $ apply . first apply . associate (fmap1 swap))
instance Powered (->) where
type Power = (->)
flipped = dimap Prelude.flip Prelude.flip
instance Powered (|-) where
type Power = (|-)
flipped = dimap (curry $ curry $ apply . first apply . associate (fmap1 swap))
(curry $ curry $ apply . first apply . associate (fmap1 swap))
instance Powered (Lift1 (->)) where
type Power = Lift (->)
flipped = dimap (curry $ curry $ apply . first apply . associate (fmap1 swap))
(curry $ curry $ apply . first apply . associate (fmap1 swap))
--flipped = dimap (Nat $ beget _Lift . fmap1 (beget _Lift) . flip . fmap1 (get _Lift) . get _Lift)
-- (Nat $ beget _Lift . fmap1 (beget _Lift) . flip . fmap1 (get _Lift) . get _Lift)
instance Powered (Lift2 (Lift1 (->))) where
type Power = Lift (Lift (->))
flipped = dimap (curry $ curry $ apply . first apply . associate (fmap1 swap))
(curry $ curry $ apply . first apply . associate (fmap1 swap))
--flipped = dimap (Nat $ beget _Lift . Nat (beget _Lift . fmap1 (transport (beget _Lift) . beget _Lift) . flip . fmap1 (get _Lift . transport (get _Lift)) . get _Lift) . get _Lift)
-- (Nat $ beget _Lift . Nat (beget _Lift . fmap1 (transport (beget _Lift) . beget _Lift) . flip . fmap1 (get _Lift . transport (get _Lift)) . get _Lift) . get _Lift)
-- Power1 :: * -> (i -> *) -> (i -> *)
newtype Power1 v f a = Power { runPower :: v -> f a }
instance Powered (Nat :: (i -> *) -> (i -> *) -> *) where
type Power = Power1
flipped = dimap
(\k v -> Nat $ \f -> runPower (transport k f) v)
(\k -> Nat $ \a' -> Power $ \u' -> transport (k u') a')
instance Contravariant Power1 where
contramap f = nat2 $ Power . lmap f . runPower
instance Functor (Power1 v) where
fmap f = Nat $ Power . fmap1 (transport f) . runPower
instance Semimonoidal (Power1 v) where
ap2 = Nat $ \(Lift (Power va, Power vb)) -> Power $ \v -> Lift (va v, vb v)
instance Monoidal (Power1 v) where
ap0 = Nat $ \(Const ()) -> Power $ \_ -> Const ()
instance Semigroup m => Semigroup (Power1 v m) where
mult = multM
instance Monoid m => Monoid (Power1 v m) where
one = oneM
instance Semimonoidal f => Semimonoidal (Power1 v f) where
ap2 (Power vfa, Power vfb) = Power $ \v -> ap2 (vfa v, vfb v)
instance Monoidal f => Monoidal (Power1 v f) where
ap0 () = Power $ \_ -> ap0 ()
instance (Semimonoidal f, Semigroup m) => Semigroup (Power1 v f m) where
mult = multM
instance (Monoidal f, Monoid m) => Monoid (Power1 v f m) where
one = oneM
instance Functor f => Functor (Power1 v f) where
fmap f = Power . fmap1 (fmap f) . runPower
instance Corepresentable Power1 where
type Corep Power1 = Rel
_Corep = dimap (Nat $ \(Power ab) -> Lift (ab . get _Const))
(Nat $ \(Lift ab) -> Power (ab . beget _Const))
|
ekmett/hask
|
old/src/Hask/Power.hs
|
Haskell
|
bsd-3-clause
| 4,423
|
-- Copyright (c) 2017 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, FlexibleContexts #-}
-- | This module defines the FlatIR language.
--
-- FlatIR is a simply-typed flat-scoped intermediate language. It
-- is designed to be reasonably close to LLVM, with instructions
-- similar to LLVM's, but without some of the difficulties of LLVM.
--
-- At the moment, the FlatIR language is under revision, and will
-- probably change quite a bit.
--
-- Things that need to be done:
-- * Add notions of vtables and lookups to the language
-- * Add variant types
-- * Redesign/modify certain instructions (Deref, Call, Cast, Alloc)
-- * Add exception handling
module IR.FlatIR.Syntax(
-- * Indexes
Id,
Label,
Fieldname,
Typename,
Globalname,
-- * Operators and options
Binop(..),
Unop(..),
-- * Core language
-- ** Types
Type(..),
TypeDef(..),
FieldDef(..),
FormDef(..),
Ptr(..),
Mutability(..),
-- ** Execution
Exp(..),
LValue(..),
Stm(..),
Bind(..),
Transfer(..),
-- ** Definitions
DeclNames(..),
Block(..),
Body(..),
Global(..),
Module(..),
-- ** Utilities
renameType
) where
import Data.Array
import Data.Graph.Inductive.Graph(Graph)
import Data.Functor
import Data.Hashable
import Data.Maybe
import Data.Intervals(Intervals)
import Data.Position.DWARFPosition(DWARFPosition)
import Data.Word
import IR.Common.Alloc
import IR.Common.Body
import IR.Common.LValue
import IR.Common.Names
import IR.Common.Ptr
import IR.Common.Operator
import IR.Common.Rename
import IR.Common.RenameType
import IR.Common.Transfer
import Prelude hiding (head, init)
--import Prelude.Extras
--import Text.Format
import Text.XML.Expat.Pickle
import Text.XML.Expat.Tree(NodeG)
import qualified Data.ByteString as Strict
-- FlatIR is a simply-typed IR intended to be close to LLVM. It is
-- intended primarily as a jumping-off point for other languages
-- targeting LLVM.
-- Programs in FlatIR are equipped with very detailed information
-- about garbage collection. This is passed through to LLVM in the
-- form of metadata.
-- FlatIR also contains a virtual call abstraction, which allows
-- polymorphic languages to compile to FlatIR without having to
-- monomorphise everything. XXX IMPLEMENT THIS
-- FlatIR will eventually contain transaction information.
-- In general, any optimization pass that would be written for
-- FlatIR should instead be written for LLVM, unless there is a very
-- compelling reason for it. Examples would be optimizations that
-- deal with GC or virtual calls (or eventually transactions).
-- | Data for a structure field.
data FieldDef tagty =
FieldDef {
-- | The name of the field
fieldDefName :: !Strict.ByteString,
-- | The mutability of the field.
fieldDefMutability :: !Mutability,
-- | The type of the field.
fieldDefTy :: Type tagty,
-- | The position in source from which this arises.
fieldDefPos :: DWARFPosition Globalname Typename
}
-- | Data for a variant.
data FormDef tagty =
FormDef {
-- | The name of the variant.
formDefName :: !Strict.ByteString,
-- | The mutability of the variant data.
formDefMutability :: !Mutability,
-- | The variant type.
formDefTy :: Type tagty,
-- | The position in source from which this arises.
formDefPos :: DWARFPosition Globalname Typename
}
-- | Types. Types are monomorphic, and correspond roughly with LLVM
-- types.
data Type tagty =
-- | A function type
FuncType {
-- | The return type of the function.
funcTyRetTy :: Type tagty,
-- | The types of the arguments.
funcTyArgTys :: [Type tagty],
-- | The position in source from which this arises.
funcTyPos :: DWARFPosition Globalname Typename
}
-- | A structure, representing both tuples and records
| StructType {
-- | Whether or not the layout is strict.
structPacked :: !Bool,
-- | The fields of the struct.
structFields :: !(Array Fieldname (FieldDef tagty)),
-- | The position in source from which this arises.
structPos :: DWARFPosition Globalname Typename
}
  -- | A variant type, representing a tagged sum of forms
  | VariantType {
      -- | The forms of the variant.
variantTyForms :: !(Array Formname (FormDef tagty)),
-- | The position in source from which this arises.
variantTyPos :: DWARFPosition Globalname Typename
}
-- | An array. Unlike LLVM arrays, these may be variable-sized
| ArrayType {
-- | The length of the array, if known.
arrayLen :: !(Maybe Word),
-- | The type of array elements.
arrayElemTy :: Type tagty,
-- | The position in source from which this arises.
arrayPos :: DWARFPosition Globalname Typename
}
-- | Pointers, both native and GC
| PtrType {
-- | The pointer information
ptrTy :: !(Ptr tagty (Type tagty)),
-- | The position in source from which this arises.
ptrPos :: DWARFPosition Globalname Typename
}
-- | An integer, possibly signed, with a size.
| IntType {
-- | Whether or not the int is signed.
intSigned :: !Bool,
-- | The size of the int in bits.
intSize :: !Word,
-- | The possible-value intervals for the integer.
intIntervals :: !(Intervals Integer),
-- | The position in source from which this arises.
intPos :: DWARFPosition Globalname Typename
}
-- | Floating point types
| FloatType {
-- | The size of the float in bits.
floatSize :: !Word,
-- | The position in source from which this arises.
floatPos :: DWARFPosition Globalname Typename
}
-- | A defined type
| IdType {
-- | The name for this type.
idName :: !Typename,
-- | The position in source from which this arises.
idPos :: DWARFPosition Globalname Typename
}
-- | The unit type, equivalent to SML unit and C/Java void
| UnitType {
-- | The position in source from which this arises.
unitPos :: DWARFPosition Globalname Typename
}
-- | An expression
data Exp tagty =
-- | Allocate an object.
Alloc {
-- | The allocation data.
allocData :: !(Allocation tagty (Type tagty) (Exp tagty)),
-- | The position in source from which this arises.
allocPos :: DWARFPosition Globalname Typename
}
-- | A binary operation
| Binop {
-- | The operator.
binopOp :: !Binop,
-- | The left hand side.
binopLeft :: Exp tagty,
-- | The right hand side.
binopRight :: Exp tagty,
-- | The position in source from which this arises.
binopPos :: DWARFPosition Globalname Typename
}
-- | Call a function.
| Call {
-- | The function being called. Must be a function value.
callFunc :: Exp tagty,
-- | The arguments to the function.
callArgs :: [Exp tagty],
-- | The position in source from which this arises.
callPos :: DWARFPosition Globalname Typename
}
-- | A unary operation
| Unop {
-- | The operator.
unopOp :: !Unop,
-- | The operand.
unopVal :: Exp tagty,
-- | The position in source from which this arises.
unopPos :: DWARFPosition Globalname Typename
}
-- | A conversion from one type to another.
| Conv {
-- | The type to which the value is being converted.
convTy :: Type tagty,
-- | The value being converted.
convVal :: Exp tagty,
-- | The position in source from which this arises.
convPos :: DWARFPosition Globalname Typename
}
-- | Treat an expression as if it were the given type regardless of
-- its actual type.
| Cast {
-- | The type to which the value is being cast.
castTy :: Type tagty,
-- | The value being cast.
castVal :: Exp tagty,
-- | The position in source from which this arises.
castPos :: DWARFPosition Globalname Typename
}
-- | Address of an LValue.
| AddrOf {
-- | The value having its address taken.
addrofVal :: LValue (Exp tagty),
-- | The position in source from which this arises.
addrofPos :: DWARFPosition Globalname Typename
}
-- | A structure literal.
| StructLit {
-- | The literal's type, must be a struct type.
structLitTy :: Type tagty,
-- | The constant's field values
structLitFields :: !(Array Fieldname (Exp tagty)),
-- | The position in source from which this arises.
structLitPos :: DWARFPosition Globalname Typename
}
-- | A variant literal.
| VariantLit {
-- | The literal's type, must be a variant type.
variantLitTy :: Type tagty,
-- | The literal's form.
variantLitForm :: !Formname,
-- | The literal's inner value.
variantLitVal :: Exp tagty,
-- | The position in source from which this arises.
variantLitPos :: DWARFPosition Globalname Typename
}
-- | An array literal
| ArrayLit {
-- | The constant's type, must be an array type.
arrayLitTy :: Type tagty,
-- | The constant's values
arrayLitVals :: [Exp tagty],
-- | The position in source from which this arises.
arrayLitPos :: DWARFPosition Globalname Typename
}
  -- | A numerical constant with a given size and signedness. XXX add a
  -- floating point constant.
| IntLit {
-- | The constant's type, must be an integer or float type.
intLitTy :: Type tagty,
-- | The constant's value
intLitVal :: !Integer,
-- | The position in source from which this arises.
intLitPos :: DWARFPosition Globalname Typename
}
-- | An LValue.
| LValue { lvalueData :: !(LValue (Exp tagty)) }
-- | A global value. Represents a global variable or a function.
data Global tagty gr =
-- | A function
Function {
-- | Name of the function
funcName :: !(Maybe DeclNames),
-- | Return type
funcRetTy :: Type tagty,
-- | A map from identifiers for arguments and local variables to
-- their types.
funcValTys :: !(Array Id (Type tagty)),
-- | A list of the identifiers representing arguments
funcParams :: [Id],
-- | The function's body, if it has one
funcBody :: Maybe (Body (Exp tagty) (StmElems (Exp tagty)) gr),
-- | The position in source from which this arises.
funcPos :: DWARFPosition Globalname Typename
}
-- | A global variable
| GlobalVar {
-- | The name of the variable.
gvarName :: !(Maybe DeclNames),
-- | The type of the variable.
gvarTy :: Type tagty,
-- | The initializer.
gvarInit :: Maybe (Exp tagty),
-- | The variable's mutability.
gvarMutability :: !Mutability,
-- | The position in source from which this arises.
gvarPos :: DWARFPosition Globalname Typename
}
-- | Type definitions.
data TypeDef tagty =
-- | A full, named type definition.
TypeDef {
-- | The typedef's name.
typeDefStr :: !Strict.ByteString,
-- | The type.
typeDefTy :: !(Type tagty),
-- | Position of the type definition.
typeDefPos :: DWARFPosition Globalname Typename
}
-- | A type definition to a name.
| Name {
-- | The typedef's name.
nameStr :: !Strict.ByteString,
-- | Position of the type definition.
namePos :: DWARFPosition Globalname Typename
}
-- | An anonymous type definition.
| Anon {
-- | The type.
anonTy :: !(Type tagty),
-- | Position of the type definition.
anonPos :: DWARFPosition Globalname Typename
}
-- | A module. Represents a concept similar to an LLVM module.
data Module tagty tagdescty gr =
Module {
-- | Name of the module
modName :: !Strict.ByteString,
-- | A map from 'Typename's to their proper names and possibly their
-- definitions
modTypes :: !(Array Typename (TypeDef tagty)),
-- | A map from 'Tagname's to their definitions
modTags :: !(Array Tagname (TagDesc tagdescty)),
-- | Generated tagged types (this module will generate the
-- signatures and accessor definitions for all these 'Tagname's)
modGenTags :: [Tagname],
-- | A map from 'Globalname's to the corresponding definitions
modGlobals :: !(Array Globalname (Global tagty gr)),
-- | Should be a file position, indicating the file from which
-- this arises.
modPos :: DWARFPosition Globalname Typename
}
instance Eq tagty => Eq (FieldDef tagty) where
FieldDef { fieldDefName = name1, fieldDefTy = ty1,
fieldDefMutability = mut1 } ==
FieldDef { fieldDefName = name2, fieldDefTy = ty2,
fieldDefMutability = mut2 } =
mut1 == mut2 && name1 == name2 && ty1 == ty2
instance Eq tagty => Eq (FormDef tagty) where
FormDef { formDefName = name1, formDefTy = ty1, formDefMutability = mut1 } ==
FormDef { formDefName = name2, formDefTy = ty2, formDefMutability = mut2 } =
mut1 == mut2 && name1 == name2 && ty1 == ty2
instance Eq tagty => Eq (Type tagty) where
FuncType { funcTyRetTy = retty1, funcTyArgTys = params1 } ==
FuncType { funcTyRetTy = retty2, funcTyArgTys = params2 } =
retty1 == retty2 && params1 == params2
StructType { structPacked = packed1, structFields = fields1 } ==
StructType { structPacked = packed2, structFields = fields2 } =
packed1 == packed2 && fields1 == fields2
VariantType { variantTyForms = forms1 } ==
VariantType { variantTyForms = forms2 } =
forms1 == forms2
ArrayType { arrayLen = len1, arrayElemTy = inner1 } ==
ArrayType { arrayLen = len2, arrayElemTy = inner2 } =
len1 == len2 && inner1 == inner2
PtrType { ptrTy = objtype1 } == PtrType { ptrTy = objtype2 } =
objtype1 == objtype2
IntType { intSigned = signed1, intIntervals = intervals1,
intSize = size1 } ==
IntType { intSigned = signed2, intIntervals = intervals2,
intSize = size2 } =
signed1 == signed2 && size1 == size2 && intervals1 == intervals2
IdType { idName = name1 } == IdType { idName = name2 } = name1 == name2
FloatType { floatSize = size1 } == FloatType { floatSize = size2 } =
size1 == size2
(UnitType _) == (UnitType _) = True
_ == _ = False
instance Eq tagty => Eq (Exp tagty) where
Alloc { allocData = alloc1 } == Alloc { allocData = alloc2 } =
alloc1 == alloc2
Binop { binopOp = op1, binopLeft = left1, binopRight = right1 } ==
Binop { binopOp = op2, binopLeft = left2, binopRight = right2 } =
op1 == op2 && left1 == left2 && right1 == right2
Call { callFunc = func1, callArgs = args1 } ==
Call { callFunc = func2, callArgs = args2 } =
func1 == func2 && args1 == args2
Unop { unopOp = op1, unopVal = val1 } ==
Unop { unopOp = op2, unopVal = val2 } =
op1 == op2 && val1 == val2
Conv { convTy = ty1, convVal = val1 } ==
Conv { convTy = ty2, convVal = val2 } =
ty1 == ty2 && val1 == val2
Cast { castTy = ty1, castVal = val1 } ==
Cast { castTy = ty2, castVal = val2 } =
ty1 == ty2 && val1 == val2
AddrOf { addrofVal = val1 } == AddrOf { addrofVal = val2 } = val1 == val2
StructLit { structLitTy = ty1, structLitFields = fields1 } ==
StructLit { structLitTy = ty2, structLitFields = fields2 } =
ty1 == ty2 && fields1 == fields2
VariantLit { variantLitTy = ty1, variantLitForm = form1,
variantLitVal = val1 } ==
VariantLit { variantLitTy = ty2, variantLitForm = form2,
variantLitVal = val2 } =
form1 == form2 && ty1 == ty2 && val1 == val2
ArrayLit { arrayLitTy = ty1, arrayLitVals = vals1 } ==
ArrayLit { arrayLitTy = ty2, arrayLitVals = vals2 } =
ty1 == ty2 && vals1 == vals2
IntLit { intLitTy = ty1, intLitVal = val1 } ==
IntLit { intLitTy = ty2, intLitVal = val2 } =
ty1 == ty2 && val1 == val2
(LValue lval1) == (LValue lval2) = lval1 == lval2
_ == _ = False
instance Ord tagty => Ord (FieldDef tagty) where
compare FieldDef { fieldDefName = name1, fieldDefTy = ty1,
fieldDefMutability = mut1 }
FieldDef { fieldDefName = name2, fieldDefTy = ty2,
fieldDefMutability = mut2 } =
case compare mut1 mut2 of
EQ -> case compare name1 name2 of
EQ -> compare ty1 ty2
out -> out
out -> out
instance Ord tagty => Ord (FormDef tagty) where
compare FormDef { formDefName = name1, formDefTy = ty1,
formDefMutability = mut1 }
FormDef { formDefName = name2, formDefTy = ty2,
formDefMutability = mut2 } =
case compare mut1 mut2 of
EQ -> case compare name1 name2 of
EQ -> compare ty1 ty2
out -> out
out -> out
instance Ord tagty => Ord (Type tagty) where
compare FuncType { funcTyRetTy = retty1, funcTyArgTys = params1 }
FuncType { funcTyRetTy = retty2, funcTyArgTys = params2 } =
case compare retty1 retty2 of
EQ -> compare params1 params2
out -> out
compare FuncType {} _ = LT
compare _ FuncType {} = GT
compare StructType { structPacked = packed1, structFields = fields1 }
StructType { structPacked = packed2, structFields = fields2 } =
case compare packed1 packed2 of
EQ -> compare fields1 fields2
out -> out
compare StructType {} _ = LT
compare _ StructType {} = GT
compare VariantType { variantTyForms = forms1 }
VariantType { variantTyForms = forms2 } =
compare forms1 forms2
compare VariantType {} _ = LT
compare _ VariantType {} = GT
compare ArrayType { arrayLen = len1, arrayElemTy = inner1 }
ArrayType { arrayLen = len2, arrayElemTy = inner2 } =
case compare len1 len2 of
EQ -> compare inner1 inner2
out -> out
compare ArrayType {} _ = LT
compare _ ArrayType {} = GT
compare PtrType { ptrTy = objtype1 } PtrType { ptrTy = objtype2 } =
compare objtype1 objtype2
compare PtrType {} _ = LT
compare _ PtrType {} = GT
compare IntType { intSigned = signed1, intIntervals = intervals1,
intSize = size1 }
IntType { intSigned = signed2, intIntervals = intervals2,
intSize = size2 } =
case compare signed1 signed2 of
EQ -> case compare size1 size2 of
EQ -> compare intervals1 intervals2
out -> out
out -> out
compare IntType {} _ = LT
compare _ IntType {} = GT
compare IdType { idName = name1 } IdType { idName = name2 } =
compare name1 name2
compare IdType {} _ = LT
compare _ IdType {} = GT
compare FloatType { floatSize = size1 } FloatType { floatSize = size2 } =
compare size1 size2
compare FloatType {} _ = LT
compare _ FloatType {} = GT
compare (UnitType _) (UnitType _) = EQ
instance Ord tagty => Ord (Exp tagty) where
compare Alloc { allocData = alloc1 } Alloc { allocData = alloc2 } =
compare alloc1 alloc2
compare Alloc {} _ = LT
compare _ Alloc {} = GT
compare Binop { binopOp = op1, binopLeft = left1, binopRight = right1 }
Binop { binopOp = op2, binopLeft = left2, binopRight = right2 } =
case compare op1 op2 of
EQ -> case compare left1 left2 of
EQ -> compare right1 right2
out -> out
out -> out
compare Binop {} _ = LT
compare _ Binop {} = GT
compare Call { callFunc = func1, callArgs = args1 }
Call { callFunc = func2, callArgs = args2 } =
case compare func1 func2 of
EQ -> compare args1 args2
out -> out
compare Call {} _ = LT
compare _ Call {} = GT
compare Unop { unopOp = op1, unopVal = val1 }
Unop { unopOp = op2, unopVal = val2 } =
case compare op1 op2 of
EQ -> compare val1 val2
out -> out
compare Unop {} _ = LT
compare _ Unop {} = GT
compare Conv { convTy = ty1, convVal = val1 }
Conv { convTy = ty2, convVal = val2 } =
case compare ty1 ty2 of
EQ -> compare val1 val2
out -> out
compare Conv {} _ = LT
compare _ Conv {} = GT
compare Cast { castTy = ty1, castVal = val1 }
Cast { castTy = ty2, castVal = val2 } =
case compare ty1 ty2 of
EQ -> compare val1 val2
out -> out
compare Cast {} _ = LT
compare _ Cast {} = GT
compare AddrOf { addrofVal = val1 } AddrOf { addrofVal = val2 } =
compare val1 val2
compare AddrOf {} _ = LT
compare _ AddrOf {} = GT
compare StructLit { structLitTy = ty1, structLitFields = fields1 }
StructLit { structLitTy = ty2, structLitFields = fields2 } =
case compare ty1 ty2 of
EQ -> compare fields1 fields2
out -> out
compare StructLit {} _ = LT
compare _ StructLit {} = GT
compare VariantLit { variantLitTy = ty1, variantLitForm = form1,
variantLitVal = val1 }
VariantLit { variantLitTy = ty2, variantLitForm = form2,
variantLitVal = val2 } =
case compare form1 form2 of
EQ -> case compare ty1 ty2 of
EQ -> compare val1 val2
out -> out
out -> out
compare VariantLit {} _ = LT
compare _ VariantLit {} = GT
compare ArrayLit { arrayLitTy = ty1, arrayLitVals = vals1 }
ArrayLit { arrayLitTy = ty2, arrayLitVals = vals2 } =
case compare ty1 ty2 of
EQ -> compare vals1 vals2
out -> out
compare ArrayLit {} _ = LT
compare _ ArrayLit {} = GT
compare IntLit { intLitTy = ty1, intLitVal = val1 }
IntLit { intLitTy = ty2, intLitVal = val2 } =
case compare ty1 ty2 of
EQ -> compare val1 val2
out -> out
compare IntLit {} _ = LT
compare _ IntLit {} = GT
compare LValue { lvalueData = lval1 } LValue { lvalueData = lval2 } =
compare lval1 lval2
instance Hashable tagty => Hashable (FieldDef tagty) where
hashWithSalt s FieldDef { fieldDefName = name, fieldDefTy = ty,
fieldDefMutability = mut } =
s `hashWithSalt` mut `hashWithSalt` name `hashWithSalt` ty
instance Hashable tagty => Hashable (FormDef tagty) where
hashWithSalt s FormDef { formDefName = name, formDefTy = ty,
formDefMutability = mut } =
s `hashWithSalt` mut `hashWithSalt` name `hashWithSalt` ty
instance Hashable tagty => Hashable (Type tagty) where
hashWithSalt s FuncType { funcTyRetTy = retty, funcTyArgTys = params } =
s `hashWithSalt` (0 :: Int) `hashWithSalt` retty `hashWithSalt` params
hashWithSalt s StructType { structPacked = packed, structFields = fields } =
s `hashWithSalt` (1 :: Int) `hashWithSalt`
packed `hashWithSalt` elems fields
hashWithSalt s VariantType { variantTyForms = forms } =
s `hashWithSalt` (2 :: Int) `hashWithSalt` elems forms
hashWithSalt s ArrayType { arrayLen = Nothing, arrayElemTy = inner } =
s `hashWithSalt` (3 :: Int) `hashWithSalt` (0 :: Int) `hashWithSalt` inner
hashWithSalt s ArrayType { arrayLen = Just size, arrayElemTy = inner } =
s `hashWithSalt` (3 :: Int) `hashWithSalt` size `hashWithSalt` inner
hashWithSalt s PtrType { ptrTy = objtype } =
s `hashWithSalt` (4 :: Int) `hashWithSalt` objtype
hashWithSalt s IntType { intSigned = signed, intIntervals = intervals,
intSize = size } =
s `hashWithSalt` (5 :: Int) `hashWithSalt` signed `hashWithSalt`
intervals `hashWithSalt` size
hashWithSalt s IdType { idName = name } =
s `hashWithSalt` (6 :: Int) `hashWithSalt` name
hashWithSalt s FloatType { floatSize = size } =
s `hashWithSalt` (7 :: Int) `hashWithSalt` size
  hashWithSalt s UnitType {} = s `hashWithSalt` (8 :: Int) -- tag distinct from FloatType
instance Hashable tagty => Hashable (Exp tagty) where
hashWithSalt s Alloc { allocData = alloc } =
s `hashWithSalt` (0 :: Int) `hashWithSalt` alloc
hashWithSalt s Binop { binopOp = op, binopLeft = left, binopRight = right } =
s `hashWithSalt` (1 :: Int) `hashWithSalt`
op `hashWithSalt` left `hashWithSalt` right
hashWithSalt s Call { callFunc = func, callArgs = args } =
s `hashWithSalt` (2 :: Int) `hashWithSalt` func `hashWithSalt` args
hashWithSalt s Unop { unopOp = op, unopVal = val } =
s `hashWithSalt` (3 :: Int) `hashWithSalt` op `hashWithSalt` val
hashWithSalt s Conv { convTy = ty, convVal = val } =
s `hashWithSalt` (4 :: Int) `hashWithSalt` ty `hashWithSalt` val
hashWithSalt s Cast { castTy = ty, castVal = val } =
s `hashWithSalt` (5 :: Int) `hashWithSalt` ty `hashWithSalt` val
hashWithSalt s AddrOf { addrofVal = val } =
s `hashWithSalt` (6 :: Int) `hashWithSalt` val
hashWithSalt s StructLit { structLitTy = ty, structLitFields = fields } =
s `hashWithSalt` (7 :: Int) `hashWithSalt` ty `hashWithSalt` elems fields
hashWithSalt s VariantLit { variantLitTy = ty, variantLitForm = form,
variantLitVal = val } =
s `hashWithSalt` (8 :: Int) `hashWithSalt`
form `hashWithSalt` ty `hashWithSalt` val
hashWithSalt s ArrayLit { arrayLitTy = ty, arrayLitVals = vals } =
s `hashWithSalt` (9 :: Int) `hashWithSalt` ty `hashWithSalt` vals
hashWithSalt s IntLit { intLitTy = ty, intLitVal = val } =
s `hashWithSalt` (10 :: Int) `hashWithSalt` ty `hashWithSalt` val
hashWithSalt s (LValue lval) =
s `hashWithSalt` (11 :: Int) `hashWithSalt` lval
instance RenameType Typename (FieldDef tagty) where
renameType f fdef @ FieldDef { fieldDefTy = ty } =
fdef { fieldDefTy = renameType f ty }
instance RenameType Typename (FormDef tagty) where
renameType f vdef @ FormDef { formDefTy = ty } =
vdef { formDefTy = renameType f ty }
instance RenameType Typename (Type tagty) where
renameType f ty @ FuncType { funcTyRetTy = retty, funcTyArgTys = argtys } =
ty { funcTyArgTys = renameType f argtys, funcTyRetTy = renameType f retty }
renameType f ty @ StructType { structFields = fields } =
ty { structFields = fmap (renameType f) fields }
renameType f ty @ VariantType { variantTyForms = forms } =
ty { variantTyForms = fmap (renameType f) forms }
renameType f ty @ ArrayType { arrayElemTy = elemty } =
ty { arrayElemTy = renameType f elemty }
-- renameType f ty @ PtrType { ptrTy = inner } =
-- ty { ptrTy = renameType f inner }
renameType f ty @ IdType { idName = name } = ty { idName = f name }
renameType _ ty = ty
instance RenameType Typename (Exp tagty) where
renameType f a @ Alloc { allocData = alloc } =
a { allocData = renameType f alloc }
renameType f e @ Binop { binopLeft = left, binopRight = right } =
e { binopLeft = renameType f left, binopRight = renameType f right }
renameType f e @ Call { callFunc = func, callArgs = args } =
e { callFunc = renameType f func, callArgs = renameType f args }
renameType f e @ Conv { convTy = ty, convVal = val } =
e { convTy = renameType f ty, convVal = renameType f val }
renameType f e @ Cast { castTy = ty, castVal = val } =
e { castTy = renameType f ty, castVal = renameType f val }
renameType f e @ Unop { unopVal = val } = e { unopVal = renameType f val }
renameType f e @ AddrOf { addrofVal = val } =
e { addrofVal = renameType f val }
renameType f e @ StructLit { structLitFields = fields, structLitTy = ty } =
e { structLitFields = renameTypeArray f fields,
structLitTy = renameType f ty }
renameType f e @ VariantLit { variantLitVal = val, variantLitTy = ty } =
e { variantLitVal = renameType f val, variantLitTy = renameType f ty }
renameType f e @ ArrayLit { arrayLitVals = vals, arrayLitTy = ty } =
e { arrayLitVals = renameType f vals, arrayLitTy = renameType f ty }
renameType f e @ IntLit { intLitTy = ty } =
e { intLitTy = renameType f ty }
renameType f (LValue l) = LValue (renameType f l)
instance Rename Id (Exp tagty) where
rename f a @ Alloc { allocData = alloc } = a { allocData = rename f alloc }
rename f e @ Binop { binopLeft = left, binopRight = right } =
e { binopLeft = rename f left, binopRight = rename f right }
rename f e @ Call { callFunc = func, callArgs = args } =
e { callFunc = rename f func, callArgs = rename f args }
rename f e @ Conv { convVal = val } = e { convVal = rename f val }
rename f e @ Cast { castVal = val } = e { castVal = rename f val }
rename f e @ Unop { unopVal = val } = e { unopVal = rename f val }
rename f e @ AddrOf { addrofVal = val } = e { addrofVal = rename f val }
rename f e @ StructLit { structLitFields = fields } =
e { structLitFields = renameArray f fields }
rename f e @ VariantLit { variantLitVal = val } =
e { variantLitVal = rename f val }
rename f e @ ArrayLit { arrayLitVals = vals } =
e { arrayLitVals = rename f vals }
rename f (LValue l) = LValue (rename f l)
rename _ e = e
funcTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
funcTypePickler =
let
revfunc FuncType { funcTyRetTy = retty, funcTyArgTys = argtys,
funcTyPos = pos } = (argtys, retty, pos)
revfunc _ = error "Can't convert to FuncType"
in
xpWrap (\(argtys, retty, pos) -> FuncType { funcTyRetTy = retty,
funcTyArgTys = argtys,
funcTyPos = pos },
revfunc)
(xpElemNodes (gxFromString "FuncType")
(xpTriple (xpElemNodes (gxFromString "args")
(xpList xpickle))
(xpElemNodes (gxFromString "ret") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
XmlPickler [NodeG [] tag text] (Fieldname, FieldDef typetag) where
xpickle =
xpWrap (\((idx, fname, mut), (ty, pos)) ->
(idx, FieldDef { fieldDefName = gxToByteString fname,
fieldDefMutability = mut, fieldDefTy = ty,
fieldDefPos = pos }),
\(idx, FieldDef { fieldDefName = fname, fieldDefMutability = mut,
fieldDefTy = ty, fieldDefPos = pos }) ->
((idx, gxFromByteString fname, mut), (ty, pos)))
(xpElem (gxFromString "field")
(xpTriple xpickle (xpAttr (gxFromString "name") xpText)
xpickle)
(xpPair (xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
fieldsPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Array Fieldname (FieldDef typetag))
fieldsPickler =
xpWrap (\l -> array (toEnum 0, toEnum (length l)) l, assocs)
(xpElemNodes (gxFromString "fields") (xpList xpickle))
structTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
structTypePickler =
let
revfunc StructType { structPacked = packed, structFields = fields,
structPos = pos } = (packed, (fields, pos))
revfunc _ = error "Can't convert to StructType"
in
xpWrap (\(packed, (fields, pos)) -> StructType { structPacked = packed,
structFields = fields,
structPos = pos },
revfunc)
(xpElem (gxFromString "StructType")
(xpAttr (gxFromString "packed") xpPrim)
(xpPair (xpElemNodes (gxFromString "fields") fieldsPickler)
(xpElemNodes (gxFromString "pos") xpickle)))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
XmlPickler [NodeG [] tag text] (Formname, FormDef typetag) where
xpickle =
xpWrap (\((idx, fname, mut), (ty, pos)) ->
(idx, FormDef { formDefName = gxToByteString fname,
formDefMutability = mut, formDefTy = ty,
formDefPos = pos }),
\(idx, FormDef { formDefMutability = mut, formDefPos = pos,
formDefName = fname, formDefTy = ty }) ->
((idx, gxFromByteString fname, mut), (ty, pos)))
(xpElem (gxFromString "form")
(xpTriple xpickle (xpAttr (gxFromString "name") xpText)
xpickle)
(xpPair (xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
formsPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Array Formname (FormDef typetag))
formsPickler =
xpWrap (\l -> array (toEnum 0, toEnum (length l)) l, assocs)
(xpElemNodes (gxFromString "forms") (xpList xpickle))
variantTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
variantTypePickler =
let
revfunc VariantType { variantTyForms = forms, variantTyPos = pos } =
(forms, pos)
revfunc _ = error "Can't convert to VariantType"
in
xpWrap (\(forms, pos) -> VariantType { variantTyForms = forms,
variantTyPos = pos },
revfunc)
(xpElemNodes (gxFromString "VariantType")
(xpPair (xpElemNodes (gxFromString "forms")
formsPickler)
(xpElemNodes (gxFromString "pos") xpickle)))
arrayTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
arrayTypePickler =
let
revfunc ArrayType { arrayElemTy = elemty, arrayLen = len,
arrayPos = pos } = (len, (elemty, pos))
revfunc _ = error "Can't convert to ArrayType"
in
xpWrap (\(len, (elemty, pos)) -> ArrayType { arrayElemTy = elemty,
arrayLen = len,
arrayPos = pos },
revfunc)
(xpElem (gxFromString "ArrayType")
(xpOption (xpAttr (gxFromString "len") xpPrim))
(xpPair (xpElemNodes (gxFromString "elem") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
ptrTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
ptrTypePickler =
let
revfunc PtrType { ptrTy = ptrty, ptrPos = pos } = (ptrty, pos)
revfunc _ = error "Can't convert to PtrType"
in
xpWrap (\(ptrty, pos) -> PtrType { ptrTy = ptrty, ptrPos = pos },
revfunc)
(xpElemNodes (gxFromString "PtrType")
(xpPair (xpElemNodes (gxFromString "inner") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
intTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
intTypePickler =
let
revfunc IntType { intSize = size, intSigned = signed,
intIntervals = intervals, intPos = pos } =
((signed, size), (intervals, pos))
revfunc _ = error "Can't convert to IntType"
in
xpWrap (\((signed, size), (intervals, pos)) ->
IntType { intSize = size, intSigned = signed,
intIntervals = intervals, intPos = pos }, revfunc)
(xpElem (gxFromString "IntType")
(xpPair (xpAttr (gxFromString "signed") xpPrim)
(xpAttr (gxFromString "size") xpPrim))
(xpPair (xpElemNodes (gxFromString "intervals") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
floatTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
floatTypePickler =
let
revfunc FloatType { floatSize = size, floatPos = pos } = (size, pos)
revfunc _ = error "Can't convert to FloatType"
in
xpWrap (\(size, pos) -> FloatType { floatSize = size, floatPos = pos },
revfunc)
(xpElem (gxFromString "FloatType")
(xpAttr (gxFromString "size") xpPrim)
(xpElemNodes (gxFromString "pos") xpickle))
idTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
idTypePickler =
let
revfunc IdType { idName = tyname, idPos = pos } = (tyname, pos)
revfunc _ = error "Can't convert to IdType"
in
xpWrap (\(tyname, pos) -> IdType { idName = tyname, idPos = pos }, revfunc)
(xpElem (gxFromString "IdType") xpickle
(xpElemNodes (gxFromString "pos") xpickle))
unitTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Type typetag)
unitTypePickler =
let
revfunc (UnitType pos) = pos
revfunc _ = error "Can't convert to UnitType"
in
xpWrap (UnitType, revfunc)
(xpElemNodes (gxFromString "UnitType")
(xpElemNodes (gxFromString "pos") xpickle))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
XmlPickler [NodeG [] tag text] (Type typetag) where
xpickle =
let
picker FuncType {} = 0
picker StructType {} = 1
picker VariantType {} = 2
picker ArrayType {} = 3
picker PtrType {} = 4
picker IntType {} = 5
picker FloatType {} = 6
picker IdType {} = 7
picker UnitType {} = 8
in
xpAlt picker [funcTypePickler, structTypePickler, variantTypePickler,
arrayTypePickler, ptrTypePickler, intTypePickler,
floatTypePickler, idTypePickler, unitTypePickler ]
binopPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
binopPickler =
let
revfunc Binop { binopOp = op, binopLeft = left,
binopRight = right, binopPos = pos } =
(op, (left, right, pos))
revfunc _ = error "Can't convert to Binop"
in
xpWrap (\(op, (left, right, pos)) ->
Binop { binopOp = op, binopLeft = left,
binopRight = right, binopPos = pos }, revfunc)
(xpElem (gxFromString "Binop") xpickle
(xpTriple (xpElemNodes (gxFromString "left") xpickle)
(xpElemNodes (gxFromString "right") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
callPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
callPickler =
let
revfunc Call { callFunc = func, callArgs = args, callPos = pos } =
(func, args, pos)
revfunc _ = error "Can't convert to Call"
in
xpWrap (\(func, args, pos) -> Call { callFunc = func, callArgs = args,
callPos = pos }, revfunc)
(xpElemNodes (gxFromString "Call")
(xpTriple (xpElemNodes (gxFromString "func") xpickle)
(xpElemNodes (gxFromString "args")
(xpList xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
unopPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
unopPickler =
let
revfunc Unop { unopOp = op, unopVal = val, unopPos = pos } =
(op, (val, pos))
revfunc _ = error "Can't convert to Unop"
in
xpWrap (\(op, (val, pos)) -> Unop { unopOp = op, unopVal = val,
unopPos = pos }, revfunc)
(xpElem (gxFromString "Unop") xpickle
(xpPair (xpElemNodes (gxFromString "val") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
convPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
convPickler =
let
revfunc Conv { convVal = val, convTy = ty, convPos = pos } = (val, ty, pos)
revfunc _ = error "Can't convert to Conv"
in
xpWrap (\(val, ty, pos) -> Conv { convVal = val, convTy = ty,
convPos = pos }, revfunc)
(xpElemNodes (gxFromString "Conv")
(xpTriple (xpElemNodes (gxFromString "val") xpickle)
(xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
castPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
castPickler =
let
revfunc Cast { castVal = val, castTy = ty, castPos = pos } = (val, ty, pos)
revfunc _ = error "Can't convert to Cast"
in
xpWrap (\(val, ty, pos) -> Cast { castVal = val, castTy = ty,
castPos = pos }, revfunc)
(xpElemNodes (gxFromString "Cast")
(xpTriple (xpElemNodes (gxFromString "val") xpickle)
(xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
addrofPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
addrofPickler =
let
revfunc AddrOf { addrofVal = val, addrofPos = pos } = (val, pos)
revfunc _ = error "Can't convert to AddrOf"
in
xpWrap (\(val, pos) -> AddrOf { addrofVal = val, addrofPos = pos },
revfunc)
(xpElemNodes (gxFromString "AddrOf")
(xpPair (xpElemNodes (gxFromString "val") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
structLitPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
structLitPickler =
let
revfunc StructLit { structLitTy = ty, structLitFields = fields,
structLitPos = pos } = (ty, fields, pos)
revfunc _ = error "Can't convert to StructLit"
fieldValsPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Array Fieldname (Exp typetag))
fieldValsPickler =
xpWrap (\l -> array (toEnum 0, toEnum (length l)) l, assocs)
(xpList (xpElem (gxFromString "field") xpickle xpickle))
in
xpWrap (\(ty, fields, pos) ->
StructLit { structLitTy = ty, structLitFields = fields,
structLitPos = pos }, revfunc)
(xpElemNodes (gxFromString "StructLit")
(xpTriple (xpElemNodes (gxFromString "ty") xpickle)
(xpElemNodes (gxFromString "fields")
fieldValsPickler)
(xpElemNodes (gxFromString "pos") xpickle)))
variantLitPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
variantLitPickler =
let
revfunc VariantLit { variantLitTy = ty, variantLitVal = val,
variantLitForm = form, variantLitPos = pos } =
(form, (ty, val, pos))
revfunc _ = error "Can't convert to VariantLit"
in
xpWrap (\(form, (ty, val, pos)) ->
VariantLit { variantLitTy = ty, variantLitVal = val,
variantLitForm = form, variantLitPos = pos }, revfunc)
(xpElem (gxFromString "VariantLit") xpickle
(xpTriple (xpElemNodes (gxFromString "ty") xpickle)
(xpElemNodes (gxFromString "val") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
arrayLitPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
arrayLitPickler =
let
revfunc ArrayLit { arrayLitTy = ty, arrayLitVals = vals,
arrayLitPos = pos } = (ty, vals, pos)
revfunc _ = error "Can't convert to ArrayLit"
in
xpWrap (\(ty, vals, pos) -> ArrayLit { arrayLitTy = ty, arrayLitVals = vals,
arrayLitPos = pos }, revfunc)
(xpElemNodes (gxFromString "ArrayLit")
(xpTriple (xpElemNodes (gxFromString "ty") xpickle)
(xpElemNodes (gxFromString "vals")
(xpList xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
intLitPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
intLitPickler =
let
    revfunc IntLit { intLitTy = ty, intLitVal = val, intLitPos = pos } =
      (val, (ty, pos))
    revfunc _ = error "Can't convert to IntLit"
in
xpWrap (\(val, (ty, pos)) -> IntLit { intLitTy = ty, intLitVal = val,
intLitPos = pos }, revfunc)
(xpElem (gxFromString "IntType")
(xpAttr (gxFromString "size") xpPrim)
(xpPair (xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
lvaluePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
PU [NodeG [] tag text] (Exp typetag)
lvaluePickler =
let
revfunc (LValue lval) = lval
revfunc _ = error "Can't convert to LValue"
in
xpWrap (LValue, revfunc) (xpElemNodes (gxFromString "LValue") xpickle)
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] typetag) =>
XmlPickler [NodeG [] tag text] (Exp typetag) where
xpickle =
let
      picker Alloc {} = 0
      picker Binop {} = 1
      picker Call {} = 2
      picker Unop {} = 3
      picker Conv {} = 4
      picker Cast {} = 5
      picker AddrOf {} = 6
      picker StructLit {} = 7
      picker VariantLit {} = 8
      picker ArrayLit {} = 9
      picker IntLit {} = 10
      picker LValue {} = 11
in
xpAlt picker [undefined, binopPickler, callPickler, unopPickler,
convPickler, castPickler, addrofPickler,
structLitPickler, variantLitPickler, arrayLitPickler,
intLitPickler, lvaluePickler ]
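-- Note on the instance above: 'xpAlt' treats the picker result as an index
-- into its pickler list, so each constructor's tag must match the position of
-- its pickler (Alloc -> 0 .. LValue -> 11). A minimal sketch of the same
-- pattern for 'Either', assuming two pre-existing picklers @aP@ and @bP@:
--
-- > eitherPickler = xpAlt (either (const 0) (const 1))
-- >                       [ xpWrap (Left, \(Left x) -> x) aP
-- >                       , xpWrap (Right, \(Right y) -> y) bP ]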
functionPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text, Graph gr,
XmlPickler [NodeG [] tag text] tagty) =>
PU [NodeG [] tag text] (Global tagty gr)
functionPickler =
let
revfunc Function { funcName = fname, funcRetTy = retty, funcValTys = valtys,
funcParams = params, funcBody = body, funcPos = pos } =
(fname, (retty, valtys, params, body, pos))
revfunc _ = error "Can't convert to Function"
valtysPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] tagty) =>
PU [NodeG [] tag text] (Array Id (Type tagty))
valtysPickler =
xpWrap (\l -> array (toEnum 0, toEnum (length l)) l, assocs)
(xpList (xpElem (gxFromString "valty") xpickle xpickle))
in
xpWrap (\(fname, (retty, valtys, params, body, pos)) ->
Function { funcName = fname, funcRetTy = retty,
funcValTys = valtys, funcParams = params,
funcBody = body, funcPos = pos }, revfunc)
(xpElem (gxFromString "Function") (xpOption xpickle)
(xp5Tuple (xpElemNodes (gxFromString "retty") xpickle)
(xpElemNodes (gxFromString "valtys") valtysPickler)
(xpElemNodes (gxFromString "params")
(xpList xpickle))
(xpOption (xpElemNodes (gxFromString "body")
xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
globalvarPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] tagty) =>
PU [NodeG [] tag text] (Global tagty gr)
globalvarPickler =
let
revfunc GlobalVar { gvarName = gname, gvarTy = ty, gvarInit = init,
gvarMutability = mut, gvarPos = pos } =
((gname, mut), (ty, init, pos))
revfunc _ = error "Can't convert to GlobalVar"
in
xpWrap (\((gname, mut), (ty, init, pos)) ->
GlobalVar { gvarName = gname, gvarTy = ty, gvarInit = init,
gvarMutability = mut, gvarPos = pos }, revfunc)
           (xpElem (gxFromString "GlobalVar") (xpPair (xpOption xpickle) xpickle)
(xpTriple (xpElemNodes (gxFromString "type") xpickle)
(xpOption (xpElemNodes (gxFromString "init")
xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
Graph gr, XmlPickler [NodeG [] tag text] tagty) =>
XmlPickler [NodeG [] tag text] (Global tagty gr) where
xpickle =
let
picker Function {} = 0
picker GlobalVar {} = 1
in
xpAlt picker [functionPickler, globalvarPickler]
{-
-- This mess is a good example of what I mean about format and a
-- naming function.
instance Graph gr => Format (Module gr) where
format (Module { modName = name, modTypes = types, modGlobals = globals,
modGCHeaders = gcheaders, modGenGCs = gcgen }) =
let
-- These functions here cause a heap of trouble. We want to
-- look up actual names, so they have to get moved inside the
-- format module call. This propogates downward and winds up
-- dragging almost everything inside.
formatTypename :: Typename -> Doc
formatTypename ty = "%" <> fst (types ! ty)
formatGCHeader :: GCHeader -> Doc
formatGCHeader hdr =
let
(ty, mut, mob) = gcheaders ! hdr
in
mut <+> mob <+> fst (types ! ty)
formatGlobalname :: Globalname -> Doc
formatGlobalname fname = "@" <>
(case globals ! fname of
Function { funcName = funcname } -> funcname
GlobalVar { gvarName = gvarname } -> gvarname)
formatType :: Type -> Doc
formatType (FuncType retty params) =
parenList (formatType retty) (map formatType params)
formatType (StructType packed fields) =
let
mapfun (str, mut, ty) =
mut <+> str <+> colon <+> formatType ty
fielddocs =
nest 2 (sep (punctuate comma (map mapfun (elems fields))))
in
if packed
then sep [format "<{", fielddocs, format "}>"]
else sep [lbrace, fielddocs, rbrace ]
formatType (PtrType (Native inner)) = formatType inner <> "*"
formatType (PtrType (GC ptrclass hdr)) =
ptrclass <+> formatGCHeader hdr
formatType (ArrayType (Just size) inner) =
formatType inner <> brackets size
formatType (ArrayType Nothing inner) = formatType inner <> "[]"
formatType (IntType True size) = "i" <> size
formatType (IntType False size) = "ui" <> size
formatType (IdType i) = formatTypename i
formatType (FloatType size) = format "f" <> size
formatType UnitType = format "unit"
formatExp (Call f args) =
parenList (formatExp f) (map formatExp args)
formatExp (GCAlloc header Nothing Nothing) =
"gcalloc" <+> formatGCHeader header
formatExp (GCAlloc header (Just size) Nothing) =
"gcalloc" <+> formatGCHeader header <+> brackets (formatExp size)
formatExp (GCAlloc header Nothing (Just gen)) =
"gcalloc" <+> formatGCHeader header <+> "gen" <+> formatExp gen
formatExp (GCAlloc header (Just size) (Just gen)) =
"gcalloc" <+> formatGCHeader header <+>
brackets (formatExp size) <+> "gen" <+> formatExp gen
formatExp (Binop op l r) =
parens (sep [ format op, formatExp l <> comma, formatExp r ])
formatExp (Unop op e) = parens (hang (format op) 2 (formatExp e))
formatExp (Conv ty inner) =
parens (sep [ format "conv", formatExp inner,
format "to", formatType ty ])
formatExp (Cast ty inner) =
parens (sep [ format "cast", formatExp inner,
format "to", formatType ty ])
formatExp (AddrOf l) = "addrof" <+> formatLValue l
formatExp (LValue l) = formatLValue l
formatExp (StructLit ty fields) =
let
headerdoc = "const" <+> formatType ty
in
braceBlock headerdoc (punctuate comma (map formatExp (elems fields)))
formatExp (ArrayLit ty inits) =
let
headerdoc = "const" <+> formatType ty
in
braceBlock headerdoc (punctuate comma (map formatExp inits))
formatExp (IntLit ty n) = hang (formatType ty) 2 (format n)
formatLValue :: LValue -> Doc
formatLValue (Deref e) = "*" <+> formatExp e
formatLValue (Index e i) = formatExp e <+> brackets (formatExp i)
formatLValue (Field (LValue (Deref e)) field) =
formatExp e <> "->" <> field
formatLValue (Field e field) = formatExp e <> "." <> field
formatLValue (Global g) = formatGlobalname g
formatLValue (Var v) = format v
formatStm :: Stm -> Doc
formatStm (Move dst src) =
formatLValue dst <+> "<-" <+> formatExp src
formatStm (Do e) = formatExp e
formatTransfer :: Transfer -> Doc
formatTransfer (Goto l) = "goto" <+> l
formatTransfer (Case e cases def) =
let
mapfun (i, l) = i <> colon <+> l
in
braceBlock ("case" <+> formatExp e)
(("default" <> colon <+> def) : map mapfun cases)
formatTransfer (Ret (Just e)) = "ret" <+> formatExp e
formatTransfer (Ret Nothing) = format "ret"
formatTransfer Unreachable = format "unreachable"
formatBlock (Block stms trans) =
vcat ((map formatStm stms) ++ [formatTransfer trans])
formatGlobal :: Graph gr => Global gr -> Doc
formatGlobal (Function { funcName = fname, funcRetTy = retty,
funcParams = argnames, funcValTys = vartypes,
funcBody = body }) =
let
argfun i = i <+> colon <+> formatType (vartypes ! i)
varfun (i, ty) = i <+> colon <+> formatType ty
header = parenList ("function" <+> fname) (map argfun argnames)
vardocs = map varfun (assocs vartypes)
fcontent =
case body of
Just (Body (Label entry) graph) ->
let
getnode = fromJust . lab graph
blockfun node =
("L" <> node <> colon) $$
nest 2 (formatBlock (getnode node))
in
vardocs ++ (map blockfun (dfs [entry] graph))
Nothing -> vardocs
in
braceBlock (header <+> colon <+> formatType retty) fcontent
formatGlobal (GlobalVar { gvarName = gname, gvarTy = ty,
gvarInit = Just body }) =
hang (hang ("global" <+> formatType ty) 2 gname) 2 (formatExp body)
formatGlobal (GlobalVar { gvarName = gname, gvarTy = ty,
gvarInit = Nothing }) =
hang ("global" <+> formatType ty) 2 gname
typefunc (tyname, Just ty) =
hang ("type" <+> tyname <+> equals) 2 (formatType ty)
typefunc (tyname, Nothing) = "type" <+> tyname
gcheaderfunc (GCHeader ind, (ty, mob, mut)) =
"gc_header_" <> ind <+> equals <+> mut <+> mob <+> fst (types ! ty)
gcgenfunc hdr = "gen" <+> formatGCHeader hdr
typesdocs = map typefunc (elems types)
gchdrdocs = map gcheaderfunc (assocs gcheaders)
gcgendocs = map gcgenfunc gcgen
globalsdocs = map formatGlobal (elems globals)
content = typesdocs ++ (space : gchdrdocs) ++
(space : gcgendocs) ++ (space : globalsdocs)
in
braceBlock ("module" <+> name) content
instance Graph gr => Show (Module gr) where
show = show . format
-}
|
emc2/chill
|
src/IR/FlatIR/Syntax.hs
|
Haskell
|
bsd-3-clause
| 60,607
|
{-# OPTIONS #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Py.ParserMonad
-- Copyright : (c) 2009 Bernie Pope
-- License : BSD-style
-- Maintainer : bjpop@csse.unimelb.edu.au
-- Stability : experimental
-- Portability : ghc
--
-- Monad support for Python parser and lexer.
-----------------------------------------------------------------------------
module Language.Py.ParserMonad
( P
, execParser
, execParserKeepComments
, runParser
, thenP
, returnP
, setLocation
, getLocation
, getInput
, setInput
, getLastToken
, setLastToken
, setLastEOL
, getLastEOL
, ParseError (..)
, ParseState (..)
, initialState
, pushStartCode
, popStartCode
, getStartCode
, getIndent
, pushIndent
, popIndent
, getIndentStackDepth
, getParen
, pushParen
, popParen
, getParenStackDepth
, addComment
, getComments
, spanError
) where
import Language.Py.SrcLocation (SrcLocation (..), SrcSpan (..), Span (..))
import Language.Py.Token (Token (..))
import Language.Py.ParseError (ParseError (..))
import Control.Applicative ((<$>))
import Control.Monad.State.Class
import Control.Monad.State.Strict as State
import Control.Monad.Error as Error
import Control.Monad.Error.Class
import Control.Monad.Identity as Identity
import Control.Monad.Trans as Trans
import Language.Py.Pretty
internalError :: String -> P a
internalError = throwError . StrError
spanError :: Span a => a -> String -> P b
spanError x str = throwError $ StrError $ unwords [prettyText $ getSpan x, str]
data ParseState = ParseState
{ location :: !SrcLocation -- position at current input location
, input :: !String -- the current input
, previousToken :: !Token -- the previous token
, startCodeStack :: [Int] -- a stack of start codes for the state of the lexer
, indentStack :: [Int] -- a stack of source column positions of indentation levels
, parenStack :: [Token] -- a stack of parens and brackets for indentation handling
, lastEOL :: !SrcSpan -- location of the most recent end-of-line encountered
, comments :: [Token] -- accumulated comments
} deriving Show
initToken :: Token
initToken = NewlineToken SpanEmpty
initialState :: SrcLocation -> String -> [Int] -> ParseState
initialState initLoc inp scStack = ParseState
{ location = initLoc
, input = inp
, previousToken = initToken
, startCodeStack = scStack
, indentStack = [1]
, parenStack = []
, lastEOL = SpanEmpty
, comments = []
}
type P a = StateT ParseState (Either ParseError) a
execParser :: P a -> ParseState -> Either ParseError a
execParser = evalStateT
execParserKeepComments :: P a -> ParseState -> Either ParseError (a, [Token])
execParserKeepComments parser
= evalStateT (parser >>= \x -> getComments >>= \c -> return (x, c))
runParser :: P a -> ParseState -> Either ParseError (a, ParseState)
runParser = runStateT
{-# INLINE returnP #-}
returnP :: a -> P a
returnP = return
{-# INLINE thenP #-}
thenP :: P a -> (a -> P b) -> P b
thenP = (>>=)
{-
failP :: SrcSpan -> [String] -> P a
failP span strs = throwError (prettyText span ++ ": " ++ unwords strs)
-}
setLastEOL :: SrcSpan -> P ()
setLastEOL span = modify $ \s -> s { lastEOL = span }
getLastEOL :: P SrcSpan
getLastEOL = gets lastEOL
setLocation :: SrcLocation -> P ()
setLocation loc = modify $ \s -> s { location = loc }
getLocation :: P SrcLocation
getLocation = gets location
getInput :: P String
getInput = gets input
setInput :: String -> P ()
setInput inp = modify $ \s -> s { input = inp }
getLastToken :: P Token
getLastToken = gets previousToken
setLastToken :: Token -> P ()
setLastToken tok = modify $ \s -> s { previousToken = tok }
pushStartCode :: Int -> P ()
pushStartCode code = do
oldStack <- gets startCodeStack
modify $ \s -> s { startCodeStack = code : oldStack }
popStartCode :: P ()
popStartCode = do
oldStack <- gets startCodeStack
case oldStack of
[] -> internalError "fatal error in lexer: attempt to pop empty start code stack"
_:rest -> modify $ \s -> s { startCodeStack = rest }
getStartCode :: P Int
getStartCode = do
oldStack <- gets startCodeStack
case oldStack of
[] -> internalError "fatal error in lexer: start code stack empty on getStartCode"
code:_ -> return code
pushIndent :: Int -> P ()
pushIndent indent = do
oldStack <- gets indentStack
modify $ \s -> s { indentStack = indent : oldStack }
popIndent :: P ()
popIndent = do
oldStack <- gets indentStack
case oldStack of
[] -> internalError "fatal error in lexer: attempt to pop empty indentation stack"
_:rest -> modify $ \s -> s { indentStack = rest }
getIndent :: P Int
getIndent = do
oldStack <- gets indentStack
case oldStack of
[] -> internalError "fatal error in lexer: indent stack empty on getIndent"
indent:_ -> return indent
getIndentStackDepth :: P Int
getIndentStackDepth = gets (length . indentStack)
pushParen :: Token -> P ()
pushParen symbol = do
oldStack <- gets parenStack
modify $ \s -> s { parenStack = symbol : oldStack }
popParen :: P ()
popParen = do
oldStack <- gets parenStack
case oldStack of
[] -> internalError "fatal error in lexer: attempt to pop empty paren stack"
_:rest -> modify $ \s -> s { parenStack = rest }
getParen :: P (Maybe Token)
getParen = do
oldStack <- gets parenStack
case oldStack of
[] -> return Nothing
symbol:_ -> return $ Just symbol
getParenStackDepth :: P Int
getParenStackDepth = gets (length . parenStack)
addComment :: Token -> P ()
addComment c = do
oldComments <- gets comments
modify $ \s -> s { comments = c : oldComments }
getComments :: P [Token]
getComments = reverse <$> gets comments
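-- A minimal usage sketch: run a parser action against a fresh state built by
-- 'initialState'. The start-code stack @[0]@ is an assumption here; in
-- practice it comes from the generated Alex lexer.
--
-- > lastTokenOf :: SrcLocation -> String -> Either ParseError Token
-- > lastTokenOf loc source = execParser getLastToken (initialState loc source [0])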
|
codeq/language-py
|
src/Language/Py/ParserMonad.hs
|
Haskell
|
bsd-3-clause
| 5,757
|
-- |
-- Module : Crypto.PubKey.DSA
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : Good
--
-- An implementation of the Digital Signature Algorithm (DSA)
{-# LANGUAGE DeriveDataTypeable #-}
module Crypto.PubKey.DSA
( Params(..)
, Signature(..)
, PublicKey(..)
, PrivateKey(..)
, PublicNumber
, PrivateNumber
-- * Generation
, generatePrivate
, calculatePublic
-- * Signature primitive
, sign
, signWith
-- * Verification primitive
, verify
-- * Key pair
, KeyPair(..)
, toPublicKey
, toPrivateKey
) where
import Crypto.Random.Types
import Data.Bits (testBit)
import Data.Data
import Data.Maybe
import Crypto.Number.Basic (numBits)
import Crypto.Number.ModArithmetic (expFast, expSafe, inverse)
import Crypto.Number.Serialize
import Crypto.Number.Generate
import Crypto.Internal.ByteArray (ByteArrayAccess(length), convert, index, dropView, takeView)
import Crypto.Internal.Imports
import Crypto.Hash
import Prelude hiding (length)
-- | DSA Public Number, usually embedded in DSA Public Key
type PublicNumber = Integer
-- | DSA Private Number, usually embedded in DSA Private Key
type PrivateNumber = Integer
-- | Represent DSA parameters namely P, G, and Q.
data Params = Params
{ params_p :: Integer -- ^ DSA p
, params_g :: Integer -- ^ DSA g
, params_q :: Integer -- ^ DSA q
} deriving (Show,Read,Eq,Data,Typeable)
instance NFData Params where
rnf (Params p g q) = p `seq` g `seq` q `seq` ()
-- | Represent a DSA signature namely R and S.
data Signature = Signature
{ sign_r :: Integer -- ^ DSA r
, sign_s :: Integer -- ^ DSA s
} deriving (Show,Read,Eq,Data,Typeable)
instance NFData Signature where
rnf (Signature r s) = r `seq` s `seq` ()
-- | Represent a DSA public key.
data PublicKey = PublicKey
{ public_params :: Params -- ^ DSA parameters
, public_y :: PublicNumber -- ^ DSA public Y
} deriving (Show,Read,Eq,Data,Typeable)
instance NFData PublicKey where
rnf (PublicKey params y) = y `seq` params `seq` ()
-- | Represent a DSA private key.
--
-- Only the private number x needs to be kept secret;
-- the DSA parameters are publicly shared with the other side.
data PrivateKey = PrivateKey
{ private_params :: Params -- ^ DSA parameters
, private_x :: PrivateNumber -- ^ DSA private X
} deriving (Show,Read,Eq,Data,Typeable)
instance NFData PrivateKey where
rnf (PrivateKey params x) = x `seq` params `seq` ()
-- | Represent a DSA key pair
data KeyPair = KeyPair Params PublicNumber PrivateNumber
deriving (Show,Read,Eq,Data,Typeable)
instance NFData KeyPair where
rnf (KeyPair params y x) = x `seq` y `seq` params `seq` ()
-- | Public key of a DSA Key pair
toPublicKey :: KeyPair -> PublicKey
toPublicKey (KeyPair params pub _) = PublicKey params pub
-- | Private key of a DSA Key pair
toPrivateKey :: KeyPair -> PrivateKey
toPrivateKey (KeyPair params _ priv) = PrivateKey params priv
-- | Generate a private number with no specific property;
-- this number is usually called X in DSA texts.
generatePrivate :: MonadRandom m => Params -> m PrivateNumber
generatePrivate (Params _ _ q) = generateMax q
-- | Calculate the public number from the parameters and the private key
calculatePublic :: Params -> PrivateNumber -> PublicNumber
calculatePublic (Params p g _) x = expSafe g x p
-- | Sign a message using the private key and an explicit k number.
signWith :: (ByteArrayAccess msg, HashAlgorithm hash)
=> Integer -- ^ k random number
-> PrivateKey -- ^ private key
-> hash -- ^ hash function
-> msg -- ^ message to sign
-> Maybe Signature
signWith k pk hashAlg msg
| r == 0 || s == 0 = Nothing
| otherwise = Just $ Signature r s
where -- parameters
(Params p g q) = private_params pk
x = private_x pk
-- compute r,s
kInv = fromJust $ inverse k q
hm = os2ip $ hashWith hashAlg msg
r = expSafe g k p `mod` q
s = (kInv * (hm + x * r)) `mod` q
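-- The two values above are the textbook DSA signing equations, with H(m)
-- the message hash interpreted as an integer via 'os2ip':
--
-- >   r = (g^k mod p) mod q
-- >   s = k^-1 * (H(m) + x*r) mod q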
-- | Sign a message using the private key.
sign :: (ByteArrayAccess msg, HashAlgorithm hash, MonadRandom m) => PrivateKey -> hash -> msg -> m Signature
sign pk hashAlg msg = do
k <- generateMax q
case signWith k pk hashAlg msg of
Nothing -> sign pk hashAlg msg
Just sig -> return sig
where
(Params _ _ q) = private_params pk
-- | Verify a signature of a message using the public key.
verify :: (ByteArrayAccess msg, HashAlgorithm hash) => hash -> PublicKey -> Signature -> msg -> Bool
verify hashAlg pk (Signature r s) m
-- Reject the signature if either 0 < r < q or 0 < s < q is not satisfied.
| r <= 0 || r >= q || s <= 0 || s >= q = False
| otherwise = v == r
where (Params p g q) = public_params pk
y = public_y pk
hm = os2ip . truncateHash $ hashWith hashAlg m
w = fromJust $ inverse s q
u1 = (hm*w) `mod` q
u2 = (r*w) `mod` q
v = ((expFast g u1 p) * (expFast y u2 p)) `mod` p `mod` q
-- if the hash is larger than the size of q, truncate it; FIXME: deal with the case of a q not evenly divisible by 8
truncateHash h = if numBits (os2ip h) > numBits q then takeView h (numBits q `div` 8) else dropView h 0
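-- A minimal end-to-end sketch (sign, then verify), assuming caller-supplied
-- DSA domain parameters and a strict ByteString message (any
-- 'ByteArrayAccess' value works); SHA256 comes from "Crypto.Hash":
--
-- > roundTrip :: MonadRandom m => Params -> ByteString -> m Bool
-- > roundTrip params msg = do
-- >     x <- generatePrivate params
-- >     let priv = PrivateKey params x
-- >         pub  = PublicKey params (calculatePublic params x)
-- >     sig <- sign priv SHA256 msg
-- >     return (verify SHA256 pub sig msg)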
|
tekul/cryptonite
|
Crypto/PubKey/DSA.hs
|
Haskell
|
bsd-3-clause
| 5,630
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Provide ability to upload tarballs to Hackage.
module Stack.Upload
( -- * Upload
upload
, uploadBytes
, uploadRevision
-- * Credentials
, HackageCreds
, loadCreds
) where
import Stack.Prelude
import Data.Aeson (FromJSON (..),
ToJSON (..),
decode', encode,
object, withObject,
(.:), (.=))
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy as L
import qualified Data.Conduit.Binary as CB
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import qualified Data.Text.IO as TIO
import Network.HTTP.Client (Response,
RequestBody(RequestBodyLBS),
Request)
import Network.HTTP.Simple (withResponse,
getResponseStatusCode,
getResponseBody,
setRequestHeader,
parseRequest,
httpNoBody)
import Network.HTTP.Client.MultipartFormData (formDataBody, partFileRequestBody,
partBS, partLBS)
import Network.HTTP.Client.TLS (getGlobalManager,
applyDigestAuth,
displayDigestAuthException)
import Stack.Types.Config
import Stack.Types.PackageIdentifier (PackageIdentifier, packageIdentifierString,
packageIdentifierName)
import Stack.Types.PackageName (packageNameString)
import System.Directory (createDirectoryIfMissing,
removeFile)
import System.FilePath ((</>), takeFileName)
import System.IO (hFlush, stdout, putStrLn, putStr, getLine, print) -- TODO remove putStrLn, use logInfo
import System.IO.Echo (withoutInputEcho)
-- | Username and password to log into Hackage.
--
-- Since 0.1.0.0
data HackageCreds = HackageCreds
{ hcUsername :: !Text
, hcPassword :: !Text
, hcCredsFile :: !FilePath
}
deriving Show
instance ToJSON HackageCreds where
toJSON (HackageCreds u p _) = object
[ "username" .= u
, "password" .= p
]
instance FromJSON (FilePath -> HackageCreds) where
parseJSON = withObject "HackageCreds" $ \o -> HackageCreds
<$> o .: "username"
<*> o .: "password"
-- | Load Hackage credentials, either from a save file or the command
-- line.
--
-- Since 0.1.0.0
loadCreds :: Config -> IO HackageCreds
loadCreds config = do
fp <- credsFile config
elbs <- tryIO $ L.readFile fp
case either (const Nothing) Just elbs >>= decode' of
Nothing -> fromPrompt fp
Just mkCreds -> do
unless (configSaveHackageCreds config) $ do
putStrLn "WARNING: You've set save-hackage-creds to false"
putStrLn "However, credentials were found at:"
putStrLn $ " " ++ fp
return $ mkCreds fp
where
fromPrompt fp = do
putStr "Hackage username: "
hFlush stdout
username <- TIO.getLine
password <- promptPassword
let hc = HackageCreds
{ hcUsername = username
, hcPassword = password
, hcCredsFile = fp
}
when (configSaveHackageCreds config) $ do
let prompt = "Save hackage credentials to file at " ++ fp ++ " [y/n]? "
putStr prompt
input <- loopPrompt prompt
putStrLn "NOTE: Avoid this prompt in the future by using: save-hackage-creds: false"
hFlush stdout
case input of
"y" -> do
L.writeFile fp (encode hc)
putStrLn "Saved!"
hFlush stdout
_ -> return ()
return hc
loopPrompt :: String -> IO String
loopPrompt p = do
input <- TIO.getLine
case input of
"y" -> return "y"
"n" -> return "n"
_ -> do
putStr p
loopPrompt p
credsFile :: Config -> IO FilePath
credsFile config = do
let dir = toFilePath (configStackRoot config) </> "upload"
createDirectoryIfMissing True dir
return $ dir </> "credentials.json"
-- | Lifted from cabal-install, Distribution.Client.Upload
promptPassword :: IO Text
promptPassword = do
putStr "Hackage password: "
hFlush stdout
-- save/restore the terminal echoing status (no echoing for entering the password)
passwd <- withoutInputEcho $ fmap T.pack getLine
putStrLn ""
return passwd
applyCreds :: HackageCreds -> Request -> IO Request
applyCreds creds req0 = do
manager <- getGlobalManager
ereq <- applyDigestAuth
(encodeUtf8 $ hcUsername creds)
(encodeUtf8 $ hcPassword creds)
req0
manager
case ereq of
Left e -> do
putStrLn "WARNING: No HTTP digest prompt found, this will probably fail"
case fromException e of
Just e' -> putStrLn $ displayDigestAuthException e'
Nothing -> print e
return req0
Right req -> return req
-- | Upload a single tarball using the given 'HackageCreds'. Instead of
-- sending a file like 'upload', this sends a lazy bytestring.
--
-- Since 0.1.2.1
uploadBytes :: HackageCreds
-> String -- ^ tar file name
-> L.ByteString -- ^ tar file contents
-> IO ()
uploadBytes creds tarName bytes = do
let req1 = setRequestHeader "Accept" ["text/plain"]
"https://hackage.haskell.org/packages/"
formData = [partFileRequestBody "package" tarName (RequestBodyLBS bytes)]
req2 <- formDataBody formData req1
req3 <- applyCreds creds req2
putStr $ "Uploading " ++ tarName ++ "... "
hFlush stdout
withResponse req3 $ \res ->
case getResponseStatusCode res of
200 -> putStrLn "done!"
401 -> do
putStrLn "authentication failure"
handleIO (const $ return ()) (removeFile (hcCredsFile creds))
throwString "Authentication failure uploading to server"
403 -> do
putStrLn "forbidden upload"
putStrLn "Usually means: you've already uploaded this package/version combination"
putStrLn "Ignoring error and continuing, full message from Hackage below:\n"
printBody res
503 -> do
putStrLn "service unavailable"
putStrLn "This error some times gets sent even though the upload succeeded"
putStrLn "Check on Hackage to see if your pacakge is present"
printBody res
code -> do
putStrLn $ "unhandled status code: " ++ show code
printBody res
throwString $ "Upload failed on " ++ tarName
printBody :: Response (ConduitM () S.ByteString IO ()) -> IO ()
printBody res = runConduit $ getResponseBody res .| CB.sinkHandle stdout
-- | Upload a single tarball using the given 'HackageCreds'.
--
-- Since 0.1.0.0
upload :: HackageCreds -> FilePath -> IO ()
upload creds fp = uploadBytes creds (takeFileName fp) =<< L.readFile fp
uploadRevision :: HackageCreds
-> PackageIdentifier
-> L.ByteString
-> IO ()
uploadRevision creds ident cabalFile = do
req0 <- parseRequest $ concat
[ "https://hackage.haskell.org/package/"
, packageIdentifierString ident
, "/"
, packageNameString $ packageIdentifierName ident
, ".cabal/edit"
]
req1 <- formDataBody
[ partLBS "cabalfile" cabalFile
, partBS "publish" "on"
]
req0
req2 <- applyCreds creds req1
void $ httpNoBody req2
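-- A minimal usage sketch: load (or prompt for) Hackage credentials from the
-- stack 'Config' and upload a previously built sdist tarball; the tarball
-- path is supplied by the caller.
--
-- > uploadSdist :: Config -> FilePath -> IO ()
-- > uploadSdist config tarball = do
-- >     creds <- loadCreds config
-- >     upload creds tarball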
|
MichielDerhaeg/stack
|
src/Stack/Upload.hs
|
Haskell
|
bsd-3-clause
| 8,635
|
module Models where
import Data.Monoid
import Language.Haskell.TH
import qualified Data.Text as Text
import Database.Persist.Quasi
import Database.Persist.Quasi.Internal
import Database.Persist.TH
import Database.Persist.Sql
-- TODO: we use lookupName and reify etc., which break in IO; we somehow need
-- to test this another way
mkPersist' :: [UnboundEntityDef] -> IO [Dec]
mkPersist' = runQ . mkPersist sqlSettings
parseReferences' :: String -> IO Exp
parseReferences' = runQ . parseReferencesQ
parseReferencesQ :: String -> Q Exp
parseReferencesQ = parseReferences lowerCaseSettings . Text.pack
-- | # of models, # of fields
mkModels :: Int -> Int -> String
mkModels = mkModelsWithFieldModifier id
mkNullableModels :: Int -> Int -> String
mkNullableModels = mkModelsWithFieldModifier maybeFields
mkModelsWithFieldModifier :: (String -> String) -> Int -> Int -> String
mkModelsWithFieldModifier k i f =
unlines . fmap unlines . take i . map mkModel . zip [0..] . cycle $
[ "Model"
, "Foobar"
, "User"
, "King"
, "Queen"
, "Dog"
, "Cat"
]
where
mkModel :: (Int, String) -> [String]
mkModel (i', m) =
(m <> show i') : indent 4 (map k (mkFields f))
indent :: Int -> [String] -> [String]
indent i = map (replicate i ' ' ++)
mkFields :: Int -> [String]
mkFields i = take i $ map mkField $ zip [0..] $ cycle
[ "Bool"
, "Int"
, "String"
, "Double"
, "Text"
]
where
mkField :: (Int, String) -> String
mkField (i', typ) = "field" <> show i' <> "\t\t" <> typ
maybeFields :: String -> String
maybeFields = (++ " Maybe")
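-- For illustration: @mkModels 1 2@ yields a single entity block roughly like
-- the following (field names and types are tab-separated in the real
-- output), which the benchmarks feed to 'parseReferences'' and 'mkPersist'':
--
-- > Model0
-- >     field0    Bool
-- >     field1    Int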
|
yesodweb/persistent
|
persistent/bench/Models.hs
|
Haskell
|
mit
| 1,640
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeOperators #-}
module DTypes.Classes.DTraversable
( DTraversable (..)
, dtraverse'
, dsequenceA'
, dtoList
) where
import DTypes.Classes.DFunctor
import DTypes.Compose
import DTypes.Trafo
import Data.Functor.Identity (Identity (..))
#if MIN_VERSION_base(4,8,0)
import Control.Applicative (Const (..))
#else
import Control.Applicative (Applicative (..), (<$>), Const (..))
import Data.Traversable (Traversable (..))
#endif
class DFunctor d => DTraversable (d :: (k -> *) -> *) where
{-# MINIMAL dtraverse | dsequenceA #-}
dtraverse :: Applicative g => (f ==> Compose g h) -> d f -> g (d h)
dtraverse f = dsequenceA . dfmap f
dsequenceA :: Applicative g => d (Compose g h) -> g (d h)
dsequenceA = dtraverse id
-- TODO: more functions
instance (Traversable f, DTraversable d) => DTraversable (Compose f d) where
dsequenceA (Compose x) = Compose <$> traverse dsequenceA x
dtraverse' :: (DTraversable d, Applicative g) => (f ==> g) -> d f -> g (d Identity)
dtraverse' f = dtraverse (Compose . fmap Identity . f)
dsequenceA' :: (DTraversable d, Applicative f) => d f -> f (d Identity)
dsequenceA' = dsequenceA . dfmap (Compose . fmap Identity)
dtoList
:: DTraversable (d :: (* -> *) -> *)
=> d (Const a)
-> [a]
dtoList = getConst . dtraverse' (Const . (:[]) . getConst) -- robot monkey!
{-
dFoldMap
:: (Monoid m, DTraversable d)
=> (forall a. f a -> m) -> d f -> m
dFoldMap f = getConst . dTraverse (Const . f)
dFold
:: (Monoid m, DTraversable d)
=> d (Const m) -> m
dFold = getConst . dSequenceA
dToList
:: DTraversable d
=> d (Const a) -> [a]
dToList = dFoldMap (pure . getConst)
-}
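-- A minimal sketch of a hand-written instance plus 'dtoList', for a
-- hypothetical two-field record over a wrapper functor (this assumes
-- 'DFunctor' exposes the usual natural-transformation map 'dfmap'):
--
-- > data Pair f = Pair (f Int) (f Bool)
-- >
-- > instance DFunctor Pair where
-- >   dfmap f (Pair a b) = Pair (f a) (f b)
-- >
-- > instance DTraversable Pair where
-- >   dsequenceA (Pair (Compose a) (Compose b)) = Pair <$> a <*> b
-- >
-- > labels :: [String]
-- > labels = dtoList (Pair (Const "x") (Const "y"))   -- ["x","y"]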
|
timjb/ftypes
|
src/DTypes/Classes/DTraversable.hs
|
Haskell
|
mit
| 1,725
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
module Tinc.Cache (
Cache
, CachedPackage(..)
, readCache
, findReusablePackages
, cachedExecutables
, populateCache
#ifdef TEST
, PopulateCacheAction(..)
, populateCacheAction
, PackageLocation(..)
, readPackageGraph
, readAddSourceHashes
, addAddSourceHashes
, listSandboxes
#endif
) where
import Control.Monad.Catch
import Control.Monad
import Control.Monad.IO.Class
import Data.List
import qualified Data.Map as Map
import Data.Yaml
import System.Directory hiding (getDirectoryContents, withCurrentDirectory)
import System.FilePath
import System.IO.Temp
import Data.Function
import Tinc.Fail
import Tinc.GhcInfo
import Tinc.GhcPkg
import Tinc.Package
import Tinc.PackageGraph
import Tinc.Process
import Tinc.Sandbox
import Tinc.SourceDependency
import Tinc.Types
import Util
data CachedPackage = CachedPackage {
cachedPackageName :: Package
, cachedPackageConfig :: Path PackageConfig
} deriving (Eq, Show)
cachedExecutables :: CachedPackage -> IO [FilePath]
cachedExecutables (CachedPackage package (Path config)) = do
exists <- doesDirectoryExist binDir
if exists
then listDirectoryContents binDir >>= mapM canonicalizePath
else return []
where
binDir = dropFileName config </> ".." </> "bin" </> showPackage package
findReusablePackages :: Cache -> [Package] -> [CachedPackage]
findReusablePackages (Cache globalPackages packageGraphs) installPlan = reusablePackages
where
reusablePackages :: [CachedPackage]
reusablePackages = nubBy ((==) `on` cachedPackageName) (concatMap findReusable packageGraphs)
findReusable :: PackageGraph PackageLocation -> [CachedPackage]
findReusable packageGraph =
[CachedPackage p c | (p, PackageConfig c) <- calculateReusablePackages packages packageGraph]
where
packages = nubBy ((==) `on` packageName) (installPlan ++ map fromSimplePackage globalPackages)
data Cache = Cache {
_cacheGlobalPackages :: [SimplePackage]
, _cachePackageGraphs :: [PackageGraph PackageLocation]
}
data PackageLocation = GlobalPackage | PackageConfig (Path PackageConfig)
deriving (Eq, Ord, Show)
fromSimplePackage :: SimplePackage -> Package
fromSimplePackage (SimplePackage name version) = Package name (Version version Nothing)
readPackageGraph :: (MonadIO m, Fail m, GhcPkg m) => [SimplePackage] -> Path PackageDb -> Path PackageDb -> m (PackageGraph PackageLocation)
readPackageGraph globalPackages globalPackageDb packageDb = do
packageConfigs <- liftIO $ cachedListPackages packageDb
let globalValues = map (, GlobalPackage) globalPackages
let values = map (fmap PackageConfig) packageConfigs
dot <- readDotFile
fromDot (globalValues ++ values) dot >>= liftIO . addAddSourceHashes packageDb
where
dotFile = path packageDb </> "packages.dot"
readDotFile = do
cachedIOAfter (liftIO $ touchPackageCache packageDb) dotFile $ do
readGhcPkg [globalPackageDb, packageDb] ["dot"]
addSourceHashesFile :: FilePath
addSourceHashesFile = "add-source.yaml"
readAddSourceHashes :: Path PackageDb -> IO [SourceDependency]
readAddSourceHashes packageDb = do
let file = path packageDb </> addSourceHashesFile
exists <- doesFileExist file
if exists
then decodeFileEither file >>= either (dieLoc . show) return
else return []
writeAddSourceHashes :: Path PackageDb -> [SourceDependency] -> IO ()
writeAddSourceHashes packageDb addSourceHashes
| null addSourceHashes = return ()
| otherwise = do
encodeFile (path packageDb </> addSourceHashesFile) addSourceHashes
touchPackageCache packageDb
addAddSourceHash :: Map.Map String String -> SimplePackage -> PackageLocation -> Package
addAddSourceHash hashes (SimplePackage name version) location = case location of
PackageConfig _ -> maybe package (\ hash -> Package name (Version version $ Just hash)) (Map.lookup (packageName package) hashes)
GlobalPackage -> package
where
package = Package name (Version version Nothing)
addAddSourceHashes :: Path PackageDb -> SimplePackageGraph PackageLocation -> IO (PackageGraph PackageLocation)
addAddSourceHashes packageDb graph = do
hashes <- mkMap <$> readAddSourceHashes packageDb
return $ mapIndex (addAddSourceHash hashes) graph
where
mkMap :: [SourceDependency] -> Map.Map String String
mkMap hashes = Map.fromList (map (\ (SourceDependency name hash) -> (name, hash)) hashes)
readCache :: GhcInfo -> Path CacheDir -> IO Cache
readCache ghcInfo cacheDir = do
globalPackages <- listGlobalPackages
sandboxes <- listSandboxes cacheDir
cache <- forM sandboxes $ \ sandbox -> do
packageDbPath <- findPackageDb sandbox
readPackageGraph globalPackages (ghcInfoGlobalPackageDb ghcInfo) packageDbPath
return (Cache globalPackages cache)
validMarker :: FilePath
validMarker = "tinc.valid.v3"
listSandboxes :: Path CacheDir -> IO [Path Sandbox]
listSandboxes (Path cacheDir) = map Path <$> listEntries
where
isValidCacheEntry :: FilePath -> IO Bool
isValidCacheEntry p = doesFileExist (p </> validMarker)
listEntries :: IO [FilePath]
listEntries = listDirectories cacheDir >>= filterM isValidCacheEntry
data PopulateCacheAction = PopulateCacheAction {
populateCacheActionInstallPlan :: [Package]
, populateCacheActionAddSource :: [Path SourceDependency]
, populateCacheActionWriteAddSourceHashes :: [SourceDependency]
} deriving (Eq, Show)
populateCacheAction :: Path SourceDependencyCache -> [Package] -> [CachedPackage] -> Either [CachedPackage] PopulateCacheAction
populateCacheAction sourceDependencyCache missing reusable
| null missing = Left reusable
| otherwise = Right PopulateCacheAction {
populateCacheActionInstallPlan = installPlan
, populateCacheActionAddSource = addSource
, populateCacheActionWriteAddSourceHashes = [SourceDependency name hash | Package name (Version _ (Just hash)) <- (missing ++ map cachedPackageName reusable)]
}
where
installPlan :: [Package]
installPlan = missing ++ [p | p@(Package _ (Version _ Nothing)) <- map cachedPackageName reusable]
addSource :: [Path SourceDependency]
addSource = map (sourceDependencyPath sourceDependencyCache) [SourceDependency name hash | Package name (Version _ (Just hash)) <- missing]
populateCache :: (MonadIO m, MonadMask m, Fail m, MonadProcess m) => Path CacheDir -> Path SourceDependencyCache -> [Package] -> [CachedPackage] -> m [CachedPackage]
populateCache cacheDir sourceDependencyCache missing reusable = either return populate (populateCacheAction sourceDependencyCache missing reusable)
where
populate PopulateCacheAction{..} = do
sandbox <- liftIO $ newCacheEntry cacheDir
withCurrentDirectory sandbox $ do
packageDb <- initSandbox populateCacheActionAddSource (map cachedPackageConfig reusable)
liftIO $ do
writeAddSourceHashes packageDb populateCacheActionWriteAddSourceHashes
writeFile validMarker ""
callProcessM "cabal" ("v1-install" : "--bindir=$prefix/bin/$pkgid" : map showPackage populateCacheActionInstallPlan)
map (uncurry CachedPackage)
. ignore_add_source_hashes_for_now_as_we_currently_do_not_need_them
<$> cachedListPackages packageDb
ignore_add_source_hashes_for_now_as_we_currently_do_not_need_them = map (\ (a, b) -> (fromSimplePackage a, b))
newCacheEntry :: Path CacheDir -> IO FilePath
newCacheEntry cacheDir = do
basename <- takeBaseName <$> getCurrentDirectory
createTempDirectory (path cacheDir) (basename ++ "-")
|
haskell-tinc/tinc
|
src/Tinc/Cache.hs
|
Haskell
|
bsd-3-clause
| 7,770
|
{-# LANGUAGE GeneralizedNewtypeDeriving, ConstraintKinds, PatternGuards, TupleSections #-}
module Idris.ParseExpr where
import Prelude hiding (pi)
import Text.Trifecta.Delta
import Text.Trifecta hiding (span, stringLiteral, charLiteral, natural, symbol, char, string, whiteSpace, Err)
import Text.Parser.LookAhead
import Text.Parser.Expression
import qualified Text.Parser.Token as Tok
import qualified Text.Parser.Char as Chr
import qualified Text.Parser.Token.Highlight as Hi
import Idris.AbsSyntax
import Idris.ParseHelpers
import Idris.ParseOps
import Idris.DSL
import Idris.Core.TT
import Control.Applicative
import Control.Monad
import Control.Monad.State.Strict
import Data.Function (on)
import Data.Maybe
import qualified Data.List.Split as Spl
import Data.List
import Data.Monoid
import Data.Char
import qualified Data.HashSet as HS
import qualified Data.Text as T
import qualified Data.ByteString.UTF8 as UTF8
import Debug.Trace
-- | Allow implicit type declarations
allowImp :: SyntaxInfo -> SyntaxInfo
allowImp syn = syn { implicitAllowed = True }
-- | Disallow implicit type declarations
disallowImp :: SyntaxInfo -> SyntaxInfo
disallowImp syn = syn { implicitAllowed = False }
{-| Parses an expression as a whole
@
FullExpr ::= Expr EOF_t;
@
-}
fullExpr :: SyntaxInfo -> IdrisParser PTerm
fullExpr syn = do x <- expr syn
eof
i <- get
return $ debindApp syn (desugar syn i x)
tryFullExpr :: SyntaxInfo -> IState -> String -> Either Err PTerm
tryFullExpr syn st input =
case runparser (fullExpr syn) st "" input of
Success tm -> Right tm
Failure e -> Left (Msg (show e))
{- | Parses an expression
@
Expr ::= Pi
@
-}
expr :: SyntaxInfo -> IdrisParser PTerm
expr = pi
{- | Parses an expression with possible operator applied
@
OpExpr ::= {- Expression Parser with Operators based on Expr' -};
@
-}
opExpr :: SyntaxInfo -> IdrisParser PTerm
opExpr syn = do i <- get
buildExpressionParser (table (idris_infixes i)) (expr' syn)
{- | Parses either an internally defined expression or
a user-defined one
@
Expr' ::= "External (User-defined) Syntax"
| InternalExpr;
@
-}
expr' :: SyntaxInfo -> IdrisParser PTerm
expr' syn = try (externalExpr syn)
<|> internalExpr syn
<?> "expression"
{- | Parses a user-defined expression -}
externalExpr :: SyntaxInfo -> IdrisParser PTerm
externalExpr syn = do i <- get
(FC fn start _) <- getFC
expr <- extensions syn (syntaxRulesList $ syntax_rules i)
(FC _ _ end) <- getFC
let outerFC = FC fn start end
return (mapPTermFC (fixFC outerFC) (fixFCH fn outerFC) expr)
<?> "user-defined expression"
where -- Fix non-highlighting FCs by approximating with the span of the syntax application
fixFC outer inner | inner `fcIn` outer = inner
| otherwise = outer
-- Fix highlighting FCs by making them useless, to avoid spurious highlights
fixFCH fn outer inner | inner `fcIn` outer = inner
| otherwise = FileFC fn
{- | Parses a simple user-defined expression -}
simpleExternalExpr :: SyntaxInfo -> IdrisParser PTerm
simpleExternalExpr syn = do i <- get
extensions syn (filter isSimple (syntaxRulesList $ syntax_rules i))
where
isSimple (Rule (Expr x:xs) _ _) = False
isSimple (Rule (SimpleExpr x:xs) _ _) = False
isSimple (Rule [Keyword _] _ _) = True
isSimple (Rule [Symbol _] _ _) = True
isSimple (Rule (_:xs) _ _) = case last xs of
Keyword _ -> True
Symbol _ -> True
_ -> False
isSimple _ = False
{- | Tries to parse a user-defined expression given a list of syntactic extensions -}
extensions :: SyntaxInfo -> [Syntax] -> IdrisParser PTerm
extensions syn rules = extension syn [] (filter isValid rules)
<?> "user-defined expression"
where
isValid :: Syntax -> Bool
isValid (Rule _ _ AnySyntax) = True
isValid (Rule _ _ PatternSyntax) = inPattern syn
isValid (Rule _ _ TermSyntax) = not (inPattern syn)
isValid (DeclRule _ _) = False
data SynMatch = SynTm PTerm | SynBind FC Name -- ^ the FC is for highlighting information
deriving Show
extension :: SyntaxInfo -> [Maybe (Name, SynMatch)] -> [Syntax] -> IdrisParser PTerm
extension syn ns rules =
choice $ flip map (groupBy (ruleGroup `on` syntaxSymbols) rules) $ \rs ->
case head rs of -- can never be []
Rule (symb:_) _ _ -> try $ do
n <- extensionSymbol symb
extension syn (n : ns) [Rule ss t ctx | (Rule (_:ss) t ctx) <- rs]
-- If we have more than one Rule in this bucket, our grammar is
-- nondeterministic.
Rule [] ptm _ -> return (flatten (updateSynMatch (mapMaybe id ns) ptm))
where
ruleGroup [] [] = True
ruleGroup (s1:_) (s2:_) = s1 == s2
ruleGroup _ _ = False
extensionSymbol :: SSymbol -> IdrisParser (Maybe (Name, SynMatch))
extensionSymbol (Keyword n) = do fc <- reservedFC (show n)
highlightP fc AnnKeyword
return Nothing
extensionSymbol (Expr n) = do tm <- expr syn
return $ Just (n, SynTm tm)
extensionSymbol (SimpleExpr n) = do tm <- simpleExpr syn
return $ Just (n, SynTm tm)
extensionSymbol (Binding n) = do (b, fc) <- name
return $ Just (n, SynBind fc b)
extensionSymbol (Symbol s) = do fc <- symbolFC s
highlightP fc AnnKeyword
return Nothing
flatten :: PTerm -> PTerm -- flatten application
flatten (PApp fc (PApp _ f as) bs) = flatten (PApp fc f (as ++ bs))
flatten t = t
updateSynMatch = update
where
updateB :: [(Name, SynMatch)] -> (Name, FC) -> (Name, FC)
updateB ns (n, fc) = case lookup n ns of
Just (SynBind tfc t) -> (t, tfc)
_ -> (n, fc)
update :: [(Name, SynMatch)] -> PTerm -> PTerm
update ns (PRef fc hls n) = case lookup n ns of
Just (SynTm t) -> t
_ -> PRef fc hls n
update ns (PPatvar fc n) = uncurry (flip PPatvar) $ updateB ns (n, fc)
update ns (PLam fc n nfc ty sc)
= let (n', nfc') = updateB ns (n, nfc)
in PLam fc n' nfc' (update ns ty) (update (dropn n ns) sc)
update ns (PPi p n fc ty sc)
= let (n', nfc') = updateB ns (n, fc)
in PPi (updTacImp ns p) n' nfc'
(update ns ty) (update (dropn n ns) sc)
update ns (PLet fc n nfc ty val sc)
= let (n', nfc') = updateB ns (n, nfc)
in PLet fc n' nfc' (update ns ty)
(update ns val) (update (dropn n ns) sc)
update ns (PApp fc t args)
= PApp fc (update ns t) (map (fmap (update ns)) args)
update ns (PAppBind fc t args)
= PAppBind fc (update ns t) (map (fmap (update ns)) args)
update ns (PMatchApp fc n) = let (n', nfc') = updateB ns (n, fc)
in PMatchApp nfc' n'
update ns (PIfThenElse fc c t f)
= PIfThenElse fc (update ns c) (update ns t) (update ns f)
update ns (PCase fc c opts)
= PCase fc (update ns c) (map (pmap (update ns)) opts)
update ns (PRewrite fc eq tm mty)
= PRewrite fc (update ns eq) (update ns tm) (fmap (update ns) mty)
update ns (PPair fc hls p l r) = PPair fc hls p (update ns l) (update ns r)
update ns (PDPair fc hls p l t r)
= PDPair fc hls p (update ns l) (update ns t) (update ns r)
update ns (PAs fc n t) = PAs fc (fst $ updateB ns (n, NoFC)) (update ns t)
update ns (PAlternative ms a as) = PAlternative ms a (map (update ns) as)
update ns (PHidden t) = PHidden (update ns t)
update ns (PGoal fc r n sc) = PGoal fc (update ns r) n (update ns sc)
update ns (PDoBlock ds) = PDoBlock $ map (upd ns) ds
where upd :: [(Name, SynMatch)] -> PDo -> PDo
upd ns (DoExp fc t) = DoExp fc (update ns t)
upd ns (DoBind fc n nfc t) = DoBind fc n nfc (update ns t)
upd ns (DoLet fc n nfc ty t) = DoLet fc n nfc (update ns ty) (update ns t)
upd ns (DoBindP fc i t ts)
= DoBindP fc (update ns i) (update ns t)
(map (\(l,r) -> (update ns l, update ns r)) ts)
upd ns (DoLetP fc i t) = DoLetP fc (update ns i) (update ns t)
update ns (PIdiom fc t) = PIdiom fc $ update ns t
update ns (PMetavar fc n) = uncurry (flip PMetavar) $ updateB ns (n, fc)
update ns (PProof tacs) = PProof $ map (updTactic ns) tacs
update ns (PTactics tacs) = PTactics $ map (updTactic ns) tacs
update ns (PDisamb nsps t) = PDisamb nsps $ update ns t
update ns (PUnifyLog t) = PUnifyLog $ update ns t
update ns (PNoImplicits t) = PNoImplicits $ update ns t
update ns (PQuasiquote tm mty) = PQuasiquote (update ns tm) (fmap (update ns) mty)
update ns (PUnquote t) = PUnquote $ update ns t
update ns (PQuoteName n res fc) = let (n', fc') = (updateB ns (n, fc))
in PQuoteName n' res fc'
update ns (PRunElab fc t nsp) = PRunElab fc (update ns t) nsp
update ns (PConstSugar fc t) = PConstSugar fc $ update ns t
-- PConstSugar probably can't contain anything substitutable, but it's hard to track
update ns t = t
updTactic :: [(Name, SynMatch)] -> PTactic -> PTactic
-- handle all the ones with Names explicitly, then use fmap for the rest with PTerms
updTactic ns (Intro ns') = Intro $ map (fst . updateB ns . (, NoFC)) ns'
updTactic ns (Focus n) = Focus . fst $ updateB ns (n, NoFC)
updTactic ns (Refine n bs) = Refine (fst $ updateB ns (n, NoFC)) bs
updTactic ns (Claim n t) = Claim (fst $ updateB ns (n, NoFC)) (update ns t)
updTactic ns (MatchRefine n) = MatchRefine (fst $ updateB ns (n, NoFC))
updTactic ns (LetTac n t) = LetTac (fst $ updateB ns (n, NoFC)) (update ns t)
updTactic ns (LetTacTy n ty tm) = LetTacTy (fst $ updateB ns (n, NoFC)) (update ns ty) (update ns tm)
updTactic ns (ProofSearch rec prover depth top psns hints) = ProofSearch rec prover depth
(fmap (fst . updateB ns . (, NoFC)) top) (map (fst . updateB ns . (, NoFC)) psns) (map (fst . updateB ns . (, NoFC)) hints)
updTactic ns (Try l r) = Try (updTactic ns l) (updTactic ns r)
updTactic ns (TSeq l r) = TSeq (updTactic ns l) (updTactic ns r)
updTactic ns (GoalType s tac) = GoalType s $ updTactic ns tac
updTactic ns (TDocStr (Left n)) = TDocStr . Left . fst $ updateB ns (n, NoFC)
updTactic ns t = fmap (update ns) t
updTacImp ns (TacImp o st scr) = TacImp o st (update ns scr)
updTacImp _ x = x
dropn :: Name -> [(Name, a)] -> [(Name, a)]
dropn n [] = []
dropn n ((x,t) : xs) | n == x = xs
| otherwise = (x,t):dropn n xs
{- | Parses a (normal) built-in expression
@
InternalExpr ::=
UnifyLog
| RecordType
| SimpleExpr
| Lambda
| QuoteGoal
| Let
| If
| RewriteTerm
| CaseExpr
| DoBlock
| App
;
@
-}
internalExpr :: SyntaxInfo -> IdrisParser PTerm
internalExpr syn =
unifyLog syn
<|> runElab syn
<|> disamb syn
<|> noImplicits syn
<|> recordType syn
<|> if_ syn
<|> lambda syn
<|> quoteGoal syn
<|> let_ syn
<|> rewriteTerm syn
<|> doBlock syn
<|> caseExpr syn
<|> app syn
<?> "expression"
{- | Parses the "impossible" keyword
@
Impossible ::= 'impossible'
@
-}
impossible :: IdrisParser PTerm
impossible = do fc <- reservedFC "impossible"
highlightP fc AnnKeyword
return PImpossible
{- | Parses a case expression
@
CaseExpr ::=
'case' Expr 'of' OpenBlock CaseOption+ CloseBlock;
@
-}
caseExpr :: SyntaxInfo -> IdrisParser PTerm
caseExpr syn = do kw1 <- reservedFC "case"; fc <- getFC
scr <- expr syn; kw2 <- reservedFC "of";
opts <- indentedBlock1 (caseOption syn)
highlightP kw1 AnnKeyword
highlightP kw2 AnnKeyword
return (PCase fc scr opts)
<?> "case expression"
{- | Parses a case in a case expression
@
CaseOption ::=
Expr (Impossible | '=>' Expr) Terminator
;
@
-}
caseOption :: SyntaxInfo -> IdrisParser (PTerm, PTerm)
caseOption syn = do lhs <- expr (syn { inPattern = True })
r <- impossible <|> symbol "=>" *> expr syn
return (lhs, r)
<?> "case option"
warnTacticDeprecation :: FC -> IdrisParser ()
warnTacticDeprecation fc =
do ist <- get
let cmdline = opt_cmdline (idris_options ist)
unless (NoOldTacticDeprecationWarnings `elem` cmdline) $
put ist { parserWarnings =
(fc, Msg "This style of tactic proof is deprecated. See %runElab for the replacement.") : parserWarnings ist }
{- | Parses a proof block
@
ProofExpr ::=
'proof' OpenBlock Tactic'* CloseBlock
;
@
-}
proofExpr :: SyntaxInfo -> IdrisParser PTerm
proofExpr syn = do kw <- reservedFC "proof"
ts <- indentedBlock1 (tactic syn)
highlightP kw AnnKeyword
warnTacticDeprecation kw
return $ PProof ts
<?> "proof block"
{- | Parses a tactics block
@
TacticsExpr :=
'tactics' OpenBlock Tactic'* CloseBlock
;
@
-}
tacticsExpr :: SyntaxInfo -> IdrisParser PTerm
tacticsExpr syn = do kw <- reservedFC "tactics"
ts <- indentedBlock1 (tactic syn)
highlightP kw AnnKeyword
warnTacticDeprecation kw
return $ PTactics ts
<?> "tactics block"
{- | Parses a simple expression
@
SimpleExpr ::=
{- External (User-defined) Simple Expression -}
| '?' Name
| % 'instance'
| 'Refl' ('{' Expr '}')?
| ProofExpr
| TacticsExpr
| FnName
| Idiom
| List
| Alt
| Bracketed
| Constant
| Type
| 'Void'
| Quasiquote
| NameQuote
| Unquote
| '_'
;
@
-}
simpleExpr :: SyntaxInfo -> IdrisParser PTerm
simpleExpr syn =
try (simpleExternalExpr syn)
<|> do (x, FC f (l, c) end) <- try (lchar '?' *> name)
return (PMetavar (FC f (l, c-1) end) x)
<|> do lchar '%'; fc <- getFC; reserved "instance"; return (PResolveTC fc)
<|> do reserved "elim_for"; fc <- getFC; t <- fst <$> fnName; return (PRef fc [] (SN $ ElimN t))
<|> proofExpr syn
<|> tacticsExpr syn
<|> try (do reserved "Type"; symbol "*"; return $ PUniverse AllTypes)
<|> do reserved "AnyType"; return $ PUniverse AllTypes
<|> PType <$> reservedFC "Type"
<|> do reserved "UniqueType"; return $ PUniverse UniqueType
<|> do reserved "NullType"; return $ PUniverse NullType
<|> do (c, cfc) <- constant
fc <- getFC
return (modifyConst syn fc (PConstant cfc c))
<|> do symbol "'"; fc <- getFC; str <- fst <$> name
return (PApp fc (PRef fc [] (sUN "Symbol_"))
[pexp (PConstant NoFC (Str (show str)))])
<|> do (x, fc) <- fnName
if inPattern syn
then option (PRef fc [fc] x)
(do reservedOp "@"
s <- simpleExpr syn
fcIn <- getFC
return (PAs fcIn x s))
else return (PRef fc [fc] x)
<|> idiom syn
<|> listExpr syn
<|> alt syn
<|> do reservedOp "!"
s <- simpleExpr syn
fc <- getFC
return (PAppBind fc s [])
<|> bracketed (disallowImp syn)
<|> quasiquote syn
<|> namequote syn
<|> unquote syn
<|> do lchar '_'; return Placeholder
<?> "expression"
{- |Parses an expression in parentheses
@
Bracketed ::= '(' Bracketed'
@
-}
bracketed :: SyntaxInfo -> IdrisParser PTerm
bracketed syn = do (FC fn (sl, sc) _) <- getFC
lchar '(' <?> "parenthesized expression"
bracketed' (FC fn (sl, sc) (sl, sc+1)) syn
{- |Parses the rest of an expression in parentheses
@
Bracketed' ::=
')'
| Expr ')'
| ExprList ')'
| Expr '**' Expr ')'
| Operator Expr ')'
| Expr Operator ')'
| Name ':' Expr '**' Expr ')'
;
@
-}
bracketed' :: FC -> SyntaxInfo -> IdrisParser PTerm
bracketed' open syn =
do (FC f start (l, c)) <- getFC
lchar ')'
return $ PTrue (spanFC open (FC f start (l, c+1))) TypeOrTerm
<|> try (do (ln, lnfc) <- name
colonFC <- lcharFC ':'
lty <- expr syn
starsFC <- reservedOpFC "**"
fc <- getFC
r <- expr syn
close <- lcharFC ')'
return (PDPair fc [open, colonFC, starsFC, close] TypeOrTerm (PRef lnfc [] ln) lty r))
<|> try (do fc <- getFC; o <- operator; e <- expr syn; lchar ')'
-- No prefix operators! (bit of a hack here...)
if (o == "-" || o == "!")
then fail "minus not allowed in section"
else return $ PLam fc (sMN 1000 "ARG") NoFC Placeholder
(PApp fc (PRef fc [] (sUN o)) [pexp (PRef fc [] (sMN 1000 "ARG")),
pexp e]))
<|> try (do l <- simpleExpr syn
op <- option Nothing (do o <- operator
lchar ')'
return (Just o))
fc0 <- getFC
case op of
Nothing -> bracketedExpr syn open l
Just o -> return $ PLam fc0 (sMN 1000 "ARG") NoFC Placeholder
(PApp fc0 (PRef fc0 [] (sUN o)) [pexp l,
pexp (PRef fc0 [] (sMN 1000 "ARG"))]))
<|> do l <- expr syn
bracketedExpr syn open l
-- | Parse the contents of parentheses, after an expression has been parsed.
bracketedExpr :: SyntaxInfo -> FC -> PTerm -> IdrisParser PTerm
bracketedExpr syn openParenFC e =
do lchar ')'; return e
<|> do exprs <- many (do comma <- lcharFC ','
r <- expr syn
return (r, comma))
closeParenFC <- lcharFC ')'
let hilite = [openParenFC, closeParenFC] ++ map snd exprs
return $ PPair openParenFC hilite TypeOrTerm e (mergePairs exprs)
<|> do starsFC <- reservedOpFC "**"
r <- expr syn
closeParenFC <- lcharFC ')'
return (PDPair starsFC [openParenFC, starsFC, closeParenFC] TypeOrTerm e Placeholder r)
<?> "end of bracketed expression"
where mergePairs :: [(PTerm, FC)] -> PTerm
mergePairs [(t, fc)] = t
mergePairs ((t, fc):rs) = PPair fc [] TypeOrTerm t (mergePairs rs)
-- A bit of a hack here: if the integer doesn't fit in an Int, treat it as a
-- big integer, otherwise try fromInteger and the constants as alternatives.
-- A better solution would be to fix fromInteger to work with Integer, as the
-- name suggests, rather than Int.
{-| Finds optimal type for integer constant -}
modifyConst :: SyntaxInfo -> FC -> PTerm -> PTerm
modifyConst syn fc (PConstant inFC (BI x))
| not (inPattern syn)
= PConstSugar inFC $ -- wrap in original location for highlighting
PAlternative [] FirstSuccess
(PApp fc (PRef fc [] (sUN "fromInteger")) [pexp (PConstant NoFC (BI (fromInteger x)))]
: consts)
| otherwise = PConstSugar inFC $
PAlternative [] FirstSuccess consts
where
consts = [ PConstant inFC (BI x)
, PConstant inFC (I (fromInteger x))
, PConstant inFC (B8 (fromInteger x))
, PConstant inFC (B16 (fromInteger x))
, PConstant inFC (B32 (fromInteger x))
, PConstant inFC (B64 (fromInteger x))
]
modifyConst syn fc x = x
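-- For illustration: outside a pattern a literal such as @42@ becomes a
-- 'PConstSugar'-wrapped 'PAlternative' whose first alternative applies
-- @fromInteger@ to the big-integer constant, followed by the raw
-- Integer/Int/Bits8..Bits64 constants listed in @consts@ above.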
{- | Parses an alternative expression
@
Alt ::= '(|' Expr_List '|)';
Expr_List ::=
Expr'
| Expr' ',' Expr_List
;
@
-}
alt :: SyntaxInfo -> IdrisParser PTerm
alt syn = do symbol "(|"; alts <- sepBy1 (expr' syn) (lchar ','); symbol "|)"
return (PAlternative [] FirstSuccess alts)
{- | Parses a possibly hidden simple expression
@
HSimpleExpr ::=
'.' SimpleExpr
| SimpleExpr
;
@
-}
hsimpleExpr :: SyntaxInfo -> IdrisParser PTerm
hsimpleExpr syn =
do lchar '.'
e <- simpleExpr syn
return $ PHidden e
<|> simpleExpr syn
<?> "expression"
{- | Parses a unification log expression
UnifyLog ::=
'%' 'unifyLog' SimpleExpr
;
-}
unifyLog :: SyntaxInfo -> IdrisParser PTerm
unifyLog syn = do (FC fn (sl, sc) kwEnd) <- try (lchar '%' *> reservedFC "unifyLog")
tm <- simpleExpr syn
highlightP (FC fn (sl, sc-1) kwEnd) AnnKeyword
return (PUnifyLog tm)
<?> "unification log expression"
{- | Parses a new-style tactics expression
RunTactics ::=
'%' 'runElab' SimpleExpr
;
-}
runElab :: SyntaxInfo -> IdrisParser PTerm
runElab syn = do (FC fn (sl, sc) kwEnd) <- try (lchar '%' *> reservedFC "runElab")
fc <- getFC
tm <- simpleExpr syn
highlightP (FC fn (sl, sc-1) kwEnd) AnnKeyword
return $ PRunElab fc tm (syn_namespace syn)
<?> "new-style tactics expression"
{- | Parses a disambiguation expression
Disamb ::=
'%' 'disamb' NameList Expr
;
-}
disamb :: SyntaxInfo -> IdrisParser PTerm
disamb syn = do kw <- reservedFC "with"
ns <- sepBy1 (fst <$> name) (lchar ',')
tm <- expr' syn
highlightP kw AnnKeyword
return (PDisamb (map tons ns) tm)
<?> "namespace disambiguation expression"
where tons (NS n s) = txt (show n) : s
tons n = [txt (show n)]
{- | Parses a no implicits expression
@
NoImplicits ::=
'%' 'noImplicits' SimpleExpr
;
@
-}
noImplicits :: SyntaxInfo -> IdrisParser PTerm
noImplicits syn = do try (lchar '%' *> reserved "noImplicits")
tm <- simpleExpr syn
return (PNoImplicits tm)
<?> "no implicits expression"
{- | Parses a function application expression
@
App ::=
'mkForeign' Arg Arg*
| MatchApp
| SimpleExpr Arg*
;
MatchApp ::=
SimpleExpr '<==' FnName
;
@
-}
app :: SyntaxInfo -> IdrisParser PTerm
app syn = do f <- simpleExpr syn
(do try $ reservedOp "<=="
fc <- getFC
ff <- fst <$> fnName
return (PLet fc (sMN 0 "match") NoFC
f
(PMatchApp fc ff)
(PRef fc [] (sMN 0 "match")))
<?> "matching application expression") <|> (do
fc <- getFC
i <- get
args <- many (do notEndApp; arg syn)
case args of
[] -> return f
_ -> return (flattenFromInt fc f args))
<?> "function application"
where
-- bit of a hack to deal with the situation where we're applying a
-- literal to an argument, which we may want for obscure applications
-- of fromInteger, and this will help disambiguate better.
-- We know, at least, it won't be one of the constants!
flattenFromInt fc (PAlternative _ x alts) args
| Just i <- getFromInt alts
= PApp fc (PRef fc [] (sUN "fromInteger")) (i : args)
flattenFromInt fc f args = PApp fc f args
getFromInt ((PApp _ (PRef _ _ n) [a]) : _) | n == sUN "fromInteger" = Just a
getFromInt (_ : xs) = getFromInt xs
getFromInt _ = Nothing
{-| Parses a function argument
@
Arg ::=
ImplicitArg
| ConstraintArg
| SimpleExpr
;
@
-}
arg :: SyntaxInfo -> IdrisParser PArg
arg syn = implicitArg syn
<|> constraintArg syn
<|> do e <- simpleExpr syn
return (pexp e)
<?> "function argument"
{-| Parses an implicit function argument
@
ImplicitArg ::=
'{' Name ('=' Expr)? '}'
;
@
-}
implicitArg :: SyntaxInfo -> IdrisParser PArg
implicitArg syn = do lchar '{'
(n, nfc) <- name
fc <- getFC
v <- option (PRef nfc [nfc] n) (do lchar '='
expr syn)
lchar '}'
return (pimp n v True)
<?> "implicit function argument"
{-| Parses a constraint argument (for selecting a named type class instance)
> ConstraintArg ::=
> '@{' Expr '}'
> ;
-}
constraintArg :: SyntaxInfo -> IdrisParser PArg
constraintArg syn = do symbol "@{"
e <- expr syn
symbol "}"
return (pconst e)
<?> "constraint argument"
{-| Parses a quasiquote expression (for building reflected terms using the elaborator)
> Quasiquote ::= '`(' Expr ')'
-}
quasiquote :: SyntaxInfo -> IdrisParser PTerm
quasiquote syn = do startFC <- symbolFC "`("
e <- expr syn { syn_in_quasiquote = (syn_in_quasiquote syn) + 1 ,
inPattern = False }
g <- optional $
do fc <- symbolFC ":"
ty <- expr syn { inPattern = False } -- don't allow antiquotes
return (ty, fc)
endFC <- symbolFC ")"
mapM_ (uncurry highlightP) [(startFC, AnnKeyword), (endFC, AnnKeyword), (spanFC startFC endFC, AnnQuasiquote)]
case g of
Just (_, fc) -> highlightP fc AnnKeyword
_ -> return ()
return $ PQuasiquote e (fst <$> g)
<?> "quasiquotation"
{-| Parses an unquoting inside a quasiquotation (for building reflected terms using the elaborator)
> Unquote ::= ',' Expr
-}
unquote :: SyntaxInfo -> IdrisParser PTerm
unquote syn = do guard (syn_in_quasiquote syn > 0)
startFC <- symbolFC "~"
e <- simpleExpr syn { syn_in_quasiquote = syn_in_quasiquote syn - 1 }
endFC <- getFC
highlightP startFC AnnKeyword
highlightP (spanFC startFC endFC) AnnAntiquote
return $ PUnquote e
<?> "unquotation"
{-| Parses a quotation of a name (for using the elaborator to resolve boring details)
> NameQuote ::= '`{' Name '}'
-}
namequote :: SyntaxInfo -> IdrisParser PTerm
namequote syn = do (startFC, res) <-
try (do fc <- symbolFC "`{{"
return (fc, False)) <|>
(do fc <- symbolFC "`{"
return (fc, True))
(n, nfc) <- fnName
endFC <- if res then symbolFC "}" else symbolFC "}}"
mapM_ (uncurry highlightP)
[ (startFC, AnnKeyword)
, (endFC, AnnKeyword)
, (spanFC startFC endFC, AnnQuasiquote)
]
return $ PQuoteName n res nfc
<?> "quoted name"
{-| Parses a record field setter expression
@
RecordType ::=
'record' '{' FieldTypeList '}';
@
@
FieldTypeList ::=
FieldType
| FieldType ',' FieldTypeList
;
@
@
FieldType ::=
FnName '=' Expr
;
@
-}
recordType :: SyntaxInfo -> IdrisParser PTerm
recordType syn =
do kw <- reservedFC "record"
lchar '{'
fgs <- fieldGetOrSet
lchar '}'
fc <- getFC
rec <- optional (simpleExpr syn)
highlightP kw AnnKeyword
case fgs of
Left fields ->
case rec of
Nothing ->
return (PLam fc (sMN 0 "fldx") NoFC Placeholder
(applyAll fc fields (PRef fc [] (sMN 0 "fldx"))))
Just v -> return (applyAll fc fields v)
Right fields ->
case rec of
Nothing ->
return (PLam fc (sMN 0 "fldx") NoFC Placeholder
(getAll fc (reverse fields)
(PRef fc [] (sMN 0 "fldx"))))
Just v -> return (getAll fc (reverse fields) v)
<?> "record setting expression"
where fieldSet :: IdrisParser ([Name], PTerm)
fieldSet = do ns <- fieldGet
lchar '='
e <- expr syn
return (ns, e)
<?> "field setter"
fieldGet :: IdrisParser [Name]
fieldGet = sepBy1 (fst <$> fnName) (symbol "->")
fieldGetOrSet :: IdrisParser (Either [([Name], PTerm)] [Name])
fieldGetOrSet = try (do fs <- sepBy1 fieldSet (lchar ',')
return (Left fs))
<|> do f <- fieldGet
return (Right f)
applyAll :: FC -> [([Name], PTerm)] -> PTerm -> PTerm
applyAll fc [] x = x
applyAll fc ((ns, e) : es) x
= applyAll fc es (doUpdate fc ns e x)
doUpdate fc [n] e get
= PApp fc (PRef fc [] (mkType n)) [pexp e, pexp get]
doUpdate fc (n : ns) e get
= PApp fc (PRef fc [] (mkType n))
[pexp (doUpdate fc ns e (PApp fc (PRef fc [] n) [pexp get])),
pexp get]
getAll :: FC -> [Name] -> PTerm -> PTerm
getAll fc [n] e = PApp fc (PRef fc [] n) [pexp e]
getAll fc (n:ns) e = PApp fc (PRef fc [] n) [pexp (getAll fc ns e)]
-- | Creates setters for record types on necessary functions
mkType :: Name -> Name
mkType (UN n) = sUN ("set_" ++ str n)
mkType (MN 0 n) = sMN 0 ("set_" ++ str n)
mkType (NS n s) = NS (mkType n) s
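-- A concrete example of the record-update syntax handled here (record and
-- field names are illustrative only): @record { name = "Fred" } person@ sets a
-- single field, while @record { address->street = "Main St" } person@ updates
-- a nested field via the generated setters.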
{- | Parses a type signature
@
TypeSig ::=
':' Expr
;
@
@
TypeExpr ::= ConstraintList? Expr;
@
-}
typeExpr :: SyntaxInfo -> IdrisParser PTerm
typeExpr syn = do cs <- if implicitAllowed syn then constraintList syn else return []
sc <- expr syn
return (bindList (PPi constraint) cs sc)
<?> "type signature"
{- | Parses a lambda expression
@
Lambda ::=
'\\' TypeOptDeclList LambdaTail
| '\\' SimpleExprList LambdaTail
;
@
@
SimpleExprList ::=
SimpleExpr
| SimpleExpr ',' SimpleExprList
;
@
@
LambdaTail ::=
Impossible
| '=>' Expr
@
-}
lambda :: SyntaxInfo -> IdrisParser PTerm
lambda syn = do lchar '\\' <?> "lambda expression"
((do xt <- try $ tyOptDeclList syn
fc <- getFC
sc <- lambdaTail
return (bindList (PLam fc) xt sc))
<|>
(do ps <- sepBy (do fc <- getFC
e <- simpleExpr (syn { inPattern = True })
return (fc, e))
(lchar ',')
sc <- lambdaTail
return (pmList (zip [0..] ps) sc)))
<?> "lambda expression"
where pmList :: [(Int, (FC, PTerm))] -> PTerm -> PTerm
pmList [] sc = sc
pmList ((i, (fc, x)) : xs) sc
= PLam fc (sMN i "lamp") NoFC Placeholder
(PCase fc (PRef fc [] (sMN i "lamp"))
[(x, pmList xs sc)])
lambdaTail :: IdrisParser PTerm
lambdaTail = impossible <|> symbol "=>" *> expr syn
{- | Parses a term rewrite expression
@
RewriteTerm ::=
'rewrite' Expr ('==>' Expr)? 'in' Expr
;
@
-}
rewriteTerm :: SyntaxInfo -> IdrisParser PTerm
rewriteTerm syn = do kw <- reservedFC "rewrite"
fc <- getFC
prf <- expr syn
giving <- optional (do symbol "==>"; expr' syn)
kw' <- reservedFC "in"; sc <- expr syn
highlightP kw AnnKeyword
highlightP kw' AnnKeyword
return (PRewrite fc
(PApp fc (PRef fc [] (sUN "sym")) [pexp prf]) sc
giving)
<?> "term rewrite expression"
{- |Parses a let binding
@
Let ::=
'let' Name TypeSig'? '=' Expr 'in' Expr
| 'let' Expr' '=' Expr' 'in' Expr
TypeSig' ::=
':' Expr'
;
@
-}
let_ :: SyntaxInfo -> IdrisParser PTerm
let_ syn = try (do kw <- reservedFC "let"
ls <- indentedBlock (let_binding syn)
kw' <- reservedFC "in"; sc <- expr syn
highlightP kw AnnKeyword; highlightP kw' AnnKeyword
return (buildLets ls sc))
<?> "let binding"
where buildLets [] sc = sc
buildLets ((fc, PRef nfc _ n, ty, v, []) : ls) sc
= PLet fc n nfc ty v (buildLets ls sc)
buildLets ((fc, pat, ty, v, alts) : ls) sc
= PCase fc v ((pat, buildLets ls sc) : alts)
let_binding syn = do fc <- getFC;
pat <- expr' (syn { inPattern = True })
ty <- option Placeholder (do lchar ':'; expr' syn)
lchar '='
v <- expr syn
ts <- option [] (do lchar '|'
sepBy1 (do_alt syn) (lchar '|'))
return (fc,pat,ty,v,ts)
{- | Parses a conditional expression
@
If ::= 'if' Expr 'then' Expr 'else' Expr
@
-}
if_ :: SyntaxInfo -> IdrisParser PTerm
if_ syn = (do ifFC <- reservedFC "if"
fc <- getFC
c <- expr syn
thenFC <- reservedFC "then"
t <- expr syn
elseFC <- reservedFC "else"
f <- expr syn
mapM_ (flip highlightP AnnKeyword) [ifFC, thenFC, elseFC]
return (PIfThenElse fc c t f))
<?> "conditional expression"
{- | Parses a quote goal
@
QuoteGoal ::=
'quoteGoal' Name 'by' Expr 'in' Expr
;
@
-}
quoteGoal :: SyntaxInfo -> IdrisParser PTerm
quoteGoal syn = do kw1 <- reservedFC "quoteGoal"; n <- fst <$> name;
kw2 <- reservedFC "by"
r <- expr syn
kw3 <- reservedFC "in"
fc <- getFC
sc <- expr syn
mapM_ (flip highlightP AnnKeyword) [kw1, kw2, kw3]
return (PGoal fc r n sc)
<?> "quote goal expression"
{- | Parses a dependent type signature
@
Pi ::= PiOpts Static? Pi'
@
@
Pi' ::=
OpExpr ('->' Pi)?
| '(' TypeDeclList ')' '->' Pi
| '{' TypeDeclList '}' '->' Pi
| '{' 'auto' TypeDeclList '}' '->' Pi
| '{' 'default' SimpleExpr TypeDeclList '}' '->' Pi
;
@
-}
bindsymbol opts st syn
= do symbol "->"
return (Exp opts st False)
explicitPi opts st syn
= do xt <- try (lchar '(' *> typeDeclList syn <* lchar ')')
binder <- bindsymbol opts st syn
sc <- expr syn
return (bindList (PPi binder) xt sc)
autoImplicit opts st syn
= do kw <- reservedFC "auto"
when (st == Static) $ fail "auto implicits can not be static"
xt <- typeDeclList syn
lchar '}'
symbol "->"
sc <- expr syn
highlightP kw AnnKeyword
return (bindList (PPi
(TacImp [] Dynamic (PTactics [ProofSearch True True 100 Nothing [] []]))) xt sc)
defaultImplicit opts st syn = do
kw <- reservedFC "default"
when (st == Static) $ fail "default implicits can not be static"
ist <- get
script' <- simpleExpr syn
let script = debindApp syn . desugar syn ist $ script'
xt <- typeDeclList syn
lchar '}'
symbol "->"
sc <- expr syn
highlightP kw AnnKeyword
return (bindList (PPi (TacImp [] Dynamic script)) xt sc)
normalImplicit opts st syn = do
xt <- typeDeclList syn <* lchar '}'
symbol "->"
cs <- constraintList syn
sc <- expr syn
let (im,cl)
= if implicitAllowed syn
then (Imp opts st False Nothing,
constraint)
else (Imp opts st False (Just (Impl False)),
Imp opts st False (Just (Impl True)))
return (bindList (PPi im) xt
(bindList (PPi cl) cs sc))
implicitPi opts st syn =
autoImplicit opts st syn
<|> defaultImplicit opts st syn
<|> normalImplicit opts st syn
unboundPi opts st syn = do
x <- opExpr syn
(do binder <- bindsymbol opts st syn
sc <- expr syn
return (PPi binder (sUN "__pi_arg") NoFC x sc))
<|> return x
pi :: SyntaxInfo -> IdrisParser PTerm
pi syn =
do opts <- piOpts syn
st <- static
explicitPi opts st syn
<|> try (do lchar '{'; implicitPi opts st syn)
<|> unboundPi opts st syn
<?> "dependent type signature"
{- | Parses Possible Options for Pi Expressions
@
PiOpts ::= '.'?
@
-}
piOpts :: SyntaxInfo -> IdrisParser [ArgOpt]
piOpts syn | implicitAllowed syn =
lchar '.' *> return [InaccessibleArg]
<|> return []
piOpts syn = return []
{- | Parses a type constraint list
@
ConstraintList ::=
'(' Expr_List ')' '=>'
| Expr '=>'
;
@
-}
constraintList :: SyntaxInfo -> IdrisParser [(Name, FC, PTerm)]
constraintList syn = try (constraintList1 syn)
<|> return []
constraintList1 :: SyntaxInfo -> IdrisParser [(Name, FC, PTerm)]
constraintList1 syn = try (do lchar '('
tys <- sepBy1 nexpr (lchar ',')
lchar ')'
reservedOp "=>"
return tys)
<|> try (do t <- opExpr (disallowImp syn)
reservedOp "=>"
return [(defname, NoFC, t)])
<?> "type constraint list"
where nexpr = try (do (n, fc) <- name; lchar ':'
e <- expr syn
return (n, fc, e))
<|> do e <- expr syn
return (defname, NoFC, e)
defname = sMN 0 "constrarg"
{- | Parses a type declaration list
@
TypeDeclList ::=
FunctionSignatureList
| NameList TypeSig
;
@
@
FunctionSignatureList ::=
Name TypeSig
| Name TypeSig ',' FunctionSignatureList
;
@
-}
typeDeclList :: SyntaxInfo -> IdrisParser [(Name, FC, PTerm)]
typeDeclList syn = try (sepBy1 (do (x, xfc) <- fnName
lchar ':'
t <- typeExpr (disallowImp syn)
return (x, xfc, t))
(lchar ','))
<|> do ns <- sepBy1 name (lchar ',')
lchar ':'
t <- typeExpr (disallowImp syn)
return (map (\(x, xfc) -> (x, xfc, t)) ns)
<?> "type declaration list"
{- | Parses a type declaration list with optional parameters
@
TypeOptDeclList ::=
NameOrPlaceholder TypeSig?
| NameOrPlaceholder TypeSig? ',' TypeOptDeclList
;
@
@
NameOrPlaceHolder ::= Name | '_';
@
-}
tyOptDeclList :: SyntaxInfo -> IdrisParser [(Name, FC, PTerm)]
tyOptDeclList syn = sepBy1 (do (x, fc) <- nameOrPlaceholder
t <- option Placeholder (do lchar ':'
expr syn)
return (x, fc, t))
(lchar ',')
<?> "type declaration list"
where nameOrPlaceholder :: IdrisParser (Name, FC)
nameOrPlaceholder = fnName
<|> do symbol "_"
return (sMN 0 "underscore", NoFC)
<?> "name or placeholder"
{- | Parses a list literal expression e.g. [1,2,3] or a comprehension [ (x, y) | x <- xs , y <- ys ]
@
ListExpr ::=
'[' ']'
| '[' Expr '|' DoList ']'
| '[' ExprList ']'
;
@
@
DoList ::=
Do
| Do ',' DoList
;
@
@
ExprList ::=
Expr
| Expr ',' ExprList
;
@
-}
listExpr :: SyntaxInfo -> IdrisParser PTerm
listExpr syn = do (FC f (l, c) _) <- getFC
lchar '['; fc <- getFC;
(try . token $ do (char ']' <?> "end of list expression")
(FC _ _ (l', c')) <- getFC
return (mkNil (FC f (l, c) (l', c'))))
<|> (do x <- expr syn <?> "expression"
(do try (lchar '|') <?> "list comprehension"
qs <- sepBy1 (do_ syn) (lchar ',')
lchar ']'
return (PDoBlock (map addGuard qs ++
[DoExp fc (PApp fc (PRef fc [] (sUN "return"))
[pexp x])]))) <|>
(do xs <- many (do (FC fn (sl, sc) _) <- getFC
lchar ',' <?> "list element"
let commaFC = FC fn (sl, sc) (sl, sc + 1)
elt <- expr syn
return (elt, commaFC))
(FC fn (sl, sc) _) <- getFC
lchar ']' <?> "end of list expression"
let rbrackFC = FC fn (sl, sc) (sl, sc+1)
return (mkList fc rbrackFC ((x, (FC f (l, c) (l, c+1))) : xs))))
<?> "list expression"
where
mkNil :: FC -> PTerm
mkNil fc = PRef fc [fc] (sUN "Nil")
mkList :: FC -> FC -> [(PTerm, FC)] -> PTerm
mkList errFC nilFC [] = PRef nilFC [nilFC] (sUN "Nil")
mkList errFC nilFC ((x, fc) : xs) = PApp errFC (PRef fc [fc] (sUN "::")) [pexp x, pexp (mkList errFC nilFC xs)]
addGuard :: PDo -> PDo
addGuard (DoExp fc e) = DoExp fc (PApp fc (PRef fc [] (sUN "guard"))
[pexp e])
addGuard x = x
{- | Parses a do-block
@
Do' ::= Do KeepTerminator;
@
@
DoBlock ::=
'do' OpenBlock Do'+ CloseBlock
;
@
-}
doBlock :: SyntaxInfo -> IdrisParser PTerm
doBlock syn
= do kw <- reservedFC "do"
ds <- indentedBlock1 (do_ syn)
highlightP kw AnnKeyword
return (PDoBlock ds)
<?> "do block"
{- | Parses an expression inside a do block
@
Do ::=
'let' Name TypeSig'? '=' Expr
| 'let' Expr' '=' Expr
| Name '<-' Expr
| Expr' '<-' Expr
| Expr
;
@
-}
do_ :: SyntaxInfo -> IdrisParser PDo
do_ syn
= try (do kw <- reservedFC "let"
(i, ifc) <- name
ty <- option Placeholder (do lchar ':'
expr' syn)
reservedOp "="
fc <- getFC
e <- expr syn
highlightP kw AnnKeyword
return (DoLet fc i ifc ty e))
<|> try (do kw <- reservedFC "let"
i <- expr' syn
reservedOp "="
fc <- getFC
sc <- expr syn
highlightP kw AnnKeyword
return (DoLetP fc i sc))
<|> try (do (i, ifc) <- name
symbol "<-"
fc <- getFC
e <- expr syn;
option (DoBind fc i ifc e)
(do lchar '|'
ts <- sepBy1 (do_alt syn) (lchar '|')
return (DoBindP fc (PRef ifc [ifc] i) e ts)))
<|> try (do i <- expr' syn
symbol "<-"
fc <- getFC
e <- expr syn;
option (DoBindP fc i e [])
(do lchar '|'
ts <- sepBy1 (do_alt syn) (lchar '|')
return (DoBindP fc i e ts)))
<|> do e <- expr syn
fc <- getFC
return (DoExp fc e)
<?> "do block expression"
do_alt syn = do l <- expr' syn
option (Placeholder, l)
(do symbol "=>"
r <- expr' syn
return (l, r))
{- | Parses an expression in idiom brackets
@
Idiom ::= '[|' Expr '|]';
@
-}
idiom :: SyntaxInfo -> IdrisParser PTerm
idiom syn
= do symbol "[|"
fc <- getFC
e <- expr syn
symbol "|]"
return (PIdiom fc e)
<?> "expression in idiom brackets"
{- |Parses a constant or literal expression
@
Constant ::=
'Integer'
| 'Int'
| 'Char'
| 'Double'
| 'String'
| 'Bits8'
| 'Bits16'
| 'Bits32'
| 'Bits64'
| Float_t
| Natural_t
| VerbatimString_t
| String_t
| Char_t
;
@
-}
constants :: [(String, Idris.Core.TT.Const)]
constants =
[ ("Integer", AType (ATInt ITBig))
, ("Int", AType (ATInt ITNative))
, ("Char", AType (ATInt ITChar))
, ("Double", AType ATFloat)
, ("String", StrType)
, ("prim__WorldType", WorldType)
, ("prim__TheWorld", TheWorld)
, ("Bits8", AType (ATInt (ITFixed IT8)))
, ("Bits16", AType (ATInt (ITFixed IT16)))
, ("Bits32", AType (ATInt (ITFixed IT32)))
, ("Bits64", AType (ATInt (ITFixed IT64)))
]
-- | Parse a constant and its source span
constant :: IdrisParser (Idris.Core.TT.Const, FC)
constant = choice [ do fc <- reservedFC name; return (ty, fc)
| (name, ty) <- constants
]
<|> do (f, fc) <- try float; return (Fl f, fc)
<|> do (i, fc) <- natural; return (BI i, fc)
<|> do (s, fc) <- verbatimStringLiteral; return (Str s, fc)
<|> do (s, fc) <- stringLiteral; return (Str s, fc)
           <|> do (c, fc) <- try charLiteral; return (Ch c, fc) -- Currently ambiguous with symbols
<?> "constant or literal"
{- | Parses a verbatim multi-line string literal (triple-quoted)
@
VerbatimString_t ::=
'\"\"\"' ~'\"\"\"' '\"\"\"'
;
@
-}
verbatimStringLiteral :: MonadicParsing m => m (String, FC)
verbatimStringLiteral = token $ do (FC f start _) <- getFC
try $ string "\"\"\""
str <- manyTill anyChar $ try (string "\"\"\"")
(FC _ _ end) <- getFC
return (str, FC f start end)
{- | Parses a static modifier
@
Static ::=
'[' static ']'
;
@
-}
static :: IdrisParser Static
static = do reserved "[static]"; return Static
<|> return Dynamic
<?> "static modifier"
{- | Parses a tactic script
@
Tactic ::= 'intro' NameList?
| 'intros'
| 'refine' Name Imp+
| 'mrefine' Name
| 'rewrite' Expr
| 'induction' Expr
| 'equiv' Expr
| 'let' Name ':' Expr' '=' Expr
| 'let' Name '=' Expr
| 'focus' Name
| 'exact' Expr
| 'applyTactic' Expr
| 'reflect' Expr
| 'fill' Expr
| 'try' Tactic '|' Tactic
| '{' TacticSeq '}'
| 'compute'
| 'trivial'
| 'solve'
| 'attack'
| 'state'
| 'term'
| 'undo'
| 'qed'
| 'abandon'
| ':' 'q'
;
Imp ::= '?' | '_';
TacticSeq ::=
Tactic ';' Tactic
| Tactic ';' TacticSeq
;
@
-}
-- | A specification of the arguments that tactics can take
data TacticArg = NameTArg -- ^ Names: n1, n2, n3, ... n
| ExprTArg
| AltsTArg
| StringLitTArg
-- The FIXMEs are Issue #1766 in the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1766
-- | A list of available tactics and their argument requirements
tactics :: [([String], Maybe TacticArg, SyntaxInfo -> IdrisParser PTactic)]
tactics =
[ (["intro"], Nothing, const $ -- FIXME syntax for intro (fresh name)
do ns <- sepBy (spaced (fst <$> name)) (lchar ','); return $ Intro ns)
, noArgs ["intros"] Intros
, noArgs ["unfocus"] Unfocus
, (["refine"], Just ExprTArg, const $
do n <- spaced (fst <$> fnName)
imps <- many imp
return $ Refine n imps)
, (["claim"], Nothing, \syn ->
do n <- indentPropHolds gtProp *> (fst <$> name)
goal <- indentPropHolds gtProp *> expr syn
return $ Claim n goal)
, (["mrefine"], Just ExprTArg, const $
do n <- spaced (fst <$> fnName)
return $ MatchRefine n)
, expressionTactic ["rewrite"] Rewrite
, expressionTactic ["case"] CaseTac
, expressionTactic ["induction"] Induction
, expressionTactic ["equiv"] Equiv
, (["let"], Nothing, \syn -> -- FIXME syntax for let
do n <- (indentPropHolds gtProp *> (fst <$> name))
(do indentPropHolds gtProp *> lchar ':'
ty <- indentPropHolds gtProp *> expr' syn
indentPropHolds gtProp *> lchar '='
t <- indentPropHolds gtProp *> expr syn
i <- get
return $ LetTacTy n (desugar syn i ty) (desugar syn i t))
<|> (do indentPropHolds gtProp *> lchar '='
t <- indentPropHolds gtProp *> expr syn
i <- get
return $ LetTac n (desugar syn i t)))
, (["focus"], Just ExprTArg, const $
do n <- spaced (fst <$> name)
return $ Focus n)
, expressionTactic ["exact"] Exact
, expressionTactic ["applyTactic"] ApplyTactic
, expressionTactic ["byReflection"] ByReflection
, expressionTactic ["reflect"] Reflect
, expressionTactic ["fill"] Fill
, (["try"], Just AltsTArg, \syn ->
do t <- spaced (tactic syn)
lchar '|'
t1 <- spaced (tactic syn)
return $ Try t t1)
, noArgs ["compute"] Compute
, noArgs ["trivial"] Trivial
, noArgs ["unify"] DoUnify
, (["search"], Nothing, const $
do depth <- option 10 $ fst <$> natural
return (ProofSearch True True (fromInteger depth) Nothing [] []))
, noArgs ["instance"] TCInstance
, noArgs ["solve"] Solve
, noArgs ["attack"] Attack
, noArgs ["state", ":state"] ProofState
, noArgs ["term", ":term"] ProofTerm
, noArgs ["undo", ":undo"] Undo
, noArgs ["qed", ":qed"] Qed
, noArgs ["abandon", ":q"] Abandon
, noArgs ["skip"] Skip
, noArgs ["sourceLocation"] SourceFC
, expressionTactic [":e", ":eval"] TEval
, expressionTactic [":t", ":type"] TCheck
, expressionTactic [":search"] TSearch
, (["fail"], Just StringLitTArg, const $
do msg <- fst <$> stringLiteral
return $ TFail [Idris.Core.TT.TextPart msg])
, ([":doc"], Just ExprTArg, const $
do whiteSpace
doc <- (Right . fst <$> constant) <|> (Left . fst <$> fnName)
eof
return (TDocStr doc))
]
where
expressionTactic names tactic = (names, Just ExprTArg, \syn ->
do t <- spaced (expr syn)
i <- get
return $ tactic (desugar syn i t))
noArgs names tactic = (names, Nothing, const (return tactic))
spaced parser = indentPropHolds gtProp *> parser
imp :: IdrisParser Bool
imp = do lchar '?'; return False
<|> do lchar '_'; return True
tactic :: SyntaxInfo -> IdrisParser PTactic
tactic syn = choice [ do choice (map reserved names); parser syn
| (names, _, parser) <- tactics ]
<|> do lchar '{'
t <- tactic syn;
lchar ';';
ts <- sepBy1 (tactic syn) (lchar ';')
lchar '}'
return $ TSeq t (mergeSeq ts)
<|> ((lchar ':' >> empty) <?> "prover command")
<?> "tactic"
where
mergeSeq :: [PTactic] -> PTactic
mergeSeq [t] = t
mergeSeq (t:ts) = TSeq t (mergeSeq ts)
-- | Parses a tactic as a whole
fullTactic :: SyntaxInfo -> IdrisParser PTactic
fullTactic syn = do t <- tactic syn
eof
return t
|
NightRa/Idris-dev
|
src/Idris/ParseExpr.hs
|
Haskell
|
bsd-3-clause
| 52,579
|
{-# LANGUAGE Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Array.Lens
-- Copyright : (C) 2012-16 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : MPTCs, Rank2Types, LiberalTypeSynonyms
--
----------------------------------------------------------------------------
module Data.Array.Lens
(
-- * Setters
ixmapped
) where
import Control.Lens
import Data.Array.IArray hiding (index)
-- | This 'Setter' can be used to derive a new 'IArray' from an old 'IArray' by
-- applying a function to each of the indices to look it up in the old 'IArray'.
--
-- This is a /contravariant/ 'Setter'.
--
-- @
-- 'ixmap' ≡ 'over' '.' 'ixmapped'
-- 'ixmapped' ≡ 'setting' '.' 'ixmap'
-- 'over' ('ixmapped' b) f arr '!' i ≡ arr '!' f i
-- 'bounds' ('over' ('ixmapped' b) f arr) ≡ b
-- @
ixmapped :: (IArray a e, Ix i, Ix j) => (i,i) -> IndexPreservingSetter (a j e) (a i e) i j
ixmapped = setting . ixmap
{-# INLINE ixmapped #-}
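-- A minimal usage sketch (the array @arr@ is illustrative; 'listArray' and
-- 'Array' come from "Data.Array.IArray"):
--
-- > arr :: Array Int Char
-- > arr = listArray (0, 2) "abc"
-- >
-- > over (ixmapped (0, 2)) (\i -> 2 - i) arr   -- elements now read "cba"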
|
ddssff/lens
|
src/Data/Array/Lens.hs
|
Haskell
|
bsd-3-clause
| 1,120
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.LineSegments
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- This module corresponds to section 3.4 (Line Segments) of the OpenGL 2.1
-- specs.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.LineSegments (
-- * Line Rasterization
lineWidth,
-- * Line Stipple
lineStipple,
-- * Line Antialiasing
lineSmooth,
-- * Implementation-Dependent Limits
aliasedLineWidthRange, smoothLineWidthRange, smoothLineWidthGranularity
) where
import Control.Monad
import Graphics.Rendering.OpenGL.GL.Capability
import Graphics.Rendering.OpenGL.GL.QueryUtils
import Graphics.Rendering.OpenGL.GL.StateVar
import Graphics.Rendering.OpenGL.Raw
--------------------------------------------------------------------------------
-- | 'lineWidth' contains the rasterized width of both aliased and antialiased
-- lines. The initial value is 1. Using a line width other than 1 has different
-- effects, depending on whether line antialiasing is enabled (see
-- 'lineSmooth'). Line antialiasing is initially disabled.
--
-- If line antialiasing is disabled, the actual width is determined by rounding
-- the supplied width to the nearest integer. (If the rounding results in the
-- value 0, it is as if the line width were 1.) If /delta x/ >= /delta y/, /i/
-- pixels are filled in each column that is rasterized, where /i/ is the
-- rounded value of 'lineWidth'. Otherwise, /i/ pixels are filled in each row
-- that is rasterized.
--
-- If antialiasing is enabled, line rasterization produces a fragment for each
-- pixel square that intersects the region lying within the rectangle having
-- width equal to the current line width, length equal to the actual length of
-- the line, and centered on the mathematical line segment. The coverage value
-- for each fragment is the window coordinate area of the intersection of the
-- rectangular region with the corresponding pixel square. This value is saved
-- and used in the final rasterization step.
--
-- Not all widths can be supported when line antialiasing is enabled. If an
-- unsupported width is requested, the nearest supported width is used. Only
-- width 1 is guaranteed to be supported; others depend on the implementation.
-- Likewise, there is a range for aliased line widths as well. To query the
-- range of supported widths of antialiased lines and the size difference
-- between supported widths within the range, query 'smoothLineWidthRange' and
-- 'smoothLineWidthGranularity', respectively. For aliased lines, query the
-- supported range with 'aliasedLineWidthRange'.
--
-- The line width specified when 'lineWidth' is set is always returned when it
-- is queried. Clamping and rounding for aliased and antialiased lines have no
-- effect on the specified value.
--
-- A non-antialiased line width may be clamped to an implementation-dependent
-- maximum. Query 'aliasedLineWidthRange' to determine the maximum width.
--
-- An 'Graphics.Rendering.OpenGL.GLU.Errors.InvalidValue' is generated if
-- 'lineWidth' is set to a value less than or equal to zero.
--
-- An 'Graphics.Rendering.OpenGL.GLU.Errors.InvalidOperation' is generated if
-- 'lineWidth' is set during
-- 'Graphics.Rendering.OpenGL.GL.BeginEnd.renderPrimitive'.
lineWidth :: StateVar GLfloat
lineWidth = makeStateVar (getFloat1 id GetLineWidth) glLineWidth
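-- A minimal usage sketch, using @($=)@ and @get@ from the StateVar interface
-- (the width of 2.5 is an arbitrary example value):
--
-- > lineWidth $= 2.5
-- > w <- get lineWidth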
--------------------------------------------------------------------------------
-- | Line stippling masks out certain fragments produced by rasterization; those
-- fragments will not be drawn. The masking is achieved by using three
-- parameters: the repeat count (1st element of the 'lineStipple' pair, clamped
-- to the range [ 1 .. 256 ]), the 16-bit line stipple pattern (2nd element),
-- and an integer stipple counter /s/.
--
-- The counter /s/ is reset to 0 before the first action of
-- 'Graphics.Rendering.OpenGL.GL.BeginEnd.renderPrimitive' and before each line
-- segment generated during
-- 'Graphics.Rendering.OpenGL.GL.BeginEnd.renderPrimitive'. It is
-- incremented after each fragment of a unit width aliased line segment is
-- generated or after each /i/ fragments of an /i/ width line segment are
-- generated. The /i/ fragments associated with count /s/ are masked out if
-- @'Data.Bits.testBit' /pattern/ (( /s/ \/ /factor/ ) /mod/ 16)@ is 'False',
-- otherwise these fragments are sent to the frame buffer. Bit zero of the
-- pattern is the least significant bit, i.e. it is used first.
--
-- Antialiased lines are treated as a sequence of rectangles of height 1 for
-- purposes of stippling. Whether rectangle /s/ is rasterized or not depends on
-- the fragment rule described for aliased lines, counting rectangles rather
-- than groups of fragments.
--
-- The initial value of 'lineStipple' is 'Nothing', i.e. line stippling is
-- disabled.
--
-- An 'Graphics.Rendering.OpenGL.GLU.Errors.InvalidOperation' is generated if
-- 'lineStipple' is set during
-- 'Graphics.Rendering.OpenGL.GL.BeginEnd.renderPrimitive'.
lineStipple :: StateVar (Maybe (GLint, GLushort))
lineStipple =
makeStateVarMaybe
(return CapLineStipple)
(liftM2 (,) (getInteger1 id GetLineStippleRepeat)
(getInteger1 fromIntegral GetLineStipplePattern))
(uncurry glLineStipple)
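-- A minimal usage sketch (the repeat count and stipple pattern are arbitrary
-- example values):
--
-- > lineStipple $= Just (1, 0x00FF)   -- dashed lines
-- > lineStipple $= Nothing            -- turn stippling off again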
--------------------------------------------------------------------------------
-- | Controls whether line antialiasing is enabled. The initial state is
-- 'Graphics.Rendering.OpenGL.GL.Capability.Disabled'.
lineSmooth :: StateVar Capability
lineSmooth = makeCapability CapLineSmooth
--------------------------------------------------------------------------------
-- | The smallest and largest supported width of aliased lines.
aliasedLineWidthRange :: GettableStateVar (GLfloat, GLfloat)
aliasedLineWidthRange =
makeGettableStateVar $ getFloat2 (,) GetAliasedLineWidthRange
-- | The smallest and largest supported width of antialiased lines.
smoothLineWidthRange :: GettableStateVar (GLfloat, GLfloat)
smoothLineWidthRange =
makeGettableStateVar $ getFloat2 (,) GetSmoothLineWidthRange
-- | The antialiased line width granularity, i.e. the size difference between
-- supported widths.
smoothLineWidthGranularity :: GettableStateVar GLfloat
smoothLineWidthGranularity =
makeGettableStateVar $ getFloat1 id GetSmoothLineWidthGranularity
|
hesiod/OpenGL
|
src/Graphics/Rendering/OpenGL/GL/LineSegments.hs
|
Haskell
|
bsd-3-clause
| 6,619
|
------------------------------------------------------------------------------
--
-- Haskell: The Craft of Functional Programming, 3e
-- Simon Thompson
-- (c) Addison-Wesley, 1996-2011.
--
-- Chapter 10
--
-------------------------------------------------------------------------
-- Generalization: patterns of computation
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
module Chapter10 where
import Prelude hiding (map,filter,zipWith,foldr1,foldr,concat,and)
import Pictures hiding (flipV,beside)
import qualified Chapter7
-- Higher-order functions: functions as arguments
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- Mapping a function along a list.
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
map,map' :: (a -> b) -> [a] -> [b]
map' f xs = [ f x | x <- xs ] -- (map.0)
map f [] = [] -- (map.1)
map f (x:xs) = f x : map f xs -- (map.2)
-- Examples using map.
-- Double all the elements of a list ...
doubleAll :: [Integer] -> [Integer]
doubleAll xs = map double xs
where
double x = 2*x
-- ... convert characters to their numeric codes ...
convertChrs :: [Char] -> [Int]
convertChrs xs = map fromEnum xs
-- ... flip a Picture in a vertical mirror.
flipV :: Picture -> Picture
flipV xs = map reverse xs
-- Modelling properties as functions
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- Is an integer even?
isEven :: Integer -> Bool
isEven n = (n `mod` 2 == 0)
-- Is a list sorted?
isSorted :: [Integer] -> Bool
isSorted xs = (xs == iSort xs)
-- Filtering -- the filter function
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
filter :: (a -> Bool) -> [a] -> [a]
filter p [] = [] -- (filter.1)
filter p (x:xs)
| p x = x : filter p xs -- (filter.2)
| otherwise = filter p xs -- (filter.3)
-- A list comprehension also serves to define filter,
filter' p xs = [ x | x <- xs , p x ] -- (filter.0)
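-- A couple of examples using filter (filterEx1 and filterEx2 are illustrative
-- bindings, using isEven and isSorted defined above).
filterEx1 = filter isEven [2,4,7]           -- [2,4]
filterEx2 = filter isSorted [[1,2],[2,1]]   -- [[1,2]]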
-- Combining zip and map -- the zipWith function
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
zipWith :: (a -> b -> c) -> [a] -> [b] -> [c]
zipWith f (x:xs) (y:ys) = f x y : zipWith f xs ys
zipWith f _ _ = []
beside :: Picture -> Picture -> Picture
beside pic1 pic2 = zipWith (++) pic1 pic2
-- Folding and primitive recursion
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- Folding an operation into a non-empty list
foldr1 :: (a -> a -> a) -> [a] -> a
foldr1 f [x] = x -- (foldr1.1)
foldr1 f (x:xs) = f x (foldr1 f xs) -- (foldr1.2)
-- Examples using foldr1
foldEx1 = foldr1 (+) [3,98,1]
foldEx2 = foldr1 (||) [False,True,False]
foldEx3 = foldr1 (++) ["Freak ", "Out" , "", "!"]
foldEx4 = foldr1 min [6]
foldEx5 = foldr1 (*) [1 .. 6]
-- Folding into an arbitrary list: using a starting value on the empty list.
foldr f s [] = s -- (foldr.1)
foldr f s (x:xs) = f x (foldr f s xs) -- (foldr.2)
-- Concatenating a list using foldr.
concat :: [[a]] -> [a]
concat xs = foldr (++) [] xs
-- Conjoining a list of Bool using foldr.
and :: [Bool] -> Bool
and bs = foldr (&&) True bs
-- Can define foldr1 using foldr:
-- foldr1 f (x:xs) = foldr f x xs -- (foldr1.0)
-- Folding in general -- foldr again
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- The type of foldr is more general than you would initially expect...
foldr :: (a -> b -> b) -> b -> [a] -> b
rev :: [a] -> [a]
rev xs = foldr snoc [] xs
snoc :: a -> [a] -> [a]
snoc x xs = xs ++ [x]
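-- An example using rev (revEx is an illustrative binding).
revEx = rev "craft"                         -- "tfarc"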
-- Sorting a list using foldr
iSort :: [Integer] -> [Integer]
iSort xs = foldr Chapter7.ins [] xs
-- From the exercises: a mystery function ...
mystery xs = foldr (++) [] (map sing xs)
sing x = [x]
-- Generalizing: splitting up lists
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- Getting the first word from the front of a String ...
getWord :: String -> String
getWord [] = [] -- (getWord.1)
getWord (x:xs)
| elem x Chapter7.whitespace = [] -- (getWord.2)
| otherwise = x : getWord xs -- (getWord.3)
-- ... which generalizes to a function which gets items from the front of a list
-- until an item has the required property.
getUntil :: (a -> Bool) -> [a] -> [a]
getUntil p [] = []
getUntil p (x:xs)
| p x = []
| otherwise = x : getUntil p xs
-- The original getWord function defined from getUntil
-- getWord xs
-- = getUntil p xs
-- where
-- p x = elem x whitespace
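-- Another illustrative specialisation of getUntil (getDigits is a
-- hypothetical binding): keep the leading digits of a string.
getDigits :: String -> String
getDigits = getUntil (\c -> not (c `elem` ['0'..'9']))
-- e.g. getDigits "123abc" == "123"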
|
c089/haskell-craft3e
|
Chapter10.hs
|
Haskell
|
mit
| 4,289
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module Stack.Constants.Config
( distDirFromDir
, workDirFromDir
, distRelativeDir
, imageStagingDir
, projectDockerSandboxDir
, configCacheFile
, configCabalMod
, buildCachesDir
, testSuccessFile
, testBuiltFile
, hpcRelativeDir
, hpcDirFromDir
, objectInterfaceDirL
, templatesDir
) where
import Stack.Prelude
import Stack.Constants
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.PackageIdentifier
import Path
-- | Output .o/.hi directory.
objectInterfaceDirL :: HasBuildConfig env => Getting r env (Path Abs Dir)
objectInterfaceDirL = to $ \env -> -- FIXME is this idiomatic lens code?
let workDir = view workDirL env
root = view projectRootL env
in root </> workDir </> $(mkRelDir "odir/")
-- | The directory containing the files used for dirtiness check of source files.
buildCachesDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs Dir)
buildCachesDir dir =
liftM
(</> $(mkRelDir "stack-build-caches"))
(distDirFromDir dir)
-- | The filename used to mark tests as having succeeded
testSuccessFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory
-> m (Path Abs File)
testSuccessFile dir =
liftM
(</> $(mkRelFile "stack-test-success"))
(distDirFromDir dir)
-- | The filename used to mark tests as having built
testBuiltFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory
-> m (Path Abs File)
testBuiltFile dir =
liftM
(</> $(mkRelFile "stack-test-built"))
(distDirFromDir dir)
-- | The filename used for dirtiness check of config.
configCacheFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs File)
configCacheFile dir =
liftM
(</> $(mkRelFile "stack-config-cache"))
(distDirFromDir dir)
-- | The filename used for modification check of .cabal
configCabalMod :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs File)
configCabalMod dir =
liftM
(</> $(mkRelFile "stack-cabal-mod"))
(distDirFromDir dir)
-- | Directory for HPC work.
hpcDirFromDir
:: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs Dir)
hpcDirFromDir fp =
liftM (fp </>) hpcRelativeDir
-- | Relative location of directory for HPC work.
hpcRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> m (Path Rel Dir)
hpcRelativeDir =
liftM (</> $(mkRelDir "hpc")) distRelativeDir
-- | Package's build artifacts directory.
distDirFromDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs Dir)
distDirFromDir fp =
liftM (fp </>) distRelativeDir
-- | Package's working directory.
workDirFromDir :: (MonadReader env m, HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs Dir)
workDirFromDir fp = view $ workDirL.to (fp </>)
-- | Directory for project templates.
templatesDir :: Config -> Path Abs Dir
templatesDir config = view stackRootL config </> $(mkRelDir "templates")
-- | Relative location of build artifacts.
distRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> m (Path Rel Dir)
distRelativeDir = do
cabalPkgVer <- view cabalVersionL
platform <- platformGhcRelDir
wc <- view $ actualCompilerVersionL.to whichCompiler
-- Cabal version, suffixed with "_ghcjs" if we're using GHCJS.
envDir <-
parseRelDir $
(if wc == Ghcjs then (++ "_ghcjs") else id) $
packageIdentifierString $
PackageIdentifier cabalPackageName cabalPkgVer
platformAndCabal <- useShaPathOnWindows (platform </> envDir)
workDir <- view workDirL
return $
workDir </>
$(mkRelDir "dist") </>
platformAndCabal
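-- For example (an illustrative sketch only; the platform and Cabal segments
-- vary by OS, architecture and compiler version), with the default work dir
-- the result looks roughly like:
--   .stack-work/dist/x86_64-linux/Cabal-1.24.2.0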
-- | Docker sandbox from project root.
projectDockerSandboxDir :: (MonadReader env m, HasConfig env)
=> Path Abs Dir -- ^ Project root
-> m (Path Abs Dir) -- ^ Docker sandbox
projectDockerSandboxDir projectRoot = do
workDir <- view workDirL
return $ projectRoot </> workDir </> $(mkRelDir "docker/")
-- | Image staging dir from project root.
imageStagingDir :: (MonadReader env m, HasConfig env, MonadThrow m)
=> Path Abs Dir -- ^ Project root
-> Int -- ^ Index of image
-> m (Path Abs Dir) -- ^ Docker sandbox
imageStagingDir projectRoot imageIdx = do
workDir <- view workDirL
idxRelDir <- parseRelDir (show imageIdx)
return $ projectRoot </> workDir </> $(mkRelDir "image") </> idxRelDir
|
anton-dessiatov/stack
|
src/Stack/Constants/Config.hs
|
Haskell
|
bsd-3-clause
| 5,034
|
module Stack.Options.TestParser where
import Data.Maybe
import Data.Monoid.Extra
import Options.Applicative
import Options.Applicative.Args
import Options.Applicative.Builder.Extra
import Stack.Options.Utils
import Stack.Types.Config
-- | Parser for test arguments.
-- FIXME hide args
testOptsParser :: Bool -> Parser TestOptsMonoid
testOptsParser hide0 =
TestOptsMonoid
<$> firstBoolFlags
"rerun-tests"
"running already successful tests"
hide
<*> fmap
(fromMaybe [])
(optional
(argsOption
(long "test-arguments" <>
metavar "TEST_ARGS" <>
help "Arguments passed in to the test suite program" <>
hide)))
<*> optionalFirst
(switch
(long "coverage" <>
help "Generate a code coverage report" <>
hide))
<*> optionalFirst
(switch
(long "no-run-tests" <>
help "Disable running of tests. (Tests will still be built.)" <>
hide))
where hide = hideMods hide0
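-- Illustrative command lines this parser is intended to accept (a sketch,
-- assuming the usual --<name>/--no-<name> flag pairs produced by
-- firstBoolFlags):
--
-- > stack test --no-rerun-tests
-- > stack test --coverage --test-arguments "--match foo"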
|
AndreasPK/stack
|
src/Stack/Options/TestParser.hs
|
Haskell
|
bsd-3-clause
| 1,314
|
{-# LANGUAGE CPP, Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude, MagicHash, StandaloneDeriving, BangPatterns,
KindSignatures, DataKinds, ConstraintKinds,
MultiParamTypeClasses, FunctionalDependencies #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
-- ip :: IP x a => a is strictly speaking ambiguous, but IP is magic
{-# LANGUAGE UndecidableSuperClasses #-}
-- Because of the type-variable superclasses for tuples
{-# OPTIONS_GHC -Wno-unused-imports #-}
-- -Wno-unused-imports needed for the GHC.Tuple import below. Sigh.
{-# OPTIONS_GHC -Wno-unused-top-binds #-}
-- -Wno-unused-top-binds is there (I hope) to stop Haddock complaining
-- about the constraint tuples being defined but not used
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Classes
-- Copyright : (c) The University of Glasgow, 1992-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- Basic classes.
--
-----------------------------------------------------------------------------
module GHC.Classes(
    -- * Implicit parameters
IP(..),
-- * Equality and ordering
Eq(..),
Ord(..),
-- ** Monomorphic equality operators
-- | See GHC.Classes#matching_overloaded_methods_in_rules
eqInt, neInt,
eqWord, neWord,
eqChar, neChar,
eqFloat, eqDouble,
-- ** Monomorphic comparison operators
gtInt, geInt, leInt, ltInt, compareInt, compareInt#,
gtWord, geWord, leWord, ltWord, compareWord, compareWord#,
-- * Functions over Bool
(&&), (||), not,
-- * Integer arithmetic
divInt#, modInt#
) where
-- GHC.Magic is used in some derived instances
import GHC.Magic ()
import GHC.IntWord64
import GHC.Prim
import GHC.Tuple
import GHC.Types
#include "MachDeps.h"
infix 4 ==, /=, <, <=, >=, >
infixr 3 &&
infixr 2 ||
default () -- Double isn't available yet
-- | The syntax @?x :: a@ is desugared into @IP "x" a@
-- IP is declared very early, so that libraries can take
-- advantage of the implicit-call-stack feature
class IP (x :: Symbol) a | x -> a where
ip :: a
{- $matching_overloaded_methods_in_rules
Matching on class methods (e.g. @(==)@) in rewrite rules tends to be a bit
fragile. For instance, consider this motivating example from the @bytestring@
library,
> break :: (Word8 -> Bool) -> ByteString -> (ByteString, ByteString)
> breakByte :: Word8 -> ByteString -> (ByteString, ByteString)
> {-# RULES "break -> breakByte" forall a. break (== x) = breakByte x #-}
Here we have two functions, with @breakByte@ providing an optimized
implementation of @break@ where the predicate is merely testing for equality
with a known @Word8@. As written, however, this rule will be quite fragile as
the @(==)@ class operation rule may rewrite the predicate before our @break@
rule has a chance to fire.
For this reason, most of the primitive types in @base@ have 'Eq' and 'Ord'
instances defined in terms of helper functions with inlinings delayed to phase
1. For instance, @Word8@\'s @Eq@ instance looks like,
> instance Eq Word8 where
> (==) = eqWord8
> (/=) = neWord8
>
> eqWord8, neWord8 :: Word8 -> Word8 -> Bool
> eqWord8 (W8# x) (W8# y) = ...
> neWord8 (W8# x) (W8# y) = ...
> {-# INLINE [1] eqWord8 #-}
> {-# INLINE [1] neWord8 #-}
This allows us to save our @break@ rule above by rewriting it to instead match
against @eqWord8@,
> {-# RULES "break -> breakByte" forall a. break (`eqWord8` x) = breakByte x #-}
Currently this is only done for '(==)', '(/=)', '(<)', '(<=)', '(>)', and '(>=)'
for the types in "GHC.Word" and "GHC.Int".
-}
-- | The 'Eq' class defines equality ('==') and inequality ('/=').
-- All the basic datatypes exported by the "Prelude" are instances of 'Eq',
-- and 'Eq' may be derived for any datatype whose constituents are also
-- instances of 'Eq'.
--
-- Minimal complete definition: either '==' or '/='.
--
class Eq a where
(==), (/=) :: a -> a -> Bool
{-# INLINE (/=) #-}
{-# INLINE (==) #-}
x /= y = not (x == y)
x == y = not (x /= y)
{-# MINIMAL (==) | (/=) #-}
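-- An illustrative user-level instance (a sketch; 'Colour' is a hypothetical
-- type, not defined in this module): only one of the two methods needs to be
-- given, the other falls out of the defaults above.
--
-- > data Colour = Red | Green | Blue
-- > instance Eq Colour where
-- >   Red   == Red   = True
-- >   Green == Green = True
-- >   Blue  == Blue  = True
-- >   _     == _     = False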
deriving instance Eq ()
deriving instance (Eq a, Eq b) => Eq (a, b)
deriving instance (Eq a, Eq b, Eq c) => Eq (a, b, c)
deriving instance (Eq a, Eq b, Eq c, Eq d) => Eq (a, b, c, d)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e) => Eq (a, b, c, d, e)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f)
=> Eq (a, b, c, d, e, f)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g)
=> Eq (a, b, c, d, e, f, g)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h)
=> Eq (a, b, c, d, e, f, g, h)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i)
=> Eq (a, b, c, d, e, f, g, h, i)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j)
=> Eq (a, b, c, d, e, f, g, h, i, j)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j, Eq k)
=> Eq (a, b, c, d, e, f, g, h, i, j, k)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j, Eq k, Eq l)
=> Eq (a, b, c, d, e, f, g, h, i, j, k, l)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j, Eq k, Eq l, Eq m)
=> Eq (a, b, c, d, e, f, g, h, i, j, k, l, m)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j, Eq k, Eq l, Eq m, Eq n)
=> Eq (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
deriving instance (Eq a, Eq b, Eq c, Eq d, Eq e, Eq f, Eq g,
Eq h, Eq i, Eq j, Eq k, Eq l, Eq m, Eq n, Eq o)
=> Eq (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
instance (Eq a) => Eq [a] where
{-# SPECIALISE instance Eq [[Char]] #-}
{-# SPECIALISE instance Eq [Char] #-}
{-# SPECIALISE instance Eq [Int] #-}
[] == [] = True
(x:xs) == (y:ys) = x == y && xs == ys
_xs == _ys = False
deriving instance Eq Bool
deriving instance Eq Ordering
instance Eq Word where
(==) = eqWord
(/=) = neWord
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] eqWord #-}
{-# INLINE [1] neWord #-}
eqWord, neWord :: Word -> Word -> Bool
(W# x) `eqWord` (W# y) = isTrue# (x `eqWord#` y)
(W# x) `neWord` (W# y) = isTrue# (x `neWord#` y)
-- See GHC.Classes#matching_overloaded_methods_in_rules
instance Eq Char where
(==) = eqChar
(/=) = neChar
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] eqChar #-}
{-# INLINE [1] neChar #-}
eqChar, neChar :: Char -> Char -> Bool
(C# x) `eqChar` (C# y) = isTrue# (x `eqChar#` y)
(C# x) `neChar` (C# y) = isTrue# (x `neChar#` y)
instance Eq Float where
(==) = eqFloat
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] eqFloat #-}
eqFloat :: Float -> Float -> Bool
(F# x) `eqFloat` (F# y) = isTrue# (x `eqFloat#` y)
instance Eq Double where
(==) = eqDouble
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] eqDouble #-}
eqDouble :: Double -> Double -> Bool
(D# x) `eqDouble` (D# y) = isTrue# (x ==## y)
instance Eq Int where
(==) = eqInt
(/=) = neInt
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] eqInt #-}
{-# INLINE [1] neInt #-}
eqInt, neInt :: Int -> Int -> Bool
(I# x) `eqInt` (I# y) = isTrue# (x ==# y)
(I# x) `neInt` (I# y) = isTrue# (x /=# y)
#if WORD_SIZE_IN_BITS < 64
instance Eq TyCon where
(==) (TyCon hi1 lo1 _ _) (TyCon hi2 lo2 _ _)
= isTrue# (hi1 `eqWord64#` hi2) && isTrue# (lo1 `eqWord64#` lo2)
instance Ord TyCon where
compare (TyCon hi1 lo1 _ _) (TyCon hi2 lo2 _ _)
| isTrue# (hi1 `gtWord64#` hi2) = GT
| isTrue# (hi1 `ltWord64#` hi2) = LT
| isTrue# (lo1 `gtWord64#` lo2) = GT
| isTrue# (lo1 `ltWord64#` lo2) = LT
| True = EQ
#else
instance Eq TyCon where
(==) (TyCon hi1 lo1 _ _) (TyCon hi2 lo2 _ _)
= isTrue# (hi1 `eqWord#` hi2) && isTrue# (lo1 `eqWord#` lo2)
instance Ord TyCon where
compare (TyCon hi1 lo1 _ _) (TyCon hi2 lo2 _ _)
| isTrue# (hi1 `gtWord#` hi2) = GT
| isTrue# (hi1 `ltWord#` hi2) = LT
| isTrue# (lo1 `gtWord#` lo2) = GT
| isTrue# (lo1 `ltWord#` lo2) = LT
| True = EQ
#endif
-- | The 'Ord' class is used for totally ordered datatypes.
--
-- Instances of 'Ord' can be derived for any user-defined
-- datatype whose constituent types are in 'Ord'. The declared order
-- of the constructors in the data declaration determines the ordering
-- in derived 'Ord' instances. The 'Ordering' datatype allows a single
-- comparison to determine the precise ordering of two objects.
--
-- Minimal complete definition: either 'compare' or '<='.
-- Using 'compare' can be more efficient for complex types.
--
class (Eq a) => Ord a where
compare :: a -> a -> Ordering
(<), (<=), (>), (>=) :: a -> a -> Bool
max, min :: a -> a -> a
compare x y = if x == y then EQ
-- NB: must be '<=' not '<' to validate the
-- above claim about the minimal things that
-- can be defined for an instance of Ord:
else if x <= y then LT
else GT
x < y = case compare x y of { LT -> True; _ -> False }
x <= y = case compare x y of { GT -> False; _ -> True }
x > y = case compare x y of { GT -> True; _ -> False }
x >= y = case compare x y of { LT -> False; _ -> True }
-- These two default methods use '<=' rather than 'compare'
-- because the latter is often more expensive
max x y = if x <= y then y else x
min x y = if x <= y then x else y
{-# MINIMAL compare | (<=) #-}
deriving instance Ord ()
deriving instance (Ord a, Ord b) => Ord (a, b)
deriving instance (Ord a, Ord b, Ord c) => Ord (a, b, c)
deriving instance (Ord a, Ord b, Ord c, Ord d) => Ord (a, b, c, d)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e) => Ord (a, b, c, d, e)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f)
=> Ord (a, b, c, d, e, f)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g)
=> Ord (a, b, c, d, e, f, g)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h)
=> Ord (a, b, c, d, e, f, g, h)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i)
=> Ord (a, b, c, d, e, f, g, h, i)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j)
=> Ord (a, b, c, d, e, f, g, h, i, j)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j, Ord k)
=> Ord (a, b, c, d, e, f, g, h, i, j, k)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j, Ord k, Ord l)
=> Ord (a, b, c, d, e, f, g, h, i, j, k, l)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j, Ord k, Ord l, Ord m)
=> Ord (a, b, c, d, e, f, g, h, i, j, k, l, m)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j, Ord k, Ord l, Ord m, Ord n)
=> Ord (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
deriving instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g,
Ord h, Ord i, Ord j, Ord k, Ord l, Ord m, Ord n, Ord o)
=> Ord (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
instance (Ord a) => Ord [a] where
{-# SPECIALISE instance Ord [[Char]] #-}
{-# SPECIALISE instance Ord [Char] #-}
{-# SPECIALISE instance Ord [Int] #-}
compare [] [] = EQ
compare [] (_:_) = LT
compare (_:_) [] = GT
compare (x:xs) (y:ys) = case compare x y of
EQ -> compare xs ys
other -> other
deriving instance Ord Bool
deriving instance Ord Ordering
-- We don't use deriving for Ord Char, because for Ord the derived
-- instance defines only compare, which takes two primops. Then
-- '>' uses compare, and therefore takes two primops instead of one.
instance Ord Char where
(C# c1) > (C# c2) = isTrue# (c1 `gtChar#` c2)
(C# c1) >= (C# c2) = isTrue# (c1 `geChar#` c2)
(C# c1) <= (C# c2) = isTrue# (c1 `leChar#` c2)
(C# c1) < (C# c2) = isTrue# (c1 `ltChar#` c2)
instance Ord Float where
(F# x) `compare` (F# y)
= if isTrue# (x `ltFloat#` y) then LT
else if isTrue# (x `eqFloat#` y) then EQ
else GT
(F# x) < (F# y) = isTrue# (x `ltFloat#` y)
(F# x) <= (F# y) = isTrue# (x `leFloat#` y)
(F# x) >= (F# y) = isTrue# (x `geFloat#` y)
(F# x) > (F# y) = isTrue# (x `gtFloat#` y)
instance Ord Double where
(D# x) `compare` (D# y)
= if isTrue# (x <## y) then LT
else if isTrue# (x ==## y) then EQ
else GT
(D# x) < (D# y) = isTrue# (x <## y)
(D# x) <= (D# y) = isTrue# (x <=## y)
(D# x) >= (D# y) = isTrue# (x >=## y)
(D# x) > (D# y) = isTrue# (x >## y)
instance Ord Int where
compare = compareInt
(<) = ltInt
(<=) = leInt
(>=) = geInt
(>) = gtInt
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] gtInt #-}
{-# INLINE [1] geInt #-}
{-# INLINE [1] ltInt #-}
{-# INLINE [1] leInt #-}
gtInt, geInt, ltInt, leInt :: Int -> Int -> Bool
(I# x) `gtInt` (I# y) = isTrue# (x ># y)
(I# x) `geInt` (I# y) = isTrue# (x >=# y)
(I# x) `ltInt` (I# y) = isTrue# (x <# y)
(I# x) `leInt` (I# y) = isTrue# (x <=# y)
compareInt :: Int -> Int -> Ordering
(I# x#) `compareInt` (I# y#) = compareInt# x# y#
compareInt# :: Int# -> Int# -> Ordering
compareInt# x# y#
| isTrue# (x# <# y#) = LT
| isTrue# (x# ==# y#) = EQ
| True = GT
instance Ord Word where
compare = compareWord
(<) = ltWord
(<=) = leWord
(>=) = geWord
(>) = gtWord
-- See GHC.Classes#matching_overloaded_methods_in_rules
{-# INLINE [1] gtWord #-}
{-# INLINE [1] geWord #-}
{-# INLINE [1] ltWord #-}
{-# INLINE [1] leWord #-}
gtWord, geWord, ltWord, leWord :: Word -> Word -> Bool
(W# x) `gtWord` (W# y) = isTrue# (x `gtWord#` y)
(W# x) `geWord` (W# y) = isTrue# (x `geWord#` y)
(W# x) `ltWord` (W# y) = isTrue# (x `ltWord#` y)
(W# x) `leWord` (W# y) = isTrue# (x `leWord#` y)
compareWord :: Word -> Word -> Ordering
(W# x#) `compareWord` (W# y#) = compareWord# x# y#
compareWord# :: Word# -> Word# -> Ordering
compareWord# x# y#
| isTrue# (x# `ltWord#` y#) = LT
| isTrue# (x# `eqWord#` y#) = EQ
| True = GT
-- OK, so they're technically not part of a class...:
-- Boolean functions
-- | Boolean \"and\"
(&&) :: Bool -> Bool -> Bool
True && x = x
False && _ = False
-- | Boolean \"or\"
(||) :: Bool -> Bool -> Bool
True || _ = True
False || x = x
-- | Boolean \"not\"
not :: Bool -> Bool
not True = False
not False = True
------------------------------------------------------------------------
-- These don't really belong here, but we don't have a better place to
-- put them
-- These functions have built-in rules.
{-# NOINLINE [0] divInt# #-}
{-# NOINLINE [0] modInt# #-}
divInt# :: Int# -> Int# -> Int#
x# `divInt#` y#
-- Be careful NOT to overflow if we do any additional arithmetic
-- on the arguments... the following previous version of this
-- code has problems with overflow:
-- | (x# ># 0#) && (y# <# 0#) = ((x# -# y#) -# 1#) `quotInt#` y#
-- | (x# <# 0#) && (y# ># 0#) = ((x# -# y#) +# 1#) `quotInt#` y#
= if isTrue# (x# ># 0#) && isTrue# (y# <# 0#) then ((x# -# 1#) `quotInt#` y#) -# 1#
else if isTrue# (x# <# 0#) && isTrue# (y# ># 0#) then ((x# +# 1#) `quotInt#` y#) -# 1#
else x# `quotInt#` y#
modInt# :: Int# -> Int# -> Int#
x# `modInt#` y#
= if isTrue# (x# ># 0#) && isTrue# (y# <# 0#) ||
isTrue# (x# <# 0#) && isTrue# (y# ># 0#)
then if isTrue# (r# /=# 0#) then r# +# y# else 0#
else r#
where
!r# = x# `remInt#` y#
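-- Illustrative consequences of the definitions above, stated for the boxed
-- operators (a hedged sketch): `div` rounds toward negative infinity and
-- `mod` takes the sign of the divisor, e.g. (-7) `div` 2 == -4 and
-- (-7) `mod` 2 == 1, whereas `quot`/`rem` truncate toward zero:
-- (-7) `quot` 2 == -3 and (-7) `rem` 2 == -1.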
{- *************************************************************
* *
* Constraint tuples *
* *
************************************************************* -}
class ()
class (c1, c2) => (c1, c2)
class (c1, c2, c3) => (c1, c2, c3)
class (c1, c2, c3, c4) => (c1, c2, c3, c4)
class (c1, c2, c3, c4, c5) => (c1, c2, c3, c4, c5)
class (c1, c2, c3, c4, c5, c6) => (c1, c2, c3, c4, c5, c6)
class (c1, c2, c3, c4, c5, c6, c7) => (c1, c2, c3, c4, c5, c6, c7)
class (c1, c2, c3, c4, c5, c6, c7, c8) => (c1, c2, c3, c4, c5, c6, c7, c8)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17,c18)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60, c61)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60, c61)
class (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60, c61, c62)
=> (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16,
c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30,
c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44,
c45, c46, c47, c48, c49, c50, c51, c52, c53, c54, c55, c56, c57, c58,
c59, c60, c61, c62)
|
snoyberg/ghc
|
libraries/ghc-prim/GHC/Classes.hs
|
Haskell
|
bsd-3-clause
| 37,767
|
{-# LANGUAGE Arrows #-}
module CmdFail006 where
f = proc x -> ~(_ -< _)
|
sdiehl/ghc
|
testsuite/tests/parser/should_fail/cmdFail006.hs
|
Haskell
|
bsd-3-clause
| 73
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pl-PL">
<title>Technology detection | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Zawartość</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Szukaj</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Ulubione</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/wappalyzer/src/main/javahelp/org/zaproxy/zap/extension/wappalyzer/resources/help_pl_PL/helpset_pl_PL.hs
|
Haskell
|
apache-2.0
| 984
|
module E.Annotate where
import Control.Monad.Reader
import Data.Monoid
import qualified Data.Traversable as T
import E.E
import E.Program
import E.Subst
import GenUtil
import Info.Info(Info)
import Name.Id
import Util.HasSize
import Util.SetLike
annotateCombs :: forall m . Monad m =>
(IdMap (Maybe E))
-> (Id -> Info -> m Info) -- ^ annotate based on Id map
-> (E -> Info -> m Info) -- ^ annotate letbound bindings
-> (E -> Info -> m Info) -- ^ annotate lambdabound bindings
-> [Comb] -- ^ terms to annotate
-> m [Comb]
annotateCombs imap idann letann lamann cs = do
cs <- forM cs $ \comb -> do
nfo <- letann (combBody comb) (tvrInfo $ combHead comb)
nt <- annotate imap idann letann lamann (tvrType $ combHead comb)
return $ combHead_u (tvrInfo_s nfo . tvrType_s nt) comb
let nimap = fromList [ (combIdent c, Just . EVar $ combHead c) | c <- cs ]
`mappend` imap
f :: (IdMap (Maybe E)) -> E -> m E
f ni e = annotate ni idann letann lamann e
let mrule :: Rule -> m Rule
mrule r = do
let g tvr = do
nfo <- idann (tvrIdent tvr) (tvrInfo tvr)
let ntvr = tvr { tvrInfo = nfo }
return (ntvr,minsert (tvrIdent tvr) (Just $ EVar ntvr))
bs <- mapM g $ ruleBinds r
let nnimap = (foldr (.) id $ snds bs) nimap :: IdMap (Maybe E)
args <- mapM (f nnimap) (ruleArgs r)
body <- (f nnimap) (ruleBody r)
return r { ruleBinds = fsts bs, ruleBody = body, ruleArgs = args }
forM cs $ \comb -> do
rs <- mapM mrule (combRules comb)
nb <- f nimap (combBody comb)
return . combRules_s rs . combBody_s nb $ comb
annotateDs :: Monad m =>
(IdMap (Maybe E))
-> (Id -> Info -> m Info) -- ^ annotate based on Id map
-> (E -> Info -> m Info) -- ^ annotate letbound bindings
-> (E -> Info -> m Info) -- ^ annotate lambdabound bindings
-> [(TVr,E)] -- ^ terms to annotate
-> m [(TVr,E)]
annotateDs imap idann letann lamann ds = do
ELetRec { eDefs = ds', eBody = Unknown } <- annotate imap idann letann lamann (ELetRec ds Unknown)
return ds'
annotateProgram :: Monad m =>
(IdMap (Maybe E))
-> (Id -> Info -> m Info) -- ^ annotate based on Id map
-> (E -> Info -> m Info) -- ^ annotate letbound bindings
-> (E -> Info -> m Info) -- ^ annotate lambdabound bindings
-> Program -- ^ terms to annotate
-> m Program
annotateProgram imap idann letann lamann prog = do
ds <- annotateCombs imap idann letann lamann (progCombinators prog)
return $ programUpdate $ prog { progCombinators = ds }
type AM m = ReaderT (IdMap (Maybe E)) m
annotate :: Monad m =>
(IdMap (Maybe E))
-> (Id -> Info -> m Info) -- ^ annotate based on Id map
-> (E -> Info -> m Info) -- ^ annotate letbound bindings
-> (E -> Info -> m Info) -- ^ annotate lambdabound bindings
-> E -- ^ term to annotate
-> m E
annotate imap idann letann lamann e = runReaderT (f e) imap where
f eo@(EVar tvr@(TVr { tvrIdent = i, tvrType = t })) = do
mp <- ask
case mlookup i mp of
Just (Just v) -> return v
_ -> return eo
f (ELam tvr e) = lp ELam tvr e
f (EPi tvr e) = lp EPi tvr e
f (EAp a b) = liftM2 EAp (f a) (f b)
f (EError x e) = liftM (EError x) (f e)
f (EPrim x es e) = liftM2 (EPrim x) (mapM f es) (f e)
f ELetRec { eDefs = dl, eBody = e } = do
dl' <- flip mapM dl $ \ (t,e) -> do
nfo <- lift $ letann e (tvrInfo t)
return t { tvrInfo = nfo }
(as,rs) <- liftM unzip $ mapMntvr dl'
local (foldr (.) id rs) $ do
ds <- mapM f (snds dl)
e' <- f e
return $ ELetRec (zip as ds) e'
f (ELit l) = liftM ELit $ litSMapM f l
f Unknown = return Unknown
f e@(ESort {}) = return e
f ec@(ECase {}) = do
e' <- f $ eCaseScrutinee ec
let caseBind = eCaseBind ec
(b',r) <- ntvr [] caseBind
d <- local r $ T.mapM f $ eCaseDefault ec
let da (Alt lc@LitCons { litName = s, litArgs = vs, litType = t } e) = do
t' <- f t
(as,rs) <- liftM unzip $ mapMntvr vs
e' <- local (foldr (.) id rs) $ f e
return $ Alt lc { litArgs = as, litType = t' } e'
da (Alt l e) = do
l' <- T.mapM f l
e' <- f e
return $ Alt l' e'
alts <- local r (mapM da $ eCaseAlts ec)
t' <- f (eCaseType ec)
return $ caseUpdate ECase { eCaseAllFV = error "no eCaseAllFV needed", eCaseScrutinee = e', eCaseType = t', eCaseDefault = d, eCaseBind = b', eCaseAlts = alts }
lp lam tvr@(TVr { tvrIdent = n, tvrType = t}) e | n == emptyId = do
t' <- f t
nfo <- lift $ lamann e (tvrInfo tvr)
nfo <- lift $ idann n nfo
e' <- local (minsert n Nothing) $ f e
return $ lam (tvr { tvrIdent = emptyId, tvrType = t', tvrInfo = nfo}) e'
lp lam tvr e = do
nfo <- lift $ lamann e (tvrInfo tvr)
(tv,r) <- ntvr [] tvr { tvrInfo = nfo }
e' <- local r $ f e
return $ lam tv e'
mapMntvr ts = f ts [] where
f [] xs = return $ reverse xs
f (t:ts) rs = do
(t',r) <- ntvr vs t
local r $ f ts ((t',r):rs)
vs = [ tvrIdent x | x <- ts ]
ntvr xs tvr@(TVr { tvrIdent = n, tvrType = t}) | n == emptyId = do
t' <- f t
nfo <- lift $ idann emptyId (tvrInfo tvr)
let nvr = (tvr { tvrType = t', tvrInfo = nfo})
return (nvr,id)
ntvr xs tvr@(TVr {tvrIdent = i, tvrType = t}) = do
t' <- f t
ss <- ask
nfo' <- lift $ idann i (tvrInfo tvr)
let i' = mnv xs i ss
let nvr = (tvr { tvrIdent = i', tvrType = t', tvrInfo = nfo'})
case i == i' of
True -> return (nvr,minsert i (Just $ EVar nvr))
False -> return (nvr,minsert i (Just $ EVar nvr) . minsert i' Nothing)
mnv xs i ss
| isInvalidId i || i `member` ss = newId (size ss) isOkay
| otherwise = i
where isOkay i = (i `notMember` ss) && (i `notElem` xs)
|
m-alvarez/jhc
|
src/E/Annotate.hs
|
Haskell
|
mit
| 6,286
|
{-# LANGUAGE CPP, MagicHash #-}
-- | Dynamically looking up values from modules and loading them.
module DynamicLoading (
#ifdef GHCI
-- * Loading plugins
loadPlugins,
-- * Force loading information
forceLoadModuleInterfaces,
forceLoadNameModuleInterface,
forceLoadTyCon,
-- * Finding names
lookupRdrNameInModuleForPlugins,
-- * Loading values
getValueSafely,
getHValueSafely,
lessUnsafeCoerce
#endif
) where
#ifdef GHCI
import Linker ( linkModule, getHValue )
import SrcLoc ( noSrcSpan )
import Finder ( findImportedModule, cannotFindModule )
import TcRnMonad ( initTcInteractive, initIfaceTcRn )
import LoadIface ( loadPluginInterface )
import RdrName ( RdrName, ImportSpec(..), ImpDeclSpec(..)
, ImpItemSpec(..), mkGlobalRdrEnv, lookupGRE_RdrName
, gre_name, mkRdrQual )
import OccName ( mkVarOcc )
import RnNames ( gresFromAvails )
import DynFlags
import Plugins ( Plugin, CommandLineOption )
import PrelNames ( pluginTyConName )
import HscTypes
import BasicTypes ( HValue )
import TypeRep ( mkTyConTy, pprTyThingCategory )
import Type ( Type, eqType )
import TyCon ( TyCon )
import Name ( Name, nameModule_maybe )
import Id ( idType )
import Module ( Module, ModuleName )
import Panic
import FastString
import ErrUtils
import Outputable
import Exception
import Hooks
import Data.Maybe ( mapMaybe )
import GHC.Exts ( unsafeCoerce# )
loadPlugins :: HscEnv -> IO [(ModuleName, Plugin, [CommandLineOption])]
loadPlugins hsc_env
= do { plugins <- mapM (loadPlugin hsc_env) to_load
; return $ map attachOptions $ to_load `zip` plugins }
where
dflags = hsc_dflags hsc_env
to_load = pluginModNames dflags
attachOptions (mod_nm, plug) = (mod_nm, plug, options)
where
options = [ option | (opt_mod_nm, option) <- pluginModNameOpts dflags
, opt_mod_nm == mod_nm ]
loadPlugin :: HscEnv -> ModuleName -> IO Plugin
loadPlugin hsc_env mod_name
= do { let plugin_rdr_name = mkRdrQual mod_name (mkVarOcc "plugin")
dflags = hsc_dflags hsc_env
; mb_name <- lookupRdrNameInModuleForPlugins hsc_env mod_name
plugin_rdr_name
; case mb_name of {
Nothing ->
throwGhcExceptionIO (CmdLineError $ showSDoc dflags $ hsep
[ ptext (sLit "The module"), ppr mod_name
, ptext (sLit "did not export the plugin name")
, ppr plugin_rdr_name ]) ;
Just name ->
do { plugin_tycon <- forceLoadTyCon hsc_env pluginTyConName
; mb_plugin <- getValueSafely hsc_env name (mkTyConTy plugin_tycon)
; case mb_plugin of
Nothing ->
throwGhcExceptionIO (CmdLineError $ showSDoc dflags $ hsep
[ ptext (sLit "The value"), ppr name
, ptext (sLit "did not have the type")
, ppr pluginTyConName, ptext (sLit "as required")])
Just plugin -> return plugin } } }
-- | Force the interfaces for the given modules to be loaded. The 'SDoc' parameter is used
-- for debugging (@-ddump-if-trace@) only: it is shown as the reason why the module is being loaded.
forceLoadModuleInterfaces :: HscEnv -> SDoc -> [Module] -> IO ()
forceLoadModuleInterfaces hsc_env doc modules
= (initTcInteractive hsc_env $
initIfaceTcRn $
mapM_ (loadPluginInterface doc) modules)
>> return ()
-- | Force the interface for the module containing the name to be loaded. The 'SDoc' parameter is used
-- for debugging (@-ddump-if-trace@) only: it is shown as the reason why the module is being loaded.
forceLoadNameModuleInterface :: HscEnv -> SDoc -> Name -> IO ()
forceLoadNameModuleInterface hsc_env reason name = do
let name_modules = mapMaybe nameModule_maybe [name]
forceLoadModuleInterfaces hsc_env reason name_modules
-- | Load the 'TyCon' associated with the given name, come hell or high water. Fails if:
--
-- * The interface could not be loaded
-- * The name is not that of a 'TyCon'
-- * The name did not exist in the loaded module
forceLoadTyCon :: HscEnv -> Name -> IO TyCon
forceLoadTyCon hsc_env con_name = do
forceLoadNameModuleInterface hsc_env (ptext (sLit "contains a name used in an invocation of loadTyConTy")) con_name
mb_con_thing <- lookupTypeHscEnv hsc_env con_name
case mb_con_thing of
Nothing -> throwCmdLineErrorS dflags $ missingTyThingError con_name
Just (ATyCon tycon) -> return tycon
Just con_thing -> throwCmdLineErrorS dflags $ wrongTyThingError con_name con_thing
where dflags = hsc_dflags hsc_env
-- | Loads the value corresponding to a 'Name' if that value has the given 'Type'. This only provides limited safety
-- in that it is up to the user to ensure that the given type corresponds to the type at which the return value is actually used!
--
-- If the value found was not of the correct type, returns @Nothing@. Any other condition results in an exception:
--
-- * If we could not load the name's module
-- * If the thing being loaded is not a value
-- * If the Name does not exist in the module
-- * If the link failed
getValueSafely :: HscEnv -> Name -> Type -> IO (Maybe a)
getValueSafely hsc_env val_name expected_type = do
mb_hval <- lookupHook getValueSafelyHook getHValueSafely dflags hsc_env val_name expected_type
case mb_hval of
Nothing -> return Nothing
Just hval -> do
value <- lessUnsafeCoerce dflags "getValueSafely" hval
return (Just value)
where
dflags = hsc_dflags hsc_env
getHValueSafely :: HscEnv -> Name -> Type -> IO (Maybe HValue)
getHValueSafely hsc_env val_name expected_type = do
forceLoadNameModuleInterface hsc_env (ptext (sLit "contains a name used in an invocation of getHValueSafely")) val_name
-- Now look up the names for the value and type constructor in the type environment
mb_val_thing <- lookupTypeHscEnv hsc_env val_name
case mb_val_thing of
Nothing -> throwCmdLineErrorS dflags $ missingTyThingError val_name
Just (AnId id) -> do
-- Check the value type in the interface against the type recovered from the type constructor
-- before finally casting the value to the type we assume corresponds to that constructor
if expected_type `eqType` idType id
then do
-- Link in the module that contains the value, if it has such a module
case nameModule_maybe val_name of
Just mod -> do linkModule hsc_env mod
return ()
Nothing -> return ()
            -- Find the value that we just linked in and cast it given that we have proved its type
hval <- getHValue hsc_env val_name
return (Just hval)
else return Nothing
Just val_thing -> throwCmdLineErrorS dflags $ wrongTyThingError val_name val_thing
where dflags = hsc_dflags hsc_env
-- | Coerce a value as usual, but:
--
-- 1) Evaluate it immediately to get a segfault early if the coercion was wrong
--
-- 2) Wrap it in some debug messages at verbosity 3 or higher so we can see what happened
-- if it /does/ segfault
lessUnsafeCoerce :: DynFlags -> String -> a -> IO b
lessUnsafeCoerce dflags context what = do
debugTraceMsg dflags 3 $ (ptext $ sLit "Coercing a value in") <+> (text context) <> (ptext $ sLit "...")
output <- evaluate (unsafeCoerce# what)
debugTraceMsg dflags 3 $ ptext $ sLit "Successfully evaluated coercion"
return output
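-- A sketch of the intended usage pattern (mirroring the call made in
-- 'getValueSafely' above):
--
-- > value <- lessUnsafeCoerce dflags "getValueSafely" hval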
-- | Finds the 'Name' corresponding to the given 'RdrName' in the
-- context of the 'ModuleName'. Returns @Nothing@ if no such 'Name'
-- could be found. Any other condition results in an exception:
--
-- * If the module could not be found
-- * If we could not determine the imports of the module
--
-- Can only be used for looking up names while loading plugins (and is
-- *not* suitable for use within plugins). The interface file is
-- loaded very partially: just enough that it can be used, without its
-- rules and instances affecting (and being linked from!) the module
-- being compiled. This was introduced by 57d6798.
--
-- See Note [Care with plugin imports] in LoadIface.
lookupRdrNameInModuleForPlugins :: HscEnv -> ModuleName -> RdrName -> IO (Maybe Name)
lookupRdrNameInModuleForPlugins hsc_env mod_name rdr_name = do
-- First find the package the module resides in by searching exposed packages and home modules
found_module <- findImportedModule hsc_env mod_name Nothing
case found_module of
Found _ mod -> do
-- Find the exports of the module
(_, mb_iface) <- initTcInteractive hsc_env $
initIfaceTcRn $
loadPluginInterface doc mod
case mb_iface of
Just iface -> do
-- Try and find the required name in the exports
let decl_spec = ImpDeclSpec { is_mod = mod_name, is_as = mod_name
, is_qual = False, is_dloc = noSrcSpan }
imp_spec = ImpSpec decl_spec ImpAll
env = mkGlobalRdrEnv (gresFromAvails (Just imp_spec) (mi_exports iface))
case lookupGRE_RdrName rdr_name env of
[gre] -> return (Just (gre_name gre))
[] -> return Nothing
_ -> panic "lookupRdrNameInModule"
Nothing -> throwCmdLineErrorS dflags $ hsep [ptext (sLit "Could not determine the exports of the module"), ppr mod_name]
err -> throwCmdLineErrorS dflags $ cannotFindModule dflags mod_name err
where
dflags = hsc_dflags hsc_env
doc = ptext (sLit "contains a name used in an invocation of lookupRdrNameInModule")
wrongTyThingError :: Name -> TyThing -> SDoc
wrongTyThingError name got_thing = hsep [ptext (sLit "The name"), ppr name, ptext (sLit "is not that of a value but rather a"), pprTyThingCategory got_thing]
missingTyThingError :: Name -> SDoc
missingTyThingError name = hsep [ptext (sLit "The name"), ppr name, ptext (sLit "is not in the type environment: are you sure it exists?")]
throwCmdLineErrorS :: DynFlags -> SDoc -> IO a
throwCmdLineErrorS dflags = throwCmdLineError . showSDoc dflags
throwCmdLineError :: String -> IO a
throwCmdLineError = throwGhcExceptionIO . CmdLineError
#endif
|
urbanslug/ghc
|
compiler/main/DynamicLoading.hs
|
Haskell
|
bsd-3-clause
| 10,751
|
{-# LANGUAGE PolyKinds , GADTs, ScopedTypeVariables, PatternSynonyms,
ViewPatterns #-}
module T12968 where
data TypeRep (a :: k)
data TRAppG (fun :: k2) where
TRAppG :: forall k1 k2 (a :: k1 -> k2) (b :: k1) .
TypeRep a -> TypeRep b -> TRAppG (a b)
pattern TRApp :: forall k2 (fun :: k2). ()
=> forall k1 (a :: k1 -> k2) (b :: k1). (fun ~ a b)
=> TypeRep a -> TypeRep b -> TypeRep fun
pattern TRApp a b <- ((undefined :: TypeRep fun -> TRAppG fun) -> TRAppG a b)
|
sdiehl/ghc
|
testsuite/tests/patsyn/should_compile/T12968.hs
|
Haskell
|
bsd-3-clause
| 515
|
module Distribution.Simple.Test.LibV09
( runTest
-- Test stub
, simpleTestStub
, stubFilePath, stubMain, stubName, stubWriteLog
, writeSimpleTestStub
) where
import Distribution.Compat.CreatePipe ( createPipe )
import Distribution.Compat.Environment ( getEnvironment )
import Distribution.Compat.TempFile ( openTempFile )
import Distribution.ModuleName ( ModuleName )
import qualified Distribution.PackageDescription as PD
import Distribution.Simple.Build.PathsModule ( pkgPathEnvVar )
import Distribution.Simple.BuildPaths ( exeExtension )
import Distribution.Simple.Compiler ( compilerInfo )
import Distribution.Simple.Hpc ( guessWay, markupTest, tixDir, tixFilePath )
import Distribution.Simple.InstallDirs
( fromPathTemplate, initialPathTemplateEnv, PathTemplateVariable(..)
, substPathTemplate , toPathTemplate, PathTemplate )
import qualified Distribution.Simple.LocalBuildInfo as LBI
import Distribution.Simple.Setup
( TestFlags(..), TestShowDetails(..), fromFlag, configCoverage )
import Distribution.Simple.Test.Log
import Distribution.Simple.Utils
( die, notice, rawSystemIOWithEnv, addLibraryPath )
import Distribution.System ( Platform (..) )
import Distribution.TestSuite
import Distribution.Text
import Distribution.Verbosity ( normal )
import Control.Exception ( bracket )
import Control.Monad ( when, unless )
import Data.Maybe ( mapMaybe )
import System.Directory
( createDirectoryIfMissing, doesDirectoryExist, doesFileExist
, getCurrentDirectory, removeDirectoryRecursive, removeFile
, setCurrentDirectory )
import System.Exit ( ExitCode(..), exitWith )
import System.FilePath ( (</>), (<.>) )
import System.IO ( hClose, hGetContents, hPutStr )
runTest :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> TestFlags
-> PD.TestSuite
-> IO TestSuiteLog
runTest pkg_descr lbi flags suite = do
let isCoverageEnabled = fromFlag $ configCoverage $ LBI.configFlags lbi
way = guessWay lbi
pwd <- getCurrentDirectory
existingEnv <- getEnvironment
let cmd = LBI.buildDir lbi </> stubName suite
</> stubName suite <.> exeExtension
-- Check that the test executable exists.
exists <- doesFileExist cmd
unless exists $ die $ "Error: Could not find test program \"" ++ cmd
++ "\". Did you build the package first?"
-- Remove old .tix files if appropriate.
unless (fromFlag $ testKeepTix flags) $ do
let tDir = tixDir distPref way $ PD.testName suite
exists' <- doesDirectoryExist tDir
when exists' $ removeDirectoryRecursive tDir
-- Create directory for HPC files.
createDirectoryIfMissing True $ tixDir distPref way $ PD.testName suite
-- Write summary notices indicating start of test suite
notice verbosity $ summarizeSuiteStart $ PD.testName suite
suiteLog <- bracket openCabalTemp deleteIfExists $ \tempLog -> do
(rIn, wIn) <- createPipe
(rOut, wOut) <- createPipe
-- Prepare standard input for test executable
--appendFile tempInput $ show (tempInput, PD.testName suite)
hPutStr wIn $ show (tempLog, PD.testName suite)
hClose wIn
-- Run test executable
_ <- do let opts = map (testOption pkg_descr lbi suite) $ testOptions flags
dataDirPath = pwd </> PD.dataDir pkg_descr
tixFile = pwd </> tixFilePath distPref way (PD.testName suite)
pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath)
: existingEnv
shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled]
++ pkgPathEnv
-- Add (DY)LD_LIBRARY_PATH if needed
shellEnv' <- if LBI.withDynExe lbi
then do
let (Platform _ os) = LBI.hostPlatform lbi
clbi = LBI.getComponentLocalBuildInfo
lbi
(LBI.CTestName
(PD.testName suite))
paths <- LBI.depLibraryPaths
True False lbi clbi
return (addLibraryPath os paths shellEnv)
else return shellEnv
rawSystemIOWithEnv verbosity cmd opts Nothing (Just shellEnv')
-- these handles are closed automatically
(Just rIn) (Just wOut) (Just wOut)
-- Generate final log file name
let finalLogName l = testLogDir
</> testSuiteLogPath
(fromFlag $ testHumanLog flags) pkg_descr lbi
(testSuiteName l) (testLogs l)
-- Generate TestSuiteLog from executable exit code and a machine-
-- readable test log
suiteLog <- fmap ((\l -> l { logFile = finalLogName l }) . read)
$ readFile tempLog
-- Write summary notice to log file indicating start of test suite
appendFile (logFile suiteLog) $ summarizeSuiteStart $ PD.testName suite
-- Append contents of temporary log file to the final human-
-- readable log file
logText <- hGetContents rOut
appendFile (logFile suiteLog) logText
-- Write end-of-suite summary notice to log file
appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog
-- Show the contents of the human-readable log file on the terminal
-- if there is a failure and/or detailed output is requested
let details = fromFlag $ testShowDetails flags
whenPrinting = when $ (details > Never)
&& (not (suitePassed $ testLogs suiteLog) || details == Always)
&& verbosity >= normal
whenPrinting $ putStr $ unlines $ lines logText
return suiteLog
-- Write summary notice to terminal indicating end of test suite
notice verbosity $ summarizeSuiteFinish suiteLog
when isCoverageEnabled $
markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite
return suiteLog
where
deleteIfExists file = do
exists <- doesFileExist file
when exists $ removeFile file
testLogDir = distPref </> "test"
openCabalTemp = do
(f, h) <- openTempFile testLogDir $ "cabal-test-" <.> "log"
hClose h >> return f
distPref = fromFlag $ testDistPref flags
verbosity = fromFlag $ testVerbosity flags
-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't
-- necessarily a path.
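-- A hedged illustration (not part of the original module): with the
-- environment built below, a template that mentions the test-suite variable
-- expands to the suite's name, e.g. something along the lines of
--
-- > fromPathTemplate (substPathTemplate env (toPathTemplate "$test-suite.log"))
--
-- yielding @"<suite name>.log"@, assuming @$test-suite@ is the textual form
-- of 'TestSuiteNameVar'.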
testOption :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> PD.TestSuite
-> PathTemplate
-> String
testOption pkg_descr lbi suite template =
fromPathTemplate $ substPathTemplate env template
where
env = initialPathTemplateEnv
(PD.package pkg_descr) (LBI.pkgKey lbi)
(compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++
[(TestSuiteNameVar, toPathTemplate $ PD.testName suite)]
-- Test stub ----------
-- | The name of the stub executable associated with a library 'TestSuite'.
stubName :: PD.TestSuite -> FilePath
stubName t = PD.testName t ++ "Stub"
-- | The filename of the source file for the stub executable associated with a
-- library 'TestSuite'.
stubFilePath :: PD.TestSuite -> FilePath
stubFilePath t = stubName t <.> "hs"
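-- As a quick, hedged illustration (not in the original source): for a test
-- suite named @foo@, the two definitions above give
--
-- > stubName t     == "fooStub"
-- > stubFilePath t == "fooStub.hs"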
-- | Write the source file for a library 'TestSuite' stub executable.
writeSimpleTestStub :: PD.TestSuite -- ^ library 'TestSuite' for which a stub
-- is being created
-> FilePath -- ^ path to directory where stub source
-- should be located
-> IO ()
writeSimpleTestStub t dir = do
createDirectoryIfMissing True dir
let filename = dir </> stubFilePath t
PD.TestSuiteLibV09 _ m = PD.testInterface t
writeFile filename $ simpleTestStub m
-- | Source code for library test suite stub executable
simpleTestStub :: ModuleName -> String
simpleTestStub m = unlines
[ "module Main ( main ) where"
, "import Distribution.Simple.Test.LibV09 ( stubMain )"
, "import " ++ show (disp m) ++ " ( tests )"
, "main :: IO ()"
, "main = stubMain tests"
]
-- | Main function for test stubs. It was once written directly into the stub,
-- but minimizing the amount of code actually in the stub maximizes the number
-- of errors that can be detected when Cabal itself is compiled.
stubMain :: IO [Test] -> IO ()
stubMain tests = do
(f, n) <- fmap read getContents
dir <- getCurrentDirectory
results <- tests >>= stubRunTests
setCurrentDirectory dir
stubWriteLog f n results
-- | The test runner used in library "TestSuite" stub executables. Runs a list
-- of 'Test's. An executable calling this function is meant to be invoked as
-- the child of a Cabal process during @.\/setup test@. A 'TestSuiteLog',
-- provided by Cabal, is read from the standard input; it supplies the name of
-- the test suite and the location of the machine-readable test suite log file.
-- Human-readable log information is written to the standard output for capture
-- by the calling Cabal process.
stubRunTests :: [Test] -> IO TestLogs
stubRunTests tests = do
logs <- mapM stubRunTests' tests
return $ GroupLogs "Default" logs
where
stubRunTests' (Test t) = do
l <- run t >>= finish
summarizeTest normal Always l
return l
where
finish (Finished result) =
return TestLog
{ testName = name t
, testOptionsReturned = defaultOptions t
, testResult = result
}
finish (Progress _ next) = next >>= finish
stubRunTests' g@(Group {}) = do
logs <- mapM stubRunTests' $ groupTests g
return $ GroupLogs (groupName g) logs
stubRunTests' (ExtraOptions _ t) = stubRunTests' t
maybeDefaultOption opt =
maybe Nothing (\d -> Just (optionName opt, d)) $ optionDefault opt
defaultOptions testInst = mapMaybe maybeDefaultOption $ options testInst
-- | From a test stub, write the 'TestSuiteLog' to a temporary file for the
-- calling Cabal process to read.
stubWriteLog :: FilePath -> String -> TestLogs -> IO ()
stubWriteLog f n logs = do
let testLog = TestSuiteLog { testSuiteName = n, testLogs = logs, logFile = f }
writeFile (logFile testLog) $ show testLog
when (suiteError logs) $ exitWith $ ExitFailure 2
when (suiteFailed logs) $ exitWith $ ExitFailure 1
exitWith ExitSuccess
|
DavidAlphaFox/ghc
|
libraries/Cabal/Cabal/Distribution/Simple/Test/LibV09.hs
|
Haskell
|
bsd-3-clause
| 10,854
|
module B4 (myFringe) where
import D4 hiding (sumSquares)
import qualified D4
instance SameOrNot Float
where
isSameOrNot a b = a == b
isNotSame a b = a /= b
myFringe :: (Tree a) -> [a]
myFringe (Leaf x) = [x]
myFringe (Branch left right) = myFringe right
sumSquares ((x : xs)) = (x ^ 2) + (sumSquares xs)
sumSquares [] = 0
|
kmate/HaRe
|
old/testing/renaming/B4_AstOut.hs
|
Haskell
|
bsd-3-clause
| 343
|
{-# OPTIONS -fglasgow-exts -O -dshow-passes #-}
module Foo where
import GHC.Base
foo :: Int -> Int
foo (I# n#) = bar i i
where i# = n# +# 1#
i = I# i#
bar :: Int -> Int -> Int
{-# INLINE [0] bar #-}
bar _ n = n
{- The trouble here was
*** Simplify:
Result size = 25
Result size = 25
Result size = 25
Result size = 25
Result size = 25
*** Simplify:
Result size = 25
Result size = 25
Result size = 25
Result size = 25
Result size = 25
etc.
The reason was this:
x = n# +# 1#
i = I# x
Being an unboxed value, we were treating the argument context of x
as interesting, and hence inlining x in the arg of I#. But then we just
float it out again, giving an infinite loop.
-}
|
ezyang/ghc
|
testsuite/tests/eyeball/inline2.hs
|
Haskell
|
bsd-3-clause
| 810
|
module P004Spec where
import qualified P004 as P
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "isPalindrome" $
it "回文数判定" $ do
let t = True
let f = False
let input = [0, 1, 9, 10, 11, 12, 21, 22, 100, 101, 111, 112, 121, 1001, 1010, 2022, 3303, 4444, 4554]
let expected = [t, t, t, f, t, f, f, t, f, t, t, f, t, t, f, f, f, t, t]
map P.isPalindrome input `shouldBe` expected
describe "solveBasic" $
it "N桁の数を掛け合わせてできる最大の回文数" $
map P.solveBasic [1, 2] `shouldBe` [9, 9009]
describe "solve" $
it "N桁の数を掛け合わせてできる最大の回文数" $
map P.solve [1, 2] `shouldBe` [9, 9009]
|
yyotti/euler_haskell
|
test/P004Spec.hs
|
Haskell
|
mit
| 747
|
#!/usr/bin/env runhaskell
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- import Control.Monad
-- import Data.Functor
-- import Data.Maybe
-- import Data.Monoid
import Debug.Trace
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import BasicPrelude hiding (empty)
import Prelude.Unicode
import Turtle
import Network.URI
import qualified Filesystem.Path.CurrentOS as P
f & g = g $ f
parseVCSLine ∷ Text → Either Text (URI,Text)
parseVCSLine l = case T.split (≡'@') l of
[uriStr,branch] → case parseURI (removeTrailingSlash $ T.unpack uriStr) of
Nothing → Left l
Just uri → Right (uri,branch)
_ → Left l
getVCSInfo ∷ Text → Shell [Text]
getVCSInfo pkg = do
vcs ← empty
& inproc "cabal-db" ["vcs", pkg]
& inproc "grep" ["://"]
& inshell "sed -r 's:\\x1B\\[[0-9;]*[mK]::g; s:^ *::'"
return $ lines vcs
stripDotGit x = fromMaybe x $ T.stripSuffix ".git" x
pathFromGitURI ∷ Text → Maybe Text
pathFromGitURI p = r $ reverse $ T.split (≡'/') p
where r [] = Nothing
r [""] = Nothing
r ("":xs) = r xs
r (x:_) = Just $ stripDotGit x
run ∷ Text → IO ExitCode
run x = do
wd ← pwd
echo $ T.pack $ concat["(", P.encodeString wd, ")$", T.unpack x]
shell x empty
removeTrailingSlash x = fromMaybe x $ T.unpack <$> T.stripSuffix "/" (T.pack x)
printVCS ∷ (URI,Text) → IO ()
printVCS (uri,br) = do
(pathFromGitURI $ T.pack $ uriPath uri) & \case
Nothing → return()
Just "zlib" → return()
Just d → do
h ← home
echo $ "cd " <> T.pack(P.encodeString(h <> "warpdeps"))
cd $ h <> "warpdeps"
run $ "git clone " <> stripDotGit(show uri) <> ".git"
ok ← testdir $ fromText d
if not ok then return() else do
cd $ fromText d
run $ "git checkout " <> br
run $ "src do-all -m program"
run $ "src push"
return()
main ∷ IO ()
main = sh $ do
x ← map parseVCSLine <$> getVCSInfo "warp"
forM (lefts x) $ traceM . T.unpack . ("Failed to parse VCS URI line: " <>)
forM (rights x) $ liftIO . printVCS
|
sourcegraph/srclib-haskell
|
process-all-dependencies.hs
|
Haskell
|
mit
| 2,199
|
import Control.Monad (unless)
import Test.Hspec (Spec, describe, expectationFailure, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import House (rhyme)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = describe "rhyme" $ do
-- First we test the input, line by line, to give more
-- useful error messages.
it "matches lines" $ sequence_ lineAssertions
-- Finally, because testing individual lines cannot
-- detect a missing newline at the end of the
-- lyrics, we also test the full song.
it "matches full song" $ rhyme `shouldBe` lyrics
where
lineAssertions = zipWith checkLine [1 :: Int ..] $ zipMaybe (lines rhyme) (lines lyrics)
checkLine lineno (got, want) =
unless (got == want) $
expectationFailure $ "mismatch at line " ++ show lineno ++ "\nexpected: " ++ show want ++ "\n but got: " ++ show got
zipMaybe [] [] = []
zipMaybe (x:xs) [] = (Just x , Nothing) : zipMaybe xs []
zipMaybe [] (y:ys) = (Nothing, Just y ) : zipMaybe [] ys
zipMaybe (x:xs) (y:ys) = (Just x , Just y ) : zipMaybe xs ys
-- Lyrics extracted from `exercism/problem-specifications` on 2016-09-23.
lyrics :: String
lyrics = "This is the house that Jack built.\n\
\\n\
\This is the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the man all tattered and torn\n\
\that kissed the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the priest all shaven and shorn\n\
\that married the man all tattered and torn\n\
\that kissed the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the rooster that crowed in the morn\n\
\that woke the priest all shaven and shorn\n\
\that married the man all tattered and torn\n\
\that kissed the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the farmer sowing his corn\n\
\that kept the rooster that crowed in the morn\n\
\that woke the priest all shaven and shorn\n\
\that married the man all tattered and torn\n\
\that kissed the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n\
\\n\
\This is the horse and the hound and the horn\n\
\that belonged to the farmer sowing his corn\n\
\that kept the rooster that crowed in the morn\n\
\that woke the priest all shaven and shorn\n\
\that married the man all tattered and torn\n\
\that kissed the maiden all forlorn\n\
\that milked the cow with the crumpled horn\n\
\that tossed the dog\n\
\that worried the cat\n\
\that killed the rat\n\
\that ate the malt\n\
\that lay in the house that Jack built.\n"
-- 473a8c3f65f5e8aba509bad8d3632a10ee4927fe
|
exercism/xhaskell
|
exercises/practice/house/test/Tests.hs
|
Haskell
|
mit
| 4,864
|
-- CamelCase Method
-- https://www.codewars.com/kata/587731fda577b3d1b0001196
module CamelCase.JorgeVS.Kata where
import Data.Char (toUpper)
camelCase :: String -> String
camelCase = concatMap (\(x:xs) -> toUpper x:xs) . words
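-- A couple of hedged usage examples (observations about the one-liner above,
-- not part of the kata solution itself):
--
-- > camelCase "hello case"      == "HelloCase"
-- > camelCase "camel case word" == "CamelCaseWord"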
|
gafiatulin/codewars
|
src/6 kyu/CamelCase.hs
|
Haskell
|
mit
| 230
|
{-# LANGUAGE OverloadedStrings #-}
module InTheKnow.Routes.Common.Templates (
base
) where
import Prelude hiding (head, div)
import Text.Blaze.Html5 hiding (base)
import qualified Text.Blaze.Html5.Attributes as A
import Data.Text (Text)
import Data.Monoid ((<>))
base :: Text -> Html -> Html
base t content =
docTypeHtml $ do
head $ do
title (toHtml $ t <> " | InTheKnow")
body $ do
div ! A.class_ "content" $ content
div ! A.class_ "content2" $ content
|
jb55/intheknow
|
InTheKnow/Routes/Common/Templates.hs
|
Haskell
|
mit
| 488
|
{-# LANGUAGE RecordWildCards #-}
{- |
Generate and solve friction constraints for colliding objects.
-}
module Physics.Constraints.Contact.Friction where
import Control.Lens
import Physics.Constraint
import Physics.Constraints.SolutionProcessors
import Physics.Constraints.Types
import Physics.Contact.Types
import Physics.Linear
import Utils.Utils
constraintGen :: Flipping Contact
-> (PhysicalObj, PhysicalObj)
-> Constraint
constraintGen fContact ab =
flipExtract $ flipMap toConstraint fContact ab
{-# INLINE constraintGen #-}
toConstraint :: Contact
-> (PhysicalObj, PhysicalObj)
-> Constraint
toConstraint c ab = Constraint (jacobian c ab) 0
{-# INLINE toConstraint #-}
jacobian :: Contact
-> (PhysicalObj, PhysicalObj)
-> V6
jacobian Contact {..} (a, b) = ja `join3v3` jb
where
ja = ta `append2` ((p' `minusV2` xa) `crossV2` ta)
jb = tb `append2` ((p' `minusV2` xb) `crossV2` tb)
xa = _physObjPos a
xb = _physObjPos b
(P2 p') = _contactCenter
ta = negateV2 tb
tb = clockwiseV2 n
n = _contactNormal
{-# INLINE jacobian #-}
pairMu :: (Double, Double) -> Double
pairMu (ua, ub) = (ua + ub) / 2
{-# INLINE pairMu #-}
solutionProcessor :: (Double, Double)
-> Lagrangian
-> Lagrangian
-> Lagrangian
-> Processed Lagrangian
solutionProcessor ab nonpen = clampAbs (nonpen & lagrangianVal *~ pairMu ab)
{-# INLINE solutionProcessor #-}
|
ublubu/shapes
|
shapes/src/Physics/Constraints/Contact/Friction.hs
|
Haskell
|
mit
| 1,593
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
module Database.Persist.Sql.Raw where
import Database.Persist
import Database.Persist.Sql.Types
import Database.Persist.Sql.Class
import qualified Data.Map as Map
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (ReaderT, ask, MonadReader)
import Control.Monad.Trans.Resource (release)
import Data.Acquire (allocateAcquire, Acquire, mkAcquire, with)
import Data.IORef (writeIORef, readIORef, newIORef)
import Control.Exception (throwIO)
import Control.Monad (when, liftM)
import Data.Text (Text, pack)
import Control.Monad.Logger (logDebugS, runLoggingT)
import Data.Int (Int64)
import qualified Data.Text as T
import Data.Conduit
import Control.Monad.Trans.Resource (MonadResource)
rawQuery :: (MonadResource m, MonadReader env m, HasPersistBackend env SqlBackend)
=> Text
-> [PersistValue]
-> Source m [PersistValue]
rawQuery sql vals = do
srcRes <- liftPersist $ rawQueryRes sql vals
(releaseKey, src) <- allocateAcquire srcRes
src
release releaseKey
rawQueryRes
:: (MonadIO m1, MonadIO m2)
=> Text
-> [PersistValue]
-> ReaderT SqlBackend m1 (Acquire (Source m2 [PersistValue]))
rawQueryRes sql vals = do
conn <- ask
let make = do
runLoggingT ($logDebugS (pack "SQL") $ pack $ show sql ++ " " ++ show vals)
(connLogFunc conn)
getStmtConn conn sql
return $ do
stmt <- mkAcquire make stmtReset
stmtQuery stmt vals
rawExecute :: MonadIO m => Text -> [PersistValue] -> ReaderT SqlBackend m ()
rawExecute x y = liftM (const ()) $ rawExecuteCount x y
rawExecuteCount :: MonadIO m => Text -> [PersistValue] -> ReaderT SqlBackend m Int64
rawExecuteCount sql vals = do
conn <- ask
runLoggingT ($logDebugS (pack "SQL") $ pack $ show sql ++ " " ++ show vals)
(connLogFunc conn)
stmt <- getStmt sql
res <- liftIO $ stmtExecute stmt vals
liftIO $ stmtReset stmt
return res
getStmt :: MonadIO m => Text -> ReaderT SqlBackend m Statement
getStmt sql = do
conn <- ask
liftIO $ getStmtConn conn sql
getStmtConn :: SqlBackend -> Text -> IO Statement
getStmtConn conn sql = do
smap <- liftIO $ readIORef $ connStmtMap conn
case Map.lookup sql smap of
Just stmt -> return stmt
Nothing -> do
stmt' <- liftIO $ connPrepare conn sql
iactive <- liftIO $ newIORef True
let stmt = Statement
{ stmtFinalize = do
active <- readIORef iactive
if active
then do
stmtFinalize stmt'
writeIORef iactive False
else return ()
, stmtReset = do
active <- readIORef iactive
when active $ stmtReset stmt'
, stmtExecute = \x -> do
active <- readIORef iactive
if active
then stmtExecute stmt' x
else throwIO $ StatementAlreadyFinalized sql
, stmtQuery = \x -> do
active <- liftIO $ readIORef iactive
if active
then stmtQuery stmt' x
else liftIO $ throwIO $ StatementAlreadyFinalized sql
}
liftIO $ writeIORef (connStmtMap conn) $ Map.insert sql stmt smap
return stmt
-- | Execute a raw SQL statement and return its results as a
-- list.
--
-- If you're using 'Entity'@s@ (which is quite likely), then you
-- /must/ use entity selection placeholders (double question
-- mark, @??@). These @??@ placeholders are then replaced for
-- the names of the columns that we need for your entities.
-- You'll receive an error if you don't use the placeholders.
-- Please see the 'Entity'@s@ documentation for more details.
--
-- You may put value placeholders (question marks, @?@) in your
-- SQL query. These placeholders are then replaced by the values
-- you pass on the second parameter, already correctly escaped.
-- You may want to use 'toPersistValue' to help you constructing
-- the placeholder values.
--
-- Since you're giving a raw SQL statement, you don't get any
-- guarantees regarding safety. If 'rawSql' is not able to parse
-- the results of your query back, then an exception is raised.
-- However, most common problems are mitigated by using the
-- entity selection placeholder @??@, and you shouldn't see any
-- error at all if you're not using 'Single'.
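-- A hedged usage sketch: the @Person@ entity and the @person@ table below are
-- made-up names that are not defined in this module.
--
-- > getAdults :: MonadIO m => ReaderT SqlBackend m [Entity Person]
-- > getAdults = rawSql "SELECT ?? FROM person WHERE age >= ?"
-- >                    [toPersistValue (18 :: Int)]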
rawSql :: (RawSql a, MonadIO m)
=> Text -- ^ SQL statement, possibly with placeholders.
-> [PersistValue] -- ^ Values to fill the placeholders.
-> ReaderT SqlBackend m [a]
rawSql stmt = run
where
getType :: (x -> m [a]) -> a
getType = error "rawSql.getType"
x = getType run
process = rawSqlProcessRow
withStmt' colSubsts params sink = do
srcRes <- rawQueryRes sql params
liftIO $ with srcRes ($$ sink)
where
sql = T.concat $ makeSubsts colSubsts $ T.splitOn placeholder stmt
placeholder = "??"
makeSubsts (s:ss) (t:ts) = t : s : makeSubsts ss ts
makeSubsts [] [] = []
makeSubsts [] ts = [T.intercalate placeholder ts]
makeSubsts ss [] = error (concat err)
where
err = [ "rawsql: there are still ", show (length ss)
, "'??' placeholder substitutions to be made "
, "but all '??' placeholders have already been "
, "consumed. Please read 'rawSql's documentation "
, "on how '??' placeholders work."
]
run params = do
conn <- ask
let (colCount, colSubsts) = rawSqlCols (connEscapeName conn) x
withStmt' colSubsts params $ firstRow colCount
firstRow colCount = do
mrow <- await
case mrow of
Nothing -> return []
Just row
| colCount == length row -> getter mrow
| otherwise -> fail $ concat
[ "rawSql: wrong number of columns, got "
, show (length row), " but expected ", show colCount
, " (", rawSqlColCountReason x, ")." ]
getter = go id
where
go acc Nothing = return (acc [])
go acc (Just row) =
case process row of
Left err -> fail (T.unpack err)
Right r -> await >>= go (acc . (r:))
|
junjihashimoto/persistent
|
persistent/Database/Persist/Sql/Raw.hs
|
Haskell
|
mit
| 6,836
|
{-
**************************************************************
* Filename : RegTypes.hs *
* Author : Markus Forsberg *
* d97forma@dtek.chalmers.se *
* Last Modified : 5 July, 2001 *
* Lines : 219 *
**************************************************************
-}
module FST.RegTypes ( Reg(..), -- data type for the regular expression
Combinators, -- Type class for Combinators.
(<|>), -- Union combinator
(|>), -- Concatenation combinator
(<&>), -- Intersection combinator
(<->), -- Minus combinator
s, -- Symbol
eps, -- Epsilon
empty, -- Empty
complement, -- Complement
star, -- Star
plus, -- Plus
allS, -- All Symbol
allToSymbols, -- transform the 'all' symbol to union over
-- alphabet.
allFree, -- free a regular expression from 'all'
-- symbols.
reversal, -- reverse a regular expression.
acceptEps, -- Does the regular expression accept epsilon?
Symbols, -- Type class for Symbols.
symbols -- Collect the symbols in a
-- regular expression.
) where
import Data.List (nub)
{- **********************************************************
* Data type for a regular expression. *
**********************************************************
-}
data Reg a = Empty | -- []
Epsilon | -- 0
All | -- ?
Symbol a | -- a
Reg a :|: Reg a | -- [ r1 | r2 ]
Reg a :.: Reg a | -- [ r1 r2 ]
Reg a :&: Reg a | -- [ r1 & r2 ]
Complement (Reg a) | -- ~[ r1 ]
Star (Reg a) -- [ r2 ]*
deriving (Eq)
{- **********************************************************
* Combinators. *
* The regular expressions are simplified while combined. *
**********************************************************
-}
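{- A few hedged examples of the simplification performed by the instance
   below (observations about this code, not part of the original module):
     empty <|> s 'a'       == s 'a'        -- union with the empty language
     s 'a' |> eps          == s 'a'        -- concatenation with epsilon
     star (star (s 'a'))   == star (s 'a') -- nested stars collapse
-}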
infixl 5 |> -- Concatenation
infixl 4 <|> -- Union
infixl 3 <&> -- Intersection
infixl 3 <-> -- Set minus
class Combinators a where
(<|>) :: a -> a -> a -- Union
(|>) :: a -> a -> a -- Concatenation
star :: a -> a -- Kleene's star
plus :: a -> a -- Kleene's plus
empty :: a
instance Eq a => Combinators (Reg a) where
Empty <|> b = b -- [ [] | r1 ] = r1
a <|> Empty = a -- [ r1 | [] ] = r1
_ <|> (Star All) = Star All
(Star All) <|> _ = Star All
a1@(a :.: b) <|> a2@(c :.: d)
| a1 == a2 = a1
| a == c = a |> (b <|> d)
| b == d = (a <|> c) |> b
| otherwise = a1 :|: a2
a <|> b
| a == b = a -- [ r1 | r1 ] = r1
| otherwise = a :|: b
Empty |> _ = empty -- [ [] r1 ] = []
_ |> Empty = empty -- [ r1 [] ] = []
Epsilon |> b = b -- [ 0 r1 ] = r1
a |> Epsilon = a -- [ r1 0 ] = r1
a |> b = a :.: b
star (Star a) = star a -- [r1]** = [r1]*
star (Epsilon) = eps -- [0]* = 0
star (Empty) = eps -- [ [] ]* = 0
star a = Star a
plus a = a |> star a
empty = Empty
{- Intersection -}
(<&>) :: Eq a => Reg a -> Reg a -> Reg a
_ <&> Empty = Empty -- [ r1 & [] ] = []
Empty <&> _ = Empty -- [ [] & r1 ] = []
(Star All) <&> a = a
a <&> (Star All) = a
a <&> b
| a == b = a -- [ r1 & r1 ] = r1
| otherwise = a :&: b
{- Minus. Definition A - B = A & ~B -}
(<->) :: Eq a => Reg a -> Reg a -> Reg a
Empty <-> _ = empty -- [ [] - r1 ] = []
a <-> Empty = a -- [ r1 - [] ] = r1
a <-> b
| a == b = empty -- [ r1 - r1 ] = []
| otherwise = a <&> (complement b)
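{- Hedged example of the minus combinator (an observation, not original code):
     (s 'a' <|> s 'b') <-> s 'b' == (s 'a' :|: s 'b') :&: Complement (s 'b')
-}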
s :: a -> Reg a
s a = Symbol a
eps :: Reg a
eps = Epsilon
allS :: Reg a
allS = All
complement :: Eq a => Reg a -> Reg a
complement Empty = star allS -- ~[ [] ] = ?*
complement Epsilon = plus allS -- ~[ 0 ] = [? ?*]
complement (Star All) = empty
complement (Complement a) = a
complement a = Complement a
{- *******************************************************************
* allToSymbols: ? -> [a|..] with respect to an alphabet [a] *
* allFreeReg: Construct a ?-free regular expression with respect *
* to an alphabet [a] *
*******************************************************************
-}
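{- Hedged examples over the alphabet "ab" (observations about the definitions
   below, not part of the original module):
     allToSymbols "ab"            == (s 'a' :|: s 'b')
     allFree (allS :.: s 'a') "ab" == ((s 'a' :|: s 'b') :.: s 'a')
-}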
allToSymbols :: Eq a => [a] -> Reg a
allToSymbols sigma = case sigma of
[] -> empty
ys -> foldr1 (:|:) [s a| a <- ys]
allFree :: Eq a => Reg a -> [a] -> Reg a
allFree (a :|: b) sigma = (allFree a sigma) :|: (allFree b sigma)
allFree (a :.: b) sigma = (allFree a sigma) :.: (allFree b sigma)
allFree (a :&: b) sigma = (allFree a sigma) :&: (allFree b sigma)
allFree (Complement a) sigma = Complement (allFree a sigma)
allFree (Star a) sigma = Star (allFree a sigma)
allFree (All) sigma = allToSymbols sigma
allFree r _ = r
{- **********************************************************
* reversal: reverse the language denoted by the regular *
* expression. *
**********************************************************
-}
reversal :: Eq a => Reg a -> Reg a
reversal (a :|: b) = (reversal a) :|: (reversal b)
reversal (a :.: b) = (reversal b) :.: (reversal a)
reversal (a :&: b) = (reversal a) :&: (reversal b)
reversal (Complement a) = Complement (reversal a)
reversal (Star a) = Star (reversal a)
reversal r = r
{- ***********************************************************
* acceptEps: Examines if a regular expression accepts *
* the empty string. *
***********************************************************
-}
acceptEps :: Eq a => Reg a -> Bool
acceptEps (Epsilon) = True
acceptEps (Star _) = True
acceptEps (a :|: b) = acceptEps a || acceptEps b
acceptEps (a :.: b) = acceptEps a && acceptEps b
acceptEps (a :&: b) = acceptEps a && acceptEps b
acceptEps (Complement a) = not (acceptEps a)
acceptEps _ = False
{- **********************************************************
* Symbols: type class for the collection of symbols in a *
* expression. *
**********************************************************
-}
class Symbols f where
symbols :: Eq a => f a -> [a]
instance Symbols Reg where
symbols (Symbol a) = [a]
symbols (a :.: b) = nub $ (symbols a) ++ (symbols b)
symbols (a :|: b) = nub $ (symbols a) ++ (symbols b)
symbols (a :&: b) = nub $ (symbols a) ++ (symbols b)
symbols (Complement a) = symbols a
symbols (Star a) = symbols a
symbols _ = []
{- **********************************************************
* Instance of Show (Reg a) *
**********************************************************
-}
instance Show a => Show (Reg a) where
show (Empty) = "[0 - 0]"
show (Epsilon) = "0"
show (Symbol a) = show a
show (All) = "?"
show (Complement a) = "~" ++ "[" ++ show a ++ "]"
show (Star a) = "[" ++ show a ++ "]* "
show (a :|: b) = "[" ++ show a ++ " | " ++ show b ++ "]"
show (a :.: b) = "[" ++ show a ++ " " ++ show b ++ "]"
show (a :&: b) = "[" ++ show a ++ " & " ++ show b ++ "]"
|
SAdams601/ParRegexSearch
|
test/fst-0.9.0.1/FST/RegTypes.hs
|
Haskell
|
mit
| 7,853
|
-- Tube strike options calculator
-- http://www.codewars.com/kata/568ade64cfd7a55d9300003e/
module Codewars.Kata.Tube where
import Codewars.Kata.Tube.Types
calculator :: Double -> Double -> Double -> Decision
calculator distance busDrive busWalk | 60 * (distance / 5) < 10 = Walk
| 60 * (distance / 5) > 120 = Bus
| (distance / 5) <= (busWalk / 5) + (busDrive / 8) = Walk
| otherwise = Bus
|
gafiatulin/codewars
|
src/7 kyu/Tube.hs
|
Haskell
|
mit
| 504
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module RLPTest where
import Control.Monad (sequence)
import Data.Aeson
import Data.Aeson.Types (typeMismatch)
import Data.ByteString
import qualified Data.ByteString.Lazy as BL
import qualified Data.Map.Strict as M
import Data.Maybe (fromJust)
import Data.Semigroup ((<>))
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Vector as V
import Development.IncludeFile
import qualified Data.RLP as RLP
data RLPTestInput = StringInput ByteString
| NumberInput Integer
| ListInput [RLPTestInput]
deriving (Read, Show)
-- The test JSON takes advantage of the fact that you can mix and match types in JSON arrays
-- so naturally, that doesn't play well with haskell, and we can't *REALLY* make FromJSON and ToJSON
-- maintain identity. So we cheat :P. Our biggest issue is that RLP doesn't have an "Integer" type
-- it's effectively stored as a big-endian String. So we need to really handle the case of
-- StringInput == NumberInput and vice versa
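-- A hedged illustration of the mixed case described above (assuming
-- RLP.unpackBE reads the bytes as a big-endian integer, as the comment says):
--
-- > StringInput (pack [0x04, 0x00]) == NumberInput 1024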
instance Eq RLPTestInput where
(StringInput s1) == (StringInput s2) = s1 == s2
(NumberInput n1) == (NumberInput n2) = n1 == n2
(ListInput l1) == (ListInput l2) = l1 == l2
StringInput{} == ListInput{} = False -- impossible
(StringInput s) == (NumberInput n) = RLP.unpackBE (unpack s) == n -- todo this case
NumberInput{} == ListInput{} = False -- also impossible
n@NumberInput{} == s@StringInput{} = s == n -- take advantage of the commutative case
o1 == o2 = False
instance RLP.RLPEncodable RLPTestInput where
rlpEncode (StringInput s) = RLP.String s
rlpEncode (NumberInput n) = RLP.rlpEncode n
rlpEncode (ListInput xs) = RLP.Array $ RLP.rlpEncode <$> xs
rlpDecode (RLP.String s) = Right (StringInput s) -- todo this totes wont work for NumInputs
rlpDecode (RLP.Array xs) = ListInput <$> sequence (RLP.rlpDecode <$> xs)
data RLPTest = RLPTest { input :: RLPTestInput, output :: T.Text }
deriving (Eq, Read, Show)
instance FromJSON RLPTestInput where
parseJSON (String s) | T.null s = return (StringInput "")
| otherwise = case T.head s of
'#' -> return . NumberInput . read . T.unpack $ T.tail s
_ -> return . StringInput $ TE.encodeUtf8 s
parseJSON (Number n) = return . NumberInput $ round n
parseJSON (Array a) = ListInput . V.toList <$> V.forM a parseJSON
parseJSON x = typeMismatch "RLPTestInput" x
instance ToJSON RLPTestInput where
toJSON (StringInput s) = String $ TE.decodeUtf8 s
toJSON (NumberInput n) = Number $ fromIntegral n
toJSON (ListInput xs) = toJSON xs
instance FromJSON RLPTest where
parseJSON (Object o) = RLPTest <$> (o .: "in") <*> (o .: "out")
parseJSON x = typeMismatch "RLPTest" x
instance ToJSON RLPTest where
toJSON RLPTest{..} = object [ "in" .= input, "out" .= output ]
toEncoding RLPTest{..} = pairs ( "in" .= input <> "out" .= output )
$(includeFileInSource "test/resources/rlptest.json" "officialRLPTests'")
officialRLPTests :: Either String [(T.Text, RLPTest)]
officialRLPTests = M.toList <$> eitherDecode (BL.fromStrict officialRLPTests')
|
iostat/relapse
|
test/RLPTest.hs
|
Haskell
|
mit
| 3,532
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Spark.Core.ColumnSpec where
import Test.Hspec
import Data.List.NonEmpty(NonEmpty( (:|) ))
import Spark.Core.Context
import Spark.Core.Dataset
import Spark.Core.Column
import Spark.Core.Row
import Spark.Core.Functions
import Spark.Core.ColumnFunctions
import Spark.Core.SimpleAddSpec(run)
import Spark.Core.Internal.LocalDataFunctions(iPackTupleObs)
import Spark.Core.Internal.DatasetFunctions(untypedLocalData)
myScaler :: Column ref Double -> Column ref Double
myScaler col =
let cnt = asDouble (countCol col)
m = sumCol col / cnt
centered = col .- m
stdDev = sumCol (centered * centered) / cnt
in centered ./ stdDev
spec :: Spec
spec = do
describe "local data operations" $ do
run "broadcastPair_struct" $ do
let ds = dataset [1] :: Dataset Int
let cnt = countCol (asCol ds)
let c = collect (asCol ds .+ cnt)
res <- exec1Def c
res `shouldBe` [2]
run "LocalPack (doubles)" $ do
let x = untypedLocalData (1 :: LocalData Double)
let x2 = iPackTupleObs (x :| [x])
res <- exec1Def x2
res `shouldBe` rowArray [DoubleElement 1, DoubleElement 1]
run "LocalPack" $ do
let x = untypedLocalData (1 :: LocalData Int)
let x2 = iPackTupleObs (x :| [x])
res <- exec1Def x2
res `shouldBe` rowArray [IntElement 1, IntElement 1]
run "BroadcastPair" $ do
let x = 1 :: LocalData Int
let ds = dataset [2, 3] :: Dataset Int
let ds2 = broadcastPair ds x
res <- exec1Def (collect (asCol ds2))
res `shouldBe` [(2, 1), (3, 1)]
-- TODO: this combines a lot of elements together.
describe "columns - integration" $ do
run "mean" $ do
let ds = dataset [-1, 1] :: Dataset Double
let c = myScaler (asCol ds)
res <- exec1Def (collect c)
res `shouldBe` [-1, 1]
|
krapsh/kraps-haskell
|
test-integration/Spark/Core/ColumnSpec.hs
|
Haskell
|
apache-2.0
| 1,901
|
import Prelude ((+),(-),(==),(/=),(*),($),(.),(++),(&&),(||),(!!),div,mod,map,take,splitAt,replicate,length,fromIntegral,drop,head,Eq,Show)
import Data.ByteString (ByteString(..),append,cons,pack)
import Data.Word (Word8(..))
import Crypto.Hash.SHA256
type Bool = Word8
data Node = Terminal {h::ByteString, s::[Bool], v::ByteString}
| Branch {h::ByteString, s::[Bool], l::Node, r::Node} deriving (Eq,Show)
byte [a,b,c,d, e,f,g,h] = 0x80*a + 0x40*b + 0x20*c + 0x10*d + 8*e + 4*f + 2*g + h
packBits bs = if bs == [] then pack [] else (byte l) `cons` (packBits rr)
where (l, rr) = splitAt 8 $ bs ++ replicate (7 - (length bs - 1) `mod` 8) 0
bitArr bs = ((l+7)`div`8) `cons` (l`mod`8) `cons` (packBits bs) where l = fromIntegral (length bs)
terminal bs v = Terminal (hash $ bitArr bs `append` (pack $ replicate 64 0) `append` v) bs v
branch bs l r = Branch (hash $ bitArr bs `append` (h l) `append` (h r)) bs l r
withS s (Terminal _ _ v) = terminal s v; withS s (Branch _ _ l r) = branch s l r
commonPrefix (x:xs) (y:ys) = if x == y then x : commonPrefix xs ys else []; commonPrefix _ _ = []
empty = Terminal (pack $ replicate 32 0) [] (pack [])
set k v n = if s n == k || n == empty then terminal k v else
if s n == common then case k!!(length common) of -- a child of n will take (k,v), now n
{0 -> branch common (set new v (l n)) (r n); 1 -> branch common (l n) (set new v (r n))}
else case k!!(length common) of -- k branches of somewhere along (s n)
{0 -> branch common (terminal new v) (withS old n); 1 -> branch common (withS old n) (terminal new v)}
where new = drop (length common+1) k; old = drop (length common+1) (s n); common = commonPrefix k (s n)
|
andres-erbsen/dename
|
utils/hsverify/cbht.hs
|
Haskell
|
apache-2.0
| 1,686
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.PersistentVolume where
import GHC.Generics
import Data.Text
import Kubernetes.V1.ObjectMeta
import Kubernetes.V1.PersistentVolumeSpec
import Kubernetes.V1.PersistentVolumeStatus
import qualified Data.Aeson
-- | PersistentVolume (PV) is a storage resource provisioned by an administrator. It is analogous to a node. More info: http://releases.k8s.io/HEAD/docs/user-guide/persistent-volumes.md
data PersistentVolume = PersistentVolume
{ kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
, apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
, metadata :: Maybe ObjectMeta -- ^ Standard object's metadata. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata
, spec :: Maybe PersistentVolumeSpec -- ^ Spec defines a specification of a persistent volume owned by the cluster. Provisioned by an administrator. More info: http://releases.k8s.io/HEAD/docs/user-guide/persistent-volumes.md#persistent-volumes
, status :: Maybe PersistentVolumeStatus -- ^ Status represents the current information/status for the persistent volume. Populated by the system. Read-only. More info: http://releases.k8s.io/HEAD/docs/user-guide/persistent-volumes.md#persistent-volumes
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON PersistentVolume
instance Data.Aeson.ToJSON PersistentVolume
|
minhdoboi/deprecated-openshift-haskell-api
|
kubernetes/lib/Kubernetes/V1/PersistentVolume.hs
|
Haskell
|
apache-2.0
| 2,004
|
module EdictDB where
import System.IO
import qualified Data.Text as DT
import Data.Text.Encoding
import qualified Data.ByteString.Char8 as C
type Word = (String, Char)
dbLookup :: String -> Maybe Word
dbLookup = undefined
returnLine :: IO String -> String
returnLine = undefined
getDict :: IO String
getDict = do
y <- openFile "edict" ReadMode
hSetEncoding y latin1
z <- hGetContents y
let k = decodeLatin1 $ C.pack z
hClose y
return $ DT.unpack k
|
MarkMcCaskey/Refcon
|
EdictDB.hs
|
Haskell
|
apache-2.0
| 468
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Store (Id, Url, FileType, genId, genPut, put, get) where
import Control.Monad.Trans.AWS
(sinkBody, runResourceT, runAWST, send, presignURL, newEnv
,Env, Seconds, toBody, RqBody, Region(..), Credentials(..))
import Network.AWS.S3
(getObject, putObject, gorsBody, PutObjectResponse
,BucketName(..), ObjectKey(..))
import Control.Monad.Trans
(liftIO)
import Control.Lens
(view)
import Data.Conduit.Binary
(sinkLbs)
import Data.Time
(getCurrentTime)
import Data.ByteString
(ByteString)
import Data.Text
(pack)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BSL
import Data.UUID
(UUID)
import System.Random
(randomIO)
import Config
(Domain)
import User
(Upload(..), FileType, FileName, Token)
import Network.S3
(S3Keys(..), S3Request(..), S3Method(S3PUT), generateS3URL, signedRequest)
import System.Environment as Sys
type Id
= UUID
type Url
= String
genId :: IO Id
genId = randomIO
genPut :: Domain -> Upload -> IO Url
genPut domain Upload{..} =
do
credentials <- (S3Keys . BS.pack) <$> Sys.getEnv "MS_AWS_ID" <*> (BS.pack <$> Sys.getEnv "MS_AWS_KEY")
let request = S3Request S3PUT (BS.pack fileType) (BS.pack $ domain ++ "-uploads") (BS.pack fileName) expiry
BS.unpack . signedRequest <$> generateS3URL credentials request
put :: Show a => Domain -> Id -> a -> IO PutObjectResponse
put domain id object =
do
env <- awsEnv
let req = send $ putObject (metaBucket domain) (key id) (body object)
runResourceT . runAWST env $ req
get :: Read a => Domain -> Id -> IO a
get domain id =
do
env <- awsEnv
let req = send $ getObject (metaBucket domain) (key id)
body <- runResourceT . runAWST env $
do
resp <- req
sinkBody (view gorsBody resp) sinkLbs
return . read . BSL.unpack $ body
awsEnv :: IO Env
awsEnv = newEnv NorthVirginia Discover
metaBucket :: Domain -> BucketName
metaBucket domain =
BucketName $ pack $ domain ++ "/media-server/uploads/meta"
key :: Store.Id -> ObjectKey
key id =
ObjectKey $ pack . show $ id
body :: Show a => a -> RqBody
body =
toBody . show
expiry :: Integer
expiry = 30 * 60
|
svanderbleek/media-server
|
src/Store.hs
|
Haskell
|
bsd-3-clause
| 2,245
|
-- |Type aliases used throughout the crypto-api modules.
module Crypto.Types where
import Data.ByteString as B
import Data.ByteString.Lazy as L
-- |The length of a field (usually a ByteString) in bits
type BitLength = Int
-- |The length of a field in bytes.
type ByteLength = Int
|
ekmett/crypto-api
|
Crypto/Types.hs
|
Haskell
|
bsd-3-clause
| 283
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Web.RTBBidder.Types.Request.Video (Video(..)) where
import qualified Data.Aeson as AESON
import Data.Aeson ((.=), (.:), (.:?), (.!=))
import qualified Data.Text as TX
import Web.RTBBidder.Types.Request.Banner (Banner(..))
data Video = Video
{ videoMimes :: [TX.Text]
, videoMinduration :: Maybe Int
, videoMaxduration :: Maybe Int
, videoProtocols :: [Int]
, videoProtocol :: Maybe Int -- DEPRECATED
, videoW :: Maybe Int
, videoH :: Maybe Int
, videoStartdelay :: Maybe Int
, videoPlacement :: Maybe Int
, videoLinearity :: Maybe Int
, videoSkip :: Maybe Int
, videoSkipmin :: Int
, videoSkipafter :: Int
, videoSequence :: Maybe Int
, videoBattr :: [Int]
, videoMaxextendeded :: Maybe Int
, videoMinbitrate :: Maybe Int
, videoMaxbitrate :: Maybe Int
, videoBoxingallowed :: Int
, videoPlaybackmethod :: [Int]
, videoPlaybackend :: Maybe Int
, videoDelivery :: [Int]
, videoPos :: Maybe Int
, videoCompanionad :: [Banner]
, videoApi :: [Int]
, videoCompaniontype :: [Int]
, videoExt :: Maybe AESON.Value
} deriving (Show, Eq)
instance AESON.FromJSON Video where
parseJSON = AESON.withObject "video" $ \o -> do
videoMimes <- o .: "mimes"
videoMinduration <- o .:? "minduration"
videoMaxduration <- o .:? "maxduration"
videoProtocols <- o .:? "protocols" .!= []
videoProtocol <- o .:? "protocol"
videoW <- o .:? "w"
videoH <- o .:? "h"
videoStartdelay <- o .:? "startdelay"
videoPlacement <- o .:? "placement"
videoLinearity <- o .:? "linearity"
videoSkip <- o .:? "skip"
videoSkipmin <- o .:? "skipmin" .!= 0
videoSkipafter <- o .:? "skipafter" .!= 0
videoSequence <- o .:? "sequence"
videoBattr <- o .:? "battr" .!= []
videoMaxextendeded <- o .:? "maxextended"
videoMinbitrate <- o .:? "minbitrate"
videoMaxbitrate <- o .:? "maxbitrate"
videoBoxingallowed <- o .:? "boxingallowed" .!= 1
videoPlaybackmethod <- o .:? "playbackmethod" .!= []
videoPlaybackend <- o .:? "playbackend"
videoDelivery <- o .:? "delivery" .!= []
videoPos <- o .:? "pos"
videoCompanionad <- o .:? "companionad" .!= []
videoApi <- o .:? "api" .!= []
videoCompaniontype <- o .:? "companiontype" .!= []
videoExt <- o .:? "ext"
return Video{..}
instance AESON.ToJSON Video where
toJSON Video{..} = AESON.object
[ "mimes" .= videoMimes
, "minduration" .= videoMinduration
, "maxduration" .= videoMaxduration
, "protocols" .= videoProtocols
, "protocol" .= videoProtocol
, "w" .= videoW
, "h" .= videoH
, "startdelay" .= videoStartdelay
, "placement" .= videoPlacement
, "linearity" .= videoLinearity
, "skip" .= videoSkip
, "skipmin" .= videoSkipmin
, "skipafter" .= videoSkipafter
, "sequence" .= videoSequence
, "battr" .= videoBattr
, "maxextended" .= videoMaxextendeded
, "minbitrate" .= videoMinbitrate
, "maxbitrate" .= videoMaxbitrate
, "boxingallowed" .= videoBoxingallowed
, "playbackmethod" .= videoPlaybackmethod
, "playbackend" .= videoPlaybackend
, "delivery" .= videoDelivery
, "pos" .= videoPos
, "companionad" .= videoCompanionad
, "api" .= videoApi
, "companiontype" .= videoCompaniontype
, "ext" .= videoExt
]
|
hiratara/hs-rtb-bidder
|
src/Web/RTBBidder/Types/Request/Video.hs
|
Haskell
|
bsd-3-clause
| 3,366
|
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[Simplify]{The main module of the simplifier}
-}
{-# LANGUAGE CPP #-}
module Simplify ( simplTopBinds, simplExpr, simplRules ) where
#include "HsVersions.h"
import DynFlags
import SimplMonad
import Type hiding ( substTy, substTyVar, extendTvSubst, extendCvSubst )
import SimplEnv
import SimplUtils
import FamInstEnv ( FamInstEnv )
import Literal ( litIsLifted ) --, mkMachInt ) -- temporarily commented out. See #8326
import Id
import MkId ( seqId, voidPrimId )
import MkCore ( mkImpossibleExpr, castBottomExpr )
import IdInfo
import Name ( Name, mkSystemVarName, isExternalName, getOccFS )
import Coercion hiding ( substCo, substCoVar )
import OptCoercion ( optCoercion )
import FamInstEnv ( topNormaliseType_maybe )
import DataCon ( DataCon, dataConWorkId, dataConRepStrictness
, isMarkedStrict, dataConRepArgTys ) --, dataConTyCon, dataConTag, fIRST_TAG )
--import TyCon ( isEnumerationTyCon ) -- temporarily commented out. See #8326
import CoreMonad ( Tick(..), SimplifierMode(..) )
import CoreSyn
import Demand ( StrictSig(..), dmdTypeDepth, isStrictDmd )
import PprCore ( pprCoreExpr )
import CoreUnfold
import CoreUtils
import CoreArity
--import PrimOp ( tagToEnumKey ) -- temporarily commented out. See #8326
import Rules ( mkRuleInfo, lookupRule, getRules )
import TysPrim ( voidPrimTy ) --, intPrimTy ) -- temporarily commented out. See #8326
import BasicTypes ( TopLevelFlag(..), isTopLevel, RecFlag(..) )
import MonadUtils ( foldlM, mapAccumLM, liftIO )
import Maybes ( orElse )
--import Unique ( hasKey ) -- temporarily commented out. See #8326
import Control.Monad
import Outputable
import FastString
import Pair
import Util
import ErrUtils
{-
The guts of the simplifier is in this module, but the driver loop for
the simplifier is in SimplCore.hs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
The simplifier used to guarantee that the output had no shadowing, but
it does not do so any more. (Actually, it never did!) The reason is
documented with simplifyArgs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
Many parts of the simplifier return a bunch of "floats" as well as an
expression. This is wrapped as a datatype SimplUtils.FloatsWith.
All "floats" are let-binds, not case-binds, but some non-rec lets may
be unlifted (with RHS ok-for-speculation).
-----------------------------------------
ORGANISATION OF FUNCTIONS
-----------------------------------------
simplTopBinds
- simplify all top-level binders
- for NonRec, call simplRecOrTopPair
- for Rec, call simplRecBind
------------------------------
simplExpr (applied lambda) ==> simplNonRecBind
simplExpr (Let (NonRec ...) ..) ==> simplNonRecBind
simplExpr (Let (Rec ...) ..) ==> simplify binders; simplRecBind
------------------------------
simplRecBind [binders already simplified]
- use simplRecOrTopPair on each pair in turn
simplRecOrTopPair [binder already simplified]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
Returns:
- check for PreInlineUnconditionally
- simplLazyBind
simplNonRecBind
Used for: non-top-level non-recursive bindings
beta reductions (which amount to the same thing)
Because it can deal with strict args, it takes a
"thing-inside" and returns an expression
- check for PreInlineUnconditionally
- simplify binder, including its IdInfo
- if strict binding
simplStrictArg
mkAtomicArgs
completeNonRecX
else
simplLazyBind
addFloats
simplNonRecX: [given a *simplified* RHS, but an *unsimplified* binder]
Used for: binding case-binder and constr args in a known-constructor case
- check for PreInlineUnconditionally
- simplify binder
- completeNonRecX
------------------------------
simplLazyBind: [binder already simplified, RHS not]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
non-top-level, but *lazy* non-recursive bindings
[must not be strict or unboxed]
Returns floats + an augmented environment, not an expression
- substituteIdInfo and add result to in-scope
[so that rules are available in rec rhs]
- simplify rhs
- mkAtomicArgs
- float if exposes constructor or PAP
- completeBind
completeNonRecX: [binder and rhs both simplified]
- if the thing needs case binding (unlifted and not ok-for-spec)
build a Case
else
completeBind
addFloats
completeBind: [given a simplified RHS]
[used for both rec and non-rec bindings, top level and not]
- try PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
Right hand sides and arguments
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In many ways we want to treat
(a) the right hand side of a let(rec), and
(b) a function argument
in the same way. But not always! In particular, we would
like to leave these arguments exactly as they are, so they
will match a RULE more easily.
f (g x, h x)
g (+ x)
It's harder to make the rule match if we ANF-ise the constructor,
or eta-expand the PAP:
f (let { a = g x; b = h x } in (a,b))
g (\y. + x y)
On the other hand if we see the let-defns
p = (g x, h x)
q = + x
then we *do* want to ANF-ise and eta-expand, so that p and q
can be safely inlined.
Even floating lets out is a bit dubious. For let RHS's we float lets
out if that exposes a value, so that the value can be inlined more vigorously.
For example
r = let x = e in (x,x)
Here, if we float the let out we'll expose a nice constructor. We did experiments
that showed this to be a generally good thing. But it was a bad thing to float
lets out unconditionally, because that meant they got allocated more often.
For function arguments, there's less reason to expose a constructor (it won't
get inlined). Just possibly it might make a rule match, but I'm pretty skeptical.
So for the moment we don't float lets out of function arguments either.
Eta expansion
~~~~~~~~~~~~~~
For eta expansion, we want to catch things like
case e of (a,b) -> \x -> case a of (p,q) -> \y -> r
If the \x was on the RHS of a let, we'd eta expand to bring the two
lambdas together. And in general that's a good thing to do. Perhaps
we should eta expand wherever we find a (value) lambda? Then the eta
expansion at a let RHS can concentrate solely on the PAP case.
************************************************************************
* *
\subsection{Bindings}
* *
************************************************************************
-}
simplTopBinds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simplTopBinds env0 binds0
= do { -- Put all the top-level binders into scope at the start
-- so that if a transformation rule has unexpectedly brought
-- anything into scope, then we don't get a complaint about that.
-- It's rather as if the top-level binders were imported.
-- See note [Glomming] in OccurAnal.
; env1 <- simplRecBndrs env0 (bindersOfBinds binds0)
; env2 <- simpl_binds env1 binds0
; freeTick SimplifierDone
; return env2 }
where
-- We need to track the zapped top-level binders, because
-- they should have their fragile IdInfo zapped (notably occurrence info)
-- That's why we run down binds and bndrs' simultaneously.
--
simpl_binds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simpl_binds env [] = return env
simpl_binds env (bind:binds) = do { env' <- simpl_bind env bind
; simpl_binds env' binds }
simpl_bind env (Rec pairs) = simplRecBind env TopLevel pairs
simpl_bind env (NonRec b r) = do { (env', b') <- addBndrRules env b (lookupRecBndr env b)
; simplRecOrTopPair env' TopLevel NonRecursive b b' r }
{-
************************************************************************
* *
\subsection{Lazy bindings}
* *
************************************************************************
simplRecBind is used for
* recursive bindings only
-}
simplRecBind :: SimplEnv -> TopLevelFlag
-> [(InId, InExpr)]
-> SimplM SimplEnv
simplRecBind env0 top_lvl pairs0
= do { (env_with_info, triples) <- mapAccumLM add_rules env0 pairs0
; env1 <- go (zapFloats env_with_info) triples
; return (env0 `addRecFloats` env1) }
-- addFloats adds the floats from env1,
-- _and_ updates env0 with the in-scope set from env1
where
add_rules :: SimplEnv -> (InBndr,InExpr) -> SimplM (SimplEnv, (InBndr, OutBndr, InExpr))
-- Add the (substituted) rules to the binder
add_rules env (bndr, rhs)
= do { (env', bndr') <- addBndrRules env bndr (lookupRecBndr env bndr)
; return (env', (bndr, bndr', rhs)) }
go env [] = return env
go env ((old_bndr, new_bndr, rhs) : pairs)
= do { env' <- simplRecOrTopPair env top_lvl Recursive old_bndr new_bndr rhs
; go env' pairs }
{-
simplRecOrTopPair is used for
* recursive bindings (whether top level or not)
* top-level non-recursive bindings
It assumes the binder has already been simplified, but not its IdInfo.
-}
simplRecOrTopPair :: SimplEnv
-> TopLevelFlag -> RecFlag
-> InId -> OutBndr -> InExpr -- Binder and rhs
-> SimplM SimplEnv -- Returns an env that includes the binding
simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs
= do { dflags <- getDynFlags
; trace_bind dflags $
if preInlineUnconditionally dflags env top_lvl old_bndr rhs
-- Check for unconditional inline
then do tick (PreInlineUnconditionally old_bndr)
return (extendIdSubst env old_bndr (mkContEx env rhs))
else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env }
where
trace_bind dflags thing_inside
| not (dopt Opt_D_verbose_core2core dflags)
= thing_inside
| otherwise
= pprTrace "SimplBind" (ppr old_bndr) thing_inside
-- trace_bind emits a trace for each top-level binding, which
-- helps to locate the tracing for inlining and rule firing
{-
simplLazyBind is used for
* [simplRecOrTopPair] recursive bindings (whether top level or not)
* [simplRecOrTopPair] top-level non-recursive bindings
* [simplNonRecE] non-top-level *lazy* non-recursive bindings
Nota bene:
1. It assumes that the binder is *already* simplified,
and is in scope, and its IdInfo too, except unfolding
2. It assumes that the binder type is lifted.
3. It does not check for pre-inline-unconditionally;
that should have been done already.
-}
simplLazyBind :: SimplEnv
-> TopLevelFlag -> RecFlag
-> InId -> OutId -- Binder, both pre-and post simpl
-- The OutId has IdInfo, except arity, unfolding
-> InExpr -> SimplEnv -- The RHS and its environment
-> SimplM SimplEnv
-- Precondition: rhs obeys the let/app invariant
simplLazyBind env top_lvl is_rec bndr bndr1 rhs rhs_se
= -- pprTrace "simplLazyBind" ((ppr bndr <+> ppr bndr1) $$ ppr rhs $$ ppr (seIdSubst rhs_se)) $
do { let rhs_env = rhs_se `setInScope` env
(tvs, body) = case collectTyAndValBinders rhs of
(tvs, [], body)
| surely_not_lam body -> (tvs, body)
_ -> ([], rhs)
surely_not_lam (Lam {}) = False
surely_not_lam (Tick t e)
| not (tickishFloatable t) = surely_not_lam e
-- eta-reduction could float
surely_not_lam _ = True
                      -- Do not do the "abstract tyvar" thing if there's
-- a lambda inside, because it defeats eta-reduction
-- f = /\a. \x. g a x
-- should eta-reduce.
; (body_env, tvs') <- simplBinders rhs_env tvs
-- See Note [Floating and type abstraction] in SimplUtils
-- Simplify the RHS
; let rhs_cont = mkRhsStop (substTy body_env (exprType body))
; (body_env1, body1) <- simplExprF body_env body rhs_cont
-- ANF-ise a constructor or PAP rhs
; (body_env2, body2) <- prepareRhs top_lvl body_env1 bndr1 body1
; (env', rhs')
<- if not (doFloatFromRhs top_lvl is_rec False body2 body_env2)
then -- No floating, revert to body1
do { rhs' <- mkLam tvs' (wrapFloats body_env1 body1) rhs_cont
; return (env, rhs') }
else if null tvs then -- Simple floating
do { tick LetFloatFromLet
; return (addFloats env body_env2, body2) }
else -- Do type-abstraction first
do { tick LetFloatFromLet
; (poly_binds, body3) <- abstractFloats tvs' body_env2 body2
; rhs' <- mkLam tvs' body3 rhs_cont
; env' <- foldlM (addPolyBind top_lvl) env poly_binds
; return (env', rhs') }
; completeBind env' top_lvl bndr bndr1 rhs' }
{-
A specialised variant of simplNonRec used when the RHS is already simplified,
notably in knownCon. It uses case-binding where necessary.
-}
simplNonRecX :: SimplEnv
-> InId -- Old binder
-> OutExpr -- Simplified RHS
-> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
simplNonRecX env bndr new_rhs
| isDeadBinder bndr -- Not uncommon; e.g. case (a,b) of c { (p,q) -> p }
= return env -- Here c is dead, and we avoid creating
-- the binding c = (a,b)
| Coercion co <- new_rhs
= return (extendCvSubst env bndr co)
| otherwise
= do { (env', bndr') <- simplBinder env bndr
; completeNonRecX NotTopLevel env' (isStrictId bndr) bndr bndr' new_rhs }
-- simplNonRecX is only used for NotTopLevel things
completeNonRecX :: TopLevelFlag -> SimplEnv
-> Bool
-> InId -- Old binder
-> OutId -- New binder
-> OutExpr -- Simplified RHS
-> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
completeNonRecX top_lvl env is_strict old_bndr new_bndr new_rhs
= do { (env1, rhs1) <- prepareRhs top_lvl (zapFloats env) new_bndr new_rhs
; (env2, rhs2) <-
if doFloatFromRhs NotTopLevel NonRecursive is_strict rhs1 env1
then do { tick LetFloatFromLet
; return (addFloats env env1, rhs1) } -- Add the floats to the main env
else return (env, wrapFloats env1 rhs1) -- Wrap the floats around the RHS
; completeBind env2 NotTopLevel old_bndr new_bndr rhs2 }
{-
{- No, no, no! Do not try preInlineUnconditionally in completeNonRecX
Doing so risks exponential behaviour, because new_rhs has been simplified once already
In the cases described by the following comment, postInlineUnconditionally will
catch many of the relevant cases.
-- This happens; for example, the case_bndr during case of
-- known constructor: case (a,b) of x { (p,q) -> ... }
-- Here x isn't mentioned in the RHS, so we don't want to
-- create the (dead) let-binding let x = (a,b) in ...
--
   -- Similarly, single occurrences can be inlined vigorously
-- e.g. case (f x, g y) of (a,b) -> ....
-- If a,b occur once we can avoid constructing the let binding for them.
Furthermore in the case-binding case preInlineUnconditionally risks extra thunks
-- Consider case I# (quotInt# x y) of
-- I# v -> let w = J# v in ...
-- If we gaily inline (quotInt# x y) for v, we end up building an
-- extra thunk:
-- let w = J# (quotInt# x y) in ...
-- because quotInt# can fail.
| preInlineUnconditionally env NotTopLevel bndr new_rhs
= thing_inside (extendIdSubst env bndr (DoneEx new_rhs))
-}
----------------------------------
prepareRhs takes a putative RHS, checks whether it's a PAP or
constructor application and, if so, converts it to ANF, so that the
resulting thing can be inlined more easily. Thus
x = (f a, g b)
becomes
t1 = f a
t2 = g b
x = (t1,t2)
We also want to deal well with cases like this
v = (f e1 `cast` co) e2
Here we want to make e1,e2 trivial and get
   x1 = e1; x2 = e2; v = (f x1 `cast` co) x2
That's what the 'go' loop in prepareRhs does
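A contrasting (purely illustrative) case: for
   x = h a b
where h is just an ordinary function, fully applied and neither a
constructor, PAP nor CONLIKE, isExpandableApp says No; the 'go' loop
below then leaves the RHS untouched and adds no floats.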
-}
prepareRhs :: TopLevelFlag -> SimplEnv -> OutId -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Adds new floats to the env iff that allows us to return a good RHS
prepareRhs top_lvl env id (Cast rhs co) -- Note [Float coercions]
| Pair ty1 _ty2 <- coercionKind co -- Do *not* do this if rhs has an unlifted type
, not (isUnliftedType ty1) -- see Note [Float coercions (unlifted)]
= do { (env', rhs') <- makeTrivialWithInfo top_lvl env (getOccFS id) sanitised_info rhs
; return (env', Cast rhs' co) }
where
sanitised_info = vanillaIdInfo `setStrictnessInfo` strictnessInfo info
`setDemandInfo` demandInfo info
info = idInfo id
prepareRhs top_lvl env0 id rhs0
= do { (_is_exp, env1, rhs1) <- go 0 env0 rhs0
; return (env1, rhs1) }
where
go n_val_args env (Cast rhs co)
= do { (is_exp, env', rhs') <- go n_val_args env rhs
; return (is_exp, env', Cast rhs' co) }
go n_val_args env (App fun (Type ty))
= do { (is_exp, env', rhs') <- go n_val_args env fun
; return (is_exp, env', App rhs' (Type ty)) }
go n_val_args env (App fun arg)
= do { (is_exp, env', fun') <- go (n_val_args+1) env fun
; case is_exp of
True -> do { (env'', arg') <- makeTrivial top_lvl env' (getOccFS id) arg
; return (True, env'', App fun' arg') }
False -> return (False, env, App fun arg) }
go n_val_args env (Var fun)
= return (is_exp, env, Var fun)
where
       is_exp = isExpandableApp fun n_val_args   -- Is the fun a constructor or PAP?
-- See Note [CONLIKE pragma] in BasicTypes
-- The definition of is_exp should match that in
-- OccurAnal.occAnalApp
go n_val_args env (Tick t rhs)
-- We want to be able to float bindings past this
-- tick. Non-scoping ticks don't care.
| tickishScoped t == NoScope
= do { (is_exp, env', rhs') <- go n_val_args env rhs
; return (is_exp, env', Tick t rhs') }
-- On the other hand, for scoping ticks we need to be able to
-- copy them on the floats, which in turn is only allowed if
-- we can obtain non-counting ticks.
| not (tickishCounts t) || tickishCanSplit t
= do { (is_exp, env', rhs') <- go n_val_args (zapFloats env) rhs
; let tickIt (id, expr) = (id, mkTick (mkNoCount t) expr)
floats' = seFloats $ env `addFloats` mapFloats env' tickIt
; return (is_exp, env' { seFloats = floats' }, Tick t rhs') }
go _ env other
= return (False, env, other)
{-
Note [Float coercions]
~~~~~~~~~~~~~~~~~~~~~~
When we find the binding
x = e `cast` co
we'd like to transform it to
x' = e
x = x `cast` co -- A trivial binding
There's a chance that e will be a constructor application or function, or something
like that, so moving the coercion to the usage site may well cancel the coercions
and lead to further optimisation. Example:
data family T a :: *
data instance T Int = T Int
foo :: Int -> Int -> Int
foo m n = ...
where
x = T m
go 0 = 0
go n = case x of { T m -> go (n-m) }
-- This case should optimise
Note [Preserve strictness when floating coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the Note [Float coercions] transformation, keep the strictness info.
Eg
f = e `cast` co -- f has strictness SSL
When we transform to
f' = e -- f' also has strictness SSL
f = f' `cast` co -- f still has strictness SSL
It's not wrong to drop it on the floor, but it's better to keep it.
Note [Float coercions (unlifted)]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BUT don't do [Float coercions] if 'e' has an unlifted type.
This *can* happen:
foo :: Int = (error (# Int,Int #) "urk")
`cast` CoUnsafe (# Int,Int #) Int
If we do the makeTrivial thing to the error call, we'll get
foo = case error (# Int,Int #) "urk" of v -> v `cast` ...
But 'v' isn't in scope!
These strange casts can happen as a result of case-of-case
bar = case (case x of { T -> (# 2,3 #); F -> error "urk" }) of
(# p,q #) -> p+q
-}
makeTrivialArg :: SimplEnv -> ArgSpec -> SimplM (SimplEnv, ArgSpec)
makeTrivialArg env (ValArg e) = do
{ (env', e') <- makeTrivial NotTopLevel env (fsLit "arg") e
; return (env', ValArg e') }
makeTrivialArg env arg = return (env, arg) -- CastBy, TyArg
makeTrivial :: TopLevelFlag -> SimplEnv
-> FastString -- ^ a "friendly name" to build the new binder from
-> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Binds the expression to a variable, if it's not trivial, returning the variable
makeTrivial top_lvl env context expr =
makeTrivialWithInfo top_lvl env context vanillaIdInfo expr
makeTrivialWithInfo :: TopLevelFlag -> SimplEnv
-> FastString
-- ^ a "friendly name" to build the new binder from
-> IdInfo -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Propagate strictness and demand info to the new binder
-- Note [Preserve strictness when floating coercions]
-- Returned SimplEnv has same substitution as incoming one
makeTrivialWithInfo top_lvl env context info expr
| exprIsTrivial expr -- Already trivial
|| not (bindingOk top_lvl expr expr_ty) -- Cannot trivialise
-- See Note [Cannot trivialise]
= return (env, expr)
| otherwise -- See Note [Take care] below
= do { uniq <- getUniqueM
; let name = mkSystemVarName uniq context
var = mkLocalIdOrCoVarWithInfo name expr_ty info
; env' <- completeNonRecX top_lvl env False var var expr
; expr' <- simplVar env' var
; return (env', expr') }
-- The simplVar is needed because we're constructing a new binding
-- a = rhs
-- And if rhs is of form (rhs1 |> co), then we might get
-- a1 = rhs1
-- a = a1 |> co
-- and now a's RHS is trivial and can be substituted out, and that
-- is what completeNonRecX will do
-- To put it another way, it's as if we'd simplified
-- let var = e in var
where
expr_ty = exprType expr
bindingOk :: TopLevelFlag -> CoreExpr -> Type -> Bool
-- True iff we can have a binding of this expression at this level
-- Precondition: the type is the type of the expression
bindingOk top_lvl _ expr_ty
| isTopLevel top_lvl = not (isUnliftedType expr_ty)
| otherwise = True
{-
Note [Cannot trivialise]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f :: Int -> Addr#
foo :: Bar
foo = Bar (f 3)
Then we can't ANF-ise foo, even though we'd like to, because
we can't make a top-level binding for the Addr# (f 3). And if
so we don't want to turn it into
foo = let x = f 3 in Bar x
because we'll just end up inlining x back, and that makes the
simplifier loop. Better not to ANF-ise it at all.
A case in point is literal strings (a MachStr is not regarded as
trivial):
foo = Ptr "blob"#
We don't want to ANF-ise this.
************************************************************************
* *
\subsection{Completing a lazy binding}
* *
************************************************************************
completeBind
* deals only with Ids, not TyVars
* takes an already-simplified binder and RHS
* is used for both recursive and non-recursive bindings
* is used for both top-level and non-top-level bindings
It does the following:
- tries discarding a dead binding
- tries PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
It does *not* attempt to do let-to-case. Why? Because it is used for
- top-level bindings (when let-to-case is impossible)
- many situations where the "rhs" is known to be a WHNF
(so let-to-case is inappropriate).
Nor does it do the atomic-argument thing
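Two illustrative outcomes (hypothetical, not from any compiler run):
   let x = y in ...x...        ===>  ...y...
      (postInlineUnconditionally fires; we just extend the substitution
       and emit no binding at all)
   let x = f y in ...x...x...  ===>  binding kept as a float, but now
                                     carrying its unfolding and arity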
-}
completeBind :: SimplEnv
-> TopLevelFlag -- Flag stuck into unfolding
-> InId -- Old binder
-> OutId -> OutExpr -- New binder and RHS
-> SimplM SimplEnv
-- completeBind may choose to do its work
-- * by extending the substitution (e.g. let x = y in ...)
-- * or by adding to the floats in the envt
--
-- Precondition: rhs obeys the let/app invariant
completeBind env top_lvl old_bndr new_bndr new_rhs
| isCoVar old_bndr
= case new_rhs of
Coercion co -> return (extendCvSubst env old_bndr co)
_ -> return (addNonRec env new_bndr new_rhs)
| otherwise
= ASSERT( isId new_bndr )
do { let old_info = idInfo old_bndr
old_unf = unfoldingInfo old_info
occ_info = occInfo old_info
-- Do eta-expansion on the RHS of the binding
-- See Note [Eta-expanding at let bindings] in SimplUtils
; (new_arity, final_rhs) <- tryEtaExpandRhs env new_bndr new_rhs
-- Simplify the unfolding
; new_unfolding <- simplLetUnfolding env top_lvl old_bndr final_rhs old_unf
; dflags <- getDynFlags
; if postInlineUnconditionally dflags env top_lvl new_bndr occ_info
final_rhs new_unfolding
-- Inline and discard the binding
then do { tick (PostInlineUnconditionally old_bndr)
; return (extendIdSubst env old_bndr (DoneEx final_rhs)) }
-- Use the substitution to make quite, quite sure that the
-- substitution will happen, since we are going to discard the binding
else
do { let info1 = idInfo new_bndr `setArityInfo` new_arity
-- Unfolding info: Note [Setting the new unfolding]
info2 = info1 `setUnfoldingInfo` new_unfolding
-- Demand info: Note [Setting the demand info]
--
-- We also have to nuke demand info if for some reason
-- eta-expansion *reduces* the arity of the binding to less
-- than that of the strictness sig. This can happen: see Note [Arity decrease].
info3 | isEvaldUnfolding new_unfolding
|| (case strictnessInfo info2 of
StrictSig dmd_ty -> new_arity < dmdTypeDepth dmd_ty)
= zapDemandInfo info2 `orElse` info2
| otherwise
= info2
final_id = new_bndr `setIdInfo` info3
; -- pprTrace "Binding" (ppr final_id <+> ppr new_unfolding) $
return (addNonRec env final_id final_rhs) } }
-- The addNonRec adds it to the in-scope set too
------------------------------
addPolyBind :: TopLevelFlag -> SimplEnv -> OutBind -> SimplM SimplEnv
-- Add a new binding to the environment, complete with its unfolding
-- but *do not* do postInlineUnconditionally, because we have already
-- processed some of the scope of the binding
-- We still want the unfolding though. Consider
-- let
-- x = /\a. let y = ... in Just y
-- in body
-- Then we float the y-binding out (via abstractFloats and addPolyBind)
-- but 'x' may well then be inlined in 'body' in which case we'd like the
-- opportunity to inline 'y' too.
--
-- INVARIANT: the arity is correct on the incoming binders
addPolyBind top_lvl env (NonRec poly_id rhs)
= do { unfolding <- simplLetUnfolding env top_lvl poly_id rhs noUnfolding
-- Assumes that poly_id did not have an INLINE prag
-- which is perhaps wrong. ToDo: think about this
; let final_id = setIdInfo poly_id $
idInfo poly_id `setUnfoldingInfo` unfolding
; return (addNonRec env final_id rhs) }
addPolyBind _ env bind@(Rec _)
= return (extendFloats env bind)
-- Hack: letrecs are more awkward, so we extend "by steam"
-- without adding unfoldings etc. At worst this leads to
-- more simplifier iterations
{- Note [Arity decrease]
~~~~~~~~~~~~~~~~~~~~~~~~
Generally speaking the arity of a binding should not decrease. But it *can*
legitimately happen because of RULES. Eg
f = g Int
where g has arity 2, will have arity 2. But if there's a rewrite rule
g Int --> h
where h has arity 1, then f's arity will decrease. Here's a real-life example,
which is in the output of Specialise:
Rec {
$dm {Arity 2} = \d.\x. op d
{-# RULES forall d. $dm Int d = $s$dm #-}
dInt = MkD .... opInt ...
opInt {Arity 1} = $dm dInt
$s$dm {Arity 0} = \x. op dInt }
Here opInt has arity 1; but when we apply the rule its arity drops to 0.
That's why Specialise goes to a little trouble to pin the right arity
on specialised functions too.
Note [Setting the demand info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the unfolding is a value, the demand info may
go pear-shaped, so we nuke it. Example:
let x = (a,b) in
case x of (p,q) -> h p q x
Here x is certainly demanded. But after we've nuked
the case, we'll get just
let x = (a,b) in h a b x
and now x is not demanded (I'm assuming h is lazy)
This really happens. Similarly
let f = \x -> e in ...f..f...
After inlining f at some of its call sites the original binding may
(for example) be no longer strictly demanded.
The solution here is a bit ad hoc...
************************************************************************
* *
\subsection[Simplify-simplExpr]{The main function: simplExpr}
* *
************************************************************************
The reason for this OutExprStuff stuff is that we want to float *after*
simplifying a RHS, not before. If we do so naively we get quadratic
behaviour as things float out.
To see why it's important to do it after, consider this (real) example:
let t = f x
in fst t
==>
let t = let a = e1
b = e2
in (a,b)
in fst t
==>
let a = e1
b = e2
t = (a,b)
in
a -- Can't inline a this round, cos it appears twice
==>
e1
Each of the ==> steps is a round of simplification. We'd save a
whole round if we float first. This can cascade. Consider
let f = g d
in \x -> ...f...
==>
let f = let d1 = ..d.. in \y -> e
in \x -> ...f...
==>
let d1 = ..d..
in \x -> ...(\y ->e)...
Only in this second round can the \y be applied, and it
might do the same again.
-}
simplExpr :: SimplEnv -> CoreExpr -> SimplM CoreExpr
simplExpr env expr = simplExprC env expr (mkBoringStop expr_out_ty)
where
expr_out_ty :: OutType
expr_out_ty = substTy env (exprType expr)
simplExprC :: SimplEnv -> CoreExpr -> SimplCont -> SimplM CoreExpr
-- Simplify an expression, given a continuation
simplExprC env expr cont
= -- pprTrace "simplExprC" (ppr expr $$ ppr cont {- $$ ppr (seIdSubst env) -} $$ ppr (seFloats env) ) $
do { (env', expr') <- simplExprF (zapFloats env) expr cont
; -- pprTrace "simplExprC ret" (ppr expr $$ ppr expr') $
-- pprTrace "simplExprC ret3" (ppr (seInScope env')) $
-- pprTrace "simplExprC ret4" (ppr (seFloats env')) $
return (wrapFloats env' expr') }
--------------------------------------------------
simplExprF :: SimplEnv -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplExprF env e cont
= {- pprTrace "simplExprF" (vcat
[ ppr e
, text "cont =" <+> ppr cont
, text "inscope =" <+> ppr (seInScope env)
, text "tvsubst =" <+> ppr (seTvSubst env)
, text "idsubst =" <+> ppr (seIdSubst env)
, text "cvsubst =" <+> ppr (seCvSubst env)
{- , ppr (seFloats env) -}
]) $ -}
simplExprF1 env e cont
simplExprF1 :: SimplEnv -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplExprF1 env (Var v) cont = simplIdF env v cont
simplExprF1 env (Lit lit) cont = rebuild env (Lit lit) cont
simplExprF1 env (Tick t expr) cont = simplTick env t expr cont
simplExprF1 env (Cast body co) cont = simplCast env body co cont
simplExprF1 env (Coercion co) cont = simplCoercionF env co cont
simplExprF1 env (Type ty) cont = ASSERT( contIsRhsOrArg cont )
rebuild env (Type (substTy env ty)) cont
simplExprF1 env (App fun arg) cont
= simplExprF env fun $
case arg of
Type ty -> ApplyToTy { sc_arg_ty = substTy env ty
, sc_hole_ty = substTy env (exprType fun)
, sc_cont = cont }
_ -> ApplyToVal { sc_arg = arg, sc_env = env
, sc_dup = NoDup, sc_cont = cont }
simplExprF1 env expr@(Lam {}) cont
= simplLam env zapped_bndrs body cont
-- The main issue here is under-saturated lambdas
-- (\x1. \x2. e) arg1
-- Here x1 might have "occurs-once" occ-info, because occ-info
-- is computed assuming that a group of lambdas is applied
-- all at once. If there are too few args, we must zap the
-- occ-info, UNLESS the remaining binders are one-shot
where
(bndrs, body) = collectBinders expr
zapped_bndrs | need_to_zap = map zap bndrs
| otherwise = bndrs
need_to_zap = any zappable_bndr (drop n_args bndrs)
n_args = countArgs cont
-- NB: countArgs counts all the args (incl type args)
-- and likewise drop counts all binders (incl type lambdas)
zappable_bndr b = isId b && not (isOneShotBndr b)
zap b | isTyVar b = b
| otherwise = zapLamIdInfo b
simplExprF1 env (Case scrut bndr _ alts) cont
= simplExprF env scrut (Select { sc_dup = NoDup, sc_bndr = bndr
, sc_alts = alts
, sc_env = env, sc_cont = cont })
simplExprF1 env (Let (Rec pairs) body) cont
= do { env' <- simplRecBndrs env (map fst pairs)
-- NB: bndrs' don't have unfoldings or rules
-- We add them as we go down
; env'' <- simplRecBind env' NotTopLevel pairs
; simplExprF env'' body cont }
simplExprF1 env (Let (NonRec bndr rhs) body) cont
= simplNonRecE env bndr (rhs, env) ([], body) cont
---------------------------------
simplType :: SimplEnv -> InType -> SimplM OutType
-- Kept monadic just so we can do the seqType
simplType env ty
= -- pprTrace "simplType" (ppr ty $$ ppr (seTvSubst env)) $
seqType new_ty `seq` return new_ty
where
new_ty = substTy env ty
---------------------------------
simplCoercionF :: SimplEnv -> InCoercion -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplCoercionF env co cont
= do { co' <- simplCoercion env co
; rebuild env (Coercion co') cont }
simplCoercion :: SimplEnv -> InCoercion -> SimplM OutCoercion
simplCoercion env co
= let opt_co = optCoercion (getTCvSubst env) co
in seqCo opt_co `seq` return opt_co
-----------------------------------
-- | Push a TickIt context outwards past applications and cases, as
-- long as this is a non-scoping tick, to let case and application
-- optimisations apply.
simplTick :: SimplEnv -> Tickish Id -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplTick env tickish expr cont
-- A scoped tick turns into a continuation, so that we can spot
-- (scc t (\x . e)) in simplLam and eliminate the scc. If we didn't do
-- it this way, then it would take two passes of the simplifier to
-- reduce ((scc t (\x . e)) e').
-- NB, don't do this with counting ticks, because if the expr is
-- bottom, then rebuildCall will discard the continuation.
-- XXX: we cannot do this, because the simplifier assumes that
-- the context can be pushed into a case with a single branch. e.g.
-- scc<f> case expensive of p -> e
-- becomes
-- case expensive of p -> scc<f> e
--
-- So I'm disabling this for now. It just means we will do more
 -- simplifier iterations than necessary in some cases.
-- | tickishScoped tickish && not (tickishCounts tickish)
-- = simplExprF env expr (TickIt tickish cont)
-- For unscoped or soft-scoped ticks, we are allowed to float in new
-- cost, so we simply push the continuation inside the tick. This
-- has the effect of moving the tick to the outside of a case or
-- application context, allowing the normal case and application
-- optimisations to fire.
| tickish `tickishScopesLike` SoftScope
= do { (env', expr') <- simplExprF env expr cont
; return (env', mkTick tickish expr')
}
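  -- For instance (purely illustrative): with a soft-scoped source note,
  --     (src<loc> (f x)) y   ===>   src<loc> (f x y)
  -- i.e. the application is pushed inside and the tick re-wrapped around
  -- the result, exposing (f x y) to the usual call optimisations.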
-- Push tick inside if the context looks like this will allow us to
-- do a case-of-case - see Note [case-of-scc-of-case]
| Select {} <- cont, Just expr' <- push_tick_inside
= simplExprF env expr' cont
-- We don't want to move the tick, but we might still want to allow
-- floats to pass through with appropriate wrapping (or not, see
-- wrap_floats below)
--- | not (tickishCounts tickish) || tickishCanSplit tickish
-- = wrap_floats
| otherwise
= no_floating_past_tick
where
-- Try to push tick inside a case, see Note [case-of-scc-of-case].
push_tick_inside =
case expr0 of
Case scrut bndr ty alts
-> Just $ Case (tickScrut scrut) bndr ty (map tickAlt alts)
_other -> Nothing
where (ticks, expr0) = stripTicksTop movable (Tick tickish expr)
movable t = not (tickishCounts t) ||
t `tickishScopesLike` NoScope ||
tickishCanSplit t
tickScrut e = foldr mkTick e ticks
-- Alternatives get annotated with all ticks that scope in some way,
-- but we don't want to count entries.
tickAlt (c,bs,e) = (c,bs, foldr mkTick e ts_scope)
ts_scope = map mkNoCount $
filter (not . (`tickishScopesLike` NoScope)) ticks
no_floating_past_tick =
do { let (inc,outc) = splitCont cont
; (env', expr') <- simplExprF (zapFloats env) expr inc
; let tickish' = simplTickish env tickish
; (env'', expr'') <- rebuild (zapFloats env')
(wrapFloats env' expr')
(TickIt tickish' outc)
; return (addFloats env env'', expr'')
}
-- Alternative version that wraps outgoing floats with the tick. This
-- results in ticks being duplicated, as we don't make any attempt to
-- eliminate the tick if we re-inline the binding (because the tick
-- semantics allows unrestricted inlining of HNFs), so I'm not doing
-- this any more. FloatOut will catch any real opportunities for
-- floating.
--
-- wrap_floats =
-- do { let (inc,outc) = splitCont cont
-- ; (env', expr') <- simplExprF (zapFloats env) expr inc
-- ; let tickish' = simplTickish env tickish
-- ; let wrap_float (b,rhs) = (zapIdStrictness (setIdArity b 0),
-- mkTick (mkNoCount tickish') rhs)
-- -- when wrapping a float with mkTick, we better zap the Id's
-- -- strictness info and arity, because it might be wrong now.
-- ; let env'' = addFloats env (mapFloats env' wrap_float)
-- ; rebuild env'' expr' (TickIt tickish' outc)
-- }
simplTickish env tickish
| Breakpoint n ids <- tickish
= Breakpoint n (map (getDoneId . substId env) ids)
| otherwise = tickish
-- Push type application and coercion inside a tick
splitCont :: SimplCont -> (SimplCont, SimplCont)
splitCont cont@(ApplyToTy { sc_cont = tail }) = (cont { sc_cont = inc }, outc)
where (inc,outc) = splitCont tail
splitCont (CastIt co c) = (CastIt co inc, outc)
where (inc,outc) = splitCont c
splitCont other = (mkBoringStop (contHoleType other), other)
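    -- For example (illustrative): in a context like ((tick<t> f) @ty) x,
    -- splitCont returns the ApplyToTy frame as the inner continuation
    -- (it may go inside the tick), and the ApplyToVal frame plus whatever
    -- follows it as the outer continuation to rebuild around the tick.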
getDoneId (DoneId id) = id
getDoneId (DoneEx e) = getIdFromTrivialExpr e -- Note [substTickish] in CoreSubst
getDoneId other = pprPanic "getDoneId" (ppr other)
-- Note [case-of-scc-of-case]
-- It's pretty important to be able to transform case-of-case when
-- there's an SCC in the way. For example, the following comes up
-- in nofib/real/compress/Encode.hs:
--
-- case scctick<code_string.r1>
-- case $wcode_string_r13s wild_XC w1_s137 w2_s138 l_aje
-- of _ { (# ww1_s13f, ww2_s13g, ww3_s13h #) ->
-- (ww1_s13f, ww2_s13g, ww3_s13h)
-- }
-- of _ { (ww_s12Y, ww1_s12Z, ww2_s130) ->
-- tick<code_string.f1>
-- (ww_s12Y,
-- ww1_s12Z,
-- PTTrees.PT
-- @ GHC.Types.Char @ GHC.Types.Int wild2_Xj ww2_s130 r_ajf)
-- }
--
-- We really want this case-of-case to fire, because then the 3-tuple
-- will go away (indeed, the CPR optimisation is relying on this
-- happening). But the scctick is in the way - we need to push it
-- inside to expose the case-of-case. So we perform this
-- transformation on the inner case:
--
-- scctick c (case e of { p1 -> e1; ...; pn -> en })
-- ==>
-- case (scctick c e) of { p1 -> scc c e1; ...; pn -> scc c en }
--
-- So we've moved a constant amount of work out of the scc to expose
-- the case. We only do this when the continuation is interesting:
-- for now, it has to be another Case (maybe generalise this later).
{-
************************************************************************
* *
\subsection{The main rebuilder}
* *
************************************************************************
-}
rebuild :: SimplEnv -> OutExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- At this point the substitution in the SimplEnv should be irrelevant
-- only the in-scope set and floats should matter
rebuild env expr cont
= case cont of
Stop {} -> return (env, expr)
TickIt t cont -> rebuild env (mkTick t expr) cont
CastIt co cont -> rebuild env (mkCast expr co) cont
-- NB: mkCast implements the (Coercion co |> g) optimisation
Select { sc_bndr = bndr, sc_alts = alts, sc_env = se, sc_cont = cont }
-> rebuildCase (se `setFloats` env) expr bndr alts cont
StrictArg info _ cont -> rebuildCall env (info `addValArgTo` expr) cont
StrictBind b bs body se cont -> do { env' <- simplNonRecX (se `setFloats` env) b expr
-- expr satisfies let/app since it started life
-- in a call to simplNonRecE
; simplLam env' bs body cont }
ApplyToTy { sc_arg_ty = ty, sc_cont = cont}
-> rebuild env (App expr (Type ty)) cont
ApplyToVal { sc_arg = arg, sc_env = se, sc_dup = dup_flag, sc_cont = cont}
-- See Note [Avoid redundant simplification]
| isSimplified dup_flag -> rebuild env (App expr arg) cont
| otherwise -> do { arg' <- simplExpr (se `setInScope` env) arg
; rebuild env (App expr arg') cont }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
-}
simplCast :: SimplEnv -> InExpr -> Coercion -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplCast env body co0 cont0
= do { co1 <- simplCoercion env co0
; cont1 <- addCoerce co1 cont0
; simplExprF env body cont1 }
where
addCoerce co cont = add_coerce co (coercionKind co) cont
add_coerce _co (Pair s1 k1) cont -- co :: ty~ty
| s1 `eqType` k1 = return cont -- is a no-op
add_coerce co1 (Pair s1 _k2) (CastIt co2 cont)
| (Pair _l1 t1) <- coercionKind co2
-- e |> (g1 :: S1~L) |> (g2 :: L~T1)
-- ==>
-- e, if S1=T1
-- e |> (g1 . g2 :: S1~T1) otherwise
--
-- For example, in the initial form of a worker
-- we may find (coerce T (coerce S (\x.e))) y
-- and we'd like it to simplify to e[y/x] in one round
-- of simplification
, s1 `eqType` t1 = return cont -- The coerces cancel out
| otherwise = return (CastIt (mkTransCo co1 co2) cont)
add_coerce co (Pair s1s2 _t1t2) cont@(ApplyToTy { sc_arg_ty = arg_ty, sc_cont = tail })
-- (f |> g) ty ---> (f ty) |> (g @ ty)
-- This implements the PushT rule from the paper
| isForAllTy s1s2
= do { cont' <- addCoerce new_cast tail
; return (cont { sc_cont = cont' }) }
where
new_cast = mkInstCo co (mkNomReflCo arg_ty)
add_coerce co (Pair s1s2 t1t2) (ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_dup = dup, sc_cont = cont })
| isFunTy s1s2 -- This implements the Push rule from the paper
, isFunTy t1t2 -- Check t1t2 to ensure 'arg' is a value arg
-- (e |> (g :: s1s2 ~ t1->t2)) f
-- ===>
-- (e (f |> (arg g :: t1~s1))
-- |> (res g :: s2->t2)
--
-- t1t2 must be a function type, t1->t2, because it's applied
-- to something but s1s2 might conceivably not be
--
-- When we build the ApplyTo we can't mix the out-types
-- with the InExpr in the argument, so we simply substitute
-- to make it all consistent. It's a bit messy.
-- But it isn't a common case.
--
-- Example of use: Trac #995
= do { (dup', arg_se', arg') <- simplArg env dup arg_se arg
; cont' <- addCoerce co2 cont
; return (ApplyToVal { sc_arg = mkCast arg' (mkSymCo co1)
, sc_env = arg_se'
, sc_dup = dup'
, sc_cont = cont' }) }
where
-- we split coercion t1->t2 ~ s1->s2 into t1 ~ s1 and
-- t2 ~ s2 with left and right on the curried form:
-- (->) t1 t2 ~ (->) s1 s2
[co1, co2] = decomposeCo 2 co
add_coerce co _ cont = return (CastIt co cont)
simplArg :: SimplEnv -> DupFlag -> StaticEnv -> CoreExpr
-> SimplM (DupFlag, StaticEnv, OutExpr)
simplArg env dup_flag arg_env arg
| isSimplified dup_flag
= return (dup_flag, arg_env, arg)
| otherwise
= do { arg' <- simplExpr (arg_env `setInScope` env) arg
; return (Simplified, zapSubstEnv arg_env, arg') }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
Note [Zap unfolding when beta-reducing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lambda-bound variables can have stable unfoldings, such as
$j = \x. \b{Unf=Just x}. e
See Note [Case binders and join points] below; the unfolding for b lets
us optimise e better. However, when we beta-reduce it we want to
revert to using the actual value, otherwise we can end up in the
stupid situation of
let x = blah in
let b{Unf=Just x} = y
in ...b...
Here it'd be far better to drop the unfolding and use the actual RHS.
-}
simplLam :: SimplEnv -> [InId] -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplLam env [] body cont = simplExprF env body cont
-- Beta reduction
simplLam env (bndr:bndrs) body (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
= do { tick (BetaReduction bndr)
; simplLam (extendTvSubst env bndr arg_ty) bndrs body cont }
simplLam env (bndr:bndrs) body (ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_cont = cont })
= do { tick (BetaReduction bndr)
; simplNonRecE env' (zap_unfolding bndr) (arg, arg_se) (bndrs, body) cont }
where
env' | Coercion co <- arg
= extendCvSubst env bndr co
| otherwise
= env
zap_unfolding bndr -- See Note [Zap unfolding when beta-reducing]
| isId bndr, isStableUnfolding (realIdUnfolding bndr)
= setIdUnfolding bndr NoUnfolding
| otherwise = bndr
-- discard a non-counting tick on a lambda. This may change the
-- cost attribution slightly (moving the allocation of the
-- lambda elsewhere), but we don't care: optimisation changes
-- cost attribution all the time.
simplLam env bndrs body (TickIt tickish cont)
| not (tickishCounts tickish)
= simplLam env bndrs body cont
-- Not enough args, so there are real lambdas left to put in the result
simplLam env bndrs body cont
= do { (env', bndrs') <- simplLamBndrs env bndrs
; body' <- simplExpr env' body
; new_lam <- mkLam bndrs' body' cont
; rebuild env' new_lam cont }
simplLamBndrs :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
simplLamBndrs env bndrs = mapAccumLM simplLamBndr env bndrs
-------------
simplLamBndr :: SimplEnv -> Var -> SimplM (SimplEnv, Var)
-- Used for lambda binders. These sometimes have unfoldings added by
-- the worker/wrapper pass that must be preserved, because they can't
-- be reconstructed from context. For example:
-- f x = case x of (a,b) -> fw a b x
-- fw a b x{=(a,b)} = ...
-- The "{=(a,b)}" is an unfolding we can't reconstruct otherwise.
simplLamBndr env bndr
| isId bndr && hasSomeUnfolding old_unf -- Special case
= do { (env1, bndr1) <- simplBinder env bndr
; unf' <- simplUnfolding env1 NotTopLevel bndr old_unf
; let bndr2 = bndr1 `setIdUnfolding` unf'
; return (modifyInScope env1 bndr2, bndr2) }
| otherwise
= simplBinder env bndr -- Normal case
where
old_unf = idUnfolding bndr
------------------
simplNonRecE :: SimplEnv
-> InBndr -- The binder
-> (InExpr, SimplEnv) -- Rhs of binding (or arg of lambda)
-> ([InBndr], InExpr) -- Body of the let/lambda
-- \xs.e
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
-- simplNonRecE is used for
-- * non-top-level non-recursive lets in expressions
-- * beta reduction
--
-- It deals with strict bindings, via the StrictBind continuation,
-- which may abort the whole process
--
-- Precondition: rhs satisfies the let/app invariant
-- Note [CoreSyn let/app invariant] in CoreSyn
--
-- The "body" of the binding comes as a pair of ([InId],InExpr)
-- representing a lambda; so we recurse back to simplLam
-- Why? Because of the binder-occ-info-zapping done before
-- the call to simplLam in simplExprF (Lam ...)
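-- For instance (illustrative): the beta-reduction
--     (\x. \y. e) arg
-- reaches here from simplLam as
--     simplNonRecE env x (arg, arg_se) ([y], e) cont
-- so the remaining lambda binders travel with the body, and we return to
-- simplLam once the x binding has been dealt with.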
-- First deal with type applications and type lets
-- (/\a. e) (Type ty) and (let a = Type ty in e)
simplNonRecE env bndr (Type ty_arg, rhs_se) (bndrs, body) cont
= ASSERT( isTyVar bndr )
do { ty_arg' <- simplType (rhs_se `setInScope` env) ty_arg
; simplLam (extendTvSubst env bndr ty_arg') bndrs body cont }
simplNonRecE env bndr (rhs, rhs_se) (bndrs, body) cont
= do dflags <- getDynFlags
case () of
_ | preInlineUnconditionally dflags env NotTopLevel bndr rhs
-> do { tick (PreInlineUnconditionally bndr)
; -- pprTrace "preInlineUncond" (ppr bndr <+> ppr rhs) $
simplLam (extendIdSubst env bndr (mkContEx rhs_se rhs)) bndrs body cont }
| isStrictId bndr -- Includes coercions
-> simplExprF (rhs_se `setFloats` env) rhs
(StrictBind bndr bndrs body env cont)
| otherwise
-> ASSERT( not (isTyVar bndr) )
do { (env1, bndr1) <- simplNonRecBndr env bndr
; (env2, bndr2) <- addBndrRules env1 bndr bndr1
; env3 <- simplLazyBind env2 NotTopLevel NonRecursive bndr bndr2 rhs rhs_se
; simplLam env3 bndrs body cont }
{-
************************************************************************
* *
Variables
* *
************************************************************************
-}
simplVar :: SimplEnv -> InVar -> SimplM OutExpr
-- Look up an InVar in the environment
simplVar env var
| isTyVar var = return (Type (substTyVar env var))
| isCoVar var = return (Coercion (substCoVar env var))
| otherwise
= case substId env var of
DoneId var1 -> return (Var var1)
DoneEx e -> return e
ContEx tvs cvs ids e -> simplExpr (setSubstEnv env tvs cvs ids) e
simplIdF :: SimplEnv -> InId -> SimplCont -> SimplM (SimplEnv, OutExpr)
simplIdF env var cont
= case substId env var of
DoneEx e -> simplExprF (zapSubstEnv env) e cont
ContEx tvs cvs ids e -> simplExprF (setSubstEnv env tvs cvs ids) e cont
DoneId var1 -> completeCall env var1 cont
-- Note [zapSubstEnv]
-- The template is already simplified, so don't re-substitute.
-- This is VITAL. Consider
-- let x = e in
-- let y = \z -> ...x... in
-- \ x -> ...y...
-- We'll clone the inner \x, adding x->x' in the id_subst
-- Then when we inline y, we must *not* replace x by x' in
-- the inlined copy!!
---------------------------------------------------------
-- Dealing with a call site
completeCall :: SimplEnv -> OutId -> SimplCont -> SimplM (SimplEnv, OutExpr)
completeCall env var cont
= do { ------------- Try inlining ----------------
dflags <- getDynFlags
; let (lone_variable, arg_infos, call_cont) = contArgs cont
n_val_args = length arg_infos
interesting_cont = interestingCallContext call_cont
unfolding = activeUnfolding env var
maybe_inline = callSiteInline dflags var unfolding
lone_variable arg_infos interesting_cont
; case maybe_inline of {
Just expr -- There is an inlining!
-> do { checkedTick (UnfoldingDone var)
; dump_inline dflags expr cont
; simplExprF (zapSubstEnv env) expr cont }
; Nothing -> do -- No inlining!
{ rule_base <- getSimplRules
; let info = mkArgInfo var (getRules rule_base var) n_val_args call_cont
; rebuildCall env info cont
}}}
where
dump_inline dflags unfolding cont
| not (dopt Opt_D_dump_inlinings dflags) = return ()
| not (dopt Opt_D_verbose_core2core dflags)
= when (isExternalName (idName var)) $
liftIO $ printOutputForUser dflags alwaysQualify $
sep [text "Inlining done:", nest 4 (ppr var)]
| otherwise
= liftIO $ printOutputForUser dflags alwaysQualify $
sep [text "Inlining done: " <> ppr var,
nest 4 (vcat [text "Inlined fn: " <+> nest 2 (ppr unfolding),
text "Cont: " <+> ppr cont])]
rebuildCall :: SimplEnv
-> ArgInfo
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_strs = [] }) cont
-- When we run out of strictness args, it means
-- that the call is definitely bottom; see SimplUtils.mkArgInfo
-- Then we want to discard the entire strict continuation. E.g.
-- * case (error "hello") of { ... }
-- * (error "Hello") arg
-- * f (error "Hello") where f is strict
-- etc
-- Then, especially in the first of these cases, we'd like to discard
-- the continuation, leaving just the bottoming expression. But the
-- type might not be right, so we may have to add a coerce.
| not (contIsTrivial cont) -- Only do this if there is a non-trivial
  = return (env, castBottomExpr res cont_ty)  -- continuation to discard, else we do it
where -- again and again!
res = argInfoExpr fun rev_args
cont_ty = contResultType cont
rebuildCall env info (CastIt co cont)
= rebuildCall env (addCastTo info co) cont
rebuildCall env info (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
= rebuildCall env (info `addTyArgTo` arg_ty) cont
rebuildCall env info@(ArgInfo { ai_encl = encl_rules, ai_type = fun_ty
, ai_strs = str:strs, ai_discs = disc:discs })
(ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_dup = dup_flag, sc_cont = cont })
| isSimplified dup_flag -- See Note [Avoid redundant simplification]
= rebuildCall env (addValArgTo info' arg) cont
| str -- Strict argument
= -- pprTrace "Strict Arg" (ppr arg $$ ppr (seIdSubst env) $$ ppr (seInScope env)) $
simplExprF (arg_se `setFloats` env) arg
(StrictArg info' cci cont)
-- Note [Shadowing]
| otherwise -- Lazy argument
-- DO NOT float anything outside, hence simplExprC
-- There is no benefit (unlike in a let-binding), and we'd
-- have to be very careful about bogus strictness through
-- floating a demanded let.
= do { arg' <- simplExprC (arg_se `setInScope` env) arg
(mkLazyArgStop (funArgTy fun_ty) cci)
; rebuildCall env (addValArgTo info' arg') cont }
where
info' = info { ai_strs = strs, ai_discs = discs }
cci | encl_rules = RuleArgCtxt
| disc > 0 = DiscArgCtxt -- Be keener here
| otherwise = BoringCtxt -- Nothing interesting
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_rules = rules }) cont
| null rules
= rebuild env (argInfoExpr fun rev_args) cont -- No rules, common case
| otherwise
= do { -- We've accumulated a simplified call in <fun,rev_args>
-- so try rewrite rules; see Note [RULEs apply to simplified arguments]
-- See also Note [Rules for recursive functions]
; let env' = zapSubstEnv env -- See Note [zapSubstEnv];
-- and NB that 'rev_args' are all fully simplified
; mb_rule <- tryRules env' rules fun (reverse rev_args) cont
; case mb_rule of {
Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
-- Rules don't match
; Nothing -> rebuild env (argInfoExpr fun rev_args) cont -- No rules
} }
{-
Note [RULES apply to simplified arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very desirable to try RULES once the arguments have been simplified, because
doing so ensures that rule cascades work in one pass. Consider
{-# RULES g (h x) = k x
f (k x) = x #-}
...f (g (h x))...
Then we want to rewrite (g (h x)) to (k x) and only then try f's rules. If
we match f's rules against the un-simplified RHS, it won't match. This
makes a particularly big difference when superclass selectors are involved:
op ($p1 ($p2 (df d)))
We want all this to unravel in one sweep.
Note [Avoid redundant simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because RULES apply to simplified arguments, there's a danger of repeatedly
simplifying already-simplified arguments. An important example is that of
(>>=) d e1 e2
Here e1, e2 are simplified before the rule is applied, but don't really
participate in the rule firing. So we mark them as Simplified to avoid
re-simplifying them.
Note [Shadowing]
~~~~~~~~~~~~~~~~
This part of the simplifier may break the no-shadowing invariant
Consider
f (...(\a -> e)...) (case y of (a,b) -> e')
where f is strict in its second arg
If we simplify the innermost one first we get (...(\a -> e)...)
Simplifying the second arg makes us float the case out, so we end up with
case y of (a,b) -> f (...(\a -> e)...) e'
So the output does not have the no-shadowing invariant. However, there is
no danger of getting name-capture, because when the first arg was simplified
we used an in-scope set that at least mentioned all the variables free in its
static environment, and that is enough.
We can't just do innermost first, or we'd end up with a dual problem:
case x of (a,b) -> f e (...(\a -> e')...)
I spent hours trying to recover the no-shadowing invariant, but I just could
not think of an elegant way to do it. The simplifier is already knee-deep in
continuations. We have to keep the right in-scope set around; AND we have
to get the effect that finding (error "foo") in a strict arg position will
discard the entire application and replace it with (error "foo"). Getting
all this at once is TOO HARD!
************************************************************************
* *
Rewrite rules
* *
************************************************************************
-}
tryRules :: SimplEnv -> [CoreRule]
-> Id -> [ArgSpec] -> SimplCont
-> SimplM (Maybe (CoreExpr, SimplCont))
-- The SimplEnv already has zapSubstEnv applied to it
tryRules env rules fn args call_cont
| null rules
= return Nothing
{- Disabled until we fix #8326
| fn `hasKey` tagToEnumKey -- See Note [Optimising tagToEnum#]
, [_type_arg, val_arg] <- args
, Select dup bndr ((_,[],rhs1) : rest_alts) se cont <- call_cont
, isDeadBinder bndr
= do { dflags <- getDynFlags
; let enum_to_tag :: CoreAlt -> CoreAlt
-- Takes K -> e into tagK# -> e
-- where tagK# is the tag of constructor K
enum_to_tag (DataAlt con, [], rhs)
= ASSERT( isEnumerationTyCon (dataConTyCon con) )
(LitAlt tag, [], rhs)
where
tag = mkMachInt dflags (toInteger (dataConTag con - fIRST_TAG))
enum_to_tag alt = pprPanic "tryRules: tagToEnum" (ppr alt)
new_alts = (DEFAULT, [], rhs1) : map enum_to_tag rest_alts
new_bndr = setIdType bndr intPrimTy
-- The binder is dead, but should have the right type
; return (Just (val_arg, Select dup new_bndr new_alts se cont)) }
-}
| otherwise
= do { dflags <- getDynFlags
; case lookupRule dflags (getUnfoldingInRuleMatch env) (activeRule env)
fn (argInfoAppArgs args) rules of {
Nothing ->
do { nodump dflags -- This ensures that an empty file is written
; return Nothing } ; -- No rule matches
Just (rule, rule_rhs) ->
do { checkedTick (RuleFired (ru_name rule))
; let cont' = pushSimplifiedArgs env
(drop (ruleArity rule) args)
call_cont
-- (ruleArity rule) says how many args the rule consumed
; dump dflags rule rule_rhs
; return (Just (rule_rhs, cont')) }}}
where
dump dflags rule rule_rhs
| dopt Opt_D_dump_rule_rewrites dflags
= log_rule dflags Opt_D_dump_rule_rewrites "Rule fired" $ vcat
[ text "Rule:" <+> ftext (ru_name rule)
, text "Before:" <+> hang (ppr fn) 2 (sep (map ppr args))
, text "After: " <+> pprCoreExpr rule_rhs
, text "Cont: " <+> ppr call_cont ]
| dopt Opt_D_dump_rule_firings dflags
= log_rule dflags Opt_D_dump_rule_firings "Rule fired:" $
ftext (ru_name rule)
| otherwise
= return ()
nodump dflags
| dopt Opt_D_dump_rule_rewrites dflags
= liftIO $ dumpSDoc dflags alwaysQualify Opt_D_dump_rule_rewrites "" empty
| dopt Opt_D_dump_rule_firings dflags
= liftIO $ dumpSDoc dflags alwaysQualify Opt_D_dump_rule_firings "" empty
| otherwise
= return ()
log_rule dflags flag hdr details
= liftIO . dumpSDoc dflags alwaysQualify flag "" $
sep [text hdr, nest 4 details]
{-
Note [Optimising tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an enumeration data type:
data Foo = A | B | C
Then we want to transform
case tagToEnum# x of ==> case x of
A -> e1 DEFAULT -> e1
B -> e2 1# -> e2
C -> e3 2# -> e3
thereby getting rid of the tagToEnum# altogether. If there was a DEFAULT
alternative we retain it (remember it comes first). If not the case must
be exhaustive, and we reflect that in the transformed version by adding
a DEFAULT. Otherwise Lint complains that the new case is not exhaustive.
See #8317.
Note [Rules for recursive functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You might think that we shouldn't apply rules for a loop breaker:
doing so might give rise to an infinite loop, because a RULE is
rather like an extra equation for the function:
RULE: f (g x) y = x+y
Eqn: f a y = a-y
But it's too drastic to disable rules for loop breakers.
Even the foldr/build rule would be disabled, because foldr
is recursive, and hence a loop breaker:
foldr k z (build g) = g k z
So it's up to the programmer: rules can cause divergence
************************************************************************
* *
Rebuilding a case expression
* *
************************************************************************
Note [Case elimination]
~~~~~~~~~~~~~~~~~~~~~~~
The case-elimination transformation discards redundant case expressions.
Start with a simple situation:
case x# of ===> let y# = x# in e
y# -> e
(when x#, y# are of primitive type, of course). We can't (in general)
do this for algebraic cases, because we might turn bottom into
non-bottom!
The code in SimplUtils.prepareAlts has the effect of generalising this
idea to look for a case where we're scrutinising a variable, and we
know that only the default case can match. For example:
case x of
0# -> ...
DEFAULT -> ...(case x of
0# -> ...
DEFAULT -> ...) ...
Here the inner case is first trimmed to have only one alternative, the
DEFAULT, after which it's an instance of the previous case. This
really only shows up in eliminating error-checking code.
Note that SimplUtils.mkCase combines identical RHSs. So
case e of ===> case e of DEFAULT -> r
True -> r
False -> r
Now again the case may be eliminated by the CaseElim transformation.
This includes things like (==# a# b#)::Bool so that we simplify
case ==# a# b# of { True -> x; False -> x }
to just
x
This particular example shows up in default methods for
comparison operations (e.g. in (>=) for Int.Int32)
Note [Case elimination: lifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a case over a lifted type has a single alternative, and is being used
as a strict 'let' (all isDeadBinder bndrs), we may want to do this
transformation:
case e of r ===> let r = e in ...r...
_ -> ...r...
 (a) 'e' is already evaluated (it may be so if e is a variable)
Specifically we check (exprIsHNF e). In this case
we can just allocate the WHNF directly with a let.
or
(b) 'x' is not used at all and e is ok-for-speculation
The ok-for-spec bit checks that we don't lose any
exceptions or divergence.
NB: it'd be *sound* to switch from case to let if the
scrutinee was not yet WHNF but was guaranteed to
converge; but sticking with case means we won't build a
thunk
or
(c) 'x' is used strictly in the body, and 'e' is a variable
Then we can just substitute 'e' for 'x' in the body.
See Note [Eliminating redundant seqs]
For (b), the "not used at all" test is important. Consider
case (case a ># b of { True -> (p,q); False -> (q,p) }) of
r -> blah
The scrutinee is ok-for-speculation (it looks inside cases), but we do
not want to transform to
let r = case a ># b of { True -> (p,q); False -> (q,p) }
in blah
because that builds an unnecessary thunk.
Note [Eliminating redundant seqs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have this:
case x of r { _ -> ..r.. }
where 'r' is used strictly in (..r..), the case is effectively a 'seq'
on 'x', but since 'r' is used strictly anyway, we can safely transform to
(...x...)
Note that this can change the error behaviour. For example, we might
transform
case x of { _ -> error "bad" }
--> error "bad"
which might be puzzling if 'x' is currently lambda-bound, but later gets
let-bound to (error "good").
Nevertheless, the paper "A semantics for imprecise exceptions" allows
this transformation. If you want to fix the evaluation order, use
'pseq'. See Trac #8900 for an example where the loss of this
transformation bit us in practice.
See also Note [Empty case alternatives] in CoreSyn.
Just for reference, the original code (added Jan 13) looked like this:
|| case_bndr_evald_next rhs
case_bndr_evald_next :: CoreExpr -> Bool
-- See Note [Case binder next]
case_bndr_evald_next (Var v) = v == case_bndr
case_bndr_evald_next (Cast e _) = case_bndr_evald_next e
case_bndr_evald_next (App e _) = case_bndr_evald_next e
case_bndr_evald_next (Case e _ _ _) = case_bndr_evald_next e
case_bndr_evald_next _ = False
(This came up when fixing Trac #7542. See also Note [Eta reduction of
an eval'd function] in CoreUtils.)
Note [Case elimination: unlifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case a +# b of r -> ...r...
Then we do case-elimination (to make a let) followed by inlining,
to get
.....(a +# b)....
If we have
case indexArray# a i of r -> ...r...
we might like to do the same, and inline the (indexArray# a i).
But indexArray# is not okForSpeculation, so we don't build a let
in rebuildCase (lest it get floated *out*), so the inlining doesn't
happen either.
This really isn't a big deal, I think.
Further notes about case elimination
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider: test :: Integer -> IO ()
test = print
Turns out that this compiles to:
Print.test
= \ eta :: Integer
eta1 :: Void# ->
case PrelNum.< eta PrelNum.zeroInteger of wild { __DEFAULT ->
case hPutStr stdout
(PrelNum.jtos eta ($w[] @ Char))
eta1
of wild1 { (# new_s, a4 #) -> PrelIO.lvl23 new_s }}
Notice the strange '<' which has no effect at all. This is a funny one.
It started like this:
f x y = if x < 0 then jtos x
else if y==0 then "" else jtos x
At a particular call site we have (f v 1). So we inline to get
    if v < 0 then jtos v
    else if 1==0 then "" else jtos v
Now simplify the 1==0 conditional:
if v<0 then jtos v else jtos v
Now common-up the two branches of the case:
case (v<0) of DEFAULT -> jtos v
Why don't we drop the case? Because it's strict in v. It's technically
wrong to drop even unnecessary evaluations, and in practice they
may be a result of 'seq' so we *definitely* don't want to drop those.
I don't really know how to improve this situation.
-}
---------------------------------------------------------
-- Eliminate the case if possible
rebuildCase, reallyRebuildCase
:: SimplEnv
-> OutExpr -- Scrutinee
-> InId -- Case binder
   -> [InAlt]          -- Alternatives (increasing order)
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
--------------------------------------------------
-- 1. Eliminate the case if there's a known constructor
--------------------------------------------------
rebuildCase env scrut case_bndr alts cont
| Lit lit <- scrut -- No need for same treatment as constructors
-- because literals are inlined more vigorously
, not (litIsLifted lit)
= do { tick (KnownBranch case_bndr)
; case findAlt (LitAlt lit) alts of
Nothing -> missingAlt env case_bndr alts cont
Just (_, bs, rhs) -> simple_rhs bs rhs }
| Just (con, ty_args, other_args) <- exprIsConApp_maybe (getUnfoldingInRuleMatch env) scrut
-- Works when the scrutinee is a variable with a known unfolding
-- as well as when it's an explicit constructor application
= do { tick (KnownBranch case_bndr)
; case findAlt (DataAlt con) alts of
Nothing -> missingAlt env case_bndr alts cont
Just (DEFAULT, bs, rhs) -> simple_rhs bs rhs
Just (_, bs, rhs) -> knownCon env scrut con ty_args other_args
case_bndr bs rhs cont
}
where
simple_rhs bs rhs = ASSERT( null bs )
do { env' <- simplNonRecX env case_bndr scrut
-- scrut is a constructor application,
-- hence satisfies let/app invariant
; simplExprF env' rhs cont }
--------------------------------------------------
-- 2. Eliminate the case if scrutinee is evaluated
--------------------------------------------------
rebuildCase env scrut case_bndr alts@[(_, bndrs, rhs)] cont
-- See if we can get rid of the case altogether
-- See Note [Case elimination]
-- mkCase made sure that if all the alternatives are equal,
-- then there is now only one (DEFAULT) rhs
-- 2a. Dropping the case altogether, if
-- a) it binds nothing (so it's really just a 'seq')
-- b) evaluating the scrutinee has no side effects
| is_plain_seq
, exprOkForSideEffects scrut
-- The entire case is dead, so we can drop it
-- if the scrutinee converges without having imperative
-- side effects or raising a Haskell exception
-- See Note [PrimOp can_fail and has_side_effects] in PrimOp
= simplExprF env rhs cont
-- 2b. Turn the case into a let, if
-- a) it binds only the case-binder
-- b) unlifted case: the scrutinee is ok-for-speculation
-- lifted case: the scrutinee is in HNF (or will later be demanded)
| all_dead_bndrs
, if is_unlifted
then exprOkForSpeculation scrut -- See Note [Case elimination: unlifted case]
else exprIsHNF scrut -- See Note [Case elimination: lifted case]
|| scrut_is_demanded_var scrut
= do { tick (CaseElim case_bndr)
; env' <- simplNonRecX env case_bndr scrut
; simplExprF env' rhs cont }
-- 2c. Try the seq rules if
-- a) it binds only the case binder
-- b) a rule for seq applies
-- See Note [User-defined RULES for seq] in MkId
| is_plain_seq
= do { let scrut_ty = exprType scrut
rhs_ty = substTy env (exprType rhs)
out_args = [ TyArg { as_arg_ty = scrut_ty
, as_hole_ty = seq_id_ty }
, TyArg { as_arg_ty = rhs_ty
, as_hole_ty = piResultTy seq_id_ty scrut_ty }
, ValArg scrut]
rule_cont = ApplyToVal { sc_dup = NoDup, sc_arg = rhs
, sc_env = env, sc_cont = cont }
env' = zapSubstEnv env
-- Lazily evaluated, so we don't do most of this
; rule_base <- getSimplRules
; mb_rule <- tryRules env' (getRules rule_base seqId) seqId out_args rule_cont
; case mb_rule of
Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
Nothing -> reallyRebuildCase env scrut case_bndr alts cont }
where
is_unlifted = isUnliftedType (idType case_bndr)
all_dead_bndrs = all isDeadBinder bndrs -- bndrs are [InId]
is_plain_seq = all_dead_bndrs && isDeadBinder case_bndr -- Evaluation *only* for effect
seq_id_ty = idType seqId
scrut_is_demanded_var :: CoreExpr -> Bool
-- See Note [Eliminating redundant seqs]
scrut_is_demanded_var (Cast s _) = scrut_is_demanded_var s
scrut_is_demanded_var (Var _) = isStrictDmd (idDemandInfo case_bndr)
scrut_is_demanded_var _ = False
rebuildCase env scrut case_bndr alts cont
= reallyRebuildCase env scrut case_bndr alts cont
--------------------------------------------------
-- 3. Catch-all case
--------------------------------------------------
reallyRebuildCase env scrut case_bndr alts cont
= do { -- Prepare the continuation;
-- The new subst_env is in place
(env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
-- Simplify the alternatives
; (scrut', case_bndr', alts') <- simplAlts env' scrut case_bndr alts dup_cont
; dflags <- getDynFlags
; let alts_ty' = contResultType dup_cont
; case_expr <- mkCase dflags scrut' case_bndr' alts_ty' alts'
-- Notice that rebuild gets the in-scope set from env', not alt_env
       -- (which in any case is only built in simplAlts)
       -- The case binder does *not* scope over the whole returned case-expression
; rebuild env' case_expr nodup_cont }
{-
simplCaseBinder checks whether the scrutinee is a variable, v. If so,
try to eliminate uses of v in the RHSs in favour of case_bndr; that
way, there's a chance that v will now only be used once, and hence
inlined.
Historical note: we used to do the "case binder swap" in the Simplifier
so there were additional complications if the scrutinee was a variable.
Now the binder-swap stuff is done in the occurrence analyser; see
OccurAnal Note [Binder swap].
Note [knownCon occ info]
~~~~~~~~~~~~~~~~~~~~~~~~
If the case binder is not dead, then neither are the pattern bound
variables:
case <any> of x { (a,b) ->
case x of { (p,q) -> p } }
Here (a,b) both look dead, but come alive after the inner case is eliminated.
The point is that we bring into the envt a binding
let x = (a,b)
after the outer case, and that makes (a,b) alive. At least we do unless
the case binder is guaranteed dead.
Note [Case alternative occ info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are simply reconstructing a case (the common case), we always
zap the occurrence info on the binders in the alternatives. Even
if the case binder is dead, the scrutinee is usually a variable, and *that*
can bring the case-alternative binders back to life.
See Note [Add unfolding for scrutinee]
Note [Improving seq]
~~~~~~~~~~~~~~~~~~~
Consider
type family F :: * -> *
type instance F Int = Int
... case e of x { DEFAULT -> rhs } ...
where x::F Int. Then we'd like to rewrite (F Int) to Int, getting
case e `cast` co of x'::Int
I# x# -> let x = x' `cast` sym co
in rhs
so that 'rhs' can take advantage of the form of x'.
Notice that Note [Case of cast] (in OccurAnal) may then apply to the result.
Nota Bene: We only do the [Improving seq] transformation if the
case binder 'x' is actually used in the rhs; that is, if the case
is *not* a *pure* seq.
a) There is no point in adding the cast to a pure seq.
b) There is a good reason not to: doing so would interfere
with seq rules (Note [Built-in RULES for seq] in MkId).
In particular, this [Improving seq] thing *adds* a cast
while [Built-in RULES for seq] *removes* one, so they
just flip-flop.
You might worry about
case v of x { __DEFAULT ->
... case (v `cast` co) of y { I# -> ... }}
This is a pure seq (since x is unused), so [Improving seq] won't happen.
But it's ok: the simplifier will replace 'v' by 'x' in the rhs to get
case v of x { __DEFAULT ->
... case (x `cast` co) of y { I# -> ... }}
Now the outer case is not a pure seq, so [Improving seq] will happen,
and then the inner case will disappear.
The need for [Improving seq] showed up in Roman's experiments. Example:
foo :: F Int -> Int -> Int
foo t n = t `seq` bar n
where
bar 0 = 0
bar n = bar (n - case t of TI i -> i)
Here we'd like to avoid repeatedly evaluating t inside the loop, by
taking advantage of the `seq`.
At one point I did this transformation in LiberateCase, but it's more
robust here. (Otherwise, there's a danger that we'll simply drop the
'seq' altogether, before LiberateCase gets to see it.)
-}
simplAlts :: SimplEnv
-> OutExpr
-> InId -- Case binder
-> [InAlt] -- Non-empty
-> SimplCont
-> SimplM (OutExpr, OutId, [OutAlt]) -- Includes the continuation
-- Like simplExpr, this just returns the simplified alternatives;
-- it does not return an environment
-- The returned alternatives can be empty, if none are possible
simplAlts env scrut case_bndr alts cont'
= do { let env0 = zapFloats env
; (env1, case_bndr1) <- simplBinder env0 case_bndr
; fam_envs <- getFamEnvs
; (alt_env', scrut', case_bndr') <- improveSeq fam_envs env1 scrut
case_bndr case_bndr1 alts
; (imposs_deflt_cons, in_alts) <- prepareAlts scrut' case_bndr' alts
-- NB: it's possible that the returned in_alts is empty: this is handled
-- by the caller (rebuildCase) in the missingAlt function
; alts' <- mapM (simplAlt alt_env' (Just scrut') imposs_deflt_cons case_bndr' cont') in_alts
; -- pprTrace "simplAlts" (ppr case_bndr $$ ppr alts_ty $$ ppr alts_ty' $$ ppr alts $$ ppr cont') $
return (scrut', case_bndr', alts') }
------------------------------------
improveSeq :: (FamInstEnv, FamInstEnv) -> SimplEnv
-> OutExpr -> InId -> OutId -> [InAlt]
-> SimplM (SimplEnv, OutExpr, OutId)
-- Note [Improving seq]
improveSeq fam_envs env scrut case_bndr case_bndr1 [(DEFAULT,_,_)]
| not (isDeadBinder case_bndr) -- Not a pure seq! See Note [Improving seq]
, Just (co, ty2) <- topNormaliseType_maybe fam_envs (idType case_bndr1)
= do { case_bndr2 <- newId (fsLit "nt") ty2
; let rhs = DoneEx (Var case_bndr2 `Cast` mkSymCo co)
env2 = extendIdSubst env case_bndr rhs
; return (env2, scrut `Cast` co, case_bndr2) }
improveSeq _ env scrut _ case_bndr1 _
= return (env, scrut, case_bndr1)
------------------------------------
simplAlt :: SimplEnv
-> Maybe OutExpr -- The scrutinee
-> [AltCon] -- These constructors can't be present when
-- matching the DEFAULT alternative
-> OutId -- The case binder
-> SimplCont
-> InAlt
-> SimplM OutAlt
simplAlt env _ imposs_deflt_cons case_bndr' cont' (DEFAULT, bndrs, rhs)
= ASSERT( null bndrs )
do { let env' = addBinderUnfolding env case_bndr'
(mkOtherCon imposs_deflt_cons)
-- Record the constructors that the case-binder *can't* be.
; rhs' <- simplExprC env' rhs cont'
; return (DEFAULT, [], rhs') }
simplAlt env scrut' _ case_bndr' cont' (LitAlt lit, bndrs, rhs)
= ASSERT( null bndrs )
do { env' <- addAltUnfoldings env scrut' case_bndr' (Lit lit)
; rhs' <- simplExprC env' rhs cont'
; return (LitAlt lit, [], rhs') }
simplAlt env scrut' _ case_bndr' cont' (DataAlt con, vs, rhs)
= do { -- Deal with the pattern-bound variables
-- Mark the ones that are in ! positions in the
-- data constructor as certainly-evaluated.
-- NB: simplLamBinders preserves this eval info
; let vs_with_evals = add_evals (dataConRepStrictness con)
; (env', vs') <- simplLamBndrs env vs_with_evals
-- Bind the case-binder to (con args)
; let inst_tys' = tyConAppArgs (idType case_bndr')
con_app :: OutExpr
con_app = mkConApp2 con inst_tys' vs'
; env'' <- addAltUnfoldings env' scrut' case_bndr' con_app
; rhs' <- simplExprC env'' rhs cont'
; return (DataAlt con, vs', rhs') }
where
-- add_evals records the evaluated-ness of the bound variables of
-- a case pattern. This is *important*. Consider
-- data T = T !Int !Int
--
-- case x of { T a b -> T (a+1) b }
--
-- We really must record that b is already evaluated so that we don't
-- go and re-evaluate it when constructing the result.
-- See Note [Data-con worker strictness] in MkId.hs
add_evals the_strs
= go vs the_strs
where
go [] [] = []
go (v:vs') strs | isTyVar v = v : go vs' strs
go (v:vs') (str:strs)
| isMarkedStrict str = eval v : go vs' strs
| otherwise = zap v : go vs' strs
go _ _ = pprPanic "cat_evals"
(ppr con $$
ppr vs $$
ppr_with_length the_strs $$
ppr_with_length (dataConRepArgTys con) $$
ppr_with_length (dataConRepStrictness con))
where
ppr_with_length list
= ppr list <+> parens (text "length =" <+> ppr (length list))
-- NB: If this panic triggers, note that
-- NoStrictnessMark doesn't print!
zap v = zapIdOccInfo v -- See Note [Case alternative occ info]
eval v = zap v `setIdUnfolding` evaldUnfolding
addAltUnfoldings :: SimplEnv -> Maybe OutExpr -> OutId -> OutExpr -> SimplM SimplEnv
addAltUnfoldings env scrut case_bndr con_app
= do { dflags <- getDynFlags
; let con_app_unf = mkSimpleUnfolding dflags con_app
env1 = addBinderUnfolding env case_bndr con_app_unf
-- See Note [Add unfolding for scrutinee]
env2 = case scrut of
Just (Var v) -> addBinderUnfolding env1 v con_app_unf
Just (Cast (Var v) co) -> addBinderUnfolding env1 v $
mkSimpleUnfolding dflags (Cast con_app (mkSymCo co))
_ -> env1
; traceSmpl "addAltUnf" (vcat [ppr case_bndr <+> ppr scrut, ppr con_app])
; return env2 }
addBinderUnfolding :: SimplEnv -> Id -> Unfolding -> SimplEnv
addBinderUnfolding env bndr unf
| debugIsOn, Just tmpl <- maybeUnfoldingTemplate unf
= WARN( not (eqType (idType bndr) (exprType tmpl)),
ppr bndr $$ ppr (idType bndr) $$ ppr tmpl $$ ppr (exprType tmpl) )
modifyInScope env (bndr `setIdUnfolding` unf)
| otherwise
= modifyInScope env (bndr `setIdUnfolding` unf)
zapBndrOccInfo :: Bool -> Id -> Id
-- Consider case e of b { (a,b) -> ... }
-- Then if we bind b to (a,b) in "...", and b is not dead,
-- then we must zap the deadness info on a,b
zapBndrOccInfo keep_occ_info pat_id
| keep_occ_info = pat_id
| otherwise = zapIdOccInfo pat_id
{-
Note [Add unfolding for scrutinee]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general it's unlikely that a variable scrutinee will appear
in the case alternatives case x of { ...x unlikely to appear... }
because the binder-swap in OccAnal has got rid of all such occurrences
See Note [Binder swap] in OccAnal.
BUT it is still VERY IMPORTANT to add a suitable unfolding for a
variable scrutinee, in simplAlt. Here's why
case x of y
(a,b) -> case b of c
I# v -> ...(f y)...
There is no occurrence of 'b' in the (...(f y)...). But y gets
the unfolding (a,b), and *that* mentions b. If f has a RULE
RULE f (p, I# q) = ...
we want that rule to match, so we must extend the in-scope env with a
suitable unfolding for 'y'. It's *essential* for rule matching; but
it's also good for case-elimination -- suppose that 'f' was inlined
and did multi-level case analysis, then we'd solve it in one
simplifier sweep instead of two.
Exactly the same issue arises in SpecConstr;
see Note [Add scrutinee to ValueEnv too] in SpecConstr
HOWEVER, given
case x of y { Just a -> r1; Nothing -> r2 }
we do not want to add the unfolding x -> y to 'x', which might seem cool,
since 'y' itself has different unfoldings in r1 and r2. Reason: if we
did that, we'd have to zap y's deadness info and that is a very useful
piece of information.
So instead we add the unfolding x -> Just a, and x -> Nothing in the
respective RHSs.
************************************************************************
* *
\subsection{Known constructor}
* *
************************************************************************
We are a bit careful with occurrence info. Here's an example
(\x* -> case x of (a*, b) -> f a) (h v, e)
where the * means "occurs once". This effectively becomes
case (h v, e) of (a*, b) -> f a)
and then
let a* = h v; b = e in f a
and then
f (h v)
All this should happen in one sweep.
-}
knownCon :: SimplEnv
-> OutExpr -- The scrutinee
-> DataCon -> [OutType] -> [OutExpr] -- The scrutinee (in pieces)
-> InId -> [InBndr] -> InExpr -- The alternative
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
knownCon env scrut dc dc_ty_args dc_args bndr bs rhs cont
= do { env' <- bind_args env bs dc_args
; env'' <- bind_case_bndr env'
; simplExprF env'' rhs cont }
where
zap_occ = zapBndrOccInfo (isDeadBinder bndr) -- bndr is an InId
-- Ugh!
bind_args env' [] _ = return env'
bind_args env' (b:bs') (Type ty : args)
= ASSERT( isTyVar b )
bind_args (extendTvSubst env' b ty) bs' args
bind_args env' (b:bs') (Coercion co : args)
= ASSERT( isCoVar b )
bind_args (extendCvSubst env' b co) bs' args
bind_args env' (b:bs') (arg : args)
= ASSERT( isId b )
do { let b' = zap_occ b
-- Note that the binder might be "dead", because it doesn't
-- occur in the RHS; and simplNonRecX may therefore discard
-- it via postInlineUnconditionally.
-- Nevertheless we must keep it if the case-binder is alive,
-- because it may be used in the con_app. See Note [knownCon occ info]
; env'' <- simplNonRecX env' b' arg -- arg satisfies let/app invariant
; bind_args env'' bs' args }
bind_args _ _ _ =
pprPanic "bind_args" $ ppr dc $$ ppr bs $$ ppr dc_args $$
text "scrut:" <+> ppr scrut
-- It's useful to bind bndr to scrut, rather than to a fresh
-- binding x = Con arg1 .. argn
-- because very often the scrut is a variable, so we avoid
-- creating, and then subsequently eliminating, a let-binding
-- BUT, if scrut is a not a variable, we must be careful
-- about duplicating the arg redexes; in that case, make
-- a new con-app from the args
bind_case_bndr env
| isDeadBinder bndr = return env
| exprIsTrivial scrut = return (extendIdSubst env bndr (DoneEx scrut))
| otherwise = do { dc_args <- mapM (simplVar env) bs
-- dc_ty_args are already OutTypes,
-- but bs are InBndrs
; let con_app = Var (dataConWorkId dc)
`mkTyApps` dc_ty_args
`mkApps` dc_args
; simplNonRecX env bndr con_app }
-------------------
missingAlt :: SimplEnv -> Id -> [InAlt] -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- This isn't strictly an error, although it is unusual.
-- It's possible that the simplifier might "see" that
-- an inner case has no accessible alternatives before
-- it "sees" that the entire branch of an outer case is
-- inaccessible. So we simply put an error case here instead.
missingAlt env case_bndr _ cont
= WARN( True, text "missingAlt" <+> ppr case_bndr )
return (env, mkImpossibleExpr (contResultType cont))
{-
************************************************************************
* *
\subsection{Duplicating continuations}
* *
************************************************************************
-}
prepareCaseCont :: SimplEnv
-> [InAlt] -> SimplCont
-> SimplM (SimplEnv,
SimplCont, -- Dupable part
SimplCont) -- Non-dupable part
-- We are considering
-- K[case _ of { p1 -> r1; ...; pn -> rn }]
-- where K is some enclosing continuation for the case
-- Goal: split K into two pieces Kdup,Knodup so that
-- a) Kdup can be duplicated
-- b) Knodup[Kdup[e]] = K[e]
-- The idea is that we'll transform thus:
-- Knodup[ (case _ of { p1 -> Kdup[r1]; ...; pn -> Kdup[rn] }
--
-- We may also return some extra bindings in SimplEnv (that scope over
-- the entire continuation)
--
-- When case-of-case is off, just make the entire continuation non-dupable
prepareCaseCont env alts cont
| not (sm_case_case (getMode env)) = return (env, mkBoringStop (contHoleType cont), cont)
| not (many_alts alts) = return (env, cont, mkBoringStop (contResultType cont))
| otherwise = mkDupableCont env cont
where
many_alts :: [InAlt] -> Bool -- True iff strictly > 1 non-bottom alternative
many_alts [] = False -- See Note [Bottom alternatives]
many_alts [_] = False
many_alts (alt:alts)
| is_bot_alt alt = many_alts alts
| otherwise = not (all is_bot_alt alts)
is_bot_alt (_,_,rhs) = exprIsBottom rhs
{-
Note [Bottom alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have
case (case x of { A -> error .. ; B -> e; C -> error .. })
of alts
then we can just duplicate those alts because the A and C cases
will disappear immediately. This is more direct than creating
join points and inlining them away; and in some cases we would
not even create the join points (see Note [Single-alternative case])
and we would keep the case-of-case which is silly. See Trac #4930.
-}
mkDupableCont :: SimplEnv -> SimplCont
-> SimplM (SimplEnv, SimplCont, SimplCont)
mkDupableCont env cont
| contIsDupable cont
= return (env, cont, mkBoringStop (contResultType cont))
mkDupableCont _ (Stop {}) = panic "mkDupableCont" -- Handled by previous eqn
mkDupableCont env (CastIt ty cont)
= do { (env', dup, nodup) <- mkDupableCont env cont
; return (env', CastIt ty dup, nodup) }
-- Duplicating ticks for now, not sure if this is good or not
mkDupableCont env cont@(TickIt{})
= return (env, mkBoringStop (contHoleType cont), cont)
mkDupableCont env cont@(StrictBind {})
= return (env, mkBoringStop (contHoleType cont), cont)
-- See Note [Duplicating StrictBind]
mkDupableCont env (StrictArg info cci cont)
-- See Note [Duplicating StrictArg]
= do { (env', dup, nodup) <- mkDupableCont env cont
; (env'', args') <- mapAccumLM makeTrivialArg env' (ai_args info)
; return (env'', StrictArg (info { ai_args = args' }) cci dup, nodup) }
mkDupableCont env cont@(ApplyToTy { sc_cont = tail })
= do { (env', dup_cont, nodup_cont) <- mkDupableCont env tail
; return (env', cont { sc_cont = dup_cont }, nodup_cont ) }
mkDupableCont env (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_env = se, sc_cont = cont })
= -- e.g. [...hole...] (...arg...)
-- ==>
-- let a = ...arg...
-- in [...hole...] a
do { (env', dup_cont, nodup_cont) <- mkDupableCont env cont
; (_, se', arg') <- simplArg env' dup se arg
; (env'', arg'') <- makeTrivial NotTopLevel env' (fsLit "karg") arg'
; let app_cont = ApplyToVal { sc_arg = arg'', sc_env = se'
, sc_dup = OkToDup, sc_cont = dup_cont }
; return (env'', app_cont, nodup_cont) }
mkDupableCont env cont@(Select { sc_bndr = case_bndr, sc_alts = [(_, bs, _rhs)] })
-- See Note [Single-alternative case]
-- | not (exprIsDupable rhs && contIsDupable case_cont)
-- | not (isDeadBinder case_bndr)
| all isDeadBinder bs -- InIds
&& not (isUnliftedType (idType case_bndr))
-- Note [Single-alternative-unlifted]
= return (env, mkBoringStop (contHoleType cont), cont)
mkDupableCont env (Select { sc_bndr = case_bndr, sc_alts = alts
, sc_env = se, sc_cont = cont })
= -- e.g. (case [...hole...] of { pi -> ei })
-- ===>
-- let ji = \xij -> ei
-- in case [...hole...] of { pi -> ji xij }
do { tick (CaseOfCase case_bndr)
; (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
-- NB: We call prepareCaseCont here. If there is only one
-- alternative, then dup_cont may be big, but that's ok
-- because we push it into the single alternative, and then
-- use mkDupableAlt to turn that simplified alternative into
-- a join point if it's too big to duplicate.
-- And this is important: see Note [Fusing case continuations]
; let alt_env = se `setInScope` env'
; (alt_env', case_bndr') <- simplBinder alt_env case_bndr
; alts' <- mapM (simplAlt alt_env' Nothing [] case_bndr' dup_cont) alts
-- Safe to say that there are no handled-cons for the DEFAULT case
-- NB: simplBinder does not zap deadness occ-info, so
-- a dead case_bndr' will still advertise its deadness
-- This is really important because in
-- case e of b { (# p,q #) -> ... }
-- b is always dead, and indeed we are not allowed to bind b to (# p,q #),
-- which might happen if e was an explicit unboxed pair and b wasn't marked dead.
-- In the new alts we build, we have the new case binder, so it must retain
-- its deadness.
-- NB: we don't use alt_env further; it has the substEnv for
-- the alternatives, and we don't want that
; (env'', alts'') <- mkDupableAlts env' case_bndr' alts'
; return (env'', -- Note [Duplicated env]
Select { sc_dup = OkToDup
, sc_bndr = case_bndr', sc_alts = alts''
, sc_env = zapSubstEnv env''
, sc_cont = mkBoringStop (contHoleType nodup_cont) },
nodup_cont) }
mkDupableAlts :: SimplEnv -> OutId -> [InAlt]
-> SimplM (SimplEnv, [InAlt])
-- Absorbs the continuation into the new alternatives
mkDupableAlts env case_bndr' the_alts
= go env the_alts
where
go env0 [] = return (env0, [])
go env0 (alt:alts)
= do { (env1, alt') <- mkDupableAlt env0 case_bndr' alt
; (env2, alts') <- go env1 alts
; return (env2, alt' : alts' ) }
mkDupableAlt :: SimplEnv -> OutId -> (AltCon, [CoreBndr], CoreExpr)
-> SimplM (SimplEnv, (AltCon, [CoreBndr], CoreExpr))
mkDupableAlt env case_bndr (con, bndrs', rhs') = do
dflags <- getDynFlags
if exprIsDupable dflags rhs' -- Note [Small alternative rhs]
then return (env, (con, bndrs', rhs'))
else
do { let rhs_ty' = exprType rhs'
scrut_ty = idType case_bndr
case_bndr_w_unf
= case con of
DEFAULT -> case_bndr
DataAlt dc -> setIdUnfolding case_bndr unf
where
-- See Note [Case binders and join points]
unf = mkInlineUnfolding Nothing rhs
rhs = mkConApp2 dc (tyConAppArgs scrut_ty) bndrs'
LitAlt {} -> WARN( True, text "mkDupableAlt"
<+> ppr case_bndr <+> ppr con )
case_bndr
-- The case binder is alive but trivial, so why has
-- it not been substituted away?
used_bndrs' | isDeadBinder case_bndr = filter abstract_over bndrs'
| otherwise = bndrs' ++ [case_bndr_w_unf]
abstract_over bndr
| isTyVar bndr = True -- Abstract over all type variables just in case
| otherwise = not (isDeadBinder bndr)
-- The deadness info on the new Ids is preserved by simplBinders
; (final_bndrs', final_args) -- Note [Join point abstraction]
<- if (any isId used_bndrs')
then return (used_bndrs', varsToCoreExprs used_bndrs')
else do { rw_id <- newId (fsLit "w") voidPrimTy
; return ([setOneShotLambda rw_id], [Var voidPrimId]) }
; join_bndr <- newId (fsLit "$j") (mkLamTypes final_bndrs' rhs_ty')
-- Note [Funky mkLamTypes]
; let -- We make the lambdas into one-shot-lambdas. The
-- join point is sure to be applied at most once, and doing so
-- prevents the body of the join point being floated out by
-- the full laziness pass
really_final_bndrs = map one_shot final_bndrs'
one_shot v | isId v = setOneShotLambda v
| otherwise = v
join_rhs = mkLams really_final_bndrs rhs'
join_arity = exprArity join_rhs
join_call = mkApps (Var join_bndr) final_args
; env' <- addPolyBind NotTopLevel env (NonRec (join_bndr `setIdArity` join_arity) join_rhs)
; return (env', (con, bndrs', join_call)) }
-- See Note [Duplicated env]
{-
Note [Fusing case continuations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's important to fuse two successive case continuations when the
first has one alternative. That's why we call prepareCaseCont here.
Consider this, which arises from thunk splitting (see Note [Thunk
splitting] in WorkWrap):
let
x* = case (case v of {pn -> rn}) of
I# a -> I# a
in body
The simplifier will find
(Var v) with continuation
Select (pn -> rn) (
Select [I# a -> I# a] (
StrictBind body Stop
So we'll call mkDupableCont on
Select [I# a -> I# a] (StrictBind body Stop)
There is just one alternative in the first Select, so we want to
simplify the rhs (I# a) with continuation (StrictBind body Stop)
Supposing that body is big, we end up with
let $j a = <let x = I# a in body>
in case v of { pn -> case rn of
I# a -> $j a }
This is just what we want because the rn produces a box that
the case rn cancels with.
See Trac #4957 for a fuller example.
Note [Case binders and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
case (case .. ) of c {
I# c# -> ....c....
If we make a join point with c but not c# we get
$j = \c -> ....c....
But if later inlining scrutinises the c, thus
$j = \c -> ... case c of { I# y -> ... } ...
we won't see that 'c' has already been scrutinised. This actually
happens in the 'tabulate' function in wave4main, and makes a significant
difference to allocation.
An alternative plan is this:
$j = \c# -> let c = I# c# in ...c....
but that is bad if 'c' is *not* later scrutinised.
So instead we do both: we pass 'c' and 'c#' , and record in c's inlining
(a stable unfolding) that it's really I# c#, thus
$j = \c# -> \c[=I# c#] -> ...c....
Absence analysis may later discard 'c'.
NB: take great care when doing strictness analysis;
see Note [Lamba-bound unfoldings] in DmdAnal.
Also note that we can still end up passing stuff that isn't used. Before
strictness analysis we have
let $j x y c{=(x,y)} = (h c, ...)
in ...
After strictness analysis we see that h is strict, we end up with
let $j x y c{=(x,y)} = ($wh x y, ...)
and c is unused.
Note [Duplicated env]
~~~~~~~~~~~~~~~~~~~~~
Some of the alternatives are simplified, but have not been turned into a join point
So they *must* have a zapped subst-env. So we can't use completeNonRecX to
bind the join point, because it might do PostInlineUnconditionally, and
we'd lose that when zapping the subst-env. We could have a per-alt subst-env,
but zapping it (as we do in mkDupableCont, the Select case) is safe, and
at worst delays the join-point inlining.
Note [Small alternative rhs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is worth checking for a small RHS because otherwise we
get extra let bindings that may cause an extra iteration of the simplifier to
inline back in place. Quite often the rhs is just a variable or constructor.
The Ord instance of Maybe in PrelMaybe.hs, for example, took several extra
iterations because the version with the let bindings looked big, and so wasn't
inlined, but after the join points had been inlined it looked smaller, and so
was inlined.
NB: we have to check the size of rhs', not rhs.
Duplicating a small InAlt might invalidate occurrence information
However, if it *is* dupable, we return the *un* simplified alternative,
because otherwise we'd need to pair it up with an empty subst-env....
but we only have one env shared between all the alts.
(Remember we must zap the subst-env before re-simplifying something).
Rather than do this we simply agree to re-simplify the original (small) thing later.
Note [Funky mkLamTypes]
~~~~~~~~~~~~~~~~~~~~~~
Notice the funky mkLamTypes. If the constructor has existentials
it's possible that the join point will be abstracted over
type variables as well as term variables.
Example: Suppose we have
data T = forall t. C [t]
Then faced with
case (case e of ...) of
C t xs::[t] -> rhs
We get the join point
let j :: forall t. [t] -> ...
j = /\t \xs::[t] -> rhs
in
case (case e of ...) of
C t xs::[t] -> j t xs
Note [Join point abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Join points always have at least one value argument,
for several reasons
* If we try to lift a primitive-typed something out
for let-binding-purposes, we will *caseify* it (!),
with potentially-disastrous strictness results. So
instead we turn it into a function: \v -> e
where v::Void#. The value passed to this function is void,
which generates (almost) no code.
* CPR. We used to say "&& isUnliftedType rhs_ty'" here, but now
we make the join point into a function whenever used_bndrs'
is empty. This makes the join-point more CPR friendly.
Consider: let j = if .. then I# 3 else I# 4
in case .. of { A -> j; B -> j; C -> ... }
Now CPR doesn't w/w j because it's a thunk, so
that means that the enclosing function can't w/w either,
which is a lose. Here's the example that happened in practice:
kgmod :: Int -> Int -> Int
kgmod x y = if x > 0 && y < 0 || x < 0 && y > 0
then 78
else 5
* Let-no-escape. We want a join point to turn into a let-no-escape
so that it is implemented as a jump, and one of the conditions
for LNE is that it's not updatable. In CoreToStg, see
Note [What is a non-escaping let]
* Floating. Since a join point will be entered once, no sharing is
gained by floating out, but something might be lost by doing
so because it might be allocated.
I have seen a case alternative like this:
True -> \v -> ...
It's a bit silly to add the realWorld dummy arg in this case, making
$j = \s v -> ...
True -> $j s
(the \v alone is enough to make CPR happy) but I think it's rare
There's a slight infelicity here: we pass the overall
case_bndr to all the join points if it's used in *any* RHS,
because we don't know its usage in each RHS separately
Note [Duplicating StrictArg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The original plan had (where E is a big argument)
e.g. f E [..hole..]
==> let $j = \a -> f E a
in $j [..hole..]
But this is terrible! Here's an example:
&& E (case x of { T -> F; F -> T })
Now, && is strict so we end up simplifying the case with
an ArgOf continuation. If we let-bind it, we get
let $j = \v -> && E v
in simplExpr (case x of { T -> F; F -> T })
(ArgOf (\r -> $j r)
And after simplifying more we get
let $j = \v -> && E v
in case x of { T -> $j F; F -> $j T }
Which is a Very Bad Thing
What we do now is this
f E [..hole..]
==> let a = E
in f a [..hole..]
Now if the thing in the hole is a case expression (which is when
we'll call mkDupableCont), we'll push the function call into the
branches, which is what we want. Now RULES for f may fire, and
call-pattern specialisation. Here's an example from Trac #3116
go (n+1) (case l of
1 -> bs'
_ -> Chunk p fpc (o+1) (l-1) bs')
If we can push the call for 'go' inside the case, we get
call-pattern specialisation for 'go', which is *crucial* for
this program.
Here is the (&&) example:
&& E (case x of { T -> F; F -> T })
==> let a = E in
case x of { T -> && a F; F -> && a T }
Much better!
Notice that
* Arguments to f *after* the strict one are handled by
the ApplyToVal case of mkDupableCont. Eg
f [..hole..] E
* We can only do the let-binding of E because the function
part of a StrictArg continuation is an explicit syntax
tree. In earlier versions we represented it as a function
(CoreExpr -> CoreExpr) which we couldn't take apart.
Do *not* duplicate StrictBind and StrictArg continuations. We gain
nothing by propagating them into the expressions, and we do lose a
lot.
The desire not to duplicate is the entire reason that
mkDupableCont returns a pair of continuations.
Note [Duplicating StrictBind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unlike StrictArg, there doesn't seem anything to gain from
duplicating a StrictBind continuation, so we don't.
Note [Single-alternative cases]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This case is just like the ArgOf case. Here's an example:
data T a = MkT !a
...(MkT (abs x))...
Then we get
case (case x of I# x' ->
case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
Because the (case x) has only one alternative, we'll transform to
case x of I# x' ->
case (case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
But now we do *NOT* want to make a join point etc, giving
case x of I# x' ->
let $j = \y -> MkT y
in case x' <# 0# of
True -> $j (I# (negate# x'))
False -> $j (I# x')
In this case the $j will inline again, but suppose there was a big
strict computation enclosing the original call to MkT. Then, it won't
"see" the MkT any more, because it's big and won't get duplicated.
And, what is worse, nothing was gained by the case-of-case transform.
So, in circumstances like these, we don't want to build join points
and push the outer case into the branches of the inner one. Instead,
don't duplicate the continuation.
When should we use this strategy? We should not use it on *every*
single-alternative case:
e.g. case (case ....) of (a,b) -> (# a,b #)
Here we must push the outer case into the inner one!
Other choices:
* Match [(DEFAULT,_,_)], but in the common case of Int,
the alternative-filling-in code turned the outer case into
case (...) of y { I# _ -> MkT y }
* Match on single alternative plus (not (isDeadBinder case_bndr))
Rationale: pushing the case inwards won't eliminate the construction.
But there's a risk of
case (...) of y { (a,b) -> let z=(a,b) in ... }
Now y looks dead, but it'll come alive again. Still, this
seems like the best option at the moment.
* Match on single alternative plus (all (isDeadBinder bndrs))
Rationale: this is essentially seq.
* Match when the rhs is *not* duplicable, and hence would lead to a
join point. This catches the disaster-case above. We can test
the *un-simplified* rhs, which is fine. It might get bigger or
smaller after simplification; if it gets smaller, this case might
fire next time round. NB also that we must test contIsDupable
case_cont *too*, because case_cont might be big!
HOWEVER: I found that this version doesn't work well, because
we can get let x = case (...) of { small } in ...case x...
When x is inlined into its full context, we find that it was a bad
idea to have pushed the outer case inside the (...) case.
There is a cost to not doing case-of-case; see Trac #10626.
Note [Single-alternative-unlifted]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here's another single-alternative where we really want to do case-of-case:
data Mk1 = Mk1 Int# | Mk2 Int#
M1.f =
\r [x_s74 y_s6X]
case
case y_s6X of tpl_s7m {
M1.Mk1 ipv_s70 -> ipv_s70;
M1.Mk2 ipv_s72 -> ipv_s72;
}
of
wild_s7c
{ __DEFAULT ->
case
case x_s74 of tpl_s7n {
M1.Mk1 ipv_s77 -> ipv_s77;
M1.Mk2 ipv_s79 -> ipv_s79;
}
of
wild1_s7b
{ __DEFAULT -> ==# [wild1_s7b wild_s7c];
};
};
So the outer case is doing *nothing at all*, other than serving as a
join-point. In this case we really want to do case-of-case and decide
whether to use a real join point or just duplicate the continuation:
let $j s7c = case x of
Mk1 ipv77 -> (==) s7c ipv77
Mk2 ipv79 -> (==) s7c ipv79
in
case y of
Mk1 ipv70 -> $j ipv70
Mk2 ipv72 -> $j ipv72
Hence: check whether the case binder's type is unlifted, because then
the outer case is *not* a seq.
************************************************************************
* *
Unfoldings
* *
************************************************************************
-}
simplLetUnfolding :: SimplEnv-> TopLevelFlag
-> InId
-> OutExpr
-> Unfolding -> SimplM Unfolding
simplLetUnfolding env top_lvl id new_rhs unf
| isStableUnfolding unf
= simplUnfolding env top_lvl id unf
| otherwise
= bottoming `seq` -- See Note [Force bottoming field]
do { dflags <- getDynFlags
; return (mkUnfolding dflags InlineRhs (isTopLevel top_lvl) bottoming new_rhs) }
-- We make an unfolding *even for loop-breakers*.
-- Reason: (a) It might be useful to know that they are WHNF
-- (b) In TidyPgm we currently assume that, if we want to
-- expose the unfolding then indeed we *have* an unfolding
-- to expose. (We could instead use the RHS, but currently
-- we don't.) The simple thing is always to have one.
where
bottoming = isBottomingId id
simplUnfolding :: SimplEnv-> TopLevelFlag -> InId -> Unfolding -> SimplM Unfolding
-- Note [Setting the new unfolding]
simplUnfolding env top_lvl id unf
= case unf of
NoUnfolding -> return unf
OtherCon {} -> return unf
DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args }
-> do { (env', bndrs') <- simplBinders rule_env bndrs
; args' <- mapM (simplExpr env') args
; return (mkDFunUnfolding bndrs' con args') }
CoreUnfolding { uf_tmpl = expr, uf_src = src, uf_guidance = guide }
| isStableSource src
-> do { expr' <- simplExpr rule_env expr
; case guide of
UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok } -- Happens for INLINE things
-> let guide' = UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok
, ug_boring_ok = inlineBoringOk expr' }
-- Refresh the boring-ok flag, in case expr'
-- has got small. This happens, notably in the inlinings
-- for dfuns for single-method classes; see
-- Note [Single-method classes] in TcInstDcls.
-- A test case is Trac #4138
in return (mkCoreUnfolding src is_top_lvl expr' guide')
-- See Note [Top-level flag on inline rules] in CoreUnfold
_other -- Happens for INLINABLE things
-> bottoming `seq` -- See Note [Force bottoming field]
do { dflags <- getDynFlags
; return (mkUnfolding dflags src is_top_lvl bottoming expr') } }
-- If the guidance is UnfIfGoodArgs, this is an INLINABLE
-- unfolding, and we need to make sure the guidance is kept up
-- to date with respect to any changes in the unfolding.
| otherwise -> return noUnfolding -- Discard unstable unfoldings
where
bottoming = isBottomingId id
is_top_lvl = isTopLevel top_lvl
act = idInlineActivation id
rule_env = updMode (updModeForStableUnfoldings act) env
-- See Note [Simplifying inside stable unfoldings] in SimplUtils
{-
Note [Force bottoming field]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to force bottoming, or the new unfolding holds
on to the old unfolding (which is part of the id).
Note [Setting the new unfolding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* If there's an INLINE pragma, we simplify the RHS gently. Maybe we
should do nothing at all, but simplifying gently might get rid of
more crap.
* If not, we make an unfolding from the new RHS. But *only* for
non-loop-breakers. Making loop breakers not have an unfolding at all
means that we can avoid tests in exprIsConApp, for example. This is
important: if exprIsConApp says 'yes' for a recursive thing, then we
can get into an infinite loop
If there's a stable unfolding on a loop breaker (which happens for
INLINEABLE), we hang on to the inlining. It's pretty dodgy, but the
user did say 'INLINE'. May need to revisit this choice.
************************************************************************
* *
Rules
* *
************************************************************************
Note [Rules in a letrec]
~~~~~~~~~~~~~~~~~~~~~~~~
After creating fresh binders for the binders of a letrec, we
substitute the RULES and add them back onto the binders; this is done
*before* processing any of the RHSs. This is important. Manuel found
cases where he really, really wanted a RULE for a recursive function
to apply in that function's own right-hand side.
See Note [Loop breaking and RULES] in OccAnal.
-}
addBndrRules :: SimplEnv -> InBndr -> OutBndr -> SimplM (SimplEnv, OutBndr)
-- Rules are added back onto the binder
addBndrRules env in_id out_id
| null old_rules
= return (env, out_id)
| otherwise
= do { new_rules <- simplRules env (Just (idName out_id)) old_rules
; let final_id = out_id `setIdSpecialisation` mkRuleInfo new_rules
; return (modifyInScope env final_id, final_id) }
where
old_rules = ruleInfoRules (idSpecialisation in_id)
simplRules :: SimplEnv -> Maybe Name -> [CoreRule] -> SimplM [CoreRule]
simplRules env mb_new_nm rules
= mapM simpl_rule rules
where
simpl_rule rule@(BuiltinRule {})
= return rule
simpl_rule rule@(Rule { ru_bndrs = bndrs, ru_args = args
, ru_fn = fn_name, ru_rhs = rhs })
= do { (env', bndrs') <- simplBinders env bndrs
; let rule_env = updMode updModeForRules env'
; args' <- mapM (simplExpr rule_env) args
; rhs' <- simplExpr rule_env rhs
; return (rule { ru_bndrs = bndrs'
, ru_fn = mb_new_nm `orElse` fn_name
, ru_args = args'
, ru_rhs = rhs' }) }
|
sgillespie/ghc
|
compiler/simplCore/Simplify.hs
|
Haskell
|
bsd-3-clause
| 124,418
|
module Bot where
import Args
import Control.Applicative.Trans.Either
import Control.Concurrent.WriteSem
import Control.Concurrent
import Control.Concurrent.Async
import Control.Exception (catchJust)
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader
import Data.Binary
import Data.Char
import Data.Classifier.NaiveBayes (NaiveBayes)
import Data.Coerce
import Data.Default.Class
import Data.Function (fix)
import Data.Maybe
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Time.Clock
import Data.Time.Format
import Data.Yaml
import Reddit hiding (failWith, bans)
import Reddit.Types.Comment (PostComments(..), CommentReference(..))
import Reddit.Types.Listing
import Reddit.Types.Subreddit (SubredditName(..))
import Reddit.Types.User (Username(..))
import System.Exit
import System.IO
import System.IO.Error
import qualified Data.Bounded.Set as Bounded
import qualified Data.Classifier.NaiveBayes as NB
import qualified Data.Counter as Counter
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import qualified Reddit.Types.Comment as Comment
import qualified Reddit.Types.Post as Post
data ConcreteSettings =
ConcreteSettings { username :: Username
, password :: Password
, subreddit :: SubredditName
, replyText :: ReplyText
, refreshTime :: RefreshTime
, bans :: [Username]
, classifier :: Maybe (NaiveBayes Bool Text)
, useClassifier :: Bool
, verboseOutput :: Bool }
deriving (Show)
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
hSetBuffering stderr NoBuffering
ConfigFile fp <- optionsFromArgs
decodeFileEither fp >>= \case
Left err -> do
print err
exitFailure
Right (Config b m) -> do
resolvedSettings <- mapM confirm $ resolve b m
let (lefts, rights) = Map.mapEither id resolvedSettings
if Map.null lefts
then do
sem <- newWriteSemIO
void $ mapConcurrently (\(k, s) -> run sem (s k)) $ Map.toList rights
else do
Map.foldrWithKey handleError (return ()) lefts
exitFailure
handleError :: SubredditName -> [Text] -> IO () -> IO ()
handleError (R r) errs m = m >> do
Text.putStrLn $ pluralize errs "Error" <> " with settings for subreddit /r/" <> r <> ":"
forM_ errs $ \err ->
Text.putStr $ " - " <> err
pluralize :: [a] -> Text -> Text
pluralize [_] x = x
pluralize _ x = x <> "s"
confirm :: Settings -> IO (Either [Text] (SubredditName -> ConcreteSettings))
confirm (Settings u p r b t c x v) =
runEitherA $
subredditLastSettings
<$> justOr ["Missing username"] u
<*> justOr ["Missing password"] p
<*> loadReply r
<*> pure (fromMaybe 5 t)
<*> loadBans b
<*> sequenceA (fmap loadClassifier c)
<*> pure x
<*> pure (fromMaybe False v)
subredditLastSettings :: Username -> Password -> ReplyText -> RefreshTime -> [Username] -> Maybe (NaiveBayes Bool Text) -> Bool -> Bool -> SubredditName -> ConcreteSettings
subredditLastSettings u p r t b n x v s = ConcreteSettings u p s r t b n x v
loadBans :: [Bans] -> EitherA [Text] IO [Username]
loadBans = fmap concat . sequenceA . map f
where
f (BansList us) = pure us
f (BansFilePath fp) = EitherA $
decodeFileEither fp >>= \case
Left err -> return $ Left [Text.pack $ show err]
Right xs -> return $ Right $ map Username xs
loadReply :: Maybe Reply -> EitherA [Text] IO ReplyText
loadReply x = case x of
Just r -> case r of
ReplyLiteral lit -> pure lit
ReplyFilePath fp -> readReplyFile fp
Nothing -> failWith ["Missing reply"]
readReplyFile :: FilePath -> EitherA [Text] IO ReplyText
readReplyFile fp = EitherA $ catchJust f (Right <$> Text.readFile fp) (return . Left . return)
where
f (isDoesNotExistError -> True) = Just "Reply file does not exist"
f (isPermissionError -> True) = Just "Incorrect permissions for reply file"
f _ = Nothing
loadClassifier :: FilePath -> EitherA [Text] IO (NaiveBayes Bool Text)
loadClassifier fp = EitherA $ f <$> decodeFileOrFail fp
where
f (Left _) = Left ["Classifier could not be read"]
f (Right x) = pure x
run :: WriteSem -> ConcreteSettings -> IO ()
run sem settings =
withAsync (loopWith (forever $ commentsLoop sem) sem settings) $ \c ->
case classifier settings of
Just _ ->
withAsync (loopWith (forever $ postsLoop sem) sem settings) $ \p ->
void $ waitBoth c p
Nothing -> wait c
loopWith :: RedditT (ReaderT ConcreteSettings IO) () -> WriteSem -> ConcreteSettings -> IO ()
loopWith act sem settings = do
res <- flip runReaderT settings $
runResumeRedditWith def { customUserAgent = Just "intolerable-bot v0.1.0.0"
, loginMethod = Credentials (coerce (username settings)) (password settings)
, rateLimitingEnabled = False } act
case res of
Left (APIError CredentialsError, _) ->
withWriteSem sem $
Text.putStrLn $ "Username / password details incorrect for /r/" <> coerce (subreddit settings)
Left (err, Nothing) -> do
liftIO $ print err
(5 * refreshTime settings) `seconds` threadDelay
loopWith act sem settings
Left (err, Just resume) -> do
liftIO $ print err
loopWith resume sem settings
Right () -> return ()
postsLoop :: WriteSem -> RedditT (ReaderT ConcreteSettings IO) ()
postsLoop sem = do
u <- lift $ asks username
r <- lift $ asks subreddit
t <- lift $ asks refreshTime
rt <- lift $ asks replyText
cls <- lift $ asks (fromJust . classifier)
use <- lift $ asks useClassifier
withInitial (Bounded.empty 500) $ \loop set -> do
Listing _ _ ps <- getPosts' (Options Nothing (Just 100)) New (Just r)
writeLogEntry sem r "got listing"
let news = filter (\x -> not $ Bounded.member (Post.postID x) set) ps
forM_ news $ \p ->
unless (Post.author p == u) $
case Post.content p of
Post.SelfPost m _ -> do
let c = Counter.fromList $ process m
case NB.test cls c of
Just True ->
if use
then do
PostComments _ cs <- getPostComments $ Post.postID p
actuals <- resolveComments (Post.postID p) cs
unless (any ((== u) . Comment.author) actuals) $ do
botReply <- reply p rt
writeLogEntry sem r $ mconcat
[ "Auto-responded to "
, coerce $ Post.postID p
, " ("
, coerce botReply
, ")" ]
else
writeLogEntry sem r $ mconcat
[ "Possible AI match @ "
, coerce $ Post.postID p ]
_ -> return ()
_ -> return ()
unless (null news) $ writeLogEntry sem r "got listing"
t `seconds` threadDelay
loop $ Bounded.insertAll (Post.postID <$> news) set
commentsLoop :: WriteSem -> RedditT (ReaderT ConcreteSettings IO) ()
commentsLoop sem = do
r <- lift $ asks subreddit
t <- lift $ asks refreshTime
withInitial (Bounded.empty 500) $ \loop set -> do
Listing _ _ cs <- getNewComments' (Options Nothing (Just 100)) (Just r)
let news = filter (\x -> not $ Bounded.member (Comment.commentID x) set) cs
mapM_ (commentResponder sem) news
unless (null news) $ writeLogEntry sem r "dealt with new comments"
t `seconds` threadDelay
loop $ Bounded.insertAll (Comment.commentID <$> news) set
commentResponder :: WriteSem -> Comment -> RedditT (ReaderT ConcreteSettings IO) ()
commentResponder sem c = do
u <- lift $ asks username
r <- lift $ asks subreddit
rt <- lift $ asks replyText
bs <- lift $ asks bans
when (shouldRespond u (Comment.body c)) $
unless (Comment.author c `elem` bs) $ do
writeLogEntry sem r "found a comment"
(selfpost, sibs) <- getSiblingComments c
unless (any ((== u) . Comment.author) sibs) $ do
writeLogEntry sem r $ "found a comment we didn't already respond to: " <> coerce (Comment.commentID c)
case Comment.inReplyTo c of
Just parentComment -> reply parentComment rt >>= logReply r
Nothing ->
when selfpost $
reply (Comment.parentLink c) rt >>= logReply r
where
logReply r botReply = writeLogEntry sem r $ mconcat
[ "Responded to "
, coerce (Comment.commentID c)
, " by "
, coerce (Comment.author c)
, " ("
, coerce botReply
, ")" ]
getSiblingComments :: MonadIO m => Comment -> RedditT m (Bool, [Comment])
getSiblingComments c = do
let parent = Comment.parentLink c
PostComments p cs <-
case Comment.inReplyTo c of
Just parentComment ->
getPostSubComments parent parentComment >>= \case
PostComments p (com:_) -> do
Listing _ _ cs <- mconcat <$> map Comment.replies <$> resolveComments parent [com]
return $ PostComments p cs
x -> return x
Nothing -> getPostComments parent
case Post.content p of
Post.SelfPost _ _ -> (,) True <$> resolveComments parent cs
_ -> (,) (isJust (Comment.inReplyTo c)) <$> resolveComments parent cs
resolveComments :: MonadIO m => PostID -> [CommentReference] -> RedditT m [Comment]
resolveComments p refs = concat <$> mapM f refs
where
f (Actual c) = return [c]
f (Reference _ cs) = do
moreComments <- getMoreChildren p cs
resolveComments p moreComments
shouldRespond :: Username -> Text -> Bool
shouldRespond (Username u) = Text.isInfixOf (Text.toCaseFold $ "u/" <> u) . Text.toCaseFold
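-- A doctest-style sketch added by the editor (not in the original source,
-- assumes OverloadedStrings); the check is case-insensitive and keys on a
-- "u/<name>" mention in the comment body:
--
-- >>> shouldRespond (Username "helper_bot") "Thanks /u/Helper_Bot!"
-- True
-- >>> shouldRespond (Username "helper_bot") "thanks, helper_bot"
-- False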
withInitial :: a -> ((a -> b) -> a -> b) -> b
withInitial = flip fix
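-- A hedged illustration (editor's sketch, not in the original source):
-- 'withInitial' threads a piece of state through a self-recursive action,
-- e.g.
--
--   withInitial (0 :: Int) $ \loop n -> do
--     print n
--     loop (n + 1)
--
-- prints 0, 1, 2, ... calling 'loop' with the updated state on each pass,
-- which is how the posts/comments loops above thread their seen-ID sets.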
seconds :: MonadIO m => Int -> (Int -> IO ()) -> m ()
n `seconds` f = liftIO $ f $ n * 1000000
writeLogEntry :: MonadIO m => WriteSem -> SubredditName -> Text -> m ()
writeLogEntry sem (R r) t = do
time <- liftIO getCurrentTime
let space = " "
withWriteSem sem $
mapM_ Text.putStr
[ makeTime time
, space
, "/r/"
, r
, ": "
, t
, "\n" ]
makeTime :: UTCTime -> Text
makeTime t = Text.pack $ formatTime defaultTimeLocale (iso8601DateFormat (Just "%H:%M:%S")) t
process :: Text -> [Text]
process = filter (not . Text.null) .
map (Text.map toLower . Text.filter isAlpha) .
concatMap (Text.splitOn ".") .
Text.splitOn " " .
Text.filter (not . (== '-'))
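-- A doctest-style sketch added by the editor (not in the original source,
-- assumes OverloadedStrings): hyphens are dropped, the text is split on
-- spaces and full stops, non-letters are removed and everything is
-- lower-cased before the classifier sees it:
--
-- >>> process "Self-driving cars. Are they safe?"
-- ["selfdriving","cars","are","they","safe"]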
|
intolerable/intolerable-bot
|
src/Bot.hs
|
Haskell
|
bsd-3-clause
| 10,673
|
{-# LANGUAGE RecordWildCards #-}
module Task where
import Text.Printf
import Simulation.Aivika
import Data.Functor
data System = System {
processingDistribution :: (String, Parameter Double)
, bufferCapacity :: Int
}
data Input = Input {
generationDistribution :: (String, Parameter Double)
, inputSystems :: [System]
, simulationTime :: Double
, outputPrecision :: Int
}
instance Show System where
show System{..} = fst processingDistribution ++ "-" ++ show bufferCapacity
instance Show Input where
show Input{..} = fst generationDistribution ++ "-" ++ show inputSystems
data Output = Output {
failChances :: [Double], -- ^ Probability that the system rejects a request
queueSizes :: [Double], -- ^ Average buffer size
systemLoads :: [Double], -- ^ System utilisation
requestsCounts :: [Double], -- ^ Average number of requests in the system
awaitingTimes :: [Double], -- ^ Average waiting time in the buffer
totalTimes :: [Double], -- ^ Total time a request spends in the system
usedInput :: Input -- ^ Input data used
}
emptyOutput :: Input -> Output
emptyOutput input = Output [] [] [] [] [] [] input
data PartialOutput = PartialOutput {
failChance :: Double, -- ^ Probability that the system rejects a request
queueSize :: Double, -- ^ Average buffer size
systemLoad :: Double, -- ^ System utilisation
requestsCount :: Double, -- ^ Average number of requests in the system
awaitingTime :: Double, -- ^ Average waiting time in the buffer
totalTime :: Double -- ^ Total time a request spends in the system
}
combineOutput :: Output -> PartialOutput -> Output
combineOutput output poutput = output {
failChances = failChances output ++ [failChance poutput]
, queueSizes = queueSizes output ++ [queueSize poutput]
, systemLoads = systemLoads output ++ [systemLoad poutput]
, requestsCounts = requestsCounts output ++ [requestsCount poutput]
, awaitingTimes = awaitingTimes output ++ [awaitingTime poutput]
, totalTimes = totalTimes output ++ [totalTime poutput]
}
combineOutputs :: Output -> [PartialOutput] -> Output
combineOutputs output = foldl combineOutput output
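-- A hypothetical usage sketch (editor's note, not part of the original
-- module): each partial result is appended field-by-field, so starting from
-- an empty output
--
--   failChances (combineOutputs (emptyOutput input) [p1, p2])
--     == [failChance p1, failChance p2]
--
-- and likewise for the other statistics.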
instance Show Output where
show Output{..} = unlines [
unwords ["Вероятность отказа в каждом буфере:", unwords $ printPrec <$> failChances]
, unwords ["Средний размер буферов:", unwords $ printPrec <$> queueSizes]
, unwords ["Загрузка подсистем:", unwords $ printPrec <$> systemLoads]
, unwords ["Среднее число заявок в системах:", unwords $ printPrec <$> requestsCounts]
, unwords ["Среднее время ожидания в буфере:", unwords $ printPrec <$> awaitingTimes]
, unwords ["Общее время пребывания заявки в системе:", unwords $ printPrec <$> totalTimes]
]
where
precision = show (outputPrecision usedInput)
printPrec :: Double -> String
printPrec = printf ("%."++precision++"f")
|
NCrashed/bmstu-aivika-tutorial-01
|
src/Task.hs
|
Haskell
|
bsd-3-clause
| 3,365
|
import System.Environment (getArgs)
import Data.List.Split (splitOn)
import Data.Bits (testBit)
compareBits :: [Int] -> String
compareBits [i, a, b] | testBit i (a - 1) == testBit i (b - 1) = "true"
| otherwise = "false"
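-- A worked example added by the editor (not in the original source): 86 is
-- 1010110 in binary, so its 2nd and 3rd least-significant bits (positions
-- 2 and 3, 1-indexed) are both set, and compareBits [86, 2, 3] == "true".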
main :: IO ()
main = do
[inpFile] <- getArgs
input <- readFile inpFile
putStr . unlines . map (compareBits . map read . splitOn ",") $ lines input
|
nikai3d/ce-challenges
|
easy/position.hs
|
Haskell
|
bsd-3-clause
| 442
|
{-# LANGUAGE CPP, DeriveDataTypeable, DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
{-# LANGUAGE PatternGuards, ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards, TemplateHaskell #-}
{- |
Module: Database.PostgreSQL.Simple.FromField
Copyright: (c) 2011 MailRank, Inc.
(c) 2011-2013 Leon P Smith
License: BSD3
Maintainer: Leon P Smith <leon@melding-monads.com>
Stability: experimental
The 'FromField' typeclass, for converting a single value in a row
returned by a SQL query into a more useful Haskell representation.
Note that each instance of 'FromField' is documented by a list of
compatible postgresql types.
A Haskell numeric type is considered to be compatible with all
PostgreSQL numeric types that are less accurate than it. For instance,
the Haskell 'Double' type is compatible with PostgreSQL's 32-bit
@int@ type because it can represent an @int@ exactly. On the other hand,
since a 'Double' might lose precision if representing PostgreSQL's 64-bit
@bigint@, the two are /not/ considered compatible.
Note that the 'Float' and 'Double' instances use attoparsec's 'double'
conversion routine, which sacrifices some accuracy for speed. If you
need accuracy, consider first converting data to a 'Scientific' or 'Rational'
type, and then converting to a floating-point type. If you are defining
your own 'Database.PostgreSQL.Simple.FromRow.FromRow' instances, this can be
achieved simply by
@'fromRational' '<$>' 'Database.PostgreSQL.Simple.FromRow.field'@, although
this idiom is additionally compatible with PostgreSQL's @numeric@ type.
If this is unacceptable, you may find
'Database.PostgreSQL.Simple.FromRow.fieldWith' useful.
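For example (an illustrative sketch, not part of the original documentation,
using a hypothetical single-column @Price@ type, with 'FromRow' and 'field'
from "Database.PostgreSQL.Simple.FromRow"):

@
data Price = Price Double

instance FromRow Price where
    fromRow = fmap (Price . fromRational) field
@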
Also note that while converting to a 'Double' through the 'Scientific' type
is likely somewhat faster than converting through the 'Rational' type,
the 'Scientific' type has no way to represent @NaN@ and @±Infinity@ values.
Thus, if you need precise conversion of regular floating point values
and the possibility of receiving these special values from the backend,
stick with 'Rational'.
Because 'FromField' is a typeclass, one may provide conversions to
additional Haskell types without modifying postgresql-simple. This is
particularly useful for supporting PostgreSQL types that postgresql-simple
does not support out-of-box. Here's an example of what such an instance
might look like for a UUID type that implements the @Read@ class:
@
import Data.UUID ( UUID )
import Database.PostgreSQL.Simple.FromField
( FromField (fromField) , typeOid, returnError, ResultError (..) )
import Database.PostgreSQL.Simple.TypeInfo.Static (typoid, uuid)
import qualified Data.ByteString.Char8 as B
instance FromField UUID where
fromField f mdata =
if typeOid f /= typoid uuid
then returnError Incompatible f \"\"
else case B.unpack \`fmap\` mdata of
Nothing -> returnError UnexpectedNull f \"\"
Just dat ->
case [ x | (x,t) <- reads dat, (\"\",\"\") <- lex t ] of
[x] -> return x
_ -> returnError ConversionFailed f dat
@
Note that because PostgreSQL's @uuid@ type is built into postgres and is
not provided by an extension, the 'typeOid' of @uuid@ does not change and
thus we can examine it directly. One could hard-code the type oid, or
obtain it by other means, but in this case we simply pull it out of the
static table provided by postgresql-simple.
On the other hand if the type is provided by an extension, such as
@PostGIS@ or @hstore@, then the 'typeOid' is not stable and can vary from
database to database. In this case it is recommended that FromField
instances use 'typename' instead.
-}
module Database.PostgreSQL.Simple.FromField
(
FromField(..)
, FieldParser
, Conversion()
, runConversion
, conversionMap
, conversionError
, ResultError(..)
, returnError
, Field
, typename
, TypeInfo(..)
, Attribute(..)
, typeInfo
, typeInfoByOid
, name
, tableOid
, tableColumn
, format
, typeOid
, PQ.Oid(..)
, PQ.Format(..)
, pgArrayFieldParser
, fromJSONField
) where
#include "MachDeps.h"
import Control.Applicative ( (<|>), (<$>), pure, (*>) )
import Control.Concurrent.MVar (MVar, newMVar)
import Control.Exception (Exception)
import qualified Data.Aeson as JSON
import Data.Attoparsec.ByteString.Char8 hiding (Result)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as B
import Data.Int (Int16, Int32, Int64)
import Data.IORef (IORef, newIORef)
import Data.Ratio (Ratio)
import Data.Time ( UTCTime, ZonedTime, LocalTime, Day, TimeOfDay )
import Data.Typeable (Typeable, typeOf)
import Data.Vector (Vector)
import Data.Vector.Mutable (IOVector)
import qualified Data.Vector as V
import Database.PostgreSQL.Simple.Internal
import Database.PostgreSQL.Simple.Compat
import Database.PostgreSQL.Simple.Ok
import Database.PostgreSQL.Simple.Types
import Database.PostgreSQL.Simple.TypeInfo as TI
import qualified Database.PostgreSQL.Simple.TypeInfo.Static as TI
import Database.PostgreSQL.Simple.TypeInfo.Macro as TI
import Database.PostgreSQL.Simple.Time
import Database.PostgreSQL.Simple.Arrays as Arrays
import qualified Database.PostgreSQL.LibPQ as PQ
import qualified Data.ByteString as SB
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy as LB
import qualified Data.Text as ST
import qualified Data.Text.Encoding as ST
import qualified Data.Text.Lazy as LT
import Data.CaseInsensitive (CI)
import qualified Data.CaseInsensitive as CI
import Data.UUID (UUID)
import qualified Data.UUID as UUID
import Data.Scientific (Scientific)
import GHC.Real (infinity, notANumber)
-- | Exception thrown if conversion from a SQL value to a Haskell
-- value fails.
data ResultError = Incompatible { errSQLType :: String
, errSQLTableOid :: Maybe PQ.Oid
, errSQLField :: String
, errHaskellType :: String
, errMessage :: String }
-- ^ The SQL and Haskell types are not compatible.
| UnexpectedNull { errSQLType :: String
, errSQLTableOid :: Maybe PQ.Oid
, errSQLField :: String
, errHaskellType :: String
, errMessage :: String }
-- ^ A SQL @NULL@ was encountered when the Haskell
-- type did not permit it.
| ConversionFailed { errSQLType :: String
, errSQLTableOid :: Maybe PQ.Oid
, errSQLField :: String
, errHaskellType :: String
, errMessage :: String }
-- ^ The SQL value could not be parsed, or could not
-- be represented as a valid Haskell value, or an
-- unexpected low-level error occurred (e.g. mismatch
-- between metadata and actual data in a row).
deriving (Eq, Show, Typeable)
instance Exception ResultError
left :: Exception a => a -> Conversion b
left = conversionError
type FieldParser a = Field -> Maybe ByteString -> Conversion a
-- | A type that may be converted from a SQL type.
class FromField a where
fromField :: FieldParser a
-- ^ Convert a SQL value to a Haskell value.
--
-- Returns a list of exceptions if the conversion fails. In the case of
-- library instances, this will usually be a single 'ResultError', but
-- may be a 'UnicodeException'.
--
-- Note that retaining any reference to the 'Field' argument causes
-- the entire @LibPQ.'PQ.Result'@ to be retained. Thus, implementations
-- of 'fromField' should return results that do not refer to this value
-- after the result has been evaluated to WHNF.
--
-- Note that as of @postgresql-simple-0.4.0.0@, the 'ByteString' value
-- has already been copied out of the @LibPQ.'PQ.Result'@ before it has
-- been passed to 'fromField'. This is because for short strings, it's
-- cheaper to copy the string than to set up a finalizer.
-- | Returns the data type name. This is the preferred way of identifying
-- types that do not have a stable type oid, such as types provided by
-- extensions to PostgreSQL.
--
-- More concretely, it returns the @typname@ column associated with the
-- type oid in the @pg_type@ table. First, postgresql-simple will check
-- the built-in, static table. If the type oid is not there,
-- postgresql-simple will check a per-connection cache, and then
-- finally query the database's meta-schema.
typename :: Field -> Conversion ByteString
typename field = typname <$> typeInfo field
typeInfo :: Field -> Conversion TypeInfo
typeInfo Field{..} = Conversion $ \conn -> do
Ok <$> (getTypeInfo conn =<< PQ.ftype result column)
typeInfoByOid :: PQ.Oid -> Conversion TypeInfo
typeInfoByOid oid = Conversion $ \conn -> do
Ok <$> (getTypeInfo conn oid)
-- | Returns the name of the column. This is often determined by a table
-- definition, but it can be set using an @as@ clause.
name :: Field -> Maybe ByteString
name Field{..} = unsafeDupablePerformIO (PQ.fname result column)
-- | Returns the object id of the @table@ associated with the
-- column, if any. Returns 'Nothing' when there is no such table;
-- for example a computed column does not have a table associated with it.
-- Analogous to libpq's @PQftable@.
tableOid :: Field -> Maybe PQ.Oid
tableOid Field{..} = toMaybeOid (unsafeDupablePerformIO (PQ.ftable result column))
where
toMaybeOid x
= if x == PQ.invalidOid
then Nothing
else Just x
-- | If the column has a table associated with it, this returns the number
-- of the associated table column. Numbering starts from 0. Analogous
-- to libpq's @PQftablecol@.
tableColumn :: Field -> Int
tableColumn Field{..} = fromCol (unsafeDupablePerformIO (PQ.ftablecol result column))
where
fromCol (PQ.Col x) = fromIntegral x
-- | This returns whether the data was returned in a binary or textual format.
-- Analogous to libpq's @PQfformat@.
format :: Field -> PQ.Format
format Field{..} = unsafeDupablePerformIO (PQ.fformat result column)
-- | void
instance FromField () where
fromField f _bs
| typeOid f /= $(inlineTypoid TI.void) = returnError Incompatible f ""
| otherwise = pure ()
-- | For dealing with null values. Compatible with any postgresql type
-- compatible with type @a@. Note that the type is not checked if
-- the value is null, although it is inadvisable to rely on this
-- behavior.
instance (FromField a) => FromField (Maybe a) where
fromField _ Nothing = pure Nothing
fromField f bs = Just <$> fromField f bs
-- | Compatible with any data type, but the value must be null.
instance FromField Null where
fromField _ Nothing = pure Null
fromField f (Just _) = returnError ConversionFailed f "data is not null"
-- | bool
instance FromField Bool where
fromField f bs
| typeOid f /= $(inlineTypoid TI.bool) = returnError Incompatible f ""
| bs == Nothing = returnError UnexpectedNull f ""
| bs == Just "t" = pure True
| bs == Just "f" = pure False
| otherwise = returnError ConversionFailed f ""
-- | \"char\"
instance FromField Char where
fromField f bs =
if typeOid f /= $(inlineTypoid TI.char)
then returnError Incompatible f ""
else case bs of
Nothing -> returnError UnexpectedNull f ""
Just bs -> if B.length bs /= 1
then returnError ConversionFailed f "length not 1"
else return $! (B.head bs)
-- | int2
instance FromField Int16 where
fromField = atto ok16 $ signed decimal
-- | int2, int4
instance FromField Int32 where
fromField = atto ok32 $ signed decimal
#if WORD_SIZE_IN_BITS < 64
-- | int2, int4, and if compiled as 64-bit code, int8 as well.
-- This library was compiled as 32-bit code.
#else
-- | int2, int4, and if compiled as 64-bit code, int8 as well.
-- This library was compiled as 64-bit code.
#endif
instance FromField Int where
fromField = atto okInt $ signed decimal
-- | int2, int4, int8
instance FromField Int64 where
fromField = atto ok64 $ signed decimal
-- | int2, int4, int8
instance FromField Integer where
fromField = atto ok64 $ signed decimal
-- | int2, float4 (Uses attoparsec's 'double' routine; for
-- better accuracy, convert to 'Scientific' or 'Rational' first)
instance FromField Float where
fromField = atto ok (realToFrac <$> pg_double)
where ok = $(mkCompats [TI.float4,TI.int2])
-- | int2, int4, float4, float8 (Uses attoparsec's 'double' routine; for
-- better accuracy, convert to 'Scientific' or 'Rational' first)
instance FromField Double where
fromField = atto ok pg_double
where ok = $(mkCompats [TI.float4,TI.float8,TI.int2,TI.int4])
-- | int2, int4, float4, float8, numeric
instance FromField (Ratio Integer) where
fromField = atto ok pg_rational
where ok = $(mkCompats [TI.float4,TI.float8,TI.int2,TI.int4,TI.numeric])
-- | int2, int4, float4, float8, numeric
instance FromField Scientific where
fromField = atto ok rational
where ok = $(mkCompats [TI.float4,TI.float8,TI.int2,TI.int4,TI.numeric])
unBinary :: Binary t -> t
unBinary (Binary x) = x
pg_double :: Parser Double
pg_double
= (string "NaN" *> pure ( 0 / 0))
<|> (string "Infinity" *> pure ( 1 / 0))
<|> (string "-Infinity" *> pure (-1 / 0))
<|> double
pg_rational :: Parser Rational
pg_rational
= (string "NaN" *> pure notANumber )
<|> (string "Infinity" *> pure infinity )
<|> (string "-Infinity" *> pure (-infinity))
<|> rational
-- | bytea, name, text, \"char\", bpchar, varchar, unknown
instance FromField SB.ByteString where
fromField f dat = if typeOid f == $(inlineTypoid TI.bytea)
then unBinary <$> fromField f dat
else doFromField f okText' pure dat
-- | oid
instance FromField PQ.Oid where
fromField f dat = PQ.Oid <$> atto (== $(inlineTypoid TI.oid)) decimal f dat
-- | bytea, name, text, \"char\", bpchar, varchar, unknown
instance FromField LB.ByteString where
fromField f dat = LB.fromChunks . (:[]) <$> fromField f dat
unescapeBytea :: Field -> SB.ByteString
-> Conversion (Binary SB.ByteString)
unescapeBytea f str = case unsafeDupablePerformIO (PQ.unescapeBytea str) of
Nothing -> returnError ConversionFailed f "unescapeBytea failed"
Just str -> pure (Binary str)
-- | bytea
instance FromField (Binary SB.ByteString) where
fromField f dat = case format f of
PQ.Text -> doFromField f okBinary (unescapeBytea f) dat
PQ.Binary -> doFromField f okBinary (pure . Binary) dat
-- | bytea
instance FromField (Binary LB.ByteString) where
fromField f dat = Binary . LB.fromChunks . (:[]) . unBinary <$> fromField f dat
-- | name, text, \"char\", bpchar, varchar
instance FromField ST.Text where
fromField f = doFromField f okText $ (either left pure . ST.decodeUtf8')
-- FIXME: check character encoding
-- | name, text, \"char\", bpchar, varchar
instance FromField LT.Text where
fromField f dat = LT.fromStrict <$> fromField f dat
-- | citext
instance FromField (CI ST.Text) where
fromField f mdat = do
typ <- typename f
if typ /= "citext"
then returnError Incompatible f ""
else case mdat of
Nothing -> returnError UnexpectedNull f ""
Just dat -> either left (pure . CI.mk)
(ST.decodeUtf8' dat)
-- | citext
instance FromField (CI LT.Text) where
fromField f mdat = do
typ <- typename f
if typ /= "citext"
then returnError Incompatible f ""
else case mdat of
Nothing -> returnError UnexpectedNull f ""
Just dat -> either left (pure . CI.mk . LT.fromStrict)
(ST.decodeUtf8' dat)
-- | name, text, \"char\", bpchar, varchar
instance FromField [Char] where
fromField f dat = ST.unpack <$> fromField f dat
-- | timestamptz
instance FromField UTCTime where
fromField = ff $(inlineTypoid TI.timestamptz) "UTCTime" parseUTCTime
-- | timestamptz
instance FromField ZonedTime where
fromField = ff $(inlineTypoid TI.timestamptz) "ZonedTime" parseZonedTime
-- | timestamp
instance FromField LocalTime where
fromField = ff $(inlineTypoid TI.timestamp) "LocalTime" parseLocalTime
-- | date
instance FromField Day where
fromField = ff $(inlineTypoid TI.date) "Day" parseDay
-- | time
instance FromField TimeOfDay where
fromField = ff $(inlineTypoid TI.time) "TimeOfDay" parseTimeOfDay
-- | timestamptz
instance FromField UTCTimestamp where
fromField = ff $(inlineTypoid TI.timestamptz) "UTCTimestamp" parseUTCTimestamp
-- | timestamptz
instance FromField ZonedTimestamp where
fromField = ff $(inlineTypoid TI.timestamptz) "ZonedTimestamp" parseZonedTimestamp
-- | timestamp
instance FromField LocalTimestamp where
fromField = ff $(inlineTypoid TI.timestamp) "LocalTimestamp" parseLocalTimestamp
-- | date
instance FromField Date where
fromField = ff $(inlineTypoid TI.date) "Date" parseDate
ff :: PQ.Oid -> String -> (B8.ByteString -> Either String a)
-> Field -> Maybe B8.ByteString -> Conversion a
ff compatOid hsType parse f mstr =
if typeOid f /= compatOid
then err Incompatible ""
else case mstr of
Nothing -> err UnexpectedNull ""
Just str -> case parse str of
Left msg -> err ConversionFailed msg
Right val -> return val
where
err errC msg = do
typnam <- typename f
left $ errC (B8.unpack typnam)
(tableOid f)
(maybe "" B8.unpack (name f))
hsType
msg
{-# INLINE ff #-}
-- | Compatible with both types. Conversions to type @b@ are
-- preferred; the conversion to type @a@ is tried only if
-- the 'Right' conversion fails.
instance (FromField a, FromField b) => FromField (Either a b) where
fromField f dat = (Right <$> fromField f dat)
<|> (Left <$> fromField f dat)
-- | any postgresql array whose elements are compatible with type @a@
instance (FromField a, Typeable a) => FromField (PGArray a) where
fromField = pgArrayFieldParser fromField
pgArrayFieldParser :: Typeable a => FieldParser a -> FieldParser (PGArray a)
pgArrayFieldParser fieldParser f mdat = do
info <- typeInfo f
case info of
TI.Array{} ->
case mdat of
Nothing -> returnError UnexpectedNull f ""
Just dat -> do
case parseOnly (fromArray fieldParser info f) dat of
Left err -> returnError ConversionFailed f err
Right conv -> PGArray <$> conv
_ -> returnError Incompatible f ""
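-- Illustrative sketch, not part of the library: 'pgArrayFieldParser' can be
-- reused with a custom element parser. The helper below reads a @text[]@
-- column into a plain list while upper-casing every element; the name and
-- the behaviour are hypothetical, chosen only to show the plumbing.
uppercasedTextArray :: FieldParser [ST.Text]
uppercasedTextArray f mdat =
    fromPGArray <$> pgArrayFieldParser upperElem f mdat
  where
    upperElem :: FieldParser ST.Text
    upperElem f' mbs = ST.toUpper <$> fromField f' mbs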
fromArray :: FieldParser a -> TypeInfo -> Field -> Parser (Conversion [a])
fromArray fieldParser typeInfo f = sequence . (parseIt <$>) <$> array delim
where
delim = typdelim (typelem typeInfo)
fElem = f{ typeOid = typoid (typelem typeInfo) }
parseIt item =
fieldParser f' $ if item' == "NULL" then Nothing else Just item'
where
item' = fmt delim item
f' | Arrays.Array _ <- item = f
| otherwise = fElem
instance (FromField a, Typeable a) => FromField (Vector a) where
fromField f v = V.fromList . fromPGArray <$> fromField f v
instance (FromField a, Typeable a) => FromField (IOVector a) where
fromField f v = liftConversion . V.unsafeThaw =<< fromField f v
-- | uuid
instance FromField UUID where
fromField f mbs =
if typeOid f /= $(inlineTypoid TI.uuid)
then returnError Incompatible f ""
else case mbs of
Nothing -> returnError UnexpectedNull f ""
Just bs ->
case UUID.fromASCIIBytes bs of
Nothing -> returnError ConversionFailed f "Invalid UUID"
Just uuid -> pure uuid
-- | json
instance FromField JSON.Value where
fromField f mbs =
if typeOid f /= $(inlineTypoid TI.json) && typeOid f /= $(inlineTypoid TI.jsonb)
then returnError Incompatible f ""
else case mbs of
Nothing -> returnError UnexpectedNull f ""
Just bs ->
#if MIN_VERSION_aeson(0,6,3)
case JSON.eitherDecodeStrict' bs of
#elif MIN_VERSION_bytestring(0,10,0)
case JSON.eitherDecode' $ LB.fromStrict bs of
#else
case JSON.eitherDecode' $ LB.fromChunks [bs] of
#endif
Left err -> returnError ConversionFailed f err
Right val -> pure val
-- | Parse a field to a JSON 'JSON.Value' and convert that into a
-- Haskell value using 'JSON.fromJSON'.
--
-- This can be used as the default implementation for the 'fromField'
-- method for Haskell types that have a JSON representation in
-- PostgreSQL.
--
-- The 'Typeable' constraint is required to show more informative
-- error messages when parsing fails.
fromJSONField :: (JSON.FromJSON a, Typeable a) => FieldParser a
fromJSONField f mbBs = do
value <- fromField f mbBs
case JSON.fromJSON value of
JSON.Error err -> returnError ConversionFailed f $
"JSON decoding error: " ++ err
JSON.Success x -> pure x
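-- Illustrative sketch, not part of the library: a hypothetical newtype whose
-- 'FromField' instance is exactly 'fromJSONField', usable for any payload
-- that has a 'JSON.FromJSON' instance and is stored in a json or jsonb column.
newtype AsJSON a = AsJSON a
    deriving (Show)

instance (JSON.FromJSON a, Typeable a) => FromField (AsJSON a) where
    fromField f mbs = AsJSON <$> fromJSONField f mbs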
-- | Compatible with the same set of types as @a@. Note that
-- modifying the 'IORef' does not have any effects outside
-- the local process on the local machine.
instance FromField a => FromField (IORef a) where
fromField f v = liftConversion . newIORef =<< fromField f v
-- | Compatible with the same set of types as @a@. Note that
-- modifying the 'MVar' does not have any effects outside
-- the local process on the local machine.
instance FromField a => FromField (MVar a) where
fromField f v = liftConversion . newMVar =<< fromField f v
type Compat = PQ.Oid -> Bool
okText, okText', okBinary, ok16, ok32, ok64, okInt :: Compat
okText = $( mkCompats [ TI.name, TI.text, TI.char,
TI.bpchar, TI.varchar ] )
okText' = $( mkCompats [ TI.name, TI.text, TI.char,
TI.bpchar, TI.varchar, TI.unknown ] )
okBinary = (== $( inlineTypoid TI.bytea ))
ok16 = (== $( inlineTypoid TI.int2 ))
ok32 = $( mkCompats [TI.int2,TI.int4] )
ok64 = $( mkCompats [TI.int2,TI.int4,TI.int8] )
#if WORD_SIZE_IN_BITS < 64
okInt = ok32
#else
okInt = ok64
#endif
doFromField :: forall a . (Typeable a)
=> Field -> Compat -> (ByteString -> Conversion a)
-> Maybe ByteString -> Conversion a
doFromField f isCompat cvt (Just bs)
| isCompat (typeOid f) = cvt bs
| otherwise = returnError Incompatible f "types incompatible"
doFromField f _ _ _ = returnError UnexpectedNull f ""
-- | Given one of the constructors from 'ResultError', the field,
-- and an 'errMessage', this fills in the other fields in the
-- exception value and returns it in a 'Left . SomeException'
-- constructor.
returnError :: forall a err . (Typeable a, Exception err)
=> (String -> Maybe PQ.Oid -> String -> String -> String -> err)
-> Field -> String -> Conversion a
returnError mkErr f msg = do
typnam <- typename f
left $ mkErr (B.unpack typnam)
(tableOid f)
(maybe "" B.unpack (name f))
(show (typeOf (undefined :: a)))
msg
atto :: forall a. (Typeable a)
=> Compat -> Parser a -> Field -> Maybe ByteString
-> Conversion a
atto types p0 f dat = doFromField f types (go p0) dat
where
go :: Parser a -> ByteString -> Conversion a
go p s =
case parseOnly p s of
Left err -> returnError ConversionFailed f err
Right v -> pure v
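-- Illustrative sketch, not part of the library: a hypothetical newtype over
-- 'Int' whose parser is assembled from the same 'atto' helper and 'okInt'
-- compatibility predicate used by the integral instances above.
newtype RowCount = RowCount Int
    deriving (Show)

instance FromField RowCount where
    fromField = atto okInt (RowCount <$> signed decimal)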
|
avieth/postgresql-simple
|
src/Database/PostgreSQL/Simple/FromField.hs
|
Haskell
|
bsd-3-clause
| 24,587
|
{-
Copyright (c) 2004, Philippa Jane Cowderoy
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the original author nor the names of any
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
-}
module RecentChanges (recentChanges) where
import Text.XHtml
import PageTemplates
import PageIO
import Data.List
recentChanges env = do
    pns <- getPagenames
    pds <- mapM getPageLastUpdated pns
    let pnds = filter (\(_, md) -> case md of
                                     Nothing -> False
                                     Just _  -> True)
                      (zip pns pds)
        opnds = sortBy ordering pnds
        count = case lookup "count" env of
                  Nothing -> defaultCount
                  Just x  -> case reads x of
                               [(i, _)] -> i
                               _        -> defaultCount
        out = concat
                (intersperse "\n\n"
                  (take count
                    (map (\(pn, Just d) -> linkTo pn ++ " - " ++ show d)
                         opnds)))
    page ("Showing the "
          ++ show count
          ++ " most [:RecentChanges||RecentChanges:]:\n\n"
          ++ out)
         "RecentChanges"
         env
  where
    -- Newest pages first.
    ordering (_, d1) (_, d2) = case d1 `compare` d2 of
                                 LT -> GT
                                 EQ -> EQ
                                 GT -> LT
    defaultCount = 50
    linkTo pn = "[" ++ pn ++ "|" ++ pn ++ "]"
|
nh2/flippi
|
src/RecentChanges.hs
|
Haskell
|
bsd-3-clause
| 4,025
|
module Main (main) where
import System.Environment (getArgs)
import Language.Java.Paragon.Error
import Language.Java.Paragon.Interaction.Flags
import Language.Java.Paragon.Parac
-- | Main method, invokes the compiler
main :: IO ()
main = do
(flags, files) <- compilerOpts =<< getArgs
mapM_ (compileFile flags) files
compileFile :: [Flag] -> String -> IO ()
compileFile flags file = do
err <- parac flags file
case err of
[] -> return ()
_ -> putStrLn $ showErrors err
showErrors :: [Error] -> String
showErrors [] = ""
showErrors (e:es) = showContext (errContext e)
++ pretty e ++ "\n"
++ showErrors es
showContext :: [ErrorContext] -> String
showContext [] = ""
showContext (c:cs) = context c ++ "\n"
++ showContext cs
|
bvdelft/paragon
|
src/Language/Java/Paragon.hs
|
Haskell
|
bsd-3-clause
| 812
|
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-|
Module : Numeric.AERN.RefinementOrder.PartialComparison
Description : Comparisons with semidecidable order
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Comparisons with semidecidable order.
This module is hidden and reexported via its parent RefinementOrder.
-}
module Numeric.AERN.RefinementOrder.PartialComparison
where
import Prelude hiding (EQ, LT, GT)
import Numeric.AERN.RefinementOrder.Extrema
import Numeric.AERN.RefinementOrder.Arbitrary
import Numeric.AERN.Basics.Arbitrary
import Numeric.AERN.Basics.Effort
import Numeric.AERN.Misc.Maybe
import Numeric.AERN.Basics.PartialOrdering
import Numeric.AERN.Basics.Laws.PartialRelation
import Numeric.AERN.Misc.Bool
import Test.QuickCheck
import Test.Framework (testGroup, Test)
import Test.Framework.Providers.QuickCheck2 (testProperty)
infix 4 |==?, |<==>?, |</=>?, |<?, |<=?, |>=?, |>?, ⊏?, ⊑?, ⊒?, ⊐?
{-|
A type with semi-decidable equality and partial order
-}
class
(EffortIndicator (PartialCompareEffortIndicator t))
=>
PartialComparison t
where
type PartialCompareEffortIndicator t
pCompareDefaultEffort :: t -> PartialCompareEffortIndicator t
pCompareEff :: PartialCompareEffortIndicator t -> t -> t -> Maybe PartialOrdering
pCompareInFullEff :: PartialCompareEffortIndicator t -> t -> t -> PartialOrderingPartialInfo
pCompareInFullEff eff a b = partialOrdering2PartialInfo $ pCompareEff eff a b
-- | Partial equality
pEqualEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
-- | Partial `is comparable to`.
pComparableEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
-- | Partial `is not comparable to`.
pIncomparableEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
pLessEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
pLeqEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
pGeqEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
pGreaterEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
-- defaults for all convenience operations:
pEqualEff effort a b =
pOrdInfEQ $ pCompareInFullEff effort a b
pLessEff effort a b =
pOrdInfLT $ pCompareInFullEff effort a b
pGreaterEff effort a b =
pOrdInfGT $ pCompareInFullEff effort a b
pLeqEff effort a b =
pOrdInfLEQ $ pCompareInFullEff effort a b
pGeqEff effort a b =
pOrdInfGEQ $ pCompareInFullEff effort a b
pComparableEff effort a b =
fmap not $ pOrdInfNC $ pCompareInFullEff effort a b
pIncomparableEff effort a b =
pOrdInfNC $ pCompareInFullEff effort a b
-- | Partial comparison with default effort
pCompare :: (PartialComparison t) => t -> t -> Maybe PartialOrdering
pCompare a = pCompareEff (pCompareDefaultEffort a) a
-- | Partial comparison with default effort
pCompareInFull :: (PartialComparison t) => t -> t -> PartialOrderingPartialInfo
pCompareInFull a = pCompareInFullEff (pCompareDefaultEffort a) a
-- | Partial `is comparable to` with default effort
pComparable :: (PartialComparison t) => t -> t -> Maybe Bool
pComparable a = pComparableEff (pCompareDefaultEffort a) a
-- | Partial `is comparable to` with default effort
(|<==>?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|<==>?) = pComparable
-- | Partial `is not comparable to` with default effort
pIncomparable :: (PartialComparison t) => t -> t -> Maybe Bool
pIncomparable a = pIncomparableEff (pCompareDefaultEffort a) a
-- | Partial `is not comparable to` with default effort
(|</=>?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|</=>?) = pIncomparable
-- | Partial equality with default effort
pEqual :: (PartialComparison t) => t -> t -> Maybe Bool
pEqual a = pEqualEff (pCompareDefaultEffort a) a
-- | Partial equality with default effort
(|==?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|==?) = pEqual
-- | Partial `strictly less than` with default effort
pLess :: (PartialComparison t) => t -> t -> Maybe Bool
pLess a = pLessEff (pCompareDefaultEffort a) a
-- | Partial `strictly below` with default effort
(|<?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|<?) = pLess
{-| Convenience Unicode notation for '|<?' -}
(⊏?) :: (PartialComparison t) => t -> t -> Maybe Bool
(⊏?) = (|<?)
-- | Partial `less than or equal to` with default effort
pLeq :: (PartialComparison t) => t -> t -> Maybe Bool
pLeq a = pLeqEff (pCompareDefaultEffort a) a
-- | Partial `below or equal to` with default effort
(|<=?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|<=?) = pLeq
-- | Partial `strictly greater than` with default effort
pGreater :: (PartialComparison t) => t -> t -> Maybe Bool
pGreater a = pGreaterEff (pCompareDefaultEffort a) a
-- | Partial `strictly above` with default effort
(|>?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|>?) = pGreater
{-| Convenience Unicode notation for '|>?' -}
(⊐?) :: (PartialComparison t) => t -> t -> Maybe Bool
(⊐?) = (|>?)
{-| Convenience Unicode notation for '|<=?' -}
(⊑?) :: (PartialComparison t) => t -> t -> Maybe Bool
(⊑?) = (|<=?)
-- | Partial `greater than or equal to` with default effort
pGeq :: (PartialComparison t) => t -> t -> Maybe Bool
pGeq a = pGeqEff (pCompareDefaultEffort a) a
-- | Partial `above or equal to` with default effort
(|>=?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|>=?) = pGeq
{-| Convenience Unicode notation for '|>=?' -}
(⊒?) :: (PartialComparison t) => t -> t -> Maybe Bool
(⊒?) = (|>=?)
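-- Illustrative sketch, not part of this module: a small helper built on the
-- default-effort operations above, turning an undecided comparison into a
-- conservative 'False'. The name is hypothetical.
certainlyLeq :: (PartialComparison t) => t -> t -> Bool
certainlyLeq a b = pLeq a b == Just True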
propPartialComparisonReflexiveEQ ::
(PartialComparison t) =>
t ->
(PartialCompareEffortIndicator t) ->
(UniformlyOrderedSingleton t) ->
Bool
propPartialComparisonReflexiveEQ _
effort
(UniformlyOrderedSingleton e)
=
case pCompareEff effort e e of Just EQ -> True; Nothing -> True; _ -> False
propPartialComparisonAntiSymmetric ::
(PartialComparison t) =>
t ->
UniformlyOrderedPair t ->
(PartialCompareEffortIndicator t) ->
Bool
propPartialComparisonAntiSymmetric _
(UniformlyOrderedPair (e1, e2))
effort
=
case (pCompareEff effort e2 e1, pCompareEff effort e1 e2) of
(Just b1, Just b2) -> b1 == partialOrderingTranspose b2
_ -> True
propPartialComparisonTransitiveEQ ::
(PartialComparison t) =>
t ->
UniformlyOrderedTriple t ->
(PartialCompareEffortIndicator t) ->
Bool
propPartialComparisonTransitiveEQ _
(UniformlyOrderedTriple (e1,e2,e3))
effort
=
partialTransitive (pEqualEff effort) e1 e2 e3
propPartialComparisonTransitiveLT ::
(PartialComparison t) =>
t ->
UniformlyOrderedTriple t ->
(PartialCompareEffortIndicator t) ->
Bool
propPartialComparisonTransitiveLT _
(UniformlyOrderedTriple (e1,e2,e3))
effort
=
partialTransitive (pLessEff effort) e1 e2 e3
propPartialComparisonTransitiveLE ::
(PartialComparison t) =>
t ->
UniformlyOrderedTriple t ->
(PartialCompareEffortIndicator t) ->
Bool
propPartialComparisonTransitiveLE _
(UniformlyOrderedTriple (e1,e2,e3))
effort
=
partialTransitive (pLeqEff effort) e1 e2 e3
propExtremaInPartialComparison ::
(PartialComparison t, HasExtrema t) =>
t ->
(UniformlyOrderedSingleton t) ->
(PartialCompareEffortIndicator t) ->
Bool
propExtremaInPartialComparison _
(UniformlyOrderedSingleton e)
effort
=
partialOrderExtrema (pLeqEff effort) (bottom e) (top e) e
testsPartialComparison ::
(PartialComparison t,
HasExtrema t,
ArbitraryOrderedTuple t, Show t)
=>
(String, t) ->
(Area t) ->
Test
testsPartialComparison (name, sample) area =
testGroup (name ++ " (⊑?)")
[
testProperty "anti symmetric" (area, propPartialComparisonAntiSymmetric sample)
,
testProperty "transitive EQ" (area, propPartialComparisonTransitiveEQ sample)
,
testProperty "transitive LE" (area, propPartialComparisonTransitiveLE sample)
,
testProperty "transitive LT" (area, propPartialComparisonTransitiveLT sample)
,
testProperty "extrema" (area, propExtremaInPartialComparison sample)
]
|
michalkonecny/aern
|
aern-order/src/Numeric/AERN/RefinementOrder/PartialComparison.hs
|
Haskell
|
bsd-3-clause
| 8,565
|
{-# LANGUAGE OverloadedStrings #-}
module Api
( app
) where
import Control.Applicative ((<$>))
import Control.Monad (when)
import Data.Maybe (isNothing)
import Data.Text ()
import qualified Database.Persist.Sqlite as P
import DB
import Helper
import qualified Network.HTTP.Types as HT
import Types
import Web.Scotty
app :: P.ConnectionPool -> ScottyM ()
app p = do
let db = runDB p
get "/spots" $ withRescue $ do
resources <- db $ map P.entityVal <$> P.selectList ([] :: [P.Filter Spot]) []
json $ toSpotsResponse resources
get "/spots/:id" $ do
key <- toKey <$> param "id"
resource <- db $ P.get (key :: SpotId)
case resource of
Just r -> json $ SpotResponse r
Nothing -> status HT.status404
put "/spots/:id" $ withRescue $ do
key <- toKey <$> param "id"
value <- fromSpotResponse <$> jsonData
db $ P.update key $ toUpdateQuery value
resource <- db $ P.get (key :: SpotId)
case resource of
Just r -> json $ SpotResponse r
Nothing -> status HT.status404
post "/spots" $ withRescue $ do
value <- fromSpotResponse <$> jsonData
key <- db $ P.insert value
resource <- db $ P.get key
json resource
delete "/spots/:id" $ withRescue $ do
key <- toKey <$> param "id"
resource <- db $ P.get (key :: SpotId)
when (isNothing resource) (status HT.status404)
_ <- db $ P.delete (key :: SpotId)
json True
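-- Illustrative sketch, not part of this module: one way to wire 'app' into a
-- runnable server. The database file, pool size and port are hypothetical,
-- and pool creation is assumed to run under a logging context such as
-- 'runStderrLoggingT' from Control.Monad.Logger:
--
-- > main :: IO ()
-- > main = do
-- >   pool <- runStderrLoggingT (P.createSqlitePool "spots.sqlite" 10)
-- >   scotty 3000 (app pool)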
|
fujimura/spot
|
src/Api.hs
|
Haskell
|
bsd-3-clause
| 1,675
|
{-|
Module : Idris.Erasure
Description : Utilities to erase irrelevant stuff.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE PatternGuards #-}
module Idris.Erasure (performUsageAnalysis, mkFieldName) where
import Idris.AbsSyntax
import Idris.ASTUtils
import Idris.Core.CaseTree
import Idris.Core.TT
import Idris.Core.Evaluate
import Idris.Primitives
import Idris.Error
import Debug.Trace
import System.IO.Unsafe
import Control.Category
import Prelude hiding (id, (.))
import Control.Arrow
import Control.Applicative
import Control.Monad.State
import Data.Maybe
import Data.List
import qualified Data.Set as S
import qualified Data.IntSet as IS
import qualified Data.Map as M
import qualified Data.IntMap as IM
import Data.Set (Set)
import Data.IntSet (IntSet)
import Data.Map (Map)
import Data.IntMap (IntMap)
import Data.Text (pack)
import qualified Data.Text as T
-- | UseMap maps names to the set of used (reachable) argument
-- positions.
type UseMap = Map Name (IntMap (Set Reason))
data Arg = Arg Int | Result deriving (Eq, Ord)
instance Show Arg where
show (Arg i) = show i
show Result = "*"
type Node = (Name, Arg)
type Deps = Map Cond DepSet
type Reason = (Name, Int) -- function name, argument index
-- | Nodes along with sets of reasons for every one.
type DepSet = Map Node (Set Reason)
-- | "Condition" is the conjunction of elementary assumptions along
-- the path from the root. Elementary assumption (f, i) means that
-- "function f uses the argument i".
type Cond = Set Node
-- | Variables carry certain information with them.
data VarInfo = VI
{ viDeps :: DepSet -- ^ dependencies drawn in by the variable
, viFunArg :: Maybe Int -- ^ which function argument this variable came from (defined only for patvars)
, viMethod :: Maybe Name -- ^ name of the metamethod represented by the var, if any
}
deriving Show
type Vars = Map Name VarInfo
-- | Perform usage analysis, write the relevant information in the
-- internal structures, returning the list of reachable names.
performUsageAnalysis :: [Name] -> Idris [Name]
performUsageAnalysis startNames = do
ctx <- tt_ctxt <$> getIState
case startNames of
[] -> return [] -- no main -> not compiling -> reachability irrelevant
main -> do
ci <- idris_interfaces <$> getIState
cg <- idris_callgraph <$> getIState
opt <- idris_optimisation <$> getIState
used <- idris_erasureUsed <$> getIState
externs <- idris_externs <$> getIState
-- Build the dependency graph.
let depMap = buildDepMap ci used (S.toList externs) ctx main
-- Search for reachable nodes in the graph.
let (residDeps, (reachableNames, minUse)) = minimalUsage depMap
usage = M.toList minUse
-- Print some debug info.
logErasure 5 $ "Original deps:\n" ++ unlines (map fmtItem . M.toList $ depMap)
logErasure 3 $ "Reachable names:\n" ++ unlines (map (indent . show) . S.toList $ reachableNames)
logErasure 4 $ "Minimal usage:\n" ++ fmtUseMap usage
logErasure 5 $ "Residual deps:\n" ++ unlines (map fmtItem . M.toList $ residDeps)
-- Check that everything reachable is accessible.
checkEnabled <- (WarnReach `elem`) . opt_cmdline . idris_options <$> getIState
when checkEnabled $
mapM_ (checkAccessibility opt) usage
-- Check that no postulates are reachable.
reachablePostulates <- S.intersection reachableNames . idris_postulates <$> getIState
when (not . S.null $ reachablePostulates)
$ ifail ("reachable postulates:\n" ++ intercalate "\n" [" " ++ show n | n <- S.toList reachablePostulates])
-- Store the usage info in the internal state.
mapM_ storeUsage usage
return $ S.toList reachableNames
where
indent = (" " ++)
fmtItem :: (Cond, DepSet) -> String
fmtItem (cond, deps) = indent $ show (S.toList cond) ++ " -> " ++ show (M.toList deps)
fmtUseMap :: [(Name, IntMap (Set Reason))] -> String
fmtUseMap = unlines . map (\(n,is) -> indent $ show n ++ " -> " ++ fmtIxs is)
fmtIxs :: IntMap (Set Reason) -> String
fmtIxs = intercalate ", " . map fmtArg . IM.toList
where
fmtArg (i, rs)
| S.null rs = show i
| otherwise = show i ++ " from " ++ intercalate ", " (map show $ S.toList rs)
storeUsage :: (Name, IntMap (Set Reason)) -> Idris ()
storeUsage (n, args) = fputState (cg_usedpos . ist_callgraph n) flat
where
flat = [(i, S.toList rs) | (i,rs) <- IM.toList args]
checkAccessibility :: Ctxt OptInfo -> (Name, IntMap (Set Reason)) -> Idris ()
checkAccessibility opt (n, reachable)
| Just (Optimise inaccessible dt) <- lookupCtxtExact n opt
, eargs@(_:_) <- [fmt n (S.toList rs) | (i,n) <- inaccessible, rs <- maybeToList $ IM.lookup i reachable]
= warn $ show n ++ ": inaccessible arguments reachable:\n " ++ intercalate "\n " eargs
| otherwise = return ()
where
fmt n [] = show n ++ " (no more information available)"
fmt n rs = show n ++ " from " ++ intercalate ", " [show rn ++ " arg# " ++ show ri | (rn,ri) <- rs]
warn = logErasure 0
-- | Find the minimal consistent usage by forward chaining.
minimalUsage :: Deps -> (Deps, (Set Name, UseMap))
minimalUsage = second gather . forwardChain
where
gather :: DepSet -> (Set Name, UseMap)
gather = foldr ins (S.empty, M.empty) . M.toList
where
ins :: (Node, Set Reason) -> (Set Name, UseMap) -> (Set Name, UseMap)
ins ((n, Result), rs) (ns, umap) = (S.insert n ns, umap)
ins ((n, Arg i ), rs) (ns, umap) = (ns, M.insertWith (IM.unionWith S.union) n (IM.singleton i rs) umap)
forwardChain :: Deps -> (Deps, DepSet)
forwardChain deps
| Just trivials <- M.lookup S.empty deps
= (M.unionWith S.union trivials)
`second` forwardChain (remove trivials . M.delete S.empty $ deps)
| otherwise = (deps, M.empty)
where
-- Remove the given nodes from the Deps entirely,
-- possibly creating new empty Conds.
remove :: DepSet -> Deps -> Deps
remove ds = M.mapKeysWith (M.unionWith S.union) (S.\\ M.keysSet ds)
-- | Build the dependency graph, starting the depth-first search from
-- a list of Names.
buildDepMap :: Ctxt InterfaceInfo -> [(Name, Int)] -> [(Name, Int)] ->
Context -> [Name] -> Deps
buildDepMap ci used externs ctx startNames
= addPostulates used $ dfs S.empty M.empty startNames
where
-- mark the result of Main.main as used with the empty assumption
addPostulates :: [(Name, Int)] -> Deps -> Deps
addPostulates used deps = foldr (\(ds, rs) -> M.insertWith (M.unionWith S.union) ds rs) deps (postulates used)
where
-- mini-DSL for postulates
(==>) ds rs = (S.fromList ds, M.fromList [(r, S.empty) | r <- rs])
it n is = [(sUN n, Arg i) | i <- is]
mn n is = [(MN 0 $ pack n, Arg i) | i <- is]
-- believe_me is special because it does not use all its arguments
specialPrims = S.fromList [sUN "prim__believe_me"]
usedNames = allNames deps S.\\ specialPrims
usedPrims = [(p_name p, p_arity p) | p <- primitives, p_name p `S.member` usedNames]
postulates used =
[ [] ==> concat
-- Main.main ( + export lists) and run__IO, are always evaluated
-- but they elude analysis since they come from the seed term.
[(map (\n -> (n, Result)) startNames)
,[(sUN "run__IO", Result), (sUN "run__IO", Arg 1)]
,[(sUN "call__IO", Result), (sUN "call__IO", Arg 2)]
-- Explicit usage declarations from a %used pragma
, map (\(n, i) -> (n, Arg i)) used
-- MkIO is read by run__IO,
-- but this cannot be observed in the source code of programs.
, it "MkIO" [2]
, it "prim__IO" [1]
-- Foreign calls are built with pairs, but mkForeign doesn't
-- have an implementation so analysis won't see them
, [(pairCon, Arg 2),
(pairCon, Arg 3)] -- Used in foreign calls
-- these have been discovered as builtins but are not listed
-- among Idris.Primitives.primitives
--, mn "__MkPair" [2,3]
, it "prim_fork" [0]
, it "unsafePerformPrimIO" [1]
-- believe_me is a primitive but it only uses its third argument
-- it is special-cased in usedNames above
, it "prim__believe_me" [2]
-- in general, all other primitives use all their arguments
, [(n, Arg i) | (n,arity) <- usedPrims, i <- [0..arity-1]]
-- %externs are assumed to use all their arguments
, [(n, Arg i) | (n,arity) <- externs, i <- [0..arity-1]]
-- mkForeign* functions are special-cased below
]
]
-- perform depth-first search
-- to discover all the names used in the program
-- and call getDeps for every name
dfs :: Set Name -> Deps -> [Name] -> Deps
dfs visited deps [] = deps
dfs visited deps (n : ns)
| n `S.member` visited = dfs visited deps ns
| otherwise = dfs (S.insert n visited) (M.unionWith (M.unionWith S.union) deps' deps) (next ++ ns)
where
next = [n | n <- S.toList depn, n `S.notMember` visited]
depn = S.delete n $ allNames deps'
deps' = getDeps n
-- extract all names that a function depends on
-- from the Deps of the function
allNames :: Deps -> Set Name
allNames = S.unions . map names . M.toList
where
names (cs, ns) = S.map fst cs `S.union` S.map fst (M.keysSet ns)
-- get Deps for a Name
getDeps :: Name -> Deps
getDeps (SN (WhereN i (SN (ImplementationCtorN interfaceN)) (MN i' field)))
= M.empty -- these deps are created when applying implementation ctors
getDeps n = case lookupDefExact n ctx of
Just def -> getDepsDef n def
Nothing -> error $ "erasure checker: unknown reference: " ++ show n
getDepsDef :: Name -> Def -> Deps
getDepsDef fn (Function ty t) = error "a function encountered" -- TODO
getDepsDef fn (TyDecl ty t) = M.empty
getDepsDef fn (Operator ty n' f) = M.empty -- TODO: what's this?
getDepsDef fn (CaseOp ci ty tys def tot cdefs)
= getDepsSC fn etaVars (etaMap `M.union` varMap) sc
where
-- we must eta-expand the definition with fresh variables
-- to capture these dependencies as well
etaIdx = [length vars .. length tys - 1]
etaVars = [eta i | i <- etaIdx]
etaMap = M.fromList [varPair (eta i) i | i <- etaIdx]
eta i = MN i (pack "eta")
-- the variables that arose as function arguments only depend on (n, i)
varMap = M.fromList [varPair v i | (v,i) <- zip vars [0..]]
varPair n argNo = (n, VI
{ viDeps = M.singleton (fn, Arg argNo) S.empty
, viFunArg = Just argNo
, viMethod = Nothing
})
(vars, sc) = cases_runtime cdefs
-- we use cases_runtime in order to have case-blocks
-- resolved to top-level functions before our analysis
etaExpand :: [Name] -> Term -> Term
etaExpand [] t = t
etaExpand (n : ns) t = etaExpand ns (App Complete t (P Ref n Erased))
getDepsSC :: Name -> [Name] -> Vars -> SC -> Deps
getDepsSC fn es vs ImpossibleCase = M.empty
getDepsSC fn es vs (UnmatchedCase msg) = M.empty
-- for the purposes of erasure, we can disregard the projection
getDepsSC fn es vs (ProjCase (Proj t i) alts) = getDepsSC fn es vs (ProjCase t alts) -- step
getDepsSC fn es vs (ProjCase (P _ n _) alts) = getDepsSC fn es vs (Case Shared n alts) -- base
-- other ProjCase's are not supported
getDepsSC fn es vs (ProjCase t alts) = error $ "ProjCase not supported:\n" ++ show (ProjCase t alts)
getDepsSC fn es vs (STerm t) = getDepsTerm vs [] (S.singleton (fn, Result)) (etaExpand es t)
getDepsSC fn es vs (Case sh n alts)
-- we case-split on this variable, which marks it as used
-- (unless there is exactly one case branch)
-- hence we add a new dependency, whose only precondition is
-- that the result of this function is used at all
= addTagDep $ unionMap (getDepsAlt fn es vs casedVar) alts -- coming from the whole subtree
where
addTagDep = case alts of
[_] -> id -- single branch, tag not used
_ -> M.insertWith (M.unionWith S.union) (S.singleton (fn, Result)) (viDeps casedVar)
casedVar = fromMaybe (error $ "nonpatvar in case: " ++ show n) (M.lookup n vs)
getDepsAlt :: Name -> [Name] -> Vars -> VarInfo -> CaseAlt -> Deps
getDepsAlt fn es vs var (FnCase n ns sc) = M.empty -- can't use FnCase at runtime
getDepsAlt fn es vs var (ConstCase c sc) = getDepsSC fn es vs sc
getDepsAlt fn es vs var (DefaultCase sc) = getDepsSC fn es vs sc
getDepsAlt fn es vs var (SucCase n sc)
= getDepsSC fn es (M.insert n var vs) sc -- we're not inserting the S-dependency here because it's special-cased
-- data constructors
getDepsAlt fn es vs var (ConCase n cnt ns sc)
= getDepsSC fn es (vs' `M.union` vs) sc -- left-biased union
where
-- Here we insert dependencies that arose from pattern matching on a constructor.
-- n = ctor name, j = ctor arg#, i = fun arg# of the cased var, cs = ctors of the cased var
vs' = M.fromList [(v, VI
{ viDeps = M.insertWith S.union (n, Arg j) (S.singleton (fn, varIdx)) (viDeps var)
, viFunArg = viFunArg var
, viMethod = meth j
})
| (v, j) <- zip ns [0..]]
-- this is safe because it's certainly a patvar
varIdx = fromJust (viFunArg var)
-- generate metamethod names, "n" is the implementation ctor
meth :: Int -> Maybe Name
meth | SN (ImplementationCtorN interfaceName) <- n = \j -> Just (mkFieldName n j)
| otherwise = \j -> Nothing
-- Named variables -> DeBruijn variables -> Conds/guards -> Term -> Deps
getDepsTerm :: Vars -> [(Name, Cond -> Deps)] -> Cond -> Term -> Deps
-- named variables introduce dependencies as described in `vs'
getDepsTerm vs bs cd (P _ n _)
-- de bruijns (lambda-bound, let-bound vars)
| Just deps <- lookup n bs
= deps cd
-- ctor-bound/arg-bound variables
| Just var <- M.lookup n vs
= M.singleton cd (viDeps var)
-- sanity check: machine-generated names shouldn't occur at top-level
| MN _ _ <- n
= error $ "erasure analysis: variable " ++ show n ++ " unbound in " ++ show (S.toList cd)
-- assumed to be a global reference
| otherwise = M.singleton cd (M.singleton (n, Result) S.empty)
-- dependencies of de bruijn variables are described in `bs'
getDepsTerm vs bs cd (V i) = snd (bs !! i) cd
getDepsTerm vs bs cd (Bind n bdr body)
-- here we just push IM.empty on the de bruijn stack
-- the args will be marked as used at the usage site
| Lam ty <- bdr = getDepsTerm vs ((n, const M.empty) : bs) cd body
| Pi _ ty _ <- bdr = getDepsTerm vs ((n, const M.empty) : bs) cd body
-- let-bound variables can get partially evaluated
-- it is sufficient just to plug the Cond in when the bound names are used
| Let ty t <- bdr = var t cd `union` getDepsTerm vs ((n, const M.empty) : bs) cd body
| NLet ty t <- bdr = var t cd `union` getDepsTerm vs ((n, const M.empty) : bs) cd body
where
var t cd = getDepsTerm vs bs cd t
-- applications may add items to Cond
getDepsTerm vs bs cd app@(App _ _ _)
| (fun, args) <- unApply app = case fun of
-- implementation constructors -> create metamethod deps
P (DCon _ _ _) ctorName@(SN (ImplementationCtorN interfaceName)) _
-> conditionalDeps ctorName args -- regular data ctor stuff
`union` unionMap (methodDeps ctorName) (zip [0..] args) -- method-specific stuff
-- ordinary constructors
P (TCon _ _) n _ -> unconditionalDeps args -- does not depend on anything
P (DCon _ _ _) n _ -> conditionalDeps n args -- depends on whether (n,#) is used
-- mkForeign* calls must be special-cased because they are variadic
-- All arguments must be marked as used, except for the first four,
-- which define the call type and are not needed at runtime.
P _ (UN n) _
| n == T.pack "mkForeignPrim"
-> unconditionalDeps $ drop 4 args
-- a bound variable might draw in additional dependencies,
-- think: f x = x 0 <-- here, `x' _is_ used
P _ n _
-- debruijn-bound name
| Just deps <- lookup n bs
-> deps cd `union` unconditionalDeps args
-- local name that refers to a method
| Just var <- M.lookup n vs
, Just meth <- viMethod var
-> viDeps var `ins` conditionalDeps meth args -- use the method instead
-- local name
| Just var <- M.lookup n vs
-- unconditional use
-> viDeps var `ins` unconditionalDeps args
-- global name
| otherwise
-- depends on whether the referred thing uses its argument
-> conditionalDeps n args
-- TODO: could we somehow infer how bound variables use their arguments?
V i -> snd (bs !! i) cd `union` unconditionalDeps args
-- we interpret applied lambdas as lets in order to reuse code here
Bind n (Lam ty) t -> getDepsTerm vs bs cd (lamToLet app)
-- and we interpret applied lets as lambdas
Bind n ( Let ty t') t -> getDepsTerm vs bs cd (App Complete (Bind n (Lam ty) t) t')
Bind n (NLet ty t') t -> getDepsTerm vs bs cd (App Complete (Bind n (Lam ty) t) t')
Proj t i
-> error $ "cannot[0] analyse projection !" ++ show i ++ " of " ++ show t
Erased -> M.empty
_ -> error $ "cannot analyse application of " ++ show fun ++ " to " ++ show args
where
union = M.unionWith $ M.unionWith S.union
ins = M.insertWith (M.unionWith S.union) cd
unconditionalDeps :: [Term] -> Deps
unconditionalDeps = unionMap (getDepsTerm vs bs cd)
conditionalDeps :: Name -> [Term] -> Deps
conditionalDeps n
= ins (M.singleton (n, Result) S.empty) . unionMap (getDepsArgs n) . zip indices
where
indices = map Just [0 .. getArity n - 1] ++ repeat Nothing
getDepsArgs n (Just i, t) = getDepsTerm vs bs (S.insert (n, Arg i) cd) t -- conditional
getDepsArgs n (Nothing, t) = getDepsTerm vs bs cd t -- unconditional
methodDeps :: Name -> (Int, Term) -> Deps
methodDeps ctorName (methNo, t)
= getDepsTerm (vars `M.union` vs) (bruijns ++ bs) cond body
where
vars = M.fromList [(v, VI
{ viDeps = deps i
, viFunArg = Just i
, viMethod = Nothing
}) | (v, i) <- zip args [0..]]
deps i = M.singleton (metameth, Arg i) S.empty
bruijns = reverse [(n, \cd -> M.singleton cd (deps i)) | (i, n) <- zip [0..] args]
cond = S.singleton (metameth, Result)
metameth = mkFieldName ctorName methNo
(args, body) = unfoldLams t
-- projections
getDepsTerm vs bs cd (Proj t (-1)) = getDepsTerm vs bs cd t -- naturals, (S n) -> n
getDepsTerm vs bs cd (Proj t i) = error $ "cannot[1] analyse projection !" ++ show i ++ " of " ++ show t
-- the easy cases
getDepsTerm vs bs cd (Constant _) = M.empty
getDepsTerm vs bs cd (TType _) = M.empty
getDepsTerm vs bs cd (UType _) = M.empty
getDepsTerm vs bs cd Erased = M.empty
getDepsTerm vs bs cd Impossible = M.empty
getDepsTerm vs bs cd t = error $ "cannot get deps of: " ++ show t
-- Get the number of arguments that might be considered for erasure.
getArity :: Name -> Int
getArity (SN (WhereN i' ctorName (MN i field)))
| Just (TyDecl (DCon _ _ _) ty) <- lookupDefExact ctorName ctx
= let argTys = map snd $ getArgTys ty
in if i <= length argTys
then length $ getArgTys (argTys !! i)
else error $ "invalid field number " ++ show i ++ " for " ++ show ctorName
| otherwise = error $ "unknown implementation constructor: " ++ show ctorName
getArity n = case lookupDefExact n ctx of
Just (CaseOp ci ty tys def tot cdefs) -> length tys
Just (TyDecl (DCon tag arity _) _) -> arity
Just (TyDecl (Ref) ty) -> length $ getArgTys ty
Just (Operator ty arity op) -> arity
Just df -> error $ "Erasure/getArity: unrecognised entity '"
++ show n ++ "' with definition: " ++ show df
Nothing -> error $ "Erasure/getArity: definition not found for " ++ show n
-- convert applications of lambdas to lets
-- Note that this transformation preserves de bruijn numbering
lamToLet :: Term -> Term
lamToLet (App _ (Bind n (Lam ty) tm) val) = Bind n (Let ty val) tm
-- split "\x_i -> T(x_i)" into [x_i] and T
unfoldLams :: Term -> ([Name], Term)
unfoldLams (Bind n (Lam ty) t) = let (ns,t') = unfoldLams t in (n:ns, t')
unfoldLams t = ([], t)
union :: Deps -> Deps -> Deps
union = M.unionWith (M.unionWith S.union)
unions :: [Deps] -> Deps
unions = M.unionsWith (M.unionWith S.union)
unionMap :: (a -> Deps) -> [a] -> Deps
unionMap f = M.unionsWith (M.unionWith S.union) . map f
-- | Make a field name out of a data constructor name and field number.
mkFieldName :: Name -> Int -> Name
mkFieldName ctorName fieldNo = SN (WhereN fieldNo ctorName $ sMN fieldNo "field")
|
enolan/Idris-dev
|
src/Idris/Erasure.hs
|
Haskell
|
bsd-3-clause
| 22,527
|
{-# LANGUAGE CPP #-}
-- |Routines for integrating Tor with the standard network library.
module Tor.NetworkStack.System(systemNetworkStack) where
import Data.Binary.Put
import Data.ByteString(ByteString)
import Data.ByteString.Lazy(toStrict)
import qualified Data.ByteString as BS
import Data.Word
import Network(listenOn, PortID(..))
import Network.BSD
import Network.Socket as Sys hiding (recv)
import Network.Socket.ByteString.Lazy(sendAll)
import qualified Network.Socket.ByteString as Sys
import Tor.DataFormat.TorAddress
import Tor.NetworkStack
-- |A Tor-compatible network stack that uses the 'network' library.
systemNetworkStack :: TorNetworkStack Socket Socket
systemNetworkStack = TorNetworkStack {
Tor.NetworkStack.connect = systemConnect
, Tor.NetworkStack.getAddress = systemLookup
, Tor.NetworkStack.listen = systemListen
, Tor.NetworkStack.accept = systemAccept
, Tor.NetworkStack.recv = systemRead
, Tor.NetworkStack.write = sendAll
, Tor.NetworkStack.flush = const (return ())
, Tor.NetworkStack.close = Sys.close
, Tor.NetworkStack.lclose = Sys.close
}
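-- Illustrative sketch, not part of this module: exercising the stack through
-- its record fields. The host, port and byte count below are hypothetical.
--
-- > msock <- Tor.NetworkStack.connect systemNetworkStack "198.51.100.7" 9001
-- > case msock of
-- >   Nothing -> putStrLn "connect failed"
-- >   Just s  -> do bytes <- Tor.NetworkStack.recv systemNetworkStack s 512
-- >                 print (BS.length bytes)
-- >                 Tor.NetworkStack.close systemNetworkStack s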
systemConnect :: String -> Word16 -> IO (Maybe Socket)
systemConnect addrStr port =
do let ainfo = defaultHints { addrFamily = AF_INET, addrSocketType = Stream }
hname = addrStr
sname = show port
addrinfos <- getAddrInfo (Just ainfo) (Just hname) (Just sname)
case addrinfos of
[] -> return Nothing
(x:_) ->
do sock <- socket AF_INET Stream defaultProtocol
Sys.connect sock (addrAddress x)
return (Just sock)
systemLookup :: String -> IO [TorAddress]
systemLookup hostname =
-- FIXME: Tack the hostname on the end, as a default?
do res <- getAddrInfo Nothing (Just hostname) Nothing
return (map (convertAddress . addrAddress) res)
systemListen :: Word16 -> IO Socket
systemListen port = listenOn (PortNumber (fromIntegral port))
convertAddress :: SockAddr -> TorAddress
convertAddress (SockAddrInet _ x) =
IP4 (ip4ToString (toStrict (runPut (putWord32be x))))
convertAddress (SockAddrInet6 _ _ (a,b,c,d) _) =
IP6 (ip6ToString (toStrict (runPut (mapM_ putWord32be [a,b,c,d]))))
convertAddress x =
error ("Incompatible address type: " ++ show x)
systemAccept :: Socket -> IO (Socket, TorAddress)
systemAccept lsock =
do (res, addr) <- Sys.accept lsock
return (res, convertAddress addr)
systemRead :: Socket -> Int -> IO ByteString
systemRead _ 0 = return BS.empty
systemRead sock amt =
do start <- Sys.recv sock (fromIntegral amt)
let left = fromIntegral (amt - fromIntegral (BS.length start))
if BS.null start
then return BS.empty
else (start `BS.append`) `fmap` systemRead sock left
|
GaloisInc/haskell-tor
|
src/Tor/NetworkStack/System.hs
|
Haskell
|
bsd-3-clause
| 2,757
|
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Examples.Misc.Floating
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Several examples involving IEEE-754 floating point numbers, i.e., single
-- precision 'Float' ('SFloat') and double precision 'Double' ('SDouble') types.
--
-- Note that arithmetic with floating point is full of surprises; due to precision
-- issues, associativity of arithmetic operations typically does not hold. Also,
-- the presence of @NaN@ is always something to look out for.
-----------------------------------------------------------------------------
{-# LANGUAGE ScopedTypeVariables #-}
module Data.SBV.Examples.Misc.Floating where
import Data.SBV
-----------------------------------------------------------------------------
-- * FP addition is not associative
-----------------------------------------------------------------------------
-- | Prove that floating point addition is not associative. We have:
--
-- >>> prove assocPlus
-- Falsifiable. Counter-example:
-- s0 = -9.62965e-35 :: Float
-- s1 = Infinity :: Float
-- s2 = -Infinity :: Float
--
-- Indeed:
--
-- >>> let i = 1/0 :: Float
-- >>> (-9.62965e-35 + (i + (-i)))
-- NaN
-- >>> ((-9.62965e-35 + i) + (-i))
-- NaN
--
-- But keep in mind that @NaN@ does not equal itself in the floating point world! We have:
--
-- >>> let nan = 0/0 :: Float in nan == nan
-- False
assocPlus :: SFloat -> SFloat -> SFloat -> SBool
assocPlus x y z = x + (y + z) .== (x + y) + z
-- | Prove that addition is not associative, even if we ignore @NaN@/@Infinity@ values.
-- To do this, we use the predicate 'isPointFP', which is true of a floating point
-- number ('SFloat' or 'SDouble') if it is neither @NaN@ nor @Infinity@. (That is, it's a
-- representable point in the real-number line.)
--
-- We have:
--
-- >>> assocPlusRegular
-- Falsifiable. Counter-example:
-- x = -1.0491915e7 :: Float
-- y = 1967115.5 :: Float
-- z = 982003.94 :: Float
--
-- Indeed, we have:
--
-- >>> ((-1.0491915e7) + (1967115.5 + 982003.94)) :: Float
-- -7542795.5
-- >>> (((-1.0491915e7) + 1967115.5) + 982003.94) :: Float
-- -7542796.0
--
-- Note the significant difference between the two additions!
assocPlusRegular :: IO ThmResult
assocPlusRegular = prove $ do [x, y, z] <- sFloats ["x", "y", "z"]
let lhs = x+(y+z)
rhs = (x+y)+z
-- make sure we do not overflow at the intermediate points
constrain $ isPointFP lhs
constrain $ isPointFP rhs
return $ lhs .== rhs
-----------------------------------------------------------------------------
-- * FP addition by non-zero can result in no change
-----------------------------------------------------------------------------
-- | Demonstrate that @a+b = a@ does not necessarily mean @b@ is @0@ in the floating point world,
-- even when we disallow the obvious solution where @a@ and @b@ are @Infinity@.
-- We have:
--
-- >>> nonZeroAddition
-- Falsifiable. Counter-example:
-- a = -2.0 :: Float
-- b = -3.0e-45 :: Float
--
-- Indeed, we have:
--
-- >>> (-2.0) + (-3.0e-45) == (-2.0 :: Float)
-- True
--
-- But:
--
-- >>> -3.0e-45 == (0::Float)
-- False
--
nonZeroAddition :: IO ThmResult
nonZeroAddition = prove $ do [a, b] <- sFloats ["a", "b"]
constrain $ isPointFP a
constrain $ isPointFP b
constrain $ a + b .== a
return $ b .== 0
-----------------------------------------------------------------------------
-- * FP multiplicative inverses may not exist
-----------------------------------------------------------------------------
-- | This example illustrates that @a * (1/a)@ does not necessarily equal @1@. Again,
-- we protect against division by @0@ and @NaN@/@Infinity@.
--
-- We have:
--
-- >>> multInverse
-- Falsifiable. Counter-example:
-- a = -2.0445642768532407e154 :: Double
--
-- Indeed, we have:
--
-- >>> let a = -2.0445642768532407e154 :: Double
-- >>> a * (1/a)
-- 0.9999999999999999
multInverse :: IO ThmResult
multInverse = prove $ do a <- sDouble "a"
constrain $ isPointFP a
constrain $ isPointFP (1/a)
return $ a * (1/a) .== 1
-----------------------------------------------------------------------------
-- * Effect of rounding modes
-----------------------------------------------------------------------------
-- | One interesting aspect of floating-point is that the chosen rounding-mode
-- can affect the results of a computation if the exact result cannot be precisely
-- represented. SBV exports the functions 'fpAdd', 'fpSub', 'fpMul', 'fpDiv', 'fpFMA'
-- and 'fpSqrt', which allow users to specify the IEEE supported 'RoundingMode' for
-- the operation. (Also see the class 'RoundingFloat'.) This example illustrates how SBV
-- can be used to find rounding-modes where, for instance, addition can produce different
-- results. We have:
--
-- >>> roundingAdd
-- Satisfiable. Model:
-- rm = RoundTowardPositive :: RoundingMode
-- x = 246080.08 :: Float
-- y = 16255.999 :: Float
--
-- Unfortunately we can't directly validate this result at the Haskell level, as Haskell only supports
-- 'RoundNearestTiesToEven'. We have:
--
-- >>> (246080.08 + 16255.999) :: Float
-- 262336.06
--
-- While we cannot directly see the result when the mode is 'RoundTowardPositive' in Haskell, we can use
-- SBV to compute that result for us, as follows:
--
-- >>> sat $ \z -> z .== fpAdd sRoundTowardPositive 246080.08 (16255.999::SFloat)
-- Satisfiable. Model:
-- s0 = 262336.1 :: Float
--
-- These two results are indeed different. To see why, one would have to convert the
-- individual numbers to Floats, which would induce rounding errors, add them up, and round back;
-- a tedious operation, but one that might prove illuminating for the interested reader. We'll merely
-- note that floating point representation and semantics is indeed a thorny
-- subject, and point to <https://ece.uwaterloo.ca/~dwharder/NumericalAnalysis/02Numerics/Double/paper.pdf> as
-- an excellent guide.
roundingAdd :: IO SatResult
roundingAdd = sat $ do m :: SRoundingMode <- free "rm"
constrain $ m ./= literal RoundNearestTiesToEven
x <- sFloat "x"
y <- sFloat "y"
let lhs = fpAdd m x y
let rhs = x + y
constrain $ isPointFP lhs
constrain $ isPointFP rhs
return $ lhs ./= rhs
|
Copilot-Language/sbv-for-copilot
|
Data/SBV/Examples/Misc/Floating.hs
|
Haskell
|
bsd-3-clause
| 6,829
|
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Program : prim4.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:47
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Main where
import Qtc.Classes.Qccs
import Qtc.Classes.Qccs_h
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
import Qtc.Gui.Base
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
import Qtc.Enums.Core.Qt
import Qtc.Gui.QApplication
import Qtc.Gui.QMessageBox
import Qtc.Gui.QLabel
import Qtc.Gui.QLabel_h
import Qtc.Gui.QKeyEvent
import Data.IORef
import Data.IntMap
type CountMap = IntMap (IORef Int)
createCM :: IO CountMap
createCM = do
cellList <- mapM (\x -> do
nr <- newIORef 0
return (x, nr)
) [(qEnum_toInt eKey_A)..(qEnum_toInt eKey_Z)]
return $ fromList cellList
main :: IO Int
main = do
qApplication ()
tl <- qLabel "press any key from 'A' to 'Z'"
setAlignment tl (fAlignCenter::Alignment)
resize tl (200::Int, 60::Int)
mb <- qMessageBox tl
cm <- createCM
setHandler tl "keyPressEvent(QKeyEvent*)" $ tlkp cm mb
qshow tl ()
qApplicationExec ()
tlkp :: CountMap -> QMessageBox () -> QLabel () -> QKeyEvent () -> IO ()
tlkp cm mb this ke
= do
k <- key ke ()
if (member k cm)
then
do
cck <- readIORef $ cm ! k
let cp1 = cck + 1
t <- text ke ()
setText mb $ "You have pressed the '" ++ t ++ "' key " ++ (tpf cp1) ++ "!"
modifyIORef (cm ! k) (\_ -> cp1)
qshow mb ()
else
return ()
keyPressEvent_h this ke
where
tpf c
| c == 1 = "once"
| c == 2 = "twice"
| c > 2 = (show c) ++ " times"
|
uduki/hsQt
|
examples/prim4.hs
|
Haskell
|
bsd-2-clause
| 1,841
|
{-# LANGUAGE CPP #-}
-- | Handy functions for creating much Core syntax
module MkCore (
-- * Constructing normal syntax
mkCoreLet, mkCoreLets,
mkCoreApp, mkCoreApps, mkCoreConApps,
mkCoreLams, mkWildCase, mkIfThenElse,
mkWildValBinder, mkWildEvBinder,
sortQuantVars, castBottomExpr,
-- * Constructing boxed literals
mkWordExpr, mkWordExprWord,
mkIntExpr, mkIntExprInt,
mkIntegerExpr,
mkFloatExpr, mkDoubleExpr,
mkCharExpr, mkStringExpr, mkStringExprFS,
-- * Floats
FloatBind(..), wrapFloat,
-- * Constructing equality evidence boxes
mkEqBox,
-- * Constructing general big tuples
-- $big_tuples
mkChunkified,
-- * Constructing small tuples
mkCoreVarTup, mkCoreVarTupTy, mkCoreTup,
-- * Constructing big tuples
mkBigCoreVarTup, mkBigCoreVarTupTy,
mkBigCoreTup, mkBigCoreTupTy,
-- * Deconstructing small tuples
mkSmallTupleSelector, mkSmallTupleCase,
-- * Deconstructing big tuples
mkTupleSelector, mkTupleCase,
-- * Constructing list expressions
mkNilExpr, mkConsExpr, mkListExpr,
mkFoldrExpr, mkBuildExpr,
-- * Error Ids
mkRuntimeErrorApp, mkImpossibleExpr, errorIds,
rEC_CON_ERROR_ID, iRREFUT_PAT_ERROR_ID, rUNTIME_ERROR_ID,
nON_EXHAUSTIVE_GUARDS_ERROR_ID, nO_METHOD_BINDING_ERROR_ID,
pAT_ERROR_ID, eRROR_ID, rEC_SEL_ERROR_ID, aBSENT_ERROR_ID,
uNDEFINED_ID, tYPE_ERROR_ID, undefinedName
) where
#include "HsVersions.h"
import Id
import Var ( EvVar, setTyVarUnique )
import CoreSyn
import CoreUtils ( exprType, needsCaseBinding, bindNonRec )
import Literal
import HscTypes
import TysWiredIn
import PrelNames
import TcType ( mkSigmaTy )
import Type
import Coercion
import TysPrim
import DataCon ( DataCon, dataConWorkId )
import IdInfo ( vanillaIdInfo, setStrictnessInfo,
setArityInfo )
import Demand
import Name hiding ( varName )
import Outputable
import FastString
import UniqSupply
import BasicTypes
import Util
import Pair
import Constants
import DynFlags
import Data.Char ( ord )
import Data.List
import Data.Ord
#if __GLASGOW_HASKELL__ < 709
import Data.Word ( Word )
#endif
infixl 4 `mkCoreApp`, `mkCoreApps`
{-
************************************************************************
* *
\subsection{Basic CoreSyn construction}
* *
************************************************************************
-}
sortQuantVars :: [Var] -> [Var]
-- Sort the variables (KindVars, TypeVars, and Ids)
-- into order: Kind, then Type, then Id
sortQuantVars = sortBy (comparing withCategory)
where
withCategory v = (category v, v)
category :: Var -> Int
category v
| isKindVar v = 1
| isTyVar v = 2
| otherwise = 3
-- | Bind a binding group over an expression, using a @let@ or @case@ as
-- appropriate (see "CoreSyn#let_app_invariant")
mkCoreLet :: CoreBind -> CoreExpr -> CoreExpr
mkCoreLet (NonRec bndr rhs) body -- See Note [CoreSyn let/app invariant]
| needsCaseBinding (idType bndr) rhs
= Case rhs bndr (exprType body) [(DEFAULT,[],body)]
mkCoreLet bind body
= Let bind body
-- | Bind a list of binding groups over an expression. The leftmost binding
-- group becomes the outermost group in the resulting expression
mkCoreLets :: [CoreBind] -> CoreExpr -> CoreExpr
mkCoreLets binds body = foldr mkCoreLet body binds
-- | Construct an expression which represents the application of one expression
-- to the other
mkCoreApp :: CoreExpr -> CoreExpr -> CoreExpr
-- Respects the let/app invariant by building a case expression where necessary
-- See CoreSyn Note [CoreSyn let/app invariant]
mkCoreApp fun (Type ty) = App fun (Type ty)
mkCoreApp fun (Coercion co) = App fun (Coercion co)
mkCoreApp fun arg = ASSERT2( isFunTy fun_ty, ppr fun $$ ppr arg )
mk_val_app fun arg arg_ty res_ty
where
fun_ty = exprType fun
(arg_ty, res_ty) = splitFunTy fun_ty
-- | Construct an expression which represents the application of a number of
-- expressions to another. The leftmost expression in the list is applied first
-- Respects the let/app invariant by building a case expression where necessary
-- See CoreSyn Note [CoreSyn let/app invariant]
mkCoreApps :: CoreExpr -> [CoreExpr] -> CoreExpr
-- Slightly more efficient version of (foldl mkCoreApp)
mkCoreApps orig_fun orig_args
= go orig_fun (exprType orig_fun) orig_args
where
go fun _ [] = fun
go fun fun_ty (Type ty : args) = go (App fun (Type ty)) (applyTy fun_ty ty) args
go fun fun_ty (Coercion co : args) = go (App fun (Coercion co)) (applyCo fun_ty co) args
go fun fun_ty (arg : args) = ASSERT2( isFunTy fun_ty, ppr fun_ty $$ ppr orig_fun
$$ ppr orig_args )
go (mk_val_app fun arg arg_ty res_ty) res_ty args
where
(arg_ty, res_ty) = splitFunTy fun_ty
-- | Construct an expression which represents the application of a number of
-- expressions to that of a data constructor expression. The leftmost expression
-- in the list is applied first
mkCoreConApps :: DataCon -> [CoreExpr] -> CoreExpr
mkCoreConApps con args = mkCoreApps (Var (dataConWorkId con)) args
mk_val_app :: CoreExpr -> CoreExpr -> Type -> Type -> CoreExpr
-- Build an application (e1 e2),
-- or a strict binding (case e2 of x -> e1 x)
-- using the latter when necessary to respect the let/app invariant
-- See Note [CoreSyn let/app invariant]
mk_val_app fun arg arg_ty res_ty
| not (needsCaseBinding arg_ty arg)
= App fun arg -- The vastly common case
| otherwise
= Case arg arg_id res_ty [(DEFAULT,[],App fun (Var arg_id))]
where
arg_id = mkWildValBinder arg_ty
-- Lots of shadowing, but it doesn't matter,
-- because 'fun' should not have a free wild-id
--
-- This is Dangerous. But this is the only place we play this
-- game; mk_val_app returns an expression that does not
-- have a free wild-id. So the only thing that can go wrong
-- is if you take apart this case expression, and pass a
-- fragment of it as the fun part of a 'mk_val_app'.
-----------
mkWildEvBinder :: PredType -> EvVar
mkWildEvBinder pred = mkWildValBinder pred
-- | Make a /wildcard binder/. This is typically used when you need a binder
-- that you expect to use only at a *binding* site. Do not use it at
-- occurrence sites because it has a single, fixed unique, and it's very
-- easy to get into difficulties with shadowing. That's why it is used so little.
-- See Note [WildCard binders] in SimplEnv
mkWildValBinder :: Type -> Id
mkWildValBinder ty = mkLocalId wildCardName ty
mkWildCase :: CoreExpr -> Type -> Type -> [CoreAlt] -> CoreExpr
-- Make a case expression whose case binder is unused
-- The alts should not have any occurrences of WildId
mkWildCase scrut scrut_ty res_ty alts
= Case scrut (mkWildValBinder scrut_ty) res_ty alts
mkIfThenElse :: CoreExpr -> CoreExpr -> CoreExpr -> CoreExpr
mkIfThenElse guard then_expr else_expr
-- Not going to be refining, so okay to take the type of the "then" clause
= mkWildCase guard boolTy (exprType then_expr)
[ (DataAlt falseDataCon, [], else_expr), -- Increasing order of tag!
(DataAlt trueDataCon, [], then_expr) ]
castBottomExpr :: CoreExpr -> Type -> CoreExpr
-- (castBottomExpr e ty), assuming that 'e' diverges,
-- return an expression of type 'ty'
-- See Note [Empty case alternatives] in CoreSyn
castBottomExpr e res_ty
| e_ty `eqType` res_ty = e
| otherwise = Case e (mkWildValBinder e_ty) res_ty []
where
e_ty = exprType e
{-
The functions from this point don't really do anything cleverer than
their counterparts in CoreSyn, but they are here for consistency
-}
-- | Create a lambda where the given expression has a number of variables
-- bound over it. The leftmost binder is that bound by the outermost
-- lambda in the result
mkCoreLams :: [CoreBndr] -> CoreExpr -> CoreExpr
mkCoreLams = mkLams
{-
************************************************************************
* *
\subsection{Making literals}
* *
************************************************************************
-}
-- | Create a 'CoreExpr' which will evaluate to the given @Int@
mkIntExpr :: DynFlags -> Integer -> CoreExpr -- Result = I# i :: Int
mkIntExpr dflags i = mkConApp intDataCon [mkIntLit dflags i]
-- | Create a 'CoreExpr' which will evaluate to the given @Int@
mkIntExprInt :: DynFlags -> Int -> CoreExpr -- Result = I# i :: Int
mkIntExprInt dflags i = mkConApp intDataCon [mkIntLitInt dflags i]
-- | Create a 'CoreExpr' which will evaluate to a @Word@ with the given value
mkWordExpr :: DynFlags -> Integer -> CoreExpr
mkWordExpr dflags w = mkConApp wordDataCon [mkWordLit dflags w]
-- | Create a 'CoreExpr' which will evaluate to the given @Word@
mkWordExprWord :: DynFlags -> Word -> CoreExpr
mkWordExprWord dflags w = mkConApp wordDataCon [mkWordLitWord dflags w]
-- | Create a 'CoreExpr' which will evaluate to the given @Integer@
mkIntegerExpr :: MonadThings m => Integer -> m CoreExpr -- Result :: Integer
mkIntegerExpr i = do t <- lookupTyCon integerTyConName
return (Lit (mkLitInteger i (mkTyConTy t)))
-- | Create a 'CoreExpr' which will evaluate to the given @Float@
mkFloatExpr :: Float -> CoreExpr
mkFloatExpr f = mkConApp floatDataCon [mkFloatLitFloat f]
-- | Create a 'CoreExpr' which will evaluate to the given @Double@
mkDoubleExpr :: Double -> CoreExpr
mkDoubleExpr d = mkConApp doubleDataCon [mkDoubleLitDouble d]
-- | Create a 'CoreExpr' which will evaluate to the given @Char@
mkCharExpr :: Char -> CoreExpr      -- Result = C# c :: Char
mkCharExpr c = mkConApp charDataCon [mkCharLit c]
-- | Create a 'CoreExpr' which will evaluate to the given @String@
mkStringExpr :: MonadThings m => String -> m CoreExpr -- Result :: String
-- | Create a 'CoreExpr' which will evaluate to a string morally equivalent to the given @FastString@
mkStringExprFS :: MonadThings m => FastString -> m CoreExpr -- Result :: String
mkStringExpr str = mkStringExprFS (mkFastString str)
mkStringExprFS str
| nullFS str
= return (mkNilExpr charTy)
| all safeChar chars
= do unpack_id <- lookupId unpackCStringName
return (App (Var unpack_id) (Lit (MachStr (fastStringToByteString str))))
| otherwise
= do unpack_id <- lookupId unpackCStringUtf8Name
return (App (Var unpack_id) (Lit (MachStr (fastStringToByteString str))))
where
chars = unpackFS str
safeChar c = ord c >= 1 && ord c <= 0x7F
-- This take a ~# b (or a ~# R b) and returns a ~ b (or Coercible a b)
mkEqBox :: Coercion -> CoreExpr
mkEqBox co = ASSERT2( typeKind ty2 `eqKind` k, ppr co $$ ppr ty1 $$ ppr ty2 $$ ppr (typeKind ty1) $$ ppr (typeKind ty2) )
Var (dataConWorkId datacon) `mkTyApps` [k, ty1, ty2] `App` Coercion co
where (Pair ty1 ty2, role) = coercionKindRole co
k = typeKind ty1
datacon = case role of
Nominal -> eqBoxDataCon
Representational -> coercibleDataCon
Phantom -> pprPanic "mkEqBox does not support boxing phantom coercions"
(ppr co)
{-
************************************************************************
* *
\subsection{Tuple constructors}
* *
************************************************************************
-}
-- $big_tuples
-- #big_tuples#
--
-- GHC's built-in tuples can only go up to 'mAX_TUPLE_SIZE' in arity, but
-- we might conceivably want to build such a massive tuple as part of the
-- output of a desugaring stage (notably that for list comprehensions).
--
-- We call tuples above this size \"big tuples\", and emulate them by
-- creating and pattern matching on >nested< tuples that are expressible
-- by GHC.
--
-- Nesting policy: it's better to have a 2-tuple of 10-tuples (3 objects)
-- than a 10-tuple of 2-tuples (11 objects), so we want the leaves of any
-- construction to be big.
--
-- If you just use the 'mkBigCoreTup', 'mkBigCoreVarTupTy', 'mkTupleSelector'
-- and 'mkTupleCase' functions to do all your work with tuples you should be
-- fine, and not have to worry about the arity limitation at all.
-- | Lifts a \"small\" constructor into a \"big\" constructor by recursive decomposition
mkChunkified :: ([a] -> a) -- ^ \"Small\" constructor function, of maximum input arity 'mAX_TUPLE_SIZE'
-> [a] -- ^ Possible \"big\" list of things to construct from
-> a -- ^ Constructed thing made possible by recursive decomposition
mkChunkified small_tuple as = mk_big_tuple (chunkify as)
where
-- Each sub-list is short enough to fit in a tuple
mk_big_tuple [as] = small_tuple as
mk_big_tuple as_s = mk_big_tuple (chunkify (map small_tuple as_s))
chunkify :: [a] -> [[a]]
-- ^ Split a list into lists that are small enough to have a corresponding
-- tuple arity. The sub-lists of the result all have length <= 'mAX_TUPLE_SIZE',
-- but there may be more than 'mAX_TUPLE_SIZE' sub-lists.
chunkify xs
| n_xs <= mAX_TUPLE_SIZE = [xs]
| otherwise = split xs
where
n_xs = length xs
split [] = []
split xs = take mAX_TUPLE_SIZE xs : split (drop mAX_TUPLE_SIZE xs)
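-- An illustrative note (added here, not in the original source): with a
-- hypothetical mAX_TUPLE_SIZE of 3, chunkify [a,b,c,d,e] would yield
-- [[a,b,c],[d,e]], and mkChunkified mkCoreTup applied to [a,b,c,d,e] would
-- then build the nested tuple ((a,b,c),(d,e)): a small 2-tuple whose
-- components are themselves small tuples.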
{-
Creating tuples and their types for Core expressions
@mkBigCoreVarTup@ builds a tuple; the inverse to @mkTupleSelector@.
* If it has only one element, it is the identity function.
* If there are more elements than a big tuple can have, it nests
the tuples.
-}
-- | Build a small tuple holding the specified variables
mkCoreVarTup :: [Id] -> CoreExpr
mkCoreVarTup ids = mkCoreTup (map Var ids)
-- | Build the type of a small tuple that holds the specified variables
mkCoreVarTupTy :: [Id] -> Type
mkCoreVarTupTy ids = mkBoxedTupleTy (map idType ids)
-- | Build a small tuple holding the specified expressions
mkCoreTup :: [CoreExpr] -> CoreExpr
mkCoreTup [] = Var unitDataConId
mkCoreTup [c] = c
mkCoreTup cs = mkConApp (tupleDataCon Boxed (length cs))
(map (Type . exprType) cs ++ cs)
-- | Build a big tuple holding the specified variables
mkBigCoreVarTup :: [Id] -> CoreExpr
mkBigCoreVarTup ids = mkBigCoreTup (map Var ids)
-- | Build the type of a big tuple that holds the specified variables
mkBigCoreVarTupTy :: [Id] -> Type
mkBigCoreVarTupTy ids = mkBigCoreTupTy (map idType ids)
-- | Build a big tuple holding the specified expressions
mkBigCoreTup :: [CoreExpr] -> CoreExpr
mkBigCoreTup = mkChunkified mkCoreTup
-- | Build the type of a big tuple that holds the specified type of thing
mkBigCoreTupTy :: [Type] -> Type
mkBigCoreTupTy = mkChunkified mkBoxedTupleTy
{-
************************************************************************
* *
Floats
* *
************************************************************************
-}
data FloatBind
= FloatLet CoreBind
| FloatCase CoreExpr Id AltCon [Var]
-- case e of y { C ys -> ... }
-- See Note [Floating cases] in SetLevels
instance Outputable FloatBind where
ppr (FloatLet b) = ptext (sLit "LET") <+> ppr b
ppr (FloatCase e b c bs) = hang (ptext (sLit "CASE") <+> ppr e <+> ptext (sLit "of") <+> ppr b)
2 (ppr c <+> ppr bs)
wrapFloat :: FloatBind -> CoreExpr -> CoreExpr
wrapFloat (FloatLet defns) body = Let defns body
wrapFloat (FloatCase e b con bs) body = Case e b (exprType body) [(con, bs, body)]
{-
************************************************************************
* *
\subsection{Tuple destructors}
* *
************************************************************************
-}
-- | Builds a selector which scrutinises the given
-- expression and extracts the one name from the list given.
-- If you want the no-shadowing rule to apply, the caller
-- is responsible for making sure that none of these names
-- are in scope.
--
-- If there is just one 'Id' in the tuple, then the selector is
-- just the identity.
--
-- If necessary, we pattern match on a \"big\" tuple.
mkTupleSelector :: [Id] -- ^ The 'Id's to pattern match the tuple against
-> Id -- ^ The 'Id' to select
-> Id -- ^ A variable of the same type as the scrutinee
-> CoreExpr -- ^ Scrutinee
-> CoreExpr -- ^ Selector expression
-- mkTupleSelector [a,b,c,d] b v e
-- = case e of v {
-- (p,q) -> case p of p {
-- (a,b) -> b }}
-- We use 'tpl' vars for the p,q, since shadowing does not matter.
--
-- In fact, it's more convenient to generate it innermost first, getting
--
-- case (case e of v
-- (p,q) -> p) of p
-- (a,b) -> b
mkTupleSelector vars the_var scrut_var scrut
= mk_tup_sel (chunkify vars) the_var
where
mk_tup_sel [vars] the_var = mkSmallTupleSelector vars the_var scrut_var scrut
mk_tup_sel vars_s the_var = mkSmallTupleSelector group the_var tpl_v $
mk_tup_sel (chunkify tpl_vs) tpl_v
where
tpl_tys = [mkBoxedTupleTy (map idType gp) | gp <- vars_s]
tpl_vs = mkTemplateLocals tpl_tys
[(tpl_v, group)] = [(tpl,gp) | (tpl,gp) <- zipEqual "mkTupleSelector" tpl_vs vars_s,
the_var `elem` gp ]
-- | Like 'mkTupleSelector' but for tuples that are guaranteed
-- never to be \"big\".
--
-- > mkSmallTupleSelector [x] x v e = [| e |]
-- > mkSmallTupleSelector [x,y,z] x v e = [| case e of v { (x,y,z) -> x } |]
mkSmallTupleSelector :: [Id] -- The tuple args
-> Id -- The selected one
-> Id -- A variable of the same type as the scrutinee
-> CoreExpr -- Scrutinee
-> CoreExpr
mkSmallTupleSelector [var] should_be_the_same_var _ scrut
= ASSERT(var == should_be_the_same_var)
scrut
mkSmallTupleSelector vars the_var scrut_var scrut
= ASSERT( notNull vars )
Case scrut scrut_var (idType the_var)
[(DataAlt (tupleDataCon Boxed (length vars)), vars, Var the_var)]
-- | A generalization of 'mkTupleSelector', allowing the body
-- of the case to be an arbitrary expression.
--
-- To avoid shadowing, we use uniques to invent new variables.
--
-- If necessary we pattern match on a \"big\" tuple.
mkTupleCase :: UniqSupply -- ^ For inventing names of intermediate variables
-> [Id] -- ^ The tuple identifiers to pattern match on
-> CoreExpr -- ^ Body of the case
-> Id -- ^ A variable of the same type as the scrutinee
-> CoreExpr -- ^ Scrutinee
-> CoreExpr
-- ToDo: eliminate cases where none of the variables are needed.
--
-- mkTupleCase uniqs [a,b,c,d] body v e
-- = case e of v { (p,q) ->
-- case p of p { (a,b) ->
-- case q of q { (c,d) ->
-- body }}}
mkTupleCase uniqs vars body scrut_var scrut
= mk_tuple_case uniqs (chunkify vars) body
where
    -- This is the case where we don't need any nesting
mk_tuple_case _ [vars] body
= mkSmallTupleCase vars body scrut_var scrut
    -- This is the case where we must nest tuples at least once
mk_tuple_case us vars_s body
= let (us', vars', body') = foldr one_tuple_case (us, [], body) vars_s
in mk_tuple_case us' (chunkify vars') body'
one_tuple_case chunk_vars (us, vs, body)
= let (uniq, us') = takeUniqFromSupply us
scrut_var = mkSysLocal (fsLit "ds") uniq
(mkBoxedTupleTy (map idType chunk_vars))
body' = mkSmallTupleCase chunk_vars body scrut_var (Var scrut_var)
in (us', scrut_var:vs, body')
-- | As 'mkTupleCase', but for a tuple that is small enough to be guaranteed
-- not to need nesting.
mkSmallTupleCase
:: [Id] -- ^ The tuple args
-> CoreExpr -- ^ Body of the case
-> Id -- ^ A variable of the same type as the scrutinee
-> CoreExpr -- ^ Scrutinee
-> CoreExpr
mkSmallTupleCase [var] body _scrut_var scrut
= bindNonRec var scrut body
mkSmallTupleCase vars body scrut_var scrut
-- One branch no refinement?
= Case scrut scrut_var (exprType body)
[(DataAlt (tupleDataCon Boxed (length vars)), vars, body)]
{-
************************************************************************
* *
\subsection{Common list manipulation expressions}
* *
************************************************************************
Call the constructor Ids when building explicit lists, so that they
interact well with rules.
-}
-- | Makes a list @[]@ for lists of the specified type
mkNilExpr :: Type -> CoreExpr
mkNilExpr ty = mkConApp nilDataCon [Type ty]
-- | Makes a list @(:)@ for lists of the specified type
mkConsExpr :: Type -> CoreExpr -> CoreExpr -> CoreExpr
mkConsExpr ty hd tl = mkConApp consDataCon [Type ty, hd, tl]
-- | Make a list containing the given expressions, where the list has the given type
mkListExpr :: Type -> [CoreExpr] -> CoreExpr
mkListExpr ty xs = foldr (mkConsExpr ty) (mkNilExpr ty) xs
-- | Make a fully applied 'foldr' expression
mkFoldrExpr :: MonadThings m
=> Type -- ^ Element type of the list
-> Type -- ^ Fold result type
-> CoreExpr -- ^ "Cons" function expression for the fold
-> CoreExpr -- ^ "Nil" expression for the fold
            -> CoreExpr         -- ^ List expression being folded across
-> m CoreExpr
mkFoldrExpr elt_ty result_ty c n list = do
foldr_id <- lookupId foldrName
return (Var foldr_id `App` Type elt_ty
`App` Type result_ty
`App` c
`App` n
`App` list)
-- | Make a 'build' expression applied to a locally-bound worker function
mkBuildExpr :: (MonadThings m, MonadUnique m)
=> Type -- ^ Type of list elements to be built
-> ((Id, Type) -> (Id, Type) -> m CoreExpr) -- ^ Function that, given information about the 'Id's
-- of the binders for the build worker function, returns
-- the body of that worker
-> m CoreExpr
mkBuildExpr elt_ty mk_build_inside = do
[n_tyvar] <- newTyVars [alphaTyVar]
let n_ty = mkTyVarTy n_tyvar
c_ty = mkFunTys [elt_ty, n_ty] n_ty
[c, n] <- sequence [mkSysLocalM (fsLit "c") c_ty, mkSysLocalM (fsLit "n") n_ty]
build_inside <- mk_build_inside (c, c_ty) (n, n_ty)
build_id <- lookupId buildName
return $ Var build_id `App` Type elt_ty `App` mkLams [n_tyvar, c, n] build_inside
where
newTyVars tyvar_tmpls = do
uniqs <- getUniquesM
return (zipWith setTyVarUnique tyvar_tmpls uniqs)
{-
************************************************************************
* *
Error expressions
* *
************************************************************************
-}
mkRuntimeErrorApp
:: Id -- Should be of type (forall a. Addr# -> a)
-- where Addr# points to a UTF8 encoded string
-> Type -- The type to instantiate 'a'
-> String -- The string to print
-> CoreExpr
mkRuntimeErrorApp err_id res_ty err_msg
= mkApps (Var err_id) [Type res_ty, err_string]
where
err_string = Lit (mkMachString err_msg)
mkImpossibleExpr :: Type -> CoreExpr
mkImpossibleExpr res_ty
= mkRuntimeErrorApp rUNTIME_ERROR_ID res_ty "Impossible case alternative"
{-
************************************************************************
* *
Error Ids
* *
************************************************************************
GHC randomly injects these into the code.
@patError@ is just a version of @error@ for pattern-matching
failures. It knows various ``codes'' which expand to longer
strings---this saves space!
@absentErr@ is a thing we put in for ``absent'' arguments. They jolly
well shouldn't be yanked on, but if one is, then you will get a
friendly message from @absentErr@ (rather than a totally random
crash).
@parError@ is a special version of @error@ which the compiler does
not know to be a bottoming Id. It is used in the @_par_@ and @_seq_@
templates, but we don't ever expect to generate code for it.
-}
errorIds :: [Id]
errorIds
= [ eRROR_ID, -- This one isn't used anywhere else in the compiler
-- But we still need it in wiredInIds so that when GHC
-- compiles a program that mentions 'error' we don't
-- import its type from the interface file; we just get
-- the Id defined here. Which has an 'open-tyvar' type.
uNDEFINED_ID, -- Ditto for 'undefined'. The big deal is to give it
-- an 'open-tyvar' type.
rUNTIME_ERROR_ID,
iRREFUT_PAT_ERROR_ID,
nON_EXHAUSTIVE_GUARDS_ERROR_ID,
nO_METHOD_BINDING_ERROR_ID,
pAT_ERROR_ID,
rEC_CON_ERROR_ID,
rEC_SEL_ERROR_ID,
aBSENT_ERROR_ID,
tYPE_ERROR_ID -- Used with Opt_DeferTypeErrors, see #10284
]
recSelErrorName, runtimeErrorName, absentErrorName :: Name
irrefutPatErrorName, recConErrorName, patErrorName :: Name
nonExhaustiveGuardsErrorName, noMethodBindingErrorName :: Name
typeErrorName :: Name
recSelErrorName = err_nm "recSelError" recSelErrorIdKey rEC_SEL_ERROR_ID
absentErrorName = err_nm "absentError" absentErrorIdKey aBSENT_ERROR_ID
runtimeErrorName = err_nm "runtimeError" runtimeErrorIdKey rUNTIME_ERROR_ID
irrefutPatErrorName = err_nm "irrefutPatError" irrefutPatErrorIdKey iRREFUT_PAT_ERROR_ID
recConErrorName = err_nm "recConError" recConErrorIdKey rEC_CON_ERROR_ID
patErrorName = err_nm "patError" patErrorIdKey pAT_ERROR_ID
typeErrorName = err_nm "typeError" typeErrorIdKey tYPE_ERROR_ID
noMethodBindingErrorName = err_nm "noMethodBindingError"
noMethodBindingErrorIdKey nO_METHOD_BINDING_ERROR_ID
nonExhaustiveGuardsErrorName = err_nm "nonExhaustiveGuardsError"
nonExhaustiveGuardsErrorIdKey nON_EXHAUSTIVE_GUARDS_ERROR_ID
err_nm :: String -> Unique -> Id -> Name
err_nm str uniq id = mkWiredInIdName cONTROL_EXCEPTION_BASE (fsLit str) uniq id
rEC_SEL_ERROR_ID, rUNTIME_ERROR_ID, iRREFUT_PAT_ERROR_ID, rEC_CON_ERROR_ID :: Id
pAT_ERROR_ID, nO_METHOD_BINDING_ERROR_ID, nON_EXHAUSTIVE_GUARDS_ERROR_ID :: Id
tYPE_ERROR_ID :: Id
aBSENT_ERROR_ID :: Id
rEC_SEL_ERROR_ID = mkRuntimeErrorId recSelErrorName
rUNTIME_ERROR_ID = mkRuntimeErrorId runtimeErrorName
iRREFUT_PAT_ERROR_ID = mkRuntimeErrorId irrefutPatErrorName
rEC_CON_ERROR_ID = mkRuntimeErrorId recConErrorName
pAT_ERROR_ID = mkRuntimeErrorId patErrorName
nO_METHOD_BINDING_ERROR_ID = mkRuntimeErrorId noMethodBindingErrorName
nON_EXHAUSTIVE_GUARDS_ERROR_ID = mkRuntimeErrorId nonExhaustiveGuardsErrorName
aBSENT_ERROR_ID = mkRuntimeErrorId absentErrorName
tYPE_ERROR_ID = mkRuntimeErrorId typeErrorName
mkRuntimeErrorId :: Name -> Id
mkRuntimeErrorId name = pc_bottoming_Id1 name runtimeErrorTy
runtimeErrorTy :: Type
-- The runtime error Ids take a UTF8-encoded string as argument
runtimeErrorTy = mkSigmaTy [openAlphaTyVar] [] (mkFunTy addrPrimTy openAlphaTy)
errorName :: Name
errorName = mkWiredInIdName gHC_ERR (fsLit "error") errorIdKey eRROR_ID
eRROR_ID :: Id
eRROR_ID = pc_bottoming_Id1 errorName errorTy
errorTy :: Type -- See Note [Error and friends have an "open-tyvar" forall]
errorTy = mkSigmaTy [openAlphaTyVar] [] (mkFunTys [mkListTy charTy] openAlphaTy)
undefinedName :: Name
undefinedName = mkWiredInIdName gHC_ERR (fsLit "undefined") undefinedKey uNDEFINED_ID
uNDEFINED_ID :: Id
uNDEFINED_ID = pc_bottoming_Id0 undefinedName undefinedTy
undefinedTy :: Type -- See Note [Error and friends have an "open-tyvar" forall]
undefinedTy = mkSigmaTy [openAlphaTyVar] [] openAlphaTy
{-
Note [Error and friends have an "open-tyvar" forall]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'error' and 'undefined' have types
error :: forall (a::OpenKind). String -> a
undefined :: forall (a::OpenKind). a
Notice the 'OpenKind' (manifested as openAlphaTyVar in the code). This ensures that
"error" can be instantiated at
* unboxed as well as boxed types
* polymorphic types
This is OK because it never returns, so the return type is irrelevant.
See Note [OpenTypeKind accepts foralls] in TcUnify.
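For instance (an illustrative aside added here, not part of the original
Note): this is what allows an application such as (error "boom" :: Int#)
to typecheck, even though Int# is an unboxed, unlifted type.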
************************************************************************
* *
\subsection{Utilities}
* *
************************************************************************
-}
pc_bottoming_Id1 :: Name -> Type -> Id
-- Function of arity 1, which diverges after being given one argument
pc_bottoming_Id1 name ty
= mkVanillaGlobalWithInfo name ty bottoming_info
where
bottoming_info = vanillaIdInfo `setStrictnessInfo` strict_sig
`setArityInfo` 1
-- Make arity and strictness agree
-- Do *not* mark them as NoCafRefs, because they can indeed have
-- CAF refs. For example, pAT_ERROR_ID calls GHC.Err.untangle,
-- which has some CAFs
-- In due course we may arrange that these error-y things are
-- regarded by the GC as permanently live, in which case we
-- can give them NoCaf info. As it is, any function that calls
-- any pc_bottoming_Id will itself have CafRefs, which bloats
-- SRTs.
strict_sig = mkClosedStrictSig [evalDmd] botRes
-- These "bottom" out, no matter what their arguments
pc_bottoming_Id0 :: Name -> Type -> Id
-- Same but arity zero
pc_bottoming_Id0 name ty
= mkVanillaGlobalWithInfo name ty bottoming_info
where
bottoming_info = vanillaIdInfo `setStrictnessInfo` strict_sig
strict_sig = mkClosedStrictSig [] botRes
| TomMD/ghc | compiler/coreSyn/MkCore.hs | Haskell | bsd-3-clause | 31,850 |
{-# LANGUAGE CPP, GADTs #-}
-----------------------------------------------------------------------------
--
-- Pretty-printing of Cmm as C, suitable for feeding gcc
--
-- (c) The University of Glasgow 2004-2006
--
-- Print Cmm as real C, for -fvia-C
--
-- See wiki:Commentary/Compiler/Backends/PprC
--
-- This is simpler than the old PprAbsC, because Cmm is "macro-expanded"
-- relative to the old AbstractC, and many oddities/decorations have
-- disappeared from the data type.
--
-- This code generator is only supported in unregisterised mode.
--
-----------------------------------------------------------------------------
module PprC (
writeCs,
pprStringInCStyle
) where
#include "HsVersions.h"
-- Cmm stuff
import BlockId
import CLabel
import ForeignCall
import Cmm hiding (pprBBlock)
import PprCmm ()
import Hoopl
import CmmUtils
import CmmSwitch
-- Utils
import CPrim
import DynFlags
import FastString
import Outputable
import Platform
import UniqSet
import Unique
import Util
-- The rest
import Control.Monad.ST
import Data.Bits
import Data.Char
import Data.List
import Data.Map (Map)
import Data.Word
import System.IO
import qualified Data.Map as Map
import Control.Monad (liftM, ap)
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative (Applicative(..))
#endif
import qualified Data.Array.Unsafe as U ( castSTUArray )
import Data.Array.ST
-- --------------------------------------------------------------------------
-- Top level
pprCs :: DynFlags -> [RawCmmGroup] -> SDoc
pprCs dflags cmms
= pprCode CStyle (vcat $ map (\c -> split_marker $$ pprC c) cmms)
where
split_marker
| gopt Opt_SplitObjs dflags = ptext (sLit "__STG_SPLIT_MARKER")
| otherwise = empty
writeCs :: DynFlags -> Handle -> [RawCmmGroup] -> IO ()
writeCs dflags handle cmms
= printForC dflags handle (pprCs dflags cmms)
-- --------------------------------------------------------------------------
-- Now do some real work
--
-- for fun, we could call cmmToCmm over the tops...
--
pprC :: RawCmmGroup -> SDoc
pprC tops = vcat $ intersperse blankLine $ map pprTop tops
--
-- top level procs
--
pprTop :: RawCmmDecl -> SDoc
pprTop (CmmProc infos clbl _ graph) =
(case mapLookup (g_entry graph) infos of
Nothing -> empty
Just (Statics info_clbl info_dat) -> pprDataExterns info_dat $$
pprWordArray info_clbl info_dat) $$
(vcat [
blankLine,
extern_decls,
(if (externallyVisibleCLabel clbl)
then mkFN_ else mkIF_) (ppr clbl) <+> lbrace,
nest 8 temp_decls,
vcat (map pprBBlock blocks),
rbrace ]
)
where
blocks = toBlockListEntryFirst graph
(temp_decls, extern_decls) = pprTempAndExternDecls blocks
-- Chunks of static data.
-- We only handle (a) arrays of word-sized things and (b) strings.
pprTop (CmmData _section (Statics lbl [CmmString str])) =
hcat [
pprLocalness lbl, ptext (sLit "char "), ppr lbl,
ptext (sLit "[] = "), pprStringInCStyle str, semi
]
pprTop (CmmData _section (Statics lbl [CmmUninitialised size])) =
hcat [
pprLocalness lbl, ptext (sLit "char "), ppr lbl,
brackets (int size), semi
]
pprTop (CmmData _section (Statics lbl lits)) =
pprDataExterns lits $$
pprWordArray lbl lits
-- --------------------------------------------------------------------------
-- BasicBlocks are self-contained entities: they always end in a jump.
--
-- Like nativeGen/AsmCodeGen, we could probably reorder blocks to turn
-- as many jumps as possible into fall throughs.
--
pprBBlock :: CmmBlock -> SDoc
pprBBlock block =
nest 4 (pprBlockId (entryLabel block) <> colon) $$
nest 8 (vcat (map pprStmt (blockToList nodes)) $$ pprStmt last)
where
(_, nodes, last) = blockSplit block
-- --------------------------------------------------------------------------
-- Info tables. Just arrays of words.
-- See codeGen/ClosureInfo, and nativeGen/PprMach
pprWordArray :: CLabel -> [CmmStatic] -> SDoc
pprWordArray lbl ds
= sdocWithDynFlags $ \dflags ->
hcat [ pprLocalness lbl, ptext (sLit "StgWord")
, space, ppr lbl, ptext (sLit "[] = {") ]
$$ nest 8 (commafy (pprStatics dflags ds))
$$ ptext (sLit "};")
--
-- has to be static, if it isn't globally visible
--
pprLocalness :: CLabel -> SDoc
pprLocalness lbl | not $ externallyVisibleCLabel lbl = ptext (sLit "static ")
| otherwise = empty
-- --------------------------------------------------------------------------
-- Statements.
--
pprStmt :: CmmNode e x -> SDoc
pprStmt stmt =
sdocWithDynFlags $ \dflags ->
case stmt of
CmmEntry{} -> empty
CmmComment _ -> empty -- (hang (ptext (sLit "/*")) 3 (ftext s)) $$ ptext (sLit "*/")
-- XXX if the string contains "*/", we need to fix it
-- XXX we probably want to emit these comments when
-- some debugging option is on. They can get quite
-- large.
CmmTick _ -> empty
CmmUnwind{} -> empty
CmmAssign dest src -> pprAssign dflags dest src
CmmStore dest src
| typeWidth rep == W64 && wordWidth dflags /= W64
-> (if isFloatType rep then ptext (sLit "ASSIGN_DBL")
else ptext (sLit ("ASSIGN_Word64"))) <>
parens (mkP_ <> pprExpr1 dest <> comma <> pprExpr src) <> semi
| otherwise
-> hsep [ pprExpr (CmmLoad dest rep), equals, pprExpr src <> semi ]
where
rep = cmmExprType dflags src
CmmUnsafeForeignCall target@(ForeignTarget fn conv) results args ->
fnCall
where
(res_hints, arg_hints) = foreignTargetHints target
hresults = zip results res_hints
hargs = zip args arg_hints
ForeignConvention cconv _ _ ret = conv
cast_fn = parens (cCast (pprCFunType (char '*') cconv hresults hargs) fn)
-- See wiki:Commentary/Compiler/Backends/PprC#Prototypes
fnCall =
case fn of
CmmLit (CmmLabel lbl)
| StdCallConv <- cconv ->
pprCall (ppr lbl) cconv hresults hargs
-- stdcall functions must be declared with
-- a function type, otherwise the C compiler
-- doesn't add the @n suffix to the label. We
-- can't add the @n suffix ourselves, because
-- it isn't valid C.
| CmmNeverReturns <- ret ->
pprCall cast_fn cconv hresults hargs <> semi
| not (isMathFun lbl) ->
pprForeignCall (ppr lbl) cconv hresults hargs
_ ->
pprCall cast_fn cconv hresults hargs <> semi
-- for a dynamic call, no declaration is necessary.
CmmUnsafeForeignCall (PrimTarget MO_Touch) _results _args -> empty
CmmUnsafeForeignCall (PrimTarget (MO_Prefetch_Data _)) _results _args -> empty
CmmUnsafeForeignCall target@(PrimTarget op) results args ->
fn_call
where
cconv = CCallConv
fn = pprCallishMachOp_for_C op
(res_hints, arg_hints) = foreignTargetHints target
hresults = zip results res_hints
hargs = zip args arg_hints
fn_call
-- The mem primops carry an extra alignment arg.
-- We could maybe emit an alignment directive using this info.
-- We also need to cast mem primops to prevent conflicts with GCC
-- builtins (see bug #5967).
| Just _align <- machOpMemcpyishAlign op
= (ptext (sLit ";EF_(") <> fn <> char ')' <> semi) $$
pprForeignCall fn cconv hresults hargs
| otherwise
= pprCall fn cconv hresults hargs
CmmBranch ident -> pprBranch ident
CmmCondBranch expr yes no _ -> pprCondBranch expr yes no
CmmCall { cml_target = expr } -> mkJMP_ (pprExpr expr) <> semi
CmmSwitch arg ids -> sdocWithDynFlags $ \dflags ->
pprSwitch dflags arg ids
_other -> pprPanic "PprC.pprStmt" (ppr stmt)
type Hinted a = (a, ForeignHint)
pprForeignCall :: SDoc -> CCallConv -> [Hinted CmmFormal] -> [Hinted CmmActual]
-> SDoc
pprForeignCall fn cconv results args = fn_call
where
fn_call = braces (
pprCFunType (char '*' <> text "ghcFunPtr") cconv results args <> semi
$$ text "ghcFunPtr" <+> equals <+> cast_fn <> semi
$$ pprCall (text "ghcFunPtr") cconv results args <> semi
)
cast_fn = parens (parens (pprCFunType (char '*') cconv results args) <> fn)
pprCFunType :: SDoc -> CCallConv -> [Hinted CmmFormal] -> [Hinted CmmActual] -> SDoc
pprCFunType ppr_fn cconv ress args
= sdocWithDynFlags $ \dflags ->
let res_type [] = ptext (sLit "void")
res_type [(one, hint)] = machRepHintCType (localRegType one) hint
res_type _ = panic "pprCFunType: only void or 1 return value supported"
arg_type (expr, hint) = machRepHintCType (cmmExprType dflags expr) hint
in res_type ress <+>
parens (ccallConvAttribute cconv <> ppr_fn) <>
parens (commafy (map arg_type args))
-- ---------------------------------------------------------------------
-- unconditional branches
pprBranch :: BlockId -> SDoc
pprBranch ident = ptext (sLit "goto") <+> pprBlockId ident <> semi
-- ---------------------------------------------------------------------
-- conditional branches to local labels
pprCondBranch :: CmmExpr -> BlockId -> BlockId -> SDoc
pprCondBranch expr yes no
= hsep [ ptext (sLit "if") , parens(pprExpr expr) ,
ptext (sLit "goto"), pprBlockId yes <> semi,
ptext (sLit "else goto"), pprBlockId no <> semi ]
-- ---------------------------------------------------------------------
-- a local table branch
--
-- we find the fall-through cases
--
pprSwitch :: DynFlags -> CmmExpr -> SwitchTargets -> SDoc
pprSwitch dflags e ids
= (hang (ptext (sLit "switch") <+> parens ( pprExpr e ) <+> lbrace)
4 (vcat ( map caseify pairs ) $$ def)) $$ rbrace
where
(pairs, mbdef) = switchTargetsFallThrough ids
-- fall through case
caseify (ix:ixs, ident) = vcat (map do_fallthrough ixs) $$ final_branch ix
where
do_fallthrough ix =
hsep [ ptext (sLit "case") , pprHexVal ix (wordWidth dflags) <> colon ,
ptext (sLit "/* fall through */") ]
final_branch ix =
hsep [ ptext (sLit "case") , pprHexVal ix (wordWidth dflags) <> colon ,
ptext (sLit "goto") , (pprBlockId ident) <> semi ]
caseify (_ , _ ) = panic "pprSwitch: switch with no cases!"
def | Just l <- mbdef = ptext (sLit "default: goto") <+> pprBlockId l <> semi
| otherwise = empty
-- ---------------------------------------------------------------------
-- Expressions.
--
-- C Types: the invariant is that the C expression generated by
--
-- pprExpr e
--
-- has a type in C which is also given by
--
-- machRepCType (cmmExprType e)
--
-- (similar invariants apply to the rest of the pretty printer).
pprExpr :: CmmExpr -> SDoc
pprExpr e = case e of
CmmLit lit -> pprLit lit
CmmLoad e ty -> sdocWithDynFlags $ \dflags -> pprLoad dflags e ty
CmmReg reg -> pprCastReg reg
CmmRegOff reg 0 -> pprCastReg reg
CmmRegOff reg i
| i < 0 && negate_ok -> pprRegOff (char '-') (-i)
| otherwise -> pprRegOff (char '+') i
where
pprRegOff op i' = pprCastReg reg <> op <> int i'
negate_ok = negate (fromIntegral i :: Integer) <
fromIntegral (maxBound::Int)
-- overflow is undefined; see #7620
CmmMachOp mop args -> pprMachOpApp mop args
CmmStackSlot _ _ -> panic "pprExpr: CmmStackSlot not supported!"
pprLoad :: DynFlags -> CmmExpr -> CmmType -> SDoc
pprLoad dflags e ty
| width == W64, wordWidth dflags /= W64
= (if isFloatType ty then ptext (sLit "PK_DBL")
else ptext (sLit "PK_Word64"))
<> parens (mkP_ <> pprExpr1 e)
| otherwise
= case e of
CmmReg r | isPtrReg r && width == wordWidth dflags && not (isFloatType ty)
-> char '*' <> pprAsPtrReg r
CmmRegOff r 0 | isPtrReg r && width == wordWidth dflags && not (isFloatType ty)
-> char '*' <> pprAsPtrReg r
CmmRegOff r off | isPtrReg r && width == wordWidth dflags
, off `rem` wORD_SIZE dflags == 0 && not (isFloatType ty)
-- ToDo: check that the offset is a word multiple?
-- (For tagging to work, I had to avoid unaligned loads. --ARY)
-> pprAsPtrReg r <> brackets (ppr (off `shiftR` wordShift dflags))
_other -> cLoad e ty
where
width = typeWidth ty
pprExpr1 :: CmmExpr -> SDoc
pprExpr1 (CmmLit lit) = pprLit1 lit
pprExpr1 e@(CmmReg _reg) = pprExpr e
pprExpr1 other = parens (pprExpr other)
-- --------------------------------------------------------------------------
-- MachOp applications
pprMachOpApp :: MachOp -> [CmmExpr] -> SDoc
pprMachOpApp op args
| isMulMayOfloOp op
= ptext (sLit "mulIntMayOflo") <> parens (commafy (map pprExpr args))
where isMulMayOfloOp (MO_U_MulMayOflo _) = True
isMulMayOfloOp (MO_S_MulMayOflo _) = True
isMulMayOfloOp _ = False
pprMachOpApp mop args
| Just ty <- machOpNeedsCast mop
= ty <> parens (pprMachOpApp' mop args)
| otherwise
= pprMachOpApp' mop args
-- Comparisons in C have type 'int', but we want type W_ (this is what
-- resultRepOfMachOp says). The other C operations inherit their type
-- from their operands, so no casting is required.
machOpNeedsCast :: MachOp -> Maybe SDoc
machOpNeedsCast mop
| isComparisonMachOp mop = Just mkW_
| otherwise = Nothing
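-- Illustrative note (added, not in the original source): for a word-sized
-- equality such as (x == y), pprMachOpApp therefore emits (W_)(x == y),
-- so the resulting C expression has type W_ rather than C's int.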
pprMachOpApp' :: MachOp -> [CmmExpr] -> SDoc
pprMachOpApp' mop args
= case args of
-- dyadic
[x,y] -> pprArg x <+> pprMachOp_for_C mop <+> pprArg y
-- unary
[x] -> pprMachOp_for_C mop <> parens (pprArg x)
_ -> panic "PprC.pprMachOp : machop with wrong number of args"
where
-- Cast needed for signed integer ops
pprArg e | signedOp mop = sdocWithDynFlags $ \dflags ->
cCast (machRep_S_CType (typeWidth (cmmExprType dflags e))) e
| needsFCasts mop = sdocWithDynFlags $ \dflags ->
cCast (machRep_F_CType (typeWidth (cmmExprType dflags e))) e
| otherwise = pprExpr1 e
needsFCasts (MO_F_Eq _) = False
needsFCasts (MO_F_Ne _) = False
needsFCasts (MO_F_Neg _) = True
needsFCasts (MO_F_Quot _) = True
needsFCasts mop = floatComparison mop
-- --------------------------------------------------------------------------
-- Literals
pprLit :: CmmLit -> SDoc
pprLit lit = case lit of
CmmInt i rep -> pprHexVal i rep
CmmFloat f w -> parens (machRep_F_CType w) <> str
where d = fromRational f :: Double
str | isInfinite d && d < 0 = ptext (sLit "-INFINITY")
| isInfinite d = ptext (sLit "INFINITY")
| isNaN d = ptext (sLit "NAN")
| otherwise = text (show d)
-- these constants come from <math.h>
-- see #1861
CmmVec {} -> panic "PprC printing vector literal"
CmmBlock bid -> mkW_ <> pprCLabelAddr (infoTblLbl bid)
CmmHighStackMark -> panic "PprC printing high stack mark"
CmmLabel clbl -> mkW_ <> pprCLabelAddr clbl
CmmLabelOff clbl i -> mkW_ <> pprCLabelAddr clbl <> char '+' <> int i
CmmLabelDiffOff clbl1 _ i
-- WARNING:
-- * the lit must occur in the info table clbl2
-- * clbl1 must be an SRT, a slow entry point or a large bitmap
-> mkW_ <> pprCLabelAddr clbl1 <> char '+' <> int i
where
pprCLabelAddr lbl = char '&' <> ppr lbl
pprLit1 :: CmmLit -> SDoc
pprLit1 lit@(CmmLabelOff _ _) = parens (pprLit lit)
pprLit1 lit@(CmmLabelDiffOff _ _ _) = parens (pprLit lit)
pprLit1 lit@(CmmFloat _ _) = parens (pprLit lit)
pprLit1 other = pprLit other
-- ---------------------------------------------------------------------------
-- Static data
pprStatics :: DynFlags -> [CmmStatic] -> [SDoc]
pprStatics _ [] = []
pprStatics dflags (CmmStaticLit (CmmFloat f W32) : rest)
-- floats are padded to a word, see #1852
| wORD_SIZE dflags == 8, CmmStaticLit (CmmInt 0 W32) : rest' <- rest
= pprLit1 (floatToWord dflags f) : pprStatics dflags rest'
| wORD_SIZE dflags == 4
= pprLit1 (floatToWord dflags f) : pprStatics dflags rest
| otherwise
= pprPanic "pprStatics: float" (vcat (map ppr' rest))
where ppr' (CmmStaticLit l) = sdocWithDynFlags $ \dflags ->
ppr (cmmLitType dflags l)
ppr' _other = ptext (sLit "bad static!")
pprStatics dflags (CmmStaticLit (CmmFloat f W64) : rest)
= map pprLit1 (doubleToWords dflags f) ++ pprStatics dflags rest
pprStatics dflags (CmmStaticLit (CmmInt i W64) : rest)
| wordWidth dflags == W32
= if wORDS_BIGENDIAN dflags
then pprStatics dflags (CmmStaticLit (CmmInt q W32) :
CmmStaticLit (CmmInt r W32) : rest)
else pprStatics dflags (CmmStaticLit (CmmInt r W32) :
CmmStaticLit (CmmInt q W32) : rest)
where r = i .&. 0xffffffff
q = i `shiftR` 32
pprStatics dflags (CmmStaticLit (CmmInt _ w) : _)
| w /= wordWidth dflags
= panic "pprStatics: cannot emit a non-word-sized static literal"
pprStatics dflags (CmmStaticLit lit : rest)
= pprLit1 lit : pprStatics dflags rest
pprStatics _ (other : _)
= pprPanic "pprWord" (pprStatic other)
pprStatic :: CmmStatic -> SDoc
pprStatic s = case s of
CmmStaticLit lit -> nest 4 (pprLit lit)
CmmUninitialised i -> nest 4 (mkC_ <> brackets (int i))
-- these should be inlined, like the old .hc
CmmString s' -> nest 4 (mkW_ <> parens(pprStringInCStyle s'))
-- ---------------------------------------------------------------------------
-- Block Ids
pprBlockId :: BlockId -> SDoc
pprBlockId b = char '_' <> ppr (getUnique b)
-- --------------------------------------------------------------------------
-- Print a MachOp in a way suitable for emitting via C.
--
pprMachOp_for_C :: MachOp -> SDoc
pprMachOp_for_C mop = case mop of
-- Integer operations
MO_Add _ -> char '+'
MO_Sub _ -> char '-'
MO_Eq _ -> ptext (sLit "==")
MO_Ne _ -> ptext (sLit "!=")
MO_Mul _ -> char '*'
MO_S_Quot _ -> char '/'
MO_S_Rem _ -> char '%'
MO_S_Neg _ -> char '-'
MO_U_Quot _ -> char '/'
MO_U_Rem _ -> char '%'
-- & Floating-point operations
MO_F_Add _ -> char '+'
MO_F_Sub _ -> char '-'
MO_F_Neg _ -> char '-'
MO_F_Mul _ -> char '*'
MO_F_Quot _ -> char '/'
-- Signed comparisons
MO_S_Ge _ -> ptext (sLit ">=")
MO_S_Le _ -> ptext (sLit "<=")
MO_S_Gt _ -> char '>'
MO_S_Lt _ -> char '<'
-- & Unsigned comparisons
MO_U_Ge _ -> ptext (sLit ">=")
MO_U_Le _ -> ptext (sLit "<=")
MO_U_Gt _ -> char '>'
MO_U_Lt _ -> char '<'
-- & Floating-point comparisons
MO_F_Eq _ -> ptext (sLit "==")
MO_F_Ne _ -> ptext (sLit "!=")
MO_F_Ge _ -> ptext (sLit ">=")
MO_F_Le _ -> ptext (sLit "<=")
MO_F_Gt _ -> char '>'
MO_F_Lt _ -> char '<'
-- Bitwise operations. Not all of these may be supported at all
-- sizes, and only integral MachReps are valid.
MO_And _ -> char '&'
MO_Or _ -> char '|'
MO_Xor _ -> char '^'
MO_Not _ -> char '~'
MO_Shl _ -> ptext (sLit "<<")
MO_U_Shr _ -> ptext (sLit ">>") -- unsigned shift right
MO_S_Shr _ -> ptext (sLit ">>") -- signed shift right
-- Conversions. Some of these will be NOPs, but never those that convert
-- between ints and floats.
-- Floating-point conversions use the signed variant.
-- We won't know to generate (void*) casts here, but maybe from
-- context elsewhere
-- noop casts
MO_UU_Conv from to | from == to -> empty
MO_UU_Conv _from to -> parens (machRep_U_CType to)
MO_SS_Conv from to | from == to -> empty
MO_SS_Conv _from to -> parens (machRep_S_CType to)
MO_FF_Conv from to | from == to -> empty
MO_FF_Conv _from to -> parens (machRep_F_CType to)
MO_SF_Conv _from to -> parens (machRep_F_CType to)
MO_FS_Conv _from to -> parens (machRep_S_CType to)
MO_S_MulMayOflo _ -> pprTrace "offending mop:"
(ptext $ sLit "MO_S_MulMayOflo")
(panic $ "PprC.pprMachOp_for_C: MO_S_MulMayOflo"
++ " should have been handled earlier!")
MO_U_MulMayOflo _ -> pprTrace "offending mop:"
(ptext $ sLit "MO_U_MulMayOflo")
(panic $ "PprC.pprMachOp_for_C: MO_U_MulMayOflo"
++ " should have been handled earlier!")
MO_V_Insert {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_V_Insert")
(panic $ "PprC.pprMachOp_for_C: MO_V_Insert"
++ " should have been handled earlier!")
MO_V_Extract {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_V_Extract")
(panic $ "PprC.pprMachOp_for_C: MO_V_Extract"
++ " should have been handled earlier!")
MO_V_Add {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_V_Add")
(panic $ "PprC.pprMachOp_for_C: MO_V_Add"
++ " should have been handled earlier!")
MO_V_Sub {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_V_Sub")
(panic $ "PprC.pprMachOp_for_C: MO_V_Sub"
++ " should have been handled earlier!")
MO_V_Mul {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_V_Mul")
(panic $ "PprC.pprMachOp_for_C: MO_V_Mul"
++ " should have been handled earlier!")
MO_VS_Quot {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VS_Quot")
(panic $ "PprC.pprMachOp_for_C: MO_VS_Quot"
++ " should have been handled earlier!")
MO_VS_Rem {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VS_Rem")
(panic $ "PprC.pprMachOp_for_C: MO_VS_Rem"
++ " should have been handled earlier!")
MO_VS_Neg {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VS_Neg")
(panic $ "PprC.pprMachOp_for_C: MO_VS_Neg"
++ " should have been handled earlier!")
MO_VU_Quot {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VU_Quot")
(panic $ "PprC.pprMachOp_for_C: MO_VU_Quot"
++ " should have been handled earlier!")
MO_VU_Rem {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VU_Rem")
(panic $ "PprC.pprMachOp_for_C: MO_VU_Rem"
++ " should have been handled earlier!")
MO_VF_Insert {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VF_Insert")
(panic $ "PprC.pprMachOp_for_C: MO_VF_Insert"
++ " should have been handled earlier!")
MO_VF_Extract {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VF_Extract")
(panic $ "PprC.pprMachOp_for_C: MO_VF_Extract"
++ " should have been handled earlier!")
MO_VF_Add {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VF_Add")
(panic $ "PprC.pprMachOp_for_C: MO_VF_Add"
++ " should have been handled earlier!")
MO_VF_Sub {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VF_Sub")
(panic $ "PprC.pprMachOp_for_C: MO_VF_Sub"
++ " should have been handled earlier!")
MO_VF_Neg {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VF_Neg")
(panic $ "PprC.pprMachOp_for_C: MO_VF_Neg"
++ " should have been handled earlier!")
MO_VF_Mul {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VF_Mul")
(panic $ "PprC.pprMachOp_for_C: MO_VF_Mul"
++ " should have been handled earlier!")
MO_VF_Quot {} -> pprTrace "offending mop:"
(ptext $ sLit "MO_VF_Quot")
(panic $ "PprC.pprMachOp_for_C: MO_VF_Quot"
++ " should have been handled earlier!")
signedOp :: MachOp -> Bool -- Argument type(s) are signed ints
signedOp (MO_S_Quot _) = True
signedOp (MO_S_Rem _) = True
signedOp (MO_S_Neg _) = True
signedOp (MO_S_Ge _) = True
signedOp (MO_S_Le _) = True
signedOp (MO_S_Gt _) = True
signedOp (MO_S_Lt _) = True
signedOp (MO_S_Shr _) = True
signedOp (MO_SS_Conv _ _) = True
signedOp (MO_SF_Conv _ _) = True
signedOp _ = False
floatComparison :: MachOp -> Bool -- comparison between float args
floatComparison (MO_F_Eq _) = True
floatComparison (MO_F_Ne _) = True
floatComparison (MO_F_Ge _) = True
floatComparison (MO_F_Le _) = True
floatComparison (MO_F_Gt _) = True
floatComparison (MO_F_Lt _) = True
floatComparison _ = False
-- ---------------------------------------------------------------------
-- Callish MachOps tend to be implemented by foreign calls
pprCallishMachOp_for_C :: CallishMachOp -> SDoc
pprCallishMachOp_for_C mop
= case mop of
MO_F64_Pwr -> ptext (sLit "pow")
MO_F64_Sin -> ptext (sLit "sin")
MO_F64_Cos -> ptext (sLit "cos")
MO_F64_Tan -> ptext (sLit "tan")
MO_F64_Sinh -> ptext (sLit "sinh")
MO_F64_Cosh -> ptext (sLit "cosh")
MO_F64_Tanh -> ptext (sLit "tanh")
MO_F64_Asin -> ptext (sLit "asin")
MO_F64_Acos -> ptext (sLit "acos")
MO_F64_Atan -> ptext (sLit "atan")
MO_F64_Log -> ptext (sLit "log")
MO_F64_Exp -> ptext (sLit "exp")
MO_F64_Sqrt -> ptext (sLit "sqrt")
MO_F32_Pwr -> ptext (sLit "powf")
MO_F32_Sin -> ptext (sLit "sinf")
MO_F32_Cos -> ptext (sLit "cosf")
MO_F32_Tan -> ptext (sLit "tanf")
MO_F32_Sinh -> ptext (sLit "sinhf")
MO_F32_Cosh -> ptext (sLit "coshf")
MO_F32_Tanh -> ptext (sLit "tanhf")
MO_F32_Asin -> ptext (sLit "asinf")
MO_F32_Acos -> ptext (sLit "acosf")
MO_F32_Atan -> ptext (sLit "atanf")
MO_F32_Log -> ptext (sLit "logf")
MO_F32_Exp -> ptext (sLit "expf")
MO_F32_Sqrt -> ptext (sLit "sqrtf")
MO_WriteBarrier -> ptext (sLit "write_barrier")
MO_Memcpy _ -> ptext (sLit "memcpy")
MO_Memset _ -> ptext (sLit "memset")
MO_Memmove _ -> ptext (sLit "memmove")
(MO_BSwap w) -> ptext (sLit $ bSwapLabel w)
(MO_PopCnt w) -> ptext (sLit $ popCntLabel w)
(MO_Clz w) -> ptext (sLit $ clzLabel w)
(MO_Ctz w) -> ptext (sLit $ ctzLabel w)
(MO_AtomicRMW w amop) -> ptext (sLit $ atomicRMWLabel w amop)
(MO_Cmpxchg w) -> ptext (sLit $ cmpxchgLabel w)
(MO_AtomicRead w) -> ptext (sLit $ atomicReadLabel w)
(MO_AtomicWrite w) -> ptext (sLit $ atomicWriteLabel w)
(MO_UF_Conv w) -> ptext (sLit $ word2FloatLabel w)
MO_S_QuotRem {} -> unsupported
MO_U_QuotRem {} -> unsupported
MO_U_QuotRem2 {} -> unsupported
MO_Add2 {} -> unsupported
MO_SubWordC {} -> unsupported
MO_AddIntC {} -> unsupported
MO_SubIntC {} -> unsupported
MO_U_Mul2 {} -> unsupported
MO_Touch -> unsupported
(MO_Prefetch_Data _ ) -> unsupported
--- we could support prefetch via "__builtin_prefetch"
--- Not adding it for now
where unsupported = panic ("pprCallishMachOp_for_C: " ++ show mop
++ " not supported!")
-- ---------------------------------------------------------------------
-- Useful #defines
--
mkJMP_, mkFN_, mkIF_ :: SDoc -> SDoc
mkJMP_ i = ptext (sLit "JMP_") <> parens i
mkFN_ i = ptext (sLit "FN_") <> parens i -- externally visible function
mkIF_ i = ptext (sLit "IF_") <> parens i -- locally visible
-- from includes/Stg.h
--
mkC_,mkW_,mkP_ :: SDoc
mkC_ = ptext (sLit "(C_)") -- StgChar
mkW_ = ptext (sLit "(W_)") -- StgWord
mkP_ = ptext (sLit "(P_)") -- StgWord*
-- ---------------------------------------------------------------------
--
-- Assignments
--
-- Generating assignments is what we're all about, here
--
pprAssign :: DynFlags -> CmmReg -> CmmExpr -> SDoc
-- dest is a reg, rhs is a reg
pprAssign _ r1 (CmmReg r2)
| isPtrReg r1 && isPtrReg r2
= hcat [ pprAsPtrReg r1, equals, pprAsPtrReg r2, semi ]
-- dest is a reg, rhs is a CmmRegOff
pprAssign dflags r1 (CmmRegOff r2 off)
| isPtrReg r1 && isPtrReg r2 && (off `rem` wORD_SIZE dflags == 0)
= hcat [ pprAsPtrReg r1, equals, pprAsPtrReg r2, op, int off', semi ]
where
off1 = off `shiftR` wordShift dflags
(op,off') | off >= 0 = (char '+', off1)
| otherwise = (char '-', -off1)
-- dest is a reg, rhs is anything.
-- We can't cast the lvalue, so we have to cast the rhs if necessary. Casting
-- the lvalue elicits a warning from new GCC versions (3.4+).
pprAssign _ r1 r2
| isFixedPtrReg r1 = mkAssign (mkP_ <> pprExpr1 r2)
| Just ty <- strangeRegType r1 = mkAssign (parens ty <> pprExpr1 r2)
| otherwise = mkAssign (pprExpr r2)
where mkAssign x = if r1 == CmmGlobal BaseReg
then ptext (sLit "ASSIGN_BaseReg") <> parens x <> semi
else pprReg r1 <> ptext (sLit " = ") <> x <> semi
-- ---------------------------------------------------------------------
-- Registers
pprCastReg :: CmmReg -> SDoc
pprCastReg reg
| isStrangeTypeReg reg = mkW_ <> pprReg reg
| otherwise = pprReg reg
-- True if (pprReg reg) will give an expression with type StgPtr. We
-- need to take care with pointer arithmetic on registers with type
-- StgPtr.
isFixedPtrReg :: CmmReg -> Bool
isFixedPtrReg (CmmLocal _) = False
isFixedPtrReg (CmmGlobal r) = isFixedPtrGlobalReg r
-- True if (pprAsPtrReg reg) will give an expression with type StgPtr
-- JD: THIS IS HORRIBLE AND SHOULD BE RENAMED, AT THE VERY LEAST.
-- THE GARBAGE WITH THE VNonGcPtr HELPS MATCH THE OLD CODE GENERATOR'S OUTPUT;
-- I'M NOT SURE IF IT SHOULD REALLY STAY THAT WAY.
isPtrReg :: CmmReg -> Bool
isPtrReg (CmmLocal _) = False
isPtrReg (CmmGlobal (VanillaReg _ VGcPtr)) = True -- if we print via pprAsPtrReg
isPtrReg (CmmGlobal (VanillaReg _ VNonGcPtr)) = False -- if we print via pprAsPtrReg
isPtrReg (CmmGlobal reg) = isFixedPtrGlobalReg reg
-- True if this global reg has type StgPtr
isFixedPtrGlobalReg :: GlobalReg -> Bool
isFixedPtrGlobalReg Sp = True
isFixedPtrGlobalReg Hp = True
isFixedPtrGlobalReg HpLim = True
isFixedPtrGlobalReg SpLim = True
isFixedPtrGlobalReg _ = False
-- True if in C this register doesn't have the type given by
-- (machRepCType (cmmRegType reg)), so it has to be cast.
isStrangeTypeReg :: CmmReg -> Bool
isStrangeTypeReg (CmmLocal _) = False
isStrangeTypeReg (CmmGlobal g) = isStrangeTypeGlobal g
isStrangeTypeGlobal :: GlobalReg -> Bool
isStrangeTypeGlobal CCCS = True
isStrangeTypeGlobal CurrentTSO = True
isStrangeTypeGlobal CurrentNursery = True
isStrangeTypeGlobal BaseReg = True
isStrangeTypeGlobal r = isFixedPtrGlobalReg r
strangeRegType :: CmmReg -> Maybe SDoc
strangeRegType (CmmGlobal CCCS) = Just (ptext (sLit "struct CostCentreStack_ *"))
strangeRegType (CmmGlobal CurrentTSO) = Just (ptext (sLit "struct StgTSO_ *"))
strangeRegType (CmmGlobal CurrentNursery) = Just (ptext (sLit "struct bdescr_ *"))
strangeRegType (CmmGlobal BaseReg) = Just (ptext (sLit "struct StgRegTable_ *"))
strangeRegType _ = Nothing
-- pprReg just prints the register name.
--
pprReg :: CmmReg -> SDoc
pprReg r = case r of
CmmLocal local -> pprLocalReg local
CmmGlobal global -> pprGlobalReg global
pprAsPtrReg :: CmmReg -> SDoc
pprAsPtrReg (CmmGlobal (VanillaReg n gcp))
= WARN( gcp /= VGcPtr, ppr n ) char 'R' <> int n <> ptext (sLit ".p")
pprAsPtrReg other_reg = pprReg other_reg
pprGlobalReg :: GlobalReg -> SDoc
pprGlobalReg gr = case gr of
VanillaReg n _ -> char 'R' <> int n <> ptext (sLit ".w")
-- pprGlobalReg prints a VanillaReg as a .w regardless
-- Example: R1.w = R1.w & (-0x8UL);
-- JMP_(*R1.p);
FloatReg n -> char 'F' <> int n
DoubleReg n -> char 'D' <> int n
LongReg n -> char 'L' <> int n
Sp -> ptext (sLit "Sp")
SpLim -> ptext (sLit "SpLim")
Hp -> ptext (sLit "Hp")
HpLim -> ptext (sLit "HpLim")
CCCS -> ptext (sLit "CCCS")
CurrentTSO -> ptext (sLit "CurrentTSO")
CurrentNursery -> ptext (sLit "CurrentNursery")
HpAlloc -> ptext (sLit "HpAlloc")
BaseReg -> ptext (sLit "BaseReg")
EagerBlackholeInfo -> ptext (sLit "stg_EAGER_BLACKHOLE_info")
GCEnter1 -> ptext (sLit "stg_gc_enter_1")
GCFun -> ptext (sLit "stg_gc_fun")
other -> panic $ "pprGlobalReg: Unsupported register: " ++ show other
pprLocalReg :: LocalReg -> SDoc
pprLocalReg (LocalReg uniq _) = char '_' <> ppr uniq
-- -----------------------------------------------------------------------------
-- Foreign Calls
pprCall :: SDoc -> CCallConv -> [Hinted CmmFormal] -> [Hinted CmmActual] -> SDoc
pprCall ppr_fn cconv results args
| not (is_cishCC cconv)
= panic $ "pprCall: unknown calling convention"
| otherwise
=
ppr_assign results (ppr_fn <> parens (commafy (map pprArg args))) <> semi
where
ppr_assign [] rhs = rhs
ppr_assign [(one,hint)] rhs
= pprLocalReg one <> ptext (sLit " = ")
<> pprUnHint hint (localRegType one) <> rhs
ppr_assign _other _rhs = panic "pprCall: multiple results"
pprArg (expr, AddrHint)
= cCast (ptext (sLit "void *")) expr
-- see comment by machRepHintCType below
pprArg (expr, SignedHint)
= sdocWithDynFlags $ \dflags ->
cCast (machRep_S_CType $ typeWidth $ cmmExprType dflags expr) expr
pprArg (expr, _other)
= pprExpr expr
pprUnHint AddrHint rep = parens (machRepCType rep)
pprUnHint SignedHint rep = parens (machRepCType rep)
pprUnHint _ _ = empty
-- Currently we only have these two calling conventions, but this might
-- change in the future...
is_cishCC :: CCallConv -> Bool
is_cishCC CCallConv = True
is_cishCC CApiConv = True
is_cishCC StdCallConv = True
is_cishCC PrimCallConv = False
is_cishCC JavaScriptCallConv = False
-- ---------------------------------------------------------------------
-- Find and print local and external declarations for a list of
-- Cmm statements.
--
pprTempAndExternDecls :: [CmmBlock] -> (SDoc{-temps-}, SDoc{-externs-})
pprTempAndExternDecls stmts
= (vcat (map pprTempDecl (uniqSetToList temps)),
vcat (map (pprExternDecl False{-ToDo-}) (Map.keys lbls)))
where (temps, lbls) = runTE (mapM_ te_BB stmts)
pprDataExterns :: [CmmStatic] -> SDoc
pprDataExterns statics
= vcat (map (pprExternDecl False{-ToDo-}) (Map.keys lbls))
where (_, lbls) = runTE (mapM_ te_Static statics)
pprTempDecl :: LocalReg -> SDoc
pprTempDecl l@(LocalReg _ rep)
= hcat [ machRepCType rep, space, pprLocalReg l, semi ]
pprExternDecl :: Bool -> CLabel -> SDoc
pprExternDecl _in_srt lbl
-- do not print anything for "known external" things
| not (needsCDecl lbl) = empty
| Just sz <- foreignLabelStdcallInfo lbl = stdcall_decl sz
| otherwise =
hcat [ visibility, label_type lbl,
lparen, ppr lbl, text ");" ]
where
label_type lbl | isCFunctionLabel lbl = ptext (sLit "F_")
| otherwise = ptext (sLit "I_")
visibility
| externallyVisibleCLabel lbl = char 'E'
| otherwise = char 'I'
-- If the label we want to refer to is a stdcall function (on Windows) then
-- we must generate an appropriate prototype for it, so that the C compiler will
-- add the @n suffix to the label (#2276)
stdcall_decl sz = sdocWithDynFlags $ \dflags ->
ptext (sLit "extern __attribute__((stdcall)) void ") <> ppr lbl
<> parens (commafy (replicate (sz `quot` wORD_SIZE dflags) (machRep_U_CType (wordWidth dflags))))
<> semi
type TEState = (UniqSet LocalReg, Map CLabel ())
newtype TE a = TE { unTE :: TEState -> (a, TEState) }
instance Functor TE where
fmap = liftM
instance Applicative TE where
pure a = TE $ \s -> (a, s)
(<*>) = ap
instance Monad TE where
TE m >>= k = TE $ \s -> case m s of (a, s') -> unTE (k a) s'
return = pure
te_lbl :: CLabel -> TE ()
te_lbl lbl = TE $ \(temps,lbls) -> ((), (temps, Map.insert lbl () lbls))
te_temp :: LocalReg -> TE ()
te_temp r = TE $ \(temps,lbls) -> ((), (addOneToUniqSet temps r, lbls))
runTE :: TE () -> TEState
runTE (TE m) = snd (m (emptyUniqSet, Map.empty))
te_Static :: CmmStatic -> TE ()
te_Static (CmmStaticLit lit) = te_Lit lit
te_Static _ = return ()
te_BB :: CmmBlock -> TE ()
te_BB block = mapM_ te_Stmt (blockToList mid) >> te_Stmt last
where (_, mid, last) = blockSplit block
te_Lit :: CmmLit -> TE ()
te_Lit (CmmLabel l) = te_lbl l
te_Lit (CmmLabelOff l _) = te_lbl l
te_Lit (CmmLabelDiffOff l1 _ _) = te_lbl l1
te_Lit _ = return ()
te_Stmt :: CmmNode e x -> TE ()
te_Stmt (CmmAssign r e) = te_Reg r >> te_Expr e
te_Stmt (CmmStore l r) = te_Expr l >> te_Expr r
te_Stmt (CmmUnsafeForeignCall target rs es)
= do te_Target target
mapM_ te_temp rs
mapM_ te_Expr es
te_Stmt (CmmCondBranch e _ _ _) = te_Expr e
te_Stmt (CmmSwitch e _) = te_Expr e
te_Stmt (CmmCall { cml_target = e }) = te_Expr e
te_Stmt _ = return ()
te_Target :: ForeignTarget -> TE ()
te_Target (ForeignTarget e _) = te_Expr e
te_Target (PrimTarget{}) = return ()
te_Expr :: CmmExpr -> TE ()
te_Expr (CmmLit lit) = te_Lit lit
te_Expr (CmmLoad e _) = te_Expr e
te_Expr (CmmReg r) = te_Reg r
te_Expr (CmmMachOp _ es) = mapM_ te_Expr es
te_Expr (CmmRegOff r _) = te_Reg r
te_Expr (CmmStackSlot _ _) = panic "te_Expr: CmmStackSlot not supported!"
te_Reg :: CmmReg -> TE ()
te_Reg (CmmLocal l) = te_temp l
te_Reg _ = return ()
-- ---------------------------------------------------------------------
-- C types for MachReps
cCast :: SDoc -> CmmExpr -> SDoc
cCast ty expr = parens ty <> pprExpr1 expr
cLoad :: CmmExpr -> CmmType -> SDoc
cLoad expr rep
= sdocWithPlatform $ \platform ->
if bewareLoadStoreAlignment (platformArch platform)
then let decl = machRepCType rep <+> ptext (sLit "x") <> semi
struct = ptext (sLit "struct") <+> braces (decl)
packed_attr = ptext (sLit "__attribute__((packed))")
cast = parens (struct <+> packed_attr <> char '*')
in parens (cast <+> pprExpr1 expr) <> ptext (sLit "->x")
else char '*' <> parens (cCast (machRepPtrCType rep) expr)
where -- On these platforms, unaligned loads are known to cause problems
bewareLoadStoreAlignment ArchAlpha = True
bewareLoadStoreAlignment ArchMipseb = True
bewareLoadStoreAlignment ArchMipsel = True
bewareLoadStoreAlignment (ArchARM {}) = True
bewareLoadStoreAlignment ArchARM64 = True
-- Pessimistically assume that they will also cause problems
-- on unknown arches
bewareLoadStoreAlignment ArchUnknown = True
bewareLoadStoreAlignment _ = False
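-- For illustration (a sketch of the C this produces, not verbatim
-- output): a word-sized load of @Sp + 8@ comes out roughly as
--
--     *((P_)(Sp + 8))
--
-- on alignment-tolerant platforms, and roughly as
--
--     ((struct { W_ x; } __attribute__((packed)) *)(Sp + 8))->x
--
-- on the alignment-sensitive platforms listed above.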
isCmmWordType :: DynFlags -> CmmType -> Bool
-- True of GcPtrReg/NonGcReg of native word size
isCmmWordType dflags ty = not (isFloatType ty)
&& typeWidth ty == wordWidth dflags
-- This is for finding the types of foreign call arguments. For a pointer
-- argument, we always cast the argument to (void *), to avoid warnings from
-- the C compiler.
machRepHintCType :: CmmType -> ForeignHint -> SDoc
machRepHintCType _ AddrHint = ptext (sLit "void *")
machRepHintCType rep SignedHint = machRep_S_CType (typeWidth rep)
machRepHintCType rep _other = machRepCType rep
machRepPtrCType :: CmmType -> SDoc
machRepPtrCType r
= sdocWithDynFlags $ \dflags ->
if isCmmWordType dflags r then ptext (sLit "P_")
else machRepCType r <> char '*'
machRepCType :: CmmType -> SDoc
machRepCType ty | isFloatType ty = machRep_F_CType w
| otherwise = machRep_U_CType w
where
w = typeWidth ty
machRep_F_CType :: Width -> SDoc
machRep_F_CType W32 = ptext (sLit "StgFloat") -- ToDo: correct?
machRep_F_CType W64 = ptext (sLit "StgDouble")
machRep_F_CType _ = panic "machRep_F_CType"
machRep_U_CType :: Width -> SDoc
machRep_U_CType w
= sdocWithDynFlags $ \dflags ->
case w of
_ | w == wordWidth dflags -> ptext (sLit "W_")
W8 -> ptext (sLit "StgWord8")
W16 -> ptext (sLit "StgWord16")
W32 -> ptext (sLit "StgWord32")
W64 -> ptext (sLit "StgWord64")
_ -> panic "machRep_U_CType"
machRep_S_CType :: Width -> SDoc
machRep_S_CType w
= sdocWithDynFlags $ \dflags ->
case w of
_ | w == wordWidth dflags -> ptext (sLit "I_")
W8 -> ptext (sLit "StgInt8")
W16 -> ptext (sLit "StgInt16")
W32 -> ptext (sLit "StgInt32")
W64 -> ptext (sLit "StgInt64")
_ -> panic "machRep_S_CType"
-- ---------------------------------------------------------------------
-- print strings as valid C strings
pprStringInCStyle :: [Word8] -> SDoc
pprStringInCStyle s = doubleQuotes (text (concatMap charToC s))
-- ---------------------------------------------------------------------------
-- Initialising static objects with floating-point numbers. We can't
-- just emit the floating point number, because C will cast it to an int
-- by rounding it. We want the actual bit-representation of the float.
-- This is a hack to turn the floating point numbers into ints that we
-- can safely initialise to static locations.
big_doubles :: DynFlags -> Bool
big_doubles dflags
| widthInBytes W64 == 2 * wORD_SIZE dflags = True
| widthInBytes W64 == wORD_SIZE dflags = False
| otherwise = panic "big_doubles"
castFloatToIntArray :: STUArray s Int Float -> ST s (STUArray s Int Int)
castFloatToIntArray = U.castSTUArray
castDoubleToIntArray :: STUArray s Int Double -> ST s (STUArray s Int Int)
castDoubleToIntArray = U.castSTUArray
-- floats are always 1 word
floatToWord :: DynFlags -> Rational -> CmmLit
floatToWord dflags r
= runST (do
arr <- newArray_ ((0::Int),0)
writeArray arr 0 (fromRational r)
arr' <- castFloatToIntArray arr
i <- readArray arr' 0
return (CmmInt (toInteger i) (wordWidth dflags))
)
doubleToWords :: DynFlags -> Rational -> [CmmLit]
doubleToWords dflags r
| big_doubles dflags -- doubles are 2 words
= runST (do
arr <- newArray_ ((0::Int),1)
writeArray arr 0 (fromRational r)
arr' <- castDoubleToIntArray arr
i1 <- readArray arr' 0
i2 <- readArray arr' 1
return [ CmmInt (toInteger i1) (wordWidth dflags)
, CmmInt (toInteger i2) (wordWidth dflags)
]
)
| otherwise -- doubles are 1 word
= runST (do
arr <- newArray_ ((0::Int),0)
writeArray arr 0 (fromRational r)
arr' <- castDoubleToIntArray arr
i <- readArray arr' 0
return [ CmmInt (toInteger i) (wordWidth dflags) ]
)
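-- Illustrative sketch (standalone, not part of this module): the same
-- "reinterpret the bits" trick can be observed with the cast helpers
-- from GHC.Float in recent base, without going through STUArrays.
import GHC.Float (castFloatToWord32, castDoubleToWord64)
import Numeric   (showHex)

demoFloatBits :: IO ()
demoFloatBits = do
  putStrLn ("0x" ++ showHex (castFloatToWord32  1.0) "")  -- 0x3f800000
  putStrLn ("0x" ++ showHex (castDoubleToWord64 1.0) "")  -- 0x3ff0000000000000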
-- ---------------------------------------------------------------------------
-- Utils
wordShift :: DynFlags -> Int
wordShift dflags = widthInLog (wordWidth dflags)
commafy :: [SDoc] -> SDoc
commafy xs = hsep $ punctuate comma xs
-- Print in C hex format: 0x13fa
pprHexVal :: Integer -> Width -> SDoc
pprHexVal w rep
| w < 0 = parens (char '-' <>
ptext (sLit "0x") <> intToDoc (-w) <> repsuffix rep)
| otherwise = ptext (sLit "0x") <> intToDoc w <> repsuffix rep
where
-- type suffix for literals:
-- Integer literals are unsigned in Cmm/C. We explicitly cast to
-- signed values for doing signed operations, but at all other
-- times values are unsigned. This also helps eliminate occasional
-- warnings about integer overflow from gcc.
repsuffix W64 = sdocWithDynFlags $ \dflags ->
if cINT_SIZE dflags == 8 then char 'U'
else if cLONG_SIZE dflags == 8 then ptext (sLit "UL")
else if cLONG_LONG_SIZE dflags == 8 then ptext (sLit "ULL")
else panic "pprHexVal: Can't find a 64-bit type"
repsuffix _ = char 'U'
intToDoc :: Integer -> SDoc
intToDoc i = case truncInt i of
0 -> char '0'
v -> go v
-- We need to truncate value as Cmm backend does not drop
-- redundant bits to ease handling of negative values.
-- Thus the following Cmm code on 64-bit arch, like amd64:
-- CInt v;
-- v = {something};
-- if (v == %lobits32(-1)) { ...
-- leads to the following C code:
-- StgWord64 v = (StgWord32)({something});
-- if (v == 0xFFFFffffFFFFffffU) { ...
-- Such code is incorrect as it promotes both operands to StgWord64
-- and the whole condition is always false.
truncInt :: Integer -> Integer
truncInt i =
case rep of
W8 -> i `rem` (2^(8 :: Int))
W16 -> i `rem` (2^(16 :: Int))
W32 -> i `rem` (2^(32 :: Int))
W64 -> i `rem` (2^(64 :: Int))
_ -> panic ("pprHexVal/truncInt: C backend can't encode "
++ show rep ++ " literals")
go 0 = empty
go w' = go q <> dig
where
(q,r) = w' `quotRem` 16
dig | r < 10 = char (chr (fromInteger r + ord '0'))
| otherwise = char (chr (fromInteger r - 10 + ord 'a'))
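-- Illustrative sketch (standalone): why the truncation in truncInt
-- matters.  A 32-bit literal that still carries 64 one-bits must be
-- cut down to its low 32 bits before it is printed as a C constant.
import Numeric (showHex)

demoTrunc :: IO ()
demoTrunc = do
  let wide = 0xFFFFffffFFFFffff :: Integer   -- what %lobits32(-1) arrives as
      lo32 = wide `rem` (2 ^ (32 :: Int))    -- what pprHexVal must print
  putStrLn ("0x" ++ showHex wide "")         -- 0xffffffffffffffff
  putStrLn ("0x" ++ showHex lo32 "")         -- 0xffffffff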
| AlexanderPankiv/ghc | compiler/cmm/PprC.hs | Haskell | bsd-3-clause | 48,336 |
-- | Generating C symbol names emitted by the compiler.
module CPrim
( atomicReadLabel
, atomicWriteLabel
, atomicRMWLabel
, cmpxchgLabel
, popCntLabel
, bSwapLabel
, word2FloatLabel
) where
import CmmType
import CmmMachOp
import Outputable
popCntLabel :: Width -> String
popCntLabel w = "hs_popcnt" ++ pprWidth w
where
pprWidth W8 = "8"
pprWidth W16 = "16"
pprWidth W32 = "32"
pprWidth W64 = "64"
pprWidth w = pprPanic "popCntLabel: Unsupported word width " (ppr w)
bSwapLabel :: Width -> String
bSwapLabel w = "hs_bswap" ++ pprWidth w
where
pprWidth W16 = "16"
pprWidth W32 = "32"
pprWidth W64 = "64"
pprWidth w = pprPanic "bSwapLabel: Unsupported word width " (ppr w)
word2FloatLabel :: Width -> String
word2FloatLabel w = "hs_word2float" ++ pprWidth w
where
pprWidth W32 = "32"
pprWidth W64 = "64"
pprWidth w = pprPanic "word2FloatLabel: Unsupported word width " (ppr w)
atomicRMWLabel :: Width -> AtomicMachOp -> String
atomicRMWLabel w amop = "hs_atomic_" ++ pprFunName amop ++ pprWidth w
where
pprWidth W8 = "8"
pprWidth W16 = "16"
pprWidth W32 = "32"
pprWidth W64 = "64"
pprWidth w = pprPanic "atomicRMWLabel: Unsupported word width " (ppr w)
pprFunName AMO_Add = "add"
pprFunName AMO_Sub = "sub"
pprFunName AMO_And = "and"
pprFunName AMO_Nand = "nand"
pprFunName AMO_Or = "or"
pprFunName AMO_Xor = "xor"
cmpxchgLabel :: Width -> String
cmpxchgLabel w = "hs_cmpxchg" ++ pprWidth w
where
pprWidth W8 = "8"
pprWidth W16 = "16"
pprWidth W32 = "32"
pprWidth W64 = "64"
pprWidth w = pprPanic "cmpxchgLabel: Unsupported word width " (ppr w)
atomicReadLabel :: Width -> String
atomicReadLabel w = "hs_atomicread" ++ pprWidth w
where
pprWidth W8 = "8"
pprWidth W16 = "16"
pprWidth W32 = "32"
pprWidth W64 = "64"
pprWidth w = pprPanic "atomicReadLabel: Unsupported word width " (ppr w)
atomicWriteLabel :: Width -> String
atomicWriteLabel w = "hs_atomicwrite" ++ pprWidth w
where
pprWidth W8 = "8"
pprWidth W16 = "16"
pprWidth W32 = "32"
pprWidth W64 = "64"
pprWidth w = pprPanic "atomicWriteLabel: Unsupported word width " (ppr w)
| frantisekfarka/ghc-dsi | compiler/nativeGen/CPrim.hs | Haskell | bsd-3-clause | 2,263 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="es-ES">
<title>Getting started Guide</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Índice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Buscar</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| thc202/zap-extensions | addOns/gettingStarted/src/main/javahelp/org/zaproxy/zap/extension/gettingStarted/resources/help_es_ES/helpset_es_ES.hs | Haskell | apache-2.0 | 968 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fil-PH">
<title>Mabilis na Pagsisimula | Ekstensyon ng ZAP</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Mga Nilalaman</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Paghahanap</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Mga Paborito</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| thc202/zap-extensions | addOns/quickstart/src/main/javahelp/org/zaproxy/zap/extension/quickstart/resources/help_fil_PH/helpset_fil_PH.hs | Haskell | apache-2.0 | 1,001 |
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE NoImplicitPrelude
, BangPatterns
, MagicHash
, UnboxedTuples
#-}
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.ForeignPtr
-- Copyright : (c) The University of Glasgow, 1992-2003
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- GHC's implementation of the 'ForeignPtr' data type.
--
-----------------------------------------------------------------------------
module GHC.ForeignPtr
(
ForeignPtr(..),
ForeignPtrContents(..),
FinalizerPtr,
FinalizerEnvPtr,
newForeignPtr_,
mallocForeignPtr,
mallocPlainForeignPtr,
mallocForeignPtrBytes,
mallocPlainForeignPtrBytes,
mallocForeignPtrAlignedBytes,
mallocPlainForeignPtrAlignedBytes,
addForeignPtrFinalizer,
addForeignPtrFinalizerEnv,
touchForeignPtr,
unsafeForeignPtrToPtr,
castForeignPtr,
newConcForeignPtr,
addForeignPtrConcFinalizer,
finalizeForeignPtr
) where
import Foreign.Storable
import Data.Foldable ( sequence_ )
import GHC.Show
import GHC.Base
import GHC.IORef
import GHC.STRef ( STRef(..) )
import GHC.Ptr ( Ptr(..), FunPtr(..) )
-- |The type 'ForeignPtr' represents references to objects that are
-- maintained in a foreign language, i.e., that are not part of the
-- data structures usually managed by the Haskell storage manager.
-- The essential difference between 'ForeignPtr's and vanilla memory
-- references of type @Ptr a@ is that the former may be associated
-- with /finalizers/. A finalizer is a routine that is invoked when
-- the Haskell storage manager detects that - within the Haskell heap
-- and stack - there are no more references left that are pointing to
-- the 'ForeignPtr'. Typically, the finalizer will, then, invoke
-- routines in the foreign language that free the resources bound by
-- the foreign object.
--
-- The 'ForeignPtr' is parameterised in the same way as 'Ptr'. The
-- type argument of 'ForeignPtr' should normally be an instance of
-- class 'Storable'.
--
data ForeignPtr a = ForeignPtr Addr# ForeignPtrContents
-- we cache the Addr# in the ForeignPtr object, but attach
-- the finalizer to the IORef (or the MutableByteArray# in
-- the case of a MallocPtr). The aim of the representation
-- is to make withForeignPtr efficient; in fact, withForeignPtr
-- should be just as efficient as unpacking a Ptr, and multiple
-- withForeignPtrs can share an unpacked ForeignPtr. Note
-- that touchForeignPtr only has to touch the ForeignPtrContents
-- object, because that ensures that whatever the finalizer is
-- attached to is kept alive.
data Finalizers
= NoFinalizers
| CFinalizers (Weak# ())
| HaskellFinalizers [IO ()]
data ForeignPtrContents
= PlainForeignPtr !(IORef Finalizers)
| MallocPtr (MutableByteArray# RealWorld) !(IORef Finalizers)
| PlainPtr (MutableByteArray# RealWorld)
instance Eq (ForeignPtr a) where
p == q = unsafeForeignPtrToPtr p == unsafeForeignPtrToPtr q
instance Ord (ForeignPtr a) where
compare p q = compare (unsafeForeignPtrToPtr p) (unsafeForeignPtrToPtr q)
instance Show (ForeignPtr a) where
showsPrec p f = showsPrec p (unsafeForeignPtrToPtr f)
-- |A finalizer is represented as a pointer to a foreign function that, at
-- finalisation time, gets as an argument a plain pointer variant of the
-- foreign pointer that the finalizer is associated with.
--
-- Note that the foreign function /must/ use the @ccall@ calling convention.
--
type FinalizerPtr a = FunPtr (Ptr a -> IO ())
type FinalizerEnvPtr env a = FunPtr (Ptr env -> Ptr a -> IO ())
newConcForeignPtr :: Ptr a -> IO () -> IO (ForeignPtr a)
--
-- ^Turns a plain memory reference into a foreign object by
-- associating a finalizer - given by the monadic operation - with the
-- reference. The storage manager will start the finalizer, in a
-- separate thread, some time after the last reference to the
-- @ForeignPtr@ is dropped. There is no guarantee of promptness, and
-- in fact there is no guarantee that the finalizer will eventually
-- run at all.
--
-- Note that references from a finalizer do not necessarily prevent
-- another object from being finalized. If A's finalizer refers to B
-- (perhaps using 'touchForeignPtr'), then the only guarantee is that
-- B's finalizer will never be started before A's. If both A and B
-- are unreachable, then both finalizers will start together. See
-- 'touchForeignPtr' for more on finalizer ordering.
--
newConcForeignPtr p finalizer
= do fObj <- newForeignPtr_ p
addForeignPtrConcFinalizer fObj finalizer
return fObj
mallocForeignPtr :: Storable a => IO (ForeignPtr a)
-- ^ Allocate some memory and return a 'ForeignPtr' to it. The memory
-- will be released automatically when the 'ForeignPtr' is discarded.
--
-- 'mallocForeignPtr' is equivalent to
--
-- > do { p <- malloc; newForeignPtr finalizerFree p }
--
-- although it may be implemented differently internally: you may not
-- assume that the memory returned by 'mallocForeignPtr' has been
-- allocated with 'Foreign.Marshal.Alloc.malloc'.
--
-- GHC notes: 'mallocForeignPtr' has a heavily optimised
-- implementation in GHC. It uses pinned memory in the garbage
-- collected heap, so the 'ForeignPtr' does not require a finalizer to
-- free the memory. Use of 'mallocForeignPtr' and associated
-- functions is strongly recommended in preference to 'newForeignPtr'
-- with a finalizer.
--
mallocForeignPtr = doMalloc undefined
where doMalloc :: Storable b => b -> IO (ForeignPtr b)
doMalloc a
| I# size < 0 = error "mallocForeignPtr: size must be >= 0"
| otherwise = do
r <- newIORef NoFinalizers
IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
where !(I# size) = sizeOf a
!(I# align) = alignment a
-- | This function is similar to 'mallocForeignPtr', except that the
-- size of the memory required is given explicitly as a number of bytes.
mallocForeignPtrBytes :: Int -> IO (ForeignPtr a)
mallocForeignPtrBytes size | size < 0 =
error "mallocForeignPtrBytes: size must be >= 0"
mallocForeignPtrBytes (I# size) = do
r <- newIORef NoFinalizers
IO $ \s ->
case newPinnedByteArray# size s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
-- | This function is similar to 'mallocForeignPtrBytes', except that the
-- size and alignment of the memory required is given explicitly as numbers of
-- bytes.
mallocForeignPtrAlignedBytes :: Int -> Int -> IO (ForeignPtr a)
mallocForeignPtrAlignedBytes size _align | size < 0 =
error "mallocForeignPtrAlignedBytes: size must be >= 0"
mallocForeignPtrAlignedBytes (I# size) (I# align) = do
r <- newIORef NoFinalizers
IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
-- | Allocate some memory and return a 'ForeignPtr' to it. The memory
-- will be released automatically when the 'ForeignPtr' is discarded.
--
-- GHC notes: 'mallocPlainForeignPtr' has a heavily optimised
-- implementation in GHC. It uses pinned memory in the garbage
-- collected heap, as for mallocForeignPtr. Unlike mallocForeignPtr, a
-- ForeignPtr created with mallocPlainForeignPtr carries no finalizers.
-- It is not possible to add a finalizer to a ForeignPtr created with
-- mallocPlainForeignPtr. This is useful for ForeignPtrs that will live
-- only inside Haskell (such as those created for packed strings).
-- Attempts to add a finalizer to a ForeignPtr created this way, or to
-- finalize such a pointer, will throw an exception.
--
mallocPlainForeignPtr :: Storable a => IO (ForeignPtr a)
mallocPlainForeignPtr = doMalloc undefined
where doMalloc :: Storable b => b -> IO (ForeignPtr b)
doMalloc a
          | I# size < 0 = error "mallocPlainForeignPtr: size must be >= 0"
| otherwise = IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
where !(I# size) = sizeOf a
!(I# align) = alignment a
-- | This function is similar to 'mallocForeignPtrBytes', except that
-- internally an optimised ForeignPtr representation with no
-- finalizer is used. Attempts to add a finalizer will cause an
-- exception to be thrown.
mallocPlainForeignPtrBytes :: Int -> IO (ForeignPtr a)
mallocPlainForeignPtrBytes size | size < 0 =
error "mallocPlainForeignPtrBytes: size must be >= 0"
mallocPlainForeignPtrBytes (I# size) = IO $ \s ->
case newPinnedByteArray# size s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
-- | This function is similar to 'mallocForeignPtrAlignedBytes', except that
-- internally an optimised ForeignPtr representation with no
-- finalizer is used. Attempts to add a finalizer will cause an
-- exception to be thrown.
mallocPlainForeignPtrAlignedBytes :: Int -> Int -> IO (ForeignPtr a)
mallocPlainForeignPtrAlignedBytes size _align | size < 0 =
error "mallocPlainForeignPtrAlignedBytes: size must be >= 0"
mallocPlainForeignPtrAlignedBytes (I# size) (I# align) = IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
addForeignPtrFinalizer :: FinalizerPtr a -> ForeignPtr a -> IO ()
-- ^This function adds a finalizer to the given foreign object. The
-- finalizer will run /before/ all other finalizers for the same
-- object which have already been registered.
addForeignPtrFinalizer (FunPtr fp) (ForeignPtr p c) = case c of
PlainForeignPtr r -> insertCFinalizer r fp 0# nullAddr# p ()
MallocPtr _ r -> insertCFinalizer r fp 0# nullAddr# p c
_ -> error "GHC.ForeignPtr: attempt to add a finalizer to a plain pointer"
-- Note [MallocPtr finalizers] (#10904)
--
-- When we have C finalizers for a MallocPtr, the memory is
-- heap-resident and would normally be recovered by the GC before the
-- finalizers run. To prevent the memory from being reused too early,
-- we attach the MallocPtr constructor to the "value" field of the
-- weak pointer when we call mkWeak# in ensureCFinalizerWeak below.
-- The GC will keep this field alive until the finalizers have run.
addForeignPtrFinalizerEnv ::
FinalizerEnvPtr env a -> Ptr env -> ForeignPtr a -> IO ()
-- ^ Like 'addForeignPtrFinalizer' but the finalizer is passed an
-- additional environment parameter.  The environment passed to the
-- finalizer is fixed by the second argument to
-- 'addForeignPtrFinalizerEnv'.
addForeignPtrFinalizerEnv (FunPtr fp) (Ptr ep) (ForeignPtr p c) = case c of
PlainForeignPtr r -> insertCFinalizer r fp 1# ep p ()
MallocPtr _ r -> insertCFinalizer r fp 1# ep p c
_ -> error "GHC.ForeignPtr: attempt to add a finalizer to a plain pointer"
addForeignPtrConcFinalizer :: ForeignPtr a -> IO () -> IO ()
-- ^This function adds a finalizer to the given @ForeignPtr@. The
-- finalizer will run /before/ all other finalizers for the same
-- object which have already been registered.
--
-- This is a variant of @addForeignPtrFinalizer@, where the finalizer
-- is an arbitrary @IO@ action. When it is invoked, the finalizer
-- will run in a new thread.
--
-- NB. Be very careful with these finalizers. One common trap is that
-- if a finalizer references another finalized value, it does not
-- prevent that value from being finalized. In particular, 'Handle's
-- are finalized objects, so a finalizer should not refer to a 'Handle'
-- (including @stdout@, @stdin@ or @stderr@).
--
addForeignPtrConcFinalizer (ForeignPtr _ c) finalizer =
addForeignPtrConcFinalizer_ c finalizer
addForeignPtrConcFinalizer_ :: ForeignPtrContents -> IO () -> IO ()
addForeignPtrConcFinalizer_ (PlainForeignPtr r) finalizer = do
noFinalizers <- insertHaskellFinalizer r finalizer
if noFinalizers
then IO $ \s ->
case r of { IORef (STRef r#) ->
case mkWeak# r# () (unIO $ foreignPtrFinalizer r) s of {
(# s1, _ #) -> (# s1, () #) }}
else return ()
addForeignPtrConcFinalizer_ f@(MallocPtr fo r) finalizer = do
noFinalizers <- insertHaskellFinalizer r finalizer
if noFinalizers
then IO $ \s ->
case mkWeak# fo () finalizer' s of
(# s1, _ #) -> (# s1, () #)
else return ()
where
finalizer' :: State# RealWorld -> (# State# RealWorld, () #)
finalizer' = unIO (foreignPtrFinalizer r >> touch f)
addForeignPtrConcFinalizer_ _ _ =
error "GHC.ForeignPtr: attempt to add a finalizer to plain pointer"
insertHaskellFinalizer :: IORef Finalizers -> IO () -> IO Bool
insertHaskellFinalizer r f = do
!wasEmpty <- atomicModifyIORef r $ \finalizers -> case finalizers of
NoFinalizers -> (HaskellFinalizers [f], True)
HaskellFinalizers fs -> (HaskellFinalizers (f:fs), False)
_ -> noMixingError
return wasEmpty
-- | A box around Weak#, private to this module.
data MyWeak = MyWeak (Weak# ())
insertCFinalizer ::
IORef Finalizers -> Addr# -> Int# -> Addr# -> Addr# -> value -> IO ()
insertCFinalizer r fp flag ep p val = do
MyWeak w <- ensureCFinalizerWeak r val
IO $ \s -> case addCFinalizerToWeak# fp p flag ep w s of
(# s1, 1# #) -> (# s1, () #)
-- Failed to add the finalizer because some other thread
-- has finalized w by calling foreignPtrFinalizer. We retry now.
-- This won't be an infinite loop because that thread must have
-- replaced the content of r before calling finalizeWeak#.
(# s1, _ #) -> unIO (insertCFinalizer r fp flag ep p val) s1
ensureCFinalizerWeak :: IORef Finalizers -> value -> IO MyWeak
ensureCFinalizerWeak ref@(IORef (STRef r#)) value = do
fin <- readIORef ref
case fin of
CFinalizers weak -> return (MyWeak weak)
HaskellFinalizers{} -> noMixingError
NoFinalizers -> IO $ \s ->
case mkWeakNoFinalizer# r# (unsafeCoerce# value) s of { (# s1, w #) ->
-- See Note [MallocPtr finalizers] (#10904)
case atomicModifyMutVar# r# (update w) s1 of
{ (# s2, (weak, needKill ) #) ->
if needKill
then case finalizeWeak# w s2 of { (# s3, _, _ #) ->
(# s3, weak #) }
else (# s2, weak #) }}
where
update _ fin@(CFinalizers w) = (fin, (MyWeak w, True))
update w NoFinalizers = (CFinalizers w, (MyWeak w, False))
update _ _ = noMixingError
noMixingError :: a
noMixingError = error $
"GHC.ForeignPtr: attempt to mix Haskell and C finalizers " ++
"in the same ForeignPtr"
foreignPtrFinalizer :: IORef Finalizers -> IO ()
foreignPtrFinalizer r = do
fs <- atomicModifyIORef r $ \fs -> (NoFinalizers, fs) -- atomic, see #7170
case fs of
NoFinalizers -> return ()
CFinalizers w -> IO $ \s -> case finalizeWeak# w s of
(# s1, 1#, f #) -> f s1
(# s1, _, _ #) -> (# s1, () #)
HaskellFinalizers actions -> sequence_ actions
newForeignPtr_ :: Ptr a -> IO (ForeignPtr a)
-- ^Turns a plain memory reference into a foreign pointer that may be
-- associated with finalizers by using 'addForeignPtrFinalizer'.
newForeignPtr_ (Ptr obj) = do
r <- newIORef NoFinalizers
return (ForeignPtr obj (PlainForeignPtr r))
touchForeignPtr :: ForeignPtr a -> IO ()
-- ^This function ensures that the foreign object in
-- question is alive at the given place in the sequence of IO
-- actions. In particular 'Foreign.ForeignPtr.withForeignPtr'
-- does a 'touchForeignPtr' after it
-- executes the user action.
--
-- Note that this function should not be used to express dependencies
-- between finalizers on 'ForeignPtr's. For example, if the finalizer
-- for a 'ForeignPtr' @F1@ calls 'touchForeignPtr' on a second
-- 'ForeignPtr' @F2@, then the only guarantee is that the finalizer
-- for @F2@ is never started before the finalizer for @F1@. They
-- might be started together if for example both @F1@ and @F2@ are
-- otherwise unreachable, and in that case the scheduler might end up
-- running the finalizer for @F2@ first.
--
-- In general, it is not recommended to use finalizers on separate
-- objects with ordering constraints between them. To express the
-- ordering robustly requires explicit synchronisation using @MVar@s
-- between the finalizers, but even then the runtime sometimes runs
-- multiple finalizers sequentially in a single thread (for
-- performance reasons), so synchronisation between finalizers could
-- result in artificial deadlock. Another alternative is to use
-- explicit reference counting.
--
touchForeignPtr (ForeignPtr _ r) = touch r
touch :: ForeignPtrContents -> IO ()
touch r = IO $ \s -> case touch# r s of s' -> (# s', () #)
unsafeForeignPtrToPtr :: ForeignPtr a -> Ptr a
-- ^This function extracts the pointer component of a foreign
-- pointer. This is a potentially dangerous operation, as if the
-- argument to 'unsafeForeignPtrToPtr' is the last usage
-- occurrence of the given foreign pointer, then its finalizer(s) will
-- be run, which potentially invalidates the plain pointer just
-- obtained. Hence, 'touchForeignPtr' must be used
-- wherever it has to be guaranteed that the pointer lives on - i.e.,
-- has another usage occurrence.
--
-- To avoid subtle coding errors, hand written marshalling code
-- should preferably use 'Foreign.ForeignPtr.withForeignPtr' rather
-- than combinations of 'unsafeForeignPtrToPtr' and
-- 'touchForeignPtr'. However, the latter routines
-- are occasionally preferred in tool generated marshalling code.
unsafeForeignPtrToPtr (ForeignPtr fo _) = Ptr fo
castForeignPtr :: ForeignPtr a -> ForeignPtr b
-- ^This function casts a 'ForeignPtr'
-- parameterised by one type into another type.
castForeignPtr f = unsafeCoerce# f
-- | Causes the finalizers associated with a foreign pointer to be run
-- immediately.
finalizeForeignPtr :: ForeignPtr a -> IO ()
finalizeForeignPtr (ForeignPtr _ (PlainPtr _)) = return () -- no effect
finalizeForeignPtr (ForeignPtr _ foreignPtr) = foreignPtrFinalizer refFinalizers
where
refFinalizers = case foreignPtr of
(PlainForeignPtr ref) -> ref
(MallocPtr _ ref) -> ref
PlainPtr _ ->
error "finalizeForeignPtr PlainPtr"
| ml9951/ghc | libraries/base/GHC/ForeignPtr.hs | Haskell | bsd-3-clause | 19,296 |
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE NoImplicitPrelude
, BangPatterns
, MagicHash
, UnboxedTuples
#-}
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.ForeignPtr
-- Copyright : (c) The University of Glasgow, 1992-2003
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- GHC's implementation of the 'ForeignPtr' data type.
--
-----------------------------------------------------------------------------
module GHC.ForeignPtr
(
ForeignPtr(..),
ForeignPtrContents(..),
FinalizerPtr,
FinalizerEnvPtr,
newForeignPtr_,
mallocForeignPtr,
mallocPlainForeignPtr,
mallocForeignPtrBytes,
mallocPlainForeignPtrBytes,
mallocForeignPtrAlignedBytes,
mallocPlainForeignPtrAlignedBytes,
addForeignPtrFinalizer,
addForeignPtrFinalizerEnv,
touchForeignPtr,
unsafeForeignPtrToPtr,
castForeignPtr,
plusForeignPtr,
newConcForeignPtr,
addForeignPtrConcFinalizer,
finalizeForeignPtr
) where
import Foreign.Storable
import Data.Foldable ( sequence_ )
import GHC.Show
import GHC.Base
import GHC.IORef
import GHC.STRef ( STRef(..) )
import GHC.Ptr ( Ptr(..), FunPtr(..) )
-- |The type 'ForeignPtr' represents references to objects that are
-- maintained in a foreign language, i.e., that are not part of the
-- data structures usually managed by the Haskell storage manager.
-- The essential difference between 'ForeignPtr's and vanilla memory
-- references of type @Ptr a@ is that the former may be associated
-- with /finalizers/. A finalizer is a routine that is invoked when
-- the Haskell storage manager detects that - within the Haskell heap
-- and stack - there are no more references left that are pointing to
-- the 'ForeignPtr'. Typically, the finalizer will, then, invoke
-- routines in the foreign language that free the resources bound by
-- the foreign object.
--
-- The 'ForeignPtr' is parameterised in the same way as 'Ptr'. The
-- type argument of 'ForeignPtr' should normally be an instance of
-- class 'Storable'.
--
data ForeignPtr a = ForeignPtr Addr# ForeignPtrContents
-- The Addr# in the ForeignPtr object is intentionally stored
-- separately from the finalizer. The primary aim of the
-- representation is to make withForeignPtr efficient; in fact,
-- withForeignPtr should be just as efficient as unpacking a
-- Ptr, and multiple withForeignPtrs can share an unpacked
-- ForeignPtr. As a secondary benefit, this representation
-- allows pointers to subregions within the same overall block
-- to share the same finalizer (see 'plusForeignPtr'). Note
-- that touchForeignPtr only has to touch the ForeignPtrContents
-- object, because that ensures that whatever the finalizer is
-- attached to is kept alive.
data Finalizers
= NoFinalizers
| CFinalizers (Weak# ())
| HaskellFinalizers [IO ()]
data ForeignPtrContents
= PlainForeignPtr !(IORef Finalizers)
| MallocPtr (MutableByteArray# RealWorld) !(IORef Finalizers)
| PlainPtr (MutableByteArray# RealWorld)
-- | @since 2.01
instance Eq (ForeignPtr a) where
p == q = unsafeForeignPtrToPtr p == unsafeForeignPtrToPtr q
-- | @since 2.01
instance Ord (ForeignPtr a) where
compare p q = compare (unsafeForeignPtrToPtr p) (unsafeForeignPtrToPtr q)
-- | @since 2.01
instance Show (ForeignPtr a) where
showsPrec p f = showsPrec p (unsafeForeignPtrToPtr f)
-- |A finalizer is represented as a pointer to a foreign function that, at
-- finalisation time, gets as an argument a plain pointer variant of the
-- foreign pointer that the finalizer is associated with.
--
-- Note that the foreign function /must/ use the @ccall@ calling convention.
--
type FinalizerPtr a = FunPtr (Ptr a -> IO ())
type FinalizerEnvPtr env a = FunPtr (Ptr env -> Ptr a -> IO ())
newConcForeignPtr :: Ptr a -> IO () -> IO (ForeignPtr a)
--
-- ^Turns a plain memory reference into a foreign object by
-- associating a finalizer - given by the monadic operation - with the
-- reference. The storage manager will start the finalizer, in a
-- separate thread, some time after the last reference to the
-- @ForeignPtr@ is dropped. There is no guarantee of promptness, and
-- in fact there is no guarantee that the finalizer will eventually
-- run at all.
--
-- Note that references from a finalizer do not necessarily prevent
-- another object from being finalized. If A's finalizer refers to B
-- (perhaps using 'touchForeignPtr'), then the only guarantee is that
-- B's finalizer will never be started before A's. If both A and B
-- are unreachable, then both finalizers will start together. See
-- 'touchForeignPtr' for more on finalizer ordering.
--
newConcForeignPtr p finalizer
= do fObj <- newForeignPtr_ p
addForeignPtrConcFinalizer fObj finalizer
return fObj
mallocForeignPtr :: Storable a => IO (ForeignPtr a)
-- ^ Allocate some memory and return a 'ForeignPtr' to it. The memory
-- will be released automatically when the 'ForeignPtr' is discarded.
--
-- 'mallocForeignPtr' is equivalent to
--
-- > do { p <- malloc; newForeignPtr finalizerFree p }
--
-- although it may be implemented differently internally: you may not
-- assume that the memory returned by 'mallocForeignPtr' has been
-- allocated with 'Foreign.Marshal.Alloc.malloc'.
--
-- GHC notes: 'mallocForeignPtr' has a heavily optimised
-- implementation in GHC. It uses pinned memory in the garbage
-- collected heap, so the 'ForeignPtr' does not require a finalizer to
-- free the memory. Use of 'mallocForeignPtr' and associated
-- functions is strongly recommended in preference to 'newForeignPtr'
-- with a finalizer.
--
mallocForeignPtr = doMalloc undefined
where doMalloc :: Storable b => b -> IO (ForeignPtr b)
doMalloc a
| I# size < 0 = errorWithoutStackTrace "mallocForeignPtr: size must be >= 0"
| otherwise = do
r <- newIORef NoFinalizers
IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
where !(I# size) = sizeOf a
!(I# align) = alignment a
-- | This function is similar to 'mallocForeignPtr', except that the
-- size of the memory required is given explicitly as a number of bytes.
mallocForeignPtrBytes :: Int -> IO (ForeignPtr a)
mallocForeignPtrBytes size | size < 0 =
errorWithoutStackTrace "mallocForeignPtrBytes: size must be >= 0"
mallocForeignPtrBytes (I# size) = do
r <- newIORef NoFinalizers
IO $ \s ->
case newPinnedByteArray# size s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
-- | This function is similar to 'mallocForeignPtrBytes', except that the
-- size and alignment of the memory required is given explicitly as numbers of
-- bytes.
mallocForeignPtrAlignedBytes :: Int -> Int -> IO (ForeignPtr a)
mallocForeignPtrAlignedBytes size _align | size < 0 =
errorWithoutStackTrace "mallocForeignPtrAlignedBytes: size must be >= 0"
mallocForeignPtrAlignedBytes (I# size) (I# align) = do
r <- newIORef NoFinalizers
IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
-- | Allocate some memory and return a 'ForeignPtr' to it. The memory
-- will be released automatically when the 'ForeignPtr' is discarded.
--
-- GHC notes: 'mallocPlainForeignPtr' has a heavily optimised
-- implementation in GHC. It uses pinned memory in the garbage
-- collected heap, as for mallocForeignPtr. Unlike mallocForeignPtr, a
-- ForeignPtr created with mallocPlainForeignPtr carries no finalizers.
-- It is not possible to add a finalizer to a ForeignPtr created with
-- mallocPlainForeignPtr. This is useful for ForeignPtrs that will live
-- only inside Haskell (such as those created for packed strings).
-- Attempts to add a finalizer to a ForeignPtr created this way, or to
-- finalize such a pointer, will throw an exception.
--
mallocPlainForeignPtr :: Storable a => IO (ForeignPtr a)
mallocPlainForeignPtr = doMalloc undefined
where doMalloc :: Storable b => b -> IO (ForeignPtr b)
doMalloc a
          | I# size < 0 = errorWithoutStackTrace "mallocPlainForeignPtr: size must be >= 0"
| otherwise = IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
where !(I# size) = sizeOf a
!(I# align) = alignment a
-- | This function is similar to 'mallocForeignPtrBytes', except that
-- internally an optimised ForeignPtr representation with no
-- finalizer is used. Attempts to add a finalizer will cause an
-- exception to be thrown.
mallocPlainForeignPtrBytes :: Int -> IO (ForeignPtr a)
mallocPlainForeignPtrBytes size | size < 0 =
errorWithoutStackTrace "mallocPlainForeignPtrBytes: size must be >= 0"
mallocPlainForeignPtrBytes (I# size) = IO $ \s ->
case newPinnedByteArray# size s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
-- | This function is similar to 'mallocForeignPtrAlignedBytes', except that
-- internally an optimised ForeignPtr representation with no
-- finalizer is used. Attempts to add a finalizer will cause an
-- exception to be thrown.
mallocPlainForeignPtrAlignedBytes :: Int -> Int -> IO (ForeignPtr a)
mallocPlainForeignPtrAlignedBytes size _align | size < 0 =
errorWithoutStackTrace "mallocPlainForeignPtrAlignedBytes: size must be >= 0"
mallocPlainForeignPtrAlignedBytes (I# size) (I# align) = IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
addForeignPtrFinalizer :: FinalizerPtr a -> ForeignPtr a -> IO ()
-- ^This function adds a finalizer to the given foreign object. The
-- finalizer will run /before/ all other finalizers for the same
-- object which have already been registered.
addForeignPtrFinalizer (FunPtr fp) (ForeignPtr p c) = case c of
PlainForeignPtr r -> insertCFinalizer r fp 0# nullAddr# p ()
MallocPtr _ r -> insertCFinalizer r fp 0# nullAddr# p c
_ -> errorWithoutStackTrace "GHC.ForeignPtr: attempt to add a finalizer to a plain pointer"
-- Note [MallocPtr finalizers] (#10904)
--
-- When we have C finalizers for a MallocPtr, the memory is
-- heap-resident and would normally be recovered by the GC before the
-- finalizers run. To prevent the memory from being reused too early,
-- we attach the MallocPtr constructor to the "value" field of the
-- weak pointer when we call mkWeak# in ensureCFinalizerWeak below.
-- The GC will keep this field alive until the finalizers have run.
addForeignPtrFinalizerEnv ::
FinalizerEnvPtr env a -> Ptr env -> ForeignPtr a -> IO ()
-- ^ Like 'addForeignPtrFinalizer' but the finalizer is passed an
-- additional environment parameter.  The environment passed to the
-- finalizer is fixed by the second argument to
-- 'addForeignPtrFinalizerEnv'.
addForeignPtrFinalizerEnv (FunPtr fp) (Ptr ep) (ForeignPtr p c) = case c of
PlainForeignPtr r -> insertCFinalizer r fp 1# ep p ()
MallocPtr _ r -> insertCFinalizer r fp 1# ep p c
_ -> errorWithoutStackTrace "GHC.ForeignPtr: attempt to add a finalizer to a plain pointer"
addForeignPtrConcFinalizer :: ForeignPtr a -> IO () -> IO ()
-- ^This function adds a finalizer to the given @ForeignPtr@. The
-- finalizer will run /before/ all other finalizers for the same
-- object which have already been registered.
--
-- This is a variant of @addForeignPtrFinalizer@, where the finalizer
-- is an arbitrary @IO@ action. When it is invoked, the finalizer
-- will run in a new thread.
--
-- NB. Be very careful with these finalizers. One common trap is that
-- if a finalizer references another finalized value, it does not
-- prevent that value from being finalized. In particular, 'Handle's
-- are finalized objects, so a finalizer should not refer to a 'Handle'
-- (including @stdout@, @stdin@ or @stderr@).
--
addForeignPtrConcFinalizer (ForeignPtr _ c) finalizer =
addForeignPtrConcFinalizer_ c finalizer
addForeignPtrConcFinalizer_ :: ForeignPtrContents -> IO () -> IO ()
addForeignPtrConcFinalizer_ (PlainForeignPtr r) finalizer = do
noFinalizers <- insertHaskellFinalizer r finalizer
if noFinalizers
then IO $ \s ->
case r of { IORef (STRef r#) ->
case mkWeak# r# () (unIO $ foreignPtrFinalizer r) s of {
(# s1, _ #) -> (# s1, () #) }}
else return ()
addForeignPtrConcFinalizer_ f@(MallocPtr fo r) finalizer = do
noFinalizers <- insertHaskellFinalizer r finalizer
if noFinalizers
then IO $ \s ->
case mkWeak# fo () finalizer' s of
(# s1, _ #) -> (# s1, () #)
else return ()
where
finalizer' :: State# RealWorld -> (# State# RealWorld, () #)
finalizer' = unIO (foreignPtrFinalizer r >> touch f)
addForeignPtrConcFinalizer_ _ _ =
errorWithoutStackTrace "GHC.ForeignPtr: attempt to add a finalizer to plain pointer"
insertHaskellFinalizer :: IORef Finalizers -> IO () -> IO Bool
insertHaskellFinalizer r f = do
!wasEmpty <- atomicModifyIORef r $ \finalizers -> case finalizers of
NoFinalizers -> (HaskellFinalizers [f], True)
HaskellFinalizers fs -> (HaskellFinalizers (f:fs), False)
_ -> noMixingError
return wasEmpty
-- | A box around Weak#, private to this module.
data MyWeak = MyWeak (Weak# ())
insertCFinalizer ::
IORef Finalizers -> Addr# -> Int# -> Addr# -> Addr# -> value -> IO ()
insertCFinalizer r fp flag ep p val = do
MyWeak w <- ensureCFinalizerWeak r val
IO $ \s -> case addCFinalizerToWeak# fp p flag ep w s of
(# s1, 1# #) -> (# s1, () #)
-- Failed to add the finalizer because some other thread
-- has finalized w by calling foreignPtrFinalizer. We retry now.
-- This won't be an infinite loop because that thread must have
-- replaced the content of r before calling finalizeWeak#.
(# s1, _ #) -> unIO (insertCFinalizer r fp flag ep p val) s1
ensureCFinalizerWeak :: IORef Finalizers -> value -> IO MyWeak
ensureCFinalizerWeak ref@(IORef (STRef r#)) value = do
fin <- readIORef ref
case fin of
CFinalizers weak -> return (MyWeak weak)
HaskellFinalizers{} -> noMixingError
NoFinalizers -> IO $ \s ->
case mkWeakNoFinalizer# r# (unsafeCoerce# value) s of { (# s1, w #) ->
-- See Note [MallocPtr finalizers] (#10904)
case atomicModifyMutVar# r# (update w) s1 of
{ (# s2, (weak, needKill ) #) ->
if needKill
then case finalizeWeak# w s2 of { (# s3, _, _ #) ->
(# s3, weak #) }
else (# s2, weak #) }}
where
update _ fin@(CFinalizers w) = (fin, (MyWeak w, True))
update w NoFinalizers = (CFinalizers w, (MyWeak w, False))
update _ _ = noMixingError
noMixingError :: a
noMixingError = errorWithoutStackTrace $
"GHC.ForeignPtr: attempt to mix Haskell and C finalizers " ++
"in the same ForeignPtr"
foreignPtrFinalizer :: IORef Finalizers -> IO ()
foreignPtrFinalizer r = do
fs <- atomicModifyIORef r $ \fs -> (NoFinalizers, fs) -- atomic, see #7170
case fs of
NoFinalizers -> return ()
CFinalizers w -> IO $ \s -> case finalizeWeak# w s of
(# s1, 1#, f #) -> f s1
(# s1, _, _ #) -> (# s1, () #)
HaskellFinalizers actions -> sequence_ actions
newForeignPtr_ :: Ptr a -> IO (ForeignPtr a)
-- ^Turns a plain memory reference into a foreign pointer that may be
-- associated with finalizers by using 'addForeignPtrFinalizer'.
newForeignPtr_ (Ptr obj) = do
r <- newIORef NoFinalizers
return (ForeignPtr obj (PlainForeignPtr r))
touchForeignPtr :: ForeignPtr a -> IO ()
-- ^This function ensures that the foreign object in
-- question is alive at the given place in the sequence of IO
-- actions. In particular 'Foreign.ForeignPtr.withForeignPtr'
-- does a 'touchForeignPtr' after it
-- executes the user action.
--
-- Note that this function should not be used to express dependencies
-- between finalizers on 'ForeignPtr's. For example, if the finalizer
-- for a 'ForeignPtr' @F1@ calls 'touchForeignPtr' on a second
-- 'ForeignPtr' @F2@, then the only guarantee is that the finalizer
-- for @F2@ is never started before the finalizer for @F1@. They
-- might be started together if for example both @F1@ and @F2@ are
-- otherwise unreachable, and in that case the scheduler might end up
-- running the finalizer for @F2@ first.
--
-- In general, it is not recommended to use finalizers on separate
-- objects with ordering constraints between them. To express the
-- ordering robustly requires explicit synchronisation using @MVar@s
-- between the finalizers, but even then the runtime sometimes runs
-- multiple finalizers sequentially in a single thread (for
-- performance reasons), so synchronisation between finalizers could
-- result in artificial deadlock. Another alternative is to use
-- explicit reference counting.
--
touchForeignPtr (ForeignPtr _ r) = touch r
touch :: ForeignPtrContents -> IO ()
touch r = IO $ \s -> case touch# r s of s' -> (# s', () #)
unsafeForeignPtrToPtr :: ForeignPtr a -> Ptr a
-- ^This function extracts the pointer component of a foreign
-- pointer. This is a potentially dangerous operation, as if the
-- argument to 'unsafeForeignPtrToPtr' is the last usage
-- occurrence of the given foreign pointer, then its finalizer(s) will
-- be run, which potentially invalidates the plain pointer just
-- obtained. Hence, 'touchForeignPtr' must be used
-- wherever it has to be guaranteed that the pointer lives on - i.e.,
-- has another usage occurrence.
--
-- To avoid subtle coding errors, hand written marshalling code
-- should preferably use 'Foreign.ForeignPtr.withForeignPtr' rather
-- than combinations of 'unsafeForeignPtrToPtr' and
-- 'touchForeignPtr'. However, the latter routines
-- are occasionally preferred in tool generated marshalling code.
unsafeForeignPtrToPtr (ForeignPtr fo _) = Ptr fo
castForeignPtr :: ForeignPtr a -> ForeignPtr b
-- ^This function casts a 'ForeignPtr'
-- parameterised by one type into another type.
castForeignPtr = coerce
plusForeignPtr :: ForeignPtr a -> Int -> ForeignPtr b
-- ^Advances the given address by the given offset in bytes.
--
-- The new 'ForeignPtr' shares the finalizer of the original,
-- equivalent from a finalization standpoint to just creating another
-- reference to the original. That is, the finalizer will not be
-- called before the new 'ForeignPtr' is unreachable, nor will it be
-- called an additional time due to this call, and the finalizer will
-- be called with the same address that it would have had this call
-- not happened, *not* the new address.
--
-- @since 4.10.0.0
plusForeignPtr (ForeignPtr addr c) (I# d) = ForeignPtr (plusAddr# addr d) c
-- | Causes the finalizers associated with a foreign pointer to be run
-- immediately.
finalizeForeignPtr :: ForeignPtr a -> IO ()
finalizeForeignPtr (ForeignPtr _ (PlainPtr _)) = return () -- no effect
finalizeForeignPtr (ForeignPtr _ foreignPtr) = foreignPtrFinalizer refFinalizers
where
refFinalizers = case foreignPtr of
(PlainForeignPtr ref) -> ref
(MallocPtr _ ref) -> ref
PlainPtr _ ->
errorWithoutStackTrace "finalizeForeignPtr PlainPtr"
| ezyang/ghc | libraries/base/GHC/ForeignPtr.hs | Haskell | bsd-3-clause | 20,344 |
{-# OPTIONS_GHC -Wall #-}
nomain :: IO ()
nomain = putStrLn used
used :: String
used = "T13839"
nonUsed :: ()
nonUsed = ()
| sdiehl/ghc | testsuite/tests/rename/should_fail/T13839b.hs | Haskell | bsd-3-clause | 126 |
module Demo where
--
import Lesson01
import Lesson02
import Lesson03
import Lesson04
import Lesson05
import Lesson07
import Lesson08
import Lesson09
import Lesson10
import Lesson11
import Lesson12
import Lesson13
import Lesson14
import Lesson15
import Lesson17
import Lesson18
--
import qualified SDL
import System.Environment
import System.Exit (die)
import Control.Exception (catch)
--
main :: IO ()
main = catch runLesson
(\e -> do let err = show (e :: SDL.SDLException)
die ("SDL_Error: "++ err))
runLesson :: IO ()
runLesson = do
args <- getArgs
let i = (read $ head (args++["0"])) :: Int
case i of
1 -> lesson01
2 -> lesson02
3 -> lesson03
4 -> lesson04
5 -> lesson05
7 -> lesson07
8 -> lesson08
9 -> lesson09
10 -> lesson10
11 -> lesson11
12 -> lesson12
13 -> lesson13
14 -> lesson14
15 -> lesson15
17 -> lesson17
18 -> lesson18
    _ -> putStrLn $ "Lesson " ++ show i ++ " is undefined"
return ()
| rueshyna/sdl2-examples | src/Demo.hs | Haskell | mit | 1,045 |
{-# LANGUAGE DuplicateRecordFields #-}
module IR.Pure where
import Data.Word
import qualified IR.Common as C
import qualified IR.Name as Name
type Brand = C.ListBrand Name.CapnpQ
data File = File
{ fileId :: !Word64
, fileName :: FilePath
, decls :: [Decl]
, reExportEnums :: [Name.LocalQ]
-- ^ A list of enums that we should re-export from this module.
, usesRpc :: !Bool
-- ^ Whether or not the module uses rpc features. If not, we skip
-- the rpc related imports. This is mainly important to avoid a
-- cyclic dependency with rpc.capnp.
}
data Decl
= DataDecl Data
| ConstDecl Constant
| IFaceDecl Interface
data Data = Data
{ typeName :: Name.LocalQ
, typeParams :: [Name.UnQ]
, firstClass :: !Bool
-- ^ Whether this is a "first class" type, i.e. it is a type in the
-- capnproto sense, rather than an auxiliary type defined for a group
-- or an anonymous union.
--
-- Note that this *can* be set for unions, if they subsume the whole
-- struct, since in that case we collapse the two types in the
-- high-level API.
, cerialName :: Name.LocalQ
-- ^ The name of the type our 'Cerial' should be. This will only be
-- different from typeName if we're an anonymous union in a struct
-- that also has other fields; in this case our Cerial should be
-- the same as our parent struct.
, def :: DataDef
}
data DataDef
= Sum [Variant]
| Product [Field]
data Constant = Constant
{ name :: Name.LocalQ
, value :: C.Value Brand Name.CapnpQ
}
data Interface = IFace
{ name :: Name.CapnpQ
, typeParams :: [C.TypeParamRef Name.CapnpQ]
, interfaceId :: !Word64
, methods :: [Method]
, supers :: [(Interface, Brand)]
-- ^ Immediate superclasses
, ancestors :: [(Interface, Brand)]
-- ^ All ancestors, including 'supers'.
}
-- TODO(cleanup): this same type exists in IR.Flat; it doesn't make sense for
-- IR.Common, but we should factor this out.
data Method = Method
{ name :: Name.UnQ
, paramType :: C.CompositeType Brand Name.CapnpQ
, resultType :: C.CompositeType Brand Name.CapnpQ
}
data Field = Field
{ name :: Name.UnQ
-- ^ The name of the field.
, type_ :: C.Type Brand Name.CapnpQ
-- ^ The type of the field.
}
data Variant = Variant
{ name :: Name.LocalQ
, arg :: Maybe (C.Type Brand Name.CapnpQ)
}
| zenhack/haskell-capnp | cmd/capnpc-haskell/IR/Pure.hs | Haskell | mit | 2,499 |
import Data.List
import Data.Char
include :: String -> String -> Bool
include xs ys = or . map (isPrefixOf ys) . tails $ xs
joinWith :: [String] -> String -> String
joinWith xs sep = concat . init . concat $ [[x, sep] | x <- xs]
split :: String -> Char -> [String]
split "" _ = []
split xs c = let (ys, zs) = break (== c) xs
in if null zs then [ys] else ys : split (tail zs) c
main = do
putStrLn $ "Contains: " ++ show ("test" `include` "es")
putStrLn $ "Count: " ++ show (length . filter (=='t') $ "test")
putStrLn $ "HasPrefix: " ++ show (isPrefixOf "te" "test")
putStrLn $ "HasSuffix: " ++ show (isSuffixOf "st" "test")
putStrLn $ "Index: " ++ show (elemIndex 'e' "test")
putStrLn $ "Join: " ++ show (["a", "b"] `joinWith` "-")
putStrLn $ "Repeat: " ++ show (replicate 5 'a')
putStrLn $ "Replace: " ++ show (map (\x -> if x == 'o' then '0' else x) "foo")
putStrLn $ "Split: " ++ show (split "a-b-c-d-e" '-')
putStrLn $ "ToLower: " ++ map toLower "TEST"
putStrLn $ "ToUpper: " ++ map toUpper "test"
putStrLn ""
putStrLn $ "Len: " ++ show (length "hello")
putStrLn $ "Char:" ++ show ("hello" !! 1)
| rkalis/monokalis-syntax | sample-files/Haskell.hs | Haskell | mit | 1,200 |
{-# LANGUAGE OverloadedStrings #-}
import Network.SOAP
import Network.SOAP.Transport.HTTP
import Text.XML.Stream.Parse
import qualified Data.Text as T
import qualified Text.XML as XML
import qualified Text.XML.Writer as W
main :: IO ()
main = do
transport <- initTransport "http://www.webservicex.net/ConvertTemperature.asmx" traceRequest (iconv "utf-8")
out <- convertTemperatureCToF transport 25
print out
return ()
convertTemperatureCToF :: Transport -> Int -> IO T.Text
convertTemperatureCToF t c = invokeWS t "http://www.webserviceX.NET/ConvertTemp" () (body c) parser
body :: Int -> W.XML
body c = W.elementA "ConvertTemp" [("xmlns","http://www.webserviceX.NET/")] $ do
e "Temperature" (T.pack $ show c)
e "FromUnit" "degreeCelsius"
e "ToUnit" "degreeFahrenheit"
where
e :: XML.Name -> T.Text -> W.XML
e n t = W.element n t
parser :: ResponseParser T.Text
parser = StreamParser $
force "missing response" $
tagName "ConvertTempResponse" ignoreAttrs $ \_ ->
force "missing result" $
tagNoAttr "ConvertTempResult" content
| twopoint718/hsoap-testing | Temp.hs | Haskell | mit | 1,134 |
-- |
-- Module : Data.Edison.Assoc.StandardMap
-- Copyright : Copyright (c) 2006, 2008 Robert Dockins
-- License : MIT; see COPYRIGHT file for terms and conditions
--
-- Maintainer : robdockins AT fastmail DOT fm
-- Stability : stable
-- Portability : GHC, Hugs (MPTC and FD)
--
-- The standard library "Data.Map" repackaged as an Edison
-- associative collection.
module Data.Edison.Assoc.StandardMap (
-- * Type of standard finite maps
FM,
-- * AssocX operations
empty,singleton,fromSeq,insert,insertSeq,union,unionSeq,delete,deleteAll,
deleteSeq,null,size,member,count,lookup,lookupM,lookupAll,
lookupAndDelete,lookupAndDeleteM,lookupAndDeleteAll,
lookupWithDefault,adjust,adjustAll,adjustOrInsert,adjustAllOrInsert,
adjustOrDelete,adjustOrDeleteAll,strict,strictWith,
map,fold,fold',fold1,fold1',filter,partition,elements,structuralInvariant,
-- * FiniteMapX operations
fromSeqWith,fromSeqWithKey,insertWith,insertWithKey,insertSeqWith,
insertSeqWithKey,unionl,unionr,unionWith,unionSeqWith,intersectionWith,
difference,properSubset,subset,properSubmapBy,submapBy,sameMapBy,
properSubmap,submap,sameMap,
-- * OrdAssocX operations
minView, minElem, deleteMin, unsafeInsertMin, maxView, maxElem, deleteMax,
unsafeInsertMax, foldr, foldr', foldl, foldl', foldr1, foldr1',
foldl1, foldl1', unsafeFromOrdSeq,
unsafeAppend, filterLT, filterLE, filterGT, filterGE,
partitionLT_GE, partitionLE_GT, partitionLT_GT,
-- * Assoc operations
toSeq,keys,mapWithKey,foldWithKey,foldWithKey',filterWithKey,partitionWithKey,
-- * OrdAssoc operations
minViewWithKey, minElemWithKey, maxViewWithKey, maxElemWithKey,
foldrWithKey, foldrWithKey', foldlWithKey, foldlWithKey', toOrdSeq,
-- * FiniteMap operations
unionWithKey,unionSeqWithKey,intersectionWithKey,
-- * Documentation
moduleName
) where
import Prelude hiding (null,map,lookup,foldr,foldl,foldr1,foldl1,filter)
import qualified Prelude
import qualified Data.Edison.Assoc as A
import qualified Data.Edison.Seq as S
import qualified Data.Edison.Seq.ListSeq as L
import Data.Edison.Assoc.Defaults
import Data.Int
import Test.QuickCheck (Arbitrary(..), CoArbitrary(..))
import qualified Data.Map as DM
type FM = DM.Map
moduleName :: String
moduleName = "Data.Edison.Assoc.StandardMap"
empty :: FM k a
singleton :: Ord k => k -> a -> FM k a
fromSeq :: (Ord k,S.Sequence seq) => seq (k,a) -> FM k a
insert :: Ord k => k -> a -> FM k a -> FM k a
insertSeq :: (Ord k,S.Sequence seq) => seq (k,a) -> FM k a -> FM k a
union :: Ord k => FM k a -> FM k a -> FM k a
unionSeq :: (Ord k,S.Sequence seq) => seq (FM k a) -> FM k a
delete :: Ord k => k -> FM k a -> FM k a
deleteAll :: Ord k => k -> FM k a -> FM k a
deleteSeq :: (Ord k,S.Sequence seq) => seq k -> FM k a -> FM k a
null :: FM k a -> Bool
size :: FM k a -> Int
member :: Ord k => k -> FM k a -> Bool
count :: Ord k => k -> FM k a -> Int
lookup :: Ord k => k -> FM k a -> a
lookupAll :: (Ord k,S.Sequence seq) => k -> FM k a -> seq a
lookupM :: (Ord k,Monad m) => k -> FM k a -> m a
lookupWithDefault :: Ord k => a -> k -> FM k a -> a
lookupAndDelete :: Ord k => k -> FM k a -> (a, FM k a)
lookupAndDeleteM :: (Ord k,Monad m) => k -> FM k a -> m (a, FM k a)
lookupAndDeleteAll :: (Ord k,S.Sequence seq) => k -> FM k a -> (seq a,FM k a)
adjust :: Ord k => (a->a) -> k -> FM k a -> FM k a
adjustAll :: Ord k => (a->a) -> k -> FM k a -> FM k a
adjustOrInsert :: Ord k => (a -> a) -> a -> k -> FM k a -> FM k a
adjustAllOrInsert :: Ord k => (a -> a) -> a -> k -> FM k a -> FM k a
adjustOrDelete :: Ord k => (a -> Maybe a) -> k -> FM k a -> FM k a
adjustOrDeleteAll :: Ord k => (a -> Maybe a) -> k -> FM k a -> FM k a
strict :: Ord k => FM k a -> FM k a
strictWith :: Ord k => (a -> b) -> FM k a -> FM k a
map :: Ord k => (a -> b) -> FM k a -> FM k b
fold :: Ord k => (a -> b -> b) -> b -> FM k a -> b
fold1 :: Ord k => (a -> a -> a) -> FM k a -> a
fold' :: Ord k => (a -> b -> b) -> b -> FM k a -> b
fold1' :: Ord k => (a -> a -> a) -> FM k a -> a
filter :: Ord k => (a -> Bool) -> FM k a -> FM k a
partition :: Ord k => (a -> Bool) -> FM k a -> (FM k a,FM k a)
elements :: (Ord k,S.Sequence seq) => FM k a -> seq a
minView :: (Ord k,Monad m) => FM k a -> m (a, FM k a)
minElem :: Ord k => FM k a -> a
deleteMin :: Ord k => FM k a -> FM k a
unsafeInsertMin :: Ord k => k -> a -> FM k a -> FM k a
maxView :: (Ord k,Monad m) => FM k a -> m (a, FM k a)
maxElem :: Ord k => FM k a -> a
deleteMax :: Ord k => FM k a -> FM k a
unsafeInsertMax :: Ord k => k -> a -> FM k a -> FM k a
foldr :: Ord k => (a -> b -> b) -> b -> FM k a -> b
foldl :: Ord k => (b -> a -> b) -> b -> FM k a -> b
foldr1 :: Ord k => (a -> a -> a) -> FM k a -> a
foldl1 :: Ord k => (a -> a -> a) -> FM k a -> a
foldr' :: Ord k => (a -> b -> b) -> b -> FM k a -> b
foldl' :: Ord k => (b -> a -> b) -> b -> FM k a -> b
foldr1' :: Ord k => (a -> a -> a) -> FM k a -> a
foldl1' :: Ord k => (a -> a -> a) -> FM k a -> a
unsafeFromOrdSeq :: (Ord k,S.Sequence seq) => seq (k,a) -> FM k a
unsafeAppend :: Ord k => FM k a -> FM k a -> FM k a
filterLT :: Ord k => k -> FM k a -> FM k a
filterGT :: Ord k => k -> FM k a -> FM k a
filterLE :: Ord k => k -> FM k a -> FM k a
filterGE :: Ord k => k -> FM k a -> FM k a
partitionLT_GE :: Ord k => k -> FM k a -> (FM k a,FM k a)
partitionLE_GT :: Ord k => k -> FM k a -> (FM k a,FM k a)
partitionLT_GT :: Ord k => k -> FM k a -> (FM k a,FM k a)
fromSeqWith :: (Ord k,S.Sequence seq) => (a -> a -> a)
-> seq (k,a) -> FM k a
fromSeqWithKey :: (Ord k,S.Sequence seq) => (k -> a -> a -> a)
-> seq (k,a) -> FM k a
insertWith :: Ord k => (a -> a -> a) -> k -> a
-> FM k a -> FM k a
insertWithKey :: Ord k => (k -> a -> a -> a) -> k -> a
-> FM k a -> FM k a
insertSeqWith :: (Ord k,S.Sequence seq) => (a -> a -> a) -> seq (k,a)
-> FM k a -> FM k a
insertSeqWithKey :: (Ord k,S.Sequence seq) => (k -> a -> a -> a) -> seq (k,a)
-> FM k a -> FM k a
unionl :: Ord k => FM k a -> FM k a -> FM k a
unionr :: Ord k => FM k a -> FM k a -> FM k a
unionWith :: Ord k => (a -> a -> a) -> FM k a -> FM k a -> FM k a
unionSeqWith :: (Ord k,S.Sequence seq) =>
(a -> a -> a) -> seq (FM k a) -> FM k a
intersectionWith :: Ord k => (a -> b -> c) -> FM k a -> FM k b -> FM k c
difference :: Ord k => FM k a -> FM k b -> FM k a
properSubset :: Ord k => FM k a -> FM k b -> Bool
subset :: Ord k => FM k a -> FM k b -> Bool
properSubmapBy :: Ord k => (a -> a -> Bool) -> FM k a -> FM k a -> Bool
submapBy :: Ord k => (a -> a -> Bool) -> FM k a -> FM k a -> Bool
sameMapBy :: Ord k => (a -> a -> Bool) -> FM k a -> FM k a -> Bool
properSubmap :: (Ord k,Eq a) => FM k a -> FM k a -> Bool
submap :: (Ord k,Eq a) => FM k a -> FM k a -> Bool
sameMap :: (Ord k,Eq a) => FM k a -> FM k a -> Bool
toSeq :: (Ord k,S.Sequence seq) => FM k a -> seq (k,a)
keys :: (Ord k,S.Sequence seq) => FM k a -> seq k
mapWithKey :: Ord k => (k -> a -> b) -> FM k a -> FM k b
foldWithKey :: Ord k => (k -> a -> b -> b) -> b -> FM k a -> b
foldWithKey' :: Ord k => (k -> a -> b -> b) -> b -> FM k a -> b
filterWithKey :: Ord k => (k -> a -> Bool) -> FM k a -> FM k a
partitionWithKey :: Ord k => (k -> a -> Bool) -> FM k a -> (FM k a,FM k a)
minViewWithKey :: (Ord k,Monad m) => FM k a -> m ((k, a), FM k a)
minElemWithKey :: Ord k => FM k a -> (k,a)
maxViewWithKey :: (Ord k,Monad m) => FM k a -> m ((k, a), FM k a)
maxElemWithKey :: Ord k => FM k a -> (k,a)
foldrWithKey :: (k -> a -> b -> b) -> b -> FM k a -> b
foldlWithKey :: (b -> k -> a -> b) -> b -> FM k a -> b
foldrWithKey' :: (k -> a -> b -> b) -> b -> FM k a -> b
foldlWithKey' :: (b -> k -> a -> b) -> b -> FM k a -> b
toOrdSeq :: (Ord k,S.Sequence seq) => FM k a -> seq (k,a)
unionWithKey :: Ord k => (k -> a -> a -> a) -> FM k a -> FM k a -> FM k a
unionSeqWithKey :: (Ord k,S.Sequence seq) => (k -> a -> a -> a)
-> seq (FM k a) -> FM k a
intersectionWithKey :: Ord k => (k -> a -> b -> c) -> FM k a -> FM k b -> FM k c
structuralInvariant :: Ord k => FM k a -> Bool
structuralInvariant = DM.valid
empty = DM.empty
singleton = DM.singleton
fromSeq = fromSeqUsingInsertSeq
insert = DM.insert
insertSeq = insertSeqUsingFoldr
union = DM.union
unionSeq = DM.unions . S.toList
delete = DM.delete
deleteAll = DM.delete -- by finite map property
deleteSeq = deleteSeqUsingFoldr
null = DM.null
size = DM.size
member = DM.member
count = countUsingMember
lookup k m = maybe (error (moduleName ++ ".lookup: failed")) id (DM.lookup k m)
lookupM k m = maybe (fail (moduleName ++ ".lookupM: failed")) return (DM.lookup k m)
lookupAll = lookupAllUsingLookupM
lookupWithDefault = DM.findWithDefault
lookupAndDelete = lookupAndDeleteDefault
lookupAndDeleteM = lookupAndDeleteMDefault
lookupAndDeleteAll = lookupAndDeleteAllDefault
adjust = DM.adjust
adjustAll = DM.adjust
adjustOrInsert = adjustOrInsertUsingMember
adjustAllOrInsert = adjustOrInsertUsingMember
adjustOrDelete = DM.update
adjustOrDeleteAll = DM.update
strict xs = DM.foldr (flip const) () xs `seq` xs
strictWith f xs = DM.foldr (\x z -> f x `seq` z) () xs `seq` xs
map = fmap
fold = DM.foldr
fold' f x xs = L.foldl' (flip f) x (DM.elems xs)
fold1 f xs = L.foldr1 f (DM.elems xs)
fold1' f xs = L.foldl1' (flip f) (DM.elems xs)
filter = DM.filter
partition = DM.partition
elements = elementsUsingFold
minView m = if DM.null m
then fail (moduleName ++ ".minView: failed")
else let ((_,x),m') = DM.deleteFindMin m
in return (x,m')
minElem = snd . DM.findMin
deleteMin = DM.deleteMin
unsafeInsertMin = DM.insert
maxView m = if DM.null m
then fail (moduleName ++ ".maxView: failed")
else let ((_,x),m') = DM.deleteFindMax m
in return (x,m')
maxElem = snd . DM.findMax
deleteMax = DM.deleteMax
unsafeInsertMax = DM.insert
foldr f x m = L.foldr f x (DM.elems m)
foldl f x m = L.foldl f x (DM.elems m)
foldr1 f m = L.foldr1 f (DM.elems m)
foldl1 f m = L.foldl1 f (DM.elems m)
foldr' f x m = L.foldr' f x (DM.elems m)
foldl' f x m = L.foldl' f x (DM.elems m)
foldr1' f m = L.foldr1' f (DM.elems m)
foldl1' f m = L.foldl1' f (DM.elems m)
unsafeFromOrdSeq = DM.fromAscList . S.toList
unsafeAppend = DM.union
filterLT k = fst . DM.split k
filterGT k = snd . DM.split k
filterLE k m = let (lt, mx, _ ) = DM.splitLookup k m in maybe lt (\x -> insert k x lt) mx
filterGE k m = let (_ , mx, gt) = DM.splitLookup k m in maybe gt (\x -> insert k x gt) mx
partitionLT_GE k m = let (lt, mx, gt) = DM.splitLookup k m in (lt, maybe gt (\x -> insert k x gt) mx)
partitionLE_GT k m = let (lt, mx, gt) = DM.splitLookup k m in (maybe lt (\x -> insert k x lt) mx, gt)
partitionLT_GT = DM.split
fromSeqWith f s = DM.fromListWith f (S.toList s)
fromSeqWithKey f s = DM.fromListWithKey f (S.toList s)
insertWith = DM.insertWith
insertWithKey = insertWithKeyUsingInsertWith
insertSeqWith = insertSeqWithUsingInsertWith
insertSeqWithKey = insertSeqWithKeyUsingInsertWithKey
unionl = DM.union
unionr = flip DM.union
unionWith = DM.unionWith
unionSeqWith = unionSeqWithUsingReduce
intersectionWith = DM.intersectionWith
difference = DM.difference
properSubset = DM.isProperSubmapOfBy (\_ _ -> True)
subset = DM.isSubmapOfBy (\_ _ -> True)
properSubmapBy = DM.isProperSubmapOfBy
submapBy = DM.isSubmapOfBy
sameMapBy = sameMapByUsingOrdLists
properSubmap = A.properSubmap
submap = A.submap
sameMap = A.sameMap
toSeq = toSeqUsingFoldWithKey
keys = keysUsingFoldWithKey
mapWithKey = DM.mapWithKey
foldWithKey = DM.foldrWithKey
foldWithKey' f x m = L.foldl' (\b (k,a) -> f k a b) x (DM.toList m)
filterWithKey = DM.filterWithKey
partitionWithKey = DM.partitionWithKey
minViewWithKey m = if DM.null m
then fail (moduleName ++ ".minViewWithKey: failed")
else return (DM.deleteFindMin m)
minElemWithKey = DM.findMin
maxViewWithKey m = if DM.null m
then fail (moduleName ++ ".maxViewWithKey: failed")
else return (DM.deleteFindMax m)
maxElemWithKey = DM.findMax
foldrWithKey = DM.foldrWithKey
foldrWithKey' f x m = L.foldr' (\(k,a) b -> f k a b) x (DM.toAscList m)
foldlWithKey f x m = L.foldl (\b (k,a) -> f b k a) x (DM.toAscList m)
foldlWithKey' f x m = L.foldl' (\b (k,a) -> f b k a) x (DM.toAscList m)
toOrdSeq = S.fromList . DM.toAscList
unionWithKey = DM.unionWithKey
unionSeqWithKey = unionSeqWithKeyUsingReduce
intersectionWithKey = DM.intersectionWithKey
instance Ord k => A.AssocX (FM k) k where
{empty = empty; singleton = singleton; fromSeq = fromSeq; insert = insert;
insertSeq = insertSeq; union = union; unionSeq = unionSeq;
delete = delete; deleteAll = deleteAll; deleteSeq = deleteSeq;
null = null; size = size; member = member; count = count;
lookup = lookup; lookupM = lookupM; lookupAll = lookupAll;
lookupAndDelete = lookupAndDelete; lookupAndDeleteM = lookupAndDeleteM;
lookupAndDeleteAll = lookupAndDeleteAll;
lookupWithDefault = lookupWithDefault; adjust = adjust;
adjustAll = adjustAll; adjustOrInsert = adjustOrInsert;
adjustAllOrInsert = adjustAllOrInsert;
adjustOrDelete = adjustOrDelete; adjustOrDeleteAll = adjustOrDeleteAll;
fold = fold; fold' = fold'; fold1 = fold1; fold1' = fold1';
filter = filter; partition = partition; elements = elements;
strict = strict; strictWith = strictWith;
structuralInvariant = structuralInvariant; instanceName _ = moduleName}
instance Ord k => A.OrdAssocX (FM k) k where
{minView = minView; minElem = minElem; deleteMin = deleteMin;
unsafeInsertMin = unsafeInsertMin; maxView = maxView; maxElem = maxElem;
deleteMax = deleteMax; unsafeInsertMax = unsafeInsertMax;
foldr = foldr; foldr' = foldr'; foldl = foldl; foldl' = foldl';
foldr1 = foldr1; foldr1' = foldr1'; foldl1 = foldl1; foldl1' = foldl1';
unsafeFromOrdSeq = unsafeFromOrdSeq; unsafeAppend = unsafeAppend;
filterLT = filterLT; filterGT = filterGT; filterLE = filterLE;
filterGE = filterGE; partitionLT_GE = partitionLT_GE;
partitionLE_GT = partitionLE_GT; partitionLT_GT = partitionLT_GT}
instance Ord k => A.FiniteMapX (FM k) k where
{fromSeqWith = fromSeqWith; fromSeqWithKey = fromSeqWithKey;
insertWith = insertWith; insertWithKey = insertWithKey;
insertSeqWith = insertSeqWith; insertSeqWithKey = insertSeqWithKey;
unionl = unionl; unionr = unionr; unionWith = unionWith;
unionSeqWith = unionSeqWith; intersectionWith = intersectionWith;
difference = difference; properSubset = properSubset; subset = subset;
properSubmapBy = properSubmapBy; submapBy = submapBy;
sameMapBy = sameMapBy}
instance Ord k => A.OrdFiniteMapX (FM k) k
instance Ord k => A.Assoc (FM k) k where
{toSeq = toSeq; keys = keys; mapWithKey = mapWithKey;
foldWithKey = foldWithKey; foldWithKey' = foldWithKey';
filterWithKey = filterWithKey;
partitionWithKey = partitionWithKey}
instance Ord k => A.OrdAssoc (FM k) k where
{minViewWithKey = minViewWithKey; minElemWithKey = minElemWithKey;
maxViewWithKey = maxViewWithKey; maxElemWithKey = maxElemWithKey;
foldrWithKey = foldrWithKey; foldrWithKey' = foldrWithKey';
foldlWithKey = foldlWithKey; foldlWithKey' = foldlWithKey';
toOrdSeq = toOrdSeq}
instance Ord k => A.FiniteMap (FM k) k where
{unionWithKey = unionWithKey; unionSeqWithKey = unionSeqWithKey;
intersectionWithKey = intersectionWithKey}
instance Ord k => A.OrdFiniteMap (FM k) k
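-- Usage sketch (added for illustration; not part of the original module).
-- It exercises only operations defined above, using plain lists for the
-- Sequence parameter (the list instance comes from the Edison sequence
-- modules imported above); the keys and values are made up.
_exampleFM :: FM String Int
_exampleFM = insertWith (+) "a" 1 (fromSeq [("a", 10), ("b", 20)])
_exampleLookup :: Maybe Int
_exampleLookup = lookupM "a" _exampleFM            -- Just 11
_exampleDefault :: Int
_exampleDefault = lookupWithDefault 0 "c" _exampleFM  -- 0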
|
robdockins/edison
|
edison-core/src/Data/Edison/Assoc/StandardMap.hs
|
Haskell
|
mit
| 17,103
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Network.Wai.WebSockets where
import Network.Wai
import Control.Exception (Exception, throwIO, assert)
import Control.Applicative ((<$>))
import Control.Monad (when, unless)
import Data.Typeable (Typeable)
import Blaze.ByteString.Builder
import Data.Monoid ((<>), mempty)
import qualified Crypto.Hash.SHA1 as SHA1
import Data.Word (Word8, Word32, Word64)
import Data.ByteString (ByteString)
import Data.Bits ((.|.), testBit, clearBit, shiftL, (.&.), Bits, xor, shiftR)
import qualified Data.Map as Map
import Data.Maybe (isJust)
import qualified Data.ByteString as S
import qualified Data.ByteString.Base64 as B64
import Data.IORef
type IsText = Bool
data Connection = Connection
{ connSend :: IsText -> ByteString -> IO ()
, connRecv :: IO ByteString
}
type WSApp a
= IO ByteString
-> (ByteString -> IO ())
-> (Connection -> IO a)
-> IO a
websocketsApp :: Request -> Maybe (WSApp a)
websocketsApp req
-- FIXME handle keep-alive, Upgrade | lookup "connection" reqhs /= Just "Upgrade" = backup sendResponse
| lookup "upgrade" reqhs /= Just "websocket" = Nothing
| lookup "sec-websocket-version" reqhs /= Just "13" = Nothing
| Just key <- lookup "sec-websocket-key" reqhs = Just $ \recvRaw sendRaw app -> do
let handshake = fromByteString "HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\nConnection: Upgrade\r\nSec-WebSocket-Accept: "
<> fromByteString (B64.encode key')
<> fromByteString "\r\n\r\n"
key' = SHA1.hash $ key <> "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
toByteStringIO sendRaw handshake
src <- mkSource recvRaw
let recv front0 = waitForFrame src $ \isFinished _opcode _ _ getBS' -> do
let loop front = do
bs <- getBS'
if S.null bs
then return front
else loop $ front . (bs:)
front <- loop front0
if isFinished
then return $ S.concat $ front []
else recv front
app Connection
{ connSend = \isText payload -> do
let header = Frame True (if isText then OpText else OpBinary) Nothing $ fromIntegral $ S.length payload
toByteStringIO sendRaw $ wsDataToBuilder header <> fromByteString payload
, connRecv = recv id
}
| otherwise = Nothing
where
reqhs = requestHeaders req
type FrameFinished = Bool
type MaskingKey = Word32
type PayloadSize = Word64
data WSData payload
= Frame FrameFinished Opcode (Maybe MaskingKey) PayloadSize
| Payload payload
deriving Show
data Opcode = OpCont | OpText | OpBinary | OpClose | OpPing | OpPong
deriving (Show, Eq, Ord, Enum, Bounded)
opcodeToWord8 :: Opcode -> Word8
opcodeToWord8 OpCont = 0x0
opcodeToWord8 OpText = 0x1
opcodeToWord8 OpBinary = 0x2
opcodeToWord8 OpClose = 0x8
opcodeToWord8 OpPing = 0x9
opcodeToWord8 OpPong = 0xA
opcodeFromWord8 :: Word8 -> Maybe Opcode
opcodeFromWord8 =
flip Map.lookup m
where
m = Map.fromList $ map (\o -> (opcodeToWord8 o, o)) [minBound..maxBound]
wsDataToBuilder :: WSData Builder -> Builder
wsDataToBuilder (Payload builder) = builder
wsDataToBuilder (Frame finished opcode mmask payload) =
fromWord8 byte1
<> fromWord8 byte2
<> lenrest
<> maybe mempty fromWord32be mmask
where
byte1 = (if finished then 128 else 0) .|. opcodeToWord8 opcode
byte2 = (if isJust mmask then 128 else 0) .|. len1
(len1, lenrest)
| payload <= 125 = (fromIntegral payload, mempty)
| payload < 65536 = (126, fromWord16be $ fromIntegral payload)
| otherwise = (127, fromWord64be $ fromIntegral payload)
data WSException = ConnectionClosed
| RSVBitsSet Word8
| InvalidOpcode Word8
deriving (Show, Typeable)
instance Exception WSException
data Source = Source (IO ByteString) (IORef ByteString)
mkSource :: IO ByteString -> IO Source
mkSource recv = Source recv <$> newIORef S.empty
-- | Guaranteed to never return an empty ByteString.
getBS :: Source -> IO ByteString
getBS (Source next ref) = do
bs <- readIORef ref
if S.null bs
then do
bs' <- next
when (S.null bs') (throwIO ConnectionClosed)
return bs'
else writeIORef ref S.empty >> return bs
leftover :: Source -> ByteString -> IO ()
leftover (Source _ ref) bs = writeIORef ref bs
getWord8 :: Source -> IO Word8
getWord8 src = do
bs <- getBS src
leftover src $ S.tail bs
return $ S.head bs
getBytes :: (Num word, Bits word) => Source -> Int -> IO word
getBytes src =
loop 0
where
loop total 0 = return total
loop total remaining = do
x <- getWord8 src -- FIXME not very efficient, better to use ByteString directly
loop (shiftL total 8 .|. fromIntegral x) (remaining - 1)
waitForFrame :: Source -> (FrameFinished -> Opcode -> Maybe MaskingKey -> PayloadSize -> IO ByteString -> IO a) -> IO a
waitForFrame src yield = do
byte1 <- getWord8 src
byte2 <- getWord8 src
when (testBit byte1 6 || testBit byte1 5 || testBit byte1 4)
$ throwIO $ RSVBitsSet byte1
let opcode' = byte1 .&. 0x0F
opcode <-
case opcodeFromWord8 opcode' of
Nothing -> throwIO $ InvalidOpcode opcode'
Just o -> return o
let isFinished = testBit byte1 7
isMasked = testBit byte2 7
len' = byte2 `clearBit` 7
payloadSize <-
case () of
()
| len' <= 125 -> return $ fromIntegral len'
| len' == 126 -> getBytes src 2
| otherwise -> assert (len' == 127) (getBytes src 8)
mmask <- if isMasked then Just <$> getBytes src 4 else return Nothing
let unmask' =
case mmask of
Nothing -> \_ bs -> bs
Just mask -> unmask mask
consumedRef <- newIORef 0
let getPayload = handlePayload unmask' payloadSize consumedRef
res <- yield isFinished opcode mmask payloadSize getPayload
let drain = do
bs <- getPayload
unless (S.null bs) drain
drain
return res
where
handlePayload unmask' totalSize consumedRef = do
consumed <- readIORef consumedRef
if consumed >= totalSize
then return S.empty
else do
bs <- getBS src
let len = fromIntegral $ S.length bs
consumed' = consumed + len
if consumed' <= totalSize
then do
writeIORef consumedRef consumed'
return $ unmask' consumed bs
else do
let (x, y) = S.splitAt (fromIntegral $ totalSize - consumed) bs
leftover src y
return $ unmask' consumed x
unmask :: MaskingKey -> Word64 -> ByteString -> ByteString
unmask key offset' masked =
-- we really want a mapWithIndex...
fst $ S.unfoldrN len f 0
where
len = S.length masked
f idx | idx >= len = Nothing
f idx = Just (getIndex idx, idx + 1)
offset = fromIntegral $ offset' `mod` 4
getIndex idx = S.index masked idx `xor` maskByte ((offset + idx) `mod` 4)
maskByte 0 = fromIntegral $ key `shiftR` 24
maskByte 1 = fromIntegral $ key `shiftR` 16
maskByte 2 = fromIntegral $ key `shiftR` 8
maskByte 3 = fromIntegral key
maskByte i = error $ "Network.Wai.WebSockets.unmask.maskByte: invalid input " ++ show i
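-- Illustration (added; not part of the original module): the wire bytes for a
-- small, final, unmasked text frame built with the encoder above. "hello" has
-- length 5 <= 125, so the length fits directly in the second header byte and
-- no extended length or masking key is emitted.
_exampleFrame :: ByteString
_exampleFrame = toByteString $
    wsDataToBuilder (Frame True OpText Nothing 5) <> fromByteString "hello"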
|
snoyberg/wai-websockets-native
|
Network/Wai/WebSockets.hs
|
Haskell
|
mit
| 7,697
|
module Sound.Source where
import Data.Monoid
-- A source takes a time 't' and returns the amplitude of the source
-- at that time. 't' is a time in seconds, representing the current
-- time where 0.0 is the start of the audio data
newtype Source = Source { sample :: Double -> Double }
instance Semigroup Source where
    Source f <> Source g = Source (\t -> f t + g t)
instance Monoid Source where
    mempty = Source (const 0.0)
    mconcat srcs = Source (\t -> foldr (\(Source f) x -> f t + x) 0.0 srcs)
type Synth = (Double -> Source)
sineSynth :: Double -> Source
sineSynth = Source . sineWave
sawSynth :: Double -> Source
sawSynth = Source . sawWave
triangleSynth :: Double -> Source
triangleSynth = Source . triangleWave
squareSynth :: Double -> Source
squareSynth = Source . squareWave
sineWave :: Double -> Double -> Double
sineWave freq t = sin (freq * t * 2 * pi)
sawWave :: Double -> Double -> Double
sawWave freq t = saw (freq * t)
where saw x = 2 * (x - fromInteger (floor (0.5 + x)))
triangleWave :: Double -> Double -> Double
triangleWave freq t = 2 * abs (sawWave freq t) - 1
squareWave :: Double -> Double -> Double
squareWave freq t
| s < 0 = -1
| otherwise = 1
where s = sineWave freq t
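-- Usage sketch (added for illustration; not part of the original module):
-- mixing two sine sources with the Monoid instance above and sampling the
-- result 0.25 seconds in.
_chord :: Source
_chord = sineSynth 440 `mappend` sineSynth 660
_chordSample :: Double
_chordSample = sample _chord 0.25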
|
unknownloner/HaskellSynth
|
src/Sound/Source.hs
|
Haskell
|
mit
| 1,204
|
{-# OPTIONS_GHC -Wall #-}
module HW04 where
import Data.List
newtype Poly a = P [a]
-- Exercise 1 -----------------------------------------
x :: Num a => Poly a
x = P [0, 1]
-- Exercise 2 ----------------------------------------
trimTail :: (Eq a, Num a) => [a] -> [a]
trimTail = reverse . trimHead . reverse
trimHead :: (Eq a, Num a) => [a] -> [a]
trimHead = dropWhile (==0)
instance (Num a, Eq a) => Eq (Poly a) where
(==) (P l) (P l') = (trimTail l) == (trimTail l')
-- Exercise 3 -----------------------------------------
-- Get coefficient given the number
getce :: (Num a, Eq a, Show a) => a -> String
getce 1 = ""
getce n = show n
-- Generate pairs for array element with its index
items :: [a] -> [(a, Integer)]
items l = zip l [0,1..]
-- Get the term given a pair of coefficient and index
getTerm :: (Num a, Eq a, Show a) => (a, Integer) -> String
getTerm (0, _) = "0"
getTerm (n, 0) = show n
getTerm (n, 1) = getce n ++ "x"
getTerm (n, i) = getce n ++ "x^" ++ show i
instance (Num a, Eq a, Show a) => Show (Poly a) where
show (P l) = case trimTail l of
[] -> "0"
xs -> let terms = map getTerm $ items xs in
intercalate " + " $ reverse $ filter (/="0") $ terms
-- Exercise 4 -----------------------------------------
concatPoly :: Poly a -> Poly a -> Poly a
concatPoly (P l) (P l') = P (l ++ l')
plus :: Num a => Poly a -> Poly a -> Poly a
plus (P []) (P l) = P l
plus (P l) (P []) = P l
plus (P (n:ns)) (P (n':ns')) = concatPoly (P [n + n']) (P ns `plus` P ns')
-- Exercise 5 -----------------------------------------
getComb :: Num a => Poly a -> Poly a -> [Poly a]
getComb (P []) (P _) = []
getComb (P _) (P []) = []
getComb (P (n:ns)) (P l) = (P $ map (*n) l):(getComb (P ns) (P $ 0:l))
times :: Num a => Poly a -> Poly a -> Poly a
times p p' = sum $ getComb p p'
-- Exercise 6 -----------------------------------------
instance Num a => Num (Poly a) where
(+) = plus
(*) = times
negate (P l) = P $ map negate l
fromInteger n = P [fromInteger n]
-- No meaningful definitions exist
abs = undefined
signum = undefined
-- Exercise 7 -----------------------------------------
applyP :: Num a => Poly a -> a -> a
applyP (P l) n = evalP $ items l
where evalP ((ce, i):ps') = n ^ i * ce + evalP ps'
evalP [] = 0
-- Exercise 8 -----------------------------------------
class Num a => Differentiable a where
deriv :: a -> a
nderiv :: Int -> a -> a
nderiv 0 f = f
nderiv n f = nderiv (n-1) (deriv f)
-- Exercise 9 -----------------------------------------
instance Num a => Differentiable (Poly a) where
deriv (P l) = P $ drop 1 $ calcTerms $ items l
where calcTerms [] = []
calcTerms ((n, i):ps) = (n * (fromInteger i)):(calcTerms ps)
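-- Worked example (added for illustration; not part of the original homework):
-- for p = 1 + 2x + 3x^2 we have applyP p 2 == 1 + 2*2 + 3*4 == 17 and
-- deriv p == P [2, 6], i.e. 2 + 6x.
_examplePoly :: Poly Int
_examplePoly = P [1, 2, 3]
_exampleValue :: Int
_exampleValue = applyP _examplePoly 2
_exampleDeriv :: Poly Int
_exampleDeriv = deriv _examplePoly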
|
hanjoes/cis194
|
hw4/HW04.hs
|
Haskell
|
mit
| 2,807
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.IDBRequest
(js_getResult, getResult, js_getError, getError, js_getSource,
getSource, js_getTransaction, getTransaction, js_getReadyState,
getReadyState, success, error, IDBRequest, castToIDBRequest,
gTypeIDBRequest, IsIDBRequest, toIDBRequest)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"result\"]" js_getResult ::
IDBRequest -> IO (Nullable IDBAny)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.result Mozilla IDBRequest.result documentation>
getResult ::
(MonadIO m, IsIDBRequest self) => self -> m (Maybe IDBAny)
getResult self
= liftIO (nullableToMaybe <$> (js_getResult (toIDBRequest self)))
foreign import javascript unsafe "$1[\"error\"]" js_getError ::
IDBRequest -> IO (Nullable DOMError)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.error Mozilla IDBRequest.error documentation>
getError ::
(MonadIO m, IsIDBRequest self) => self -> m (Maybe DOMError)
getError self
= liftIO (nullableToMaybe <$> (js_getError (toIDBRequest self)))
foreign import javascript unsafe "$1[\"source\"]" js_getSource ::
IDBRequest -> IO (Nullable IDBAny)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.source Mozilla IDBRequest.source documentation>
getSource ::
(MonadIO m, IsIDBRequest self) => self -> m (Maybe IDBAny)
getSource self
= liftIO (nullableToMaybe <$> (js_getSource (toIDBRequest self)))
foreign import javascript unsafe "$1[\"transaction\"]"
js_getTransaction :: IDBRequest -> IO (Nullable IDBTransaction)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.transaction Mozilla IDBRequest.transaction documentation>
getTransaction ::
(MonadIO m, IsIDBRequest self) => self -> m (Maybe IDBTransaction)
getTransaction self
= liftIO
(nullableToMaybe <$> (js_getTransaction (toIDBRequest self)))
foreign import javascript unsafe "$1[\"readyState\"]"
js_getReadyState :: IDBRequest -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.readyState Mozilla IDBRequest.readyState documentation>
getReadyState ::
(MonadIO m, IsIDBRequest self, FromJSString result) =>
self -> m result
getReadyState self
= liftIO (fromJSString <$> (js_getReadyState (toIDBRequest self)))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.onsuccess Mozilla IDBRequest.onsuccess documentation>
success ::
(IsIDBRequest self, IsEventTarget self) => EventName self Event
success = unsafeEventName (toJSString "success")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.onerror Mozilla IDBRequest.onerror documentation>
error ::
(IsIDBRequest self, IsEventTarget self) => EventName self Event
error = unsafeEventName (toJSString "error")
|
manyoo/ghcjs-dom
|
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/IDBRequest.hs
|
Haskell
|
mit
| 3,705
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
module Main (main) where
#if !MIN_VERSION_base(4, 8, 0)
import Data.Monoid (mempty)
#endif
import Data.Word (Word8)
import Text.Printf (printf)
import System.Random (Random(random), RandomGen, getStdGen)
import Options.Applicative
#if MIN_VERSION_optparse_applicative(0, 13, 0)
import Data.Monoid ((<>))
#endif
import System.Clock (Clock(Monotonic), TimeSpec(sec, nsec), getTime, diffTimeSpec)
import Control.DeepSeq (force)
import qualified Data.Vector as V
import qualified Data.Vector.Storable as SV
import qualified Data.ReedSolomon as RS
data Options = Options { optionsN :: Int
, optionsK :: Int
, optionsSize :: Int
, optionsIterations :: Int
}
deriving (Show, Eq)
parser :: Parser Options
parser = Options
<$> option auto
( short 'n'
<> metavar "N"
<> value 9
<> showDefault
<> help "Number of data shards"
)
<*> option auto
( short 'k'
<> metavar "K"
<> value 3
<> showDefault
<> help "Number of parity shards to calculate"
)
<*> option auto
( short 's'
<> metavar "BYTES"
<> value (1024 * 1024)
<> showDefault
<> help "Total data size to encode"
)
<*> option auto
( short 'i'
<> metavar "COUNT"
<> value 500
<> showDefault
<> help "Number of encoding iterations"
)
go :: RS.Encoder -> V.Vector (SV.Vector Word8) -> Int -> IO ()
go enc shards = loop
where
loop n | n == 0 = return ()
| otherwise = do
parities <- force `fmap` RS.encode RS.defaultBackend enc shards
parities `seq` loop (n - 1)
makeVector :: (SV.Storable a, Random a, RandomGen g) => g -> Int -> SV.Vector a
makeVector gen0 cnt = SV.unfoldrN cnt (Just . random) gen0
time :: IO () -> IO TimeSpec
time act = do
start <- getTime Monotonic
act
diffTimeSpec start `fmap` getTime Monotonic
main :: IO ()
main = do
Options{..} <- execParser $ info (helper <*> parser) mempty
printf "Settings: N=%d K=%d size=%d iterations=%d\n"
optionsN optionsK optionsSize optionsIterations
enc <- RS.new optionsN optionsK
vecs <- RS.split enc =<< flip makeVector optionsSize `fmap` getStdGen
diff <- time (go enc vecs optionsIterations)
printf "Total time: %ds %dns\n" (sec diff) (nsec diff)
|
NicolasT/reedsolomon
|
bench/profiling.hs
|
Haskell
|
mit
| 2,602
|
module ParserSpec (spec) where
import Test.Hspec
import Test.Hspec.Expectations.Contrib
import Language.CFrp.Parser
import Language.CFrp.Syntax
spec :: Spec
spec = do
describe "parseExprString" $ do
it "parses arithmetic" $ do
"(1 + x * 3) - _a0 / 5"
`shouldBeParsedE`
SubE
(AddE
(IntE 1 ())
(MulE (VarE "x" ()) (IntE 3 ()) ())
())
(DivE (VarE "_a0" ()) (IntE 5 ()) ())
()
it "parses lambda" $ do
"\\x -> \\_y10 -> 1 + _y10"
`shouldBeParsedE`
AbsE "x"
(AbsE "_y10"
(AddE (IntE 1 ()) (VarE "_y10" ()) ())
())
()
it "parses application" $ do
"4 * (\\x y -> x + y + 1) 2 3 / 5"
`shouldBeParsedE`
DivE
(MulE
(IntE 4 ())
(AppE
(AbsE "x"
(AbsE "y"
(AddE
(AddE (VarE "x" ()) (VarE "y" ()) ())
(IntE 1 ())
())
())
())
[IntE 2 (), IntE 3 ()]
())
())
(IntE 5 ())
()
it "parses tuple" $ do
"(1, (x), (\\y -> y, (), 2))"
`shouldBeParsedE`
TupE [
IntE 1 ()
, VarE "x" ()
, TupE [AbsE "y" (VarE "y" ()) (), UnitE (), IntE 2 ()] ()
]
()
it "parses if" $ do
"if \\x -> x then \\y -> y + 2 else \\z -> z + 3"
`shouldBeParsedE`
IfE (AbsE "x" (VarE "x" ()) ())
(AbsE "y" (AddE (VarE "y" ()) (IntE 2 ()) ()) ())
(AbsE "z" (AddE (VarE "z" ()) (IntE 3 ()) ()) ())
()
it "parses let" $ do
"let f x y = x + y in f 1 2"
`shouldBeParsedE`
LetE "f"
(AbsE "x" (AbsE "y" (AddE (VarE "x" ()) (VarE "y" ()) ()) ()) ())
(AppE (VarE "f" ()) [IntE 1 (), IntE 2 ()] ())
()
it "parses sequence" $ do
"let f n = print_int n; print_int n in f 10; f 20"
`shouldBeParsedE`
LetE "f"
(AbsE "n"
(SeqE
(AppE (VarE "print_int" ()) [VarE "n" ()] ())
(AppE (VarE "print_int" ()) [VarE "n" ()] ())
())
())
(SeqE
(AppE (VarE "f" ()) [IntE 10 ()] ())
(AppE (VarE "f" ()) [IntE 20 ()] ())
())
()
describe "parseDeclString" $ do
it "parses input declaration" $ do
"%input lastPress :: Signal Int = last_press_input_node;"
`shouldBeParsedD`
InputD "lastPress" (SigT IntT) "last_press_input_node" ()
it "parses embed declaration" $ do
let code = "int func(int x)\n{\n if (x) { x += 1; };\nreturn x;\n}"
("%{\n" ++ code ++ "\n%}")
`shouldBeParsedD`
EmbedD code ()
describe "parseProgramString" $ do
it "parses program" $ do
let code = "int func(int x)\n{\n if (x) { x += 1; };\nreturn x;\n}"
let prog = unlines
[ "%input lastPress :: Signal Int = last_press_input_node;"
, "%{\n" ++ code ++ "\n%}"
, "f lastPress"
]
putStrLn prog
prog
`shouldBeParsedP`
([ InputD "lastPress" (SigT IntT) "last_press_input_node" ()
, EmbedD code ()
]
, AppE (VarE "f" ()) [VarE "lastPress" ()] ()
)
shouldBeParsedE :: String -> ParsedExpr -> Expectation
shouldBeParsedE = parsed parseExprString
shouldBeParsedD :: String -> ParsedDecl -> Expectation
shouldBeParsedD = parsed parseDeclString
shouldBeParsedP :: String -> ([ParsedDecl], ParsedExpr) -> Expectation
shouldBeParsedP = parsed parseProgramString
parsed :: (Show e, Show a, Eq a) => (String -> String -> Either e a) -> String -> a -> Expectation
parsed parser str expected = do
let got = parser "<test>" str
got `shouldSatisfy` isRight
let Right e = got
e `shouldBe` expected
|
psg-titech/cfrp
|
spec/ParserSpec.hs
|
Haskell
|
mit
| 3,924
|