| code (string, 5..1.03M chars) | repo_name (string, 5..90) | path (string, 4..158) | license (string, 15 classes) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE PatternGuards, ScopedTypeVariables, RecordWildCards, ViewPatterns #-}
-- | Check the input/output pairs in the tests/ directory
module Test.InputOutput(testInputOutput) where
import Control.Applicative
import Data.Tuple.Extra
import Control.Exception
import Control.Monad
import Data.List.Extra
import Data.IORef
import System.Directory
import System.FilePath
import System.Console.CmdArgs.Explicit
import System.Console.CmdArgs.Verbosity
import System.Exit
import System.IO.Extra
import Prelude
import Test.Util
testInputOutput :: ([String] -> IO ()) -> IO ()
testInputOutput main = do
xs <- getDirectoryContents "tests"
xs <- return $ filter ((==) ".test" . takeExtension) xs
forM_ xs $ \file -> do
ios <- parseInputOutputs <$> readFile ("tests" </> file)
forM_ (zip [1..] ios) $ \(i,io@InputOutput{..}) -> do
progress
forM_ files $ \(name,contents) -> do
createDirectoryIfMissing True $ takeDirectory name
writeFile name contents
checkInputOutput main io{name= "_" ++ takeBaseName file ++ "_" ++ show i}
mapM_ (removeFile . fst) $ concatMap files ios
data InputOutput = InputOutput
{name :: String
,files :: [(FilePath, String)]
,run :: [String]
,output :: String
,exit :: Maybe ExitCode
} deriving Eq
parseInputOutputs :: String -> [InputOutput]
parseInputOutputs = f z . lines
where
z = InputOutput "unknown" [] [] "" Nothing
interest x = any (`isPrefixOf` x) ["----","FILE","RUN","OUTPUT","EXIT"]
f io ((stripPrefix "RUN " -> Just flags):xs) = f io{run = splitArgs flags} xs
f io ((stripPrefix "EXIT " -> Just code):xs) = f io{exit = Just $ let i = read code in if i == 0 then ExitSuccess else ExitFailure i} xs
f io ((stripPrefix "FILE " -> Just file):xs) | (str,xs) <- g xs = f io{files = files io ++ [(file,unlines str)]} xs
f io ("OUTPUT":xs) | (str,xs) <- g xs = f io{output = unlines str} xs
f io ((isPrefixOf "----" -> True):xs) = [io | io /= z] ++ f z xs
f io [] = [io | io /= z]
f io (x:xs) = error $ "Unknown test item, " ++ x
g = first (reverse . dropWhile null . reverse) . break interest
---------------------------------------------------------------------
-- CHECK INPUT/OUTPUT PAIRS
checkInputOutput :: ([String] -> IO ()) -> InputOutput -> IO ()
checkInputOutput main InputOutput{..} = do
code <- newIORef ExitSuccess
got <- fmap (reverse . dropWhile null . reverse . map trimEnd . lines . fst) $ captureOutput $
handle (\(e::SomeException) -> print e) $
handle (\(e::ExitCode) -> writeIORef code e) $ do
bracket getVerbosity setVerbosity $ const $ setVerbosity Normal >> main run
code <- readIORef code
(want,got) <- return $ matchStarStar (lines output) got
if maybe False (/= code) exit then
failed
["TEST FAILURE IN tests/" ++ name
,"WRONG EXIT CODE"
,"GOT : " ++ show code
,"WANT: " ++ show exit
]
else if length got == length want && and (zipWith matchStar want got) then
passed
else do
let trail = replicate (max (length got) (length want)) "<EOF>"
let (i,g,w):_ = [(i,g,w) | (i,g,w) <- zip3 [1..] (got++trail) (want++trail), not $ matchStar w g]
failed $
["TEST FAILURE IN tests/" ++ name
,"DIFFER ON LINE: " ++ show i
,"GOT : " ++ g
,"WANT: " ++ w
,"FULL OUTPUT FOR GOT:"] ++ got
-- | First string may have stars in it (the want)
matchStar :: String -> String -> Bool
matchStar ('*':xs) ys = any (matchStar xs) $ tails ys
matchStar (x:xs) (y:ys) = x == y && matchStar xs ys
matchStar [] [] = True
matchStar _ _ = False
matchStarStar :: [String] -> [String] -> ([String], [String])
matchStarStar want got = case break (== "**") want of
(_, []) -> (want, got)
(w1,_:w2) -> (w1++w2, g1 ++ takeEnd (length w2) g2)
where (g1,g2) = splitAt (length w1) got
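-- A minimal illustrative sketch (assumed, not copied from the real test suite)
-- of the ".test" stanza format that parseInputOutputs accepts, and of how the
-- wildcard matchers above behave:
--
--   RUN --report=report.html sample.hs
--   FILE sample.hs
--   main = print 1
--   OUTPUT
--   sample.hs:1:1: Suggestion: *
--   EXIT 1
--   ----
--
--   matchStar "foo*baz" "foobarbaz" == True    -- '*' matches any substring
--   matchStar "foo" "foobar"        == False   -- otherwise the whole line must match
--   matchStarStar ["a","**","z"] ["a","b","c","z"] == (["a","z"], ["a","z"])
--                                              -- '**' discards the middle lines of got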
| bitemyapp/hlint | src/Test/InputOutput.hs | bsd-3-clause | 4,054 | 0 | 21 | 1,045 | 1,587 | 828 | 759 | 84 | 8 |
{-# LANGUAGE TemplateHaskell, TypeFamilyDependencies, PolyKinds #-}
module T8884 where
import Language.Haskell.TH
import System.IO
type family Foo a = r | r -> a where
Foo x = x
type family Baz (a :: k) = (r :: k) | r -> a
type instance Baz x = x
$( do FamilyI foo@(ClosedTypeFamilyD (TypeFamilyHead _ tvbs1 res1 m_kind1)
[TySynEqn (Just bndrs1) (AppT _ lhs1) rhs1])
[] <- reify ''Foo
FamilyI baz@(OpenTypeFamilyD (TypeFamilyHead _ tvbs2 res2 m_kind2))
[inst@(TySynInstD (TySynEqn (Just bndrs2) (AppT _ lhs2) rhs2))] <- reify ''Baz
runIO $ putStrLn $ pprint foo
runIO $ putStrLn $ pprint baz
runIO $ putStrLn $ pprint inst
runIO $ hFlush stdout
return [ ClosedTypeFamilyD
(TypeFamilyHead (mkName "Foo'") tvbs1 res1 m_kind1)
[TySynEqn (Just bndrs1) (AppT (ConT (mkName "Foo'")) lhs1) rhs1]
, OpenTypeFamilyD
(TypeFamilyHead (mkName "Baz'") tvbs2 res2 m_kind2)
, TySynInstD (TySynEqn (Just bndrs2) (AppT (ConT (mkName "Baz'")) lhs2) rhs2)] )
| sdiehl/ghc | testsuite/tests/th/T8884.hs | bsd-3-clause | 1,107 | 0 | 19 | 313 | 408 | 210 | 198 | 23 | 0 |
import TiProgram(tcProgramFiles)
import PropParser(parse)
import PropSyntax(hsModName)
import ReAssocProp()
--import ReAssoc
--import PrettyPrint
import TiPropDecorate()
import ReAssocProp()
import ScopeNamesProp()
import NameMapsProp()
import Prop2Alfa
import Hs2Alfa(modPath)
import FileConv(printModule)
import System(getArgs)
import DirUtils(expand)
--import UTF8
main =
writeProgram . transProgram =<< tcProgram =<< expand =<< getArgs
where
writeProgram = mapM writeModule
writeModule (n,m) =
do putStrLn path
writeFile path ( {-toFile $-}prefix++printModule m)
where path=modPath n
tcProgram files = tcProgramFiles parse files
transProgram (mss,env) = map transModule' (concat mss)
where
transModule' m = (hsModName m,transModule (m,[]) (undefined,env))
prefix =
unlines [
magic,
"",
"--#include \"Haskell.alfa\"",
"{-# Alfa hiding on #-}"]
magic = "-- Automatically converted from Haskell by hs2alfa..."
| forste/haReFork | tools/hs2alfa/tstProp2Alfa.hs | bsd-3-clause | 1,002 | 6 | 12 | 197 | 280 | 158 | 122 | 30 | 1 |
module Ch6 (
(^!),
myand, myconcat, myreplicate2, (!!!), myelem,
merge,
msort,
mysum, mytake, mylast,
) where
-- 1.
(^!) :: Int -> Int -> Int
m ^! 0 = 1
m ^! n = m * (m ^! (n-1))
-- 2.
{-
length [1, 2, 3]
= 1 + (length [2, 3])
= 1 + (1 + (length [3]))
= 1 + (1 + (1 + length []))
= 1 + (1 + (1 + 0))
= 3
drop 3 [1, 2, 3, 4, 5]
= drop 2 [2, 3, 4, 5]
= drop 1 [3, 4, 5]
= drop 0 [4, 5]
= [4, 5]
init [1, 2, 3]
= 1 : init [2, 3]
= 1 : 2 : init [3]
= 1 : 2 : []
= [1, 2]
-}
-- 3.
myand :: [Bool] -> Bool
myand [] = True
myand (x:xs) | x = myand xs
| otherwise = False
myconcat :: [[a]] -> [a]
myconcat [] = []
myconcat (x:xs) = x ++ myconcat xs
myreplicate2 :: Int -> a -> [a]
myreplicate2 0 x = []
myreplicate2 n x = x : myreplicate2 (n-1) x
(!!!) :: [a] -> Int -> a
(x:xs) !!! 0 = x
(x:xs) !!! n = xs !!! (n-1)
myelem :: Eq a => a -> [a] -> Bool
myelem v [] = False
myelem v (x:xs) | v == x = True
| otherwise = myelem v xs
-- 4.
merge :: Ord a => [a] -> [a] -> [a]
merge x [] = x
merge [] y = y
merge (x:xs) (y:ys) | x < y = x : merge xs (y:ys)
| otherwise = y : merge (x:xs) ys
-- 5.
halve :: [a] -> ([a], [a])
halve xs = splitAt n xs
where n = length xs `div` 2
msort :: Ord a => [a] -> [a]
msort [] = []
msort [x] = [x]
msort xs = merge (msort ms) (msort ns)
where (ms, ns) = halve xs
-- 6.
mysum :: Num a => [a] -> a
-- mysum [] = 0
-- mysum (x:xs) = x + mysum xs
mysum xs = foldr (+) 0 xs
mytake :: Int -> [a] -> [a]
mytake 0 _ = []
mytake n [] = []
mytake n (x:xs) = x : mytake (n-1) xs
mylast :: [a] -> a
mylast [x] = x
mylast (_:xs) = mylast xs
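-- A few illustrative evaluations (assumed examples, not from the book):
--   5 ^! 3              == 125
--   merge [1,3,5] [2,4] == [1,2,3,4,5]
--   msort [3,1,2]       == [1,2,3]
--   mytake 2 [1,2,3]    == [1,2]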
| nozaq/programming-in-haskell | src/Ch6.hs | mit | 1,653 | 0 | 9 | 506 | 836 | 448 | 388 | 48 | 1 |
{-# LANGUAGE NoStarIsType #-}
{-# LANGUAGE TypeFamilies, KindSignatures, DataKinds, TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables, FlexibleContexts #-}
{-# LANGUAGE TypeApplications, PartialTypeSignatures #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ConstraintKinds #-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.Normalise #-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.KnownNat.Solver #-}
{-# OPTIONS_GHC -fconstraint-solver-iterations=20 #-}
module Main where
import Control.Applicative
import Control.Category
import Control.Concurrent
import Control.DeepSeq
import Control.Monad
import Control.Monad.Trans
import Control.Monad.IO.Class
import qualified Control.Monad.State.Lazy as SL
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as LB
import qualified Data.ByteString.Lazy.Char8 as LC
import Data.Char
import Data.Foldable
import Data.Functor.Identity
import Data.IORef
import Data.List
import Data.Maybe
import Data.Monoid
import Data.Proxy
import Data.Random
import Data.Random.Distribution.Categorical
import Data.Traversable
import Data.Type.Equality ((:~:)(..))
import Data.Vector (Vector)
import qualified Data.Vector.Generic as V
import qualified Data.Vector.Unboxed as U
import Data.Word
import Debug.Trace
import GHC.TypeLits
import Options.Applicative
import System.Clock
import System.Environment
import System.IO
import System.IO.Unsafe
import System.Random
import Text.Printf
import AI.Funn.CL.Blob (Blob(..))
import qualified AI.Funn.CL.Blob as Blob
import AI.Funn.CL.Flat
import AI.Funn.CL.LSTM
import AI.Funn.CL.Mixing
import AI.Funn.CL.MonadCL
import AI.Funn.Common
import AI.Funn.Diff.Diff (Diff(..), Derivable(..), (>>>))
import qualified AI.Funn.Diff.Diff as Diff
import AI.Funn.Diff.Pointed
import AI.Funn.Diff.RNN
import qualified AI.Funn.Flat.Blob as F
import AI.Funn.Flat.ParBox
import AI.Funn.SGD
import AI.Funn.Space
import AI.Funn.TypeLits
sampleIO :: MonadIO m => RVar a -> m a
sampleIO v = liftIO (runRVar v StdRandom)
deepseqM :: (Monad m, NFData a) => a -> m ()
deepseqM x = deepseq x (return ())
unfoldM :: Monad m => Int -> m a -> m [a]
unfoldM 0 m = return []
unfoldM n m = (:) <$> m <*> unfoldM (n-1) m
onehot :: Vector (Blob Double 256)
onehot = unsafePerformIO $ sequenceA $ V.generate 256 gen
where
gen i = Blob.fromList (replicate i 0 ++ [1] ++ replicate (255 - i) 0)
fromGPU :: (MonadIO m, KnownNat n) => Blob Double n -> m (F.Blob n)
fromGPU b = F.fromList <$> Blob.toList b
toGPU :: (MonadIO m, KnownNat n) => F.Blob n -> m (Blob Double n)
toGPU b = Blob.fromList (F.toList b)
sampleRNN :: (MonadIO m) => Int -> st -> (st -> Blob Double 256 -> m (st, Blob Double 256)) -> m [Char]
sampleRNN n s0 next = SL.evalStateT (unfoldM n step) (s0, 0)
where
step = do
(s, c) <- SL.get
(new_s, new_q) <- lift $ next s (onehot V.! c)
new_qc <- fromGPU new_q
let
exps = V.map exp (F.getBlob new_qc)
factor = 1 / V.sum exps
ps = V.map (*factor) exps
new_c <- sampleIO (categorical $ zip (V.toList ps) [0 :: Int ..])
SL.put (new_s, new_c)
return (chr new_c)
data Commands = Train (Maybe FilePath) FilePath (Maybe FilePath) (Maybe FilePath) Int Double Integer
| Sample FilePath (Maybe Int)
deriving (Show)
instance (Additive m a, Applicative m) => Zero m (Vector a) where
zero = pure V.empty
instance (Additive m a, Monad m) => Semi m (Vector a) where
plus xs ys
| V.null xs = pure ys
| V.null ys = pure xs
| otherwise = V.zipWithM plus xs ys
instance (Additive m a, Monad m) => Additive m (Vector a)
type BlobD = Blob Double
lstm :: (MonadIO m, KnownNat n)
=> Ref s (BlobD (2*n))
-> Ref s (BlobD n)
-> Ref s (BlobD (4*n))
-> Pointed m s (Ref s (BlobD n), Ref s (BlobD n))
lstm pars hidden inputs = do ins <- packrec (pars, (hidden, inputs))
out <- lstmDiff <-- ins
unpack out
amix :: (MonadIO m, KnownNat size, KnownNat a, KnownNat b)
=> Proxy size -> Ref s (BlobD _) -> Ref s (BlobD a)
-> Pointed m s (Ref s (BlobD b))
amix p pars input = do ins <- pack (pars, input)
pushDiff ins (amixDiff p)
tanhP :: (MonadIO m, KnownNat a) => Ref s (BlobD a) -> Pointed m s (Ref s (BlobD a))
tanhP input = pushDiff input tanhDiff
split3 :: (MonadIO m, KnownNat a, KnownNat b, KnownNat c)
=> Ref s (BlobD (a+b+c))
-> Pointed m s (Ref s (BlobD a), Ref s (BlobD b), Ref s (BlobD c))
split3 input = do (part1, part23) <- splitDiff =<- input
(part2, part3) <- splitDiff =<- part23
return (part1, part2, part3)
type Affine m a = (Derivable a, Additive m (D a))
scanlP :: (Monad m, Affine m x, Affine m st, Affine m i, Affine m o)
=> Diff m (x,(st,i)) (st, o)
-> Ref s x
-> Ref s st
-> Ref s (Vector i)
-> Pointed m s (Ref s st, Ref s (Vector o))
scanlP diff x st vi = do ins <- packrec (x, (st, vi))
out <- pushDiff ins (scanlDiff diff)
unpack out
step :: (MonadIO m, KnownNat modelSize) => Proxy modelSize -> Diff m (BlobD _, ((BlobD modelSize, BlobD modelSize), BlobD 256)) ((BlobD modelSize, BlobD modelSize), BlobD 256)
step Proxy = runPointed $ \in_all -> do
(pars, ((hidden, prev), char)) <- unpackrec in_all
(p1, p2, p3) <- split3 pars
combined_in <- mergeDiff -<= (prev, char)
lstm_in <- tanhP =<< amix (Proxy @ 5) p1 combined_in
(hidden_out, lstm_out) <- lstm p2 hidden lstm_in
final_dist <- amix (Proxy @ 5) p3 lstm_out
packrec ((hidden_out, lstm_out), final_dist)
network :: (MonadIO m, KnownNat modelSize) => Proxy modelSize -> Diff m (BlobD _, Vector (BlobD 256)) (Vector (BlobD 256))
network modelSize = runPointed $ \in_all -> do
(pars, inputs) <- unpack in_all
(step_pars, h0, c0) <- split3 pars
initial_state <- pack (h0, c0)
(s, vo) <- scanlP (step modelSize) step_pars initial_state inputs
return vo
evalNetwork :: (MonadIO m, KnownNat modelSize) => Proxy modelSize -> Diff m (BlobD _, (Vector (BlobD 256), Vector Int)) Double
evalNetwork size = Diff.assocL >>> Diff.first network' >>> zipDiff >>> mapDiff softmaxCost >>> vsumDiff
where
network' = network size
train :: KnownNat modelSize => Proxy modelSize -> BlobD _ -> FilePath -> (Maybe FilePath) -> (Maybe FilePath) -> Int -> Double -> IO ()
train modelSize initialParameters input savefile logfile chunkSize learningRate =
do
let
step' = step modelSize
evalNetwork' = evalNetwork modelSize
runrnn par s c = do
((c', o), _) <- Diff.runDiff step' (par, (s, c))
return (c', o)
print (natVal initialParameters)
text <- B.readFile input
running_average <- newIORef 0
running_count <- newIORef 0
iteration <- newIORef (0 :: Int)
startTime <- getTime ProcessCPUTime
let
α :: Double
α = 0.99
tvec = V.fromList (B.unpack text) :: U.Vector Word8
ovec = V.map (\c -> onehot V.! (fromIntegral c)) (V.convert tvec) :: Vector (BlobD 256)
source :: IO (Vector (BlobD 256), Vector Int)
source = do s <- sampleIO (uniform 0 (V.length tvec - chunkSize))
let
input = (onehot V.! 0) `V.cons` V.slice s (chunkSize - 1) ovec
output = V.map fromIntegral (V.convert (V.slice s chunkSize tvec))
return (input, output)
objective p = do
sample <- source
(err, k) <- Diff.runDiff evalNetwork' (p,sample)
when (not (isInfinite err || isNaN err)) $ do
modifyIORef' running_average (\x -> (α*x + (1 - α)*err))
modifyIORef' running_count (\x -> (α*x + (1 - α)*1))
-- putStrLn $ "Error: " ++ show err
(dp, _) <- k 1
return dp
next p m = do
x <- do q <- readIORef running_average
w <- readIORef running_count
return ((q / w) / fromIntegral chunkSize)
modifyIORef' iteration (+1)
i <- readIORef iteration
now <- getTime ProcessCPUTime
let tdiff = fromIntegral (toNanoSecs (now - startTime)) / (10^9) :: Double
putStrLn $ printf "[% 11.4f | %i] %f" tdiff i x
when (i `mod` 50 == 0) $ do
let (par, c0) = Blob.splitBlob p
msg <- sampleRNN 200 (Blob.splitBlob c0) (runrnn par)
putStrLn (filter (\c -> isPrint c || isSpace c) msg)
when (i `mod` 100 == 0) $ do
case savefile of
Just savefile -> do
sp <- fromGPU p
let encoded = encodeToByteString (natVal modelSize, ParBox sp)
LB.writeFile (printf "%s-%6.6i-%5.5f.bin" savefile i x) $ encoded
LB.writeFile (savefile ++ "-latest.bin") $ encoded
Nothing -> return ()
m
liftIO $ putStrLn "before adam"
adam (Blob.adamBlob { adam_α = learningRate }) initialParameters objective next
main :: IO ()
main = do
hSetBuffering stdout LineBuffering
let optparser = (info (subparser $
command "train"
(info (Train
<$> optional (strOption (long "initial" <> action "file"))
<*> strOption (long "input" <> action "file")
<*> optional (strOption (long "output" <> action "file"))
<*> optional (strOption (long "log" <> action "file"))
<*> (option auto (long "chunksize") <|> pure 50)
<*> (option auto (long "lr") <|> pure 0.001)
<*> (option auto (long "modelSize") <|> pure 200)
)
(progDesc "Train NN."))
<>
command "sample"
(info (Sample
<$> strOption (long "snapshot" <> action "file")
<*> optional (option auto (long "length")))
(progDesc "Sample output.")))
fullDesc)
cmd <- customExecParser (prefs showHelpOnError) optparser
initOpenCL
case cmd of
Train Nothing input savefile logfile chunkSize lr modelSize -> do
withNat modelSize $ \(proxy :: Proxy modelSize) -> do
initial_spar <- sampleIO (F.generate $ uniform (-0.5) (0.5))
initial_par <- toGPU initial_spar
train proxy initial_par input savefile logfile chunkSize lr
Train (Just resumepath) input savefile logfile chunkSize lr _ -> do
(modelSize, box) <- decodeOrError <$> LB.readFile resumepath
withNat modelSize $ \(proxy :: Proxy modelSize) ->
case openParBox box of
Just initial_spar -> do
initial_par <- toGPU initial_spar
train proxy initial_par input savefile logfile chunkSize lr
Nothing -> error "model mismatch"
Sample initpath length -> do
(modelSize, box) <- decodeOrError <$> LB.readFile initpath
let n = fromMaybe 500 length
withNat modelSize $ \(proxy :: Proxy modelSize) ->
case openParBox box of
Just sinitial -> do
initial <- toGPU sinitial
let
(par, c0) = Blob.splitBlob initial
runrnn s c = do
((c', o), _) <- Diff.runDiff (step proxy) (par, (s, c))
return (c', o)
msg <- sampleRNN n (Blob.splitBlob c0) runrnn
putStrLn msg
Nothing -> error "model mismatch"
| nshepperd/funn | cldiff.hs | mit | 12,290 | 1 | 31 | 3,958 | 4,402 | 2,242 | 2,160 | 262 | 5 |
module Main
(
)
where
| lhoghu/yahoo-portfolio-manager | ypm-server/Main.hs | mit | 35 | 0 | 3 | 18 | 7 | 5 | 2 | 2 | 0 |
import Data.List
import Data.Char
count :: (Eq a) => a -> [a] -> Int
count n = length . filter (== n)
multiUnion :: (Eq a) => [a] -> [a] -> [a]
multiUnion xs ys = concat [(replicate (count x elements) x) | x <- nub elements]
where elements = xs ++ ys
multiIntersection :: (Eq a) => [a] -> [a] -> [a]
multiIntersection xs ys = concat [ (replicate (min (count x xs) (count x ys)) x) | x <- nub xs]
notNegative :: Int -> Int
notNegative x
| x >= 0 = x
| otherwise = 0
multiDifference :: (Eq a) => [a] -> [a] -> [a]
multiDifference xs ys = concat [(replicate (notNegative ((count x xs) - (count x ys))) x) | x <- nub xs]
divides :: Int -> Int -> Bool
divides d x = mod x d == 0
divisors :: Int -> [Int]
divisors x = [d | d <- [1..x], d `divides` x]
prime :: Int -> Bool
prime x = divisors x == [1, x]
goldbach :: Int -> [(Int, Int)]
goldbach n = [(x, n - x) | x <- [2..n-2], prime x, prime (n-x)]
encodeLetter :: Int -> Char -> Char
encodeLetter n letter
| n > 26 || n < 0 = encodeLetter (abs (mod n 26)) letter
| isLetter (chr code) = chr code
| otherwise = chr (code - 26)
where code = n + (ord letter)
encodeIfLetter :: Int -> Char -> Char
encodeIfLetter n symbol
| isLetter symbol = encodeLetter n symbol
| otherwise = symbol
cesarCypher :: Int -> String -> String
cesarCypher n = map (encodeIfLetter n)
insertAt :: Int -> a -> [a] -> [a]
insertAt 0 el xs = el : xs
insertAt n el (x:xs) = x : (insertAt (n - 1) el xs)
allInsertions :: a -> [a] -> [[a]]
allInsertions el xs = zipWith3 insertAt [0..(length xs)] (repeat el) (repeat xs)
allInsertionsInLists :: a -> [[a]] -> [[a]]
allInsertionsInLists _ [] = []
allInsertionsInLists x (xs:xss) = allInsertions x xs ++ allInsertionsInLists x xss
permute :: [a] -> [[a]]
permute [] = [[]]
permute (x:xs) = allInsertionsInLists x (permute xs)
insertInAll :: a -> [[a]] -> [[a]]
insertInAll x = map ((:) x)
combinations :: Int -> [a] -> [[a]]
combinations _ [] = [[]]
combinations 0 (x:xs) = []
combinations 1 (x:xs) = map (\el -> [el]) (x:xs)
combinations n (x:xs)
| n > length (x:xs) = []
| otherwise = insertInAll x (combinations (n - 1) xs) ++ combinations n xs
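-- Illustrative examples (assumed, not part of the original exercise file):
--   multiUnion [1,1,2] [1,3]          == [1,1,1,2,3]
--   multiIntersection [1,1,2] [1,1,3] == [1,1]
--   goldbach 10                       == [(3,7),(5,5),(7,3)]
--   cesarCypher 3 "abc"               == "def"
--   combinations 2 [1,2,3]            == [[1,2],[1,3],[2,3]]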
| stoimenoff/functional-programming | exercises/ex14.hs | mit | 2,174 | 0 | 14 | 492 | 1,230 | 642 | 588 | 55 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
module CommandArgs
( HDevTools(..)
, loadHDevTools
)
where
import qualified Config
import System.Console.CmdArgs.Implicit
import System.Environment (getProgName)
import System.Info (arch, os)
#ifdef CABAL
import Data.Version (showVersion)
import Paths_hdevtools (version)
#endif
programVersion :: String
programVersion =
#ifdef CABAL
"version " ++ showVersion version
#else
"unknown-version (not built with cabal)"
#endif
fullVersion :: String
fullVersion =
concat
[ programVersion
, " (ghc-", Config.cProjectVersion, "-", arch, "-", os, ")"
]
data HDevTools
= Admin
{ socket :: Maybe FilePath
, start_server :: Bool
, noDaemon :: Bool
, status :: Bool
, stop_server :: Bool
, no_cabal :: Bool
, verbose :: Bool
}
| Check
{ socket :: Maybe FilePath
, ghcOpts :: [String]
, file :: String
, no_cabal :: Bool
, verbose :: Bool
}
| ModuleFile
{ socket :: Maybe FilePath
, ghcOpts :: [String]
, module_ :: String
, no_cabal :: Bool
, verbose :: Bool
}
| Info
{ socket :: Maybe FilePath
, ghcOpts :: [String]
, file :: String
, identifier :: String
, no_cabal :: Bool
, verbose :: Bool
}
| Type
{ socket :: Maybe FilePath
, ghcOpts :: [String]
, file :: String
, line :: Int
, col :: Int
, no_cabal :: Bool
, verbose :: Bool
}
deriving (Show, Data, Typeable)
dummyAdmin :: HDevTools
dummyAdmin = Admin
{ socket = Nothing
, start_server = False
, noDaemon = False
, status = False
, stop_server = False
, no_cabal = False
, verbose = False
}
dummyCheck :: HDevTools
dummyCheck = Check
{ socket = Nothing
, ghcOpts = []
, file = ""
, no_cabal = False
, verbose = False
}
dummyModuleFile :: HDevTools
dummyModuleFile = ModuleFile
{ socket = Nothing
, ghcOpts = []
, module_ = ""
, no_cabal = False
, verbose = False
}
dummyInfo :: HDevTools
dummyInfo = Info
{ socket = Nothing
, ghcOpts = []
, file = ""
, identifier = ""
, no_cabal = False
, verbose = False
}
dummyType :: HDevTools
dummyType = Type
{ socket = Nothing
, ghcOpts = []
, file = ""
, line = 0
, col = 0
, no_cabal = False
, verbose = False
}
admin :: Annotate Ann
admin = record dummyAdmin
[ socket := def += typFile += help "socket file to use"
, start_server := def += help "start server"
, noDaemon := def += help "do not daemonize (only if --start-server)"
, status := def += help "show status of server"
, stop_server := def += help "shutdown the server"
, no_cabal := def += help "don't search for a cabal file and behave like there wasn't any cabal file"
, verbose := def += help "enable verbose mode"
] += help "Interactions with the server"
check :: Annotate Ann
check = record dummyCheck
[ socket := def += typFile += help "socket file to use"
, ghcOpts := def += typ "OPTION" += help "ghc options"
, file := def += typFile += argPos 0 += opt ""
] += help "Check a haskell source file for errors and warnings"
moduleFile :: Annotate Ann
moduleFile = record dummyModuleFile
[ socket := def += typFile += help "socket file to use"
, ghcOpts := def += typ "OPTION" += help "ghc options"
, module_ := def += typ "MODULE" += argPos 0
] += help "Get the haskell source file corresponding to a module name"
info :: Annotate Ann
info = record dummyInfo
[ socket := def += typFile += help "socket file to use"
, ghcOpts := def += typ "OPTION" += help "ghc options"
, file := def += typFile += argPos 0 += opt ""
, identifier := def += typ "IDENTIFIER" += argPos 1
] += help "Get info from GHC about the specified identifier"
type_ :: Annotate Ann
type_ = record dummyType
[ socket := def += typFile += help "socket file to use"
, ghcOpts := def += typ "OPTION" += help "ghc options"
, file := def += typFile += argPos 0 += opt ""
, line := def += typ "LINE" += argPos 1
, col := def += typ "COLUMN" += argPos 2
] += help "Get the type of the expression at the specified line and column"
full :: String -> Annotate Ann
full progName = modes_ [admin += auto, check, moduleFile, info, type_]
+= helpArg [name "h", groupname "Help"]
+= versionArg [groupname "Help"]
+= program progName
+= summary (progName ++ ": " ++ fullVersion)
loadHDevTools :: IO HDevTools
loadHDevTools = do
progName <- getProgName
cmdArgs_ (full progName) :: IO HDevTools
| bennofs/hdevtools | src/CommandArgs.hs | mit | 5,149 | 0 | 11 | 1,768 | 1,309 | 732 | 577 | 141 | 1 |
{-# OPTIONS #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Python.Pyn.Lexer
-- Copyright : (c) 2016 Savor d'Isavano
-- License : MIT
-- Maintainer : anohigisavay@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- Lexical analysis for Pyn
-- See: <http://www.python.org/doc/2.6/reference/lexical_analysis.html>.
-----------------------------------------------------------------------------
module Language.Python.Pyn.Lexer (
-- * Lexical analysis
lex,
lexOneToken) where
import Prelude hiding (lex)
import Language.Python.Pyn.Parser.Lexer (lexToken, initStartCodeStack)
import Language.Python.Common.Token as Token
import Language.Python.Common.SrcLocation (initialSrcLocation)
import Language.Python.Common.ParserMonad
(ParseState (input), P, runParser, execParser, ParseError, initialState)
-- | Parse a string into a list of Python Tokens, or return an error.
lex :: String -- ^ The input stream (python source code).
-> String -- ^ The name of the python source (filename or input device).
-> Either ParseError [Token] -- ^ An error or a list of tokens.
lex input srcName =
execParser lexer state
where
initLoc = initialSrcLocation srcName
state = initialState initLoc input initStartCodeStack
-- | Try to lex the first token in an input string. Return either a parse error
-- or a pair containing the next token and the rest of the input after the token.
lexOneToken :: String -- ^ The input stream (python source code).
-> String -- ^ The name of the python source (filename or input device).
-> Either ParseError (Token, String) -- ^ An error or the next token and the rest of the input after the token.
lexOneToken source srcName =
case runParser lexToken state of
Left err -> Left err
Right (tok, state) -> Right (tok, input state)
where
initLoc = initialSrcLocation srcName
state = initialState initLoc source initStartCodeStack
lexer :: P [Token]
lexer = loop []
where
loop toks = do
tok <- lexToken
case tok of
EOFToken {} -> return (reverse toks)
other -> loop (tok:toks)
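-- Hypothetical usage sketch (the snippet below is an assumption, not part of
-- this module):
--   example :: IO ()
--   example = case lex "x = 1\n" "<interactive>" of
--     Left err   -> putStrLn ("lex error: " ++ show err)
--     Right toks -> mapM_ print toks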
| TOSPIO/pyn | lib/Language/Python/Pyn/Lexer.hs | mit | 2,199 | 0 | 14 | 431 | 358 | 207 | 151 | 33 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Pianola (
Selector(..),
missing,
winnow,
context,
collect,
liftQ,
Pianola(..),
Delay,
peek,
peekMaybe,
retryPeek1s,
retryPeek,
poke,
pokeMaybe,
retryPoke1s,
retryPoke,
sleep,
with,
withMaybe,
withRetry1s,
withRetry,
ralentize,
ralentizeByTag,
autolog,
feed
) where
import Prelude hiding (catch,(.))
import Data.Functor.Compose
import Data.Monoid
import Control.Category
import Control.Arrow
import Control.Applicative
import Control.Monad
import Control.Monad.Free
import Control.Monad.Logic
import Control.Monad.Error
import Control.Monad.Trans
import Control.Monad.Trans.Maybe
import Control.Comonad
import Control.Comonad.Trans.Env
import Pipes
import Pianola.Util
--type Selector m l o a = o -> LogicT (Producer l (Query m)) a
type Selector m l o a = Kleisli (LogicT (Producer l (Query m))) o a
collect :: (Monad m, MonadPlus n) => Selector m l o a -> Selector m l o (n a)
collect = Kleisli . fmap f . runKleisli
where f x = lift $ observeAllT x >>= return . replusify
liftQ :: Monad m => Selector m l (Query m a) a
liftQ = Kleisli $ lift . lift
missing :: Monad m => Selector m l o a -> Selector m l o ()
missing k = Kleisli $ fmap lnot $ runKleisli k
winnow :: Monad m => Selector m l o a -> Selector m l o a
winnow k = Kleisli $ fmap once $ runKleisli k
-- arr ask would do the same job, if working with arrows...
context :: (Comonad c, Monad m) => Kleisli m (EnvT e c a) e
context = arr ask
--type ObserverF m l o = Compose ((->) o) (LogicT (Producer l (Query m)))
type ObserverF m l o = WrappedArrow (Kleisli (LogicT (Producer l (Query m)))) o
type Observer m l o = Free (ObserverF m l o)
focus :: Monad m => Selector m l o' o -> Observer m l o a -> Observer m l o' a
focus prefix v =
let nattrans (WrapArrow k) = WrapArrow $ prefix >>> k
in hoistFree nattrans v
runObserver :: Monad m => m o -> Observer m l o a -> ErrorT String (Producer l m) a
runObserver _ (Pure b) = return b
runObserver mom (Free f) =
let tomaybet = MaybeT . liftM replusify . observeManyT 1
toerrort = ErrorT . liftM (maybe (Left "# Selector without results.") Right) . runMaybeT
squint = fmap $ hoist (hoist runQuery) . toerrort . tomaybet
in join $ (lift . lift $ mom) >>= (squint . runKleisli . unwrapArrow $ runObserver mom <$> f)
type Delay = Int
newtype Pianola m l o a = Pianola
{ unPianola :: Producer (Change m) (Producer Delay (ErrorT String (Producer l (Observer m l o)))) a
} deriving (Functor,Monad,MonadError String)
instance Monad m => Loggy (Pianola m LogEntry o) where
logentry = Pianola . lift . lift . lift . logentry
peek :: Monad m => Selector m l o a -> Pianola m l o a
peek = Pianola . lift . lift . lift . lift . liftF . WrapArrow
peekMaybe :: Monad m => Selector m l o a -> Pianola m l o (Maybe a)
peekMaybe = peek . collect
retryPeek1s :: Monad m => Int -> Selector m l o a -> Pianola m l o (Maybe a)
retryPeek1s = retryPeek $ sleep 1
retryPeek :: Monad m => Pianola m l o u -> Int -> Selector m l o a -> Pianola m l o (Maybe a)
retryPeek delay times glance =
let retryPeek' [] = return Nothing
retryPeek' (x:xs) = do
z <- peekMaybe x
maybe (delay >> retryPeek' xs) (return.return) z
in retryPeek' $ replicate times glance
inject :: Monad m => Change m -> Pianola m l o ()
inject = Pianola . yield
poke :: Monad m => Selector m l o (Change m) -> Pianola m l o ()
poke locator = peek locator >>= inject
pokeMaybe :: Monad m => Selector m l o (Change m) -> Pianola m l o (Maybe ())
pokeMaybe locator = do
actionMaybe <- peekMaybe locator
case actionMaybe of
Nothing -> return Nothing
Just action -> inject action >> return (Just ())
retryPoke1s :: Monad m => Int -> Selector m l o (Change m) -> Pianola m l o (Maybe ())
retryPoke1s = retryPoke $ sleep 1
retryPoke :: Monad m => Pianola m l o u -> Int -> Selector m l o (Change m) -> Pianola m l o (Maybe ())
retryPoke delay times glance = do
actionMaybe <- retryPeek delay times glance
case actionMaybe of
Nothing -> return Nothing
Just action -> inject action >> return (Just ())
sleep :: Monad m => Delay -> Pianola m l o ()
sleep = Pianola . lift . yield
with :: Monad m => Selector m l o' o -> Pianola m l o a -> Pianola m l o' a
with prefix pi =
Pianola $ hoist (hoist (hoist (hoist $ focus prefix))) $ unPianola pi
withMaybe :: Monad m => Selector m l o' o -> Pianola m l o a -> Pianola m l o' (Maybe a)
withMaybe glance pi = do
r <- peekMaybe glance
case r of
Nothing -> return Nothing
Just _ -> with glance pi >>= return . Just
withRetry1s :: Monad m => Int -> Selector m l o' o -> Pianola m l o a -> Pianola m l o' (Maybe a)
withRetry1s = withRetry $ sleep 1
withRetry :: Monad m => Pianola m l o' u -> Int -> Selector m l o' o -> Pianola m l o a -> Pianola m l o' (Maybe a)
withRetry delay times glance pi = do
r <- retryPeek delay times glance
case r of
Nothing -> return Nothing
Just _ -> with glance pi >>= return . Just
ralentize :: Delay -> Pianola m l o a -> Pianola m l o a
ralentize = ralentizeByTag $ const True
ralentizeByTag :: ([Tag] -> Bool) -> Delay -> Pianola m l o a -> Pianola m l o a
ralentizeByTag f delay (Pianola p) =
let delayer = forever $ do
s <- await
yield s
when (f . tags $ s) (lift $ yield delay)
in Pianola $ p >-> delayer
autolog :: Pianola m LogEntry o a -> Pianola m LogEntry o a
autolog (Pianola p) =
let logger = forever $ do
s <- await
yield s
lift . lift . lift . logmsg $ fmtAction s
fmtAction s =
"### Executed action with tags:" <> mconcat ( map (" "<>) . tags $ s )
in Pianola $ p >-> logger
feed :: Monad m => m o -> Pianola m l o a -> Producer Delay (ErrorT String (Producer l m)) a
feed mom pi =
let smashMaybe m = runErrorT m >>= lift . ErrorT . return
smashProducer = forever $
await >>= lift . lift . yield
smash mp = runEffect $ smashMaybe mp >-> smashProducer
pi' = hoist (hoist (smash . hoist (hoist $ runObserver mom))) $ unPianola pi
injector = forever $ do
s <- await
lift . lift . lift . lift $ unseal s
in runEffect $ pi' >-> injector
| danidiaz/pianola | src/Pianola.hs | mit | 6,628 | 0 | 18 | 1,844 | 2,698 | 1,342 | 1,356 | 155 | 2 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.DeviceProximityEvent
(newDeviceProximityEvent, getValue, getMin, getMax,
DeviceProximityEvent(..), gTypeDeviceProximityEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DeviceProximityEvent Mozilla DeviceProximityEvent documentation>
newDeviceProximityEvent ::
(MonadDOM m, ToJSString type') =>
type' -> Maybe DeviceProximityEventInit -> m DeviceProximityEvent
newDeviceProximityEvent type' eventInitDict
= liftDOM
(DeviceProximityEvent <$>
new (jsg "DeviceProximityEvent")
[toJSVal type', toJSVal eventInitDict])
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DeviceProximityEvent.value Mozilla DeviceProximityEvent.value documentation>
getValue :: (MonadDOM m) => DeviceProximityEvent -> m Double
getValue self = liftDOM ((self ^. js "value") >>= valToNumber)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DeviceProximityEvent.min Mozilla DeviceProximityEvent.min documentation>
getMin :: (MonadDOM m) => DeviceProximityEvent -> m Double
getMin self = liftDOM ((self ^. js "min") >>= valToNumber)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DeviceProximityEvent.max Mozilla DeviceProximityEvent.max documentation>
getMax :: (MonadDOM m) => DeviceProximityEvent -> m Double
getMax self = liftDOM ((self ^. js "max") >>= valToNumber)
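-- Hypothetical usage sketch (the helper below is an assumption, not part of the
-- generated bindings):
--   readProximity :: MonadDOM m => DeviceProximityEvent -> m (Double, Double, Double)
--   readProximity ev = (,,) <$> getValue ev <*> getMin ev <*> getMax ev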
| ghcjs/jsaddle-dom | src/JSDOM/Generated/DeviceProximityEvent.hs | mit | 2,325 | 0 | 10 | 322 | 532 | 321 | 211 | 33 | 1 |
module Unison.Runtime.SharedResourceMap where
import Control.Concurrent.MVar
import Control.Exception (finally)
import Data.Hashable (Hashable)
import Prelude hiding (lookup)
import qualified Unison.Runtime.ExpiringMap as M
-- import System.IO (stderr, hPutStrLn)
debug :: String -> IO ()
debug _ = pure ()-- hPutStrLn stderr ("[SharedResourceMap] " ++ msg)
data SharedResourceMap k v
= SharedResourceMap { acquiring :: M.ExpiringMap k (MVar ())
, resources :: M.ExpiringMap k v }
new :: (Hashable k, Eq k) => M.Seconds -> M.Seconds -> IO (SharedResourceMap k v)
new acquisition ttl = SharedResourceMap <$> M.new acquisition <*> M.new ttl
lookup :: (Eq k, Hashable k) => k -> SharedResourceMap k v -> IO (Maybe v)
lookup k m = M.lookup k (resources m)
lookupOrReplenish :: (Eq k, Hashable k) => k -> IO v -> SharedResourceMap k v -> IO v
lookupOrReplenish k replenish m = do
v <- lookup k m
case v of
Nothing -> M.lookup k (acquiring m) >>= \sem -> case sem of
Nothing -> do
debug "no lock allocated"
sem <- newMVar ()
_ <- M.insert k sem (acquiring m)
lookupOrReplenish k replenish m
Just sem -> do
debug "acquiring lock..."
takeMVar sem
debug "... acquired"
flip finally (putMVar sem () >> debug "releasing lock") $ do
v <- replenish
_ <- M.insert k v (resources m)
pure v
Just v -> pure v
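-- Hypothetical usage sketch (the names below are assumptions, not part of this
-- module): the expensive acquisition runs at most once per key while the cached
-- resource is still live.
--   getConn :: SharedResourceMap String Connection -> IO Connection
--   getConn m = lookupOrReplenish "db" openConnection m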
| nightscape/platform | node/src/Unison/Runtime/SharedResourceMap.hs | mit | 1,440 | 0 | 23 | 366 | 526 | 259 | 267 | 34 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Web.Twitter
( search
, tweet
) where
import System.Process
-- This is just a wrapper around a command-line call to the Python program python/twitter.py.
-- It is made into its own module as a promise that I'll replace the Python with a proper Haskell
-- implementation.
twitterPy :: String
twitterPy = "python/twitter.py"
search :: String -> IO [String]
search str = fmap words (readProcess twitterPy ["-s", str] [])
tweet :: String -> Either String (IO String)
tweet str = if length str > 140
then Left "String is too long to tweet: must be less than 140 chars."
else Right (readProcess twitterPy ["-t", str] [])
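-- Hypothetical usage sketch (assumed, not part of this module):
--   postTweet :: String -> IO ()
--   postTweet msg = case tweet msg of
--     Left err -> putStrLn err
--     Right io -> io >>= putStrLn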
| aBathologist/borgmanities | src/Web/Twitter.hs | mit | 717 | 0 | 9 | 173 | 146 | 80 | 66 | 13 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Haste.WebAudio.Internal where
import Haste.Foreign
-- | Web Audio context
newtype Ctx = Ctx JSAny
deriving (ToAny, FromAny)
-- | Web Audio is a monad
newtype WebAudio a = WebAudio { unWA :: Ctx -> IO a }
jsAudioContext :: IO Ctx
jsAudioContext = ffi "(function(){window.AudioContext = window.AudioContext||window.webkitAudioContext;return new AudioContext()})"
jsConnectNode :: (ToAny a, ToAny b) => a -> b -> IO ()
jsConnectNode = ffi "(function(a,b){a.connect(b)})"
jsDisconnectNode :: ToAny a => a -> IO ()
jsDisconnectNode = ffi "(function(a){a.disconnect()})"
jsNumberOfInputsNode :: ToAny a => a -> IO Int
jsNumberOfInputsNode = ffi "(function(a){return a.numberOfInputs})"
jsNumberOfOutputsNode :: ToAny a => a -> IO Int
jsNumberOfOutputsNode = ffi "(function(a){return a.numberOfOutputs})"
newtype StdDestination = StdDestination JSAny
deriving (ToAny, FromAny)
jsStdDestionationNode :: Ctx -> IO StdDestination
jsStdDestionationNode = ffi "(function(g){return g.destination})"
newtype Oscillator = Oscillator JSAny
deriving (ToAny, FromAny)
jsOscillatorNode :: Ctx -> IO Oscillator
jsOscillatorNode = ffi "(function(g){return g.createOscillator()})"
newtype Gain = Gain JSAny
deriving (ToAny, FromAny)
jsGainNode :: Ctx -> IO Gain
jsGainNode = ffi "(function(g){return g.createGain()})"
newtype Delay = Delay JSAny
deriving (ToAny, FromAny)
jsDelayNode :: Ctx -> IO Delay
jsDelayNode = ffi "(function(g){return g.createDelay()})"
newtype Convolution = Convolution JSAny
deriving (ToAny, FromAny)
jsConvolutionNode :: Ctx -> IO Convolution
jsConvolutionNode = ffi "(function(g){return g.createConvolver()})"
newtype Analyser = Analyser JSAny
deriving (ToAny, FromAny)
jsAnalyserNode :: Ctx -> IO Analyser
jsAnalyserNode = ffi "(function(g){return g.createAnalyser()})"
jsNodeStart :: ToAny a => a -> Int -> IO ()
jsNodeStart = ffi "(function(a,b){a.start(b)})"
jsNodeStop :: ToAny a => a -> IO ()
jsNodeStop = ffi "(function(a){a.stop()})"
jsOscillatorFrequency :: Oscillator -> Double -> IO ()
jsOscillatorFrequency = ffi "(function(a,b){a.frequency.value=b})"
jsOscillatorFrequencyGet :: Oscillator -> IO Double
jsOscillatorFrequencyGet = ffi "(function(a){return a.frequency.value})"
jsOscillatorType :: Oscillator -> String -> IO ()
jsOscillatorType = ffi "(function(a,b){a.type=b})"
jsGainValue :: Gain -> Double -> IO ()
jsGainValue = ffi "(function(a,b){a.gain.value=b})"
jsGainValueGet :: Gain -> IO Double
jsGainValueGet = ffi "(function(a){return a.gain.value})"
jsDelayValue :: Delay -> Double -> IO ()
jsDelayValue = ffi "(function(a,b){a.delayTime.value=b})"
jsAnalyserFFTSize :: Analyser -> Int -> IO ()
jsAnalyserFFTSize = ffi "(function(a,b){a.fftSize=b})"
jsGetByteFrequencyData :: FromAny a => Analyser -> IO a
jsGetByteFrequencyData = ffi "(function(a){var d=new Uint8Array(a.frequencyBinCount);a.getByteFrequencyData(d);return d})"
jsGetFloatFrequencyData :: FromAny a => Analyser -> IO a
jsGetFloatFrequencyData = ffi "(function(a){var d=new Float32Array(a.frequencyBinCount);a.getFloatFrequencyData(d);return d})"
jsGetByteTimeDomainData :: FromAny a => Analyser -> IO a
jsGetByteTimeDomainData = ffi "(function(a){var d=new Uint8Array(a.frequencyBinCount);a.getByteTimeDomainData(d);return d})"
jsGetFloatTimeDomainData :: FromAny a => Analyser -> IO a
jsGetFloatTimeDomainData = ffi "(function(a){var d=new Float32Array(a.frequencyBinCount);a.getFloatTimeDomainData(d);return d})"
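-- Hypothetical usage sketch (assumed, not part of this module): wire an
-- oscillator straight to the default destination and start it.
--   beep :: IO ()
--   beep = do
--     ctx  <- jsAudioContext
--     osc  <- jsOscillatorNode ctx
--     dest <- jsStdDestionationNode ctx
--     jsOscillatorFrequency osc 440
--     jsConnectNode osc dest
--     jsNodeStart osc 0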
| akru/haste-webaudio | src/Haste/WebAudio/Internal.hs | mit | 3,555 | 0 | 9 | 460 | 770 | 398 | 372 | 67 | 1 |
-- @Author: Zeyuan Shang
-- @Date: 2016-07-23 15:33:02
-- @Last Modified by: Zeyuan Shang
-- @Last Modified time: 2016-07-23 16:47:59
import Data.List
data Graph a = Graph [a] [(a, a)]
deriving (Show, Eq)
k4 = Graph ['a', 'b', 'c', 'd']
[('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'a'), ('a', 'c'), ('b', 'd')]
-- from 81
paths :: (Eq a) => a -> a -> [(a, a)] -> [[a]]
paths src dst edges
| src == dst = [[dst]]
| otherwise =
[src : path | edge <- edges, (fst edge) == src,
path <- (paths (snd edge) dst [e | e <- edges, e /= edge])] ++
[src : path | edge <- edges, (snd edge) == src,
path <- (paths (fst edge) dst [e | e <- edges, e /= edge])]
-- from 82
cycle' :: (Eq a) => a -> [(a, a)] -> [[a]]
cycle' a xs = [a : path | e <- xs, fst e == a, path <- paths (snd e) a [x | x <- xs, x /= e]] ++
[a : path | e <- xs, snd e == a, path <- paths (fst e) a [x | x <- xs, x /= e]]
spantree :: (Eq a) => Graph a -> [Graph a]
spantree (Graph vertices edges) = filter (connected) $ filter (not . cycles) $ filter (nodes) alltrees
where
numVertices = length vertices
numEdges = length edges
alltrees = [Graph vertices sub_edges | sub_edges <- foldr (\e es -> es ++ (map (e:) es)) [[]] edges]
nodes (Graph xs' ys') = numVertices == length xs'
cycles (Graph xs' ys') = any ((/=) 0 . length . flip cycle' ys') xs'
connected (Graph (x':xs') ys') = not $ any (null) [paths x' y' ys' | y' <- xs']
main = do
print $ length $ spantree k4
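-- Sanity check (an assumption based on Cayley's formula): K4 has 4^(4-2) = 16
-- spanning trees, so main should print 16.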
| zeyuanxy/haskell-playground | ninety-nine-haskell-problems/vol9/83.hs | mit | 1,586 | 50 | 17 | 471 | 820 | 441 | 379 | 26 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Render.Core.Manager
( newRenderManager
, newTriangleRenderer
, TriangleRenderer
, FontRenderer
, Triangle(..)
, newFontRenderer
, renderText
, RenderManager
, mkTextureUnit
)
where
import Debug.Trace
import Graphics.Rendering.FreeType.Internal.Face
import qualified Data.Set as Set
import Graphics.Rendering.OpenGL (($=))
import qualified Graphics.Rendering.OpenGL as GL
import qualified Graphics.Rendering.OpenGL.Raw as GLRaw
import Control.Lens
import Control.Monad.Morph (generalize, hoist)
import Control.Monad.State.Strict
import Data.Binary.IEEE754
import qualified Data.Map as Map
import qualified Data.Vector.Storable as V
import Data.Word
import Foreign.C.Types
import Foreign.Storable
import Foreign.Ptr
import Render.Core.Camera
import Render.Core.Error
import Render.Core.Render
import Render.Core.Text
import Data.Maybe
data RenderManager = RenderManager
{ _rmUsedTextures :: Set.Set GL.TextureUnit
} deriving (Show)
newRenderManager :: RenderManager
newRenderManager = RenderManager Set.empty
makeLenses ''RenderManager
--generalize :: Monad m => Identity a -> m a
--generalize = return . runIdentity
type Position = (Float, Float)
type Color = (Float, Float, Float, Float)
data Triangle = Triangle
{ _triangleV0 :: (Position, Color)
, _triangleV1 :: (Position, Color)
, _triangleV2 :: (Position, Color)
}
makeLenses ''Triangle
-- | creates a list for the vertex array
triangleToList :: Triangle -> [Word32]
triangleToList triangle = map floatToWord
-- | zero for padding
[ x0, y0, r0, g0, b0, a0, 0, 0
, x1, y1, r1, g1, b1, a1, 0, 0
, x2, y2, r2, g2, b2, a2, 0, 0
]
where
(x0, y0) = triangle^.triangleV0._1
(x1, y1) = triangle^.triangleV1._1
(x2, y2) = triangle^.triangleV2._1
(r0, g0, b0, a0) = triangle^.triangleV0._2
(r1, g1, b1, a1) = triangle^.triangleV1._2
(r2, g2, b2, a2) = triangle^.triangleV2._2
data TriangleRenderer = TriangleRenderer
{ _triangleData :: [Triangle]
, _triangleBuffer :: GL.BufferObject
, _triangleBufferSize :: Int
, _triangleElements :: GL.BufferObject
, _triangleVAO :: GL.VertexArrayObject
, _triangleShader :: GL.Program
}
makeLenses ''TriangleRenderer
-- | return the total number of vertices inside the renderer
-- | note: equals the length of elements in triangleElements
trNumVertices :: Getter TriangleRenderer Int
trNumVertices = to (\tr -> 3*length (tr^.triangleData))
-- | Create a static renderer for triangles
newTriangleRenderer :: [Triangle] -> IO TriangleRenderer
newTriangleRenderer triangles = do
[buffer, elementBuffer] <- GL.genObjectNames 2 :: IO [GL.BufferObject]
[vao] <- GL.genObjectNames 1 :: IO [GL.VertexArrayObject]
program <- setupShaders "untextured.vert" "untextured.frag"
GL.currentProgram $= Just program
GL.bindFragDataLocation program "color_final" $= 0
logGL "triangle renderer setup shaders"
_ <- uniformInfo program
let vertexData = V.fromList $ concatMap triangleToList triangles
let elementData = V.fromList $ map fromIntegral $ concatMap (\(i, l) -> map ((3*i) +) l) $ zip [0..length triangles - 1] (repeat [0, 1, 2]) :: V.Vector CUInt
uploadFromVec (V.length vertexData) GL.ArrayBuffer buffer vertexData
uploadFromVec (3*length triangles) GL.ElementArrayBuffer elementBuffer elementData
logGL "upload data"
GL.bindVertexArrayObject $= Just vao
logGL "bind vao"
GL.AttribLocation posLoc <- GL.get $ GL.attribLocation program "pos"
logGL "get pos loc"
GL.AttribLocation colorLoc <- GL.get $ GL.attribLocation program "myColor"
logGL "get color loc"
print colorLoc
print posLoc
GL.bindBuffer GL.ArrayBuffer $= Just buffer
logGL "bind array buffer"
GLRaw.glVertexAttribPointer posLoc 2 GLRaw.gl_FLOAT 0 32 nullPtr
logGL "set pos loc"
GLRaw.glEnableVertexAttribArray posLoc
logGL "enable pos attrib"
GLRaw.glVertexAttribPointer colorLoc 4 GLRaw.gl_FLOAT 0 32 (plusPtr nullPtr 8)
logGL "set color loc"
GLRaw.glEnableVertexAttribArray colorLoc
logGL "enable color attrib"
print vertexData
print elementData
return TriangleRenderer
{ _triangleData = triangles
, _triangleBuffer = buffer
, _triangleBufferSize = V.length vertexData
, _triangleElements = elementBuffer
, _triangleVAO = vao
, _triangleShader = program
}
-- | render all triangles stored in triangle renderer
-- | note: set camera first
renderTriangles :: TriangleRenderer -> Camera -> IO ()
renderTriangles renderer cam = do
GL.currentProgram $= Just (renderer^.triangleShader)
programSetViewProjection (renderer^.triangleShader) cam
GL.bindVertexArrayObject $= Just (renderer^.triangleVAO)
GL.bindBuffer GL.ElementArrayBuffer $= Just (renderer^.triangleElements)
--GLRaw.glDrawElements GLRaw.gl_TRIANGLES (fromIntegral $ renderer^.trNumVertices) GLRaw.gl_UNSIGNED_INT nullPtr
GLRaw.glDrawElements GLRaw.gl_TRIANGLES 3 GLRaw.gl_UNSIGNED_INT nullPtr
data TextBuffer = TextBuffer
{ _tbVertices :: V.Vector Word32
, _tbElements :: V.Vector Word32
}
data FontRenderer = FontRenderer
{ _frTextAtlas :: TextAtlas
, _frAtlasTexture :: GL.TextureObject
, _frAtlasTextureUnit :: Maybe GL.TextureUnit
, _frProgram :: GL.Program
, _frVAO :: GL.VertexArrayObject
, _frTopoBuffer :: GL.BufferObject
, _frColorBuffer :: GL.BufferObject
, _frElementBuffer :: GL.BufferObject
, _frAtlasUniform :: GL.BufferObject
, _ftFace :: FT_Face
}
makeLenses ''FontRenderer
-- | get a new texture unit
mkTextureUnit :: State RenderManager GL.TextureUnit
mkTextureUnit = do
usedTextures <- use rmUsedTextures
let texUnit = head $ filter (\unit -> not $ Set.member unit usedTextures) . map GL.TextureUnit $ [0..]
rmUsedTextures %= Set.insert texUnit
return texUnit
type BufferLocation = Int
type FreeSize = Int -- ^ In Bytes
data TextCache = TextCache
{ _tcBuffer :: GL.BufferObject
, _tcTexts :: Map.Map String BufferLocation
, _tcFreeSpace :: [(BufferLocation, FreeSize)]
}
tcResize :: TextCache -> IO TextCache
tcResize = undefined
ret :: Getter a (IO a)
ret = to (return)
l = do
t <- newTextCache
t^.ret
newTextCache :: IO TextCache
newTextCache =
let initialSize = 4 * 4 * 100 -- 100 chars (4 byte per char)
freeSpace = [(0, initialSize)]
in return TextCache
{ _tcBuffer = GL.nullBuffer
, _tcTexts = Map.empty
, _tcFreeSpace = freeSpace
}
tcAddText :: String -> TextCache -> IO TextCache
tcAddText = undefined
-- TODO: fragmentation, referencecount
-- | initialize a new font renderer using a loaded text atlas
newFontRenderer :: TextAtlas -> StateT RenderManager IO FontRenderer
newFontRenderer textAtlas = do
textureUnit <- hoist generalize mkTextureUnit
lift $ do
fm <- newFontManager
font <- newFont fm "/home/marco/workspace/haskell/henge/games/boom/data/font.otf" 64
face <- peek $ font^.fontFace
(shaped, _) <- shapeLine face (newText "AVKERNimlno") 1024
let glyphs' = map (\gl -> textAtlas^.atlasCodepointGlyphs.at (gl^.scCodepoint).to fromJust.glyphChar) shaped
print glyphs'
let indices = map atlasIndex glyphs'
[topoBuffer, colorBuffer, elementBuffer, atlasBuffer] <- GL.genObjectNames 4 :: IO [GL.BufferObject]
[vao] <- GL.genObjectNames 1 :: IO [GL.VertexArrayObject]
program <- setupShaders "text.vert" "text.frag"
GL.currentProgram $= Just program
let vertexData = V.fromList $ concatMap (\(index, glyph) -> [floatToWord $ glyph^.scOffset._1, floatToWord $ glyph^.scOffset._2, fromInteger index::Word32, floatToWord 0]) $ zip indices shaped
-- let vertexData = V.fromList $ [floatToWord 3, floatToWord 0, 0, 0] ++ [floatToWord 77, floatToWord 0, 20, 0]
let elementData = V.fromList $ take (V.length vertexData) [(0::Word32), (1::Word32)..] :: V.Vector Word32
let atlasData = traceShow vertexData $ atlasToStorable textAtlas
uploadFromVec (V.length vertexData) GL.ArrayBuffer topoBuffer vertexData
uploadFromVec (V.length elementData) GL.ElementArrayBuffer elementBuffer elementData
uploadFromVec (V.length atlasData) GL.UniformBuffer atlasBuffer atlasData
logGL "upload data"
GL.bindVertexArrayObject $= Just vao
GL.AttribLocation posLoc <- GL.get $ GL.attribLocation program "pos"
--GL.AttribLocation myColorLoc <- GL.get $ GL.attribLocation program "myColor"
GL.AttribLocation charIdLoc <- GL.get $ GL.attribLocation program "charId"
GL.bindBuffer GL.ArrayBuffer $= Just topoBuffer
GLRaw.glVertexAttribPointer posLoc 2 GLRaw.gl_FLOAT 0 16 nullPtr
GLRaw.glVertexAttribDivisor posLoc 1
GLRaw.glEnableVertexAttribArray posLoc
GLRaw.glVertexAttribIPointer charIdLoc 1 GLRaw.gl_INT 16 (plusPtr nullPtr 8)
GLRaw.glVertexAttribDivisor charIdLoc 1
GLRaw.glEnableVertexAttribArray charIdLoc
[imageTexture] <- GL.genObjectNames 1 :: IO [GL.TextureObject]
let image = textAtlas^.atlasImage
uploadImage textureUnit imageTexture image
return FontRenderer
{ _frTextAtlas = textAtlas
, _frAtlasTexture = imageTexture
, _frAtlasTextureUnit = Just textureUnit
, _frProgram = program
, _frVAO = vao
, _frTopoBuffer = topoBuffer
, _frColorBuffer = colorBuffer
, _frElementBuffer = elementBuffer
, _frAtlasUniform = atlasBuffer
}
-- | render a text using the font renderer
renderText :: FontRenderer -> Camera -> IO ()
renderText fr cam = do
GL.currentProgram $= Just (fr^.frProgram)
programSetViewProjection (fr^.frProgram) cam
charMapIndex <- GL.getUniformBlockIndex (fr^.frProgram) "CharMap"
logGL "renderText: getUniformBlockIndex"
GL.bindBufferBase' GL.UniformBuffer charMapIndex (fr^.frAtlasUniform)
logGL "renderText: uniform block bind buffer base'"
GL.uniformBlockBinding (fr^.frProgram) charMapIndex charMapIndex
logGL "renderText: uniform block binding"
sampler <- GL.get $ GL.uniformLocation (fr^.frProgram) "Texture0"
logGL "renderText: uniform location"
let Just textureUnit = fr^.frAtlasTextureUnit
GL.uniform sampler $= textureUnit
logGL "renderText: texture unit"
GL.bindVertexArrayObject $= Just (fr^.frVAO)
logGL "renderText: bindvao"
GL.bindBuffer GL.ElementArrayBuffer $= Just (fr^.frElementBuffer)
logGL "renderText: bindElementbuffer"
--GLRaw.glDrawElements GLRaw.gl_TRIANGLES (fromIntegral $ renderer^.trNumVertices) GLRaw.gl_UNSIGNED_INT nullPtr
--GLRaw.glDrawElements GLRaw.gl_TRIANGLES 12 GLRaw.gl_UNSIGNED_INT nullPtr
GLRaw.glDrawElementsInstanced GLRaw.gl_TRIANGLES 6 GLRaw.gl_UNSIGNED_INT (plusPtr nullPtr (2*4*6)) 4
-- GLRaw.glDrawArraysInstancedBaseInstance GLRaw.gl_TRIANGLES 1 6 1
--let from = 6
-- let count = 6
-- GLRaw.glDrawArraysInstanced GLRaw.gl_TRIANGLES from count 4
logGL "renderText: glDrawElements"
| mfpi/halo | src/Render/Core/Manager.hs | mit | 11,542 | 0 | 20 | 2,577 | 2,873 | 1,490 | 1,383 | -1 | -1 |
module Examples where
import Interaction
readWrite :: Interact () ()
readWrite = readin `sq` writeout id
copy :: Interact () ()
copy = readin `sq` writeout id `sq` copy
test1 :: IO ()
test1 = run readWrite ()
test2 :: IO ()
test2 = runL readWrite ()
readInt :: String -> Int
readInt = read
getInt :: Interact () Int
getInt = readin `sq` apply read
addNum :: Interact Int Int
addNum = readI ((+).readInt) `sq` showkeep
addNums :: Interact Int Int
addNums
= while (not.eof)
addNum
addNumsToZero :: Interact Int Int
addNumsToZero
= while (not.isZero)
addNum
where isZero (_,st) = st==0
collectNums :: Interact Int Int
collectNums
= addNum `pass_param` (\n -> start 0 `sq`
seqlist (replicate n addNum) `sq`
write "finished")
collector :: Interact () (Int,Int)
collector
= getInt `sq` -- state is counter
add_val_right 0 `sq` -- now is (counter,sum)
while ((>(0::Int)).fst.snd) -- still is (counter,sum)
(add_val_left () `sq` -- now ((),(counter,sum))
pass_on getInt `sq` -- now (Int,(counter,sum))
apply (\(p,(m,s))->(m-1,s+p)) `sq` -- is again (counter,sum)
wait `sq` -- see what happens when this is commented out :-)
showkeep)
{-
Getting the irrefutable patterns right - messy
-}
oneWord :: Interact () ()
oneWord = writeln "word"
manyWords :: Interact () ()
manyWords = oneWord `sq` manyWords
moreWords :: Interact () ()
moreWords x
= make_Output ["word"] (moreWords x)
evenmoreWords :: Interact () ()
evenmoreWords x
= (y,z,["word"]++out)
where
(y,z,out) = evenmoreWords x
-- Flavours of echo
necho ys
= "Prompt: " ++ [head ys] ++ "\n" ++ necho (tail ys)
echon (x:xs)
= "Prompt: " ++ [x] ++ "\n" ++ echon xs
| simonjohnthompson/Interaction | Examples.hs | mit | 2,056 | 0 | 15 | 712 | 662 | 366 | 296 | 55 | 1 |
module Main where
import Tictactoe.Att.Base
import Tictactoe.Def.Base
main :: IO ()
main = do
putStrLn "Game name: "
name <- getLine
putStrLn "Game mode: (D | A)?"
mode <- getLine
case mode of
"D" -> playDefender name
"A" -> playAttacker name
_ -> putStrLn "Game mode is unknown"
| viktorasl/tictactoe-bot | src/Main.hs | mit | 328 | 0 | 10 | 96 | 91 | 44 | 47 | 13 | 3 |
module Compose where
oddsFrom :: Integer -> [Integer]
oddsFrom = take 5 . filter odd . enumFrom
negateSum :: Num a => [a] -> a
negateSum = negate . sum
mySum :: Num a => [a] -> a
mySum = foldl (+) 0
ay :: [Char] -> Int
ay = length . filter (== 'a')
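-- Illustrative examples (assumed, not part of the solutions file):
--   oddsFrom 3        == [3,5,7,9,11]
--   negateSum [1,2,3] == -6
--   ay "banana"       == 3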
| andrewMacmurray/haskell-book-solutions | src/ch7/compose.hs | mit | 253 | 0 | 7 | 59 | 122 | 67 | 55 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module GitHub where
import Import
import Data.Aeson ((.:))
import Data.Aeson.Types (parseMaybe)
import Data.ByteString.Lazy (ByteString)
import Data.Maybe (fromJust)
import Data.Time.Clock (getCurrentTime)
import Data.Time.Format (formatTime)
import Network.HTTP.Types.Method (Method)
import System.Locale (defaultTimeLocale, iso8601DateFormat, timeFmt)
import qualified Data.Aeson as Aeson
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Text as Text
import qualified Data.Vector as Vector
import qualified Network.HTTP.Conduit as HTTP
getUserName :: Text -> IO Text
getUserName token = do
userContent <- HTTP.simpleHttp . Text.unpack $ "https://api.github.com/user?access_token=" `mappend` token
let name = maybe "" jsonString $ Aeson.decode userContent >>= parseMaybe userName
return name
where
userName (Object o) = o .: "name"
userName _ = fail "name missing"
getBase :: Text -> Text -> Text -> IO (Text, Text)
getBase ref repoUrl token = do
refContent <- HTTP.simpleHttp . Text.unpack $ mconcat [repoUrl, "/git/refs/", ref, "?access_token=", token]
let commit = jsonString . fromJust $ Aeson.decode refContent >>= parseMaybe refCommit
commitContent <- HTTP.simpleHttp . Text.unpack $ mconcat [repoUrl, "/git/commits/", commit, "?access_token=", token]
let tree = jsonString . fromJust $ Aeson.decode commitContent >>= parseMaybe commitTree
return (commit, tree)
where
refCommit (Object o) = o .: "object" >>= (.: "sha")
refCommit _ = fail "object.sha missing"
commitTree (Object o) = o .: "tree" >>= (.: "sha")
commitTree _ = fail "tree.sha missing"
{-
createBlob :: Text -> Text -> Text -> IO Text
createBlob content repoUrl token = do
let o = HashMap.fromList [("content", Aeson.String content)]
body <- jsonReq "POST" o (mconcat [repoUrl, "/git/blobs"]) token
let blob = jsonString . fromJust $ Aeson.decode (HTTP.responseBody body) >>= parseMaybe bodyBlob
return blob
where
bodyBlob (Object o) = o .: "sha"
bodyBlob _ = fail "sha missing"
-}
createTree :: Text -> Text -> Text -> Text -> IO Text
createTree baseTree content repoUrl token = do
let o = HashMap.fromList
[ ("base_tree", Aeson.String baseTree)
, ("tree", Aeson.Array $ Vector.fromList
[ Aeson.Object $ HashMap.fromList
[ ("path", Aeson.String "LICENSE.txt")
, ("mode", Aeson.String "100644")
, ("type", Aeson.String "blob")
, ("content", Aeson.String content)
]
])
]
body <- jsonReq "POST" o (mconcat [repoUrl, "/git/trees"]) token
let tree = jsonString . fromJust $ Aeson.decode (HTTP.responseBody body) >>= parseMaybe bodyTree
return tree
where
bodyTree (Object o) = o .: "sha"
bodyTree _ = fail "sha missing"
createCommit :: Text -> Text -> Text -> Text -> Text -> IO Text
createCommit licenseName baseCommit tree repoUrl token = do
time <- getCurrentTime
let timeString = formatTime defaultTimeLocale iso8601DateTimeFormat time
o = HashMap.fromList
[ ("message", Aeson.String $ mconcat ["Add ", licenseName, " license"])
, ("author", Aeson.Object $ HashMap.fromList
[ ("name", Aeson.String "Licentious")
, ("email", Aeson.String "licentious@brianmckenna.org")
, ("date", Aeson.String $ Text.pack timeString)
])
, ("parents", Aeson.String baseCommit)
, ("tree", Aeson.String tree)
]
print timeString
body <- jsonReq "POST" o (mconcat [repoUrl, "/git/commits"]) token
let commit = jsonString . fromJust $ Aeson.decode (HTTP.responseBody body) >>= parseMaybe bodyCommit
return commit
where
bodyCommit (Object o) = o .: "sha"
bodyCommit _ = fail "sha missing"
updateRef :: Text -> Text -> Text -> Text -> IO ()
updateRef commit ref repoUrl token = do
let o = HashMap.fromList [("sha", Aeson.String commit)]
_ <- jsonReq "PATCH" o (mconcat [repoUrl, "/git/refs/", ref]) token
return ()
jsonReq :: Method -> Aeson.Object -> Text -> Text -> IO (HTTP.Response ByteString)
jsonReq method o url token = HTTP.withManager $ \man -> do
request <- HTTP.parseUrl . Text.unpack $ mconcat [url, "?access_token=", token]
let requestBody = mkJsonRequestBody o
HTTP.httpLbs (request { HTTP.method = method, HTTP.requestBody = requestBody }) man
mkJsonRequestBody :: Aeson.Object -> HTTP.RequestBody m
mkJsonRequestBody = HTTP.RequestBodyLBS . Aeson.encode . Aeson.Object
jsonString :: Aeson.Value -> Text
jsonString (Aeson.String t) = t
jsonString _ = ""
iso8601DateTimeFormat :: String
iso8601DateTimeFormat = iso8601DateFormat . Just . (++ "Z") $ timeFmt defaultTimeLocale
|
puffnfresh/licentious
|
GitHub.hs
|
mit
| 5,186
| 0
| 21
| 1,398
| 1,420
| 744
| 676
| 84
| 3
|
-- --------------------------------------------------------------------------
-- $Revision: 262 $ $Date: 2007-04-12 12:19:50 +0200 (Thu, 12 Apr 2007) $
-- --------------------------------------------------------------------------
-- |
--
-- Module : PureFP.OrdMap
-- Copyright : Peter Ljunglof 2002
-- License : GPL
--
-- Maintainer : otakar.smrz mff.cuni.cz
-- Stability : provisional
-- Portability : portable
--
-- Chapter 1 and Appendix A of /Pure Functional Parsing – an advanced
-- tutorial/ by Peter Ljunglöf
--
-- <http://www.ling.gu.se/~peb/pubs/p02-lic-thesis.pdf>
--------------------------------------------------
-- The class of ordered finite maps
-- as described in section 2.2.2
-- and an example implementation,
-- derived from the implementation in appendix A.2
module OrdMap (OrdMap(..), Map, makeMapWith, mapMapWithKey) where
import Data.List (intersperse)
--------------------------------------------------
-- the class of ordered finite maps
class OrdMap m where
emptyMap :: Ord s => m s a
(|->) :: Ord s => s -> a -> m s a
isEmptyMap :: Ord s => m s a -> Bool
(?) :: Ord s => m s a -> s -> Maybe a
lookupWith :: Ord s => a -> m s a -> s -> a
mergeWith :: Ord s => (a -> a -> a) -> m s a -> m s a -> m s a
unionMapWith :: Ord s => (a -> a -> a) -> [m s a] -> m s a
assocs :: Ord s => m s a -> [(s,a)]
ordMap :: Ord s => [(s,a)] -> m s a
mapMap :: Ord s => (a -> b) -> m s a -> m s b
lookupWith z m s = case m ? s of
Just a -> a
Nothing -> z
unionMapWith join = union
where union [] = emptyMap
union [xs] = xs
union xyss = mergeWith join (union xss) (union yss)
where (xss, yss) = split xyss
split (x:y:xyss) = let (xs, ys) = split xyss in (x:xs, y:ys)
split xs = (xs, [])
makeMapWith :: (Ord s, OrdMap m) => (a -> a -> a) -> [(s,a)] -> m s a
makeMapWith join [] = emptyMap
makeMapWith join [(s,a)] = s |-> a
makeMapWith join xyss = mergeWith join (makeMapWith join xss)
(makeMapWith join yss)
where (xss, yss) = split xyss
split (x:y:xys) = let (xs, ys) = split xys in (x:xs, y:ys)
split xs = (xs, [])
--------------------------------------------------
-- finite maps as ordered association lists,
-- paired with binary search trees
data Map s a = Map [(s,a)] (TreeMap s a)
instance (Eq s, Eq a) => Eq (Map s a) where
Map xs _ == Map ys _ = xs == ys
instance (Show s, Show a) => Show (Map s a) where
show (Map ass _) = "{" ++ concat (intersperse "," (map show' ass)) ++ "}"
where show' (s,a) = show s ++ "|->" ++ show a
instance OrdMap Map where
emptyMap = Map [] (makeTree [])
s |-> a = Map [(s,a)] (makeTree [(s,a)])
isEmptyMap (Map ass _) = null ass
Map _ tree ? s = lookupTree s tree
mergeWith join (Map xss _) (Map yss _) = Map xyss (makeTree xyss)
where xyss = merge xss yss
merge [] yss = yss
merge xss [] = xss
merge xss@(x@(s,x'):xss') yss@(y@(t,y'):yss')
= case compare s t of
LT -> x : merge xss' yss
GT -> y : merge xss yss'
EQ -> (s, join x' y') : merge xss' yss'
assocs (Map xss _) = xss
ordMap xss = Map xss (makeTree xss)
mapMap f (Map ass atree) = Map [ (s,f a) | (s,a) <- ass ] (mapTree f atree)
mapMapWithKey f (Map ass atree) = Map [ (s,f s a) | (s,a) <- ass ]
(mapTreeWithKey f atree)
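-- Illustrative usage sketch, not part of the original module: building a map
-- with 'makeMapWith' (duplicate keys are merged with the supplied function)
-- and querying it with the class operations defined above.
_exampleMap :: Map Char Int
_exampleMap = makeMapWith (+) [('a',1), ('b',2), ('a',3)]
-- _exampleMap ? 'a'            == Just 4
-- lookupWith 0 _exampleMap 'z' == 0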
--------------------------------------------------
-- binary search trees
-- for logarithmic lookup time
data TreeMap s a = Nil | Node (TreeMap s a) s a (TreeMap s a)
makeTree ass = tree
where
(tree,[]) = sl2bst (length ass) ass
sl2bst 0 ass = (Nil, ass)
sl2bst 1 ((s,a):ass) = (Node Nil s a Nil, ass)
sl2bst n ass = (Node ltree s a rtree, css)
where llen = (n-1) `div` 2
rlen = n - 1 - llen
(ltree, (s,a):bss) = sl2bst llen ass
(rtree, css) = sl2bst rlen bss
lookupTree s Nil = Nothing
lookupTree s (Node left s' a right)
= case compare s s' of
LT -> lookupTree s left
GT -> lookupTree s right
EQ -> Just a
mapTree f Nil = Nil
mapTree f (Node l s a r) = Node (mapTree f l) s (f a) (mapTree f r)
mapTreeWithKey f Nil = Nil
mapTreeWithKey f (Node l s a r) = Node (mapTreeWithKey f l) s (f s a)
(mapTreeWithKey f r)
|
natetarrh/comp150
|
OrdMap.hs
|
mit
| 4,653
| 0
| 15
| 1,476
| 1,838
| 961
| 877
| 77
| 3
|
{-# LANGUAGE ImplicitParams #-}
module GameObjectRender
( renderMonadius
) where
import Data.Complex
import Util
import Data.Array ((!))
import GLWrapper
import GameObject
import GOConst
import RenderUtil
import qualified Render.VicViper as VicViper
when b act = if b then act else return ()
renderScore' :: Vector3 Double -> String -> IO ()
renderScore' pos str = preservingMatrix $ do
translate pos
renderWithShade (Color3 1 1 1) (Color3 0 0 1) $ do
scale 0.2 0.2 0.2
renderString MonoRoman str
renderScore variables = do
renderScore' (Vector3 (-300) 220 0) scoreStr
renderScore' (Vector3 ( 0) 220 0) scoreStr2
where
scoreStr = "1P " ++ ((padding '0' 8).show.totalScore) variables
scoreStr2 | Just score <- playTitle variables = score
| otherwise = "HI "++((padding '0' 8).show.hiScore) variables
renderWithTrans (x,y) act = preservingMatrix $ do
translate2D x y >> act
preservingMatrixWithColor (r,g,b) act = do
color $ Color3 r g b
act
-------------------------
--
-- drawing
--
-------------------------
renderMonadius :: Monadius -> IO ()
renderMonadius (Monadius (variables,objects))
= let ?gameclock = gameclock
in mapM_ renderGameObject objects
>> renderScore variables
where
gameclock = gameClock variables
-- returns an IO monad that can render the object.
-- renderGameObject :: GameObject -> IO ()
renderGameObject gauge@PowerUpGauge{position=x:+y} = renderWithTrans (x,y) $ do
color $ Color3 1.0 1.0 1.0
mapM_ (\(i,j) -> (if(i==activeGauge)then renderActive else renderNormal) j (isLimit i) i) $
zip [0..5] [0,90..] where
w=80
h=20
box = [(0,0),(w,0),(w,h),(0,h)]
cross = [(0,0),(w,h),(w,0),(0,h)]
renderFrame ugoF x l = preservingMatrix $ do
translate1D x
renderPrimitive LineLoop $ ugoF box
when l $ renderPrimitive Lines $ ugoF cross
renderNormal x l i = preservingMatrixWithColor (0.7,0.8,0.8) $ do
renderFrame (ugoVertices2D 0 1) x l
preservingMatrix $ do
ugoTranslate x 0 0 3
translate1D (w/2)
rotateZ (3 * sin(intToDouble gameclock/10))
translate1D (-w/2)
renderPowerUpName i
renderActive x l i = preservingMatrixWithColor (1,1,0) $ do
renderFrame (ugoVertices2DFreq 0 5 2) x l
preservingMatrix $ do
ugoTranslateFreq x 0 0 5 2
translate1D (w/2)
rotateZ (10 * sin(intToDouble gameclock/5))
scale 1.2 1.2 0
translate1D (-w/2)
renderPowerUpName i
activeGauge = powerUpPointer vicViper
isLimit i = powerUpLevels vicViper!i>=powerUpLimits!!i
renderPowerUpName i = do
translate2D 6 3.5
scale 0.15 0.13 0.15
renderString Roman $ ["SPEED","MISSILE","DOUBLE","LASER","OPTION"," ?"]!!i
renderGameObject VicViper{position = x:+y, hp = hp, ageAfterDeath = age} = VicViper.render (x,y) hp age
renderGameObject Option{position = x:+y} = renderWithTrans (x,y) $ do
renderWithShade (Color3 0.8 0 0) (Color3 0.4 0 0) $
renderPrimitive LineLoop $ ugoVertices2D 0 2
[(5,9),(9,7),(13,3),(13,(-3)),(9,(-7)),(5,(-9)),
((-5),(-9)),((-9),(-7)),((-13),(-3)),((-13),3),((-9),7),((-5),9)]
renderWithShade (Color3 1.0 0.45 0) (Color3 0.4 0.2 0) $
renderPrimitive LineStrip $ ugoVertices2D 0 1
[((-12.0),(3.4)),(0.8,8.7),((-8.1),(-0.9)),(4.0,5.8),(4.3,5.6),
((-4.4),(-6.8)),((-4.1),(-6.9)),(8.3,0.8),(9.0,0.6),(2.0,(-7.2))]
renderGameObject StandardMissile{position=x:+y,velocity=v} = renderWithTrans (x,y) $ do
let dir = (phase v) :: Double
rotateZ (dir / pi * 180)
color (Color3 1.0 0.9 0.5)
renderPrimitive LineLoop $ ugoVertices2D 0 1 [(0,0),(-7,2),(-7,-2)]
renderPrimitive LineStrip $ ugoVertexFreq (-11) 0 0 1 1 >> ugoVertexFreq (-17) 0 0 7 1
renderGameObject StandardRailgun{position=x:+y,velocity=v} =
renderWithTrans (x,y) $ do
let (_,phse)=polar v
rotateZ (phse / pi * 180)
color (Color3 1.0 0.9 0.5)
renderPrimitive Lines $ ugoVertices2D 0 1 [(0,0),((-5),0),((-9),0),((-11),0)]
renderGameObject laser@StandardLaser{position=x:+y,velocity=v} =
when (age laser >= 1) $ renderWithTrans (x,y) $ do
let (_,phs)=polar v
rotateZ (phs / pi * 180)
color (Color3 0.7 0.9 1.0)
renderPrimitive Lines $ ugoVertices2D 0 0 [(12,0),(-laserSpeed,0)]
renderGameObject Shield{position=x:+y, size = r,angle = theta} = renderWithTrans (x,y) $ do
rotateZ theta
renderWithShade (Color3 0.375 0.75 0.9375) (Color3 0.86 0.86 0.86) $ do
scale r r 0
renderTriangle
rotateZ 60
renderTriangle where
renderTriangle = renderPrimitive LineLoop $ ugoVertices2DFreq 0 0.1 1 $ map (\t -> (cos t,sin t)) [0,pi*2/3,pi*4/3]
renderGameObject powerUpCapsule@PowerUpCapsule{position=x:+y} = renderWithTrans (x,y) $ do
renderWithShade (Color3 0.9 0.9 0.9) (Color3 0.4 0.4 0.4) $ do
sotogawa >> rotateY (180) >> sotogawa
renderWithShade (Color3 1.0 0.0 0.0) (Color3 0.3 0.3 0.0) $ do
nakami
where
r = renderPrimitive LineStrip . ugoVertices2D 0 1
sotogawa = futa >> neji >> toge >> rotateX (180) >> toge
futa = r [((-10),6),((-6),10),(6,10),(10,6)]
neji = r [(12,4),(12,(-4))] >> r [(16,2),(16,(-2))]
toge = r [(10,8),(16,14)]
nakami = rotate 145 (Vector3 0.2 0.2 1) >> scale 9 6 1 >>
(renderPrimitive LineStrip $ ugoVertices2D 0 0.2 $ map (\n -> (cos$n*pi/8,sin$n*pi/8)) [1,15,3,13,5,11,7,9])
renderGameObject DiamondBomb{position = (x:+y),age=clock} = renderWithTrans (x,y) $ do
rotateZ (90*intToDouble(clock`mod`4))
color (Color3 1 1 1)
renderPrimitive LineLoop $ vertices2D 0 $ [a,b,c]
color (Color3 0.5 0.5 0.5)
renderPrimitive Lines $ vertices2D 0 $ [a,d,a,e]
renderPrimitive LineStrip $ vertices2D 0 $ [c,d,e,b]
where
[a,b,c,d,e] = [(0,0),(r,0),(0,r),(-r,0),(0,-r)]
r = diamondBombSize
-- c
-- /|\
-- d-a-b
-- \|/
-- e
renderGameObject TurnGear{position=x:+y,age=clock} = renderWithTrans (x,y) $ do
color $ Color3 1.0 0.7 1.0
rotateZ (5 * intToDouble clock) >> renderWing
rotateZ 120 >> renderWing
rotateZ 120 >> renderWing
where
renderWing = renderPrimitive LineLoop $ ugoVertices2D 0 2 $ map ((\(t:+u) -> (t,u)) . (\(r,t) -> mkPolar r (pi*t)) )
[(3,0), (3,2/3), (smallBacterianSize,1/3), (smallBacterianSize,0), (smallBacterianSize+3,-1/3)]
renderGameObject Flyer{position=x:+y,age=_,velocity = v,hasItem=item} = renderWithTrans (x,y) $ do
color (if item then Color3 1.0 0.2 0.2 else Color3 0.3 1.0 0.7)
rotateZ (phase v / pi * 180)
r [(-2,0),(-6,4),(-10,0),(-6,-4)]
r [(2,4),(16,4),(4,16),(-10,16)]
r [(2,-4),(16,-4),(4,-16),(-10,-16)]
where r = renderPrimitive LineLoop . ugoVertices2D 0 2
renderGameObject Ducker{position = (x:+y),hitDisp=hd,hasItem=item,velocity = v,gVelocity = g,age = a} = renderWithTrans (x,y) $ do
if signum (imagPart g) > 0 then scale 1 (-1) 1 else return ()
if signum (realPart v) < 0 then scale (-1) 1 1 else return ()
--after this, ducker is on the lower ground, looking right
color (if item then (Color3 1.0 0.2 0.2 :: Color3 Double) else (Color3 0.3 1.0 0.7 :: Color3 Double))
renderShape (0:+0) hd
renderPrimitive LineStrip $ vertices2D 0 [(0,0),(kx,ky),(fx,fy)]
where
fx:+fy=foot $ intToDouble a/2
kx:+ky=knee $ intToDouble a/2
foot theta = (16*cos(-theta)):+(-16+8*sin(-theta))
knee theta = foot theta * (0.5 :+ (- sqrt(square(legLen/magnitude(foot theta)) - 0.25)))
legLen = 16
renderGameObject Jumper{position = (x:+y),hitDisp=hd,hasItem=item,gravity = g,velocity=v} = renderWithTrans (x,y) $ do
color (if item then Color3 1.0 0.2 0.2 else Color3 0.3 1.0 0.7)
renderShape (0:+0) hd
if gsign >0 then rotateX 180 else return() -- after this you can assume that the object is not upside down
renderPrimitive LineStrip $ ugoVertices2D 0 2 $ [(15,-5),(25,-5+absvy*leg),(25,-25+absvy*leg)]
renderPrimitive LineStrip $ ugoVertices2D 0 2 $ [(-15,-5),(-25,-5+absvy*leg),(-25,-25+absvy*leg)]
where
gsign = signum $ imagPart g
absvy = imagPart v * gsign -- if falling (+) ascending (-)
leg = 1.5
renderGameObject Grashia{position = (x:+y),hitDisp=hd,hasItem=item,gunVector = nv,gravity = g,mode=m} = renderWithTrans (x,y) $ do
color (if item then Color3 1.0 0.2 0.2 else Color3 0.3 1.0 0.7)
renderShape (0:+0) hd
renderPrimitive LineLoop $ ugoVertices2D 0 2 $ map (\r -> (nvx*r,nvy*r)) [16,32]
when (m == 1) $ do
renderShape 0 $ Circular (16:+12*gsign) 4
renderShape 0 $ Circular ((-16):+12*gsign) 4
where
nvx:+nvy = nv
gsign = signum $ imagPart g
renderGameObject ScrambleHatch{hp=hp, position = (x:+y),hitDisp=_,gravity= g,gateAngle = angl} = renderWithTrans (x,y) $ do
color $ Color3 (1.2*(1-hpRate)) 0.5 (1.6*hpRate)
when (gsign >0) $ rotateX 180 -- after this you can assume that the object is not upside down
renderPrimitive LineLoop $ ugoVertices2DFreq 0 (angl*2) 1 [(-45,1),(-45,hatchHeight),(45,hatchHeight),(45,1)]
preservingMatrix $ do
translate (Vector3 45 hatchHeight 0)
rotateZ (-angl/pi*180)
renderPrimitive LineLoop $ ugoVertices2DFreq 0 (angl*1) 2 [(0,0),(-45,0),(-45,10)]
preservingMatrix $ do
translate (Vector3 (-45) hatchHeight 0)
rotateZ (angl/pi*180)
renderPrimitive LineLoop $ ugoVertices2DFreq 0 (angl*1) 2 [(0,0),(45,0),(45,10)]
where
gsign = signum $ imagPart g
hpRate = intToDouble hp / intToDouble hatchHP
renderGameObject LandScapeBlock{position=pos,hitDisp=hd} = preservingMatrixWithColor (0.6,0.2,0) $ do
renderShape pos hd
when (treasure!!(baseGameLevel variables)) $ do
render (Color3 0.7 0.23 0) (Vector3 0 0 60)
render (Color3 0.5 0.17 0) (Vector3 0 0 (-120))
where
render col trans = color col >> translate trans >> renderShape pos hd
renderGameObject me@Particle{position = x:+y,particleColor=pc}
= when (age me>=0) $ renderWithTrans (x,y) $ do
color $ modifyColor (\e -> e * decay + whiteout) pc
renderShape (0:+0) $ Circular (0:+0) (size me*extent)
where
p n = intCut $ (*n) $ intToDouble (age me) / decayTime me
extent = 0.5 + p 1
decay = exp $ p (-1)
whiteout = exp $ p (-2)
intCut = intToDouble.round
renderGameObject Star{position = x:+y,particleColor=c} = preservingMatrix $ do
color c
renderPrimitive LineStrip $ ugoVertices2D 0 2 [(0.1+x,0+y),(-0.1+x,0+y)]
renderGameObject DebugMessage{debugMessage=str} = putDebugStrLn str
renderGameObject _ = return ()
vicViper = head $ filter (\obj -> case obj of
VicViper{} -> True
_ -> False) objects
renderShape :: Complex Double -> Shape -> IO ()
renderShape (x:+y) s = case s of
Rectangular{bottomLeft = (l:+b), topRight = (r:+t)} ->
renderPrimitive LineLoop $ vertices2D 0 [(x+l,y+b),(x+l,y+t),(x+r,y+t),(x+r,y+b)]
Circular{center=cx:+cy, radius = r} -> preservingMatrix $ do
translate (Vector3 (cx+x) (cy+y) 0)
rotateZ (intToDouble gameclock*(45+pi))
scale r r 1
renderPrimitive LineLoop $ vertices2D 0 $ map (\t -> (cos(2/7*t*pi),sin(2/7*t*pi))) [0..6]
Shapes{children=cs} -> mapM_ (renderShape (x:+y)) cs
vertices2D :: Double -> [(Double,Double)] -> IO ()
vertices2D z xys = mapM_ (\(x,y) -> vertex $ Vertex3 x y z) xys
|
keqh/Monadius_rewrite
|
Monadius/GameObjectRender.hs
|
gpl-2.0
| 11,652
| 1
| 22
| 2,679
| 5,776
| 3,062
| 2,714
| 219
| 31
|
{-# LANGUAGE MagicHash #-}
{- |
Module : ./atermlib/src/ATerm/AbstractSyntax.hs
Description : the abstract syntax of shared ATerms and their lookup table
Copyright : (c) Klaus Luettich, C. Maeder, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable(imports System.Mem.StableName and GHC.Prim)
the data types 'ShATerm' and 'ATermTable' plus some utilities
-}
module ATerm.AbstractSyntax
(ShATerm (..),
ATermTable,
emptyATermTable,
addATerm,
getATerm, toReadonlyATT,
getTopIndex,
getATerm', setATerm', getShATerm,
Key, getKey, setKey, mkKey,
getATermByIndex1, str2Char, integer2Int
) where
import qualified Data.Map as Map
import qualified Data.Map as IntMap
import Data.Dynamic
import Data.Array
import System.Mem.StableName
import GHC.Prim
import qualified Data.List as List
import Data.Maybe
data ShATerm =
ShAAppl String [Int] [Int]
| ShAList [Int] [Int]
| ShAInt Integer [Int]
deriving (Show, Eq, Ord)
data IntMap =
Updateable !(IntMap.Map Int ShATerm)
| Readonly !(Array Int ShATerm)
empty :: IntMap
empty = Updateable IntMap.empty
insert :: Int -> ShATerm -> IntMap -> IntMap
insert i s t = case t of
Updateable m -> Updateable $ IntMap.insert i s m
_ -> error "ATerm.insert"
find :: Int -> IntMap -> ShATerm
find i t = case t of
Updateable m -> IntMap.findWithDefault (ShAInt (-1) []) i m
Readonly a -> a ! i
data EqKey = EqKey (StableName ()) TypeRep deriving Eq
data Key = Key Int EqKey
mkKey :: Typeable a => a -> IO Key
mkKey t = do
s <- makeStableName t
  return $ Key (hashStableName s) $ EqKey (unsafeCoerce# s) $ typeOf t
data ATermTable = ATT
(IntMap.Map Int [(EqKey, Int)])
!(Map.Map ShATerm Int) !IntMap Int
!(IntMap.Map Int [Dynamic])
toReadonlyATT :: ATermTable -> ATermTable
toReadonlyATT (ATT h s t i dM) = ATT h s
(case t of
Updateable m -> Readonly $ listArray (0, i) $ IntMap.elems m
_ -> t ) i dM
emptyATermTable :: ATermTable
emptyATermTable = ATT IntMap.empty Map.empty empty (-1) IntMap.empty
addATermNoFullSharing :: ShATerm -> ATermTable -> (ATermTable, Int)
addATermNoFullSharing t (ATT h a_iDFM i_aDFM i1 dM) = let j = i1 + 1 in
(ATT h (Map.insert t j a_iDFM) (insert j t i_aDFM) j dM, j)
addATerm :: ShATerm -> ATermTable -> (ATermTable, Int)
addATerm t at@(ATT _ a_iDFM _ _ _) =
case Map.lookup t a_iDFM of
Nothing -> addATermNoFullSharing t at
Just i -> (at, i)
setKey :: Key -> Int -> ATermTable -> IO (ATermTable, Int)
setKey (Key h e) i (ATT t s l m d) =
return (ATT (IntMap.insertWith (++) h [(e, i)] t) s l m d, i)
getKey :: Key -> ATermTable -> IO (Maybe Int)
getKey (Key h k) (ATT t _ _ _ _) =
return $ List.lookup k $ IntMap.findWithDefault [] h t
getATerm :: ATermTable -> ShATerm
getATerm (ATT _ _ i_aFM i _) = find i i_aFM
getShATerm :: Int -> ATermTable -> ShATerm
getShATerm i (ATT _ _ i_aFM _ _) = find i i_aFM
getTopIndex :: ATermTable -> Int
getTopIndex (ATT _ _ _ i _) = i
getATermByIndex1 :: Int -> ATermTable -> ATermTable
getATermByIndex1 i (ATT h a_iDFM i_aDFM _ dM) = ATT h a_iDFM i_aDFM i dM
getATerm' :: Typeable t => Int -> ATermTable -> Maybe t
getATerm' i (ATT _ _ _ _ dM) =
listToMaybe $ mapMaybe fromDynamic $ IntMap.findWithDefault [] i dM
setATerm' :: Typeable t => Int -> t -> ATermTable -> ATermTable
setATerm' i t (ATT h a_iDFM i_aDFM m dM) =
ATT h a_iDFM i_aDFM m $ IntMap.insertWith (++) i [toDyn t] dM
-- | conversion of a string in double quotes to a character
str2Char :: String -> Char
str2Char str = case str of
'\"' : sr@(_ : _) -> conv' (init sr) where
conv' r = case r of
[x] -> x
['\\', x] -> case x of
'n' -> '\n'
't' -> '\t'
'r' -> '\r'
'\"' -> '\"'
_ -> error "ATerm.AbstractSyntax: unexpected escape sequence"
_ -> error "ATerm.AbstractSyntax: String not convertible to Char"
_ -> error "ATerm.AbstractSyntax: String doesn't begin with '\"'"
-- | conversion of an unlimited integer to a machine int
integer2Int :: Integer -> Int
integer2Int x = if toInteger ((fromInteger :: Integer -> Int) x) == x
then fromInteger x else
    error $ "ATerm.AbstractSyntax: Integer too big for Int: " ++ show x
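-- Illustrative usage sketch, not part of the original module: adding the same
-- shared term twice returns the same index thanks to the full-sharing lookup.
_sharingDemo :: (ShATerm, Bool)
_sharingDemo =
  let (tbl, i) = addATerm (ShAInt 42 []) emptyATermTable
      (_, j) = addATerm (ShAInt 42 []) tbl
  in (getShATerm i tbl, i == j)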
|
gnn/Hets
|
atermlib/src/ATerm/AbstractSyntax.hs
|
gpl-2.0
| 4,364
| 0
| 16
| 1,000
| 1,520
| 792
| 728
| 109
| 8
|
module CTM where
import Debug.Trace (trace)
import Control.Monad
import Text.Printf
import GHC.Float
import Data.List
import Data.Map (fromList, findWithDefault, (!))
import Text.XML.Light
import Codec.Picture
import Codec.Picture.Types (createMutableImage, unsafeFreezeImage)
-- colors = ["White", "Silver", "Gray", "Red", "Maroon", "Yellow", "Olive",
-- "Lime", "Green", "Aqua", "Teal", "Blue", "Navy", "Fuchsia", "Purple"]
type Color = (Double, Double, Double)
colors = [
-- (100, 100, 100), -- White
-- (75, 75, 75), -- Silver
-- (50, 50, 50), -- Gray
-- (0, 0, 0), -- Black
(100, 0, 0), -- Red
(50, 0, 0), -- Maroon
(100, 100, 0), -- Yellow
(50, 50, 0), -- Olive
(0, 100, 0), -- Lime
(0, 50, 0), -- Green
(0, 100, 100), -- Aqua
(0, 50, 50), -- Teal
(0, 0, 100), -- Blue
(0, 0, 50), -- Navy
(100, 0, 100), -- Fuchsia
(50, 0, 50) -- Purple
]
data Surface = Surface Double Double Double Double
data Rectangle = Rectangle {
rectangleX,
rectangleY,
rectangleW,
rectangleH,
rectangleB :: Double
} deriving (Show, Eq)
data Dir = X | Y deriving (Show, Eq)
type TreeSize = Double
type Family = String
data Tree = Tree {
family :: Maybe Family,
treeSize :: TreeSize,
children :: [Tree]
} deriving Show
partitionRect :: Dir -> Rectangle -> TreeSize -> [TreeSize] -> [Rectangle]
partitionRect dir (Rectangle x y w h b) size childSizes = tail $ scanl (shiftRect dir) r0 widths
where (w0, r0) = case dir of
X -> (w, Rectangle 0 y x h b)
Y -> (h, Rectangle x 0 w y b)
widths = [w0 * s/size | s <- childSizes]
shiftRect X (Rectangle x y w h b) w' = Rectangle (x + w) y w' h (0.5*b)
shiftRect Y (Rectangle x y w h b) h' = Rectangle x (y + h) w h' (0.5*b)
f = 0.75
ctm :: Rectangle -> Tree -> [(Rectangle, Surface, Color)]
ctm rect tree = ctm' 0.5 X rect (Surface 0 0 0 0) tree
where ctm' h d r s (Tree family _ [] ) = [(r, s, familyToColor family)]
ctm' h d r s (Tree _ size children) =
concat [ctm' h' d' r' (addRidge h' d' r' s) t' | (r',t') <- zip childRects children]
where childSizes = map treeSize children
childRects = partitionRect d r size childSizes
d' = case d of X -> Y; Y -> X
-- d' = case r of
-- Rectangle _ _ w h _ | h > w -> Y
-- | otherwise -> X
h' = f * h
familyToColor family = case family of
Nothing -> (1, 1, 1)
family -> table ! family
table = fromList $ zip (families tree) (cycle colors)
families tree = nub $ (family tree) : concat [families child | child <- children tree]
addRidge :: Double -> Dir -> Rectangle -> Surface -> Surface
addRidge h X r s = Surface s1' s2' sy1 sy2
where Rectangle x _ w _ _ = r
Surface s1 s2 sy1 sy2 = s
(s1', s2' ) = addRidgeSub h x w s1 s2
addRidge h Y r s = Surface sx1 sx2 s1' s2'
where Rectangle _ x _ w _ = r
Surface sx1 sx2 s1 s2 = s
(s1', s2') = addRidgeSub h x w s1 s2
addRidgeSub h x w s1 s2 = ((s1 + 4*h*(x + x + w)/w), (s2 - 4*h/w))
renderCushion :: (Rectangle, Surface, Color) -> [(Int, Int, PixelRGB8)]
renderCushion (Rectangle x y w h _, Surface x1 x2 y1 y2, (r,g,b)) =
[(ix, iy, p (fromIntegral ix) (fromIntegral iy)) |
ix <- [(truncate (x + 0.5)) .. (truncate (x + w - 0.5))],
iy <- [(truncate (y + 0.5)) .. (truncate (y + h - 0.5))]]
where p ix iy = PixelRGB8 (round $ v*r/100) (round $ v*g/100) (round $ v*b/100)
where v = ia + max 0 (is * cosa)
cosa = (nx*lx + ny*ly + lz) / sqrt (nx*nx + ny*ny + 1.0)
nx = - (2*x2*(ix + 0.5) + x1)
ny = - (2*y2*(iy + 0.5) + y1)
ia = 40
is = 215
lx = 0.09759
ly = 0.19518
lz = 0.9759
-- imageCtm :: Int -> Int -> Tree -> Image Pixel8
-- imageCtm w h tree = generateImage fromPixels w h
-- where pixels = fromList [((x, y), (round p)) | (x,y,p) <- ps]
-- fromPixels x y = findWithDefault 0 (x, y) pixels
-- ps = concat $ map renderCushion $
-- ctm (Rectangle 0 0 (fromIntegral w) (fromIntegral h) 1) tree
writePngCtm :: Int -> Int -> Tree -> FilePath -> IO ()
-- writePngCtm w h tree path = writePng path $ imageCtm w h tree
writePngCtm w h tree path = mutableImageCtm w h tree >>= writePng path
mutableImageCtm :: Int -> Int -> Tree -> IO (Image PixelRGB8)
mutableImageCtm w h tree = do
let r0 = Rectangle 0 0 (fromIntegral w) (fromIntegral h) 1
let cushions = map renderCushion $ ctm r0 tree
img <- createMutableImage w h $ PixelRGB8 0 0 0
forM_ cushions $ \cushion ->
forM_ cushion $ \(x,y,p) ->
writePixel img x y p
unsafeFreezeImage img
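-- Illustrative usage sketch, not part of the original module: a tiny
-- two-family tree rendered with the cushion treemap functions above.
-- The output file name is hypothetical.
_demoTree :: Tree
_demoTree = Tree Nothing 3 [ Tree (Just "a") 2 [], Tree (Just "b") 1 [] ]
-- writePngCtm 640 480 _demoTree "demo.png"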
-- svgRectangle :: String -> Rectangle -> Element
-- svgRectangle color (Rectangle x y w h b) =
-- unode "rect" [Attr (unqual "x") (printf "%.2f" x),
-- Attr (unqual "y") (printf "%.2f" y),
-- Attr (unqual "width") (printf "%.2f" w),
-- Attr (unqual "height") (printf "%.2f" h),
-- -- Attr (unqual "stroke-width") "1",
-- -- Attr (unqual "stroke") "black",
-- Attr (unqual "fill") color
-- -- Attr (unqual "fill-opacity") "0.2"
-- ]
-- svgCtm :: Double -> Double -> Tree -> Element
-- svgCtm width height tree = unode "svg" ([Attr (unqual "xmlns") "http://www.w3.org/2000/svg",
-- Attr (unqual "width") (printf "%.0fpx" width),
-- Attr (unqual "height") (printf "%.0fpx" height)],
-- (map (\(color, rect) -> svgRectangle color rect) $
-- zip (cycle colors) $
-- sortBy borderSort $
-- filter (\(Rectangle _ _ w h _) -> w >= 4 && h >= 4) $
-- ctm X (Rectangle 0 0 width height 1) tree))
-- where borderSort r0 r1 = compare (rectangleB r1) (rectangleB r0)
|
specify/TreeMap
|
CTM.hs
|
gpl-2.0
| 6,288
| 48
| 15
| 2,116
| 1,942
| 1,073
| 869
| 99
| 4
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleInstances #-}
module Utils where
import GHC.Generics
import Servant.API
import Data.List (intercalate)
import Data.List.Split (splitOn)
import qualified Data.Map as Map
import qualified Data.Text as T
import Test.QuickCheck
instance FromText [String] where
fromText = Just . splitOn "," . T.unpack
instance ToText [String] where
toText = T.pack . intercalate ","
lkp inputs l = case lookup l inputs of
Nothing -> Left $ "label " ++ T.unpack l ++ " not found"
Just v -> Right $ read (T.unpack v)
instance (Ord k, Arbitrary k, Arbitrary v) => Arbitrary (Map.Map k v) where
arbitrary = Map.fromList <$> arbitrary
|
massimo-zaniboni/netrobots
|
robot_examples/haskell-servant/rest_api/lib/Utils.hs
|
gpl-3.0
| 784
| 0
| 12
| 149
| 231
| 127
| 104
| 22
| 2
|
{- Copyright 2012 Dustin DeWeese
This file is part of peg.
peg is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
peg is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with peg. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE FlexibleInstances, TupleSections, GeneralizedNewtypeDeriving, RankNTypes, ImpredicativeTypes #-}
module Search where
import Control.Monad
import Control.Monad.Trans
import System.IO
import Control.Monad.Identity
import Control.Monad.Cont
import System.IO.Unsafe
{-
data Tree a = Node (Tree a) (Tree a) | Leaf a | Empty deriving (Show)
instance Functor Tree where
fmap f (Leaf x) = Leaf (f x)
fmap f (Node x y) = Node (fmap f x) (fmap f y)
instance Monad Tree where
return = Leaf
Node x y >>= f = Node (x >>= f) (y >>= f)
Leaf x >>= f = f x
Empty >>= _ = Empty
instance MonadPlus Tree where
mzero = Empty
mplus = Node
-}
type Tree = TreeT Identity
newtype TreeT m a = TreeT { runTreeT :: m (TreeT' m a) }
data TreeT' m a = NodeT (TreeT m a) (TreeT m a) | LeafT a | EmptyT
instance (Functor m) => Functor (TreeT m) where
fmap f mt = TreeT . fmap f' $ runTreeT mt
where f' EmptyT = EmptyT
f' (LeafT x) = LeafT (f x)
f' (NodeT mx my) = NodeT (fmap f mx) (fmap f my)
instance (Monad m) => Monad (TreeT m) where
return = TreeT . return . LeafT
mt >>= f = TreeT $ do
t <- runTreeT mt
case t of
EmptyT -> return EmptyT
LeafT x -> runTreeT (f x)
NodeT mx my -> return $ NodeT (mx >>= f) (my >>= f)
instance (Monad m) => MonadPlus (TreeT m) where
mzero = TreeT . return $ EmptyT
mx `mplus` my = TreeT . return $ NodeT mx my
instance MonadIO (TreeT IO) where
liftIO = lift
instance MonadTrans TreeT where
lift = TreeT . liftM LeafT
data Queue a = Queue [a] ([a] -> [a])
emptyQueue = Queue [] id
pushQueue x (Queue l f) = Queue l (f . (x:))
popQueue :: Queue a -> (a, Queue a)
popQueue = popQ' . forceQueue
where popQ' (Queue (x:l) f) = (x, Queue l f)
popQ' (Queue [] f) = error "popQ: empty queue"
forceQueue (Queue [] f) = Queue (f []) id
forceQueue q = q
listQueue (Queue l f) = l ++ f []
nullQueue :: Queue a -> Bool
nullQueue = nullQ' . forceQueue
where nullQ' (Queue l _) = null l
data QueueStateT q r m a = QueueStateT {
runQueueStateT :: Queue q ->
((a, Queue q) -> m (Maybe r, Queue q)) ->
m (Maybe r, Queue q)
}
instance Functor (QueueStateT q r m) where
fmap f (QueueStateT qf) = QueueStateT $ \q k -> qf q (k . (\(x, q) -> (f x, q)))
instance Monad (QueueStateT q r m) where
return x = QueueStateT $ \q k -> k (x, q)
QueueStateT qf >>= f = QueueStateT $ \q k ->
qf q $ \(x, q') ->
runQueueStateT (f x) q' k
instance MonadIO (QueueStateT q r IO) where
liftIO = lift
instance MonadTrans (QueueStateT q r) where
lift m = QueueStateT $ \q k -> k . (, q) =<< m
pushQ :: q -> QueueStateT q r m ()
pushQ x = QueueStateT $ \q k -> k ((), pushQueue x q)
popQ :: (Monad m) => QueueStateT q r m q
popQ = QueueStateT $ \q k -> if nullQueue q
then return (Nothing, q)
else k $ popQueue q
runQ qm = runQueueStateT (fmap Just qm) emptyQueue return
runWithQ q qm = runQueueStateT (fmap Just qm) q return
runBFS c = runQ $ pushQ c >> bfs
bfs :: (Monad m) => QueueStateT (TreeT m a) r m a
bfs = do mc <- popQ
c <- lift $ runTreeT mc
case c of
LeafT x -> return x
EmptyT -> bfs
NodeT mx my -> pushQ mx >> pushQ my >> bfs
runBFSn n c = runBFSn' n $ pushQueue c emptyQueue
runBFSn' n q | n <= 0 = return []
| otherwise = do (mx, q') <- runWithQ q bfs
case mx of
Nothing -> return []
Just x -> fmap (x :) $ runBFSn' (n-1) q'
runBFSAll c = runBFSAll' $ pushQueue c emptyQueue
runBFSAll' q = do (mx, q') <- runWithQ q bfs
case mx of
Nothing -> return []
Just x -> fmap (x :) $ runBFSAll' q'
runBFSAllI c = runBFSAllI' $ pushQueue c emptyQueue
runBFSAllI' q = do (mx, q') <- runWithQ q bfs
case mx of
Nothing -> return []
Just x -> fmap (x :) . unsafeInterleaveIO $ runBFSAllI' q'
choose :: (MonadPlus m) => [a] -> m a
choose = foldr (mplus . return) mzero
test :: Int -> Maybe (Int, Int)
test c = runIdentity $ do
(x, q) <- runBFS $ do
x <- choose [1..]
y <- choose [x..]
guard $ x * y == c
return (x, y)
return x
testTreeT :: IO [(Int, Int)]
testTreeT = do
x <- runBFSAllI $ do
liftIO $ putStr "enter a number: "
c <- liftIO readLn
x <- choose [1..]
y <- choose [x..]
guard $ x * y == c
liftIO . putStrLn $ "a solution is " ++ show (x, y)
return (x, y)
return (take 4 x)
|
HackerFoo/peg
|
Search.hs
|
gpl-3.0
| 5,457
| 0
| 15
| 1,723
| 1,937
| 980
| 957
| 112
| 3
|
module Input where
import Life
import Data.Char
import qualified Data.Array.Repa as R
import Data.Array.Repa (Z(..),(:.)(..),computeP,extent)
-- Life 1.06 text format input
-- Read cell coordinates
parseLine :: String -> Maybe (Int,Int)
parseLine cs | any isNotNum cs = Nothing
| length (words cs) /= 2 = Nothing
| otherwise = Just $ (\[a,b] -> (read a, read b)) (words cs)
where isNotNum c = not (isDigit c) && notElem c " -\r"
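-- Illustrative sketch, not part of the original module: expected results of
-- parseLine on Life 1.06 coordinate lines.
_parseLineDemo :: [Maybe (Int,Int)]
_parseLineDemo = map parseLine ["3 -2", "x = 3"]
-- == [Just (3,-2), Nothing]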
-- Read a pattern file and return a grid with the pattern
parseFile :: FilePath -> Grid -> IO Grid
parseFile fp grid = do
file <- readFile fp
let (Z :. w :. h) = extent grid
coords = map parseLine $ lines file
computeP $ R.traverse grid id (\_ sh@(Z :. a :. b) ->
if (Just (a - div w 2, b - div h 2)) `elem` coords
then 1
else 0)
|
iurdan/haskell-life
|
src/Input.hs
|
gpl-3.0
| 822
| 0
| 17
| 210
| 348
| 185
| 163
| 19
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.TPU.Projects.Locations.Nodes.Reimage
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Reimages a node\'s OS.
--
-- /See:/ <https://cloud.google.com/tpu/ Cloud TPU API Reference> for @tpu.projects.locations.nodes.reimage@.
module Network.Google.Resource.TPU.Projects.Locations.Nodes.Reimage
(
-- * REST Resource
ProjectsLocationsNodesReimageResource
-- * Creating a Request
, projectsLocationsNodesReimage
, ProjectsLocationsNodesReimage
-- * Request Lenses
, plnrXgafv
, plnrUploadProtocol
, plnrAccessToken
, plnrUploadType
, plnrPayload
, plnrName
, plnrCallback
) where
import Network.Google.Prelude
import Network.Google.TPU.Types
-- | A resource alias for @tpu.projects.locations.nodes.reimage@ method which the
-- 'ProjectsLocationsNodesReimage' request conforms to.
type ProjectsLocationsNodesReimageResource =
"v1" :>
CaptureMode "name" "reimage" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] ReimageNodeRequest :>
Post '[JSON] Operation
-- | Reimages a node\'s OS.
--
-- /See:/ 'projectsLocationsNodesReimage' smart constructor.
data ProjectsLocationsNodesReimage =
ProjectsLocationsNodesReimage'
{ _plnrXgafv :: !(Maybe Xgafv)
, _plnrUploadProtocol :: !(Maybe Text)
, _plnrAccessToken :: !(Maybe Text)
, _plnrUploadType :: !(Maybe Text)
, _plnrPayload :: !ReimageNodeRequest
, _plnrName :: !Text
, _plnrCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsNodesReimage' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plnrXgafv'
--
-- * 'plnrUploadProtocol'
--
-- * 'plnrAccessToken'
--
-- * 'plnrUploadType'
--
-- * 'plnrPayload'
--
-- * 'plnrName'
--
-- * 'plnrCallback'
projectsLocationsNodesReimage
:: ReimageNodeRequest -- ^ 'plnrPayload'
-> Text -- ^ 'plnrName'
-> ProjectsLocationsNodesReimage
projectsLocationsNodesReimage pPlnrPayload_ pPlnrName_ =
ProjectsLocationsNodesReimage'
{ _plnrXgafv = Nothing
, _plnrUploadProtocol = Nothing
, _plnrAccessToken = Nothing
, _plnrUploadType = Nothing
, _plnrPayload = pPlnrPayload_
, _plnrName = pPlnrName_
, _plnrCallback = Nothing
}
-- | V1 error format.
plnrXgafv :: Lens' ProjectsLocationsNodesReimage (Maybe Xgafv)
plnrXgafv
= lens _plnrXgafv (\ s a -> s{_plnrXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plnrUploadProtocol :: Lens' ProjectsLocationsNodesReimage (Maybe Text)
plnrUploadProtocol
= lens _plnrUploadProtocol
(\ s a -> s{_plnrUploadProtocol = a})
-- | OAuth access token.
plnrAccessToken :: Lens' ProjectsLocationsNodesReimage (Maybe Text)
plnrAccessToken
= lens _plnrAccessToken
(\ s a -> s{_plnrAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plnrUploadType :: Lens' ProjectsLocationsNodesReimage (Maybe Text)
plnrUploadType
= lens _plnrUploadType
(\ s a -> s{_plnrUploadType = a})
-- | Multipart request metadata.
plnrPayload :: Lens' ProjectsLocationsNodesReimage ReimageNodeRequest
plnrPayload
= lens _plnrPayload (\ s a -> s{_plnrPayload = a})
-- | The resource name.
plnrName :: Lens' ProjectsLocationsNodesReimage Text
plnrName = lens _plnrName (\ s a -> s{_plnrName = a})
-- | JSONP
plnrCallback :: Lens' ProjectsLocationsNodesReimage (Maybe Text)
plnrCallback
= lens _plnrCallback (\ s a -> s{_plnrCallback = a})
instance GoogleRequest ProjectsLocationsNodesReimage
where
type Rs ProjectsLocationsNodesReimage = Operation
type Scopes ProjectsLocationsNodesReimage =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsLocationsNodesReimage'{..}
= go _plnrName _plnrXgafv _plnrUploadProtocol
_plnrAccessToken
_plnrUploadType
_plnrCallback
(Just AltJSON)
_plnrPayload
tPUService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsNodesReimageResource)
mempty
|
brendanhay/gogol
|
gogol-tpu/gen/Network/Google/Resource/TPU/Projects/Locations/Nodes/Reimage.hs
|
mpl-2.0
| 5,225
| 0
| 16
| 1,177
| 778
| 454
| 324
| 114
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Ml.Projects.Locations.Studies.Trials.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a trial.
--
-- /See:/ <https://cloud.google.com/ml/ AI Platform Training & Prediction API Reference> for @ml.projects.locations.studies.trials.delete@.
module Network.Google.Resource.Ml.Projects.Locations.Studies.Trials.Delete
(
-- * REST Resource
ProjectsLocationsStudiesTrialsDeleteResource
-- * Creating a Request
, projectsLocationsStudiesTrialsDelete
, ProjectsLocationsStudiesTrialsDelete
-- * Request Lenses
, plstdXgafv
, plstdUploadProtocol
, plstdAccessToken
, plstdUploadType
, plstdName
, plstdCallback
) where
import Network.Google.MachineLearning.Types
import Network.Google.Prelude
-- | A resource alias for @ml.projects.locations.studies.trials.delete@ method which the
-- 'ProjectsLocationsStudiesTrialsDelete' request conforms to.
type ProjectsLocationsStudiesTrialsDeleteResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Delete '[JSON] GoogleProtobuf__Empty
-- | Deletes a trial.
--
-- /See:/ 'projectsLocationsStudiesTrialsDelete' smart constructor.
data ProjectsLocationsStudiesTrialsDelete =
ProjectsLocationsStudiesTrialsDelete'
{ _plstdXgafv :: !(Maybe Xgafv)
, _plstdUploadProtocol :: !(Maybe Text)
, _plstdAccessToken :: !(Maybe Text)
, _plstdUploadType :: !(Maybe Text)
, _plstdName :: !Text
, _plstdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsStudiesTrialsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plstdXgafv'
--
-- * 'plstdUploadProtocol'
--
-- * 'plstdAccessToken'
--
-- * 'plstdUploadType'
--
-- * 'plstdName'
--
-- * 'plstdCallback'
projectsLocationsStudiesTrialsDelete
:: Text -- ^ 'plstdName'
-> ProjectsLocationsStudiesTrialsDelete
projectsLocationsStudiesTrialsDelete pPlstdName_ =
ProjectsLocationsStudiesTrialsDelete'
{ _plstdXgafv = Nothing
, _plstdUploadProtocol = Nothing
, _plstdAccessToken = Nothing
, _plstdUploadType = Nothing
, _plstdName = pPlstdName_
, _plstdCallback = Nothing
}
-- | V1 error format.
plstdXgafv :: Lens' ProjectsLocationsStudiesTrialsDelete (Maybe Xgafv)
plstdXgafv
= lens _plstdXgafv (\ s a -> s{_plstdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plstdUploadProtocol :: Lens' ProjectsLocationsStudiesTrialsDelete (Maybe Text)
plstdUploadProtocol
= lens _plstdUploadProtocol
(\ s a -> s{_plstdUploadProtocol = a})
-- | OAuth access token.
plstdAccessToken :: Lens' ProjectsLocationsStudiesTrialsDelete (Maybe Text)
plstdAccessToken
= lens _plstdAccessToken
(\ s a -> s{_plstdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plstdUploadType :: Lens' ProjectsLocationsStudiesTrialsDelete (Maybe Text)
plstdUploadType
= lens _plstdUploadType
(\ s a -> s{_plstdUploadType = a})
-- | Required. The trial name.
plstdName :: Lens' ProjectsLocationsStudiesTrialsDelete Text
plstdName
= lens _plstdName (\ s a -> s{_plstdName = a})
-- | JSONP
plstdCallback :: Lens' ProjectsLocationsStudiesTrialsDelete (Maybe Text)
plstdCallback
= lens _plstdCallback
(\ s a -> s{_plstdCallback = a})
instance GoogleRequest
ProjectsLocationsStudiesTrialsDelete
where
type Rs ProjectsLocationsStudiesTrialsDelete =
GoogleProtobuf__Empty
type Scopes ProjectsLocationsStudiesTrialsDelete =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsLocationsStudiesTrialsDelete'{..}
= go _plstdName _plstdXgafv _plstdUploadProtocol
_plstdAccessToken
_plstdUploadType
_plstdCallback
(Just AltJSON)
machineLearningService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsStudiesTrialsDeleteResource)
mempty
|
brendanhay/gogol
|
gogol-ml/gen/Network/Google/Resource/Ml/Projects/Locations/Studies/Trials/Delete.hs
|
mpl-2.0
| 5,141
| 0
| 15
| 1,118
| 697
| 408
| 289
| 108
| 1
|
import Prelude hiding (mapM)
import Data.Traversable
import Data.List
import Data.Maybe
import Control.Concurrent.STM.Promise
import Control.Concurrent.STM.Promise.Process
import Control.Concurrent.STM.Promise.Tree
import Control.Concurrent.STM.Promise.Workers
import System.Environment
{- | A tree for this file structure:
├── mul-commutative
│ ├── induction_x_0.tptp
│ ├── induction_x_1.tptp
│ ├── induction_x_y_0.tptp
│ ├── induction_x_y_1.tptp
│ ├── induction_x_y_2.tptp
│ ├── induction_x_y_3.tptp
│ ├── induction_y_0.tptp
│ ├── induction_y_1.tptp
│ └── no_induction_0.tptp
└── plus-commutative
├── induction_x_0.tptp
├── induction_x_1.tptp
├── induction_x_y_0.tptp
├── induction_x_y_1.tptp
├── induction_x_y_2.tptp
├── induction_x_y_3.tptp
├── induction_y_0.tptp
├── induction_y_1.tptp
└── no_induction_0.tptp
-}
file_tree :: Tree FilePath
file_tree = fmap (++ ".tptp") $ tryAll
[ fmap ("mul-commutative/" ++) $ requireAny
[ fmap ("induction_x_" ++) $ requireAll $ map Leaf ["0","1"]
, fmap ("induction_y_" ++) $ requireAll $ map Leaf ["0","1"]
, fmap ("induction_x_y_" ++) $ requireAll $ map Leaf ["0","1","2","3"]
, Leaf "no_induction_0"
]
, fmap ("plus-commutative/" ++) $ requireAny
[ fmap ("induction_x_" ++) $ requireAll $ map Leaf ["0","1"]
, fmap ("induction_y_" ++) $ requireAll $ map Leaf ["0","1"]
, fmap ("induction_x_y_" ++) $ requireAll $ map Leaf ["0","1","2","3"]
, Leaf "no_induction_0"
]
]
success :: ProcessResult -> Bool
success r = excode r == ExitSuccess && any (`isInfixOf` stdout r) ok
where
ok = ["Theorem","Unsatisfiable"]
eproverPromise :: FilePath -> IO (Promise [(FilePath,Bool)])
eproverPromise file = do
let args = ["-xAuto","-tAuto",'-':"-tptp3-format","-s"]
cmd = "eprover"
{-
let args = ["-tptp","-nw"]
cmd = "z3"
-}
promise <- processPromise cmd (file : args) ""
let chres :: ProcessResult -> [(FilePath,Bool)]
chres r = [ (file,success r) ]
return $ fmap chres promise
main :: IO ()
main = do
tm:_ <- map read `fmap` getArgs
promise_tree <- mapM eproverPromise file_tree
let timeout = tm * 1000 -- microseconds
processes = 2
workers (Just timeout) processes (interleave promise_tree)
(_,res) <- evalTree (any (not . snd)) promise_tree
putStrLn "Results: "
mapM_ print res
|
danr/stm-promise
|
tptp/Test.hs
|
lgpl-3.0
| 2,683
| 0
| 12
| 590
| 651
| 353
| 298
| 42
| 1
|
module Data.Logic where
class Logic a where
and :: a -> a -> a
or :: a -> a -> a
not :: a -> a
(∧) :: Logic a => a -> a -> a
(∧) = Data.Logic.and
(∨) :: Logic a => a -> a -> a
(∨) = Data.Logic.or
infixr 3 ∧
infixr 2 ∨
|
bflyblue/tag
|
Data/Logic.hs
|
unlicense
| 253
| 0
| 8
| 81
| 124
| 71
| 53
| 11
| 1
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module OrcBot.Types
(
-- * Args
Args(..)
-- * Settings
, Settings(..)
, orcOrchestrateSession
, orcIrcUri
-- * OrcT
, OrcT
, OrcIO
, withOrc
) where
import Control.Applicative
import qualified Control.Exception as Ex
import Control.Lens
import Control.Monad.Error.Class
import Control.Monad.IO.Class
import Control.Monad.Reader.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader hiding (ask, local)
import Database.Orchestrate.Types hiding (ask)
import Database.Orchestrate.Utils (runO')
import Network.URI
data Args = Args
{
}
deriving (Show, Eq)
data Settings = Settings
{ _orcOrchestrateSession :: Session
, _orcIrcUri :: URI
} deriving (Show)
$(makeLenses ''Settings)
newtype OrcT m a = OrcT { runOrc :: ReaderT URI (OrchestrateT m) a }
deriving ( Functor, Applicative, Monad, MonadIO
, MonadError Ex.SomeException)
instance MonadTrans OrcT where
lift = OrcT . lift . lift
instance Monad m => MonadReader URI (OrcT m) where
ask = OrcT $ ask
local f = OrcT . local f . runOrc
type OrcIO = OrcT IO
withOrc :: Settings -> OrcIO a -> IO (Either Ex.SomeException a)
withOrc Settings{..} m =
runO' (runReaderT (runOrc m) _orcIrcUri) _orcOrchestrateSession
|
erochest/orcbot
|
src/OrcBot/Types.hs
|
apache-2.0
| 1,676
| 1
| 10
| 535
| 395
| 230
| 165
| 43
| 1
|
-- http://www.codewars.com/kata/52de9bd621c71b919c000592
module HyperSphere where
inSphere :: (Ord a, Num a) => [a] -> a -> Bool
inSphere xs r = sum (map (^2) xs) <= r^2
|
Bodigrim/katas
|
src/haskell/6-Hyper-Sphere.hs
|
bsd-2-clause
| 170
| 0
| 10
| 27
| 69
| 38
| 31
| 3
| 1
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
#include "version-compatibility-macros.h"
-- | Definitions to write renderers based on looking at a 'SimpleDocStream' as
-- an instruction tape for a stack machine: text is written, annotations are
-- added (pushed) and later removed (popped).
module Prettyprinter.Render.Util.StackMachine (
-- * Simple, pre-defined stack machines
--
-- | These cover most basic use cases where there is not too much special
-- logic, and all that’s important is how to render text, and how to
-- add/remove an annotation.
renderSimplyDecorated,
renderSimplyDecoratedA,
-- * General stack machine
--
-- | These definitions allow defining a full-blown stack machine renderer,
-- allowing for arbitrary peeking, popping and what not.
StackMachine,
execStackMachine,
pushStyle,
unsafePopStyle,
unsafePeekStyle,
writeOutput,
) where
import Control.Applicative
import Data.Text (Text)
import qualified Data.Text as T
import Prettyprinter.Internal
import Prettyprinter.Render.Util.Panic
#if !(SEMIGROUP_MONOID_SUPERCLASS)
import Data.Monoid
#endif
-- $setup
--
-- (Definitions for the doctests)
--
-- >>> import Prettyprinter hiding ((<>))
-- >>> import qualified Data.Text.IO as T
-- | Simplest possible stack-based renderer.
--
-- For example, here is a document annotated with @()@, and the behaviour is to
-- write »>>>« at the beginning, and »<<<« at the end of the annotated region:
--
-- >>> let doc = "hello" <+> annotate () "world" <> "!"
-- >>> let sdoc = layoutPretty defaultLayoutOptions doc
-- >>> T.putStrLn (renderSimplyDecorated id (\() -> ">>>") (\() -> "<<<") sdoc)
-- hello >>>world<<<!
--
-- The monoid will be concatenated in a /right associative/ fashion.
renderSimplyDecorated
:: Monoid out
=> (Text -> out) -- ^ Render plain 'Text'
-> (ann -> out) -- ^ How to render an annotation
-> (ann -> out) -- ^ How to render the removed annotation
-> SimpleDocStream ann
-> out
renderSimplyDecorated text push pop = go []
where
go _ SFail = panicUncaughtFail
go [] SEmpty = mempty
go (_:_) SEmpty = panicInputNotFullyConsumed
go stack (SChar c rest) = text (T.singleton c) <> go stack rest
go stack (SText _l t rest) = text t <> go stack rest
go stack (SLine i rest) = text (T.singleton '\n') <> text (textSpaces i) <> go stack rest
go stack (SAnnPush ann rest) = push ann <> go (ann : stack) rest
go (ann:stack) (SAnnPop rest) = pop ann <> go stack rest
go [] SAnnPop{} = panicUnpairedPop
{-# INLINE renderSimplyDecorated #-}
-- | Version of 'renderSimplyDecorated' that allows for 'Applicative' effects.
renderSimplyDecoratedA
:: (Applicative f, Monoid out)
=> (Text -> f out) -- ^ Render plain 'Text'
-> (ann -> f out) -- ^ How to render an annotation
-> (ann -> f out) -- ^ How to render the removed annotation
-> SimpleDocStream ann
-> f out
renderSimplyDecoratedA text push pop = go []
where
go _ SFail = panicUncaughtFail
go [] SEmpty = pure mempty
go (_:_) SEmpty = panicInputNotFullyConsumed
go stack (SChar c rest) = text (T.singleton c) <++> go stack rest
go stack (SText _l t rest) = text t <++> go stack rest
go stack (SLine i rest) = text (T.singleton '\n') <++> text (textSpaces i) <++> go stack rest
go stack (SAnnPush ann rest) = push ann <++> go (ann : stack) rest
go (ann:stack) (SAnnPop rest) = pop ann <++> go stack rest
go [] SAnnPop{} = panicUnpairedPop
(<++>) = liftA2 mappend
{-# INLINE renderSimplyDecoratedA #-}
-- | @WriterT output StateT [style] a@, but with a strict Writer value.
--
-- The @output@ type is used to append data chunks to, the @style@ is the member
-- of a stack of styles to model nested styles with.
newtype StackMachine output style a = StackMachine ([style] -> (a, output, [style]))
{-# DEPRECATED StackMachine "Writing your own stack machine is probably more efficient and customizable; also consider using »renderSimplyDecorated(A)« instead" #-}
instance Functor (StackMachine output style) where
fmap f (StackMachine r) = StackMachine (\s ->
let (x1, w1, s1) = r s
in (f x1, w1, s1))
instance Monoid output => Applicative (StackMachine output style) where
pure x = StackMachine (\s -> (x, mempty, s))
StackMachine f <*> StackMachine x = StackMachine (\s ->
let (f1, w1, s1) = f s
(x2, w2, s2) = x s1
!w12 = w1 <> w2
in (f1 x2, w12, s2))
instance Monoid output => Monad (StackMachine output style) where
#if !(APPLICATIVE_MONAD)
return = pure
#endif
StackMachine r >>= f = StackMachine (\s ->
let (x1, w1, s1) = r s
StackMachine r1 = f x1
(x2, w2, s2) = r1 s1
!w12 = w1 <> w2
in (x2, w12, s2))
-- | Add a new style to the style stack.
pushStyle :: Monoid output => style -> StackMachine output style ()
pushStyle style = StackMachine (\styles -> ((), mempty, style : styles))
-- | Get the topmost style.
--
-- If the stack is empty, this raises an 'error'.
unsafePopStyle :: Monoid output => StackMachine output style style
unsafePopStyle = StackMachine (\stack -> case stack of
x:xs -> (x, mempty, xs)
[] -> panicPoppedEmpty )
-- | View the topmost style, but do not modify the stack.
--
-- If the stack is empty, this raises an 'error'.
unsafePeekStyle :: Monoid output => StackMachine output style style
unsafePeekStyle = StackMachine (\styles -> case styles of
x:_ -> (x, mempty, styles)
[] -> panicPeekedEmpty )
-- | Append a value to the output end.
writeOutput :: output -> StackMachine output style ()
writeOutput w = StackMachine (\styles -> ((), w, styles))
-- | Run the renderer and retrieve the writing end
execStackMachine :: [styles] -> StackMachine output styles a -> (output, [styles])
execStackMachine styles (StackMachine r) = let (_, w, s) = r styles in (w, s)
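-- Illustrative sketch, not part of the original module: driving the general
-- stack machine by hand. Styles are plain Ints here purely for demonstration.
_stackMachineDemo :: (Text, [Int])
_stackMachineDemo = execStackMachine [] $ do
    pushStyle 1
    writeOutput (T.pack "styled text")
    style <- unsafePopStyle
    writeOutput (T.pack (" (was style " ++ show style ++ ")"))
-- fst _stackMachineDemo == "styled text (was style 1)"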
|
quchen/prettyprinter
|
prettyprinter/src/Prettyprinter/Render/Util/StackMachine.hs
|
bsd-2-clause
| 6,265
| 0
| 13
| 1,617
| 1,505
| 807
| 698
| 89
| 9
|
{-# LANGUAGE QuasiQuotes, TemplateHaskell, ViewPatterns, DoAndIfThenElse #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Haskell.TH.Desugar
-- Copyright : (c) 2012 Michael Sloan
-- License : BSD-style (see the LICENSE file)
-- Maintainer : Michael Sloan <mgsloan@gmail.com>
-- Stability : experimental
-- Portability : GHC only
--
-- This module provides Template Haskell utilities for desugaring expressions,
-- as in the Standard Report: <http://www.haskell.org/onlinereport/exps.html>
--
-----------------------------------------------------------------------------
module Language.Quasi.Internal.Desugar where
import qualified Data.Map as M
import Control.Arrow ( first )
import Control.Monad.Trans.Class ( lift )
import Data.List ( find )
import Language.Haskell.TH
import Language.Haskell.TH.PprLib ( punctuate, comma, quotes )
import Language.Quasi.Ast.TH
import Language.Quasi.Internal.Utils
( ErrorQ, errorQ, reifyCatch, normalClause, toUInfix )
-- Observation: In general, as things using TH approach language extensions /
-- plugins / advanced DSLs, interoperating with GHC's functionality becomes more
-- and more valuable. Currently this isn't at all possible. For example, it
-- would be really cool to be able to process and generate GHC core.
--
-- In the mean-time, we need to implement all of the GHC stuff, like desugarings,
-- or parsing (haskell-src-exts / meta), over again, in TH-land.
--
-- One interesting way this could go is if the TH stuff could be developed to
-- such an extent that parts of the language could start moving into libraries.
-- These de-sugarings are much more compact than GHC's, yet other than issues
-- with error messages, ought to be functionally similar (identical semantics).
-- TODO: Clean up handling of errors.
-- TODO: Resugarings.
-- Utilities
quoteCommaPpr :: Ppr a => [a] -> String
quoteCommaPpr xs = show . punctuate comma $ map (quotes . ppr) xs
-- | Constructs the specified infix operator, combining the @Exp@ arguments.
opE :: ExpQ -> String -> ExpQ -> ExpQ
opE l o r = infixE (Just l) (varE $ mkName o) (Just r)
-- | Unwraps @ForallC@s.
unforallCon :: Con -> Con
unforallCon (ForallC _ _ c) = c
unforallCon c = c
-- | Extracts @Left@ the value, calling @error@ on failure.
fromLeft :: Either a b -> a
fromLeft (Left x) = x
fromLeft _ = error "fromLeft"
-- | Extracts @Right@ the value, calling @error@ on failure.
fromRight :: Either a b -> b
fromRight (Right x) = x
fromRight _ = error "fromRight"
-- | Applies the de-sugarings that I think would be useful for doing
-- interesting code transformations.
{-
desugar :: forall a. Data a => a -> Q a
desugar = everywhereM (return `extM` helper)
where
helper (InfixE l o r) = dsInfix l o r
helper (DoE xs) = eterror $ dsDo xs
helper (CompE xs) = eterror $ dsDo xs
helper (RecConE n fs) = eterror $ dsRecCon n fs
helper (ListE xs) = dsList xs
-- helper (SigE e t) = dsSig e t
-- helper (LamE ps e) = dsLambda ps e
helper (ArithSeqE r) = dsArithSeq r
helper (CondE c t e) = dsIf c t e
helper e = return e
-}
dsParens :: Exp -> Exp
dsParens [e'| ( {{x}} ) |] = x
dsParens x = x
-- | Desugars an infix operator. This is close to the way that the Standard
-- Report de-sugars infix-operators:
--
-- (From Section 3.4: "Operator Applications")
-- The following identities hold:
-- @
-- e1 op e2 = (op) e1 e2
-- -e = negate e
-- @
--
-- (From Section 3.5: "Sections")
-- The following identities hold:
-- @
-- (op e) = \x -> x op e
-- (e op) = \x -> e op x
-- @
-- where @op@ is a binary operator, @e@ is an expression, and @x@ is a
-- variable that does not occur free in @e@.
--
-- Negation is already represented using @App@E in TH. This leaves us with
-- one last consideration: @-XPostfixOperators@. This extension makes it such
-- that @ (e !) @ is equivalent (from the point of view of both type checking
-- and execution) to the expression @ ((!) e) @.
--dsInfix :: (Maybe Exp) -> Exp -> (Maybe Exp) -> ExpQ
--dsInfix :: (Maybe Exp) -> Exp -> (Maybe Exp) -> ExpQ
{-
dsInfix Nothing o Nothing = return o
dsInfix (Just l) o Nothing = return [e'| {{o}} {{l}} |]
dsInfix (Just l) o (Just r) = return [e'| {{o}} {{l}} {{r}} |]
dsInfix Nothing o (Just r) = newName "x" >>= \x
-> return [e'| \{{VarP x}} -> {{VarE x}} {{o}} {{r}} |]
-}
dsInfix :: Exp -> ExpQ
dsInfix = helper . toUInfix
where
helper [e'| {{l}} `{{o}}` {{r}} |] = return [e'| {{o}} {{l}} {{r}} |]
helper [e'| ({{l}} `{{o}}` ) |] = return [e'| {{o}} {{l}} |]
helper [e'| ( `{{o}}` {{r}}) |] = newName "x" >>= \x
-> return [e'| \{{VarP x}} -> {{o}} {{VarE x}} {{r}} |]
helper e = return e
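-- A minimal usage sketch (the name 'exampleDsInfix' is an illustrative
-- addition, not part of the original module): it desugars the TH form of the
-- left section @(1 +)@ into (roughly) the prefix application @(+) 1@.
exampleDsInfix :: ExpQ
exampleDsInfix =
  dsInfix (InfixE (Just (LitE (IntegerL 1))) (VarE (mkName "+")) Nothing)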
-- | Desugars the statements involved in a do-block, as described in the
-- Standard Report:
--
-- Do expressions satisfy these identities, which may be used as a
-- translation into the kernel, after eliminating empty stmts:
-- @
-- do {e} = e
-- do {e;stmts} = e >> do {stmts}
-- do {p <- e; stmts} = let ok p = do {stmts}
-- ok _ = fail "..."
-- in e >>= ok
-- do {let decls; stmts} = let decls in do {stmts}
-- @
-- The ellipsis \"...\" stands for a compiler-generated error message, passed
-- to fail, preferably giving some indication of the location of the
-- pattern-match failure; the functions @>>@, @>>=@, and @fail@ are operations
-- in the class Monad, as defined in the Prelude; and @ok@ is a fresh
-- identifier.
--
-- The behaviour deviates slightly from the Standard Report: instead it
-- implements @-XRebindableSyntax@, so whichever @>>@, @>>=@, and @fail@ are
-- in scope are the ones used (and they must be in scope).
--
-- This implementation also handles the @-XParallelListComp@ extension, as
-- this code is also used for desugaring list comprehensions.
dsDo :: [Stmt] -> ErrorQ Exp
dsDo [] = errorQ "Empty 'do' block"
dsDo [NoBindS e] = return e
dsDo [e] = errorQ
$ "The last statement in a 'do' block must be an expression " ++ pprint e
dsDo (x:xs) = process =<< dsDo xs
where
process rest = case x of
(LetS ds) -> return $ LetE ds rest
(BindS p e) -> lift $ processBind p e rest
(NoBindS e) -> lift $ opE (return e) ">>" (return rest)
--(ParS xs) ->
processBind p e rest = do
ok <- newName "ok"
letE [funD ok [ normalClause [return p] (return rest)
, normalClause [wildP] matchFail
]
]
(opE (return e) ">>=" (varE ok))
matchFail = [| fail "Pattern match failure in do expression" |]
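-- A minimal usage sketch (the name 'exampleDsDo' is an illustrative addition,
-- not part of the original module): it desugars the statements of
-- @do { x <- getLine; putStrLn x }@ into an application of @>>=@ to a
-- generated @ok@ binding, as described above.
exampleDsDo :: ErrorQ Exp
exampleDsDo =
  dsDo [ BindS (VarP (mkName "x")) (VarE (mkName "getLine"))
       , NoBindS (VarE (mkName "putStrLn") `AppE` VarE (mkName "x"))
       ]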
-- | De-sugars lists as described in the Standard Report:
--
-- The following identity holds:
-- @
-- [e1, ..., ek] = e1 : (e2 : ( ... (ek : [])))
-- @
-- where @:@ and @[]@ are constructors for lists, as defined in the Prelude
-- (see Section 6.1.3). The types of @e1@ through @ek@ must all be the same
-- (call it @t@), and the type of the overall expression is @[t]@
-- (see Section 4.1.2).
dsList :: Exp -> Exp
dsList (ListE xs) = foldr (\l r -> [e'| {{l}} : {{r}} |]) (ListE []) xs
dsList e = e
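-- A minimal usage sketch (the name 'exampleDsList' is an illustrative
-- addition, not part of the original module): it rewrites the TH form of
-- @[1, 2]@ into explicit cons cells, i.e. @1 : (2 : [])@.
exampleDsList :: Exp
exampleDsList = dsList (ListE [LitE (IntegerL 1), LitE (IntegerL 2)])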
-- | De-sugars arithmetic sequences as described in the Standard Report:
--
-- Arithmetic sequences satisfy these identities:
-- @
-- [ e1.. ] = enumFrom e1
-- [ e1,e2.. ] = enumFromThen e1 e2
-- [ e1..e3 ] = enumFromTo e1 e3
-- [ e1,e2..e3 ] = enumFromThenTo e1 e2 e3
-- @
-- where enumFrom, enumFromThen, enumFromTo, and enumFromThenTo are class
-- methods in the class Enum as defined in the Prelude (see Figure 6.1).
dsArithSeq :: Exp -> Exp
dsArithSeq (ArithSeqE r) = case r of
(FromR f ) -> [e'| enumFrom {{f}} |]
(FromThenR f n ) -> [e'| enumFromThen {{f}} {{n}} |]
(FromToR f t) -> [e'| enumFromTo {{f}} {{t}} |]
(FromThenToR f n t) -> [e'| enumFromThenTo {{f}} {{n}} {{t}} |]
dsArithSeq e = e
--TODO: What is "ExplicitPArr", PArrSeq, etc?
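-- A minimal usage sketch (the name 'exampleDsArithSeq' is an illustrative
-- addition, not part of the original module): it rewrites the TH form of
-- @[1..5]@ into an application of 'enumFromTo'.
exampleDsArithSeq :: Exp
exampleDsArithSeq =
  dsArithSeq (ArithSeqE (FromToR (LitE (IntegerL 1)) (LitE (IntegerL 5))))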
-- | Desugars if statements as described in the Standard Report:
--
-- The following identity holds:
-- @
-- if e1 then e2 else e3 = case e1 of { True -> e2 ; False -> e3 }
-- @
-- where @True@ and @False@ are the two nullary constructors from the type
-- @Bool@, as defined in the Prelude. The type of @e1@ must be @Bool@; @e2@
-- and @e3@ must have the same type, which is also the type of the entire
-- conditional expression.
--
-- However, in order to be compatible with @-XRebindableSyntax@, it checks if
-- @ifThenElse@ is in scope. If so, then the desugaring is as follows:
-- @
-- if e1 then e2 else e3 = ifThenElse e1 e2 e3
-- @
dsIf :: Exp -> Exp -> Exp -> ExpQ
dsIf c t e = recover (return fallback) (reify ifThenElse >> return overloaded)
where
ifThenElse = mkName "ifThenElse"
overloaded = [ec| {{ifThenElse}} {{c}} {{t}} {{e}} |]
fallback = [e'| case {{c}} of { True -> {{t}}; False -> {{e}} } |]
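-- A minimal usage sketch (the name 'exampleDsIf' is an illustrative addition,
-- not part of the original module): it desugars @if c then 1 else 2@, using
-- @ifThenElse@ when it reifies successfully and the @case@ fallback otherwise.
exampleDsIf :: ExpQ
exampleDsIf = dsIf (VarE (mkName "c")) (LitE (IntegerL 1)) (LitE (IntegerL 2))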
-- | Desugars record construction as described in the Standard Report:
--
-- In the binding @f = v@, the field @f@ labels @v@.
-- @
-- C { bs } = C (pickC1 bs undefined) ...(pickCk bs undefined)
-- @
-- where @k@ is the arity of @C@.
--
-- The auxiliary function @pickCi bs d@ is defined as follows:
--
-- If the @i@th component of a constructor @C@ has the field label @f@, and
-- if @f=v@ appears in the binding list @bs@, then @pickCi bs d@ is @v@.
-- Otherwise, @pickCi bs d@ is the default value @d@.
dsRecCon :: Name -> [FieldExp] -> ErrorQ Exp
dsRecCon cn fes = process =<< lookupConstructors cn
where
femap = M.fromList $ map (first nameBase) fes
process fs = lift $ appsE (conE cn : map pickC fs)
where
pickC ((`M.lookup` femap) . nameBase -> Just e, _, _) = return e
pickC _ = varE $ mkName "undefined"
lookupConstructors n = do
-- There's gotta be a better way to reify a constructor's field description.
info <- reifyCatch n
case info of
(DataConI _ _ n' _)
| n /= n' -> lookupConstructors n'
| otherwise -> errorQ $ "Could not reify type constructor for "
++ "'" ++ pprint n ++ "' - instead got:\n"
++ pprint info
(TyConI dd@(DataD _ _ _ cs _)) -> do
case find recWereLookingFor $ map unforallCon cs of
Just (RecC _ fs) -> return fs
_ -> errorQ $ "Could not find record constructor '" ++ pprint n ++ "' in:\n"
++ pprint dd
_ -> errorQ $ "Could not desugar record type:\n"
++ pprint info
recWereLookingFor (RecC n' _) = nameBase cn == nameBase n'
recWereLookingFor _ = False
-- | Desugars record field updates as described in the Standard Report:
--
-- Using the prior definition of pick,
-- @
-- e { bs } = case e of
-- C1 v1 ... vk1 -> C1 (pickC11 bs v1) ... (pickC1k1 bs vk1)
-- ...
-- Cj v1 ... vkj -> Cj (pickCj1 bs v1) ... (pickCjkj bs vkj)
-- _ -> error "Update error"
-- @
-- where {C1,...,Cj} is the set of constructors containing all labels in bs,
-- and ki is the arity of Ci.
{- TODO
dsRecUpd :: Exp -> [FieldExp] -> ErrorQ Exp
dsRecUpd e fes = do
conType <- getConType
info <- reifyCatch conType
case info of
(TyConI (DataD _ _ _ cs _))
-> buildResult =<< (concat <$> mapM (mkMatch . unforallCon) cs)
_ -> errorQ $ "Type " ++ pprint conType ++ " does not reify to a DataType constructor. Instead\n"
++ pprint info
where
femap = M.fromList $ map (first nameBase) fes
buildResult [] = errorQ $ "No constructor has all these fields: " ++ nameList
buildResult matches
= lift . caseE (return e)
$ matches ++ [match wildP (normalB [| error "Update error" |]) []]
mkMatch (RecC n fs) = do
let results :: [(String, Maybe ExpQ)]
results = map (\(nameBase -> n, _, _) -> (n, M.lookup n femap)) fs
names <- mapM (\(n, m) -> maybe (newName n) (return . const under) m) results
let patterns = map (ifUnder wildP varP) names
vals = zipWith (\(_, e) -> ifUnder (return $ fromJust e) varE) results names
return [match (conP n patterns) (normalB . appsE $ conE n : vals)]
where
under = mkName "_"
ifUnder u f n = if n == under then u else f n
mkMatch _ = return []
nameList = quoteCommaPpr (map fst fes)
getConType :: ErrorQ Name
getConType = do
ts <- mapM (reifyConType . fst) fes
if all (uncurry (==)) . zip ts $ tail ts
then return $ head ts
else errorQ $ "The following constructors come from different types:\n"
++ nameList ++ " ( " ++ quoteCommaPpr ts ++ " respectively )"
reifyConType :: Name -> ErrorQ Name
reifyConType fn = do
info <- reifyCatch fn
case info of
(VarI _ (collectArrows [] -> (ConT cn:_)) _ _) -> return cn
_ -> errorQ $ "'" ++ pprint fn ++ "' doesn't reify as a field selector!"
-}
-- | Desugars record field selectors as described in the Standard Report:
--
-- A field label @f@ introduces a selector function defined as:
-- @
-- f x = case x of { C1 p11 ...p1k -> e1 ; ... ; Cn pn1 ...pnk -> en }
-- @
-- where @C1...Cn@ are all the constructors of the datatype containing a
-- field labeled with @f@, @pij@ is @y@ when @f@ labels the @j@th component
-- of @Ci@ or @_@ otherwise, and @ei@ is @y@ when some field in @Ci@ has a
-- label of @f@ or @undefined@ otherwise.
--dsRecField :: Name -> ExpQ
--dsRecField = undefined
-- | This doesn't seem like a very useful de-sugaring to me, but it implements
-- the desugaring / translation of expression signatures specified in the
-- report:
--
-- @
-- e :: t = let { v :: t; v = e } in v
-- @
dsSig :: Exp -> Type -> ExpQ
dsSig e t = do
v <- newName "v"
return [e'| let { {{VarP v}} :: {{t}}; {{VarP v}} = {{e}} } in {{VarE v}} |]
--TODO: return [ec| let { {{v}} :: {{t}}; {{v}} = {{e}} } in {{v}} |]
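-- A minimal usage sketch (the name 'exampleDsSig' is an illustrative
-- addition, not part of the original module): it attaches the signature
-- @Int@ to the variable @x@ via the generated @let@ binding shown above.
exampleDsSig :: ExpQ
exampleDsSig = dsSig (VarE (mkName "x")) (ConT (mkName "Int"))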
-- | This doesn't seem like a very useful de-sugaring to me, but it implements
-- the desugaring / translation of lambdas specified in the report:
--
-- The following identity holds:
-- @
-- \p1 ... pn -> e = \ x1 ... xn -> case (x1, ..., xn) of (p1, ..., pn) -> e
-- @
-- where the @xi@ are new identifiers.
--
-- This isn't very useful, because it still uses lambdas!
dsLambda :: [Pat] -> Exp -> ExpQ
dsLambda ps e
| all isVar ps = return . ParensE $ LamE ps e
| otherwise = do
names <- mapM (const $ newName "x") ps
--TODO
--return [e'| \{{<x1 xn>map varP names}} -> case ({{<x1, xn> map varE names}}) of
-- |]
-- Parens used to prevent the de-sugaring from recursing forever.
parensE . lamE (map varP names)
$ caseE (tupE $ map varE names)
[return $ Match (TupP ps) (NormalB e) []]
where
isVar (VarP _) = True
isVar _ = False
-- While the report specifies that there is no translation desugaring for
-- tuples, I figure it might sometimes be useful to desugar them to the form
-- @(,,) x y z@.
--
-- @(e1, …, ek)@ for @k ≥ 2@ is an instance of a k-tuple as defined in the
-- Prelude, and requires no translation. If @t1@ through @tk@ are the types of
-- @e1@ through @ek@, respectively, then the type of the resulting tuple is
-- @(t1, …, tk)@ (see Section 4.1.2).
dsTuple :: Exp -> ExpQ
dsTuple (TupE xs)
= appsE
$ conE (mkName $ "GHC.Tuple.(" ++ map (const ',') xs ++ ")")
: map return xs
dsTuple e = return e
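-- A minimal usage sketch (the name 'exampleDsTuple' is an illustrative
-- addition, not part of the original module): it rewrites the TH pair
-- @(1, 2)@ into a saturated application of the pair constructor.
exampleDsTuple :: ExpQ
exampleDsTuple = dsTuple (TupE [LitE (IntegerL 1), LitE (IntegerL 2)])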
-- | From the standard report:
--
-- A pattern binding binds variables to values. A simple pattern binding has
-- form p = e. The pattern p is matched \"lazily\" as an irrefutable pattern, as
-- if there were an implicit ~ in front of it. See the translation in Section
-- 3.12. The general form of a pattern binding is p match, where a match is the
-- same structure as for function bindings above; in other words, a pattern
-- binding is:
--
-- @
-- p | gs1 = e1
-- | gs2 = e2
-- . . .
-- | gsm = em
-- where { decls }
-- @
--
-- Translation: The pattern binding above is semantically equivalent to this
-- simple pattern binding:
--
-- @
-- p = let decls in
-- case () of
-- () | gs1 -> e1
-- | gs2 -> e2
-- . . .
-- | gsm -> em
-- _ -> error "Unmatched pattern"
-- @
dsPatBinds :: Clause -> Clause
dsPatBinds (Clause ps b ds)
= Clause ps (NormalB . LetE ds $ mkBody b) []
where
unit = mkName "GHC.Tuple.()"
mkBody (NormalB e) = e
mkBody (GuardedB xs)
= CaseE (ConE unit)
[ Match (ConP unit []) (GuardedB xs) []
, Match WildP (NormalB [e'| error "Unmatched pattern" |]) []
]
--TODO: desugaring of pattern guards
-- | From the standard report:
-- The general binding form for functions is semantically equivalent to the
-- equation (i.e. simple pattern binding):
--
-- @
-- x = \x1 ... xk -> case (x1, . . . , xk) of
-- (p11 , . . . , p1k) match1
-- . . .
-- (pn1 , . . . , pnk) matchn
-- @
--
-- where the xi are new identifiers.
dsFun :: Dec -> Dec
dsFun (FunD n xs@(Clause pats _ _:_)) = FunD n [Clause [] (NormalB expr) []]
where
vars = map (mkName . ("arg"++) . show) [1..length pats]
expr = LamE (map VarP vars)
. CaseE (TupE $ map VarE vars)
$ map ((\(Clause ps b _) -> Match (TupP ps) b []) . dsPatBinds) xs
dsFun d = d
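-- A minimal usage sketch (the name 'exampleDsFun' is an illustrative
-- addition, not part of the original module): the binding @f (Just x) = x@
-- is rewritten into a single clause whose body is a lambda over fresh
-- @argN@ variables scrutinised by a tuple @case@.
exampleDsFun :: Dec
exampleDsFun =
  dsFun (FunD (mkName "f")
              [Clause [ConP (mkName "Just") [VarP (mkName "x")]]
                      (NormalB (VarE (mkName "x")))
                      []])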
--TODO: recursively apply to function-including things?
--TODO: there are other ways to slice this.. For one thing, how do we do
-- de-sugaring of cases with guards / wheres.. Maybe have dsFun yield an
-- output ready to be processed by dsPatBinds ?
|
mgsloan/quasi-extras
|
src/Language/Quasi/Internal/Desugar.hs
|
bsd-3-clause
| 17,794
| 0
| 20
| 4,594
| 2,246
| 1,279
| 967
| 124
| 6
|
{-# LANGUAGE
DeriveDataTypeable
, FlexibleContexts
, FlexibleInstances
, GeneralizedNewtypeDeriving
, ImpredicativeTypes
, MultiParamTypeClasses
, RankNTypes
, ScopedTypeVariables
, StandaloneDeriving
, TemplateHaskell
, TupleSections
, TypeFamilies
, UndecidableInstances
#-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.UI.Toy.Transformed
-- Copyright : (c) 2011 Michael Sloan
-- License : BSD-style (see the LICENSE file)
--
-- Maintainer : Michael Sloan <mgsloan@gmail.com>
-- Stability : experimental
-- Portability : GHC only
--
-- 'Transformed' allows 'Interactive', 'Diagrammable' things to be transformed
-- and overlapped, such that the layout combinators from diagrams apply.
--
--------------------------------------------------------------------------------
module Graphics.UI.Toy.Transformed
( Transformed(..)
, mkTransformed
) where
import Control.Arrow ( first, second )
import Control.Lens hiding ( transform )
import Data.Data ( Data, Typeable1 )
import Data.Foldable ( foldMap )
import Diagrams.Prelude hiding ( over, view )
import Graphics.UI.Toy ( Interactive(..), InputState, MousePos )
import Graphics.UI.Toy.Diagrams ( Diagrammable(..), Clickable(..) )
-- | @'Transformed' a@ is like @[a]@, except that each element is stored with a
-- transformation that is used for 'Diagrammable' and 'Interactive'.
--
-- The semantics of the 'Monoid', 'Semigroup', 'Transformable', 'HasOrigin',
-- 'Enveloped', and 'Juxtaposable' instances are all intended to mimic the
-- ones for 'Diagram'. The 'Interactive' and 'Clickable' instances
-- appropriately transform the mouse coordinates into local coordinates.
newtype Transformed a = Transformed [(Transformation (V a), a)]
deriving (Monoid, Semigroup)
deriving instance Typeable1 Transformed
deriving instance (Data a, Data (Transformation (V a))) => Data (Transformed a)
_Transformed
:: Iso [(Transformation (V s), s)]
[(Transformation (V t), t)]
(Transformed s)
(Transformed t)
_Transformed = iso Transformed (\(Transformed xs) -> xs)
-- | @'mkTransformed' x@ constructs a 'Transformed' container for a single
-- object.
mkTransformed :: HasLinearMap (V a) => a -> Transformed a
mkTransformed = Transformed . (:[]) . (mempty, )
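-- A usage sketch (illustrative only, not part of the original module): given
-- two diagrammable values @a@ and @b@ in a 2D vector space, two 'Transformed'
-- containers can be overlaid with '<>' and positioned with the usual diagrams
-- transformations, e.g.
--
-- > mkTransformed a <> translate (r2 (2, 0)) (mkTransformed b)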
type instance V (Transformed a) = V a
instance HasLinearMap (V a) => HasOrigin (Transformed a) where
moveOriginTo p = translate (origin .-. p)
instance HasLinearMap (V a) => Transformable (Transformed a) where
transform a = from _Transformed %~ map (first (a <>))
instance (Enveloped a, HasLinearMap (V a))
=> Enveloped (Transformed a) where
getEnvelope = foldMap (\(t, x) -> transform t $ getEnvelope x)
. review _Transformed
instance HasStyle a => HasStyle (Transformed a) where
applyStyle s = from _Transformed %~ map (second $ applyStyle s)
instance ( v ~ V a, HasLinearMap v, InnerSpace v, OrderedField (Scalar v)
, Diagrammable b v q a, Semigroup q)
=> Diagrammable b v q (Transformed a) where
diagram = foldMap (\(t, x) -> transform t $ diagram x) . review _Transformed
instance (Enveloped a, HasLinearMap (V a))
=> Juxtaposable (Transformed a) where
juxtapose = juxtaposeDefault
instance ( Transformable (InputState b), V a ~ MousePos b, V a ~ V (InputState b)
, Interactive b a )
=> Interactive b (Transformed a) where
-- TODO: or together the boolean results
tick i = liftA (, True)
. overInpT (\i' -> liftA fst . tick i') i
mouse m = overInpT (mouse m)
keyboard k = overInpT (keyboard k)
overInpT :: ( Monad m, Functor m, Transformable inp, V a ~ V inp, V a' ~ V inp )
=> (inp -> a -> m a') -> inp -> Transformed a -> m (Transformed a')
overInpT f i (Transformed xs)
= Transformed <$> mapM (\(t, x) -> (t,) <$> f (transform t i) x) xs
instance (Clickable a, HasLinearMap (V a))
=> Clickable (Transformed a) where
clickInside (Transformed xs) p
= any (\(t, x) -> clickInside x $ transform t p) xs
|
mgsloan/toy-diagrams
|
src/Graphics/UI/Toy/Transformed.hs
|
bsd-3-clause
| 4,153
| 0
| 12
| 865
| 1,153
| 616
| 537
| 69
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Main Hell executable.
module Main where
import Control.Exception
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import Data.Data
import Data.Foldable
import Data.Monoid
import Data.Sequence (Seq)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Hell.Interpreter
import Hell.Lexer
import Hell.Parser
import Hell.Types
import Options.Applicative.Simple
import System.IO
import qualified Text.Megaparsec as Mega
data Lex
= LexQuotedEmacs
| LexUnquotedEmacs
deriving (Eq, Show)
data Config = Config
{ configLex :: Maybe Lex
, configAction :: Maybe SomeAction
}
main :: IO ()
main = do
(opts, ()) <-
simpleOptions
"0"
"Hell"
"A action"
(Config <$>
(flag
Nothing
(Just LexQuotedEmacs)
(help "Lex stdin as actions and output token info for Emacs" <>
long "lex-commands-emacs") <|>
flag
Nothing
(Just LexUnquotedEmacs)
(help "Lex stdin as pure code and output token info for Emacs" <>
long "lex-pure-emacs")) <*>
optional
(option
(eitherReader
(parseQuotedByteString "<command>" . T.encodeUtf8 . T.pack))
(short 'c' <> long "command" <> help "Command pipeline to run")))
empty
case configLex opts of
Nothing ->
case configAction opts of
Nothing -> do putStrLn "Welcome to Hell.\n"
promptLoop
Just cmd -> interpretSomeAction stdin stdout stderr cmd
Just LexQuotedEmacs ->
S.interact (tokensToEmacs . lexQuotedByteString "<interactive>")
Just LexUnquotedEmacs ->
S.interact (tokensToEmacs . lexUnquotedByteString "<interactive>")
promptLoop :: IO ()
promptLoop = do
hSetBuffering stdout NoBuffering
putStr "$ "
mline <- try S8.getLine
case mline of
Left (_ :: IOException) -> pure ()
Right line ->
if S8.null line
then promptLoop
else do
case parseQuotedByteString "<stdin>" line of
Left e -> hPutStrLn stderr e
Right cmd -> interpretSomeAction stdin stdout stderr cmd
promptLoop
tokensToEmacs :: Either String (Seq (Located Token)) -> ByteString
tokensToEmacs xs =
"(" <> S.intercalate "\n " (map fromToken (either (const []) toList xs)) <>
")\n"
where
fromToken located =
"(" <> S.intercalate " " (map (\(k, v) -> ":" <> k <> " " <> v) keys) <>
")"
where
keys =
[ ( "start-line"
, S8.pack (show (Mega.unPos (Mega.sourceLine (locatedStart located)))))
, ( "start-column"
, S8.pack
(show (Mega.unPos (Mega.sourceColumn (locatedStart located)))))
, ( "end-line"
, S8.pack (show (Mega.unPos (Mega.sourceLine (locatedEnd located)))))
, ( "end-column"
, S8.pack (show (Mega.unPos (Mega.sourceColumn (locatedEnd located)))))
, ("type", S8.pack (show (toConstr (locatedThing located))))
]
|
chrisdone/hell
|
app/Main.hs
|
bsd-3-clause
| 3,293
| 0
| 19
| 1,008
| 917
| 474
| 443
| 94
| 4
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ViewPatterns #-} -- needed for the view patterns in 'runState' and 'runInstruction'
module Day12 (run) where
import Control.Applicative hiding (optional)
import Control.Lens
import Control.Monad
import Data.Either
import Data.List
import Data.List.Split (splitOn)
import Data.Map.Strict (Map, (!), insert, fromList)
import Data.Maybe
import Text.Parsec (parse, try)
import Text.Parsec.Char
import Text.Parsec.Combinator
import Text.Parsec.String (Parser)
import Text.Read
data Instruction = Copy String Char | Inc Char | Dec Char | Jump String Int deriving Show
data State = State { _instructions :: [Instruction], _pos :: Int, _registers :: Map Char Int } deriving Show
makeLenses ''State
-- Parsing
lexeme :: Parser a -> Parser a
lexeme p = do
x <- p
whitespace
return x
whitespace :: Parser ()
whitespace = void $ many $ oneOf "\n\t "
copy :: Parser Instruction
copy = do
void $ lexeme (string "cpy")
num <- lexeme (many1 letter <|> many1 digit)
r <- letter
return $ Copy num r
jump :: Parser Instruction
jump = do
void $ lexeme (string "jnz")
r <- lexeme (many1 alphaNum)
num <- read <$> lexeme (many1 letter <|> (many1 ((char '-') <|> digit)))
return $ Jump r num
inc = Inc <$> registerInstruction "inc"
dec = Dec <$> registerInstruction "dec"
registerInstruction :: String -> Parser Char
registerInstruction s = (lexeme (string s) *> letter)
instruction :: Parser Instruction
instruction = copy <|> inc <|> dec <|> jump
-- Running
isEnd :: State -> Maybe State
isEnd s = if (s ^. pos) >= length (s ^. instructions) then Just s else Nothing
runState :: State -> [State]
runState (isEnd -> Just s) = [s]
runState s = (runState $ runInstruction i s)
where
(State is ((!!) is -> i) regs) = s
runInstruction :: Instruction -> State -> State
runInstruction (Copy (readMaybe -> Just i) r) s = s & registers . at r . _Just .~ (i :: Int) & pos +~ 1
runInstruction (Copy (listToMaybe -> Just i) r) s = s & registers . at r . _Just .~ fromJust (s ^. registers . at i) & pos +~ 1
runInstruction (Inc r) s = s & registers . at r . _Just +~ 1 & pos +~ 1
runInstruction (Dec r) s = s & registers . at r . _Just -~ 1 & pos +~ 1
runInstruction (Jump (readMaybe -> Just r) i) s = if (r :: Integer) /= 0 then s & pos +~ i else s & pos +~ 1
runInstruction (Jump (listToMaybe -> Just r) i) s = if fromJust (s ^. registers . at r) /= 0 then s & pos +~ i else s & pos +~ 1
run :: IO ()
run = do
print instructions
print $ map (^. registers) $ runState (State instructions 0 rs)
where
rs = fromList [('a', 0), ('b', 0), ('c', 1), ('d', 0)]
instructions = rights $ (parseInstructions <$> splitOn "|" input)
parseInstructions = parse instruction ""
sampleInput = "cpy 41 a|inc a|inc a|dec a|jnz a 2|dec a"
input = "cpy 1 a|cpy 1 b|cpy 26 d|jnz c 2|jnz 1 5|cpy 7 c|inc d|dec c|jnz c -2|cpy a c|inc a|dec b|jnz b -2|cpy c b|dec d|jnz d -6|cpy 14 c|cpy 14 d|inc a|dec d|jnz d -2|dec c|jnz c -5|"
|
ulyssesp/AoC
|
src/day12.hs
|
bsd-3-clause
| 2,986
| 0
| 17
| 681
| 1,152
| 603
| 549
| -1
| -1
|
import Data.Map (Map)
import qualified Data.Map as Map
import Parser
import Rules
trimEnd :: Rules -> Rules
trimEnd = Map.map removeEnd . simplifyAll
where removeEnd (T a) = Empty
removeEnd (Star a) = Star $ removeEnd a
removeEnd (Sum a b) = Sum (removeEnd a) (removeEnd b)
removeEnd (Product a b) = Product a (removeEnd b)
removeEnd a = a
main :: IO ()
main = do
rs <- readFile "rules_2"
(Grammar _ rules) <- case parse grammar rs of
Right r -> return r
Left e -> fail $ show e
putStrLn $ show $ Map.map toString $ simplifyAll rules
putStrLn $ show $ Map.map toString $ simplifyAll $ trimEnd rules
|
abbradar/comps
|
src/Lab2.hs
|
bsd-3-clause
| 649
| 4
| 12
| 162
| 279
| 136
| 143
| 19
| 5
|
{-# LANGUAGE OverloadedStrings #-}
module JsonTypesSpec where
import Test.Hspec
import Data.Aeson (encode
, decode
)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Ratio
import qualified Data.Text as T
import Hop.Apps.Juno.JsonTypes
spec :: Spec
spec = do
describe "JsonTypes `create account` for Juno REST API." testJsonCreateAccount
describe "JsonTypes `adjust account` for Juno REST API." testJsonAdjustAccount
describe "JsonTypes `poll commands` for Juno REST API." testJsonPoll
describe "JsonTypes `ledger query` for Juno REST API." testJsonQuery
testJsonCreateAccount :: Spec
testJsonCreateAccount = do
it "decoding CreateAccountRequest byteString" $
(decode createAccountByteString :: Maybe CreateAccountRequest)
`shouldBe`
Just CreateAccountRequest
{_payload = AccountPayload {_account = "TSLA"},
_digest = Digest {_hash = "hashy", _key = "mykey"}
}
it "encoding create account bytestring" $
encodeCreateAccountRequest
`shouldBe`
"{\"payload\":{\"account\":\"TSLA\"},\"digest\":{\"hash\":\"hashy\",\"key\":\"mykey\"}}"
testJsonAdjustAccount :: Spec
testJsonAdjustAccount = do
it "decoding proper bytestring for AdjustAccountRequest" $
decodeAdjustRequest
`shouldBe`
Just AccountAdjustRequest {
_adjustAccountPayload = AccountAdjustPayload
{ _adjustAccount = "TSLA", _adjustAmount = JRational (100 % 1) }
, _adjustAccountDigest = Digest {_hash = "hashy", _key = "mykey"}
}
it "encoding adjust account bytestring" $
encodeAdjustRequest
`shouldBe` adjustRequestPayloadBS
testJsonPoll :: Spec
testJsonPoll = do
it "ecoding PollPayload" $
encodePollRequest
`shouldBe`
pollRequestByteString
it "encoding PollResult" $
encodePollResult
`shouldBe`
BL.pack "{\"status\":\"Accepted\",\"cmdid\":\"1\",\"payload\":\"res payload\",\"message\":\"nothing to say\",\"logidx\":-1}"
it "decode pollPayloadReqeust" $
decodePollPayloadRequest
`shouldBe`
Just PollPayloadRequest {_pollPayload = PollPayload {_cmdids = ["1","2","3"]}, _pollDigest = Digest {_hash = "hashy", _key = "mykey"}}
it "decode PollPayload only" $
decodePollPayload
`shouldBe`
Just PollPayload {_cmdids = ["1","2","3"]}
testJsonQuery :: Spec
testJsonQuery = do
it "encoding ledger request" $
ledgerQueryRequestEncode
`shouldBe`
ledgerQueryRequestBS
it "decoding ledger reqeust" $
(decode ledgerQueryRequestBS :: Maybe LedgerQueryRequest)
`shouldBe`
Just ledgerQueryRequestTX
------------------------------------------------------------------------------------------------
--- Local Helper Functions
------------------------------------------------------------------------------------------------
-- Create Account helpers
digestByteString :: BL.ByteString
digestByteString = BL.pack "{\"hash\":\"hashy\",\"key\":\"mykey\"}"
decodeDigest :: Maybe Digest
decodeDigest = decode digestByteString :: Maybe Digest
encodeCreateAccountRequest :: BL.ByteString
encodeCreateAccountRequest = encode (CreateAccountRequest (AccountPayload (T.pack "TSLA")) (Digest (T.pack "hashy") (T.pack "mykey")))
createAccountByteString :: BL.ByteString
createAccountByteString = BL.pack "{\"digest\":{\"hash\":\"hashy\",\"key\":\"mykey\"},\"payload\":{\"account\":\"TSLA\"}}"
-- | Adjust account helpers
adjustPayloadDecode :: Bool
adjustPayloadDecode = (decode $ BL.pack "{\"amount\":\"100%1\",\"account\":\"TSLA\"}" :: Maybe AccountAdjustPayload) == Just AccountAdjustPayload { _adjustAccount = "TSLA"
, _adjustAmount = JRational {jratio = 100 % 1}
}
adjustPayloadEncode :: Bool
adjustPayloadEncode = encode (AccountAdjustPayload "TSLA" (JRational (100 % 1))) == "{\"amount\":100,\"account\":\"TSLA\"}"
adjustRequestPayloadBS :: BL.ByteString
adjustRequestPayloadBS = BL.pack "{\"payload\":{\"amount\":\"100%1\",\"account\":\"TSLA\"},\"digest\":{\"hash\":\"hashy\",\"key\":\"mykey\"}}"
encodeAdjustRequest :: BL.ByteString
encodeAdjustRequest = encode $
AccountAdjustRequest (AccountAdjustPayload (T.pack "TSLA")
(JRational (100%1))
)
(Digest (T.pack "hashy") (T.pack "mykey"))
decodeAdjustRequest :: Maybe AccountAdjustRequest
decodeAdjustRequest = decode adjustRequestPayloadBS
-- | poll cmds helpers
encodePollRequest :: BL.ByteString
encodePollRequest = encode $ PollPayloadRequest
(PollPayload $ fmap T.pack ["1","2","3"])
Digest {_hash = "hashy", _key = "mykey"}
pollRequestByteString :: BL.ByteString
pollRequestByteString = BL.pack "{\"payload\":{\"cmdids\":[\"1\",\"2\",\"3\"]},\"digest\":{\"hash\":\"hashy\",\"key\":\"mykey\"}}"
decodePollPayloadRequest :: Maybe PollPayloadRequest
decodePollPayloadRequest = decode pollRequestByteString
decodedPollPayloadRequest :: Maybe PollPayloadRequest
decodedPollPayloadRequest = Just PollPayloadRequest {_pollPayload = PollPayload {_cmdids = ["1","2","3"]}, _pollDigest = Digest {_hash = "hashy", _key = "mykey"}}
decodePollPayload :: Maybe PollPayload
decodePollPayload = decode pollPayloadByteString
where
pollPayloadByteString :: BL.ByteString
pollPayloadByteString = "{\"cmdids\":[\"1\",\"2\",\"3\"]}"
-- | Test PollResults
encodePollResult :: BL.ByteString
encodePollResult = encode PollResult {
_pollStatus = "Accepted"
, _pollCmdId = "1"
, _logidx = -1
, _pollMessage = "nothing to say"
, _pollResPayload = "res payload"
}
--- ledger Query
queryTxOne :: QueryJson
queryTxOne = QueryJson Nothing (Just 1) Nothing Nothing
queryTxOneEncode :: BL.ByteString
queryTxOneEncode = encode queryTxOne
ledgerQueryEncode :: BL.ByteString
ledgerQueryEncode = encode $ LedgerQueryBody queryTxOne
ledgerQueryDecode :: Maybe LedgerQueryBody
ledgerQueryDecode = decode "{\"filter\":{\"tx\":1,\"sender\":null,\"receiver\":null,\"account\":null}}"
ledgerQueryRequestEncode :: BL.ByteString
ledgerQueryRequestEncode = encode $ LedgerQueryRequest (LedgerQueryBody queryTxOne) dig
ledgerQueryRequestBS :: BL.ByteString
ledgerQueryRequestBS = "{\"payload\":{\"filter\":{\"tx\":1,\"sender\":null,\"account\":null,\"receiver\":null}},\"digest\":{\"hash\":\"hashy\",\"key\":\"mykey\"}}"
ledgerQueryRequestDecode :: Maybe LedgerQueryRequest
ledgerQueryRequestDecode = decode "{\"payload\":{\"filter\":{\"tx\":1,\"sender\":\"\",\"receiver\":\"\",\"account\":\"\"}},\"digest\":{\"hash\":\"hashy\",\"key\":\"mykey\"}}"
ledgerQueryRequest :: LedgerQueryRequest
ledgerQueryRequest = LedgerQueryRequest {
payload = LedgerQueryBody {
_filter = QueryJson {
_queryAcct = Just ""
, _queryTx = Just 1
, _querySender = Just ""
, _queryReceiver = Just ""}
},
digest = Digest {_hash = "hashy", _key = "mykey"}
}
ledgerQueryRequestTX :: LedgerQueryRequest
ledgerQueryRequestTX = LedgerQueryRequest {
payload = LedgerQueryBody {
_filter = QueryJson {
_queryAcct = Nothing
, _queryTx = Just 1
, _querySender = Nothing
, _queryReceiver = Nothing}
},
digest = Digest {_hash = "hashy", _key = "mykey"}
}
dig :: Digest
dig = Digest {_hash = "hashy", _key = "mykey"}
|
haroldcarr/juno
|
z-no-longer-used/test/JsonTypesSpec.hs
|
bsd-3-clause
| 8,313
| 0
| 15
| 2,245
| 1,364
| 763
| 601
| 144
| 1
|
module Main (
main
) where
import Control.Monad (mapM_)
import System.Environment (getArgs)
import Language.Py.ParseError (ParseError(..))
import Language.Py.SrcLocation (SrcSpan(..), getSpan)
import Language.Py.Parser (parseModule)
import PyHint.Scanner (scanModule)
import PyHint.Files (getPythonFiles, getFixedContent)
import PyHint.Message (Message(..))
getErrorMessage :: ParseError -> Message
getErrorMessage (UnexpectedToken token) = Message "E000" "Syntax error: unexpected token" (getSpan token)
getErrorMessage (UnexpectedChar _ sloc) = Message "E000" "Syntax error: unexpected char" (getSpan sloc)
getErrorMessage (StrError _) = Message "E000" "Syntax error" SpanEmpty
scanFile :: String -> IO ()
scanFile file = do
content <- getFixedContent file
case parseModule content file of
Left e -> printMessage file (getErrorMessage e)
Right (m, _) -> mapM_ (printMessage file) (scanModule m)
printMessage :: String -> Message -> IO ()
printMessage filename (Message t msg span) = case span of
SpanCoLinear _ r c _ -> print filename r c
SpanMultiLine _ r c _ _ -> print filename r c
SpanPoint _ r c -> print filename r c
SpanEmpty -> print filename 0 0
where print f r c = putStrLn $ f ++ ":" ++ show r ++ ":" ++ show c ++ ": " ++ t ++ " " ++ msg
main :: IO ()
main = do
topdir <- fmap head getArgs
files <- getPythonFiles topdir
mapM_ scanFile files
|
codeq/pyhint
|
src/PyHint/Main.hs
|
bsd-3-clause
| 1,378
| 0
| 15
| 241
| 504
| 256
| 248
| 31
| 4
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
module Feldspar.Core.Constructs.Mutable
( module Feldspar.Core.Constructs.Mutable
, module Language.Syntactic.Constructs.Monad
)
where
import Data.Map
import Data.Typeable
import System.IO.Unsafe
import Language.Syntactic
import Language.Syntactic.Constructs.Binding
import Language.Syntactic.Constructs.Binding.HigherOrder
import Language.Syntactic.Constructs.Monad
import Feldspar.Core.Types
import Feldspar.Core.Interpretation
import Feldspar.Core.Constructs.Binding
data Mutable a
where
Run :: Type a => Mutable (Mut a :-> Full a)
instance Semantic Mutable
where
semantics Run = Sem "runMutable" unsafePerformIO
instance Typed Mutable
where
typeDictSym Run = Just Dict
semanticInstances ''Mutable
instance EvalBind Mutable where evalBindSym = evalBindSymDefault
instance AlphaEq dom dom dom env => AlphaEq Mutable Mutable dom env
where
alphaEqSym = alphaEqSymDefault
instance Sharable (MONAD Mut)
instance Cumulative (MONAD Mut)
instance SizeProp (MONAD Mut)
where
sizeProp Return (WrapFull a :* Nil) = infoSize a
sizeProp Bind (_ :* WrapFull f :* Nil) = snd $ infoSize f
sizeProp Then (_ :* WrapFull b :* Nil) = infoSize b
sizeProp When _ = AnySize
instance Sharable Mutable
instance Cumulative Mutable
instance SizeProp Mutable
where
sizeProp Run (WrapFull a :* Nil) = infoSize a
monadProxy :: P Mut
monadProxy = P
instance ( MONAD Mut :<: dom
, (Variable :|| Type) :<: dom
, CLambda Type :<: dom
, Let :<: dom
, OptimizeSuper dom)
=> Optimize (MONAD Mut) dom
where
optimizeFeat opts bnd@Bind (ma :* f :* Nil) = do
ma' <- optimizeM opts ma
case getInfo ma' of
Info (MutType ty) sz vs src -> do
f' <- optimizeFunction opts (optimizeM opts) (Info ty sz vs src) f
case getInfo f' of
Info{} -> constructFeat opts bnd (ma' :* f' :* Nil)
optimizeFeat opts a args = optimizeFeatDefault opts a args
constructFeatOpt _ Bind (ma :* (lam :$ (ret :$ var)) :* Nil)
| Just (SubConstr2 (Lambda v1)) <- prjLambda lam
, Just Return <- prjMonad monadProxy ret
, Just (C' (Variable v2)) <- prjF var
, v1 == v2
, Just ma' <- gcast ma
= return ma'
constructFeatOpt opts Bind (ma :* (lam :$ body) :* Nil)
| Just (SubConstr2 (Lambda v)) <- prjLambda lam
, v `notMember` vars
= constructFeat opts Then (ma :* body :* Nil)
where
vars = infoVars $ getInfo body
    -- (bind e1 (\x -> e2)) >> e3 ==> bind e1 (\x -> e2 >> e3)
constructFeatOpt opts Then ((bnd :$ x :$ (lam :$ bd)) :* y :* Nil)
| Just Bind <- prjMonad monadProxy bnd
, Just lam'@(SubConstr2 (Lambda _)) <- prjLambda lam
= do
bb <- constructFeat opts Then (bd :* y :* Nil)
bd' <- constructFeat opts (reuseCLambda lam') (bb :* Nil)
constructFeatUnOpt opts Bind (x :* bd' :* Nil)
    -- bind (bind e1 (\x -> e2)) (\y -> e3) ==> bind e1 (\x -> bind e2 (\y -> e3))
constructFeatOpt opts Bind ((bnd :$ x :$ (lam :$ bd)) :* y :* Nil)
| Just Bind <- prjMonad monadProxy bnd
, Just lam'@(SubConstr2 (Lambda _)) <- prjLambda lam
= do
bb <- constructFeat opts Bind (bd :* y :* Nil)
bd' <- constructFeat opts (reuseCLambda lam') (bb :* Nil)
constructFeatUnOpt opts Bind (x :* bd' :* Nil)
-- return x >> mb ==> mb
constructFeatOpt _ Then ((ret :$ _) :* mb :* Nil)
| Just Return <- prjMonad monadProxy ret
= return mb
-- ma >> return () ==> ma
constructFeatOpt _ Then (ma :* (ret :$ u) :* Nil)
| Just Return <- prjMonad monadProxy ret
, Just TypeEq <- typeEq (infoType $ getInfo ma) (MutType UnitType)
, Just TypeEq <- typeEq (infoType $ getInfo ret) (MutType UnitType)
, Just () <- viewLiteral u
= return ma
constructFeatOpt opts a args = constructFeatUnOpt opts a args
constructFeatUnOpt opts Return args@(a :* Nil)
| Info {infoType = t} <- getInfo a
= constructFeatUnOptDefaultTyp opts (MutType t) Return args
constructFeatUnOpt opts Bind args@(_ :* (lam :$ body) :* Nil)
| Just (SubConstr2 (Lambda _)) <- prjLambda lam
, Info {infoType = t} <- getInfo body
= constructFeatUnOptDefaultTyp opts t Bind args
constructFeatUnOpt opts Then args@(_ :* mb :* Nil)
| Info {infoType = t} <- getInfo mb
= constructFeatUnOptDefaultTyp opts t Then args
constructFeatUnOpt opts When args =
constructFeatUnOptDefaultTyp opts voidTypeRep When args
instance (Mutable :<: dom, MONAD Mut :<: dom, OptimizeSuper dom) => Optimize Mutable dom
where
constructFeatUnOpt _ Run ((ret :$ a) :* Nil)
| Just Return <- prjMonad monadProxy ret = return a
constructFeatUnOpt opts Run args = constructFeatUnOptDefault opts Run args
|
emwap/feldspar-language
|
src/Feldspar/Core/Constructs/Mutable.hs
|
bsd-3-clause
| 6,895
| 0
| 19
| 1,722
| 1,759
| 888
| 871
| 111
| 1
|
module Main where
import Perseus.Api
import Control.Monad.IO.Class
import Data.Text
import Data.Time (UTCTime)
import Network.Wai
import Network.Wai.Handler.Warp
import Servant
server :: Server SampleApi
server = read :<|> write where
read metric = do
value <- liftIO $ readFile (metricFilename metric)
return $ metric ++ "=" ++ value ++ "\n"
-- return $ PostOk metric value
write metric value = do
liftIO $ writeFile (metricFilename metric) value
-- return $ GetOk metric value
return $ "OK, wrote " ++ metric ++ "=" ++ value ++ "\n"
app :: Application
app = serve sampleApi server
main :: IO ()
main = run 8081 app
path :: FilePath
path = "data/"
metricFilename :: String -> FilePath
metricFilename metric = path ++ metric
|
bergey/perseus
|
src/Perseus/Server.hs
|
bsd-3-clause
| 760
| 0
| 13
| 153
| 231
| 123
| 108
| 24
| 1
|
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
[lq| measure getfst :: (a, b) -> a
getfst (x, y) = x
|]
[lq| type Pair a b = {v0 : ({v:a | v = (getfst v0)}, b) | true } |]
[lq| type OPList a b = [(Pair a b)]<\h -> {v: (a, b) | (getfst v) >= (getfst h)}> |]
[lq| type OList a = [a]<\h -> {v: a | (v >= h)}> |]
[lq| getFsts :: OPList a b -> OList a |]
getFsts [] = []
getFsts ((x,_) : xs) = x : getFsts xs
|
spinda/liquidhaskell
|
tests/gsoc15/unknown/pos/PairMeasure.hs
|
bsd-3-clause
| 447
| 0
| 8
| 137
| 80
| 49
| 31
| 9
| 1
|
{-|
Utility functions shared by several modules of "Typechecker".
-}
module Typechecker.Util(TypecheckM
,whenM
,anyM
,allM
,unlessM
,concatMapM
,tcError
,pushError
,tcWarning
,pushWarning
,checkType
,resolveType
,resolveTypeAndCheckForLoops
,findFormalRefType
,isKnownRefType
,assertSafeTypeArguments
,checkTypeArgumentBounds
,subtypeOf
,assertSubtypeOf
,assertDistinctThing
,assertDistinct
,findTrait
,findADTClass
,findField
,findMethod
,findMethodWithCalledType
,findCapability
,findVar
,propagateResultType
,unifyTypes
,uniquifyTypeVars
,checkValidUseOfBreak
,checkValidUseOfContinue
,abstractTraitFrom
,isLinearType
,isSubordinateType
,isEncapsulatedType
,isLocalType
,isPassiveType
,isActiveType
,isSharedType
,isAliasableType
,isSharableType
,checkConjunction
,includesMarkerTrait
) where
import Identifiers
import Types as Ty
import AST.AST as AST
import Data.List
import Data.Maybe
import Text.Printf (printf)
import Debug.Trace
import Control.Monad.Reader
import Control.Monad.Except
import Control.Arrow(second)
import Control.Monad.State
-- Module dependencies
import Typechecker.TypeError
import Typechecker.Environment
-- Monadic versions of common functions
anyM :: (Monad m) => (a -> m Bool) -> [a] -> m Bool
anyM p = foldM (\b x -> liftM (b ||) (p x)) False
allM :: (Monad m) => (a -> m Bool) -> [a] -> m Bool
allM p = foldM (\b x -> liftM (b &&) (p x)) True
whenM :: (Monad m) => m Bool -> m () -> m ()
whenM cond action = cond >>= (`when` action)
unlessM :: (Monad m) => m Bool -> m () -> m ()
unlessM cond action = cond >>= (`unless` action)
findM :: (Monad m) => (a -> m Bool) -> [a] -> m (Maybe a)
findM _ [] = return Nothing
findM p (x:xs) = do
b <- p x
if b
then return $ Just x
else findM p xs
-- | A version of 'concatMap' that works with a monadic predicate.
-- Source: https://hackage.haskell.org/package/extra-1.5/docs/src/Control-Monad-Extra.html
concatMapM :: Monad m => (a -> m [b]) -> [a] -> m [b]
{-# INLINE concatMapM #-}
concatMapM op = foldr f (return [])
where
f x xs = do x <- op x
if null x
then xs
else do
xs <- xs
return $ x++xs
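-- Usage sketches for the monadic helpers above (illustrative only, not part
-- of the original module):
--
-- > anyM (return . even) [1, 3, 4 :: Int]   -- True
-- > allM (return . even) [2, 4, 6 :: Int]   -- True
-- > concatMapM (\x -> return [x, x]) "ab"   -- "aabb"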
-- | The monad in which all typechecking is performed. A function
-- of return type @TypecheckM Bar@ may read from an 'Environment'
-- and returns a @Bar@ or throws a typechecking exception.
type TypecheckM a =
forall m . (MonadState [TCWarning] m,
MonadError TCError m,
MonadReader Environment m) => m a
-- | Convenience function for throwing an exception with the
-- current backtrace
tcError err =
do bt <- asks backtrace
throwError $ TCError err bt
-- | Push the expression @expr@ and throw error err
pushError expr err = local (pushBT expr) $ tcError err
tcWarning wrn =
do bt <- asks backtrace
modify (TCWarning bt wrn:)
pushWarning expr wrn = local (pushBT expr) $ tcWarning wrn
checkValidUseOfBreak = Typechecker.TypeError.validUseOfBreak . bt
checkValidUseOfContinue = Typechecker.TypeError.validUseOfContinue . bt
-- | @matchTypeParameterLength ty1 ty2@ ensures that the type parameter
-- lists of its arguments have the same length.
matchTypeParameterLength :: Type -> Type -> TypecheckM ()
matchTypeParameterLength ty1 ty2 = do
let params1 = getTypeParameters ty1
params2 = getTypeParameters ty2
unless (length params1 == length params2) $
tcError $ WrongNumberOfTypeParametersError
ty1 (length params1) ty2 (length params2)
checkType :: Type -> TypecheckM Type
checkType = typeMapM checkSingleType
where
checkSingleType ty
| isRefAtomType ty = do
ty' <- resolveType ty
-- ty' could be an unfolded type synonym
when (isRefAtomType ty') $ do
formal <- findFormalRefType ty'
let formalTypeParams = getTypeParameters formal
actualTypeParams = getTypeParameters ty'
assertSafeTypeArguments formalTypeParams actualTypeParams
checkTypeArgumentBounds formalTypeParams actualTypeParams
return ty'
| otherwise = resolveType ty
-- | @resolveType ty@ checks all the components of @ty@, resolving
-- reference types to traits or classes and making sure that any
-- type variables are in the current environment.
resolveType :: Type -> TypecheckM Type
resolveType = typeMapM resolveSingleType
resolveSingleType :: Type -> TypecheckM Type
resolveSingleType ty
| isTypeVar ty = do
params <- asks typeParameters
case find (ty ==) params of
Just ty' -> return $ ty' `withBoxOf` ty
Nothing -> tcError $ FreeTypeVariableError ty
| isRefAtomType ty = do
res <- resolveRefAtomType ty
formal <- findFormalRefType ty
if isTypeSynonym res
then resolveType res -- Force unfolding of type synonyms
else resolveMode res formal
| isCapabilityType ty =
resolveCapa ty
| isStringType ty = do
tcWarning StringDeprecatedWarning
return ty
| isTypeSynonym ty = do
unless (isModeless ty) $
tcError $ CannotHaveModeError ty
let unfolded = unfoldTypeSynonyms ty
resolveType unfolded
| isArrayType ty = do
let elementType = getResultType ty
when (isStackboundType elementType) $
tcError $ StackboundArrayTypeError elementType
return ty
| otherwise = return ty
where
resolveCapa t = do
let traits = typesFromCapability t
mapM_ resolveSingleTrait traits
assertDistinctThing "occurrence" "trait" traits
return t
resolveSingleTrait t
| isRefAtomType t = do
result <- asks $ traitLookup t
when (isNothing result) $
tcError $ UnknownTraitError t
| otherwise =
tcError $ MalformedCapabilityError t
resolveTypeAndCheckForLoops :: Type -> TypecheckM Type
resolveTypeAndCheckForLoops ty =
evalStateT (typeMapM resolveAndCheck ty) []
where
resolveAndCheck ty
| isRefAtomType ty = do
seen <- get
let tyid = getId ty
when (tyid `elem` seen) $
lift . tcError $ RecursiveTypesynonymError ty
res <- lift $ resolveRefAtomType ty
formal <- lift $ findFormalRefType ty
when (isTypeSynonym res) $ put (tyid : seen)
if isTypeSynonym res
then typeMapM resolveAndCheck res
else lift $ resolveMode res formal
| otherwise = lift $ resolveType ty
-- | Resolve a ref atom type (class type, trait type or typedef)
-- and ensure that it has the correct number type arguments.
resolveRefAtomType :: Type -> TypecheckM Type
resolveRefAtomType ty = do
formal <- findFormalRefType ty
matchTypeParameterLength formal ty
let res = formal `setTypeParameters` getTypeParameters ty
`withModeOf` ty
`withBoxOf` ty
return res
-- | Find the formal version of a type with any type parameters of
-- that type uninstantiated. Throws a typechecking error if a formal
-- type is not found or if several matching formal types are
-- found.
findFormalRefType :: Type -> TypecheckM Type
findFormalRefType ty
| isRefAtomType ty = do
result <- asks $ refTypeLookup ty
case result of
Just [] ->
tcError $ UnknownRefTypeError ty
Just [formal] ->
case getRefNamespace formal of
Just ns -> do
unless (isExplicitNamespace ns) $
tcError $ UnknownRefTypeError ty
return formal
Nothing ->
error $ "Util.hs: No namespace after resolving type " ++ show ty
Just l ->
tcError $ AmbiguousTypeError ty l
Nothing ->
tcError $ UnknownNamespaceError (getRefNamespace ty)
| otherwise = error $ "Util.hs: " ++ Ty.showWithKind ty ++ " isn't a ref-type"
resolveMode :: Type -> Type -> TypecheckM Type
resolveMode actual formal
| isModeless actual && not (isModeless formal) =
resolveMode (actual `withModeOf` formal) formal
| isClassType actual = do
when (isModeless formal) $
unless (isModeless actual) $
tcError $ CannotHaveModeError actual
unless (actual `modeSubtypeOf` formal) $
tcError $ ModeOverrideError formal
when (isSharableSingleType actual) $
tcError $ CannotGiveSharableModeError actual
return actual
| isTraitType actual = do
when (isModeless actual) $
tcError $ ModelessError actual
unless (hasMinorMode formal || actual `modeSubtypeOf` formal) $
tcError $ ModeOverrideError formal
when (isReadSingleType actual) $
unless (isReadSingleType formal) $
tcError $ CannotGiveReadModeError actual
when (isSharableSingleType actual) $
tcError $ CannotGiveSharableModeError actual
return actual
| otherwise =
error $ "Util.hs: Cannot resolve unknown reftype: " ++ show formal
assertSafeTypeArguments :: [Type] -> [Type] -> TypecheckM ()
assertSafeTypeArguments = zipWithM_ assertSafeTypeArgument
where
assertSafeTypeArgument formal arg
| isModeless formal = do
unlessM (isAliasableType arg) $
tcError $ UnsafeTypeArgumentError formal arg
when (isArrayType arg) $
tcWarning ArrayTypeArgumentWarning
| isClassType arg
, isModeless arg = do
cap <- findCapability arg
let traits = typesFromCapability cap
mapM_ (assertSafeTypeArgument formal) traits
`catchError` \(TCError _ bt) ->
throwError $
TCError (UnsafeTypeArgumentError formal arg) bt
| otherwise = do
unlessM (isSharableType arg) $
unless (arg `modeSubtypeOf` formal || hasMinorMode arg) $
tcError $ UnsafeTypeArgumentError formal arg
when (isArrayType arg) $
tcWarning ArrayTypeArgumentWarning
checkTypeArgumentBounds :: [Type] -> [Type] -> TypecheckM ()
checkTypeArgumentBounds params args =
let bindings = zip params args
in zipWithM_ (checkBound bindings) params args
where
checkBound bindings param arg
| Just bound <- getBound param = do
unless (isCapabilityType bound) $
tcError $ MalformedCapabilityError bound
arg `assertSubtypeOf` replaceTypeVars bindings bound
| otherwise = return ()
subtypeOf :: Type -> Type -> TypecheckM Bool
subtypeOf sub super
| isStackboundType sub =
liftM (isStackboundType super &&) $ unbox sub `subtypeOf` unbox super
| isArrowType sub && isArrowType super = do
let subTypeParams = getTypeParameters sub
superTypeParams = getTypeParameters super
bindings = zip superTypeParams subTypeParams
subResultTy = getResultType sub
superResultType = replaceTypeVars bindings $ getResultType super
subArgTys = getArgTypes sub
superArgTys = map (replaceTypeVars bindings) $ getArgTypes super
subBounds = map getBound subTypeParams
superBounds = map (fmap (replaceTypeVars bindings) . getBound) superTypeParams
contravariance <- liftM and $ zipWithM subtypeOf superArgTys subArgTys
covariance <- subResultTy `subtypeOf` superResultType
return $ length subArgTys == length superArgTys &&
length subTypeParams == length superTypeParams &&
subBounds == superBounds &&
sub `modeSubtypeOf` super &&
contravariance && covariance
| isArrayType sub && isArrayType super =
getResultType sub `equivalentTo` getResultType super
| hasResultType sub && hasResultType super =
liftM (sub `hasSameKind` super &&) $
getResultType sub `subtypeOf` getResultType super
| isNullType sub = return (isNullType super || isRefType super)
| isClassType sub && isClassType super =
return $ sub == super
| isClassType sub && isCapabilityType super = do
capability <- findCapability sub
capability `capabilitySubtypeOf` super
| isTupleType sub && isTupleType super = do
let subArgTys = getArgTypes sub
superArgTys = getArgTypes super
results <- zipWithM subtypeOf subArgTys superArgTys
return $ and results && length subArgTys == length superArgTys
| isAbstractTraitType sub && isTraitType super =
return $ sub == abstractTraitFromTraitType super
| isTraitType sub && isAbstractTraitType super =
return $ abstractTraitFromTraitType sub == super
| isTraitType sub && isTraitType super =
return $ sub `modeSubtypeOf` super &&
sub == super
| isTraitType sub && isCapabilityType super = do
let traits = typesFromCapability super
allM (sub `subtypeOf`) traits
| isCapabilityType sub && isTraitType super = do
let traits = typesFromCapability sub
anyM (`subtypeOf` super) traits
| isCapabilityType sub && isCapabilityType super =
sub `capabilitySubtypeOf` super
| isUnionType sub && isUnionType super = do
let members1 = unionMembers sub
members2 = unionMembers super
allM (\ty -> anyM (ty `subtypeOf`) members2) members1
| isUnionType sub = do
let members1 = unionMembers sub
allM (`subtypeOf` super) members1
| isUnionType super = do
let members2 = unionMembers super
anyM (sub `subtypeOf`) members2
| isBottomType sub && (not . isBottomType $ super) = return True
| isNumeric sub && isNumeric super =
return $ sub `numericSubtypeOf` super
| isTypeVar sub && not (isTypeVar super)
, Just bound <- getBound sub
= bound `subtypeOf` super
| otherwise = return (sub == super)
where
capabilitySubtypeOf subCap superCap = do
let traits1 = typesFromCapability subCap
traits2 = typesFromCapability superCap
preservesConjunctions = subCap `preservesConjunctionsOf` superCap
preservesModes =
all (\t1 -> isReadSingleType t1 || isLinearSingleType t1 ||
any (`modeSubtypeOf` t1) traits2) traits1
isSubsumed <- allM (\t2 -> anyM (`subtypeOf` t2) traits1) traits2
return (preservesConjunctions && preservesModes && isSubsumed)
preservesConjunctionsOf subCap superCap =
let pairs1 = conjunctiveTypesFromCapability subCap
pairs2 = conjunctiveTypesFromCapability superCap
in all (`existsIn` pairs1) pairs2
existsIn (left, right) =
any (separates left right)
separates left right (l, r) =
all (`elem` l) left && all (`elem` r) right ||
all (`elem` l) right && all (`elem` r) left
numericSubtypeOf sub super
| isIntType sub && isRealType super = True
| isIntType sub && isUIntType super = True
| isUIntType sub && isIntType super = True
| otherwise = sub == super
assertSubtypeOf :: Type -> Type -> TypecheckM ()
assertSubtypeOf sub super =
unlessM (sub `subtypeOf` super) $ do
capability <- if isClassType sub
then do
cap <- findCapability sub
if isIncapability cap
then return Nothing
else return $ Just cap
else return Nothing
case capability of
Just cap ->
tcError $ TypeWithCapabilityMismatchError sub cap super
Nothing ->
tcError $ TypeMismatchError sub super
equivalentTo :: Type -> Type -> TypecheckM Bool
equivalentTo ty1 ty2 = do
b1 <- ty1 `subtypeOf` ty2
b2 <- ty2 `subtypeOf` ty1
return $ b1 && b2
includesMarkerTrait :: Type -> Type -> TypecheckM Bool
includesMarkerTrait ty trait
| isTraitType ty = return $ ty == trait
| isClassType ty = do
cap <- findCapability ty
includesMarkerTrait cap trait
| isCapabilityType ty = do
let traits = typesFromCapability ty
anyM (`includesMarkerTrait` trait) traits
| otherwise = return False
-- | Convenience function for asserting distinctness of a list of
-- things. @assertDistinctThing "declaration" "field" [f : Foo, f :
-- Bar]@ will throw an error with the message "Duplicate
-- declaration of field 'f'".
assertDistinctThing :: (Eq a, Show a) =>
String -> String -> [a] -> TypecheckM ()
assertDistinctThing something kind l =
let
duplicates = l \\ nub l
duplicate = head duplicates
in
unless (null duplicates) $
tcError $ DuplicateThingError something (kind ++ " " ++ show duplicate)
-- | Convenience function for asserting distinctness of a list of
-- things that @HasMeta@ (and thus knows how to print its own
-- kind). @assertDistinct "declaration" [f : Foo, f : Bar]@ will
-- throw an error with the message "Duplicate declaration of field
-- 'f'".
assertDistinct :: (Eq a, AST.HasMeta a) =>
String -> [a] -> TypecheckM ()
assertDistinct something l =
let
duplicates = l \\ nub l
first = head duplicates
in
unless (null duplicates) $
tcError $ DuplicateThingError something (AST.showWithKind first)
findADTClass :: Type -> TypecheckM ClassDecl
findADTClass t = do
result <- asks $ classLookup t
case result of
Just [] ->
tcError $ UnknownADTError t
Just [cdecl] ->
return cdecl
Just _ ->
tcError $ UnknownADTError t
Nothing ->
tcError $ UnknownNamespaceError (getRefNamespace t)
findTrait :: Type -> TypecheckM TraitDecl
findTrait t = do
result <- asks $ traitLookup t
case result of
Just [] ->
tcError $ UnknownTraitError t
Just [tdecl] ->
return tdecl
Just l ->
tcError $ AmbiguousTypeError t (map tname l)
Nothing ->
tcError $ UnknownNamespaceError (getRefNamespace t)
isKnownRefType :: Type -> TypecheckM Bool
isKnownRefType ty
| isRefAtomType ty = do
result <- asks $ refTypeLookup ty
case result of
Just [] -> return False
Just [ref] -> return $ maybe False isExplicitNamespace
(getRefNamespace ref)
Just l -> tcError $ AmbiguousTypeError ty l
Nothing -> return False
| isCapabilityType ty = do
let traits = typesFromCapability ty
results <- mapM isKnownRefType traits
return $ and results
| isUnionType ty = do
let members = unionMembers ty
results <- mapM isKnownRefType members
return $ and results
| otherwise = return True
findField :: Type -> Name -> TypecheckM FieldDecl
findField ty f = do
isKnown <- isKnownRefType ty
unless isKnown $
tcError $ UnknownTypeUsageError "access field of" ty
result <- asks $ fieldLookup ty f
case result of
Just fdecl -> return fdecl
Nothing -> tcError $ FieldNotFoundError f ty
findMethod :: Type -> Name -> TypecheckM FunctionHeader
findMethod ty = liftM fst . findMethodWithCalledType ty
findMethodWithCalledType :: Type -> Name -> TypecheckM (FunctionHeader, Type)
findMethodWithCalledType ty name
| isUnionType ty = do
let members = unionMembers ty
results <- mapM (`findMethodWithCalledType` name) members
let result@(_, calledType) = head results
unless (all (==calledType) (map snd results)) $
tcError $ UnionMethodAmbiguityError ty name
return result
| isTypeVar ty
, Just bound <- getBound ty = do
(header, bound') <- findMethodWithCalledType bound name
return (header, setBound (Just bound') ty)
| otherwise = do
isKnown <- isKnownRefType ty
unless isKnown $
tcError $ UnknownTypeUsageError "call method on" ty
result <- asks $ methodAndCalledTypeLookup ty name
when (isNothing result) $
tcError $ MethodNotFoundError name ty
return $ fromJust result
findCapability :: Type -> TypecheckM Type
findCapability ty = do
result <- asks $ capabilityLookup ty
return $ fromMaybe err result
where
err = error $ "Util.hs: No capability in " ++ Ty.showWithKind ty
findVar :: QualifiedName -> TypecheckM (Maybe (QualifiedName, Type))
findVar x = do
result <- asks $ varLookup x
case result of
Just [] ->
return Nothing
Just [qvar] ->
return (Just qvar)
Just l ->
tcError $ AmbiguousNameError x l
Nothing ->
tcError $ UnknownNamespaceError (qnspace x)
getImplementedTraits :: Type -> TypecheckM [Type]
getImplementedTraits ty
| isClassType ty = do
capability <- findCapability ty
return $ typesFromCapability capability
| otherwise =
error $ "Types.hs: Can't get implemented traits of type " ++ show ty
propagateResultType :: Type -> Expr -> Expr
propagateResultType ty e
| hasResultingBody e =
let body' = propagateResultType ty (body e)
in setType ty e{body = body'}
| Match{clauses} <- e =
let clauses' = map propagateMatchClause clauses
in setType ty e{clauses = clauses'}
| Seq{eseq} <- e =
let result = propagateResultType ty (last eseq)
in setType ty e{eseq = init eseq ++ [result]}
| IfThenElse{thn, els} <- e =
setType ty e{thn = propagateResultType ty thn
,els = propagateResultType ty els}
| otherwise = setType ty e
where
hasResultingBody TypedExpr{} = True
hasResultingBody Let{} = True
hasResultingBody While{} = True
hasResultingBody For{} = True
hasResultingBody _ = False
propagateMatchClause mc@MatchClause{mchandler} =
mc{mchandler = propagateResultType ty mchandler}
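-- Behaviour sketch (descriptive comment, added for clarity): the result type is
-- pushed into every tail position. An IfThenElse has it propagated into both
-- branches, a Seq only into its last expression, a Match into every clause
-- handler, and expressions with a resulting body (TypedExpr, Let, While, For)
-- into that body. Anything else is simply retyped with setType.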
typeIsUnifiable ty
| isClassType ty = do
capability <- findCapability ty
return $ not (isIncapability capability)
| isCapabilityType ty = return $ not (isIncapability ty)
| otherwise =
return $
isUnionType ty ||
isNullType ty ||
isBottomType ty
isUnifiableWith ty types
| isArrowType ty = return False
| hasResultType ty &&
all hasResultType types &&
all (hasSameKind ty) types =
isUnifiableWith (getResultType ty) (map getResultType types)
| isClassType ty = do
capability <- findCapability ty
if isIncapability capability
then return $ all (==ty) types
else allM typeIsUnifiable types
| otherwise = do
tyUniable <- typeIsUnifiable ty
tysUniable <- allM typeIsUnifiable types
return $ tyUniable && tysUniable &&
not (isNullType ty)
unifyTypes :: [Type] -> TypecheckM (Maybe Type)
unifyTypes tys = do
result <- findM (`isUnifiableWith` tys) tys
case result of
Just ty -> do
union <- doUnifyTypes ty tys
liftM Just $ lub union
Nothing ->
return Nothing
where
lub union = do
let members = unionMembers union
bounds <- filterM (\t -> allM (`subtypeOf` t) members) members
if null bounds
then return union
else return $ head bounds
doUnifyTypes :: Type -> [Type] -> TypecheckM Type
doUnifyTypes inter [] = return inter
doUnifyTypes inter args@(ty:tys)
| hasResultType inter = do
let res = getResultType inter
args' = map getResultType args
res' <- doUnifyTypes res args'
return $ setResultType inter res'
| isNullType ty =
doUnifyTypes inter tys
| isBottomType ty =
doUnifyTypes inter tys
| isClassType ty =
if ty == inter
then doUnifyTypes inter tys
else do
cap <- findCapability ty
doUnifyTypes inter (cap:tys)
| isClassType inter = do
cap <- findCapability inter
doUnifyTypes cap (ty:tys)
| isCapabilityType ty = do
let members = unionMembers inter
isSubsumed <- anyM (ty `equivalentTo`) members
if isSubsumed
then doUnifyTypes inter tys
else do
unlessM (anyM (\t -> allM (`subtypeOf` t) members)
(typesFromCapability ty)) $
tcError $ MalformedUnionTypeError ty inter
doUnifyTypes (unionType inter ty) tys
| isUnionType ty =
doUnifyTypes inter (unionMembers ty ++ tys)
| otherwise =
error "Util.hs: Tried to form an union without a capability"
uniquifyTypeVars :: [Type] -> Type -> TypecheckM Type
uniquifyTypeVars params = typeMapM (uniquifyTypeVar params)
uniquifyTypeVar :: [Type] -> Type -> TypecheckM Type
uniquifyTypeVar params ty
| isTypeVar ty = do
localTypeVars <- asks typeParameters
boundTypeVars <- map fst <$> asks bindings
if ty `elem` params && (ty `elem` localTypeVars || ty `elem` boundTypeVars)
then uniquify ty
else return ty
| otherwise = return ty
where
uniquify :: Type -> TypecheckM Type
uniquify ty = do
localTypeVars <- asks typeParameters
boundTypeVars <- map fst <$> asks bindings
let candidates = map (appendToTypeVar ty) [0..]
return $ fromJust $
find (`notElem` localTypeVars ++ boundTypeVars) candidates
appendToTypeVar ty i =
let id = getId ty
id' = id ++ show i
bound = getBound ty
in setBound bound $
typeVar id' `withModeOf` ty `withBoxOf` ty
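-- Renaming sketch (descriptive comment, added for clarity): a clashing type
-- variable t is renamed to the first of t0, t1, t2, ... that occurs neither
-- among the local type parameters nor among the bound variables, keeping its
-- bound, mode and box.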
isSafeValField :: FieldDecl -> TypecheckM Bool
isSafeValField f@Field{ftype} = do
isSafe <- isSharableType ftype
return $ isValField f && isSafe
abstractTraitFrom :: Type -> (Type, [TraitExtension]) -> TypecheckM TraitDecl
abstractTraitFrom cname (t, exts) = do
tdecl@Trait{tname, treqs, tmethods} <- findTrait t
let bindings = zip (getTypeParameters tname) (getTypeParameters t)
(fieldNames, methodNames) = partitionTraitExtensions exts
fields <- mapM (findField cname) fieldNames
checkLocalFields t fields
fields' <- checkReadFields t fields
methods <- mapM (findMethod cname) methodNames
treqs' <- mapM (resolveReq t) treqs
let newReqs = treqs' ++ map RequiredField fields' ++ map RequiredMethod methods
tmethods' = map (concretizeMethod bindings) tmethods
return tdecl{treqs = newReqs
,tname = t
,tmethods = tmethods'}
where
resolveReq trait r@RequiredField{rfield = Field{fname}} = do
rfield' <- findField trait fname
return r{rfield = rfield'}
resolveReq trait r@RequiredMethod{rheader} = do
rheader' <- findMethod trait (hname rheader)
return r{rheader = rheader'}
concretizeMethod :: [(Type, Type)] -> MethodDecl -> MethodDecl
concretizeMethod bindings m =
let mheader' = replaceHeaderTypes bindings (mheader m)
in m{mheader = mheader'}
checkReadFields t fields
| isReadSingleType t = do
unsafeFields <- filterM (liftM not . isAliasableType . ftype) fields
let unsafeField = head unsafeFields
unless (null unsafeFields) $
tcError $ NonSafeInExtendedReadTraitError
t (fname unsafeField) (ftype unsafeField)
return $ map (\f -> f{fmut = Val}) fields
| otherwise = return fields
checkLocalFields t fields =
unless (isLocalSingleType t || isActiveSingleType t) $ do
localFields <- filterM (isLocalType . ftype) fields
unless (null localFields) $
tcError $ ThreadLocalFieldExtensionError
t (head localFields)
partly :: (Type -> TypecheckM Bool) -> Type -> TypecheckM Bool
partly isKind ty
| isCompositeType ty
, traits <- typesFromCapability ty
= anyM (partly isKind) traits
| isUnionType ty
, tys <- unionMembers ty
= anyM (partly isKind) tys
| isClassType ty = do
capability <- findCapability ty
capIsPartly <- partly isKind capability
tyIsKind <- isKind ty
return $ tyIsKind || capIsPartly
| hasResultType ty &&
not (isArrowType ty) =
partly isKind (getResultType ty)
| isTupleType ty =
anyM (partly isKind) (getArgTypes ty)
| otherwise = isKind ty
fully :: (Type -> Bool) -> Type -> TypecheckM Bool
fully isKind ty
| isCompositeType ty
, traits <- typesFromCapability ty
= allM (fully isKind) traits
| isUnionType ty
, tys <- unionMembers ty
= allM (fully isKind) tys
| isClassType ty = do
capability <- findCapability ty
liftM (isKind ty ||) (fully isKind capability)
| hasResultType ty &&
not (isArrowType ty) =
fully isKind (getResultType ty)
| isTupleType ty =
allM (fully isKind) (getArgTypes ty)
| otherwise = return $ isKind ty
isLinearType :: Type -> TypecheckM Bool
isLinearType = partly (return . isLinearSingleType)
isSubordinateType :: Type -> TypecheckM Bool
isSubordinateType = partly (return . isSubordinateSingleType)
isEncapsulatedType :: Type -> TypecheckM Bool
isEncapsulatedType = fully isSubordinateSingleType
isLocalType :: Type -> TypecheckM Bool
isLocalType = partly (isLocalType' [])
where
isLocalType' :: [Type] -> Type -> TypecheckM Bool
isLocalType' checked ty
| ty `elem` checked = return False
| otherwise = do
holdsLocal <- holdsLocalData checked ty
return $ isLocalSingleType ty || holdsLocal
holdsLocalData :: [Type] -> Type -> TypecheckM Bool
holdsLocalData checked ty
| isPassiveRefType ty && isRefAtomType ty &&
not (isUnsafeSingleType ty) && ty `notElem` checked =
anyM (isLocalType' (ty:checked)) $ getTypeParameters ty
| otherwise = return False
isPassiveType :: Type -> TypecheckM Bool
isPassiveType ty
| isClassType ty && isModeless ty = do
capability <- findCapability ty
isPassiveType capability
| isClassType ty =
return $ isPassiveRefType ty
| isCapabilityType ty =
fully isPassiveRefType ty
| isUnionType ty
, tys <- unionMembers ty
= allM isPassiveType tys
| isTypeVar ty
, Just bound <- getBound ty
= isPassiveType bound
| otherwise = return False
isActiveType :: Type -> TypecheckM Bool
isActiveType ty
| isClassType ty && isModeless ty = do
capability <- findCapability ty
isActiveType capability
| isClassType ty =
return $ isActiveSingleType ty
| isCapabilityType ty =
fully isActiveSingleType ty
| isUnionType ty
, tys <- unionMembers ty
= allM isActiveType tys
| isTypeVar ty
, Just bound <- getBound ty
= isActiveType bound
| otherwise = return False
isSharedType :: Type -> TypecheckM Bool
isSharedType ty
| isClassType ty && isModeless ty = do
capability <- findCapability ty
isSharedType capability
| isClassType ty =
return $ isSharedSingleType ty
| isCapabilityType ty =
fully isSharedSingleType ty
| isUnionType ty
, tys <- unionMembers ty
= allM isSharedType tys
| isTypeVar ty
, Just bound <- getBound ty
= isSharedType bound
| otherwise = return False
isSharableType :: Type -> TypecheckM Bool
isSharableType ty
| isArrowType ty = return $ isModeless ty
| hasResultType ty = isSharableType $ getResultType ty
| isTupleType ty = allM isSharableType $ getArgTypes ty
| isCompositeType ty
, traits <- typesFromCapability ty = allM isSharableType traits
| isClassType ty && isModeless ty = do
capability <- findCapability ty
isSharableType capability
| isModeless ty =
return $ isPrimitive ty
|| isRangeType ty
|| isCType ty
|| isIncapability ty
| otherwise = return $ hasSharableMode ty
isUnsafeType :: Type -> TypecheckM Bool
isUnsafeType ty
| isClassType ty = do
capability <- findCapability ty
capIsUnsafe <- isUnsafeType capability
return $ isUnsafeSingleType ty || capIsUnsafe
| otherwise = return $
any isUnsafeSingleType $ typeComponents ty
isAliasableType :: Type -> TypecheckM Bool
isAliasableType ty
| isArrowType ty = return . not $ isLinearSingleType ty
| hasResultType ty = isAliasableType $ getResultType ty
| isTupleType ty = allM isAliasableType $ getArgTypes ty
| otherwise =
anyM (\f -> f ty)
[isSharableType
,isLocalType
,\t -> return $
isTypeVar t && (isModeless t || hasSharableMode t)
]
checkConjunction :: Type -> [Type] -> TypecheckM ()
checkConjunction source sinks
| isCompositeType source = do
let sourceConjunctions = conjunctiveTypesFromCapability source
mapM_ (\ty -> wellFormedConjunction sourceConjunctions
(sinks \\ [ty]) ty) sinks
| isClassType source = do
cap <- findCapability source
when (isIncapability cap) $
tcError $ CannotUnpackError source
when (source `elem` sinks) $
tcError $ CannotInferUnpackingError source
checkConjunction cap sinks
| isTraitType source =
whenM (isLinearType source) $
tcError $ DuplicatingSplitError source
| otherwise =
tcError $ UnsplittableTypeError source
where
wellFormedConjunction pairs siblings ty = do
when (null pairs) $
tcError $ MalformedConjunctionError ty (head siblings) source
let nonDisjoints =
filter (\ty' -> all (not . singleConjunction ty ty') pairs) siblings
nonDisjoint = head nonDisjoints
unless (null nonDisjoints) $
tcError $ MalformedConjunctionError ty nonDisjoint source
singleConjunction ty1 ty2 (tys1, tys2) =
ty1 `elem` tys1 && ty2 `elem` tys2 ||
ty1 `elem` tys2 && ty2 `elem` tys1
|
parapluu/encore
|
src/types/Typechecker/Util.hs
|
bsd-3-clause
| 34,744
| 0
| 20
| 10,291
| 10,080
| 4,761
| 5,319
| -1
| -1
|
module Teem
(mask
,fa
,gzip
,makeMask
,isNrrd
,center
,getB0Indices
,extractB0
)
where
-- Script Deps
-- center.py
import qualified Data.Map as M
import Development.Shake
import Development.Shake.Command
import Development.Shake.FilePath
import System.Process (callProcess, readProcess)
import Teem.Parser (Result (..), Value (..),
readNrrdHeader)
gzip :: FilePath -> IO ()
gzip out = callProcess "unu" ["save","-e","gzip","-f","nrrd","-i",out,"-o",out]
mask :: FilePath -> FilePath -> FilePath -> IO ()
mask mask vol out = do
callProcess "unu" ["3op", "ifelse", mask, vol
, "0", "-o", out]
gzip out
fa :: FilePath -> FilePath -> Action ()
fa dwi out = do
withTempFile $ \tensor -> do
command_ [] "tend" ["estim","-est","lls","-B","kvp","-knownB0","true","-i",dwi,"-o",tensor]
command_ [] "tend" ["anvol","-t","-1","-a","fa","-i",tensor,"-o",out]
liftIO $ gzip out
makeMask :: FilePath -> FilePath -> Action()
makeMask invol outvol = do
command_ [] "unu" ["3op","ifelse",invol,"1","0","-o",outvol]
liftIO $ gzip outvol
isNrrd :: FilePath -> Bool
isNrrd file = ext == ".nrrd" || ext == ".nhdr"
where
ext = takeExtension file
toNifti :: FilePath -> FilePath -> Action ()
toNifti nrrd out = unit $ cmd "ConvertBetweenFileFormats" nrrd out
center :: FilePath -> IO ()
center nrrd = callProcess "center.py"
["-i", nrrd
,"-o", nrrd]
getB0Indices :: FilePath -> IO [Int]
getB0Indices nrrd = do
maybeKvps <- Teem.Parser.readNrrdHeader nrrd
case maybeKvps of
(Success kvps) -> return
$ map (read . drop 15 . fst)
. filter ((== VGradientDirection (0,0,0)) . snd)
. M.toList
$ kvps
failure -> do
print failure
error $ "Teem.getB0Indices: Failed to parse nrrd header: " ++ nrrd
extractB0 :: FilePath -> FilePath -> IO ()
extractB0 dwi out = do
b0indices <- getB0Indices dwi
if null b0indices then error $ "No B0 found in " ++ dwi
else do
callProcess "unu" ["slice"
,"-a", "3"
,"-p", show $ head b0indices
,"-i", dwi
,"-o", out]
gzip out
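-- A minimal usage sketch (added; the file names are hypothetical, and the
-- external tools invoked by this module must be on PATH): extract the first
-- B0 volume from a DWI and zero everything outside a brain mask.
exampleB0AndMask :: IO ()
exampleB0AndMask = do
  extractB0 "dwi.nrrd" "b0.nrrd"
  mask "brainmask.nrrd" "b0.nrrd" "b0-masked.nrrd"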
|
reckbo/ppl
|
pipeline-lib/Teem.hs
|
bsd-3-clause
| 2,298
| 0
| 18
| 659
| 786
| 425
| 361
| 65
| 2
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
module Main where
import Prelude ()
import MyLittlePrelude
import Control.Exception (finally)
import Data.Acid (closeAcidState, openLocalState)
import Data.IORef
import Network.HTTP.Client (newManager)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Network.Wai (Application)
import Network.Wai.Handler.Warp (run)
import Network.Wai.Middleware.Cors (CorsResourcePolicy (..), cors,
simpleCorsResourcePolicy,
simpleHeaders, simpleMethods)
import Network.Wai.Middleware.RequestLogger (logStdoutDev)
import Servant (Server, enter, serve)
import Inspection.API
import Inspection.API.Types
import qualified Inspection.Config as Config
import Inspection.Database
import Inspection.Flags
import Inspection.GithubM
import qualified Inspection.BuildLogStorage as BuildLogStorage
newEnvironment :: IO Environment
newEnvironment = do
envFlags <- getEnvironmentFlags
envConfig <- Config.getConfig "inspection.yaml"
envManager <- newManager tlsManagerSettings
envAcid <- openLocalState initialDB
envGithubCacheRef <- newIORef (GithubCache mempty)
envBuildLogStorageEnv <- BuildLogStorage.Environment envManager
<$> BuildLogStorage.loadConfig
pure Environment{..}
main :: IO ()
main = do
env <- newEnvironment
finally
(run 8080 (app env))
(closeAcidState (envAcid env))
server :: Environment -> Server InspectorAPI
server env = enter (inspectorToEither env) inspectorServer
app :: Environment -> Application
app env = logStdoutDev $ cors (const (Just corsPolicy))
$ serve (Proxy :: Proxy InspectorAPI)
(server env)
where
corsPolicy = simpleCorsResourcePolicy
{ corsMethods = simpleMethods <> ["DELETE", "PUT", "PATCH"]
, corsRequestHeaders = simpleHeaders <> ["content-type"]
}
|
zudov/purescript-inspection
|
app/Main.hs
|
bsd-3-clause
| 2,272
| 0
| 11
| 660
| 471
| 263
| 208
| 51
| 1
|
import Text.Show.Pretty
import Text.XML.HXT.Core
import Text.XML.HXT.Curl
import Web.Ohloh
-- Insert your API Key here:
apiKey = ""
myQueryUrl = "http://www.ohloh.net/languages/haskell.xml?api_key=" ++ apiKey
main = do
res <- runX $ xunpickleDocument xpResponse [ withRemoveWS yes, withCurl [] ] $ myQueryUrl
putStrLn $ ppShow $ head res
|
fthomas/ohloh-hs
|
tests/fetch-haskell.hs
|
bsd-3-clause
| 345
| 0
| 13
| 52
| 94
| 51
| 43
| 9
| 1
|
{-# OPTIONS_HADDOCK hide #-}
module Control.Distributed.STM.DebugBase
(debugStrLn0,debugStrLn1,debugStrLn2,debugStrLn3,debugStrLn4,
debugStrLn5,debugStrLn6,debugStrLn7,debugStrLn8,debugStrLn9,
gDebugLock, startGDebug, stopGDebug, gDebugStrLn,
newDebugMVar, inspectMVars, timedInspect) where
import Control.Concurrent
import Prelude
import System.IO
import System.IO.Unsafe
---------------------
-- Debugging Tools --
---------------------
debug0,debug1,debug2,debug3,debug4,debug5,debug6,debug7,debug8,debug9 :: Bool
debug0 = False -- name server
debug1 = False -- catch error
debug2 = False -- robustness
debug3 = False -- robust <-
debug4 = False -- tcp connection
debug5 = False -- robust ->
debug6 = False -- bomberman
debug7 = False -- atomic
debug8 = False -- 3 phase commit
debug9 = False -- life check
debugStrLn0 :: String -> IO ()
debugStrLn0 str = if debug0 then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
debugStrLn1 :: String -> IO ()
debugStrLn1 str = if debug1 then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
debugStrLn2 :: String -> IO ()
debugStrLn2 str = if debug2 then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
debugStrLn3 :: String -> IO ()
debugStrLn3 str = if debug3 then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
debugStrLn4 :: String -> IO ()
debugStrLn4 str = if debug4 then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
debugStrLn5 :: String -> IO ()
debugStrLn5 str = if debug5 then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
debugStrLn6 :: String -> IO ()
debugStrLn6 str = if debug6 then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
debugStrLn7 :: String -> IO ()
debugStrLn7 str = if debug7 then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
debugStrLn8 :: String -> IO ()
debugStrLn8 str = if debug8 then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
debugStrLn9 :: String -> IO ()
debugStrLn9 str = if debug9 then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
gDebug :: MVar Bool
{-# NOINLINE gDebug #-}
gDebug = unsafePerformIO (newMVar False)
gDebugLock :: MVar ()
{-# NOINLINE gDebugLock #-}
gDebugLock = unsafePerformIO (newMVar ())
gDebugStrLn :: String -> IO ()
gDebugStrLn str = do
isDebug <- readMVar gDebug
if isDebug then do
myPid <- myThreadId
takeMVar gDebugLock
hPutStrLn stderr (show myPid++": "++str)
putMVar gDebugLock ()
else return ()
startGDebug :: IO ()
startGDebug = swapMVar gDebug True >> return ()
stopGDebug :: IO ()
stopGDebug = swapMVar gDebug False >> return ()
gMVarStates :: MVar (IO ())
{-# NOINLINE gMVarStates #-}
gMVarStates = unsafePerformIO (newMVar (return ()))
newDebugMVar :: String -> a -> IO (MVar a)
newDebugMVar s var = do
mVar <- newMVar var
mVarStates <- takeMVar gMVarStates
putMVar gMVarStates (do
hPutStr stderr (s++" ")
b <- isEmptyMVar mVar
hPutStr stderr (if b then "empty !; " else "full; ")
mVarStates)
return mVar
inspectMVars :: String -> IO ()
inspectMVars s = do
takeMVar gDebugLock
hPutStrLn stderr s
myPid <- myThreadId
hPutStr stderr (show myPid++": ### MVar states >>>")
mVarStates <- readMVar gMVarStates
mVarStates
hPutStrLn stderr ("<<< MVar states ###")
putMVar gDebugLock ()
timedInspect :: IO ()
timedInspect = do
if debug6
then do
inspectMVars "### Timed Debugger ###"
threadDelay (5 * 1000000)
timedInspect
else return ()
instance Show (IO a) where
show _ = show "IO "
instance Show (MVar a) where
show _ = show "MVar "
instance Show (Chan a) where
show _ = show "Chan "
|
proger/haskell-dstm
|
Control/Distributed/STM/DebugBase.hs
|
bsd-3-clause
| 6,065
| 0
| 14
| 2,549
| 1,546
| 759
| 787
| 147
| 2
|
{-# LANGUAGE LambdaCase #-}
module Control.Monad.Trans.Error.Wrap
( WrappedError (..)
, WrappedErrorT (..)
, runWrappedErrorT
, throwError
, catchError
, mapWrappedErrorT
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Fix
import Control.Monad.Trans
import Control.Monad.Trans.Error hiding (catchError, throwError)
import qualified Control.Monad.Trans.Error as Error
data WrappedError e
= NoMsg
| StrMsg String
| Error !e deriving Show
instance Functor WrappedError where
fmap _ NoMsg = NoMsg
fmap _ (StrMsg s) = StrMsg s
fmap f (Error e) = Error (f e)
instance Error (WrappedError e) where
noMsg = NoMsg
strMsg = StrMsg
newtype WrappedErrorT e m a =
WrapErrorT { unwrapErrorT :: ErrorT (WrappedError e) m a
}
runWrappedErrorT :: WrappedErrorT e m a -> m (Either (WrappedError e) a)
runWrappedErrorT = runErrorT . unwrapErrorT
instance Functor m => Functor (WrappedErrorT e m) where
fmap f = WrapErrorT . fmap f . unwrapErrorT
a <$ m = WrapErrorT $ a <$ unwrapErrorT m
instance (Functor m, Monad m) => Applicative (WrappedErrorT e m) where
pure = WrapErrorT . return
f <*> v = WrapErrorT $ unwrapErrorT f <*> unwrapErrorT v
instance (Functor m, Monad m) => Alternative (WrappedErrorT e m) where
empty = mzero
(<|>) = mplus
instance Monad m => Monad (WrappedErrorT e m) where
return = WrapErrorT . return
m >>= k = WrapErrorT $ unwrapErrorT m >>= unwrapErrorT . k
m >> n = WrapErrorT $ unwrapErrorT m >> unwrapErrorT n
fail = WrapErrorT . fail
instance Monad m => MonadPlus (WrappedErrorT e m) where
mzero = WrapErrorT mzero
m `mplus` n = WrapErrorT $ unwrapErrorT m `mplus` unwrapErrorT n
instance MonadFix m => MonadFix (WrappedErrorT e m) where
mfix = WrapErrorT . mfix . (unwrapErrorT .)
instance MonadTrans (WrappedErrorT e) where
lift = WrapErrorT . lift
instance MonadIO m => MonadIO (WrappedErrorT e m) where
liftIO = WrapErrorT . liftIO
throwError :: Monad m => e -> WrappedErrorT e m a
throwError = WrapErrorT . Error.throwError . Error
catchError :: Monad m =>
WrappedErrorT e m a ->
(e -> WrappedErrorT e m a) ->
WrappedErrorT e m a
m `catchError` h =
WrapErrorT $ unwrapErrorT m `Error.catchError` \ case
e@NoMsg -> Error.throwError e
e@(StrMsg _) -> Error.throwError e
Error e -> unwrapErrorT $ h e
mapWrappedErrorT :: (m (Either (WrappedError e) a) -> n (Either (WrappedError e') b)) ->
WrappedErrorT e m a -> WrappedErrorT e' n b
mapWrappedErrorT f = WrapErrorT . mapErrorT f . unwrapErrorT
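-- A minimal usage sketch (added; 'ExampleError', 'exampleSafeDiv' and
-- 'exampleRecovered' are hypothetical, not part of the original API). It shows
-- that throwError wraps the payload in 'Error', and that catchError hands only
-- payloads to the handler; NoMsg and StrMsg errors are re-thrown untouched.
data ExampleError = ExampleDivByZero deriving Show

exampleSafeDiv :: Monad m => Int -> Int -> WrappedErrorT ExampleError m Int
exampleSafeDiv _ 0 = throwError ExampleDivByZero
exampleSafeDiv x y = return (x `div` y)

exampleRecovered :: Monad m => m (Either (WrappedError ExampleError) Int)
exampleRecovered =
  runWrappedErrorT (exampleSafeDiv 10 0 `catchError` \ExampleDivByZero -> return 0)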
|
sonyandy/unify
|
src/Control/Monad/Trans/Error/Wrap.hs
|
bsd-3-clause
| 2,638
| 0
| 12
| 601
| 937
| 488
| 449
| 68
| 3
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GADTs #-}
-- | Make changes to project or global configuration.
module Stack.ConfigCmd
(ConfigCmdSet(..)
,configCmdSetParser
,cfgCmdSet
,cfgCmdSetName
,cfgCmdName) where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Unlift
import Control.Monad.Logger
import qualified Data.ByteString as S
import qualified Data.HashMap.Strict as HMap
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import qualified Options.Applicative as OA
import qualified Options.Applicative.Types as OA
import Path
import Path.IO
import Prelude -- Silence redundant import warnings
import Stack.Config (makeConcreteResolver, getProjectConfig, getImplicitGlobalProjectDir, LocalConfigStatus(..))
import Stack.Constants
import Stack.Snapshot (loadResolver)
import Stack.Types.Config
import Stack.Types.Resolver
import Stack.Types.StringError
data ConfigCmdSet
= ConfigCmdSetResolver AbstractResolver
| ConfigCmdSetSystemGhc CommandScope
Bool
| ConfigCmdSetInstallGhc CommandScope
Bool
data CommandScope
= CommandScopeGlobal
-- ^ Apply changes to the global configuration,
-- typically at @~/.stack/config.yaml@.
| CommandScopeProject
-- ^ Apply changes to the project @stack.yaml@.
configCmdSetScope :: ConfigCmdSet -> CommandScope
configCmdSetScope (ConfigCmdSetResolver _) = CommandScopeProject
configCmdSetScope (ConfigCmdSetSystemGhc scope _) = scope
configCmdSetScope (ConfigCmdSetInstallGhc scope _) = scope
cfgCmdSet
:: (StackMiniM env m, HasConfig env, HasGHCVariant env)
=> GlobalOpts -> ConfigCmdSet -> m ()
cfgCmdSet go cmd = do
conf <- view configL
configFilePath <-
case configCmdSetScope cmd of
CommandScopeProject -> do
mstackYamlOption <- forM (globalStackYaml go) resolveFile'
mstackYaml <- getProjectConfig mstackYamlOption
case mstackYaml of
LCSProject stackYaml -> return stackYaml
LCSNoProject -> liftM (</> stackDotYaml) (getImplicitGlobalProjectDir conf)
LCSNoConfig -> errorString "config command used when no local configuration available"
CommandScopeGlobal -> return (configUserConfigPath conf)
-- We don't need to worry about checking for a valid yaml here
(config :: Yaml.Object) <-
liftIO (Yaml.decodeFileEither (toFilePath configFilePath)) >>= either throwM return
newValue <- cfgCmdSetValue (parent configFilePath) cmd
let cmdKey = cfgCmdSetOptionName cmd
config' = HMap.insert cmdKey newValue config
if config' == config
then $logInfo
(T.pack (toFilePath configFilePath) <>
" already contained the intended configuration and remains unchanged.")
else do
liftIO (S.writeFile (toFilePath configFilePath) (Yaml.encode config'))
$logInfo (T.pack (toFilePath configFilePath) <> " has been updated.")
cfgCmdSetValue
:: (StackMiniM env m, HasConfig env, HasGHCVariant env)
=> Path Abs Dir -- ^ root directory of project
-> ConfigCmdSet -> m Yaml.Value
cfgCmdSetValue root (ConfigCmdSetResolver newResolver) = do
concreteResolver <- makeConcreteResolver (Just root) newResolver
-- Check that the snapshot actually exists
void $ loadResolver concreteResolver
return (Yaml.toJSON concreteResolver)
cfgCmdSetValue _ (ConfigCmdSetSystemGhc _ bool) =
return (Yaml.Bool bool)
cfgCmdSetValue _ (ConfigCmdSetInstallGhc _ bool) =
return (Yaml.Bool bool)
cfgCmdSetOptionName :: ConfigCmdSet -> Text
cfgCmdSetOptionName (ConfigCmdSetResolver _) = "resolver"
cfgCmdSetOptionName (ConfigCmdSetSystemGhc _ _) = configMonoidSystemGHCName
cfgCmdSetOptionName (ConfigCmdSetInstallGhc _ _) = configMonoidInstallGHCName
cfgCmdName :: String
cfgCmdName = "config"
cfgCmdSetName :: String
cfgCmdSetName = "set"
configCmdSetParser :: OA.Parser ConfigCmdSet
configCmdSetParser =
OA.hsubparser $
mconcat
[ OA.command
"resolver"
(OA.info
(ConfigCmdSetResolver <$>
OA.argument
readAbstractResolver
(OA.metavar "RESOLVER" <>
OA.help "E.g. \"nightly\" or \"lts-7.2\""))
(OA.progDesc
"Change the resolver of the current project. See https://docs.haskellstack.org/en/stable/yaml_configuration/#resolver for more info."))
, OA.command
(T.unpack configMonoidSystemGHCName)
(OA.info
(ConfigCmdSetSystemGhc <$> scopeFlag <*> boolArgument)
(OA.progDesc
"Configure whether stack should use a system GHC installation or not."))
, OA.command
(T.unpack configMonoidInstallGHCName)
(OA.info
(ConfigCmdSetInstallGhc <$> scopeFlag <*> boolArgument)
(OA.progDesc
"Configure whether stack should automatically install GHC when necessary."))
]
scopeFlag :: OA.Parser CommandScope
scopeFlag =
OA.flag
CommandScopeProject
CommandScopeGlobal
(OA.long "global" <>
OA.help
"Modify the global configuration (typically at \"~/.stack/config.yaml\") instead of the project stack.yaml.")
readBool :: OA.ReadM Bool
readBool = do
s <- OA.readerAsk
case s of
"true" -> return True
"false" -> return False
_ -> OA.readerError ("Invalid value " ++ show s ++ ": Expected \"true\" or \"false\"")
boolArgument :: OA.Parser Bool
boolArgument = OA.argument readBool (OA.metavar "true|false" <> OA.completeWith ["true", "false"])
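-- Example invocations (added, illustrative; assuming configMonoidSystemGHCName
-- and configMonoidInstallGHCName render as "system-ghc" and "install-ghc"):
--
-- > stack config set resolver lts-7.2
-- > stack config set system-ghc --global true
-- > stack config set install-ghc false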
|
martin-kolinek/stack
|
src/Stack/ConfigCmd.hs
|
bsd-3-clause
| 6,260
| 0
| 18
| 1,713
| 1,209
| 632
| 577
| 136
| 5
|
{-# LANGUAGE MultiWayIf #-}
module Toy.SM.Interpreter
( execute
) where
import Control.Lens (at, (%=), (+=), (.=), (<<%=), (?=))
import Control.Monad (forever, mzero, replicateM, replicateM_, void,
when)
import Control.Monad.Error.Class (MonadError (..))
import Control.Monad.Morph (hoist)
import Control.Monad.Trans (MonadIO (..))
import Control.Monad.Trans (lift)
import Data.Conduit (await, yield)
import Data.Conduit.Lift (evalStateC, execStateC, runMaybeC)
import Data.Default (def)
import Data.Functor (($>))
import qualified Data.Map as M
import qualified Data.Vector as V
import Formatting (build, int, sformat, shown, string, (%))
import Universum
import Toy.Base (Exec, FunSign (..))
import Toy.Exp (ExpRes (..), arithspoon, arrayAccess,
arrayFree, arrayLength, arrayMake, arrayMakeU,
arraySet, binOp, changeRefCounter,
checkNoExpResRefs, runWholeRefCountingGc,
valueOnly)
import Toy.SM.Data (ExecState (..), IP, Inst (..), Insts,
LabelId (..), esIp, esLocals, esStack,
exitLabel, initFunName)
import Toy.Util.Error (mapError)
execute :: MonadIO m => Insts -> Exec m ()
execute insts =
hoist runWholeRefCountingGc $
evalStateC def{ _esIp = programEntry } $
executeDo
where
executeDo = void . runMaybeC . forever $ do
i <- use esIp
inst <- insts V.!? i `whenNothing` throwError "To infinity and beyond! >>>"
mapError (sformat ("At "%build%" ("%shown%"): "%build) i inst) $
step inst
esIp += 1
step = \case
Push v -> push (ValueR v)
PushNull -> push (ValueR 0)
Drop -> pop >>= changeRefCounter (-)
Dup -> replicateM_ 2 . push =<< pop
Bin op -> do
let lil = "Arithmetic operation on reference"
[b, a] <- replicateM 2 (pop `valueOnly` lil)
push . ValueR =<< arithspoon (binOp op a b)
Load n -> use (esLocals . at n) >>= \case
Nothing -> push NotInitR
Just var -> push var >> changeRefCounter (+) var
LoadNoGc n -> use (esLocals . at n) >>= \case
Nothing -> push NotInitR
Just var -> push var
StoreInit n -> esLocals . at n ?= ValueR 0
Store n -> do
whenJustM (use $ esLocals . at n) $
changeRefCounter (-)
pop >>= (esLocals . at n ?= )
ArrayMake -> do
k <- pop
arrayMakeU k >>= push
ArrayAccess -> do
i <- pop
a <- pop
e <- arrayAccess a i
push e
ArraySet -> do
e <- pop
i <- pop
a <- pop
arraySet a i e
Label{} -> step Nop
Jmp lid -> do
ensureStackSize 0 "jump"
case lid of
l@LLabel{} -> do
ip <- use esIp
-- only forward lookup for local labels
case V.elemIndex (Label l) (V.drop ip insts) of
Nothing -> throwError $ "No label " <> show l <> " ahead"
Just relIp -> esIp .= ip + relIp
other -> (esIp .= ) =<< getLabel other
JmpIf lid -> do
cond <- pop `valueOnly` "If on reference"
when (cond /= 0) $ step (Jmp lid)
Call (FunSign name args) -> do
processCall name args
JumpToFunEnd -> step $ Jmp exitLabel
FunExit -> lift mzero
Enter{} -> throwError "Got into out of nowhere"
Nop -> return ()
push v = esStack %= (v:)
pop = use esStack >>= \case
[] -> throwError "Empty stack"
s:ss -> (esStack .= ss) $> s
ensureStackSize size reason = do
st <- use esStack
when (length st /= size) . throwError $
sformat ("Stack has size "%int%", but expected to have "%
int%" instead, reason: "%string)
(length st) size reason
badStackAtFunEnd = sformat ("Not 1 argument on function end: "%shown)
getLabel :: MonadError Text m => LabelId -> m IP
getLabel = buildLabelsMap insts
programEntry =
either (error "No entry point exists") identity $
getLabel (FLabel initFunName)
processCall name args = case name of
"read" ->
await >>= maybe (throwError "No input") (push . ValueR)
"write" -> do
yield =<< pop `valueOnly` "Can't write reference"
push (ValueR 0)
"allocate" -> do
l <- pop
a <- arrayMake l (ValueR 0)
push a
"array_free" -> do
a <- pop
arrayFree a
push (ValueR 0)
"arrlen" -> do
a <- pop
l <- arrayLength a
push l
"arrmake" -> do
l <- pop
e <- pop
a <- arrayMake l e
push a
"Arrmake" ->
step (Call $ FunSign "arrmake" args)
"ensure_no_allocations" -> do
checkNoExpResRefs
push (ValueR 0)
_ -> do
stack <- esStack <<%= drop (length args)
entry <- getLabel (FLabel name)
let funExecState = ExecState
{ _esLocals = M.fromList (zip args stack)
, _esStack = []
, _esIp = entry
}
-- run execution with its own `ExecState`, not allowing it to
-- influence our current state
funEndExecState <-
hoist (lift . lift) $ execStateC funExecState executeDo
forM_ (take (length args) stack) $
changeRefCounter (-)
case _esStack funEndExecState of
[x] -> esStack %= (x:)
other -> throwError $ badStackAtFunEnd other
buildLabelsMap :: MonadError Text m => Insts -> LabelId -> m IP
buildLabelsMap (V.toList -> insts) =
let addLabel (idx, Label li) = M.insert li idx
addLabel _ = identity
labelsMap = foldr addLabel M.empty $ zip [0..] insts
in \labelId -> (labelsMap ^. at labelId)
`whenNothing` throwError (sformat ("No label "%build) labelId)
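-- Resolution sketch (descriptive comment, added for clarity): every Label
-- instruction is mapped to its index, so for a program whose instruction 1 is
-- Label (FLabel "main"), the entry point resolves to instruction pointer 1;
-- looking up a label that never occurs yields the "No label ..." error above.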
|
Martoon-00/toy-compiler
|
src/Toy/SM/Interpreter.hs
|
bsd-3-clause
| 7,076
| 0
| 22
| 3,141
| 1,992
| 1,015
| 977
| -1
| -1
|
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
module Util.AccessFrequency
( getAverageReadsPerFile
, calculateAverageReadsPerFile
) where
import Data.List (foldl', sort)
import qualified Data.Map.Strict as M
import Request (FileID, FileRequest, RequestType (..),
forEachFileRequestIn)
type FileStatistic = M.Map FileID Int
getAverageReadsPerFile :: String -> IO (Double, Int, Int, Int)
getAverageReadsPerFile logPath =
calculateAverageReadsPerFile `forEachFileRequestIn` logPath
calculateAverageReadsPerFile :: [FileRequest] -> (Double, Int, Int, Int)
calculateAverageReadsPerFile requests =
let statistic = foldl' fileToStatistic M.empty requests
sumOfAllReads = fromIntegral $ foldl' (+) 0 statistic
sumOfDifferentFiles = fromIntegral $ M.size statistic
maxReads = foldl' max 0 statistic
minReads = foldl' min maxReads statistic
median = medianAccessRate statistic
in (sumOfAllReads / sumOfDifferentFiles, minReads, maxReads, median)
fileToStatistic :: FileStatistic -> FileRequest -> FileStatistic
fileToStatistic statistic (Read, fileId, _) =
let combine oldValue newValue = oldValue + newValue
in M.insertWith combine fileId 1 statistic
fileToStatistic statistic _ = statistic
medianAccessRate :: FileStatistic -> Int
medianAccessRate statistic =
let ordered = sort . M.elems $ statistic
indexOfMedian = length ordered `div` 2
in ordered !! indexOfMedian
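-- Worked example (added, illustrative; the request values are hypothetical):
-- for a log with three reads of file "a", one read of file "b" and one write,
-- the statistic is fromList [("a",3),("b",1)], so calculateAverageReadsPerFile
-- returns (2.0, 1, 3, 3): 4 reads over 2 files, minimum 1, maximum 3, and the
-- median takes index (2 `div` 2) = 1 of the sorted counts [1,3].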
|
wochinge/CacheSimulator
|
src/Util/AccessFrequency.hs
|
bsd-3-clause
| 1,510
| 0
| 11
| 314
| 370
| 205
| 165
| 31
| 1
|
{-# LANGUAGE JavaScriptFFI #-}
module GHCJS.Three.HasGeoMat
( HasGeoMat(..)
) where
import GHCJS.Types
import GHCJS.Three.Monad
import GHCJS.Three.Geometry
import GHCJS.Three.Material
foreign import javascript unsafe "($1)['geometry']"
thr_geometry :: JSVal -> Three JSVal
foreign import javascript unsafe "($2)['geometry'] = $1"
thr_setGeometry :: JSVal -> JSVal -> Three ()
foreign import javascript unsafe "($1)['material']"
thr_material :: JSVal -> Three JSVal
foreign import javascript unsafe "($2)['material'] = $1"
thr_setMaterial :: JSVal -> JSVal -> Three ()
class ThreeJSVal m => HasGeoMat m where
-- | get geometry
geometry :: m -> Three Geometry
geometry = fmap fromJSVal . thr_geometry . toJSVal
-- | set geometry
setGeometry :: IsGeometry g => g -> m -> Three ()
setGeometry g m = thr_setGeometry (toJSVal g) (toJSVal m)
-- | get material
material :: m -> Three Material
material = fmap fromJSVal . thr_material . toJSVal
-- | set material
setMaterial :: IsMaterial mat => mat -> m -> Three ()
setMaterial mat m = thr_setMaterial (toJSVal mat) (toJSVal m)
|
manyoo/ghcjs-three
|
src/GHCJS/Three/HasGeoMat.hs
|
bsd-3-clause
| 1,151
| 23
| 6
| 242
| 310
| 166
| 144
| 24
| 0
|
module Lib
( someFunc
) where
someFunc :: IO ()
someFunc = putStrLn "someFunc"
data Orientation = Up | Down | Left | Right
-- type Graphite a = Orientation -> Double -> Double -> a -> Html
-- data Graphite a = Graphite
-- { grOrientation :: Orientation
-- , grX :: Double
-- , grY :: Double
-- , grFunc :: [a] -> Html
-- , gr :: Int
-- }
-- Let's say we have a double bar graph:
--
-- |
-- |
-- Ht | | Bars by gender
-- | || ||
-- | || || ||
-- -----------------
-- Age
-- ex :: Quantity
-- ex2 :: Gender -> Quantity
-- ex3 :: (Age,Gender) -> Quantity OR Age -> Gender -> Quantity
-- In a sense, singleBar consumes an `a`.
-- singleBar :: (a -> Double) -> Thickness -> Orientation -> (a -> Diagram)
-- multi :: Orientation -> (a -> Diagram) -> [x] -> (x -> a) -> Diagram
-- multi :: (y -> x -> a) -> Diagram
-- multi' :: Orientation -> ((Rec Identity rs -> a) -> Diagram) -> [x]
-- -> (Rec Identity (x ': rs) -> a) -> Diagram
-- singleLine :: (a -> Double) -> Spacing -> Orientation -> ([a] -> Diagram)
-- many :: (a -> Diagram) -> Spacing -> ([a] -> Diagram)
-- many :: (a -> Diagram) -> Spacing -> [x] -> (x -> a) -> Diagram
|
andrewthad/graphite
|
src/Lib.hs
|
bsd-3-clause
| 1,374
| 0
| 6
| 491
| 72
| 54
| 18
| 5
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Lang.PrettyPrint where
import System.Console.ANSI
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
data ColorScheme = ColorScheme
{ keywordColor :: Text
, sortColor :: Text
, namedColor :: Text
, unnamedColor :: Text
, indexColor :: Text
, promptColor :: Text
, errorColor :: Text
, okColor :: Text
, showIndices :: Bool
, reset :: Text
}
banner :: Text -> Text
banner t = T.center 30 '=' $ T.concat [" ", t, " "]
colorBanner :: ColorScheme -> (ColorScheme -> Text) -> Text -> Text
colorBanner sch color t =
let len = T.length t
d = (30 - len - 2) `div` 2
m = (30 - len - 2) `mod` 2
in T.concat [ T.replicate (d + m) "="
, color sch, " ", t, " ", reset sch
, T.replicate d "="
]
defaultScheme = ColorScheme
{ keywordColor = T.pack $ setSGRCode [SetColor Foreground Vivid Magenta]
, sortColor = T.pack $ setSGRCode [SetColor Foreground Dull Magenta]
, unnamedColor = T.pack $ setSGRCode [SetColor Foreground Dull Blue]
, namedColor = T.pack $ setSGRCode [SetColor Foreground Vivid Yellow]
, indexColor = T.pack $ setSGRCode [SetColor Foreground Vivid Red]
, promptColor = T.pack $ setSGRCode [SetConsoleIntensity BoldIntensity]
, errorColor = T.pack $ setSGRCode [SetColor Foreground Vivid Red]
, okColor = T.pack $ setSGRCode [SetColor Foreground Vivid Green]
, showIndices = False
, reset = sgrReset
}
debugScheme = defaultScheme { showIndices = True }
noScheme = ColorScheme
{ keywordColor = ""
, sortColor = ""
, namedColor = ""
, unnamedColor = ""
, indexColor = ""
, promptColor = ""
, errorColor = ""
, okColor = ""
, showIndices = False
, reset = ""
}
sgrReset :: Text
sgrReset = T.pack $ setSGRCode [Reset]
class PrettyPrint a where
pretty :: ColorScheme -> a -> Text
prettyBracket :: ColorScheme -> a -> Text
prettyBracket sch x = T.concat ["(", pretty sch x, ")"]
prettyPrint :: ColorScheme -> a -> IO ()
prettyPrint cs = T.putStrLn . pretty cs
withColor :: ColorScheme -> (ColorScheme -> Text) -> Text -> Text
withColor sch color t = T.concat [color sch, t, reset sch]
instance PrettyPrint Text where
pretty sch t = t
|
Alasdair/Mella
|
Lang/PrettyPrint.hs
|
bsd-3-clause
| 2,412
| 0
| 12
| 671
| 790
| 441
| 349
| 62
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Ivory.Tower.AST.Signal where
#if MIN_VERSION_mainland_pretty(0,6,0)
import Text.PrettyPrint.Mainland.Class
#endif
import Text.PrettyPrint.Mainland
import Ivory.Tower.Types.Time
data Signal = Signal
-- Note: The Ord instance must sort first by deadline,
-- otherwise interrupt handlers will not process
-- interrupts in the correct order.
{ signal_deadline :: Microseconds
, signal_name :: String
, signal_number :: Int
} deriving (Eq, Show, Ord)
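-- Added note: the derived Ord compares fields in declaration order, so two
-- signals s1 and s2 with signal_deadline s1 < signal_deadline s2 satisfy
-- s1 < s2 regardless of their names or numbers, which is exactly what the
-- interrupt-ordering remark above relies on.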
instance Pretty Signal where
ppr Signal{..} = text signal_name <> colon
<+/> "deadline=" <> ppr signal_deadline
<+/> "number=" <> ppr signal_number
|
GaloisInc/tower
|
tower/src/Ivory/Tower/AST/Signal.hs
|
bsd-3-clause
| 754
| 0
| 11
| 153
| 129
| 77
| 52
| 15
| 0
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
-- | The Config type.
module Stack.Types.Config
(
-- * Main configuration types and classes
-- ** HasPlatform & HasStackRoot
HasPlatform(..)
,PlatformVariant(..)
-- ** Config & HasConfig
,Config(..)
,HasConfig(..)
,askLatestSnapshotUrl
,explicitSetupDeps
,getMinimalEnvOverride
-- ** BuildConfig & HasBuildConfig
,BuildConfig(..)
,LocalPackages(..)
,LocalPackageView(..)
,NamedComponent(..)
,stackYamlL
,projectRootL
,HasBuildConfig(..)
-- ** GHCVariant & HasGHCVariant
,GHCVariant(..)
,ghcVariantName
,ghcVariantSuffix
,parseGHCVariant
,HasGHCVariant(..)
,snapshotsDir
-- ** EnvConfig & HasEnvConfig
,EnvConfig(..)
,HasEnvConfig(..)
,getCompilerPath
-- * Details
-- ** ApplyGhcOptions
,ApplyGhcOptions(..)
-- ** ConfigException
,HpackExecutable(..)
,ConfigException(..)
-- ** WhichSolverCmd
,WhichSolverCmd(..)
-- ** ConfigMonoid
,ConfigMonoid(..)
,configMonoidInstallGHCName
,configMonoidSystemGHCName
,parseConfigMonoid
-- ** DumpLogs
,DumpLogs(..)
-- ** EnvSettings
,EnvSettings(..)
,minimalEnvSettings
-- ** GlobalOpts & GlobalOptsMonoid
,GlobalOpts(..)
,GlobalOptsMonoid(..)
,StackYamlLoc(..)
,defaultLogLevel
-- ** LoadConfig
,LoadConfig(..)
-- ** PackageIndex, IndexName & IndexLocation
-- Re-exports
,PackageIndex(..)
,IndexName(..)
,indexNameText
-- Config fields
,configPackageIndex
,configPackageIndexOld
,configPackageIndexCache
,configPackageIndexCacheOld
,configPackageIndexGz
,configPackageIndexRoot
,configPackageTarball
-- ** Project & ProjectAndConfigMonoid
,Project(..)
,ProjectAndConfigMonoid(..)
,parseProjectAndConfigMonoid
-- ** PvpBounds
,PvpBounds(..)
,PvpBoundsType(..)
,parsePvpBounds
-- ** ColorWhen
,readColorWhen
-- ** SCM
,SCM(..)
-- * Paths
,bindirSuffix
,configInstalledCache
,configLoadedSnapshotCache
,GlobalInfoSource(..)
,getProjectWorkDir
,docDirSuffix
,flagCacheLocal
,extraBinDirs
,hpcReportDir
,installationRootDeps
,installationRootLocal
,bindirCompilerTools
,hoogleRoot
,hoogleDatabasePath
,packageDatabaseDeps
,packageDatabaseExtra
,packageDatabaseLocal
,platformOnlyRelDir
,platformGhcRelDir
,platformGhcVerOnlyRelDir
,useShaPathOnWindows
,workDirL
-- * Command-specific types
-- ** Eval
,EvalOpts(..)
-- ** Exec
,ExecOpts(..)
,SpecialExecCmd(..)
,ExecOptsExtra(..)
-- ** Setup
,DownloadInfo(..)
,VersionedDownloadInfo(..)
,GHCDownloadInfo(..)
,SetupInfo(..)
,SetupInfoLocation(..)
-- ** Docker entrypoint
,DockerEntrypoint(..)
,DockerUser(..)
,module X
-- * Lens helpers
,wantedCompilerVersionL
,actualCompilerVersionL
,buildOptsL
,globalOptsL
,buildOptsInstallExesL
,buildOptsMonoidHaddockL
,buildOptsMonoidTestsL
,buildOptsMonoidBenchmarksL
,buildOptsMonoidInstallExesL
,buildOptsHaddockL
,globalOptsBuildOptsMonoidL
,packageIndicesL
,stackRootL
,configUrlsL
,cabalVersionL
,whichCompilerL
,envOverrideL
,loadedSnapshotL
,shouldForceGhcColorFlag
,appropriateGhcColorFlag
-- * Lens reexport
,view
,to
) where
import Control.Monad.Writer (tell)
import Stack.Prelude
import Data.Aeson.Extended
(ToJSON, toJSON, FromJSON, FromJSONKey (..), parseJSON, withText, object,
(.=), (..:), (..:?), (..!=), Value(Bool, String),
withObjectWarnings, WarningParser, Object, jsonSubWarnings,
jsonSubWarningsT, jsonSubWarningsTT, WithJSONWarnings(..), noJSONWarnings,
FromJSONKeyFunction (FromJSONKeyTextParser))
import Data.Attoparsec.Args (parseArgs, EscapingMode (Escaping))
import qualified Data.ByteString.Char8 as S8
import Data.List (stripPrefix)
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map as Map
import qualified Data.Map.Strict as M
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Data.Yaml (ParseException)
import qualified Data.Yaml as Yaml
import Distribution.PackageDescription (GenericPackageDescription)
import Distribution.ParseUtils (PError)
import Distribution.System (Platform)
import qualified Distribution.Text
import Distribution.Version (anyVersion, mkVersion')
import Generics.Deriving.Monoid (memptydefault, mappenddefault)
import Lens.Micro (Lens', lens, _1, _2, to)
import Options.Applicative (ReadM)
import qualified Options.Applicative as OA
import qualified Options.Applicative.Types as OA
import Path
import qualified Paths_stack as Meta
import Stack.Constants
import Stack.Types.BuildPlan
import Stack.Types.Compiler
import Stack.Types.CompilerBuild
import Stack.Types.Docker
import Stack.Types.FlagName
import Stack.Types.Image
import Stack.Types.Nix
import Stack.Types.PackageIdentifier
import Stack.Types.PackageIndex
import Stack.Types.PackageName
import Stack.Types.Resolver
import Stack.Types.Runner
import Stack.Types.TemplateName
import Stack.Types.Urls
import Stack.Types.Version
import qualified System.FilePath as FilePath
import System.PosixCompat.Types (UserID, GroupID, FileMode)
import System.Process.Read (EnvOverride, findExecutable)
-- Re-exports
import Stack.Types.Config.Build as X
#ifdef mingw32_HOST_OS
import Crypto.Hash (hashWith, SHA1(..))
import qualified Data.ByteArray.Encoding as Mem (convertToBase, Base(Base16))
#endif
-- | The top-level Stackage configuration.
data Config =
Config {configStackRoot :: !(Path Abs Dir)
-- ^ ~/.stack more often than not
,configWorkDir :: !(Path Rel Dir)
-- ^ this allows overriding the .stack-work directory
,configUserConfigPath :: !(Path Abs File)
-- ^ Path to user configuration file (usually ~/.stack/config.yaml)
,configBuild :: !BuildOpts
-- ^ Build configuration
,configDocker :: !DockerOpts
-- ^ Docker configuration
,configNix :: !NixOpts
-- ^ Execution environment (e.g. nix-shell) configuration
,configEnvOverride :: !(EnvSettings -> IO EnvOverride)
-- ^ Environment variables to be passed to external tools
,configLocalProgramsBase :: !(Path Abs Dir)
-- ^ Non-platform-specific path containing local installations
,configLocalPrograms :: !(Path Abs Dir)
-- ^ Path containing local installations (mainly GHC)
,configConnectionCount :: !Int
-- ^ How many concurrent connections are allowed when downloading
,configHideTHLoading :: !Bool
-- ^ Hide the Template Haskell "Loading package ..." messages from the
-- console
,configPlatform :: !Platform
-- ^ The platform we're building for, used in many directory names
,configPlatformVariant :: !PlatformVariant
-- ^ Variant of the platform, also used in directory names
,configGHCVariant0 :: !(Maybe GHCVariant)
-- ^ The variant of GHC requested by the user.
-- In most cases, use 'BuildConfig' or 'MiniConfig's version instead,
-- which will have an auto-detected default.
,configGHCBuild :: !(Maybe CompilerBuild)
-- ^ Override build of the compiler distribution (e.g. standard, gmp4, tinfo6)
,configUrls :: !Urls
-- ^ URLs for other files used by stack.
-- TODO: Better document
-- e.g. The latest snapshot file.
-- A build plan name (e.g. lts5.9.yaml) is appended when the build plan
-- is actually downloaded.
,configPackageIndices :: ![PackageIndex]
-- ^ Information on package indices. This is left biased, meaning that
-- packages in an earlier index will shadow those in a later index.
--
-- Warning: if you override packages in an index vs what's available
-- upstream, you may corrupt your compiled snapshots, as different
-- projects may have different definitions of what pkg-ver means! This
-- feature is primarily intended for adding local packages, not
-- overriding. Overriding is better accomplished by adding to your
-- list of packages.
--
-- Note that indices specified in a later config file will override
-- previous indices, /not/ extend them.
--
-- Using an assoc list instead of a Map to keep track of priority
,configSystemGHC :: !Bool
-- ^ Should we use the system-installed GHC (on the PATH) if
-- available? Can be overridden by command line options.
,configInstallGHC :: !Bool
-- ^ Should we automatically install GHC if missing or the wrong
-- version is available? Can be overridden by command line options.
,configSkipGHCCheck :: !Bool
-- ^ Don't bother checking the GHC version or architecture.
,configSkipMsys :: !Bool
-- ^ On Windows: don't use a sandboxed MSYS
,configCompilerCheck :: !VersionCheck
-- ^ Specifies which versions of the compiler are acceptable.
,configLocalBin :: !(Path Abs Dir)
-- ^ Directory we should install executables into
,configRequireStackVersion :: !VersionRange
-- ^ Require a version of stack within this range.
,configJobs :: !Int
-- ^ How many concurrent jobs to run, defaults to number of capabilities
,configOverrideGccPath :: !(Maybe (Path Abs File))
-- ^ Optional gcc override path
,configOverrideHpack :: !HpackExecutable
-- ^ Use Hpack executable (overrides bundled Hpack)
,configExtraIncludeDirs :: !(Set FilePath)
-- ^ --extra-include-dirs arguments
,configExtraLibDirs :: !(Set FilePath)
-- ^ --extra-lib-dirs arguments
,configConcurrentTests :: !Bool
-- ^ Run test suites concurrently
,configImage :: !ImageOpts
,configTemplateParams :: !(Map Text Text)
-- ^ Parameters for templates.
,configScmInit :: !(Maybe SCM)
-- ^ Initialize SCM (e.g. git) when creating new projects.
,configGhcOptionsByName :: !(Map PackageName [Text])
-- ^ Additional GHC options to apply to specific packages.
,configGhcOptionsByCat :: !(Map ApplyGhcOptions [Text])
-- ^ Additional GHC options to apply to categories of packages
,configSetupInfoLocations :: ![SetupInfoLocation]
-- ^ Additional SetupInfo (inline or remote) to use to find tools.
,configPvpBounds :: !PvpBounds
-- ^ How PVP upper bounds should be added to packages
,configModifyCodePage :: !Bool
-- ^ Force the code page to UTF-8 on Windows
,configExplicitSetupDeps :: !(Map (Maybe PackageName) Bool)
-- ^ See 'explicitSetupDeps'. 'Nothing' provides the default value.
,configRebuildGhcOptions :: !Bool
-- ^ Rebuild on GHC options changes
,configApplyGhcOptions :: !ApplyGhcOptions
-- ^ Which packages to ghc-options on the command line apply to?
,configAllowNewer :: !Bool
-- ^ Ignore version ranges in .cabal files. Funny naming chosen to
-- match cabal.
,configDefaultTemplate :: !(Maybe TemplateName)
-- ^ The default template to use when none is specified.
-- (If Nothing, the default default is used.)
,configAllowDifferentUser :: !Bool
-- ^ Allow users other than the stack root owner to use the stack
-- installation.
,configPackageCache :: !(IORef (Maybe (PackageCache PackageIndex)))
-- ^ In memory cache of hackage index.
,configDumpLogs :: !DumpLogs
-- ^ Dump logs of local non-dependencies when doing a build.
,configMaybeProject :: !(Maybe (Project, Path Abs File))
-- ^ 'Just' when a local project can be found, 'Nothing' when stack must
-- fall back on the implicit global project.
,configAllowLocals :: !Bool
-- ^ Are we allowed to build local packages? The script
-- command disallows this.
,configSaveHackageCreds :: !Bool
-- ^ Should we save Hackage credentials to a file?
,configRunner :: !Runner
}
data HpackExecutable
= HpackBundled
| HpackCommand String
deriving (Show, Read, Eq, Ord)
-- | Which packages do ghc-options on the command line apply to?
data ApplyGhcOptions = AGOTargets -- ^ all local targets
| AGOLocals -- ^ all local packages, even non-targets
| AGOEverything -- ^ every package
deriving (Show, Read, Eq, Ord, Enum, Bounded)
instance FromJSON ApplyGhcOptions where
parseJSON = withText "ApplyGhcOptions" $ \t ->
case t of
"targets" -> return AGOTargets
"locals" -> return AGOLocals
"everything" -> return AGOEverything
_ -> fail $ "Invalid ApplyGhcOptions: " ++ show t
-- | Which build log files to dump
data DumpLogs
= DumpNoLogs -- ^ don't dump any logfiles
| DumpWarningLogs -- ^ dump logfiles containing warnings
| DumpAllLogs -- ^ dump all logfiles
deriving (Show, Read, Eq, Ord, Enum, Bounded)
instance FromJSON DumpLogs where
parseJSON (Bool True) = return DumpAllLogs
parseJSON (Bool False) = return DumpNoLogs
parseJSON v =
withText
"DumpLogs"
(\t ->
if | t == "none" -> return DumpNoLogs
| t == "warning" -> return DumpWarningLogs
| t == "all" -> return DumpAllLogs
| otherwise -> fail ("Invalid DumpLogs: " ++ show t))
v
-- | Controls which version of the environment is used
data EnvSettings = EnvSettings
{ esIncludeLocals :: !Bool
-- ^ include local project bin directory, GHC_PACKAGE_PATH, etc
, esIncludeGhcPackagePath :: !Bool
-- ^ include the GHC_PACKAGE_PATH variable
, esStackExe :: !Bool
-- ^ set the STACK_EXE variable to the current executable name
, esLocaleUtf8 :: !Bool
-- ^ set the locale to C.UTF-8
}
deriving (Show, Eq, Ord)
data ExecOpts = ExecOpts
{ eoCmd :: !SpecialExecCmd
, eoArgs :: ![String]
, eoExtra :: !ExecOptsExtra
} deriving (Show)
data SpecialExecCmd
= ExecCmd String
| ExecGhc
| ExecRunGhc
deriving (Show, Eq)
data ExecOptsExtra
= ExecOptsPlain
| ExecOptsEmbellished
{ eoEnvSettings :: !EnvSettings
, eoPackages :: ![String]
, eoRtsOptions :: ![String]
}
deriving (Show)
data EvalOpts = EvalOpts
{ evalArg :: !String
, evalExtra :: !ExecOptsExtra
} deriving (Show)
-- | Parsed global command-line options.
data GlobalOpts = GlobalOpts
{ globalReExecVersion :: !(Maybe String) -- ^ Expected re-exec in container version
, globalDockerEntrypoint :: !(Maybe DockerEntrypoint)
-- ^ Data used when stack is acting as a Docker entrypoint (internal use only)
, globalLogLevel :: !LogLevel -- ^ Log level
, globalTimeInLog :: !Bool -- ^ Whether to include timings in logs.
, globalConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig'
, globalResolver :: !(Maybe AbstractResolver) -- ^ Resolver override
, globalCompiler :: !(Maybe (CompilerVersion 'CVWanted)) -- ^ Compiler override
, globalTerminal :: !Bool -- ^ We're in a terminal?
, globalColorWhen :: !ColorWhen -- ^ When to use ansi terminal colors
, globalTermWidth :: !(Maybe Int) -- ^ Terminal width override
, globalStackYaml :: !(StackYamlLoc FilePath) -- ^ Override project stack.yaml
} deriving (Show)
data StackYamlLoc filepath
= SYLDefault
| SYLOverride !filepath
| SYLNoConfig !(Path Abs Dir)
-- ^ FilePath is the directory containing the script file, used
-- for resolving custom snapshot files.
deriving (Show,Functor,Foldable,Traversable)
-- | Parsed global command-line options monoid.
data GlobalOptsMonoid = GlobalOptsMonoid
{ globalMonoidReExecVersion :: !(First String) -- ^ Expected re-exec in container version
, globalMonoidDockerEntrypoint :: !(First DockerEntrypoint)
-- ^ Data used when stack is acting as a Docker entrypoint (internal use only)
, globalMonoidLogLevel :: !(First LogLevel) -- ^ Log level
, globalMonoidTimeInLog :: !(First Bool) -- ^ Whether to include timings in logs.
, globalMonoidConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig'
, globalMonoidResolver :: !(First AbstractResolver) -- ^ Resolver override
, globalMonoidCompiler :: !(First (CompilerVersion 'CVWanted)) -- ^ Compiler override
, globalMonoidTerminal :: !(First Bool) -- ^ We're in a terminal?
, globalMonoidColorWhen :: !(First ColorWhen) -- ^ When to use ansi colors
, globalMonoidTermWidth :: !(First Int) -- ^ Terminal width override
, globalMonoidStackYaml :: !(First FilePath) -- ^ Override project stack.yaml
} deriving (Show, Generic)
instance Monoid GlobalOptsMonoid where
mempty = memptydefault
mappend = mappenddefault
-- | Default logging level should be something useful but not crazy.
defaultLogLevel :: LogLevel
defaultLogLevel = LevelInfo
readColorWhen :: ReadM ColorWhen
readColorWhen = do
s <- OA.readerAsk
case s of
"never" -> return ColorNever
"always" -> return ColorAlways
"auto" -> return ColorAuto
_ -> OA.readerError "Expected values of color option are 'never', 'always', or 'auto'."
-- | A superset of 'Config' adding information on how to build code. The reason
-- for this breakdown is because we will need some of the information from
-- 'Config' in order to determine the values here.
--
-- These are the components which know nothing about local configuration.
data BuildConfig = BuildConfig
{ bcConfig :: !Config
, bcSnapshotDef :: !SnapshotDef
-- ^ Build plan wanted for this build
, bcGHCVariant :: !GHCVariant
-- ^ The variant of GHC used to select a GHC bindist.
, bcPackages :: ![PackageLocation Subdirs]
-- ^ Local packages
, bcDependencies :: ![PackageLocationIndex Subdirs]
-- ^ Extra dependencies specified in configuration.
--
-- These dependencies will not be installed to a shared location, and
-- will override packages provided by the resolver.
, bcExtraPackageDBs :: ![Path Abs Dir]
-- ^ Extra package databases
, bcStackYaml :: !(Path Abs File)
-- ^ Location of the stack.yaml file.
--
-- Note: if the STACK_YAML environment variable is used, this may be
-- different from projectRootL </> "stack.yaml"
--
-- FIXME MSS 2016-12-08: is the above comment still true? projectRootL
-- is defined in terms of bcStackYaml
, bcFlags :: !(Map PackageName (Map FlagName Bool))
-- ^ Per-package flag overrides
, bcImplicitGlobal :: !Bool
-- ^ Are we loading from the implicit global stack.yaml? This is useful
-- for providing better error messages.
}
stackYamlL :: HasBuildConfig env => Lens' env (Path Abs File)
stackYamlL = buildConfigL.lens bcStackYaml (\x y -> x { bcStackYaml = y })
-- | Directory containing the project's stack.yaml file
projectRootL :: HasBuildConfig env => Getting r env (Path Abs Dir)
projectRootL = stackYamlL.to parent
-- | Configuration after the environment has been setup.
data EnvConfig = EnvConfig
{envConfigBuildConfig :: !BuildConfig
,envConfigCabalVersion :: !Version
-- ^ This is the version of Cabal that stack will use to compile Setup.hs files
-- in the build process.
--
-- Note that this is not necessarily the same version as the one that stack
-- depends on as a library and which is displayed when running
-- @stack list-dependencies | grep Cabal@ in the stack project.
,envConfigCompilerVersion :: !(CompilerVersion 'CVActual)
-- ^ The actual version of the compiler to be used, as opposed to
-- 'wantedCompilerL', which provides the version specified by the
-- build plan.
,envConfigCompilerBuild :: !CompilerBuild
,envConfigPackagesRef :: !(IORef (Maybe LocalPackages))
-- ^ Cache for 'getLocalPackages'.
,envConfigLoadedSnapshot :: !LoadedSnapshot
-- ^ The fully resolved snapshot information.
}
data LocalPackages = LocalPackages
{ lpProject :: !(Map PackageName LocalPackageView)
, lpDependencies :: !(Map PackageName (GenericPackageDescription, PackageLocationIndex FilePath))
}
-- | A view of a local package needed for resolving components
data LocalPackageView = LocalPackageView
{ lpvVersion :: !Version
, lpvRoot :: !(Path Abs Dir)
, lpvCabalFP :: !(Path Abs File)
, lpvComponents :: !(Set NamedComponent)
, lpvGPD :: !GenericPackageDescription
, lpvLoc :: !(PackageLocation FilePath)
}
-- | A single, fully resolved component of a package
data NamedComponent
= CLib
| CExe !Text
| CTest !Text
| CBench !Text
deriving (Show, Eq, Ord)
-- | Value returned by 'Stack.Config.loadConfig'.
data LoadConfig = LoadConfig
{ lcConfig :: !Config
-- ^ Top-level Stack configuration.
, lcLoadBuildConfig :: !(Maybe (CompilerVersion 'CVWanted) -> IO BuildConfig)
-- ^ Action to load the remaining 'BuildConfig'.
, lcProjectRoot :: !(Maybe (Path Abs Dir))
-- ^ The project root directory, if in a project.
}
data PackageEntry = PackageEntry
{ peExtraDepMaybe :: !(Maybe TreatLikeExtraDep)
, peLocation :: !(PackageLocation Subdirs)
, peSubdirs :: !Subdirs
}
deriving Show
-- | Should a package be treated just like an extra-dep?
--
-- 'True' means it will only be built as a dependency
-- for others, and its test suites/benchmarks will not be run.
--
-- Useful when modifying an upstream package, see:
-- https://github.com/commercialhaskell/stack/issues/219
-- https://github.com/commercialhaskell/stack/issues/386
type TreatLikeExtraDep = Bool
instance FromJSON (WithJSONWarnings PackageEntry) where
parseJSON (String t) = do
WithJSONWarnings loc _ <- parseJSON $ String t
return $ noJSONWarnings
PackageEntry
{ peExtraDepMaybe = Nothing
, peLocation = loc
, peSubdirs = DefaultSubdirs
}
parseJSON v = withObjectWarnings "PackageEntry" (\o -> PackageEntry
<$> o ..:? "extra-dep"
<*> jsonSubWarnings (o ..: "location")
<*> o ..:? "subdirs" ..!= DefaultSubdirs) v
-- | A project is a collection of packages. We can have multiple stack.yaml
-- files, but only one of them may contain project information.
data Project = Project
{ projectUserMsg :: !(Maybe String)
-- ^ A warning message to display to the user when the auto generated
-- config may have issues.
, projectPackages :: ![PackageLocation Subdirs]
-- ^ Packages which are actually part of the project (as opposed
-- to dependencies).
--
-- /NOTE/ Stack has always allowed these packages to be any kind
-- of package location, but in reality only @PLFilePath@ really
-- makes sense. We could consider replacing @[PackageLocation]@
-- with @[FilePath]@ to properly enforce this idea, though it will
-- slightly break backwards compatibility if someone really did
-- want to treat such things as non-deps.
, projectDependencies :: ![PackageLocationIndex Subdirs]
-- ^ Dependencies defined within the stack.yaml file, to be
-- applied on top of the snapshot.
, projectFlags :: !(Map PackageName (Map FlagName Bool))
-- ^ Flags to be applied on top of the snapshot flags.
, projectResolver :: !Resolver
-- ^ How we resolve which @SnapshotDef@ to use
, projectCompiler :: !(Maybe (CompilerVersion 'CVWanted))
-- ^ When specified, overrides which compiler to use
, projectExtraPackageDBs :: ![FilePath]
}
deriving Show
instance ToJSON Project where
-- Expanding the constructor fully to ensure we don't miss any fields.
toJSON (Project userMsg packages extraDeps flags resolver compiler extraPackageDBs) = object $ concat
[ maybe [] (\cv -> ["compiler" .= cv]) compiler
, maybe [] (\msg -> ["user-message" .= msg]) userMsg
, if null extraPackageDBs then [] else ["extra-package-dbs" .= extraPackageDBs]
, if null extraDeps then [] else ["extra-deps" .= extraDeps]
, if Map.null flags then [] else ["flags" .= flags]
, ["packages" .= packages]
, ["resolver" .= resolver]
]
-- | An uninterpreted representation of configuration options.
-- Configurations may be "cascaded" using mappend (left-biased).
data ConfigMonoid =
ConfigMonoid
{ configMonoidStackRoot :: !(First (Path Abs Dir))
-- ^ See: 'configStackRoot'
, configMonoidWorkDir :: !(First (Path Rel Dir))
-- ^ See: 'configWorkDir'.
, configMonoidBuildOpts :: !BuildOptsMonoid
-- ^ build options.
, configMonoidDockerOpts :: !DockerOptsMonoid
-- ^ Docker options.
, configMonoidNixOpts :: !NixOptsMonoid
-- ^ Options for the execution environment (nix-shell or container)
, configMonoidConnectionCount :: !(First Int)
-- ^ See: 'configConnectionCount'
, configMonoidHideTHLoading :: !(First Bool)
-- ^ See: 'configHideTHLoading'
, configMonoidLatestSnapshotUrl :: !(First Text)
-- ^ Deprecated in favour of 'urlsMonoidLatestSnapshot'
, configMonoidUrls :: !UrlsMonoid
-- ^ See: 'configUrls'
, configMonoidPackageIndices :: !(First [PackageIndex])
-- ^ See: 'configPackageIndices'
, configMonoidSystemGHC :: !(First Bool)
-- ^ See: 'configSystemGHC'
,configMonoidInstallGHC :: !(First Bool)
-- ^ See: 'configInstallGHC'
,configMonoidSkipGHCCheck :: !(First Bool)
-- ^ See: 'configSkipGHCCheck'
,configMonoidSkipMsys :: !(First Bool)
-- ^ See: 'configSkipMsys'
,configMonoidCompilerCheck :: !(First VersionCheck)
-- ^ See: 'configCompilerCheck'
,configMonoidRequireStackVersion :: !IntersectingVersionRange
-- ^ See: 'configRequireStackVersion'
,configMonoidArch :: !(First String)
-- ^ Used for overriding the platform
,configMonoidGHCVariant :: !(First GHCVariant)
-- ^ Used for overriding the GHC variant
,configMonoidGHCBuild :: !(First CompilerBuild)
-- ^ Used for overriding the GHC build
,configMonoidJobs :: !(First Int)
-- ^ See: 'configJobs'
,configMonoidExtraIncludeDirs :: !(Set FilePath)
-- ^ See: 'configExtraIncludeDirs'
,configMonoidExtraLibDirs :: !(Set FilePath)
-- ^ See: 'configExtraLibDirs'
, configMonoidOverrideGccPath :: !(First (Path Abs File))
-- ^ Allow users to override the path to gcc
,configMonoidOverrideHpack :: !(First FilePath)
-- ^ Use Hpack executable (overrides bundled Hpack)
,configMonoidConcurrentTests :: !(First Bool)
-- ^ See: 'configConcurrentTests'
,configMonoidLocalBinPath :: !(First FilePath)
-- ^ Used to override the binary installation dir
,configMonoidImageOpts :: !ImageOptsMonoid
-- ^ Image creation options.
,configMonoidTemplateParameters :: !(Map Text Text)
-- ^ Template parameters.
,configMonoidScmInit :: !(First SCM)
-- ^ Initialize SCM (e.g. git init) when making new projects?
,configMonoidGhcOptionsByName :: !(Map PackageName [Text])
-- ^ See 'configGhcOptionsByName'
,configMonoidGhcOptionsByCat :: !(Map ApplyGhcOptions [Text])
-- ^ See 'configGhcOptionsAll'
,configMonoidExtraPath :: ![Path Abs Dir]
-- ^ Additional paths to search for executables in
,configMonoidSetupInfoLocations :: ![SetupInfoLocation]
-- ^ Additional setup info (inline or remote) to use for installing tools
,configMonoidLocalProgramsBase :: !(First (Path Abs Dir))
-- ^ Override the default local programs dir, where e.g. GHC is installed.
,configMonoidPvpBounds :: !(First PvpBounds)
-- ^ See 'configPvpBounds'
,configMonoidModifyCodePage :: !(First Bool)
-- ^ See 'configModifyCodePage'
,configMonoidExplicitSetupDeps :: !(Map (Maybe PackageName) Bool)
-- ^ See 'configExplicitSetupDeps'
,configMonoidRebuildGhcOptions :: !(First Bool)
-- ^ See 'configMonoidRebuildGhcOptions'
,configMonoidApplyGhcOptions :: !(First ApplyGhcOptions)
-- ^ See 'configApplyGhcOptions'
,configMonoidAllowNewer :: !(First Bool)
-- ^ See 'configMonoidAllowNewer'
,configMonoidDefaultTemplate :: !(First TemplateName)
-- ^ The default template to use when none is specified.
-- (If Nothing, the default default is used.)
, configMonoidAllowDifferentUser :: !(First Bool)
-- ^ Allow users other than the stack root owner to use the stack
-- installation.
, configMonoidDumpLogs :: !(First DumpLogs)
-- ^ See 'configDumpLogs'
, configMonoidSaveHackageCreds :: !(First Bool)
-- ^ See 'configSaveHackageCreds'
}
deriving (Show, Generic)
instance Monoid ConfigMonoid where
mempty = memptydefault
mappend = mappenddefault
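-- Cascading sketch (illustrative; relies only on the 'Data.Monoid.First'
-- wrappers used by the fields above): when two partial configs are combined,
-- the left-most defined value wins. For example:
--
-- >>> getFirst (First (Just 4) <> First Nothing)
-- Just 4
-- >>> getFirst (First Nothing <> First (Just 8))
-- Just 8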
parseConfigMonoid :: Path Abs Dir -> Value -> Yaml.Parser (WithJSONWarnings ConfigMonoid)
parseConfigMonoid = withObjectWarnings "ConfigMonoid" . parseConfigMonoidObject
-- | Parse a partial configuration. Used to parse both a standalone config
-- file and a project file, so that a sub-parser is not required, which would interfere with
-- warnings for missing fields.
parseConfigMonoidObject :: Path Abs Dir -> Object -> WarningParser ConfigMonoid
parseConfigMonoidObject rootDir obj = do
-- Parsing 'stackRoot' from 'stackRoot'/config.yaml would be nonsensical
let configMonoidStackRoot = First Nothing
configMonoidWorkDir <- First <$> obj ..:? configMonoidWorkDirName
configMonoidBuildOpts <- jsonSubWarnings (obj ..:? configMonoidBuildOptsName ..!= mempty)
configMonoidDockerOpts <- jsonSubWarnings (obj ..:? configMonoidDockerOptsName ..!= mempty)
configMonoidNixOpts <- jsonSubWarnings (obj ..:? configMonoidNixOptsName ..!= mempty)
configMonoidConnectionCount <- First <$> obj ..:? configMonoidConnectionCountName
configMonoidHideTHLoading <- First <$> obj ..:? configMonoidHideTHLoadingName
configMonoidLatestSnapshotUrl <- First <$> obj ..:? configMonoidLatestSnapshotUrlName
configMonoidUrls <- jsonSubWarnings (obj ..:? configMonoidUrlsName ..!= mempty)
configMonoidPackageIndices <- First <$> jsonSubWarningsTT (obj ..:? configMonoidPackageIndicesName)
configMonoidSystemGHC <- First <$> obj ..:? configMonoidSystemGHCName
configMonoidInstallGHC <- First <$> obj ..:? configMonoidInstallGHCName
configMonoidSkipGHCCheck <- First <$> obj ..:? configMonoidSkipGHCCheckName
configMonoidSkipMsys <- First <$> obj ..:? configMonoidSkipMsysName
configMonoidRequireStackVersion <- IntersectingVersionRange . unVersionRangeJSON <$> (
obj ..:? configMonoidRequireStackVersionName
..!= VersionRangeJSON anyVersion)
configMonoidArch <- First <$> obj ..:? configMonoidArchName
configMonoidGHCVariant <- First <$> obj ..:? configMonoidGHCVariantName
configMonoidGHCBuild <- First <$> obj ..:? configMonoidGHCBuildName
configMonoidJobs <- First <$> obj ..:? configMonoidJobsName
configMonoidExtraIncludeDirs <- fmap (Set.map (toFilePath rootDir FilePath.</>)) $
obj ..:? configMonoidExtraIncludeDirsName ..!= Set.empty
configMonoidExtraLibDirs <- fmap (Set.map (toFilePath rootDir FilePath.</>)) $
obj ..:? configMonoidExtraLibDirsName ..!= Set.empty
configMonoidOverrideGccPath <- First <$> obj ..:? configMonoidOverrideGccPathName
configMonoidOverrideHpack <- First <$> obj ..:? configMonoidOverrideHpackName
configMonoidConcurrentTests <- First <$> obj ..:? configMonoidConcurrentTestsName
configMonoidLocalBinPath <- First <$> obj ..:? configMonoidLocalBinPathName
configMonoidImageOpts <- jsonSubWarnings (obj ..:? configMonoidImageOptsName ..!= mempty)
templates <- obj ..:? "templates"
(configMonoidScmInit,configMonoidTemplateParameters) <-
case templates of
Nothing -> return (First Nothing,M.empty)
Just tobj -> do
scmInit <- tobj ..:? configMonoidScmInitName
params <- tobj ..:? configMonoidTemplateParametersName
return (First scmInit,fromMaybe M.empty params)
configMonoidCompilerCheck <- First <$> obj ..:? configMonoidCompilerCheckName
options <- Map.map unGhcOptions <$> obj ..:? configMonoidGhcOptionsName ..!= mempty
optionsEverything <-
case (Map.lookup GOKOldEverything options, Map.lookup GOKEverything options) of
(Just _, Just _) -> fail "Cannot specify both `*` and `$everything` GHC options"
(Nothing, Just x) -> return x
(Just x, Nothing) -> do
tell "The `*` ghc-options key is not recommended. Consider using $locals, or if really needed, $everything"
return x
(Nothing, Nothing) -> return []
let configMonoidGhcOptionsByCat = Map.fromList
[ (AGOEverything, optionsEverything)
, (AGOLocals, Map.findWithDefault [] GOKLocals options)
, (AGOTargets, Map.findWithDefault [] GOKTargets options)
]
configMonoidGhcOptionsByName = Map.fromList
[(name, opts) | (GOKPackage name, opts) <- Map.toList options]
configMonoidExtraPath <- obj ..:? configMonoidExtraPathName ..!= []
configMonoidSetupInfoLocations <-
maybeToList <$> jsonSubWarningsT (obj ..:? configMonoidSetupInfoLocationsName)
configMonoidLocalProgramsBase <- First <$> obj ..:? configMonoidLocalProgramsBaseName
configMonoidPvpBounds <- First <$> obj ..:? configMonoidPvpBoundsName
configMonoidModifyCodePage <- First <$> obj ..:? configMonoidModifyCodePageName
configMonoidExplicitSetupDeps <-
(obj ..:? configMonoidExplicitSetupDepsName ..!= mempty)
>>= fmap Map.fromList . mapM handleExplicitSetupDep . Map.toList
configMonoidRebuildGhcOptions <- First <$> obj ..:? configMonoidRebuildGhcOptionsName
configMonoidApplyGhcOptions <- First <$> obj ..:? configMonoidApplyGhcOptionsName
configMonoidAllowNewer <- First <$> obj ..:? configMonoidAllowNewerName
configMonoidDefaultTemplate <- First <$> obj ..:? configMonoidDefaultTemplateName
configMonoidAllowDifferentUser <- First <$> obj ..:? configMonoidAllowDifferentUserName
configMonoidDumpLogs <- First <$> obj ..:? configMonoidDumpLogsName
configMonoidSaveHackageCreds <- First <$> obj ..:? configMonoidSaveHackageCredsName
return ConfigMonoid {..}
where
handleExplicitSetupDep :: Monad m => (Text, Bool) -> m (Maybe PackageName, Bool)
handleExplicitSetupDep (name', b) = do
name <-
if name' == "*"
then return Nothing
else case parsePackageNameFromString $ T.unpack name' of
Left e -> fail $ show e
Right x -> return $ Just x
return (name, b)
configMonoidWorkDirName :: Text
configMonoidWorkDirName = "work-dir"
configMonoidBuildOptsName :: Text
configMonoidBuildOptsName = "build"
configMonoidDockerOptsName :: Text
configMonoidDockerOptsName = "docker"
configMonoidNixOptsName :: Text
configMonoidNixOptsName = "nix"
configMonoidConnectionCountName :: Text
configMonoidConnectionCountName = "connection-count"
configMonoidHideTHLoadingName :: Text
configMonoidHideTHLoadingName = "hide-th-loading"
configMonoidLatestSnapshotUrlName :: Text
configMonoidLatestSnapshotUrlName = "latest-snapshot-url"
configMonoidUrlsName :: Text
configMonoidUrlsName = "urls"
configMonoidPackageIndicesName :: Text
configMonoidPackageIndicesName = "package-indices"
configMonoidSystemGHCName :: Text
configMonoidSystemGHCName = "system-ghc"
configMonoidInstallGHCName :: Text
configMonoidInstallGHCName = "install-ghc"
configMonoidSkipGHCCheckName :: Text
configMonoidSkipGHCCheckName = "skip-ghc-check"
configMonoidSkipMsysName :: Text
configMonoidSkipMsysName = "skip-msys"
configMonoidRequireStackVersionName :: Text
configMonoidRequireStackVersionName = "require-stack-version"
configMonoidArchName :: Text
configMonoidArchName = "arch"
configMonoidGHCVariantName :: Text
configMonoidGHCVariantName = "ghc-variant"
configMonoidGHCBuildName :: Text
configMonoidGHCBuildName = "ghc-build"
configMonoidJobsName :: Text
configMonoidJobsName = "jobs"
configMonoidExtraIncludeDirsName :: Text
configMonoidExtraIncludeDirsName = "extra-include-dirs"
configMonoidExtraLibDirsName :: Text
configMonoidExtraLibDirsName = "extra-lib-dirs"
configMonoidOverrideGccPathName :: Text
configMonoidOverrideGccPathName = "with-gcc"
configMonoidOverrideHpackName :: Text
configMonoidOverrideHpackName = "with-hpack"
configMonoidConcurrentTestsName :: Text
configMonoidConcurrentTestsName = "concurrent-tests"
configMonoidLocalBinPathName :: Text
configMonoidLocalBinPathName = "local-bin-path"
configMonoidImageOptsName :: Text
configMonoidImageOptsName = "image"
configMonoidScmInitName :: Text
configMonoidScmInitName = "scm-init"
configMonoidTemplateParametersName :: Text
configMonoidTemplateParametersName = "params"
configMonoidCompilerCheckName :: Text
configMonoidCompilerCheckName = "compiler-check"
configMonoidGhcOptionsName :: Text
configMonoidGhcOptionsName = "ghc-options"
configMonoidExtraPathName :: Text
configMonoidExtraPathName = "extra-path"
configMonoidSetupInfoLocationsName :: Text
configMonoidSetupInfoLocationsName = "setup-info"
configMonoidLocalProgramsBaseName :: Text
configMonoidLocalProgramsBaseName = "local-programs-path"
configMonoidPvpBoundsName :: Text
configMonoidPvpBoundsName = "pvp-bounds"
configMonoidModifyCodePageName :: Text
configMonoidModifyCodePageName = "modify-code-page"
configMonoidExplicitSetupDepsName :: Text
configMonoidExplicitSetupDepsName = "explicit-setup-deps"
configMonoidRebuildGhcOptionsName :: Text
configMonoidRebuildGhcOptionsName = "rebuild-ghc-options"
configMonoidApplyGhcOptionsName :: Text
configMonoidApplyGhcOptionsName = "apply-ghc-options"
configMonoidAllowNewerName :: Text
configMonoidAllowNewerName = "allow-newer"
configMonoidDefaultTemplateName :: Text
configMonoidDefaultTemplateName = "default-template"
configMonoidAllowDifferentUserName :: Text
configMonoidAllowDifferentUserName = "allow-different-user"
configMonoidDumpLogsName :: Text
configMonoidDumpLogsName = "dump-logs"
configMonoidSaveHackageCredsName :: Text
configMonoidSaveHackageCredsName = "save-hackage-creds"
data ConfigException
= ParseConfigFileException (Path Abs File) ParseException
| ParseCustomSnapshotException Text ParseException
| NoProjectConfigFound (Path Abs Dir) (Maybe Text)
| UnexpectedArchiveContents [Path Abs Dir] [Path Abs File]
| UnableToExtractArchive Text (Path Abs File)
| BadStackVersionException VersionRange
| NoMatchingSnapshot WhichSolverCmd (NonEmpty SnapName)
| ResolverMismatch WhichSolverCmd !Text String -- Text == resolver name, sdName
| ResolverPartial WhichSolverCmd !Text String -- Text == resolver name, sdName
| NoSuchDirectory FilePath
| ParseGHCVariantException String
| BadStackRoot (Path Abs Dir)
| Won'tCreateStackRootInDirectoryOwnedByDifferentUser (Path Abs Dir) (Path Abs Dir) -- ^ @$STACK_ROOT@, parent dir
| UserDoesn'tOwnDirectory (Path Abs Dir)
| FailedToCloneRepo String
| ManualGHCVariantSettingsAreIncompatibleWithSystemGHC
| NixRequiresSystemGhc
| NoResolverWhenUsingNoLocalConfig
| InvalidResolverForNoLocalConfig String
| InvalidCabalFileInLocal !(PackageLocationIndex FilePath) !PError !ByteString
| DuplicateLocalPackageNames ![(PackageName, [PackageLocationIndex FilePath])]
deriving Typeable
instance Show ConfigException where
show (ParseConfigFileException configFile exception) = concat
[ "Could not parse '"
, toFilePath configFile
, "':\n"
, Yaml.prettyPrintParseException exception
, "\nSee http://docs.haskellstack.org/en/stable/yaml_configuration/"
]
show (ParseCustomSnapshotException url exception) = concat
[ "Could not parse '"
, T.unpack url
, "':\n"
, Yaml.prettyPrintParseException exception
-- FIXME: Link to docs about custom snapshots
-- , "\nSee http://docs.haskellstack.org/en/stable/yaml_configuration/"
]
show (NoProjectConfigFound dir mcmd) = concat
[ "Unable to find a stack.yaml file in the current directory ("
, toFilePath dir
, ") or its ancestors"
, case mcmd of
Nothing -> ""
Just cmd -> "\nRecommended action: stack " ++ T.unpack cmd
]
show (UnexpectedArchiveContents dirs files) = concat
[ "When unpacking an archive specified in your stack.yaml file, "
, "did not find expected contents. Expected: a single directory. Found: "
, show ( map (toFilePath . dirname) dirs
, map (toFilePath . filename) files
)
]
show (UnableToExtractArchive url file) = concat
[ "Archive extraction failed. We support tarballs and zip, couldn't handle the following URL, "
, T.unpack url, " downloaded to the file ", toFilePath $ filename file
]
show (BadStackVersionException requiredRange) = concat
[ "The version of stack you are using ("
, show (fromCabalVersion (mkVersion' Meta.version))
, ") is outside the required\n"
,"version range specified in stack.yaml ("
, T.unpack (versionRangeText requiredRange)
, ")." ]
show (NoMatchingSnapshot whichCmd names) = concat
[ "None of the following snapshots provides a compiler matching "
, "your package(s):\n"
, unlines $ map (\name -> " - " <> T.unpack (renderSnapName name))
(NonEmpty.toList names)
, showOptions whichCmd Don'tSuggestSolver
]
show (ResolverMismatch whichCmd resolver errDesc) = concat
[ "Resolver '"
, T.unpack resolver
, "' does not have a matching compiler to build some or all of your "
, "package(s).\n"
, errDesc
, showOptions whichCmd Don'tSuggestSolver
]
show (ResolverPartial whichCmd resolver errDesc) = concat
[ "Resolver '"
, T.unpack resolver
, "' does not have all the packages to match your requirements.\n"
, unlines $ fmap (" " <>) (lines errDesc)
, showOptions whichCmd
(case whichCmd of
IsSolverCmd -> Don'tSuggestSolver
_ -> SuggestSolver)
]
show (NoSuchDirectory dir) =
"No directory could be located matching the supplied path: " ++ dir
show (ParseGHCVariantException v) =
"Invalid ghc-variant value: " ++ v
show (BadStackRoot stackRoot) = concat
[ "Invalid stack root: '"
, toFilePath stackRoot
, "'. Please provide a valid absolute path."
]
show (Won'tCreateStackRootInDirectoryOwnedByDifferentUser envStackRoot parentDir) = concat
[ "Preventing creation of stack root '"
, toFilePath envStackRoot
, "'. Parent directory '"
, toFilePath parentDir
, "' is owned by someone else."
]
show (UserDoesn'tOwnDirectory dir) = concat
[ "You are not the owner of '"
, toFilePath dir
, "'. Aborting to protect file permissions."
, "\nRetry with '--"
, T.unpack configMonoidAllowDifferentUserName
, "' to disable this precaution."
]
show (FailedToCloneRepo commandName) = concat
[ "Failed to use "
, commandName
, " to clone the repo. Please ensure that "
, commandName
, " is installed and available to stack on your PATH environment variable."
]
show ManualGHCVariantSettingsAreIncompatibleWithSystemGHC = T.unpack $ T.concat
[ "stack can only control the "
, configMonoidGHCVariantName
, " of its own GHC installations. Please use '--no-"
, configMonoidSystemGHCName
, "'."
]
show NixRequiresSystemGhc = T.unpack $ T.concat
[ "stack's Nix integration is incompatible with '--no-system-ghc'. "
, "Please use '--"
, configMonoidSystemGHCName
, "' or disable the Nix integration."
]
show NoResolverWhenUsingNoLocalConfig = "When using the script command, you must provide a resolver argument"
show (InvalidResolverForNoLocalConfig ar) = "The script command requires a specific resolver, you provided " ++ ar
show (InvalidCabalFileInLocal loc err _) = concat
[ "Unable to parse cabal file from "
, show loc
, ": "
, show err
]
show (DuplicateLocalPackageNames pairs) = concat
$ "The same package name is used in multiple local packages\n"
: map go pairs
where
go (name, dirs) = unlines
$ ""
: (packageNameString name ++ " used in:")
: map goLoc dirs
goLoc loc = "- " ++ show loc
instance Exception ConfigException
showOptions :: WhichSolverCmd -> SuggestSolver -> String
showOptions whichCmd suggestSolver = unlines $ "\nThis may be resolved by:" : options
where
options =
(case suggestSolver of
SuggestSolver -> [useSolver]
Don'tSuggestSolver -> []) ++
(case whichCmd of
IsSolverCmd -> [useResolver]
IsInitCmd -> both
IsNewCmd -> both)
both = [omitPackages, useResolver]
useSolver = " - Using '--solver' to ask cabal-install to generate extra-deps, atop the chosen snapshot."
omitPackages = " - Using '--omit-packages to exclude mismatching package(s)."
useResolver = " - Using '--resolver' to specify a matching snapshot/resolver"
data WhichSolverCmd = IsInitCmd | IsSolverCmd | IsNewCmd
data SuggestSolver = SuggestSolver | Don'tSuggestSolver
-- | Get the URL to request the information on the latest snapshots
askLatestSnapshotUrl :: (MonadReader env m, HasConfig env) => m Text
askLatestSnapshotUrl = view $ configL.to configUrls.to urlsLatestSnapshot
-- | Root for a specific package index
configPackageIndexRoot :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs Dir)
configPackageIndexRoot (IndexName name) = do
root <- view stackRootL
dir <- parseRelDir $ S8.unpack name
return (root </> $(mkRelDir "indices") </> dir)
-- | Location of the 01-index.cache file
configPackageIndexCache :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File)
configPackageIndexCache = liftM (</> $(mkRelFile "01-index.cache")) . configPackageIndexRoot
-- | Location of the 00-index.cache file
configPackageIndexCacheOld :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File)
configPackageIndexCacheOld = liftM (</> $(mkRelFile "00-index.cache")) . configPackageIndexRoot
-- | Location of the 01-index.tar file
configPackageIndex :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File)
configPackageIndex = liftM (</> $(mkRelFile "01-index.tar")) . configPackageIndexRoot
-- | Location of the 00-index.tar file. This file is just a copy of
-- the 01-index.tar file, provided for tools which still look for the
-- 00-index.tar file.
configPackageIndexOld :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File)
configPackageIndexOld = liftM (</> $(mkRelFile "00-index.tar")) . configPackageIndexRoot
-- | Location of the 01-index.tar.gz file
configPackageIndexGz :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File)
configPackageIndexGz = liftM (</> $(mkRelFile "01-index.tar.gz")) . configPackageIndexRoot
-- | Location of a package tarball
configPackageTarball :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> PackageIdentifier -> m (Path Abs File)
configPackageTarball iname ident = do
root <- configPackageIndexRoot iname
name <- parseRelDir $ packageNameString $ packageIdentifierName ident
ver <- parseRelDir $ versionString $ packageIdentifierVersion ident
base <- parseRelFile $ packageIdentifierString ident ++ ".tar.gz"
return (root </> $(mkRelDir "packages") </> name </> ver </> base)
-- | @".stack-work"@
workDirL :: HasConfig env => Lens' env (Path Rel Dir)
workDirL = configL.lens configWorkDir (\x y -> x { configWorkDir = y })
-- | Per-project work dir
getProjectWorkDir :: (HasBuildConfig env, MonadReader env m) => m (Path Abs Dir)
getProjectWorkDir = do
root <- view projectRootL
workDir <- view workDirL
return (root </> workDir)
-- | File containing the installed cache, see "Stack.PackageDump"
configInstalledCache :: (HasBuildConfig env, MonadReader env m) => m (Path Abs File)
configInstalledCache = liftM (</> $(mkRelFile "installed-cache.bin")) getProjectWorkDir
-- | Relative directory for the platform identifier
platformOnlyRelDir
:: (MonadReader env m, HasPlatform env, MonadThrow m)
=> m (Path Rel Dir)
platformOnlyRelDir = do
platform <- view platformL
platformVariant <- view platformVariantL
parseRelDir (Distribution.Text.display platform ++ platformVariantSuffix platformVariant)
-- | Directory containing snapshots
snapshotsDir :: (MonadReader env m, HasEnvConfig env, MonadThrow m) => m (Path Abs Dir)
snapshotsDir = do
root <- view stackRootL
platform <- platformGhcRelDir
return $ root </> $(mkRelDir "snapshots") </> platform
-- | Installation root for dependencies
installationRootDeps :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
installationRootDeps = do
root <- view stackRootL
-- TODO: also useShaPathOnWindows here, once #1173 is resolved.
psc <- platformSnapAndCompilerRel
return $ root </> $(mkRelDir "snapshots") </> psc
-- | Installation root for locals
installationRootLocal :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
installationRootLocal = do
workDir <- getProjectWorkDir
psc <- useShaPathOnWindows =<< platformSnapAndCompilerRel
return $ workDir </> $(mkRelDir "install") </> psc
-- | Installation root for compiler tools
bindirCompilerTools :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
bindirCompilerTools = do
config <- view configL
platform <- platformGhcRelDir
compilerVersion <- envConfigCompilerVersion <$> view envConfigL
compiler <- parseRelDir $ compilerVersionString compilerVersion
return $
configStackRoot config </>
$(mkRelDir "compiler-tools") </>
platform </>
compiler </>
bindirSuffix
-- | Hoogle directory.
hoogleRoot :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
hoogleRoot = do
workDir <- getProjectWorkDir
psc <- useShaPathOnWindows =<< platformSnapAndCompilerRel
return $ workDir </> $(mkRelDir "hoogle") </> psc
-- | Get the hoogle database path.
hoogleDatabasePath :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs File)
hoogleDatabasePath = do
dir <- hoogleRoot
return (dir </> $(mkRelFile "database.hoo"))
-- | Path for platform followed by snapshot name followed by compiler
-- name.
platformSnapAndCompilerRel
:: (MonadReader env m, HasEnvConfig env, MonadThrow m)
=> m (Path Rel Dir)
platformSnapAndCompilerRel = do
sd <- view snapshotDefL
platform <- platformGhcRelDir
name <- parseRelDir $ sdRawPathName sd
ghc <- compilerVersionDir
useShaPathOnWindows (platform </> name </> ghc)
-- | Relative directory for the platform and GHC identifier
platformGhcRelDir
:: (MonadReader env m, HasEnvConfig env, MonadThrow m)
=> m (Path Rel Dir)
platformGhcRelDir = do
ec <- view envConfigL
verOnly <- platformGhcVerOnlyRelDirStr
parseRelDir (mconcat [ verOnly
, compilerBuildSuffix (envConfigCompilerBuild ec)])
-- | Relative directory for the platform and GHC identifier without GHC bindist build
platformGhcVerOnlyRelDir
:: (MonadReader env m, HasPlatform env, HasGHCVariant env, MonadThrow m)
=> m (Path Rel Dir)
platformGhcVerOnlyRelDir =
parseRelDir =<< platformGhcVerOnlyRelDirStr
-- | Relative directory for the platform and GHC identifier without GHC bindist build
-- (before parsing into a Path)
platformGhcVerOnlyRelDirStr
:: (MonadReader env m, HasPlatform env, HasGHCVariant env)
=> m FilePath
platformGhcVerOnlyRelDirStr = do
platform <- view platformL
platformVariant <- view platformVariantL
ghcVariant <- view ghcVariantL
return $ mconcat [ Distribution.Text.display platform
, platformVariantSuffix platformVariant
, ghcVariantSuffix ghcVariant ]
-- | This is an attempt to shorten stack paths on Windows to decrease our
-- chances of hitting the 260-character path limit. The idea is to calculate the
-- SHA1 hash of the path used on other architectures, encode it with base
-- 16 and take the first 8 characters of it.
useShaPathOnWindows :: MonadThrow m => Path Rel Dir -> m (Path Rel Dir)
useShaPathOnWindows =
#ifdef mingw32_HOST_OS
parseRelDir . S8.unpack . S8.take 8 . Mem.convertToBase Mem.Base16 . hashWith SHA1 . encodeUtf8 . T.pack . toFilePath
#else
return
#endif
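-- Illustrative sketch (hypothetical values): on Windows a relative directory
-- such as @x86_64-windows/lts-9.0/ghc-8.0.2@ is SHA1-hashed, base16-encoded,
-- and truncated to its first 8 characters, so something like @1f2a3b4c@ is
-- used instead of the full path. On other platforms the input is returned
-- unchanged.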
compilerVersionDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Rel Dir)
compilerVersionDir = do
compilerVersion <- view actualCompilerVersionL
parseRelDir $ case compilerVersion of
GhcVersion version -> versionString version
GhcjsVersion {} -> compilerVersionString compilerVersion
-- | Package database for installing dependencies into
packageDatabaseDeps :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
packageDatabaseDeps = do
root <- installationRootDeps
return $ root </> $(mkRelDir "pkgdb")
-- | Package database for installing local packages into
packageDatabaseLocal :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
packageDatabaseLocal = do
root <- installationRootLocal
return $ root </> $(mkRelDir "pkgdb")
-- | Extra package databases
packageDatabaseExtra :: (MonadReader env m, HasEnvConfig env) => m [Path Abs Dir]
packageDatabaseExtra = view $ buildConfigL.to bcExtraPackageDBs
-- | Directory for holding flag cache information
flagCacheLocal :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
flagCacheLocal = do
root <- installationRootLocal
return $ root </> $(mkRelDir "flag-cache")
-- | Where to store 'LoadedSnapshot' caches
configLoadedSnapshotCache
:: (MonadThrow m, MonadReader env m, HasConfig env, HasGHCVariant env)
=> SnapshotDef
-> GlobalInfoSource
-> m (Path Abs File)
configLoadedSnapshotCache sd gis = do
root <- view stackRootL
platform <- platformGhcVerOnlyRelDir
file <- parseRelFile $ sdRawPathName sd ++ ".cache"
gis' <- parseRelDir $
case gis of
GISSnapshotHints -> "__snapshot_hints__"
GISCompiler cv -> compilerVersionString cv
-- Yes, cached plans differ based on platform
return (root </> $(mkRelDir "loaded-snapshot-cache") </> platform </> gis' </> file)
-- | Where do we get information on global packages for loading up a
-- 'LoadedSnapshot'?
data GlobalInfoSource
= GISSnapshotHints
-- ^ Accept the hints in the snapshot definition
| GISCompiler (CompilerVersion 'CVActual)
-- ^ Look up the actual information in the installed compiler
-- | Suffix applied to an installation root to get the bin dir
bindirSuffix :: Path Rel Dir
bindirSuffix = $(mkRelDir "bin")
-- | Suffix applied to an installation root to get the doc dir
docDirSuffix :: Path Rel Dir
docDirSuffix = $(mkRelDir "doc")
-- | Where HPC reports and tix files get stored.
hpcReportDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> m (Path Abs Dir)
hpcReportDir = do
root <- installationRootLocal
return $ root </> $(mkRelDir "hpc")
-- | Get the extra bin directories (for the PATH). Puts the more local ones first.
--
-- The Bool indicates whether or not to include the locals.
extraBinDirs :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
=> m (Bool -> [Path Abs Dir])
extraBinDirs = do
deps <- installationRootDeps
local <- installationRootLocal
tools <- bindirCompilerTools
return $ \locals -> if locals
then [local </> bindirSuffix, deps </> bindirSuffix, tools]
else [deps </> bindirSuffix, tools]
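-- Usage sketch (hypothetical paths):
--
-- > dirs <- extraBinDirs
-- > dirs True   -- [<local install>/bin, <snapshot install>/bin, <compiler tools>/bin]
-- > dirs False  -- [<snapshot install>/bin, <compiler tools>/bin]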
-- | Get the minimal environment override, useful for just calling external
-- processes like git or ghc
getMinimalEnvOverride :: (MonadReader env m, HasConfig env, MonadIO m) => m EnvOverride
getMinimalEnvOverride = do
config' <- view configL
liftIO $ configEnvOverride config' minimalEnvSettings
minimalEnvSettings :: EnvSettings
minimalEnvSettings =
EnvSettings
{ esIncludeLocals = False
, esIncludeGhcPackagePath = False
, esStackExe = False
, esLocaleUtf8 = False
}
-- | Get the path for the given compiler ignoring any local binaries.
--
-- https://github.com/commercialhaskell/stack/issues/1052
getCompilerPath
:: (MonadIO m, MonadThrow m, MonadReader env m, HasConfig env)
=> WhichCompiler
-> m (Path Abs File)
getCompilerPath wc = do
config' <- view configL
eoWithoutLocals <- liftIO $
configEnvOverride config' minimalEnvSettings { esLocaleUtf8 = True }
join (findExecutable eoWithoutLocals (compilerExeName wc))
data ProjectAndConfigMonoid
= ProjectAndConfigMonoid !Project !ConfigMonoid
parseProjectAndConfigMonoid :: Path Abs Dir -> Value -> Yaml.Parser (WithJSONWarnings ProjectAndConfigMonoid)
parseProjectAndConfigMonoid rootDir =
withObjectWarnings "ProjectAndConfigMonoid" $ \o -> do
dirs <- jsonSubWarningsTT (o ..:? "packages") ..!= [packageEntryCurrDir]
extraDeps <- jsonSubWarningsTT (o ..:? "extra-deps") ..!= []
flags <- o ..:? "flags" ..!= mempty
-- Convert the packages/extra-deps/flags approach we use in
-- the stack.yaml into the internal representation.
let (packages, deps) = convert dirs extraDeps
resolver <- (o ..: "resolver")
>>= either (fail . show) return
. parseCustomLocation (Just rootDir)
compiler <- o ..:? "compiler"
msg <- o ..:? "user-message"
config <- parseConfigMonoidObject rootDir o
extraPackageDBs <- o ..:? "extra-package-dbs" ..!= []
let project = Project
{ projectUserMsg = msg
, projectResolver = resolver
, projectCompiler = compiler
, projectExtraPackageDBs = extraPackageDBs
, projectPackages = packages
, projectDependencies = deps
, projectFlags = flags
}
return $ ProjectAndConfigMonoid project config
where
convert :: [PackageEntry]
-> [PackageLocationIndex Subdirs] -- extra-deps
-> ( [PackageLocation Subdirs] -- project
, [PackageLocationIndex Subdirs] -- dependencies
)
convert entries extraDeps =
partitionEithers $ concatMap goEntry entries ++ map Right extraDeps
where
goEntry :: PackageEntry -> [Either (PackageLocation Subdirs) (PackageLocationIndex Subdirs)]
goEntry (PackageEntry Nothing pl@(PLFilePath _) subdirs) = goEntry' False pl subdirs
goEntry (PackageEntry Nothing pl _) = fail $ concat
[ "Refusing to implicitly treat package location as an extra-dep:\n"
, show pl
, "\nRecommendation: either move to 'extra-deps' or set 'extra-dep: true'."
]
goEntry (PackageEntry (Just extraDep) pl subdirs) = goEntry' extraDep pl subdirs
goEntry' :: Bool -- ^ extra dep?
-> PackageLocation Subdirs
-> Subdirs
-> [Either (PackageLocation Subdirs) (PackageLocationIndex Subdirs)]
goEntry' extraDep pl subdirs =
map (if extraDep then Right . PLOther else Left) (addSubdirs pl subdirs)
combineSubdirs :: [FilePath] -> Subdirs -> Subdirs
combineSubdirs paths DefaultSubdirs = ExplicitSubdirs paths
-- this could be considered an error condition, but we'll
-- just try and make it work
combineSubdirs paths (ExplicitSubdirs paths') = ExplicitSubdirs (paths ++ paths')
-- We do the toList/fromList bit as an efficient nub, and
-- to avoid having duplicate subdir names (especially for
-- the "." case, where parsing gets wonky).
addSubdirs :: PackageLocation Subdirs
-> Subdirs
-> [PackageLocation Subdirs]
addSubdirs pl DefaultSubdirs = [pl]
addSubdirs (PLRepo repo) (ExplicitSubdirs subdirs) =
[PLRepo repo { repoSubdirs = combineSubdirs subdirs $ repoSubdirs repo }]
addSubdirs (PLArchive arch) (ExplicitSubdirs subdirs) =
[PLArchive arch { archiveSubdirs = combineSubdirs subdirs $ archiveSubdirs arch }]
addSubdirs (PLFilePath fp) (ExplicitSubdirs subdirs) =
map (\subdir -> PLFilePath $ fp FilePath.</> subdir) subdirs
-- | A PackageEntry for the current directory, used as a default
packageEntryCurrDir :: PackageEntry
packageEntryCurrDir = PackageEntry
{ peExtraDepMaybe = Nothing
, peLocation = PLFilePath "."
, peSubdirs = DefaultSubdirs
}
-- | A source control system.
data SCM = Git
deriving (Show)
instance FromJSON SCM where
parseJSON v = do
s <- parseJSON v
case s of
"git" -> return Git
_ -> fail ("Unknown or unsupported SCM: " <> s)
instance ToJSON SCM where
toJSON Git = toJSON ("git" :: Text)
-- | A variant of the platform, used to differentiate Docker builds from host
data PlatformVariant = PlatformVariantNone
| PlatformVariant String
-- | Render a platform variant to a String suffix.
platformVariantSuffix :: PlatformVariant -> String
platformVariantSuffix PlatformVariantNone = ""
platformVariantSuffix (PlatformVariant v) = "-" ++ v
-- | Specialized variant of GHC (e.g. libgmp4 or integer-simple)
data GHCVariant
= GHCStandard -- ^ Standard bindist
| GHCIntegerSimple -- ^ Bindist that uses integer-simple
| GHCCustom String -- ^ Other bindists
deriving (Show)
instance FromJSON GHCVariant where
-- Strange structuring is to give consistent error messages
parseJSON =
withText
"GHCVariant"
(either (fail . show) return . parseGHCVariant . T.unpack)
-- | Render a GHC variant to a String.
ghcVariantName :: GHCVariant -> String
ghcVariantName GHCStandard = "standard"
ghcVariantName GHCIntegerSimple = "integersimple"
ghcVariantName (GHCCustom name) = "custom-" ++ name
-- | Render a GHC variant to a String suffix.
ghcVariantSuffix :: GHCVariant -> String
ghcVariantSuffix GHCStandard = ""
ghcVariantSuffix v = "-" ++ ghcVariantName v
-- | Parse GHC variant from a String.
parseGHCVariant :: (MonadThrow m) => String -> m GHCVariant
parseGHCVariant s =
case stripPrefix "custom-" s of
Just name -> return (GHCCustom name)
Nothing
| s == "" -> return GHCStandard
| s == "standard" -> return GHCStandard
| s == "integersimple" -> return GHCIntegerSimple
| otherwise -> return (GHCCustom s)
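-- Parsing sketch (doctest-style, using the 'MonadThrow' instance for 'Maybe';
-- purely illustrative):
--
-- >>> parseGHCVariant "" :: Maybe GHCVariant
-- Just GHCStandard
-- >>> parseGHCVariant "integersimple" :: Maybe GHCVariant
-- Just GHCIntegerSimple
-- >>> parseGHCVariant "custom-my-ghc" :: Maybe GHCVariant
-- Just (GHCCustom "my-ghc")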
-- | Build of the compiler distribution (e.g. standard, gmp4, tinfo6)
-- | Information for a file to download.
data DownloadInfo = DownloadInfo
{ downloadInfoUrl :: Text
-- ^ URL or absolute file path
, downloadInfoContentLength :: Maybe Int
, downloadInfoSha1 :: Maybe ByteString
} deriving (Show)
instance FromJSON (WithJSONWarnings DownloadInfo) where
parseJSON = withObjectWarnings "DownloadInfo" parseDownloadInfoFromObject
-- | Parse JSON in existing object for 'DownloadInfo'
parseDownloadInfoFromObject :: Object -> WarningParser DownloadInfo
parseDownloadInfoFromObject o = do
url <- o ..: "url"
contentLength <- o ..:? "content-length"
sha1TextMay <- o ..:? "sha1"
return
DownloadInfo
{ downloadInfoUrl = url
, downloadInfoContentLength = contentLength
, downloadInfoSha1 = fmap encodeUtf8 sha1TextMay
}
data VersionedDownloadInfo = VersionedDownloadInfo
{ vdiVersion :: Version
, vdiDownloadInfo :: DownloadInfo
}
deriving Show
instance FromJSON (WithJSONWarnings VersionedDownloadInfo) where
parseJSON = withObjectWarnings "VersionedDownloadInfo" $ \o -> do
version <- o ..: "version"
downloadInfo <- parseDownloadInfoFromObject o
return VersionedDownloadInfo
{ vdiVersion = version
, vdiDownloadInfo = downloadInfo
}
data GHCDownloadInfo = GHCDownloadInfo
{ gdiConfigureOpts :: [Text]
, gdiConfigureEnv :: Map Text Text
, gdiDownloadInfo :: DownloadInfo
}
deriving Show
instance FromJSON (WithJSONWarnings GHCDownloadInfo) where
parseJSON = withObjectWarnings "GHCDownloadInfo" $ \o -> do
configureOpts <- o ..:? "configure-opts" ..!= mempty
configureEnv <- o ..:? "configure-env" ..!= mempty
downloadInfo <- parseDownloadInfoFromObject o
return GHCDownloadInfo
{ gdiConfigureOpts = configureOpts
, gdiConfigureEnv = configureEnv
, gdiDownloadInfo = downloadInfo
}
data SetupInfo = SetupInfo
{ siSevenzExe :: Maybe DownloadInfo
, siSevenzDll :: Maybe DownloadInfo
, siMsys2 :: Map Text VersionedDownloadInfo
, siGHCs :: Map Text (Map Version GHCDownloadInfo)
, siGHCJSs :: Map Text (Map (CompilerVersion 'CVActual) DownloadInfo)
, siStack :: Map Text (Map Version DownloadInfo)
}
deriving Show
instance FromJSON (WithJSONWarnings SetupInfo) where
parseJSON = withObjectWarnings "SetupInfo" $ \o -> do
siSevenzExe <- jsonSubWarningsT (o ..:? "sevenzexe-info")
siSevenzDll <- jsonSubWarningsT (o ..:? "sevenzdll-info")
siMsys2 <- jsonSubWarningsT (o ..:? "msys2" ..!= mempty)
siGHCs <- jsonSubWarningsTT (o ..:? "ghc" ..!= mempty)
siGHCJSs <- jsonSubWarningsTT (o ..:? "ghcjs" ..!= mempty)
siStack <- jsonSubWarningsTT (o ..:? "stack" ..!= mempty)
return SetupInfo {..}
-- | For the @siGHCs@ and @siGHCJSs@ fields, the maps are deeply merged.
-- For all other fields, the values from the last @SetupInfo@ win.
instance Monoid SetupInfo where
mempty =
SetupInfo
{ siSevenzExe = Nothing
, siSevenzDll = Nothing
, siMsys2 = Map.empty
, siGHCs = Map.empty
, siGHCJSs = Map.empty
, siStack = Map.empty
}
mappend l r =
SetupInfo
{ siSevenzExe = siSevenzExe r <|> siSevenzExe l
, siSevenzDll = siSevenzDll r <|> siSevenzDll l
, siMsys2 = siMsys2 r <> siMsys2 l
, siGHCs = Map.unionWith (<>) (siGHCs r) (siGHCs l)
, siGHCJSs = Map.unionWith (<>) (siGHCJSs r) (siGHCJSs l)
, siStack = Map.unionWith (<>) (siStack l) (siStack r) }
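-- Merge sketch (illustrative): if the left-hand 'SetupInfo' maps the key
-- @"linux64"@ to GHC 8.0.2 and the right-hand one maps the same key to GHC
-- 8.2.2, the combined @siGHCs@ maps @"linux64"@ to both versions, while a
-- scalar field such as 'siSevenzExe' keeps the right-hand (later) value when
-- both sides define it.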
-- | Remote or inline 'SetupInfo'
data SetupInfoLocation
= SetupInfoFileOrURL String
| SetupInfoInline SetupInfo
deriving (Show)
instance FromJSON (WithJSONWarnings SetupInfoLocation) where
parseJSON v =
(noJSONWarnings <$>
withText "SetupInfoFileOrURL" (pure . SetupInfoFileOrURL . T.unpack) v) <|>
inline
where
inline = do
WithJSONWarnings si w <- parseJSON v
return $ WithJSONWarnings (SetupInfoInline si) w
-- | How PVP bounds should be added to .cabal files
data PvpBoundsType
= PvpBoundsNone
| PvpBoundsUpper
| PvpBoundsLower
| PvpBoundsBoth
deriving (Show, Read, Eq, Typeable, Ord, Enum, Bounded)
data PvpBounds = PvpBounds
{ pbType :: !PvpBoundsType
, pbAsRevision :: !Bool
}
deriving (Show, Read, Eq, Typeable, Ord)
pvpBoundsText :: PvpBoundsType -> Text
pvpBoundsText PvpBoundsNone = "none"
pvpBoundsText PvpBoundsUpper = "upper"
pvpBoundsText PvpBoundsLower = "lower"
pvpBoundsText PvpBoundsBoth = "both"
parsePvpBounds :: Text -> Either String PvpBounds
parsePvpBounds t = maybe err Right $ do
(t', asRevision) <-
case T.break (== '-') t of
(x, "") -> Just (x, False)
(x, "-revision") -> Just (x, True)
_ -> Nothing
x <- Map.lookup t' m
Just PvpBounds
{ pbType = x
, pbAsRevision = asRevision
}
where
m = Map.fromList $ map (pvpBoundsText &&& id) [minBound..maxBound]
err = Left $ "Invalid PVP bounds: " ++ T.unpack t
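-- Parsing sketch (doctest-style, purely illustrative):
--
-- >>> parsePvpBounds "upper"
-- Right (PvpBounds {pbType = PvpBoundsUpper, pbAsRevision = False})
-- >>> parsePvpBounds "both-revision"
-- Right (PvpBounds {pbType = PvpBoundsBoth, pbAsRevision = True})
-- >>> parsePvpBounds "sideways"
-- Left "Invalid PVP bounds: sideways"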
instance ToJSON PvpBounds where
toJSON (PvpBounds typ asRevision) =
toJSON (pvpBoundsText typ <> (if asRevision then "-revision" else ""))
instance FromJSON PvpBounds where
parseJSON = withText "PvpBounds" (either fail return . parsePvpBounds)
-- | Provide an explicit list of package dependencies when running a custom Setup.hs
explicitSetupDeps :: (MonadReader env m, HasConfig env) => PackageName -> m Bool
explicitSetupDeps name = do
m <- view $ configL.to configExplicitSetupDeps
return $
-- Yes there are far cleverer ways to write this. I honestly consider
-- the explicit pattern matching much easier to parse at a glance.
case Map.lookup (Just name) m of
Just b -> b
Nothing ->
case Map.lookup Nothing m of
Just b -> b
Nothing -> False -- default value
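-- Lookup sketch (hypothetical config values): with
-- @configExplicitSetupDeps = Map.fromList [(Nothing, True), (Just foo, False)]@
-- the package @foo@ resolves to 'False' (its explicit entry wins), any other
-- package resolves to 'True' via the wildcard entry, and an empty map makes
-- everything default to 'False'.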
-- | Data passed into Docker container for the Docker entrypoint's use
newtype DockerEntrypoint = DockerEntrypoint
{ deUser :: Maybe DockerUser
-- ^ UID/GID/etc of host user, if we wish to perform UID/GID switch in container
} deriving (Read,Show)
-- | Docker host user info
data DockerUser = DockerUser
{ duUid :: UserID -- ^ uid
, duGid :: GroupID -- ^ gid
, duGroups :: [GroupID] -- ^ Supplemental groups
, duUmask :: FileMode -- ^ File creation mask
} deriving (Read,Show)
data GhcOptionKey
= GOKOldEverything
| GOKEverything
| GOKLocals
| GOKTargets
| GOKPackage !PackageName
deriving (Eq, Ord)
instance FromJSONKey GhcOptionKey where
fromJSONKey = FromJSONKeyTextParser $ \t ->
case t of
"*" -> return GOKOldEverything
"$everything" -> return GOKEverything
"$locals" -> return GOKLocals
"$targets" -> return GOKTargets
_ ->
case parsePackageName t of
Left e -> fail $ show e
Right x -> return $ GOKPackage x
fromJSONKeyList = FromJSONKeyTextParser $ \_ -> fail "GhcOptionKey.fromJSONKeyList"
newtype GhcOptions = GhcOptions { unGhcOptions :: [Text] }
instance FromJSON GhcOptions where
parseJSON = withText "GhcOptions" $ \t ->
case parseArgs Escaping t of
Left e -> fail e
Right opts -> return $ GhcOptions $ map T.pack opts
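-- Parsing sketch (illustrative): the YAML string @"-Wall -O2"@ is split using
-- shell-style escaping by 'parseArgs', yielding
-- @GhcOptions ["-Wall", "-O2"]@.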
-----------------------------------
-- Lens classes
-----------------------------------
-- | Class for environment values which have a Platform
class HasPlatform env where
platformL :: Lens' env Platform
default platformL :: HasConfig env => Lens' env Platform
platformL = configL.platformL
{-# INLINE platformL #-}
platformVariantL :: Lens' env PlatformVariant
default platformVariantL :: HasConfig env => Lens' env PlatformVariant
platformVariantL = configL.platformVariantL
{-# INLINE platformVariantL #-}
-- | Class for environment values which have a GHCVariant
class HasGHCVariant env where
ghcVariantL :: Lens' env GHCVariant
default ghcVariantL :: HasBuildConfig env => Lens' env GHCVariant
ghcVariantL = buildConfigL.ghcVariantL
{-# INLINE ghcVariantL #-}
-- | Class for environment values that can provide a 'Config'.
class (HasPlatform env, HasRunner env) => HasConfig env where
configL :: Lens' env Config
default configL :: HasBuildConfig env => Lens' env Config
configL = buildConfigL.lens bcConfig (\x y -> x { bcConfig = y })
{-# INLINE configL #-}
class HasConfig env => HasBuildConfig env where
buildConfigL :: Lens' env BuildConfig
default buildConfigL :: HasEnvConfig env => Lens' env BuildConfig
buildConfigL = envConfigL.lens
envConfigBuildConfig
(\x y -> x { envConfigBuildConfig = y })
class (HasBuildConfig env, HasGHCVariant env) => HasEnvConfig env where
envConfigL :: Lens' env EnvConfig
-----------------------------------
-- Lens instances
-----------------------------------
instance HasPlatform (Platform,PlatformVariant) where
platformL = _1
platformVariantL = _2
instance HasPlatform Config where
platformL = lens configPlatform (\x y -> x { configPlatform = y })
platformVariantL = lens configPlatformVariant (\x y -> x { configPlatformVariant = y })
instance HasPlatform LoadConfig
instance HasPlatform BuildConfig
instance HasPlatform EnvConfig
instance HasGHCVariant GHCVariant where
ghcVariantL = id
{-# INLINE ghcVariantL #-}
instance HasGHCVariant BuildConfig where
ghcVariantL = lens bcGHCVariant (\x y -> x { bcGHCVariant = y })
instance HasGHCVariant EnvConfig
instance HasConfig Config where
configL = id
{-# INLINE configL #-}
instance HasConfig LoadConfig where
configL = lens lcConfig (\x y -> x { lcConfig = y })
instance HasConfig BuildConfig where
configL = lens bcConfig (\x y -> x { bcConfig = y })
instance HasConfig EnvConfig
instance HasBuildConfig BuildConfig where
buildConfigL = id
{-# INLINE buildConfigL #-}
instance HasBuildConfig EnvConfig
instance HasEnvConfig EnvConfig where
envConfigL = id
{-# INLINE envConfigL #-}
instance HasRunner Config where
runnerL = lens configRunner (\x y -> x { configRunner = y })
instance HasRunner LoadConfig where
runnerL = configL.runnerL
instance HasRunner BuildConfig where
runnerL = configL.runnerL
instance HasRunner EnvConfig where
runnerL = configL.runnerL
instance HasLogFunc Config where
logFuncL = runnerL.logFuncL
instance HasLogFunc LoadConfig where
logFuncL = runnerL.logFuncL
instance HasLogFunc BuildConfig where
logFuncL = runnerL.logFuncL
instance HasLogFunc EnvConfig where
logFuncL = runnerL.logFuncL
-----------------------------------
-- Helper lenses
-----------------------------------
stackRootL :: HasConfig s => Lens' s (Path Abs Dir)
stackRootL = configL.lens configStackRoot (\x y -> x { configStackRoot = y })
-- | The compiler specified by the @MiniBuildPlan@. This may be
-- different from the actual compiler used!
wantedCompilerVersionL :: HasBuildConfig s => Getting r s (CompilerVersion 'CVWanted)
wantedCompilerVersionL = snapshotDefL.to sdWantedCompilerVersion
-- | The version of the compiler which will actually be used. May be
-- different than that specified in the 'MiniBuildPlan' and returned
-- by 'wantedCompilerVersionL'.
actualCompilerVersionL :: HasEnvConfig s => Lens' s (CompilerVersion 'CVActual)
actualCompilerVersionL = envConfigL.lens
envConfigCompilerVersion
(\x y -> x { envConfigCompilerVersion = y })
snapshotDefL :: HasBuildConfig s => Lens' s SnapshotDef
snapshotDefL = buildConfigL.lens
bcSnapshotDef
(\x y -> x { bcSnapshotDef = y })
packageIndicesL :: HasConfig s => Lens' s [PackageIndex]
packageIndicesL = configL.lens
configPackageIndices
(\x y -> x { configPackageIndices = y })
buildOptsL :: HasConfig s => Lens' s BuildOpts
buildOptsL = configL.lens
configBuild
(\x y -> x { configBuild = y })
buildOptsMonoidHaddockL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidHaddockL = lens (getFirst . buildMonoidHaddock)
(\buildMonoid t -> buildMonoid {buildMonoidHaddock = First t})
buildOptsMonoidTestsL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidTestsL = lens (getFirst . buildMonoidTests)
(\buildMonoid t -> buildMonoid {buildMonoidTests = First t})
buildOptsMonoidBenchmarksL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidBenchmarksL = lens (getFirst . buildMonoidBenchmarks)
(\buildMonoid t -> buildMonoid {buildMonoidBenchmarks = First t})
buildOptsMonoidInstallExesL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidInstallExesL =
lens (getFirst . buildMonoidInstallExes)
(\buildMonoid t -> buildMonoid {buildMonoidInstallExes = First t})
buildOptsInstallExesL :: Lens' BuildOpts Bool
buildOptsInstallExesL =
lens boptsInstallExes
(\bopts t -> bopts {boptsInstallExes = t})
buildOptsHaddockL :: Lens' BuildOpts Bool
buildOptsHaddockL =
lens boptsHaddock
(\bopts t -> bopts {boptsHaddock = t})
globalOptsL :: Lens' GlobalOpts ConfigMonoid
globalOptsL = lens globalConfigMonoid (\x y -> x { globalConfigMonoid = y })
globalOptsBuildOptsMonoidL :: Lens' GlobalOpts BuildOptsMonoid
globalOptsBuildOptsMonoidL = globalOptsL.lens
configMonoidBuildOpts
(\x y -> x { configMonoidBuildOpts = y })
configUrlsL :: HasConfig env => Lens' env Urls
configUrlsL = configL.lens configUrls (\x y -> x { configUrls = y })
cabalVersionL :: HasEnvConfig env => Lens' env Version
cabalVersionL = envConfigL.lens
envConfigCabalVersion
(\x y -> x { envConfigCabalVersion = y })
loadedSnapshotL :: HasEnvConfig env => Lens' env LoadedSnapshot
loadedSnapshotL = envConfigL.lens
envConfigLoadedSnapshot
(\x y -> x { envConfigLoadedSnapshot = y })
whichCompilerL :: Getting r (CompilerVersion a) WhichCompiler
whichCompilerL = to whichCompiler
envOverrideL :: HasConfig env => Lens' env (EnvSettings -> IO EnvOverride)
envOverrideL = configL.lens
configEnvOverride
(\x y -> x { configEnvOverride = y })
shouldForceGhcColorFlag :: (HasRunner env, HasEnvConfig env)
=> RIO env Bool
shouldForceGhcColorFlag = do
canDoColor <- (>= $(mkVersion "8.2.1")) . getGhcVersion
<$> view actualCompilerVersionL
shouldDoColor <- logUseColor <$> view logOptionsL
return $ canDoColor && shouldDoColor
appropriateGhcColorFlag :: (HasRunner env, HasEnvConfig env)
=> RIO env (Maybe String)
appropriateGhcColorFlag = f <$> shouldForceGhcColorFlag
where f True = Just ghcColorForceFlag
f False = Nothing
|
MichielDerhaeg/stack
|
src/Stack/Types/Config.hs
|
bsd-3-clause
| 80,291
| 0
| 16
| 18,417
| 15,278
| 8,288
| 6,990
| 1,749
| 8
|
module Hint.Base (
MonadInterpreter(..), RunGhc,
GhcError(..), InterpreterError(..), mayFail, catchIE,
InterpreterSession, SessionData(..), GhcErrLogger,
InterpreterState(..), fromState, onState,
InterpreterConfiguration(..),
ImportList(..), ModuleQualification(..), ModuleImport(..),
runGhc1, runGhc2,
ModuleName, PhantomModule(..),
findModule, moduleIsLoaded,
withDynFlags,
ghcVersion,
debug, showGHC
) where
import Control.Monad.Trans
import Control.Monad.Catch as MC
import Data.IORef
import Data.Dynamic
import qualified Data.List
import qualified Hint.GHC as GHC
import Hint.Extension
-- | Version of the underlying ghc api. Values are:
--
-- * @804@ for GHC 8.4.x
--
-- * @806@ for GHC 8.6.x
--
-- * etc...
ghcVersion :: Int
ghcVersion = __GLASGOW_HASKELL__
class (MonadIO m, MonadMask m) => MonadInterpreter m where
fromSession :: FromSession m a
modifySessionRef :: ModifySessionRef m a
runGhc :: RunGhc m a
-- this is for hiding the actual types in haddock
type FromSession m a = (InterpreterSession -> a) -> m a
type ModifySessionRef m a = (InterpreterSession -> IORef a) -> (a -> a) -> m a
data InterpreterError = UnknownError String
| WontCompile [GhcError]
| NotAllowed String
-- | GhcExceptions from the underlying GHC API are caught
-- and rethrown as this.
| GhcException String
deriving (Show, Typeable)
data InterpreterState = St {
activePhantoms :: [PhantomModule],
zombiePhantoms :: [PhantomModule],
#if defined(NEED_PHANTOM_DIRECTORY)
phantomDirectory :: Maybe FilePath,
#endif
hintSupportModule :: PhantomModule,
importQualHackMod :: Maybe PhantomModule,
qualImports :: [ModuleImport],
defaultExts :: [(Extension, Bool)], -- R/O
configuration :: InterpreterConfiguration
}
data ImportList = NoImportList | ImportList [String] | HidingList [String]
deriving (Eq, Show)
data ModuleQualification = NotQualified | ImportAs String | QualifiedAs (Maybe String)
deriving (Eq, Show)
-- | Represents a module import statement.
-- See 'setImportsF'
data ModuleImport = ModuleImport { modName :: String
, modQual :: ModuleQualification
, modImp :: ImportList
} deriving (Show)
data InterpreterConfiguration = Conf {
searchFilePath :: [FilePath],
languageExts :: [Extension],
allModsInScope :: Bool
}
type InterpreterSession = SessionData ()
instance Exception InterpreterError
where
displayException (UnknownError err) = "UnknownError: " ++ err
displayException (WontCompile es) = unlines . Data.List.nub . map errMsg $ es
displayException (NotAllowed err) = "NotAllowed: " ++ err
displayException (GhcException err) = "GhcException: " ++ err
type RunGhc m a =
(forall n.(MonadIO n, MonadMask n) => GHC.GhcT n a)
-> m a
type RunGhc1 m a b =
(forall n.(MonadIO n, MonadMask n) => a -> GHC.GhcT n b)
-> (a -> m b)
type RunGhc2 m a b c =
(forall n.(MonadIO n, MonadMask n) => a -> b -> GHC.GhcT n c)
-> (a -> b -> m c)
data SessionData a = SessionData {
internalState :: IORef InterpreterState,
versionSpecific :: a,
ghcErrListRef :: IORef [GhcError],
ghcErrLogger :: GhcErrLogger
}
-- When intercepting errors reported by GHC, we only get an ErrUtils.Message
-- and a SrcLoc.SrcSpan. The latter holds the file name and the location
-- of the error. However, SrcSpan is abstract and it doesn't provide
-- functions to retrieve the line and column of the error... we can only
-- generate a string with this information. Maybe I can parse this string
-- later.... (sigh)
newtype GhcError = GhcError{errMsg :: String} deriving Show
mapGhcExceptions :: MonadInterpreter m
=> (String -> InterpreterError)
-> m a
-> m a
mapGhcExceptions buildEx action =
action
`MC.catch` (\err -> case err of
GhcException s -> throwM (buildEx s)
_ -> throwM err)
catchIE :: MonadInterpreter m => m a -> (InterpreterError -> m a) -> m a
catchIE = MC.catch
type GhcErrLogger = GHC.LogAction
-- | Module names are _not_ filepaths.
type ModuleName = String
runGhc1 :: MonadInterpreter m => RunGhc1 m a b
runGhc1 f a = runGhc (f a)
runGhc2 :: MonadInterpreter m => RunGhc2 m a b c
runGhc2 f a = runGhc1 (f a)
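-- Editor's sketch (not part of the original module): a three-argument variant
-- built the same way 'runGhc2' is built from 'runGhc1'; the names 'RunGhc3'
-- and 'runGhc3' are hypothetical here.
type RunGhc3 m a b c d =
    (forall n.(MonadIO n, MonadMask n) => a -> b -> c -> GHC.GhcT n d)
    -> (a -> b -> c -> m d)
runGhc3 :: MonadInterpreter m => RunGhc3 m a b c d
runGhc3 f a = runGhc2 (f a)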
-- ================ Handling the interpreter state =================
fromState :: MonadInterpreter m => (InterpreterState -> a) -> m a
fromState f = do ref_st <- fromSession internalState
liftIO $ f `fmap` readIORef ref_st
onState :: MonadInterpreter m => (InterpreterState -> InterpreterState) -> m ()
onState f = modifySessionRef internalState f >> return ()
-- =============== Error handling ==============================
mayFail :: MonadInterpreter m => m (Maybe a) -> m a
mayFail action =
do
maybe_res <- action
--
es <- modifySessionRef ghcErrListRef (const [])
--
case (maybe_res, null es) of
(Nothing, True) -> throwM $ UnknownError "Got no error message"
(Nothing, False) -> throwM $ WontCompile (reverse es)
(Just a, _) -> return a
-- ================= Debugging stuff ===============
debug :: MonadInterpreter m => String -> m ()
debug = liftIO . putStrLn . ("!! " ++)
showGHC :: (MonadInterpreter m, GHC.Outputable a) => a -> m String
showGHC a
= do unqual <- runGhc GHC.getPrintUnqual
withDynFlags $ \df ->
return $ GHC.showSDocForUser df unqual (GHC.ppr a)
-- ================ Misc ===================================
-- this type ought to go in Hint.Context, but ghc dislikes cyclic imports...
data PhantomModule = PhantomModule{pmName :: ModuleName, pmFile :: FilePath}
deriving (Eq, Show)
findModule :: MonadInterpreter m => ModuleName -> m GHC.Module
findModule mn = mapGhcExceptions NotAllowed $
runGhc2 GHC.findModule mod_name Nothing
where mod_name = GHC.mkModuleName mn
moduleIsLoaded :: MonadInterpreter m => ModuleName -> m Bool
moduleIsLoaded mn = (findModule mn >> return True)
`catchIE` (\e -> case e of
NotAllowed{} -> return False
WontCompile{} -> return False
_ -> throwM e)
withDynFlags :: MonadInterpreter m => (GHC.DynFlags -> m a) -> m a
withDynFlags action
= do df <- runGhc GHC.getSessionDynFlags
action df
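-- Editor's sketch (not part of the original module): combining 'showGHC' and
-- 'debug' to trace an Outputable value; 'debugShow' is a hypothetical name.
debugShow :: (MonadInterpreter m, GHC.Outputable a) => a -> m ()
debugShow a = showGHC a >>= debug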
|
mvdan/hint
|
src/Hint/Base.hs
|
bsd-3-clause
| 7,197
| 0
| 13
| 2,235
| 1,741
| 961
| 780
| -1
| -1
|
{-# OPTIONS -fno-warn-incomplete-patterns -optc-DNON_POSIX_SOURCE #-}
{-# LANGUAGE ForeignFunctionInterface, NamedFieldPuns, PatternGuards #-}
-- THIS IS A COPY OF THE GHC DRIVER PROGRAM
-- The only modification is the insertion of the RunPhaseHook
-----------------------------------------------------------------------------
--
-- GHC Driver program
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module Main (main) where
-- The official GHC API
import qualified GHC
import GHC ( -- DynFlags(..), HscTarget(..),
-- GhcMode(..), GhcLink(..),
Ghc, GhcMonad(..),
LoadHowMuch(..) )
import CmdLineParser
-- Implementations of the various modes (--show-iface, mkdependHS, etc.)
import LoadIface ( showIface )
import HscMain ( newHscEnv )
import DriverPipeline ( oneShot, compileFile )
import DriverMkDepend ( doMkDependHS )
#ifdef GHCI
import InteractiveUI ( interactiveUI, ghciWelcomeMsg, defaultGhciSettings )
#endif
-- Various other random stuff that we need
import Config
import Constants
import HscTypes
import Packages ( dumpPackages )
import DriverPhases
import BasicTypes ( failed )
import StaticFlags
import DynFlags
import ErrUtils
import FastString
import Outputable
import SrcLoc
import Util
import Panic
import MonadUtils ( liftIO )
-- Imports for --abi-hash
import LoadIface ( loadUserInterface )
import Module ( mkModuleName )
import Finder ( findImportedModule, cannotFindInterface )
import TcRnMonad ( initIfaceCheck )
import Binary ( openBinMem, put_, fingerprintBinMem )
-- Standard Haskell libraries
import System.IO
import System.Environment
import System.Exit
import System.FilePath
import Control.Monad
import Data.Char
import Data.List
import Data.Maybe
-- imports not in the original driver program
import GHC.Paths
import Hooks ( runPhaseHook)
import HscMainMod ( newRunPhaseHook )
-----------------------------------------------------------------------------
-- ToDo:
-- time commands when run with -v
-- user ways
-- Win32 support: proper signal handling
-- reading the package configuration file is too slow
-- -K<size>
-----------------------------------------------------------------------------
-- GHC's command-line interface
main :: IO ()
main = do
initGCStatistics -- See Note [-Bsymbolic and hooks]
hSetBuffering stdout LineBuffering
hSetBuffering stderr LineBuffering
GHC.defaultErrorHandler defaultFatalMessager defaultFlushOut $ do
argv1 <- getArgs
let argv1' = map (mkGeneralLocated "on the commandline") argv1
(argv2, staticFlagWarnings) <- parseStaticFlags argv1'
-- 2. Parse the "mode" flags (--make, --interactive etc.)
(mode, argv3, modeFlagWarnings) <- parseModeFlags argv2
let flagWarnings = staticFlagWarnings ++ modeFlagWarnings
-- If all we want to do is something like showing the version number
-- then do it now, before we start a GHC session etc. This makes
-- getting basic information much more resilient.
-- In particular, if we wait until later before giving the version
-- number then bootstrapping gets confused, as it tries to find out
-- what version of GHC it's using before package.conf exists, so
-- starting the session fails.
case mode of
Left preStartupMode ->
do case preStartupMode of
ShowSupportedExtensions -> showSupportedExtensions
ShowVersion -> showVersion
ShowNumVersion -> putStrLn cProjectVersion
ShowOptions -> showOptions
Right postStartupMode ->
-- start our GHC session
GHC.runGhc (Just libdir) $ do
dflags <- GHC.getSessionDynFlags
case postStartupMode of
Left preLoadMode ->
liftIO $ do
case preLoadMode of
ShowInfo -> showInfo dflags
ShowGhcUsage -> showGhcUsage dflags
ShowGhciUsage -> showGhciUsage dflags
PrintWithDynFlags f -> putStrLn (f dflags)
Right postLoadMode ->
main' postLoadMode dflags argv3 flagWarnings
main' :: PostLoadMode -> DynFlags -> [Located String] -> [Located String]
-> Ghc ()
main' postLoadMode dflags0 args flagWarnings = do
-- set the default GhcMode, HscTarget and GhcLink. The HscTarget
-- can be further adjusted on a module by module basis, using only
-- the -fvia-C and -fasm flags. If the default HscTarget is not
-- HscC or HscAsm, -fvia-C and -fasm have no effect.
let dflt_target = hscTarget dflags0
(mode, lang, link)
= case postLoadMode of
DoInteractive -> (CompManager, HscInterpreted, LinkInMemory)
DoEval _ -> (CompManager, HscInterpreted, LinkInMemory)
DoMake -> (CompManager, dflt_target, LinkBinary)
DoMkDependHS -> (MkDepend, dflt_target, LinkBinary)
DoAbiHash -> (OneShot, dflt_target, LinkBinary)
_ -> (OneShot, dflt_target, LinkBinary)
let dflags1 = case lang of
HscInterpreted ->
let platform = targetPlatform dflags0
dflags0a = updateWays $ dflags0 { ways = interpWays }
dflags0b = foldl gopt_set dflags0a
$ concatMap (wayGeneralFlags platform)
interpWays
dflags0c = foldl gopt_unset dflags0b
$ concatMap (wayUnsetGeneralFlags platform)
interpWays
in dflags0c
_ ->
dflags0
dflags2 = dflags1{ ghcMode = mode,
hscTarget = lang,
ghcLink = link,
hooks = (hooks dflags1) {
runPhaseHook = Just newRunPhaseHook},
verbosity = case postLoadMode of
DoEval _ -> 0
_other -> 1
}
-- turn on -fimplicit-import-qualified for GHCi now, so that it
-- can be overridden from the command-line
-- XXX: this should really be in the interactive DynFlags, but
-- we don't set that until later in interactiveUI
dflags3 | DoInteractive <- postLoadMode = imp_qual_enabled
| DoEval _ <- postLoadMode = imp_qual_enabled
| otherwise = dflags2
where imp_qual_enabled = dflags2 `gopt_set` Opt_ImplicitImportQualified
-- The rest of the arguments are "dynamic"
-- Leftover ones are presumably files
(dflags4, fileish_args, dynamicFlagWarnings) <-
GHC.parseDynamicFlags dflags3 args
GHC.prettyPrintGhcErrors dflags4 $ do
let flagWarnings' = flagWarnings ++ dynamicFlagWarnings
handleSourceError (\e -> do
GHC.printException e
liftIO $ exitWith (ExitFailure 1)) $ do
liftIO $ handleFlagWarnings dflags4 flagWarnings'
-- make sure we clean up after ourselves
GHC.defaultCleanupHandler dflags4 $ do
liftIO $ showBanner postLoadMode dflags4
let
-- To simplify the handling of filepaths, we normalise all filepaths right
-- away - e.g., for win32 platforms, backslashes are converted
-- into forward slashes.
normal_fileish_paths = map (normalise . unLoc) fileish_args
(srcs, objs) = partition_args normal_fileish_paths [] []
dflags5 = dflags4 { ldInputs = map (FileOption "") objs
++ ldInputs dflags4 }
-- we've finished manipulating the DynFlags, update the session
_ <- GHC.setSessionDynFlags dflags5
dflags6 <- GHC.getSessionDynFlags
hsc_env <- GHC.getSession
---------------- Display configuration -----------
when (verbosity dflags6 >= 4) $
liftIO $ dumpPackages dflags6
when (verbosity dflags6 >= 3) $ do
liftIO $ hPutStrLn stderr ("Hsc static flags: " ++ unwords staticFlags)
---------------- Final sanity checking -----------
liftIO $ checkOptions postLoadMode dflags6 srcs objs
---------------- Do the business -----------
handleSourceError (\e -> do
GHC.printException e
liftIO $ exitWith (ExitFailure 1)) $ do
case postLoadMode of
ShowInterface f -> liftIO $ doShowIface dflags6 f
DoMake -> doMake srcs
DoMkDependHS -> doMkDependHS (map fst srcs)
StopBefore p -> liftIO (oneShot hsc_env p srcs)
DoInteractive -> ghciUI srcs Nothing
DoEval exprs -> ghciUI srcs $ Just $ reverse exprs
DoAbiHash -> abiHash srcs
liftIO $ dumpFinalStats dflags6
ghciUI :: [(FilePath, Maybe Phase)] -> Maybe [String] -> Ghc ()
#ifndef GHCI
ghciUI _ _ = throwGhcException (CmdLineError "not built for interactive use")
#else
ghciUI = interactiveUI defaultGhciSettings
#endif
-- -----------------------------------------------------------------------------
-- Splitting arguments into source files and object files. This is where we
-- interpret the -x <suffix> option, and attach a (Maybe Phase) to each source
-- file indicating the phase specified by the -x option in force, if any.
partition_args :: [String] -> [(String, Maybe Phase)] -> [String]
-> ([(String, Maybe Phase)], [String])
partition_args [] srcs objs = (reverse srcs, reverse objs)
partition_args ("-x":suff:args) srcs objs
| "none" <- suff = partition_args args srcs objs
| StopLn <- phase = partition_args args srcs (slurp ++ objs)
| otherwise = partition_args rest (these_srcs ++ srcs) objs
where phase = startPhase suff
(slurp,rest) = break (== "-x") args
these_srcs = zip slurp (repeat (Just phase))
partition_args (arg:args) srcs objs
| looks_like_an_input arg = partition_args args ((arg,Nothing):srcs) objs
| otherwise = partition_args args srcs (arg:objs)
{-
We split out the object files (.o, .dll) and add them
to ldInputs for use by the linker.
The following things should be considered compilation manager inputs:
- haskell source files (strings ending in .hs, .lhs or other
haskellish extension),
- module names (not forgetting hierarchical module names),
- things beginning with '-' are flags that were not recognised by
the flag parser, and we want them to generate errors later in
checkOptions, so we class them as source files (#5921)
- and finally we consider everything not containing a '.' to be
a comp manager input, as shorthand for a .hs or .lhs filename.
Everything else is considered to be a linker object, and passed
straight through to the linker.
-}
looks_like_an_input :: String -> Bool
looks_like_an_input m = isSourceFilename m
|| looksLikeModuleName m
|| "-" `isPrefixOf` m
|| '.' `notElem` m
-- -----------------------------------------------------------------------------
-- Option sanity checks
-- | Ensure sanity of options.
--
-- Throws 'UsageError' or 'CmdLineError' if not.
checkOptions :: PostLoadMode -> DynFlags ->
[(String,Maybe Phase)] -> [String] -> IO ()
-- Final sanity checking before kicking off a compilation (pipeline).
checkOptions mode dflags srcs objs = do
-- Complain about any unknown flags
let unknown_opts = [ f | (f@('-':_), _) <- srcs ]
when (notNull unknown_opts) (unknownFlagsErr unknown_opts)
when (notNull (filter wayRTSOnly (ways dflags))
&& isInterpretiveMode mode) $
hPutStrLn stderr
("Warning: -debug, -threaded and -ticky are ignored by GHCi")
-- -prof and --interactive are not a good combination
when ((filter (not . wayRTSOnly) (ways dflags) /= interpWays)
&& isInterpretiveMode mode) $
do throwGhcException (UsageError
"--interactive can't be used with -prof or -unreg.")
-- -ohi sanity check
if (isJust (outputHi dflags) &&
(isCompManagerMode mode || srcs `lengthExceeds` 1))
  then throwGhcException
       (UsageError ("-ohi can only be used when compiling a" ++
                    " single source file"))
else do
-- -o sanity checking
if (srcs `lengthExceeds` 1 && isJust (outputFile dflags)
&& not (isLinkMode mode))
then throwGhcException
(UsageError "can't apply -o to multiple source files")
else do
let not_linking = not (isLinkMode mode) || isNoLink (ghcLink dflags)
when (not_linking && not (null objs)) $
hPutStrLn stderr ("Warning: the following files would be used as" ++
" linker inputs, but linking is not being done: " ++
unwords objs)
-- Check that there are some input files
-- (except in the interactive case)
if null srcs && (null objs || not_linking) && needsInputsMode mode
then throwGhcException (UsageError "no input files")
else do
-- Verify that output files point somewhere sensible.
verifyOutputFiles dflags
-- Compiler output options
-- called to verify that the output files & directories
-- point somewhere valid.
--
-- The assumption is that the directory portion of these output
-- options will have to exist by the time 'verifyOutputFiles'
-- is invoked.
--
verifyOutputFiles :: DynFlags -> IO ()
verifyOutputFiles dflags = do
-- not -odir: we create the directory for -odir if it doesn't exist (#2278).
let ofile = outputFile dflags
when (isJust ofile) $ do
let fn = fromJust ofile
flg <- doesDirNameExist fn
when (not flg) (nonExistentDir "-o" fn)
let ohi = outputHi dflags
when (isJust ohi) $ do
let hi = fromJust ohi
flg <- doesDirNameExist hi
when (not flg) (nonExistentDir "-ohi" hi)
where
nonExistentDir flg dir =
throwGhcException (CmdLineError ("error: directory portion of " ++
show dir ++ " does not exist (used with " ++
show flg ++ " option.)"))
-----------------------------------------------------------------------------
-- GHC modes of operation
type Mode = Either PreStartupMode PostStartupMode
type PostStartupMode = Either PreLoadMode PostLoadMode
data PreStartupMode
= ShowVersion -- ghc -V/--version
| ShowNumVersion -- ghc --numeric-version
| ShowSupportedExtensions -- ghc --supported-extensions
| ShowOptions -- ghc --show-options
showVersionMode, showNumVersionMode :: Mode
showVersionMode = mkPreStartupMode ShowVersion
showNumVersionMode = mkPreStartupMode ShowNumVersion
showSupportedExtensionsMode, showOptionsMode :: Mode
showSupportedExtensionsMode = mkPreStartupMode ShowSupportedExtensions
showOptionsMode = mkPreStartupMode ShowOptions
mkPreStartupMode :: PreStartupMode -> Mode
mkPreStartupMode = Left
isShowVersionMode :: Mode -> Bool
isShowVersionMode (Left ShowVersion) = True
isShowVersionMode _ = False
isShowNumVersionMode :: Mode -> Bool
isShowNumVersionMode (Left ShowNumVersion) = True
isShowNumVersionMode _ = False
data PreLoadMode
= ShowGhcUsage -- ghc -?
| ShowGhciUsage -- ghci -?
| ShowInfo -- ghc --info
| PrintWithDynFlags (DynFlags -> String) -- ghc --print-foo
showGhcUsageMode, showGhciUsageMode, showInfoMode :: Mode
showGhcUsageMode = mkPreLoadMode ShowGhcUsage
showGhciUsageMode = mkPreLoadMode ShowGhciUsage
showInfoMode = mkPreLoadMode ShowInfo
printSetting :: String -> Mode
printSetting k = mkPreLoadMode (PrintWithDynFlags f)
where f dflags = fromMaybe (panic ("Setting not found: " ++ show k))
$ lookup k (compilerInfo dflags)
mkPreLoadMode :: PreLoadMode -> Mode
mkPreLoadMode = Right . Left
isShowGhcUsageMode :: Mode -> Bool
isShowGhcUsageMode (Right (Left ShowGhcUsage)) = True
isShowGhcUsageMode _ = False
isShowGhciUsageMode :: Mode -> Bool
isShowGhciUsageMode (Right (Left ShowGhciUsage)) = True
isShowGhciUsageMode _ = False
data PostLoadMode
= ShowInterface FilePath -- ghc --show-iface
| DoMkDependHS -- ghc -M
| StopBefore Phase -- ghc -E | -C | -S
-- StopBefore StopLn is the default
| DoMake -- ghc --make
| DoInteractive -- ghc --interactive
| DoEval [String] -- ghc -e foo -e bar => DoEval ["bar", "foo"]
| DoAbiHash -- ghc --abi-hash
doMkDependHSMode, doMakeMode, doInteractiveMode, doAbiHashMode :: Mode
doMkDependHSMode = mkPostLoadMode DoMkDependHS
doMakeMode = mkPostLoadMode DoMake
doInteractiveMode = mkPostLoadMode DoInteractive
doAbiHashMode = mkPostLoadMode DoAbiHash
showInterfaceMode :: FilePath -> Mode
showInterfaceMode fp = mkPostLoadMode (ShowInterface fp)
stopBeforeMode :: Phase -> Mode
stopBeforeMode phase = mkPostLoadMode (StopBefore phase)
doEvalMode :: String -> Mode
doEvalMode str = mkPostLoadMode (DoEval [str])
mkPostLoadMode :: PostLoadMode -> Mode
mkPostLoadMode = Right . Right
isDoInteractiveMode :: Mode -> Bool
isDoInteractiveMode (Right (Right DoInteractive)) = True
isDoInteractiveMode _ = False
isStopLnMode :: Mode -> Bool
isStopLnMode (Right (Right (StopBefore StopLn))) = True
isStopLnMode _ = False
isDoMakeMode :: Mode -> Bool
isDoMakeMode (Right (Right DoMake)) = True
isDoMakeMode _ = False
#ifdef GHCI
isInteractiveMode :: PostLoadMode -> Bool
isInteractiveMode DoInteractive = True
isInteractiveMode _ = False
#endif
-- isInterpretiveMode: byte-code compiler involved
isInterpretiveMode :: PostLoadMode -> Bool
isInterpretiveMode DoInteractive = True
isInterpretiveMode (DoEval _) = True
isInterpretiveMode _ = False
needsInputsMode :: PostLoadMode -> Bool
needsInputsMode DoMkDependHS = True
needsInputsMode (StopBefore _) = True
needsInputsMode DoMake = True
needsInputsMode _ = False
-- True if we are going to attempt to link in this mode.
-- (we might not actually link, depending on the GhcLink flag)
isLinkMode :: PostLoadMode -> Bool
isLinkMode (StopBefore StopLn) = True
isLinkMode DoMake = True
isLinkMode DoInteractive = True
isLinkMode (DoEval _) = True
isLinkMode _ = False
isCompManagerMode :: PostLoadMode -> Bool
isCompManagerMode DoMake = True
isCompManagerMode DoInteractive = True
isCompManagerMode (DoEval _) = True
isCompManagerMode _ = False
-- -----------------------------------------------------------------------------
-- Parsing the mode flag
parseModeFlags :: [Located String]
-> IO (Mode,
[Located String],
[Located String])
parseModeFlags args = do
let ((leftover, errs1, warns), (mModeFlag, errs2, flags')) =
runCmdLine (processArgs mode_flags args)
(Nothing, [], [])
mode = case mModeFlag of
Nothing -> doMakeMode
Just (m, _) -> m
errs = errs1 ++ map (mkGeneralLocated "on the commandline") errs2
when (not (null errs)) $ throwGhcException $ errorsToGhcException errs
return (mode, flags' ++ leftover, warns)
type ModeM = CmdLineP (Maybe (Mode, String), [String], [Located String])
-- mode flags sometimes give rise to new DynFlags (eg. -C, see below)
-- so we collect the new ones and return them.
mode_flags :: [Flag ModeM]
mode_flags =
[ ------- help / version ----------------------------------------------
Flag "?" (PassFlag (setMode showGhcUsageMode))
, Flag "-help" (PassFlag (setMode showGhcUsageMode))
, Flag "V" (PassFlag (setMode showVersionMode))
, Flag "-version" (PassFlag (setMode showVersionMode))
, Flag "-numeric-version" (PassFlag (setMode showNumVersionMode))
, Flag "-info" (PassFlag (setMode showInfoMode))
, Flag "-show-options" (PassFlag (setMode showOptionsMode))
, Flag "-supported-languages"
(PassFlag (setMode showSupportedExtensionsMode))
, Flag "-supported-extensions"
(PassFlag (setMode showSupportedExtensionsMode))
] ++
[ Flag k' (PassFlag (setMode (printSetting k)))
| k <- ["Project version",
"Booter version",
"Stage",
"Build platform",
"Host platform",
"Target platform",
"Have interpreter",
"Object splitting supported",
"Have native code generator",
"Support SMP",
"Unregisterised",
"Tables next to code",
"RTS ways",
"Leading underscore",
"Debug on",
"LibDir",
"Global Package DB",
"C compiler flags",
"Gcc Linker flags",
"Ld Linker flags"],
let k' = "-print-" ++ map (replaceSpace . toLower) k
replaceSpace ' ' = '-'
replaceSpace c = c
] ++
------- interfaces ----------------------------------------------------
[ Flag "-show-iface" (HasArg (\f -> setMode (showInterfaceMode f)
"--show-iface"))
------- primary modes ------------------------------------------------
, Flag "c" (PassFlag (\f -> do setMode (stopBeforeMode StopLn) f
addFlag "-no-link" f))
, Flag "M" (PassFlag (setMode doMkDependHSMode))
, Flag "E" (PassFlag (setMode (stopBeforeMode anyHsc)))
, Flag "C" (PassFlag (setMode (stopBeforeMode HCc)))
, Flag "S" (PassFlag (setMode (stopBeforeMode As)))
, Flag "-make" (PassFlag (setMode doMakeMode))
, Flag "-interactive" (PassFlag (setMode doInteractiveMode))
, Flag "-abi-hash" (PassFlag (setMode doAbiHashMode))
, Flag "e" (SepArg (\s -> setMode (doEvalMode s) "-e"))
]
setMode :: Mode -> String -> EwM ModeM ()
setMode newMode newFlag = liftEwM $ do
(mModeFlag, errs, flags') <- getCmdLineState
let (modeFlag', errs') =
case mModeFlag of
Nothing -> ((newMode, newFlag), errs)
Just (oldMode, oldFlag) ->
case (oldMode, newMode) of
-- -c/--make are allowed together, and mean --make -no-link
_ | isStopLnMode oldMode && isDoMakeMode newMode
|| isStopLnMode newMode && isDoMakeMode oldMode ->
((doMakeMode, "--make"), [])
-- If we have both --help and --interactive then we
-- want showGhciUsage
_ | isShowGhcUsageMode oldMode &&
isDoInteractiveMode newMode ->
((showGhciUsageMode, oldFlag), [])
| isShowGhcUsageMode newMode &&
isDoInteractiveMode oldMode ->
((showGhciUsageMode, newFlag), [])
-- Otherwise, --help/--version/--numeric-version always win
| isDominantFlag oldMode -> ((oldMode, oldFlag), [])
| isDominantFlag newMode -> ((newMode, newFlag), [])
-- We need to accumulate eval flags like "-e foo -e bar"
(Right (Right (DoEval esOld)),
Right (Right (DoEval [eNew]))) ->
((Right (Right (DoEval (eNew : esOld))), oldFlag),
errs)
-- Saying e.g. --interactive --interactive is OK
_ | oldFlag == newFlag -> ((oldMode, oldFlag), errs)
-- Otherwise, complain
_ -> let err = flagMismatchErr oldFlag newFlag
in ((oldMode, oldFlag), err : errs)
putCmdLineState (Just modeFlag', errs', flags')
where isDominantFlag f = isShowGhcUsageMode f ||
isShowGhciUsageMode f ||
isShowVersionMode f ||
isShowNumVersionMode f
flagMismatchErr :: String -> String -> String
flagMismatchErr oldFlag newFlag
= "cannot use `" ++ oldFlag ++ "' with `" ++ newFlag ++ "'"
addFlag :: String -> String -> EwM ModeM ()
addFlag s flag = liftEwM $ do
(m, e, flags') <- getCmdLineState
putCmdLineState (m, e, mkGeneralLocated loc s : flags')
where loc = "addFlag by " ++ flag ++ " on the commandline"
-- ----------------------------------------------------------------------------
-- Run --make mode
doMake :: [(String,Maybe Phase)] -> Ghc ()
doMake srcs = do
let (hs_srcs, non_hs_srcs) = partition haskellish srcs
haskellish (f,Nothing) =
looksLikeModuleName f || isHaskellUserSrcFilename f || '.' `notElem` f
haskellish (_,Just phase) =
phase `notElem` [As, Cc, Cobjc, Cobjcpp, CmmCpp, Cmm, StopLn]
hsc_env <- GHC.getSession
-- if we have no haskell sources from which to do a dependency
-- analysis, then just do one-shot compilation and/or linking.
-- This means that "ghc Foo.o Bar.o -o baz" links the program as
-- we expect.
if (null hs_srcs)
then liftIO (oneShot hsc_env StopLn srcs)
else do
o_files <- mapM (\x -> liftIO $ compileFile hsc_env StopLn x)
non_hs_srcs
dflags <- GHC.getSessionDynFlags
let dflags' = dflags { ldInputs = map (FileOption "") o_files
++ ldInputs dflags }
_ <- GHC.setSessionDynFlags dflags'
targets <- mapM (uncurry GHC.guessTarget) hs_srcs
GHC.setTargets targets
ok_flag <- GHC.load LoadAllTargets
when (failed ok_flag) (liftIO $ exitWith (ExitFailure 1))
return ()
-- ---------------------------------------------------------------------------
-- --show-iface mode
doShowIface :: DynFlags -> FilePath -> IO ()
doShowIface dflags file = do
hsc_env <- newHscEnv dflags
showIface hsc_env file
-- ---------------------------------------------------------------------------
-- Various banners and verbosity output.
showBanner :: PostLoadMode -> DynFlags -> IO ()
showBanner _postLoadMode dflags = do
let verb = verbosity dflags
#ifdef GHCI
-- Show the GHCi banner
when (isInteractiveMode _postLoadMode && verb >= 1) $ putStrLn ghciWelcomeMsg
#endif
-- Display details of the configuration in verbose mode
when (verb >= 2) $
do hPutStr stderr "Glasgow Haskell Compiler, Version "
hPutStr stderr cProjectVersion
hPutStr stderr ", stage "
hPutStr stderr cStage
hPutStr stderr " booted by GHC version "
hPutStrLn stderr cBooterVersion
-- We print out a Read-friendly string, but a prettier one than the
-- Show instance gives us
showInfo :: DynFlags -> IO ()
showInfo dflags = do
let sq x = " [" ++ x ++ "\n ]"
putStrLn $ sq $ intercalate "\n ," $ map show $ compilerInfo dflags
showSupportedExtensions :: IO ()
showSupportedExtensions = mapM_ putStrLn supportedLanguagesAndExtensions
showVersion :: IO ()
showVersion = putStrLn (cProjectName ++ ", NotGHC, version " ++ cProjectVersion)
showOptions :: IO ()
showOptions = putStr (unlines availableOptions)
where
availableOptions = map ((:) '-') $
getFlagNames mode_flags ++
getFlagNames flagsDynamic ++
(filterUnwantedStatic . getFlagNames $ flagsStatic)
++ flagsStaticNames
getFlagNames opts = map getFlagName opts
getFlagName (Flag name _) = name
-- this is a hack to get rid of two unwanted entries that get listed
-- as static flags. Hopefully this hack will disappear one day together
-- with static flags
filterUnwantedStatic = filter (\x -> not (x `elem` ["f", "fno-"]))
showGhcUsage :: DynFlags -> IO ()
showGhcUsage = showUsage False
showGhciUsage :: DynFlags -> IO ()
showGhciUsage = showUsage True
showUsage :: Bool -> DynFlags -> IO ()
showUsage ghci dflags = do
let usage_path = if ghci then ghciUsagePath dflags
else ghcUsagePath dflags
usage <- readFile usage_path
dump usage
where
dump "" = return ()
dump ('$':'$':s) = putStr progName >> dump s
dump (c:s) = putChar c >> dump s
dumpFinalStats :: DynFlags -> IO ()
dumpFinalStats dflags =
when (gopt Opt_D_faststring_stats dflags) $ dumpFastStringStats dflags
dumpFastStringStats :: DynFlags -> IO ()
dumpFastStringStats dflags = do
buckets <- getFastStringTable
let (entries, longest, has_z) = countFS 0 0 0 buckets
msg = text "FastString stats:" $$
nest 4 (vcat [text "size: " <+> int (length buckets),
text "entries: " <+> int entries,
text "longest chain: " <+> int longest,
text "has z-encoding: " <+> (has_z `pcntOf` entries)
])
-- we usually get more "has z-encoding" than "z-encoded", because
-- when we z-encode a string it might hash to the exact same string,
-- which is not counted as "z-encoded". Only strings whose
-- Z-encoding is different from the original string are counted in
-- the "z-encoded" total.
putMsg dflags msg
where
x `pcntOf` y = int ((x * 100) `quot` y) <> char '%'
countFS :: Int -> Int -> Int -> [[FastString]] -> (Int, Int, Int)
countFS entries longest has_z [] = (entries, longest, has_z)
countFS entries longest has_z (b:bs) =
let
len = length b
longest' = max len longest
entries' = entries + len
has_zs = length (filter hasZEncoding b)
in
countFS entries' longest' (has_z + has_zs) bs
-- -----------------------------------------------------------------------------
-- ABI hash support
{-
ghc --abi-hash Data.Foo System.Bar
Generates a combined hash of the ABI for modules Data.Foo and
System.Bar. The modules must already be compiled, and appropriate -i
options may be necessary in order to find the .hi files.
This is used by Cabal for generating the InstalledPackageId for a
package. The InstalledPackageId must change when the visible ABI of
-- the package changes, so during registration Cabal calls ghc --abi-hash
to get a hash of the package's ABI.
-}
abiHash :: [(String, Maybe Phase)] -> Ghc ()
abiHash strs = do
hsc_env <- getSession
let dflags = hsc_dflags hsc_env
liftIO $ do
let find_it str = do
let modname = mkModuleName str
r <- findImportedModule hsc_env modname Nothing
case r of
Found _ m -> return m
_error -> throwGhcException $ CmdLineError $ showSDoc dflags $
cannotFindInterface dflags modname r
mods <- mapM find_it (map fst strs)
let get_iface modl = loadUserInterface False (text "abiHash") modl
ifaces <- initIfaceCheck hsc_env $ mapM get_iface mods
bh <- openBinMem (3*1024) -- just less than a block
put_ bh hiVersion
-- package hashes change when the compiler version changes (for now)
-- see #5328
mapM_ (put_ bh . mi_mod_hash) ifaces
f <- fingerprintBinMem bh
putStrLn (showPpr dflags f)
-- -----------------------------------------------------------------------------
-- Util
unknownFlagsErr :: [String] -> a
unknownFlagsErr fs = throwGhcException $ UsageError $ concatMap oneError fs
where
oneError f =
"unrecognised flag: " ++ f ++ "\n" ++
(case fuzzyMatch f (nub allFlags) of
[] -> ""
suggs -> "did you mean one of:\n" ++ unlines (map (" " ++) suggs))
{- Note [-Bsymbolic and hooks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Bsymbolic is a flag that prevents the binding of references to global
symbols to symbols outside the shared library being compiled (see `man
ld`). When dynamically linking, we don't use -Bsymbolic on the RTS
package: that is because we want hooks to be overridden by the user,
we don't want to constrain them to the RTS package.
Unfortunately this seems to have broken somehow on OS X: as a result,
defaultHooks (in hschooks.c) is not called, which does not initialize
the GC stats. As a result, this breaks things like `:set +s` in GHCi
(#8754). As a hacky workaround, we instead call 'defaultHooks'
-- directly to initialize the flags in the RTS.
-- A byproduct of this, I believe, is that hooks are likely broken on OS
-- X when dynamically linking. But this probably doesn't affect most
-- people, since we link GHC dynamically while most things themselves
-- link statically.
-}
foreign import ccall safe "initGCStatistics"
initGCStatistics :: IO ()
|
a-ford/notghc
|
NotGHC.hs
|
bsd-3-clause
| 33,381
| 2
| 27
| 9,474
| 6,752
| 3,489
| 3,263
| -1
| -1
|
--
--
--
------------------
-- Exercise 11.14.
------------------
--
--
--
module E'11'14 where
-- uncurry :: (a -> b -> c) -> ( (a , b) -> c )
-- uncurry f (x , y) = f x y
-- Because -> is right associative, the type of uncurry
-- is equivalent to "(a -> b -> c) -> (a , b) -> c".
-- "uncurry ($)":
-----------------
-- The effect of applying "uncurry" to "($)" is that the curried form of "($)"
-- will be converted to an uncurried form where the first two arguments
-- have to be supplied as a tuple.
{- GHCi>
:t ($)
-}
-- ($) :: (a -> b) -> a -> b
-- ($) :: (a -> b) -> a -> b
-- -------- - -
-- | | | | (-- 1)
-- uncurry :: (a -> b -> c) -> ( (a , b) -> c )
--
--
-- uncurry :: (a -> b -> c) -> ( (a , b) -> c )
-- | 'Application of "($)"'
-- | 'Rule of cancellation'
-- => (uncurry ($)) :: ( (a' , b') -> c' )
-- | Right associativity of "->"
-- => (uncurry ($)) :: (a' , b') -> c'
-- | 'Type replacement' / Polymorphism, see (-- 1)
-- => (uncurry ($)) :: (a -> b , a) -> b
{- GHCi>
:t (uncurry ($))
-}
-- (uncurry ($)) :: (b -> c , b) -> c
-- "uncurry (:)":
-----------------
-- Again the effect will be the conversion of the function "(:)" from a curried to an uncurried form.
{- GHCi>
:t (:)
-}
-- (:) :: a -> [a] -> [a]
-- (:) :: a -> [a] -> [a]
-- - --- ---
-- | | | | (-- 1)
-- uncurry :: (a -> b -> c ) -> ( (a , b) -> c )
--
--
-- uncurry :: (a -> b -> c) -> ( (a , b) -> c )
-- | 'Application of "(:)"'
-- | 'Rule of cancellation'
-- => (uncurry (:)) :: ( (a' , b') -> c' )
-- | Right associativity of "->"
-- => (uncurry (:)) :: (a' , b') -> c'
-- | 'Type replacement' / Polymorphism, see (-- 1)
-- => (uncurry (:)) :: (a , [a]) -> [a]
{- GHCi>
:t (uncurry (:))
-}
-- (uncurry (:)) :: (a , [a]) -> [a]
-- "uncurry (.)":
-----------------
-- Again the effect will be the conversion of the function "(.)" from a curried to an uncurried form.
{- GHCi>
:t (.)
-}
-- (.) :: (b -> c) -> (a -> b) -> a -> c
-- (.) :: (b -> c) -> (a -> b) -> a -> c
-- -------- -------- ------
-- | | | | (-- 1)
-- uncurry :: (a -> b -> c ) -> ( (a , b) -> c )
--
--
-- uncurry :: (a -> b -> c) -> ( (a , b) -> c )
-- | 'Application of "(.)"'
-- | 'Rule of cancellation'
-- => (uncurry (.)) :: ( (a' , b') -> c' )
-- | Right associativity of "->"
-- => (uncurry (.)) :: (a' , b') -> c'
-- | 'Type replacement' / Polymorphism, see (-- 1)
-- => (uncurry (.)) :: (b -> c , a -> b) -> a -> c
{- GHCi>
:t (uncurry (.))
-}
-- (uncurry (.)) :: (b -> c , a -> b) -> a -> c
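-- Editor's sketch (not part of the original exercise): the derived types
-- written out as top-level definitions, so GHC can confirm them directly.
-- The names "applyPair", "consPair" and "composePair" are made up here.
applyPair :: (b -> c , b) -> c
applyPair = uncurry ($)
consPair :: (a , [a]) -> [a]
consPair = uncurry (:)
composePair :: (b -> c , a -> b) -> a -> c
composePair = uncurry (.)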
|
pascal-knodel/haskell-craft
|
_/links/E'11'14.hs
|
mit
| 3,491
| 0
| 2
| 1,634
| 82
| 81
| 1
| 1
| 0
|
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./Hybrid/AS_Hybrid.der.hs
Copyright : (c) T.Mossakowski, W.Herding, C.Maeder, Uni Bremen 2004-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : till@informatik.uni-bremen.de
Stability : provisional
Portability : portable
Abstract syntax for hybrid logic extension of CASL
Only the added syntax is specified
-}
module Hybrid.AS_Hybrid where
import Common.Id
import Common.AS_Annotation
import CASL.AS_Basic_CASL
import Data.Data
-- DrIFT command
{-! global: GetRange !-}
type H_BASIC_SPEC = BASIC_SPEC H_BASIC_ITEM H_SIG_ITEM H_FORMULA
type AnHybFORM = Annoted (FORMULA H_FORMULA)
data H_BASIC_ITEM = Simple_mod_decl [Annoted SIMPLE_ID] [AnHybFORM] Range
| Term_mod_decl [Annoted SORT] [AnHybFORM] Range
| Simple_nom_decl [Annoted SIMPLE_ID] [AnHybFORM] Range
deriving (Show, Typeable, Data)
data RIGOR = Rigid | Flexible deriving (Show, Typeable, Data)
data H_SIG_ITEM =
Rigid_op_items RIGOR [Annoted (OP_ITEM H_FORMULA)] Range
-- pos: op, semi colons
| Rigid_pred_items RIGOR [Annoted (PRED_ITEM H_FORMULA)] Range
-- pos: pred, semi colons
deriving (Show, Typeable, Data)
data MODALITY = Simple_mod SIMPLE_ID | Term_mod (TERM H_FORMULA)
deriving (Show, Eq, Ord, Typeable, Data)
data NOMINAL = Simple_nom SIMPLE_ID
deriving (Show, Eq, Ord, Typeable, Data)
data H_FORMULA = At NOMINAL (FORMULA H_FORMULA) Range
| BoxOrDiamond Bool MODALITY (FORMULA H_FORMULA) Range
| Here NOMINAL Range
| Univ NOMINAL (FORMULA H_FORMULA) Range
| Exist NOMINAL (FORMULA H_FORMULA) Range
deriving (Show, Eq, Ord, Typeable, Data)
|
spechub/Hets
|
Hybrid/AS_Hybrid.der.hs
|
gpl-2.0
| 1,825
| 0
| 10
| 461
| 390
| 214
| 176
| 27
| 0
|
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
-----------------------------------------------------------------------------------------
{-| Module : WxcObject
Copyright : (c) Daan Leijen 2003, 2004
License : wxWindows
Maintainer : wxhaskell-devel@lists.sourceforge.net
Stability : provisional
Portability : portable
Basic object type.
-}
-----------------------------------------------------------------------------------------
module Graphics.UI.WXCore.WxcObject(
-- * Object types
Object, objectNull, objectIsNull, objectCast, objectIsManaged
, objectFromPtr, objectFromManagedPtr
, withObjectPtr
, objectFinalize, objectNoFinalize
-- * Managed objects
, ManagedPtr, TManagedPtr, CManagedPtr
) where
import Control.Exception
import System.IO.Unsafe( unsafePerformIO )
import Foreign.C
import Foreign.Ptr
import Foreign.Storable
import Foreign.Marshal.Alloc
import Foreign.Marshal.Array
{- note: for GHC 6.10.2 or higher, it is recommended to use "import Foreign.Concurrent"
See http://www.haskell.org/pipermail/cvs-ghc/2009-January/047120.html -}
import Foreign.ForeignPtr hiding (newForeignPtr,addForeignPtrFinalizer)
import Foreign.Concurrent
{-----------------------------------------------------------------------------------------
Objects
-----------------------------------------------------------------------------------------}
{- | An @Object a@ is a pointer to an object of type @a@. The @a@ parameter is used
to encode the inheritance relation. When the type parameter is unit @()@, it denotes
an object of exactly that class, when the parameter is a type variable @a@, it
specifies an object that is at least an instance of that class. For example in
wxWidgets, we have the following class hierarchy:
> EvtHandler
> |- Window
> |- Frame
> |- Control
> |- Button
> |- Radiobox
In wxHaskell, all the creation functions will return objects of exactly that
class and use the @()@ type:
> frameCreate :: Window a -> ... -> IO (Frame ())
> buttonCreate :: Window a -> ... -> IO (Button ())
> ...
In contrast, all the /this/ (or /self/) pointers of methods can take objects
of any instance of that class and have a type variable, for example:
> windowSetClientSize :: Window a -> Size -> IO ()
> controlSetLabel :: Control a -> String -> IO ()
> buttonSetDefault :: Button a -> IO ()
This means that we can use @windowSetClientSize@ on any window, including
buttons and frames, but we can only use @controlSetLabel@ on controls, not
including frames.
In wxHaskell, this works since a @Frame ()@ is actually a type synonym for
@Window (CFrame ())@ (where @CFrame@ is an abstract data type). We can thus
pass a value of type @Frame ()@ to anything that expects some @Window a@.
For a button this works too, as it is a synonym for @Control (CButton ())@
which is in turn a synonym for @Window (CControl (CButton ()))@. Note that
we can\'t pass a frame to something that expects a value of type @Control a@.
Of course, a @Window a@ is actually a type synonym for @EvtHandler (CWindow a)@.
If you study the documentation in "Graphics.UI.WX.Classes" closely, you
can discover where this chain ends :-).
Objects are not automatically deleted. Normally you can use a delete function
like @windowDelete@ to delete an object. However, almost all objects in the
wxWidgets library are automatically deleted by the library. The only objects
that should be used with care are resources as bitmaps, fonts and brushes.
-}
data Object a = Object !(Ptr a)
| Managed !(ForeignPtr (TManagedPtr a))
-- | Managed pointer (proxy) objects
type ManagedPtr a = Ptr (CManagedPtr a)
type TManagedPtr a = CManagedPtr a
data CManagedPtr a = CManagedPtr
instance Eq (Object a) where
obj1 == obj2
= unsafePerformIO $
withObjectPtr obj1 $ \p1 ->
withObjectPtr obj2 $ \p2 ->
return (p1 == p2)
instance Ord (Object a) where
compare obj1 obj2
= unsafePerformIO $
withObjectPtr obj1 $ \p1 ->
withObjectPtr obj2 $ \p2 ->
return (compare p1 p2)
instance Show (Object a) where
show obj
= unsafePerformIO $
withObjectPtr obj $ \p ->
return (show p)
-- | A null object. Use with care.
objectNull :: Object a
objectNull
= Object nullPtr
-- | Is this a managed object?
objectIsManaged :: Object a -> Bool
objectIsManaged obj
= case obj of
Managed fp -> True
_ -> False
-- | Test for null object.
objectIsNull :: Object a -> Bool
objectIsNull obj
= unsafePerformIO $
withObjectPtr obj $ \p -> return (p == nullPtr)
-- | Cast an object to another type. Use with care.
objectCast :: Object a -> Object b
objectCast obj
= case obj of
Object p -> Object (castPtr p)
Managed fp -> Managed (castForeignPtr fp)
-- | Do something with the object pointer.
withObjectPtr :: Object a -> (Ptr a -> IO b) -> IO b
withObjectPtr obj f
= case obj of
Object p -> f p
Managed fp -> withForeignPtr fp $ \mp ->
do p <- wxManagedPtr_GetPtr mp
f p
-- | Finalize a managed object manually. (No effect on unmanaged objects.)
objectFinalize :: Object a -> IO ()
objectFinalize obj
= case obj of
Object p -> return ()
Managed fp -> withForeignPtr fp $ wxManagedPtr_Finalize
-- | Remove the finalizer on a managed object. (No effect on unmanaged objects.)
objectNoFinalize :: Object a -> IO ()
objectNoFinalize obj
= case obj of
Object p -> return ()
Managed fp -> withForeignPtr fp $ wxManagedPtr_NoFinalize
-- | Create an unmanaged object.
objectFromPtr :: Ptr a -> Object a
objectFromPtr p
= Object p
-- | Create a managed object with a given finalizer.
objectFromManagedPtr :: ManagedPtr a -> IO (Object a)
objectFromManagedPtr mp
= do fun <- wxManagedPtrDeleteFunction
-- wxManagedPtr_NoFinalize mp {- turn off finalization -}
fp <- newForeignPtr mp (fun mp)
return (Managed fp)
wxManagedPtrDeleteFunction :: IO (ManagedPtr a -> IO ())
wxManagedPtrDeleteFunction
= do fun <- wxManagedPtr_GetDeleteFunction
return $ wxManagedPtr_CallbackFunction fun
{--------------------------------------------------------------------------
Managed pointers
--------------------------------------------------------------------------}
foreign import ccall wxManagedPtr_GetPtr :: Ptr (TManagedPtr a) -> IO (Ptr a)
foreign import ccall wxManagedPtr_Finalize :: ManagedPtr a -> IO ()
foreign import ccall wxManagedPtr_NoFinalize :: ManagedPtr a -> IO ()
foreign import ccall wxManagedPtr_GetDeleteFunction :: IO (FunPtr (ManagedPtr a -> IO ()))
foreign import ccall "dynamic" wxManagedPtr_CallbackFunction :: FunPtr (ManagedPtr a -> IO ()) -> ManagedPtr a -> IO ()
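-- Editor's sketch (not part of the original module): a small helper showing
-- the intended use of 'objectIsNull' and 'withObjectPtr'; 'describeObject'
-- is a hypothetical name.
describeObject :: Object a -> IO String
describeObject obj
  | objectIsNull obj = return "null object"
  | otherwise        = withObjectPtr obj $ \p -> return ("object at " ++ show p)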
|
ekmett/wxHaskell
|
wxcore/src/haskell/Graphics/UI/WXCore/WxcObject.hs
|
lgpl-2.1
| 7,005
| 0
| 13
| 1,577
| 1,044
| 532
| 512
| 93
| 2
|
{-# LANGUAGE OverloadedStrings #-}
{- |
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer : jhendrix@galois.com
Stability : experimental
Portability : non-portable (language extensions)
-}
module Tests.Rewriter
( rewriter_tests
) where
import Verifier.SAW.Conversion
import Verifier.SAW.Prelude
import Verifier.SAW.Rewriter
import Verifier.SAW.SharedTerm
import Test.Tasty
import Test.Tasty.HUnit
scMkTerm :: SharedContext -> TermBuilder Term -> IO Term
scMkTerm sc t = runTermBuilder t (scGlobalDef sc) (scTermF sc)
rewriter_tests :: [TestTree]
rewriter_tests =
[ prelude_bveq_sameL_test ]
prelude_bveq_sameL_test :: TestTree
prelude_bveq_sameL_test =
testCase "prelude_bveq_sameL_test" $ do
sc0 <- mkSharedContext
scLoadPreludeModule sc0
let eqs = [ "Prelude.bveq_sameL" ]
ss <- scSimpset sc0 [] eqs []
let sc = rewritingSharedContext sc0 ss
natType <- scMkTerm sc (mkDataType "Prelude.Nat" [] [])
n <- scFreshGlobal sc "n" natType
let boolType = mkDataType "Prelude.Bool" [] []
bvType <- scMkTerm sc (mkDataType "Prelude.Vec" [] [pure n, boolType])
x <- scFreshGlobal sc "x" bvType
z <- scFreshGlobal sc "z" bvType
let lhs =
mkGlobalDef "Prelude.bvEq"
`pureApp` n
`pureApp` x
`mkApp` (mkGlobalDef "Prelude.bvAdd" `pureApp` n `pureApp` x `pureApp` z)
let rhs =
mkGlobalDef "Prelude.bvEq"
`pureApp` n
`mkApp` (mkGlobalDef "Prelude.bvNat" `pureApp` n `mkApp` mkNatLit 0)
`pureApp` z
lhs_term <- scMkTerm sc lhs
rhs_term <- scMkTerm sc rhs
assertEqual "Incorrect conversion\n" lhs_term rhs_term
|
GaloisInc/saw-script
|
saw-core/tests/src/Tests/Rewriter.hs
|
bsd-3-clause
| 1,686
| 3
| 11
| 378
| 439
| 230
| 209
| 41
| 1
|
-- Copyright (c) 2016 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
{-# Language FlexibleInstances, FlexibleContexts, UndecidableInstances,
MultiParamTypeClasses, DeriveTraversable, DeriveFoldable,
DeriveFunctor #-}
-- | The Salt core language. Salt's surface syntax is transliterated
-- into Core, which is then type-checked and compiled. Core is
-- generally not meant for human consumption.
module Language.Salt.Core.Syntax(
Literal(..),
Quantifier(..),
Pattern(..),
Element(..),
Case(..),
Intro(..),
Elim(..),
Cmd(..),
Comp(..),
elimTermPos
) where
import Bound
import Bound.Scope.ExtraInstances()
import Bound.Var.ExtraInstances()
import Control.Applicative
import Control.Monad hiding (mapM)
import Control.Monad.Positions
import Control.Monad.Symbols
import Data.Array hiding (accum)
import Data.Default
import Data.Foldable
import Data.Hashable
import Data.Hashable.Extras
import Data.Hashable.ExtraInstances()
import Data.HashMap.Strict(HashMap)
import Data.List(sortBy)
import Data.Position.DWARFPosition
import Data.Ratio
import Data.Traversable
import Data.Word
import Language.Salt.Format
import Prelude hiding (foldl, mapM)
import Prelude.Extras(Eq1(..), Ord1(..))
import Prelude.Extras.ExtraInstances()
import Text.XML.Expat.Pickle
import Text.XML.Expat.Tree(NodeG)
import Text.Format hiding ((<$>))
import qualified Data.HashMap.Strict as HashMap
import qualified Data.ByteString.UTF8 as Strict
import qualified Data.ByteString.Lazy.UTF8 as Lazy
-- | A literal value.
data Literal =
-- | A number literal.
Num {
-- | The number value.
numVal :: !Rational
}
-- | A string literal.
| Str {
-- | The string value.
strVal :: !Strict.ByteString
}
-- | A Character literal.
| Char {
-- | The character value.
charVal :: !Char
}
deriving (Ord, Eq)
-- | Quantifier types.
data Quantifier =
-- | Universal quantifiers.
Forall
-- | Existential quantifiers.
| Exists
deriving (Ord, Eq, Enum)
-- | A pattern binding. Represents how to deconstruct a value and
-- bind it to variables.
data Pattern bound =
-- | A deconstruction. Takes a type apart. Some types have
-- constructors; nameless record types don't (they should use the
-- "unused" symbol).
Deconstruct {
-- | The type constructor. For nameless records, use the
-- "unused" symbol.
deconstructConstructor :: !bound,
-- | The fields in the record being bound. Note: all fields are
-- given names by transliteration.
deconstructBinds :: HashMap bound (Pattern bound),
-- | Whether or not the binding is strict (ie. it omits some names)
deconstructStrict :: !Bool,
-- | The position in source from which this originates.
deconstructPos :: !(DWARFPosition [bound] [bound])
}
-- | An "as" binding. Allows part of a pattern to be bound to a
-- name, but further deconstructed by another pattern. For
-- example "x as (y, z)".
| As {
-- | The outer name, to which the entire datatype is bound.
asName :: !bound,
-- | The inner binding, which further deconstructs the binding.
asBind :: Pattern bound,
-- | The position in source from which this originates.
asPos :: !(DWARFPosition [bound] [bound])
}
-- | A simple name binding. This does the same thing as an as
-- pattern, but does not further deconstruct the binding.
--
-- In the intended use case, we have a special symbol representing
-- a wildcard (namely, the unused symbol), so we don't bother
-- defining another constructor for wildcards.
| Name {
-- | The bound variable type being bound.
nameSym :: !bound,
-- | The position in source from which this originates.
namePos :: !(DWARFPosition [bound] [bound])
}
-- | A constant. Constrains the binding to the given value.
| Exact {
-- | The literal.
exactLiteral :: !Literal,
-- | The position in source from which this originates.
exactPos :: !(DWARFPosition [bound] [bound])
}
-- | A case. Consists of a pattern and a body wrapped in a scope.
-- Used to describe functions and computations with parameters.
data Case bound free =
Case {
-- | the pattern for this case.
casePat :: Pattern bound,
-- | The body of the case.
caseBody :: Scope bound (Intro bound) free,
-- | The position in source from which this originates.
casePos :: !(DWARFPosition [bound] [bound])
}
-- | An element. This is either an argument in a function type or a
-- field in a record type. The pattern introduces variables in
-- subsequent elements.
data Element bound free =
Element {
-- | The name of the element.
elemName :: !bound,
-- | The binding pattern of the element.
elemPat :: Pattern bound,
-- | The type of the element.
elemType :: Scope bound (Intro bound) free,
-- | The position in source from which this originates.
elemPos :: !(DWARFPosition [bound] [bound])
}
-- | Terms. Represents pure terms in the language. Terms are further
-- subdivided into types, propositions, and elimination and
-- introduction terms (both of which represent values).
--
-- Types and propositions do not support decidable equality, and thus
-- cannot be computed upon.
--
-- Values can be computed upon, and can appear in a pattern match.
-- Elimination terms are those terms whose type can be inferred in
-- type checking. Introduction terms are those terms that require a
-- type to be checked against.
data Intro bound free =
-- Types. These do not support decidable equality. As such, they
-- cannot be the result of a computation, and cannot appear in
-- patterns.
-- | Dependent product type. This is the type given to functions.
FuncType {
-- | The binding order for arguments. This is used to determine
-- the order in which to evaluate scopes.
funcTypeArgs :: [Element bound free],
-- | Array used to map tuple arguments to the correct parameter names.
funcTypeArgOrder :: !(Array Word bound),
-- | The return type of the function, which can reference the
-- value of any argument by their binding name.
funcTypeRetTy :: Scope bound (Intro bound) free,
-- | The position in source from which this originates.
funcTypePos :: !(DWARFPosition [bound] [bound])
}
-- | Dependent sum type. This is the type given to structures.
| RecordType {
-- | The remaining elements of the sum type. The first element
-- in the binding order is a degenerate scope; the remaining
-- scopes may reference any previous elements from the binding
-- order, but not themselves or any future scopes.
--
-- Note: all fields are given names by transliteration.
recTypeBody :: [Element bound free],
-- | Array used to convert tuples to records of this type.
recTypeOrder :: !(Array Word bound),
-- | The position in source from which this originates.
recTypePos :: !(DWARFPosition [bound] [bound])
}
-- | Refinement type. This type represents all members of a type
-- satisfying a given proposition.
| RefineType {
-- | The base type.
refineType :: Intro bound free,
-- | Binding patterns and propositions for values of the base
-- type. These express the constraints on the base type.
refineCases :: [Case bound free],
-- | The position in source from which this originates.
refinePos :: !(DWARFPosition [bound] [bound])
}
-- | Computation type. This type represents a computation, and
-- includes both its result type and a specification of its
-- behavior.
| CompType {
-- | The result type of the computation.
compType :: Intro bound free,
-- | Binding patterns and behavior specifications for values of
-- the return type. These express the constraints on the base
-- type.
compCases :: [Case bound free],
-- | The position in source from which this originates.
compTypePos :: !(DWARFPosition [bound] [bound])
}
-- Propositions. These do not support decidable equality. As such,
-- they cannot be the result of a computation, and cannot appear in
-- a pattern.
-- | Quantified proposition.
| Quantified {
-- | The kind of quantification this represents (forall/exists).
quantKind :: !Quantifier,
-- | The type of the quantifier variable. If a sum type is
-- used, this will be treated similarly to a multi-argument
-- function.
quantType :: Intro bound free,
-- | A case statement which denotes a proposition.
quantCases :: [Case bound free],
-- | The position in source from which this originates.
quantPos :: !(DWARFPosition [bound] [bound])
}
-- | A lambda expression. Represents a function value. Lambdas
-- cannot appear in patterns, though they can be computed on.
--
-- Lambdas will ultimately need to be extended to support
-- overloading, which adds an inherent multiple dispatch ability.
-- This is obviously highly nontrivial to implement.
| Lambda {
-- | The cases describing this function's behavior.
lambdaCases :: [Case bound free],
-- | The position in source from which this originates.
lambdaPos :: !(DWARFPosition [bound] [bound])
}
-- | An eta expansion. This is present for type checking only.
-- This represents a "frozen" substitution.
--
-- XXX Quite possibly this will be removed
| Eta {
etaTerm :: Elim bound free,
etaType :: Intro bound free,
-- | The position in source from which this originates.
etaPos :: !(DWARFPosition [bound] [bound])
}
-- | A record. Records can be named or ordered in the surface
-- syntax. Ordered records are transliterated into named
-- records, with the fields "1", "2", and so on.
| Record {
-- | The bindings for this record. These are introduction terms.
recFields :: !(HashMap bound (Intro bound free)),
-- | The position in source from which this originates.
recPos :: !(DWARFPosition [bound] [bound])
}
-- | A tuple. These denote record values, but their
-- interpretation depends on the expected type.
| Tuple {
-- | The fields of the tuple.
tupleFields :: !(Array Word (Intro bound free)),
-- | The position in source from which this originates.
tuplePos :: !(DWARFPosition [bound] [bound])
}
-- | A collection of one or more terms, each of which is
-- bound to a name. Each of the members of the group may reference
-- each other.
| Fix {
-- | The self-reference symbol.
fixSym :: !bound,
-- | The term, parameterized by the self-reference @fixSym@.
fixTerm :: !(Scope bound (Intro bound) free),
-- | The position in source from which this originates.
fixPos :: !(DWARFPosition [bound] [bound])
}
-- | A computation value. This is essentially a "frozen"
-- computation.
| Comp {
compBody :: Comp bound free,
-- | The position in source from which this originates.
compPos :: !(DWARFPosition [bound] [bound])
}
-- | An elimination term presented as an introduction term.
| Elim {
-- | The wrapped elimination term.
elimTerm :: Elim bound free
}
-- | A literal value.
| Literal {
-- | The literal value.
literalVal :: !Literal,
-- | The position in source from which this originates.
literalPos :: !(DWARFPosition [bound] [bound])
}
-- | A symbol whose meaning is understood implicitly by the
-- compiler. This includes intrinsic functions, as well as
-- anything that is auto-generated during transliteration.
| Constructor {
-- | The name of the constructor.
constructorSym :: !bound,
-- | The position in source from which this originates.
constructorPos :: !(DWARFPosition [bound] [bound])
}
-- | Placeholder for a malformed term, allowing type checking to
-- continue in spite of errors.
| BadIntro {
-- | The position in source from which this originates.
badIntroPos :: !(DWARFPosition [bound] [bound])
}
-- | Elimination Terms. These terms generate a type in type checking.
data Elim bound free =
-- | Call term. Represents a call to a function. The type of the
-- term comes from the called function's return type.
--
-- As with structures, calls can be named or ordered in surface
-- syntax. Also similar to structures, ordered calls are
-- transliterated into named calls with parameter names "1", "2",
-- and so on.
Call {
-- | The argument to the call. Multiple arguments are
-- implemented using either a record or a tuple.
callArg :: Intro bound free,
-- | The function being called. This must be an elimination
-- term.
callFunc :: Elim bound free,
-- | The position in source from which this originates.
callPos :: !(DWARFPosition [bound] [bound])
}
-- | A typed term. This is an introduction term with an explicit
-- type tag, which makes it an elimination term.
| Typed {
-- | The introduction term being typed.
typedTerm :: Intro bound free,
-- | The type of the introduction term.
typedType :: Intro bound free,
-- | The position in source from which this originates.
typedPos :: !(DWARFPosition [bound] [bound])
}
-- | A variable symbol. Since we know the types of all variables,
-- this is an elimination term.
| Var {
-- | The underlying symbol.
varSym :: !free,
-- | The position in source from which this originates.
varPos :: !(DWARFPosition [bound] [bound])
}
-- | Placeholder for a malformed term, allowing type checking to
-- continue in spite of errors.
| BadElim {
-- | The position in source from which this originates.
badElimPos :: !(DWARFPosition [bound] [bound])
}
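-- A hedged sketch (not original code) of how the constructors above
-- nest for a simple call f(a): the callee must itself be an
-- elimination term, and multiple arguments would be packed into a
-- Record or Tuple as described for Call.  Here f, a and pos are
-- placeholders:
--
-- > Call { callFunc = Var { varSym = f, varPos = pos }
-- >      , callArg  = a
-- >      , callPos  = pos }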
-- | Commands. These represent individual statements, or combinations
-- thereof, which do not bind variables.
--
-- We don't need a lot of control flow structures. Loops are handled
-- by the Fix structure, conditionals are handled by the pattern
-- matching inherent in Lambdas. Widening and narrowing (i.e. typecase
-- and downcast) can be handled effectively by adding a multiple
-- dispatch capability.
--
-- The Core-to-LLVM compiler should therefore be smart enough to
-- figure out when it can implement a Fix as a loop, and when it can
-- inline lambdas, and when it can figure out dispatch decisions
-- statically. This is also desirable in its own right.
data Cmd bound free =
Value {
-- | The term representing the value of this command
valTerm :: Intro bound free,
-- | The position in source from which this originates.
valPos :: !(DWARFPosition [bound] [bound])
}
-- | Evaluate a computation value. This allows execution of
-- computations produced by terms.
| Eval {
-- | The computation value to evaluate
evalTerm :: Intro bound free,
-- | The position in source from which this originates.
evalPos :: !(DWARFPosition [bound] [bound])
}
-- | Placeholder for a malformed command, allowing type checking to
-- continue in spite of errors.
| BadCmd {
-- | The position in source from which this originates.
badCmdPos :: !(DWARFPosition [bound] [bound])
}
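-- To make the distinction above concrete (an illustrative sketch, not
-- from the original source): a pure term becomes a Value command,
-- while running a frozen computation c goes through Eval.  term, c
-- and pos are placeholders:
--
-- > Value { valTerm = term, valPos = pos }
-- > Eval { evalTerm = Comp { compBody = c, compPos = pos }, evalPos = pos }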
-- | Computations. Semantically, computations are generators for
-- sequences of atomic actions, which are not guaranteed to terminate,
-- or even to run without error (in the case of unverified computations).
-- In terms of programming language structures, they represent
-- higher-order stateful procedures.
--
-- Obviously, computations do not admit decidable equality.
--
-- A raw computation is something akin to a function taking void in C,
-- or a monad in Haskell. Stateful functions with arguments are
-- modeled as regular functions which produce a computation.
data Comp bound free =
-- | A sequential composition of terms.
Seq {
-- | The pattern to which to bind the result of seqCmd.
seqPat :: Pattern bound,
-- | The type being bound.
seqType :: Intro bound free,
-- | The command to execute.
seqCmd :: Cmd bound free,
-- | The next computation to execute.
seqNext :: Scope bound (Comp bound) free,
-- | The position in source from which this originates.
seqPos :: !(DWARFPosition [bound] [bound])
}
-- | Result of a computation. This is always the end of a sequence.
| End {
-- | The command to run to produce a result.
endCmd :: Cmd bound free,
-- | The position in source from which this originates.
endPos :: !(DWARFPosition [bound] [bound])
}
-- | Placeholder for a malformed computation, allowing type checking
-- to continue in spite of errors.
| BadComp {
-- | The position in source from which this originates.
badCompPos :: !(DWARFPosition [bound] [bound])
}
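-- A rough sketch (assumption, not original code) of a two-step
-- computation: bind the result of one command, then finish with
-- another.  pat, ty, cmd1, cmd2 and pos are placeholders, and next
-- stands for a Scope whose body is End { endCmd = cmd2, endPos = pos }:
--
-- > Seq { seqPat = pat, seqType = ty, seqCmd = cmd1
-- >     , seqNext = next, seqPos = pos }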
elimTermPos :: Elim bound free -> DWARFPosition [bound] [bound]
elimTermPos Call { callPos = pos } = pos
elimTermPos Typed { typedPos = pos } = pos
elimTermPos Var { varPos = pos } = pos
elimTermPos BadElim { badElimPos = pos } = pos
instance Eq1 Pattern where
Deconstruct { deconstructBinds = binds1, deconstructStrict = strict1,
deconstructConstructor = constructor1 } ==#
Deconstruct { deconstructBinds = binds2, deconstructStrict = strict2,
deconstructConstructor = constructor2 } =
(strict1 == strict2) && (constructor1 == constructor2) &&
(binds1 == binds2)
As { asName = sym1, asBind = bind1 } ==#
As { asName = sym2, asBind = bind2 } =
(sym1 == sym2) && (bind1 ==# bind2)
Name { nameSym = sym1 } ==# Name { nameSym = sym2 } = sym1 == sym2
Exact { exactLiteral = lit1 } ==# Exact { exactLiteral = lit2 } = lit1 == lit2
_ ==# _ = False
instance Eq b => Eq1 (Case b) where
Case { casePat = pat1, caseBody = body1 } ==#
Case { casePat = pat2, caseBody = body2 } =
pat1 ==# pat2 && body1 ==# body2
instance Eq b => Eq1 (Element b) where
Element { elemType = ty1, elemPat = pat1, elemName = sym1 } ==#
Element { elemType = ty2, elemPat = pat2, elemName = sym2 } =
sym1 == sym2 && ty1 ==# ty2 && pat1 ==# pat2
instance Eq b => Eq1 (Intro b) where
FuncType { funcTypeArgs = argtys1, funcTypeArgOrder = ord1,
funcTypeRetTy = retty1 } ==#
FuncType { funcTypeArgs = argtys2, funcTypeArgOrder = ord2,
funcTypeRetTy = retty2 } =
argtys1 ==# argtys2 && elems ord1 ==# elems ord2 && retty1 ==# retty2
RecordType { recTypeBody = body1, recTypeOrder = ord1 } ==#
RecordType { recTypeBody = body2, recTypeOrder = ord2 } =
body1 ==# body2 && ord1 == ord2
RefineType { refineType = ty1, refineCases = cases1 } ==#
RefineType { refineType = ty2, refineCases = cases2 } =
ty1 ==# ty2 && cases1 ==# cases2
CompType { compType = ty1, compCases = cases1 } ==#
CompType { compType = ty2, compCases = cases2 } =
ty1 ==# ty2 && cases1 ==# cases2
Quantified { quantKind = kind1, quantType = ty1, quantCases = cases1 } ==#
Quantified { quantKind = kind2, quantType = ty2, quantCases = cases2 } =
kind1 == kind2 && ty1 ==# ty2 && cases1 ==# cases2
Lambda { lambdaCases = cases1 } ==# Lambda { lambdaCases = cases2 } =
cases1 == cases2
Eta { etaTerm = term1, etaType = ty1 } ==#
Eta { etaTerm = term2, etaType = ty2 } =
term1 ==# term2 && ty1 ==# ty2
Record { recFields = vals1 } ==# Record { recFields = vals2 } = vals1 == vals2
Tuple { tupleFields = vals1 } ==# Tuple { tupleFields = vals2 } =
elems vals1 ==# elems vals2
Fix { fixSym = sym1, fixTerm = term1 } ==#
Fix { fixSym = sym2, fixTerm = term2 } = sym1 == sym2 && term1 == term2
Comp { compBody = body1 } ==# Comp { compBody = body2 } = body1 ==# body2
Elim { elimTerm = term1 } ==# Elim { elimTerm = term2 } = term1 ==# term2
Literal { literalVal = lit1 } ==# Literal { literalVal = lit2 } = lit1 == lit2
Constructor { constructorSym = sym1 } ==#
Constructor { constructorSym = sym2 } = sym1 == sym2
BadIntro {} ==# BadIntro {} = True
_ ==# _ = False
instance Eq b => Eq1 (Elim b) where
Call { callArg = arg1, callFunc = func1 } ==#
Call { callArg = arg2, callFunc = func2 } =
arg1 == arg2 && func1 ==# func2
Typed { typedTerm = term1, typedType = ty1 } ==#
Typed { typedTerm = term2, typedType = ty2 } =
term1 ==# term2 && ty1 ==# ty2
Var { varSym = sym1 } ==# Var { varSym = sym2 } = sym1 == sym2
BadElim {} ==# BadElim {} = True
_ ==# _ = False
instance Eq b => Eq1 (Cmd b) where
Value { valTerm = term1 } ==# Value { valTerm = term2 } = term1 ==# term2
Eval { evalTerm = term1 } ==# Eval { evalTerm = term2 } = term1 ==# term2
BadCmd _ ==# BadCmd _ = True
_ ==# _ = False
instance Eq b => Eq1 (Comp b) where
Seq { seqType = ty1, seqPat = pat1, seqCmd = cmd1, seqNext = next1 } ==#
Seq { seqType = ty2, seqPat = pat2, seqCmd = cmd2, seqNext = next2 } =
pat1 ==# pat2 && cmd1 ==# cmd2 && next1 ==# next2 && ty1 ==# ty2
End { endCmd = Eval { evalTerm = Comp { compBody = c1 } } } ==# c2 = c1 ==# c2
c1 ==# End { endCmd = Eval { evalTerm = Comp { compBody = c2 } } } = c1 ==# c2
End { endCmd = cmd1 } ==# End { endCmd = cmd2 } = cmd1 ==# cmd2
BadComp {} ==# BadComp {} = True
_ ==# _ = False
instance (Eq b) => Eq (Pattern b) where (==) = (==#)
instance (Eq b, Eq s) => Eq (Case b s) where (==) = (==#)
instance (Eq b, Eq s) => Eq (Element b s) where (==) = (==#)
instance (Eq b, Eq s) => Eq (Intro b s) where (==) = (==#)
instance (Eq b, Eq s) => Eq (Elim b s) where (==) = (==#)
instance (Eq b, Eq s) => Eq (Cmd b s) where (==) = (==#)
instance (Eq b, Eq s) => Eq (Comp b s) where (==) = (==#)
keyOrd :: Ord a => (a, b) -> (a, b) -> Ordering
keyOrd (a1, _) (a2, _) = compare a1 a2
instance Ord1 Pattern where
compare1 Deconstruct { deconstructBinds = binds1, deconstructStrict = strict1,
deconstructConstructor = constructor1 }
Deconstruct { deconstructBinds = binds2, deconstructStrict = strict2,
deconstructConstructor = constructor2 } =
case compare strict1 strict2 of
EQ -> case compare constructor1 constructor2 of
EQ -> compare (sortBy keyOrd (HashMap.toList binds1))
(sortBy keyOrd (HashMap.toList binds2))
out -> out
out -> out
compare1 Deconstruct {} _ = GT
compare1 _ Deconstruct {} = LT
compare1 As { asName = sym1, asBind = bind1 }
As { asName = sym2, asBind = bind2 } =
case compare sym1 sym2 of
EQ -> compare1 bind1 bind2
out -> out
compare1 As {} _ = GT
compare1 _ As {} = LT
compare1 Name { nameSym = sym1 } Name { nameSym = sym2 } =
compare sym1 sym2
compare1 Name {} _ = GT
compare1 _ Name {} = LT
compare1 Exact { exactLiteral = val1 } Exact { exactLiteral = val2 } =
compare val1 val2
instance Ord b => Ord1 (Case b) where
compare1 Case { casePat = pat1, caseBody = body1 }
Case { casePat = pat2, caseBody = body2 } =
case compare1 pat1 pat2 of
EQ -> compare body1 body2
out -> out
instance Ord b => Ord1 (Element b) where
compare1 Element { elemName = sym1, elemPat = pat1, elemType = ty1 }
Element { elemName = sym2, elemPat = pat2, elemType = ty2 } =
case compare sym1 sym2 of
EQ -> case compare1 ty1 ty2 of
EQ -> compare1 pat1 pat2
out -> out
out -> out
instance Ord b => Ord1 (Intro b) where
compare1 FuncType { funcTypeArgs = argtys1, funcTypeRetTy = retty1 }
FuncType { funcTypeArgs = argtys2, funcTypeRetTy = retty2 } =
case compare1 retty1 retty2 of
EQ -> compare1 argtys1 argtys2
out -> out
compare1 FuncType {} _ = GT
compare1 _ FuncType {} = LT
compare1 RecordType { recTypeBody = body1, recTypeOrder = ord1 }
RecordType { recTypeBody = body2, recTypeOrder = ord2 } =
case compare ord1 ord2 of
EQ -> compare1 body1 body2
out -> out
compare1 RecordType {} _ = GT
compare1 _ RecordType {} = LT
compare1 RefineType { refineType = ty1, refineCases = cases1 }
RefineType { refineType = ty2, refineCases = cases2 } =
case compare1 ty1 ty2 of
EQ -> compare1 cases1 cases2
out -> out
compare1 RefineType {} _ = GT
compare1 _ RefineType {} = LT
compare1 CompType { compType = ty1, compCases = cases1 }
CompType { compType = ty2, compCases = cases2 } =
case compare ty1 ty2 of
EQ -> compare1 cases1 cases2
out -> out
compare1 CompType {} _ = GT
compare1 _ CompType {} = LT
compare1 Quantified { quantKind = kind1, quantType = ty1,
quantCases = cases1 }
Quantified { quantKind = kind2, quantType = ty2,
quantCases = cases2 } =
case compare kind1 kind2 of
EQ -> case compare1 ty1 ty2 of
EQ -> compare1 cases1 cases2
out -> out
out -> out
compare1 Quantified {} _ = GT
compare1 _ Quantified {} = LT
compare1 Eta { etaTerm = term1, etaType = ty1 }
Eta { etaTerm = term2, etaType = ty2 } =
case compare1 term1 term2 of
EQ -> compare ty1 ty2
out -> out
compare1 Eta {} _ = GT
compare1 _ Eta {} = LT
compare1 Lambda { lambdaCases = cases1 } Lambda { lambdaCases = cases2 } =
compare1 cases1 cases2
compare1 Lambda {} _ = GT
compare1 _ Lambda {} = LT
compare1 Record { recFields = vals1 } Record { recFields = vals2 } =
compare (sortBy keyOrd (HashMap.toList vals1))
(sortBy keyOrd (HashMap.toList vals2))
compare1 Record {} _ = GT
compare1 _ Record {} = LT
compare1 Tuple { tupleFields = vals1 } Tuple { tupleFields = vals2 } =
compare1 (elems vals1) (elems vals2)
compare1 Tuple {} _ = GT
compare1 _ Tuple {} = LT
compare1 Fix { fixSym = sym1, fixTerm = term1 }
Fix { fixSym = sym2, fixTerm = term2 } =
case compare sym1 sym2 of
EQ -> compare term1 term2
out -> out
compare1 Fix {} _ = GT
compare1 _ Fix {} = LT
compare1 Comp { compBody = body1 } Comp { compBody = body2 } =
compare1 body1 body2
compare1 Comp {} _ = GT
compare1 _ Comp {} = LT
compare1 Elim { elimTerm = term1 } Elim { elimTerm = term2 } =
compare1 term1 term2
compare1 Elim {} _ = GT
compare1 _ Elim {} = LT
compare1 Literal { literalVal = lit1 } Literal { literalVal = lit2 } =
compare lit1 lit2
compare1 Literal {} _ = LT
compare1 _ Literal {} = GT
compare1 Constructor { constructorSym = sym1 }
Constructor { constructorSym = sym2 } =
compare sym1 sym2
compare1 Constructor {} _ = LT
compare1 _ Constructor {} = GT
compare1 BadIntro {} BadIntro {} = EQ
instance Ord b => Ord1 (Elim b) where
compare1 Call { callArg = arg1, callFunc = func1 }
Call { callArg = arg2, callFunc = func2 } =
case compare1 func1 func2 of
EQ -> compare arg1 arg2
out -> out
compare1 Call {} _ = GT
compare1 _ Call {} = LT
compare1 Typed { typedTerm = term1, typedType = ty1 }
Typed { typedTerm = term2, typedType = ty2 } =
case compare1 term1 term2 of
EQ -> compare ty1 ty2
out -> out
compare1 Typed {} _ = GT
compare1 _ Typed {} = LT
compare1 Var { varSym = sym1 } Var { varSym = sym2 } = compare sym1 sym2
compare1 Var {} _ = GT
compare1 _ Var {} = LT
compare1 BadElim {} BadElim {} = EQ
instance Ord b => Ord1 (Cmd b) where
compare1 Value { valTerm = term1 } Value { valTerm = term2 } =
compare1 term1 term2
compare1 Value {} _ = GT
compare1 _ Value {} = LT
compare1 Eval { evalTerm = term1 } Eval { evalTerm = term2 } =
compare1 term1 term2
compare1 Eval {} _ = GT
compare1 _ Eval {} = LT
compare1 BadCmd {} BadCmd {} = EQ
instance Ord b => Ord1 (Comp b) where
compare1 Seq { seqType = ty1, seqPat = pat1,
seqCmd = cmd1, seqNext = next1 }
Seq { seqType = ty2, seqPat = pat2,
seqCmd = cmd2, seqNext = next2 } =
case compare1 ty1 ty2 of
EQ -> case compare1 pat1 pat2 of
EQ -> case compare1 cmd1 cmd2 of
EQ -> compare1 next1 next2
out -> out
out -> out
out -> out
compare1 Seq {} _ = GT
compare1 _ Seq {} = LT
compare1 End { endCmd = cmd1 } End { endCmd = cmd2 } = compare1 cmd1 cmd2
compare1 End {} _ = GT
compare1 _ End {} = LT
compare1 BadComp {} BadComp {} = EQ
instance (Ord b) => Ord (Pattern b) where compare = compare1
instance (Ord b, Ord s) => Ord (Case b s) where compare = compare1
instance (Ord b, Ord s) => Ord (Element b s) where compare = compare1
instance (Ord b, Ord s) => Ord (Intro b s) where compare = compare1
instance (Ord b, Ord s) => Ord (Elim b s) where compare = compare1
instance (Ord b, Ord s) => Ord (Cmd b s) where compare = compare1
instance (Ord b, Ord s) => Ord (Comp b s) where compare = compare1
instance Hashable Literal where
hashWithSalt s Num { numVal = n } =
s `hashWithSalt` (1 :: Int) `hashWithSalt` n
hashWithSalt s Str { strVal = str } =
s `hashWithSalt` (2 :: Int) `hashWithSalt` str
hashWithSalt s Char { charVal = c } =
s `hashWithSalt` (2 :: Int) `hashWithSalt` c
instance (Hashable b, Ord b) => Hashable1 (Case b) where
hashWithSalt1 s Case { casePat = pat, caseBody = body } =
(s `hashWithSalt` pat) `hashWithSalt1` body
instance (Hashable b, Ord b) => Hashable1 (Element b) where
hashWithSalt1 s Element { elemName = sym, elemPat = pat, elemType = ty } =
(s `hashWithSalt` sym `hashWithSalt` pat) `hashWithSalt1` ty
instance (Hashable b, Ord b) => Hashable1 (Intro b) where
hashWithSalt1 s FuncType { funcTypeArgs = argtys, funcTypeRetTy = retty } =
s `hashWithSalt` (1 :: Int) `hashWithSalt` argtys `hashWithSalt` retty
hashWithSalt1 s RecordType { recTypeBody = body, recTypeOrder = ord } =
(s `hashWithSalt` (2 :: Int) `hashWithSalt` elems ord) `hashWithSalt1` body
hashWithSalt1 s RefineType { refineType = ty, refineCases = cases } =
s `hashWithSalt` (3 :: Int) `hashWithSalt1` ty `hashWithSalt1` cases
hashWithSalt1 s CompType { compType = ty, compCases = cases } =
s `hashWithSalt` (4 :: Int) `hashWithSalt1` ty `hashWithSalt1` cases
hashWithSalt1 s Quantified { quantKind = Forall, quantType = ty,
quantCases = cases } =
s `hashWithSalt` (5 :: Int) `hashWithSalt1` ty `hashWithSalt1` cases
hashWithSalt1 s Quantified { quantKind = Exists, quantType = ty,
quantCases = cases } =
s `hashWithSalt` (6 :: Int) `hashWithSalt1` ty `hashWithSalt1` cases
hashWithSalt1 s Eta { etaTerm = term, etaType = ty } =
s `hashWithSalt` (7 :: Int) `hashWithSalt1` term `hashWithSalt1` ty
hashWithSalt1 s Lambda { lambdaCases = cases } =
s `hashWithSalt` (8 :: Int) `hashWithSalt1` cases
hashWithSalt1 s Record { recFields = vals } =
s `hashWithSalt` (9 :: Int) `hashWithSalt1`
sortBy keyOrd (HashMap.toList vals)
hashWithSalt1 s Tuple { tupleFields = vals } =
s `hashWithSalt` (10 :: Int) `hashWithSalt1` elems vals
hashWithSalt1 s Fix { fixSym = sym, fixTerm = term } =
(s `hashWithSalt` (11 :: Int) `hashWithSalt` sym) `hashWithSalt1` term
hashWithSalt1 s Comp { compBody = body } =
s `hashWithSalt` (12 :: Int) `hashWithSalt1` body
hashWithSalt1 s Elim { elimTerm = term } =
s `hashWithSalt` (13 :: Int) `hashWithSalt1` term
hashWithSalt1 s Literal { literalVal = term } =
s `hashWithSalt` (14 :: Int) `hashWithSalt` term
hashWithSalt1 s Constructor { constructorSym = sym } =
s `hashWithSalt` (15 :: Int) `hashWithSalt` sym
hashWithSalt1 s BadIntro {} = s `hashWithSalt` (0 :: Int)
instance (Hashable b, Ord b) => Hashable1 (Elim b) where
hashWithSalt1 s Call { callArg = arg, callFunc = func } =
(s `hashWithSalt` (1 :: Int) `hashWithSalt` arg) `hashWithSalt1` func
hashWithSalt1 s Typed { typedTerm = term, typedType = ty } =
s `hashWithSalt` (2 :: Int) `hashWithSalt1` term `hashWithSalt1` ty
hashWithSalt1 s Var { varSym = sym } =
s `hashWithSalt` (3 :: Int) `hashWithSalt` sym
hashWithSalt1 s BadElim {} = s `hashWithSalt` (0 :: Int)
instance (Hashable b, Ord b) => Hashable1 (Cmd b) where
hashWithSalt1 s Value { valTerm = term } =
s `hashWithSalt` (1 :: Int) `hashWithSalt1` term
hashWithSalt1 s Eval { evalTerm = term } =
s `hashWithSalt` (2 :: Int) `hashWithSalt1` term
hashWithSalt1 s (BadCmd _) = s `hashWithSalt` (0 :: Int)
instance (Hashable b, Ord b) => Hashable1 (Comp b) where
hashWithSalt1 s Seq { seqCmd = cmd, seqNext = next,
seqType = ty, seqPat = pat } =
(s `hashWithSalt` (1 :: Int) `hashWithSalt` pat) `hashWithSalt1`
ty `hashWithSalt1` cmd `hashWithSalt1` next
hashWithSalt1 s End { endCmd = cmd } =
s `hashWithSalt` (2 :: Int) `hashWithSalt1` cmd
hashWithSalt1 s (BadComp _) = s `hashWithSalt` (0 :: Int)
instance (Hashable b, Ord b) => Hashable (Pattern b) where
hashWithSalt s Deconstruct { deconstructConstructor = constructor,
deconstructBinds = binds,
deconstructStrict = strict } =
(s `hashWithSalt` (1 :: Int) `hashWithSalt` constructor `hashWithSalt`
strict) `hashWithSalt` sortBy keyOrd (HashMap.toList binds)
hashWithSalt s As { asName = sym, asBind = bind } =
(s `hashWithSalt` (2 :: Int) `hashWithSalt` sym) `hashWithSalt` bind
hashWithSalt s Name { nameSym = sym } =
s `hashWithSalt` (3 :: Int) `hashWithSalt` sym
hashWithSalt s Exact { exactLiteral = c } =
s `hashWithSalt` (4 :: Int) `hashWithSalt` c
instance (Hashable b, Hashable s, Ord b) => Hashable (Case b s) where
hashWithSalt = hashWithSalt1
instance (Hashable b, Hashable s, Ord b) => Hashable (Element b s) where
hashWithSalt = hashWithSalt1
instance (Hashable b, Hashable s, Ord b) => Hashable (Intro b s) where
hashWithSalt = hashWithSalt1
instance (Hashable b, Hashable s, Ord b) => Hashable (Elim b s) where
hashWithSalt = hashWithSalt1
instance (Hashable b, Hashable s, Ord b) => Hashable (Cmd b s) where
hashWithSalt = hashWithSalt1
instance (Hashable b, Hashable s, Ord b) => Hashable (Comp b s) where
hashWithSalt = hashWithSalt1
instance Functor (Case b) where fmap = fmapDefault
instance Functor (Element b) where fmap = fmapDefault
instance Functor (Intro b) where fmap = fmapDefault
instance Functor (Elim b) where fmap = fmapDefault
instance Functor (Cmd b) where fmap = fmapDefault
instance Functor (Comp b) where fmap = fmapDefault
instance Foldable (Case b) where foldMap = foldMapDefault
instance Foldable (Element b) where foldMap = foldMapDefault
instance Foldable (Intro b) where foldMap = foldMapDefault
instance Foldable (Elim b) where foldMap = foldMapDefault
instance Foldable (Cmd b) where foldMap = foldMapDefault
instance Foldable (Comp b) where foldMap = foldMapDefault
instance Traversable (Case b) where
traverse f c @ Case { caseBody = body } =
(\body' -> c { caseBody = body' }) <$> traverse f body
instance Traversable (Element b) where
traverse f c @ Element { elemType = ty } =
(\ty' -> c { elemType = ty' }) <$> traverse f ty
instance Traversable (Intro b) where
traverse f t @ FuncType { funcTypeArgs = argtys, funcTypeRetTy = retty } =
(\argtys' retty' -> t { funcTypeArgs = argtys', funcTypeRetTy = retty' }) <$>
traverse (traverse f) argtys <*> traverse f retty
traverse f t @ RecordType { recTypeBody = body } =
(\body' -> t { recTypeBody = body' }) <$> traverse (traverse f) body
traverse f t @ RefineType { refineType = ty, refineCases = cases } =
(\ty' cases' -> t { refineType = ty', refineCases = cases' }) <$>
traverse f ty <*> traverse (traverse f) cases
traverse f t @ CompType { compType = ty, compCases = cases } =
(\ty' cases' -> t { compType = ty', compCases = cases' }) <$>
traverse f ty <*> traverse (traverse f) cases
traverse f t @ Quantified { quantType = ty, quantCases = cases } =
(\ty' cases' -> t { quantType = ty', quantCases = cases' }) <$>
traverse f ty <*> traverse (traverse f) cases
traverse f t @ Eta { etaTerm = term, etaType = ty } =
(\term' ty' -> t { etaTerm = term', etaType = ty' }) <$>
traverse f term <*> traverse f ty
traverse f t @ Lambda { lambdaCases = cases } =
(\cases' -> t { lambdaCases = cases' }) <$> traverse (traverse f) cases
traverse f t @ Record { recFields = vals } =
(\vals' -> t { recFields = vals' }) <$> traverse (traverse f) vals
traverse f t @ Tuple { tupleFields = vals } =
(\vals' -> t { tupleFields = vals' }) <$> traverse (traverse f) vals
traverse f t @ Fix { fixTerm = term } =
(\term' -> t { fixTerm = term' }) <$> traverse f term
traverse f c @ Comp { compBody = body } =
(\body' -> c { compBody = body' }) <$> traverse f body
traverse f t @ Elim { elimTerm = term } =
(\term' -> t { elimTerm = term' }) <$> traverse f term
traverse _ Constructor { constructorSym = sym, constructorPos = p } =
pure Constructor { constructorSym = sym, constructorPos = p }
traverse _ Literal { literalVal = lit, literalPos = p } =
pure Literal { literalVal = lit, literalPos = p }
traverse _ BadIntro { badIntroPos = p } = pure BadIntro { badIntroPos = p }
instance Traversable (Elim b) where
traverse f t @ Call { callArg = arg, callFunc = func } =
(\arg' func' -> t { callArg = arg', callFunc = func' }) <$>
traverse f arg <*> traverse f func
traverse f t @ Typed { typedTerm = term, typedType = ty } =
(\term' ty' -> t { typedTerm = term', typedType = ty' }) <$>
traverse f term <*> traverse f ty
traverse f t @ Var { varSym = sym } =
(\sym' -> t { varSym = sym' }) <$> f sym
traverse _ BadElim { badElimPos = p } = pure BadElim { badElimPos = p }
instance Traversable (Cmd b) where
traverse f c @ Value { valTerm = term } =
(\term' -> c { valTerm = term' }) <$> traverse f term
traverse f c @ Eval { evalTerm = term } =
(\term' -> c { evalTerm = term' }) <$> traverse f term
traverse _ (BadCmd c) = pure (BadCmd c)
instance Traversable (Comp b) where
traverse f c @ Seq { seqCmd = cmd, seqNext = next, seqType = ty } =
(\ty' cmd' next' -> c { seqCmd = cmd', seqNext = next', seqType = ty' }) <$>
traverse f ty <*> traverse f cmd <*> traverse f next
traverse f c @ End { endCmd = cmd } =
(\cmd' -> c { endCmd = cmd' }) <$> traverse f cmd
traverse _ (BadComp p) = pure (BadComp p)
injectpos :: DWARFPosition [bound] [bound]
injectpos = Synthetic { synthDesc = Strict.fromString "Monad return" }
{-
instance MonadTrans (Pattern b) where
lift = Exact
instance Bound (Pattern b) where
b @ Deconstruct { deconstructBinds = binds } >>>= f =
b { deconstructBinds = fmap (>>>= f) binds }
b @ As { asBind = bind } >>>= f = b { asBind = bind >>>= f }
b @ Name { nameSym = sym } >>>= _ = b { nameSym = sym }
Exact t >>>= f = Exact (t >>= f)
-}
instance Applicative (Intro b) where
pure = return
(<*>) = ap
instance Applicative (Elim b) where
pure = return
(<*>) = ap
instance Applicative (Comp b) where
pure = return
(<*>) = ap
caseSubstIntro :: (a -> Intro c b) -> Case c a -> Case c b
caseSubstIntro f c @ Case { caseBody = body } = c { caseBody = body >>>= f }
elementSubstIntro :: (a -> Intro c b) -> Element c a -> Element c b
elementSubstIntro f e @ Element { elemType = ty } = e { elemType = ty >>>= f }
elimSubstIntro :: (a -> Intro c b) -> Elim c a -> Elim c b
elimSubstIntro f t @ Call { callArg = arg, callFunc = func } =
t { callArg = arg >>= f, callFunc = elimSubstIntro f func }
elimSubstIntro f t @ Typed { typedTerm = term, typedType = ty } =
t { typedTerm = term >>= f, typedType = ty >>= f }
elimSubstIntro _ Var {} = error "Should not see this case"
elimSubstIntro _ BadElim { badElimPos = p } = BadElim { badElimPos = p }
cmdSubstIntro :: (a -> Intro c b) -> Cmd c a -> Cmd c b
cmdSubstIntro f c @ Value { valTerm = term } = c { valTerm = term >>= f }
cmdSubstIntro f c @ Eval { evalTerm = term } = c { evalTerm = term >>= f }
cmdSubstIntro _ (BadCmd p) = BadCmd p
compSubstIntro :: (a -> Intro c b) -> Comp c a -> Comp c b
compSubstIntro f c @ Seq { seqCmd = cmd, seqNext = next, seqType = ty } =
c { seqType = ty >>= f, seqCmd = cmdSubstIntro f cmd,
seqNext = next >>>= compSubstIntro f . return }
compSubstIntro f c @ End { endCmd = cmd } =
c { endCmd = cmdSubstIntro f cmd }
compSubstIntro _ (BadComp p) = BadComp p
instance Monad (Intro b) where
return sym = Elim { elimTerm = Var { varSym = sym, varPos = injectpos } }
t @ FuncType { funcTypeArgs = argtys, funcTypeRetTy = retty } >>= f =
t { funcTypeArgs = fmap (elementSubstIntro f) argtys,
funcTypeRetTy = retty >>>= f }
t @ RecordType { recTypeBody = body } >>= f =
t { recTypeBody = fmap (elementSubstIntro f) body }
t @ RefineType { refineType = ty, refineCases = cases } >>= f =
t { refineType = ty >>= f, refineCases = fmap (caseSubstIntro f) cases }
t @ CompType { compType = ty, compCases = cases } >>= f =
t { compType = ty >>= f, compCases = fmap (caseSubstIntro f) cases }
t @ Quantified { quantType = ty, quantCases = cases } >>= f =
t { quantCases = fmap (caseSubstIntro f) cases, quantType = ty >>= f }
t @ Lambda { lambdaCases = cases } >>= f =
t { lambdaCases = fmap (caseSubstIntro f) cases }
t @ Record { recFields = vals } >>= f = t { recFields = fmap (>>= f) vals }
t @ Tuple { tupleFields = vals } >>= f = t { tupleFields = fmap (>>= f) vals }
t @ Fix { fixTerm = term } >>= f = t { fixTerm = term >>>= f }
t @ Comp { compBody = body } >>= f = t { compBody = compSubstIntro f body }
Elim { elimTerm = Var { varSym = sym } } >>= f = f sym
t @ Elim { elimTerm = term } >>= f = t { elimTerm = elimSubstIntro f term }
t @ Eta { etaTerm = term, etaType = ty } >>= f =
t { etaTerm = elimSubstIntro f term, etaType = ty >>= f }
Literal { literalVal = lit, literalPos = p } >>= _ =
Literal { literalVal = lit, literalPos = p }
Constructor { constructorSym = sym, constructorPos = p } >>= _ =
Constructor { constructorSym = sym, constructorPos = p }
BadIntro { badIntroPos = p } >>= _ = BadIntro { badIntroPos = p }
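-- The Monad instance above implements substitution on free variables:
-- return injects a variable, and >>= replaces every free variable with
-- the introduction term the supplied function maps it to.  A hedged
-- doctest-style sketch (x and f are placeholders; the second line is
-- the same statement with return expanded):
--
-- > (return x >>= f) == f x
-- > (Elim { elimTerm = Var { varSym = x, varPos = p } } >>= f) == f x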
caseSubstElim :: (a -> Elim c b) -> Case c a -> Case c b
caseSubstElim f c @ Case { caseBody = body } =
c { caseBody = body >>>= (Elim . f) }
elementSubstElim :: (a -> Elim c b) -> Element c a -> Element c b
elementSubstElim f e @ Element { elemType = ty } =
e { elemType = ty >>>= (Elim . f) }
cmdSubstElim :: (a -> Elim c b) -> Cmd c a -> Cmd c b
cmdSubstElim f c @ Value { valTerm = term } =
c { valTerm = introSubstElim f term }
cmdSubstElim f c @ Eval { evalTerm = term } =
c { evalTerm = introSubstElim f term }
cmdSubstElim _ (BadCmd p) = BadCmd p
compSubstElim :: (a -> Elim c b) -> Comp c a -> Comp c b
compSubstElim f c @ Seq { seqCmd = cmd, seqNext = next, seqType = ty } =
c { seqType = introSubstElim f ty, seqCmd = cmdSubstElim f cmd,
seqNext = next >>>= compSubstElim f . return }
compSubstElim f c @ End { endCmd = cmd } =
c { endCmd = cmdSubstElim f cmd }
compSubstElim _ (BadComp p) = BadComp p
introSubstElim :: (a -> Elim c b) -> Intro c a -> Intro c b
introSubstElim f t @ FuncType { funcTypeArgs = argtys, funcTypeRetTy = retty } =
t { funcTypeArgs = fmap (elementSubstElim f) argtys,
funcTypeRetTy = retty >>>= (Elim . f) }
introSubstElim f t @ RecordType { recTypeBody = body } =
t { recTypeBody = fmap (elementSubstElim f) body }
introSubstElim f t @ RefineType { refineType = ty, refineCases = cases } =
t { refineType = introSubstElim f ty,
refineCases = fmap (caseSubstElim f) cases }
introSubstElim f t @ CompType { compType = ty, compCases = cases } =
t { compType = introSubstElim f ty, compCases = fmap (caseSubstElim f) cases }
introSubstElim f t @ Quantified { quantType = ty, quantCases = cases } =
t { quantCases = fmap (caseSubstElim f) cases,
quantType = introSubstElim f ty }
introSubstElim f t @ Lambda { lambdaCases = cases } =
t { lambdaCases = fmap (caseSubstElim f) cases }
introSubstElim f t @ Record { recFields = vals } =
t { recFields = fmap (introSubstElim f) vals }
introSubstElim f t @ Tuple { tupleFields = vals } =
t { tupleFields = fmap (introSubstElim f) vals }
introSubstElim f t @ Fix { fixTerm = term } = t { fixTerm = term >>>= Elim . f }
introSubstElim f t @ Comp { compBody = body } =
t { compBody = compSubstElim f body }
introSubstElim f Elim { elimTerm = term } = Elim { elimTerm = term >>= f }
introSubstElim f t @ Eta { etaTerm = term, etaType = ty } =
t { etaTerm = term >>= f, etaType = introSubstElim f ty }
introSubstElim _ Literal { literalVal = lit, literalPos = p } =
Literal { literalVal = lit, literalPos = p }
introSubstElim _ Constructor { constructorSym = sym, constructorPos = p } =
Constructor { constructorSym = sym, constructorPos = p }
introSubstElim _ BadIntro { badIntroPos = p } = BadIntro { badIntroPos = p }
instance Monad (Elim b) where
return sym = Var { varSym = sym, varPos = injectpos }
t @ Call { callArg = arg, callFunc = func } >>= f =
t { callArg = introSubstElim f arg, callFunc = func >>= f }
t @ Typed { typedTerm = term, typedType = ty } >>= f =
t { typedTerm = introSubstElim f term, typedType = introSubstElim f ty }
Var { varSym = sym } >>= f = f sym
BadElim { badElimPos = p } >>= _ = BadElim { badElimPos = p }
introSubstComp :: (a -> Comp c b) -> a -> Intro c b
introSubstComp f sym =
case f sym of
End { endCmd = Eval { evalTerm = term } } -> term
End { endCmd = Value { valTerm = term } } -> term
End { endCmd = BadCmd p } -> BadIntro p
body @ Seq { seqPos = pos } -> Comp { compBody = body, compPos = pos }
BadComp { badCompPos = p } -> BadIntro { badIntroPos = p }
cmdSubstComp :: (a -> Comp c b) -> Cmd c a -> Cmd c b
cmdSubstComp f c @ Value { valTerm = term } =
c { valTerm = term >>= introSubstComp f }
cmdSubstComp f c @ Eval { evalTerm = term } =
c { evalTerm = term >>= introSubstComp f }
cmdSubstComp _ (BadCmd p) = BadCmd p
instance Monad (Comp b) where
return sym =
End {
endCmd = Eval {
evalTerm = Elim {
elimTerm = Var { varSym = sym,
varPos = injectpos }
},
evalPos = injectpos
},
endPos = injectpos
}
c @ Seq { seqType = ty, seqCmd = cmd, seqNext = next } >>= f =
c { seqType = ty >>= introSubstComp f, seqNext = next >>>= f,
seqCmd = cmdSubstComp f cmd }
c @ End { endCmd = cmd } >>= f = c { endCmd = cmdSubstComp f cmd }
BadComp p >>= _ = BadComp p
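-- For Comp, return builds the trivial computation that evaluates a
-- variable and stops (End / Eval / Var), and >>= substitutes whole
-- computations for free variables, flattening trivial results through
-- introSubstComp.  A hedged sketch, with x a free variable and f
-- mapping x to a computation that ends in an Eval command; equality is
-- the Eq1-based instance above, which ignores positions:
--
-- > (return x >>= f) == f x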
instance Format Literal where
format Num { numVal = num }
| denominator num == 1 = format (numerator num)
| otherwise = format (numerator num) <> char '/' <> format (denominator num)
format Str { strVal = str } = dquoted (bytestring str)
format Char { charVal = chr } = squoted (char chr)
instance Show Literal where
show = Lazy.toString . renderOptimal 80 False . format
instance Monad m => FormatM m Literal where
formatM = return . format
formatBind :: (Default sym, Format sym, Format ent, Eq sym) =>
(sym, ent) -> (Doc, Doc)
formatBind (fname, ent) = (format fname, format ent)
formatMBind :: (MonadPositions m, MonadSymbols m, Default sym,
FormatM m sym, FormatM m ent, Eq sym) =>
(sym, ent) -> m (Doc, Doc)
formatMBind (fname, ent) =
do
fname' <- formatM fname
ent' <- formatM ent
return (fname', ent')
instance Format Quantifier where
format Forall = string "forall"
format Exists = string "exists"
instance (Default bound, Format bound, Eq bound) => Format (Pattern bound) where
format Deconstruct { deconstructConstructor = sym, deconstructBinds = binds }
| sym == def =
let
binddocs = map formatBind (HashMap.toList binds)
in
recordDoc binddocs
| otherwise =
let
binddocs = map formatBind (HashMap.toList binds)
symdoc = format sym
in
compoundApplyDoc symdoc binddocs
format As { asBind = bind, asName = sym } =
let
namedoc = format sym
binddoc = format bind
in
namedoc </> nest nestLevel (string "as" </> binddoc)
format Name { nameSym = sym } = format sym
format Exact { exactLiteral = e } = format e
instance (Format bound, Default bound, Eq bound) => Show (Pattern bound) where
show = Lazy.toString . renderOptimal 80 False . format
instance (MonadPositions m, MonadSymbols m,
FormatM m bound, Default bound, Eq bound) =>
FormatM m (Pattern bound) where
formatM Deconstruct { deconstructConstructor = sym, deconstructBinds = binds }
| sym == def =
do
binddocs <- mapM formatMBind (HashMap.toList binds)
return $! recordDoc binddocs
| otherwise =
do
symdoc <- formatM sym
binddocs <- mapM formatMBind (HashMap.toList binds)
return $! compoundApplyDoc symdoc binddocs
formatM As { asBind = bind, asName = sym } =
do
namedoc <- formatM sym
binddoc <- formatM bind
return (namedoc </> nest nestLevel (string "as" </> binddoc))
formatM Name { nameSym = sym } = formatM sym
formatM Exact { exactLiteral = e } = formatM e
instance (Format bound, Format free, Default bound, Eq bound) =>
Format (Case bound free) where
format Case { casePat = pat, caseBody = body } =
let
patdoc = format pat
bodydoc = format body
in
patdoc </> equals </> nest nestLevel bodydoc
instance (Format bound, Format free, Default bound, Eq bound) =>
Show (Case bound free) where
show = Lazy.toString . renderOptimal 80 False . format
instance (MonadPositions m, MonadSymbols m, FormatM m bound,
FormatM m free, Default bound, Eq bound) =>
FormatM m (Case bound free) where
formatM Case { casePat = pat, caseBody = body } =
do
patdoc <- formatM pat
bodydoc <- formatM body
return (patdoc </> equals </> nest nestLevel bodydoc)
instance (Format bound, Format free, Default bound, Eq bound) =>
Format (Element bound free) where
format Element { elemPat = Name { nameSym = sym' },
elemType = ty, elemName = sym } | sym' == def =
let
symdoc = format sym
tydoc = format ty
in case flatten tydoc of
Just flatty ->
choose [ symdoc <+> colon <+> flatty,
symdoc <+> colon <!> nest nestLevel tydoc ]
Nothing -> symdoc <+> colon <!> nest nestLevel tydoc
format Element { elemPat = pat, elemType = ty, elemName = sym } =
let
symdoc = format sym
patdoc = format pat
tydoc = format ty
in case flatten tydoc of
Just flatty ->
choose [ symdoc <+> colon <+> flatty <+>
equals <!> nest nestLevel patdoc,
symdoc <+> colon <!>
nest nestLevel (tydoc <+> equals <!> nest nestLevel patdoc) ]
Nothing -> symdoc <+> colon <!>
nest nestLevel (tydoc <+> equals <!> nest nestLevel patdoc)
instance (Format bound, Format free, Default bound, Eq bound) =>
Show (Element bound free) where
show = Lazy.toString . renderOptimal 80 False . format
instance (MonadPositions m, MonadSymbols m, FormatM m bound,
FormatM m free, Default bound, Eq bound) =>
FormatM m (Element bound free) where
formatM Element { elemPat = Name { nameSym = sym' },
elemType = ty, elemName = sym } | sym' == def =
do
symdoc <- formatM sym
tydoc <- formatM ty
case flatten tydoc of
Just flatty ->
return (choose [ symdoc <+> colon <+> flatty,
symdoc <+> colon <!> nest nestLevel tydoc ])
Nothing -> return (symdoc <+> colon <!> nest nestLevel tydoc)
formatM Element { elemPat = pat, elemType = ty, elemName = sym } =
do
symdoc <- formatM sym
patdoc <- formatM pat
tydoc <- formatM ty
case flatten tydoc of
Just flatty ->
return (choose [ symdoc <+> colon <+> flatty <+>
equals <!> nest nestLevel patdoc,
symdoc <+> colon <!>
nest nestLevel (tydoc <+> equals <!>
nest nestLevel patdoc) ])
Nothing -> return (symdoc <+> colon <!>
nest nestLevel (tydoc <+> equals <!>
nest nestLevel patdoc))
formatCompList :: (Format bound, Format free, Default bound, Eq bound) =>
Comp bound free -> [Doc]
formatCompList =
let
formatCompList':: (Format bound, Format free, Default bound, Eq bound) =>
[Doc] -> Comp bound free -> [Doc]
formatCompList' accum Seq { seqCmd = cmd, seqPat = pat,
seqType = ty, seqNext = next } =
let
cmddoc = format cmd
patdoc = format pat
typedoc = format ty
doc = patdoc <+> colon </>
nest nestLevel (typedoc <+> equals </> nest nestLevel cmddoc)
in
formatCompList' (doc : accum) (fromScope next)
formatCompList' accum End { endCmd = cmd } =
let
cmddoc = format cmd
in
reverse (cmddoc : accum)
formatCompList' accum BadComp {} = reverse (string "<bad>" : accum)
in
formatCompList' []
formatMCompList :: (MonadPositions m, MonadSymbols m, FormatM m bound,
FormatM m free, Default bound, Eq bound) =>
Comp bound free -> m [Doc]
formatMCompList =
let
formatMCompList':: (MonadPositions m, MonadSymbols m, FormatM m bound,
FormatM m free, Default bound, Eq bound) =>
[Doc] -> Comp bound free -> m [Doc]
formatMCompList' accum Seq { seqCmd = cmd, seqPat = pat,
seqType = ty, seqNext = next } =
do
cmddoc <- formatM cmd
patdoc <- formatM pat
typedoc <- formatM ty
formatMCompList' (patdoc <+> colon </>
nest nestLevel (typedoc <+> equals </>
nest nestLevel cmddoc) : accum)
(fromScope next)
formatMCompList' accum End { endCmd = cmd } =
do
cmddoc <- formatM cmd
return $! reverse (cmddoc : accum)
formatMCompList' accum BadComp {} =
return $! reverse (string "<bad>" : accum)
in
formatMCompList' []
instance (Format bound, Format free, Default bound, Eq bound) =>
Format (Intro bound free) where
format Eta {} = error "Eta is going away"
format FuncType { funcTypeArgs = args, funcTypeRetTy = retty } =
let
argdocs = map format args
rettydoc = format retty
in
tupleDoc argdocs <+> string "->" </> nest nestLevel rettydoc
format RecordType { recTypeBody = body } =
let
bodydocs = map format body
in
tupleDoc bodydocs
format RefineType { refineType = ty, refineCases = cases } =
let
tydoc = format ty
casedocs = map format cases
in
tydoc </> nest nestLevel (string "where" <+> casesDoc casedocs)
format CompType { compType = ty, compCases = cases } =
let
tydoc = format ty
casedocs = map format cases
in
tydoc </> nest nestLevel (string "spec" <+> casesDoc casedocs)
format Quantified { quantType = ty, quantCases = cases, quantKind = kind } =
let
tydoc = format ty
casedocs = map format cases
in
format kind <+> tydoc <> dot <+> casesDoc casedocs
format Lambda { lambdaCases = cases } =
let
casedocs = map format cases
in
string "lambda" <+> casesDoc casedocs
format Record { recFields = fields } =
let
fielddocs = map formatBind (HashMap.toList fields)
in
recordDoc fielddocs
format Tuple { tupleFields = fields } =
let
fielddocs = map format (elems fields)
in
tupleDoc fielddocs
format Fix { fixSym = sym, fixTerm = term } =
let
symdoc = format sym
termdoc = format term
in
string "fix " <> symdoc <+> equals <//> nest nestLevel termdoc
format Comp { compBody = body } =
let
bodydocs = formatCompList body
in
blockDoc bodydocs
format Elim { elimTerm = term } = format term
format Constructor { constructorSym = sym } = format sym
format Literal { literalVal = lit } = format lit
format BadIntro {} = string "<bad>"
instance (Format bound, Format free, Default bound, Eq bound) =>
Show (Intro bound free) where
show = Lazy.toString . renderOptimal 80 False . format
instance (MonadPositions m, MonadSymbols m, FormatM m bound,
FormatM m free, Default bound, Eq bound) =>
FormatM m (Intro bound free) where
formatM Eta {} = error "Eta is going away"
formatM FuncType { funcTypeArgs = args, funcTypeRetTy = retty } =
do
argdocs <- mapM formatM args
rettydoc <- formatM retty
return (tupleDoc argdocs <+> string "->" </> nest nestLevel rettydoc)
formatM RecordType { recTypeBody = body } =
do
bodydocs <- mapM formatM body
return (tupleDoc bodydocs)
formatM RefineType { refineType = ty, refineCases = cases } =
do
tydoc <- formatM ty
casedocs <- mapM formatM cases
return (tydoc </> nest nestLevel (string "\\where" <+> casesDoc casedocs))
formatM CompType { compType = ty, compCases = cases } =
do
tydoc <- formatM ty
casedocs <- mapM formatM cases
return (tydoc </> nest nestLevel (string "\\spec" <+> casesDoc casedocs))
formatM Quantified { quantType = ty, quantCases = cases, quantKind = kind } =
do
tydoc <- formatM ty
casedocs <- mapM formatM cases
return (format kind <+> tydoc <> dot <+> casesDoc casedocs)
formatM Lambda { lambdaCases = cases } =
do
casedocs <- mapM formatM cases
return (string "\\lambda" <+> casesDoc casedocs)
formatM Record { recFields = fields } =
do
fielddocs <- mapM formatMBind (HashMap.toList fields)
return (recordDoc fielddocs)
formatM Tuple { tupleFields = fields } =
do
fielddocs <- mapM formatM (elems fields)
return (tupleDoc fielddocs)
formatM Fix { fixSym = sym, fixTerm = term } =
do
symdoc <- formatM sym
termdoc <- formatM term
return (string "fix " <> symdoc <+> equals <//> nest nestLevel termdoc)
formatM Comp { compBody = body } =
do
bodydoc <- formatMCompList body
return $! blockDoc bodydoc
formatM Elim { elimTerm = term } = formatM term
formatM Constructor { constructorSym = sym } = formatM sym
formatM Literal { literalVal = lit } = formatM lit
formatM BadIntro {} = return $! string "<bad>"
instance (Format bound, Format free, Default bound, Eq bound) =>
Format (Elim bound free) where
format Call { callFunc = func, callArg = arg } =
let
argdoc = format arg
funcdoc = format func
in
funcdoc </> argdoc
format Typed { typedTerm = term, typedType = ty } =
let
termdoc = format term
typedoc = format ty
in
termdoc <+> colon </> nest nestLevel typedoc
format Var { varSym = sym } = format sym
format BadElim {} = string "<bad>"
instance (Format bound, Format free, Default bound, Eq bound) =>
Show (Elim bound free) where
show = Lazy.toString . renderOptimal 80 False . format
instance (MonadPositions m, MonadSymbols m, FormatM m bound,
FormatM m free, Default bound, Eq bound) =>
FormatM m (Elim bound free) where
formatM Call { callFunc = func, callArg = arg } =
do
argdocs <- formatM arg
funcdoc <- formatM func
return (funcdoc </> argdocs)
formatM Typed { typedTerm = term, typedType = ty } =
do
termdoc <- formatM term
typedoc <- formatM ty
return (termdoc <+> colon </> nest nestLevel typedoc)
formatM Var { varSym = sym } = formatM sym
formatM BadElim {} = return $! string "<bad>"
instance (Format bound, Format free, Default bound, Eq bound) =>
Format (Cmd bound free) where
format Value { valTerm = term } = format term
format Eval { evalTerm = term } = string "do" </> nest nestLevel (format term)
format BadCmd {} = string "BadCmd"
instance (Format bound, Format free, Default bound, Eq bound) =>
Show (Cmd bound free) where
show = Lazy.toString . renderOptimal 80 False . format
instance (MonadPositions m, MonadSymbols m, FormatM m bound,
FormatM m free, Default bound, Eq bound) =>
FormatM m (Cmd bound free) where
formatM Value { valTerm = term } = formatM term
formatM Eval { evalTerm = term } =
do
termdoc <- formatM term
return $! string "do" </> nest nestLevel termdoc
formatM BadCmd {} = return $! string "<bad>"
instance (Format bound, Format free, Default bound, Eq bound) =>
Format (Comp bound free) where
format comp =
let
stmlist = formatCompList comp
in
stmsDoc stmlist
instance (Format bound, Format free, Default bound, Eq bound) =>
Show (Comp bound free) where
show = Lazy.toString . renderOptimal 80 False . format
instance (MonadPositions m, MonadSymbols m, FormatM m bound,
FormatM m free, Default bound, Eq bound) =>
FormatM m (Comp bound free) where
formatM comp =
do
stmlist <- formatMCompList comp
return $! stmsDoc stmlist
numPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text) =>
PU [NodeG [] tag text] Literal
numPickler =
let
revfunc Num { numVal = num } = (numerator num, denominator num)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(numer, denom) -> Num { numVal = numer % denom }, revfunc)
(xpElemAttrs (gxFromString "Num")
(xpPair (xpAttr (gxFromString "numerator") xpPrim)
(xpAttr (gxFromString "denominator") xpPrim)))
strPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text) =>
PU [NodeG [] tag text] Literal
strPickler =
let
revfunc Str { strVal = str } = gxFromByteString str
revfunc _ = error $! "Can't convert"
in
xpWrap (\str -> Str { strVal = gxToByteString str }, revfunc)
(xpElemNodes (gxFromString "Str")
(xpElemNodes (gxFromString "value")
(xpContent xpText0)))
charPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text) =>
PU [NodeG [] tag text] Literal
charPickler =
let
revfunc Char { charVal = chr } = chr
revfunc _ = error $! "Can't convert"
in
xpWrap (\chr -> Char { charVal = chr }, revfunc)
(xpElemAttrs (gxFromString "Char")
(xpAttr (gxFromString "value") xpPrim))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
XmlPickler [NodeG [] tag text] Literal where
xpickle =
let
picker Num {} = 0
picker Str {} = 1
picker Char {} = 2
in
xpAlt picker [numPickler, strPickler, charPickler]
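-- For orientation (an assumed rendering, not taken from the original
-- sources): with the picklers above, the literal 1/2 would serialize
-- to roughly
--
-- > <Num numerator="1" denominator="2"/>
--
-- while a string literal becomes a nested element:
--
-- > <Str><value>hello</value></Str>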
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
XmlPickler [(tag, text)] Quantifier where
xpickle = xpAlt fromEnum [xpWrap (const Forall, const ())
(xpAttrFixed (gxFromString "kind")
(gxFromString "forall")),
                            xpWrap (const Exists, const ())
(xpAttrFixed (gxFromString "kind")
(gxFromString "exists"))]
mapPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler (Attributes tag text) key,
XmlPickler [NodeG [] tag text] val,
Hashable key, Eq key) =>
String -> PU [NodeG [] tag text] (HashMap key val)
mapPickler entname =
xpWrap (HashMap.fromList, HashMap.toList)
(xpList (xpElem (gxFromString entname) xpickle xpickle))
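-- mapPickler turns each map entry into one element named entname, with
-- the key pickled into that element's attributes and the value into its
-- child nodes.  A usage sketch matching the pattern in
-- deconstructPickler below (names as they appear there):
--
-- > xpElemNodes (gxFromString "binds") (mapPickler "field")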
deconstructPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Pattern bound)
deconstructPickler =
let
revfunc Deconstruct { deconstructStrict = strict, deconstructBinds = binds,
deconstructConstructor = sym, deconstructPos = pos } =
((sym, strict), (binds, pos))
revfunc _ = error $! "Can't convert"
in
xpWrap (\((sym, strict), (binds, pos)) ->
Deconstruct { deconstructStrict = strict, deconstructBinds = binds,
deconstructConstructor = sym, deconstructPos = pos },
revfunc)
(xpElem (gxFromString "Deconstruct")
(xpPair xpickle (xpAttr (gxFromString "strict") xpPrim))
(xpPair (xpElemNodes (gxFromString "binds")
(mapPickler "field"))
(xpElemNodes (gxFromString "pos") xpickle)))
asPickler :: (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Pattern bound)
asPickler =
let
revfunc As { asName = sym, asBind = pat, asPos = pos } = (sym, (pat, pos))
revfunc _ = error $! "Can't convert"
in
xpWrap (\(sym, (pat, pos)) -> As { asName = sym, asBind = pat,
asPos = pos }, revfunc)
(xpElem (gxFromString "As") xpickle
(xpPair (xpElemNodes (gxFromString "pattern") xpickle)
(xpElemNodes (gxFromString "pair") xpickle)))
namePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Pattern bound)
namePickler =
let
revfunc Name { nameSym = sym, namePos = pos } = (sym, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(sym, pos) -> Name { nameSym = sym, namePos = pos }, revfunc)
(xpElem (gxFromString "Name") xpickle
(xpElemNodes (gxFromString "pos") xpickle))
constantPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] bound,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Pattern bound)
constantPickler =
let
revfunc Exact { exactLiteral = v, exactPos = pos } = (v, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(v, pos) -> Exact { exactLiteral = v, exactPos = pos }, revfunc)
(xpElemNodes (gxFromString "Exact")
(xpPair (xpElemNodes (gxFromString "literal") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
Hashable bound, Eq bound) =>
XmlPickler [NodeG [] tag text] (Pattern bound) where
xpickle =
let
picker Deconstruct {} = 0
picker As {} = 1
picker Name {} = 2
picker Exact {} = 3
in
xpAlt picker [ deconstructPickler, asPickler,
namePickler, constantPickler ]
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
XmlPickler [NodeG [] tag text] (Case bound free) where
xpickle =
xpWrap (\(pat, body, pos) -> Case { casePat = pat, caseBody = body,
casePos = pos },
\Case { casePat = pat, caseBody = body, casePos = pos } ->
(pat, body, pos))
(xpElemNodes (gxFromString "Case")
(xpTriple (xpElemNodes (gxFromString "pattern") xpickle)
(xpElemNodes (gxFromString "body") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
XmlPickler [NodeG [] tag text] (Element bound free) where
xpickle =
xpWrap (\(sym, (pat, ty, pos)) -> Element { elemName = sym, elemPat = pat,
elemType = ty, elemPos = pos },
\Element { elemName = sym, elemPat = pat,
elemType = ty, elemPos = pos } -> (sym, (pat, ty, pos)))
(xpElem (gxFromString "Element") xpickle
(xpTriple (xpElemNodes (gxFromString "pattern") xpickle)
(xpElemNodes (gxFromString "body") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
-- The upper bound must be length l - 1, or the final element is undefined.
listToArr :: [a] -> Array Word a
listToArr [] = listArray (1, 0) []
listToArr l = listArray (0, fromIntegral (length l) - 1) l
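-- A quick hedged sanity check for listToArr (zero-based Word indices):
--
-- > elems (listToArr "abc") == "abc"
-- > bounds (listToArr "abc") == (0, 2)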
funcTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
funcTypePickler =
let
revfunc FuncType { funcTypeArgs = args, funcTypeRetTy = retty,
funcTypeArgOrder = ord, funcTypePos = pos } =
(args, elems ord, retty, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(args, ord, retty, pos) ->
FuncType { funcTypeArgs = args, funcTypeRetTy = retty,
funcTypeArgOrder = listToArr ord, funcTypePos = pos },
revfunc)
(xpElemNodes (gxFromString "FuncType")
(xp4Tuple (xpElemNodes (gxFromString "args") xpickle)
(xpElemNodes (gxFromString "order") xpickle)
(xpElemNodes (gxFromString "retty") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
recordTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
recordTypePickler =
let
revfunc RecordType { recTypeBody = body, recTypeOrder = ord,
recTypePos = pos } = (body, elems ord, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(body, ord, pos) -> RecordType { recTypeOrder = listToArr ord,
recTypeBody = body,
recTypePos = pos }, revfunc)
(xpElemNodes (gxFromString "RecordType")
(xpTriple (xpList (xpElemNodes (gxFromString "body")
xpickle))
(xpList (xpElemNodes (gxFromString "order")
xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
refineTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
refineTypePickler =
let
revfunc RefineType { refineType = ty, refineCases = cases,
refinePos = pos } = (ty, cases, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(ty, cases, pos) -> RefineType { refineType = ty,
refineCases = cases,
refinePos = pos },
revfunc)
(xpElemNodes (gxFromString "RefineType")
(xpTriple (xpElemNodes (gxFromString "type") xpickle)
(xpList (xpElemNodes (gxFromString "cases")
xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
compTypePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
compTypePickler =
let
    -- The position field name compTypePos is an assumption, made by
    -- analogy with the other constructors' position fields; this
    -- pickler must build CompType, since the XmlPickler instance
    -- routes CompType values here.
    revfunc CompType { compType = ty, compCases = cases,
                       compTypePos = pos } = (ty, cases, pos)
revfunc _ = error $! "Can't convert"
in
    xpWrap (\(ty, cases, pos) -> CompType { compType = ty,
                                            compCases = cases,
                                            compTypePos = pos },
revfunc)
(xpElemNodes (gxFromString "RefineType")
(xpTriple (xpElemNodes (gxFromString "type") xpickle)
(xpList (xpElemNodes (gxFromString "cases")
xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
quantifiedPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
quantifiedPickler =
let
revfunc Quantified { quantKind = kind, quantType = ty, quantCases = cases,
quantPos = pos } = (kind, (ty, cases, pos))
revfunc _ = error $! "Can't convert"
in
xpWrap (\(kind, (ty, cases, pos)) ->
Quantified { quantKind = kind, quantType = ty, quantCases = cases,
quantPos = pos },
revfunc)
(xpElem (gxFromString "Quantified") xpickle
(xpTriple (xpElemNodes (gxFromString "type") xpickle)
(xpList (xpElemNodes (gxFromString "cases")
xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
lambdaPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
lambdaPickler =
let
revfunc Lambda { lambdaCases = cases, lambdaPos = pos } = (cases, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(cases, pos) -> Lambda { lambdaCases = cases, lambdaPos = pos },
revfunc)
(xpElemNodes (gxFromString "Lambda")
(xpPair (xpList (xpElemNodes (gxFromString "cases")
xpickle))
(xpElemNodes (gxFromString "pos") xpickle)))
recordPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
recordPickler =
let
revfunc Record { recFields = vals, recPos = pos } = (vals, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(vals, pos) -> Record { recFields = vals, recPos = pos }, revfunc)
(xpElemNodes (gxFromString "Record")
(xpPair (xpElemNodes (gxFromString "fields")
(mapPickler "field"))
(xpElemNodes (gxFromString "pos") xpickle)))
tuplePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
tuplePickler =
let
revfunc Tuple { tupleFields = vals, tuplePos = pos } = (elems vals, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(vals, pos) -> Tuple { tupleFields = listToArr vals,
tuplePos = pos }, revfunc)
(xpElemNodes (gxFromString "Record")
(xpPair (xpElemNodes (gxFromString "fields") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
fixPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
fixPickler =
let
revfunc Fix { fixSym = sym, fixTerm = term, fixPos = pos } =
(sym, term, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(sym, term, pos) -> Fix { fixSym = sym, fixTerm = term,
fixPos = pos }, revfunc)
(xpElemNodes (gxFromString "Fix")
(xpTriple (xpElemNodes (gxFromString "sym") xpickle)
(xpElemNodes (gxFromString "terms") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
compPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
compPickler =
let
revfunc Comp { compBody = body, compPos = pos } = (body, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(body, pos) -> Comp { compBody = body, compPos = pos }, revfunc)
(xpElemNodes (gxFromString "Comp")
(xpPair (xpElemNodes (gxFromString "body") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
elimPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
elimPickler =
let
revfunc Elim { elimTerm = term } = term
revfunc _ = error $! "Can't convert"
in
xpWrap (Elim, revfunc)
(xpElemNodes (gxFromString "Elim")
(xpElemNodes (gxFromString "term") xpickle))
constructorPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
constructorPickler =
let
revfunc Constructor { constructorSym = sym, constructorPos = pos } =
(sym, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(sym, pos) -> Constructor { constructorSym = sym,
constructorPos = pos },
revfunc)
(xpElem (gxFromString "Constructor") xpickle
(xpElemNodes (gxFromString "pos") xpickle))
literalPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
literalPickler =
let
revfunc Literal { literalVal = lit, literalPos = pos } = (lit, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(lit, pos) -> Literal { literalVal = lit, literalPos = pos },
revfunc)
(xpElemNodes (gxFromString "Literal")
(xpPair (xpElemNodes (gxFromString "literal") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
badIntroPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Intro bound free)
badIntroPickler =
let
revfunc BadIntro { badIntroPos = pos } = pos
revfunc _ = error $! "Can't convert"
in
xpWrap (BadIntro, revfunc)
(xpElemNodes (gxFromString "BadIntro")
(xpElemNodes (gxFromString "pos") xpickle))
instance (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
XmlPickler [NodeG [] tag text] (Intro bound free) where
xpickle =
let
picker FuncType {} = 0
picker RecordType {} = 1
picker RefineType {} = 2
picker CompType {} = 3
picker Quantified {} = 4
picker Lambda {} = 5
picker Record {} = 6
picker Tuple {} = 7
picker Fix {} = 8
picker Comp {} = 9
picker Elim {} = 10
picker Constructor {} = 11
picker Literal {} = 12
picker BadIntro {} = 13
picker Eta {} = error "Eta not supported"
in
xpAlt picker [ funcTypePickler, recordTypePickler, refineTypePickler,
compTypePickler, quantifiedPickler, lambdaPickler,
recordPickler, tuplePickler, fixPickler, compPickler,
elimPickler, constructorPickler, literalPickler,
badIntroPickler ]
callPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Elim bound free)
callPickler =
let
revfunc Call { callFunc = func, callArg = arg,
callPos = pos } = (func, arg, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(func, arg, pos) -> Call { callFunc = func, callArg = arg,
callPos = pos },
revfunc)
(xpElemNodes (gxFromString "Call")
(xpTriple (xpElemNodes (gxFromString "func") xpickle)
(xpElemNodes (gxFromString "arg") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
typedPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Elim bound free)
typedPickler =
let
revfunc Typed { typedTerm = term, typedType = ty,
typedPos = pos } = (term, ty, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(term, ty, pos) -> Typed { typedTerm = term, typedType = ty,
typedPos = pos }, revfunc)
(xpElemNodes (gxFromString "Typed")
(xpTriple (xpElemNodes (gxFromString "term") xpickle)
(xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
varPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Elim bound free)
varPickler =
let
revfunc Var { varSym = sym, varPos = pos } = (sym, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(sym, pos) -> Var { varSym = sym, varPos = pos }, revfunc)
(xpElemNodes (gxFromString "Var")
(xpPair (xpElemNodes (gxFromString "sym") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
badElimPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Elim bound free)
badElimPickler =
let
revfunc BadElim { badElimPos = pos } = pos
revfunc _ = error $! "Can't convert"
in
xpWrap (BadElim, revfunc)
(xpElemNodes (gxFromString "BadElim")
(xpElemNodes (gxFromString "pos") xpickle))
instance (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
XmlPickler [NodeG [] tag text] (Elim bound free) where
xpickle =
let
picker Call {} = 0
picker Typed {} = 1
picker Var {} = 2
picker BadElim {} = 3
in
xpAlt picker [ callPickler, typedPickler, varPickler, badElimPickler ]
valuePickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Cmd bound free)
valuePickler =
let
revfunc Value { valTerm = term, valPos = pos } = (term, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(term, pos) -> Value { valTerm = term, valPos = pos }, revfunc)
(xpElemNodes (gxFromString "Value")
(xpPair (xpElemNodes (gxFromString "term") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
evalPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Cmd bound free)
evalPickler =
let
revfunc Eval { evalTerm = term, evalPos = pos } = (term, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(term, pos) -> Eval { evalTerm = term, evalPos = pos }, revfunc)
(xpElemNodes (gxFromString "Eval")
(xpPair (xpElemNodes (gxFromString "term") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
badCmdPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Cmd bound free)
badCmdPickler =
let
revfunc (BadCmd pos) = pos
revfunc _ = error $! "Can't convert"
in
xpWrap (BadCmd, revfunc)
(xpElemNodes (gxFromString "BadCmd")
(xpElemNodes (gxFromString "pos") xpickle))
instance (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
XmlPickler [NodeG [] tag text] (Cmd bound free) where
xpickle =
let
picker Value {} = 0
picker Eval {} = 1
picker BadCmd {} = 2
in
xpAlt picker [ valuePickler, evalPickler, badCmdPickler ]
seqPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Comp bound free)
seqPickler =
let
revfunc Seq { seqCmd = cmd, seqPat = pat, seqType = ty,
seqNext = next, seqPos = pos } = (cmd, pat, ty, next, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(cmd, pat, ty, next, pos) -> Seq { seqCmd = cmd, seqPat = pat,
seqType = ty, seqNext = next,
seqPos = pos }, revfunc)
           (xpElemNodes (gxFromString "Seq")
(xp5Tuple (xpElemNodes (gxFromString "cmd") xpickle)
(xpElemNodes (gxFromString "pat") xpickle)
(xpElemNodes (gxFromString "type") xpickle)
(xpElemNodes (gxFromString "next") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
endPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Comp bound free)
endPickler =
let
revfunc End { endCmd = cmd, endPos = pos } = (cmd, pos)
revfunc _ = error $! "Can't convert"
in
xpWrap (\(cmd, pos) -> End { endCmd = cmd, endPos = pos }, revfunc)
(xpElemNodes (gxFromString "End")
(xpPair (xpElemNodes (gxFromString "cmd") xpickle)
(xpElemNodes (gxFromString "pos") xpickle)))
badCompPickler :: (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
PU [NodeG [] tag text] (Comp bound free)
badCompPickler =
let
revfunc (BadComp pos) = pos
revfunc _ = error $! "Can't convert"
in
xpWrap (BadComp, revfunc)
(xpElemNodes (gxFromString "BadComp")
(xpElemNodes (gxFromString "pos") xpickle))
instance (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
XmlPickler [(tag, text)] bound,
XmlPickler [NodeG [] tag text] bound,
XmlPickler [NodeG [] tag text] free,
Hashable bound, Eq bound) =>
XmlPickler [NodeG [] tag text] (Comp bound free) where
xpickle =
let
picker Seq {} = 0
picker End {} = 1
picker BadComp {} = 2
in
xpAlt picker [ seqPickler, endPickler, badCompPickler ]
|
emc2/saltlang
|
src/salt/Language/Salt/Core/Syntax.hs
|
bsd-3-clause
| 95,073
| 179
| 44
| 28,630
| 29,249
| 15,639
| 13,610
| -1
| -1
|
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE ScopedTypeVariables #-}
import Control.Monad (zipWithM_)
import Data.Aeson (encode, decode, ToJSON, FromJSON)
import Data.ByteString (hGetContents)
import Data.ByteString.UTF8 (lines, toString)
import Data.Coerce (Coercible, coerce)
import Data.Foldable (traverse_)
import Data.Maybe (fromJust)
import System.Process (CreateProcess(..), StdStream(..), createProcess
,shell)
import Test.Hspec (hspec, expectationFailure, parallel, describe, it
,shouldBe, Expectation, SpecWith)
import Prelude hiding (lines)
import Duffer.Unified (readObject)
import Duffer.Loose.Objects (Ref)
import Duffer.WithRepo (withRepo)
import Duffer.JSON (GitObjectJSON(..), RefJSON(..))
repo :: String
repo = "../.git"
gitDir :: String
gitDir = "GIT_DIR=" ++ repo ++ " "
main :: IO ()
main = let objectTypes = ["blob", "tree", "commit", "tag"] in
traverse objectsOfType objectTypes >>=
hspec . parallel . describe "JSON" . testJSON objectTypes
testJSON :: [String] -> [[Ref]] -> SpecWith ()
testJSON types partitionedRefs = describe "decoding and encoding" $ do
zipWithM_ describeDecodingEncodingAll types partitionedRefs
testRefs $ concat partitionedRefs
describeDecodingEncodingAll :: String -> [Ref] -> SpecWith ()
describeDecodingEncodingAll oType =
it ("correctly decodes and encodes all " ++ oType ++ "s") .
traverse_ (decodeEncodeObject repo)
decodeEncodeObject :: FilePath -> Ref -> Expectation
decodeEncodeObject path ref = withRepo path (readObject ref) >>= maybe
    (expectationFailure $ toString ref ++ " not read")
((roundTrip . GitObjectJSON) >>= shouldBe)
testRefs :: [Ref] -> SpecWith ()
testRefs = it "correctly decodes and encodes all refs" .
traverse_ ((roundTrip . RefJSON) >>= shouldBe)
roundTrip :: forall a b. (Coercible a b, FromJSON a, ToJSON a) => a -> b
roundTrip = coerce @a . fromJust . decode . encode
objectsOfType :: String -> IO [Ref]
objectsOfType objectType = fmap lines $
cmd (gitDir ++ "git rev-list --objects --all")
>|> (gitDir ++
"git cat-file --batch-check='%(objectname) %(objecttype) %(rest)'")
>|> ("grep '^[^ ]* " ++ objectType ++ "'")
>|> "cut -d' ' -f1"
>>= hGetContents
where
cmd command = createProcess (shell command) {std_out = CreatePipe} >>=
\(_, Just handle, _, _) -> return handle
(>|>) handle command = withPipe =<< handle
where withPipe pipe = createProcess (shell command)
{std_out = CreatePipe, std_in = UseHandle pipe} >>=
\(_, Just handle', _, _) -> return handle'
|
vaibhavsagar/duffer.hs
|
duffer-json/test/Spec.hs
|
bsd-3-clause
| 2,755
| 0
| 13
| 643
| 805
| 442
| 363
| 57
| 1
|
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/Text/Internal/Builder/RealFloat/Functions.hs" #-}
{-# LANGUAGE CPP #-}
-- |
-- Module: Data.Text.Internal.Builder.RealFloat.Functions
-- Copyright: (c) The University of Glasgow 1994-2002
-- License: see libraries/base/LICENSE
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
module Data.Text.Internal.Builder.RealFloat.Functions
(
roundTo
) where
roundTo :: Int -> [Int] -> (Int,[Int])
roundTo d is =
case f d True is of
x@(0,_) -> x
(1,xs) -> (1, 1:xs)
_ -> error "roundTo: bad Value"
where
b2 = base `quot` 2
f n _ [] = (0, replicate n 0)
f 0 e (x:xs) | x == b2 && e && all (== 0) xs = (0, []) -- Round to even when at exactly half the base
| otherwise = (if x >= b2 then 1 else 0, [])
f n _ (i:xs)
| i' == base = (1,0:ds)
| otherwise = (0,i':ds)
where
(c,ds) = f (n-1) (even i) xs
i' = c + i
base = 10
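-- A small worked illustration of 'roundTo' (a sketch only; the digit lists
-- below are arbitrary examples). The input list holds the fractional digits of
-- a decimal number, and the first component of the result is the carry into
-- the integer part; ties round to even:
--
--   roundTo 2 [1,2,5] == (0,[1,2])   -- 0.125 -> 0.12 (tie rounds to even digit)
--   roundTo 2 [1,3,5] == (0,[1,4])   -- 0.135 -> 0.14
--   roundTo 1 [9,9]   == (1,[1,0])   -- 0.99  -> 1.0, producing a carry
_roundToExamples :: Bool
_roundToExamples =
  roundTo 2 [1,2,5] == (0,[1,2]) &&
  roundTo 2 [1,3,5] == (0,[1,4]) &&
  roundTo 1 [9,9]   == (1,[1,0])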
|
phischu/fragnix
|
tests/packages/scotty/Data.Text.Internal.Builder.RealFloat.Functions.hs
|
bsd-3-clause
| 1,184
| 0
| 12
| 367
| 346
| 195
| 151
| 22
| 6
|
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "Data/Vector/Mutable.hs" #-}
{-# LANGUAGE CPP, DeriveDataTypeable, MultiParamTypeClasses, FlexibleInstances, BangPatterns, TypeFamilies #-}
-- |
-- Module : Data.Vector.Mutable
-- Copyright : (c) Roman Leshchinskiy 2008-2010
-- License : BSD-style
--
-- Maintainer : Roman Leshchinskiy <rl@cse.unsw.edu.au>
-- Stability : experimental
-- Portability : non-portable
--
-- Mutable boxed vectors.
--
module Data.Vector.Mutable (
-- * Mutable boxed vectors
MVector(..), IOVector, STVector,
-- * Accessors
-- ** Length information
length, null,
-- ** Extracting subvectors
slice, init, tail, take, drop, splitAt,
unsafeSlice, unsafeInit, unsafeTail, unsafeTake, unsafeDrop,
-- ** Overlapping
overlaps,
-- * Construction
-- ** Initialisation
new, unsafeNew, replicate, replicateM, clone,
-- ** Growing
grow, unsafeGrow,
-- ** Restricting memory usage
clear,
-- * Accessing individual elements
read, write, modify, swap,
unsafeRead, unsafeWrite, unsafeModify, unsafeSwap,
-- * Modifying vectors
nextPermutation,
-- ** Filling and copying
set, copy, move, unsafeCopy, unsafeMove
) where
import Control.Monad (when)
import qualified Data.Vector.Generic.Mutable as G
import Data.Primitive.Array
import Control.Monad.Primitive
import Prelude hiding ( length, null, replicate, reverse, read,
take, drop, splitAt, init, tail )
import Data.Typeable ( Typeable )
import qualified Data.Vector.Internal.Check as Ck
-- | Mutable boxed vectors keyed on the monad they live in ('IO' or @'ST' s@).
data MVector s a = MVector {-# UNPACK #-} !Int
{-# UNPACK #-} !Int
{-# UNPACK #-} !(MutableArray s a)
deriving ( Typeable )
type IOVector = MVector RealWorld
type STVector s = MVector s
-- NOTE: This seems unsafe, see http://trac.haskell.org/vector/ticket/54
{-
instance NFData a => NFData (MVector s a) where
rnf (MVector i n arr) = unsafeInlineST $ force i
where
force !ix | ix < n = do x <- readArray arr ix
rnf x `seq` force (ix+1)
| otherwise = return ()
-}
instance G.MVector MVector a where
{-# INLINE basicLength #-}
basicLength (MVector _ n _) = n
{-# INLINE basicUnsafeSlice #-}
basicUnsafeSlice j m (MVector i _ arr) = MVector (i+j) m arr
{-# INLINE basicOverlaps #-}
basicOverlaps (MVector i m arr1) (MVector j n arr2)
= sameMutableArray arr1 arr2
&& (between i j (j+n) || between j i (i+m))
where
between x y z = x >= y && x < z
{-# INLINE basicUnsafeNew #-}
basicUnsafeNew n
= do
arr <- newArray n uninitialised
return (MVector 0 n arr)
{-# INLINE basicInitialize #-}
-- initialization is unnecessary for boxed vectors
basicInitialize _ = return ()
{-# INLINE basicUnsafeReplicate #-}
basicUnsafeReplicate n x
= do
arr <- newArray n x
return (MVector 0 n arr)
{-# INLINE basicUnsafeRead #-}
basicUnsafeRead (MVector i _ arr) j = readArray arr (i+j)
{-# INLINE basicUnsafeWrite #-}
basicUnsafeWrite (MVector i _ arr) j x = writeArray arr (i+j) x
{-# INLINE basicUnsafeCopy #-}
basicUnsafeCopy (MVector i n dst) (MVector j _ src)
= copyMutableArray dst i src j n
basicUnsafeMove dst@(MVector iDst n arrDst) src@(MVector iSrc _ arrSrc)
= case n of
0 -> return ()
1 -> readArray arrSrc iSrc >>= writeArray arrDst iDst
2 -> do
x <- readArray arrSrc iSrc
y <- readArray arrSrc (iSrc + 1)
writeArray arrDst iDst x
writeArray arrDst (iDst + 1) y
_
| overlaps dst src
-> case compare iDst iSrc of
LT -> moveBackwards arrDst iDst iSrc n
EQ -> return ()
GT | (iDst - iSrc) * 2 < n
-> moveForwardsLargeOverlap arrDst iDst iSrc n
| otherwise
-> moveForwardsSmallOverlap arrDst iDst iSrc n
| otherwise -> G.basicUnsafeCopy dst src
{-# INLINE basicClear #-}
basicClear v = G.set v uninitialised
{-# INLINE moveBackwards #-}
moveBackwards :: PrimMonad m => MutableArray (PrimState m) a -> Int -> Int -> Int -> m ()
moveBackwards !arr !dstOff !srcOff !len =
((Ck.check "Data/Vector/Mutable.hs" 150) Ck.Internal) "moveBackwards" "not a backwards move" (dstOff < srcOff)
$ loopM len $ \ i -> readArray arr (srcOff + i) >>= writeArray arr (dstOff + i)
{-# INLINE moveForwardsSmallOverlap #-}
-- Performs a move when dstOff > srcOff, optimized for when the overlap of the intervals is small.
moveForwardsSmallOverlap :: PrimMonad m => MutableArray (PrimState m) a -> Int -> Int -> Int -> m ()
moveForwardsSmallOverlap !arr !dstOff !srcOff !len =
((Ck.check "Data/Vector/Mutable.hs" 157) Ck.Internal) "moveForwardsSmallOverlap" "not a forward move" (dstOff > srcOff)
$ do
tmp <- newArray overlap uninitialised
loopM overlap $ \ i -> readArray arr (dstOff + i) >>= writeArray tmp i
loopM nonOverlap $ \ i -> readArray arr (srcOff + i) >>= writeArray arr (dstOff + i)
loopM overlap $ \ i -> readArray tmp i >>= writeArray arr (dstOff + nonOverlap + i)
where nonOverlap = dstOff - srcOff; overlap = len - nonOverlap
-- Performs a move when dstOff > srcOff, optimized for when the overlap of the intervals is large.
moveForwardsLargeOverlap :: PrimMonad m => MutableArray (PrimState m) a -> Int -> Int -> Int -> m ()
moveForwardsLargeOverlap !arr !dstOff !srcOff !len =
((Ck.check "Data/Vector/Mutable.hs" 168) Ck.Internal) "moveForwardsLargeOverlap" "not a forward move" (dstOff > srcOff)
$ do
queue <- newArray nonOverlap uninitialised
loopM nonOverlap $ \ i -> readArray arr (srcOff + i) >>= writeArray queue i
let mov !i !qTop = when (i < dstOff + len) $ do
x <- readArray arr i
y <- readArray queue qTop
writeArray arr i y
writeArray queue qTop x
mov (i+1) (if qTop + 1 >= nonOverlap then 0 else qTop + 1)
mov dstOff 0
where nonOverlap = dstOff - srcOff
{-# INLINE loopM #-}
loopM :: Monad m => Int -> (Int -> m a) -> m ()
loopM !n k = let
go i = when (i < n) (k i >> go (i+1))
in go 0
uninitialised :: a
uninitialised = error "Data.Vector.Mutable: uninitialised element"
-- Length information
-- ------------------
-- | Length of the mutable vector.
length :: MVector s a -> Int
{-# INLINE length #-}
length = G.length
-- | Check whether the vector is empty
null :: MVector s a -> Bool
{-# INLINE null #-}
null = G.null
-- Extracting subvectors
-- ---------------------
-- | Yield a part of the mutable vector without copying it.
slice :: Int -> Int -> MVector s a -> MVector s a
{-# INLINE slice #-}
slice = G.slice
take :: Int -> MVector s a -> MVector s a
{-# INLINE take #-}
take = G.take
drop :: Int -> MVector s a -> MVector s a
{-# INLINE drop #-}
drop = G.drop
{-# INLINE splitAt #-}
splitAt :: Int -> MVector s a -> (MVector s a, MVector s a)
splitAt = G.splitAt
init :: MVector s a -> MVector s a
{-# INLINE init #-}
init = G.init
tail :: MVector s a -> MVector s a
{-# INLINE tail #-}
tail = G.tail
-- | Yield a part of the mutable vector without copying it. No bounds checks
-- are performed.
unsafeSlice :: Int -- ^ starting index
-> Int -- ^ length of the slice
-> MVector s a
-> MVector s a
{-# INLINE unsafeSlice #-}
unsafeSlice = G.unsafeSlice
unsafeTake :: Int -> MVector s a -> MVector s a
{-# INLINE unsafeTake #-}
unsafeTake = G.unsafeTake
unsafeDrop :: Int -> MVector s a -> MVector s a
{-# INLINE unsafeDrop #-}
unsafeDrop = G.unsafeDrop
unsafeInit :: MVector s a -> MVector s a
{-# INLINE unsafeInit #-}
unsafeInit = G.unsafeInit
unsafeTail :: MVector s a -> MVector s a
{-# INLINE unsafeTail #-}
unsafeTail = G.unsafeTail
-- Overlapping
-- -----------
-- | Check whether two vectors overlap.
overlaps :: MVector s a -> MVector s a -> Bool
{-# INLINE overlaps #-}
overlaps = G.overlaps
-- Initialisation
-- --------------
-- | Create a mutable vector of the given length.
new :: PrimMonad m => Int -> m (MVector (PrimState m) a)
{-# INLINE new #-}
new = G.new
-- | Create a mutable vector of the given length. The memory is not initialized.
unsafeNew :: PrimMonad m => Int -> m (MVector (PrimState m) a)
{-# INLINE unsafeNew #-}
unsafeNew = G.unsafeNew
-- | Create a mutable vector of the given length (0 if the length is negative)
-- and fill it with an initial value.
replicate :: PrimMonad m => Int -> a -> m (MVector (PrimState m) a)
{-# INLINE replicate #-}
replicate = G.replicate
-- | Create a mutable vector of the given length (0 if the length is negative)
-- and fill it with values produced by repeatedly executing the monadic action.
replicateM :: PrimMonad m => Int -> m a -> m (MVector (PrimState m) a)
{-# INLINE replicateM #-}
replicateM = G.replicateM
-- | Create a copy of a mutable vector.
clone :: PrimMonad m => MVector (PrimState m) a -> m (MVector (PrimState m) a)
{-# INLINE clone #-}
clone = G.clone
-- Growing
-- -------
-- | Grow a vector by the given number of elements. The number must be
-- positive.
grow :: PrimMonad m
=> MVector (PrimState m) a -> Int -> m (MVector (PrimState m) a)
{-# INLINE grow #-}
grow = G.grow
-- | Grow a vector by the given number of elements. The number must be
-- positive but this is not checked.
unsafeGrow :: PrimMonad m
=> MVector (PrimState m) a -> Int -> m (MVector (PrimState m) a)
{-# INLINE unsafeGrow #-}
unsafeGrow = G.unsafeGrow
-- Restricting memory usage
-- ------------------------
-- | Reset all elements of the vector to some undefined value, clearing all
-- references to external objects. This is usually a noop for unboxed vectors.
clear :: PrimMonad m => MVector (PrimState m) a -> m ()
{-# INLINE clear #-}
clear = G.clear
-- Accessing individual elements
-- -----------------------------
-- | Yield the element at the given position.
read :: PrimMonad m => MVector (PrimState m) a -> Int -> m a
{-# INLINE read #-}
read = G.read
-- | Replace the element at the given position.
write :: PrimMonad m => MVector (PrimState m) a -> Int -> a -> m ()
{-# INLINE write #-}
write = G.write
-- | Modify the element at the given position.
modify :: PrimMonad m => MVector (PrimState m) a -> (a -> a) -> Int -> m ()
{-# INLINE modify #-}
modify = G.modify
-- | Swap the elements at the given positions.
swap :: PrimMonad m => MVector (PrimState m) a -> Int -> Int -> m ()
{-# INLINE swap #-}
swap = G.swap
-- | Yield the element at the given position. No bounds checks are performed.
unsafeRead :: PrimMonad m => MVector (PrimState m) a -> Int -> m a
{-# INLINE unsafeRead #-}
unsafeRead = G.unsafeRead
-- | Replace the element at the given position. No bounds checks are performed.
unsafeWrite :: PrimMonad m => MVector (PrimState m) a -> Int -> a -> m ()
{-# INLINE unsafeWrite #-}
unsafeWrite = G.unsafeWrite
-- | Modify the element at the given position. No bounds checks are performed.
unsafeModify :: PrimMonad m => MVector (PrimState m) a -> (a -> a) -> Int -> m ()
{-# INLINE unsafeModify #-}
unsafeModify = G.unsafeModify
-- | Swap the elements at the given positions. No bounds checks are performed.
unsafeSwap :: PrimMonad m => MVector (PrimState m) a -> Int -> Int -> m ()
{-# INLINE unsafeSwap #-}
unsafeSwap = G.unsafeSwap
-- Filling and copying
-- -------------------
-- | Set all elements of the vector to the given value.
set :: PrimMonad m => MVector (PrimState m) a -> a -> m ()
{-# INLINE set #-}
set = G.set
-- | Copy a vector. The two vectors must have the same length and may not
-- overlap.
copy :: PrimMonad m
=> MVector (PrimState m) a -> MVector (PrimState m) a -> m ()
{-# INLINE copy #-}
copy = G.copy
-- | Copy a vector. The two vectors must have the same length and may not
-- overlap. This is not checked.
unsafeCopy :: PrimMonad m => MVector (PrimState m) a -- ^ target
-> MVector (PrimState m) a -- ^ source
-> m ()
{-# INLINE unsafeCopy #-}
unsafeCopy = G.unsafeCopy
-- | Move the contents of a vector. The two vectors must have the same
-- length.
--
-- If the vectors do not overlap, then this is equivalent to 'copy'.
-- Otherwise, the copying is performed as if the source vector were
-- copied to a temporary vector and then the temporary vector was copied
-- to the target vector.
move :: PrimMonad m
=> MVector (PrimState m) a -> MVector (PrimState m) a -> m ()
{-# INLINE move #-}
move = G.move
-- | Move the contents of a vector. The two vectors must have the same
-- length, but this is not checked.
--
-- If the vectors do not overlap, then this is equivalent to 'unsafeCopy'.
-- Otherwise, the copying is performed as if the source vector were
-- copied to a temporary vector and then the temporary vector was copied
-- to the target vector.
unsafeMove :: PrimMonad m => MVector (PrimState m) a -- ^ target
-> MVector (PrimState m) a -- ^ source
-> m ()
{-# INLINE unsafeMove #-}
unsafeMove = G.unsafeMove
-- | Compute the next (lexicographically) permutation of the given vector in-place.
-- Returns False when the input is the last permutation.
nextPermutation :: (PrimMonad m,Ord e) => MVector (PrimState m) e -> m Bool
{-# INLINE nextPermutation #-}
nextPermutation = G.nextPermutation
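-- A minimal usage sketch (illustrative only; the indices and values are
-- arbitrary example data): build a small boxed mutable vector in IO, fill it,
-- swap two elements, and read one back.
_exampleUsage :: IO Int
_exampleUsage = do
  v <- new 3                -- three uninitialised slots
  write v 0 (10 :: Int)
  write v 1 20
  write v 2 30
  swap v 0 2                -- contents are now [30, 20, 10]
  read v 0                  -- yields 30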
|
phischu/fragnix
|
tests/packages/scotty/Data.Vector.Mutable.hs
|
bsd-3-clause
| 13,639
| 0
| 19
| 3,330
| 3,293
| 1,720
| 1,573
| 235
| 2
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-2012
Note [Unarisation]
~~~~~~~~~~~~~~~~~~
The idea of this pass is to translate away *all* unboxed-tuple binders. So for example:
f (x :: (# Int, Bool #)) = f x + f (# 1, True #)
==>
f (x1 :: Int) (x2 :: Bool) = f x1 x2 + f 1 True
It is important that we do this at the STG level and NOT at the core level
because it would be very hard to make this pass Core-type-preserving.
STG fed to the code generators *must* be unarised because the code generators do
not support unboxed tuple binders natively.
Note [Unarisation and arity]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because of unarisation, the arity that will be recorded in the generated info table
for an Id may be larger than the idArity. Instead we record what we call the RepArity,
which is the Arity taking into account any expanded arguments, and corresponds to
the number of (possibly-void) *registers* arguments will arrive in.
-}
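{-
A concrete (informal) illustration of the arity point above, reusing the
example from Note [Unarisation]: before unarisation

    f (x :: (# Int, Bool #)) = ...

has idArity 1, but its unboxed-tuple argument is expanded into two components,
so its arguments arrive in two registers and the RepArity recorded in the
info table is 2.
-}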
{-# LANGUAGE CPP #-}
module UnariseStg (unarise) where
#include "HsVersions.h"
import CoreSyn
import StgSyn
import VarEnv
import UniqSupply
import Id
import MkId (realWorldPrimId)
import Type
import TysWiredIn
import DataCon
import VarSet
import OccName
import Name
import Util
import Outputable
import BasicTypes
-- | A mapping from unboxed-tuple binders to the Ids they were expanded to.
--
-- INVARIANT: Ids in the range don't have unboxed tuple types.
--
-- Those in-scope variables without unboxed-tuple types are not present in
-- the domain of the mapping at all.
type UnariseEnv = VarEnv [Id]
ubxTupleId0 :: Id
ubxTupleId0 = dataConWorkId (tupleDataCon Unboxed 0)
unarise :: UniqSupply -> [StgBinding] -> [StgBinding]
unarise us binds = zipWith (\us -> unariseBinding us init_env) (listSplitUniqSupply us) binds
where -- See Note [Nullary unboxed tuple] in Type.hs
init_env = unitVarEnv ubxTupleId0 [realWorldPrimId]
unariseBinding :: UniqSupply -> UnariseEnv -> StgBinding -> StgBinding
unariseBinding us rho bind = case bind of
StgNonRec x rhs -> StgNonRec x (unariseRhs us rho rhs)
StgRec xrhss -> StgRec $ zipWith (\us (x, rhs) -> (x, unariseRhs us rho rhs))
(listSplitUniqSupply us) xrhss
unariseRhs :: UniqSupply -> UnariseEnv -> StgRhs -> StgRhs
unariseRhs us rho rhs = case rhs of
StgRhsClosure ccs b_info fvs update_flag srt args expr
-> StgRhsClosure ccs b_info (unariseIds rho fvs) update_flag
(unariseSRT rho srt) args' (unariseExpr us' rho' expr)
where (us', rho', args') = unariseIdBinders us rho args
StgRhsCon ccs con args
-> StgRhsCon ccs con (unariseArgs rho args)
------------------------
unariseExpr :: UniqSupply -> UnariseEnv -> StgExpr -> StgExpr
unariseExpr _ rho (StgApp f args)
| null args
, UbxTupleRep tys <- repType (idType f)
= -- Particularly important where (##) is concerned
-- See Note [Nullary unboxed tuple]
StgConApp (tupleDataCon Unboxed (length tys))
(map StgVarArg (unariseId rho f))
| otherwise
= StgApp f (unariseArgs rho args)
unariseExpr _ _ (StgLit l)
= StgLit l
unariseExpr _ rho (StgConApp dc args)
| isUnboxedTupleCon dc = StgConApp (tupleDataCon Unboxed (length args')) args'
| otherwise = StgConApp dc args'
where
args' = unariseArgs rho args
unariseExpr _ rho (StgOpApp op args ty)
= StgOpApp op (unariseArgs rho args) ty
unariseExpr us rho (StgLam xs e)
= StgLam xs' (unariseExpr us' rho' e)
where
(us', rho', xs') = unariseIdBinders us rho xs
unariseExpr us rho (StgCase e case_lives alts_lives bndr srt alt_ty alts)
= StgCase (unariseExpr us1 rho e) (unariseLives rho case_lives)
(unariseLives rho alts_lives) bndr (unariseSRT rho srt)
alt_ty alts'
where
(us1, us2) = splitUniqSupply us
alts' = unariseAlts us2 rho alt_ty bndr alts
unariseExpr us rho (StgLet bind e)
= StgLet (unariseBinding us1 rho bind) (unariseExpr us2 rho e)
where
(us1, us2) = splitUniqSupply us
unariseExpr us rho (StgLetNoEscape live_in_let live_in_bind bind e)
= StgLetNoEscape (unariseLives rho live_in_let) (unariseLives rho live_in_bind)
(unariseBinding us1 rho bind) (unariseExpr us2 rho e)
where
(us1, us2) = splitUniqSupply us
unariseExpr us rho (StgTick tick e)
= StgTick tick (unariseExpr us rho e)
------------------------
unariseAlts :: UniqSupply -> UnariseEnv -> AltType -> Id -> [StgAlt] -> [StgAlt]
unariseAlts us rho (UbxTupAlt n) bndr [(DEFAULT, [], [], e)]
= [(DataAlt (tupleDataCon Unboxed n), ys, uses, unariseExpr us2' rho' e)]
where
(us2', rho', ys) = unariseIdBinder us rho bndr
uses = replicate (length ys) (not (isDeadBinder bndr))
unariseAlts us rho (UbxTupAlt n) bndr [(DataAlt _, ys, uses, e)]
= [(DataAlt (tupleDataCon Unboxed n), ys', uses', unariseExpr us2' rho'' e)]
where
(us2', rho', ys', uses') = unariseUsedIdBinders us rho ys uses
rho'' = extendVarEnv rho' bndr ys'
unariseAlts _ _ (UbxTupAlt _) _ alts
= pprPanic "unariseExpr: strange unboxed tuple alts" (ppr alts)
unariseAlts us rho _ _ alts
= zipWith (\us alt -> unariseAlt us rho alt) (listSplitUniqSupply us) alts
--------------------------
unariseAlt :: UniqSupply -> UnariseEnv -> StgAlt -> StgAlt
unariseAlt us rho (con, xs, uses, e)
= (con, xs', uses', unariseExpr us' rho' e)
where
(us', rho', xs', uses') = unariseUsedIdBinders us rho xs uses
------------------------
unariseSRT :: UnariseEnv -> SRT -> SRT
unariseSRT _ NoSRT = NoSRT
unariseSRT rho (SRTEntries ids) = SRTEntries (concatMapVarSet (unariseId rho) ids)
unariseLives :: UnariseEnv -> StgLiveVars -> StgLiveVars
unariseLives rho ids = concatMapVarSet (unariseId rho) ids
unariseArgs :: UnariseEnv -> [StgArg] -> [StgArg]
unariseArgs rho = concatMap (unariseArg rho)
unariseArg :: UnariseEnv -> StgArg -> [StgArg]
unariseArg rho (StgVarArg x) = map StgVarArg (unariseId rho x)
unariseArg _ (StgLitArg l) = [StgLitArg l]
unariseIds :: UnariseEnv -> [Id] -> [Id]
unariseIds rho = concatMap (unariseId rho)
unariseId :: UnariseEnv -> Id -> [Id]
unariseId rho x
| Just ys <- lookupVarEnv rho x
= ASSERT2( case repType (idType x) of UbxTupleRep _ -> True; _ -> x == ubxTupleId0
, text "unariseId: not unboxed tuple" <+> ppr x )
ys
| otherwise
= ASSERT2( case repType (idType x) of UbxTupleRep _ -> False; _ -> True
, text "unariseId: was unboxed tuple" <+> ppr x )
[x]
unariseUsedIdBinders :: UniqSupply -> UnariseEnv -> [Id] -> [Bool]
-> (UniqSupply, UnariseEnv, [Id], [Bool])
unariseUsedIdBinders us rho xs uses
= case mapAccumL2 do_one us rho (zipEqual "unariseUsedIdBinders" xs uses) of
(us', rho', xs_usess) -> uncurry ((,,,) us' rho') (unzip (concat xs_usess))
where
do_one us rho (x, use) = third3 (map (flip (,) use)) (unariseIdBinder us rho x)
unariseIdBinders :: UniqSupply -> UnariseEnv -> [Id] -> (UniqSupply, UnariseEnv, [Id])
unariseIdBinders us rho xs = third3 concat $ mapAccumL2 unariseIdBinder us rho xs
unariseIdBinder :: UniqSupply -> UnariseEnv -> Id -> (UniqSupply, UnariseEnv, [Id])
unariseIdBinder us rho x = case repType (idType x) of
UnaryRep _ -> (us, rho, [x])
UbxTupleRep tys -> let (us0, us1) = splitUniqSupply us
ys = unboxedTupleBindersFrom us0 x tys
rho' = extendVarEnv rho x ys
in (us1, rho', ys)
unboxedTupleBindersFrom :: UniqSupply -> Id -> [UnaryType] -> [Id]
unboxedTupleBindersFrom us x tys = zipWith (mkSysLocalOrCoVar fs) (uniqsFromSupply us) tys
where fs = occNameFS (getOccName x)
concatMapVarSet :: (Var -> [Var]) -> VarSet -> VarSet
concatMapVarSet f xs = mkVarSet [x' | x <- varSetElems xs, x' <- f x]
|
nushio3/ghc
|
compiler/simplStg/UnariseStg.hs
|
bsd-3-clause
| 7,781
| 0
| 13
| 1,648
| 2,384
| 1,237
| 1,147
| 129
| 3
|
{-# LANGUAGE DeriveDataTypeable #-}
module Contract.Date
( Date -- incl. Ord instance providing compare function and (<)
, DateError
, at -- was "?" in SML. Use the Read instance instead
, addDays, dateDiff, ppDate, ppDays
) where
-- to define the exception
import Control.Exception as E
import Data.Typeable
import Data.Char(isDigit)
import Text.Printf
-- everything implemented "on foot". Could use a library module
type Year = Int
type Month = Int
type Day = Int
-- | Dates are represented as year, month, and day.
data Date = Date Year Month Day
deriving (Eq,Ord, Show)
isLeapYear :: Year -> Bool
isLeapYear year = year `mod` 400 == 0 ||
(not (year `mod` 100 == 0) && year `mod` 4 == 0)
daysInYear year = if isLeapYear year then 366 else 365
daysInMonth :: Year -> Month -> Int
daysInMonth year m =
let m31 = [1,3,5,7,8,10,12]
daysInFeb = if isLeapYear year then 29 else 28
in if m `elem` m31 then 31
else if m == 2 then daysInFeb else 30
data DateError = DateError String deriving (Typeable,Show)
instance Exception DateError
check :: Date -> Date
check d@(Date year month day) = if
year >= 1 && year <= 9999 && -- there is no such thing as year 0!
month >= 1 && month <= 12 &&
day >= 1 && day <= daysInMonth year month
then d else dateError (ppDate d)
dateError s = -- print (s ++ "\n")
throw (DateError ("Expecting date in the form YYYY-MM-DD - got " ++ s))
-- | read a date from a string in format yyyy-mm-dd
at :: String -> Date
at s | length s /= 10 = dateError s
| s!!4 /= '-' = dateError s
| s!!7 /= '-' = dateError s
| not allDigits = dateError s
| otherwise = check result
where substr a b = take b (drop a s)
y = substr 0 4
m = substr 5 2
d = substr 8 2
allDigits = all isDigit (y++m++d)
result = Date (read y) (read m) (read d)
-- (\e -> dateError (s ++ "\n" ++ show (e::ErrorCall)))
-- | Dates can only be read in format yyyy-mm-dd
instance Read Date where
readsPrec _ d = [(at (take 10 d), drop 10 d)]
-- | print a date in format yyyy-mm-dd (padding with zeros)
ppDate :: Date -> String
ppDate (Date year month day) = printf "%04d-%02d-%02d" year month day
-- | add given number of days to a date (result date is checked)
addDays :: Int -> Date -> Date
addDays 0 d = check d
addDays i (d@(Date year month day))
| i < 0 = subDays (-i) d
| otherwise = let days = daysInMonth year month
n = days - day
next = if month == 12 then Date (year+1) 1 1
else Date year (month+1) 1
in if i <= n then check (Date year month (day+i))
else addDays (i-n-1) next
-- | subtract days (used for adding negative amount of days to a date)
subDays 0 d = check d
subDays i (d@(Date year month day))
| i < 0 = addDays (-i) d -- should not occur, not directly callable
| otherwise = if i < day then check (Date year month (day-i))
else let (y,m) = if month == 1 then (year-1,12)
else (year,month-1)
d = daysInMonth y m
in subDays (i-day) (Date y m d)
-- derived Ord, comparisons component-wise left-to-right, no big deal
-- fun compare ({year=y1,month=m1,day=d1}, {year=y2,month=m2,day=d2}) =
-- if y1 < y2 then LESS
-- else (if y1 = y2 then
-- if m1 < m2 then LESS
-- else if m1 = m2 then
-- (if d1 < d2 then LESS
-- else if d1 = d2 then EQUAL
-- else GREATER)
-- else GREATER
-- else GREATER)
-- | compute day difference to go from d1 to d2
dateDiff :: Date -> Date -> Int
dateDiff d1@(Date y1 m1 n1) d2@(Date y2 m2 n2)
= case compare d1 d2 of
EQ -> 0
GT -> - (dateDiff d2 d1)
LT -> -- d1 < d2
if y1 == y2 then
if m1 == m2 then n2 - n1
else -- m1 < m2, go to next month
daysInMonth y1 m1 - n1 + 1 +
dateDiff (Date y1 (m1+1) 1) d2
else -- y1 < y2, but step fwd in months (leapyears!)
let next = if m1 == 12 then Date (y1+1) 1 n1
else Date y1 (m1+1) n1
in daysInMonth y1 m1 + dateDiff next d2
-- | print a number of days as years/months/days, using 30/360 convention
ppDays :: Int -> String
ppDays 0 = "0d"
ppDays t = if null s then "0d" else s
where years = t `div` 360
months = (t `div` 30) `mod` 12 -- (t mod 360) div 30
days = t `mod` 30
str n c = if n == 0 then "" else show n ++ c:[]
s = concat (zipWith str [years,months,days] "ymd")
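-- A few illustrative checks of the functions above (a sketch only; the
-- concrete dates are arbitrary examples):
--
--   ppDate (addDays 1 (at "2023-02-28")) == "2023-03-01"     -- 2023 is not a leap year
--   dateDiff (at "2023-01-01") (at "2024-01-01") == 365
--   ppDays 395 == "1y1m5d"                                   -- 30/360: 1 year, 1 month, 5 days
_dateExamples :: Bool
_dateExamples =
  ppDate (addDays 1 (at "2023-02-28")) == "2023-03-01"
  && dateDiff (at "2023-01-01") (at "2024-01-01") == 365
  && ppDays 395 == "1y1m5d"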
|
HIPERFIT/contracts
|
Haskell/Contract/Date.hs
|
mit
| 5,051
| 0
| 16
| 1,851
| 1,487
| 795
| 692
| 90
| 6
|
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.ST.Strict
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : non-portable (requires universal quantification for runST)
--
-- The strict ST monad (re-export of "Control.Monad.ST")
--
-----------------------------------------------------------------------------
module Control.Monad.ST.Strict (
module Control.Monad.ST
) where
import Prelude
import Control.Monad.ST
|
kaoskorobase/mescaline
|
resources/hugs/packages/base/Control/Monad/ST/Strict.hs
|
gpl-3.0
| 646
| 2
| 5
| 91
| 41
| 32
| 9
| 4
| 0
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-SP">
<title>Retest Add-On</title>
<maps>
<homeID>retest</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/retest/src/main/javahelp/org/zaproxy/addon/retest/resources/help_sr_SP/helpset_sr_SP.hs
|
apache-2.0
| 961
| 77
| 67
| 156
| 411
| 208
| 203
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module Module_Types where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import Control.Applicative (ZipList(..), (<*>))
import Control.Exception
import Control.Monad ( liftM, ap, when )
import Data.ByteString.Lazy (ByteString)
import Data.Functor ( (<$>) )
import Data.Hashable
import Data.Int
import Data.Maybe (catMaybes)
import Data.Text.Lazy ( Text )
import Data.Text.Lazy.Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as T
import Data.Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import Test.QuickCheck.Arbitrary ( Arbitrary(..) )
import Test.QuickCheck ( elements )
import Thrift hiding (ProtocolExnType(..))
import qualified Thrift (ProtocolExnType(..))
import Thrift.Types
import Thrift.Arbitraries
|
chjp2046/fbthrift
|
thrift/compiler/test/fixtures/service-fuzzer/gen-hs/Module_Types.hs
|
apache-2.0
| 1,761
| 0
| 6
| 333
| 370
| 257
| 113
| 35
| 0
|
{-# OPTIONS_GHC -fwarn-unused-matches #-}
module T9824 where
foo = [p| (x, y) |]
|
urbanslug/ghc
|
testsuite/tests/quotes/T9824.hs
|
bsd-3-clause
| 83
| 0
| 4
| 15
| 14
| 11
| 3
| -1
| -1
|
module ShouldCompile where
-- !!! test the overlapping patterns detection.
-- f1 overlaps
f1 "ab" = []
f1 "ab" = []
f1 _ = []
-- f2 overlaps
f2 "ab" = []
f2 ('a':'b':[]) = []
f2 _ = []
-- f3 overlaps
f3 ('a':'b':[]) = []
f3 "ab" = []
f3 _ = []
-- f4 doesn't overlap
f4 "ab" = []
f4 ('a':'b':'c':[]) = []
f4 _ = []
-- f5 doesn't overlap
f5 ('a':'b':'c':[]) = []
f5 "ab" = []
f5 _ = []
-- f6 doesn't overlap
f6 "ab" = []
f6 ('a':[]) = []
f6 _ = []
|
urbanslug/ghc
|
testsuite/tests/deSugar/should_compile/ds051.hs
|
bsd-3-clause
| 453
| 0
| 10
| 110
| 260
| 137
| 123
| 19
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module TeardownTest where
import RIO
import Control.Exception (MaskingState (..), getMaskingState)
import Control.Monad (replicateM)
import qualified Control.Teardown as SUT
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit
tests :: TestTree
tests = testGroup
"teardown"
[ testCase "idempotent execution of teardown action" $ do
callCountRef <- newIORef (0 :: Int)
teardownAction <- SUT.newTeardown
"test cleanup"
(atomicModifyIORef callCountRef (\a -> (a + 1, ())) :: IO ())
replicateM_ 10 (SUT.runTeardown teardownAction)
callCount <- readIORef callCountRef
assertEqual "teardown action got called more than once" 1 callCount
, testCase "failing teardown action does not stop execution" $ do
teardownAction <- SUT.newTeardown "failing teardown"
(error "failing teardown" :: IO ())
result <- SUT.runTeardown teardownAction
replicateM_ 9 (SUT.runTeardown teardownAction)
assertBool "result should report an error" (SUT.didTeardownFail result)
, testCase "thread safe idempotent execution of teardown action" $ do
callCountRef <- newIORef (0 :: Int)
teardownAction <- SUT.newTeardown
"test cleanup"
(atomicModifyIORef callCountRef (\a -> (a + 1, ())) :: IO ())
asyncList <- replicateM
10
(async
-- each async executes teardown 3 times
$ replicateM_ 3 $ SUT.runTeardown_ teardownAction)
mapM_ wait asyncList
callCount <- readIORef callCountRef
assertEqual "teardown action must not be called more than once" 1 callCount
, testCase "teardown tree keeps idempotent guarantees around execution" $ do
callCountRefs <- replicateM 10 $ newIORef (0 :: Int)
teardownAction <-
SUT.newTeardown "bigger system"
$ forM callCountRefs
$ \callCountRef -> SUT.newTeardown
"test cleanup"
(atomicModifyIORef callCountRef (\a -> (a + 1, ())) :: IO ())
replicateM_ 10 (SUT.runTeardown_ teardownAction)
countRefs <- mapM readIORef callCountRefs
assertEqual "teardown action must not be called more than once"
(replicate 10 1)
countRefs
, testCase "teardown action that returns Teardown list returns correct count"
$ do
failedTeardownActions <- replicateM
5
(SUT.newTeardown "test cleanup with failures" (error "nope" :: IO ()))
teardownActions <- replicateM
5
(SUT.newTeardown "test cleanup" (return () :: IO ()))
teardownAction <- SUT.newTeardown
"bigger system"
(return (failedTeardownActions <> teardownActions) :: IO
[SUT.Teardown]
)
toredownResult <- SUT.runTeardown teardownAction
replicateM_ 9 (SUT.runTeardown teardownAction)
assertEqual "teardown action count must be correct"
10
(SUT.toredownCount toredownResult)
assertEqual "failed teardown action must be correct"
5
(SUT.failedToredownCount toredownResult)
, testCase "Teardown sub-routine executes on an uninterruptedMask" $ do
resultVar <- newEmptyMVar
teardown <- SUT.newTeardown "test" (getMaskingState >>= putMVar resultVar)
SUT.runTeardown_ teardown
masking <- takeMVar resultVar
assertEqual "Expecting Teardown masked state is Uninterruptible"
MaskedUninterruptible
masking
]
|
roman/Haskell-teardown
|
test/tasty/TeardownTest.hs
|
isc
| 3,664
| 0
| 19
| 1,038
| 813
| 395
| 418
| 79
| 1
|
module Env
(
initialEnv, extendEnv, lookupEnv, defineVar, setVar, extendRef
) where
import Types
import Subr
import qualified Data.Map as M
initialEnv :: Env
initialEnv = Node (makeFrame primitiveProcedureNames primitiveProcedureObjects) M.empty
primitiveProcedureNames :: [String]
primitiveProcedureNames = map (\(Primitive x, _) -> x) $ M.toList primitiveProcedures
primitiveProcedureObjects :: [SObj]
primitiveProcedureObjects = map fst $ M.toList primitiveProcedures
makeFrame :: [String] -> [SObj] -> Frame
makeFrame vars vals = M.fromList $ zip vars vals
extendEnv :: [String] -> [SObj] -> Env -> Env
extendEnv vars vals (Node f e) = Node f $ M.insert (keyPos e) (Node (makeFrame vars vals) M.empty) e
lookupEnv :: String -> SEnv -> Maybe SObj
lookupEnv var (Node f _, []) = M.lookup var f
lookupEnv var (e, r:rs) = do
f <- scanEnv (e, r:rs)
let val = M.lookup var f
case val of
Nothing -> lookupEnv var (e, rs)
_ -> val
scanEnv :: SEnv -> Maybe Frame
scanEnv (Node f _, []) = Just f
scanEnv (Node _ e, r:rs) = do
e <- M.lookup r e
scanEnv (e, rs)
bottomEnv :: SEnv -> Maybe (M.Map Int Env)
bottomEnv (Node _ e, []) = Just e
bottomEnv (Node _ e, r:rs) = do
e <- M.lookup r e
bottomEnv (e, rs)
defineVar :: String -> SObj -> SEnv -> SEnv
defineVar var val (e, r) = do
let ret = scanEnv (e, r)
case ret of
Nothing -> (e, r)
Just f -> let newf = M.insert var val f in
replace newf (e, r)
setVar :: String -> SObj -> SEnv -> SEnv
setVar var val (Node f e, []) =
let bind = M.lookup var f in
case bind of
Nothing -> (Node f e, []) -- unbound variable error
Just _ -> let newf = M.insert var val f in
(Node newf e, [])
setVar var val (e, r) = do
let ret = scanEnv (e, r)
case ret of
Nothing -> (e, r)
Just f ->
let bind = M.lookup var f in
case bind of
Nothing -> let (newe, _) = setVar var val (e, tail r) in
(newe, r)
Just _ -> let newf = M.insert var val f in
replace newf (e, r)
replace :: Frame -> SEnv -> SEnv
replace f (Node _ e, []) = (Node f e, [])
replace f (Node f' e, r:rs) =
let ret = M.lookup r e in
case ret of
Nothing -> (Node f' e, r:rs)
Just e' ->
let (newe, _) = replace f (e', rs) in
(Node f' (M.insert r newe e), r:rs)
extendRef :: Env -> [Ref] -> [Ref]
extendRef e rs = do
let m = bottomEnv (e, rs)
case m of
Nothing -> rs
Just m' -> keyPos m' : rs
keyPos :: M.Map k a -> Int
keyPos = (1+) . length . M.keys
|
syunta/Scheme-hs
|
src/Env.hs
|
mit
| 2,557
| 4
| 21
| 696
| 1,280
| 649
| 631
| 76
| 4
|
{-# LANGUAGE OverloadedStrings, RankNTypes #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
-- | An HTTP\/2-aware variant of the 'Network.Wai.Application' type. Compared
-- to the original, this exposes the new functionality of server push and
-- trailers, allows stream fragments to be sent in the form of file ranges, and
-- allows the stream body to produce a value to be used in constructing the
-- trailers. Existing @Applications@ can be faithfully upgraded to HTTP\/2
-- with 'promoteApplication' or served transparently over both protocols with
-- the normal Warp 'Network.Wai.Handler.Warp.run' family of functions.
--
-- An 'HTTP2Application' takes a 'Request' and a 'PushFunc' and produces a
-- 'Responder' that will push any associated resources and send the response
-- body. The response is always a stream of 'Builder's and file chunks.
-- Equivalents of the 'Network.Wai.responseBuilder' family of functions are
-- provided for creating 'Responder's conveniently.
--
-- Pushed streams are handled by an IO action that triggers a server push. It
-- returns @True@ if the @PUSH_PROMISE@ frame was sent, @False@ if not. Note
-- this means it will still return @True@ if the client reset or ignored the
-- stream. This gives handlers the freedom to implement their own heuristics
-- for whether to actually push a resource, while also allowing middleware and
-- frameworks to trigger server pushes automatically.
module Network.Wai.HTTP2
(
-- * Applications
HTTP2Application
-- * Responder
, Responder(..)
, RespondFunc
, Body
, Chunk(..)
, Trailers
-- * Server push
, PushFunc
, PushPromise(..)
, promiseHeaders
-- * Conveniences
, promoteApplication
-- ** Responders
, respond
, respondCont
, respondIO
, respondFile
, respondFilePart
, respondNotFound
, respondWith
-- ** Stream Bodies
, streamFilePart
, streamBuilder
, streamSimple
) where
import Blaze.ByteString.Builder (Builder)
import Control.Exception (Exception, throwIO)
import Control.Monad.Trans.Cont (ContT(..))
import Data.ByteString (ByteString)
#if __GLASGOW_HASKELL__ < 709
import Data.Functor ((<$>))
#endif
import Data.IORef (newIORef, readIORef, writeIORef)
#if __GLASGOW_HASKELL__ < 709
import Data.Monoid (mempty)
#endif
import Data.Typeable (Typeable)
import qualified Network.HTTP.Types as H
import Network.Wai (Application)
import Network.Wai.Internal
( FilePart(..)
, Request(requestHeaders)
, Response(..)
, ResponseReceived(..)
, StreamingBody
, adjustForFilePart
, chooseFilePart
, tryGetFileSize
)
-- | Headers sent after the end of a data stream, as defined by section 4.1.2 of
-- the HTTP\/1.1 spec (RFC 7230), and section 8.1 of the HTTP\/2 spec.
type Trailers = [H.Header]
-- | The synthesized request and headers of a pushed stream.
data PushPromise = PushPromise
{ promisedMethod :: H.Method
, promisedPath :: ByteString
, promisedAuthority :: ByteString
, promisedScheme :: ByteString
, promisedHeader :: H.RequestHeaders
}
-- | The HTTP\/2-aware equivalent of 'Network.Wai.Application'.
type HTTP2Application = Request -> PushFunc -> Responder
-- | Part of a streaming response -- either a 'Builder' or a range of a file.
data Chunk = FileChunk FilePath FilePart | BuilderChunk Builder
-- | The streaming body of a response. Equivalent to
-- 'Network.Wai.StreamingBody' except that it can also write file ranges and
-- return the stream's trailers.
type Body = (Chunk -> IO ()) -> IO () -> IO Trailers
-- | Given to 'Responders'; provide a status, headers, and a stream body, and
-- we'll give you a token proving you called the 'RespondFunc'.
type RespondFunc s = H.Status -> H.ResponseHeaders -> Body -> IO s
-- | The result of an 'HTTP2Application'; or, alternately, an application
-- that's independent of the request. This is a continuation-passing style
-- function that first provides a response by calling the given respond
-- function, then returns the request's 'Trailers'.
--
-- The respond function is similar to the one in 'Network.Wai.Application', but
-- it only takes a streaming body, the status and headers are curried, and it
-- also produces trailers for the stream.
newtype Responder = Responder
{ runResponder :: forall s. RespondFunc s -> IO s }
-- | A function given to an 'HTTP2Application' to initiate a server-pushed
-- stream. Its argument is the same as the result of an 'HTTP2Application', so
-- you can either implement the response inline, or call your own application
-- to create the response.
--
-- The result is 'True' if the @PUSH_PROMISE@ frame will be sent, or 'False' if
-- it will not. This can happen if server push is disabled, the concurrency
-- limit of server-initiated streams is reached, or the associated stream has
-- already been closed.
--
-- This function shall ensure that stream data provided after it returns will
-- be sent after the @PUSH_PROMISE@ frame, so that servers can implement the
-- requirement that any pushed stream for a resource be initiated before
-- sending DATA frames that reference it.
type PushFunc = PushPromise -> Responder -> IO Bool
-- | Create the 'H.RequestHeaders' corresponding to the given 'PushPromise'.
--
-- This is primarily useful for WAI handlers like Warp, and application
-- implementers are unlikely to use it directly.
promiseHeaders :: PushPromise -> H.RequestHeaders
promiseHeaders p =
[ (":method", promisedMethod p)
, (":path", promisedPath p)
, (":authority", promisedAuthority p)
, (":scheme", promisedScheme p)
] ++ promisedHeader p
-- | Create a response body consisting of a single range of a file. Does not
-- set Content-Length or Content-Range headers. For that, use
-- 'respondFilePart' or 'respondFile'.
streamFilePart :: FilePath -> FilePart -> Body
streamFilePart path part write _ = write (FileChunk path part) >> return []
-- | Respond with a single range of a file, adding the Accept-Ranges,
-- Content-Length and Content-Range headers and changing the status to 206 as
-- appropriate.
--
-- If you want the range to be inferred automatically from the Range header,
-- use 'respondFile' instead. On the other hand, if you want to avoid the
-- automatic header and status adjustments, use 'respond' and 'streamFilePart'
-- directly.
respondFilePart :: H.Status -> H.ResponseHeaders -> FilePath -> FilePart -> Responder
respondFilePart s h path part = Responder $ \k -> do
let (s', h') = adjustForFilePart s h part
k s' h' $ streamFilePart path part
-- | Serve the requested range of the specified file (based on the Range
-- header), using the given 'H.Status' and 'H.ResponseHeaders' as a base. If
-- the file is not accessible, the status will be replaced with 404 and a
-- default not-found message will be served. If a partial file is requested,
-- the status will be replaced with 206 and the Content-Range header will be
-- added. The Content-Length header will always be added.
respondFile :: H.Status -> H.ResponseHeaders -> FilePath -> H.RequestHeaders -> Responder
respondFile s h path reqHdrs = Responder $ \k -> do
fileSize <- tryGetFileSize path
case fileSize of
Left _ -> runResponder (respondNotFound h) k
Right size -> runResponder (respondFileExists s h path size reqHdrs) k
-- As 'respondFile', but with prior knowledge of the file's existence and size.
respondFileExists :: H.Status -> H.ResponseHeaders -> FilePath -> Integer -> H.RequestHeaders -> Responder
respondFileExists s h path size reqHdrs =
respondFilePart s h path $ chooseFilePart size $ lookup H.hRange reqHdrs
-- | Respond with a minimal 404 page with the given headers.
respondNotFound :: H.ResponseHeaders -> Responder
respondNotFound h = Responder $ \k -> k H.notFound404 h' $
streamBuilder "File not found."
where
contentType = (H.hContentType, "text/plain; charset=utf-8")
h' = contentType:filter ((/=H.hContentType) . fst) h
-- | Construct a 'Responder' that will just call the 'RespondFunc' with the
-- given arguments.
respond :: H.Status -> H.ResponseHeaders -> Body -> Responder
respond s h b = Responder $ \k -> k s h b
-- | Fold the given bracketing action into a 'Responder'. Note the first
-- argument is isomorphic to @Codensity IO a@ or @forall s. ContT s IO a@, and
-- is the type of a partially-applied 'Control.Exception.bracket' or
-- @with@-style function.
--
-- > respondWith (bracket acquire release) $
-- > \x -> respondNotFound [("x", show x)]
--
-- is equivalent to
--
-- > Responder $ \k -> bracket acquire release $
-- >   \x -> runResponder (respondNotFound [("x", show x)]) k
--
-- This is morally equivalent to ('>>=') on 'Codensity' 'IO'.
respondWith :: (forall s. (a -> IO s) -> IO s) -> (a -> Responder) -> Responder
respondWith with f = respondCont $ f <$> ContT with
-- | Fold the 'ContT' into the contained 'Responder'.
respondCont :: (forall r. ContT r IO Responder) -> Responder
respondCont cont = Responder $ \k -> runContT cont $ \r -> runResponder r k
-- | Fold the 'IO' into the contained 'Responder'.
respondIO :: IO Responder -> Responder
respondIO io = Responder $ \k -> io >>= \r -> runResponder r k
-- | Create a response body consisting of a single builder.
streamBuilder :: Builder -> Body
streamBuilder builder write _ = write (BuilderChunk builder) >> return []
-- | Create a response body of a stream of 'Builder's.
streamSimple :: StreamingBody -> Body
streamSimple body write flush = body (write . BuilderChunk) flush >> return []
-- | Use a normal WAI 'Response' to send the response. Useful if you're
-- sharing code between HTTP\/2 applications and HTTP\/1 applications.
--
-- The 'Request' is used to determine the right file range to serve for
-- 'ResponseFile'.
promoteResponse :: Request -> Response -> Responder
promoteResponse req response = case response of
(ResponseBuilder s h b) ->
Responder $ \k -> k s h (streamBuilder b)
(ResponseStream s h body) ->
Responder $ \k -> k s h (streamSimple body)
(ResponseRaw _ fallback) -> promoteResponse req fallback
(ResponseFile s h path mpart) -> maybe
(respondFile s h path $ requestHeaders req)
(respondFilePart s h path)
mpart
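-- A hedged sketch (added; the handler below is illustrative and assumes
-- 'responseLBS' from Network.Wai is in scope): an existing HTTP/1-style
-- 'Response' can be promoted directly.
--
-- > helloResponder :: Request -> Responder
-- > helloResponder req = promoteResponse req $
-- >   responseLBS H.ok200 [(H.hContentType, "text/plain")] "hello"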
-- | An 'Network.Wai.Application' we tried to promote neither called its
-- respond action nor raised; this is only possible if it imported the
-- 'ResponseReceived' constructor and used it to lie about having called the
-- action.
data RespondNeverCalled = RespondNeverCalled deriving (Show, Typeable)
instance Exception RespondNeverCalled
-- | Promote a normal WAI 'Application' to an 'HTTP2Application' by ignoring
-- the HTTP/2-specific features.
promoteApplication :: Application -> HTTP2Application
promoteApplication app req _ = Responder $ \k -> do
-- In HTTP2Applications, the Responder is required to ferry a value of
-- arbitrary type from the RespondFunc back to the caller of the
-- application, but in Application the type is fixed to ResponseReceived.
-- To add this extra power to an Application, we have to squirrel it away
-- in an IORef as a hack.
ref <- newIORef Nothing
let k' r = do
writeIORef ref . Just =<< runResponder (promoteResponse req r) k
return ResponseReceived
ResponseReceived <- app req k'
readIORef ref >>= maybe (throwIO RespondNeverCalled) return
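-- A minimal sketch (added; @legacyApp@ is a hypothetical plain WAI
-- 'Application'):
--
-- > http2App :: HTTP2Application
-- > http2App = promoteApplication legacyApp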
|
coypoop/wai
|
wai/Network/Wai/HTTP2.hs
|
mit
| 11,546
| 0
| 17
| 2,270
| 1,653
| 939
| 714
| -1
| -1
|
{-# htermination (readInt :: MyInt -> (Char -> MyBool) -> (Char -> MyInt) -> (List Char) -> (List (Tup2 MyInt (List Char)))) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Tup2 a b = Tup2 a b ;
data Char = Char MyInt ;
data Integer = Integer MyInt ;
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
foldr :: (a -> b -> b) -> b -> (List a) -> b;
foldr f z Nil = z;
foldr f z (Cons x xs) = f x (foldr f z xs);
psPs :: (List a) -> (List a) -> (List a);
psPs Nil ys = ys;
psPs (Cons x xs) ys = Cons x (psPs xs ys);
concat :: (List (List a)) -> (List a);
concat = foldr psPs Nil;
map :: (b -> a) -> (List b) -> (List a);
map f Nil = Nil;
map f (Cons x xs) = Cons (f x) (map f xs);
pt :: (c -> b) -> (a -> c) -> a -> b;
pt f g x = f (g x);
concatMap :: (b -> (List a)) -> (List b) -> (List a);
concatMap f = pt concat (map f);
nonnull00 (Tup2 (Cons vy vz) t) = Cons (Tup2 (Cons vy vz) t) Nil;
nonnull00 wu = Nil;
nonnull0 vu68 = nonnull00 vu68;
otherwise :: MyBool;
otherwise = MyTrue;
span2Span0 xx xy p wv ww MyTrue = Tup2 Nil (Cons wv ww);
span2Vu43 xx xy = span xx xy;
span2Ys0 xx xy (Tup2 ys wx) = ys;
span2Ys xx xy = span2Ys0 xx xy (span2Vu43 xx xy);
span2Zs0 xx xy (Tup2 wy zs) = zs;
span2Zs xx xy = span2Zs0 xx xy (span2Vu43 xx xy);
span2Span1 xx xy p wv ww MyTrue = Tup2 (Cons wv (span2Ys xx xy)) (span2Zs xx xy);
span2Span1 xx xy p wv ww MyFalse = span2Span0 xx xy p wv ww otherwise;
span2 p (Cons wv ww) = span2Span1 p ww p wv ww (p wv);
span3 p Nil = Tup2 Nil Nil;
span3 xv xw = span2 xv xw;
span :: (a -> MyBool) -> (List a) -> Tup2 (List a) (List a);
span p Nil = span3 p Nil;
span p (Cons wv ww) = span2 p (Cons wv ww);
nonnull :: (Char -> MyBool) -> (List Char) -> (List (Tup2 (List Char) (List Char)));
nonnull p s = concatMap nonnull0 (Cons (span p s) Nil);
foldl :: (a -> b -> a) -> a -> (List b) -> a;
foldl f z Nil = z;
foldl f z (Cons x xs) = foldl f (f z x) xs;
foldl1 :: (a -> a -> a) -> (List a) -> a;
foldl1 f (Cons x xs) = foldl f x xs;
fromIntegerMyInt :: Integer -> MyInt
fromIntegerMyInt (Integer x) = x;
toIntegerMyInt :: MyInt -> Integer
toIntegerMyInt x = Integer x;
fromIntegral = pt fromIntegerMyInt toIntegerMyInt;
primMinusNat :: Nat -> Nat -> MyInt;
primMinusNat Zero Zero = Pos Zero;
primMinusNat Zero (Succ y) = Neg (Succ y);
primMinusNat (Succ x) Zero = Pos (Succ x);
primMinusNat (Succ x) (Succ y) = primMinusNat x y;
primPlusNat :: Nat -> Nat -> Nat;
primPlusNat Zero Zero = Zero;
primPlusNat Zero (Succ y) = Succ y;
primPlusNat (Succ x) Zero = Succ x;
primPlusNat (Succ x) (Succ y) = Succ (Succ (primPlusNat x y));
primPlusInt :: MyInt -> MyInt -> MyInt;
primPlusInt (Pos x) (Neg y) = primMinusNat x y;
primPlusInt (Neg x) (Pos y) = primMinusNat y x;
primPlusInt (Neg x) (Neg y) = Neg (primPlusNat x y);
primPlusInt (Pos x) (Pos y) = Pos (primPlusNat x y);
psMyInt :: MyInt -> MyInt -> MyInt
psMyInt = primPlusInt;
primMulNat :: Nat -> Nat -> Nat;
primMulNat Zero Zero = Zero;
primMulNat Zero (Succ y) = Zero;
primMulNat (Succ x) Zero = Zero;
primMulNat (Succ x) (Succ y) = primPlusNat (primMulNat x (Succ y)) (Succ y);
primMulInt :: MyInt -> MyInt -> MyInt;
primMulInt (Pos x) (Pos y) = Pos (primMulNat x y);
primMulInt (Pos x) (Neg y) = Neg (primMulNat x y);
primMulInt (Neg x) (Pos y) = Neg (primMulNat x y);
primMulInt (Neg x) (Neg y) = Pos (primMulNat x y);
srMyInt :: MyInt -> MyInt -> MyInt
srMyInt = primMulInt;
readInt0 radix n d = psMyInt (srMyInt n radix) d;
readInt10 radix digToInt (Tup2 ds r) = Cons (Tup2 (foldl1 (readInt0 radix) (map (pt fromIntegral digToInt) ds)) r) Nil;
readInt10 radix digToInt vv = Nil;
readInt1 radix digToInt vu77 = readInt10 radix digToInt vu77;
readInt radix isDig digToInt s = concatMap (readInt1 radix digToInt) (nonnull isDig s);
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/readInt_1.hs
|
mit
| 3,939
| 0
| 13
| 943
| 2,042
| 1,064
| 978
| 88
| 1
|
module HaSC.Prim.Semantic where
import qualified Data.Map as M
import Text.Parsec.Pos
import Control.Applicative
import Control.Monad
import Control.Monad.Writer
import Control.Monad.State.Strict
import HaSC.Prim.AST
import HaSC.Prim.Environment
import HaSC.Prim.ObjInfo
import HaSC.Prim.AnalyzedAST
import HaSC.Prim.ErrorMsg
semanticAnalyze :: Program -> (A_Program, [String])
semanticAnalyze prog = runEnv body initialEnv
where body = do collectGlobal prog
ret <- analyze prog
case typeCheck ret of
(Left errMsg) -> error errMsg
(Right _) -> return ret
{- analyze**
   Collects object information and builds a new tree that carries it.
   The only errors detected here are invalid prototype declarations and
   invalid variable declarations. -}
analyze :: Program -> StateEnv A_Program
analyze prog = liftM concat (mapM analyzeEDecl prog)
analyzeEDecl :: EDecl -> StateEnv [A_EDecl]
analyzeEDecl (Decl p dcl_ty)
= return $ map (A_Decl . makeVarInfo p global_lev) dcl_ty
analyzeEDecl (FuncPrototype _ _ _ _) = return []
analyzeEDecl (FuncDef p _ name args stmt)
= do let parms = map (makeParmInfo p) args
a_stmt <- withEnv param_lev
(mapM_ (extendEnv p param_lev) parms)
(analyzeStmt func_lev stmt)
func <- findObjFromJust p global_lev name
if name == "main" && objCtype func == CFun CInt []
then return $ [A_Func p func parms (addReturn a_stmt)]
else return $ [A_Func p func parms a_stmt]
addReturn :: A_Stmt -> A_Stmt
addReturn (A_CompoundStmt stmts) = A_CompoundStmt (stmts ++
[A_ReturnStmt p (A_Constant 0)])
where p = newPos "hoge" 0 0
addReturn _ = error "never happen"
analyzeStmt :: Level -> Stmt -> StateEnv A_Stmt
analyzeStmt lev (CompoundStmt _ stmts)
= withEnv (lev+1)
(return ())
(liftM A_CompoundStmt (mapM (analyzeStmt $ lev+1) stmts))
analyzeStmt lev (DeclStmt p dcls)
= let info = map (makeVarInfo p lev) dcls
in mapM_ (addEnv lev) info >> (return $ A_DeclStmt info)
analyzeStmt _ (EmptyStmt _) = return A_EmptyStmt
analyzeStmt lev (ExprStmt _ e) = liftM A_ExprStmt (analyzeExpr lev e)
analyzeStmt lev s@(IfStmt _ _ _ _) = analyzeIf lev s
analyzeStmt lev s@(WhileStmt _ _ _) = analyzeWhile lev s
analyzeStmt lev (ReturnStmt p e) = liftM (A_ReturnStmt p) (analyzeExpr lev e)
analyzeStmt lev (RetVoidStmt p) = return $ A_RetVoidStmt p
analyzeIf :: Level -> Stmt -> StateEnv A_Stmt
analyzeIf lev (IfStmt p cond true false)
= do a_cond <- analyzeExpr lev cond
liftM2 (A_IfStmt p a_cond) (analyzeStmt lev true) (analyzeStmt lev false)
analyzeWhile :: Level -> Stmt -> StateEnv A_Stmt
analyzeWhile lev (WhileStmt p cond body)
= do a_cond <- analyzeExpr lev cond
liftM (A_WhileStmt p a_cond) (analyzeStmt lev body)
analyzeExpr :: Level -> Expr -> StateEnv A_Expr
analyzeExpr lev (AssignExpr p e1 e2)
= liftM2 (A_AssignExpr p) (analyzeExpr lev e1) (analyzeExpr lev e2)
analyzeExpr lev (UnaryPrim p op e)
= liftM (A_UnaryPrim p op) (analyzeExpr lev e)
analyzeExpr lev (BinaryPrim p op e1 e2)
= liftM2 (A_BinaryPrim p op) (analyzeExpr lev e1) (analyzeExpr lev e2)
analyzeExpr lev (ApplyFunc p name args)
= do funcInfo <- findObjFromJust p lev name
a_args <- mapM (analyzeExpr lev) args
return $ A_ApplyFunc p funcInfo a_args
analyzeExpr lev (MultiExpr p es) = liftM A_MultiExpr (mapM (analyzeExpr lev) es)
analyzeExpr lev (Constant p num) = return $ A_Constant num
analyzeExpr lev (IdentExpr p name)
= do info <- findObjFromJust p lev name
case info of
(ObjInfo _ Func _ _) -> error $ funcReferError p name
(ObjInfo _ FuncProto _ _) -> error $ funcReferError p name
validInfo -> return $ A_IdentExpr validInfo
{- typeCheck -}
typeCheck :: A_Program -> Check ()
typeCheck = mapM_ declTypeCheck
declTypeCheck :: A_EDecl -> Check ()
declTypeCheck (A_Decl _) = return ()
declTypeCheck (A_Func p info args body)
= case getRetType info of
(Just ty) -> do retTy <- stmtTypeCheck (objName info, ty) body
when (ty /= retTy) (fail $ invalidRetTypeError p (objName info))
Nothing -> fail $ invalidRetTypeError p (objName info)
where
getRetType info = case objCtype info of
(CFun retTy _) -> Just retTy
_ -> Nothing
{- To detect and report an error when a return statement has the wrong type,
   we carry around a (function name, expected return type) tuple. -}
stmtTypeCheck :: (Identifier, CType) -> A_Stmt -> Check CType
stmtTypeCheck info@(name, retTy) = stmtTypeCheck'
where
stmtTypeCheck' :: A_Stmt -> Check CType
stmtTypeCheck' (A_EmptyStmt) = wellTyped
stmtTypeCheck' (A_ExprStmt e) = exprTypeCheck e >> wellTyped
stmtTypeCheck' (A_DeclStmt l) = wellTyped
stmtTypeCheck' (A_CompoundStmt s) = foldCompoundStmt info CVoid s
stmtTypeCheck' (A_IfStmt p c tr fl) = ifTypeCheck p info c tr fl
stmtTypeCheck' (A_WhileStmt p c bd) = whileTypeCheck p info c bd
stmtTypeCheck' (A_ReturnStmt p e) = returnTypeCheck p info e
stmtTypeCheck' (A_RetVoidStmt p)
= if retTy /= CVoid
then fail $ retTypeError p name retTy CVoid
else wellTyped
foldCompoundStmt :: (Identifier, CType) -> CType -> [A_Stmt] -> Check CType
foldCompoundStmt info = foldM f
where
f acc stmt = do stmtTy <- stmtTypeCheck info stmt
return $ synType stmtTy acc
ifTypeCheck :: SourcePos -> (Identifier, CType) -> A_Expr -> A_Stmt -> A_Stmt
-> Check CType
ifTypeCheck p info cond tr fls
= do condTy <- exprTypeCheck cond
if condTy == CInt
then liftM2 synType (stmtTypeCheck info tr) (stmtTypeCheck info fls)
else fail $ condError p condTy
whileTypeCheck :: SourcePos -> (Identifier, CType) -> A_Expr -> A_Stmt
-> Check CType
whileTypeCheck p info cond body
= do condTy <- exprTypeCheck cond
if condTy == CInt
then stmtTypeCheck info body
else fail $ condError p condTy
returnTypeCheck :: SourcePos -> (Identifier, CType) -> A_Expr -> Check CType
returnTypeCheck p (name, retTy) e
= do ty <- exprTypeCheck e
if retTy == ty
then return ty
else fail $ retTypeError p name retTy ty
exprTypeCheck :: A_Expr -> Check CType
exprTypeCheck (A_AssignExpr p e1 e2)
= do checkAssignForm p e1
ty1 <- exprTypeCheck e1
ty2 <- exprTypeCheck e2
if ty1 == ty2
then return ty1
else fail $ typeDiffError p "=" ty1 ty2
exprTypeCheck (A_UnaryPrim p op e)
= case op of
"&" -> addrTypeChcek p e
"*" -> pointerTypeCheck p e
exprTypeCheck (A_BinaryPrim p op e1 e2)
| op `elem` ["&&", "||", "*", "/"]
= do ty1 <- exprTypeCheck e1
ty2 <- exprTypeCheck e2
if ty1 == CInt && ty2 == CInt
then return CInt
else fail $ binaryTypeError p op ty1 ty2
| op `elem` ["==", "!=", "<", "<=", ">", ">="]
= do ty1 <- exprTypeCheck e1
ty2 <- exprTypeCheck e2
if ty1 == ty2
then return CInt
else fail $ typeDiffError p op ty1 ty2
| op `elem` ["+", "-"]
= do ty1 <- exprTypeCheck e1
ty2 <- exprTypeCheck e2
case (ty1, ty2) of
(CInt, CInt) -> return CInt
(CPointer ty, CInt) -> return $ CPointer ty
(CArray ty _, CInt) -> return $ CPointer ty
_ -> fail $ invalidCalcError p op ty1 ty2
exprTypeCheck (A_ApplyFunc p info args)
= do argTypes <- mapM exprTypeCheck args
case info of
(ObjInfo nm Func (CFun ty parms) _)
-> if argTypes == parms
then return ty
else fail $ argumentError p nm
_ -> fail $ funcReferError p (objName info)
exprTypeCheck (A_MultiExpr es) = liftM last (mapM exprTypeCheck es)
exprTypeCheck (A_Constant n) = return CInt
exprTypeCheck (A_IdentExpr info) = return $ objCtype info
addrTypeCheck :: SourcePos -> A_Expr -> Check CType
addrTypeCheck p e = do checkAddressReferForm p e
ty <- exprTypeCheck e
if ty == CInt
then return (CPointer CInt)
else fail $ unaryError p "&" CInt ty
pointerTypeCheck :: SourcePos -> A_Expr -> Check CType
pointerTypeCheck p e
= do ty <- exprTypeCheck e
case ty of
(CPointer ty') -> return ty'
_ -> fail $ unaryError p "*" (CPointer CInt) ty
{- Utility -}
checkAddressReferForm :: SourcePos -> A_Expr -> Check ()
checkAddressReferForm _ (A_IdentExpr _) = return ()
checkAddressReferForm p _ = fail $ addrFormError p
checkAssignForm :: SourcePos -> A_Expr -> Check ()
checkAssignForm p (A_IdentExpr (ObjInfo nm kind ty _))
= case (kind, ty) of
(Var, (CArray _ _)) -> fail $ assignError p
(Var, _) -> return ()
(Parm, (CArray _ _)) -> fail $ assignError p
(Parm, _) -> return ()
(Func, _) -> fail $ assignError p
(FuncProto, _) -> fail $ assignError p
checkAssignForm p (A_UnaryPrim _ "*" _) = return ()
checkAssignForm p _ = fail $ assignError p
wellTyped :: Check CType
wellTyped = return CVoid
isPointer :: A_Expr -> Bool
isPointer e = case exprTypeCheck e of
(Left _) -> undefined
(Right ty) -> case ty of
(CPointer _) -> True
(CArray _ _) -> True
_ -> False
|
yu-i9/HaSC
|
src/HaSC/Prim/Semantic.hs
|
mit
| 10,061
| 0
| 15
| 3,018
| 3,357
| 1,657
| 1,700
| 209
| 10
|
-- -------------------------------------------------------------------------------------
-- Author: Sourabh S Joshi (cbrghostrider); Copyright - All rights reserved.
-- For email, run on linux (perl v5.8.5):
-- perl -e 'print pack "H*","736f75726162682e732e6a6f73686940676d61696c2e636f6d0a"'
-- -------------------------------------------------------------------------------------
import System.Environment (getArgs)
main :: IO ()
main = do
[file] <- getArgs
ip <- readFile file
writeFile ((takeWhile (/= '.') file) ++ ".out" ) (processQual ip)
-- a token is a list of strings, list length is #chars per dict word
type Token = [String]
-- a dict is a list of words
type Dict = [String]
-- takes string made up so far for curr token
-- and unconsumed input
-- returns (string for curr token, rest of unconsumed input)
consumeLtr :: String -> String -> (String, String)
consumeLtr strSoFar [] = (strSoFar, [])
consumeLtr strSoFar (')':xs) = (strSoFar, xs)
consumeLtr strSoFar (x:xs) = consumeLtr (strSoFar ++ [x]) xs
-- takes partial output, unconsumed input, and returns full output
consumeLtrStart :: [String] -> String -> [String]
consumeLtrStart strs [] = strs
consumeLtrStart strs ('(': xs) =
let (myStr, restStr) = consumeLtr "" xs in consumeLtrStart (strs ++ [myStr]) restStr
consumeLtrStart strs (x:xs) = consumeLtrStart (strs ++ [[x]]) xs
-- checks if a word can be formed from token
isWordInToken :: (String, Token) -> Bool
isWordInToken (s, t) = all (==True) . map (\(letter, choices) -> letter `elem` choices) $ zip s t
-- converts a string to a Token
-- e.g. (ab)c(bc) -> ["ab", "c", "bc"]
convert :: String -> Token
convert = consumeLtrStart []
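-- Illustrative check (added, not part of the original solution): per the
-- comment above, "(ab)c(bc)" should convert to the token ["ab", "c", "bc"].
exampleToken :: Token
exampleToken = convert "(ab)c(bc)"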
-- given a Dict and a Token, lists how many words from that token in dict
listNum :: Dict -> Token -> Int
listNum d t = length . filter (== True) . map isWordInToken $ zip d (repeat t)
toOpStr :: Int -> [Int] -> [String]
toOpStr n [] = []
toOpStr n (x:xs) = ("Case #" ++ (show n) ++ ": " ++ (show x)) : toOpStr (n+1) xs
processQual :: String -> String
processQual ip =
let allLines = lines ip
[l, d, n]= map read . words . head $ allLines
dict = take d . tail $ allLines
tokens = map convert . take n . drop d . tail $ allLines
nums = map (listNum dict) tokens
opLines = toOpStr 1 nums
in unlines opLines
|
cbrghostrider/Hacking
|
codeJam/2009/alienLanguage/qual2.hs
|
mit
| 2,373
| 0
| 13
| 488
| 703
| 378
| 325
| 35
| 1
|
-- |
{-# LANGUAGE TypeSynonymInstances #-}
module Point where
import Foreign.C.Types
import Foreign.Ptr
import Foreign.Storable
data Point3D = P {-# UNPACK #-} !CDouble
{-# UNPACK #-} !CDouble
{-# UNPACK #-} !CDouble
deriving (Show)
instance Storable Point3D where
sizeOf _ = sizeOf (undefined :: CDouble) * 3
alignment _ = alignment (undefined :: CDouble)
{-# INLINE peek #-}
peek p = do
x <- peekElemOff q 0
y <- peekElemOff q 1
z <- peekElemOff q 2
return (P x y z)
where
q = castPtr p
{-# INLINE poke #-}
poke p (P x y z) = do
pokeElemOff q 0 x
pokeElemOff q 1 y
pokeElemOff q 2 z
where
q = castPtr p
type Point = Point3D
type Normal = Point3D
_x :: Point -> Double
_x (P x _ _) = realToFrac x
_y :: Point -> Double
_y (P _ y _) = realToFrac y
_z :: Point -> Double
_z (P _ _ z) = realToFrac z
instance Num Point where
(P x y z) + (P x' y' z') = P (x + x') (y + y') (z + z')
(P x y z) - (P x' y' z') = P (x - x') (y - y') (z - z')
(P x y z) * (P x' y' z') = P (x * x') (y * y') (z * z')
negate p = zeroP - p
abs (P x y z) = P (abs x) (abs y) (abs z)
signum = undefined
fromInteger = undefined
zeroP :: Point
zeroP = point 0 0 0
dot :: Point -> Point -> Double
dot (P a b c) (P a' b' c') = realToFrac $ a*a' + b*b' + c*c'
norm :: Point -> Double
norm (P a b c) = realToFrac $ sqrt(a^2 + b^2 + c^2)
distance :: Point -> Point -> Double
distance (P x y z) (P x' y' z') = realToFrac $ sqrt((x - x')^2 + (y - y')^2 + (z - z')^2)
(.*) :: Double -> Point -> Point
(.*) k (P x y z) = point' (x*k') (y*k') (z*k')
where
k' = realToFrac k
point :: Double -> Double -> Double -> Point
point x y z = P x' y' z'
where
x' = realToFrac x
y' = realToFrac y
z' = realToFrac z
point' :: CDouble -> CDouble -> CDouble -> Point
point' = P
angle :: Point -> Point -> Point -> Double
angle x y z = acos(prod / (d1*d2))
where
v1 = x - y
v2 = z - y
d1 = norm v1
d2 = norm v2
prod = dot v1 v2
makePoint :: [Double] -> Point
makePoint (x:y:z:[]) = point x y z
makePoint _ = error "Wrong point number!"
|
rabipelais/cloudpoint
|
src/Point.hs
|
mit
| 2,197
| 0
| 14
| 676
| 1,119
| 574
| 545
| 69
| 1
|
module Rebase.GHC.Stack
(
module GHC.Stack
)
where
import GHC.Stack
|
nikita-volkov/rebase
|
library/Rebase/GHC/Stack.hs
|
mit
| 71
| 0
| 5
| 12
| 20
| 13
| 7
| 4
| 0
|
{-# htermination lookupWithDefaultFM :: FiniteMap () b -> b -> () -> b #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_lookupWithDefaultFM_2.hs
|
mit
| 92
| 0
| 3
| 16
| 5
| 3
| 2
| 1
| 0
|
{-# htermination replicateM_ :: Int -> Maybe a -> Maybe () #-}
import Monad
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Monad_replicateM__3.hs
|
mit
| 76
| 0
| 3
| 14
| 5
| 3
| 2
| 1
| 0
|
{-# LANGUAGE DataKinds, TypeOperators, FlexibleContexts, FlexibleInstances,
GADTs, ScopedTypeVariables, ConstraintKinds #-}
-- | Tools for binding vinyl records to GLSL program uniform
-- parameters. The most common usage is to use the 'setUniforms'
-- function to set each field of a 'PlainRec' to the GLSL uniform
-- parameter with the same name. This verifies that each field of the
-- record corresponds to a uniform parameter of the given shader
-- program, and that the types all agree.
module Graphics.VinylGL.Uniforms (setAllUniforms, setSomeUniforms, setUniforms,
HasFieldGLTypes(..)) where
import Control.Applicative ((<$>))
import Data.Foldable (traverse_)
import Data.Functor.Identity
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
import Data.Vinyl
import Graphics.GLUtil (HasVariableType(..), ShaderProgram(..), AsUniform(..))
import Graphics.Rendering.OpenGL as GL
import Data.Vinyl.Reflect (HasFieldNames(..))
-- | Provide the 'GL.VariableType' of each field in a 'Rec'. The list
-- of types has the same order as the fields of the 'Rec'.
class HasFieldGLTypes a where
fieldGLTypes :: a -> [GL.VariableType]
instance HasFieldGLTypes (Rec '[] f) where
fieldGLTypes _ = []
instance (HasVariableType t, HasFieldGLTypes (PlainRec ts))
=> HasFieldGLTypes (PlainRec (sy:::t ': ts)) where
fieldGLTypes _ = variableType (undefined::t)
: fieldGLTypes (undefined::PlainRec ts)
type UniformFields a = (HasFieldNames a, HasFieldGLTypes a, SetUniformFields a)
-- | Set GLSL uniform parameters from a 'PlainRec'. A check is
-- performed to verify that /all/ uniforms used by a program are
-- represented by the record type. In other words, the record is a
-- superset of the parameters used by the program.
setAllUniforms :: forall ts. UniformFields (PlainRec ts)
=> ShaderProgram -> PlainRec ts -> IO ()
setAllUniforms s x = case checks of
Left msg -> error msg
Right _ -> setUniformFields locs x
where fnames = fieldNames (undefined::PlainRec ts)
checks = do namesCheck "record" (M.keys $ uniforms s) fnames
typesCheck True (snd <$> uniforms s) fieldTypes
fieldTypes = M.fromList $
zip fnames (fieldGLTypes (undefined::PlainRec ts))
locs = map (fmap fst . (`M.lookup` uniforms s)) fnames
{-# INLINE setAllUniforms #-}
-- | Set GLSL uniform parameters from a 'PlainRec' representing a
-- subset of all uniform parameters used by a program.
setUniforms :: forall ts. UniformFields (PlainRec ts)
=> ShaderProgram -> PlainRec ts -> IO ()
setUniforms s x = case checks of
Left msg -> error msg
Right _ -> setUniformFields locs x
where fnames = fieldNames (undefined::PlainRec ts)
checks = do namesCheck "GLSL program" fnames (M.keys $ uniforms s)
typesCheck False fieldTypes (snd <$> uniforms s)
fieldTypes = M.fromList $
zip fnames (fieldGLTypes (undefined::PlainRec ts))
locs = map (fmap fst . (`M.lookup` uniforms s)) fnames
{-# INLINE setUniforms #-}
-- | Set GLSL uniform parameters from those fields of a 'PlainRec'
-- whose names correspond to uniform parameters used by a program.
setSomeUniforms :: forall ts. UniformFields (PlainRec ts)
=> ShaderProgram -> PlainRec ts -> IO ()
setSomeUniforms s x = case typesCheck' True (snd <$> uniforms s) fieldTypes of
Left msg -> error msg
Right _ -> setUniformFields locs x
where fnames = fieldNames (undefined::PlainRec ts)
{-# INLINE fnames #-}
fieldTypes = M.fromList . zip fnames $
fieldGLTypes (undefined::PlainRec ts)
{-# INLINE fieldTypes #-}
locs = map (fmap fst . (`M.lookup` uniforms s)) fnames
{-# INLINE locs #-}
{-# INLINE setSomeUniforms #-}
-- | @namesCheck blame little big@ checks that each name in @little@ is
-- an element of @big@.
namesCheck :: String -> [String] -> [String] -> Either String ()
namesCheck blame little big = mapM_ aux little
where big' = S.fromList big
aux x | x `S.member` big' = Right ()
| otherwise = Left $ "Field "++x++" not found in "++blame
-- | @typesCheck blame little big@ checks that each (name,type) pair
-- in @little@ is a member of @big@.
typesCheck :: Bool
-> M.Map String GL.VariableType -> M.Map String GL.VariableType
-> Either String ()
typesCheck blame little big = mapM_ aux $ M.toList little
where aux (n,t)
| Just True == (glTypeEquiv t <$> M.lookup n big) = return ()
| otherwise = Left $ msg n (show t) (maybe "" show (M.lookup n big))
msg n t t' = let (expected, actual) = if blame
then (t,t')
else (t',t)
in "Record and GLSL type disagreement on field "++n++
": GLSL expected "++expected++
", record provides "++actual
-- | @typesCheck' blame little big@ checks that each (name,type) pair
-- in the intersection of @little@ and @big@ is consistent.
typesCheck' :: Bool
-> M.Map String GL.VariableType -> M.Map String GL.VariableType
-> Either String ()
typesCheck' blame little big = mapM_ aux $ M.toList little
where aux (n,t)
| fromMaybe True (glTypeEquiv t <$> M.lookup n big) = return ()
| otherwise = Left $ msg n (show t) (maybe "" show (M.lookup n big))
msg n t t' = let (expected, actual) = if blame
then (t,t')
else (t',t)
in "Record and GLSL type disagreement on field "++n++
": GLSL expected "++expected++
", record provides "++actual
-- The equivalence on 'GL.VariableType's we need identifies Samplers
-- with Ints because this is how GLSL represents samplers.
glTypeEquiv' :: GL.VariableType -> GL.VariableType -> Bool
glTypeEquiv' GL.Sampler1D GL.Int' = True
glTypeEquiv' GL.Sampler2D GL.Int' = True
glTypeEquiv' GL.Sampler3D GL.Int' = True
glTypeEquiv' x y = x == y
-- We define our own equivalence relation on types because we don't
-- have unique Haskell representations for every GL type. For example,
-- the GLSL sampler types (e.g. Sampler2D) are just GLint in Haskell.
glTypeEquiv :: VariableType -> VariableType -> Bool
glTypeEquiv x y = glTypeEquiv' x y || glTypeEquiv' y x
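-- Added illustration (not part of the original module): sampler uniforms are
-- reported by GLSL as sampler types but set from Haskell as ints, so the
-- symmetric equivalence accepts the pair in either order.
samplerIntEquiv :: Bool
samplerIntEquiv = glTypeEquiv GL.Sampler2D GL.Int' && glTypeEquiv GL.Int' GL.Sampler2D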
class SetUniformFields a where
setUniformFields :: [Maybe UniformLocation] -> a -> IO ()
instance SetUniformFields (Rec '[] f) where
setUniformFields _ _ = return ()
{-# INLINE setUniformFields #-}
instance (AsUniform t, SetUniformFields (PlainRec ts))
=> SetUniformFields (PlainRec ((sy:::t) ': ts)) where
setUniformFields [] _ = error "Ran out of UniformLocations"
setUniformFields (loc:locs) (Identity x :& xs) =
do traverse_ (asUniform x) loc
setUniformFields locs xs
{-# INLINABLE setUniformFields #-}
|
spetz911/progames
|
vinyl-gl-master/src/Graphics/VinylGL/Uniforms.hs
|
mit
| 7,200
| 0
| 14
| 1,907
| 1,788
| 930
| 858
| 109
| 2
|
-- | Use persistent-mongodb the same way you would use other persistent
-- libraries and refer to the general persistent documentation.
-- There are some new MongoDB specific filters under the filters section.
-- These help extend your query into a nested document.
--
-- However, at some point you will find the normal Persistent APIs lacking
-- and want lower-level MongoDB access.
-- There are functions available to make working with the raw driver
-- easier: they are under the Entity conversion section.
-- You should still use the same connection pool that you are using for Persistent.
--
-- MongoDB is a schema-less database.
-- The MongoDB Persistent backend does not help perform migrations.
-- Unlike SQL backends, uniqueness constraints cannot be created for you.
-- You must place a unique index on unique fields.
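--
-- A hedged sketch of dropping down to the raw driver (added; the @pool@
-- argument and the @"users"@ collection are illustrative):
--
-- > rawUserCount :: ConnectionPool -> IO Int
-- > rawUserCount pool = runMongoDBPoolDef (DB.count (DB.select [] "users")) pool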
{-# LANGUAGE CPP, PackageImports, OverloadedStrings, ScopedTypeVariables #-}
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances, FlexibleContexts #-}
{-# LANGUAGE RankNTypes, TypeFamilies #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE GADTs #-}
module Database.Persist.MongoDB
(
-- * Entity conversion
collectionName
, docToEntityEither
, docToEntityThrow
, entityToDocument
, recordToDocument
, documentFromEntity
, toInsertDoc
, entityToInsertDoc
, updatesToDoc
, filtersToDoc
, toUniquesDoc
-- * MongoDB specific queries
-- $nested
, (->.), (~>.), (?&->.), (?&~>.), (&->.), (&~>.)
-- ** Filters
-- $filters
, nestEq, nestNe, nestGe, nestLe, nestIn, nestNotIn
, anyEq, nestAnyEq, nestBsonEq, anyBsonEq, multiBsonEq
, inList, ninList
, (=~.)
-- non-operator forms of filters
, NestedField(..)
, MongoRegexSearchable
, MongoRegex
-- ** Updates
-- $updates
, nestSet, nestInc, nestDec, nestMul, push, pull, pullAll, addToSet, eachOp
-- * Key conversion helpers
, BackendKey(..)
, keyToOid
, oidToKey
, recordTypeFromKey
, readMayObjectId
, readMayMongoKey
, keyToText
-- * PersistField conversion
, fieldName
-- * using connections
, withConnection
, withMongoPool
, withMongoDBConn
, withMongoDBPool
, createMongoDBPool
, runMongoDBPool
, runMongoDBPoolDef
, ConnectionPool
, Connection
, MongoAuth (..)
-- * Connection configuration
, MongoConf (..)
, defaultMongoConf
, defaultHost
, defaultAccessMode
, defaultPoolStripes
, defaultConnectionIdleTime
, defaultStripeConnections
, applyDockerEnv
-- ** using raw MongoDB pipes
, PipePool
, createMongoDBPipePool
, runMongoDBPipePool
-- * network type
, HostName
, PortID
-- * MongoDB driver types
, Database
, DB.Action
, DB.AccessMode(..)
, DB.master
, DB.slaveOk
, (DB.=:)
, DB.ObjectId
, DB.MongoContext
-- * Database.Persist
, module Database.Persist
) where
import Database.Persist
import qualified Database.Persist.Sql as Sql
import qualified Control.Monad.IO.Class as Trans
import Control.Exception (throw, throwIO)
import Data.Acquire (mkAcquire)
import qualified Data.Traversable as Traversable
import Data.Bson (ObjectId(..))
import qualified Database.MongoDB as DB
import Database.MongoDB.Query (Database)
import Control.Applicative (Applicative, (<$>))
import Network (PortID (PortNumber))
import Network.Socket (HostName)
import Data.Maybe (mapMaybe, fromJust)
import qualified Data.Text as T
import Data.Text (Text)
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as E
import qualified Data.Serialize as Serialize
import Web.PathPieces (PathPiece(..))
import Web.HttpApiData (ToHttpApiData(..), FromHttpApiData(..), parseUrlPieceMaybe, parseUrlPieceWithPrefix, readTextData)
import Data.Conduit
import Control.Monad.IO.Class (liftIO)
import Data.Aeson (Value (Number), (.:), (.:?), (.!=), FromJSON(..), ToJSON(..), withText, withObject)
import Data.Aeson.Types (modifyFailure)
import Control.Monad (liftM, (>=>), forM_)
import qualified Data.Pool as Pool
import Data.Time (NominalDiffTime)
#ifdef HIGH_PRECISION_DATE
import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds)
#endif
import Data.Time.Calendar (Day(..))
#if MIN_VERSION_aeson(0, 7, 0)
#else
import Data.Attoparsec.Number
#endif
import Data.Bits (shiftR)
import Data.Word (Word16)
import Data.Monoid (mappend)
import Control.Monad.Trans.Reader (ask, runReaderT)
import Control.Monad.Trans.Control (MonadBaseControl)
import Numeric (readHex)
import Unsafe.Coerce (unsafeCoerce)
#if MIN_VERSION_base(4,6,0)
import System.Environment (lookupEnv)
#else
import System.Environment (getEnvironment)
#endif
#ifdef DEBUG
import FileLocation (debug)
#endif
#if !MIN_VERSION_base(4,6,0)
lookupEnv :: String -> IO (Maybe String)
lookupEnv key = do
env <- getEnvironment
return $ lookup key env
#endif
instance HasPersistBackend DB.MongoContext where
type BaseBackend DB.MongoContext = DB.MongoContext
persistBackend = id
recordTypeFromKey :: Key record -> record
recordTypeFromKey _ = error "recordTypeFromKey"
newtype NoOrphanNominalDiffTime = NoOrphanNominalDiffTime NominalDiffTime
deriving (Show, Eq, Num)
instance FromJSON NoOrphanNominalDiffTime where
#if MIN_VERSION_aeson(0, 7, 0)
parseJSON (Number x) = (return . NoOrphanNominalDiffTime . fromRational . toRational) x
#else
parseJSON (Number (I x)) = (return . NoOrphanNominalDiffTime . fromInteger) x
parseJSON (Number (D x)) = (return . NoOrphanNominalDiffTime . fromRational . toRational) x
#endif
parseJSON _ = fail "couldn't parse diff time"
newtype NoOrphanPortID = NoOrphanPortID PortID deriving (Show, Eq)
instance FromJSON NoOrphanPortID where
#if MIN_VERSION_aeson(0, 7, 0)
parseJSON (Number x) = (return . NoOrphanPortID . PortNumber . fromIntegral ) cnvX
where cnvX :: Word16
cnvX = round x
#else
parseJSON (Number (I x)) = (return . NoOrphanPortID . PortNumber . fromInteger) x
#endif
parseJSON _ = fail "couldn't parse port number"
data Connection = Connection DB.Pipe DB.Database
type ConnectionPool = Pool.Pool Connection
instance ToHttpApiData (BackendKey DB.MongoContext) where
toUrlPiece = keyToText
instance FromHttpApiData (BackendKey DB.MongoContext) where
parseUrlPiece input = do
s <- parseUrlPieceWithPrefix "o" input <!> return input
MongoKey <$> readTextData s
where
infixl 3 <!>
Left _ <!> y = y
x <!> _ = x
-- | 'PathPiece' is used to convert a key to/from text
instance PathPiece (BackendKey DB.MongoContext) where
toPathPiece = toUrlPiece
fromPathPiece = parseUrlPieceMaybe
keyToText :: BackendKey DB.MongoContext -> Text
keyToText = T.pack . show . unMongoKey
-- | Convert a Text to a Key
readMayMongoKey :: Text -> Maybe (BackendKey DB.MongoContext)
readMayMongoKey = fmap MongoKey . readMayObjectId
readMayObjectId :: Text -> Maybe DB.ObjectId
readMayObjectId str =
case filter (null . snd) $ reads $ T.unpack str :: [(DB.ObjectId,String)] of
(parsed,_):[] -> Just parsed
_ -> Nothing
instance PersistField DB.ObjectId where
toPersistValue = oidToPersistValue
fromPersistValue oid@(PersistObjectId _) = Right $ persistObjectIdToDbOid oid
fromPersistValue (PersistByteString bs) = fromPersistValue (PersistObjectId bs)
fromPersistValue _ = Left $ T.pack "expected PersistObjectId"
instance Sql.PersistFieldSql DB.ObjectId where
sqlType _ = Sql.SqlOther "doesn't make much sense for MongoDB"
instance Sql.PersistFieldSql (BackendKey DB.MongoContext) where
sqlType _ = Sql.SqlOther "doesn't make much sense for MongoDB"
withConnection :: (Trans.MonadIO m, Applicative m)
=> MongoConf
-> (ConnectionPool -> m b) -> m b
withConnection mc =
withMongoDBPool (mgDatabase mc) (T.unpack $ mgHost mc) (mgPort mc) (mgAuth mc) (mgPoolStripes mc) (mgStripeConnections mc) (mgConnectionIdleTime mc)
withMongoDBConn :: (Trans.MonadIO m, Applicative m)
=> Database -> HostName -> PortID
-> Maybe MongoAuth -> NominalDiffTime
-> (ConnectionPool -> m b) -> m b
withMongoDBConn dbname hostname port mauth connectionIdleTime = withMongoDBPool dbname hostname port mauth 1 1 connectionIdleTime
createPipe :: HostName -> PortID -> IO DB.Pipe
createPipe hostname port = DB.connect (DB.Host hostname port)
createReplicatSet :: (DB.ReplicaSetName, [DB.Host]) -> Database -> Maybe MongoAuth -> IO Connection
createReplicatSet rsSeed dbname mAuth = do
pipe <- DB.openReplicaSet rsSeed >>= DB.primary
testAccess pipe dbname mAuth
return $ Connection pipe dbname
createRsPool :: (Trans.MonadIO m, Applicative m) => Database -> ReplicaSetConfig
-> Maybe MongoAuth
-> Int -- ^ pool size (number of stripes)
-> Int -- ^ stripe size (number of connections per stripe)
-> NominalDiffTime -- ^ time a connection is left idle before closing
-> m ConnectionPool
createRsPool dbname (ReplicaSetConfig rsName rsHosts) mAuth connectionPoolSize stripeSize connectionIdleTime = do
Trans.liftIO $ Pool.createPool
(createReplicatSet (rsName, rsHosts) dbname mAuth)
(\(Connection pipe _) -> DB.close pipe)
connectionPoolSize
connectionIdleTime
stripeSize
testAccess :: DB.Pipe -> Database -> Maybe MongoAuth -> IO ()
testAccess pipe dbname mAuth = do
_ <- case mAuth of
Just (MongoAuth user pass) -> DB.access pipe DB.UnconfirmedWrites dbname (DB.auth user pass)
Nothing -> return undefined
return ()
createConnection :: Database -> HostName -> PortID -> Maybe MongoAuth -> IO Connection
createConnection dbname hostname port mAuth = do
pipe <- createPipe hostname port
testAccess pipe dbname mAuth
return $ Connection pipe dbname
createMongoDBPool :: (Trans.MonadIO m, Applicative m) => Database -> HostName -> PortID
-> Maybe MongoAuth
-> Int -- ^ pool size (number of stripes)
-> Int -- ^ stripe size (number of connections per stripe)
-> NominalDiffTime -- ^ time a connection is left idle before closing
-> m ConnectionPool
createMongoDBPool dbname hostname port mAuth connectionPoolSize stripeSize connectionIdleTime = do
Trans.liftIO $ Pool.createPool
(createConnection dbname hostname port mAuth)
(\(Connection pipe _) -> DB.close pipe)
connectionPoolSize
connectionIdleTime
stripeSize
createMongoPool :: (Trans.MonadIO m, Applicative m) => MongoConf -> m ConnectionPool
createMongoPool c@MongoConf{mgReplicaSetConfig = Just (ReplicaSetConfig rsName hosts)} =
createRsPool
(mgDatabase c)
(ReplicaSetConfig rsName ((DB.Host (T.unpack $ mgHost c) (mgPort c)):hosts))
(mgAuth c)
(mgPoolStripes c) (mgStripeConnections c) (mgConnectionIdleTime c)
createMongoPool c@MongoConf{mgReplicaSetConfig = Nothing} =
createMongoDBPool
(mgDatabase c) (T.unpack (mgHost c)) (mgPort c)
(mgAuth c)
(mgPoolStripes c) (mgStripeConnections c) (mgConnectionIdleTime c)
type PipePool = Pool.Pool DB.Pipe
-- | A pool of plain MongoDB pipes.
-- The database parameter has not been applied yet.
-- This is useful for switching between databases (on the same host and port).
-- Unlike the normal pool, no authentication is available.
createMongoDBPipePool :: (Trans.MonadIO m, Applicative m) => HostName -> PortID
-> Int -- ^ pool size (number of stripes)
-> Int -- ^ stripe size (number of connections per stripe)
-> NominalDiffTime -- ^ time a connection is left idle before closing
-> m PipePool
createMongoDBPipePool hostname port connectionPoolSize stripeSize connectionIdleTime =
Trans.liftIO $ Pool.createPool
(createPipe hostname port)
DB.close
connectionPoolSize
connectionIdleTime
stripeSize
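-- A hedged usage sketch for the pipe pool (added; host, port, database, and
-- collection names are illustrative):
--
-- > withRawPipes :: IO [DB.Document]
-- > withRawPipes = do
-- >   pipes <- createMongoDBPipePool "127.0.0.1" (PortNumber 27017) 1 5 60
-- >   runMongoDBPipePool DB.master "test" (DB.rest =<< DB.find (DB.select [] "items")) pipes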
withMongoPool :: (Trans.MonadIO m, Applicative m) => MongoConf -> (ConnectionPool -> m b) -> m b
withMongoPool conf connectionReader = createMongoPool conf >>= connectionReader
withMongoDBPool :: (Trans.MonadIO m, Applicative m) =>
Database -> HostName -> PortID -> Maybe MongoAuth -> Int -> Int -> NominalDiffTime -> (ConnectionPool -> m b) -> m b
withMongoDBPool dbname hostname port mauth poolStripes stripeConnections connectionIdleTime connectionReader = do
pool <- createMongoDBPool dbname hostname port mauth poolStripes stripeConnections connectionIdleTime
connectionReader pool
-- | run a pool created with 'createMongoDBPipePool'
runMongoDBPipePool :: (Trans.MonadIO m, MonadBaseControl IO m) => DB.AccessMode -> Database -> DB.Action m a -> PipePool -> m a
runMongoDBPipePool accessMode db action pool =
Pool.withResource pool $ \pipe -> DB.access pipe accessMode db action
runMongoDBPool :: (Trans.MonadIO m, MonadBaseControl IO m) => DB.AccessMode -> DB.Action m a -> ConnectionPool -> m a
runMongoDBPool accessMode action pool =
Pool.withResource pool $ \(Connection pipe db) -> DB.access pipe accessMode db action
-- | use default 'AccessMode'
runMongoDBPoolDef :: (Trans.MonadIO m, MonadBaseControl IO m) => DB.Action m a -> ConnectionPool -> m a
runMongoDBPoolDef = runMongoDBPool defaultAccessMode
queryByKey :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> Key record -> DB.Query
queryByKey k = (DB.select (keyToMongoDoc k) (collectionNameFromKey k)) {DB.project = projectionFromKey k}
selectByKey :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> Key record -> DB.Selection
selectByKey k = DB.select (keyToMongoDoc k) (collectionNameFromKey k)
updatesToDoc :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> [Update record] -> DB.Document
updatesToDoc upds = map updateToMongoField upds
updateToBson :: Text
-> PersistValue
-> Either PersistUpdate MongoUpdateOperation
-> DB.Field
updateToBson fname v up =
#ifdef DEBUG
debug (
#endif
opName DB.:= DB.Doc [fname DB.:= opValue]
#ifdef DEBUG
)
#endif
where
inc = "$inc"
mul = "$mul"
(opName, opValue) = case up of
Left pup -> case (pup, v) of
(Assign, PersistNull) -> ("$unset", DB.Int64 1)
(Assign,a) -> ("$set", DB.val a)
(Add, a) -> (inc, DB.val a)
(Subtract, PersistInt64 i) -> (inc, DB.Int64 (-i))
(Multiply, PersistInt64 i) -> (mul, DB.Int64 i)
(Multiply, PersistDouble d) -> (mul, DB.Float d)
(Subtract, _) -> error "expected PersistInt64 for a subtraction"
        (Multiply, _) -> error "expected PersistInt64 or PersistDouble for a multiplication"
-- Obviously this could be supported for floats by multiplying with 1/x
(Divide, _) -> throw $ PersistMongoDBUnsupported "divide not supported"
(BackendSpecificUpdate bsup, _) -> throw $ PersistMongoDBError $
T.pack $ "did not expect BackendSpecificUpdate " ++ T.unpack bsup
Right mup -> case mup of
MongoEach op -> case op of
MongoPull -> ("$pullAll", DB.val v)
_ -> (opToText op, DB.Doc ["$each" DB.:= DB.val v])
MongoSimple x -> (opToText x, DB.val v)
updateToMongoField :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> Update record -> DB.Field
updateToMongoField (Update field v up) = updateToBson (fieldName field) (toPersistValue v) (Left up)
updateToMongoField (BackendUpdate up) = mongoUpdateToDoc up
-- | convert a unique key into a MongoDB document
toUniquesDoc :: forall record. (PersistEntity record) => Unique record -> [DB.Field]
toUniquesDoc uniq = zipWith (DB.:=)
(map (unDBName . snd) $ persistUniqueToFieldNames uniq)
(map DB.val (persistUniqueToValues uniq))
-- | convert a PersistEntity into document fields.
-- for inserts only: nulls are ignored so they will be unset in the document.
-- 'entityToDocument' includes nulls
toInsertDoc :: forall record. (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> record -> DB.Document
toInsertDoc record = zipFilter (embeddedFields $ toEmbedEntityDef entDef)
(map toPersistValue $ toPersistFields record)
where
entDef = entityDef $ Just record
zipFilter :: [EmbedFieldDef] -> [PersistValue] -> DB.Document
zipFilter [] _ = []
zipFilter _ [] = []
zipFilter (fd:efields) (pv:pvs) =
if isNull pv then recur else
(fieldToLabel fd DB.:= embeddedVal (emFieldEmbed fd) pv):recur
where
recur = zipFilter efields pvs
isNull PersistNull = True
isNull (PersistMap m) = null m
isNull (PersistList l) = null l
isNull _ = False
        -- make sure to remove nulls from embedded entities as well
embeddedVal :: Maybe EmbedEntityDef -> PersistValue -> DB.Value
embeddedVal (Just emDef) (PersistMap m) = DB.Doc $
zipFilter (embeddedFields emDef) $ map snd m
embeddedVal je@(Just _) (PersistList l) = DB.Array $ map (embeddedVal je) l
embeddedVal _ pv = DB.val pv
entityToInsertDoc :: forall record. (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> Entity record -> DB.Document
entityToInsertDoc (Entity key record) = keyToMongoDoc key ++ toInsertDoc record
collectionName :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> record -> Text
collectionName = unDBName . entityDB . entityDef . Just
-- | convert a PersistEntity into document fields.
-- unlike 'toInsertDoc', nulls are included.
recordToDocument :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> record -> DB.Document
recordToDocument record = zipToDoc (map fieldDB $ entityFields entity) (toPersistFields record)
where
entity = entityDef $ Just record
entityToDocument :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> record -> DB.Document
entityToDocument = recordToDocument
{-# DEPRECATED entityToDocument "use recordToDocument" #-}
documentFromEntity :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> Entity record -> DB.Document
documentFromEntity (Entity key record) =
keyToMongoDoc key ++ entityToDocument record
zipToDoc :: PersistField a => [DBName] -> [a] -> [DB.Field]
zipToDoc [] _ = []
zipToDoc _ [] = []
zipToDoc (e:efields) (p:pfields) =
let pv = toPersistValue p
in (unDBName e DB.:= DB.val pv):zipToDoc efields pfields
fieldToLabel :: EmbedFieldDef -> Text
fieldToLabel = unDBName . emFieldDB
keyFrom_idEx :: (Trans.MonadIO m, PersistEntity record) => DB.Value -> m (Key record)
keyFrom_idEx idVal = case keyFrom_id idVal of
Right k -> return k
Left err -> liftIO $ throwIO $ PersistMongoDBError $ "could not convert key: "
`mappend` T.pack (show idVal)
`mappend` err
keyFrom_id :: (PersistEntity record) => DB.Value -> Either Text (Key record)
keyFrom_id idVal = case cast idVal of
(PersistMap m) -> keyFromValues $ map snd m
pv -> keyFromValues [pv]
-- | It would make sense to define the instance for ObjectId
-- and then use newtype deriving
-- however, that would create an orphan instance
instance ToJSON (BackendKey DB.MongoContext) where
toJSON (MongoKey (Oid x y)) = toJSON $ DB.showHexLen 8 x $ DB.showHexLen 16 y ""
instance FromJSON (BackendKey DB.MongoContext) where
parseJSON = withText "MongoKey" $ \t ->
maybe
(fail "Invalid base64")
(return . MongoKey . persistObjectIdToDbOid . PersistObjectId)
$ fmap (i2bs (8 * 12) . fst) $ headMay $ readHex $ T.unpack t
where
-- should these be exported from Types/Base.hs ?
headMay [] = Nothing
headMay (x:_) = Just x
-- taken from crypto-api
-- |@i2bs bitLen i@ converts @i@ to a 'ByteString' of @bitLen@ bits (must be a multiple of 8).
i2bs :: Int -> Integer -> BS.ByteString
i2bs l i = BS.unfoldr (\l' -> if l' < 0 then Nothing else Just (fromIntegral (i `shiftR` l'), l' - 8)) (l-8)
{-# INLINE i2bs #-}
-- | older versions of haddock (like that on hackage) do not show that this defines
-- @BackendKey DB.MongoContext = MongoKey { unMongoKey :: DB.ObjectId }@
instance PersistCore DB.MongoContext where
newtype BackendKey DB.MongoContext = MongoKey { unMongoKey :: DB.ObjectId }
deriving (Show, Read, Eq, Ord, PersistField)
instance PersistStoreWrite DB.MongoContext where
insert record = DB.insert (collectionName record) (toInsertDoc record)
>>= keyFrom_idEx
insertMany [] = return []
insertMany records@(r:_) = mapM keyFrom_idEx =<<
DB.insertMany (collectionName r) (map toInsertDoc records)
insertEntityMany [] = return ()
insertEntityMany ents@(Entity _ r : _) =
DB.insertMany_ (collectionName r) (map entityToInsertDoc ents)
insertKey k record = DB.insert_ (collectionName record) $
entityToInsertDoc (Entity k record)
repsert k record = DB.save (collectionName record) $
documentFromEntity (Entity k record)
replace k record = do
DB.replace (selectByKey k) (recordToDocument record)
return ()
delete k =
DB.deleteOne DB.Select {
DB.coll = collectionNameFromKey k
, DB.selector = keyToMongoDoc k
}
update _ [] = return ()
update key upds =
DB.modify
(DB.Select (keyToMongoDoc key) (collectionNameFromKey key))
$ updatesToDoc upds
updateGet key upds = do
result <- DB.findAndModify (queryByKey key) (updatesToDoc upds)
either err instantiate result
where
instantiate doc = do
Entity _ rec <- fromPersistValuesThrow t doc
return rec
err msg = Trans.liftIO $ throwIO $ KeyNotFound $ show key ++ msg
t = entityDefFromKey key
instance PersistStoreRead DB.MongoContext where
get k = do
d <- DB.findOne (queryByKey k)
case d of
Nothing -> return Nothing
Just doc -> do
Entity _ ent <- fromPersistValuesThrow t doc
return $ Just ent
where
t = entityDefFromKey k
instance PersistUniqueRead DB.MongoContext where
getBy uniq = do
mdoc <- DB.findOne $
(DB.select (toUniquesDoc uniq) (collectionName rec)) {DB.project = projectionFromRecord rec}
case mdoc of
Nothing -> return Nothing
Just doc -> liftM Just $ fromPersistValuesThrow t doc
where
t = entityDef $ Just rec
rec = dummyFromUnique uniq
instance PersistUniqueWrite DB.MongoContext where
deleteBy uniq =
DB.delete DB.Select {
DB.coll = collectionName $ dummyFromUnique uniq
, DB.selector = toUniquesDoc uniq
}
upsert newRecord upds = do
uniq <- onlyUnique newRecord
upsertBy uniq newRecord upds
upsertBy uniq newRecord upds = do
let uniqueDoc = toUniquesDoc uniq
let uniqKeys = map DB.label uniqueDoc
let insDoc = DB.exclude uniqKeys $ toInsertDoc newRecord
let selection = DB.select uniqueDoc $ collectionName newRecord
if null upds
then DB.upsert selection ["$set" DB.=: insDoc]
else do
DB.upsert selection ["$setOnInsert" DB.=: insDoc]
DB.modify selection $ updatesToDoc upds
-- because findAndModify $setOnInsert is broken we do a separate get now
mdoc <- getBy uniq
maybe (err "possible race condition: getBy found Nothing")
return mdoc
where
err = Trans.liftIO . throwIO . UpsertError
{-
-- cannot use findAndModify
-- because $setOnInsert is crippled
-- https://jira.mongodb.org/browse/SERVER-2643
result <- DB.findAndModifyOpts
selection
(DB.defFamUpdateOpts ("$setOnInsert" DB.=: insDoc : ["$set" DB.=: insDoc]))
{ DB.famUpsert = True }
either err instantiate result
where
-- this is only possible when new is False
instantiate Nothing = error "upsert: impossible null"
instantiate (Just doc) =
fromPersistValuesThrow (entityDef $ Just newRecord) doc
-}
-- | It would make more sense to call this _id, but GHC treats a leading underscore in special ways
id_ :: T.Text
id_ = "_id"
-- _id is always the primary key in MongoDB
-- but _id can contain any unique value
keyToMongoDoc :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> Key record -> DB.Document
keyToMongoDoc k = case entityPrimary $ entityDefFromKey k of
Nothing -> zipToDoc [DBName id_] values
Just pdef -> [id_ DB.=: zipToDoc (primaryNames pdef) values]
where
primaryNames = map fieldDB . compositeFields
values = keyToValues k
entityDefFromKey :: PersistEntity record => Key record -> EntityDef
entityDefFromKey = entityDef . Just . recordTypeFromKey
collectionNameFromKey :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
=> Key record -> Text
collectionNameFromKey = collectionName . recordTypeFromKey
projectionFromEntityDef :: EntityDef -> DB.Projector
projectionFromEntityDef eDef =
map toField (entityFields eDef)
where
toField :: FieldDef -> DB.Field
toField fDef = (unDBName (fieldDB fDef)) DB.=: (1 :: Int)
projectionFromKey :: PersistEntity record => Key record -> DB.Projector
projectionFromKey = projectionFromEntityDef . entityDefFromKey
projectionFromRecord :: PersistEntity record => record -> DB.Projector
projectionFromRecord = projectionFromEntityDef . entityDef . Just
instance PersistQueryWrite DB.MongoContext where
updateWhere _ [] = return ()
updateWhere filts upds =
DB.modify DB.Select {
DB.coll = collectionName $ dummyFromFilts filts
, DB.selector = filtersToDoc filts
} $ updatesToDoc upds
deleteWhere filts = do
DB.delete DB.Select {
DB.coll = collectionName $ dummyFromFilts filts
, DB.selector = filtersToDoc filts
}
instance PersistQueryRead DB.MongoContext where
count filts = do
i <- DB.count query
return $ fromIntegral i
where
query = DB.select (filtersToDoc filts) $
collectionName $ dummyFromFilts filts
    -- | Uses the cursor option NoCursorTimeout.
    -- If there is no sorting, it turns the $snapshot option on
    -- and explicitly closes the cursor when done.
selectSourceRes filts opts = do
context <- ask
return (pullCursor context `fmap` mkAcquire (open context) (close context))
where
close :: DB.MongoContext -> DB.Cursor -> IO ()
close context cursor = runReaderT (DB.closeCursor cursor) context
open :: DB.MongoContext -> IO DB.Cursor
open = runReaderT (DB.find (makeQuery filts opts)
-- it is an error to apply $snapshot when sorting
{ DB.snapshot = noSort
, DB.options = [DB.NoCursorTimeout]
})
pullCursor context cursor = do
mdoc <- liftIO $ runReaderT (DB.nextBatch cursor) context
case mdoc of
[] -> return ()
docs -> do
forM_ docs $ fromPersistValuesThrow t >=> yield
pullCursor context cursor
t = entityDef $ Just $ dummyFromFilts filts
(_, _, orders) = limitOffsetOrder opts
noSort = null orders
selectFirst filts opts = DB.findOne (makeQuery filts opts)
>>= Traversable.mapM (fromPersistValuesThrow t)
where
t = entityDef $ Just $ dummyFromFilts filts
selectKeysRes filts opts = do
context <- ask
let make = do
cursor <- liftIO $ flip runReaderT context $ DB.find $ (makeQuery filts opts) {
DB.project = [id_ DB.=: (1 :: Int)]
}
pullCursor context cursor
return $ return make
where
pullCursor context cursor = do
mdoc <- liftIO $ runReaderT (DB.next cursor) context
case mdoc of
Nothing -> return ()
Just [_id DB.:= idVal] -> do
k <- liftIO $ keyFrom_idEx idVal
yield k
pullCursor context cursor
Just y -> liftIO $ throwIO $ PersistMarshalError $ T.pack $ "Unexpected in selectKeys: " ++ show y
orderClause :: PersistEntity val => SelectOpt val -> DB.Field
orderClause o = case o of
Asc f -> fieldName f DB.=: ( 1 :: Int)
Desc f -> fieldName f DB.=: (-1 :: Int)
_ -> error "orderClause: expected Asc or Desc"
makeQuery :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => [Filter record] -> [SelectOpt record] -> DB.Query
makeQuery filts opts =
(DB.select (filtersToDoc filts) (collectionName $ dummyFromFilts filts)) {
DB.limit = fromIntegral limit
, DB.skip = fromIntegral offset
, DB.sort = orders
, DB.project = projectionFromRecord (dummyFromFilts filts)
}
where
(limit, offset, orders') = limitOffsetOrder opts
orders = map orderClause orders'
filtersToDoc :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => [Filter record] -> DB.Document
filtersToDoc filts =
#ifdef DEBUG
debug $
#endif
if null filts then [] else multiFilter AndDollar filts
filterToDocument :: (PersistEntity val, PersistEntityBackend val ~ DB.MongoContext) => Filter val -> DB.Document
filterToDocument f =
case f of
Filter field v filt -> [filterToBSON (fieldName field) v filt]
BackendFilter mf -> mongoFilterToDoc mf
-- The empty filter case should never occur when the user uses ||.
-- An empty filter list will throw an exception in multiFilter
--
-- The alternative would be to create a query which always returns true
-- However, I don't think an end user ever wants that.
FilterOr fs -> multiFilter OrDollar fs
-- Ignore an empty filter list instead of throwing an exception.
-- \$and is necessary in only a few cases, but it makes query construction easier
FilterAnd [] -> []
FilterAnd fs -> multiFilter AndDollar fs
data MultiFilter = OrDollar | AndDollar deriving Show
toMultiOp :: MultiFilter -> Text
toMultiOp OrDollar = orDollar
toMultiOp AndDollar = andDollar
multiFilter :: forall record. (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => MultiFilter -> [Filter record] -> [DB.Field]
multiFilter _ [] = throw $ PersistMongoDBError "An empty list of filters was given"
multiFilter multi filters =
case (multi, filter (not . null) (map filterToDocument filters)) of
-- a $or must have at least 2 items
(OrDollar, []) -> orError
(AndDollar, []) -> []
(OrDollar, _:[]) -> orError
(AndDollar, doc:[]) -> doc
(_, doc) -> [toMultiOp multi DB.:= DB.Array (map DB.Doc doc)]
where
orError = throw $ PersistMongoDBError $
"An empty list of filters was given to one side of ||."
existsDollar, orDollar, andDollar :: Text
existsDollar = "$exists"
orDollar = "$or"
andDollar = "$and"
filterToBSON :: forall a. ( PersistField a)
=> Text
-> Either a [a]
-> PersistFilter
-> DB.Field
filterToBSON fname v filt = case filt of
Eq -> nullEq
Ne -> nullNeq
_ -> notEquality
where
dbv = toValue v
notEquality = fname DB.=: [showFilter filt DB.:= dbv]
nullEq = case dbv of
DB.Null -> orDollar DB.=:
[ [fname DB.:= DB.Null]
, [fname DB.:= DB.Doc [existsDollar DB.:= DB.Bool False]]
]
_ -> fname DB.:= dbv
nullNeq = case dbv of
DB.Null ->
fname DB.:= DB.Doc
[ showFilter Ne DB.:= DB.Null
, existsDollar DB.:= DB.Bool True
]
_ -> notEquality
showFilter Ne = "$ne"
showFilter Gt = "$gt"
showFilter Lt = "$lt"
showFilter Ge = "$gte"
showFilter Le = "$lte"
showFilter In = "$in"
showFilter NotIn = "$nin"
showFilter Eq = error "EQ filter not expected"
showFilter (BackendSpecificFilter bsf) = throw $ PersistMongoDBError $ T.pack $ "did not expect BackendSpecificFilter " ++ T.unpack bsf
mongoFilterToBSON :: forall typ. PersistField typ
=> Text
-> MongoFilterOperator typ
-> DB.Document
mongoFilterToBSON fname filt = case filt of
(PersistFilterOperator v op) -> [filterToBSON fname v op]
(MongoFilterOperator bval) -> [fname DB.:= bval]
mongoUpdateToBson :: forall typ. PersistField typ
=> Text
-> UpdateValueOp typ
-> DB.Field
mongoUpdateToBson fname upd = case upd of
UpdateValueOp (Left v) op -> updateToBson fname (toPersistValue v) op
UpdateValueOp (Right v) op -> updateToBson fname (PersistList $ map toPersistValue v) op
mongoUpdateToDoc :: PersistEntity record => MongoUpdate record -> DB.Field
mongoUpdateToDoc (NestedUpdate field op) = mongoUpdateToBson (nestedFieldName field) op
mongoUpdateToDoc (ArrayUpdate field op) = mongoUpdateToBson (fieldName field) op
mongoFilterToDoc :: PersistEntity record => MongoFilter record -> DB.Document
mongoFilterToDoc (NestedFilter field op) = mongoFilterToBSON (nestedFieldName field) op
mongoFilterToDoc (ArrayFilter field op) = mongoFilterToBSON (fieldName field) op
mongoFilterToDoc (NestedArrayFilter field op) = mongoFilterToBSON (nestedFieldName field) op
mongoFilterToDoc (RegExpFilter fn (reg, opts)) = [ fieldName fn DB.:= DB.RegEx (DB.Regex reg opts)]
nestedFieldName :: forall record typ. PersistEntity record => NestedField record typ -> Text
nestedFieldName = T.intercalate "." . nesFldName
where
nesFldName :: forall r1 r2. (PersistEntity r1) => NestedField r1 r2 -> [DB.Label]
nesFldName (nf1 `LastEmbFld` nf2) = [fieldName nf1, fieldName nf2]
nesFldName ( f1 `MidEmbFld` f2) = fieldName f1 : nesFldName f2
nesFldName ( f1 `MidNestFlds` f2) = fieldName f1 : nesFldName f2
nesFldName ( f1 `MidNestFldsNullable` f2) = fieldName f1 : nesFldName f2
nesFldName (nf1 `LastNestFld` nf2) = [fieldName nf1, fieldName nf2]
nesFldName (nf1 `LastNestFldNullable` nf2) = [fieldName nf1, fieldName nf2]
toValue :: forall a. PersistField a => Either a [a] -> DB.Value
toValue val =
case val of
Left v -> DB.val $ toPersistValue v
Right vs -> DB.val $ map toPersistValue vs
fieldName :: forall record typ. (PersistEntity record) => EntityField record typ -> DB.Label
fieldName f | fieldHaskell fd == HaskellName "Id" = id_
| otherwise = unDBName $ fieldDB $ fd
where
fd = persistFieldDef f
docToEntityEither :: forall record. (PersistEntity record) => DB.Document -> Either T.Text (Entity record)
docToEntityEither doc = entity
where
entDef = entityDef $ Just (getType entity)
entity = eitherFromPersistValues entDef doc
getType :: Either err (Entity ent) -> ent
getType = error "docToEntityEither/getType: never here"
docToEntityThrow :: forall m record. (Trans.MonadIO m, PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => DB.Document -> m (Entity record)
docToEntityThrow doc =
case docToEntityEither doc of
Left s -> Trans.liftIO . throwIO $ PersistMarshalError $ s
Right entity -> return entity
fromPersistValuesThrow :: (Trans.MonadIO m, PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => EntityDef -> [DB.Field] -> m (Entity record)
fromPersistValuesThrow entDef doc =
case eitherFromPersistValues entDef doc of
Left t -> Trans.liftIO . throwIO $ PersistMarshalError $
unHaskellName (entityHaskell entDef) `mappend` ": " `mappend` t
Right entity -> return entity
mapLeft :: (a -> c) -> Either a b -> Either c b
mapLeft _ (Right r) = Right r
mapLeft f (Left l) = Left (f l)
eitherFromPersistValues :: (PersistEntity record) => EntityDef -> [DB.Field] -> Either T.Text (Entity record)
eitherFromPersistValues entDef doc = case mKey of
Nothing -> addDetail $ Left $ "could not find _id field: "
Just kpv -> do
body <- addDetail (fromPersistValues (map snd $ orderPersistValues (toEmbedEntityDef entDef) castDoc))
key <- keyFromValues [kpv]
return $ Entity key body
where
addDetail :: Either Text a -> Either Text a
addDetail = mapLeft (\msg -> msg `mappend` " for doc: " `mappend` T.pack (show doc))
castDoc = assocListFromDoc doc
-- normally _id is the first field
mKey = lookup id_ castDoc
-- | unlike many SQL databases, MongoDB makes no guarantee of the ordering
-- of the fields returned in the document.
-- Ordering might be maintained if persistent were the only user of the db,
-- but other tools may be using MongoDB.
--
-- Persistent creates a Haskell record from a list of PersistValues;
-- most importantly, this function first puts the PersistValues into the proper order.
orderPersistValues :: EmbedEntityDef -> [(Text, PersistValue)] -> [(Text, PersistValue)]
orderPersistValues entDef castDoc = reorder
where
castColumns = map nameAndEmbed (embeddedFields entDef)
nameAndEmbed fdef = (fieldToLabel fdef, emFieldEmbed fdef)
-- TODO: the below reasoning should be re-thought now that we are no longer inserting null: searching for a null column will look at every returned field before giving up
-- Also, we are now doing the _id lookup at the start.
--
-- we have an alist of fields that need to be the same order as entityColumns
--
-- this naive lookup is O(n^2)
-- reorder = map (fromJust . (flip Prelude.lookup $ castDoc)) castColumns
--
-- this is O(n * log(n))
-- reorder = map (\c -> (M.fromList castDoc) M.! c) castColumns
--
-- and finally, this is O(n * log(n))
-- * do an alist lookup for each column
-- * but once we found an item in the alist use a new alist without that item for future lookups
-- * so for the last query there is only one item left
--
reorder :: [(Text, PersistValue)]
reorder = match castColumns castDoc []
where
match :: [(Text, Maybe EmbedEntityDef)]
-> [(Text, PersistValue)]
-> [(Text, PersistValue)]
-> [(Text, PersistValue)]
-- when there are no more Persistent castColumns we are done
--
-- allow extra mongoDB fields that persistent does not know about
-- another application may use fields we don't care about
-- our own application may set extra fields with the raw driver
match [] _ values = values
match (column:columns) fields values =
let (found, unused) = matchOne fields []
in match columns unused $ values ++
[(fst column, nestedOrder (snd column) (snd found))]
where
nestedOrder (Just em) (PersistMap m) =
PersistMap $ orderPersistValues em m
nestedOrder (Just em) (PersistList l) =
PersistList $ map (nestedOrder (Just em)) l
-- implied: nestedOrder Nothing found = found
nestedOrder _ found = found
matchOne (field:fs) tried =
if fst column == fst field
-- snd drops the name now that it has been used to make the match
-- persistent will add the field name later
then (field, tried ++ fs)
else matchOne fs (field:tried)
-- if field is not found, assume it was a Nothing
--
-- a Nothing could be stored as null, but that would take up space.
-- instead, we want to store no field at all: that takes less space.
-- Also, another ORM may be doing the same
-- Also, adding a Maybe field this way means no migration is required
matchOne [] tried = ((fst column, PersistNull), tried)
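-- A sketch of the effect (personDef is a hypothetical EmbedEntityDef whose
-- columns are name then age):
--
-- > orderPersistValues personDef [("age", PersistInt64 30), ("name", PersistText "bob")]
-- >   == [("name", PersistText "bob"), ("age", PersistInt64 30)]
--
-- A column missing from the document comes back as PersistNull, which is how
-- omitted Maybe fields round-trip.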
assocListFromDoc :: DB.Document -> [(Text, PersistValue)]
assocListFromDoc = Prelude.map (\f -> ( (DB.label f), cast (DB.value f) ) )
oidToPersistValue :: DB.ObjectId -> PersistValue
oidToPersistValue = PersistObjectId . Serialize.encode
oidToKey :: (ToBackendKey DB.MongoContext record) => DB.ObjectId -> Key record
oidToKey = fromBackendKey . MongoKey
persistObjectIdToDbOid :: PersistValue -> DB.ObjectId
persistObjectIdToDbOid (PersistObjectId k) = case Serialize.decode k of
Left msg -> throw $ PersistError $ T.pack $ "error decoding " ++ (show k) ++ ": " ++ msg
Right o -> o
persistObjectIdToDbOid _ = throw $ PersistInvalidField "expected PersistObjectId"
keyToOid :: ToBackendKey DB.MongoContext record => Key record -> DB.ObjectId
keyToOid = unMongoKey . toBackendKey
instance DB.Val PersistValue where
val (PersistInt64 x) = DB.Int64 x
val (PersistText x) = DB.String x
val (PersistDouble x) = DB.Float x
val (PersistBool x) = DB.Bool x
#ifdef HIGH_PRECISION_DATE
val (PersistUTCTime x) = DB.Int64 $ round $ 1000 * 1000 * 1000 * (utcTimeToPOSIXSeconds x)
#else
-- this is just millisecond precision: https://jira.mongodb.org/browse/SERVER-1460
val (PersistUTCTime x) = DB.UTC x
#endif
val (PersistDay d) = DB.Int64 $ fromInteger $ toModifiedJulianDay d
val (PersistNull) = DB.Null
val (PersistList l) = DB.Array $ map DB.val l
val (PersistMap m) = DB.Doc $ map (\(k, v)-> (DB.=:) k v) m
val (PersistByteString x) = DB.Bin (DB.Binary x)
val x@(PersistObjectId _) = DB.ObjId $ persistObjectIdToDbOid x
val (PersistTimeOfDay _) = throw $ PersistMongoDBUnsupported "PersistTimeOfDay not implemented for the MongoDB backend. only PersistUTCTime currently implemented"
val (PersistRational _) = throw $ PersistMongoDBUnsupported "PersistRational not implemented for the MongoDB backend"
val (PersistDbSpecific _) = throw $ PersistMongoDBUnsupported "PersistDbSpecific not implemented for the MongoDB backend"
cast' (DB.Float x) = Just (PersistDouble x)
cast' (DB.Int32 x) = Just $ PersistInt64 $ fromIntegral x
cast' (DB.Int64 x) = Just $ PersistInt64 x
cast' (DB.String x) = Just $ PersistText x
cast' (DB.Bool x) = Just $ PersistBool x
cast' (DB.UTC d) = Just $ PersistUTCTime d
cast' DB.Null = Just $ PersistNull
cast' (DB.Bin (DB.Binary b)) = Just $ PersistByteString b
cast' (DB.Fun (DB.Function f)) = Just $ PersistByteString f
cast' (DB.Uuid (DB.UUID uid)) = Just $ PersistByteString uid
cast' (DB.Md5 (DB.MD5 md5)) = Just $ PersistByteString md5
cast' (DB.UserDef (DB.UserDefined bs)) = Just $ PersistByteString bs
cast' (DB.RegEx (DB.Regex us1 us2)) = Just $ PersistByteString $ E.encodeUtf8 $ T.append us1 us2
cast' (DB.Doc doc) = Just $ PersistMap $ assocListFromDoc doc
cast' (DB.Array xs) = Just $ PersistList $ mapMaybe DB.cast' xs
cast' (DB.ObjId x) = Just $ oidToPersistValue x
cast' (DB.JavaScr _) = throw $ PersistMongoDBUnsupported "cast operation not supported for javascript"
cast' (DB.Sym _) = throw $ PersistMongoDBUnsupported "cast operation not supported for sym"
cast' (DB.Stamp _) = throw $ PersistMongoDBUnsupported "cast operation not supported for stamp"
cast' (DB.MinMax _) = throw $ PersistMongoDBUnsupported "cast operation not supported for minmax"
cast :: DB.Value -> PersistValue
-- since we have full case analysis this won't ever be Nothing
-- However, unsupported types do throw an exception in pure code
-- probably should be re-worked to throw in IO
cast = fromJust . DB.cast'
instance Serialize.Serialize DB.ObjectId where
put (DB.Oid w1 w2) = do Serialize.put w1
Serialize.put w2
get = do w1 <- Serialize.get
w2 <- Serialize.get
return (DB.Oid w1 w2)
dummyFromUnique :: Unique v -> v
dummyFromUnique _ = error "dummyFromUnique"
dummyFromFilts :: [Filter v] -> v
dummyFromFilts _ = error "dummyFromFilts"
data MongoAuth = MongoAuth DB.Username DB.Password deriving Show
-- | Information required to connect to a mongo database
data MongoConf = MongoConf
{ mgDatabase :: Text
, mgHost :: Text
, mgPort :: PortID
, mgAuth :: Maybe MongoAuth
, mgAccessMode :: DB.AccessMode
, mgPoolStripes :: Int
, mgStripeConnections :: Int
, mgConnectionIdleTime :: NominalDiffTime
-- | YAML fields for this are @rsName@ and @rsSecondaries@
-- mgHost is assumed to be the primary
, mgReplicaSetConfig :: Maybe ReplicaSetConfig
} deriving Show
defaultHost :: Text
defaultHost = "127.0.0.1"
defaultAccessMode :: DB.AccessMode
defaultAccessMode = DB.ConfirmWrites ["w" DB.:= DB.Int32 1]
defaultPoolStripes, defaultStripeConnections :: Int
defaultPoolStripes = 1
defaultStripeConnections = 10
defaultConnectionIdleTime :: NominalDiffTime
defaultConnectionIdleTime = 20
defaultMongoConf :: Text -> MongoConf
defaultMongoConf dbName = MongoConf
{ mgDatabase = dbName
, mgHost = defaultHost
, mgPort = DB.defaultPort
, mgAuth = Nothing
, mgAccessMode = defaultAccessMode
, mgPoolStripes = defaultPoolStripes
, mgStripeConnections = defaultStripeConnections
, mgConnectionIdleTime = defaultConnectionIdleTime
, mgReplicaSetConfig = Nothing
}
data ReplicaSetConfig = ReplicaSetConfig DB.ReplicaSetName [DB.Host]
deriving Show
instance FromJSON MongoConf where
parseJSON v = modifyFailure ("Persistent: error loading MongoDB conf: " ++) $
flip (withObject "MongoConf") v $ \o -> do
db <- o .: "database"
host <- o .:? "host" .!= defaultHost
NoOrphanPortID port <- o .:? "port" .!= NoOrphanPortID DB.defaultPort
poolStripes <- o .:? "poolstripes" .!= defaultPoolStripes
stripeConnections <- o .:? "connections" .!= defaultStripeConnections
NoOrphanNominalDiffTime connectionIdleTime <- o .:? "connectionIdleTime" .!= NoOrphanNominalDiffTime defaultConnectionIdleTime
mUser <- o .:? "user"
mPass <- o .:? "password"
accessString <- o .:? "accessMode" .!= confirmWrites
mRsName <- o .:? "rsName"
rsSecondaires <- o .:? "rsSecondaries" .!= []
mPoolSize <- o .:? "poolsize"
case mPoolSize of
Nothing -> return ()
Just (_::Int) -> fail "specified the deprecated poolsize attribute. Please specify connections instead. You can also specify a pools attribute, which defaults to 1. Total connections opened to the db are connections * pools"
accessMode <- case accessString of
"ReadStaleOk" -> return DB.ReadStaleOk
"UnconfirmedWrites" -> return DB.UnconfirmedWrites
"ConfirmWrites" -> return defaultAccessMode
badAccess -> fail $ "unknown accessMode: " ++ T.unpack badAccess
let rs = case (mRsName, rsSecondaires) of
(Nothing, []) -> Nothing
(Nothing, _) -> error "found an rsSecondaries key but did not find the expected rsName key"
(Just rsName, hosts) -> Just $ ReplicaSetConfig rsName $ fmap DB.readHostPort hosts
return MongoConf {
mgDatabase = db
, mgHost = host
, mgPort = port
, mgAuth =
case (mUser, mPass) of
(Just user, Just pass) -> Just (MongoAuth user pass)
_ -> Nothing
, mgPoolStripes = poolStripes
, mgStripeConnections = stripeConnections
, mgAccessMode = accessMode
, mgConnectionIdleTime = connectionIdleTime
, mgReplicaSetConfig = rs
}
where
confirmWrites = "ConfirmWrites"
instance PersistConfig MongoConf where
type PersistConfigBackend MongoConf = DB.Action
type PersistConfigPool MongoConf = ConnectionPool
createPoolConfig = createMongoPool
runPool c = runMongoDBPool (mgAccessMode c)
loadConfig = parseJSON
-- | docker integration: change the host to the mongodb link
applyDockerEnv :: MongoConf -> IO MongoConf
applyDockerEnv mconf = do
mHost <- lookupEnv "MONGODB_PORT_27017_TCP_ADDR"
return $ case mHost of
Nothing -> mconf
Just h -> mconf { mgHost = T.pack h }
-- ---------------------------
-- * MongoDB specific Filters
-- $filters
--
-- You can find example usage for all of Persistent in our test cases:
-- <https://github.com/yesodweb/persistent/blob/master/persistent-test/EmbedTest.hs#L144>
--
-- These filters create a query that reaches deeper into a document with
-- nested fields.
type instance BackendSpecificFilter DB.MongoContext record = MongoFilter record
type instance BackendSpecificUpdate DB.MongoContext record = MongoUpdate record
data NestedField record typ
= forall emb. PersistEntity emb => EntityField record [emb] `LastEmbFld` EntityField emb typ
| forall emb. PersistEntity emb => EntityField record [emb] `MidEmbFld` NestedField emb typ
| forall nest. PersistEntity nest => EntityField record nest `MidNestFlds` NestedField nest typ
| forall nest. PersistEntity nest => EntityField record (Maybe nest) `MidNestFldsNullable` NestedField nest typ
| forall nest. PersistEntity nest => EntityField record nest `LastNestFld` EntityField nest typ
| forall nest. PersistEntity nest => EntityField record (Maybe nest) `LastNestFldNullable` EntityField nest typ
-- | A MongoRegex represents a Regular expression.
-- It is a tuple of the expression and the options for the regular expression, respectively
-- Options are listed here: <http://docs.mongodb.org/manual/reference/operator/query/regex/>
-- If you use the same options you may want to define a helper such as @r t = (t, "ims")@
type MongoRegex = (Text, Text)
-- | Mark the subset of 'PersistField's that can be searched by a mongoDB regex
-- Anything stored as PersistText or an array of PersistText would be valid
class PersistField typ => MongoRegexSearchable typ where
instance MongoRegexSearchable Text
instance MongoRegexSearchable rs => MongoRegexSearchable (Maybe rs)
instance MongoRegexSearchable rs => MongoRegexSearchable [rs]
-- | Filter using a Regular expression.
(=~.) :: forall record searchable. (MongoRegexSearchable searchable, PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => EntityField record searchable -> MongoRegex -> Filter record
fld =~. val = BackendFilter $ RegExpFilter fld val
data MongoFilterOperator typ = PersistFilterOperator (Either typ [typ]) PersistFilter
| MongoFilterOperator DB.Value
data UpdateValueOp typ =
UpdateValueOp
(Either typ [typ])
(Either PersistUpdate MongoUpdateOperation)
deriving Show
data MongoUpdateOperation = MongoEach MongoUpdateOperator
| MongoSimple MongoUpdateOperator
deriving Show
data MongoUpdateOperator = MongoPush
| MongoPull
| MongoAddToSet
deriving Show
opToText :: MongoUpdateOperator -> Text
opToText MongoPush = "$push"
opToText MongoPull = "$pull"
opToText MongoAddToSet = "$addToSet"
data MongoFilter record =
forall typ. PersistField typ =>
NestedFilter
(NestedField record typ)
(MongoFilterOperator typ)
| forall typ. PersistField typ =>
ArrayFilter
(EntityField record [typ])
(MongoFilterOperator typ)
| forall typ. PersistField typ =>
NestedArrayFilter
(NestedField record [typ])
(MongoFilterOperator typ)
| forall typ. MongoRegexSearchable typ =>
RegExpFilter
(EntityField record typ)
MongoRegex
data MongoUpdate record =
forall typ. PersistField typ =>
NestedUpdate
(NestedField record typ)
(UpdateValueOp typ)
| forall typ. PersistField typ =>
ArrayUpdate
(EntityField record [typ])
(UpdateValueOp typ)
-- | Point to an array field with an embedded object and give a deeper query into the embedded object.
-- Use with 'nestEq'.
(->.) :: forall record emb typ. PersistEntity emb => EntityField record [emb] -> EntityField emb typ -> NestedField record typ
(->.) = LastEmbFld
-- | Point to an array field with an embedded object and give a deeper query into the embedded object.
-- This level of nesting is not the final level.
-- Use '->.' or '&->.' to point to the final level.
(~>.) :: forall record typ emb. PersistEntity emb => EntityField record [emb] -> NestedField emb typ -> NestedField record typ
(~>.) = MidEmbFld
-- | Point to a nested field to query. This field is not an array type.
-- Use with 'nestEq'.
(&->.) :: forall record typ nest. PersistEntity nest => EntityField record nest -> EntityField nest typ -> NestedField record typ
(&->.) = LastNestFld
-- | Same as '&->.', but works against a Maybe type
(?&->.) :: forall record typ nest. PersistEntity nest => EntityField record (Maybe nest) -> EntityField nest typ -> NestedField record typ
(?&->.) = LastNestFldNullable
-- | Point to a nested field to query. This field is not an array type.
-- This level of nesting is not the final level.
-- Use '->.' or '&>.' to point to the final level.
(&~>.) :: forall val nes nes1. PersistEntity nes1 => EntityField val nes1 -> NestedField nes1 nes -> NestedField val nes
(&~>.) = MidNestFlds
-- | Same as '&~>.', but works against a Maybe type
(?&~>.) :: forall val nes nes1. PersistEntity nes1 => EntityField val (Maybe nes1) -> NestedField nes1 nes -> NestedField val nes
(?&~>.) = MidNestFldsNullable
infixr 4 =~.
infixr 5 ~>.
infixr 5 &~>.
infixr 5 ?&~>.
infixr 6 &->.
infixr 6 ?&->.
infixr 6 ->.
infixr 4 `nestEq`
infixr 4 `nestNe`
infixr 4 `nestGe`
infixr 4 `nestLe`
infixr 4 `nestIn`
infixr 4 `nestNotIn`
infixr 4 `anyEq`
infixr 4 `nestAnyEq`
infixr 4 `nestBsonEq`
infixr 4 `multiBsonEq`
infixr 4 `anyBsonEq`
infixr 4 `nestSet`
infixr 4 `push`
infixr 4 `pull`
infixr 4 `pullAll`
infixr 4 `addToSet`
-- | The normal Persistent equality test '==.' is not generic enough.
-- Instead use this with the drill-down arrow operators such as '->.'
--
-- using this as the only query filter is similar to the following in the mongoDB shell
--
-- > db.Collection.find({"object.field": item})
nestEq, nestNe, nestGe, nestLe, nestIn, nestNotIn :: forall record typ.
( PersistField typ , PersistEntityBackend record ~ DB.MongoContext)
=> NestedField record typ
-> typ
-> Filter record
nestEq = nestedFilterOp Eq
nestNe = nestedFilterOp Ne
nestGe = nestedFilterOp Ge
nestLe = nestedFilterOp Le
nestIn = nestedFilterOp In
nestNotIn = nestedFilterOp NotIn
nestedFilterOp :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => PersistFilter -> NestedField record typ -> typ -> Filter record
nestedFilterOp op nf v = BackendFilter $
NestedFilter nf $ PersistFilterOperator (Left v) op
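-- A hedged usage sketch (CollectionAddress and AddressCity are hypothetical
-- fields of a hypothetical schema):
--
-- > selectList [CollectionAddress &->. AddressCity `nestEq` "Boston"] []
--
-- which corresponds roughly to db.Collection.find({"address.city": "Boston"})
-- in the mongoDB shell.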
-- | same as `nestEq`, but give a BSON Value
nestBsonEq :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => NestedField record typ -> DB.Value -> Filter record
nf `nestBsonEq` val = BackendFilter $
NestedFilter nf $ MongoFilterOperator val
-- | Like '(==.)' but for an embedded list.
-- Checks to see if the list contains an item.
--
-- In Haskell we need different equality functions for embedded fields that are lists or non-lists to keep things type-safe.
--
-- using this as the only query filter is similar to the following in the mongoDB shell
--
-- > db.Collection.find({arrayField: arrayItem})
anyEq :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => EntityField record [typ] -> typ -> Filter record
fld `anyEq` val = BackendFilter $
ArrayFilter fld $ PersistFilterOperator (Left val) Eq
-- | Like nestEq, but for an embedded list.
-- Checks to see if the nested list contains an item.
nestAnyEq :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => NestedField record [typ] -> typ -> Filter record
fld `nestAnyEq` val = BackendFilter $
NestedArrayFilter fld $ PersistFilterOperator (Left val) Eq
multiBsonEq :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => EntityField record [typ] -> DB.Value -> Filter record
multiBsonEq = anyBsonEq
{-# DEPRECATED multiBsonEq "Please use anyBsonEq instead" #-}
-- | same as `anyEq`, but give a BSON Value
anyBsonEq :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => EntityField record [typ] -> DB.Value -> Filter record
fld `anyBsonEq` val = BackendFilter $
ArrayFilter fld $ MongoFilterOperator val
nestSet, nestInc, nestDec, nestMul :: forall record typ.
( PersistField typ , PersistEntityBackend record ~ DB.MongoContext)
=> NestedField record typ
-> typ
-> Update record
nestSet = nestedUpdateOp Assign
nestInc = nestedUpdateOp Add
nestDec = nestedUpdateOp Subtract
nestMul = nestedUpdateOp Multiply
push, pull, addToSet :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => EntityField record [typ] -> typ -> Update record
fld `push` val = backendArrayOperation MongoPush fld val
fld `pull` val = backendArrayOperation MongoPull fld val
fld `addToSet` val = backendArrayOperation MongoAddToSet fld val
backendArrayOperation ::
forall record typ.
(PersistField typ, BackendSpecificUpdate (PersistEntityBackend record) record ~ MongoUpdate record)
=> MongoUpdateOperator -> EntityField record [typ] -> typ
-> Update record
backendArrayOperation op fld val = BackendUpdate $
ArrayUpdate fld $ UpdateValueOp (Left val) (Right $ MongoSimple op)
-- | equivalent to $each
--
-- > eachOp push field []
--
-- @eachOp pull@ will get translated to @$pullAll@
eachOp :: forall record typ.
( PersistField typ, PersistEntityBackend record ~ DB.MongoContext)
=> (EntityField record [typ] -> typ -> Update record)
-> EntityField record [typ] -> [typ]
-> Update record
eachOp haskellOp fld val = case haskellOp fld (error "eachOp: undefined") of
BackendUpdate (ArrayUpdate _ (UpdateValueOp (Left _) (Right (MongoSimple op)))) -> each op
BackendUpdate (ArrayUpdate{}) -> error "eachOp: unexpected ArrayUpdate"
BackendUpdate (NestedUpdate{}) -> error "eachOp: did not expect NestedUpdate"
Update{} -> error "eachOp: did not expect Update"
where
each op = BackendUpdate $ ArrayUpdate fld $
UpdateValueOp (Right val) (Right $ MongoEach op)
pullAll :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => EntityField record [typ] -> [typ] -> Update record
fld `pullAll` val = eachOp pull fld val
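-- A hedged usage sketch (TicketTags is a hypothetical list field):
--
-- > updateWhere [] [TicketTags `pullAll` ["closed", "stale"]]
--
-- which issues a $pullAll update for that field.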
nestedUpdateOp :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => PersistUpdate -> NestedField record typ -> typ -> Update record
nestedUpdateOp op nf v = BackendUpdate $
NestedUpdate nf $ UpdateValueOp (Left v) (Left op)
-- | Intersection of lists: if any value in the field is found in the list.
inList :: PersistField typ => EntityField v [typ] -> [typ] -> Filter v
f `inList` a = Filter (unsafeCoerce f) (Right a) In
infix 4 `inList`
-- | No intersection of lists: if no value in the field is found in the list.
ninList :: PersistField typ => EntityField v [typ] -> [typ] -> Filter v
f `ninList` a = Filter (unsafeCoerce f) (Right a) NotIn
infix 4 `ninList`
|
pseudonom/persistent
|
persistent-mongoDB/Database/Persist/MongoDB.hs
|
mit
| 60,699
| 0
| 21
| 14,575
| 15,348
| 8,003
| 7,345
| -1
| -1
|
module Euler89 (Numeral (..), minimalNumerals, parseNumerals, numeralValue) where
--import qualified Data.Map as M (Map, fromList, lookup, elems)
import qualified Data.List as L (sortBy)
data Numeral = I | V | X | L | C | D | M deriving (Show)
data Piece = Single Numeral | Subtractive Numeral Numeral deriving (Show)
valueOfPiece :: Piece -> Int
valueOfPiece (Single n) = numeralValue n
valueOfPiece (Subtractive s b) = (numeralValue b) - (numeralValue s)
allowedSubtractives :: [(Numeral, Numeral)]
allowedSubtractives = [(I, V), (I, X), (X, L), (X, C), (C, D), (C, M)]
allowedNumerals :: [Numeral]
allowedNumerals = [I, V, X, L, C, D, M]
allowedPieces :: [Piece]
allowedPieces = (map (\t -> Subtractive (fst t) (snd t)) allowedSubtractives) ++ (map Single allowedNumerals)
allowedPiecesSorted :: [Piece]
allowedPiecesSorted = L.sortBy (\p1 p2 -> compare (valueOfPiece p2) (valueOfPiece p1)) allowedPieces
unPiece :: Piece -> [Numeral]
unPiece (Subtractive i j) = [i,j]
unPiece (Single n) = [n]
minimalNumerals :: Int -> [Numeral]
minimalNumerals = concatMap unPiece . minimalPieces
minimalPieces:: Int -> [Piece]
minimalPieces n | n <= 0 = []
| otherwise = reverse $ go n allowedPiecesSorted []
where go 0 _ accum = accum
go _ [] accum = accum
go m (p:ps) accum = let v = valueOfPiece p in
case (v `compare` m) of
LT -> go (m - v) (p:ps) (p:accum)
EQ -> go (m - v) (p:ps) (p:accum)
GT -> go m ps accum
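-- Sanity checks for the greedy construction above (worked by hand):
--
-- > minimalNumerals 9 == [I,X] -- i.e. "IX"
-- > minimalNumerals 1994 == [M,C,M,X,C,I,V] -- i.e. "MCMXCIV"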
parseNumerals :: String -> Maybe [Numeral]
parseNumerals = mapM charMapping
charMapping :: Char -> Maybe Numeral
charMapping 'I' = Just I
charMapping 'V' = Just V
charMapping 'X' = Just X
charMapping 'L' = Just L
charMapping 'C' = Just C
charMapping 'D' = Just D
charMapping 'M' = Just M
charMapping _ = Nothing
numeralValue :: Numeral -> Int
numeralValue I = 1
numeralValue V = 5
numeralValue X = 10
numeralValue L = 50
numeralValue C = 100
numeralValue D = 500
numeralValue M = 1000
|
nlim/haskell-playground
|
src/Euler89.hs
|
mit
| 2,187
| 0
| 14
| 620
| 830
| 450
| 380
| 49
| 5
|
module Test.Example
( exampleTests
) where
import Data.ByteString (ByteString)
import Data.Text (Text)
import Flow
import Prelude hiding (read)
import Shikensu (Definition(..), Dictionary(..), list, makeDefinition)
import Shikensu.Contrib (clone, copyPropsToMetadata, permalink, renameExt, renderContent)
import Shikensu.Contrib.IO (read, write)
import System.Directory (getCurrentDirectory)
import System.FilePath ((</>), joinPath)
import Test.Helpers
import Test.Tasty
import Test.Tasty.HUnit
import qualified Data.Text.Encoding as Text (decodeUtf8, encodeUtf8)
exampleTests :: TestTree
exampleTests =
testCase "Example test" $ dictionaries >>= uncurry assertEq
thePattern :: String
thePattern =
"fixtures" </> "*.md"
dictionaries :: IO (Dictionary, Dictionary)
dictionaries = do
root <- getCurrentDirectory
let testsDir = root </> "tests"
dictA <- dictionary_io testsDir
let absolute = testsDir </> "fixtures" </> "example.md"
let dictB = [ Shikensu.makeDefinition testsDir thePattern absolute ]
dictB_Read <- read dictB
dictB_Final <- flow dictB_Read
return (dictA, dictB_Final)
-- Setup
dictionary_io :: String -> IO Dictionary
dictionary_io absolutePathToCwd =
Shikensu.list [thePattern] absolutePathToCwd
>>= read
>>= flow
>>= write "build"
flow :: Dictionary -> IO Dictionary
flow =
renameExt ".md" ".html"
.> permalink "index"
.> clone "index.html" "200.html"
.> copyPropsToMetadata
.> renderContent markdownRenderer
.> return
markdownRenderer :: Definition -> Maybe ByteString
markdownRenderer def =
content def
|> fmap Text.decodeUtf8
|> fmap renderMarkdown
|> fmap Text.encodeUtf8
renderMarkdown :: Text -> Text
renderMarkdown text =
text
|
icidasset/shikensu
|
tests/Test/Example.hs
|
mit
| 1,851
| 0
| 12
| 400
| 483
| 260
| 223
| 54
| 1
|
-- Problem 3 of the 99 Haskell problems: the k'th (1-indexed) element of a list.
elementAt a b = a !! (b - 1)
|
axnion/playground
|
random/haskell/99questions/problem3.hs
|
mit
| 16
| 0
| 5
| 4
| 11
| 5
| 6
| -1
| -1
|
{-# LANGUAGE RecordWildCards #-}
-- | TCP Specific connection handling for monitoring with Riemann
module Network.Monitoring.Riemann.TCP (
module Network.Monitoring.Riemann.Types,
TCPClient, makeTCPClientFromConnection, makeTLSClient, makeTCPClient,
sendEventTCP, sendQueryTCP, sendEventTCP', sendQueryTCP'
) where
import Control.Concurrent.Async (waitCatch, withAsync)
import Control.Concurrent.MVar
import Control.Exception.Base (SomeException, bracket)
import Control.Lens ((&), (.~), (?~), (^.))
import qualified Data.ByteString as BS
import Data.Default (def)
import Data.Monoid
import Data.ProtocolBuffers
import Data.Serialize.Get
import Data.Serialize.Put
import Data.Text
import Data.Time.Clock (getCurrentTime)
import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds)
import Network.Connection
import Network.Monitoring.Riemann.Types
type Hostname = String
type Port = Int
-- | An opaque data type for a TCP Riemann client. It currently does not support
-- connection pooling; concurrent uses will be blocked.
data TCPClient = TCPClient
{ conn :: Connection
, lock :: MVar () }
-- | Perform an action and catch all synchronous exceptions.
tryAny :: IO a -> IO (Either SomeException a)
tryAny action = withAsync action waitCatch
-- | Perform an action while holding a lock (binary semaphore).
takeLockDo :: MVar () -> IO a -> IO a
takeLockDo lock action = bracket (takeMVar lock) (\_ -> putMVar lock ()) (const action)
-- | Create a @TCPClient@ from an existing @Connection@.
makeTCPClientFromConnection :: Connection -> IO TCPClient
makeTCPClientFromConnection conn = TCPClient conn <$> newMVar ()
-- | Create a @TCPClient@ from host name, port, and @TLSSettings@. The
-- connection will use TLS.
makeTLSClient :: Hostname -> Port -> TLSSettings -> IO TCPClient
makeTLSClient hn po tls = do
ctx <- initConnectionContext
let params = ConnectionParams hn (fromIntegral po) (Just tls) Nothing
TCPClient <$> connectTo ctx params <*> newMVar ()
-- | Create a @TCPClient@ from host name and port. The connection will not use
-- TLS.
makeTCPClient :: Hostname -> Port -> IO TCPClient
makeTCPClient hn po = do
ctx <- initConnectionContext
let params = ConnectionParams hn (fromIntegral po) Nothing Nothing
TCPClient <$> connectTo ctx params <*> newMVar ()
-- | Send a message.
sendMsg :: TCPClient -> Msg -> IO ()
sendMsg TCPClient{..} msg = do
let bytes = runPut (encodeMessage msg)
bytesWithLen = runPut (putWord32be (fromIntegral $ BS.length bytes) >> putByteString bytes)
connectionPut conn bytesWithLen
-- | Receive a message.
receiveMsg :: TCPClient -> IO Msg
receiveMsg TCPClient{..} = do
msgLenBS <- readExactlyNBytes 4
let msgLen = fromIntegral $ runGet' getWord32be msgLenBS
msgBs <- readExactlyNBytes msgLen
return $ runGet' decodeMessage msgBs
where readExactlyNBytes n = do
bs <- connectionGet conn n
if BS.length bs == n then return bs else do
nextPart <- readExactlyNBytes (n - BS.length bs)
return (bs <> nextPart)
runGet' g bs = either (error . ("cannot deserialise: " <>)) id $ runGet g bs
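-- Framing note (derived from sendMsg/receiveMsg above): every message on the
-- wire is a 4-byte big-endian length prefix followed by that many bytes of
-- protobuf-encoded Msg, e.g. a 10-byte payload is sent as 00 00 00 0A followed
-- by the 10 payload bytes.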
-- | Send an event with the @TCPClient@. If it fails, it will return the
-- exception in a @Left@ value.
sendEventTCP' :: TCPClient -> Event -> IO (Either SomeException ())
sendEventTCP' conn e = tryAny $ do
current <- getCurrentTime
let now = round (utcTimeToPOSIXSeconds current)
let msg = def & events .~ [e & time ?~ now]
takeLockDo (lock conn) $ sendMsg conn msg
-- | Send a query with the @TCPClient@ and wait for a reply. If it fails, it
-- will return the exception in a @Left@ value.
sendQueryTCP' :: TCPClient -> Text -> IO (Either SomeException ([State], [Event]))
sendQueryTCP' conn q = tryAny $ do
let msg = def & query .~ Just q
rcvd <- takeLockDo (lock conn) $ do
sendMsg conn msg
receiveMsg conn
return (rcvd ^. states, rcvd ^. events)
-- | Send an event with the @TCPClient@. If it fails, it will silently discard
-- the exception.
sendEventTCP :: TCPClient -> Event -> IO ()
sendEventTCP conn e = either (const def) id <$> sendEventTCP' conn e
-- | Send a query with the @TCPClient@ and wait for a reply. If it fails, a
-- default (i.e. empty) value will be returned.
sendQueryTCP :: TCPClient -> Text -> IO ([State], [Event])
sendQueryTCP conn q = either (const def) id <$> sendQueryTCP' conn q
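-- A minimal usage sketch (host and port are placeholders; 5555 is Riemann's
-- conventional TCP port; someEvent :: Event is built elsewhere):
--
-- > client <- makeTCPClient "riemann.example.com" 5555
-- > result <- sendEventTCP' client someEvent
-- > print result -- Left exception on failure, Right () on success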
|
capital-match/riemann-hs
|
src/Network/Monitoring/Riemann/TCP.hs
|
mit
| 4,601
| 0
| 17
| 1,030
| 1,174
| 610
| 564
| 75
| 2
|
module Common
( parseBS
, cursorFromElement
) where
import Data.ByteString.Lazy (ByteString)
import Text.XML
import Text.XML.Cursor
import Codec.Xlsx.Parser.Internal
import Codec.Xlsx.Writer.Internal
parseBS :: FromCursor a => ByteString -> [a]
parseBS = fromCursor . fromDocument . parseLBS_ def
cursorFromElement :: Element -> Cursor
cursorFromElement = fromNode . NodeElement . addNS mainNamespace Nothing
|
qrilka/xlsx
|
test/Common.hs
|
mit
| 418
| 0
| 7
| 58
| 110
| 64
| 46
| 12
| 1
|
module Crypto.Text where
import Data.Monoid
import Data.Foldable as F
import Data.Vector as V
import Prelude as P
import Crypto
-- | All subvectors in the vector of a specific length which could reappear.
-- Provides constant time lookup for subvectors of the current length at every
-- position. /O(n)/
repVecs :: Int -> Vector c -> Vector (Vector c)
repVecs l v = generate (V.length v - l + 1) (\i -> slice i l v)
-- | Finds matches of a given length in the prepared lookup vector. /O(n^2)/
findMatches :: Eq c => Int -> Vector (Vector c) -> [(Vector c, Int, Int)]
findMatches l ps = P.concat $ P.zipWith f [0 ..] $ V.toList $ ps
where
f i v = [ (v, i, j - i) | j <- [i + l .. V.length ps - l], ps ! j == v ]
repetitions :: Eq c => Int -> Vector c -> [(Vector c, Int, Int)]
repetitions l = findMatches l . repVecs l
-- | Removes all submatches, requires the bigger matches to appear first.
pruneSubMatches :: [(Vector c, Int, Int)] -> [(Vector c, Int, Int)]
pruneSubMatches = go []
where
go bl (m : ms) | (m `hits`) `P.any` bl = go bl ms
| otherwise = go (m : bl) ms
go bl [] = bl
hits (lv, li, lo) (rv, ri, ro) = let ll = V.length lv
rl = V.length rv
in ll < rl && ri <= li && ri + rl >= li + ll && lo == ro
-- | Splits the input into n pieces, whereas n is a positive non-zero number.
multiplex :: Int -> Vector c -> [Vector c]
multiplex n v = fmap f [0 .. n - 1]
where
f o = generate (max - (if r == 0 || r >= o + 1 then 0 else 1)) (\i -> v ! (i * n + o))
vl = V.length v
max = ceiling $ fromIntegral vl / fromIntegral n
r = vl `rem` n
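-- For example (worked by hand), multiplex 2 (V.fromList "ABCDE") splits the
-- text into the interleaved pieces "ACE" and "BD", i.e. every n-th character
-- starting at each offset, which is exactly what a Vigenère key of length n
-- mixes together.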
-- | Find all repetitions starting with a length of 3 up to n / 2.
kasiski :: Eq c => V.Vector c -> [(V.Vector c, Int, Int, [Int])]
kasiski v = fmap addFactors $ pruneSubMatches reps
where
reps = P.concat [ repetitions l v | l <- [m, m - 1 .. 3] ]
m = V.length v `quot` 2
addFactors (v, p, os) = (v, p, os, factors os)
coincidenceIndex :: (F.Foldable f, Fractional n) => f n -> n -> n
coincidenceIndex lis l = getSum (F.foldMap (\li -> Sum $ li * (li - 1)) lis) / (l * (l - 1))
friedman :: (F.Foldable f, Fractional n) => f n -> n -> n
friedman lis l = 0.0377 * l / ((l - 1) * coincidenceIndex lis l - 0.0385 * l + 0.0762)
fromGerOrd :: Int -> Char
toGerOrd :: Char -> Int
(fromGerOrd, toGerOrd) = (toEnum . (+ aOrd) . flip mod 26, subtract aOrd . fromEnum)
where
aOrd = fromEnum 'A'
-- | Decrypt a cypher text which is encoded with the Vigenère encryption using
-- a specific keyword, given as list of distances.
decryptVigenère :: [Int] -> String -> String
decryptVigenère ks = P.zipWith (\k c -> fromGerOrd $ toGerOrd c - k) $ cycle ks
|
muesli4/crypto
|
src/Crypto/Text.hs
|
mit
| 2,845
| 3
| 17
| 822
| 1,154
| 616
| 538
| 42
| 2
|
module Streams where
------------------------------------------------------------
data Stream a = Stream a | Cons a (Stream a)
instance Show a => Show (Stream a) where
show = show . take 20 . streamToList
------------------------------------------------------------
streamToList :: Stream a -> [a]
streamToList (Stream a) = [a]
streamToList (Cons a rest) = a : streamToList rest
------------------------------------------------------------
streamRepeat :: a -> Stream a
streamRepeat s = Cons s (streamRepeat s)
------------------------------------------------------------
streamMap :: (a -> b) -> Stream a -> Stream b
streamMap f (Stream a) = Stream (f a)
streamMap f (Cons a rest) = Cons (f a) (streamMap f rest)
------------------------------------------------------------
-- TODO: is this even correct?
streamFromSeed :: (a -> a) -> a -> Stream a
streamFromSeed f v = Cons v (streamFromSeed f (f v))
------------------------------------------------------------
nats :: Stream Integer
nats = streamFromSeed (+1) 0
------------------------------------------------------------
ruler :: Stream Integer
ruler = streamMap (\m -> pow2 m (m `div` 2)) (streamFromSeed (+1) 1)
where
pow2 :: Integer -> Integer -> Integer
pow2 m n = if m `mod` (2^n) == 0 then n else pow2 m (n-1)
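-- The first few elements are 0,1,0,2,0,1,0,3,... i.e. for each n >= 1 the
-- exponent of the largest power of 2 dividing n (checked by hand for n = 1..8).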
------------------------------------------------------------
|
sajith/cis194
|
hw06/Streams.hs
|
mit
| 1,380
| 0
| 11
| 225
| 435
| 230
| 205
| 20
| 2
|
{-# OPTIONS_GHC -XTypeSynonymInstances -XOverloadedStrings -XRecursiveDo -pgmF dist/build/marxup/marxup -F #-}
import MarXup
import MarXup.Latex
import MarXup.Latex.Math
import MarXup.Math
import MarXup.Tex
import MarXup.LineUp.Haskell
import MarXup.DerivationTrees
import MarXup.PrettyPrint as PP hiding (width)
import MarXup.PrettyPrint.Core as PC
import Control.Applicative
import Data.Monoid
import Control.Monad (unless)
import MarXup.Diagram as D
import Graphics.Diagrams.Plot
import Graphics.Diagrams.Graphviz
import Control.Lens (set)
import Data.GraphViz hiding (Plain)
import Data.String
import Data.Traversable
import Data.GraphViz.Attributes.Complete
(Attribute(RankSep,Shape,Label,Margin,Width,Len,RankDir),
Shape(..),Label(StrLabel),DPoint(..),RankDir(..))
import Numeric (showFFloat, showEFloat)
data SExp = Atom String | SX [SExp]
textualS = textual . ($ "")
aPlot :: Dia
aPlot = do
c@(bx,_) <- simplePlot (Point (textualS . showFFloat (Just 1)) (textualS . showEFloat (Just 0)))
(vec (simplLinAxis 0.1,
logAxis 10
-- simplLinAxis 2000
))
(map vec [(0.1,139),(0.35,10035),(0.23,1202)])
functionPlot c 100 (\x -> 100 + 300000*(x-0.2)^^2)
width bx === constant 200
height bx === constant 100
where vec (x,y) = Point x y
prettyS :: SExp -> Tex Doc
prettyS (Atom x) = PP.text (textual x)
prettyS (SX xs) = do
xs' <- traverse prettyS xs
enclosure "(" ")" " " xs'
expr :: TeX
expr = do
d <- prettyS six
paragraph "1000"
PC.pretty 1 1000 d
paragraph "100"
PC.pretty 1 200 d
paragraph "10"
PC.pretty 1 10 d
where
three = SX $ map Atom ["arstarsx","wftwfy","varstw","x","M"]
six = SX [ three , three , three ]
preamble body = do
documentClass "article" []
usepackage "inputenc" ["utf8x"]
usepackage "graphicx" []
env "document" body
(▸) = flip (#)
grDiag :: TexDiagram ()
grDiag = graph tex Dot gr
nod x = DotNode x [Margin (DVal 0),Width 0, Shape Circle, Label $ StrLabel $ fromString x]
edg x y z = DotEdge x y [Label $ StrLabel z, Len 0.1]
gr :: DotGraph String
gr = DotGraph False True Nothing
(DotStmts [GraphAttrs [RankSep [0.1], RankDir FromLeft]] []
[nod "A", nod "B", nod "C", nod "D"]
[edg "A" "B" "1"
,edg "A" "C" "2"
,edg "B" "D" "3"
,edg "D" "A" "4"])
testDiagram :: TexDiagram ()
testDiagram = do
-- draw $ path $ circle (Point 0 0) 5
a <- D.label "a" $ ensureMath $ "a"
b <- D.label "b" $ ensureMath $ "b"
a' <- draw $ circle "a'" -- label $ ensureMath $ "c"
width a' === constant 15
b' <- D.label "b'" $ ensureMath $ "d"
a'' <- D.label "a''" $ ensureMath $ "."
b'' <- D.label "b''" $ ensureMath $ "."
-- c <- texObj $ ensureMath $ "c"
-- Center ▸ c === MP.center [E ▸ a'', E ▸ b''] + (20 +: 0)
let width = constant 70
vdist b a === constant 30
hdist a a' === width
hdist a' a'' === width
alignMatrix [[Center ▸ a, Center ▸ a',Center ▸ a'']
,[Center ▸ b, Center ▸ b',Center ▸ b'']
]
autoLab "bing" =<< arrow a a'
autoLab "bang" =<< arrow b b'
autoLab "oops" . turn180 =<< arrow a b
autoLab "pif" =<< arrow a' a''
autoLab "paf" =<< arrow b' b''
draw $ do
autoLab "equal" =<< edge a'' b''
return ()
ax c = Rule {delimiter = mempty, ruleStyle = outline "black", ruleLabel = mempty, conclusion = c}
someTree = derivationTreeD $ Node (rule ("modus ponens") "B")
[defaultLink ::> Node (ax "X") []
,defaultLink {steps = 0} ::> Node (rule "" "A")
[defaultLink {steps = 0} ::> Node (rule (braces $ cmd0 "small" <> "mystery") "A1")
[defaultLink ::> Node (ax "Y") []
,defaultLink ::> Node (rule "" "A2")
[defaultLink ::> Node (rule "" "A3")
[defaultLink ::> Node (ax "Z") []]]]]
,defaultLink {steps = 3} ::> Node (rule "abs" "A --> B")
[defaultLink ::> Node (rule "" "B")
[defaultLink ::> Node (ax "A") []
]
]
]
(∶) = binop 2 ", "
γ = Con $ cmd "Gamma" nil
(⊢) = binop 1 (cmd0 "vdash")
x = Con "x"
y = Con "y"
a = Con "a"
b = Con "b"
(≜) = binop 1 "="
main = renderTex Plain "LaTeX" docu
docu = preamble «
@intro<-section«Intro»
At-syntax is used to call a Haskell function. The result can be bound.
For example, the @sans«section» command returns a label that can be used
for references.
This is section @xref(intro). Note that cross-references are checked
at ``compile-time''. Forward references also work (see
sec. @xref(concl)).
@section«Markup»
Here comes @sans«some sans-serif text with @emph«emphasis»!»
Note that arguments put in braces are markup.
@section«Math»
Arguments in parenthesis are Haskell. Combined with unicode syntax,
this can make writing all sorts of mathy stuff rather pleasant. For
example: @(γ ⊢ x ∶ a).
The operators are overloaded to work on text as well:
@display(b ≜ sqrt (a * (b + (x/y))))
There is also special support for derivation trees:
@section«Pretty Printer»
@expr
@section«Derivation Trees»
Here is some derivation tree:
@someTree
@section«Diagrams»
@testDiagram
One can also draw diagrams:
@section«Graphviz»
There is partial, rudimentary support for layout of graphs using graphviz.
@grDiag
%% This is deactivated for now; it requires graphviz to be installed
@section«Plots»
@aPlot
@cmd0"newpage"
@section«Haskell»
There is simple support for lhs2tex-style stuff.
Another paragraph.
@haskell«
autoLab :: String -> OVector -> Diagram TeX Tex Object
autoLab s = autoLabel s (textual s)
»
some text after
@concl<-section«Conclusion»
Mar@ensureMath«@cmd0"chi"»up is awesome :p .
»
|
jyp/MarXup
|
examples/LaTeX.hs
|
gpl-2.0
| 5,744
| 69
| 23
| 1,300
| 2,260
| 1,167
| 1,093
| -1
| -1
|
module Types where
import Control.Applicative
import Control.Concurrent.Chan
import Control.Monad
import Graphics.UI.SDL.Types
import Graphics.UI.SDL.Video
import Graphics.UI.SDL.Rect
import Graphics.UI.SDL.Color
import Graphics.UI.SDL.Time
import Graphics.UI.SDL.TTF.Management as TTFM
import Graphics.UI.SDL.TTF.Render as TTFR
import Graphics.UI.SDL.TTF.General as TTFG
import Graphics.UI.SDL.TTF.Types
import qualified Data.HashMap.Strict as Map
import qualified Graphics.UI.SDL.Image as Img
import qualified Graphics.UI.SDL.Primitives as GFX
type LonLat = (Double, Double)
data YanasState = YanasState {
stScreen :: Surface,
stMainfont :: Font,
stAirspace :: [Element],
stView :: (Double, Double, Double, Double),
stScreenSize :: (Int, Int),
stSurfaces :: Map.HashMap String Surface,
stSurfaceTemp :: Int,
stGndElev :: Int,
stQNH :: Int
}
deriving (Eq, Show)
data Squawk = SquawkA | SquawkC | SquawkS | SquawkIdent
| Squawk Int | Reset Squawk
deriving (Eq, Show)
data ACCategory = AC_A | AC_B | AC_C | AC_D | AC_E
deriving (Eq, Ord, Show)
data ACPCategory = AP_A | AP_B | AP_C | AP_D
deriving (Eq, Ord, Show)
data ApchType = APCHVisual | APCHIFR | APCHVOR | APCHNDB |
APCHCircleToLand
deriving (Eq, Show)
data RateFlag = OrMore | OrLess
deriving (Eq, Show)
data Rate = Rate Int (Maybe RateFlag) | OwnRate
deriving (Eq, Show)
data Heading = Heading Int
| Direct Waypoint
deriving (Eq, Show)
data TurnDirection = TurnOwnDiscretion Heading
| TurnLeft Heading | TurnRight Heading
deriving (Eq, Show)
type Designation = String
data Frequency = Frequency Int
deriving (Eq, Ord)
data RelVPos = Below | Same | Above
deriving (Eq, Show)
data RelMovement = LeftToRight | RightToLeft
deriving (Eq, Show)
data ACReport = InSight
| Vacated
deriving (Eq, Show)
data ACSay = SAYAgain
| SAYAltitude
| SAYIntentions
| SAYPosition
| SAYType
deriving (Eq, Show)
-- VPos is only to be used for ATC commands
-- In all other cases, true altitude is used.
data VPos = Flightlevel Int
| Altitude Int
deriving (Eq)
instance Show VPos where
show (Flightlevel fl) = "flightlevel " ++ show fl
show (Altitude alt) = "altitude " ++ show alt ++ " feet"
data ACCommand = Turn TurnDirection
| Climb VPos Rate
| Descend VPos Rate
| Speed Int
| FinalApproachSpeed
| OwnSpeed
| Approach ApchType Runway
| ClearedToLand Runway
| ClearedForTakeoff Runway
| QNH Int
| ReportConditions
| GoAround
| Cancel ACCommand
| MaintainOwnSeparation Aeroplane
| LineupAndWait Runway
| Cross Runway
| Report ACReport
| Say ACSay
| Stop
| ACSquawk [Squawk]
| Traffic {
trafficWhat :: Aeroplane,
trafficMilesAhead :: Int,
trafficHour :: Int,
trafficRelVPos :: RelVPos,
trafficMovement :: Maybe RelMovement
}
| Contact Designation Frequency
deriving (Eq, Show)
data ConditionalClearanceObservation = Landing | Departing | Crossing
deriving (Eq, Show)
data ACCondition = WhicheverIsLater ACCondition ACCondition
| WhenAirbourne | WhenPassing Int | Overhead Element
| Behind ConditionalClearanceObservation Aeroplane
deriving (Eq, Show)
newtype ZuluTime = ZuluTime Integer
deriving (Eq, Show)
data ATCCommand = ACCmd {
cmdCallsign :: [String],
cmdBroadcast :: Bool,
cmdCondition :: Maybe ACCondition,
cmdCommand :: ACCommand,
cmdLimit :: Maybe Element,
cmdValidity :: (Maybe ZuluTime, Maybe ZuluTime)
}
| ATCText {
cmdText :: String
}
deriving (Eq, Show)
data Equipment = ETransponder | EVHF | EUHF | EADF
deriving (Eq, Show)
-- All altitudes are true altitudes AMSL
data Aeroplane = Aeroplane {
acregistration :: String,
accallsign :: String,
acicao :: String,
actype :: String,
accategory :: ACCategory,
acpcategory :: ACPCategory,
aclon :: Double,
aclat :: Double,
-- KTAS
acspeed :: Double,
-- Degrees *true*
acheading :: Double,
-- fpm
acvspeed :: Double,
acvclearedaltitude :: VPos,
actruealt :: Double,
actransponder :: ([Squawk], Int),
acequipment :: [Equipment],
acflightplan :: String,
acfrequency :: Frequency,
acatccommands :: [ATCCommand],
acatcresponses :: [String],
acturnrate :: Double,
acturnto :: Double,
-- TODO: These two must be calculated dynamically (aeroplane specs, density altitude)
acclimbrate :: Int,
acdescentrate :: Int,
acqnh :: Int -- The QNH the pilots were given by ATC/FIS/Luftaufsicht/...
}
deriving (Show, Eq)
data BeaconType = VOR | NDB | ILS | DME
deriving (Show, Eq)
data Beacon = Beacon {
bcntype :: BeaconType,
bcnlon :: Double,
bcnlat :: Double,
bcnfreq :: Integer,
bcnname :: String,
bcnid :: String,
bcnvar :: Double,
bcninop :: Bool,
bcnrange :: Int
}
deriving (Show, Eq)
data RWYSurface = ASPH | CONC | GRASS
deriving (Show, Eq)
data Runway = Runway {
rwylon :: Double,
rwylat :: Double,
rwyqfu :: Int,
rwyvar :: Double,
rwyelev :: Int,
rwydesignation :: String,
rwydisplacement :: Double,
rwytora :: Int,
rwylda :: Int,
rwyasda :: Int,
rwywidth :: Int,
rwyals :: Bool,
rwycenterline :: Bool,
rwysurface :: RWYSurface,
rwystrength :: Int,
rwyad :: String
}
deriving (Show, Eq)
data Obstacle = Obstacle {
obslon :: Double,
obslat :: Double,
obsrad :: Double,
obsmslelev :: Int,
obsdesignation :: String
}
deriving (Show, Eq)
data Waypoint = VFRRP {
vfrlon :: Double,
vfrlat :: Double,
vfrcompulsory :: Bool,
vfrdesignation :: String,
vfrdesignationletter :: Maybe Char,
vfrctr :: String}
deriving (Show, Eq)
data NavAction = NavigateTo {
navto :: Waypoint,
navspeed :: Rate,
navvspeed :: Rate,
navalt :: VPos
}
data AirspaceClassification = AirspaceA | AirspaceB | AirspaceC |
AirspaceD | AirspaceE | AirspaceF |
AirspaceG | AirspaceEDR | AirspaceEDD
deriving (Show, Eq)
data AirspaceFlags = TMZ | CTR | RVSR
deriving (Show, Eq)
data VerticalPosition = GND | FL Int | AMSLQNH Int |
BelowA AirspaceClassification |
AboveA AirspaceClassification
deriving (Show, Eq)
data OperatingHours = H24 | HJ | HN | HS | HT | HX
deriving (Show, Eq)
data Airspace = Airspace {
airClassification :: AirspaceClassification,
airFlags :: [AirspaceFlags],
airVBottom :: VerticalPosition,
airVTop :: VerticalPosition,
airActive :: OperatingHours,
airPolygone :: [LonLat]
}
deriving (Show, Eq)
data Element = AC Aeroplane | BC Beacon | RWY Runway
| OBS Obstacle | WP Waypoint | Air Airspace
deriving (Show, Eq)
instance Show Frequency where
show (Frequency f)
| zeroes f >= 5 = show mhz ++ "." ++ show hkhz ++ " "
| zeroes f >= 3 = show mhz ++ "." ++ show khz
| otherwise = show f
where
mhz = f `quot` 1000000
hkhz = f `mod` 1000000 `quot` 100000
khz = f `mod` 1000000 `quot` 1000
zeroes n
| (n `mod` 100000) == 0 = 5
| (n `mod` 1000) == 0 = 3
| otherwise = 0
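-- Worked examples for the guards above (checked by hand):
-- show (Frequency 118500000) == "118.5 " (note the trailing padding space)
-- show (Frequency 118505000) == "118.505"
-- show (Frequency 121500) == "121500" (falls through to the raw value)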
|
hce/yanas
|
src/Types.hs
|
gpl-2.0
| 8,422
| 0
| 10
| 2,997
| 2,048
| 1,239
| 809
| 232
| 1
|
module DarkPlaces.Binary where
import Control.Applicative
import qualified Data.ByteString.Lazy as BL
import Prelude hiding (getLine)
import Data.Binary.Get
import Data.Binary.IEEE754
import Data.Int
import DarkPlaces.Types
maxTrackLen :: Int64
maxTrackLen = 8
getQVector :: ProtocolVersion -> Get QVector
getQVector p = consQVector <$> cords <*> cords <*> cords
where
cords = getCoord p
getQVector32f :: Get QVector
getQVector32f = consQVector <$> getFloat32le <*> getFloat32le <*> getFloat32le
getCoord13i :: Get Float
getCoord13i = do
d <- getInt16le
return $ (fromIntegral d) * (1.0 / 8.0)
getCoord16i :: Get Float
getCoord16i = fromIntegral <$> getInt16le
getCoord32f :: Get Float
getCoord32f = getFloat32le
getCoord :: ProtocolVersion -> Get Float
getCoord p
| p `elem` quakes ++ neharaFamily = getCoord13i
| p `elem` [(ProtocolDarkplaces2)..(ProtocolDarkplaces4)] = getCoord16i
| otherwise = getCoord32f -- for ProtocolDarkplaces1 and bigger then 4
where
quakes = [ProtocolQuake, ProtocolQuakeDP, ProtocolQuakeWorld]
neharaFamily = [(ProtocolNehahraMovie)..(ProtocolNehahraBJP3)]
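-- e.g. under the 13.3 fixed-point encoding (getCoord13i), a raw Int16 of 16
-- decodes to 16 * (1/8) = 2.0 world units, while the 32-bit float protocols
-- carry coordinates verbatim.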
getLine :: Get BL.ByteString
getLine = do
b <- getWord8
if b == 10 -- 10 is '\n'
then return $ BL.singleton b
else BL.cons' b <$> getLine
getLineLimited :: Int64 -> Get BL.ByteString
getLineLimited limit | limit <= 0 = fail "Line too long"
| otherwise = do
b <- getWord8
if b == 10
then return $ BL.singleton b
else BL.cons' b <$> getLineLimited (limit - 1)
getStringList :: Get [BL.ByteString]
getStringList = do
str <- getLazyByteStringNul
if BL.null str
then return []
else (str :) <$> getStringList
-- signed char
getInt8 :: Get Int8
getInt8 = fromIntegral <$> getWord8
-- signed short
getInt16le :: Get Int16
getInt16le = fromIntegral <$> getWord16le
-- signed int
getInt32le :: Get Int32
getInt32le = fromIntegral <$> getWord32le
getAngle8i :: Get Float
getAngle8i = (360.0 / 256.0 *) . fromIntegral <$> getInt8
getAngle16i :: Get Float
getAngle16i = (360.0 / 65536.0 *) . fromIntegral <$> getInt16le
getCord16i :: Get Float
getCord16i = fromIntegral <$> getInt16le
-- unsigned char to int
getWord8asInt :: Get Int
getWord8asInt = fromIntegral <$> getWord8
-- signed char to int
getInt8asInt :: Get Int
getInt8asInt = fromIntegral <$> getInt8
getInt16asInt :: Get Int
getInt16asInt = fromIntegral <$> getInt16le
getWord16asInt :: Get Int
getWord16asInt = fromIntegral <$> getWord16le
getLineAndRemaining :: Get (BL.ByteString, Int64)
getLineAndRemaining = getLine >>= \l -> bytesRead >>= \b -> return (l, b)
getLineLAndRemaining :: Int64 -> Get (BL.ByteString, Int64)
getLineLAndRemaining n = getLineLimited n >>= \l -> bytesRead >>= \b -> return (l, b)
splitAtTrack :: BL.ByteString -> Either ErrorInfo (BL.ByteString, BL.ByteString)
splitAtTrack file_data = case either_track of
Left (_, offset, msg) -> Left (offset, msg)
Right (_, _, (line, drop_bytes)) -> Right (line, BL.drop drop_bytes file_data)
where
either_track = runGetOrFail (getLineLAndRemaining maxTrackLen) file_data
skipTrack :: BL.ByteString -> Either ErrorInfo BL.ByteString
skipTrack file_data = snd <$> splitAtTrack file_data
|
bacher09/darkplaces-demo
|
src/DarkPlaces/Binary.hs
|
gpl-2.0
| 3,269
| 0
| 12
| 621
| 971
| 519
| 452
| 80
| 2
|
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module Robots.Quiz where
import Inter.Types
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
data RC = RC { width :: Integer -- ^ the board goes from (-w,-w) .. (w,w)
, num :: Int -- ^ number of robots
, at_least :: Int -- ^ required length of solution
, search_width :: Int -- ^ at most that many nodes per level
}
deriving ( Typeable )
$(derives [makeReader, makeToDoc] [''RC])
rc :: RC
rc = RC { width = 3
, num = 5
, at_least = 5
, search_width = 1000
}
-- Local Variables:
-- mode: haskell
-- End:
|
Erdwolf/autotool-bonn
|
src/Robots/Quiz.hs
|
gpl-2.0
| 595
| 4
| 9
| 145
| 137
| 86
| 51
| 17
| 1
|
module Tema_18e_TablaPropiedades_Spec (main, spec) where
import Tema_18.TablaPropiedades
import Test.Hspec
import Test.QuickCheck
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Propiedades de las tablas" $ do
it "p1" $
property prop_modifica_modifica_1
it "p2" $
property prop_modifica_modifica_2
it "p3" $
property prop_valor_modifica_1
it "p4" $
property prop_valor_modifica_2
|
jaalonso/I1M-Cod-Temas
|
test/Tema_18e_TablaPropiedades_Spec.hs
|
gpl-2.0
| 442
| 0
| 11
| 97
| 117
| 56
| 61
| 17
| 1
|
{-
Copyright (C) 2009-2012 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- | Functions for converting between different representations of
mathematical formulas.
Also note that in general @writeLaTeX . readLaTeX /= id@.
A typical use is to combine together a reader and writer.
> import Control.Applicative ((<$>))
> import Data.Text (Text)
> import Text.TeXMath (writeMathML, readTeX)
>
> texMathToMathML :: DisplayType -> Text -> Either Text Element
> texMathToMathML dt s = writeMathML dt <$> readTeX s
It is also possible to manipulate the AST using 'Data.Generics'. For
example, if you wanted to replace all occurrences of the identifier
x in your expression, you could do so with the following
script.
@
{-\# LANGUAGE OverloadedStrings -\#}
import Control.Applicative ((\<$\>))
import Data.Text (Text)
import Data.Generics (everywhere, mkT)
import Text.TeXMath (writeMathML, readTeX)
import Text.TeXMath.Types
import Text.XML.Light (Element)
changeIdent :: Exp -> Exp
changeIdent (EIdentifier "x") = EIdentifier "y"
changeIdent e = e
texToMMLWithChangeIdent :: DisplayType -> Text -> Either Text Element
texToMMLWithChangeIdent dt s =
writeMathML dt . everywhere (mkT changeIdent) \<$\> readTeX s
@
-}
module Text.TeXMath ( readMathML,
readOMML,
readTeX,
writeTeX,
writeTeXWith,
addLaTeXEnvironment,
writeEqn,
writeOMML,
writeMathML,
writePandoc,
DisplayType(..),
Exp
)
where
import Text.TeXMath.Readers.TeX
import Text.TeXMath.Readers.MathML
import Text.TeXMath.Readers.OMML
import Text.TeXMath.Writers.MathML
import Text.TeXMath.Writers.OMML
import Text.TeXMath.Writers.Pandoc
import Text.TeXMath.Writers.TeX
import Text.TeXMath.Writers.Eqn
import Text.TeXMath.Types
|
jgm/texmath
|
src/Text/TeXMath.hs
|
gpl-2.0
| 2,633
| 0
| 5
| 602
| 112
| 78
| 34
| 21
| 0
|