| code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Main ( main ) where
import qualified Rest.Gen as RG
import qualified Rest.Gen.Config as RGC
import qualified Mdb.Serve.RestApi as API
main :: IO ()
main = do
config <- RGC.configFromArgs "rest-example-gen"
RG.generate config "RestExample" API.api [] [] []
| waldheinz/mdb | src/main/RestApiMain.hs | apache-2.0 | 270 | 0 | 9 | 46 | 88 | 51 | 37 | 8 | 1 |
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TemplateHaskell #-}
module Mangekyo
( run
, runCode
, dump
, Option(..)
, code
, input
, output
, module Paths_mangekyo
) where
import Paths_mangekyo (version)
import Control.Lens
import Data.Conduit
import Data.Default (Default(def))
import System.IO (stdin, stdout)
import Text.Show.Pretty (ppShow)
import qualified Data.Conduit.Binary as CB
import qualified Data.HashMap.Strict as H
import qualified Data.Text as T
import Mangekyo.Type as Type
import Mangekyo.Parser (parseMangekyo)
import Mangekyo.Interpreter
import Mangekyo.Builtin
import Mangekyo.Conduit (Input, Output)
import qualified Mangekyo.Conduit.JSON as CJ
data Option = Option { _code :: String
, _input :: Input
, _output :: Output
, _dump :: Bool
}
makeLenses ''Option
instance Default Option where
def = Option { _code = ""
, _input = CJ.toValue
, _output = CJ.fromValue
, _dump = False
}
run :: Option -> IO ()
run opt = do
case (opt^.dump, parseMangekyo "<string>" $ T.pack (opt^.code)) of
(_, Left e) -> error e
(True, Right program) -> pprint program
(_, Right program) -> do
let mangekyo = interpret program >>= yieldIfNotUnit
ns = Type.Object $ H.fromList builtins
flip runMangekyo ns $ CB.sourceHandle stdin =$= opt^.input =$= mangekyo =$= opt^.output $$ CB.sinkHandle stdout
where
yieldIfNotUnit (Tuple []) = return ()
yieldIfNotUnit v = yield v
runCode :: String -> IO ()
runCode s = run $ def & code .~ s
pprint :: Show a => a -> IO ()
pprint = putStrLn . ppShow
| nakamuray/mangekyo | src/Mangekyo.hs | bsd-2-clause | 1,822 | 0 | 20 | 527 | 539 | 304 | 235 | 53 | 4 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE ViewPatterns #-}
import Control.Concurrent (threadDelay)
import Control.Exception (SomeException,catch)
import Control.Monad (forever,replicateM_,void)
import Control.Monad.IO.Class (liftIO)
import Data.Functor ((<$>))
import Data.IORef (modifyIORef,newIORef,readIORef)
import Data.Monoid ((<>),Sum(..),mempty)
import Data.Serialize (Serialize(..))
import System.Environment (getArgs)
import System.Log.Logger (Priority(..),updateGlobalLogger,rootLoggerName,setLevel)
import qualified System.Log.Logger as Logger -- needed for deriveLoggers
import System.Log.Logger.TH (deriveLoggers)
import System.IO (stdin,stdout)
import System.Random (randomRIO)
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit ((@?=),(@=?))
import qualified LogicGrowsOnTrees as V
import LogicGrowsOnTrees.Checkpoint
import LogicGrowsOnTrees.Examples.Queens
import LogicGrowsOnTrees.Parallel.Adapter.Processes
import LogicGrowsOnTrees.Parallel.ExplorationMode (ExplorationMode(AllMode))
import LogicGrowsOnTrees.Parallel.Main
import LogicGrowsOnTrees.Parallel.Purity (Purity(Pure))
deriveLoggers "Logger" [ERROR]
remdups :: (Eq a) => [a] -> [a]
remdups [] = []
remdups (x : []) = [x]
remdups (x : xx : xs)
| x == xx = remdups (x : xs)
| otherwise = x : remdups (xx : xs)
-- Instances {{{
instance Serialize (Sum Int) where
put = put . getSum
get = fmap Sum get
main :: IO ()
main = do
-- updateGlobalLogger rootLoggerName (setLevel DEBUG)
args ← getArgs
case args of
["nqueens",read β n] β
runWorkerUsingHandles
AllMode
Pure
(nqueensCount n)
stdin
stdout
`catch`
(\(e::SomeException) → errorM $ "Worker process failed: " ++ show e)
_ → defaultMain tests
tests :: [Test]
tests =
[testCase "one process" . runTest $ do
setNumberOfWorkers 0
setNumberOfWorkers 1
,testCase "two processes" . runTest . void $
changeNumberOfWorkers (3-)
,testCase "many processes" . runTest . void $ liftIO (randomRIO (0,1::Int)) >>= \case
0 → changeNumberOfWorkers (\i → if i > 1 then i-1 else i)
1 → changeNumberOfWorkers (+1)
n → error $ "invalid number of workers: " ++ show n
]
where
runTest generateNoise = do
let n = 15
tree = nqueensCount n
progresses_ref ← newIORef []
filepath ← getProgFilepath
RunOutcome _ termination_reason ←
runSupervisor
AllMode
filepath
["nqueens",show n]
(const $ return ())
mempty
(do setNumberOfWorkers 1
forever $ do
liftIO $ threadDelay 10000
requestProgressUpdate >>= liftIO . modifyIORef progresses_ref . (:)
generateNoise
)
result ← case termination_reason of
Aborted _ → error "prematurely aborted"
Completed result → return result
Failure _ message → error message
let correct_result = V.exploreTree tree
result @?= correct_result
progresses ← remdups <$> readIORef progresses_ref
replicateM_ 4 $ randomRIO (0,length progresses-1) >>= \i → do
let Progress checkpoint result = progresses !! i
result @=? exploreTreeStartingFromCheckpoint (invertCheckpoint checkpoint) tree
correct_result @=? result <> (exploreTreeStartingFromCheckpoint checkpoint tree)
| gcross/LogicGrowsOnTrees | LogicGrowsOnTrees-processes/tests/tests.hs | bsd-2-clause | 3,800 | 0 | 19 | 1,009 | 1,042 | 561 | 481 | 93 | 6 |
{-# LANGUAGE CPP #-}
-- |
-- Module : $HEADER$
-- Description : Utility functions for Functors
-- Copyright : (c) 2011, 2013, 2014 Peter Trsko
-- License : BSD3
--
-- Maintainer : peter.trsko@gmail.com
-- Stability : provisional
-- Portability : non-portable (CPP)
--
-- Utility functions for Functors.
module Data.Functor.Utils
(
-- * Functor
Functor(..)
#if !MIN_VERSION_base(4,2,0)
, (<$)
#endif
, ($>)
-- ** Infix variations on fmap
, (<$>)
, (<$$>)
, (<<$>>)
, (<<$$>>)
-- ** Apply inside functor
, (<#>)
, (<##>)
-- ** Lenses
--
-- $lens
, iso
, lens
)
where
-- Module Data.Functor was introduced in base 4.2.0.0.
#if MIN_VERSION_base(4,2,0)
import Data.Functor (Functor(..), (<$>))
#else
import Control.Applicative ((<$), (<$>))
#endif
#ifdef WITH_COMONAD
import Control.Comonad (($>))
#endif
import Data.Function.Between (between)
-- | Flipped version of '<$>'; the naming convention comes from
-- "Control.Applicative", where '<**>' is the flipped version of '<*>'.
(<$$>) :: (Functor f) => f a -> (a -> b) -> f b
(<$$>) = flip fmap
infixl 4 <$$>
{-# INLINE (<$$>) #-}
#ifndef WITH_COMONAD
-- Package comonad <http://hackage.haskell.org/package/comonad/> already
-- defines this function. The following comment is partially taken from that
-- package.
-- | Replace the contents of a functor uniformly with a constant value.
--
-- Flipped version of '<$'.
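--
-- A usage sketch:
--
-- >>> Just 1 $> "new"
-- Just "new"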
($>) :: Functor f => f a -> b -> f b
($>) = flip (<$)
infixl 4 $>
{-# INLINE ($>) #-}
#endif
-- | Instead of @\\ x -> f x '<$>' g x@ this function allows one to write
-- @f '<<$>>' g@.
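--
-- A usage sketch:
--
-- >>> ((,) <<$>> Just) 2
-- Just (2,2)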
(<<$>>) :: Functor f => (a -> b -> c) -> (a -> f b) -> a -> f c
(f <<$>> g) x = f x `fmap` g x
infix 4 <<$>>
{-# INLINE (<<$>>) #-}
-- | Flipped variant of '<<$>>'.
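--
-- A usage sketch:
--
-- >>> (Just <<$$>> (+)) 3
-- Just 6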
(<<$$>>) :: Functor f => (a -> f b) -> (a -> b -> c) -> a -> f c
(<<$$>>) = flip (<<$>>)
infix 4 <<$$>>
{-# INLINE (<<$$>>) #-}
-- | Like @\\ x f -> f '<*>' 'pure' x@, but does not require an 'Applicative'
-- constraint. Flipped version of '<#>'.
--
-- Implemented as: @x '<##>' f = ('$' x) '<$>' f@.
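--
-- A usage sketch:
--
-- >>> 3 <##> [(+1), (*2)]
-- [4,6]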
(<##>) :: Functor f => a -> f (a -> b) -> f b
x <##> f = ($ x) `fmap` f
infixl 4 <##>
{-# INLINE (<##>) #-}
-- | Like @\\ f x -> f '<*>' 'pure' x@, but does not require an 'Applicative'
-- constraint.
--
-- Implemented as: @f '<#>' x = ('$' x) '<$>' f@.
--
-- Examples:
--
-- >>> Just (+1) <#> 2
-- Just 3
-- >>> [(+1), (*2)] <#> 3
-- [4,6]
--
-- For @instance 'Functor' ((->) r)@ this function behaves as an infix version
-- of 'flip':
--
-- >>> (-) <#> 1 $ 2
-- 1
(<#>) :: (Functor f) => f (a -> b) -> a -> f b
f <#> x = ($ x) `fmap` f
infixl 4 <#>
{-# INLINE (<#>) #-}
-- | Create /lens/ from @f@ and @g@ that form an isomorphism.
--
-- Defined as: @iso f g = fmap f \`between\` g@
--
-- Name collides with
--
-- > iso :: (s -> a) -> (b -> t) -> Iso s t a b
--
-- Defined in /lens/ <http://hackage.haskell.org/package/lens> package as of
-- version 0.1.
iso :: (Functor f) => (c -> d) -> (a -> b) -> (b -> f c) -> a -> f d
iso = between . fmap
{-# INLINE iso #-}
-- | Construct /lens/ from @r -> (a, a -> r)@-style lens.
--
-- Name collides with a lot of lens-like packages including /lens/
-- <http://hackage.haskell.org/package/lens> package as of version 0.1.
lens :: Functor f => (r -> (a, a -> r)) -> (a -> f a) -> r -> f r
lens getSetItem f = setItemIn <<$>> f . getItemFrom
where
getItemFrom = fst . getSetItem -- :: r -> a
setItemIn = snd . getSetItem -- :: r -> a -> r
{-# INLINEABLE lens #-}
-- $lens
--
-- For newtypes and data types with a single constructor and a single attribute
-- it is possible to create a lens-like function simply by writing:
--
-- > l = iso constructor selector
--
-- This is due to the fact that constructor and selector together form an
-- isomorphism. The /lens/ package has a lot of sugar for isomorphisms and you
-- should check it out if they come up in your code.
--
-- Example:
--
-- > data Foo a = Foo {fromFoo :: a}
-- >
-- > foo :: Functor f => (a -> f a) -> Foo a -> f (Foo a)
-- > foo = iso Foo fromFoo
--
-- Not so long ago there was a time when people thought that lenses had the
-- type @r -> (a, a -> r)@, or some variation on it. There is also a lot of old
-- code that uses it directly, or indirectly via libraries that internally
-- use such a definition of lenses. To create a new-style /lens/ from this old
-- style you can use the 'lens' function:
--
-- > data Foo a = Foo
-- > { _foo1 :: Int
-- > , _foo2 :: a
-- > , _foo3 :: a
-- > , _foo4 :: a
-- > }
-- >
-- > foo1 :: Functor f => (Int -> f Int) -> Foo a -> f (Foo a)
-- > foo1 = lens $ \ f@Foo{_foo1 = x} -> (x, \ x' -> f{_foo1 = x'})
--
-- In cases when you need to avoid TemplateHaskell (like on platforms that
-- don't support Haskell byte compilation yet), these functions might come
-- in handy.
--
-- See /lens/ <http://hackage.haskell.org/package/lens> package for details
-- on current standard lenses.
| trskop/hs-not-found | not-found/src/Data/Functor/Utils.hs | bsd-3-clause | 4,956 | 0 | 12 | 1,131 | 757 | 484 | 273 | 47 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module DataFlow.JSONGraphFormat (
Val(..),
Metadata(),
Document(..),
Graph(..),
Node(..),
Edge(..)
) where
import Data.Aeson
import Data.Aeson.Types (Pair)
import qualified Data.Map as M
import Data.Vector (fromList)
data Val = Str String
| Arr [Val]
instance ToJSON Val where
toJSON (Str s) = toJSON s
toJSON (Arr vs) = toJSON vs
type Metadata = M.Map String Val
data Document = SingleGraph { graph :: Graph }
| MultiGraph { graphs :: [Graph] }
instance ToJSON Document where
toJSON (SingleGraph g) = object [
"graph" .= toJSON g
]
toJSON (MultiGraph gs) = object [
"graphs" .= toJSON gs
]
data Graph = Graph { nodes :: [Node]
, edges :: [Edge]
, graphLabel :: Maybe String
, graphMetadata :: Metadata
}
instance ToJSON Graph where
toJSON (Graph nodes edges lbl metadata) = object $
labelField lbl ++ [
"nodes" .= toJSON nodes,
"edges" .= toJSON edges,
"metadata" .= toJSON metadata
]
data Node = Node { id :: String
, nodeLabel :: Maybe String
, nodeMetadata :: Metadata }
instance ToJSON Node where
toJSON (Node id' lbl metadata) = object $
labelField lbl ++ [
"id" .= toJSON id',
"metadata" .= toJSON metadata
]
data Edge = Edge { source :: String
, target :: String
, edgeLabel :: Maybe String
, edgeMetadata :: Metadata }
instance ToJSON Edge where
toJSON (Edge source target lbl metadata) = object $
labelField lbl ++ [
"source" .= toJSON source,
"target" .= toJSON target,
"metadata" .= toJSON metadata
]
labelField :: Maybe String -> [Pair]
labelField (Just s) = [("label", toJSON s)]
labelField _ = []
| sonyxperiadev/dataflow | src/DataFlow/JSONGraphFormat.hs | bsd-3-clause | 1,916 | 0 | 9 | 633 | 601 | 331 | 270 | 59 | 1 |
module WaveSim.Widgets
(newButton) where
import Control.Monad
import Data.Maybe
import Data.IORef
import WaveSim.Types
onButton :: Int -> Int -> Button -> Bool
onButton x y but = x >= bx && x <= (bx + bw) && y >= by && y <= (by + bh)
where bx = truncate $ xPosPoint $ ulRectPoint $ butGeometry $ but
by = truncate $ yPosPoint $ ulRectPoint $ butGeometry $ but
bw = truncate $ width $ butGeometry $ but
bh = truncate $ height $ butGeometry $ but
activateButton :: MouseInfo -> Button -> Bool
activateButton mouseInfo' button =
leftMouseDown mouseInfo' == False &&
onButton (mouseX mouseInfo') (mouseY mouseInfo') button &&
onButton (prevMouseXDown mouseInfo') (prevMouseYDown mouseInfo') button
buttonMouseCallback :: IORef WorldState -> Button -> IO ()
buttonMouseCallback worldStateRef button = do
worldState <- readIORef worldStateRef
when (activateButton (mouseInfo (inputStateRef worldState)) button) $
(fromJust (butClickCall button)) worldStateRef
newButton :: IORef WorldState -> Button -> IO ()
newButton worldStateRef but = do
worldState <- readIORef worldStateRef
let mh = (buttonMouseCallback worldStateRef but):(mouseHandlers worldState)
writeIORef worldStateRef (worldState {mouseHandlers = mh})
| jethomas/WaveSim | src/WaveSim/Widgets.hs | bsd-3-clause | 1,277 | 0 | 14 | 243 | 429 | 216 | 213 | 27 | 1 |
-- | Compute interpolant for an unsatisfiable conjunction of formulas
module Example.Monad.Interpolation
( run )
where
import Control.Monad
import Control.Monad.Trans ( liftIO )
import Z3.Monad
run :: IO ()
run = do
env <- newItpEnv Nothing stdOpts
evalZ3WithEnv z3 env
z3 = do
a <- mkFreshBoolVar "a"
b <- mkFreshBoolVar "b"
na <- mkNot a
nb <- mkNot b
f1 <- mkIff a b
f2 <- mkIff a nb
g1 <- mkInterpolant f1
g2 <- mkInterpolant f2
g3 <- mkInterpolant =<< mkTrue
params <- mkParams
res <- flip computeInterpolant params =<< mkAnd [g1, g2, g3]
case res of
Just (Right itps) -> mapM_ (liftIO . putStrLn <=< astToString) itps
_ -> error "could not compute interpolants"
| sukwon0709/z3-haskell | examples/Example/Monad/Interpolation.hs | bsd-3-clause | 769 | 0 | 13 | 209 | 251 | 119 | 132 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE LambdaCase #-}
module Y2018.Day20 (answer1, answer2) where
import Control.Monad
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Data.Foldable
import Data.Maybe
import Data.Functor
import Data.List
import Data.Ord
import Text.Megaparsec
import Text.Megaparsec.Char
import Data.Void
import Control.Lens
type Parser = Parsec Void String
data Dir = N | S | W | E deriving (Show, Eq)
data Cell = Room | DoorH | DoorV | Wall deriving (Show, Eq)
type Point = (Int,Int)
newtype Regex = Regex [RegexSym] deriving (Show)
data RegexSym
= D Dir
| R [Regex]
deriving (Show)
answer1, answer2 :: IO ()
answer1 = do
regex <- getData
print $ solve regex
answer2 = do
regex <- getData
let withDist = M.toList $ dijkstra (0,0) $ buildMap regex
let res = length $ filter (\x -> snd x >= 1000) withDist
print res
solve :: Regex -> Int
solve = head . sortOn Down . M.elems . dijkstra (0,0) . buildMap
buildMap :: Regex -> M.Map Point Cell
buildMap r =
let s = (0,0)
m0 = M.singleton s Room
results = stepRegex (s, m0) r
in foldl' (\m (_, m') -> M.union m' m) mempty results
stepRegex :: (Point, M.Map Point Cell) -> Regex -> [(Point, M.Map Point Cell)]
stepRegex (p, m) (Regex rSyms) = foldM stepSym (p, m) rSyms
stepSym :: (Point, M.Map Point Cell) -> RegexSym -> [(Point, M.Map Point Cell)]
stepSym (p, m) (D dir) =
let (p', cells) = move p dir
m' = foldl' (\m (k, v) -> M.insert k v m) m cells
in [(p', m')]
stepSym start (R rs) =
let withDup = concatMap (stepRegex start) rs
f m (p, cells) = M.insertWith M.union p cells m
deduped = foldl' f mempty withDup
in M.toList deduped
move :: Point -> Dir -> (Point, [(Point, Cell)])
move (x,y) N = ((x,y-2), [((x,y-1), DoorH), ((x,y-2), Room)])
move (x,y) S = ((x,y+2), [((x,y+1), DoorH), ((x,y+2), Room)])
move (x,y) W = ((x-2,y), [((x-1,y), DoorV), ((x-2,y), Room)])
move (x,y) E = ((x+2,y), [((x+1,y), DoorV), ((x+2,y), Room)])
dijkstra
:: Point -- starting point
-> M.Map Point Cell -- map
-> M.Map Point Int -- shortest number of door from start
dijkstra start map = go map S.empty M.empty [(start, 0)]
where
go _ _ distances [] = distances
go !map !visited !distances ((p@(x,y), d):ps) =
let neighbors = do
a <- [x-1, x, x+1]
b <- [y-1, y, y+1]
guard $ (a,b) /= (x,y)
let destX = 2 * a - x
let destY = 2 * b - y
let dest = (destX, destY)
guard $ not (dest `S.member` visited)
guard $ maybe False isDoor (M.lookup (a,b) map)
guard $ maybe False isRoom (M.lookup dest map)
pure (dest, d+1)
f a b = if a < b then a else b
distances' = foldl' (\m (x, d) -> M.insertWith f x d m) distances neighbors
visited' = S.insert p visited
in go map visited' distances' (neighbors <> ps)
isRoom Room = True
isRoom _ = False
isDoor DoorH = True
isDoor DoorV = True
isDoor _ = False
buildRegex :: String -> Regex
buildRegex raw = case parse parseRegex "regex" raw of
Left err -> error $ show err
Right r -> r
getData :: IO Regex
getData = buildRegex <$> readFile "data/2018/day20.txt"
parseRegex :: Parser Regex
parseRegex = do
char '^'
rs <- many parseRSym
char '$'
pure $ Regex rs
parseRSym :: Parser RegexSym
parseRSym
= (D <$> parseDir)
<|> do
char '('
subR <- (Regex <$> many parseRSym) `sepBy` char '|'
char ')'
pure $ R subR
parseDir :: Parser Dir
parseDir
= char 'N' $> N
<|> char 'S' $> S
<|> char 'E' $> E
<|> char 'W' $> W
prettyMap :: M.Map Point Cell -> String
prettyMap m =
let ks = M.keys m
minX = minimum $ fmap fst ks
minY = minimum $ fmap snd ks
maxX = maximum $ fmap fst ks
maxY = maximum $ fmap snd ks
makeLine y = [ pretty m (x,y) | x <- [minX..maxX]]
ls = [makeLine y | y <- [minY..maxY]]
in unlines ls
pretty m p
| p == (0,0) = 'X'
| otherwise = fromMaybe '#' $ pretty' <$> M.lookup p m
pretty' = \case
Room -> '.'
DoorH -> '-'
DoorV -> '|'
Wall -> '#'
prettyRegex (Regex syms) =
let psym (D dir) = show dir
psym (R rs) = "(" <> intercalate "|" (fmap pr rs) <> ")"
pr (Regex syms) = concatMap psym syms
in "^" <> concatMap psym syms <> "$"
test :: IO ()
test = do
assertDist "^ESSWWN(E|NNENN(EESS(WNSE|)SSS|WWWSSSSE(SW|NNNE)))$" 23
assertDist "^WSSEESWWWNW(S|NENNEEEENN(ESSSSW(NWSW|SSEN)|WSWWN(E|WWS(E|SS))))$" 31
assertDist "^ENWWW(NEEE|SSE(EE|N))$" 10
assertDist "^ENNWSWW(NEWS|)SSSEEN(WNSE|)EE(SWEN|)NNN$" 18
assertDist "^N(ESNNW|N|NN)NNN$" 6
raw <- head . lines <$> readFile "./data/2018/day20.txt"
let dumped = prettyRegex (buildRegex raw)
print $ dumped == raw
print "done"
assertDist :: String -> Int -> IO ()
assertDist r expected = do
let reg = buildRegex r
when (prettyRegex reg /= r) $ do
putStrLn "invalid parsing?"
putStrLn r
putStrLn (prettyRegex reg)
print reg
let d = solve reg
if d == expected
then putStrLn "ok"
else do
putStrLn r
putStrLn $ prettyMap $ buildMap reg
putStrLn $ "got: " <> show d <> " but expected: " <> show expected
| geekingfrog/advent-of-code | src/Y2018/Day20.hs | bsd-3-clause | 5,197 | 0 | 17 | 1,332 | 2,395 | 1,244 | 1,151 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module SlackBot.Config (readConf) where
import SlackBot.Types
import qualified Data.Configurator as C
-- Read config file.
readConf :: FilePath -> IO BotConfig
readConf cfgFile = do
cfg <- C.load [C.Required cfgFile]
token <- C.require cfg "slack_api_token"
return (BotConfig token)
| Lepovirta/slackbot | src/SlackBot/Config.hs | bsd-3-clause | 328 | 0 | 11 | 52 | 92 | 48 | 44 | 9 | 1 |
{-# LANGUAGE BangPatterns #-}
-- | Like Throughput, but send every ping from a different process
-- (i.e., require a lightweight connection per ping)
import System.Environment
import Control.Monad
import Control.Applicative
import Control.Distributed.Process
import Control.Distributed.Process.Node
import Network.Transport.TCP (createTransport, defaultTCPAddr, defaultTCPParameters)
import Data.Binary (encode, decode)
import qualified Data.ByteString.Lazy as BSL
counter :: Process ()
counter = go 0
where
go :: Int -> Process ()
go !n = do
b <- expect
case b of
Nothing -> go (n + 1)
Just them -> send them n >> go 0
count :: Int -> ProcessId -> Process ()
count n them = do
us <- getSelfPid
replicateM_ n . spawnLocal $ send them (Nothing :: Maybe ProcessId)
send them (Just us)
n' <- expect
liftIO $ print (n == n')
initialProcess :: String -> Process ()
initialProcess "SERVER" = do
us <- getSelfPid
liftIO $ BSL.writeFile "counter.pid" (encode us)
counter
initialProcess "CLIENT" = do
n <- liftIO $ getLine
them <- liftIO $ decode <$> BSL.readFile "counter.pid"
count (read n) them
main :: IO ()
main = do
[role, host, port] <- getArgs
trans <- createTransport (defaultTCPAddr host port) defaultTCPParameters
case trans of
Right transport -> do node <- newLocalNode transport initRemoteTable
runProcess node $ initialProcess role
Left other -> error $ show other
| haskell-distributed/distributed-process | benchmarks/Spawns.hs | bsd-3-clause | 1,477 | 0 | 14 | 319 | 479 | 236 | 243 | 41 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeFamilies #-}
module Shadow where
import Data.Foldable
import Data.Monoid ((<>))
import Data.Map (Map)
import qualified Data.Text as T
import Lucid.Svg
import qualified Lucid.Svg.Attributes as A
import qualified Lucid.Svg.Elements as E
import Reflex
import Reflex.Dom
import Reflex.Dynamic.TH
import Utils
filtID :: MonadWidget t m => ShadowParams t -> m (Dynamic t String)
filtID sp =
$(qDyn [| mconcat ["shadowFiltX", fs $(unqDyn[| _spX sp |])
,"Y", fs $(unqDyn[| _spY sp |])
,"B", fs $(unqDyn[| _spBlur sp |])
,"C", doColor $(unqDyn[| _spColor sp |])
]
|])
where doColor = filter (`notElem` ['(',')',','])
data ShadowParams t = ShadowParams
{ _spX :: Dynamic t Int
, _spY :: Dynamic t Int
, _spBlur :: Dynamic t Int
, _spColor :: Dynamic t String
}
--defShadowParams :: MonadWidget t m => m (ShadowParams t)
defShadowParams :: Reflex t => ShadowParams t
defShadowParams = ShadowParams
(constDyn 4) (constDyn 4) (constDyn 1) (constDyn "rgba(0,0,0,1)")
elShadow :: MonadWidget t m
=> ShadowParams t
-> m a
-> m a
elShadow sp@ShadowParams{..} child = do
fID <- filtID sp
fURL <- forDyn fID $ \n -> "url(#" ++ n ++ ")"
filtElemOffsetAttrs <- combineDyn (\x y -> "result" =: "offOut"
<> "in" =: "SourceAlpha"
<> "dx" =: fs x
<> "dy" =: fs y
) _spX _spY
filtElemFloodAttrs <- forDyn _spColor $ \c ->
( "result" =: "floodOut"
<> "flood-color" =: c
<> "flood-opacity" =: "1" )
filtElemBlurAttrs <- forDyn _spBlur $ \r ->
"result" =: "blurOut"
<> "in" =: "offOut"
<> "stdDeviation" =: show r
let filtElemCompositeAttrs = ("result" =: st "shadowOut")
<> ("in" =: "floodOut") <> ("in2" =: "blurOut")
<> "operator" =: "in"
filtElemBlendAttrs = ("in" =: "SourceGraphic") <> ("in2" =: "shadowOut")
<> ("mode" =: st "normal")
filtElemAttrs <- forDyn fID $ \n ->
"id" =: n
<> "x" =: "-0.5" <> "y" =: "-0.5"
<> "width" =: "200%" <> "height" =: "200%"
gAttrs <- mapDyn ("filter" =: ) fURL
filtElem <- svgElDynAttr "filter" filtElemAttrs $ do
svgElDynAttr "feOffset" filtElemOffsetAttrs $ return ()
svgElDynAttr "feFlood" filtElemFloodAttrs $ return ()
svgElDynAttr "feGaussianBlur" filtElemBlurAttrs $ return ()
svgElAttr "feComposite" filtElemCompositeAttrs $ return ()
svgElAttr "feBlend" filtElemBlendAttrs $ return ()
e <- svgElDynAttr "g" gAttrs $ child
return e
| imalsogreg/collabplot | client/src/Shadow.hs | bsd-3-clause | 3,030 | 10 | 18 | 1,007 | 805 | 427 | 378 | 70 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Handles @deriving@ clauses on @data@ declarations.
-}
{-# LANGUAGE CPP #-}
module TcDeriv ( tcDeriving, DerivInfo(..), mkDerivInfos ) where
#include "HsVersions.h"
import HsSyn
import DynFlags
import TcRnMonad
import FamInst
import TcErrors( reportAllUnsolved )
import TcValidity( validDerivPred )
import TcClassDcl( tcMkDeclCtxt )
import TcEnv
import TcGenDeriv -- Deriv stuff
import TcGenGenerics
import InstEnv
import Inst
import FamInstEnv
import TcHsType
import TcMType
import TcSimplify
import LoadIface( loadInterfaceForName )
import Module( getModule )
import RnNames( extendGlobalRdrEnvRn )
import RnBinds
import RnEnv
import RnSource ( addTcgDUs )
import HscTypes
import Avail
import Unify( tcUnifyTy )
import Class
import Type
import ErrUtils
import DataCon
import Maybes
import RdrName
import Name
import NameEnv
import NameSet
import TyCon
import TcType
import Var
import VarSet
import PrelNames
import THNames ( liftClassKey )
import SrcLoc
import Util
import Outputable
import FastString
import Bag
import Pair
import Control.Monad
import Data.List
{-
************************************************************************
* *
Overview
* *
************************************************************************
Overall plan
~~~~~~~~~~~~
1. Convert the decls (i.e. data/newtype deriving clauses,
plus standalone deriving) to [EarlyDerivSpec]
2. Infer the missing contexts for the InferTheta's
3. Add the derived bindings, generating InstInfos
-}
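-- A rough illustration of the plan above: for
--     data T a = MkT a deriving( Eq )
-- step 1 produces an InferTheta EarlyDerivSpec whose ds_theta holds the
-- not-yet-simplified constraints from MkT's field (roughly, Eq a); step 2
-- simplifies that context to (Eq a); and step 3 generates the bindings for
--     instance Eq a => Eq (T a) where ...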
-- DerivSpec is purely local to this module
data DerivSpec theta = DS { ds_loc :: SrcSpan
, ds_name :: Name -- DFun name
, ds_tvs :: [TyVar]
, ds_theta :: theta
, ds_cls :: Class
, ds_tys :: [Type]
, ds_tc :: TyCon
, ds_tc_args :: [Type]
, ds_overlap :: Maybe OverlapMode
, ds_newtype :: Bool }
-- This spec implies a dfun declaration of the form
-- df :: forall tvs. theta => C tys
-- The Name is the name for the DFun we'll build
-- The tyvars bind all the variables in the theta
-- For type families, the tycon in
-- in ds_tys is the *family* tycon
-- in ds_tc, ds_tc_args is the *representation* tycon
-- For non-family tycons, both are the same
-- the theta is either the given and final theta, in standalone deriving,
-- or the not-yet-simplified list of constraints together with their origin
-- ds_newtype = True <=> Generalised Newtype Deriving (GND)
-- False <=> Vanilla deriving
{-
Example:
newtype instance T [a] = MkT (Tree a) deriving( C s )
==>
axiom T [a] = :RTList a
axiom :RTList a = Tree a
DS { ds_tvs = [a,s], ds_cls = C, ds_tys = [s, T [a]]
, ds_tc = :RTList, ds_tc_args = [a]
, ds_newtype = True }
-}
type DerivContext = Maybe ThetaType
-- Nothing <=> Vanilla deriving; infer the context of the instance decl
-- Just theta <=> Standalone deriving: context supplied by programmer
data PredOrigin = PredOrigin PredType CtOrigin
type ThetaOrigin = [PredOrigin]
mkPredOrigin :: CtOrigin -> PredType -> PredOrigin
mkPredOrigin origin pred = PredOrigin pred origin
mkThetaOrigin :: CtOrigin -> ThetaType -> ThetaOrigin
mkThetaOrigin origin = map (mkPredOrigin origin)
data EarlyDerivSpec = InferTheta (DerivSpec ThetaOrigin)
| GivenTheta (DerivSpec ThetaType)
-- InferTheta ds => the context for the instance should be inferred
-- In this case ds_theta is the list of all the constraints
-- needed, such as (Eq [a], Eq a), together with a suitable CtLoc
-- to get good error messages.
-- The inference process is to reduce this to a simpler form (e.g.
-- Eq a)
--
-- GivenTheta ds => the exact context for the instance is supplied
-- by the programmer; it is ds_theta
forgetTheta :: EarlyDerivSpec -> DerivSpec ()
forgetTheta (InferTheta spec) = spec { ds_theta = () }
forgetTheta (GivenTheta spec) = spec { ds_theta = () }
earlyDSLoc :: EarlyDerivSpec -> SrcSpan
earlyDSLoc (InferTheta spec) = ds_loc spec
earlyDSLoc (GivenTheta spec) = ds_loc spec
splitEarlyDerivSpec :: [EarlyDerivSpec] -> ([DerivSpec ThetaOrigin], [DerivSpec ThetaType])
splitEarlyDerivSpec [] = ([],[])
splitEarlyDerivSpec (InferTheta spec : specs) =
case splitEarlyDerivSpec specs of (is, gs) -> (spec : is, gs)
splitEarlyDerivSpec (GivenTheta spec : specs) =
case splitEarlyDerivSpec specs of (is, gs) -> (is, spec : gs)
pprDerivSpec :: Outputable theta => DerivSpec theta -> SDoc
pprDerivSpec (DS { ds_loc = l, ds_name = n, ds_tvs = tvs,
ds_cls = c, ds_tys = tys, ds_theta = rhs })
= parens (hsep [ppr l, ppr n, ppr tvs, ppr c, ppr tys]
<+> equals <+> ppr rhs)
instance Outputable theta => Outputable (DerivSpec theta) where
ppr = pprDerivSpec
instance Outputable EarlyDerivSpec where
ppr (InferTheta spec) = ppr spec <+> ptext (sLit "(Infer)")
ppr (GivenTheta spec) = ppr spec <+> ptext (sLit "(Given)")
instance Outputable PredOrigin where
ppr (PredOrigin ty _) = ppr ty -- The origin is not so interesting when debugging
{-
Inferring missing contexts
~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a b = C1 (Foo a) (Bar b)
| C2 Int (T b a)
| C3 (T a a)
deriving (Eq)
[NOTE: See end of these comments for what to do with
data (C a, D b) => T a b = ...
]
We want to come up with an instance declaration of the form
instance (Ping a, Pong b, ...) => Eq (T a b) where
x == y = ...
It is pretty easy, albeit tedious, to fill in the code "...". The
trick is to figure out what the context for the instance decl is,
namely @Ping@, @Pong@ and friends.
Let's call the context reqd for the T instance of class C at types
(a,b, ...) C (T a b). Thus:
Eq (T a b) = (Ping a, Pong b, ...)
Now we can get a (recursive) equation from the @data@ decl:
Eq (T a b) = Eq (Foo a) u Eq (Bar b) -- From C1
u Eq (T b a) u Eq Int -- From C2
u Eq (T a a) -- From C3
Foo and Bar may have explicit instances for @Eq@, in which case we can
just substitute for them. Alternatively, either or both may have
their @Eq@ instances given by @deriving@ clauses, in which case they
form part of the system of equations.
Now all we need do is simplify and solve the equations, iterating to
find the least fixpoint. Notice that the order of the arguments can
switch around, as here in the recursive calls to T.
Let's suppose Eq (Foo a) = Eq a, and Eq (Bar b) = Ping b.
We start with:
Eq (T a b) = {} -- The empty set
Next iteration:
Eq (T a b) = Eq (Foo a) u Eq (Bar b) -- From C1
u Eq (T b a) u Eq Int -- From C2
u Eq (T a a) -- From C3
After simplification:
= Eq a u Ping b u {} u {} u {}
= Eq a u Ping b
Next iteration:
Eq (T a b) = Eq (Foo a) u Eq (Bar b) -- From C1
u Eq (T b a) u Eq Int -- From C2
u Eq (T a a) -- From C3
After simplification:
= Eq a u Ping b
u (Eq b u Ping a)
u (Eq a u Ping a)
= Eq a u Ping b u Eq b u Ping a
The next iteration gives the same result, so this is the fixpoint. We
need to make a canonical form of the RHS to ensure convergence. We do
this by simplifying the RHS to a form in which
- the classes constrain only tyvars
- the list is sorted by tyvar (major key) and then class (minor key)
- no duplicates, of course
So, here are the synonyms for the ``equation'' structures:
Note [Data decl contexts]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data (RealFloat a) => Complex a = !a :+ !a deriving( Read )
We will need an instance decl like:
instance (Read a, RealFloat a) => Read (Complex a) where
...
The RealFloat in the context is because the read method for Complex is bound
to construct a Complex, and doing that requires that the argument type is
in RealFloat.
But this ain't true for Show, Eq, Ord, etc, since they don't construct
a Complex; they only take them apart.
Our approach: identify the offending classes, and add the data type
context to the instance decl. The "offending classes" are
Read, Enum?
FURTHER NOTE ADDED March 2002. In fact, Haskell98 now requires that
pattern matching against a constructor from a data type with a context
gives rise to the constraints for that context -- or at least the thinned
version. So now all classes are "offending".
Note [Newtype deriving]
~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
class C a b
instance C [a] Char
newtype T = T Char deriving( C [a] )
Notice the free 'a' in the deriving. We have to fill this out to
newtype T = T Char deriving( forall a. C [a] )
And then translate it to:
instance C [a] Char => C [a] T where ...
Note [Newtype deriving superclasses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(See also Trac #1220 for an interesting exchange on newtype
deriving and superclasses.)
The 'tys' here come from the partial application in the deriving
clause. The last arg is the new instance type.
We must pass the superclasses; the newtype might be an instance
of them in a different way than the representation type
E.g. newtype Foo a = Foo a deriving( Show, Num, Eq )
Then the Show instance is not done via Coercible; it shows
Foo 3 as "Foo 3"
The Num instance is derived via Coercible, but the Show superclass
dictionary must be the Show instance for Foo, *not* the Show dictionary
gotten from the Num dictionary. So we must build a whole new dictionary
not just use the Num one. The instance we want is something like:
instance (Num a, Show (Foo a), Eq (Foo a)) => Num (Foo a) where
(+) = ((+)@a)
...etc...
There may be a coercion needed which we get from the tycon for the newtype
when the dict is constructed in TcInstDcls.tcInstDecl2
Note [Unused constructors and deriving clauses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See Trac #3221. Consider
data T = T1 | T2 deriving( Show )
Are T1 and T2 unused? Well, no: the deriving clause expands to mention
both of them. So we gather defs/uses from deriving just like anything else.
-}
-- | Stuff needed to process a `deriving` clause
data DerivInfo = DerivInfo { di_rep_tc :: TyCon
-- ^ The data tycon for normal datatypes,
-- or the *representation* tycon for data families
, di_preds :: [LHsType Name]
, di_ctxt :: SDoc -- ^ error context
}
-- | Extract `deriving` clauses of proper data type (skips data families)
mkDerivInfos :: [TyClGroup Name] -> TcM [DerivInfo]
mkDerivInfos tycls = concatMapM mk_derivs tycls
where
mk_derivs (TyClGroup { group_tyclds = decls })
= concatMapM (mk_deriv . unLoc) decls
mk_deriv decl@(DataDecl { tcdLName = L _ data_name
, tcdDataDefn =
HsDataDefn { dd_derivs = Just (L _ preds) } })
= do { tycon <- tcLookupTyCon data_name
; return [DerivInfo { di_rep_tc = tycon, di_preds = preds
, di_ctxt = tcMkDeclCtxt decl }] }
mk_deriv _ = return []
{-
************************************************************************
* *
\subsection[TcDeriv-driver]{Top-level function for \tr{derivings}}
* *
************************************************************************
-}
tcDeriving :: [DerivInfo] -- All `deriving` clauses
-> [LDerivDecl Name] -- All stand-alone deriving declarations
-> TcM (TcGblEnv, Bag (InstInfo Name), HsValBinds Name)
tcDeriving deriv_infos deriv_decls
= recoverM (do { g <- getGblEnv
; return (g, emptyBag, emptyValBindsOut)}) $
do { -- Fish the "deriving"-related information out of the TcEnv
-- And make the necessary "equations".
is_boot <- tcIsHsBootOrSig
; traceTc "tcDeriving" (ppr is_boot)
; early_specs <- makeDerivSpecs is_boot deriv_infos deriv_decls
; traceTc "tcDeriving 1" (ppr early_specs)
-- for each type, determine the auxiliary declarations that are common
-- to multiple derivations involving that type (e.g. Generic and
-- Generic1 should use the same TcGenGenerics.MetaTyCons)
; (commonAuxs, auxDerivStuff) <- commonAuxiliaries $ map forgetTheta early_specs
; let (infer_specs, given_specs) = splitEarlyDerivSpec early_specs
; insts1 <- mapM (genInst commonAuxs) given_specs
-- the stand-alone derived instances (@insts1@) are used when inferring
-- the contexts for "deriving" clauses' instances (@infer_specs@)
; final_specs <- extendLocalInstEnv (map (iSpec . fstOf3) insts1) $
inferInstanceContexts infer_specs
; insts2 <- mapM (genInst commonAuxs) final_specs
; let (inst_infos, deriv_stuff, maybe_fvs) = unzip3 (insts1 ++ insts2)
; loc <- getSrcSpanM
; let (binds, newTyCons, famInsts, extraInstances) =
genAuxBinds loc (unionManyBags (auxDerivStuff : deriv_stuff))
; dflags <- getDynFlags
; (inst_info, rn_binds, rn_dus) <-
renameDeriv is_boot (inst_infos ++ (bagToList extraInstances)) binds
; unless (isEmptyBag inst_info) $
liftIO (dumpIfSet_dyn dflags Opt_D_dump_deriv "Derived instances"
(ddump_deriving inst_info rn_binds newTyCons famInsts))
; let all_tycons = map ATyCon (bagToList newTyCons)
; gbl_env <- tcExtendGlobalEnv all_tycons $
tcExtendGlobalEnvImplicit (concatMap implicitTyThings all_tycons) $
tcExtendLocalFamInstEnv (bagToList famInsts) $
tcExtendLocalInstEnv (map iSpec (bagToList inst_info)) getGblEnv
; let all_dus = rn_dus `plusDU` usesOnly (mkFVs $ catMaybes maybe_fvs)
; return (addTcgDUs gbl_env all_dus, inst_info, rn_binds) }
where
ddump_deriving :: Bag (InstInfo Name) -> HsValBinds Name
-> Bag TyCon -- ^ Empty data constructors
-> Bag FamInst -- ^ Rep type family instances
-> SDoc
ddump_deriving inst_infos extra_binds repMetaTys repFamInsts
= hang (ptext (sLit "Derived instances:"))
2 (vcat (map (\i -> pprInstInfoDetails i $$ text "") (bagToList inst_infos))
$$ ppr extra_binds)
$$ hangP "Generic representation:" (
hangP "Generated datatypes for meta-information:"
(vcat (map ppr (bagToList repMetaTys)))
$$ hangP "Representation types:"
(vcat (map pprRepTy (bagToList repFamInsts))))
hangP s x = text "" $$ hang (ptext (sLit s)) 2 x
-- Prints the representable type family instance
pprRepTy :: FamInst -> SDoc
pprRepTy fi@(FamInst { fi_tys = lhs })
= ptext (sLit "type") <+> ppr (mkTyConApp (famInstTyCon fi) lhs) <+>
equals <+> ppr rhs
where rhs = famInstRHS fi
-- As of 24 April 2012, this only shares MetaTyCons between derivations of
-- Generic and Generic1; thus the types and logic are quite simple.
type CommonAuxiliary = MetaTyCons
type CommonAuxiliaries = NameEnv CommonAuxiliary
commonAuxiliaries :: [DerivSpec ()] -> TcM (CommonAuxiliaries, BagDerivStuff)
commonAuxiliaries = foldM snoc (emptyNameEnv, emptyBag) where
snoc :: (CommonAuxiliaries, BagDerivStuff)
-> DerivSpec () -> TcM (CommonAuxiliaries, BagDerivStuff)
snoc acc@(cas, stuff) (DS {ds_cls = cls, ds_tc = rep_tycon})
| getUnique cls `elem` [genClassKey, gen1ClassKey] =
extendComAux $ genGenericMetaTyCons rep_tycon
| otherwise = return acc
where extendComAux :: TcM (MetaTyCons, BagDerivStuff)
-> TcM (CommonAuxiliaries, BagDerivStuff)
extendComAux m -- don't run m if it's already in the accumulator
| elemNameEnv (tyConName rep_tycon) cas = return acc
| otherwise = do (ca, new_stuff) <- m
return ( extendNameEnv cas (tyConName rep_tycon) ca
, stuff `unionBags` new_stuff)
renameDeriv :: Bool
-> [InstInfo RdrName]
-> Bag (LHsBind RdrName, LSig RdrName)
-> TcM (Bag (InstInfo Name), HsValBinds Name, DefUses)
renameDeriv is_boot inst_infos bagBinds
| is_boot -- If we are compiling a hs-boot file, don't generate any derived bindings
-- The inst-info bindings will all be empty, but it's easier to
-- just use rn_inst_info to change the type appropriately
= do { (rn_inst_infos, fvs) <- mapAndUnzipM rn_inst_info inst_infos
; return ( listToBag rn_inst_infos
, emptyValBindsOut, usesOnly (plusFVs fvs)) }
| otherwise
= discardWarnings $ -- Discard warnings about unused bindings etc
setXOptM Opt_EmptyCase $ -- Derived decls (for empty types) can have
-- case x of {}
setXOptM Opt_ScopedTypeVariables $ -- Derived decls (for newtype-deriving) can
setXOptM Opt_KindSignatures $ -- use ScopedTypeVariables & KindSignatures
do {
-- Bring the extra deriving stuff into scope
-- before renaming the instances themselves
; (aux_binds, aux_sigs) <- mapAndUnzipBagM return bagBinds
; let aux_val_binds = ValBindsIn aux_binds (bagToList aux_sigs)
; rn_aux_lhs <- rnTopBindsLHS emptyFsEnv aux_val_binds
; let bndrs = collectHsValBinders rn_aux_lhs
; envs <- extendGlobalRdrEnvRn (map Avail bndrs) emptyFsEnv ;
; setEnvs envs $
do { (rn_aux, dus_aux) <- rnValBindsRHS (TopSigCtxt (mkNameSet bndrs)) rn_aux_lhs
; (rn_inst_infos, fvs_insts) <- mapAndUnzipM rn_inst_info inst_infos
; return (listToBag rn_inst_infos, rn_aux,
dus_aux `plusDU` usesOnly (plusFVs fvs_insts)) } }
where
rn_inst_info :: InstInfo RdrName -> TcM (InstInfo Name, FreeVars)
rn_inst_info
inst_info@(InstInfo { iSpec = inst
, iBinds = InstBindings
{ ib_binds = binds
, ib_tyvars = tyvars
, ib_pragmas = sigs
, ib_extensions = exts -- Only for type-checking
, ib_derived = sa } })
= ASSERT( null sigs )
bindLocalNamesFV tyvars $
do { (rn_binds,_, fvs) <- rnMethodBinds False (is_cls_nm inst) [] binds []
; let binds' = InstBindings { ib_binds = rn_binds
, ib_tyvars = tyvars
, ib_pragmas = []
, ib_extensions = exts
, ib_derived = sa }
; return (inst_info { iBinds = binds' }, fvs) }
{-
Note [Newtype deriving and unused constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (see Trac #1954):
module Bug(P) where
newtype P a = MkP (IO a) deriving Monad
If you compile with -fwarn-unused-binds you do not expect the warning
"Defined but not used: data consructor MkP". Yet the newtype deriving
code does not explicitly mention MkP, but it should behave as if you
had written
instance Monad P where
return x = MkP (return x)
...etc...
So we want to signal a user of the data constructor 'MkP'.
This is the reason behind the (Maybe Name) part of the return type
of genInst.
Note [Why we don't pass rep_tc into deriveTyData]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Down in the bowels of mkEqnHelp, we need to convert the fam_tc back into
the rep_tc by means of a lookup. And yet we have the rep_tc right here!
Why look it up again? Answer: it's just easier this way.
We drop some number of arguments from the end of the datatype definition
in deriveTyData. The arguments are dropped from the fam_tc.
This action may drop a *different* number of arguments
passed to the rep_tc, depending on how many free variables, etc., the
dropped patterns have.
Also, this technique carries over the kind substitution from deriveTyData
nicely.
************************************************************************
* *
From HsSyn to DerivSpec
* *
************************************************************************
@makeDerivSpecs@ fishes around to find the info about needed derived instances.
-}
makeDerivSpecs :: Bool
-> [DerivInfo]
-> [LDerivDecl Name]
-> TcM [EarlyDerivSpec]
makeDerivSpecs is_boot deriv_infos deriv_decls
= do { eqns1 <- concatMapM (recoverM (return []) . deriveDerivInfo) deriv_infos
; eqns2 <- concatMapM (recoverM (return []) . deriveStandalone) deriv_decls
; let eqns = eqns1 ++ eqns2
; if is_boot then -- No 'deriving' at all in hs-boot files
do { unless (null eqns) (add_deriv_err (head eqns))
; return [] }
else return eqns }
where
add_deriv_err eqn
= setSrcSpan (earlyDSLoc eqn) $
addErr (hang (ptext (sLit "Deriving not permitted in hs-boot file"))
2 (ptext (sLit "Use an instance declaration instead")))
------------------------------------------------------------------
-- | Process a `deriving` clause
deriveDerivInfo :: DerivInfo -> TcM [EarlyDerivSpec]
deriveDerivInfo (DerivInfo { di_rep_tc = rep_tc, di_preds = preds
, di_ctxt = err_ctxt })
= addErrCtxt err_ctxt $
concatMapM (deriveTyData tvs tc tys) preds
where
tvs = tyConTyVars rep_tc
(tc, tys) = case tyConFamInstSig_maybe rep_tc of
-- data family:
Just (fam_tc, pats, _) -> (fam_tc, pats)
-- NB: deriveTyData wants the *user-specified*
-- name. See Note [Why we don't pass rep_tc into deriveTyData]
_ -> (rep_tc, mkTyVarTys tvs) -- datatype
------------------------------------------------------------------
deriveStandalone :: LDerivDecl Name -> TcM [EarlyDerivSpec]
-- Standalone deriving declarations
-- e.g. deriving instance Show a => Show (T a)
-- Rather like tcLocalInstDecl
deriveStandalone (L loc (DerivDecl deriv_ty overlap_mode))
= setSrcSpan loc $
addErrCtxt (standaloneCtxt deriv_ty) $
do { traceTc "Standalone deriving decl for" (ppr deriv_ty)
; (tvs, theta, cls, inst_tys) <- tcHsInstHead TcType.InstDeclCtxt deriv_ty
; traceTc "Standalone deriving;" $ vcat
[ text "tvs:" <+> ppr tvs
, text "theta:" <+> ppr theta
, text "cls:" <+> ppr cls
, text "tys:" <+> ppr inst_tys ]
-- C.f. TcInstDcls.tcLocalInstDecl1
; checkTc (not (null inst_tys)) derivingNullaryErr
; let cls_tys = take (length inst_tys - 1) inst_tys
inst_ty = last inst_tys
; traceTc "Standalone deriving:" $ vcat
[ text "class:" <+> ppr cls
, text "class types:" <+> ppr cls_tys
, text "type:" <+> ppr inst_ty ]
; case tcSplitTyConApp_maybe inst_ty of
Just (tc, tc_args)
| className cls == typeableClassName
-> do warnUselessTypeable
return []
| isAlgTyCon tc || isDataFamilyTyCon tc -- All other classes
-> do { spec <- mkEqnHelp (fmap unLoc overlap_mode)
tvs cls cls_tys tc tc_args (Just theta)
; return [spec] }
_ -> -- Complain about functions, primitive types, etc,
failWithTc $ derivingThingErr False cls cls_tys inst_ty $
ptext (sLit "The last argument of the instance must be a data or newtype application")
}
warnUselessTypeable :: TcM ()
warnUselessTypeable
= do { warn <- woptM Opt_WarnDerivingTypeable
; when warn $ addWarnTc
$ ptext (sLit "Deriving") <+> quotes (ppr typeableClassName) <+>
ptext (sLit "has no effect: all types now auto-derive Typeable") }
------------------------------------------------------------------
deriveTyData :: [TyVar] -> TyCon -> [Type] -- LHS of data or data instance
-- Can be a data instance, hence [Type] args
-> LHsType Name -- The deriving predicate
-> TcM [EarlyDerivSpec]
-- The deriving clause of a data or newtype declaration
-- I.e. not standalone deriving
deriveTyData tvs tc tc_args (L loc deriv_pred)
= setSrcSpan loc $ -- Use the location of the 'deriving' item
do { (deriv_tvs, cls, cls_tys, cls_arg_kind)
<- tcExtendTyVarEnv tvs $
tcHsDeriv deriv_pred
-- Deriving preds may (now) mention
-- the type variables for the type constructor, hence tcExtendTyVarenv
-- The "deriv_pred" is a LHsType to take account of the fact that for
-- newtype deriving we allow deriving (forall a. C [a]).
-- Typeable is special, because Typeable :: forall k. k -> Constraint
-- so the argument kind 'k' is not decomposable by splitKindFunTys
-- as is the case for all other derivable type classes
; if className cls == typeableClassName
then do warnUselessTypeable
return []
else
do { -- Given data T a b c = ... deriving( C d ),
-- we want to drop type variables from T so that (C d (T a)) is well-kinded
let (arg_kinds, _) = splitKindFunTys cls_arg_kind
n_args_to_drop = length arg_kinds
n_args_to_keep = tyConArity tc - n_args_to_drop
(tc_args_to_keep, args_to_drop)
= splitAt n_args_to_keep tc_args
inst_ty_kind = typeKind (mkTyConApp tc tc_args_to_keep)
dropped_tvs = tyVarsOfTypes args_to_drop
-- Match up the kinds, and apply the resulting kind substitution
-- to the types. See Note [Unify kinds in deriving]
-- We are assuming the tycon tyvars and the class tyvars are distinct
mb_match = tcUnifyTy inst_ty_kind cls_arg_kind
Just kind_subst = mb_match
(univ_kvs, univ_tvs) = partition isKindVar $ varSetElems $
mkVarSet deriv_tvs `unionVarSet`
tyVarsOfTypes tc_args_to_keep
univ_kvs' = filter (`notElemTvSubst` kind_subst) univ_kvs
(subst', univ_tvs') = mapAccumL substTyVarBndr kind_subst univ_tvs
final_tc_args = substTys subst' tc_args_to_keep
final_cls_tys = substTys subst' cls_tys
; traceTc "derivTyData1" (vcat [ pprTvBndrs tvs, ppr tc, ppr tc_args, ppr deriv_pred
, pprTvBndrs (varSetElems $ tyVarsOfTypes tc_args)
, ppr n_args_to_keep, ppr n_args_to_drop
, ppr inst_ty_kind, ppr cls_arg_kind, ppr mb_match
, ppr final_tc_args, ppr final_cls_tys ])
-- Check that the result really is well-kinded
; checkTc (n_args_to_keep >= 0 && isJust mb_match)
(derivingKindErr tc cls cls_tys cls_arg_kind)
; traceTc "derivTyData2" (vcat [ ppr univ_tvs ])
; checkTc (allDistinctTyVars args_to_drop && -- (a) and (b)
not (any (`elemVarSet` dropped_tvs) univ_tvs)) -- (c)
(derivingEtaErr cls final_cls_tys (mkTyConApp tc final_tc_args))
-- Check that
-- (a) The args to drop are all type variables; eg reject:
-- data instance T a Int = .... deriving( Monad )
-- (b) The args to drop are all *distinct* type variables; eg reject:
-- class C (a :: * -> * -> *) where ...
-- data instance T a a = ... deriving( C )
-- (c) The type class args, or remaining tycon args,
-- do not mention any of the dropped type variables
-- newtype T a s = ... deriving( ST s )
-- newtype K a a = ... deriving( Monad )
; spec <- mkEqnHelp Nothing (univ_kvs' ++ univ_tvs')
cls final_cls_tys tc final_tc_args Nothing
; return [spec] } }
{-
Note [Unify kinds in deriving]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (Trac #8534)
data T a b = MkT a deriving( Functor )
-- where Functor :: (*->*) -> Constraint
So T :: forall k. * -> k -> *. We want to get
instance Functor (T * (a:*)) where ...
Notice the '*' argument to T.
Moreover, as well as instantiating T's kind arguments, we may need to instantiate
C's kind args. Consider (Trac #8865):
newtype T a b = MkT (Either a b) deriving( Category )
where
Category :: forall k. (k -> k -> *) -> Constraint
We need to generate the instance
instance Category * (Either a) where ...
Notice the '*' argument to Category.
So we need to
* drop arguments from (T a b) to match the number of
arrows in the (last argument of the) class;
* and then *unify* kind of the remaining type against the
expected kind, to figure out how to instantiate C's and T's
kind arguments.
In the two examples,
* we unify kind-of( T k (a:k) ) ~ kind-of( Functor )
i.e. (k -> *) ~ (* -> *) to find k:=*.
yielding k:=*
* we unify kind-of( Either ) ~ kind-of( Category )
i.e. (* -> * -> *) ~ (k -> k -> k)
yielding k:=*
Now we get a kind substitution. We then need to:
1. Remove the substituted-out kind variables from the quantified kind vars
2. Apply the substitution to the kinds of quantified *type* vars
(and extend the substitution to reflect this change)
3. Apply that extended substitution to the non-dropped args (types and
kinds) of the type and class
Forgetting step (2) caused Trac #8893:
data V a = V [a] deriving Functor
data P (x::k->*) (a:k) = P (x a) deriving Functor
data C (x::k->*) (a:k) = C (V (P x a)) deriving Functor
When deriving Functor for P, we unify k to *, but we then want
an instance $df :: forall (x:*->*). Functor x => Functor (P * (x:*->*))
and similarly for C. Notice the modified kind of x, both at binding
and occurrence sites.
-}
mkEqnHelp :: Maybe OverlapMode
-> [TyVar]
-> Class -> [Type]
-> TyCon -> [Type]
-> DerivContext -- Just => context supplied (standalone deriving)
-- Nothing => context inferred (deriving on data decl)
-> TcRn EarlyDerivSpec
-- Make the EarlyDerivSpec for an instance
-- forall tvs. theta => cls (tys ++ [ty])
-- where the 'theta' is optional (that's the Maybe part)
-- Assumes that this declaration is well-kinded
mkEqnHelp overlap_mode tvs cls cls_tys tycon tc_args mtheta
= do { -- Find the instance of a data family
-- Note [Looking up family instances for deriving]
fam_envs <- tcGetFamInstEnvs
; let (rep_tc, rep_tc_args, _co) = tcLookupDataFamInst fam_envs tycon tc_args
-- If it's still a data family, the lookup failed; i.e no instance exists
; when (isDataFamilyTyCon rep_tc)
(bale_out (ptext (sLit "No family instance for") <+> quotes (pprTypeApp tycon tc_args)))
-- For standalone deriving (mtheta /= Nothing),
-- check that all the data constructors are in scope.
; rdr_env <- getGlobalRdrEnv
; let data_con_names = map dataConName (tyConDataCons rep_tc)
hidden_data_cons = not (isWiredInName (tyConName rep_tc)) &&
(isAbstractTyCon rep_tc ||
any not_in_scope data_con_names)
not_in_scope dc = null (lookupGRE_Name rdr_env dc)
-- Make a Qual RdrName that will do for each DataCon
-- so we can report it as used (Trac #7969)
data_con_rdrs = [ greUsedRdrName gre
| dc_name <- data_con_names
, gre : _ <- [lookupGRE_Name rdr_env dc_name]
, not (isLocalGRE gre) ]
; addUsedRdrNames data_con_rdrs
; unless (isNothing mtheta || not hidden_data_cons)
(bale_out (derivingHiddenErr tycon))
; dflags <- getDynFlags
; if isDataTyCon rep_tc then
mkDataTypeEqn dflags overlap_mode tvs cls cls_tys
tycon tc_args rep_tc rep_tc_args mtheta
else
mkNewTypeEqn dflags overlap_mode tvs cls cls_tys
tycon tc_args rep_tc rep_tc_args mtheta }
where
bale_out msg = failWithTc (derivingThingErr False cls cls_tys (mkTyConApp tycon tc_args) msg)
{-
Note [Looking up family instances for deriving]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tcLookupFamInstExact is an auxiliary lookup wrapper which requires
that looked-up family instances exist. If called with a vanilla
tycon, the old type application is simply returned.
If we have
data instance F () = ... deriving Eq
data instance F () = ... deriving Eq
then tcLookupFamInstExact will be confused by the two matches;
but that can't happen because tcInstDecls1 doesn't call tcDeriving
if there are any overlaps.
There are two other things that might go wrong with the lookup.
First, we might see a standalone deriving clause
deriving Eq (F ())
when there is no data instance F () in scope.
Note that it's OK to have
data instance F [a] = ...
deriving Eq (F [(a,b)])
where the match is not exact; the same holds for ordinary data types
with standalone deriving declarations.
Note [Deriving, type families, and partial applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When there are no type families, it's quite easy:
newtype S a = MkS [a]
-- :CoS :: S ~ [] -- Eta-reduced
instance Eq [a] => Eq (S a) -- by coercion sym (Eq (:CoS a)) : Eq [a] ~ Eq (S a)
instance Monad [] => Monad S -- by coercion sym (Monad :CoS) : Monad [] ~ Monad S
When type families are involved it's trickier:
data family T a b
newtype instance T Int a = MkT [a] deriving( Eq, Monad )
-- :RT is the representation type for (T Int a)
-- :Co:RT :: :RT ~ [] -- Eta-reduced!
-- :CoF:RT a :: T Int a ~ :RT a -- Also eta-reduced!
instance Eq [a] => Eq (T Int a) -- easy by coercion
-- d1 :: Eq [a]
-- d2 :: Eq (T Int a) = d1 |> Eq (sym (:Co:RT a ; :coF:RT a))
instance Monad [] => Monad (T Int) -- only if we can eta reduce???
-- d1 :: Monad []
-- d2 :: Monad (T Int) = d1 |> Monad (sym (:Co:RT ; :coF:RT))
Note the need for the eta-reduced rule axioms. After all, we can
write it out
instance Monad [] => Monad (T Int) -- only if we can eta reduce???
return x = MkT [x]
... etc ...
See Note [Eta reduction for data family axioms] in TcInstDcls.
************************************************************************
* *
Deriving data types
* *
************************************************************************
-}
mkDataTypeEqn :: DynFlags
-> Maybe OverlapMode
-> [Var] -- Universally quantified type variables in the instance
-> Class -- Class for which we need to derive an instance
-> [Type] -- Other parameters to the class except the last
-> TyCon -- Type constructor for which the instance is requested
-- (last parameter to the type class)
-> [Type] -- Parameters to the type constructor
-> TyCon -- rep of the above (for type families)
-> [Type] -- rep of the above
-> DerivContext -- Context of the instance, for standalone deriving
-> TcRn EarlyDerivSpec -- Return 'Nothing' if error
mkDataTypeEqn dflags overlap_mode tvs cls cls_tys
tycon tc_args rep_tc rep_tc_args mtheta
= case checkSideConditions dflags mtheta cls cls_tys rep_tc rep_tc_args of
-- NB: pass the *representation* tycon to checkSideConditions
NonDerivableClass msg -> bale_out (nonStdErr cls $$ msg)
DerivableClassError msg -> bale_out msg
CanDerive -> go_for_it
DerivableViaInstance -> go_for_it
where
go_for_it = mk_data_eqn overlap_mode tvs cls tycon tc_args rep_tc rep_tc_args mtheta
bale_out msg = failWithTc (derivingThingErr False cls cls_tys (mkTyConApp tycon tc_args) msg)
mk_data_eqn :: Maybe OverlapMode -> [TyVar] -> Class
-> TyCon -> [TcType] -> TyCon -> [TcType] -> DerivContext
-> TcM EarlyDerivSpec
mk_data_eqn overlap_mode tvs cls tycon tc_args rep_tc rep_tc_args mtheta
= do loc <- getSrcSpanM
dfun_name <- newDFunName' cls tycon
case mtheta of
Nothing -> do --Infer context
inferred_constraints <- inferConstraints cls inst_tys rep_tc rep_tc_args
return $ InferTheta $ DS
{ ds_loc = loc
, ds_name = dfun_name, ds_tvs = tvs
, ds_cls = cls, ds_tys = inst_tys
, ds_tc = rep_tc, ds_tc_args = rep_tc_args
, ds_theta = inferred_constraints
, ds_overlap = overlap_mode
, ds_newtype = False }
Just theta -> do -- Specified context
return $ GivenTheta $ DS
{ ds_loc = loc
, ds_name = dfun_name, ds_tvs = tvs
, ds_cls = cls, ds_tys = inst_tys
, ds_tc = rep_tc, ds_tc_args = rep_tc_args
, ds_theta = theta
, ds_overlap = overlap_mode
, ds_newtype = False }
where
inst_tys = [mkTyConApp tycon tc_args]
----------------------
inferConstraints :: Class -> [TcType]
-> TyCon -> [TcType]
-> TcM ThetaOrigin
-- Generate a sufficiently large set of constraints that typechecking the
-- generated method definitions should succeed. This set will be simplified
-- before being used in the instance declaration
inferConstraints cls inst_tys rep_tc rep_tc_args
| cls `hasKey` genClassKey -- Generic constraints are easy
= return []
| cls `hasKey` gen1ClassKey -- Gen1 needs Functor
= ASSERT(length rep_tc_tvs > 0) -- See Note [Getting base classes]
do { functorClass <- tcLookupClass functorClassName
; return (con_arg_constraints (get_gen1_constraints functorClass)) }
| otherwise -- The others are a bit more complicated
= ASSERT2( equalLength rep_tc_tvs all_rep_tc_args, ppr cls <+> ppr rep_tc )
do { traceTc "inferConstraints" (vcat [ppr cls <+> ppr inst_tys, ppr arg_constraints])
; return (stupid_constraints ++ extra_constraints
++ sc_constraints
++ arg_constraints) }
where
arg_constraints = con_arg_constraints get_std_constrained_tys
-- Constraints arising from the arguments of each constructor
con_arg_constraints :: (CtOrigin -> Type -> [PredOrigin]) -> [PredOrigin]
con_arg_constraints get_arg_constraints
= [ pred
| data_con <- tyConDataCons rep_tc
, (arg_n, arg_ty) <- ASSERT( isVanillaDataCon data_con )
zip [1..] $ -- ASSERT is precondition of dataConInstOrigArgTys
dataConInstOrigArgTys data_con all_rep_tc_args
, not (isUnLiftedType arg_ty)
, let orig = DerivOriginDC data_con arg_n
, pred <- get_arg_constraints orig arg_ty ]
-- No constraints for unlifted types
-- See Note [Deriving and unboxed types]
-- For functor-like classes, two things are different
-- (a) We recurse over argument types to generate constraints
-- See Functor examples in TcGenDeriv
-- (b) The rep_tc_args will be one short
is_functor_like = getUnique cls `elem` functorLikeClassKeys
|| onlyOneAndTypeConstr inst_tys
onlyOneAndTypeConstr [inst_ty] = typeKind inst_ty `tcEqKind` a2a_kind
onlyOneAndTypeConstr _ = False
a2a_kind = mkArrowKind liftedTypeKind liftedTypeKind
get_gen1_constraints functor_cls orig ty
= mk_functor_like_constraints orig functor_cls $
get_gen1_constrained_tys last_tv ty
get_std_constrained_tys :: CtOrigin -> Type -> [PredOrigin]
get_std_constrained_tys orig ty
| is_functor_like = mk_functor_like_constraints orig cls $
deepSubtypesContaining last_tv ty
| otherwise = [mkPredOrigin orig (mkClassPred cls [ty])]
mk_functor_like_constraints :: CtOrigin -> Class -> [Type] -> [PredOrigin]
-- 'cls' is Functor or Traversable etc
-- For each type, generate two constraints: (cls ty, kind(ty) ~ (*->*))
-- The second constraint checks that the first is well-kinded.
-- Lacking that, as Trac #10561 showed, we can end up generating an
-- ill-kinded instance.
mk_functor_like_constraints orig cls tys
= [ mkPredOrigin orig pred
| ty <- tys
, pred <- [ mkClassPred cls [ty]
, mkEqPred (typeKind ty) a2a_kind] ]
rep_tc_tvs = tyConTyVars rep_tc
last_tv = last rep_tc_tvs
all_rep_tc_args | cls `hasKey` gen1ClassKey || is_functor_like
= rep_tc_args ++ [mkTyVarTy last_tv]
| otherwise = rep_tc_args
-- Constraints arising from superclasses
-- See Note [Superclasses of derived instance]
sc_constraints = mkThetaOrigin DerivOrigin $
substTheta (zipOpenTvSubst (classTyVars cls) inst_tys) (classSCTheta cls)
-- Stupid constraints
stupid_constraints = mkThetaOrigin DerivOrigin $
substTheta subst (tyConStupidTheta rep_tc)
subst = zipTopTvSubst rep_tc_tvs all_rep_tc_args
-- Extra Data constraints
-- The Data class (only) requires that for
-- instance (...) => Data (T t1 t2)
-- IF t1:*, t2:*
-- THEN (Data t1, Data t2) are among the (...) constraints
-- Reason: when the IF holds, we generate a method
-- dataCast2 f = gcast2 f
-- and we need the Data constraints to typecheck the method
extra_constraints
| cls `hasKey` dataClassKey
, all (isLiftedTypeKind . typeKind) rep_tc_args
= [mkPredOrigin DerivOrigin (mkClassPred cls [ty]) | ty <- rep_tc_args]
| otherwise
= []
{-
Note [Getting base classes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Functor and Typeable are defined in package 'base', and that is not available
when compiling 'ghc-prim'. So we must be careful that 'deriving' for stuff in
ghc-prim does not use Functor or Typeable implicitly via these lookups.
Note [Deriving and unboxed types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We have some special hacks to support things like
data T = MkT Int# deriving ( Show )
Specifically, we use TcGenDeriv.box to box the Int# into an Int
(which we know how to show), and append a '#'. Parentheses are not required
for unboxed values (`MkT -3#` is a valid expression).
Note [Deriving any class]
~~~~~~~~~~~~~~~~~~~~~~~~~
Classic uses of a deriving clause, or a standalone-deriving declaration, are
for:
* a built-in class like Eq or Show, for which GHC knows how to generate
the instance code
* a newtype, via the mechanism enabled by GeneralizedNewtypeDeriving
The DeriveAnyClass extension adds a third way to derive instances, based on
empty instance declarations.
The canonical use case is in combination with GHC.Generics and default method
signatures. These allow us to have instance declarations being empty, but still
useful, e.g.
data T a = ...blah..blah... deriving( Generic )
instance C a => C (T a) -- No 'where' clause
where C is some "random" user-defined class.
This boilerplate code can be replaced by the more compact
data T a = ...blah..blah... deriving( Generic, C )
if DeriveAnyClass is enabled.
This is not restricted to Generics; any class can be derived, simply giving
rise to an empty instance.
Unfortunately, it is not clear how to determine the context (in case of
standard deriving; in standalone deriving, the user provides the context).
GHC uses the same heuristic for figuring out the class context that it uses for
Eq in the case of *-kinded classes, and for Functor in the case of
* -> *-kinded classes. That may not be optimal, and may even be wrong. But in such
cases, standalone deriving can still be used.
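As an illustrative sketch (the class, method, and constructor names below are
made up for this note, not taken from any library), the default-method-signature
pattern looks like

    {-# LANGUAGE DeriveAnyClass, DefaultSignatures #-}
    class Describe a where
      describe :: a -> String
      default describe :: Show a => a -> String
      describe = show

    data T = MkT Int deriving( Show, Describe )

The derived 'instance Describe T' is empty; a call like 'describe (MkT 3)'
falls through to the default method, which needs only the Show instance that
is derived alongside it.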
-}
------------------------------------------------------------------
-- Check side conditions that dis-allow derivability for particular classes
-- This is *apart* from the newtype-deriving mechanism
--
-- Here we get the representation tycon in case of family instances as it has
-- the data constructors - but we need to be careful to fall back to the
-- family tycon (with indexes) in error messages.
data DerivStatus = CanDerive -- Standard class, can derive
| DerivableClassError SDoc -- Standard class, but can't do it
| DerivableViaInstance -- See Note [Deriving any class]
| NonDerivableClass SDoc -- Non-standard class
-- A "standard" class is one defined in the Haskell report which GHC knows how
-- to generate code for, such as Eq, Ord, Ix, etc.
checkSideConditions :: DynFlags -> DerivContext -> Class -> [TcType]
-> TyCon -> [Type] -- tycon and its parameters
-> DerivStatus
checkSideConditions dflags mtheta cls cls_tys rep_tc rep_tc_args
| Just cond <- sideConditions mtheta cls
= case (cond (dflags, rep_tc, rep_tc_args)) of
NotValid err -> DerivableClassError err -- Class-specific error
IsValid | null cls_tys -> CanDerive -- All derivable classes are unary, so
-- cls_tys (the type args other than last)
-- should be null
| otherwise -> DerivableClassError (classArgsErr cls cls_tys) -- e.g. deriving( Eq s )
| otherwise = maybe DerivableViaInstance NonDerivableClass
(canDeriveAnyClass dflags rep_tc cls)
classArgsErr :: Class -> [Type] -> SDoc
classArgsErr cls cls_tys = quotes (ppr (mkClassPred cls cls_tys)) <+> ptext (sLit "is not a class")
nonStdErr :: Class -> SDoc
nonStdErr cls =
quotes (ppr cls)
<+> ptext (sLit "is not a standard derivable class (Eq, Show, etc.)")
sideConditions :: DerivContext -> Class -> Maybe Condition
sideConditions mtheta cls
| cls_key == eqClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == ordClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == showClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == readClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == enumClassKey = Just (cond_std `andCond` cond_isEnumeration)
| cls_key == ixClassKey = Just (cond_std `andCond` cond_enumOrProduct cls)
| cls_key == boundedClassKey = Just (cond_std `andCond` cond_enumOrProduct cls)
| cls_key == dataClassKey = Just (checkFlag Opt_DeriveDataTypeable `andCond`
cond_std `andCond`
cond_args cls)
| cls_key == functorClassKey = Just (checkFlag Opt_DeriveFunctor `andCond`
cond_vanilla `andCond`
cond_functorOK True False)
| cls_key == foldableClassKey = Just (checkFlag Opt_DeriveFoldable `andCond`
cond_vanilla `andCond`
cond_functorOK False True)
-- Functor/Fold/Trav works ok
-- for rank-n types
| cls_key == traversableClassKey = Just (checkFlag Opt_DeriveTraversable `andCond`
cond_vanilla `andCond`
cond_functorOK False False)
| cls_key == genClassKey = Just (checkFlag Opt_DeriveGeneric `andCond`
cond_vanilla `andCond`
cond_RepresentableOk)
| cls_key == gen1ClassKey = Just (checkFlag Opt_DeriveGeneric `andCond`
cond_vanilla `andCond`
cond_Representable1Ok)
| cls_key == liftClassKey = Just (checkFlag Opt_DeriveLift `andCond`
cond_vanilla `andCond`
cond_args cls)
| otherwise = Nothing
where
cls_key = getUnique cls
cond_std = cond_stdOK mtheta False -- Vanilla data constructors, at least one,
-- and monotype arguments
cond_vanilla = cond_stdOK mtheta True -- Vanilla data constructors but
-- allow no data cons or polytype arguments
type Condition = (DynFlags, TyCon, [Type]) -> Validity
-- first Bool is whether or not we are allowed to derive Data and Typeable
-- second Bool is whether or not we are allowed to derive Functor
-- TyCon is the *representation* tycon if the data type is an indexed one
-- [Type] are the type arguments to the (representation) TyCon
-- Nothing => OK
orCond :: Condition -> Condition -> Condition
orCond c1 c2 tc
= case (c1 tc, c2 tc) of
(IsValid, _) -> IsValid -- c1 succeeds
(_, IsValid) -> IsValid -- c2 succeeds
(NotValid x, NotValid y) -> NotValid (x $$ ptext (sLit " or") $$ y)
-- Both fail
andCond :: Condition -> Condition -> Condition
andCond c1 c2 tc = c1 tc `andValid` c2 tc
cond_stdOK :: DerivContext -- Says whether this is standalone deriving or not;
-- if standalone, we just say "yes, go for it"
-> Bool -- True <=> permissive: allow higher rank
-- args and no data constructors
-> Condition
cond_stdOK (Just _) _ _
= IsValid -- Don't check these conservative conditions for
-- standalone deriving; just generate the code
-- and let the typechecker handle the result
cond_stdOK Nothing permissive (_, rep_tc, _)
| null data_cons
, not permissive = NotValid (no_cons_why rep_tc $$ suggestion)
| not (null con_whys) = NotValid (vcat con_whys $$ suggestion)
| otherwise = IsValid
where
suggestion = ptext (sLit "Possible fix: use a standalone deriving declaration instead")
data_cons = tyConDataCons rep_tc
con_whys = getInvalids (map check_con data_cons)
check_con :: DataCon -> Validity
check_con con
| not (isVanillaDataCon con)
= NotValid (badCon con (ptext (sLit "has existentials or constraints in its type")))
| not (permissive || all isTauTy (dataConOrigArgTys con))
= NotValid (badCon con (ptext (sLit "has a higher-rank type")))
| otherwise
= IsValid
no_cons_why :: TyCon -> SDoc
no_cons_why rep_tc = quotes (pprSourceTyCon rep_tc) <+>
ptext (sLit "must have at least one data constructor")
cond_RepresentableOk :: Condition
cond_RepresentableOk (_, tc, tc_args) = canDoGenerics tc tc_args
cond_Representable1Ok :: Condition
cond_Representable1Ok (_, tc, tc_args) = canDoGenerics1 tc tc_args
cond_enumOrProduct :: Class -> Condition
cond_enumOrProduct cls = cond_isEnumeration `orCond`
(cond_isProduct `andCond` cond_args cls)
cond_args :: Class -> Condition
-- For some classes (eg Eq, Ord) we allow unlifted arg types
-- by generating specialised code. For others (eg Data) we don't.
cond_args cls (_, tc, _)
= case bad_args of
[] -> IsValid
(ty:_) -> NotValid (hang (ptext (sLit "Don't know how to derive") <+> quotes (ppr cls))
2 (ptext (sLit "for type") <+> quotes (ppr ty)))
where
bad_args = [ arg_ty | con <- tyConDataCons tc
, arg_ty <- dataConOrigArgTys con
, isUnLiftedType arg_ty
, not (ok_ty arg_ty) ]
cls_key = classKey cls
ok_ty arg_ty
| cls_key == eqClassKey = check_in arg_ty ordOpTbl
| cls_key == ordClassKey = check_in arg_ty ordOpTbl
| cls_key == showClassKey = check_in arg_ty boxConTbl
| cls_key == liftClassKey = check_in arg_ty litConTbl
| otherwise = False -- Read, Ix etc
check_in :: Type -> [(Type,a)] -> Bool
check_in arg_ty tbl = any (eqType arg_ty . fst) tbl
cond_isEnumeration :: Condition
cond_isEnumeration (_, rep_tc, _)
| isEnumerationTyCon rep_tc = IsValid
| otherwise = NotValid why
where
why = sep [ quotes (pprSourceTyCon rep_tc) <+>
ptext (sLit "must be an enumeration type")
, ptext (sLit "(an enumeration consists of one or more nullary, non-GADT constructors)") ]
-- See Note [Enumeration types] in TyCon
cond_isProduct :: Condition
cond_isProduct (_, rep_tc, _)
| isProductTyCon rep_tc = IsValid
| otherwise = NotValid why
where
why = quotes (pprSourceTyCon rep_tc) <+>
ptext (sLit "must have precisely one constructor")
functorLikeClassKeys :: [Unique]
functorLikeClassKeys = [functorClassKey, foldableClassKey, traversableClassKey]
cond_functorOK :: Bool -> Bool -> Condition
-- OK for Functor/Foldable/Traversable class
-- Currently: (a) at least one argument
-- (b) don't use argument contravariantly
-- (c) don't use argument in the wrong place, e.g. data T a = T (X a a)
-- (d) optionally: don't use function types
-- (e) no "stupid context" on data type
cond_functorOK allowFunctions allowExQuantifiedLastTyVar (_, rep_tc, _)
| null tc_tvs
= NotValid (ptext (sLit "Data type") <+> quotes (ppr rep_tc)
<+> ptext (sLit "must have some type parameters"))
| not (null bad_stupid_theta)
= NotValid (ptext (sLit "Data type") <+> quotes (ppr rep_tc)
<+> ptext (sLit "must not have a class context:") <+> pprTheta bad_stupid_theta)
| otherwise
= allValid (map check_con data_cons)
where
tc_tvs = tyConTyVars rep_tc
Just (_, last_tv) = snocView tc_tvs
bad_stupid_theta = filter is_bad (tyConStupidTheta rep_tc)
is_bad pred = last_tv `elemVarSet` tyVarsOfType pred
data_cons = tyConDataCons rep_tc
check_con con = allValid (check_universal con : foldDataConArgs (ft_check con) con)
check_universal :: DataCon -> Validity
check_universal con
| allowExQuantifiedLastTyVar
= IsValid -- See Note [DeriveFoldable with ExistentialQuantification]
-- in TcGenDeriv
| Just tv <- getTyVar_maybe (last (tyConAppArgs (dataConOrigResTy con)))
, tv `elem` dataConUnivTyVars con
, not (tv `elemVarSet` tyVarsOfTypes (dataConTheta con))
= IsValid -- See Note [Check that the type variable is truly universal]
| otherwise
= NotValid (badCon con existential)
ft_check :: DataCon -> FFoldType Validity
ft_check con = FT { ft_triv = IsValid, ft_var = IsValid
, ft_co_var = NotValid (badCon con covariant)
, ft_fun = \x y -> if allowFunctions then x `andValid` y
else NotValid (badCon con functions)
, ft_tup = \_ xs -> allValid xs
, ft_ty_app = \_ x -> x
, ft_bad_app = NotValid (badCon con wrong_arg)
, ft_forall = \_ x -> x }
existential = ptext (sLit "must be truly polymorphic in the last argument of the data type")
covariant = ptext (sLit "must not use the type variable in a function argument")
functions = ptext (sLit "must not contain function types")
wrong_arg = ptext (sLit "must use the type variable only as the last argument of a data type")
checkFlag :: ExtensionFlag -> Condition
checkFlag flag (dflags, _, _)
| xopt flag dflags = IsValid
| otherwise = NotValid why
where
why = ptext (sLit "You need ") <> text flag_str
<+> ptext (sLit "to derive an instance for this class")
flag_str = case [ flagSpecName f | f <- xFlags , flagSpecFlag f == flag ] of
[s] -> s
other -> pprPanic "checkFlag" (ppr other)
std_class_via_coercible :: Class -> Bool
-- These standard classes can be derived for a newtype
-- using the coercible trick, even without -XGeneralizedNewtypeDeriving,
-- because doing so gives the same results as generating the boilerplate
std_class_via_coercible clas
= classKey clas `elem` [eqClassKey, ordClassKey, ixClassKey, boundedClassKey]
-- Not Read/Show/Lift because they respect the type
-- Not Enum, because newtypes are never in Enum
non_coercible_class :: Class -> Bool
-- *Never* derive Read, Show, Typeable, Data, Generic, Generic1, Lift
-- by Coercible, even with -XGeneralizedNewtypeDeriving
-- Also, avoid Traversable, as the Coercible-derived instance and the "normal"-derived
-- instance behave differently if there's a non-lawful Applicative out there.
-- Besides, with roles, Coercible-deriving Traversable is ill-roled.
non_coercible_class cls
= classKey cls `elem` ([ readClassKey, showClassKey, dataClassKey
, genClassKey, gen1ClassKey, typeableClassKey
, traversableClassKey, liftClassKey ])
badCon :: DataCon -> SDoc -> SDoc
badCon con msg = ptext (sLit "Constructor") <+> quotes (ppr con) <+> msg
{-
Note [Check that the type variable is truly universal]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For Functor and Traversable instances, we must check that the *last argument*
of the type constructor is used truly universally quantified. Example
data T a b where
T1 :: a -> b -> T a b -- Fine! Vanilla H-98
T2 :: b -> c -> T a b -- Fine! Existential c, but we can still map over 'b'
T3 :: b -> T Int b -- Fine! Constraint 'a', but 'b' is still polymorphic
T4 :: Ord b => b -> T a b -- No! 'b' is constrained
T5 :: b -> T b b -- No! 'b' is constrained
T6 :: T a (b,b) -- No! 'b' is constrained
Notice that only the first of these constructors is vanilla H-98. We only
need to take care about the last argument (b in this case). See Trac #8678.
Eg. for T1-T3 we can write
fmap f (T1 a b) = T1 a (f b)
fmap f (T2 b c) = T2 (f b) c
fmap f (T3 x) = T3 (f x)
We need not perform these checks for Foldable instances, however, since
functions in Foldable can only consume existentially quantified type variables,
rather than produce them (as is the case in Functor and Traversable functions.)
As a result, T can have a derived Foldable instance:
foldr f z (T1 a b) = f b z
foldr f z (T2 b c) = f b z
foldr f z (T3 x) = f x z
foldr f z (T4 x) = f x z
foldr f z (T5 x) = f x z
foldr _ z T6 = z
See Note [DeriveFoldable with ExistentialQuantification] in TcGenDeriv.
Note [Superclasses of derived instance]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general, a derived instance decl needs the superclasses of the derived
class too. So if we have
data T a = ...deriving( Ord )
then the initial context for Ord (T a) should include Eq (T a). Often this is
redundant; we'll also generate an Ord constraint for each constructor argument,
and that will probably generate enough constraints to make the Eq (T a) constraint
be satisfied too. But not always; consider:
data S a = S
instance Eq (S a)
instance Ord (S a)
data T a = MkT (S a) deriving( Ord )
instance Num a => Eq (T a)
The derived instance for (Ord (T a)) must have a (Num a) constraint!
Similarly consider:
data T a = MkT deriving( Data, Typeable )
Here there *is* no argument field, but we must nevertheless generate
a context for the Data instances:
instance Typeable a => Data (T a) where ...
************************************************************************
* *
Deriving newtypes
* *
************************************************************************
-}
mkNewTypeEqn :: DynFlags -> Maybe OverlapMode -> [Var] -> Class
-> [Type] -> TyCon -> [Type] -> TyCon -> [Type]
-> DerivContext
-> TcRn EarlyDerivSpec
mkNewTypeEqn dflags overlap_mode tvs
cls cls_tys tycon tc_args rep_tycon rep_tc_args mtheta
-- Want: instance (...) => cls (cls_tys ++ [tycon tc_args]) where ...
| ASSERT( length cls_tys + 1 == classArity cls )
might_derive_via_coercible && ((newtype_deriving && not deriveAnyClass)
|| std_class_via_coercible cls)
= do traceTc "newtype deriving:" (ppr tycon <+> ppr rep_tys <+> ppr all_preds)
dfun_name <- newDFunName' cls tycon
loc <- getSrcSpanM
case mtheta of
Just theta -> return $ GivenTheta $ DS
{ ds_loc = loc
, ds_name = dfun_name, ds_tvs = varSetElemsKvsFirst dfun_tvs
, ds_cls = cls, ds_tys = inst_tys
, ds_tc = rep_tycon, ds_tc_args = rep_tc_args
, ds_theta = theta
, ds_overlap = overlap_mode
, ds_newtype = True }
Nothing -> return $ InferTheta $ DS
{ ds_loc = loc
, ds_name = dfun_name, ds_tvs = varSetElemsKvsFirst dfun_tvs
, ds_cls = cls, ds_tys = inst_tys
, ds_tc = rep_tycon, ds_tc_args = rep_tc_args
, ds_theta = all_preds
, ds_overlap = overlap_mode
, ds_newtype = True }
| otherwise
= case checkSideConditions dflags mtheta cls cls_tys rep_tycon rep_tc_args of
-- Error with standard class
DerivableClassError msg
| might_derive_via_coercible -> bale_out (msg $$ suggest_gnd)
| otherwise -> bale_out msg
-- Must use newtype deriving or DeriveAnyClass
NonDerivableClass _msg
-- Too hard, even with newtype deriving
| newtype_deriving -> bale_out cant_derive_err
-- Try newtype deriving!
-- Here we suggest GeneralizedNewtypeDeriving even in cases where it may
-- not be applicable. See Trac #9600.
| otherwise -> bale_out (non_std $$ suggest_gnd)
-- CanDerive/DerivableViaInstance
_ -> do when (newtype_deriving && deriveAnyClass) $
addWarnTc (sep [ ptext (sLit "Both DeriveAnyClass and GeneralizedNewtypeDeriving are enabled")
, ptext (sLit "Defaulting to the DeriveAnyClass strategy for instantiating") <+> ppr cls ])
go_for_it
where
newtype_deriving = xopt Opt_GeneralizedNewtypeDeriving dflags
deriveAnyClass = xopt Opt_DeriveAnyClass dflags
go_for_it = mk_data_eqn overlap_mode tvs cls tycon tc_args
rep_tycon rep_tc_args mtheta
bale_out = bale_out' newtype_deriving
bale_out' b = failWithTc . derivingThingErr b cls cls_tys inst_ty
non_std = nonStdErr cls
suggest_gnd = ptext (sLit "Try GeneralizedNewtypeDeriving for GHC's newtype-deriving extension")
-- Here is the plan for newtype derivings. We see
-- newtype T a1...an = MkT (t ak+1...an) deriving (.., C s1 .. sm, ...)
-- where t is a type,
-- ak+1...an is a suffix of a1..an, and are all tyvars
-- ak+1...an do not occur free in t, nor in the s1..sm
-- (C s1 ... sm) is a *partial application* of class C
-- with the last parameter missing
-- (T a1 .. ak) matches the kind of C's last argument
-- (and hence so does t)
-- The latter kind-check has been done by deriveTyData already,
-- and tc_args are already trimmed
--
-- We generate the instance
-- instance forall ({a1..ak} u fvs(s1..sm)).
-- C s1 .. sm t => C s1 .. sm (T a1...ak)
-- where T a1...ap is the partial application of
-- the LHS of the correct kind and p >= k
--
-- NB: the variables below are:
-- tc_tvs = [a1, ..., an]
-- tyvars_to_keep = [a1, ..., ak]
-- rep_ty = t ak .. an
-- deriv_tvs = fvs(s1..sm) \ tc_tvs
-- tys = [s1, ..., sm]
-- rep_fn' = t
--
-- Running example: newtype T s a = MkT (ST s a) deriving( Monad )
-- We generate the instance
-- instance Monad (ST s) => Monad (T s) where
nt_eta_arity = newTyConEtadArity rep_tycon
-- For newtype T a b = MkT (S a a b), the TyCon machinery already
-- eta-reduces the representation type, so we know that
-- T a ~ S a a
-- That's convenient here, because we may have to apply
-- it to fewer than its original complement of arguments
-- Note [Newtype representation]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Need newTyConRhs (*not* a recursive representation finder)
-- to get the representation type. For example
-- newtype B = MkB Int
-- newtype A = MkA B deriving( Num )
-- We want the Num instance of B, *not* the Num instance of Int,
-- when making the Num instance of A!
rep_inst_ty = newTyConInstRhs rep_tycon rep_tc_args
rep_tys = cls_tys ++ [rep_inst_ty]
rep_pred = mkClassPred cls rep_tys
rep_pred_o = mkPredOrigin DerivOrigin rep_pred
-- rep_pred is the representation dictionary, from which
-- we are going to get all the methods for the newtype
-- dictionary
-- Next we figure out what superclass dictionaries to use
-- See Note [Newtype deriving superclasses] above
cls_tyvars = classTyVars cls
dfun_tvs = tyVarsOfTypes inst_tys
inst_ty = mkTyConApp tycon tc_args
inst_tys = cls_tys ++ [inst_ty]
sc_theta =
mkThetaOrigin DerivOrigin $
substTheta (zipOpenTvSubst cls_tyvars inst_tys) (classSCTheta cls)
-- Next we collect Coercible constraints between
-- the Class method types, instantiated with the representation and the
-- newtype type; precisely the constraints required for the
-- calls to coercible that we are going to generate.
coercible_constraints =
[ let (Pair t1 t2) = mkCoerceClassMethEqn cls (varSetElemsKvsFirst dfun_tvs) inst_tys rep_inst_ty meth
in mkPredOrigin (DerivOriginCoerce meth t1 t2) (mkCoerciblePred t1 t2)
| meth <- classMethods cls ]
-- If there are no tyvars, there's no need
-- to abstract over the dictionaries we need
-- Example: newtype T = MkT Int deriving( C )
-- We get the derived instance
-- instance C T
-- rather than
-- instance C Int => C T
all_preds = rep_pred_o : coercible_constraints ++ sc_theta -- NB: rep_pred comes first
-------------------------------------------------------------------
-- Figuring out whether we can only do this newtype-deriving thing
-- See Note [Determining whether newtype-deriving is appropriate]
might_derive_via_coercible
= not (non_coercible_class cls)
&& eta_ok
&& ats_ok
-- && not (isRecursiveTyCon tycon) -- Note [Recursive newtypes]
-- Check that eta reduction is OK
eta_ok = nt_eta_arity <= length rep_tc_args
-- The newtype can be eta-reduced to match the number
-- of type arguments actually supplied
-- newtype T a b = MkT (S [a] b) deriving( Monad )
-- Here the 'b' must be the same in the rep type (S [a] b)
-- And the [a] must not mention 'b'. That's all handled
-- by nt_eta_arity.
ats_ok = null (classATs cls)
-- No associated types for the class, because we don't
-- currently generate type 'instance' decls; and cannot do
-- so for 'data' instance decls
cant_derive_err
= vcat [ ppUnless eta_ok eta_msg
, ppUnless ats_ok ats_msg ]
eta_msg = ptext (sLit "cannot eta-reduce the representation type enough")
ats_msg = ptext (sLit "the class has associated types")
{-
Note [Recursive newtypes]
~~~~~~~~~~~~~~~~~~~~~~~~~
Newtype deriving works fine, even if the newtype is recursive.
e.g. newtype S1 = S1 [T1 ()]
newtype T1 a = T1 (StateT S1 IO a ) deriving( Monad )
Remember, too, that type families are currently (conservatively) given
a recursive flag, so this also allows newtype deriving to work
for type families.
We used to exclude recursive types, because we had a rather simple
minded way of generating the instance decl:
newtype A = MkA [A]
instance Eq [A] => Eq A -- Makes typechecker loop!
But now we require a simple context, so it's ok.
Note [Determining whether newtype-deriving is appropriate]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we see
newtype NT = MkNT Foo
deriving C
we have to decide how to perform the deriving. Do we do newtype deriving,
or do we do normal deriving? In general, we prefer to do newtype deriving
wherever possible. So, we try newtype deriving unless there's a glaring
reason not to.
Note that newtype deriving might fail, even after we commit to it. This
is because the derived instance uses `coerce`, which must satisfy its
`Coercible` constraint. This is different than other deriving scenarios,
where we're sure that the resulting instance will type-check.
************************************************************************
* *
\subsection[TcDeriv-fixpoint]{Finding the fixed point of \tr{deriving} equations}
* *
************************************************************************
A ``solution'' (to one of the equations) is a list of (k,TyVarTy tv)
terms, which is the final correct RHS for the corresponding original
equation.
\begin{itemize}
\item
Each (k,TyVarTy tv) in a solution constrains only a type
variable, tv.
\item
The (k,TyVarTy tv) pairs in a solution are canonically
ordered by sorting on type variable, tv, (major key) and then class, k,
(minor key)
\end{itemize}
-}
inferInstanceContexts :: [DerivSpec ThetaOrigin] -> TcM [DerivSpec ThetaType]
inferInstanceContexts [] = return []
inferInstanceContexts infer_specs
= do { traceTc "inferInstanceContexts" $ vcat (map pprDerivSpec infer_specs)
; iterate_deriv 1 initial_solutions }
where
------------------------------------------------------------------
-- The initial solutions for the equations claim that each
-- instance has an empty context; this solution is certainly
-- in canonical form.
initial_solutions :: [ThetaType]
initial_solutions = [ [] | _ <- infer_specs ]
------------------------------------------------------------------
-- iterate_deriv calculates the next batch of solutions,
-- compares it with the current one; finishes if they are the
-- same, otherwise recurses with the new solutions.
-- It fails if any iteration fails
iterate_deriv :: Int -> [ThetaType] -> TcM [DerivSpec ThetaType]
iterate_deriv n current_solns
| n > 20 -- Looks as if we are in an infinite loop
-- This can happen if we have -XUndecidableInstances
-- (See TcSimplify.tcSimplifyDeriv.)
= pprPanic "solveDerivEqns: probable loop"
(vcat (map pprDerivSpec infer_specs) $$ ppr current_solns)
| otherwise
= do { -- Extend the inst info from the explicit instance decls
-- with the current set of solutions, and simplify each RHS
inst_specs <- zipWithM newDerivClsInst current_solns infer_specs
; new_solns <- checkNoErrs $
extendLocalInstEnv inst_specs $
mapM gen_soln infer_specs
; if (current_solns `eqSolution` new_solns) then
return [ spec { ds_theta = soln }
| (spec, soln) <- zip infer_specs current_solns ]
else
iterate_deriv (n+1) new_solns }
eqSolution = eqListBy (eqListBy eqType)
------------------------------------------------------------------
gen_soln :: DerivSpec ThetaOrigin -> TcM ThetaType
gen_soln (DS { ds_loc = loc, ds_tvs = tyvars
, ds_cls = clas, ds_tys = inst_tys, ds_theta = deriv_rhs })
= setSrcSpan loc $
addErrCtxt (derivInstCtxt the_pred) $
do { theta <- simplifyDeriv the_pred tyvars deriv_rhs
-- checkValidInstance tyvars theta clas inst_tys
-- Not necessary; see Note [Exotic derived instance contexts]
; traceTc "TcDeriv" (ppr deriv_rhs $$ ppr theta)
-- Claim: the result instance declaration is guaranteed valid
-- Hence no need to call:
-- checkValidInstance tyvars theta clas inst_tys
; return (sortBy cmpType theta) } -- Canonicalise before returning the solution
where
the_pred = mkClassPred clas inst_tys
------------------------------------------------------------------
newDerivClsInst :: ThetaType -> DerivSpec theta -> TcM ClsInst
newDerivClsInst theta (DS { ds_name = dfun_name, ds_overlap = overlap_mode
, ds_tvs = tvs, ds_cls = clas, ds_tys = tys })
= newClsInst overlap_mode dfun_name tvs theta clas tys
extendLocalInstEnv :: [ClsInst] -> TcM a -> TcM a
-- Add new locally-defined instances; don't bother to check
-- for functional dependency errors -- that'll happen in TcInstDcls
extendLocalInstEnv dfuns thing_inside
= do { env <- getGblEnv
; let inst_env' = extendInstEnvList (tcg_inst_env env) dfuns
env' = env { tcg_inst_env = inst_env' }
; setGblEnv env' thing_inside }
{-
***********************************************************************************
* *
* Simplify derived constraints
* *
***********************************************************************************
-}
simplifyDeriv :: PredType
-> [TyVar]
-> ThetaOrigin -- Wanted
-> TcM ThetaType -- Needed
-- Given instance (wanted) => C inst_ty
-- Simplify 'wanted' as much as possible
-- Fail if not possible
simplifyDeriv pred tvs theta
= do { (skol_subst, tvs_skols) <- tcInstSkolTyVars tvs -- Skolemize
-- The constraint solving machinery
-- expects *TcTyVars* not TyVars.
-- We use *non-overlappable* (vanilla) skolems
-- See Note [Overlap and deriving]
; let skol_set = mkVarSet tvs_skols
doc = ptext (sLit "deriving") <+> parens (ppr pred)
; wanted <- mapM (\(PredOrigin t o) -> newWanted o (substTy skol_subst t)) theta
; traceTc "simplifyDeriv" $
vcat [ pprTvBndrs tvs $$ ppr theta $$ ppr wanted, doc ]
; residual_wanted <- solveWantedsTcM wanted
; residual_simple <- zonkSimples (wc_simple residual_wanted)
; let (good, bad) = partitionBagWith get_good residual_simple
-- See Note [Exotic derived instance contexts]
get_good :: Ct -> Either PredType Ct
get_good ct | validDerivPred skol_set p
, isWantedCt ct = Left p
-- NB: residual_wanted may contain unsolved
-- Derived and we stick them into the bad set
-- so that reportUnsolved may decide what to do with them
| otherwise = Right ct
where p = ctPred ct
; traceTc "simplifyDeriv 2" $
vcat [ ppr tvs_skols, ppr residual_simple, ppr good, ppr bad ]
-- If we are deferring type errors, simply ignore any insoluble
-- constraints. They'll come up again when we typecheck the
-- generated instance declaration
; defer <- goptM Opt_DeferTypeErrors
; unless defer (reportAllUnsolved (residual_wanted { wc_simple = bad }))
; let min_theta = mkMinimalBySCs (bagToList good)
subst_skol = zipTopTvSubst tvs_skols $ map mkTyVarTy tvs
-- The reverse substitution (sigh)
; return (substTheta subst_skol min_theta) }
{-
Note [Overlap and deriving]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider some overlapping instances:
instance Show a => Show [a] where ..
instance Show [Char] where ...
Now a data type with deriving:
data T a = MkT [a] deriving( Show )
We want to get the derived instance
instance Show [a] => Show (T a) where...
and NOT
instance Show a => Show (T a) where...
so that the (Show (T Char)) instance does the Right Thing
It's very like the situation when we're inferring the type
of a function
f x = show [x]
and we want to infer
f :: Show [a] => a -> String
BOTTOM LINE: use vanilla, non-overlappable skolems when inferring
the context for the derived instance.
Hence tcInstSkolTyVars not tcInstSuperSkolTyVars
Note [Exotic derived instance contexts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a 'derived' instance declaration, we *infer* the context. It's a
bit unclear what rules we should apply for this; the Haskell report is
silent. Obviously, constraints like (Eq a) are fine, but what about
data T f a = MkT (f a) deriving( Eq )
where we'd get an Eq (f a) constraint. That's probably fine too.
One could go further: consider
data T a b c = MkT (Foo a b c) deriving( Eq )
instance (C Int a, Eq b, Eq c) => Eq (Foo a b c)
Notice that this instance (just) satisfies the Paterson termination
conditions. Then we *could* derive an instance decl like this:
instance (C Int a, Eq b, Eq c) => Eq (T a b c)
even though there is no instance for (C Int a), because there just
*might* be an instance for, say, (C Int Bool) at a site where we
need the equality instance for T's.
However, this seems pretty exotic, and it's quite tricky to allow
this, and yet give sensible error messages in the (much more common)
case where we really want that instance decl for C.
So for now we simply require that the derived instance context
should have only type-variable constraints.
Here is another example:
data Fix f = In (f (Fix f)) deriving( Eq )
Here, if we are prepared to allow -XUndecidableInstances we
could derive the instance
instance Eq (f (Fix f)) => Eq (Fix f)
but this is so delicate that I don't think it should happen inside
'deriving'. If you want this, write it yourself!
NB: if you want to lift this condition, make sure you still meet the
termination conditions! If not, the deriving mechanism generates
larger and larger constraints. Example:
data Succ a = S a
data Seq a = Cons a (Seq (Succ a)) | Nil deriving Show
Note the lack of a Show instance for Succ. First we'll generate
instance (Show (Succ a), Show a) => Show (Seq a)
and then
instance (Show (Succ (Succ a)), Show (Succ a), Show a) => Show (Seq a)
and so on. Instead we want to complain of no instance for (Show (Succ a)).
The bottom line
~~~~~~~~~~~~~~~
Allow constraints which consist only of type variables, with no repeats.
************************************************************************
* *
\subsection[TcDeriv-normal-binds]{Bindings for the various classes}
* *
************************************************************************
After all the trouble to figure out the required context for the
derived instance declarations, all that's left is to chug along to
produce them. They will then be shoved into @tcInstDecls2@, which
will do all its usual business.
There are lots of possibilities for code to generate. Here are
various general remarks.
PRINCIPLES:
\begin{itemize}
\item
We want derived instances of @Eq@ and @Ord@ (both v common) to be
``you-couldn't-do-better-by-hand'' efficient.
\item
Deriving @Show@---also pretty common--- should also be reasonably good code.
\item
Deriving for the other classes isn't that common or that big a deal.
\end{itemize}
PRAGMATICS:
\begin{itemize}
\item
Deriving @Ord@ is done mostly with the 1.3 @compare@ method.
\item
Deriving @Eq@ also uses @compare@, if we're deriving @Ord@, too.
\item
We {\em normally} generate code only for the non-defaulted methods;
there are some exceptions for @Eq@ and (especially) @Ord@...
\item
Sometimes we use a @_con2tag_<tycon>@ function, which returns a data
constructor's numeric (@Int#@) tag. These are generated by
@gen_tag_n_con_binds@, and the heuristic for deciding if one of
these is around is given by @hasCon2TagFun@.
The examples under the different sections below will make this
clearer.
\item
Much less often (really just for deriving @Ix@), we use a
@_tag2con_<tycon>@ function. See the examples.
\item
We use the renamer!!! Reason: we're supposed to be
producing @LHsBinds Name@ for the methods, but that means
producing correctly-uniquified code on the fly. This is entirely
possible (the @TcM@ monad has a @UniqueSupply@), but it is painful.
So, instead, we produce @MonoBinds RdrName@ then heave 'em through
the renamer. What a great hack!
\end{itemize}
-}
-- Generate the InstInfo for the required instance paired with the
-- *representation* tycon for that instance,
-- plus any auxiliary bindings required
--
-- Representation tycons differ from the tycon in the instance signature in
-- case of instances for indexed families.
--
genInst :: CommonAuxiliaries
-> DerivSpec ThetaType
-> TcM (InstInfo RdrName, BagDerivStuff, Maybe Name)
genInst comauxs
spec@(DS { ds_tvs = tvs, ds_tc = rep_tycon, ds_tc_args = rep_tc_args
, ds_theta = theta, ds_newtype = is_newtype, ds_tys = tys
, ds_name = dfun_name, ds_cls = clas, ds_loc = loc })
| is_newtype -- See Note [Bindings for Generalised Newtype Deriving]
= do { inst_spec <- newDerivClsInst theta spec
; traceTc "genInst/is_newtype" (vcat [ppr loc, ppr clas, ppr tvs, ppr tys, ppr rhs_ty])
; return ( InstInfo
{ iSpec = inst_spec
, iBinds = InstBindings
{ ib_binds = gen_Newtype_binds loc clas tvs tys rhs_ty
, ib_tyvars = map Var.varName tvs -- Scope over bindings
, ib_pragmas = []
, ib_extensions = [ Opt_ImpredicativeTypes
, Opt_RankNTypes ]
, ib_derived = True } }
, emptyBag
, Just $ getName $ head $ tyConDataCons rep_tycon ) }
-- See Note [Newtype deriving and unused constructors]
| otherwise
= do { (meth_binds, deriv_stuff) <- genDerivStuff loc clas
dfun_name rep_tycon
(lookupNameEnv comauxs
(tyConName rep_tycon))
; inst_spec <- newDerivClsInst theta spec
; traceTc "newder" (ppr inst_spec)
; let inst_info = InstInfo { iSpec = inst_spec
, iBinds = InstBindings
{ ib_binds = meth_binds
, ib_tyvars = map Var.varName tvs
, ib_pragmas = []
, ib_extensions = []
, ib_derived = True } }
; return ( inst_info, deriv_stuff, Nothing ) }
where
rhs_ty = newTyConInstRhs rep_tycon rep_tc_args
genDerivStuff :: SrcSpan -> Class -> Name -> TyCon
-> Maybe CommonAuxiliary
-> TcM (LHsBinds RdrName, BagDerivStuff)
genDerivStuff loc clas dfun_name tycon comaux_maybe
| let ck = classKey clas
, -- Special case because monadic
Just gk <- lookup ck [(genClassKey, Gen0), (gen1ClassKey, Gen1)]
= let -- TODO NSF: correctly identify when we're building Both instead of One
Just metaTyCons = comaux_maybe -- well-guarded by commonAuxiliaries and genInst
in do
(binds, faminst) <- gen_Generic_binds gk tycon metaTyCons (nameModule dfun_name)
return (binds, unitBag (DerivFamInst faminst))
| otherwise -- Non-monadic generators
= do { dflags <- getDynFlags
; fix_env <- getDataConFixityFun tycon
; return (genDerivedBinds dflags fix_env clas loc tycon) }
getDataConFixityFun :: TyCon -> TcM (Name -> Fixity)
-- If the TyCon is locally defined, we want the local fixity env;
-- but if it is imported (which happens for standalone deriving)
-- we need to get the fixity env from the interface file
-- c.f. RnEnv.lookupFixity, and Trac #9830
getDataConFixityFun tc
= do { this_mod <- getModule
; if nameIsLocalOrFrom this_mod name
then do { fix_env <- getFixityEnv
; return (lookupFixity fix_env) }
else do { iface <- loadInterfaceForName doc name
-- Should already be loaded!
; return (mi_fix_fn iface . nameOccName) } }
where
name = tyConName tc
doc = ptext (sLit "Data con fixities for") <+> ppr name
{-
Note [Bindings for Generalised Newtype Deriving]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
class Eq a => C a where
f :: a -> a
newtype N a = MkN [a] deriving( C )
instance Eq (N a) where ...
The 'deriving C' clause generates, in effect
instance (C [a], Eq a) => C (N a) where
f = coerce (f :: [a] -> [a])
This generates a cast for each method, but allows the superclasses to
be worked out in the usual way. In this case the superclass (Eq (N
a)) will be solved by the explicit Eq (N a) instance. We do *not*
create the superclasses by casting the superclass dictionaries for the
representation type.
See the paper "Safe zero-cost coercions for Haskell".
************************************************************************
* *
\subsection[TcDeriv-taggery-Names]{What con2tag/tag2con functions are available?}
* *
************************************************************************
-}
derivingNullaryErr :: MsgDoc
derivingNullaryErr = ptext (sLit "Cannot derive instances for nullary classes")
derivingKindErr :: TyCon -> Class -> [Type] -> Kind -> MsgDoc
derivingKindErr tc cls cls_tys cls_kind
= hang (ptext (sLit "Cannot derive well-kinded instance of form")
<+> quotes (pprClassPred cls cls_tys <+> parens (ppr tc <+> ptext (sLit "..."))))
2 (ptext (sLit "Class") <+> quotes (ppr cls)
<+> ptext (sLit "expects an argument of kind") <+> quotes (pprKind cls_kind))
derivingEtaErr :: Class -> [Type] -> Type -> MsgDoc
derivingEtaErr cls cls_tys inst_ty
= sep [ptext (sLit "Cannot eta-reduce to an instance of form"),
nest 2 (ptext (sLit "instance (...) =>")
<+> pprClassPred cls (cls_tys ++ [inst_ty]))]
derivingThingErr :: Bool -> Class -> [Type] -> Type -> MsgDoc -> MsgDoc
derivingThingErr newtype_deriving clas tys ty why
= sep [(hang (ptext (sLit "Can't make a derived instance of"))
2 (quotes (ppr pred))
$$ nest 2 extra) <> colon,
nest 2 why]
where
extra | newtype_deriving = ptext (sLit "(even with cunning GeneralizedNewtypeDeriving)")
| otherwise = Outputable.empty
pred = mkClassPred clas (tys ++ [ty])
derivingHiddenErr :: TyCon -> SDoc
derivingHiddenErr tc
= hang (ptext (sLit "The data constructors of") <+> quotes (ppr tc) <+> ptext (sLit "are not all in scope"))
2 (ptext (sLit "so you cannot derive an instance for it"))
standaloneCtxt :: LHsType Name -> SDoc
standaloneCtxt ty = hang (ptext (sLit "In the stand-alone deriving instance for"))
2 (quotes (ppr ty))
derivInstCtxt :: PredType -> MsgDoc
derivInstCtxt pred
= ptext (sLit "When deriving the instance for") <+> parens (ppr pred)
|
ghc-android/ghc
|
compiler/typecheck/TcDeriv.hs
|
bsd-3-clause
| 91,657
| 2
| 19
| 27,915
| 12,426
| 6,535
| 5,891
| -1
| -1
|
module Window
( withWindow
, keyIsPressed
) where
import qualified Graphics.UI.GLFW as GLFW
import Control.Monad (when)
-- Whether a key is pressed
keyIsPressed :: GLFW.Window -> GLFW.Key -> IO Bool
keyIsPressed win key = isPress `fmap` GLFW.getKey win key
-- Whether a keystate is a key press
isPress :: GLFW.KeyState -> Bool
isPress GLFW.KeyState'Pressed = True
isPress GLFW.KeyState'Repeating = True
isPress _ = False
-- Run an IO action with a window
withWindow :: Int -> Int -> String -> (GLFW.Window -> IO ()) -> IO ()
withWindow width height title f = do
GLFW.setErrorCallback $ Just simpleErrorCallback
r <- GLFW.init
when r $ do
m <- GLFW.createWindow width height title Nothing Nothing
case m of
(Just win) -> do
GLFW.makeContextCurrent m
f win
GLFW.destroyWindow win
Nothing -> return ()
-- causes runtime error in ghci
-- *** Exception: <stdout>: hPutChar: invalid argument (Bad file descriptor)
-- GLFW.terminate
where
simpleErrorCallback e s =
putStrLn $ unwords [show e, show s]
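-- A minimal usage sketch (not part of the original module; 'exampleMain' is a
-- hypothetical caller): open a window, poll the Escape key once, and let
-- 'withWindow' handle creation and destruction.
exampleMain :: IO ()
exampleMain = withWindow 640 480 "example" $ \win -> do
  pressed <- keyIsPressed win GLFW.Key'Escape
  when pressed $ putStrLn "escape is already down"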
|
Catchouli/Hasteroids
|
src/Window.hs
|
bsd-3-clause
| 1,110
| 0
| 16
| 274
| 307
| 155
| 152
| 25
| 2
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Network.Mail.Locutoria.Config where
import Control.Lens
import Network.Mail.Mime (Address)
import Network.Mail.Locutoria.Cli.Keymap (KeyBindings)
import Network.Mail.Locutoria.Notmuch (Database)
data Config = Config
{ _cfgDb :: Database
, _cfgKeyBindings :: KeyBindings
, _cfgUserAddr :: Address
, _cfgSendCmd :: [String]
}
makeLenses ''Config
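-- Usage sketch (illustrative, not part of the original module): 'makeLenses'
-- generates one lens per field, named without the leading underscore, e.g.
-- 'cfgSendCmd' below.
exampleSendCmd :: Config -> [String]
exampleSendCmd = view cfgSendCmd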
|
hallettj/locutoria
|
Network/Mail/Locutoria/Config.hs
|
bsd-3-clause
| 490
| 0
| 9
| 116
| 95
| 61
| 34
| 13
| 0
|
{-# LANGUAGE OverloadedStrings #-}
module NML (
NetworkObject(..),
Path,
GUID,
guid,
SingleType(..),
GroupType(..),
ServiceType(..),
NMLReader, runNMLReader, local,
NMLData(..), mkNMLData,
Cost
) where
import NML.RDFRep (NMLReader, runNMLReader, local, NMLData(..), mkNMLData, Cost)
import NML.Types
|
maertsen/netPropCheck
|
NML.hs
|
bsd-3-clause
| 304
| 0
| 6
| 36
| 100
| 68
| 32
| 14
| 0
|
module Main where
import Control.Applicative
import Control.Monad.Identity ( Identity )
import Data.Char ( digitToInt )
import Data.Function
import Data.List
import Data.Maybe
import Data.Ord
import System.Environment ( getArgs )
import Safe ( readNote )
import Data.List.Split
import Text.Parsec ( ParsecT, alphaNum, digit, char, string
, anyChar, eof
, many1, sepBy1
, try, parse )
type Parser a = ParsecT String () Identity a
newtype Version = Version [Int]
deriving (Eq, Ord)
instance Show Version where
show (Version is) = intercalate "." (map show is)
pVersion :: Parser Version
pVersion = Version <$> (try (char '(' *> ints <* char ')') <|> ints)
where
ints :: Parser [Int]
ints = int `sepBy1` char '.'
int :: Parser Int
int = do
is <- map digitToInt <$> many1 digit
return $ sum $ zipWith (*) (reverse is) (iterate (10*) 1)
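-- Illustrative behaviour of 'pVersion' (GHCi sketch, not part of the original
-- program):
--   parse pVersion "" "1.2.3"  ==> Right (Version [1,2,3]), shown as "1.2.3"
--   parse pVersion "" "(10.0)" ==> Right (Version [10,0]),  shown as "10.0"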
data Package = Package { name :: String
, availableVersion :: Maybe Version
, installedVersions :: [Version]
}
deriving (Eq, Ord, Show)
combinePkg :: [Package] -> [Package]
combinePkg =
map grp .
groupBy ((==) `on` name) .
sortBy (comparing name)
where
grp :: [Package] -> Package
grp [] = error "empty group"
grp ps@(Package{name=n}:_) =
Package n
(listToMaybe $ mapMaybe availableVersion ps)
(nub $ sort $ concatMap installedVersions ps)
showP :: Package -> String
showP (Package n Nothing is) = n ++ ": a new version might be available. You have " ++ show is ++ "."
showP (Package n (Just a) is) = n ++ ": version " ++ show a ++ " is available. You have " ++ show is ++ "."
parseSkipFinish :: Parser f -> Parser s -> Parser a -> Parser [a]
parseSkipFinish f s a = (f *> return [])
<|> ((:) <$> a <*> parseSkipFinish f s a)
<|> (s *> parseSkipFinish f s a)
skipOr :: Parser b -> Parser a -> Parser a
skipOr skip p = try p <|> (skip *> skipOr skip p)
pPackage :: Parser Package
pPackage =
Package <$> (string "* " *> many1 (alphaNum <|> char '-'))
<*> pOne
<*> skipOr anyChar (string "Installed versions: " *> pVersion `sepBy1` string ", ")
where
noneStr = "[ Not available from any configured repository ]"
pOne = skipOr anyChar $ string "Default available version: "
*> ( Nothing <$ string noneStr <|>
Just <$> pVersion )
pPackages :: Parser [Package]
pPackages = parseSkipFinish eof anyChar pPackage <* eof
pGhcPkgList :: String -> Maybe Package
pGhcPkgList = helper . strip
where
helper "" = Nothing
helper ('/':_) = Nothing
helper xs =
let
parts = splitOn "-" xs
pkgname = init parts
pkgvers = map (\ s -> readNote (show s) s) $ splitOn "." $ last parts
in
Just $ Package (intercalate "-" pkgname) Nothing [Version pkgvers]
strip :: String -> String
strip = reverse . stripL . reverse . stripL
where
stripL (ch:xs) | ch `elem` " ()" = stripL xs
stripL xs = xs
main :: IO ()
main = do
(a:b:_) <- getArgs
cabalListInstalled <- readFile a
ghcPkgList <- readFile b
case parse pPackages "<stdin>" cabalListInstalled of
Left err -> error $ show err
Right ps -> do
let
fromCabalList = ps
fromGhcPkgList = mapMaybe pGhcPkgList (lines ghcPkgList)
allPackages = combinePkg $ fromCabalList ++ fromGhcPkgList
-- putStrLn $ unlines $ map show fromCabalList
-- putStrLn $ unlines $ map show fromGhcPkgList
putStrLn $ unlines $ map showP $
flip filter allPackages $ \ p ->
case availableVersion p of
Nothing -> False
Just av -> av > last (installedVersions p)
putStrLn $ unlines $
"A new version might be availble for the following packages too:" :
map ((" " ++) . name) (filter (isNothing . availableVersion) allPackages)
|
ozgurakgun/cabal-list-outdated
|
Main.hs
|
bsd-3-clause
| 4,302
| 0
| 20
| 1,465
| 1,358
| 700
| 658
| 97
| 3
|
module Parsers (parseDoubles) where
import Data.List (unfoldr)
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.ByteString.Lex.Lazy.Double as B
-- | Parse doubles separated by space or separator character (as determined by
-- 'isSeparator'). Anything else is translated into a 'Nothing'.
--
-- Examples:
--
-- > parseDoubles "1 2.3" = [Just 1.0,Just 2.3]
-- > parseDoubles "1,,2.3," = [Just 1.0,Nothing,Just 2.3,Nothing]
-- > parseDoubles "1 NA 3" = [Just 1.0,Nothing,Just 3.0]
--
parseDoubles :: B.ByteString -> [Maybe Double]
parseDoubles bs | B.null bs = []
| isSeparator (B.last bs) = ds ++ [Nothing]
| otherwise = ds
where
ds = unfoldr go (skipWhitespace bs)
go xs | B.null xs = Nothing
| otherwise = case B.readDouble xs of
Just (d, ys) -> Just (Just d, skipSep ys)
Nothing -> Just (Nothing, skipSep $ skipNA xs)
isWhitespace :: Char -> Bool
isWhitespace c = c == ' ' || c == '\t'
isSeparator :: Char -> Bool
isSeparator c = c `elem` ",;"
skipWhitespace :: B.ByteString -> B.ByteString
skipWhitespace = B.dropWhile isWhitespace
skipNA :: B.ByteString -> B.ByteString
skipNA = B.dropWhile (not . \c -> isWhitespace c || isSeparator c)
skipSep :: B.ByteString -> B.ByteString
skipSep = skipWhitespace . skip . skipWhitespace
where
skip bs = case B.uncons bs of
Just (x, bs') -> if isSeparator x then bs' else B.cons x bs'
Nothing -> B.empty
|
b4winckler/up-down-signature
|
src/sigscore/Parsers.hs
|
bsd-3-clause
| 1,563
| 0
| 14
| 419
| 436
| 231
| 205
| 26
| 3
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.EXT.GPUProgramParameters
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.EXT.GPUProgramParameters (
-- * Extension Support
glGetEXTGPUProgramParameters,
gl_EXT_gpu_program_parameters,
-- * Functions
glProgramEnvParameters4fvEXT,
glProgramLocalParameters4fvEXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Functions
|
haskell-opengl/OpenGLRaw
|
src/Graphics/GL/EXT/GPUProgramParameters.hs
|
bsd-3-clause
| 688
| 0
| 4
| 89
| 47
| 36
| 11
| 7
| 0
|
-- | This module exports the types used to create facets queries.
module Data.Factual.Query.FacetsQuery
(
-- * FacetsQuery type
FacetsQuery(..)
-- * Required modules
, module Data.Factual.Shared.Table
, module Data.Factual.Shared.Search
, module Data.Factual.Shared.Filter
, module Data.Factual.Shared.Geo
) where
import Data.Factual.Query
import Data.Factual.Shared.Table
import Data.Factual.Shared.Search
import Data.Factual.Shared.Filter
import Data.Factual.Shared.Geo
import Data.Factual.Utils
import qualified Data.Map as M
-- | The FacetsQuery type is used to construct facets queries. A table and search
-- should be specified, but the rest of the query options are essentially
-- optional.
data FacetsQuery = FacetsQuery { table :: Table
, search :: Search
, select :: [String]
, filters :: [Filter]
, geo :: Maybe Geo
, limit :: Maybe Int
, minCount :: Maybe Int
, includeCount :: Bool
} deriving (Eq, Show)
-- The FacetsQuery type is a member of the Query typeclass so it can be used to
-- make a request.
instance Query FacetsQuery where
path query = (show $ table query) ++ "/facets"
params query = M.fromList [ searchPair $ search query
, selectPair $ select query
, filtersPair $ filters query
, geoPair $ geo query
, limitPair $ limit query
, minCountPair $ minCount query
, includeCountPair $ includeCount query ]
-- Helper functions
minCountPair :: Maybe Int -> (String, String)
minCountPair (Just x) = ("min_count", show x)
minCountPair Nothing = ("min_count", "")
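-- An illustrative construction (the 'Places' table and 'AndSearch' constructor
-- are assumptions about the Shared modules; substitute the real constructors):
--
-- > exampleQuery = FacetsQuery { table = Places
-- >                            , search = AndSearch ["coffee"]
-- >                            , select = ["locality"]
-- >                            , filters = []
-- >                            , geo = Nothing
-- >                            , limit = Just 10
-- >                            , minCount = Just 2
-- >                            , includeCount = True }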
|
rudyl313/factual-haskell-driver
|
Data/Factual/Query/FacetsQuery.hs
|
bsd-3-clause
| 1,982
| 0
| 9
| 723
| 357
| 215
| 142
| 35
| 1
|
------------------------------------------------------------------------------
-- | This module exports the 'Config' datatype, which you can use to configure
-- the Snap HTTP server.
--
module Snap.Http.Server.Config
( Config
, ConfigLog(..)
, emptyConfig
, defaultConfig
, commandLineConfig
, extendedCommandLineConfig
, completeConfig
, optDescrs
, fmapOpt
, getAccessLog
, getBind
, getCompression
, getDefaultTimeout
, getErrorHandler
, getErrorLog
, getHostname
, getLocale
, getOther
, getPort
, getProxyType
, getSSLBind
, getSSLCert
, getSSLKey
, getSSLPort
, getVerbose
, getStartupHook
, setAccessLog
, setBind
, setCompression
, setDefaultTimeout
, setErrorHandler
, setErrorLog
, setHostname
, setLocale
, setOther
, setPort
, setProxyType
, setSSLBind
, setSSLCert
, setSSLKey
, setSSLPort
, setVerbose
, setStartupHook
, StartupInfo
, getStartupSockets
, getStartupConfig
) where
import Snap.Internal.Http.Server.Config
|
afcowie/new-snap-server
|
src/Snap/Http/Server/Config.hs
|
bsd-3-clause
| 1,043
| 0
| 5
| 228
| 164
| 112
| 52
| 48
| 0
|
module Generics.GPAH.Cpp.Analyze where
import Generics.GPAH.Cpp.Base
import System.Process
import System.Exit
import Text.Printf
import Generics.GPAH.Utils
import Distribution.PackageDescription
analyzeModule :: FilePath -> FilePath -> GenericPackageDescription -> IO Analysis
analyzeModule fp pkgAbsDir cabal = do
let cmd = (shell
(printf "ghc -cpp -E -optP-P %s %s -Wwarn %s" (cppOpts cabal) (ghcOpts cabal) fp)
) -- {cwd = Just pkgAbsDir}
exitCode <- createProcess cmd >>= \ (_,_,_,ph) -> waitForProcess ph
return $ if exitCode == ExitSuccess then [] else [fp]
|
bezirg/gpah
|
src/Generics/GPAH/Cpp/Analyze.hs
|
bsd-3-clause
| 597
| 0
| 15
| 108
| 171
| 94
| 77
| 13
| 2
|
{-# LANGUAGE FlexibleContexts,OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module :
-- Copyright : (c) 2013 Boyun Tang
-- License : BSD-style
-- Maintainer : tangboyun@hotmail.com
-- Stability : experimental
-- Portability : ghc
--
--
--
-----------------------------------------------------------------------------
module Diagrams.HeatMap where
import Data.Default.Class
import qualified Data.HashMap.Strict as H
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as UV
import Diagrams.HeatMap.Impl
import Diagrams.HeatMap.Internal
import Diagrams.HeatMap.Module
import Diagrams.HeatMap.Type
plotHeatMap :: (Renderable (Path V2 Double) b,Renderable (Text Double) b,Renderable (DImage Double External) b,Backend b V2 Double)
=> Para -> Dataset -> (QDiagram b V2 Double Any,Dataset)
plotHeatMap p dataset =
let rowTreeLineW = rowTreeLineWidth p
colTreeLineW = colTreeLineWidth p
gapRatioForRowLabel = 0.02
legendFS = legendFontSize p
labelHash = case colLabels dataset of
Nothing -> H.empty
Just tVec ->
let uniqeGV = nub $ sort $ V.toList tVec
nGroup = length uniqeGV
(hu,s,l) = hslView $ toSRGB red
in H.fromList $
zip uniqeGV $
map
(uncurryRGB sRGB .
(\newH -> hsl newH s l).
(hu+).(*(360 / (fromIntegral nGroup))) . fromIntegral
) [0..nGroup-1]
matrix = datM dataset
i = nRow matrix
j = nCol matrix
w = matrixWidth p / fromIntegral j
h = matrixHeight p / fromIntegral i
(rowDendro,colDendro) = clustering (clustOpt p) (datM dataset)
rowIdxVec = fromMaybe (V.enumFromN 0 i) $ fmap (V.fromList . toList) $ rowDendro
colIdxVec = fromMaybe (V.enumFromN 0 j) $ fmap (V.fromList . toList) $ colDendro
newMatrix = let v = case order matrix of
RowMajor -> dat matrix
ColumnMajor -> dat $ changeOrder matrix
in matrix {dat = UV.generate (UV.length v)
(\idx ->
let (r,c) = idx `divMod` j
r' = rowIdxVec `atV` r
c' = colIdxVec `atV` c
idx' = r'*j+c'
in v `atUV` idx')
,order = RowMajor
}
newDataset = let rNVec = rowNames dataset
cNVec = colNames dataset
cLVec = colLabels dataset
in
Dataset { rowNames = fmap (flip V.backpermute rowIdxVec) rNVec
, colNames = fmap (flip V.backpermute colIdxVec) cNVec
, colLabels = fmap (flip V.backpermute colIdxVec) cLVec
, datM = newMatrix
}
(rowTreePos,rowTreeV) =
case rowCluster $ clustOpt p of
Nothing -> (LeftTree,unit_X)
Just (_,_,posi) -> case posi of
LeftTree -> (LeftTree,unit_X)
RightTree -> (RightTree,unitX)
(colTreePos,colTreeV) =
case colCluster $ clustOpt p of
Nothing -> (TopTree,unitY)
Just (_,_,posi) -> case posi of
TopTree -> (TopTree,unitY)
BottomTree -> (BottomTree,unit_Y)
matrixD = plotMatrix p newMatrix
(rowLabelD,rowLabelV) =
let ali =
case rowTreePos of
LeftTree -> True
RightTree -> False
dia = beside (rotate (180 @@ deg) rowTreeV)
(strutX $ gapRatioForRowLabel * matrixWidth p) $
centerY $ vcat $
mkLabels ali (rowFontSize p) h
(fontName p) (rowNames newDataset)
in (dia,rotate (180 @@ deg) rowTreeV)
(colLabelD,colLabelV) =
let gW = w
gH = 0.618 * gW
ali =
case colTreePos of
TopTree -> False
BottomTree -> True
rs =
case colLabels newDataset of
Nothing -> []
Just lVec ->
map
((\c ->
centerY $
beside colTreeV
(strutY gH) $
beside (rotate (180 @@ deg) colTreeV)
(strutY (2*gH)) $
rect gW gH
# lcA transparent
# fc c
) . (labelHash H.!)) $ V.toList lVec
dia = let ds = map (
case colTreePos of
TopTree -> rotate (90 @@ deg)
BottomTree -> rotate (90 @@ deg)) $
mkLabels ali (colFontSize p) w
(fontName p) (colNames newDataset)
in case colLabels newDataset of
Just _ ->
centerXY $ hcat $
zipWith
(beside' colTreeV) rs ds
Nothing -> centerXY $
case colTreePos of
TopTree -> strutY (2*gH) === hcat ds
BottomTree -> hcat ds === strutY (2*gH)
in (dia,rotate (180 @@ deg) colTreeV)
rowTree = if isNothing rowDendro
then mempty
else let (dia,treeH) = toTree rowTreeLineW h $ fromJust rowDendro
tree = scaleY (rowTreeHeight p / treeH) dia
in case rowTreePos of
LeftTree ->
transform (rotation $ 90 @@ deg) $
transform reflectionX tree
RightTree -> transform (rotation $ (-90) @@ deg) tree
colTree = if isNothing colDendro
then mempty
else let (dia,treeH) = toTree colTreeLineW w $ fromJust colDendro
tree = scaleY (colTreeHeight p / treeH) dia
in case colTreePos of
TopTree -> tree
BottomTree -> transform reflectionY tree
legends = let gW = w
gH = legendFS
in mkGroupLegend (colorBarPos p) gW gH (fontName p) labelHash
beside' v a b = beside v b a
heatPlot = beside' rowLabelV rowLabelD $
beside' colLabelV colLabelD $
beside' colTreeV colTree $
beside' rowTreeV rowTree matrixD
colorBar = plotColorBar p
sep' = legendFS * 0.5
catOptSetteing = set sep sep' $ set catMethod Cat def
in case colorBarPos p of
Horizontal ->
let gD = centerXY $ rotate ((-90) @@ deg) $ hcat' catOptSetteing $
map (\(r,t) -> rotate (90 @@ deg) $ alignR $ t ||| strutX (2*sep') ||| r) legends
in (centerXY $ (heatPlot === strutY (2*sep') === (gD # alignT # centerX ||| strutX (0.1 * matrixWidth p) ||| colorBar # alignT) # centerXY),newDataset)
Vertical ->
let gD = centerXY $ vcat' catOptSetteing $
map (\(r,t) -> alignL $ r ||| strutX (2*sep') ||| t) legends
in (centerXY (heatPlot ||| (gD # alignL # centerY === strutY (0.1 * matrixHeight p) === colorBar # alignL) # centerXY),newDataset)
|
tangboyun/diagrams-heatmap
|
src/Diagrams/HeatMap.hs
|
bsd-3-clause
| 8,224
| 1
| 30
| 3,847
| 2,150
| 1,116
| 1,034
| 154
| 18
|
{-# LANGUAGE TemplateHaskell #-}
-- | This module offers templates that HaTeXExtension uses to simplify declarations of
-- additional commands.
module HaTeXExtension.Meta
( defTeXCommand
, defFusedOperators
)
where
import Text.LaTeX.Base.Syntax
import Language.Haskell.TH
import Language.Haskell.TH.Lib
import HaTeXExtension.Meta.Sugar
{-| Template @ defTeXCommand \"foo\" \"bar\" [\"+\#\&\",\"qux\"] @ declares
  * value @foo@, which represents the TeX command @bar@ without arguments:
@ foo :: `LaTeX'
foo = TeXComm \"bar\" []
@
* operator:
@ (+\#\&) :: `LaTeX' -> `LaTeX' -> `LaTeX'
a +#& b = a \<\> foo \<\> b
@
* function:
@ qux :: `LaTeX' -> `LaTeX' -> `LaTeX'
a \`qux\` b = a \<\> foo \<\> b
@
    The template can take any number of such operator synonyms.
    If the first argument is @\"\"@, then the function name will be
    the same as the second argument, i.e. the command name.
    This is a top-level declaration, so it can be used without
    special splicing @$(...)@ syntax.
-}
defTeXCommand :: String -- ^ function name
-> String -- ^ TeX command name
-> [String] -- ^ operator names
-> Q [Dec] -- ^ top-level declaration
defTeXCommand "" comm ops = defTeXCommand comm comm ops
defTeXCommand name comm ops =
qDecs $ [ name ^:: [t| LaTeX |]
, name ^= [| TeXComm comm [] |]
] ++ (concatMap opDec ops)
where
opDec op = [ op ^:: [t| LaTeX -> LaTeX -> LaTeX |]
, op ^= [| \a b -> a <> $(dyn name) <> b |]
]
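-- An illustrative splice (not from the original module; the command name
-- "alpha" and the operator "&+&" are made up for the example). At the top
-- level,
--
-- > defTeXCommand "" "alpha" ["&+&"]
--
-- would declare @alpha :: LaTeX@ with @alpha = TeXComm "alpha" []@, plus an
-- operator @(&+&)@ with @a &+& b = a <> alpha <> b@.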
{-| Template @defFusedOperators [\"< ... <\"]@ declares
    operators that are combinations of several others:
@ \<\...\< :: `LaTeX' -> `LaTeX' -> `LaTeX'
a \<\...\< b = a \< \"\" \... \"\" \< b
@
    This is a top-level declaration, so it can be used without
    special splicing @$(...)@ syntax.
-}
defFusedOperators :: [String] -- ^ strings with operator names
-> Q [Dec] -- ^ top-level declaration
defFusedOperators list =
qDecs [ concat ops ^= body ops | ops <- (map words list) ]
where
body, fuse :: [String] -> Q Exp
body ops = [| \a b -> a <> $(fuse ops) <> b |]
fuse [] = [| "" |]
fuse (x:xs) = [| $(dyn x) "" "" <> $(fuse xs) |]
{--
Different experiments with templates
defTeXCommand name comm ops =
qDecs $ [ name ^:: [t| LaTeX |]
, name ^= body
] ++ (concatMap opDec ops)
where
body = [| TeXComm comm [] |]
opDec op = [ op ^:: [t| LaTeX -> LaTeX -> LaTeX |]
, op ^= [| \a b -> a <> $body <> b |]
]
defTeXCommand name comm = do
let name' = mkName name
body <- [| TeXComm comm [] |]
return [ SigD name' (ConT ''LaTeX) -- name :: LaTeX
, FunD name' [Clause [] (NormalB body) []] -- name = TeXComm comm []
]
--}
|
laughedelic/HaTeXExtension
|
HaTeXExtension/Meta.hs
|
bsd-3-clause
| 2,924
| 0
| 10
| 912
| 296
| 177
| 119
| 27
| 2
|
{-# LANGUAGE OverloadedStrings #-}
module Site.Compilers (
indexCompiler,
contentCompiler,
sassCompiler,
pandocCompiler,
pandocFeedCompiler,
pygmentsServer
) where
import Site.Types
import Site.Pandoc
import Site.Contexts
import Site.Routes
import Site.WebSockets
import Site.Pygments
import Hakyll hiding (pandocCompiler)
indexCompiler :: String -> Routes -> Pattern -> Rules ()
indexCompiler name path itemsPattern =
create [fromFilePath $ name ++ ".html"] $ do
route path
compile $ do
makeItem ""
>>= loadAndApplyTemplate "templates/index.html" (archiveCtx itemsPattern)
>>= loadAndApplyTemplate "templates/layout.html" defaultCtx
contentCompiler :: Content -> Channels -> Streams -> Rules ()
contentCompiler content channels streams =
match pattern $ do
route $ niceRoute routeRewrite
compile $ pandocCompiler streams
>>= webSocketPipe channels
>>= loadAndApplyTemplate itemTemplate context
>>= loadAndApplyTemplate "templates/layout.html" layoutContext
where pattern = contentPattern content
routeRewrite = contentRoute content
template = contentTemplate content
context = contentContext content
layoutContext = contentLayoutContext content
itemTemplate = fromFilePath $ "templates/" ++ template ++ ".html"
sassCompiler :: Compiler (Item String)
sassCompiler = loadBody (fromFilePath "scss/screen.scss")
>>= makeItem
>>= withItemBody (unixFilter "scss" args)
where args = ["-s", "-I", "provider/scss/",
"--cache-location", "generated/scss"]
|
da-x/hakyll-site
|
src/Site/Compilers.hs
|
bsd-3-clause
| 1,638
| 0
| 15
| 355
| 375
| 192
| 183
| 43
| 1
|
{-- snippet headers --}
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
module Regex where
import Foreign
import Foreign.C.Types
#include <pcre.h>
{-- /snippet headers --}
{-- snippet newtype --}
-- | A type for PCRE compile-time options. These are newtyped CInts,
-- which can be bitwise-or'd together, using '(Data.Bits..|.)'
--
newtype PCREOption = PCREOption { unPCREOption :: CInt }
deriving (Eq,Ord,Show,Read)
{-- /snippet newtype --}
{-- snippet constoptions --}
caseless :: PCREOption
caseless = PCREOption #const PCRE_CASELESS
dollar_endonly :: PCREOption
dollar_endonly = PCREOption #const PCRE_DOLLAR_ENDONLY
dotall :: PCREOption
dotall = PCREOption #const PCRE_DOTALL
{-- /snippet constoptions --}
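-- A minimal sketch (not part of the original file): since the options are
-- plain bit flags, a list of them can be folded together with (.|.); this
-- assumes an additional @import Data.Bits ((.|.))@ at the top of the file.
--
-- > combineOptions :: [PCREOption] -> PCREOption
-- > combineOptions = PCREOption . foldr ((.|.) . unPCREOption) 0
--
-- e.g. @combineOptions [caseless, dotall]@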
|
binesiyu/ifl
|
examples/ch17/Regex-hsc-const.hs
|
mit
| 748
| 0
| 6
| 133
| 101
| 63
| 38
| 12
| 1
|
-- | DELETE operations on HD wallets
module Cardano.Wallet.Kernel.DB.HdWallet.Delete (
deleteHdRoot
, deleteHdAccount
, deleteHdAddress
, deleteAllHdAccounts
) where
import Universum
import Control.Lens (at, (.=))
import Cardano.Wallet.Kernel.DB.HdWallet
import Cardano.Wallet.Kernel.DB.Util.AcidState
import Cardano.Wallet.Kernel.DB.Util.IxSet (ixedIndexed)
import qualified Cardano.Wallet.Kernel.DB.Util.IxSet as IxSet
{-------------------------------------------------------------------------------
DELETE
-------------------------------------------------------------------------------}
-- | Delete a wallet in a cascade fashion, i.e. including all its accounts
-- and transitively all its addresses.
deleteHdRoot :: HdRootId -> Update' UnknownHdRoot HdWallets ()
deleteHdRoot rootId = do
-- Delete all of the accounts and addresses related to this wallet.
deleteAllHdAccounts rootId
-- Finally, delete the wallet.
zoom hdWalletsRoots $ do
at rootId .= Nothing
-- | Delete all accounts and addresses associated with a wallet.
deleteAllHdAccounts :: HdRootId -> Update' UnknownHdRoot HdWallets ()
deleteAllHdAccounts rootId = do
-- Check that the root exists to begin with
zoomHdRootId identity rootId $
return ()
    -- Deletes all the addresses related to this wallet first.
zoom hdWalletsAddresses $ do
rootAddresses <- gets (IxSet.toList . IxSet.getEQ rootId)
forM_ rootAddresses (\addr -> at (addr ^. ixedIndexed . hdAddressId) .= Nothing)
-- Deletes all the accounts for this wallet.
zoom hdWalletsAccounts $ do
rootAccounts <- gets (IxSet.toList . IxSet.getEQ rootId)
forM_ rootAccounts (\account -> at (account ^. hdAccountId) .= Nothing)
-- | Delete an account
deleteHdAccount :: HdAccountId -> Update' UnknownHdAccount HdWallets ()
deleteHdAccount accId = do
-- Check that the account & its parent root do exist before deleting anything.
zoomHdAccountId identity accId $
return ()
-- Cascade-delete all the addresses associated with this account
zoom hdWalletsAddresses $ do
rootAddresses <- gets (IxSet.toList . IxSet.getEQ accId)
forM_ rootAddresses (\addr -> at (addr ^. ixedIndexed . hdAddressId) .= Nothing)
-- Finally, delete the account.
zoom hdWalletsAccounts $
at accId .= Nothing
-- | Delete an address.
deleteHdAddress :: HdAddressId -> Update' UnknownHdAccount HdWallets ()
deleteHdAddress addrId = do
-- Check that the account & its parent root do exist before deleting anything.
zoomHdAccountId identity (addrId ^. hdAddressIdParent) $
return ()
zoom hdWalletsAddresses $ at addrId .= Nothing
|
input-output-hk/pos-haskell-prototype
|
wallet/src/Cardano/Wallet/Kernel/DB/HdWallet/Delete.hs
|
mit
| 2,752
| 0
| 17
| 564
| 523
| 273
| 250
| -1
| -1
|
module PatAs where
a :: Int
(a @ [y]) = 32
|
roberth/uu-helium
|
test/typeerrors/Examples/PatAs.hs
|
gpl-3.0
| 44
| 1
| 9
| 12
| 28
| 15
| 13
| 3
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Sound.Tidal.Parse.TH where
import Language.Haskell.TH
import Language.Haskellish
-- example: $(fromTidal "jux") ...is translated as... T.jux <$ reserved "jux"
fromTidal :: String -> Q Exp
fromTidal x = do
let y = return (VarE $ mkName $ "T." ++ x)
let z = appE [|reserved|] $ return (LitE $ StringL x)
uInfixE y [|(<$)|] z
-- example: $(fromHaskell "+") ...is translated as... + <$ reserved "+"
fromHaskell :: String -> Q Exp
fromHaskell x = do
let y = return (VarE $ mkName $ x)
let z = appE [|reserved|] $ return (LitE $ StringL x)
uInfixE y [|(<$)|] z
|
bgold-cosmos/Tidal
|
tidal-parse/src/Sound/Tidal/Parse/TH.hs
|
gpl-3.0
| 611
| 0
| 14
| 119
| 202
| 106
| 96
| 14
| 1
|
module Examples.DirProd
( dirExample
) where
import Utils
import Graph
import LaTeX
import Algebra.Matrix
import Algebra.Semiring
import Algebra.Constructs.Direct
import Policy.ShortestPath
import Policy.WidestPath
dirExample 0 = toMatrix (G (11,
[ (1,2, Dir (SP 2, WP 100)), (1,4, Dir (SP 1, WP 10))
, (2,1, Dir (SP 2, WP 100)), (2,3, Dir (SP 1, WP 1000)), (2,5, Dir (SP 3, WP 100)), (2,6, Dir (SP 4, WP 100))
, (3,2, Dir (SP 1, WP 1000)), (3,6, Dir (SP 2, WP 10)), (3,7, Dir (SP 1, WP 1000))
, (4,1, Dir (SP 1, WP 10)), (4,5, Dir (SP 2, WP 1000)), (4,8, Dir (SP 1, WP 10))
, (5,2, Dir (SP 3, WP 100)), (5,4, Dir (SP 2, WP 1000)), (5,6, Dir (SP 2, WP 100)), (5,8, Dir (SP 1, WP 100))
, (6,2, Dir (SP 4, WP 100)), (6,3, Dir (SP 2, WP 10)), (6,5, Dir (SP 2, WP 100)), (6,7, Dir (SP 5, WP 100)), (6,10, Dir (SP 2, WP 10)), (6,11, Dir (SP 2, WP 100))
, (7,3, Dir (SP 1, WP 1000)), (7,6, Dir (SP 5, WP 100)), (7,11, Dir (SP 1, WP 10))
, (8,4, Dir (SP 1, WP 10)), (8,5, Dir (SP 1, WP 100)), (8,9, Dir (SP 2, WP 10))
, (9,8, Dir (SP 2, WP 10)), (9,10,Dir (SP 1, WP 100))
, (10,6, Dir (SP 2, WP 10)), (10,9, Dir (SP 1, WP 100)), (10,11, Dir (SP 3, WP 10000))
, (11,6, Dir (SP 2, WP 100)), (11,7, Dir (SP 1, WP 10)), (11,10, Dir (SP 3, WP 10000))
]))
|
sdynerow/Semirings-Library
|
haskell/Examples/DirProd.hs
|
apache-2.0
| 1,295
| 4
| 13
| 307
| 1,045
| 583
| 462
| 22
| 1
|
{-# LANGUAGE TypeFamilies #-}
module Propellor.Types.Container where
-- | A value that can be bound between the host and a container.
--
-- For example, a Bound Port is a Port on the container that is bound to
-- a Port on the host.
data Bound v = Bound
{ hostSide :: v
, containerSide :: v
}
-- | Create a Bound value, from two different values for the host and
-- container.
--
-- For example, @Port 8080 -<- Port 80@ means that port 8080 on the host
-- is bound to port 80 from the container.
(-<-) :: (hostv ~ v, containerv ~ v) => hostv -> containerv -> Bound v
(-<-) hostv containerv = Bound hostv containerv
-- | Flipped version of -<- with the container value first and host value
-- second.
(->-) :: (containerv ~ v, hostv ~ v) => containerv -> hostv -> Bound v
(->-) containerv hostv = Bound hostv containerv
-- | Create a Bound value, that is the same on both the host and container.
same :: v -> Bound v
same v = Bound v v
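-- A short illustration (hypothetical values; @Port@ is not defined in this
-- module and stands in for any value type one might bind):
--
-- > Port 8080 -<- Port 80   -- host port 8080 bound to container port 80
-- > Port 80 ->- Port 8080   -- the same binding, container value written first
-- > same (Port 22)          -- identical value on host and container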
|
shosti/propellor
|
src/Propellor/Types/Container.hs
|
bsd-2-clause
| 944
| 4
| 8
| 196
| 179
| 104
| 75
| 11
| 1
|
module Sidebar where
import Hakyll
import System.FilePath
import Data.Maybe
import Data.String.Utils (replace)
import Control.Applicative
import Control.Monad
data PageTree = Tree { tPath :: FilePath
, tDisp :: String
, tLeaves :: [PageTree]
}
| Page { pPath :: String
, pDesc :: String
}
| Group { gDesc :: String
, gLeaves :: [PageTree]
}
deriving (Eq, Show)
atDirectory :: FilePath -> PageTree -> Maybe PageTree
atDirectory dir t@(Tree p _ ls) =
case (dir == p) of
True -> Just t
False -> msum $ map (recur p) ls
where
prefix :: FilePath -> PageTree -> PageTree
prefix x (Tree p d ls) = (Tree (x </> p) d ls)
prefix x (Page p desc) = (Page (x </> p) desc)
prefix x (Group d ls) = (Group d (map (prefix x) ls))
recur :: FilePath -> PageTree -> Maybe PageTree
recur p subtree = atDirectory dir (prefix p subtree)
atDirectory _ (Page _ _) = Nothing
atDirectory _ (Group _ _) = Nothing
itemDirectory :: Item a -> FilePath
itemDirectory = dropTrailingPathSeparator
. dropFileName
. itemFilePath
itemFilePath :: Item a -> FilePath
itemFilePath = (flip addExtension) "html"
. dropExtension
. replace "pages" "" -- XXX get rid of leading,
-- yes i know this is evil.
. toFilePath
. itemIdentifier
ls :: PageTree -> String
ls (Tree _ _ ps) = unlines $ map l ps
where l (Tree p _ _) = p ++ "/"
l (Page p _) = p
ls _ = ""
tree :: PageTree -> String
tree pt = case pt of
Tree d _ ps -> mktree d ps
Page p _ -> p
Group _ ps -> mktree "" ps
where
mktree d ps = unlines $ concatMap (aux d) ps
aux prefix (Tree d _ ps) = concatMap (aux (prefix </> d)) ps
aux prefix (Page p _) = [ prefix </> p ]
aux prefix (Group _ ps) = concatMap (aux prefix) ps
sidebarHTML :: PageTree -> Item a -> String
sidebarHTML sitemap item =
fromMaybe "" $ listing <$> atDirectory pwd sitemap
where
pwd = itemDirectory item
listing :: PageTree -> String
listing (Tree p d xs) = unlines $
[ "<div class=\"well sidebar-nav\">"
, " <ul class=\"nav nav-list\">"
, " <li class=\"nav-header\">" ++ d ++ "</li>"
] ++
[ listitem p x | x <- xs ] ++
[ " </ul>"
, "</div><!--/.well -->"
]
listing _ = "" -- Should not be possible (result of atDirectory)
listitem :: String -> PageTree -> String
listitem p (Tree p' d _) =
"<li><a href=\"" ++ fullpath ++ "\">" ++ d ++ "</a></li>"
where fullpath = p </> p' ++ "/"
listitem p (Page p' desc) =
"<li" ++ emph ++ "><a href=\"" ++ fullpath ++ "\">" ++ desc ++
"</a></li>"
where
fullpath = p </> p'
emph = case (itemFilePath item) == fullpath of
True -> " class=\"active\" "
False -> ""
listitem p (Group d ps) = header ++ contents
where
header = " <li class=\"nav-header\">" ++ d ++ "</li>\n"
contents = unlines $ map (listitem p) ps
|
GaloisInc/ivorylang-org
|
Sidebar.hs
|
bsd-3-clause
| 3,200
| 0
| 12
| 1,080
| 1,083
| 561
| 522
| 80
| 5
|
module Llvm.Util.Mapping where
import qualified Data.Map as M
getValOrImplError :: (Show a, Ord a) => (M.Map a k, String) -> a -> k
getValOrImplError (mp, mn) x = case M.lookup x mp of
Just s -> s
Nothing -> error $ "implementation error: " ++ show x ++ " is not added to " ++ mn
|
mlite/hLLVM
|
src/Llvm/Util/Mapping.hs
|
bsd-3-clause
| 285
| 0
| 11
| 61
| 117
| 63
| 54
| 6
| 2
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE UndecidableInstances #-}
module Stack.Types.Resolver
(AbstractResolver(..)
,readAbstractResolver
,Snapshots (..)
) where
import Pantry.Internal.AesonExtended
(FromJSON, parseJSON,
withObject, (.:), withText)
import qualified Data.HashMap.Strict as HashMap
import qualified Data.IntMap.Strict as IntMap
import qualified Data.Text as T
import Data.Text.Read (decimal)
import Data.Time (Day)
import Options.Applicative (ReadM)
import qualified Options.Applicative.Types as OA
import Stack.Prelude
-- | Either an actual resolver value, or an abstract description of one (e.g.,
-- latest nightly).
data AbstractResolver
= ARLatestNightly
| ARLatestLTS
| ARLatestLTSMajor !Int
| ARResolver !RawSnapshotLocation
| ARGlobal
instance Show AbstractResolver where
show = T.unpack . utf8BuilderToText . display
instance Display AbstractResolver where
display ARLatestNightly = "nightly"
display ARLatestLTS = "lts"
display (ARLatestLTSMajor x) = "lts-" <> display x
display (ARResolver usl) = display usl
display ARGlobal = "global"
readAbstractResolver :: ReadM (Unresolved AbstractResolver)
readAbstractResolver = do
s <- OA.readerAsk
case s of
"global" -> pure $ pure ARGlobal
"nightly" -> pure $ pure ARLatestNightly
"lts" -> pure $ pure ARLatestLTS
'l':'t':'s':'-':x | Right (x', "") <- decimal $ T.pack x ->
pure $ pure $ ARLatestLTSMajor x'
_ -> pure $ ARResolver <$> parseRawSnapshotLocation (T.pack s)
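-- For illustration (comment added here, not in the original module): "lts"
-- parses to ARLatestLTS, "lts-16" to ARLatestLTSMajor 16, "nightly" to
-- ARLatestNightly, "global" to ARGlobal, and anything else (e.g.
-- "lts-16.31" or "nightly-2020-01-01") to an ARResolver built from the raw
-- snapshot location.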
data BuildPlanTypesException
= ParseResolverException !Text
| FilepathInDownloadedSnapshot !Text
deriving Typeable
instance Exception BuildPlanTypesException
instance Show BuildPlanTypesException where
show (ParseResolverException t) = concat
[ "Invalid resolver value: "
, T.unpack t
, ". Possible valid values include lts-2.12, nightly-YYYY-MM-DD, ghc-7.10.2, and ghcjs-0.1.0_ghc-7.10.2. "
, "See https://www.stackage.org/snapshots for a complete list."
]
show (FilepathInDownloadedSnapshot url) = unlines
[ "Downloaded snapshot specified a 'resolver: { location: filepath }' "
, "field, but filepaths are not allowed in downloaded snapshots.\n"
, "Filepath specified: " ++ T.unpack url
]
-- | Most recent Nightly and newest LTS version per major release.
data Snapshots = Snapshots
{ snapshotsNightly :: !Day
, snapshotsLts :: !(IntMap Int)
}
deriving Show
instance FromJSON Snapshots where
parseJSON = withObject "Snapshots" $ \o -> Snapshots
<$> (o .: "nightly" >>= parseNightly)
<*> fmap IntMap.unions (mapM (parseLTS . snd)
$ filter (isLTS . fst)
$ HashMap.toList o)
where
parseNightly t =
case parseSnapName t of
Left e -> fail $ show e
Right (LTS _ _) -> fail "Unexpected LTS value"
Right (Nightly d) -> return d
isLTS = ("lts-" `T.isPrefixOf`)
parseLTS = withText "LTS" $ \t ->
case parseSnapName t of
Left e -> fail $ show e
Right (LTS x y) -> return $ IntMap.singleton x y
Right (Nightly _) -> fail "Unexpected nightly value"
|
juhp/stack
|
src/Stack/Types/Resolver.hs
|
bsd-3-clause
| 3,684
| 0
| 16
| 944
| 806
| 430
| 376
| 98
| 5
|
-- | Add a module description here
-- also add descriptions to each function.
module Protocol.BCode
(
BCode,
Path(..),
encode,
-- encodeBS,
decode,
search,
announce,
comment,
creationDate,
announceList,
info,
hashInfoDict,
infoLength,
infoName,
infoPieceLength,
infoPieces,
numberPieces,
infoFiles,
prettyPrint,
trackerComplete,
trackerIncomplete,
trackerInterval,
trackerMinInterval,
trackerPeers,
trackerWarning,
trackerError,
toBS,
fromBS,
-- Extended handshake
extendedP,
extendedV,
extendedRReq,
extendedMsg,
--Tests
testSuite)
where
import Control.Monad
import Control.Applicative hiding (many)
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString as B
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Char
import Data.List
import qualified Data.Map as M
import Text.PrettyPrint.HughesPJ hiding (char)
import Data.Serialize
import Data.Word
import Test.QuickCheck
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.Framework.Providers.HUnit
import Test.HUnit hiding (Path, Test)
import Digest
import TestInstance() -- for instances only
-- | BCode represents the structure of a bencoded file
data BCode = BInt Integer -- ^ An integer
| BString B.ByteString -- ^ A string of bytes
| BArray [BCode] -- ^ An array
| BDict (M.Map B.ByteString BCode) -- ^ A key, value map
deriving (Show, Eq)
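-- For orientation (illustrative example, not from the original file): the
-- bencoded form "d3:cow3:mooe" corresponds to
--
-- > BDict (M.fromList [(toBS "cow", BString (toBS "moo"))])
--
-- and "li42ei-7ee" corresponds to @BArray [BInt 42, BInt (-7)]@.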
instance Arbitrary BCode where
arbitrary = sized bc'
where bc' :: Int -> Gen BCode
bc' 0 = oneof [BInt <$> arbitrary,
BString <$> arbitrary]
bc' n =
oneof [BInt <$> arbitrary,
BString <$> arbitrary,
BArray <$> sequence (replicate n $ bc' (n `div` 8)),
do keys <- vectorOf n arbitrary
values <- sequence (replicate n $ bc' (n `div` 8))
return $ BDict $ M.fromList $ zip keys values]
data Path = PString B.ByteString
| PInt Int
toW8 :: Char -> Word8
toW8 = fromIntegral . ord
fromW8 :: Word8 -> Char
fromW8 = chr . fromIntegral
toBS :: String -> B.ByteString
toBS = B.pack . map toW8
fromBS :: B.ByteString -> String
fromBS = map fromW8 . B.unpack
fromUtf8BS :: B.ByteString -> String
fromUtf8BS = T.unpack . T.decodeUtf8
instance Serialize BCode where
put (BInt i) = wrap 'i' 'e' $ putShow i
put (BArray arr) = wrap 'l' 'e' . mapM_ put $ arr
put (BDict mp) = wrap 'd' 'e' dict
where dict = mapM_ encPair . M.toList $ mp
encPair (k, v) = put (BString k) >> put v
put (BString s) = do
putShow (B.length s)
putWord8 (toW8 ':')
putByteString s
get = getBInt <|> getBArray <|> getBDict <|> getBString
-- | Get something wrapped in two Chars
getWrapped :: Char -> Char -> Get a -> Get a
getWrapped a b p = char a *> p <* char b
-- | Parses a BInt
getBInt :: Get BCode
getBInt = BInt . read <$> getWrapped 'i' 'e' intP
where intP = ((:) <$> char '-' <*> getDigits) <|> getDigits
-- | Parses a BArray
getBArray :: Get BCode
getBArray = BArray <$> getWrapped 'l' 'e' (many get)
-- | Parses a BDict
getBDict :: Get BCode
getBDict = BDict . M.fromList <$> getWrapped 'd' 'e' (many getPairs)
where getPairs = do
(BString s) <- getBString
x <- get
return (s,x)
-- | Parses a BString
getBString :: Get BCode
getBString = do
count <- getDigits
BString <$> ( char ':' *> getStr (read count :: Integer))
where maxInt = fromIntegral (maxBound :: Int) :: Integer
getStr n | n >= 0 = B.concat <$> (sequence $ getStr' n)
| otherwise = fail $ "read a negative length string, length: " ++ show n
getStr' n | n > maxInt = getByteString maxBound : getStr' (n-maxInt)
| otherwise = [getByteString . fromIntegral $ n]
-- | Get one or more digit characters
getDigits :: Get String
getDigits = many1 digit
-- | Returns a character if it is a digit, fails otherwise. Uses isDigit.
digit :: Get Char
digit = do
x <- getCharG
if isDigit x
then return x
else fail $ "Expected digit, got: " ++ show x
-- * Put helper functions
-- | Put an element, wrapped by two characters
wrap :: Char -> Char -> Put -> Put
wrap a b m = do
putWord8 (toW8 a)
m
putWord8 (toW8 b)
-- | Put something as it is shown using @show@
putShow :: Show a => a -> Put
putShow = mapM_ put . show
-- * Get Helper functions
-- | Parse zero or more items using a given parser
many :: Get a -> Get [a]
many p = many1 p `mplus` return []
-- | Parse one or more items using a given parser
many1 :: Get a -> Get [a]
many1 p = (:) <$> p <*> many p
-- | Parse a given character
char :: Char -> Get Char
char c = do
x <- getCharG
if x == c
then return c
else fail $ "Expected char: '" ++ c:"' got: '" ++ [x,'\'']
-- | Get a Char. Only works with single byte characters
getCharG :: Get Char
getCharG = fromW8 <$> getWord8
-- BCode helper functions
-- | Return the hash of the info-dict in a torrent file
hashInfoDict :: BCode -> IO Digest
hashInfoDict bc =
do ih <- case info bc of
Nothing -> fail "Could not find infoHash"
Just x -> return x
let encoded = encode ih
return $ digest $ L.fromChunks $ [encoded]
toPS :: String -> Path
toPS = PString . toBS
{- Simple search function over BCoded data structures, general case. In practice, we
will prefer some simpler mnemonics -}
search :: [Path] -> BCode -> Maybe BCode
search [] bc = Just bc
search (PInt i : rest) (BArray bs) | i < 0 || i > length bs = Nothing
| otherwise = search rest (bs!!i)
search (PString s : rest) (BDict mp) = M.lookup s mp >>= search rest
search _ _ = Nothing
search' :: String -> BCode -> Maybe B.ByteString
search' str b = case search [toPS str] b of
Nothing -> Nothing
Just (BString s) -> Just s
_ -> Nothing
searchStr :: String -> BCode -> Maybe B.ByteString
searchStr = search'
searchInt :: String -> BCode -> Maybe Integer
searchInt str b = case search [toPS str] b of
Just (BInt i) -> Just i
_ -> Nothing
searchInfo :: String -> BCode -> Maybe BCode
searchInfo str = search [toPS "info", toPS str]
{- Various accessors -}
announce, comment, creationDate :: BCode -> Maybe B.ByteString
announce = search' "announce"
comment = search' "comment"
creationDate = search' "creation date"
-- | list of lists of strings, according to the official spec
announceList :: BCode -> Maybe [[B.ByteString]]
announceList b = case search [toPS "announce-list"] b of
Just (BArray xs) -> Just ( map (\(BArray s) -> map' s) xs)
_ -> Nothing
where map' = map (\(BString s) -> s)
{- Tracker accessors -}
trackerComplete, trackerIncomplete, trackerInterval :: BCode -> Maybe Integer
trackerMinInterval :: BCode -> Maybe Integer
trackerComplete = searchInt "complete"
trackerIncomplete = searchInt "incomplete"
trackerInterval = searchInt "interval"
trackerMinInterval = searchInt "min interval"
trackerError, trackerWarning :: BCode -> Maybe B.ByteString
trackerError = searchStr "failure reason"
trackerWarning = searchStr "warning message"
trackerPeers :: BCode -> Maybe (B.ByteString, B.ByteString)
trackerPeers bc = do v4 <- searchStr "peers" bc
v6 <- return $ maybe (B.empty) id $ searchStr "peers6" bc
return (v4, v6)
info :: BCode -> Maybe BCode
info = search [toPS "info"]
infoName :: BCode -> Maybe B.ByteString
infoName bc = case search [toPS "info", toPS "name"] bc of
Just (BString s) -> Just s
_ -> Nothing
infoPieceLength ::BCode -> Maybe Integer
infoPieceLength bc = do BInt i <- search [toPS "info", toPS "piece length"] bc
return i
infoLength :: BCode -> Maybe Integer
infoLength bc = maybe length2 Just length1
where
-- |info/length key for single-file torrent
length1 = do BInt i <- search [toPS "info", toPS "length"] bc
return i
-- |length summed from files of multi-file torrent
length2 = sum `fmap`
map snd `fmap`
infoFiles bc
infoPieces :: BCode -> Maybe [B.ByteString]
infoPieces b = do t <- searchInfo "pieces" b
case t of
BString str -> return $ sha1Split str
_ -> mzero
where sha1Split r | r == B.empty = []
| otherwise = block : sha1Split rest
where (block, rest) = B.splitAt 20 r
numberPieces :: BCode -> Maybe Int
numberPieces = fmap length . infoPieces
infoFiles :: BCode -> Maybe [([String], Integer)] -- ^[(filePath, fileLength)]
infoFiles bc = let mbFpath = fromUtf8BS `fmap` infoName bc
mbLength = infoLength bc
mbFiles = do BArray fileList <- searchInfo "files" bc
return $ do fileDict@(BDict _) <- fileList
let Just (BInt l) = search [toPS "length"] fileDict
Just (BArray pth) = search [toPS "path"] fileDict
pth' = map (\(BString s) -> fromUtf8BS s) pth
return (pth', l)
in case (mbFpath, mbLength, mbFiles) of
(Just fpath, _, Just files) ->
Just $
map (\(pth, l) ->
(fpath:pth, l)
) files
(Just fpath, Just l, _) ->
Just [([fpath], l)]
(_, _, Just files) ->
Just files
_ ->
Nothing
---------------------------------------------------------------------
-- Extended message handshake
--
extendedP :: BCode -> Maybe Word16
extendedP = fmap fromIntegral . searchInt "p"
extendedV :: BCode -> Maybe String
extendedV = fmap ( fmap (chr . fromIntegral) ) . fmap B.unpack . searchStr "v"
extendedRReq :: BCode -> Maybe Integer
extendedRReq = searchInt "rreq"
extendedMsg :: Integer -> String -> Integer -> BCode
extendedMsg p v rreq = BDict $ M.fromList [(toBS "m", BDict M.empty)
,(toBS "p", BInt p)
,(toBS "v", BString $ toBS v)
,(toBS "rreq", BInt rreq)]
---------------------------------------------------------------------
-- Pretty printing
--
pp :: BCode -> Doc
pp bc =
case bc of
BInt i -> integer i
BString s -> text (show s)
BArray arr -> text "[" <+> (cat $ intersperse comma al) <+> text "]"
where al = map pp arr
BDict mp -> text "{" <+> cat (intersperse comma mpl) <+> text "}"
where mpl = map (\(s, bc') -> text (fromUtf8BS s) <+> text "->" <+> pp bc') $ M.toList mp
prettyPrint :: BCode -> String
prettyPrint = render . pp
toBDict :: [(String,BCode)] -> BCode
toBDict = BDict . M.fromList . map (\(k,v) -> ((toBS k),v))
toBString :: String -> BCode
toBString = BString . toBS
-- TESTS
testSuite :: Test
testSuite = testGroup "Protocol/BCode"
[ testProperty "QC encode-decode/id" propEncodeDecodeId,
testCase "HUnit encode-decode/id" testDecodeEncodeProp1 ]
propEncodeDecodeId :: BCode -> Bool
propEncodeDecodeId bc =
let encoded = encode bc
decoded = decode encoded
in
Right bc == decoded
testDecodeEncodeProp1 :: Assertion
testDecodeEncodeProp1 =
let encoded = encode testData
decoded = decode encoded
in
assertEqual "for encode/decode identify" (Right testData) decoded
testData :: [BCode]
testData = [BInt 123,
BInt (-123),
BString (toBS "Hello"),
BString (toBS ['\NUL'..'\255']),
BArray [BInt 1234567890
,toBString "a longer string with eieldei stuff to mess things up"
],
toBDict [
("hello",BInt 3)
,("a key",toBString "and a value")
,("a sub dict",toBDict [
("some stuff",BInt 1)
,("some more stuff", toBString "with a string")
])
]
]
|
jlouis/combinatorrent
|
src/Protocol/BCode.hs
|
bsd-2-clause
| 13,311
| 0
| 21
| 4,652
| 3,796
| 1,963
| 1,833
| 292
| 4
|
{-# LANGUAGE CPP, DeriveDataTypeable, MultiParamTypeClasses, FlexibleInstances, ScopedTypeVariables #-}
-- |
-- Module : Data.Vector.Primitive.Mutable
-- Copyright : (c) Roman Leshchinskiy 2008-2010
-- License : BSD-style
--
-- Maintainer : Roman Leshchinskiy <rl@cse.unsw.edu.au>
-- Stability : experimental
-- Portability : non-portable
--
-- Mutable primitive vectors.
--
module Data.Vector.Primitive.Mutable (
-- * Mutable vectors of primitive types
MVector(..), IOVector, STVector, Prim,
-- * Accessors
-- ** Length information
length, null,
-- ** Extracting subvectors
slice, init, tail, take, drop, splitAt,
unsafeSlice, unsafeInit, unsafeTail, unsafeTake, unsafeDrop,
-- ** Overlapping
overlaps,
-- * Construction
-- ** Initialisation
new, unsafeNew, replicate, replicateM, clone,
-- ** Growing
grow, unsafeGrow,
-- ** Restricting memory usage
clear,
-- * Accessing individual elements
read, write, modify, swap,
unsafeRead, unsafeWrite, unsafeModify, unsafeSwap,
-- * Modifying vectors
-- ** Filling and copying
set, copy, move, unsafeCopy, unsafeMove
) where
import qualified Data.Vector.Generic.Mutable as G
import Data.Primitive.ByteArray
import Data.Primitive ( Prim, sizeOf )
import Data.Word ( Word8 )
import Control.Monad.Primitive
import Control.Monad ( liftM )
import Control.DeepSeq ( NFData(rnf) )
import Prelude hiding ( length, null, replicate, reverse, map, read,
take, drop, splitAt, init, tail )
import Data.Typeable ( Typeable )
-- Data.Vector.Internal.Check is unnecessary
#define NOT_VECTOR_MODULE
#include "vector.h"
-- | Mutable vectors of primitive types.
data MVector s a = MVector {-# UNPACK #-} !Int
{-# UNPACK #-} !Int
{-# UNPACK #-} !(MutableByteArray s) -- ^ offset, length, underlying mutable byte array
deriving ( Typeable )
type IOVector = MVector RealWorld
type STVector s = MVector s
instance NFData (MVector s a) where
rnf (MVector _ _ _) = ()
instance Prim a => G.MVector MVector a where
basicLength (MVector _ n _) = n
basicUnsafeSlice j m (MVector i _ arr)
= MVector (i+j) m arr
{-# INLINE basicOverlaps #-}
basicOverlaps (MVector i m arr1) (MVector j n arr2)
= sameMutableByteArray arr1 arr2
&& (between i j (j+n) || between j i (i+m))
where
between x y z = x >= y && x < z
{-# INLINE basicUnsafeNew #-}
basicUnsafeNew n
| n < 0 = error $ "Primitive.basicUnsafeNew: negative length: " ++ show n
    | n > mx = error $ "Primitive.basicUnsafeNew: length too large: " ++ show n
| otherwise = MVector 0 n `liftM` newByteArray (n * size)
where
size = sizeOf (undefined :: a)
mx = maxBound `div` size :: Int
{-# INLINE basicInitialize #-}
basicInitialize (MVector off n v) =
setByteArray v (off * size) (n * size) (0 :: Word8)
where
size = sizeOf (undefined :: a)
{-# INLINE basicUnsafeRead #-}
basicUnsafeRead (MVector i _ arr) j = readByteArray arr (i+j)
{-# INLINE basicUnsafeWrite #-}
basicUnsafeWrite (MVector i _ arr) j x = writeByteArray arr (i+j) x
{-# INLINE basicUnsafeCopy #-}
basicUnsafeCopy (MVector i n dst) (MVector j _ src)
= copyMutableByteArray dst (i*sz) src (j*sz) (n*sz)
where
sz = sizeOf (undefined :: a)
{-# INLINE basicUnsafeMove #-}
basicUnsafeMove (MVector i n dst) (MVector j _ src)
= moveByteArray dst (i*sz) src (j*sz) (n * sz)
where
sz = sizeOf (undefined :: a)
{-# INLINE basicSet #-}
basicSet (MVector i n arr) x = setByteArray arr i n x
-- Length information
-- ------------------
-- | Length of the mutable vector.
length :: Prim a => MVector s a -> Int
{-# INLINE length #-}
length = G.length
-- | Check whether the vector is empty
null :: Prim a => MVector s a -> Bool
{-# INLINE null #-}
null = G.null
-- Extracting subvectors
-- ---------------------
-- | Yield a part of the mutable vector without copying it.
slice :: Prim a => Int -> Int -> MVector s a -> MVector s a
{-# INLINE slice #-}
slice = G.slice
take :: Prim a => Int -> MVector s a -> MVector s a
{-# INLINE take #-}
take = G.take
drop :: Prim a => Int -> MVector s a -> MVector s a
{-# INLINE drop #-}
drop = G.drop
splitAt :: Prim a => Int -> MVector s a -> (MVector s a, MVector s a)
{-# INLINE splitAt #-}
splitAt = G.splitAt
init :: Prim a => MVector s a -> MVector s a
{-# INLINE init #-}
init = G.init
tail :: Prim a => MVector s a -> MVector s a
{-# INLINE tail #-}
tail = G.tail
-- | Yield a part of the mutable vector without copying it. No bounds checks
-- are performed.
unsafeSlice :: Prim a
=> Int -- ^ starting index
-> Int -- ^ length of the slice
-> MVector s a
-> MVector s a
{-# INLINE unsafeSlice #-}
unsafeSlice = G.unsafeSlice
unsafeTake :: Prim a => Int -> MVector s a -> MVector s a
{-# INLINE unsafeTake #-}
unsafeTake = G.unsafeTake
unsafeDrop :: Prim a => Int -> MVector s a -> MVector s a
{-# INLINE unsafeDrop #-}
unsafeDrop = G.unsafeDrop
unsafeInit :: Prim a => MVector s a -> MVector s a
{-# INLINE unsafeInit #-}
unsafeInit = G.unsafeInit
unsafeTail :: Prim a => MVector s a -> MVector s a
{-# INLINE unsafeTail #-}
unsafeTail = G.unsafeTail
-- Overlapping
-- -----------
-- | Check whether two vectors overlap.
overlaps :: Prim a => MVector s a -> MVector s a -> Bool
{-# INLINE overlaps #-}
overlaps = G.overlaps
-- Initialisation
-- --------------
-- | Create a mutable vector of the given length.
new :: (PrimMonad m, Prim a) => Int -> m (MVector (PrimState m) a)
{-# INLINE new #-}
new = G.new
-- | Create a mutable vector of the given length. The length is not checked.
unsafeNew :: (PrimMonad m, Prim a) => Int -> m (MVector (PrimState m) a)
{-# INLINE unsafeNew #-}
unsafeNew = G.unsafeNew
-- | Create a mutable vector of the given length (0 if the length is negative)
-- and fill it with an initial value.
replicate :: (PrimMonad m, Prim a) => Int -> a -> m (MVector (PrimState m) a)
{-# INLINE replicate #-}
replicate = G.replicate
-- | Create a mutable vector of the given length (0 if the length is negative)
-- and fill it with values produced by repeatedly executing the monadic action.
replicateM :: (PrimMonad m, Prim a) => Int -> m a -> m (MVector (PrimState m) a)
{-# INLINE replicateM #-}
replicateM = G.replicateM
-- | Create a copy of a mutable vector.
clone :: (PrimMonad m, Prim a)
=> MVector (PrimState m) a -> m (MVector (PrimState m) a)
{-# INLINE clone #-}
clone = G.clone
-- Growing
-- -------
-- | Grow a vector by the given number of elements. The number must be
-- positive.
grow :: (PrimMonad m, Prim a)
=> MVector (PrimState m) a -> Int -> m (MVector (PrimState m) a)
{-# INLINE grow #-}
grow = G.grow
-- | Grow a vector by the given number of elements. The number must be
-- positive but this is not checked.
unsafeGrow :: (PrimMonad m, Prim a)
=> MVector (PrimState m) a -> Int -> m (MVector (PrimState m) a)
{-# INLINE unsafeGrow #-}
unsafeGrow = G.unsafeGrow
-- Restricting memory usage
-- ------------------------
-- | Reset all elements of the vector to some undefined value, clearing all
-- references to external objects. This is usually a noop for unboxed vectors.
clear :: (PrimMonad m, Prim a) => MVector (PrimState m) a -> m ()
{-# INLINE clear #-}
clear = G.clear
-- Accessing individual elements
-- -----------------------------
-- | Yield the element at the given position.
read :: (PrimMonad m, Prim a) => MVector (PrimState m) a -> Int -> m a
{-# INLINE read #-}
read = G.read
-- | Replace the element at the given position.
write :: (PrimMonad m, Prim a) => MVector (PrimState m) a -> Int -> a -> m ()
{-# INLINE write #-}
write = G.write
-- | Modify the element at the given position.
modify :: (PrimMonad m, Prim a) => MVector (PrimState m) a -> (a -> a) -> Int -> m ()
{-# INLINE modify #-}
modify = G.modify
-- | Swap the elements at the given positions.
swap :: (PrimMonad m, Prim a) => MVector (PrimState m) a -> Int -> Int -> m ()
{-# INLINE swap #-}
swap = G.swap
-- | Yield the element at the given position. No bounds checks are performed.
unsafeRead :: (PrimMonad m, Prim a) => MVector (PrimState m) a -> Int -> m a
{-# INLINE unsafeRead #-}
unsafeRead = G.unsafeRead
-- | Replace the element at the given position. No bounds checks are performed.
unsafeWrite
:: (PrimMonad m, Prim a) => MVector (PrimState m) a -> Int -> a -> m ()
{-# INLINE unsafeWrite #-}
unsafeWrite = G.unsafeWrite
-- | Modify the element at the given position. No bounds checks are performed.
unsafeModify :: (PrimMonad m, Prim a) => MVector (PrimState m) a -> (a -> a) -> Int -> m ()
{-# INLINE unsafeModify #-}
unsafeModify = G.unsafeModify
-- | Swap the elements at the given positions. No bounds checks are performed.
unsafeSwap
:: (PrimMonad m, Prim a) => MVector (PrimState m) a -> Int -> Int -> m ()
{-# INLINE unsafeSwap #-}
unsafeSwap = G.unsafeSwap
-- Filling and copying
-- -------------------
-- | Set all elements of the vector to the given value.
set :: (PrimMonad m, Prim a) => MVector (PrimState m) a -> a -> m ()
{-# INLINE set #-}
set = G.set
-- | Copy a vector. The two vectors must have the same length and may not
-- overlap.
copy :: (PrimMonad m, Prim a)
=> MVector (PrimState m) a -> MVector (PrimState m) a -> m ()
{-# INLINE copy #-}
copy = G.copy
-- | Copy a vector. The two vectors must have the same length and may not
-- overlap. This is not checked.
unsafeCopy :: (PrimMonad m, Prim a)
=> MVector (PrimState m) a -- ^ target
-> MVector (PrimState m) a -- ^ source
-> m ()
{-# INLINE unsafeCopy #-}
unsafeCopy = G.unsafeCopy
-- | Move the contents of a vector. The two vectors must have the same
-- length.
--
-- If the vectors do not overlap, then this is equivalent to 'copy'.
-- Otherwise, the copying is performed as if the source vector were
-- copied to a temporary vector and then the temporary vector was copied
-- to the target vector.
move :: (PrimMonad m, Prim a)
=> MVector (PrimState m) a -> MVector (PrimState m) a -> m ()
{-# INLINE move #-}
move = G.move
-- | Move the contents of a vector. The two vectors must have the same
-- length, but this is not checked.
--
-- If the vectors do not overlap, then this is equivalent to 'unsafeCopy'.
-- Otherwise, the copying is performed as if the source vector were
-- copied to a temporary vector and then the temporary vector was copied
-- to the target vector.
unsafeMove :: (PrimMonad m, Prim a)
=> MVector (PrimState m) a -- ^ target
-> MVector (PrimState m) a -- ^ source
-> m ()
{-# INLINE unsafeMove #-}
unsafeMove = G.unsafeMove
|
seckcoder/vector
|
Data/Vector/Primitive/Mutable.hs
|
bsd-3-clause
| 10,883
| 0
| 12
| 2,506
| 2,759
| 1,505
| 1,254
| 187
| 1
|
#!/usr/bin/env runhaskell
import Control.Monad
import System.Environment
import System.Directory
import System.Process
import System.FilePath
import System.Exit
import Utils.Scripting
deploymentDir = "nikki"
main = do
args <- filterM warnWhenNotDirectory =<< getArgs
let searchPath = args ++ ["dist/build/nikki/", "dist/build/core/", "../"]
clean
copy searchPath
let deploymentIndicator = deploymentDir </> "yes_nikki_is_deployed"
copyLicenses
putStrLn ("touching " ++ deploymentIndicator)
writeFile deploymentIndicator ""
failWithUsage = do
putStrLn ""
putStrLn =<< readFile "windowsDeploy.readme"
exitWith $ ExitFailure 3
-- | Checks if a directory exists and issues a warning when it does not.
-- Returns whether the directory exists.
warnWhenNotDirectory :: FilePath -> IO Bool
warnWhenNotDirectory p = do
e <- doesDirectoryExist p
when (not e) $
putStrLn ("WARNING: directory does not exist: " ++ p)
return e
-- | cleans the deployed folder,
-- creates the deployment folder if it doesn't exist
clean :: IO ()
clean = do
putStrLn "cleaning..."
exists <- doesDirectoryExist deploymentDir
if exists then do
_ <- system ("rm -rf " ++ deploymentDir ++ "/*")
return ()
else
createDirectory deploymentDir
-- | copies all files to the deployment folder
copy :: [FilePath] -> IO ()
copy searchPath = do
putStrLn "copying..."
mapM_ (\ file -> searchAndCopy searchPath file deploymentDir) deploymentFiles
-- | searches for a file in the given search path and copies it to the destination folder
searchAndCopy :: [FilePath] -> FilePath -> FilePath -> IO ()
searchAndCopy searchPath file destinationFolder = do
mSrc <- search searchPath file
case mSrc of
Just src -> do
putStrLn ("found " ++ file ++ " in " ++ takeDirectory src)
trySystem ("cp -r " ++ src ++ " " ++ destinationFolder)
return ()
Nothing -> do
putStrLn ("ERROR: file not found in searched paths: " ++ file)
failWithUsage
-- | searches a file in a given list of directories
-- and returns the full (albeit not necessarily absolute) path.
search :: [FilePath] -> FilePath -> IO (Maybe FilePath)
search (a : searchPath) file = do
e <- doesExist (a </> file)
if e then
return $ Just (a </> file)
else
search searchPath file
search [] _ = return Nothing
doesExist :: FilePath -> IO Bool
doesExist path = do
a <- doesDirectoryExist path
b <- doesFileExist path
return (a || b)
-- | files to deploy
deploymentFiles :: [FilePath]
deploymentFiles = (
"nikki.exe" :
"core.exe" :
"data" :
-- curl
-- "libcurl-4.dll" : -- shouldn't be needed anymore
-- are sometimes statically linked
"libzip.dll" :
"libzlib1.dll" :
-- sfml deps
"libsndfile-1.dll" :
"openal32.dll" :
-- qt
"QtCore4.dll" :
"QtGui4.dll" :
"QtOpenGL4.dll" :
-- mingw deps
"mingwm10.dll" :
"libgcc_s_dw2-1.dll" :
"libstdc++-6.dll" :
[])
-- | copy the needed licenses to the deployment folder
copyLicenses :: IO ()
copyLicenses = do
putStrLn "copying license files"
copyDirectory (".." </> "deploymentLicenses") deploymentDir
|
geocurnoff/nikki
|
src/windowsDeploy.hs
|
lgpl-3.0
| 3,291
| 0
| 18
| 815
| 769
| 377
| 392
| 82
| 2
|
module ResultSet (
build
, ResultSet(..)
, Results
) where
import Control.Parallel.Strategies
import Control.Monad
import Control.Monad.ST
import Data.Maybe (isJust)
import qualified Data.ByteString.Char8 as B
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import Data.Vector.Algorithms.Intro (sort)
import Scorer
import Utils (merge, chunkV)
type ScoredList = V.Vector (Double, B.ByteString)
data Results = ParVec [ScoredList]
deriving (Show, Eq)
class ResultSet a where
size :: a -> Int
items :: a -> [B.ByteString]
refine :: ScoreStrategy s => a -> s -> a
instance ResultSet Results where
size (ParVec sl) = sum . fmap V.length $ sl
items (ParVec sl) = fmap snd . merge fst . fmap V.toList $ sl
refine (ParVec sl) sc = ParVec newSet
where rl = fmap (fmap snd) sl
newSet = scoreRL (score sc) rl
-- Create
build :: [B.ByteString] -> Results
build lst = ParVec chunks
where initVec = V.fromList $ zip [1..] lst
len = length lst
chunkSize = fst . divMod len $ 5000
chunks = chunkV (chunkSize + 1) $ initVec
-- Score line accordingly
scoreRL :: Scorer -> [V.Vector B.ByteString] -> [ScoredList]
scoreRL f rl = parMap rdeepseq cms rl
where fo x = fmap (\i -> (i, x)) $ f x
cms x = runST $ do
let remaining = V.filter isJust . fmap fo $ x
let vsize = V.length remaining
-- Copy the array to a mutable one
mv <- MV.new vsize
forM_ [0..(vsize - 1)] $ \idx -> do
e <- V.indexM remaining idx
case e of
Just el -> MV.write mv idx el
_ -> return ()
-- Sort
sort mv
V.unsafeFreeze mv
|
passy/hf
|
src/ResultSet.hs
|
apache-2.0
| 1,834
| 0
| 18
| 598
| 641
| 337
| 304
| 47
| 2
|
yes = if foo then do stuff; moreStuff; lastOfTheStuff else return ()
|
mpickering/hlint-refactor
|
tests/examples/Default26.hs
|
bsd-3-clause
| 73
| 0
| 7
| 16
| 30
| 15
| 15
| 1
| 2
|
module Propellor.Types.Chroot where
import Data.Monoid
import qualified Data.Map as M
import Propellor.Types.Empty
data ChrootInfo host = ChrootInfo
{ _chroots :: M.Map FilePath host
, _chrootCfg :: ChrootCfg
}
deriving (Show)
instance Monoid (ChrootInfo host) where
mempty = ChrootInfo mempty mempty
mappend old new = ChrootInfo
{ _chroots = M.union (_chroots old) (_chroots new)
, _chrootCfg = _chrootCfg old <> _chrootCfg new
}
instance Empty (ChrootInfo host) where
isEmpty i = and
[ isEmpty (_chroots i)
, isEmpty (_chrootCfg i)
]
data ChrootCfg
= NoChrootCfg
| SystemdNspawnCfg [(String, Bool)]
deriving (Show, Eq)
instance Monoid ChrootCfg where
mempty = NoChrootCfg
mappend v NoChrootCfg = v
mappend NoChrootCfg v = v
mappend (SystemdNspawnCfg l1) (SystemdNspawnCfg l2) =
SystemdNspawnCfg (l1 <> l2)
instance Empty ChrootCfg where
	isEmpty c = c == NoChrootCfg
|
avengerpenguin/propellor
|
src/Propellor/Types/Chroot.hs
|
bsd-2-clause
| 903
| 20
| 10
| 163
| 325
| 174
| 151
| 29
| 0
|
module Test9 where
f x = x + (let y = 67 in y)
|
SAdams601/HaRe
|
old/testing/refacRedunDec/Test9_TokOut.hs
|
bsd-3-clause
| 48
| 0
| 10
| 15
| 30
| 16
| 14
| 2
| 1
|
{-+
This is a small utility to strip blank lines and comments from Haskell files.
Haskell modules are read from files named on the command line.
The result is output on stdout.
Haskell files whose names end with ".lhs" are assumed to be in literate style.
-}
import System(getArgs)
import Unlit(readHaskellFile)
import StripComments(stripcomments)
main = mapM_ stripFile =<< getArgs
stripFile path = putStrLn . stripcomments =<< readHaskellFile path
|
forste/haReFork
|
tools/hsutils/stripcomments.hs
|
bsd-3-clause
| 456
| 0
| 6
| 74
| 57
| 31
| 26
| 5
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
module Stack.Build.ExecuteSpec (main, spec) where
import Stack.Prelude
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = return ()
|
MichielDerhaeg/stack
|
src/test/Stack/Build/ExecuteSpec.hs
|
bsd-3-clause
| 189
| 0
| 6
| 31
| 59
| 34
| 25
| 8
| 1
|
{-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses,
UndecidableInstances, PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Combo
-- Copyright : (c) David Roundy <droundy@darcs.net>
-- License : BSD-style (see LICENSE)
--
-- Maintainer : none
-- Stability : unstable
-- Portability : unportable
--
-- A layout that combines multiple layouts.
--
-----------------------------------------------------------------------------
module XMonad.Layout.Combo (
-- * Usage
-- $usage
combineTwo,
CombineTwo
) where
import Data.List ( delete, intersect, (\\) )
import Data.Maybe ( isJust )
import XMonad hiding (focus)
import XMonad.StackSet ( integrate', Workspace (..), Stack(..) )
import XMonad.Layout.WindowNavigation ( MoveWindowToWindow(..) )
import qualified XMonad.StackSet as W ( differentiate )
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Layout.Combo
--
-- and add something like
--
-- > combineTwo (TwoPane 0.03 0.5) (tabbed shrinkText defaultTConf) (tabbed shrinkText defaultTConf)
--
-- to your layouts.
--
-- For more detailed instructions on editing the layoutHook see:
--
-- "XMonad.Doc.Extending#Editing_the_layout_hook"
--
-- combineTwo is a new simple layout combinator. It allows the
-- combination of two layouts using a third to split the screen
-- between the two, but has the advantage of allowing you to
-- dynamically adjust the layout, in terms of the number of windows in
-- each sublayout. To do this, use "XMonad.Layout.WindowNavigation",
-- and add the following key bindings (or something similar):
--
-- > , ((modm .|. controlMask .|. shiftMask, xK_Right), sendMessage $ Move R)
-- > , ((modm .|. controlMask .|. shiftMask, xK_Left ), sendMessage $ Move L)
-- > , ((modm .|. controlMask .|. shiftMask, xK_Up ), sendMessage $ Move U)
-- > , ((modm .|. controlMask .|. shiftMask, xK_Down ), sendMessage $ Move D)
--
-- For detailed instruction on editing the key binding see
-- "XMonad.Doc.Extending#Editing_key_bindings".
--
-- These bindings will move a window into the sublayout that is
-- up\/down\/left\/right of its current position. Note that there is some
-- weirdness in combineTwo, in that the mod-tab focus order is not very closely
-- related to the layout order. This is because we're forced to keep track of
-- the window positions separately, and this is ugly. If you don't like this,
-- lobby for hierarchical stacks in core xmonad or go reimplement the core of
-- xmonad yourself.
data CombineTwo l l1 l2 a = C2 [a] [a] l (l1 a) (l2 a)
deriving (Read, Show)
combineTwo :: (Read a, Eq a, LayoutClass super (), LayoutClass l1 a, LayoutClass l2 a) =>
super () -> l1 a -> l2 a -> CombineTwo (super ()) l1 l2 a
combineTwo = C2 [] []
instance (LayoutClass l (), LayoutClass l1 a, LayoutClass l2 a, Read a, Show a, Eq a, Typeable a)
=> LayoutClass (CombineTwo (l ()) l1 l2) a where
runLayout (Workspace _ (C2 f w2 super l1 l2) s) rinput = arrange (integrate' s)
where arrange [] = do l1' <- maybe l1 id `fmap` handleMessage l1 (SomeMessage ReleaseResources)
l2' <- maybe l2 id `fmap` handleMessage l2 (SomeMessage ReleaseResources)
super' <- maybe super id `fmap`
handleMessage super (SomeMessage ReleaseResources)
return ([], Just $ C2 [] [] super' l1' l2')
arrange [w] = do l1' <- maybe l1 id `fmap` handleMessage l1 (SomeMessage ReleaseResources)
l2' <- maybe l2 id `fmap` handleMessage l2 (SomeMessage ReleaseResources)
super' <- maybe super id `fmap`
handleMessage super (SomeMessage ReleaseResources)
return ([(w,rinput)], Just $ C2 [w] [w] super' l1' l2')
arrange origws =
do let w2' = case origws `intersect` w2 of [] -> [head origws]
[x] -> [x]
x -> case origws \\ x of
[] -> init x
_ -> x
superstack = Stack { focus=(), up=[], down=[()] }
s1 = differentiate f' (origws \\ w2')
s2 = differentiate f' w2'
f' = case s of (Just s') -> focus s':delete (focus s') f
Nothing -> f
([((),r1),((),r2)], msuper') <- runLayout (Workspace "" super (Just superstack)) rinput
(wrs1, ml1') <- runLayout (Workspace "" l1 s1) r1
(wrs2, ml2') <- runLayout (Workspace "" l2 s2) r2
return (wrs1++wrs2, Just $ C2 f' w2'
(maybe super id msuper') (maybe l1 id ml1') (maybe l2 id ml2'))
handleMessage (C2 f ws2 super l1 l2) m
| Just (MoveWindowToWindow w1 w2) <- fromMessage m,
w1 `notElem` ws2,
w2 `elem` ws2 = do l1' <- maybe l1 id `fmap` handleMessage l1 m
l2' <- maybe l2 id `fmap` handleMessage l2 m
return $ Just $ C2 f (w1:ws2) super l1' l2'
| Just (MoveWindowToWindow w1 w2) <- fromMessage m,
w1 `elem` ws2,
w2 `notElem` ws2 = do l1' <- maybe l1 id `fmap` handleMessage l1 m
l2' <- maybe l2 id `fmap` handleMessage l2 m
let ws2' = case delete w1 ws2 of [] -> [w2]
x -> x
return $ Just $ C2 f ws2' super l1' l2'
| otherwise = do ml1' <- broadcastPrivate m [l1]
ml2' <- broadcastPrivate m [l2]
msuper' <- broadcastPrivate m [super]
if isJust msuper' || isJust ml1' || isJust ml2'
then return $ Just $ C2 f ws2
(maybe super head msuper')
(maybe l1 head ml1')
(maybe l2 head ml2')
else return Nothing
description (C2 _ _ super l1 l2) = "combining "++ description l1 ++" and "++
description l2 ++" with "++ description super
differentiate :: Eq q => [q] -> [q] -> Maybe (Stack q)
differentiate (z:zs) xs | z `elem` xs = Just $ Stack { focus=z
, up = reverse $ takeWhile (/=z) xs
, down = tail $ dropWhile (/=z) xs }
| otherwise = differentiate zs xs
differentiate [] xs = W.differentiate xs
broadcastPrivate :: LayoutClass l b => SomeMessage -> [l b] -> X (Maybe [l b])
broadcastPrivate a ol = do nml <- mapM f ol
if any isJust nml
then return $ Just $ zipWith ((flip maybe) id) ol nml
else return Nothing
where f l = handleMessage l a `catchX` return Nothing
|
pjones/xmonad-test
|
vendor/xmonad-contrib/XMonad/Layout/Combo.hs
|
bsd-2-clause
| 7,653
| 0
| 19
| 2,878
| 1,824
| 957
| 867
| 81
| 2
|
module Plugin.Dummy.DocAssocs (docAssocs) where
import qualified Data.Map as M
import qualified Data.ByteString.Char8 as P
-- pack all these strings
base :: P.ByteString
base = P.pack "base"
stm :: P.ByteString
stm = P.pack "stm"
mtl :: P.ByteString
mtl = P.pack "mtl"
fgl :: P.ByteString
fgl = P.pack "fgl"
qc :: P.ByteString
qc = P.pack "QuickCheck"
hunit :: P.ByteString
hunit = P.pack "bytestring"
parsec :: P.ByteString
parsec = P.pack "parsec"
unix :: P.ByteString
unix = P.pack "unix"
haskeline :: P.ByteString
haskeline = P.pack "haskeline"
network :: P.ByteString
network = P.pack "network"
th :: P.ByteString
th = P.pack "template-haskell"
hs :: P.ByteString
hs = P.pack "1"
cabal :: P.ByteString
cabal = P.pack "Cabal"
hgl :: P.ByteString
hgl = P.pack "3"
glut :: P.ByteString
glut = P.pack "GLUT"
x11 :: P.ByteString
x11 = P.pack "3"
opengl :: P.ByteString
opengl = P.pack "OpenGL"
containers :: P.ByteString
containers = P.pack "containers"
docAssocs :: M.Map P.ByteString P.ByteString
docAssocs = {-# SCC "Dummy.DocAssocs" #-} M.fromList [
(P.pack "Control.Arrow", base),
(P.pack "Control.Concurrent", base),
(P.pack "Control.Concurrent.Chan", base),
(P.pack "Control.Concurrent.MVar", base),
(P.pack "Control.Concurrent.QSem", base),
(P.pack "Control.Concurrent.QSemN", base),
(P.pack "Control.Concurrent.STM", stm),
(P.pack "Control.Concurrent.STM.TChan", stm),
(P.pack "Control.Concurrent.STM.TMVar", stm),
(P.pack "Control.Concurrent.STM.TVar", stm),
(P.pack "Control.Concurrent.SampleVar", base),
(P.pack "Control.Exception", base),
(P.pack "Control.Monad", base),
(P.pack "Control.Monad.Cont", mtl),
(P.pack "Control.Monad.Error", mtl),
(P.pack "Control.Monad.Fix", base),
(P.pack "Control.Monad.Identity", mtl),
(P.pack "Control.Monad.List", mtl),
(P.pack "Control.Monad.RWS", mtl),
(P.pack "Control.Monad.Reader", mtl),
(P.pack "Control.Monad.ST", base),
(P.pack "Control.Monad.ST.Lazy", base),
(P.pack "Control.Monad.ST.Strict", base),
(P.pack "Control.Monad.State", mtl),
(P.pack "Control.Monad.Trans", mtl),
(P.pack "Control.Monad.Writer", mtl),
(P.pack "Control.Parallel", base),
(P.pack "Control.Parallel.Strategies", base),
(P.pack "Data.Array", base),
(P.pack "Data.Array.Diff", base),
(P.pack "Data.Array.IArray", base),
(P.pack "Data.Array.IO", base),
(P.pack "Data.Array.MArray", base),
(P.pack "Data.Array.ST", base),
(P.pack "Data.Array.Storable", base),
(P.pack "Data.Array.Unboxed", base),
(P.pack "Data.Bits", base),
(P.pack "Data.Bool", base),
(P.pack "Data.Char", base),
(P.pack "Data.Complex", base),
(P.pack "Data.Dynamic", base),
(P.pack "Data.Either", base),
(P.pack "Data.FiniteMap", base),
(P.pack "Data.FunctorM", base),
(P.pack "Data.Generics", base),
(P.pack "Data.Generics.Aliases", base),
(P.pack "Data.Generics.Basics", base),
(P.pack "Data.Generics.Instances", base),
(P.pack "Data.Generics.Schemes", base),
(P.pack "Data.Generics.Text", base),
(P.pack "Data.Generics.Twins", base),
(P.pack "Data.Graph", containers),
(P.pack "Data.Graph.Inductive", fgl),
(P.pack "Data.Graph.Inductive.Basic", fgl),
(P.pack "Data.Graph.Inductive.Example", fgl),
(P.pack "Data.Graph.Inductive.Graph", fgl),
(P.pack "Data.Graph.Inductive.Graphviz", fgl),
(P.pack "Data.Graph.Inductive.Internal.FiniteMap", fgl),
(P.pack "Data.Graph.Inductive.Internal.Heap", fgl),
(P.pack "Data.Graph.Inductive.Internal.Queue", fgl),
(P.pack "Data.Graph.Inductive.Internal.RootPath", fgl),
(P.pack "Data.Graph.Inductive.Internal.Thread", fgl),
(P.pack "Data.Graph.Inductive.Monad", fgl),
(P.pack "Data.Graph.Inductive.Monad.IOArray", fgl),
(P.pack "Data.Graph.Inductive.NodeMap", fgl),
(P.pack "Data.Graph.Inductive.Query", fgl),
(P.pack "Data.Graph.Inductive.Query.ArtPoint", fgl),
(P.pack "Data.Graph.Inductive.Query.BCC", fgl),
(P.pack "Data.Graph.Inductive.Query.BFS", fgl),
(P.pack "Data.Graph.Inductive.Query.DFS", fgl),
(P.pack "Data.Graph.Inductive.Query.Dominators", fgl),
(P.pack "Data.Graph.Inductive.Query.GVD", fgl),
(P.pack "Data.Graph.Inductive.Query.Indep", fgl),
(P.pack "Data.Graph.Inductive.Query.MST", fgl),
(P.pack "Data.Graph.Inductive.Query.MaxFlow", fgl),
(P.pack "Data.Graph.Inductive.Query.MaxFlow2", fgl),
(P.pack "Data.Graph.Inductive.Query.Monad", fgl),
(P.pack "Data.Graph.Inductive.Query.SP", fgl),
(P.pack "Data.Graph.Inductive.Query.TransClos", fgl),
(P.pack "Data.Graph.Inductive.Tree", fgl),
(P.pack "Data.HashTable", base),
(P.pack "Data.IORef", base),
(P.pack "Data.Int", base),
(P.pack "Data.IntMap", containers),
(P.pack "Data.IntSet", containers),
(P.pack "Data.Ix", base),
(P.pack "Data.List", base),
(P.pack "Data.Map", containers),
(P.pack "Data.Maybe", base),
(P.pack "Data.Monoid", base),
(P.pack "Data.PackedString", base),
(P.pack "Data.Queue", base),
(P.pack "Data.Ratio", base),
(P.pack "Data.STRef", base),
(P.pack "Data.STRef.Lazy", base),
(P.pack "Data.STRef.Strict", base),
(P.pack "Data.Sequence", containers),
(P.pack "Data.Set", containers),
(P.pack "Data.Tree", containers),
(P.pack "Data.Tuple", base),
(P.pack "Data.Typeable", base),
(P.pack "Data.Unique", base),
(P.pack "Data.Version", base),
(P.pack "Data.Word", base),
(P.pack "Debug.QuickCheck", qc),
(P.pack "Debug.QuickCheck.Batch", qc),
(P.pack "Debug.QuickCheck.Poly", qc),
(P.pack "Debug.QuickCheck.Utils", qc),
(P.pack "Debug.Trace", base),
(P.pack "Distribution.Compat.Directory", cabal),
(P.pack "Distribution.Compat.Exception", cabal),
(P.pack "Distribution.Compat.FilePath", cabal),
(P.pack "Distribution.Compat.RawSystem", cabal),
(P.pack "Distribution.Compat.ReadP", cabal),
(P.pack "Distribution.Extension", cabal),
(P.pack "Distribution.GetOpt", cabal),
(P.pack "Distribution.InstalledPackageInfo", cabal),
(P.pack "Distribution.License", cabal),
(P.pack "Distribution.Make", cabal),
(P.pack "Distribution.Package", cabal),
(P.pack "Distribution.PackageDescription", cabal),
(P.pack "Distribution.PreProcess", cabal),
(P.pack "Distribution.PreProcess.Unlit", cabal),
(P.pack "Distribution.Setup", cabal),
(P.pack "Distribution.Simple", cabal),
(P.pack "Distribution.Simple.Build", cabal),
(P.pack "Distribution.Simple.Configure", cabal),
(P.pack "Distribution.Simple.GHCPackageConfig", cabal),
(P.pack "Distribution.Simple.Install", cabal),
(P.pack "Distribution.Simple.LocalBuildInfo", cabal),
(P.pack "Distribution.Simple.Register", cabal),
(P.pack "Distribution.Simple.SrcDist", cabal),
(P.pack "Distribution.Simple.Utils", cabal),
(P.pack "Distribution.Version", cabal),
(P.pack "Foreign", base),
(P.pack "Foreign.C", base),
(P.pack "Foreign.C.Error", base),
(P.pack "Foreign.C.String", base),
(P.pack "Foreign.C.Types", base),
(P.pack "Foreign.Concurrent", base),
(P.pack "Foreign.ForeignPtr", base),
(P.pack "Foreign.Marshal", base),
(P.pack "Foreign.Marshal.Alloc", base),
(P.pack "Foreign.Marshal.Array", base),
(P.pack "Foreign.Marshal.Error", base),
(P.pack "Foreign.Marshal.Pool", base),
(P.pack "Foreign.Marshal.Utils", base),
(P.pack "Foreign.Ptr", base),
(P.pack "Foreign.StablePtr", base),
(P.pack "Foreign.Storable", base),
(P.pack "GHC.Conc", base),
(P.pack "GHC.ConsoleHandler", base),
(P.pack "GHC.Dotnet", base),
(P.pack "GHC.Exts", base),
(P.pack "Graphics.HGL", hgl),
(P.pack "Graphics.HGL.Core", hgl),
(P.pack "Graphics.HGL.Draw", hgl),
(P.pack "Graphics.HGL.Draw.Brush", hgl),
(P.pack "Graphics.HGL.Draw.Font", hgl),
(P.pack "Graphics.HGL.Draw.Monad", hgl),
(P.pack "Graphics.HGL.Draw.Pen", hgl),
(P.pack "Graphics.HGL.Draw.Picture", hgl),
(P.pack "Graphics.HGL.Draw.Region", hgl),
(P.pack "Graphics.HGL.Draw.Text", hgl),
(P.pack "Graphics.HGL.Key", hgl),
(P.pack "Graphics.HGL.Run", hgl),
(P.pack "Graphics.HGL.Units", hgl),
(P.pack "Graphics.HGL.Utils", hgl),
(P.pack "Graphics.HGL.Window", hgl),
(P.pack "Graphics.Rendering.OpenGL", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Antialiasing", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.BasicTypes", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.BeginEnd", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Bitmaps", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.BufferObjects", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Clipping", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.ColorSum", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Colors", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.CoordTrans", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.DisplayLists", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Evaluators", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Feedback", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.FlushFinish", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Fog", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Framebuffer", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Hints", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.LineSegments", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.PerFragment", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.PixelRectangles", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.PixelRectangles.ColorTable", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.PixelRectangles.Convolution", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.PixelRectangles.Histogram", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.PixelRectangles.Minmax", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.PixelRectangles.PixelMap", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.PixelRectangles.PixelStorage", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.PixelRectangles.PixelTransfer", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.PixelRectangles.Rasterization", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Points", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Polygons", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.RasterPos", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.ReadCopyPixels", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Rectangles", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.SavingState", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Selection", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.StateVar", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.StringQueries", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Texturing", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Texturing.Application", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Texturing.Environments", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Texturing.Objects", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Texturing.Parameters", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Texturing.Queries", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.Texturing.Specification", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.VertexArrays", opengl),
(P.pack "Graphics.Rendering.OpenGL.GL.VertexSpec", opengl),
(P.pack "Graphics.Rendering.OpenGL.GLU", opengl),
(P.pack "Graphics.Rendering.OpenGL.GLU.Errors", opengl),
(P.pack "Graphics.Rendering.OpenGL.GLU.Initialization", opengl),
(P.pack "Graphics.Rendering.OpenGL.GLU.Matrix", opengl),
(P.pack "Graphics.Rendering.OpenGL.GLU.Mipmapping", opengl),
(P.pack "Graphics.Rendering.OpenGL.GLU.NURBS", opengl),
(P.pack "Graphics.Rendering.OpenGL.GLU.Quadrics", opengl),
(P.pack "Graphics.Rendering.OpenGL.GLU.Tessellation", opengl),
(P.pack "Graphics.SOE", hgl),
(P.pack "Graphics.UI.GLUT", glut),
(P.pack "Graphics.UI.GLUT.Begin", glut),
(P.pack "Graphics.UI.GLUT.Callbacks", glut),
(P.pack "Graphics.UI.GLUT.Callbacks.Global", glut),
(P.pack "Graphics.UI.GLUT.Callbacks.Window", glut),
(P.pack "Graphics.UI.GLUT.Colormap", glut),
(P.pack "Graphics.UI.GLUT.Debugging", glut),
(P.pack "Graphics.UI.GLUT.DeviceControl", glut),
(P.pack "Graphics.UI.GLUT.Fonts", glut),
(P.pack "Graphics.UI.GLUT.GameMode", glut),
(P.pack "Graphics.UI.GLUT.Initialization", glut),
(P.pack "Graphics.UI.GLUT.Menu", glut),
(P.pack "Graphics.UI.GLUT.Objects", glut),
(P.pack "Graphics.UI.GLUT.Overlay", glut),
(P.pack "Graphics.UI.GLUT.State", glut),
(P.pack "Graphics.UI.GLUT.Window", glut),
(P.pack "Graphics.X11.Types", x11),
(P.pack "Graphics.X11.Xlib", x11),
(P.pack "Graphics.X11.Xlib.Atom", x11),
(P.pack "Graphics.X11.Xlib.Color", x11),
(P.pack "Graphics.X11.Xlib.Context", x11),
(P.pack "Graphics.X11.Xlib.Display", x11),
(P.pack "Graphics.X11.Xlib.Event", x11),
(P.pack "Graphics.X11.Xlib.Font", x11),
(P.pack "Graphics.X11.Xlib.Misc", x11),
(P.pack "Graphics.X11.Xlib.Region", x11),
(P.pack "Graphics.X11.Xlib.Screen", x11),
(P.pack "Graphics.X11.Xlib.Types", x11),
(P.pack "Graphics.X11.Xlib.Window", x11),
(P.pack "Language.Haskell.Parser", hs),
(P.pack "Language.Haskell.Pretty", hs),
(P.pack "Language.Haskell.Syntax", hs),
(P.pack "Language.Haskell.TH", th),
(P.pack "Language.Haskell.TH.Lib", th),
(P.pack "Language.Haskell.TH.Ppr", th),
(P.pack "Language.Haskell.TH.PprLib", th),
(P.pack "Language.Haskell.TH.Syntax", th),
(P.pack "Network", network),
(P.pack "Network.BSD", network),
(P.pack "Network.CGI", network),
(P.pack "Network.Socket", network),
(P.pack "Network.URI", network),
(P.pack "Numeric", base),
(P.pack "Prelude", base),
(P.pack "System.CPUTime", base),
(P.pack "System.Cmd", base),
(P.pack "System.Console.GetOpt", base),
(P.pack "System.Console.Haskeline", haskeline),
(P.pack "System.Directory", base),
(P.pack "System.Environment", base),
(P.pack "System.Exit", base),
(P.pack "System.IO", base),
(P.pack "System.IO.Error", base),
(P.pack "System.IO.Unsafe", base),
(P.pack "System.Info", base),
(P.pack "System.Locale", base),
(P.pack "System.Mem", base),
(P.pack "System.Mem.StableName", base),
(P.pack "System.Mem.Weak", base),
(P.pack "System.Posix", unix),
(P.pack "System.Posix.Directory", unix),
(P.pack "System.Posix.DynamicLinker", unix),
(P.pack "System.Posix.DynamicLinker.Module", unix),
(P.pack "System.Posix.DynamicLinker.Prim", unix),
(P.pack "System.Posix.Env", unix),
(P.pack "System.Posix.Error", unix),
(P.pack "System.Posix.Files", unix),
(P.pack "System.Posix.IO", unix),
(P.pack "System.Posix.Process", unix),
(P.pack "System.Posix.Resource", unix),
(P.pack "System.Posix.Signals", base),
(P.pack "System.Posix.Signals.Exts", unix),
(P.pack "System.Posix.Temp", unix),
(P.pack "System.Posix.Terminal", unix),
(P.pack "System.Posix.Time", unix),
(P.pack "System.Posix.Types", base),
(P.pack "System.Posix.Unistd", unix),
(P.pack "System.Posix.User", unix),
(P.pack "System.Process", base),
(P.pack "System.Random", base),
(P.pack "System.Time", base),
(P.pack "Test.HUnit", hunit),
(P.pack "Test.HUnit.Base", hunit),
(P.pack "Test.HUnit.Lang", hunit),
(P.pack "Test.HUnit.Terminal", hunit),
(P.pack "Test.HUnit.Text", hunit),
(P.pack "Test.QuickCheck", qc),
(P.pack "Test.QuickCheck.Batch", qc),
(P.pack "Test.QuickCheck.Poly", qc),
(P.pack "Test.QuickCheck.Utils", qc),
(P.pack "Text.Html", base),
(P.pack "Text.Html.BlockTable", base),
(P.pack "Text.ParserCombinators.Parsec", parsec),
(P.pack "Text.ParserCombinators.Parsec.Char", parsec),
(P.pack "Text.ParserCombinators.Parsec.Combinator", parsec),
(P.pack "Text.ParserCombinators.Parsec.Error", parsec),
(P.pack "Text.ParserCombinators.Parsec.Expr", parsec),
(P.pack "Text.ParserCombinators.Parsec.Language", parsec),
(P.pack "Text.ParserCombinators.Parsec.Perm", parsec),
(P.pack "Text.ParserCombinators.Parsec.Pos", parsec),
(P.pack "Text.ParserCombinators.Parsec.Prim", parsec),
(P.pack "Text.ParserCombinators.Parsec.Token", parsec),
(P.pack "Text.ParserCombinators.ReadP", base),
(P.pack "Text.ParserCombinators.ReadPrec", base),
(P.pack "Text.PrettyPrint", base),
(P.pack "Text.PrettyPrint.HughesPJ", base),
(P.pack "Text.Printf", base),
(P.pack "Text.Read", base),
(P.pack "Text.Read.Lex", base),
(P.pack "Text.Regex", base),
(P.pack "Text.Regex.Posix", base),
(P.pack "Text.Show", base),
(P.pack "Text.Show.Functions", base)]
|
jwiegley/lambdabot-1
|
Plugin/Dummy/DocAssocs.hs
|
mit
| 16,329
| 0
| 9
| 1,896
| 5,095
| 2,902
| 2,193
| 379
| 1
|
{-# LANGUAGE FlexibleInstances #-}
module CovenantEyes.Nestify
( nestify
) where
import Data.Functor ((<$>))
import Data.List (foldl', isInfixOf)
import CovenantEyes.Nestify.Stack (Stack, emptyStack, pop, popWhile, push, size, toList)
class Nestifiable a where
isBegin :: a -> Bool
isEnd :: a -> Bool
scope :: a -> String
entry :: a -> String
build :: a -> String -> a
instance Nestifiable [String] where
isBegin xs = any (`isInfixOf` entry xs) ["Begin", "Enter"]
isEnd xs = any (`isInfixOf` entry xs) ["End", "Exit"]
scope = last . init -- scope name is the penultimate column
entry = last -- message is the last column
build x y = x ++ [y]
(<++>) :: Nestifiable a => (Stack String, [a]) -> a -> (Stack String, [a])
(stack, result) <++> x = (newStack, thisLine : result)
where
isBegin' = isBegin x
isEnd' = isEnd x
newStack
| isBegin' = scope x `push` stack
| isEnd' = pop (popWhile (/= scope x) stack) -- jump over unbalanced scopes if needed
| otherwise = stack
thisLine
| isEnd' = indented (size newStack)
| otherwise = indented (size stack)
where indented d = build x (replicate d ' ' ++ entry x)
nestify :: Nestifiable a => [a] -> ([a], [String])
nestify = postProcess . foldl' (<++>) (emptyStack, [])
where
postProcess (stack, result) = (reverse result, toList stack)
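-- A hedged usage sketch (not part of the original module; exact spacing
-- depends on the Stack implementation in CovenantEyes.Nestify.Stack). Each
-- row is a list of columns with the scope name second-to-last and the
-- message last, e.g.
--
--   nestify [ ["10:00", "Session", "Begin work"]
--           , ["10:05", "Session", "saved file"]
--           , ["10:10", "Session", "End work"] ]
--
-- yields the rows in their original order, each extended with a copy of its
-- message indented by the current nesting depth, together with any scopes
-- that were never closed.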
|
3noch/nestify
|
src/CovenantEyes/Nestify.hs
|
mit
| 1,452
| 0
| 12
| 396
| 523
| 287
| 236
| 33
| 1
|
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module Data.Substitution
( Subst(..), Entry(..)
) where
import Data.Maybe
import Data.Mapping.Partial
import Text.PrettyPrint.ANSI.Leijen
data Entry n t = n := t
| n :=? !()
deriving (Show, Read, Eq)
instance (Pretty n, Pretty t) => Pretty (Entry n t) where
pretty (n := t ) = pretty n <+> text ":=" <+> pretty t
pretty (n :=? ()) = pretty n <+> text ":=?"
newtype Subst n t = Subst {
bindings :: [Entry n t]
} deriving (Show, Read, Eq)
instance (Pretty n, Pretty t) => Pretty (Subst n t) where
pretty = vcat . map pretty . bindings
instance Eq n => PartialMap n (Subst n) where
sempty = Subst []
apply (Subst cxt) name = lookup name $ mapMaybe toPair cxt
where toPair (a := b) = Just (a, b)
toPair _ = Nothing
bind n t (Subst xs) = Subst $ n := t : xs
unbind n (Subst xs) = Subst $ n :=? () : xs
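-- A hedged usage sketch (not part of the original module), assuming the
-- 'PartialMap' methods shown in the instance above are exported by
-- Data.Mapping.Partial:
--
-- >>> apply (bind "x" True (sempty :: Subst String Bool)) "x"
-- Just True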
|
pxqr/unification
|
Data/Substitution.hs
|
mit
| 950
| 0
| 10
| 262
| 409
| 217
| 192
| 26
| 0
|
module H.Run where
import H.Common
main :: Tool
main _ args = do
let (_options, _switches, _files) = getOpts args [
]
return ()
|
hydrogen-tools/hydrogen-cli
|
src/H/Run.hs
|
mit
| 159
| 0
| 11
| 55
| 60
| 31
| 29
| 6
| 1
|
{-|
Module : Repl.Commands
Description : Facilities for commands available in the REPL.
Copyright : (c) Michael Lopez, 2017
License : MIT
Maintainer : m-lopez (github)
Stability : unstable
Portability : non-portable
-}
module Repl.Commands(
executeCommand
) where
import Repl.State ( CompilerState(..) )
import Context ( Ctx(..), Binding(BVar) )
import Expressions ( QType, ExprName, Expr, ExprName(ExprName) )
import Util.DebugOr ( DebugOr, showUnderlying )
import Data.List ( dropWhile, dropWhileEnd, stripPrefix )
import Data.Char ( isSpace )
import Ast.Parser ( Ast(..), replParse )
import Elaboration ( synthExpr )
import Data.Maybe ( fromMaybe )
-- | Unrecognized command message.
unrecognizedCommand :: String -> String
unrecognizedCommand cmd = "unrecognized command `" ++ cmd ++ "`; try `:help`"
-- | Removes preceding and trailing whitespace.
dropEnclosingWhiteSpace :: String -> String
dropEnclosingWhiteSpace s = dropWhileEnd isSpace $ dropWhile isSpace s
-- | Prints a parse tree.
parseTree :: String -> String
parseTree s = showUnderlying $ replParse s
-- | Prints the type of an expression.
-- FIXME: We have demonstrated the need to print expressions in a debug and
-- source code mode.
typeSynth :: CompilerState -> String -> DebugOr (Expr, QType)
typeSynth state s = do
ast <- replParse s
(e, t) <- synthExpr (getCtx state) ast
return (e, t)
-- | Prints all of the commands.
helpCmd :: CompilerState -> String -> String
helpCmd _ arg = if arg == ""
then helpText
else "the command `help` does not take any arguments"
-- | Elaborates an expression.
elabCmd :: CompilerState -> String -> String
elabCmd state arg = showUnderlying $ fst <$> typeSynth state arg
-- | Print the parse tree of code.
astCmd :: CompilerState -> String -> String
astCmd state = parseTree
-- | Print the type of an expression.
tCmd :: CompilerState -> String -> String
tCmd state arg = showUnderlying $ snd <$> typeSynth state arg
-- | Print the current bindings.
bindingsCmd :: CompilerState -> String -> String
bindingsCmd (CompilerState (Ctx bindings)) arg = if arg == ""
then showContext bindings
else "the command `bindings` does not take any arguments"
where
showContext ctx' = case ctx' of
BVar (ExprName x) t _ : ctx'' -> x ++ ": " ++ show t ++ "\n" ++ showContext ctx''
[] -> ""
-- | A mock command for exiting the terminal used for the description of
-- commands.
mockQuitCmd :: Command
mockQuitCmd = Command "quit" emptyCmd "" "Exit the terminal."
where
emptyCmd x y = ""
-- | The help text.
-- FIXME: Break this up and put it in another file!
helpText :: String
helpText = header ++ "\n" ++ concat descLines
where
header = "Here is a list of commands."
allCommands = commands ++ [ mockQuitCmd ]
indent = " "
argColLength = foldl max 0 $ map (\x -> ((length $ getExampleArgs x) + (length $ getName x))) allCommands
printCmdLine (Command name _ args desc) = indent ++ ":" ++ name ++ " " ++ args ++ replicate (argColLength - (length args + length name) + 1) ' ' ++ desc ++ "\n"
descLines = map printCmdLine allCommands
-- | A Toaster REPL command. There is always the implicit quit command.
data Command = Command {
getName :: String,
getProcedure :: CompilerState -> String -> String,
getExampleArgs :: String,
getDescription :: String
}
-- | A command is a string and a function that parses the preceding arguments.
-- FIXME: Move this to another file.
commands :: [Command]
commands = [
Command "ast" astCmd "<e>"
"Prints the parse tree of an expression `e`.",
Command "elab" elabCmd "<e>"
"Prints the elaborated for of the expression `e`.",
Command "help" helpCmd ""
"Print the command list.",
Command "bindings" bindingsCmd ""
"Prints all bindings in the current context",
Command "t" tCmd "<e>"
"Prints the type of an expression `e`." ]
-- | Matches the input command `cmd` with one of the commands.
matchCmd :: CompilerState -> String -> Maybe String
matchCmd state cmd = matchCmdRec cmd commands
where
matchCmdRec :: String -> [Command] -> Maybe String
matchCmdRec cmd [] = Nothing
matchCmdRec cmd (c:cs) = case stripPrefix (getName c) cmd of
Just arg -> Just $ func state justArg
where
func = getProcedure c
justArg = dropEnclosingWhiteSpace arg
Nothing -> matchCmdRec cmd cs
-- | Executes a command.
executeCommand :: CompilerState -> String -> String
executeCommand state cmd = fromMaybe (unrecognizedCommand cmd) (matchCmd state cmd)
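-- A hedged usage sketch (not part of the original module); 'someState' is a
-- placeholder for any 'CompilerState' value:
--
-- >>> executeCommand someState "frobnicate"
-- "unrecognized command `frobnicate`; try `:help`"
--
-- A recognised prefix dispatches to its handler, e.g. "t 1" strips the "t"
-- command name and runs 'tCmd' on the trimmed argument "1".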
|
m-lopez/jack
|
src/Repl/Commands.hs
|
mit
| 4,548
| 0
| 16
| 935
| 1,077
| 578
| 499
| 78
| 3
|
import IpAddressSpec
main :: IO ()
main = do
ipAddressSpecs
|
lloydmeta/ip-parsing-hs
|
test/Spec.hs
|
mit
| 74
| 1
| 6
| 24
| 25
| 11
| 14
| 4
| 1
|
module Main (main) where
import Poker
main = do
let cards = HandCards (Card A Clubs) (Card Q Clubs) (Card J Clubs) (Card Ten Clubs) (Card K Clubs)
print $ identifyHand cards
|
tomgi/poker_hands
|
main.hs
|
mit
| 190
| 0
| 12
| 48
| 87
| 44
| 43
| 5
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Writings (writingsRoutes) where
import Data.Monoid ((<>))
import Hakyll
import Data.List (sort)
import Control.Applicative (empty)
import System.FilePath
import Data.Time.Clock (UTCTime)
import Data.Time.Locale.Compat (defaultTimeLocale)
import Text.XML.HXT.DOM.XmlNode
import Text.XML.HXT.DOM.TypeDefs
import Text.XML.HXT.DOM.ShowXml (xshow)
data NavDirection = Prev | Next
writingsRoutes :: Rules ()
writingsRoutes = do
match "writings/**.png" $ do
route idRoute
compile copyFileCompiler
match "writings/**.jpg" $ do
route idRoute
compile copyFileCompiler
match "writings/**" $ do
route $ setExtension "html"
compile $ pandocCompiler
>>= loadAndApplyTemplate "templates/writing.html" ctx
ctx :: Context String
ctx =
field "postNav" postNav <>
defaultContext
-- generate next/prev links for the item's siblings.
postNav :: Item String -> Compiler String
postNav post = do
let ident = itemIdentifier post
r <- getRoute ident
case r of
Nothing -> pure ""
Just path -> do
let siblingsGlob = fromGlob $ takeDirectory path </> "**"
sortedSiblings <- getMatches siblingsGlob >>= sortByDate
let next = itemAfter sortedSiblings ident
let prev = itemBefore sortedSiblings ident
nextHtml <- maybe (return "") (toNavLink Next) next
prevHtml <- maybe (return "") (toNavLink Prev) prev
return $ concat [prevHtml, " ", nextHtml]
toNavLink :: NavDirection -> Identifier -> Compiler String
toNavLink dir ident = do
url <- fmap toUrl <$> getRoute ident
title <- getMetadataField ident "linkTitle"
let link = do
url' <- url
title' <- title
case dir of
Next -> return $ ln "writing-next-link" url' (title' <> " →")
Prev -> return $ ln "writing-prev-link" url' ("← " <> title')
maybe empty pure link
ln :: String -> -- classname
String -> -- href
String -> -- title
String
ln klass href title = xshow [mkElement (mkName "a") [ mkAttr (mkName "class") [mkText klass]
, mkAttr (mkName "href") [mkText href]
]
[ mkText title] ]
sortByDate :: [Identifier] -> Compiler [Identifier]
sortByDate xs = map snd . sort <$> mapM f xs
where
f :: Identifier -> Compiler (UTCTime, Identifier)
f ident = do
t <- getItemUTC defaultTimeLocale ident
return (t, ident)
itemAfter :: Eq a => [a] -> a -> Maybe a
itemAfter xs x = lookup x $ zip xs (tail xs)
itemBefore :: Eq a => [a] -> a -> Maybe a
itemBefore xs x = lookup x $ zip (tail xs) xs
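-- A hedged usage sketch (not part of the original module): given identifiers
-- sorted by date, 'itemAfter' and 'itemBefore' pick the neighbouring posts:
--
-- >>> itemAfter [1, 2, 3] 2
-- Just 3
--
-- >>> itemBefore [1, 2, 3] 1
-- Nothing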
|
mjhoy/mjhoy.com
|
Writings.hs
|
mit
| 2,696
| 0
| 18
| 704
| 875
| 432
| 443
| 71
| 2
|
{-# OPTIONS_HADDOCK show-extensions #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE Safe #-}
{-|
Module : MPD.Commands.Query
Copyright : (c) Joachim Fasting, 2014
License : MIT
Maintainer : joachifm@fastmail.fm
Stability : unstable
Portability : unportable
Convenient syntax for database queries.
-}
module MPD.Commands.Query (
Query
, (=?)
#ifdef TEST
, queryTerms
#endif
) where
import MPD.Commands.Types
import MPD.Core.CommandArg (CommandArg(..))
import qualified Data.Text as T
{-|
A database query consisting of 0 or more terms.
@
Title =? "FooBar" <> Artist =? "BarFoo"
@
matches items with title "FooBar" and artist "BarFoo".
Use 'mempty' to create a query which matches anything.
-}
newtype Query = Query { queryTerms :: [(Metadata, T.Text)] }
instance Monoid Query where
mempty = Query []
Query a `mappend` Query b = Query (a `mappend` b)
instance CommandArg Query where
fromArg = T.unwords . map f . queryTerms
where f (m, s) = T.unwords [fromArg m, T.pack (show s)]
(=?) :: Metadata -> T.Text -> Query
m =? s = Query [(m, s)]
|
joachifm/nanompd
|
src/MPD/Commands/Query.hs
|
mit
| 1,074
| 0
| 12
| 211
| 236
| 138
| 98
| 18
| 1
|
{-# LANGUAGE OverloadedStrings, RecordWildCards, ViewPatterns, LambdaCase #-}
module TextureCache ( withTextureCache
, TextureCache
, TextureCache.fetchImage
, TextureCache.gatherCacheStats
-- Wrap TextureGrid exports
, debugDumpGrid
-- Re-exports from QuadTypes
, QuadUV(..)
) where
import qualified Graphics.Rendering.OpenGL as GL
import qualified Data.ByteString as B
import Control.Exception
import Control.Monad
import Text.Printf
import Data.IORef
import ImageCache
import qualified LRUBoundedMap as LBM
import Trace
import GLHelpers
import qualified TextureGrid as TG
import QuadTypes
-- OpenGL texture cache on top of the ImageCache module
data TextureCacheEntry = TETexture !GL.TextureObject -- 'Large' texture, stored in its own object
| TEGrid !TG.GridSlot -- 'Small' texture, TextureGrid reference
deriving (Eq)
data TextureCache = TextureCache
{ tcCacheEntries :: !(IORef (LBM.Map B.ByteString TextureCacheEntry))
, tcImageCache :: !ImageCache
, tcTexGrid :: !TG.TextureGrid
, tcUseTextureGrid :: !Bool
}
withTextureCache :: Int
-> Bool
-> Int
-> (Int, Int)
-> ImageCache
-> (TextureCache -> IO ())
-> IO ()
withTextureCache maxCacheEntries
tcUseTextureGrid
gridTexSize
(smallTexWdh, smallTexHgt)
tcImageCache
f = do
TG.withTextureGrid gridTexSize
1
(smallTexWdh, smallTexHgt)
GL.RGBA
GL.RGBA8
GL.UnsignedByte
TFMinMag
$ \tcTexGrid ->
bracket
( newIORef (LBM.empty maxCacheEntries) >>= \tcCacheEntries ->
return $ TextureCache { .. }
)
( \tc -> do
cacheEntries <- readIORef $ tcCacheEntries tc
case LBM.valid cacheEntries of
Just err -> traceS TLError $ "LRUBoundedMap: TextureCache:\n" ++ err
Nothing -> return ()
-- Shutdown
traceT TLInfo $ "Shutting down texture cache"
mapM_ (\case TETexture tex -> GL.deleteObjectName tex; _ -> return ())
. map snd . LBM.toList $ cacheEntries
)
f
-- Fetch an image from the texture cache, or forward the request to the image
-- cache in case we don't have it. We need the tick for the image cache (keep
-- track of retry times for failed fetches)
--
-- TODO: We should have a frame index that gets stored with each lookup in the
-- cache. This is to ensure that we never delete a texture queried for the
-- current frame. Just put those textures in a list and delete them on the
-- next request with a higher frame number
fetchImage :: TextureCache
-> Double
-> B.ByteString
-> IO (Maybe (GL.TextureObject, QuadUV))
fetchImage (TextureCache { .. }) tick uri = do
-- TODO: Add some exception safety to this function. It's somewhat tricky, as there
-- are several data structures being potentially updated (TextureCache, ImageCache
-- and TextureGrid) as well as resources being allocated
--
-- TODO: Might need to limit amount of texture uploads per-frame. We could simply return
-- Nothing after a certain amount of ms or MB
cacheEntries <- readIORef tcCacheEntries
case LBM.lookup uri cacheEntries of
-- Cache hit, texture
(newEntries, Just (TETexture tex)) -> do
writeCache newEntries
return $ Just (tex, QuadUVDefault)
-- Cache hit, grid slot
(newEntries, Just (TEGrid (TG.viewGridSlot -> TG.GridSlot tex _ _ uv))) -> do
writeCache newEntries
return $ Just (tex, uv)
-- Cache miss
(_, Nothing ) -> do
hicFetch <- ImageCache.fetchImage tcImageCache tick uri
case hicFetch of
Just (Fetched (ImageRes w h img)) -> do
-- Image cache hit. Small enough to insert into grid, or do we need to
-- allocate a texture?
entry <- if tcUseTextureGrid && TG.isGridSized tcTexGrid w h
then TEGrid <$> TG.insertImage tcTexGrid w h img
else TETexture <$> newTexture2D GL.RGBA
GL.RGBA8
GL.UnsignedByte
(w, h)
(TCUpload img)
True
(Just TFMinMag)
True
-- Insert into cache, delete any overflow
let (newEntries, delEntry) = LBM.insert uri entry cacheEntries
case delEntry of
Just (_, TETexture delTex) -> GL.deleteObjectName delTex
Just (_, TEGrid slot ) -> TG.freeSlot tcTexGrid slot
Nothing -> return ()
-- Remove from image cache
deleteImage tcImageCache uri
-- Write back the cache directory and return
writeCache newEntries
return $ case entry of
TETexture tex -> Just (tex, QuadUVDefault)
TEGrid (TG.viewGridSlot -> TG.GridSlot tex _ _ uv) -> Just (tex, uv)
_ -> return Nothing
where writeCache = writeIORef tcCacheEntries
gatherCacheStats :: TextureCache -> IO String
gatherCacheStats tc = do
cache <- readIORef $ tcCacheEntries tc
let dir = LBM.toList cache
dirLen = fst $ LBM.size cache
(mem, maxWdh, maxHgt, texCnt) <-
foldM ( \r@(mem', maxWdh', maxHgt', texCnt') (_, entry) -> case entry of
TETexture tex -> do
GL.textureBinding GL.Texture2D GL.$= Just tex
(w, h) <- getCurTex2DSize
return (mem' + w * h * 4, max w maxWdh', max h maxHgt', texCnt' + 1)
_ -> return r
)
(0, 0, 0, 0)
dir
(numGridTex, numFreeSlots, gridTexWdh, (slotWdh, slotHgt), ifmt)
<- TG.getGridMemoryUsage $ tcTexGrid tc
return $ printf ( "Dir. Capacity: %i/%i (%.1f%% slotrefs) Β· MemImg: %3.fMB" ++
" Β· LargestImg: %ix%i | GridTex: %i x %ix%ix%s Β· %ix%i slots (free: %i)"
)
(fst $ LBM.size cache)
(snd $ LBM.size cache)
(fromIntegral ((dirLen - texCnt) * 100) / fromIntegral dirLen :: Float)
(fromIntegral mem / 1024 / 1024 :: Double)
maxWdh
maxHgt
numGridTex
gridTexWdh gridTexWdh
(show ifmt)
slotWdh
slotHgt
numFreeSlots
debugDumpGrid :: TextureCache -> FilePath -> IO ()
debugDumpGrid tc = TG.debugDumpGrid (tcTexGrid tc)
|
blitzcode/jacky
|
src/TextureCache.hs
|
mit
| 7,685
| 0
| 25
| 3,244
| 1,440
| 751
| 689
| 143
| 8
|
module Text.XML.DOM.Parser.Buildable
( Buildable(..)
) where
import Data.Functor.Identity
import Data.List.NonEmpty as NE
import Data.Maybe
-- | Class of traversable functors which may be constructed from a list
class Traversable f => Buildable f where
-- | If the method returns 'Nothing', this means we cannot build the
-- traversable from the given list. In this case 'inFilteredTrav' should
-- fail traversing.
build :: [a] -> Maybe (f a)
instance Buildable Identity where
build = fmap Identity . listToMaybe
instance Buildable [] where
build = Just
instance Buildable Maybe where
build = Just . listToMaybe
instance Buildable NonEmpty where
build = NE.nonEmpty
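-- A hedged usage sketch (not part of the original module):
--
-- >>> build [1, 2, 3] :: Maybe (NonEmpty Int)
-- Just (1 :| [2,3])
--
-- >>> build ([] :: [Int]) :: Maybe (Identity Int)
-- Nothing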
|
typeable/dom-parser
|
src/Text/XML/DOM/Parser/Buildable.hs
|
mit
| 676
| 0
| 10
| 125
| 145
| 83
| 62
| 15
| 0
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
import Text.Parsec
import Text.Parsec.Text
--import Text.Parsec.Expr
import qualified Text.Parsec.Token as Tok
import qualified Text.Parsec.Language as Lang
import qualified Data.Text as T
import Data.Functor.Identity (Identity)
lett :: T.Text
lett = "abcdefghijklmnopqrstuvwxyz"
num :: T.Text
num = "1234567890"
lexer :: Tok.GenTokenParser T.Text () Identity
lexer = Tok.makeTokenParser style
style :: Tok.GenLanguageDef T.Text () Identity
style = Lang.emptyDef {
Tok.commentStart = "{-"
, Tok.commentEnd = "-}"
, Tok.identStart = lett <|> oneOf "+-/*"
, Tok.identLetter = lett <|> num <|> oneOf "_'"
, Tok.reservedOpNames = [ "'", "\""]
, Tok.reservedNames = [ "true", "false"
, "let", "quote", "lambda"
, "Nil" ]
}
{-
Tok.TokenParser { parens = m_parens
, identifier = m_identifier -- Tok.Identifer lexer
, reservedOp = m_reservedOp
, reserved = m_reserved
, semiSep1 = m_semiSep1
, whiteSpace = m_whiteSpace } = makeTokenParser style
-}
reservedOp :: T.Text -> Parser ()
reservedOp op = Tok.reservedOp lexer (T.unpack op)
parseAtom :: Parser LispVal
parseAtom = Atom . T.pack <$> Tok.identifier lexer
parseString :: Parser LispVal
parseString =
do reservedOp "\""
p <- Tok.identifier lexer
reservedOp "\""
return $ Str (T.pack p)
parseNumber :: Parser LispVal
parseNumber = fmap (Num . read) $ many1 digit
parseList :: Parser LispVal
parseList = List . concat <$> (many parseExpr `sepBy` char ' ')
{-
parseSExp1 :: Parser LispVal
parseSExp1 = List . concat <$> Tok.parens (many parseExpr `sepBy` char ' ')
-}
parseSExp :: Parser LispVal
parseSExp =
do reservedOp "("
p <- (many parseExpr `sepBy` char ' ')
reservedOp ")"
return $ List . concat $ p
parseQuote :: Parser LispVal
parseQuote =
do
reservedOp "\'"
x <- parseExpr
return $ List [Atom "quote", x]
parseExpr :: Parser LispVal
parseExpr = parseAtom
<|> parseString
<|> parseNumber
<|> parseReserved
<|> parseQuote
<|> parseSExp
parseReserved =
do
reservedOp "Nil" >> return Nil
<|> (reservedOp "#t" >> return (Bin True))
<|> (reservedOp "#f" >> return (Bin False))
contents :: Parser a -> Parser a
contents p = do
Tok.whiteSpace lexer
r <- p
eof
return r
readExpr :: T.Text -> Either ParseError LispVal
readExpr = parse (contents parseExpr) "<stdin>"
-------------------------
-- STAND ALONE TEST
-- --------------------
p pa inp = case parse pa "" inp of
{ Left err -> "err " ++ show err
; Right ans -> "ans " ++ show ans
}
-- need a copy of LispVal for stand alone
data LispVal = Nil | Bin Bool | Atom T.Text | Num Int | Str T.Text | List [LispVal] deriving (Show)
main :: IO ()
main =
do
print $ p parseReserved "Nil"
print $ p parseExpr "Nil"
print $ p parseExpr "'Nil"
print " "
print $ p parseQuote "'(1 2 3 4)"
print $ p parseQuote "'x"
print $ p parseQuote "'()"
print " "
print " "
print $ p (parseExpr) "(1)"
print $ p parseList "a \"a\" \"a\""
print $ p parseList "x 1 2"
print $ p parseSExp "(a \"a\" \"a\")"
print $ p parseSExp "(1 2 3 4)"
print " "
print " "
--print $ p (m_parens (many parseExpr `sepBy` char ' ')) "(lambda (fnName a b c) (body) )"
print $ p parseSExp "(lambda (fnName a b c) (body) )"
print $ p parseSExp "(a 1 b 2)"
print $ p parseSExp "(let (a 1 b 2) (fn a b) )"
print $ p parseSExp "(let (a (x 1 2) b (y 3 4)) (fn a b) )"
print " "
print " "
print $ p parseExpr "x"
print $ p parseExpr "1"
print $ p parseExpr "\"a b c d\""
print $ p parseExpr "(3 1)"
print " "
print $ p parseReserved "#t"
print $ p parseReserved "#f"
print $ p parseExpr "#t"
print $ p parseExpr "#f"
print " "
print $ p parseExpr "(eq? 1 2)"
print $ p parseExpr "1"
|
write-you-a-scheme-v2/scheme
|
archive/Parse3.hs
|
mit
| 4,039
| 29
| 11
| 1,075
| 1,239
| 589
| 650
| 111
| 2
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-s3target.html
module Stratosphere.ResourceProperties.GlueCrawlerS3Target where
import Stratosphere.ResourceImports
-- | Full data type definition for GlueCrawlerS3Target. See
-- 'glueCrawlerS3Target' for a more convenient constructor.
data GlueCrawlerS3Target =
GlueCrawlerS3Target
{ _glueCrawlerS3TargetExclusions :: Maybe (ValList Text)
, _glueCrawlerS3TargetPath :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON GlueCrawlerS3Target where
toJSON GlueCrawlerS3Target{..} =
object $
catMaybes
[ fmap (("Exclusions",) . toJSON) _glueCrawlerS3TargetExclusions
, fmap (("Path",) . toJSON) _glueCrawlerS3TargetPath
]
-- | Constructor for 'GlueCrawlerS3Target' containing required fields as
-- arguments.
glueCrawlerS3Target
:: GlueCrawlerS3Target
glueCrawlerS3Target =
GlueCrawlerS3Target
{ _glueCrawlerS3TargetExclusions = Nothing
, _glueCrawlerS3TargetPath = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-s3target.html#cfn-glue-crawler-s3target-exclusions
gcstExclusions :: Lens' GlueCrawlerS3Target (Maybe (ValList Text))
gcstExclusions = lens _glueCrawlerS3TargetExclusions (\s a -> s { _glueCrawlerS3TargetExclusions = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-crawler-s3target.html#cfn-glue-crawler-s3target-path
gcstPath :: Lens' GlueCrawlerS3Target (Maybe (Val Text))
gcstPath = lens _glueCrawlerS3TargetPath (\s a -> s { _glueCrawlerS3TargetPath = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/GlueCrawlerS3Target.hs
|
mit
| 1,748
| 0
| 12
| 205
| 264
| 151
| 113
| 27
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Yesod.Devel.Runner
( runner
) where
import Yesod.Devel.Daemon (daemon)
import Yesod.Devel.Builder (LastCompile)
import Yesod.Devel.Process (RunProc, EarlyTermination (..))
import Yesod.Devel.PortManager (PortManager, getPort, recyclePort)
import Control.Monad (join)
import Control.Concurrent (threadDelay)
import Filesystem (createTree, removeFile)
import Filesystem.Path.CurrentOS (FilePath)
import Control.Concurrent.STM
import Control.Concurrent.Async (async, cancelWith)
import Control.Exception (finally, try, IOException)
import Data.Text (Text, pack)
import Data.Monoid ((<>))
import Prelude hiding (FilePath)
runner :: (Text -> IO ())
-> PortManager
-> Int -> TVar (Maybe Int) -> RunProc -> TVar LastCompile -> IO ()
runner log portManager mainPort reversedPort runProc lastCompile = do
killUserProcess <- newTVarIO $ return ()
daemon "runner" (loop killUserProcess Nothing) log
where
loop killUserProcess mtimestamp = do
newTimestamp <- atomically $ do
lc <- readTVar lastCompile
case (mtimestamp, lc) of
(_, Nothing) -> retry
(Nothing, Just x) -> return x
(Just old, Just new)
| old == new -> retry
| otherwise -> return new
log "New compilation available, running"
createTree "yesod-devel"
writeFile "yesod-devel/devel-terminate" ""
join $ atomically $ do
x <- readTVar $ killUserProcess
writeTVar (killUserProcess) (return ())
writeTVar reversedPort Nothing
return x
threadDelay 100000
removeFileIfExists "yesod-devel/devel-terminate"
removeFileIfExists "dist/devel-terminate" -- compatibility
port <- getPort portManager
log $ "Got new listening port of " <> pack (show port)
up <- async $ runProc "runghc"
-- FIXME [ "-package-dbdist/package.conf.inplace"
[ "-package-confdist/package.conf.inplace"
, "devel.hs"
]
[ ("PORT", show port)
, ("APPROOT", "http://localhost:" ++ show mainPort)
, ("DISPLAY_PORT", show mainPort)
] `finally` recyclePort portManager port
atomically $ do
writeTVar (killUserProcess) (cancelWith up EarlyTermination)
writeTVar reversedPort (Just port)
loop killUserProcess $ Just newTimestamp
removeFileIfExists :: FilePath -> IO ()
removeFileIfExists fp = do
ex <- try $ removeFile fp
case ex :: Either IOException () of
Left _ -> return ()
Right () -> return ()
|
snoyberg/yesod-devel-beta
|
Yesod/Devel/Runner.hs
|
mit
| 2,696
| 0
| 19
| 737
| 744
| 379
| 365
| 62
| 3
|
import System.IO
import Control.Monad (forever)
import Control.Concurrent (threadDelay)
import Control.Concurrent.BoundedChan
import Control.Concurrent (forkIO)
digestsFilePath :: String
digestsFilePath = "/mnt/lpd-distlib/streamer/v1/digests.list"
checkFile :: Handle -> BoundedChan (String) -> Int -> IO ()
checkFile handle chan skip = do
isEof <- hIsEOF handle
if isEof == True
then do
threadDelay 1000000
checkFile handle chan 0
else do
hasLine <- hWaitForInput handle 67
if hasLine == True
then do
line <- hGetLine handle
if skip > 0
then checkFile handle chan $ skip - 1
else do
writeChan chan line
checkFile handle chan 0
else
checkFile handle chan 0
main = do
handle <- openFile digestsFilePath ReadMode
chan <- newBoundedChan 10
forkIO (checkFile handle chan 1)
forever $ do
putStrLn "attempting to read a line.."
a <- readChan chan
putStrLn a
|
adizere/nifty-tree
|
playground/wait-input.hs
|
mit
| 1,170
| 1
| 16
| 444
| 303
| 141
| 162
| 33
| 4
|
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Safe #-}
#endif
module CPUTime (
getCPUTime, cpuTimePrecision
) where
import System.CPUTime
|
jwiegley/ghc-release
|
libraries/haskell98/CPUTime.hs
|
gpl-3.0
| 170
| 0
| 4
| 35
| 21
| 15
| 6
| 4
| 0
|
import Control.Exception ( assert )
import Data.Grib
import Text.Printf ( printf )
main :: IO ()
main = let filename = "test/stage/regular_latlon_surface.grib1" in
runGribIO_ filename $ do
setString "file" filename
getLong "Ni" >>= liftIO . printf "numberOfPointsAlongAParallel=%d\n"
getLong "Nj" >>= liftIO . printf "numberOfPointsAlongAMeridian=%d\n"
getDouble "yFirst" >>=
liftIO . printf "latitudeOfFirstGridPointInDegrees=%g\n"
getDouble "xFirst" >>=
liftIO . printf "longitudeOfFirstGridPointInDegrees=%g\n"
getDouble "yLast" >>=
liftIO . printf "latitudeOfLastGridPointInDegrees=%g\n"
getDouble "xLast" >>=
liftIO . printf "longitudeOfLastGridPointInDegrees=%g\n"
getDouble "DyInDegrees" >>=
liftIO . printf "jDirectionIncrementInDegrees=%g\n"
getDouble "DxInDegrees" >>=
liftIO . printf "iDirectionIncrementInDegrees=%g\n"
getString "packingType" >>= liftIO . printf "packingType=%s\n"
values <- getValues
let numValues = length values
average = sum values / fromIntegral numValues
liftIO $ printf "There are %d values, average is %g\n" numValues average
filename' <- getString "file"
liftIO $ assert (filename' == filename) (return ())
|
mjakob/hgrib
|
examples/get.hs
|
gpl-3.0
| 1,268
| 1
| 14
| 255
| 308
| 135
| 173
| 28
| 1
|
module PolicyEffects.Model (
PolicyEffectsMsg (..),
PolicyEffectsState,
PolicyEffectsEnvironment,
PolicyEffectsAgentDef,
PolicyEffectsAgentBehaviour,
PolicyEffectsAgentIn,
PolicyEffectsAgentOut,
PolicyEffectsAgentObservable,
PolicyEffectsEnvironmentReplicator
) where
import FRP.FrABS
-- NOTE: this implementation was inspired by this article:
-- http://www.decisionsciencenews.com/2017/06/19/counterintuitive-problem-everyone-room-keeps-giving-dollars-random-others-youll-never-guess-happens-next/
-- It shows how random spending of money creates a huge deviation of wealth
------------------------------------------------------------------------------------------------------------------------
-- DOMAIN-SPECIFIC AGENT-DEFINITIONS
------------------------------------------------------------------------------------------------------------------------
data PolicyEffectsMsg = Spend Double deriving (Eq, Show)
type PolicyEffectsState = Double
type PolicyEffectsEnvironment = Network ()
type PolicyEffectsAgentDef = AgentDef PolicyEffectsState PolicyEffectsMsg PolicyEffectsEnvironment
type PolicyEffectsAgentBehaviour = AgentBehaviour PolicyEffectsState PolicyEffectsMsg PolicyEffectsEnvironment
type PolicyEffectsAgentIn = AgentIn PolicyEffectsState PolicyEffectsMsg PolicyEffectsEnvironment
type PolicyEffectsAgentOut = AgentOut PolicyEffectsState PolicyEffectsMsg PolicyEffectsEnvironment
type PolicyEffectsAgentObservable = AgentObservable PolicyEffectsState
type PolicyEffectsEnvironmentReplicator = EnvironmentReplicator PolicyEffectsEnvironment
------------------------------------------------------------------------------------------------------------------------
|
thalerjonathan/phd
|
coding/libraries/chimera/examples/ABS/PolicyEffects/Model.hs
|
gpl-3.0
| 1,729
| 0
| 6
| 158
| 158
| 95
| 63
| 20
| 0
|
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE LambdaCase #-}
-- |
-- Module : Main
-- Copyright : (c) Mateusz Kowalczyk 2014
-- License : GPL-3
--
-- Maintainer : fuuzetsu@fuuzetsu.co.uk
-- Stability : experimental
-- Portability : portable
module Main where
import Graphics.HImg
import System.Environment (getArgs)
import System.Exit (exitWith, ExitCode(ExitFailure))
main ∷ IO ()
main = getArgs >>= \case
"-h":_ → printHelp >> exitWith (ExitFailure 1)
["-f", link] → viewFromFile link
["-u", link] → viewFromLink link
[link] → viewGuess link
_ → printHelp >> exitWith (ExitFailure 1)
|
Fuuzetsu/himg
|
src/Main.hs
|
gpl-3.0
| 688
| 0
| 12
| 179
| 151
| 86
| 65
| 13
| 5
|
module Reductions where
import Data.List
import Data.Maybe
import Control.Monad
import Control.Monad.State
-- Local imports
import ReductionRules (StateReduction,
reduceLollyStateIO,
reduceWithStateIO,
reducePlusStateIO,
reduceOneStateIO)
import Util
import Syntax
import Printer
import ProverState
import Term
import UserIO
startTeLLer :: ProverStateIO ()
startTeLLer = do
-- First, we apply some simplification rules to the environment (these are defined in 'RewriteRules')
modify simplifyEnv
-- We also initialize the map modelling the origin of resources
-- TODO: move this to the functions that add the resources?
reductions <- gets totalReductions
when (reductions == 0) $ modify initOriginMapWithAtoms
omap <- gets originOfResources
-- We now start the fixpoint calculation
startFixpointReductions
-- BT
tellAllStories <- gets tellAllStories
when tellAllStories $ do
stack <- gets btStack
graphs <- gets btTraces
if((length stack) > 0) then do
trace <- gets actionTrace
let next = head stack
-- set the state to next, except for the stack
allTraces <- gets btTraces
put $ next {btStack = tail stack, btTraces = trace:allTraces}
startTeLLer
else do
trace <- gets actionTrace
state <- get
put $ state {btTraces = trace:(btTraces state)}
--g <- gets btTraces
--lift $ putStrLn $ "ALL TRACES: "++ show g
--lift $ putStrLn $ show g
startFixpointReductions :: ProverStateIO ()
startFixpointReductions = do
-- We apply the reduction rules until we reach a fixed point
initialContext <- gets env
findFixpointStateIO reduceStateIO initialContext
-- We have reached a dead-end, i.e., there are no enabled *focused* actions.
-- We now test whether there are enabled *unfocused* actions. If there are any,
-- we move them to the context and we start again.
-- TODO: Check with Gwenn if this is the desired behaviour.
focusedActions <- gets env
unfocusedActions <- gets unfocused
let existEnabledUnfocusedActions = not ( null ( listEnabledActions (focusedActions++unfocusedActions)))
when (existEnabledUnfocusedActions) $
do lift $ tellerWarning "We have reached a dead-end, but there are other available actions."
modify moveUnfocusedToEnv
modify (setFocusedReductionsTo 0)
-- Ask if the user wants to proceed
--answer <- askUserIfProceed
--when (answer) $ startFixpointReductions
startFixpointReductions
-- Before we leave, let us clean the state by moving disabled unfocused actions to the environment
modify moveUnfocusedToEnv
modify (setFocusedReductionsTo 0)
reduceStateIO :: Environment -> ProverStateIO Environment
reduceStateIO ts = do
inDebugMode <- gets debugMode
g <- gets granularity
numFocusedReductions <- gets focusedReductions
-- Not a fixpoint yet, but we have reached the granularity value. Bring back unfocused
-- actions to the environment and reset the counter 'focusedReductions'.
-- TODO: What is this state called? Quiescence?
when (g == numFocusedReductions) $ do
modify moveUnfocusedToEnv
modify (setFocusedReductionsTo 0)
lift $ tellerWarning "Granularity limit reached."
--answer <- askUserIfProceed
--when (answer) $ startFixpointReductions
context <- gets env
unfo <- gets unfocused
let enabledActions = listEnabledActions context --(env state)
when inDebugMode $
(lift $ print $ "[DEBUG] ENVIRONMENT: " ++ show context) >>
(lift $ print $ "[DEBUG] UNFOCUSED: " ++ show unfo) >>
(lift $ print $ "[DEBUG] ENABLED ACTIONS: " ++ show enabledActions)
-- If there are several available actions, let the user choose which one to
-- reduce first
-- TODO: and you are not focusing...
when ((length enabledActions)>1) $ chooseActionToFocusOn enabledActions True
-- chooseActionToFocusOn changes the state, so let us get a new copy of the environment
-- TODO, FIXME: This should improve. This style leads to programs difficult to debug!
newEnv <- gets env
-- We now try one reduction of each type, until we reach a fixpoint.
tryReductionsStateIO reductions (linearizeTensorProducts newEnv)
where reductions =
[
reduceLollyStateIO,
reduceWithStateIO,
reducePlusStateIO,
reduceOneStateIO
]
-- split the next functions into IO (into CLI) + State
-- Second parameter: True if running for the first time or if list of actions changed
chooseActionToFocusOn :: [Term] -> Bool -> ProverStateIO ()
chooseActionToFocusOn [] _ = return ()
chooseActionToFocusOn l printListActions = do
when (printListActions) $ lift $ printListOfActions l
-- JFF: Cindy wants to be able to print the state and to add new resources at this point
-- TODO: refactor the code...
when (printListActions) $ lift $ tellerPrintLn "p) Print environment"
when (printListActions) $ lift $ tellerPrintLn "+-) Add/Remove resources (e.g. +A A-@B A-@C)"
when (printListActions) $ lift $ tellerPrintLn "choicepoints) List choice points"
when (printListActions) $ lift $ tellerPrintLn "goto n) Go to choice point n"
option <- lift $ getLine -- TODO CHANGE FOR READLINE
-- user selects printing option
when ((head option) == 'p') $ do
state <- get
lift (tellerPrintLn (showState state))
-- TODO: something weird is happening with IO (possibly related with Readline): p is being read
-- twice!
when ((head option) == '+') $ do
changeEnvWith addToEnv (drop 1 option)
when ((head option) == '-') $ do
changeEnvWith removeFromEnv (drop 1 option)
when (option == "choicepoints") $ do
cpoints <- gets choicePoints
let l = map (\(n,f,s,t) -> (n,f,s)) cpoints
if(null l) then lift (tellerPrintLn "No choice points to show!")
else lift (tellerPrintLn (show l))
when ((take 5 option) == "goto ") $ do
let index = fst $ head (reads (drop 5 option) :: [(Int,String)])
cpoints <- gets choicePoints
let npoints = length cpoints
let newState = (\(n,f,s,t)->t) $ cpoints!!(npoints-1-index)
put newState
-- Adding new actions can add new enabled actions!
context <- gets env
let initialListActions = l
let l = listEnabledActions context
let sizeList = length l
if (isValidActionChoice option sizeList) then
do
let index = fst $ head (reads option :: [(Int, String)])
let chosenAction = l!!index -- TODO: possibly unsafe? and the next steps not very efficient?
state <- get
let newEnv = chosenAction: (env (state) \\ l)
let unFocus = (unfocused state) ++ (l \\ [chosenAction])
-- There was a choice, so add this state to choicePoints
state <- get
let lastActionName = (\(f,s,t) -> t) $ (\l -> if (null l) then (0,[],"Beginning") else last l) $ filter (\(_,_,nm)->(take 7 nm) /= (take 7 "\\emptyset")) $ actionTrace state
let currentChoicePoints = choicePoints state
put (state {choicePoints = (length currentChoicePoints, lastActionName, map showAction l, state): currentChoicePoints})
-- Saved states
tellAllStories <- gets tellAllStories
when tellAllStories $ do
savedState <- get
let alternatives = [ savedState {env = notChosen: (env (savedState)\\l),
unfocused = (unfocused savedState) ++ (l\\[notChosen])}
| notChosen <- l\\[chosenAction]]
-- lift $ putStrLn $ "ALTS " ++ show (map env alternatives)
state <- get
put (state {btStack = alternatives++(btStack state)})
-- End of saved states
state <- get
put $ state { env = newEnv, unfocused = unFocus }
return ()
else do
lift $ tellerWarning $ "Choose an action from 0 to " ++ (show (sizeList-1) ++ " to proceed!")
chooseActionToFocusOn l (l/=initialListActions)
isValidActionChoice :: String -> Int -> Bool
isValidActionChoice s n =
let value = (reads s :: [(Int, String)])
in
case value of
[] -> False
(i,_):_ -> if ((i>=0) && (i<n)) then True else False
tryReductionsStateIO :: [StateReduction] -> [Term] -> ProverStateIO [Term]
--tryReductionsStateIO (f:fs) t = tryReductionStateIO' f t >>= tryReductionsStateIO fs
tryReductionsStateIO (f:fs) t = do
newEnv <- tryReductionStateIO' f t
modify (changeEnvTo newEnv) -- TODO: do I need this?
let enabledActions = listEnabledActions newEnv
-- If we are focusing, allow the choice of actions when there are several available
gran <- gets granularity
fred <- gets focusedReductions
when ((fred<gran) && (length enabledActions)>1) $ chooseActionToFocusOn enabledActions True
newEnv <- gets env
tryReductionsStateIO fs newEnv
tryReductionsStateIO [] t = return t
tryReductionStateIO' :: StateReduction -> [Term] -> ProverStateIO [Term]
tryReductionStateIO' f ls = return . fromMaybe ls =<< tryReductionStateIO f ls
tryReductionStateIO :: StateReduction -> [Term] -> ProverStateIO (Maybe [Term])
tryReductionStateIO f ls = go (point ls)
where go [] = return Nothing
go (x:xs) = do
x' <- f x
case x' of
Just _ -> return x'
Nothing -> go xs
-- TODO: organize the code below
-- (Move to Utils?)
point ls = go [] ls
where go prev (x:next) = (x, prev ++ next) : go (x:prev) next
go prev [] = []
pairs (x:y:xs) = (x, y):pairs xs
findRepeat :: Eq a => [a] -> a
findRepeat = fst . fromJust . find (uncurry (==)) . pairs
pointedMap f ls = map f (point ls)
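-- A hedged usage sketch (not part of the original module) for the helpers
-- above: 'point' pairs every element with the rest of the list, and
-- 'findRepeat' returns the first element of the first equal (non-overlapping)
-- pair:
--
-- >>> point [1, 2, 3]
-- [(1,[2,3]),(2,[1,3]),(3,[2,1])]
--
-- >>> findRepeat [1, 1, 2, 3]
-- 1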
|
jff/TeLLer
|
src/Reductions.hs
|
gpl-3.0
| 10,536
| 0
| 23
| 3,156
| 2,487
| 1,243
| 1,244
| 166
| 4
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Dataflow.Projects.WorkerMessages
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Send a worker_message to the service.
--
-- /See:/ <https://cloud.google.com/dataflow Dataflow API Reference> for @dataflow.projects.workerMessages@.
module Network.Google.Resource.Dataflow.Projects.WorkerMessages
(
-- * REST Resource
ProjectsWorkerMessagesResource
-- * Creating a Request
, projectsWorkerMessages
, ProjectsWorkerMessages
-- * Request Lenses
, pwmXgafv
, pwmUploadProtocol
, pwmAccessToken
, pwmUploadType
, pwmPayload
, pwmProjectId
, pwmCallback
) where
import Network.Google.Dataflow.Types
import Network.Google.Prelude
-- | A resource alias for @dataflow.projects.workerMessages@ method which the
-- 'ProjectsWorkerMessages' request conforms to.
type ProjectsWorkerMessagesResource =
"v1b3" :>
"projects" :>
Capture "projectId" Text :>
"WorkerMessages" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] SendWorkerMessagesRequest :>
Post '[JSON] SendWorkerMessagesResponse
-- | Send a worker_message to the service.
--
-- /See:/ 'projectsWorkerMessages' smart constructor.
data ProjectsWorkerMessages =
ProjectsWorkerMessages'
{ _pwmXgafv :: !(Maybe Xgafv)
, _pwmUploadProtocol :: !(Maybe Text)
, _pwmAccessToken :: !(Maybe Text)
, _pwmUploadType :: !(Maybe Text)
, _pwmPayload :: !SendWorkerMessagesRequest
, _pwmProjectId :: !Text
, _pwmCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsWorkerMessages' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pwmXgafv'
--
-- * 'pwmUploadProtocol'
--
-- * 'pwmAccessToken'
--
-- * 'pwmUploadType'
--
-- * 'pwmPayload'
--
-- * 'pwmProjectId'
--
-- * 'pwmCallback'
projectsWorkerMessages
:: SendWorkerMessagesRequest -- ^ 'pwmPayload'
-> Text -- ^ 'pwmProjectId'
-> ProjectsWorkerMessages
projectsWorkerMessages pPwmPayload_ pPwmProjectId_ =
ProjectsWorkerMessages'
{ _pwmXgafv = Nothing
, _pwmUploadProtocol = Nothing
, _pwmAccessToken = Nothing
, _pwmUploadType = Nothing
, _pwmPayload = pPwmPayload_
, _pwmProjectId = pPwmProjectId_
, _pwmCallback = Nothing
}
-- | V1 error format.
pwmXgafv :: Lens' ProjectsWorkerMessages (Maybe Xgafv)
pwmXgafv = lens _pwmXgafv (\ s a -> s{_pwmXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pwmUploadProtocol :: Lens' ProjectsWorkerMessages (Maybe Text)
pwmUploadProtocol
= lens _pwmUploadProtocol
(\ s a -> s{_pwmUploadProtocol = a})
-- | OAuth access token.
pwmAccessToken :: Lens' ProjectsWorkerMessages (Maybe Text)
pwmAccessToken
= lens _pwmAccessToken
(\ s a -> s{_pwmAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pwmUploadType :: Lens' ProjectsWorkerMessages (Maybe Text)
pwmUploadType
= lens _pwmUploadType
(\ s a -> s{_pwmUploadType = a})
-- | Multipart request metadata.
pwmPayload :: Lens' ProjectsWorkerMessages SendWorkerMessagesRequest
pwmPayload
= lens _pwmPayload (\ s a -> s{_pwmPayload = a})
-- | The project to send the WorkerMessages to.
pwmProjectId :: Lens' ProjectsWorkerMessages Text
pwmProjectId
= lens _pwmProjectId (\ s a -> s{_pwmProjectId = a})
-- | JSONP
pwmCallback :: Lens' ProjectsWorkerMessages (Maybe Text)
pwmCallback
= lens _pwmCallback (\ s a -> s{_pwmCallback = a})
instance GoogleRequest ProjectsWorkerMessages where
type Rs ProjectsWorkerMessages =
SendWorkerMessagesResponse
type Scopes ProjectsWorkerMessages =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly",
"https://www.googleapis.com/auth/userinfo.email"]
requestClient ProjectsWorkerMessages'{..}
= go _pwmProjectId _pwmXgafv _pwmUploadProtocol
_pwmAccessToken
_pwmUploadType
_pwmCallback
(Just AltJSON)
_pwmPayload
dataflowService
where go
= buildClient
(Proxy :: Proxy ProjectsWorkerMessagesResource)
mempty
|
brendanhay/gogol
|
gogol-dataflow/gen/Network/Google/Resource/Dataflow/Projects/WorkerMessages.hs
|
mpl-2.0
| 5,402
| 0
| 18
| 1,266
| 791
| 461
| 330
| 119
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DialogFlow.Projects.Locations.Agents.Restore
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Restores the specified agent from a binary file. Replaces the current
-- agent with a new one. Note that all existing resources in agent (e.g.
-- intents, entity types, flows) will be removed. Note: You should always
-- train flows prior to sending them queries. See the [training
-- documentation](https:\/\/cloud.google.com\/dialogflow\/cx\/docs\/concept\/training).
--
-- /See:/ <https://cloud.google.com/dialogflow/ Dialogflow API Reference> for @dialogflow.projects.locations.agents.restore@.
module Network.Google.Resource.DialogFlow.Projects.Locations.Agents.Restore
(
-- * REST Resource
ProjectsLocationsAgentsRestoreResource
-- * Creating a Request
, projectsLocationsAgentsRestore
, ProjectsLocationsAgentsRestore
-- * Request Lenses
, plarXgafv
, plarUploadProtocol
, plarAccessToken
, plarUploadType
, plarPayload
, plarName
, plarCallback
) where
import Network.Google.DialogFlow.Types
import Network.Google.Prelude
-- | A resource alias for @dialogflow.projects.locations.agents.restore@ method which the
-- 'ProjectsLocationsAgentsRestore' request conforms to.
type ProjectsLocationsAgentsRestoreResource =
"v3" :>
CaptureMode "name" "restore" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GoogleCloudDialogflowCxV3RestoreAgentRequest
:> Post '[JSON] GoogleLongrunningOperation
-- | Restores the specified agent from a binary file. Replaces the current
-- agent with a new one. Note that all existing resources in agent (e.g.
-- intents, entity types, flows) will be removed. Note: You should always
-- train flows prior to sending them queries. See the [training
-- documentation](https:\/\/cloud.google.com\/dialogflow\/cx\/docs\/concept\/training).
--
-- /See:/ 'projectsLocationsAgentsRestore' smart constructor.
data ProjectsLocationsAgentsRestore =
ProjectsLocationsAgentsRestore'
{ _plarXgafv :: !(Maybe Xgafv)
, _plarUploadProtocol :: !(Maybe Text)
, _plarAccessToken :: !(Maybe Text)
, _plarUploadType :: !(Maybe Text)
, _plarPayload :: !GoogleCloudDialogflowCxV3RestoreAgentRequest
, _plarName :: !Text
, _plarCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsAgentsRestore' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plarXgafv'
--
-- * 'plarUploadProtocol'
--
-- * 'plarAccessToken'
--
-- * 'plarUploadType'
--
-- * 'plarPayload'
--
-- * 'plarName'
--
-- * 'plarCallback'
projectsLocationsAgentsRestore
:: GoogleCloudDialogflowCxV3RestoreAgentRequest -- ^ 'plarPayload'
-> Text -- ^ 'plarName'
-> ProjectsLocationsAgentsRestore
projectsLocationsAgentsRestore pPlarPayload_ pPlarName_ =
ProjectsLocationsAgentsRestore'
{ _plarXgafv = Nothing
, _plarUploadProtocol = Nothing
, _plarAccessToken = Nothing
, _plarUploadType = Nothing
, _plarPayload = pPlarPayload_
, _plarName = pPlarName_
, _plarCallback = Nothing
}
-- | V1 error format.
plarXgafv :: Lens' ProjectsLocationsAgentsRestore (Maybe Xgafv)
plarXgafv
= lens _plarXgafv (\ s a -> s{_plarXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plarUploadProtocol :: Lens' ProjectsLocationsAgentsRestore (Maybe Text)
plarUploadProtocol
= lens _plarUploadProtocol
(\ s a -> s{_plarUploadProtocol = a})
-- | OAuth access token.
plarAccessToken :: Lens' ProjectsLocationsAgentsRestore (Maybe Text)
plarAccessToken
= lens _plarAccessToken
(\ s a -> s{_plarAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plarUploadType :: Lens' ProjectsLocationsAgentsRestore (Maybe Text)
plarUploadType
= lens _plarUploadType
(\ s a -> s{_plarUploadType = a})
-- | Multipart request metadata.
plarPayload :: Lens' ProjectsLocationsAgentsRestore GoogleCloudDialogflowCxV3RestoreAgentRequest
plarPayload
= lens _plarPayload (\ s a -> s{_plarPayload = a})
-- | Required. The name of the agent to restore into. Format:
-- \`projects\/\/locations\/\/agents\/\`.
plarName :: Lens' ProjectsLocationsAgentsRestore Text
plarName = lens _plarName (\ s a -> s{_plarName = a})
-- | JSONP
plarCallback :: Lens' ProjectsLocationsAgentsRestore (Maybe Text)
plarCallback
= lens _plarCallback (\ s a -> s{_plarCallback = a})
instance GoogleRequest ProjectsLocationsAgentsRestore
where
type Rs ProjectsLocationsAgentsRestore =
GoogleLongrunningOperation
type Scopes ProjectsLocationsAgentsRestore =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow"]
requestClient ProjectsLocationsAgentsRestore'{..}
= go _plarName _plarXgafv _plarUploadProtocol
_plarAccessToken
_plarUploadType
_plarCallback
(Just AltJSON)
_plarPayload
dialogFlowService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsAgentsRestoreResource)
mempty
|
brendanhay/gogol
|
gogol-dialogflow/gen/Network/Google/Resource/DialogFlow/Projects/Locations/Agents/Restore.hs
|
mpl-2.0
| 6,311
| 0
| 16
| 1,329
| 790
| 465
| 325
| 117
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Calendar.Calendars.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates metadata for a calendar.
--
-- /See:/ <https://developers.google.com/google-apps/calendar/firstapp Calendar API Reference> for @calendar.calendars.update@.
module Network.Google.Resource.Calendar.Calendars.Update
(
-- * REST Resource
CalendarsUpdateResource
-- * Creating a Request
, calendarsUpdate
, CalendarsUpdate
-- * Request Lenses
, cuCalendarId
, cuPayload
) where
import Network.Google.AppsCalendar.Types
import Network.Google.Prelude
-- | A resource alias for @calendar.calendars.update@ method which the
-- 'CalendarsUpdate' request conforms to.
type CalendarsUpdateResource =
"calendar" :>
"v3" :>
"calendars" :>
Capture "calendarId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Calendar :> Put '[JSON] Calendar
-- | Updates metadata for a calendar.
--
-- /See:/ 'calendarsUpdate' smart constructor.
data CalendarsUpdate = CalendarsUpdate'
{ _cuCalendarId :: !Text
, _cuPayload :: !Calendar
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CalendarsUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cuCalendarId'
--
-- * 'cuPayload'
calendarsUpdate
:: Text -- ^ 'cuCalendarId'
-> Calendar -- ^ 'cuPayload'
-> CalendarsUpdate
calendarsUpdate pCuCalendarId_ pCuPayload_ =
CalendarsUpdate'
{ _cuCalendarId = pCuCalendarId_
, _cuPayload = pCuPayload_
}
-- | Calendar identifier. To retrieve calendar IDs call the calendarList.list
-- method. If you want to access the primary calendar of the currently
-- logged in user, use the \"primary\" keyword.
cuCalendarId :: Lens' CalendarsUpdate Text
cuCalendarId
= lens _cuCalendarId (\ s a -> s{_cuCalendarId = a})
-- | Multipart request metadata.
cuPayload :: Lens' CalendarsUpdate Calendar
cuPayload
= lens _cuPayload (\ s a -> s{_cuPayload = a})
instance GoogleRequest CalendarsUpdate where
type Rs CalendarsUpdate = Calendar
type Scopes CalendarsUpdate =
'["https://www.googleapis.com/auth/calendar"]
requestClient CalendarsUpdate'{..}
= go _cuCalendarId (Just AltJSON) _cuPayload
appsCalendarService
where go
= buildClient
(Proxy :: Proxy CalendarsUpdateResource)
mempty
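-- A minimal sketch, not part of the generated module: building the request
-- value with the smart constructor above. The 'Calendar' payload (here 'cal')
-- comes from "Network.Google.AppsCalendar.Types"; \"primary\" is the keyword
-- documented for 'cuCalendarId' and stands in for a concrete calendar id.
exampleCalendarsUpdate :: Calendar -> CalendarsUpdate
exampleCalendarsUpdate cal = calendarsUpdate "primary" cal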
|
rueshyna/gogol
|
gogol-apps-calendar/gen/Network/Google/Resource/Calendar/Calendars/Update.hs
|
mpl-2.0
| 3,239
| 0
| 13
| 731
| 384
| 232
| 152
| 61
| 1
|
-- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
func x
| very long guard, another rather long guard that refers to x = nontrivial
expression
foo
bar
alsdkjlasdjlasj
| otherwise = 0
|
lspitzner/brittany
|
data/Test420.hs
|
agpl-3.0
| 267
| 1
| 8
| 55
| 62
| 27
| 35
| 7
| 1
|
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
data Color = Red | Green | Blue
class Bort a where
foo :: a -> String
instance Eq Color where
(==) Red Red = True
(==) Green Green = True
(==) Blue Blue = True
(==) _ _ = False
instance Show Color where
show Red = "red"
show Green = "green"
show Blue = "blue"
instance Bort Color where
foo Red = "red"
foo Green = "green"
foo Blue = "blue"
instance Bort Char where
foo c = [c]
instance Bort String where
foo = id
instance Bort a => Bort [a] where
foo = undefined
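-- A small illustrative note, not part of the original file: with the
-- instances above, map foo [Red, Green, Blue] evaluates to
-- ["red","green","blue"] via the Bort Color instance, and show Green
-- evaluates to "green" via the Show Color instance.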
|
EricYT/Haskell
|
src/real_haskell/chapter-6/Color.hs
|
apache-2.0
| 601
| 0
| 7
| 184
| 208
| 111
| 97
| 23
| 0
|
{-# LANGUAGE FlexibleContexts #-}
module Propellor.Property.Parted (
TableType(..),
PartTable(..),
partTableSize,
Partition(..),
mkPartition,
Partition.Fs(..),
PartSize(..),
ByteSize,
toPartSize,
fromPartSize,
reducePartSize,
Partition.MkfsOpts,
PartType(..),
PartFlag(..),
Eep(..),
partitioned,
parted,
installed,
) where
import Propellor.Base
import qualified Propellor.Property.Apt as Apt
import qualified Propellor.Property.Partition as Partition
import Utility.DataUnits
import Data.Char
import System.Posix.Files
class PartedVal a where
val :: a -> String
-- | Types of partition tables supported by parted.
data TableType = MSDOS | GPT | AIX | AMIGA | BSD | DVH | LOOP | MAC | PC98 | SUN
deriving (Show)
instance PartedVal TableType where
val = map toLower . show
-- | A disk's partition table.
data PartTable = PartTable TableType [Partition]
deriving (Show)
instance Monoid PartTable where
-- | default TableType is MSDOS
mempty = PartTable MSDOS []
-- | uses the TableType of the second parameter
mappend (PartTable _l1 ps1) (PartTable l2 ps2) = PartTable l2 (ps1 ++ ps2)
-- | Gets the total size of the disk specified by the partition table.
partTableSize :: PartTable -> ByteSize
partTableSize (PartTable _ ps) = fromPartSize $
-- add 1 megabyte to hold the partition table itself
mconcat (MegaBytes 1 : map partSize ps)
-- | A partition on the disk.
data Partition = Partition
{ partType :: PartType
, partSize :: PartSize
, partFs :: Partition.Fs
, partMkFsOpts :: Partition.MkfsOpts
, partFlags :: [(PartFlag, Bool)] -- ^ flags can be set or unset (parted may set some flags by default)
, partName :: Maybe String -- ^ optional name for partition (only works for GPT, PC98, MAC)
}
deriving (Show)
-- | Makes a Partition with defaults for non-important values.
mkPartition :: Partition.Fs -> PartSize -> Partition
mkPartition fs sz = Partition
{ partType = Primary
, partSize = sz
, partFs = fs
, partMkFsOpts = []
, partFlags = []
, partName = Nothing
}
-- | Type of a partition.
data PartType = Primary | Logical | Extended
deriving (Show)
instance PartedVal PartType where
val Primary = "primary"
val Logical = "logical"
val Extended = "extended"
-- | All partition sizing is done in megabytes, so that parted can
-- automatically lay out the partitions.
--
-- Note that these are SI megabytes, not mebibytes.
newtype PartSize = MegaBytes Integer
deriving (Show)
instance PartedVal PartSize where
val (MegaBytes n)
| n > 0 = show n ++ "MB"
-- parted can't make partitions smaller than 1MB;
-- avoid failure in edge cases
		| otherwise = "1MB"
-- | Rounds up to the nearest MegaByte.
toPartSize :: ByteSize -> PartSize
toPartSize b = MegaBytes $ ceiling (fromInteger b / 1000000 :: Double)
fromPartSize :: PartSize -> ByteSize
fromPartSize (MegaBytes b) = b * 1000000
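-- A worked example of the SI-unit conversions above (illustrative values):
-- toPartSize 1500000 rounds 1.5 SI megabytes up to MegaBytes 2, and
-- fromPartSize (MegaBytes 2) gives back 2000000 bytes.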
instance Monoid PartSize where
mempty = MegaBytes 0
mappend (MegaBytes a) (MegaBytes b) = MegaBytes (a + b)
reducePartSize :: PartSize -> PartSize -> PartSize
reducePartSize (MegaBytes a) (MegaBytes b) = MegaBytes (a - b)
-- | Flags that can be set on a partition.
data PartFlag = BootFlag | RootFlag | SwapFlag | HiddenFlag | RaidFlag | LvmFlag | LbaFlag | LegacyBootFlag | IrstFlag | EspFlag | PaloFlag
deriving (Show)
instance PartedVal PartFlag where
val BootFlag = "boot"
val RootFlag = "root"
val SwapFlag = "swap"
val HiddenFlag = "hidden"
val RaidFlag = "raid"
val LvmFlag = "lvm"
val LbaFlag = "lba"
val LegacyBootFlag = "legacy_boot"
val IrstFlag = "irst"
val EspFlag = "esp"
val PaloFlag = "palo"
instance PartedVal Bool where
val True = "on"
val False = "off"
instance PartedVal Partition.Fs where
val Partition.EXT2 = "ext2"
val Partition.EXT3 = "ext3"
val Partition.EXT4 = "ext4"
val Partition.BTRFS = "btrfs"
val Partition.REISERFS = "reiserfs"
val Partition.XFS = "xfs"
val Partition.FAT = "fat"
val Partition.VFAT = "vfat"
val Partition.NTFS = "ntfs"
val Partition.LinuxSwap = "linux-swap"
data Eep = YesReallyDeleteDiskContents
-- | Partitions a disk using parted, and formats the partitions.
--
-- The FilePath can be a block device (eg, \/dev\/sda), or a disk image file.
--
-- This deletes any existing partitions in the disk! Use with EXTREME caution!
partitioned :: Eep -> FilePath -> PartTable -> Property NoInfo
partitioned eep disk (PartTable tabletype parts) = property desc $ do
isdev <- liftIO $ isBlockDevice <$> getFileStatus disk
ensureProperty $ combineProperties desc
[ parted eep disk partedparams
, if isdev
then formatl (map (\n -> disk ++ show n) [1 :: Int ..])
else Partition.kpartx disk (formatl . map Partition.partitionLoopDev)
]
where
desc = disk ++ " partitioned"
formatl devs = combineProperties desc (map format (zip parts devs))
partedparams = concat $ mklabel : mkparts (1 :: Integer) mempty parts []
format (p, dev) = Partition.formatted' (partMkFsOpts p)
Partition.YesReallyFormatPartition (partFs p) dev
mklabel = ["mklabel", val tabletype]
mkflag partnum (f, b) =
[ "set"
, show partnum
, val f
, val b
]
mkpart partnum offset p =
[ "mkpart"
, val (partType p)
, val (partFs p)
, val offset
, val (offset <> partSize p)
] ++ case partName p of
Just n -> ["name", show partnum, n]
Nothing -> []
mkparts partnum offset (p:ps) c =
mkparts (partnum+1) (offset <> partSize p) ps
(c ++ mkpart partnum offset p : map (mkflag partnum) (partFlags p))
mkparts _ _ [] c = c
-- | Runs parted on a disk with the specified parameters.
--
-- Parted is run in script mode, so it will never prompt for input.
-- It is asked to use cylinder alignment for the disk.
parted :: Eep -> FilePath -> [String] -> Property NoInfo
parted YesReallyDeleteDiskContents disk ps =
cmdProperty "parted" ("--script":"--align":"cylinder":disk:ps)
`requires` installed
-- | Gets parted installed.
installed :: Property NoInfo
installed = Apt.installed ["parted"]
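-- The sketch below is not part of the original module; it only illustrates
-- how the combinators above fit together. The sizes and the commented-out
-- device path are placeholders.
examplePartTable :: PartTable
examplePartTable = PartTable MSDOS
	[ (mkPartition Partition.EXT4 (MegaBytes 8192)) { partFlags = [(BootFlag, True)] }
	, mkPartition Partition.LinuxSwap (MegaBytes 1024)
	]

-- Applying it would destroy any existing data on the named disk, so the
-- property is left commented out here:
--
-- examplePartitioned :: Property NoInfo
-- examplePartitioned =
-- 	partitioned YesReallyDeleteDiskContents "/dev/sdX" examplePartTable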
|
np/propellor
|
src/Propellor/Property/Parted.hs
|
bsd-2-clause
| 5,940
| 103
| 18
| 1,106
| 1,746
| 950
| 796
| 144
| 4
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Citeproc
import Citeproc.CslJson
import Control.Monad (when, unless, foldM)
import Control.Applicative ((<|>))
import Data.Bifunctor (second)
import Data.Maybe (fromMaybe)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import qualified Data.ByteString.Lazy as BL
import qualified Data.Aeson as Aeson
import Data.Aeson.Encode.Pretty as AesonPretty
import Data.Ord (comparing)
import System.IO
import System.Exit
import System.Environment
import System.Console.GetOpt
main :: IO ()
main = do
rawargs <- getArgs
let (opts, args, errs) = getOpt Permute options rawargs
unless (null errs) $ do
mapM_ err errs
exitWith $ ExitFailure 1
opt <- foldM (flip ($)) defaultOpt opts
when (optHelp opt) $ do
putStr $ usageInfo "citeproc [OPTIONS] [FILE]" options
exitSuccess
when (optVersion opt) $ do
putStrLn $ "citeproc version " ++ VERSION_citeproc
exitSuccess
format <- case optFormat opt of
Just "html" -> return Html
Just "json" -> return Json
Just _ -> err "--format must be html or json"
Nothing -> return Html
bs <- case args of
[] -> BL.getContents
(f:_) -> BL.readFile f
case Aeson.eitherDecode bs of
Left e -> err e
Right (inp :: Inputs (CslJson Text)) -> do
stylesheet <- case optStyle opt of
Just fp -> T.dropWhile (=='\xFEFF') <$> -- drop BOM
TIO.readFile fp
Nothing ->
case inputsStyle inp of
Just s -> return s
Nothing -> err "No style specified"
references <- case optReferences opt of
Just fp -> do
raw <- BL.readFile fp
case Aeson.eitherDecode raw of
Left e -> err e
Right rs -> return rs
Nothing ->
case inputsReferences inp of
Just rs -> return rs
Nothing -> err "No references specified"
abbreviations <- case optAbbreviations opt of
Just fp -> do
raw <- BL.readFile fp
case Aeson.eitherDecode raw of
Left e -> err e
Right ab -> return $ Just ab
Nothing -> return $ inputsAbbreviations inp
let lang = optLang opt <|> inputsLang inp
parseResult <-
parseStyle (\_ -> return mempty) stylesheet
case parseResult of
Left e -> err (T.unpack $ prettyCiteprocError e)
Right parsedStyle -> do
let style = parsedStyle{ styleAbbreviations = abbreviations }
let result= citeproc defaultCiteprocOptions
style
lang
references
(fromMaybe [] (inputsCitations inp))
let jsonResult :: Aeson.Value
jsonResult =
case format of
Json -> Aeson.object
[ ("citations", Aeson.toJSON $
map cslJsonToJson
(resultCitations result))
, ("bibliography", Aeson.toJSON $
map (second cslJsonToJson)
(resultBibliography result))
, ("warnings", Aeson.toJSON $ resultWarnings result)
]
Html -> Aeson.toJSON result
BL.putStr $ AesonPretty.encodePretty'
AesonPretty.defConfig
{ confIndent = AesonPretty.Spaces 2
, confCompare = AesonPretty.keyOrder
["citations","bibliography","warnings"]
`mappend` comparing T.length
, confTrailingNewline = True }
jsonResult
data Format = Json | Html deriving (Show, Ord, Eq)
data Opt =
Opt{ optStyle :: Maybe String
, optReferences :: Maybe String
, optAbbreviations :: Maybe String
, optFormat :: Maybe String
, optLang :: Maybe Lang
, optHelp :: Bool
, optVersion :: Bool
} deriving Show
defaultOpt :: Opt
defaultOpt =
Opt { optStyle = Nothing
, optReferences = Nothing
, optAbbreviations = Nothing
, optFormat = Nothing
, optLang = Nothing
, optHelp = False
, optVersion = False
}
options :: [OptDescr (Opt -> IO Opt)]
options =
[ Option ['s'] ["style"]
(ReqArg (\fp opt -> return opt{ optStyle = Just fp }) "FILE")
"CSL style file"
, Option ['r'] ["references"]
(ReqArg (\fp opt -> return opt{ optReferences = Just fp }) "FILE")
"CSL JSON bibliography"
, Option ['a'] ["abbreviations"]
(ReqArg (\fp opt -> return opt{ optAbbreviations = Just fp }) "FILE")
"CSL abbreviations table"
, Option ['l'] ["lang"]
(ReqArg (\lang opt ->
case parseLang (T.pack lang) of
Right l -> return opt{ optLang = Just l }
Left msg -> err $ "Could not parse language tag:\n" ++ msg)
"BCP 47 language tag")
"Override locale"
, Option ['f'] ["format"]
(ReqArg (\format opt -> return opt{ optFormat = Just format }) "html|json")
"Controls formatting of entries in result"
, Option ['h'] ["help"]
(NoArg (\opt -> return opt{ optHelp = True }))
"Print usage information"
, Option ['V'] ["version"]
(NoArg (\opt -> return opt{ optVersion = True }))
"Print version number"
]
err :: String -> IO a
err s = do
hPutStrLn stderr s
exitWith $ ExitFailure 1
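-- Typical invocations, shown only to illustrate how the options above
-- combine; the file names are placeholders and this comment is not part of
-- the original source:
--
--   citeproc --style chicago-author-date.csl --references refs.json input.json
--   citeproc -f json < input.json > result.json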
|
jgm/citeproc
|
app/Main.hs
|
bsd-2-clause
| 6,053
| 0
| 29
| 2,341
| 1,638
| 841
| 797
| 152
| 15
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE RankNTypes #-}
module Bead.Persistence.NoSQLDir (
Persist
, Config
, defaultConfig
, configToPersistConfig
, Interpreter
, runPersist
, runInterpreter
, ForeignKey(..)
, saveUser
, personalInfo
, filterUsers
, loadUser
, updateUser
, doesUserExist
, userDescription
, userSubmissions
, administratedCourses
, administratedGroups
, scoresOfUser
, attachNotificationToUser
, notificationsOfUser
, copyFile
, listFiles
, getFile
#ifndef SSO
, saveUserReg
, loadUserReg
#endif
, saveCourse
, courseKeys
, filterCourses
, loadCourse
, groupKeysOfCourse
, isUserInCourse
, userCourses
, createCourseAdmin
, courseAdmins
, subscribedToCourse
, unsubscribedFromCourse
, testScriptsOfCourse
, saveGroup
, loadGroup
, courseOfGroup
, filterGroups
, isUserInGroup
, userGroups
, subscribe
, unsubscribe
, groupAdmins
, createGroupAdmin
, subscribedToGroup
, unsubscribedFromGroup
, saveTestScript
, loadTestScript
, courseOfTestScript
, modifyTestScript
, saveTestCase
, loadTestCase
, testScriptOfTestCase
, modifyTestCase
, removeTestCaseAssignment
, copyTestCaseFile
, modifyTestScriptOfTestCase
, saveTestJob
, insertTestFeedback
, finalizeTestFeedback
, testFeedbacks
, deleteTestFeedbacks
, filterAssignment
, assignmentKeys
, saveAssignment
, loadAssignment
, modifyAssignment
, saveCourseAssignment
, saveGroupAssignment
, courseAssignments
, groupAssignments
, courseOfAssignment
, groupOfAssignment
, submissionsForAssignment
, assignmentCreatedTime
, lastSubmission
, testCaseOfAssignment
, saveSubmission
, loadSubmission
, assignmentOfSubmission
, usernameOfSubmission
, filterSubmissions
, submissionKeys
, evaluationOfSubmission
, commentsOfSubmission
, feedbacksOfSubmission
, saveCommentNotification
, saveFeedbackNotification
, saveSystemNotification
, loadNotification
, commentOfNotification
, feedbackOfNotification
, usersOfNotification
, removeFromOpened
, openedSubmissions
, openedSubmissionSubset
, usersOpenedSubmissions
, saveSubmissionEvaluation
, saveScoreEvaluation
, loadEvaluation
, modifyEvaluation
, submissionOfEvaluation
, scoreOfEvaluation
, saveFeedback
, loadFeedback
, submissionOfFeedback
, saveComment
, loadComment
, submissionOfComment
, saveCourseAssessment
, saveGroupAssessment
, loadAssessment
, modifyAssessment
, courseOfAssessment
, groupOfAssessment
, scoresOfAssessment
, assessmentsOfGroup
, assessmentsOfCourse
, saveScore
, loadScore
, assessmentOfScore
, usernameOfScore
, evaluationOfScore
, testIncomingDataDir
, isPersistenceSetUp
, initPersistence
, createPersistInit
, createPersistInterpreter
, parseConfig
#ifdef TEST
, tests
#endif
) where
import Bead.Domain.Types
import Bead.Domain.Entities
import Bead.Domain.Entity.Notification hiding (Feedback)
import Bead.Domain.Relationships
import Bead.Persistence.Initialization
import Bead.Persistence.NoSQLDirFile
import Control.Monad.Transaction.TIO
import Control.Applicative ((<$>))
import Control.Concurrent.MVar
import Control.Exception (IOException)
import Control.Monad (join, liftM, filterM, when, unless, forM)
import Data.Maybe (catMaybes)
import System.FilePath
import System.Directory hiding (copyFile)
import System.Posix.Types (COff(..))
import System.Posix.Files (getFileStatus, fileExist, fileSize, modificationTime)
import Data.Function (on)
import Data.Set (Set)
import Data.Time (UTCTime, getCurrentTime)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Data.List (sortBy, isSuffixOf)
#ifdef TEST
import Test.Tasty.TestSet (TestSet)
#endif
type Persist a = TIO a
reason :: Either IOException a -> (Erroneous a)
reason (Left e) = Left . show $ e
reason (Right x) = Right x
-- No configuration is necessary
data Config = Config
configToPersistConfig = const Config
-- | Creates a persist initialization structure.
createPersistInit :: Config -> IO PersistInit
createPersistInit _ = return PersistInit {
isSetUp = isPersistenceSetUp
, initPersist = initPersistence
, tearDown = nTearDown
}
-- | Creates an interpreter for the persistent computation
createPersistInterpreter :: Config -> IO Interpreter
createPersistInterpreter _ = do
mutex <- newMVar ()
let run cmd = modifyMVar mutex $ \m -> do
x <- runPersist' cmd
return (m,x)
return (Interpreter run)
where
runPersist' :: Persist a -> IO (Erroneous a)
runPersist' = liftM reason . atomically
parseConfig :: String -> Config
parseConfig _ = Config
defaultConfig = Config
newtype Interpreter = Interpreter { unInt :: forall a . Persist a -> IO (Erroneous a) }
runInterpreter (Interpreter run) = run
runPersist = runInterpreter
-- Returns True if all the necessary persistence directories exist on the disk,
-- otherwise False
isPersistenceSetUp :: IO Bool
isPersistenceSetUp = and <$> mapM doesDirectoryExist persistenceDirs
initPersistence :: IO ()
initPersistence = mapM_ createDirWhenDoesNotExist persistenceDirs
where
createDirWhenDoesNotExist d = do
existDir <- doesDirectoryExist d
unless existDir . createDirectory $ d
nTearDown :: IO ()
nTearDown = do
exists <- doesDirectoryExist dataDir
when exists $ removeDirectoryRecursive dataDir
#ifndef SSO
saveUserReg :: UserRegistration -> Persist UserRegKey
saveUserReg u = do
dirName <- createTmpDir userRegDataDir "ur"
let userRegKey = UserRegKey . takeBaseName $ dirName
save dirName u
return userRegKey
loadUserReg :: UserRegKey -> Persist UserRegistration
loadUserReg u = do
let p = userRegDirPath u
isU <- isUserRegDir p
unless isU . throwEx . userError . join $ [userRegKeyFold id u, " user registration does not exist."]
liftM snd $ tLoadUserReg p
#endif
saveUser :: User -> Persist ()
saveUser usr = do
userExist <- isThereAUser (u_username usr)
case userExist of
True -> throwEx $ userError $ "The user already exists: " ++ show (u_username usr)
False -> do
let dirname = dirName usr
createDir dirname
save dirname usr
-- Checks if the given username exists in the persistence layer
-- and has a correct structure
checkIfUserDir :: Username -> Persist ()
checkIfUserDir username = do
let dirname = dirName username
exist <- hasNoRollback $ doesDirectoryExist dirname
unless exist . throwEx . userError $ "User directory does not exist: " ++ show username
correct <- hasNoRollback $ isCorrectStructure dirname userDirStructure
unless correct . throwEx . userError $ "User directory is not correct: " ++ show username
copyFile :: Username -> FilePath -> UsersFile -> Persist ()
copyFile username tmpPath userfile = do
checkIfUserDir username
let dirname = dirName username
datadir = dirname </> (usersFile (const "public-files") (const "private-files") userfile)
copy tmpPath (datadir </> usersFile id id userfile)
-- Calculates the file modification time in UTC from the file status
fileModificationInUTCTime = posixSecondsToUTCTime . realToFrac . modificationTime
listFiles :: Username -> Persist [(UsersFile, FileInfo)]
listFiles username = do
checkIfUserDir username
let dirname = dirName username
publicPaths <- getFilesInFolder (dirname </> "public-files")
publicFiles <- forM publicPaths $ \path -> do
status <- hasNoRollback $ getFileStatus path
let info = FileInfo
(fileOffsetToInt $ fileSize status)
(fileModificationInUTCTime status)
return (UsersPublicFile $ takeFileName path, info)
privatePaths <- getFilesInFolder (dirname </> "private-files")
privateFiles <- forM privatePaths $ \path -> do
status <- hasNoRollback $ getFileStatus path
let info = FileInfo
(fileOffsetToInt $ fileSize status)
(fileModificationInUTCTime status)
return (UsersPrivateFile $ takeFileName path, info)
return $! publicFiles ++ privateFiles
where
fileOffsetToInt (COff x) = fromIntegral x
getFile :: Username -> UsersFile -> Persist FilePath
getFile username userfile = do
checkIfUserDir username
let dirname = dirName username
fname = dirname </> (usersFile (const "public-files") (const "private-files") userfile)
</> (usersFile id id userfile)
exist <- hasNoRollback $ doesFileExist fname
unless exist . throwEx . userError $ concat [
"File (", fname, ") does not exist in users folder ("
, show username, ")"
]
return fname
isThereAUser :: Username -> Persist Bool
isThereAUser uname = hasNoRollback $ do
let dirname = dirName uname
exist <- doesDirectoryExist dirname
case exist of
False -> return False
True -> isCorrectStructure dirname userDirStructure
doesUserExist :: Username -> Persist Bool
doesUserExist = hasNoRollback . doesDirectoryExist . dirName
personalInfo :: Username -> Persist PersonalInfo
personalInfo uname = do
user <- loadUser uname
return $ flip userCata user $ \role _username _email name timezone _lang uid ->
PersonalInfo (role, name, timezone, uid)
isUserDir :: FilePath -> Persist Bool
isUserDir = isCorrectDirStructure userDirStructure
filterUsers :: (User -> Bool) -> Persist [User]
filterUsers f = filterDirectory userDataDir isUserDir load (filter f)
loadUser :: Username -> Persist User
loadUser = load . dirName
userDescription :: Username -> Persist UserDesc
userDescription = liftM mkUserDescription . loadUser
updateUser :: User -> Persist ()
updateUser user = update (dirName . u_username $ user) user
administratedCourses :: Username -> Persist [(CourseKey, Course)]
administratedCourses u = do
let dirname = joinPath [dirName u, "courseadmin"]
(selectValidDirsFrom dirname isCourseDir) >>= (mapM tLoadCourse)
administratedGroups :: Username -> Persist [(GroupKey, Group)]
administratedGroups u = do
let dirname = joinPath [dirName u, "groupadmin"]
(selectValidDirsFrom dirname isGroupDir) >>= (mapM tLoadGroup)
attachNotificationToUser :: Username -> NotificationKey -> Persist ()
attachNotificationToUser u nk = do
link u nk "user"
link nk u "notification"
notificationsOfUser :: Username -> Persist [NotificationKey]
notificationsOfUser =
objectsIn "notification" NotificationKey isNotificationDir
courseDirPath :: CourseKey -> FilePath
courseDirPath (CourseKey c) = joinPath [courseDataDir, c]
groupDirPath :: GroupKey -> FilePath
groupDirPath (GroupKey g) = joinPath [groupDataDir, g]
#ifndef SSO
userRegDirPath :: UserRegKey -> FilePath
userRegDirPath = userRegKeyFold $ \u -> joinPath [userRegDataDir, u]
#endif
loadCourse :: CourseKey -> Persist Course
loadCourse c = do
let p = courseDirPath c
isC <- isCourseDir p
-- GUARD: Course dir does not exist
unless isC . throwEx . userError . join $ [courseKeyMap id c, " course does not exist."]
-- Course found
liftM snd $ tLoadCourse p
tLoadCourse :: FilePath -> Persist (CourseKey, Course)
tLoadCourse = tLoadPersistenceObject CourseKey
#ifndef SSO
tLoadUserReg :: FilePath -> Persist (UserRegKey, UserRegistration)
tLoadUserReg = tLoadPersistenceObject UserRegKey
#endif
groupKeysOfCourse :: CourseKey -> Persist [GroupKey]
groupKeysOfCourse c = do
let p = courseDirPath c
g = joinPath [p, "groups"]
subdirs <- getSubDirectories g
return . map (GroupKey . takeBaseName) $ subdirs
createCourseAdmin :: Username -> CourseKey -> Persist ()
createCourseAdmin u ck = do
usr <- loadUser u
case atLeastCourseAdmin . u_role $ usr of
False -> throwEx . userError . join $ [usernameCata id u, " is not course admin"]
True -> do
link u ck "admins"
link ck u "courseadmin"
subscribedToCourse :: CourseKey -> Persist [Username]
subscribedToCourse = objectsIn "users" Username isUserDir
subscribedToGroup :: GroupKey -> Persist [Username]
subscribedToGroup = objectsIn "users" Username isUserDir
unsubscribedFromCourse :: CourseKey -> Persist [Username]
unsubscribedFromCourse = objectsIn "unsubscribed" Username isUserDir
unsubscribedFromGroup :: GroupKey -> Persist [Username]
unsubscribedFromGroup = objectsIn "unsubscribed" Username isUserDir
isCorrectDirStructure :: DirStructure -> FilePath -> Persist Bool
isCorrectDirStructure d p = hasNoRollback $ isCorrectStructure p d
scoresOfUser :: Username -> Persist [ScoreKey]
scoresOfUser = objectsIn "score" ScoreKey isScoreDir
isGroupDir :: FilePath -> Persist Bool
isGroupDir = isCorrectDirStructure groupDirStructure
loadGroup :: GroupKey -> Persist Group
loadGroup g = do
let p = groupDirPath g
isG <- isGroupDir p
-- GUARD: Group id does not exist
unless isG . throwEx . userError . join $ [groupKeyMap id g, " group does not exist."]
liftM snd $ tLoadGroup p
where
groupDirPath :: GroupKey -> FilePath
groupDirPath (GroupKey g) = joinPath [groupDataDir, g]
admins :: (DirName k) => k -> Persist [Username]
admins k = do
let dirname = joinPath [dirName k, "admins"]
mapM (liftM u_username . load) =<< (selectValidDirsFrom dirname isUserDir)
groupAdmins :: GroupKey -> Persist [Username]
groupAdmins = admins
courseAdmins :: CourseKey -> Persist [Username]
courseAdmins = admins
testScriptsOfCourse :: CourseKey -> Persist [TestScriptKey]
testScriptsOfCourse = objectsIn "test-script" TestScriptKey isTestScriptDir
isUserInGroup :: Username -> GroupKey -> Persist Bool
isUserInGroup u gk = isLinkedIn u gk "users"
isUserInCourse :: Username -> CourseKey -> Persist Bool
isUserInCourse u ck = isLinkedIn u ck "users"
subscribe :: Username -> CourseKey -> GroupKey -> Persist ()
subscribe username ck gk = do
link username gk "users"
link username ck "users"
link gk username "group"
link ck username "course"
unsubscribe :: Username -> CourseKey -> GroupKey -> Persist ()
unsubscribe username ck gk = do
unlink username gk "users"
unlink username ck "users"
unlink gk username "group"
unlink ck username "course"
link username gk "unsubscribed"
link username ck "unsubscribed"
createGroupAdmin :: Username -> GroupKey -> Persist ()
createGroupAdmin u gk = do
link u gk "admins"
link gk u "groupadmin"
courseOfGroup :: GroupKey -> Persist CourseKey
courseOfGroup = objectOrError "No course was found for " "course" CourseKey isCourseDir
tLoadPersistenceObject :: (Load o)
=> (String -> k) -- ^ Key constructor
-> FilePath -- ^ Base path
-> Persist (k,o) -- ^ Key and the loaded object
tLoadPersistenceObject f d = do
let key = takeBaseName d
object <- load d
return (f key, object)
tLoadGroup :: FilePath -> Persist (GroupKey, Group)
tLoadGroup = tLoadPersistenceObject GroupKey
saveCourse :: Course -> Persist CourseKey
saveCourse c = do
dirName <- createTmpDir courseDataDir "cr"
let courseKey = CourseKey . takeBaseName $ dirName
save dirName c
return courseKey
userCourses :: Username -> Persist [CourseKey]
userCourses u = do
let dirname = joinPath [dirName u, "course"]
map (CourseKey . takeBaseName) <$> (selectValidDirsFrom dirname isCourseDir)
userGroups :: Username -> Persist [GroupKey]
userGroups u = do
let dirname = joinPath [dirName u, "group"]
map (GroupKey . takeBaseName) <$> (selectValidDirsFrom dirname isGroupDir)
class ForeignKey k where
referredPath :: k -> DirPath
baseName :: k -> String
foreignKey :: (ForeignKey k1, ForeignKey k2) => k1 -> k2 -> FilePath -> Persist ()
foreignKey object linkto subdir =
createLink
(joinPath ["..", "..", "..", "..", (referredPath object)])
(joinPath [(referredPath linkto), subdir, baseName object])
removeForeignKey :: (ForeignKey k1, ForeignKey k2) => k1 -> k2 -> FilePath -> Persist ()
removeForeignKey object linkto subdir =
deleteLink
(joinPath ["..", "..", "..", "..", (referredPath object)])
(joinPath [(referredPath linkto), subdir, baseName object])
isLinkedIn :: (ForeignKey k1, ForeignKey k2) => k1 -> k2 -> FilePath -> Persist Bool
isLinkedIn object linkto subdir =
hasNoRollback . doesDirectoryExist . joinPath $ [referredPath linkto, subdir, baseName object]
link :: (ForeignKey k1, ForeignKey k2) => k1 -> k2 -> FilePath -> Persist ()
link object linkto subdir = do
exist <- isLinkedIn object linkto subdir
unless exist $ foreignKey object linkto subdir
unlink :: (ForeignKey k1, ForeignKey k2) => k1 -> k2 -> FilePath -> Persist ()
unlink object linkto subdir = do
exist <- isLinkedIn object linkto subdir
when exist $ removeForeignKey object linkto subdir
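-- A small illustrative helper, not part of the original module: it only shows
-- how 'isLinkedIn', 'link' and 'unlink' compose. The "users" subdirectory is
-- the same one used by 'subscribe' and 'unsubscribe' above.
toggleUserLink :: Username -> GroupKey -> Persist ()
toggleUserLink u gk = do
  linked <- isLinkedIn u gk "users"
  if linked then unlink u gk "users" else link u gk "users"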
instance ForeignKey Username where
referredPath = dirName
baseName = takeBaseName . dirName
instance ForeignKey GroupKey where
referredPath (GroupKey g) = joinPath [groupDataDir, g]
baseName (GroupKey g) = g
instance ForeignKey CourseKey where
referredPath (CourseKey c) = joinPath [courseDataDir, c]
baseName (CourseKey c) = c
instance ForeignKey AssignmentKey where
referredPath (AssignmentKey a) = joinPath [assignmentDataDir, a]
baseName (AssignmentKey a) = a
instance ForeignKey SubmissionKey where
referredPath (SubmissionKey s) = joinPath [submissionDataDir, s]
baseName (SubmissionKey s) = s
instance ForeignKey TestJobKey where
referredPath (TestJobKey s) = joinPath [testOutgoingDataDir, s]
baseName (TestJobKey s) = s
instance ForeignKey FeedbackKey where
referredPath (FeedbackKey s) = joinPath [feedbackDataDir, s]
baseName (FeedbackKey s) = s
instance ForeignKey NotificationKey where
referredPath (NotificationKey k) = joinPath [notificationDataDir, k]
baseName (NotificationKey k) = k
{- * One primitive value is stored in a file with the same name as the row.
   * One combined value is stored in the given directory as many files. The name
     of the directory is the primary key for the record.
   * The foreign keys are symlinks pointing to the rows of the other combined objects.
-}
saveGroup :: CourseKey -> Group -> Persist GroupKey
saveGroup ck group = do
dirName <- createTmpDir groupDataDir "gr"
let groupKey = GroupKey . takeBaseName $ dirName
save dirName group
link groupKey ck "groups"
link ck groupKey "course"
return groupKey
filterGroups :: (GroupKey -> Group -> Bool) -> Persist [(GroupKey, Group)]
filterGroups f = filterDirectory groupDataDir isGroupDir tLoadGroup (filter (uncurry f))
currentTime :: Persist UTCTime
currentTime = hasNoRollback getCurrentTime
saveAssignment :: Assignment -> Persist AssignmentKey
saveAssignment a = do
dirName <- createTmpDir assignmentDataDir "a"
let assignmentKey = takeBaseName dirName
save dirName a
saveCreatedTime dirName =<< currentTime
return . AssignmentKey $ assignmentKey
modifyAssignment :: AssignmentKey -> Assignment -> Persist ()
modifyAssignment ak a = do
let p = assignmentDirPath ak
isA <- isAssignmentDir p
unless isA . throwEx . userError $ "Assignment does not exist"
update p a
assignmentCreatedTime :: AssignmentKey -> Persist UTCTime
assignmentCreatedTime ak = do
let p = assignmentDirPath ak
isDir <- isAssignmentDir p
case isDir of
False -> throwEx $ userError $ join [assignmentKeyMap id ak, " assignment does not exist."]
True -> getCreatedTime p
selectValidDirsFrom :: FilePath -> (FilePath -> Persist Bool) -> Persist [FilePath]
selectValidDirsFrom dir isValidDir = getSubDirectories dir >>= filterM isValidDir
assignmentKeys :: Persist [AssignmentKey]
assignmentKeys =
(selectValidDirsFrom assignmentDataDir isAssignmentDir) >>=
calcExerciseKeys
where
calcExerciseKeys = return . map (AssignmentKey . takeBaseName)
filterAssignment :: (AssignmentKey -> Assignment -> Bool) -> Persist [(AssignmentKey, Assignment)]
filterAssignment f = filterDirectory assignmentDataDir isAssignmentDir tLoadAssignment (filter (uncurry f))
loadAssignment :: AssignmentKey -> Persist Assignment
loadAssignment a = do
let p = assignmentDirPath a
isEx <- isAssignmentDir p
case isEx of
False -> throwEx $ userError $ join [assignmentKeyMap id a, " assignment does not exist."]
True -> liftM snd $ tLoadAssignment p
assignmentDirPath :: AssignmentKey -> FilePath
assignmentDirPath (AssignmentKey e) = joinPath [assignmentDataDir, e]
tLoadAssignment :: FilePath -> Persist (AssignmentKey, Assignment)
tLoadAssignment dirName = do
let exerciseKey = takeBaseName dirName
e <- load dirName
return (AssignmentKey exerciseKey, e)
objectsIn
:: (Show sk, DirName sk)
=> FilePath -- Subdir where to look at
-> (String -> rk) -- Result's key constructor
  -> (FilePath -> Persist Bool) -- Checks if the given subdir is appropriate
-> sk -- The source's key
-> Persist [rk]
objectsIn subdir keyConstructor isValidDir sourceKey = do
let dirname = joinPath [dirName sourceKey, subdir]
map (keyConstructor . takeBaseName) <$> (selectValidDirsFrom dirname isValidDir)
objectIn s k v sk = objectsIn s k v sk >>= just
  where
    just [] = return Nothing
    just [k] = return $ Just k
    just _ = throwEx . userError $ "Impossible: found more than one object for: " ++ show sk
-- Looks up the single object in the given subdirectory, validating each
-- entry with 'isValidDir'; fails with the given message if none is found.
objectOrError msg subdir keyConstructor isValidDir sourceKey = do
m <- objectIn subdir keyConstructor isValidDir sourceKey
case m of
Nothing -> throwEx . userError $ msg ++ show sourceKey
Just x -> return x
courseOfAssignment :: AssignmentKey -> Persist (Maybe CourseKey)
courseOfAssignment = objectIn "course" CourseKey isCourseDir
groupOfAssignment :: AssignmentKey -> Persist (Maybe GroupKey)
groupOfAssignment = objectIn "group" GroupKey isGroupDir
testCaseOfAssignment :: AssignmentKey -> Persist (Maybe TestCaseKey)
testCaseOfAssignment = objectIn "test-case" TestCaseKey isTestCaseDir
submissionsForAssignment :: AssignmentKey -> Persist [SubmissionKey]
submissionsForAssignment = objectsIn "submission" SubmissionKey isSubmissionDir
saveAndLinkAssignment :: (ForeignKey k) => FilePath -> k -> Assignment -> Persist AssignmentKey
saveAndLinkAssignment subdir k a = do
ak <- saveAssignment a
link ak k "assignments"
link k ak subdir
return ak
saveCourseAssignment :: CourseKey -> Assignment -> Persist AssignmentKey
saveCourseAssignment = saveAndLinkAssignment "course"
saveGroupAssignment :: GroupKey -> Assignment -> Persist AssignmentKey
saveGroupAssignment = saveAndLinkAssignment "group"
isAssignmentDir :: FilePath -> Persist Bool
isAssignmentDir = isCorrectDirStructure assignmentDirStructure
assignmentsFor :: (a -> FilePath) -> a -> Persist [AssignmentKey]
assignmentsFor dirPath k = do
fp <- (selectValidDirsFrom (joinPath [dirPath k, "assignments"]) isAssignmentDir)
return ((AssignmentKey . takeBaseName) <$> fp)
courseAssignments :: CourseKey -> Persist [AssignmentKey]
courseAssignments = assignmentsFor courseDirPath
groupAssignments :: GroupKey -> Persist [AssignmentKey]
groupAssignments = assignmentsFor groupDirPath
courseKeys :: Persist [CourseKey]
courseKeys =
(selectValidDirsFrom courseDataDir isCourseDir) >>=
calcCourseKeys
where
calcCourseKeys = return . map (CourseKey . takeBaseName)
isCourseDir :: FilePath -> Persist Bool
isCourseDir = isCorrectDirStructure courseDirStructure
#ifndef SSO
isUserRegDir :: FilePath -> Persist Bool
isUserRegDir = isCorrectDirStructure userRegDirStructure
#endif
filterCourses :: (CourseKey -> Course -> Bool) -> Persist [(CourseKey, Course)]
filterCourses f = filterDirectory courseDataDir isCourseDir tLoadCourse (filter (uncurry f))
-- * Notification
isNotificationDir :: FilePath -> Persist Bool
isNotificationDir = isCorrectDirStructure notificationDirStructure
saveNotification :: Notification -> Persist NotificationKey
saveNotification n = do
dirName <- createTmpDir notificationDataDir "n"
let notificationKey = NotificationKey . takeBaseName $ dirName
save dirName n
return notificationKey
saveCommentNotification :: CommentKey -> Notification -> Persist NotificationKey
saveCommentNotification ck n = do
nk <- saveNotification n
link ck nk "comment"
return nk
saveFeedbackNotification :: FeedbackKey -> Notification -> Persist NotificationKey
saveFeedbackNotification fk n = do
nk <- saveNotification n
link fk nk "feedback"
return nk
saveSystemNotification :: Notification -> Persist NotificationKey
saveSystemNotification = saveNotification
loadNotification :: NotificationKey -> Persist Notification
loadNotification = load . dirName
commentOfNotification :: NotificationKey -> Persist (Maybe CommentKey)
commentOfNotification = objectIn "comment" CommentKey isCommentDir
feedbackOfNotification :: NotificationKey -> Persist (Maybe FeedbackKey)
feedbackOfNotification = objectIn "feedback" FeedbackKey isFeedbackDir
usersOfNotification :: NotificationKey -> Persist [Username]
usersOfNotification = objectsIn "user" Username isUserDir
-- * Submission
isSubmissionDir :: FilePath -> Persist Bool
isSubmissionDir = isCorrectDirStructure submissionDirStructure
saveSubmission :: AssignmentKey -> Username -> Submission -> Persist SubmissionKey
saveSubmission ak u s = do
dirName <- createTmpDir submissionDataDir "s"
let submissionKey = SubmissionKey . takeBaseName $ dirName
save dirName s
link ak submissionKey "assignment"
link u submissionKey "user"
link submissionKey ak "submission"
linkUserSubmission submissionKey
nPlaceToOpened ak u submissionKey
return submissionKey
where
linkUserSubmission :: SubmissionKey -> Persist ()
linkUserSubmission sk = do
let dirName = userAsgSubmissionDir u ak
createDirIfMissing dirName
createLink
(joinPath ["..","..","..","..","..",(referredPath sk)])
(joinPath [dirName, (baseName sk)])
userAsgSubmissionDir :: Username -> AssignmentKey -> FilePath
userAsgSubmissionDir u ak = joinPath [referredPath u, "submissions", baseName ak]
lastSubmission :: AssignmentKey -> Username -> Persist (Maybe SubmissionKey)
lastSubmission ak u = do
let dirName = userAsgSubmissionDir u ak
e <- hasNoRollback $ doesDirectoryExist dirName
case e of
False -> return Nothing
True -> do
paths <- selectValidDirsFrom dirName isSubmissionDir
case paths of
[] -> return Nothing
ps -> (Just . snd . last . sortBy (compare `on` (solutionPostDate . fst))) <$> (mapM loadSbm ps)
where
loadSbm p = do
s <- load p
return (s, SubmissionKey . takeBaseName $ p)
-- TODO: Validate the directory
loadSubmission :: SubmissionKey -> Persist Submission
loadSubmission = load . dirName
assignmentOfSubmission :: SubmissionKey -> Persist AssignmentKey
assignmentOfSubmission sk =
objectOrError "No assignment was found for the " "assignment" AssignmentKey isAssignmentDir sk
usernameOfSubmission :: SubmissionKey -> Persist Username
usernameOfSubmission sk =
objectOrError "No assignment was found for the " "user" Username isUserDir sk
evaluationOfSubmission :: SubmissionKey -> Persist (Maybe EvaluationKey)
evaluationOfSubmission =
objectIn "evaluation" EvaluationKey isEvaluationDir
filterSubmissions :: (SubmissionKey -> Submission -> Bool) -> Persist [(SubmissionKey, Submission)]
filterSubmissions f = filterDirectory submissionDataDir isSubmissionDir tLoadSubmission (filter (uncurry f))
submissionKeys :: Persist [SubmissionKey]
submissionKeys = map fst <$> filterSubmissions (\_ _ -> True)
tLoadSubmission :: FilePath -> Persist (SubmissionKey, Submission)
tLoadSubmission dirName = do
s <- load dirName
return (SubmissionKey . takeBaseName $ dirName, s)
openedSubmissionDataDirPath :: AssignmentKey -> Username -> FilePath
openedSubmissionDataDirPath ak u =
joinPath [openSubmissionDataDir, "assignment", baseName ak, baseName u]
nPlaceToOpened :: AssignmentKey -> Username -> SubmissionKey -> Persist ()
nPlaceToOpened ak u sk = do
let lookupPath = openedSubmissionDataDirPath ak u
createLink
(joinPath ["..", "..", "..", (referredPath sk)])
(joinPath [openSubmissionAllDataDir, baseName sk])
createDirIfMissing lookupPath
createLink
(joinPath ["..", "..", "..", "..", "..", (referredPath sk)])
(joinPath [lookupPath, baseName sk])
removeFromOpened :: AssignmentKey -> Username -> SubmissionKey -> Persist ()
removeFromOpened ak u sk = do
let openedAllSubmissionKeyPath = joinPath [openSubmissionAllDataDir, baseName sk]
exist <- hasNoRollback $ fileExist openedAllSubmissionKeyPath
when exist $ removeSymLink openedAllSubmissionKeyPath
let openedSubmissionDataDir = joinPath [openedSubmissionDataDirPath ak u, baseName sk]
exist <- hasNoRollback $ fileExist openedSubmissionDataDir
when exist $ removeSymLink openedSubmissionDataDir
usersOpenedSubmissions :: AssignmentKey -> Username -> Persist [SubmissionKey]
usersOpenedSubmissions ak u = do
let path = openedSubmissionDataDirPath ak u
exists <- doesDirExist path
if exists
then filterDirectory path isSubmissionDir tLoadSubmission (map fst)
else return []
filterDirectory :: FilePath -> (FilePath -> Persist Bool) -> (FilePath -> Persist a) -> ([a] -> [b]) -> Persist [b]
filterDirectory dir isValid loader f = f <$> ((selectValidDirsFrom dir isValid) >>= (mapM loader))
-- | First checks whether the directory exists; if not, the result is an empty
-- list, otherwise the result is the filtered directory contents
safeFilterDirectory :: FilePath -> (FilePath -> Persist Bool) -> (FilePath -> Persist a) -> ([a] -> [b]) -> Persist [b]
safeFilterDirectory dir isValid loader f = do
e <- doesDirExist dir
case e of
False -> return []
True -> filterDirectory dir isValid loader f
openedSubmissions :: Persist [SubmissionKey]
openedSubmissions = filterDirectory openSubmissionAllDataDir isSubmissionDir tLoadSubmission (map fst)
openedSubmissionSubset :: Set AssignmentKey -> Set Username -> Persist [SubmissionKey]
openedSubmissionSubset = error "NoSQLDir.openedSubmissionSubset is undefined"
userDirPath :: Username -> FilePath
userDirPath (Username u) = joinPath [userDataDir, u]
submissionDirPath :: SubmissionKey -> FilePath
submissionDirPath (SubmissionKey sk) = joinPath [submissionDataDir, sk]
userSubmissions :: Username -> AssignmentKey -> Persist [SubmissionKey]
userSubmissions u ak =
safeFilterDirectory
(joinPath [userDirPath u, "submissions", baseName ak])
isSubmissionDir
(return . takeBaseName)
(map SubmissionKey)
commentsOfSubmission :: SubmissionKey -> Persist [CommentKey]
commentsOfSubmission sk =
filterDirectory
(joinPath [submissionDirPath sk, "comment"])
isCommentDir
(return . takeBaseName)
(map CommentKey)
-- * Evaluation
instance ForeignKey EvaluationKey where
referredPath (EvaluationKey e) = joinPath [evaluationDataDir, e]
baseName (EvaluationKey e) = e
isEvaluationDir :: FilePath -> Persist Bool
isEvaluationDir = isCorrectDirStructure evaluationDirStructure
saveEvaluation :: Evaluation -> Persist EvaluationKey
saveEvaluation e = do
dirName <- createTmpDir evaluationDataDir "ev"
let evKey = EvaluationKey . takeBaseName $ dirName
save dirName e
return evKey
saveSubmissionEvaluation :: SubmissionKey -> Evaluation -> Persist EvaluationKey
saveSubmissionEvaluation sk e = do
key <- saveEvaluation e
link key sk "evaluation"
link sk key "submission"
return key
saveScoreEvaluation :: ScoreKey -> Evaluation -> Persist EvaluationKey
saveScoreEvaluation sk e = do
key <- saveEvaluation e
link key sk "evaluation"
link sk key "score"
return key
evaluationDirPath :: EvaluationKey -> FilePath
evaluationDirPath (EvaluationKey e) = joinPath [evaluationDataDir, e]
loadEvaluation :: EvaluationKey -> Persist Evaluation
loadEvaluation e = do
let p = evaluationDirPath e
isE <- isEvaluationDir p
unless isE . throwEx . userError . join $ ["Evaluation does not exist."]
liftM snd . tLoadPersistenceObject EvaluationKey $ p
modifyEvaluation :: EvaluationKey -> Evaluation -> Persist ()
modifyEvaluation ek e = do
let p = evaluationDirPath ek
isE <- isEvaluationDir p
unless isE . throwEx . userError . join $ ["Evaluation does not exist."]
update p e
submissionOfEvaluation :: EvaluationKey -> Persist (Maybe SubmissionKey)
submissionOfEvaluation =
objectIn "submission" SubmissionKey isSubmissionDir
scoreOfEvaluation :: EvaluationKey -> Persist (Maybe ScoreKey)
scoreOfEvaluation =
objectIn "score" ScoreKey isScoreDir
-- * Comment
instance ForeignKey CommentKey where
referredPath (CommentKey c) = joinPath [commentDataDir, c]
baseName (CommentKey c) = c
commentDirPath :: CommentKey -> FilePath
commentDirPath (CommentKey c) = joinPath [commentDataDir, c]
isCommentDir :: FilePath -> Persist Bool
isCommentDir = isCorrectDirStructure commentDirStructure
saveComment :: SubmissionKey -> Comment -> Persist CommentKey
saveComment sk c = do
dirName <- createTmpDir commentDataDir "cm"
let key = CommentKey . takeBaseName $ dirName
save dirName c
link key sk "comment"
link sk key "submission"
return key
loadComment :: CommentKey -> Persist Comment
loadComment ck = do
let p = commentDirPath ck
isC <- isCommentDir p
unless isC . throwEx $ userError "Comment does not exist."
liftM snd . tLoadPersistenceObject CommentKey $ p
submissionOfComment :: CommentKey -> Persist SubmissionKey
submissionOfComment =
objectOrError "No submission was found for " "submission" SubmissionKey isSubmissionDir
-- * Test Script
testScriptDirPath :: TestScriptKey -> FilePath
testScriptDirPath = testScriptKeyCata $ \k -> joinPath [testScriptDataDir, k]
isTestScriptDir :: FilePath -> Persist Bool
isTestScriptDir = isCorrectDirStructure testScriptDirStructure
instance ForeignKey TestScriptKey where
referredPath = testScriptDirPath
baseName (TestScriptKey k) = k
saveTestScript :: CourseKey -> TestScript -> Persist TestScriptKey
saveTestScript ck ts = do
dirName <- createTmpDir testScriptDataDir "ts"
let key = TestScriptKey $ takeBaseName dirName
save dirName ts
link key ck "test-script"
link ck key "course"
return key
loadTestScript :: TestScriptKey -> Persist TestScript
loadTestScript tk = do
let p = testScriptDirPath tk
isTS <- isTestScriptDir p
unless isTS . throwEx $ userError "Not a test script directory"
snd <$> tLoadPersistenceObject TestScriptKey p
courseOfTestScript :: TestScriptKey -> Persist CourseKey
courseOfTestScript =
objectOrError "No course was found for " "course" CourseKey isCourseDir
modifyTestScript :: TestScriptKey -> TestScript -> Persist ()
modifyTestScript tk ts = do
let p = testScriptDirPath tk
isTS <- isTestScriptDir p
unless isTS . throwEx $ userError "Test Script does not exist"
update p ts
-- * Test Case
testCaseDirPath :: TestCaseKey -> FilePath
testCaseDirPath = testCaseKeyCata $ \k -> joinPath [testCaseDataDir, k]
isTestCaseDir :: FilePath -> Persist Bool
isTestCaseDir = isCorrectDirStructure testCaseDirStructure
instance ForeignKey TestCaseKey where
referredPath = testCaseDirPath
baseName (TestCaseKey k) = k
saveTestCase :: TestScriptKey -> AssignmentKey -> TestCase -> Persist TestCaseKey
saveTestCase tk ak tc = do
dirName <- createTmpDir testCaseDataDir "tc"
let key = TestCaseKey $ takeBaseName dirName
save dirName tc
link key ak "test-case"
link ak key "assignment"
link tk key "test-script"
return key
removeTestCaseAssignment :: TestCaseKey -> AssignmentKey -> Persist ()
removeTestCaseAssignment tk ak = do
unlink tk ak "test-case"
unlink ak tk "assignment"
modifyTestScriptOfTestCase :: TestCaseKey -> TestScriptKey -> Persist ()
modifyTestScriptOfTestCase tck tsk = do
tskOld <- testScriptOfTestCase tck
unlink tskOld tck "test-script"
link tsk tck "test-script"
loadTestCase :: TestCaseKey -> Persist TestCase
loadTestCase tk = do
let p = testCaseDirPath tk
isTC <- isTestCaseDir p
unless isTC . throwEx $ userError "Not a test case directory"
snd <$> tLoadPersistenceObject TestCaseKey p
testScriptOfTestCase :: TestCaseKey -> Persist TestScriptKey
testScriptOfTestCase =
objectOrError "No Test Script was found for " "test-script" TestScriptKey isTestScriptDir
modifyTestCase :: TestCaseKey -> TestCase -> Persist ()
modifyTestCase tk tc = do
let p = testCaseDirPath tk
isTC <- isTestCaseDir p
unless isTC . throwEx $ userError "Test Case does not exist"
update p tc
copyTestCaseFile :: TestCaseKey -> Username -> UsersFile -> Persist ()
copyTestCaseFile tk u uf = do
let p = testCaseDirPath tk
isTC <- isTestCaseDir p
unless isTC . throwEx $ userError "Test Case does not exist"
ufp <- getFile u uf
overwriteFile ufp (p </> "value")
-- Collects the test script, test case and the submission and copies them to
-- the directory named after the submission key in the test-outgoing directory
saveTestJob :: SubmissionKey -> Persist ()
saveTestJob sk = do
ak <- assignmentOfSubmission sk
mtk <- testCaseOfAssignment ak
maybe (return ()) copyParts mtk
where
    -- If there is a test case, we copy the information to the test job directory
copyParts :: TestCaseKey -> Persist ()
copyParts tk = do
tsk <- testScriptOfTestCase tk
let submissionFile = referredPath sk </> "solution"
testcaseFile = referredPath tk </> "value"
testscriptFile = referredPath tsk </> "script"
tjk = submissionKeyToTestJobKey sk
tjPath = referredPath tjk
exist <- hasNoRollback $ doesDirectoryExist tjPath
when exist . throwEx . userError $ concat ["Test job directory already exist:", show tjk]
createDirLocked tjPath $ \p -> do
copy submissionFile (p </> "submission")
copy testscriptFile (p </> "script")
copy testcaseFile (p </> "tests")
insertTestFeedback :: SubmissionKey -> FeedbackInfo -> Persist ()
insertTestFeedback sk info = do
let sDir = submissionKeyMap (testIncomingDataDir </>) sk <.> "locked"
hasNoRollback $ createDirectoryIfMissing True sDir
let student comment = fileSave sDir "public" comment
admin comment = fileSave sDir "private" comment
result bool = fileSave sDir "result" (show bool)
feedbackInfo result student admin evaluated info
where
evaluated _ _ = error "insertTestComment: Evaluation should not be inserted by test."
finalizeTestFeedback :: SubmissionKey -> Persist ()
finalizeTestFeedback sk = do
let sDir = submissionKeyMap (testIncomingDataDir </>) sk
renameDir (sDir <.> "locked") sDir
-- Test feedbacks are stored in the persistence layer, in the test-incoming
-- directory, each in a subdirectory named after an existing submission in the system
testFeedbacks :: Persist [(SubmissionKey, Feedback)]
testFeedbacks = createFeedbacks =<< processables
where
    processables = filter (not . (".locked" `isSuffixOf`)) <$>
getSubDirectories testIncomingDataDir
createFeedbacks = fmap join . mapM createFeedback
createFeedback path = do
let sk = SubmissionKey . last $ splitDirectories path
addKey x = (sk, x)
files <- getFilesInFolder path
fmap (map addKey . catMaybes) $
forM files $ \file -> do
fileDate <- fileModificationInUTCTime <$> (hasNoRollback $ getFileStatus file)
let (dir,fname) = splitFileName file
feedback f = Feedback f fileDate
case fname of
"private" -> Just . feedback . MessageForAdmin <$> fileLoad dir fname Just
"public" -> Just . feedback . MessageForStudent <$> fileLoad dir fname Just
"result" -> Just . feedback . TestResult <$> fileLoad dir fname readMaybe
_ -> return Nothing
-- Deletes the test feedbacks (test-agent results and messages as well)
-- by removing the test-incoming directory entry named after
-- an existing submission
deleteTestFeedbacks :: SubmissionKey -> Persist ()
deleteTestFeedbacks =
submissionKeyMap (hasNoRollback . removeDirectoryRecursive . (testIncomingDataDir </>))
-- Return all the feedbacks for the given submission
feedbacksOfSubmission :: SubmissionKey -> Persist [FeedbackKey]
feedbacksOfSubmission = objectsIn "feedback" FeedbackKey isFeedbackDir
-- * Feedback
feedbackDirPath :: FeedbackKey -> FilePath
feedbackDirPath = feedbackKey $ \k -> joinPath [feedbackDataDir, k]
isFeedbackDir :: FilePath -> Persist Bool
isFeedbackDir = isCorrectDirStructure feedbackDirStructure
-- Saves the feedback
saveFeedback :: SubmissionKey -> Feedback -> Persist FeedbackKey
saveFeedback sk f = do
dirName <- createTmpDir feedbackDataDir "f"
let key = FeedbackKey $ takeBaseName dirName
save dirName f
link key sk "feedback"
link sk key "submission"
return key
-- Loads the feedback
loadFeedback :: FeedbackKey -> Persist Feedback
loadFeedback fk = do
let p = feedbackDirPath fk
isF <- isFeedbackDir p
unless isF . throwEx $ userError "Not a feedback directory"
snd <$> tLoadPersistenceObject FeedbackKey p
-- Returns the submission of the feedback
submissionOfFeedback :: FeedbackKey -> Persist SubmissionKey
submissionOfFeedback =
objectOrError "No Submission was found for " "submission" SubmissionKey isSubmissionDir
-- * Assessment
instance ForeignKey AssessmentKey where
referredPath (AssessmentKey c) = joinPath [assessmentDataDir, c]
baseName (AssessmentKey c) = c
isAssessmentDir :: FilePath -> Persist Bool
isAssessmentDir = isCorrectDirStructure assessmentDirStructure
assessmentDirPath :: AssessmentKey -> FilePath
assessmentDirPath (AssessmentKey e) = joinPath [assessmentDataDir, e]
saveAssessment :: Assessment -> Persist AssessmentKey
saveAssessment as = do
dirName <- createTmpDir assessmentDataDir "at"
let key = AssessmentKey $ takeBaseName dirName
save dirName as
return key
saveCourseAssessment :: CourseKey -> Assessment -> Persist AssessmentKey
saveCourseAssessment ck as = do
key <- saveAssessment as
link key ck "assessments"
link ck key "course"
return key
saveGroupAssessment :: GroupKey -> Assessment -> Persist AssessmentKey
saveGroupAssessment gk as = do
key <- saveAssessment as
link key gk "assessments"
link gk key "group"
return key
loadAssessment :: AssessmentKey -> Persist Assessment
loadAssessment ak = do
let p = assessmentDirPath ak
isDir <- isAssessmentDir p
unless isDir . throwEx $ userError "Not an assessment directory"
snd <$> tLoadPersistenceObject AssessmentKey p
modifyAssessment :: AssessmentKey -> Assessment -> Persist ()
modifyAssessment ak a = do
let p = assessmentDirPath ak
isDir <- isAssessmentDir p
unless isDir . throwEx $ userError "Not an assessment directory"
update p a
courseOfAssessment :: AssessmentKey -> Persist (Maybe CourseKey)
courseOfAssessment = objectIn "course" CourseKey isCourseDir
groupOfAssessment :: AssessmentKey -> Persist (Maybe GroupKey)
groupOfAssessment = objectIn "group" GroupKey isGroupDir
scoresOfAssessment :: AssessmentKey -> Persist [ScoreKey]
scoresOfAssessment = objectsIn "score" ScoreKey isScoreDir
assessmentsOfGroup :: GroupKey -> Persist [AssessmentKey]
assessmentsOfGroup = objectsIn "assessments" AssessmentKey isAssessmentDir
assessmentsOfCourse :: CourseKey -> Persist [AssessmentKey]
assessmentsOfCourse = objectsIn "assessments" AssessmentKey isAssessmentDir
-- * Score
instance ForeignKey ScoreKey where
referredPath (ScoreKey c) = joinPath [scoreDataDir, c]
baseName (ScoreKey c) = c
isScoreDir :: FilePath -> Persist Bool
isScoreDir = isCorrectDirStructure scoreDirStructure
scoreDirPath :: ScoreKey -> FilePath
scoreDirPath (ScoreKey e) = joinPath [scoreDataDir, e]
saveScore :: Username -> AssessmentKey -> Score -> Persist ScoreKey
saveScore u ak s = do
dirName <- createTmpDir scoreDataDir "sc"
let key = ScoreKey $ takeBaseName dirName
save dirName s
link key u "score"
link u key "user"
link ak key "assessment"
link key ak "score"
return key
loadScore :: ScoreKey -> Persist Score
loadScore sk = do
let p = scoreDirPath sk
isDir <- isScoreDir p
unless isDir . throwEx $ userError "Not a score directory"
snd <$> tLoadPersistenceObject ScoreKey p
assessmentOfScore :: ScoreKey -> Persist AssessmentKey
assessmentOfScore = objectOrError "No assessment was found for " "assessment" AssessmentKey isAssessmentDir
usernameOfScore :: ScoreKey -> Persist Username
usernameOfScore = objectOrError "No user was found for " "user" Username isUserDir
evaluationOfScore :: ScoreKey -> Persist (Maybe EvaluationKey)
evaluationOfScore = objectIn "evaluation" EvaluationKey isEvaluationDir
-- * Tools
encodePwd :: String -> String
encodePwd = ordEncode
#ifdef TEST
tests :: TestSet ()
tests = return ()
#endif
|
pgj/bead
|
src/Bead/Persistence/NoSQLDir.hs
|
bsd-3-clause
| 44,629
| 0
| 22
| 7,842
| 12,161
| 6,021
| 6,140
| 1,010
| 4
|
{-# LANGUAGE BangPatterns,NPlusKPatterns #-}
module Frog where
import Data.Char (isSpace)
import qualified Data.ByteString.Char8 as C
import qualified Data.Vector as V
parseInt = C.readInt . C.dropWhile isSpace
getIntVec n = V.unfoldrN n parseInt <$> C.getLine
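-- 'T' is an infinite binary tree used for lazy memoisation over the naturals:
-- 'nats' is built so that index nats i == i, hence tmap f nats caches the
-- value of f at every index.  'main' uses it to memoise the frog-jump DP
-- (minimum total height-difference cost to reach the last element, moving
-- one or two positions at a time).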
data T = T T {-# UNPACK #-}!Int T
tmap f (T l x r) = T (tmap f l) (f x) (tmap f r)
index :: T -> Int -> Int
index (T _ x _) 0 = x
index (T l _ r) (n+1) = case n `divMod` 2 of
(q, 0) -> index l q
(q, 1) -> index r q
nats :: T
nats = go 0 1 where
go !n !s = T (go l s') n (go r s') where
l = n+s
r = l+s
s'= s*2
main = do
n <- readLn :: IO Int
hs <- getIntVec n
let f_tree = tmap (f fastest_f) nats
where fastest_f = index f_tree
x#y = abs $ hs V.! x - hs V.! y
f mf 0 = 0
f mf 1 = 1#0
f mf i = sub i 1 `min` sub i 2
where sub m j = mf (m-j) + m#(m-j)
print $ index f_tree (n-1)
|
cutsea110/aop
|
src/Frog.hs
|
bsd-3-clause
| 957
| 1
| 17
| 318
| 510
| 261
| 249
| 31
| 3
|
import qualified Data.Set as S
import Common.Numbers.Primes (testPrime)
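-- Project Euler 46 ("Goldbach's other conjecture"): find the smallest odd
-- composite number that cannot be written as a prime plus twice a square.
-- 'findIt' walks the odd numbers, collecting the primes seen so far in 'p'
-- and stopping at the first composite with no such decomposition.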
findIt :: (S.Set Int) -> Int -> Int
findIt p n = if testPrime n
then findIt (S.insert n p) (n + 2)
else if any (\x -> S.member (n - x) p) square2 then findIt p (n + 2) else n
where square2 = takeWhile (\x -> x < n) [ 2*a*a | a <- [1 .. ] ]
main = print $ findIt (S.fromList [2]) 3
|
foreverbell/project-euler-solutions
|
src/46.hs
|
bsd-3-clause
| 367
| 1
| 12
| 90
| 211
| 111
| 100
| 8
| 3
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
module Blackbox.FooSnaplet where
------------------------------------------------------------------------------
import Prelude hiding (lookup)
import Control.Lens
import Control.Monad.State
import Data.Configurator
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Snap.Snaplet
import Snap.Snaplet.Heist
import Snap.Core
import Heist
import Heist.Interpreted
import Blackbox.Common
import Data.Map.Syntax ((##))
------------------------------------------------------------------------------
data FooSnaplet = FooSnaplet { fooField :: String }
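-- | Initialises the snaplet: registers its templates with the supplied Heist
-- snaplet, adds two interpreted splices, and installs routes exposing the
-- snaplet's config field, root URL, name, file path and handler config.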
fooInit :: HasHeist b => Snaplet (Heist b) -> SnapletInit b FooSnaplet
fooInit h = makeSnaplet "foosnaplet" "A demonstration snaplet called foo."
(Just $ return "../foosnaplet") $ do
config <- getSnapletUserConfig
addTemplates h ""
rootUrl <- getSnapletRootURL
fp <- getSnapletFilePath
name <- getSnapletName
_lens <- getLens
let splices = do "foosplice" ## textSplice "contents of the foo splice"
"fooconfig" ## shConfigSplice _lens
addConfig h $ mempty & scInterpretedSplices .~ splices
addRoutes [("fooConfig", liftIO (lookup config "fooSnapletField") >>= writeLBS . fromJust)
,("fooRootUrl", writeBS rootUrl)
,("fooSnapletName", writeText $ fromMaybe "empty snaplet name" name)
,("fooFilePath", writeText $ T.pack fp)
,("handlerConfig", handlerConfig)
]
return $ FooSnaplet "foo snaplet data string"
getFooField :: Handler b FooSnaplet String
getFooField = gets fooField
|
snapframework/snap-templates
|
test/suite/Blackbox/FooSnaplet.hs
|
bsd-3-clause
| 1,685
| 0
| 15
| 317
| 381
| 203
| 178
| 39
| 1
|
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
{-# OPTIONS -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.C.Analysis.Debug
-- Copyright : (c) 2008 Benedikt Huber
-- License : BSD-style
-- Maintainer : benedikt.huber@gmail.com
-- Stability : prototype
-- Portability : ghc
--
-- Pretty printing the semantic analysis representation.
-- This is currently only intended for debugging purposes.
-----------------------------------------------------------------------------
module Language.C.Analysis.Debug (
globalDeclStats,
prettyAssocs, prettyAssocsWith,
-- and many pretty instances
)
where
import Language.C.Analysis.SemRep
import Language.C.Analysis.Export
import Language.C.Analysis.DefTable
import Language.C.Analysis.NameSpaceMap
import Language.C.Data
import Language.C.Pretty
import Language.C.Syntax
import Text.PrettyPrint.HughesPJ
import Data.Map (Map) ; import qualified Data.Map as Map
prettyAssocs :: (Pretty k, Pretty v) => String -> [(k,v)] -> Doc
prettyAssocs label = prettyAssocsWith label pretty pretty
prettyAssocsWith :: String -> (k -> Doc) -> (v -> Doc) -> [(k,v)] -> Doc
prettyAssocsWith label prettyKey prettyVal theMap =
text label $$ (nest 8) (vcat $ map prettyEntry theMap)
where
prettyEntry (k,v) = prettyKey k <+> text " ~> " <+> prettyVal v
instance Pretty DefTable where
pretty dt = text "DefTable" $$ (nest 4 $ vcat defMaps)
where
defMaps = [ prettyNSMap "idents" identDecls
, prettyNSMap "tags" tagDecls
, prettyNSMap "labels" labelDefs
, prettyNSMap "members" memberDecls
]
prettyNSMap label f = prettyAssocs label . nsMapToList $ f dt
instance Pretty GlobalDecls where
pretty gd = text "Global Declarations" $$ (nest 4 $ vcat declMaps)
where
declMaps = [ prettyMap "enumerators" theEnums, prettyMap "declarations" theDecls,
prettyMap "objects" theObjs, prettyMap "functions" theFuns,
prettyMap "tags" $ gTags gd, prettyMap "typeDefs" $ gTypeDefs gd ]
prettyMap :: (Pretty t, Pretty k) => String -> Map k t -> Doc
prettyMap label = prettyAssocs label . Map.assocs
(theDecls, (theEnums, theObjs, theFuns)) = splitIdentDecls False (gObjs gd)
globalDeclStats :: (FilePath -> Bool) -> GlobalDecls -> [(String,Int)]
globalDeclStats file_filter gmap =
[ ("Enumeration Constants",Map.size enumerators),
("Total Object/Function Declarations",Map.size all_decls),
("Object definitions", Map.size objDefs),
("Function Definitions", Map.size funDefs),
("Tag definitions", Map.size tagDefs),
("TypeDefs", Map.size typeDefs)
]
where
gmap' = filterGlobalDecls filterFile gmap
(all_decls,(enumerators,objDefs,funDefs)) = splitIdentDecls True (gObjs gmap')
(tagDefs,typeDefs) = (gTags gmap', gTypeDefs gmap')
filterFile :: (CNode n) => n -> Bool
filterFile = file_filter . posFile . posOfNode . nodeInfo
instance (Pretty a, Pretty b) => Pretty (Either a b) where
pretty = either pretty pretty
instance Pretty TagFwdDecl where
pretty (CompDecl ct) = pretty ct
pretty (EnumDecl et) = pretty et
instance Pretty CompTyKind where
pretty StructTag = text "struct"
pretty UnionTag = text "union"
instance Pretty CompTypeRef where
pretty (CompTypeRef sue kind _) = pretty kind <+> pretty sue
instance Pretty EnumTypeRef where
pretty (EnumTypeRef sue _ ) = text "enum" <+> pretty sue
instance Pretty Ident where
pretty = text . identToString
instance Pretty SUERef where
pretty ref = text (show ref)
instance Pretty TagDef where
pretty (CompDef compty) = pretty compty
pretty (EnumDef enumty) = pretty enumty
instance Pretty IdentDecl where
pretty (Declaration decl) = pretty decl
pretty (ObjectDef odef) = pretty odef
pretty (FunctionDef fdef) = pretty fdef
pretty (EnumeratorDef enumerator) = pretty enumerator
instance Pretty Decl where
pretty (Decl vardecl _) =
text "declaration" <+>
pretty vardecl
instance Pretty TypeDef where
pretty (TypeDef ident ty attrs _) =
text "typedef" <+> pretty ident <+> text "as" <+>
pretty attrs <+> pretty ty
instance Pretty ObjDef where
pretty (ObjDef vardecl init_opt _) =
text "object" <+>
pretty vardecl <+> maybe empty (((text "=") <+>) . pretty) init_opt
instance Pretty FunDef where
pretty (FunDef vardecl _stmt _) =
text "function" <+>
pretty vardecl
instance Pretty VarDecl where
pretty (VarDecl name attrs ty) =
((hsep . punctuate (text " |")) [pretty name, pretty attrs, pretty ty])
instance Pretty ParamDecl where
pretty (ParamDecl (VarDecl name declattrs ty) _) =
pretty declattrs <+> pretty name <+> text "::" <+> pretty ty
pretty (AbstractParamDecl (VarDecl name declattrs ty) _) =
text "abstract" <+> pretty declattrs <+> pretty name <+>
text "::" <+> pretty ty
instance Pretty DeclAttrs where
pretty (DeclAttrs inline storage attrs) =
(if inline then (text "inline") else empty) <+>
(hsep $ [ pretty storage, pretty attrs])
instance Pretty Type where
pretty ty = pretty (exportTypeDecl ty)
instance Pretty TypeQuals where
pretty tyQuals = hsep $ map showAttr [ ("const",constant),("volatile",volatile),("restrict",restrict) ]
where showAttr (str,select) | select tyQuals = text str
| otherwise = empty
instance Pretty CompType where
pretty (CompType sue_ref tag members attrs node) =
(text.show) tag <+> pretty sue_ref <+>
braces (terminateSemi members) <+>
pretty attrs
instance Pretty MemberDecl where
pretty (MemberDecl (VarDecl name declattrs ty) bitfield _) =
pretty declattrs <+> pretty name <+> text "::" <+> pretty ty <+>
(maybe empty (\bf -> text ":" <+> pretty bf) bitfield)
pretty (AnonBitField ty bitfield_sz _) =
pretty ty <+> text ":" <+> pretty bitfield_sz
instance Pretty EnumType where
pretty (EnumType sue_ref enumerators attrs _) =
text "enum" <+> pretty sue_ref <+> braces (terminateSemi_ $ map prettyEnr enumerators) <+> pretty attrs
where
prettyEnr (Enumerator ident expr enumty _) = pretty ident <+> text " = " <+> pretty expr
instance Pretty Enumerator where
pretty (Enumerator ident expr enumty _) = text "<" <> text "econst" <+> pretty (sueRef enumty) <> text ">" <+>
pretty ident <+> text " = " <+> pretty expr
instance Pretty Storage where
pretty NoStorage = empty
pretty (Auto reg) = text$ if reg then "auto/register" else "auto"
pretty (Static linkage thread_local) =
(hcat . punctuate (text "/") $ [ text "static",pretty linkage ])
<+> (if thread_local then text ", __thread" else empty)
pretty (FunLinkage linkage) = text "function/" <> pretty linkage
instance Pretty Linkage where
pretty InternalLinkage = text "internal"
pretty ExternalLinkage = text "external"
pretty NoLinkage = text "local"
instance Pretty VarName where
pretty NoName = text "<anonymous>"
pretty (VarName ident asmname_opt) = pretty ident <+> (maybe empty pAsmName asmname_opt)
where pAsmName asmname = text "" <+> parens (text "asmname" <+> pretty asmname)
instance Pretty Attributes where
pretty = joinComma
instance Pretty Attr where
pretty (Attr ident es _) = pretty ident <+> (if null es then empty else text "(...)")
joinComma :: (Pretty a) => [a] -> Doc
joinComma = hsep . punctuate comma . map pretty
terminateSemi :: (Pretty a) => [a] -> Doc
terminateSemi = terminateSemi_ . map pretty
terminateSemi_ :: [Doc] -> Doc
terminateSemi_ = hsep . map (<> semi)
|
acowley/language-c
|
src/Language/C/Analysis/Debug.hs
|
bsd-3-clause
| 7,890
| 0
| 13
| 1,794
| 2,478
| 1,258
| 1,220
| 150
| 1
|
module Main where
import qualified Heed as H
main :: IO ()
main = H.main
|
Arguggi/heed
|
heed-backend/exe/Main.hs
|
bsd-3-clause
| 75
| 0
| 6
| 17
| 27
| 17
| 10
| 4
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Test.Tasty (TestTree, defaultMain, testGroup)
import Test.Tasty.HUnit ((@?=), testCase)
import qualified Hello
import qualified SlashRedirect
import qualified Param
import qualified Combined
import Nero
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup "Examples"
[ testsHello
, testsSlashRedirect
, testsParam
, testsCombined
]
testsHello :: TestTree
testsHello = testGroup "Hello"
[ testCase "hello" $ run Hello.app1 "/hello/there"
@?= Just (ok "<h1>Hello there</h1>")
, testCase "hello with Int" $ run Hello.app2 "/hello/there/4"
@?= Nothing
, testCase "hello with Int" $ run Hello.app2 "/hello/there/4/"
@?= Just (ok "<h1>Hello there 4</h1>")
, testCase "bye" $ run Hello.app1 "/bye/there"
@?= Nothing
]
where
run a p = a $ dummyRequest & path .~ p
testsSlashRedirect :: TestTree
testsSlashRedirect = testGroup "SlashRedirect"
[ testCase "withSlash"
$ run "/hello/there/"
@?= Just (ok "<h1>Hello there</h1>")
, testCase "withoutSlash"
$ run "/hello/there"
@?= Just (movedPermanently $ dummyUrl & path .~ "/hello/there/")
, testCase "NoMatch"
$ run "/bye/there"
@?= Nothing
]
where
run p = SlashRedirect.app $ dummyRequest & path .~ p
testsParam :: TestTree
testsParam = testGroup "HTTP parameters"
[ testGroup "Query parameters"
[ testGroup "Single value"
[ testCase "1 value"
$ Param.app1 (mkReqQ $ pure "there")
@?= Just (ok "<h1>Hello there</h1>")
, testCase "First one of 2 values"
$ Param.app1 (mkReqQ ["out", "there"])
@?= Just (ok "<h1>Hello out</h1>")
]
, testGroup "Multiple values"
[ testCase "1 value"
$ Param.app2 (mkReqQ $ pure "there")
@?= ok "<h1>Hello there</h1>"
, testCase "Concatenating 2 values"
$ Param.app2 (mkReqQ ["out", "there"])
@?= ok "<h1>Hello out there</h1>"
]
]
, testGroup "Form parameters"
[ testCase "1 value"
$ Param.app3 (mkReqF $ pure "there")
@?= Just (ok "<h1>Hello there</h1>")
]
, testCase "Query and form parameters merge"
$ Param.app4 (mkReqM (pure "out") (pure "there"))
@?= ok "<h1>Hello out there</h1>"
]
where
mkReqQ p = dummyRequest & query . at "name" ?~ p
mkReqF p = dummyRequestForm & form . at "name" ?~ p
mkReqM pq pf = mkReqF pf & query . at "name" ?~ pq
testsCombined :: TestTree
testsCombined = testGroup "Query parameters and routing"
[ testCase "hello"
$ Combined.app12 (dummyRequest & path .~ "/hello/out"
& query . at "surname" ?~ pure "there")
@?= Just (ok "<h1>Hello out there</h1>")
, testGroup "nested"
[ testCase "first"
$ Combined.nested (dummyRequest & path .~ "/name/hello/there")
@?= Just (ok "<h1>Hello there</h1>")
, testCase "second"
$ Combined.nested (dummyRequest & path .~ "/name/"
& query . at "surname" ?~ pure "there")
@?= Just (ok "<h1>Hello there</h1>")
]
]
|
plutonbrb/nero-examples
|
test.hs
|
bsd-3-clause
| 3,318
| 0
| 17
| 1,002
| 882
| 442
| 440
| -1
| -1
|
{-# LANGUAGE EmptyDataDecls, TypeSynonymInstances #-}
{-# OPTIONS_GHC -fcontext-stack=48 #-}
module Games.Chaos2010.Database.Squares_valid_categories where
import Games.Chaos2010.Database.Fields
import Database.HaskellDB.DBLayout
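-- Column record for the squares_valid_categories relation: a heterogeneous
-- list (HList) of label/expression pairs, one per column, all nullable.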
type Squares_valid_categories =
Record
(HCons (LVPair Category (Expr (Maybe String)))
(HCons (LVPair X (Expr (Maybe Int)))
(HCons (LVPair Y (Expr (Maybe Int)))
(HCons (LVPair Ptype (Expr (Maybe String)))
(HCons (LVPair Allegiance (Expr (Maybe String)))
(HCons (LVPair Tag (Expr (Maybe Int)))
(HCons (LVPair Undead (Expr (Maybe Bool)))
(HCons (LVPair Ridable (Expr (Maybe Bool))) HNil))))))))
squares_valid_categories :: Table Squares_valid_categories
squares_valid_categories = baseTable "squares_valid_categories"
|
JakeWheat/Chaos-2010
|
Games/Chaos2010/Database/Squares_valid_categories.hs
|
bsd-3-clause
| 893
| 0
| 27
| 226
| 272
| 142
| 130
| 17
| 1
|
{-# LANGUAGE FlexibleContexts #-}
import Plots
import Plots.Axis
import Plots.Types hiding (B)
import Plots.Themes
import Plots.Utils
import Data.List
import Diagrams.Prelude
import Diagrams.Backend.Rasterific
import Diagrams.Backend.CmdLine
import Data.Array
import Data.Monoid.Recommend
import Dataset
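-- Example: build an axis carrying a single text annotation ("test dragon"
-- at (5.1, 5.0)) and render it to test.png with the Rasterific backend.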
alldata = zip (zip petalLength petalWidth) species
mydata1 = foobarsingle alldata "setosa"
mydata2 = foobarsingle alldata "versicolor"
mydata3 = foobarsingle alldata "virginica"
myaxis :: Axis B V2 Double
myaxis = r2Axis &~ do
textPlot' (5.1,5.0) "test dragon" $ do
plotColor .= darkblue
make :: Diagram B -> IO ()
make = renderRasterific "test.png" (mkWidth 1000) . frame 20
main :: IO ()
main = make $ renderAxis myaxis
|
bergey/plots
|
examples/text.hs
|
bsd-3-clause
| 756
| 5
| 11
| 128
| 242
| 120
| 122
| 25
| 1
|
import System.Environment (getArgs)
import Data.Char (chr, ord)
import Data.List (permutations, sort)
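-- Brute force for the "number operations" challenge: decide whether the input
-- numbers can be combined into 42 using +, - and *.  'nmb' folds one operator
-- assignment (encoded in base 3) over the character-encoded numbers, 'numb'
-- tries the 81 = 3^4 possible assignments, and 'numbo' tries every
-- permutation of the input.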
nmb :: Int -> Int -> Int -> String -> Bool
nmb x _ _ [] = x == 42
nmb x y z (a:as) | y == 0 = nmb (x + ord a) (mod z 3) (div z 3) as
| y == 1 = nmb (x - ord a) (mod z 3) (div z 3) as
| otherwise = nmb (x * ord a) (mod z 3) (div z 3) as
numb :: Int -> String -> Bool
numb 81 _ = False
numb x ys | nmb (ord (head ys)) (mod x 3) (div x 3) (tail ys) = True
| otherwise = numb (x+1) ys
numbo :: [String] -> Bool
numbo [] = False
numbo (xs:xss) | numb 0 xs = True
| otherwise = numbo xss
numberoperations :: [Int] -> String
numberoperations xs | numbo ys = "YES"
| otherwise = "NO"
where ys = permutations . map chr $ sort xs
main :: IO ()
main = do
[inpFile] <- getArgs
input <- readFile inpFile
putStr . unlines . map (numberoperations . map read . words) $ lines input
|
nikai3d/ce-challenges
|
moderate/number_operations.hs
|
bsd-3-clause
| 1,067
| 2
| 13
| 396
| 546
| 261
| 285
| 25
| 1
|
module Lab2 where
------------------------------------------------------------------------------------------------------------------------------
-- Lab 2: Validating Credit Card Numbers
------------------------------------------------------------------------------------------------------------------------------
-- ===================================
-- Ex. 0
-- ===================================
toDigits :: Integer -> [Integer]
toDigits 0 = []
toDigits n = toDigits (n `div` 10) ++ ([n `mod` 10])
-- ===================================
-- Ex. 1
-- ===================================
toDigitsRev :: Integer -> [Integer]
toDigitsRev = reverse . toDigits
-- ===================================
-- Ex. 2
-- ===================================
doubleSecond :: [Integer] -> [Integer]
doubleSecond xs = [doubleEven pair | pair <- (zip xs [1..length xs])]
doubleEven (n,i)
| i `mod` 2 == 0 = n * 2
| otherwise = n
-- ===================================
-- Ex. 3
-- ===================================
sumDigits :: [Integer] -> Integer
sumDigits xs = sum (concat [toDigits x | x <- xs])
-- ===================================
-- Ex. 4
-- ===================================
isValid :: Integer -> Bool
isValid n = modulus == 0
where modulus = sumDigits (doubleSecond (toDigitsRev n)) `mod` 10
-- ===================================
-- Ex. 5
-- ===================================
numValid :: [Integer] -> Integer
numValid xs = sum . map (\_ -> 1) $ filter isValid xs
creditcards :: [Integer]
creditcards = [ 4716347184862961,
4532899082537349,
4485429517622493,
4320635998241421,
4929778869082405,
5256283618614517,
5507514403575522,
5191806267524120,
5396452857080331,
5567798501168013,
6011798764103720,
6011970953092861,
6011486447384806,
6011337752144550,
6011442159205994,
4916188093226163,
4916699537435624,
4024607115319476,
4556945538735693,
4532818294886666,
5349308918130507,
5156469512589415,
5210896944802939,
5442782486960998,
5385907818416901,
6011920409800508,
6011978316213975,
6011221666280064,
6011285399268094,
6011111757787451,
4024007106747875,
4916148692391990,
4916918116659358,
4024007109091313,
4716815014741522,
5370975221279675,
5586822747605880,
5446122675080587,
5361718970369004,
5543878863367027,
6011996932510178,
6011475323876084,
6011358905586117,
6011672107152563,
6011660634944997,
4532917110736356,
4485548499291791,
4532098581822262,
4018626753711468,
4454290525773941,
5593710059099297,
5275213041261476,
5244162726358685,
5583726743957726,
5108718020905086,
6011887079002610,
6011119104045333,
6011296087222376,
6011183539053619,
6011067418196187,
4532462702719400,
4420029044272063,
4716494048062261,
4916853817750471,
4327554795485824,
5138477489321723,
5452898762612993,
5246310677063212,
5211257116158320,
5230793016257272,
6011265295282522,
6011034443437754,
6011582769987164,
6011821695998586,
6011420220198992,
4716625186530516,
4485290399115271,
4556449305907296,
4532036228186543,
4916950537496300,
5188481717181072,
5535021441100707,
5331217916806887,
5212754109160056,
5580039541241472,
6011450326200252,
6011141461689343,
6011886911067144,
6011835735645726,
6011063209139742,
379517444387209,
377250784667541,
347171902952673,
379852678889749,
345449316207827,
349968440887576,
347727987370269,
370147776002793,
374465794689268,
340860752032008,
349569393937707,
379610201376008,
346590844560212,
376638943222680,
378753384029375,
348159548355291,
345714137642682,
347556554119626,
370919740116903,
375059255910682,
373129538038460,
346734548488728,
370697814213115,
377968192654740,
379127496780069,
375213257576161,
379055805946370,
345835454524671,
377851536227201,
345763240913232
]
|
guhemama/moocs
|
FP101x.DelftX.edX/Lab2/CreditCardValidation.hs
|
bsd-3-clause
| 5,578
| 0
| 12
| 2,241
| 709
| 438
| 271
| 139
| 1
|
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : dave.laing.80@gmail.com
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE ConstraintKinds #-}
module Fragment.Variant.Rules.Term (
VariantEvalConstraint
, variantEvalRules
) where
import Control.Monad (MonadPlus(..))
import Control.Lens (review, preview)
import Rules.Term
import Ast.Pattern
import Ast.Term
import Fragment.Variant.Ast.Pattern
import Fragment.Variant.Ast.Term
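-- The helpers below push value recognition, small-step evaluation and pattern
-- matching through the variant constructor; 'variantEvalRules' packages them
-- as the recursive cases handed to the generic evaluator.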
valueVariant :: (AsTmVariant ki ty pt tm) => (Term ki ty pt tm a -> Maybe (Term ki ty pt tm a)) -> Term ki ty pt tm a -> Maybe (Term ki ty pt tm a)
valueVariant valueFn tm = do
(l, tmV, ty) <- preview _TmVariant tm
tm' <- valueFn tmV
return $ review _TmVariant (l, tm', ty)
stepVariant :: (AsTmVariant ki ty pt tm) => (Term ki ty pt tm a -> Maybe (Term ki ty pt tm a)) -> Term ki ty pt tm a -> Maybe (Term ki ty pt tm a)
stepVariant stepFn tm = do
(l, tmV, ty) <- preview _TmVariant tm
tm' <- stepFn tmV
return $ review _TmVariant (l, tm', ty)
matchVariant :: (AsPtVariant pt, AsTmVariant ki ty pt tm) => (Pattern pt a -> Term ki ty pt tm a -> Maybe [Term ki ty pt tm a]) -> Pattern pt a -> Term ki ty pt tm a -> Maybe [Term ki ty pt tm a]
matchVariant matchFn p tm = do
(lP, pV) <- preview _PtVariant p
(lV, tmV, _) <- preview _TmVariant tm
if lP == lV
then matchFn pV tmV
else mzero
type VariantEvalConstraint ki ty pt tm a =
( AsPtVariant pt
, AsTmVariant ki ty pt tm
)
variantEvalRules :: VariantEvalConstraint ki ty pt tm a
=> EvalInput ki ty pt tm a
variantEvalRules =
EvalInput
[ ValueRecurse valueVariant ]
[ StepRecurse stepVariant ]
[ MatchRecurse matchVariant ]
|
dalaing/type-systems
|
src/Fragment/Variant/Rules/Term.hs
|
bsd-3-clause
| 1,718
| 0
| 12
| 378
| 656
| 341
| 315
| 38
| 2
|
{-# OPTIONS_GHC -fno-warn-orphans #-} -- Temporary, I hope. SLPJ Aug08
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ConstraintKinds #-}
-- Needed for the same reasons as in Reader, State etc
{- |
Module : Control.Monad.Error
Copyright : (c) Michael Weber <michael.weber@post.rwth-aachen.de> 2001,
(c) Jeff Newbern 2003-2006,
(c) Andriy Palamarchuk 2006
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer : libraries@haskell.org
Stability : experimental
Portability : non-portable (multi-parameter type classes)
[Computation type:] Computations which may fail or throw exceptions.
[Binding strategy:] Failure records information about the cause\/location
of the failure. Failure values bypass the bound function,
other values are used as inputs to the bound function.
[Useful for:] Building computations from sequences of functions that may fail
or using exception handling to structure error handling.
[Zero and plus:] Zero is represented by an empty error and the plus operation
executes its second argument if the first fails.
[Example type:] @'Data.Either' String a@
The Error monad (also called the Exception monad).
-}
-- {-
-- Rendered by Michael Weber <mailto:michael.weber@post.rwth-aachen.de>,
-- inspired by the Haskell Monad Template Library from
-- Andy Gill (<http://www.cse.ogi.edu/~andy/>)
-- -}
module Control.Monad.Error (
module Control.Monad.Error.Class,
ErrorT(..),
mapErrorT,
module Control.Monad,
module Control.Monad.Trans,
-- * Example 1: Custom Error Data Type
-- $customErrorExample
-- * Example 2: Using ErrorT Monad Transformer
-- $ErrorTExample
) where
import Control.Monad
import Control.Monad.Cont.Class
import Control.Monad.Error.Class
import Control.Monad.RWS.Class
import Control.Monad.Reader.Class
import Control.Monad.Reader
import Control.Monad.State.Class
import Control.Monad.Trans
import Control.Monad.Writer.Class
import Control.Monad.Instances ()
import System.IO
-- instance MonadPlus IO where
-- mzero = ioError (userError "mzero")
-- m `mplus` n = m `catch` \_ -> n
instance MonadError IOError IO where
throwError = ioError
    catchError = catch
-- ---------------------------------------------------------------------------
-- Our parameterizable error monad
-- instance (Error e) => Monad (Either e) where
-- return = Right
-- Left l >>= _ = Left l
-- Right r >>= k = k r
-- fail msg = Left (strMsg msg)
-- instance (Error e) => MonadPlus (Either e) where
-- mzero = Left noMsg
-- Left _ `mplus` n = n
-- m `mplus` _ = m
-- instance (Error e) => MonadError e (Either e) where
-- throwError = Left
-- Left l `catchError` h = h l
-- Right r `catchError` _ = Right r
{- |
The error monad transformer. It can be used to add error handling to other
monads.
The @ErrorT@ Monad structure is parameterized over two things:
* e - The error type.
* m - The inner monad.
Here are some examples of use:
> -- wraps IO action that can throw an error e
> type ErrorWithIO e a = ErrorT e IO a
> ==> ErrorT (IO (Either e a))
>
> -- IO monad wrapped in StateT inside of ErrorT
> type ErrorAndStateWithIO e s a = ErrorT e (StateT s IO) a
> ==> ErrorT (StateT s IO (Either e a))
> ==> ErrorT (StateT (s -> IO (Either e a,s)))
-}
newtype ErrorT e m a = ErrorT { runErrorT :: m (Either e a) }
mapErrorT :: (m (Either e a) -> n (Either e' b))
-> ErrorT e m a
-> ErrorT e' n b
mapErrorT f m = ErrorT $ f (runErrorT m)
instance (Monad m) => Functor (ErrorT e m) where
fmap f m = ErrorT $ do
a <- runErrorT m
case a of
Left l -> return (Left l)
Right r -> return (Right (f r))
instance (Monad m, Error e) => Monad (ErrorT e m) where
return a = ErrorT $ return (Right a)
m >>= k = ErrorT $ do
a <- runErrorT m
case a of
Left l -> return (Left l)
Right r -> runErrorT (k r)
fail msg = ErrorT $ return (Left (strMsg msg))
instance (Monad m, Error e) => MonadPlus (ErrorT e m) where
mzero = ErrorT $ return (Left noMsg)
m `mplus` n = ErrorT $ do
a <- runErrorT m
case a of
Left _ -> runErrorT n
Right r -> return (Right r)
instance (Monad m, Error e) => MonadError e (ErrorT e m) where
throwError l = ErrorT $ return (Left l)
m `catchError` h = ErrorT $ do
a <- runErrorT m
case a of
Left l -> runErrorT (h l)
Right r -> return (Right r)
-- ---------------------------------------------------------------------------
-- Instances for other mtl transformers
-- instance Functor (Either e) where
-- fmap f (Left e) = Left e
-- fmap f (Right x) = Right (f x)
instance (Error e) => MonadTrans (ErrorT e) where
lift m = ErrorT $ do
a <- m
return (Right a)
mt = MT
unlift f = ErrorT $ f $ \m -> runErrorT m
instance (Error e, MonadIO m) => MonadIO (ErrorT e m) where
liftIO = lift . liftIO
instance (Error e, MonadCont m) => MonadCont (ErrorT e m) where
callCC f = ErrorT $
callCC $ \c ->
runErrorT (f (\a -> ErrorT $ c (Right a)))
instance (Error e, MonadRWS r w s m) => MonadRWS r w s (ErrorT e m)
instance (Error e, MonadReader r m) => MonadReader r (ErrorT e m) where
ask = lift ask
local f m = ErrorT $ local f (runErrorT m)
instance (Error e, MonadState s m) => MonadState s (ErrorT e m) where
get = lift get
put = lift . put
instance (Error e, MonadWriter w m) => MonadWriter w (ErrorT e m) where
tell = lift . tell
listen m = ErrorT $ do
(a, w) <- listen (runErrorT m)
case a of
Left l -> return $ Left l
Right r -> return $ Right (r, w)
pass m = ErrorT $ pass $ do
a <- runErrorT m
case a of
Left l -> return (Left l, id)
Right (r, f) -> return (Right r, f)
{- $customErrorExample
Here is an example that demonstrates the use of a custom 'Error' data type with
the 'throwError' and 'catchError' exception mechanism from 'MonadError'.
The example throws an exception if the user enters an empty string
or a string longer than 5 characters. Otherwise it prints the length of the string.
>-- This is the type to represent length calculation error.
>data LengthError = EmptyString -- Entered string was empty.
> | StringTooLong Int -- A string is longer than 5 characters.
> -- Records a length of the string.
> | OtherError String -- Other error, stores the problem description.
>
>-- We make LengthError an instance of the Error class
>-- to be able to throw it as an exception.
>instance Error LengthError where
> noMsg = OtherError "A String Error!"
> strMsg s = OtherError s
>
>-- Converts LengthError to a readable message.
>instance Show LengthError where
> show EmptyString = "The string was empty!"
> show (StringTooLong len) =
> "The length of the string (" ++ (show len) ++ ") is bigger than 5!"
> show (OtherError msg) = msg
>
>-- For our monad type constructor, we use Either LengthError
>-- which represents failure using Left LengthError
>-- or a successful result of type a using Right a.
>type LengthMonad = Either LengthError
>
>main = do
> putStrLn "Please enter a string:"
> s <- getLine
> reportResult (calculateLength s)
>
>-- Wraps length calculation to catch the errors.
>-- Returns either length of the string or an error.
>calculateLength :: String -> LengthMonad Int
>calculateLength s = (calculateLengthOrFail s) `catchError` Left
>
>-- Attempts to calculate length and throws an error if the provided string is
>-- empty or longer than 5 characters.
>-- The processing is done in Either monad.
>calculateLengthOrFail :: String -> LengthMonad Int
>calculateLengthOrFail [] = throwError EmptyString
>calculateLengthOrFail s | len > 5 = throwError (StringTooLong len)
> | otherwise = return len
> where len = length s
>
>-- Prints result of the string length calculation.
>reportResult :: LengthMonad Int -> IO ()
>reportResult (Right len) = putStrLn ("The length of the string is " ++ (show len))
>reportResult (Left e) = putStrLn ("Length calculation failed with error: " ++ (show e))
-}
{- $ErrorTExample
@'ErrorT'@ monad transformer can be used to add error handling to another monad.
Here is an example how to combine it with an @IO@ monad:
>import Control.Monad.Error
>
>-- An IO monad which can return String failure.
>-- It is convenient to define the monad type of the combined monad,
>-- especially if we combine more monad transformers.
>type LengthMonad = ErrorT String IO
>
>main = do
> -- runErrorT removes the ErrorT wrapper
> r <- runErrorT calculateLength
> reportResult r
>
>-- Asks user for a non-empty string and returns its length.
>-- Throws an error if user enters an empty string.
>calculateLength :: LengthMonad Int
>calculateLength = do
> -- all the IO operations have to be lifted to the IO monad in the monad stack
> liftIO $ putStrLn "Please enter a non-empty string: "
> s <- liftIO getLine
> if null s
> then throwError "The string was empty!"
> else return $ length s
>
>-- Prints result of the string length calculation.
>reportResult :: Either String Int -> IO ()
>reportResult (Right len) = putStrLn ("The length of the string is " ++ (show len))
>reportResult (Left e) = putStrLn ("Length calculation failed with error: " ++ (show e))
-}
|
ifigueroap/mzv
|
src/Control/Monad/Error.hs
|
bsd-3-clause
| 9,723
| 0
| 16
| 2,338
| 1,337
| 703
| 634
| 90
| 1
|
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE TupleSections #-}
--------------------------------------------------------------------------------
-- | Solve a system of horn-clause constraints ---------------------------------
--------------------------------------------------------------------------------
module Language.Fixpoint.Solver.Solve (solve, interpolation) where
-- import Control.Concurrent (threadDelay)
import Control.Monad (filterM)
import Control.Monad.State.Strict (lift)
import qualified Data.HashMap.Strict as M
-- import Language.Fixpoint.Utils.Progress
import Language.Fixpoint.Misc
import qualified Language.Fixpoint.Types as F
import Language.Fixpoint.Types.PrettyPrint
import Language.Fixpoint.Types.Config hiding (stats)
import qualified Language.Fixpoint.Solver.Solution as S
import qualified Language.Fixpoint.Solver.Worklist as W
import Language.Fixpoint.Solver.Monad
import Language.Fixpoint.Solver.Validate (sanitize)
-- DEBUG
import Text.Printf
import System.Console.CmdArgs.Verbosity (whenLoud)
import Control.DeepSeq
--------------------------------------------------------------------------------
solve :: (NFData a, F.Fixpoint a) => Config -> S.Solution -> F.SInfo a -> IO (F.Result a)
--------------------------------------------------------------------------------
solve cfg s0 fi = do
-- donePhase Loud "Worklist Initialize"
(res, stat) <- runSolverM cfg fi n act
whenLoud $ printStats fi wkl stat
-- print (numIter stat)
return res
where
wkl = W.init fi
n = fromIntegral $ W.wRanks wkl
act = solve_ fi s0 wkl
printStats :: F.SInfo a -> W.Worklist a -> Stats -> IO ()
printStats fi w s = putStrLn "\n" >> ppTs [ ptable fi, ptable s, ptable w ]
where
ppTs = putStrLn . showpp . mconcat
--------------------------------------------------------------------------------
solve_ :: (NFData a, F.Fixpoint a)
=> F.SInfo a -> S.Solution -> W.Worklist a
-> SolveM (F.Result a, Stats)
--------------------------------------------------------------------------------
solve_ fi s0 wkl = do
let s0' = mappend s0 $ {-# SCC "sol-init" #-} S.init fi
s <- {-# SCC "sol-refine" #-} refine s0' wkl
st <- stats
res <- {-# SCC "sol-result" #-} result wkl s
let res' = {-# SCC "sol-tidy" #-} tidyResult res
return $!! (res', st)
-- | tidyResult ensures we replace the temporary kVarArg names
-- introduced to ensure uniqueness with the original names
-- appearing in the supplied WF constraints.
tidyResult :: F.Result a -> F.Result a
tidyResult r = r { F.resSolution = tidySolution (F.resSolution r) }
tidySolution :: F.FixSolution -> F.FixSolution
tidySolution = fmap tidyPred
tidyPred :: F.Expr -> F.Expr
tidyPred = F.substf (F.eVar . F.tidySymbol)
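-- | The main fixpoint loop: pop a constraint from the worklist, refine the
-- current solution against it, and push the constraint back whenever the
-- refinement changed the solution; stop when the worklist is empty.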
--------------------------------------------------------------------------------
refine :: S.Solution -> W.Worklist a -> SolveM S.Solution
--------------------------------------------------------------------------------
refine s w
| Just (c, w', newScc, rnk) <- W.pop w = do
i <- tickIter newScc
(b, s') <- refineC i s c
lift $ writeLoud $ refineMsg i c b rnk
let w'' = if b then W.push c w' else w'
refine s' w''
| otherwise = return s
-- DEBUG
refineMsg i c b rnk = printf "\niter=%d id=%d change=%s rank=%d\n"
i (F.subcId c) (show b) rnk
---------------------------------------------------------------------------
-- | Single Step Refinement -----------------------------------------------
---------------------------------------------------------------------------
refineC :: Int -> S.Solution -> F.SimpC a -> SolveM (Bool, S.Solution)
---------------------------------------------------------------------------
refineC _i s c
| null rhs = return (False, s)
| otherwise = do lhs <- lhsPred s c <$> getBinds
kqs <- filterValid lhs rhs
return $ S.update s ks {- tracepp (msg ks rhs kqs) -} kqs
where
(ks, rhs) = rhsCands s c
-- msg ks xs ys = printf "refineC: iter = %d, ks = %s, rhs = %d, rhs' = %d \n" _i (showpp ks) (length xs) (length ys)
lhsPred :: S.Solution -> F.SimpC a -> F.BindEnv -> F.Expr
lhsPred s c be = F.pAnd pBinds
where
pBinds = S.apply s <$> xts
xts = F.envCs be $ F.senv c
rhsCands :: S.Solution -> F.SimpC a -> ([F.KVar], S.Cand (F.KVar, S.EQual))
rhsCands s c = (fst <$> ks, kqs)
where
kqs = [ cnd k su q | (k, su) <- ks, q <- S.lookup s k]
ks = predKs . F.crhs $ c
cnd k su q = (F.subst su (S.eqPred q), (k, q))
predKs :: F.Expr -> [(F.KVar, F.Subst)]
predKs (F.PAnd ps) = concatMap predKs ps
predKs (F.PKVar k su) = [(k, su)]
predKs _ = []
---------------------------------------------------------------------------
-- | Convert Solution into Result -----------------------------------------
---------------------------------------------------------------------------
result :: (F.Fixpoint a) => W.Worklist a -> S.Solution -> SolveM (F.Result a)
---------------------------------------------------------------------------
result wkl s = do
let sol = M.map (F.pAnd . fmap S.eqPred) s
stat <- result_ wkl s
return $ F.Result (F.sinfo <$> stat) sol
result_ :: W.Worklist a -> S.Solution -> SolveM (F.FixResult (F.SimpC a))
result_ w s = res <$> filterM (isUnsat s) cs
where
cs = W.unsatCandidates w
res [] = F.Safe
res cs' = F.Unsafe cs'
-- isUnsat' c = lift progressTick >> isUnsat s c
---------------------------------------------------------------------------
isUnsat :: S.Solution -> F.SimpC a -> SolveM Bool
---------------------------------------------------------------------------
isUnsat s c = do
lp <- lhsPred s c <$> getBinds
let rp = rhsPred s c
not <$> isValid lp rp
isValid :: F.Expr -> F.Expr -> SolveM Bool
isValid p q = (not . null) <$> filterValid p [(q, ())]
rhsPred :: S.Solution -> F.SimpC a -> F.Expr
rhsPred s c = S.apply s $ F.crhs c
---------------------------------------------------------------------------
interpolation :: Config -> F.SInfo a -> F.Expr -> IO F.Expr
---------------------------------------------------------------------------
interpolation cfg fi p = runSolverM cfg fi' 0 $ interpolationSolver fi' p
where Right fi' = sanitize fi
{-
donePhase' :: String -> SolveM ()
---------------------------------------------------------------------------
donePhase' msg = lift $ do
threadDelay 25000
putBlankLn
donePhase Loud msg
-}
|
rolph-recto/liquid-fixpoint
|
src/Language/Fixpoint/Solver/Solve.hs
|
bsd-3-clause
| 6,655
| 0
| 14
| 1,329
| 1,796
| 930
| 866
| 97
| 2
|
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Py.Pretty
-- Copyright : (c) 2009 Bernie Pope
-- License : BSD-style
-- Maintainer : bjpop@csse.unimelb.edu.au
-- Stability : experimental
-- Portability : ghc
--
-- Convenience class for pretty printing combinators.
-----------------------------------------------------------------------------
module Language.Py.Pretty
( module TextPP
, module Language.Py.Pretty
) where
import Text.PrettyPrint as TextPP
--------------------------------------------------------------------------------
-- | All types which can be transformed into a 'Doc'.
class Pretty a where
pretty :: a -> Doc
-- | Transform values into strings.
prettyText :: Pretty a => a -> String
prettyText = render . pretty
-- | Print just the prefix of something
prettyPrefix :: Pretty a => Int -> a -> Doc
prettyPrefix maxLen x
| length fullText <= maxLen = pretty fullText
| otherwise = pretty (take maxLen fullText) <+> text "..."
where
fullText = prettyText x
instance Pretty String where
pretty = text
-- | Conditionally wrap parentheses around an item.
parensIf :: Pretty a => (a -> Bool) -> a -> Doc
parensIf test x = if test x then parens $ pretty x else pretty x
perhaps :: Pretty a => Maybe a -> Doc -> Doc
perhaps Nothing doc = empty
perhaps (Just {}) doc = doc
-- | A list of things separated by commas.
commaList :: Pretty a => [a] -> Doc
commaList = hsep . punctuate comma . map pretty
instance Pretty Int where
pretty = int
instance Pretty Integer where
pretty = integer
instance Pretty Double where
pretty = double
instance Pretty Bool where
pretty True = text "True"
pretty False = text "False"
instance Pretty a => Pretty (Maybe a) where
pretty Nothing = empty
pretty (Just x) = pretty x
|
codeq/language-py
|
src/Language/Py/Pretty.hs
|
bsd-3-clause
| 1,892
| 0
| 9
| 348
| 445
| 234
| 211
| 35
| 2
|
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module Main where
import Distribution.Client.DistDirLayout
import Distribution.Client.ProjectConfig
import Distribution.Client.Config (defaultCabalDir)
import Distribution.Client.ProjectPlanning
import Distribution.Client.ProjectPlanning.Types
import Distribution.Client.ProjectBuilding
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Package
import Distribution.PackageDescription
import Distribution.InstalledPackageInfo (InstalledPackageInfo)
import Distribution.Simple.Setup (toFlag)
import Distribution.Version
import Distribution.Verbosity
import Distribution.Text
import Data.Monoid
import qualified Data.Map as Map
import Control.Monad
import Control.Exception
import System.FilePath
import System.Directory
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.Options
import Data.Tagged (Tagged(..))
import Data.Proxy (Proxy(..))
import Data.Typeable (Typeable)
main :: IO ()
main =
defaultMainWithIngredients
(defaultIngredients ++ [includingOptions projectConfigOptionDescriptions])
(withProjectConfig $ \config ->
testGroup "Integration tests (internal)"
(tests config))
tests :: ProjectConfig -> [TestTree]
tests config =
--TODO: tests for:
-- * normal success
-- * dry-run tests with changes
[ testGroup "Exceptions during discovey and planning" $
[ testCase "no package" (testExceptionInFindingPackage config)
, testCase "no package2" (testExceptionInFindingPackage2 config)
, testCase "proj conf1" (testExceptionInProjectConfig config)
]
, testGroup "Exceptions during building (local inplace)" $
[ testCase "configure" (testExceptionInConfigureStep config)
, testCase "build" (testExceptionInBuildStep config)
-- , testCase "register" testExceptionInRegisterStep
]
--TODO: need to repeat for packages for the store
, testGroup "Successful builds" $
[ testCaseSteps "Setup script styles" (testSetupScriptStyles config)
, testCase "keep-going" (testBuildKeepGoing config)
]
, testGroup "Regression tests" $
[ testCase "issue #3324" (testRegressionIssue3324 config)
]
]
testExceptionInFindingPackage :: ProjectConfig -> Assertion
testExceptionInFindingPackage config = do
BadPackageLocations locs <- expectException "BadPackageLocations" $
void $ planProject testdir config
case locs of
[BadLocGlobEmptyMatch "./*.cabal"] -> return ()
_ -> assertFailure "expected BadLocGlobEmptyMatch"
cleanProject testdir
where
testdir = "exception/no-pkg"
testExceptionInFindingPackage2 :: ProjectConfig -> Assertion
testExceptionInFindingPackage2 config = do
BadPackageLocations locs <- expectException "BadPackageLocations" $
void $ planProject testdir config
case locs of
[BadPackageLocationFile (BadLocDirNoCabalFile ".")] -> return ()
_ -> assertFailure $ "expected BadLocDirNoCabalFile, got " ++ show locs
cleanProject testdir
where
testdir = "exception/no-pkg2"
testExceptionInProjectConfig :: ProjectConfig -> Assertion
testExceptionInProjectConfig config = do
BadPerPackageCompilerPaths ps <- expectException "BadPerPackageCompilerPaths" $
void $ planProject testdir config
case ps of
[(PackageName "foo","ghc")] -> return ()
_ -> assertFailure $ "expected (PackageName \"foo\",\"ghc\"), got "
++ show ps
cleanProject testdir
where
testdir = "exception/bad-config"
testExceptionInConfigureStep :: ProjectConfig -> Assertion
testExceptionInConfigureStep config = do
(plan, res) <- executePlan =<< planProject testdir config
(_pkga1, failure) <- expectPackageFailed plan res pkgidA1
case buildFailureReason failure of
ConfigureFailed _ -> return ()
_ -> assertFailure $ "expected ConfigureFailed, got " ++ show failure
cleanProject testdir
where
testdir = "exception/configure"
pkgidA1 = PackageIdentifier (PackageName "a") (Version [1] [])
testExceptionInBuildStep :: ProjectConfig -> Assertion
testExceptionInBuildStep config = do
(plan, res) <- executePlan =<< planProject testdir config
(_pkga1, failure) <- expectPackageFailed plan res pkgidA1
expectBuildFailed failure
where
testdir = "exception/build"
pkgidA1 = PackageIdentifier (PackageName "a") (Version [1] [])
testSetupScriptStyles :: ProjectConfig -> (String -> IO ()) -> Assertion
testSetupScriptStyles config reportSubCase = do
reportSubCase (show SetupCustomExplicitDeps)
(plan1, res1) <- executePlan =<< planProject testdir1 config
(pkg1, _) <- expectPackageInstalled plan1 res1 pkgidA
elabSetupScriptStyle pkg1 @?= SetupCustomExplicitDeps
hasDefaultSetupDeps pkg1 @?= Just False
marker1 <- readFile (basedir </> testdir1 </> "marker")
marker1 @?= "ok"
removeFile (basedir </> testdir1 </> "marker")
reportSubCase (show SetupCustomImplicitDeps)
(plan2, res2) <- executePlan =<< planProject testdir2 config
(pkg2, _) <- expectPackageInstalled plan2 res2 pkgidA
elabSetupScriptStyle pkg2 @?= SetupCustomImplicitDeps
hasDefaultSetupDeps pkg2 @?= Just True
marker2 <- readFile (basedir </> testdir2 </> "marker")
marker2 @?= "ok"
removeFile (basedir </> testdir2 </> "marker")
reportSubCase (show SetupNonCustomInternalLib)
(plan3, res3) <- executePlan =<< planProject testdir3 config
(pkg3, _) <- expectPackageInstalled plan3 res3 pkgidA
elabSetupScriptStyle pkg3 @?= SetupNonCustomInternalLib
{-
--TODO: the SetupNonCustomExternalLib case is hard to test since it
-- requires a version of Cabal that's later than the one we're testing
-- e.g. needs a .cabal file that specifies cabal-version: >= 2.0
-- and a corresponding Cabal package that we can use to try and build a
-- default Setup.hs.
reportSubCase (show SetupNonCustomExternalLib)
(plan4, res4) <- executePlan =<< planProject testdir4 config
(pkg4, _) <- expectPackageInstalled plan4 res4 pkgidA
pkgSetupScriptStyle pkg4 @?= SetupNonCustomExternalLib
-}
where
testdir1 = "build/setup-custom1"
testdir2 = "build/setup-custom2"
testdir3 = "build/setup-simple"
pkgidA = PackageIdentifier (PackageName "a") (Version [0,1] [])
-- The solver fills in default setup deps explicitly, but marks them as such
hasDefaultSetupDeps = fmap defaultSetupDepends
. setupBuildInfo . elabPkgDescription
-- | Test the behaviour with and without @--keep-going@
--
testBuildKeepGoing :: ProjectConfig -> Assertion
testBuildKeepGoing config = do
  -- P is expected to fail; Q does not depend on P, but without
  -- parallel build and without keep-going we don't build Q yet.
(plan1, res1) <- executePlan =<< planProject testdir (config <> keepGoing False)
(_, failure1) <- expectPackageFailed plan1 res1 pkgidP
expectBuildFailed failure1
_ <- expectPackageConfigured plan1 res1 pkgidQ
  -- With keep-going we should then go on to successfully build Q
(plan2, res2) <- executePlan
=<< planProject testdir (config <> keepGoing True)
(_, failure2) <- expectPackageFailed plan2 res2 pkgidP
expectBuildFailed failure2
_ <- expectPackageInstalled plan2 res2 pkgidQ
return ()
where
testdir = "build/keep-going"
pkgidP = PackageIdentifier (PackageName "p") (Version [0,1] [])
pkgidQ = PackageIdentifier (PackageName "q") (Version [0,1] [])
keepGoing kg =
mempty {
projectConfigBuildOnly = mempty {
projectConfigKeepGoing = toFlag kg
}
}
-- | See <https://github.com/haskell/cabal/issues/3324>
--
testRegressionIssue3324 :: ProjectConfig -> Assertion
testRegressionIssue3324 config = do
-- expected failure first time due to missing dep
(plan1, res1) <- executePlan =<< planProject testdir config
(_pkgq, failure) <- expectPackageFailed plan1 res1 pkgidQ
expectBuildFailed failure
-- add the missing dep, now it should work
let qcabal = basedir </> testdir </> "q" </> "q.cabal"
withFileFinallyRestore qcabal $ do
appendFile qcabal (" build-depends: p\n")
(plan2, res2) <- executePlan =<< planProject testdir config
_ <- expectPackageInstalled plan2 res2 pkgidP
_ <- expectPackageInstalled plan2 res2 pkgidQ
return ()
where
testdir = "regression/3324"
pkgidP = PackageIdentifier (PackageName "p") (Version [0,1] [])
pkgidQ = PackageIdentifier (PackageName "q") (Version [0,1] [])
---------------------------------
-- Test utils to plan and build
--
basedir :: FilePath
basedir = "tests" </> "IntegrationTests2"
planProject :: FilePath -> ProjectConfig -> IO PlanDetails
planProject testdir cliConfig = do
cabalDir <- defaultCabalDir
let cabalDirLayout = defaultCabalDirLayout cabalDir
projectRootDir <- canonicalizePath ("tests" </> "IntegrationTests2"
</> testdir)
let distDirLayout = defaultDistDirLayout projectRootDir
-- Clear state between test runs. The state remains if the previous run
-- ended in an exception (as we leave the files to help with debugging).
cleanProject testdir
(elaboratedPlan, _, elaboratedShared, projectConfig) <-
rebuildInstallPlan verbosity
projectRootDir distDirLayout cabalDirLayout
cliConfig
let targets =
Map.fromList
[ (installedUnitId elab, [BuildDefaultComponents])
| InstallPlan.Configured elab <- InstallPlan.toList elaboratedPlan
, elabBuildStyle elab == BuildInplaceOnly ]
elaboratedPlan' = pruneInstallPlanToTargets targets elaboratedPlan
(elaboratedPlan'', pkgsBuildStatus) <-
rebuildTargetsDryRun verbosity distDirLayout elaboratedShared
elaboratedPlan'
let buildSettings = resolveBuildTimeSettings
verbosity cabalDirLayout
(projectConfigShared projectConfig)
(projectConfigBuildOnly projectConfig)
(projectConfigBuildOnly cliConfig)
return (distDirLayout,
elaboratedPlan'',
elaboratedShared,
pkgsBuildStatus,
buildSettings)
type PlanDetails = (DistDirLayout,
ElaboratedInstallPlan,
ElaboratedSharedConfig,
BuildStatusMap,
BuildTimeSettings)
executePlan :: PlanDetails -> IO (ElaboratedInstallPlan, BuildOutcomes)
executePlan (distDirLayout,
elaboratedPlan,
elaboratedShared,
pkgsBuildStatus,
buildSettings) =
fmap ((,) elaboratedPlan) $
rebuildTargets verbosity
distDirLayout
elaboratedPlan
elaboratedShared
pkgsBuildStatus
-- Avoid trying to use act-as-setup mode:
buildSettings { buildSettingNumJobs = 1 }
cleanProject :: FilePath -> IO ()
cleanProject testdir = do
alreadyExists <- doesDirectoryExist distDir
when alreadyExists $ removeDirectoryRecursive distDir
where
projectRootDir = "tests" </> "IntegrationTests2" </> testdir
distDirLayout = defaultDistDirLayout projectRootDir
distDir = distDirectory distDirLayout
verbosity :: Verbosity
verbosity = minBound --normal --verbose --maxBound --minBound
-------------------------------------------
-- Tasty integration to adjust the config
--
withProjectConfig :: (ProjectConfig -> TestTree) -> TestTree
withProjectConfig testtree =
askOption $ \ghcPath ->
testtree (mkProjectConfig ghcPath)
mkProjectConfig :: GhcPath -> ProjectConfig
mkProjectConfig (GhcPath ghcPath) =
mempty {
projectConfigShared = mempty {
projectConfigHcPath = maybeToFlag ghcPath
},
projectConfigBuildOnly = mempty {
projectConfigNumJobs = toFlag (Just 1)
}
}
where
maybeToFlag = maybe mempty toFlag
data GhcPath = GhcPath (Maybe FilePath)
deriving Typeable
instance IsOption GhcPath where
defaultValue = GhcPath Nothing
optionName = Tagged "with-ghc"
optionHelp = Tagged "The ghc compiler to use"
parseValue = Just . GhcPath . Just
projectConfigOptionDescriptions :: [OptionDescription]
projectConfigOptionDescriptions = [Option (Proxy :: Proxy GhcPath)]
---------------------------------------
-- HUint style utils for this context
--
expectException :: Exception e => String -> IO a -> IO e
expectException expected action = do
res <- try action
case res of
Left e -> return e
Right _ -> throwIO $ HUnitFailure $ "expected an exception " ++ expected
expectPackagePreExisting :: ElaboratedInstallPlan -> BuildOutcomes -> PackageId
-> IO InstalledPackageInfo
expectPackagePreExisting plan buildOutcomes pkgid = do
planpkg <- expectPlanPackage plan pkgid
case (planpkg, InstallPlan.lookupBuildOutcome planpkg buildOutcomes) of
(InstallPlan.PreExisting pkg, Nothing)
-> return pkg
(_, buildResult) -> unexpectedBuildResult "PreExisting" planpkg buildResult
expectPackageConfigured :: ElaboratedInstallPlan -> BuildOutcomes -> PackageId
-> IO ElaboratedConfiguredPackage
expectPackageConfigured plan buildOutcomes pkgid = do
planpkg <- expectPlanPackage plan pkgid
case (planpkg, InstallPlan.lookupBuildOutcome planpkg buildOutcomes) of
(InstallPlan.Configured pkg, Nothing)
-> return pkg
(_, buildResult) -> unexpectedBuildResult "Configured" planpkg buildResult
expectPackageInstalled :: ElaboratedInstallPlan -> BuildOutcomes -> PackageId
-> IO (ElaboratedConfiguredPackage, BuildResult)
expectPackageInstalled plan buildOutcomes pkgid = do
planpkg <- expectPlanPackage plan pkgid
case (planpkg, InstallPlan.lookupBuildOutcome planpkg buildOutcomes) of
(InstallPlan.Configured pkg, Just (Right result))
-> return (pkg, result)
(_, buildResult) -> unexpectedBuildResult "Installed" planpkg buildResult
expectPackageFailed :: ElaboratedInstallPlan -> BuildOutcomes -> PackageId
-> IO (ElaboratedConfiguredPackage, BuildFailure)
expectPackageFailed plan buildOutcomes pkgid = do
planpkg <- expectPlanPackage plan pkgid
case (planpkg, InstallPlan.lookupBuildOutcome planpkg buildOutcomes) of
(InstallPlan.Configured pkg, Just (Left failure))
-> return (pkg, failure)
(_, buildResult) -> unexpectedBuildResult "Failed" planpkg buildResult
unexpectedBuildResult :: String -> ElaboratedPlanPackage
-> Maybe (Either BuildFailure BuildResult) -> IO a
unexpectedBuildResult expected planpkg buildResult =
throwIO $ HUnitFailure $
"expected to find " ++ display (packageId planpkg) ++ " in the "
++ expected ++ " state, but it is actually in the " ++ actual ++ " state."
where
actual = case (buildResult, planpkg) of
(Nothing, InstallPlan.PreExisting{}) -> "PreExisting"
(Nothing, InstallPlan.Configured{}) -> "Configured"
(Just (Right _), InstallPlan.Configured{}) -> "Installed"
(Just (Left _), InstallPlan.Configured{}) -> "Failed"
_ -> "Impossible!"
expectPlanPackage :: ElaboratedInstallPlan -> PackageId
-> IO ElaboratedPlanPackage
expectPlanPackage plan pkgid =
case [ pkg
| pkg <- InstallPlan.toList plan
, packageId pkg == pkgid ] of
[pkg] -> return pkg
[] -> throwIO $ HUnitFailure $
"expected to find " ++ display pkgid
++ " in the install plan but it's not there"
_ -> throwIO $ HUnitFailure $
"expected to find only one instance of " ++ display pkgid
++ " in the install plan but there's several"
expectBuildFailed :: BuildFailure -> IO ()
expectBuildFailed (BuildFailure _ (BuildFailed _)) = return ()
expectBuildFailed (BuildFailure _ reason) =
assertFailure $ "expected BuildFailed, got " ++ show reason
---------------------------------------
-- Other utils
--
-- | Allow altering a file during a test, but then restore it afterwards
--
withFileFinallyRestore :: FilePath -> IO a -> IO a
withFileFinallyRestore file action = do
copyFile file backup
action `finally` renameFile backup file
where
backup = file <.> "backup"
|
sopvop/cabal
|
cabal-install/tests/IntegrationTests2.hs
|
bsd-3-clause
| 16,685
| 0
| 15
| 3,827
| 3,561
| 1,813
| 1,748
| 309
| 5
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-|
'Snap.Extension.Heist.Heist' is an implementation of the 'MonadHeist'
interface defined in 'Snap.Extension.Heist'.
As always, to use, add 'HeistState' to your application's state, along with an
instance of 'HasHeistState' for your application's state, making sure to
use a 'heistInitializer' in your application's 'Initializer', and then you're
ready to go.
'Snap.Extension.Heist.Heist' is a little different to other Snap Extensions,
which is unfortunate as it is probably the most widely useful one. As
explained below, 'HeistState' takes your application's monad as a type
argument, and 'HasHeistState' is a multi-parameter type class, the additional
first parameter also being your application's monad.
Two instances of 'MonadHeist' are provided with this module. One is designed
for users wanting to use Heist templates with their application, the other is
designed for users writing Snap Extensions which use their own Heist templates
internally.
The first one of these instances is
@HasHeistState (SnapExtend s) s => MonadHeist (SnapExtend s) (SnapExtend s)@.
This means that any type @s@ which has a 'HeistState', whose
'TemplateState'\'s monad is @SnapExtend s@ forms a 'MonadHeist' whose
'TemplateState'\'s monad is @SnapExtend s@ and whose monad itself is
@SnapExtend s@. The @s@ here is your application's state, and @SnapExtend s@
is your application's monad.
The second one of these instances is
@HasHeistState m s => MonadHeist m (ReaderT s m)@. This means that any type
@s@ which has, for any m, a @HeistState m@, forms a 'MonadHeist', whose
'TemplateState'\'s monad is @m@, when made the environment of
a 'ReaderT' wrapped around @m@. The @s@ here would be the Snap Extension's
internal state, and the @m@ would be 'SnapExtend' wrapped around any @s'@
which was an instance of the Snap Extension's @HasState@ class.
This implementation does not require that your application's monad implement
interfaces from any other Snap Extension.
-}
module Snap.Extension.Heist.Heist
( HeistState
, HasHeistState(..)
, heistInitializer
) where
import Control.Applicative
import Control.Concurrent.MVar
import Control.Monad
import Control.Monad.Reader
import Control.Monad.Trans
import qualified Data.ByteString as B
import Snap.Extension
import Snap.Extension.Heist
import Snap.Types
import Text.Templating.Heist
import Text.Templating.Heist.Splices.Static
------------------------------------------------------------------------------
-- | Your application's state must include a 'HeistState' in order for your
-- application to be a 'MonadHeist'.
--
-- Unlike other @-State@ types, this is of kind @(* -> *) -> *@. Unless you're
-- developing your own Snap Extension which has its own internal 'HeistState',
-- the type argument you want to pass to 'HeistState' is your application's
-- monad, which should be 'SnapExtend' wrapped around your application's
-- state.
data MonadSnap m => HeistState m = HeistState
{ _path :: FilePath
, _origTs :: TemplateState m
, _tsMVar :: MVar (TemplateState m)
, _sts :: StaticTagState
, _modifier :: TemplateState m -> TemplateState m
}
------------------------------------------------------------------------------
-- | For your application's monad to be a 'MonadHeist', your application's
-- state needs to be an instance of 'HasHeistState'. Minimal complete
-- definition: 'getHeistState', 'setHeistState'.
--
-- Unlike other @HasState@ type classes, this is a type class has two
-- parameters. Among other things, this means that you will need to enable the
-- @FlexibleInstances@ and @MultiParamTypeClasses@ language extensions to
-- be able to create an instance of @HasHeistState@. In most cases, the last
-- parameter will as usual be your application's state, and the additional
-- first one will be the monad formed by wrapping 'SnapExtend' around your
-- application's state.
--
-- However, if you are developing your own Snap Extension which uses its own
-- internal 'HeistState', the last parameter will be your Snap Extension's
-- internal state, and the additional first parameter will be any monad formed
-- by wrapping @SnapExtend@ around a type which has an instance of the
-- @HasState@ class for your monad. These two use cases are subtly different,
-- which is why 'HasHeistState' needs two type parameters.
class MonadSnap m => HasHeistState m s | s -> m where
getHeistState :: s -> HeistState m
setHeistState :: HeistState m -> s -> s
modifyHeistState :: (HeistState m -> HeistState m) -> s -> s
modifyHeistState f s = setHeistState (f $ getHeistState s) s
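-- A minimal sketch of such an instance for a hypothetical application state
-- (the type and field names below are illustrative only, not part of this
-- package):
--
-- > data App = App { appHeist :: HeistState (SnapExtend App) }
-- >
-- > instance HasHeistState (SnapExtend App) App where
-- >     getHeistState        = appHeist
-- >     setHeistState hs app = app { appHeist = hs }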
------------------------------------------------------------------------------
-- | The 'Initializer' for 'HeistState'. It takes one argument, a path to a
-- template directory containing @.tpl@ files.
heistInitializer :: MonadSnap m => FilePath -> Initializer (HeistState m)
heistInitializer path = do
heistState <- liftIO $ do
(origTs,sts) <- bindStaticTag emptyTemplateState
loadTemplates path origTs >>= either error (\ts -> do
tsMVar <- newMVar ts
return $ HeistState path origTs tsMVar sts id)
mkInitializer heistState
------------------------------------------------------------------------------
instance MonadSnap m => InitializerState (HeistState m) where
extensionId = const "Heist/Heist"
mkCleanup = const $ return ()
mkReload (HeistState path origTs tsMVar sts _) = do
clearStaticTagCache $ sts
either error (modifyMVar_ tsMVar . const . return) =<<
loadTemplates path origTs
------------------------------------------------------------------------------
instance HasHeistState (SnapExtend s) s => MonadHeist (SnapExtend s) (SnapExtend s) where
render t = do
(HeistState _ _ tsMVar _ modifier) <- asks getHeistState
ts <- liftIO $ fmap modifier $ readMVar tsMVar
renderTemplate ts t >>= maybe pass (\html -> do
modifyResponse $ setContentType "text/html; charset=utf-8"
modifyResponse $ setContentLength (fromIntegral $ B.length html)
writeBS html)
heistLocal f = local $ modifyHeistState $ \s ->
s { _modifier = f . _modifier s }
------------------------------------------------------------------------------
instance HasHeistState m s => MonadHeist m (ReaderT s m) where
render t = ReaderT $ \s -> do
let (HeistState _ _ tsMVar _ modifier) = getHeistState s
ts <- liftIO $ fmap modifier $ readMVar tsMVar
        renderTemplate ts t >>= maybe pass (\html -> do
            modifyResponse $ setContentType "text/html; charset=utf-8"
            modifyResponse $ setContentLength (fromIntegral $ B.length html)
            writeBS html)
heistLocal f = local $ modifyHeistState $ \s ->
s { _modifier = f . _modifier s }
|
duairc/snap-extensions
|
src/Snap/Extension/Heist/Heist.hs
|
bsd-3-clause
| 7,176
| 0
| 21
| 1,402
| 915
| 477
| 438
| 67
| 1
|
{- |
Predefined instances of PlotWithGnuplot for
* @[(Double,Double)]@ Plot as points or lines, according to Graphics.Gnewplot.Style
* @[((Double,Double),Double)]@ Plot durations as disconnected horizontal lines
* @(Double->Double, (Double,Double))@ Plot an arbitrary function over the given lower and upper bounds
* @TimeSeries@ Plot a timeseries with a step size, start time and a StorableVector of values.
-}
{-# LANGUAGE GeneralizedNewtypeDeriving, FlexibleInstances, ExistentialQuantification #-}
module Graphics.Gnewplot.Instances where
import Graphics.Gnewplot.Types
import Graphics.Gnewplot.Exec
import Graphics.Gnewplot.Style
import System.Directory
import System.IO
import Control.Monad
import Data.List
import qualified Data.StorableVector as SV
instance PlotWithGnuplot [(Double,Double)] where
getGnuplotCmd [] = return []
getGnuplotCmd es =
do fnm <- ("/tmp/gnuplotevs"++) `fmap` uniqueIntStr
writeEvts fnm es
return [PL (concat ["\"", fnm, "\" using 1:2"]) "" "points" (removeFile fnm)]
where writeEvts fp evs = do
h <- openFile fp WriteMode
forM_ evs $ \(t,v)-> hPutStrLn h $ show t++"\t"++show v
hClose h
instance PlotWithGnuplot [((Double,Double),Double)] where
getGnuplotCmd [] = return []
getGnuplotCmd es =
do fnm <- ("/tmp/gnuplotdurs"++) `fmap` uniqueIntStr
writeEvts fnm es
return [PL (concat ["\"", fnm, "\" using 1:($2)"]) "" "lines" (removeFile fnm)]
where writeEvts fp durs = do
h <- openFile fp WriteMode
forM_ durs $ \((t1,t2),v)-> do
hPutStrLn h $ show t1++"\t"++show v
hPutStrLn h $ show t2++"\t"++show v
hPutStrLn h $ show t2++"\tNaN"
hClose h
instance PlotWithGnuplot (Double->Double, (Double,Double)) where
getGnuplotCmd (f,(t1,t2)) =
let dx = (t2-t1)/1000
xs = map (\p-> p*dx+t1) [0..999]
in getGnuplotCmd $ Lines [LineStyle 0] $ zip xs $ map f xs
data TimeSeries = TimeSeries { stepSize:: Double,
startTime :: Double,
tsValues ::(SV.Vector Double) }
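-- A minimal sketch of constructing a 'TimeSeries' (the name and numbers are
-- made up for illustration): one second of a sine wave sampled every
-- millisecond, starting at t = 0.
--
-- > sineWave :: TimeSeries
-- > sineWave = TimeSeries { stepSize  = 0.001
-- >                       , startTime = 0
-- >                       , tsValues  = SV.pack (map sin [0, 0.001 .. 1]) }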
instance PlotWithGnuplot TimeSeries where
getGnuplotCmd (TimeSeries dt t1 vls) = do
fnm <- ("/tmp/gnuplotsig"++) `fmap` uniqueIntStr
h <- openBinaryFile fnm WriteMode
SV.hPut h vls
hClose h
return $ [PL (concat ["\"", fnm, "\" binary format=\"%float64\" using ($0*",
show dt, "+", show t1, "):1"] )
"" -- (show t1++"->"++show t2)
"lines"
(removeFile fnm)]
data Heatmap = Heatmap [[Double]]
instance PlotWithGnuplot Heatmap where
getGnuplotCmd (Heatmap xss) = do
fnm <- ("/tmp/gnuplotheat"++) `fmap` uniqueIntStr
h <- openFile fnm WriteMode
forM_ xss $ \xs-> hPutStrLn h $ intercalate ", " (map show xs)
hClose h
return $ [SPL ("'"++fnm++"' matrix") "" "image" (removeFile fnm),
TopLevelGnuplotCmd "set view map" "unset view",
TopLevelGnuplotCmd ("set xrange [-0.5:"++show (length (head xss)-1)++".5]") "unset xrange"]
|
glutamate/gnewplot
|
Graphics/Gnewplot/Instances.hs
|
bsd-3-clause
| 3,354
| 0
| 19
| 1,023
| 943
| 485
| 458
| 62
| 0
|
module Transactions.Transaction where
import Persist.Serializable
import Transactions.Money
data Transaction = Income Money
| Payment Money
deriving (Show, Read)
instance Eq Transaction where
Income x == Income y = x == y
Payment x == Payment y = x == y
Payment _ == Income _ = False
Income _ == Payment _ = False
instance Serializable Transaction where
serialize = serializeTransaction
parse = undefined
serializeTransaction :: Transaction -> String
serializeTransaction (Payment x) = "Payment " ++ show x
serializeTransaction (Income x) = "Income " ++ show x
|
Miguel-Fontes/hs-money
|
src/Transactions/Transaction.hs
|
bsd-3-clause
| 635
| 0
| 7
| 161
| 194
| 95
| 99
| 17
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Data.Sequence.NonEmpty (NonEmptySeq(NonEmptySeq))
import Data.Time
import Test.QuickCheck
import Test.Tasty.QuickCheck
import Test.Tasty.TH
import Penny.SeqUtil
import Penny.Copper.Copperize
import Penny.Copper.Decopperize
import Penny.NonNegative
import Penny.Positive
instance Arbitrary a => Arbitrary (NonEmptySeq a) where
arbitrary = NonEmptySeq <$> arbitrary <*> arbitrary
instance (Arbitrary a, Arbitrary b) => Arbitrary (Groups a b) where
arbitrary = Groups <$> arbitrary <*> arbitrary <*> arbitrary
instance Arbitrary TimeZone where
arbitrary = do
mins <- choose (-1439, 1439)
return $ TimeZone mins False ""
newtype HoursI = HoursI Int deriving Show
instance Arbitrary HoursI where
arbitrary = HoursI <$> choose (0, 23)
newtype N20'23I = N20'23I Int deriving Show
instance Arbitrary N20'23I where
arbitrary = N20'23I <$> choose (20, 23)
newtype N0'19I = N0'19I Int deriving Show
instance Arbitrary N0'19I where
arbitrary = N0'19I <$> choose (0,19)
prop_hours :: HoursI -> Property
prop_hours (HoursI i) = case c'Hours'Int i of
Nothing -> counterexample "copperization failed" $ property False
Just copperHrs -> fromIntegral (c'Integer'NonNegative (dHours copperHrs))
=== i
prop_copperizeTimeZone :: TimeZone -> Property
prop_copperizeTimeZone z = case cTimeZone z of
Nothing -> counterexample "copperization failed" $ property False
Just copperZone -> let zone = dZone copperZone
in counterexample (show zone)
$ timeZoneMinutes (dZone copperZone) === timeZoneMinutes z
prop_N20'23 :: N20'23I -> Property
prop_N20'23 (N20'23I i) = case c'N20'23'Int i of
Nothing -> counterexample "copperization failed" $ property False
Just copperN20 -> fromIntegral (c'Integer'Positive (dN20'23 copperN20))
=== i
prop_N0'19 :: N0'19I -> Property
prop_N0'19 (N0'19I i) = case c'N0'19'Int i of
Nothing -> counterexample "copperization failed" $ property False
Just copperN20 -> fromIntegral (c'Integer'NonNegative (dN0'19 copperN20))
=== i
prop_concatThenGroup :: Groups Int Int -> Bool
prop_concatThenGroup groups = groupEithers (concatGroups groups) == groups
main = $(defaultMainGenerator)
|
massysett/penny
|
penny/tests/penny-tasty.hs
|
bsd-3-clause
| 2,221
| 0
| 14
| 360
| 665
| 341
| 324
| 53
| 2
|
{-# LANGUAGE ForeignFunctionInterface, CPP #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.SeparateShaderObjects
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- All raw functions and tokens from the ARB_separate_shader_objects extension, see
-- <http://www.opengl.org/registry/specs/ARB/separate_shader_objects.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.SeparateShaderObjects (
-- * Functions
glUseProgramStages,
glActiveShaderProgram,
glCreateShaderProgramv,
glBindProgramPipeline,
glDeleteProgramPipelines,
glGenProgramPipelines,
glIsProgramPipeline,
glGetProgramPipelineiv,
glProgramUniform1i,
glProgramUniform1iv,
glProgramUniform1f,
glProgramUniform1fv,
glProgramUniform1d,
glProgramUniform1dv,
glProgramUniform1ui,
glProgramUniform1uiv,
glProgramUniform2i,
glProgramUniform2iv,
glProgramUniform2f,
glProgramUniform2fv,
glProgramUniform2d,
glProgramUniform2dv,
glProgramUniform2ui,
glProgramUniform2uiv,
glProgramUniform3i,
glProgramUniform3iv,
glProgramUniform3f,
glProgramUniform3fv,
glProgramUniform3d,
glProgramUniform3dv,
glProgramUniform3ui,
glProgramUniform3uiv,
glProgramUniform4i,
glProgramUniform4iv,
glProgramUniform4f,
glProgramUniform4fv,
glProgramUniform4d,
glProgramUniform4dv,
glProgramUniform4ui,
glProgramUniform4uiv,
glProgramUniformMatrix2fv,
glProgramUniformMatrix3fv,
glProgramUniformMatrix4fv,
glProgramUniformMatrix2dv,
glProgramUniformMatrix3dv,
glProgramUniformMatrix4dv,
glProgramUniformMatrix2x3fv,
glProgramUniformMatrix3x2fv,
glProgramUniformMatrix2x4fv,
glProgramUniformMatrix4x2fv,
glProgramUniformMatrix3x4fv,
glProgramUniformMatrix4x3fv,
glProgramUniformMatrix2x3dv,
glProgramUniformMatrix3x2dv,
glProgramUniformMatrix2x4dv,
glProgramUniformMatrix4x2dv,
glProgramUniformMatrix3x4dv,
glProgramUniformMatrix4x3dv,
glValidateProgramPipeline,
glGetProgramPipelineInfoLog,
-- * Tokens
gl_VERTEX_SHADER_BIT,
gl_FRAGMENT_SHADER_BIT,
gl_GEOMETRY_SHADER_BIT,
gl_TESS_CONTROL_SHADER_BIT,
gl_TESS_EVALUATION_SHADER_BIT,
gl_ALL_SHADER_BITS,
gl_PROGRAM_SEPARABLE,
gl_ACTIVE_PROGRAM,
gl_PROGRAM_PIPELINE_BINDING
) where
import Foreign.C.Types
import Foreign.Ptr
import Graphics.Rendering.OpenGL.Raw.Extensions
import Graphics.Rendering.OpenGL.Raw.Core31.Types
#include "HsOpenGLRaw.h"
extensionNameString :: String
extensionNameString = "GL_separate_shader_objects"
EXTENSION_ENTRY(dyn_glUseProgramStages,ptr_glUseProgramStages,"glUseProgramStages",glUseProgramStages,GLuint -> GLbitfield -> GLuint -> IO ())
EXTENSION_ENTRY(dyn_glActiveShaderProgram,ptr_glActiveShaderProgram,"glActiveShaderProgram",glActiveShaderProgram,GLuint -> GLuint -> IO ())
EXTENSION_ENTRY(dyn_glCreateShaderProgramv,ptr_glCreateShaderProgramv,"glCreateShaderProgramv",glCreateShaderProgramv,GLenum -> GLsizei -> Ptr (Ptr GLchar) -> IO GLuint)
EXTENSION_ENTRY(dyn_glBindProgramPipeline,ptr_glBindProgramPipeline,"glBindProgramPipeline",glBindProgramPipeline,GLuint -> IO ())
EXTENSION_ENTRY(dyn_glDeleteProgramPipelines,ptr_glDeleteProgramPipelines,"glDeleteProgramPipelines",glDeleteProgramPipelines,GLsizei -> Ptr GLuint -> IO ())
EXTENSION_ENTRY(dyn_glGenProgramPipelines,ptr_glGenProgramPipelines,"glGenProgramPipelines",glGenProgramPipelines,GLsizei -> Ptr GLuint -> IO ())
EXTENSION_ENTRY(dyn_glIsProgramPipeline,ptr_glIsProgramPipeline,"glIsProgramPipeline",glIsProgramPipeline,GLuint -> IO GLboolean)
EXTENSION_ENTRY(dyn_glGetProgramPipelineiv,ptr_glGetProgramPipelineiv,"glGetProgramPipelineiv",glGetProgramPipelineiv,GLuint -> GLenum -> Ptr GLint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform1i,ptr_glProgramUniform1i,"glProgramUniform1i",glProgramUniform1i,GLuint -> GLint -> GLint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform1iv,ptr_glProgramUniform1iv,"glProgramUniform1iv",glProgramUniform1iv,GLuint -> GLint -> GLsizei -> Ptr GLint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform1f,ptr_glProgramUniform1f,"glProgramUniform1f",glProgramUniform1f,GLuint -> GLint -> GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform1fv,ptr_glProgramUniform1fv,"glProgramUniform1fv",glProgramUniform1fv,GLuint -> GLint -> GLsizei -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform1d,ptr_glProgramUniform1d,"glProgramUniform1d",glProgramUniform1d,GLuint -> GLint -> GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform1dv,ptr_glProgramUniform1dv,"glProgramUniform1dv",glProgramUniform1dv,GLuint -> GLint -> GLsizei -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform1ui,ptr_glProgramUniform1ui,"glProgramUniform1ui",glProgramUniform1ui,GLuint -> GLint -> GLuint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform1uiv,ptr_glProgramUniform1uiv,"glProgramUniform1uiv",glProgramUniform1uiv,GLuint -> GLint -> GLsizei -> Ptr GLuint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform2i,ptr_glProgramUniform2i,"glProgramUniform2i",glProgramUniform2i,GLuint -> GLint -> GLint -> GLint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform2iv,ptr_glProgramUniform2iv,"glProgramUniform2iv",glProgramUniform2iv,GLuint -> GLint -> GLsizei -> Ptr GLint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform2f,ptr_glProgramUniform2f,"glProgramUniform2f",glProgramUniform2f,GLuint -> GLint -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform2fv,ptr_glProgramUniform2fv,"glProgramUniform2fv",glProgramUniform2fv,GLuint -> GLint -> GLsizei -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform2d,ptr_glProgramUniform2d,"glProgramUniform2d",glProgramUniform2d,GLuint -> GLint -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform2dv,ptr_glProgramUniform2dv,"glProgramUniform2dv",glProgramUniform2dv,GLuint -> GLint -> GLsizei -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform2ui,ptr_glProgramUniform2ui,"glProgramUniform2ui",glProgramUniform2ui,GLuint -> GLint -> GLuint -> GLuint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform2uiv,ptr_glProgramUniform2uiv,"glProgramUniform2uiv",glProgramUniform2uiv,GLuint -> GLint -> GLsizei -> Ptr GLuint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform3i,ptr_glProgramUniform3i,"glProgramUniform3i",glProgramUniform3i,GLuint -> GLint -> GLint -> GLint -> GLint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform3iv,ptr_glProgramUniform3iv,"glProgramUniform3iv",glProgramUniform3iv,GLuint -> GLint -> GLsizei -> Ptr GLint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform3f,ptr_glProgramUniform3f,"glProgramUniform3f",glProgramUniform3f,GLuint -> GLint -> GLfloat -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform3fv,ptr_glProgramUniform3fv,"glProgramUniform3fv",glProgramUniform3fv,GLuint -> GLint -> GLsizei -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform3d,ptr_glProgramUniform3d,"glProgramUniform3d",glProgramUniform3d,GLuint -> GLint -> GLdouble -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform3dv,ptr_glProgramUniform3dv,"glProgramUniform3dv",glProgramUniform3dv,GLuint -> GLint -> GLsizei -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform3ui,ptr_glProgramUniform3ui,"glProgramUniform3ui",glProgramUniform3ui,GLuint -> GLint -> GLuint -> GLuint -> GLuint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform3uiv,ptr_glProgramUniform3uiv,"glProgramUniform3uiv",glProgramUniform3uiv,GLuint -> GLint -> GLsizei -> Ptr GLuint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform4i,ptr_glProgramUniform4i,"glProgramUniform4i",glProgramUniform4i,GLuint -> GLint -> GLint -> GLint -> GLint -> GLint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform4iv,ptr_glProgramUniform4iv,"glProgramUniform4iv",glProgramUniform4iv,GLuint -> GLint -> GLsizei -> Ptr GLint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform4f,ptr_glProgramUniform4f,"glProgramUniform4f",glProgramUniform4f,GLuint -> GLint -> GLfloat -> GLfloat -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform4fv,ptr_glProgramUniform4fv,"glProgramUniform4fv",glProgramUniform4fv,GLuint -> GLint -> GLsizei -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform4d,ptr_glProgramUniform4d,"glProgramUniform4d",glProgramUniform4d,GLuint -> GLint -> GLdouble -> GLdouble -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform4dv,ptr_glProgramUniform4dv,"glProgramUniform4dv",glProgramUniform4dv,GLuint -> GLint -> GLsizei -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform4ui,ptr_glProgramUniform4ui,"glProgramUniform4ui",glProgramUniform4ui,GLuint -> GLint -> GLuint -> GLuint -> GLuint -> GLuint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniform4uiv,ptr_glProgramUniform4uiv,"glProgramUniform4uiv",glProgramUniform4uiv,GLuint -> GLint -> GLsizei -> Ptr GLuint -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix2fv,ptr_glProgramUniformMatrix2fv,"glProgramUniformMatrix2fv",glProgramUniformMatrix2fv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix3fv,ptr_glProgramUniformMatrix3fv,"glProgramUniformMatrix3fv",glProgramUniformMatrix3fv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix4fv,ptr_glProgramUniformMatrix4fv,"glProgramUniformMatrix4fv",glProgramUniformMatrix4fv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix2dv,ptr_glProgramUniformMatrix2dv,"glProgramUniformMatrix2dv",glProgramUniformMatrix2dv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix3dv,ptr_glProgramUniformMatrix3dv,"glProgramUniformMatrix3dv",glProgramUniformMatrix3dv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix4dv,ptr_glProgramUniformMatrix4dv,"glProgramUniformMatrix4dv",glProgramUniformMatrix4dv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix2x3fv,ptr_glProgramUniformMatrix2x3fv,"glProgramUniformMatrix2x3fv",glProgramUniformMatrix2x3fv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix3x2fv,ptr_glProgramUniformMatrix3x2fv,"glProgramUniformMatrix3x2fv",glProgramUniformMatrix3x2fv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix2x4fv,ptr_glProgramUniformMatrix2x4fv,"glProgramUniformMatrix2x4fv",glProgramUniformMatrix2x4fv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix4x2fv,ptr_glProgramUniformMatrix4x2fv,"glProgramUniformMatrix4x2fv",glProgramUniformMatrix4x2fv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix3x4fv,ptr_glProgramUniformMatrix3x4fv,"glProgramUniformMatrix3x4fv",glProgramUniformMatrix3x4fv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix4x3fv,ptr_glProgramUniformMatrix4x3fv,"glProgramUniformMatrix4x3fv",glProgramUniformMatrix4x3fv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix2x3dv,ptr_glProgramUniformMatrix2x3dv,"glProgramUniformMatrix2x3dv",glProgramUniformMatrix2x3dv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix3x2dv,ptr_glProgramUniformMatrix3x2dv,"glProgramUniformMatrix3x2dv",glProgramUniformMatrix3x2dv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix2x4dv,ptr_glProgramUniformMatrix2x4dv,"glProgramUniformMatrix2x4dv",glProgramUniformMatrix2x4dv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix4x2dv,ptr_glProgramUniformMatrix4x2dv,"glProgramUniformMatrix4x2dv",glProgramUniformMatrix4x2dv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix3x4dv,ptr_glProgramUniformMatrix3x4dv,"glProgramUniformMatrix3x4dv",glProgramUniformMatrix3x4dv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glProgramUniformMatrix4x3dv,ptr_glProgramUniformMatrix4x3dv,"glProgramUniformMatrix4x3dv",glProgramUniformMatrix4x3dv,GLuint -> GLint -> GLsizei -> GLboolean -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY(dyn_glValidateProgramPipeline,ptr_glValidateProgramPipeline,"glValidateProgramPipeline",glValidateProgramPipeline,GLuint -> IO ())
EXTENSION_ENTRY(dyn_glGetProgramPipelineInfoLog,ptr_glGetProgramPipelineInfoLog,"glGetProgramPipelineInfoLog",glGetProgramPipelineInfoLog,GLuint -> GLsizei -> Ptr GLsizei -> Ptr GLchar -> IO ())
gl_VERTEX_SHADER_BIT :: GLbitfield
gl_VERTEX_SHADER_BIT = 0x00000001
gl_FRAGMENT_SHADER_BIT :: GLbitfield
gl_FRAGMENT_SHADER_BIT = 0x00000002
gl_GEOMETRY_SHADER_BIT :: GLbitfield
gl_GEOMETRY_SHADER_BIT = 0x00000004
gl_TESS_CONTROL_SHADER_BIT :: GLbitfield
gl_TESS_CONTROL_SHADER_BIT = 0x00000008
gl_TESS_EVALUATION_SHADER_BIT :: GLbitfield
gl_TESS_EVALUATION_SHADER_BIT = 0x00000010
gl_ALL_SHADER_BITS :: GLbitfield
gl_ALL_SHADER_BITS = 0xFFFFFFFF
gl_PROGRAM_SEPARABLE :: GLenum
gl_PROGRAM_SEPARABLE = 0x8258
gl_ACTIVE_PROGRAM :: GLenum
gl_ACTIVE_PROGRAM = 0x8259
gl_PROGRAM_PIPELINE_BINDING :: GLenum
gl_PROGRAM_PIPELINE_BINDING = 0x825A
|
mfpi/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/ARB/SeparateShaderObjects.hs
|
bsd-3-clause
| 13,605
| 0
| 15
| 1,108
| 2,923
| 1,622
| 1,301
| -1
| -1
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DisambiguateRecordFields #-}
module Data.Povray.Settings where
import Data.Povray.Base
import Data.Povray.Types
import Data.Povray.Colour
import Data.Povray.Photon
import Data.Maybe
data GlobalSetting = GlobalSetting {
-- abcBailOut :: Maybe Double,
ambientLight :: Maybe Colour,
assumedGamma :: Maybe Double,
-- hfGray16 :: Bool,
iridWaveLength :: Maybe Colour,
-- charSet :: Maybe Charset,
maxIntersections :: Maybe Int,
maxTraceLevel :: Maybe Int,
numberOfWaves :: Maybe Int,
-- noiseGenerator :: Maybe noiseGenerator,
-- radiosity :: Maybe Radiosity,
photon :: Maybe Photon
}
emptyGlobalSetting :: GlobalSetting
emptyGlobalSetting = GlobalSetting Nothing    -- ambientLight
                                   (Just 1.0) -- assumedGamma
                                   Nothing    -- iridWaveLength
                                   Nothing    -- maxIntersections
                                   Nothing    -- maxTraceLevel
                                   Nothing    -- numberOfWaves
                                   Nothing    -- photon
instance Povray GlobalSetting where
toPov GlobalSetting{..}
= join[
"global_settings {",
maybeToPovWithName "ambient_light" ambientLight,
maybeToPovWithName "assumed_gamma" assumedGamma,
maybeToPovWithName "irid_wavelength" iridWaveLength,
maybeToPovWithName "max_intersections" maxIntersections,
maybeToPovWithName "max_trace_level" maxTraceLevel,
maybeToPovWithName "number_of_waves" numberOfWaves,
maybeToPov photon,
"}"
]
|
lesguillemets/hspov_proto
|
src/Data/Povray/Settings.hs
|
bsd-3-clause
| 1,508
| 0
| 9
| 437
| 232
| 131
| 101
| 32
| 1
|
{-# Language CPP #-}
module CabalBounds.HaskellPlatform
( librariesOf
, currentLibraries
, previousLibraries
, allVersions
, HPVersion
) where
import qualified Distribution.Version as V
import Data.List (find)
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>))
#endif
type LibName = String
type LibVersion = V.Version
type Library = (LibName, LibVersion)
type HPVersion = String
-- | the libraries of the given haskell platform version
librariesOf :: HPVersion -> Maybe [Library]
librariesOf hpVers = snd <$> find ((== hpVers) . fst) allVersions
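-- For example (illustrative only):
--
-- > librariesOf "2013.2.0.0" == Just libs_2013_2_0_0
-- > librariesOf "1999.1.0.0" == Nothing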
-- | the libraries of the current haskell platform
currentLibraries :: [Library]
currentLibraries = snd . last $ allVersions
-- | the libraries of the previous haskell platform
previousLibraries :: [Library]
previousLibraries = snd . head . drop 1 . reverse $ allVersions
-- | all haskell platform versions and their libraries
allVersions :: [(HPVersion, [Library])]
allVersions =
[ ("2010.2.0.0", libs_2010_2_0_0)
, ("2011.2.0.0", libs_2011_2_0_0)
, ("2011.2.0.1", libs_2011_2_0_1)
, ("2011.4.0.0", libs_2011_4_0_0)
, ("2012.2.0.0", libs_2012_2_0_0)
, ("2012.4.0.0", libs_2012_4_0_0)
, ("2013.2.0.0", libs_2013_2_0_0)
, ("2014.2.0.0", libs_2014_2_0_0)
, ("7.10.2" , libs_7_10_2)
]
libs_2010_2_0_0 =
[ lib "array" [0,3,0,1]
, lib "base" [4,2,0,2]
, lib "bytestring" [0,9,1,7]
, lib "Cabal" [1,8,0,6]
, lib "cgi" [3001,1,7,3]
, lib "containers" [0,3,0,0]
, lib "deepseq" [1,1,0,0]
, lib "directory" [1,0,1,1]
, lib "extensible-exceptions" [0,1,1,1]
, lib "fgl" [5,4,2,3]
, lib "filepath" [1,1,0,4]
, lib "GLUT" [2,1,2,1]
, lib "haskell-src" [1,0,1,3]
, lib "haskell98" [1,0,1,1]
, lib "hpc" [0,5,0,5]
, lib "html" [1,0,1,2]
, lib "HTTP" [4000,0,9]
, lib "HUnit" [1,2,2,1]
, lib "mtl" [1,1,0,2]
, lib "network" [2,2,1,7]
, lib "old-locale" [1,0,0,2]
, lib "old-time" [1,0,0,5]
, lib "OpenGL" [2,2,3,0]
, lib "parallel" [2,2,0,1]
, lib "parsec" [2,1,0,1]
, lib "pretty" [1,0,1,1]
, lib "process" [1,0,1,3]
, lib "QuickCheck" [2,1,1,1]
, lib "random" [1,0,0,2]
, lib "regex-base" [0,93,2]
, lib "regex-compat" [0,93,1]
, lib "regex-posix" [0,94,2]
, lib "stm" [2,1,2,1]
, lib "syb" [0,1,0,2]
, lib "template-haskell" [2,4,0,1]
, lib "time" [1,1,4]
, lib "unix" [2,4,0,2]
, lib "xhtml" [3000,2,0,1]
, lib "zlib" [0,5,2,0]
]
libs_2011_2_0_0 =
[ lib "array" [0,3,0,2]
, lib "base" [4,3,1,0]
, lib "bytestring" [0,9,1,10]
, lib "Cabal" [1,10,1,0]
, lib "cgi" [3001,1,7,4]
, lib "containers" [0,4,0,0]
, lib "deepseq" [1,1,0,2]
, lib "directory" [1,1,0,0]
, lib "extensible-exceptions" [0,1,1,2]
, lib "fgl" [5,4,2,3]
, lib "filepath" [1,2,0,0]
, lib "GLUT" [2,1,2,1]
, lib "haskell-src" [1,0,1,4]
, lib "haskell2010" [1,0,0,0]
, lib "haskell98" [1,1,0,1]
, lib "hpc" [0,5,0,6]
, lib "html" [1,0,1,2]
, lib "HTTP" [4000,1,1]
, lib "HUnit" [1,2,2,3]
, lib "mtl" [2,0,1,0]
, lib "network" [2,3,0,2]
, lib "old-locale" [1,0,0,2]
, lib "old-time" [1,0,0,6]
, lib "OpenGL" [2,2,3,0]
, lib "parallel" [3,1,0,1]
, lib "parsec" [3,1,1]
, lib "pretty" [1,0,1,2]
, lib "process" [1,0,1,5]
, lib "QuickCheck" [2,4,0,1]
, lib "random" [1,0,0,3]
, lib "regex-base" [0,93,2]
, lib "regex-compat" [0,93,1]
, lib "regex-posix" [0,94,4]
, lib "stm" [2,2,0,1]
, lib "syb" [0,3]
, lib "template-haskell" [2,5,0,0]
, lib "text" [0,11,0,5]
, lib "time" [1,2,0,3]
, lib "transformers" [0,2,2,0]
, lib "unix" [2,4,2,0]
, lib "xhtml" [3000,2,0,1]
, lib "zlib" [0,5,3,1]
]
libs_2011_2_0_1 =
[ lib "array" [0,3,0,2]
, lib "base" [4,3,1,0]
, lib "bytestring" [0,9,1,10]
, lib "Cabal" [1,10,1,0]
, lib "cgi" [3001,1,7,4]
, lib "containers" [0,4,0,0]
, lib "deepseq" [1,1,0,2]
, lib "directory" [1,1,0,0]
, lib "extensible-exceptions" [0,1,1,2]
, lib "fgl" [5,4,2,3]
, lib "filepath" [1,2,0,0]
, lib "GLUT" [2,1,2,1]
, lib "haskell-src" [1,0,1,4]
, lib "haskell2010" [1,0,0,0]
, lib "haskell98" [1,1,0,1]
, lib "hpc" [0,5,0,6]
, lib "html" [1,0,1,2]
, lib "HTTP" [4000,1,1]
, lib "HUnit" [1,2,2,3]
, lib "mtl" [2,0,1,0]
, lib "network" [2,3,0,2]
, lib "old-locale" [1,0,0,2]
, lib "old-time" [1,0,0,6]
, lib "OpenGL" [2,2,3,0]
, lib "parallel" [3,1,0,1]
, lib "parsec" [3,1,1]
, lib "pretty" [1,0,1,2]
, lib "process" [1,0,1,5]
, lib "QuickCheck" [2,4,0,1]
, lib "random" [1,0,0,3]
, lib "regex-base" [0,93,2]
, lib "regex-compat" [0,93,1]
, lib "regex-posix" [0,94,4]
, lib "stm" [2,2,0,1]
, lib "syb" [0,3]
, lib "template-haskell" [2,5,0,0]
, lib "text" [0,11,0,6]
, lib "time" [1,2,0,3]
, lib "transformers" [0,2,2,0]
, lib "unix" [2,4,2,0]
, lib "xhtml" [3000,2,0,1]
, lib "zlib" [0,5,3,1]
]
libs_2011_4_0_0 =
[ lib "array" [0,3,0,2]
, lib "base" [4,3,1,0]
, lib "bytestring" [0,9,1,10]
, lib "Cabal" [1,10,2,0]
, lib "cgi" [3001,1,7,4]
, lib "containers" [0,4,0,0]
, lib "deepseq" [1,1,0,2]
, lib "directory" [1,1,0,0]
, lib "extensible-exceptions" [0,1,1,2]
, lib "fgl" [5,4,2,4]
, lib "filepath" [1,2,0,0]
, lib "GLUT" [2,1,2,1]
, lib "haskell-src" [1,0,1,4]
, lib "haskell2010" [1,0,0,0]
, lib "haskell98" [1,1,0,1]
, lib "hpc" [0,5,0,6]
, lib "html" [1,0,1,2]
, lib "HTTP" [4000,1,2]
, lib "HUnit" [1,2,4,2]
, lib "mtl" [2,0,1,0]
, lib "network" [2,3,0,5]
, lib "old-locale" [1,0,0,2]
, lib "old-time" [1,0,0,6]
, lib "OpenGL" [2,2,3,0]
, lib "parallel" [3,1,0,1]
, lib "parsec" [3,1,1]
, lib "pretty" [1,0,1,2]
, lib "process" [1,0,1,5]
, lib "QuickCheck" [2,4,1,1]
, lib "random" [1,0,0,3]
, lib "regex-base" [0,93,2]
, lib "regex-compat" [0,95,1]
, lib "regex-posix" [0,95,1]
, lib "stm" [2,2,0,1]
, lib "syb" [0,3,3]
, lib "template-haskell" [2,5,0,0]
, lib "text" [0,11,1,5]
, lib "time" [1,2,0,3]
, lib "transformers" [0,2,2,0]
, lib "unix" [2,4,2,0]
, lib "xhtml" [3000,2,0,4]
, lib "zlib" [0,5,3,1]
]
libs_2012_2_0_0 =
[ lib "array" [0,4,0,0]
, lib "base" [4,5,0,0]
, lib "bytestring" [0,9,2,1]
, lib "Cabal" [1,14,0]
, lib "cgi" [3001,1,7,4]
, lib "containers" [0,4,2,1]
, lib "deepseq" [1,3,0,0]
, lib "directory" [1,1,0,2]
, lib "extensible-exceptions" [0,1,1,4]
, lib "fgl" [5,4,2,4]
, lib "filepath" [1,3,0,0]
, lib "GLUT" [2,1,2,1]
, lib "haskell-src" [1,0,1,5]
, lib "haskell2010" [1,1,0,1]
, lib "haskell98" [2,0,0,1]
, lib "hpc" [0,5,1,1]
, lib "html" [1,0,1,2]
, lib "HTTP" [4000,2,3]
, lib "HUnit" [1,2,4,2]
, lib "mtl" [2,1,1]
, lib "network" [2,3,0,13]
, lib "old-locale" [1,0,0,4]
, lib "old-time" [1,1,0,0]
, lib "OpenGL" [2,2,3,1]
, lib "parallel" [3,2,0,2]
, lib "parsec" [3,1,2]
, lib "pretty" [1,1,1,0]
, lib "process" [1,1,0,1]
, lib "QuickCheck" [2,4,2]
, lib "random" [1,0,1,1]
, lib "regex-base" [0,93,2]
, lib "regex-compat" [0,95,1]
, lib "regex-posix" [0,95,1]
, lib "stm" [2,3]
, lib "syb" [0,3,6,1]
, lib "template-haskell" [2,7,0,0]
, lib "text" [0,11,2,0]
, lib "time" [1,4]
, lib "transformers" [0,3,0,0]
, lib "unix" [2,5,1,0]
, lib "xhtml" [3000,2,1]
, lib "zlib" [0,5,3,3]
]
libs_2012_4_0_0 =
[ lib "array" [0,4,0,0]
, lib "async" [2,0,1,3]
, lib "base" [4,5,1,0]
, lib "bytestring" [0,9,2,1]
, lib "Cabal" [1,14,0]
, lib "cgi" [3001,1,7,4]
, lib "containers" [0,4,2,1]
, lib "deepseq" [1,3,0,0]
, lib "directory" [1,1,0,2]
, lib "extensible-exceptions" [0,1,1,4]
, lib "fgl" [5,4,2,4]
, lib "filepath" [1,3,0,0]
, lib "GLUT" [2,1,2,1]
, lib "haskell-src" [1,0,1,5]
, lib "haskell2010" [1,1,0,1]
, lib "haskell98" [2,0,0,1]
, lib "hpc" [0,5,1,1]
, lib "html" [1,0,1,2]
, lib "HTTP" [4000,2,5]
, lib "HUnit" [1,2,5,1]
, lib "mtl" [2,1,2]
, lib "network" [2,3,1,0]
, lib "old-locale" [1,0,0,4]
, lib "old-time" [1,1,0,0]
, lib "OpenGL" [2,2,3,1]
, lib "parallel" [3,2,0,3]
, lib "parsec" [3,1,3]
, lib "pretty" [1,1,1,0]
, lib "primitive" [0,5,0,1]
, lib "process" [1,1,0,1]
, lib "QuickCheck" [2,5,1,1]
, lib "random" [1,0,1,1]
, lib "regex-base" [0,93,2]
, lib "regex-compat" [0,95,1]
, lib "regex-posix" [0,95,2]
, lib "split" [0,2,1,1]
, lib "stm" [2,4]
, lib "syb" [0,3,7]
, lib "template-haskell" [2,7,0,0]
, lib "text" [0,11,2,3]
, lib "time" [1,4]
, lib "transformers" [0,3,0,0]
, lib "unix" [2,5,1,1]
, lib "vector" [0,10,0,1]
, lib "xhtml" [3000,2,1]
, lib "zlib" [0,5,4,0]
]
libs_2013_2_0_0 =
[ lib "array" [0,4,0,1]
, lib "async" [2,0,1,4]
, lib "attoparsec" [0,10,4,0]
, lib "base" [4,6,0,1]
, lib "bytestring" [0,10,0,2]
, lib "Cabal" [1,16,0]
, lib "case-insensitive" [1,0,0,1]
, lib "cgi" [3001,1,7,5]
, lib "containers" [0,5,0,0]
, lib "deepseq" [1,3,0,1]
, lib "directory" [1,2,0,1]
, lib "fgl" [5,4,2,4]
, lib "filepath" [1,3,0,1]
, lib "GLUT" [2,4,0,0]
, lib "GLURaw" [1,3,0,0]
, lib "hashable" [1,1,2,5]
, lib "haskell-src" [1,0,1,5]
, lib "haskell2010" [1,1,1,0]
, lib "haskell98" [2,0,0,2]
, lib "hpc" [0,6,0,0]
, lib "html" [1,0,1,2]
, lib "HTTP" [4000,2,8]
, lib "HUnit" [1,2,5,2]
, lib "mtl" [2,1,2]
, lib "network" [2,4,1,2]
, lib "old-locale" [1,0,0,5]
, lib "old-time" [1,1,0,1]
, lib "OpenGL" [2,8,0,0]
, lib "OpenGLRaw" [1,3,0,0]
, lib "parallel" [3,2,0,3]
, lib "parsec" [3,1,3]
, lib "pretty" [1,1,1,0]
, lib "primitive" [0,5,0,1]
, lib "process" [1,1,0,2]
, lib "QuickCheck" [2,6]
, lib "random" [1,0,1,1]
, lib "regex-base" [0,93,2]
, lib "regex-compat" [0,95,1]
, lib "regex-posix" [0,95,2]
, lib "split" [0,2,2]
, lib "stm" [2,4,2]
, lib "syb" [0,4,0]
, lib "template-haskell" [2,8,0,0]
, lib "text" [0,11,3,1]
, lib "time" [1,4,0,1]
, lib "transformers" [0,3,0,0]
, lib "unordered-containers" [0,2,3,0]
, lib "unix" [2,6,0,1]
, lib "Win32" [2,3,0,0]
, lib "vector" [0,10,0,1]
, lib "xhtml" [3000,2,1]
, lib "zlib" [0,5,4,1]
]
libs_2014_2_0_0 =
[ lib "array" [0,5,0,0]
, lib "async" [2,0,1,5]
, lib "attoparsec" [0,10,4,0]
, lib "base" [4,7,0,1]
, lib "bytestring" [0,10,4,0]
, lib "Cabal" [1,18,1,3]
, lib "case-insensitive" [1,1,0,3]
, lib "containers" [0,5,5,1]
, lib "deepseq" [1,3,0,2]
, lib "directory" [1,2,1,0]
, lib "fgl" [5,5,0,1]
, lib "filepath" [1,3,0,2]
, lib "GLUT" [2,5,1,1]
, lib "GLURaw" [1,4,0,1]
, lib "hashable" [1,2,2,0]
, lib "haskell-src" [1,0,1,6]
, lib "haskell2010" [1,1,2,0]
, lib "haskell98" [2,0,0,3]
, lib "hpc" [0,6,0,1]
, lib "html" [1,0,1,2]
, lib "HTTP" [4000,2,10]
, lib "HUnit" [1,2,5,2]
, lib "mtl" [2,1,3,1]
, lib "network" [2,4,2,3]
, lib "old-locale" [1,0,0,6]
, lib "old-time" [1,1,0,2]
, lib "OpenGL" [2,9,2,0]
, lib "OpenGLRaw" [1,5,0,0]
, lib "parallel" [3,2,0,4]
, lib "parsec" [3,1,5]
, lib "pretty" [1,1,1,1]
, lib "primitive" [0,5,2,1]
, lib "process" [1,2,0,0]
, lib "QuickCheck" [2,6]
, lib "random" [1,0,1,1]
, lib "regex-base" [0,93,2]
, lib "regex-compat" [0,95,1]
, lib "regex-posix" [0,95,2]
, lib "split" [0,2,2]
, lib "stm" [2,4,2]
, lib "syb" [0,4,1]
, lib "template-haskell" [2,9,0,0]
, lib "text" [1,1,0,0]
, lib "time" [1,4,2]
, lib "transformers" [0,3,0,0]
, lib "unordered-containers" [0,2,4,0]
, lib "unix" [2,7,0,1]
, lib "vector" [0,10,9,1]
, lib "xhtml" [3000,2,1]
, lib "zlib" [0,5,4,1]
]
libs_7_10_2 =
[ lib "array" [0,5,1,0]
, lib "base" [4,8,1,0]
, lib "bytestring" [0,10,6,0]
, lib "Cabal" [1,22,4,0]
, lib "containers" [0,5,6,2]
, lib "deepseq" [1,4,1,1]
, lib "directory" [1,2,2,0]
, lib "filepath" [1,4,0,0]
, lib "hpc" [0,6,0,2]
, lib "old-locale" [1,0,0,7]
, lib "old-time" [1,1,0,3]
, lib "pretty" [1,1,2,0]
, lib "process" [1,2,3,0]
, lib "template-haskell" [2,10,0,0]
, lib "time" [1,5,0,1]
, lib "transformers" [0,4,2,0]
, lib "unix" [2,7,1,0]
, lib "xhtml" [3000,2,1]
, lib "async" [2,0,2]
, lib "attoparsec" [0,13,0,1]
, lib "case-insensitive" [1,2,0,4]
, lib "cgi" [3001,2,2,2]
, lib "exceptions" [0,8,0,2]
, lib "fgl" [5,5,2,1]
, lib "GLURaw" [1,5,0,1]
, lib "GLUT" [2,7,0,1]
, lib "hashable" [1,2,3,3]
, lib "haskell-src" [1,0,2,0]
, lib "html" [1,0,1,2]
, lib "HTTP" [4000,2,20]
, lib "HUnit" [1,2,5,2]
, lib "mtl" [2,2,1]
, lib "multipart" [0,1,2]
, lib "network" [2,6,2,1]
, lib "network-uri" [2,6,0,3]
, lib "ObjectName" [1,1,0,0]
, lib "OpenGL" [2,12,0,1]
, lib "OpenGLRaw" [2,5,1,0]
, lib "parallel" [3,2,0,6]
, lib "parsec" [3,1,9]
, lib "primitive" [0,6]
, lib "QuickCheck" [2,8,1]
, lib "random" [1,1]
, lib "regex-base" [0,93,2]
, lib "regex-compat" [0,95,1]
, lib "regex-posix" [0,95,2]
, lib "scientific" [0,3,3,8]
, lib "split" [0,2,2]
, lib "StateVar" [1,1,0,0]
, lib "stm" [2,4,4]
, lib "syb" [0,5,1]
, lib "text" [1,2,1,3]
, lib "tf-random" [0,5]
, lib "transformers-compat" [0,4,0,4]
, lib "unordered-containers" [0,2,5,1]
, lib "vector" [0,11,0,0]
, lib "zlib" [0,5,4,2]
]
type VersionBranch = [Int]
lib :: LibName -> VersionBranch -> Library
lib libName branch = (libName, V.mkVersion branch)
|
dan-t/cabal-bounds
|
lib/CabalBounds/HaskellPlatform.hs
|
bsd-3-clause
| 13,663
| 0
| 9
| 3,213
| 8,334
| 4,988
| 3,346
| 455
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Bot.Init
( onConnect
)
where
import qualified Control.Lens as Lens
import Data.Text (Text)
import qualified Bot.Config as Config
import Bot.Monad (MonadBot)
import qualified Bot.Monad as Bot
onConnect :: MonadBot m => m [Text]
onConnect = do
conf <- Bot.getConfig
pure
[ "CAP REQ :twitch.tv/tags"
, "PASS :" <> Lens.view Config.pass conf
, "NICK :" <> Lens.view Config.nick conf
, "JOIN :" <> Lens.view Config.channel conf
]
|
frublox/aichanbot
|
src/Bot/Init.hs
|
bsd-3-clause
| 558
| 0
| 11
| 162
| 145
| 82
| 63
| 16
| 1
|
module System.Nemesis.Driver where
import Control.Arrow ((>>>))
import Control.Lens
import Control.Monad.State (get, put, execStateT)
import Data.List (intercalate, isPrefixOf, sort)
import qualified Data.Map as Map
import Data.Monoid ((<>))
import Prelude hiding ((-))
import System.Environment (getArgs)
import Text.Printf (printf)
import System.Nemesis.Type
import System.Nemesis.Utils ((-), ljust)
displayName :: Task -> String
displayName t = (t ^. name : t ^. namespace) & reverse & map (printf "%-10s") & intercalate " "
showTask :: Task -> String
showTask = showWithLeftJust 44
showWithLeftJust :: Int -> Task -> String
showWithLeftJust n task =
case task ^. description of
Nothing -> fullName task
Just x -> fullName task & ljust n ' ' & (<> x)
run :: Unit -> IO ()
run unit = do
args <- getArgs
case args of
[] -> help
_target:_ -> execStateT unit (emptyNemesis & target .~ _target) >>= runNemesis
where
help = execStateT unit (emptyNemesis) >>= list_task
list_task n = do
let _tasks = n ^. tasks & Map.elems
_task_len = _tasks & map (fullName >>> length) & maximum & (+ 5)
br
n ^. tasks & Map.elems & sort & map (showWithLeftJust _task_len) & traverse putStrLn
br
br = putStrLn ""
insertTask :: Task -> Unit
insertTask t = do
n <- get
let _description = n ^. currentDesc
_namespace = n ^. currentNamespace
_deps = t ^. deps & map (withCurrent _namespace)
_task = t
& deps .~ _deps
& description .~ _description
& namespace .~ _namespace
_tasks = n ^. tasks & Map.insert (_task & fullName) _task
put - n
& tasks .~ _tasks
& currentDesc .~ mempty
where
withCurrent aNamespace x
| "/" `isPrefixOf` x = tail x
| otherwise = (x : aNamespace) & reverse & intercalate "/"
runNemesis :: Nemesis -> IO ()
runNemesis n = run' (n ^. target)
where
run' :: String -> IO ()
run' s = case n ^. (tasks . at s) of
Nothing -> bye
Just x -> run_task x
where
bye = do
printf "%s does not exist!" s
run_task :: Task -> IO ()
run_task t = do
t ^. deps & traverse run'
t ^. action & unShowIO
|
nfjinjing/nemesis
|
src/System/Nemesis/Driver.hs
|
bsd-3-clause
| 2,386
| 0
| 17
| 759
| 829
| 430
| 399
| 64
| 2
|
-----------------------------------------------------------
-- |
-- Module : PrintQuery.hs
-- Copyright : haskelldb-users@lists.sourceforge.net
-- License : BSD-style
--
-- Maintainer : haskelldb-users@lists.sourceforge.net
-- Stability : experimental
-- Portability : non portable
-- Author : Justin Bailey (jgbailey AT gmail DOT com)
-- Pretty printing for Query, PrimQuery, and SqlSelect values.
-- Useful for debugging the library.
--
-----------------------------------------------------------
module Database.HaskellDB.PrintQuery
(ppQuery, ppQueryUnOpt
, ppSelect, ppSelectUnOpt, ppSqlSelect, ppPrim
, Database.HaskellDB.PrintQuery.ppSql, Database.HaskellDB.PrintQuery.ppSqlUnOpt)
where
import Prelude hiding ((<>))
import Database.HaskellDB.PrimQuery
import Database.HaskellDB.Sql
import Database.HaskellDB.Query (Query, runQuery, Rel)
import Database.HaskellDB.Optimize (optimize)
import Database.HaskellDB.Sql.Generate (sqlQuery)
import Database.HaskellDB.Sql.Default (defaultSqlGenerator)
import Database.HaskellDB.Sql.Print as Sql (ppSql)
import Text.PrettyPrint.HughesPJ
-- | Optimize a query, turn it into a SqlSelect and print the generated SQL.
ppSql :: Query (Rel r) -> Doc
ppSql qry = Sql.ppSql . sqlQuery defaultSqlGenerator . optimize $ runQuery qry
-- | Take a query, turn it into a SqlSelect without optimizing and print the generated SQL.
ppSqlUnOpt :: Query (Rel r) -> Doc
ppSqlUnOpt qry = Sql.ppSql . sqlQuery defaultSqlGenerator $ runQuery qry
-- | Take a query, turn it into a SqlSelect and print it.
ppSelect :: Query (Rel r) -> Doc
ppSelect qry = ppPQ (sqlQuery defaultSqlGenerator) optimize (runQuery $ qry)
-- | Take a query, turn it into a SqlSelect and print it, without optimizations.
ppSelectUnOpt :: Query (Rel r) -> Doc
ppSelectUnOpt qry = ppPQ (sqlQuery defaultSqlGenerator) id (runQuery $ qry)
-- | Optimize the query and pretty print the primitive representation.
ppQuery :: Query (Rel r) -> Doc
ppQuery qry = ppPrimF optimize (runQuery $ qry)
-- | Pretty print the primitive representation of an unoptimized query.
ppQueryUnOpt :: Query (Rel r) -> Doc
ppQueryUnOpt qry = ppPrimF id (runQuery $ qry)
-- | Pretty print a PrimQuery value.
ppPrim :: PrimQuery -> Doc
ppPrim = ppPrimF id
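-- For example (a sketch; the exact layout depends on the pretty-printer):
--
-- > ppPrim (BaseTable "users" ["id", "name"])
--
-- renders along the lines of @BaseTable: users [id, name]@.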
-- | Transform a PrimQuery according to the function given, then
-- pretty print it.
ppPrimF :: (PrimQuery -> PrimQuery) -- ^ Transformation function to apply to PrimQuery first.
-> PrimQuery -- ^ PrimQuery to print.
-> Doc
ppPrimF f qry = ppPrimF' (f qry)
where
ppPrimF' (BaseTable tableName scheme) =
hang (text "BaseTable" <> colon <+> text tableName)
nesting
(brackets (fsep $ punctuate comma (map text scheme)))
ppPrimF' (Project assoc primQuery) =
hang (text "Project")
nesting (brackets (ppAssoc assoc) $+$
parens (ppPrimF' primQuery))
ppPrimF' (Restrict primExpr primQuery) =
hang (text "Restrict")
nesting
(ppExpr primExpr $+$ ppPrimF' primQuery)
ppPrimF' (Group assoc primQuery) =
hang (text "Group")
nesting
(brackets (ppAssoc assoc) $+$
parens (ppPrimF' primQuery))
ppPrimF' (Binary relOp primQueryL primQueryR) =
hang (text "Binary:" <+> text (show relOp))
nesting
(parens (ppPrimF' primQueryL) $+$
parens (ppPrimF' primQueryR))
ppPrimF' (Special specialOp primQuery) =
hang (text "Special:" <+> text (show specialOp))
nesting
(parens (ppPrimF' primQuery))
ppPrimF' Empty = text "Empty"
-- | Pretty print an Assoc list (i.e. columns and expression).
ppAssoc :: Assoc -> Doc
ppAssoc assoc = fsep . punctuate comma . map (\(a, e) -> text a <> colon <+> ppExpr e) $ assoc
-- | Pretty print an PrimExpr value.
ppExpr :: PrimExpr -> Doc
ppExpr = text . show
ppPQ :: (PrimQuery -> SqlSelect) -- ^ Function to turn primitive query into a SqlSelect.
-> (PrimQuery -> PrimQuery) -- ^ Transformation to apply to query, if any.
-> PrimQuery -- ^ The primitive query to transform and print.
-> Doc
ppPQ select trans prim = ppSqlSelect . select . trans $ prim
ppSqlSelect :: SqlSelect -> Doc
ppSqlSelect (SqlBin string sqlSelectL sqlSelectR) =
hang (text "SqlBin:" <+> text string) nesting
(parens (ppSqlSelect sqlSelectL) $+$
parens (ppSqlSelect sqlSelectR))
ppSqlSelect (SqlTable sqlTable) = text "SqlTable:" <+> text sqlTable
ppSqlSelect SqlEmpty = text "SqlEmpty"
ppSqlSelect (SqlSelect options attrs tables criteria groupby orderby extra) =
hang (text "SqlSelect") nesting $
hang (text "attrs:") nesting (brackets . fsep . punctuate comma . map ppAttr $ attrs) $+$
text "criteria:" <+> (brackets . fsep . punctuate comma . map ppSqlExpr $ criteria) $+$
hang (text "tables:") nesting (brackets . fsep . punctuate comma . map ppTable $ tables) $+$
maybe (text "groupby: empty") ppGroupBy groupby $+$
hang (text "orderby:") nesting (brackets . fsep . punctuate comma . map ppOrder $ orderby) $+$
text "extras:" <+> (brackets . fsep. punctuate comma . map text $ extra) $+$
text "options:" <+> (brackets . fsep . punctuate comma . map text $ options)
ppGroupBy All = text "groupby: all"
ppGroupBy (Columns cs) = hang (text "groupby:") nesting (brackets . fsep . punctuate comma . map ppAttr $ cs)
ppTable :: (SqlTable, SqlSelect) -> Doc
ppTable (tbl, select) =
if null tbl
then ppSqlSelect select
else hang (text tbl <> colon) nesting (ppSqlSelect select)
ppAttr :: (SqlColumn, SqlExpr) -> Doc
ppAttr (col, expr) = text col <> colon <+> ppSqlExpr expr
ppOrder :: (SqlExpr, SqlOrder) -> Doc
ppOrder (expr, order) = parens (ppSqlExpr expr) <+> text (show order)
ppSqlExpr :: SqlExpr -> Doc
ppSqlExpr sql = text $ show sql
-- | Nesting level.
nesting :: Int
nesting = 2
|
m4dc4p/haskelldb
|
src/Database/HaskellDB/PrintQuery.hs
|
bsd-3-clause
| 5,784
| 0
| 20
| 1,160
| 1,619
| 839
| 780
| 98
| 7
|
module Main where
import ABS
(x:i:ni:num_div:obj:i_divides:f:n:primeb:reminder:res:the_end) = [1..]
main_ :: Method
main_ [] this wb k =
Assign n (Val (I 2000)) $
Assign x (Sync is_prime [n]) $
k
is_prime :: Method
is_prime [pn] this wb k =
Assign i (Val (I 1)) $
Assign ni (Val (I pn)) $
Assign num_div (Val (I 0)) $
While (ILTE (Attr i) (Attr ni)) (\k' ->
Assign obj New $
Assign f (Async obj divides [i,ni]) $
Await f $
Assign i_divides (Get f) $
Assign num_div (Val (Add (Attr num_div) (Attr i_divides))) $
Assign i (Val (Add (Attr i) (I 1))) $
k'
) $
If (IEq (Attr num_div) (I 2))
(\k' -> Assign primeb (Val (I 1)) k')
(\k' -> Assign primeb (Val (I 0)) k') $
Return primeb wb k
divides :: Method
divides [pd, pn] this wb k =
Assign reminder (Val (Mod (I pn) (I pd)) ) $
If (IEq (Attr reminder) (I 0))
(\k' -> Assign res (Val (I 1)) k')
(\k' -> Assign res (Val (I 0)) k' ) $
Return res wb k
main' :: IO ()
main' = run' 1000000 main_ (head the_end)
main :: IO ()
main = printHeap =<< run 1000000 main_ (head the_end)
|
abstools/abs-haskell-formal
|
benchmarks/2_primality_test/progs/2000.hs
|
bsd-3-clause
| 1,111
| 0
| 20
| 295
| 677
| 340
| 337
| 37
| 1
|
{-# LANGUAGE QuasiQuotes, TypeFamilies, TemplateHaskell, MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE OverloadedStrings #-} -- hah, the test should be renamed...
-- Not actually a problem, we're now requiring overloaded strings, we just need
-- to make the docs more explicit about it.
module YesodCoreTest.NoOverloadedStringsSub where
import Yesod.Core
import Yesod.Core.Types
data Subsite = Subsite (forall master. Yesod master => YesodSubRunnerEnv Subsite master (HandlerT master IO) -> Application)
mkYesodSubData "Subsite" [parseRoutes|
/bar BarR GET
/baz BazR GET
/bin BinR GET
/has-one-piece/#Int OnePiecesR GET
/has-two-pieces/#Int/#Int TwoPiecesR GET
/has-three-pieces/#Int/#Int/#Int ThreePiecesR GET
|]
instance Yesod master => YesodSubDispatch Subsite (HandlerT master IO) where
yesodSubDispatch ysre =
f ysre
where
Subsite f = ysreGetSub ysre $ yreSite $ ysreParentEnv ysre
|
tolysz/yesod
|
yesod-core/test/YesodCoreTest/NoOverloadedStringsSub.hs
|
mit
| 1,003
| 0
| 13
| 154
| 141
| 76
| 65
| 14
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Foreign.Lua.ModuleTests
Copyright : Β© 2019 Albert Krewinkel
License : MIT
Maintainer : Albert Krewinkel <albert+hslua@zeitkraut.de>
Stability : alpha
Portability : Requires GHC 8 or later.
Tests creating and loading of modules with Haskell.
-}
module Foreign.Lua.ModuleTests (tests) where
import Foreign.Lua (Lua)
import Foreign.Lua.Module (addfield, addfunction, create, preloadhs, requirehs)
import Test.HsLua.Util ((=:), pushLuaExpr, shouldBeResultOf)
import Test.Tasty (TestTree, testGroup)
import qualified Foreign.Lua as Lua
-- | Tests for creating and loading modules.
tests :: TestTree
tests = testGroup "Module"
[ testGroup "requirehs"
[ "pushes module to stack" =:
1 `shouldBeResultOf` do
Lua.openlibs
old <- Lua.gettop
requirehs "foo" (Lua.pushnumber 5.0)
new <- Lua.gettop
return (new - old)
, "module can be loaded with `require`" =:
let testModule = "string as a module" :: String
in testModule `shouldBeResultOf` do
Lua.openlibs
requirehs "test.module" (Lua.push testModule)
pushLuaExpr "require 'test.module'"
Lua.peek Lua.stackTop
]
, testGroup "preloadhs"
[ "does not modify the stack" =:
0 `shouldBeResultOf` do
Lua.openlibs
old <- Lua.gettop
preloadhs "foo" (1 <$ Lua.pushnumber 5.0)
new <- Lua.gettop
return (new - old)
, "module can be loaded with `require`" =:
let testModule = "string as a module" :: String
in testModule `shouldBeResultOf` do
Lua.openlibs
preloadhs "test.module" (1 <$ Lua.push testModule)
pushLuaExpr "require 'test.module'"
Lua.peek Lua.stackTop
]
, testGroup "creation helpers"
[ "create produces a table" =:
Lua.TypeTable `shouldBeResultOf` do
create
Lua.ltype Lua.stackTop
, "addfield modifies table" =:
Lua.Integer 23 `shouldBeResultOf` do
create
addfield "field_name" (23 :: Int)
Lua.getfield Lua.stackTop "field_name"
Lua.peek Lua.stackTop
, "addfunction modifies table" =:
Lua.Integer 5 `shouldBeResultOf` do
create
addfunction "minus18" (return . subtract 18 :: Int -> Lua Int)
Lua.getfield Lua.stackTop "minus18"
Lua.pushinteger 23
Lua.call 1 1
Lua.peek Lua.stackTop
]
]
|
osa1/hslua
|
test/Foreign/Lua/ModuleTests.hs
|
mit
| 2,448
| 0
| 19
| 641
| 567
| 286
| 281
| 58
| 1
|
{-| Module : Logger
License : GPL
Maintainer : helium@cs.uu.nl
Stability : experimental
Portability : portable
-}
module Helium.Utils.Logger ( logger, logInternalError ) where
import Network
import Control.Concurrent
import Control.Monad
import System.Environment
import Data.Char
import Data.Maybe
import Helium.Main.Args
import System.IO
import Helium.Main.Version
import qualified Control.Exception as CE (catch, IOException)
{-# NOINLINE logger #-}
---------------------------------------------------
-- Global variables and settings
-- Some additional ones are in Args.hs
loggerDELAY, loggerTRIES :: Int
loggerDELAY = 10000 -- in micro-seconds
loggerTRIES = 2
loggerINTERNALERRORHOSTNAME :: String
loggerINTERNALERRORHOSTNAME = "helium.zoo.cs.uu.nl"
loggerINTERNALERRORPORTNR :: Int
loggerINTERNALERRORPORTNR = loggerDEFAULTPORT
loggerSEPARATOR, loggerTERMINATOR, loggerUSERNAME, loggerDEFAULTNAME :: String
loggerSEPARATOR = "\NUL\NUL\n"
loggerTERMINATOR = "\SOH\SOH\n"
loggerUSERNAME = "USERNAME"
loggerDEFAULTNAME = "unknown"
loggerADMINSEPARATOR, escapeChar :: Char
loggerADMINSEPARATOR = '|'
escapeChar = '\\'
loggerESCAPABLES :: String
loggerESCAPABLES = [loggerADMINSEPARATOR, escapeChar]
alertESCAPABLES :: String
alertESCAPABLES = "\""
debug :: String -> Bool -> IO ()
debug s loggerDEBUGMODE = when loggerDEBUGMODE (putStrLn s)
-- Make sure that options that contain a space are quoted with double quotes.
-- And all double quotes in the options are escaped.
unwordsQuoted :: [String] -> String
unwordsQuoted wrds = unwords (map (quote . escape alertESCAPABLES) wrds)
where
quote s = if ' ' `elem` s then "\"" ++ s ++ "\"" else s -- Not efficient, but balanced.
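-- For example (illustrative only):
--   unwordsQuoted ["--flag", "a b"]  ==  "--flag \"a b\""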
------------------------------------------------------
-- Normalization/escaping functions
normalizeName :: String -> String
normalizeName name = let
newname = map toLower (filter isAlphaNum name)
in
if null newname then loggerDEFAULTNAME else newname
-- Escapes all characters from the list escapables
escape :: String -> String -> String
escape _ [] = []
escape escapables (x:xs) =
if x `elem` escapables
then escapeChar : rest
else rest
where
rest = x : escape escapables xs
-- Remove line breaks and escape special characters
normalize :: String -> String
normalize = escape loggerESCAPABLES . filter ('\n' /=)
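-- For example (illustrative only):
--   escape loggerESCAPABLES "a|b"  ==  "a\\|b"
--   normalize "two\nlines|here"    ==  "twolines\\|here"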
logInternalError :: Maybe ([String],String) -> IO ()
logInternalError maybeSources =
logger "I" maybeSources internalErrorOptions
where
internalErrorOptions = [EnableLogging, Host loggerINTERNALERRORHOSTNAME, Port loggerINTERNALERRORPORTNR]
------------------------------------------------------
-- The function to send a message to a socket
-- TODO : decide whether we really don't want to send interpreter input.
logger :: String -> Maybe ([String],String) -> [Option] -> IO ()
logger logcode maybeSources options =
let
debugLogger = DebugLogger `elem` options
reallyLog = EnableLogging `elem` options -- We use that the presence of an alert adds EnableLogging in Options.hs
hostName = fromMaybe loggerDEFAULTHOST (hostFromOptions options)
portNumber = fromMaybe loggerDEFAULTPORT (portFromOptions options)
handlerDef :: CE.IOException -> IO String
handlerDef _ = return loggerDEFAULTNAME
handlerTerm :: CE.IOException -> IO String
handlerTerm _ = return loggerTERMINATOR
in
when reallyLog $ do
debug (hostName ++ ":" ++ show portNumber) debugLogger
username <- getEnv loggerUSERNAME `CE.catch` handlerDef
optionString <- getArgs
sources <- case maybeSources of
Nothing ->
return loggerTERMINATOR
Just (imports,hsFile) ->
do let allHsFiles = hsFile:imports
allFiles = allHsFiles ++ map toTypeFile allHsFiles
xs <- mapM (getContentOfFile debugLogger) allFiles
return (concat (loggerSEPARATOR:xs)++loggerTERMINATOR)
`CE.catch` handlerTerm
{- putStr (normalizeName username ++
(loggerADMINSEPARATOR : normalize logcode) ++
(loggerADMINSEPARATOR : normalize version) ++
(loggerADMINSEPARATOR : normalize (unwords optionString)) ++
"\n" ++sources) -}
let alertLogcode = if hasAlertOption options then map toLower logcode else map toUpper logcode
sendLogString hostName
portNumber
(normalizeName username ++
(loggerADMINSEPARATOR : normalize alertLogcode) ++
(loggerADMINSEPARATOR : normalize version) ++
(loggerADMINSEPARATOR : normalize (unwordsQuoted optionString)) ++
"\n" ++sources
)
debugLogger
toTypeFile :: String -> String
toTypeFile fullName = fullNameNoExt ++ ".type"
where
(path, baseName, _) = splitFilePath fullName
fullNameNoExt = combinePathAndFile path baseName
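-- For illustration (hypothetical path, following the definition above):
--
-- >>> toTypeFile "src/Main.hs"
-- "src/Main.type"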
getContentOfFile :: Bool -> String -> IO String
getContentOfFile loggerDEBUGMODE name =
do program <- readFile name
debug ("Logging file " ++ name) loggerDEBUGMODE
return ( fileNameWithoutPath name
++ "\n"
++ program
++ "\n"
++ loggerSEPARATOR
)
`CE.catch` handler
where
handler :: CE.IOException -> IO String
handler _ = return ""
-- isInterpreterModule :: Maybe ([String],String) -> Bool
-- isInterpreterModule Nothing = False
-- isInterpreterModule (Just (_, hsFile)) = fileNameWithoutPath hsFile == "Interpreter.hs"
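-- Opens a connection to the given host and port and sends the message,
-- making at most loggerTRIES attempts and waiting loggerDELAY microseconds
-- between attempts.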
sendLogString :: String -> Int -> String -> Bool -> IO ()
sendLogString hostName portNr message loggerDEBUGMODE = withSocketsDo (rec_ 0)
where
rec_ i = do --installHandler sigPIPE Ignore Nothing
handle <- connectTo hostName (PortNumber (fromIntegral portNr))
hSetBuffering handle (BlockBuffering (Just 1024))
sendToAndFlush handle message loggerDEBUGMODE
`CE.catch`
\exception ->
if i+1 >= loggerTRIES
then debug ( "Could not make a connection: no send (" ++ show (exception :: CE.IOException) ++ ")" ) loggerDEBUGMODE
else do debug ( "Could not make a connection: sleeping (" ++ show exception ++ ")" ) loggerDEBUGMODE
threadDelay loggerDELAY
rec_ (i+1)
{- from Utils.hs; because of the import dependencies, it is not possible to import
   these functions directly -}
splitFilePath :: String -> (String, String, String)
splitFilePath filePath =
let slashes = "\\/"
(revFileName, revPath) = span (`notElem` slashes) (reverse filePath)
(baseName, ext) = span (/= '.') (reverse revFileName)
in (reverse revPath, baseName, dropWhile (== '.') ext)
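-- For illustration (hypothetical path, following the definition above):
--
-- >>> splitFilePath "dir/sub/Main.hs"
-- ("dir/sub/","Main","hs")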
combinePathAndFile :: String -> String -> String
combinePathAndFile path file =
case path of
"" -> file
_ | last path == '/' -> path ++ file
| otherwise -> path ++ "/" ++ file
fileNameWithoutPath :: String -> String
fileNameWithoutPath filePath =
let slashes = "\\/"
(revFileName, _) = span (`notElem` slashes) (reverse filePath)
in reverse revFileName
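-- For illustration (hypothetical paths, following the definitions above):
--
-- >>> combinePathAndFile "dir" "Main.hs"
-- "dir/Main.hs"
-- >>> fileNameWithoutPath "dir/sub/Main.hs"
-- "Main.hs"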
sendToAndFlush :: Handle -- Hostname
-> String -- Message to send
-> Bool -- Debug logger?
-> IO ()
sendToAndFlush handle msg loggerDEBUGMODE = do
hPutStr handle msg
hPutStr handle loggerSEPARATOR
hFlush handle
-- b1 <- hIsWritable s
-- b2 <- hIsReadable s
-- putStrLn ((if b1 then "writable" else "not writable") ++ " and " ++
-- (if b2 then "readable" else "not readable"))
debug "Waiting for a handshake" loggerDEBUGMODE
handshake <- getRetriedLine 0
debug ("Received a handshake: " ++ show handshake) loggerDEBUGMODE
-- hClose handle
where
getRetriedLine i = hGetLine handle `CE.catch` handler i
handler :: Int -> CE.IOException -> IO String
handler j _ =
if j+1 >= loggerTRIES
then do
debug "Did not receive anything back" loggerDEBUGMODE
return ""
else do
debug "Waiting to try again" loggerDEBUGMODE
threadDelay loggerDELAY
getRetriedLine (j+1)
| roberth/uu-helium | src/Helium/Utils/Logger.hs | gpl-3.0 | 8,870 | 0 | 22 | 2,614 | 1,802 | 942 | 860 | 154 | 3 |