code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE NoImplicitPrelude #-}
-- | Thin bridge to the interpreter's native C++ string type.
-- NOTE: the @builtin@ declarations below are a BAli-Phy compiler extension,
-- not standard Haskell.
module Foreign.String where
import Compiler.Num -- for (-)
-- | Opaque type standing for a native C++ string value.
data CPPString
-- Bind native functions (name, arity, native symbol, providing module):
-- element access by index, and string length.
builtin getStringElement 2 "getStringElement" "Vector"
builtin sizeOfString 1 "sizeOfString" "Vector"
-- | Unpack a native C++ string into a Haskell list, element by element,
-- indexing from 0 to length-1.
unpack_cpp_string s = [getStringElement s i | i <- [0..sizeOfString s - 1]]
-- | Synonym for 'unpack_cpp_string' (presumably used by the compiler when
-- desugaring string literals -- TODO confirm against the compiler sources).
listFromString s = unpack_cpp_string s
| bredelings/BAli-Phy | haskell/Foreign/String.hs | gpl-2.0 | 331 | 0 | 10 | 47 | 87 | 44 | 43 | -1 | -1 |
{-|
Module : Matchings
Description : Provides some functions on matchings in graphs
Licence : LGPL-2.1
Maintainer : Manuel Eberl <last name + m _at_ in.tum.de>
Stability : experimental
This module provides some algorithms related to matchings in graphs, most notably an implementation of
Edmond's blossom algorithm, which computes maximum matchings for graphs.
Definitions:
* A /matching/ is a subset of the edges of a graph such that no two edges in it are incident to the same node.
* A /maximal matching/ is a matching that cannot be made any larger, i.e. no additional
edge can be added to it without violating the property of node-disjoint edges.
* A /maximum matching/ is a matching such that no other matching contains more edges.
In this list, the given notions are strictly increasing in strength. In particular, note that every maximum matching
is also maximal, but not every maximal matching is a maximum one.
Our implementation of Edmond's blossom algorithm is an adaptation of the Java implementation in JGraphT:
<https://github.com/jgrapht/jgrapht/blob/master/jgrapht-core/src/main/java/org/jgrapht/alg/EdmondsBlossomShrinking.java>
-}
module Data.Graph.Inductive.Query.MaximumMatching
(isMatching, isMaximalMatching, isMaximumMatching, maximalMatchings, maximumMatching) where
import Control.Monad
import Data.Graph.Inductive.Graph
import Data.Graph.Inductive.Basic
import Data.Maybe (fromJust)
import Data.Set (Set)
import qualified Data.Set as S
import Data.Map (Map)
import qualified Data.Map as M
import Data.List (nub, (\\))
type Matching = Map Node Node
-- | Insert every element of a list into a set.
insertList :: Ord a => [a] -> Set a -> Set a
insertList = flip S.union . S.fromList
-- | Point every key in the list at the given value, keeping other entries.
updateList :: Ord k => [k] -> v -> Map k v -> Map k v
updateList ks v m = foldr (\k acc -> M.insert k v acc) m ks
-- | Like 'M.lookup', but in the list monad: zero or one result.
lookup' :: Ord k => k -> Map k v -> [v]
lookup' k m = case M.lookup k m of
    Just x  -> [x]
    Nothing -> []
-- | Determines whether a given set of edges is a matching: every edge must
-- be an edge of the graph, and no two edges may share an endpoint.
isMatching :: Graph gr => gr a b -> [Edge] -> Bool
isMatching g m = null (m \\ edges g) && distinct endpoints
  where endpoints = concatMap (\(a,b) -> [a,b]) m
        -- Set-based distinctness: O(n log n) instead of the previous
        -- O(n^2) 'nub xs == xs' comparison; Node has Ord, so the result
        -- is identical.
        distinct xs = S.size (S.fromList xs) == length xs
-- | Determines whether a given set of edges is a maximal matching, i.e. a
-- matching that cannot be extended by adding another edge: every graph edge
-- must already touch a matched node.
isMaximalMatching :: Graph gr => gr a b -> [Edge] -> Bool
isMaximalMatching g m = isMatching g m && all covered (edges g)
  where covered (a,b) = a `elem` ns || b `elem` ns
        ns = nub $ concatMap (\(a,b) -> [a,b]) m
-- | Computes all maximal matchings in a graph, by a breadth-first
-- enumeration that tries to extend each candidate matching one edge at a
-- time.
maximalMatchings :: Graph gr => gr a b -> [[Edge]]
maximalMatchings g = f [([], edges g)] []
  where -- First parameter: list of matchings with the set of edges that could still be added to it
        -- Second parameter: accumulator of matchings that have already been processed in this pass, but can still be
        --   enlarged in the next pass.
        -- When we have a matching that cannot be enlarged with an additional edge, we return it; otherwise,
        --   we put in all the ways in which it can be enlarged for the next pass.
        f :: [([Edge], [Edge])] -> [([Edge], [Edge])] -> [[Edge]]
        f [] [] = []
        f [] ms' = f ms' []
        -- No candidate edges left: emit the matching only if it is maximal.
        f ((m,[]):ms) ms' = if isMaximalMatching g m then m : f ms ms' else f ms ms'
        -- Extend m by each remaining candidate edge; the surviving
        -- candidates are those not incident to the chosen edge (a,b).
        f ((m,es):ms) ms' = f ms (m' ++ ms')
          where m' = do ((a,b), es') <- suffixes es
                        return ((a,b):m, [(c,d) | (c,d) <- es', a /= c, b /= c, a /= d, b /= d])
        -- Chooses an element of the list and returns that element and all its successors.
        suffixes :: [c] -> [(c, [c])]
        suffixes [] = []
        suffixes (x:xs) = (x,xs) : suffixes xs
-- | Determines whether the given set of edges is a maximum matching, i.e.
-- whether no matching in the graph has more edges.
isMaximumMatching :: Graph gr => gr a b -> [Edge] -> Bool
isMaximumMatching g m
  | isMatching g m = length m == length (maximumMatching g)
  | otherwise      = False
-- | Computes a maximum matching of the given graph using Edmond's blossom algorithm.
maximumMatching :: Graph gr => gr a b -> [Edge]
-- The matching map stores both directions of every matched pair; here each
-- pair is listed once (v <= w), oriented the way it appears in the graph's
-- edge list.
maximumMatching g = [if (v,w) `S.member` edgeSet then (v,w) else (w,v)
                    | v <- nodes g, w <- lookup' v matching, v <= w]
  where edgeSet = S.fromList (edges g)
        -- Grow the matching by an augmenting search from every node in turn.
        matching = foldl (matchNode g) M.empty (nodes g)
-- One phase of the blossom algorithm: search for an augmenting path
-- starting at the unmatched node 'root' and, if one is found, flip the
-- matching along it. The BFS state is (queue, visited set, predecessor
-- map p, blossom-base map).
matchNode :: Graph gr => gr a b -> Matching -> Node -> Matching
matchNode g matching root
  | root `M.member` matching = matching
  | otherwise = constructMatching $
      matchNode' ([root], S.singleton root, M.empty, M.fromList [(i,i) | i <- nodes g])
  where -- Walk back along the discovered augmenting path (predecessor map
        -- p, exposed endpoint v), swapping matched/unmatched edges.
        constructMatching :: (Matching, Maybe Node) -> Matching
        constructMatching (p, v) =
            let constructMatching' matching Nothing = matching
                constructMatching' matching (Just v) =
                    let w = fromJust (M.lookup v p)
                    in constructMatching' (M.insert w v (M.insert v w matching)) (M.lookup w matching)
            in constructMatching' matching v
        -- BFS driver: take a node off the queue and scan its neighbours;
        -- 'go' either extends the state (Right) or reports success (Left).
        matchNode' :: ([Node], Set Node, Map Node Node, Map Node Node) -> (Map Node Node, Maybe Node)
        matchNode' (q, used, p, base) =
            case q of
              [] -> (p, Nothing)
              v:q' -> either id matchNode' (foldM (go v) (q',used,p,base) (neighbors g v))
        -- Process one edge (v,w) of the search.
        go :: Node -> ([Node], Set Node, Map Node Node, Map Node Node) -> Node ->
              Either (Map Node Node, Maybe Node) ([Node], Set Node, Map Node Node, Map Node Node)
        go v (q, used, p, base) w
          -- Both endpoints are in the same blossom: nothing to do.
          | M.lookup v base == M.lookup w base = Right (q, used, p, base)
          -- (v,w) is a matched edge: skip it.
          | M.lookup v matching == Just w = Right (q, used, p, base)
          -- An odd cycle (blossom) was closed: shrink it onto its base.
          | w == root || maybe False (`M.member` p) (M.lookup w matching) =
              let curBase = lca base p v w
                  (blossom, p') = markPath base w curBase v (markPath base v curBase w (S.empty, p))
                  -- all nodes whose base lies inside the blossom ...
                  xs = [x | x <- nodes g, y <- lookup' x base, y `S.member` blossom]
                  -- ... of which the not-yet-visited ones get enqueued
                  xs' = [x | x <- xs, x `S.notMember` used]
              in Right (xs' ++ q, insertList xs' used, p', updateList xs curBase base)
          -- First visit of w: record v as its predecessor. If w is
          -- unmatched we found an augmenting path (Left); otherwise
          -- continue the search from w's partner.
          | w `M.notMember` p =
              let p' = M.insert w v p
              in case M.lookup w matching of
                   Nothing -> Left (p', Just w)
                   Just w' -> Right (w':q, S.insert w' used, p', base)
          | otherwise = Right (q, used, p, base)
        -- Walk from v towards the blossom base b, collecting the bases of
        -- the traversed nodes and repointing their path predecessors to c.
        markPath :: Map Node Node -> Node -> Node -> Node -> (Set Node, Map Node Node) ->
                    (Set Node, Map Node Node)
        markPath base v b c (blossom, p)
          | M.lookup v base == Just b = (blossom, p)
          | otherwise =
              let w = fromJust $ M.lookup v matching
                  bv = fromJust $ M.lookup v base
                  bw = fromJust $ M.lookup w base
                  v' = fromJust $ M.lookup w p
              in markPath base v' b w (S.insert bw (S.insert bv blossom), M.insert v c p)
        -- Lowest common ancestor of a and b in the alternating forest, in
        -- terms of blossom bases: first mark all of a's ancestors ('f'),
        -- then walk up from b until a marked base is met ('g').
        lca :: Map Node Node -> Map Node Node -> Node -> Node -> Node
        lca base p a b = g b
          where f seen x =
                    let x' = fromJust $ M.lookup x base
                        seen' = S.insert x' seen
                    in case M.lookup x' matching of
                         Nothing -> seen'
                         Just y -> f seen' (fromJust (M.lookup y p))
                seen = f S.empty a
                g x =
                    let x' = fromJust $ M.lookup x base
                    in if x' `S.member` seen then
                           x'
                       else
                           g (fromJust (M.lookup (fromJust (M.lookup x' matching)) p))
| 3of8/haskell_playground | maximum-matching/Data/Graph/Inductive/Query/MaximumMatching.hs | gpl-2.0 | 7,861 | 0 | 21 | 2,421 | 2,552 | 1,341 | 1,211 | 101 | 6 |
{-
Copyright 2012-2015 Vidar Holen
This file is part of ShellCheck.
http://www.vidarholen.net/contents/shellcheck
ShellCheck is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ShellCheck is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
import Control.Exception
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.Error
import Control.Monad.Trans.List
import Data.Char
import Data.List
import Data.Maybe
import Data.Monoid
import GHC.Exts
import GHC.IO.Device
import Prelude hiding (catch)
import ShellCheck.Data
import ShellCheck.Options
import ShellCheck.Simple
import ShellCheck.Analytics
import System.Console.GetOpt
import System.Directory
import System.Environment
import System.Exit
import System.Info
import System.IO
import Text.JSON
import qualified Data.Map as Map
-- | A parsed command-line flag: name and value ("true" for bare switches).
data Flag = Flag String String
-- | Overall outcome, ordered from best to worst; combining outcomes keeps
-- the worst one (see the Monoid instance below).
data Status = NoProblems | SomeProblems | BadInput | SupportFailure | SyntaxFailure | RuntimeException deriving (Ord, Eq)
-- | A checker comment paired with the file it came from, for JSON output.
data JsonComment = JsonComment FilePath ShellCheckComment
instance Error Status where
    noMsg = RuntimeException
instance Monoid Status where
    mempty = NoProblems
    mappend = max
header = "Usage: shellcheck [OPTIONS...] FILES..."
-- | GetOpt descriptors for the supported command-line options.
options = [
    Option "e" ["exclude"]
        (ReqArg (Flag "exclude") "CODE1,CODE2..") "exclude types of warnings",
    Option "f" ["format"]
        (ReqArg (Flag "format") "FORMAT") "output format",
    Option "s" ["shell"]
        (ReqArg (Flag "shell") "SHELLNAME") "Specify dialect (bash,sh,ksh)",
    Option "V" ["version"]
        (NoArg $ Flag "version" "true") "Print version information"
    ]
-- | Write a message to stderr.
printErr = hPutStrLn stderr
-- JSON serialisation of one comment; the read direction is never used.
instance JSON (JsonComment) where
  showJSON (JsonComment filename c) = makeObj [
      ("file", showJSON filename),
      ("line", showJSON $ scLine c),
      ("column", showJSON $ scColumn c),
      ("level", showJSON $ scSeverity c),
      ("code", showJSON $ scCode c),
      ("message", showJSON $ scMessage c)
      ]
  readJSON = undefined
-- | Parse argv with GetOpt; on bad syntax, print the errors plus usage to
-- stderr and abort with 'SyntaxFailure'.
parseArguments :: [String] -> ErrorT Status IO ([Flag], [FilePath])
parseArguments argv =
    case getOpt Permute options argv of
        (opts, files, []) -> return (opts, files)
        (_, _, errors) -> do
            liftIO . printErr $ concat errors ++ "\n" ++ usageInfo header options
            throwError SyntaxFailure
-- | Dispatch table from output-format name to formatter.
formats :: Map.Map String (AnalysisOptions -> [FilePath] -> IO Status)
formats = Map.fromList [
    ("json", forJson),
    ("gcc", forGcc),
    ("checkstyle", forCheckstyle),
    ("tty", forTty)
    ]
-- | Collapse an ErrorT computation to its final Status: a thrown Status is
-- itself the result; plain success means no problems were found.
toStatus = liftM (either id (const NoProblems)) . runErrorT
-- | Run an IO action producing a 'Status', converting any thrown runtime
-- exception into a printed error plus 'RuntimeException'.
-- (The original body returned 'action' unguarded, leaving 'handler' dead
-- code and this status unreachable from here; the catch is restored.)
catchExceptions :: IO Status -> IO Status
catchExceptions action = action `catch` handler
  where
    handler err = do
        printErr $ show (err :: SomeException)
        return RuntimeException
-- | Summarise a list of comments as an overall 'Status'.
checkComments comments = if null comments then NoProblems else SomeProblems
-- | Human-readable terminal output: prints each offending source line with
-- colorised, column-aligned comments underneath.
forTty :: AnalysisOptions -> [FilePath] -> IO Status
forTty options files = do
    output <- mapM doFile files
    return $ mconcat output
  where
    clear = ansi 0
    -- ANSI SGR escape sequence for the given attribute code.
    ansi n = "\x1B[" ++ show n ++ "m"
    colorForLevel "error" = 31 -- red
    colorForLevel "warning" = 33 -- yellow
    colorForLevel "info" = 32 -- green
    colorForLevel "style" = 32 -- green
    colorForLevel "message" = 1 -- bold
    colorForLevel "source" = 0 -- none
    colorForLevel _ = 0 -- none
    colorComment level comment =
        ansi (colorForLevel level) ++ comment ++ clear
    doFile path = catchExceptions $ do
        contents <- readContents path
        doInput path contents
    doInput filename contents = do
        let fileLines = lines contents
        let lineCount = length fileLines
        let comments = getComments options contents
        -- Group comments per source line so each line is printed once.
        let groups = groupWith scLine comments
        colorFunc <- getColorFunc
        mapM_ (\x -> do
            let lineNum = scLine (head x)
            -- Guard against out-of-range (virtual) line numbers.
            let line = if lineNum < 1 || lineNum > lineCount
                            then ""
                            else fileLines !! (lineNum - 1)
            putStrLn ""
            putStrLn $ colorFunc "message"
                ("In " ++ filename ++" line " ++ show lineNum ++ ":")
            putStrLn (colorFunc "source" line)
            mapM_ (\c -> putStrLn (colorFunc (scSeverity c) $ cuteIndent c)) x
            putStrLn ""
          ) groups
        return . checkComments $ comments
    -- Indent an arrow marker to the comment's column.
    cuteIndent comment =
        replicate (scColumn comment - 1) ' ' ++
            "^-- " ++ code (scCode comment) ++ ": " ++ scMessage comment
    code code = "SC" ++ show code
    -- Colorise only when writing to a real terminal, and never on Windows.
    getColorFunc = do
        term <- hIsTerminalDevice stdout
        let windows = "mingw" `isPrefixOf` os
        return $ if term && not windows then colorComment else const id
-- | Machine-readable output: a single JSON array of all comments, across
-- all input files.
forJson :: AnalysisOptions -> [FilePath] -> IO Status
forJson options files = catchExceptions $ do
    -- ListT collects one (file, comment) pair per comment per file.
    comments <- runListT $ do
        file <- ListT $ return files
        comment <- ListT $ commentsFor options file
        return $ JsonComment file comment
    putStrLn $ encodeStrict comments
    return $ checkComments comments
-- Mimic GCC "file:line:col: (error|warning|note): message" format
forGcc :: AnalysisOptions -> [FilePath] -> IO Status
forGcc options files = do
    files <- mapM process files
    return $ mconcat files
  where
    process file = catchExceptions $ do
        contents <- readContents file
        -- Columns are remapped from tabstop-8 to real character offsets so
        -- that editors jump to the right place.
        let comments = makeNonVirtual (getComments options contents) contents
        mapM_ (putStrLn . format file) comments
        return $ checkComments comments
    format filename c = concat [
        filename, ":",
        show $ scLine c, ":",
        show $ scColumn c, ": ",
        case scSeverity c of
            "error" -> "error"
            "warning" -> "warning"
            _ -> "note",
        ": ",
        concat . lines $ scMessage c,
        " [SC", show $ scCode c, "]"
        ]
-- Checkstyle compatible output. A bit of a hack to avoid XML dependencies
forCheckstyle :: AnalysisOptions -> [FilePath] -> IO Status
forCheckstyle options files = do
    putStrLn "<?xml version='1.0' encoding='UTF-8'?>"
    putStrLn "<checkstyle version='4.3'>"
    statuses <- mapM process files
    putStrLn "</checkstyle>"
    return $ mconcat statuses
  where
    process file = catchExceptions $ do
        comments <- commentsFor options file
        putStrLn (formatFile file comments)
        return $ checkComments comments
    severity "error" = "error"
    severity "warning" = "warning"
    severity _ = "info"
    -- One XML attribute with an escaped value.
    attr s v = concat [ s, "='", escape v, "' " ]
    escape = concatMap escape'
    -- Escape every character outside a conservative whitelist as &#NNN;.
    escape' c = if isOk c then [c] else "&#" ++ show (ord c) ++ ";"
    isOk x = any ($x) [isAsciiUpper, isAsciiLower, isDigit, (`elem` " ./")]
    formatFile name comments = concat [
        "<file ", attr "name" name, ">\n",
        concatMap format comments,
        "</file>"
        ]
    format c = concat [
        "<error ",
        attr "line" $ show . scLine $ c,
        attr "column" $ show . scColumn $ c,
        attr "severity" $ severity . scSeverity $ c,
        attr "message" $ scMessage c,
        attr "source" $ "ShellCheck.SC" ++ show (scCode c),
        "/>\n"
        ]
-- | Read a file and run the checker on its contents.
commentsFor options file = do
    contents <- readContents file
    return (getComments options contents)

-- | Run ShellCheck over a string of shell source.
getComments = shellCheck

-- | Read a file's contents, with "-" meaning standard input.
readContents :: FilePath -> IO String
readContents "-"  = getContents
readContents file = readFile file
-- Realign comments from a tabstop of 8 to 1
makeNonVirtual comments contents =
    map fix comments
  where
    ls = lines contents
    -- Rewrite each comment's column; out-of-range line numbers are kept.
    fix c = c {
        scColumn =
            if scLine c > 0 && scLine c <= length ls
            then real (ls !! (scLine c - 1)) 0 0 (scColumn c)
            else scColumn c
        }
    -- real line realCol virtualCol target: walk the line counting virtual
    -- columns (a tab advances to the next multiple of 8) until the virtual
    -- target is reached; return the real character offset.
    real _ r v target | target <= v = r
    real [] r v _ = r -- should never happen
    real ('\t':rest) r v target =
        real rest (r+1) (v + 8 - (v `mod` 8)) target
    real (_:rest) r v target = real rest (r+1) (v+1) target
-- | The value of the first flag with the given name, if any.
getOption flags name =
    case [val | Flag var val <- flags, var == name] of
        (v:_) -> Just v
        []    -> Nothing

-- | The values of every flag with the given name, in order.
getOptions options name =
    [val | Flag var val <- options, var == name]
-- | Split a list on a separator element; the separator is dropped and
-- empty segments are kept (so the result is never empty).
split char str = go str []
  where
    go [] acc = [reverse acc]
    go (c:cs) acc
        | c == char = reverse acc : go cs []
        | otherwise = go cs (c:acc)
-- | Collect all -e/--exclude flag values into a list of error codes;
-- any non-numeric prefix (e.g. "SC") is stripped before parsing.
getExclusions options =
    let elements = concatMap (split ',') $ getOptions options "exclude"
        clean = dropWhile (not . isDigit)
    in
        map (Prelude.read . clean) elements :: [Int]
-- | Drop all comments whose code appears in the exclusion list.
excludeCodes codes =
    filter (not . hasCode)
  where
    hasCode c = scCode c `elem` codes
-- | Program entry point: parse arguments, run the checker, and exit with
-- a code describing the worst thing that happened.
main = do
    args <- getArgs
    status <- toStatus $ do
        (flags, files) <- parseArguments args
        process flags files
    exitWith $ statusToCode status
-- | Map the overall outcome to the process exit code.
statusToCode status =
    case status of
        NoProblems -> ExitSuccess
        SomeProblems -> ExitFailure 1
        BadInput -> ExitFailure 5
        SyntaxFailure -> ExitFailure 3
        SupportFailure -> ExitFailure 4
        RuntimeException -> ExitFailure 2
-- | Fold the flags into the default analysis options, pick the requested
-- output format, and run its formatter over the input files.
process :: [Flag] -> [FilePath] -> ErrorT Status IO ()
process flags files = do
    options <- foldM (flip parseOption) defaultAnalysisOptions flags
    verifyFiles files
    let format = fromMaybe "tty" $ getOption flags "format"
    case Map.lookup format formats of
        -- Unknown format: list the supported ones and abort.
        Nothing -> do
            liftIO $ do
                printErr $ "Unknown format " ++ format
                printErr "Supported formats:"
                mapM_ (printErr . write) $ Map.keys formats
            throwError SupportFailure
          where write s = "  " ++ s
        -- The formatter's Status becomes the final result (Left aborts).
        Just f -> ErrorT $ liftM Left $ f options files
-- | Fold one command-line flag into the analysis options. May abort with
-- 'SupportFailure' (unknown shell), 'SyntaxFailure' (bad exclusion code),
-- or 'NoProblems' (--version prints and exits successfully).
parseOption flag options =
    case flag of
        Flag "shell" str ->
            fromMaybe (die $ "Unknown shell: " ++ str) $ do
                shell <- shellForExecutable str
                return $ return options { optionShellType = Just shell }
        Flag "exclude" str -> do
            new <- mapM parseNum $ split ',' str
            let old = optionExcludes options
            return options { optionExcludes = new ++ old }
        Flag "version" _ -> do
            liftIO printVersion
            throwError NoProblems
        _ -> return options
  where
    die s = do
        liftIO $ printErr s
        throwError SupportFailure
    -- Accept exclusions written either as "SC1234" or plain "1234".
    parseNum ('S':'C':str) = parseNum str
    parseNum num = do
        unless (all isDigit num) $ do
            liftIO . printErr $ "Bad exclusion: " ++ num
            throwError SyntaxFailure
        return (Prelude.read num :: Integer)
-- | Abort with usage information when no input files were given.
verifyFiles files =
    when (null files) $ do
        liftIO $ printErr "No files specified.\n"
        liftIO $ printErr $ usageInfo header options
        throwError SyntaxFailure
-- | Print the version/license banner for --version.
printVersion = do
    putStrLn "ShellCheck - shell script analysis tool"
    putStrLn $ "version: " ++ shellcheckVersion
    putStrLn "license: GNU General Public License, version 3"
    putStrLn "website: http://www.shellcheck.net"
| chadbrewbaker/shellcheck | shellcheck.hs | gpl-3.0 | 11,635 | 34 | 20 | 3,283 | 3,566 | 1,702 | 1,864 | 275 | 9 |
module MaaS.Color (
Color(..),
genColor,
getColor,
convColor,
colorInterpol
) where
import Data.Word
import MaaS.Tools
import MaaS.Maths
-- | An RGB colour; the three fields are the red, green and blue channels
-- (used below as fractions in [0,1]).
data Color a = Color a a a deriving Show
-- | Sample a colour from a gradient given as a list of stops, at relative
-- position r (expected in [0,1]): pick the pair of stops around r and
-- interpolate between them. 'clamp' and 'remap' come from MaaS.Tools /
-- MaaS.Maths (not shown here) -- presumably index clamping and linear
-- range remapping; confirm against those modules.
getColor :: RealFrac a => [Color a] -> a -> Color a
getColor cols r = colorInterpol (cols !! (n-1)) (cols !! n) (remap 0 1 p1 p2 r)
    where n = clamp 1 ((length cols)-1) (round ((fromIntegral (length cols))*r))
          p1 = (fromIntegral (n-1)) / (fromIntegral (length cols))
          p2 = (fromIntegral (n)) / (fromIntegral (length cols))
-- | Colour for step n out of m on the gradient; step 0 is black.
genColor :: (RealFrac a, Integral b) => [Color a] -> b -> b -> Color a
genColor cols m n
    | n == 0    = Color 0 0 0
    | otherwise = getColor cols (fromIntegral n / fromIntegral m)
-- | Convert a fractional colour (each channel in [0,1]) to 8-bit RGB.
convColor :: RealFrac a => Color a -> (Word8,Word8,Word8)
convColor (Color r g b) = (to8 r, to8 g, to8 b)
    where to8 x = floor (x * 255.0)
-- | Channel-wise interpolation between two colours at position m (0 gives
-- the first colour, 1 the second); delegates to 'linearInterpol' from
-- MaaS.Maths (not shown here).
colorInterpol :: RealFrac a => Color a -> Color a -> a -> Color a
colorInterpol (Color r1 g1 b1) (Color r2 g2 b2) m = (Color (li r1 r2 m) (li g1 g2 m) (li b1 b2 m))
    where li = linearInterpol
| maeln/MaaS | MaaS/Color.hs | gpl-3.0 | 1,088 | 0 | 15 | 252 | 572 | 298 | 274 | 27 | 1 |
module Main where
import System.Directory
import Cube
import qualified Stage1
import qualified Stage2
import qualified Stage3
import qualified Stage4
import qualified CubeReader as Reader
-- | Check that a file exists and is non-empty, reporting progress on
-- stdout; True means the file is present and has content.
checkFile :: FilePath -> IO Bool
checkFile fp = do
    exists <- doesFileExist fp
    if not exists
        then do
            putStrLn (fp ++ " not found")
            return False
        else do
            putStrLn $ "Found " ++ fp
            contents <- readFile fp
            if null contents
                then do
                    putStrLn (fp ++ " is empty")
                    return False
                else return True
-- | Ensure all four stage table files exist and are non-empty, generating
-- any that are missing, then read a cube and print it.
main :: IO ()
main = do
    s1 <- checkFile "Stage1.dat"
    if s1 then return () else putStrLn "Writing Table for Stage 1" >> Stage1.writeTable
    s2 <- checkFile "Stage2.dat"
    if s2 then return () else putStrLn "Writing Table for Stage 2" >> Stage2.writeTable
    s3 <- checkFile "Stage3.dat"
    if s3 then return () else putStrLn "Writing Table for Stage 3" >> Stage3.writeTable
    s4 <- checkFile "Stage4.dat"
    if s4 then return () else putStrLn "Writing Table for Stage 4" >> Stage4.writeTable
    -- Reader.readCube presumably reads the cube from input -- TODO confirm.
    cube <- Reader.readCube
    putStrLn $ show cube
| Ricikle/Rubik | Main.hs | gpl-3.0 | 1,089 | 0 | 14 | 281 | 327 | 162 | 165 | 30 | 5 |
{-# OPTIONS -cpp -fglasgow-exts #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.IntSet
-- Copyright : (c) Daan Leijen 2002
-- License : BSD-style
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of integer sets.
--
-- Since many function names (but not the type name) clash with
-- "Prelude" names, this module is usually imported @qualified@, e.g.
--
-- > import Data.IntSet (IntSet)
-- > import qualified Data.IntSet as IntSet
--
-- The implementation is based on /big-endian patricia trees/. This data
-- structure performs especially well on binary operations like 'union'
-- and 'intersection'. However, my benchmarks show that it is also
-- (much) faster on insertions and deletions when compared to a generic
-- size-balanced set implementation (see "Data.Set").
--
-- * Chris Okasaki and Andy Gill, \"/Fast Mergeable Integer Maps/\",
-- Workshop on ML, September 1998, pages 77-86,
-- <http://www.cse.ogi.edu/~andy/pub/finite.htm>
--
-- * D.R. Morrison, \"/PATRICIA -- Practical Algorithm To Retrieve
-- Information Coded In Alphanumeric/\", Journal of the ACM, 15(4),
-- October 1968, pages 514-534.
--
-- Many operations have a worst-case complexity of /O(min(n,W))/.
-- This means that the operation can become linear in the number of
-- elements with a maximum of /W/ -- the number of bits in an 'Int'
-- (32 or 64).
-----------------------------------------------------------------------------
module Data.IntSet (
-- * Set type
IntSet -- instance Eq,Show
-- * Operators
, (\\)
-- * Query
, null
, size
, member
, notMember
, isSubsetOf
, isProperSubsetOf
-- * Construction
, empty
, singleton
, insert
, delete
-- * Combine
, union, unions
, difference
, intersection
-- * Filter
, filter
, partition
, split
, splitMember
-- * Map
, map
-- * Fold
, fold
-- * Conversion
-- ** List
, elems
, toList
, fromList
-- ** Ordered list
, toAscList
, fromAscList
, fromDistinctAscList
-- * Debugging
, showTree
, showTreeWith
) where
import Prelude hiding (lookup,filter,foldr,foldl,null,map)
import Data.Bits
import Data.Int
import qualified Data.List as List
import Data.Monoid (Monoid(..))
import Data.Typeable
{-
-- just for testing
import QuickCheck
import List (nub,sort)
import qualified List
-}
import Data.Word
infixl 9 \\{-This comment teaches CPP correct behaviour -}
-- A "Nat" is a natural machine word (an unsigned Int)
type Nat = Word

-- | Reinterpret an 'Int' as an unsigned machine word.
natFromInt :: Int -> Nat
natFromInt = fromIntegral

-- | Reinterpret a machine word as a signed 'Int'.
intFromNat :: Nat -> Int
intFromNat = fromIntegral

-- | Logical (unsigned) right shift.
shiftRL :: Nat -> Int -> Nat
shiftRL = shiftR
{--------------------------------------------------------------------
  Operators
--------------------------------------------------------------------}
-- | /O(n+m)/. See 'difference'.
(\\) :: IntSet -> IntSet -> IntSet
(\\) = difference
{--------------------------------------------------------------------
  Types
--------------------------------------------------------------------}
-- | A set of integers, represented as a big-endian patricia trie:
-- 'Bin' carries the common prefix of its subtrees and the branching bit
-- mask, 'Tip' holds a single member, and 'Nil' is the empty set.
data IntSet = Nil
            | Tip {-# UNPACK #-} !Int
            | Bin {-# UNPACK #-} !Prefix {-# UNPACK #-} !Mask !IntSet !IntSet
type Prefix = Int
type Mask = Int
-- Status combination under (<>) is set union; the identity is 'empty'.
instance Monoid IntSet where
    mempty = empty
    mappend = union
    mconcat = unions
{--------------------------------------------------------------------
  Query
--------------------------------------------------------------------}
-- | /O(1)/. Is the set empty?
null :: IntSet -> Bool
null Nil = True
null _   = False

-- | /O(n)/. Cardinality of the set.
size :: IntSet -> Int
size (Bin _ _ l r) = size l + size r
size (Tip _)       = 1
size Nil           = 0
-- | /O(min(n,W))/. Is the value a member of the set?
-- The bit tests 'nomatch' and 'zero' (defined later in this module,
-- outside this excerpt) steer the descent through the trie.
member :: Int -> IntSet -> Bool
member x t
  = case t of
      Bin p m l r
        | nomatch x p m -> False
        | zero x m -> member x l
        | otherwise -> member x r
      Tip y -> (x==y)
      Nil -> False

-- | /O(log n)/. Is the element not in the set?
notMember :: Int -> IntSet -> Bool
notMember k = not . member k

-- 'lookup' is used by 'intersection' for left-biasing
lookup :: Int -> IntSet -> Maybe Int
lookup k t
  = let nk = natFromInt k in seq nk (lookupN nk t)

-- Unsigned variant of 'lookup': keys are compared as machine words so the
-- comparison order matches the trie's bit order ('zeroN' is defined later
-- in this module).
lookupN :: Nat -> IntSet -> Maybe Int
lookupN k t
  = case t of
      Bin p m l r
        | zeroN k (natFromInt m) -> lookupN k l
        | otherwise -> lookupN k r
      Tip kx
        | (k == natFromInt kx) -> Just kx
        | otherwise -> Nothing
      Nil -> Nothing
{--------------------------------------------------------------------
  Construction
--------------------------------------------------------------------}
-- | /O(1)/. The empty set.
empty :: IntSet
empty = Nil

-- | /O(1)/. A set of one element.
singleton :: Int -> IntSet
singleton = Tip
{--------------------------------------------------------------------
  Insert
--------------------------------------------------------------------}
-- | /O(min(n,W))/. Add a value to the set. When the value is already
-- an element of the set, it is replaced by the new one, ie. 'insert'
-- is left-biased. ('join', 'nomatch' and 'zero' are trie helpers defined
-- later in this module, outside this excerpt.)
insert :: Int -> IntSet -> IntSet
insert x t
  = case t of
      Bin p m l r
        | nomatch x p m -> join x (Tip x) p t
        | zero x m -> Bin p m (insert x l) r
        | otherwise -> Bin p m l (insert x r)
      Tip y
        | x==y -> Tip x
        | otherwise -> join x (Tip x) y t
      Nil -> Tip x

-- right-biased insertion, used by 'union'
-- (only the Tip case differs from 'insert', keeping the existing element;
-- for Int elements the two biases are observationally identical)
insertR :: Int -> IntSet -> IntSet
insertR x t
  = case t of
      Bin p m l r
        | nomatch x p m -> join x (Tip x) p t
        | zero x m -> Bin p m (insert x l) r
        | otherwise -> Bin p m l (insert x r)
      Tip y
        | x==y -> t
        | otherwise -> join x (Tip x) y t
      Nil -> Tip x

-- | /O(min(n,W))/. Delete a value in the set. Returns the
-- original set when the value was not present.
-- Uses the smart constructor 'bin' (defined later in this module) to
-- collapse emptied subtrees.
delete :: Int -> IntSet -> IntSet
delete x t
  = case t of
      Bin p m l r
        | nomatch x p m -> t
        | zero x m -> bin p m (delete x l) r
        | otherwise -> bin p m l (delete x r)
      Tip y
        | x==y -> Nil
        | otherwise -> t
      Nil -> Nil
{--------------------------------------------------------------------
  Union
--------------------------------------------------------------------}
-- | The union of a list of sets.
-- ('foldlStrict' is a strict left fold defined later in this module.)
unions :: [IntSet] -> IntSet
unions xs
  = foldlStrict union empty xs

-- | /O(n+m)/. The union of two sets.
-- Standard patricia merge: the tree with the shorter mask (coarser split)
-- absorbs the other into one of its halves; equal prefixes merge
-- pointwise; otherwise 'join' creates a new branch above both trees.
union :: IntSet -> IntSet -> IntSet
union t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  | shorter m1 m2 = union1
  | shorter m2 m1 = union2
  | p1 == p2 = Bin p1 m1 (union l1 l2) (union r1 r2)
  | otherwise = join p1 t1 p2 t2
  where
    union1 | nomatch p2 p1 m1 = join p1 t1 p2 t2
           | zero p2 m1 = Bin p1 m1 (union l1 t2) r1
           | otherwise = Bin p1 m1 l1 (union r1 t2)
    union2 | nomatch p1 p2 m2 = join p1 t1 p2 t2
           | zero p1 m2 = Bin p2 m2 (union t1 l2) r2
           | otherwise = Bin p2 m2 l2 (union t1 r2)
union (Tip x) t = insert x t
union t (Tip x) = insertR x t -- right bias
union Nil t = t
union t Nil = t
{--------------------------------------------------------------------
  Difference
--------------------------------------------------------------------}
-- | /O(n+m)/. Difference between two sets.
difference :: IntSet -> IntSet -> IntSet
difference t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  | shorter m1 m2 = difference1
  | shorter m2 m1 = difference2
  | p1 == p2 = bin p1 m1 (difference l1 l2) (difference r1 r2)
  | otherwise = t1
  where
    -- t1 splits first: subtract t2 from whichever half can overlap it.
    difference1 | nomatch p2 p1 m1 = t1
                | zero p2 m1 = bin p1 m1 (difference l1 t2) r1
                | otherwise = bin p1 m1 l1 (difference r1 t2)
    -- t2 splits first: only t2's half that shares t1's prefix matters.
    difference2 | nomatch p1 p2 m2 = t1
                | zero p1 m2 = difference t1 l2
                | otherwise = difference t1 r2
difference t1@(Tip x) t2
  | member x t2 = Nil
  | otherwise = t1
difference Nil t = Nil
difference t (Tip x) = delete x t
difference t Nil = t
{--------------------------------------------------------------------
  Intersection
--------------------------------------------------------------------}
-- | /O(n+m)/. The intersection of two sets.
intersection :: IntSet -> IntSet -> IntSet
intersection t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  | shorter m1 m2 = intersection1
  | shorter m2 m1 = intersection2
  | p1 == p2 = bin p1 m1 (intersection l1 l2) (intersection r1 r2)
  | otherwise = Nil
  where
    intersection1 | nomatch p2 p1 m1 = Nil
                  | zero p2 m1 = intersection l1 t2
                  | otherwise = intersection r1 t2
    intersection2 | nomatch p1 p2 m2 = Nil
                  | zero p1 m2 = intersection t1 l2
                  | otherwise = intersection t1 r2
intersection t1@(Tip x) t2
  | member x t2 = t1
  | otherwise = Nil
-- Using 'lookup' keeps the element from the left set (left bias).
intersection t (Tip x)
  = case lookup x t of
      Just y -> Tip y
      Nothing -> Nil
intersection Nil t = Nil
intersection t Nil = Nil
{--------------------------------------------------------------------
  Subset
--------------------------------------------------------------------}
-- | /O(n+m)/. Is this a proper subset? (ie. a subset but not equal).
isProperSubsetOf :: IntSet -> IntSet -> Bool
isProperSubsetOf t1 t2 =
    case subsetCmp t1 t2 of
      LT -> True
      _  -> False
-- Three-way subset comparison: LT means proper subset, EQ means equal
-- sets, GT means not a subset at all.
subsetCmp t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  | shorter m1 m2 = GT
  | shorter m2 m1 = subsetCmpLt
  | p1 == p2 = subsetCmpEq
  | otherwise = GT -- disjoint
  where
    subsetCmpLt | nomatch p1 p2 m2 = GT
                | zero p1 m2 = subsetCmp t1 l2
                | otherwise = subsetCmp t1 r2
    -- Equal only when both halves are equal; a proper subset in either
    -- half (with no GT) makes the whole comparison LT.
    subsetCmpEq = case (subsetCmp l1 l2, subsetCmp r1 r2) of
                    (GT,_ ) -> GT
                    (_ ,GT) -> GT
                    (EQ,EQ) -> EQ
                    other -> LT
subsetCmp (Bin p m l r) t = GT
subsetCmp (Tip x) (Tip y)
  | x==y = EQ
  | otherwise = GT -- disjoint
subsetCmp (Tip x) t
  | member x t = LT
  | otherwise = GT -- disjoint
subsetCmp Nil Nil = EQ
subsetCmp Nil t = LT
-- | /O(n+m)/. Is this a subset?
-- @(s1 `isSubsetOf` s2)@ tells whether @s1@ is a subset of @s2@.
isSubsetOf :: IntSet -> IntSet -> Bool
isSubsetOf t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  -- A coarser split in t1 means t1 spans keys t2 cannot all contain.
  | shorter m1 m2 = False
  | shorter m2 m1 = match p1 p2 m2 && (if zero p1 m2 then isSubsetOf t1 l2
                                                     else isSubsetOf t1 r2)
  | otherwise = (p1==p2) && isSubsetOf l1 l2 && isSubsetOf r1 r2
isSubsetOf (Bin p m l r) t = False
isSubsetOf (Tip x) t = member x t
isSubsetOf Nil t = True
{--------------------------------------------------------------------
  Filter
--------------------------------------------------------------------}
-- | /O(n)/. Filter all elements that satisfy some predicate.
-- ('bin' is the smart constructor defined later in this module; it
-- collapses emptied subtrees.)
filter :: (Int -> Bool) -> IntSet -> IntSet
filter pred t
  = case t of
      Bin p m l r
        -> bin p m (filter pred l) (filter pred r)
      Tip x
        | pred x -> t
        | otherwise -> Nil
      Nil -> Nil

-- | /O(n)/. partition the set according to some predicate.
-- The first component holds the elements satisfying it.
partition :: (Int -> Bool) -> IntSet -> (IntSet,IntSet)
partition pred t
  = case t of
      Bin p m l r
        -> let (l1,l2) = partition pred l
               (r1,r2) = partition pred r
           in (bin p m l1 r1, bin p m l2 r2)
      Tip x
        | pred x -> (t,Nil)
        | otherwise -> (Nil,t)
      Nil -> (Nil,Nil)
-- | /O(log n)/. The expression (@'split' x set@) is a pair @(set1,set2)@
-- where all elements in @set1@ are lower than @x@ and all elements in
-- @set2@ larger than @x@. @x@ itself is in neither result.
--
-- > split 3 (fromList [1..5]) == (fromList [1,2], fromList [4,5])
split :: Int -> IntSet -> (IntSet,IntSet)
split x t
  = case t of
      Bin p m l r
        -- A negative mask at the root means the right subtree holds the
        -- negative keys; route the pivot to the correct half.
        | m < 0 -> if x >= 0 then let (lt,gt) = split' x l in (union r lt, gt)
                             else let (lt,gt) = split' x r in (lt, union gt l)
           -- handle negative numbers.
        | otherwise -> split' x t
      Tip y
        | x>y -> (t,Nil)
        | x<y -> (Nil,t)
        | otherwise -> (Nil,Nil)
      Nil -> (Nil, Nil)

-- Workhorse of 'split', after the sign handling above.
split' :: Int -> IntSet -> (IntSet,IntSet)
split' x t
  = case t of
      Bin p m l r
        | match x p m -> if zero x m then let (lt,gt) = split' x l in (lt,union gt r)
                                     else let (lt,gt) = split' x r in (union l lt,gt)
        | otherwise -> if x < p then (Nil, t)
                                else (t, Nil)
      Tip y
        | x>y -> (t,Nil)
        | x<y -> (Nil,t)
        | otherwise -> (Nil,Nil)
      Nil -> (Nil,Nil)

-- | /O(log n)/. Performs a 'split' but also returns whether the pivot
-- element was found in the original set.
splitMember :: Int -> IntSet -> (IntSet,Bool,IntSet)
splitMember x t
  = case t of
      Bin p m l r
        | m < 0 -> if x >= 0 then let (lt,found,gt) = splitMember' x l in (union r lt, found, gt)
                             else let (lt,found,gt) = splitMember' x r in (lt, found, union gt l)
           -- handle negative numbers.
        | otherwise -> splitMember' x t
      Tip y
        | x>y -> (t,False,Nil)
        | x<y -> (Nil,False,t)
        | otherwise -> (Nil,True,Nil)
      Nil -> (Nil,False,Nil)

-- Workhorse of 'splitMember'; recursing through 'splitMember' is harmless
-- here because subtrees never have a negative root mask, so its extra
-- guard always falls through to this function.
splitMember' :: Int -> IntSet -> (IntSet,Bool,IntSet)
splitMember' x t
  = case t of
      Bin p m l r
        | match x p m -> if zero x m then let (lt,found,gt) = splitMember x l in (lt,found,union gt r)
                                     else let (lt,found,gt) = splitMember x r in (union l lt,found,gt)
        | otherwise -> if x < p then (Nil, False, t)
                                else (t, False, Nil)
      Tip y
        | x>y -> (t,False,Nil)
        | x<y -> (Nil,False,t)
        | otherwise -> (Nil,True,Nil)
      Nil -> (Nil,False,Nil)
{----------------------------------------------------------------------
  Map
----------------------------------------------------------------------}
-- | /O(n*min(n,W))/.
-- @'map' f s@ is the set obtained by applying @f@ to each element of @s@.
--
-- It's worth noting that the size of the result may be smaller if,
-- for some @(x,y)@, @x \/= y && f x == f y@
map :: (Int->Int) -> IntSet -> IntSet
map f s = fromList (List.map f (toList s))
{--------------------------------------------------------------------
  Fold
--------------------------------------------------------------------}
-- | /O(n)/. Fold over the elements of a set in an unspecified order.
--
-- > sum set == fold (+) 0 set
-- > elems set == fold (:) [] set
fold :: (Int -> b -> b) -> b -> IntSet -> b
fold f z t
  = case t of
      -- A root with prefix 0 and a negative mask splits on the sign
      -- bit: fold 'l' (non-negatives) into the base first, then 'r'
      -- (negatives), so negatives are emitted before non-negatives.
      Bin 0 m l r | m < 0 -> foldr f (foldr f z l) r
      -- put negative numbers before.
      Bin _ _ _ _ -> foldr f z t  -- wildcards: bindings were unused
      Tip x -> f x z
      Nil -> z
-- | Right fold over a subtree: the right child is folded into the base
-- value first, so elements stream out left-to-right.
foldr :: (Int -> b -> b) -> b -> IntSet -> b
foldr f z t = go t z
  where
    go Nil           acc = acc
    go (Tip x)       acc = f x acc
    go (Bin _ _ l r) acc = go l (go r acc)
{--------------------------------------------------------------------
  List variations
--------------------------------------------------------------------}
-- | /O(n)/. The elements of a set. (For sets, this is equivalent to toList)
elems :: IntSet -> [Int]
elems = toList
{--------------------------------------------------------------------
  Lists
--------------------------------------------------------------------}
-- | /O(n)/. Convert the set to a list of elements.
toList :: IntSet -> [Int]
toList = fold (:) []
-- | /O(n)/. Convert the set to an ascending list of elements
-- ('fold' already emits negatives first, so this is just 'toList').
toAscList :: IntSet -> [Int]
toAscList = toList
-- | /O(n*min(n,W))/. Create a set from a list of integers.
fromList :: [Int] -> IntSet
fromList = foldlStrict (flip insert) empty
-- | /O(n*min(n,W))/. Build a set from an ascending list of elements.
fromAscList :: [Int] -> IntSet
fromAscList = fromList
-- | /O(n*min(n,W))/. Build a set from an ascending list of distinct elements.
fromDistinctAscList :: [Int] -> IntSet
fromDistinctAscList = fromList
{--------------------------------------------------------------------
  Eq
--------------------------------------------------------------------}
instance Eq IntSet where
  (==) = equal
  (/=) = nequal
-- | Structural equality; Patricia trees are canonical, so structural
-- equality coincides with set equality.
equal :: IntSet -> IntSet -> Bool
equal (Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2) =
  m1 == m2 && p1 == p2 && equal l1 l2 && equal r1 r2
equal (Tip x) (Tip y) = x == y
equal Nil Nil = True
equal _ _ = False
-- | Negation of 'equal' (short-circuits identically by De Morgan).
nequal :: IntSet -> IntSet -> Bool
nequal t1 t2 = not (equal t1 t2)
{--------------------------------------------------------------------
  Ord
--------------------------------------------------------------------}
-- Lexicographic order on the ascending element lists.
-- NOTE: materialises both sets as lists, so comparison is O(n).
instance Ord IntSet where
    compare s1 s2 = compare (toAscList s1) (toAscList s2)
    -- tentative implementation. See if more efficient exists.
{--------------------------------------------------------------------
  Show
--------------------------------------------------------------------}
-- Renders as @fromList [..]@, parenthesised at precedence > 10 so the
-- output round-trips through the 'Read' instance below.
instance Show IntSet where
  showsPrec p xs = showParen (p > 10) $
    showString "fromList " . shows (toList xs)
-- | Render a list of elements in set notation, e.g. @{1,2,3}@.
showSet :: [Int] -> ShowS
showSet []     = showString "{}"
showSet (e:es) = showChar '{' . shows e . go es
  where
    go []     = showChar '}'
    go (y:ys) = showChar ',' . shows y . go ys
{--------------------------------------------------------------------
  Read
--------------------------------------------------------------------}
-- Parses the @fromList [..]@ form produced by 'show' above.
instance Read IntSet where
  readsPrec p = readParen (p > 10) $ \ r -> do
    ("fromList",s) <- lex r
    (xs,t) <- reads s
    return (fromList xs,t)
{--------------------------------------------------------------------
  Typeable
--------------------------------------------------------------------}
-- NOTE(review): uses the legacy 'mkTyCon' API (no package/module
-- qualification); fine for this vintage of base, superseded later.
intSetTc = mkTyCon "IntSet"; instance Typeable IntSet where { typeOf _ = mkTyConApp intSetTc [] }
{--------------------------------------------------------------------
  Debugging
--------------------------------------------------------------------}
-- | /O(n)/. Show the tree that implements the set. The tree is shown
-- in a compressed, hanging format.
showTree :: IntSet -> String
showTree s
  = showTreeWith True False s
{- | /O(n)/. The expression (@'showTreeWith' hang wide map@) shows
 the tree that implements the set. If @hang@ is
 'True', a /hanging/ tree is shown otherwise a rotated tree is shown. If
 @wide@ is 'True', an extra wide version is shown.
-}
-- The bar lists start empty; the rendering helpers below thread them.
showTreeWith :: Bool -> Bool -> IntSet -> String
showTreeWith hang wide t
  | hang = (showsTreeHang wide [] t) ""
  | otherwise = (showsTree wide [] [] t) ""
-- Rotated-tree rendering: right subtree drawn above the node, left below.
showsTree :: Bool -> [String] -> [String] -> IntSet -> ShowS
showsTree wide lbars rbars t
  = case t of
      Bin p m l r
        -> showsTree wide (withBar rbars) (withEmpty rbars) r .
           showWide wide rbars .
           showsBars lbars . showString (showBin p m) . showString "\n" .
           showWide wide lbars .
           showsTree wide (withEmpty lbars) (withBar lbars) l
      Tip x
        -> showsBars lbars . showString " " . shows x . showString "\n"
      Nil -> showsBars lbars . showString "|\n"
-- Hanging rendering: node first, then left and right children indented.
showsTreeHang :: Bool -> [String] -> IntSet -> ShowS
showsTreeHang wide bars t
  = case t of
      Bin p m l r
        -> showsBars bars . showString (showBin p m) . showString "\n" .
           showWide wide bars .
           showsTreeHang wide (withBar bars) l .
           showWide wide bars .
           showsTreeHang wide (withEmpty bars) r
      Tip x
        -> showsBars bars . showString " " . shows x . showString "\n"
      Nil -> showsBars bars . showString "|\n"
-- Label for an internal node; prefix/mask output is deliberately off.
showBin p m
  = "*" -- ++ show (p,m)
-- Optional extra spacer line used by the "wide" rendering mode.
showWide wide bars
  | wide = showString (concat (reverse bars)) . showString "|\n"
  | otherwise = id
-- Indentation bars for the current line; 'tail' is safe here because
-- the empty-list case is handled separately.
showsBars :: [String] -> ShowS
showsBars bars
  = case bars of
      [] -> id
      _ -> showString (concat (reverse (tail bars))) . showString node
node = "+--"
withBar bars = "| ":bars
withEmpty bars = " ":bars
{--------------------------------------------------------------------
  Helpers
--------------------------------------------------------------------}
{--------------------------------------------------------------------
  Join
--------------------------------------------------------------------}
-- | Combine two trees with distinct prefixes into one 'Bin'.  The new
-- branching bit is the highest bit in which the prefixes differ; the
-- tree whose prefix has that bit clear becomes the left child.
join :: Prefix -> IntSet -> Prefix -> IntSet -> IntSet
join p1 t1 p2 t2
  | zero p1 m = Bin p m t1 t2
  | otherwise = Bin p m t2 t1
  where
    m = branchMask p1 p2
    p = mask p1 m
{--------------------------------------------------------------------
  @bin@ assures that we never have empty trees within a tree.
--------------------------------------------------------------------}
-- | Smart constructor for 'Bin': collapses away an empty child so that
-- 'Nil' only ever occurs at the top level of a set.
bin :: Prefix -> Mask -> IntSet -> IntSet -> IntSet
bin _ _ l Nil = l  -- wildcards: prefix/mask were bound but unused
bin _ _ Nil r = r
bin p m l r = Bin p m l r
{--------------------------------------------------------------------
  Endian independent bit twiddling
--------------------------------------------------------------------}
-- | Is the mask bit of @i@ zero?  Decides whether an element belongs in
-- the left ('True') or right ('False') subtree of a 'Bin'.
zero :: Int -> Mask -> Bool
zero i m
  = (natFromInt i) .&. (natFromInt m) == 0
nomatch,match :: Int -> Prefix -> Mask -> Bool
-- | Does @i@ fall outside the prefix @p@ (bits above mask bit @m@)?
nomatch i p m
  = (mask i m) /= p
-- | Does @i@ share the prefix @p@ (bits above mask bit @m@)?
match i p m
  = (mask i m) == p
-- | Keep only the bits of @i@ strictly above the mask bit @m@.
mask :: Int -> Mask -> Prefix
mask i m
  = maskW (natFromInt i) (natFromInt m)
-- | 'zero' specialised to already-converted naturals.
zeroN :: Nat -> Nat -> Bool
zeroN i m = (i .&. m) == 0
{--------------------------------------------------------------------
  Big endian operations
--------------------------------------------------------------------}
-- | Mask @i@ down to the bits above mask bit @m@:
-- @complement (m-1) `xor` m@ is a mask of exactly those higher bits.
maskW :: Nat -> Nat -> Prefix
maskW i m
  = intFromNat (i .&. (complement (m-1) `xor` m))
-- | Is mask @m1@ a more significant bit than @m2@?  Compared on the
-- unsigned ('Nat') view so the sign bit counts as the highest mask.
shorter :: Mask -> Mask -> Bool
shorter m1 m2
  = (natFromInt m1) > (natFromInt m2)
-- | The highest bit in which two prefixes disagree; used by 'join'.
branchMask :: Prefix -> Prefix -> Mask
branchMask p1 p2
  = intFromNat (highestBitMask (natFromInt p1 `xor` natFromInt p2))
{----------------------------------------------------------------------
Finding the highest bit (mask) in a word [x] can be done efficiently in
three ways:
* convert to a floating point value and the mantissa tells us the
[log2(x)] that corresponds with the highest bit position. The mantissa
is retrieved either via the standard C function [frexp] or by some bit
twiddling on IEEE compatible numbers (float). Note that one needs to
use at least [double] precision for an accurate mantissa of 32 bit
numbers.
* use bit twiddling, a logarithmic sequence of bitwise or's and shifts (bit).
* use processor specific assembler instruction (asm).
The most portable way would be [bit], but is it efficient enough?
I have measured the cycle counts of the different methods on an AMD
Athlon-XP 1800 (~ Pentium III 1.8Ghz) using the RDTSC instruction:
highestBitMask: method cycles
--------------
frexp 200
float 33
bit 11
asm 12
highestBit: method cycles
--------------
frexp 195
float 33
bit 11
asm 11
Wow, the bit twiddling is on today's RISC like machines even faster
than a single CISC instruction (BSR)!
----------------------------------------------------------------------}
{----------------------------------------------------------------------
[highestBitMask] returns a word where only the highest bit is set.
It is found by first setting all bits in lower positions than the
highest bit and than taking an exclusive or with the original value.
Allthough the function may look expensive, GHC compiles this into
excellent C code that subsequently compiled into highly efficient
machine code. The algorithm is derived from Jorg Arndt's FXT library.
----------------------------------------------------------------------}
-- | Keep only the highest set bit of the argument.  Strategy (from
-- Jorg Arndt's FXT library): smear the top bit into every lower
-- position with a logarithmic cascade of shift-or steps, then xor
-- with the smeared value shifted right once, leaving just that bit.
highestBitMask :: Nat -> Nat
highestBitMask x0 = smeared `xor` shiftRL smeared 1
  where
    x1 = x0 .|. shiftRL x0 1
    x2 = x1 .|. shiftRL x1 2
    x3 = x2 .|. shiftRL x2 4
    x4 = x3 .|. shiftRL x3 8
    x5 = x4 .|. shiftRL x4 16
    smeared = x5 .|. shiftRL x5 32 -- for 64 bit platforms
{--------------------------------------------------------------------
  Utilities
--------------------------------------------------------------------}
-- | Strict left fold over a list: the accumulator is forced with 'seq'
-- at every step so no thunk chain builds up.
foldlStrict :: (a -> b -> a) -> a -> [b] -> a
foldlStrict f = go
  where
    go z []     = z
    go z (x:xs) = let z' = f z x in z' `seq` go z' xs
{-
{--------------------------------------------------------------------
Testing
--------------------------------------------------------------------}
testTree :: [Int] -> IntSet
testTree xs = fromList xs
test1 = testTree [1..20]
test2 = testTree [30,29..10]
test3 = testTree [1,4,6,89,2323,53,43,234,5,79,12,9,24,9,8,423,8,42,4,8,9,3]
{--------------------------------------------------------------------
QuickCheck
--------------------------------------------------------------------}
qcheck prop
= check config prop
where
config = Config
{ configMaxTest = 500
, configMaxFail = 5000
, configSize = \n -> (div n 2 + 3)
, configEvery = \n args -> let s = show n in s ++ [ '\b' | _ <- s ]
}
{--------------------------------------------------------------------
Arbitrary, reasonably balanced trees
--------------------------------------------------------------------}
instance Arbitrary IntSet where
arbitrary = do{ xs <- arbitrary
; return (fromList xs)
}
{--------------------------------------------------------------------
Single, Insert, Delete
--------------------------------------------------------------------}
prop_Single :: Int -> Bool
prop_Single x
= (insert x empty == singleton x)
prop_InsertDelete :: Int -> IntSet -> Property
prop_InsertDelete k t
= not (member k t) ==> delete k (insert k t) == t
{--------------------------------------------------------------------
Union
--------------------------------------------------------------------}
prop_UnionInsert :: Int -> IntSet -> Bool
prop_UnionInsert x t
= union t (singleton x) == insert x t
prop_UnionAssoc :: IntSet -> IntSet -> IntSet -> Bool
prop_UnionAssoc t1 t2 t3
= union t1 (union t2 t3) == union (union t1 t2) t3
prop_UnionComm :: IntSet -> IntSet -> Bool
prop_UnionComm t1 t2
= (union t1 t2 == union t2 t1)
prop_Diff :: [Int] -> [Int] -> Bool
prop_Diff xs ys
= toAscList (difference (fromList xs) (fromList ys))
== List.sort ((List.\\) (nub xs) (nub ys))
prop_Int :: [Int] -> [Int] -> Bool
prop_Int xs ys
= toAscList (intersection (fromList xs) (fromList ys))
== List.sort (nub ((List.intersect) (xs) (ys)))
{--------------------------------------------------------------------
Lists
--------------------------------------------------------------------}
prop_Ordered
= forAll (choose (5,100)) $ \n ->
let xs = [0..n::Int]
in fromAscList xs == fromList xs
prop_List :: [Int] -> Bool
prop_List xs
= (sort (nub xs) == toAscList (fromList xs))
-}
| kaoskorobase/mescaline | resources/hugs/packages/base/Data/IntSet.hs | gpl-3.0 | 28,920 | 0 | 26 | 8,391 | 6,732 | 3,400 | 3,332 | 454 | 5 |
module TalkBot.Fun ( sass, hello, extend ) where
-- | Reply to a message with escalating sass by word count: empty input
-- gets a greeting, one word is quoted back stretched, two words get a
-- retort, and longer messages are stretched word by word.
sass :: String -> String
sass s = case words s of
  []     -> hello
  [w]    -> "Don't you \"" ++ extend 1 w ++ "\" me"
  [w, _] -> w ++ " yourself!"  -- wildcard: second word was unused
  ws     -> unwords $ fmap (extend 3) ws
-- | Canonical greeting.
hello :: String
hello = "hello"
-- | Stretch a word by repeating its last character @n@ more times; the
-- empty string is left untouched, keeping 'last' safe.
extend :: Int -> String -> String
extend _ [] = []
extend n a = a ++ replicate n (last a)
| RoboNickBot/talkbot | src/TalkBot/Fun.hs | gpl-3.0 | 370 | 0 | 11 | 99 | 183 | 95 | 88 | 12 | 4 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module Pipes.Categorical
(
runEffect
,stdinLn
,stdoutLn
,module Control.Category
,module Control.Arrow
,zip
,each
) where
import qualified Pipes as P
import Pipes ((>->),yield,await,lift,Pipe)
import qualified Pipes.Prelude as PP
import Data.Char
import Control.Category ((>>>),Category((.),id))
import Control.Arrow
import Prelude hiding ((.),zip)
-- Standard Pipes utilities lifted into the 'PipeC' wrapper.  'zip'
-- primes both wrapped pipes with a unit tick so they start producing.
stdinLn = PipeC PP.stdinLn
stdoutLn = PipeC PP.stdoutLn
each = PipeC . P.each
zip x y = PipeC (PP.zip (yield () >-> unPipeC x) (yield () >-> unPipeC y))
newtype PipeC m a b =PipeC { unPipeC :: P.Proxy () a () b m () }
-- | 'PipeC' forms a category under pipe composition.
instance Monad m => Category (PipeC m) where
  -- 'P.cat' forwards every value unchanged and is the lawful identity
  -- for '>->' composition; this replaces the previous 'undefined' stub.
  id = PipeC P.cat
  (PipeC f) . (PipeC g) = PipeC (myComp g f)
-- | Left-to-right pipe composition (a named alias for '>->').
myComp :: Monad m => Pipe a b m r -> Pipe b c m r -> Pipe a c m r
myComp p q = p >-> q
instance Monad m => Arrow (PipeC m) where
  -- Lift a pure function into a mapping pipe.
  arr f =PipeC(PP.map f)
  -- NOTE(review): 'first' and 'second' are unimplemented stubs and will
  -- crash if ever used; a lawful Arrow instance for pipes is non-trivial.
  first _ = undefined
  second _ = undefined
runEffect x = P.runEffect (yield () >-> unPipeC x >-> PP.drain)
| xpika/line-size | Pipes/Categorical.hs | gpl-3.0 | 990 | 0 | 11 | 187 | 450 | 249 | 201 | 32 | 1 |
module Update where
import qualified Data.Map as M
import Data.Acid
import Data.Record.StateFields
import Query
import Types
-- | Register a new user.  Fails with 'EUserExists' if the name is
-- already taken; otherwise allocates the next sequential id and
-- stores the new user.
register :: String -> String -> Update Database (Either Error Id)
register name password = do
  existing <- runQuery $ findUser name
  case existing of
    Just _ -> return $ Left EUserExists
    Nothing -> do
      -- NOTE(review): the id is the current map size, which assumes
      -- users are never deleted; confirm before adding deletion.
      uId <- fmap M.size (getf dbUsers)
      modf dbUsers $ M.insert uId (emptyUser uId name password)
      return $ Right uId
-- | Create a new entry from a request.
-- NOTE(review): the happy path is an unimplemented stub ('undefined')
-- and will crash at runtime; only the bad-input guard is real.
createEntry :: Request -> Update Database (Either Error Id)
createEntry ReqCreateEntry{..} = undefined
createEntry _ = return $ Left EBadInput
-- | Update an existing entry from a request.
-- NOTE(review): unimplemented stub ('undefined') on the happy path,
-- mirroring 'createEntry'; only the bad-input guard is real.
updateEntry :: Request -> Update Database (Maybe Error)
updateEntry ReqUpdateEntry{..} = undefined
updateEntry _ = return $ Just EBadInput
| ktvoelker/todo | src/Update.hs | gpl-3.0 | 772 | 0 | 15 | 159 | 271 | 133 | 138 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.Manufacturers
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Public API for managing Manufacturer Center related data.
--
-- /See:/ <https://developers.google.com/manufacturers/ Manufacturer Center API Reference>
module Network.Google.Manufacturers
(
-- * Service Configuration
manufacturersService
-- * OAuth Scopes
, manufacturercenterScope
-- * API Declaration
, ManufacturersAPI
-- * Resources
-- ** manufacturers.accounts.products.delete
, module Network.Google.Resource.Manufacturers.Accounts.Products.Delete
-- ** manufacturers.accounts.products.get
, module Network.Google.Resource.Manufacturers.Accounts.Products.Get
-- ** manufacturers.accounts.products.list
, module Network.Google.Resource.Manufacturers.Accounts.Products.List
-- ** manufacturers.accounts.products.update
, module Network.Google.Resource.Manufacturers.Accounts.Products.Update
-- * Types
-- ** DestinationStatusStatus
, DestinationStatusStatus (..)
-- ** IssueResolution
, IssueResolution (..)
-- ** Image
, Image
, image
, iStatus
, iImageURL
, iType
-- ** FeatureDescription
, FeatureDescription
, featureDescription
, fdImage
, fdText
, fdHeadline
-- ** ProductDetail
, ProductDetail
, productDetail
, pdAttributeValue
, pdAttributeName
, pdSectionName
-- ** Empty
, Empty
, empty
-- ** DestinationStatus
, DestinationStatus
, destinationStatus
, dsDestination
, dsStatus
-- ** AccountsProductsGetInclude
, AccountsProductsGetInclude (..)
-- ** ImageStatus
, ImageStatus (..)
-- ** Count
, Count
, count
, cValue
, cUnit
-- ** AccountsProductsListInclude
, AccountsProductsListInclude (..)
-- ** Capacity
, Capacity
, capacity
, capValue
, capUnit
-- ** ImageType
, ImageType (..)
-- ** Attributes
, Attributes
, attributes
, aProductName
, aScent
, aImageLink
, aFeatureDescription
, aProductDetail
, aProductLine
, aColor
, aSize
, aFlavor
, aPattern
, aSizeSystem
, aProductHighlight
, aMaterial
, aFormat
, aProductType
, aCount
, aDisclosureDate
, aBrand
, aAdditionalImageLink
, aExcludedDestination
, aVideoLink
, aCapacity
, aGtin
, aAgeGroup
, aIncludedDestination
, aGender
, aSuggestedRetailPrice
, aItemGroupId
, aRichProductContent
, aTargetClientId
, aSizeType
, aReleaseDate
, aTitle
, aMpn
, aProductPageURL
, aDescription
, aTheme
-- ** Xgafv
, Xgafv (..)
-- ** IssueSeverity
, IssueSeverity (..)
-- ** Price
, Price
, price
, pAmount
, pCurrency
-- ** Product
, Product
, product
, pParent
, pDestinationStatuses
, pTargetCountry
, pName
, pAttributes
, pIssues
, pContentLanguage
, pProductId
-- ** Issue
, Issue
, issue
, issAttribute
, issDestination
, issSeverity
, issResolution
, issTitle
, issType
, issTimestamp
, issDescription
-- ** ListProductsResponse
, ListProductsResponse
, listProductsResponse
, lprNextPageToken
, lprProducts
) where
import Network.Google.Prelude
import Network.Google.Manufacturers.Types
import Network.Google.Resource.Manufacturers.Accounts.Products.Delete
import Network.Google.Resource.Manufacturers.Accounts.Products.Get
import Network.Google.Resource.Manufacturers.Accounts.Products.List
import Network.Google.Resource.Manufacturers.Accounts.Products.Update
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Manufacturer Center API service.
-- Servant-style alternative of the four account-product resources
-- (list, get, delete, update) imported above.
type ManufacturersAPI =
     AccountsProductsListResource :<|>
       AccountsProductsGetResource
       :<|> AccountsProductsDeleteResource
       :<|> AccountsProductsUpdateResource
| brendanhay/gogol | gogol-manufacturers/gen/Network/Google/Manufacturers.hs | mpl-2.0 | 4,439 | 0 | 7 | 1,104 | 524 | 378 | 146 | 129 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.IdentityToolkit.RelyingParty.GetAccountInfo
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the account info.
--
-- /See:/ <https://developers.google.com/identity-toolkit/v3/ Google Identity Toolkit API Reference> for @identitytoolkit.relyingparty.getAccountInfo@.
module Network.Google.Resource.IdentityToolkit.RelyingParty.GetAccountInfo
(
-- * REST Resource
RelyingPartyGetAccountInfoResource
-- * Creating a Request
, relyingPartyGetAccountInfo
, RelyingPartyGetAccountInfo
-- * Request Lenses
, rpgaiPayload
) where
import Network.Google.IdentityToolkit.Types
import Network.Google.Prelude
-- | A resource alias for @identitytoolkit.relyingparty.getAccountInfo@ method which the
-- 'RelyingPartyGetAccountInfo' request conforms to.
-- Encodes POST /identitytoolkit/v3/relyingparty/getAccountInfo with a
-- JSON request body and JSON response.
type RelyingPartyGetAccountInfoResource =
     "identitytoolkit" :>
       "v3" :>
         "relyingparty" :>
           "getAccountInfo" :>
             QueryParam "alt" AltJSON :>
               ReqBody '[JSON]
                 IdentitytoolkitRelyingPartyGetAccountInfoRequest
                 :> Post '[JSON] GetAccountInfoResponse
-- | Returns the account info.
--
-- /See:/ 'relyingPartyGetAccountInfo' smart constructor.
-- Newtype wrapper over the request payload; construct via the smart
-- constructor below rather than the raw constructor.
newtype RelyingPartyGetAccountInfo =
  RelyingPartyGetAccountInfo'
    { _rpgaiPayload :: IdentitytoolkitRelyingPartyGetAccountInfoRequest
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RelyingPartyGetAccountInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rpgaiPayload'
-- Smart constructor wrapping the mandatory payload.
relyingPartyGetAccountInfo
    :: IdentitytoolkitRelyingPartyGetAccountInfoRequest -- ^ 'rpgaiPayload'
    -> RelyingPartyGetAccountInfo
relyingPartyGetAccountInfo pRpgaiPayload_ =
  RelyingPartyGetAccountInfo' {_rpgaiPayload = pRpgaiPayload_}
-- | Multipart request metadata.
-- Lens onto the wrapped request payload.
rpgaiPayload :: Lens' RelyingPartyGetAccountInfo IdentitytoolkitRelyingPartyGetAccountInfoRequest
rpgaiPayload
  = lens _rpgaiPayload (\ s a -> s{_rpgaiPayload = a})
-- Execution recipe: response type, required OAuth scope, and the
-- client derived from the servant resource alias above.
instance GoogleRequest RelyingPartyGetAccountInfo
         where
        type Rs RelyingPartyGetAccountInfo =
             GetAccountInfoResponse
        type Scopes RelyingPartyGetAccountInfo =
             '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient RelyingPartyGetAccountInfo'{..}
          = go (Just AltJSON) _rpgaiPayload
              identityToolkitService
          where go
                  = buildClient
                      (Proxy :: Proxy RelyingPartyGetAccountInfoResource)
                      mempty
| brendanhay/gogol | gogol-identity-toolkit/gen/Network/Google/Resource/IdentityToolkit/RelyingParty/GetAccountInfo.hs | mpl-2.0 | 3,305 | 0 | 13 | 672 | 308 | 189 | 119 | 53 | 1 |
module Codewars.Kata.IsUpperCase where
-- | True when the string contains no ASCII lowercase letter
-- ('a'..'z').  Digits, punctuation and the empty string all pass.
-- NOTE: non-ASCII lowercase (e.g. accented letters) is not checked.
isUpperCase :: String -> Bool
isUpperCase = all (`notElem` ['a' .. 'z'])
| ice1000/OI-codes | codewars/201-300/is-the-string-uppercase.hs | agpl-3.0 | 114 | 0 | 8 | 16 | 39 | 24 | 15 | 3 | 1 |
module Handler.Page where
import Import
import qualified Data.Aeson as Json
import Data.Text as Text (cons, foldr)
import Data.Char (isDigit, isLetter, toLower)
import Network.HTTP.Types.Status (conflict409)
-- | All pages belonging to the current deployment.
queryPages :: Handler [(Entity Page)]
queryPages = do
  d <- getDeploymentId
  runDB $ select $ from $ \p -> do
    where_ $ p ^. PageDeployment ==. val d
    return p
-- | Form for creating a new page: a single name field that is
-- slugified and checked for uniqueness within the current deployment.
form :: Form Text
form extra = do
  (result, view) <- mreq pageName "" Nothing
  let widget = toWidget [whamlet|
<div>
  #{extra}
  ^{fvInput view}
  <input type="submit" class="btn btn-green" value="New Page">
$maybe err <- fvErrors view
  #{err}
|]
  return (result, widget)
  where
    pageName = check'em validate textField
    -- Slugify the submitted name and reject it if a page with that
    -- slug already exists (dummy key 0 only probes the unique index).
    validate s = do
      let s' = slug s
      d <- getDeploymentId
      m <- runDB $ checkUnique $ Page d s' (toSqlKey 0)
      return $ case m of
        Just _ -> Left ("Already exists" :: Text)
        Nothing -> Right s'
    check'em = checkM
    -- Right fold carrying (right-neighbour-was-underscore, suffix):
    -- collapses runs of replacement underscores down to one.
    slug str = snd $ Text.foldr (\c (b, s) -> new b s c) (False, "") str
    new b s c =
      let r = rep c
          b' = r == '_'
      in (b', if b && b' then s else r `cons` s)
    -- Keep digits, lower-case letters; replace everything else by '_'.
    rep c
      | isDigit c = c
      | isLetter c = toLower c
      | otherwise = '_'
-- | List all pages and render the new-page form.
getPageR :: Handler Html
getPageR = do
  ps <- queryPages
  (widget, enc) <- generateFormPost form
  defaultLayout $ setTitle "Pages" >> $(widgetFile "page")
-- | Handle submission of the new-page form.  On success a page (with
-- a fresh default root piece) is created and the browser redirected to
-- its edit screen; on failure the listing is re-rendered with errors.
postPageNewR :: Handler Html
postPageNewR = do
  ((result, widget), enc) <- runFormPost form
  case result of
    FormSuccess name -> newPage name >>= redirect . PageEditR
    _ -> queryPages >>= \ps -> defaultLayout (setTitle "Pages" >> $(widgetFile "page"))
  where
    newPage name = do
      d <- getDeploymentId
      -- every page owns a root piece; create it first
      piece <- runDB $ insert $ Piece d "default"
      runDB $ insert $ Page d name piece
-- | Edit screen for a single page; 404 when the page does not belong
-- to the current deployment.
getPageEditR :: PageId -> Handler Html
getPageEditR pid = do
  page <- queryPage
  defaultLayout $ do
    setTitle $ "Edit " <> (toHtml $ pageName page)
    $(widgetFile "page-edit")
    renderPiece $ pagePiece page
  where
    queryPage :: Handler Page
    queryPage = do
      d <- getDeploymentId
      p <- runDB $ select $ from $ \p -> do
        where_ $ p ^. PageId ==. val pid &&. p ^. PageDeployment ==. val d
        return p
      case p of
        -- exactly one row expected; anything else is treated as missing
        ((Entity _ page):[]) -> return page
        _ -> notFound
-- | AJAX: update the template of a piece, scoped to the current
-- deployment so foreign pieces cannot be modified.
postAjaxPagePieceR :: PieceId -> Handler Json.Value
postAjaxPagePieceR pid = do
  template <- getJson $ \o -> o .: "template"
  d <- getDeploymentId
  runDB $ update $ \p -> do
    set p [ PieceTemplate =. val template ]
    where_ $ p ^. PieceId ==. val pid &&. p ^. PieceDeployment ==. val d
  return Json.Null
-- | AJAX: delete a piece and its data rows.  Responds 409 while any
-- page still uses the piece as its root, and 404 when the piece
-- belongs to another deployment.
deleteAjaxPagePieceR :: PieceId -> Handler Json.Value
deleteAjaxPagePieceR pid = do
  (Value pd, Value mp) <- dbReq $ select $ from $ \(piece `LeftOuterJoin` page) -> do
    on $ just (piece ^. PieceId) ==. page ?. PagePiece
    where_ $ piece ^. PieceId ==. val pid
    return ( piece ^. PieceDeployment, page ?. PageId )
  d <- getDeploymentId
  -- still referenced by a page -> conflict
  when (isJust mp) (sendResponseStatus conflict409 ())
  when (pd /= d) notFound
  runDB $ do
    -- delete dependent data rows first, then the piece itself
    delete $ from $ \p -> do
      where_ $ p ^. PieceDataPiece ==. val pid
    delete $ from $ \p -> do
      where_ $ p ^. PieceId ==. val pid
  return Json.Null
-- | AJAX: upsert one key of a piece's data.  The JSON body carries a
-- type and a value; 'Reference' values must name a piece owned by the
-- same deployment, and the target piece itself must be ours.
postAjaxPageDataR :: PieceId -> Text -> Handler Json.Value
postAjaxPageDataR pid key = do
  (mt, v) <- getJson $ \o -> do
    t <- o .: "type"
    v <- o .: "value"
    return (readMay t, v)
  t <- maybe (invalidArgs []) return mt
  d <- getDeploymentId
  -- If linking to another piece, check it's ours.
  when (t == Reference) $ do
    ref <- maybe
      (invalidArgs [])
      (return . toSqlKey . fromIntegral)
      (parseUnsigned v)
    r <- runDB $ select $ from $ \p -> do
      where_ $ p ^. PieceId ==. val ref
      return ( p ^. PieceDeployment )
    case r of
      ((Value rd):_) -> if (rd /= d)
        then invalidArgs ["Reference piece does not exist"]
        else return ()
      _ -> invalidArgs ["Reference piece does not exist"]
  r <- runDB $ select $ from $ \p -> do
    where_ $ p ^. PieceId ==. val pid
    return ( p ^. PieceDeployment )
  case r of
    ((Value pd):_) -> do
      when (pd /= d) notFound
      c <- getCount
      case c of
        -- no existing row for this key -> insert, otherwise update
        ((Value 0):_) -> insertNew t v
        _ -> updateExisting t v
    [] -> notFound
  -- PieceData might actually use special css etc...
  return Json.Null
  where
    getCount :: Handler [Value Int]
    getCount = runDB $ select $ from $ \p -> do
      where_ $ p ^. PieceDataPiece ==. val pid
        &&. p ^. PieceDataKey ==. val key
      return countRows
    insertNew t v = void $ runDB $ insert $ PieceData pid t key v
    updateExisting t v = runDB $ update $ \p -> do
      set p [ PieceDataType =. val t, PieceDataValue =. val v ]
      where_ $ p ^. PieceDataPiece ==. val pid
        &&. p ^. PieceDataKey ==. val key
| sir-murray/lol | Handler/Page.hs | agpl-3.0 | 5,186 | 0 | 19 | 1,681 | 1,894 | 923 | 971 | -1 | -1 |
-- Copyright 2020-2021 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module IntErrors where
import Data.Int (Int8, Int16)
x0, x1 :: Int8
-- Want: 128 is too large
x0 = 128
-- Want: -129 is too small
x1 = -129
x2 :: Int8
-- Want: out of range for Int, for conversion to Int8
x2 = 9223372036854775808
x3, x4 :: Int16
-- Want: 32768 is too large
x3 = 32768
-- Want: -32769 is too small
x4 = -32769
x5, x6 :: Int
-- Want: too large
x5 = 9223372036854775808
-- Want: too small
x6 = -9223372036854775809
-- NOTE(review): this module is a negative test fixture for the
-- dependent-literals plugin; each "Want:" comment states the expected
-- diagnostic, so the out-of-range literals must NOT be "fixed".
| google/hs-dependent-literals | dependent-literals-plugin/tests/IntErrors.hs | apache-2.0 | 1,028 | 0 | 5 | 192 | 104 | 73 | 31 | 13 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( makeApplication
, getApplicationDev
, makeFoundation
) where
import Import
import Yesod.Default.Config
import Yesod.Default.Main
import Yesod.Default.Handlers
import Network.Wai.Middleware.RequestLogger
( mkRequestLogger, outputFormat, OutputFormat (..), IPAddrSource (..), destination
)
import qualified Network.Wai.Middleware.RequestLogger as RequestLogger
import Network.HTTP.Client.Conduit (newManager)
import Control.Concurrent (forkIO, threadDelay)
import System.Log.FastLogger (newStdoutLoggerSet, defaultBufSize, flushLogStr)
import Network.Wai.Logger (clockDateCacher)
import Data.Default (def)
import Yesod.Core.Types (loggerSet, Logger (Logger))
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Home
import Handler.AuthJwt
import Handler.Welcome
-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
mkYesodDispatch "App" resourcesApp
-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
-- | Assemble the WAI application: build the foundation, install the
-- request-logging middleware, and return the app with its log function.
makeApplication :: AppConfig DefaultEnv Extra -> IO (Application, LogFunc)
makeApplication conf = do
    foundation <- makeFoundation conf
    -- Initialize the logging middleware
    logWare <- mkRequestLogger def
        { outputFormat =
            if development
                then Detailed True
                else Apache FromSocket
        , destination = RequestLogger.Logger $ loggerSet $ appLogger foundation
        }
    -- Create the WAI application and apply middlewares
    app <- toWaiAppPlain foundation
    let logFunc = messageLoggerSource foundation (appLogger foundation)
    return (logWare $ defaultMiddlewaresNoLogging app, logFunc)
-- | Loads up any necessary settings, creates your foundation datatype, and
-- performs some initialization.
makeFoundation :: AppConfig DefaultEnv Extra -> IO App
makeFoundation conf = do
    manager <- newManager
    s <- staticSite
    loggerSet' <- newStdoutLoggerSet defaultBufSize
    (getter, updater) <- clockDateCacher
    -- If the Yesod logger (as opposed to the request logger middleware) is
    -- used less than once a second on average, you may prefer to omit this
    -- thread and use "(updater >> getter)" in place of "getter" below.  That
    -- would update the cache every time it is used, instead of every second.
    -- Background loop: refresh the cached timestamp and flush the log
    -- buffer once per second.
    let updateLoop = do
            threadDelay 1000000
            updater
            flushLogStr loggerSet'
            updateLoop
    _ <- forkIO updateLoop
    let logger = Yesod.Core.Types.Logger loggerSet' getter
        foundation = App conf s manager logger
    return foundation
-- for yesod devel
-- | Entry point used by the @yesod devel@ reload loop; drops the log
-- function returned by 'makeApplication'.
getApplicationDev :: IO (Int, Application)
getApplicationDev =
    defaultDevelApp loader (fmap fst . makeApplication)
  where
    loader = Yesod.Default.Config.loadConfig (configSettings Development)
        { csParseExtra = parseExtra
        }
| carlohamalainen/rapid-connect-yesod-demo | Application.hs | bsd-2-clause | 3,244 | 0 | 13 | 648 | 541 | 300 | 241 | -1 | -1 |
module Irrigation where
import Data.List (maximumBy)
import Data.Ord (comparing)
import Control.Arrow ((&&&))
-- | A cell coordinate on the field.
type Location = (Int, Int)

-- | A sprinkler placed on the field, watering every cell within
-- 'radius' (Euclidean distance, truncated to an integer).
data Sprinkler = Sprinkler
  { location :: Location
  , radius   :: Int
  }

-- | A rectangular field; 'crops' lists the occupied cells.
data CropField = CropField
  { rows    :: Int
  , columns :: Int
  , crops   :: [Location]
  }

-- | Every grid point of the field, inclusive of both bounds, i.e.
-- (rows+1) * (columns+1) points in row-major order.
grid :: CropField -> [Location]
grid field =
  [ (r, c)
  | r <- [0 .. rows field]
  , c <- [0 .. columns field]
  ]

-- | Euclidean distance between two points, rounded down to an Int.
intDistance :: Location -> Location -> Int
intDistance (ax, ay) (bx, by) = floor (sqrt (dx * dx + dy * dy))
  where
    dx, dy :: Double
    dx = fromIntegral (ax - bx)
    dy = fromIntegral (ay - by)
-- | The grid location that maximises 'score' for a sprinkler of the
-- given radius.  (Parameter renamed from @radius@, which shadowed the
-- 'Sprinkler' record selector of the same name.)
-- NOTE(review): 'maximumBy' is partial — this diverges if 'grid' is
-- empty, i.e. when rows/columns are negative; confirm callers never
-- construct such a field.
bestLocation :: CropField -> Int -> Location
bestLocation field r = fst $ maximumBy (comparing snd) $ map (location &&& score field) sprinklers
  where
    sprinklers = [Sprinkler loc r | loc <- grid field]
-- | Number of crops watered by the sprinkler, minus one when the
-- sprinkler itself sits on a crop (which kills that crop).
score :: CropField -> Sprinkler -> Int
score field s = penalty + watered
  where
    watered = length (filter (inRange s) (crops field))
    penalty
      | location s `elem` crops field = -1
      | otherwise = 0
-- | Is the point within the sprinkler's (integer-truncated) radius?
inRange :: Sprinkler -> Location -> Bool
inRange s p = intDistance (location s) p <= radius s
| fffej/haskellprojects | daily-programmer/18-03-2015/Irrigation.hs | bsd-2-clause | 1,177 | 0 | 11 | 298 | 464 | 253 | 211 | 28 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Query.Language
( testQuery_Language
, genFilter
, genJSValue
) where
import Test.QuickCheck
import Control.Applicative
import Control.Arrow (second)
import Text.JSON
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Ganeti.Query.Language
-- | Top-level generator for 'Filter' values. Picks a random depth
-- bound between 0 and 10, then delegates to the sized worker
-- 'genFilter'' so the generated filters stay reasonably small.
genFilter :: Gen (Filter FilterField)
genFilter = genFilter' =<< choose (0, 10)
-- | Custom generator for filters that correctly halves the state of
-- the generators at each recursive step, per the QuickCheck
-- documentation, in order not to run out of memory.
genFilter' :: Int -> Gen (Filter FilterField)
-- Depth 0: leaf filters only — no recursive constructors.
genFilter' 0 =
  oneof [ pure EmptyFilter
        , TrueFilter <$> genName
        , EQFilter <$> genName <*> value
        , LTFilter <$> genName <*> value
        , GTFilter <$> genName <*> value
        , LEFilter <$> genName <*> value
        , GEFilter <$> genName <*> value
        , RegexpFilter <$> genName <*> arbitrary
        , ContainsFilter <$> genName <*> value
        ]
  where value = oneof [ QuotedString <$> genName
                      , NumericValue <$> arbitrary
                      ]
-- Depth n: wrap strictly smaller sub-filters in and/or/not.
genFilter' n =
  oneof [ AndFilter <$> vectorOf n'' (genFilter' n')
        , OrFilter <$> vectorOf n'' (genFilter' n')
        , NotFilter <$> genFilter' n'
        ]
  where n' = n `div` 2 -- sub-filter generator size
        n'' = max n' 2 -- but we don't want empty or 1-element lists,
                       -- so use this for and/or filter list length
-- Template Haskell-derived 'Arbitrary' instances for the simple
-- enumeration types.
$(genArbitrary ''QueryTypeOp)
$(genArbitrary ''QueryTypeLuxi)
$(genArbitrary ''ItemType)
instance Arbitrary FilterRegex where
  arbitrary = genName >>= mkRegex -- a name should be a good regex
$(genArbitrary ''ResultStatus)
$(genArbitrary ''FieldType)
$(genArbitrary ''FieldDefinition)
-- | Generates an arbitrary JSValue. We do this via a function a not
-- via arbitrary instance since that would require us to define an
-- arbitrary for JSValue, which can be recursive, entering the usual
-- problems with that; so we only generate the base types, not the
-- recursive ones, and not 'JSNull', which we can't use in a
-- 'RSNormal' 'ResultEntry'.
genJSValue :: Gen JSValue
genJSValue =
  oneof [ JSBool <$> arbitrary
        , JSRational <$> pure False <*> arbitrary
        , JSString <$> (toJSString <$> arbitrary)
        , (JSArray . map showJSON) <$> (arbitrary::Gen [Int])
        , JSObject . toJSObject . map (second showJSON) <$>
          (arbitrary::Gen [(String, Int)])
        ]
-- | Generates a 'ResultEntry' value. Only 'RSNormal' entries carry a
-- JSON payload; every other status carries 'Nothing'.
genResultEntry :: Gen ResultEntry
genResultEntry = do
  rs <- arbitrary
  rv <- case rs of
          RSNormal -> Just <$> genJSValue
          _ -> pure Nothing
  return $ ResultEntry rs rv
$(genArbitrary ''QueryFieldsResult)
-- | Tests that serialisation/deserialisation of filters is
-- idempotent.
prop_filter_serialisation :: Property
prop_filter_serialisation = forAll genFilter testSerialisation
-- | Tests that filter regexes are serialised correctly.
prop_filterregex_instances :: FilterRegex -> Property
prop_filterregex_instances rex =
  printTestCase "failed JSON encoding" (testSerialisation rex)
-- | Tests 'ResultStatus' serialisation.
prop_resultstatus_serialisation :: ResultStatus -> Property
prop_resultstatus_serialisation = testSerialisation
-- | Tests 'FieldType' serialisation.
prop_fieldtype_serialisation :: FieldType -> Property
prop_fieldtype_serialisation = testSerialisation
-- | Tests 'FieldDef' serialisation.
prop_fielddef_serialisation :: FieldDefinition -> Property
prop_fielddef_serialisation = testSerialisation
-- | Tests 'ResultEntry' serialisation. Needed especially as this is
-- done manually, and not via buildObject (different serialisation
-- format).
prop_resultentry_serialisation :: Property
prop_resultentry_serialisation = forAll genResultEntry testSerialisation
-- | Tests 'FieldDef' serialisation. We use a made-up maximum limit of
-- 20 for the generator, since otherwise the lists become too long and
-- we don't care so much about list length but rather structure.
prop_fieldsresult_serialisation :: Property
prop_fieldsresult_serialisation =
  forAll (resize 20 arbitrary::Gen QueryFieldsResult) testSerialisation
-- | Tests 'ItemType' serialisation.
prop_itemtype_serialisation :: ItemType -> Property
prop_itemtype_serialisation = testSerialisation
-- Register all properties above with the test framework.
testSuite "Query/Language"
  [ 'prop_filter_serialisation
  , 'prop_filterregex_instances
  , 'prop_resultstatus_serialisation
  , 'prop_fieldtype_serialisation
  , 'prop_fielddef_serialisation
  , 'prop_resultentry_serialisation
  , 'prop_fieldsresult_serialisation
  , 'prop_itemtype_serialisation
  ]
| apyrgio/snf-ganeti | test/hs/Test/Ganeti/Query/Language.hs | bsd-2-clause | 6,212 | 0 | 11 | 1,165 | 818 | 450 | 368 | 85 | 2 |
{-# Language MultiParamTypeClasses, FlexibleInstances, TypeFamilies #-}
module VarArgs where
import Data.Monoid
{-
This is basically half of the polyToMonoid package as seen on Hackage.
-}
-- | Things of type @a@ that can be injected into a monoid @m@.
class Monoid m => Monoidable a m where
  toMonoid :: a -> m
-- | Append the monoidal image of one more argument onto an accumulator.
squish :: Monoidable a m => m -> a -> m
squish m a = m `mappend` toMonoid a
-- | Poly-variadic consumer: @ctm acc@ either terminates (yielding the
-- accumulated monoid, via the 'Terminate' instance) or accepts one
-- more 'Monoidable' argument (via the function instance below).
class Monoid m => CPolyVariadic m r where
  ctm :: m -> r
-- | Newtype marking the end of a variadic application chain.
newtype Terminate m = Terminate { terminate :: m }
instance Monoid m => Monoid (Terminate m) where
  mempty = Terminate mempty
  mappend a b = Terminate $ terminate a `mappend` terminate b
-- Base case: the result type fixes @m' ~ m@, so we just wrap the
-- accumulator.
instance (Monoid m', m' ~ m) => CPolyVariadic m (Terminate m') where
  ctm = Terminate
-- Recursive case: absorb one argument into the accumulator, recurse.
instance (Monoidable a m, CPolyVariadic m r) => CPolyVariadic m (a->r) where
  ctm acc = ctm . squish acc
| farre/grin | VarArgs.hs | bsd-3-clause | 773 | 0 | 8 | 157 | 264 | 138 | 126 | 17 | 1 |
module Mp3 (playMp3Bytes) where
import Kerchief.Prelude
import Control.Concurrent (forkIO)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import System.FilePath ((</>))
import System.IO -- (hClose)
import System.IO.Silently (hSilence)
import System.Process
-- | Exec hard-coded "mpg123 -" with given bytes.
-- Runs asynchronously on a forked thread, silencing the player's
-- stdout/stderr; the MP3 bytes are streamed to mpg123 on its stdin.
playMp3Bytes :: ByteString -> IO ()
playMp3Bytes bytes = void . forkIO . hSilence' [stdout, stderr] $ do
    (Just hin, _, _, hprocess) <- createProcess (shell "mpg123 -") { std_in = CreatePipe }
    BS.hPut hin bytes
    -- Close stdin so mpg123 sees EOF and can terminate.
    hClose hin
    void $ waitForProcess hprocess
-- hSilence doesn't preserve BufferMode of handles it silences.
-- This wrapper records each handle's buffering mode, runs the
-- silenced action, then restores the saved modes afterwards.
hSilence' :: [Handle] -> IO a -> IO a
hSilence' hs action = do
    saved <- mapM hGetBuffering hs
    result <- hSilence hs action
    sequence_ (zipWith hSetBuffering hs saved)
    return result
| mitchellwrosen/kerchief | src/Mp3.hs | bsd-3-clause | 957 | 0 | 12 | 235 | 270 | 142 | 128 | 21 | 1 |
-- This module could potentially be rewritten and heavily simplified once the
-- custom context menu spec in HTML 5.1 is adopted by all major browsers. At
-- time of writing the spec is only a recommendation and only implemented by
-- Mozilla Firefox.
module Graphics.UI.Threepenny.Ext.Contextmenu (
-- * Menu items and constructors.
MenuItem(..), MenuItemValue(..), actionMenuItem, nestedMenuItem,
-- * Context menu.
contextMenu
) where
import Control.Monad (void, when)
import qualified Graphics.UI.Threepenny as UI
import Graphics.UI.Threepenny.Core
import Graphics.UI.Threepenny.Ext.Contextmenu.Style
import Graphics.UI.Threepenny.Ext.Contextmenu.Util
-- |A menu item has some text to display and a 'MenuItemValue'.
data MenuItem = MenuItem { mIText :: String, mIValue :: MenuItemValue }
-- |What a menu item does when selected: either a list of UI actions
-- to execute on click, or a nested submenu shown on hover.
data MenuItemValue = MenuItemActions [UI ()] | NestedMenu [MenuItem]
-- |Constructor for a menu item that runs the given UI actions when
-- clicked; the actions' results are discarded.
actionMenuItem :: String -> [UI u] -> MenuItem
actionMenuItem text actions = MenuItem text (MenuItemActions voided)
  where voided = map void actions
-- |Constructor for a menu item that opens a nested submenu on hover.
-- A trailing chevron is appended to the label to signal the submenu.
nestedMenuItem :: String -> [MenuItem] -> MenuItem
nestedMenuItem text nested = MenuItem (text ++ " ›") (NestedMenu nested)
-- |Attach a context menu to the document body. The context menu is activated by
-- a contextmenu event from the given element. The event is prevented from
-- propagating.
contextMenu :: [MenuItem] -> Element -> UI ()
contextMenu items source = do
  -- Invisible full-screen target used to catch clicks outside the menu.
  rmTarget <- UI.div # set style rmTargetStyle
  let closeRmTarget = void $ dimensions "0" "0" rmTarget
  (menu, closeMenu, closeMenusNestedMenus) <- newMenu [closeRmTarget] items
  -- Display menu on a contextmenu event.
  on UI.contextmenu source $ \(x, y) ->
    displayAt x y menu >> dimensions "100vw" "100vh" rmTarget
  -- Hide everything on rmTarget click.
  on UI.mousedown rmTarget $ const $
    closeRmTarget >> closeMenu >> sequence closeMenusNestedMenus
  -- Hide nested menus on hover over rmTarget.
  on UI.hover rmTarget $ const $ sequence closeMenusNestedMenus
  -- Attach everything to the body, with a large z-index.
  parent <- UI.div #+ [element rmTarget, element menu]
                   # set UI.style [("z-index", "10000"),
                                   ("position", "absolute")]
  (askWindow >>= getBody) #+ [element parent]
  preventDefaultContextMenu source
-- |A tuple of a menu element, UI action to close it, and UI actions to close
-- any nested menus.
newMenu :: [UI ()] -> [MenuItem] -> UI (Element, UI (), [UI ()])
newMenu closeParents menuItems = do
  menu <- UI.li # set style menuStyle
  let closeMenu = void $ display "none" menu
  -- Tuples of menu items and UI actions to close respective nested menus.
  -- :: UI [(Element, [Action])]
  menuItemEls <- mapM (menuItem $ closeParents ++ [closeMenu]) menuItems
  element menu #+ map (element . fst) menuItemEls
  -- On hover over a menu item close any nested menus from *other* menu items.
  let closeOtherMenusOnHover ((el1, _), i1) xs =
        on UI.hover el1 $ const $ do
          let closeIfNotSelf ((_, closeEl2), i2) =
                when (i1 /= i2) (sequence_ closeEl2)
          mapM closeIfNotSelf xs
  mapPairsWithIndex menuItemEls closeOtherMenusOnHover
  return (menu, closeMenu, concat $ map snd menuItemEls)
-- |A tuple of a menu item element, and UI actions to close it and a potential
-- nested menu.
menuItem :: [UI ()] -> MenuItem -> UI (Element, [UI ()])
menuItem closeParents (MenuItem text value) = do
  menuItem <- UI.li # set UI.text text # set style menuItemStyle
  highlightWhileHover menuItem
  case value of
    MenuItemActions actions -> do
      -- On click close the entire menu and execute the UI actions.
      on UI.click menuItem $ const $ sequence $ closeParents ++ actions
      return (menuItem, [])
    NestedMenu nestedMenuItems -> do
      (nestedMenu, closeNestedMenu, closeNestedMenusNestedMenus)
        <- newMenu closeParents nestedMenuItems
      -- Position a nested menu relative to this menu item.
      -- element menuItemEl # set UI.position "relative"
      -- element nestedMenuEl # set UI.position "absolute" # set UI.right "0px" # set UI.top "0px"
      element menuItem #+ [element nestedMenu]
      -- On hover display the nested menu.
      on UI.hover menuItem $ const $ display "block" nestedMenu
      return (menuItem, [closeNestedMenu] ++ closeNestedMenusNestedMenus)
-- |Highlight an element while hovered over: light blue background on
-- enter, back to inherited background on leave.
highlightWhileHover :: Element -> UI ()
highlightWhileHover el = whileHover el
  (element el # set style [("background-color", "#DEF" )])
  (element el # set style [("background-color", "inherit")])
-- |CSS class used to identify elements on which to prevent a default context
-- menu from opening.
-- Added an explicit type signature: top-level bindings without one
-- trigger -Wmissing-signatures and rely on defaulting.
preventDefaultClass :: String
preventDefaultClass = "__prevent-default-context-menu"
-- |Prevent a default context menu opening on an element.
-- Tags the element with 'preventDefaultClass', then installs a JS
-- handler that calls preventDefault on contextmenu events for every
-- element carrying that class.
-- NOTE(review): 'set UI.class_' replaces the element's whole class
-- attribute — confirm no caller relies on pre-existing classes.
preventDefaultContextMenu :: Element -> UI ()
preventDefaultContextMenu el = do
  element el # set UI.class_ preventDefaultClass
  runFunction $ ffi "$(%1).bind('contextmenu', e => e.preventDefault())"
    ("." ++ preventDefaultClass)
| barischj/threepenny-gui-contextmenu | src/Graphics/UI/Threepenny/Ext/ContextMenu.hs | bsd-3-clause | 5,320 | 0 | 18 | 1,127 | 1,166 | 615 | 551 | 68 | 2 |
{-# LANGUAGE CPP #-}
-- | Gang Primitives.
module Data.Array.Repa.Eval.Gang
( theGang
, Gang, forkGang, gangSize, gangIO, gangST)
where
import GHC.IO
import GHC.ST
import GHC.Conc (forkOn)
import Control.Concurrent.MVar
import Control.Exception (assert)
import Control.Monad
import GHC.Conc (numCapabilities)
import System.IO
-- TheGang --------------------------------------------------------------------
-- | This globally shared gang is auto-initialised at startup and shared by all
-- Repa computations.
--
-- In a data parallel setting, it does not help to have multiple gangs
-- running at the same time. This is because a single data parallel
-- computation should already be able to keep all threads busy. If we had
-- multiple gangs running at the same time, then the system as a whole would
-- run slower as the gangs would contend for cache and thrash the scheduler.
--
-- If, due to laziness or otherwise, you try to start multiple parallel
-- Repa computations at the same time, then you will get the following
-- warning on stderr at runtime:
--
-- @Data.Array.Repa: Performing nested parallel computation sequentially.
-- You've probably called the 'compute' or 'copy' function while another
-- instance was already running. This can happen if the second version
-- was suspended due to lazy evaluation. Use 'deepSeqArray' to ensure that
-- each array is fully evaluated before you 'compute' the next one.
-- @
--
theGang :: Gang
{-# NOINLINE theGang #-}
-- NOINLINE keeps the unsafePerformIO from being duplicated, so the
-- gang is created once (sized to numCapabilities) and shared globally.
theGang
 = unsafePerformIO
 $ do   let caps = numCapabilities
        forkGang caps
-- Requests -------------------------------------------------------------------
-- | The 'Req' type encapsulates work requests for individual members of a gang.
data Req
        -- | Instruct the worker to run the given action.
        --   The Int argument is the worker's own thread index.
        = ReqDo (Int -> IO ())
        -- | Tell the worker that we're shutting the gang down.
        -- The worker should signal that it's receieved the request by
        -- writing to its result var before returning to the caller (forkGang).
        | ReqShutdown
-- Gang -----------------------------------------------------------------------
-- | A 'Gang' is a group of threads that execute arbitrary work requests.
data Gang
        = Gang
        { -- | Number of threads in the gang.
          _gangThreads :: !Int
          -- | Workers listen for requests on these vars.
        , _gangRequestVars :: [MVar Req]
          -- | Workers put their results in these vars.
        , _gangResultVars :: [MVar ()]
          -- | Indicates that the gang is busy.
        , _gangBusy :: MVar Bool
        }
-- Renders only the thread count, e.g. "<<4 threads>>".
instance Show Gang where
  showsPrec p (Gang n _ _ _)
        = showString "<<"
        . showsPrec p n
        . showString " threads>>"
-- | O(1). Yield the number of threads in the 'Gang'.
gangSize :: Gang -> Int
gangSize (Gang threadCount _ _ _) = threadCount
-- | Fork a 'Gang' with the given number of threads (at least 1).
-- Each worker is pinned to its own capability via 'forkOn' and paired
-- with a request MVar (to receive work) and a done MVar (to signal
-- completion).
forkGang :: Int -> IO Gang
forkGang n
 = assert (n > 0)
 $ do
        -- Create the vars we'll use to issue work requests.
        mvsRequest <- sequence $ replicate n $ newEmptyMVar
        -- Create the vars we'll use to signal that threads are done.
        mvsDone <- sequence $ replicate n $ newEmptyMVar
        -- Add finalisers so we can shut the workers down cleanly if they
        -- become unreachable.
        zipWithM_ (\varReq varDone
                        -> mkWeakMVar varReq (finaliseWorker varReq varDone))
                mvsRequest
                mvsDone
        -- Create all the worker threads
        zipWithM_ forkOn [0..]
                $ zipWith3 gangWorker
                        [0 .. n-1] mvsRequest mvsDone
        -- The gang is currently idle.
        busy <- newMVar False
        return $ Gang n mvsRequest mvsDone busy
-- | The worker thread of a 'Gang'.
-- The threads blocks on the MVar waiting for a work request.
-- On 'ReqDo' it runs the action, signals completion, and loops;
-- on 'ReqShutdown' it acknowledges on the done var and returns.
gangWorker :: Int -> MVar Req -> MVar () -> IO ()
gangWorker threadId varRequest varDone
 = do
        -- Wait for a request
        req <- takeMVar varRequest
        case req of
         ReqDo action
          -> do -- Run the action we were given.
                action threadId
                -- Signal that the action is complete.
                putMVar varDone ()
                -- Wait for more requests.
                gangWorker threadId varRequest varDone
         ReqShutdown
          -> putMVar varDone ()
-- | Finaliser for worker threads.
-- We want to shutdown the corresponding thread when it's MVar becomes
-- unreachable.
-- Without this Repa programs can complain about "Blocked indefinitely
-- on an MVar" because worker threads are still blocked on the request
-- MVars when the program ends. Whether the finalizer is called or not
-- is very racey. It happens about 1 in 10 runs when for the
-- repa-edgedetect benchmark, and less often with the others.
--
-- We're relying on the comment in System.Mem.Weak that says
-- "If there are no other threads to run, the runtime system will
-- check for runnablefinalizers before declaring the system to be
-- deadlocked."
--
-- If we were creating and destroying the gang cleanly we wouldn't need
-- this, but theGang is created with a top-level unsafePerformIO.
-- Hacks beget hacks beget hacks...
--
-- Sends 'ReqShutdown' to the worker and waits for the acknowledgement
-- on its done var before returning.
finaliseWorker :: MVar Req -> MVar () -> IO ()
finaliseWorker varReq varDone
 = do   putMVar varReq ReqShutdown
        takeMVar varDone
        return ()
-- | Issue work requests for the 'Gang' and wait until they complete.
--
-- If the gang is already busy then print a warning to `stderr` and just
-- run the actions sequentially in the requesting thread.
gangIO  :: Gang
        -> (Int -> IO ())
        -> IO ()
{-# NOINLINE gangIO #-}
gangIO gang@(Gang _ _ _ busy) action
 = do   -- Atomically claim the gang; 'b' is the previous busy state.
        b <- swapMVar busy True
        if b
         then do
                -- Nested parallelism: fall back to sequential execution.
                -- Note the busy flag is NOT reset here — the outer
                -- computation that set it will reset it.
                seqIO gang action
         else do
                parIO gang action
                _ <- swapMVar busy False
                return ()
-- | Run an action on the gang sequentially.
-- Emits a warning because this only happens on (accidental) nested
-- parallel computation; the strings below are user-facing output.
seqIO :: Gang -> (Int -> IO ()) -> IO ()
seqIO (Gang n _ _ _) action
 = do   hPutStr stderr
         $ unlines
         [ "Data.Array.Repa: Performing nested parallel computation sequentially."
         , "  You've probably called the 'compute' or 'copy' function while another"
         , "  instance was already running. This can happen if the second version"
         , "  was suspended due to lazy evaluation. Use 'deepSeqArray' to ensure"
         , "  that each array is fully evaluated before you 'compute' the next one."
         , "" ]
        mapM_ action [0 .. n-1]
-- | Run an action on the gang in parallel: hand one request to every
-- worker, then block until each worker reports completion.
parIO :: Gang -> (Int -> IO ()) -> IO ()
parIO (Gang _ reqVars doneVars _) action
 = do
        -- Send requests to all the threads.
        mapM_ (flip putMVar (ReqDo action)) reqVars
        -- Wait for all the requests to complete.
        mapM_ takeMVar doneVars
-- | Same as 'gangIO' but in the 'ST' monad.
-- NOTE(review): implemented via unsafeIOToST/unsafeSTToIO, so the ST
-- action is actually run on IO worker threads — confirm callers only
-- pass actions whose state-thread escape is benign.
gangST :: Gang -> (Int -> ST s ()) -> ST s ()
gangST g p = unsafeIOToST . gangIO g $ unsafeSTToIO . p
| kairne/repa-lts | Data/Array/Repa/Eval/Gang.hs | bsd-3-clause | 7,148 | 27 | 13 | 1,971 | 1,047 | 563 | 484 | 100 | 2 |
module RDiffFS (rdiffFSMain) where
import qualified Data.ByteString.Char8 as B
import Foreign.C.Error
import System.Posix.Types
import System.Posix.Files
import System.Posix.IO
import System.IO.Error -- isPermissionError etc.
import System.Fuse
import System.Environment -- getArgs, withArgs
import System.Directory -- doesDirectoryExist, canonicalizePath, getDirectoryContents
import System.FilePath -- pathSeparator, </>, takeFileName
import Text.Regex.Posix
import Data.String.Utils -- replace (from libghc6-missingh-dev)
import System.Posix.Directory
import Data.List -- isInfixOf, isSuffixOf, sort
import Data.Maybe -- mapMaybe
import Foreign -- .&.
import Control.Arrow -- first
import Codec.Compression.GZip
import qualified Data.ByteString.Lazy.Char8 as L
import System.IO -- hPutStrLn, stderr
import Control.Exception -- catch, try
import Rdiff
-- The main method is so short I feel it's best to get it out of the way here.
-- | Entry point: validate the argument list, canonicalise and check
-- the backup directory, then hand the remaining arguments to FUSE.
-- 'head'/'tail' are safe here because 'verifyArgs' guarantees at
-- least two arguments.
rdiffFSMain :: IO ()
rdiffFSMain = do
    args <- getArgs
    verifyArgs args
    path <- canonicalizePath $ head args
    ensureRdiffBackupDir path
    withArgs (tail args) $ fuseMain (rdiffFSOps path) defaultExceptionHandler
-- | Usage string shown when argument validation fails.
usage :: String
usage = "rdifffs <rdiff-backup directory> <mountpoint>"
-- | Path to the root of the rdiff-backup repository.
type RdiffContext = String
-- | A backup instance, identified by its timestamp string: either the
-- current mirror or one of the older increments.
data RdiffBackup = Current String | Increment String deriving (Eq,Show)
-- | Extract the timestamp string from a backup, whichever kind it is.
getRdiffBackupDate :: RdiffBackup -> String
getRdiffBackupDate (Current x) = x
getRdiffBackupDate (Increment x) = x
-- we need at least two CMDs: one for us (underlay), one for fuse (mntpoint)
-- | Abort with a usage message unless at least two command-line
-- arguments are present (the backup directory and the mountpoint,
-- possibly followed by extra FUSE options).
-- Rewritten from a 'length xs > 1' guard to a pattern match: the
-- pattern checks only the first two cells instead of traversing the
-- whole list, and drops the redundant 'otherwise' equation style.
verifyArgs :: [String] -> IO ()
verifyArgs (_:_:_) = return ()
verifyArgs _ = error $
    "invalid number of command-line arguments.\n" ++ "usage: " ++ usage
-- | Heuristic check that @path@ is an rdiff-backup repository: the
-- directory itself, its rdiff-backup-data subdirectory, and the
-- increments directory inside that must all exist.
isRdiffBackupDir :: FilePath -> IO Bool
isRdiffBackupDir path = do
    present <- mapM doesDirectoryExist required
    return (and present)
  where
    required = [ path
               , path </> "rdiff-backup-data"
               , path </> "rdiff-backup-data" </> "increments" ]
-- | Abort the program unless @path@ passes 'isRdiffBackupDir'.
ensureRdiffBackupDir :: FilePath -> IO ()
ensureRdiffBackupDir path = do
    answer <- isRdiffBackupDir path
    if answer
        then return ()
        else error "not a valid rdiff-backup directory"
-- Regex matching (and capturing) the rdiff-backup timestamp embedded
-- in metadata filenames; "D" is shorthand for a digit, expanded here.
datetime_regex = replace "D" "[0-9]" "\\.(DDDD-DD-DDTDD:DD:DD(Z|[-+]DD:DD))\\."
-- Regexes for the current_mirror marker file and increment dirs.
current_mirror_regex = "^current_mirror" ++ datetime_regex ++ "data$"
increment_regex = "^increments" ++ datetime_regex ++ "dir$"
-- | Find the current-mirror marker among the rdiff-backup-data
-- filenames and return it as a 'Current' backup.
-- Partial: calls 'error' when no marker file is present.
getCurrentMirror :: [String] -> RdiffBackup
getCurrentMirror [] = error "missing current_mirror file"
getCurrentMirror (x:xs) | x =~ current_mirror_regex = Current $ extractDate x
                        | otherwise = getCurrentMirror xs
-- | Return every increment directory name as an 'Increment' backup.
getIncrements :: [String] -> [RdiffBackup]
getIncrements files = map (Increment . extractDate) $ filter (=~ increment_regex) files
-- | Pull the timestamp out of a metadata filename by matching
-- 'datetime_regex' and returning the first capture group.
-- Partial (via 'head') when the name does not match; callers filter
-- candidates against the regex first.
-- Fixed: the unused bindings x, y, z are now wildcards, silencing
-- -Wunused-matches without changing behaviour.
extractDate :: String -> String
extractDate bigstr = head $ matchData (bigstr =~ datetime_regex) where
    matchData :: (String,String,String,[String]) -> [String]
    matchData (_, _, _, submatches) = submatches
-- TODO: better name
-- | List all backups in the repository: the current mirror first,
-- then the increments, by scanning rdiff-backup-data filenames.
getDates :: RdiffContext -> IO [RdiffBackup]
getDates repo = do
    l <- getDirectoryContents $ repo </> "rdiff-backup-data"
    return $ (getCurrentMirror l) : (getIncrements l)
-- Unpack a string from an RdiffBackup type
-- NOTE(review): identical to 'getRdiffBackupDate' above — consider
-- keeping only one of the two.
unRdiffBackup :: RdiffBackup -> String
unRdiffBackup (Current x) = x
unRdiffBackup (Increment x) = x
-- | File-handle type for the FUSE operations; no per-open state is kept.
type HT = ()
-- | The table of FUSE callbacks, each closed over the repository path.
rdiffFSOps :: RdiffContext -> FuseOperations HT
rdiffFSOps repo = defaultFuseOps { fuseGetFileStat = rdiffGetFileStat repo
                                 , fuseOpen        = rdiffOpen repo
                                 , fuseRead        = rdiffRead repo
                                 , fuseOpenDirectory = rdiffOpenDirectory repo
                                 , fuseReadDirectory = rdiffReadDirectory repo
                                 , fuseGetFileSystemStats = rdiffGetFileSystemStats
                                 , fuseReadSymbolicLink = rdiffReadSymbolicLink repo
                                 }
-- Build a FileStat of the given entry type and size, owned by the
-- requesting user/group, with fixed r-x permissions for everyone and
-- zeroed timestamps.
buildStat ctx entrytype fsize = FileStat { statEntryType = entrytype
                          , statFileMode = foldr1 unionFileModes
                                             [ ownerReadMode
                                             , ownerExecuteMode
                                             , groupReadMode
                                             , groupExecuteMode
                                             , otherReadMode
                                             , otherExecuteMode
                                             ]
                          , statLinkCount = 2
                          , statFileOwner = fuseCtxUserID ctx
                          , statFileGroup = fuseCtxGroupID ctx
                          , statSpecialDeviceID = 0
                          , statFileSize = fsize
                          , statBlocks = 1
                          , statAccessTime = 0
                          , statModificationTime = 0
                          , statStatusChangeTime = 0
                          }
-- Canned stats for synthetic entries.
-- NOTE(review): fileStat/linkStat report the byte length of the
-- literal "test string" as the file size — looks like a placeholder;
-- confirm whether real sizes should be reported here.
dirStat ctx = buildStat ctx Directory 4096
fileStat ctx = buildStat ctx RegularFile $ fromIntegral $ B.length $ B.pack "test string"
linkStat ctx = buildStat ctx SymbolicLink$ fromIntegral $ B.length $ B.pack "test string"
{-
Firstly, the top-level FUSE operations. These handle the top-level directory
(list of backup dates, a symlink to the current (most recent) backup); detect
whether the request is for a sub-directory, and dispatch to the appropriate
function (either rdiffCurrent* or rdiffIncrement*) to handle such requests.
-}
-- | Classification of a request path's leading component.
data WhichBackupType = CurrentBackup | IncrementBackup | Neither deriving(Eq)
-- | Decide whether the first component of @path@ names the current
-- mirror, one of the increments, or neither.
-- 'head' here relies on @path@ being non-empty (callers pass paths
-- with the leading '/' already stripped).
whichBackup :: RdiffContext -> String -> IO WhichBackupType
whichBackup repo path = do
    dates <- getDates repo
    if (Current prefix) `elem` dates
        then return CurrentBackup
        else if (Increment prefix) `elem` dates
            then return IncrementBackup
            else return Neither
    where
        prefix = head $ splitDirectories path
{-
rdiffGetFileStat implements getattr(2). We handle requests for the root
directory and the /current symlink within.
-}
-- | getattr(2): the root and the /current symlink are synthetic; any
-- other path is dispatched to the current/increment handler based on
-- its leading component.
rdiffGetFileStat :: RdiffContext -> FilePath -> IO (Either Errno FileStat)
rdiffGetFileStat _ "/" = do
    ctx <- getFuseContext
    return $ Right $ dirStat ctx
rdiffGetFileStat _ "/current" = do
    ctx <- getFuseContext
    return $ Right $ linkStat ctx
rdiffGetFileStat repo fpath = do
    which <- whichBackup repo path
    case which of
        CurrentBackup -> rdiffCurrentGetFileStat repo path
        IncrementBackup -> rdiffIncrementGetFileStat repo path
        Neither -> return $ Left eNOENT
    where
        -- drop the leading '/' from the fuse path
        (_:path) = fpath
-- | opendir(2): the root always opens; other dirs are dispatched.
rdiffOpenDirectory :: RdiffContext -> FilePath -> IO Errno
rdiffOpenDirectory _ "/" = return eOK
rdiffOpenDirectory repo fdir = do
    which <- whichBackup repo dir
    case which of
        CurrentBackup -> rdiffCurrentOpenDirectory repo dir
        IncrementBackup -> rdiffIncrementOpenDirectory repo dir
        Neither -> return eNOENT
    where
        -- drop the leading '/' from the fuse path
        (_:dir) = fdir
-- | A directory entry: name plus its stat data.
type Fpair = (FilePath, FileStat)
-- | readdir(2): the root lists one directory per backup date plus the
-- "current" symlink; other dirs are dispatched.
rdiffReadDirectory :: RdiffContext -> FilePath -> IO (Either Errno [Fpair])
rdiffReadDirectory repo "/" = do
    ctx <- getFuseContext
    dates <- getDates repo
    return $ Right $ (dirs ctx (map getRdiffBackupDate dates)) ++ ([("current", linkStat ctx)])
    where dirs ctx xs = map (\x -> (x, dirStat ctx)) ([".", ".."] ++ xs)
rdiffReadDirectory repo fdir = do
    which <- whichBackup repo dir
    case which of
        CurrentBackup -> rdiffCurrentReadDirectory repo dir
        IncrementBackup -> rdiffIncrementReadDirectory repo dir
        Neither -> return $ Left eNOENT
    where
        -- drop the leading '/' from the fuse path
        (_:dir) = fdir
-- | open(2): dispatched to the current/increment handler.
rdiffOpen :: RdiffContext -> FilePath -> OpenMode -> OpenFileFlags -> IO (Either Errno HT)
rdiffOpen repo fpath mode flags = do
    which <- whichBackup repo path
    case which of
        CurrentBackup -> rdiffCurrentOpen repo path mode flags
        IncrementBackup -> rdiffIncrementOpen repo path mode flags
        Neither -> return $ Left eNOENT
    where
        -- drop the leading '/' from the fuse path
        (_:path) = fpath
-- | read(2): dispatched to the current/increment handler.
rdiffRead :: RdiffContext -> FilePath -> HT -> ByteCount -> FileOffset -> IO (Either Errno B.ByteString)
rdiffRead repo fpath ht byteCount offset = do
    which <- whichBackup repo path
    case which of
        CurrentBackup -> rdiffCurrentRead repo path ht byteCount offset
        IncrementBackup -> rdiffIncrementRead repo path ht byteCount offset
        Neither -> return $ Left eNOENT
    where
        -- drop the leading '/' from the fuse path
        (_:path) = fpath
-- Lazy, whole-file version, for internal use
rdiffReadFile :: RdiffContext -> FilePath -> IO (Either Errno L.ByteString)
rdiffReadFile repo fpath = do
    which <- whichBackup repo path
    case which of
        CurrentBackup -> rdiffCurrentReadFile repo path
        IncrementBackup -> do -- XXX: convert rdiffIncrementReadFile to lazy
            strict <- rdiffIncrementReadFile repo path
            case strict of
                Left x -> return (Left x)
                Right x -> return $ Right $ L.fromChunks [x]
        Neither -> return $ Left eNOENT
    where
        -- drop the leading '/' from the fuse path
        (_:path) = fpath
-- | statfs(2): returns fixed, constant filesystem statistics; the
-- input path is ignored.
rdiffGetFileSystemStats :: String -> IO (Either Errno FileSystemStats)
rdiffGetFileSystemStats _ =
    return $ Right $ FileSystemStats
        { fsStatBlockSize = 512
        , fsStatBlockCount = 1
        , fsStatBlocksFree = 1
        , fsStatBlocksAvailable = 1
        , fsStatFileCount = 5
        , fsStatFilesFree = 10
        , fsStatMaxNameLength = 255
        }
{-
The current implementation of rdiffReadSymbolicLink here assumes that the
list returned by 'getDates' will have the Current backup at the head of
the list. This is true for the current implementation, but it would be nice
to enforce this.
-}
-- | readlink(2): "/current" resolves to the most recent backup date
-- (relies on 'getDates' putting the current mirror at the head of its
-- result); other links are dispatched by backup type.
rdiffReadSymbolicLink :: RdiffContext -> FilePath -> IO (Either Errno FilePath)
rdiffReadSymbolicLink repo "/current" = do
    dates <- getDates repo
    return $ Right $ getRdiffBackupDate $ head dates
rdiffReadSymbolicLink repo fpath = do
    which <- whichBackup repo path
    case which of
        CurrentBackup -> rdiffCurrentReadSymbolicLink repo path
        IncrementBackup -> rdiffIncrementReadSymbolicLink repo path
        Neither -> return $ Left eNOSYS
    where
        -- drop the leading '/' from the fuse path
        (_:path) = fpath
----------------------------------------------------------------------------
-- Some helper functions for the Current and Increment sets.
-- | Build a FUSE FileStat from an on-disk file by lstat'ing it
-- (symlinks are not followed) and attributing ownership to the
-- requesting user/group rather than the file's real owner.
fileNameToFileStat :: FilePath -> IO FileStat
fileNameToFileStat path = do
    ctx <- getFuseContext
    stat <- getSymbolicLinkStatus path
    let mode = fileMode stat
    return FileStat { statEntryType = fileModeToEntryType mode
                    , statFileMode = mode
                    , statLinkCount = linkCount stat
                    , statFileOwner = fuseCtxUserID ctx
                    , statFileGroup = fuseCtxGroupID ctx
                    , statSpecialDeviceID = specialDeviceID stat
                    , statFileSize = fileSize stat
                    , statBlocks = 1
                    , statAccessTime = accessTime stat
                    , statModificationTime = modificationTime stat
                    , statStatusChangeTime = statusChangeTime stat
                    }
-- | Pair a file name with its stat data, for readdir responses.
fileNameToTuple :: FilePath -> IO (String, FileStat)
fileNameToTuple f = do
    fstat <- fileNameToFileStat f
    return (f, fstat)
{-
Most of these routines need to take a path /<date>/foo/bar and
split it up into <date> and foo/bar bits. (TODO that the Current
functions still do this themselves)
-}
-- | Split a "<date>/foo/bar"-style path into its leading component
-- (the backup date) and the remainder joined back into a path.
-- Partial on the empty path, as the original was.
rSplitPath :: FilePath -> (FilePath, FilePath)
rSplitPath path =
    let parts = splitDirectories path
    in (head parts, joinPath (tail parts))
----------------------------------------------------------------------------
-- current functions
--
-- These handle IO requests for stuff under the current backup tree.
rdiffCurrentGetFileStat :: RdiffContext -> FilePath -> IO (Either Errno FileStat)
rdiffCurrentGetFileStat repo path = do
fstat <- fileNameToFileStat realPath
return $ Right $ fstat
where
realPath = joinPath $ repo:(tail $ splitDirectories path)
rdiffCurrentReadSymbolicLink :: RdiffContext -> FilePath -> IO (Either Errno FilePath)
rdiffCurrentReadSymbolicLink repo path = do
target <- readSymbolicLink $ repo </> remainder
return $ Right $ target
where
remainder = joinPath $ tail $ splitDirectories path
rdiffCurrentReadDirectory :: RdiffContext -> FilePath -> IO (Either Errno [Fpair])
rdiffCurrentReadDirectory repo dir = do catch try handler where
realdir = joinPath $ repo:(tail $ splitDirectories dir)
try = do
l <- getDirectoryContents realdir
ret <- mapM (fileNameToTuple . (realdir </>)) $ filter (/= "rdiff-backup-data") l
return $ Right $ map (\(s,f) -> (takeFileName s, f)) ret
handler e | isPermissionError e = return $ Left eACCES
| isDoesNotExistError e = return $ Left eNOENT
| otherwise = return $ Left eFAULT
{-
This is a really ugly function. We need to call readdir(2) on the underlying
directory and try to pass any error on up to our readdir(2) response. Hence
using the Posix library and trying to handle the error. Another approach might
be to just getDirectoryContents, which is in System.Directory and returns some
fairly useful exception types.
-}
-- | opendir(2) handler for the current tree.  Probes the underlying
-- directory by opening and immediately closing a dir stream; any failure
-- is reported as EACCES (see the comment above about readdir error
-- propagation).
rdiffCurrentOpenDirectory :: RdiffContext -> FilePath -> IO Errno
rdiffCurrentOpenDirectory repo dir = do
    eds <- try $ openDirStream realdir :: IO (Either IOError DirStream)
    case eds of
        Left _ -> return eACCES
        Right ds -> do
            -- probe only; release the stream straight away
            closeDirStream ds
            return eOK
    where
        realdir = joinPath $ repo:(tail $ splitDirectories dir)
-- Shared open(2) permission check: the filesystem is read-only, so only
-- ReadOnly opens are allowed, and then only if access(2) grants read
-- permission on the underlying file.
genericOpen mode path = case mode of
    ReadOnly -> do -- Read Write Execute
        -- fileAccess flags: read=True, write=False, execute=False
        ok <- fileAccess path True False False
        if ok
        then return $ Right ()
        else return $ Left eACCES
    _ -> return $ Left eACCES
-- | open(2) handler for the current tree.  Only read-only access is
-- supported; the actual permission check is delegated to 'genericOpen'.
-- The open flags are ignored.
rdiffCurrentOpen :: RdiffContext -> FilePath -> OpenMode -> OpenFileFlags -> IO (Either Errno HT)
rdiffCurrentOpen repo path mode _flags = genericOpen mode (repo </> remainder)
    where (_, remainder) = rSplitPath path
-- | read(2) handler for the current tree.  Reads the whole file lazily
-- via 'rdiffCurrentReadFile' and slices out [offset, offset+byteCount);
-- laziness keeps this from forcing the entire file into memory.
rdiffCurrentRead :: RdiffContext -> FilePath -> HT -> ByteCount -> FileOffset -> IO (Either Errno B.ByteString)
rdiffCurrentRead repo path _ byteCount offset = do
    stuff <- rdiffCurrentReadFile repo path
    case stuff of
        Left x -> return (Left x)
        Right x -> return $ Right $ B.concat $ L.toChunks $
            L.take (fromIntegral byteCount) $ L.drop (fromIntegral offset) x
-- Lazy whole-file read version.
-- | Lazily read the whole contents of the underlying file for a
-- current-tree virtual path.  Read errors surface as IO exceptions.
rdiffCurrentReadFile :: RdiffContext -> FilePath -> IO (Either Errno L.ByteString)
rdiffCurrentReadFile repo path = Right <$> L.readFile target
    where
    target = repo </> joinPath (tail (splitDirectories path))
----------------------------------------------------------------------------
-- increment helper functions
--
-- Ensure that the given path corresponds to a valid increment timestamp
-- | Does the leading component of @path@ name one of the repository's
-- increment timestamps?  The first entry of 'getDates' (the current
-- backup) is skipped by the 'tail'.
isValidIncrement :: RdiffContext -> String -> IO Bool
isValidIncrement repo path = do
    dates <- getDates repo
    return (increment `elem` increments dates) where
        -- remainder is unused here; only the timestamp matters
        (increment, remainder) = rSplitPath path
        increments = (map getRdiffBackupDate) . tail
-- File-name suffixes that rdiff-backup appends to increment files.
incrementSuffixes = [ ".missing", ".diff.gz", ".dir", ".snapshot.gz" ]
{-
Given a filename, an increment timestamp, and an increment file, is the
increment file relevant to the filename?
-}
-- | Is increment file @fs@ relevant to virtual file @f@ at increment
-- timestamp @inc@?  It must be named "<f>.<inc><suffix>" with one of the
-- known 'incrementSuffixes'.
isRelevantFile :: String -> String -> String -> Bool
isRelevantFile f inc fs = prefixOK && suffixOK where
    -- whatever follows "<f>.<inc>" (+1 accounts for the dot)
    suffix = drop (length f + length inc + 1) fs
    prefixOK = (f ++ '.':inc) `isPrefixOf` fs
    suffixOK = suffix `elem` incrementSuffixes
-- Abstracted boilerplate function that ensures the increment exists etc.
-- | Shared scaffolding for handlers on \/<date>\/... paths.  Validates the
-- increment timestamp, then dispatches on whether an increment file exists
-- for the requested path:
--
--   * no relevant increment file -> run 'currentCase'
--   * exactly one                -> run 'incrementCase' on it
--   * repository inconsistency   -> propagate the error from
--                                   'maybeRelevantIncFile'
rdiffIncrementBoilerPlate :: RdiffContext -> FilePath
    -> IO (Either Errno a)
    -> (FilePath -> IO (Either Errno a))
    -> IO (Either Errno a)
rdiffIncrementBoilerPlate repo path currentCase incrementCase = do
    -- (the previous version also called getDates here and discarded the
    -- result; isValidIncrement performs that lookup itself)
    valid <- isValidIncrement repo path
    if not valid then return (Left eNOENT) else do
    files <- getDirectoryContents incdir
    case maybeRelevantIncFile file increment files of
        Nothing -> currentCase
        Just (Left x) -> return (Left x)
        Just (Right x) -> incrementCase x
    where
        (increment, remainder) = rSplitPath path
        incbase = repo </> "rdiff-backup-data" </> "increments"
        incdir = incbase </> (takeDirectory remainder)
        -- an empty basename (path names the increment root) maps to "."
        file = head $ replace [""] ["."] [takeFileName remainder]
----------------------------------------------------------------------------
-- increment functions
--
-- Try to do the impure IO stuff in the main function, and encapsulate the core
-- algorithm in an 'inner' pure function.
-- | getattr(2) for \/<date>\/... paths: fall back to the current-tree
-- handler unless an increment file shadows the path, in which case stat
-- the increment file itself.
rdiffIncrementGetFileStat :: RdiffContext -> FilePath -> IO (Either Errno FileStat)
rdiffIncrementGetFileStat repo path =
    rdiffIncrementBoilerPlate repo path (rdiffCurrentGetFileStat repo path) incFn
    where
        -- NOTE: the inner 'x' bindings shadow the outer one deliberately
        incFn x = case interpretIncFile file increment x of
            Left x -> return (Left x)
            Right x -> fileNameToFileStat (incdir </> x) >>= (return . Right)
        (increment, remainder) = rSplitPath path
        incbase = repo </> "rdiff-backup-data" </> "increments"
        incdir = incbase </> (takeDirectory remainder)
        -- empty basename maps to "." (path names the increment root)
        file = head $ replace [""] ["."] [takeFileName remainder]
{-
The inner pure function:
Given a virtual filename, an increment timestamp, and a list of increment
files, return either Nothing (no appropriate increment file), Just Right
increment file (one appropriate increment file), or Just Left eNOSYS,
signifying more than one supposedly-appropriate increment file, which
should never happen with a valid repository.
-}
-- | Given a virtual filename, an increment timestamp, and a directory
-- listing, pick the increment file relevant to that name:
--
--   * @Nothing@        - no relevant increment file
--   * @Just (Right f)@ - exactly one relevant file @f@
--   * @Just (Left e)@  - several matches, which a valid repository
--                        should never produce
maybeRelevantIncFile :: String -> String -> [String] -> Maybe (Either Errno String)
maybeRelevantIncFile file inc files =
    case filter (isRelevantFile file inc) files of
        [f] -> Just (Right f)
        []  -> Nothing
        _   -> Just (Left eNOSYS) -- error FIXME what kind?
{-
Given a filename, an increment timestamp, and an increment filename,
return either an Errno (for e.g. no virtual file in this increment)
or a increment filename (to derive stat information from)
-}
-- | Interpret one increment file name: a ".missing" marker means the
-- virtual file did not exist in this increment (ENOENT); any other
-- increment file is usable as-is for deriving stat information.
interpretIncFile :: String -> String -> String -> Either Errno String
interpretIncFile file inc incfile =
    if isMissing then Left eNOENT else Right incfile
    where
    -- the suffix is whatever follows "<file>.<inc>"
    isMissing = drop (length file + 1 + length inc) incfile == ".missing"
{-
FIXME a bug below: "incdir" will correspond to a directory if *any* increment
had a directory by that name, even if the current increment did not. We need
to make use of the variable supplied to incFn.
-}
-- | opendir(2) for \/<date>\/... paths.  The boilerplate decides between
-- the current tree and the increments tree; either branch probes the
-- directory by opening and closing a dir stream.  (See the FIXME above:
-- incdir may exist because of *any* increment, not just this one.)
rdiffIncrementOpenDirectory :: RdiffContext -> FilePath -> IO Errno
rdiffIncrementOpenDirectory repo dir =
    rdiffIncrementBoilerPlate repo dir curFn incFn >>= return . (either id id)
    where
        (inc, remainder) = rSplitPath dir
        incdir = repo </> "rdiff-backup-data" </> "increments" </> remainder
        -- Left/Right only distinguish which branch ran; both carry Errno
        curFn = rdiffCurrentOpenDirectory repo dir >>= (return . Left)
        incFn _ = do
            eds <- try $ openDirStream incdir :: IO (Either IOError DirStream)
            case eds of
                Left _ -> return (Left eACCES)
                Right ds -> do
                    closeDirStream ds
                    return (Right eOK)
{-
Get the directory contents for the relevant increments directory and the
corresponding FileStat information. Pass, along with the readDirectory output
for the equivalent Current directory, to a pure inner function.
-}
-- | readdir(2) for \/<date>\/... paths: combine the listing of the
-- increments directory (with stat info for each entry) with the
-- equivalent current-tree listing, handing the merge to the pure
-- 'incrementReadDirectory'.
rdiffIncrementReadDirectory :: RdiffContext -> FilePath -> IO (Either Errno [Fpair])
rdiffIncrementReadDirectory repo dir = do
    i <- getDirectoryContents incdir
    c <- rdiffCurrentReadDirectory repo $ "current" </> remainder
    ctx <- getFuseContext
    case c of
        Left e -> return (Left e)
        Right c' -> do
            -- stat every increment entry except "." and ".."
            i' <- mapM (\f -> do f' <- fstat f; return (f,f')) (i \\ [".", ".."])
            (return . Right) $ incrementReadDirectory ctx i' c' inc
    where
        (inc, remainder) = rSplitPath dir
        incdir = repo </> "rdiff-backup-data" </> "increments" </> remainder
        fstat f = fileNameToFileStat (incdir </> f)
-- TODO: we need to handle a failure from getDirectoryContents (exception?)
-- Synthetic stat entry for directories reconstructed from ".dir" markers;
-- 'buildStat' is presumably defined elsewhere in this file -- TODO confirm.
defaultDir ctx = buildStat ctx Directory 1024
-- | Pure merge of an increments-directory listing with the current-tree
-- listing for one increment timestamp: snapshots, ".dir" markers and
-- diffs are visible; ".missing" markers remove entries; the current tree
-- fills in everything else.  Earlier sources win on name clashes (nub').
incrementReadDirectory :: FuseContext -> [Fpair] -> [Fpair] -> String -> [Fpair]
incrementReadDirectory ctx increDirectory curDirectory incr = nub' $
    (incfiles ++ dirfiles ++ difffiles ++ curDirectory) \\\ missfiles where
        nub' = nubBy pairCmp
        -- entries are compared by name only, ignoring the stat
        pairCmp (a,_) (b,_) = a == b
        (\\\) = deleteFirstsBy pairCmp -- specialised '\\'
        incfiles = fetch ".snapshot.gz"
        missfiles = fetch ".missing"
        difffiles = fetch ".diff.gz"
        -- ".dir" markers become synthetic directory entries
        dirfiles = map (second (\_->(defaultDir ctx))) (fetch ".dir")
        -- strip "<suffix>" then ".<incr>" from the entry names
        fetch s = getBySuffix ('.':incr) $ getBySuffix s increDirectory
{-
returns sublist of strings which have the provided suffix, with the suffix
removed. I think this would be nicer operating purely on Strings, and being
applied with first/fst etc. by the caller.
-}
-- | Keep only the entries whose name carries the given suffix, returning
-- them with that suffix stripped from the name.
getBySuffix :: String -> [Fpair] -> [Fpair]
getBySuffix suffix fps =
    [ (chop name, stat) | (name, stat) <- fps, suffix `isSuffixOf` name ]
    where
    chop f = take (length f - length suffix) f
-- | readlink(2) for \/<date>\/... paths: fall back to the current tree
-- unless an increment file shadows the path, in which case resolve the
-- increment file's link target.
rdiffIncrementReadSymbolicLink :: RdiffContext -> FilePath -> IO (Either Errno FilePath)
rdiffIncrementReadSymbolicLink repo path =
    rdiffIncrementBoilerPlate repo path current increment
    where
    current = rdiffCurrentReadSymbolicLink repo path
    increment = fmap Right . readSymbolicLink
-- | open(2) for \/<date>\/... paths: current-tree open unless an
-- increment file shadows the path.
rdiffIncrementOpen :: RdiffContext -> FilePath -> OpenMode -> OpenFileFlags -> IO (Either Errno HT)
rdiffIncrementOpen repo path mode flags = do
    rdiffIncrementBoilerPlate repo path curFn incFn
    where
    (inc, remainder) = rSplitPath path
    incbase = repo </> "rdiff-backup-data" </> "increments"
    incdir = incbase </> (takeDirectory remainder)
    -- empty basename maps to "." (path names the increment root)
    file = head $ replace [""] ["."] [takeFileName remainder]
    -- NOTE(review): interpretIncFile is applied to 'path' here rather
    -- than 'incfile', so the ".missing" suffix test can never match --
    -- looks suspicious; verify against rdiffIncrementGetFileStat.
    incFn incfile = case interpretIncFile file inc path of
        Left x -> return (Left x)
        Right x -> genericOpen mode (incdir </> incfile)
    curFn = rdiffCurrentOpen repo path mode flags
-- handle the offset and count side of reading, leave the rest to rdiffIncrementReadFile
-- | read(2) for \/<date>\/... paths: reconstruct the whole file via
-- 'rdiffIncrementReadFile' and slice out [offset, offset+byteCount).
rdiffIncrementRead :: RdiffContext -> FilePath -> HT -> ByteCount -> FileOffset
    -> IO (Either Errno B.ByteString)
rdiffIncrementRead repo path ht byteCount offset = do
    rdiffIncrementBoilerPlate repo path curFn incFn
    where
        -- the increment filename argument is ignored:
        -- rdiffIncrementReadFile re-derives it from the path
        incFn incfile = do
            foo <- rdiffIncrementReadFile repo path
            case foo of
                Left x -> return (Left x)
                Right x -> return $ Right $ B.take (fromIntegral byteCount)
                    $ B.drop (fromIntegral offset) x
        curFn = rdiffCurrentRead repo path ht byteCount offset
-- A lazy, whole-file increment file reader.
-- | Reconstruct the full contents of a file as it was at a given
-- increment.  Snapshot increments are simply decompressed; diff
-- increments are applied (as an rdiff patch) to the file's contents at
-- the *next* increment, which recurses via 'rdiffReadFile'.
rdiffIncrementReadFile :: RdiffContext -> FilePath -> IO (Either Errno B.ByteString)
rdiffIncrementReadFile repo path = do
    rdiffIncrementBoilerPlate repo path curFn incFn
    where
        (inc, remainder) = rSplitPath path
        incbase = repo </> "rdiff-backup-data" </> "increments"
        incdir = incbase </> (takeDirectory remainder)
        -- empty basename maps to "." (path names the increment root)
        file = head $ replace [""] ["."] [takeFileName remainder]
        incFn incfile = case suffix incfile of
            ".snapshot.gz" -> do -- this is, probably, horrid.
                -- full snapshot: just decompress and strictify
                stuff <- fmap decompress $ L.readFile (incdir </> incfile)
                return $ Right $ B.concat $ L.toChunks $ stuff
            ".diff.gz" -> do
                l <- getDates repo
                case nextIncrement inc (map unRdiffBackup l) of
                    Nothing -> return (Left eINVAL)
                    Just ni -> do
                        -- apply this diff to the file's contents at the
                        -- temporally-next increment
                        patch <- fmap decompress $ L.readFile (incdir </> incfile)
                        case parsePatch (L.unpack patch) of
                            Left x -> return (Left eINVAL) -- XXX: appropriate code?
                            Right pt -> do
                                foo <- rdiffReadFile repo $ pathSeparator:(ni </> remainder)
                                case foo of
                                    Left x -> return (Left x)
                                    Right x -> return $ Right $ B.pack $ applyPatch pt $ L.unpack x
            ".missing" -> return (Left eNOENT)
            ".dir" -> return (Left eISDIR)
            _ -> return (Left eINVAL)
        -- whatever follows "<file>.<inc>" in the increment filename
        suffix incfile = drop (length file + length inc + 1) incfile
        curFn = do
            stuff <- rdiffCurrentReadFile repo path
            case stuff of
                Left x -> return (Left x)
                Right x -> return (Right $ B.concat $ L.toChunks x)
-- Return the increment temporally after the supplied argument, if there is one.
nextIncrement :: String -> [String] -> Maybe String
nextIncrement cur incrs = if length succs > 0
then Just $ head succs
else Nothing
where succs = filter (\x -> x > cur) (sort incrs)
| jmtd/rdifffs | RDiffFS.hs | bsd-3-clause | 25,354 | 0 | 28 | 6,733 | 6,220 | 3,130 | 3,090 | 429 | 9 |
-----------------------------------------------------------------------------
-- |
-- Module : Berp.Compile.VarSet
-- Copyright : (c) 2010 Bernie Pope
-- License : BSD-style
-- Maintainer : florbitous@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- A set of variables. XXX Does this need to be in its own module?
--
-----------------------------------------------------------------------------
module Berp.Compile.VarSet where
import Data.Set
import Berp.Compile.IdentString
-- | A set of variable names (identifiers), as used by the compiler passes.
type VarSet = Set IdentString
| bjpop/berp | libs/src/Berp/Compile/VarSet.hs | bsd-3-clause | 539 | 0 | 5 | 77 | 39 | 29 | 10 | 4 | 0 |
module ProjectEuler.Problem052 (solution052) where
import Data.Digits
import Data.List
import Util
-- | True when all elements of the list are equal, checked over adjacent
-- pairs; vacuously true for empty and singleton lists.
allEqual :: Eq a => [a] -> Bool
allEqual xs = and (zipWith (==) xs (drop 1 xs))
-- | True when the multiples 2n, 3n, ..., 6n all consist of exactly the
-- same multiset of decimal digits.
sameDigits :: Integer -> Bool
sameDigits n = allEqual [ sort (digits 10 (n * k)) | k <- [2 .. 6] ]
-- | Project Euler 52: the first candidate whose multiples 2x..6x are all
-- digit permutations of one another.  'ints' comes from Util; presumably
-- it enumerates positive integers in increasing order -- TODO confirm.
solution052 :: Integer
solution052 = head . filter sameDigits $ ints
| guillaume-nargeot/project-euler-haskell | src/ProjectEuler/Problem052.hs | bsd-3-clause | 351 | 0 | 11 | 63 | 142 | 77 | 65 | 10 | 1 |
{-# LANGUAGE RankNTypes #-}
{- Church Encodings -
- - - - - - - - - - -
- By Risto Stevcev -}
module Church where
import Prelude hiding (succ, pred, and, or, not, exp, div, head, tail)
{- Church Logical Operators -
- - - - - - - - - - - - - - -}
type ChurchBool = forall a. a -> a -> a
-- Church Boolean (True)
-- λt.λf.t
true :: ChurchBool
true = \t -> \f -> t
-- Church Boolean (False)
-- λt.λf.f
false :: ChurchBool
false = \t -> \f -> f
-- Church AND
-- λa.λb.a b false
and :: ChurchBool -> ChurchBool -> ChurchBool
and = \a -> \b -> a b false
-- Church OR
-- λa.λb.a true b
or :: ChurchBool -> ChurchBool -> ChurchBool
or = \a -> \b -> a true b
-- Church NOT
-- λp.λa.λb.p b a
not :: ChurchBool -> ChurchBool
not = \p -> \a -> \b -> p b a
-- Church XOR
-- λa.λb.a (not b) b
xor :: ChurchBool -> ChurchBool -> ChurchBool
xor = \a -> \b -> a (not b) b
-- Convert Church Boolean to Haskell Bool
-- (λa.λb.λc.c a b) True False
-- Convert a Church Boolean to a Haskell Bool by applying it to the two
-- Haskell constants: Church "true" selects True, Church "false" selects
-- False.
unchurch_bool :: (Bool -> Bool -> a) -> a
unchurch_bool cb = cb True False
{- Church Natural Numbers (n ∈ ℕ) -
- - - - - - - - - - - - - - - - - -}
-- Rolling/unrolling the ChurchNum is required for Church subtraction
-- The type system isn't powerful enough to handle it otherwise.
--
type ChurchNum = forall a. (a -> a) -> a -> a
newtype Church = Church { unChurch :: ChurchNum }
-- Church Numeral: 0
-- λf.λx.x
zero :: Church
zero = Church $ \f -> \x -> x
-- Church Numeral: 1
-- λf.λx.f x
one :: Church
one = Church $ \f -> \x -> f x
-- Church Numeral: 2
-- λf.λx.f (f x)
two :: Church
two = Church $ \f -> \x -> f (f x)
-- Church Numeral: 3
-- λf.λx.f (f (f x))
three :: Church
three = Church $ \f -> \x -> f (f (f x))
-- Church Numeral: n (where n ∈ ℕ)
-- num 0 = λf.λx.x
-- num n = λf.λx.f (num (n-1) f x)
num :: Integer -> Church
-- NOTE: diverges for negative n; callers are expected to pass n >= 0.
num 0 = Church $ \f -> \x -> x
num n = Church $ \f -> \x -> f ((unChurch $ num (n-1)) f x)
-- Convert Church Numeral (n ∈ ℕ) to Haskell Integer
-- λa.a (λb.b+1) (0)
-- Convert a Church numeral to a Haskell Integer by counting function
-- applications: interpret f as (+1) and x as 0.
unchurch_num :: Church -> Integer
unchurch_num n = unChurch n (+ 1) 0
{- Church Conditionals -
- - - - - - - - - - - -}
-- Church Conditional (If/Else)
-- λp.λa.λb.p a b
ifelse :: ChurchBool -> a -> a -> a
ifelse = \p -> \a -> \b -> p a b
{- Church Loops -
- - - - - - - - -}
-- Y Combinator
-- Y = λf.(λx.f (x x)) (λx.f (x x))
--
-- Beta reduction of this gives,
-- Y g = (λf.(λx.f (x x)) (λx.f (x x))) g
-- = (λx.g (x x)) (λx.g (x x))
-- = g((λx.g (x x)) (λx.g (x x)))
-- = g (Y g)
y g = g (y g)
-- A non-recursive version of the Y combinator
newtype Mu a = Mu (Mu a -> a)
ynr f = (\h -> h $ Mu h) (\x -> f . (\(Mu g) -> g) x $ x)
{- Church Arithmetic Operators (n ∈ ℕ) -
- - - - - - - - - - - - - - - - - - - -}
-- Church Successor
-- λn.λf.λx.f (n f x)
succ :: Church -> Church
succ = \n -> Church $ \f -> \x -> f (unChurch n f x)
-- Church Predecessor
-- λn.λf.λx.n (λg.λh.h (g f)) (λu.x) (λu.u)
pred :: Church
 -> Church
-- (Kleene's predecessor: folding over function pairs discards the
-- outermost application of f, so pred n = n-1, and pred zero = zero.)
pred = \n -> Church $
  \f -> \x -> unChurch n (\g -> \h -> h (g f)) (\u -> x) (\u -> u)
-- Church Addition
-- λm.λn.λf.λx.m f (n f x)
add :: Church -> Church -> Church
add = \m -> \n -> Church $ \f -> \x -> unChurch m f (unChurch n f x)
-- Church Subtraction
-- λm.λn. n pred m
sub :: Church -> Church -> Church
sub = \m -> \n -> unChurch n pred m
-- Church Multiplication
-- λm.λn.λf.m (n f)
mult :: Church -> Church -> Church
mult = \m -> \n -> Church $ \f -> unChurch m (unChurch n f)
-- Church Division (gets the floor if divides to a fraction)
-- λd n m.ifelse (geq n m) (succ (d (sub n m) m)) zero
div :: Church -> Church -> Church
-- Repeated subtraction driven by the Y combinator: counts how many times
-- m fits into n (floor division).  NOTE(review): appears to diverge when
-- m is zero, since geq n zero always holds -- confirm before relying on it.
div = y (\d n m -> ifelse (geq n m) (succ (d (sub n m) m)) zero)
-- Church Exponentiation
-- λm.λn.n m
exp :: Church -> Church -> Church
exp = \m -> \n -> Church $ (unChurch n) (unChurch m)
-- Church Factorial
-- λf n.ifelse (is_zero n) one (mult n (fac (pred n)))
fac :: Church -> Church
-- Factorial via the Y combinator: fac 0 = 1, fac n = n * fac (n-1).
fac = y (\f n -> ifelse (is_zero n) one (mult n $ f $ pred n))
{- Church Comparison Operators -
- - - - - - - - - - - - - - - -}
-- Church Comparison (== 0)
-- λn.n (λx.false) true
is_zero :: Church -> ChurchBool
is_zero = \n -> unChurch n (\x -> false) true
-- Church Comparison (<)
-- λm.λn.and (is_zero (sub m n)) (not (is_zero (sub n m)))
lt :: Church -> Church -> ChurchBool
lt = \m -> \n -> and (is_zero $ sub m n) (not (is_zero $ sub n m))
-- Church Comparison (<=)
-- λm.λn.is_zero (sub m n)
leq :: Church -> Church -> ChurchBool
leq = \m -> \n -> is_zero (sub m n)
-- Church Comparison (==)
-- λm.λn.and (leq m n) (leq n m)
eq :: Church -> Church -> ChurchBool
eq = \m -> \n -> and (leq m n) (leq n m)
-- Church Comparison (>=)
-- λm.λn.or (not (leq m n)) (eq m n)
geq :: Church -> Church -> ChurchBool
geq = \m -> \n -> or (not (leq m n)) (eq m n)
-- Church Comparison (>)
-- λm.λn.not (leq m n)
gt :: Church -> Church -> ChurchBool
gt = \m -> \n -> not (leq m n)
{- Church Lists -
- - - - - - - - -}
-- Church Pairs
-- λx.λy.λz.z x y
pair :: a1 -> a2 -> (a1 -> a2 -> a) -> a
pair = \x -> \y -> \z -> z x y
-- Church Pairs (first item)
-- λp.p -> (λx.λy.x)
-- Project the first component of a Church pair by handing it the
-- "keep the left argument" selector.
first :: ((a2 -> a1 -> a2) -> a) -> a
first p = p const
-- Church Pairs (second item)
-- λp.p -> (λx.λy.y)
-- Project the second component of a Church pair by handing it the
-- "keep the right argument" selector.
second :: ((a1 -> a2 -> a2) -> a) -> a
second p = p (flip const)
-- Church Pairs (nil)
-- pair true true
nil :: ((a1 -> a1 -> a1) -> (a2 -> a2 -> a2) -> a) -> a
nil = pair true true
-- Church Comparison (is_nil)
-- first (true for nil pair)
is_nil :: ((a2 -> a1 -> a2) -> a) -> a
is_nil = first
-- Church Cons
-- λh.λt.pair false (pair h t)
cons :: a2 -> a3 -> ((a1 -> a1 -> a1) -> ((a2 -> a3 -> a4) -> a4) -> a) -> a
cons = \h -> \t -> pair false (pair h t)
-- Church Head
-- λz.first (second z)
head :: ((a3 -> a4 -> a4) -> (a2 -> a1 -> a2) -> a) -> a
head = \z -> first (second z)
-- Church Tail
-- λz.second (second z)
tail :: ((a3 -> a4 -> a4) -> (a1 -> a2 -> a2) -> a) -> a
tail = \z -> second (second z)
{- Church Tuples -
- - - - - - - - -}
-- Tuples differ from lists in two ways,
-- 1. Elements in a tuple can only be of one type (in this case, a ChurchBool or a ChurchNum)
-- 2. Tuples are fixed in size
data ChurchElem = ChurchNumber Church | ChurchBoolean ChurchBool
type ChurchTuple2 = forall a. (ChurchElem -> ChurchElem -> ChurchElem) -> ChurchElem
-- Church Tuple (of size 2)
tuple2 :: ChurchElem -> ChurchElem -> ChurchTuple2
tuple2 = \x -> \y -> \z -> z x y
tuple2_first :: ChurchTuple2 -> ChurchElem
tuple2_first = \p -> p (\x -> \y -> x)
tuple2_second :: ChurchTuple2 -> ChurchElem
tuple2_second = \p -> p (\x -> \y -> y)
unchurch_bool_elem :: ChurchElem -> Bool
unchurch_bool_elem (ChurchBoolean x) = unchurch_bool x
unchurch_num_elem :: ChurchElem -> Integer
unchurch_num_elem (ChurchNumber x) = unchurch_num x
{- Church Integers (n ∈ ℤ) -
- - - - - - - - - - - - - -}
type ChurchInteger = forall a. (Church -> Church -> Church) -> Church
-- Convert Church Numeral (natural number) to Church Integer
-- λx.pair x zero
convertNZ :: Church -> ChurchInteger
convertNZ = \x -> pair x zero
-- Church Negation
-- λx.pair (second x) (first x)
neg :: ChurchInteger -> ChurchInteger
neg = \x -> pair (second x) (first x)
-- Church OneZero
-- (Fixes incorrect integer representations that don't have a zero in the pair.
-- Ex: (7, 2) == 7 - 2 == 5)
-- λoneZ x.ifelse (is_zero (first x))
-- x (ifelse (is_zero (second x)) x (oneZ (pair (pred (first x)) (pred (second x)))))
onezero :: ChurchInteger -> ChurchInteger
-- Normalise a (pos, neg) pair by decrementing both sides until one of
-- them reaches zero, e.g. (7,2) becomes (5,0); recursion via Y.
onezero = y ( \oneZ x -> ifelse (is_zero $ first x) x
                         (ifelse (is_zero $ second x) x
                           (oneZ $ pair (pred $ first x) (pred $ second x))) )
-- Convert Church Integer to Haskell Integer
-- λx.ifelse (is_zero (first x)) (-1*(unchurch_num (second x)))
-- (unchurch_num (first x))
unchurch_int :: ChurchInteger -> Integer
-- A zero first component means the pair encodes a negative number.
unchurch_int = \x -> ifelse (is_zero (first x))
                       ((-1)*(unchurch_num $ second x)) (unchurch_num $ first x)
{- Church Arithmetic Operators (n ∈ ℤ) -
- - - - - - - - - - - - - - - - - - - -}
-- Church Addition
-- λx.λy.onezero (pair (add (first x) (first y)) (add (second x) (second y)))
addZ :: ChurchInteger -> ChurchInteger -> ChurchInteger
addZ = \x -> \y -> onezero (pair (add (first x) (first y)) (add (second x) (second y)))
-- Church Subtraction
-- λx.λy.onezero (pair (add (first x) (second y)) (add (second x) (first y)))
subZ :: ChurchInteger -> ChurchInteger -> ChurchInteger
subZ = \x -> \y -> onezero (pair (add (first x) (second y)) (add (second x) (first y)))
-- Church Multiplication
-- λx.λy.pair (add (mult (first x) (first y)) (mult (second x) (second y)))
-- (add (mult (first x) (second y)) (mult (second x) (first y)))
multZ :: ChurchInteger -> ChurchInteger -> ChurchInteger
multZ = \x -> \y -> pair (add (mult (first x) (first y)) (mult (second x) (second y)))
(add (mult (first x) (second y)) (mult (second x) (first y)))
-- Church DivNoZero
-- (Divides only if the value is not zero)
-- λx.λy.is_zero y zero (div x y)
divnZ :: Church -> Church -> Church
divnZ = \x -> \y -> is_zero y zero (div x y)
-- Church Division
-- λx.λy.pair (add (divnZ (first x) (first y)) (divnZ (second x) (second y)))
-- (add (divnZ (first x) (second y)) (divnZ (second x) (first y)))
divZ :: ChurchInteger -> ChurchInteger -> ChurchInteger
divZ = \x -> \y -> pair (add (divnZ (first x) (first y)) (divnZ (second x) (second y)))
(add (divnZ (first x) (second y)) (divnZ (second x) (first y)))
| Risto-Stevcev/haskell-church-encodings | RankNTypes/Church.hs | bsd-3-clause | 9,703 | 0 | 16 | 2,327 | 2,919 | 1,636 | 1,283 | 119 | 1 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
-------------------------------------------------------------------------
-- | Encoding Sets of Values With Liquid Types --------------------------
-------------------------------------------------------------------------
-- TODO: make this self-enclosed
import Data.Set (Set(..))
-- | To start with, lets check that the `listElts` measure is sensible
[lq| myid0 :: xs:[a] -> {v:[a]| (len v) = (len xs)} |]
-- Identity on lists; the refinement records that length is preserved.
myid0 [] = []
myid0 (x:xs) = x : myid0 xs
[lq| myid :: xs:[a] -> {v:[a]| (listElts v) = (listElts xs)} |]
-- Identity on lists; the refinement records that the element set is kept.
myid [] = []
myid (x:xs) = x : myid xs
-- | The reverse function should also return the same set of values.
-- Note that the reverse uses the tail-recursive helper @go@.
-- Mouse over and see what type is inferred for it!
[lq| Decrease go 2 |]
[lq| myrev :: xs:[a] -> {v:[a]| listElts(v) = listElts(xs)} |]
-- Tail-recursive reverse; the spec says the element set is unchanged, and
-- "Decrease go 2" tells LiquidHaskell the second argument of the helper
-- shrinks, establishing termination.
myrev :: [a] -> [a]
myrev = go []
  where
    go acc []     = acc
    go acc (y:ys) = go (y:acc) ys
-- | Next, here's good old List-append, but now with a specification about
-- the sets of values in the input and output.
[lq| myapp :: xs:[a]
          -> ys:[a]
          -> {v:[a] | listElts v = Set_cup (listElts xs) (listElts ys) } |]
-- List append; the spec states the output's element set is the union of
-- the inputs' element sets.
myapp :: [a] -> [a] -> [a]
myapp []     ys = ys
myapp (x:xs) ys = x : myapp xs ys
-- | Finally, to round off this little demo, here's @filter@, which returns a subset.
[lq| myfilter :: (a -> Bool) -> xs:[a] -> {v:[a] | Set_sub (listElts v) (listElts xs) } |]
-- Filter; the spec states the output's element set is a subset of the
-- input's.
myfilter :: (a -> Bool) -> [a] -> [a]
myfilter f [] = []
myfilter f (x:xs) = if f x
                      then x : myfilter f xs
                      else myfilter f xs
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/listSet.hs | bsd-3-clause | 1,681 | 0 | 9 | 408 | 350 | 198 | 152 | 25 | 2 |
-- Copyright 2021 Google LLC
--
-- Use of this source code is governed by a BSD-style
-- license that can be found in the LICENSE file or at
-- https://developers.google.com/open-source/licenses/bsd
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module CheapReduction (
CheaplyReducibleE (..), cheapReduce, cheapReduceWithDecls, cheapNormalize) where
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Functor.Identity
import Control.Applicative
import Control.Monad.Trans
import Control.Monad.Writer.Strict
import Control.Monad.State.Strict
import MTL1
import Name
import Syntax
import PPrint ()
import {-# SOURCE #-} Inference (trySynthDictBlock)
import Err
-- === api ===
type NiceE e = (HoistableE e, SinkableE e, SubstE AtomSubstVal e, SubstE Name e)
-- | Try to cheaply reduce an expression under the current environment.
-- Returns the reduced value (if reduction succeeded) alongside the set of
-- dictionary types that failed to synthesize during the attempt.
cheapReduce :: forall e' e m n
             . (EnvReader m, CheaplyReducibleE e e', NiceE e, NiceE e')
            => e n -> m n (Maybe (e' n), Maybe (ESet Type n))
cheapReduce e = liftCheapReducerM idSubst $ cheapReduceE e
-- Second result contains the set of dictionary types that failed to synthesize
-- during traversal of the supplied decls.
cheapReduceWithDecls
:: forall e' e m n l
. ( CheaplyReducibleE e e', NiceE e', NiceE e, EnvReader m )
=> Nest Decl n l -> e l -> m n (Maybe (e' n), Maybe (ESet Type n))
cheapReduceWithDecls decls result = do
Abs decls' result' <- sinkM $ Abs decls result
liftCheapReducerM idSubst $
cheapReduceWithDeclsB decls' $
cheapReduceE result'
-- | Cheap-reduce and insist on success.  NOTE(review): 'fromJust' makes
-- this partial -- it crashes if reduction fails, so callers must only
-- apply it to expressions known to reduce.
cheapNormalize :: (EnvReader m, CheaplyReducibleE e e, NiceE e) => e n -> m n (e n)
cheapNormalize a = fromJust . fst <$> cheapReduce a
-- === internal ===
newtype CheapReducerM (i :: S) (o :: S) (a :: *) =
CheapReducerM
(SubstReaderT AtomSubstVal
(MaybeT1
(WriterT1 FailedDictTypes
(ScopedT1 (MapE AtomName (MaybeE Atom))
(EnvReaderT Identity)))) i o a)
deriving ( Functor, Applicative, Monad, Alternative
, EnvReader, ScopeReader, EnvExtender
, SubstReader AtomSubstVal)
newtype FailedDictTypes (n::S) = FailedDictTypes (MaybeE (ESet Type) n)
deriving (SinkableE, HoistableE)
instance Semigroup (FailedDictTypes n) where
FailedDictTypes (JustE l) <> FailedDictTypes (JustE r) =
FailedDictTypes $ JustE $ l <> r
_ <> _ = FailedDictTypes $ NothingE
instance Monoid (FailedDictTypes n) where
mempty = FailedDictTypes $ JustE mempty
instance FallibleMonoid1 FailedDictTypes where
mfail = FailedDictTypes $ NothingE
class ( Alternative2 m, SubstReader AtomSubstVal m
, EnvReader2 m, EnvExtender2 m) => CheapReducer m where
reportSynthesisFail :: Type o -> m i o ()
updateCache :: AtomName o -> Maybe (Atom o) -> m i o ()
lookupCache :: AtomName o -> m i o (Maybe (Maybe (Atom o)))
instance CheapReducer CheapReducerM where
reportSynthesisFail ty = CheapReducerM $ SubstReaderT $ lift $ lift11 $
WriterT1 $ tell $ FailedDictTypes $ JustE $ eSetSingleton ty
updateCache v u = CheapReducerM $ SubstReaderT $ lift $ lift11 $ lift11 $
modify (MapE . M.insert v (toMaybeE u) . fromMapE)
lookupCache v = CheapReducerM $ SubstReaderT $ lift $ lift11 $ lift11 $
fmap fromMaybeE <$> gets (M.lookup v . fromMapE)
-- | Run a 'CheapReducerM' computation: peels the monad-transformer stack
-- (substitution reader, failure, failed-dict writer, per-name cache) and
-- returns the optional result plus the failed dictionary types.
liftCheapReducerM :: EnvReader m
                  => Subst AtomSubstVal i o -> CheapReducerM i o a
                  -> m o (Maybe a, Maybe (ESet Type o))
liftCheapReducerM subst (CheapReducerM m) = do
  (result, FailedDictTypes tys) <-
    liftM runIdentity $ liftEnvReaderT $ runScopedT1
      (runWriterT1 $ runMaybeT1 $ runSubstReaderT subst m) mempty
  return $ (result, fromMaybeE tys)
cheapReduceFromSubst
:: forall e m i o .
( SubstReader AtomSubstVal m, EnvReader2 m
, SinkableE e, SubstE AtomSubstVal e, HoistableE e)
=> e i -> m i o (e o)
cheapReduceFromSubst e = traverseNames cheapSubstName =<< substM e
where
cheapSubstName :: Color c => Name c o -> m i o (AtomSubstVal c o)
cheapSubstName v = lookupEnv v >>= \case
AtomNameBinding (LetBound (DeclBinding _ _ (Atom x))) ->
liftM SubstVal $ dropSubst $ cheapReduceFromSubst x
_ -> return $ Rename v
cheapReduceWithDeclsB
:: ( CheapReducer m
, HoistableE e, SinkableE e, SubstE AtomSubstVal e, SubstE Name e)
=> Nest Decl i i'
-> (forall o'. Ext o o' => m i' o' (e o'))
-> m i o (e o)
cheapReduceWithDeclsB decls cont = do
Abs irreducibleDecls result <- cheapReduceWithDeclsRec decls cont
case hoist irreducibleDecls result of
HoistSuccess result' -> return result'
HoistFailure _ -> empty
cheapReduceWithDeclsRec
:: ( CheapReducer m
, HoistableE e, SinkableE e, SubstE AtomSubstVal e, SubstE Name e)
=> Nest Decl i i'
-> (forall o'. Ext o o' => m i' o' (e o'))
-> m i o (Abs (Nest Decl) e o)
cheapReduceWithDeclsRec decls cont = case decls of
Empty -> Abs Empty <$> cont
Nest (Let b binding@(DeclBinding _ _ expr)) rest -> do
optional (cheapReduceE expr) >>= \case
Nothing -> do
binding' <- substM binding
withFreshBinder (getNameHint b) binding' \b' -> do
updateCache (binderName b') Nothing
extendSubst (b@>Rename (binderName b')) do
Abs decls' result <- cheapReduceWithDeclsRec rest cont
return $ Abs (Nest (Let b' binding') decls') result
Just x ->
extendSubst (b@>SubstVal x) $
cheapReduceWithDeclsRec rest cont
-- | Cheap-reduce a single name: a let-bound atom name reduces to its
-- (recursively reduced) right-hand side, with results memoised in the
-- per-traversal cache; anything else is left as a rename.
cheapReduceName :: (Color c, CheapReducer m) => Name c o -> m i o (AtomSubstVal c o)
cheapReduceName v =
  lookupEnv v >>= \case
    AtomNameBinding (LetBound (DeclBinding _ _ e)) -> case e of
      -- We avoid synthesizing the dictionaries during the traversal
      -- and only do that when cheap reduction is performed on the expr directly.
      Op (SynthesizeDict _ _) -> stuck
      _ -> do
        -- consult the cache; on a miss, attempt reduction and record the
        -- outcome (including failure, as Nothing) for later lookups
        cachedVal <- lookupCache v >>= \case
          Nothing -> do
            result <- optional (dropSubst $ cheapReduceE e)
            updateCache v result
            return result
          Just result -> return result
        case cachedVal of
          Nothing -> stuck
          Just ans -> return $ SubstVal ans
    _ -> stuck
  where stuck = return $ Rename v
class CheaplyReducibleE (e::E) (e'::E) where
cheapReduceE :: CheapReducer m => e i -> m i o (e' o)
instance CheaplyReducibleE Atom Atom where
cheapReduceE :: forall m i o. CheapReducer m => Atom i -> m i o (Atom o)
cheapReduceE a = case a of
-- Don't try to eagerly reduce lambda bodies. We might get stuck long before
-- we have a chance to apply tham. Also, recursive traversal of those bodies
-- means that we will follow the full call chain, so it's really expensive!
Lam _ -> substM a
-- We traverse the Atom constructors that might contain lambda expressions
-- explicitly, to make sure that we can skip normalizing free vars inside those.
Con con -> Con <$> mapM cheapReduceE con
DataCon sourceName dataDefName params con args ->
DataCon sourceName <$> substM dataDefName <*> mapM cheapReduceE params <*> pure con <*> mapM cheapReduceE args
Record items -> Record <$> mapM cheapReduceE items
Variant ty l c p -> do
ExtLabeledItemsE ty' <- substM $ ExtLabeledItemsE ty
Variant ty' <$> pure l <*> pure c <*> cheapReduceE p
-- Do recursive reduction via substitution
_ -> do
a' <- substM a
dropSubst $ traverseNames cheapReduceName a'
instance (CheaplyReducibleE e e', NiceE e') => CheaplyReducibleE (Abs (Nest Decl) e) e' where
cheapReduceE (Abs decls result) = cheapReduceWithDeclsB decls $ cheapReduceE result
instance (CheaplyReducibleE Expr e', NiceE e') => CheaplyReducibleE Block e' where
cheapReduceE (Block _ decls result) = cheapReduceE $ Abs decls result
instance CheaplyReducibleE Expr Atom where
cheapReduceE = \case
Atom atom -> cheapReduceE atom
App f' xs' -> do
f <- cheapReduceE f'
case fromNaryLam (length xs') f of
Just (NaryLamExpr bs _ body) -> do
xs <- mapM cheapReduceE xs'
let subst = bs @@> fmap SubstVal xs
dropSubst $ extendSubst subst $ cheapReduceE body
_ -> empty
Op (SynthesizeDict _ ty') -> do
ty <- cheapReduceE ty'
runFallibleT1 (trySynthDictBlock ty) >>= \case
Success block -> dropSubst $ cheapReduceE block
Failure _ -> reportSynthesisFail ty >> empty
-- TODO: Make sure that this wraps correctly
-- TODO: Other casts?
Op (CastOp ty' val') -> do
ty <- cheapReduceE ty'
case ty of
BaseTy (Scalar Int32Type) -> do
val <- cheapReduceE val'
case val of
Con (Lit (Int64Lit v)) -> return $ Con $ Lit $ Int32Lit $ fromIntegral v
_ -> empty
_ -> empty
_ -> empty
instance (CheaplyReducibleE e1 e1', CheaplyReducibleE e2 e2')
=> CheaplyReducibleE (PairE e1 e2) (PairE e1' e2') where
cheapReduceE (PairE e1 e2) = PairE <$> cheapReduceE e1 <*> cheapReduceE e2
instance CheaplyReducibleE EffectRow EffectRow where
cheapReduceE row = cheapReduceFromSubst row
instance CheaplyReducibleE FieldRowElems FieldRowElems where
cheapReduceE elems = cheapReduceFromSubst elems
| google-research/dex-lang | src/lib/CheapReduction.hs | bsd-3-clause | 9,273 | 0 | 28 | 2,225 | 2,971 | 1,462 | 1,509 | -1 | -1 |
module Yesod.CoreBot.Bliki.Store where
import Yesod.CoreBot.Bliki.Prelude
import Control.Monad.State.Class
import Control.Monad.Trans
import qualified Data.ByteString.Lazy as B
import qualified Data.FileStore as FileStore
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TL
import System.Directory
-- XXX: The inner FileStore is not actually pure.
data Store = Store
{ filestore :: FileStore
}
-- | Constraint-synonym-style class: any monad with IO access and a 'Store'
-- as its state is a 'StoreM'. The catch-all instance below makes every such
-- monad an instance automatically.
class ( MonadIO m, Applicative m, MonadState m, StateType m ~ Store ) => StoreM m
instance ( MonadIO m, Applicative m, MonadState m, StateType m ~ Store ) => StoreM m
-- | List the resources directly under @node_path@ in the file store.
directory_listing :: StoreM m => FilePath -> m [ Resource ]
directory_listing node_path = do
    fs <- gets filestore
    liftIO $ FileStore.directory fs node_path
-- | Write a 'String' to @out_path@ as UTF-8, quasi-atomically: the bytes are
-- first written to a @.tmp@ sibling and then renamed into place, so readers
-- never observe a half-written file. Creates the target directory if needed.
--
-- Fix: dropped the redundant trailing @return ()@ — 'renameFile' already
-- has type @IO ()@.
cache_str :: FilePath -> String -> IO ()
cache_str out_path str = do
    let text = TL.pack str
        bs = TL.encodeUtf8 text
        tmp_out = out_path ++ ".tmp"
    createDirectoryIfMissing True (takeDirectory tmp_out)
    B.writeFile tmp_out bs
    -- The rename is the commit point of the write.
    renameFile tmp_out out_path
-- | Retrieve the raw bytes of @node_path@ at revision @rev_ID@ from the
-- underlying file store.
data_for_node_rev :: StoreM m
                  => FilePath
                  -> RevisionId
                  -> m B.ByteString
data_for_node_rev node_path rev_ID = do
    store <- get
    liftIO $ FileStore.smartRetrieve (filestore store) True node_path (Just rev_ID)
| coreyoconnor/corebot-bliki | src/Yesod/CoreBot/Bliki/Store.hs | bsd-3-clause | 1,358 | 0 | 11 | 313 | 390 | 204 | 186 | -1 | -1 |
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Applicative
import Control.Monad
import Test.Framework (Test, defaultMain, testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck
import Test.QuickCheck.Test
import Data.Byteable
import qualified Data.ByteString as B
import qualified Crypto.Cipher.AES as AES
import Crypto.Cipher.Types
import Crypto.Cipher.Tests
import qualified KATECB
import qualified KATCBC
import qualified KATXTS
import qualified KATGCM
import qualified KATOCB3
-- | AES contexts hold key material; show only an opaque tag.
instance Show AES.AES where
    show _ = "AES"
-- | QuickCheck generator: random 16-byte IVs.
instance Arbitrary AES.AESIV where
    arbitrary = AES.aesIV_ . B.pack <$> replicateM 16 arbitrary
-- | QuickCheck generator: AES contexts from 16 arbitrary key bytes (AES-128).
instance Arbitrary AES.AES where
    arbitrary = AES.initAES . B.pack <$> replicateM 16 arbitrary
-- | Lift a (key, plaintext, ciphertext) triple into an ECB known-answer test.
toKatECB (key, plain, cipher) =
    KAT_ECB { ecbKey = key, ecbPlaintext = plain, ecbCiphertext = cipher }

-- | Lift a (key, iv, plaintext, ciphertext) tuple into a CBC known-answer test.
toKatCBC (key, iv, plain, cipher) =
    KAT_CBC { cbcKey = key, cbcIV = iv, cbcPlaintext = plain, cbcCiphertext = cipher }

-- | Lift an XTS vector into a known-answer test (the fifth field is unused here).
toKatXTS (key1, key2, iv, plain, _, cipher) =
    KAT_XTS { xtsKey1 = key1, xtsKey2 = key2, xtsIV = iv
            , xtsPlaintext = plain, xtsCiphertext = cipher }
-- | Build an AEAD known-answer test for the given mode from a raw
-- (key, iv, header, plaintext, ciphertext, tag length, tag) tuple.
toKatAEAD mode (k,iv,h,p,c,taglen,tag) =
    KAT_AEAD { aeadMode       = mode
             , aeadKey        = k
             , aeadIV         = iv
             , aeadHeader     = h
             , aeadPlaintext  = p
             , aeadCiphertext = c
             , aeadTaglen     = taglen
             , aeadTag        = AuthTag tag
             }
-- | Mode-specialised AEAD KAT builders.
toKatGCM = toKatAEAD AEAD_GCM
toKatOCB = toKatAEAD AEAD_OCB
-- | AES-128 known-answer tests: ECB, CBC, one inline CFB vector
-- (key/IV/plain/cipher from a standard test vector), XTS, and AEAD
-- (GCM plus OCB3).
kats128 = defaultKATs
    { kat_ECB = map toKatECB KATECB.vectors_aes128_enc
    , kat_CBC = map toKatCBC KATCBC.vectors_aes128_enc
    , kat_CFB = [ KAT_CFB { cfbKey = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6\xab\xf7\x15\x88\x09\xcf\x4f\x3c"
                          , cfbIV = "\xC8\xA6\x45\x37\xA0\xB3\xA9\x3F\xCD\xE3\xCD\xAD\x9F\x1C\xE5\x8B"
                          , cfbPlaintext = "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
                          , cfbCiphertext = "\x26\x75\x1f\x67\xa3\xcb\xb1\x40\xb1\x80\x8c\xf1\x87\xa4\xf4\xdf"
                          }
                ]
    , kat_XTS = map toKatXTS KATXTS.vectors_aes128_enc
    , kat_AEAD = map toKatGCM KATGCM.vectors_aes128_enc ++
                 map toKatOCB KATOCB3.vectors_aes128_enc
    }
-- | AES-192 known-answer tests (ECB and CBC only).
kats192 = defaultKATs
    { kat_ECB = map toKatECB KATECB.vectors_aes192_enc
    , kat_CBC = map toKatCBC KATCBC.vectors_aes192_enc
    }
-- | AES-256 known-answer tests (ECB, CBC, XTS and GCM; no OCB vectors).
kats256 = defaultKATs
    { kat_ECB = map toKatECB KATECB.vectors_aes256_enc
    , kat_CBC = map toKatCBC KATCBC.vectors_aes256_enc
    , kat_XTS = map toKatXTS KATXTS.vectors_aes256_enc
    , kat_AEAD = map toKatGCM KATGCM.vectors_aes256_enc
    }
-- | Run the block-cipher KAT suites for all three key sizes, plus a
-- property checking that 'AES.genCounter' is consistent: generating two
-- 32-byte chunks must equal generating one 64-byte chunk (same final IV).
main = defaultMain
    [ testBlockCipher kats128 (undefined :: AES.AES128)
    , testBlockCipher kats192 (undefined :: AES.AES192)
    , testBlockCipher kats256 (undefined :: AES.AES256)
    , testProperty "genCtr" $ \(key, iv1) ->
        let (bs1, iv2)    = AES.genCounter key iv1 32
            (bs2, iv3)    = AES.genCounter key iv2 32
            (bsAll, iv3') = AES.genCounter key iv1 64
         in (B.concat [bs1,bs2] == bsAll && iv3 == iv3')
    ]
| vincenthz/hs-cipher-aes | Tests/Tests.hs | bsd-3-clause | 3,178 | 0 | 16 | 810 | 801 | 468 | 333 | 66 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.InstallPlan
-- Copyright : (c) Duncan Coutts 2008
-- License : BSD-like
--
-- Maintainer : duncan@community.haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Package installation plan
--
-----------------------------------------------------------------------------
module Distribution.Client.InstallPlan (
InstallPlan,
GenericInstallPlan,
PlanPackage,
GenericPlanPackage(..),
IsUnit,
-- * Operations on 'InstallPlan's
new,
toList,
planIndepGoals,
depends,
fromSolverInstallPlan,
configureInstallPlan,
remove,
preexisting,
lookup,
directDeps,
revDirectDeps,
-- * Traversal
executionOrder,
execute,
BuildOutcomes,
lookupBuildOutcome,
-- ** Traversal helpers
-- $traversal
Processing,
ready,
completed,
failed,
-- * Display
showPlanIndex,
showInstallPlan,
-- * Graph-like operations
reverseTopologicalOrder,
reverseDependencyClosure,
) where
import Distribution.Client.Types hiding (BuildOutcomes)
import qualified Distribution.PackageDescription as PD
import qualified Distribution.Simple.Configure as Configure
import qualified Distribution.Simple.Setup as Cabal
import Distribution.InstalledPackageInfo
( InstalledPackageInfo )
import Distribution.Package
( Package(..)
, HasUnitId(..), UnitId(..) )
import Distribution.Solver.Types.SolverPackage
import Distribution.Client.JobControl
import Distribution.Text
import Text.PrettyPrint
import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan
import Distribution.Client.SolverInstallPlan (SolverInstallPlan)
import qualified Distribution.Solver.Types.ComponentDeps as CD
import Distribution.Solver.Types.Settings
import Distribution.Solver.Types.SolverId
import Distribution.Solver.Types.InstSolverPackage
-- TODO: Need this when we compute final UnitIds
-- import qualified Distribution.Simple.Configure as Configure
import Data.List
( foldl' )
import Data.Maybe
( fromMaybe, isJust )
import qualified Distribution.Compat.Graph as Graph
import Distribution.Compat.Graph (Graph, IsNode(..))
import Distribution.Compat.Binary (Binary(..))
import GHC.Generics
import Control.Monad
import Control.Exception
( assert )
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import Data.Set (Set)
import Prelude hiding (lookup)
-- When cabal tries to install a number of packages, including all their
-- dependencies it has a non-trivial problem to solve.
--
-- The Problem:
--
-- In general we start with a set of installed packages and a set of source
-- packages.
--
-- Installed packages have fixed dependencies. They have already been built and
-- we know exactly what packages they were built against, including their exact
-- versions.
--
-- Source packages have somewhat flexible dependencies. They are specified as
-- version ranges, though really they're predicates. To make matters worse they
-- have conditional flexible dependencies. Configuration flags can affect which
-- packages are required and can place additional constraints on their
-- versions.
--
-- These two sets of packages can and usually do overlap. There can be installed
-- packages that are also available as source packages which means they could
-- be re-installed if required, though there will also be packages which are
-- not available as source and cannot be re-installed. Very often there will be
-- extra versions available than are installed. Sometimes we may like to prefer
-- installed packages over source ones or perhaps always prefer the latest
-- available version whether installed or not.
--
-- The goal is to calculate an installation plan that is closed, acyclic and
-- consistent and where every configured package is valid.
--
-- An installation plan is a set of packages that are going to be used
-- together. It will consist of a mixture of installed packages and source
-- packages along with their exact version dependencies. An installation plan
-- is closed if for every package in the set, all of its dependencies are
-- also in the set. It is consistent if for every package in the set, all
-- dependencies which target that package have the same version.
-- Note that plans do not necessarily compose. You might have a valid plan for
-- package A and a valid plan for package B. That does not mean the composition
-- is simultaneously valid for A and B. In particular you're most likely to
-- have problems with inconsistent dependencies.
-- On the other hand it is true that every closed sub plan is valid.
-- | Packages in an install plan
--
-- NOTE: 'ConfiguredPackage', 'GenericReadyPackage' and 'GenericPlanPackage'
-- intentionally have no 'PackageInstalled' instance. This is important:
-- PackageInstalled returns only library dependencies, but for package that
-- aren't yet installed we know many more kinds of dependencies (setup
-- dependencies, exe, test-suite, benchmark, ..). Any functions that operate on
-- dependencies in cabal-install should consider what to do with these
-- dependencies; if we give a 'PackageInstalled' instance it would be too easy
-- to get this wrong (and, for instance, call graph traversal functions from
-- Cabal rather than from cabal-install). Instead, see 'PackageInstalled'.
data GenericPlanPackage ipkg srcpkg
   = PreExisting ipkg  -- ^ An already-installed package; nothing to build.
   | Configured srcpkg -- ^ A source package configured and ready to build.
  deriving (Eq, Show, Generic)
-- | Shorthand constraint for graph nodes that are keyed by 'UnitId'.
type IsUnit a = (IsNode a, Key a ~ UnitId)
-- | The direct dependencies (graph neighbours) of a plan node.
depends :: IsUnit a => a -> [UnitId]
depends = nodeNeighbors
-- NB: Expanded constraint synonym here to avoid undecidable
-- instance errors in GHC 7.8 and earlier.
-- Both cases simply delegate to the wrapped package.
instance (IsNode ipkg, IsNode srcpkg, Key ipkg ~ UnitId, Key srcpkg ~ UnitId)
         => IsNode (GenericPlanPackage ipkg srcpkg) where
  type Key (GenericPlanPackage ipkg srcpkg) = UnitId
  nodeKey (PreExisting ipkg) = nodeKey ipkg
  nodeKey (Configured spkg) = nodeKey spkg
  nodeNeighbors (PreExisting ipkg) = nodeNeighbors ipkg
  nodeNeighbors (Configured spkg) = nodeNeighbors spkg
-- Serialisation comes for free via the 'Generic' instance.
instance (Binary ipkg, Binary srcpkg)
         => Binary (GenericPlanPackage ipkg srcpkg)
-- | 'GenericPlanPackage' specialised to the concrete types used by cabal-install.
type PlanPackage = GenericPlanPackage
                   InstalledPackageInfo (ConfiguredPackage UnresolvedPkgLoc)
-- The following instances all dispatch on the constructor and delegate to
-- the wrapped installed or source package.
instance (Package ipkg, Package srcpkg) =>
         Package (GenericPlanPackage ipkg srcpkg) where
  packageId (PreExisting ipkg) = packageId ipkg
  packageId (Configured spkg) = packageId spkg
instance (HasUnitId ipkg, HasUnitId srcpkg) =>
         HasUnitId
         (GenericPlanPackage ipkg srcpkg) where
  installedUnitId (PreExisting ipkg) = installedUnitId ipkg
  installedUnitId (Configured spkg) = installedUnitId spkg
instance (HasConfiguredId ipkg, HasConfiguredId srcpkg) =>
          HasConfiguredId (GenericPlanPackage ipkg srcpkg) where
  configuredId (PreExisting ipkg) = configuredId ipkg
  configuredId (Configured pkg) = configuredId pkg
data GenericInstallPlan ipkg srcpkg = GenericInstallPlan {
    planIndex      :: !(PlanIndex ipkg srcpkg),  -- ^ dependency graph of plan packages
    planIndepGoals :: !IndependentGoals          -- ^ whether goals were solved independently
  }
-- | 'GenericInstallPlan' specialised to most commonly used types.
type InstallPlan = GenericInstallPlan
                   InstalledPackageInfo (ConfiguredPackage UnresolvedPkgLoc)
-- | The graph of plan packages, keyed by 'UnitId'.
type PlanIndex ipkg srcpkg =
     Graph (GenericPlanPackage ipkg srcpkg)
-- | Smart constructor that deals with caching the 'Graph' representation.
--
-- | Internal constructor for 'GenericInstallPlan'. The indirection exists so
-- that any derived structures cached alongside the index can be (re)computed
-- in one place.
mkInstallPlan :: PlanIndex ipkg srcpkg
              -> IndependentGoals
              -> GenericInstallPlan ipkg srcpkg
mkInstallPlan index indepGoals = GenericInstallPlan index indepGoals
-- | Abort with an error message tagged as coming from this module.
internalError :: String -> a
internalError msg = error ("InstallPlan: internal error: " ++ msg)
-- Serialise a plan as the pair (index, independent-goals flag).
instance (IsNode ipkg, Key ipkg ~ UnitId, IsNode srcpkg, Key srcpkg ~ UnitId,
          Binary ipkg, Binary srcpkg)
       => Binary (GenericInstallPlan ipkg srcpkg) where
    put GenericInstallPlan {
              planIndex      = index,
              planIndepGoals = indepGoals
        } = put (index, indepGoals)
    get = do
      (index, indepGoals) <- get
      -- strict application so decoding forces plan construction eagerly
      return $! mkInstallPlan index indepGoals
-- | Render a plan index for debugging: one entry per package showing its
-- tag (PreExisting/Configured), package id and unit id, with its
-- dependencies indented underneath.
showPlanIndex :: (Package ipkg, Package srcpkg,
                  IsUnit ipkg, IsUnit srcpkg)
              => PlanIndex ipkg srcpkg -> String
showPlanIndex index = renderStyle defaultStyle $
    vcat (map dispPlanPackage (Graph.toList index))
  where dispPlanPackage p =
            hang (hsep [ text (showPlanPackageTag p)
                       , disp (packageId p)
                       , parens (disp (nodeKey p))]) 2
                 (vcat (map disp (nodeNeighbors p)))
-- | Render a whole install plan for debugging; see 'showPlanIndex'.
showInstallPlan :: (Package ipkg, Package srcpkg,
                    IsUnit ipkg, IsUnit srcpkg)
                => GenericInstallPlan ipkg srcpkg -> String
showInstallPlan plan = showPlanIndex (planIndex plan)

-- | Short human-readable tag identifying the plan package constructor.
showPlanPackageTag :: GenericPlanPackage ipkg srcpkg -> String
showPlanPackageTag pkg =
    case pkg of
      PreExisting _ -> "PreExisting"
      Configured _  -> "Configured"
-- | Build an installation plan from a valid set of resolved packages.
--
new :: IndependentGoals
    -> PlanIndex ipkg srcpkg
    -> GenericInstallPlan ipkg srcpkg
-- note: argument order is flipped relative to 'mkInstallPlan'
new indepGoals index = mkInstallPlan index indepGoals
-- | All packages in the plan, in no particular order.
toList :: GenericInstallPlan ipkg srcpkg
       -> [GenericPlanPackage ipkg srcpkg]
toList = Graph.toList . planIndex
-- | Remove packages from the install plan. This will result in an
-- error if there are remaining packages that depend on any matching
-- package. This is primarily useful for obtaining an install plan for
-- the dependencies of a package or set of packages without actually
-- installing the package itself, as when doing development.
--
-- | Remove every package matching the predicate, keeping the independent-goals
-- setting. Building the new graph fails (later) if a surviving package still
-- depends on a removed one.
remove :: (IsUnit ipkg, IsUnit srcpkg)
       => (GenericPlanPackage ipkg srcpkg -> Bool)
       -> GenericInstallPlan ipkg srcpkg
       -> GenericInstallPlan ipkg srcpkg
remove shouldRemove plan =
    new (planIndepGoals plan) prunedIndex
  where
    prunedIndex =
      Graph.fromList [ pkg | pkg <- toList plan, not (shouldRemove pkg) ]
-- | Replace a ready package with a pre-existing one. The pre-existing one
-- must have exactly the same dependencies as the source one was configured
-- with.
--
preexisting :: (IsUnit ipkg,
                IsUnit srcpkg)
            => UnitId   -- ^ the unit id of the package being replaced
            -> ipkg     -- ^ the installed package that replaces it
            -> GenericInstallPlan ipkg srcpkg
            -> GenericInstallPlan ipkg srcpkg
preexisting pkgid ipkg plan = plan'
  where
    -- Delete the old entry first, then insert the installed package; the
    -- insert is keyed by the new package's own unit id.
    plan' = plan {
              planIndex = Graph.insert (PreExisting ipkg)
                          -- ...but be sure to use the *old* IPID for the lookup for
                          -- the preexisting record
                        . Graph.deleteKey pkgid
                        $ planIndex plan
            }
-- | Lookup a package in the plan.
--
-- | Look up a package in the plan by its unit id.
lookup :: (IsUnit ipkg, IsUnit srcpkg)
       => GenericInstallPlan ipkg srcpkg
       -> UnitId
       -> Maybe (GenericPlanPackage ipkg srcpkg)
lookup plan unitId =
    let index = planIndex plan
     in Graph.lookup unitId index
-- | Find all the direct dependencies of the given package.
--
-- Note that the package must exist in the plan or it is an error.
--
-- | All direct dependencies of the given package.
-- Precondition: the package is present in the plan (violations abort).
directDeps :: GenericInstallPlan ipkg srcpkg
           -> UnitId
           -> [GenericPlanPackage ipkg srcpkg]
directDeps plan unitId =
    fromMaybe (internalError "directDeps: package not in graph")
              (Graph.neighbors (planIndex plan) unitId)
-- | Find all the direct reverse dependencies of the given package.
--
-- Note that the package must exist in the plan or it is an error.
--
-- | All direct reverse dependencies of the given package.
-- Precondition: the package is present in the plan (violations abort).
revDirectDeps :: GenericInstallPlan ipkg srcpkg
              -> UnitId
              -> [GenericPlanPackage ipkg srcpkg]
revDirectDeps plan unitId =
    fromMaybe (internalError "revDirectDeps: package not in graph")
              (Graph.revNeighbors (planIndex plan) unitId)
-- | Return all the packages in the 'InstallPlan' in reverse topological order.
-- That is, for each package, all dependencies of the package appear first.
--
-- Compared to 'executionOrder', this function returns all the installed and
-- source packages rather than just the source ones. Also, while both this
-- and 'executionOrder' produce reverse topological orderings of the package
-- dependency graph, it is not necessarily exactly the same order.
--
reverseTopologicalOrder :: GenericInstallPlan ipkg srcpkg
                        -> [GenericPlanPackage ipkg srcpkg]
reverseTopologicalOrder plan = Graph.revTopSort (planIndex plan)
-- | Return the packages in the plan that depend directly or indirectly on the
-- given packages.
--
-- Unknown unit ids yield the empty list rather than an error.
reverseDependencyClosure :: GenericInstallPlan ipkg srcpkg
                         -> [UnitId]
                         -> [GenericPlanPackage ipkg srcpkg]
reverseDependencyClosure plan = fromMaybe []
                              . Graph.revClosure (planIndex plan)
-- Alert alert! Why does SolverId map to a LIST of plan packages?
-- The sordid story has to do with 'build-depends' on a package
-- with libraries and executables. In an ideal world, we would
-- ONLY depend on the library in this situation. But c.f. #3661
-- some people rely on the build-depends to ALSO implicitly
-- depend on an executable.
--
-- I don't want to commit to a strategy yet, so the only possible
-- thing you can do in this case is return EVERYTHING and let
-- the client filter out what they want (executables? libraries?
-- etc). This similarly implies we can't return a 'ConfiguredId'
-- because that's not enough information.
-- | Convert a 'SolverInstallPlan' using a caller-supplied elaboration
-- function @f@. Packages are processed in reverse topological order so the
-- lookup maps passed to @f@ always already contain the elaborations of a
-- package's dependencies.
fromSolverInstallPlan ::
      (IsUnit ipkg, IsUnit srcpkg)
    => (   (SolverId -> [GenericPlanPackage ipkg srcpkg])
        -> SolverInstallPlan.SolverPlanPackage
        -> [GenericPlanPackage ipkg srcpkg]    )
    -> SolverInstallPlan
    -> GenericInstallPlan ipkg srcpkg
fromSolverInstallPlan f plan =
    mkInstallPlan (Graph.fromList pkgs'')
                  (SolverInstallPlan.planIndepGoals plan)
  where
    -- Accumulate two maps (planned-id and preexisting-id to elaborated
    -- packages) plus the elaborated package list itself.
    (_, _, pkgs'') = foldl' f' (Map.empty, Map.empty, [])
                        (SolverInstallPlan.reverseTopologicalOrder plan)

    f' (pidMap, ipiMap, pkgs) pkg = (pidMap', ipiMap', pkgs' ++ pkgs)
      where
       pkgs' = f (mapDep pidMap ipiMap) pkg
       (pidMap', ipiMap')
         = case nodeKey pkg of
             PreExistingId _ uid -> (pidMap, Map.insert uid pkgs' ipiMap)
             PlannedId pid       -> (Map.insert pid pkgs' pidMap, ipiMap)

    mapDep _ ipiMap (PreExistingId _pid uid)
        | Just pkgs <- Map.lookup uid ipiMap = pkgs
        | otherwise = error ("fromSolverInstallPlan: PreExistingId " ++ display uid)
    mapDep pidMap _ (PlannedId pid)
        | Just pkgs <- Map.lookup pid pidMap = pkgs
        | otherwise = error ("fromSolverInstallPlan: PlannedId " ++ display pid)
        -- This shouldn't happen, since mapDep should only be called
        -- on neighbor SolverId, which must have all been done already
        -- by the reverse top-sort (we assume the graph is not broken).
-- | Conversion of 'SolverInstallPlan' to 'InstallPlan'.
-- Similar to 'elaboratedInstallPlan'
-- | Conversion of 'SolverInstallPlan' to 'InstallPlan'.
-- Similar to 'elaboratedInstallPlan': preexisting packages pass through,
-- configured packages get a computed component id and configured deps.
configureInstallPlan :: SolverInstallPlan -> InstallPlan
configureInstallPlan solverPlan =
    flip fromSolverInstallPlan solverPlan $ \mapDep planpkg ->
      [case planpkg of
        SolverInstallPlan.PreExisting pkg ->
          PreExisting (instSolverPkgIPI pkg)
        SolverInstallPlan.Configured  pkg ->
          Configured (configureSolverPackage mapDep pkg)
      ]
  where
    configureSolverPackage :: (SolverId -> [PlanPackage])
                           -> SolverPackage UnresolvedPkgLoc
                           -> ConfiguredPackage UnresolvedPkgLoc
    configureSolverPackage mapDep spkg =
      ConfiguredPackage {
        -- compute the component id from the package id, library component
        -- name, the (library) dependency ids and the flag assignment
        confPkgId = Configure.computeComponentId
                        Cabal.NoFlag
                        Cabal.NoFlag
                        (packageId spkg)
                        PD.CLibName
                        (map confInstId (CD.libraryDeps deps))
                        (solverPkgFlags spkg),
        confPkgSource = solverPkgSource spkg,
        confPkgFlags = solverPkgFlags spkg,
        confPkgStanzas = solverPkgStanzas spkg,
        confPkgDeps = deps
        -- NB: no support for executable dependencies
      }
      where
        deps = fmap (concatMap (map configuredId . mapDep)) (solverPkgLibDeps spkg)
-- ------------------------------------------------------------
-- * Primitives for traversing plans
-- ------------------------------------------------------------
-- $traversal
--
-- Algorithms to traverse or execute an 'InstallPlan', especially in parallel,
-- may make use of the 'Processing' type and the associated operations
-- 'ready', 'completed' and 'failed'.
--
-- The 'Processing' type is used to keep track of the state of a traversal and
-- includes the set of packages that are in the processing state, e.g. in the
-- process of being installed, plus those that have been completed and those
-- where processing failed.
--
-- Traversal algorithms start with an 'InstallPlan':
--
-- * Initially there will be certain packages that can be processed immediately
-- (since they are configured source packages and have all their dependencies
-- installed already). The function 'ready' returns these packages plus a
-- 'Processing' state that marks these same packages as being in the
-- processing state.
--
-- * The algorithm must now arrange for these packages to be processed
-- (possibly in parallel). When a package has completed processing, the
-- algorithm needs to know which other packages (if any) are now ready to
-- process as a result. The 'completed' function marks a package as completed
-- and returns any packages that are newly in the processing state (ie ready
-- to process), along with the updated 'Processing' state.
--
-- * If failure is possible then when processing a package fails, the algorithm
-- needs to know which other packages have also failed as a result. The
-- 'failed' function marks the given package as failed as well as all the
-- other packages that depend on the failed package. In addition it returns
-- the other failed packages.
-- | The 'Processing' type is used to keep track of the state of a traversal
-- and includes the set of packages that are in the processing state, e.g. in
-- the process of being installed, plus those that have been completed and
-- those where processing failed.
--
-- The three sets are, in order: packages currently being processed, packages
-- that completed successfully, and packages that failed (directly or via a
-- failed dependency). See 'processingInvariant' for the disjointness laws.
data Processing = Processing !(Set UnitId) !(Set UnitId) !(Set UnitId)
                -- processing, completed, failed
-- | The packages in the plan that are initially ready to be installed.
-- That is they are in the configured state and have all their dependencies
-- installed already.
--
-- The result is both the packages that are now ready to be installed and also
-- a 'Processing' state containing those same packages. The assumption is that
-- all the packages that are ready will now be processed and so we can consider
-- them to be in the processing state.
--
ready :: (IsUnit ipkg, IsUnit srcpkg)
      => GenericInstallPlan ipkg srcpkg
      -> ([GenericReadyPackage srcpkg], Processing)
ready plan =
    assert (processingInvariant plan processing) $
    (readyPackages, processing)
  where
    -- Initial state: the ready packages count as "processing", every
    -- pre-existing package counts as already "completed", nothing has failed.
    !processing =
      Processing
        (Set.fromList [ nodeKey pkg | pkg <- readyPackages ])
        (Set.fromList [ nodeKey pkg | PreExisting pkg <- toList plan ])
        Set.empty
    -- A configured package is ready iff all of its direct deps are installed.
    readyPackages =
      [ ReadyPackage pkg
      | Configured pkg <- toList plan
      , all isPreExisting (directDeps plan (nodeKey pkg))
      ]

    isPreExisting (PreExisting {}) = True
    isPreExisting _                = False
-- | Given a package in the processing state, mark the package as completed
-- and return any packages that are newly in the processing state (ie ready to
-- process), along with the updated 'Processing' state.
--
completed :: (IsUnit ipkg, IsUnit srcpkg)
          => GenericInstallPlan ipkg srcpkg
          -> Processing -> UnitId
          -> ([GenericReadyPackage srcpkg], Processing)
completed plan (Processing processingSet completedSet failedSet) pkgid =
    assert (pkgid `Set.member` processingSet) $
    assert (processingInvariant plan processing') $

    ( map asReadyPackage newlyReady
    , processing' )
  where
    completedSet'  = Set.insert pkgid completedSet

    -- each direct reverse dep where all direct deps are completed
    newlyReady     = [ dep
                     | dep <- revDirectDeps plan pkgid
                     , all ((`Set.member` completedSet') . nodeKey)
                           (directDeps plan (nodeKey dep))
                     ]
    -- the newly-ready packages move straight into the processing set,
    -- while the completed package leaves it
    processingSet' = foldl' (flip Set.insert)
                            (Set.delete pkgid processingSet)
                            (map nodeKey newlyReady)
    processing'    = Processing processingSet' completedSet' failedSet

    asReadyPackage (Configured pkg) = ReadyPackage pkg
    asReadyPackage _ = error "InstallPlan.completed: internal error"
-- | Given a package in the processing state, mark it and all its (transitive)
-- reverse dependencies as failed. Returns the other newly-failed packages.
failed :: (IsUnit ipkg, IsUnit srcpkg)
       => GenericInstallPlan ipkg srcpkg
       -> Processing -> UnitId
       -> ([srcpkg], Processing)
failed plan (Processing processingSet completedSet failedSet) pkgid =
    assert (pkgid `Set.member` processingSet) $
    assert (all (`Set.notMember` processingSet) (tail newlyFailedIds)) $
    assert (all (`Set.notMember` completedSet) (tail newlyFailedIds)) $
    assert (all (`Set.notMember` failedSet) (tail newlyFailedIds)) $
    assert (processingInvariant plan processing') $
    ( map asConfiguredPackage (tail newlyFailed)
    , processing' )
  where
    processingSet' = Set.delete pkgid processingSet
    failedSet'     = failedSet `Set.union` Set.fromList newlyFailedIds
    newlyFailedIds = map nodeKey newlyFailed
    -- NOTE(review): the 'tail' calls rely on 'Graph.revClosure' listing the
    -- queried package itself first in the result — confirm against Graph docs.
    newlyFailed    = fromMaybe (internalError "package not in graph")
                   $ Graph.revClosure (planIndex plan) [pkgid]
    processing'    = Processing processingSet' completedSet failedSet'

    asConfiguredPackage (Configured pkg) = pkg
    asConfiguredPackage _ = internalError "not in configured state"
-- | Sanity checks on a 'Processing' state: all ids are in the plan, the three
-- sets are pairwise disjoint, nothing downstream of an in-progress package is
-- already completed/failed, and processing/failed entries are 'Configured'.
processingInvariant :: (IsUnit ipkg, IsUnit srcpkg)
                    => GenericInstallPlan ipkg srcpkg
                    -> Processing -> Bool
processingInvariant plan (Processing processingSet completedSet failedSet) =
    all (isJust . flip Graph.lookup (planIndex plan)) (Set.toList processingSet)
 && all (isJust . flip Graph.lookup (planIndex plan)) (Set.toList completedSet)
 && all (isJust . flip Graph.lookup (planIndex plan)) (Set.toList failedSet)
 && noIntersection processingSet completedSet
 && noIntersection processingSet failedSet
 && noIntersection failedSet completedSet
 && noIntersection processingClosure completedSet
 && noIntersection processingClosure failedSet
 && and [ case Graph.lookup pkgid (planIndex plan) of
            Just (Configured  _) -> True
            Just (PreExisting _) -> False
            Nothing              -> False
        | pkgid <- Set.toList processingSet ++ Set.toList failedSet ]
  where
    processingClosure = Set.fromList
                      . map nodeKey
                      . fromMaybe (internalError "processingClosure")
                      . Graph.revClosure (planIndex plan)
                      . Set.toList
                      $ processingSet
    noIntersection a b = Set.null (Set.intersection a b)
-- ------------------------------------------------------------
-- * Traversing plans
-- ------------------------------------------------------------
-- | Flatten an 'InstallPlan', producing the sequence of source packages in
-- the order in which they would be processed when the plan is executed. This
-- can be used for simultations or presenting execution dry-runs.
--
-- It is guaranteed to give the same order as using 'execute' (with a serial
-- in-order 'JobControl'), which is a reverse topological orderings of the
-- source packages in the dependency graph, albeit not necessarily exactly the
-- same ordering as that produced by 'reverseTopologicalOrder'.
--
executionOrder :: (IsUnit ipkg, IsUnit srcpkg)
               => GenericInstallPlan ipkg srcpkg
               -> [GenericReadyPackage srcpkg]
executionOrder plan =
    let (newpkgs, processing) = ready plan
     in tryNewTasks processing newpkgs
  where
    -- simulate a serial in-order scheduler: take one ready package,
    -- "complete" it, and append whatever became ready as a result
    tryNewTasks _processing []       = []
    tryNewTasks  processing (p:todo) = waitForTasks processing p todo

    waitForTasks processing p todo =
        p : tryNewTasks processing' (todo++nextpkgs)
      where
        (nextpkgs, processing') = completed plan processing (nodeKey p)
-- ------------------------------------------------------------
-- * Executing plans
-- ------------------------------------------------------------
-- | The set of results we get from executing an install plan.
--
-- | Per-unit build result: 'Left' failure or 'Right' success.
type BuildOutcomes failure result = Map UnitId (Either failure result)
-- | Lookup the build result for a single package.
--
-- Returns 'Nothing' when the package was never attempted.
lookupBuildOutcome :: HasUnitId pkg
                   => pkg -> BuildOutcomes failure result
                   -> Maybe (Either failure result)
lookupBuildOutcome = Map.lookup . installedUnitId
-- | Execute an install plan. This traverses the plan in dependency order.
--
-- Executing each individual package can fail and if so all dependents fail
-- too. The result for each package is collected as a 'BuildOutcomes' map.
--
-- Visiting each package happens with optional parallelism, as determined by
-- the 'JobControl'. By default, after any failure we stop as soon as possible
-- (using the 'JobControl' to try to cancel in-progress tasks). This behaviour
-- can be reversed to keep going and build as many packages as possible.
--
execute :: forall m ipkg srcpkg result failure.
           (IsUnit ipkg, IsUnit srcpkg,
            Monad m)
        => JobControl m (UnitId, Either failure result)
        -> Bool                -- ^ Keep going after failure
        -> (srcpkg -> failure) -- ^ Value for dependents of failed packages
        -> GenericInstallPlan ipkg srcpkg
        -> (GenericReadyPackage srcpkg -> m (Either failure result))
        -> m (BuildOutcomes failure result)
execute jobCtl keepGoing depFailure plan installPkg =
    let (newpkgs, processing) = ready plan
     in tryNewTasks Map.empty False False processing newpkgs
  where
    -- Spawn jobs for any newly-ready packages, or decide we are done.
    -- The two Bools track "some task has failed" and "tasks still running".
    tryNewTasks :: BuildOutcomes failure result
                -> Bool -> Bool -> Processing
                -> [GenericReadyPackage srcpkg]
                -> m (BuildOutcomes failure result)
    tryNewTasks !results tasksFailed tasksRemaining !processing newpkgs
      -- we were in the process of cancelling and now we're finished
      | tasksFailed && not keepGoing && not tasksRemaining
      = return results
      -- we are still in the process of cancelling, wait for remaining tasks
      | tasksFailed && not keepGoing && tasksRemaining
      = waitForTasks results tasksFailed processing
      -- no new tasks to do and all tasks are done so we're finished
      | null newpkgs && not tasksRemaining
      = return results
      -- no new tasks to do, remaining tasks to wait for
      | null newpkgs
      = waitForTasks results tasksFailed processing
      -- new tasks to do, spawn them, then wait for tasks to complete
      | otherwise
      = do sequence_ [ spawnJob jobCtl $ do
                         result <- installPkg pkg
                         return (nodeKey pkg, result)
                     | pkg <- newpkgs ]
           waitForTasks results tasksFailed processing

    -- Block on one finished job, record its outcome, and continue.
    waitForTasks :: BuildOutcomes failure result
                 -> Bool -> Processing
                 -> m (BuildOutcomes failure result)
    waitForTasks !results tasksFailed !processing = do
      (pkgid, result) <- collectJob jobCtl

      case result of
        Right _success -> do
            tasksRemaining <- remainingJobs jobCtl
            tryNewTasks results' tasksFailed tasksRemaining
                        processing' nextpkgs
          where
            results' = Map.insert pkgid result results
            (nextpkgs, processing') = completed plan processing pkgid

        Left _failure -> do
            -- if this is the first failure and we're not trying to keep going
            -- then try to cancel as many of the remaining jobs as possible
            when (not tasksFailed && not keepGoing) $
              cancelJobs jobCtl

            tasksRemaining <- remainingJobs jobCtl
            tryNewTasks results' True tasksRemaining processing' []
          where
            -- dependents of the failed package are recorded as failed too,
            -- using the caller-supplied 'depFailure' value
            (depsfailed, processing') = failed plan processing pkgid
            results' = Map.insert pkgid result results `Map.union` depResults
            depResults = Map.fromList
                           [ (nodeKey deppkg, Left (depFailure deppkg))
                           | deppkg <- depsfailed ]
| sopvop/cabal | cabal-install/Distribution/Client/InstallPlan.hs | bsd-3-clause | 29,094 | 0 | 19 | 6,850 | 4,897 | 2,624 | 2,273 | 407 | 3 |
module Test.Atomic where
import HDevs.Atomic
import HDevs.Models
import Test.Tasty
import Test.Tasty.HUnit
-- | A static (memoryless) model multiplying its input by 4.
gain :: Model Double Double
gain = static (4*)
-- | 'gain' after one external transition via 'deltaExt' with arguments 2.3
-- and 3 — presumably elapsed time and input value; confirm against 'deltaExt'.
gain' :: Model Double Double
gain' = deltaExt gain 2.3 3
-- | A static model passivates: its time advance is 'forever'.
foreverWait :: Assertion
foreverWait = ta gain @?= forever
-- | While waiting, a static model produces no output.
noWaitOutput :: Assertion
noWaitOutput = lambda gain @?= Nothing
-- | After an external event the model schedules an immediate (0-delay) output.
immediateSend :: Assertion
immediateSend = ta gain' @?= 0.0
-- | The scheduled output is the gain applied to the last input (4 * 3).
correctOutputGain :: Assertion
correctOutputGain = lambda gain' @?= Just (4 * 3)
-- | A fresh integrator produces no output while waiting.
noWaitOutputIntegrator :: Assertion
noWaitOutputIntegrator = lambda (integrator 0) @?= Nothing
-- | Integrating input 3 over elapsed time 4 from state 0 yields ~12
-- (checked with an absolute tolerance since the value is a Double).
correctOutputIntegrator :: Assertion
correctOutputIntegrator = assertBool msg $ correct (lambda integrator')
    where
        integrator' = deltaExt (integrator 0) 3 4
        correct Nothing = False
        correct (Just y) = abs (y - 12) < 1e-6
        msg = "The result was " ++ show (lambda integrator') ++ " instead of (Just 12)"
-- | A PT1 element (gain 3, some time constant, initial state 0) driven long
-- enough (elapsed 1e6) with input 4 settles near its steady state 3*4 = 12;
-- a loose tolerance (1e-2) accounts for the exponential approach.
correctOutputPt1 :: Assertion
correctOutputPt1 = assertBool msg $ correct (lambda pt1')
    where
        pt1' = deltaExt (pt1 3 5 0) 1e6 4
        correct Nothing = False
        correct (Just y) = abs (y - 12) < 1e-2
        msg = "The result was " ++ show (lambda pt1') ++ " instead of (Just 12)"
-- | All atomic-model unit tests grouped for the tasty runner.
atomicTests :: TestTree
atomicTests = testGroup "Atomic Tests"
    [ testCase "Gain model should wait forever" foreverWait
    , testCase "Gain should output Nothing while waiting" noWaitOutput
    , testCase "Gain should output in send immediatelly" immediateSend
    , testCase "Gain should output correctly in send state" correctOutputGain
    , testCase "Integrator should output Nothing while waiting" noWaitOutputIntegrator
    , testCase "Integrator should output correctly in send state" correctOutputIntegrator
    , testCase "Pt1 should output correctly in send state" correctOutputPt1 ]
| sglumac/HDevs | test/Test/Atomic.hs | bsd-3-clause | 1,821 | 0 | 11 | 370 | 455 | 235 | 220 | 40 | 2 |
{-# LANGUAGE FlexibleContexts #-}
module Main where
import System.Process
import System.Directory
import System.Exit
import Control.Monad
import Options.Applicative
import Data.Monoid
import System.Timeout
import System.FilePath
import Control.Exception
import Data.List
import Data.List.Split
import Data.Time.Clock
import Data.Char
import Numeric
import Language.Clojure.AST
import Language.Clojure.Parser
import Language.Clojure.Lang
import VCS.Multirec
import VCS.Disjoint
import VCS.Cost
import VCS.Diff
import Oracle.Oracle
import Util.UnixDiff
import Util.Test
import Debug.Trace
-- | Root directory containing the conflict test-case folders.
testPath :: FilePath
testPath = "test/conflicts2"

-- | Directory where the per-family markdown result tables are written.
resultPath :: FilePath
resultPath = "/Users/giovannigarufi/Developement/thesis/th-vcs-clojure/test/results/"

-- | Executable path relative to the project root.
-- NOTE(review): appears unused in this module; kept for reference.
executablePathRoot :: FilePath
executablePathRoot = ".stack-work/dist/x86_64-osx/Cabal-1.24.2.0/build/th-vcs-clojure-exe/th-vcs-clojure-exe"

-- | Absolute path to the built executable invoked by 'runDiff'.
-- (Type signature added; it was the only top-level binding without one.)
executablePath :: FilePath
executablePath = "/Users/giovannigarufi/Developement/thesis/th-vcs-clojure/.stack-work/dist/x86_64-osx/Cabal-1.24.2.0/build/th-vcs-clojure-exe/th-vcs-clojure-exe"

-- | Per-test timeout in microseconds (one minute).
timeout_time :: Int
timeout_time = minutesToMicro 1

-- | Convert minutes to microseconds (1 minute = 6e7 microseconds).
minutesToMicro :: Int -> Int
minutesToMicro i = i * (6 * 10 ^ 7)
-- | Run every predicate check in each test-case directory under
-- 'testPath', print summary statistics, and -- unless --dry was given --
-- write the per-family result tables.
main :: IO ()
main = do
  opts <- execParser optsHelper
  dirs <- listDirectory testPath
  let dirsP = map (\d -> testPath ++ "/" ++ d) dirs
  actual_dirs <- filterM doesDirectoryExist dirsP
  -- Each check runs with the test-case directory as working directory.
  results <- mapM (flip withCurrentDirectory checkPredicates) actual_dirs
  let timeouts  = filter isTimeout results
      errors    = filter isError results
      completed = filter (not . errorOrTimeout) results
      -- Partition the completed runs by which predicates held.
      disj_  = filter disj  completed
      sDisj_ = filter sDisj completed
      comp_  = filter comp  completed
      sComp_ = filter sComp completed
  putStrLn $ "Number of Tests " ++ show (length results)
  putStrLn $ "Number of Timeouts " ++ show (length timeouts)
  putStrLn $ "Number of Errors " ++ show (length errors)
  putStrLn $ "Disjoint: " ++ show (length disj_)
  putStrLn $ "Structurally-Disjoint: " ++ show (length sDisj_)
  putStrLn $ "Compatible: " ++ show (length comp_)
  putStrLn $ "Structurally-Compatible: " ++ show (length sComp_)
  unless (dry opts) (writeResults results)
-- | Recognise results that timed out.
isTimeout :: TestResult -> Bool
isTimeout r =
  case r of
    Timeout _ -> True
    _         -> False
-- | Recognise results that raised an IO error.
isError :: TestResult -> Bool
isError r =
  case r of
    Error _ -> True
    _       -> False
-- | Results that failed to complete, for either reason.
errorOrTimeout :: TestResult -> Bool
errorOrTimeout t = isTimeout t || isError t
-- | Write one set of predicate tables per test-case family
-- (families are produced by 'splitInFolders').
-- (Removed an unused 'baseFolder' helper from the original where clause.)
writeResults :: [TestResult] -> IO ()
writeResults res = mapM_ writeTables (splitInFolders res)
-- | Family name of a test case: the last path component with its final two
-- "-"-separated fields dropped.
-- NOTE(review): 'last' makes this partial; an empty path would crash.
-- Paths here always come from 'listDirectory', so they are non-empty.
simplifyName :: TestResult -> String
simplifyName res = intercalate "-" $ reverse $ drop 2 $ reverse $ splitOn "-" (last $ splitPath (extractPath res))
-- | Group results by their simplified family name, tagging each group with
-- that name.
-- NOTE(review): 'groupBy' only merges adjacent elements, so this assumes
-- same-family results are contiguous in the input list -- true when the
-- input follows directory-listing order.
splitInFolders :: [TestResult] -> [(FilePath, [TestResult])]
splitInFolders r = map (\xs ->
      (simplifyName (head xs), xs)) groups
  where
    groups = (groupBy
              (\x y ->
                simplifyName x == simplifyName y)
              r)
-- | Path of a result: timeouts carry it directly, every other result
-- stores it in its 'tPath' field.
extractPath :: TestResult -> FilePath
extractPath r =
  case r of
    Timeout p -> p
    other     -> tPath other
-- | Lift a predicate on results into one that is undefined (Nothing) for
-- timed-out runs.
liftMaybe :: (TestResult -> Bool) -> TestResult -> Maybe Bool
liftMaybe _ (Timeout _) = Nothing
liftMaybe p r           = Just (p r)
-- | Write the four predicate tables (disjoint, structurally disjoint,
-- compatible, structurally compatible) for one test-case family.
writeTables :: (FilePath, [TestResult]) -> IO ()
writeTables (path, result) = do
  let rDisj = liftMaybe disj
      rSDisj = liftMaybe sDisj
      rComp = liftMaybe comp
      rSComp = liftMaybe sComp
  writeTableFor rDisj (toConflictResultFolder "Disj") result
  writeTableFor rSDisj (toConflictResultFolder "Struct-Disj") result
  writeTableFor rComp (toConflictResultFolder "Comp") result
  writeTableFor rSComp (toConflictResultFolder "Struct-Comp") result
  where
    -- Absolute path of the markdown file holding one predicate's table.
    toConflictResultFolder name =
      resultPath ++ path ++ "/" ++ name ++ ".md"
-- | Render the markdown table for one predicate and write it to the
-- given file.
writeTableFor :: (TestResult -> Maybe Bool)
              -> FilePath -> [TestResult] -> IO ()
writeTableFor f path = writeFile path . mkTableFor f
-- | Build a one-row markdown table for the given predicate over all
-- results: a header row, a separator row, and one row of T/F/X cells.
-- (The helpers reference 'takeRes', mkTableFor's own parameter, so they
-- belong in the where clause -- the dump had lost their indentation.
-- Also switched the string folds to the strict foldl'.)
mkTableFor :: (TestResult -> Maybe Bool) -> [TestResult] -> String
mkTableFor takeRes res = headers ++ "\n" ++ border ++ "\n" ++ values
  where
    headers = foldl' addHeader "" res
    border  = foldl' addBorder "" res
    values  = foldl' addResults "" res
    -- One column header: the shortened test path.
    addHeader soFar r =
      soFar ++ "| " ++ simplify (extractPath r) ++ " | "
    -- One cell of the markdown separator row.
    addBorder soFar _ =
      soFar ++ "|" ++ replicate 13 '-' ++ "|"
    -- One result cell, padded with six spaces on each side.
    addResults soFar r =
      soFar ++ "|" ++ replicate 6 ' ' ++ convert (takeRes r)
            ++ replicate 6 ' ' ++ "|"
    -- T(rue), F(alse), or X for a run with no answer (timed out).
    convert (Just True)  = "T"
    convert (Just False) = "F"
    convert Nothing      = "X"
-- | Shorten a path for use in a table header: keep the last two
-- "-"-separated fields of the final path component, truncated to 5
-- characters each.
-- NOTE(review): 'last' is partial; an empty path would crash.
simplify :: FilePath -> String
simplify h = intercalate "-" $ map (take 5) (drop (length split - 2) split)
  where
    split = splitOn "-" (last (splitPath h))
-- | Spawn the diff executable on O1.clj/B1.clj in the current directory
-- and wait up to 'timeout_time' for it to finish.  Returns False when the
-- run timed out, True otherwise -- regardless of the child's exit code.
runDiff :: IO Bool
runDiff = do
  cwd <- getCurrentDirectory
  putStrLn $ "Running in diff " ++ cwd
  ph <- spawnProcess executablePath ["-s", "O1.clj", "-d", "B1.clj"]
  result <- timeout timeout_time (waitForProcess ph)
  case result of
    Nothing -> do
      -- Timed out: kill the still-running child before reporting failure.
      terminateProcess ph
      putStrLn $ "Process Timed out"
      return False
    Just exitcode -> do
      putStrLn $ "Process terminated with exitcode " ++ show exitcode
      return True
-- | Run 'processConflictFolder' on the current directory under a timeout,
-- converting IO exceptions to 'Error' results (via 'logAndReturnError')
-- and timeouts to 'Timeout'.
checkPredicates :: IO TestResult
checkPredicates = do
  cwd <- getCurrentDirectory
  putStrLn $ "Running in " ++ cwd
  result <- timeout timeout_time
              (catch (processConflictFolder cwd) logAndReturnError)
  case result of
    Nothing -> do
      putStrLn $ "Process Timed out"
      return $ Timeout cwd
    Just r -> return r -- Error or success
-- | Print the IOException and record the current directory as an 'Error'
-- result.
logAndReturnError :: IOException -> IO TestResult
logAndReturnError e = do
  cwd <- getCurrentDirectory
  putStrLn (show (e :: IOException))
  return (Error cwd)
-- | Decode an exit code into a TestResult for the given path.  The code is
-- (value + 1), where value's four binary digits are, from most to least
-- significant: disjoint, structurally disjoint, compatible, structurally
-- compatible.
decode :: Int -> FilePath -> TestResult
decode i p = TestResult {
    disj=disj
  , sDisj = sDisj
  , comp = comp
  , sComp = sComp
  , tPath = p
  }
  where
    int2b '1' = True
    int2b '0' = False
    int2b i = error $ "Can not decode " ++ show i -- note: this 'i' shadows decode's argument
    disj = int2b $ numbers !! 0
    sDisj = int2b $ numbers !! 1
    comp = int2b $ numbers !! 2
    sComp = int2b $ numbers !! 3
    numbers = conv2Bin (i - 1)
-- | Render a non-negative Int in binary, left-padded with '0' to at least
-- four digits (wider values are returned unpadded).
conv2Bin :: Int -> String
conv2Bin x
  | digits >= 4 = raw
  | otherwise   = replicate (4 - digits) '0' ++ raw
  where
    raw    = Numeric.showIntAtBase 2 Data.Char.intToDigit x ""
    digits = length raw
-- | Check that O1.clj and A1.clj produce no move conflicts; when they do,
-- the conflicts are printed before the status is returned.
-- (Name kept as-is, typo included, since callers may reference it; the
-- duplicated return branch was collapsed with 'unless'.)
checkNoConflicy :: IO (Maybe Bool)
checkNoConflicy = do
  cwd <- getCurrentDirectory
  putStrLn $ "Running in " ++ cwd
  conflicts <- checkConflict "O1.clj" "A1.clj"
  let status = all (all (==NoConflict)) conflicts
  unless status (putStrLn $ show conflicts)
  return (Just status)
-- | Parse the two files and check them against the copy maps built from a
-- line diff of their sources; the exact conflict semantics live in
-- 'checkCopyMaps'.  The intermediate copy maps and the grouped
-- delete/insert map are printed for debugging.
checkConflict :: String -> String -> IO [[MbMoveConflict]]
checkConflict srcFile dstFile = do
  s <- readFile srcFile
  d <- readFile dstFile
  src <- parseFile "" s
  dst <- parseFile "" d
  let cp = buildCopyMaps (preprocess s d)
  let diff3 = buildDelInsMap $ preprocessGrouped s d
  putStrLn $ "cp" ++ show cp
  putStrLn $ "diff3" ++ show diff3
  return $ checkCopyMaps cp src dst
-- | Command-line options.
data Opts = Opts {
  dry :: Bool -- ^ when set, only print the summary; do not write tables
  }
-- | Parser for the --dry / -d flag.
opts :: Parser Opts
opts = Opts <$> switch
  ( long "dry"
  <> short 'd'
  <> help "Dry run")
-- | Top-level parser info with --help support.
optsHelper :: ParserInfo Opts
optsHelper = info (helper <*> opts)
  ( fullDesc
  <> progDesc "Clojure parser in Haskell"
) | nazrhom/vcs-clojure | app/Tester.hs | bsd-3-clause | 7,584 | 0 | 15 | 1,682 | 2,506 | 1,245 | 1,261 | 210 | 3 |
-- This module contains a 1:1 translation of registry.rnc with an (un-)parser.
-- No simplifications are done here.
module Registry (
parseRegistry, unparseRegistry,
Registry(..),
RegistryElement(..),
VendorIds(..),
VendorId(..),
Tags(..),
Tag(..),
Types(..),
Type(..),
TypeFragment(..),
Member(..),
Enums(..),
Enum'(..),
EnumValue(..),
Unused(..),
Commands(..),
Command(..),
Proto(..),
ProtoPart(..),
Param(..),
Feature(..),
Modification(..),
ModificationKind(..),
Extensions(..),
Extension(..),
ConditionalModification(..),
InterfaceElement(..),
Validity(..),
Usage(..),
Integer'(..),
EnumName(..),
ExtensionName(..),
TypeName(..),
TypeSuffix(..),
StringGroup(..),
ProfileName(..),
Vendor(..),
Comment(..),
Name(..),
Author(..),
Contact(..),
Bool'(..)
) where
import Data.Maybe ( maybeToList )
import Text.XML.HXT.Core
--------------------------------------------------------------------------------
-- | Parse a registry XML document from a String.
-- NOTE(review): 'head' makes this partial -- if the arrow pipeline yields
-- no result at all, this crashes instead of returning a Left.
parseRegistry :: String -> Either String Registry
parseRegistry = head . (runLA $
  xreadDoc >>>
  neg isXmlPi >>>
  removeAllWhiteSpace >>>
  canonicalizeAllNodes >>> -- needed to e.g. process CDATA, remove XML comments, etc.
  arr (unpickleDoc' xpRegistry))
-- | Render a Registry back to an XML document string: indented, UTF-8
-- encoded, and prefixed with an XML processing instruction.
unparseRegistry :: Registry -> String
unparseRegistry =
  concat . (pickleDoc xpRegistry >>>
            runLA (writeDocumentToString [withIndent yes,
                                          withOutputEncoding utf8,
                                          withXmlPi yes]))
--------------------------------------------------------------------------------
-- Note: We do this slightly differently from the schema: the registry's
-- children are kept as one ordered, heterogeneous list instead of
-- separate per-kind fields.
newtype Registry = Registry {
  unRegistry :: [RegistryElement]
} deriving (Eq, Ord, Show)
-- | Pickler mapping 'Registry' to/from the <registry> document element.
xpRegistry :: PU Registry
xpRegistry =
  xpWrap (Registry, unRegistry) $
  xpElem "registry" $
  xpList xpRegistryElement
--------------------------------------------------------------------------------
-- | One top-level child of <registry>; document order is preserved by the
-- surrounding list in 'Registry'.
data RegistryElement
  = CommentElement { unCommentElement :: Comment }
  | VendorIdsElement { unVendorIdsElement :: VendorIds }
  | TagsElement { unTagsElement :: Tags }
  | TypesElement { unTypesElement:: Types }
  | EnumsElement { unEnumsElement :: Enums }
  | CommandsElement { unCommandsElement :: Commands }
  | FeatureElement { unFeatureElement :: Feature }
  | ExtensionsElement { unExtensionsElement :: Extensions }
  deriving (Eq, Ord, Show)
-- | Pickler choosing the child pickler by constructor; the indices
-- returned by 'tag' must line up with the order of 'pus'.
xpRegistryElement :: PU RegistryElement
xpRegistryElement = xpAlt tag pus
  where tag (CommentElement _) = 0
        tag (VendorIdsElement _) = 1
        tag (TagsElement _) = 2
        tag (TypesElement _) = 3
        tag (EnumsElement _) = 4
        tag (CommandsElement _) = 5
        tag (FeatureElement _) = 6
        tag (ExtensionsElement _) = 7
        pus = [ xpWrap (CommentElement, unCommentElement) xpCommentElement
              , xpWrap (VendorIdsElement, unVendorIdsElement) xpVendorIds
              , xpWrap (TagsElement, unTagsElement) xpTags
              , xpWrap (TypesElement, unTypesElement) xpTypes
              , xpWrap (EnumsElement, unEnumsElement) xpEnums
              , xpWrap (CommandsElement, unCommandsElement) xpCommands
              , xpWrap (FeatureElement, unFeatureElement) xpFeature
              , xpWrap (ExtensionsElement, unExtensionsElement) xpExtensions ]
--------------------------------------------------------------------------------
-- | Contents of a <vendorids> element.
newtype VendorIds = VendorIds {
  unVendorIds :: [VendorId]
} deriving (Eq, Ord, Show)
xpVendorIds :: PU VendorIds
xpVendorIds =
  xpWrap (VendorIds, unVendorIds) $
  xpElem "vendorids" $
  xpList xpVendorId
-- | A single <vendorid>: vendor name, numeric id attribute, and an
-- optional comment attribute.
data VendorId = VendorId {
  vendorIdName :: Vendor,
  vendorIdId :: Integer',
  vendorIdComment :: Maybe Comment
} deriving (Eq, Ord, Show)
xpVendorId :: PU VendorId
xpVendorId =
  xpWrap (\(a,b,c) -> VendorId a b c
         ,\(VendorId a b c) -> (a,b,c)) $
  xpElem "vendorid" $
  xpTriple
    xpVendorName
    (xpAttr "id" xpInteger)
    (xpOption xpComment)
--------------------------------------------------------------------------------
newtype Tags = Tags {
unTags :: [Tag]
} deriving (Eq, Ord, Show)
xpTags :: PU Tags
xpTags =
xpWrap (Tags, unTags) $
xpElem "tags" $
xpList xpTag
--------------------------------------------------------------------------------
data Tag = Tag {
tagName :: Vendor,
tagAuthor :: Author,
tagContact :: Contact
} deriving (Eq, Ord, Show)
xpTag :: PU Tag
xpTag =
xpWrap (\(a,b,c) -> Tag a b c
,\(Tag a b c) -> (a,b,c)) $
xpElem "tag" $
xpTriple
xpVendorName
(xpAttr "author" xpAuthor)
(xpAttr "contact" xpContact)
--------------------------------------------------------------------------------
newtype Types = Types {
unTypes :: [Type]
} deriving (Eq, Ord, Show)
xpTypes :: PU Types
xpTypes =
xpWrap (Types, unTypes) $
xpElem "types" $
xpList xpType
--------------------------------------------------------------------------------
-- Note: The schema for types in the Vulkan registry is a tragedy, so let's only
-- do some basic handling here and do some more parsing later.
data Type = Type {
typeAPI :: Maybe String,
typeRequires :: Maybe String,
typeName1 :: Maybe TypeName,
typeCategory :: Maybe String,
typeParent :: Maybe TypeName,
typeReturnedOnly :: Maybe Bool',
typeComment :: Maybe Comment,
typeBody :: [TypeFragment]
} deriving (Eq, Ord, Show)
xpType :: PU Type
xpType =
xpWrap (\(a,b,c,d,e,f,g,h) -> Type a b c d e f g h
,\(Type a b c d e f g h) -> (a,b,c,d,e,f,g,h)) $
xpElem "type" $
xp8Tuple
(xpAttrImplied "api" xpText)
(xpAttrImplied "requires" xpText)
(xpAttrImplied "name" xpTypeName)
(xpAttrImplied "category" xpText)
(xpAttrImplied "parent" xpTypeName)
(xpAttrImplied "returnedonly" xpBool)
(xpOption xpComment)
(xpList xpTypeFragment)
--------------------------------------------------------------------------------
data TypeFragment
= Text { unText :: String }
| TypeRef { unTypeRef :: TypeName }
| APIEntry { unAPIEntry :: String }
| NameDef { unNameDef :: TypeName }
| EnumRef { unEnumRef :: EnumName }
| MemberDef { unMemberDef :: Member }
| ValiditySpec { unValiditySpec :: Validity }
deriving (Eq, Ord, Show)
xpTypeFragment :: PU TypeFragment
xpTypeFragment =
xpAlt tag pus
where tag (Text _) = 0
tag (TypeRef _) = 1
tag (APIEntry _) = 2
tag (NameDef _) = 3
tag (EnumRef _) = 4
tag (MemberDef _) = 5
tag (ValiditySpec _) = 6
pus = [ xpWrap (Text, unText) xpText
, xpWrap (TypeRef, unTypeRef) $ xpElem "type" xpTypeName
, xpWrap (APIEntry, unAPIEntry) $ xpElem "apientry" xpText
, xpWrap (NameDef, unNameDef) $ xpElem "name" xpTypeName
, xpWrap (EnumRef, unEnumRef) $ xpElem "enum" xpEnumName
, xpWrap (MemberDef, unMemberDef) xpMember
, xpWrap (ValiditySpec, unValiditySpec) xpValidity]
--------------------------------------------------------------------------------
data Member = Member {
memberLen :: Maybe String,
memberExternSync :: Maybe String,
memberOptional :: Maybe Bool',
memberNoAutoValidity :: Maybe Bool',
memberParts :: [TypeFragment]
} deriving (Eq, Ord, Show)
xpMember :: PU Member
xpMember =
xpWrap (\(a,b,c,d,e) -> Member a b c d e
,\(Member a b c d e) -> (a,b,c,d,e)) $
xpElem "member" $
xp5Tuple
(xpAttrImplied "len" xpText)
(xpAttrImplied "externsync" xpText)
(xpAttrImplied "optional" xpBool)
(xpAttrImplied "noautovalidity" xpBool)
(xpList xpTypeFragment)
--------------------------------------------------------------------------------
data Enums = Enums {
enumsName :: Maybe TypeName,
enumsType :: Maybe String,
enumsStart :: Maybe Integer',
enumsEnd :: Maybe Integer',
enumsVendor :: Maybe Vendor,
enumsComment :: Maybe Comment,
enumsEnumOrUnuseds :: [Either Enum' Unused]
} deriving (Eq, Ord, Show)
xpEnums :: PU Enums
xpEnums =
xpWrap (\(a,b,c,d,e,f,g) -> Enums a b c d e f g
,\(Enums a b c d e f g) -> (a,b,c,d,e,f,g)) $
xpElem "enums" $
xp7Tuple
(xpAttrImplied "name" xpTypeName)
(xpAttrImplied "type" xpText)
(xpAttrImplied "start" xpInteger)
(xpAttrImplied "end" xpInteger)
(xpOption xpVendor)
(xpOption xpComment)
(xpList $ xpEither xpEnum xpUnused)
--------------------------------------------------------------------------------
-- | Combine two picklers into one for 'Either': Left values use the first
-- pickler (tag 0), Right values the second (tag 1).
xpEither :: PU a -> PU b -> PU (Either a b)
xpEither pl pr = xpAlt tag [pLeft, pRight]
  where
    tag (Left _)  = 0
    tag (Right _) = 1
    pLeft  = xpWrap (Left,  \(Left l) -> l)  pl
    pRight = xpWrap (Right, \(Right r) -> r) pr
--------------------------------------------------------------------------------
data Enum' = Enum {
enumValue :: Maybe EnumValue,
enumAPI :: Maybe String,
enumType :: Maybe TypeSuffix,
enumName :: String,
enumAlias :: Maybe String,
enumComment :: Maybe Comment
} deriving (Eq, Ord, Show)
-- NOTE: The spec uses the interleave pattern, which is not needed: Attributes
-- are by definition unordered.
xpEnum :: PU Enum'
xpEnum =
xpWrap (\(a,b,c,d,e,f) -> Enum a b c d e f
,\(Enum a b c d e f) -> (a,b,c,d,e,f)) $
xpElem "enum" $
xp6Tuple
(xpOption xpEnumValue)
(xpAttrImplied "api" xpText)
(xpAttrImplied "type" xpTypeSuffix)
(xpAttr "name" xpText)
(xpAttrImplied "alias" xpText)
(xpOption xpComment)
--------------------------------------------------------------------------------
data EnumValue
= Value Integer' (Maybe TypeName)
| BitPos Integer' (Maybe TypeName)
| Offset Integer' (Maybe String) TypeName
deriving (Eq, Ord, Show)
xpEnumValue :: PU EnumValue
xpEnumValue = xpAlt tag pus
where tag (Value _ _) = 0
tag (BitPos _ _) = 1
tag (Offset _ _ _) = 2
pus = [ xpValue, xpBitPos, xpOffset ]
xpValue = xpWrap (\(a,b) -> Value a b
,\(Value a b) -> (a,b)) $
xpPair
(xpAttr "value" xpInteger)
(xpAttrImplied "extends" xpTypeName)
xpBitPos = xpWrap (\(a,b) -> BitPos a b
,\(BitPos a b) -> (a,b)) $
xpPair
(xpAttr "bitpos" xpInteger)
(xpAttrImplied "extends" xpTypeName)
xpOffset = xpWrap (\(a,b,c) -> Offset a b c
,\(Offset a b c) -> (a,b,c)) $
xpTriple
(xpAttr "offset" xpInteger)
(xpAttrImplied "dir" xpText)
(xpAttr "extends" xpTypeName)
--------------------------------------------------------------------------------
data Unused = Unused {
unusedStart :: Integer',
unusedEnd :: Maybe Integer',
unusedVendor :: Maybe Vendor,
unusedComment :: Maybe Comment
} deriving (Eq, Ord, Show)
xpUnused :: PU Unused
xpUnused =
xpWrap (\(a,b,c,d) -> Unused a b c d
,\(Unused a b c d) -> (a,b,c,d)) $
xpElem "unused" $
xp4Tuple
(xpAttr "start" xpInteger)
(xpAttrImplied "end" xpInteger)
(xpOption xpVendor)
(xpOption xpComment)
--------------------------------------------------------------------------------
newtype Commands = Commands {
unCommands :: [Command]
} deriving (Eq, Ord, Show)
xpCommands :: PU Commands
xpCommands =
xpWrap (Commands, unCommands) $
xpElem "commands" $
xpList xpCommand
--------------------------------------------------------------------------------
data Command = Command {
commandQueues :: Maybe String,
commandSuccessCodes :: Maybe String,
commandErrorCodes :: Maybe String,
commandRenderPass :: Maybe String,
commandCmdBufferLevel :: Maybe String,
commandComment :: Maybe Comment,
commandProto :: Proto,
commandParams :: [Param],
commandAlias :: Maybe Name,
commandDescription :: Maybe String,
commandImplicitExternSyncParams :: Maybe [String],
commandValidity :: Maybe Validity
} deriving (Eq, Ord, Show)
xpCommand :: PU Command
xpCommand =
xpWrap (\(a,b,c,d,e,f,g,h,(i,j,k,l)) -> Command a b c d e f g h i j k l
,\(Command a b c d e f g h i j k l) -> (a,b,c,d,e,f,g,h,(i,j,k,l))) $
xpElem "command" $
xp9Tuple
(xpAttrImplied "queues" xpText)
(xpAttrImplied "successcodes" xpText)
(xpAttrImplied "errorcodes" xpText)
(xpAttrImplied "renderpass" xpText)
(xpAttrImplied "cmdbufferlevel" xpText)
(xpOption xpComment)
(xpElem "proto" xpProto)
(xpList xpParam)
xpCommandTail
-- The spec uses the interleave pattern here, which is not supported in hxt.
-- As a workaround, we use a list of disjoint types.
xpCommandTail :: PU (Maybe Name, Maybe String, Maybe [String], Maybe Validity)
xpCommandTail =
xpWrapEither (\xs -> do a <- check "alias" [x | AliasElement x <- xs]
b <- check "description" [x | DescriptionElement x <- xs]
c <- check "implicitexternsyncparams" [x | ImplicitExternSyncParams x <- xs]
d <- check "validity" [x | ValidityElement x <- xs]
return (a,b,c,d)
,\(a,b,c,d) -> map AliasElement (maybeToList a) ++
map DescriptionElement (maybeToList b) ++
map ImplicitExternSyncParams (maybeToList c) ++
map ValidityElement (maybeToList d)) $
xpList $
xpAlt tag pus
where tag (AliasElement _) = 0
tag (DescriptionElement _) = 1
tag (ImplicitExternSyncParams _) = 2
tag (ValidityElement _) = 3
pus = [ xpWrap (AliasElement, unAliasElement) $
xpElem "alias" xpName
, xpWrap (DescriptionElement, unDescriptionElement) $
xpElem "description" xpText
, xpWrap (ImplicitExternSyncParams, unImplicitExternSyncParams) $
xpElem "implicitexternsyncparams" $
xpList $
xpElem "param" xpText
, xpWrap (ValidityElement, unValidityElement) xpValidity]
check n xs = case xs of
[] -> Right Nothing
[x] -> Right $ Just x
_ -> Left $ "expected at most one '" ++ n ++ "' element"
data CommandTail
= AliasElement { unAliasElement :: Name }
| DescriptionElement { unDescriptionElement :: String }
| ImplicitExternSyncParams { unImplicitExternSyncParams :: [String] }
| ValidityElement { unValidityElement :: Validity }
deriving (Eq, Ord, Show)
--------------------------------------------------------------------------------
newtype Proto = Proto {
unProto :: [Either String ProtoPart]
} deriving (Eq, Ord, Show)
xpProto :: PU Proto
xpProto = xpWrap (Proto, unProto) xpProtoContent
--------------------------------------------------------------------------------
data ProtoPart
= ProtoType { unProtoType :: TypeName }
| ProtoName { unProtoName :: String }
deriving (Eq, Ord, Show)
xpProtoContent :: PU [Either String ProtoPart]
xpProtoContent =
xpList $
xpAlt tag pus
where tag (Left _) = 0
tag (Right (ProtoType _)) = 1
tag (Right (ProtoName _)) = 2
pus = [ xpWrap (Left, either id (error "Right?")) xpText
, xpWrap (Right . ProtoType, either (error "Left?") unProtoType) $
xpElem "type" xpTypeName
, xpWrap (Right . ProtoName, either (error "Left?") unProtoName) $
xpElem "name" xpText ]
--------------------------------------------------------------------------------
data Param = Param {
paramLen :: Maybe String,
paramExternSync :: Maybe String,
paramOptional :: Maybe String,
paramNoAutoValidity :: Maybe String,
paramProto :: Proto
} deriving (Eq, Ord, Show)
xpParam :: PU Param
xpParam =
xpWrap (\(a,b,c,d,e) -> Param a b c d e
,\(Param a b c d e) -> (a,b,c,d,e)) $
xpElem "param" $
xp5Tuple
(xpAttrImplied "len" xpText)
(xpAttrImplied "externsync" xpText)
(xpAttrImplied "optional" xpText)
(xpAttrImplied "noautovalidity" xpText)
xpProto
--------------------------------------------------------------------------------
data Feature = Feature {
featureAPI :: String,
featureName :: Name,
featureNumber :: String, -- actually xsd:float, but used as a string
featureProtect :: Maybe String,
featureComment :: Maybe Comment,
featureModifications :: [Modification]
} deriving (Eq, Ord, Show)
xpFeature :: PU Feature
xpFeature =
xpWrap (\(a,b,c,d,e,f) -> Feature a b c d e f
,\(Feature a b c d e f) -> (a,b,c,d,e,f)) $
xpElem "feature" $
xp6Tuple
(xpAttr "api" xpText)
xpName
(xpAttr "number" xpText)
(xpAttrImplied "protect" xpText)
(xpOption xpComment)
(xpList xpModification)
--------------------------------------------------------------------------------
data Modification = Modification {
modificationModificationKind :: ModificationKind,
modificationProfileName :: Maybe ProfileName,
modificationComment :: Maybe Comment,
modificationInterfaceElements :: [InterfaceElement],
modificationUsages :: [(Either String String, Usage)] -- TODO: Better type
} deriving (Eq, Ord, Show)
data ModificationKind = Require | Remove -- TODO: Better name
deriving (Eq, Ord, Show, Enum)
xpModification :: PU Modification
xpModification =
xpAlt (fromEnum . modificationModificationKind) pus
where pus = [ xpMod "require" Require
, xpMod "remove" Remove ]
xpMod el kind =
xpWrap (\(a,b,c,d) -> Modification kind a b c d
,\(Modification _ a b c d) -> (a,b,c,d)) $
xpElem el $
xp4Tuple
(xpOption xpProfileName)
(xpOption xpComment)
(xpList xpInterfaceElement)
(xpList xpUsageWithAttr)
xpUsageWithAttr :: PU (Either String String, Usage)
xpUsageWithAttr =
xpElem "usage" $
xpPair
(xpAlt tag pus)
(xpWrap (Usage, unUsage) xpText)
where tag (Left _) = 0
tag (Right _) = 1
pus = [ xpWrap (Left, either id (error "Right?")) $
xpAttr "struct" xpText
, xpWrap (Right, either (error "Left?") id) $
xpAttr "command" xpText ]
--------------------------------------------------------------------------------
newtype Extensions = Extensions {
unExtensions :: [Extension]
} deriving (Eq, Ord, Show)
xpExtensions :: PU Extensions
xpExtensions =
xpWrap (Extensions, unExtensions) $
xpElem "extensions" $
xpList xpExtension
--------------------------------------------------------------------------------
data Extension = Extension {
extensionName :: Name,
extensionNumber :: Maybe Integer',
extensionProtect :: Maybe String,
extensionSupported :: Maybe StringGroup,
extensionAuthor :: Maybe Author,
extensionContact :: Maybe Contact,
extensionComment :: Maybe Comment,
extensionsRequireRemove :: [ConditionalModification]
} deriving (Eq, Ord, Show)
xpExtension :: PU Extension
xpExtension =
xpWrap (\(a,b,c,d,e,f,g,h) -> Extension a b c d e f g h
,\(Extension a b c d e f g h) -> (a,b,c,d,e,f,g,h)) $
xpElem "extension" $
xp8Tuple
xpName
(xpAttrImplied "number" xpInteger)
(xpAttrImplied "protect" xpText)
(xpAttrImplied "supported" xpStringGroup)
(xpAttrImplied "author" xpAuthor)
(xpAttrImplied "contact" xpContact)
(xpOption xpComment)
(xpList xpConditionalModification)
--------------------------------------------------------------------------------
data ConditionalModification = ConditionalModification {
conditionalModificationAPI :: Maybe String,
conditionalModificationModification :: Modification
} deriving (Eq, Ord, Show)
xpConditionalModification :: PU ConditionalModification
xpConditionalModification =
xpAlt (fromEnum . modificationModificationKind . conditionalModificationModification) pus
where pus = [ xpMod "require" Require
, xpMod "remove" Remove ]
xpMod el kind =
xpWrap (\(a,b,c,d,e) -> ConditionalModification a (Modification kind b c d e)
,\(ConditionalModification a (Modification _ b c d e)) -> (a,b,c,d,e)) $
xpElem el $
xp5Tuple
(xpAttrImplied "api" xpText)
(xpOption xpProfileName)
(xpOption xpComment)
(xpList xpInterfaceElement)
(xpList xpUsageWithAttr)
--------------------------------------------------------------------------------
data InterfaceElement
= InterfaceType Name (Maybe Comment)
| InterfaceEnum Enum'
| InterfaceCommand Name (Maybe Comment)
deriving (Eq, Ord, Show)
xpInterfaceElement :: PU InterfaceElement
xpInterfaceElement = xpAlt tag pus
where tag (InterfaceType _ _) = 0
tag (InterfaceEnum _) = 1
tag (InterfaceCommand _ _) = 2
pus = [ xpInterfaceType, xpInterfaceEnum, xpInterfaceCommand ]
xpInterfaceType = xpWrap (\(a,b) -> InterfaceType a b
,\(InterfaceType a b) -> (a,b)) $
xpElem "type" $
xpPair xpName (xpOption xpComment)
xpInterfaceEnum = xpWrap (InterfaceEnum, \(InterfaceEnum e) -> e) xpEnum
xpInterfaceCommand = xpWrap (\(a,b) -> InterfaceCommand a b
,\(InterfaceCommand a b) -> (a,b)) $
xpElem "command" $
xpPair xpName (xpOption xpComment)
--------------------------------------------------------------------------------
newtype Validity = Validity {
unValidity :: [Usage]
} deriving (Eq, Ord, Show)
xpValidity :: PU Validity
xpValidity =
xpWrap (Validity, unValidity) $
xpElem "validity" $
xpList xpUsage
--------------------------------------------------------------------------------
newtype Usage = Usage {
unUsage :: String
} deriving (Eq, Ord, Show)
xpUsage :: PU Usage
xpUsage =
xpWrap (Usage, unUsage) $
xpElem "usage" xpText
--------------------------------------------------------------------------------
-- | An integer-valued attribute, kept as its raw string representation.
newtype Integer' = Integer {
  unInteger :: String
} deriving (Eq, Ord, Show)
xpInteger :: PU Integer'
xpInteger = xpWrap (Integer, unInteger) xpText
--------------------------------------------------------------------------------
-- | The name of an enumerant.
newtype EnumName = EnumName {
  unEnumName :: String
} deriving (Eq, Ord, Show)
xpEnumName :: PU EnumName
xpEnumName = xpWrap (EnumName, unEnumName) xpText
--------------------------------------------------------------------------------
-- | The name of an extension.  Its pickler is currently unused, hence
-- commented out.
newtype ExtensionName = ExtensionName {
  unExtensionName :: String
} deriving (Eq, Ord, Show)
-- xpExtensionName :: PU ExtensionName
-- xpExtensionName = xpWrap (ExtensionName, unExtensionName) xpText
--------------------------------------------------------------------------------
-- | The name of a type.
newtype TypeName = TypeName {
  unTypeName :: String
} deriving (Eq, Ord, Show)
xpTypeName :: PU TypeName
xpTypeName = xpWrap (TypeName, unTypeName) xpText
--------------------------------------------------------------------------------
-- | A type suffix (the "type" attribute of an enum).
newtype TypeSuffix = TypeSuffix {
  unTypeSuffix :: String
} deriving (Eq, Ord, Show)
xpTypeSuffix :: PU TypeSuffix
xpTypeSuffix = xpWrap (TypeSuffix, unTypeSuffix) xpText
--------------------------------------------------------------------------------
-- | An unparsed group of strings (e.g. the "supported" attribute).
newtype StringGroup = StringGroup {
  unStringGroup :: String
} deriving (Eq, Ord, Show)
xpStringGroup :: PU StringGroup
xpStringGroup = xpWrap (StringGroup, unStringGroup) xpText
--------------------------------------------------------------------------------
-- | A profile name; pickled as the "profile" attribute.
newtype ProfileName = ProfileName {
  unProfileName :: String
} deriving (Eq, Ord, Show)
xpProfileName :: PU ProfileName
xpProfileName =
  xpWrap (ProfileName, unProfileName) $
  xpAttr "profile" xpText
--------------------------------------------------------------------------------
-- | A vendor name; appears either as a "vendor" or a "name" attribute
-- depending on context, hence the two picklers below.
newtype Vendor = Vendor {
  unVendor :: String
} deriving (Eq, Ord, Show)
xpVendor :: PU Vendor
xpVendor = xpVendorAttr "vendor"
xpVendorName :: PU Vendor
xpVendorName = xpVendorAttr "name"
-- | Pickle a Vendor as the attribute with the given name.
xpVendorAttr :: String -> PU Vendor
xpVendorAttr a =
  xpWrap (Vendor, unVendor) $
  xpAttr a xpText
--------------------------------------------------------------------------------
-- | A comment; appears either as a "comment" attribute or as a <comment>
-- element depending on context, hence the two picklers below.
newtype Comment = Comment {
  unComment :: String
} deriving (Eq, Ord, Show)
xpComment :: PU Comment
xpComment = xpCommentAs xpAttr
xpCommentElement :: PU Comment
xpCommentElement = xpCommentAs xpElem
-- | Pickle a Comment via the given attribute/element combinator.
xpCommentAs :: (String -> PU String -> PU String) -> PU Comment
xpCommentAs xp =
  xpWrap (Comment, unComment) $
  xp "comment" xpText
--------------------------------------------------------------------------------
-- | A generic name; pickled as the "name" attribute.
newtype Name = Name {
  unName :: String
} deriving (Eq, Ord, Show)
xpName :: PU Name
xpName =
  xpWrap (Name, unName) $
  xpAttr "name" xpText
--------------------------------------------------------------------------------
-- | An author name (plain text; attribute placement chosen by callers).
newtype Author = Author {
  unAuthor :: String
} deriving (Eq, Ord, Show)
xpAuthor :: PU Author
xpAuthor = xpWrap (Author, unAuthor) xpText
--------------------------------------------------------------------------------
-- | Contact information (plain text).
newtype Contact = Contact {
  unContact :: String
} deriving (Eq, Ord, Show)
xpContact :: PU Contact
xpContact = xpWrap (Contact, unContact) xpText
--------------------------------------------------------------------------------
-- | A boolean-valued attribute, kept as its raw string representation.
newtype Bool' = Bool {
  unBool :: String
} deriving (Eq, Ord, Show)
xpBool :: PU Bool'
xpBool = xpWrap (Bool, unBool) xpText
| svenpanne/Vulkan | RegistryProcessor/src/Registry.hs | bsd-3-clause | 25,667 | 0 | 17 | 5,829 | 7,425 | 4,117 | 3,308 | 637 | 8 |
-- | Umbrella module that re-exports the library's public interface.
module Validations
  ( module Validations.Types
  , module Validations.Internal
  , module Validations.Adapters
  , module Validations.Validator
  , module Validations.Validation
  ) where
import Validations.Types
import Validations.Internal
import Validations.Adapters
import Validations.Validator
import Validations.Validation
| mavenraven/validations | src/Validations.hs | bsd-3-clause | 330 | 0 | 5 | 42 | 61 | 39 | 22 | 11 | 0 |
-- | Partial destructuring of a structure into one element and the rest.
module Sexy.Classes.DesnocMay (DesnocMay(..)) where
import Sexy.Data (Maybe)
-- NOTE(review): which element is split off (first or last) is not fixed by
-- this file; as the dual of "snoc" it is presumably the last one --
-- confirm against the instances.
class DesnocMay l where
  desnocMay :: l a -> Maybe (a, l a)
| DanBurton/sexy | src/Sexy/Classes/DesnocMay.hs | bsd-3-clause | 141 | 0 | 10 | 25 | 59 | 33 | 26 | 4 | 0 |
import Paths_sample
-- | Locate the bundled data file "name.txt" via Paths_sample, read the
-- name it contains, and print a greeting.  'init' strips the trailing
-- newline.
-- NOTE(review): 'init' errors on an empty file -- acceptable for a demo.
main :: IO ()
main = do
  fp <- getDataFileName "name.txt"
  name <- readFile fp
  putStrLn $ "Hello, " ++ init name ++ "!"
| YoshikuniJujo/forest | examples/cabal/test-suite/hello.hs | bsd-3-clause | 128 | 0 | 9 | 26 | 47 | 21 | 26 | 5 | 1 |
module Main (main) where
import Test.Tasty
import Test.Tasty.Hspec
import Test.Tasty.Ingredients.Basic
import Test.Tasty.Runners.AntXML
-- | Build the spec tree and run it with the ant-XML reporter available in
-- addition to tasty's default ingredients.
main :: IO ()
main = do
  theSpecs <- testSpec "Specs" specs
  defaultMainWithIngredients (antXMLRunner:defaultIngredients) theSpecs
-- | Example specs.
specs :: Spec
specs = do
  describe "A test" $ do
    it "passes" $ (1::Int) `shouldBe` 1
    -- NOTE(review): despite its label, this assertion also passes (1 == 1).
    it "fails - or does it?" $ (1::Int) `shouldBe` 1
| cdodev/scintilla | test/Spec.hs | bsd-3-clause | 415 | 0 | 12 | 71 | 140 | 77 | 63 | 14 | 1 |
module Instructions where
-- | The flat instruction set executed by the interpreter.  Jump targets
-- and block sizes are given as instruction counts.
data Instruction
  = InstrJump Int --jumps give the number of the node where execution should resume
  | InstrConditionalJump Int --jump taken only if the tested value is false
  | InstrFunctionDecl String [String] Int --number of instructions in the function block
  | InstrClassDecl String [String]
  | InstrReturn
  | InstrVarLookup String
  | InstrGlobalLookup String
  | InstrPushConstStr String
  | InstrPushConstInt Int
  | InstrFunctionCall
  | InstrArrayAccess
  | InstrObjNew String
  | InstrObjMemberAccess
  | InstrAssign
  | InstrCompareEq
  | InstrCompareLt
  | InstrCompareGt
  | InstrCompareLeq
  | InstrCompareGeq
  | InstrArithPlus
  | InstrArithMinus
  | InstrArithMul
  | InstrArithDiv
  | InstrArithMod
  | InstrArithInc
  | InstrArithDec
  | InstrLogicNot
  | InstrLogicAnd
  | InstrLogicOr
  | InstrBlockEnter --these are used to implement scoping
  | InstrBlockLeave
  | InstrStackPop --used to discard the top element of the stack
  | InstrLiteral Integer [Integer] --used by the inline assembler, just puts the value of the Instruction in there.
  | InstrLoad String --used to load other files
  deriving (Show)
| fegies/pseudocodeCompiler | src/Instructions.hs | bsd-3-clause | 1,216 | 0 | 7 | 298 | 163 | 105 | 58 | 37 | 0 |
module Board
( Board ()
, isValidBoard
, board
, unBoard
, tileAt
, updateTileAt
, tilesAt
, updateTilesAt
, findVoltorbTiles
, findOptionalTiles
, findRequiredTiles
, cluesFor
) where
import Data.Array (Array, array, bounds, (!), (//))
import Data.Char (intToDigit)
import Axis (axes)
import Clues (Clues, clues)
import Position (Position, axis, positionsByColumn, rows)
import Tile (Tile, clueFor, isVoltorb, isOptional, isRequired, unTile)
-- A 5x5 Board of Tiles
data Board = Board (Array Position Tile)
deriving (Eq, Ord)
-- Renders the board as one text line per row, each tile shown as the
-- single digit of its numeric value.
instance Show Board where
  show b =
    unlines $ map (map $ showTile . tileAt b) rows
    where showTile = intToDigit . unTile
-- A Board is valid exactly when its underlying array covers every
-- Position, i.e. its bounds span the full (minBound, maxBound) range.
isValidBoard :: Board -> Bool
isValidBoard (Board a) = bounds a == (minBound, maxBound)
-- Smart constructor for a Board; rejects arrays whose bounds do not
-- cover every Position.
board :: Array Position Tile -> Board
board a
  | isValidBoard candidate = candidate
  | otherwise              = error "Array does not have complete bounds"
  where candidate = Board a
-- Deconstructor for a Board.
unBoard :: Board -> Array Position Tile
unBoard (Board a) = a
-- Returns the Tile at the given Position of a Board.
tileAt :: Board -> Position -> Tile
tileAt b p = unBoard b ! p
-- Updates the Tile at the given Position of a Board.
updateTileAt :: Board -> Position -> Tile -> Board
updateTileAt b p t = updateTilesAt b [(p, t)]
-- Looks up the Tile at each of the given Positions, in order.
tilesAt :: Board -> [Position] -> [Tile]
tilesAt b ps = [tileAt b p | p <- ps]
-- Updates the Tiles at the corresponding Positions of a Board.
updateTilesAt :: Board -> [(Position, Tile)] -> Board
updateTilesAt b as = Board $ (// as) $ unBoard b
-- All Positions (in column order) whose Tile satisfies the predicate.
findTiles :: (Tile -> Bool) -> Board -> [Position]
findTiles wanted b = [p | p <- positionsByColumn, wanted (tileAt b p)]
-- Finds the Positions that contain 0-Tiles.
findVoltorbTiles :: Board -> [Position]
findVoltorbTiles = findTiles isVoltorb
-- Finds the Positions that contain 1-Tiles.
findOptionalTiles :: Board -> [Position]
findOptionalTiles = findTiles isOptional
-- Finds the Positions that contain 2/3-Tiles.
findRequiredTiles :: Board -> [Position]
findRequiredTiles = findTiles isRequired
-- Returns the Clues for all Axes of a Board: for every axis, the clue is
-- derived (via clueFor) from the tiles lying along that axis, and the
-- results are packed into an axis-indexed array.
cluesFor :: Board -> Clues
cluesFor b = clues $ array (minBound, maxBound) $ map (\a -> (a, clueFor $ tilesAt b $ axis a)) axes
| jameshales/voltorb-flip | src/Board.hs | bsd-3-clause | 2,420 | 0 | 12 | 480 | 677 | 377 | 300 | -1 | -1 |
-----------------------------------------------------------------------------
--
-- Old-style Cmm utilities.
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module OldCmmUtils(
CmmStmts, noStmts, oneStmt, mkStmts, plusStmts, stmtList,
isNopStmt,
maybeAssignTemp, loadArgsIntoTemps,
module CmmUtils,
) where
#include "HsVersions.h"
import OldCmm
import CmmUtils
import OrdList
import Unique
---------------------------------------------------
--
-- CmmStmts
--
---------------------------------------------------
type CmmStmts = OrdList CmmStmt
noStmts :: CmmStmts
noStmts = nilOL
oneStmt :: CmmStmt -> CmmStmts
oneStmt = unitOL
mkStmts :: [CmmStmt] -> CmmStmts
mkStmts = toOL
plusStmts :: CmmStmts -> CmmStmts -> CmmStmts
plusStmts = appOL
stmtList :: CmmStmts -> [CmmStmt]
stmtList = fromOL
---------------------------------------------------
--
-- CmmStmt
--
---------------------------------------------------
isNopStmt :: CmmStmt -> Bool
-- If isNopStmt returns True, the stmt is definitely a no-op;
-- but it might be a no-op even if isNopStmt returns False
-- (the check is purely syntactic and deliberately cheap).
isNopStmt CmmNop = True
isNopStmt (CmmAssign r e) = cheapEqReg r e -- r := r
isNopStmt (CmmStore e1 (CmmLoad e2 _)) = cheapEqExpr e1 e2 -- [e] := [e]
isNopStmt _ = False
-- Cheap, conservative syntactic equality on expressions: only register
-- and register+offset forms are recognised; anything else is reported
-- unequal (which is safe for the no-op check above).
cheapEqExpr :: CmmExpr -> CmmExpr -> Bool
cheapEqExpr (CmmReg r) e = cheapEqReg r e
cheapEqExpr (CmmRegOff r 0) e = cheapEqReg r e -- offset 0 is just the register
cheapEqExpr (CmmRegOff r n) (CmmRegOff r' n') = r==r' && n==n'
cheapEqExpr _ _ = False
-- Does the expression denote exactly this register?  Recognises the bare
-- register and the register with a zero offset; conservative otherwise.
cheapEqReg :: CmmReg -> CmmExpr -> Bool
cheapEqReg r (CmmReg r') = r==r'
cheapEqReg r (CmmRegOff r' 0) = r==r'
cheapEqReg _ _ = False
---------------------------------------------------
--
-- Helpers for foreign call arguments
--
---------------------------------------------------
-- Copies each actual argument into a fresh temporary where necessary
-- (see maybeAssignTemp), threading the unique supply left to right.
-- Returns the remaining uniques, the generated assignment statements,
-- and the rewritten actuals (hints preserved).
loadArgsIntoTemps :: [Unique]
                  -> [HintedCmmActual]
                  -> ([Unique], [CmmStmt], [HintedCmmActual])
loadArgsIntoTemps uniques [] = (uniques, [], [])
loadArgsIntoTemps uniques ((CmmHinted e hint):args) =
    (uniques'',
     new_stmts ++ remaining_stmts,
     (CmmHinted new_e hint) : remaining_e)
  where
    (uniques', new_stmts, new_e) = maybeAssignTemp uniques e
    (uniques'', remaining_stmts, remaining_e) =
        loadArgsIntoTemps uniques' args
-- If the expression mentions any global registers, copy its value into a
-- fresh local register (consuming one unique from the supply) and return
-- the assignment plus a reference to that local; otherwise the expression
-- is returned unchanged with no statements.
maybeAssignTemp :: [Unique] -> CmmExpr -> ([Unique], [CmmStmt], CmmExpr)
maybeAssignTemp uniques e
  | hasNoGlobalRegs e = (uniques, [], e)
  | otherwise = (tail uniques, [CmmAssign local e], CmmReg local)
  where local = CmmLocal (LocalReg (head uniques) (cmmExprType e))
| nomeata/ghc | compiler/cmm/OldCmmUtils.hs | bsd-3-clause | 2,788 | 0 | 11 | 627 | 682 | 382 | 300 | 50 | 1 |
-- |
-- Module : Network.Machine.Protocol.Torrent
-- Copyright : Lodvær 2015
-- License : BSD3
--
-- Maintainer : Lodvær <lodvaer@gmail.com>
-- Stability : provisional
-- Portability : unknown
--
-- Torrent machines.
module Network.Machine.Protocol.Torrent where
-- TODO
| lodvaer/machines-network | src/Network/Machine/Protocol/Torrent.hs | bsd-3-clause | 294 | 0 | 3 | 59 | 19 | 17 | 2 | 1 | 0 |
{-# LINE 1 "GHC.ConsoleHandler.hs" #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.ConsoleHandler
-- Copyright : (c) The University of Glasgow
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- NB. the contents of this module are only available on Windows.
--
-- Installing Win32 console handlers.
--
-----------------------------------------------------------------------------
module GHC.ConsoleHandler
where
import GHC.Base () -- dummy dependency
| phischu/fragnix | builtins/base/GHC.ConsoleHandler.hs | bsd-3-clause | 771 | 0 | 4 | 158 | 34 | 29 | 5 | 5 | 0 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds, PolyKinds, KindSignatures #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module TypeLevel where
-- | Type-level 'map': applies the type constructor @f@ to every element of a
-- promoted list, producing a list of the result kind.
type family Map (f :: k1 -> k2) (l :: [k1]) :: [k2] where
  Map f '[] = '[]
  Map f (e ': l) = (f e) ': (Map f l)
type family FoldR (f :: a -> b -> b) (end :: b) (l :: [a]) :: b
type instance FoldR f end '[] = end
type instance FoldR f end (e ': l) = f e (FoldR f end l)
type xs :++: ys = FoldR '(:) ys xs
type family Concat (xss :: [[k]]) :: [k] where
Concat '[] = '[]
Concat (xs ': xss) = xs :++: (Concat xss)
-- Undecidable instances?
-- ghc can't tell that FoldL is decreasing in its last argument
type family FoldL (f :: b -> a -> b) (acc :: b) (l :: [a]) :: b
type instance FoldL f acc '[] = acc
type instance FoldL f acc (e ': l) = FoldL f (f acc e) l
type family If (pred :: Bool) (a :: t) (b :: t) :: t where
If False a b = b
If True a b = a
-- Kind-indexed functor map: the closed-family equations are tried top to
-- bottom, dispatching on whether the container is a promoted list or Maybe.
-- There is no way to relate the kinds of the input (fa) and output (fb),
-- so that relationship is left unchecked.
type family FMap (f :: a -> b) (c :: fa) :: fb where
  FMap f l = Map f l
  FMap f Nothing = Nothing
  FMap f (Just x) = Just (f x)
type family MaybeCase (m :: Maybe a) (n :: b) (f :: a -> b) :: b where
MaybeCase Nothing n f = n
MaybeCase (Just a) n f = f a
type family MaybeCase' (m :: Maybe a) (n :: Maybe b) (f :: a -> b) :: Maybe b where
MaybeCase' Nothing n f = n
MaybeCase' (Just a) n f = Just (f a)
type family MaybeBind (m :: Maybe a) (f :: a -> Maybe b) :: Maybe b where
MaybeBind Nothing f = Nothing
MaybeBind (Just a) f = f a
| vladfi1/hs-misc | lib/Misc/TypeLevel.hs | mit | 1,563 | 0 | 8 | 391 | 730 | 420 | 310 | 34 | 0 |
{-# LANGUAGE GADTs, DataKinds, TypeFamilies #-}
module Control.OperationalTransformation.List
( N (..)
, Vector (..)
, Operation (..)
, apply
, compose
, TransformedPair (..)
, transform
) where
data N = Z | S N deriving (Eq, Show)
data Vector :: * -> N -> * where
EmptyV :: Vector a Z
ConsV :: a -> Vector a n -> Vector a (S n)
data Operation :: * -> N -> N -> * where
EmptyOp :: Operation a Z Z
RetainOp :: Operation a n m -> Operation a (S n) (S m)
InsertOp :: a -> Operation a n m -> Operation a n (S m)
DeleteOp :: Operation a n m -> Operation a (S n) m
-- | Apply an operation to a vector of length @n@, yielding a vector of
-- length @m@.  The length indices in the types rule out ill-sized
-- applications, so the catch-all equation is genuinely unreachable (it only
-- silences the exhaustiveness checker).
apply :: Operation a n m -> Vector a n -> Vector a m
apply EmptyOp EmptyV = EmptyV
apply (RetainOp o') (ConsV x xs) = ConsV x (apply o' xs) -- keep the element
apply (InsertOp x o') xs = ConsV x (apply o' xs)         -- add a new element
apply (DeleteOp o') (ConsV _ xs) = apply o' xs           -- drop the element
apply _ _ = error "not possible!"
-- | Prepend a delete, keeping operations in canonical form: the new
-- 'DeleteOp' is pushed past any leading 'InsertOp's so inserts always come
-- before the deletes they are adjacent to.
addDeleteOp :: Operation a n m -> Operation a (S n) m
addDeleteOp (InsertOp x o') = InsertOp x (addDeleteOp o')
addDeleteOp o = DeleteOp o
-- | Sequential composition: @compose a b@ behaves like applying @a@ and then
-- @b@.  Inserts from @a@ that are immediately deleted by @b@ cancel out, and
-- 'addDeleteOp' keeps the result canonical.  The index types make the final
-- equation unreachable.
compose :: Operation a n m -> Operation a m k -> Operation a n k
compose EmptyOp EmptyOp = EmptyOp
compose (DeleteOp a') b = addDeleteOp (compose a' b)
compose a (InsertOp x b') = InsertOp x (compose a b')
compose (RetainOp a') (RetainOp b') = RetainOp (compose a' b')
compose (RetainOp a') (DeleteOp b') = addDeleteOp (compose a' b')
compose (InsertOp x a') (RetainOp b') = InsertOp x (compose a' b')
compose (InsertOp _ a') (DeleteOp b') = compose a' b' -- insert then delete cancels
compose _ _ = error "not possible!"
data TransformedPair :: * -> N -> N -> * where
TP :: Operation a n k -> Operation a m k -> TransformedPair a n m
-- | Operational transformation of two concurrent operations on the same
-- source.  @transform a b@ yields @TP at bt@ whose index types guarantee
-- both composites end at a common length.
-- NOTE(review): the intended OT convergence property (applying @a@ then
-- @bt@ equals applying @b@ then @at@) is not checked by the types here;
-- confirm against the accompanying tests.  When both sides insert, @a@'s
-- insertion is ordered first.
transform :: Operation a n m -> Operation a n k -> TransformedPair a k m
transform EmptyOp EmptyOp = TP EmptyOp EmptyOp
transform (InsertOp x a') b = case transform a' b of TP at bt -> TP (InsertOp x at) (RetainOp bt)
transform a (InsertOp x b') = case transform a b' of TP at bt -> TP (RetainOp at) (InsertOp x bt)
transform (RetainOp a') (RetainOp b') = case transform a' b' of TP at bt -> TP (RetainOp at) (RetainOp bt)
transform (DeleteOp a') (DeleteOp b') = transform a' b' -- both delete: nothing left to do
transform (RetainOp a') (DeleteOp b') = case transform a' b' of TP at bt -> TP at (addDeleteOp bt)
transform (DeleteOp a') (RetainOp b') = case transform a' b' of TP at bt -> TP (addDeleteOp at) bt
transform _ _ = error "not possible!"
-- File: Main.hs
-- Author: Adam Juraszek
-- Purpose: Executable read-eval-loop program useful for testing.
-- Source: https://github.com/juriad/Cardguess
{-# OPTIONS_GHC -fno-warn-orphans #-}
{- | The Main module which is used for testing and benchmarking the performance.
When runs a read-eval-loop reacting to several commands:
* i <RANK><SUIT> ... - interactive mode - guessing the input combination;
* o <#CARDS> - prints optimal second guesses as a template of OptimalN.hs;
* r <#CARDS> <#TRIES> - guesses random <#TRIES> combinations of <#CARDS>;
* s <#CARDS> <#TRIES> - prints random <#TRIES> combinations of <#CARDS>;
* h - prints help;
* q - quits the program.
The loop ends when an empty line is entered. -}
module Main (main) where
import Cardguess
import Common
import Feedback
import Optimalguess
import Control.Exception (SomeException, catch)
import Control.Monad (liftM, unless)
import Data.List (sort)
import Data.Random (sampleState)
import Data.Random.Extras (sample)
import System.Random (StdGen, getStdRandom)
import Text.PrettyPrint (Mode (..), Style (..), renderStyle)
import Text.Show.Pretty (PrettyVal (..), Value (..), dumpDoc)
{- | Ignore the orphan instance warning.
It must be here because we don't want the rest knowing about
pretty printing which is useful in this module. -}
instance PrettyVal Card where
prettyVal (Card s r) = Con "Card" [String $ showSuit s, String $ showRank r]
{- | Guesses the answer based on the previous response, in a loop.
Accumulates the list of responses from the guesser: each step rates the
current guess against the answer and feeds that feedback to 'nextGuess';
the loop terminates (with an empty tail) once the guess equals the answer. -}
guessLoop :: Selection -> Response -> Result
guessLoop ans prev@(guess, _)
    | guess == ans = []
    | otherwise =
        let resp = nextGuess prev (rateGuess ans guess)
        in resp : guessLoop ans resp
{- | Guesses the given answer.
Sorts the answer first (the guesser works on sorted selections only), gets
the initial response for the selection's size, and accumulates the full
list of responses via 'guessLoop'. -}
guessCards :: Selection -> Result
guessCards ans =
    let
        answer = sort ans -- we need sorted selections only
        initial = initialGuess $ length answer
    in initial : guessLoop answer initial
-- | Decodes list of cards forming a selection from a list of strings.
decodeCards :: [String] -> IO Selection
decodeCards = mapM readIO
-- | Guesses and prints intermediate responses of an answer which is a string.
testGuess :: [String] -> IO ()
testGuess ws = do
cards <- decodeCards ws
print $ guessCards cards
-- | Prints optimal second guesses for n cards as a Haskell-like list.
-- Enumerates every possible 5-component feedback with components in
-- @[0 .. n]@, and for each feedback keeps the best next guess among the
-- selections still consistent with the initial guess; feedbacks with no
-- candidates are dropped.  Output is pretty-printed at width 200 so it can
-- be pasted into an OptimalN.hs template.
findOptimal :: [String] -> IO ()
findOptimal [] = fail "findOptimal requires one integer argument"
findOptimal (sn:_) = do
    n <- readIO sn :: IO Int
    let (initial, _) = initialGuess n
    let allSels = subsets deck n
    let feedbacks = [(a, b, c, d, e)
            | let x = [0 .. n], a <- x, b <- x, c <- x, d <- x, e <-x ]
    let result = [(f, best) | f <- feedbacks,
            let options = filterOptions initial f allSels,
            let (_, best) = findBestGuess options,
            not $ null best]
    putStrLn $ renderStyle (Style PageMode 200 1) (dumpDoc result)
-- | Converts a selection to a simple human-friendly line: the cards'
-- 'show' representations separated by single spaces.
selectionToString :: Selection -> String
selectionToString cards = unwords [show card | card <- cards]
-- | Generates a random list of some answers.
randomAnswers :: Int -> Int -> StdGen -> ([Selection], StdGen)
randomAnswers n tries = sampleState $ sample tries (subsets deck n)
-- | Guesses several random answers of a given size.
-- Expects exactly two arguments: the number of cards per answer and the
-- number of random answers to generate; each generated answer is rendered
-- with the supplied function and printed on its own line.
-- (Rewritten from a @length ws == 2@ check plus a partial pattern binding
-- to a direct, total pattern match.)
testRandom :: (Selection -> String) -> [String] -> IO ()
testRandom fn [sn, st] = do
    n <- readIO sn :: IO Int
    t <- readIO st :: IO Int
    answers <- getStdRandom $ randomAnswers n t
    mapM_ (putStrLn . fn) answers
testRandom _ _ = fail "testRandom requires two integral arguments"
-- | List of help messages.
help :: [String]
help = ["i CARD ... | o SIZE | r SIZE TRIES | s SIZE TRIES | h | q",
"Write i(nteractive) followed by list of up to four cards in format RS.",
"Write o(ptimal) followed by number of cards.",
"Write r(andom) followed by number of cards and number of tries.",
"Write s(ample) followed by number of cards and number of combinations.",
"Write h(elp) for printing this help.",
"Write q(uit) to quit this program or just leave prompt empty."]
-- | Prints the help messages, one per line.
printHelp :: IO ()
printHelp = putStr $ unlines help
{- | Runs a command based on the first word of each input line, looping
until the quit command (or an empty line) is entered; unrecognized
commands are ignored.  Using the readline package for input editing would
be preferable, but it is blocked by a GHC issue:
https://ghc.haskell.org/trac/ghc/ticket/9237 -}
readLoop :: IO ()
readLoop = do
    ws <- liftM words getLine
    case ws of
        [] -> return () -- empty line: leave the loop
        ([] : _) -> undefined -- won't happen: 'words' never yields an empty word
        ((m : _) : rest) -> do
            let
                -- Each handler forces the caught exception (to avoid a lazy
                -- pattern-match escape) and prints a usage message for the
                -- command whose arguments failed to parse.
                interactiveException ex = (ex :: SomeException) `seq`
                    putStrLn ("Wrong format: specify up to four cards"
                        ++ " (<RANK><SUIT>) separated by blanks.")
                randomException ex = (ex :: SomeException) `seq`
                    putStrLn ("Wrong format: specify number of cards"
                        ++ " and number of tries separated by blanks.")
                optimalException ex = (ex :: SomeException) `seq`
                    putStrLn "Wrong format: specify number of cards"
            -- Dispatch on the first character of the first word only.
            case m of
                'i' -> catch (testGuess rest) interactiveException
                'o' -> catch (findOptimal rest) optimalException
                'r' -> catch (testRandom (show . guessCards) rest)
                    randomException
                's' -> catch (testRandom selectionToString rest)
                    randomException
                'h' -> printHelp
                _ -> return ()
            unless (m == 'q') readLoop
-- | Prints brief help and runs the main loop.
main :: IO ()
main = do
putStrLn $ head help
readLoop
| juriad/Cardguess | src/Main.hs | mit | 5,995 | 0 | 25 | 1,567 | 1,279 | 671 | 608 | 100 | 8 |
{-# OPTIONS_JHC -fffi #-}
module System.Directory (
Permissions( Permissions, readable, writable, executable, searchable ),
createDirectory, removeDirectory, removeFile,
renameDirectory, renameFile, getDirectoryContents,
getCurrentDirectory, setCurrentDirectory,
doesFileExist, doesDirectoryExist,
getPermissions, setPermissions,
getModificationTime ) where
import Foreign
import Foreign.C
import System.Time
data Permissions = Permissions {
readable, writable,
executable, searchable :: !Bool
} deriving (Eq,Ord,Read,Show)
cPathMax :: CSize
cPathMax = 1024
getCurrentDirectory :: IO FilePath
getCurrentDirectory = allocaBytes (fromIntegral cPathMax) $ \cp -> do
cp <- throwErrnoIfNull "getCurrentDirectory" (getcwd cp cPathMax)
peekCString cp
setCurrentDirectory :: FilePath -> IO ()
setCurrentDirectory fp = throwErrnoIfMinus1_ fp $ withCString fp chdir
foreign import ccall unsafe chdir :: CString -> IO Int
foreign import ccall unsafe getcwd :: Ptr CChar -> CSize -> IO (Ptr CChar)
foreign import ccall unsafe mkdir :: CString -> Int -> IO Int
foreign import ccall unsafe rmdir :: CString -> IO Int
foreign import ccall unsafe unlink :: CString -> IO Int
foreign import ccall unsafe rename :: CString -> CString -> IO Int
createDirectory :: FilePath -> IO ()
createDirectory fp = throwErrnoIfMinus1_ fp $ withCString fp $ \cs -> mkdir cs (-1)
removeDirectory :: FilePath -> IO ()
removeDirectory fp = throwErrnoIfMinus1_ fp $ withCString fp rmdir
removeFile :: FilePath -> IO ()
removeFile fp = throwErrnoIfMinus1_ fp $ withCString fp unlink
renameDirectory :: FilePath -> FilePath -> IO ()
renameDirectory fp1 fp2 = throwErrnoIfMinus1_ "rename" $ do
withCString fp1 $ \fp1 -> do
withCString fp2 $ \fp2 -> do
rename fp1 fp2
renameFile :: FilePath -> FilePath -> IO ()
renameFile x y = renameDirectory x y
getDirectoryContents :: FilePath -> IO [FilePath]
getDirectoryContents = error "getDirectoryContents"
doesFileExist :: FilePath -> IO Bool
doesFileExist = error "doesFileExist"
doesDirectoryExist :: FilePath -> IO Bool
doesDirectoryExist = error "doesDirectoryExist"
getPermissions :: FilePath -> IO Permissions
getPermissions = error "getPermissions"
setPermissions :: FilePath -> Permissions -> IO ()
setPermissions = error "setPermissions"
getModificationTime :: FilePath -> IO ClockTime
getModificationTime = error "getModificationTime"
| dec9ue/jhc_copygc | lib/haskell-extras/System/Directory.hs | gpl-2.0 | 2,445 | 0 | 16 | 413 | 707 | 369 | 338 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Documentation.Haddock.Doc (docParagraph) where
import Data.Char (isSpace)
import Data.List (dropWhileEnd)
import Data.Monoid
import Documentation.Haddock.Types
-- We put it here so that we can avoid a circular import --
-- anything relevant imports this module anyway.  This is an orphan
-- instance (hence the -fno-warn-orphans at the top of the file):
-- DocEmpty is the identity and docAppend the (normalising) append.
instance Monoid (DocH mod id) where
  mempty = DocEmpty
  mappend = docAppend
-- | Append two documentation ASTs, flattening as it goes: adjacent
-- definition/ordered/unordered lists are merged into one list, adjacent
-- string nodes are concatenated, and 'DocEmpty' is eliminated, keeping the
-- tree small and normalised.
docAppend :: DocH mod id -> DocH mod id -> DocH mod id
docAppend (DocDefList ds1) (DocDefList ds2) = DocDefList (ds1++ds2)
docAppend (DocDefList ds1) (DocAppend (DocDefList ds2) d) = DocAppend (DocDefList (ds1++ds2)) d
docAppend (DocOrderedList ds1) (DocOrderedList ds2) = DocOrderedList (ds1 ++ ds2)
docAppend (DocUnorderedList ds1) (DocUnorderedList ds2) = DocUnorderedList (ds1 ++ ds2)
docAppend DocEmpty d = d
docAppend d DocEmpty = d
docAppend (DocString s1) (DocString s2) = DocString (s1 ++ s2)
docAppend (DocAppend d (DocString s1)) (DocString s2) = DocAppend d (DocString (s1 ++ s2))
docAppend (DocString s1) (DocAppend (DocString s2) d) = DocAppend (DocString (s1 ++ s2)) d
docAppend d1 d2 = DocAppend d1 d2
-- again to make parsing easier - we spot a paragraph whose only item
-- is a DocMonospaced and make it into a DocCodeBlock.
-- Whitespace-only string nodes surrounding the monospaced chunk are
-- discarded in the process; any other paragraph is left as-is.
docParagraph :: DocH mod id -> DocH mod id
docParagraph (DocMonospaced p)
  = DocCodeBlock (docCodeBlock p)
docParagraph (DocAppend (DocString s1) (DocMonospaced p))
  | all isSpace s1
  = DocCodeBlock (docCodeBlock p)
docParagraph (DocAppend (DocString s1)
                (DocAppend (DocMonospaced p) (DocString s2)))
  | all isSpace s1 && all isSpace s2
  = DocCodeBlock (docCodeBlock p)
docParagraph (DocAppend (DocMonospaced p) (DocString s2))
  | all isSpace s2
  = DocCodeBlock (docCodeBlock p)
docParagraph p
  = DocParagraph p
-- Drop trailing whitespace from @..@ code blocks. Otherwise this:
--
-- -- @
-- -- foo
-- -- @
--
-- turns into (DocCodeBlock "\nfoo\n ") which when rendered in HTML
-- gives an extra vertical space after the code block. The single space
-- on the final line seems to trigger the extra vertical space.
--
-- Only the rightmost string node matters, so DocAppend recurses on the
-- right child only.  (Uses 'dropWhileEnd' instead of the double-reverse
-- idiom; the predicate — space and tab only, not newline — is unchanged.)
docCodeBlock :: DocH mod id -> DocH mod id
docCodeBlock (DocString s)
  = DocString (dropWhileEnd (`elem` " \t") s)
docCodeBlock (DocAppend l r)
  = DocAppend l (docCodeBlock r)
docCodeBlock d = d
| jgm/haddock | haddock-library/src/Documentation/Haddock/Doc.hs | bsd-2-clause | 2,260 | 0 | 11 | 389 | 723 | 365 | 358 | 40 | 1 |
module Tandoori.Typing.Show(printCtxt, showName) where
import Tandoori.GHC.Internals
import Tandoori
import Tandoori.Typing
import Tandoori.Typing.Pretty
import Tandoori.Typing.Error
import Tandoori.Typing.Ctxt
import Tandoori.Typing.MonoEnv
import Control.Applicative ((<$>))
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Text.PrettyPrint.Boxes as Box
import Data.List
-- import qualified Data.MultiSet as Bag
data ShowTyCtxt = C { isLeftOfFun :: Bool, isRightOfApp :: Bool }
showFunLeft :: ShowTyCtxt -> Ty -> String
showFunLeft c ty = showTy' c{isLeftOfFun = True} ty
showFunRight c ty = showTy' c{isLeftOfFun = False} ty
showAppLeft c ty = showTy' c{isRightOfApp = False} ty
showAppRight c ty = showTy' c{isRightOfApp = True} ty
showInParen c ty = showTy' c{isLeftOfFun = False} ty
forceParen = False
-- | Wrap the string in parentheses when requested — or unconditionally
-- when the 'forceParen' debugging flag is enabled.
parenIf :: Bool -> String -> String
parenIf needed s
    | needed || forceParen = "(" ++ s ++ ")"
    | otherwise            = s
showTy :: Ty -> String
showTy ty = showTy' C{isLeftOfFun = False, isRightOfApp = False} ty
showTy' :: ShowTyCtxt -> Ty -> String
showTy' c (TyVar α) = showName α
showTy' c (TyCon con) = showName con
showTy' c (TyFun τ1 τ2) = parenIf (isLeftOfFun c) $ unwords [showFunLeft c τ1, "->", showFunRight c τ2]
showTy' c τ@(TyApp τ1 τ2) | isTyConList τ1 = "[" ++ showFunRight c τ2 ++ "]"
| isTyConTuple τ1 = let τs = tail $ tyUncurryApp τ
in "(" ++ commaList (map show τs) ++ ")"
| otherwise = parenIf (isRightOfApp c) $ unwords [showAppLeft c τ1, showAppRight c τ2]
showTy' c (TyTuple n) = "(" ++ replicate (pred n) ',' ++ ")"
-- | Render a 'Name', parenthesising symbolic (operator) occurrences —
-- both value-level and data-constructor operators — so the output stays
-- syntactically valid, e.g. @(+)@ or @(:+:)@.
showName :: Name -> String
showName name = if isSymOcc occName || isDataSymOcc occName then "(" ++ s ++ ")" else s
    where occName = nameOccName name
          s = occNameString occName
showPolyPred :: PolyPred -> String
showPolyPred (cls, α) = unwords [showName cls, showName α]
showPreds :: OverCtx -> String
showPreds [] = ""
showPreds [pred] = unwords [showPred pred, "=> "]
showPreds preds = unwords ["(" ++ (commaList $ map showPred preds) ++ ")", "=> "]
commaList = intercalate ", "
showPred :: OverPred -> String
showPred (cls, τ) = unwords [showName cls, show τ]
instance Show Ty where
show = showTy' C{isLeftOfFun = False, isRightOfApp = False}
instance Show OverTy where
show (OverTy ctx τ) = showPreds ctx ++ show τ
instance Show PolyTy where
show = show . fromPolyTy
instance (Show TyEq) where
show (τ :=: τ') = unwords [show τ, ":=:", show τ']
instance Outputable ErrorMessage where
ppr (ErrorMessage (ErrorLocation srcloc Nothing) content) = ppr srcloc <> colon <+> ppr content
ppr (ErrorMessage (ErrorLocation srcloc (Just src)) content) = ppr srcloc <> colon $$ src $$ ppr content
showFailedEqs sep tyeqs = unwords $ map (\ (t1 :=: t2) -> unwords [show t1, sep, show t2]) tyeqs
instance Outputable TypingErrorContent where
ppr (Unsolvable (τ1 :=: τ2)) = text "Cannot unify" <+> quotes (text (show τ1)) <+> text "with" <+> quotes (text (show τ2))
ppr (InfiniteType (τ1 :=: τ2)) = text "Occurs check failed: infinite type" <+> text (show τ1) <+> text "=" <+> text (show τ2)
ppr (UnfulfilledPredicate pred) = text "Unfulfilled predicate" <+> text (showPred pred)
instance Outputable ErrorContent where
ppr (UndefinedCon name) = text "Reference to undefined constructor" <+> quotes (ppr name)
ppr (UndefinedVar name) = text "Reference to undefined variable" <+> quotes (ppr name)
ppr (UnificationFailed ms tyerr) =
ppr tyerr' <+> source <> colon $$ text (Box.render $ boxMonos ms')
where (ms', tyerr') = runPretty $ do ms' <- mapM prettyMonoM ms
tyerr' <- prettyTypingErrorM (typingErrorContent tyerr)
return (ms', tyerr')
source = case typingErrorSrc tyerr of
Just x -> text "when unifying " <> (quotes . text . showName) x
Nothing -> empty
ppr (CantFitDecl σDecl (m, τ)) =
text "Declared type" <+> text (show σDecl) <+>
text "is not a special case of inferred typing" <+> text (showTyping m τ)
ppr (InvalidCon σ) =
text "Invalid constructor signature" <+> text (show σ)
ppr (ClassCycle clss) =
text "Cycle in superclass hierarchy" <> colon <+>
sep (punctuate comma $ map (quotes . text . showName) clss)
ppr (AmbiguousPredicate j (cls, α)) =
text "Ambiguous predicate" <+> text (showPred (cls, τ'))
where τ' = prettyTy (TyVar α)
ppr (MissingBaseInstances (cls, τ) πs) =
text "Missing base instances of" <+> text (showPred (cls, τ)) <> colon <+>
sep (punctuate comma $ map (text . showPred) $ fromPolyCtx πs)
ppr InvalidInstance = text "Invalid instance declaration"
ppr (UndefinedCls cls) = text "Undefined class" <+> text (showName cls)
ppr (OtherError message) = text message
prettyTypingErrorM (Unsolvable eq) = Unsolvable <$> prettyTyEqM eq
prettyTypingErrorM (InfiniteType eq) = InfiniteType <$> prettyTyEqM eq
prettyTypingErrorM (UnfulfilledPredicate (cls, τ)) = do τ' <- prettyTyM τ
return $ UnfulfilledPredicate (cls, τ')
prettyTyEqM (t :=: u) = do t' <- prettyTyM t
u' <- prettyTyM u
return $ t' :=: u'
prettyMonoM = mapMonoM prettyTvM
instance Show MonoEnv where
show m = "{" ++ intercalate ", " (typing ++ preds) ++ "}"
where typing = map (\ (x, τ) -> showName x ++ "::" ++ show τ) $ getMonoVars m
preds = map (\ (cls, α) -> unwords [showName cls, showName α]) $ getMonoPreds m
showTyping m τ = runPretty $ do
m' <- prettyMonoM m
τ' <- prettyTyM τ
return $ unwords [show m', "⊢", show τ']
printCtxt :: Ctxt -> IO ()
printCtxt c = Box.printBox $ boxName Box.<+> boxType
where showPolyTy = show . runPretty . prettyPolyTyM
pairs = (map (\ (name, (L _ σ)) -> (showName name, show σ)) $ Map.toList $ userDecls c) ++
(map (\ (name, (m, τ)) -> (showName name, showTyping m τ)) $ Map.toList $ polyVars c)
boxName = Box.vcat Box.left $ map (Box.text . fst) pairs
boxType = Box.vcat Box.left $ map (\ (name, typing) -> Box.text "::" Box.<+> Box.text typing) pairs
boxMonos :: [MonoEnv] -> Box.Box
boxMonos ms = Box.hsep 2 Box.top $ boxNames:(map boxTypes ms)
where vars :: [VarName]
vars = Set.toList $ Set.unions $ map (Set.fromList . map fst . getMonoVars) ms
-- Omit non-shared variables?
-- vars = map fst $ filter (\(v, c) -> c >= 2) $ Bag.toOccurList $ Bag.unions varBags
-- where varBags = map (Bag.fromList . nub . map fst . getMonoVars) ms
boxType m v = Box.text $ maybe "" show $ getMonoVar m v
boxNames = Box.vcat Box.left $
(Box.text ""):
(Box.text ""):
(Box.text "Predicates:"):
(map ((Box.<+> Box.text "::").(Box.text . showName)) vars)
boxTypes m = Box.vcat Box.left $ boxSrc:boxTy:boxPreds:boxsTyVars
where
boxSrc = Box.text $ maybe "" showSDocUnqual $ getMonoSrc m
boxTy = Box.text $ maybe "" show $ getMonoTy m
boxsTyVars = map (boxType m) vars
boxPreds = Box.text $ intercalate ", " $ map showPolyPred (getMonoPreds m)
| bitemyapp/tandoori | src/Tandoori/Typing/Show.hs | bsd-3-clause | 8,003 | 2 | 16 | 2,402 | 2,750 | 1,390 | 1,360 | 130 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- | An abstraction for re-running actions if values or files have changed.
--
-- This is not a full-blown make-style incremental build system, it's a bit
-- more ad-hoc than that, but it's easier to integrate with existing code.
--
-- It's a convenient interface to the "Distribution.Client.FileMonitor"
-- functions.
--
module Distribution.Client.RebuildMonad (
-- * Rebuild monad
Rebuild,
runRebuild,
askRoot,
-- * Setting up file monitoring
monitorFiles,
MonitorFilePath,
monitorFile,
monitorFileHashed,
monitorNonExistentFile,
monitorDirectory,
monitorNonExistentDirectory,
monitorDirectoryExistence,
monitorFileOrDirectory,
monitorFileSearchPath,
monitorFileHashedSearchPath,
-- ** Monitoring file globs
monitorFileGlob,
monitorFileGlobExistence,
FilePathGlob(..),
FilePathRoot(..),
FilePathGlobRel(..),
GlobPiece(..),
-- * Using a file monitor
FileMonitor(..),
newFileMonitor,
rerunIfChanged,
-- * Utils
matchFileGlob,
) where
import Distribution.Client.FileMonitor
import Distribution.Client.Glob hiding (matchFileGlob)
import qualified Distribution.Client.Glob as Glob (matchFileGlob)
import Distribution.Simple.Utils (debug)
import Distribution.Verbosity (Verbosity)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Monad.State as State
import Control.Monad.Reader as Reader
import Distribution.Compat.Binary (Binary)
import System.FilePath (takeFileName)
-- | A monad layered on top of 'IO' to help with re-running actions when the
-- input files and values they depend on change. The crucial operations are
-- 'rerunIfChanged' and 'monitorFiles'.
--
newtype Rebuild a = Rebuild (ReaderT FilePath (StateT [MonitorFilePath] IO) a)
deriving (Functor, Applicative, Monad, MonadIO)
-- | Use this wihin the body action of 'rerunIfChanged' to declare that the
-- action depends on the given files. This can be based on what the action
-- actually did. It is these files that will be checked for changes next
-- time 'rerunIfChanged' is called for that 'FileMonitor'.
--
-- Relative paths are interpreted as relative to an implicit root, ultimately
-- passed in to 'runRebuild'.
--
monitorFiles :: [MonitorFilePath] -> Rebuild ()
monitorFiles filespecs = Rebuild (State.modify (filespecs++))
-- | Run a 'Rebuild' IO action.
unRebuild :: FilePath -> Rebuild a -> IO (a, [MonitorFilePath])
unRebuild rootDir (Rebuild action) = runStateT (runReaderT action rootDir) []
-- | Run a 'Rebuild' IO action.
runRebuild :: FilePath -> Rebuild a -> IO a
runRebuild rootDir (Rebuild action) = evalStateT (runReaderT action rootDir) []
-- | The root that relative paths are interpreted as being relative to.
askRoot :: Rebuild FilePath
askRoot = Rebuild Reader.ask
-- | This captures the standard use pattern for a 'FileMonitor': given a
-- monitor, an action and the input value the action depends on, either
-- re-run the action to get its output, or if the value and files the action
-- depends on have not changed then return a previously cached action result.
--
-- The result is still in the 'Rebuild' monad, so these can be nested.
--
-- Do not share 'FileMonitor's between different uses of 'rerunIfChanged'.
--
rerunIfChanged :: (Binary a, Binary b)
=> Verbosity
-> FileMonitor a b
-> a
-> Rebuild b
-> Rebuild b
rerunIfChanged verbosity monitor key action = do
rootDir <- askRoot
changed <- liftIO $ checkFileMonitorChanged monitor rootDir key
case changed of
MonitorUnchanged result files -> do
liftIO $ debug verbosity $ "File monitor '" ++ monitorName
++ "' unchanged."
monitorFiles files
return result
MonitorChanged reason -> do
liftIO $ debug verbosity $ "File monitor '" ++ monitorName
++ "' changed: " ++ showReason reason
startTime <- liftIO $ beginUpdateFileMonitor
(result, files) <- liftIO $ unRebuild rootDir action
liftIO $ updateFileMonitor monitor rootDir
(Just startTime) files key result
monitorFiles files
return result
where
monitorName = takeFileName (fileMonitorCacheFile monitor)
showReason (MonitoredFileChanged file) = "file " ++ file
showReason (MonitoredValueChanged _) = "monitor value changed"
showReason MonitorFirstRun = "first run"
showReason MonitorCorruptCache = "invalid cache file"
-- | Utility to match a file glob against the file system, starting from a
-- given root directory. The results are all relative to the given root.
--
-- Since this operates in the 'Rebuild' monad, it also monitors the given glob
-- for changes.
--
matchFileGlob :: FilePathGlob -> Rebuild [FilePath]
matchFileGlob glob = do
root <- askRoot
monitorFiles [monitorFileGlobExistence glob]
liftIO $ Glob.matchFileGlob root glob
| tolysz/prepare-ghcjs | spec-lts8/cabal/cabal-install/Distribution/Client/RebuildMonad.hs | bsd-3-clause | 5,128 | 0 | 17 | 1,141 | 827 | 461 | 366 | 81 | 5 |
#if defined(mingw32_HOST_OS) || defined(__MINGW32__)
{-# LANGUAGE ForeignFunctionInterface #-}
#endif
-- | Portable access to the id of the current process: uses a direct Win32
-- FFI call on Windows and "System.Posix.Process" elsewhere.
module Hint.CompatPlatform (
    getPID
) where

import Control.Applicative
import Prelude

#if defined(mingw32_HOST_OS) || defined(__MINGW32__)
import Data.Word
#else
import System.Posix.Process
#endif

-- | Return the id of the calling process as a plain 'Int'.
getPID :: IO Int

#if defined(mingw32_HOST_OS) || defined(__MINGW32__)
-- This function is not yet in the win32 package, so we have to
-- roll down our own definition.
--
-- Credit goes where it is deserved:
-- http://www.haskell.org/pipermail/haskell-cafe/2009-February/055097.html
foreign import stdcall unsafe "winbase.h GetCurrentProcessId"
    c_GetCurrentProcessId :: IO Word32

-- GetCurrentProcessId yields a DWORD ('Word32'), which always fits in 'Int'.
getPID = fromIntegral <$> c_GetCurrentProcessId
#else
getPID = fromIntegral <$> getProcessID
#endif
| int-e/hint | src/Hint/CompatPlatform.hs | bsd-3-clause | 795 | 0 | 6 | 106 | 72 | 47 | 25 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Data.Conduit
import qualified Data.Conduit.List as CL
import Network.HTTP.Conduit
import Text.HTML.TagStream
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString.Char8 as S8
import Data.Time
import System.Locale
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import qualified Filesystem as F
import qualified Filesystem.Path.CurrentOS as F
import qualified Data.Yaml as Y
-- | Skip the leading run of elements satisfying the predicate; once the
-- first non-matching element is seen, pass everything through unchanged.
dropWhile' :: Resource m => (a -> Bool) -> Conduit a m a -- FIXME add to conduit
dropWhile' p = conduitState True step finish
  where
    -- The state flag records whether we are still inside the leading run.
    step True x
      | p x       = return (StateProducing True [])
      | otherwise = return (StateProducing False [x])
    step False x  = return (StateProducing False [x])
    finish _ = return []
-- | Pass through the leading run of elements satisfying the predicate and
-- drop everything from the first non-matching element onwards.
takeWhile' :: Resource m => (a -> Bool) -> Conduit a m a -- FIXME add to conduit
takeWhile' p = conduitState True step finish
  where
    -- Once the flag drops to False, all further input is discarded.
    step False _ = return (StateProducing False [])
    step True x
      | p x       = return (StateProducing True [x])
      | otherwise = return (StateProducing False [])
    finish _ = return []
-- | Scrape the yesodweb blog archive: fetch the table of contents, turn
-- each anchor into a (date, href, title) triple, download/store each post
-- via 'grab', and write the collected metadata to @posts.yaml@.
main :: IO ()
main = withManager $ \m -> do
    req <- parseUrl "http://www.yesodweb.com/blog-archive"
    Response _ _ body <- http req m
    x <- body
        $$ tokenStream
        -- Keep only the tokens inside the <nav class="toc"> element.
        =$ dropWhile' (/= TagOpen "nav" [("class", "toc")] False)
        =$ takeWhile' (/= TagClose "nav")
        -- Extract (date, href, title) triples from the anchor tags ...
        =$ sequenceSink () getPost
        -- ... and fetch/store each linked post, collecting its metadata.
        =$ CL.mapM (grab m)
        =$ CL.consume
    liftIO $ Y.encodeFile "posts.yaml" x
-- | Metadata for a single downloaded blog post.
data Post = Post
    { postTime :: UTCTime    -- ^ publication timestamp parsed from the archive
    , postAuthor :: T.Text   -- ^ author short name (e.g. \"michael\")
    , postTitle :: T.Text    -- ^ post title, HTML entities decoded
    , postFP :: F.FilePath   -- ^ where the post body was written on disk
    }
    deriving Show

-- Serialised into the @posts.yaml@ index written by 'main'.
instance Y.ToJSON Post where
    toJSON (Post time a title fp) = Y.object
        [ "time" Y..= time
        , "author" Y..= a
        , "title" Y..= title
        , "path" Y..= F.encodeString fp
        ]
-- | Download a single post given a @(date, href, title)@ triple: parse the
-- timestamp, derive a @year/month/slug.html@ output path, extract the
-- article body (the tokens between the author avatar and the disqus
-- widget), write it to disk, and return the 'Post' metadata.
grab m (daytime, href, titleBS) = runResourceT $ do
    let title = decodeUtf8 titleBS
    -- The slug is everything after the final '/' of the link.
    let slug = snd $ T.breakOnEnd "/" $ decodeUtf8 href
    let mutc = parseTime defaultTimeLocale "%B %e, %Y %l:%M %P" $ S8.unpack daytime
    utc <-
        case mutc of
            Nothing -> error $ "Invalid timestamp: " ++ show daytime
            Just utc -> return utc
    let (year, month, _) = toGregorian $ utctDay utc
    let fp = F.decodeString (show year) F.</>
             F.decodeString (show month) F.</>
             F.fromText slug F.<.> "html"
    liftIO $ F.createTree $ F.directory fp
    req <- parseUrl $ S8.unpack href
    Response _ _ body <- http req m
    x <- body
        $$ tokenStream
        =$ dropWhile' (not . isAvatar)
        =$ takeWhile' (not . isDisqus)
        =$ CL.consume
    -- The author is recognised by the gravatar URL next to the post.
    author <-
        case x of
            TagOpen "img" as _:_ ->
                case lookup "src" as of
                    Just src ->
                        case src of
                            "http://www.gravatar.com/avatar/bad65d3d7319025d73e065d7a29ee22a?s=100&d=identicon" -> return "greg"
                            "http://www.gravatar.com/avatar/71596bb1ca3ba3aa4400c3f407baec9f?s=100&d=identicon" -> return "michael"
                            _ -> error $ "Unknown author: " ++ show src
                    Nothing -> error "Unknown author"
            _ -> error "Unknown author"
    -- Everything after the closing </script> of the avatar block is taken
    -- as the article body.
    let content = encode $ drop 1 $ dropWhile (not . isScriptClose) x
    liftIO $ S8.writeFile (F.encodeString fp) content
    return $ Post utc author (replace title) fp
  where
    isAvatar (TagOpen "img" as _) = lookup "alt" as == Just "Avatar"
    isAvatar _ = False
    isScriptClose (TagClose "script") = True
    isScriptClose _ = False
    -- NOTE(review): isArticleClose is currently unused.
    isArticleClose (TagClose "article") = True
    isArticleClose _ = False
    isDisqus (TagOpen "div" [("id", "disqus")] _) = True
    isDisqus _ = False
    -- Decode the HTML entities that may appear in post titles.
    -- FIX: these literals were corrupted in the checked-in source — the
    -- entity names had themselves been entity-decoded, leaving identity
    -- replacements (e.g. @T.replace "'" "'"@) and a syntactically invalid
    -- @"""@ literal. Restored to the intended entity names.
    replace = T.replace "&#39;" "'"
            . T.replace "&quot;" "\""
            . T.replace "&amp;" "&"
            . T.replace "&lt;" "<"
            . T.replace "&gt;" ">"
-- | Sink step for 'sequenceSink': read anchors of the shape
-- @\<a title=\"date\" href=\"url\"\>title\</a\>@ from the table of contents
-- and emit them as @(date, href, title)@ triples; stop at end of input.
getPost () = do
    mx <- CL.head
    case mx of
        Just (TagOpen "a" [("title", daytime), ("href", href)] False) -> do
            Just (Text title) <- CL.head
            return $ Emit () [(daytime, href, title)]
        Nothing -> return Stop
        -- Any other token (whitespace, list markup) is skipped.
        _ -> getPost ()
| mlitchard/lambdaweb.com-content | blog/getposts.hs | bsd-2-clause | 4,271 | 0 | 19 | 1,278 | 1,419 | 708 | 711 | 109 | 10 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE RankNTypes #-}
------------------------------------------------------------------------------
module Data.HeterogeneousEnvironment
( KeyGen
, HeterogeneousEnvironment
, Key
, newKeyGen
, empty
, makeKey
, lookup
, insert
, delete
, adjust
, getKeyId
) where
------------------------------------------------------------------------------
import Control.Monad
import Data.IntMap (IntMap)
import qualified Data.IntMap as IM
import Data.IORef
import GHC.Exts
import Prelude hiding (lookup)
import Unsafe.Coerce
------------------------------------------------------------------------------
-- | A map from typed 'Key's to values of the corresponding types. Values are
-- stored erased to 'Any' and recovered with 'unsafeCoerce'; this is sound
-- provided every key is created by 'makeKey' (which hands out distinct ids),
-- so a lookup at a key can only ever see a value inserted at that same key,
-- and hence at the same type @a@.
data HeterogeneousEnvironment = HeterogeneousEnvironment (IntMap Any)

-- | A handle into the environment, phantom-typed with the value type it
-- refers to.
newtype Key a = Key Int

-- | A supply of fresh key ids.
newtype KeyGen = KeyGen (IORef Int)


------------------------------------------------------------------------------
-- | If you use two different KeyGens to work with the same map, you deserve
-- what you get.
newKeyGen :: IO KeyGen
newKeyGen = liftM KeyGen $ newIORef 0


------------------------------------------------------------------------------
-- | Expose the raw integer id of a key (e.g. for debugging).
getKeyId :: Key a -> Int
getKeyId (Key x) = x


------------------------------------------------------------------------------
-- | The empty environment.
empty :: HeterogeneousEnvironment
empty = HeterogeneousEnvironment $ IM.empty


------------------------------------------------------------------------------
-- | Atomically draw a fresh key from the generator. Aborts when the id
-- space is exhausted rather than wrapping around, which would alias two
-- keys of potentially different types.
makeKey :: KeyGen -> IO (Key a)
makeKey (KeyGen gen) = do
    k <- atomicModifyIORef gen nextKey
    return $ Key k
  where
    nextKey !x = if x >= maxBound-1
                   then error "too many keys generated"
                   else let !x' = x+1 in (x',x)


------------------------------------------------------------------------------
-- | Look up a value; the coercion back from 'Any' is justified by the
-- invariant documented on 'HeterogeneousEnvironment'.
lookup :: Key a -> HeterogeneousEnvironment -> Maybe a
lookup (Key k) (HeterogeneousEnvironment m) = fmap unsafeCoerce $ IM.lookup k m


------------------------------------------------------------------------------
-- | Insert (or overwrite) a value at the given key.
insert :: Key a -> a -> HeterogeneousEnvironment -> HeterogeneousEnvironment
insert (Key k) v (HeterogeneousEnvironment m) = HeterogeneousEnvironment $
                                                IM.insert k (unsafeCoerce v) m


------------------------------------------------------------------------------
-- | Remove the value at the given key, if any.
delete :: Key a -> HeterogeneousEnvironment -> HeterogeneousEnvironment
delete (Key k) (HeterogeneousEnvironment m) = HeterogeneousEnvironment $
                                              IM.delete k m


------------------------------------------------------------------------------
-- | Apply a function to the value at the given key, if present.
adjust :: (a -> a) -> Key a -> HeterogeneousEnvironment -> HeterogeneousEnvironment
adjust f (Key k) (HeterogeneousEnvironment m) = HeterogeneousEnvironment $
                                                IM.adjust f' k m
  where
    -- Wrap f so it operates on the erased 'Any' representation.
    f' = unsafeCoerce . f . unsafeCoerce
| sopvop/heist | src/Data/HeterogeneousEnvironment.hs | bsd-3-clause | 2,887 | 0 | 12 | 581 | 559 | 299 | 260 | 49 | 2 |
{-# LANGUAGE ConstraintKinds, TemplateHaskell, PolyKinds, TypeFamilies #-}
module T7021a where
import GHC.Prim
import Language.Haskell.TH
-- A tuple constraint behind a synonym.
type IOable a = (Show a, Read a)

-- A nullary constraint family, so a signature can carry a family
-- application in constraint position.
type family ALittleSilly :: Constraint

data Proxy a = Proxy

-- Each binding below carries a differently-shaped constraint (tuple
-- synonym, variable-headed, family application); 'test' reifies all three.
foo :: IOable a => a
foo = undefined

baz :: a b => Proxy a -> b
baz = undefined

bar :: ALittleSilly  => a
bar = undefined

-- Reify the three bindings (results discarded — presumably this only
-- checks that reification succeeds), then return a trivial expression.
test :: Q Exp
test = do
    Just fooName <- lookupValueName "foo"
    Just bazName <- lookupValueName "baz"
    Just barName <- lookupValueName "bar"
    reify fooName
    reify bazName
    reify barName
    [t| (Show a, (Read a, Num a)) => a -> a |]
    [| \_ -> 0 |]
| tibbe/ghc | testsuite/tests/th/T7021a.hs | bsd-3-clause | 630 | 0 | 8 | 147 | 190 | 98 | 92 | -1 | -1 |
{-# LANGUAGE NoMonomorphismRestriction, OverloadedStrings #-}
module TimetableImageCreator
(renderTable) where
import Diagrams.Prelude
import Diagrams.Backend.SVG.CmdLine
import Diagrams.Backend.SVG
import Data.List (intersperse)
import Data.List.Utils (replace)
import Data.List.Split (splitOn)
import Lucid (renderText)
import Data.Text.Lazy (unpack)
-- | Column headers: the five weekdays shown in the timetable.
days :: [String]
days = ["Mon", "Tue", "Wed", "Thu", "Fri"]
-- | Row labels: full hours from 8:00 through 12:00 and then 1:00 through
-- 8:00, rendered on a 12-hour clock without am/pm markers.
times :: [String]
times = [show hour ++ ":00" | hour <- hours]
  where
    hours = [8 .. 12] ++ [1 .. 8] :: [Int]
-- | Fill colour for header and body cells (dark blue).
blue3 :: Colour Double
blue3 = sRGB24read "#437699"

-- | Border colour (pink).
pink1 :: Colour Double
pink1 = sRGB24read "#DB94B8"

-- | Width of a weekday cell, in local diagram units.
cellWidth :: Double
cellWidth = 2

-- | Width of the left-hand time column.
timeCellWidth :: Double
timeCellWidth = 1.2

-- | Height of a cell body.
cellHeight :: Double
cellHeight = 0.4

-- | Height of the spacer drawn above each cell body.
cellPaddingHeight :: Double
cellPaddingHeight = 0.1

-- | Font size in points; 'renderTable' also substitutes a scaled copy of
-- this value into the rendered SVG text.
fs :: Double
fs = 14
-- | Body rectangle of a weekday cell.
cell :: Diagram B
cell = rect cellWidth cellHeight

-- | Spacer rectangle above a weekday cell.
cellPadding :: Diagram B
cellPadding = rect cellWidth cellPaddingHeight

-- | Body rectangle of a time-column cell (no border).
timeCell :: Diagram B
timeCell = rect timeCellWidth cellHeight # lw none

-- | Spacer above a time-column cell (no border).
timeCellPadding :: Diagram B
timeCellPadding = rect timeCellWidth cellPaddingHeight # lw none

-- | Render a string at the configured font and size.
cellText :: String -> Diagram B
cellText s = font "Trebuchet MS" $ text s # fontSizeO fs

-- | A weekday cell: white text on blue for occupied cells, plain white
-- background for empty ones.
makeCell :: String -> Diagram B
makeCell s = vsep 0.030
    [cellPadding # fc background # lc background,
     cellText s # fc white <>
     cell # fc background # lc background]
    where
        background = if null s then white else blue3

-- | Header row: the session label followed by the weekday names, with a
-- border line underneath.
header :: String -> Diagram B
header session = (hcat $ (makeSessionCell session) : map makeHeaderCell days) # centerX === headerBorder

-- | Top-left cell holding the session name.
makeSessionCell :: String -> Diagram B
makeSessionCell s =
    timeCellPadding === (cellText s <> timeCell)

-- | A weekday name cell in the header row.
makeHeaderCell :: String -> Diagram B
makeHeaderCell s =
    cellPadding # lw none === (cellText s <> cell # lw none)

-- | A time label cell in the leftmost column.
makeTimeCell :: String -> Diagram B
makeTimeCell s =
    timeCellPadding === (cellText s <> timeCell # lw none)

-- | One timetable row: the time label followed by the five weekday cells.
-- Fails (intentionally) on an empty row.
makeRow :: [String] -> Diagram B
makeRow (x:xs) = (# centerX) . hcat $
    makeTimeCell x : map makeCell xs
makeRow [] = error "invalid timetable format"

-- | Thick border under the header row.
headerBorder :: Diagram B
headerBorder = hrule 11.2 # lw medium # lc pink1

-- | Thin border between body rows.
rowBorder :: Diagram B
rowBorder = hrule 11.2 # lw thin # lc pink1

-- | Assemble the header and all rows, separated by row borders.
makeTable :: [[String]] -> String -> Diagram B
makeTable s session = vsep 0.04 $ (header session): intersperse rowBorder (map makeRow s)
-- | Render a timetable to an SVG file.
--
-- @courses@ is a flat, @\'_\'@-separated list of cell texts in row-major
-- order (five per row, one per weekday); @session@ is shown in the
-- top-left corner; @filename@ is the SVG output path.
renderTable :: String -> String -> String -> IO ()
renderTable filename courses session = do
    let courseTable = partition5 $ splitOn "_" courses
    -- Debug output of the parsed table.
    print courseTable
    let g = makeTable (zipWith (:) times courseTable) session
        svg = renderDia SVG (SVGOptions (mkWidth 1024) Nothing "") g
        -- Rewrite the font size in the generated SVG text (see fs' below).
        txt = replace (show (fs :: Double) ++ "px") (show fs' ++ "px") $
              unpack $ renderText svg
    writeFile filename txt
    where
      -- Split the flat cell list into rows of five (one per weekday).
      partition5 [] = []
      partition5 lst = take 5 lst : partition5 (drop 5 lst)
      -- relative fonts don't play well with ImageMagick, apparently
      fs' = round $ 1024 / 600 * fs
| tamaralipowski/courseography | hs/TimetableImageCreator.hs | gpl-3.0 | 2,994 | 46 | 12 | 611 | 1,028 | 538 | 490 | 78 | 2 |
-- GSoC 2015 - Haskell bindings for OpenCog.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
-- | Simple example on executing code in multiple threads.
-- Note, before compiling this code you need to install the package: 'random'.
-- Executing: stack install random
import OpenCog.AtomSpace (AtomSpace,insert,get,remove,
debug,printAtom,newAtomSpace,(<:),
Atom(..),TruthVal(..),noTv,stv)
import Control.Monad.IO.Class (liftIO)
import Control.Concurrent (forkIO,threadDelay)
import System.Random (randomIO,randomRIO)
-- | Build a concept node named @Concept1 .. Concept\<top\>@ with a uniformly
-- random suffix, carrying no truth value.
randomConcept :: Int -> AtomSpace Atom
randomConcept top = do
    num <- liftIO $ randomRIO (1,top)
    return $ Node "ConceptNode" ("Concept"++show num) noTv
-- | Build a \"ListLink\" over a random number (between 1 and @n@) of random
-- concept nodes, each drawn from @m@ possibilities (see 'randomConcept').
randomList :: Int -> Int -> AtomSpace Atom
randomList n m = do
    num <- liftIO $ randomRIO (1,n)
    -- Previously '\_ -> randomConcept m >>= return'; the '>>= return' was
    -- redundant ('act >>= return' is 'act' by the monad laws).
    list <- mapM (\_ -> randomConcept m) [1..num]
    return $ Link "ListLink" list noTv
-- | Spawn 20 worker threads all sharing one atomspace, then run a 21st
-- worker on the main thread (which keeps the process alive, since 'loop'
-- never returns).
main :: IO ()
main = do
    as1 <- newAtomSpace Nothing
    -- The ThreadIds are not needed, hence 'mapM_' rather than 'mapM'
    -- (the original built and discarded a list of ThreadIds).
    mapM_ (\n -> forkIO $ as1 <: loop n) [1..20]
    as1 <: loop 21
-- | Worker body: endlessly perform a random mix of atomspace operations
-- (insert/get/remove of concept nodes and list links) interleaved with
-- random pauses. Thread number 1 additionally sleeps 5 seconds per round
-- and then dumps the atomspace contents.
loop :: Int -> AtomSpace ()
loop idNum = do
    liftIO $ putStrLn $ "Thread " ++ show idNum
    waitRandom
    -- Churn on individual concept nodes ...
    concept1 <- randomConcept 6
    remove concept1
    concept2 <- randomConcept 6
    insert concept2
    concept3 <- randomConcept 6
    get concept3
    waitRandom
    -- ... and on list links over them.
    list1 <- randomList 3 6
    res <- get list1
    case res of
      Nothing -> liftIO $ putStrLn "Got: Nothing"
      Just l -> liftIO $ putStrLn "Got:" >> printAtom l
    list2 <- randomList 3 6
    insert list2
    list3 <- randomList 3 6
    remove list3
    -- Only thread 1 produces the periodic debug dump.
    if idNum == 1
      then do
        liftIO $ threadDelay 5000000
        liftIO $ putStrLn $ replicate 70 '#'
        debug
      else return ()
    loop idNum
  where
    -- Sleep up to 0.1s to interleave the threads unpredictably.
    waitRandom :: AtomSpace ()
    waitRandom = do
        n <- liftIO $ randomRIO (0,100000)
        liftIO $ threadDelay n
| ceefour/atomspace | examples/haskell/example_multithreading.hs | agpl-3.0 | 1,949 | 0 | 12 | 559 | 618 | 302 | 316 | 53 | 3 |
{-| Module to access the information provided by the Xen hypervisor.
-}
{-
Copyright (C) 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Hypervisor.Xen
( getDomainsInfo
, getInferredDomInfo
, getUptimeInfo
--Data types to be re-exported from here
, Domain(..)
, UptimeInfo(..)
) where
import qualified Control.Exception as E
import Data.Attoparsec.Text as A
import qualified Data.Map as Map
import Data.Text (pack)
import System.Process
import qualified Ganeti.BasicTypes as BT
import qualified Ganeti.Constants as C
import Ganeti.Hypervisor.Xen.Types
import Ganeti.Hypervisor.Xen.XmParser
import Ganeti.Logging
import Ganeti.Utils
-- | Get information about the current Xen domains as a map where the domain
-- name is the key. This only includes the information made available by Xen
-- itself.
getDomainsInfo :: IO (BT.Result (Map.Map String Domain))
getDomainsInfo = do
  -- Run @xm list --long@, capturing any IOError instead of throwing.
  contents <-
    (E.try $ readProcess C.xenCmdXm ["list", "--long"] "")
      :: IO (Either IOError String)
  -- A failed process run becomes 'BT.Bad'; otherwise the output is parsed
  -- with the attoparsec parser, whose failure also becomes 'BT.Bad'.
  return $
    either (BT.Bad . show) (
      \c ->
        case A.parseOnly xmListParser $ pack c of
          Left msg -> BT.Bad msg
          Right dom -> BT.Ok dom
      ) contents
-- | Given a domain and a map containing information about multiple domains,
-- infer additional information about that domain (specifically, whether it
-- is hung). The domain is flagged as hung exactly when its CPU time is the
-- same in both samples; if it is absent from the map, nothing can be
-- inferred and 'domIsHung' is set to 'Nothing'.
inferDomInfos :: Map.Map String Domain -> Domain -> Domain
inferDomInfos sampledDoms dom =
  dom { domIsHung = isHung <$> Map.lookup (domName dom) sampledDoms }
  where
    isHung other = domCpuTime dom == domCpuTime other
-- | Get information about the current Xen domains as a map where the domain
-- name is the key. This includes information made available by Xen itself as
-- well as further information that can be inferred by querying Xen multiple
-- times and comparing the results.
getInferredDomInfo :: IO (BT.Result (Map.Map String Domain))
getInferredDomInfo = do
  -- Sample twice; 'inferDomInfos' compares the two samples per domain.
  domMap1 <- getDomainsInfo
  domMap2 <- getDomainsInfo
  case (domMap1, domMap2) of
    -- Both queries failed: combine the two messages.
    (BT.Bad m1, BT.Bad m2) -> return . BT.Bad $ m1 ++ "\n" ++ m2
    -- One query failed: warn and fall back to the single successful sample
    -- (no hung-state can be inferred in that case).
    (BT.Bad m, BT.Ok d) -> do
      logWarning $ "Unable to retrieve domains info the first time" ++ m
      return $ BT.Ok d
    (BT.Ok d, BT.Bad m) -> do
      logWarning $ "Unable to retrieve domains info the second time" ++ m
      return $ BT.Ok d
    -- Both succeeded: enrich the first sample using the second.
    (BT.Ok d1, BT.Ok d2) -> return . BT.Ok $ fmap (inferDomInfos d2) d1
-- | Get information about the uptime of domains, as a map where the domain ID
-- is the key. Unlike 'getDomainsInfo' this exits the program (via
-- 'exitIfBad' \/ 'exitErr') on command or parse failure instead of
-- returning a 'BT.Result'.
getUptimeInfo :: IO (Map.Map Int UptimeInfo)
getUptimeInfo = do
  -- Run @xm uptime@, turning an IOError into a fatal exit.
  contents <-
    ((E.try $ readProcess C.xenCmdXm ["uptime"] "")
      :: IO (Either IOError String)) >>=
      exitIfBad "running command" . either (BT.Bad . show) BT.Ok
  case A.parseOnly xmUptimeParser $ pack contents of
    Left msg -> exitErr msg
    Right uInfo -> return uInfo
| apyrgio/ganeti | src/Ganeti/Hypervisor/Xen.hs | bsd-2-clause | 4,141 | 0 | 15 | 808 | 738 | 390 | 348 | 57 | 4 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module T12646 where
import Language.Haskell.TH
import System.IO
-- | Closed type family whose first equation matches on the *kind* of its
-- argument: anything of kind @* -> *@ maps to 'Int', everything else to
-- 'Char'.
type family F (a :: k) :: * where
  F (a :: * -> *) = Int
  F (a :: k) = Char

-- Reify 'F' at compile time and print its pretty-printed form, flushing
-- stdout so the output appears even if compilation stops afterwards.
$(do info <- reify ''F
     runIO $ putStrLn $ pprint info
     runIO $ hFlush stdout
     return [])
| ezyang/ghc | testsuite/tests/th/T12646.hs | bsd-3-clause | 346 | 0 | 11 | 90 | 117 | 63 | 54 | 13 | 0 |
module Q where
import Map
| themoritz/cabal | cabal-testsuite/PackageTests/Backpack/Reexport1/q/Q.hs | bsd-3-clause | 26 | 0 | 3 | 5 | 7 | 5 | 2 | 2 | 0 |
{-# OPTIONS_GHC -fwarn-incomplete-patterns -fwarn-overlapping-patterns #-}
{-# LANGUAGE GADTs #-}
module PMC004 where
-- Two small GADTs whose constructors pin the type index.
data F a where
  F1 :: F Int
  F2 :: F Bool

data G a where
  G1 :: G Int
  G2 :: G Char

-- With the GADT evidence, matching @G1@ forces @a ~ Int@, so the @F a@
-- argument in the second equation can only be @F1@ — which the first
-- equation already covers. NOTE(review): this appears to be a pattern-match
-- checker test exercising exactly that overlap/coverage reasoning.
h :: F a -> G a -> ()
h F1 G1 = ()
h _  G1 = ()
| sgillespie/ghc | testsuite/tests/pmcheck/should_compile/pmc004.hs | bsd-3-clause | 258 | 0 | 7 | 68 | 93 | 51 | 42 | 12 | 1 |
{-# LANGUAGE Safe #-}
{-# LANGUAGE NoImplicitPrelude #-}

-----------------------------------------------------------------------------
-- |
-- Module      :  Foreign.Marshal
-- Copyright   :  (c) The FFI task force 2003
-- License     :  BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer  :  ffi@haskell.org
-- Stability   :  provisional
-- Portability :  portable
--
-- Marshalling support
--
-----------------------------------------------------------------------------

module Foreign.Marshal
        (
         -- | This module re-exports the other modules in the
         -- @Foreign.Marshal@ hierarchy (except for @Foreign.Marshal.Unsafe@):
         --
         -- (The Haddock here previously named \"Foreign.Marshal.Safe\",
         -- which is not this module; corrected.)
          module Foreign.Marshal.Alloc
        , module Foreign.Marshal.Array
        , module Foreign.Marshal.Error
        , module Foreign.Marshal.Pool
        , module Foreign.Marshal.Utils
        ) where

import Foreign.Marshal.Alloc
import Foreign.Marshal.Array
import Foreign.Marshal.Error
import Foreign.Marshal.Pool
import Foreign.Marshal.Utils
| tolysz/prepare-ghcjs | spec-lts8/base/Foreign/Marshal.hs | bsd-3-clause | 1,042 | 0 | 5 | 198 | 90 | 67 | 23 | 14 | 0 |
{-# LANGUAGE OverloadedStrings, DeriveGeneric, DataKinds #-}
module GTFS.Protobuf where
import Data.Int
import Data.ProtocolBuffers
import Data.Text (Text)
import GHC.Generics (Generic)
import GHC.TypeLits
-- | Top-level message of a GTFS-realtime feed: a header plus a list of
-- entities. Field numbers follow @gtfs-realtime.proto@.
data FeedMessage = FeedMessage {
    feedHeader :: Required 1 (Message FeedHeader)
  , feedEntity :: Repeated 2 (Message FeedEntity)
  } deriving (Generic, Show)
instance Decode FeedMessage

-- | Feed-level metadata.
data FeedHeader = FeedHeader {
    gtfsRealTimeVersion :: Required 1 (Value Text)
  , incrementality :: Optional 2 (Enumeration Incrementality)
  , feedHeaderTimestamp :: Optional 3 (Value Int64)
  } deriving (Generic, Show)
instance Decode FeedHeader

-- | Whether the feed is a full snapshot or a diff (proto values 0 and 1,
-- in declaration order via the derived 'Enum').
data Incrementality = FullDataset | Differential
  deriving (Show, Ord, Eq, Enum)

-- | One entity of the feed: exactly one of trip update, vehicle position
-- or alert is expected to be populated.
data FeedEntity = FeedEntity {
    feedId :: Required 1 (Value Text)
  , feedIsDeleted :: Optional 2 (Value Bool)
  , tripUpdate :: Optional 3 (Message TripUpdate)
  , vehicle :: Optional 4 (Message VehiclePosition)
  , alert :: Optional 5 (Message Alert)
  } deriving (Generic, Show)
instance Decode FeedEntity
------------------------------------------------------------------------
-- | Reference to a GTFS trip; all fields optional, tags as in
-- @gtfs-realtime.proto@.
data TripDescriptor = TripDescriptor {
    trip_id :: Optional 1 (Value Text)
  , route_id :: Optional 5 (Value Text)
  , start_time :: Optional 2 (Value Text)
  , direction_id :: Optional 6 (Value Int64)
  , start_date :: Optional 3 (Value Text)
  , schedule_relationship :: Optional 4 (Enumeration TDScheduleRelationship)
  -- extensions
  } deriving (Generic, Show)
instance Decode TripDescriptor

-- | Realtime update of a trip's stop times.
data TripUpdate = TripUpdate {
    tuTrip :: Required 1 (Message TripDescriptor)
  , tuVehicle :: Optional 3 (Message VehicleDescriptor)
  , tuStopTimeUpdate :: Repeated 2 (Message StopTimeUpdate)
  , tuTimeStamp :: Optional 4 (Value Int64)
  } deriving (Generic, Show)
instance Decode TripUpdate

-- | Identification of the physical vehicle serving a trip.
data VehicleDescriptor = VehicleDescriptor {
    vehicleId :: Optional 1 (Value Text)
  , vehicleLabel :: Optional 2 (Value Text)
  , licensePlate :: Optional 3 (Value Text)
  -- extensions
  } deriving (Generic, Show)
instance Decode VehicleDescriptor

-- | Arrival/departure update for a single stop of a trip.
data StopTimeUpdate = StopTimeUpdate {
    stop_sequence :: Optional 1 (Value Int32)
  , stop_id :: Optional 4 (Value Text)
  , arrival :: Optional 2 (Message StopTimeEvent)
  , departure :: Optional 3 (Message StopTimeEvent)
  , stScheduleRelationship :: Optional 5 (Enumeration STScheduleRelationship) -- note default is not implemented yet in library
  } deriving (Generic, Show)
instance Decode StopTimeUpdate

-- | TripDescriptor.ScheduleRelationship (proto values 0..3 via 'Enum').
data TDScheduleRelationship = Scheduled | Added | Unscheduled | Canceled
  deriving (Show, Eq, Ord, Enum)

-- | StopTimeUpdate.ScheduleRelationship (proto values 0..2).
-- NOTE(review): the first constructor is spelt @Schedules@ where the spec
-- value is SCHEDULED; renaming would break callers, so it is left as is.
data STScheduleRelationship = Schedules | Skipped | NoData
  deriving (Show, Eq, Ord, Enum)

-- | Timing of an arrival/departure: either absolute ('time') or as a
-- 'delay' relative to the schedule, with optional 'uncertainty'.
data StopTimeEvent = StopTimeEvent {
    delay :: Optional 1 (Value Int32)
  , time :: Optional 2 (Value Int64)
  , uncertainty :: Optional 3 (Value Int32)
  -- extensions?
  } deriving (Generic, Show)
instance Decode StopTimeEvent
------------------------------------------------------------------------
-- | Realtime position report for a vehicle.
--
-- Field tags follow @gtfs-realtime.proto@: trip = 1, position = 2,
-- current_stop_sequence = 3, current_status = 4, timestamp = 5,
-- congestion_level = 6, stop_id = 7, vehicle = 8.
data VehiclePosition = VehiclePosition {
    vpTrip :: Optional 1 (Message TripDescriptor)
    -- FIX: the vehicle descriptor lives in field 8 per the spec; tag 3 was
    -- used here before, clashing with vpCurrentStopSequence below (two
    -- fields on the same tag make the decoder's field mapping ambiguous).
  , vpVehicle :: Optional 8 (Message VehicleDescriptor)
  , vpPosition :: Optional 2 (Message Position)
  , vpCurrentStopSequence :: Optional 3 (Value Int32)
  , vpStopId :: Optional 7 (Value String)
  , vpVehicleStopStatus :: Optional 4 (Enumeration VehicleStopStatus)
  , vpTimeStamp :: Optional 5 (Value Int64)
  , vpCongestionLevel :: Optional 6 (Enumeration CongestionLevel)
  } deriving (Generic, Show)
instance Decode VehiclePosition
-- | Geographic position of a vehicle (WGS-84 latitude/longitude per the
-- GTFS-realtime spec).
data Position = Position {
    latitude :: Required 1 (Value Float)
  , longitude :: Required 2 (Value Float)
  , bearing :: Optional 3 (Value Float)
  , odometer :: Optional 4 (Value Double)
  , speed :: Optional 5 (Value Float)
  } deriving (Generic, Show)
instance Decode Position

-- | VehiclePosition.VehicleStopStatus (proto values 0..2 via 'Enum').
data VehicleStopStatus = IncomingAt | StoppedAt | InTransitTo
  deriving (Show, Ord, Eq, Enum)

-- | VehiclePosition.CongestionLevel (proto values 0..4 via 'Enum').
data CongestionLevel = UnknownCongestionLevel
                     | RunningSmoothly
                     | StopAndGo
                     | Congestion
                     | SevereCongestion
  deriving (Show, Ord, Eq, Enum)
------------------------------------------------------------------------
-- | Service alert: when it applies, what it affects, and why.
data Alert = Alert {
    alertTimeRange :: Repeated 1 (Message TimeRange)
  , alertEntitySelector :: Repeated 5 (Message EntitySelector)
  , alertCause :: Optional 6 (Enumeration Cause)
  , alertEffect :: Optional 7 (Enumeration Effect)
  , alertUrl :: Optional 8 (Message TranslatedString)
  , alertDescriptionText :: Optional 11 (Message TranslatedString)
  } deriving (Generic, Show)
instance Decode Alert

-- | POSIX-time interval during which an alert is active; either bound may
-- be open.
data TimeRange = TimeRange {
    timeRangeStart :: Optional 1 (Value Int64)
  , timeRangeEnd :: Optional 2 (Value Int64)
  } deriving (Generic, Show)
instance Decode TimeRange

-- | Which agency/route/trip/stop an alert applies to.
data EntitySelector = EntitySelector {
    esAgencyId :: Optional 1 (Value String)
  , esRouteId :: Optional 2 (Value String)
  , esRouteType :: Optional 3 (Value Int32)
  , esTrip :: Optional 4 (Message TripDescriptor)
  , esStopId :: Optional 5 (Value String)
  } deriving (Generic, Show)
instance Decode EntitySelector

-- | Alert.Cause. The spec numbers these values from 1, while derived
-- 'Enum' values start at 0 — hence the leading dummy constructor.
data Cause = DUMMYCAUSE -- to force the real enumeration to start at 1
           | UnknownCause
           | OtherCause
           | TechnicalProblem
           | Strike
           | Demonstration
           | Accident
           | Holiday
           | Weather
           | Maintenance
           | Construction
           | PoliceActivity
           | MedicalEmergency
  deriving (Show, Ord, Eq, Enum)

-- | Alert.Effect; same 1-based numbering trick as 'Cause'.
data Effect = DUMMYEFFECT
            | NoService
            | ReducedService
            | SignificantDelays
            | Detour
            | AdditionalService
            | ModifiedService
            | OtherEffect
            | UnknownEffect
            | StopMoved
  deriving (Show, Ord, Eq, Enum)

-- | Localised string: one 'Translation' per language.
data TranslatedString = TranslatedString {
    translation :: Repeated 1 (Message Translation)
  } deriving (Generic, Show)
instance Decode TranslatedString

-- | A single translation, optionally tagged with its language code.
data Translation = Translation {
    translationText :: Required 1 (Value String)
  , translationLanguage :: Optional 2 (Value String)
  } deriving (Generic, Show)
instance Decode Translation
| danchoi/gtfs-realtime | GTFS/Protobuf.hs | mit | 6,642 | 0 | 11 | 1,773 | 1,756 | 948 | 808 | 151 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Web.Slack.Config (SlackConfig(..), slackApiToken) where
import Control.Lens.TH
-- | Configuration options needed to connect to the Slack API
data SlackConfig = SlackConfig
   { _slackApiToken :: String -- ^ API Token for Bot
   } deriving (Show)

-- Generates the 'slackApiToken' lens (exported in the module header).
makeLenses ''SlackConfig
| madjar/slack-api | src/Web/Slack/Config.hs | mit | 342 | 0 | 8 | 78 | 58 | 36 | 22 | 7 | 0 |
module CW4 where
-- | A binary search tree: a key plus left/right subtrees, or 'Empty'.
data Tree a = Tree { valkey :: a, leftTree :: Tree a, rightTree :: Tree a } | Empty
    deriving (Show)

-- | Reverse application: @f & x@ applies @x@ to @f@. NOTE(review): the
-- argument order is the opposite of 'Data.Function.&' (function first).
infixr 9 &
(&) f x = x f

-- | A fold step: combine an element with an accumulator.
type Folder a b = a -> b -> b
-- | Insert a value into a BST. Values equal to the current key go to the
-- right subtree, so duplicates are retained.
insert :: Ord a => a -> Tree a -> Tree a
insert elem Empty = Tree { valkey = elem, leftTree = Empty, rightTree = Empty }
insert elem Tree { valkey = val, leftTree = left, rightTree = right }
    | elem < val = Tree { valkey = val, leftTree = insert elem left, rightTree = right }
    | otherwise = Tree { valkey = val, leftTree = left, rightTree = insert elem right }
-- | Build a BST from a list. Because of 'foldr', elements are inserted
-- starting from the *end* of the list, so the last element becomes the root.
create :: Ord a => [a] -> Tree a
create list = foldr insert Empty list

-- | In-order flattening: left subtree, key, right subtree (ascending for a
-- BST).
flatTreeL :: Tree a -> [a]
flatTreeL Empty = []
flatTreeL Tree { valkey = val, leftTree = left, rightTree = right } = flatTreeL left ++ [val] ++ flatTreeL right

-- | Reverse in-order flattening: right subtree, key, left subtree
-- (descending for a BST).
flatTreeR :: Tree a -> [a]
flatTreeR Empty = []
flatTreeR Tree { valkey = val, leftTree = left, rightTree = right } = flatTreeR right ++ [val] ++ flatTreeR left
-- | Fold a tree with a 'Folder'. Per node the combination order is: first
-- @f val acc@, then the right subtree is folded over that, then the left
-- subtree over the result. NOTE(review): for non-commutative @f@ this is
-- *not* the same as @foldr f acc . flatTreeR@ (the commented-out version
-- below) — confirm which order is actually intended.
treeFoldR :: Folder a b -> b -> Tree a -> b
-- dirty hack (comment translated from Russian)
--treeFoldR f acc tree = foldr f acc $ flatTreeR tree
treeFoldR _ acc Empty = acc
treeFoldR f acc Tree { valkey = val, leftTree = left, rightTree = right } = flip (treeFoldR f) left $ flip (treeFoldR f) right $ f val acc
-- | The empty tree (constrained to orderable keys, matching 'insert').
emptyTree :: Ord a => Tree a
emptyTree = Empty

-- | Sample: a single node holding 10.
simpleTree :: Tree Integer
simpleTree = Tree 10 Empty Empty

-- | Sample: 10 with 15 as its right child.
notSimpleTree :: Tree Integer
notSimpleTree = Tree 10 Empty $ Tree 15 Empty Empty
| GrandArchTemplar/FunctionalProgramming | src/CW4.hs | mit | 1,457 | 0 | 9 | 348 | 595 | 319 | 276 | 28 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.HTMLLIElement
(setType, getType, setValue, getValue, HTMLLIElement(..),
gTypeHTMLLIElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- NOTE(review): this module appears to be machine-generated (see the module
-- name \"JSDOM.Generated...\"); behavioural changes belong in the generator.

-- | Setter for the @type@ IDL attribute.
-- <https://developer.mozilla.org/en-US/docs/Web/API/HTMLLIElement.type Mozilla HTMLLIElement.type documentation>
setType ::
        (MonadDOM m, ToJSString val) => HTMLLIElement -> val -> m ()
setType self val = liftDOM (self ^. jss "type" (toJSVal val))

-- | Getter for the @type@ IDL attribute.
-- <https://developer.mozilla.org/en-US/docs/Web/API/HTMLLIElement.type Mozilla HTMLLIElement.type documentation>
getType ::
        (MonadDOM m, FromJSString result) => HTMLLIElement -> m result
getType self = liftDOM ((self ^. js "type") >>= fromJSValUnchecked)

-- | Setter for the @value@ IDL attribute.
-- <https://developer.mozilla.org/en-US/docs/Web/API/HTMLLIElement.value Mozilla HTMLLIElement.value documentation>
setValue :: (MonadDOM m) => HTMLLIElement -> Int -> m ()
setValue self val = liftDOM (self ^. jss "value" (toJSVal val))

-- | Getter for the @value@ IDL attribute; the JS number is rounded to 'Int'.
-- <https://developer.mozilla.org/en-US/docs/Web/API/HTMLLIElement.value Mozilla HTMLLIElement.value documentation>
getValue :: (MonadDOM m) => HTMLLIElement -> m Int
getValue self
  = liftDOM (round <$> ((self ^. js "value") >>= valToNumber))
| ghcjs/jsaddle-dom | src/JSDOM/Generated/HTMLLIElement.hs | mit | 2,074 | 0 | 12 | 265 | 541 | 326 | 215 | 30 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Mezzo.Compose.Intervals
-- Description : Interval literals
-- Copyright : (c) Dima Szamozvancev
-- License : MIT
--
-- Maintainer : ds709@cam.ac.uk
-- Stability : experimental
-- Portability : portable
--
-- Literals and operations involving intervals.
--
-----------------------------------------------------------------------------
module Mezzo.Compose.Intervals where
import Mezzo.Model
import Mezzo.Model.Prim
import Mezzo.Compose.Types
import Mezzo.Compose.Builder
import Mezzo.Compose.Templates
import GHC.TypeLits
import Data.Kind
import Control.Monad
-- * Atomic literals

-- ** Interval class literals

-- | Major interval class.
_iMaj :: IC Maj
_iMaj = IC

-- | Minor interval class.
_iMin :: IC Min
_iMin = IC

-- | Perfect interval class.
_iPerf :: IC Perf
_iPerf = IC

-- | Augmented interval class.
_iAug :: IC Aug
_iAug = IC

-- | Diminished interval class.
_iDim :: IC Dim
_iDim = IC

-- ** Interval size literals

-- | Unison.
_i1 :: IS Unison
_i1 = IS

-- | Second.
_i2 :: IS Second
_i2 = IS

-- | Third.
_i3 :: IS Third
_i3 = IS

-- | Fourth.
_i4 :: IS Fourth
_i4 = IS

-- | Fifth.
_i5 :: IS Fifth
_i5 = IS

-- | Sixth.
_i6 :: IS Sixth
_i6 = IS

-- | Seventh.
_i7 :: IS Seventh
_i7 = IS

-- | Octave.
_i8 :: IS Octave
_i8 = IS

-- ** Constructor

-- | Combine an interval class and size into an interval witness; both
-- arguments matter only at the type level, hence they are ignored.
interval :: IC ic -> IS is -> Intv (Interval ic is)
interval _ _ = Intv
-- * Concrete interval literals

-- Value-level witnesses for every supported quality/size combination,
-- named @i\<Quality\>\<Size\>@ (e.g. 'iMin3' is a minor third). All are the
-- same 'Intv' constructor; only their phantom types differ.

iPerf1 :: Intv (Interval Perf Unison)
iPerf1 = Intv

iAug1 :: Intv (Interval Aug Unison)
iAug1 = Intv

iDim2 :: Intv (Interval Dim Second)
iDim2 = Intv

iMin2 :: Intv (Interval Min Second)
iMin2 = Intv

iMaj2 :: Intv (Interval Maj Second)
iMaj2 = Intv

iAug2 :: Intv (Interval Aug Second)
iAug2 = Intv

iDim3 :: Intv (Interval Dim Third)
iDim3 = Intv

iMin3 :: Intv (Interval Min Third)
iMin3 = Intv

iMaj3 :: Intv (Interval Maj Third)
iMaj3 = Intv

iAug3 :: Intv (Interval Aug Third)
iAug3 = Intv

iDim4 :: Intv (Interval Dim Fourth)
iDim4 = Intv

iPerf4 :: Intv (Interval Perf Fourth)
iPerf4 = Intv

iAug4 :: Intv (Interval Aug Fourth)
iAug4 = Intv

iDim5 :: Intv (Interval Dim Fifth)
iDim5 = Intv

iPerf5 :: Intv (Interval Perf Fifth)
iPerf5 = Intv

iAug5 :: Intv (Interval Aug Fifth)
iAug5 = Intv

iDim6 :: Intv (Interval Dim Sixth)
iDim6 = Intv

iMin6 :: Intv (Interval Min Sixth)
iMin6 = Intv

iMaj6 :: Intv (Interval Maj Sixth)
iMaj6 = Intv

iAug6 :: Intv (Interval Aug Sixth)
iAug6 = Intv

iDim7 :: Intv (Interval Dim Seventh)
iDim7 = Intv

iMin7 :: Intv (Interval Min Seventh)
iMin7 = Intv

iMaj7 :: Intv (Interval Maj Seventh)
iMaj7 = Intv

iAug7 :: Intv (Interval Aug Seventh)
iAug7 = Intv

iDim8 :: Intv (Interval Dim Octave)
iDim8 = Intv

iPerf8 :: Intv (Interval Perf Octave)
iPerf8 = Intv

iAug8 :: Intv (Interval Aug Octave)
iAug8 = Intv
-- * Operations

-- | Transpose a root up by the given interval. The transposition happens
-- entirely at the type level (via 'RaiseBy'); both value arguments are
-- ignored at runtime.
transUp :: (tr ~ (PitchRoot (RaiseBy (RootToPitch r) i)), IntRep tr)
            => Intv i -> RootS r -> RootS tr
transUp i _ = spec Root

-- | Transpose a root down by the given interval (type-level 'LowerBy',
-- mirror of 'transUp').
transDown :: (tr ~ (PitchRoot (LowerBy (RootToPitch r) i)), IntRep tr)
            => Intv i -> RootS r -> RootS tr
transDown i _ = spec Root
| DimaSamoz/mezzo | src/Mezzo/Compose/Intervals.hs | mit | 2,891 | 0 | 13 | 581 | 991 | 530 | 461 | 97 | 1 |
{-# LANGUAGE ViewPatterns #-}
module Unison.Codebase.SqliteCodebase.Conversions where
import Control.Monad (foldM)
import Data.Bifunctor (Bifunctor (bimap))
import Data.Bitraversable (Bitraversable (bitraverse))
import qualified Data.ByteString.Short as SBS
import Data.Either (fromRight)
import Data.Foldable (Foldable (toList))
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Text (Text, pack)
import qualified U.Codebase.Branch as V2.Branch
import qualified U.Codebase.Causal as V2
import qualified U.Codebase.Decl as V2.Decl
import qualified U.Codebase.HashTags as V2
import qualified U.Codebase.Kind as V2.Kind
import qualified U.Codebase.Reference as V2
import qualified U.Codebase.Reference as V2.Reference
import qualified U.Codebase.Referent as V2
import qualified U.Codebase.Referent as V2.Referent
import qualified U.Codebase.ShortHash as V2
import qualified U.Codebase.Sqlite.Symbol as V2
import qualified U.Codebase.Term as V2.Term
import qualified U.Codebase.TermEdit as V2.TermEdit
import qualified U.Codebase.Type as V2.Type
import qualified U.Codebase.TypeEdit as V2.TypeEdit
import qualified U.Codebase.WatchKind as V2
import qualified U.Codebase.WatchKind as V2.WatchKind
import qualified U.Core.ABT as V2.ABT
import qualified U.Util.Hash as V2
import qualified U.Util.Hash as V2.Hash
import qualified U.Util.Map as Map
import qualified U.Util.Set as Set
import qualified Unison.ABT as V1.ABT
import qualified Unison.Codebase.Branch as V1.Branch
import qualified Unison.Codebase.Causal as V1.Causal
import qualified Unison.Codebase.Metadata as V1.Metadata
import qualified Unison.Codebase.Patch as V1
import qualified Unison.Codebase.ShortBranchHash as V1
import qualified Unison.Codebase.TermEdit as V1.TermEdit
import qualified Unison.Codebase.TypeEdit as V1.TypeEdit
import qualified Unison.ConstructorType as CT
import qualified Unison.DataDeclaration as V1.Decl
import Unison.Hash (Hash)
import qualified Unison.Hash as V1
import qualified Unison.Kind as V1.Kind
import qualified Unison.NameSegment as V1
import Unison.Parser (Ann)
import qualified Unison.Parser as Ann
import qualified Unison.Pattern as V1.Pattern
import qualified Unison.Reference as V1
import qualified Unison.Reference as V1.Reference
import qualified Unison.Referent as V1
import qualified Unison.Referent as V1.Referent
import qualified Unison.Symbol as V1
import qualified Unison.Term as V1.Term
import qualified Unison.Type as V1.Type
import qualified Unison.Util.Relation as Relation
import qualified Unison.Util.Star3 as V1.Star3
import qualified Unison.Var as V1.Var
import qualified Unison.Var as Var
-- | Convert a V1 short branch hash to V2; both wrap the same base32 text.
sbh1to2 :: V1.ShortBranchHash -> V2.ShortBranchHash
sbh1to2 (V1.ShortBranchHash b32) = V2.ShortBranchHash b32
-- | Map a V2 declaration type onto the corresponding V1 constructor type.
decltype2to1 :: V2.Decl.DeclType -> CT.ConstructorType
decltype2to1 V2.Decl.Data = CT.Data
decltype2to1 V2.Decl.Effect = CT.Effect

-- | Inverse of 'decltype2to1'.
decltype1to2 :: CT.ConstructorType -> V2.Decl.DeclType
decltype1to2 CT.Data = V2.Decl.Data
decltype1to2 CT.Effect = V2.Decl.Effect
-- | Convert a V1 watch kind to V2; anything other than the two known
-- kinds is a hard error.
watchKind1to2 :: V1.Var.WatchKind -> V2.WatchKind
watchKind1to2 V1.Var.RegularWatch = V2.WatchKind.RegularWatch
watchKind1to2 V1.Var.TestWatch = V2.WatchKind.TestWatch
watchKind1to2 other = error $ "What kind of watchkind is " ++ other ++ "?"

-- | Convert a V2 watch kind back to V1 (total: V2 has exactly two kinds).
watchKind2to1 :: V2.WatchKind -> V1.Var.WatchKind
watchKind2to1 V2.WatchKind.RegularWatch = V1.Var.RegularWatch
watchKind2to1 V2.WatchKind.TestWatch = V1.Var.TestWatch
-- | Convert a V1 term into a V2 term.
--
-- @h@ is the hash of the component this term belongs to; self-references
-- are rewritten against it by 'rreference1to2' / 'rreferent1to2'.
-- V1 annotations are discarded ('V2.ABT.amap (const ())'), and
-- 'V1.Term.Blank' is rejected with 'error' since blanks cannot be
-- serialized.
term1to2 :: Hash -> V1.Term.Term V1.Symbol Ann -> V2.Term.Term V2.Symbol
term1to2 h =
  V2.ABT.transform termF1to2
    . V2.ABT.vmap symbol1to2
    . V2.ABT.amap (const ())
    . abt1to2
  where
    termF1to2 :: V1.Term.F V1.Symbol Ann Ann a -> V2.Term.F V2.Symbol a
    termF1to2 = go
    go = \case
      V1.Term.Int i -> V2.Term.Int i
      V1.Term.Nat n -> V2.Term.Nat n
      V1.Term.Float f -> V2.Term.Float f
      V1.Term.Boolean b -> V2.Term.Boolean b
      V1.Term.Text t -> V2.Term.Text t
      V1.Term.Char c -> V2.Term.Char c
      V1.Term.Ref r -> V2.Term.Ref (rreference1to2 h r)
      V1.Term.Constructor r i -> V2.Term.Constructor (reference1to2 r) (fromIntegral i)
      V1.Term.Request r i -> V2.Term.Request (reference1to2 r) (fromIntegral i)
      V1.Term.Handle b h -> V2.Term.Handle b h
      V1.Term.App f a -> V2.Term.App f a
      V1.Term.Ann e t -> V2.Term.Ann e (ttype1to2 t)
      V1.Term.List as -> V2.Term.List as
      V1.Term.If c t f -> V2.Term.If c t f
      V1.Term.And a b -> V2.Term.And a b
      V1.Term.Or a b -> V2.Term.Or a b
      V1.Term.Lam a -> V2.Term.Lam a
      -- V1 Let/LetRec carry an extra Bool flag that V2 lacks; it is
      -- dropped here and reconstituted as False by 'term2to1'.
      V1.Term.LetRec _ bs body -> V2.Term.LetRec bs body
      V1.Term.Let _ b body -> V2.Term.Let b body
      V1.Term.Match e cases -> V2.Term.Match e (goCase <$> cases)
      V1.Term.TermLink r -> V2.Term.TermLink (rreferent1to2 h r)
      V1.Term.TypeLink r -> V2.Term.TypeLink (reference1to2 r)
      V1.Term.Blank _ -> error "can't serialize term with blanks"
    goCase (V1.Term.MatchCase p g b) =
      V2.Term.MatchCase (goPat p) g b
    -- Patterns: annotations are dropped; references are converted.
    goPat :: V1.Pattern.Pattern a -> V2.Term.Pattern Text V2.Reference
    goPat = \case
      V1.Pattern.Unbound _ -> V2.Term.PUnbound
      V1.Pattern.Var _ -> V2.Term.PVar
      V1.Pattern.Boolean _ b -> V2.Term.PBoolean b
      V1.Pattern.Int _ i -> V2.Term.PInt i
      V1.Pattern.Nat _ n -> V2.Term.PNat n
      V1.Pattern.Float _ d -> V2.Term.PFloat d
      V1.Pattern.Text _ t -> V2.Term.PText t
      V1.Pattern.Char _ c -> V2.Term.PChar c
      V1.Pattern.Constructor _ r i ps ->
        V2.Term.PConstructor (reference1to2 r) i (goPat <$> ps)
      V1.Pattern.As _ p -> V2.Term.PAs (goPat p)
      V1.Pattern.EffectPure _ p -> V2.Term.PEffectPure (goPat p)
      V1.Pattern.EffectBind _ r i ps k ->
        V2.Term.PEffectBind (reference1to2 r) i (goPat <$> ps) (goPat k)
      V1.Pattern.SequenceLiteral _ ps -> V2.Term.PSequenceLiteral (goPat <$> ps)
      V1.Pattern.SequenceOp _ p op p2 ->
        V2.Term.PSequenceOp (goPat p) (goSeqOp op) (goPat p2)
    goSeqOp = \case
      V1.Pattern.Cons -> V2.Term.PCons
      V1.Pattern.Snoc -> V2.Term.PSnoc
      V1.Pattern.Concat -> V2.Term.PConcat
-- | Convert a V2 term back into an annotated V1 term.
--
-- @h@ is the hash of the component the term lives in (resolves V2
-- self-references whose hash field is 'Nothing').  @lookupSize@ supplies
-- the component size a V1 'V1.Reference.Id' carries; @lookupCT@ supplies
-- the constructor type V1 referents need.  Every annotation is filled in
-- with 'Ann.External'.
term2to1 :: forall m. Monad m => Hash -> (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Term.Term V2.Symbol -> m (V1.Term.Term V1.Symbol Ann)
term2to1 h lookupSize lookupCT tm =
  V1.ABT.transformM (termF2to1 h lookupSize lookupCT)
    . V1.ABT.vmap symbol2to1
    . V1.ABT.amap (const Ann.External)
    $ abt2to1 tm
  where
    termF2to1 :: forall m a. Monad m => Hash -> (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Term.F V2.Symbol a -> m (V1.Term.F V1.Symbol Ann Ann a)
    termF2to1 h lookupSize lookupCT = go
      where
        go :: V2.Term.F V2.Symbol a -> m (V1.Term.F V1.Symbol Ann Ann a)
        go = \case
          V2.Term.Int i -> pure $ V1.Term.Int i
          V2.Term.Nat n -> pure $ V1.Term.Nat n
          V2.Term.Float d -> pure $ V1.Term.Float d
          V2.Term.Boolean b -> pure $ V1.Term.Boolean b
          V2.Term.Text t -> pure $ V1.Term.Text t
          V2.Term.Char c -> pure $ V1.Term.Char c
          V2.Term.Ref r -> V1.Term.Ref <$> rreference2to1 h lookupSize r
          V2.Term.Constructor r i ->
            V1.Term.Constructor <$> reference2to1 lookupSize r <*> pure (fromIntegral i)
          V2.Term.Request r i ->
            V1.Term.Request <$> reference2to1 lookupSize r <*> pure (fromIntegral i)
          V2.Term.Handle a a4 -> pure $ V1.Term.Handle a a4
          V2.Term.App a a4 -> pure $ V1.Term.App a a4
          V2.Term.Ann a t2 -> V1.Term.Ann a <$> ttype2to1 lookupSize t2
          V2.Term.List sa -> pure $ V1.Term.List sa
          V2.Term.If a a4 a5 -> pure $ V1.Term.If a a4 a5
          V2.Term.And a a4 -> pure $ V1.Term.And a a4
          V2.Term.Or a a4 -> pure $ V1.Term.Or a a4
          V2.Term.Lam a -> pure $ V1.Term.Lam a
          -- V1's Bool flag on Let/LetRec (dropped by 'term1to2') is
          -- reconstituted as False here.
          V2.Term.LetRec as a -> pure $ V1.Term.LetRec False as a
          V2.Term.Let a a4 -> pure $ V1.Term.Let False a a4
          V2.Term.Match a cases -> V1.Term.Match a <$> traverse goCase cases
          V2.Term.TermLink rr -> V1.Term.TermLink <$> rreferent2to1 h lookupSize lookupCT rr
          V2.Term.TypeLink r -> V1.Term.TypeLink <$> reference2to1 lookupSize r
        goCase = \case
          V2.Term.MatchCase pat cond body ->
            V1.Term.MatchCase <$> (goPat pat) <*> pure cond <*> pure body
        -- Patterns regain an 'Ann.External' annotation (bound as @a@ below).
        goPat = \case
          V2.Term.PUnbound -> pure $ V1.Pattern.Unbound a
          V2.Term.PVar -> pure $ V1.Pattern.Var a
          V2.Term.PBoolean b -> pure $ V1.Pattern.Boolean a b
          V2.Term.PInt i -> pure $ V1.Pattern.Int a i
          V2.Term.PNat n -> pure $ V1.Pattern.Nat a n
          V2.Term.PFloat d -> pure $ V1.Pattern.Float a d
          V2.Term.PText t -> pure $ V1.Pattern.Text a t
          V2.Term.PChar c -> pure $ V1.Pattern.Char a c
          V2.Term.PConstructor r i ps ->
            V1.Pattern.Constructor a <$> reference2to1 lookupSize r <*> pure i <*> (traverse goPat ps)
          V2.Term.PAs p -> V1.Pattern.As a <$> goPat p
          V2.Term.PEffectPure p -> V1.Pattern.EffectPure a <$> goPat p
          V2.Term.PEffectBind r i ps p -> V1.Pattern.EffectBind a <$> reference2to1 lookupSize r <*> pure i <*> traverse goPat ps <*> goPat p
          V2.Term.PSequenceLiteral ps -> V1.Pattern.SequenceLiteral a <$> traverse goPat ps
          V2.Term.PSequenceOp p1 op p2 -> V1.Pattern.SequenceOp a <$> goPat p1 <*> pure (goOp op) <*> goPat p2
        goOp = \case
          V2.Term.PCons -> V1.Pattern.Cons
          V2.Term.PSnoc -> V1.Pattern.Snoc
          V2.Term.PConcat -> V1.Pattern.Concat
        a = Ann.External
-- | Convert a V2 declaration to V1.
--
-- Note the stacked @\<$\>@s: the first is over the function Functor
-- (composing @goCT dt@ after the partially-applied 'V1.Decl.DataDeclaration'
-- constructor), the second maps that composed function over the monadic
-- @cts'@.  V2 does not store constructor names, so synthetic names
-- @Constructor0, Constructor1, ...@ are generated in order.
decl2to1 :: Monad m => Hash -> (Hash -> m V1.Reference.Size) -> V2.Decl.Decl V2.Symbol -> m (V1.Decl.Decl V1.Symbol Ann)
decl2to1 h lookupSize (V2.Decl.DataDeclaration dt m bound cts) =
  goCT dt
    <$> V1.Decl.DataDeclaration (goMod m) Ann.External (symbol2to1 <$> bound)
    <$> cts'
  where
    goMod = \case
      V2.Decl.Structural -> V1.Decl.Structural
      V2.Decl.Unique t -> V1.Decl.Unique t
    -- Effect decls are wrapped in Left/EffectDeclaration, data decls in Right.
    goCT = \case
      V2.Decl.Data -> Right
      V2.Decl.Effect -> Left . V1.Decl.EffectDeclaration
    cts' = traverse mkCtor (zip cts [0 ..])
    mkCtor (type1, i) = do
      type2 <- dtype2to1 h lookupSize type1
      pure $ (Ann.External, V1.symbol . pack $ "Constructor" ++ show i, type2)
-- | Convert a V1 declaration to V2, @h@ being the hash of the component
-- it belongs to.  Constructor names and annotations are discarded (V2
-- keeps only the constructor types, see @cts'@).
decl1to2 :: Hash -> V1.Decl.Decl V1.Symbol a -> V2.Decl.Decl V2.Symbol
decl1to2 h decl1 = case V1.Decl.asDataDecl decl1 of
  V1.Decl.DataDeclaration m _ann bound cts ->
    V2.Decl.DataDeclaration
      (decltype1to2 $ V1.Decl.constructorType decl1)
      (goMod m)
      (symbol1to2 <$> bound)
      cts'
    where
      goMod = \case
        V1.Decl.Structural -> V2.Decl.Structural
        V1.Decl.Unique t -> V2.Decl.Unique t
      cts' = [dtype1to2 h t | (_, _, t) <- cts]
-- | Convert a V2 symbol to V1.  The round trip is not faithful: every
-- V2 symbol becomes a 'Var.User' variable on the V1 side.
symbol2to1 :: V2.Symbol -> V1.Symbol
symbol2to1 (V2.Symbol i t) = V1.Symbol i (Var.User t)

-- | Convert a V1 symbol to V2, keeping only the raw variable name.
symbol1to2 :: V1.Symbol -> V2.Symbol
symbol1to2 (V1.Symbol i varType) = V2.Symbol i (Var.rawName varType)

-- | Parse the numeric position out of a short-hash suffix.
-- Partial: calls 'error' if 'V1.Reference.readSuffix' fails (see the
-- embedded todo about moving suffix parsing to the frontend).
shortHashSuffix1to2 :: Text -> V1.Reference.Pos
shortHashSuffix1to2 =
  fst
    . fromRight (error "todo: move suffix parsing to frontend")
    . V1.Reference.readSuffix
-- | Structurally convert a V2 ABT to a V1 ABT (same base functor,
-- variables, and annotations; only the ABT wrapper changes).
abt2to1 :: Functor f => V2.ABT.Term f v a -> V1.ABT.Term f v a
abt2to1 (V2.ABT.Term fv a out) = V1.ABT.Term fv a (go out)
  where
    go = \case
      V2.ABT.Cycle body -> V1.ABT.Cycle (abt2to1 body)
      V2.ABT.Abs v body -> V1.ABT.Abs v (abt2to1 body)
      V2.ABT.Var v -> V1.ABT.Var v
      V2.ABT.Tm tm -> V1.ABT.Tm (abt2to1 <$> tm)

-- | Inverse of 'abt2to1'.
abt1to2 :: Functor f => V1.ABT.Term f v a -> V2.ABT.Term f v a
abt1to2 (V1.ABT.Term fv a out) = V2.ABT.Term fv a (go out)
  where
    go = \case
      V1.ABT.Cycle body -> V2.ABT.Cycle (abt1to2 body)
      V1.ABT.Abs v body -> V2.ABT.Abs v (abt1to2 body)
      V1.ABT.Var v -> V2.ABT.Var v
      V1.ABT.Tm tm -> V2.ABT.Tm (abt1to2 <$> tm)
-- The @rreference*@ family handles possibly-self references: on the V2
-- side a 'Nothing' hash means "this component" and is resolved against
-- the supplied @h@; conversely, a V1 reference equal to @h@ is encoded
-- back as 'Nothing'.

-- | Resolve a V2 (possibly-self) reference to a V1 reference.
rreference2to1 :: Applicative m => Hash -> (Hash -> m V1.Reference.Size) -> V2.Reference' Text (Maybe V2.Hash) -> m V1.Reference
rreference2to1 h lookupSize = \case
  V2.ReferenceBuiltin t -> pure $ V1.Reference.Builtin t
  V2.ReferenceDerived i -> V1.Reference.DerivedId <$> rreferenceid2to1 h lookupSize i

-- | Encode a V1 reference as a V2 reference relative to component hash @h@.
rreference1to2 :: Hash -> V1.Reference -> V2.Reference' Text (Maybe V2.Hash)
rreference1to2 h = \case
  V1.Reference.Builtin t -> V2.ReferenceBuiltin t
  V1.Reference.DerivedId i -> V2.ReferenceDerived (rreferenceid1to2 h i)

rreferenceid2to1 :: Functor m => Hash -> (Hash -> m V1.Reference.Size) -> V2.Reference.Id' (Maybe V2.Hash) -> m V1.Reference.Id
rreferenceid2to1 h lookupSize (V2.Reference.Id oh i) =
  V1.Reference.Id h' i <$> lookupSize h'
  where
    -- Nothing = self reference; fall back to the component hash.
    h' = maybe h hash2to1 oh

rreferenceid1to2 :: Hash -> V1.Reference.Id -> V2.Reference.Id' (Maybe V2.Hash)
rreferenceid1to2 h (V1.Reference.Id h' i _n) = V2.Reference.Id oh i
  where
    -- A reference into the current component is stored as Nothing.
    oh = if h == h' then Nothing else Just (hash1to2 h')

-- | V1 hashes store a strict ByteString; V2 stores a ShortByteString.
hash1to2 :: Hash -> V2.Hash
hash1to2 (V1.Hash bs) = V2.Hash.Hash (SBS.toShort bs)

branchHash1to2 :: V1.Branch.Hash -> V2.CausalHash
branchHash1to2 = V2.CausalHash . hash1to2 . V1.Causal.unRawHash

branchHash2to1 :: V2.CausalHash -> V1.Branch.Hash
branchHash2to1 = V1.Causal.RawHash . hash2to1 . V2.unCausalHash

patchHash1to2 :: V1.Branch.EditHash -> V2.PatchHash
patchHash1to2 = V2.PatchHash . hash1to2
-- | Convert a fully-resolved V2 reference to V1.  @lookupSize@ supplies
-- the component size a V1 'V1.Reference.Id' must carry.
reference2to1 :: Applicative m => (Hash -> m V1.Reference.Size) -> V2.Reference -> m V1.Reference
reference2to1 lookupSize = \case
  V2.ReferenceBuiltin t -> pure $ V1.Reference.Builtin t
  V2.ReferenceDerived i -> V1.Reference.DerivedId <$> referenceid2to1 lookupSize i

-- | Pure inverse: the V1 size field is simply dropped.
reference1to2 :: V1.Reference -> V2.Reference
reference1to2 = \case
  V1.Reference.Builtin t -> V2.ReferenceBuiltin t
  V1.Reference.DerivedId i -> V2.ReferenceDerived (referenceid1to2 i)

referenceid1to2 :: V1.Reference.Id -> V2.Reference.Id
referenceid1to2 (V1.Reference.Id h i _n) = V2.Reference.Id (hash1to2 h) i

referenceid2to1 :: Functor m => (Hash -> m V1.Reference.Size) -> V2.Reference.Id -> m V1.Reference.Id
referenceid2to1 lookupSize (V2.Reference.Id h i) =
  V1.Reference.Id sh i <$> lookupSize sh
  where
    sh = hash2to1 h
-- | Convert a V2 (possibly-self) referent to V1.  Constructor referents
-- additionally need @lookupCT@ to recover the V1 constructor type,
-- which V2 does not store.
rreferent2to1 :: Applicative m => Hash -> (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.ReferentH -> m V1.Referent
rreferent2to1 h lookupSize lookupCT = \case
  V2.Ref r -> V1.Ref <$> rreference2to1 h lookupSize r
  V2.Con r i -> V1.Con <$> reference2to1 lookupSize r <*> pure (fromIntegral i) <*> lookupCT r

-- | Encode a V1 referent relative to component hash @h@; the V1
-- constructor type is dropped.
rreferent1to2 :: Hash -> V1.Referent -> V2.ReferentH
rreferent1to2 h = \case
  V1.Ref r -> V2.Ref (rreference1to2 h r)
  V1.Con r i _ct -> V2.Con (reference1to2 r) (fromIntegral i)

referent2to1 :: Applicative m => (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Referent -> m V1.Referent
referent2to1 lookupSize lookupCT = \case
  V2.Ref r -> V1.Ref <$> reference2to1 lookupSize r
  V2.Con r i -> V1.Con <$> reference2to1 lookupSize r <*> pure (fromIntegral i) <*> lookupCT r

referent1to2 :: V1.Referent -> V2.Referent
referent1to2 = \case
  V1.Ref r -> V2.Ref $ reference1to2 r
  V1.Con r i _ct -> V2.Con (reference1to2 r) (fromIntegral i)

referentid2to1 :: Applicative m => (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Referent.Id -> m V1.Referent.Id
referentid2to1 lookupSize lookupCT = \case
  V2.RefId r -> V1.Ref' <$> referenceid2to1 lookupSize r
  V2.ConId r i ->
    V1.Con' <$> referenceid2to1 lookupSize r
      <*> pure (fromIntegral i)
      <*> lookupCT (V2.ReferenceDerived r)

-- | Inverse of 'hash1to2'.
hash2to1 :: V2.Hash.Hash -> Hash
hash2to1 (V2.Hash.Hash sbs) = V1.Hash (SBS.fromShort sbs)

causalHash2to1 :: V2.CausalHash -> V1.Causal.RawHash V1.Branch.Raw
causalHash2to1 = V1.Causal.RawHash . hash2to1 . V2.unCausalHash

causalHash1to2 :: V1.Causal.RawHash V1.Branch.Raw -> V2.CausalHash
causalHash1to2 = V2.CausalHash . hash1to2 . V1.Causal.unRawHash
-- | Convert a V2 term-level type (fully-resolved references) to V1.
ttype2to1 :: Monad m => (Hash -> m V1.Reference.Size) -> V2.Term.Type V2.Symbol -> m (V1.Type.Type V1.Symbol Ann)
ttype2to1 lookupSize = type2to1' (reference2to1 lookupSize)

-- | Convert a V2 decl-level type (possibly-self references, resolved
-- against @h@) to V1.
dtype2to1 :: Monad m => Hash -> (Hash -> m V1.Reference.Size) -> V2.Decl.Type V2.Symbol -> m (V1.Type.Type V1.Symbol Ann)
dtype2to1 h lookupSize = type2to1' (rreference2to1 h lookupSize)

-- | Shared worker: convert a V2 type to V1 given a reference converter.
-- Annotations become 'Ann.External'; kinds are converted structurally.
type2to1' :: Monad m => (r -> m V1.Reference) -> V2.Type.TypeR r V2.Symbol -> m (V1.Type.Type V1.Symbol Ann)
type2to1' convertRef =
  V1.ABT.transformM (typeF2to1 convertRef)
    . V1.ABT.vmap symbol2to1
    . V1.ABT.amap (const Ann.External)
    . abt2to1
  where
    typeF2to1 :: Applicative m => (r -> m V1.Reference) -> V2.Type.F' r a -> m (V1.Type.F a)
    typeF2to1 convertRef = \case
      V2.Type.Ref r -> V1.Type.Ref <$> convertRef r
      V2.Type.Arrow i o -> pure $ V1.Type.Arrow i o
      V2.Type.Ann a k -> pure $ V1.Type.Ann a (convertKind k)
      V2.Type.App f x -> pure $ V1.Type.App f x
      V2.Type.Effect e b -> pure $ V1.Type.Effect e b
      V2.Type.Effects as -> pure $ V1.Type.Effects as
      V2.Type.Forall a -> pure $ V1.Type.Forall a
      V2.Type.IntroOuter a -> pure $ V1.Type.IntroOuter a
      where
        convertKind = \case
          V2.Kind.Star -> V1.Kind.Star
          V2.Kind.Arrow i o -> V1.Kind.Arrow (convertKind i) (convertKind o)

-- | Convert a V1 type to a V2 decl-level type relative to hash @h@.
dtype1to2 :: Hash -> V1.Type.Type V1.Symbol a -> V2.Type.TypeD V2.Symbol
dtype1to2 h = type1to2' (rreference1to2 h)

-- | Convert a V1 type to a V2 term-level type.
ttype1to2 :: V1.Type.Type V1.Symbol a -> V2.Type.TypeT V2.Symbol
ttype1to2 = type1to2' reference1to2

-- | Shared worker for the 1->2 direction; annotations are dropped.
type1to2' :: (V1.Reference -> r) -> V1.Type.Type V1.Symbol a -> V2.Type.TypeR r V2.Symbol
type1to2' convertRef =
  V2.ABT.transform (typeF1to2' convertRef)
    . V2.ABT.vmap symbol1to2
    . V2.ABT.amap (const ())
    . abt1to2
  where
    typeF1to2' :: (V1.Reference -> r) -> V1.Type.F a -> V2.Type.F' r a
    typeF1to2' convertRef = \case
      V1.Type.Ref r -> V2.Type.Ref (convertRef r)
      V1.Type.Arrow i o -> V2.Type.Arrow i o
      V1.Type.Ann a k -> V2.Type.Ann a (convertKind k)
      V1.Type.App f x -> V2.Type.App f x
      V1.Type.Effect e b -> V2.Type.Effect e b
      V1.Type.Effects as -> V2.Type.Effects as
      V1.Type.Forall a -> V2.Type.Forall a
      V1.Type.IntroOuter a -> V2.Type.IntroOuter a
      where
        convertKind = \case
          V1.Kind.Star -> V2.Kind.Star
          V1.Kind.Arrow i o -> V2.Kind.Arrow (convertKind i) (convertKind o)
-- | forces loading v1 branches even if they may not exist
causalbranch2to1 :: Monad m => (String -> Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Branch.Causal m -> m (V1.Branch.Branch m)
causalbranch2to1 lookupSize lookupCT = fmap V1.Branch.Branch . causalbranch2to1' lookupSize lookupCT

-- | Convert a V2 causal branch to the unwrapped V1 causal, choosing the
-- One/Cons/Merge constructor from the number of parents.  The head value
-- is forced here; parent histories stay lazy (converted inside @m@).
causalbranch2to1' :: Monad m => (String -> Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Branch.Causal m -> m (V1.Branch.UnwrappedBranch m)
causalbranch2to1' lookupSize lookupCT (V2.Causal hc _he (Map.toList -> parents) me) = do
  let currentHash = causalHash2to1 hc
  case parents of
    [] -> V1.Causal.One currentHash <$> (me >>= branch2to1 lookupSize lookupCT)
    [(hp, mp)] -> do
      let parentHash = causalHash2to1 hp
      V1.Causal.Cons currentHash
        <$> (me >>= branch2to1 lookupSize lookupCT)
        <*> pure (parentHash, causalbranch2to1' lookupSize lookupCT =<< mp)
    merge -> do
      let tailsList = map (bimap causalHash2to1 (causalbranch2to1' lookupSize lookupCT =<<)) merge
      e <- me
      V1.Causal.Merge currentHash <$> branch2to1 lookupSize lookupCT e <*> pure (Map.fromList tailsList)

-- | Convert a V1 branch to a V2 causal branch.  One V1 raw hash yields
-- both the V2 causal hash and branch hash (same underlying bytes).
causalbranch1to2 :: forall m. Monad m => V1.Branch.Branch m -> V2.Branch.Causal m
causalbranch1to2 (V1.Branch.Branch c) = causal1to2' hash1to2cb hash1to2c branch1to2 c
  where
    hash1to2cb :: V1.Branch.Hash -> (V2.CausalHash, V2.BranchHash)
    hash1to2cb (V1.Causal.RawHash h) = (hc, hb)
      where
        h2 = hash1to2 h
        hc = V2.CausalHash h2
        hb = V2.BranchHash h2
    hash1to2c :: V1.Branch.Hash -> V2.CausalHash
    hash1to2c = V2.CausalHash . hash1to2 . V1.Causal.unRawHash
    -- Pin the type arguments of the generic worker for this use site.
    causal1to2' = causal1to2 @m @V1.Branch.Raw @V2.CausalHash @V2.BranchHash @(V1.Branch.Branch0 m) @(V2.Branch.Branch m)

-- | Generic V1 causal -> V2 causal conversion: One/Cons/Merge collapse
-- into a single V2 constructor with 0, 1, or many parents.
causal1to2 :: forall m h h2c h2e e e2. (Monad m, Ord h2c) => (V1.Causal.RawHash h -> (h2c, h2e)) -> (V1.Causal.RawHash h -> h2c) -> (e -> m e2) -> V1.Causal.Causal m h e -> V2.Causal m h2c h2e e2
causal1to2 h1to22 h1to2 e1to2 = \case
  V1.Causal.One (h1to22 -> (hc, hb)) e -> V2.Causal hc hb Map.empty (e1to2 e)
  V1.Causal.Cons (h1to22 -> (hc, hb)) e (ht, mt) -> V2.Causal hc hb (Map.singleton (h1to2 ht) (causal1to2 h1to22 h1to2 e1to2 <$> mt)) (e1to2 e)
  V1.Causal.Merge (h1to22 -> (hc, hb)) e parents -> V2.Causal hc hb (Map.bimap h1to2 (causal1to2 h1to22 h1to2 e1to2 <$>) parents) (e1to2 e)
-- | Convert a V1 branch shallow value (Branch0) to a V2 branch:
-- V1's Star3 relations are regrouped into V2's nested name -> ref ->
-- metadata maps; patches and children are converted lazily/pointwise.
branch1to2 :: forall m. Monad m => V1.Branch.Branch0 m -> m (V2.Branch.Branch m)
branch1to2 b = do
  terms <- pure $ doTerms (V1.Branch._terms b)
  types <- pure $ doTypes (V1.Branch._types b)
  patches <- pure $ doPatches (V1.Branch._edits b)
  children <- pure $ doChildren (V1.Branch._children b)
  pure $ V2.Branch.Branch terms types patches children
  where
    -- is there a more readable way to structure these that's also linear?
    -- Outer comprehension: one entry per name; inner: one entry per
    -- referent bound to that name, with its metadata.  Note the pair
    -- swap in mdrefs1to2: V1 stores (type, value), V2 keys by value.
    doTerms :: V1.Branch.Star V1.Referent.Referent V1.NameSegment -> Map V2.Branch.NameSegment (Map V2.Referent.Referent (m V2.Branch.MdValues))
    doTerms s =
      Map.fromList
        [ (namesegment1to2 ns, m2)
        | ns <- toList . Relation.ran $ V1.Star3.d1 s
        , let m2 =
                Map.fromList
                  [ (referent1to2 r, pure md)
                  | r <- toList . Relation.lookupRan ns $ V1.Star3.d1 s
                  , let
                      mdrefs1to2 (typeR1, valR1) = (reference1to2 valR1, reference1to2 typeR1)
                      md = V2.Branch.MdValues . Map.fromList . map mdrefs1to2 . toList . Relation.lookupDom r $ V1.Star3.d3 s
                  ]
        ]
    -- Same shape as doTerms, but keyed by type references.
    doTypes :: V1.Branch.Star V1.Reference.Reference V1.NameSegment -> Map V2.Branch.NameSegment (Map V2.Reference.Reference (m V2.Branch.MdValues))
    doTypes s =
      Map.fromList
        [ (namesegment1to2 ns, m2)
        | ns <- toList . Relation.ran $ V1.Star3.d1 s
        , let m2 =
                Map.fromList
                  [ (reference1to2 r, pure md)
                  | r <- toList . Relation.lookupRan ns $ V1.Star3.d1 s
                  , let
                      mdrefs1to2 (typeR1, valR1) = (reference1to2 valR1, reference1to2 typeR1)
                      md = V2.Branch.MdValues . Map.fromList . map mdrefs1to2 . toList . Relation.lookupDom r $ V1.Star3.d3 s
                  ]
        ]
    doPatches :: Map V1.NameSegment (V1.Branch.EditHash, m V1.Patch) -> Map V2.Branch.NameSegment (V2.PatchHash, m V2.Branch.Patch)
    doPatches = Map.bimap namesegment1to2 (bimap edithash1to2 (fmap patch1to2))
    doChildren :: Map V1.NameSegment (V1.Branch.Branch m) -> Map V2.Branch.NameSegment (V2.Branch.Causal m)
    doChildren = Map.bimap namesegment1to2 causalbranch1to2
-- | Convert a V2 patch to V1.  The left-hand sides of V2 term edits are
-- expected to be plain references; a constructor on the LHS (or RHS) is
-- a hard error.  The @String@ argument to @lookupSize@ is a debugging
-- label describing which lookup failed.
patch2to1 ::
  forall m.
  Monad m =>
  (String -> Hash -> m V1.Reference.Size) ->
  V2.Branch.Patch ->
  m V1.Patch
patch2to1 lookupSize (V2.Branch.Patch v2termedits v2typeedits) = do
  termEdits <- Map.bitraverse referent2to1' (Set.traverse termedit2to1) v2termedits
  typeEdits <- Map.bitraverse (reference2to1 (lookupSize "patch->old type")) (Set.traverse typeedit2to1) v2typeedits
  pure $ V1.Patch (Relation.fromMultimap termEdits) (Relation.fromMultimap typeEdits)
  where
    referent2to1' :: V2.Referent -> m V1.Reference
    referent2to1' = \case
      V2.Referent.Ref r -> reference2to1 (lookupSize "patch->old term") r
      V2.Referent.Con {} -> error "found referent on LHS when converting patch2to1"
    termedit2to1 :: V2.TermEdit.TermEdit -> m V1.TermEdit.TermEdit
    termedit2to1 = \case
      V2.TermEdit.Replace (V2.Referent.Ref r) t ->
        V1.TermEdit.Replace <$> reference2to1 (lookupSize "patch->new term") r <*> typing2to1 t
      V2.TermEdit.Replace {} -> error "found referent on RHS when converting patch2to1"
      V2.TermEdit.Deprecate -> pure V1.TermEdit.Deprecate
    typeedit2to1 :: V2.TypeEdit.TypeEdit -> m V1.TypeEdit.TypeEdit
    typeedit2to1 = \case
      V2.TypeEdit.Replace r -> V1.TypeEdit.Replace <$> reference2to1 (lookupSize "patch->new type") r
      V2.TypeEdit.Deprecate -> pure V1.TypeEdit.Deprecate
    typing2to1 t = pure $ case t of
      V2.TermEdit.Same -> V1.TermEdit.Same
      V2.TermEdit.Subtype -> V1.TermEdit.Subtype
      V2.TermEdit.Different -> V1.TermEdit.Different

-- | Pure inverse of 'patch2to1': V1 relations become multimaps, and V1
-- term references are wrapped as V2 'V2.Referent.Ref's.
patch1to2 :: V1.Patch -> V2.Branch.Patch
patch1to2 (V1.Patch v1termedits v1typeedits) = V2.Branch.Patch v2termedits v2typeedits
  where
    v2termedits = Map.bimap (V2.Referent.Ref . reference1to2) (Set.map termedit1to2) $ Relation.domain v1termedits
    v2typeedits = Map.bimap reference1to2 (Set.map typeedit1to2) $ Relation.domain v1typeedits
    termedit1to2 :: V1.TermEdit.TermEdit -> V2.TermEdit.TermEdit
    termedit1to2 = \case
      V1.TermEdit.Replace r t -> V2.TermEdit.Replace (V2.Referent.Ref (reference1to2 r)) (typing1to2 t)
      V1.TermEdit.Deprecate -> V2.TermEdit.Deprecate
    typeedit1to2 :: V1.TypeEdit.TypeEdit -> V2.TypeEdit.TypeEdit
    typeedit1to2 = \case
      V1.TypeEdit.Replace r -> V2.TypeEdit.Replace (reference1to2 r)
      V1.TypeEdit.Deprecate -> V2.TypeEdit.Deprecate
    typing1to2 = \case
      V1.TermEdit.Same -> V2.TermEdit.Same
      V1.TermEdit.Subtype -> V2.TermEdit.Subtype
      V1.TermEdit.Different -> V2.TermEdit.Different
-- Trivial newtype repacks between the two schemas.
edithash2to1 :: V2.PatchHash -> V1.Branch.EditHash
edithash2to1 = hash2to1 . V2.unPatchHash

edithash1to2 :: V1.Branch.EditHash -> V2.PatchHash
edithash1to2 = V2.PatchHash . hash1to2

namesegment2to1 :: V2.Branch.NameSegment -> V1.NameSegment
namesegment2to1 (V2.Branch.NameSegment t) = V1.NameSegment t

namesegment1to2 :: V1.NameSegment -> V2.Branch.NameSegment
namesegment1to2 (V1.NameSegment t) = V2.Branch.NameSegment t
-- | Convert a V2 branch to a V1 shallow branch (Branch0): the nested
-- name -> ref -> metadata maps are folded back into V1 Star3 relations;
-- patches and children are converted pointwise.
branch2to1 ::
  Monad m =>
  (String -> Hash -> m V1.Reference.Size) ->
  (V2.Reference -> m CT.ConstructorType) ->
  V2.Branch.Branch m ->
  m (V1.Branch.Branch0 m)
branch2to1 lookupSize lookupCT (V2.Branch.Branch v2terms v2types v2patches v2children) = do
  v1terms <- toStar (reference2to1 $ lookupSize "term metadata") =<< Map.bitraverse (pure . namesegment2to1) (Map.bitraverse (referent2to1 (lookupSize "term") lookupCT) id) v2terms
  v1types <- toStar (reference2to1 $ lookupSize "type metadata") =<< Map.bitraverse (pure . namesegment2to1) (Map.bitraverse (reference2to1 (lookupSize "type")) id) v2types
  v1patches <- Map.bitraverse (pure . namesegment2to1) (bitraverse (pure . edithash2to1) (fmap (patch2to1 lookupSize))) v2patches
  v1children <- Map.bitraverse (pure . namesegment2to1) (causalbranch2to1 lookupSize lookupCT) v2children
  pure $ V1.Branch.branch0 v1terms v1types v1children v1patches
  where
    -- Fold the nested maps into one Star3 by inserting (ref, name,
    -- metadata) triples one at a time.
    toStar :: forall m name ref. (Monad m, Ord name, Ord ref) => (V2.Reference -> m V1.Reference) -> Map name (Map ref V2.Branch.MdValues) -> m (V1.Metadata.Star ref name)
    toStar mdref2to1 m = foldM insert mempty (Map.toList m)
      where
        insert star (name, m) = foldM (insert' name) star (Map.toList m)
        insert' :: name -> V1.Metadata.Star ref name -> (ref, V2.Branch.MdValues) -> m (V1.Metadata.Star ref name)
        insert' name star (ref, V2.Branch.MdValues mdvals) = do
          let facts = Set.singleton ref
              names = Relation.singleton ref name
          types :: Relation.Relation ref V1.Metadata.Type <-
            Relation.insertManyRan ref <$> traverse mdref2to1 (Map.elems mdvals) <*> pure mempty
          -- mdvals maps value-ref -> type-ref, so the lambda's (t, v)
          -- binders are misleadingly named: the first component is the
          -- value and the second the type; swapping them yields the
          -- (Type, Value) pairs V1 stores.
          vals :: Relation.Relation ref (V1.Metadata.Type, V1.Metadata.Value) <-
            Relation.insertManyRan ref <$> (traverse (\(t, v) -> (,) <$> mdref2to1 v <*> mdref2to1 t) (Map.toList mdvals)) <*> pure mempty
          pure $ star <> V1.Star3.Star3 facts names types vals
-- V2.Branch0 should have the metadata types, could bulk load with relational operations
-- type Star a n = Star3 a n Type (Type, Value)
-- type Star a n = Star3 a n Type (Reference, Reference)
-- MdValues is a Set V2.Reference
-- (Name, TermRef, Metadata Type, Metadata Value) <-- decided not this (because name was too long/repetitive?)
-- (BranchId/Hash, TermRef, Metadata Type, Metadata Value) <-- what about this
-- data V2.Branch m = Branch
-- { terms :: Map NameSegment (Map Referent (m MdValues)),
-- types :: Map NameSegment (Map Reference (m MdValues)),
-- patches :: Map NameSegment (PatchHash, m Patch),
-- children :: Map NameSegment (Causal m)
-- }
-- branch0 :: Metadata.Star Referent NameSegment
-- -> Metadata.Star Reference NameSegment
-- -> Map NameSegment (Branch m)
-- -> Map NameSegment (EditHash, m Patch)
-- -> Branch0 m
-- type Metadata.Star a n = Star3 a n Type (Type, Value)
-- data Star3 fact d1 d2 d3
-- = Star3 { fact :: Set fact
-- , d1 :: Relation fact d1
-- , d2 :: Relation fact d2
-- , d3 :: Relation fact d3 } deriving (Eq,Ord,Show)
| unisonweb/platform | parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs | mit | 29,201 | 151 | 21 | 6,189 | 10,014 | 5,299 | 4,715 | -1 | -1 |
{-
see Chapter 16 of the Haskell 2010 Language Report
-}
module Data.Char where
-- | Opaque character type: only the type name is declared here, with no
-- constructors (see Chapter 16 of the Haskell 2010 Language Report).
data Char
| evilcandybag/JSHC | hslib/Data/Char.hs | mit | 93 | 0 | 3 | 18 | 10 | 7 | 3 | -1 | -1 |
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-- | This module holds the 'Service' type class.
module Database.Hasqueue.Core.Service ( Service(..) ) where
import Pipes
-- | A 'Service' is a composable abstraction that can be started, stopped,
-- and converted into a 'Pipe'.
-- The functional dependency @s -> a b@ means the concrete service type
-- determines its input type @a@ and output type @b@.
class Service s a b | s -> a b where
    -- | Start the 'Service'.
    startService :: IO s
    -- | Stop the 'Service'.
    stopService :: s -> IO ()
    -- | Access a streaming interface for the 'Service'.
    toPipe :: s -> Pipe a b IO ()
| nahiluhmot/hasqueue | src/Database/Hasqueue/Core/Service.hs | mit | 553 | 0 | 9 | 119 | 90 | 54 | 36 | 8 | 0 |
import Data.List
-- | A binary tree: either empty, or a node holding a value and two
-- subtrees (left, right).
data BinTree a = Empty
               | Node a (BinTree a) (BinTree a)
               deriving (Eq, Ord)
-- declare BinTree a to be an instance of Show
-- Pretty-printer: renders the tree with ASCII branch connectors.
-- Left untouched because the exact output strings are the contract.
instance (Show a) => Show (BinTree a) where
  -- will start by a '<' before the root
  -- and put a : a begining of line
  show t = "< " ++ replace '\n' "\n: " (treeshow "" t)
    where
      -- treeshow pref Tree
      -- shows a tree and starts each line with pref
      -- We don't display the Empty tree
      treeshow pref Empty = ""
      -- Leaf
      treeshow pref (Node x Empty Empty) =
        (pshow pref x)
      -- Right branch is empty
      treeshow pref (Node x left Empty) =
        (pshow pref x) ++ "\n" ++
        (showSon pref "`--" " " left)
      -- Left branch is empty
      treeshow pref (Node x Empty right) =
        (pshow pref x) ++ "\n" ++
        (showSon pref "`--" " " right)
      -- Tree with left and right children non empty
      treeshow pref (Node x left right) =
        (pshow pref x) ++ "\n" ++
        (showSon pref "|--" "| " left) ++ "\n" ++
        (showSon pref "`--" " " right)
      -- shows a tree using some prefixes to make it nice
      showSon pref before next t =
        pref ++ before ++ treeshow (pref ++ next) t
      -- pshow replaces "\n" by "\n"++pref
      pshow pref x = replace '\n' ("\n"++pref) (show x)
      -- replaces one char by another string
      replace c new string =
        concatMap (change c new) string
        where
          change c new x
            | x == c = new
            | otherwise = x:[] -- "x"
-- | Build a binary search tree from a list: the first element becomes
-- the root, the rest are split into strictly-smaller and strictly-larger
-- partitions (so duplicates of the root are silently dropped).
treeFromList :: (Ord a) => [a] -> BinTree a
treeFromList [] = Empty
treeFromList (root:rest) =
  Node root (treeFromList smaller) (treeFromList larger)
  where
    smaller = [y | y <- rest, y < root]
    larger  = [y | y <- rest, y > root]
-- Build a BST from a sample list and print it with the custom Show.
main = print $ treeFromList [7,2,4,8]
| duncanfinney/haskell-play | 05-trees.hs | mit | 2,514 | 19 | 11 | 1,323 | 554 | 292 | 262 | -1 | -1 |
module FVL.Parser
( ParseError
, parseString
, parseFile
) where
import Text.Parsec hiding (Empty)
import Text.Parsec.String
import Text.Parsec.Expr
import Control.Monad
import Control.Applicative ((<$>), (<$), (<*>), (<*), (*>))
import FVL.Algebra
import FVL.EFAST
import FVL.Lexer
-- | Parser producing a fixed-point expression tree.
type ExprParser = Parser (Fix Expr)

-- | Integer literal.
cint :: ExprParser
cint = Fx . CInt <$> integer

-- | Boolean literal (@True@ / @False@ keywords).
cbool :: Parser (Fix Expr)
cbool = Fx (CBool True) <$ reserved "True"
  <|> Fx (CBool False) <$ reserved "False"

-- | Variable reference.
cvar :: ExprParser
cvar = Fx . CVar <$> identifier

-- Operator-table helpers: wrap an AST constructor as a prefix/infix
-- entry, re-fixing the result with 'Fx'.
prefix n f = Prefix (reservedOp n *> return (Fx . f))
binary n f a = Infix (reservedOp n *> return (\x -> Fx . f x)) a
-- Operator precedence table, highest-binding first: unary not, then
-- function application, multiplicative, additive, comparisons, boolean
-- and/or, list cons (right-assoc), and sequencing.
opTable = [ [ prefix "!" Not ]
          , [ appl ]
          , [ binary "*" Mul AssocLeft
            , binary "/" Div AssocLeft
            , binary "%" Mod AssocLeft ]
          , [ binary "+" Add AssocLeft
            , binary "-" Sub AssocLeft
            ]
          , [ binary "=" Equal AssocLeft
            , binary "<" Less AssocLeft
            , binary "<=" LessEq AssocLeft
            , binary ">" Great AssocLeft
            , binary ">=" GreatEq AssocLeft
            ]
          , [ binary "&&" And AssocLeft ]
          , [ binary "||" Or AssocLeft ]
          , [ binary ":" Cons AssocRight ]
          , [ binary ";" Semi AssocLeft ]
          ]

-- | Expression built from 'term's and the operator table above.
opExpr :: ExprParser
opExpr = buildExpressionParser opTable term

-- | Bracketed list literal, desugared to a chain of 'Cons' ending in 'Empty'.
list :: ExprParser
list = toCons <$> brackets (commaSep expr)
  where toCons [] = Fx Empty
        toCons (x:xs) = Fx $ Cons x (toCons xs)
-- | @If c Then t Else e@.
ifExpr :: ExprParser
ifExpr = reserved "If" *> ((\x y -> Fx . If x y)
  <$> expr <*> (reserved "Then" *> expr) <*> (reserved "Else" *> expr))

-- | @Function x -> body@, a single-parameter lambda.
function :: ExprParser
function = reserved "Function" *> ((\x -> Fx . Function x)
  <$> identifier <*> (reservedOp "->" *> expr))

-- Juxtaposition as application: whitespace acts as a left-associative
-- infix operator, provided the next token is not a reserved operator.
appl = Infix space AssocLeft
  where space = whiteSpace
          *> notFollowedBy (choice . map reservedOp $ opNames)
          *> return (\x y -> Fx $ Appl x y)
-- | Parse a let binding: @Let f x1 .. xn = e In e'@, where the first
-- identifier is the bound name and any following identifiers are curried
-- parameter names.
letExpr :: ExprParser
letExpr = reserved "Let" *> do
  s <- sepBy1 identifier whiteSpace
  reservedOp "="
  e <- expr
  reserved "In"
  e' <- expr
  case s of
    (x:xs) -> return . Fx $ Let x xs e e'
    -- 'sepBy1' guarantees at least one identifier; this alternative is
    -- unreachable but keeps the case total (-Wincomplete-patterns).
    [] -> fail "letExpr: impossible: sepBy1 returned no identifiers"
-- | Parse a list-case expression:
-- @Case p Of [] -> x | (s : t) -> y@, where @s@ names the head and
-- @t@ names the tail in the non-empty branch.
caseExpr :: ExprParser
caseExpr = reserved "Case" *> do
  p <- expr
  reserved "Of" *> symbol "[]" *> reservedOp "->"
  x <- expr
  reservedOp "|"
  (s, t) <- parens $ do{ s' <- identifier
                       ; reservedOp ":"
                       ; t' <- identifier
                       ; return (s', t')
                       }
  reservedOp "->"
  y <- expr
  return . Fx $ Case p x s t y
-- | Atomic terms: literals, variables, lists, or a parenthesized expression.
term :: ExprParser
term = cint
  <|> cbool
  <|> cvar
  <|> list
  <|> parens expr

-- | Top-level expression: keyword forms are tried before the
-- operator-expression fallback.
expr :: ExprParser
expr = function
  <|> letExpr
  <|> ifExpr
  <|> caseExpr
  <|> opExpr
  <|> term
-- | Parse a complete program from a string; the whole input must be
-- consumed (parser is anchored with 'eof').
parseString :: String -> Either ParseError (Fix Expr)
parseString = parse (expr <* eof) ""

-- | Parse a complete program from a file, anchored with 'eof'.
parseFile :: FilePath -> IO (Either ParseError (Fix Expr))
parseFile = parseFromFile (expr <* eof)
| burz/Feval | FVL/Parser.hs | mit | 3,024 | 0 | 13 | 944 | 1,115 | 575 | 540 | 92 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
module Ch_HessConv_Opts where
import System.Console.CmdArgs
-- | Command-line options for the Hessian conversion tool.
data Ch_HessConv_Opts = Ch_HessConv_Opts
  { input :: FilePath        -- ^ file containing the input hessian
  , output :: FilePath       -- ^ destination for the converted hessian
  , addInput :: FilePath     -- ^ auxiliary input (e.g. xyz coordinates)
  , readFormat :: String     -- ^ input format name
  , writeFormat :: String    -- ^ output format name
  }
  deriving (Show,Data,Typeable)

-- Defaults plus cmdargs help/typ annotations; note output defaults to
-- "stdout" while the other fields default to cmdargs' 'def'.
ch_hessConv_Opts = Ch_HessConv_Opts
  { input = def &= help "file with input hessian" &= typ "INPUT"
  , output = "stdout" &= help "file for output hessian" &= typ "OUTPUT"
  , addInput = def &= help "additional input needed, e.g. xyz-file with coordinates" &= typ "INPUT"
  , readFormat = def &= help "format of the input hessian [nwchem,dalton]"
  , writeFormat = def &= help "format of the output hessian [nwchem,dalton]"
  }

-- cmdargs mode used by the executable's argument parser.
mode = cmdArgsMode ch_hessConv_Opts
| sheepforce/Haskell-Tools | ch_hessconv/Ch_HessConv_Opts.hs | gpl-3.0 | 762 | 0 | 9 | 139 | 166 | 93 | 73 | 17 | 1 |
module Language.Objection.TypeCheck
(typeCheck)
where
import qualified Data.Map as M
import Language.Objection.SyntaxTree
-- | Outcome of type checking a module: unqualified success, or failure
-- carrying an error message.
data TypeCheckResult = TypeCheckSuccess
                     | TypeCheckFailure String
                     deriving (Read, Show)
-- | Type check a module.  NOTE(review): currently a stub that accepts
-- every module without inspecting its classes; the previous version
-- bound the class list to an unused name (an -Wall warning).
typeCheck :: Module -> TypeCheckResult
typeCheck (Module _classes) = TypeCheckSuccess
-- | Look up the type of a field within a class of the module.
-- NOTE(review): not yet implemented ('undefined') -- callers will crash.
lookupFieldType :: Module
                -> Identifier -- ^ Class Identifier
                -> Identifier -- ^ Field Identifier
                -> Maybe Type
lookupFieldType = undefined
-- | Look up a method's signature in a class: the (optional) return type
-- and the parameter types.  NOTE(review): not yet implemented
-- ('undefined') -- callers will crash.
lookupMethodType :: Module
                 -> Identifier -- ^ presumably the class identifier (mirrors 'lookupFieldType') -- confirm
                 -> Identifier -- ^ presumably the method identifier -- confirm
                 -> Maybe (Maybe Type, [Type])
lookupMethodType = undefined
| jhance/objection | Language/Objection/TypeCheck.hs | gpl-3.0 | 723 | 0 | 10 | 241 | 136 | 78 | 58 | 19 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# OPTIONS_GHC -fno-warn-overlapping-patterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Handler.DB.RouteProcessperiods where
import Handler.DB.Enums
import Handler.DB.Esqueleto
import Handler.DB.Internal
import Handler.DB.Validation
import qualified Handler.DB.FilterSort as FS
import qualified Handler.DB.PathPieces as PP
import Prelude
import Database.Esqueleto
import Database.Esqueleto.Internal.Sql (unsafeSqlBinOp)
import qualified Database.Persist as P
import Database.Persist.TH
import Yesod.Auth (requireAuth, requireAuthId, YesodAuth, AuthId, YesodAuthPersist, AuthEntity)
import Yesod.Core hiding (fileName, fileContentType)
import Yesod.Persist (runDB, YesodPersist, YesodPersistBackend)
import Control.Monad (when)
import Data.Aeson ((.:), (.:?), (.!=), FromJSON, parseJSON, decode)
import Data.Aeson.TH
import Data.Int
import Data.Word
import Data.Time
import Data.Text.Encoding (encodeUtf8)
import Data.Typeable (Typeable)
import qualified Data.Attoparsec as AP
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as AT
import qualified Data.ByteString.Lazy as LBS
import Data.Maybe
import qualified Data.Text.Read
import qualified Data.Text as T
import Data.Text (Text)
import qualified Data.List as DL
import Control.Monad (mzero, forM_)
import Control.Monad.Trans.Resource (runResourceT)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import qualified Network.HTTP.Conduit as C
import qualified Network.Wai as W
import Data.Conduit.Lazy (lazyConsume)
import Network.HTTP.Types (status200, status400, status403, status404)
import Blaze.ByteString.Builder.ByteString (fromByteString)
import Control.Applicative ((<$>), (<*>))
import qualified Data.HashMap.Lazy as HML
import qualified Data.HashMap.Strict as HMS
import Handler.Utils (nonEmpty)
import Handler.Utils (prepareNewUser,hasWritePerm,hasReadPermMaybe,hasReadPerm)
-- | GET handler listing the 'ProcessPeriod' rows readable by the
-- authenticated user.  Optional query-string parameters: @query@
-- (substring match on the period name), @filter@ and @sort@
-- (JSON-encoded 'FS.Filter' \/ 'FS.Sort' lists), @start@ and @limit@
-- (paging).  Responds with a JSON object of @totalCount@ and @result@.
getProcessperiodsR :: forall master. (
    YesodAuthPersist master,
    AuthEntity master ~ User,
    AuthId master ~ Key User,
    YesodPersistBackend master ~ SqlBackend)
    => HandlerT DB (HandlerT master IO) A.Value
getProcessperiodsR = lift $ runDB $ do
    authId <- lift $ requireAuthId
    (filterParam_query) <- lookupGetParam "query"
    defaultFilterParam <- lookupGetParam "filter"
    let defaultFilterJson = (maybe Nothing (decode . LBS.fromChunks . (:[]) . encodeUtf8) defaultFilterParam) :: Maybe [FS.Filter]
    defaultSortParam <- lookupGetParam "sort"
    let defaultSortJson = (maybe Nothing (decode . LBS.fromChunks . (:[]) . encodeUtf8) defaultSortParam) :: Maybe [FS.Sort]
    defaultOffsetParam <- lookupGetParam "start"
    defaultLimitParam <- lookupGetParam "limit"
    let defaultOffset = (maybe Nothing PP.fromPathPiece defaultOffsetParam) :: Maybe Int64
    let defaultLimit = (maybe Nothing PP.fromPathPiece defaultLimitParam) :: Maybe Int64
    -- baseQuery is shared by the count query (flag False: no paging or
    -- ordering) and the data query (flag True).  The permission check
    -- and the filter clauses apply in both cases, so the reported
    -- totalCount matches the filtered result set.
    let baseQuery limitOffsetOrder = from $ \(pp ) -> do
        let ppId' = pp ^. ProcessPeriodId
        where_ (hasReadPerm (val authId) (pp ^. ProcessPeriodId))
        _ <- when limitOffsetOrder $ do
            offset 0
            limit 10000
            -- Sort specifications apply in the order given; unknown
            -- field names or directions are silently ignored.
            case defaultSortJson of
                Just xs -> mapM_ (\sjm -> case FS.s_field sjm of
                        "firstDay" -> case (FS.s_direction sjm) of
                            "ASC" -> orderBy [ asc (pp ^. ProcessPeriodFirstDay) ]
                            "DESC" -> orderBy [ desc (pp ^. ProcessPeriodFirstDay) ]
                            _ -> return ()
                        "lastDay" -> case (FS.s_direction sjm) of
                            "ASC" -> orderBy [ asc (pp ^. ProcessPeriodLastDay) ]
                            "DESC" -> orderBy [ desc (pp ^. ProcessPeriodLastDay) ]
                            _ -> return ()
                        "queued" -> case (FS.s_direction sjm) of
                            "ASC" -> orderBy [ asc (pp ^. ProcessPeriodQueued) ]
                            "DESC" -> orderBy [ desc (pp ^. ProcessPeriodQueued) ]
                            _ -> return ()
                        "processed" -> case (FS.s_direction sjm) of
                            "ASC" -> orderBy [ asc (pp ^. ProcessPeriodProcessed) ]
                            "DESC" -> orderBy [ desc (pp ^. ProcessPeriodProcessed) ]
                            _ -> return ()
                        "name" -> case (FS.s_direction sjm) of
                            "ASC" -> orderBy [ asc (pp ^. ProcessPeriodName) ]
                            "DESC" -> orderBy [ desc (pp ^. ProcessPeriodName) ]
                            _ -> return ()
                        _ -> return ()
                    ) xs
                Nothing -> orderBy [ desc (pp ^. ProcessPeriodFirstDay) ]
            case defaultOffset of
                Just o -> offset o
                Nothing -> return ()
            case defaultLimit of
                Just l -> limit (min 10000 l)
                Nothing -> return ()
        -- Field filters from the JSON @filter@ parameter; entries whose
        -- field name or value cannot be interpreted are ignored.
        case defaultFilterJson of
            Just xs -> mapM_ (\fjm -> case FS.f_field fjm of
                    "id" -> case (FS.f_value fjm >>= PP.fromPathPiece) of
                        (Just v') -> where_ $ defaultFilterOp (FS.f_negate fjm) (FS.f_comparison fjm) (pp ^. ProcessPeriodId) (val v')
                        _ -> return ()
                    "firstDay" -> case (FS.f_value fjm >>= PP.fromPathPiece) of
                        (Just v') -> where_ $ defaultFilterOp (FS.f_negate fjm) (FS.f_comparison fjm) (pp ^. ProcessPeriodFirstDay) ((val v'))
                        _ -> return ()
                    "lastDay" -> case (FS.f_value fjm >>= PP.fromPathPiece) of
                        (Just v') -> where_ $ defaultFilterOp (FS.f_negate fjm) (FS.f_comparison fjm) (pp ^. ProcessPeriodLastDay) ((val v'))
                        _ -> return ()
                    "queued" -> case (FS.f_value fjm >>= PP.fromPathPiece) of
                        (Just v') -> where_ $ defaultFilterOp (FS.f_negate fjm) (FS.f_comparison fjm) (pp ^. ProcessPeriodQueued) ((val v'))
                        _ -> return ()
                    "processed" -> case (FS.f_value fjm >>= PP.fromPathPiece) of
                        (Just v') -> where_ $ defaultFilterOp (FS.f_negate fjm) (FS.f_comparison fjm) (pp ^. ProcessPeriodProcessed) ((val v'))
                        _ -> return ()
                    "name" -> case (FS.f_value fjm >>= PP.fromPathPiece) of
                        (Just v') -> where_ $ defaultFilterOp (FS.f_negate fjm) (FS.f_comparison fjm) (pp ^. ProcessPeriodName) ((val v'))
                        _ -> return ()
                    _ -> return ()
                ) xs
            Nothing -> return ()
        -- Free-text search: case-insensitive substring match on name.
        case FS.getDefaultFilter filterParam_query defaultFilterJson "query" of
            Just localParam -> do
                where_ $ (pp ^. ProcessPeriodName) `ilike` ((((val "%")) ++. ((val (localParam :: Text)))) ++. ((val "%")))
            Nothing -> return ()
        return (pp ^. ProcessPeriodId, pp ^. ProcessPeriodFirstDay, pp ^. ProcessPeriodLastDay, pp ^. ProcessPeriodQueued, pp ^. ProcessPeriodProcessed, pp ^. ProcessPeriodName)
    -- Row count of the full (unpaged, unordered) filtered result set.
    count <- select $ do
        baseQuery False
        let countRows' = countRows
        orderBy []
        return $ (countRows' :: SqlExpr (Database.Esqueleto.Value Int))
    results <- select $ baseQuery True
    (return $ A.object [
        "totalCount" .= ((\(Database.Esqueleto.Value v) -> (v::Int)) (head count)),
        "result" .= (toJSON $ map (\row -> case row of
                ((Database.Esqueleto.Value f1), (Database.Esqueleto.Value f2), (Database.Esqueleto.Value f3), (Database.Esqueleto.Value f4), (Database.Esqueleto.Value f5), (Database.Esqueleto.Value f6)) -> A.object [
                    "id" .= toJSON f1,
                    "firstDay" .= toJSON f2,
                    "lastDay" .= toJSON f3,
                    "queued" .= toJSON f4,
                    "processed" .= toJSON f5,
                    "name" .= toJSON f6
                    ]
                _ -> A.object []
            ) results)
        ])
| tlaitinen/receipts | backend/Handler/DB/RouteProcessperiods.hs | gpl-3.0 | 8,787 | 0 | 35 | 2,671 | 2,457 | 1,313 | 1,144 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.CreateApp
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates an app for a specified stack. For more information, see <http://docs.aws.amazon.com/opsworks/latest/userguide/workingapps-creating.html Creating Apps>.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack, or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_CreateApp.html>
module Network.AWS.OpsWorks.CreateApp
(
-- * Request
CreateApp
-- ** Request constructor
, createApp
-- ** Request lenses
, caAppSource
, caAttributes
, caDataSources
, caDescription
, caDomains
, caEnableSsl
, caEnvironment
, caName
, caShortname
, caSslConfiguration
, caStackId
, caType
-- * Response
, CreateAppResponse
-- ** Response constructor
, createAppResponse
-- ** Response lenses
, carAppId
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
-- | Request payload for the OpsWorks @CreateApp@ call; construct with
-- 'createApp' and adjust fields via the @ca*@ lenses below.
data CreateApp = CreateApp
    { _caAppSource        :: Maybe Source                   -- ^ app repository source
    , _caAttributes       :: Map AppAttributesKeys Text     -- ^ user-defined stack attributes
    , _caDataSources      :: List "DataSources" DataSource  -- ^ the app's data sources
    , _caDescription      :: Maybe Text                     -- ^ description of the app
    , _caDomains          :: List "Domains" Text            -- ^ virtual host domains
    , _caEnableSsl        :: Maybe Bool                     -- ^ whether to enable SSL
    , _caEnvironment      :: List "Environment" EnvironmentVariable -- ^ environment variables
    , _caName             :: Text                           -- ^ app name
    , _caShortname        :: Maybe Text                     -- ^ app short name
    , _caSslConfiguration :: Maybe SslConfiguration         -- ^ SSL configuration
    , _caStackId          :: Text                           -- ^ stack ID
    , _caType             :: AppType                        -- ^ app type
    } deriving (Eq, Read, Show)
-- | 'CreateApp' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'caAppSource' @::@ 'Maybe' 'Source'
--
-- * 'caAttributes' @::@ 'HashMap' 'AppAttributesKeys' 'Text'
--
-- * 'caDataSources' @::@ ['DataSource']
--
-- * 'caDescription' @::@ 'Maybe' 'Text'
--
-- * 'caDomains' @::@ ['Text']
--
-- * 'caEnableSsl' @::@ 'Maybe' 'Bool'
--
-- * 'caEnvironment' @::@ ['EnvironmentVariable']
--
-- * 'caName' @::@ 'Text'
--
-- * 'caShortname' @::@ 'Maybe' 'Text'
--
-- * 'caSslConfiguration' @::@ 'Maybe' 'SslConfiguration'
--
-- * 'caStackId' @::@ 'Text'
--
-- * 'caType' @::@ 'AppType'
--
-- Only the three required fields are positional; every optional field
-- starts as 'Nothing' / 'mempty' and can be set through the lenses.
createApp :: Text -- ^ 'caStackId'
          -> Text -- ^ 'caName'
          -> AppType -- ^ 'caType'
          -> CreateApp
createApp p1 p2 p3 = CreateApp
    { _caStackId          = p1
    , _caName             = p2
    , _caType             = p3
    , _caShortname        = Nothing
    , _caDescription      = Nothing
    , _caDataSources      = mempty
    , _caAppSource        = Nothing
    , _caDomains          = mempty
    , _caEnableSsl        = Nothing
    , _caSslConfiguration = Nothing
    , _caAttributes       = mempty
    , _caEnvironment      = mempty
    }
-- Lenses over the 'CreateApp' request fields.
-- | A 'Source' object that specifies the app repository.
caAppSource :: Lens' CreateApp (Maybe Source)
caAppSource = lens _caAppSource (\s a -> s { _caAppSource = a })
-- | One or more user-defined key/value pairs to be added to the stack attributes.
caAttributes :: Lens' CreateApp (HashMap AppAttributesKeys Text)
caAttributes = lens _caAttributes (\s a -> s { _caAttributes = a }) . _Map
-- | The app's data source.
caDataSources :: Lens' CreateApp [DataSource]
caDataSources = lens _caDataSources (\s a -> s { _caDataSources = a }) . _List
-- | A description of the app.
caDescription :: Lens' CreateApp (Maybe Text)
caDescription = lens _caDescription (\s a -> s { _caDescription = a })
-- | The app virtual host settings, with multiple domains separated by commas. For
-- example: ''www.example.com, example.com''
caDomains :: Lens' CreateApp [Text]
caDomains = lens _caDomains (\s a -> s { _caDomains = a }) . _List
-- | Whether to enable SSL for the app.
caEnableSsl :: Lens' CreateApp (Maybe Bool)
caEnableSsl = lens _caEnableSsl (\s a -> s { _caEnableSsl = a })
-- | An array of 'EnvironmentVariable' objects that specify environment variables to
-- be associated with the app. You can specify up to ten environment variables.
-- After you deploy the app, these variables are defined on the associated app
-- server instance.
--
-- This parameter is supported only by Chef 11.10 stacks. If you have specified
-- one or more environment variables, you cannot modify the stack's Chef version.
caEnvironment :: Lens' CreateApp [EnvironmentVariable]
caEnvironment = lens _caEnvironment (\s a -> s { _caEnvironment = a }) . _List
-- | The app name.
caName :: Lens' CreateApp Text
caName = lens _caName (\s a -> s { _caName = a })
-- | The app's short name.
caShortname :: Lens' CreateApp (Maybe Text)
caShortname = lens _caShortname (\s a -> s { _caShortname = a })
-- | An 'SslConfiguration' object with the SSL configuration.
caSslConfiguration :: Lens' CreateApp (Maybe SslConfiguration)
caSslConfiguration =
    lens _caSslConfiguration (\s a -> s { _caSslConfiguration = a })
-- | The stack ID.
caStackId :: Lens' CreateApp Text
caStackId = lens _caStackId (\s a -> s { _caStackId = a })
-- | The app type. Each supported type is associated with a particular layer. For
-- example, PHP applications are associated with a PHP layer. AWS OpsWorks
-- deploys an application to those instances that are members of the
-- corresponding layer.
caType :: Lens' CreateApp AppType
caType = lens _caType (\s a -> s { _caType = a })
-- | Response to a @CreateApp@ call, carrying the new app's ID.
newtype CreateAppResponse = CreateAppResponse
    { _carAppId :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)
-- | 'CreateAppResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'carAppId' @::@ 'Maybe' 'Text'
--
createAppResponse :: CreateAppResponse
createAppResponse = CreateAppResponse
    { _carAppId = Nothing
    }
-- | The app ID.
carAppId :: Lens' CreateAppResponse (Maybe Text)
carAppId = lens _carAppId (\s a -> s { _carAppId = a })
-- Wire-format instances for the CreateApp request/response pair:
-- request path, query string, headers, JSON body, and the AWS request
-- plumbing tying them to the OpsWorks service.
instance ToPath CreateApp where
    toPath = const "/"
instance ToQuery CreateApp where
    toQuery = const mempty
instance ToHeaders CreateApp
instance ToJSON CreateApp where
    toJSON CreateApp{..} = object
        [ "StackId" .= _caStackId
        , "Shortname" .= _caShortname
        , "Name" .= _caName
        , "Description" .= _caDescription
        , "DataSources" .= _caDataSources
        , "Type" .= _caType
        , "AppSource" .= _caAppSource
        , "Domains" .= _caDomains
        , "EnableSsl" .= _caEnableSsl
        , "SslConfiguration" .= _caSslConfiguration
        , "Attributes" .= _caAttributes
        , "Environment" .= _caEnvironment
        ]
instance AWSRequest CreateApp where
    type Sv CreateApp = OpsWorks
    type Rs CreateApp = CreateAppResponse
    request = post "CreateApp"
    response = jsonResponse
instance FromJSON CreateAppResponse where
    parseJSON = withObject "CreateAppResponse" $ \o -> CreateAppResponse
        <$> o .:? "AppId"
| dysinger/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/CreateApp.hs | mpl-2.0 | 8,171 | 0 | 10 | 1,948 | 1,255 | 750 | 505 | 124 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudResourceManager.Projects.Undelete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Restores the Project identified by the specified \`project_id\` (for
-- example, \`my-project-123\`). You can only use this method for a Project
-- that has a lifecycle state of DELETE_REQUESTED. After deletion starts,
-- the Project cannot be restored. The caller must have modify permissions
-- for this Project.
--
-- /See:/ <https://cloud.google.com/resource-manager Google Cloud Resource Manager API Reference> for @cloudresourcemanager.projects.undelete@.
module Network.Google.Resource.CloudResourceManager.Projects.Undelete
(
-- * REST Resource
ProjectsUndeleteResource
-- * Creating a Request
, projectsUndelete
, ProjectsUndelete
-- * Request Lenses
, puXgafv
, puUploadProtocol
, puPp
, puAccessToken
, puUploadType
, puPayload
, puBearerToken
, puProjectId
, puCallback
) where
import Network.Google.Prelude
import Network.Google.ResourceManager.Types
-- | A resource alias for @cloudresourcemanager.projects.undelete@ method which the
-- 'ProjectsUndelete' request conforms to.
-- NOTE(review): 'CaptureMode' pairs the captured project id with the
-- custom "undelete" verb; the request body and response are both JSON.
type ProjectsUndeleteResource =
     "v1" :>
       "projects" :>
         CaptureMode "projectId" "undelete" Text :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "pp" Bool :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "bearer_token" Text :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] UndeleteProjectRequest :>
                             Post '[JSON] Empty
-- | Restores the Project identified by the specified \`project_id\` (for
-- example, \`my-project-123\`). You can only use this method for a Project
-- that has a lifecycle state of DELETE_REQUESTED. After deletion starts,
-- the Project cannot be restored. The caller must have modify permissions
-- for this Project.
--
-- /See:/ 'projectsUndelete' smart constructor.
data ProjectsUndelete = ProjectsUndelete'
    { _puXgafv :: !(Maybe Xgafv)               -- ^ V1 error format
    , _puUploadProtocol :: !(Maybe Text)       -- ^ upload protocol (e.g. raw, multipart)
    , _puPp :: !Bool                           -- ^ pretty-print response
    , _puAccessToken :: !(Maybe Text)          -- ^ OAuth access token
    , _puUploadType :: !(Maybe Text)           -- ^ legacy upload protocol
    , _puPayload :: !UndeleteProjectRequest    -- ^ request body
    , _puBearerToken :: !(Maybe Text)          -- ^ OAuth bearer token
    , _puProjectId :: !Text                    -- ^ project ID (required)
    , _puCallback :: !(Maybe Text)             -- ^ JSONP callback
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProjectsUndelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'puXgafv'
--
-- * 'puUploadProtocol'
--
-- * 'puPp'
--
-- * 'puAccessToken'
--
-- * 'puUploadType'
--
-- * 'puPayload'
--
-- * 'puBearerToken'
--
-- * 'puProjectId'
--
-- * 'puCallback'
-- Only the payload and the project id are positional; '_puPp' defaults
-- to pretty-printing enabled, every other field to 'Nothing'.
projectsUndelete
    :: UndeleteProjectRequest -- ^ 'puPayload'
    -> Text -- ^ 'puProjectId'
    -> ProjectsUndelete
projectsUndelete pPuPayload_ pPuProjectId_ =
    ProjectsUndelete'
    { _puXgafv = Nothing
    , _puUploadProtocol = Nothing
    , _puPp = True
    , _puAccessToken = Nothing
    , _puUploadType = Nothing
    , _puPayload = pPuPayload_
    , _puBearerToken = Nothing
    , _puProjectId = pPuProjectId_
    , _puCallback = Nothing
    }
-- Lenses over the 'ProjectsUndelete' request fields.
-- | V1 error format.
puXgafv :: Lens' ProjectsUndelete (Maybe Xgafv)
puXgafv = lens _puXgafv (\ s a -> s{_puXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
puUploadProtocol :: Lens' ProjectsUndelete (Maybe Text)
puUploadProtocol
  = lens _puUploadProtocol
      (\ s a -> s{_puUploadProtocol = a})
-- | Pretty-print response.
puPp :: Lens' ProjectsUndelete Bool
puPp = lens _puPp (\ s a -> s{_puPp = a})
-- | OAuth access token.
puAccessToken :: Lens' ProjectsUndelete (Maybe Text)
puAccessToken
  = lens _puAccessToken
      (\ s a -> s{_puAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
puUploadType :: Lens' ProjectsUndelete (Maybe Text)
puUploadType
  = lens _puUploadType (\ s a -> s{_puUploadType = a})
-- | Multipart request metadata.
puPayload :: Lens' ProjectsUndelete UndeleteProjectRequest
puPayload
  = lens _puPayload (\ s a -> s{_puPayload = a})
-- | OAuth bearer token.
puBearerToken :: Lens' ProjectsUndelete (Maybe Text)
puBearerToken
  = lens _puBearerToken
      (\ s a -> s{_puBearerToken = a})
-- | The project ID (for example, \`foo-bar-123\`). Required.
puProjectId :: Lens' ProjectsUndelete Text
puProjectId
  = lens _puProjectId (\ s a -> s{_puProjectId = a})
-- | JSONP
puCallback :: Lens' ProjectsUndelete (Maybe Text)
puCallback
  = lens _puCallback (\ s a -> s{_puCallback = a})
-- The positional arguments passed to 'go' follow the parameter order
-- declared in 'ProjectsUndeleteResource'.
instance GoogleRequest ProjectsUndelete where
        type Rs ProjectsUndelete = Empty
        type Scopes ProjectsUndelete =
             '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient ProjectsUndelete'{..}
          = go _puProjectId _puXgafv _puUploadProtocol
              (Just _puPp)
              _puAccessToken
              _puUploadType
              _puBearerToken
              _puCallback
              (Just AltJSON)
              _puPayload
              resourceManagerService
          where go
                  = buildClient
                      (Proxy :: Proxy ProjectsUndeleteResource)
                      mempty
| rueshyna/gogol | gogol-resourcemanager/gen/Network/Google/Resource/CloudResourceManager/Projects/Undelete.hs | mpl-2.0 | 6,194 | 0 | 19 | 1,519 | 942 | 549 | 393 | 132 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.TagManager.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.TagManager.Types
(
-- * Service Configuration
tagManagerService
-- * OAuth Scopes
, tagManagerReadOnlyScope
, tagManagerEditContainersScope
, tagManagerManageAccountsScope
, tagManagerDeleteContainersScope
, tagManagerManageUsersScope
, tagManagerPublishScope
, tagManagerEditContainerversionsScope
-- * ListVariablesResponse
, ListVariablesResponse
, listVariablesResponse
, lvrVariables
-- * ListFoldersResponse
, ListFoldersResponse
, listFoldersResponse
, lfrFolders
-- * ListEnvironmentsResponse
, ListEnvironmentsResponse
, listEnvironmentsResponse
, lerEnvironments
-- * PublishContainerVersionResponse
, PublishContainerVersionResponse
, publishContainerVersionResponse
, pcvrCompilerError
, pcvrContainerVersion
-- * ContainerVersionHeader
, ContainerVersionHeader
, containerVersionHeader
, cvhNumTags
, cvhNumMacros
, cvhContainerId
, cvhContainerVersionId
, cvhAccountId
, cvhName
, cvhNumTriggers
, cvhDeleted
, cvhNumRules
, cvhNumVariables
-- * TeardownTag
, TeardownTag
, teardownTag
, ttStopTeardownOnFailure
, ttTagName
-- * ListTriggersResponse
, ListTriggersResponse
, listTriggersResponse
, ltrTriggers
-- * Macro
, Macro
, macro
, mScheduleEndMs
, mParentFolderId
, mContainerId
, mDisablingRuleId
, mFingerprint
, mAccountId
, mName
, mEnablingRuleId
, mMacroId
, mType
, mScheduleStartMs
, mNotes
, mParameter
-- * Tag
, Tag
, tag
, tBlockingTriggerId
, tScheduleEndMs
, tParentFolderId
, tLiveOnly
, tContainerId
, tPriority
, tTeardownTag
, tFingerprint
, tTagFiringOption
, tAccountId
, tTagId
, tName
, tBlockingRuleId
, tSetupTag
, tFiringTriggerId
, tType
, tScheduleStartMs
, tNotes
, tFiringRuleId
, tParameter
-- * CreateContainerVersionResponse
, CreateContainerVersionResponse
, createContainerVersionResponse
, ccvrCompilerError
, ccvrContainerVersion
-- * CreateContainerVersionRequestVersionOptions
, CreateContainerVersionRequestVersionOptions
, createContainerVersionRequestVersionOptions
, ccvrvoName
, ccvrvoQuickPreview
, ccvrvoNotes
-- * UserAccess
, UserAccess
, userAccess
, uaAccountAccess
, uaAccountId
, uaEmailAddress
, uaContainerAccess
, uaPermissionId
-- * Environment
, Environment
, environment
, eContainerId
, eFingerprint
, eContainerVersionId
, eURL
, eAuthorizationCode
, eAccountId
, eName
, eEnableDebug
, eEnvironmentId
, eType
, eAuthorizationTimestampMs
, eDescription
-- * AccountAccess
, AccountAccess
, accountAccess
, aaPermission
-- * TriggerType
, TriggerType (..)
-- * ContainerEnabledBuiltInVariableItem
, ContainerEnabledBuiltInVariableItem (..)
-- * ContainerUsageContextItem
, ContainerUsageContextItem (..)
-- * ConditionType
, ConditionType (..)
-- * ListAccountsResponse
, ListAccountsResponse
, listAccountsResponse
, larAccounts
-- * AccountAccessPermissionItem
, AccountAccessPermissionItem (..)
-- * TagTagFiringOption
, TagTagFiringOption (..)
-- * Rule
, Rule
, rule
, rContainerId
, rFingerprint
, rRuleId
, rAccountId
, rName
, rNotes
, rCondition
-- * Folder
, Folder
, folder
, fContainerId
, fFingerprint
, fFolderId
, fAccountId
, fName
-- * Variable
, Variable
, variable
, vScheduleEndMs
, vParentFolderId
, vContainerId
, vFingerprint
, vVariableId
, vAccountId
, vDisablingTriggerId
, vName
, vType
, vScheduleStartMs
, vNotes
, vEnablingTriggerId
, vParameter
-- * ParameterType
, ParameterType (..)
-- * Account
, Account
, account
, aaShareData
, aaFingerprint
, aaAccountId
, aaName
-- * ListContainerVersionsResponse
, ListContainerVersionsResponse
, listContainerVersionsResponse
, lcvrContainerVersionHeader
, lcvrContainerVersion
-- * Container
, Container
, container
, cPublicId
, cUsageContext
, cEnabledBuiltInVariable
, cContainerId
, cFingerprint
, cTimeZoneCountryId
, cAccountId
, cDomainName
, cName
, cNotes
, cTimeZoneId
-- * ListAccountUsersResponse
, ListAccountUsersResponse
, listAccountUsersResponse
, laurUserAccess
-- * ContainerAccessPermissionItem
, ContainerAccessPermissionItem (..)
-- * ContainerVersion
, ContainerVersion
, containerVersion
, cvMacro
, cvTag
, cvContainerId
, cvFingerprint
, cvContainerVersionId
, cvRule
, cvFolder
, cvVariable
, cvAccountId
, cvName
, cvContainer
, cvDeleted
, cvTrigger
, cvNotes
-- * EnvironmentType
, EnvironmentType (..)
-- * SetupTag
, SetupTag
, setupTag
, stTagName
, stStopOnSetupFailure
-- * ListContainersResponse
, ListContainersResponse
, listContainersResponse
, lcrContainers
-- * Trigger
, Trigger
, trigger
, triCustomEventFilter
, triParentFolderId
, triContainerId
, triTriggerId
, triCheckValidation
, triFingerprint
, triAutoEventFilter
, triUniqueTriggerId
, triAccountId
, triName
, triInterval
, triEnableAllVideos
, triWaitForTagsTimeout
, triLimit
, triFilter
, triType
, triVideoPercentageList
, triEventName
, triWaitForTags
-- * ListTagsResponse
, ListTagsResponse
, listTagsResponse
, ltrTags
-- * FolderEntities
, FolderEntities
, folderEntities
, feTag
, feVariable
, feTrigger
-- * Condition
, Condition
, condition
, cType
, cParameter
-- * ContainerAccess
, ContainerAccess
, containerAccess
, caContainerId
, caPermission
-- * Parameter
, Parameter
, parameter
, pList
, pValue
, pMap
, pKey
, pType
) where
import Network.Google.Prelude
import Network.Google.TagManager.Types.Product
import Network.Google.TagManager.Types.Sum
-- | Default request configuration referring to version @v1@ of the Tag
-- Manager API.  This contains the host and root path used as a starting
-- point for constructing service requests.
tagManagerService :: ServiceConfig
tagManagerService
  = defaultService (ServiceId "tagmanager:v1")
      "www.googleapis.com"
-- OAuth scope witnesses ('Proxy' values) accepted when authorizing
-- requests against this API:
-- | View your Google Tag Manager container and its subcomponents
tagManagerReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/tagmanager.readonly"]
tagManagerReadOnlyScope = Proxy;
-- | Manage your Google Tag Manager container and its subcomponents,
-- excluding versioning and publishing
tagManagerEditContainersScope :: Proxy '["https://www.googleapis.com/auth/tagmanager.edit.containers"]
tagManagerEditContainersScope = Proxy;
-- | View and manage your Google Tag Manager accounts
tagManagerManageAccountsScope :: Proxy '["https://www.googleapis.com/auth/tagmanager.manage.accounts"]
tagManagerManageAccountsScope = Proxy;
-- | Delete your Google Tag Manager containers
tagManagerDeleteContainersScope :: Proxy '["https://www.googleapis.com/auth/tagmanager.delete.containers"]
tagManagerDeleteContainersScope = Proxy;
-- | Manage user permissions of your Google Tag Manager account and container
tagManagerManageUsersScope :: Proxy '["https://www.googleapis.com/auth/tagmanager.manage.users"]
tagManagerManageUsersScope = Proxy;
-- | Publish your Google Tag Manager container versions
tagManagerPublishScope :: Proxy '["https://www.googleapis.com/auth/tagmanager.publish"]
tagManagerPublishScope = Proxy;
-- | Manage your Google Tag Manager container versions
tagManagerEditContainerversionsScope :: Proxy '["https://www.googleapis.com/auth/tagmanager.edit.containerversions"]
tagManagerEditContainerversionsScope = Proxy;
| rueshyna/gogol | gogol-tagmanager/gen/Network/Google/TagManager/Types.hs | mpl-2.0 | 8,731 | 0 | 7 | 2,146 | 1,025 | 695 | 330 | 275 | 1 |
module Widgets.Navbar where
import Import
import Model.Currency
import Model.User
navbar :: Widget
-- | Site navigation bar.  For a logged-in user this also computes the
-- account balance, the total currently pledged across all of the
-- user's pledges, and the number of unread notifications; anonymous
-- visitors get 'Nothing' and a zero count.  Rendering is delegated to
-- the "navbar" Hamlet template.
navbar = do
    maybe_user <- handlerToWidget maybeAuth
    alreadyExpired
    (money_info, num_unread_notifs) <- case maybe_user of
        Nothing -> return (Nothing, 0)
        Just (Entity user_id user) -> do
            (pledges, balance, num_unread_notifs) <- handlerToWidget $ runDB $ do
                pledges :: [(Entity Project, Entity Pledge)] <- select $ from $
                    \ (project `InnerJoin` pledge) -> do
                        on_ $ pledge ^. PledgeProject ==. project ^. ProjectId
                        where_ $ pledge ^. PledgeUser ==. val user_id
                        return (project, pledge)
                -- NOTE(review): partial pattern bind -- assumes the
                -- user's account row always exists; confirm DB invariant.
                Just account <- get (userAccount user)
                num_unread_notifs <- fetchNumUnreadNotificationsDB user_id
                return (pledges, accountBalance account, num_unread_notifs)
            -- Pledged total: share value of each project times the
            -- number of funded shares pledged to it.
            let pledged = sum $ map (\ (project, pledge) ->
                    ((projectShareValue (entityVal project) $*) . fromIntegral . pledgeFundedShares . entityVal) pledge) pledges
            return $ (Just (balance, pledged), num_unread_notifs)
    $(widgetFile "navbar")
| Happy0/snowdrift | Widgets/Navbar.hs | agpl-3.0 | 1,226 | 0 | 28 | 381 | 354 | 180 | 174 | -1 | -1 |
{-# LANGUAGE FlexibleContexts, NoMonomorphismRestriction #-}
module Jaek.UI.Render.Overlays (
compositeSelection
)
where
import Jaek.Base
import Jaek.UI.FrpHandlers
import Diagrams.Prelude
import Data.Colour (Colour, withOpacity)
-- | Overlay the in-progress drag region (medium purple, topmost) and
-- the committed selection (royal blue) on top of a base diagram.
compositeSelection ::
  (Monoid m, Semigroup m, Renderable (Path R2) b, Backend b R2)
  => [DragEvent] -- the selection
  -> [DragEvent] -- the current drag region
  -> QDiagram b R2 m
  -> QDiagram b R2 m
compositeSelection drags curs d =
  drawSelection mediumpurple curs
  `atop` drawSelection royalblue drags
  `atop` d
-- | Overlay for selected regions: one translucent rectangle per drag,
-- stacked with 'atop' onto an empty diagram.
drawSelection ::
  (Monoid m, Semigroup m, Renderable (Path R2) b, Backend b R2)
  => Colour Double
  -> [DragEvent]
  -> QDiagram b R2 m
drawSelection colr = foldr (atop . drawDrag colr) mempty
-- | Render a single drag as a translucent rectangle spanning the
-- drag's start (from its 'ClickE') and end coordinates.
drawDrag ::
  (Monoid m, Semigroup m, Renderable (Path R2) b, Backend b R2)
  => Colour Double
  -> DragEvent
  -> QDiagram b R2 m
drawDrag colr (DragE (ClickE _ _ sx sy) ex ey) = mempty <$>
  stroke (rect (abs $ ex - sx) (abs $ ey - sy))
  # alignBL
  # translate aVec
  # fcA (colr `withOpacity` 0.4)
 where
  -- bottom-left corner of the rectangle, whichever way the user dragged
  aVec = r2 (min sx ex, min sy ey)
| JohnLato/jaek | src/Jaek/UI/Render/Overlays.hs | lgpl-3.0 | 1,172 | 0 | 14 | 253 | 433 | 229 | 204 | 34 | 1 |
module Main (main) where
import System.Environment(getArgs)
import Hangman
-- | Cheating-hangman driver: loads a word list from the file named by
-- the first command-line argument, then repeatedly reads a guessed
-- letter and narrows the word list via 'cheatHangMan' until no @*@
-- placeholders remain.  Previously crashed on a missing argument
-- ('head') or an empty input line (partial pattern); both are now
-- handled.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> putStrLn "usage: hangman <wordfile>"
    (path:_) -> do
      wordList <- readWordFile path
      let targetWord = "********"
      putStrLn targetWord
      mainLoop targetWord wordList
  where
    mainLoop :: String -> [String] -> IO ()
    mainLoop targetWord wordList = do
      line <- getLine
      case line of
        -- Ignore empty input and prompt again.
        [] -> mainLoop targetWord wordList
        (letter:_) -> do
          let (newWordList, revealedWord) = cheatHangMan wordList targetWord letter
          print newWordList
          putStrLn revealedWord
          if all (/= '*') revealedWord
            then putStrLn "success"
            else putStrLn "failure" >> mainLoop revealedWord newWordList
| yamanobori-old/CheatHangman | Main.hs | unlicense | 720 | 0 | 12 | 240 | 192 | 93 | 99 | 18 | 2 |
{-| Generic data loader.
This module holds the common code for parsing the input data after it
has been loaded from external sources.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Loader
( mergeData
, clearDynU
, updateMissing
, updateMemStat
, assignIndices
, setMaster
, lookupNode
, lookupInstance
, lookupGroup
, eitherLive
, commonSuffix
, extractExTags
, updateExclTags
, RqType(..)
, Request(..)
, ClusterData(..)
, isAllocationRequest
, emptyCluster
, extractDesiredLocations
, updateDesiredLocationTags
) where
import Control.Monad
import Control.Monad.Fail (MonadFail)
import Data.List
import qualified Data.Map as M
import Data.Maybe
import qualified Data.Set as Set
import Text.Printf (printf)
import System.Time (ClockTime(..))
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Cluster as Cluster
import qualified Ganeti.HTools.Cluster.Moves as Moves
import Ganeti.BasicTypes
import qualified Ganeti.HTools.Tags as Tags
import qualified Ganeti.HTools.Tags.Constants as TagsC
import Ganeti.HTools.Types
import Ganeti.Utils
import Ganeti.Types (EvacMode, Hypervisor(..))
-- * Types
{-| The iallocator request type.
This type denotes what request we got from Ganeti and also holds
request-specific fields.
-}
data RqType
  = Allocate Instance.Instance Cluster.AllocDetails (Maybe [String])
    -- ^ A new instance allocation, maybe with allocation restrictions
    -- (the @Maybe [String]@ presumably names the allowed nodes;
    -- TODO confirm against the IAllocator callers)
  | AllocateSecondary Idx -- ^ Find a suitable
                          -- secondary node for disk
                          -- conversion
  | Relocate Idx Int [Ndx] -- ^ Choose a new
                           -- secondary node
  | NodeEvacuate [Idx] EvacMode -- ^ node-evacuate mode
  | ChangeGroup [Gdx] [Idx] -- ^ Multi-relocate mode
  | MultiAllocate [(Instance.Instance, Cluster.AllocDetails)]
    -- ^ Multi-allocate mode
  deriving (Show)
-- | A complete request, as received from Ganeti: the request proper
-- plus the cluster state it is to be evaluated against.
data Request = Request RqType ClusterData
               deriving (Show)
-- | Classify a request: does it ask to allocate new instances?
-- Returns 'Nothing' for non-allocation requests.  For allocation
-- requests it returns @Just mgrp@, where @mgrp@ is @Just g@ when the
-- request uniquely names node group @g@, and 'Nothing' when no group
-- (or several different groups) is requested.
isAllocationRequest :: RqType -> Maybe (Maybe String)
isAllocationRequest (Allocate _ (Cluster.AllocDetails _ grp) _) = Just grp
isAllocationRequest (MultiAllocate reqs) =
  Just $ case requestedGroups of
           [grp] -> Just grp
           _     -> Nothing
  where requestedGroups =
          ordNub [ grp | (_, Cluster.AllocDetails _ (Just grp)) <- reqs ]
isAllocationRequest _ = Nothing
-- | The cluster state: everything the algorithms need to know about
-- the cluster, as assembled from one of the input backends.
data ClusterData = ClusterData
  { cdGroups :: Group.List -- ^ The node group list
  , cdNodes :: Node.List -- ^ The node list
  , cdInstances :: Instance.List -- ^ The instance list
  , cdTags :: [String] -- ^ The cluster tags
  , cdIPolicy :: IPolicy -- ^ The cluster instance policy
  } deriving (Show, Eq)
-- | An empty cluster: no groups, nodes, instances or tags, and the
-- default instance policy.
emptyCluster :: ClusterData
emptyCluster = ClusterData Container.empty Container.empty Container.empty []
                 defIPolicy
-- * Functions
-- | Look up a node name in the name-to-index map, failing (in the
-- given 'MonadFail') with a message that also names the owning
-- instance when the node is unknown.
lookupNode :: (MonadFail m) => NameAssoc -> String -> String -> m Ndx
lookupNode ktn inst node =
  case M.lookup node ktn of
    Just ndx -> return ndx
    Nothing  -> fail $ "Unknown node '" ++ node ++ "' for instance " ++ inst
-- | Look up an instance name in the name-to-index map, failing with a
-- descriptive message when absent.
lookupInstance :: (MonadFail m) => NameAssoc -> String -> m Idx
lookupInstance kti inst =
  case M.lookup inst kti of
    Just idx -> return idx
    Nothing  -> fail $ "Unknown instance '" ++ inst ++ "'"
-- | Look up a group name in the name-to-index map, failing with a
-- message that also names the node asking for the group.
lookupGroup :: (MonadFail m) => NameAssoc -> String -> String -> m Gdx
lookupGroup ktg nname gname =
  case M.lookup gname ktg of
    Just gdx -> return gdx
    Nothing  -> fail $ "Unknown group '" ++ gname ++ "' for node " ++ nname
-- | Assign consecutive indices (starting at 0, in list order) to a
-- list of named elements, producing both the name-to-index map and the
-- index-to-element container (each element stamped with its own index
-- via 'setIdx').
assignIndices :: (Element a) =>
                 [(String, a)]
              -> (NameAssoc, Container.Container a)
assignIndices name_element =
  let indexed = zip [0..] name_element
      name_idx = [ (k, idx) | (idx, (k, _)) <- indexed ]
      idx_element = [ (idx, setIdx v idx) | (idx, (_, v)) <- indexed ]
  in (M.fromList name_idx, Container.fromList idx_element)
-- | Given an indexed node list and the name of the master node, mark
-- that node as master; fails when the name is not in the map.
setMaster :: (MonadFail m) => NameAssoc -> Node.List -> String -> m Node.List
setMaster node_names node_idx master =
  case M.lookup master node_names of
    Nothing -> fail $ "Master node " ++ master ++ " unknown"
    Just kmaster ->
      let mnode = Container.find kmaster node_idx
      in return $ Container.add kmaster (Node.setMaster mnode True) node_idx
-- | Given the nodes with their location tags already set, compute and
-- store the location score for one instance (the secondary node may be
-- absent, hence the 'Container.lookup').
setLocationScore :: Node.List -> Instance.Instance -> Instance.Instance
setLocationScore nl inst =
  Moves.setInstanceLocationScore inst
    (Container.find (Instance.pNode inst) nl)
    (Container.lookup (Instance.sNode inst) nl)
-- | For each instance, add its index to its primary and secondary nodes.
-- Fold step: takes the accumulated node list, registers the instance on
-- its primary node (always) and on its secondary node (only when one
-- exists), and returns the updated list.
fixNodes :: Node.List
         -> Instance.Instance
         -> Node.List
fixNodes accu inst =
  let pdx = Instance.pNode inst
      sdx = Instance.sNode inst
      pold = Container.find pdx accu
      pnew = Node.setPri pold inst
      -- primary registration must happen first: the secondary update
      -- below builds on ac2, not on the original accumulator
      ac2 = Container.add pdx pnew accu
  in if sdx /= Node.noSecondary
       then let sold = Container.find sdx accu
                snew = Node.setSec sold inst
            in Container.add sdx snew ac2
       else ac2
-- | Set the node's instance policy to that of its node group.  Note
-- that this requires the group to exist (should have been checked
-- before); 'Container.find' aborts with a runtime error otherwise.
setNodePolicy :: Group.List -> Node.Node -> Node.Node
setNodePolicy gl node =
  Node.setPolicy (Group.iPolicy (Container.find (Node.group node) gl)) node
-- | Store on the instance the subset of its tags that start with one
-- of the given exclusion-tag prefixes.
updateExclTags :: [String] -> Instance.Instance -> Instance.Instance
updateExclTags tl inst =
  inst { Instance.exclTags =
           [ tag | tag <- Instance.allTags inst
                 , any (`isPrefixOf` tag) tl ] }
-- | Store on the instance (as a set) the subset of its tags that start
-- with one of the given desired-location prefixes.
updateDesiredLocationTags :: [String] -> Instance.Instance -> Instance.Instance
updateDesiredLocationTags tl inst =
  inst { Instance.dsrdLocTags =
           Set.fromList [ tag | tag <- Instance.allTags inst
                              , any (`isPrefixOf` tag) tl ] }
-- | Update the movable attribute: an instance is pinned (not movable)
-- when it is explicitly excluded, or when a non-empty selection list
-- exists and does not mention it.
updateMovable :: [String]           -- ^ Selected instances (if not empty)
              -> [String]           -- ^ Excluded instances
              -> Instance.Instance  -- ^ Target Instance
              -> Instance.Instance  -- ^ Target Instance with updated attribute
updateMovable selinsts exinsts inst
  | excluded || notSelected = Instance.setMovable inst False
  | otherwise               = inst
  where
    iname       = Instance.name inst
    excluded    = iname `elem` exinsts
    notSelected = not (null selinsts || iname `elem` selinsts)
-- | Disable moves for instances whose nodes span more than one group
-- (i.e. 'Cluster.instanceGroup' does not resolve to a single group).
disableSplitMoves :: Node.List -> Instance.Instance -> Instance.Instance
disableSplitMoves nl inst
  | isOk (Cluster.instanceGroup nl inst) = inst
  | otherwise                            = Instance.setMovable inst False
-- | Set the auto-repair policy for every instance.
--
-- Precedence (innermost wins): an instance's own tags override its
-- group's policy, which in turn defaults to the cluster-wide policy;
-- the cluster policy itself defaults to 'ArNotEnabled' when no
-- auto-repair tags are present at all.
setArPolicy :: [String]       -- ^ Cluster tags
            -> Group.List     -- ^ List of node groups
            -> Node.List      -- ^ List of nodes
            -> Instance.List  -- ^ List of instances
            -> ClockTime      -- ^ Current timestamp, to evaluate ArSuspended
            -> Instance.List  -- ^ Updated list of instances
setArPolicy ctags gl nl il time =
  let getArPolicy' = flip getArPolicy time
      cpol = fromMaybe ArNotEnabled $ getArPolicy' ctags
      -- per-group policies, each falling back to the cluster policy
      gpols = Container.map (fromMaybe cpol . getArPolicy' . Group.allTags) gl
      ipolfn = getArPolicy' . Instance.allTags
      -- group of an instance is looked up through its primary node
      nlookup = flip Container.find nl . Instance.pNode
      glookup = flip Container.find gpols . Node.group . nlookup
      updateInstance inst = inst {
        Instance.arPolicy = fromMaybe (glookup inst) $ ipolfn inst }
  in
   Container.map updateInstance il
-- | Get the auto-repair policy from a list of tags.
--
-- This examines the ganeti:watcher:autorepair and
-- ganeti:watcher:autorepair:suspend tags to determine the policy. If none of
-- these tags are present, Nothing (and not ArNotEnabled) is returned.
--
-- Guard order is significant: a bare suspend tag means "suspended
-- forever"; otherwise any suspend timestamps still in the future win
-- over enablement; only then is the strongest (minimum) enabled repair
-- type reported.
getArPolicy :: [String] -> ClockTime -> Maybe AutoRepairPolicy
getArPolicy tags time =
  let enabled = mapMaybe (autoRepairTypeFromRaw <=<
                          chompPrefix TagsC.autoRepairTagEnabled) tags
      suspended = mapMaybe (chompPrefix TagsC.autoRepairTagSuspended) tags
      -- suspend tag values are parsed as epoch seconds; unparsable or
      -- already-elapsed timestamps are dropped
      futureTs = filter (> time) . map (flip TOD 0) $
                 mapMaybe (tryRead "auto-repair suspend time") suspended
  in
   case () of
     -- Note how we must return ArSuspended even if "enabled" is empty, so that
     -- node groups or instances can suspend repairs that were enabled at an
     -- upper scope (cluster or node group).
     _ | "" `elem` suspended -> Just $ ArSuspended Forever
       | not $ null futureTs -> Just . ArSuspended . Until . maximum $ futureTs
       | not $ null enabled -> Just $ ArEnabled (minimum enabled)
       | otherwise -> Nothing
-- | Compute the longest common suffix of a list of strings, restricted
-- to suffixes that start with a dot; returns @\"\"@ when no such common
-- suffix exists.
longestDomain :: [String] -> String
longestDomain [] = ""
longestDomain (x:xs) =
  let dotSuffixes = filter ("." `isPrefixOf`) (tails x) -- longest first
      sharedByAll s = all (isSuffixOf s) xs
  in fromMaybe "" (find sharedByAll dotSuffixes)
-- | Extract the exclusion-tag prefixes from the cluster tags: keep the
-- non-empty remainders of tags carrying the exclusion prefix.
extractExTags :: [String] -> [String]
extractExTags ctags =
  [ tag | Just tag <- map (chompPrefix TagsC.exTagsPrefix) ctags
        , not (null tag) ]
-- | Extract the desired-location prefixes from the given tags: keep
-- the non-empty remainders of tags carrying the desired-location
-- prefix.
extractDesiredLocations :: [String] -> [String]
extractDesiredLocations ctags =
  [ tag | Just tag <- map (chompPrefix TagsC.desiredLocationPrefix) ctags
        , not (null tag) ]
-- | Extract the common (dot-starting) suffix shared by all node and
-- instance names, via 'longestDomain'.
commonSuffix :: Node.List -> Instance.List -> String
commonSuffix nl il =
  longestDomain (map Node.name (Container.elems nl)
                 ++ map Instance.name (Container.elems il))
-- | Set the migration-related tags on a node given the cluster tags;
-- this assumes that the node tags are already set on that node.  Both
-- the outgoing and the receive-side migration restrictions are stored.
addMigrationTags :: [String] -- ^ cluster tags
                 -> Node.Node -> Node.Node
addMigrationTags ctags node =
  Node.setRecvMigrationTags
    (Node.setMigrationTags node (Tags.getMigRestrictions ctags ntags))
    (Tags.getRecvMigRestrictions ctags ntags)
  where ntags = Node.nTags node
-- | Set the location tags on a node given the cluster tags; this
-- assumes that the node tags are already set on that node.
addLocationTags :: [String] -- ^ cluster tags
                -> Node.Node -> Node.Node
addLocationTags ctags node =
  Node.setLocationTags node (Tags.getLocations ctags (Node.nTags node))
-- | Initializer function that loads the data from a node and instance
-- list and massages it into the correct format.
--
-- Pipeline: apply auto-repair policies; merge utilisation data; apply
-- exclusion/desired-location/movability tags; compute aliases from the
-- common name suffix; set location tags and scores; register instances
-- on their nodes; apply group policies and peer maps; finally reject
-- the whole input if any selected/excluded instance name was unknown.
mergeData :: [(String, DynUtil)] -- ^ Instance utilisation data
          -> [String]            -- ^ Exclusion tags
          -> [String]            -- ^ Selected instances (if not empty)
          -> [String]            -- ^ Excluded instances
          -> ClockTime           -- ^ The current timestamp
          -> ClusterData         -- ^ Data from backends
          -> Result ClusterData  -- ^ Fixed cluster data
mergeData um extags selinsts exinsts time cdata@(ClusterData gl nl il ctags _) =
  let il2 = setArPolicy ctags gl nl il time
      -- fold the dynamic utilisation reports into the instance list;
      -- reports for unknown instances are silently dropped
      il3 = foldl' (\im (name, n_util) ->
                      case Container.findByName im name of
                        Nothing -> im -- skipping unknown instance
                        Just inst ->
                          let new_i = inst { Instance.util = n_util }
                          in Container.add (Instance.idx inst) new_i im
                   ) il2 um
      allextags = extags ++ extractExTags ctags
      dsrdLocTags = extractDesiredLocations ctags
      inst_names = map Instance.name $ Container.elems il3
      -- resolve the selection/exclusion names against real instances,
      -- so typos can be reported instead of silently ignored
      selinst_lkp = map (lookupName inst_names) selinsts
      exinst_lkp = map (lookupName inst_names) exinsts
      lkp_unknown = filter (not . goodLookupResult) (selinst_lkp ++ exinst_lkp)
      selinst_names = map lrContent selinst_lkp
      exinst_names = map lrContent exinst_lkp
      node_names = map Node.name (Container.elems nl)
      common_suffix = longestDomain (node_names ++ inst_names)
      il4 = Container.map (computeAlias common_suffix .
                           updateExclTags allextags .
                           updateDesiredLocationTags dsrdLocTags .
                           updateMovable selinst_names exinst_names) il3
      nl2 = Container.map (addLocationTags ctags) nl
      -- location scores need the location-tagged node list (nl2)
      il5 = Container.map (setLocationScore nl2) il4
      nl3 = foldl' fixNodes nl2 (Container.elems il5)
      nl4 = Container.map (setNodePolicy gl .
                           computeAlias common_suffix .
                           (`Node.buildPeers` il4)) nl3
      il6 = Container.map (disableSplitMoves nl3) il5
      nl5 = Container.map (addMigrationTags ctags) nl4
  in if' (null lkp_unknown)
     (Ok cdata { cdNodes = nl5, cdInstances = il6 })
     (Bad $ "Unknown instance(s): " ++ show(map lrContent lkp_unknown))
-- | In a cluster description, clear dynamic utilisation information:
-- reset every instance's utilisation to 'zeroUtil'.
clearDynU :: ClusterData -> Result ClusterData
clearDynU cdata@(ClusterData _ _ il _ _) =
  let wipe inst = inst { Instance.util = zeroUtil }
  in Ok cdata { cdInstances = Container.map wipe il }
-- | Update cluster data to use a statically-configured node memory
-- amount on KVM nodes; a non-positive size leaves the list untouched.
setStaticKvmNodeMem :: Node.List -- ^ Nodes to update
                    -> Int       -- ^ Static node size
                    -> Node.List -- ^ Updated nodes
setStaticKvmNodeMem nl static_node_mem
  | static_node_mem <= 0 = nl
  | otherwise            = Container.map overrideMem nl
  where
    -- only nodes reported as running KVM are overridden
    overrideMem n
      | Node.hypervisor n == Just Kvm = n { Node.nMem = static_node_mem }
      | otherwise                     = n
-- | Update node memory stat based on instance list: recompute the
-- instance-used memory, then the missing (unaccounted-for) memory from
-- it, then the fraction of memory still unallocated.  The order
-- matters: each field is derived from the previous update.
updateMemStat :: Node.Node -> Instance.List -> Node.Node
updateMemStat node il =
  let node2 = node { Node.iMem = nodeImem node il }
      node3 = node2 { Node.xMem = Node.missingMem node2 }
  in node3 { Node.pMem = fromIntegral (Node.unallocatedMem node3)
                         / Node.tMem node3 }
-- | Check the cluster for memory/disk allocation consistency and update stats.
--
-- A warning message is produced per node whose unaccounted-for
-- resources exceed 512 MB of RAM or 1024 of disk (disk deltas are in
-- the same unit as tDsk/fDsk; the message divides by 1024 and reports
-- GB).
updateMissing :: Node.List     -- ^ All nodes in the cluster
              -> Instance.List -- ^ All instances in the cluster
              -> Int           -- ^ Static node memory for KVM
              -> ([String], Node.List) -- ^ Pair of errors, update node list
updateMissing nl il static_node_mem =
  -- This overrides node mem on KVM as loaded from backend. Ganeti 2.17
  -- handles this using obtainNodeMemory.
  let nl2 = setStaticKvmNodeMem nl static_node_mem
      updateSingle msgs node =
        let nname = Node.name node
            newn = updateMemStat node il
            delta_mem = Node.xMem newn
            -- disk unaccounted for = total - free - used by instances
            delta_dsk = truncate (Node.tDsk node)
                        - Node.fDsk node
                        - nodeIdsk node il
            umsg1 = if delta_mem > 512 || delta_dsk > 1024
                      then printf
                           "node %s is missing %d MB ram and %d GB disk"
                           nname delta_mem (delta_dsk `div` 1024):msgs
                      else msgs
        in (umsg1, newn)
  in Container.mapAccum updateSingle [] nl2
-- | Compute the amount of memory used by primary instances on a node,
-- counting only instances that actually consume memory
-- ('Instance.usesMemory').
nodeImem :: Node.Node -> Instance.List -> Int
nodeImem node il =
  sum [ Instance.mem inst
      | inst <- map (`Container.find` il) (Node.pList node)
      , Instance.usesMemory inst ]
-- | Compute the amount of disk used by instances on a node, counting
-- both primary and secondary placements.
nodeIdsk :: Node.Node -> Instance.List -> Int
nodeIdsk node il =
  sum [ Instance.dsk (Container.find idx il)
      | idx <- Node.pList node ++ Node.sList node ]
-- | Pick between live data and a static default: when the flag is set
-- the live computation is returned as-is, otherwise the default value
-- is injected into the monad.
eitherLive :: (MonadFail m) => Bool -> a -> m a -> m a
eitherLive live def_data live_data
  | live      = live_data
  | otherwise = return def_data
| ganeti/ganeti | src/Ganeti/HTools/Loader.hs | bsd-2-clause | 18,630 | 0 | 20 | 4,759 | 3,922 | 2,093 | 1,829 | 294 | 2 |
{-# LANGUAGE QuasiQuotes, TemplateHaskell, MultiParamTypeClasses,
OverloadedStrings, TypeFamilies #-}
module Forms where
import Control.Applicative ((<$>), (<*>), pure)
import Prelude
import Yesod
import Yesod.Form ()
import Foundation (App)
--import Yesod.Form.Jquery
--import Data.Text
--import Data.String
import AppTypes
import Data.Time.Clock (UTCTime)
-- | Build an applicative (divs-rendered) form for creating a NewsItem.
-- The caller supplies the creation timestamp, the submitting user's id
-- and alias; only Title and URL are collected from the user, while the
-- timestamp, user id, alias and an empty trailing list (presumably
-- comments/votes — TODO confirm against the NewsItem definition in
-- AppTypes) are injected with 'pure'.
addNewsItemForm a1 a2 a3 =
    renderDivs (customform a1 a2 a3)
  where customform :: UTCTime -> UserId -> UserAlias -> AForm App App NewsItem
        customform now uid ualias =
         NewsItem
            <$> areq textField "Title" Nothing
            <*> areq textField "URL" Nothing
            <*> pure now
            <*> pure uid
            <*> pure ualias
            <*> pure []
{-
--personForm :: Html -> MForm Synopsis Synopsis (FormResult Person, Widget)
personForm = renderDivs $ Person
<$> areq textField "Name" Nothing
<*> areq (jqueryDayField def
{ jdsChangeYear = True -- give a year dropdown
, jdsYearRange = "1900:-5" -- 1900 till five years ago
}) "Birthday" Nothing
<*> aopt textField "Favorite color" Nothing
<*> areq emailField "Email address" Nothing
<*> aopt urlField "Website" Nothing
-}
| dreamcodez/moddit-yesod | Forms.hs | bsd-2-clause | 1,228 | 0 | 13 | 301 | 180 | 99 | 81 | 21 | 1 |
-- HLint configuration: pull in the default hint set, then silence the
-- hints below for the whole tree.
import "hint" HLint.HLint
-- Style hints we never want reported:
ignore "Eta reduce"
ignore "Reduce duplication"
ignore "Avoid lambda"
-- In between: these should be fixed eventually, but fixing them would
-- make relating the code to the original Coq development more subtle —
-- and they don't really matter.
ignore "Use camelCase"
ignore "Redundant if"
ignore "Use fromMaybe"
ignore "Use list comprehension"
-- debatable, but really about style so not interesting
ignore "Use <$>"
| cartazio/haver-raft | HLint.hs | bsd-2-clause | 401 | 0 | 5 | 59 | 59 | 24 | 35 | -1 | -1 |
import Control.Monad (forM_, when)
import qualified Data.Vector.Unboxed.Mutable as MV
import qualified Data.Vector.Unboxed as V
import Data.Vector.Unboxed ((!))
-- | The modulus all arithmetic in this program is performed under.
modulo :: Int
modulo = 20092010
-- | Modular addition of two residues (result reduced mod 'modulo').
sumMod :: Int -> Int -> Int
sumMod x y = mod (x + y) modulo
-- | Multiply two polynomials, given as coefficient vectors of length
-- @k@, in the quotient ring Z_modulo[x] / (x^k - x - 1): any product
-- term of degree @i+j >= k@ is reduced via x^(i+j) = x^(i+j-k) +
-- x^(i+j-k+1), which is exactly the reduction rule x^k = 1 + x.
-- Built with a mutable scratch vector and frozen via 'V.create'.
mulMat :: Int -> V.Vector Int -> V.Vector Int -> V.Vector Int
mulMat k a b = V.create $ do
  ret <- MV.replicate k 0
  forM_ [0 .. k - 1] $ \i -> do
    -- skip zero coefficients of the first factor entirely
    when ((a!i) /= 0) $ do
      forM_ [0 .. k - 1] $ \j -> do
        let v = (a!i) * (b!j)
        if (i + j >= k)
          then (update ret (i + j - k) v) >> (update ret (i + j - k + 1) v)
          else update ret (i + j) v
  return ret where
    -- add d (mod modulo) into slot i of the accumulator vector
    update vec i d = do
      v <- MV.unsafeRead vec i
      MV.unsafeWrite vec i $ sumMod v d
-- | Raise polynomial @x@ to the @p@-th power in the quotient ring by
-- binary exponentiation; @unit@ is the constant polynomial 1.  The
-- 'seq's keep the square and the running product evaluated at each
-- step, avoiding a chain of thunked 'mulMat' calls.
powMat :: Int -> Int -> V.Vector Int -> V.Vector Int
powMat k p x = unit `seq` x `seq` helper k p x unit where
  unit = V.fromList $ (1 : (replicate (k - 1) 0))
  helper k 0 a r = r
  helper k p a r = a' `seq` r' `seq` helper k (p `div` 2) a' r' where
    a' = mulMat k a a
    r' = if (odd p)
         then mulMat k r a
         else r
-- | Evaluate the recurrence at index @n@: compute x^n in the quotient
-- ring (via 'powMat') and take the modular dot product of the
-- resulting coefficient vector with the initial values @f@.
--
-- NOTE: the second argument (the recurrence coefficient list) is kept
-- for interface compatibility but is not consulted — the reduction
-- rule x^k = x^0 + x^1 is hard-wired in 'mulMat'.  The original code
-- built an unused reversed copy of it (@b@); that dead binding has
-- been removed.
solve :: Int -> [Int] -> [Int] -> Int -> Int
solve k _a f n = foldl sumMod 0 $ zipWith (*) f (V.toList power)
  where
    -- the polynomial "x" as a coefficient vector of length k
    baseMat = V.fromList $ 0 : 1 : replicate (k - 2) 0
    power = powMat k n baseMat
-- Driver: evaluates the order-2000 recurrence with all 2000 initial
-- values equal to 1 at n = 10^18, printing the result mod 'modulo'.
-- (This matches Project Euler problem 258: g(n) = g(n-2000) + g(n-1999).)
main = print $ solve 2000 a f (10^18) where
  a = (replicate 1998 0) ++ [1, 1]
  f = replicate 2000 1
| foreverbell/project-euler-solutions | src/258.hs | bsd-3-clause | 1,535 | 0 | 26 | 536 | 778 | 410 | 368 | 39 | 3 |
{-# LANGUAGE MagicHash, NoImplicitPrelude, TypeFamilies, UnboxedTuples,
MultiParamTypeClasses, RoleAnnotations, CPP, TypeOperators,
PolyKinds #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Types
-- Copyright : (c) The University of Glasgow 2009
-- License : see libraries/ghc-prim/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- GHC type definitions.
-- Use GHC.Exts from the base package instead of importing this
-- module directly.
--
-----------------------------------------------------------------------------
module GHC.Types (
-- Data types that are built-in syntax
-- They are defined here, but not explicitly exported
--
-- Lists: []( [], (:) )
-- Type equality: (~)( Eq# )
Bool(..), Char(..), Int(..), Word(..),
Float(..), Double(..),
Ordering(..), IO(..),
isTrue#,
SPEC(..),
Nat, Symbol,
Any,
type (~~), Coercible,
TYPE, RuntimeRep(..), Type, Constraint,
-- The historical type * should ideally be written as
-- `type *`, without the parentheses. But that's a true
-- pain to parse, and for little gain.
VecCount(..), VecElem(..),
-- * Runtime type representation
Module(..), TrName(..), TyCon(..), TypeLitSort(..),
KindRep(..), KindBndr
) where
import GHC.Prim
infixr 5 :
{- *********************************************************************
* *
Kinds
* *
********************************************************************* -}
-- | The kind of constraints, like @Show a@
data Constraint
-- | The kind of types with lifted values. For example @Int :: Type@.
-- Defined as @TYPE 'LiftedRep@: a type's kind encodes its runtime
-- representation (see 'RuntimeRep' below).
type Type = TYPE 'LiftedRep
{- *********************************************************************
* *
Nat and Symbol
* *
********************************************************************* -}
-- | (Kind) This is the kind of type-level natural numbers.
-- (No constructors: it is uninhabited by terms and exists only at the
-- type level.)
data Nat
-- | (Kind) This is the kind of type-level symbols.
-- Declared here because class IP needs it
data Symbol
{- *********************************************************************
* *
Any
* *
********************************************************************* -}
-- | The type constructor 'Any' is type to which you can unsafely coerce any
-- lifted type, and back. More concretely, for a lifted type @t@ and
-- value @x :: t@, -- @unsafeCoerce (unsafeCoerce x :: Any) :: t@ is equivalent
-- to @x@.
--
type family Any :: k where { }
-- See Note [Any types] in TysWiredIn. Also, for a bit of history on Any see
-- #10886. Note that this must be a *closed* type family: we need to ensure
-- that this can't reduce to a `data` type for the results discussed in
-- Note [Any types].
{- *********************************************************************
* *
Lists
NB: lists are built-in syntax, and hence not explicitly exported
* *
********************************************************************* -}
-- | The builtin list type, usually written in its non-prefix form @[a]@.
--
-- ==== __Examples__
--
-- Unless the OverloadedLists extension is enabled, list literals are
-- syntatic sugar for repeated applications of @:@ and @[]@.
--
-- >>> 1:2:3:4:[] == [1,2,3,4]
-- True
--
-- Similarly, unless the OverloadedStrings extension is enabled, string
-- literals are syntactic sugar for a lists of characters.
--
-- >>> ['h','e','l','l','o'] == "hello"
-- True
--
data [] a = [] | a : [a]
{- *********************************************************************
* *
Ordering
* *
********************************************************************* -}
data Ordering = LT | EQ | GT
{- *********************************************************************
* *
Int, Char, Word, Float, Double
* *
********************************************************************* -}
{- | The character type 'Char' is an enumeration whose values represent
Unicode (or equivalently ISO\/IEC 10646) code points (i.e. characters, see
<http://www.unicode.org/> for details). This set extends the ISO 8859-1
(Latin-1) character set (the first 256 characters), which is itself an extension
of the ASCII character set (the first 128 characters). A character literal in
Haskell has type 'Char'.
To convert a 'Char' to or from the corresponding 'Int' value defined
by Unicode, use 'Prelude.toEnum' and 'Prelude.fromEnum' from the
'Prelude.Enum' class respectively (or equivalently 'Data.Char.ord' and
'Data.Char.chr').
-}
-- Each of the following is a boxed (lifted) wrapper around the
-- corresponding unboxed primitive type from GHC.Prim; the CTYPE
-- pragmas give the matching C type for the FFI.
data {-# CTYPE "HsChar" #-} Char = C# Char#
-- | A fixed-precision integer type with at least the range @[-2^29 .. 2^29-1]@.
-- The exact range for a given implementation can be determined by using
-- 'Prelude.minBound' and 'Prelude.maxBound' from the 'Prelude.Bounded' class.
data {-# CTYPE "HsInt" #-} Int = I# Int#
-- |A 'Word' is an unsigned integral type, with the same size as 'Int'.
data {-# CTYPE "HsWord" #-} Word = W# Word#
-- | Single-precision floating point numbers.
-- It is desirable that this type be at least equal in range and precision
-- to the IEEE single-precision type.
data {-# CTYPE "HsFloat" #-} Float = F# Float#
-- | Double-precision floating point numbers.
-- It is desirable that this type be at least equal in range and precision
-- to the IEEE double-precision type.
data {-# CTYPE "HsDouble" #-} Double = D# Double#
{- *********************************************************************
* *
IO
* *
********************************************************************* -}
{- |
A value of type @'IO' a@ is a computation which, when performed,
does some I\/O before returning a value of type @a@.
There is really only one way to \"perform\" an I\/O action: bind it to
@Main.main@ in your program. When your program is run, the I\/O will
be performed. It isn't possible to perform I\/O from an arbitrary
function, unless that function is itself in the 'IO' monad and called
at some point, directly or indirectly, from @Main.main@.
'IO' is a monad, so 'IO' actions can be combined using either the do-notation
or the 'Prelude.>>' and 'Prelude.>>=' operations from the 'Prelude.Monad'
class.
-}
-- Under the hood an 'IO' action is a state-token-threading function:
-- it consumes a 'State#' 'RealWorld' token and returns the next token
-- together with the result, which is what sequences I/O at compile time.
newtype IO a = IO (State# RealWorld -> (# State# RealWorld, a #))
type role IO representational
{- The 'type role' role annotation for IO is redundant but is included
because this role is significant in the normalisation of FFI
types. Specifically, if this role were to become nominal (which would
be very strange, indeed!), changes elsewhere in GHC would be
necessary. See [FFI type roles] in TcForeign. -}
{- *********************************************************************
* *
(~) and Coercible
NB: (~) is built-in syntax, and hence not explicitly exported
* *
********************************************************************* -}
{-
Note [Kind-changing of (~) and Coercible]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(~) and Coercible are tricky to define. To the user, they must appear as
constraints, but we cannot define them as such in Haskell. But we also cannot
just define them only in GHC.Prim (like (->)), because we need a real module
for them, e.g. to compile the constructor's info table.
Furthermore the type of MkCoercible cannot be written in Haskell
(no syntax for ~#R).
So we define them as regular data types in GHC.Types, and do magic in TysWiredIn,
inside GHC, to change the kind and type.
-}
-- | Lifted, heterogeneous equality. By lifted, we mean that it
-- can be bogus (deferred type error). By heterogeneous, the two
-- types @a@ and @b@ might have different kinds. Because @~~@ can
-- appear unexpectedly in error messages to users who do not care
-- about the difference between heterogeneous equality @~~@ and
-- homogeneous equality @~@, this is printed as @~@ unless
-- @-fprint-equality-relations@ is set.
-- NB: the equality classes below have empty bodies; the real meaning
-- is wired in by GHC (see Note [Kind-changing of (~) and Coercible]
-- above).
class a ~~ b
  -- See also Note [The equality types story] in TysPrim
-- | Lifted, homogeneous equality. By lifted, we mean that it
-- can be bogus (deferred type error). By homogeneous, the two
-- types @a@ and @b@ must have the same kinds.
class a ~ b
-- See also Note [The equality types story] in TysPrim
-- | @Coercible@ is a two-parameter class that has instances for types @a@ and @b@ if
-- the compiler can infer that they have the same representation. This class
-- does not have regular instances; instead they are created on-the-fly during
-- type-checking. Trying to manually declare an instance of @Coercible@
-- is an error.
--
-- Nevertheless one can pretend that the following three kinds of instances
-- exist. First, as a trivial base-case:
--
-- @instance Coercible a a@
--
-- Furthermore, for every type constructor there is
-- an instance that allows to coerce under the type constructor. For
-- example, let @D@ be a prototypical type constructor (@data@ or
-- @newtype@) with three type arguments, which have roles @nominal@,
-- @representational@ resp. @phantom@. Then there is an instance of
-- the form
--
-- @instance Coercible b b\' => Coercible (D a b c) (D a b\' c\')@
--
-- Note that the @nominal@ type arguments are equal, the
-- @representational@ type arguments can differ, but need to have a
-- @Coercible@ instance themself, and the @phantom@ type arguments can be
-- changed arbitrarily.
--
-- The third kind of instance exists for every @newtype NT = MkNT T@ and
-- comes in two variants, namely
--
-- @instance Coercible a T => Coercible a NT@
--
-- @instance Coercible T b => Coercible NT b@
--
-- This instance is only usable if the constructor @MkNT@ is in scope.
--
-- If, as a library author of a type constructor like @Set a@, you
-- want to prevent a user of your module to write
-- @coerce :: Set T -> Set NT@,
-- you need to set the role of @Set@\'s type parameter to @nominal@,
-- by writing
--
-- @type role Set nominal@
--
-- For more details about this feature, please refer to
-- <http://research.microsoft.com/en-us/um/people/simonpj/papers/ext-f/coercible.pdf Safe Coercions>
-- by Joachim Breitner, Richard A. Eisenberg, Simon Peyton Jones and Stephanie Weirich.
--
-- @since 4.7.0.0
class Coercible (a :: k) (b :: k)
-- See also Note [The equality types story] in TysPrim
{- *********************************************************************
* *
Bool, and isTrue#
* *
********************************************************************* -}
data {-# CTYPE "HsBool" #-} Bool = False | True
{-# INLINE isTrue# #-}
-- | Alias for 'tagToEnum#'. Returns True if its parameter is 1# and False
-- if it is 0#.
--
-- NB: deliberately a plain alias rather than a pattern match — see
-- Note [Optimizing isTrue#] below for why matching here would produce
-- worse code.
isTrue# :: Int# -> Bool   -- See Note [Optimizing isTrue#]
isTrue# x = tagToEnum# x
{- Note [Optimizing isTrue#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Current definition of isTrue# is a temporary workaround. We would like to
have functions isTrue# and isFalse# defined like this:
isTrue# :: Int# -> Bool
isTrue# 1# = True
isTrue# _ = False
isFalse# :: Int# -> Bool
isFalse# 0# = True
isFalse# _ = False
These functions would allow us to safely check if a tag can represent True
or False. Using isTrue# and isFalse# as defined above will not introduce
additional case into the code. When we scrutinize return value of isTrue#
or isFalse#, either explicitly in a case expression or implicitly in a guard,
the result will always be a single case expression (given that optimizations
are turned on). This results from case-of-case transformation. Consider this
code (this is both valid Haskell and Core):
case isTrue# (a ># b) of
True -> e1
False -> e2
Inlining isTrue# gives:
case (case (a ># b) of { 1# -> True; _ -> False } ) of
True -> e1
False -> e2
Case-of-case transforms that to:
case (a ># b) of
1# -> case True of
True -> e1
False -> e2
_ -> case False of
True -> e1
False -> e2
Which is then simplified by case-of-known-constructor:
case (a ># b) of
1# -> e1
_ -> e2
While we get good Core here, the code generator will generate very bad Cmm
if e1 or e2 do allocation. It will push heap checks into case alternatives
which results in about 2.5% increase in code size. Until this is improved we
just make isTrue# an alias to tagToEnum#. This is a temporary solution (if
you're reading this in 2023 then things went wrong). See #8326.
-}
{- *********************************************************************
* *
SPEC
* *
********************************************************************* -}
-- | 'SPEC' is used by GHC in the @SpecConstr@ pass in order to inform
-- the compiler when to be particularly aggressive. In particular, it
-- tells GHC to specialize regardless of size or the number of
-- specializations. However, not all loops fall into this category.
--
-- Libraries can specify this by using 'SPEC' data type to inform which
-- loops should be aggressively specialized.
data SPEC = SPEC | SPEC2
{- *********************************************************************
* *
Levity polymorphism
* *
********************************************************************* -}
-- | GHC maintains a property that the kind of all inhabited types
-- (as distinct from type constructors or type-level data) tells us
-- the runtime representation of values of that type. This datatype
-- encodes the choice of runtime value.
-- Note that 'TYPE' is parameterised by 'RuntimeRep'; this is precisely
-- what we mean by the fact that a type's kind encodes the runtime
-- representation.
--
-- For boxed values (that is, values that are represented by a pointer),
-- a further distinction is made, between lifted types (that contain ⊥),
-- and unlifted ones (that don't).
data RuntimeRep = VecRep VecCount VecElem   -- ^ a SIMD vector type
                | TupleRep [RuntimeRep]     -- ^ An unboxed tuple of the given reps
                | SumRep [RuntimeRep]       -- ^ An unboxed sum of the given reps
                | LiftedRep       -- ^ lifted (may contain ⊥); represented by a pointer
                | UnliftedRep     -- ^ unlifted (never ⊥); represented by a pointer
                | IntRep          -- ^ signed, word-sized value
                | Int8Rep         -- ^ signed,  8-bit value
                | Int16Rep        -- ^ signed, 16-bit value
                | Int32Rep        -- ^ signed, 32-bit value
                | Int64Rep        -- ^ signed, 64-bit value (on 32-bit only)
                | WordRep         -- ^ unsigned, word-sized value
                | Word8Rep        -- ^ unsigned,  8-bit value
                | Word16Rep       -- ^ unsigned, 16-bit value
                | Word32Rep       -- ^ unsigned, 32-bit value
                | Word64Rep       -- ^ unsigned, 64-bit value (on 32-bit only)
                | AddrRep         -- ^ A pointer, but /not/ to a Haskell value
                | FloatRep        -- ^ a 32-bit floating point number
                | DoubleRep       -- ^ a 64-bit floating point number
-- RuntimeRep is intimately tied to TyCon.RuntimeRep (in GHC proper). See
-- Note [RuntimeRep and PrimRep] in RepType.
-- See also Note [Wiring in RuntimeRep] in TysWiredIn
-- | Length of a SIMD vector type
data VecCount = Vec2   -- ^ 2 elements
              | Vec4   -- ^ 4 elements
              | Vec8   -- ^ 8 elements
              | Vec16  -- ^ 16 elements
              | Vec32  -- ^ 32 elements
              | Vec64  -- ^ 64 elements
-- Enum, Bounded instances in GHC.Enum
-- | Element of a SIMD vector type
data VecElem = Int8ElemRep    -- ^ signed 8-bit elements
             | Int16ElemRep   -- ^ signed 16-bit elements
             | Int32ElemRep   -- ^ signed 32-bit elements
             | Int64ElemRep   -- ^ signed 64-bit elements
             | Word8ElemRep   -- ^ unsigned 8-bit elements
             | Word16ElemRep  -- ^ unsigned 16-bit elements
             | Word32ElemRep  -- ^ unsigned 32-bit elements
             | Word64ElemRep  -- ^ unsigned 64-bit elements
             | FloatElemRep   -- ^ 32-bit floating point elements
             | DoubleElemRep  -- ^ 64-bit floating point elements
-- Enum, Bounded instances in GHC.Enum
{- *********************************************************************
* *
Runtime representation of TyCon
* *
********************************************************************* -}
{- Note [Runtime representation of modules and tycons]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We generate a binding for M.$modName and M.$tcT for every module M and
data type T. Things to think about
- We want them to be economical on space; ideally pure data with no thunks.
- We do this for every module (except this module GHC.Types), so we can't
depend on anything else (eg string unpacking code)
That's why we have these terribly low-level representations. The TrName
type lets us use the TrNameS constructor when allocating static data;
but we also need TrNameD for the case where we are deserialising a TyCon
or Module (for example when deserialising a TypeRep), in which case we
can't conveniently come up with an Addr#.
-}
#include "MachDeps.h"
-- | Runtime identity of a module: the package it belongs to plus the
-- module name.  See Note [Runtime representation of modules and tycons].
data Module = Module
                TrName   -- ^ Package name
                TrName   -- ^ Module name
-- | A name used in a type representation; either static data ('Addr#')
-- or a dynamically built string.  See Note [Runtime representation of
-- modules and tycons] above for why both forms are needed.
data TrName
  = TrNameS Addr#  -- ^ Static
  | TrNameD [Char] -- ^ Dynamic
-- | A de Bruijn index for a binder within a 'KindRep'.
-- (Referenced by the 'KindRepVar' constructor below.)
type KindBndr = Int
#if WORD_SIZE_IN_BITS < 64
#define WORD64_TY Word64#
#else
#define WORD64_TY Word#
#endif
-- | The representation produced by GHC for conjuring up the kind of a
-- 'Data.Typeable.TypeRep'.
-- See Note [Representing TyCon kinds: KindRep] in TcTypeable.
data KindRep = KindRepTyConApp TyCon [KindRep]
               -- ^ A type constructor applied to kind arguments
             | KindRepVar !KindBndr
               -- ^ A kind variable, as a de Bruijn index
             | KindRepApp KindRep KindRep
               -- ^ Kind application
             | KindRepFun KindRep KindRep
               -- ^ Arrow kind
             | KindRepTYPE !RuntimeRep
               -- ^ @TYPE r@ for the given 'RuntimeRep'
             | KindRepTypeLitS TypeLitSort Addr#
               -- ^ A type-level literal, stored statically
             | KindRepTypeLitD TypeLitSort [Char]
               -- ^ A type-level literal, stored dynamically
-- | The sort of a type-level literal: a symbol (string) or a natural.
data TypeLitSort = TypeLitSymbol
                 | TypeLitNat
-- Show instance for TyCon found in GHC.Show
-- | Runtime representation of a type constructor.
data TyCon = TyCon WORD64_TY WORD64_TY -- Fingerprint
                   Module     -- Module in which this is defined
                   TrName     -- Type constructor name
                   Int#       -- How many kind variables do we accept?
                   KindRep    -- A representation of the type's kind
| sdiehl/ghc | libraries/ghc-prim/GHC/Types.hs | bsd-3-clause | 19,980 | 2 | 9 | 5,937 | 858 | 609 | 249 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances, FlexibleContexts #-}
-- | Futhark prettyprinter. This module defines 'Pretty' instances
-- for the AST defined in "Language.Futhark.Syntax" and re-exports
-- "Futhark.Util.Pretty" for convenience.
module Language.Futhark.Pretty
( module Futhark.Util.Pretty
)
where
import Data.Array
import Data.Monoid
import Data.Hashable
import Data.Word
import qualified Data.HashSet as HS
import Prelude
import Futhark.Util.Pretty
import Language.Futhark.Syntax
import Language.Futhark.Attributes
-- | Like a comma-separated list, but each document is placed on its
-- own line and the whole group is aligned.
commastack :: [Doc] -> Doc
commastack docs = align (stack (punctuate comma docs))
-- | Annotation-dependent rendering of alias information for a pattern.
-- The functor @f@ is the type-annotation functor of the AST.
class AliasAnnotation f where
  aliasComment :: (Eq vn, Pretty vn, Hashable vn) => PatternBase f vn -> Doc -> Doc
-- | Untyped programs carry no alias information, so nothing is added.
instance AliasAnnotation NoInfo where
  aliasComment _ = id
-- | Typed programs: prefix the document with one @//@ comment line per
-- pattern-bound identifier that aliases other names.
instance AliasAnnotation Info where
  aliasComment pat d = case aliasComment' pat of
    [] -> d
    l:ls -> foldl (</>) l ls </> d
    where aliasComment' Wildcard{} = []
          aliasComment' (TuplePattern pats _) = concatMap aliasComment' pats
          aliasComment' (Id ident) =
            case clean . HS.toList . aliases $ unInfo $ identType ident of
              [] -> []
              als -> [oneline $
                      text "// " <> ppr ident <> text " aliases " <>
                      commasep (map ppr als)]
            -- A name trivially aliases itself; do not report that.
            where clean = filter (/= identName ident)
                  -- Force the comment onto a single line so the // marker
                  -- cannot swallow following output.
                  oneline s = text $ displayS (renderCompact s) ""
-- | Values print like the corresponding literals.  Tuples and arrays
-- whose elements are non-primitive are stacked vertically; flat ones
-- stay on a single line.
instance Pretty Value where
  ppr (PrimValue bv) = ppr bv
  ppr (TupValue vs)
    -- Idiom fix: @not . all@ instead of @any (not . …)@ (hlint).
    | not (all (primType . valueType) vs) =
      parens $ commastack $ map ppr vs
    | otherwise =
      parens $ commasep $ map ppr vs
  ppr (ArrayValue a t)
    -- An empty array cannot show its element type via its elements,
    -- so it renders as @empty(t)@.
    | [] <- elems a = text "empty" <> parens (ppr t)
    | Array{} <- t = brackets $ commastack $ map ppr $ elems a
    | otherwise = brackets $ commasep $ map ppr $ elems a
-- | Primitive types.  Unsigned integers render with a @u@ prefix;
-- signed integers and floats delegate to their own instances.
instance Pretty PrimType where
  ppr (Unsigned it) = text $ case it of
                               Int8 -> "u8"
                               Int16 -> "u16"
                               Int32 -> "u32"
                               Int64 -> "u64"
  ppr (Signed t) = ppr t
  ppr (FloatType t) = ppr t
  ppr Bool = text "bool"
-- | Primitive values.  Unsigned integers are reinterpreted via the
-- matching 'Word' type (so negative internal representations print as
-- their unsigned value) and suffixed with their type.
instance Pretty PrimValue where
  ppr (UnsignedValue (Int8Value v)) =
    text (show (fromIntegral v::Word8)) <> text "u8"
  ppr (UnsignedValue (Int16Value v)) =
    text (show (fromIntegral v::Word16)) <> text "u16"
  ppr (UnsignedValue (Int32Value v)) =
    text (show (fromIntegral v::Word32)) <> text "u32"
  ppr (UnsignedValue (Int64Value v)) =
    text (show (fromIntegral v::Word64)) <> text "u64"
  ppr (SignedValue v) = ppr v
  ppr (BoolValue b) = text $ show b
  ppr (FloatValue v) = ppr v
-- | Tuple-element types with explicit shape declarations.
instance (Eq vn, Hashable vn, Pretty vn) =>
         Pretty (TupleArrayElemTypeBase ShapeDecl as vn) where
  ppr (PrimArrayElem bt _ u) = ppr u <> ppr bt
  ppr (ArrayArrayElem at) = ppr at
  ppr (TupleArrayElem ts) = parens $ commasep $ map ppr ts
-- | Tuple-element types with rank-only shape information.
instance (Eq vn, Hashable vn, Pretty vn) =>
         Pretty (TupleArrayElemTypeBase Rank as vn) where
  ppr (PrimArrayElem bt _ u) = ppr u <> ppr bt
  ppr (ArrayArrayElem at) = ppr at
  ppr (TupleArrayElem ts) = parens $ commasep $ map ppr ts
-- | Array types with shape declarations.  Each dimension wraps the
-- document in brackets; named and constant sizes appear after a comma.
instance (Eq vn, Hashable vn, Pretty vn) =>
         Pretty (ArrayTypeBase ShapeDecl as vn) where
  ppr (PrimArray et (ShapeDecl ds) u _) =
    ppr u <> foldl f (ppr et) ds
    where f s AnyDim = brackets s
          f s (NamedDim v) = brackets $ s <> comma <> ppr v
          f s (ConstDim n) = brackets $ s <> comma <> ppr n
  ppr (TupleArray et (ShapeDecl ds) u) =
    ppr u <> foldl f (parens $ commasep $ map ppr et) ds
    where f s AnyDim = brackets s
          f s (NamedDim v) = brackets $ s <> comma <> ppr v
          f s (ConstDim n) = brackets $ s <> comma <> ppr n
-- | Rank-only array types: one pair of empty brackets per dimension.
instance (Eq vn, Hashable vn, Pretty vn) => Pretty (ArrayTypeBase Rank as vn) where
  ppr (PrimArray et (Rank n) u _) =
    ppr u <> foldl (.) id (replicate n brackets) (ppr et)
  ppr (TupleArray ts (Rank n) u) =
    ppr u <> foldl (.) id (replicate n brackets)
    (parens $ commasep $ map ppr ts)
-- | Dispatch on the three top-level type forms (declared shapes).
instance (Eq vn, Hashable vn, Pretty vn) => Pretty (TypeBase ShapeDecl as vn) where
  ppr (Prim et) = ppr et
  ppr (Array at) = ppr at
  ppr (Tuple ts) = parens $ commasep $ map ppr ts
-- | Source-level (user-written) types.
instance (Eq vn, Hashable vn, Pretty vn) => Pretty (UserType vn) where
  ppr (UserPrim et _) = ppr et
  ppr (UserUnique t _) = text "*" <> ppr t
  ppr (UserArray at d _) = brackets (ppr at <> f d)
    where f AnyDim = mempty
          f (NamedDim v) = comma <+> ppr v
          f (ConstDim n) = comma <+> ppr n
  ppr (UserTuple ts _) = parens $ commasep $ map ppr ts
  ppr (UserTypeAlias name _) = ppr name
-- | Dispatch on the three top-level type forms (rank-only shapes).
instance (Eq vn, Hashable vn, Pretty vn) => Pretty (TypeBase Rank as vn) where
  ppr (Prim et) = ppr et
  ppr (Array at) = ppr at
  ppr (Tuple ts) = parens $ commasep $ map ppr ts
-- | A type declaration prints as the type the user wrote.
instance (Eq vn, Hashable vn, Pretty vn) => Pretty (TypeDeclBase f vn) where
  ppr = ppr . declaredType
-- | Parameters print as their name only; 'ppParam' adds the type.
instance Pretty vn => Pretty (ParamBase f vn) where
  ppr = ppr . paramName
-- | Identifiers print as their bare name.
instance Pretty vn => Pretty (IdentBase f vn) where
  ppr = ppr . identName
-- | Unary operators.  The word-like operators (@abs@, @signum@) carry
-- a trailing space, since they are applied like function names.
instance Pretty UnOp where
  ppr Not = text "!"
  ppr Negate = text "-"
  ppr Complement = text "~"
  ppr Abs = text "abs "
  ppr Signum = text "signum "
  ppr (ToFloat t) = ppr t
  ppr (ToSigned t) = ppr (Signed t)
  ppr (ToUnsigned t) = ppr (Unsigned t)
-- | Binary operators render as their concrete syntax.
instance Pretty BinOp where
  ppr Plus = text "+"
  ppr Minus = text "-"
  ppr Pow = text "**"
  ppr Times = text "*"
  ppr Divide = text "/"
  ppr Mod = text "%"
  ppr Quot = text "//"
  ppr Rem = text "%%"
  ppr ShiftR = text ">>"
  ppr ZShiftR = text ">>>"
  ppr ShiftL = text "<<"
  ppr Band = text "&"
  ppr Xor = text "^"
  ppr Bor = text "|"
  ppr LogAnd = text "&&"
  ppr LogOr = text "||"
  ppr Equal = text "=="
  ppr NotEqual = text "!="
  ppr Less = text "<"
  ppr Leq = text "<="
  -- BUG FIX: 'Greater' previously printed as ">=", which is the
  -- rendering of 'Geq', so pretty-printed programs changed meaning.
  ppr Greater = text ">"
  ppr Geq = text ">="
-- | Does the expression syntactically contain an array literal
-- (possibly nested inside tuple literals or literal values)?
hasArrayLit :: ExpBase ty vn -> Bool
hasArrayLit e = case e of
  ArrayLit{}     -> True
  TupLit elems _ -> any hasArrayLit elems
  Literal val _  -> hasArrayVal val
  _              -> False
-- | Does the value contain an array, directly or inside a tuple?
hasArrayVal :: Value -> Bool
hasArrayVal v = case v of
  ArrayValue{} -> True
  TupValue vs  -> any hasArrayVal vs
  _            -> False
-- | Expressions print with a precedence parameter: 'pprPrec' receives
-- the precedence of the enclosing context and adds parentheses where
-- required ('BinOp' goes through 'prettyBinOp'; 'LetPat' uses @-1@ as
-- the "top level, no parentheses" marker).
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (ExpBase ty vn) where
  ppr = pprPrec (-1)
  pprPrec _ (Var v) = ppr v
  pprPrec _ (Literal v _) = ppr v
  pprPrec _ (TupLit es _)
    | any hasArrayLit es = parens $ commastack $ map ppr es
    | otherwise = parens $ commasep $ map ppr es
  pprPrec _ (Empty (TypeDecl t _) _) =
    text "empty" <> parens (ppr t)
  pprPrec _ (ArrayLit es _ _) =
    brackets $ commasep $ map ppr es
  pprPrec p (BinOp bop x y _ _) = prettyBinOp p bop x y
  pprPrec _ (UnOp op e _) = ppr op <+> pprPrec 9 e
  pprPrec _ (If c t f _ _) = text "if" <+> ppr c </>
                             text "then" <+> align (ppr t) </>
                             text "else" <+> align (ppr f)
  pprPrec _ (Apply fname args _ _) = text (longnameToString fname) <>
                                     apply (map (align . ppr . fst) args)
  pprPrec p (LetPat pat e body _) =
    aliasComment pat $ mparens $ align $
    text "let" <+> align (ppr pat) <+>
    (if linebreak
     then equals </> indent 2 (ppr e)
     else equals <+> align (ppr e)) <+> text "in" </>
    ppr body
    where mparens = if p == -1 then id else parens
          -- "Big" right-hand sides (SOACs, loops, nested lets, ifs)
          -- go on their own indented line.
          linebreak = case e of
                        Map{} -> True
                        Reduce{} -> True
                        Filter{} -> True
                        Scan{} -> True
                        DoLoop{} -> True
                        LetPat{} -> True
                        LetWith{} -> True
                        Literal ArrayValue{} _ -> False
                        If{} -> True
                        ArrayLit{} -> False
                        _ -> hasArrayLit e
  pprPrec _ (LetWith dest src idxs ve body _)
    -- In-place update of a name by itself has a shorter form.
    | dest == src =
      text "let" <+> ppr dest <+> list (map ppr idxs) <+>
      equals <+> align (ppr ve) <+>
      text "in" </> ppr body
    | otherwise =
      text "let" <+> ppr dest <+> equals <+> ppr src <+>
      text "with" <+> brackets (commasep (map ppr idxs)) <+>
      text "<-" <+> align (ppr ve) <+>
      text "in" </> ppr body
  pprPrec _ (Index e idxs _) =
    pprPrec 9 e <> brackets (commasep (map ppr idxs))
  pprPrec _ (TupleIndex e i _ _) =
    pprPrec 9 e <> text "." <> ppr i
  pprPrec _ (Iota e _) = text "iota" <> parens (ppr e)
  pprPrec _ (Size i e _) =
    text "size@" <> ppr i <> apply [ppr e]
  pprPrec _ (Replicate ne ve _) =
    text "replicate" <> apply [ppr ne, align (ppr ve)]
  pprPrec _ (Reshape shape e _) =
    text "reshape" <> apply [apply (map ppr shape), ppr e]
  pprPrec _ (Rearrange perm e _) =
    text "rearrange" <> apply [apply (map ppr perm), ppr e]
  pprPrec _ (Transpose e _) =
    text "transpose" <> apply [ppr e]
  pprPrec _ (Rotate d x e _) =
    text "rotate@" <> ppr d <> apply [ppr x, ppr e]
  pprPrec _ (Map lam a _) = ppSOAC "map" [lam] [a]
  pprPrec _ (Reduce Commutative lam e a _) = ppSOAC "reduceComm" [lam] [e, a]
  pprPrec _ (Reduce Noncommutative lam e a _) = ppSOAC "reduce" [lam] [e, a]
  pprPrec _ (Stream form lam arr _) =
    case form of
      MapLike o ->
        let ord_str = if o == Disorder then "Per" else ""
        in text ("streamMap"++ord_str) <>
           parens ( ppList [lam] </> commasep [ppr arr] )
      RedLike o comm lam0 acc ->
        let ord_str = if o == Disorder then "Per" else ""
            comm_str = case comm of Commutative -> "Comm"
                                    Noncommutative -> ""
        in text ("streamRed"++ord_str++comm_str) <>
           parens ( ppList [lam0, lam] </> commasep [ppr acc, ppr arr] )
      Sequential acc ->
        text "streamSeq" <>
        parens ( ppList [lam] </> commasep [ppr acc, ppr arr] )
  pprPrec _ (Scan lam e a _) = ppSOAC "scan" [lam] [e, a]
  pprPrec _ (Filter lam a _) = ppSOAC "filter" [lam] [a]
  pprPrec _ (Partition lams a _) = ppSOAC "partition" lams [a]
  pprPrec _ (Zip es _) = text "zip" <> apply (map (ppr . fst) es)
  pprPrec _ (Unzip e _ _) = text "unzip" <> parens (ppr e)
  pprPrec _ (Unsafe e _) = text "unsafe" <+> pprPrec 9 e
  pprPrec _ (Split i e a _) =
    text "split@" <> ppr i <> apply [ppr e, ppr a]
  pprPrec _ (Concat i x y _) =
    text "concat" <> text "@" <> ppr i <> apply [ppr x, ppr y]
  pprPrec _ (Copy e _) = text "copy" <> parens (ppr e)
  pprPrec _ (DoLoop pat initexp form loopbody letbody _) =
    aliasComment pat $
    text "loop" <+> parens (ppr pat <+> equals <+> ppr initexp) <+> equals <+>
    ppr form <+>
    text "do" </>
    indent 2 (ppr loopbody) <+> text "in" </>
    ppr letbody
  pprPrec _ (Write i v a _) = text "write" <> parens (commasep [ppr i, ppr v, ppr a])
-- | Loop headers: up-counting and down-counting @for@, and @while@.
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (LoopFormBase ty vn) where
  ppr (For FromUpTo lbound i ubound) =
    text "for" <+> align (ppr lbound) <+> ppr i <+> text "<" <+> align (ppr ubound)
  ppr (For FromDownTo lbound i ubound) =
    text "for" <+> align (ppr ubound) <+> ppr i <+> text ">" <+> align (ppr lbound)
  ppr (While cond) =
    text "while" <+> ppr cond
-- | Patterns: single names, tuples of patterns, and wildcards.
instance (Eq vn, Hashable vn, Pretty vn) => Pretty (PatternBase ty vn) where
  ppr (Id ident) = ppr ident
  ppr (TuplePattern pats _) = parens $ commasep $ map ppr pats
  ppr (Wildcard _ _) = text "_"
-- | Functional arguments to SOACs: anonymous functions, (partially
-- applied) named functions, and (curried) operator sections.
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (LambdaBase ty vn) where
  ppr (CurryFun fname [] _ _) = text $ longnameToString fname
  ppr (CurryFun fname curryargs _ _) =
    text (longnameToString fname) <+> apply (map ppr curryargs)
  ppr (AnonymFun params body rettype _) =
    text "fn" <+> ppr rettype <+>
    apply (map ppParam params) <+>
    text "=>" </> indent 2 (ppr body)
  ppr (UnOpFun unop _ _ _) =
    ppr unop
  ppr (BinOpFun binop _ _ _ _) =
    ppr binop
  ppr (CurryBinOpLeft binop x _ _ _) =
    ppr x <+> ppr binop
  ppr (CurryBinOpRight binop x _ _ _) =
    ppr binop <+> ppr x
-- | A program is its declarations, separated by blank lines.
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (ProgBase ty vn) where
  ppr = stack . punctuate line . map ppr . progDecs
-- | Top-level declarations dispatch to their specific printers.
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (DecBase ty vn) where
  ppr (FunOrTypeDec dec) = ppr dec
  ppr (SigDec sig) = ppr sig
  ppr (ModDec modd) = ppr modd
-- | Module definitions: name followed by the contained declarations.
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (ModDefBase ty vn) where
  ppr (ModDef name moddecls _) =
    ppr name <+> ppList moddecls
-- | Either a function definition or a type abbreviation.
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (FunOrTypeDecBase ty vn) where
  ppr (FunDec fun) = ppr fun
  ppr (TypeDec tp) = ppr tp
-- | Type abbreviations: @type name = t@.
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (TypeDefBase ty vn) where
  ppr (TypeDef name usertype _) =
    text "type" <+> text (nameToString name) <+>
    equals </> ppr usertype
-- | Function definitions.  Entry points use the @entry@ keyword
-- instead of @fun@.
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (FunDefBase ty vn) where
  ppr (FunDef entry (name, _) rettype args body _) =
    text fun <+> ppr rettype <+>
    text (nameToString name) <//>
    apply (map ppParam args) <+>
    equals </> indent 2 (ppr body)
    where fun | entry = "entry"
              | otherwise = "fun"
-- | Signature definitions: @sig name = decls@.
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (SigDefBase ty vn) where
  ppr (SigDef name sigdecls _) =
    text "sig" <+> text (nameToString name) <+>
    equals <+> ppList sigdecls
-- | Declarations inside a signature: type or function signatures.
instance (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) => Pretty (SigDeclBase ty vn) where
  ppr (TypeSig tpsig) = ppr tpsig
  ppr (FunSig name params rettype) =
    text (nameToString name) <+> ppList params <+> ppr rettype
-- | Print a parameter as its declared type followed by its name.
ppParam :: (Eq vn, Hashable vn, Pretty vn) => ParamBase t vn -> Doc
ppParam param = ppr (paramDeclaredType param) <+> ppr param
-- | Precedence-aware printing of a binary application.  The result is
-- parenthesised when the enclosing precedence @p@ exceeds that of the
-- operator.  'rprecedence' forces parentheses around right operands of
-- the non-associative '-' and '/'.
prettyBinOp :: (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty) =>
               Int -> BinOp -> ExpBase ty vn -> ExpBase ty vn -> Doc
prettyBinOp p bop x y = parensIf (p > precedence bop) $
                        pprPrec (precedence bop) x <+/>
                        ppr bop <+>
                        pprPrec (rprecedence bop) y
  where precedence LogAnd = 0
        precedence LogOr = 0
        precedence Band = 1
        precedence Bor = 1
        precedence Xor = 1
        precedence Equal = 2
        precedence NotEqual = 2
        precedence Less = 2
        precedence Leq = 2
        precedence Greater = 2
        precedence Geq = 2
        precedence ShiftL = 3
        precedence ShiftR = 3
        precedence ZShiftR = 3
        precedence Plus = 4
        precedence Minus = 4
        precedence Times = 5
        precedence Divide = 5
        precedence Mod = 5
        precedence Quot = 5
        precedence Rem = 5
        precedence Pow = 6
        rprecedence Minus = 10
        rprecedence Divide = 10
        rprecedence op = precedence op
-- | Print a SOAC application: name, functional arguments, then
-- remaining (array/accumulator) arguments.
ppSOAC :: (Eq vn, Hashable vn, Pretty vn, AliasAnnotation ty, Pretty fn) =>
          String -> [fn] -> [ExpBase ty vn] -> Doc
ppSOAC name funs es =
  text name <> parens (ppList funs </>
                       commasep (map ppr es))
-- | Print each element followed by a comma, stacking elements
-- vertically; the empty list yields the empty document.
ppList :: (Pretty a) => [a] -> Doc
ppList as = case map ppr as of
              [] -> empty
              a':as' -> foldl (</>) (a' <> comma) $ map (<> comma) as'
| mrakgr/futhark | src/Language/Futhark/Pretty.hs | bsd-3-clause | 15,308 | 0 | 17 | 4,412 | 6,708 | 3,261 | 3,447 | 359 | 24 |
{-# LANGUAGE OverloadedStrings #-}
-- | View library.
module HL.View
(module HL.View
,module V)
where
import HL.Foundation as V (Route(..), App, Human(..), Slug(..))
import HL.Static as V
import HL.Types as C
import Control.Monad as V
import Data.Text as V (Text)
import Lucid as V
import Lucid.Bootstrap as V
import Yesod.Lucid as V
-- | Placeholder element: renders its content in a @<div class="muted">@,
-- marking parts of a page that are not finished yet.
todo :: Term a r => a -> r
todo = termWith "div" [class_ "muted"]
-- | A view: HTML generation with read access to the current 'Page'
-- (parameterised over the route type @y@).
type View y a = HtmlT (Reader (Page y)) a
| haskell-lang/haskell-lang | src/HL/View.hs | bsd-3-clause | 454 | 0 | 9 | 89 | 164 | 105 | 59 | 15 | 1 |
------------------------------------------------------------------------------
-- | Chart Base
------------------------------------------------------------------------------
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
module Graphics.HSD3.Chart.Base where
import Graphics.HSD3.D3
import Graphics.HSD3.Theme
------------------------------------------------------------------------------
-- | A chart is a graph computation whose state is the theme state.
type Chart a b = GraphT ThemeSt a b
-- | A chart bundled with the theme used to render it.
data ThemeChart a b = ThemeChart (Theme a) (Chart a b)
-- | Rendering wraps the chart in the theme's prelude and renders it
-- with the theme's initial state.
instance Render ThemeChart where
    render dat (ThemeChart theme graph) =
        renderGraph (themeSt theme) dat (withPrelude theme graph)
------------------------------------------------------------------------------
| Soostone/hs-d3 | src/Graphics/HSD3/Chart/Base.hs | bsd-3-clause | 869 | 0 | 8 | 106 | 119 | 71 | 48 | 13 | 0 |
module BEST where
import Data.List
import Data
-- | Core expression language.
data Expression
  = VariableExpression VarID
  | LiteralExpression Literal
  | ApplicationExpression Expression Expression
  -- Case e of [alternatives]
  | CaseExpression Expression [Case]
  -- LambdaExpression x (VarExp x) = \x -> x
  | LambdaExpression VarID Expression
  -- Type-annotated expression, e.g. asdf :: Something
  | TypedExpression Expression CoreType
  -- Polymorphic capital lambda argument
  | TypeExpression CoreType deriving Show
-- | Types of the core language.
data CoreType
  = TypeVariableType VarID
  | ApplicationType CoreType CoreType
  | FunctionType CoreType CoreType
  | ForAllType VarID CoreType
  -- Placeholder used before type inference has filled in a type.
  | UnknownType deriving Show
-- | LiteralType TypeLiteral
-- GHC.TypeLit BS
-- data TypeLiteral = NumericTypeLiteral Integer | StringTypeLiteral String
-- | Concrete-syntax rendering of types.
instance Pretty CoreType where
  pretty (TypeVariableType s) = pretty s
  -- The right-hand side of an application is parenthesised when
  -- compound, e.g. @List (List a)@.
  pretty (ApplicationType a b) = pretty a ++ " " ++ tpretty b
  -- Removed a redundant clause that matched @FunctionType a
  -- b@(FunctionType _ _)@ with an identical right-hand side.
  -- NOTE(review): a function-typed *argument* is not parenthesised, so
  -- @(a -> b) -> c@ prints the same as @a -> b -> c@ -- confirm intent.
  pretty (FunctionType a b) = pretty a ++ " -> " ++ pretty b
  pretty (ForAllType v t) = "forall " ++ pretty v ++ ". " ++ pretty t
  pretty UnknownType = "UNINFERENCED TYPE"
-- | Like 'pretty', but wraps anything other than a bare type variable
-- in parentheses (used for application arguments).
tpretty :: CoreType -> String
tpretty t@(TypeVariableType _) = pretty t
tpretty t = "(" ++ pretty t ++ ")"
-- | Concrete-syntax rendering of expressions; compound subterms are
-- parenthesised via 'vpretty'.
instance Pretty Expression where
  pretty (VariableExpression v) = pretty v
  pretty (LiteralExpression l) = pretty l
  pretty (CaseExpression e cs) = "case " ++ pretty e ++ " of\n  " ++ (intercalate "\n  " . map pretty $ cs)
  pretty (ApplicationExpression a b) = pretty a ++ " " ++ vpretty b
  pretty (LambdaExpression a e) = "\\" ++ pretty a ++ ". " ++ vpretty e
  pretty (TypeExpression t) = "{TYPE " ++ pretty t ++ "}"
  pretty (TypedExpression e t) = vpretty e ++ " :: " ++ pretty t
-- | Like 'pretty', but wraps anything other than a bare variable or
-- literal in parentheses.
vpretty :: Expression -> String
vpretty e = case e of
  VariableExpression _ -> pretty e
  LiteralExpression _  -> pretty e
  _                    -> "(" ++ pretty e ++ ")"
-- | One case alternative: the pattern to match, the variables bound in
-- that pattern (x,xs for (Cons x xs)), and the result expression.
data Case = Case Pattern [VarID] Expression deriving Show
-- | Alternatives render as @pattern -> expression@; the bound-variable
-- list is not shown.
instance Pretty Case where
  pretty (Case p _ e) = pretty p ++ " -> " ++ pretty e
-- | Patterns: constructor application, variable, literal, or default.
data Pattern = DataPattern VarID [Pattern] | VariablePattern VarID | LitPattern Literal | DefaultPattern deriving Show
-- | Constructor patterns are parenthesised together with their
-- sub-patterns; the default case prints as @DEFAULT@.
instance Pretty Pattern where
  pretty (DataPattern v ps) = "(" ++ pretty v ++ " " ++ (unwords . map pretty $ ps) ++ ")"
  pretty (VariablePattern v) = pretty v
  pretty (LitPattern l) = pretty l
  pretty DefaultPattern = "DEFAULT"
-- | A name: its string, its namespace, and its (possibly unknown) type.
data VarID = VarID String Namespace CoreType deriving Show
-- | Only the bare name is shown; namespace and type are suppressed.
-- The commented-out variant also renders the type, for debugging.
instance Pretty VarID where
  --pretty (VarID a _ t) = "[" ++ a ++ " :: " ++ pretty t ++ "]"
  -- Fix: the type component was bound but unused (-Wall warning).
  pretty (VarID a _ _) = a
-- | Namespaces separate type/value names from their constructors.
data Namespace = Type | Value | TypeCons | ValueCons deriving Show
-- | Human-readable namespace labels (capitalised for constructors).
instance Pretty Namespace where
  pretty Type = "type"
  pretty Value = "value"
  pretty TypeCons = "Type"
  pretty ValueCons = "Value"
-- | Literals of the four primitive kinds.
data Literal
  = StringLiteral String
  | CharLiteral Char
  | IntLiteral Integer
  | FracLiteral Double deriving Show
-- | Strings and characters render quoted (via 'show'); numbers go
-- through their own 'Pretty' instances.
instance Pretty Literal where
  pretty (StringLiteral s) = show s
  pretty (CharLiteral c) = show c
  pretty (IntLiteral i) = pretty i
  pretty (FracLiteral d) = pretty d
{-
- case (Cons 1 Nil) of
- (Cons x xs) -> 1
- Nil -> 0
-}
-- | Hand-built AST corresponding to
-- @case (Cons 1 Nil) of (Cons x xs) -> 1; Nil -> 0@.
testCase :: Expression
testCase =
  CaseExpression
    (ApplicationExpression
      (ApplicationExpression
        (VariableExpression (VarID "Cons" ValueCons cons))
        (LiteralExpression (IntLiteral 1))
      )
      (VariableExpression (VarID "Nil" ValueCons list))
    )
    [Case
      (DataPattern
        (VarID "Cons" ValueCons cons)
        [VariablePattern (VarID "x" Value (TypeVariableType (VarID "a" Type UnknownType)))
        ,VariablePattern (VarID "xs" Value list)
        ]
      )
      [VarID "x" Value (TypeVariableType (VarID "a" Type UnknownType)), VarID "xs" Value list]
      (LiteralExpression (IntLiteral 1))
    ,Case
      (DataPattern (VarID "Nil" ValueCons list) [])
      []
      (LiteralExpression (IntLiteral 0))
    ]
  where
    -- @List a@ and the type of @Cons :: a -> List a -> List a@.
    list = ApplicationType (TypeVariableType (VarID "List" TypeCons UnknownType)) (TypeVariableType (VarID "a" Type UnknownType))
    cons = FunctionType (TypeVariableType (VarID "a" Type UnknownType)) (FunctionType list list)
| Lazersmoke/rhc | src/Old/BEST.hs | bsd-3-clause | 4,249 | 0 | 17 | 915 | 1,315 | 669 | 646 | 88 | 1 |
{-# LANGUAGE RankNTypes, TypeFamilies #-}
{- |
This module provides functions creating Reform using Hamlet markup.
This module assumes that you wish for text based controls such as 'inputText' and 'textarea' to using 'String' values. If you prefer 'Data.Text.Text' see "Text.Reform.Hamlet.Text".
-}
module Text.Reform.Hamlet.String
( -- * \<input\> element
inputText
, inputPassword
, inputSubmit
, inputReset
, inputHidden
, inputButton
, inputCheckbox
, inputCheckboxes
, inputRadio
, inputFile
-- * \<textarea\> element
, textarea
-- * \<button\> element
, buttonSubmit
, buttonReset
, button
-- * \<select\> element
, select
, selectMultiple
-- * \<label\> element
, label
-- * errors
, errorList
, childErrorList
-- * layout functions
, br
, fieldset
, ol
, ul
, li
, form
) where
import Data.Text.Lazy (Text, pack)
import Text.Blaze (ToMarkup(..))
import Text.Reform
import qualified Text.Reform.Hamlet.Common as C
import Text.Hamlet (HtmlUrl)
-- | Create an @\<input type=\"text\"\>@ element
inputText :: (Monad m, FormInput input, FormError error, ErrorInputType error ~ input) =>
String -- ^ initial value
-> Form m input error (HtmlUrl url) () String
inputText initialValue = C.inputText getInputString initialValue
-- | Create an @\<input type=\"password\"\>@ element
inputPassword :: (Monad m, FormInput input, FormError error, ErrorInputType error ~ input) =>
Form m input error (HtmlUrl url) () String
inputPassword = C.inputPassword getInputString ""
-- | Create an @\<input type=\"submit\"\>@ element
--
-- returns:
--
-- [@Just@ /value/] if this button was used to submit the form.
--
-- [@Nothing@] if this button was not used to submit the form.
inputSubmit :: (Monad m, FormInput input, FormError error, ErrorInputType error ~ input) =>
String -- ^ @value@ attribute. Used for button label, and value if button is submitted.
-> Form m input error (HtmlUrl url) () (Maybe String)
inputSubmit initialValue = C.inputSubmit getInputString initialValue
-- | Create an @\<input type=\"reset\"\>@ element
--
-- This element does not add any data to the form data set.
inputReset :: (Monad m, FormInput input, FormError error, ErrorInputType error ~ input) =>
String -- ^ value attribute. Used only to label the button.
-> Form m input error (HtmlUrl url) () ()
inputReset = C.inputReset
-- | Create an @\<input type=\"hidden\"\>@ element
inputHidden :: (Monad m, FormInput input, FormError error, ErrorInputType error ~ input) =>
String -- ^ value to store in the hidden element
-> Form m input error (HtmlUrl url) () String
inputHidden initialValue = C.inputHidden getInputString initialValue
-- | Create an @\<input type=\"button\"\>@ element
--
-- The element is a push button with a text label. The button does nothing by default, but actions can be added using javascript. This element does not add any data to the form data set.
--
-- see also: 'C.button'
inputButton :: (Monad m, FormInput input, FormError error, ErrorInputType error ~ input) =>
String -- ^ value attribute. Used to label the button.
-> Form m input error (HtmlUrl url) () ()
inputButton label = C.inputButton label
-- | Create a \<textarea\>\<\/textarea\> element
textarea :: (Monad m, FormInput input, FormError error, ErrorInputType error ~ input) =>
Int -- ^ cols
-> Int -- ^ rows
-> String -- ^ initial contents
-> Form m input error (HtmlUrl url) () String
textarea rows cols initialValue = C.textarea getInputString rows cols initialValue
-- | create a @\<button type=\"submit\"\>\<\/button\>@ element
buttonSubmit :: ( Monad m, FormError error, FormInput input, ErrorInputType error ~ input, ToMarkup children) =>
String -- ^ value attribute. Returned if this button submits the form.
-> children -- ^ children to embed in the \<button\>
-> Form m input error (HtmlUrl url) () (Maybe String)
buttonSubmit = C.buttonSubmit getInputString
--------------------------------------------------------------------------------
-- re-exports from .Common. In theory we could just put the docs in .Common,
-- but, currently HSX strips them out.
-- | Create a single @\<input type=\"checkbox\"\>@ element
--
-- returns a 'Bool' indicating if it was checked or not.
--
-- see also 'inputCheckboxes'
inputCheckbox :: forall x error input m url. (Monad m, FormInput input, FormError error, ErrorInputType error ~ input) =>
Bool -- ^ initially checked
-> Form m input error (HtmlUrl url) () Bool
inputCheckbox = C.inputCheckbox
-- | Create a group of @\<input type=\"checkbox\"\>@ elements
--
inputCheckboxes :: (Functor m, Monad m, FormError error, ErrorInputType error ~ input, FormInput input, ToMarkup lbl) =>
[(a, lbl)] -- ^ (value, label)
-> (a -> Bool) -- ^ function which marks if a value should be checked (aka, selected) initially or not. Can match zero or more elements.
-> Form m input error (HtmlUrl url) () [a]
inputCheckboxes = C.inputCheckboxes
-- | Create a group of @\<input type=\"radio\"\>@ elements
inputRadio :: (Functor m, Monad m, FormError error, ErrorInputType error ~ input, FormInput input, ToMarkup lbl) =>
[(a, lbl)] -- ^ (value, label)
-> (a -> Bool) -- ^ predicate which returns @True@ if @a@ should be initially checked. Must match exactly one value in the previous argument.
-> Form m input error (HtmlUrl url) () a
inputRadio = C.inputRadio
-- | Create an @\<input type=\"file\"\>@ element
--
-- This control may succeed even if the user does not actually select a file to upload. In that case the uploaded name will likely be \"\" and the file contents will be empty as well.
inputFile :: (Monad m, FormError error, FormInput input, ErrorInputType error ~ input) =>
Form m input error (HtmlUrl url) () (FileType input)
inputFile = C.inputFile
-- | create a @\<button type=\"reset\"\>\<\/button\>@ element
--
-- This element does not add any data to the form data set.
buttonReset :: (Monad m, FormError error, ToMarkup children) =>
children -- ^ children of the @<\/button\>@ element
-> Form m input error (HtmlUrl url) () ()
buttonReset = C.buttonReset
-- | create a @\<button type=\"button\"\>\<\/button\>@ element
--
-- This element does not add any data to the form data set.
button :: ( Monad m, FormError error, FormInput input, ErrorInputType error ~ input, ToMarkup children) =>
children -- ^ children to embed in the \<button\>
-> Form m input error (HtmlUrl url) () ()
button = C.button
-- | create @\<select\>\<\/select\>@ element plus its @\<option\>\<\/option\>@ children.
--
-- see also: 'selectMultiple'
select :: (Functor m, Monad m, FormError error, ErrorInputType error ~ input, FormInput input, ToMarkup lbl) =>
[(a, lbl)] -- ^ (value, label)
-> (a -> Bool) -- ^ specifies which value is initially selected. Must match *exactly one* element in the list of choices
-> Form m input error (HtmlUrl url) () a
select = C.select
-- | create @\<select multiple=\"multiple\"\>\<\/select\>@ element plus its @\<option\>\<\/option\>@ children.
--
-- This creates a @\<select\>@ element which allows more than one item to be selected.
selectMultiple :: (Functor m, Monad m, FormError error, ErrorInputType error ~ input, FormInput input, ToMarkup lbl) =>
[(a, lbl)] -- ^ (value, label)
-> (a -> Bool) -- ^ specifies which values are initially selected. Can match 0 or more elements.
-> Form m input error (HtmlUrl url) () [a]
selectMultiple = C.selectMultiple
-- | create a @\<label\>@ element.
--
-- Use this with <++ or ++> to ensure that the @for@ attribute references the correct @id@.
--
-- > label "some input field: " ++> inputText ""
label :: (Monad m, ToMarkup c) =>
c
-> Form m input error (HtmlUrl url) () ()
label = C.label
-- | create a @\<ul\>@ which contains all the errors related to the 'Form'.
--
-- The @<\ul\>@ will have the attribute @class=\"reform-error-list\"@.
errorList :: (Monad m, ToMarkup error) =>
Form m input error (HtmlUrl url) () ()
errorList = C.errorList
-- | create a @\<ul\>@ which contains all the errors related to the 'Form'.
--
-- Includes errors from children of the current form.
--
-- The @<\ul\>@ will have the attribute @class=\"reform-error-list\"@.
childErrorList :: (Monad m, ToMarkup error) =>
Form m input error (HtmlUrl url) () ()
childErrorList = C.childErrorList
-- | create a @\<br\>@ tag.
br :: (Monad m) => Form m input error (HtmlUrl url) () ()
br = C.br
-- | wrap a @\<fieldset class=\"reform\"\>@ around a 'Form'
--
fieldset :: (Monad m, Functor m, ToMarkup c) =>
Form m input error c proof a
-> Form m input error (HtmlUrl url) proof a
fieldset = C.fieldset
-- | wrap an @\<ol class=\"reform\"\>@ around a 'Form'
ol :: (Monad m, Functor m, ToMarkup c) =>
Form m input error c proof a
-> Form m input error (HtmlUrl url) proof a
ol = C.ol
-- | wrap a @\<ul class=\"reform\"\>@ around a 'Form'
ul :: (Monad m, Functor m, ToMarkup c) =>
Form m input error c proof a
-> Form m input error (HtmlUrl url) proof a
ul = C.ul
-- | wrap a @\<li class=\"reform\"\>@ around a 'Form'
li :: (Monad m, Functor m, ToMarkup c) =>
Form m input error c proof a
-> Form m input error (HtmlUrl url) proof a
li = C.li
-- | create @\<form action=action method=\"POST\" enctype=\"multipart/form-data\"\>@
form :: ToMarkup action =>
action -- ^ action url
-> [(Text, Text)] -- ^ extra hidden fields to add to form
-> (HtmlUrl url) -- ^ children
-> (HtmlUrl url)
form = C.form
| Happstack/reform-hamlet | Text/Reform/Hamlet/String.hs | bsd-3-clause | 10,063 | 0 | 11 | 2,328 | 2,095 | 1,142 | 953 | 136 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Control.Concurrent.Actor
( ActorId
, ActorWorld
, Behavior
, create
, createIO
, wait
, send
, receive
, receiveMaybe
, receiveUntil
, getSelf
, liftIO
) where
import Control.Concurrent (forkIO)
import Control.Concurrent.STM
( TQueue, newTQueueIO, readTQueue, tryReadTQueue, writeTQueue
, newTVarIO, readTVar, writeTVar, retry, atomically
)
import Control.Applicative (Applicative)
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Reader (ReaderT (..), MonadReader (..))
-- | An actor's mailbox is an STM queue of its message type.
type MailBox a = TQueue a
-- | The address of an actor accepting messages of type @r@: its
-- mailbox.
newtype ActorId r = ActorId
    { mailBox :: MailBox r
    }
-- | The computation an actor runs: 'IO' with read access to the
-- actor's own 'ActorId' (see 'getSelf').
newtype ActorWorld r a = ActorWorld
    { unWorld :: ReaderT (ActorId r) IO a
    } deriving
    ( Functor
    , Applicative
    , Monad
    , MonadIO
    , MonadReader (ActorId r)
    )
-- | A behaviour maps an initial argument to the actor's computation.
type Behavior a b = a -> ActorWorld b ()
-- | Spawn a new actor running the given behaviour on its own thread,
-- returning its address.
createIO :: Behavior a r -> a -> IO (ActorId r)
createIO bdef a = do
    mbox <- newTQueueIO
    -- The ThreadId is deliberately discarded: actors are addressed
    -- only through their mailbox.  (Also silences -Wunused-do-bind.)
    _ <- forkIO $ execWorld (ActorId mbox) $ bdef a
    return $ ActorId mbox
  where
    -- Run an ActorWorld computation with the given actor id in scope.
    -- (Renamed from the misleading "mbox": the argument is an ActorId.)
    execWorld actorId world =
        runReaderT (unWorld world) actorId
-- | Like 'createIO', but callable from within another actor.
create :: Behavior a r -> a -> ActorWorld b (ActorId r)
create bdef initial = liftIO (createIO bdef initial)
-- | Deliver a message to the given actor's mailbox (never blocks).
send :: ActorId a -> a -> ActorWorld b ()
send target msg = liftIO (atomically (writeTQueue (mailBox target) msg))
-- | Block until a message arrives in our own mailbox, and return it.
receive :: ActorWorld r r
receive = do
    self <- ask
    liftIO (atomically (readTQueue (mailBox self)))
-- | Non-blocking receive: 'Nothing' when the mailbox is empty.
receiveMaybe :: ActorWorld r (Maybe r)
receiveMaybe = do
    self <- ask
    liftIO (atomically (tryReadTQueue (mailBox self)))
-- | Keep receiving until a message satisfies the predicate.  Messages
-- that do not match are re-sent to our own mailbox, so they end up
-- behind any messages arriving in the meantime: reception order is not
-- preserved for skipped messages, and this spins if no matching
-- message ever arrives.
receiveUntil :: (r -> Bool) -> ActorWorld r r
receiveUntil p = do
    r <- receive
    if p r
        then return r
        else do
            self <- getSelf
            send self r
            receiveUntil p
-- | The id (mailbox) of the actor currently executing.
getSelf :: ActorWorld r (ActorId r)
getSelf = ask
-- | Run a behaviour as an actor and block until it has finished, by
-- watching a completion flag the actor raises on exit.
wait :: Behavior a r -> a -> IO ()
wait bdef a = do
    done <- newTVarIO False
    -- The spawned actor's id is not needed; only the flag matters.
    _ <- createIO after done
    waitDone done
  where
    -- Run the behaviour, then raise the completion flag.
    after done = do
        bdef a
        liftIO $ atomically $ writeTVar done True
    -- Block (via STM retry) until the flag becomes True.
    -- (Replaced a @case@ on 'Bool' with the idiomatic @if@.)
    waitDone done = atomically $ do
        finished <- readTVar done
        if finished then return () else retry
| amutake/simple-actor | src/Control/Concurrent/Actor.hs | bsd-3-clause | 2,040 | 0 | 14 | 478 | 737 | 385 | 352 | 72 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module OpenRTB.Types.Enum.AdPositionSpec where
import Control.Applicative
import Data.Aeson
import Data.Aeson.TH
import Test.Hspec
import Test.QuickCheck
import Test.Instances
import OpenRTB.Types.Enum.AdPosition
-- | Minimal wrapper record used to round-trip 'AdPosition' through its
-- derived JSON instances.
data Mock = Mock { ap :: AdPosition } deriving (Eq, Show)
-- Derive ToJSON/FromJSON for the wrapper via Template Haskell.
$(deriveJSON defaultOptions ''Mock)
main :: IO ()
main = hspec spec
-- | JSON round-trip property: decoding an encoded 'Mock' yields the
-- original value.
spec :: Spec
spec = describe "AdPosition" $
  context "JSON" $
    it "should convert back and forth" $ property $
      \mock -> (decode . encode) mock == Just (mock :: Mock)
-- | Generate a 'Mock' by wrapping an arbitrary 'AdPosition'.
instance Arbitrary Mock where
  arbitrary = Mock <$> arbitrary
| ankhers/openRTB-hs | spec/OpenRTB/Types/Enum/AdPositionSpec.hs | bsd-3-clause | 652 | 0 | 18 | 111 | 195 | 106 | 89 | 21 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Network.NTP.ConfigFinder (findConfig) where
import System.Directory (doesFileExist)
#ifdef mingw32_HOST_OS
import Control.Exception (IOException, handle, bracket)
import Control.Monad (when)
import Data.List (find, tails, isPrefixOf)
import Foreign (withForeignPtr, nullPtr, castPtr, peek)
import Foreign.Marshal (alloca, allocaArray0, maybeWith)
import System.Win32 (HKEY, hKEY_LOCAL_MACHINE)
import System.Win32 (rEG_SZ, rEG_MULTI_SZ, rEG_EXPAND_SZ)
import System.Win32 (regOpenKey, regCloseKey, c_RegQueryValueEx)
import System.Win32 (withTString, peekTString, failUnlessSuccess)
#endif
------------------------------------------------------------------------
-- | Locate the platform's ntp.conf, failing loudly when the candidate
-- path does not actually exist on disk.
findConfig :: IO FilePath
findConfig = do
    candidate <- findPossibleConfig
    exists <- doesFileExist candidate
    if not exists
        then error $ "findConfig: " ++ candidate ++ " does not exist"
        else return candidate
#ifdef mingw32_HOST_OS
findPossibleConfig :: IO FilePath
findPossibleConfig = do
cmd <- handleIOError getServiceCmd
case findConf cmd of
Just conf -> return conf
Nothing -> error $ "findConfig: could not find 'ntp.conf' location\n"
++ "NTP service command: " ++ cmd
where
getServiceCmd = bracket (regOpenKey hive key) regCloseKey
(flip regQueryValueString value)
hive = hKEY_LOCAL_MACHINE
key = "SYSTEM\\CurrentControlSet\\Services\\ntp"
value = Just "ImagePath"
findConf :: String -> Maybe String
findConf = fmap (takePath . drop 3) . find ("-c " `isPrefixOf`) . tails
takePath ('"':xs) = takeWhile (/= '"') xs
takePath xs = takeWhile (/= ' ') xs
handleIOError = handle $ \(e :: IOException) ->
error $ "findConfig: could not read NTP service details\n" ++ show e
-- TODO [ Submit bug report to Win32 package ]
-- TODO 'System.Win32.Registry.regQueryValue' throws if the key type is
-- TODO not REG_SZ, even though REG_MULTI_SZ and REG_EXPAND_SZ are also
-- TODO valid string types. This function fixes that problem.
regQueryValueString :: HKEY -> Maybe String -> IO String
regQueryValueString key mb_subkey =
withForeignPtr key $ \ p_key ->
maybeWith withTString mb_subkey $ \ c_subkey ->
alloca $ \ p_ty ->
alloca $ \ p_value_len -> do
failUnlessSuccess "RegQueryValue" $
c_RegQueryValueEx p_key c_subkey nullPtr p_ty nullPtr p_value_len
ty <- peek p_ty
when (ty `notElem` [rEG_SZ, rEG_MULTI_SZ, rEG_EXPAND_SZ]) $
ioError $ userError $ "RegQueryValue: expected value to be a string "
++ "(REG_SZ, REG_MULTI_SZ or REG_EXPAND_SZ)"
value_len <- peek p_value_len
allocaArray0 (fromIntegral value_len) $ \ c_value -> do
failUnlessSuccess "RegQueryValue" $
c_RegQueryValueEx p_key c_subkey nullPtr p_ty c_value p_value_len
peekTString (castPtr c_value)
#else
-- | On non-Windows platforms the NTP configuration conventionally lives
-- at a fixed, well-known path.
findPossibleConfig :: IO FilePath
findPossibleConfig = return "/etc/ntp.conf"
#endif
| jystic/ntpmon | src/Network/NTP/ConfigFinder.hs | bsd-3-clause | 3,020 | 0 | 22 | 635 | 669 | 358 | 311 | 12 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveGeneric #-}
module Gelatin.GL.Renderer (
-- * Renderer
GLRenderer,
Context(..),
-- * Loading textures
loadTexture,
loadTextureUnit,
unloadTexture,
loadImageAsTexture,
-- * Line rendering
filledPolylineRenderer,
-- * Triangle rendering
colorRenderer,
textureRenderer,
textureUnitRenderer,
filledTriangleRenderer,
-- * Bezier rendering
colorBezRenderer,
textureBezRenderer,
textureBezUnitRenderer,
filledBezierRenderer,
-- * Font rendering
fontyData,
loadFont,
filledFontRenderer,
fontCurves,
fontGeom,
-- * Masking
maskRenderer,
stencilMask,
alphaMask,
-- * Transforming a rendering
transformRenderer,
-- * Utils
toTexture,
toTextureUnit,
clipTexture
) where
import Gelatin.GL.Shader
import Gelatin.GL.Common
import Gelatin.Picture
import Linear hiding (trace)
import Graphics.Text.TrueType
import Graphics.GL.Core33
import Graphics.GL.Types
import Codec.Picture.Types
import Codec.Picture (decodeImage, readImage)
import Foreign.Marshal.Array
import Foreign.Marshal.Utils
import Foreign.C.String
import Foreign.Storable
import Foreign.Ptr
import Data.ByteString (ByteString)
import Data.Hashable
import Data.Renderable
import Data.Monoid
import Data.Maybe
import Data.Vector.Storable (Vector,unsafeWith)
import qualified Data.Vector.Unboxed as UV
import Control.Monad
import Control.Applicative
import System.Directory
import System.IO
import System.Exit
import qualified Data.ByteString.Char8 as B
import qualified Data.Foldable as F
import GHC.Stack
import GHC.Generics
--------------------------------------------------------------------------------
-- GLRenderers
--------------------------------------------------------------------------------
expandPolyline :: [V2 Float] -> Float -> Float
-> Maybe ([V2 Float], [V2 Float], [V2 Float], [V2 Float], Float)
expandPolyline verts thickness feather
| (v1:v2:_) <- verts =
let v3:v3n:_ = reverse verts
-- clamp the lower bound of our thickness to 1
absthick = max thickness 1
d = fromIntegral (ceiling $ absthick + 2.5 * feather :: Integer)
lens = 0 : zipWith distance verts (drop 1 verts)
totalLen = sum lens
totalEnd = totalLen + d
seqfunc (total,ts) len = (total + len,ts ++ [total + len])
seqLens = snd $ foldl seqfunc (0,[]) lens
isClosed = distance v1 v3 <= 0.00001
-- if the polyline is closed return a miter with the last point
startCap = ([cap,cap], uvs, [v2,v2],[prev,prev])
where (uvs,cap,prev) = if isClosed
-- no cap
then ([V2 0 d, V2 0 (-d)],v1,v3n)
-- cap
else let c = d *^ signorm (v2 - v1)
in ([V2 (-d) d, V2 (-d) (-d)],v1 - c, v1 - 2*c)
endCap = ([cap,cap], uvs,[next,next],[v3n,v3n])
where (uvs,cap,next) = if isClosed
-- no cap
then ([V2 totalLen d, V2 totalLen (-d)], v3, v2)
-- cap
else let c = d *^ signorm (v3 - v3n)
in ([V2 totalEnd d, V2 totalEnd (-d)], v3 + c, v3 + 2*c)
vcs = zip verts seqLens :: [(V2 Float, Float)]
tris = startCap : zipWith3 strip vcs (drop 1 vcs) (drop 2 vcs)
++ [endCap]
-- Expand the line into a triangle strip
strip (a,_) (b,l) (c,_) = ([b,b],[V2 l d,V2 l (-d)],[c,c],[a,a])
vs = concatMap (\(a,_,_,_) -> a) tris
us = concatMap (\(_,a,_,_) -> a) tris
ns = concatMap (\(_,_,a,_) -> a) tris
ps = concatMap (\(_,_,_,a) -> a) tris
in Just (vs, us, ns, ps, totalLen)
| otherwise = Nothing
-- | Creates and returns a renderer that renders an expanded 2d polyline
-- projected in 3d space. The polyline is expanded into a triangle strip
-- by 'expandPolyline'; the fill decides whether per-vertex colors or
-- texture UVs are uploaded.
filledPolylineRenderer :: Context -> ProjectedPolylineShader -> Fill
                       -> Float -> Float -> (LineCap,LineCap) -> [V2 Float]
                       -> IO GLRenderer
filledPolylineRenderer win psh fill thickness feather caps verts = do
    let empty = do putStrLn "could not expand polyline"
                   return emptyRenderer
        mpoly = expandPolyline verts thickness feather
    flip (maybe empty) mpoly $ \(vs_,us_,ns_,ps_,totalLen) -> do
        -- Resolve the fill's texture up front (if any).
        mtex <- case fill of
            FillColor{} -> return Nothing
            FillTexture bstr _ -> decodeImageAsTexture bstr
            FillTextureFile fp _ -> loadImageAsTexture fp
        let vToGL :: Foldable f => [f Float] -> [GLfloat]
            vToGL = map realToFrac . concatMap F.toList
            vs = vToGL vs_
            us = vToGL us_
            ns = vToGL ns_
            ps = vToGL ps_
            PPRS src = psh
        withVAO $ \vao -> withBuffers 5 $ \bufs@[vbuf, cbuf, buvbuf, nbuf, pbuf] -> do
            let commonLocs = [PositionLoc, BezUVLoc, NextLoc, PrevLoc]
                colorLocs = ColorLoc : commonLocs
                uvLocs = UVLoc : commonLocs
                -- Upload the attributes shared by every fill, running the
                -- fill-specific upload in between.
                buffer f = do
                    bufferAttrib PositionLoc 2 vbuf vs
                    f
                    bufferAttrib BezUVLoc 2 buvbuf us
                    bufferAttrib NextLoc 2 nbuf ns
                    bufferAttrib PrevLoc 2 pbuf ps
            hasUV <- case fill of
                FillColor f -> do
                    onlyEnableAttribs colorLocs
                    buffer $ bufferAttrib ColorLoc 4 cbuf $ vToGL $ map f vs_
                    return False
                FillTexture _ f -> do
                    onlyEnableAttribs uvLocs
                    buffer $ bufferAttrib UVLoc 2 cbuf $ vToGL $ map f vs_
                    return True
                -- BUGFIX: 'FillTextureFile' previously fell through this
                -- case (non-exhaustive pattern), crashing at runtime for
                -- file-backed texture fills. It buffers UVs exactly like
                -- an in-memory texture fill.
                FillTextureFile _ f -> do
                    onlyEnableAttribs uvLocs
                    buffer $ bufferAttrib UVLoc 2 cbuf $ vToGL $ map f vs_
                    return True
            glBindVertexArray 0
            let num = fromIntegral $ length vs_
                r t = do let mv = modelviewProjection t
                         pj <- orthoContextProjection win
                         updateUniforms [ UniformProjection pj
                                        , UniformModelView mv
                                        , UniformThickness thickness
                                        , UniformFeather feather
                                        , UniformSumLength totalLen
                                        , UniformLineCaps caps
                                        , UniformHasUV hasUV
                                        , UniformSampler 0
                                        ] src
                         drawBuffer (shProgram src) vao GL_TRIANGLE_STRIP num
                c = do withArray bufs $ glDeleteBuffers 5
                       withArray [vao] $ glDeleteVertexArrays 1
            case (hasUV, mtex) of
                (True, Just tx) -> return (c, bindTexAround tx . r)
                (True, Nothing) -> do putStrLn "Could not creat a filledPolylineRenderer"
                                      empty
                (False,_) -> return (c,r)
-- | Creates and returns a renderer that renders a given string of
-- triangles with the given filling.
filledTriangleRenderer :: Context -> GeomShader -> [Triangle (V2 Float)]
-> Fill -> IO GLRenderer
filledTriangleRenderer win gsh ts (FillColor f) = do
let vs = trisToComp ts
-- If we can't find a color in the color map we'll just use
-- transparent black.
cs = map f vs
colorRenderer win gsh GL_TRIANGLES vs cs
filledTriangleRenderer win gsh ts (FillTextureFile fp f) =
loadImageAsTexture fp >>= texFilledTriangleRenderer win gsh ts f
filledTriangleRenderer win gsh ts (FillTexture bstr f) =
decodeImageAsTexture bstr >>= texFilledTriangleRenderer win gsh ts f
texFilledTriangleRenderer :: Context -> GeomShader -> [Triangle (V2 Float)]
-> (V2 Float -> V2 Float) -> Maybe GLuint -> IO GLRenderer
texFilledTriangleRenderer win gsh ts f mtex = case mtex of
Just tx -> do
let vs = trisToComp ts
-- If we can't find a uv in the uv map we'll just use
-- 0,0
uvs = map f vs
(c, r) <- textureRenderer win gsh GL_TRIANGLES vs uvs
let r' t = bindTexAround tx $ r t
return (c, r')
_ -> do putStrLn "Could not create a filledTriangleRenderer."
return (return (), const $ putStrLn "Non op renderer.")
-- | Run an IO action with the given texture bound to texture unit zero,
-- unbinding GL_TEXTURE_2D again afterwards.
bindTexAround :: GLuint -> IO () -> IO ()
bindTexAround tex action =
  glActiveTexture GL_TEXTURE0 >>
  glBindTexture GL_TEXTURE_2D tex >>
  action >>
  glBindTexture GL_TEXTURE_2D 0
--------------------------------------------------------------------------------
-- Font decomposition into triangles and beziers
--------------------------------------------------------------------------------
-- | Ephemeral types for creating polygons from font outlines.
-- Fonty gives us a [[Vector (Float, Float)]] for an entire string, which
-- breaks down to
type Contour = [Bezier (V2 Float)] -- Beziers
type CharacterOutline = [Contour]
type StringOutline = [CharacterOutline]
deriving instance Generic FontStyle
instance Hashable FontStyle
deriving instance Generic FontDescriptor
instance Hashable FontDescriptor
-- | Provide a FontData for a given FontyFruity TrueType Font. Hashing
-- and showing both go through the font's descriptor.
fontyData :: Font -> FontData
fontyData font = FontData { fontStringCurves = fontCurves font
                          , fontStringGeom = fontGeom font
                          , fontHash = \s -> hashWithSalt s $ descriptorOf font
                          , fontShow = show $ descriptorOf font
                          }
-- | Load a TrueType font from disk, wrapping it as 'FontData'; a 'Left'
-- carries the load error message (mirrors 'loadFontFile').
loadFont :: FilePath -> IO (Either String FontData)
loadFont fp = fmap fontyData <$> loadFontFile fp
stringOutline :: Font -> Int -> Float -> String -> StringOutline
stringOutline font dpi px str = beziers cs
where sz = pixelSizeInPointAtDpi px dpi
cs = getStringCurveAtPoint dpi (0,0) [(font, sz, str)]
fontGeom :: Font -> Int -> Float -> String -> ([Bezier (V2 Float)], [Triangle (V2 Float)])
fontGeom font dpi px str =
let bs = stringOutline font dpi px str
ts = concatMap (concatMap (concaveTriangles . onContourPoints)) bs
in (concat $ concat bs,ts)
fontCurves :: Font -> Int -> Float -> String -> [[[QuadraticBezier (V2 Float)]]]
fontCurves font dpi px str =
let bs = stringOutline font dpi px str
in fmap (fmap (fmap (\(Bezier _ a b c) -> bez3 a b c))) bs
fromFonty :: (UV.Unbox b1, Functor f1, Functor f) => ([V2 b1] -> b) -> f (f1 (UV.Vector (b1, b1))) -> f (f1 b)
fromFonty f = fmap $ fmap $ f . UV.toList . UV.map (uncurry V2)
beziers :: [[UV.Vector (Float, Float)]] -> StringOutline
beziers = fromFonty (toBeziers . fmap (fmap realToFrac))
-- | Turns a polygon into a triangle fan anchored at its first point, for
-- rendering with the Concave Polygon Stencil Test.
-- @see http://www.glprogramming.com/red/chapter14.html#name13
concaveTriangles :: [a] -> [Triangle a]
concaveTriangles [] = []
concaveTriangles (anchor:rest) = fan rest
  where fan (x:y:more) = Triangle anchor x y : fan (y:more)
        fan _ = []
-- | Collects the points that lie directly on the contour of the font
-- outline. For an on-curve ('LT') bezier all three points are kept;
-- otherwise only the two endpoints are.
onContourPoints :: [Bezier a] -> [a]
onContourPoints = concatMap contour
  where contour (Bezier LT a b c) = [a, b, c]
        contour (Bezier _ a _ c) = [a, c]
-- | Creates and returns a renderer that renders some text with a font.
-- The glyph interiors (triangles) are drawn through a stencil mask, then
-- the curved edges are drawn as beziers on top.
filledFontRenderer :: Context -> GeomShader -> BezShader
                   -> FontData -> Int -> Float -> String
                   -> Fill -> IO GLRenderer
filledFontRenderer window gsh brs fd dpi px str fill = do
    let (bs,ts) = fontStringGeom fd dpi px str
    (cg,fg) <- filledTriangleRenderer window gsh ts fill
    (cb,fb) <- filledBezierRenderer window brs bs fill
    -- Draw the triangles into the stencil and again through it (the
    -- concave-polygon stencil trick), then the beziers on top.
    let s t = stencilMask (fg t) (fg t)
        gs t = s t >> fb t
    return (cg >> cb,gs)
-- | Creates and returns a renderer that renders the given colored
-- geometry.
colorRenderer :: Context -> GeomShader -> GLuint -> [V2 Float]
-> [V4 Float] -> IO GLRenderer
colorRenderer window gsh mode vs gs = do
let (GRS src) = gsh
srcs = [src]
withVAO $ \vao -> withBuffers 2 $ \[pbuf,cbuf] -> do
let ps = map realToFrac $ concatMap F.toList vs :: [GLfloat]
cs = map realToFrac $ concatMap F.toList $ take (length vs) gs :: [GLfloat]
onlyEnableAttribs [PositionLoc, ColorLoc]
bufferAttrib PositionLoc 2 pbuf ps
bufferAttrib ColorLoc 4 cbuf cs
glBindVertexArray 0
let num = fromIntegral $ length vs
renderFunction t = do
let mv = modelviewProjection t
pj <- orthoContextProjection window
updateUniforms [UniformHasUV False
,UniformProjection pj
,UniformModelView mv
] src
drawBuffer (shProgram src) vao mode num
cleanupFunction = do
withArray [pbuf, cbuf] $ glDeleteBuffers 2
withArray [vao] $ glDeleteVertexArrays 1
return (cleanupFunction,renderFunction)
-- | Creates and returns a renderer that renders a textured
-- geometry using the texture bound to GL_TEXTURE0.
textureRenderer :: Context -> GeomShader -> GLuint -> [V2 Float]
-> [V2 Float] -> IO GLRenderer
textureRenderer = textureUnitRenderer Nothing
-- | Creates and returns a renderer that renders the given textured
-- geometry using the specified texture binding.
textureUnitRenderer :: Maybe GLint -> Context -> GeomShader -> GLuint
-> [V2 Float] -> [V2 Float] -> IO GLRenderer
textureUnitRenderer Nothing w gs md vs uvs =
textureUnitRenderer (Just 0) w gs md vs uvs
textureUnitRenderer (Just u) win gsh mode vs uvs = do
let (GRS src) = gsh
srcs = [src]
withVAO $ \vao -> withBuffers 2 $ \[pbuf,cbuf] -> do
let f xs = map realToFrac $ concatMap F.toList xs :: [GLfloat]
ps = f vs
cs = f $ take (length vs) uvs
onlyEnableAttribs [PositionLoc, UVLoc]
bufferAttrib PositionLoc 2 pbuf ps
bufferAttrib UVLoc 2 cbuf cs
glBindVertexArray 0
let num = fromIntegral $ length vs
renderFunction tfrm = do
let mv = modelviewProjection tfrm
pj <- orthoContextProjection win
updateUniforms [UniformHasUV True
,UniformSampler $ fromIntegral u
,UniformProjection pj
,UniformModelView mv
] src
drawBuffer (shProgram src) vao mode num
cleanupFunction = do
withArray [pbuf, cbuf] $ glDeleteBuffers 2
withArray [vao] $ glDeleteVertexArrays 1
return (cleanupFunction,renderFunction)
-- | Creates and returns a renderer that renders the given colored beziers.
colorBezRenderer :: Context -> BezShader -> [Bezier (V2 Float)]
-> [Triangle (V4 Float)] -> IO GLRenderer
colorBezRenderer window (BRS src) bs ts =
withVAO $ \vao -> withBuffers 3 $ \[pbuf, tbuf, cbuf] -> do
let vs = concatMap (\(Bezier _ a b c) -> [a,b,c]) bs
cvs = concatMap (\(Triangle a b c) -> [a,b,c]) $ take (length bs) ts
ps = map realToFrac $ concatMap F.toList vs :: [GLfloat]
cs = map realToFrac $ concatMap F.toList cvs :: [GLfloat]
ws = concatMap (\(Bezier w _ _ _) -> let w' = fromBool $ w == LT
in [ 0, 0, w'
, 0.5, 0, w'
, 1, 1, w'
])
bs :: [GLfloat]
onlyEnableAttribs [PositionLoc, BezLoc, ColorLoc]
bufferAttrib PositionLoc 2 pbuf ps
bufferAttrib BezLoc 3 tbuf ws
bufferAttrib ColorLoc 4 cbuf cs
glBindVertexArray 0
let cleanupFunction = do
withArray [pbuf, tbuf, cbuf] $ glDeleteBuffers 3
withArray [vao] $ glDeleteVertexArrays 1
num = fromIntegral $ length vs
renderFunction t = do
pj <- orthoContextProjection window
let mv = modelviewProjection t
updateUniforms [UniformHasUV False
,UniformProjection pj
,UniformModelView mv
] src
drawBuffer (shProgram src) vao GL_TRIANGLES num
return (cleanupFunction,renderFunction)
-- | Creates and returns a renderer that renders the given textured beziers.
textureBezUnitRenderer :: Maybe GLint -> Context -> BezShader
-> [Bezier (V2 Float)] -> [Triangle (V2 Float)] -> IO GLRenderer
textureBezUnitRenderer Nothing window sh bs ts =
textureBezUnitRenderer (Just 0) window sh bs ts
textureBezUnitRenderer (Just u) window (BRS src) bs ts =
withVAO $ \vao -> withBuffers 3 $ \[pbuf, uvbuf, tbuf] -> do
let vs = concatMap (\(Bezier _ a b c) -> [a,b,c]) bs
uvs = concatMap (\(Triangle a b c) -> [a,b,c]) $ take (length bs) ts
f = map realToFrac . concatMap F.toList
uvs' = f uvs :: [GLfloat]
ps = f vs :: [GLfloat]
ws = concatMap (\(Bezier w _ _ _) -> let w' = fromBool $ w == LT
in [ 0, 0, w'
, 0.5, 0, w'
, 1, 1, w'
])
bs :: [GLfloat]
onlyEnableAttribs [PositionLoc, UVLoc, BezLoc]
bufferAttrib PositionLoc 2 pbuf ps
bufferAttrib UVLoc 2 uvbuf uvs'
bufferAttrib BezLoc 3 tbuf ws
glBindVertexArray 0
let cleanupFunction = do
withArray [pbuf, tbuf, uvbuf] $ glDeleteBuffers 3
withArray [vao] $ glDeleteVertexArrays 1
num = fromIntegral $ length vs
renderFunction t = do
let mv = modelviewProjection t
pj <- orthoContextProjection window
updateUniforms [UniformProjection pj
,UniformModelView mv
,UniformHasUV True
,UniformSampler $ fromIntegral u
] src
drawBuffer (shProgram src) vao GL_TRIANGLES num
return (cleanupFunction,renderFunction)
-- | Creates and returns a renderer that renders textured beziers using the
-- texture bound to GL_TEXTURE0.
textureBezRenderer :: Context -> BezShader -> [Bezier (V2 Float)]
-> [Triangle (V2 Float)] -> IO GLRenderer
textureBezRenderer = textureBezUnitRenderer Nothing
-- | Creates and returns a renderer that renders a given string of
-- triangles with the given filling.
filledBezierRenderer :: Context -> BezShader -> [Bezier (V2 Float)] -> Fill
-> IO GLRenderer
filledBezierRenderer win sh bs (FillColor f) = do
let ts = map (\(Bezier _ a b c) -> f <$> Triangle a b c) bs
colorBezRenderer win sh bs ts
filledBezierRenderer win sh bs (FillTexture bstr f) =
decodeImageAsTexture bstr >>= texFilledBezierRenderer win sh bs f
filledBezierRenderer win sh bs (FillTextureFile fp f) =
loadImageAsTexture fp >>= texFilledBezierRenderer win sh bs f
-- | Build a texture-filled bezier renderer once the texture has (maybe)
-- been loaded; falls back to a logged no-op renderer when loading
-- failed. Signature added for consistency with
-- 'texFilledTriangleRenderer' (it was previously inferred).
texFilledBezierRenderer :: Context -> BezShader -> [Bezier (V2 Float)]
                        -> (V2 Float -> V2 Float) -> Maybe GLuint -> IO GLRenderer
texFilledBezierRenderer win sh bs f mtex = do
  let ts = map (\(Bezier _ a b c) -> f <$> Triangle a b c) bs
  case mtex of
    Just tx -> do (c,r) <- textureBezRenderer win sh bs ts
                  let r' t = bindTexAround tx $ r t
                  return (c, r')
    Nothing -> do putStrLn "Could not create a filledBezRenderer."
                  return (return (), const $ putStrLn "Non op renderer.")
-- | Creates and returns a renderer that masks a textured rectangular area with
-- another texture.
maskRenderer :: Context -> MaskShader -> GLuint -> [V2 Float]
-> [V2 Float] -> IO GLRenderer
maskRenderer win (MRS src) mode vs uvs =
withVAO $ \vao -> withBuffers 2 $ \[pbuf, uvbuf] -> do
let vs' = map realToFrac $ concatMap F.toList vs :: [GLfloat]
uvs' = map realToFrac $ concatMap F.toList uvs :: [GLfloat]
onlyEnableAttribs [PositionLoc, UVLoc]
bufferAttrib PositionLoc 2 pbuf vs'
bufferAttrib UVLoc 2 uvbuf uvs'
glBindVertexArray 0
let cleanup = do withArray [pbuf, uvbuf] $ glDeleteBuffers 2
withArray [vao] $ glDeleteVertexArrays 1
num = fromIntegral $ length vs
render t = do
let mv = modelviewProjection t
pj <- orthoContextProjection win
updateUniforms [UniformProjection pj
,UniformModelView mv
,UniformMainTex 0
,UniformMaskTex 1
] src
drawBuffer (shProgram src) vao mode num
return (cleanup,render)
-- | Creates a rendering that masks an IO () drawing computation with the alpha
-- value of another.
alphaMask :: Context -> MaskShader -> IO () -> IO () -> IO GLRenderer
alphaMask win mrs r2 r1 = do
mainTex <- toTextureUnit (Just GL_TEXTURE0) win r2
maskTex <- toTextureUnit (Just GL_TEXTURE1) win r1
(w,h) <- ctxWindowSize win
let vs = map (fmap fromIntegral) [V2 0 0, V2 w 0, V2 w h, V2 0 h]
uvs = [V2 0 1, V2 1 1, V2 1 0, V2 0 0]
(c,f) <- maskRenderer win mrs GL_TRIANGLE_FAN vs uvs
let f' _ = do glActiveTexture GL_TEXTURE0
glBindTexture GL_TEXTURE_2D mainTex
glActiveTexture GL_TEXTURE1
glBindTexture GL_TEXTURE_2D maskTex
c' = withArray [mainTex,maskTex] $ glDeleteTextures 2
f'' _ = do glActiveTexture GL_TEXTURE0
glBindTexture GL_TEXTURE_2D 0
glActiveTexture GL_TEXTURE1
glBindTexture GL_TEXTURE_2D 0
return (c >> c', \t -> f' t >> f t >> f'' t)
-- | Creates an IO () drawing computation that masks an IO () drawing
-- computation with another using a stencil test.
stencilMask :: IO () -> IO () -> IO ()
stencilMask r2 r1 = do
glClear GL_DEPTH_BUFFER_BIT
-- Enable stencil testing
glEnable GL_STENCIL_TEST
-- Disable writing frame buffer color components
glColorMask GL_FALSE GL_FALSE GL_FALSE GL_FALSE
-- Disable writing into the depth buffer
glDepthMask GL_FALSE
-- Enable writing to all bits of the stencil mask
glStencilMask 0xFF
-- Clear the stencil buffer
glClear GL_STENCIL_BUFFER_BIT
glStencilFunc GL_NEVER 0 1
glStencilOp GL_INVERT GL_INVERT GL_INVERT
r1
glColorMask GL_TRUE GL_TRUE GL_TRUE GL_TRUE
glDepthMask GL_TRUE
glStencilFunc GL_EQUAL 1 1
glStencilOp GL_ZERO GL_ZERO GL_ZERO
r2
glDisable GL_STENCIL_TEST
-- | Pre-compose a transform onto a renderer: the given transform is
-- applied before whatever transform the caller passes at draw time.
transformRenderer :: Transform -> GLRenderer -> GLRenderer
transformRenderer t (cleanup, render) = (cleanup, \t' -> render (t <> t'))
--------------------------------------------------------------------------------
-- Updating uniforms
--------------------------------------------------------------------------------
-- | Build a modelview matrix from a 2d transform, composed as
-- translate * rotate * scale with rotation about the z axis. The
-- rotation is skipped entirely when the angle is zero.
modelviewProjection :: Transform -> M44 Float
modelviewProjection (Transform (V2 x y) (V2 w h) r) =
    let sxy = V3 w h 1
        txy = V3 x y 0
        rxy = V3 0 0 1
        rot = if r /= 0 then mat4Rotate r rxy else identity
    in mat4Translate txy !*! rot !*! mat4Scale sxy
-- | An orthographic projection spanning the context's window, with the
-- origin at the top-left corner (y grows downward).
orthoContextProjection :: Context -> IO (M44 Float)
orthoContextProjection window = do
    (w, h) <- ctxWindowSize window
    return $ ortho 0 (fromIntegral w) (fromIntegral h) 0 0 1
setProjectionUniforms :: [Shader] -> Context -> Transform -> IO ()
setProjectionUniforms srcs window t = do
pj <- orthoContextProjection window
mapM_ (updateUniform $ UniformProjection pj) srcs
let mv = modelviewProjection t
mapM_ (updateUniform $ UniformModelView mv) srcs
--------------------------------------------------------------------------------
-- Working with textures.
--------------------------------------------------------------------------------
-- | Decode an in-memory image and upload it as a GL texture.
decodeImageAsTexture :: ByteString -> IO (Maybe GLuint)
decodeImageAsTexture = maybeLoadTexture . decodeImage
-- | Read an image file from disk and upload it as a GL texture.
loadImageAsTexture :: FilePath -> IO (Maybe GLuint)
loadImageAsTexture fp = readImage fp >>= maybeLoadTexture
-- | Upload a decoded image, or log the decode error and yield 'Nothing'.
maybeLoadTexture :: Either String DynamicImage -> IO (Maybe GLuint)
maybeLoadTexture = either failed loaded
  where failed err = putStrLn err >> return Nothing
        loaded img = Just <$> loadTexture img
loadTexture :: DynamicImage -> IO GLuint
loadTexture = loadTextureUnit Nothing
-- | Generate one texture object, make the given texture unit active and
-- bind the new texture to GL_TEXTURE_2D on that unit, returning the new
-- texture's name. Signature added (it was previously inferred).
allocAndActivateTex :: GLenum -> IO GLuint
allocAndActivateTex u = do
    [t] <- allocaArray 1 $ \ptr -> do
        glGenTextures 1 ptr
        peekArray 1 ptr
    glActiveTexture u
    glBindTexture GL_TEXTURE_2D t
    return t
loadTextureUnit :: Maybe GLuint -> DynamicImage -> IO GLuint
loadTextureUnit Nothing img = loadTextureUnit (Just GL_TEXTURE0) img
loadTextureUnit (Just u) img = do
t <- allocAndActivateTex u
loadJuicy img
glGenerateMipmap GL_TEXTURE_2D -- Generate mipmaps now!!!
glTexParameteri GL_TEXTURE_2D GL_TEXTURE_WRAP_S GL_REPEAT
glTexParameteri GL_TEXTURE_2D GL_TEXTURE_WRAP_T GL_REPEAT
glTexParameteri GL_TEXTURE_2D GL_TEXTURE_MAG_FILTER GL_NEAREST
glTexParameteri GL_TEXTURE_2D GL_TEXTURE_MIN_FILTER GL_NEAREST_MIPMAP_NEAREST
glBindTexture GL_TEXTURE_2D 0
return t
-- | Delete a single GL texture object.
unloadTexture :: GLuint -> IO ()
unloadTexture t = with t $ glDeleteTextures 1
loadJuicy :: DynamicImage -> IO ()
loadJuicy (ImageY8 (Image w h d)) = bufferImageData w h d GL_RED GL_UNSIGNED_BYTE
loadJuicy (ImageY16 (Image w h d)) = bufferImageData w h d GL_RED GL_UNSIGNED_SHORT
loadJuicy (ImageYF (Image w h d)) = bufferImageData w h d GL_RED GL_FLOAT
loadJuicy (ImageYA8 i) = loadJuicy $ ImageRGB8 $ promoteImage i
loadJuicy (ImageYA16 i) = loadJuicy $ ImageRGBA16 $ promoteImage i
loadJuicy (ImageRGB8 (Image w h d)) = bufferImageData w h d GL_RGB GL_UNSIGNED_BYTE
loadJuicy (ImageRGB16 (Image w h d)) = bufferImageData w h d GL_RGB GL_UNSIGNED_SHORT
loadJuicy (ImageRGBF (Image w h d)) = bufferImageData w h d GL_RGB GL_FLOAT
loadJuicy (ImageRGBA8 (Image w h d)) = bufferImageData w h d GL_RGBA GL_UNSIGNED_BYTE
loadJuicy (ImageRGBA16 (Image w h d)) = bufferImageData w h d GL_RGBA GL_UNSIGNED_SHORT
loadJuicy (ImageYCbCr8 i) = loadJuicy $ ImageRGB8 $ convertImage i
loadJuicy (ImageCMYK8 i) = loadJuicy $ ImageRGB8 $ convertImage i
loadJuicy (ImageCMYK16 i) = loadJuicy $ ImageRGB16 $ convertImage i
toTexture :: Context -> IO () -> IO GLuint
toTexture = toTextureUnit Nothing
toTextureUnit :: Maybe GLuint -> Context -> IO () -> IO GLuint
toTextureUnit Nothing win r = toTextureUnit (Just GL_TEXTURE0) win r
toTextureUnit (Just u) win r = do
[fb] <- allocaArray 1 $ \ptr -> do
glGenFramebuffers 1 ptr
peekArray 1 ptr
glBindFramebuffer GL_FRAMEBUFFER fb
t <- allocAndActivateTex u
(w,h) <- ctxWindowSize win
let [w',h'] = map fromIntegral [w,h]
initializeTexImage2D w' h'
glFramebufferTexture GL_FRAMEBUFFER GL_COLOR_ATTACHMENT0 t 0
withArray [GL_COLOR_ATTACHMENT0] $ glDrawBuffers 1
status <- glCheckFramebufferStatus GL_FRAMEBUFFER
if status /= GL_FRAMEBUFFER_COMPLETE
then putStrLn "incomplete framebuffer!"
else do glClearColor 0 0 0 0
glClear GL_COLOR_BUFFER_BIT
glViewport 0 0 w' h'
r
glBindFramebuffer GL_FRAMEBUFFER 0
with fb $ glDeleteFramebuffers 1
(fbw, fbh) <- ctxFramebufferSize win
glViewport 0 0 (fromIntegral fbw) (fromIntegral fbh)
return t
-- | Allocate uninitialized RGBA8 storage for the currently bound 2D
-- texture and set nearest-neighbor filtering. Signature added (it was
-- previously inferred).
initializeTexImage2D :: GLsizei -> GLsizei -> IO ()
initializeTexImage2D w h = do
  glTexImage2D GL_TEXTURE_2D 0 GL_RGBA w h 0 GL_RGBA GL_UNSIGNED_BYTE nullPtr
  glTexParameteri GL_TEXTURE_2D GL_TEXTURE_MAG_FILTER GL_NEAREST
  glTexParameteri GL_TEXTURE_2D GL_TEXTURE_MIN_FILTER GL_NEAREST
type ClippingArea = (V2 Int, V2 Int)
-- | Sub-samples a texture using the given coordinate box and creates a new
-- texture. Keep in mind that OpenGL texture coordinates are flipped from
-- 'normal' graphics coordinates (y = 0 is the bottom of the texture). That
-- fact has bitten the author a number of times while clipping a texture
-- created with `toTexture` and `toUnitTexture`.
clipTexture :: GLuint -> ClippingArea -> IO GLuint
clipTexture rtex (V2 x1 y1, V2 x2 y2) = do
-- Create our framebuffers
[fbread,fbwrite] <- allocaArray 2 $ \ptr -> do
glGenFramebuffers 2 ptr
peekArray 2 ptr
-- Bind our read frame buffer and attach the input texture to it
glBindFramebuffer GL_READ_FRAMEBUFFER fbread
glFramebufferTexture2D GL_READ_FRAMEBUFFER GL_COLOR_ATTACHMENT0 GL_TEXTURE_2D rtex 0
clearErrors "clipTexture bind read framebuffer"
-- Generate a new texture and bind our write framebuffer to it
[wtex] <- allocaArray 1 $ \ptr -> do
glGenTextures 1 ptr
peekArray 1 ptr
glActiveTexture GL_TEXTURE0
glBindTexture GL_TEXTURE_2D wtex
let [x1',y1',x2',y2',w',h'] = map fromIntegral
[x1,y1,x2,y2,abs $ x2 - x1
,abs $ y2 - y1]
initializeTexImage2D w' h'
glBindFramebuffer GL_DRAW_FRAMEBUFFER fbwrite
glFramebufferTexture2D GL_DRAW_FRAMEBUFFER GL_COLOR_ATTACHMENT0 GL_TEXTURE_2D wtex 0
clearErrors "clipTexture bind write framebuffer"
-- Check our frame buffer stati
forM_ [GL_READ_FRAMEBUFFER,GL_DRAW_FRAMEBUFFER] $ \fb -> do
status <- glCheckFramebufferStatus fb
when (status /= GL_FRAMEBUFFER_COMPLETE) $ do
putStrLn "incomplete framebuffer!"
exitFailure
-- Blit the read framebuffer into the write framebuffer
glBlitFramebuffer x1' y1' x2' y2' 0 0 w' h' GL_COLOR_BUFFER_BIT GL_NEAREST
clearErrors "clipTexture blit framebuffers"
-- Cleanup
glBindFramebuffer GL_FRAMEBUFFER 0
withArray [fbread,fbwrite] $ glDeleteFramebuffers 2
glBindTexture GL_TEXTURE_2D 0
return wtex
--------------------------------------------------------------------------------
-- Buffering, Vertex Array Objects, Uniforms, etc.
--------------------------------------------------------------------------------
bufferImageData :: forall a a1 a2. (Storable a2, Integral a1, Integral a) => a -> a1 -> Vector a2 -> GLenum -> GLenum -> IO ()
bufferImageData w h dat imgfmt pxfmt = unsafeWith dat $ \ptr -> do
--glTexStorage2D GL_TEXTURE_2D 1 GL_RGBA8 (fromIntegral w) (fromIntegral h)
--glTexSubImage2D GL_TEXTURE_2D 0 0 0 (fromIntegral w) (fromIntegral h) GL_RGBA GL_UNSIGNED_BYTE (castPtr ptr)
glTexImage2D
GL_TEXTURE_2D
0
GL_RGBA
(fromIntegral w)
(fromIntegral h)
0
imgfmt
pxfmt
(castPtr ptr)
err <- glGetError
when (err /= 0) $ putStrLn $ "glTexImage2D Error: " ++ show err
-- | Generate a fresh vertex array object, bind it, and run the given
-- action with its name.
--
-- NOTE(review): after running @f@ the VAO is bound *again* rather than
-- unbound with 0. Callers in this file unbind inside @f@ themselves, so
-- this leaves the VAO bound on exit — confirm whether @glBindVertexArray 0@
-- was intended here.
withVAO :: (GLuint -> IO b) -> IO b
withVAO f = do
    [vao] <- allocaArray 1 $ \ptr -> do
        glGenVertexArrays 1 ptr
        peekArray 1 ptr
    glBindVertexArray vao
    r <- f vao
    glBindVertexArray vao
    return r
-- | Generate @n@ GL buffer objects and hand their names to the action.
withBuffers :: Int -> ([GLuint] -> IO b) -> IO b
withBuffers n f = do
    buffers <- allocaArray n $ \ptr ->
        glGenBuffers (fromIntegral n) ptr >> peekArray (fromIntegral n) ptr
    f buffers
-- | Upload a list of values into the given buffer object and point the
-- given attribute location at it (@n@ components per vertex).
--
-- BUGFIX: the upload size was previously computed as
-- @length as * glFloatSize@ regardless of the actual element type; it
-- now uses the element's own 'sizeOf' (identical for the GLfloat call
-- sites in this module). Note glVertexAttribPointer still declares
-- GL_FLOAT components, so callers must pass float-sized data.
bufferAttrib :: forall a. Storable a => AttribLoc -> GLint -> GLuint -> [a] -> IO ()
bufferAttrib attr n buf as = do
    let loc = locToGLuint attr
        asize = length as * sizeOf (undefined :: a)
    glBindBuffer GL_ARRAY_BUFFER buf
    withArray as $ \ptr ->
        glBufferData GL_ARRAY_BUFFER (fromIntegral asize) (castPtr ptr) GL_STATIC_DRAW
    glEnableVertexAttribArray loc
    glVertexAttribPointer loc n GL_FLOAT GL_FALSE 0 nullPtr
-- | Draw @num@ vertices from the given VAO using the given shader
-- program and primitive mode, checking the GL error flag after the
-- bind and the draw.
drawBuffer :: GLuint  -- ^ shader program
           -> GLuint  -- ^ vertex array object
           -> GLenum  -- ^ primitive mode (e.g. GL_TRIANGLES)
           -> GLsizei -- ^ vertex count
           -> IO ()
drawBuffer program vao mode num = do
    glUseProgram program
    glBindVertexArray vao
    clearErrors "glBindVertex"
    glDrawArrays mode 0 num
    clearErrors "glDrawArrays"
-- | Abort (with a stack trace) if the GL error flag is set, tagging the
-- message with the caller-supplied context string.
-- NOTE(review): 'errorWithStackTrace' is deprecated in newer GHCs in
-- favor of 'error' with a 'HasCallStack' constraint — worth migrating.
clearErrors :: String -> IO ()
clearErrors str = do
    err' <- glGetError
    when (err' /= 0) $ errorWithStackTrace $ unwords [str, show err']
-- | Size in bytes of a single GLfloat; used when sizing buffer uploads.
glFloatSize :: Int
glFloatSize = sizeOf (0 :: GLfloat)
| cies/gelatin | gelatin-gl/src/Gelatin/GL/Renderer.hs | bsd-3-clause | 32,925 | 13 | 26 | 9,905 | 9,386 | 4,666 | 4,720 | 617 | 6 |
-- !!! Testing Trex
module TrexTest where
import Hugs.Trex
-- Two extensible-record literals with the same fields written in
-- different orders.
eg1 = (a = True, b = "Hello", c = 12::Int)
eg2 = (c = 12::Int, a = True, b = "Hello")
-- Field selection uses the #label syntax.
average r = (#x r + #y r) / 2
-- interaction with type synonyms
type AddA r = (a :: Int | r)
type AddB r = (b :: Int | r)
type ARow = AddA EmptyRow
type BRow = AddB EmptyRow
type BARow = AddB ARow
type ABRow = AddA BRow
-- The same record value typed through two synonym spellings of the row.
x = (a=2) :: Rec ARow
y = ((b=3 | x) :: Rec ABRow) :: Rec BARow
t20 = y
-- Equality between a record and one built by row extension.
t21 = y == (a=2 | (b=3))
-- Pattern matching that splits off the 'a' field from the rest.
t22 = (\(a=v|r) -> (v,r)) y
t23 = (\(a=v|r) -> (v,r)) x
| FranklinChen/Hugs | tests/tcheck/trex.hs | bsd-3-clause | 524 | 20 | 11 | 129 | 291 | 168 | 123 | -1 | -1 |
module Aws.S3.Commands
(
module Aws.S3.Commands.DeleteObject
, module Aws.S3.Commands.GetBucket
, module Aws.S3.Commands.GetObject
, module Aws.S3.Commands.GetService
, module Aws.S3.Commands.PutBucket
, module Aws.S3.Commands.PutObject
)
where
import Aws.S3.Commands.DeleteObject
import Aws.S3.Commands.GetBucket
import Aws.S3.Commands.GetObject
import Aws.S3.Commands.GetService
import Aws.S3.Commands.PutBucket
import Aws.S3.Commands.PutObject
| jgm/aws | Aws/S3/Commands.hs | bsd-3-clause | 450 | 0 | 5 | 37 | 99 | 72 | 27 | 14 | 0 |
{-# LANGUAGE FlexibleContexts, TypeFamilies #-}
{-# OPTIONS -Wall #-}
module FiniteStateMachine.Defn (
IsFSA, atEnd
) where
-- import Basic.Types
import Basic.Memory (SetLike(isElem))
import Basic.Features (HasState(..), HasTransition, HasInput,
HasFinal(..), HasQ(..))
import Control.Lens ((^.))
-- | A finite-state automaton: anything that has a state, a transition
-- relation, a set of final states and an input.
class (HasState x, HasTransition x, HasFinal x, HasInput x) => IsFSA x where

-- | True iff the machine's current state is a member of its final-state
-- set.  The constraints require only that the final states form a
-- 'SetLike' container of the machine's state labels.
atEnd :: (Eq (Q (State s)), HasState s, HasQ (State s), HasFinal s,
          SetLike t, Final s ~ t (Q (State s))) =>
         s -> Bool
atEnd mach = isElem (mach^.state^.q) (mach^.final)
| davidzhulijun/TAM | FiniteStateMachine/Defn.hs | bsd-3-clause | 565 | 0 | 12 | 98 | 230 | 129 | 101 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE StandaloneDeriving #-}
module ImageUtils where
import Codec.Picture
import Control.DeepSeq
import Data.Ord (Down (..))
import Data.Vector.Generic ((!))
import qualified Data.Vector.Generic as V
import GHC.Generics
import GHC.Word (Word32, Word8)
import Graphics.Rasterific
import Graphics.Rasterific.Texture
-- Orphan instances for JuicyPixels' 'PixelRGBA8' and Rasterific's 'V2'
-- so values can be parsed back with 'Read' and fully forced via 'NFData'.
deriving instance Read    PixelRGBA8
deriving instance Generic PixelRGBA8
instance NFData PixelRGBA8

deriving instance Read a => Read (V2 a)
deriving instance Generic (V2 a)
instance NFData Point
-- | Fitness of a candidate image against a target: the sum over all
-- pixel channels of the absolute difference between the two raw data
-- vectors, wrapped in 'Down' because a *smaller* difference means a
-- *better* (greater) fitness.
--
-- NB This is only reliable on images with fewer than 2^22 pixels
-- (2048x2048); once you get bigger than that the Word32 sum can
-- overflow.
imageFitness :: Image PixelRGBA8 -> Image PixelRGBA8 -> Down Word32
imageFitness (Image _ _ srcData) (Image _ _ tgtData) =
    Down (V.ifoldl' step 0 srcData)
  where
    -- Accumulate the channel-wise absolute difference at index i.
    step total i px = total + fromIntegral (absDiff px (tgtData ! i))
    absDiff p q
      | p > q     = p - q
      | otherwise = q - p
-- | Rasterise a drawing onto a fresh canvas of the given width and
-- height with an opaque white background.
renderWhite :: Int -> Int -> Drawing PixelRGBA8 () -> Image PixelRGBA8
renderWhite w h drawing =
  renderDrawing w h (PixelRGBA8 255 255 255 255) drawing
| mattrrichard/darwin | src/ImageUtils.hs | bsd-3-clause | 1,463 | 0 | 12 | 408 | 348 | 184 | 164 | 31 | 1 |
{-# LANGUAGE CPP, GeneralizedNewtypeDeriving #-}
-- | The main game action monad type implementation. Just as any other
-- component of the library, this implementation can be substituted.
-- This module should not be imported anywhere except in 'Action'
-- to expose the executor to any code using the library.
module Game.LambdaHack.SampleImplementation.SampleMonadServer
( executorSer
#ifdef EXPOSE_INTERNAL
-- * Internal operations
, SerImplementation
#endif
) where
import Prelude ()
import Prelude.Compat
import Control.Concurrent
import qualified Control.Exception as Ex
import qualified Control.Monad.IO.Class as IO
import Control.Monad.Trans.State.Strict hiding (State)
import qualified Data.EnumMap.Strict as EM
import Data.Maybe
import System.FilePath
import Game.LambdaHack.Atomic.BroadcastAtomicWrite
import Game.LambdaHack.Atomic.CmdAtomic
import Game.LambdaHack.Atomic.MonadAtomic
import Game.LambdaHack.Atomic.MonadStateWrite
import Game.LambdaHack.Common.MonadStateRead
import qualified Game.LambdaHack.Common.Save as Save
import Game.LambdaHack.Common.State
import Game.LambdaHack.Common.Thread
import Game.LambdaHack.Server.CommonServer
import Game.LambdaHack.Server.MonadServer
import Game.LambdaHack.Server.ProtocolServer
import Game.LambdaHack.Server.State
-- | The full state threaded through the server monad: the authoritative
-- game state, server-side bookkeeping, the client connection table and
-- the channel to the background save thread.
data SerState = SerState
  { serState  :: !State           -- ^ current global state
  , serServer :: !StateServer     -- ^ current server state
  , serDict   :: !ConnServerDict  -- ^ client-server connection information
  , serToSave :: !(Save.ChanSave (State, StateServer))
                                  -- ^ connection to the save thread
  }
-- | Server state transformation monad: a 'StateT' over 'IO' carrying
-- 'SerState'.  The Monad\/Functor\/Applicative instances are inherited
-- from the underlying transformer via newtype deriving.
newtype SerImplementation a =
    SerImplementation {runSerImplementation :: StateT SerState IO a}
  deriving (Monad, Functor, Applicative)
-- | Read-only access to the global game state held in 'serState'.
instance MonadStateRead SerImplementation where
  getState    = SerImplementation (gets serState)
  getsState f = f <$> getState
instance MonadStateWrite SerImplementation where
  -- The updated record is forced to WHNF with `seq` before being stored,
  -- so thunks do not accumulate on the global state.
  modifyState f = SerImplementation $ state $ \serS ->
    let newSerS = serS {serState = f $ serState serS}
    in newSerS `seq` ((), newSerS)
  putState s = SerImplementation $ state $ \serS ->
    let newSerS = serS {serState = s}
    in newSerS `seq` ((), newSerS)
instance MonadServer SerImplementation where
  getServer      = SerImplementation $ gets serServer
  getsServer f   = SerImplementation $ gets $ f . serServer
  -- As with the global state, updates are forced to WHNF via `seq`
  -- before being stored, to avoid thunk build-up.
  modifyServer f = SerImplementation $ state $ \serS ->
    let newSerS = serS {serServer = f $ serServer serS}
    in newSerS `seq` ((), newSerS)
  putServer s = SerImplementation $ state $ \serS ->
    let newSerS = serS {serServer = s}
    in newSerS `seq` ((), newSerS)
  liftIO = SerImplementation . IO.liftIO
  saveChanServer = SerImplementation $ gets serToSave
instance MonadServerReadRequest SerImplementation where
  getDict = SerImplementation $ gets serDict
  getsDict f = SerImplementation $ gets $ f . serDict
  -- NOTE(review): unlike 'modifyState'/'modifyServer' above, the dict
  -- update is not forced with `seq` — presumably deliberate, but worth
  -- confirming it cannot leak thunks.
  modifyDict f =
    SerImplementation $ modify $ \serS -> serS {serDict = f $ serDict serS}
  putDict s =
    SerImplementation $ modify $ \serS -> serS {serDict = s}
  liftIO = SerImplementation . IO.liftIO
-- | The game-state semantics of atomic commands as computed on the
-- server: every executed atomic command is also broadcast to the
-- clients that can observe it.
instance MonadAtomic SerImplementation where
  execAtomic = handleAndBroadcastServer
-- | Apply an atomic command on the server and send it to every client
-- that can see its effects.
handleAndBroadcastServer :: (MonadStateWrite m, MonadServerReadRequest m)
                         => CmdAtomic -> m ()
handleAndBroadcastServer atomic = do
  oldPer <- getsServer sper
  events <- getsServer (sknowEvents . sdebugSer)
  handleAndBroadcast events oldPer resetFidPerception resetLitInDungeon
                     sendUpdateAI sendUpdateUI atomic
-- | Run an action in the @IO@ monad, starting from empty game and
-- server state, wrapped in the save-thread machinery.
executorSer :: SerImplementation () -> IO ()
executorSer m = do
  let -- Path of the save file, derived from the (possibly overridden)
      -- save prefix in the server debug options.
      saveFile (_, ser) =
        fromMaybe "save" (ssavePrefixSer (sdebugSer ser))
        <.> saveName
      -- Run the action from empty state, wired to the given save channel.
      exe serToSave =
        evalStateT (runSerImplementation m)
                   SerState { serState = emptyState
                            , serServer = emptyStateServer
                            , serDict = EM.empty
                            , serToSave
                            }
      exeWithSaves = Save.wrapInSaves saveFile exe
  -- Wait for clients to exit even in case of server crash
  -- (or server and client crash), which gives them time to save
  -- and report their own inconsistencies, if any.
  -- TODO: send them a message to tell users "server crashed"
  -- and then wait for them to exit normally.
  Ex.handle (\(ex :: Ex.SomeException) -> do
               threadDelay 1000000  -- let clients report their errors
               Ex.throw ex)         -- crash eventually, which kills clients
            exeWithSaves
  waitForChildren childrenServer  -- no crash, wait for clients indefinitely
| beni55/LambdaHack | Game/LambdaHack/SampleImplementation/SampleMonadServer.hs | bsd-3-clause | 4,919 | 0 | 15 | 1,038 | 1,001 | 568 | 433 | -1 | -1 |
{-# LINE 1 "Control.Monad.ST.Lazy.Safe.hs" #-}
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.ST.Lazy.Safe
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : non-portable (requires universal quantification for runST)
--
-- This module presents an identical interface to "Control.Monad.ST",
-- except that the monad delays evaluation of state operations until
-- a value depending on them is required.
--
-- Safe API only.
--
-----------------------------------------------------------------------------
module Control.Monad.ST.Lazy.Safe {-# DEPRECATED "Safe is now the default, please use Control.Monad.ST.Lazy instead" #-} (
-- * The 'ST' monad
ST,
runST,
fixST,
-- * Converting between strict and lazy 'ST'
strictToLazyST, lazyToStrictST,
-- * Converting 'ST' To 'IO'
RealWorld,
stToIO,
) where
import Control.Monad.ST.Lazy.Imp
| phischu/fragnix | builtins/base/Control.Monad.ST.Lazy.Safe.hs | bsd-3-clause | 1,168 | 0 | 4 | 234 | 64 | 52 | 12 | 9 | 0 |
{- Language/Haskell/TH/Desugar/FV.hs
(c) Ryan Scott 2018
Compute free variables of programs.
-}
{-# LANGUAGE CPP #-}
module Language.Haskell.TH.Desugar.FV
( fvDType
, extractBoundNamesDPat
) where
#if __GLASGOW_HASKELL__ < 710
import Data.Foldable (foldMap)
#endif
#if __GLASGOW_HASKELL__ < 804
import Data.Monoid ((<>))
#endif
import Language.Haskell.TH.Syntax
import Language.Haskell.TH.Desugar.AST
import qualified Language.Haskell.TH.Desugar.OSet as OS
import Language.Haskell.TH.Desugar.OSet (OSet)
-- | Compute the free type variables of a 'DType'.  Binders introduced
-- by @forall@ telescopes are removed from the result.
fvDType :: DType -> OSet Name
fvDType (DForallT tele ty)      = fv_dtele tele (fvDType ty)
fvDType (DConstrainedT ctxt ty) = foldMap fvDType ctxt <> fvDType ty
fvDType (DAppT t1 t2)           = fvDType t1 <> fvDType t2
fvDType (DAppKindT t k)         = fvDType t <> fvDType k
fvDType (DSigT ty ki)           = fvDType ty <> fvDType ki
fvDType (DVarT n)               = OS.singleton n
fvDType (DConT {})              = OS.empty
fvDType DArrowT                 = OS.empty
fvDType (DLitT {})              = OS.empty
fvDType DWildCardT              = OS.empty
-----
-- Extracting bound term names
-----
-- | Extract the term variables bound by a 'DPat'.
--
-- This does /not/ extract any type variables bound by pattern signatures,
-- but it does include the free variables of any visible type arguments
-- in a constructor pattern.
extractBoundNamesDPat :: DPat -> OSet Name
extractBoundNamesDPat (DLitP _)          = OS.empty
extractBoundNamesDPat (DVarP n)          = OS.singleton n
extractBoundNamesDPat (DConP _ tys pats) =
  foldMap fvDType tys <> foldMap extractBoundNamesDPat pats
extractBoundNamesDPat (DTildeP p)        = extractBoundNamesDPat p
extractBoundNamesDPat (DBangP p)         = extractBoundNamesDPat p
extractBoundNamesDPat (DSigP p _)        = extractBoundNamesDPat p
extractBoundNamesDPat DWildP             = OS.empty
-----
-- Binding forms
-----
-- | Adjust the free variables of something following a
-- 'DForallTelescope': both visible and invisible telescopes delete
-- their binders from the set.
fv_dtele :: DForallTelescope -> OSet Name -> OSet Name
fv_dtele tele fvs =
  case tele of
    DForallVis   tvbs -> fv_dtvbs tvbs fvs
    DForallInvis tvbs -> fv_dtvbs tvbs fvs
-- | Adjust the free variables of something following 'DTyVarBndr's,
-- applying 'fv_dtvb' for each binder from right to left.
fv_dtvbs :: [DTyVarBndr flag] -> OSet Name -> OSet Name
fv_dtvbs []         fvs = fvs
fv_dtvbs (tvb:rest) fvs = fv_dtvb tvb (fv_dtvbs rest fvs)
-- | Adjust the free variables of something following a 'DTyVarBndr':
-- the bound name is deleted, and a kind annotation contributes its own
-- free variables.
fv_dtvb :: DTyVarBndr flag -> OSet Name -> OSet Name
fv_dtvb tvb fvs =
  case tvb of
    DPlainTV n _    -> OS.delete n fvs
    DKindedTV n _ k -> OS.delete n fvs <> fvDType k
| goldfirere/th-desugar | Language/Haskell/TH/Desugar/FV.hs | bsd-3-clause | 2,334 | 0 | 9 | 608 | 671 | 350 | 321 | 41 | 10 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.