code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Feature.RangeSpec where
import Test.Hspec
import Test.Hspec.Wai
import Network.HTTP.Types
import Network.Wai.Test (SResponse(simpleHeaders,simpleStatus))
import SpecHelper
-- | Integration spec for HTTP Range-header handling.
-- Before all tests, @items@ is cleared and re-seeded with 15 rows;
-- after all tests it is cleared again.  @/menagerie@ is used as an
-- always-empty endpoint.  Each example drives the real WAI app.
spec :: Spec
spec = beforeAll (clearTable "items" >> createItems 15) . afterAll_ (clearTable "items")
  . around withApp $
  describe "GET /items" $ do
    context "without range headers" $
      context "with response under server size limit" $
        it "returns whole range with status 200" $
          get "/items" `shouldRespondWith` 200
    context "with range headers" $ do
      context "of acceptable range" $ do
        it "succeeds with partial content" $ do
          -- a 0-1 byte-range over 15 rows yields 206 + Content-Range
          r <- request methodGet "/items"
                       (rangeHdrs $ ByteRangeFromTo 0 1) ""
          liftIO $ do
            simpleHeaders r `shouldSatisfy`
              matchHeader "Content-Range" "0-1/15"
            simpleStatus r `shouldBe` partialContent206
        it "understands open-ended ranges" $
          request methodGet "/items"
                  (rangeHdrs $ ByteRangeFrom 0) ""
            `shouldRespondWith` 200
        it "returns an empty body when there are no results" $
          -- empty table: body is the empty JSON array, total count */0
          request methodGet "/menagerie"
                  (rangeHdrs $ ByteRangeFromTo 0 1) ""
            `shouldRespondWith` ResponseMatcher {
              matchBody = Just "[]"
            , matchStatus = 200
            , matchHeaders = ["Content-Range" <:> "*/0"]
            }
        it "allows one-item requests" $ do
          -- from 0 to 0 is a single-row (inclusive) range
          r <- request methodGet "/items"
                       (rangeHdrs $ ByteRangeFromTo 0 0) ""
          liftIO $ do
            simpleHeaders r `shouldSatisfy`
              matchHeader "Content-Range" "0-0/15"
            simpleStatus r `shouldBe` partialContent206
        it "handles ranges beyond collection length via truncation" $ do
          -- requested end (100) is clamped to the last row index (14)
          r <- request methodGet "/items"
                       (rangeHdrs $ ByteRangeFromTo 10 100) ""
          liftIO $ do
            simpleHeaders r `shouldSatisfy`
              matchHeader "Content-Range" "10-14/15"
            simpleStatus r `shouldBe` partialContent206
      context "of invalid range" $ do
        it "fails with 416 for offside range" $
          -- start after end is unsatisfiable
          request methodGet "/items"
                  (rangeHdrs $ ByteRangeFromTo 1 0) ""
            `shouldRespondWith` 416
        it "refuses a range with nonzero start when there are no items" $
          request methodGet "/menagerie"
                  (rangeHdrs $ ByteRangeFromTo 1 2) ""
            `shouldRespondWith` ResponseMatcher {
              matchBody = Nothing
            , matchStatus = 416
            , matchHeaders = ["Content-Range" <:> "*/0"]
            }
        it "refuses a range requesting start past last item" $
          request methodGet "/items"
                  (rangeHdrs $ ByteRangeFromTo 100 199) ""
            `shouldRespondWith` ResponseMatcher {
              matchBody = Nothing
            , matchStatus = 416
            , matchHeaders = ["Content-Range" <:> "*/15"]
            }
| nayosx/postgrest | test/Feature/RangeSpec.hs | mit | 3,060 | 0 | 21 | 1,057 | 652 | 326 | 326 | 67 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Y2018.M06.D19.Exercise where
{--
Same, but different. Today we are going to look at the non-duplicate articles
of the Virginian-Pilot: those articles that have the same UUID but are
materially different. This time, however, these articles have their full text
and are formatted as JSON.
--}
import Data.Aeson
import Data.Map (Map)
import qualified Data.Map as M
import Data.Time
-- | Articles are keyed by an externally supplied UUID string.
type UUID = String
-- | One Virginian-Pilot article, including its full text.
-- 'updated' is 'Nothing' for articles never revised after publication.
data Article = Art { uuid :: UUID, id :: Integer, published :: Day,
                     updated :: Maybe Day, title, text :: String }
   deriving (Eq, Show)
-- | Location of today's exercise directory and the JSON input file.
exDir, artJSON :: FilePath
exDir = "Y2018/M06/D19/"
artJSON = "non-duplicate-articles.json"
-- | JSON decoder for 'Article'.
-- NOTE(review): exercise stub — the field names of the incoming JSON
-- document are not visible here, so the decoder is left 'undefined';
-- confirm the schema against the sample file before implementing.
instance FromJSON Article where
   parseJSON art = undefined
-- | Load and decode the article dump at @file@.
-- NOTE(review): exercise stub; an implementation would read the file
-- as a lazy ByteString and decode it via the 'FromJSON' instance.
readNonDuplicatedArticles :: FilePath -> IO [Article]
readNonDuplicatedArticles file = undefined
-- as before group these by article id:
-- | Bucket articles by their 'uuid', so that duplicated ids end up in
-- the same list.  Within each bucket the original input order of the
-- articles is preserved ('flip (++)' appends later arrivals).
groupArticles :: [Article] -> Map UUID [Article]
groupArticles arts = M.fromListWith (flip (++)) [ (uuid a, [a]) | a <- arts ]
-- how many UUIDs are there? How many articles are there?
{-- BONUS -----------------------------------------------------------------
Okay, this is fun. We see that some of these articles are materially different.
No duh.
Classify these articles into sets, one set being the class of UUIDs that have
materially different articles, and the other set being the class of UUIDs that
have articles that are 'kinda' the same.
Let'r rip: Bayesian analysis, clustering, cosine similarity, or the google:
'artificial' artificial intelligence (favored throughout the corporate world).
--}
data Cluster = TheStructureThatWorksForYouHere
materially :: Traversable t => Map UUID [Article] -> t Cluster
materially uuids = undefined
| geophf/1HaskellADay | exercises/HAD/Y2018/M06/D19/Exercise.hs | mit | 1,674 | 0 | 9 | 282 | 220 | 128 | 92 | 21 | 1 |
{-# LANGUAGE TupleSections #-}
import Data.List
import Control.Monad.State.Lazy
-- | Multiplicities of the prime factors appearing across 2..n: every m
-- in [2..n] is factorised, and the counts are grouped per distinct
-- prime in ascending order.
factors n = map length . group . sort $ concatMap (reduce [2..]) [2..n]

-- | Trial-division factorisation against an ascending candidate list.
-- @reduce [2..] n@ yields the prime factors of @n@ (with multiplicity,
-- ascending).  Fix: the original referenced an unbound variable @p@
-- where the matched candidate @x@ was intended, which does not compile.
reduce _ 1 = []
reduce y@(x:xs) n = case mod n x of
  0 -> x : reduce y (div n x)
  _ -> reduce xs n
--------------------------------------
-- qsort [] = []
-- qsort (x:xs) = qsort small ++ [x] ++ qsort big
-- where (small, big) = partition (< x) xs
----- ----- -----
-- choose :: Int -> [a] -> [[a]]
-- choose n xs = choose' (length xs) n xs
-- choose' :: Int -> Int -> [a] -> [[a]]
-- choose' _ 0 _ = return []
-- choose' len n (x:xs) =
-- liftM (x :) (choose' (len - 1) (n - 1) xs)
-- ++ if len <= n
-- then []
-- else choose' (len - 1) n xs
-- -- W . toSnd (!)
-- wfact :: Int -> Int -> Int
-- wfact fact n = length $ filter check $ choose n factors
-- where
-- reduced :: [Int]
-- reduced = reduceF fact
-- factors :: [[Int]]
-- factors = mapM (enumFromTo 0) reduced
-- check :: [[Int]] -> Bool
-- check = all (== 0) . foldl (zipWith (-)) reduced
-- main = print $ wfact 100 10
--------------------------------------
-- WIERD StateT _ [] IDEAS
--------------------------------------
-- lazy length check
--lazylen :: Int -> [a] -> Bool
--lazylen 0 [] = True
--lazylen _ [] = False
--lazylen n (x:xs) = lazylen (n - 1) xs
--singles :: [a] -> [(a, [a])]
--singles = singles' []
--singles' _ [] = []
--singles' past (present:future) = (present, past ++ future) : singles' (present:past) future
----allfactors :: [Int] -> [[Int]]
--allfactors = singles . mapM (enumFromTo 0)
--go 1 current _ = if all (== 0) current then return [current] else []
--go togo current bench = do
-- (this, others) <- bench
-- let new = zipWith (-) current this
-- if any (< 0) new
-- then []
-- else do
-- rest <- go (togo - 1) new $ singles others
-- return (this : rest)
--t = groupify $ reduce 144
--test = go 4 t (allfactors t)
--------------------------------------
--subj :: ([Int], [[Int]])
--subj = ([3, 2, 4], [])
----strip :: StateT ([Int], [[Int]]) [] [Int]
----strip used left = case
---- [ (current, (zipWith (-) left current, current : used))
---- | current <- mapM (enumFromTo 0) left
---- , not $ current `elem` used
---- ]
--strip :: StateT ([Int], [[Int]]) [] [Int]
--strip = StateT $ \(left, used) ->
-- [ (current, (zipWith (-) left current, current : used))
-- | current <- mapM (enumFromTo 0) left
-- , not $ current `elem` used
-- ]
--go :: StateT ([Int], [[Int]]) [] [[Int]]
--go = do
-- factor <- strip
-- (left, used) <- get
-- if all (== 0) left && elem left used
-- then return [factor]
-- else do
-- others <- go
-- return (factor : others)
--test = filter (lazylen 4) $ evalStateT go off
--off = (groupify $ reduce 144, [])
--------------------------------------
--nexts :: StateT (Int, [Int]) [] Int
--nexts = StateT $ \(curr, illegals) -> [ (new, (curr - new, new : illegals))
-- | new <- [0..curr]
-- , not $ elem new illegals
-- ]
--prepare :: [Int] -> [] (Int, [Int])
--prepare = map (, [])
--nexts :: StateT (Int, [Int]) [] Int
--nexts = StateT $ \(curr, illegals) ->
-- [ (new, (curr - new, new : illegals))
-- | new <- [0..curr]
-- , not $ elem new illegals
-- ]
--nexts :: (Int, [Int]) -> [(Int, (Int, [Int]))]
--nexts (curr, illegals) =
-- [ (new, (curr - new, new : illegals))
-- | new <- [0..curr]
-- , not $ elem new illegals
-- ]
--take1 :: StateT [(Int, [Int])] [] [Int]
--take1 = StateT $ map unzip . mapM nexts
--takeAll :: StateT [(Int, [Int])] [] [[Int]]
--takeAll = do
-- st <- get
-- if all (null . nexts) st
-- then return []
-- else do one <- take1
-- rest <- takeAll
-- return (one : rest)
--test = evalStateT takeAll $ prepare [2, 4, 3]
--take1 :: [(Int, [Int])] -> [] [(Int, [Int])]
--take1 [] = return []
--take1 (x:xs) = do
-- first <- nexts x
-- rest <- take1 xs
-- return (first : rest)
--combos :: [] (Int, [Int]) -> [[[(Int, [Int])]]]
--combos [] = return []
--combos xs = do
-- one <- take1 xs
-- others <- case combos one of
-- [] -> return []
-- c -> c
-- return (one : others)
--test = map length . combos . prepare . groupify . reduce
| nickspinale/euler | incomplete/495.hs | mit | 4,562 | 0 | 11 | 1,333 | 253 | 189 | 64 | 8 | 2 |
module AST where
-- | A source module: its name, imports (not yet modelled), type
-- declarations, and value declarations.
data Module = Module
  { modName :: Name
  , modImports :: () --XXX
  , modTDecls :: [TDecl]
  , modDecls :: [FunBind]
  } deriving Show
-- | A module with the given name and nothing else in it: unit imports,
-- no type declarations, no value declarations.
blankModule :: Name -> Module
blankModule name = Module name () [] []
-- | Prepend a type declaration to a module's declaration list.
addTDecl :: TDecl -> Module -> Module
addTDecl decl modl = modl { modTDecls = decl : modTDecls modl }

-- | Prepend a value declaration to a module's declaration list.
addDecl :: FunBind -> Module -> Module
addDecl decl modl = modl { modDecls = decl : modDecls modl }
-- | Identifiers: constructor names versus variable names.
data Name = ConName String | VarName String
  deriving Show

-- | The built-in function-arrow type constructor.
tFunName :: Name
tFunName = ConName "->"

-- | Name of the n-ary tuple type constructor, e.g. @(,,)@ for 3.
-- Only arity 0 (unit) and arities of at least 2 exist; 1 is rejected.
tTupName :: Int -> Name
tTupName 0 = ConName "()"
tTupName n
  | n >= 2    = ConName ("(" ++ replicate (n - 1) ',' ++ ")")
  | otherwise = error "Invalid tuple name"
-- | Expressions of the surface language.
data Expr = EVar Name
          | EWild
          | ELit Literal
          | ELam [IrrefPat] Expr
          | EApp Expr Expr
          | EWhere Expr [LocalBind]
          | EHasType Expr Type
          | EIf Match
          | ECase Expr [Alt]
          | ERec Name [ (Name, Maybe Expr) ] PRec -- ^ Record values
          | EUpd Name Expr [ (Name, Maybe Expr) ]
            -- ^ Update a record: @MyRec { e | x = 10 }@
          | ETuple [Expr] -- ^ Not 1
  deriving Show
-- | Whether a record pattern/value ends with @..@ (fill in the rest).
data PRec = PRecDotDot | PRecNoDotDot
  deriving Show
-- | Literal values; integers remember the base they were written in.
data Literal = LInteger Integer NumBase
             | LRational Rational
             | LString String
  deriving Show
data NumBase = Base2 | Base8 | Base10 | Base16
  deriving Show
-- | Patterns; mirrors the 'Expr' constructors where applicable.
data Pat = PVar Name
         | PWild
         | PCon Name [Pat]
         | PRec Name [ (Name, Maybe Pat) ] PRec
         | PHasType Pat Type
         | PTuple [Pat] -- ^ Not 1
         | PLit Literal
  deriving Show
-- | Patterns used where matching may not fail (lambda args, bindings).
-- NOTE(review): irrefutability is not enforced by the type — any 'Pat'
-- is accepted.
type IrrefPat = Pat
-- | A case alternative: pattern plus guarded right-hand side.
data Alt = Alt Pat Match
  deriving Show
-- | Guarded right-hand sides with fall-through ('MOr') and failure.
data Match = MDone Expr
           | MMatch PGuard Match
           | MFail
           | MOr Match Match
  deriving Show
data PGuard = GPat Pat Expr
            | GBool Expr -- ^ sugar for: True <- Expr
            | GWild -- ^ sugar for: True <- True
  deriving Show
-- | Bindings inside a @where@/@let@ block.
data LocalBind = BPat IrrefPat Expr
               | BFun FunBind -- Function has at least 1 argument in locals
  deriving Show
-- | A function binding: name, argument patterns, body.
data FunBind = Fun Name [IrrefPat] Expr
  -- ^ 0 or 1 arguments when at top level.
  deriving Show
-- | Types: application, constructors, variables, qualified types.
data Type = TApp Type Type
          | TCon Name
          | TVar TVar
          | TQual Type Type
  deriving Show
data TVar = TV String -- XXX
  deriving Show
-- | A data declaration: name, type parameters, definition body.
data TDecl = TDData Name [Name] TDef
  deriving Show
data TDef = TSum [ (Name, [Type]) ] -- ^ Constructor, fields
          | TProd [ (Name, Type) ] -- ^ Record fields
          | TSyn Type -- ^ Type synonym
  deriving Show
| yav/lang | src/AST.hs | mit | 3,402 | 0 | 10 | 1,657 | 753 | 439 | 314 | 84 | 1 |
module HW03 where
-- | Arithmetic/comparison expressions of the toy imperative language.
data Expression =
    Var String -- Variable
  | Val Int -- Integer literal
  | Op Expression Bop Expression -- Operation
  deriving (Show, Eq)
-- Binary (2-input) operators
data Bop =
    Plus
  | Minus
  | Times
  | Divide
  | Gt
  | Ge
  | Lt
  | Le
  | Eql
  deriving (Show, Eq)
-- | Statements of the full (sugared) language; 'Incr' and 'For' are
-- desugared away before interpretation.
data Statement =
    Assign String Expression
  | Incr String
  | If Expression Statement Statement
  | While Expression Statement
  | For Statement Expression Statement Statement
  | Sequence Statement Statement
  | Skip
  deriving (Show, Eq)
-- | Program state: a total map from variable names to Int values.
type State = String -> Int

-- Exercise 1 -----------------------------------------

-- | Functional update: @extend s x v@ behaves like @s@, except that
-- looking up @x@ now yields @v@.
extend :: State -> String -> Int -> State
extend st name val query
  | query == name = val
  | otherwise     = st query

-- | The initial state: every variable reads as 0.
empty :: State
empty = const 0
-- Exercise 2 -----------------------------------------
-- | Encode a boolean in the interpreter's Int representation: 1 / 0.
bToI :: Bool -> Int
bToI True  = 1
bToI False = 0
-- | Evaluate an expression in a state.  Comparison operators yield
-- 1 for true and 0 for false; 'Divide' is integer division (and, like
-- the original, errors on a zero divisor).
evalE :: State -> Expression -> Int
evalE st (Var name)   = st name
evalE _  (Val n)      = n
evalE st (Op l bop r) = apply bop (evalE st l) (evalE st r)
  where
    apply Plus   = (+)
    apply Minus  = (-)
    apply Times  = (*)
    apply Divide = div
    apply Gt     = cmp (>)
    apply Ge     = cmp (>=)
    apply Lt     = cmp (<)
    apply Le     = cmp (<=)
    apply Eql    = cmp (==)
    cmp rel a b  = bToI (rel a b)
-- Exercise 3 -----------------------------------------
-- | The desugared core language: no 'Incr', no 'For'.
data DietStatement = DAssign String Expression
                   | DIf Expression DietStatement DietStatement
                   | DWhile Expression DietStatement
                   | DSequence DietStatement DietStatement
                   | DSkip
  deriving (Show, Eq)
-- | Translate the full language into the diet core.  @Incr x@ becomes
-- @x := x + 1@; @For init cond upd body@ becomes init followed by a
-- while loop that runs the body and then the update each iteration.
desugar :: Statement -> DietStatement
desugar stmt = case stmt of
  Assign v e      -> DAssign v e
  Incr v          -> DAssign v (Op (Var v) Plus (Val 1))
  If c t f        -> DIf c (desugar t) (desugar f)
  While c b       -> DWhile c (desugar b)
  For ini c upd b -> DSequence (desugar ini)
                               (DWhile c (DSequence (desugar b) (desugar upd)))
  Sequence a b    -> DSequence (desugar a) (desugar b)
  Skip            -> DSkip
-- Exercise 4 -----------------------------------------
-- | Big-step interpreter for the desugared language, returning the
-- final state.  A condition is "true" when it evaluates to a non-zero
-- value, matching the 1/0 encoding produced by 'bToI'.
--
-- Fix: the original equations (@evalSimple s (for)@, @evalSimple s if@)
-- were syntactically invalid placeholders and 'run' was 'undefined';
-- both are implemented here.
evalSimple :: State -> DietStatement -> State
evalSimple s (DAssign x e) = extend s x (evalE s e)
evalSimple s (DIf c tBr fBr)
  | evalE s c /= 0 = evalSimple s tBr
  | otherwise      = evalSimple s fBr
evalSimple s loop@(DWhile c body)
  | evalE s c /= 0 = evalSimple (evalSimple s body) loop
  | otherwise      = s
evalSimple s (DSequence a b) = evalSimple (evalSimple s a) b
evalSimple s DSkip = s

-- | Run a full-language 'Statement': desugar, then interpret.
run :: State -> Statement -> State
run s = evalSimple s . desugar
-- Programs -------------------------------------------
-- | Chain statements with right-nested 'Sequence'; empty is 'Skip'.
slist :: [Statement] -> Statement
slist stmts
  | null stmts = Skip
  | otherwise  = foldr1 Sequence stmts
{- Calculate the factorial of the input
for (Out := 1; In > 0; In := In - 1) {
Out := In * Out
}
-}
-- | Factorial of the variable @In@, leaving the result in @Out@
-- (counts @In@ down to 0, multiplying into @Out@).
factorial :: Statement
factorial = For (Assign "Out" (Val 1))
                (Op (Var "In") Gt (Val 0))
                (Assign "In" (Op (Var "In") Minus (Val 1)))
                (Assign "Out" (Op (Var "In") Times (Var "Out")))
{- Calculate the floor of the square root of the input
B := 0;
while (A >= B * B) {
B++
};
B := B - 1
-}
-- | Integer square root (floor) of @A@, leaving the result in @B@:
-- increments B until B*B exceeds A, then steps back one.
squareRoot :: Statement
squareRoot = slist [ Assign "B" (Val 0)
                   , While (Op (Var "A") Ge (Op (Var "B") Times (Var "B")))
                       (Incr "B")
                   , Assign "B" (Op (Var "B") Minus (Val 1))
                   ]
{- Calculate the nth Fibonacci number
F0 := 1;
F1 := 1;
if (In == 0) {
Out := F0
} else {
if (In == 1) {
Out := F1
} else {
for (C := 2; C <= In; C++) {
T := F0 + F1;
F0 := F1;
F1 := T;
Out := T
}
}
}
-}
-- | The @In@-th Fibonacci number in @Out@ (F0 = F1 = 1); iterates with
-- the rolling pair (F0, F1) for inputs of 2 or more.
fibonacci :: Statement
fibonacci = slist [ Assign "F0" (Val 1)
                  , Assign "F1" (Val 1)
                  , If (Op (Var "In") Eql (Val 0))
                       (Assign "Out" (Var "F0"))
                       (If (Op (Var "In") Eql (Val 1))
                           (Assign "Out" (Var "F1"))
                           (For (Assign "C" (Val 2))
                                (Op (Var "C") Le (Var "In"))
                                (Incr "C")
                                (slist
                                 [ Assign "T" (Op (Var "F0") Plus (Var "F1"))
                                 , Assign "F0" (Var "F1")
                                 , Assign "F1" (Var "T")
                                 , Assign "Out" (Var "T")
                                 ])
                           )
                       )
                  ]
| maggy96/haskell | cis194/03-ADTs/interpreter.hs | mit | 4,470 | 12 | 19 | 1,733 | 1,364 | 702 | 662 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Rx.Observable.Concat where
import Prelude.Compat hiding (concat)
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TQueue (isEmptyTQueue, newTQueueIO,
readTQueue, writeTQueue)
import Control.Concurrent.STM.TVar (newTVarIO, readTVar, writeTVar)
import Control.Monad (when)
import Data.Monoid ((<>))
import Rx.Disposable (dispose, newBooleanDisposable,
setDisposable, toDisposable)
import Rx.Observable.List (fromList)
import Rx.Observable.Types
import Rx.Scheduler (currentThread)
-- | Sequentially concatenate an observable of observables: each inner
-- observable is subscribed to only after the previous one completed;
-- inner sources arriving in the meantime are queued.  The result
-- completes once the outer source has completed AND no inner source is
-- active or pending.  Errors from either level tear down the current
-- inner subscription and propagate.
concat :: Observable s1 (Observable s2 a) -> Observable s2 a
concat sources = newObservable $ \outerObserver -> do
  -- shared state: has the outer stream completed, is an inner stream
  -- currently active (and its disposable), and the queue of inner
  -- streams waiting their turn
  outerCompletedVar <- newTVarIO False
  hasCurrentVar <- newTVarIO False
  currentVar <- newTVarIO Nothing
  currentDisposable <- newBooleanDisposable
  pendingVar <- newTQueueIO
  main outerObserver
       outerCompletedVar
       hasCurrentVar
       currentVar
       currentDisposable
       pendingVar
  where
    main outerObserver
         outerCompletedVar
         hasCurrentVar
         currentVar
         currentDisposable
         pendingVar = do
      outerDisposable <-
        subscribe sources outerOnNext outerOnError outerOnCompleted
      return (outerDisposable <> toDisposable currentDisposable)
      where
        fetchHasCurrent = readTVar hasCurrentVar
        fetchHasPending = not <$> isEmptyTQueue pendingVar
        fetchOuterCompleted = readTVar outerCompletedVar
        -- atomically claim the "current inner stream" slot;
        -- True iff this caller won the claim
        accquireCurrent = atomically $ do
          hasCurrent <- readTVar hasCurrentVar
          if hasCurrent
            then return False
            else do
              writeTVar hasCurrentVar True
              return True
        -- release the slot and hand back the old inner disposable
        resetCurrentSTM = do
          current <- readTVar currentVar
          writeTVar currentVar Nothing
          writeTVar hasCurrentVar False
          return current
        resetCurrent = atomically resetCurrentSTM
        disposeCurrent = do
          mDisposable <- resetCurrent
          case mDisposable of
            Just disposable -> dispose disposable
            Nothing -> return ()
        -- a new inner observable: run it now if the slot is free,
        -- otherwise queue it for later
        outerOnNext source = do
          currentAcquired <- accquireCurrent
          if currentAcquired
            then do
              innerDisposable <-
                subscribe source innerOnNext innerOnError innerOnCompleted
              setDisposable currentDisposable innerDisposable
              atomically (writeTVar currentVar (Just innerDisposable))
            else atomically (writeTQueue pendingVar source)
        outerOnError err = do
          disposeCurrent
          onError outerObserver err
        outerOnCompleted = do
          -- need to wait for all inner observables to complete
          shouldComplete <- atomically $ do
            writeTVar outerCompletedVar True
            not <$> (((||)) <$> fetchHasCurrent <*> fetchHasPending)
          when shouldComplete (onCompleted outerObserver)
        innerOnNext = onNext outerObserver
        innerOnError err = do
          disposeCurrent
          onError outerObserver err
        -- inner stream done: either finish (outer done, queue empty)
        -- or dequeue and start the next pending inner stream
        innerOnCompleted = do
          mNextSource <- atomically $ do
            outerCompleted <- fetchOuterCompleted
            hasPending <- fetchHasPending
            if outerCompleted && not hasPending
              then return Nothing
              else do
                _ <- resetCurrentSTM
                Just <$> readTQueue pendingVar
          case mNextSource of
            Nothing -> onCompleted outerObserver
            Just source -> outerOnNext source
-- | Concatenate a fixed list of observables: each is subscribed to only
-- after its predecessor completes.
concatList :: [Observable s a] -> Observable s a
concatList = concat . fromList currentThread
| roman/Haskell-Reactive-Extensions | rx-core/src/Rx/Observable/Concat.hs | mit | 4,007 | 0 | 19 | 1,424 | 783 | 385 | 398 | 92 | 6 |
module Main where
import Pong
import Spear.Math.AABB
import Spear.Math.Spatial2
import Spear.Math.Vector
import Spear.Game
import Spear.Window
import Data.Maybe (mapMaybe)
import qualified Graphics.Rendering.OpenGL.GL as GL
import Graphics.Rendering.OpenGL.GL (($=))
-- | Top-level game state: the window handle plus all live objects.
data GameState = GameState
    { wnd :: Window
    , world :: [GameObject]
    }
-- | Entry point: open a 640x480 window titled "Pong" and run the
-- fixed-step loop at 30 updates per second.
main = run
     $ withWindow (640,480) [] Window (2,0) (Just "Pong") initGame
     $ loop (Just 30) step

-- | One-time GL setup (clear colour, identity modelview) and the
-- initial game state.
initGame wnd = do
    gameIO $ do
        GL.clearColor $= GL.Color4 0.7 0.5 0.7 1.0
        GL.matrixMode $= GL.Modelview 0
        GL.loadIdentity
    return $ GameState wnd newWorld
-- | One frame: poll window events, advance the world, render, and
-- report whether the game should keep running (False on ESC).
step :: Elapsed -> Dt -> Game GameState Bool
step elapsed dt = do
    gs <- getGameState
    evts <- events (wnd gs)
    gameIO . process $ evts
    let evts' = translate evts
    modifyGameState $ \ gs -> gs
        { world = stepWorld elapsed dt evts' (world gs) }
    getGameState >>= \gs -> gameIO . render $ world gs
    return (not $ exitRequested evts)
-- | Clear the colour buffer, draw every object, and present the frame.
render world = do
    GL.clear [GL.ColorBuffer]
    mapM_ renderGO world
    swapBuffers
-- | Draw one object as a solid quad (triangle strip) sized by its AABB
-- and translated to its world position.
renderGO :: GameObject -> IO ()
renderGO go = do
    let (AABB2 (Vector2 xmin' ymin') (Vector2 xmax' ymax')) = aabb go
        (Vector2 xcenter ycenter) = pos go
        -- GL wants GLdouble; widen the Float components once up front
        (xmin,ymin,xmax,ymax) = (f2d xmin', f2d ymin', f2d xmax', f2d ymax')
    GL.preservingMatrix $ do
        GL.translate (GL.Vector3 (f2d xcenter) (f2d ycenter) 0)
        GL.renderPrimitive (GL.TriangleStrip) $ do
            GL.vertex (GL.Vertex2 xmin ymax)
            GL.vertex (GL.Vertex2 xmin ymin)
            GL.vertex (GL.Vertex2 xmax ymax)
            GL.vertex (GL.Vertex2 xmax ymin)
-- | Handle window-system events (currently only resize).
process = mapM_ procEvent

-- | On resize, refit the viewport and rebuild the 0..1 ortho
-- projection; other events are ignored here.
procEvent (Resize w h) = do
    GL.viewport $= (GL.Position 0 0, GL.Size (fromIntegral w) (fromIntegral h))
    GL.matrixMode $= GL.Projection
    GL.loadIdentity
    GL.ortho 0 1 0 1 (-1) 1
    GL.matrixMode $= GL.Modelview 0
procEvent _ = return ()

-- | Map raw key events to game inputs, dropping everything else.
translate = mapMaybe translate'
translate' (KeyDown KEY_LEFT) = Just MoveLeft
translate' (KeyDown KEY_RIGHT) = Just MoveRight
translate' (KeyUp KEY_LEFT) = Just StopLeft
translate' (KeyUp KEY_RIGHT) = Just StopRight
translate' _ = Nothing

-- | True when ESC was pressed this frame.
exitRequested = any (==(KeyDown KEY_ESC))

-- | Widen a Float to the GLdouble GL expects.
f2d :: Float -> GL.GLdouble
f2d = realToFrac
| jeannekamikaze/Spear | demos/pong/Main.hs | mit | 2,414 | 0 | 16 | 676 | 893 | 444 | 449 | 65 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Construction (Name, Term (..), appP, lamP, termP, varP)
import Data.Text
import Test.Hspec
import Text.Parsec
import Text.Parsec.Text
main :: IO ()
main = hspec $
  describe "Parser test" parserTest

-- | Examples for the lambda-calculus term parsers: variables,
-- applications, lambdas, and nested bracketing.
parserTest :: SpecWith ()
parserTest = do
  let varX = Var "x"
      varX1 = Var "x1"
      varX2 = Var "x2"
      combined1 = App (Lam "x" varX) varX1
      combined2 = Lam "x2" (App (Lam "x" varX) varX1)
  it "should test var parser" $ do
    check varP "x" varX
    check varP "x2" varX2
  it "should test app parser" $ do
    let app1 = App varX varX1
        app2 = App varX1 varX2
        app3 = App varX (App varX1 varX2)
        app4 = App (App varX varX1) varX2
    check termP "(x x1)" app1
    check termP "(x  x1)" app1
    check termP "(x1 x2)" app2
    check termP "(x (x1 x2))" app3
    check termP "((x x1) x2)" app4
  it "should test lam parser" $ do
    let lam1 = Lam "x" varX
        lam2 = Lam "x" varX1
        lam3 = Lam "x" (App varX varX1)
    check termP "(\\x.x)" lam1
    check termP "(\\x.x1)" lam2
    check termP "(\\x.(x x1))" lam3
    --check termP "((\\x.x) x1)" combined1
  it "should test bracket parser" $ do
    check termP "((\\x.x) (x1))" combined1
    check termP "((((\\x2.((\\x.x) x1)))))" combined2

-- | Run a parser over the input and expect exactly the given result.
check :: (Eq a, Show a) => Parser a -> Text -> a -> Expectation
check parser inputStr result =
  parse parser "term parser" inputStr `shouldBe` Right result
| mortum5/programming | haskell/BM-courses/construct-yourself/test/ParseSpec.hs | mit | 1,535 | 0 | 15 | 449 | 490 | 236 | 254 | 43 | 1 |
module Network.Skype.Parser.Types where
import Control.Applicative
import Control.Arrow
import Data.Attoparsec.ByteString.Char8 (decimal)
import Data.Attoparsec.ByteString.Lazy
import Data.Char (chr)
import Data.Time.Calendar (fromGregorian)
import Data.Word8
import Network.Skype.Protocol.Types
import qualified Data.ByteString as BS
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
-- * User
-- | A user identifier: one or more bytes that are neither whitespace
-- nor a comma (commas separate ids in protocol lists).
userID :: Parser UserID
userID = takeWhile1 (\c -> not (isSpace c) && c /= _comma)
-- Free-text user fields: the remainder of the input, decoded as UTF-8.
userFullName :: Parser UserDisplayName
userFullName = takeText
userDisplayName :: Parser UserDisplayName
userDisplayName = takeText
-- | A birthday as YYYYMMDD digits; a bare "0" (at end of input) means
-- the user has no birthday set and parses to 'Nothing'.
userBirthday :: Parser (Maybe UserBirthday)
userBirthday = Just <$> (fromGregorian <$> digit 4 <*> digit 2 <*> digit 2)
           <|> Nothing <$ (word8 _0 *> endOfInput)
  where
    digit n = read . map (chr . fromIntegral) <$> count n (satisfy isDigit)
-- | An ISO code followed by a display name, both alphabetic, separated
-- by whitespace; absent input parses to 'Nothing'.
userLanguage :: Parser (Maybe (UserLanguageISOCode, UserLanguage))
userLanguage = Just <$> ((,) <$> (tokens <* spaces) <*> tokens) <|> pure Nothing
  where
    tokens = T.decodeUtf8 <$> takeWhile1 isAlpha
-- | Same shape as 'userLanguage' but for country code/name.
userCountry :: Parser (Maybe (UserCountryISOCode, UserCountry))
userCountry = Just <$> ((,) <$> (tokens <* spaces) <*> tokens) <|> pure Nothing
  where
    tokens = T.decodeUtf8 <$> takeWhile1 isAlpha
-- Remaining profile fields are free text.
userProvince :: Parser UserProvince
userProvince = takeText
userCity :: Parser UserCity
userCity = takeText
userPhone :: Parser UserPhone
userPhone = takeText
userAbout :: Parser UserAbout
userAbout = takeText
userHomepage :: Parser UserHomepage
userHomepage = takeText
userSpeedDial :: Parser UserSpeedDial
userSpeedDial = takeText
userAuthRequestMessage :: Parser UserAuthRequestMessage
userAuthRequestMessage = takeText
userMoodText :: Parser UserMoodText
userMoodText = takeText
userRichMoodText :: Parser UserRichMoodText
userRichMoodText = takeText
-- | Timezone offset as a decimal integer.
userTimezoneOffset :: Parser UserTimezoneOffset
userTimezoneOffset = decimal
-- * Chat
-- | A chat identifier: like 'userID', a non-empty run of bytes
-- containing neither whitespace nor a comma.
chatID :: Parser ChatID
chatID = takeWhile1 (\c -> not (isSpace c) && c /= _comma)
-- Free-text chat fields: remainder of input, decoded as UTF-8.
chatTopic :: Parser ChatTopic
chatTopic = takeText
chatWindowTitle :: Parser ChatWindowTitle
chatWindowTitle = takeText
chatPasswordHint :: Parser ChatPasswordHint
chatPasswordHint = takeText
chatGuidelines :: Parser ChatGuidelines
chatGuidelines = takeText
chatDescription :: Parser ChatDescription
chatDescription = takeText
-- | The raw chat blob is kept as bytes, not decoded.
chatBlob :: Parser ChatBlob
chatBlob = takeByteString
-- * Chat member
chatMemberID :: Parser ChatMemberID
chatMemberID = decimal
-- * Chat message
chatMessageID :: Parser ChatMessageID
chatMessageID = decimal
chatMessageBody :: Parser ChatMessageBody
chatMessageBody = takeText
-- * Misc.
-- | Protocol booleans are the literal strings TRUE / FALSE.
boolean :: Parser Bool
boolean = (True <$ string "TRUE") <|> (False <$ string "FALSE")
timestamp :: Parser Timestamp
timestamp = decimal
-- | One or more whitespace bytes.
spaces :: Parser BS.ByteString
spaces = takeWhile1 isSpace
-- | The rest of the input as UTF-8 text.
takeText :: Parser T.Text
takeText = T.decodeUtf8 <$> takeByteString
| emonkak/skype4hs | src/Network/Skype/Parser/Types.hs | mit | 2,964 | 0 | 12 | 437 | 802 | 442 | 360 | 76 | 1 |
module Region where
import Data.Map as Map
type RegionId = Int
type NationId = Int
type StepCount = Int
-- | One map region.  'ownerId' is who currently controls it;
-- 'allegiance' is which nation its population identifies with (these
-- diverge after a conquest until 'shiftAllegiance' flips it).
data Region = Region
  { regionId :: RegionId
  , ownerId :: NationId
  , allegiance :: NationId
  , ownershipDuration :: StepCount
  , adjacentRegions :: [RegionId]
  , population :: Int
  , industry :: Int
  , infrastructure :: Int
  , devastation :: Int
  , happiness :: Int
  , terrain :: Terrain
  , terrainFeatures :: [TerrainFeature]
  , ressources :: Map Ressource Int
  } deriving Show
-- | Base terrain of a region.
data Terrain =
    Plain
  | Rough
  | Hills
  | Desert
  | Wasteland
  | Tundra
  | Jungle
  | Mountain
  | Swamp
  | Forest
  | Water
  deriving Show
-- | Additional geographic features a region may have.
data TerrainFeature =
    River
  | Coast
  | Volcano
  deriving Show
-- | Resource kinds tracked per region (used as 'Map' keys, hence Ord).
data Ressource =
    Food
  | Energy
  | RawMaterial
  | Luxury
  deriving (Show,Eq,Ord)
-- | Bump the number of steps the current owner has held this region.
incOwnershipDuration :: Region -> Region
incOwnershipDuration reg =
  reg { ownershipDuration = ownershipDuration reg + 1 }
-- | A contested region (allegiance differs from owner) only switches
-- allegiance to its occupier after a long, happy occupation: happiness
-- of at least 80 sustained for at least five ownership steps.
shiftAllegiance :: Region -> Region
shiftAllegiance reg
  | contested && happiness reg >= 80 && ownershipDuration reg >= 5 =
      reg { allegiance = ownerId reg }
  | otherwise = reg
  where
    contested = allegiance reg /= ownerId reg
-- | Move population toward the food supply (one unit of Food sustains
-- 1000 people) and shift happiness by the same signed surplus/deficit.
-- NOTE(review): 'foodDiff' is in units of 1000 people; the population
-- change is foodDiff percent of the current population.
adjustPopulationByFood :: Region -> Region
adjustPopulationByFood r = r {population = newPop, happiness = happiness r + foodDiff}
  where
    -- missing Food entry counts as zero food
    food = Map.findWithDefault 0 Food $ ressources r
    foodDiff = div ((food * 1000) - population r) 1000
    popChange = foodDiff * (div (population r) 100)
    newPop = population r + popChange
-- | War losses: devastation removes devastation percent of the
-- population and lowers happiness by the devastation value itself.
adjustPopulationByWar :: Region -> Region
adjustPopulationByWar reg =
  reg { population = population reg - losses
      , happiness  = happiness reg - devastation reg
      }
  where
    losses = devastation reg * div (population reg) 100
-- | One simulation step for a region, applied in order: war losses,
-- food-driven growth, allegiance drift, then the ownership clock.
basicRegionStep :: Region -> Region
basicRegionStep region =
  incOwnershipDuration
    (shiftAllegiance
      (adjustPopulationByFood
        (adjustPopulationByWar region)))
| rsachtl/world_simulator | src/Region.hs | mit | 1,863 | 0 | 11 | 441 | 544 | 309 | 235 | 63 | 3 |
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-dodgy-exports -fno-warn-unused-imports #-}
-- | Reexports "Data.Version.Compat"
-- from a globally unique namespace.
module Data.Version.Compat.Repl.Batteries (
module Data.Version.Compat
) where
import "this" Data.Version.Compat
| haskell-compat/base-compat | base-compat-batteries/src/Data/Version/Compat/Repl/Batteries.hs | mit | 290 | 0 | 5 | 31 | 29 | 22 | 7 | 5 | 0 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.EXTBlendMinMax (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.EXTBlendMinMax
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.EXTBlendMinMax
#else
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/EXTBlendMinMax.hs | mit | 355 | 0 | 5 | 33 | 33 | 26 | 7 | 4 | 0 |
module Antiqua.Graphics.Utils where
import Codec.Picture
import GHC.Word
-- | Load a PNG from disk; only RGBA8 images are supported.  Any other
-- pixel format aborts with 'error'; a decoder failure first prints the
-- decoder's message and then aborts.
pngLoad :: String -> IO (Image Word8)
pngLoad path = do
    decoded <- readPng path
    case decoded of
        Right (ImageRGBA8 (Image w h pixels)) ->
            return (Image w h pixels)
        Right _ ->
            error "unsupported bitmap"
        Left msg -> do
            print msg
            error "failed to load"
| olive/antiqua-prime | src/Antiqua/Graphics/Utils.hs | mit | 411 | 0 | 14 | 136 | 134 | 63 | 71 | 14 | 3 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.VTTRegionList (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.VTTRegionList
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.VTTRegionList
#else
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/VTTRegionList.hs | mit | 352 | 0 | 5 | 33 | 33 | 26 | 7 | 4 | 0 |
{-# LANGUAGE DoAndIfThenElse, BangPatterns, FlexibleInstances, ExistentialQuantification, ImpredicativeTypes #-}
module StarStats.Renderer where
import Control.Arrow
import Control.Applicative
import Data.List
import Data.Maybe
import qualified Data.Map as M
import Text.Printf
import System.Random
import System.IO.Unsafe
import StarStats.Utils
import StarStats.DB.Utils
import Data.IORef
-- | One user's activity split into four counts, rendered as a bar.
data TimeBar = TimeBar String Int Int Int Int

-- | Pair each user with a 'TimeBar' built from that user's counts.
toTimeBars :: [(String, Int, Int, Int, Int)] -> [(String, TimeBar)]
toTimeBars = map (\(u, a, b, c, d) -> (u, TimeBar u a b c d))
-- Bars compare by user name only; the four counts are ignored.
instance Ord TimeBar where
    (TimeBar x _ _ _ _) `compare` (TimeBar y _ _ _ _) = x `compare` y
instance Eq TimeBar where
    (TimeBar x _ _ _ _) == (TimeBar y _ _ _ _) = x == y
-- Rendering a bar emits its canvas element plus the drawing script.
instance Print TimeBar where
    print' (TimeBar user w x y z) =
        let uname = makeUserTag user in
        (makeCanvas uname 100 16) ++ (makeRectScript uname w x y z)
type Heading = String
type Name = String
-- | A CSS width string for a table cell.
type Width = String
-- | One table column: per-user cell contents, a heading, and a width.
data Column = Column (M.Map Name String) Heading Width
-- | One rendered table row: cell text paired with its width.
data Row = Row [(String, Width)]
-- | Build a column from (user, cell) pairs; user keys are lowercased
-- so lookups in 'rowify' are case-insensitive.
toColumn :: [(String, String)] -> Heading -> Width -> Column
toColumn xs h w = Column (M.fromList (mapFst lower <$> xs)) h w
--may be possible to pass width directly
getHeadingWidth :: Column -> (Heading, Width)
getHeadingWidth = (,) <$> getHeading <*> getWidth
-- | Turn (name, cells, width) triples into rows: the name becomes the
-- first cell and every cell in the row shares the given width.
toRow :: [(Name, [String], Width)] -> [Row]
toRow xs = (Row . doMap) <$> xs
    where doMap (name, elts, width) = zip (name : (print' <$> elts)) (repeat width)
-- | The bold header row built from each column's heading and width.
makeHeadingRow :: [Column] -> Row
makeHeadingRow cs =
    let rowData = getHeadingWidth <$> cs in
    Row $ (\(s, w) -> (tag "b" s, w)) <$> rowData
-- Field accessors for 'Column'.
getMap :: Column -> M.Map String String
getMap (Column m _ _) = m
getWidth :: Column -> Width
getWidth (Column _ _ w) = w
getHeading :: Column -> Heading
getHeading (Column _ h _) = h
-- | Assemble the table body: the heading row followed by one row per
-- user, pulling that user's cell out of every column.  A user missing
-- from a column renders the visible error placeholder below (users are
-- expected to be pre-lowercased by the caller, matching 'toColumn').
rowify :: [Name]
       -> [Column]
       -> [Row]
rowify us cs =
    let hr = makeHeadingRow cs in
    let maps = getMap <$> cs in
    let ws = getWidth <$> cs in
    let find' u m = fromMaybe "~~~There was an error finding that message~~~" (M.lookup u m) in
    let assemble' :: Name -> [(String, Width)]
        assemble' u = zip (find' u <$> maps) ws in
    let rows = Row . assemble' <$> us in
    hr : rows
-- | Render a full linked table section: a name column is prepended to
-- the data columns, each row becomes <tr>/<td> markup, and the whole
-- thing is wrapped by 'linkHeader'.  Returns 'Nothing' when there is
-- no data beyond the heading row (so empty tables are omitted).
formatTable :: Heading
            -> String
            -> [Name]
            -> Heading
            -> Width
            -> [Column]
            -> Bool
            -> Maybe String
formatTable h desc ns nh nw cs b =
    -- keys are lowercased to match the case-insensitive column maps
    let ns' = (lower <$> ns) in
    let nameCol = toColumn (zip ns' ns) nh nw in
    let cs' = nameCol : cs in
    let rows = rowify ns' cs' in
    let formatCell (s, w) = td b w s in
    let formatRow (Row xs) = tr $ concat $ formatCell <$> xs in
    if length rows == 1
    then Nothing
    else Just $ linkHeader Table h desc $ tag "table" $ concat $ formatRow <$> rows
-- | Prefix a nick so the generated element id cannot collide with any
-- other id used on the page.
makeUserTag :: String -> String
makeUserTag nick = "user-user-user-user-user-" ++ nick
-- | An HTML canvas element with the given id and pixel dimensions.
makeCanvas :: String -> Int -> Int -> String
makeCanvas ident w h =
    genTag "canvas"
           [ ("id", ident)
           , ("width", show w)
           , ("height", show h)
           ]
           ""
-- | A script tag invoking the client-side @drawBar@ renderer with the
-- target canvas id (as a JS string) and the four bar segment values.
makeRectScript :: String
               -> Int
               -> Int
               -> Int
               -> Int
               -> String
makeRectScript ident a b c d =
    tag "script" (makeCall "drawBar" (show ident : map show [a, b, c, d]))
-- | Render a label/value pair as a two-element JS array literal,
-- e.g. @["label",12.5]@.
showPair :: (String,Double) -> String
showPair (label, value) = concat ["[", show label, ",", show value, "]"]
-- | Rescale the integer counts so they sum to 100, i.e. each value
-- becomes its percentage of the grand total.
pairToPercent :: [(a,Int)] -> [(a,Double)]
pairToPercent pairs = map toPercent pairs
  where
    total = fromIntegral (sum (map snd pairs)) :: Double
    toPercent (k, v) = (k, fromIntegral (100 * v) / total)
-- | Render a donut-style chart section: counts are converted to
-- percentages, serialised as JS pairs, and handed to the client-side
-- drawing function named by @type'@.  Returns 'Nothing' for no data.
makeDonutGraph :: String -> String -> String -> String -> [(String,Int)] -> Maybe String
makeDonutGraph type' h desc canvasName xs =
    let hours = pairToPercent xs in
    let vals = "[" ++ (intercalate ", " $ showPair <$> hours) ++ "]" in
    let s = makeCall type' [ show canvasName
                           , vals
                           ] in
    let tag' = divId canvasName ""in
    if length vals == 0
    then Nothing
    else Just $ linkHeader DonutGraph h desc $ tag' ++ genTag "script" [("type", "text/javascript")] s
-- Concrete chart flavours understood by the client-side JS.
makeDonut = makeDonutGraph "donut"
makeHalfDonut = makeDonutGraph "halfDonut"
-- | Render an Int for the JS data arrays; zero becomes the literal
-- @null@ so the plotting code leaves a gap instead of drawing a point.
showInt :: Int -> String
showInt n
  | n == 0    = "null"
  | otherwise = show n
-- | Render a line-chart section: labels and values are serialised into
-- JS arrays and passed to the client-side @line@ function.  Returns
-- 'Nothing' when there is no data to plot.
makeLine :: String -> String -> String -> String -> [(String,Int)] -> Maybe String
makeLine h desc canvasName label xs =
    let labels = (intercalate ", " $ (show.fst) <$> xs) in
    let vals = (intercalate ", " $ (showInt.snd) <$> xs) in
    let s = makeCall "line" [ show canvasName
                            , "[" ++ vals ++ "]"
                            , "[" ++ labels ++ "]"
                            , show label
                            ] in
    let tag' = divId canvasName ""in
    if length vals == 0
    then Nothing
    else Just $ linkHeader LineGraph h desc $ tag' ++ genTag "script" [("type", "text/javascript")] s
-- | Render a JS function-call statement: @name(arg1, arg2, ...);@
makeCall :: String -> [String] -> String
makeCall f args = f ++ "(" ++ intercalate ", " args ++ ");"
-- | Render a (name, value) pair as "name: value".
simpleFormat :: Show a => (String, a) -> String
simpleFormat (name, value) = concat [name, ": ", show value]
-- | Render every (name, value) pair with 'simpleFormat'.
-- (Plain map replaces the needless Applicative lift.)
formatList :: Show a => [(String, a)] -> [String]
formatList = map simpleFormat
-- | Wrap each string in a <p> tag and concatenate the results.
makeList :: [String] -> String
makeList = concatMap (tag "p")
withHeading3 :: String -> String -> Section -> (String -> String)
-- Wrap content in a section div that carries a styled heading with a hover
-- tip for the description.
withHeading3 h desc s x =
    divClass (sectionString s) (heading ++ x)
    where heading = divv (spann (h' ++ spanDesc (hoverBox desc)))
          divv = divClass "myhr"
          spann = spanClass "myhr-inner"
          spanDesc = spanClass "myhr-desc"
          h' = " " ++ h  -- dropped the original's redundant trailing ++ ""
-- | Wrap a group of rendered lines in a section div; empty input yields "".
section :: [String] -> String
section xs
    | null xs = ""
    | otherwise = divClass "section" (unlines xs)
-- | Apply a function to both components of a homogeneous pair.
pairMap :: (a -> b) -> (a, a) -> (b, b)
pairMap f p = (f (fst p), f (snd p))
-- | The kind of page element being rendered; 'sectionString' maps each
-- variant to the CSS class used for its enclosing div.
data Section = DonutGraph | LineGraph | Table
-- | CSS class name for each 'Section' variant.
sectionString :: Section -> String
sectionString DonutGraph = "graph-element-donut"
sectionString LineGraph = "graph-element-line"
sectionString Table = "table-element"
-- | Attach a self-linking anchor heading (via 'withHeading3') to rendered
-- content. The anchor id is derived from the heading text; `hyphenate` is
-- defined elsewhere in this module.
linkHeader :: Section -> String -> String -> String -> String
linkHeader sec h desc s =
    let tagname = hyphenate h in
    let href = genTag "a" [("id", tagname), ("href", "#" ++ tagname)] in
    (withHeading3 (href h) desc sec $ s)
-- | A "[?] " marker that reveals the given description in a hover box.
hoverBox :: String -> String
hoverBox desc = spanClass "htip" (tag "div" ("[?] " ++ wrapped))
    where wrapped = divClass "hwrap" (divClass "hbox" desc)
-- | Assemble a full HTML page: a head holding the stylesheet, script tags and
-- favicon, and a body wrapping the content in the "container" div.
makeFile :: String -> String -> String -> [String] -> String
makeFile x file head' scripts =
    tag "html" (tag "head" headContent ++ tag "body" (divId "container" x))
    where headContent = css ++ concat scriptTags ++ head' ++ favicon
          scriptTags :: [String]
          scriptTags = map scriptSrc scripts
          scriptSrc src = genTag "script" [ ("language", "javascript")
                                          , ("src", src)
                                          ] ""
          css = voidTag "link" [ ("href", file)
                               , ("rel", "stylesheet")
                               , ("type", "text/css")
                               ]
          favicon = voidTag "link" [ ("href", "/starstats/favicon.ico?v=1.1")
                                   , ("rel", "shortcut icon")
                                   ]
-- | Render an association list as a two-column table; w0/w1 are the column
-- widths and b is forwarded to the expand boxes inside each cell (see 'td').
simpleTable :: Print a => String -> String -> Bool -> [(String,a)] -> String
simpleTable w0 w1 b xs = tag "table" $ concat $ format <$> xs
  where format (s, y) = tr $ td b w0 s ++ td b w1 (print' y)
{-# NOINLINE counter #-}
-- | Global mutable counter used by 'makeExpandBox' to generate unique element
-- ids. NOINLINE is required so the unsafePerformIO allocation happens exactly
-- once.
counter :: IORef Int
counter = unsafePerformIO $ newIORef 0
{-# NOINLINE makeExpandBox #-}
-- | Wrap content in a checkbox-driven expand/collapse box. The checkbox id
-- ("A0", "A1", ...) is drawn from the global 'counter', which is bumped on
-- every call.
--
-- NOTE(review): this uses unsafePerformIO for an observable side effect, so
-- the generated ids depend on evaluation order and sharing; the function is
-- not referentially transparent.
makeExpandBox :: Bool -> String -> String
makeExpandBox b x = unsafePerformIO $ do
  i <- readIORef counter
  writeIORef counter (i+1)
  let id' = "A" ++ (show i)
  let div' = divClass "overflowbox"
  let label' = genTag "label" [("for", id')]
  -- when b is True the checkbox carries the "checked" attribute
  let tags = [ ("id", id')
             , ("type", "checkbox")
             , ("autocomplete", "off")
             ] ++ if b then [("checked", "")] else []
  let inputTag = voidTag "input" tags
  return $ div' (inputTag ++ label' (divClass "testtest" x))
-- | A <td> cell of the given width whose content sits inside an expand box;
-- b is the box's initial checked state.
td :: Bool -> String -> (String -> String)
td b width x = (genTag "td" [("width", width)] (makeExpandBox b x))
-- | Wrap content in a <tr> tag.
tr :: String -> String
tr = tag "tr"
-- | A <div> carrying the given id attribute.
divId :: String -> (String -> String)
divId id' = genTag "div" [("id", id')]
-- | A <div> carrying the given class attribute.
divClass :: String -> (String -> String)
divClass class' = genTag "div" [("class", class')]
-- | A <span> carrying the given class attribute.
spanClass :: String -> (String -> String)
spanClass class' = genTag "span" [("class", class')]
-- | Render one attribute as @key="value" @ (note the trailing space).
propToString :: (String,String) -> String
propToString (key, val) = concat [key, "=\"", val, "\" "]
-- | Render an element with attributes: @<t k="v" ...>\ncontent\n</t>@.
-- (concatMap replaces the original left fold over (++), which appended
-- left-associatively.)
genTag :: String -> [(String,String)] -> String -> String
genTag t props c =
    let props' = concatMap propToString props in
    concat ["<", t, " ", props', ">\n", c, "\n</", t, ">"]
-- | Render a self-closing element with attributes: @<t k="v" .../>\n@.
-- (concatMap replaces the original left fold over (++).)
voidTag :: String -> [(String,String)] -> String
voidTag t props =
    let props' = concatMap propToString props in
    concat ["<", t, " ", props', "/>\n"]
-- | Render an element with no attributes (eta-reduced 'genTag').
tag :: String -> String -> String
tag s = genTag s []
| deweyvm/starstats | src/StarStats/Renderer.hs | mit | 9,353 | 0 | 23 | 2,762 | 3,619 | 1,905 | 1,714 | 214 | 2 |
-- | Connections module describes all elements of network connectivity
module Simulation.HSimSNN.Connections where
import Simulation.HSimSNN.Population
import Simulation.HSimSNN.Neuron
-- | Connections encapsulates information about source and destination neurons
-- and corresponding synaptic information
--
-- * The connections are essentially a matrix from source to destination neurons.
--
-- * The index of the top level list of syninfo corresponds to the index of source neurons - (0,N). Therefore, it is expected to have the same length as pop.
--
-- * The first int in the tuple is the index of destination neurons - (0..N).
--
-- * The length of each sublist can be more than N to support multiple synapses per neuron.
--
data Connections = Connections {pop:: Population, syninfo::[[(Int,SynInfo)]]}
-- | Connections are shown via their synapse matrix only; the population
-- itself is omitted from the output.
instance Show Connections where
    show (Connections _ sinf) = show sinf
| drwebb/HSimSNN | src/Simulation/HSimSNN/Connections.hs | gpl-2.0 | 929 | 0 | 11 | 147 | 92 | 60 | 32 | 6 | 0 |
-- | The Collatz trajectory after n, ending at 1 (empty for n == 1).
bruteCollatz :: Integer -> [Integer]
bruteCollatz 1 = []
bruteCollatz n = next : bruteCollatz next
    where next | odd n     = 3 * n + 1
               | otherwise = n `div` 2
-- | Infinite list of Collatz trajectories, for starting values 1, 2, 3, ...
chains = [bruteCollatz x | x <- [1..]]
-- | From a current record (index, chain length), lazily stream successive
-- record-breaking (index, length) pairs.
-- NOTE(review): this calls chains' which is not defined anywhere in this
-- file (perhaps `drop max_i chains` was intended) -- this will not compile
-- as written; confirm before use.
longest' (max_i, max_l) =
    let l = head $ dropWhile (\(i,l) -> l <= max_l) $ zip [max_i..] $ map length $ chains' max_i
    in l:longest' l
-- memoizedCollatz :: Integer -> [Integer]
-- memoizedCollatz = memoize col where
-- col 1 = []
-- col n
-- | odd n = 3*n+1:memoizedCollatz(3*n+1)
-- | even n = div n 2:memoizedCollatz(div n 2)
-- | Collatz trajectory computed through memoFix so recursive calls are
-- memoised.
memoizedCollatz :: Integer -> [Integer]
memoizedCollatz = memoFix col where
    col _ 1 = []
    col f n
        -- BUG FIX: the original `3*n+1 : f $! 3*n+1` parses as
        -- `(3*n+1 : f) $! (3*n+1)` because (:) (infixr 5) binds tighter than
        -- ($!) (infixr 0) -- a type error. Parenthesise the strict call.
        | odd n  = 3*n+1 : (f $! (3*n+1))
        | even n = div n 2 : (f $! div n 2)
| softwaremechanic/Miscellaneous | Haskell/collatz.hs | gpl-2.0 | 717 | 3 | 16 | 176 | 289 | 147 | 142 | 15 | 2 |
{-# OPTIONS_GHC -Wall -fwarn-tabs -Werror #-}
-------------------------------------------------------------------------------
-- |
-- Module : Time.Types
-- Copyright : Copyright (c) 2014 Michael R. Shannon
-- License : GPLv2 or Later
-- Maintainer : mrshannon.aerospace@gmail.com
-- Stability : unstable
-- Portability : portable
--
-- Time module types.
-------------------------------------------------------------------------------
module Time.Types
( Time(..)
) where
-- | A clock sample passed through the simulation.
data Time = Time
    { currentTime :: Double  -- ^ absolute time (units not stated here; presumably seconds -- confirm)
    , deltaTime :: Double    -- ^ step size since the previous sample (same units, presumably -- confirm)
    } deriving(Eq, Show)
| mrshannon/trees | src/Time/Types.hs | gpl-2.0 | 600 | 0 | 8 | 110 | 59 | 41 | 18 | 7 | 0 |
{-|
Module : Util.Happstack
Description : Contains a single method for creating a JSON response.
-}
module Util.Happstack
(createJSONResponse) where
import Data.Aeson (ToJSON, encode)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BSL
import Happstack.Server
-- | Creates a JSON response.
-- | Creates a JSON response with an application/json content type.
createJSONResponse :: ToJSON a => a -> Response
createJSONResponse x = toResponseBS (BS.pack "application/json") (encodeJSON x)
    where
        encodeJSON :: ToJSON a => a -> BSL.ByteString
        -- NOTE(review): stripping every backslash from the encoded output
        -- also destroys legitimate JSON escapes (e.g. \" or \n inside string
        -- values), producing invalid JSON for any value containing them.
        -- Confirm this is intentional for the data actually serialised here.
        encodeJSON = BSL.filter (/= '\\') . encode
| Courseography/courseography | app/Util/Happstack.hs | gpl-3.0 | 596 | 0 | 9 | 113 | 131 | 76 | 55 | 10 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
-- |
-- Copyright : (c) 2010, 2011 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Portability : GHC only
--
-- Theory loading infrastructure.
module Main.TheoryLoader (
-- * Static theory loading settings
theoryLoadFlags
-- ** Loading open theories
, loadOpenThy
-- ** Loading and closing theories
, loadClosedThy
, loadClosedThyWfReport
, loadClosedThyString
, reportOnClosedThyStringWellformedness
-- ** Loading open diff theories
, loadOpenDiffThy
-- ** Loading and closing diff theories
, loadClosedDiffThy
, loadClosedDiffThyWfReport
, loadClosedDiffThyString
, reportOnClosedDiffThyStringWellformedness
-- ** Constructing automatic provers
, constructAutoProver
, constructAutoDiffProver
-- ** Cached Message Deduction Rule Variants
, dhIntruderVariantsFile
, bpIntruderVariantsFile
, addMessageDeductionRuleVariants
) where
-- import Debug.Trace
import Prelude hiding (id, (.))
import Data.Char (toLower)
import Data.Label
import Data.List (isPrefixOf,intersperse)
import Data.Map (keys)
-- import Data.Monoid
import Data.FileEmbed (embedFile)
-- import Control.Basics
import Control.Category
import Control.DeepSeq (rnf)
import System.Console.CmdArgs.Explicit
import Theory
import Theory.Text.Parser (parseIntruderRules, parseOpenTheory, parseOpenTheoryString, parseOpenDiffTheory, parseOpenDiffTheoryString)
import Theory.Tools.AbstractInterpretation (EvaluationStyle(..))
import Theory.Tools.IntruderRules (specialIntruderRules, subtermIntruderRules
, multisetIntruderRules, xorIntruderRules)
import Theory.Tools.Wellformedness
import Sapic
import Main.Console
import Main.Environment
import Text.Parsec hiding ((<|>),try)
------------------------------------------------------------------------------
-- Theory loading: shared between interactive and batch mode
------------------------------------------------------------------------------
-- | Flags for loading a theory.
-- Each entry binds one command-line flag to an Arguments update; shared by
-- interactive and batch mode.
theoryLoadFlags :: [Flag Arguments]
theoryLoadFlags =
  [ flagOpt "" ["prove"] (updateArg "prove") "LEMMAPREFIX"
      "Attempt to prove a lemma "
  , flagOpt "dfs" ["stop-on-trace"] (updateArg "stopOnTrace") "DFS|BFS|SEQDFS|NONE"
      "How to search for traces (default DFS)"
  , flagOpt "5" ["bound", "b"] (updateArg "bound") "INT"
      "Bound the depth of the proofs"
  , flagOpt "s" ["heuristic"] (updateArg "heuristic") ("(" ++ (intersperse '|' $ keys goalRankingIdentifiers) ++ ")+")
      "Sequence of goal rankings to use (default 's')"
  , flagOpt "summary" ["partial-evaluation"] (updateArg "partialEvaluation")
      "SUMMARY|VERBOSE"
      "Partially evaluate multiset rewriting system"
  , flagOpt "" ["defines","D"] (updateArg "defines") "STRING"
      "Define flags for pseudo-preprocessor."
  , flagNone ["diff"] (addEmptyArg "diff")
      "Turn on observational equivalence mode using diff terms."
  , flagNone ["quit-on-warning"] (addEmptyArg "quit-on-warning")
      "Strict mode that quits on any warning that is emitted."
  -- the oracle path is consumed by constructAutoProver/constructAutoDiffProver
  , flagOpt "./oracle" ["oraclename"] (updateArg "oraclename") "FILE"
      "Path to the oracle heuristic (default './oracle')."
--  , flagOpt "" ["diff"] (updateArg "diff") "OFF|ON"
--      "Turn on observational equivalence (default OFF)."
  ]
-- | The defined pre-processor flags in the argument.
-- Every value supplied via --defines/-D on the command line.
defines :: Arguments -> [String]
defines = findArg "defines"
-- | Diff flag in the argument
-- | Diff flag in the argument: ["diff"] when --diff was given, else [].
-- (List comprehension replaces the if/then/else.)
diff :: Arguments -> [String]
diff as = [ "diff" | argExists "diff" as ]
-- | quit-on-warning flag in the argument
-- | quit-on-warning flag in the argument: ["quit-on-warning"] when set, else [].
-- (List comprehension replaces the if/then/else.)
quitOnWarning :: Arguments -> [String]
quitOnWarning as = [ "quit-on-warning" | argExists "quit-on-warning" as ]
-- | Load an open theory from a file.
-- | Parse an open diff theory from a file, forwarding the --diff, --defines
-- and --quit-on-warning flags to the parser.
loadOpenDiffThy :: Arguments -> FilePath -> IO OpenDiffTheory
loadOpenDiffThy as fp = parseOpenDiffTheory (diff as ++ defines as ++ quitOnWarning as) fp
-- | Load an open theory from a file.
-- | Parse an open theory from a file and run the SAPIC translation on it.
-- (Direct bind replaces the `x <- m; return x` pattern.)
loadOpenThy :: Arguments -> FilePath -> IO OpenTranslatedTheory
loadOpenThy as inFile =
    parseOpenTheory (diff as ++ defines as ++ quitOnWarning as) inFile
        >>= Sapic.translate
-- | Load a closed theory.
-- | Load a diff theory from a file, add the message-deduction rule variants,
-- and close it.
loadClosedDiffThy :: Arguments -> FilePath -> IO ClosedDiffTheory
loadClosedDiffThy as inFile =
    loadOpenDiffThy as inFile
        >>= addMessageDeductionRuleVariantsDiff
        >>= closeDiffThy as
-- | Load a closed theory.
-- | Load and close a theory from a file (parse, translate, then 'closeThy').
loadClosedThy :: Arguments -> FilePath -> IO ClosedTheory
loadClosedThy as inFile = loadOpenThy as inFile >>= closeThy as
-- | Load a closed theory and report on well-formedness errors.
-- | Load a closed theory, printing a banner with any well-formedness errors
-- first; in quit-on-warning mode the errors abort the program instead.
loadClosedThyWfReport :: Arguments -> FilePath -> IO ClosedTheory
loadClosedThyWfReport as inFile = do
    thy <- loadOpenThy as inFile
    -- print the wellformedness report (or nothing when the theory is clean)
    case checkWellformedness thy of
      [] -> return ()
      report -> do
          putStrLn ""
          putStrLn $ replicate 78 '-'
          putStrLn $ "Theory file '" ++ inFile ++ "'"
          putStrLn $ replicate 78 '-'
          putStrLn ""
          putStrLn $ "WARNING: ignoring the following wellformedness errors"
          putStrLn ""
          putStrLn $ renderDoc $ prettyWfErrorReport report
          putStrLn $ replicate 78 '-'
          if elem "quit-on-warning" (quitOnWarning as) then error "quit-on-warning mode selected - aborting on wellformedness errors." else putStrLn ""
    -- return closed theory
    closeThy as thy
-- | Load a closed diff theory and report on well-formedness errors.
-- | Diff-theory analogue of 'loadClosedThyWfReport': load, print any
-- well-formedness errors (aborting in quit-on-warning mode), then close.
loadClosedDiffThyWfReport :: Arguments -> FilePath -> IO ClosedDiffTheory
loadClosedDiffThyWfReport as inFile = do
    thy0 <- loadOpenDiffThy as inFile
    thy1 <- addMessageDeductionRuleVariantsDiff thy0
    -- print the wellformedness report (or nothing when the theory is clean)
    case checkWellformednessDiff thy1 of
      [] -> return ()
      report -> do
          putStrLn ""
          putStrLn $ replicate 78 '-'
          putStrLn $ "Theory file '" ++ inFile ++ "'"
          putStrLn $ replicate 78 '-'
          putStrLn ""
          putStrLn $ "WARNING: ignoring the following wellformedness errors"
          putStrLn ""
          putStrLn $ renderDoc $ prettyWfErrorReport report
          putStrLn $ replicate 78 '-'
          if elem "quit-on-warning" (quitOnWarning as) then error "quit-on-warning mode selected - aborting on wellformedness errors." else putStrLn ""
    -- return closed theory
    closeDiffThy as thy1
-- | Parse, translate and close a theory given as a string; parse failures are
-- returned as a Left message.
loadClosedThyString :: Arguments -> String -> IO (Either String ClosedTheory)
loadClosedThyString as input =
    case parseOpenTheoryString (defines as) input of
        Left err -> return $ Left $ "parse error: " ++ show err
        Right thy -> do
          thy' <- Sapic.translate thy
          Right <$> closeThy as thy' -- No "return" because closeThy gives IO (ClosedTheory)
-- | Parse and close a diff theory given as a string; parse failures are
-- returned as a Left message.
loadClosedDiffThyString :: Arguments -> String -> IO (Either String ClosedDiffTheory)
loadClosedDiffThyString as input =
    case parseOpenDiffTheoryString (defines as) input of
        Left err  -> return $ Left $ "parse error: " ++ show err
        Right thy -> fmap Right $ do
          thy1 <- addMessageDeductionRuleVariantsDiff thy
          closeDiffThy as thy1
-- | Parse an open theory from a string. Note: unlike 'loadOpenThy' this is
-- pure and does NOT run the SAPIC translation.
loadOpenThyString :: Arguments -> String -> Either ParseError OpenTheory
loadOpenThyString as = parseOpenTheoryString (diff as ++ defines as ++ quitOnWarning as)

-- | Parse an open diff theory from a string (pure; parsing only).
loadOpenDiffThyString :: Arguments -> String -> Either ParseError OpenDiffTheory
loadOpenDiffThyString as = parseOpenDiffTheoryString (diff as ++ defines as ++ quitOnWarning as)
-- | Load a close theory and only report on well-formedness errors or translation errors
-- | Parse and translate a theory from a string and return a human-readable
-- report: "" when clean, a parse-error message, or a warning listing the
-- well-formedness errors (aborting instead in quit-on-warning mode).
reportOnClosedThyStringWellformedness :: Arguments -> String -> IO String
reportOnClosedThyStringWellformedness as input =
    case loadOpenThyString as input of
      Left err -> return $ "parse error: " ++ show err
      Right thy -> do
        thy' <- Sapic.translate thy
        case checkWellformedness thy' of
          [] -> return ""
          report -> do
            if elem "quit-on-warning" (quitOnWarning as) then error "quit-on-warning mode selected - aborting on wellformedness errors." else putStrLn ""
            return $ " WARNING: ignoring the following wellformedness errors: " ++(renderDoc $ prettyWfErrorReport report)
-- | Load a closed diff theory and report on well-formedness errors.
-- | Diff-theory analogue of 'reportOnClosedThyStringWellformedness': parse a
-- diff theory from a string and return "" when clean, a parse-error message,
-- or a warning listing the well-formedness errors.
reportOnClosedDiffThyStringWellformedness :: Arguments -> String -> IO String
reportOnClosedDiffThyStringWellformedness as input = do
    case loadOpenDiffThyString as input of
      Left err -> return $ "parse error: " ++ show err
      Right thy0 -> do
        thy1 <- addMessageDeductionRuleVariantsDiff thy0
        -- inspect the wellformedness report
        case checkWellformednessDiff thy1 of
          [] -> return ""
          report -> do
            if elem "quit-on-warning" (quitOnWarning as) then error "quit-on-warning mode selected - aborting on wellformedness errors." else putStrLn ""
            return $ " WARNING: ignoring the following wellformedness errors: " ++(renderDoc $ prettyWfErrorReport report)
-- | Close a theory according to arguments.
-- | Close a theory according to arguments: add message-deduction rule
-- variants, run the well-formedness check, close via Maude, optionally
-- partially evaluate, and prove the lemmas selected by --prove.
closeThy :: Arguments -> OpenTranslatedTheory -> IO ClosedTheory
closeThy as thy0 = do
  thy1 <- addMessageDeductionRuleVariants thy0
  -- FIXME: wf-check is at the wrong position here. Needs to be more
  -- fine-grained.
  let thy2 = wfCheck thy1
  -- close and prove
  cthy <- closeTheory (maudePath as) thy2
  return $ proveTheory lemmaSelector prover $ partialEvaluation cthy
    where
      -- apply partial evaluation when --partial-evaluation was given
      ----------------------------
      partialEvaluation = case map toLower <$> findArg "partialEvaluation" as of
        Just "verbose" -> applyPartialEvaluation Tracing
        Just _         -> applyPartialEvaluation Summary
        _              -> id
      -- wellformedness check (aborts in quit-on-warning mode)
      -----------------------
      wfCheck :: OpenTranslatedTheory -> OpenTranslatedTheory
      wfCheck thy =
        noteWellformedness
          (checkWellformedness thy) thy (elem "quit-on-warning" (quitOnWarning as))
      -- a lemma is selected when its name starts with any --prove prefix
      lemmaSelector :: Lemma p -> Bool
      lemmaSelector lem =
          any (`isPrefixOf` get lName lem) lemmaNames
        where
          lemmaNames :: [String]
          lemmaNames = findArg "prove" as
      -- replace all annotated sorrys with the configured autoprover.
      prover :: Prover
      prover | argExists "prove" as =
                 replaceSorryProver $ runAutoProver $ constructAutoProver as
             | otherwise            = mempty
-- | Close a diff theory according to arguments.
-- | Close a diff theory according to arguments; mirrors 'closeThy' but adds
-- the default diff lemma and proves both plain and diff lemmas.
closeDiffThy :: Arguments -> OpenDiffTheory -> IO ClosedDiffTheory
closeDiffThy as thy0 = do
  -- FIXME: wf-check is at the wrong position here. Needs to be more
  -- fine-grained.
  let thy2 = wfCheckDiff thy0
  -- close and prove
  cthy <- closeDiffTheory (maudePath as) (addDefaultDiffLemma thy2)
  return $ proveDiffTheory lemmaSelector diffLemmaSelector prover diffprover $ partialEvaluation cthy
    where
      -- apply partial evaluation when --partial-evaluation was given
      ----------------------------
      partialEvaluation = case map toLower <$> findArg "partialEvaluation" as of
        Just "verbose" -> applyPartialEvaluationDiff Tracing
        Just _         -> applyPartialEvaluationDiff Summary
        _              -> id
      -- wellformedness check (aborts in quit-on-warning mode)
      -----------------------
      wfCheckDiff :: OpenDiffTheory -> OpenDiffTheory
      wfCheckDiff thy =
        noteWellformednessDiff
          (checkWellformednessDiff thy) thy (elem "quit-on-warning" (quitOnWarning as))
      -- a lemma is selected when its name starts with any --prove prefix
      lemmaSelector :: Lemma p -> Bool
      lemmaSelector lem =
          any (`isPrefixOf` get lName lem) lemmaNames
        where
          lemmaNames :: [String]
          lemmaNames = findArg "prove" as
      diffLemmaSelector :: DiffLemma p -> Bool
      diffLemmaSelector lem =
          any (`isPrefixOf` get lDiffName lem) lemmaNames
        where
          lemmaNames :: [String]
          lemmaNames = findArg "prove" as
      -- diff prover: replace all annotated sorrys with the configured autoprover.
      diffprover :: DiffProver
      diffprover | argExists "prove" as =
                     replaceDiffSorryProver $ runAutoDiffProver $ constructAutoDiffProver as
                 | otherwise            = mempty
      -- replace all annotated sorrys with the configured autoprover.
      prover :: Prover
      prover | argExists "prove" as =
                 replaceSorryProver $ runAutoProver $ constructAutoProver as
             | otherwise            = mempty
-- | Construct an 'AutoProver' from the given arguments (--bound,
-- --stop-on-trace).
-- | Build an 'AutoProver' from --bound, --heuristic, --oraclename and
-- --stop-on-trace.
constructAutoProver :: Arguments -> AutoProver
constructAutoProver as =
    -- force error early
    (rnf rankings) `seq`
    AutoProver (roundRobinHeuristic rankings) proofBound stopOnTrace
  where
    -- handles to relevant arguments
    --------------------------------
    -- NOTE(review): `read` is partial; a non-numeric --bound crashes here.
    proofBound = read <$> findArg "bound" as
    -- oracle path, always prefixed with "./"
    oracleName = case findArg "oraclename" as of
                   Nothing -> "./oracle"
                   Just fileName -> "./" ++ fileName
    rankings = case findArg "heuristic" as of
                Just (rawRankings@(_:_)) -> map setOracleName $ map charToGoalRanking rawRankings
                Just [] -> error "--heuristic: at least one ranking must be given"
                _ -> [SmartRanking False]
    -- inject the chosen oracle path into oracle-based rankings
    setOracleName (OracleRanking _) = OracleRanking oracleName
    setOracleName (OracleSmartRanking _) = OracleSmartRanking oracleName
    setOracleName r = r
    stopOnTrace = case (map toLower) <$> findArg "stopOnTrace" as of
      Nothing -> CutDFS
      Just "dfs" -> CutDFS
      Just "none" -> CutNothing
      Just "bfs" -> CutBFS
      Just "seqdfs" -> CutSingleThreadDFS
      Just other -> error $ "unknown stop-on-trace method: " ++ other
-- | Construct an 'AutoProver' from the given arguments (--bound,
-- --stop-on-trace).
-- | Diff-mode analogue of 'constructAutoProver' (near-duplicate: differs only
-- in charToGoalRankingDiff and the SmartDiffRanking default).
constructAutoDiffProver :: Arguments -> AutoProver
constructAutoDiffProver as =
    -- FIXME!
    -- force error early
    (rnf rankings) `seq`
    AutoProver (roundRobinHeuristic rankings) proofBound stopOnTrace
  where
    -- handles to relevant arguments
    --------------------------------
    -- NOTE(review): `read` is partial; a non-numeric --bound crashes here.
    proofBound = read <$> findArg "bound" as
    oracleName = case findArg "oraclename" as of
                   Nothing -> "./oracle"
                   Just fileName -> "./" ++ fileName
    rankings = case findArg "heuristic" as of
                Just (rawRankings@(_:_)) -> map setOracleName $ map charToGoalRankingDiff rawRankings
                Just [] -> error "--heuristic: at least one ranking must be given"
                _ -> [SmartDiffRanking]
    -- inject the chosen oracle path into oracle-based rankings
    setOracleName (OracleRanking _) = OracleRanking oracleName
    setOracleName (OracleSmartRanking _) = OracleSmartRanking oracleName
    setOracleName r = r
    stopOnTrace = case (map toLower) <$> findArg "stopOnTrace" as of
      Nothing -> CutDFS
      Just "dfs" -> CutDFS
      Just "none" -> CutNothing
      Just "bfs" -> CutBFS
      Just "seqdfs" -> CutSingleThreadDFS
      Just other -> error $ "unknown stop-on-trace method: " ++ other
------------------------------------------------------------------------------
-- Message deduction variants cached in files
------------------------------------------------------------------------------
-- | The name of the intruder variants file.
-- Path of the Diffie-Hellman intruder variants file (also embedded at
-- compile time below).
dhIntruderVariantsFile :: FilePath
dhIntruderVariantsFile = "data/intruder_variants_dh.spthy"

-- | Path of the bilinear-pairing intruder variants file.
bpIntruderVariantsFile :: FilePath
bpIntruderVariantsFile = "data/intruder_variants_bp.spthy"
-- | Construct the DH intruder variants for the given maude signature.
-- | Construct the DH intruder variants for the given maude signature. The
-- file contents are embedded at compile time via Template Haskell.
mkDhIntruderVariants :: MaudeSig -> [IntrRuleAC]
mkDhIntruderVariants msig =
    either (error . show) id  -- report errors lazily through 'error'
  $ parseIntruderRules msig dhIntruderVariantsFile
                $(embedFile "data/intruder_variants_dh.spthy")
-- | Construct the BP intruder variants for the given maude signature.
-- | Construct the BP intruder variants for the given maude signature. The
-- file contents are embedded at compile time via Template Haskell.
mkBpIntruderVariants :: MaudeSig -> [IntrRuleAC]
mkBpIntruderVariants msig =
    either (error . show) id  -- report errors lazily through 'error'
  $ parseIntruderRules msig bpIntruderVariantsFile
                $(embedFile "data/intruder_variants_bp.spthy")
-- | Add the variants of the message deduction rule. Uses built-in cached
-- files for the variants of the message deduction rules for Diffie-Hellman
-- exponentiation and Bilinear-Pairing.
addMessageDeductionRuleVariants :: OpenTranslatedTheory -> IO OpenTranslatedTheory
-- TODO (SM): drop use of IO here.
-- Adds the basic intruder rules (subterm, special, multiset, xor as enabled
-- by the signature) and, when DH/BP are enabled, the cached DH/BP variants.
addMessageDeductionRuleVariants thy0
  | enableBP msig = addIntruderVariants [ mkDhIntruderVariants
                                        , mkBpIntruderVariants ]
  | enableDH msig = addIntruderVariants [ mkDhIntruderVariants ]
  | otherwise     = return thy
  where
    msig         = get (sigpMaudeSig . thySignature) thy0
    rules        = subtermIntruderRules False msig ++ specialIntruderRules False
                   ++ (if enableMSet msig then multisetIntruderRules else [])
                   ++ (if enableXor msig then xorIntruderRules else [])
    thy          = addIntrRuleACsAfterTranslate rules thy0
    addIntruderVariants mkRuless = do
        return $ addIntrRuleACsAfterTranslate (concatMap ($ msig) mkRuless) thy
-- | Add the variants of the message deduction rule. Uses the cached version
-- of the @"intruder_variants_dh.spthy"@ file for the variants of the message
-- deduction rules for Diffie-Hellman exponentiation.
-- Diff-theory analogue of 'addMessageDeductionRuleVariants': adds both the
-- diff and non-diff rule sets and labels the intruder rules.
addMessageDeductionRuleVariantsDiff :: OpenDiffTheory -> IO OpenDiffTheory
addMessageDeductionRuleVariantsDiff thy0
  | enableBP msig = addIntruderVariantsDiff [ mkDhIntruderVariants
                                            , mkBpIntruderVariants ]
  | enableDH msig = addIntruderVariantsDiff [ mkDhIntruderVariants ]
  | otherwise     = return $ addIntrRuleLabels thy
  where
    msig         = get (sigpMaudeSig . diffThySignature) thy0
    rules diff'  = subtermIntruderRules diff' msig ++ specialIntruderRules diff'
                    ++ (if enableMSet msig then multisetIntruderRules else [])
                    ++ (if enableXor msig then xorIntruderRules else [])
    thy          = addIntrRuleACsDiffBoth (rules False) $ addIntrRuleACsDiffBothDiff (rules True) thy0
    addIntruderVariantsDiff mkRuless = do
        return $ addIntrRuleLabels (addIntrRuleACsDiffBothDiff (concatMap ($ msig) mkRuless) $ addIntrRuleACsDiffBoth (concatMap ($ msig) mkRuless) thy)
| kmilner/tamarin-prover | src/Main/TheoryLoader.hs | gpl-3.0 | 19,132 | 0 | 20 | 4,750 | 3,682 | 1,855 | 1,827 | 292 | 11 |
{-#LANGUAGE ImpredicativeTypes, FlexibleContexts, RankNTypes,TypeOperators, ScopedTypeVariables, GADTs, MultiParamTypeClasses #-}
module Carnap.Core.Data.Util (scopeHeight, equalizeTypes, incArity, withArity, checkChildren, saferSubst,
mapover, maphead, withHead, hasVar, (:~:)(Refl), Buds(..), Blossoms(..), bloom, sbloom, grow, rebuild, stateRebuild, castToProxy, castTo) where
--this module defines utility functions and typeclasses for manipulating
--the data types defined in Core.Data
import Carnap.Core.Util
import Carnap.Core.Data.Types
import Carnap.Core.Data.Classes
import Carnap.Core.Unification.Unification
import Data.Typeable
import Data.List (nub)
import Control.Lens.Plated (Plated, cosmos, transform, children)
import Control.Lens.Fold (anyOf)
import Control.Monad.State.Lazy as S
--------------------------------------------------------
--1.Utility Functions
--------------------------------------------------------
{-|
Given two occupants of a typed fixpoint, this function returns @Just@
a proof of the equality of their types, if their types are equal, and
otherwise @Nothing@. Concretely, it lets you do things like dispatch to
different behaviors depending on the type of your arguments, for all the
languages that Carnap supports (since these languages are typed
fixedpoints).
For example, suppose you have two functions @f :: Language Int -> Bool@ and
@g :: Language Bool -> Bool@, and two representative language items @a ::
Language Int@, and @b :: Language Bool@. Then you can write
> combine f g v = case equalizeTypes v a of
> Just Refl -> f v
> Nothing -> case equalizeTypes v b of
> Just Refl -> g v
> Nothing -> False
to union the functions into a single polymorphic function.
-}
equalizeTypes :: Fix f a -> Fix f b -> Maybe (a :~: b)
equalizeTypes (x@(Fx _) :: Fix f a) (y@(Fx _) :: Fix f b) = eqT :: Maybe (a :~: b)

-- | Like 'equalizeTypes', but compares a term's index type against a 'Proxy'
-- instead of another term.
castToProxy :: Typeable a => Proxy a -> Fix f b -> Maybe (a :~: b)
castToProxy (Proxy :: Proxy a) (y@(Fx _) :: Fix f b) = eqT :: Maybe (a :~: b)

-- | Safely cast a fixed-point term to the requested index type, returning
-- Nothing when the types differ.
castTo :: forall a b f. (Typeable a, Typeable b) => Fix f b -> Maybe (Fix f a)
castTo x = case eqT :: Maybe (a :~: b) of Nothing -> Nothing; Just Refl -> Just x
{-|
This function replaces the head of a given language item with another head
that increases the arity of the item.
-}
incArity :: (Typeable a, Typeable b) =>
    (forall c. Typeable c => FixLang l c -> Maybe (FixLang l (b -> c))) ->
    FixLang l (b -> a) -> Maybe (FixLang l (b -> b -> a))
-- Walk down the application spine to the head, replace it via f, and rebuild
-- the applications; fails when the argument types do not line up.
incArity f ((head :: FixLang l (t -> b -> a)) :!$: (tail :: FixLang l t)) =
        case eqT :: Maybe (t :~: b) of
            Nothing -> Nothing
            Just Refl ->  do x <- incArity f head
                             return $ x :!$: tail
incArity f head = f head
{-|
This function applies a suitably polymorphic function to the head of an
expression along with its arity, assuiming it has an arity.
-}
withArity :: (Typeable a, Typeable ret', Typeable i) =>
       (forall ret. Arity i o ret -> FixLang l ret -> b)
    -> Arity i o ret' -> FixLang l a -> Maybe b
-- Each application peeled off the spine bumps the arity by one (ASucc);
-- at the head, succeed only if the accumulated arity's return type matches.
withArity f a (head :!$: tail) = withArity f (ASucc a) head
withArity f (a :: Arity i o ret') (phi :: FixLang l a) =
    case eqT :: Maybe (a :~: ret') of
        Nothing -> Nothing
        Just Refl -> Just (f a phi)
{-|
this function checks to see if phi occurs as a child of psi
-}
-- | True when phi occurs strictly below psi. 'cosmos' also visits psi
-- itself, hence the explicit phi /= psi guard.
checkChildren :: (Eq s, Plated s) => s -> s -> Bool
checkChildren phi psi = phi /= psi && anyOf cosmos (== phi) psi
{-|
this function will, given a suitably polymorphic argument `f`, apply `f` to each of the immediate children, including the head, of the linguistic expression `le`.
-}
mapover :: (forall a. FixLang l a -> FixLang l a) -> FixLang l b -> FixLang l b
-- Recurses down the left (application) spine, applying f to every argument
-- and to the final head.
mapover f le@(x :!$: y) = mapover f x :!$: f y
mapover f x = f x
{-|
this function will, given a suitably polymorphic argument `f`, apply `f` to the head of the linguistic expression `le`.
-}
-- | Apply f to the head of the expression only, leaving all arguments
-- untouched. (Dropped the unused `le@` as-pattern.)
maphead :: (forall a. Typeable a => FixLang l a -> FixLang l a) -> FixLang l b -> FixLang l b
maphead f (x :!$: y) = maphead f x :!$: y
maphead f x@(Fx _) = f x
-- | Apply f to the head of the expression, discarding the arguments.
-- (Dropped the unused `le@` as-pattern and the unused argument binding.)
withHead :: (forall a. Typeable a => FixLang l a -> c) -> FixLang l b -> c
withHead f (x :!$: _) = withHead f x
withHead f x@(Fx _) = f x
{-|
this function will, given a suitably polymorphic argument `f`, apply `f` to the children of the linguistic expression `le`, but not the head.
-}
-- | Apply f to every argument of the expression but not to the head.
-- (Underscored the unused function binding in the base case. Note: this
-- helper is not in the module's export list.)
mapbody :: (forall a. Typeable a => FixLang l a -> FixLang l a) -> FixLang l b -> FixLang l b
mapbody f (x :!$: y) = maphead f x :!$: f y
mapbody _ x@(Fx _) = x
-- | True when the expression contains a variable anywhere; lambda bodies are
-- probed by instantiating them with a static placeholder.
hasVar :: (StaticVar (FixLang l), FirstOrder (FixLang l)) => FixLang l b -> Bool
hasVar (x :!$: y) = hasVar x || hasVar y
hasVar (LLam f) = hasVar $ f (static 0)
hasVar x = isVar x
{-|
This function will assign a height to a given linguistic expression,
greater than the height of any of any formula in the scope of one of its
variable-binding subexpressions
-}
scopeHeight :: MonadVar (FixLang f) (State Int) => FixLang f a -> Int
scopeHeight (x :!$: y) = max (scopeHeight x) (scopeHeight y)
scopeHeight (LLam f) = scopeHeight (f dv) + 1
    -- dv is a dummy variable used only to open the binder
    where dv = evalState fresh (0 :: Int)
scopeHeight _ = 0
{-|
This function will rebuild a given linguistic expression, removing any
closures that might be present in the open formulas
-}
rebuild :: ( FirstOrder (FixLang f)
           , MonadVar (FixLang f) (State Int)
           , StaticVar (FixLang f)) => FixLang f a -> FixLang f a
rebuild (x :!$: y) = rebuild x :!$: rebuild y
-- open the binder with a static variable above the scope height, rebuild the
-- body, then re-abstract by substituting the bound variable back in
rebuild (LLam f) = LLam (\x -> subst sv x $ rebuild (f sv))
    where sv = static $ scopeHeight (LLam f)
rebuild t = t
-- | Like 'rebuild', but threads the counter for static placeholder variables
-- through a State monad instead of deriving it from the scope height.
stateRebuild :: ( FirstOrder (FixLang f) , StaticVar (FixLang f)) => FixLang f a -> State Int (FixLang f a)
stateRebuild (x :!$: y) = (:!$:) <$> stateRebuild x <*> stateRebuild y
stateRebuild (LLam f) = do n <- get
                           put (n + 1)
                           f' <- stateRebuild $ f (static n)
                           return $ LLam (\x -> subst (static n) x f')
stateRebuild t = return t
saferSubst :: ( StaticVar (FixLang f)
              , FirstOrder (FixLang f)
              ) => FixLang f a -> FixLang f a -> FixLang f b -> FixLang f b
-- Substitute b for a in c, but only descend into a lambda when a actually
-- occurs inside it (avoiding needless rebuilding of closed bodies).
saferSubst a b c
    | a =* c = subst a b c
    | otherwise = case c of
        (x :!$: y) -> saferSubst a b x :!$: saferSubst a b y
        (LLam f) -> if a `occurs` c then LLam $ saferSubst a b . f else LLam f
        _ -> c
--------------------------------------------------------
--2. Random Syntax
--------------------------------------------------------
{-|
These are data structures that will be replaced in the course of
a generating list syntax items for testing. If one thinks of the piece of
syntax as a tree, then the buds are what are replaced by blossoms as the
tree grows.
-}
data Buds f where Bud :: Typeable a => f a -> Buds f
{-|
These are the data structures that replace buds as a piece of syntax grows;
the existential wrapper hides the index type of the wrapped item.
-}
data Blossoms f where Blossom :: Typeable a => f a -> Blossoms f
-- | All single-step growths of a tree: every way of replacing one bud
-- occurrence with one blossom. The variable counter is seeded above the
-- tree's scope height so fresh variables cannot clash.
bloom :: (MonadVar (FixLang f) (State Int), MonadVar (FixLang f) (StateT Int []), FirstOrder (FixLang f), UniformlyEq (FixLang f)) =>
    [Buds (FixLang f)] -> [Blossoms (FixLang f)] -> FixLang f a -> [FixLang f a]
bloom buds blossoms tree = evalStateT (sbloom buds blossoms tree) (scopeHeight tree + 1)
-- | Nondeterministic single-replacement step underlying 'bloom': pick a bud
-- and a blossom, pick a path into the tree (head or tail of an application,
-- or under a binder), and succeed only at a leaf equal to the chosen bud
-- whose type also matches the blossom.
sbloom :: (MonadVar (FixLang f) (StateT Int []), FirstOrder (FixLang f), UniformlyEq (FixLang f)) =>
    [Buds (FixLang f)] -> [Blossoms (FixLang f)] -> FixLang f a -> StateT Int [] (FixLang f a)
sbloom buds blossoms tree =
        do (Bud bud) <- S.lift buds
           (Blossom blossom) <- S.lift blossoms
           case tree of
               (h :!$: t) ->
                    -- nondeterministically descend into head or tail
                    do head <- S.lift [True,False]
                       if head
                           then do h' <- sbloom buds blossoms h
                                   return $ h' :!$: t
                           else do t' <- sbloom buds blossoms t
                                   return $ h :!$: t'
               (LLam f) -> do sv <- fresh
                              f' <- sbloom buds blossoms (f sv)
                              return $ LLam $ \x -> subst sv x f'
               _ -> case (equalizeTypes bud blossom, equalizeTypes bud tree) of
                        (Just Refl, Just Refl) ->
                             if bud =* tree
                                 then return blossom
                                 else S.lift []  -- leaf is not the chosen bud
                        _ -> S.lift []  -- types don't line up
-- | Iterated 'bloom': the n-th element holds all trees reachable in n growth
-- steps. NOTE: 'nub' is O(n^2) per generation (only Eq is available), so
-- later generations can get expensive.
grow :: (MonadVar (FixLang f) (State Int), MonadVar (FixLang f) (StateT Int []), FirstOrder (FixLang f), UniformlyEq (FixLang f), Eq (FixLang f a)) =>
    [Buds (FixLang f)] -> [Blossoms (FixLang f)] -> FixLang f a -> [[FixLang f a]]
grow buds blossoms tree = iterate (\x -> x >>= nub . bloom buds blossoms) [tree]
{-
If some of your blossoms are lambdas, you might be more pleased with the results of this function.
-}
-- betaGrow :: (MonadVar (FixLang f) (State Int), MonadVar (FixLang f) (StateT Int []), FirstOrder (FixLang f), UniformlyEq (FixLang f), Eq (FixLang f a)) =>
-- [Buds (FixLang f)] -> [Blossoms (FixLang f)] -> FixLang f a -> [[FixLang f a]]
-- betaGrow buds blossoms tree = iterate (\x -> x >>= fmap betaReduce . nub . bloom buds blossoms) [tree]
| opentower/carnap | Carnap/src/Carnap/Core/Data/Util.hs | gpl-3.0 | 9,241 | 0 | 16 | 2,435 | 2,881 | 1,472 | 1,409 | 114 | 6 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudDebugger.Controller.Debuggees.Register
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Registers the debuggee with the controller service. All agents attached
-- to the same application must call this method with exactly the same
-- request content to get back the same stable \`debuggee_id\`. Agents
-- should call this method again whenever \`google.rpc.Code.NOT_FOUND\` is
-- returned from any controller method. This protocol allows the controller
-- service to disable debuggees, recover from data loss, or change the
-- \`debuggee_id\` format. Agents must handle \`debuggee_id\` value
-- changing upon re-registration.
--
-- /See:/ <https://cloud.google.com/debugger Cloud Debugger API Reference> for @clouddebugger.controller.debuggees.register@.
module Network.Google.Resource.CloudDebugger.Controller.Debuggees.Register
(
-- * REST Resource
ControllerDebuggeesRegisterResource
-- * Creating a Request
, controllerDebuggeesRegister
, ControllerDebuggeesRegister
-- * Request Lenses
, cdrXgafv
, cdrUploadProtocol
, cdrAccessToken
, cdrUploadType
, cdrPayload
, cdrCallback
) where
import Network.Google.Debugger.Types
import Network.Google.Prelude
-- | A resource alias for @clouddebugger.controller.debuggees.register@ method which the
-- 'ControllerDebuggeesRegister' request conforms to.
--
-- Read top to bottom this is the route
-- @POST \/v2\/controller\/debuggees\/register@: four static path segments,
-- the standard Google API query parameters, a JSON
-- 'RegisterDebuggeeRequest' body, and a JSON 'RegisterDebuggeeResponse'
-- result.
type ControllerDebuggeesRegisterResource =
     "v2" :>
       "controller" :>
         "debuggees" :>
           "register" :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "callback" Text :>
                       QueryParam "alt" AltJSON :>
                         ReqBody '[JSON] RegisterDebuggeeRequest :>
                           Post '[JSON] RegisterDebuggeeResponse
-- | Registers the debuggee with the controller service. All agents attached
-- to the same application must call this method with exactly the same
-- request content to get back the same stable \`debuggee_id\`. Agents
-- should call this method again whenever \`google.rpc.Code.NOT_FOUND\` is
-- returned from any controller method. This protocol allows the controller
-- service to disable debuggees, recover from data loss, or change the
-- \`debuggee_id\` format. Agents must handle \`debuggee_id\` value
-- changing upon re-registration.
--
-- /See:/ 'controllerDebuggeesRegister' smart constructor.
data ControllerDebuggeesRegister =
  ControllerDebuggeesRegister'
    { _cdrXgafv :: !(Maybe Xgafv) -- ^ V1 error format (exposed via 'cdrXgafv').
    , _cdrUploadProtocol :: !(Maybe Text) -- ^ Upload protocol, e.g. \"raw\".
    , _cdrAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _cdrUploadType :: !(Maybe Text) -- ^ Legacy upload protocol.
    , _cdrPayload :: !RegisterDebuggeeRequest -- ^ Request body; the only mandatory field.
    , _cdrCallback :: !(Maybe Text) -- ^ JSONP callback.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ControllerDebuggeesRegister' with the minimum fields required to make a request.
--
-- Only the payload is mandatory; use one of the following lenses to modify
-- the remaining fields as desired:
--
-- * 'cdrXgafv'
--
-- * 'cdrUploadProtocol'
--
-- * 'cdrAccessToken'
--
-- * 'cdrUploadType'
--
-- * 'cdrPayload'
--
-- * 'cdrCallback'
controllerDebuggeesRegister
    :: RegisterDebuggeeRequest -- ^ 'cdrPayload'
    -> ControllerDebuggeesRegister
controllerDebuggeesRegister payload =
  ControllerDebuggeesRegister'
    { _cdrPayload = payload
    , _cdrXgafv = Nothing
    , _cdrUploadProtocol = Nothing
    , _cdrAccessToken = Nothing
    , _cdrUploadType = Nothing
    , _cdrCallback = Nothing
    }
-- Simple field lenses: each one pairs a record accessor with a
-- record-update setter via 'lens'.
-- | V1 error format.
cdrXgafv :: Lens' ControllerDebuggeesRegister (Maybe Xgafv)
cdrXgafv = lens _cdrXgafv (\ s a -> s{_cdrXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cdrUploadProtocol :: Lens' ControllerDebuggeesRegister (Maybe Text)
cdrUploadProtocol
  = lens _cdrUploadProtocol
      (\ s a -> s{_cdrUploadProtocol = a})
-- | OAuth access token.
cdrAccessToken :: Lens' ControllerDebuggeesRegister (Maybe Text)
cdrAccessToken
  = lens _cdrAccessToken
      (\ s a -> s{_cdrAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cdrUploadType :: Lens' ControllerDebuggeesRegister (Maybe Text)
cdrUploadType
  = lens _cdrUploadType
      (\ s a -> s{_cdrUploadType = a})
-- | Multipart request metadata.
cdrPayload :: Lens' ControllerDebuggeesRegister RegisterDebuggeeRequest
cdrPayload
  = lens _cdrPayload (\ s a -> s{_cdrPayload = a})
-- | JSONP
cdrCallback :: Lens' ControllerDebuggeesRegister (Maybe Text)
cdrCallback
  = lens _cdrCallback (\ s a -> s{_cdrCallback = a})
instance GoogleRequest ControllerDebuggeesRegister
         where
        -- Response type and required OAuth scopes for this call.
        type Rs ControllerDebuggeesRegister =
             RegisterDebuggeeResponse
        type Scopes ControllerDebuggeesRegister =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloud_debugger"]
        -- Arguments are supplied in the same order as the query
        -- parameters appear in 'ControllerDebuggeesRegisterResource';
        -- the response format is pinned to JSON.
        requestClient ControllerDebuggeesRegister'{..}
          = go _cdrXgafv _cdrUploadProtocol _cdrAccessToken
              _cdrUploadType
              _cdrCallback
              (Just AltJSON)
              _cdrPayload
              debuggerService
          where go
                  = buildClient
                      (Proxy :: Proxy ControllerDebuggeesRegisterResource)
                      mempty
| brendanhay/gogol | gogol-debugger/gen/Network/Google/Resource/CloudDebugger/Controller/Debuggees/Register.hs | mpl-2.0 | 6,075 | 0 | 18 | 1,307 | 729 | 431 | 298 | 107 | 1 |
-- Implicit CAD. Copyright (C) 2011, Christopher Olah (chris@colah.ca)
-- Copyright 2014-2019, Julia Longtin (julial@turinglace.com)
-- Released under the GNU AGPLV3+, see LICENSE
module Graphics.Implicit.FastIntUtil (Fastℕ(Fastℕ), toFastℕ, fromFastℕ) where
import Prelude (Integral(toInteger, quot, rem, quotRem, div, mod, divMod), Num((+), (*), abs, negate, signum, fromInteger), Eq, Ord, Enum(succ, pred, toEnum, fromEnum), Real(toRational), Show(show), ($), Read, Int)
import qualified Prelude as P ((+), (*), abs, negate, signum, fromInteger, succ, pred, toEnum, quotRem, divMod, toInteger)
import GHC.Real (Ratio((:%)))
-- | Conversion between 'Fastℕ' and other integral representations.
class FastN n where
  fromFastℕ :: Fastℕ -> n
  toFastℕ :: n -> Fastℕ
-- 'Int' conversion just wraps/unwraps the newtype constructor.
instance FastN Int where
  fromFastℕ (Fastℕ a) = a
  {-# INLINABLE fromFastℕ #-}
  toFastℕ = Fastℕ
  {-# INLINABLE toFastℕ #-}
-- Identity instance, so code polymorphic in 'FastN' also accepts 'Fastℕ'.
instance FastN Fastℕ where
  fromFastℕ (Fastℕ a) = Fastℕ a
  {-# INLINABLE fromFastℕ #-}
  toFastℕ a = a
  {-# INLINABLE toFastℕ #-}
-- System integers, meant to go fast, and have no chance of wrapping 2^31.
newtype Fastℕ = Fastℕ Int
  deriving (Read, Eq, Ord)
-- An Int is exactly representable as a rational with denominator 1.
instance Real Fastℕ where
  toRational (Fastℕ a) = P.toInteger a :% 1
  {-# INLINABLE toRational #-}
-- Shown as the bare number, without the constructor.
instance Show Fastℕ where
  show (Fastℕ n) = show n
-- | Wrap both components of an 'Int' pair; shared by 'quotRem'/'divMod'.
fastℕBoth :: (Int, Int) -> (Fastℕ, Fastℕ)
fastℕBoth (a, b) = (Fastℕ a, Fastℕ b)
{-# INLINABLE fastℕBoth #-}
-- All division operations delegate to the underlying 'Int' operations,
-- re-wrapping the results in 'Fastℕ'.
instance Integral Fastℕ where
  toInteger (Fastℕ a) = P.toInteger a
  {-# INLINABLE toInteger #-}
  quot (Fastℕ n) (Fastℕ d) = Fastℕ q where (q,_) = quotRem n d
  {-# INLINABLE quot #-}
  rem (Fastℕ n) (Fastℕ d) = Fastℕ r where (_,r) = quotRem n d
  {-# INLINABLE rem #-}
  quotRem (Fastℕ a) (Fastℕ b) = fastℕBoth $ P.quotRem a b
  {-# INLINABLE quotRem #-}
  div (Fastℕ n) (Fastℕ d) = Fastℕ q where (q,_) = divMod n d
  {-# INLINABLE div #-}
  mod (Fastℕ n) (Fastℕ d) = Fastℕ r where (_,r) = divMod n d
  {-# INLINABLE mod #-}
  divMod (Fastℕ n) (Fastℕ d) = fastℕBoth $ P.divMod n d
  {-# INLINABLE divMod #-}
-- Arithmetic is plain machine-Int arithmetic on the wrapped value
-- (qualified 'P.' names are the Prelude operations on 'Int').
instance Num Fastℕ where
  (+) (Fastℕ a) (Fastℕ b) = Fastℕ $ a P.+ b
  {-# INLINABLE (+) #-}
  (*) (Fastℕ a) (Fastℕ b) = Fastℕ $ a P.* b
  {-# INLINABLE (*) #-}
  abs (Fastℕ a) = Fastℕ $ P.abs a
  {-# INLINABLE abs #-}
  negate (Fastℕ a) = Fastℕ $ P.negate a
  {-# INLINABLE negate #-}
  signum (Fastℕ a) = Fastℕ $ P.signum a
  {-# INLINABLE signum #-}
  fromInteger a = Fastℕ $ P.fromInteger a
  {-# INLINABLE fromInteger #-}
-- Enumeration likewise defers to 'Int'; the remaining Enum methods use
-- the class defaults built from 'toEnum'/'fromEnum'.
instance Enum Fastℕ where
  succ (Fastℕ x) = Fastℕ $ P.succ x
  {-# INLINABLE succ #-}
  pred (Fastℕ x) = Fastℕ $ P.pred x
  {-# INLINABLE pred #-}
  toEnum n = Fastℕ $ P.toEnum n
  {-# INLINABLE toEnum #-}
  fromEnum (Fastℕ n) = n
  {-# INLINABLE fromEnum #-}
| krakrjak/ImplicitCAD | Graphics/Implicit/FastIntUtil.hs | agpl-3.0 | 2,884 | 64 | 10 | 630 | 1,022 | 583 | 439 | 43 | 1 |
{-
tempgres, REST service for creating temporary PostgreSQL databases.
Copyright (C) 2014-2020 Bardur Arantsson
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module Tempgres.DatabaseId
( DatabaseId
, mkDatabaseId
, sqlIdentifier
, unquotedIdentifier
, unsafeMkDatabaseId
) where
import Data.Either (fromRight)
import System.Envy (Var(..))
-- Newtype to prevent unsafe construction. The constructor is not exported
-- (the module export list names only the type), so values must go through
-- 'mkDatabaseId' / 'unsafeMkDatabaseId'.
newtype DatabaseId = DatabaseId String
  deriving (Show)
  deriving Var via String
-- Create a new database identifier. This implementation is
-- *extremely* conservative in what it accepts: the first character must
-- be an ASCII letter or underscore, every following character a letter,
-- underscore or digit. No quoting is ever performed, so anything outside
-- this whitelist is rejected outright.
mkDatabaseId :: String -> Either String DatabaseId
mkDatabaseId s = checkFirst s
  where
    -- The first character may not be a digit (and the name may not be empty).
    checkFirst :: String -> Either String DatabaseId
    checkFirst []                        = Left "Database name cannot be empty"
    checkFirst (c:cs) | c `elem` letters = checkRest cs
                      | otherwise        = badChar c
    -- All remaining characters may additionally be digits.
    checkRest :: String -> Either String DatabaseId
    checkRest []                                  = Right (DatabaseId s)
    checkRest (c:cs) | c `elem` (letters ++ digits) = checkRest cs
                     | otherwise                    = badChar c
    badChar c = Left $ "Invalid character '" ++ [c] ++ "' in database name '" ++ s ++ "'"
    letters = "abcdefghjiklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"
    digits = "0123456789"
-- Unsafe conversion from a String to a DatabaseId.
--
-- Calls 'error' if the input is not a valid identifier. The error now
-- carries the validation message from 'mkDatabaseId' instead of the
-- former uninformative "bad default" placeholder.
unsafeMkDatabaseId :: String -> DatabaseId
unsafeMkDatabaseId s =
  either error id $ mkDatabaseId s
-- Turn database identifier into an SQL identifier for the
-- database. Will include quotes if necessary. (Currently identical to
-- 'unquotedIdentifier': the 'mkDatabaseId' whitelist admits only
-- letters, digits and underscore, so no quoting is ever needed.)
sqlIdentifier :: DatabaseId -> String
sqlIdentifier (DatabaseId s) = s -- Our whitelist ensures that we do not need any quoting.
-- Turn database identifier into a RAW identifier for the
-- database. Will NOT include quotes!
unquotedIdentifier :: DatabaseId -> String
unquotedIdentifier (DatabaseId s) = s
| ClockworkConsulting/tempgres-server | server/src/Tempgres/DatabaseId.hs | agpl-3.0 | 2,790 | 0 | 11 | 667 | 453 | 242 | 211 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable, DeriveGeneric, BangPatterns,
ScopedTypeVariables, RankNTypes, StandaloneDeriving #-}
module DNA.Channel.File (
FileChan
, createFileChanImp
, deleteFileChan
, withFileChan
, readFileChan
, mmapFileChan
) where
import Control.Monad
import Data.Vector.Binary ()
import Data.Binary
import Data.Typeable
import qualified Data.Vector.Storable as S
import qualified Data.Vector.Storable.Mutable as MS
import Foreign
import Foreign.C.Types
import Foreign.C.String
import System.Directory
import System.FilePath
import System.IO
import System.Posix.Temp
import DNA.Types ( Location(..) )
-- | A file-based channel: a directory ('fhPath') in which files are
-- exchanged, plus the 'Location' it was created for. The phantom type
-- @a@ tags the kind of data carried, at the type level only.
data FileChan a = FileChan
    { fhPath :: FilePath     -- directory holding the channel's files
    , fhLocation :: Location -- Local or Remote placement
    }
    deriving (Typeable)
-- | Serialised as the directory path followed by the locality, and
-- decoded in the same order.
instance Binary (FileChan a) where
    put (FileChan path loc) = put path >> put loc
    get = liftM2 FileChan get get
-- | Creates a file handle fit to be shared with another actor with
-- the given locality.
--
-- For 'Local' channels fast node-local storage is preferred
-- (@\/ramdisks@, then @\/tmp@), falling back to the shared working
-- directory; 'Remote' channels must live in the shared working directory.
-- Returns 'Nothing' when none of the candidate parent directories exist.
createFileChanImp :: FilePath -> Location -> String -> IO (Maybe (FileChan a))
createFileChanImp workDir loc name = do
    -- Decide parent directory
    let dirs = case loc of
          Local -> ["/ramdisks", "/tmp", workDir]
          Remote -> [workDir]
    -- Use first directory that actually exists
    edirs <- filterM doesDirectoryExist dirs
    if null edirs then return Nothing else do
      -- Generate temporary directory (mkdtemp appends a unique suffix to name)
      dir <- mkdtemp (head edirs </> name)
      return $ Just $ FileChan{ fhPath = dir
                              , fhLocation = loc
                              }
-- | Deletes the file channel. This frees all files contained inside it.
deleteFileChan :: FileChan a -> IO ()
deleteFileChan ch = removeDirectoryRecursive (fhPath ch)
-- | Open a file in the file channel. @name@ is resolved relative to the
-- channel directory; the handle is scoped by 'withFile' and closed on exit.
withFileChan :: FileChan a -> String -> IOMode -> (Handle -> IO b) -> IO b
withFileChan ch name io = withFile (fhPath ch </> name) io
-- read a buffer from a file into pinned memory
-- arguments: buffer ptr, size, offset, path.
-- NOTE(review): 'readFileChan' passes size and offset already scaled to
-- bytes (element count * sizeOf) — presumably byte units; confirm against
-- the C implementation.
foreign import ccall unsafe "read_data"
    c_read_data :: Ptr CDouble -> CLong -> CLong -> CString -> IO ()
-- read a buffer from a file into mmapped memory.
-- arguments: size (num of elements of double type), offset, path,
-- and a node-id string (see 'mmapFileChan'); returns the mapped pointer.
foreign import ccall unsafe "read_data_mmap"
    c_read_data_mmap :: CLong -> CLong -> CString -> CString -> IO (Ptr CDouble)
-- Unmap buffer for the vector; used as a foreign-pointer finalizer
-- (the '&' imports the function's address, as FunPtr finalizers require).
foreign import ccall unsafe "&munmap_data"
    c_munmap_data :: FunPtr (Ptr CLong -> Ptr CDouble -> IO ())
-- | Read the given portion of a vector from the file channel.
--
-- Allocates a pinned mutable vector, fills it through the C helper
-- 'c_read_data', and freezes it in place ('unsafeFreeze' — no copy; the
-- mutable vector must not be reused afterwards, and is not).
readFileChan :: forall a b. Storable b
             => Int64 -- ^ Number of elements to read
             -> Int64 -- ^ Offset to start reading
             -> FileChan a -> String
             -> IO (S.Vector b)
readFileChan n o ch p = do
    mv <- MS.new (fromIntegral n) :: IO (MS.IOVector b)
    -- element size in bytes; converts the element count/offset below
    -- into byte units for the FFI call
    let size = fromIntegral $ sizeOf (undefined :: b)
    MS.unsafeWith mv $ \ptr ->
      withCString (fhPath ch </> p) $
        c_read_data (castPtr ptr) (size * fromIntegral n) (size * fromIntegral o)
    S.unsafeFreeze mv
-- | Memory-map @n@ doubles at offset @o@ from file @p@ in the channel.
-- The @nodeId@ string is passed through to the C side (e.g. for
-- diagnostics — see 'c_read_data_mmap'). The returned vector unmaps
-- itself when garbage collected, via the 'c_munmap_data' finalizer.
mmapFileChan :: Int64 -> Int64 -> FileChan a -> String -> String -> IO (S.Vector Double)
mmapFileChan n o ch p nodeId =
    withCString (fhPath ch </> p) $ \path ->
    withCString nodeId $ \nodeStr -> do
        ptr <- c_read_data_mmap (fromIntegral n) (fromIntegral o) path nodeStr
        -- NOTE: pointer with length is freed in c_munmap_data
        nPtr <- new (fromIntegral n :: CLong)
        fptr <- newForeignPtrEnv c_munmap_data nPtr ptr
        return $ S.unsafeFromForeignPtr0 (castForeignPtr fptr) (fromIntegral n)
| SKA-ScienceDataProcessor/RC | MS3/lib/DNA/Channel/File.hs | apache-2.0 | 3,710 | 0 | 14 | 925 | 979 | 509 | 470 | 75 | 3 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ExistentialQuantification #-}
-- |Contains error-handling mechanisms built atop 'Control.Monad.Except'.
module Crawling.Hephaestos.Fetch.ErrorHandling where
import Control.Monad
import Control.Monad.Catch
import Data.Either (lefts)
import Data.Either.Combinators
-- |Collects the errors from a list of results.
-- Defined as @return . lefts@.
collectErrors :: (Monad m) => [Either e a] -> m [e]
collectErrors = return . lefts
-- |Version of 'mapM_' which collects all the errors which occur.
-- Successful results are discarded; only the exceptions are returned.
mapErr_ :: (MonadCatch m, Exception e) => (a -> m b) -> [a] -> m [e]
mapErr_ f xs = mapErr f xs >>= collectErrors
-- |Version of 'mapM' which returns either the result of the function
-- or the error which occurred during its evaluation. Elements are
-- processed left to right; an exception for one element does not stop
-- the rest from being processed.
mapErr :: (MonadCatch m, Exception e) => (a -> m b) -> [a] -> m [Either e b]
mapErr _ []     = return []
mapErr f (x:xs) = do
    r  <- liftM Right (f x) `catch` (return . Left)
    rs <- mapErr f xs
    return (r : rs)
-- |Version of 'filterM' which collects all the errors encountered.
-- An element is kept only when the predicate returns 'True'; if the
-- predicate returns 'False' or throws, the element is dropped, and every
-- thrown exception is returned alongside the surviving elements.
filterErr :: (MonadCatch m, Exception e) => (a -> m Bool) -> [a] -> m ([a],[e])
filterErr _ [] = return ([], [])
filterErr f (x:xs) = do
    verdict <- liftM Right (f x) `catch` (return . Left)
    (kept, errs) <- filterErr f xs
    return $ case verdict of
        Right True  -> (x : kept, errs)
        Right False -> (kept, errs)
        Left e      -> (kept, e : errs)
| jtapolczai/Hephaestos | Crawling/Hephaestos/Fetch/ErrorHandling.hs | apache-2.0 | 1,766 | 0 | 12 | 508 | 489 | 267 | 222 | 26 | 3 |
-----------------------------------------------------------------------------
--
-- Module : TestDecoder
-- Description :
-- Copyright : (c) Tobias Reinhardt, 2015 <tobioso92_@hotmail.com
-- License : Apache License, Version 2.0
--
-- Maintainer : Tobias Reinhardt <tobioso92_@hotmail.com>
-- Portability : tested only on linux
-- |
--
-----------------------------------------------------------------------------
module TestDecoder (
describtion
) where
import Control.DeepSeq (force)
import Control.Exception (evaluate)
import IniConfiguration
import Test.Hspec
-- TODO: Improve the description texts below
-- Each example feeds a literal INI document to 'decode' (an association
-- list of section name to property list) and inspects the result;
-- the final example goes through 'readConfiguration' instead.
describtion = do
    it "Has a default section where all unsectioned options are stored" $ do
        let result = decode "op1=val1\nop2=val2"
        let Just properties = lookup "" result
        lookup "op1" properties `shouldBe` Just "val1"
        lookup "op2" properties `shouldBe` Just "val2"
        lookup "op3" properties `shouldBe` Nothing
    it "Can decode text where there is a section together with an unsectioned part" $ do
        let result = decode "op1=val1\n\
                            \op2=val2\n\
                            \ \n\
                            \[section1]\n\
                            \op3=val3\n\
                            \op4=val4\n\
                            \op5=val5"
        let Just unsectionedProperties = lookup "" result
        let Just sectionedProperties = lookup "section1" result
        lookup "op1" unsectionedProperties `shouldBe` Just "val1"
        lookup "op2" unsectionedProperties `shouldBe` Just "val2"
        lookup "op3" sectionedProperties `shouldBe` Just "val3"
        lookup "op4" sectionedProperties `shouldBe` Just "val4"
        lookup "op5" sectionedProperties `shouldBe` Just "val5"
    it "Can have multiple sections" $ do
        let result = decode "[section1]\n\
                            \op1=val1\n\
                            \op2=val2\n\
                            \ \n\
                            \[section2]\n\
                            \op3=val3\n\
                            \op4=val4\n\
                            \ \n\
                            \[section3]\n\
                            \op5=val5\n"
        let Just section1 = lookup "section1" result
        let Just section2 = lookup "section2" result
        let Just section3 = lookup "section3" result
        (lookup "op1" section1) `shouldBe` Just "val1"
        (lookup "op2" section1) `shouldBe` Just "val2"
        (lookup "op3" section2) `shouldBe` Just "val3"
        (lookup "op4" section2) `shouldBe` Just "val4"
        (lookup "op5" section3) `shouldBe` Just "val5"
    -- the input below deliberately carries leading/trailing blanks
    it "Strips empty lines and trailing whitespaces" $ do
        let result = decode " [section1] \n\
                            \ \n\
                            \ op1=val1 \n\
                            \ \n \
                            \ op2=val2 \n\
                            \ [section2] \n\
                            \ op3=val3 \n\
                            \ \n"
        let Just section1 = lookup "section1" result
        let Just section2 = lookup "section2" result
        (lookup "op1" section1) `shouldBe` Just "val1"
        (lookup "op2" section1) `shouldBe` Just "val2"
        (lookup "op3" section2) `shouldBe` Just "val3"
    it "keys are allowed to have empty values" $ do
        let result = decode "op1=\n"
        let Just section = lookup "" result
        (lookup "op1" section) `shouldBe` Just ""
    -- Error cases: decode is lazy, so the result is forced to normal
    -- form via show/force before asserting that an error is raised.
    it "Throws an error if equal sign is missing" $ do
        (evaluate . force) (show $ decode "op1") `shouldThrow` anyErrorCall
    it "Throws an error if section contains whitespace" $ do
        (evaluate . force) (show $ decode " [ section1]") `shouldThrow` anyErrorCall
        (evaluate . force) (show $ decode "[secti on1]") `shouldThrow` anyErrorCall
        (evaluate . force) (show $ decode "[section1 ]") `shouldThrow` anyErrorCall
    it "Throws an error if property contains whitespace" $ do
        (evaluate . force) (show $ decode "o p1=val1") `shouldThrow` anyErrorCall
        (evaluate . force) (show $ decode "op1 =val1") `shouldThrow` anyErrorCall
        (evaluate . force) (show $ decode "op1= val1") `shouldThrow` anyErrorCall
        (evaluate . force) (show $ decode "op1=va l1") `shouldThrow` anyErrorCall
    it "property contains more than one equal sign" $ do
        (evaluate . force) (show $ decode "op1=val1=val2") `shouldThrow` anyErrorCall
    -- relies on the fixture file shipped with the test suite
    it "Can read directly from files" $ do
        result <- readConfiguration "tests/material/example.ini"
        let Just defaultSection = lookup "" result
        let Just section1 = lookup "section1" result
        let Just section2 = lookup "section2" result
        (lookup "op1" defaultSection) `shouldBe` Just "arg1"
        (lookup "op2" defaultSection) `shouldBe` Just "arg2"
        (lookup "op3" section1) `shouldBe` Just "arg3"
        (lookup "op4" section2) `shouldBe` Just "arg4"
        (lookup "op5" section2) `shouldBe` Just "arg5"
| tobiasreinhardt/show | IniConfiguration/tests/TestDecoder.hs | apache-2.0 | 4,748 | 0 | 14 | 1,327 | 1,115 | 530 | 585 | 66 | 1 |
{-# LANGUAGE BangPatterns, RecordWildCards #-}
-- |
-- Module : Criterion
-- Copyright : (c) 2009-2014 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : GHC
--
-- Core benchmarking code.
module Criterion.Internal
(
runAndAnalyse
, runAndAnalyseOne
, runOne
, runFixedIters
) where
import Control.Applicative ((<$>))
import Control.DeepSeq (rnf)
import Control.Exception (evaluate)
import Control.Monad (foldM, forM_, void, when)
import Control.Monad.Reader (ask, asks)
import Control.Monad.Trans (MonadIO, liftIO)
import Control.Monad.Trans.Except
import Data.Binary (encode)
import Data.Int (Int64)
import qualified Data.ByteString.Lazy as L
import Criterion.Analysis (analyseSample, noteOutliers)
import Criterion.IO (header, hGetRecords)
import Criterion.IO.Printf (note, printError, prolix, writeCsv)
import Criterion.Measurement (runBenchmark, secs)
import Criterion.Monad (Criterion)
import Criterion.Report (report)
import Criterion.Types hiding (measure)
import qualified Data.Map as Map
import Data.Vector (Vector)
import Statistics.Resampling.Bootstrap (Estimate(..))
import System.Directory (getTemporaryDirectory, removeFile)
import System.IO (IOMode(..), SeekMode(..), hClose, hSeek, openBinaryFile,
openBinaryTempFile, openTempFile, openFile, hPutStrLn)
import Text.Printf (printf)
-- | Run a single benchmark. @i@ is the benchmark's index, @desc@ its
-- display name. Produces the raw 'Measurement' record; analysis happens
-- separately in 'analyseOne'.
runOne :: Int -> String -> Benchmarkable -> Criterion DataRecord
runOne i desc bm = do
  Config{..} <- ask    -- brings timeLimit (and the rest of Config) into scope
  (meas,timeTaken) <- liftIO $ runBenchmark bm timeLimit
  -- warn (verbose mode only) when the run overshot its time budget by >25%
  when (timeTaken > timeLimit * 1.25) .
    void $ prolix "measurement took %s\n" (secs timeTaken)
  return (Measurement i desc meas)
-- | Analyse a single benchmark: bootstrap the sample, print the headline
-- regression/mean/stddev figures, append a CSV row, and report outlier
-- variance. Returns the 'Analysed' report (or prints an error on failure).
analyseOne :: Int -> String -> Vector Measured -> Criterion DataRecord
analyseOne i desc meas = do
  Config{..} <- ask
  _ <- prolix "analysing with %d resamples\n" resamples
  erp <- runExceptT $ analyseSample i desc meas
  case erp of
    Left err -> printError "*** Error: %s\n" err
    Right rpt@Report{..} -> do
      let SampleAnalysis{..} = reportAnalysis
          OutlierVariance{..} = anOutlierVar
          -- human-readable description of how badly outliers distort the sample
          wibble = case ovEffect of
                     Unaffected -> "unaffected" :: String
                     Slight -> "slightly inflated"
                     Moderate -> "moderately inflated"
                     Severe -> "severely inflated"
          -- the first regression is the built-in time-vs-iters fit
          (builtin, others) = splitAt 1 anRegress
      let r2 n = printf "%.3f R\178" n
      -- headline line: OLS time estimate plus its goodness of fit
      forM_ builtin $ \Regression{..} ->
        case Map.lookup "iters" regCoeffs of
          Nothing -> return ()
          Just t -> bs secs "time" t >> bs r2 "" regRSquare
      bs secs "mean" anMean
      bs secs "std dev" anStdDev
      -- any user-requested extra regressions, one block per responder
      forM_ others $ \Regression{..} -> do
        _ <- bs r2 (regResponder ++ ":") regRSquare
        forM_ (Map.toList regCoeffs) $ \(prd,val) ->
          bs (printf "%.3g") ("  " ++ prd) val
      writeCsv (desc,
                estPoint anMean, estLowerBound anMean, estUpperBound anMean,
                estPoint anStdDev, estLowerBound anStdDev,
                estUpperBound anStdDev)
      -- outlier summary: always in verbose mode, otherwise only when notable
      when (verbosity == Verbose || (ovEffect > Slight && verbosity > Quiet)) $ do
        when (verbosity == Verbose) $ noteOutliers reportOutliers
        _ <- note "variance introduced by outliers: %d%% (%s)\n"
             (round (ovFraction * 100) :: Int) wibble
        return ()
      _ <- note "\n"
      return (Analysed rpt)
  where -- print one "metric point (lower .. upper)" line, with the
        -- confidence level appended when it is not the default 0.95
        bs :: (Double -> String) -> String -> Estimate -> Criterion ()
        bs f metric Estimate{..} =
          note "%-20s %-10s (%s .. %s%s)\n" metric
               (f estPoint) (f estLowerBound) (f estUpperBound)
               (if estConfidenceLevel == 0.95 then ""
                else printf ", ci %.3f" estConfidenceLevel)
-- | Run a single benchmark and analyse its performance.
runAndAnalyseOne :: Int -> String -> Benchmarkable -> Criterion DataRecord
runAndAnalyseOne i desc bm = do
  -- runOne always yields a Measurement record; the pattern bind unpacks it
  Measurement _ _ meas <- runOne i desc bm
  analyseOne i desc meas
-- | Run, and analyse, one or more benchmarks.
--
-- Each selected benchmark is run, its report encoded to a raw-data file
-- (a temp file unless 'rawDataFile' is set), then all reports are read
-- back, rendered, and summarised to JUnit if configured.
runAndAnalyse :: (String -> Bool) -- ^ A predicate that chooses
                                  -- whether to run a benchmark by its
                                  -- name.
              -> Benchmark
              -> Criterion ()
runAndAnalyse select bs = do
  mbRawFile <- asks rawDataFile
  mbShowFile <- asks showDataFile -- Parallels rawDataFile
  -- binary record file: temp file when unconfigured, else the named file
  (rawFile, handle) <- liftIO $
    case mbRawFile of
      Nothing -> do
        tmpDir <- getTemporaryDirectory
        openBinaryTempFile tmpDir "criterion.dat"
      Just file -> do
        handle <- openBinaryFile file ReadWriteMode
        return (file, handle)
  -- textual 'show' dump file; opened but currently never written to
  -- (see the NOTE(review) below)
  (showFile, showHandle) <- liftIO $
    case mbShowFile of
      Nothing -> do
        tmpDir <- getTemporaryDirectory
        openTempFile tmpDir "criterion_dat.txt"
      Just file -> do
        handle <- openFile file ReadWriteMode
        return (file, handle)
  liftIO $ L.hPut handle header
  for select bs $ \idx desc bm -> do
    _ <- note "benchmarking %s\n" desc
    rpt <- runAndAnalyseOne idx desc bm
    -- NOTE(review): the next three statements look like leftover debug
    -- output — the report is printed to stdout instead of showHandle,
    -- and the read/show round-trip is printed again. Consider removing
    -- or routing to showHandle as the trailing comment suggests.
    liftIO $ print rpt -- hPutStrLn showHandle (show rpt)
    _ <- note " WROTE out show data %s \n" desc
    liftIO $ print (read (show rpt) :: Report)
    liftIO $ L.hPut handle (encode rpt)
-- rpts <- (either fail return =<<) . liftIO $ do
-- hSeek handle AbsoluteSeek 0
-- rs <- fmap (map (\(Analysed r) -> r)) <$> hGetRecords handle
-- hClose handle
-- case mbRawFile of
-- Just _ -> return rs
-- _ -> removeFile rawFile >> return rs
  liftIO $ hClose showHandle
  -- read every record back; keep the raw file only if explicitly requested
  rpts <- (either fail return =<<) . liftIO $ do
    hSeek handle AbsoluteSeek 0
    rs <- fmap (map (\(Analysed r) -> r)) <$> hGetRecords handle
    hClose handle
    case mbRawFile of
      Just _ -> return rs
      _ -> removeFile rawFile >> return rs
  report rpts
  junit rpts
-- | Run a benchmark without analysing its performance: each selected
-- benchmark is executed for exactly the given number of iterations.
runFixedIters :: Int64 -- ^ Number of loop iterations to run.
              -> (String -> Bool) -- ^ A predicate that chooses
                                  -- whether to run a benchmark by its
                                  -- name.
              -> Benchmark
              -> Criterion ()
runFixedIters iters select bs =
  for select bs $ \_idx desc bm -> do
    _ <- note "benchmarking %s\n" desc
    -- NOTE(review): 'runRepeatedly' is not in the visible import list of
    -- this file — presumably re-exported via Criterion.Types; confirm.
    liftIO $ runRepeatedly bm iters
-- | Iterate over benchmarks, invoking @handle@ for each one whose
-- prefixed name satisfies @select@. The index argument counts only the
-- benchmarks actually handled.
for :: MonadIO m => (String -> Bool) -> Benchmark
    -> (Int -> String -> Benchmarkable -> m ()) -> m ()
for select bs0 handle = go (0::Int) ("", bs0) >> return ()
  where
    -- Environment node: build the environment (forcing it to normal form
    -- so setup cost is not attributed to the benchmarks) only if some
    -- contained benchmark is selected.
    go !idx (pfx, Environment mkenv mkbench)
      | shouldRun pfx mkbench = do
        e <- liftIO $ do
          ee <- mkenv
          evaluate (rnf ee)
          return ee
        go idx (pfx, mkbench e)
      | otherwise = return idx
    -- Leaf: run the handler when the fully-prefixed name is selected.
    go idx (pfx, Benchmark desc b)
      | select desc' = do handle idx desc' b; return $! idx + 1
      | otherwise = return idx
      where desc' = addPrefix pfx desc
    -- Group: fold over the children, extending the name prefix.
    go idx (pfx, BenchGroup desc bs) =
      foldM go idx [(addPrefix pfx desc, b) | b <- bs]
    -- Probing with 'undefined' is safe here: 'benchNames' inspects only
    -- the benchmark structure, never the environment value.
    shouldRun pfx mkbench =
      any (select . addPrefix pfx) . benchNames . mkbench $ undefined
-- | Write summary JUnit file (if applicable).
--
-- Emits one @\<testcase\>@ element per report, with the benchmark's mean
-- time as the test duration. Restores proper XML attribute escaping: the
-- escape table had been corrupted into identity mappings (and the literal
-- @\"\"\"@ was a syntax error), which would emit malformed XML for any
-- benchmark name containing quotes, angle brackets or ampersands.
junit :: [Report] -> Criterion ()
junit rs
  = do junitOpt <- asks junitFile
       case junitOpt of
         Just fn -> liftIO $ writeFile fn msg
         Nothing -> return ()
  where
    msg = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" ++
          printf "<testsuite name=\"Criterion benchmarks\" tests=\"%d\">\n"
          (length rs) ++
          concatMap single rs ++
          "</testsuite>\n"
    single Report{..} = printf "  <testcase name=\"%s\" time=\"%f\" />\n"
               (attrEsc reportName) (estPoint $ anMean $ reportAnalysis)
    -- escape the five XML-reserved characters in attribute position
    attrEsc = concatMap esc
      where
        esc '\'' = "&#39;"
        esc '"'  = "&quot;"
        esc '<'  = "&lt;"
        esc '>'  = "&gt;"
        esc '&'  = "&amp;"
        esc c    = [c]
| rrnewton/criterion | Criterion/Internal.hs | bsd-2-clause | 8,095 | 0 | 20 | 2,299 | 2,306 | 1,163 | 1,143 | 173 | 7 |
{-# LANGUAGE RankNTypes, RecordWildCards #-}
-- |
-- Module : Network.Wreq.Session
-- Copyright : (c) 2014 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : GHC
--
-- The functions in this module use a 'Session' to handle the
-- following common needs:
--
-- * TCP connection reuse. This is important for performance when
-- multiple requests go to a single server, particularly if TLS is
-- being used.
--
-- * Transparent cookie management. Any cookies set by the server
-- persist from one request to the next. (Bypass this overhead
-- using 'withAPISession'.)
--
--
-- This module is designed to be used alongside the "Network.Wreq"
-- module. Typical usage will look like this:
--
-- @
-- import "Network.Wreq"
-- import qualified "Network.Wreq.Session" as Sess
--
-- main = Sess.'withSession' $ \\sess ->
-- Sess.'get' sess \"http:\/\/httpbin.org\/get\"
-- @
--
-- We create a 'Session' using 'withSession', then pass the session to
-- subsequent functions. When talking to a REST-like service that does
-- not use cookies, it is more efficient to use 'withAPISession'.
--
-- Note the use of qualified import statements in the examples above,
-- so that we can refer unambiguously to the 'Session'-specific
-- implementation of HTTP GET.
module Network.Wreq.Session
(
-- * Session creation
Session
, withSession
, withAPISession
-- ** More control-oriented session creation
, withSessionWith
, withSessionControl
-- * HTTP verbs
, get
, post
, head_
, options
, put
, delete
-- ** Configurable verbs
, getWith
, postWith
, headWith
, optionsWith
, putWith
, deleteWith
-- * Extending a session
, Lens.seshRun
) where
import Control.Lens ((&), (?~), (.~))
import Data.Foldable (forM_)
import Data.IORef (newIORef, readIORef, writeIORef)
import Network.Wreq (Options, Response)
import Network.Wreq.Internal
import Network.Wreq.Internal.Types (Body(..), Req(..), Session(..))
import Network.Wreq.Types (Postable, Deletable, Putable, Run)
import Prelude hiding (head)
import qualified Data.ByteString.Lazy as L
import qualified Network.HTTP.Client as HTTP
import qualified Network.Wreq.Internal.Lens as Lens
-- | Create a 'Session', passing it to the given function. The
-- 'Session' will no longer be valid after that function returns.
--
-- This session manages cookies and uses default session manager
-- configuration (it simply specialises 'withSessionWith' to
-- 'defaultManagerSettings').
withSession :: (Session -> IO a) -> IO a
withSession = withSessionWith defaultManagerSettings
-- | Create a session.
--
-- This uses the default session manager settings, but does not manage
-- cookies (the cookie jar is 'Nothing'). It is intended for use with
-- REST-like HTTP-based APIs, which typically do not use cookies.
withAPISession :: (Session -> IO a) -> IO a
withAPISession = withSessionControl Nothing defaultManagerSettings
-- | Create a session, using the given manager settings. This session
-- manages cookies (it starts from an empty cookie jar).
withSessionWith :: HTTP.ManagerSettings -> (Session -> IO a) -> IO a
withSessionWith = withSessionControl (Just (HTTP.createCookieJar []))
{-# DEPRECATED withSessionWith "Use withSessionControl instead." #-}
-- | Create a session, using the given cookie jar and manager settings.
-- The jar, when present, is held in an 'IORef' that 'runWith' reads
-- before and updates after every request.
withSessionControl :: Maybe HTTP.CookieJar
                      -- ^ If 'Nothing' is specified, no cookie management
                      -- will be performed.
                   -> HTTP.ManagerSettings
                   -> (Session -> IO a) -> IO a
withSessionControl mj settings act = do
  mref <- case mj of
            Nothing  -> return Nothing
            Just jar -> fmap Just (newIORef jar)
  HTTP.withManager settings $ \mgr ->
    act Session { seshCookies = mref
                , seshManager = mgr
                , seshRun = runWith
                }
-- Convenience verbs: each applies its configurable sibling below to the
-- library's default 'Options'.
-- | 'Session'-specific version of 'Network.Wreq.get'.
get :: Session -> String -> IO (Response L.ByteString)
get = getWith defaults
-- | 'Session'-specific version of 'Network.Wreq.post'.
post :: Postable a => Session -> String -> a -> IO (Response L.ByteString)
post = postWith defaults
-- | 'Session'-specific version of 'Network.Wreq.head_'.
head_ :: Session -> String -> IO (Response ())
head_ = headWith defaults
-- | 'Session'-specific version of 'Network.Wreq.options'.
options :: Session -> String -> IO (Response ())
options = optionsWith defaults
-- | 'Session'-specific version of 'Network.Wreq.put'.
put :: Putable a => Session -> String -> a -> IO (Response L.ByteString)
put = putWith defaults
-- | 'Session'-specific version of 'Network.Wreq.delete'.
delete :: Deletable a => Session -> String -> Maybe a -> IO (Response L.ByteString)
delete = deleteWith defaults
-- Configurable verbs: each prepares a request from its 'Options' and
-- executes it through the session via 'run' ('string' keeps the body,
-- 'ignore' discards it).
-- | 'Session'-specific version of 'Network.Wreq.getWith'.
getWith :: Options -> Session -> String -> IO (Response L.ByteString)
getWith opts sesh url = run string sesh =<< prepareGet opts url
-- | 'Session'-specific version of 'Network.Wreq.postWith'.
postWith :: Postable a => Options -> Session -> String -> a
         -> IO (Response L.ByteString)
postWith opts sesh url payload =
  run string sesh =<< preparePost opts url payload
-- | 'Session'-specific version of 'Network.Wreq.headWith'.
headWith :: Options -> Session -> String -> IO (Response ())
headWith opts sesh url = run ignore sesh =<< prepareHead opts url
-- | 'Session'-specific version of 'Network.Wreq.optionsWith'.
optionsWith :: Options -> Session -> String -> IO (Response ())
optionsWith opts sesh url = run ignore sesh =<< prepareOptions opts url
-- | 'Session'-specific version of 'Network.Wreq.putWith'.
putWith :: Putable a => Options -> Session -> String -> a
        -> IO (Response L.ByteString)
putWith opts sesh url payload = run string sesh =<< preparePut opts url payload
-- | 'Session'-specific version of 'Network.Wreq.deleteWith'.
-- A 'Just' payload selects the request-body variant of DELETE.
deleteWith :: Deletable a => Options -> Session -> String -> Maybe a -> IO (Response L.ByteString)
deleteWith opts sesh url Nothing = run string sesh =<< prepareDelete opts url
deleteWith opts sesh url (Just payload) = run string sesh =<< prepareDeleteWithBody opts url payload
-- | Execute one request through a session: install the session's cookie
-- jar (if any) on the request, run it with the shared manager, then
-- store the server's updated cookie jar back into the session.
runWith :: Session -> Run Body -> Run Body
runWith Session{..} act (Req _ req) = do
  req' <- case seshCookies of
            -- no cookie management: explicitly clear the request's jar
            Nothing -> return (req & Lens.cookieJar .~ Nothing)
            -- otherwise attach the current jar from the session's IORef
            Just ref -> (\s -> req & Lens.cookieJar ?~ s) `fmap` readIORef ref
  resp <- act (Req (Right seshManager) req')
  -- persist any cookies the response set, for the next request
  forM_ seshCookies $ \ref ->
    writeIORef ref (HTTP.responseCookieJar resp)
  return resp
-- | A 'Mapping' packs the two conversions between the generic 'Body'
-- representation and a concrete body type, plus the runner to use.
type Mapping a = (Body -> a, a -> Body, Run a)

-- | Execute a request through the session's runner, translating bodies
-- via the given 'Mapping' in both directions.
run :: Mapping a -> Session -> Run a
run (to,from,act) sesh =
  fmap (fmap to) . seshRun sesh sesh (fmap (fmap from) . act)

-- | Mapping for lazy 'L.ByteString' bodies.
-- NOTE(review): the lambda is partial, but 'runRead' is presumably
-- guaranteed to always produce a 'StringBody' — confirm against the runner.
string :: Mapping L.ByteString
string = (\(StringBody s) -> s, StringBody, runRead)

-- | Mapping that discards the response body entirely.
ignore :: Mapping ()
ignore = (\_ -> (), const NoBody, runIgnore)
| Feeniks/wreq | Network/Wreq/Session.hs | bsd-3-clause | 6,828 | 0 | 16 | 1,340 | 1,501 | 820 | 681 | 93 | 2 |
module Stack where
-- | A stack symbol: either a payload value or the distinguished
-- bottom-of-stack marker.
data Symbol a = Symbol a
              | Bottom
              deriving (Eq, Ord, Show)

-- | A stack is a list of symbols, topmost element first.
type Stack a = [Symbol a]

-- | Remove the top symbol. The 'Bottom' marker is never popped, and
-- popping an empty stack is a no-op (the original crashed on @[]@
-- because that pattern was not covered).
pop :: Stack a -> Stack a
pop []       = []
pop [Bottom] = [Bottom]
pop (_:ss)   = ss

-- | Push a symbol onto the top of the stack.
push :: Symbol a -> Stack a -> Stack a
push s ss = s : ss

-- | Inspect the top symbol; an empty stack reads as 'Bottom'
-- (the original used 'head', which crashed on @[]@).
peek :: Stack a -> Symbol a
peek []    = Bottom
peek (s:_) = s
{-# LANGUAGE Rank2Types, FlexibleContexts #-}
module Numeric.MaxEnt.Deconvolution.Internal where
import Numeric.MaxEnt
import Data.List.Split
import Data.List
import Debug.Trace
import Numeric.Integration.TanhSinh
import Numeric.AD
import Control.Applicative
import Numeric.MaxEnt.Deconvolution.Convolution2D
import Math.GaussianQuadratureIntegration
import Numeric.MaxEnt.Deconvolution.GaussianPSF
import Debug.Trace
import Data.Word
import Data.Array.Repa hiding ((++), map, transpose, toList)
import qualified Data.Array.Repa as R
import qualified Data.Array.Repa.Eval as R
import qualified Data.Array.Repa.Repr.ForeignPtr as R
import qualified Data.Vector.Unboxed as UV
import Control.Lens hiding (ala)
import System.FilePath.Lens
import qualified Data.Vector.Storable as VS
import Foreign.Marshal.Array
import System.IO.Unsafe
import Foreign.ForeignPtr.Safe
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as BS
import Codec.Picture.Png
import Codec.Picture.Types
import Numeric
import Unsafe.Coerce
import qualified Data.Vector.Storable as S
import Numeric.MaxEnt.Deconvolution.Iterative
import Numeric.LinearAlgebra
import Data.Packed.Matrix
--import Numeric.MaxEnt.Deconvolution.IterativeRepa
--import qualified Data.Array.Repa as A
--import qualified Data.Array.Repa.Repr.Unboxed as U
--import qualified Data.Array.Repa.Stencil as A
--import qualified Data.Array.Repa.Stencil.Dim2 as A
--import Data.Functor.Identity
-- | Trace a value's 'show' representation to stderr and return the
-- value unchanged. (Added the missing type signature.)
traceIt :: Show a => a -> a
traceIt x = trace (show x) x
-- | Like 'traceIt', but prefixes the traced output with a label.
traceItNote :: Show a => String -> a -> a
traceItNote label val = trace (label ++ show val) val
-- | Convolve a 2-D image with a Gaussian of the given variance, sampled
-- at the image's own width and height, using zero padding at the edges.
gaussianConvolve2D :: Double -> [[Double]] -> [[Double]]
gaussianConvolve2D var xs =
    convolve2DZeroBoundary (sampleGaussian2D var width height) $ xs where
  -- NOTE(review): 'length . head' is partial — assumes a non-empty image.
  width = length . head $ xs
  height = length xs

-- | Variant using 'sampleGaussian2D'' — presumably an alternative
-- sampling scheme; see the Convolution2D module for the difference.
gaussianConvolve2D' :: Double -> [[Double]] -> [[Double]]
gaussianConvolve2D' var xs =
    convolve2DZeroBoundary (sampleGaussian2D' var width height) $ xs where
  width = length . head $ xs
  height = length xs
--deconvolve2D :: [[Double]] -> [Double] -> Either String [[Double]]
-- | Deconvolve a 2-D image against a point-spread function by solving
-- the corresponding linear system ('linear' with tolerance 0.001). The
-- image is flattened and normalised before solving, then rescaled by its
-- total intensity and re-chunked to the original width.
-- NOTE(review): 'steps' is unused by the active code path; it fed the
-- commented-out iterative solvers below — confirm before removing.
deconvolve2D steps psf image = result where
  convoPSF = toConvolve2DZeroBoundary 0 psf
  total = sum . concat $ image
  -- NOTE(review): partial on [] — assumes a non-empty image.
  width = length . head $ image
  height = length image
  normalizedImage = normalize . concat $ image
  fromLinearImage = chunksOf width . map (total*)
  -- I think here I can
  result = case linear 0.001 (LC convoPSF normalizedImage) of
    Right x -> Right $ fromLinearImage $ S.toList x
    Left x -> Left $ show x
  --result = Right $ fromLinearImage $ toList $
  --   update steps (fromLists convoPSF) (fromList normalizedImage)
  --result = Right $ fromLinearImage $ A.toList $
  --   runIdentity $ update' steps psf (fromListUnboxed (Z :. width :. height) normalizedImage) $
  --       (delay $ fromListUnboxed (Z :. width :. height) $ replicate (width * height) 0)
  --result = Right $ fromLinearImage $ last $ linear'' (LC convoPSF normalizedImage) :: Either String [[Double]]
--testPSF :: (RealFloat a)
--  => [[a]]
-- | A 5x5 Gaussian point-spread function with variance 0.25, for tests.
testPSF = gaussianPSF 0.25 5 5
--  -> a -> Int -> Int -> [[a]]
-- | Sample a Gaussian PSF at the given variance and dimensions.
gsamples var width height = gaussianPSF var width height
-- | 3x3 test image: a single centre impulse.
testInput :: [[Double]]
testInput = [[0,0,0], [0.0, 1.0, 0.0], [0,0,0]]
-- | 5x5 test image: a uniform 3x3 patch of total intensity 1.
testInput1 :: [[Double]]
testInput1 = [[0, 0, 0, 0, 0],
              [0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 0],
              [0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 0],
              [0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 0],
              [0, 0, 0, 0, 0]]
-- | 5x5 test image: a single centre impulse.
testInput2 :: [[Double]]
testInput2 = [[0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0],
              [0, 0, 1.0, 0, 0],
              [0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0]]
-- | Inverted variant of 'testInput2'.
testInput3 :: [[Double]]
testInput3 = map (map (1.0-)) testInput2
-- | Round-trip test: blur the impulse image and attempt to recover it.
testDecon :: Either String [[Double]]
testDecon = deconvolve2D 1000 testPSF
              (gaussianConvolve2D 0.25 testInput2)

-- | Blur an image with a Gaussian of variance @var@, then deconvolve it
-- again with a PSF of the same variance.
roundTrip :: Double -> [[Double]] -> Either String [[Double]]
roundTrip var image = deconvolve2D 1000 (gsamples var width height) $
    (gaussianConvolve2D var image) where
  width = length . head $ image
  height = length image

-- | Render a round-trip result (or its error) as a string.
-- NOTE(review): 'FloatList' is defined elsewhere — presumably a
-- pretty-printing wrapper; confirm.
printRT var = either show (show . FloatList) . roundTrip var
-- | Map a normalised intensity in [0,1] to a byte. Clamped at both ends:
-- the original @floor (x * 256)@ sent 1.0 to 256, which wraps to 0 in
-- 'Word8' (and negative inputs wrapped similarly), producing black
-- pixels for the brightest inputs.
toWord8 :: Double -> Word8
toWord8 x = fromIntegral (max 0 (min 255 n))
  where
    n = floor (x * 256.0) :: Int
--testId :: (forall a. RealFloat a => [[a]]) -> [[Double]] -> Either String [[Double]]
-- | Identity "deconvolution": ignores the PSF and returns the image as-is.
testId x y = Right y
-- | Extract the red channel of an RGBA pixel.
getR (PixelRGBA8 x _ _ _) = x
-- | Expand a grey value to an opaque RGBA quadruple.
toPixel x = [x, x, x, 255]
--TODO make an image that matches the one above
--make increasingly larger examples until I can find the smallest ont
--that exhibits the problem
-- | Deconvolve an 8-bit greyscale image: normalise pixels to [0,1], run
-- 'deconvolve2D' against a Gaussian PSF of the given variance, and map
-- the result back to bytes.
deconvolve' :: Int -> Double -> Image Pixel8 -> Image Pixel8
deconvolve' steps var i@(Image width height dat) = result where
  input = [[fromIntegral (pixelAt i w h) / 256.0 |
              w <- [0..(width - 1)]] |
              h <- [0..(height - 1)] ]
  --
  result = case deconvolve2D steps (gsamples var width height) input of
    Right z  -> Image width height . VS.fromList $ concatMap (map toWord8) z
    -- Propagate the solver's message instead of the original 'error ""',
    -- which silently discarded the failure reason.
    Left err -> error ("deconvolve': " ++ err)
-- The test that I should have, is
-- open a file and blur it
-- and then unblur it
-- | Gaussian-blur an 8-bit greyscale image: normalise pixels to [0,1],
-- convolve via 'gaussianConvolve2D'', then write bytes back.
-- NOTE(review): the output is 'transpose'd and 'reverse'd, unlike
-- 'deconvolve'' — confirm this orientation change is intended.
blurImage :: Double -> Image Pixel8 -> Image Pixel8
blurImage var i@(Image width height dat) = result where
  input = [[fromIntegral (pixelAt i w h) / 256.0 |
              w <- [0..(width - 1)]] |
              h <- [0..(height - 1)] ]
  blured = gaussianConvolve2D' var input
  result = Image width height $ VS.fromList $ reverse $ concatMap (map toWord8) $ transpose blured
-- | Read a PNG, deconvolve it, and write the result next to the input
-- with a @_decon\<i\>@ suffix. Only 8-bit greyscale PNGs ('ImageY8') are
-- accepted; any other format aborts.
deconvolve :: Int -> Double -> FilePath -> Int -> IO ()
deconvolve steps var filePath i = do
  let outputPath = basename %~ (++ "_decon" ++ show i) $ filePath
  pngBytes <- BS.readFile filePath
  let image = either error id . decodePng $ pngBytes
      newImage = case image of
        ImageY8 x -> deconvolve' steps var x
        x -> error $ "bad format"
  writePng outputPath newImage

-- | Read a PNG, blur it (writing a @_blur@ copy next to the input),
-- then deconvolve the blurred image and write a @_decon@ copy.
-- Only 8-bit greyscale PNGs are accepted.
deconvolve2 :: Int -> Double -> FilePath -> IO ()
deconvolve2 steps var filePath = do
  let outputPath = basename %~ (++ "_decon") $ filePath
      blurPath = basename %~ (++ "_blur") $ filePath
  pngBytes <- BS.readFile filePath
  let image = either error id . decodePng $ pngBytes
  newImage <- case image of
    ImageY8 x -> do
      --blur the image and write out the blured one
      let blurImaged = blurImage var x
      writePng blurPath blurImaged
      return $ deconvolve' steps var blurImaged
    x -> error $ "bad format"
  writePng outputPath newImage
| jfischoff/Sharpen | src/Numeric/MaxEnt/Deconvolution/Internal.hs | bsd-3-clause | 6,767 | 0 | 16 | 1,719 | 1,907 | 1,062 | 845 | 127 | 2 |
module Paths_hsql_mysql (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
-- NOTE(review): this module has the shape of a Cabal-generated Paths_*
-- module; such files are normally regenerated on reconfigure, so edits
-- here are likely to be overwritten.
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch

-- | Package version, as configured at build time.
version :: Version
version = Version {versionBranch = [1,8,4], versionTags = []}

-- Hard-coded installation directories chosen at configure time; each can
-- be overridden at runtime via the corresponding environment variable.
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir     = "/Users/alanhawkins/.cabal/bin"
libdir     = "/Users/alanhawkins/.cabal/lib/x86_64-osx-ghc-7.8.2/hsql-mysql-1.8.4"
datadir    = "/Users/alanhawkins/.cabal/share/x86_64-osx-ghc-7.8.2/hsql-mysql-1.8.4"
libexecdir = "/Users/alanhawkins/.cabal/libexec"
sysconfdir = "/Users/alanhawkins/.cabal/etc"

getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "hsql_mysql_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "hsql_mysql_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "hsql_mysql_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "hsql_mysql_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "hsql_mysql_sysconfdir") (\_ -> return sysconfdir)

-- | Resolve a data file name relative to the package's data directory.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
  dir <- getDataDir
  return (dir ++ "/" ++ name)
| xpika/hsql-mysql | dist/build/autogen/Paths_hsql_mysql.hs | bsd-3-clause | 1,388 | 0 | 10 | 182 | 368 | 211 | 157 | 28 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Words (tests) where
import qualified Sanskell.Words as W (wordCount)
import qualified Data.Map as M
import Test.HUnit
-- | Whitespace-only input should produce no word counts.
testSpaces = TestCase $ do
  let wordsCount = W.wordCount [ "  \t" ]
  assertEqual "empty list," [] (M.elems wordsCount)

-- | Latin-script (English) words should be ignored entirely.
testSpacesWithWords = TestCase $ do
  let wordsCount = W.wordCount [ " ashish here " ]
  assertEqual "should not parse english words," [] (M.elems wordsCount)

-- | Four distinct Sanskrit words, each counted once.
testSanskritWords = TestCase $ do
  let wordsCount = W.wordCount [ "आरब्धम् उत्तमजनाः ना परितयजन्ति" ]
  assertEqual "should parse sanskrti words," [1, 1, 1, 1] (M.elems wordsCount)

-- | Repeated words accumulate counts; "||" and "..." are not counted.
testSanskritWordsMulti = TestCase $ do
  let wordsCount = W.wordCount [ "आरब्धम् उत्तमजनाः ना परितयजन्ति || आरब्धम् उत्तमजनाः ..." ]
  assertEqual "should parse sanskrti words," [2, 2, 1, 1] (M.elems wordsCount)

-- | Run all word-count test cases under HUnit.
tests = runTestTT $ TestList [ TestLabel "empty spaces should be ignored" testSpaces
                             , TestLabel "with words and spaces" testSpacesWithWords
                             , TestLabel "with sanskrit words" testSanskritWords
                             , TestLabel "with multi sanskrit words" testSanskritWordsMulti]
| ashishnegi/sanskell | test/Words.hs | bsd-3-clause | 1,327 | 0 | 12 | 262 | 284 | 149 | 135 | 21 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.DrawBuffersBlend
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/ARB/draw_buffers_blend.txt ARB_draw_buffers_blend> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.DrawBuffersBlend (
-- * Functions
glBlendEquationSeparateiARB,
glBlendEquationiARB,
glBlendFuncSeparateiARB,
glBlendFunciARB
) where
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/DrawBuffersBlend.hs | bsd-3-clause | 750 | 0 | 4 | 87 | 46 | 37 | 9 | 6 | 0 |
module Tronkell.Network.TcpSockets where
import qualified Tronkell.Server.Types as ST
import qualified Control.Concurrent as Con
import qualified Tronkell.Network.Utils as NU (nextUserID)

import Network (PortID(..), accept, listenOn, withSocketsDo, Socket)
import System.IO

import qualified Control.Concurrent as C (forkIO)
import Control.Exception (handle, SomeException(..))
import Control.Monad (forever, when)
import Data.List (dropWhileEnd)
import qualified Data.Text as T (pack)
-- | Bind the TCP server and hand off to the accept loop.
-- NOTE(review): the port (4242) is hard-coded — consider parameterising.
start :: Con.MVar ST.UserID -> ST.NetworkChans -> Con.Chan ST.OutMessage -> IO ()
start uIdGen chans outChan = withSocketsDo $ do
  sock <- listenOn . PortNumber $ 4242
  tcpMainLoop uIdGen chans outChan sock
-- | Accept clients forever: each accepted connection gets a fresh
-- 'ST.UserID' and is served on its own thread by 'runClient'.
tcpMainLoop :: Con.MVar ST.UserID -> ST.NetworkChans -> Con.Chan ST.OutMessage -> Socket -> IO ()
tcpMainLoop uIdGen chans outChan socket = do
  (clientHdl, _, _) <- accept socket
  hSetBuffering clientHdl NoBuffering
  userId <- NU.nextUserID uIdGen
  -- Explicitly discard the ThreadId (client threads are not tracked);
  -- the bare statement previously triggered -Wunused-do-bind.
  _ <- C.forkIO $ runClient userId clientHdl chans outChan
  tcpMainLoop uIdGen chans outChan socket
-- | Serve one connected client until it disconnects. Announces the
-- player on the input channel, forks two writer threads (broadcast and
-- client-specific), reads lines from the socket until the handle fails,
-- then announces the exit and kills both writers.
runClient :: ST.UserID -> Handle -> ST.NetworkChans -> Con.Chan ST.OutMessage -> IO ()
runClient uId clientHdl (inChan, clientSpecificOutChan) outChan = do
  -- duplicate the channels to read.
  dupOutChan <- Con.dupChan outChan
  dupClientSpecificOutChan <- Con.dupChan clientSpecificOutChan
  Con.writeChan inChan $ ST.PlayerJoined uId
  -- Broadcast writer: every outgoing message goes to this client.
  writeThread <- Con.forkIO $ forever $ do
    msg <- Con.readChan dupOutChan
    hPutStrLn clientHdl $ encodeMsg msg
  -- Targeted writer: only messages addressed to this user id.
  clientSpecificWriteThread <- Con.forkIO $ forever $ do
    (userId, msg) <- Con.readChan dupClientSpecificOutChan
    when (userId == uId) $ hPutStrLn clientHdl $ encodeMsg msg
  -- Read loop: any exception (e.g. client hang-up) ends the loop.
  handle (\(SomeException _) -> return ()) $ forever $ do
    canRead <- hIsReadable clientHdl
    when canRead $ do
      inmsg <- cleanString <$> hGetLine clientHdl
      Con.writeChan inChan $ decodeMessage uId inmsg
  Con.writeChan inChan $ ST.UserExit uId
  Con.killThread writeThread
  Con.killThread clientSpecificWriteThread
  -- NOTE(review): 'clientHdl' is never hClose'd here — confirm whether
  -- the socket is released elsewhere or relied on GC finalisation.
-- | Strip trailing CR/LF characters (Windows clients send "\r" even
-- after 'hGetLine' removes the "\n"). Uses 'dropWhileEnd' instead of
-- the original double-'reverse' idiom; behaviour is identical.
cleanString :: String -> String
cleanString = dropWhileEnd (\c -> c == '\n' || c == '\r')
-- | Translate a raw client line into a game input message.
-- Any line that is not a recognised command is treated as the
-- player's name.
decodeMessage :: ST.UserID -> String -> ST.InMessage
decodeMessage userId msg = case msg of
  "L" -> ST.PlayerTurnLeft userId
  "R" -> ST.PlayerTurnRight userId
  "Q" -> ST.PlayerExit userId
  "ready" -> ST.PlayerReady userId
  n -> ST.PlayerName userId $ T.pack n

-- | Wire encoding of outgoing messages is their 'Show' representation.
encodeMsg :: ST.OutMessage -> String
encodeMsg = show
| nilenso/tronkell | src/Tronkell/Network/TcpSockets.hs | bsd-3-clause | 2,438 | 0 | 15 | 424 | 805 | 402 | 403 | 51 | 5 |
{-# LANGUAGE FlexibleContexts #-}
{-|
Module : Numeric.AERN.DoubleBasis.RealIntervalApprox
Description : Double intervals for approximating real intervals
Copyright : (c) Michal Konecny, Jan Duracz
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Intervals with Double endpoints as an
abstract data type for approximating real number intervals.
Each interval is either an outer or inner approximation
of another interval.
Only operations that respect this view are available
via this module.
-}
module Numeric.AERN.DoubleBasis.RealIntervalApprox
(
-- |
-- A convenience module re-exporting various interval operations
-- with default effort indicators.
-- * Main type
RealIntervalApprox,
-- ** associated operations
width, bisect,
-- * Order relations
-- |
-- There are two types of order relations to consider:
--
-- * the /numerical/ order, generalising the order relation
-- on Doubles and
--
-- * the /refinement/ order, generalising the reverse-inclusion
-- relation on consistent intervals.
--
-- The consistent intervals in 'RealIntevalApprox' form a /meet/-semilattice
-- corresponding to the refiniement order under the operation /\\
-- returning the subset-least interval containing the /union/ of
-- its argument intervals. The operation is extended to all of 'RealIntevalApprox'
-- by returning the highest interval below both of its argument
-- intervals.
--
-- The structure ({ 'di' | 'di' is consistent \}, /\\,
-- 'bottom') is a complete meet-semilattice.
--
-- Lower and upper approximations of the exact operation /\\
-- are given by '</\>' and '>/\<' respectively.
--
-- The dual operation to /\\ is partial on consistent intervals,
-- since the /intersection/ of disjoint sets is empty. Therefore,
-- the /join/-semilattice structure on 'RealIntevalApprox' comes in two flavours:
--
-- * the partial consistent interval-valued join \\/\? which
-- returns 'Nothing' for disjoint and anticonsistent arguments
-- and
--
-- * the total join \\/ which returns the lowest interval in
-- 'RealIntevalApprox' above both of its argument intervals.
--
-- The structure ('RealIntevalApprox', \/\\, \\\/, 'bottom', 'top') is a complete
-- lattice.
--
-- Lower and upper approximations of the exact operations \\/\?
-- and \\\/ are given by '<\/>?', '<\/>' and '>\/<' respectively.
-- ** Numerical order
-- |
-- Interval extensions of the corresponding tests and relations on
-- Double.
-- *** Extrema
-- |
-- The values retured by 'least' and 'greatest' complete the
-- numerical partial order on 'RealIntevalApprox'.
least,greatest,
-- *** Comparability tests
(==?),(<==>?),(</=>?),
-- *** Order relations
(<?),(>?),(<=?),(>=?),
-- ** Refinement order
-- |
-- Tests and relations in the interval poset.
-- *** Extrema
-- |
-- The values retured by 'bottom' and 'top' complete the
-- refinement partial order on 'RealIntevalApprox'.
-- **** ASCII versions
bottom,top,
-- **** Unicode versions
(⊥),(⊤),
-- *** Comparability tests
(|==?),(|<==>?),(|</=>?),
-- *** Order relations
-- **** ASCII versions
(|<?),(|>?),(|<=?),(|>=?),
-- **** Unicode versions
(⊏?),(⊑?),(⊒?),(⊐?),
-- * Outward rounded operations
-- |
-- Interval extensions of common functions. The 'Num', 'Fractional'
-- and 'Floating' instances for 'RealIntevalApprox' use such versions as instance
-- methods.
-- ** Order operations
-- *** Numerical order
-- |
-- Outward rounded interval extensions of the corresponding
-- operations on Double.
minOut,maxOut,
-- *** Refinement order
-- |
-- Outward rounded lattice operations in the interval poset.
-- **** ASCII versions
(</\>),(<\/>),(<\/>?),
-- **** Unicode versions
(<⊓>),(<⊔>),(<⊔>?),
-- ** Field operations
-- *** Interval operations
(<+>),(<->),(<*>),(</>),
-- *** Mixed type operations
(|<+>),(<+>|),(|<*>),(<*>|),(</>|),(<^>),
-- ** Special constants
piOut,eOut,
-- ** Elementary functions
absOut,
sqrtOut, sqrtOutWithIters,
expOut,expOutWithDegree,
sinOut, sinOutWithDegree, cosOut, cosOutWithDegree,
-- * Inward rounded operations
-- ** Order operations
-- *** Numerical order
-- |
-- Inward rounded interval extensions of the corresponding
-- operations on Double.
minIn,maxIn,
-- *** Refinement order
-- **** ASCII versions
(>/\<),(>\/<),
-- **** Unicode versions
(>⊓<),(>⊔<),
-- ** Field operations
-- *** Interval operations
(>+<),(>-<),(>*<),(>/<),
-- *** Mixed type operations
(|>+<),(>+<|),(|>*<),(>*<|),(>/<|),(>^<),
-- ** Special constants
piIn,eIn,
-- ** Elementary functions
absIn,
sqrtIn, sqrtInWithIters,
expIn,expInWithDegree,
sinIn, sinInWithDegree, cosIn, cosInWithDegree,
)
where
import Numeric.AERN.Basics.Interval
(Interval)
import Numeric.AERN.NumericOrder
hiding (least,greatest)
import qualified Numeric.AERN.NumericOrder as BNO
(least,greatest)
import Numeric.AERN.RefinementOrder
hiding (bottom,top,(⊥),(⊤))
import qualified Numeric.AERN.RefinementOrder as BRO
(bottom,top,(⊥),(⊤))
import Numeric.AERN.RealArithmetic.Interval
(SqrtThinEffortIndicator(..),
ExpThinEffortIndicator(..),
SineCosineThinEffortIndicator(..))
import Numeric.AERN.RealArithmetic.RefinementOrderRounding
hiding (piOut,piIn,eOut,eIn)
import qualified Numeric.AERN.RealArithmetic.RefinementOrderRounding as RAROR
(piOut,piIn,eOut,eIn)
import Numeric.AERN.RealArithmetic.Basis.Double()
import Numeric.AERN.RealArithmetic.Interval.Double (width, bisect)
-- |
-- Intervals with Double endpoints.
--
-- Note that ('l','r') = 'getEndpoints' ('di' :: 'RealIntervalApprox') does not
-- fix an ordering of 'l' and 'r'.
--
-- * 'di' is called /consistent/ when 'l' '<=' 'r'
--
-- * 'di' is called /anticonsistent/ when 'r' '<=' 'l'
--
-- A consistent interval 'di' may be identified with the set defined by
-- \{ 'x' | 'l' '<=' 'x' and 'x' '<=' 'r' \}.
type RealIntervalApprox = Interval Double

-- | A sample value used only to pick the right instances when calling
-- the generic effort-indexed operations below.
sampleRealIntervalApprox :: RealIntervalApprox
sampleRealIntervalApprox = 0

least :: RealIntervalApprox
least = BNO.least sampleRealIntervalApprox

greatest :: RealIntervalApprox
greatest = BNO.greatest sampleRealIntervalApprox

bottom :: RealIntervalApprox
bottom = BRO.bottom sampleRealIntervalApprox

top :: RealIntervalApprox
top = BRO.top sampleRealIntervalApprox

-- | Convenience Unicode notation for 'bottom'
(⊥) :: RealIntervalApprox
(⊥) = (BRO.⊥) sampleRealIntervalApprox

-- | Convenience Unicode notation for 'top'
(⊤) :: RealIntervalApprox
(⊤) = (BRO.⊤) sampleRealIntervalApprox

-- | Outward rounded pi
piOut :: RealIntervalApprox
piOut = RAROR.piOut sampleRealIntervalApprox

-- | Outward rounded e
eOut :: RealIntervalApprox
eOut = RAROR.eOut sampleRealIntervalApprox

-- | Inward rounded pi
piIn :: RealIntervalApprox
piIn = RAROR.piIn sampleRealIntervalApprox

-- | Inward rounded e
eIn :: RealIntervalApprox
eIn = RAROR.eIn sampleRealIntervalApprox
-- | Outwards rounded square root with convenient effort regulation
-- (@iters@ overrides the number of Newton iterations).
sqrtOutWithIters :: Int -> RealIntervalApprox -> RealIntervalApprox
sqrtOutWithIters iters x =
    sqrtOutEff eff x
    where
    eff =
        (sqrtDefaultEffort x)
        {
            sqrteff_newtonIters = iters
        }

-- | Inwards rounded square root with convenient effort regulation
sqrtInWithIters :: Int -> RealIntervalApprox -> RealIntervalApprox
sqrtInWithIters iters x =
    sqrtInEff eff x
    where
    eff =
        (sqrtDefaultEffort x)
        {
            sqrteff_newtonIters = iters
        }

-- | Outwards rounded exponential with convenient effort regulation
-- (@degree@ overrides the Taylor expansion degree).
expOutWithDegree :: Int -> RealIntervalApprox -> RealIntervalApprox
expOutWithDegree degree x =
    expOutEff eff x
    where
    eff =
        (expDefaultEffort x)
        {
            expeff_taylorDeg = degree
        }

-- | Inwards rounded exponential with convenient effort regulation
expInWithDegree :: Int -> RealIntervalApprox -> RealIntervalApprox
expInWithDegree degree x =
    expInEff eff x
    where
    eff =
        (expDefaultEffort x)
        {
            expeff_taylorDeg = degree
        }
-- | Outwards rounded sine with convenient effort regulation
-- (@degree@ overrides the Taylor expansion degree).
sinOutWithDegree :: Int -> RealIntervalApprox -> RealIntervalApprox
sinOutWithDegree degree x =
    sinOutEff eff x
    where
    eff = sincosDefaultEffortWithDegree x degree

-- | Outwards rounded cosine with convenient effort regulation
cosOutWithDegree :: Int -> RealIntervalApprox -> RealIntervalApprox
cosOutWithDegree degree x =
    cosOutEff eff x
    where
    eff = sincosDefaultEffortWithDegree x degree

-- | Inwards rounded sine with convenient effort regulation
sinInWithDegree :: Int -> RealIntervalApprox -> RealIntervalApprox
sinInWithDegree degree x =
    sinInEff eff x
    where
    eff = sincosDefaultEffortWithDegree x degree

-- | Inwards rounded cosine with convenient effort regulation
cosInWithDegree :: Int -> RealIntervalApprox -> RealIntervalApprox
cosInWithDegree degree x =
    cosInEff eff x
    where
    eff = sincosDefaultEffortWithDegree x degree

-- | Shared helper: the default sine/cosine effort indicator with its
-- Taylor degree overridden.
-- NOTE(review): top-level binding without a type signature — consider
-- adding one.
sincosDefaultEffortWithDegree x degree =
    (sincosDefaultEffort x)
    {
        sincoseff_taylorDeg = degree
    }
| michalkonecny/aern | aern-double/src/Numeric/AERN/DoubleBasis/RealIntervalApprox.hs | bsd-3-clause | 9,858 | 0 | 9 | 2,273 | 1,249 | 843 | 406 | 119 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import Args
import Control.Lens
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.AWS
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Network.AWS.Auth
import Network.AWS.S3
import Network.AWS.S3.Encryption
import Network.AWS.S3.Encryption.Types
import Network.URI
import System.Directory
import System.FilePath
import System.IO
-- | Entry point: parse command-line arguments, build an AWS environment
-- (optionally from a named credentials profile and an explicit region),
-- then either download+decrypt (get) or encrypt+upload (put) the given
-- @s3://@ object via KMS client-side encryption. With no file argument,
-- data flows through stdin/stdout (switched to binary mode).
main :: IO ()
main = runWithArgs $ \Args{..} -> do
  lgr <- newLogger (if argsVerbose then Debug else Info) stderr
  env <- case argsAwsProfile
           of Nothing -> newEnv Discover
              Just p -> do cf <- credFile
                           newEnv $ FromFile p cf
  let setReg = case argsRegion
                 of Nothing -> id
                    Just r -> set envRegion r
      awsEnv = (set envLogger lgr . setReg) env
      -- apparently there's no need for key when decrypting,
      -- it's part of S3 object's metadata
      keyEnv = KeyEnv awsEnv ((kmsKey . fromMaybe "unused") argsKmsKey)
  (s3Bucket, s3Obj) <- case parseS3URI argsS3Uri
                         of Left e -> error e
                            Right bo -> return bo
  -- if a file is not given we interact with stdin/out
  hBinMode stdin
  hBinMode stdout
  let s3kmsDecrypt = runResourceT . runAWST keyEnv $ do
        when (isJust argsKmsKey) $ liftIO $ hPutStrLn stderr "Warning: ignoring KMS key parameter, not needed to decrypt."
        res <- decrypt (getObject s3Bucket s3Obj)
        let cOut = case argsFileName
                     of Nothing -> CB.sinkHandle stdout
                        Just f -> do liftIO $ mkParentDirs f
                                     CB.sinkFile f
        view gorsBody res `sinkBody` cOut
      s3kmsEncrypt = runResourceT . runAWST keyEnv $ do
        unless (isJust argsKmsKey) $ error "KMS key parameter is required for encryption!"
        oBody <- case argsFileName
                   of Nothing -> toBody <$> (CB.sourceHandle stdin $$ CB.sinkLbs)
                      Just f -> toBody <$> hashedFile f
        -- an unnecessary extra bit of paranoia, encrypt at rest with default S3 key
        let req = set poServerSideEncryption (Just AES256)
                      (putObject s3Bucket s3Obj oBody)
        _ <- encrypt req
        return ()
  case argsCmd
    of CmdGet -> s3kmsDecrypt
       CmdPut -> s3kmsEncrypt
-- | Split an @s3://bucket/key@ URI into its bucket name and object key.
-- Fails with a descriptive message on a malformed URI, a non-@s3:@
-- scheme, a missing authority, or an empty object key.
parseS3URI :: String
           -> Either String (BucketName, ObjectKey)
parseS3URI s3u = do
  URI{..} <- case parseURI s3u
               of Nothing -> Left $ "Failed to parse URI " <> s3u
                  Just u -> Right u
  _ <- if uriScheme == "s3:"
         then Right ()
         else Left $ "Expected s3: URI scheme in " <> s3u <> " but got " <> uriScheme
  URIAuth{..} <- case uriAuthority
                   of Nothing -> Left $ "Expected authority part in an s3 uri, got " <> s3u
                      Just a -> Right a
  -- Require a non-empty key after the leading '/'. The original applied
  -- 'T.tail' to any non-empty path, so a bare "s3://bucket/" slipped
  -- through with an empty object key.
  objKey <- case uriPath
              of '/' : rest | not (null rest) -> (Right . T.pack) rest
                 _ -> Left $ "URI path must not be empty (object key part) in " <> s3u
  return ( (BucketName . T.pack) uriRegName
         , ObjectKey objKey )
-- | Ensure the directory portion of the given path exists, creating
-- any missing parent directories along the way.
mkParentDirs :: FilePath
             -> IO ()
mkParentDirs = createDirectoryIfMissing True . fst . splitFileName
-- | Switch a handle to binary mode with block buffering, suitable for
-- piping raw bytes through stdin/stdout.
hBinMode :: Handle
         -> IO ()
hBinMode h =
  hSetBinaryMode h True >> hSetBuffering h (BlockBuffering Nothing)
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module GHC (
-- * GHC packages
array, base, binary, bytestring, cabal, compareSizes, compiler, containers,
deepseq, deriveConstants, directory, filepath, genapply, genprimopcode, ghc,
ghcBoot, ghcBootTh, ghcCabal, ghcCompact, ghci, ghcPkg, ghcPrim, ghcTags,
ghcSplit, haddock, haskeline, hsc2hs, hp2ps, hpc, hpcBin, integerGmp,
integerSimple, iservBin, libffi, mtl, parsec, parallel, pretty, primitive,
process, rts, runGhc, stm, templateHaskell, terminfo, text, time, touchy,
transformers, unlit, unix, win32, xhtml, ghcPackages, isGhcPackage,
defaultPackages,
-- * Package information
programName, nonCabalContext, nonHsMainPackage, autogenPath, installStage,
-- * Miscellaneous
programPath, ghcSplitPath, stripCmdPath, buildDll0
) where
import Base
import CommandLine
import Context
import Oracles.Flag
import Oracles.Setting
-- | These are all GHC packages we know about. Build rules will be generated for
-- all of them. However, not all of these packages will be built. For example,
-- package 'win32' is built only on Windows. 'defaultPackages' defines default
-- conditions for building each package. Users can add their own packages and
-- modify build default build conditions in "UserSettings".
ghcPackages :: [Package]
ghcPackages =
    [ array, base, binary, bytestring, cabal, compareSizes, compiler, containers
    , deepseq, deriveConstants, directory, filepath, genapply, genprimopcode
    , ghc, ghcBoot, ghcBootTh, ghcCabal, ghcCompact, ghci, ghcPkg, ghcPrim
    , ghcTags, haddock, haskeline, hsc2hs, hp2ps, hpc, hpcBin, integerGmp
    , integerSimple, iservBin, libffi, mtl, parsec, parallel, pretty, primitive
    , process, rts, runGhc, stm, templateHaskell, terminfo, text, time, touchy
    , transformers, unlit, unix, win32, xhtml ]

-- TODO: Optimise by switching to sets of packages.
-- | Membership test against 'ghcPackages' (linear scan).
isGhcPackage :: Package -> Bool
isGhcPackage = (`elem` ghcPackages)
-- | Package definitions, see 'Package'.
array           = hsLib  "array"
base            = hsLib  "base"
binary          = hsLib  "binary"
bytestring      = hsLib  "bytestring"
cabal           = hsLib  "Cabal"        `setPath` "libraries/Cabal/Cabal"
compareSizes    = hsUtil "compareSizes" `setPath` "utils/compare_sizes"
compiler        = hsTop  "ghc"          `setPath` "compiler"
containers      = hsLib  "containers"
deepseq         = hsLib  "deepseq"
deriveConstants = hsUtil "deriveConstants"
directory       = hsLib  "directory"
filepath        = hsLib  "filepath"
genapply        = hsUtil "genapply"
genprimopcode   = hsUtil "genprimopcode"
ghc             = hsPrg  "ghc-bin"      `setPath` "ghc"
ghcBoot         = hsLib  "ghc-boot"
ghcBootTh       = hsLib  "ghc-boot-th"
ghcCabal        = hsUtil "ghc-cabal"
ghcCompact      = hsLib  "ghc-compact"
ghci            = hsLib  "ghci"
ghcPkg          = hsUtil "ghc-pkg"
ghcPrim         = hsLib  "ghc-prim"
ghcTags         = hsUtil "ghctags"
ghcSplit        = hsUtil "ghc-split"
haddock         = hsUtil "haddock"
haskeline       = hsLib  "haskeline"
hsc2hs          = hsUtil "hsc2hs"
hp2ps           = cUtil  "hp2ps"
hpc             = hsLib  "hpc"
hpcBin          = hsUtil "hpc-bin"      `setPath` "utils/hpc"
integerGmp      = hsLib  "integer-gmp"
integerSimple   = hsLib  "integer-simple"
iservBin        = hsPrg  "iserv-bin"    `setPath` "iserv"
libffi          = cTop   "libffi"
mtl             = hsLib  "mtl"
parsec          = hsLib  "parsec"
parallel        = hsLib  "parallel"
pretty          = hsLib  "pretty"
primitive       = hsLib  "primitive"
process         = hsLib  "process"
rts             = cTop   "rts"
runGhc          = hsUtil "runghc"
stm             = hsLib  "stm"
templateHaskell = hsLib  "template-haskell"
terminfo        = hsLib  "terminfo"
text            = hsLib  "text"
time            = hsLib  "time"
touchy          = cUtil  "touchy"
transformers    = hsLib  "transformers"
unlit           = cUtil  "unlit"
unix            = hsLib  "unix"
win32           = hsLib  "Win32"
xhtml           = hsLib  "xhtml"
-- | Construct a Haskell library package, e.g. @array@.
hsLib :: PackageName -> Package
hsLib name = hsLibrary name ("libraries" -/- name)

-- | Construct a top-level Haskell library package, e.g. @compiler@.
hsTop :: PackageName -> Package
hsTop name = hsLibrary name name

-- | Construct a top-level C library package, e.g. @rts@.
cTop :: PackageName -> Package
cTop name = cLibrary name name

-- | Construct a top-level Haskell program package, e.g. @ghc@.
hsPrg :: PackageName -> Package
hsPrg name = hsProgram name name

-- | Construct a Haskell utility package, e.g. @haddock@.
hsUtil :: PackageName -> Package
hsUtil name = hsProgram name ("utils" -/- name)

-- | Construct a C utility package, e.g. @touchy@ or @unlit@.
-- (The original comment said @haddock@, which is a Haskell utility.)
cUtil :: PackageName -> Package
cUtil name = cProgram name ("utils" -/- name)

-- | Amend a package path if it doesn't conform to a typical pattern.
setPath :: Package -> FilePath -> Package
setPath pkg path = pkg { pkgPath = path }
-- | Packages that are built by default. You can change this in "UserSettings".
-- Note that Stage3 builds no packages by default.
defaultPackages :: Stage -> Action [Package]
defaultPackages Stage0 = stage0Packages
defaultPackages Stage1 = stage1Packages
defaultPackages Stage2 = stage2Packages
defaultPackages Stage3 = return []
-- | Packages built in Stage0 (the bootstrap stage). 'terminfo' is
-- skipped on Windows and iOS hosts and when cross-compiling; 'touchy'
-- is built only on Windows.
stage0Packages :: Action [Package]
stage0Packages = do
    win   <- windowsHost
    ios   <- iosHost
    cross <- crossCompiling
    return $ [ binary
             , cabal
             , compareSizes
             , compiler
             , deriveConstants
             , genapply
             , genprimopcode
             , ghc
             , ghcBoot
             , ghcBootTh
             , ghcCabal
             , ghci
             , ghcPkg
             , ghcTags
             , hsc2hs
             , hp2ps
             , hpc
             , mtl
             , parsec
             , templateHaskell
             , text
             , transformers
             , unlit ]
          ++ [ terminfo | not win, not ios, not cross ]
          ++ [ touchy   | win ]
-- | Packages built in Stage1: every Stage0 library plus the boot
-- libraries and programs. The integer backend is chosen from the
-- command line; Unix/Windows-specific packages are filtered by host.
stage1Packages :: Action [Package]
stage1Packages = do
    win        <- windowsHost
    intSimple  <- cmdIntegerSimple
    libraries0 <- filter isLibrary <$> stage0Packages
    return $ libraries0 -- Build all Stage0 libraries in Stage1
          ++ [ array
             , base
             , bytestring
             , containers
             , deepseq
             , directory
             , filepath
             , ghc
             , ghcCabal
             , ghcCompact
             , ghcPrim
             , haskeline
             , hpcBin
             , hsc2hs
             , if intSimple then integerSimple else integerGmp
             , pretty
             , process
             , rts
             , runGhc
             , stm
             , time
             , xhtml ]
          ++ [ iservBin | not win ]
          ++ [ unix     | not win ]
          ++ [ win32    | win ]
-- | Packages built in Stage2: only Haddock.
stage2Packages :: Action [Package]
stage2Packages = return [haddock]
-- | Given a 'Context', compute the name of the program that is built in it
-- assuming that the corresponding package's type is 'Program'. For example, GHC
-- built in 'Stage0' is called @ghc-stage1@. If the given package is a
-- 'Library', the function simply returns its name.
programName :: Context -> String
programName Context {..}
    | package == ghc      = "ghc-stage" ++ show (fromEnum stage + 1)
    | package == hpcBin   = "hpc"
    | package == runGhc   = "runhaskell"
    | package == iservBin = "ghc-iserv"
    | otherwise           = pkgName package
-- | The build stage whose results are used when installing a package, or
-- @Nothing@ if the package is not installed, e.g. because it is a user package.
-- The current implementation installs the /latest/ build stage of a package.
installStage :: Package -> Action (Maybe Stage)
installStage pkg
    | not (isGhcPackage pkg) = return Nothing -- Only GHC packages are installed
    | otherwise = do
        -- Collect every stage whose default package set contains 'pkg'
        -- and keep the maximum (latest) one.
        stages <- filterM (fmap (pkg `elem`) . defaultPackages) [Stage0 ..]
        return $ if null stages then Nothing else Just (maximum stages)
-- | Is the program corresponding to a given context built 'inplace', i.e. in
-- the @inplace/bin@ directory? For most programs, only their /latest/ build
-- stages are built 'inplace'. The only exception is the GHC itself, which is
-- built 'inplace' in all stages. The function returns @False@ for libraries and
-- all user packages.
isBuiltInplace :: Context -> Action Bool
isBuiltInplace Context {..}
    | isLibrary package          = return False
    | not (isGhcPackage package) = return False
    | package == ghc             = return True
    | otherwise                  = (Just stage ==) <$> installStage package
-- | The 'FilePath' to a program executable in a given 'Context'.
programPath :: Context -> Action FilePath
programPath context@Context {..} = do
    path <- buildPath context
    inplace <- isBuiltInplace context
    let contextPath = if inplace then inplacePath else path
    return $ contextPath -/- programName context <.> exe
  where
    -- A few special programs live in inplace/lib/bin rather than inplace/bin.
    inplacePath | package `elem` [touchy, unlit, iservBin] = inplaceLibBinPath
                | otherwise                                = inplaceBinPath
-- | Some contexts are special: their packages do not have @.cabal@ metadata or
-- we cannot run @ghc-cabal@ on them, e.g. because the latter hasn't been built
-- yet (this is the case with the 'ghcCabal' package in 'Stage0').
nonCabalContext :: Context -> Bool
nonCabalContext Context {..} = (package `elem` [hp2ps, rts, touchy, unlit])
    || package == ghcCabal && stage == Stage0

-- | Some program packages should not be linked with Haskell main function.
-- NOTE(review): presumably these provide their own (non-Haskell) entry
-- point; confirm against the linking rules before relying on this.
nonHsMainPackage :: Package -> Bool
nonHsMainPackage = (`elem` [ghc, hp2ps, iservBin, touchy, unlit])
-- | Path to the autogen directory generated by @ghc-cabal@ of a given 'Context'.
autogenPath :: Context -> Action FilePath
autogenPath context@Context {..}
    | isLibrary package   = autogen "build"
    | package == ghc      = autogen "build/ghc"
    | package == hpcBin   = autogen "build/hpc"
    | package == iservBin = autogen "build/iserv"
    -- Other programs keep autogen under a directory named after the package.
    | otherwise           = autogen $ "build" -/- pkgName package
  where
    autogen dir = buildPath context <&> (-/- dir -/- "autogen")

-- | @ghc-split@ is a Perl script used by GHC with @-split-objs@ flag. It is
-- generated in "Rules.Generators.GhcSplit".
ghcSplitPath :: FilePath
ghcSplitPath = inplaceLibBinPath -/- "ghc-split"
-- ref: mk/config.mk
-- | Command line tool for stripping.
stripCmdPath :: Action FilePath
stripCmdPath = do
    targetPlatform <- setting TargetPlatform
    top <- topDirectory
    case targetPlatform of
        "x86_64-unknown-mingw32" ->
             -- Use the strip bundled with the inplace mingw toolchain.
             return (top -/- "inplace/mingw/bin/strip.exe")
        "arm-unknown-linux" ->
             return ":" -- HACK: from the make-based system, see the ref above
        _ -> return "strip" -- otherwise assume strip is on PATH

-- | Do we need to build a @dll0@ for this context?
-- NOTE(review): true only for Stage1 builds of the compiler package on
-- Windows; confirm against the Windows DLL-splitting rules.
buildDll0 :: Context -> Action Bool
buildDll0 Context {..} = do
    windows <- windowsHost
    return $ windows && stage == Stage1 && package == compiler
| ezyang/ghc | hadrian/src/GHC.hs | bsd-3-clause | 11,380 | 0 | 13 | 3,311 | 2,293 | 1,258 | 1,035 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
-- | A data type for monetary values, with associated operations and
-- only sensible instances.
module Data.Money
(
Money(Money)
-- * Optics
, money
-- * Operators
, ($+$)
, ($-$)
, (*$)
, ($*)
, ($/)
, ($/$)
, ($^)
, ($^^)
, ($**)
) where
import Data.Functor (Functor (fmap))
import Data.Foldable (Foldable (foldMap))
import Data.Monoid (Monoid, mempty, mappend)
import Data.Profunctor (Profunctor, dimap)
import Data.Semigroup (Semigroup, (<>))
import Data.Traversable (Traversable (traverse))
-- | A newtype for monetary values represented as type @num@.
--
-- The 'Semigroup' instance allows amounts of money to be added together.
--
-- Any 'Num' instances present are hidden, as operations like multiplying
-- money by money don't make any sense.
newtype Money num =
  Money num
  deriving (Eq, Ord)

-- | Rendered with a leading dollar sign, e.g. @$5@.
instance Show num => Show (Money num) where
  show (Money m) = '$': show m

-- | Combining money is addition of the amounts.
instance Num a => Semigroup (Money a) where
  Money m <> Money n = Money (m + n)

-- | Monoid under addition, with @$0@ as the identity.
instance Num a => Monoid (Money a) where
  mappend = (<>)
  mempty = Money 0

instance Functor Money where
  fmap f (Money n) = Money (f n)

instance Foldable Money where
  foldMap f (Money n) = f n

instance Traversable Money where
  traverse f (Money n) = fmap Money (f n)
-- | A profunctor-style isomorphism, declared locally so this module does
-- not depend on the lens library; compatible with lens's 'Iso'.
type Iso s t a b = forall p f. (Profunctor p, Functor f) => p a (f b) -> p s (f t)

-- | The raw numeric value inside monetary value
money :: Iso (Money a) (Money b) a b
money = dimap (\(Money a) -> a) (fmap Money)
-- Operator mnemonic: a @$@ marks the money-typed side of each operator.

-- | Add money to money. A synonym for @<>@.
infixl 6 $+$
($+$) :: Num a => Money a -> Money a -> Money a
($+$) = (<>)

-- | Subtract money from money
infixl 6 $-$
($-$) :: Num a => Money a -> Money a -> Money a
($-$) (Money m) (Money n) = Money (m - n)

-- | Multiply a scalar by money
infixr 7 *$
(*$) :: Num a => a -> Money a -> Money a
(*$) x (Money m) = Money (x * m)

-- | Multiply money by a scalar
infixl 7 $*
($*) :: Num a => Money a -> a -> Money a
($*) = flip (*$)

-- | Divide money by a scalar
infixl 7 $/
($/) :: Fractional a => Money a -> a -> Money a
($/) (Money m) x = Money (m/x)

-- | Divide money by money, yielding a bare ratio (not money)
infixl 7 $/$
($/$) :: Fractional a => Money a -> Money a -> a
($/$) (Money n) (Money m) = n / m

-- | Raise money to a non-negative integral power. Like '(^)', this
-- errors on a negative exponent.
infixr 8 $^
($^) :: (Num a, Integral b) => Money a -> b -> Money a
($^) (Money m) x = Money (m ^ x)

-- | Raise money to an integral power (negative exponents allowed)
infixr 8 $^^
($^^) :: (Fractional a, Integral b) => Money a -> b -> Money a
($^^) (Money m) x = Money (m ^^ x)

-- | Raise money to a floating-point power
infixr 8 $**
($**) :: Floating a => Money a -> a -> Money a
($**) (Money m) x = Money (m ** x)
| gwils/money | src/Data/Money.hs | bsd-3-clause | 2,691 | 0 | 10 | 622 | 1,066 | 592 | 474 | 69 | 1 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, ViewPatterns #-}
module Main (main) where
import System.Environment (getArgs)
import Debug.Trace
import Control.Monad
import Control.Monad.Trans (MonadIO, liftIO)
import Control.Monad.Loops (whileM_)
import Control.Applicative (Applicative(..), (<$>), (<*>))
import Data.Monoid (Monoid(..))
import qualified Data.Attoparsec.Enumerator as AE
import qualified Data.Aeson as Aeson
import Data.Aeson ((.:), (.=), (./))
import qualified Data.Enumerator as DE
import Network (withSocketsDo)
import qualified Network.HTTP.Enumerator as HE
import qualified Network.Wai as W
import Network.URI (escapeURIString)
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Lazy.Char8 as LC8
import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString.Base64 as B64
import qualified Network.Riak.Basic as R (connect, delete, foldKeys)
import qualified Network.Riak.Types as R
import qualified Network.Riak.Content as R (Content(..), Link, link)
import qualified Network.Riak.Value.Monoid as R
import qualified Data.Text as T
import Data.Maybe (fromMaybe, catMaybes)
import qualified Data.Sequence as S
import qualified Data.Foldable as F (toList)
import qualified Data.Set as Set
import Data.Function (on)
import Network.Twitter.Types
import qualified Network.OAuth.Consumer as O
import qualified Network.OAuth.Http.HttpClient as O
import qualified Network.OAuth.Http.Request as O
import qualified Network.OAuth.Http.Response as O
-- | Add an HTTP Basic auth header for the given user/password to a request.
basicauth :: String -> String -> HE.Request -> HE.Request
basicauth u p r = r { HE.requestHeaders = basicauthhdr u p : HE.requestHeaders r }

-- Build the @authorization: Basic <base64 "user:pass">@ header pair.
basicauthhdr u p = (W.mkCIByteString "authorization", C8.concat [ "Basic ", auth ])
  where auth = B64.encode $ C8.concat $ map C8.pack [ u, ":", p ]

-- | Riak keys are the decimal rendering of the numeric Twitter id.
idToKey :: TwitterID -> R.Key
idToKey = LC8.pack . show . twitterid

tweetKey :: Tweet -> R.Key
tweetKey = idToKey . t_id

userProfileKey :: TwitterUserProfile -> R.Key
userProfileKey = idToKey . tup_id

userKey :: TwitterUser -> R.Key
userKey = idToKey . tu_id
-- | A stored payload paired with the set of Riak links to keep with it.
data ContentT a = ContentT { content :: a
                           , links :: Set.Set R.Link
                           --, usermeta :: S.Seq R.Pair
                           } deriving (Show)

-- Equality on both the payload and the link set
-- (commented-out traces retained for debugging).
content_eq a b = -- trace ("eq content a: " ++ show (content a) ++ " b:" ++ show (content b) ++ " eq: " ++ show eq_content) $
                 -- trace ("   links a: " ++ show (links a) ++ " b:" ++ show (links b) ++ " eq: " ++ show eq_links) $
                 eq_links && eq_content
  where eq_content = ((==) `on` content) a b
        eq_links = ((==) `on` links) a b

-- The 'Show' constraint is only needed so the traces above compile when
-- uncommented.
instance (Show a, Eq a) => Eq (ContentT a) where
  (==) = content_eq

-- | Wrap a value with an empty link set.
mkContentT :: a -> ContentT a
mkContentT a = ContentT a Set.empty
-- | Pairwise merge of stored values: payloads and link sets are each
-- combined with their own 'mappend'. There is no sensible identity, so
-- 'mempty' is an error and 'mconcat' must never be given an empty list.
instance (Monoid a) => Monoid (ContentT a) where
  mempty = error "no mempty for ContentT a"
  a `mappend` b = ContentT ((mappend `on` content) a b) ((mappend `on` links) a b)
  -- avoid using mempty in mconcat
  -- BUG FIX: the previous definition, 'foldr mappend a as', used the
  -- list head as the *rightmost* operand, i.e. mconcat [a,b,c] computed
  -- b <> (c <> a). That reorders non-commutative payload monoids (e.g.
  -- the 'mappend = const' instance for BasicAuth in this file). Folding
  -- from the left combines elements in list order, as mconcat must.
  mconcat []     = error "no mempty for ContentT a"
  mconcat (a:as) = foldl mappend a as
instance Functor ContentT where
  fmap f a = a { content = f (content a) }

-- | '<*>' applies the function payload and unions both link sets.
instance Applicative ContentT where
  pure = mkContentT
  (ContentT f l1) <*> (ContentT x l2) = ContentT (f x) (l1 `mappend` l2)

-- | Round-trip the payload through JSON and the links through the raw
-- Riak content record.
instance (Aeson.FromJSON a, Aeson.ToJSON a) => R.IsContent (ContentT a) where
  parseContent c = mk `fmap` (R.parseContent c >>= Aeson.parseJSON)
    where mk v = ContentT v (Set.fromList . F.toList . R.links $ c)
  toContent o = c { R.links = (S.fromList . Set.toAscList . links) o }
    where c = (R.toContent . Aeson.toJSON . content) o

-- | Append the given links to a value's link set.
addlinks :: [R.Link] -> ContentT a -> ContentT a
addlinks l ct = ct { links = links ct `mappend` Set.fromList l }
{-
Store tweet to database, along with extra info:
- Tweets/: TweetID -> Tweet -- tweets themselves
- TweetUsers/: UserID -> [TweetID] -- users who actually tweet (?RT?)
- TweetMentions/: UserID -> [TweetID] -- users mentioned in tweets
- TweetURLs/: URL -> [TweetID] -- urls referenced by tweets
Other buckets:
- TwitterUserProfiles/: UserID -> TwitterUserProfile -- complete profile info of user
- Users/: UserID -> User -- local user info (User ID same as Twitter's)
This should probably use Riak links for better querying...
-}
-- | Riak keys for every URL referenced by a tweet: the expanded URL when
-- available (falling back to the raw one), percent-escaped.
urlkeys :: Tweet -> [R.Key]
urlkeys t = (urlkey . besturl) `fmap` (Set.toAscList . te_urls . t_entities) t
  where urlkey = LC8.pack . (escapeURIString (not . flip elem "/?&%")) . show
        besturl u = fromMaybe (url u) (expandedUrl u)

-- | Riak keys for every user mentioned in a tweet.
mentionkeys :: Tweet -> [R.Key]
mentionkeys t = userKey `fmap` (Set.toAscList . te_mentions . t_entities) t

-- | Fetch many keys from one bucket with default quorum settings.
getMulti :: (Monoid c, R.IsContent c) => R.Connection -> R.Bucket -> [R.Key] -> IO [Maybe (c, R.VClock)]
getMulti c b k = R.getMany c b k R.Default
-- Do a read-modify-update by using the Monoid instance to perform an
-- update to an existing entry (if any)
updateMulti' :: (Show c, Eq c, Monoid c, R.IsContent c) =>
                R.Connection -> R.Bucket -> [R.Key] -> [c] -> IO [(R.Key, Maybe R.VClock, c)]
updateMulti' c b k v = do orig <- getMulti c b k
                          --putStrLn $ "Original " ++ show orig
                          -- Carry each entry's vclock through (Nothing for new keys).
                          let vc = map (snd `fmap`) orig
                          -- Merge each new value with the stored one, if any.
                          let v' = zipWith combine v $ map (fst `fmap`) orig
                          let kvcv = zip3 k vc v'
                          --putStrLn $ "Updating " ++ show b ++ " with " ++ show kvcv
                          return kvcv
  where
    combine :: Monoid a => a -> Maybe a -> a
    combine a Nothing = a
    combine a (Just b) = a `mappend` b

-- | Read-modify-write, returning the stored values with their vclocks.
updateMulti :: (Show c, Eq c, Monoid c, R.IsContent c) =>
               R.Connection -> R.Bucket -> [R.Key] -> [c] -> IO [(c, R.VClock)]
updateMulti c b k v = do kvcv <- updateMulti' c b k v
                         R.putMany c b kvcv R.Default R.Default

-- | As 'updateMulti', discarding the results.
updateMulti_ :: (Show c, Eq c, Monoid c, R.IsContent c) =>
                R.Connection -> R.Bucket -> [R.Key] -> [c] -> IO ()
updateMulti_ c b k v = do kvcv <- updateMulti' c b k v
                          R.putMany_ c b kvcv R.Default R.Default
-- | Fetch tweet-id sets for URL keys from the TweetURLs bucket.
-- BUG FIX: the result was previously typed @ContentT TwitterID@, but this
-- bucket is written by 'updateUrls' / 'updateUrls_' with @ContentT
-- TweetIDs@ (a *set* of ids), so reads were typed against the wrong
-- payload. Align the type with what is actually stored.
getUrls :: R.Connection -> [R.Key] -> IO [Maybe (ContentT TweetIDs, R.VClock)]
getUrls c k = getMulti c "TweetURLs" k
-- | The payload stored in the secondary-index buckets: a set of tweet ids.
type TweetIDs = Set.Set TwitterID

updateUrls :: R.Connection -> [R.Key] -> [ContentT TweetIDs] -> IO [(ContentT TweetIDs, R.VClock)]
updateUrls c k v = updateMulti c "TweetURLs" k v

updateUrls_ :: R.Connection -> [R.Key] -> [ContentT TweetIDs] -> IO ()
updateUrls_ c k v = updateMulti_ c "TweetURLs" k v

getMentions :: R.Connection -> [R.Key] -> IO [Maybe (ContentT TweetIDs, R.VClock)]
getMentions c k = getMulti c "TweetMentions" k

updateMentions :: R.Connection -> [R.Key] -> [ContentT TweetIDs] -> IO [(ContentT TweetIDs, R.VClock)]
updateMentions c k v = updateMulti c "TweetMentions" k v

updateMentions_ :: R.Connection -> [R.Key] -> [ContentT TweetIDs] -> IO ()
updateMentions_ c k v = updateMulti_ c "TweetMentions" k v

-- Single-key wrappers over the bulk updaters for the Tweets and
-- TweetUsers buckets ('head' is safe: one key in, one result out).
updateTweet :: R.Connection -> R.Key -> ContentT Tweet -> IO (ContentT Tweet, R.VClock)
updateTweet c k v = head `liftM` updateMulti c "Tweets" [k] [v]

updateTweetUser :: R.Connection -> R.Key -> ContentT TweetIDs -> IO (ContentT TweetIDs, R.VClock)
updateTweetUser c k v = head `liftM` updateMulti c "TweetUsers" [k] [v]

updateTweet_ :: R.Connection -> R.Key -> ContentT Tweet -> IO ()
updateTweet_ c k v = updateMulti_ c "Tweets" [k] [v]

updateTweetUser_ :: R.Connection -> R.Key -> ContentT TweetIDs -> IO ()
updateTweetUser_ c k v = updateMulti_ c "TweetUsers" [k] [v]
-- | Attach reply/retweet/author links to a tweet; absent ones (no reply
-- target, no retweet source) are dropped by 'catMaybes'.
addTweetLinks :: ContentT Tweet -> ContentT Tweet
addTweetLinks ct@(content -> t) = addlinks (catMaybes . map mklink $ l) ct
  where
    mklink (f, b, r) = R.link <$> pure b <*> f t <*> pure r
    l = [ (fmap idToKey . t_reply_status, "Tweets", "reply")
        , (fmap tweetKey . t_retweet, "Tweets", "retweet")
        , (fmap userKey . Just . t_user, "TweetUsers", "user")
        ]

mkTweet :: Tweet -> ContentT Tweet
mkTweet = addTweetLinks . mkContentT

-- | Store a tweet plus its secondary indexes (author, mentions, URLs),
-- with Riak links in both directions.
stashTweet :: R.Connection -> Tweet -> IO ()
stashTweet c t = do updateTweet_ c (tweetKey t) jt
                    updateTweetUser_ c (userKey . t_user $ t) tweetid
                    updateUrls_ c urls (repeat tweetid)
                    updateMentions_ c mentions (repeat tweetid)
  where jt = addlinks (mentionlinks ++ urllinks) . mkTweet $ t
        tweetlink = R.link "Tweets" (tweetKey t) "tweet"
        tweetid :: ContentT (Set.Set TwitterID)
        tweetid = addlinks [tweetlink] $ (mkContentT . Set.singleton . t_id $ t)
        mentions = mentionkeys t
        mentionlinks = map (\k -> R.link "TweetUsers" k "mention") mentions
        urls = urlkeys t
        urllinks = map (\k -> R.link "TweetURLs" k "url") urls
-- | Repeat an iteratee until the input stream is exhausted.
untilDone :: forall a a1. DE.Iteratee a1 IO a -> DE.Iteratee a1 IO ()
untilDone = whileM_ $ liftM not DE.isEOF

-- | Consume the Twitter streaming-API response: on HTTP 200, parse JSON
-- values one by one until EOF; any other status aborts with an exception.
httpiter :: R.Connection -> W.Status -> t -> DE.Iteratee C8.ByteString IO ()
httpiter conn st _ | st == W.status200 = untilDone go
                   | otherwise =
                       DE.throwError $ HE.StatusCodeException (W.statusCode st)
                                           (LBS.fromChunks [W.statusMessage st])
  where
    go = do
      x <- AE.iterParser Aeson.json
      -- Deletion notices carry the id of the status to remove...
      case x ./ "delete" ./ "status" ./ "id_str" of
        (Aeson.String str) -> liftIO $ do
            putStrLn $ "Delete status " ++ T.unpack str
            deletekeys conn "Tweets" [key]
              where key = (LC8.pack . T.unpack) str
        -- ...anything else should decode as an ordinary tweet.
        _ -> case Aeson.fromJSON x :: Aeson.Result Tweet of
               Aeson.Success a -> liftIO $ do
                 --putStrLn $ show a
                 putStrLn $ "Stashing tweet " ++
                            (show . twitterid . t_id $ a) ++ " (" ++
                            (T.unpack . tu_name . t_user $ a) ++ "): " ++
                            (T.unpack . t_text $ a)
                 stashTweet conn a
                 return ()
               Aeson.Error e -> liftIO $ putStrLn $ "Failed: " ++ show x ++ " -> " ++ e
-- | User/password pair for HTTP Basic authentication.
data BasicAuth = BasicAuth { ba_user :: String
                           , ba_pass :: String
                           } deriving (Eq, Show)
-- | Decode from @{"user": ..., "pass": ...}@.
instance Aeson.FromJSON BasicAuth where
  parseJSON (Aeson.Object v) = BasicAuth <$> v .: "user" <*> v .: "pass"
  -- Say what was expected instead of the unhelpful "Wrong thing".
  parseJSON _ = fail "BasicAuth: expected a JSON object with 'user' and 'pass'"
instance Aeson.ToJSON BasicAuth where
  toJSON v = Aeson.object [ "user" .= ba_user v, "pass" .= ba_pass v]

-- | 'mappend' keeps its *first* argument (later values are discarded).
-- NOTE(review): presumably only present to satisfy the Monoid-based
-- Riak read API used by 'main'; confirm before relying on it elsewhere.
instance Monoid BasicAuth where
  mappend = const
  mempty = error "no mempty for BasicAuth"
-- | Connect to the local Riak node on port 8081.
riak_conn :: IO R.Connection
riak_conn = R.connect $ R.Client "127.0.0.1" "8081" LBS.empty

-- | Read Basic-auth credentials from Riak (bucket @admin@, key
-- @basicauth@), then stream the Twitter sample feed into 'httpiter'.
main :: IO ()
main = do
  r_conn <- riak_conn
  auth <- R.get r_conn "admin" "basicauth" R.Default
  case auth of
    Just (ja, _) -> do
          request <- basicauth (ba_user a) (ba_pass a) <$> HE.parseUrl "http://stream.twitter.com/1/statuses/sample.json"
          withSocketsDo . HE.withHttpEnumerator . DE.run_ $ HE.httpRedirect request (httpiter r_conn)
      where a = content ja
    Nothing -> putStrLn "Can't find auth details in admin/basicauth"
-- | List every key in a bucket via a server-side key fold.
allkeys :: R.Connection -> R.Bucket -> IO [R.Key]
allkeys conn bucket = R.foldKeys conn bucket (\a k -> return (k:a)) []

-- | Delete the given keys from a bucket.
deletekeys :: R.Connection -> R.Bucket -> [R.Key] -> IO ()
deletekeys conn bucket = mapM_ $ \k -> R.delete conn bucket k R.Default

-- | Delete every key in a bucket.
deleteall :: R.Connection -> R.Bucket -> IO ()
deleteall c b = do keys <- allkeys c b
                   deletekeys c b keys

-- | Count the keys in a bucket (fetches the full key list).
countkeys :: R.Connection -> R.Bucket -> IO Int
countkeys c b = do keys <- allkeys c b
                   return $ length keys

-- | Destructive: drop all tweet data from every bucket this program uses.
wipeeverything :: IO ()
wipeeverything = do c <- riak_conn
                    putStrLn "Wiping Tweets"
                    deleteall c "Tweets"
                    putStrLn "Wiping TweetUsers"
                    deleteall c "TweetUsers"
                    putStrLn "Wiping TweetMentions"
                    deleteall c "TweetMentions"
                    putStrLn "Wiping TweetURLs"
                    deleteall c "TweetURLs"
{-
-- Convert a Network.OAuth.Http.Request into a Network.HTTP.Enumerator.Request
-- What. A. Pain.
http_cvt_request :: O.Request -> HE.Request
http_cvt_request oar = HE.Request method secure host port path query headers body
where method = C8.pack . show . O.method $ oar
secure = O.ssl oar
host = C8.pack . O.host $ oar
port = O.port oar
path = C8.pack . intercalate "/" $ O.pathComps oar
query = packpair <$> (O.toList . O.qString $ oar)
headers = (first W.mkCIByteString) . packpair <$> (O.toList . O.reqHeaders $ oar)
body = O.reqPayload oar
-- Convert a Network.HTTP.Enumerator.Response into a Network.OAuth.Http.Response
-- See above.
http_cvt_response :: HE.Response -> O.Response
http_cvt_response her = O.RspHttp status reason headers payload
where status = HE.statusCode her
reason = ""
headers = O.fromList $ (unpackpair . first W.ciOriginal) <$> HE.responseHeaders her
payload = HE.responseBody her
mappair f (a,b) = (f a, f b)
packpair = mappair C8.pack
unpackpair = mappair C8.unpack
newtype HttpOAuthStream a m b = HttpOAuthStream { iter :: W.Status -> W.ResponseHeaders -> DE.Iteratee a m b }
instance MonadIO m => O.HttpClient (HttpOAuthStream a m b) where
-- runClient :: (MonadIO m) => c -> Request -> m (Either String Response)
runClient c r = liftM cvt $ DE.run $ HE.http (http_cvt_request r) (iter c)
where
cvt :: Show a => Either a HE.Response -> Either String O.Response
cvt (Left a) = Left $ show a
cvt (Right r) = Right $ http_cvt_response r
data HttpOAuth = HttpOAuth { }
instance O.HttpClient HttpOAuth where
runClient c r = (HE.httpLbs . http_cvt_request) r >>= return . cvt
where
cvt :: HE.Response -> Either String O.Response
cvt r@(HE.Response st _ b) | 200 <= st && st < 300 = Right $ http_cvt_response r
| otherwise = Left $ "HTTP status" ++ show st
-}
| jsgf/SigFromNoise | src/slurp.hs | bsd-3-clause | 14,607 | 5 | 32 | 4,216 | 4,046 | 2,134 | 1,912 | 210 | 3 |
-- | Sum of all primes strictly less than @n@ (Project Euler 10).
-- Uses the classic lazy trial-division "sieve": each prime reaching the
-- head of the candidate stream filters its multiples out of the rest.
sumPrimesTo :: Integer -> Integer
sumPrimesTo n = sieve [2..] 0
  where
    -- BUG FIX (space leak): the accumulator was carried lazily, building
    -- a chain of (p + acc) thunks across every prime below n (~148k of
    -- them for n = 2,000,000). Force it with seq on each step.
    -- The final guard is now 'otherwise' so the match is exhaustive.
    sieve (p:ms) acc
      | p >= n    = acc
      | otherwise = let acc' = p + acc
                    in acc' `seq` sieve (filter (\m -> m `mod` p /= 0) ms) acc'
    sieve [] acc = acc  -- unreachable: the candidate stream is infinite
-- | Project Euler problem 10: print the sum of all primes below 2,000,000.
main :: IO ()
main = print $ sumPrimesTo 2000000
| akerber47/haskalah | test/files/euler/10.hs | bsd-3-clause | 304 | 0 | 14 | 92 | 129 | 66 | 63 | 6 | 1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.NV.ExplicitMultisample
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.NV.ExplicitMultisample (
-- * Extension Support
glGetNVExplicitMultisample,
gl_NV_explicit_multisample,
-- * Enums
pattern GL_INT_SAMPLER_RENDERBUFFER_NV,
pattern GL_MAX_SAMPLE_MASK_WORDS_NV,
pattern GL_SAMPLER_RENDERBUFFER_NV,
pattern GL_SAMPLE_MASK_NV,
pattern GL_SAMPLE_MASK_VALUE_NV,
pattern GL_SAMPLE_POSITION_NV,
pattern GL_TEXTURE_BINDING_RENDERBUFFER_NV,
pattern GL_TEXTURE_RENDERBUFFER_DATA_STORE_BINDING_NV,
pattern GL_TEXTURE_RENDERBUFFER_NV,
pattern GL_UNSIGNED_INT_SAMPLER_RENDERBUFFER_NV,
-- * Functions
glGetMultisamplefvNV,
glSampleMaskIndexedNV,
glTexRenderbufferNV
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/NV/ExplicitMultisample.hs | bsd-3-clause | 1,165 | 0 | 5 | 143 | 108 | 74 | 34 | 20 | 0 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
module Mailgun where
import ClassyPrelude
import Control.Monad.Trans.Either
import Servant.API
import Servant.Client
import Data.Proxy
import Data.Aeson
import Data.Text (Text)
import qualified Data.Text as T
import Data.ByteString (ByteString)
import qualified Data.ByteString.Base64 as B64
import Data.Time.Format (formatTime)
import Servant.Common.Req
-- parseTime defaultTimeLocale "%a, %e %b %Y %T %Z" "Thu, 13 Oct 2011 18:02:00 GMT" :: Maybe UTCTime
-- formatTime defaultTimeLocale "%a, %e %b %Y %T %Z" someUTCTime
-- | A Mailgun sending domain plus its API token.
data Creds = Creds
  { credsDomain :: Text
  , credsToken :: AuthToken
  } deriving Show

-- | Raw (unencoded) Mailgun API token.
newtype AuthToken = AuthToken
  { unAuthToken :: Text
  } deriving (Show, FromText)

-- | Base64-encoded @api:<token>@ pair, ready for a Basic auth header.
newtype EncodedAuthToken = EncodedAuthToken
  { unEncodedAuthToken :: Text
  } deriving Show

-- Rendered as the full header value, e.g. @Basic czZC...@.
instance ToText EncodedAuthToken where
  toText = ("Basic " <>) . unEncodedAuthToken

-- | Click-tracking switch; serialised as "yes" / "no" / "htmlonly".
data TrackingClicks = YesTC | NoTC | HtmlOnly

instance Show TrackingClicks where
  show = \case
    YesTC    -> "yes"
    NoTC     -> "no"
    HtmlOnly -> "htmlonly"

-- | Boolean flags are sent on the wire as the strings "yes" / "no".
data YesOrNo = Yes | No

instance Show YesOrNo where
  show = \case
    Yes -> "yes"
    No  -> "no"

-- | UTC time rendered RFC-2822 style (e.g. @Thu, 13 Oct 2011 18:02:00
-- GMT@), matching the format noted at the top of this module.
newtype DateTime = DateTime
  { unDateTime :: UTCTime }

instance Show DateTime where
  show = formatTime defaultTimeLocale "%a, %e %b %Y %T %Z" . unDateTime
-- | An outgoing Mailgun message. The @o_*@ fields map to Mailgun's
-- optional @o:...@ parameters; 'o_headers' and 'o_vars' are sent as
-- @h:@- and @v:@-prefixed form fields respectively.
data Message = Message
  { from              :: EmailAddress
  , to                :: [EmailAddress]   -- ^ must be non-empty (see 'validateMessage')
  , cc                :: [EmailAddress]
  , bcc               :: [EmailAddress]
  , subject           :: Text
  , text              :: Text             -- ^ plain-text body
  , html              :: Maybe Text       -- ^ optional HTML body
  , o_tag             :: Maybe [Text]
  , o_campaign        :: Maybe Text
  , o_deliverytime    :: Maybe DateTime
  , o_dkim            :: Maybe YesOrNo
  , o_testmode        :: Maybe YesOrNo
  , o_tracking        :: Maybe YesOrNo
  , o_tracking_clicks :: Maybe TrackingClicks
  , o_tracking_opens  :: Maybe YesOrNo
  , o_headers         :: [(Text, Text)]
  , o_vars            :: [(Text, Text)]
  } deriving Show
-- | Serialise a 'Message' into the form fields Mailgun's messages
-- endpoint expects. Recipient lists are comma-joined; absent optional
-- fields are omitted entirely.
instance ToFormUrlEncoded Message where
  toFormUrlEncoded Message{..} =
      -- BUG FIX: 'tags' was computed below but never included here, so
      -- any o:tag values were silently dropped from the request.
      mconcat [basic, recipients, mhtml, tags, optional, headers, vars]
    where basic =
            [ ("from"   , from)
            , ("subject", subject)
            , ("text"   , text)
            ]
          recipients = foldr convertList []
            [ ("to" , to)
            , ("cc" , cc)
            , ("bcc", bcc)
            ]
          mhtml = maybe [] (\v -> [("html", v)]) html
          -- One "o:tag" field per tag (repeated keys are allowed).
          tags  = maybe [] (map (\v -> ("o:tag", v))) o_tag
          optional = foldr convertOptional []
            [ ("o:campaign"        , o_campaign)
            , ("o:deliverytime"    , tshow <$> o_deliverytime)
            , ("o:dkim"            , tshow <$> o_dkim)
            , ("o:testmode"        , tshow <$> o_testmode)
            , ("o:tracking"        , tshow <$> o_tracking)
            , ("o:tracking-clicks" , tshow <$> o_tracking_clicks)
            , ("o:tracking-opens"  , tshow <$> o_tracking_opens)
            ]
          headers = convert "h:" o_headers
          vars    = convert "v:" o_vars
          -- Prefix every key with the given namespace.
          convert str = fmap (first (str <>))
          -- Drop empty recipient lists; otherwise comma-join addresses.
          convertList (k, v) acc = case v of
            [] -> acc
            _  -> (k, intercalate ", " v) : acc
          -- Keep only the optional fields that are present.
          convertOptional (k, mv) acc = case mv of
            Nothing -> acc
            Just v  -> (k, tshow v) : acc
-- | A blank 'Message'; populate at least the fields checked by
-- 'validateMessage' before sending.
emptyMessage :: Message
emptyMessage = Message
  { from              = ""
  , to                = []
  , cc                = []
  , bcc               = []
  , subject           = ""
  , text              = ""
  , html              = Nothing
  , o_tag             = Nothing
  , o_campaign        = Nothing
  , o_deliverytime    = Nothing
  , o_dkim            = Nothing
  , o_testmode        = Nothing
  , o_tracking        = Nothing
  , o_tracking_clicks = Nothing
  , o_tracking_opens  = Nothing
  , o_headers         = []
  , o_vars            = []
  }
-- | Check the fields Mailgun requires to be non-empty, yielding either a
-- human-readable complaint or the message unchanged.
validateMessage :: Message -> Either Text Message
validateMessage msg@Message{..}
  | null from    = Left "\"from\" field must contain an email address"
  | null to      = Left "\"to\" field must contain at least one email address"
  | null subject = Left "\"subject\" field must not be empty"
  | null text    = Left "\"text\" field must not be empty"
  | otherwise    = Right msg

-- Plain aliases; no validation is implied by these types.
type EmailAddress = Text

-- newtype EmailAddress =
--   EmailAddress Text
--   deriving (Show, Generic)

-- instance ToJSON EmailAddress

type Account = Text
-- | Mailgun's reply to a successful send: the message id plus a
-- human-readable status line.
data MessageResponse = MessageResponse
  { responseId      :: Text
  , responseContent :: Text
  } deriving (Show)

instance FromJSON MessageResponse where
  parseJSON (Object o) =
    MessageResponse
      <$> o .: "id"
      <*> o .: "message"
  -- ROBUSTNESS FIX: previously a non-object input crashed with a
  -- pattern-match failure; report a proper parse error instead.
  parseJSON _ = fail "MessageResponse: expected a JSON object"
-- | @POST /v3/\<account\>/messages@ with Basic auth, a form-encoded
-- body, and a JSON response.
type MailgunAPI =
  Header "Authorization" EncodedAuthToken
  :> "v3"
  :> Capture "account" Account
  :> "messages"
  :> ReqBody '[FormUrlEncoded] Message
  :> Post '[JSON] MessageResponse

-- Debugging aid: a client endpoint whose "result" is the raw servant
-- 'Req', so a route can be inspected instead of executed.
data Debug

deriving instance Show Req

instance HasClient (Post a b :> Debug) where
  type Client (Post a b :> Debug) = Req
  clientWithRoute Proxy req baseurl = req

api :: Proxy MailgunAPI
api = Proxy

-- | Raw servant client against @api.mailgun.net@ over HTTPS.
message' :: Maybe EncodedAuthToken
         -> Account
         -> Message
         -> EitherT ServantError IO MessageResponse
message' = client api (BaseUrl Https "api.mailgun.net" 443)
-- | Send a message: encode the token for Basic auth and run the client
-- down to an 'Either' result.
message :: AuthToken
        -> Account
        -> Message
        -> IO (Either ServantError MessageResponse)
message token account msg = runEitherT $ message' (Just $ creds token) account msg
-- | Encode an API token for HTTP Basic auth: base64 of @api:<token>@.
creds :: AuthToken -> EncodedAuthToken
creds (AuthToken token) =
  let pair    = "api:" <> token
      encoded = decodeUtf8 . B64.encode . encodeUtf8 $ pair
  in EncodedAuthToken encoded
| ericnething/mailgun | src/Mailgun.hs | bsd-3-clause | 6,392 | 0 | 13 | 2,110 | 1,495 | 854 | 641 | -1 | -1 |
module Monad(
ServerEnv(..)
, ServerM
, newServerEnv
, runServerM
, runServerMIO
, serverMtoHandler
, AuthM(..)
, runAuth
) where
import Control.Monad.Base
import Control.Monad.Catch (MonadCatch, MonadThrow)
import Control.Monad.Except
import Control.Monad.Logger
import Control.Monad.Reader
import Control.Monad.Trans.Control
import Data.Monoid
import Database.Persist.Sql
import Servant.Server
import Servant.Server.Auth.Token.Config
import Servant.Server.Auth.Token.Model
import Servant.Server.Auth.Token.Persistent
import qualified Servant.Server.Auth.Token.Persistent.Schema as S
import Config
-- | Server private environment
data ServerEnv = ServerEnv {
-- | Configuration used to create the server
envConfig :: !ServerConfig
-- | Configuration of auth server
, envAuthConfig :: !AuthConfig
-- | DB pool
, envPool :: !ConnectionPool
}
-- | Create new server environment: open the DB pool, run migrations,
-- and ensure a default admin account exists.
newServerEnv :: MonadIO m => ServerConfig -> m ServerEnv
newServerEnv cfg = do
  let authConfig = defaultAuthConfig
  pool <- liftIO $ do
    pool <- createPool cfg
    -- run migrations
    flip runSqlPool pool $ runMigration S.migrateAll
    -- create default admin if missing one
    -- SECURITY NOTE(review): hard-coded admin/123456 credentials are
    -- created on first start; acceptable for an example, never for
    -- production.
    _ <- runPersistentBackendT authConfig pool $ ensureAdmin 17 "admin" "123456" "admin@localhost"
    return pool
  let env = ServerEnv {
        envConfig = cfg
      , envAuthConfig = authConfig
      , envPool = pool
      }
  return env
-- | Server monad that holds internal environment
newtype ServerM a = ServerM { unServerM :: ReaderT ServerEnv (LoggingT Handler) a }
  deriving (Functor, Applicative, Monad, MonadIO, MonadBase IO, MonadReader ServerEnv
    , MonadLogger, MonadLoggerIO, MonadThrow, MonadCatch, MonadError ServantErr)

-- Opaque wrapper for the captured monadic state of the transformer stack.
newtype StMServerM a = StMServerM { unStMServerM :: StM (ReaderT ServerEnv (LoggingT Handler)) a }

-- Hand-written because the state type must be named via 'StMServerM'.
instance MonadBaseControl IO ServerM where
  type StM ServerM a = StMServerM a
  liftBaseWith f = ServerM $ liftBaseWith $ \q -> f (fmap StMServerM . q . unServerM)
  restoreM = ServerM . restoreM . unStMServerM

-- | Lift servant monad to server monad
liftHandler :: Handler a -> ServerM a
liftHandler = ServerM . lift . lift
-- | Execution of 'ServerM'; log output goes to stdout.
runServerM :: ServerEnv -> ServerM a -> Handler a
runServerM e = runStdoutLoggingT . flip runReaderT e . unServerM

-- | Execution of 'ServerM' in IO monad; a servant error aborts via 'fail'.
runServerMIO :: ServerEnv -> ServerM a -> IO a
runServerMIO env m = do
  ea <- runHandler $ runServerM env m
  case ea of
    Left e -> fail $ "runServerMIO: " <> show e
    Right a -> return a

-- | Transformation to Servant 'Handler'
serverMtoHandler :: ServerEnv -> ServerM :~> Handler
serverMtoHandler e = NT (runServerM e)
-- | Special monad for authorisation actions
newtype AuthM a = AuthM { unAuthM :: PersistentBackendT IO a }
  deriving (Functor, Applicative, Monad, MonadIO, MonadError ServantErr, HasStorage, HasAuthConfig)

-- | Execution of authorisation actions that require 'AuthHandler' context:
-- runs the persistent auth backend against the pool from the environment.
runAuth :: AuthM a -> ServerM a
runAuth m = do
  cfg <- asks envAuthConfig
  pool <- asks envPool
  liftHandler $ Handler . ExceptT $ runPersistentBackendT cfg pool $ unAuthM m
| VyacheslavHashov/servant-auth-token | example/persistent/src/Monad.hs | bsd-3-clause | 3,158 | 0 | 13 | 592 | 814 | 438 | 376 | -1 | -1 |
module Llvm.Pass.PassTester where
import Llvm.Hir.Data
import Llvm.Query.HirCxt
import Llvm.Query.Hir
import qualified Compiler.Hoopl as H
import qualified Data.Set as Ds
-- | An optimisation pass over one function body: given the IR context,
-- shared data @a@, the entry label, and the body graph, produce a
-- rewritten graph plus pass-specific output @x@.
type Optimization m a g u x = IrCxt g -> a -> H.Label -> H.Graph (Node g u) H.C H.C -> m (H.Graph (Node g u) H.C H.C, x)

-- | Run an optimisation on one toplevel item; only function definitions
-- are transformed, every other toplevel yields no result.
opt :: (H.CheckpointMonad m, H.FuelMonad m) => IrCxt g -> a -> Optimization m a g u x -> Toplevel g u ->
       m [(Toplevel g u, (FunctionInterface g, x))]
opt dl gs f (ToplevelDefine (TlDefine fn entry graph)) =
  do { (graph', x) <- f dl gs entry graph
     ; return [(ToplevelDefine $ TlDefine fn entry graph', (fn, x))]
     }
opt _ _ _ _ = return []

-- | Apply an optimisation to every function in a module, collecting the
-- per-function outputs alongside the rewritten module.
optModule :: (Show g, Ord g, H.CheckpointMonad m, H.FuelMonad m) => Optimization m (Ds.Set (Dtype, g)) g u x -> Module g u ->
             m (Module g u, [(FunctionInterface g, x)])
optModule f (Module l) =
  let gs = globalIdOfModule (Module l)
      dl = irCxtOfModule (Module l)
  in mapM (opt dl gs f) l >>= \x -> let (lx, ly) = unzip $ concat x
                                    in return (Module lx, ly)
in return (Module lx, ly) | mlite/hLLVM | src/Llvm/Pass/PassTester.hs | bsd-3-clause | 1,071 | 0 | 14 | 277 | 526 | 277 | 249 | 20 | 1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Language.Nano.SSA.SSA (ssaTransform) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad
import qualified Data.HashMap.Strict as M
import Language.Nano.Types
import Language.Nano.Errors
import Language.Nano.Env
import Language.Nano.Typecheck.Types
import Language.Nano.SSA.SSAMonad
import Language.ECMAScript3.Syntax
import Language.ECMAScript3.Syntax.Annotations
import Language.ECMAScript3.PrettyPrint
import Language.ECMAScript3.Parser (SourceSpan (..))
import Language.Fixpoint.Misc
import Text.Printf (printf)
----------------------------------------------------------------------------------
ssaTransform :: (PP t) => Nano SourceSpan t -> Nano AnnSSA t
----------------------------------------------------------------------------------
-- | Run the SSA conversion; any SSA error becomes a fatal 'errorstar'.
ssaTransform = either (errorstar . snd) id . execute . ssaNano

----------------------------------------------------------------------------------
ssaNano :: (PP t) => Nano SourceSpan t -> SSAM (Nano AnnSSA t)
----------------------------------------------------------------------------------
-- | Seed the immutable-variable set from specs/defs/consts, convert all
-- top-level statements, then attach the collected facts to each span.
ssaNano p@(Nano {code = Src fs})
  = do addImmutables $ envMap (\_ -> ()) (specs p)
       addImmutables $ envMap (\_ -> ()) (defs p)
       addImmutables $ envMap (\_ -> ()) (consts p)
       (_,fs') <- ssaStmts fs -- mapM ssaFun fs
       anns    <- getAnns
       return $ p {code = Src $ (patchAnn anns <$>) <$> fs'}

-- stripAnn :: AnnBare -> SSAM SourceSpan
-- stripAnn (Ann l fs) = forM_ fs (addAnn l) >> return l

-- | Pair a source span with the facts recorded for it (empty if none).
patchAnn :: AnnInfo -> SourceSpan -> AnnSSA
patchAnn m l = Ann l $ M.lookupDefault [] l m
-------------------------------------------------------------------------------------
ssaFun :: FunctionStatement SourceSpan -> SSAM (FunctionStatement SourceSpan)
-------------------------------------------------------------------------------------
-- | Convert one function body, saving and restoring the enclosing SSA
-- environment and immutable set around the conversion.
-- NOTE(review): only the 'FunctionStmt' shape is handled; any other
-- constructor would be a pattern-match failure.
ssaFun (FunctionStmt l f xs body)
  = do θ    <- getSsaEnv
       imms <- getImmutables
       addImmutables $ envMap (\_ -> ()) θ             -- Variables from OUTER scope are IMMUTABLE
       setSsaEnv     $ extSsaEnv ((returnId l) : xs) θ -- Extend SsaEnv with formal binders
       (_, body')   <- ssaStmts body                   -- Transform function
       setSsaEnv θ                                     -- Restore Outer SsaEnv
       setImmutables imms                              -- Restore Outer Immutables
       return        $ FunctionStmt l f xs body'
-------------------------------------------------------------------------------------
ssaSeq :: (a -> SSAM (Bool, a)) -> [a] -> SSAM (Bool, [a])
-------------------------------------------------------------------------------------
-- | Thread a transformer over a sequence. The Bool tracks whether
-- control can fall through; once an element reports False, the remaining
-- elements are left untransformed.
ssaSeq f = go True
  where
    go False zs     = return (False, zs)
    go b     []     = return (b    , [])
    go True  (x:xs) = do (b , y)  <- f x
                         (b', ys) <- go b xs
                         return      (b', y:ys)

-------------------------------------------------------------------------------------
ssaStmts :: [Statement SourceSpan] -> SSAM (Bool, [Statement SourceSpan])
-------------------------------------------------------------------------------------
ssaStmts = ssaSeq ssaStmt
-------------------------------------------------------------------------------------
ssaStmt :: Statement SourceSpan -> SSAM (Bool, Statement SourceSpan)
-------------------------------------------------------------------------------------
-- | Convert one statement. The returned Bool is True iff control may
-- fall through to the next statement (False after a return).

-- skip
ssaStmt s@(EmptyStmt _)
  = return (True, s)

-- x = e
ssaStmt (ExprStmt l1 (AssignExpr l2 OpAssign (LVar l3 x) e))
  = do (x', e') <- ssaAsgn l2 (Id l3 x) e
       -- Assignments become single-assignment variable declarations.
       return (True, VarDeclStmt l1 [VarDecl l2 x' (Just e')])

-- e
ssaStmt (ExprStmt l e)
  = do e' <- ssaExpr e
       return (True, ExprStmt l e')

-- s1;s2;...;sn
ssaStmt (BlockStmt l stmts)
  = do (b, stmts') <- ssaStmts stmts
       return (b, BlockStmt l stmts')

-- if b { s1 }
ssaStmt (IfSingleStmt l b s)
  = ssaStmt (IfStmt l b s (EmptyStmt l))

-- if b { s1 } else { s2 }
ssaStmt (IfStmt l e s1 s2)
  = do e'           <- ssaExpr e
       θ            <- getSsaEnv
       (θ1, s1')    <- ssaWith θ ssaStmt s1
       (θ2, s2')    <- ssaWith θ ssaStmt s2
       -- φ-assignments spliced into each branch reconcile the two
       -- branch environments.
       (θ', φ1, φ2) <- envJoin l θ1 θ2
       let stmt'     = IfStmt l e' (splice s1' φ1) (splice s2' φ2)
       case θ' of
         Just θ'' -> setSsaEnv θ'' >> return (True,  stmt')
         Nothing  ->                  return (False, stmt')

-- var x1 [ = e1 ]; ... ; var xn [= en];
ssaStmt (VarDeclStmt l ds)
  = do (_, ds') <- ssaSeq ssaVarDecl ds
       return (True, VarDeclStmt l ds')

-- return
ssaStmt s@(ReturnStmt _ Nothing)
  = return (False, s)

-- return e
ssaStmt (ReturnStmt l (Just e))
  = do e' <- ssaExpr e
       return (False, ReturnStmt l (Just e'))

-- function f(...){ s }
ssaStmt s@(FunctionStmt _ _ _ _)
  = (True,) <$> ssaFun s

-- OTHER (Not handled)
ssaStmt s
  = convertError "ssaStmt" s
-------------------------------------------------------------------------------------
splice :: Statement SourceSpan -> Maybe (Statement SourceSpan) -> Statement SourceSpan
-------------------------------------------------------------------------------------
-- | Append an optional (phi-assignment) statement after @s@; Nothing is a no-op.
splice s Nothing = s
splice s (Just s') = seqStmt (getAnnotation s) s s'
-- Sequence two statements, merging adjacent blocks to avoid needless nesting.
seqStmt _ (BlockStmt l s) (BlockStmt _ s') = BlockStmt l (s ++ s')
seqStmt l s s' = BlockStmt l [s, s']
-------------------------------------------------------------------------------------
ssaWith :: SsaEnv -> (a -> SSAM (Bool, a)) -> a -> SSAM (Maybe SsaEnv, a)
-------------------------------------------------------------------------------------
-- | Run a transformation starting from the given environment, returning the
-- resulting environment only when control can fall through (flag is True).
ssaWith θ f x
  = do setSsaEnv θ
       (b, x') <- f x
       (, x') <$> (if b then Just <$> getSsaEnv else return Nothing)
-------------------------------------------------------------------------------------
ssaExpr :: Expression SourceSpan -> SSAM (Expression SourceSpan)
-------------------------------------------------------------------------------------
-- | Rename variable references to their current SSA name. Literals pass
-- through; unbound identifiers are only accepted when immutable (i.e. bound
-- in an outer scope); anything unhandled is a conversion error.
ssaExpr e@(IntLit _ _)
  = return e
ssaExpr e@(BoolLit _ _)
  = return e
ssaExpr e@(VarRef l x)
  = do imm <- isImmutable x
       xo <- findSsaEnv x
       case xo of
         Just z -> return $ VarRef l z
         Nothing -> if imm
                      then return e -- outer-scope variable: keep original name
                      else ssaError (srcPos x) $ errorUnboundId x
                      -- errorUnboundIdEnv x ns
ssaExpr (PrefixExpr l o e)
  = PrefixExpr l o <$> ssaExpr e
ssaExpr (InfixExpr l o e1 e2)
  = InfixExpr l o <$> ssaExpr e1 <*> ssaExpr e2
ssaExpr (CallExpr l e es)
  = CallExpr l <$> ssaExpr e <*> mapM ssaExpr es
ssaExpr (BracketRef l e1 e2)
  = BracketRef l <$> ssaExpr e1 <*> ssaExpr e2
ssaExpr e
  = convertError "ssaExpr" e
-------------------------------------------------------------------------------------
ssaVarDecl :: VarDecl SourceSpan -> SSAM (Bool, VarDecl SourceSpan)
-------------------------------------------------------------------------------------
-- | SSA-transform a variable declaration: an initialized declaration is
-- handled like an assignment (fresh name for the binder). Declarations
-- without an initializer are not supported and abort with an error.
ssaVarDecl (VarDecl l x (Just e))
  = do (x', e') <- ssaAsgn l x e
       return (True, VarDecl l x' (Just e'))
-- The former unused @z\@@ as-binding was dropped (it only triggered an
-- unused-binding warning); the error message is unchanged.
ssaVarDecl (VarDecl l x Nothing)
  = errorstar $ printf "Cannot handle ssaVarDECL %s at %s" (ppshow x) (ppshow l)
------------------------------------------------------------------------------------
ssaAsgn :: SourceSpan -> Id SourceSpan -> Expression SourceSpan
        -> SSAM (Id SourceSpan, Expression SourceSpan)
------------------------------------------------------------------------------------
-- | SSA-transform an assignment @x = e@. The RHS is transformed *before* the
-- LHS is given a fresh name, so occurrences of @x@ inside @e@ still refer to
-- the previous SSA binding.
ssaAsgn l x e
  = do e' <- ssaExpr e
       x' <- updSsaEnv l x
       return (x', e')
-------------------------------------------------------------------------------------
envJoin :: SourceSpan -> Maybe SsaEnv -> Maybe SsaEnv
        -> SSAM ( Maybe SsaEnv
                , Maybe (Statement SourceSpan)
                , Maybe (Statement SourceSpan) )
-------------------------------------------------------------------------------------
-- | Join the environments of two branches. @Nothing@ marks a branch that does
-- not fall through; when only one branch survives, its environment wins and no
-- phi-assignments are needed. When both survive, variables bound to different
-- SSA names get a fresh phi name plus one assignment statement per branch.
envJoin _ Nothing Nothing = return (Nothing, Nothing, Nothing)
envJoin _ Nothing (Just θ) = return (Just θ , Nothing, Nothing)
envJoin _ (Just θ) Nothing = return (Just θ , Nothing, Nothing)
envJoin l (Just θ1) (Just θ2) = envJoin' l θ1 θ2
envJoin' l θ1 θ2
  = do setSsaEnv θ' -- Keep Common binders
       stmts <- forM phis $ phiAsgn l -- Adds Phi-Binders, Phi Annots, Return Stmts
       θ'' <- getSsaEnv
       let (s1,s2) = unzip stmts
       return (Just θ'', Just $ BlockStmt l s1, Just $ BlockStmt l s2)
    where
      -- Right = same name in both branches (kept); Left = conflicting names.
      θ = envIntersectWith meet θ1 θ2
      θ' = envRights θ
      phis = envToList $ envLefts θ
      meet = \x1 x2 -> if x1 == x2 then Right x1 else Left (x1, x2)
      phiAsgn l (x, (SI x1, SI x2))
        = do x' <- updSsaEnv l x -- Generate FRESH phi name
             addAnn l (PhiVar x') -- RECORD x' as PHI-Var at l
             let s1 = mkPhiAsgn l x' x1 -- Create Phi-Assignments
             let s2 = mkPhiAsgn l x' x2 -- for both branches
             return $ (s1, s2)
        where
          mkPhiAsgn l x y = VarDeclStmt l [VarDecl l x (Just $ VarRef l y)]
| UCSD-PL/nano-js | Language/Nano/SSA/SSA.hs | bsd-3-clause | 9,512 | 0 | 14 | 2,472 | 2,718 | 1,387 | 1,331 | 154 | 3 |
module Main where
import Ivory.Tower.Config
import Ivory.OS.FreeRTOS.Tower.STM32
import BSP.Tests.Platforms
import BSP.Tests.UART.TestApp (app)
-- | Build the UART test application for the STM32 test platform; the
-- platform parameters are read from the tower config file via 'getConfig'.
main :: IO ()
main = compileTowerSTM32FreeRTOS testplatform_stm32 p $
  app testplatform_leds
      (stm32config_clock . testplatform_stm32)
      testplatform_uart
  where
    p topts = getConfig topts testPlatformParser
| GaloisInc/ivory-tower-stm32 | ivory-bsp-tests/tests/UARTTest.hs | bsd-3-clause | 386 | 0 | 8 | 75 | 89 | 51 | 38 | 11 | 1 |
{-# LANGUAGE CPP #-}
----------------------------------------------------------------------------
-- |
-- Module : XMonad.Util.Font
-- Description : A module for abstracting a font facility over Core fonts and Xft.
-- Copyright : (c) 2007 Andrea Rossato and Spencer Janssen
-- License : BSD-style (see xmonad/LICENSE)
--
-- Maintainer : andrea.rossato@unibz.it
-- Stability : unstable
-- Portability : unportable
--
-- A module for abstracting a font facility over Core fonts and Xft
--
-----------------------------------------------------------------------------
module XMonad.Util.Font
( -- * Usage:
-- $usage
XMonadFont(..)
, initXMF
, releaseXMF
, initCoreFont
, releaseCoreFont
, initUtf8Font
, releaseUtf8Font
, Align (..)
, stringPosition
, textWidthXMF
, textExtentsXMF
, printStringXMF
, stringToPixel
, pixelToString
, fi
) where
import XMonad
import XMonad.Prelude
import Foreign
import Control.Exception as E
import Text.Printf (printf)
#ifdef XFT
import qualified Data.List.NonEmpty as NE
import Graphics.X11.Xrender
import Graphics.X11.Xft
#endif
-- Hide the Core Font/Xft switching here
-- | A font handle: a classic X11 core font, a UTF-8 font set, or (when the
-- XFT flag is enabled) a non-empty list of Xft fonts used as fallbacks.
data XMonadFont = Core FontStruct
                | Utf8 FontSet
#ifdef XFT
                | Xft (NE.NonEmpty XftFont)
#endif
-- $usage
-- See "XMonad.Layout.Tabbed" or "XMonad.Prompt" for usage examples
-- | Get the Pixel value for a named color: if an invalid name is
-- given the black pixel will be returned.
stringToPixel :: (Functor m, MonadIO m) => Display -> String -> m Pixel
stringToPixel d s = fromMaybe fallBack <$> io getIt
  where getIt = initColor d s
        fallBack = blackPixel d (defaultScreen d)
-- | Convert a @Pixel@ into a @String@ of the form @#rrggbb@.
pixelToString :: (MonadIO m) => Display -> Pixel -> m String
pixelToString d p = do
  let cm = defaultColormap d (defaultScreen d)
  -- queryColor resolves the pixel against the default colormap.
  (Color _ r g b _) <- io (queryColor d cm $ Color p 0 0 0 0)
  return ("#" ++ hex r ++ hex g ++ hex b)
  where
    -- NOTE: The @Color@ type has 16-bit values for red, green, and
    -- blue, even though the actual type in X is only 8 bits wide. It
    -- seems that the upper and lower 8-bit sections of the @Word16@
    -- values are the same. So, we just discard the lower 8 bits.
    hex = printf "%02x" . (`shiftR` 8)
-- | Ignore an 'IOException' and produce the first argument; handy as an
-- exception handler that substitutes a fallback value.
econst :: a -> IOException -> a
econst x _ = x
-- | Given a fontname returns the font structure. If the font name is
-- not valid the default font will be loaded and returned.
initCoreFont :: String -> X FontStruct
initCoreFont s = do
  d <- asks display
  io $ E.catch (getIt d) (fallBack d)
  where getIt d = loadQueryFont d s
        -- fall back to a fixed misc font when the requested one is missing
        fallBack d = econst $ loadQueryFont d "-misc-fixed-*-*-*-*-10-*-*-*-*-*-*-*"
-- | Free a core font previously obtained with 'initCoreFont'.
releaseCoreFont :: FontStruct -> X ()
releaseCoreFont fs = do
  d <- asks display
  io $ freeFont d fs
-- | Load a UTF-8 font set, falling back to a fixed misc font on failure.
initUtf8Font :: String -> X FontSet
initUtf8Font s = do
  d <- asks display
  (_,_,fs) <- io $ E.catch (getIt d) (fallBack d)
  return fs
  where getIt d = createFontSet d s
        fallBack d = econst $ createFontSet d "-misc-fixed-*-*-*-*-10-*-*-*-*-*-*-*"
-- | Free a font set previously obtained with 'initUtf8Font'.
releaseUtf8Font :: FontSet -> X ()
releaseUtf8Font fs = do
  d <- asks display
  io $ freeFontSet d fs
-- | When initXMF gets a font name that starts with 'xft:' it switches to the Xft backend
-- Example: 'xft: Sans-10'
-- Several comma-separated Xft fonts may be given; later ones act as fallbacks.
initXMF :: String -> X XMonadFont
initXMF s =
#ifndef XFT
  Utf8 <$> initUtf8Font s
#else
  if xftPrefix `isPrefixOf` s then
    do dpy <- asks display
       let fonts = case wordsBy (== ',') (drop (length xftPrefix) s) of
             -- an empty list after the prefix defaults to monospace
             [] -> "xft:monospace" :| [] -- NE.singleton only in base 4.15
             (x : xs) -> x :| xs
       Xft <$> io (traverse (openFont dpy) fonts)
  else Utf8 <$> initUtf8Font s
  where
    xftPrefix = "xft:"
    openFont dpy str = xftFontOpen dpy (defaultScreenOfDisplay dpy) str
    -- split on a separator predicate, like 'words' but generalized
    wordsBy p str = case dropWhile p str of
      "" -> []
      str' -> w : wordsBy p str''
        where (w, str'') = break p str'
#endif
-- | Release the resources held by an 'XMonadFont', whichever backend it uses.
releaseXMF :: XMonadFont -> X ()
#ifdef XFT
releaseXMF (Xft xftfonts) = do
  dpy <- asks display
  io $ mapM_ (xftFontClose dpy) xftfonts
#endif
releaseXMF (Utf8 fs) = releaseUtf8Font fs
releaseXMF (Core fs) = releaseCoreFont fs
-- | Width in pixels that the string would occupy when drawn with the font.
textWidthXMF :: MonadIO m => Display -> XMonadFont -> String -> m Int
textWidthXMF _ (Utf8 fs) s = return $ fi $ wcTextEscapement fs s
textWidthXMF _ (Core fs) s = return $ fi $ textWidth fs s
#ifdef XFT
textWidthXMF dpy (Xft xftdraw) s = liftIO $ do
#if MIN_VERSION_X11_xft(0, 3, 4)
  gi <- xftTextAccumExtents dpy (toList xftdraw) s
#else
  gi <- xftTextExtents dpy (NE.head xftdraw) s
#endif
  return $ xglyphinfo_xOff gi
#endif
-- | Ascent and descent (in pixels) of the string rendered with the font.
textExtentsXMF :: MonadIO m => XMonadFont -> String -> m (Int32,Int32)
textExtentsXMF (Utf8 fs) s = do
  -- rect_y is negative above the baseline, hence the negation for ascent
  let (_,rl) = wcTextExtents fs s
      ascent = fi $ - (rect_y rl)
      descent = fi $ rect_height rl + fi (rect_y rl)
  return (ascent, descent)
textExtentsXMF (Core fs) s = do
  let (_,a,d,_) = textExtents fs s
  return (a,d)
#ifdef XFT
#if MIN_VERSION_X11_xft(0, 3, 4)
textExtentsXMF (Xft xftfonts) _ = io $ do
  ascent <- fi <$> xftfont_max_ascent xftfonts
  descent <- fi <$> xftfont_max_descent xftfonts
#else
textExtentsXMF (Xft xftfonts) _ = io $ do
  ascent <- fi <$> xftfont_ascent (NE.head xftfonts)
  descent <- fi <$> xftfont_descent (NE.head xftfonts)
#endif
  return (ascent, descent)
#endif
-- | String position
data Align = AlignCenter | AlignRight | AlignLeft | AlignRightOffset Int
    deriving (Show, Read)
-- | Return the string x and y 'Position' in a 'Rectangle', given a
-- 'FontStruct' and the 'Align'ment
stringPosition :: (Functor m, MonadIO m) => Display -> XMonadFont -> Rectangle -> Align -> String -> m (Position,Position)
stringPosition dpy fs (Rectangle _ _ w h) al s = do
  width <- textWidthXMF dpy fs s
  (a,d) <- textExtentsXMF fs s
  -- vertically center the baseline inside the rectangle
  let y = fi $ ((h - fi (a + d)) `div` 2) + fi a;
      x = case al of
            AlignCenter -> fi (w `div` 2) - fi (width `div` 2)
            AlignLeft -> 1
            AlignRight -> fi (w - (fi width + 1));
            AlignRightOffset offset -> fi (w - (fi width + 1)) - fi offset;
  return (x,y)
-- | Draw a string at the given position with foreground/background color
-- names, dispatching on the font backend. For Xft the background rectangle
-- is filled manually since Xft has no image-string primitive.
printStringXMF :: (Functor m, MonadIO m) => Display -> Drawable -> XMonadFont -> GC -> String -> String
               -> Position -> Position -> String -> m ()
printStringXMF d p (Core fs) gc fc bc x y s = io $ do
  setFont d gc $ fontFromFontStruct fs
  [fc',bc'] <- mapM (stringToPixel d) [fc,bc]
  setForeground d gc fc'
  setBackground d gc bc'
  drawImageString d p gc x y s
printStringXMF d p (Utf8 fs) gc fc bc x y s = io $ do
  [fc',bc'] <- mapM (stringToPixel d) [fc,bc]
  setForeground d gc fc'
  setBackground d gc bc'
  io $ wcDrawImageString d p fs gc x y s
#ifdef XFT
printStringXMF dpy drw fs@(Xft fonts) gc fc bc x y s = do
  let screen = defaultScreenOfDisplay dpy
      colormap = defaultColormapOfScreen screen
      visual = defaultVisualOfScreen screen
  bcolor <- stringToPixel dpy bc
  (a,d) <- textExtentsXMF fs s
#if MIN_VERSION_X11_xft(0, 3, 4)
  gi <- io $ xftTextAccumExtents dpy (toList fonts) s
#else
  gi <- io $ xftTextExtents dpy (NE.head fonts) s
#endif
  -- paint the background rectangle covering the rendered extents
  io $ setForeground dpy gc bcolor
  io $ fillRectangle dpy drw gc (x - fi (xglyphinfo_x gi))
                                (y - fi a)
                                (fi $ xglyphinfo_xOff gi)
                                (fi $ a + d)
  io $ withXftDraw dpy drw visual colormap $
       \draw -> withXftColorName dpy visual colormap fc $
#if MIN_VERSION_X11_xft(0, 3, 4)
       \color -> xftDrawStringFallback draw color (toList fonts) (fi x) (fi y) s
#else
       \color -> xftDrawString draw color (NE.head fonts) x y s
#endif
#endif
| xmonad/xmonad-contrib | XMonad/Util/Font.hs | bsd-3-clause | 7,770 | 0 | 19 | 1,938 | 2,088 | 1,073 | 1,015 | 102 | 4 |
module Prednote.Expressions.Infix
( InfixToken (..)
, Paren(..)
, createRPN
) where
import qualified Prednote.Expressions.RPN as R
import qualified Data.Foldable as Fdbl
-- | A token of the infix expression language: either a token shared with the
-- RPN language, or a parenthesis, which only guides the shunting-yard pass.
data InfixToken f a
  = TokRPN (R.RPNToken f a)
  | TokParen Paren
data Paren = Open | Close
-- | Values on the operator stack.
data OpStackVal
  = StkOp R.Operator
  | StkOpenParen
-- In the shunting yard algorithm, the output sequence is a queue. The
-- first values to go into the output sequence are the first to be
-- processed by the RPN parser. In this module, the output sequence is
-- implemented as a list stack, which means it must be reversed upon
-- output (this is done in the createRPN function.)
-- | One shunting-yard step: feed a single infix token into the
-- (operator stack, reversed output) state. Fails only when a close
-- parenthesis has no matching open parenthesis.
processInfixToken
  :: ([OpStackVal], [R.RPNToken f a])
  -> InfixToken f a
  -> Maybe ([OpStackVal], [R.RPNToken f a])
processInfixToken (os, ts) t = case t of
  TokRPN tok -> return $ processRPNToken (os, ts) tok
  TokParen p -> processParen (os, ts) p
-- | If the token is a binary operator A, then:
--
-- If A is left associative, while there is an operator B of higher or
-- equal precedence than A at the top of the stack, pop B off the
-- stack and append it to the output.
--
-- If A is right associative, while there is an operator B of higher
-- precedence than A at the top of the stack, pop B off the stack and
-- append it to the output.
--
-- Push A onto the stack.
--
-- If a token is an operand, append it to the postfix output.
--
-- And has higher precedence than Or.
processRPNToken
  :: ([OpStackVal], [R.RPNToken f a])
  -> R.RPNToken f a
  -> ([OpStackVal], [R.RPNToken f a])
processRPNToken (os, ts) t = case t of
  p@(R.TokOperand _) -> (os, p:ts)
  R.TokOperator d -> case d of
    R.OpNot -> (StkOp R.OpNot : os, ts)
    R.OpAnd -> (StkOp R.OpAnd : os, ts)
    -- Or binds loosest, so pending Ands are flushed to the output first.
    R.OpOr ->
      let (os', ts') = popper os ts
      in (StkOp R.OpOr : os', ts')
-- | Pop operators from the operator stack onto the output queue for as long
-- as an And operator sits on top of the stack; anything else stops the loop.
popper :: [OpStackVal] -> [R.RPNToken f a] -> ([OpStackVal], [R.RPNToken f a])
popper (StkOp R.OpAnd : rest) out = popper rest (R.TokOperator R.OpAnd : out)
popper stack out = (stack, out)
-- | Pop operators off the operator stack onto the output until the first open
-- parenthesis, which is popped as well but discarded rather than emitted.
-- Fails when the stack holds no open parenthesis (mismatched parentheses).
popThroughOpen
  :: ([OpStackVal], [R.RPNToken f a])
  -> Maybe ([OpStackVal], [R.RPNToken f a])
popThroughOpen ([], _) = Nothing
popThroughOpen (StkOpenParen : rest, out) = return (rest, out)
popThroughOpen (StkOp op : rest, out) =
  popThroughOpen (rest, R.TokOperator op : out)
-- | An open parenthesis is pushed onto the operator stack; a close
-- parenthesis pops operators through the matching open parenthesis,
-- failing when there is none.
processParen
  :: ([OpStackVal], [R.RPNToken f a])
  -> Paren
  -> Maybe ([OpStackVal], [R.RPNToken f a])
processParen (ops, out) Open = Just (StkOpenParen : ops, out)
processParen state Close = popThroughOpen state
-- | Creates an RPN expression from an infix one. Fails only if there
-- are mismatched parentheses. It is possible to create a nonsensical
-- RPN expression; the RPN parser must catch this.
createRPN
  :: Fdbl.Foldable f
  => f (InfixToken m a)
  -- ^ The input tokens, with the beginning of the expression on the
  -- left side of the sequence.
  -> Maybe [R.RPNToken m a]
  -- ^ The output sequence of tokens, with the beginning of the
  -- expression on the left side of the list.
createRPN ts = do
  (stack, toks) <- Fdbl.foldlM processInfixToken ([], []) ts
  -- the output was accumulated in reverse (list used as a stack)
  fmap reverse $ popRemainingOperators stack toks
-- | Drain the remaining operator stack into the output. A leftover open
-- parenthesis means the input had unbalanced parentheses, so this fails.
popRemainingOperators :: [OpStackVal] -> [R.RPNToken f a] -> Maybe [R.RPNToken f a]
popRemainingOperators [] out = return out
popRemainingOperators (StkOpenParen : _) _ = Nothing
popRemainingOperators (StkOp op : rest) out =
  popRemainingOperators rest (R.TokOperator op : out)
| massysett/prednote | lib/Prednote/Expressions/Infix.hs | bsd-3-clause | 4,262 | 0 | 17 | 913 | 1,059 | 583 | 476 | 69 | 4 |
{-# LINE 1 "GHC.IO.BufferedIO.hs" #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.BufferedIO
-- Copyright : (c) The University of Glasgow 2008
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- Class of buffered IO devices
--
-----------------------------------------------------------------------------
module GHC.IO.BufferedIO (
BufferedIO(..),
readBuf, readBufNonBlocking, writeBuf, writeBufNonBlocking
) where
import GHC.Base
import GHC.Ptr
import Data.Word
import GHC.Num
import GHC.IO.Device as IODevice
import GHC.IO.Device as RawIO
import GHC.IO.Buffer
-- | The purpose of 'BufferedIO' is to provide a common interface for I/O
-- devices that can read and write data through a buffer. Devices that
-- implement 'BufferedIO' include ordinary files, memory-mapped files,
-- and bytestrings. The underlying device implementing a 'Handle' must
-- provide 'BufferedIO'.
--
class BufferedIO dev where
  -- | allocate a new buffer. The size of the buffer is at the
  -- discretion of the device; e.g. for a memory-mapped file the
  -- buffer will probably cover the entire file.
  newBuffer :: dev -> BufferState -> IO (Buffer Word8)
  -- | reads bytes into the buffer, blocking if there are no bytes
  -- available. Returns the number of bytes read (zero indicates
  -- end-of-file), and the new buffer.
  fillReadBuffer :: dev -> Buffer Word8 -> IO (Int, Buffer Word8)
  -- | reads bytes into the buffer without blocking. Returns the
  -- number of bytes read (Nothing indicates end-of-file), and the new
  -- buffer.
  fillReadBuffer0 :: dev -> Buffer Word8 -> IO (Maybe Int, Buffer Word8)
  -- | Prepares an empty write buffer. This lets the device decide
  -- how to set up a write buffer: the buffer may need to point to a
  -- specific location in memory, for example. This is typically used
  -- by the client when switching from reading to writing on a
  -- buffered read/write device.
  --
  -- There is no corresponding operation for read buffers, because before
  -- reading the client will always call 'fillReadBuffer'.
  emptyWriteBuffer :: dev -> Buffer Word8 -> IO (Buffer Word8)
  -- default: reset the pointers in place and flip the buffer state
  emptyWriteBuffer _dev buf
    = return buf{ bufL=0, bufR=0, bufState = WriteBuffer }
  -- | Flush all the data from the supplied write buffer out to the device.
  -- The returned buffer should be empty, and ready for writing.
  flushWriteBuffer :: dev -> Buffer Word8 -> IO (Buffer Word8)
  -- | Flush data from the supplied write buffer out to the device
  -- without blocking. Returns the number of bytes written and the
  -- remaining buffer.
  flushWriteBuffer0 :: dev -> Buffer Word8 -> IO (Int, Buffer Word8)
-- for an I/O device, these operations will perform reading/writing
-- to/from the device.
-- for a memory-mapped file, the buffer will be the whole file in
-- memory. fillReadBuffer sets the pointers to encompass the whole
-- file, and flushWriteBuffer needs to do no I/O. A memory-mapped
-- file has to maintain its own file pointer.
-- for a bytestring, again the buffer should match the bytestring in
-- memory.
-- ---------------------------------------------------------------------------
-- Low-level read/write to/from buffers
-- These operations make it easy to implement an instance of 'BufferedIO'
-- for an object that supports 'RawIO'.
-- | Blocking read into the free space of the buffer; returns the byte count
-- (zero means end-of-file) and the buffer with its write pointer advanced.
readBuf :: RawIO dev => dev -> Buffer Word8 -> IO (Int, Buffer Word8)
readBuf dev bbuf = do
  let bytes = bufferAvailable bbuf
  res <- withBuffer bbuf $ \ptr ->
           RawIO.read dev (ptr `plusPtr` bufR bbuf) bytes
  return (res, bbuf{ bufR = bufR bbuf + res })
  -- zero indicates end of file
-- | Non-blocking variant of 'readBuf'.
readBufNonBlocking :: RawIO dev => dev -> Buffer Word8
                   -> IO (Maybe Int, -- Nothing ==> end of file
                                     -- Just n  ==> n bytes were read (n>=0)
                          Buffer Word8)
readBufNonBlocking dev bbuf = do
  let bytes = bufferAvailable bbuf
  res <- withBuffer bbuf $ \ptr ->
           IODevice.readNonBlocking dev (ptr `plusPtr` bufR bbuf) bytes
  case res of
    Nothing -> return (Nothing, bbuf)
    Just n -> return (Just n, bbuf{ bufR = bufR bbuf + n })
-- | Blocking write of the buffer's contents; the returned buffer is empty.
writeBuf :: RawIO dev => dev -> Buffer Word8 -> IO (Buffer Word8)
writeBuf dev bbuf = do
  let bytes = bufferElems bbuf
  withBuffer bbuf $ \ptr ->
    IODevice.write dev (ptr `plusPtr` bufL bbuf) bytes
  return bbuf{ bufL=0, bufR=0 }
-- XXX ToDo
-- | Non-blocking write; returns how many bytes were written and the buffer
-- with its read pointer advanced past the written prefix.
writeBufNonBlocking :: RawIO dev => dev -> Buffer Word8 -> IO (Int, Buffer Word8)
writeBufNonBlocking dev bbuf = do
  let bytes = bufferElems bbuf
  res <- withBuffer bbuf $ \ptr ->
           IODevice.writeNonBlocking dev (ptr `plusPtr` bufL bbuf) bytes
  return (res, bufferAdjustL (bufL bbuf + res) bbuf)
| phischu/fragnix | builtins/base/GHC.IO.BufferedIO.hs | bsd-3-clause | 5,018 | 0 | 15 | 1,073 | 848 | 461 | 387 | 50 | 2 |
module Language.Pankti.Parser (
parsePktFile
) where
import Text.Parsec
import Text.Parsec.String
import Text.Parsec.Token
import Control.Applicative hiding (many, (<|>))
import Language.Pankti.AST
-- | Parse .pkt Pankti file
parsePktFile :: FilePath -> IO (Either ParseError Pankti)
parsePktFile = parseFromFile parsePankti
-- | Parse Pankti to tree: a package declaration, then imports, then classes.
parsePankti :: Parser Pankti
parsePankti = do string "package" <* space
                 package <- many1 $ alphaNum <|> char '.'
                 many1 newline
                 imports <- parseImport `sepEndBy` (many1 newline)
                 classes <- parseClass `sepEndBy1` (many1 newline)
                 return $ Pankti package imports classes
-- | Parse an import line, yielding the dotted package name after the keyword.
parseImport :: Parser String
parseImport = string "import" *> space *> many1 (alphaNum <|> char '.')
-- Class related functions ----------------------
-- | Parse a class: the keyword, a name, then newline-separated statements.
parseClass :: Parser Class
parseClass = do string "class" <* space
                name <- many1 alphaNum
                many1 newline
                statements <- parseStatement `sepEndBy` (many1 newline)
                return $ Class name statements
-- | Parse top-level Statement (indented one tab inside the class).
parseStatement :: Parser Statement
parseStatement = do tab
                    parseFuncDecl <|> parseVarDecl
-- Function related functions -------------------
-- | Parse top-level-only FuncDecl: @returnType name :: params@ followed by
-- newline-separated body statements.
parseFuncDecl :: Parser Statement
parseFuncDecl = do returnType <- parseReturnType
                   space
                   name <- many1 alphaNum
                   space *> string "::" *> space
                   params <- parseParameter `sepBy` (char ',')
                   newline
                   body <- parseFuncDeclBody `sepEndBy1` newline
                   return $ FuncDecl returnType name params body
-- | Parse ReturnType. An unrecognized keyword is now reported as a parse
-- error via 'unexpected' instead of crashing at runtime with a
-- pattern-match failure (the original case expression was non-exhaustive).
parseReturnType :: Parser DataType
parseReturnType = do keyword <- many1 letter
                     case keyword of
                       "void" -> return Void
                       "int" -> return Integer
                       "float" -> return Float
                       "double" -> return Double
                       "char" -> return Char
                       "string" -> return String
                       "bool" -> return Bool
                       _ -> unexpected ("return type " ++ show keyword)
-- | Parse Parameter: a data type, a space, and the parameter name.
parseParameter :: Parser Parameter
parseParameter = do dataType <- parseDataType
                    space
                    name <- many1 alphaNum
                    return (dataType, name)
-- | Parse one statement of a FuncDecl body. Currently only variable
-- declarations are supported; the redundant @do@ wrapper was removed.
parseFuncDeclBody :: Parser Statement
parseFuncDeclBody = parseVarDecl
-- VAR related functions ------------------------
-- | Parse VarDecl: two tabs, a type, a name and an initializer, casting the
-- textual value to the matching literal constructor.
parseVarDecl :: Parser Statement
parseVarDecl = do tab *> tab
                  varType <- parseDataType
                  space
                  name <- many1 alphaNum
                  value <- try parseVarDeclValue
                  -- NOTE(review): 'read' is partial — a malformed literal
                  -- crashes rather than producing a parse error; consider
                  -- readMaybe. The case has no Void arm; presumably
                  -- parseDataType never yields Void — confirm.
                  let castVal = case varType of
                        Integer -> Int (read value :: Integer)
                        Float -> Flo (read value :: Float)
                        Double -> Dou (read value :: Double)
                        Char -> Cha (read value :: Char)
                        String -> Str value
                        Bool -> Boo (read value :: Bool)
                  return $ VarDecl varType name castVal
-- | Parse value in VarDecl: everything after @=@ up to the end of the line.
parseVarDeclValue :: Parser String
parseVarDeclValue = do space *> char '=' *> space
                       value <- manyTill anyChar (try newline)
                       return value
-- | Parse DataType. An unrecognized keyword is now a parse error via
-- 'unexpected' instead of a runtime pattern-match crash (the original
-- case expression was non-exhaustive).
parseDataType :: Parser DataType
parseDataType = do keyword <- many1 letter
                   case keyword of
                     "int" -> return Integer
                     "float" -> return Float
                     "double" -> return Double
                     "char" -> return Char
                     "string" -> return String
                     "bool" -> return Bool
                     _ -> unexpected ("data type " ++ show keyword)
| srujun/Pankti | src/Language/Pankti/Parser.hs | mit | 4,051 | 0 | 15 | 1,588 | 868 | 428 | 440 | 82 | 7 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeApplications #-}
import Control.Applicative (empty)
import Control.Exception.Safe (throwString)
import Control.Lens (at, (^.))
import Control.Monad.Trans (liftIO)
import Control.Monad.Trans.Resource (ResourceT, runResourceT)
import Data.Conduit (ConduitT, Void, runConduit, yield, (.|))
import Data.Conduit.Binary (sinkFile, sourceFile)
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import Data.Conduit.Text (decode, encode, utf8)
import Data.Foldable (foldrM)
import Data.List (intersperse)
import qualified Data.Map as M
import Data.Text (Text)
import Formatting (bprint, int, right, sformat, (%))
import qualified Formatting as F
import System.IO (FilePath)
import Data.Attoparsec.Text (parseOnly)
import Options.Applicative.Simple (simpleOptions)
import Bench.Network.Commons (LogMessage (..), MeasureEvent (..),
MeasureInfo (..), MsgId, Payload (..), Timestamp,
logMessageParser, measureInfoParser)
import LogReaderOptions (Args (..), argsParser)
import Pos.Util.Trace (Severity (..), Trace, traceWith, wlogTrace)
import Pos.Util.Wlog (centiUtcTimeF, productionB, removeAllHandlers,
setupLogging')
-- | Per-message payload plus the (event, timestamp) pairs observed for it.
type Measures = M.Map MsgId (Payload, [(MeasureEvent, Timestamp)])
type RowId = Int
-- | Stream a log file line by line, folding every successfully parsed
-- measurement into the accumulator; parse failures are logged and skipped.
analyze :: Trace IO (Severity, Text) -> FilePath -> Measures -> IO Measures
analyze logTrace file initialMeasures = runResourceT $ pipeline
  where
    pipelineSource :: ConduitT () (Text, RowId) (ResourceT IO) ()
    pipelineSource =
         sourceFile file
      .| CB.lines
      .| decode utf8
      .| (CL.mapAccum withRowId 0 >> pure ())
    pipelineSink :: ConduitT (Text, RowId) Void (ResourceT IO) Measures
    pipelineSink = CL.foldM saveMeasure initialMeasures
    pipeline :: ResourceT IO Measures
    pipeline = runConduit $ pipelineSource .| pipelineSink
    -- pair each line with its (0-based) row number for error reporting
    withRowId :: Text -> RowId -> (RowId, (Text, RowId))
    withRowId t rowid = let !rowid' = rowid + 1 in (rowid', (t, rowid))
    saveMeasure :: Measures -> (Text, RowId) -> ResourceT IO Measures
    saveMeasure !measures (row, rowid) = case parseOnly (logMessageParser measureInfoParser) row of
      Left err -> do
        liftIO $ traceWith logTrace $ (,) Warning $
          sformat ("Parse error at file "%F.build%" (line "%F.int%"): "%F.build)
            file rowid err
        pure measures
      Right (Just (LogMessage MeasureInfo{..})) ->
        -- prepend the new event to the message's history, creating it on first sight
        let alteration Nothing = Just (miPayload, [(miEvent, miTime)])
            alteration (Just (miPayload', stuff)) = Just (miPayload', (miEvent, miTime) : stuff)
            measures' = M.alter alteration miId measures
        in pure measures'
      Right _ -> pure measures
-- | Write the collected measures to a CSV file: a header row, then one row
-- per message with its size and one column per possible measure event.
printMeasures :: FilePath -> Measures -> IO ()
printMeasures file measures = runResourceT . runConduit $
    source .| encode utf8 .| sinkFile file
  where
    source = printHeader >> mapM_ printMeasure (M.toList measures)
    printHeader = printRow $ "MsgId" : "Size" : map (sformat F.build) eventsUniverse
    printMeasure :: Monad m
                 => (MsgId, (Payload, [(MeasureEvent, Timestamp)])) -> ConduitT () Text m ()
    -- rows with a duplicated event are silently dropped (uniqMap fails)
    printMeasure (mid, (Payload p, mm)) = do
      case uniqMap mm of
        Just mm' -> printRow $ sformat int mid
                    : sformat int p
                    : [ maybe "-" (sformat int) $ mm' ^. at ev | ev <- eventsUniverse ]
        _ -> return ()
    printRow :: Monad m => [Text] -> ConduitT () Text m ()
    printRow = yield
             . sformat (F.build%"\n")
             . mconcat
             . intersperse ","
             . alignColumns
    -- turn the event list into a map, failing on duplicate events
    uniqMap = foldl upd (Just mempty)
      where
        upd m (ev, ts) = m >>= \m' ->
          case ev `M.lookup` m' of
            Nothing -> return $ M.insert ev ts m'
            _ -> throwString ""
    -- pad columns to fixed widths for readability of the csv
    alignColumns = map (\(s, m) -> bprint (right s ' ') m)
                 . zip (7 : 7 : (18 <$ eventsUniverse))
    eventsUniverse = [minBound .. maxBound]
-- | Parse the command-line options of the bench-log-reader utility.
getOptions :: IO Args
getOptions = (\(a, ()) -> a) <$> simpleOptions
    "bench-log-reader"
    "Utility to extract measures from logs into csv file"
    "Use it!"
    argsParser
    empty
-- | Set up logging, analyze every input log file, and write the csv result.
main :: IO ()
main = do
    lh <- setupLogging' (Just centiUtcTimeF) productionB
    let logTrace = wlogTrace mempty
    Args{..} <- liftIO getOptions
    measures <- foldrM (analyze logTrace) M.empty inputFiles
    printMeasures resultFile measures
    removeAllHandlers lh
| input-output-hk/pos-haskell-prototype | networking/bench/LogReader/Main.hs | mit | 4,815 | 0 | 20 | 1,392 | 1,511 | 823 | 688 | 100 | 4 |
module Test.Auxx.Lang.LexerSpec
( spec
) where
import Universum
import Test.Hspec (Expectation, Spec, describe, it, shouldBe)
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck (Property, property)
import Pos.Chain.Update (ApplicationName (..), BlockVersion (..),
SoftwareVersion (..))
import Pos.Core (decodeTextAddress)
import Pos.Crypto (decodeAbstractHash, parseFullPublicKey,
unsafeCheatingHashCoerce)
import Lang.Lexer (BracketSide (..), Token (..), detokenize, tokenize,
tokenize')
import Lang.Name (unsafeMkName)
-- | Lexer test suite: two properties and two concrete token-stream samples.
spec :: Spec
spec = describe "Auxx.Lang.Lexer" $ do
    prop "accepts any input" propAcceptsAnyInput
    prop "handles valid input" propHandlesValidInput
    it "handles sample-1" unitLexerSample1
    it "handles sample-2" unitLexerSample2
-- | The lexer never fails outright on arbitrary input.
propAcceptsAnyInput :: Property
propAcceptsAnyInput = property $ isJust . tokenize' . fromString
-- | Round-trip: detokenizing and re-tokenizing reproduces the token list.
propHandlesValidInput :: Property
propHandlesValidInput = property $ liftA2 (==) (map snd . tokenize . detokenize) identity
-- | Sample covering brackets, names, file paths (including an escaped space)
-- and string literals.
unitLexerSample1 :: Expectation
unitLexerSample1 = map snd (tokenize input) `shouldBe` output
  where
    input = " ( \"Hello\"; [=propose-patak-update ./secret.key /home/a_b\\ b-c] \"\"\"\" ) "
    output =
        [ TokenParenthesis BracketSideOpening
        , TokenString "Hello"
        , TokenSemicolon
        , TokenSquareBracket BracketSideOpening
        , TokenEquals
        , TokenName $ unsafeMkName ["propose", "patak", "update"]
        , TokenFilePath "./secret.key"
        , TokenFilePath "/home/a_b b-c"
        , TokenSquareBracket BracketSideClosing
        , TokenString "\""
        , TokenParenthesis BracketSideClosing
        ]
-- | Sample covering key/hash/version/address token forms.
unitLexerSample2 :: Expectation
unitLexerSample2 = map snd (tokenize input) `shouldBe` output
  where
    input =
        " oyGcGsd/FX3Zl98PPt/jE/mo+6Mz/HxaVcHxhrtxh6MrBkBi2U4h0pwaPDhWUo+IgcGzl4xLOqkoB4suojuNUA== \
        \ 5f53e01e1366aeda8811c2a630f0e037077a7b651093d2bdc4ef7200 \
        \ 04f2bf626c4e92d97683592c5af70ec243a5a5508a0bbb0adf7af49483cc9894 \
        \ 1.22.3 \
        \ ~software~cardano-sl:41 \
        \ Ae2tdPwUPEZ3Fd8HkQabvTJo3Ues7o2kNXXcK6LgGBfYwTM3pxpn5pijrBu \
        \ "
    output =
        [ TokenPublicKey . discardErrorText . parseFullPublicKey $
            "oyGcGsd/FX3Zl98PPt/jE/mo+6Mz/HxaVcHxhrtxh6MrBkBi2U4h0pwaPDhWUo+IgcGzl4xLOqkoB4suojuNUA=="
        , TokenStakeholderId . discardErrorText . decodeAbstractHash $
            "5f53e01e1366aeda8811c2a630f0e037077a7b651093d2bdc4ef7200"
        , TokenHash . unsafeCheatingHashCoerce . discardErrorText . decodeAbstractHash $
            "04f2bf626c4e92d97683592c5af70ec243a5a5508a0bbb0adf7af49483cc9894"
        , TokenBlockVersion $ BlockVersion 1 22 3
        , TokenSoftwareVersion $ SoftwareVersion
            { svAppName = ApplicationName "cardano-sl"
            , svNumber = 41 }
        , TokenAddress . discardErrorText . decodeTextAddress $
            "Ae2tdPwUPEZ3Fd8HkQabvTJo3Ues7o2kNXXcK6LgGBfYwTM3pxpn5pijrBu"
        ]
-- The fixtures above are known-good, so decode failures are impossible here.
discardErrorText = either (\(_ :: Text) -> error "impossible") identity
| input-output-hk/pos-haskell-prototype | auxx/test/Test/Auxx/Lang/LexerSpec.hs | mit | 3,225 | 0 | 11 | 769 | 546 | 308 | 238 | -1 | -1 |
module Projections
( wrap
, alt
, sliding_wave
, bounded_sin
, scale
, shift
, affine
) where
import Data.Fixed
-- Wraps
-- | Fold a value into the half-open unit interval [0, 1) via modulo 1.
wrap :: (Fractional a, Real a) => a -> a
wrap = (`mod'` 1.0)
-- | Triangle wave on the unit interval: 0 at the endpoints, 1 at x = 1/2.
alt :: Num a => a -> a
alt x = let folded = abs (1 - 2 * x) in 1 - folded
-- Wave
-- | Displace the input by a bounded sine of wavelength @wvl@ whose phase
-- slides over time with period @per@ (the former 'shift' call is inlined).
sliding_wave :: (Fractional a, Floating a) => a -> a -> a -> a -> a
sliding_wave wvl per t x = x + bounded_sin wvl (t * wvl / per + x)
-- | Sine rescaled so its period is @p@ (amplitude shrinks to p / (2*pi)).
bounded_sin :: (Fractional a, Floating a) => a -> a -> a
bounded_sin p x = (1 / k) * sin (k * x)
  where k = 2 * pi / p
-- Affine
-- | Multiply by a constant factor.
scale :: Num a => a -> a -> a
scale factor x = factor * x
-- | Add a constant offset.
shift :: Num a => a -> a -> a
shift offset x = offset + x
-- | Affine map: scale by @a@, then shift by @b@.
affine :: (Num c) => c -> c -> c -> c
affine a b x = shift b (scale a x)
| Ni-Br/luminifu | brain/Projections.hs | mit | 676 | 0 | 11 | 169 | 367 | 198 | 169 | 24 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Arbitrary where
import Control.Applicative hiding (empty)
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import Data.String
import Test.QuickCheck
import Data.Set (Set)
import qualified Data.Set as Set
import Control.Monad (foldM)
import Instance ()
import qualified Parse
import Internal
import qualified Data.Config as Config
import ParseUtil (isEndOfLine)
-- | Parse a config source, crashing via 'error' on failure.  Test-only
-- helper: generated inputs are expected to always parse.
parse :: Text -> Config
parse input = either error id (Config.parse input)
-- NOTE(review): orphan 'Arbitrary' instance for 'Set' — acceptable in a
-- test-only module, but keep it out of library code.
instance (Ord a, Arbitrary a) => Arbitrary (Set a) where
  arbitrary = Set.fromList <$> arbitrary

-- Wrap the dedicated text generators into 'Arbitrary' instances for the
-- domain newtypes.
instance Arbitrary Section where
  arbitrary = Section <$> sectionName

instance Arbitrary Key where
  arbitrary = Key <$> key

instance Arbitrary Value where
  arbitrary = Value <$> value

-- | Generate a whole 'Config' by rendering random source text and
-- re-parsing it (round-trips through the real parser).
instance Arbitrary Config where
  arbitrary = parse <$> config
-- | Run a list of text generators in order and concatenate their results.
concatG :: [Gen Text] -> Gen Text
concatG gens = Text.concat `fmap` sequence gens
-- | Horizontal whitespace: up to five spaces/tabs, possibly none.
blanks :: Gen Text
blanks = resize 5 (fromString <$> listOf (elements [' ', '\t']))
-- | A single line of arbitrary text: around ten characters, none of
-- which counts as an end-of-line character (per 'isEndOfLine').
arbitraryLine :: Gen Text
arbitraryLine = resize 10 $ fromString <$> listOf (arbitrary `suchThat` (not . isEndOfLine))
-- | Section names: short (possibly empty) strings of characters
-- accepted by 'Parse.sectionClass'.
sectionName :: Gen Text
sectionName = resize 5 $ fromString <$> listOf (arbitrary `suchThat` Parse.sectionClass)

-- | Option keys: non-empty strings of characters accepted by
-- 'Parse.keyClass'.
key :: Gen Text
key = resize 5 $ fromString <$> listOf1 (arbitrary `suchThat` Parse.keyClass)

-- | Option values: arbitrary single lines with leading whitespace
-- stripped (the parser never yields leading blanks in a value).
value :: Gen Text
value = resize 10 $ Text.stripStart <$> arbitraryLine
-- | Render a section header line @[name]@, padded on both sides with
-- random blanks.
sectionHeader :: Text -> Gen Text
sectionHeader name =
    concatG [blanks, pure (Text.concat ["[", name, "]"]), blanks]
-- | Render an option line @key = value@ (or @key : value@), with random
-- blanks around each token and a randomly chosen separator.
option :: Text -> Gen Text
option k =
    concatG [blanks, pure k, blanks, elements ["=", ":"], blanks, value]
-- | A comment line: optional leading blanks, a @#@ marker, then
-- arbitrary line text.
comment :: Gen Text
comment = concatG [blanks, pure "#", arbitraryLine]

-- | A line containing only whitespace.
blankLine :: Gen Text
blankLine = blanks
-- | The kind of line to generate inside a section body.
data Line = OptionLine | CommentLine | BlankLine
  deriving Show
-- | Generate the body of one section: a weighted mix of option,
-- comment and blank lines (options twice as likely).  A 'Set' of
-- already-used keys is threaded through 'foldM' so option keys stay
-- unique within the section; lines are accumulated in reverse and the
-- key set is discarded via 'snd' at the end.
sectionBody :: Gen [Text]
sectionBody = do
  l <- listOf $ frequency [(2, pure OptionLine), (1, pure CommentLine), (1, pure BlankLine)]
  snd `fmap` foldM f (Set.empty, []) l
  where
    -- Each step takes (used keys, lines so far) and one line kind.
    f (set, xs) OptionLine = do
      k <- key `suchThat` flip Set.notMember set
      x <- option k
      return (Set.insert k set, x : xs)
    f (set, xs) CommentLine = do
      x <- comment
      return (set, x : xs)
    f (set, xs) BlankLine = do
      x <- blankLine
      return (set, x : xs)
-- | Generate a whole config as a list of sections (each a list of
-- lines).  Section names are kept unique via a threaded 'Set', and no
-- generated section may reuse the reserved default-section name.
-- Optionally a headerless default section is prepended.
configBody :: Gen [[Text]]
configBody = do
  sectionNames <- resize 10 $ listOf sectionBody
  (_, l) <- foldM f (Set.empty, []) sectionNames
  defaultSection <- oneof [pure Nothing, Just <$> sectionBody]
  return $ maybe l (:l) defaultSection
  where
    -- Attach a fresh, unique header to each generated section body.
    f (set, xs) body = do
      name <- sectionName `suchThat` (\n -> n /= toText defaultSectionName && Set.notMember n set)
      header <- sectionHeader name
      return (Set.insert name set, (header : body) : xs)
-- | A complete config source: all generated sections flattened and
-- joined into a single newline-terminated text.
config :: Gen Text
config = fmap (Text.unlines . concat) configBody
-- | A random config source paired with its parsed 'Config'.
--
-- Idiom fix (hlint): @do x <- m; return (f x)@ collapsed to '<$>'.
inputAndConfig :: Gen (Text, Config)
inputAndConfig = (\input -> (input, parse input)) <$> config
-- | Like 'inputAndConfig', but restricted to configs that parse to at
-- least one entry.
inputAndConfig1 :: Gen (Text, Config)
inputAndConfig1 = inputAndConfig `suchThat` (not . null . toList . snd)
-- | Wrapper giving random config sources a 'Show'-able 'Arbitrary'
-- instance (so failing inputs are printed by QuickCheck).
data Input = Input Text
  deriving Show

instance Arbitrary Input where
  arbitrary = Input <$> config

-- | A config source paired with its parsed form.
data InputAndConfig = InputAndConfig Text Config
  deriving Show

instance Arbitrary InputAndConfig where
  arbitrary = uncurry InputAndConfig <$> inputAndConfig

-- | As 'InputAndConfig', but guaranteed to parse to a non-empty config.
data InputAndConfig1 = InputAndConfig1 Text Config
  deriving Show

instance Arbitrary InputAndConfig1 where
  arbitrary = uncurry InputAndConfig1 <$> inputAndConfig1
-- | Debug helper: print a handful of sample generated configs to
-- stdout, each followed by a 78-character @#@ separator line.
sampleConfig :: IO ()
sampleConfig = sample' config >>= mapM_ (\s -> Text.putStrLn s >> putStrLn (replicate 78 '#'))
| sol/config-ng | test/Arbitrary.hs | mit | 3,832 | 0 | 15 | 811 | 1,350 | 720 | 630 | 100 | 3 |
{-- snippet all --}
import Data.Bits
import Network.Socket
import Network.BSD
import Data.List
import Control.Concurrent
import Control.Concurrent.MVar
import System.IO
-- | Callback invoked once per received line: the peer's address plus
-- the line of text.
type HandlerFunc = SockAddr -> String -> IO ()

-- | Listen for TCP connections on the given port and hand every line
-- received from every client to the handler.  Loops forever.
serveLog :: String       -- ^ Port number or name; 514 is default
         -> HandlerFunc  -- ^ Function to handle incoming messages
         -> IO ()
serveLog port handlerfunc = withSocketsDo $
    do -- Look up the port. Either raises an exception or returns
       -- a nonempty list.
       addrinfos <- getAddrInfo
                    (Just (defaultHints {addrFlags = [AI_PASSIVE]}))
                    Nothing (Just port)
       let serveraddr = head addrinfos
       -- Create a socket
       sock <- socket (addrFamily serveraddr) Stream defaultProtocol
       -- Bind it to the address we're listening to
       -- NOTE(review): 'bindSocket' is the legacy name; newer network
       -- versions call this 'bind' — confirm against the pinned dep.
       bindSocket sock (addrAddress serveraddr)
       -- Start listening for connection requests. Maximum queue size
       -- of 5 connection requests waiting to be accepted.
       listen sock 5
       -- Create a lock to use for synchronizing access to the handler
       lock <- newMVar ()
       -- Loop forever waiting for connections. Ctrl-C to abort.
       procRequests lock sock
    where
      -- | Process incoming connection requests: accept, log, fork a
      -- per-client worker (ThreadId discarded), recurse.
      procRequests :: MVar () -> Socket -> IO ()
      procRequests lock mastersock =
          do (connsock, clientaddr) <- accept mastersock
             handle lock clientaddr
                 "syslogtcpserver.hs: client connnected"
             -- (sic: "connnected" typo kept — it is a runtime log string)
             forkIO $ procMessages lock connsock clientaddr
             procRequests lock mastersock

      -- | Process incoming messages for one client until EOF.
      -- NOTE(review): 'hGetContents' is lazy I/O — read errors surface
      -- only while 'lines' is being consumed; fine for a demo server.
      procMessages :: MVar () -> Socket -> SockAddr -> IO ()
      procMessages lock connsock clientaddr =
          do connhdl <- socketToHandle connsock ReadMode
             hSetBuffering connhdl LineBuffering
             messages <- hGetContents connhdl
             mapM_ (handle lock clientaddr) (lines messages)
             hClose connhdl
             handle lock clientaddr
                 "syslogtcpserver.hs: client disconnected"

      -- Lock the handler before passing data to it, so concurrent
      -- client threads never interleave handler output.
      handle :: MVar () -> HandlerFunc
      -- This type is the same as
      -- handle :: MVar () -> SockAddr -> String -> IO ()
      handle lock clientaddr msg =
          withMVar lock
             (\a -> handlerfunc clientaddr msg >> return a)
-- | Minimal handler: write each incoming message to stdout, tagged with
-- the sender's address.
plainHandler :: HandlerFunc
plainHandler addr msg = putStrLn (concat ["From ", show addr, ": ", msg])
{-- /snippet all --}
| binesiyu/ifl | examples/ch27/syslogtcpserver.hs | mit | 2,706 | 0 | 15 | 886 | 495 | 245 | 250 | 45 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="de-DE">
<title>Sequence Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/sequence/src/main/javahelp/org/zaproxy/zap/extension/sequence/resources/help_de_DE/helpset_de_DE.hs | apache-2.0 | 978 | 80 | 66 | 160 | 415 | 210 | 205 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.IAM.GetAccountSummary
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Retrieves information about IAM entity usage and IAM quotas in the AWS
-- account.
--
-- For information about limitations on IAM entities, see <http://docs.aws.amazon.com/IAM/latest/UserGuide/LimitationsOnEntities.html Limitations on IAMEntities> in the /Using IAM/ guide.
--
-- <http://docs.aws.amazon.com/IAM/latest/APIReference/API_GetAccountSummary.html>
module Network.AWS.IAM.GetAccountSummary
(
-- * Request
GetAccountSummary
-- ** Request constructor
, getAccountSummary
-- * Response
, GetAccountSummaryResponse
-- ** Response constructor
, getAccountSummaryResponse
-- ** Response lenses
, gasrSummaryMap
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.IAM.Types
import qualified GHC.Exts
-- | Request type for the GetAccountSummary operation.  The call takes
-- no parameters, so a single nullary constructor suffices.
data GetAccountSummary = GetAccountSummary
    deriving (Eq, Ord, Read, Show, Generic)

-- | 'GetAccountSummary' constructor.
getAccountSummary :: GetAccountSummary
getAccountSummary = GetAccountSummary
-- | Response payload: a map from summary key to integer value, carried
-- on the wire as an @entry@/@key@/@value@ element map ('EMap').
newtype GetAccountSummaryResponse = GetAccountSummaryResponse
    { _gasrSummaryMap :: EMap "entry" "key" "value" SummaryKeyType Int
    } deriving (Eq, Read, Show, Monoid, Semigroup)
-- | 'GetAccountSummaryResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gasrSummaryMap' @::@ 'HashMap' 'SummaryKeyType' 'Int'
--
getAccountSummaryResponse :: GetAccountSummaryResponse
getAccountSummaryResponse = GetAccountSummaryResponse
    { _gasrSummaryMap = mempty  -- starts empty; populate via 'gasrSummaryMap'
    }
-- | A set of key value pairs containing information about IAM entity usage and
-- IAM quotas.
--
-- 'SummaryMap' contains the following keys: AccessKeysPerUserQuota
--
-- The maximum number of active access keys allowed for each IAM user.
--
-- AccountAccessKeysPresent
--
-- This value is 1 if the AWS account (root) has an access key, otherwise it is
-- 0.
--
-- AccountMFAEnabled
--
-- This value is 1 if the AWS account (root) has an MFA device assigned,
-- otherwise it is 0.
--
-- AccountSigningCertificatesPresent
--
-- This value is 1 if the AWS account (root) has a signing certificate,
-- otherwise it is 0.
--
-- AssumeRolePolicySizeQuota
--
-- The maximum allowed size for assume role policy documents (trust policies),
-- in non-whitespace characters.
--
-- AttachedPoliciesPerGroupQuota
--
-- The maximum number of managed policies that can be attached to an IAM group.
--
-- AttachedPoliciesPerRoleQuota
--
-- The maximum number of managed policies that can be attached to an IAM role.
--
-- AttachedPoliciesPerUserQuota
--
-- The maximum number of managed policies that can be attached to an IAM user.
--
-- GroupPolicySizeQuota
--
-- The maximum allowed size for the aggregate of all inline policies embedded
-- in an IAM group, in non-whitespace characters.
--
-- Groups
--
-- The number of IAM groups in the AWS account.
--
-- GroupsPerUserQuota
--
-- The maximum number of IAM groups each IAM user can belong to.
--
-- GroupsQuota
--
-- The maximum number of IAM groups allowed in the AWS account.
--
-- InstanceProfiles
--
-- The number of instance profiles in the AWS account.
--
-- InstanceProfilesQuota
--
-- The maximum number of instance profiles allowed in the AWS account.
--
-- MFADevices
--
-- The number of MFA devices in the AWS account, including those assigned and
-- unassigned.
--
-- MFADevicesInUse
--
-- The number of MFA devices that have been assigned to an IAM user or to the
-- AWS account (root).
--
-- Policies
--
-- The number of customer managed policies in the AWS account.
--
-- PoliciesQuota
--
-- The maximum number of customer managed policies allowed in the AWS account.
--
-- PolicySizeQuota
--
-- The maximum allowed size of a customer managed policy, in non-whitespace
-- characters.
--
-- PolicyVersionsInUse
--
-- The number of managed policies that are attached to IAM users, groups, or
-- roles in the AWS account.
--
-- PolicyVersionsInUseQuota
--
-- The maximum number of managed policies that can be attached to IAM users,
-- groups, or roles in the AWS account.
--
-- Providers
--
-- The number of identity providers in the AWS account.
--
-- RolePolicySizeQuota
--
-- The maximum allowed size for the aggregate of all inline policies (access
-- policies, not the trust policy) embedded in an IAM role, in non-whitespace
-- characters.
--
-- Roles
--
-- The number of IAM roles in the AWS account.
--
-- RolesQuota
--
-- The maximum number of IAM roles allowed in the AWS account.
--
-- ServerCertificates
--
-- The number of server certificates in the AWS account.
--
-- ServerCertificatesQuota
--
-- The maximum number of server certificates allowed in the AWS account.
--
-- SigningCertificatesPerUserQuota
--
-- The maximum number of X.509 signing certificates allowed for each IAM user.
--
-- UserPolicySizeQuota
--
-- The maximum allowed size for the aggregate of all inline policies embedded
-- in an IAM user, in non-whitespace characters.
--
-- Users
--
-- The number of IAM users in the AWS account.
--
-- UsersQuota
--
-- The maximum number of IAM users allowed in the AWS account.
--
-- VersionsPerPolicyQuota
--
-- The maximum number of policy versions allowed for each managed policy.
--
--
gasrSummaryMap :: Lens' GetAccountSummaryResponse (HashMap SummaryKeyType Int)
-- Composes the record lens with the '_EMap' iso so callers see a plain
-- 'HashMap' rather than the serialisation-oriented 'EMap' wrapper.
gasrSummaryMap = lens _gasrSummaryMap (\s a -> s { _gasrSummaryMap = a }) . _EMap
-- Request serialisation: the operation posts to the service root with
-- no query parameters and no extra headers.
instance ToPath GetAccountSummary where
    toPath = const "/"

instance ToQuery GetAccountSummary where
    toQuery = const mempty

instance ToHeaders GetAccountSummary

-- Wire the request/response pair into the IAM service definition.
instance AWSRequest GetAccountSummary where
    type Sv GetAccountSummary = IAM
    type Rs GetAccountSummary = GetAccountSummaryResponse

    request  = post "GetAccountSummary"
    response = xmlResponse

-- Decode the response from the "GetAccountSummaryResult" XML element;
-- a missing "SummaryMap" yields an empty map.
instance FromXML GetAccountSummaryResponse where
    parseXML = withElement "GetAccountSummaryResult" $ \x -> GetAccountSummaryResponse
        <$> x .@? "SummaryMap" .!@ mempty
| kim/amazonka | amazonka-iam/gen/Network/AWS/IAM/GetAccountSummary.hs | mpl-2.0 | 6,930 | 0 | 10 | 1,270 | 505 | 364 | 141 | 46 | 1 |
{-# LANGUAGE DeriveFunctor #-}
module Distribution.Solver.Modular.Flag
( FInfo(..)
, Flag
, FlagInfo
, FN(..)
, QFN
, QSN
, SN(..)
, mkFlag
, showFBool
, showQFN
, showQFNBool
, showQSN
, showQSNBool
) where
import Data.Map as M
import Prelude hiding (pi)
import Distribution.PackageDescription hiding (Flag) -- from Cabal
import Distribution.Solver.Modular.Package
import Distribution.Solver.Types.OptionalStanza
-- | Flag name. Consists of a package instance and the flag identifier itself.
-- 'qpn' is the package-name type, kept abstract so the same structure
-- works for qualified and unqualified names.
data FN qpn = FN (PI qpn) Flag  -- package instance + flag identifier
  deriving (Eq, Ord, Show, Functor)
-- | Flag identifier. Just a string.
type Flag = FlagName
-- | Extract the raw string from a 'Flag'.
unFlag :: Flag -> String
unFlag (FlagName name) = name
-- | Wrap a raw string as a 'Flag'.
--
-- Idiom fix (hlint): eta-reduced — 'mkFlag' is just the 'FlagName'
-- constructor.
mkFlag :: String -> Flag
mkFlag = FlagName
-- | Flag info. Default value, whether the flag is manual, and
-- whether the flag is weak. Manual flags can only be set explicitly.
-- Weak flags are typically deferred by the solver.
data FInfo = FInfo { fdefault :: Bool  -- ^ default value of the flag
                   , fmanual  :: Bool  -- ^ manual flags can only be set explicitly
                   , fweak    :: Bool  -- ^ weak flags are typically deferred by the solver
                   }
  deriving (Eq, Ord, Show)
-- | Flag defaults.
type FlagInfo = Map Flag FInfo
-- | Qualified flag name.
type QFN = FN QPN
-- | Stanza name. Paired with a package name, much like a flag.
-- Mirrors 'FN': a package instance paired with the stanza it controls.
data SN qpn = SN (PI qpn) OptionalStanza
  deriving (Eq, Ord, Show, Functor)
-- | Qualified stanza name.
type QSN = SN QPN
-- | Human-readable name of a stanza kind.
unStanza :: OptionalStanza -> String
unStanza stanza = case stanza of
  TestStanzas  -> "test"
  BenchStanzas -> "bench"
-- | Render a qualified flag choice, e.g. @pkg:+flagname@.
showQFNBool :: QFN -> Bool -> String
showQFNBool qfn@(FN pkgi _) b = concat [showPI pkgi, ":", showFBool qfn b]
-- | Render a qualified stanza choice, e.g. @pkg:*test@.
showQSNBool :: QSN -> Bool -> String
showQSNBool qsn@(SN pkgi _) b = concat [showPI pkgi, ":", showSBool qsn b]
-- | Render a flag choice as @+flag@ (enabled) or @-flag@ (disabled).
showFBool :: FN qpn -> Bool -> String
showFBool (FN _ f) b = sign ++ unFlag f
  where
    sign = if b then "+" else "-"
-- | Render a stanza choice as @*stanza@ (enabled) or @!stanza@ (disabled).
showSBool :: SN qpn -> Bool -> String
showSBool (SN _ s) b = sign ++ unStanza s
  where
    sign = if b then "*" else "!"
-- | Render a qualified flag name without a truth value, e.g. @pkg:flag@.
showQFN :: QFN -> String
showQFN (FN pkgi f) = concat [showPI pkgi, ":", unFlag f]
-- | Render a qualified stanza name without a truth value, e.g. @pkg:test@.
showQSN :: QSN -> String
showQSN (SN pkgi s) = concat [showPI pkgi, ":", unStanza s]
| bennofs/cabal | cabal-install/Distribution/Solver/Modular/Flag.hs | bsd-3-clause | 2,069 | 0 | 8 | 449 | 642 | 353 | 289 | 51 | 1 |
-- | A minimal lens: a setter/getter pair focused on a field of type
-- @a@ inside a structure of type @s@.
--
-- Fix: the original also declared standalone type signatures for 'set'
-- and 'view', but those names are already bound as record selectors by
-- this declaration, so GHC rejected the extra signatures and the module
-- failed to compile.  The selectors keep exactly the same types:
-- @set :: Lens s a -> s -> a -> s@ and @view :: Lens s a -> s -> a@.
data Lens s a = Lens { set  :: s -> a -> s  -- ^ write the focused field
                     , view :: s -> a       -- ^ read the focused field
                     }
| junnf/Functional-Programming | codes/lens.hs | unlicense | 169 | 0 | 10 | 91 | 76 | 41 | 35 | 4 | 0 |
module MoveDefSpec (main, spec) where
import Test.Hspec
import Language.Haskell.Refact.Refactoring.MoveDef
import System.Directory
import TestUtils
-- ---------------------------------------------------------------------
-- | Test entry point: run the MoveDef refactoring spec under hspec.
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
-- -------------------------------------------------------------------
describe "liftToTopLevel" $ do
it "cannot lift a top level declaration" $ do
-- res <- catchException (ct $ liftToTopLevel logTestSettings testOptions "./MoveDef/Md1.hs" (4,1))
res <- catchException (ct $ liftToTopLevel defaultTestSettings testOptions "./MoveDef/Md1.hs" (4,1))
(show res) `shouldBe` "Just \"\\nThe identifier is not a local function/pattern name!\""
-- ---------------------------------
it "checks for name clashes" $ do
-- res <- catchException (doLiftToTopLevel ["./test/testdata/MoveDef/Md1.hs","17","5"])
res <- catchException (ct $ liftToTopLevel defaultTestSettings testOptions "./MoveDef/Md1.hs" (17,5))
(show res) `shouldBe` "Just \"The identifier(s): (ff, MoveDef/Md1.hs:17:5) will cause name clash/capture or ambiguity occurrence problem after lifting, please do renaming first!\""
{-
it "checks for invalid new name" $ do
res <- catchException (doDuplicateDef ["./test/testdata/DupDef/Dd1.hs","$c","5","1"])
(show res) `shouldBe` "Just \"Invalid new function name:$c!\""
it "notifies if no definition selected" $ do
res <- catchException (doDuplicateDef ["./test/testdata/DupDef/Dd1.hs","ccc","14","13"])
(show res) `shouldBe` "Just \"The selected identifier is not a function/simple pattern name, or is not defined in this module \""
-}
-- ---------------------------------
it "lifts a definition to the top level" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./MoveDef/Md1.hs" (24,5)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./test/testdata/MoveDef/Md1.hs" (24,5)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/MoveDef/Md1.hs\"]"
diff <- compareFiles "./test/testdata/MoveDef/Md1.hs.expected"
"./test/testdata/MoveDef/Md1.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel D1 C1 A1 8 6" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/D1.hs" (8,6)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/D1.hs" (8,6)
r' <- ct $ mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"LiftToToplevel/D1.hs\",\"LiftToToplevel/C1.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/D1.hs.expected"
"./test/testdata/LiftToToplevel/D1.refactored.hs"
diff `shouldBe` []
diff2 <- compareFiles "./test/testdata/LiftToToplevel/C1.hs.expected"
"./test/testdata/LiftToToplevel/C1.refactored.hs"
diff2 `shouldBe` []
a1Refactored <- doesFileExist "./test/testdata/LiftToToplevel/A1.refactored.hs"
a1Refactored `shouldBe` False
-- ---------------------------------
it "liftToTopLevel D2 C2 A2 8 6" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/D2.hs" (8,6)
r' <- ct $ mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"LiftToToplevel/D2.hs\",\"LiftToToplevel/C2.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/D2.hs.expected"
"./test/testdata/LiftToToplevel/D2.refactored.hs"
diff `shouldBe` []
diff2 <- compareFiles "./test/testdata/LiftToToplevel/C2.hs.expected"
"./test/testdata/LiftToToplevel/C2.refactored.hs"
diff2 `shouldBe` []
a1Refactored <- doesFileExist "./test/testdata/LiftToToplevel/A2.refactored.hs"
a1Refactored `shouldBe` False
-- ---------------------------------
it "liftToTopLevel D3 C3 A3 8 6" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/D3.hs" (8,6)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/D3.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/D3.hs.expected"
"./test/testdata/LiftToToplevel/D3.refactored.hs"
diff `shouldBe` []
c3Refactored <- doesFileExist "./test/testdata/LiftToToplevel/C3.refactored.hs"
c3Refactored `shouldBe` False
a3Refactored <- doesFileExist "./test/testdata/LiftToToplevel/A3.refactored.hs"
a3Refactored `shouldBe` False
-- ---------------------------------
it "liftToTopLevel WhereIn1 12 18" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/WhereIn1.hs" (12,18)
-- r <- ct $ liftToTopLevel logTestSettings testOptions Nothing "./test/testdata/LiftToToplevel/WhereIn1.hs" (12,18)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/WhereIn1.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/WhereIn1.hs.expected"
"./test/testdata/LiftToToplevel/WhereIn1.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel WhereIn6 13 29" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/WhereIn6.hs" (13,29)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/WhereIn6.hs" (13,29)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/WhereIn6.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/WhereIn6.hs.expected"
"./test/testdata/LiftToToplevel/WhereIn6.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel WhereIn7 12 14" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/WhereIn7.hs" (12,14)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/WhereIn7.hs" (12,14)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/WhereIn7.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/WhereIn7.hs.expected"
"./test/testdata/LiftToToplevel/WhereIn7.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel LetIn1 11 22" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/LetIn1.hs" (11,22)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/LetIn1.hs" (11,22)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/LetIn1.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/LetIn1.hs.expected"
"./test/testdata/LiftToToplevel/LetIn1.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel LetIn2 10 22" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/LetIn2.hs" (10,22)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/LetIn2.hs" (10,22)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/LetIn2.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/LetIn2.hs.expected"
"./test/testdata/LiftToToplevel/LetIn2.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel LetIn3 10 27" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/LetIn3.hs" (10,27)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/LetIn3.hs" (10,27)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/LetIn3.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/LetIn3.hs.expected"
"./test/testdata/LiftToToplevel/LetIn3.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
{-
This is trying to test an invalid lift
it "liftToTopLevel PatBindIn1 18 7" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/PatBindIn1.hs" (18,7)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/PatBindIn1.hs" (18,7)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/PatBindIn1.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/PatBindIn1.hs.expected"
"./test/testdata/LiftToToplevel/PatBindIn1.refactored.hs"
diff `shouldBe` []
-}
-- ---------------------------------
it "liftToTopLevel PatBindIn2 17 7 fails" $ do
res <- catchException (ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/PatBindIn2.hs" (17,7))
-- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/PatBindIn2.hs" (17,7)
(show res) `shouldBe` "Just \"Cannot lift a declaration assigning to a tuple pattern\""
-- ---------------------------------
it "liftToTopLevel PatBindIn3 11 15" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/PatBindIn3.hs" (11,15)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/PatBindIn3.hs" (11,15)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/PatBindIn3.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/PatBindIn3.hs.expected"
"./test/testdata/LiftToToplevel/PatBindIn3.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel PatBindIn4 12 30 fails" $ do
res <- catchException (ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/PatBindIn4.hs" (12,30))
-- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/PatBindIn4.hs" (12,30)
(show res) `shouldBe` "Just \"Cannot lift a declaration assigning to a tuple pattern\""
-- ---------------------------------
it "liftToTopLevel CaseIn1 10 28" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/CaseIn1.hs" (10,28)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/CaseIn1.hs" (10,28)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/CaseIn1.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/CaseIn1.hs.expected"
"./test/testdata/LiftToToplevel/CaseIn1.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel WhereIn2 11 18 fails" $ do
-- res <- catchException (doLiftToTopLevel ["./test/testdata/LiftToToplevel/WhereIn2.hs","11","18"])
res <- catchException (ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/WhereIn2.hs" (11,18))
-- liftToTopLevel logTestSettings testOptions Nothing "./test/testdata/LiftToToplevel/WhereIn2.hs" (11,18)
(show res) `shouldBe` "Just \"The identifier(s): (sq, LiftToToplevel/WhereIn2.hs:11:18) will cause name clash/capture or ambiguity occurrence problem after lifting, please do renaming first!\""
-- ---------------------------------
it "liftToTopLevel Collapse1 8 6" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/Collapse1.hs" (8,6)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/Collapse1.hs" (8,6)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/Collapse1.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/Collapse1.expected.hs"
"./test/testdata/LiftToToplevel/Collapse1.refactored.hs"
diff `shouldBe` []
{- original tests
positive=[(["D1.hs","C1.hs","A1.hs"],["8","6"]),
(["D2.hs","C2.hs","A2.hs"],["8","6"]),
(["D3.hs","C3.hs","A3.hs"],["8","6"]),
(["WhereIn1.hs"],["12","18"]),
(["WhereIn6.hs"],["13","29"]),
(["WhereIn7.hs"],["12","14"]),
(["LetIn1.hs"],["11","22"]),
(["LetIn2.hs"],["10","22"]),
(["LetIn3.hs"],["10","27"]),
(["PatBindIn1.hs"],["18","7"]),
(["PatBindIn3.hs"],["11","15"]),
(["CaseIn1.hs"],["10","28"])],
negative=[(["PatBindIn2.hs"],["17","7"]),
(["WhereIn2.hs"],["11","18"])
]
-}
-- ---------------------------------
it "liftToTopLevel Zmapq" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/Zmapq.hs" (6,3)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/Zmapq.hs" (6,3)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/Zmapq.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/Zmapq.expected.hs"
"./test/testdata/LiftToToplevel/Zmapq.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel LiftInLambda 10 5" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/LiftInLambda.hs" (10,5)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/LiftInLambda.hs" (10,5)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/LiftInLambda.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/LiftInLambda.expected.hs"
"./test/testdata/LiftToToplevel/LiftInLambda.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel NoWhere" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/NoWhere.hs" (14,12)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/NoWhere.hs" (14,12)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/NoWhere.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/NoWhere.expected.hs"
"./test/testdata/LiftToToplevel/NoWhere.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel Signature" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/Signature.hs" (9,5)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/Signature.hs" (9,5)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/Signature.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/Signature.expected.hs"
"./test/testdata/LiftToToplevel/Signature.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel Signature2" $ do
-- should throw exception for forall in signature
res <- catchException (ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/Signature2.hs" (16,5))
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/Signature2.hs" (16,5)
(show res) `shouldBe` "Just \"\\nNew type signature may fail type checking: :: (forall t. Num t => t -> t -> t) -> Int -> \\n\""
{-
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/Signature2.hs" (16,5)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/Signature2.hs" (16,5)
(show r) `shouldBe` "[\"./test/testdata/LiftToToplevel/Signature2.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/Signature2.expected.hs"
"./test/testdata/LiftToToplevel/Signature2.refactored.hs"
diff `shouldBe` []
-}
-- ---------------------------------
it "liftToTopLevel Signature2r" $ do
-- should throw exception for forall in signature
r <- catchException (ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/Signature2r.hs" (12,5))
-- r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/Signature2r.hs" (12,5)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/Signature2r.hs" (12,5)
(show r) `shouldBe` "Just \"\\nNew type signature may fail type checking: :: (forall t. Num t => t -> t -> t) -> Int -> \\n\""
{-
(show r) `shouldBe` "[\"./test/testdata/LiftToToplevel/Signature2r.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/Signature2r.expected.hs"
"./test/testdata/LiftToToplevel/Signature2r.refactored.hs"
diff `shouldBe` []
-}
-- ---------------------------------
it "liftToTopLevel Signature3" $ do
r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/Signature3.hs" (9,5)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/Signature3.hs" (9,5)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftToToplevel/Signature3.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/Signature3.expected.hs"
"./test/testdata/LiftToToplevel/Signature3.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftToTopLevel Signature4" $ do
-- should throw exception for forall in signature
r <- catchException $ ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/Signature4.hs" (9,5)
-- r <- ct $ liftToTopLevel defaultTestSettings testOptions "./LiftToToplevel/Signature4.hs" (9,5)
-- r <- ct $ liftToTopLevel logTestSettings testOptions "./LiftToToplevel/Signature4.hs" (9,5)
(show r) `shouldBe` "Just \"\\nNew type signature may fail type checking: :: (forall t. (Integral t, Num t) => t -> t -> Int) -> t -> \\n\""
{-
(show r) `shouldBe` "[\"./test/testdata/LiftToToplevel/Signature4.hs\"]"
diff <- compareFiles "./test/testdata/LiftToToplevel/Signature4.expected.hs"
"./test/testdata/LiftToToplevel/Signature4.refactored.hs"
diff `shouldBe` []
-}
-- -------------------------------------------------------------------
describe "LiftOneLevel" $ do
it "liftOneLevel.liftToMod D1 C1 A1 8 6" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/D1.hs" (8,6)
-- r <- ct $ liftOneLevel logTestSettings testOptions "./LiftOneLevel/D1.hs" (8,6)
r' <- ct $ mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"LiftOneLevel/D1.hs\",\"LiftOneLevel/C1.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/D1.hs.expected"
"./test/testdata/LiftOneLevel/D1.refactored.hs"
diff `shouldBe` []
diff2 <- compareFiles "./test/testdata/LiftOneLevel/C1.hs.expected"
"./test/testdata/LiftOneLevel/C1.refactored.hs"
diff2 `shouldBe` []
a1Refactored <- doesFileExist "./test/testdata/LiftOneLevel/A1.refactored.hs"
a1Refactored `shouldBe` False
-- ---------------------------------
it "LiftOneLevel.liftToMod D2 C2 A2 8 6" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/D2.hs" (8,6)
r' <- ct $ mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"LiftOneLevel/D2.hs\",\"LiftOneLevel/C2.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/D2.hs.expected"
"./test/testdata/LiftOneLevel/D2.refactored.hs"
diff `shouldBe` []
diff2 <- compareFiles "./test/testdata/LiftOneLevel/C2.hs.expected"
"./test/testdata/LiftOneLevel/C2.refactored.hs"
diff2 `shouldBe` []
a2Refactored <- doesFileExist "./test/testdata/LiftOneLevel/A2.refactored.hs"
a2Refactored `shouldBe` False
-- ---------------------------------
it "LiftOneLevel.liftToMod D3 C3 A3 8 6" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/D3.hs" (8,6)
r' <- ct $ mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"LiftOneLevel/D3.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/D3.hs.expected"
"./test/testdata/LiftOneLevel/D3.refactored.hs"
diff `shouldBe` []
c3Refactored <- doesFileExist "./test/testdata/LiftOneLevel/C3.refactored.hs"
c3Refactored `shouldBe` False
a3Refactored <- doesFileExist "./test/testdata/LiftOneLevel/A3.refactored.hs"
a3Refactored `shouldBe` False
-- ---------------------------------
it "LiftOneLevel WhereIn1 12 18" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/WhereIn1.hs" (12,18)
-- r <- ct $ liftOneLevel logTestSettings testOptions "./LiftOneLevel/WhereIn1.hs" (12,18)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftOneLevel/WhereIn1.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/WhereIn1.hs.expected"
"./test/testdata/LiftOneLevel/WhereIn1.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "LiftOneLevel WhereIn6 13 29" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/WhereIn6.hs" (13,29)
-- r <- ct $ liftOneLevel logTestSettings testOptions "./LiftOneLevel/WhereIn6.hs" (13,29)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftOneLevel/WhereIn6.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/WhereIn6.hs.expected"
"./test/testdata/LiftOneLevel/WhereIn6.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftOneLevel WhereIn7 12 14" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/WhereIn7.hs" (12,14)
-- r <- ct $ liftOneLevel logTestSettings testOptions "./LiftOneLevel/WhereIn7.hs" (12,14)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftOneLevel/WhereIn7.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/WhereIn7.hs.expected"
"./test/testdata/LiftOneLevel/WhereIn7.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "LiftOneLevel WhereIn8 8 11" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/WhereIn8.hs" (8,11)
-- r <- ct $ liftOneLevel logTestSettings testOptions "./LiftOneLevel/WhereIn8.hs" (8,11)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftOneLevel/WhereIn8.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/WhereIn8.hs.expected"
"./test/testdata/LiftOneLevel/WhereIn8.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "LiftOneLevel LetIn1 11 22" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/LetIn1.hs" (11,22)
-- r <- ct $ liftOneLevel logTestSettings testOptions "./LiftOneLevel/LetIn1.hs" (11,22)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftOneLevel/LetIn1.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/LetIn1.hs.expected"
"./test/testdata/LiftOneLevel/LetIn1.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "LiftOneLevel LetIn2 11 22" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/LetIn2.hs" (11,22)
-- r <- ct $ liftOneLevel logTestSettings testOptions "./LiftOneLevel/LetIn2.hs" (11,22)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftOneLevel/LetIn2.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/LetIn2.hs.expected"
"./test/testdata/LiftOneLevel/LetIn2.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "LiftOneLevel LetIn3 10 27" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/LetIn3.hs" (10,27)
-- r <- ct $ liftOneLevel logTestSettings testOptions "./LiftOneLevel/LetIn3.hs" (10,27)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftOneLevel/LetIn3.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/LetIn3.hs.expected"
"./test/testdata/LiftOneLevel/LetIn3.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "LiftOneLevel PatBindIn3 11 15" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/PatBindIn3.hs" (11,15)
-- r <- ct $ liftOneLevel logTestSettings testOptions "./LiftOneLevel/PatBindIn3.hs" (11,15)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftOneLevel/PatBindIn3.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/PatBindIn3.hs.expected"
"./test/testdata/LiftOneLevel/PatBindIn3.refactored.hs"
diff `shouldBe` []
-- ---------------------------------
it "liftOneLevel CaseIn1 10 28" $ do
r <- ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/CaseIn1.hs" (10,28)
-- r <- ct $ liftOneLevel logTestSettings testOptions "./LiftOneLevel/CaseIn1.hs" (10,28)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/LiftOneLevel/CaseIn1.hs\"]"
diff <- compareFiles "./test/testdata/LiftOneLevel/CaseIn1.hs.expected"
"./test/testdata/LiftOneLevel/CaseIn1.refactored.hs"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "fails PatBindIn2 17 7" $ do
{-
res <- catchException (liftOneLevel defaultTestSettings testOptions Nothing "./test/testdata/LiftOneLevel/PatBindIn2.hs" (17,7))
-- liftOneLevel logTestSettings testOptions Nothing "./test/testdata/LiftOneLevel/PatBindIn2.hs" (17,7)
(show res) `shouldBe` "Just \"Lifting this definition failed. This might be because that the definition to be lifted is defined in a class/instance declaration.\""
-}
pending -- Not clear that this was covered in the original, will
-- come back to it
-- -----------------------------------------------------------------
it "fails WhereIn2 8 18" $ do
res <- catchException (ct $ liftOneLevel defaultTestSettings testOptions "./LiftOneLevel/WhereIn2.hs" (8,18))
-- liftOneLevel logTestSettings testOptions "./LiftOneLevel/WhereIn2.hs" (8,18)
(show res) `shouldBe` "Just \"The identifier(s): (sq, LiftOneLevel/WhereIn2.hs:8:18) will cause name clash/capture or ambiguity occurrence problem after lifting, please do renaming first!\""
-- TODO: check that other declarations in a list that make use of the
-- one being lifted also have params changed.
{- original tests
TestCases{refactorCmd="liftOneLevel",
positive=[(["D1.hs","C1.hs","A1.hs"],["8","6"]),
(["D2.hs","C2.hs","A2.hs"],["8","6"]),
(["D3.hs","C3.hs","A3.hs"],["8","6"]),
(["WhereIn1.hs"],["12","18"]),
(["WhereIn6.hs"],["15","29"]),
(["WhereIn7.hs"],["12","14"]),
(["WhereIn8.hs"],["8","11"]),
(["LetIn1.hs"],["11","22"]),
(["LetIn2.hs"],["10","22"]),
(["LetIn3.hs"],["10","27"]),
(["PatBindIn3.hs"],["11","15"]),
(["CaseIn1.hs"],["10","28"])],
negative=[(["PatBindIn2.hs"],["17","7"]),
(["WhereIn2.hs"],["8","18"])]
}
-}
-- -------------------------------------------------------------------
describe "demote" $ do
it "notifies if no definition selected" $ do
-- res <- catchException (doDemote ["./test/testdata/MoveDef/Md1.hs","14","13"])
res <- catchException (ct $ demote defaultTestSettings testOptions "./MoveDef/Md1.hs" (14,13))
(show res) `shouldBe` "Just \"\\nInvalid cursor position!\""
it "will not demote if nowhere to go" $ do
res <- catchException (ct $ demote defaultTestSettings testOptions "./MoveDef/Md1.hs" (8,1))
-- res <- ct $ demote logTestSettings testOptions "./MoveDef/Md1.hs" (8,1)
(show res) `shouldBe` "Just \"\\n Nowhere to demote this function!\\n\""
-- -----------------------------------------------------------------
it "demotes a definition from the top level 1" $ do
r <- ct $ demote defaultTestSettings testOptions "./MoveDef/Demote.hs" (7,1)
-- r <- ct $ demote logTestSettings testOptions "./MoveDef/Demote.hs" (7,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/MoveDef/Demote.hs\"]"
diff <- compareFiles "./test/testdata/MoveDef/Demote.refactored.hs"
"./test/testdata/MoveDef/Demote.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes a definition from the top level D1" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/D1.hs" (9,1)
-- r <- ct $ demote logTestSettings testOptions "./Demote/D1.hs" (9,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/D1.hs\"]"
diff <- compareFiles "./test/testdata/Demote/D1.refactored.hs"
"./test/testdata/Demote/D1.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes WhereIn1 12 1" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/WhereIn1.hs" (12,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/WhereIn1.hs\"]"
diff <- compareFiles "./test/testdata/Demote/WhereIn1.refactored.hs"
"./test/testdata/Demote/WhereIn1.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes WhereIn3 14 1" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/WhereIn3.hs" (14,1)
-- r <- ct $ demote logTestSettings testOptions "./Demote/WhereIn3.hs" (14,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/WhereIn3.hs\"]"
diff <- compareFiles "./test/testdata/Demote/WhereIn3.refactored.hs"
"./test/testdata/Demote/WhereIn3.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes WhereIn4 14 1" $ do
-- r <- doDemote ["./test/testdata/Demote/WhereIn4.hs","14","1"]
r <- ct $ demote defaultTestSettings testOptions "./Demote/WhereIn4.hs" (14,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/WhereIn4.hs\"]"
diff <- compareFiles "./test/testdata/Demote/WhereIn4.refactored.hs"
"./test/testdata/Demote/WhereIn4.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes WhereIn5 14 1" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/WhereIn5.hs" (14,1)
-- r <- ct $ demote logTestSettings testOptions "./Demote/WhereIn5.hs" (14,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/WhereIn5.hs\"]"
diff <- compareFiles "./test/testdata/Demote/WhereIn5.refactored.hs"
"./test/testdata/Demote/WhereIn5.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes WhereIn6 13 1" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/WhereIn6.hs" (13,1)
-- r <- ct $ demote logTestSettings testOptions "./Demote/WhereIn6.hs" (13,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/WhereIn6.hs\"]"
diff <- compareFiles "./test/testdata/Demote/WhereIn6.refactored.hs"
"./test/testdata/Demote/WhereIn6.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes WhereIn7 13 1" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/WhereIn7.hs" (13,1)
-- r <- ct $ demote logTestSettings testOptions "./Demote/WhereIn7.hs" (13,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/WhereIn7.hs\"]"
diff <- compareFiles "./test/testdata/Demote/WhereIn7.refactored.hs"
"./test/testdata/Demote/WhereIn7.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes CaseIn1 16 1" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/CaseIn1.hs" (16,1)
-- r <- ct $ demote logTestSettings testOptions "./Demote/CaseIn1.hs" (16,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/CaseIn1.hs\"]"
diff <- compareFiles "./test/testdata/Demote/CaseIn1.refactored.hs"
"./test/testdata/Demote/CaseIn1.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes LetIn1 12 22" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/LetIn1.hs" (12,22)
-- r <- ct $ demote logTestSettings testOptions "./Demote/LetIn1.hs" (12,22)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/LetIn1.hs\"]"
diff <- compareFiles "./test/testdata/Demote/LetIn1.refactored.hs"
"./test/testdata/Demote/LetIn1.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes PatBindIn1 19 1" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/PatBindIn1.hs" (19,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/PatBindIn1.hs\"]"
diff <- compareFiles "./test/testdata/Demote/PatBindIn1.refactored.hs"
"./test/testdata/Demote/PatBindIn1.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "demotes D2a 5 1 when not imported by other module" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/D2a.hs" (5,1)
-- r <- ct $ demote logTestSettings testOptions "./Demote/D2a.hs" (5,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/D2a.hs\"]"
diff <- compareFiles "./test/testdata/Demote/D2a.refactored.hs"
"./test/testdata/Demote/D2a.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "fails WhereIn2 14 1" $ do
-- res <- catchException (doDemote ["./test/testdata/Demote/WhereIn2.hs","14","1"])
res <- catchException (ct $ demote defaultTestSettings testOptions "./Demote/WhereIn2.hs" (14,1))
-- demote (Just logSettings) testOptions Nothing "./test/testdata/Demote/WhereIn2.hs" (14,1)
(show res) `shouldBe` "Just \"\\n Nowhere to demote this function!\\n\""
-- -----------------------------------------------------------------
it "fails LetIn2 11 22" $ do
-- res <- catchException (doDemote ["./test/testdata/Demote/LetIn2.hs","11","22"])
res <- catchException (ct $ demote defaultTestSettings testOptions "./Demote/LetIn2.hs" (11,22))
(show res) `shouldBe` "Just \"This function can not be demoted as it is used in current level!\\n\""
-- -----------------------------------------------------------------
it "fails PatBindIn4 18 1" $ do
-- res <- catchException (doDemote ["./test/testdata/Demote/PatBindIn4.hs","18","1"])
res <- catchException (ct $ demote defaultTestSettings testOptions "./Demote/PatBindIn4.hs" (18,1))
-- (show res) `shouldBe` "Just \"\\n Nowhere to demote this function!\\n\""
(show res) `shouldBe` "Just \"\\nThis function/pattern binding is used by more than one friend bindings\\n\""
-- -----------------------------------------------------------------
it "fails WhereIn8 16 1" $ do
-- res <- catchException (doDemote ["./test/testdata/Demote/WhereIn8.hs","16","1"])
res <- catchException (ct $ demote defaultTestSettings testOptions "./Demote/WhereIn8.hs" (16,1))
(show res) `shouldBe` "Just \"\\n Nowhere to demote this function!\\n\""
-- -----------------------------------------------------------------
it "fails D2 5 1" $ do
res <- catchException (ct $ demote defaultTestSettings testOptions "./Demote/D2.hs" (5,1))
-- res <- catchException (ct $ demote logTestSettings testOptions "./Demote/D2.hs" (5,1))
(show res) `shouldBe` "Just \"This definition can not be demoted, as it is used in the client module 'Demote.A2'!\""
-- -----------------------------------------------------------------
it "fails for re-export in client module" $ do
pending
-- -----------------------------------------------------------------
it "fails D3 5 1" $ do
-- res <- catchException (doDemote ["./test/testdata/Demote/D3.hs","5","1"])
res <- catchException (ct $ demote defaultTestSettings testOptions "./Demote/D3.hs" (5,1))
(show res) `shouldBe` "Just \"This definition can not be demoted, as it is explicitly exported by the current module!\""
{- Original test cases. These files are now in testdata/Demote
TestCases{refactorCmd="demote",
positive=[(["D1.hs","C1.hs","A1.hs"],["9","1"]), x
(["WhereIn1.hs"],["12","1"]), x
(["WhereIn3.hs"],["14","1"]), x
(["WhereIn4.hs"],["14","1"]), x
(["WhereIn5.hs"],["14","1"]), x
(["WhereIn6.hs"],["13","1"]), x
(["WhereIn7.hs"],["13","1"]), x
(["CaseIn1.hs"],["16","1"]), x
(["LetIn1.hs"],["12","22"]), x
(["PatBindIn1.hs"],["19","1"])], x
negative=[(["WhereIn2.hs"],["14","1"]), x
(["LetIn2.hs"],["11","22"]), x
(["PatBindIn4.hs"],["18","1"]), x
(["WhereIn8.hs"],["16","1"]), x
(["D2.hs","C2.hs","A2.hs"],["5","1"]), x
(["D3.hs"],["5","1"])] x
}
-}
-- -----------------------------------------------------------------
it "fails MultiLeg.hs" $ do
res <- catchException (ct $ demote defaultTestSettings testOptions "./Demote/MultiLeg.hs" (14,1))
-- demote logTestSettings testOptions "./Demote/MultiLeg.hs" (14,1)
(show res) `shouldBe` "Just \"\\nThis function/pattern binding is used by more than one friend bindings\\n\""
-- -----------------------------------------------------------------
it "passes MultiLeg2.hs" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/MultiLeg2.hs" (14,1)
-- demote logTestSettings testOptions "./Demote/MultiLeg2.hs" (14,1)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/MultiLeg2.hs\"]"
diff <- compareFiles "./test/testdata/Demote/MultiLeg2.refactored.hs"
"./test/testdata/Demote/MultiLeg2.hs.expected"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "passes UsedAtLevel.hs" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/UsedAtLevel.hs" (19,12)
-- demote logTestSettings testOptions "./Demote/UsedAtLevel.hs" (19,12)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/UsedAtLevel.hs\"]"
diff <- compareFiles "./test/testdata/Demote/UsedAtLevel.refactored.hs"
"./test/testdata/Demote/UsedAtLevel.expected.hs"
diff `shouldBe` []
-- -----------------------------------------------------------------
it "passes UsedAtLevel.hs2" $ do
r <- ct $ demote defaultTestSettings testOptions "./Demote/UsedAtLevel2.hs" (23,12)
-- demote logTestSettings testOptions "./Demote/UsedAtLevel2.hs" (23,12)
r' <- mapM makeRelativeToCurrentDirectory r
(show r') `shouldBe` "[\"test/testdata/Demote/UsedAtLevel2.hs\"]"
diff <- compareFiles "./test/testdata/Demote/UsedAtLevel2.refactored.hs"
"./test/testdata/Demote/UsedAtLevel2.expected.hs"
diff `shouldBe` []
-- ---------------------------------------------------------------------
-- Helper functions
| RefactoringTools/HaRe | test/MoveDefSpec.hs | bsd-3-clause | 41,062 | 0 | 18 | 7,806 | 5,424 | 2,680 | 2,744 | 414 | 1 |
--
-- Adapted from the program "infer", believed to have been originally
-- authored by Philip Wadler, and used in the nofib benchmark suite
-- since at least the late 90s.
--
module Main where
import Control.Monad.Par.Scheds.Trace
import Data.List (foldl')
import qualified Data.Map as Map
import System.Exit (exitWith, ExitCode(..))
import System.IO

import Environment
import Infer
import InferMonad
import Lex
import Parse
import Term
import Type
-- | Read a set of bindings from stdin, run type inference over them,
-- and print the inferred polytypes.  A lexer/parser failure is reported
-- on stderr and terminates the program with a nonzero exit code.
main :: IO ()
main = do
  input <- getContents
  either die (print . inferBinds initialEnv) (parseBinds (alexScanTokens input))
-- | Report a fatal error on stderr and exit with status 1.
die :: String -> IO ()
die msg = do
  hPutStrLn stderr msg
  exitWith (ExitFailure 1)
-- | GHCi convenience driver: parse a single expression and print the
-- result of inferring its type in the initial environment.  Parse
-- errors are reported via 'die'.
test :: String -> IO ()
test str = either die report (parseExp (alexScanTokens str))
  where
    report t = print (useI (error "type error") (inferTerm initialEnv t))
-- | Infer polymorphic types for a list of top-level bindings, running
-- the inference inside the Par monad (see the Control.Monad.Par import)
-- so bindings can be processed in parallel.
inferBinds :: Env -> [(VarId,Term)] -> [(VarId,PolyType)]
inferBinds e t = runPar $ do
  -- Seed one already-filled IVar per name known in the starting
  -- environment; inferTop consults this map while inferring the
  -- new bindings.
  ys <- mapM (\(x,ty) -> do v <- newFull ty; return (x,v)) (unmakeEnv e)
  let topenv = Map.fromList ys
  inferTop topenv t
-- | The environment of built-in operators: the four integer arithmetic
-- operators, each with the monotype Int -> Int -> Int (wrapped in a
-- trivial 'All []' quantifier).
initialEnv :: Env
initialEnv = foldl' (uncurry . extendGlobal) emptyEnv types
  where
    -- foldl' rather than foldl: a strict left fold threads the growing
    -- environment without building a chain of thunks (the classic
    -- lazy-accumulator pitfall), and is otherwise identical here.
    types = [("+",intop),("*",intop),("-",intop),("/",intop)]
    intop = All [] (intType `arrow` intType `arrow` intType)
| seahug/parconc-examples | parinfer/parinfer.hs | bsd-3-clause | 1,254 | 0 | 15 | 240 | 474 | 256 | 218 | 34 | 2 |
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE OverloadedStrings #-}
module IRTS.CodegenJavaScript (codegenJavaScript, codegenNode, JSTarget(..)) where
import IRTS.JavaScript.AST
import Idris.AbsSyntax hiding (TypeCase)
import IRTS.Bytecode
import IRTS.Lang
import IRTS.Simplified
import IRTS.Defunctionalise
import IRTS.CodegenCommon
import Idris.Core.TT
import IRTS.System
import Util.System
import Control.Arrow
import Control.Monad (mapM)
import Control.Applicative ((<$>), (<*>), pure)
import Control.Monad.RWS hiding (mapM)
import Control.Monad.State
import Data.Char
import Numeric
import Data.List
import Data.Maybe
import Data.Word
import Data.Traversable hiding (mapM)
import System.IO
import System.Directory
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
-- | Facts about the compiled program gathered from its bytecode in a
-- single up-front pass, consulted during JS code generation.
data CompileInfo = CompileInfo { compileInfoApplyCases :: [Int] -- ^ case tags found in the APPLY dispatcher
                               , compileInfoEvalCases :: [Int]  -- ^ case tags found in the EVAL dispatcher
                               , compileInfoNeedsBigInt :: Bool -- ^ True if any constant needs the jsbn big-integer runtime
                               }
-- | Scan all compiled declarations once and build the 'CompileInfo':
-- the case tags of the generated APPLY and EVAL dispatch functions, and
-- whether any constant anywhere forces linking the big-integer library.
initCompileInfo :: [(Name, [BC])] -> CompileInfo
initCompileInfo bc =
  CompileInfo (collectCases "APPLY" bc) (collectCases "EVAL" bc) (lookupBigInt bc)
  where
    -- True if any declaration's bytecode mentions a big-integer
    -- constant (BI or B64), including inside nested case branches.
    lookupBigInt :: [(Name, [BC])] -> Bool
    lookupBigInt = any (needsBigInt . snd)
      where
        needsBigInt :: [BC] -> Bool
        needsBigInt bc = any testBCForBigInt bc
          where
            testBCForBigInt :: BC -> Bool
            testBCForBigInt (ASSIGNCONST _ c) =
              testConstForBigInt c
            -- CONSTCASE: check the default branch, every branch body,
            -- and every scrutinised constant.
            testBCForBigInt (CONSTCASE _ c d) =
              maybe False needsBigInt d
              || any (needsBigInt . snd) c
              || any (testConstForBigInt . fst) c
            testBCForBigInt (CASE _ _ c d) =
              maybe False needsBigInt d
              || any (needsBigInt . snd) c
            testBCForBigInt _ = False

            testConstForBigInt :: Const -> Bool
            testConstForBigInt (BI _) = True
            testConstForBigInt (B64 _) = True
            testConstForBigInt _ = False

    -- Case tags of the named generated function (e.g. "APPLY").
    collectCases :: String -> [(Name, [BC])] -> [Int]
    collectCases fun bc = getCases $ findFunction fun bc

    -- NOTE(review): partial — there is no clause for the empty list, so
    -- this crashes if the named function is absent from the bytecode;
    -- presumably APPLY/EVAL are always generated.  TODO confirm.
    findFunction :: String -> [(Name, [BC])] -> [BC]
    findFunction f ((MN 0 fun, bc):_)
      | fun == txt f = bc
    findFunction f (_:bc) = findFunction f bc

    -- Collect the tags of every CASE instruction's branch list.
    getCases :: [BC] -> [Int]
    getCases = concatMap analyze
      where
        analyze :: BC -> [Int]
        analyze (CASE _ _ b _) = map fst b
        analyze _ = []
data JSTarget = Node | JavaScript deriving Eq
-- | Entry point for the browser backend.  Browser output never links
-- Node native libraries, hence the empty library list.
codegenJavaScript :: CodeGenerator
codegenJavaScript ci =
  codegenJS_all JavaScript decls (includes ci) [] (outputFile ci) (outputType ci)
  where
    decls = simpleDecls ci
-- | Entry point for the Node backend.  Unlike the browser target this
-- also links the libraries requested on the command line.
codegenNode :: CodeGenerator
codegenNode ci =
  codegenJS_all Node decls (includes ci) libs (outputFile ci) (outputType ci)
  where
    decls = simpleDecls ci
    libs  = compileLibs ci
-- | Shared driver for both JS backends: compile the simplified
-- declarations to bytecode, translate to a JS AST, split large
-- functions, eliminate dead code, share nullary constructors, then
-- write runtime + generated code to the output file (made executable
-- for the Node target).
codegenJS_all
  :: JSTarget        -- ^ Node or browser flavour
  -> [(Name, SDecl)] -- ^ simplified declarations to compile
  -> [FilePath]      -- ^ user-supplied JS files spliced into the output
  -> [String]        -- ^ Node libraries to require (Node target only)
  -> FilePath        -- ^ output file path
  -> OutputType
  -> IO ()
codegenJS_all target definitions includes libs filename outputType = do
  let bytecode = map toBC definitions
  let info = initCompileInfo bytecode
  let js = concatMap (translateDecl info) bytecode
  -- split oversized functions, then drop never-invoked ones
  let full = concatMap processFunction js
  let code = deadCodeElim full
  -- hoist shared nullary constructors into top-level allocations
  let (cons, opt) = optimizeConstructors code
  let (header, rt) = case target of
                          Node -> ("#!/usr/bin/env node\n", "-node")
                          JavaScript -> ("", "-browser")
  included <- concat <$> getIncludes includes
  path <- (++) <$> getDataDir <*> (pure "/jsrts/")
  idrRuntime <- readFile $ path ++ "Runtime-common.js"
  tgtRuntime <- readFile $ concat [path, "Runtime", rt, ".js"]
  -- only ship the jsbn big-integer library when some constant needs it
  jsbn <- if compileInfoNeedsBigInt info
             then readFile $ path ++ "jsbn/jsbn.js"
             else return ""
  let runtime = ( header
                  ++ includeLibs libs
                  ++ included
                  ++ jsbn
                  ++ idrRuntime
                  ++ tgtRuntime
                )
  writeSourceText filename ( T.pack runtime
                             `T.append` T.concat (map compileJS opt)
                             `T.append` T.concat (map compileJS cons)
                             `T.append` main
                             `T.append` invokeMain
                           )
  setPermissions filename (emptyPermissions { readable = True
                                            , executable = target == Node
                                            , writable = True
                                            })
  where
    -- Drop every top-level function allocation that is invoked zero
    -- times anywhere in the program; the runMain entry is always kept.
    -- NOTE(review): the clauses only cover JSAlloc-of-function shapes —
    -- presumably the top-level list contains nothing else.  TODO confirm.
    deadCodeElim :: [JS] -> [JS]
    deadCodeElim js = concatMap collectFunctions js
      where
        collectFunctions :: JS -> [JS]
        collectFunctions fun@(JSAlloc name _)
          | name == translateName (sMN 0 "runMain") = [fun]
        collectFunctions fun@(JSAlloc name (Just (JSFunction _ body))) =
          let invokations = sum $ map (
                  \x -> execState (countInvokations name x) 0
                ) js
          in if invokations == 0
                then []
                else [fun]

        -- Count (in State) how many times the identifier 'name' occurs
        -- anywhere inside the given JS tree.
        countInvokations :: String -> JS -> State Int ()
        countInvokations name (JSAlloc _ (Just (JSFunction _ body))) =
          countInvokations name body
        countInvokations name (JSSeq seq) =
          void $ traverse (countInvokations name) seq
        countInvokations name (JSAssign _ rhs) =
          countInvokations name rhs
        countInvokations name (JSCond conds) =
          void $ traverse (
              runKleisli $ arr id *** Kleisli (countInvokations name)
            ) conds
        countInvokations name (JSSwitch _ conds def) =
          void $ traverse (
              runKleisli $ arr id *** Kleisli (countInvokations name)
            ) conds >> traverse (countInvokations name) def
        countInvokations name (JSApp lhs rhs) =
          void $ countInvokations name lhs >> traverse (countInvokations name) rhs
        countInvokations name (JSNew _ args) =
          void $ traverse (countInvokations name) args
        countInvokations name (JSArray args) =
          void $ traverse (countInvokations name) args
        countInvokations name (JSIdent name')
          | name == name' = get >>= put . (+1)
          | otherwise = return ()
        countInvokations _ _ = return ()

    -- Split one function into trampoline-sized pieces; the RWS writer
    -- collects the generated split-off functions.
    processFunction :: JS -> [JS]
    processFunction =
      collectSplitFunctions . (\x -> evalRWS (splitFunction x) () 0)

    -- One Node 'require' line per requested library.
    includeLibs :: [String] -> String
    includeLibs =
      concatMap (\lib -> "var " ++ lib ++ " = require(\"" ++ lib ++"\");\n")

    getIncludes :: [FilePath] -> IO [String]
    getIncludes = mapM readFile

    -- The generated 'main' wrapper; for the browser it defers until the
    -- DOM is ready, for Node it runs immediately.
    main :: T.Text
    main =
      compileJS $ JSAlloc "main" (Just $
        JSFunction [] (
          case target of
               Node -> mainFun
               JavaScript -> jsMain
        )
      )

    -- Browser startup: run now if the document is already loaded,
    -- otherwise hook DOMContentLoaded; fall back to immediate start
    -- outside a browser-like environment.
    jsMain :: JS
    jsMain =
      JSCond [ (exists document `jsAnd` isReady, mainFun)
             , (exists window, windowMainFun)
             , (JSTrue, mainFun)
             ]
      where
        exists :: JS -> JS
        exists js = jsTypeOf js `jsNotEq` JSString "undefined"

        window :: JS
        window = JSIdent "window"

        document :: JS
        document = JSIdent "document"

        windowMainFun :: JS
        windowMainFun =
          jsMeth window "addEventListener" [ JSString "DOMContentLoaded"
                                           , JSFunction [] ( mainFun )
                                           , JSFalse
                                           ]

        isReady :: JS
        isReady = JSParens $ readyState `jsEq` JSString "complete" `jsOr` readyState `jsEq` JSString "loaded"

        readyState :: JS
        readyState = JSProj (JSIdent "document") "readyState"

    -- Boot sequence: allocate the VM, schedule it, enqueue the compiled
    -- runMain, and start the runtime loop.
    mainFun :: JS
    mainFun =
      JSSeq [ JSAlloc "vm" (Just $ JSNew "i$VM" [])
            , JSApp (JSIdent "i$SCHED") [JSIdent "vm"]
            , JSApp (
                JSIdent (translateName (sMN 0 "runMain"))
              ) [JSNew "i$POINTER" [JSNum (JSInt 0)]]
            , JSApp (JSIdent "i$RUN") []
            ]

    invokeMain :: T.Text
    invokeMain = compileJS $ JSApp (JSIdent "main") []

    -- Replace each repeated argument-free constructor ("i$CON" with an
    -- empty argument array) by a reference to a single shared top-level
    -- allocation, returned alongside the rewritten program.
    optimizeConstructors :: [JS] -> ([JS], [JS])
    optimizeConstructors js =
      let (js', cons) = runState (traverse optimizeConstructor' js) M.empty in
          (map (allocCon . snd) (M.toList cons), js')
      where
        allocCon :: (String, JS) -> JS
        allocCon (name, con) = JSAlloc name (Just con)

        newConstructor :: Int -> String
        newConstructor n = "i$CON$" ++ show n

        -- State maps constructor tag -> (shared name, original node).
        optimizeConstructor' :: JS -> State (M.Map Int (String, JS)) JS
        optimizeConstructor' js@(JSNew "i$CON" [ JSNum (JSInt tag)
                                               , JSArray []
                                               , a
                                               , e
                                               ]) = do
          s <- get
          case M.lookup tag s of
               Just (i, c) -> return $ JSIdent i
               Nothing -> do let n = newConstructor tag
                             put $ M.insert tag (n, js) s
                             return $ JSIdent n
        optimizeConstructor' (JSSeq seq) =
          JSSeq <$> traverse optimizeConstructor' seq
        optimizeConstructor' (JSSwitch reg cond def) = do
          cond' <- traverse (runKleisli $ arr id *** Kleisli optimizeConstructor') cond
          def' <- traverse optimizeConstructor' def
          return $ JSSwitch reg cond' def'
        optimizeConstructor' (JSCond cond) =
          JSCond <$> traverse (runKleisli $ arr id *** Kleisli optimizeConstructor') cond
        optimizeConstructor' (JSAlloc fun (Just (JSFunction args body))) = do
          body' <- optimizeConstructor' body
          return $ JSAlloc fun (Just (JSFunction args body'))
        optimizeConstructor' (JSAssign lhs rhs) = do
          lhs' <- optimizeConstructor' lhs
          rhs' <- optimizeConstructor' rhs
          return $ JSAssign lhs' rhs'
        optimizeConstructor' js = return js

    -- Flatten a split result: emitted helper functions first (renamed
    -- with their split depth suffix), then the rewritten original.
    collectSplitFunctions :: (JS, [(Int,JS)]) -> [JS]
    collectSplitFunctions (fun, splits) = map generateSplitFunction splits ++ [fun]
      where
        generateSplitFunction :: (Int,JS) -> JS
        generateSplitFunction (depth, JSAlloc name fun) =
          JSAlloc (name ++ "$" ++ show depth) fun

    -- Cut a function body at each branch/call point, moving the tail
    -- into a fresh continuation function invoked through i$CALL so the
    -- runtime can trampoline instead of growing the JS stack.
    splitFunction :: JS -> RWS () [(Int,JS)] Int JS
    splitFunction (JSAlloc name (Just (JSFunction args body@(JSSeq _)))) = do
      body' <- splitSequence body
      return $ JSAlloc name (Just (JSFunction args body'))
      where
        -- Recurse into the arms of conditionals and switches.
        splitCondition :: JS -> RWS () [(Int,JS)] Int JS
        splitCondition js
          | JSCond branches <- js =
              JSCond <$> processBranches branches
          | JSSwitch cond branches def <- js =
              JSSwitch cond <$> (processBranches branches) <*> (traverse splitSequence def)
          | otherwise = return js
          where
            processBranches :: [(JS,JS)] -> RWS () [(Int,JS)] Int [(JS,JS)]
            processBranches =
              traverse (runKleisli (arr id *** Kleisli splitSequence))

        -- Break the statement sequence at the first branch point; what
        -- follows it (if more than a trivial tail) becomes a new
        -- continuation function recorded via 'tell'.
        splitSequence :: JS -> RWS () [(Int, JS)] Int JS
        splitSequence js@(JSSeq seq) =
          let (pre,post) = break isBranch seq in
              case post of
                   [_] -> JSSeq <$> traverse splitCondition seq
                   [call@(JSCond _),rest@(JSApp _ _)] -> do
                     rest' <- splitCondition rest
                     call' <- splitCondition call
                     return $ JSSeq (pre ++ [rest', call'])
                   [call@(JSSwitch _ _ _),rest@(JSApp _ _)] -> do
                     rest' <- splitCondition rest
                     call' <- splitCondition call
                     return $ JSSeq (pre ++ [rest', call'])
                   (call:rest) -> do
                     depth <- get
                     put (depth + 1)
                     new <- splitFunction (newFun rest)
                     tell [(depth, new)]
                     call' <- splitCondition call
                     return $ JSSeq (pre ++ (newCall depth : [call']))
                   _ -> JSSeq <$> traverse splitCondition seq
        splitSequence js = return js

        -- Statements at which the sequence must be cut.
        isBranch :: JS -> Bool
        isBranch (JSApp (JSIdent "i$CALL") _) = True
        isBranch (JSCond _) = True
        isBranch (JSSwitch _ _ _) = True
        isBranch _ = False

        -- Trampoline call into the continuation at the given depth.
        newCall :: Int -> JS
        newCall depth =
          JSApp (JSIdent "i$CALL") [ JSIdent $ name ++ "$" ++ show depth
                                   , JSArray [jsOLDBASE, jsMYOLDBASE]
                                   ]

        newFun :: [JS] -> JS
        newFun seq =
          JSAlloc name (Just $ JSFunction ["oldbase", "myoldbase"] (JSSeq seq))
    splitFunction js = return js
-- | Translate one compiled definition into top-level JS allocations.
-- The runtime's APPLY and EVAL dispatchers are special-cased: their CASE
-- bytecode is split into one function per constructor tag (see
-- 'allocCaseFunctions'), and the dispatcher first tries the
-- per-constructor "app"/"ev" slot cached on the i$CON object (filled in
-- by 'jsMKCON') before falling back to the default case.
translateDecl :: CompileInfo -> (Name, [BC]) -> [JS]
translateDecl info (name@(MN 0 fun), bc)
  | txt "APPLY" == fun =
      allocCaseFunctions (snd body)
      ++ [ JSAlloc (
             translateName name
           ) (Just $ JSFunction ["oldbase"] (
               JSSeq $ jsFUNPRELUDE ++ map (translateBC info) (fst body) ++ [
                 JSCond [ ( (translateReg $ caseReg (snd body)) `jsInstanceOf` "i$CON" `jsAnd` (JSProj (translateReg $ caseReg (snd body)) "app")
                          , JSApp (JSProj (translateReg $ caseReg (snd body)) "app") [jsOLDBASE, jsMYOLDBASE]
                          )
                        , ( JSNoop
                          , JSSeq $ map (translateBC info) (defaultCase (snd body))
                          )
                        ]
               ]
             )
           )
         ]
  | txt "EVAL" == fun =
      allocCaseFunctions (snd body)
      ++ [ JSAlloc (
             translateName name
           ) (Just $ JSFunction ["oldbase"] (
               JSSeq $ jsFUNPRELUDE ++ map (translateBC info) (fst body) ++ [
                 JSCond [ ( (translateReg $ caseReg (snd body)) `jsInstanceOf` "i$CON" `jsAnd` (JSProj (translateReg $ caseReg (snd body)) "ev")
                          , JSApp (JSProj (translateReg $ caseReg (snd body)) "ev") [jsOLDBASE, jsMYOLDBASE]
                          )
                        , ( JSNoop
                          , JSSeq $ map (translateBC info) (defaultCase (snd body))
                          )
                        ]
               ]
             )
           )
         ]
  where
    -- Bytecode before the first CASE, and the CASE plus anything after it.
    body :: ([BC], [BC])
    body = break isCase bc

    isCase :: BC -> Bool
    isCase bc
      | CASE {} <- bc = True
      | otherwise = False

    -- NOTE(review): partial -- assumes the instruction after the break is
    -- a CASE carrying a default branch; anything else is a pattern failure.
    defaultCase :: [BC] -> [BC]
    defaultCase ((CASE _ _ _ (Just d)):_) = d

    -- Register scrutinised by the CASE (partial, same caveat as above).
    caseReg :: [BC] -> Reg
    caseReg ((CASE _ r _ _):_) = r

    -- One JS function per constructor-tag branch of the CASE.
    allocCaseFunctions :: [BC] -> [JS]
    allocCaseFunctions ((CASE _ _ c _):_) = splitBranches c
    allocCaseFunctions _ = []

    splitBranches :: [(Int, [BC])] -> [JS]
    splitBranches = map prepBranch

    -- Branch function named "<decl>$<tag>", taking both base pointers.
    prepBranch :: (Int, [BC]) -> JS
    prepBranch (tag, code) =
      JSAlloc (
        translateName name ++ "$" ++ show tag
      ) (Just $ JSFunction ["oldbase", "myoldbase"] (
           JSSeq $ map (translateBC info) code
         )
        )
-- All other definitions become a single function taking the caller's base.
translateDecl info (name, bc) =
  [ JSAlloc (
      translateName name
    ) (Just $ JSFunction ["oldbase"] (
        JSSeq $ jsFUNPRELUDE ++ map (translateBC info) bc
      )
     )
  ]
-- | Statements emitted at the top of every translated function body.
jsFUNPRELUDE :: [JS]
jsFUNPRELUDE = [jsALLOCMYOLDBASE]

-- | @var myoldbase = new i$POINTER()@ -- box holding this frame's saved
-- stack base (see 'jsSTOREOLD' / 'jsREBASE').
jsALLOCMYOLDBASE :: JS
jsALLOCMYOLDBASE = JSAlloc "myoldbase" (Just $ JSNew "i$POINTER" [])
-- | Map a bytecode register to the JS expression that holds its value:
-- the return register, the (unused) temporary, or a stack slot relative
-- to the base ('jsLOC') or top ('jsTOP') pointer.
translateReg :: Reg -> JS
translateReg RVal  = jsRET
translateReg Tmp   = JSRaw "//TMPREG"
translateReg (L n) = jsLOC n
translateReg (T n) = jsTOP n
-- | Translate a literal constant to a JS expression.  Characters and
-- strings are escaped via 'translateChar'; big integers go through the
-- big-int runtime, with 0 and 1 special-cased to shared constants; any
-- constant without a clause becomes a runtime 'JSError'.
translateConstant :: Const -> JS
translateConstant (I i) = JSNum (JSInt i)
translateConstant (Fl f) = JSNum (JSFloat f)
translateConstant (Ch c) = JSString $ translateChar c
translateConstant (Str s) = JSString $ concatMap translateChar s
translateConstant (AType (ATInt ITNative)) = JSType JSIntTy
translateConstant StrType = JSType JSStringTy
translateConstant (AType (ATInt ITBig)) = JSType JSIntegerTy
translateConstant (AType ATFloat) = JSType JSFloatTy
translateConstant (AType (ATInt ITChar)) = JSType JSCharTy
translateConstant Forgot = JSType JSForgotTy
-- Shared big-int constants for the two most common values.
translateConstant (BI 0) = JSNum (JSInteger JSBigZero)
translateConstant (BI 1) = JSNum (JSInteger JSBigOne)
translateConstant (BI i) = jsBigInt (JSString $ show i)
translateConstant (B8 b) = JSWord (JSWord8 b)
translateConstant (B16 b) = JSWord (JSWord16 b)
translateConstant (B32 b) = JSWord (JSWord32 b)
translateConstant (B64 b) = JSWord (JSWord64 b)
translateConstant c =
  JSError $ "Unimplemented Constant: " ++ show c
-- | Escape a character for inclusion in a JavaScript string literal.
-- JS's named escapes are used where available; remaining control
-- characters and all characters above Latin-1 become @\\uXXXX@ escapes.
--
-- Fix: code points above U+FFFF cannot be expressed by a single
-- @\\uXXXX@ escape (the old code emitted 5+ hex digits, which JS parses
-- as a 4-digit escape followed by stray characters); they are now encoded
-- as a UTF-16 surrogate pair.  Behaviour for BMP characters is unchanged.
translateChar :: Char -> String
translateChar ch
  | '\a'   <- ch       = "\\u0007"
  | '\b'   <- ch       = "\\b"
  | '\f'   <- ch       = "\\f"
  | '\n'   <- ch       = "\\n"
  | '\r'   <- ch       = "\\r"
  | '\t'   <- ch       = "\\t"
  | '\v'   <- ch       = "\\v"
  | '\SO'  <- ch       = "\\u000E"
  | '\DEL' <- ch       = "\\u007F"
  | '\\'   <- ch       = "\\\\"
  | '\"'   <- ch       = "\\\""
  | '\''   <- ch       = "\\\'"
  | ch `elem` asciiTab = "\\u" ++ fill (showHex (ord ch) "")
  | ord ch > 0xFFFF    = surrogatePair (ord ch)  -- astral plane: one \uXXXX is not enough
  | ord ch > 255       = "\\u" ++ fill (showHex (ord ch) "")
  | otherwise          = [ch]
  where
    -- Encode a code point above the BMP as its UTF-16 surrogate pair.
    surrogatePair :: Int -> String
    surrogatePair c =
      let c'   = c - 0x10000
          high = 0xD800 + c' `div` 0x400
          low  = 0xDC00 + c' `mod` 0x400
      in "\\u" ++ fill (showHex high "") ++ "\\u" ++ fill (showHex low "")
    -- Left-pad a hex string to exactly 4 digits.
    fill :: String -> String
    fill s = case length s of
                  1 -> "000" ++ s
                  2 -> "00" ++ s
                  3 -> "0" ++ s
                  _ -> s
    -- ASCII control characters that have no dedicated escape above.
    asciiTab =
      ['\NUL', '\SOH', '\STX', '\ETX', '\EOT', '\ENQ', '\ACK', '\BEL',
       '\BS', '\HT', '\LF', '\VT', '\FF', '\CR', '\SO', '\SI',
       '\DLE', '\DC1', '\DC2', '\DC3', '\DC4', '\NAK', '\SYN', '\ETB',
       '\CAN', '\EM', '\SUB', '\ESC', '\FS', '\GS', '\RS', '\US']
-- | Mangle an Idris 'Name' into a JS-safe identifier: alphanumeric
-- characters are kept, every other character becomes @_<codepoint>_@,
-- and the whole thing is prefixed with @_idris_@.
translateName :: Name -> String
translateName n = "_idris_" ++ concatMap escape (showCG n)
  where
    escape :: Char -> String
    escape c
      | isAlphaNum c = [c]
      | otherwise    = '_' : show (fromEnum c) ++ "_"
-- | @r1 := r2@.
jsASSIGN :: CompileInfo -> Reg -> Reg -> JS
jsASSIGN _ r1 r2 = JSAssign (translateReg r1) (translateReg r2)

-- | @r := constant@.
jsASSIGNCONST :: CompileInfo -> Reg -> Const -> JS
jsASSIGNCONST _ r c = JSAssign (translateReg r) (translateConstant c)

-- | Trampolined call: @i$CALL(fn, [myoldbase])@.
jsCALL :: CompileInfo -> Name -> JS
jsCALL _ n =
  JSApp (
    JSIdent "i$CALL"
  ) [JSIdent (translateName n), JSArray [jsMYOLDBASE]]

-- | Trampolined tail call: passes the caller's @oldbase@ through instead
-- of this frame's @myoldbase@.
jsTAILCALL :: CompileInfo -> Name -> JS
jsTAILCALL _ n =
  JSApp (
    JSIdent "i$CALL"
  ) [JSIdent (translateName n), JSArray [jsOLDBASE]]
-- | Translate a foreign call to an assignment of a JS expression.
-- "isNull" and "idris_eqPtr" are intercepted and compiled to plain "=="
-- comparisons; everything else becomes a raw FFI splice ('JSFFI') with
-- function-typed arguments wrapped so they can re-enter the trampoline.
jsFOREIGN :: CompileInfo -> Reg -> String -> [(FType, Reg)] -> JS
jsFOREIGN _ reg n args
  | n == "isNull"
  , [(FPtr, arg)] <- args =
      JSAssign (
        translateReg reg
      ) (
        JSBinOp "==" (translateReg arg) JSNull
      )
  | n == "idris_eqPtr"
  , [(_, lhs),(_, rhs)] <- args =
      JSAssign (
        translateReg reg
      ) (
        JSBinOp "==" (translateReg lhs) (translateReg rhs)
      )
  | otherwise =
      JSAssign (
        translateReg reg
      ) (
        JSFFI n (map generateWrapper args)
      )
  where
    -- Function-typed arguments (pure or IO) get the i$ffiWrap shim; the
    -- two guards are currently identical for both function kinds.
    generateWrapper :: (FType, Reg) -> JS
    generateWrapper (ty, reg)
      | FFunction <- ty =
          JSApp (JSIdent "i$ffiWrap") [ translateReg reg
                                      , JSIdent "oldbase"
                                      , JSIdent "myoldbase"
                                      ]
      | FFunctionIO <- ty =
          JSApp (JSIdent "i$ffiWrap") [ translateReg reg
                                      , JSIdent "oldbase"
                                      , JSIdent "myoldbase"
                                      ]
    -- Non-function arguments are passed through untouched.
    generateWrapper (_, reg) =
      translateReg reg
-- | Restore the stack base from the caller-provided pointer box.
jsREBASE :: CompileInfo -> JS
jsREBASE _ = JSAssign jsSTACKBASE (JSProj jsOLDBASE "addr")

-- | Save the current stack base into this frame's pointer box.
jsSTOREOLD :: CompileInfo ->JS
jsSTOREOLD _ = JSAssign (JSProj jsMYOLDBASE "addr") jsSTACKBASE

-- | Bump the stack top by @n@ slots (no-op when @n == 0@).
jsADDTOP :: CompileInfo -> Int -> JS
jsADDTOP info n
  | 0 <- n = JSNoop
  | otherwise =
      JSBinOp "+=" jsSTACKTOP (JSNum (JSInt n))

-- | @top := base + n@ (plain copy when @n == 0@).
jsTOPBASE :: CompileInfo -> Int -> JS
jsTOPBASE _ 0 = JSAssign jsSTACKTOP jsSTACKBASE
jsTOPBASE _ n = JSAssign jsSTACKTOP (JSBinOp "+" jsSTACKBASE (JSNum (JSInt n)))

-- | @base := top + n@ (plain copy when @n == 0@).
jsBASETOP :: CompileInfo -> Int -> JS
jsBASETOP _ 0 = JSAssign jsSTACKBASE jsSTACKTOP
jsBASETOP _ n = JSAssign jsSTACKBASE (JSBinOp "+" jsSTACKTOP (JSNum (JSInt n)))

-- | Clear a register with JS @delete@.
jsNULL :: CompileInfo -> Reg -> JS
jsNULL _ r = JSDelete (translateReg r)

-- | Emit a runtime error node.
jsERROR :: CompileInfo -> String -> JS
jsERROR _ = JSError

-- | Move the top @n@ stack slots down to the base; the one-slot case is
-- inlined, larger slides call the runtime helper @i$SLIDE@.
jsSLIDE :: CompileInfo -> Int -> JS
jsSLIDE _ 1 = JSAssign (jsLOC 0) (jsTOP 0)
jsSLIDE _ n = JSApp (JSIdent "i$SLIDE") [JSNum (JSInt n)]
-- | @r := new i$CON(tag, args, apply?, eval?)@.  The third and fourth
-- slots are filled with the pre-split APPLY/EVAL case function for this
-- tag when one exists (see 'translateDecl'), so the dispatchers can jump
-- directly instead of switching on the tag.
jsMKCON :: CompileInfo -> Reg -> Int -> [Reg] -> JS
jsMKCON info r t rs =
  JSAssign (translateReg r) (
    JSNew "i$CON" [ JSNum (JSInt t)
                  , JSArray (map translateReg rs)
                  , if t `elem` compileInfoApplyCases info
                       then JSIdent $ translateName (sMN 0 "APPLY") ++ "$" ++ show t
                       else JSNull
                  , if t `elem` compileInfoEvalCases info
                       then JSIdent $ translateName (sMN 0 "EVAL") ++ "$" ++ show t
                       else JSNull
                  ]
  )
-- | Translate a constructor CASE to a JS switch on the scrutinee's tag.
-- @safe@ means the scrutinee is statically known to be a constructor, so
-- the tag can be read directly ('jsCTAG') without an instanceof check.
jsCASE :: CompileInfo -> Bool -> Reg -> [(Int, [BC])] -> Maybe [BC] -> JS
jsCASE info safe reg cases def =
  JSSwitch (tag safe $ translateReg reg) (
    map ((JSNum . JSInt) *** prepBranch) cases
  ) (fmap prepBranch def)
  where
    tag :: Bool -> JS -> JS
    tag True = jsCTAG
    tag False = jsTAG

    prepBranch :: [BC] -> JS
    prepBranch bc = JSSeq $ map (translateBC info) bc

-- | Checked tag read: yields -1 for anything that is not an i$CON, so no
-- switch case matches.
jsTAG :: JS -> JS
jsTAG js =
  (JSTernary (js `jsInstanceOf` "i$CON") (
    JSProj js "tag"
  ) (JSNum (JSInt $ negate 1)))

-- | Unchecked tag read.
jsCTAG :: JS -> JS
jsCTAG js = JSProj js "tag"
-- | Translate a CASE over constants to an if/else-if chain comparing the
-- register against each literal; the default branch (if any) becomes a
-- trailing JSNoop-guarded arm, i.e. a plain @else@.
jsCONSTCASE :: CompileInfo -> Reg -> [(Const, [BC])] -> Maybe [BC] -> JS
jsCONSTCASE info reg cases def =
  JSCond $ (
    map (jsEq (translateReg reg) . translateConstant *** prepBranch) cases
  ) ++ (maybe [] ((:[]) . ((,) JSNoop) . prepBranch) def)
  where
    prepBranch :: [BC] -> JS
    prepBranch bc = JSSeq $ map (translateBC info) bc
-- | Copy @ar@ constructor arguments of @reg@ into locals starting at
-- slot @loc@.  Arities 0 and 1 are inlined; larger arities call the
-- runtime helper @i$PROJECT@.
jsPROJECT :: CompileInfo -> Reg -> Int -> Int -> JS
jsPROJECT _ reg loc 0 = JSNoop
jsPROJECT _ reg loc 1 =
  JSAssign (jsLOC loc) (
    JSIndex (
      JSProj (translateReg reg) "args"
    ) (
      JSNum (JSInt 0)
    )
  )
jsPROJECT _ reg loc ar =
  JSApp (JSIdent "i$PROJECT") [ translateReg reg
                              , JSNum (JSInt loc)
                              , JSNum (JSInt ar)
                              ]
-- | Translate a primitive operation to a JS assignment into @reg@.
-- The guard chain dispatches on the 'PrimFn' constructor and, via pattern
-- guards, on the expected argument count; any op falling through every
-- guard becomes a runtime 'JSError'.  Comparisons are coerced to Idris
-- booleans (0/1) with a JS prefix @+@.
jsOP :: CompileInfo -> Reg -> PrimFn -> [Reg] -> JS
jsOP _ reg op args = JSAssign (translateReg reg) jsOP'
  where
    jsOP' :: JS
    jsOP'
      -- misc / IO primitives
      | LNoOp <- op = translateReg (last args)
      | LWriteStr <- op,
        (_:str:_) <- args = JSApp (JSIdent "i$putStr") [translateReg str]
      | LReadStr <- op = JSApp (JSIdent "i$getLine") []
      -- zero-extension: fixed-width bit values unpack to plain JS numbers
      | (LZExt (ITFixed IT8) ITNative) <- op = jsUnPackBits $ translateReg (last args)
      | (LZExt (ITFixed IT16) ITNative) <- op = jsUnPackBits $ translateReg (last args)
      | (LZExt (ITFixed IT32) ITNative) <- op = jsUnPackBits $ translateReg (last args)
      | (LZExt _ ITBig) <- op = jsBigInt $ JSApp (JSIdent "String") [translateReg (last args)]
      -- arbitrary-precision (ITBig) arithmetic/comparison via big-integer methods
      | (LPlus (ATInt ITBig)) <- op
      , (lhs:rhs:_) <- args = invokeMeth lhs "add" [rhs]
      | (LMinus (ATInt ITBig)) <- op
      , (lhs:rhs:_) <- args = invokeMeth lhs "subtract" [rhs]
      | (LTimes (ATInt ITBig)) <- op
      , (lhs:rhs:_) <- args = invokeMeth lhs "multiply" [rhs]
      | (LSDiv (ATInt ITBig)) <- op
      , (lhs:rhs:_) <- args = invokeMeth lhs "divide" [rhs]
      | (LSRem (ATInt ITBig)) <- op
      , (lhs:rhs:_) <- args = invokeMeth lhs "mod" [rhs]
      | (LEq (ATInt ITBig)) <- op
      , (lhs:rhs:_) <- args = JSPreOp "+" $ invokeMeth lhs "equals" [rhs]
      | (LSLt (ATInt ITBig)) <- op
      , (lhs:rhs:_) <- args = JSPreOp "+" $ invokeMeth lhs "lesser" [rhs]
      | (LSLe (ATInt ITBig)) <- op
      , (lhs:rhs:_) <- args = JSPreOp "+" $ invokeMeth lhs "lesserOrEquals" [rhs]
      | (LSGt (ATInt ITBig)) <- op
      , (lhs:rhs:_) <- args = JSPreOp "+" $ invokeMeth lhs "greater" [rhs]
      | (LSGe (ATInt ITBig)) <- op
      , (lhs:rhs:_) <- args = JSPreOp "+" $ invokeMeth lhs "greaterOrEquals" [rhs]
      -- native float arithmetic / comparisons
      | (LPlus ATFloat) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "+" lhs rhs
      | (LMinus ATFloat) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "-" lhs rhs
      | (LTimes ATFloat) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "*" lhs rhs
      | (LSDiv ATFloat) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "/" lhs rhs
      | (LEq ATFloat) <- op
      , (lhs:rhs:_) <- args = translateCompareOp "==" lhs rhs
      | (LSLt ATFloat) <- op
      , (lhs:rhs:_) <- args = translateCompareOp "<" lhs rhs
      | (LSLe ATFloat) <- op
      , (lhs:rhs:_) <- args = translateCompareOp "<=" lhs rhs
      | (LSGt ATFloat) <- op
      , (lhs:rhs:_) <- args = translateCompareOp ">" lhs rhs
      | (LSGe ATFloat) <- op
      , (lhs:rhs:_) <- args = translateCompareOp ">=" lhs rhs
      -- char arithmetic goes through char codes
      | (LPlus (ATInt ITChar)) <- op
      , (lhs:rhs:_) <- args =
          jsCall "i$fromCharCode" [
            JSBinOp "+" (
              jsCall "i$charCode" [translateReg lhs]
            ) (
              jsCall "i$charCode" [translateReg rhs]
            )
          ]
      -- truncation between fixed widths masks to the target width
      | (LTrunc (ITFixed IT16) (ITFixed IT8)) <- op
      , (arg:_) <- args =
          jsPackUBits8 (
            JSBinOp "&" (jsUnPackBits $ translateReg arg) (JSNum (JSInt 0xFF))
          )
      | (LTrunc (ITFixed IT32) (ITFixed IT16)) <- op
      , (arg:_) <- args =
          jsPackUBits16 (
            JSBinOp "&" (jsUnPackBits $ translateReg arg) (JSNum (JSInt 0xFFFF))
          )
      | (LTrunc (ITFixed IT64) (ITFixed IT32)) <- op
      , (arg:_) <- args =
          jsPackUBits32 (
            jsMeth (jsMeth (translateReg arg) "and" [
              jsBigInt (JSString $ show 0xFFFFFFFF)
            ]) "intValue" []
          )
      | (LTrunc ITBig (ITFixed IT64)) <- op
      , (arg:_) <- args =
          jsMeth (translateReg arg) "and" [
            jsBigInt (JSString $ show 0xFFFFFFFFFFFFFFFF)
          ]
      -- shifts / bitwise ops: 8/16/32-bit via typed-array packing,
      -- 64-bit via big-int methods (masked back to 64 bits where needed)
      | (LLSHR (ITFixed IT8)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits8 $ bitsBinaryOp ">>" lhs rhs
      | (LLSHR (ITFixed IT16)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits16 $ bitsBinaryOp ">>" lhs rhs
      | (LLSHR (ITFixed IT32)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits32 $ bitsBinaryOp ">>" lhs rhs
      | (LLSHR (ITFixed IT64)) <- op
      , (lhs:rhs:_) <- args =
          jsMeth (translateReg lhs) "shiftRight" [translateReg rhs]
      | (LSHL (ITFixed IT8)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits8 $ bitsBinaryOp "<<" lhs rhs
      | (LSHL (ITFixed IT16)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits16 $ bitsBinaryOp "<<" lhs rhs
      | (LSHL (ITFixed IT32)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits32 $ bitsBinaryOp "<<" lhs rhs
      | (LSHL (ITFixed IT64)) <- op
      , (lhs:rhs:_) <- args =
          jsMeth (jsMeth (translateReg lhs) "shiftLeft" [translateReg rhs]) "and" [
            jsBigInt (JSString $ show 0xFFFFFFFFFFFFFFFF)
          ]
      | (LAnd (ITFixed IT8)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits8 $ bitsBinaryOp "&" lhs rhs
      | (LAnd (ITFixed IT16)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits16 $ bitsBinaryOp "&" lhs rhs
      | (LAnd (ITFixed IT32)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits32 $ bitsBinaryOp "&" lhs rhs
      | (LAnd (ITFixed IT64)) <- op
      , (lhs:rhs:_) <- args =
          jsMeth (translateReg lhs) "and" [translateReg rhs]
      | (LOr (ITFixed IT8)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits8 $ bitsBinaryOp "|" lhs rhs
      | (LOr (ITFixed IT16)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits16 $ bitsBinaryOp "|" lhs rhs
      | (LOr (ITFixed IT32)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits32 $ bitsBinaryOp "|" lhs rhs
      | (LOr (ITFixed IT64)) <- op
      , (lhs:rhs:_) <- args =
          jsMeth (translateReg lhs) "or" [translateReg rhs]
      | (LXOr (ITFixed IT8)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits8 $ bitsBinaryOp "^" lhs rhs
      | (LXOr (ITFixed IT16)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits16 $ bitsBinaryOp "^" lhs rhs
      | (LXOr (ITFixed IT32)) <- op
      , (lhs:rhs:_) <- args = jsPackUBits32 $ bitsBinaryOp "^" lhs rhs
      | (LXOr (ITFixed IT64)) <- op
      , (lhs:rhs:_) <- args =
          jsMeth (translateReg lhs) "xor" [translateReg rhs]
      -- fixed-width arithmetic
      | (LPlus (ATInt (ITFixed IT8))) <- op
      , (lhs:rhs:_) <- args = jsPackUBits8 $ bitsBinaryOp "+" lhs rhs
      | (LPlus (ATInt (ITFixed IT16))) <- op
      , (lhs:rhs:_) <- args = jsPackUBits16 $ bitsBinaryOp "+" lhs rhs
      | (LPlus (ATInt (ITFixed IT32))) <- op
      , (lhs:rhs:_) <- args = jsPackUBits32 $ bitsBinaryOp "+" lhs rhs
      | (LPlus (ATInt (ITFixed IT64))) <- op
      , (lhs:rhs:_) <- args =
          jsMeth (jsMeth (translateReg lhs) "add" [translateReg rhs]) "and" [
            jsBigInt (JSString $ show 0xFFFFFFFFFFFFFFFF)
          ]
      | (LMinus (ATInt (ITFixed IT8))) <- op
      , (lhs:rhs:_) <- args = jsPackUBits8 $ bitsBinaryOp "-" lhs rhs
      | (LMinus (ATInt (ITFixed IT16))) <- op
      , (lhs:rhs:_) <- args = jsPackUBits16 $ bitsBinaryOp "-" lhs rhs
      | (LMinus (ATInt (ITFixed IT32))) <- op
      , (lhs:rhs:_) <- args = jsPackUBits32 $ bitsBinaryOp "-" lhs rhs
      | (LMinus (ATInt (ITFixed IT64))) <- op
      , (lhs:rhs:_) <- args =
          jsMeth (jsMeth (translateReg lhs) "subtract" [translateReg rhs]) "and" [
            jsBigInt (JSString $ show 0xFFFFFFFFFFFFFFFF)
          ]
      | (LTimes (ATInt (ITFixed IT8))) <- op
      , (lhs:rhs:_) <- args = jsPackUBits8 $ bitsBinaryOp "*" lhs rhs
      | (LTimes (ATInt (ITFixed IT16))) <- op
      , (lhs:rhs:_) <- args = jsPackUBits16 $ bitsBinaryOp "*" lhs rhs
      | (LTimes (ATInt (ITFixed IT32))) <- op
      , (lhs:rhs:_) <- args = jsPackUBits32 $ bitsBinaryOp "*" lhs rhs
      | (LTimes (ATInt (ITFixed IT64))) <- op
      , (lhs:rhs:_) <- args =
          jsMeth (jsMeth (translateReg lhs) "multiply" [translateReg rhs]) "and" [
            jsBigInt (JSString $ show 0xFFFFFFFFFFFFFFFF)
          ]
      -- fixed-width comparisons
      | (LEq (ATInt (ITFixed IT8))) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp "==" lhs rhs
      | (LEq (ATInt (ITFixed IT16))) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp "==" lhs rhs
      | (LEq (ATInt (ITFixed IT32))) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp "==" lhs rhs
      | (LEq (ATInt (ITFixed IT64))) <- op
      , (lhs:rhs:_) <- args = JSPreOp "+" $ invokeMeth lhs "equals" [rhs]
      | (LLt (ITFixed IT8)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp "<" lhs rhs
      | (LLt (ITFixed IT16)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp "<" lhs rhs
      | (LLt (ITFixed IT32)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp "<" lhs rhs
      | (LLt (ITFixed IT64)) <- op
      , (lhs:rhs:_) <- args = JSPreOp "+" $ invokeMeth lhs "lesser" [rhs]
      | (LLe (ITFixed IT8)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp "<=" lhs rhs
      | (LLe (ITFixed IT16)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp "<=" lhs rhs
      | (LLe (ITFixed IT32)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp "<=" lhs rhs
      | (LLe (ITFixed IT64)) <- op
      , (lhs:rhs:_) <- args = JSPreOp "+" $ invokeMeth lhs "lesserOrEquals" [rhs]
      | (LGt (ITFixed IT8)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp ">" lhs rhs
      | (LGt (ITFixed IT16)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp ">" lhs rhs
      | (LGt (ITFixed IT32)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp ">" lhs rhs
      | (LGt (ITFixed IT64)) <- op
      , (lhs:rhs:_) <- args = JSPreOp "+" $ invokeMeth lhs "greater" [rhs]
      | (LGe (ITFixed IT8)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp ">=" lhs rhs
      | (LGe (ITFixed IT16)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp ">=" lhs rhs
      | (LGe (ITFixed IT32)) <- op
      , (lhs:rhs:_) <- args = bitsCompareOp ">=" lhs rhs
      | (LGe (ITFixed IT64)) <- op
      , (lhs:rhs:_) <- args = JSPreOp "+" $ invokeMeth lhs "greaterOrEquals" [rhs]
      -- unsigned division
      | (LUDiv (ITFixed IT8)) <- op
      , (lhs:rhs:_) <- args =
          jsPackUBits8 (
            JSBinOp "/" (jsUnPackBits (translateReg lhs)) (jsUnPackBits (translateReg rhs))
          )
      | (LUDiv (ITFixed IT16)) <- op
      , (lhs:rhs:_) <- args =
          jsPackUBits16 (
            JSBinOp "/" (jsUnPackBits (translateReg lhs)) (jsUnPackBits (translateReg rhs))
          )
      | (LUDiv (ITFixed IT32)) <- op
      , (lhs:rhs:_) <- args =
          jsPackUBits32 (
            JSBinOp "/" (jsUnPackBits (translateReg lhs)) (jsUnPackBits (translateReg rhs))
          )
      | (LUDiv (ITFixed IT64)) <- op
      , (lhs:rhs:_) <- args = invokeMeth lhs "divide" [rhs]
      -- signed division/remainder: operands are re-signed by a
      -- pack-signed/unpack round trip before the JS division
      | (LSDiv (ATInt (ITFixed IT8))) <- op
      , (lhs:rhs:_) <- args =
          jsPackSBits8 (
            JSBinOp "/" (
              jsUnPackBits $ jsPackSBits8 $ jsUnPackBits (translateReg lhs)
            ) (
              jsUnPackBits $ jsPackSBits8 $ jsUnPackBits (translateReg rhs)
            )
          )
      | (LSDiv (ATInt (ITFixed IT16))) <- op
      , (lhs:rhs:_) <- args =
          jsPackSBits16 (
            JSBinOp "/" (
              jsUnPackBits $ jsPackSBits16 $ jsUnPackBits (translateReg lhs)
            ) (
              jsUnPackBits $ jsPackSBits16 $ jsUnPackBits (translateReg rhs)
            )
          )
      | (LSDiv (ATInt (ITFixed IT32))) <- op
      , (lhs:rhs:_) <- args =
          jsPackSBits32 (
            JSBinOp "/" (
              jsUnPackBits $ jsPackSBits32 $ jsUnPackBits (translateReg lhs)
            ) (
              jsUnPackBits $ jsPackSBits32 $ jsUnPackBits (translateReg rhs)
            )
          )
      | (LSDiv (ATInt (ITFixed IT64))) <- op
      , (lhs:rhs:_) <- args = invokeMeth lhs "divide" [rhs]
      | (LSRem (ATInt (ITFixed IT8))) <- op
      , (lhs:rhs:_) <- args =
          jsPackSBits8 (
            JSBinOp "%" (
              jsUnPackBits $ jsPackSBits8 $ jsUnPackBits (translateReg lhs)
            ) (
              jsUnPackBits $ jsPackSBits8 $ jsUnPackBits (translateReg rhs)
            )
          )
      | (LSRem (ATInt (ITFixed IT16))) <- op
      , (lhs:rhs:_) <- args =
          jsPackSBits16 (
            JSBinOp "%" (
              jsUnPackBits $ jsPackSBits16 $ jsUnPackBits (translateReg lhs)
            ) (
              jsUnPackBits $ jsPackSBits16 $ jsUnPackBits (translateReg rhs)
            )
          )
      | (LSRem (ATInt (ITFixed IT32))) <- op
      , (lhs:rhs:_) <- args =
          jsPackSBits32 (
            JSBinOp "%" (
              jsUnPackBits $ jsPackSBits32 $ jsUnPackBits (translateReg lhs)
            ) (
              jsUnPackBits $ jsPackSBits32 $ jsUnPackBits (translateReg rhs)
            )
          )
      | (LSRem (ATInt (ITFixed IT64))) <- op
      , (lhs:rhs:_) <- args = invokeMeth lhs "mod" [rhs]
      -- bitwise complement
      | (LCompl (ITFixed IT8)) <- op
      , (arg:_) <- args =
          jsPackSBits8 $ JSPreOp "~" $ jsUnPackBits (translateReg arg)
      | (LCompl (ITFixed IT16)) <- op
      , (arg:_) <- args =
          jsPackSBits16 $ JSPreOp "~" $ jsUnPackBits (translateReg arg)
      | (LCompl (ITFixed IT32)) <- op
      , (arg:_) <- args =
          jsPackSBits32 $ JSPreOp "~" $ jsUnPackBits (translateReg arg)
      | (LCompl (ITFixed IT64)) <- op
      , (arg:_) <- args = invokeMeth arg "not" []
      -- generic (native Int) fallbacks
      | (LPlus _) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "+" lhs rhs
      | (LMinus _) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "-" lhs rhs
      | (LTimes _) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "*" lhs rhs
      | (LSDiv _) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "/" lhs rhs
      | (LSRem _) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "%" lhs rhs
      | (LEq _) <- op
      , (lhs:rhs:_) <- args = translateCompareOp "==" lhs rhs
      | (LSLt _) <- op
      , (lhs:rhs:_) <- args = translateCompareOp "<" lhs rhs
      | (LSLe _) <- op
      , (lhs:rhs:_) <- args = translateCompareOp "<=" lhs rhs
      | (LSGt _) <- op
      , (lhs:rhs:_) <- args = translateCompareOp ">" lhs rhs
      | (LSGe _) <- op
      , (lhs:rhs:_) <- args = translateCompareOp ">=" lhs rhs
      | (LAnd _) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "&" lhs rhs
      | (LOr _) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "|" lhs rhs
      | (LXOr _) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "^" lhs rhs
      -- NOTE(review): the generic shift cases pass the operands as
      -- (rhs, lhs), swapped relative to every other binary op above;
      -- verify against the bytecode's argument order before "fixing".
      | (LSHL _) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "<<" rhs lhs
      | (LASHR _) <- op
      , (lhs:rhs:_) <- args = translateBinaryOp ">>" rhs lhs
      | (LCompl _) <- op
      , (arg:_) <- args = JSPreOp "~" (translateReg arg)
      -- string primitives
      | LStrConcat <- op
      , (lhs:rhs:_) <- args = translateBinaryOp "+" lhs rhs
      | LStrEq <- op
      , (lhs:rhs:_) <- args = translateCompareOp "==" lhs rhs
      | LStrLt <- op
      , (lhs:rhs:_) <- args = translateCompareOp "<" lhs rhs
      | LStrLen <- op
      , (arg:_) <- args = JSProj (translateReg arg) "length"
      -- conversions
      | (LStrInt ITNative) <- op
      , (arg:_) <- args = jsCall "parseInt" [translateReg arg]
      | (LIntStr ITNative) <- op
      , (arg:_) <- args = jsCall "String" [translateReg arg]
      | (LSExt ITNative ITBig) <- op
      , (arg:_) <- args = jsBigInt $ jsCall "String" [translateReg arg]
      | (LTrunc ITBig ITNative) <- op
      , (arg:_) <- args = jsMeth (translateReg arg) "intValue" []
      | (LIntStr ITBig) <- op
      , (arg:_) <- args = jsMeth (translateReg arg) "toString" []
      | (LStrInt ITBig) <- op
      , (arg:_) <- args = jsBigInt $ translateReg arg
      | LFloatStr <- op
      , (arg:_) <- args = jsCall "String" [translateReg arg]
      | LStrFloat <- op
      , (arg:_) <- args = jsCall "parseFloat" [translateReg arg]
      | (LIntFloat ITNative) <- op
      , (arg:_) <- args = translateReg arg
      | (LIntFloat ITBig) <- op
      , (arg:_) <- args = jsMeth (translateReg arg) "intValue" []
      | (LFloatInt ITNative) <- op
      , (arg:_) <- args = translateReg arg
      | (LChInt ITNative) <- op
      , (arg:_) <- args = jsCall "i$charCode" [translateReg arg]
      | (LIntCh ITNative) <- op
      , (arg:_) <- args = jsCall "i$fromCharCode" [translateReg arg]
      -- Math.* float functions
      | LFExp <- op
      , (arg:_) <- args = jsCall "Math.exp" [translateReg arg]
      | LFLog <- op
      , (arg:_) <- args = jsCall "Math.log" [translateReg arg]
      | LFSin <- op
      , (arg:_) <- args = jsCall "Math.sin" [translateReg arg]
      | LFCos <- op
      , (arg:_) <- args = jsCall "Math.cos" [translateReg arg]
      | LFTan <- op
      , (arg:_) <- args = jsCall "Math.tan" [translateReg arg]
      | LFASin <- op
      , (arg:_) <- args = jsCall "Math.asin" [translateReg arg]
      | LFACos <- op
      , (arg:_) <- args = jsCall "Math.acos" [translateReg arg]
      | LFATan <- op
      , (arg:_) <- args = jsCall "Math.atan" [translateReg arg]
      | LFSqrt <- op
      , (arg:_) <- args = jsCall "Math.sqrt" [translateReg arg]
      | LFFloor <- op
      , (arg:_) <- args = jsCall "Math.floor" [translateReg arg]
      | LFCeil <- op
      , (arg:_) <- args = jsCall "Math.ceil" [translateReg arg]
      | LFNegate <- op
      , (arg:_) <- args = JSPreOp "-" (translateReg arg)
      -- more string primitives
      | LStrCons <- op
      , (lhs:rhs:_) <- args = invokeMeth lhs "concat" [rhs]
      | LStrHead <- op
      , (arg:_) <- args = JSIndex (translateReg arg) (JSNum (JSInt 0))
      | LStrRev <- op
      , (arg:_) <- args = JSProj (translateReg arg) "split('').reverse().join('')"
      | LStrIndex <- op
      , (lhs:rhs:_) <- args = JSIndex (translateReg lhs) (translateReg rhs)
      | LStrTail <- op
      , (arg:_) <- args =
          let v = translateReg arg in
            JSApp (JSProj v "substr") [
              JSNum (JSInt 1),
              JSBinOp "-" (JSProj v "length") (JSNum (JSInt 1))
            ]
      | LSystemInfo <- op
      , (arg:_) <- args = jsCall "i$systemInfo" [translateReg arg]
      -- externals
      | LExternal nul <- op
      , nul == sUN "prim__null"
      , _ <- args = JSNull
      | LExternal ex <- op
      , ex == sUN "prim__eqPtr"
      , [lhs, rhs] <- args = translateCompareOp "==" lhs rhs
      | otherwise = JSError $ "Not implemented: " ++ show op
      where
        -- JS infix op on two translated registers.
        translateBinaryOp :: String -> Reg -> Reg -> JS
        translateBinaryOp op lhs rhs =
          JSBinOp op (translateReg lhs) (translateReg rhs)
        -- Comparison coerced to an Idris boolean (0/1) via prefix "+".
        translateCompareOp :: String -> Reg -> Reg -> JS
        translateCompareOp op lhs rhs =
          JSPreOp "+" $ translateBinaryOp op lhs rhs
        -- Infix op on the unpacked numeric values of two bit registers.
        bitsBinaryOp :: String -> Reg -> Reg -> JS
        bitsBinaryOp op lhs rhs =
          JSBinOp op (jsUnPackBits (translateReg lhs)) (jsUnPackBits (translateReg rhs))
        bitsCompareOp :: String -> Reg -> Reg -> JS
        bitsCompareOp op lhs rhs =
          JSPreOp "+" $ bitsBinaryOp op lhs rhs
        -- obj.meth(args) on translated registers.
        invokeMeth :: Reg -> String -> [Reg] -> JS
        invokeMeth obj meth args =
          JSApp (JSProj (translateReg obj) meth) $ map translateReg args
-- | RESERVE is a no-op in this backend: the JS value stack grows on demand.
jsRESERVE :: CompileInfo -> Int -> JS
jsRESERVE _ _ = JSNoop

-- Well-known runtime identifiers: the value stack, the call stack, and
-- the base/top indices into the value stack.
jsSTACK :: JS
jsSTACK = JSIdent "i$valstack"

jsCALLSTACK :: JS
jsCALLSTACK = JSIdent "i$callstack"

jsSTACKBASE :: JS
jsSTACKBASE = JSIdent "i$valstack_base"

jsSTACKTOP :: JS
jsSTACKTOP = JSIdent "i$valstack_top"

-- Per-function locals: caller's base pointer and this frame's saved base.
jsOLDBASE :: JS
jsOLDBASE = JSIdent "oldbase"

jsMYOLDBASE :: JS
jsMYOLDBASE = JSIdent "myoldbase"

-- | The return-value register.
jsRET :: JS
jsRET = JSIdent "i$ret"

-- | Stack slot @base + n@ (the n == 0 case avoids emitting "+ 0").
jsLOC :: Int -> JS
jsLOC 0 = JSIndex jsSTACK jsSTACKBASE
jsLOC n = JSIndex jsSTACK (JSBinOp "+" jsSTACKBASE (JSNum (JSInt n)))

-- | Stack slot @top + n@ (same zero special-case).
jsTOP :: Int -> JS
jsTOP 0 = JSIndex jsSTACK jsSTACKTOP
jsTOP n = JSIndex jsSTACK (JSBinOp "+" jsSTACKTOP (JSNum (JSInt n)))

-- | Push/pop on the runtime call stack.
jsPUSH :: [JS] -> JS
jsPUSH args = JSApp (JSProj jsCALLSTACK "push") args

jsPOP :: JS
jsPOP = JSApp (JSProj jsCALLSTACK "pop") []
-- | Dispatch one bytecode instruction to its JS translation.
-- Foreign calls must have a statically-known name; any instruction with
-- no clause is emitted as a JS comment rather than failing.
translateBC :: CompileInfo -> BC -> JS
translateBC info bc
  | ASSIGN r1 r2 <- bc = jsASSIGN info r1 r2
  | ASSIGNCONST r c <- bc = jsASSIGNCONST info r c
  | UPDATE r1 r2 <- bc = jsASSIGN info r1 r2
  | ADDTOP n <- bc = jsADDTOP info n
  | NULL r <- bc = jsNULL info r
  | CALL n <- bc = jsCALL info n
  | TAILCALL n <- bc = jsTAILCALL info n
  | FOREIGNCALL r _ (FStr n) args
                  <- bc = jsFOREIGN info r n (map fcall args)
  | FOREIGNCALL _ _ _ _ <- bc = error "JS FFI call not statically known"
  | TOPBASE n <- bc = jsTOPBASE info n
  | BASETOP n <- bc = jsBASETOP info n
  | STOREOLD <- bc = jsSTOREOLD info
  | SLIDE n <- bc = jsSLIDE info n
  | REBASE <- bc = jsREBASE info
  | RESERVE n <- bc = jsRESERVE info n
  | MKCON r _ t rs <- bc = jsMKCON info r t rs
  | CASE s r c d <- bc = jsCASE info s r c d
  | CONSTCASE r c d <- bc = jsCONSTCASE info r c d
  | PROJECT r l a <- bc = jsPROJECT info r l a
  | OP r o a <- bc = jsOP info r o a
  | ERROR e <- bc = jsERROR info e
  | otherwise = JSRaw $ "//" ++ show bc
  where fcall (t, arg) = (toFType t, arg)
        -- Map the JS FFI type descriptors to arithmetic/function types.
        -- NOTE(review): all three mappers are partial by design --
        -- unexpected descriptors abort with 'error'.
        toAType (FCon i)
          | i == sUN "JS_IntChar" = ATInt ITChar
          | i == sUN "JS_IntNative" = ATInt ITNative
        toAType t = error (show t ++ " not defined in toAType")
        -- Curried JS_Fn chains are followed to their final result kind.
        toFnType (FApp c [_,_,s,t])
          | c == sUN "JS_Fn" = toFnType t
        toFnType (FApp c [_,_,r])
          | c == sUN "JS_FnIO" = FFunctionIO
        toFnType (FApp c [_,r])
          | c == sUN "JS_FnBase" = FFunction
        toFnType t = error (show t ++ " not defined in toFnType")
        toFType (FCon c)
          | c == sUN "JS_Str" = FString
          | c == sUN "JS_Float" = FArith ATFloat
          | c == sUN "JS_Ptr" = FPtr
          | c == sUN "JS_Unit" = FUnit
        toFType (FApp c [_,ity])
          | c == sUN "JS_IntT" = FArith (toAType ity)
        toFType (FApp c [_,fty])
          | c == sUN "JS_FnT" = toFnType fty
        toFType t = error (show t ++ " not yet defined in toFType")
-- source: stevejb71/Idris-dev src/IRTS/CodegenJavaScript.hs (bsd-3-clause)
module Range () where
import Language.Haskell.Liquid.Prelude
-- LiquidHaskell regression test: the checker must prove that 'xs'
-- (a singleton built by 'goo') is non-empty, so 'poo' never reaches
-- its [] clause.
goo x = let z = [x] in z
-- Trivial boolean helpers (not referenced by the property below).
z0 _ = True
z1 _ = False
-- Returns True on any non-empty list; reaching the [] clause would
-- assert False at the refinement level.
poo (x:_) = 0 == 0
poo ([]) = liquidAssertB False
xs = goo (choose 0)
prop1 = liquidAssertB (poo xs)
-- source: mightymoose/liquidhaskell tests/pos/meas0.hs (bsd-3-clause)
module Data.ByteString.Utils
( lazifyBS, strictifyBS, randomBS, xorBS, ljust
) where
import Control.Monad (replicateM)
import Data.Bits (xor)
import Data.Function (on)
import Data.Random.Instances()
import Data.Word (Word8)
import System.Random (randomIO)
import qualified Data.ByteString as SBS
import qualified Data.ByteString.Lazy as LBS
-- | Collapse a lazy ByteString into a single strict chunk.
strictifyBS :: LBS.ByteString -> SBS.ByteString
strictifyBS lbs = SBS.concat (LBS.toChunks lbs)
-- | View a strict ByteString as a single-chunk lazy one.
lazifyBS :: SBS.ByteString -> LBS.ByteString
lazifyBS sbs = LBS.fromChunks [sbs]
-- | Generate @l@ random bytes from the global 'randomIO' generator.
-- NOTE(review): System.Random is not a CSPRNG -- do not use where
-- cryptographic-quality randomness is required.
randomBS :: Int -> IO SBS.ByteString
randomBS l = SBS.pack `fmap` replicateM l randomIO
-- | Pad a ByteString on the right with @chr@ up to @len@ bytes.  Inputs
-- already at least @len@ long are returned unchanged, because the pad
-- computed below is empty (replicate of a non-positive count).
ljust :: Int -> Word8 -> SBS.ByteString -> SBS.ByteString
ljust len chr s = SBS.append s padding
  where
    padding = SBS.replicate (len - SBS.length s) chr
-- | Bytewise XOR of two ByteStrings.  The shorter argument is first
-- right-padded with zero bytes ('ljust') to the length of the longer one,
-- so the result always has @max (length x) (length y)@ bytes.
xorBS :: SBS.ByteString -> SBS.ByteString -> SBS.ByteString
xorBS x y =
  SBS.pack $ on (SBS.zipWith xor) (ljust l 0) x y
  where
    -- Common length: the longer of the two inputs.
    l = on max SBS.length x y
-- source: jgrimes/lamdu bottlelib/Data/ByteString/Utils.hs (gpl-3.0)
module CodeGen.Metadata.DirectImage where
import Data.Maybe
import Data.List
import Data.Serialize
import GHC.Generics
import Utils.Utils
import CodeGen.HaskellCode
import Server.NutleyInstance
import CodeGen.Metadata.Metadata
import Data.Name
import Data.Schema
import Data.Types
import CodeGen.TupleUtils
import CodeGen.NutleyQueryUtils
import qualified Crypto.Hash.SHA256 as SHA
-- | Wrap existing metadata @md@ as its direct image along schema map @f@.
-- (No explicit type signature in the original; the type is inferred from
-- the 'DirectImageMetadata' record.)
directImage f md = DirectImageMetadata
  {
    directImageName = "dirim" ++ (name md),
    directImageMap = f,
    directImageInnerMetadata = md,
    -- Identity hash: fold the inner metadata's hash and the serialized
    -- map into one SHA-256, so distinct images of the same source differ.
    directImageHashCode = SHA.finalize $ foldr (flip SHA.update) SHA.init [dbHashCode md,encode f]
  }
-- | Build a lambda that rearranges one row tuple of the preimage schema
-- @siginv@ into the tuple layout of the image schema @img@ under map @f@.
-- For each image vertex, the input components whose vertex maps onto it
-- are selected, ordered by the inverse tuple position given by the map,
-- and regrouped into a nested tuple.
codeDirectImageRearange :: SchemaMap -> SubSchema -> SubSchema -> HaskellCode
codeDirectImageRearange f img siginv = Lam (nTupPat $ length invSimps)
  $ Tpl $ flip map (subSchemaVertices img)
    (\s -> Tpl $ map (Lit . ("x_"++) . show . fst)
      $ sortBy (compare `on` (fromJust.tupNatInverse.(mapVertexFunc f).snd))
      $ filter ((==s).(mapApplyVertex f).snd) invSimps)
  -- Preimage vertices paired with their 1-based tuple positions
  -- (matching the x_1, x_2, ... pattern variables bound by nTupPat).
  where invSimps = zip [1..] $ subSchemaVertices siginv
-- | Generate the section query for a direct image: delegate to the inner
-- metadata's section over the preimage schema, then rearrange each result
-- row into the image layout with 'codeDirectImageRearange'.
-- Returns the inner query dependency (bound as "i") together with the
-- generated function.
-- Fix: removed dataset-metadata junk ("| jvictor0/JoSQL | ...") that had
-- been fused onto the last line and made the clause unparseable.
codeDirectImageSection metadata ss =
  ([("i",SectionQuery (directImageInnerMetadata metadata) ss')],
   Fun (sectionFName metadata ss) (sectionType metadata ss)
   $ Lam (Fnp "DirectImage" [Ltp "instID"])
   $ Do [(Ltp "preresult",c_1 innerSecName $ Lit "instID"),
         do_return $ c_map (codeDirectImageRearange f ss ss') $ Lit "preresult"])
  where innerSecName = "i_" ++ (sectionFName (directImageInnerMetadata metadata) ss')
        -- Query the preimage of the requested sub-schema under the map.
        ss' = schemaPreimage ss f
        f = directImageMap metadata
-- source: jvictor0/JoSQL CodeGen/Metadata/DirectImage.hs (mit)
import Game.Cosanostra.Glue.Instances.JSON ()
import Game.Cosanostra.Glue.Plan
import Game.Cosanostra.Lenses
import Control.Lens
import qualified Data.ByteString.Char8 as BS
import Data.Yaml
import System.Environment
-- | Usage: @view-plan GAME PLAN@ -- both arguments are YAML files.
-- YAML decode failures abort via 'error'; a wrong argument count fails
-- the irrefutable list pattern below.
main :: IO ()
main = do
  [gamePath, planPath] <- getArgs
  game <- either error id <$> decodeEither <$> BS.readFile gamePath
  p <- either error id <$> decodeEither <$> BS.readFile planPath
  -- Render the plan's info against the game's players/actions/turn/phase
  -- and print it to stdout as YAML.
  BS.putStrLn $ encode (planInfo (game ^. gamePlayers)
                                 (game ^. gameActions)
                                 (game ^. gameTurn)
                                 (game ^. gamePhase)
                                 p)
-- source: rfw/cosanostra glue/view-plan.hs (mit)
-- | Core options, i.e. the options used by tasty itself
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-} -- for (^)
module Test.Tasty.Options.Core
( NumThreads(..)
, Timeout(..)
, mkTimeout
, coreOptions
)
where
import Data.Typeable
import Data.Proxy
import Data.Tagged
import Data.Fixed
import Options.Applicative
import Options.Applicative.Types (ReadM(..))
import Test.Tasty.Options
import Test.Tasty.Patterns
-- | Number of parallel threads to use for running tests.
--
-- Note that this is /not/ included in 'coreOptions'.
-- Instead, it's automatically included in the options for any
-- 'TestReporter' ingredient by 'ingredientOptions', because the way test
-- reporters are handled already involves parallelism. Other ingredients
-- may also choose to include this option.
-- | Number of threads used to run tests in parallel
-- (@-j@/@--num-threads@ on the command line).
newtype NumThreads = NumThreads { getNumThreads :: Int }
  deriving (Eq, Ord, Num, Typeable)

instance IsOption NumThreads where
  -- Sequential execution by default.
  defaultValue = 1
  parseValue = fmap NumThreads . safeRead
  optionName = return "num-threads"
  optionHelp = return "Number of threads to use for tests execution"
  -- Custom CLI parser so the option also gets the short form @-j@.
  optionCLParser =
    nullOption parse
      ( short 'j'
     <> long name
     <> help (untag (optionHelp :: Tagged NumThreads String))
      )
    where
      name = untag (optionName :: Tagged NumThreads String)
      -- Reuse 'parseValue'; a failed parse becomes an optparse error.
      parse =
        ReadM .
        maybe (Left (ErrorMsg $ "Could not parse " ++ name)) Right .
        parseValue
-- | Timeout to be applied to individual tests
-- | Timeout applied to individual tests (@-t@/@--timeout@).
data Timeout
  = Timeout Integer String
    -- ^ 'String' is the original representation of the timeout (such as
    -- @\"0.5m\"@), so that we can print it back. 'Integer' is the number of
    -- microseconds.
  | NoTimeout
  deriving (Show, Typeable)

instance IsOption Timeout where
  defaultValue = NoTimeout
  -- Keep both the parsed microsecond count and the user's original
  -- spelling, so the value can be displayed exactly as entered.
  parseValue str =
    Timeout
      <$> parseTimeout str
      <*> pure str
  optionName = return "timeout"
  optionHelp = return "Timeout for individual tests (suffixes: ms,s,m,h; default: s)"
  -- Custom CLI parser so the option also gets the short form @-t@.
  optionCLParser =
    nullOption parse
      ( short 't'
     <> long name
     <> help (untag (optionHelp :: Tagged Timeout String))
      )
    where
      name = untag (optionName :: Tagged Timeout String)
      parse =
        ReadM .
        maybe (Left (ErrorMsg $ "Could not parse " ++ name)) Right .
        parseValue
-- | Parse a timeout specification such as @\"500ms\"@, @\"2\"@ or
-- @\"1.5m\"@ into a number of microseconds.  A bare number defaults to
-- seconds; an unknown suffix (or an unparsable number) yields 'Nothing'.
parseTimeout :: String -> Maybe Integer
parseTimeout str =
  -- it sucks that there's no more direct way to convert to a number of
  -- microseconds
  (round :: Micro -> Integer) . (* 10^6) <$>
  case reads str of
    [(n, suffix)] ->
      -- @n@ is read at type 'Micro' (fixed-point, 6 decimal places), so
      -- fractional inputs like "0.5m" are represented exactly.
      case suffix of
        "ms" -> Just (n / 10^3)
        "" -> Just n
        "s" -> Just n
        "m" -> Just (n * 60)
        "h" -> Just (n * 60^2)
        _ -> Nothing
    _ -> Nothing
-- | A shortcut for creating 'Timeout' values
-- | A shortcut for creating 'Timeout' values
mkTimeout
  :: Integer -- ^ microseconds
  -> Timeout
mkTimeout n =
  Timeout n $
    -- Render the display form back in seconds, e.g. 500000 -> "0.5s"
    -- ('showFixed True' drops trailing zeros).
    showFixed True (fromInteger n / (10^6) :: Micro) ++ "s"

-- | The list of all core options, i.e. the options not specific to any
-- provider or ingredient, but to tasty itself. Currently contains
-- 'TestPattern' and 'Timeout'.
coreOptions :: [OptionDescription]
coreOptions =
  [ Option (Proxy :: Proxy TestPattern)
  , Option (Proxy :: Proxy Timeout)
  ]
| SAdams601/ParRegexSearch | test/tasty-0.9.0.1/Test/Tasty/Options/Core.hs | mit | 3,294 | 0 | 15 | 777 | 713 | 391 | 322 | 77 | 7 |
{-# LANGUAGE OverloadedStrings #-}
import Prelude hiding (putStrLn, readFile)
import qualified Prelude as P
import Control.Monad
import System.Directory
import System.Environment
import System.FilePath
import System.Posix.Files
import Data.Char
import Data.ByteString.Char8
import qualified Data.ByteString.Char8 as BS
import qualified Data.List as L
import Data.Map
import GHC.Exts (sortWith)
-- | Entry point: expect a source directory as the first argument and
-- print every cleaned word found in the @.txt@ files beneath it, one
-- word per line.  With no arguments, print the usage message instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    []        -> usage
    (dir : _) -> recurseGetWords dir >>= mapM_ putStrLn
-- printWordFrequencies words
-- printWordFrequencies :: [ByteString] -> IO ()
-- printWordFrequencies ws = do
-- let sorted_ws = L.sort ws
-- P.print sorted_ws
-- -- let freq_table = buildWordFrequencies ws
-- -- P.print freq_table
-- -- let freq_list = sortWith (negate . snd) $ toList freq_table
-- -- sequence_ $ P.map P.print freq_list
-- | Count the occurrences of each word, producing a word -> frequency map.
buildWordFrequencies :: [ByteString] -> Map ByteString Int
buildWordFrequencies ws = fromListWith (+) [(w, 1) | w <- ws]
-- | Print a one-line usage message naming the current executable.
usage :: IO ()
usage = getProgName >>= \prog -> P.putStrLn ("Usage: " ++ prog ++ " SOURCEDIR")
-- | Normalise a word: keep only ASCII letters, then lower-case them.
cleanWord :: ByteString -> ByteString
cleanWord w = BS.map toLower (BS.filter isAsciiLetter w)
  where isAsciiLetter c = isAlpha c && isAscii c
-- | Recursively collect cleaned words from every @.txt@ file under
-- @path@.  Directories are walked depth-first; files with any other
-- extension contribute nothing.
recurseGetWords :: FilePath -> IO [ByteString]
recurseGetWords path = do
  stat <- getFileStatus path
  if isDirectory stat
    then do
      files' <- getDirectoryContents path
      -- Drop the "." and ".." pseudo-entries before recursing.
      let files = P.filter (\x -> x /= ".." && x /= ".") files'
      liftM P.concat (sequence $ P.map recurseGetWords (P.map (path </>) files))
    else if takeExtension path == ".txt"
      then do
        contents <- readFile path
        -- ($!) forces the result to WHNF before returning.
        return $! P.filter (not . BS.null) . P.map cleanWord $ BS.words contents
      else return []
| bhamrick/puzzlib | analyze_corpus/GetWords.hs | mit | 1,886 | 0 | 18 | 473 | 509 | 270 | 239 | 43 | 3 |
{-# LANGUAGE LambdaCase #-}
module Oden.Core.Package where
import Oden.Identifier
import Oden.Metadata
import Oden.SourceInfo
import Oden.QualifiedName (PackageName)
-- | Header of a package definition: its source location and package name.
data PackageDeclaration
  = PackageDeclaration { packageDeclarationSourceInfo :: Metadata SourceInfo
                       , packageDeclarationName :: PackageName
                       }
  deriving (Show, Eq, Ord)

-- | A reference to an imported package: either a native package given
-- as path segments, or a foreign import given as a raw string.
data ImportReference
  = ImportReference (Metadata SourceInfo) [String]
  | ImportForeignReference (Metadata SourceInfo) String
  deriving (Show, Eq, Ord)

instance HasSourceInfo ImportReference where
  getSourceInfo =
    \case
      ImportReference (Metadata si) _ -> si
      ImportForeignReference (Metadata si) _ -> si
  setSourceInfo si =
    \case
      ImportReference _ pkgName ->
        ImportReference (Metadata si) pkgName
      ImportForeignReference _ pkgPath ->
        ImportForeignReference (Metadata si) pkgPath

-- | An imported package resolved to its contents @p@, keeping the
-- original reference and the identifier it is bound to.
data ImportedPackage p
  = ImportedPackage ImportReference Identifier p
  deriving (Show, Eq, Ord)
| oden-lang/oden | src/Oden/Core/Package.hs | mit | 1,056 | 0 | 11 | 258 | 252 | 134 | 118 | 28 | 0 |
{-# htermination genericDrop :: Int -> [a] -> [a] #-}
import List
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/List_genericDrop_1.hs | mit | 66 | 0 | 3 | 12 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE GeneralizedNewtypeDeriving, PatternSynonyms #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
{-# OPTIONS_HADDOCK not-home #-}
module Data.Urn.Internal (
-- * Types
-- ** Parameters of the trees
Weight, Index(..), Size(..),
-- ** Tree types (and constructors)
BTree(..), WTree(..), pattern WLeaf, pattern WNode, Urn(..),
-- * Sampling/lookup ('WTree's and 'BTree's)
sample, bsample,
-- * Insertion ('Urn's)
insert, uninsert,
-- * Update and construct ('WTree's)
update, replace, construct,
-- * General weight-based 'WTree' traversal
foldWTree,
-- * Raw random index generation
randomIndexWith,
-- * Debugging
showUrnTreeStructureWith,
showUrnTreeStructure
) where
import Data.Bits
import Data.Urn.Internal.AlmostPerfect
import Data.List.NonEmpty (NonEmpty(..))
-- For 'NFData' instances
import Control.DeepSeq
-- For the 'Show' instance
import qualified Data.Ord as Ord
import qualified Data.List as List
----------------------------------------
-- | Sampling weight of a single element.
type Weight = Word

-- | Index into the cumulative weight range of a tree/urn.
newtype Index = Index { getIndex :: Word } deriving (Eq, Ord, NFData)
-- This type is opaque, so there's no 'Show' instance.

-- | Number of elements in an 'Urn'.  Also used by 'foldWTree' as a bit
-- path into the tree (bits consumed least-significant first).
newtype Size = Size { getSize :: Word }
             deriving ( Eq, Ord, Show, Bounded, Enum
                      , Num, Real, Integral
                      , Bits, FiniteBits
                      , NFData )

-- | Tree shape without a cached weight: a leaf value or a binary node
-- over two weighted subtrees.
data BTree a = BLeaf a
             | BNode !(WTree a) !(WTree a)
             deriving (Eq, Ord, Show)

-- | A 'BTree' paired with its total weight.
data WTree a = WTree { weight :: !Weight
                     , btree :: !(BTree a) }
             deriving (Eq, Ord, Show)

pattern WLeaf :: Weight -> a -> WTree a
pattern WNode :: Weight -> WTree a -> WTree a -> WTree a
-- Convenience patterns to build/match weighted leaves and nodes directly.
pattern WLeaf w a = WTree { weight = w, btree = BLeaf a }
pattern WNode w l r = WTree { weight = w, btree = BNode l r }

-- | A weighted random-sampling urn: element count plus weighted tree.
data Urn a = Urn { size :: !Size
                 , wtree :: !(WTree a) }
-- TODO: 'Eq' and 'Ord' instances? We can provide an O(n²) 'Eq' instance, and
-- an O(n log n) 'Ord' instance; the 'Eq' instance goes down to O(n log n) if
-- we're willing to require an 'Ord' constraint.

instance NFData a => NFData (BTree a) where
  rnf (BLeaf a) = rnf a
  rnf (BNode l r) = rnf l `seq` rnf r

instance NFData a => NFData (WTree a) where
  rnf (WTree w t) = rnf w `seq` rnf t

instance NFData a => NFData (Urn a) where
  rnf (Urn size wt) = rnf size `seq` rnf wt
-- |This 'Show' instance prints out the elements from most-weighted to
-- least-weighted; however, do not rely on the order of equally-weighted
-- elements, as this may depend on details of the implementation.
instance Show a => Show (Urn a) where
  showsPrec p u = showParen (p > 10) $
    showString "fromList " . shows (toList [] $ wtree u) where
    -- Insertion sort by descending weight while flattening the tree.
    toList acc (WLeaf w a) = List.insertBy (flip $ Ord.comparing fst) (w,a) acc
    toList acc (WNode _ l r) = toList (toList acc l) r

-- | Render the tree structure for debugging, one node per line, using
-- @disp@ to show leaf values.
showUrnTreeStructureWith :: (a -> String) -> Urn a -> String
showUrnTreeStructureWith disp (Urn (Size size) wtree) =
  unlines $ ("Urn, size " ++ show size ++ ":") : strings wtree
  where
    strings (WLeaf w a) = ["(" ++ show w ++ ": " ++ disp a ++ ")"]
    strings (WNode w l r) = ("[" ++ show w ++ "]") :
                            " |" :
                            nest '+' '|' (strings l) ++
                            " |" :
                            nest '`' ' ' (strings r)
    -- Prefix the first child line with the branch character @cc@ and
    -- indent the remaining lines with the gutter character @gc@.
    nest cc gc (child:grandchildren) =
      ([' ',cc,'-'] ++ child) : map ([' ', gc, ' '] ++) grandchildren
    nest _ _ [] = []

-- | 'showUrnTreeStructureWith' specialised to 'show'.
showUrnTreeStructure :: Show a => Urn a -> String
showUrnTreeStructure = showUrnTreeStructureWith show
----------------------------------------
-- | Draw a random 'Index' uniformly from @[0, total weight)@ using the
-- supplied closed-range random generator.
randomIndexWith :: Functor f => ((Word,Word) -> f Word) -> Urn a -> f Index
randomIndexWith rand u = Index <$> rand (0, weight (wtree u) - 1)
{-# INLINABLE randomIndexWith #-}

-- | Select the element covering the given cumulative-weight index:
-- descend left while the index falls within the left subtree's weight,
-- otherwise descend right with the index shifted down by that weight.
bsample :: BTree a -> Index -> a
bsample (BLeaf a) _ =
  a
bsample (BNode (WTree wl l) (WTree _ r)) (Index i)
  | i < wl = bsample l (Index i)
  | otherwise = bsample r (Index $ i - wl)

-- | 'bsample' on a weighted tree.
sample :: WTree a -> Index -> a
sample = bsample . btree
{-# INLINABLE sample #-}

-- | Fold along the path encoded in the 'Size' argument (bits consumed
-- least-significant first: set bit -> right child, clear bit -> left),
-- applying @fLeaf@ at the reached leaf and @fLeft@/@fRight@ on the way
-- back up.
foldWTree :: (Weight -> a -> b)
          -> (Weight -> b -> WTree a -> b)
          -> (Weight -> WTree a -> b -> b)
          -> Size -> WTree a
          -> b
foldWTree fLeaf fLeft fRight = go where
  go _ (WLeaf w a) = fLeaf w a
  go path (WNode w l r) | path `testBit` 0 = fRight w l (go path' r)
                        | otherwise = fLeft w (go path' l) r
    where path' = path `shiftR` 1
{-# INLINABLE foldWTree #-}
-- | Insert a new weighted element, descending along the path given by
-- the current size (keeps the tree almost perfect); every weight on the
-- path is increased by the new weight.
insert :: Weight -> a -> Urn a -> Urn a
insert w' a' (Urn size wt) =
  Urn (size+1) $ foldWTree (\w a -> WNode (w+w') (WLeaf w a) (WLeaf w' a'))
                           (\w -> WNode (w+w'))
                           (\w -> WNode (w+w'))
                           size wt
{-# INLINABLE insert #-}

-- | Remove the element at the last insertion path (@size-1@).  Returns
-- its weight and value, the cumulative weight to its left, and the
-- remaining urn ('Nothing' when it was the only element).
uninsert :: Urn a -> (Weight, a, Weight, Maybe (Urn a))
uninsert (Urn size wt) =
  case foldWTree (\w a -> (w, a, 0, Nothing))
                 (\w ul' r -> case ul' of
                     (w', a', lb, Just l') -> (w', a', lb, Just $ WNode (w-w') l' r)
                     (w', a', lb, Nothing) -> (w', a', lb, Just r))
                 (\w l ur' -> case ur' of
                     (w', a', lb, Just r') -> (w', a', lb + weight l, Just $ WNode (w-w') l r')
                     (w', a', lb, Nothing) -> (w', a', lb + weight l, Just l))
                 (size-1) wt of
    (w', a', lb, mt) -> (w', a', lb, Urn (size-1) <$> mt)
{-# INLINABLE uninsert #-}

-- | Apply @upd@ to the element at the given index.  Returns the old
-- weight/value, the new weight/value, and the tree with the weights on
-- the path adjusted by the difference.
update :: (Weight -> a -> (Weight, a)) -> WTree a -> Index -> (Weight, a, Weight, a, WTree a)
update upd = go where
  go (WLeaf w a) _ =
    let (wNew, aNew) = upd w a
    in (w, a, wNew, aNew, WLeaf wNew aNew)
  go (WNode w l@(WTree wl _) r) (Index i)
    | i < wl = case go l (Index i) of
        (wOld, aOld, wNew, aNew, l') -> (wOld, aOld, wNew, aNew, WNode (w-wOld+wNew) l' r)
    | otherwise = case go r (Index $ i-wl) of
        (wOld, aOld, wNew, aNew, r') -> (wOld, aOld, wNew, aNew, WNode (w-wOld+wNew) l r')

-- | Replace the element at the given index with @wNew@/@aNew@,
-- returning the displaced weight and value alongside the new tree.
replace :: Weight -> a -> WTree a -> Index -> (Weight, a, WTree a)
replace wNew aNew = go where
  go (WLeaf w a) _ =
    (w, a, WLeaf wNew aNew)
  go (WNode w l@(WTree wl _) r) (Index i)
    | i < wl = case go l (Index i) of
        (w', a', l') -> (w', a', WNode (w-w'+wNew) l' r)
    | otherwise = case go r (Index $ i-wl) of
        (w', a', r') -> (w', a', WNode (w-w'+wNew) l r')

-- | Build an urn from a non-empty list of weighted elements via the
-- almost-perfect tree construction.
construct :: NonEmpty (Weight, a) -> Urn a
construct list = Urn (Size size) tree
  where
    size = fromIntegral $ length list
    tree = almostPerfect (\l r -> WNode (weight l + weight r) l r)
                         (uncurry WLeaf)
                         size
                         list
| antalsz/urn-random | src/Data/Urn/Internal.hs | mit | 6,912 | 0 | 16 | 2,190 | 2,693 | 1,441 | 1,252 | 139 | 3 |
-- | Athena.Utils.Show module.
-- Adapted from https://github.com/asr/apia.
{-# LANGUAGE CPP #-}
{-# LANGUAGE UnicodeSyntax #-}
module Athena.Utils.Show
( showListLn
, showLn
) where
------------------------------------------------------------------------------
-- | Version of 'show' adding a newline character.
-- | Render a value with 'show' and append a trailing newline.
showLn :: Show a => a -> String
showLn x = show x ++ "\n"

-- | Render each list element on its own line via 'showLn'; an empty
-- list renders as @\"[]\"@.
showListLn :: Show a => [a] -> String
showListLn xs
  | null xs   = "[]"
  | otherwise = concatMap showLn xs
| jonaprieto/athena | src/Athena/Utils/Show.hs | mit | 584 | 0 | 7 | 103 | 94 | 55 | 39 | 10 | 1 |
-- TP 2 - Dragon
-- Ferlicot-Delbecque Cyril.
module Main where
import Graphics.Gloss
-- | Open a 500x500 window and animate the dragon-curve construction
-- between two fixed endpoints.
main::IO()
main = animate(InWindow "Dragon" (500, 500) (200, 200)) white (dragonAnime' (50,250) (450,250))

-- | Animation frame: draws iteration @round t `mod` 18@ of the
-- 'dragon' sequence as a line.
dragonAnime :: RealFrac a => Point -> Point -> a -> Picture
dragonAnime a b t = Line (dragon a b !! (round t `mod` 18))
-- Question 1
-- | Keep every other element of a list, starting with the first.
alterne :: [a] -> [a]
alterne list = case list of
  []       -> []
  [y]      -> [y]
  (y:_:ys) -> y : alterne ys
-- Question 2
-- | Combine two lists element-wise with @f@, truncating to the shorter
-- list (the same contract as 'zipWith').
combine :: (a -> b -> c) -> [a] -> [b] -> [c]
combine = zipWith
-- Question 3
-- | One step of Pascal's triangle: from one row, compute the next by
-- summing each pair of horizontal neighbours (with implicit zeros at
-- both ends).  The empty row stays empty.
pasPascal :: [Integer] -> [Integer]
pasPascal row
  | null row  = []
  | otherwise = zipWith (+) (0 : row) (row ++ [0])

-- | All rows of Pascal's triangle, starting from @[1]@.
pascal :: [[Integer]]
pascal = iterate pasPascal [1]
-- Question 5
-- | Point to insert between A and B: the midpoint of the segment,
-- displaced perpendicularly by half the segment's direction vector.
pointAintercaler :: Point -> Point -> Point
pointAintercaler (xA, yA) (xB, yB) = (midX + halfDY, midY + halfDX)
  where midX   = (xA + xB) / 2
        midY   = (yA + yB) / 2
        halfDY = (yB - yA) / 2
        halfDX = (xA - xB) / 2
-- Question 6
-- | One refinement step of the dragon curve: between each pair of
-- consecutive points, insert the rotated midpoint, alternating the
-- rotation side (via the mutually recursive 'pasDragonInv') so the
-- curve folds in alternating directions.
pasDragon :: Path -> Path
pasDragon [] = []
pasDragon [x] = [x]
pasDragon (x:y:xs) = x:pointAintercaler x y:y:pasDragonInv (y:xs)
  where pasDragonInv [] = []
        pasDragonInv [_] = []
        pasDragonInv (x':y':xs') = pointAintercaler y' x':pasDragon (y':xs')
--Question 7
-- | The infinite sequence of dragon-curve approximations, starting from
-- the single segment @[pt1, pt2]@.
dragon :: Point -> Point -> [Path]
dragon pt1 pt2 = iterate pasDragon [pt1, pt2]
-- Question 8
-- | Dragon curve of order @n@ from @a@ to @b@, built recursively around
-- the rotated midpoint.  NOTE(review): the midpoint expression
-- @last (take 2 (dragonOrdre a b 1))@ is recomputed in both recursive
-- calls; fine for the small orders used here.
dragonOrdre :: Point -> Point -> Int -> Path
dragonOrdre a b 0 = [a,b]
dragonOrdre a b 1 = [a,pointAintercaler a b, b]
dragonOrdre a b n = dragonOrdre a (last (take 2 (dragonOrdre a b 1))) (n-1) ++ tail (reverse (dragonOrdre b (last (take 2 (dragonOrdre a b 1))) (n-1)))
--Question 9
-- | The curves of orders 2..n+1 (empty list for @n = 0@).
listeSegment :: Point -> Point -> Int -> [Path]
listeSegment _ _ 0 = []
listeSegment a b n = listeSegment a b (n-1) ++ [dragonOrdre a b (n+1)]

-- | Animation frame for the order-based construction: frame @t@ draws
-- element @round t `mod` 16@ of @listeSegment a b 16@.
dragonAnime' :: RealFrac a => Point -> Point -> a -> Picture
dragonAnime' a b t = Line (listeSegment a b 16 !! (round t `mod` 16))
dragonAnime' a b t = Line (listeSegment a b 16 !! (round t `mod` 16)) | jecisc/TP_PF_L3 | PF-TP2/src/Main.hs | mit | 1,954 | 8 | 16 | 451 | 1,001 | 534 | 467 | 39 | 3 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UnicodeSyntax #-}
module Graphics.PlotDSL where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans.RWS
import Data.Char
import Data.List (intercalate)
import System.Process
------------------------------------------------------------------------------
-- | Free monad over a functor @f@: either a pure value or a layer of
-- @f@ wrapping further computation.  (The @Functor f@ constraint is
-- carried in the 'Pure' constructor's context.)
data Free f a = (Functor f) => Pure a | Roll (f (Free f a))

instance (Functor f) => Functor (Free f) where
  fmap f (Pure a) = Pure (f a)
  fmap f (Roll as) = Roll (fmap (fmap f) as)

instance (Functor f) => Applicative (Free f) where
  pure = Pure
  (Pure f) <*> x = fmap f x
  (Roll f) <*> x = Roll $ fmap (<*> x) f

instance (Functor f) => Monad (Free f) where
  return = Pure
  Pure a >>= f = f a
  Roll as >>= f = Roll (fmap (>>= f) as)

-- | Lift a single functor layer into the free monad.
liftF ∷ (Functor f) => f a -> Free f a
liftF = Roll . fmap Pure

-- | Collapse a free monad over a monad @f@ back into @f@.
retract ∷ Monad f => Free f a -> f a
retract (Pure a) = return a
retract (Roll as) = as >>= retract
-- | Lower-case every character of a 'String' (used when rendering
-- constructor names as gnuplot keywords).
toLowerCase :: String -> String
toLowerCase = map toLower
------------------------------------------------------------------------------
-- | Drawing style of a data series (gnuplot @with ...@).
data StyleValue = Lines | Points | Dots | Impulses | Linespoints deriving (Show)

-- | Line colour; 'Raw' passes an arbitrary gnuplot colour string.
data ColorValue = Default | Red | Blue | Green | Yellow | Orange
                | Magenta | Cyan | Violet | White | Black | Raw String
                deriving (Show)

-- | Smoothing mode of a series (gnuplot @smooth ...@).
data SmoothValue = Unique | Csplines | Acsplines | Bezier | Sbezier deriving (Show)

-- | Axis selector for scale settings.
data Axis = X | Y | XY deriving (Show)

-- | Linear scaling, or logarithmic scaling with the given base.
data ScaleValue = NoLogscale Axis | Logscale Axis Double deriving (Show)
------------------------------------------------------------------------------
-- | Base functor of the per-graph DSL: one attribute per constructor,
-- each carrying the continuation.
data Graph next = Name String next
                | Color ColorValue next
                | Style StyleValue next
                | Smooth SmoothValue next
                deriving (Functor)

-- | Free monad over 'Graph'; the final 'Pure' value carries the data.
type GraphF a = Free Graph a

-- | Set the series title.
name ∷ String -> GraphF ()
name n = liftF $ Name n ()

-- | Set the series colour.
color ∷ ColorValue -> GraphF ()
color c = liftF $ Color c ()

-- | Set the drawing style.
style ∷ StyleValue -> GraphF ()
style s = liftF $ Style s ()

-- | Set the smoothing mode.
smooth ∷ SmoothValue -> GraphF ()
smooth s = liftF $ Smooth s ()

-- | Terminate a graph description with its data.
values ∷ a -> GraphF a
values = pure

------------------------------------------------------------------------------
-- | Base functor of the plot DSL: add a graph, set the scale, or set
-- the title.
data Plot a next = Plot (GraphF a) next
                 | Scale ScaleValue next
                 | Title String next
                 deriving (Functor)

-- | A complete plot description.
type PlotF a = Free (Plot a) ()

-- | Add a data series to the plot.
plot ∷ GraphF a -> PlotF a
plot g = liftF $ Plot g ()

-- | Set the plot title.
title ∷ String -> PlotF a
title t = liftF $ Title t ()

-- | Set axis scaling.
scale ∷ ScaleValue -> PlotF a
scale s = liftF $ Scale s ()
------------------------------------------------------------------------------
-- | Compile a 'PlotF' program to a gnuplot script.  Runs in RWS: the
-- Reader carries the target terminal, the Writer accumulates script
-- text, and the State collects the graphs to be emitted by the final
-- @plot@ command.
compileToGnuPlot ∷ (GnuPlotData a) => PlotF a -> RWS GnuPlotTerminal String [GraphF a] ()
compileToGnuPlot (Roll (Title t next)) = do
  tell $ "set title \"" ++ t ++ "\"\n"
  compileToGnuPlot next
compileToGnuPlot (Roll (Scale s next )) = do
  tell $ fn s ++ "\n"
  compileToGnuPlot next
  where
    fn (NoLogscale a) = "unset logscale " ++ toLowerCase (show a)
    fn (Logscale a v) = "set logscale " ++ toLowerCase (show a) ++ " " ++ show v
compileToGnuPlot (Roll (Plot g next)) = do
  -- Graphs are deferred: they are all emitted together at the end.
  s <- get
  put $ s ++ [g]
  compileToGnuPlot next
compileToGnuPlot (Pure ()) = do
  gs <- get
  t <- ask
  tell $ "set term " ++ toLowerCase (show t) ++ "\n"
  -- One "plot '-' ..." clause per graph, followed by each graph's
  -- serialised inline data block.
  tell $ compileGraphs gs ++ concatMap (serialize . extract) gs
  where
    -- Pull the data value out of a graph description.
    extract ∷ GraphF a -> a
    extract (Pure a) = a
    extract (Roll (Name _ n)) = extract n
    extract (Roll (Color _ n)) = extract n
    extract (Roll (Smooth _ n)) = extract n
    extract (Roll (Style _ n)) = extract n

-- | Render the @plot@ command line, one @\"-\"@ (inline data) entry per
-- graph.
compileGraphs ∷ [GraphF a] -> String
compileGraphs gs = "plot " ++ intercalate ", " (map f gs) ++ "\n"
  where
    f ∷ GraphF a -> String
    f x = show "-" ++ " " ++ compileGraphAttributes x

-- | Render a graph's attribute chain (title, colour, style, smoothing).
compileGraphAttributes ∷ GraphF a -> String
compileGraphAttributes (Roll (Name t next)) = unwords ["title", show t, compileGraphAttributes next]
compileGraphAttributes (Roll (Color c next)) = unwords [compileColorToGnuPlot c, compileGraphAttributes next]
compileGraphAttributes (Roll (Style s next)) = unwords [compileStyleToGnuPlot s, compileGraphAttributes next]
compileGraphAttributes (Roll (Smooth s next)) = unwords [compileSmoothToGnuPlot s, compileGraphAttributes next]
compileGraphAttributes (Pure _) = ""

compileColorToGnuPlot ∷ ColorValue -> String
compileColorToGnuPlot c = " linecolor rgb(\"" ++ toLowerCase (show c) ++ "\") "

compileStyleToGnuPlot ∷ StyleValue -> String
compileStyleToGnuPlot s = " with " ++ toLowerCase (show s)

compileSmoothToGnuPlot ∷ SmoothValue -> String
compileSmoothToGnuPlot s = " smooth " ++ toLowerCase (show s)
------------------------------------------------------------------------------
-- | Gnuplot output terminal.
data GnuPlotTerminal = Dumb | Wxt | X11 deriving (Show)

-- | Compile a plot and pipe the script to the external @gnuplot -p@
-- process, returning its stdout.
gnuplot :: (GnuPlotData a) => GnuPlotTerminal -> PlotF a -> IO String
gnuplot t p = readProcess "gnuplot" ["-p"] (snd $ evalRWS (compileToGnuPlot p) t [])

-- | Render to the console (ASCII art) or to a wxt window, respectively.
dumbplot, wxtplot :: (GnuPlotData a) => PlotF a -> IO ()
dumbplot = gnuplot Dumb >=> putStrLn
wxtplot = void . gnuplot Wxt
------------------------------------------------------------------------------
-- | Types that can be serialised as gnuplot inline data.
class GnuPlotData a where
  serialize :: a -> String

-- | Lists of showable pairs serialise as two-column data rows.
instance (Show a, Show b) => GnuPlotData [(a,b)] where
  serialize = pairsToGnuplotData
-- | Serialise @(x, y)@ pairs as space-separated rows, terminated by
-- gnuplot's inline-data end marker @e@.
pairsToGnuplotData :: (Show a, Show b) => [(a,b)] -> String
pairsToGnuplotData pairs = concatMap row pairs ++ "e\n"
  where row (x, y) = show x ++ " " ++ show y ++ "\n"
| damianfral/PlotDSL | src/Graphics/PlotDSL.hs | mit | 5,688 | 0 | 12 | 1,308 | 2,023 | 1,038 | 985 | -1 | -1 |
{-#OPTIONS_GHC -fno-warn-unused-do-bind #-}
module GraphQL.Language.Parser where
import Control.Applicative hiding (many, (<|>))
import Data.Monoid
import Text.Parsec
import Text.Parsec.String
-- | Alias for NamedType
newtype TypeCondition = TypeCondition Name

-- | Lexical token categories.
-- NOTE(review): currently unused by the parsers below.
data Token =
    T_Punctuator
  | T_Name
  | T_IntValue
  | T_FloatValue
  | T_StringValue
  deriving (Show, Eq)

-- | GraphQL punctuator tokens (see 'punctuator' for the spellings).
data Punctuator =
    P_Bang
  | P_Dollar
  | P_OpenParen
  | P_ClosedParen
  | P_Ellipsis
  | P_Colon
  | P_Equals
  | P_At
  | P_OpenSquareBracket
  | P_ClosedSquareBracket
  | P_OpenBracket
  | P_ClosedBracket
  deriving (Show, Eq)

-- | Top-level definition kinds of a GraphQL document.
data Document =
    D_OperatorDefinition
  | D_FragmentDefinition
  deriving (Show, Eq)

-- | A GraphQL name (see the 'name' parser for the accepted grammar).
type Name = String
-- | Run @p@ optionally, then @q@; when @p@ succeeded, combine the two
-- results with @op@, otherwise return @q@'s result unchanged.
withMaybe p q op = do
  firstRes <- optionMaybe p
  fmap (maybe id op firstRes) q
-- | Succeed (without consuming input) only if the given literal does
-- not appear next.
notString :: String -> Parser ()
notString = notFollowedBy . string

-- | 'notString' applied to every literal in the list.
notStrings :: [String] -> Parser ()
notStrings = mapM_ notString
-- | Sequence two actions and cons the first result onto the second.
with :: Monad m => m a -> m [a] -> m [a]
with p s = (:) <$> p <*> s

-- | Sequence two actions and combine their results monoidally.
withMonoid :: (Monad m, Monoid a) => m a -> m a -> m a
withMonoid p s = (<>) <$> p <*> s
-- | Try each parser in turn, taking the first that succeeds.
options :: [ParsecT s u m a] -> ParsecT s u m a
options = foldr1 (<|>)

-- TODO: Line/Paragraph Separators
-- | A single CR or LF character.
lineTerminator :: Parser Char
lineTerminator = oneOf "\r\n"

comma :: Parser Char
comma = char ','

-- | A @#@ comment running to end of line (the terminator itself is not
-- consumed).
comment :: Parser String
comment = char '#' *> many (noneOf "\r\n")

-- | One GraphQL punctuator token.
-- NOTE(review): 'P_Colon' and 'P_Equals' have no spelling in the
-- bindings table, so ":" and "=" are never produced here -- confirm
-- whether that is intentional.
punctuator :: Parser Punctuator
punctuator = options $ map (uncurry bind) bindings
  where bind s p = string s *> pure p
        bindings =
          [ ("!", P_Bang)
          , ("$", P_Dollar)
          , ("(", P_OpenParen)
          , (")", P_ClosedParen)
          , ("...", P_Ellipsis)
          , ("@", P_At)
          , ("[", P_OpenSquareBracket)
          , ("]", P_ClosedSquareBracket)
          , ("{", P_OpenBracket)
          , ("}", P_ClosedBracket)
          ]

-- | A GraphQL name: a letter or underscore followed by alphanumerics
-- or underscores.
name :: Parser Name
name = oneOf alpha `with` many (oneOf alphaNumeric)
  where alpha = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_"
        alphaNumeric = alpha ++ ['0'..'9']
-- TODO: Variable
-- | Literal values appearing in a GraphQL document.
data GraphQLValue =
  -- V_Variable Variable
    V_IntValue Int
  | V_FloatValue Float
  | V_BoolValue Bool
  | V_StringValue StringValue
  | V_EnumValue EnumValue
  deriving (Show, Eq)

-- | Integer literal with an optional leading minus sign.
intValue :: Parser GraphQLValue
intValue = V_IntValue <$> (read <$> withMaybe (char '-') (many1 digit) (:))

-- | Float literal: an integer part followed by an exponent, a fraction
-- plus exponent, or a fraction alone.
floatValue :: Parser GraphQLValue
floatValue = V_FloatValue <$> (read <$> options
  [ try (integerPart `withMonoid` exponentPart)
  , try (integerPart `withMonoid` fractionalPart `withMonoid` exponentPart)
  , integerPart `withMonoid` fractionalPart
  ])
  where integerPart = withMaybe (char '-') (many1 digit) (:)
        exponentIndicator = oneOf "eE"
        sign = oneOf "+-"
        fractionalPart = char '.' `with` many1 digit
        exponentPart = exponentIndicator `with` withMaybe sign integerPart (:)

-- | Boolean literal @true@ or @false@.
boolValue :: Parser GraphQLValue
boolValue = V_BoolValue <$> options
  [ string "true" *> pure True
  , string "false" *> pure False
  ]
-- | A single character of a parsed string literal: plain, a
-- single-character escape, or a unicode escape.
data CharValue =
    CV_Char Char
  | CV_EscapedChar Char
  | CV_EscapedUnicode String
  deriving (Show, Eq)

-- | Flatten parsed character values back into their source text,
-- re-inserting the backslash for escaped forms.  Useful for showing.
condense :: StringValue -> String
condense = concatMap render
  where
    render (CV_Char c)           = [c]
    render (CV_EscapedChar c)    = ['\\', c]
    render (CV_EscapedUnicode u) = '\\' : u

type StringValue = [CharValue]
type EnumValue = String
-- | Double-quoted string literal, supporting @\\uXXXX@ escapes and the
-- single-character escapes @\\ / b f n r t@.
stringValue :: Parser GraphQLValue
stringValue = V_StringValue <$> between (char '"') (char '"') (many stringCharacter)
  where stringCharacter = try escapedUnicode
                      <|> try escapedCharacter
                      <|> CV_Char <$> noneOf "\"\\\n\r"
        unicode = ['0'..'9'] ++ ['A'..'F'] ++ ['a'..'f']
        escapedUnicode = CV_EscapedUnicode
                     <$> (char '\\' *> (char 'u' `with` count 4 (oneOf unicode)))
        escapedCharacter = CV_EscapedChar <$> (char '\\' *> oneOf "\\/bfnrt")

-- | Parse an EnumValue
-- (any name other than the reserved words @null@/@false@/@true@).
enumValue :: Parser GraphQLValue
enumValue = V_EnumValue <$> (notStrings ["null", "false", "true"] *> name)

-- | TODO: Refine/test
-- NOTE(review): 'intValue' is tried before 'floatValue', so an input
-- like @1.5@ succeeds as the int @1@, leaving @.5@ unconsumed --
-- confirm the intended ordering.
value :: Parser GraphQLValue
value = options
  [ try intValue
  , try floatValue
  , try boolValue
  , try stringValue
  , enumValue
  ]

-- | Parse a Variable
variable :: Parser String
variable = char '$' *> name
-- | GraphQL type references: named, list, or non-null wrappers.
data GraphQLType = NamedType Name
                 | ListType GraphQLType
                 -- | NB. Allows NonNullType (NonNullType ...), but cannot be
                 -- | created in this way via the parser
                 | NonNullType GraphQLType
                 deriving (Show, Eq)

-- | Parse a type
-- (non-null forms @T!@/@[T]!@ are tried before the plain forms).
type_ :: Parser GraphQLType
type_ = try nonNullType <|> try listType <|> namedType
  where namedType = NamedType <$> name
        listType = ListType <$> between (char '[') (char ']') type_
        nonNullType = NonNullType <$> (options [listType, namedType] <* char '!')

-- | Parse a TypeCondition
typeCondition :: Parser TypeCondition
typeCondition = TypeCondition <$> name
-- | A named argument with its literal value.
data Argument = Argument Name GraphQLValue
  deriving (Show, Eq)

-- | One or more arguments enclosed in parentheses.
arguments :: Parser [Argument]
arguments = between (char '(') (char ')') (many1 argument)

-- | TODO: whitespace insensitivity
-- A single @name ':' value@ pair.
argument :: Parser Argument
argument = do
  n <- name
  char ':'
  v <- value
  pure $ Argument n v
| 5outh/graphql-hs | src/GraphQL/Language/Parser.hs | mit | 5,417 | 0 | 14 | 1,324 | 1,660 | 891 | 769 | 155 | 2 |
module PPL2.VM.Memory.Segment
(Segment, get, put, new, newInit, toDataRef, dump)
where
import PPL2.Prelude ()
import PPL2.VM.Types
import qualified Data.IntMap as M
import qualified Data.List as L
-- ----------------------------------------
-- A segment has a size and an IntMap for storing values
-- | A segment: its length plus an 'M.IntMap' holding the cell values.
data Segment a = Segment !Offset !(M.IntMap a)

-- ----------------------------------------
-- read from a cell of a segment
-- Returns 'Nothing' when the offset lies outside @[0, len)@.
get :: Offset -> Segment a -> Maybe a
get i (Segment len m)
  | 0 <= i && i < len = M.lookup (fromEnum i) m
  | otherwise         = Nothing

-- write a value into a cell of a segment
-- Fails with 'Nothing' on out-of-range offsets (segments never grow).
-- NOTE(review): unlike 'get', negative offsets are not rejected here --
-- confirm 'Offset' is an unsigned type.
put :: Offset -> a -> Segment a -> Maybe (Segment a)
put i v (Segment len m)
  | i < len   = Just (Segment len $ M.insert (fromEnum i) v m)
  | otherwise = Nothing
-- make a new segment and init all cell with a default value
--
-- a segment is never empty
-- it contains at least a single cell
-- | Allocate a segment of @len@ cells, each initialised to @v@.
-- NOTE(review): @len == 0@ still yields a segment (with an empty map),
-- despite the note above that a segment is never empty.
new :: Offset -> a -> Segment a
new len v = Segment len seg
  where
    seg
      | len /= 0  = L.foldl' (\ m i -> M.insert (fromEnum i) v m) M.empty [0..len - 1]
      | otherwise = M.empty

-- | Build a segment from an initial list of values; its length is the
-- list's length.
newInit :: [a] -> Segment a
newInit vs = Segment (fromIntegral $ M.size seg) seg
  where
    seg = L.foldl' (\ m (i, v) -> M.insert i v m) M.empty $
          zip [0..] vs
-- | Address of cell @i@ as a reference into the data segment; the
-- segment argument itself is unused.
toDataRef :: Offset -> Segment a -> DataRef
toDataRef i _seg = (dataSid, i)

-- | All stored values, in ascending key order.
dump :: Segment a -> [a]
dump (Segment _len seg) = M.elems seg
| UweSchmidt/ppl2 | src/PPL2/VM/Memory/Segment.hs | mit | 1,472 | 0 | 13 | 345 | 531 | 278 | 253 | 32 | 1 |
/*Owner & Copyrights: Vance King Saxbe. A.*//* Copyright (c) <2014> Author Vance King Saxbe. A, and contributors Power Dominion Enterprise, Precieux Consulting and other contributors. Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting and GoldSax Technologies email @vsaxbe@yahoo.com. Development teams from Power Dominion Enterprise, Precieux Consulting. Project sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager.*/{-# Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting, GoldSax Money, GoldSax Treasury, GoldSax Finance, GoldSax Banking and GoldSax Technologies email @vsaxbe@yahoo.com. Development teams from Power Dominion Enterprise, Precieux Consulting. This Engagement sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager. LANGUAGE GADTs #-}
module GoldSaxMachine.GADTs where
-- | Store-offer DSL over products of type @a@: free presents,
-- discounts, restrictions to given products, validity-window changes,
-- combinations, and conditionals over the 'Expr' language.
data Offer a = Present a
             | PercentDiscount Float
             | AbsoluteDiscount Float
             | Restrict [a] (Offer a)
             | From Integer (Offer a)
             | Until Integer (Offer a)
             | Extend Integer (Offer a)
             | Both (Offer a) (Offer a)
             | BetterOf (Offer a) (Offer a)
             | If (Expr a Bool) (Offer a) (Offer a)
-- | Expression language over products of type @a@, indexed by its
-- result type @r@ (a GADT), so only well-typed conditions can be built.
data Expr a r where
  AmountOf :: a -> Expr a Integer
  PriceOf :: a -> Expr a Float
  TotalNumberProducts :: Expr a Integer
  TotalPrice :: Expr a Float
  {- -- Not overloaded versions of values
  IVal :: Integer -> Expr a Integer
  FVal :: Float -> Expr a Float
  -}
  -- Numeric literal, overloaded over the numeric result type.
  Val :: Num n => n -> Expr a n
  {- -- Not overloaded versions of comparisons
  (:+:) :: Expr a Integer -> Expr a Integer -> Expr a Integer
  (:+.:) :: Expr a Float -> Expr a Float -> Expr a Float
  (:*:) :: Expr a Integer -> Expr a Integer -> Expr a Integer
  (:*.:) :: Expr a Float -> Expr a Float -> Expr a Float
  (:<:) :: Expr a Integer -> Expr a Integer -> Expr a Bool
  (:<.:) :: Expr a Float -> Expr a Float -> Expr a Bool
  (:<=:) :: Expr a Integer -> Expr a Integer -> Expr a Bool
  (:<=.:) :: Expr a Float -> Expr a Float -> Expr a Bool
  (:>:) :: Expr a Integer -> Expr a Integer -> Expr a Bool
  (:>.:) :: Expr a Float -> Expr a Float -> Expr a Bool
  (:>=:) :: Expr a Integer -> Expr a Integer -> Expr a Bool
  (:>=.:) :: Expr a Float -> Expr a Float -> Expr a Bool
  -}
  -- Overloaded versions of comparisons
  (:+:) :: Num n => Expr a n -> Expr a n -> Expr a n
  (:*:) :: Num n => Expr a n -> Expr a n -> Expr a n
  (:<:) :: Num n => Expr a n -> Expr a n -> Expr a Bool
  (:<=:) :: Num n => Expr a n -> Expr a n -> Expr a Bool
  (:>:) :: Num n => Expr a n -> Expr a n -> Expr a Bool
  (:>=:) :: Num n => Expr a n -> Expr a n -> Expr a Bool
  (:&&:) :: Expr a Bool -> Expr a Bool -> Expr a Bool
  (:||:) :: Expr a Bool -> Expr a Bool -> Expr a Bool
  Not :: Expr a Bool -> Expr a Bool
{- Does not compile
incorrectExpression :: Expr Char Bool
incorrectExpression = TotalPrice :||: (TotalNumberProducts :<: PriceOf 'a')
-}
-- | Interpreter for 'Expr' over a cart of (product, price) pairs.
-- NOTE(review): only the (:+:) and (:||:) cases are implemented here;
-- every other constructor is a runtime pattern-match failure.
interpretExpr :: Expr a t -> [(a,Float)] -> t
interpretExpr (e1 :+: e2) list = interpretExpr e1 list + interpretExpr e2 list
interpretExpr (e1 :||: e2) list = interpretExpr e1 list || interpretExpr e2 list
/*email to provide support at vancekingsaxbe@powerdominionenterprise.com, businessaffairs@powerdominionenterprise.com, For donations please write to fundraising@powerdominionenterprise.com*/ | VanceKingSaxbeA/GoldSaxMachineStore | GoldSaxMachineModule13/src/Chapter13/GADTs.hs | mit | 3,794 | 23 | 16 | 1,150 | 860 | 438 | 422 | -1 | -1 |
module Craft3e where
import E'10''1
import E'10''2
import E'10''3
import E'10''4
import E'10''5
import E'10''6
import E'10''7
import E'10''8
import E'10''9
import E'10'10
import E'10'11
import E'10'12
import E'10'13
import E'10'14
import E'10'15
import E'10'16
import E'10'17
import E'10'18
import E'10'19
import E'10'20
import E'10'21
import E'10'22
import E'10'23
import E'10'24
import E'10'25
import E'10'26
import E'10'27
import E'10'28
import E'10'29
import E'10'30
import E'10'31
import E'10'32
import E'10'33
import E'10'34
import E'10'35
import E'10'36
import E'10'37
import E'11''1
import E'11''2
import E'11''3
import E'11''4
import E'11''5
import E'11''6
import E'11''7
import E'11''8
import E'11''9
import E'11'10
import E'11'11
import E'11'12
import E'11'13
import E'11'14
import E'11'15
import E'11'16
import E'11'17
import E'11'18
import E'11'19
import E'11'20
import E'11'21
import E'11'22
import E'11'23
import E'11'24
import E'11'25
import E'11'26
import E'11'27
import E'11'28
import E'11'29
import E'11'30
import E'11'31
import E'11'32
import E'11'33
import E'11'34
import E'11'35
import E'11'36
import E'12''1
import E'12''2
import E'12''3
import E'12''4
import E'12''5
import E'12''6
import E'12''7
import E'12''8
import E'12''9
import E'12'10
import E'12'11
import E'12'12
import E'12'13
import E'12'14
import E'12'15
import E'12'16
import E'12'17
import E'12'18
import E'12'19
import E'12'20
import E'12'21
import E'12'22
import E'12'23
import E'12'24
import E'12'25
import E'12'26
import E'12'27
import E'12'28
import E'12'29
import E'12'30
import E'12'31
import E'12'32
import E'12'33
import E'12'34
import E'12'35
import E'12'36
import E'12'37
import E'12'38
import E'12'39
import E'12'40
import E'12'41
import E'12'42
import E'12'43
import E'12'44
import E'12'45
import E'3''1
import E'3''2
import E'3''3
import E'3''4
import E'3''5
import E'3''6
import E'3''7
import E'3''8
import E'3''9
import E'3'10
import E'3'11
import E'3'12
import E'3'13
import E'3'14
import E'3'15
import E'3'16
import E'3'17
import E'3'18
import E'3'19
import E'3'20
import E'3'21
import E'3'22
import E'3'23
import E'3'24
import E'3'25
import E'3'26
import E'3'27
import E'4''1
import E'4''2
import E'4''3
import E'4''4
import E'4''5
import E'4''6
import E'4''7
import E'4''8
import E'4''9
import E'4'10
import E'4'11
import E'4'12
import E'4'13
import E'4'14
import E'4'15
import E'4'16
import E'4'17
import E'4'18
import E'4'19
import E'4'20
import E'4'21
import E'4'22
import E'4'23
import E'4'24
import E'4'25
import E'4'26
import E'4'27
import E'4'28
import E'4'29
import E'4'30
import E'4'31
import E'4'32
import E'4'33
import E'4'34
import E'4'35
import E'4'36
import E'4'37
import E'4'38
import E'4'39
import E'5''1
import E'5''2
import E'5''3
import E'5''4
import E'5''5
import E'5''6
import E'5''7
import E'5''8
import E'5''9
import E'5'10
import E'5'11
import E'5'12
import E'5'13
import E'5'14
import E'5'15
import E'5'16
import E'5'17
import E'5'18
import E'5'19
import E'5'20
import E'5'21
import E'5'22
import E'5'23
import E'5'24
import E'5'25
import E'5'26
import E'5'27
import E'5'28
import E'5'29
import E'5'30
import E'5'31
import E'5'32
import E'5'33
import E'5'34
import E'6''1
import E'6''2
import E'6''3
import E'6''4
import E'6''5
import E'6''6
import E'6''7
import E'6''8
import E'6''9
import E'6'10
import E'6'11
import E'6'12
import E'6'13
import E'6'14
import E'6'15
import E'6'16
import E'6'17
import E'6'18
import E'6'19
import E'6'20
import E'6'21
import E'6'22
import E'6'23
import E'6'24
import E'6'25
import E'6'26
import E'6'27
import E'6'28
import E'6'29
import E'6'30
import E'6'31
import E'6'32
import E'6'33
import E'6'34
import E'6'35
import E'6'36
import E'6'37
import E'6'38
import E'6'39
import E'6'40
import E'6'41
import E'6'42
import E'6'43
import E'6'44
import E'6'45
import E'6'46
import E'6'47
import E'6'48
import E'6'49
import E'6'50
import E'6'51
import E'6'52
import E'6'53
import E'6'54
import E'6'55
import E'6'56
import E'6'57
import E'6'58
import E'6'59
import E'6'60
import E'6'61
import E'6'62
import E'6'63
import E'7''1
import E'7''2
import E'7''3
import E'7''4
import E'7''5
import E'7''6
import E'7''7
import E'7''8
import E'7''9
import E'7'10
import E'7'11
import E'7'12
import E'7'13
import E'7'14
import E'7'15
import E'7'16
import E'7'17
import E'7'18
import E'7'19
import E'7'20
import E'7'21
import E'7'22
import E'7'23
import E'7'24
import E'7'25
import E'7'26
import E'7'27
import E'7'28
import E'7'29
import E'7'30
import E'7'31
import E'7'32
import E'7'33
import E'7'34
import E'7'35
import E'8''1
import E'8''2
import E'8''3
import E'8''4
import E'8''5
import E'8''6
import E'8''7
import E'8''8
import E'8''9
import E'8'10
import E'8'11
import E'8'12
import E'8'13
import E'8'14
import E'8'15
import E'8'16
import E'8'17
import E'8'18
import E'8'19
import E'8'20
import E'8'21
import E'8'22
import E'8'23
import E'9''1
import E'9''2
import E'9''3
import E'9''4
import E'9''5
import E'9''6
import E'9''7
import E'9''8
import E'9''9
import E'9'10
import E'9'11
import E'9'12
import E'9'13
import E'9'14
import E'9'15
| pascal-knodel/haskell-craft | _/links/Craft3e.hs | mit | 5,096 | 0 | 3 | 712 | 1,066 | 711 | 355 | 355 | 0 |
{-# LANGUAGE ExistentialQuantification, TemplateHaskell, StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Implementation of the opcodes.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013, 2014 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.OpCodes
( pyClasses
, OpCode(..)
, ReplaceDisksMode(..)
, DiskIndex
, mkDiskIndex
, unDiskIndex
, opID
, opReasonSrcID
, allOpIDs
, allOpFields
, opSummary
, CommonOpParams(..)
, defOpParams
, MetaOpCode(..)
, resolveDependencies
, wrapOpCode
, setOpComment
, setOpPriority
) where
import Control.Applicative
import Data.List (intercalate)
import Data.Map (Map)
import qualified Text.JSON
import Text.JSON (readJSON, JSObject, JSON, JSValue(..), fromJSObject)
import qualified Ganeti.Constants as C
import qualified Ganeti.Hs2Py.OpDoc as OpDoc
import Ganeti.JSON (DictObject(..), readJSONfromDict, showJSONtoDict)
import Ganeti.OpParams
import Ganeti.PyValue ()
import Ganeti.Query.Language (queryTypeOpToRaw)
import Ganeti.THH
import Ganeti.Types
-- | Render a 'DiskIndex' by unwrapping it to its raw 'Int'.
instance PyValue DiskIndex where
  showValue = showValue . unDiskIndex

-- | 'IDiskParams' values are never rendered for Python; doing so is a
-- programming error.
instance PyValue IDiskParams where
  showValue _ = error "OpCodes.showValue(IDiskParams): unhandled case"

-- | Render disk-recreation info; the "recreate all disks" case renders
-- as an empty Python list.
instance PyValue RecreateDisksInfo where
  showValue RecreateDisksAll = "[]"
  showValue (RecreateDisksIndices is) = showValue is
  showValue (RecreateDisksParams is) = showValue is

-- | Only the empty modification list has a Python rendering; anything
-- else is a programming error.
instance PyValue a => PyValue (SetParamsMods a) where
  showValue SetParamsEmpty = "[]"
  showValue _ = error "OpCodes.showValue(SetParamsMods): unhandled case"

-- | Render the wrapped non-negative value directly.
instance PyValue a => PyValue (NonNegative a) where
  showValue = showValue . fromNonNegative

-- | Render the wrapped non-empty value directly.
instance PyValue a => PyValue (NonEmpty a) where
  showValue = showValue . fromNonEmpty
-- FIXME: should use the 'toRaw' function instead of being hardcoded or
-- perhaps use something similar to the NonNegative type instead of
-- using the declareSADT
-- | Python rendering of export modes.
instance PyValue ExportMode where
  showValue ExportModeLocal = show C.exportModeLocal
  -- BUG FIX: this clause previously rendered 'C.exportModeLocal', so a
  -- remote export mode was serialized as the local one.
  showValue ExportModeRemote = show C.exportModeRemote
-- | Render cluster-verify error codes via their raw string form.
instance PyValue CVErrorCode where
  showValue = cVErrorCodeToRaw

-- | Render optional-check identifiers via their raw string form.
instance PyValue VerifyOptionalChecks where
  showValue = verifyOptionalChecksToRaw

-- NOTE(review): rendering 'INicParams' always raises; callers must not
-- rely on this instance producing a value.
instance PyValue INicParams where
  showValue = error "instance PyValue INicParams: not implemented"
-- | Render a JSON object as a Python dict literal, one
-- @\"key\":value@ entry per field, joined with commas.
instance PyValue a => PyValue (JSObject a) where
  showValue obj = "{" ++ intercalate ", " entries ++ "}"
    where
      entries = [ show key ++ ":" ++ showValue value
                | (key, value) <- fromJSObject obj ]
-- | Render an arbitrary JSON value: objects go through the dict-style
-- rendering, every other value through 'show'.
instance PyValue JSValue where
  showValue (JSObject obj) = showValue obj
  showValue x = show x
-- | Per-name results: a list of (success flag, error message or job id).
type JobIdListOnly = Map String [(Bool, Either String JobId)]

-- | Result of a multi-instance allocation request.
type InstanceMultiAllocResponse =
  ([(Bool, Either String JobId)], NonEmptyString)

-- | One query field definition.
-- NOTE(review): component meaning (name, title, kind, doc) inferred from
-- usage -- confirm against the query implementation.
type QueryFieldDef =
  (NonEmptyString, NonEmptyString, TagKind, NonEmptyString)

-- | Full query result: field definitions plus result rows.
type QueryResponse =
  ([QueryFieldDef], [[(QueryResultCode, JSValue)]])

-- | Result of a fields-only query.
type QueryFieldsResponse = [QueryFieldDef]
-- | OpCode representation.
--
-- We only implement a subset of Ganeti opcodes: those which are actually used
-- in the htools codebase.  The table below drives the 'genOpCode' Template
-- Haskell splice; each entry gives the constructor name, result type,
-- documentation, parameter list, and the name of the summary field.
$(genOpCode "OpCode"
[ ("OpClusterPostInit",
[t| Bool |],
OpDoc.opClusterPostInit,
[],
[])
, ("OpClusterDestroy",
[t| NonEmptyString |],
OpDoc.opClusterDestroy,
[],
[])
, ("OpClusterQuery",
[t| JSObject JSValue |],
OpDoc.opClusterQuery,
[],
[])
, ("OpClusterVerify",
[t| JobIdListOnly |],
OpDoc.opClusterVerify,
[ pDebugSimulateErrors
, pErrorCodes
, pSkipChecks
, pIgnoreErrors
, pVerbose
, pOptGroupName
],
[])
, ("OpClusterVerifyConfig",
[t| Bool |],
OpDoc.opClusterVerifyConfig,
[ pDebugSimulateErrors
, pErrorCodes
, pIgnoreErrors
, pVerbose
],
[])
, ("OpClusterVerifyGroup",
[t| Bool |],
OpDoc.opClusterVerifyGroup,
[ pGroupName
, pDebugSimulateErrors
, pErrorCodes
, pSkipChecks
, pIgnoreErrors
, pVerbose
],
"group_name")
, ("OpClusterVerifyDisks",
[t| JobIdListOnly |],
OpDoc.opClusterVerifyDisks,
[],
[])
, ("OpGroupVerifyDisks",
[t| (Map String String, [String], Map String [[String]]) |],
OpDoc.opGroupVerifyDisks,
[ pGroupName
],
"group_name")
, ("OpClusterRepairDiskSizes",
[t| [(NonEmptyString, NonNegative Int, NonEmptyString, NonNegative Int)]|],
OpDoc.opClusterRepairDiskSizes,
[ pInstances
],
[])
, ("OpClusterConfigQuery",
[t| [JSValue] |],
OpDoc.opClusterConfigQuery,
[ pOutputFields
],
[])
, ("OpClusterRename",
[t| NonEmptyString |],
OpDoc.opClusterRename,
[ pName
],
"name")
, ("OpClusterSetParams",
[t| Either () JobIdListOnly |],
OpDoc.opClusterSetParams,
[ pForce
, pHvState
, pDiskState
, pVgName
, pEnabledHypervisors
, pClusterHvParams
, pClusterBeParams
, pOsHvp
, pClusterOsParams
, pClusterOsParamsPrivate
, pGroupDiskParams
, pCandidatePoolSize
, pMaxRunningJobs
, pMaxTrackedJobs
, pUidPool
, pAddUids
, pRemoveUids
, pMaintainNodeHealth
, pPreallocWipeDisks
, pNicParams
, withDoc "Cluster-wide node parameter defaults" pNdParams
, withDoc "Cluster-wide ipolicy specs" pIpolicy
, pDrbdHelper
, pDefaultIAllocator
, pDefaultIAllocatorParams
, pNetworkMacPrefix
, pMasterNetdev
, pMasterNetmask
, pReservedLvs
, pHiddenOs
, pBlacklistedOs
, pUseExternalMipScript
, pEnabledDiskTemplates
, pModifyEtcHosts
, pClusterFileStorageDir
, pClusterSharedFileStorageDir
, pClusterGlusterStorageDir
, pInstallImage
, pInstanceCommunicationNetwork
, pZeroingImage
, pCompressionTools
, pEnabledUserShutdown
],
[])
, ("OpClusterRedistConf",
[t| () |],
OpDoc.opClusterRedistConf,
[],
[])
, ("OpClusterActivateMasterIp",
[t| () |],
OpDoc.opClusterActivateMasterIp,
[],
[])
, ("OpClusterDeactivateMasterIp",
[t| () |],
OpDoc.opClusterDeactivateMasterIp,
[],
[])
, ("OpClusterRenewCrypto",
[t| () |],
OpDoc.opClusterRenewCrypto,
[],
[])
, ("OpQuery",
[t| QueryResponse |],
OpDoc.opQuery,
[ pQueryWhat
, pUseLocking
, pQueryFields
, pQueryFilter
],
"what")
, ("OpQueryFields",
[t| QueryFieldsResponse |],
OpDoc.opQueryFields,
[ pQueryWhat
, pQueryFieldsFields
],
"what")
, ("OpOobCommand",
[t| [[(QueryResultCode, JSValue)]] |],
OpDoc.opOobCommand,
[ pNodeNames
, withDoc "List of node UUIDs to run the OOB command against" pNodeUuids
, pOobCommand
, pOobTimeout
, pIgnoreStatus
, pPowerDelay
],
[])
, ("OpRestrictedCommand",
[t| [(Bool, String)] |],
OpDoc.opRestrictedCommand,
[ pUseLocking
, withDoc
"Nodes on which the command should be run (at least one)"
pRequiredNodes
, withDoc
"Node UUIDs on which the command should be run (at least one)"
pRequiredNodeUuids
, pRestrictedCommand
],
[])
, ("OpNodeRemove",
[t| () |],
OpDoc.opNodeRemove,
[ pNodeName
, pNodeUuid
],
"node_name")
, ("OpNodeAdd",
[t| () |],
OpDoc.opNodeAdd,
[ pNodeName
, pHvState
, pDiskState
, pPrimaryIp
, pSecondaryIp
, pReadd
, pNodeGroup
, pMasterCapable
, pVmCapable
, pNdParams
],
"node_name")
, ("OpNodeQueryvols",
[t| [JSValue] |],
OpDoc.opNodeQueryvols,
[ pOutputFields
, withDoc "Empty list to query all nodes, node names otherwise" pNodes
],
[])
, ("OpNodeQueryStorage",
[t| [[JSValue]] |],
OpDoc.opNodeQueryStorage,
[ pOutputFields
, pOptStorageType
, withDoc
"Empty list to query all, list of names to query otherwise"
pNodes
, pStorageName
],
[])
, ("OpNodeModifyStorage",
[t| () |],
OpDoc.opNodeModifyStorage,
[ pNodeName
, pNodeUuid
, pStorageType
, pStorageName
, pStorageChanges
],
"node_name")
, ("OpRepairNodeStorage",
[t| () |],
OpDoc.opRepairNodeStorage,
[ pNodeName
, pNodeUuid
, pStorageType
, pStorageName
, pIgnoreConsistency
],
"node_name")
, ("OpNodeSetParams",
[t| [(NonEmptyString, JSValue)] |],
OpDoc.opNodeSetParams,
[ pNodeName
, pNodeUuid
, pForce
, pHvState
, pDiskState
, pMasterCandidate
, withDoc "Whether to mark the node offline" pOffline
, pDrained
, pAutoPromote
, pMasterCapable
, pVmCapable
, pSecondaryIp
, pNdParams
, pPowered
],
"node_name")
, ("OpNodePowercycle",
[t| Maybe NonEmptyString |],
OpDoc.opNodePowercycle,
[ pNodeName
, pNodeUuid
, pForce
],
"node_name")
, ("OpNodeMigrate",
[t| JobIdListOnly |],
OpDoc.opNodeMigrate,
[ pNodeName
, pNodeUuid
, pMigrationMode
, pMigrationLive
, pMigrationTargetNode
, pMigrationTargetNodeUuid
, pAllowRuntimeChgs
, pIgnoreIpolicy
, pIallocator
],
"node_name")
, ("OpNodeEvacuate",
[t| JobIdListOnly |],
OpDoc.opNodeEvacuate,
[ pEarlyRelease
, pNodeName
, pNodeUuid
, pRemoteNode
, pRemoteNodeUuid
, pIallocator
, pEvacMode
],
"node_name")
, ("OpInstanceCreate",
[t| [NonEmptyString] |],
OpDoc.opInstanceCreate,
[ pInstanceName
, pForceVariant
, pWaitForSync
, pNameCheck
, pIgnoreIpolicy
, pOpportunisticLocking
, pInstBeParams
, pInstDisks
, pOptDiskTemplate
, pOptGroupName
, pFileDriver
, pFileStorageDir
, pInstHvParams
, pHypervisor
, pIallocator
, pResetDefaults
, pIpCheck
, pIpConflictsCheck
, pInstCreateMode
, pInstNics
, pNoInstall
, pInstOsParams
, pInstOsParamsPrivate
, pInstOsParamsSecret
, pInstOs
, pPrimaryNode
, pPrimaryNodeUuid
, pSecondaryNode
, pSecondaryNodeUuid
, pSourceHandshake
, pSourceInstance
, pSourceShutdownTimeout
, pSourceX509Ca
, pSrcNode
, pSrcNodeUuid
, pSrcPath
, pBackupCompress
, pStartInstance
, pInstTags
, pInstanceCommunication
, pHelperStartupTimeout
, pHelperShutdownTimeout
],
"instance_name")
, ("OpInstanceMultiAlloc",
[t| InstanceMultiAllocResponse |],
OpDoc.opInstanceMultiAlloc,
[ pOpportunisticLocking
, pIallocator
, pMultiAllocInstances
],
[])
, ("OpInstanceReinstall",
[t| () |],
OpDoc.opInstanceReinstall,
[ pInstanceName
, pInstanceUuid
, pForceVariant
, pInstOs
, pTempOsParams
, pTempOsParamsPrivate
, pTempOsParamsSecret
],
"instance_name")
, ("OpInstanceRemove",
[t| () |],
OpDoc.opInstanceRemove,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreFailures
],
"instance_name")
, ("OpInstanceRename",
[t| NonEmptyString |],
OpDoc.opInstanceRename,
[ pInstanceName
, pInstanceUuid
, withDoc "New instance name" pNewName
, pNameCheck
, pIpCheck
],
[])
, ("OpInstanceStartup",
[t| () |],
OpDoc.opInstanceStartup,
[ pInstanceName
, pInstanceUuid
, pForce
, pIgnoreOfflineNodes
, pTempHvParams
, pTempBeParams
, pNoRemember
, pStartupPaused
-- timeout to cleanup a user down instance
, pShutdownTimeout
],
"instance_name")
, ("OpInstanceShutdown",
[t| () |],
OpDoc.opInstanceShutdown,
[ pInstanceName
, pInstanceUuid
, pForce
, pIgnoreOfflineNodes
, pShutdownTimeout'
, pNoRemember
, pAdminStateSource
],
"instance_name")
, ("OpInstanceReboot",
[t| () |],
OpDoc.opInstanceReboot,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreSecondaries
, pRebootType
],
"instance_name")
, ("OpInstanceReplaceDisks",
[t| () |],
OpDoc.opInstanceReplaceDisks,
[ pInstanceName
, pInstanceUuid
, pEarlyRelease
, pIgnoreIpolicy
, pReplaceDisksMode
, pReplaceDisksList
, pRemoteNode
, pRemoteNodeUuid
, pIallocator
],
"instance_name")
, ("OpInstanceFailover",
[t| () |],
OpDoc.opInstanceFailover,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreConsistency
, pMigrationTargetNode
, pMigrationTargetNodeUuid
, pIgnoreIpolicy
, pMigrationCleanup
, pIallocator
],
"instance_name")
, ("OpInstanceMigrate",
[t| () |],
OpDoc.opInstanceMigrate,
[ pInstanceName
, pInstanceUuid
, pMigrationMode
, pMigrationLive
, pMigrationTargetNode
, pMigrationTargetNodeUuid
, pAllowRuntimeChgs
, pIgnoreIpolicy
, pMigrationCleanup
, pIallocator
, pAllowFailover
],
"instance_name")
, ("OpInstanceMove",
[t| () |],
OpDoc.opInstanceMove,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreIpolicy
, pMoveTargetNode
, pMoveTargetNodeUuid
, pMoveCompress
, pIgnoreConsistency
],
"instance_name")
, ("OpInstanceConsole",
[t| JSObject JSValue |],
OpDoc.opInstanceConsole,
[ pInstanceName
, pInstanceUuid
],
"instance_name")
, ("OpInstanceActivateDisks",
[t| [(NonEmptyString, NonEmptyString, NonEmptyString)] |],
OpDoc.opInstanceActivateDisks,
[ pInstanceName
, pInstanceUuid
, pIgnoreDiskSize
, pWaitForSyncFalse
],
"instance_name")
, ("OpInstanceDeactivateDisks",
[t| () |],
OpDoc.opInstanceDeactivateDisks,
[ pInstanceName
, pInstanceUuid
, pForce
],
"instance_name")
, ("OpInstanceRecreateDisks",
[t| () |],
OpDoc.opInstanceRecreateDisks,
[ pInstanceName
, pInstanceUuid
, pRecreateDisksInfo
, withDoc "New instance nodes, if relocation is desired" pNodes
, withDoc "New instance node UUIDs, if relocation is desired" pNodeUuids
, pIallocator
],
"instance_name")
, ("OpInstanceQueryData",
[t| JSObject (JSObject JSValue) |],
OpDoc.opInstanceQueryData,
[ pUseLocking
, pInstances
, pStatic
],
[])
, ("OpInstanceSetParams",
[t| [(NonEmptyString, JSValue)] |],
OpDoc.opInstanceSetParams,
[ pInstanceName
, pInstanceUuid
, pForce
, pForceVariant
, pIgnoreIpolicy
, pInstParamsNicChanges
, pInstParamsDiskChanges
, pInstBeParams
, pRuntimeMem
, pInstHvParams
, pOptDiskTemplate
, pExtParams
, pFileDriver
, pFileStorageDir
, pPrimaryNode
, pPrimaryNodeUuid
, withDoc "Secondary node (used when changing disk template)" pRemoteNode
, withDoc
"Secondary node UUID (used when changing disk template)"
pRemoteNodeUuid
, pOsNameChange
, pInstOsParams
, pInstOsParamsPrivate
, pWaitForSync
, withDoc "Whether to mark the instance as offline" pOffline
, pIpConflictsCheck
, pHotplug
, pHotplugIfPossible
, pOptInstanceCommunication
],
"instance_name")
, ("OpInstanceGrowDisk",
[t| () |],
OpDoc.opInstanceGrowDisk,
[ pInstanceName
, pInstanceUuid
, pWaitForSync
, pDiskIndex
, pDiskChgAmount
, pDiskChgAbsolute
],
"instance_name")
, ("OpInstanceChangeGroup",
[t| JobIdListOnly |],
OpDoc.opInstanceChangeGroup,
[ pInstanceName
, pInstanceUuid
, pEarlyRelease
, pIallocator
, pTargetGroups
],
"instance_name")
, ("OpGroupAdd",
[t| Either () JobIdListOnly |],
OpDoc.opGroupAdd,
[ pGroupName
, pNodeGroupAllocPolicy
, pGroupNodeParams
, pGroupDiskParams
, pHvState
, pDiskState
, withDoc "Group-wide ipolicy specs" pIpolicy
],
"group_name")
, ("OpGroupAssignNodes",
[t| () |],
OpDoc.opGroupAssignNodes,
[ pGroupName
, pForce
, withDoc "List of nodes to assign" pRequiredNodes
, withDoc "List of node UUIDs to assign" pRequiredNodeUuids
],
"group_name")
, ("OpGroupSetParams",
[t| [(NonEmptyString, JSValue)] |],
OpDoc.opGroupSetParams,
[ pGroupName
, pNodeGroupAllocPolicy
, pGroupNodeParams
, pGroupDiskParams
, pHvState
, pDiskState
, withDoc "Group-wide ipolicy specs" pIpolicy
],
"group_name")
, ("OpGroupRemove",
[t| () |],
OpDoc.opGroupRemove,
[ pGroupName
],
"group_name")
, ("OpGroupRename",
[t| NonEmptyString |],
OpDoc.opGroupRename,
[ pGroupName
, withDoc "New group name" pNewName
],
[])
, ("OpGroupEvacuate",
[t| JobIdListOnly |],
OpDoc.opGroupEvacuate,
[ pGroupName
, pEarlyRelease
, pIallocator
, pTargetGroups
, pSequential
, pForceFailover
],
"group_name")
, ("OpOsDiagnose",
[t| [[JSValue]] |],
OpDoc.opOsDiagnose,
[ pOutputFields
, withDoc "Which operating systems to diagnose" pNames
],
[])
, ("OpExtStorageDiagnose",
[t| [[JSValue]] |],
OpDoc.opExtStorageDiagnose,
[ pOutputFields
, withDoc "Which ExtStorage Provider to diagnose" pNames
],
[])
, ("OpBackupPrepare",
[t| Maybe (JSObject JSValue) |],
OpDoc.opBackupPrepare,
[ pInstanceName
, pInstanceUuid
, pExportMode
],
"instance_name")
, ("OpBackupExport",
[t| (Bool, [Bool]) |],
OpDoc.opBackupExport,
[ pInstanceName
, pInstanceUuid
, pBackupCompress
, pShutdownTimeout
, pExportTargetNode
, pExportTargetNodeUuid
, pShutdownInstance
, pRemoveInstance
, pIgnoreRemoveFailures
, defaultField [| ExportModeLocal |] pExportMode
, pX509KeyName
, pX509DestCA
, pZeroFreeSpace
, pZeroingTimeoutFixed
, pZeroingTimeoutPerMiB
],
"instance_name")
, ("OpBackupRemove",
[t| () |],
OpDoc.opBackupRemove,
[ pInstanceName
, pInstanceUuid
],
"instance_name")
, ("OpTagsGet",
[t| [NonEmptyString] |],
OpDoc.opTagsGet,
[ pTagsObject
, pUseLocking
, withDoc "Name of object to retrieve tags from" pTagsName
],
"name")
, ("OpTagsSearch",
[t| [(NonEmptyString, NonEmptyString)] |],
OpDoc.opTagsSearch,
[ pTagSearchPattern
],
"pattern")
, ("OpTagsSet",
[t| () |],
OpDoc.opTagsSet,
[ pTagsObject
, pTagsList
, withDoc "Name of object where tag(s) should be added" pTagsName
],
[])
, ("OpTagsDel",
[t| () |],
OpDoc.opTagsDel,
[ pTagsObject
, pTagsList
, withDoc "Name of object where tag(s) should be deleted" pTagsName
],
[])
, ("OpTestDelay",
[t| () |],
OpDoc.opTestDelay,
[ pDelayDuration
, pDelayOnMaster
, pDelayOnNodes
, pDelayOnNodeUuids
, pDelayRepeat
, pDelayInterruptible
, pDelayNoLocks
],
"duration")
, ("OpTestAllocator",
[t| String |],
OpDoc.opTestAllocator,
[ pIAllocatorDirection
, pIAllocatorMode
, pIAllocatorReqName
, pIAllocatorNics
, pIAllocatorDisks
, pHypervisor
, pIallocator
, pInstTags
, pIAllocatorMemory
, pIAllocatorVCpus
, pIAllocatorOs
, pDiskTemplate
, pIAllocatorInstances
, pIAllocatorEvacMode
, pTargetGroups
, pIAllocatorSpindleUse
, pIAllocatorCount
, pOptGroupName
],
"iallocator")
, ("OpTestJqueue",
[t| Bool |],
OpDoc.opTestJqueue,
[ pJQueueNotifyWaitLock
, pJQueueNotifyExec
, pJQueueLogMessages
, pJQueueFail
],
[])
, ("OpTestDummy",
[t| () |],
OpDoc.opTestDummy,
[ pTestDummyResult
, pTestDummyMessages
, pTestDummyFail
, pTestDummySubmitJobs
],
[])
, ("OpNetworkAdd",
[t| () |],
OpDoc.opNetworkAdd,
[ pNetworkName
, pNetworkAddress4
, pNetworkGateway4
, pNetworkAddress6
, pNetworkGateway6
, pNetworkMacPrefix
, pNetworkAddRsvdIps
, pIpConflictsCheck
, withDoc "Network tags" pInstTags
],
"network_name")
, ("OpNetworkRemove",
[t| () |],
OpDoc.opNetworkRemove,
[ pNetworkName
, pForce
],
"network_name")
, ("OpNetworkSetParams",
[t| () |],
OpDoc.opNetworkSetParams,
[ pNetworkName
, pNetworkGateway4
, pNetworkAddress6
, pNetworkGateway6
, pNetworkMacPrefix
, withDoc "Which external IP addresses to reserve" pNetworkAddRsvdIps
, pNetworkRemoveRsvdIps
],
"network_name")
, ("OpNetworkConnect",
[t| () |],
OpDoc.opNetworkConnect,
[ pGroupName
, pNetworkName
, pNetworkMode
, pNetworkLink
, pNetworkVlan
, pIpConflictsCheck
],
"network_name")
, ("OpNetworkDisconnect",
[t| () |],
OpDoc.opNetworkDisconnect,
[ pGroupName
, pNetworkName
],
"network_name")
])
-- Standalone, since 'OpCode' itself is generated by the splice above.
deriving instance Ord OpCode
-- | Returns the OP_ID for a given opcode value.
$(genOpID ''OpCode "opID")
-- | A list of all defined/supported opcode IDs.
$(genAllOpIDs ''OpCode "allOpIDs")
-- | Convert the opcode name to lowercase with underscores and strip
-- the @Op@ prefix.
$(genOpLowerStrip (C.opcodeReasonSrcOpcode ++ ":") ''OpCode "opReasonSrcID")
-- | JSON (de)serialisation goes through the dict representation.
instance JSON OpCode where
  readJSON = readJSONfromDict
  showJSON = showJSONtoDict
-- | Generates the summary value for an opcode.
--
-- The summary is the value of the opcode's designated summary field
-- (the field named in the last element of each 'genOpCode' table entry:
-- instance/node/group/network name, query target, etc.); opcodes
-- without a summary field yield 'Nothing'.
opSummaryVal :: OpCode -> Maybe String
opSummaryVal OpClusterVerifyGroup { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupVerifyDisks { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpClusterRename { opName = s } = Just (fromNonEmpty s)
opSummaryVal OpQuery { opWhat = s } = Just (queryTypeOpToRaw s)
opSummaryVal OpQueryFields { opWhat = s } = Just (queryTypeOpToRaw s)
opSummaryVal OpNodeRemove { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeAdd { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeModifyStorage { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpRepairNodeStorage { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeSetParams { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodePowercycle { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeMigrate { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeEvacuate { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpInstanceCreate { opInstanceName = s } = Just s
opSummaryVal OpInstanceReinstall { opInstanceName = s } = Just s
opSummaryVal OpInstanceRemove { opInstanceName = s } = Just s
-- FIXME: instance rename should show both names; currently it shows none
-- opSummaryVal OpInstanceRename { opInstanceName = s } = Just s
opSummaryVal OpInstanceStartup { opInstanceName = s } = Just s
opSummaryVal OpInstanceShutdown { opInstanceName = s } = Just s
opSummaryVal OpInstanceReboot { opInstanceName = s } = Just s
opSummaryVal OpInstanceReplaceDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceFailover { opInstanceName = s } = Just s
opSummaryVal OpInstanceMigrate { opInstanceName = s } = Just s
opSummaryVal OpInstanceMove { opInstanceName = s } = Just s
opSummaryVal OpInstanceConsole { opInstanceName = s } = Just s
opSummaryVal OpInstanceActivateDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceDeactivateDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceRecreateDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceSetParams { opInstanceName = s } = Just s
opSummaryVal OpInstanceGrowDisk { opInstanceName = s } = Just s
opSummaryVal OpInstanceChangeGroup { opInstanceName = s } = Just s
opSummaryVal OpGroupAdd { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupAssignNodes { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupSetParams { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupRemove { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupEvacuate { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpBackupPrepare { opInstanceName = s } = Just s
opSummaryVal OpBackupExport { opInstanceName = s } = Just s
opSummaryVal OpBackupRemove { opInstanceName = s } = Just s
opSummaryVal OpTagsGet { opKind = s } = Just (show s)
opSummaryVal OpTagsSearch { opTagSearchPattern = s } = Just (fromNonEmpty s)
opSummaryVal OpTestDelay { opDelayDuration = d } = Just (show d)
opSummaryVal OpTestAllocator { opIallocator = s } =
  -- FIXME: Python doesn't handle None fields well, so we have behave the same
  Just $ maybe "None" fromNonEmpty s
opSummaryVal OpNetworkAdd { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkRemove { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkSetParams { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkConnect { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkDisconnect { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal _ = Nothing
-- | Computes the summary of the opcode: the OP_ID minus its @OP_@
-- prefix, followed by the summary value in parentheses when present.
opSummary :: OpCode -> String
opSummary op = maybe base (\s -> base ++ "(" ++ s ++ ")") (opSummaryVal op)
  where
    -- Drop the leading "OP_" of the generated opcode identifier.
    base = drop 3 (opID op)
-- | Generic\/common opcode parameters.
-- Generates the 'CommonOpParams' record with an @op@ field prefix.
$(buildObject "CommonOpParams" "op"
  [ pDryRun
  , pDebugLevel
  , pOpPriority
  , pDependencies
  , pComment
  , pReason
  ])
-- Standalone, since the record itself is generated by the splice above.
deriving instance Ord CommonOpParams
-- | Default common parameter values: nothing set, normal priority,
-- empty reason trail.
defOpParams :: CommonOpParams
defOpParams =
  CommonOpParams { opDryRun = Nothing
                 , opDebugLevel = Nothing
                 , opPriority = OpPrioNormal
                 , opDepends = Nothing
                 , opComment = Nothing
                 , opReason = []
                 }
-- | Resolve relative dependencies to absolute ones, given the job ID.
-- Only does work when a dependency list is actually present.
resolveDependsCommon :: (Monad m) => CommonOpParams -> JobId -> m CommonOpParams
resolveDependsCommon p@(CommonOpParams { opDepends = Just deps}) jid = do
  deps' <- mapM (`absoluteJobDependency` jid) deps
  return p { opDepends = Just deps' }
resolveDependsCommon p _ = return p
-- | The top-level opcode type: an opcode plus its common parameters.
data MetaOpCode = MetaOpCode { metaParams :: CommonOpParams
                             , metaOpCode :: OpCode
                             } deriving (Show, Eq, Ord)
-- | Resolve relative dependencies to absolute ones, given the job Id.
-- Delegates to 'resolveDependsCommon' on the common parameters.
resolveDependencies :: (Monad m) => MetaOpCode -> JobId -> m MetaOpCode
resolveDependencies mopc jid = do
  resolved <- resolveDependsCommon (metaParams mopc) jid
  return (mopc { metaParams = resolved })
-- | The dict form of a 'MetaOpCode' is the union of the common
-- parameters' dict and the opcode's dict; both halves are rebuilt from
-- the same dict on parsing.
instance DictObject MetaOpCode where
  toDict (MetaOpCode meta op) = toDict meta ++ toDict op
  fromDictWKeys dict = MetaOpCode <$> fromDictWKeys dict
                                  <*> fromDictWKeys dict
-- | JSON (de)serialisation goes through the dict representation.
instance JSON MetaOpCode where
  readJSON = readJSONfromDict
  showJSON = showJSONtoDict
-- | Wraps an 'OpCode' with the default parameters to build a
-- 'MetaOpCode'.
wrapOpCode :: OpCode -> MetaOpCode
wrapOpCode op = MetaOpCode defOpParams op
-- | Sets the comment on a meta opcode.
setOpComment :: String -> MetaOpCode -> MetaOpCode
setOpComment comment mop =
  mop { metaParams = (metaParams mop) { opComment = Just comment } }
-- | Sets the priority on a meta opcode.
setOpPriority :: OpSubmitPriority -> MetaOpCode -> MetaOpCode
setOpPriority prio mop =
  mop { metaParams = (metaParams mop) { opPriority = prio } }
| ribag/ganeti-experiments | src/Ganeti/OpCodes.hs | gpl-2.0 | 28,651 | 0 | 12 | 7,404 | 5,779 | 3,534 | 2,245 | 926 | 2 |
--------------------------------------------------------------------------------
-- | The Pandoc AST is not extensible, so we need to use another way to model
-- different parts of slides that we want to appear bit by bit.
--
-- We do this by modelling a slide as a list of instructions, that manipulate
-- the contents on a slide in a (for now) very basic way.
module Patat.Presentation.Instruction
( Instructions
, fromList
, toList
, Instruction (..)
, numFragments
, Fragment (..)
, renderFragment
) where
import qualified Text.Pandoc as Pandoc
newtype Instructions a = Instructions [Instruction a] deriving (Show)

-- | Smart constructor guaranteeing some invariants:
--
--  * No consecutive pauses.
--  * All pauses moved to the top level.
--  * No pauses at the end.
fromList :: [Instruction a] -> Instructions a
fromList = Instructions . normalize
  where
    -- Split off the (possibly empty) run of leading pauses; emit at
    -- most a single 'Pause' for it, then recurse past the following
    -- non-pause instruction.
    normalize instrs = case span isPause instrs of
      (_, [])        -> []
      ([], x : rest) -> x : normalize rest
      (_ : _, rest)  -> Pause : normalize rest
-- | Unwrap the instruction list.
toList :: Instructions a -> [Instruction a]
toList (Instructions instrs) = instrs
data Instruction a
    -- Pause.
    = Pause
    -- Append items.
    | Append [a]
    -- Remove the last item.
    | Delete
    -- Modify the last block with the provided instruction.
    | ModifyLast (Instruction a)
    deriving (Show)

-- | Whether an instruction is (or, through 'ModifyLast', recursively
-- wraps) a 'Pause'.
isPause :: Instruction a -> Bool
isPause instr = case instr of
    Pause        -> True
    Append _     -> False
    Delete       -> False
    ModifyLast i -> isPause i
-- | Number of pause instructions at the top level.
numPauses :: Instructions a -> Int
numPauses (Instructions instrs) = length (filter isPause instrs)

-- | Number of fragments: one more than the number of pauses.
numFragments :: Instructions a -> Int
numFragments instrs = numPauses instrs + 1
newtype Fragment = Fragment [Pandoc.Block] deriving (Show)

-- | Render the fragment visible after @n@ pauses: apply instructions in
-- order, stopping at the (n+1)-th top-level 'Pause'.
renderFragment :: Int -> Instructions Pandoc.Block -> Fragment
renderFragment n (Instructions instrs0) = Fragment (walk [] n instrs0)
  where
    walk acc _ [] = acc
    walk acc remaining (Pause : rest)
      | remaining <= 0 = acc
      | otherwise      = walk acc (remaining - 1) rest
    walk acc remaining (instr : rest) = walk (goBlocks instr acc) remaining rest
-- | Apply one instruction to a list of blocks.
goBlocks :: Instruction Pandoc.Block -> [Pandoc.Block] -> [Pandoc.Block]
goBlocks instr blocks = case instr of
    Pause     -> blocks
    Append ys -> blocks ++ ys
    Delete    -> sinit blocks
    ModifyLast f
      -- Shouldn't happen unless instructions are malformed.
      | null blocks -> blocks
      | otherwise   -> modifyLast (goBlock f) blocks
-- | Apply one instruction *inside* a single Pandoc block (used for the
-- last block via 'modifyLast'); unsupported block types pass through
-- unchanged.
goBlock :: Instruction Pandoc.Block -> Pandoc.Block -> Pandoc.Block
goBlock Pause x = x
goBlock (Append ys) block = case block of
    -- We can only append to a few specific block types for now.
    Pandoc.BulletList xs -> Pandoc.BulletList $ xs ++ [ys]
    Pandoc.OrderedList attr xs -> Pandoc.OrderedList attr $ xs ++ [ys]
    _ -> block
goBlock Delete block = case block of
    -- We can only delete from a few specific block types for now.
    Pandoc.BulletList xs -> Pandoc.BulletList $ sinit xs
    Pandoc.OrderedList attr xs -> Pandoc.OrderedList attr $ sinit xs
    _ -> block
goBlock (ModifyLast f) block = case block of
    -- We can only modify the last content of a few specific block types for
    -- now.
    Pandoc.BulletList xs -> Pandoc.BulletList $ modifyLast (goBlocks f) xs
    Pandoc.OrderedList attr xs ->
        Pandoc.OrderedList attr $ modifyLast (goBlocks f) xs
    _ -> block
-- | Apply a function to the last element of a list (identity on the
-- empty list).
modifyLast :: (a -> a) -> [a] -> [a]
modifyLast _ []       = []
modifyLast f [x]      = [f x]
modifyLast f (x : xs) = x : modifyLast f xs
-- | Safe 'init': the empty list maps to itself.
sinit :: [a] -> [a]
sinit [] = []
sinit xs = init xs
| jaspervdj/patat | lib/Patat/Presentation/Instruction.hs | gpl-2.0 | 3,514 | 0 | 11 | 819 | 1,073 | 564 | 509 | 67 | 7 |
{- |
Module : $EmptyHeader$
Description : <optional short description entry>
Copyright : (c) <Authors or Affiliations>
License : GPLv2 or higher, see LICENSE.txt
Maintainer : <email>
Stability : unstable | experimental | provisional | stable | frozen
Portability : portable | non-portable (<reason>)
<optional description>
-}
module Grothendieck (module Logic, module Grothendieck) where
import Logic
-- | Existential wrapper over any logic, identified by its @id@ type.
data AnyLogic =
  forall id s m sen b sy .
  Logic id s m sen b sy =>
  G_logic id
-- | Existential wrapper over a translation between two logics.
data AnyTranslation =
  forall id1 s1 m1 sen1 b1 sy1 id2 s2 m2 sen2 b2 sy2 .
  (Logic id1 s1 m1 sen1 b1 sy1, Logic id2 s2 m2 sen2 b2 sy2) =>
  G_LTR (Logic_translation id1 s1 m1 sen1 b1 sy1 id2 s2 m2 sen2 b2 sy2)
instance Show AnyTranslation where
  -- Translations have no informative rendering.
  show _ = "<tr>"
-- | Known logics and translations, both indexed by name.
type LogicGraph = ([(String,AnyLogic)],[(String,AnyTranslation)])
-- | A basic specification in some logic.
data G_basic_spec =
  forall id s m sen b sy .
  Logic id s m sen b sy =>
  G_basic_spec id b
instance Show G_basic_spec where
  show (G_basic_spec id b) = show b
-- | A symbol mapping list in some logic.
data G_symbol_mapping_list =
  forall id s m sen b sy .
  Logic id s m sen b sy =>
  G_symbol_mapping_list id sy
instance Show G_symbol_mapping_list where
  show (G_symbol_mapping_list id sy) = show sy
-- | A sentence in some logic.
data G_sentence =
  forall id s m sen b sy .
  Logic id s m sen b sy =>
  G_sentence id sen
-- | A theory in some logic.
data G_theory =
  forall id s m sen b sy .
  Logic id s m sen b sy =>
  G_theory id (Theory s sen)
-- | A morphism in some logic.
data G_morphism =
  forall id s m sen b sy .
  Logic id s m sen b sy =>
  G_morphism id m
instance Show G_theory where
  -- NOTE(review): only the signature half is shown; presumably 'Theory'
  -- is a (signature, axioms) pair -- confirm in module Logic.
  show (G_theory _ (sig,ax)) = show sig
-- auxiliary functions for conversion between different logics

-- | Dynamic cast between two 'Typeable' types; 'Nothing' when the
-- runtime types differ.
-- NOTE(review): 'toDyn'/'fromDynamic' are expected to come into scope
-- via the Logic import -- confirm.
coerce :: (Typeable a, Typeable b) => a -> Maybe b
coerce = fromDynamic . toDyn
-- | Unsafe variant of 'coerce': crashes when the cast fails.
coerce1 :: (Typeable a, Typeable b) => a -> b
coerce1 = the . coerce
-- | Extract the value from a 'Just'.
--
-- Previously this had no 'Nothing' equation and died with an
-- uninformative pattern-match failure; it now fails with an explicit
-- message (it is still intentionally partial, matching 'coerce1').
the :: Maybe a -> a
the (Just x) = x
the Nothing = error "Grothendieck.the: Nothing"
| nevrenato/Hets_Fork | mini/Grothendieck.hs | gpl-2.0 | 1,956 | 0 | 9 | 556 | 569 | 321 | 248 | -1 | -1 |
-- data Bool = False | True
-- | Days of the week.
-- NOTE(review): the list starts with a stray 'Day' constructor before
-- 'Mon'; it looks like a typo for nothing at all, but it is kept since
-- removing a constructor would break the type's interface.
data Day = Day | Mon | Tue | Wed | Thu | Fri | Sat | Sun deriving (Show, Eq, Ord, Enum, Read)

-- | The next day, wrapping from 'Sun' back to 'Mon'.
tomorrow :: Day -> Day
tomorrow d
  | d == Sun  = Mon
  | otherwise = succ d

-- | The previous day, wrapping from 'Mon' back to 'Sun'.
yesterday :: Day -> Day
yesterday d
  | d == Mon  = Sun
  | otherwise = pred d
type Name = String
type Author = String
type ISBN = String
type Price = Float
-- data Book = Book Name Author ISBN Price deriving (Show, Eq)
-- | Record form of the positional variant commented out above.
-- NOTE(review): unlike that variant, no (Show, Eq) deriving clause here
-- — confirm whether instances are still needed.
data Book = Book{
    name :: Name,
    author :: Author,
    isbn :: ISBN,
    price :: Price
  }
-- | Total variant of 'head': 'Nothing' on the empty list.
-- (Unused tail binding replaced with a wildcard.)
safeHead :: [a] -> Maybe a
safeHead []    = Nothing
safeHead (x:_) = Just x
-- | Total integer division: 'Nothing' when the divisor is zero.
-- (Unused dividend binding in the zero clause replaced with a wildcard.)
safeDiv :: Integral a => a -> a -> Maybe a
safeDiv _ 0 = Nothing
safeDiv a b = Just (a `div` b)
-- | Tag the first list's elements 'Left' and the second's 'Right',
-- all lefts before all rights.
disjoint :: [a] -> [b] -> [Either a b]
disjoint xs ys = [Left x | x <- xs] ++ [Right y | y <- ys]
--[Left 80, Right "Cheated", Left 95, Right "Illness"]
-- either :: (a->c) -> (b->c) -> Either a b -> c
-- either f _ (Left x) = f x
-- either _ g (Right y) = g y
-- partitionEithers :: [Either a b] -> ([a], [bs])
-- partitionEithers = foldr (either left right) ([], [])
-- where
-- left a (l, r) = (a:l, r)
-- right a (l, r) = (l, a:r)
-- data (Num a) => Shape a = Rectangular a a
--
data List a = Nil | Cons a (List a) deriving (Eq,Show)
-- | Convert the custom 'List' to a built-in list.
-- NOTE(review): the name suggests the opposite direction — it actually
-- goes /from/ the custom list; kept for callers.  Explicit signature added.
listToMylist :: List a -> [a]
listToMylist Nil         = []
listToMylist (Cons x xs) = x : listToMylist xs
-- | Convert a built-in list to the custom 'List'.
-- (Explicit signature added; name direction is misleading but kept.)
mylistToList :: [a] -> List a
mylistToList []     = Nil
mylistToList (x:xs) = Cons x (mylistToList xs)
data ThreeNum = One | Two | Three
data Level = Low | Middle | High
-- | Map each number onto its level; total over 'ThreeNum'.
f :: ThreeNum -> Level
f One = Low
f Two = Middle
f Three = High
-- | Inverse of 'f'.  The original omitted the 'High' clause, so
-- @g High@ crashed with a pattern-match failure; added to make the
-- function total.
g :: Level -> ThreeNum
g Low    = One
g Middle = Two
g High   = Three
add a b = a + b
-- | Drop up to @n@ elements from the front of a list; returns the list
-- unchanged for non-positive @n@ and stops at the empty list.
myDrop n xs
  | n <= 0 || null xs = xs
  | otherwise         = myDrop (n - 1) (tail xs)
| dalonng/hellos | haskell.hello/dataType.hs | gpl-2.0 | 1,672 | 24 | 8 | 456 | 599 | 325 | 274 | 42 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StandaloneDeriving #-}
module Blaaargh.Internal.Post
( getTimeStamp
, parsePersons
, collectPosts
, recentPosts
, chronologicalPosts
, reverseChronologicalPosts
, alphabeticalPosts
, buildContentMap
, setEntryId
, setEntryTitle
, setEntryUpdated
, setEntryAuthors
, setEntrySummary
, setEntryHTMLContent
, setEntryContributor
, setEntryCategories
, setEntryLinks
, setEntryPublished
, setEntryRights
, setEntrySource
, setEntryInReplyTo
, setEntryInReplyTotal
, setEntryAttrs
, setEntryOther
)
where
------------------------------------------------------------------------
import Control.Applicative
import Control.Monad.Error
import Control.Monad.Identity
import Control.Monad.State
import qualified Data.ByteString.Char8 as B
import Data.ByteString.Char8 (ByteString)
import Data.Char
import qualified Data.ConfigFile as Cfg
import Data.List
import Data.List.Split
import qualified Data.Map as Map
import Data.Maybe
import Data.Time.Clock
import Data.Time.Clock.POSIX
import Data.Time.LocalTime
import System.Directory
import System.FilePath
import System.IO
import System.Posix.Files
import Text.Atom.Feed
import qualified Text.Pandoc as Pandoc
import Text.Printf
import Text.XML.Light
------------------------------------------------------------------------
import Blaaargh.Internal.Time
import Blaaargh.Internal.Types
import qualified Blaaargh.Internal.Util.ExcludeList as EL
import Blaaargh.Internal.Util.ExcludeList (ExcludeList)
------------------------------------------------------------------------
-- | Modification time of @file@ as 'UTCTime'.
-- The chained @\<$\> ... \<$\>@ works because the first fmap is over the
-- function functor: it is equivalent to
-- @(posixSecondsToUTCTime . realToFrac . modificationTime) \<$\> getFileStatus file@.
getTimeStamp :: FilePath -> IO UTCTime
getTimeStamp file =
    (posixSecondsToUTCTime . realToFrac)
      <$> modificationTime
      <$> getFileStatus file
-- | Strip leading and trailing whitespace from a string.
trim :: String -> String
trim = dropWhileEnd isSpace . dropWhile isSpace
-- | Parse a comma-separated list of @\"Name \<email\>\"@ strings into atom
-- 'Person's; the angle-bracketed email part is optional ('Nothing' when
-- absent).
parsePersons :: String -> [Person]
parsePersons = map mkPerson . endBy ","
  where
    mkPerson s = Person x Nothing y []
      where
        (x,y) = parseOut s
        -- split into (trimmed name, maybe email between '<' and '>')
        -- NOTE(review): the inner @mb x@ and @parseOut s@ shadow the
        -- outer @x@ and @s@ bindings.
        parseOut s = (trim a, mb c)
          where
            mb x = if null x then Nothing else Just x
            (a,b') = span (/= '<') s
            b = drop 1 b'
            (c,_) = span (/= '>') b
-- | Parse the post's header block with ConfigFile's default parser.
parseHeaders :: String -> (Either Cfg.CPError Cfg.ConfigParser)
parseHeaders = Cfg.readstring Cfg.emptyCP
-- | Look up @key@ in the parsed headers' DEFAULT section, mapping any
-- ConfigFile error to 'Nothing'.
getKVP :: Cfg.ConfigParser -> String -> Maybe String
getKVP cp key = retval
  where
    e :: Either Cfg.CPError String
    e = runIdentity . runErrorT $ Cfg.get cp "DEFAULT" key
    retval = case e of Left _ -> Nothing
                       Right x -> Just x
-- | Recognised post-header field names, each paired with the entry
-- mutator that 'readPost' applies with the header's value.
headerTable :: [(String, String -> Post -> Post)]
headerTable = [ ("title", setEntryTitle)
              , ("author", setEntryAuthors)
              , ("authors" , setEntryAuthors)
              , ("summary", setEntrySummary)
              , ("updated", setEntryUpdated)
              , ("published", setEntryPublished . Just) ]
-- break the post apart at the header ender, strip the prefix chars
-- off of the header, return both
-- | Split a raw post into (header, body) at the end of the leading
-- "|"-prefixed header block, stripping the "| " (or bare "|") prefix
-- from every header line.
breakPost :: B.ByteString -> (B.ByteString, B.ByteString)
breakPost raw = (B.unlines headerLines, B.unlines bodyLines)
  where
    bar      = B.pack "|"
    barSpace = B.pack "| "
    strip ln
      | barSpace `B.isPrefixOf` ln = B.drop 2 ln
      | bar      `B.isPrefixOf` ln = B.drop 1 ln
      | otherwise                  = ln
    (rawHeader, bodyLines) = span (bar `B.isPrefixOf`) (B.lines raw)
    headerLines            = map strip rawHeader
-- | Load one markdown post: stamp it with the file's mtime (rendered as
-- an atom timestamp), split off the "|"-prefixed header block, apply the
-- 'headerTable' mutator for each recognised header, and render the body
-- from Markdown to HTML with Pandoc (smart punctuation and HTML
-- sanitising enabled).  Dies with 'error' when the headers fail to parse.
readPost :: String -> FilePath -> IO Post
readPost pId path = do
    !tz <- getCurrentTimeZone
    !t <- getTimeStamp path
    let !atm = formatAtomTime tz t
    !contents <- B.readFile path
    let (hdr,body) = breakPost contents
    let !hdrS = B.unpack hdr
    let !cfg = case parseHeaders hdrS of
                 Left e -> error
                             $ printf "Couldn't parse headers from %s:\n%s"
                                      path (show e)
                 Right r -> r
    -- start from a null entry and fold every recognised header over it
    let !post = foldl (\p (k,f) ->
                           case getKVP cfg k of
                             Nothing -> p
                             Just x -> f x p)
                      (Post $ nullEntry pId (HTMLString "") atm)
                      headerTable
    let !pstate = Pandoc.defaultParserState { Pandoc.stateSmart = True
                                            , Pandoc.stateSanitizeHTML=True }
    let !wopts = Pandoc.defaultWriterOptions { Pandoc.writerStandalone = False }
    let !html = Pandoc.writeHtmlString wopts $ Pandoc.readMarkdown pstate $ B.unpack body
    return $! setEntryHTMLContent html
            $! setEntryLinks [ (nullLink pId) {
                                 linkRel = Just $ Left "alternate"
                               } ]
            $! post
-- | Flatten the content tree into a list of posts, honouring the
-- exclusion list at every directory level and skipping posts named
-- "index".  The final catch-all clause covers the remaining
-- 'ContentItem' constructors (static items), which contribute no posts.
collectPosts :: ExcludeList -> ContentMap -> [Post]
collectPosts el m = help el ("", ContentDirectory "" m)
  where
    -- don't count posts named "index" -- they're there to provide
    -- text for directory indices
    help :: ExcludeList -> (ByteString, ContentItem) -> [Post]
    help s (nm, ContentPost p) =
        if nm == "index" || EL.matches nm s then [] else [p]
    help s (nm, ContentDirectory _ cm) =
        if not $ EL.matches nm s
          then concatMap (help (EL.descend nm s)) $ Map.assocs cm
          else []
    help _ _ = []
-- | The newest @nposts@ posts, newest first.
recentPosts :: ExcludeList -> ContentMap -> Int -> [Post]
recentPosts excl cmap nposts = take nposts (reverseChronologicalPosts excl cmap)
-- | All non-excluded posts, oldest first (ordered by post time
-- converted to UTC).
chronologicalPosts :: ExcludeList -> ContentMap -> [Post]
chronologicalPosts excl cmap = sortBy byTime (collectPosts excl cmap)
  where
    postUTC    = zonedTimeToUTC . getPostTime
    byTime a b = compare (postUTC a) (postUTC b)
-- | All non-excluded posts, newest first.
reverseChronologicalPosts :: ExcludeList -> ContentMap -> [Post]
reverseChronologicalPosts excl cmap = reverse (chronologicalPosts excl cmap)
-- | All non-excluded posts sorted by their entry id.
alphabeticalPosts :: ExcludeList -> ContentMap -> [Post]
alphabeticalPosts excl cmap = sortBy byId (collectPosts excl cmap)
  where
    byId (Post a) (Post b) = compare (entryId a) (entryId b)
-- | Walk @basedir@ recursively and build the content map: @*.md@ files
-- become parsed posts (keyed by basename, id prefixed with @baseURL@),
-- directories recurse, everything else becomes a static item.  Dotfiles
-- and editor backups ("~" suffix) are skipped.
buildContentMap :: String -> FilePath -> IO ContentMap
buildContentMap baseURL basedir = build [] "."
  where
    build :: [String] -> FilePath -> IO ContentMap
    build prefixes path = do
        files <- getDirectoryContents $ basedir </> path
        foldM processFile Map.empty files
      where
        ----------------------------------------------------------------
        pathSoFar :: FilePath
        pathSoFar = intercalate "/" prefixes
        ----------------------------------------------------------------
        -- dispatch one directory entry, skipping hidden/backup files
        processFile :: ContentMap -> FilePath -> IO ContentMap
        processFile mp f =
            if "." `isPrefixOf` f || "~" `isSuffixOf` f then
                return mp
              else do
                isDir <- doesDirectoryExist $ basedir </> pathSoFar </> f
                if isDir then dir mp f else file mp f
        ----------------------------------------------------------------
        -- recurse into a subdirectory and insert it as a ContentDirectory
        dir :: ContentMap -> FilePath -> IO ContentMap
        dir mp f = do
            let fp = if null pathSoFar then f else concat [pathSoFar, "/", f]
            let fullPath = B.pack (concat [baseURL, "/", fp])
            !cm <- build (prefixes ++ [f]) fp
            return $! Map.insert (B.pack f)
                                 (ContentDirectory fullPath cm)
                                 mp
        ----------------------------------------------------------------
        -- insert a single file: markdown becomes a post, anything else
        -- a static item
        file :: ContentMap -> FilePath -> IO ContentMap
        file mp f = do
            let fp = basedir </> pathSoFar </> f
            if ".md" `isSuffixOf` f then do
                -- it's a post
                let baseName = dropExtension f
                let pId = concat [baseURL, "/", pathSoFar, "/", baseName]
                !p <- readPost pId fp
                return $! Map.insert (B.pack baseName) (ContentPost p) mp
              else
                -- it's a static item
                return $! Map.insert (B.pack f) (ContentStatic fp) mp
------------------------------------------------------------------------
-- mutator functions for post objects
-- Record-update wrappers over the wrapped atom 'Entry': each setter
-- replaces exactly one field of the entry and leaves the rest unchanged.
setEntryId :: String -> Post -> Post
setEntryId x (Post p) = Post $ p { entryId = x }
setEntryTitle :: String -> Post -> Post
setEntryTitle x (Post p) = Post $ p { entryTitle = TextString x }
--setEntryUpdated :: TimeZone -> UTCTime -> Post -> Post
--setEntryUpdated tz tm (Post p) = Post $ p { entryUpdated = formatAtomTime tz tm }
setEntryUpdated :: String -> Post -> Post
setEntryUpdated tm (Post p) = Post $ p { entryUpdated = tm }
setEntryAuthors :: String -> Post -> Post
setEntryAuthors x (Post p) = Post $ p { entryAuthors = parsePersons x }
setEntrySummary :: String -> Post -> Post
setEntrySummary x (Post p) = Post $ p { entrySummary = Just $ HTMLString x }
setEntryHTMLContent :: String -> Post -> Post
setEntryHTMLContent x (Post p) = Post $ p { entryContent = Just $ HTMLContent x }
setEntryContributor :: String -> Post -> Post
setEntryContributor x (Post p) = Post $ p { entryContributor = parsePersons x }
-- doubt we'll be using these for now
setEntryCategories :: [Category] -> Post -> Post
setEntryCategories x (Post p) = Post $ p { entryCategories = x }
setEntryLinks :: [Link] -> Post -> Post
setEntryLinks x (Post p) = Post $ p { entryLinks = x }
setEntryPublished :: Maybe Date -> Post -> Post
setEntryPublished x (Post p) = Post $ p { entryPublished = x }
setEntryRights :: Maybe TextContent -> Post -> Post
setEntryRights x (Post p) = Post $ p { entryRights = x }
setEntrySource :: Maybe Source -> Post -> Post
setEntrySource x (Post p) = Post $ p { entrySource = x }
setEntryInReplyTo :: Maybe InReplyTo -> Post -> Post
setEntryInReplyTo x (Post p) = Post $ p { entryInReplyTo = x }
setEntryInReplyTotal :: Maybe InReplyTotal -> Post -> Post
setEntryInReplyTotal x (Post p) = Post $ p { entryInReplyTotal = x }
setEntryAttrs :: [Attr] -> Post -> Post
setEntryAttrs x (Post p) = Post $ p { entryAttrs = x }
setEntryOther :: [Element] -> Post -> Post
setEntryOther x (Post p) = Post $ p { entryOther = x }
| gregorycollins/blaaargh | src/Blaaargh/Internal/Post.hs | gpl-2.0 | 10,240 | 37 | 17 | 2,928 | 2,712 | 1,480 | 1,232 | 211 | 5 |
-- Compiler Toolkit: test routines for state modules
--
-- Author : Manuel M. T. Chakravarty
-- Created: 2 November 95
--
-- Copyright (c) [1995..1998] Manuel M. T. Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- This module provides code used to test the state base modules.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 1.4
--
--- TODO ----------------------------------------------------------------------
--
module Main
where
import State
-- | Entry point: run the state-module self-tests inside an initial
-- PreCST state (empty name/version triple, unit user state).
main :: IO ()
main = run ("", "", "") () (
         putStrCIO "Compiler Toolkit state test program\n" +>
         putStrCIO "===================================\n\n" +>
         testErrors +>
         testExceptions
       )
-- | Exercise error management: raise one warning and one error, then
-- dump the accumulated messages twice.
testErrors :: PreCST e s ()
testErrors = putStrCIO "Testing error management...\n\n" +>
             raiseWarning ("myfile", 100, 10) ["A nice warning message"] +>
             raiseError ("myfile", 50, 25) ["That's too much of an error.",
                                            "Please avoid this kind of\
                                            \ errors in the future."] +>
             dumpErrors +>
             dumpErrors +>
             putStrCIO "\n...done (testing error management).\n"
             where
               -- print the collected error messages followed by a newline
               dumpErrors :: PreCST e s ()
               dumpErrors = showErrors +>= \msg ->
                            putStrCIO msg +>
                            newlineCIO
-- | Exercise exception handling: catch a named exception, handle a
-- fatal error, and check that an exception whose name matches no
-- handler propagates and is caught by the fatal-error handler instead.
testExceptions :: PreCST e s ()
testExceptions = putStrCIO "Testing exception handling...\n\n" +>
                 (raiseExc "testexc")
                   `catchExc` ("testexc",
                               \msg ->
                               putStrCIO ("Caught `testexc' with message `"
                                          ++ msg ++ "'.\n")
                              ) +>
                 raiseFatal
                   `fatalsHandledBy` (\err ->
                                      putStrCIO ("Caught fatal error: "
                                                 ++ show err ++ "\n")
                                     ) +>
                 -- the following exception is not caught and so
                 -- should turn into a fatal error
                 ((raiseExc "otherexc")
                    `catchExc` ("testexc",
                                \msg ->
                                putStrCIO ("ATTENTION: If this shows an \
                                           \exception erroneously caught!!!\n")
                               )
                 )
                   `fatalsHandledBy` (\err ->
                                      putStrCIO ("Caught fatal error: "
                                                 ++ show err ++ "\n")
                                     ) +>
                 putStrCIO "\n...done (testing exception handling).\n"
                 where
                   -- announce, throw, and (if handling works) never
                   -- reach the trailing message
                   raiseExc :: String -> PreCST e s ()
                   raiseExc exc = putStrCIO ("Will now raise `" ++ exc
                                             ++ "'.\n") +>
                                  throwExc exc "A hell of an exception!" +>
                                  putStrCIO ("ATTENTION: This message must \
                                             \never show!!!\n")
                   raiseFatal :: PreCST e s ()
                   raiseFatal = putStrCIO "Will now trigger a fatal \
                                          \error!\n" +>
                                fatal "Fatal indeed!" +>
                                putStrCIO ("ATTENTION: This message must \
                                           \never show!!!\n")
| jrockway/c2hs | tests/state.hs | gpl-2.0 | 3,307 | 75 | 16 | 972 | 542 | 301 | 241 | 53 | 1 |
{- |
Module : $Header$
Description : General connection to the database
Copyright : (c) Immanuel Normann, Uni Bremen 2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : inormann@jacobs-university.de
Stability : provisional
Portability : portable
-}
module Search.DB.Connection where
import Database.HaskellDB.HSQL.MySQL
import Data.Map (Map)
import Database.HaskellDB
import Database.HaskellDB.Database
--import Database.HaskellDB.DriverAPI
import Database.HaskellDB.HDBRec
import Search.DB.FormulaDB.Profile as P
import Search.DB.FormulaDB.Inclusion as I
import Search.DB.FormulaDB.Statistics as S
import MD5
type URI = String
type Theory = URI
type SourceTheory = URI
type TargetTheory = URI
{- TODO:
- handle success/failure info from the database
- provide signatures and types
-}
{- PROFILE
mysql> describe profile;
+-----------------+----------------------------+------+-----+---------+-------+
| Field | Type | Null | Key | Default | Extra |
+-----------------+----------------------------+------+-----+---------+-------+
| library | char(32) | YES | | NULL | |
| file | char(255) | YES | | NULL | |
| line | int(11) | YES | | NULL | |
| formula | text | YES | | NULL | |
| skeleton | text | YES | | NULL | |
| skeleton_md5 | char(32) | YES | | NULL | |
| parameter | text | YES | | NULL | |
| role | enum('axiom','conjecture') | YES | | NULL | |
| norm_strength | enum('strong','weak') | YES | | NULL | |
| skeleton_length | int(11) | YES | | NULL | |
+-----------------+----------------------------+------+-----+---------+-------+
-}
-- ProfileTuple = (library',file',line',role',formula',skeleton',parameter',norm_strength')
type ProfileTuple = (String, String, Int, String, String, String, String, String, String, Int)
type ProfileRec =
RecNil ->
RecCons P.Library
(Expr String)
(RecCons P.File
(Expr String)
(RecCons P.Line
(Expr Int)
(RecCons P.Formula
(Expr String)
(RecCons P.Skeleton
(Expr String)
(RecCons P.Skeleton_md5
(Expr String)
(RecCons P.Parameter
(Expr String)
(RecCons P.Role
(Expr String)
(RecCons P.Norm_strength
(Expr String)
(RecCons Skeleton_length
(Expr Int)
RecNil)))))))))
-- | Build an insert clause for the @profile@ table from a profile tuple.
-- NOTE(review): the @formula@ column is filled with the md5 of the shown
-- formula (see the commented-out alternative below), so the table stores
-- a digest rather than the formula text — confirm this is intended.
profile2clause (library',file',line',role',formula',skeleton',parameter',norm_strength') =
    ((P.library << constant library') #
     (P.file << constant file') #
     (line << constant line') #
     (formula << (constant $ md5s $ Str $ show formula')) #
--     (formula << (constant $ show formula')) #
     (skeleton << (constant $ show skeleton')) #
     (skeleton_md5 << (constant $ md5s $ Str $ show $ skeleton')) #
     (parameter << constant (show parameter')) #
     (role << constant role') #
     (norm_strength << constant norm_strength') #
     (skeleton_length << (constant $ length $ show skeleton')))
-- | Insert many profile records on a single database connection.
multiInsertProfiles :: (Show a, Show t1, Show t) =>
                       [(String, String, Int, String, t, a, t1, String)] -> IO ()
multiInsertProfiles recs = myMultiInsert profile (map profile2clause recs)
{- INCLUSION
mysql> describe inclusion;
+---------------+-----------+------+-----+---------+-------+
| Field | Type | Null | Key | Default | Extra |
+---------------+-----------+------+-----+---------+-------+
| source | char(255) | YES | | NULL | |
| target | char(255) | YES | | NULL | |
| line_assoc | text | YES | | NULL | |
| morphism | text | YES | | NULL | |
| morphism_size | int(11) | YES | | NULL | |
+---------------+-----------+------+-----+---------+-------+
-}
type FormulaIdMap fid = Map fid fid -- from source to target
type ParameterMap p = Map p p -- from source to target
type InclusionTuple f p = (URI, URI, FormulaIdMap f, ParameterMap p, Int)
type InclusionRec =
RecNil ->
RecCons I.Source
(Expr String)
(RecCons I.Target
(Expr String)
(RecCons I.Line_assoc
(Expr String)
(RecCons I.Morphism
(Expr String)
(RecCons I.Morphism_size (Expr Int) RecNil))))
-- | Build an insert clause for the @inclusion@ table; the line
-- association and morphism maps are serialised with 'show'.
inclusion2clause :: (Show f, Show p) => InclusionTuple f p -> InclusionRec
inclusion2clause (source', target', line_assoc', morphism', morphism_size') =
    ((source <<- source') #
     (target <<- target') #
     (line_assoc <<- show line_assoc') #
     (morphism <<- show morphism') #
     (morphism_size <<- morphism_size'))
insertInclusion rec = myInsert inclusion (inclusion2clause rec)
-- | Insert many theory-inclusion records on a single connection.
multiInsertInclusion :: (Show f, Show p) => [InclusionTuple f p] -> IO ()
multiInsertInclusion recs = myMultiInsert inclusion (map inclusion2clause recs)
{- STATISTICS
mysql> describe statistics;
+-------------+-----------+------+-----+---------+-------+
| Field | Type | Null | Key | Default | Extra |
+-------------+-----------+------+-----+---------+-------+
| library | char(32) | YES | | NULL | |
| file | char(255) | YES | | NULL | |
| tautologies | int(11) | YES | | NULL | |
| duplicates | int(11) | YES | | NULL | |
| formulae | int(11) | YES | | NULL | |
+-------------+-----------+------+-----+---------+-------+
-}
type StatisticsTuple = (URI, Theory, Int, Int, Int)
type StatisticsRec =
RecNil ->
RecCons S.Library
(Expr String)
(RecCons S.File
(Expr String)
(RecCons S.Tautologies
(Expr Int)
(RecCons S.Duplicates
(Expr Int)
(RecCons S.Formulae (Expr Int) RecNil))))
-- | Build an insert clause for the @statistics@ table.
stat2clause :: StatisticsTuple -> StatisticsRec
stat2clause (library',file',nrOfTautologies,nrOfDuplicates,len) =
    ((S.library << constant library') #
     (S.file << constant file') #
     (tautologies << constant nrOfTautologies) #
     (duplicates << constant nrOfDuplicates) #
     (formulae << constant len)) -- the number of formulae without tautologies and duplicates
-- | Insert one per-file statistics record.
insertStatistics :: StatisticsTuple -> IO ()
insertStatistics stat = myInsert statistics (stat2clause stat)
{-
DATABASE CONNECTION
connect :: (MonadIO m) => DriverInterface -> [(String, String)] -> (Database -> m a) -> m a
-}
--options = [("server","localhost"),("db","formulaDB"),("uid","constructive"),("pwd","constructive")]
options = [("server","localhost"),("db","formulaDB"),("uid","active"),("pwd","pi=3,141")]
-- | Open a connection with the fixed 'options' and run an action on it.
myConnect :: (Database -> IO a) -> IO a
myConnect = connect driver options
--myConnect = connect defaultdriver [] -- todo: set real driver and options
--see: Search.Config (home,dbDriver,dbServer,dbDatabase,dbPassword,dbUsername)
-- | Run a single query against a freshly opened connection.
myQuery :: (GetRec er vr) => Query (Rel er) -> IO [Record vr]
myQuery q = myConnect (`query` q)
myInsert table rec = do myConnect (\db -> insert db table rec)
myMultiInsert table recs = do myConnect (\db -> mapM_ (insert db table) recs) | nevrenato/Hets_Fork | Search/DB/Connection.hs | gpl-2.0 | 7,425 | 111 | 20 | 2,062 | 1,490 | 823 | 667 | 104 | 1 |
module Operate.Param where
import qualified Control.Passwort
import Control.Types ( Wert )
import Inter.Types ( Variant, Make )
import Operate.Click
import Control.Types
-- import qualified Text.XHtml
import qualified Gateway.Html as H
-- | CGI - Zustands-Daten
data Type =
Param { -- | configured
makers :: [ Make ]
, input_width :: Int
-- | user input (deprecated identification)
, mmatrikel :: Maybe MNr
, mpasswort :: Maybe Control.Passwort.Type
, aufgabe :: Name
, typ :: Typ
, conf :: Config
, remark :: Remark
, minstant :: Maybe H.Html
, input :: Maybe String
, mresult :: Maybe Wert
, report :: Maybe H.Html
, wahl :: String -- ^ vorige aufgabe
, click :: Click
-- | after login key for DB
, ident :: SNr
, highscore :: HiLo
, anr :: ANr
, vnr :: VNr
, names :: [ Name ]
-- | generated
, variante :: Variant
}
deriving Show
{-
result p = case mresult p of
Just r -> r
Nothing -> error "Inter.Param.mresult = Nothing"
-}
-- | Unwrap the (deprecated) matriculation number; crashes when absent.
matrikel p = maybe (error "Inter.Param.mmatrikel = Nothing") id (mmatrikel p)
-- | Unwrap the (deprecated) password; crashes when absent.
passwort p = maybe (error "Inter.Param.mpasswort = Nothing") id (mpasswort p)
-- String renderings of the identification fields.
smatrikel p = toString $ matrikel p
saufgabe p = toString $ aufgabe p
styp p = toString $ typ p
sident p = toString $ ident p
-- | "<vorlesung>-<aufgabe>" key built from the two numbers.
subject p = toString (vnr p) ++ "-" ++ toString (anr p)
-- | A blank parameter record: fields without a sensible default are
-- 'error' thunks carrying the field name, so accidental access fails
-- loudly and identifiably.
empty :: Type
empty = Param { makers = []
              , mmatrikel = Nothing
              , mpasswort = Nothing
              , aufgabe = error "Param.empty.aufgabe"
              , typ = error "Param.empty.typ"
              , conf = error "Param.empty.conf"
              , remark = error "Param.empty.remark"
              , minstant = Nothing
              , input = Nothing
              , report = Nothing
              , mresult = Nothing
              , wahl = ""
              , click = Example
              , ident = error "Param.empty.ident"
              , input_width = 80
              , variante = error "Param.empty.variante"
              , names = []
              , highscore = error "Param.empty.highscore"
              , anr = error "Param.empty.anr"
              , vnr = error "Param.empty.vnr"
              }
| marcellussiegburg/autotool | db/src/Operate/Param.hs | gpl-2.0 | 2,383 | 2 | 10 | 866 | 549 | 316 | 233 | 62 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-} --FIXME only needed for debug code
{-# LANGUAGE FlexibleInstances #-} --dito
{-
Concurrent Hashmap -
Copyright (C) 2014 Mathias Bartl
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
-- TODO make assertEqualOrNothing
-- TODO Data.HashTables... or Data.Concurrent...
--something like Data.HashTables.IO.NonBlocking.something
-- Data.HashTables.IO.Concurrent.NonBlocking.something
-- TODO make list of all Hashtable libraries in haskell and compare
module Data.HashTable.Concurrent
(
-- * Alpha version limitations
-- | Table resize is not working, use 'newConcurrentHashTableHint' and 'clearHint' with appropirate /hint/ for the number of slots!
-- For technical reasons once a slot has been used for a given key-value it can not be reused with a different key even after
-- the original mapping has been removed.
--
-- Be /n/ the maximum number of mappings stored in the hashtable at any time, and /k/ the size of the set of all keys entered over the
-- entire lifespan of the hashtable. It is not only required that /hint > n/ but also /hint > k/.
--
-- In order to reduce the number of reprobes we recommend that at last /hint > 8k/.
--
-- * Maximum number of slots
--
-- A Hashtable can hold between __2^3__ and __2^31__ slots.
-- * Creating hash tables
ConcurrentHashTable
, newConcurrentHashTableHint, newConcurrentHashTable
-- * Predicates and properties
, size, isEmpty, containsKey, containsValue
-- * Basic reading and writing
, put, putIfAbsent, get
-- * Removing or replacing
, removeKey, remove, replace, replaceTest, clear, clearHint -- TODO should clearHint really be exported
-- * Debuging
, debugShow, getNumberOfOngoingResizes, getLengthsOfVectors, getSlotsCounters, countUsedSlots, getReprobeCount
)
where
import GHC.IORef(IORef(IORef), readIORef, newIORef, writeIORef)
import Data.Hashable(Hashable, hash)
import Data.Bits((.&.), xor, shiftL, shiftR)
import Data.Atomics
--todo restrict and qualify
import Control.Exception(assert)
import Data.Atomics.Counter
import qualified Data.Vector as V
import Data.Maybe (isJust, isNothing, fromJust)
import Data.Either.Unwrap (fromLeft, isLeft, isRight, fromRight)
import Control.Monad.ST (runST)
import Data.Word (Word)
import Data.Int (Int64)
import Control.Exception.Base (ioError) -- TODO remove once resizing works
import System.IO.Error (userError) -- TODO ditto
import Numeric (showIntAtBase) -- FIXME for debug only
import Data.Char (intToDigit) --dito
import Test.QuickCheck.Arbitrary as QCA
import Test.QuickCheck.Gen as QCG
import Test.QuickCheck.Gen.Unsafe as QCGU
-- TODO look for 32/64 bit issues, Magic Numbers
-- | Record the time of the last table resize.
-- NOTE(review): unimplemented stub — any call diverges via 'undefined'.
setLastResizeTime :: ConcurrentHashTable key value -> IO ()
setLastResizeTime = undefined
-- Sizing constants for the slot vector.  NOTE(review): most lack type
-- signatures and rely on defaulting/inference.
min_size_log = 3
min_size = 2 ^ min_size_log --must be power of 2, compiler should turn this into a constant --TODO have this as a perprocessor constant, so it
 --can be included in haddock
max_size_log = 31
max_size = 2 ^ max_size_log
resize_milliseconds:: Time
resize_milliseconds = 1000 -- FIXME question 1 second or 10 seconds
-- TODO put this value in the haddock documentation
-- FIXME whats the defined behaviour, if the hashtable gets really full, better, to throw an error, than to somehow fail or hang
-- TODO is there an suitable error to be thrown
-- Base reprobe allowance; 'reprobe_limit' scales it with table size.
_reprobe_limit = 10
-- | Maximum number of probes tolerated for a table of @len@ slots:
-- the base limit plus a quarter of the table size.
reprobe_limit :: Int -> Int
reprobe_limit len = _reprobe_limit + (shiftR len 2)
-- | Index mask for a power-of-two sized slot vector (size minus one).
getMask :: Size -> Mask
getMask n = n - 1
--data representation
---------------------------------------------------------------------------------------------------------------------------------
-- Kempty : empty, K : neverchanging key
--data Key key = Kempty | K key deriving (Eq)
-- TODO make instance of Eq
-- TODO do keys need to be primed
data Key k = Kempty
| Key { fullHash :: !FullHash
, keyE :: !k
}
-- T : empty, tombstone, Tp : tombstone primed, V : value, Vp : value primed -- TODO need tombstones to be primed
data Value value = T | Tp | V value | Vp value | S deriving (Eq) -- TODO what kind of comparision is used
data Slot k v = Slot {
key :: IORef (Key k)
, value :: IORef (Value v)
}
data Kvs k v = Kvs {
newkvs :: IORef (Maybe ( Kvs k v))
-- TODO, get rid of the extra indirection,
-- https://github.com/gregorycollins/hashtables/blob/master/src/Data/HashTable/Internal/UnsafeTricks.hs
-- TODO but this trick should already be in the Maybe Monad, if I do this I should make it general for all the indirections
-- TODO, is there some IORef Maybe module or even moe general around, if not I could write one, or should ghc be able to optimize this.
,slots :: Slots k v
, mask :: Mask
, slotsCounter :: SlotsCounter
, sizeCounter :: SizeCounter
, _copyDone :: IORef CopyDone
, copyIndex :: IORef CopyIndex
}
type SlotsCounter = AtomicCounter
type SizeCounter = AtomicCounter
type FullHash = SlotsIndex
type CopyDone = SlotsIndex -- originally an atomic-long-field updater
type CopyIndex = SlotsIndex
type FullHashUnsigned = Word
type SlotsIndex = Int
type Mask = SlotsIndex
type Size = Int
type SizeLog = Int
type Time = Int64 -- ^ time in milliseconds -- TODO should be long or something
type ReprobeCounter = Int
newReprobeCounter = 0
data ValComparator = MATCH_ANY | NO_MATCH_OLD deriving Eq
type ValComp val = Either (Value val) ValComparator
type ReturnValue val = (Bool, Value val)
-- TODO find good name for this type, write description
type Slots key val = V.Vector (Slot key val)
-- TODO issue accessing the array generates a full copy, fix this latter
data ConcurrentHashTable key val = ConcurrentHashTable {
kvs :: IORef(Kvs key val)
, timeOfLastResize :: IORef Time
}
-- functions for keys
--------------------------------------------------------------------------------------------------------------------------------------------
newKey :: Hashable k => k -> Key k
newKey k = Key (spreadHash $ hash k) k
emptyKey = Kempty
getFullHash :: Key k -> FullHash
getFullHash = fullHash
getKey :: Key k -> Maybe k
getKey Kempty = Nothing
getKey (Key h k) = Just k
--compares keys
keyComp:: Eq key =>
Key key -> Key key -> Bool
keyComp Kempty Kempty = True
keyComp Kempty _ = False
keyComp _ Kempty = False
keyComp (Key h1 k1) (Key h2 k2) = if h1 == h2 then k1 == k2 else False
isKEmpty :: Key key -> Bool
isKEmpty Kempty = True
isKEmpty _ = False
-- functions for values
--------------------------------------------------------------------------------------------------------------------------------------------
unwrapValue :: Value val -> Maybe val
unwrapValue T = Nothing
unwrapValue Tp = Nothing
unwrapValue (V a) = Just a
unwrapValue (Vp a) = Just a
-- TODO what if Sentinel
--probably best to throw error
--for use by
valCompComp :: Eq val =>
ValComp val -> Value val -> Bool
valCompComp (Left v1) v2 = valComp v1 v2
valCompComp (Right MATCH_ANY) _ = True
-- TODO does comparision with NO_MATCH_OLD fit
valComp :: Eq val => Value val -> Value val -> Bool
valComp (V a) (V b)= a == b
valComp T T = True
valComp T _ = False
valComp _ T = False
valComp S S = True
valComp S _ = False
valComp _ S = False
-- TODO primed values
isPrimedValue :: Value val -> Bool
isPrimedValue Tp = True
isPrimedValue (Vp _) = True
isPrimedValue _ = False
isPrimedValComp :: ValComp val -> Bool
isPrimedValComp (Left v) = isPrimedValue v
isPrimedValComp (Right _) = False
primeValue :: Value val -> Value val
primeValue (V v) = Vp v
isSentinel :: Value val -> Bool
isSentinel S = True
isSentinel _ = False
--puts an Sentinel into the slot
kill :: Slot key val -> IO ()
kill (Slot _ v) = writeIORef v S
isTombstone :: Value val -> Bool
isTombstone T = True
isTombstone _ = False
isValue :: Value val -> Bool
isValue (V _) = True
isValue _ = False
-- TODO what if primed Tombstone
--------------------------------------------------------------------------------------------------------------------------------------------
-- does not terminate if array is full, and key is not in it
-- TODO possibly return whether the slot has a key or the key is empty
-- TODO testcases for reprobe
-- FIXME does not terminate if reprobes infinitely or very often, best would be to return an Maybe
-- | Reprobes until it find an Slot with a fitting or empty key
getSlot :: forall key value . (Eq key) =>
Slots key value -> Mask -> Key key -> IO(Slot key value, ReprobeCounter)
getSlot slots mask key = do let fllhash = fullHash key
idx = maskHash mask fllhash
getSlt slots newReprobeCounter key idx mask
where full :: Key key -> Key key -> Bool
full Kempty _ = False
full k1 k2 = not (keyComp k1 k2) --Collision treatment is done again whe CASing key
getSlt:: Slots key value -> ReprobeCounter -> Key key -> SlotsIndex -> Mask -> IO(Slot key value, ReprobeCounter)
getSlt slots rpcntr newkey idx mask =
do let slot = (slots V.! idx) :: (Slot key value)
oldkey <- (readKeySlot slot)::IO(Key key)
-- TODO error that the table is not already full, then remove this assertion as soon as
-- resizing works, this should make sure that tests that rely on resizing fail with an error instead of hang
-- FIXME REMOVE
sz <- return $ V.length slots
if rpcntr > sz -- every slot has already been probed
then ioError $ userError "Table is full and resizing does not work." else return ()
-- FIXME REMOVE
if full oldkey newkey
then getSlt slots (incReprobeCounter rpcntr) newkey (collision idx mask) mask -- Reprobe
else return (slot,rpcntr) -- Found a Slot that has either an fitting or an empty key
collision :: SlotsIndex -> Mask -> SlotsIndex
collision idx mask = (idx +1) .&. mask
maskHash :: Mask -> FullHash -> SlotsIndex
maskHash mask hsh = hsh .&. mask
-- | variant of single word Wang/Jenkins Hash
-- see line 262 <https://github.com/boundary/high-scale-lib/blob/master/src/main/java/org/cliffc/high_scale_lib/NonBlockingHashtable.java>
spreadHash :: FullHash -> FullHash
spreadHash input = runST $ do h <- return $ input + ( (shiftL input 15) `xor` 0xffffcd7d)
h <- return $ h `xor` (unsignedShiftR h 10)
h <- return $ h + (shiftL h 3)
h <- return $ h `xor` (unsignedShiftR h 6)
h <- return $ h + (shiftL h 2) + (shiftL h 14)
return $ h `xor` (unsignedShiftR h 16)
where unsignedShiftR :: Int -> Int -> Int
unsignedShiftR input len= fromIntegral (shiftR ((fromIntegral input)::FullHashUnsigned) len)
-- TODO write a testcase for this hash
-- | Compares the key in a slot with another key
keyCompSlot:: Eq key =>
Slot key val-> Key key -> IO Bool
keyCompSlot slot key = do slotkey <- readKeySlot slot
return $ keyComp slotkey key
isKEmptySlot :: Slot key val -> IO Bool
isKEmptySlot slot = do slotkey <- readKeySlot slot
return $ isKEmpty slotkey
-- see 'get'
-- reading during resize is already implemented
get_impl :: (Eq key, Hashable key) =>
ConcurrentHashTable key val -> Kvs key val -> Key key -> IO(Value val)
get_impl table kvs key = do let msk = mask kvs
slts = slots kvs
(slt,_) <- getSlot slts msk key
k <- readKeySlot slt
v <- readValueSlot slt
if keyComp k key
then if isSentinel v
then do ass <- hasNextKvs kvs
return $ assert ass
newkvs <- getNextKvs kvs
get_impl table newkvs key --look in resized table
else return v
else return T
-- TODO actually we could use IO(Maybe (Value val)) as return type
-- TODO attention may return a primed value
-- TODO treat resize -- TODO call helpCopy
-- TODO count reprobes
-- TODO only pass reference to table if necessary
-- TODO fit get function with table resizing
-- Accessing the slot
--------------------------------------------------------------------------------------------------------------------------------
readKeySlot:: Slot key value -> IO (Key key)
readKeySlot state = readIORef ( key state )
readValueSlot:: Slot key value -> IO (Value value)
readValueSlot state = readIORef ( value state )
readSlot :: Slot key value -> IO (Key key, Value value)
readSlot slt = do key <- readKeySlot slt
value <- readValueSlot slt
return (key, value)
-- | CAS the slot from KEmpty to the new key and succeeds
-- , does nothing and succeeds if the new key is already in the slot
-- , does nothing and fails if another key is already in the slot
-- , never changes the slot if there is already a key in the slot
-- , returns pair of Bool: 1.: True -> new key is in the slot, False -> some other key is in the slot (use this for collision detection)
-- , 2.: True -> cased the slot of KEmpty to the new key thereby using up the slot, False -> the new key was already
-- in the slot ( use this for adapting the slotscounter)
casKeySlot :: (Eq key) =>
(Slot key value) -> Key key -> IO (Bool,Bool)
casKeySlot (Slot ke _) new = do return $ assert $ not $ isKEmpty new -- new key is a actuall key, not KEmpty
oldticket <- readForCAS ke
oldkey <- return $ peekTicket oldticket
if not $ isKEmpty $ oldkey then if keyComp new oldkey then return (True,False) else return (False,False) else
-- is there already an key in the slot, if so is it the same as the new key
do (success, retkeyticket) <- casIORef ke oldticket new
retkey <- return $ peekTicket retkeyticket
return $ assert $ (success && (keyComp new retkey)) || -- cas succeds
((not success) && (not $ isKEmpty retkey)) -- cas only fails if there is already an key
if success then return (True,True) else
if keyComp new retkey then return (True,False) else return (False,False)
-- there was already an key in the slot, no retry because slotkeys are never to be overwritten
-- is it the same as the new key
-- | CAS the slot to the new value; if the slot value fits the compare value
-- returns success and the old value.
casValueSlot :: forall value key. (Eq value) =>
(Slot key value) -> ValComp value -> Value value -> IO (Bool, Value value)
casValueSlot slt@(Slot _ va) cmpvaluecomp newvalue = do
oldvalueticket <- readForCAS va
oldvalue <- return $ peekTicket oldvalueticket
return $ assert $ (not $ isSentinel oldvalue) && (not $ isPrimedValue oldvalue) --FIXME resize
if not $ matchesVal oldvalue cmpvaluecomp newvalue then return (False, oldvalue) else do -- TODO then else confused
(success, retticket) <- casIORef va oldvalueticket newvalue
if success then return (True, oldvalue) else casValueSlot slt cmpvaluecomp newvalue -- TODO This is the Only place for backoff code
where matchesVal :: Value value -> ValComp value -> Value value -> Bool
matchesVal oldvalue (Right MATCH_ANY) _ = not $ isTombstone oldvalue --oldvalue is not a tombstone --FIXME, RESIZE related PRIMED,SENTINEL
matchesVal oldvalue (Right NO_MATCH_OLD) newvalue = not $ valComp oldvalue newvalue -- newvalue != oldvalue
matchesVal oldvalue (Left cmpvalue) _ = valComp oldvalue cmpvalue --oldvalue == cmpvalue --FIXME, RESIZE related PRIMED,SENTINEL
casVal = undefined
--matchesVal is resize ignorant
-- TODO lets think about how to threat primed values
-- TODO one could save oneself one readForCas by reusing retticket, that why the thing returns a ticket.
--by having casValueSlot as an wrapper for an recursive function using tickets
casStripPrime :: (Slot key value) -> IO ()
casStripPrime slt@(Slot _ va) = do oldticket <- readForCAS va
oldvalue <- return $ peekTicket oldticket
if not $ isPrimedValue oldvalue then return () else
do (_,_) <- casIORef va oldticket (stripPrime oldvalue)
return ()
where stripPrime :: Value val -> Value val
stripPrime (Vp a) = V a
stripPrime Tp = T
stripPrime unprimed = unprimed
--get a reference to the value,
-- if not a prime, then write has already happend -> end
-- else contruct an unprimed value, and a reference to it
-- cas that agaist the original reference if failed because somebody already wrote an value then ->end
--resizes stuff
-----------------------------------------------------------------------------------------------------------------------
--
helpCopy :: ConcurrentHashTable key value -> IO ()
helpCopy ht = do topkvs <- getHeadKvs ht
resizeinprogress <- hasNextKvs topkvs
if not resizeinprogress then return () else helpCopyImpl ht topkvs False
where helpCopyImpl :: ConcurrentHashTable key value -> Kvs key value -> Bool -> IO ()
helpCopyImpl ht oldkvs copyall = do newkvs <- getNextKvs oldkvs -- TODO assert hasNextKvs oldkvs == return True
undefined
-- TODO enter calls of helpCopy in the marked functions
resizeInProgress :: ConcurrentHashTable key value -> IO Bool
resizeInProgress ht = do topkvs <- getHeadKvs ht
hasNextKvs topkvs
-- TODO, what happens to the slotscounter of the old kvs, how does the program know, that
-- the old kvs has been completely copied
copyOnePair :: Slot key value -> Kvs key value -> IO ()
copyOnePair slt newkvs = do undefined -- TODO read Slot
-- TODO should there be any assert special case
-- treatment, or should these be in a wrapper
(oldkey, oldvalue) <- readSlot slt -- TODO no Key here -> nothing to copy
-- TODO Tombstone no need to copy, but best to cas a sentinel
(casNewSuccess, newSlot) <- putAndReturnSlot oldkey oldvalue newkvs -- the slot on the newkvs where the primed value has been put)
if casNewSuccess then do casSsuccess <- undefined
if casSsuccess then do fence
casStripPrime newSlot
else undefined
else undefined
where fence = undefined
putAndReturnSlot :: Key key -> Value value -> Kvs key value -> IO (Bool, Slot key value)
putAndReturnSlot key val kvs = undefined
-- | removes the oldest kvs from the ht
--throws error, if there is no resize in progress and thus only one kvs
--some other routine has to determine that the oldest kvs is completely copied, and that the routine is not called multiple times for the same kvs
-- maybe there has to be an cas used -- TODO probably better us CAS with 'casHeadKvs'
removeOldestKvs :: ConcurrentHashTable key val -> IO ()
removeOldestKvs ht = do let htKvsRef = kvs ht
oldestKvs <- getHeadKvs ht
secondOldestKvs <- getNextKvs oldestKvs --throws error, if no resize is in progress
writeIORef htKvsRef secondOldestKvs
--oldestKvs will be GCted, one could explicitly destroy oldestKvs here
copySlotAndCheck :: ConcurrentHashTable key value -> Kvs key value -> SlotsIndex -> Bool -> IO () --(Kvs key value) -- TODO make type signature
copySlotAndCheck ht oldkvs idx shouldHelp = do newkvs <- getNextKvs oldkvs -- TODO originally there's a volatile read
success <- undefined -- TODO Copy Slot
if success then copyCheckAndPromote ht oldkvs 1 else return ()
if not shouldHelp then return () else -- TODO possible return newkvs here
helpCopy ht -- TODO does helpCopy need any further parameters
-- | Increases the copy done counter for oldkvs by workdone, and removes oldkvs if it is the oldest kvs and has been fully copied
copyCheckAndPromote :: ConcurrentHashTable key value -> Kvs key value -> SlotsIndex -> IO ()
copyCheckAndPromote ht oldkvs workdone = do let oldlen = getLength oldkvs -- I do think I should get the ticket before this function is called so we can check simply if the thing that we identified as headkvs is still the headkvs, or something.
copydone <- if workdone > 0 then casCopyDone oldkvs workdone else getCopyDone oldkvs
isheadkvs <- isHeadKvs ht oldkvs-- TODO
if copydone < oldlen then return () else if not $ isheadkvs then return () else
do newkvs <- getNextKvs oldkvs -- Assert hasNextKvs
casHeadKvs ht oldkvs newkvs
setLastResizeTime ht
-- TODO assert workdone is posetive
-- TODO 1. if workdone > 0 casCopyDone.
-- 2. test if new value of copyDon == oldlen
-- 3. if table has been fully copied
-- if oldkvs == topKvs then casKVS with newKvs
--set last resize milli
-- TODO, how to determine if kvs is topkvs
-- TODO write a casRoutine for copydone
-- TODO only use in copyCheckAndPromote
casCopyDone :: Kvs key value -> SlotsIndex -> IO SlotsIndex
casCopyDone kv workdone = do copydoneref <- (return $ getCopyDoneRef kv)::(IO(IORef CopyDone)) -- TODO get copydone object -- Note copy done is originally an
--atomicLongFieldUpdater -- TODO have copydone its own type
copydoneticket <- readForCAS copydoneref
casCD copydoneref copydoneticket workdone
where casCD cdref ticket workdone = do let newval = (peekTicket ticket) + workdone
(success, retticket) <- casIORef cdref ticket newval
if success then return newval else casCD cdref retticket workdone
-- TODO assert copydone + workdone <= oldlen, workdone > 0
getCopyDone :: Kvs key value -> IO CopyDone
getCopyDone kvs = do let ref = _copyDone kvs
readIORef ref
getCopyDoneRef :: Kvs key value -> IORef CopyDone
getCopyDoneRef = _copyDone
resize :: ConcurrentHashTable key value -> Kvs key value -> IO (Kvs key value)
resize ht oldkvs= do hasnextkvs <- hasNextKvs oldkvs
if hasnextkvs then do newkvs <- getNextKvs oldkvs
return newkvs
else undefined -- TODO
where heuristicNewSize:: Size -> SizeCounter -> Time -> Time -> SlotsCounter -> IO Size
heuristicNewSize len szcntr oldtime newtime sltcntr = do sz <- readCounter szcntr
slts <- readCounter sltcntr
newsze <- return sz
-- TODO assert len is positive
newsze <- return $ if sz >= (shiftR len 2) then
if sz >= (shiftR len 1) then shiftL len 2 else shiftL len 1 else newsze
newsze <- return $ if (newsze <= len)
&& (newtime - oldtime <= resize_milliseconds)
&& (slts >= (shiftL sz 1))
then shiftL len 1 else newsze
newsze <- return $ if newsze < len then len else newsze
return $ normSize newsze-- TODO assert table is not shrinking
-- TODO more functional coding, or at least separate ST, IO, handle time better
-- TODO add resize to putIfMatch
-- TODO add tableFull to putIfMatch
-- TODO Note, for performance sake an implementation of SlotsCounter that is only approximately accurate would fully suffice
-- | Heuristic to determine whether the kvs is so full, that an resize is recommended
tableFull :: ReprobeCounter -- ^ just to check if a resize s in order because of to many reprobes anyway
-> Size -- ^ the number of slots in the kvs
-> SlotsCounter -- ^ how many of them are in use
-> IO Bool
tableFull recounter len sltcounter = do sltcn <- readCounter sltcounter
return $ recounter >= _reprobe_limit && --always allow a few reprobes
sltcn >= (reprobe_limit len) -- kvs is quarter full
-- TODO add _copyIdx and _copyDoe to kvs, and write kvs for them
-------------------------------------------------------------------------------------------------------------------------
putIfMatch_T ::(Hashable key, Eq key, Eq value) =>
ConcurrentHashTable key value -> key -> Value value -> ValComp value -> IO ( Value value)
putIfMatch_T table key putVal expVal = do let ky = newKey key
kv <- getHeadKvs table
putIfMatch kv ky putVal expVal
-- TODO write during resize
-- TODO call helpCopy, resize, tableFull
-- TODO refactor for readability/structure
-- TODO add reprobe counter
-- TODO, do we need to pass the Hashtable as parameter?
-- TODO use only by accessor functions, not by resizing algorithm
-- TODO assert key is not empty, putval is no empty, but possibly a tombstone, key value are not primed
putIfMatch :: forall key val. (Hashable key, Eq key, Eq val) =>
Kvs key val -> Key key -> Value val -> ValComp val -> IO (Value val)
putIfMatch kvs key putVal expVal = do
let msk = mask kvs :: Mask
slts = slots kvs
fllhash = fullHash key :: FullHash
idx = maskHash msk fllhash ::SlotsIndex --parameterize maskHash with kvs and key
-- TODO get highest kvs and test if a resize is running and then get the second highest kvs
return $ assert $ not $ isKEmpty key -- TODO this is not in the original
return $ assert $ not $ isPrimedValue putVal
return $ assert $ not $ isPrimedValComp expVal
(slot, rpcntr) <- (getSlot slts msk key) ::IO(Slot key val, ReprobeCounter) --TODO, either remove this or have it give back a reprobe counter
oldKey <- readKeySlot slot
if isKEmpty oldKey --if putvall TMBSTONE and oldkey == empty do nothing -- TODO put this lines into an sub function
then if ((isTombstone putVal) || expVal == Right MATCH_ANY || if isLeft expVal then not $ isTombstone $ fromLeft expVal else False)
-- if oldkey empty and MATCH_ANY do nothing
then return T {-TODO break writing value unnecessary -}
else ptIfmtch slts msk key putVal expVal idx rpcntr -- TODO remove line duplication
else ptIfmtch slts msk key putVal expVal idx rpcntr
-- TODO when would cas fail
--actually does the putting after tests and special cases have been handled
where ptIfmtch :: Slots key val -> Mask -> Key key -> Value val -> ValComp val ->
SlotsIndex -> ReprobeCounter -> IO(Value val)
ptIfmtch slts msk key newval compval idx reprobectr = do let slt = slts V.! idx
rekcall = ptIfmtch slts msk key newval compval
-- TODO check if Slot is Kempty and compval==match any, then break and return T
keyfits <- helper2 slt
if keyfits
then
do (success,ret) <- casValueSlot slt compval newval
if success then --was there an change made
opSizeCntr ret newval else return ()--updating the sizecounter
return ret
else rekcall (collision idx msk)
(reprobectr +1)
-- checks if the key in slt fits or puts the newkey there if ther's an empty
-- responsible for updating the slotscounter
where helper2 :: Slot key val -> IO Bool
helper2 slt = do (success, cased) <- casKeySlot slt key
if cased then incSlotsCntr else return ()
-- TODO doing a simple check before the expensive cas should not be harmful, because of the monotonic nature of keys
return success
-- TODO if T to Value inc size counter, if V to T or S dec size counter
opSizeCntr :: Value val -> Value val -> IO()
opSizeCntr T (V _) = incSizeCounter kvs
opSizeCntr T (Vp _) = incSizeCounter kvs -- TODO debatable Primes are used for 2 stage copy so I need a detialed plan on how to count size during copys, in effect once a key val pair becomes available size increases it becomes unavailiable decreases
opSizeCntr T S = return ()
opSizeCntr (V _) (Vp _) = return ()
opSizeCntr (V _ )(V _) = return ()
opSizeCntr (Vp _) T = decSizeCounter kvs
opSizeCntr (V _ ) T = decSizeCounter kvs
opSizeCntr (V _) S = decSizeCounter kvs
opSizeCntr (Vp _) (V _) = return ()
--opSizeCntr _ _ = return ()
-- TODO check witch changes are possible and witch arnt
-- TODO save sizecntr operationd on resize
-- TODO what if T T
incSlotsCntr :: IO()
incSlotsCntr = incSlotsCounter kvs
-- TODO add reprobe count
-- counter functions
------------------------------------------------------------------------------------------------------------------------
-- | Increments the counter of used slots in the array.
incSlotsCounter :: Kvs key value -> IO ()
incSlotsCounter kvs = do let counter = slotsCounter kvs
incrCounter_ 1 counter
incSizeCounter :: Kvs key value -> IO ()
incSizeCounter kvs = do let counter = sizeCounter kvs
incrCounter_ 1 counter
-- TODO possibly parameter table
decSizeCounter :: Kvs key value -> IO ()
decSizeCounter kvs = do let counter = sizeCounter kvs
incrCounter_ (-1) counter
readSlotsCounter :: Kvs key value -> IO Int
readSlotsCounter kvs = do let counter = slotsCounter kvs
readCounter counter
readSizeCounter :: Kvs key value -> IO Int
readSizeCounter kvs = do let counter = sizeCounter kvs
readCounter counter
newSizeCounter :: IO(SizeCounter)
newSizeCounter = newCounter 0
incReprobeCounter :: ReprobeCounter -> ReprobeCounter
incReprobeCounter cnt = cnt + 1
-- TODO possibly parameter table
--Exported functions
-- kvs functions
-----------------------------------------------------------------------------------------------------------------------------------------------------
--helper to acess first kvs
getHeadKvs :: ConcurrentHashTable key val -> IO(Kvs key val)
getHeadKvs table = do let kvsref= kvs table
readIORef kvsref
isHeadKvs :: ConcurrentHashTable key val -> Kvs key val -> IO Bool
isHeadKvs ht kv = do let headref = kvs ht -- TODO Does this have to be in IO, ok
return undefined -- TODO ok, I wanted to make an pointer comparision, this would if it was a good idea require me to pass an IOref as parameter, witch would screw up the interface
--gets then new resizedtable, throws error if does not exist
getNextKvs :: Kvs key val -> IO(Kvs key val)
getNextKvs kv = do let kvsref = newkvs kv --throws error
nwkvs <- readIORef kvsref
return $ fromJust nwkvs
-- TODO change if structure of kvs changes
--Is a resize in progress?
hasNextKvs :: Kvs key val -> IO Bool
hasNextKvs kv = do let kvsref = newkvs kv
nwkvs <- readIORef kvsref
return $ isJust nwkvs
noKvs :: IO (IORef (Maybe (Kvs key val)))
noKvs = newIORef Nothing
--cas the newkvs field, returns true if previously empty otherwise false
casNextKvs :: Kvs key val -> Kvs key val -> IO Bool
casNextKvs kvs nwkvs = do let kvsref = newkvs kvs
oldticket <- readForCAS kvsref
if isJust $ peekTicket oldticket then return False else do (success, _) <- casIORef kvsref oldticket (Just nwkvs)
return $ success
-- TODO rewrite the other cas stuff accordingly
-- TODO (Just IORef a) is a stupid construction because setting the IORef from Nothing to Just changes an immutable data structure also you cant do an cas on the Maybe type, todo have some value of IORef that says nothing
-- TODO is this correctly
-- TODO use tickets correctly here
casHeadKvs :: ConcurrentHashTable key val -> Kvs key val -> Kvs key val -> IO ()
casHeadKvs ht oldheadkvs newkvs = do ticket <- readForCAS $ kvs ht
undefined -- TODO See if its still the old kvs in place, or actually would the correct thing not be
-- too get the ticket at the very beginning, tha is actually the only thing that makes sense
getLength :: Kvs key value -> Int
getLength = V.length . slots
-------------------------------------------------------------------------------------------------------------
-- | Returns the number of key-value mappings in this map
size :: ConcurrentHashTable key val -> IO(Size)
size table = do let kvsref= kvs table
kvs <- readIORef kvsref
readSizeCounter kvs
-- | Returns True if there are no key-value mappings
isEmpty :: ConcurrentHashTable key val -> IO(Bool)
isEmpty table = do
s <- size table
return $ s == 0
-- | Tests if the key in the table
containsKey :: (Eq key, Eq val, Hashable key) =>
ConcurrentHashTable key val -> key -> IO(Bool)
containsKey table key = do
value <- get table key
return $ not $ value == Nothing
-- | Tests if the value is in the table.
--
-- __Attention:__ Unlike access by keys this is /computationally very expensive,/ since it requires an traversal of the entire table.
-- If you do this a lot, you need a different data structure.
containsValue :: (Eq val) => ConcurrentHashTable key val -> val -> IO(Bool)
containsValue table val = do let kvsref = kvs table
kv <- readIORef kvsref
containsVal kv (V val)
-- TODO low priority
-- TODO adopt if changes to data representation
-- TODO adopt to resize
-- TODO search should break off once found
containsVal :: forall key val. (Eq val) => Kvs key val -> Value val -> IO(Bool)
containsVal kvs val = do let slts = slots kvs
anyM (pred val) slts
where
pred :: Value val -> Slot key val -> IO(Bool)
pred val slot = do sltkey <- readKeySlot slot
sltval <- readValueSlot slot
if isKEmpty sltkey then return False else
if valComp sltval val then return True else return False
-- check if key is set (linearistion point for get is key AND value set) FIXME Primed values
anyM :: forall m a. Monad m => (a -> m Bool) -> V.Vector a -> m Bool
anyM test v = V.foldM' g False v
where g :: Bool -> a -> (m Bool)
g akk content = do testresult <- test content
return $ testresult || akk
-- TODO adopt to resizing, (by recursively calling for newkvs) anyway what about primed, I should read that up
-- TODO for this the linearization point for inputing would be the cas on value even if the cas on key has not be done yet, actually its better to think about this for a while, maybe not export this function for a while
-- TODO no reason anyM should not be inlined
-- | puts the key-value mapping in the table, thus overwriting any previous mapping of the key to an value
put :: (Eq val,Eq key, Hashable key) =>
ConcurrentHashTable key val -> key
-> val
-> IO( Maybe val) -- ^ Just oldvalue if the key was mapped to an value previously, Nothing if the key was not mapped to any value
put table key val = do old <- putIfMatch_T table key (V val) (Right NO_MATCH_OLD)
return $ unwrapValue old
-- | puts the value if there is no value matched to the key
putIfAbsent :: (Eq val,Eq key, Hashable key) =>
ConcurrentHashTable key val
-> key
-> val
-> IO( Maybe val) -- ^ 'Just' oldvalue if there was an mapping from key (/thus the put WAS NOT done/), 'Nothing' if there wasn't an
-- mapping (/thus the put WAS done/)
putIfAbsent table key val = do old <- putIfMatch_T table key (V val) (Left T) -- TODO is tombstone correct, what if there is a primed value
return $ unwrapValue old
-- | Removes the key (and its corresponding value) from this map.
removeKey :: (Eq val, Eq key, Hashable key) =>
ConcurrentHashTable key val -> key
-> IO( Maybe val) -- ^ 'Just' oldvalue if removed, Nothing if key-value mapping was not in table
removeKey table key = do old <- putIfMatch_T table key T (Right NO_MATCH_OLD)
return $ unwrapValue old
-- | Removes key if matched.
remove :: (Eq val, Eq key, Hashable key) =>
ConcurrentHashTable key val -> key -> val
-> IO( Bool) -- ^ 'True' if key-value mapping removed, False key-value mapping was not in table
remove table key val = do old <- putIfMatch_T table key T (Left (V val))
return $ (unwrapValue old) == Just val
-- | do a put if the key is already mapped to some value
replace :: (Eq val, Eq key, Hashable key) =>
ConcurrentHashTable key val -> key -> val
-> IO( Maybe val) -- ^ Just old value if replaced, Nothing if not replaced
replace table key val = do old <- putIfMatch_T table key (V val) (Right MATCH_ANY)
return $ unwrapValue old
-- | do a put if the key is already mapped to the old value
replaceTest :: (Eq val, Eq key, Hashable key) =>
ConcurrentHashTable key val
-> key -- ^ key
-> val -- ^ new value
-> val -- ^ old value
-> IO(Bool) -- ^ True if replaced
replaceTest table key newval oldval= do old <- putIfMatch_T table key (V newval) (Left (V oldval))
return $ (unwrapValue old) == Just oldval
-- TODO Haddock comment gives actuall min_size
-- | Removes all of the mappings from this map, number of slots to min_size=2^3
--
-- may have concurrency bug
clear :: ConcurrentHashTable key val -> IO()
clear table = clearHint table min_size
-- | Removes all of the mappings from this map, number of slots to next largest power of 2. See: 'newConcurrentHashTableHint'.
--
-- may have concurrency bug
clearHint :: ConcurrentHashTable key val -> Size -> IO()
clearHint table hint = do let size = normSize hint
kvsref = kvs table
szcntr <- newSizeCounter
kvs <- newKvs size szcntr
writeIORef kvsref kvs
-- TODO rewrite in case type of ConcurrentHashTable changes
-- TODO the java version uses a kvsCAS here
-- TODO write an concurrent testcase for this
-- TODO without cas a store load barrier might be in order, see discussion with ryan
-- | Returns the value to which the specified key is mapped.
get :: (Eq key, Hashable key) =>
ConcurrentHashTable key val -> key -> IO( Maybe val)
get table key = do topkvs <- getHeadKvs table
result <- get_impl table topkvs (newKey key)
return $ unwrapValue result
-- | Create a new NonBlockingHashtable with default minimum size (currently set
-- to 8 K/V pairs)
newConcurrentHashTable :: IO(ConcurrentHashTable key val)
newConcurrentHashTable = newConcurrentHashTableHint min_size
-- |Create a new NonBlockingHashtable with initial room for the given number of
-- elements, thus avoiding internal resizing operations to reach an
-- appropriate size. Large numbers here when used with a small count of
-- elements will sacrifice space for a small amount of time gained. The
-- initial size will be rounded up internally to the next larger power of 2.
newConcurrentHashTableHint :: Size -> IO(ConcurrentHashTable key val)
newConcurrentHashTableHint hint = do let size = normSize hint
szcntr <- newSizeCounter
kvs <- newKvs size szcntr
kvsref <- newIORef kvs
timer <- newTimer
return $ ConcurrentHashTable kvsref timer
where newTimer :: IO (IORef Time)
newTimer = newIORef 0 -- TODO set actuall time
-- TODO throw error if size <0
-- | Returns the next larger potency of 2
-- In case inputSize is potency of 2 : identity
normSize:: Size -> Size
normSize inputSize = max min_size (sizeHelp inputSize 1)
where sizeHelp :: Size -> Size -> Size
sizeHelp input size = if (size >= input) || (size == max_size) then size else sizeHelp input (shiftL size 1)
--FIXME TODO guard for Integer overun meaning some maximum size has to be set
--size has to be power of 2
newKvs :: Size -> SizeCounter-> IO(Kvs key val)
newKvs size counter = do let msk = getMask size
slts <- newSlots size
sltcntr <- newSlotsCounter
kvsref <- noKvs
copyDone <- newIORef 0
copyIndex <- newCopyIndex
return $ Kvs kvsref slts msk sltcntr counter copyDone copyIndex
where
newSlots :: Size -> IO( Slots key val)
newSlots size = V.replicateM size newSlot -- TODO
newSlotsCounter :: IO(SlotsCounter)
newSlotsCounter = newCounter 0
newSlot :: IO(Slot key val)
newSlot = do keyref <- newIORef Kempty
valref <- newIORef T -- TODO optimize somewhere, somewhat
return $ Slot keyref valref
newCopyIndex = newIORef 0
--Debug code -- TODO make inclusion conditional with preprocessor or something for DEBUG only
----------------------------------------------------------------------------------------------------------------------------------------------------
-- TODO shorten the output and make it more readable
-- | Types that can render themselves as a diagnostic string.  The result
-- is in IO because rendering reads mutable references.
class DebugShow a where
  debugShow :: a -> IO String
-- TODO Automatic indentation, ask on Stack Overflow about it.
--Debug print for the Hashtable
instance (Show k, Show v) => DebugShow (ConcurrentHashTable k v) where
  -- renders the whole chain of kvs generations, starting at the head
  debugShow ht = do kvs <- getHeadKvs ht
                    str <- debugShow kvs
                    return $ "ConcurrentHashtable:\n" ++ str
instance forall k v. (Show k, Show v) => DebugShow (Kvs k v) where
  debugShow kvs = debugshw 0 kvs
    where debugshw :: Int -> Kvs k v -> IO String
          -- NOTE(review): the recursion at the bottom goes through
          -- 'debugShow', which restarts at counter 0 — so every generation
          -- prints as "Kvs number 0".  Likely intended:
          -- debugshw (resizeCounter + 1) nwKvs.  Confirm before changing.
          debugshw resizeCounter kvs = do str <- return $ "Kvs number " ++ (show resizeCounter) ++ " \n"
                                          hsNextKvs <- hasNextKvs kvs
                                          str <- return $ str ++ if not $ hsNextKvs then "Is newest Kvs.\n" else "Is older Kvs.\n"
                                          maskstr <- debugShow $ mask kvs
                                          str <- return $ str ++ maskstr
                                          str <- return $ str ++ "sizeCounter:\n "
                                          sizeCounterStr <- debugShow $ sizeCounter kvs
                                          str <- return $ str ++ sizeCounterStr
                                          str <- return $ str ++ "slotsCounter:\n "
                                          slotsCounterStr <- debugShow $ slotsCounter kvs
                                          str <- return $ str ++ slotsCounterStr ++ "Slots:\n"
                                          slotsstr <- debugShow $ slots kvs
                                          str <- return $ str ++ slotsstr
                                          hsNextKvs <- hasNextKvs kvs
                                          newKvsstr <- if not $ hsNextKvs then return "END.\n" else
                                                         do nwKvs <- getNextKvs kvs
                                                            debugShow nwKvs
                                          return $ str ++ newKvsstr
instance DebugShow Mask where
  -- renders the mask in base 2
  debugShow mask = do bitsstr <- return $ showIntAtBase 2 intToDigit mask ""
                      return $ "Mask: " ++ bitsstr ++ "\n"
--instance DebugShow SizeCounter where
-- debugShow counter = undefined
--instance DebugShow SlotsCounter where
-- debugShow counter = undefined
instance DebugShow AtomicCounter where
  -- snapshot of the counter's current value
  debugShow counter = do number <- readCounter counter
                         return $ "Countervalue: " ++ (show number) ++ "\n"
-- TODO number the slots
instance forall k v. (Show k, Show v) => DebugShow (Slots k v) where
  -- folds over the slot vector, appending one rendering per slot
  debugShow slts = do str <- return $ "Vector length: " ++ (show $ V.length slts) ++ "\n"
                      V.foldM' g str slts
    where
      g :: String -> Slot k v -> IO String
      g akk slt = do sltstr <- debugShow slt
                     return $ akk ++ "Slot:\n" ++sltstr
instance (Show k, Show v) => DebugShow (Slot k v) where
  -- reads both mutable fields of the slot
  debugShow slt = do key <- readKeySlot slt
                     val <- readValueSlot slt
                     return $ "Key:\n" ++ (show key) ++ "\nValue:\n" ++ (show val) ++ "\n"
-- | Human-readable rendering of keys.  The cached full hash inside 'Key'
-- is intentionally not rendered (see TODO); the binding is wildcarded so
-- -Wall does not flag it as unused.
instance (Show k) => Show (Key k) where -- TODO keys get primed
  show (Key _ key) = "Key: " ++ (show key) -- TODO show FullHash
  show Kempty = "Key empty"
-- | Human-readable rendering of the five slot-value states.
instance (Show v) => Show (Value v) where
  show (V val) = "Value: " ++ (show val)
  show (Vp val) = "Value (primed): " ++ (show val)
  show T = "Tombstone"
  show Tp = "Tombstone (primed)"
  show S = "Sentinel"
-- | The hashtable allows telescopic resizes.  Returns how many resizes
-- are currently in flight, i.e. the length of the kvs chain minus one.
getNumberOfOngoingResizes :: ConcurrentHashTable k v-> IO Int
getNumberOfOngoingResizes ht = getHeadKvs ht >>= chainDepth
  where chainDepth :: Kvs k v -> IO Int
        chainDepth kvs = do
          hasNext <- hasNextKvs kvs
          if hasNext
            then do nextKvs <- getNextKvs kvs
                    deeper <- chainDepth nextKvs
                    return (1 + deeper)
            else return 0
-- | The hashtable allows telescopic resizes, so values live somewhere in
-- a chain of vectors.  Returns the length of every vector, oldest first.
getLengthsOfVectors ::ConcurrentHashTable k v-> IO [Int]
getLengthsOfVectors ht = getHeadKvs ht >>= getLengths
  where getLengths :: Kvs k v -> IO [Int]
        getLengths kvs = do
          -- resolved FIXME: pure values are bound with let, not '<- return';
          -- this also stops shadowing the top-level 'slots' accessor
          let lngth = V.length (slots kvs)
          hasNext <- hasNextKvs kvs
          if hasNext
            then do nextKvs <- getNextKvs kvs
                    rest <- getLengths nextKvs
                    return (lngth : rest)
            else return [lngth]
-- | Snapshot of every generation's slots counter, oldest first.
getSlotsCounters ::ConcurrentHashTable k v-> IO [Int]
getSlotsCounters ht = mapOnKvs ht readSlotsCounter
-- | Per generation, the number of slots whose key field is occupied.
countUsedSlots :: ConcurrentHashTable k v -> IO [Int]
countUsedSlots ht = countSlotsWithPredicate ht (fmap not . isKEmptySlot) -- resolved TODO: point-free
-- | Per generation, counts the slots satisfying the (effectful) predicate.
countSlotsWithPredicate :: ConcurrentHashTable k v-> (Slot k v -> IO Bool) -> IO [Int]
countSlotsWithPredicate ht predicate = mapOnKvs ht (countSlots predicate)
  where countSlots ::(Slot k v -> IO Bool) -> (Kvs k v) -> IO Int
        countSlots p kvs = do
          flags <- V.forM (slots kvs) p
          -- strict fold: the original lazy V.foldl (with a redundant
          -- 'acc + 0' branch) accumulates thunks on large vectors
          return $ V.foldl' (\acc b -> if b then acc + 1 else acc) 0 flags
-- | Runs an action on every kvs generation in the chain, oldest first,
-- collecting the results in order.
mapOnKvs :: forall k v a. ConcurrentHashTable k v -> ((Kvs k v) -> IO a) -> IO [a]
mapOnKvs ht fun = getHeadKvs ht >>= walk
  where walk :: Kvs k v -> IO [a]
        walk kvs = do
          here <- fun kvs
          hasNext <- hasNextKvs kvs
          if hasNext
            then do nextKvs <- getNextKvs kvs
                    rest <- walk nextKvs
                    return (here : rest)
            else return [here]
-- | Counts the number of reprobes for a given key;
-- does not change ht
getReprobeCount :: forall k v . (Hashable k, Eq k) => ConcurrentHashTable k v -> k -> IO Int
getReprobeCount ht key = do headkvs <- getHeadKvs ht
                            rpCount headkvs (newKey key)
  where rpCount :: Kvs k v -> Key k -> IO Int
        -- probes only the given (head) generation and reports the
        -- reprobe count returned by getSlot
        rpCount kv key = do let msk = mask kv
                                slts = slots kv
                            (_, rpcnt) <- getSlot slts msk key
                            return rpcnt
-- TODO use this in an resize related unit test
-- work only on the headKvs
-- | True iff the two keys have identical spread full hashes.
keyFullHashEqual :: (Hashable key) => key -> key -> Bool
keyFullHashEqual a b = fullHashOf a == fullHashOf b
  where fullHashOf = spreadHash . hash
-- | True iff the two keys map to the same slot index in a table of the
-- given size, i.e. they would collide there.
keyIdxCollision :: forall key . (Hashable key) => Size -> key -> key -> Bool
keyIdxCollision sze a b = idxOf a == idxOf b
  where idxOf k = maskHash (getMask sze) (spreadHash (hash k))
--- Quick Check generator
------------------------------------------------------------------------------------------------------------------
-- | Sized generator for hashtables wrapped in IO; shrinking is disabled.
instance (Eq val,Eq key, Hashable key) => QCA.Arbitrary (IO (ConcurrentHashTable key val)) where
  arbitrary = sized (\sz -> htGen sz sz (Left sz))
  shrink _ = []
-- | Builds a generator of hashtables-in-IO.  The Left branch (generate
-- 'valuesize' arbitrary values) is NOT implemented yet; the Right branch
-- currently only yields an empty table (see TODO: it should apply h3
-- 'htsize' times to insert generated key/value pairs).
htGen :: forall key value .(Eq value,Eq key, Hashable key) =>
  Int -> Int -> Either Int (key -> Gen value) -> QCG.Gen (IO (ConcurrentHashTable key value))
htGen htsize keysize (Left valuesize) = undefined
--htGen htsize keysize (Left valuesize) = QCGU.promote $ do lst <- newConcurrentHashTable -- TODO In order to have this need a fúnction that takes an ht and returns IO Gen HT -- ht
htGen htsize keysize (Right valuegen) = do gen <- h2
                                           return gen -- TODO get h2 and add htsize times h3
  where -- h1: chain one (key,value) insertion onto a generated table
        h1 :: QCG.Gen (IO (ConcurrentHashTable key value)) -> key -> value -> QCG.Gen (IO (ConcurrentHashTable key value))
        h1 gen key val = fmap (\ioht -> ioht >>= (\ht -> (put ht key val) >> (return ht))) gen -- TODO question is there
        -- h2: generator of a fresh, empty table
        h2 :: QCG.Gen (IO (ConcurrentHashTable key value))
        h2 = return newConcurrentHashTable
        -- h3: like h1, but key and value are themselves generated
        h3 :: QCG.Gen (IO (ConcurrentHashTable key value)) -> Gen key -> Gen value -> QCG.Gen (IO (ConcurrentHashTable key value))
        h3 gen genk genv = do k <- genk
                              v <- genv
                              h1 gen k v
-- TODO now use monadic programing to feed h1 with Gen key and Gen values
-- we have a generator of Gen IO Concurrent hashtable and we fmap an put on it, probably requires some other monadic operation
-- or what
-- use of promote is correct but
-- First generator of a key-valuetor list
-- IO of generator of an ht
-- then promote to generator of IO
-- then QuickCheck monadic
-- TODO parametrise this with custom keygen
-- TODO write a debug function telling the ht to arbitrarily resize
--todo generate arbitrary hash tables
--write an assertion for resize
-- TODO Assert each kvs does not contain the same key twice
-- TODO ticket api : key containing a fullhash, allows the user to cache hashes
-- TODO write comments on the interface
-- TODO how about an instance of arbitrary
-- |Set of Keys| = size , |List of Values| = size all genratet with size parameter, zip and put, actually not that compilcated
| MathiasBartl/Concurrent_Datastructures | Data/HashTable/Concurrent.hs | gpl-2.0 | 50,050 | 539 | 26 | 12,581 | 10,726 | 5,600 | 5,126 | 637 | 15 |
import Hledger (tests_Hledger)
import Test.Framework.Providers.HUnit (hUnitTestToTests)
import Test.Framework.Runners.Console (defaultMain)
-- | Test-suite entry point: converts the hledger-lib HUnit test tree
-- into test-framework tests and runs them on the console.
main :: IO ()
main = defaultMain (hUnitTestToTests tests_Hledger)
| kmels/hledger | hledger-lib/tests/suite.hs | gpl-3.0 | 207 | 0 | 6 | 20 | 55 | 32 | 23 | 5 | 1 |
{-# LANGUAGE MagicHash, FlexibleContexts, DataKinds, TypeFamilies #-}
module Http(get,post) where
import Java
-- | HTTP GET via the static Java method @HttpsRequest.get@.
-- NOTE(review): the foreign result is pure (no IO) and Maybe-wrapped —
-- presumably Nothing signals a failed request; confirm against
-- HttpsRequest.java.
foreign import java unsafe "@static HttpsRequest.get"
  get :: String -> Maybe String
-- | HTTP POST via the static Java method @HttpsRequest.post@; the second
-- argument is presumably the request body — confirm against the Java side.
foreign import java unsafe "@static HttpsRequest.post"
  post :: String -> String -> Maybe String
| koerriva/paygate | src/Http.hs | gpl-3.0 | 296 | 7 | 4 | 44 | 62 | 35 | 27 | -1 | -1 |
-- | Decentralized MDP
-- Based on an email exchange with Ashutosh Nayyar
module Data.Teams.Examples.DecMdp where
import Data.Teams.Structure
-- Variables of the decentralized MDP, one per time step t:
-- x = (non-reward) system variable, u1/u2 = the two controllers'
-- decisions, r = the reward variable.
x = mkNonReward "x"
u1 = mkNonReward "u1"
u2 = mkNonReward "u2"
r = mkReward "r"
-- f and d are stochastic nodes (dynamics and reward kernels);
-- g1 and g2 are the two control laws.
f = mkStochastic "f"
g1 = mkControl "g1"
g2 = mkControl "g2"
d = mkStochastic "d"
-- | One time step of the team structure:
-- x(t) depends on the previous state and both previous controls (except
-- at t = 1); controller i sees all states x(1..t) and its own past
-- controls; r(t) depends on the current state and both current controls.
dynamics t = f(t).$.(x(t) .|. onlyif (t /= 1) [x(t-1), u1(t-1), u2(t-1)])
          ++ g1(t).$.(u1(t) .|. map x[1..t] ++ map u1[1..t-1])
          ++ g2(t).$.(u2(t) .|. map x[1..t] ++ map u2[1..t-1])
          ++ d(t) .$.(r(t) .|. [x(t), u1(t), u2(t)])
-- | The team over a horizon of 4 steps, and its simplified structure.
decMdp = mkTeamTime dynamics 4
decMdp' = simplify decMdp
| adityam/teams | Data/Teams/Examples/DecMdp.hs | gpl-3.0 | 636 | 0 | 18 | 145 | 344 | 181 | 163 | 16 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DLP.Projects.Locations.StoredInfoTypes.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a pre-built stored infoType to be used for inspection. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-stored-infotypes to
-- learn more.
--
-- /See:/ <https://cloud.google.com/dlp/docs/ Cloud Data Loss Prevention (DLP) API Reference> for @dlp.projects.locations.storedInfoTypes.create@.
module Network.Google.Resource.DLP.Projects.Locations.StoredInfoTypes.Create
(
-- * REST Resource
ProjectsLocationsStoredInfoTypesCreateResource
-- * Creating a Request
, projectsLocationsStoredInfoTypesCreate
, ProjectsLocationsStoredInfoTypesCreate
-- * Request Lenses
, plsitcParent
, plsitcXgafv
, plsitcUploadProtocol
, plsitcAccessToken
, plsitcUploadType
, plsitcPayload
, plsitcCallback
) where
import Network.Google.DLP.Types
import Network.Google.Prelude
-- NOTE(review): this module is auto-generated (module header marks
-- Stability as "auto-generated"); regenerate via gogol-gen rather than
-- hand-editing the definitions below.
-- | A resource alias for @dlp.projects.locations.storedInfoTypes.create@ method which the
-- 'ProjectsLocationsStoredInfoTypesCreate' request conforms to.
type ProjectsLocationsStoredInfoTypesCreateResource =
     "v2" :>
       Capture "parent" Text :>
         "storedInfoTypes" :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "callback" Text :>
                     QueryParam "alt" AltJSON :>
                       ReqBody '[JSON]
                         GooglePrivacyDlpV2CreateStoredInfoTypeRequest
                         :> Post '[JSON] GooglePrivacyDlpV2StoredInfoType
-- | Creates a pre-built stored infoType to be used for inspection. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-stored-infotypes to
-- learn more.
--
-- /See:/ 'projectsLocationsStoredInfoTypesCreate' smart constructor.
data ProjectsLocationsStoredInfoTypesCreate =
  ProjectsLocationsStoredInfoTypesCreate'
    { _plsitcParent :: !Text
    , _plsitcXgafv :: !(Maybe Xgafv)
    , _plsitcUploadProtocol :: !(Maybe Text)
    , _plsitcAccessToken :: !(Maybe Text)
    , _plsitcUploadType :: !(Maybe Text)
    , _plsitcPayload :: !GooglePrivacyDlpV2CreateStoredInfoTypeRequest
    , _plsitcCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsStoredInfoTypesCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plsitcParent'
--
-- * 'plsitcXgafv'
--
-- * 'plsitcUploadProtocol'
--
-- * 'plsitcAccessToken'
--
-- * 'plsitcUploadType'
--
-- * 'plsitcPayload'
--
-- * 'plsitcCallback'
projectsLocationsStoredInfoTypesCreate
    :: Text -- ^ 'plsitcParent'
    -> GooglePrivacyDlpV2CreateStoredInfoTypeRequest -- ^ 'plsitcPayload'
    -> ProjectsLocationsStoredInfoTypesCreate
projectsLocationsStoredInfoTypesCreate pPlsitcParent_ pPlsitcPayload_ =
  ProjectsLocationsStoredInfoTypesCreate'
    { _plsitcParent = pPlsitcParent_
    , _plsitcXgafv = Nothing
    , _plsitcUploadProtocol = Nothing
    , _plsitcAccessToken = Nothing
    , _plsitcUploadType = Nothing
    , _plsitcPayload = pPlsitcPayload_
    , _plsitcCallback = Nothing
    }
-- | Required. Parent resource name. The format of this value varies
-- depending on the scope of the request (project or organization) and
-- whether you have [specified a processing
-- location](https:\/\/cloud.google.com\/dlp\/docs\/specifying-location): +
-- Projects scope, location specified:
-- \`projects\/\`PROJECT_ID\`\/locations\/\`LOCATION_ID + Projects scope,
-- no location specified (defaults to global): \`projects\/\`PROJECT_ID +
-- Organizations scope, location specified:
-- \`organizations\/\`ORG_ID\`\/locations\/\`LOCATION_ID + Organizations
-- scope, no location specified (defaults to global):
-- \`organizations\/\`ORG_ID The following example \`parent\` string
-- specifies a parent project with the identifier \`example-project\`, and
-- specifies the \`europe-west3\` location for processing data:
-- parent=projects\/example-project\/locations\/europe-west3
plsitcParent :: Lens' ProjectsLocationsStoredInfoTypesCreate Text
plsitcParent
  = lens _plsitcParent (\ s a -> s{_plsitcParent = a})
-- | V1 error format.
plsitcXgafv :: Lens' ProjectsLocationsStoredInfoTypesCreate (Maybe Xgafv)
plsitcXgafv
  = lens _plsitcXgafv (\ s a -> s{_plsitcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plsitcUploadProtocol :: Lens' ProjectsLocationsStoredInfoTypesCreate (Maybe Text)
plsitcUploadProtocol
  = lens _plsitcUploadProtocol
      (\ s a -> s{_plsitcUploadProtocol = a})
-- | OAuth access token.
plsitcAccessToken :: Lens' ProjectsLocationsStoredInfoTypesCreate (Maybe Text)
plsitcAccessToken
  = lens _plsitcAccessToken
      (\ s a -> s{_plsitcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plsitcUploadType :: Lens' ProjectsLocationsStoredInfoTypesCreate (Maybe Text)
plsitcUploadType
  = lens _plsitcUploadType
      (\ s a -> s{_plsitcUploadType = a})
-- | Multipart request metadata.
plsitcPayload :: Lens' ProjectsLocationsStoredInfoTypesCreate GooglePrivacyDlpV2CreateStoredInfoTypeRequest
plsitcPayload
  = lens _plsitcPayload
      (\ s a -> s{_plsitcPayload = a})
-- | JSONP
plsitcCallback :: Lens' ProjectsLocationsStoredInfoTypesCreate (Maybe Text)
plsitcCallback
  = lens _plsitcCallback
      (\ s a -> s{_plsitcCallback = a})
-- Wires the record fields onto the servant-style route above.
instance GoogleRequest
           ProjectsLocationsStoredInfoTypesCreate
         where
        type Rs ProjectsLocationsStoredInfoTypesCreate =
             GooglePrivacyDlpV2StoredInfoType
        type Scopes ProjectsLocationsStoredInfoTypesCreate =
             '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient
          ProjectsLocationsStoredInfoTypesCreate'{..}
          = go _plsitcParent _plsitcXgafv _plsitcUploadProtocol
              _plsitcAccessToken
              _plsitcUploadType
              _plsitcCallback
              (Just AltJSON)
              _plsitcPayload
              dLPService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ProjectsLocationsStoredInfoTypesCreateResource)
                      mempty
| brendanhay/gogol | gogol-dlp/gen/Network/Google/Resource/DLP/Projects/Locations/StoredInfoTypes/Create.hs | mpl-2.0 | 7,060 | 0 | 17 | 1,397 | 797 | 472 | 325 | 122 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudKMS.Projects.Locations.KeyRings.CryptoKeys.CryptoKeyVersions.Restore
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Restore a CryptoKeyVersion in the DESTROY_SCHEDULED state. Upon
-- restoration of the CryptoKeyVersion, state will be set to DISABLED, and
-- destroy_time will be cleared.
--
-- /See:/ <https://cloud.google.com/kms/ Cloud Key Management Service (KMS) API Reference> for @cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.restore@.
module Network.Google.Resource.CloudKMS.Projects.Locations.KeyRings.CryptoKeys.CryptoKeyVersions.Restore
(
-- * REST Resource
ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestoreResource
-- * Creating a Request
, projectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore
, ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore
-- * Request Lenses
, plkrckckvrXgafv
, plkrckckvrUploadProtocol
, plkrckckvrAccessToken
, plkrckckvrUploadType
, plkrckckvrPayload
, plkrckckvrName
, plkrckckvrCallback
) where
import Network.Google.CloudKMS.Types
import Network.Google.Prelude
-- NOTE(review): this module is auto-generated (module header marks
-- Stability as "auto-generated"); regenerate via gogol-gen rather than
-- hand-editing the definitions below.
-- | A resource alias for @cloudkms.projects.locations.keyRings.cryptoKeys.cryptoKeyVersions.restore@ method which the
-- 'ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore' request conforms to.
type ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestoreResource
     =
     "v1" :>
       CaptureMode "name" "restore" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :>
                     ReqBody '[JSON] RestoreCryptoKeyVersionRequest :>
                       Post '[JSON] CryptoKeyVersion
-- | Restore a CryptoKeyVersion in the DESTROY_SCHEDULED state. Upon
-- restoration of the CryptoKeyVersion, state will be set to DISABLED, and
-- destroy_time will be cleared.
--
-- /See:/ 'projectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore' smart constructor.
data ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore =
  ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore'
    { _plkrckckvrXgafv :: !(Maybe Xgafv)
    , _plkrckckvrUploadProtocol :: !(Maybe Text)
    , _plkrckckvrAccessToken :: !(Maybe Text)
    , _plkrckckvrUploadType :: !(Maybe Text)
    , _plkrckckvrPayload :: !RestoreCryptoKeyVersionRequest
    , _plkrckckvrName :: !Text
    , _plkrckckvrCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plkrckckvrXgafv'
--
-- * 'plkrckckvrUploadProtocol'
--
-- * 'plkrckckvrAccessToken'
--
-- * 'plkrckckvrUploadType'
--
-- * 'plkrckckvrPayload'
--
-- * 'plkrckckvrName'
--
-- * 'plkrckckvrCallback'
projectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore
    :: RestoreCryptoKeyVersionRequest -- ^ 'plkrckckvrPayload'
    -> Text -- ^ 'plkrckckvrName'
    -> ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore
projectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore pPlkrckckvrPayload_ pPlkrckckvrName_ =
  ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore'
    { _plkrckckvrXgafv = Nothing
    , _plkrckckvrUploadProtocol = Nothing
    , _plkrckckvrAccessToken = Nothing
    , _plkrckckvrUploadType = Nothing
    , _plkrckckvrPayload = pPlkrckckvrPayload_
    , _plkrckckvrName = pPlkrckckvrName_
    , _plkrckckvrCallback = Nothing
    }
-- | V1 error format.
plkrckckvrXgafv :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore (Maybe Xgafv)
plkrckckvrXgafv
  = lens _plkrckckvrXgafv
      (\ s a -> s{_plkrckckvrXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plkrckckvrUploadProtocol :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore (Maybe Text)
plkrckckvrUploadProtocol
  = lens _plkrckckvrUploadProtocol
      (\ s a -> s{_plkrckckvrUploadProtocol = a})
-- | OAuth access token.
plkrckckvrAccessToken :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore (Maybe Text)
plkrckckvrAccessToken
  = lens _plkrckckvrAccessToken
      (\ s a -> s{_plkrckckvrAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plkrckckvrUploadType :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore (Maybe Text)
plkrckckvrUploadType
  = lens _plkrckckvrUploadType
      (\ s a -> s{_plkrckckvrUploadType = a})
-- | Multipart request metadata.
plkrckckvrPayload :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore RestoreCryptoKeyVersionRequest
plkrckckvrPayload
  = lens _plkrckckvrPayload
      (\ s a -> s{_plkrckckvrPayload = a})
-- | Required. The resource name of the CryptoKeyVersion to restore.
plkrckckvrName :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore Text
plkrckckvrName
  = lens _plkrckckvrName
      (\ s a -> s{_plkrckckvrName = a})
-- | JSONP
plkrckckvrCallback :: Lens' ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore (Maybe Text)
plkrckckvrCallback
  = lens _plkrckckvrCallback
      (\ s a -> s{_plkrckckvrCallback = a})
-- Wires the record fields onto the servant-style route above.
instance GoogleRequest
           ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore
         where
        type Rs
               ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore
             = CryptoKeyVersion
        type Scopes
               ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore
             =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloudkms"]
        requestClient
          ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestore'{..}
          = go _plkrckckvrName _plkrckckvrXgafv
              _plkrckckvrUploadProtocol
              _plkrckckvrAccessToken
              _plkrckckvrUploadType
              _plkrckckvrCallback
              (Just AltJSON)
              _plkrckckvrPayload
              cloudKMSService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           ProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsRestoreResource)
                      mempty
| brendanhay/gogol | gogol-cloudkms/gen/Network/Google/Resource/CloudKMS/Projects/Locations/KeyRings/CryptoKeys/CryptoKeyVersions/Restore.hs | mpl-2.0 | 7,185 | 0 | 16 | 1,393 | 787 | 462 | 325 | 127 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.Goals.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing goal.
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.goals.update@.
module Network.Google.Resource.Analytics.Management.Goals.Update
(
-- * REST Resource
ManagementGoalsUpdateResource
-- * Creating a Request
, managementGoalsUpdate
, ManagementGoalsUpdate
-- * Request Lenses
, mguWebPropertyId
, mguGoalId
, mguProFileId
, mguPayload
, mguAccountId
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- NOTE(review): this module is auto-generated (module header marks
-- Stability as "auto-generated"); regenerate via gogol-gen rather than
-- hand-editing the definitions below.
-- | A resource alias for @analytics.management.goals.update@ method which the
-- 'ManagementGoalsUpdate' request conforms to.
type ManagementGoalsUpdateResource =
     "analytics" :>
       "v3" :>
         "management" :>
           "accounts" :>
             Capture "accountId" Text :>
               "webproperties" :>
                 Capture "webPropertyId" Text :>
                   "profiles" :>
                     Capture "profileId" Text :>
                       "goals" :>
                         Capture "goalId" Text :>
                           QueryParam "alt" AltJSON :>
                             ReqBody '[JSON] Goal :> Put '[JSON] Goal
-- | Updates an existing goal.
--
-- /See:/ 'managementGoalsUpdate' smart constructor.
data ManagementGoalsUpdate =
  ManagementGoalsUpdate'
    { _mguWebPropertyId :: !Text
    , _mguGoalId :: !Text
    , _mguProFileId :: !Text
    , _mguPayload :: !Goal
    , _mguAccountId :: !Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ManagementGoalsUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mguWebPropertyId'
--
-- * 'mguGoalId'
--
-- * 'mguProFileId'
--
-- * 'mguPayload'
--
-- * 'mguAccountId'
managementGoalsUpdate
    :: Text -- ^ 'mguWebPropertyId'
    -> Text -- ^ 'mguGoalId'
    -> Text -- ^ 'mguProFileId'
    -> Goal -- ^ 'mguPayload'
    -> Text -- ^ 'mguAccountId'
    -> ManagementGoalsUpdate
managementGoalsUpdate pMguWebPropertyId_ pMguGoalId_ pMguProFileId_ pMguPayload_ pMguAccountId_ =
  ManagementGoalsUpdate'
    { _mguWebPropertyId = pMguWebPropertyId_
    , _mguGoalId = pMguGoalId_
    , _mguProFileId = pMguProFileId_
    , _mguPayload = pMguPayload_
    , _mguAccountId = pMguAccountId_
    }
-- | Web property ID to update the goal.
mguWebPropertyId :: Lens' ManagementGoalsUpdate Text
mguWebPropertyId
  = lens _mguWebPropertyId
      (\ s a -> s{_mguWebPropertyId = a})
-- | Index of the goal to be updated.
mguGoalId :: Lens' ManagementGoalsUpdate Text
mguGoalId
  = lens _mguGoalId (\ s a -> s{_mguGoalId = a})
-- | View (Profile) ID to update the goal.
mguProFileId :: Lens' ManagementGoalsUpdate Text
mguProFileId
  = lens _mguProFileId (\ s a -> s{_mguProFileId = a})
-- | Multipart request metadata.
mguPayload :: Lens' ManagementGoalsUpdate Goal
mguPayload
  = lens _mguPayload (\ s a -> s{_mguPayload = a})
-- | Account ID to update the goal.
mguAccountId :: Lens' ManagementGoalsUpdate Text
mguAccountId
  = lens _mguAccountId (\ s a -> s{_mguAccountId = a})
-- Wires the record fields onto the servant-style route above.
instance GoogleRequest ManagementGoalsUpdate where
        type Rs ManagementGoalsUpdate = Goal
        type Scopes ManagementGoalsUpdate =
             '["https://www.googleapis.com/auth/analytics.edit"]
        requestClient ManagementGoalsUpdate'{..}
          = go _mguAccountId _mguWebPropertyId _mguProFileId
              _mguGoalId
              (Just AltJSON)
              _mguPayload
              analyticsService
          where go
                  = buildClient
                      (Proxy :: Proxy ManagementGoalsUpdateResource)
                      mempty
| brendanhay/gogol | gogol-analytics/gen/Network/Google/Resource/Analytics/Management/Goals/Update.hs | mpl-2.0 | 4,526 | 0 | 20 | 1,115 | 624 | 368 | 256 | 100 | 1 |
{-# LANGUAGE ScopedTypeVariables, LambdaCase, GeneralizedNewtypeDeriving #-}
module Chess where {
import Data.List;
import Control.Monad;
import Data.Maybe;
import Data.Array.IArray;
import Debug.Trace;
import Retrograde;
import Data.Map.Strict(Map);
import qualified Data.Map.Strict as Map;
import qualified Data.Set as Set;
import Data.Set(Set);
import Control.Monad.GenericReplicate;
import Control.Exception(assert);
import Data.Ord;
import System.Random(randomRIO);
-- | Board dimensions as (columns, rows).
my_boardsize :: (Integer,Integer);
my_boardsize = (4,4); -- col row
-- | Whether stalemate is scored as a draw (comments below suggest False
-- makes some endgames winnable — "winnable when stalemate==loss").
stalemate_draw :: Bool;
stalemate_draw = False;
-- | Whether a side may pass instead of moving.
pass_permitted :: Bool;
pass_permitted = False;
-- no stalemate
-- 43 = 2.5 min
-- 44 = 16 min
-- 45 = 55 min
-- 55 = 123 min on mkc(?)
-- test directory
-- | The piece set used by 'test_position' (here: KQ vs KR).
test_directory :: Directory;
test_directory = dir_qr;
-- | Largest legal row index (rows are numbered from 0).
max_row :: Row;
max_row = Row (snd my_boardsize - 1);
-- | Largest legal column index (columns are numbered from 0).
max_column :: Column;
max_column = Column (fst my_boardsize - 1);
-- to avoid the redundancy warning
-- | Dummy use of Debug.Trace so the import is never flagged as redundant.
trace_placeholder :: ();
trace_placeholder = trace "trace" ();
-- | A displacement measured in columns and rows.
type Offset = (Column,Row);
newtype Row = Row Integer deriving (Eq, Ord, Enum, Ix, Show);
newtype Column = Column Integer deriving (Eq, Ord, Enum, Ix, Show);
-- | Where each piece stands; Nothing = the piece is not on the board.
type Position = Array Piecenum (Maybe Location);
newtype Location = Location Offset deriving (Eq, Ord, Ix, Show);
-- | Index of a piece within a 'Directory'.
newtype Piecenum = Piecenum Integer deriving (Eq, Ord, Ix, Show);
-- Movement capabilities; each has a "none" constructor so a Piece simply
-- lists what it can do (see the per-capability generators further down).
data Orthogonal = NoOrthogonal | Wazir | Rook deriving (Show, Eq, Ord, Bounded, Ix);
data Diagonal = NoDiagonal | Ferz | Bishop deriving (Show, Eq, Ord, Bounded, Ix);
data Knight = NoKnight | YesKnight deriving (Show, Eq, Ord, Bounded, Ix);
data Alfil = NoAlfil | YesAlfil deriving (Show, Eq, Ord, Bounded, Ix);
data Dabbaba = NoDabbaba | Dabbaba_single | Dabbaba_rider deriving (Show,Eq, Ord, Bounded, Ix);
-- | Royal pieces are the ones checked by 'has_king'.
data Royal = Commoner | Royal deriving (Show, Eq, Ord, Bounded, Ix);
-- | Maximizing or Minimizing the Value of a position
data Color = White | Black deriving (Show, Eq, Ord, Bounded, Ix);
-- | The opposing side.
other :: Color -> Color;
other = \case { White -> Black; Black -> White };
-- | A piece: royalty flag plus its movement capabilities and owner.
data Piece = Piece Royal Orthogonal Diagonal Knight Alfil Dabbaba Color deriving (Show, Eq, Ord, Bounded, Ix);
-- | Royal single-stepper: wazir (orthogonal) + ferz (diagonal) moves.
king :: Color -> Piece;
king = Piece Royal Wazir Ferz NoKnight NoAlfil NoDabbaba;
-- | Non-royal "man": moves like the king but is capturable.
man :: Color -> Piece;
man = Piece Commoner Wazir Ferz NoKnight NoAlfil NoDabbaba;
-- | Rook + bishop rider.
queen :: Color -> Piece;
queen = Piece Commoner Rook Bishop NoKnight NoAlfil NoDabbaba;
rook :: Color -> Piece;
rook = Piece Commoner Rook NoDiagonal NoKnight NoAlfil NoDabbaba;
knight :: Color -> Piece;
knight = Piece Commoner NoOrthogonal NoDiagonal YesKnight NoAlfil NoDabbaba;
bishop :: Color -> Piece;
bishop = Piece Commoner NoOrthogonal Bishop NoKnight NoAlfil NoDabbaba;
-- | Short alias for the configured test directory.
td :: Directory;
td = test_directory;
-- | Sample position for the four pieces of 'test_directory', White to
-- move; earlier experiments are kept below, commented out.
test_position :: MovePosition;
test_position = (listArray (Piecenum 0, Piecenum 3) $ map (\(x,y) -> Just $ Location (Column x, Row y))
 [(3,2),(2,0),(1,2),(1,1)],White);
--[(3,3),(3,1),(3,4),(0,3)],White);
--[(3,3),(1,0),(2,0),(0,1)],Black);
--[(3,3),(1,0),(2,2),(0,1)],White);
-- Endgame piece sets ("directories").  By convention, index 0 is the
-- White king and index 1 the Black king.
dir_experiment :: Directory;
dir_experiment = listArray (Piecenum 0, Piecenum 3) [king White, king Black
 , Piece Commoner NoOrthogonal Ferz NoKnight NoAlfil NoDabbaba White
 , Piece Commoner NoOrthogonal NoDiagonal YesKnight NoAlfil NoDabbaba White
 ];
-- | King + bishop + knight vs bare king.
dir_bn :: Directory;
dir_bn = listArray (Piecenum 0, Piecenum 3) [king White, king Black
 , bishop White
 , knight White
 ];
-- | King + two knights vs bare king.
dir_nn :: Directory;
dir_nn = listArray (Piecenum 0, Piecenum 3) [king White, king Black
 , knight White
 , knight White
 ];
-- winnable when stalemate==loss
dir_n :: Directory;
dir_n = make_dir [king White, king Black , knight White];
-- | King + two bishops vs bare king.
dir_bb :: Directory;
dir_bb = listArray (Piecenum 0, Piecenum 3) [king White, king Black
 , bishop White
 , bishop White
 ];
-- | King + queen vs king + rook.
dir_qr :: Directory;
dir_qr = listArray (Piecenum 0, Piecenum 3) [king White, king Black, queen White, rook Black];
-- | King + man vs bare king.
dir_kmk :: Directory;
dir_kmk = listArray (Piecenum 0, Piecenum 2) [king White, king Black, man White];
-- | Offsets as raw integer pairs, before being wrapped into Column/Row.
type IOffset = (Integer,Integer);
-- | Mirror across the 45-degree diagonal (swap coordinates).
reflect45 :: IOffset -> IOffset;
reflect45 (x,y) = (y,x);
-- | Mirror across the horizontal axis.
reflectx :: IOffset -> IOffset;
reflectx (x,y) = (x, negate y);
-- | Mirror across the vertical axis.
reflecty :: IOffset -> IOffset;
reflecty (x,y) = (negate x, y);
-- | All eight symmetry images of an offset; duplicates are kept when the
-- offset is itself symmetric (e.g. on an axis or diagonal).
eightway_with_duplicates :: IOffset -> [IOffset];
eightway_with_duplicates z =
 [ r (q (p z)) | p <- [id, reflect45], q <- [id, reflectx], r <- [id, reflecty] ];
-- | The distinct board offsets among all eight symmetries of an offset.
eightway :: IOffset -> [Offset];
eightway = map (\z -> (Column $ fst z, Row $ snd z)) . nub . eightway_with_duplicates;
-- | All multiples 1..board_max of a step offset; used for rider moves.
extend :: Offset -> [Offset];
extend (Column x,Row y) =
 [ (Column (k * x), Row (k * y)) | k <- [1 .. unBoardsize board_max] ];
-- | The larger of the two maximal board indices; bounds rider extension.
board_max :: Boardsize;
board_max = Boardsize (max r c)
 where { Row r = max_row; Column c = max_column };
-- | Wrapper for a board-dimension magnitude (see 'board_max').
newtype Boardsize = Boardsize Integer deriving (Show);
-- | Unwrap a 'Boardsize'.
unBoardsize :: Boardsize -> Integer;
unBoardsize (Boardsize x) =x;
-- | Legal destination squares for a piece: the union of all its movement
-- capabilities, restricted to the board, minus squares occupied by
-- friendly pieces ('stomp').  A piece that is off the board (Nothing)
-- has no moves.
moves :: Directory -> Position -> Piecenum -> [Location];
moves directory position num = case position ! num of {
 Nothing -> [];
 Just mylocation -> let {
 -- applies one capability generator at this piece's location
 andBundle :: (Location -> Position -> [Location]) -> [Location];
 andBundle f = f mylocation position;
 } in nub $ filter (inRange board_bounds) $ case directory ! num of {
 Piece _roy orth diag jknight jalfil jda color -> filter (not . stomp directory position color)
 $ concatMap andBundle
 [orthmoves orth
 ,diagmoves diag
 ,knightmoves jknight
 ,alfilmoves jalfil
 ,dabbabamoves jda
 ]}};
-- Per-capability move generators.  Single-steppers apply the base offset
-- once in all eight symmetries; riders extend along each direction until
-- the first occupied square (inclusive, so captures are possible).
orthmoves :: Orthogonal -> Location -> Position -> [Location];
orthmoves NoOrthogonal _ _ = [];
orthmoves Wazir me _ = map (add_offset me) $ eightway (1,0);
orthmoves Rook me pos = concatMap (extendUntilOccupied me pos) $ eightway (1,0);
diagmoves :: Diagonal -> Location -> Position -> [Location];
diagmoves NoDiagonal _ _ = [];
diagmoves Ferz me _ = map (add_offset me) $ eightway (1,1);
diagmoves Bishop me pos = concatMap (extendUntilOccupied me pos) $ eightway (1,1);
knightmoves :: Knight -> Location -> Position -> [Location];
knightmoves NoKnight _ _ = [];
knightmoves YesKnight me _ = map (add_offset me) $ eightway (1,2);
-- alfil: a (2,2) leaper
alfilmoves :: Alfil -> Location -> Position -> [Location];
alfilmoves NoAlfil _ _ = [];
alfilmoves YesAlfil me _ = map (add_offset me) $ eightway (2,2);
-- dabbaba: a (2,0) leaper, optionally a rider
dabbabamoves :: Dabbaba -> Location -> Position -> [Location];
dabbabamoves NoDabbaba _ _ = [];
dabbabamoves Dabbaba_single me _ = map (add_offset me) $ eightway (2,0);
dabbabamoves Dabbaba_rider me pos = concatMap (extendUntilOccupied me pos) $ eightway (2,0);
-- | Inclusive bounds of the playing board.
board_bounds :: (Location,Location);
board_bounds = (Location (Column 0,Row 0),Location (max_column, max_row));
-- | True when the square is on the board and unoccupied.
empty :: Position -> Location -> Bool;
empty p l = inRange board_bounds l
 && (isNothing $ at_location p l);
-- | The piece set in play, indexed by 'Piecenum'.
type Directory = Array Piecenum Piece;
add_offset :: Location -> Offset -> Location;
add_offset (Location (ox,oy)) (dx,dy) = Location (column_add ox dx,row_add oy dy);
row_add :: Row -> Row -> Row;
row_add (Row x) (Row y) = Row $ x+y;
column_add :: Column -> Column -> Column;
column_add (Column x) (Column y) = Column $ x+y;
-- | Keep the longest prefix of elements satisfying the predicate, plus
-- the first element that fails it (when one exists).  For sliding
-- pieces this lets a ray include the first occupied square it meets,
-- i.e. a potential capture.
take_including_first_failure :: (a -> Bool) -> [a] -> [a];
take_including_first_failure keep = let {
go [] = [];
go (x:rest) = if keep x then x : go rest else [x];
} in go;
-- take_including_first_failure allows captures
-- | Walk repeatedly by 'off' from 'me', keeping empty squares plus the
-- first non-empty square reached (which may then be a capture).
extendUntilOccupied :: Location -> Position -> Offset -> [Location];
extendUntilOccupied me pos off = take_including_first_failure (empty pos) $ map (add_offset me) $ extend off;
-- trying to capture one's own piece
-- | True when 'mylocation' holds a piece of the mover's own color.
stomp :: Directory -> Position -> Color -> Location -> Bool;
stomp directory p mycolor mylocation = case at_location p mylocation of {
Nothing -> False;
Just num -> mycolor == get_color (directory ! num);
};
-- | Extract a piece's color (last field of 'Piece').
get_color :: Piece -> Color;
get_color (Piece _ _ _ _ _ _ c) = c;
-- | True for royal pieces (the "king" analogue).
is_royal :: Piece -> Bool;
is_royal (Piece Royal _ _ _ _ _ _) = True;
is_royal _ = False;
-- | Which piece (if any) occupies the given square.  Errors out if the
-- position illegally has two pieces on the same square.
at_location :: Position -> Location -> Maybe Piecenum;
at_location pos loc = let {
check1 :: (Piecenum, Maybe Location) -> [Piecenum];
check1 (_,Nothing) = mzero;
check1 (num, Just loc2) = if loc==loc2 then return num else mzero;
} in case concatMap check1 $ assocs pos of {
[] -> Nothing;
[x] -> Just x;
_ -> error "multiple occupancy"
};
-- | position and player to move
type MovePosition = (Position, Color);
-- stalemate detection
has_king :: Directory -> MovePosition -> Bool;
-- generalize to any number of royal pieces
-- True when the side to move still has an on-board royal piece.
has_king dir (position,color) = any (\(ml, p) -> isJust ml && is_royal p && get_color p == color)
$ zip (elems position) (elems dir);
-- | All predecessor positions of 'mp': every opposing piece is moved
-- backwards along one of its moves, optionally "uncapturing" a piece of
-- the side to move onto the square the mover vacated.  Also includes
-- the pass-move predecessor when passing is allowed.
retrograde_positions :: Directory -> MovePosition -> [MovePosition];
retrograde_positions dir mp@(pos, color) = do {
(i :: Piecenum , p :: Piece) <- assocs dir;
guard $ other color == get_color p;
new_loc <- moves dir pos i;
-- | No captures for retrograde analysis. (Though later, uncaptures.)
guard $ isNothing $ at_location pos new_loc;
let { pos2 = pos // [(i,Just new_loc)]; };
-- either no uncapture ([]), or restore one currently-absent piece of
-- the moving side onto the square piece i came from
uncapture :: [(Piecenum, Maybe Location)] <- [] : do {
(i2, ml) <- assocs pos2;
guard $ (get_color $ dir ! i2) == color;
guard $ isNothing ml;
return [(i2, pos ! i)]; -- ^old position
};
let { pos3 = pos2 // uncapture; };
guard $ has_king dir (pos3, other color);
-- ^ optimization: other color has a king, possibly uncaptured
return (pos3, other color);
} ++ do {
guard pass_permitted;
let {newpos = do_pass mp};
guard $ has_king dir newpos;
return newpos;
};
-- | True when the list contains at least one duplicate element.
-- Quadratic, which is fine for the small piece lists used here.
overlapping :: Eq a => [a] -> Bool;
overlapping = let {
go [] = False;
go (x:xs) = elem x xs || go xs;
} in go;
-- | Every assignment of the directory's pieces to distinct squares (or
-- off-board via Nothing), for both players to move.
all_positions :: Directory -> [MovePosition];
all_positions dir = do {
l :: [Maybe Location] <- mapM (\_ -> Nothing:(map Just $ range board_bounds)) $ elems dir;
guard $ not $ overlapping $ catMaybes l;
color <- [White, Black];
return (listArray (bounds dir) l, color);
};
-- entries in which the value is known without analysis
final_entries :: Directory -> [(MovePosition,Value)];
final_entries dir = losses (kingless dir)
++ if stalemate_draw then stalemates dir
else losses (locked_with_king dir);
-- | Tag each position as a loss for the side to move.
losses :: [MovePosition] -> [(MovePosition,Value)];
losses = map (\a -> (a,loss));
-- | Positions where the side to move has no royal piece.
kingless :: Directory -> [MovePosition];
kingless dir = filter (not . has_king dir) $ all_positions dir ;
-- | Positions where the side to move has a king but no moves at all.
locked_with_king :: Directory -> [MovePosition];
locked_with_king dir = filter (\p -> has_king dir p && (null $ successors dir p)) $ all_positions dir;
-- | Compute a position's value from the (possibly partial) values of
-- its successors.  Positions are keyed by their square assignment
-- ('elems'); the asserts check every successor belongs to the opponent.
value_via_successors :: Directory -> MovePosition -> [(MovePosition,Value)] -> Maybe Value;
value_via_successors dir mp@(_,color) succs = let {
table :: Map [Maybe Location] Value;
table = Map.fromList $ map (\((p, c2),v) -> assert (c2 == other color) (elems p,v)) succs;
} in combine_values_greedy $ map ((flip Map.lookup) table) $ map (\(p,c2) -> assert (c2 == other color) $ elems p) $ successors dir mp ;
-- | Forward move generation: every move of every piece of the side to
-- move, removing any captured piece, plus the pass move if allowed.
-- A position without a king for the mover has no successors.
successors :: Directory -> MovePosition -> [MovePosition];
successors dir mp@(pos,color) = if not $ has_king dir mp
then []
else do {
(i :: Piecenum, p :: Piece) <- assocs dir;
guard $ color == get_color p;
new_loc <- moves dir pos i;
return (pos // ((i,Just new_loc):case at_location pos new_loc of {
Nothing -> [];
Just captured -> assert (captured /= i) [(captured, Nothing)]})
, other color);
} ++ if pass_permitted then [do_pass mp] else [];
-- | Reduce step of the map-reduce retrograde iteration: evaluate a
-- position once none of its successors is already Known.
redfn :: Directory -> MovePosition -> [((MovePosition,Value),Epoch)] -> Maybe (MovePosition,Value);
redfn dir mp esuccs = do {
-- skip already known values
guard $ all (\case {(_,Known) -> False;_->True}) esuccs;
v <- value_via_successors dir mp (map fst esuccs);
return (mp,v);
};
-- | Map step: a solved position is Known, and seeds its retrograde
-- predecessors as Unknown.
mapfn :: Directory -> (MovePosition, Value) -> [(MovePosition, Epoch)];
mapfn dir (pos,_val) = (pos,Known):(map (\x -> (x, Unknown)) $ retrograde_positions dir pos);
-- | One retrograde-analysis generation over the known entries.
do_mapreduce :: Directory -> [Entry] -> [Entry];
do_mapreduce dir = catMaybes . mapReduce (mapfn dir) (redfn dir);
-- | Sanity check: each retrograde predecessor should re-reach 'pos'.
test_retro1 :: Directory -> MovePosition -> [(MovePosition, Bool)];
test_retro1 dir pos = do {
p1 <- retrograde_positions dir pos;
return (p1, elem pos $ successors dir p1);
};
simple_pos :: MovePosition -> (Color, [Maybe Location]);
simple_pos (p,color) = (color, elems p);
test_retro2 :: Directory -> MovePosition -> Bool;
test_retro2 dir = and . map snd . test_retro1 dir;
-- omit answers which are already known
test_compare :: Directory -> [Entry] -> Set MovePosition;
test_compare dir seed = let {
s1 :: Set MovePosition;
s1 = Set.fromList $ map fst $ seed;
s2 :: Set MovePosition;
s2 = Set.fromList $ map fst $ do_mapreduce dir seed;
} in Set.intersection s1 s2;
-- | A solved position together with its game-theoretic value.
type Entry = (MovePosition, Value);
-- successive generations of the retrograde iteration on the test set
gen_0 :: [Entry];
gen_0 = final_entries test_directory;
gen_1 :: [Entry];
gen_1 = do_mapreduce test_directory gen_0;
gen_2 :: [Entry];
gen_2 = do_mapreduce test_directory $ gen_0 ++ gen_1;
-- | Iterate 'do_mapreduce' to a fixed point, returning each new
-- generation until no new entries appear.
iterate_mapreduce :: Directory -> [Entry] -> [[Entry]];
iterate_mapreduce dir start = let {
more = do_mapreduce dir start;
} in if null more then []
else more:iterate_mapreduce dir (start ++ more);
-- | One-character glyph for a piece, chess-style: uppercase for White,
-- lowercase for Black, "?" for anything not in the table.  The table is
-- searched in order, so earlier entries win exactly as the original
-- if-chain did.
display_piece :: Piece -> String;
display_piece p = let {
glyphs = [(king White, "K")
,(king Black, "k")
,(queen White, "Q")
,(rook Black, "r")
,(rook White, "R")
,(bishop White, "B")
,(knight White, "N")
,(bishop Black, "b")
,(knight Black, "n")
,(man White, "M")
,(man Black, "m")];
} in maybe "?" id $ lookup p glyphs;
-- | Render a position using piece glyphs from the directory.
show_board_p :: Directory -> Position -> String;
show_board_p dir = show_board_f (display_piece . (dir!));
-- | Render a position showing raw piece numbers.
show_board_numbers :: Position -> String;
show_board_numbers = show_board_f (\(Piecenum n) -> show n);
-- | Shared board renderer: top rank first, "-" for empty squares.
show_board_f ::(Piecenum -> String) -> Position -> String;
show_board_f f pos = unlines $ do { rank <- reverse $ enumFromTo (Row 0) max_row;
return $ unwords $ do {
file <- enumFromTo (Column 0) max_column;
return $ case at_location pos $ Location (file, rank) of {
Nothing -> "-";
Just num -> f num;
}}
};
-- | Board plus side to move (directory argument is currently unused).
show_mp :: Directory -> MovePosition -> String;
show_mp _dir (p,color) = show_board_numbers p ++ show color;
show_entry :: Directory -> Entry -> String;
show_entry dir (mp,val) = show_mp dir mp ++ " " ++ show val;
-- | The mover is in check iff, were it the opponent's turn instead,
-- some opposing move would capture the king.
in_check :: Directory -> MovePosition -> Bool;
in_check dir mp = assert (has_king dir mp) $
illegal_position dir $ do_pass mp;
-- | King can be captured
illegal_position :: Directory -> MovePosition -> Bool;
illegal_position dir mp = assert (has_king dir $ do_pass mp) $
any (not . has_king dir) $ successors dir mp;
-- | Switch the side to move without touching the board.
do_pass :: MovePosition -> MovePosition;
do_pass (pos,color) = (pos, other color);
-- Note: this method of discovering stalemates does not work, i.e.,
-- never stalemate, if passing is permitted. I feel this logically
-- makes sense, though it would alternatively be possible to
-- special-case stalemate as a position in which one can instantly
-- claim a draw even if passing is permitted.
no_legal_moves :: Directory -> MovePosition -> Bool;
no_legal_moves dir = null . filter (not . illegal_position dir) . successors dir;
stalemate :: Directory -> MovePosition -> Bool;
stalemate dir mp = (not $ in_check dir mp) && no_legal_moves dir mp;
checkmate :: Directory -> MovePosition -> Bool;
checkmate dir mp = in_check dir mp && no_legal_moves dir mp;
-- | All stalemated positions, valued as draws.
stalemates :: Directory -> [Entry];
stalemates dir = do {
mp <- all_positions dir;
guard $ has_king dir mp;
guard $ stalemate dir mp;
return (mp, draw);
};
-- | Every piece type expressible in the 'Piece' data type.
all_pieces :: [Piece];
all_pieces = whole_range;
-- | Sorted multisets of 'size' pieces containing a royal piece of each
-- color.  (Sorting canonicalizes the multiset.)
piece_set :: Integer -> [[Piece]];
piece_set size = do {
z <- genericReplicateM size all_pieces;
guard $ any (\p -> is_royal p && (White == get_color p)) z;
guard $ any (\p -> is_royal p && (Black == get_color p)) z;
guard (z == sort z);
return z;
};
-- | Faster enumeration of canonical piece multisets: builds the sorted
-- list directly, and additionally discards sets that are not the
-- canonical representative under color exchange (z <= its color flip).
piece_set2 :: Integer -> [Piece] -> [[Piece]];
piece_set2 0 z {-^ accumulating parameter -} = assert (z == sort z) $ if (any (\p -> is_royal p && (White == get_color p)) z)
&& (any (\p -> is_royal p && (Black == get_color p)) z)
&& z <= sort (map flip_color z)
then return z
else mzero;
piece_set2 n z = do {
h <- all_pieces;
guard $ case z of {[] -> True; (x:_)->h<=x};
piece_set2 (pred n) (h:z);
};
-- | Swap a piece's color, keeping every other attribute.
flip_color :: Piece -> Piece;
flip_color (Piece x1 x2 x3 x4 x5 x6 c) = Piece x1 x2 x3 x4 x5 x6 $ other c;
-- | Solved-position table: value of every analyzed MovePosition.
type Map_v = Map MovePosition Value;
-- all generations of the retrograde iteration for the test directory
all_list :: [[Entry]];
all_list = gen_0:iterate_mapreduce test_directory gen_0;
allmap :: Map_v;
allmap = Map.fromList $ concat all_list;
-- | Replay a principal line from entry 'p': stop at a lost position,
-- otherwise follow the minimum-valued known successor.  Errors if the
-- position is absent from the table.
do_trace :: Directory -> Map_v -> Entry -> [Entry];
do_trace dir m p = p:case Map.lookup (fst p) m of {
Just v | v == loss -> []
| True -> let {next = minimumBy (comparing (snd::(Entry -> Value))) $ do {
s <- successors dir $ fst p;
let {nv = Map.lookup s m};
guard $ isJust nv;
return (s,fromJust nv)}} in do_trace dir m next;
Nothing -> error "do_trace"
};
-- | Entry whose win takes the most moves, per 'winlength' ordering.
longest_win :: [Entry] -> Entry;
longest_win = maximumBy (\x y -> winlength (snd x) (snd y));
-- | Print the longest winning line found in the full analysis.
show_longest :: IO();
show_longest = do {
eval_iterate;
mapM_ (putStrLn . show_entry test_directory) $ do_trace test_directory allmap $ longest_win $ concat all_list;
};
-- | The infinite list 0, 1, 2, …; used to number printed output.
integers_from_zero :: [Integer];
integers_from_zero = [0 ..];
-- | Force and summarize the full analysis: the piece directory, then
-- the size of each generation.
eval_iterate :: IO();
eval_iterate = do {
mapM_ print $ assocs test_directory;
mapM_ print $ zip integers_from_zero $map length all_list;
};
-- NOTE(review): despite the name, this enumerates 4-piece sets
-- (piece_set2 is called with 4).
three_pieces :: [[Piece]];
three_pieces = piece_set2 4 [];
three_pieces_length :: Integer;
-- three_pieces_length = genericLength $ three_pieces ();
--three_pieces_length = 562464;
-- hard-coded count of 'three_pieces', checked by the IO action below
three_pieces_length = 62432010;
three_pieces_length_check :: IO();
three_pieces_length_check = assert (genericLength three_pieces == three_pieces_length) $ return ();
-- | Build a piece directory numbered 0..length-1.
make_dir :: [Piece] -> Directory;
make_dir pcs = listArray (Piecenum 0, Piecenum $ pred $ genericLength pcs) pcs;
-- | Fully analyze the n-th enumerated piece set and report the number
-- of solved positions and generations; for deep sets (>9 generations)
-- also print the longest winning line.
try_three_pieces :: Integer -> IO();
try_three_pieces n = do {
let {pcs = genericIndex three_pieces n};
mapM_ print $ zip integers_from_zero pcs;
let {
dir :: Directory;
dir = make_dir pcs;
fin_en :: [Entry];
fin_en = final_entries dir;
ls :: [[Entry]];
ls = fin_en:iterate_mapreduce dir fin_en;
amap1 :: Map_v;
amap1 = Map.fromList $ concat ls;
longest :: Entry;
longest = maximumBy (\x y -> winlength (snd x) (snd y)) $ concat ls;
len :: Integer;
len = genericLength ls;
};
putStrLn $"totals " ++ (show $ Map.size amap1);
putStr "length ";
print $ len;
if len>9 then mapM_ (putStrLn . show_entry dir) $ do_trace dir amap1 $ longest
else return ();
};
-- | Endless loop: analyze randomly chosen piece sets.
rand_three_pieces :: IO();
rand_three_pieces = do {
n :: Integer <- randomRIO (0,pred three_pieces_length);
putStr "seed ";
print n;
try_three_pieces n;
rand_three_pieces;
};
-- | Every value of a bounded, indexable type, from 'minBound' to
-- 'maxBound' in 'range' order.
whole_range :: (Ix a, Bounded a) => [a];
whole_range = let {
full_span = (minBound, maxBound);
} in range full_span;
-- | Emit the full piece catalog as C++ constructor calls (one per
-- line, comma separated) for a companion C++ implementation.
all_pieces_for_cplusplus :: String;
all_pieces_for_cplusplus = concat $ intersperse ",\n" $ do {
o :: Orthogonal <- whole_range;
d :: Diagonal <- whole_range;
k :: Knight <- whole_range;
a :: Alfil <- whole_range;
da :: Dabbaba <- whole_range;
return $ "Piece(Orthogonal::"++show o
++", Diagonal::"++show d
++", Knight::"++show k
++", Alfil::"++show a
-- the C++ enum uses different constructor names for Dabbaba
++", Dabbaba::"++case da of {
NoDabbaba -> "NoDabbaba";
Dabbaba_single -> "Single";
Dabbaba_rider -> "Rider";
}
++", White, true)";
};
-- | Moves of a lone piece on an otherwise empty board, sorted.
moves_on_empty_board :: Piece -> Location -> [Location];
moves_on_empty_board p mylocation = let {
dir :: Directory;
dir = make_dir [p];
pos :: Position;
pos = listArray (Piecenum 0, Piecenum 0) [Just mylocation];
} in sort $ moves dir pos (Piecenum 0);
all_simple_pieces :: [Piece];
all_simple_pieces = filter is_simple_piece all_pieces;
-- NOTE(review): "simple" here means a White royal piece only; every
-- other piece is excluded.
is_simple_piece :: Piece -> Bool;
is_simple_piece (Piece Royal _ _ _ _ _ White) = True;
is_simple_piece _ = False;
-- | Reference data for cross-checking: each simple piece on each
-- square, with its empty-board moves.
verify_piece_locs_inputs :: [(Piece, Location, [Location])];
verify_piece_locs_inputs = do {
p <- all_simple_pieces;
l <- range board_bounds;
return (p,l,moves_on_empty_board p l);
};
-- | Pair up a flat integer list as (column, row) offsets; the list
-- must have even length.
take2 :: [Integer] -> [Offset];
take2 [] = [];
take2 (x:y:rest) = (Column x, Row y): take2 rest;
take2 _ = error "odd number for take2";
-- | Compare externally produced move lists (read from stdin, one
-- board's worth of coordinates per line) against our reference data.
verify_piece_locs :: IO ();
verify_piece_locs = do {
ll :: [[Location]] <- getContents >>= return . map (map Location . sort . take2 . map read . words) . lines;
if length ll /= length verify_piece_locs_inputs
then error "not same length"
else return();
zipWithM_ (\(a,b,l1) l2 -> print (a,b,if l1==l2 then (True,[],[]) else (False,l1,l2))) verify_piece_locs_inputs ll;
};
-- | Decode a square from its integer wire encoding: 0 means off-board
-- (captured); 1..rows*columns encode column-major as
-- n-1 = column*num_rows + row.  Inverse of 'location_to_integer'.
location_from_integer :: Integer -> Maybe Location;
location_from_integer n = if n<0
then error "negative location_from_integer"
else let
{(num_rows, num_columns) =
case (max_row, max_column) of
{(Row rmax, Column cmax) -> (rmax+1, cmax+1)};
maxsize = num_rows * num_columns
}
in if n == 0
then Nothing
else if n > maxsize
then error "too big location_from_integer"
else let {
ans = divMod (n-1) num_rows;
} in Just $ Location (Column $ fst ans, Row $ snd ans);
-- | Decode a MovePosition from integers: head is the color (0=White,
-- 1=Black), tail is one encoded square per piece of test_directory.
read_moveposition :: [Integer] -> MovePosition;
read_moveposition l = (listArray (bounds test_directory) $ map location_from_integer $ tail l,
case head l of {
0 -> White;
1 -> Black;
_ -> error "read_moveposition color invalid";
});
-- | Cross-check depth-'depth' successor expansion of test_position
-- against an external list read from stdin.
verify_successors_concat :: Integer -> IO();
verify_successors_concat depth = do {
s1 :: [MovePosition] <- getContents >>= return . sort . map (read_moveposition . map read . words) . lines;
-- mapM_ (putStrLn . show_mp undefined) s1;
let {correct = sort $ recursive_successors depth test_position};
-- mapM_ (putStrLn . show_mp undefined) correct;
mapM_ (putStrLn . unwords . map show . position_to_integer) correct;
print $ s1==correct;
};
-- | All positions reachable in exactly n moves (with repetition).
recursive_successors :: Integer -> MovePosition -> [MovePosition];
recursive_successors 0 p = [p];
recursive_successors n p = concatMap (recursive_successors $ pred n) $ successors test_directory p;
-- | Encode a square as an integer; inverse of 'location_from_integer'.
location_to_integer :: Maybe Location -> Integer;
location_to_integer l = case l of {
Nothing -> 0;
Just (Location (Column c, Row r)) -> 1+ c*(case max_row of {Row n -> n} + 1) +r;
};
-- | Wire encoding of a MovePosition: color tag then encoded squares.
position_to_integer :: MovePosition -> [Integer];
position_to_integer (p,c) = (case c of {
White->0;Black->1}):(map location_to_integer $ elems p);
-- | Split a list into consecutive chunks of length i (the final chunk
-- may be shorter).  The empty list yields no chunks.
n_chunk :: Integer -> [a] -> [[a]];
n_chunk i = let {
go [] = [];
go l = case genericSplitAt i l of {(front, back) -> front : go back};
} in go;
-- | Check externally supplied (position, successors...) groups (five
-- integer tuples per line) against our own 'successors'; silent on
-- agreement, prints the group on mismatch.
-- NOTE(review): the lambda pattern (h:t) is partial — a line producing
-- no chunks would crash; confirm the input format guarantees this.
verify_successors :: IO();
verify_successors = do {
ii :: [[[Integer]]] <- getContents >>= return . map (n_chunk 5 . map read . words) . lines;
(flip mapM_) ii $ (\(h:t) -> do {
if sort (map read_moveposition t) == (sort $ successors test_directory $ read_moveposition h) then return ()
else do {
print h;
print $ elems $ fst $ read_moveposition h;
print t;
}});
return ();
};
-- | Wire encoding of one solved entry: position integers then value.
table_line :: Entry -> [Integer];
table_line (mp,v) = position_to_integer mp ++ [c_value v];
-- | Inverse of 'table_line'.
read_table_line :: [Integer] -> Entry;
read_table_line is = (read_moveposition $ init is, read_c_value $ last is);
-- | Load a table dump, skipping '#' comment lines.
read_dump_from_file :: String -> IO [Entry];
read_dump_from_file fn = readFile fn >>= return . (map (read_table_line . map read . words)) . filter (not . is_comment) . lines;
-- | A line is a comment exactly when its first character is '#'; the
-- empty line is not a comment.
is_comment :: String -> Bool;
is_comment ('#':_) = True;
is_comment _ = False;
-- | Pair every element with its image under f.
zip_map :: (a -> b) -> [a] -> [(a,b)];
zip_map f = map (\x -> (x, f x));
-- | Convert a positive win 'Value' to a distance to mate via
-- (n `div` 2) - 1; errors on non-positive values.
-- NOTE(review): the exact encoding of 'Value' is defined elsewhere in
-- this module — confirm the halving/offset against that definition.
mate_distance :: Value -> Integer;
mate_distance (Value n) = if n>0
then div n 2-1
else error "need positive Value";
} --end
| kenta2/retrograde | Chess.hs | agpl-3.0 | 23,150 | 0 | 21 | 4,068 | 9,366 | 5,108 | 4,258 | -1 | -1 |
import Data.List
-- | Two lists are anagrams of each other exactly when sorting them
-- yields the same sequence.
isAnagram :: Ord a => [a] -> [a] -> Bool
isAnagram xs ys = canonical xs == canonical ys
  where canonical = sort
| nstarke/icc13-introduction-to-haskell | anagram.hs | lgpl-3.0 | 50 | 0 | 6 | 11 | 26 | 12 | 14 | 2 | 1 |
module Probability.BreakTest (breakTest) where
import Test.HUnit
import Data.Ratio ((%))
import Data.Set (fromList, size)
import Probability.Event
import Probability.Break
-- | Assert that breaking the given set into subsets of the given sizes
-- has exactly the expected probability.
assertBreak :: Ord a => Rational -> [a] -> [Integer] -> Assertion
assertBreak expected set break = expected @=? probability (Break (fromList set) break)

-- | Same, for breaks where each subset has a limited sample-space size.
assertLimSpaceBreak :: Ord a => Rational -> [a] -> [(Integer, Integer)] -> Assertion
assertLimSpaceBreak expected set break
  = expected @=? probability (LimSpaceBreak (fromList set) break)

-- Top-level suite exported by this module.
breakTest = TestLabel "Test computation of set breaks" $ TestList [
    simpleBreakTest,
    limSpaceBreakTest
  ]

simpleBreakTest = TestList [
    testIllegalBreaksCannotHappen,
    testBreakOfTwoElements,
    testBreakOfThreeElements,
    testBreakManyElements,
    testBreakIntoThreeSubsets
  ]

testIllegalBreaksCannotHappen = TestCase $ do
  -- sum of breaks is not equal to set's length
  assertBreak (0 % 1) [0..9] [3, 3, 3]
  assertBreak (0 % 1) [0, 1] [2, 2]
  -- empty breaks list also return 0% probability
  assertBreak (0 % 1) [0..21] []
  -- empty set cannot break anyhow
  assertBreak (0 % 1) ([] :: [Int]) []

testBreakOfTwoElements = TestCase $ do
  assertBreak (1 % 4) ['Q', 'J'] [2, 0]
  assertBreak (1 % 4) ['Q', 'J'] [0, 2]
  assertBreak (1 % 2) ['Q', 'J'] [1, 1]

testBreakOfThreeElements = TestCase $ do
  assertBreak (1 % 8) ['K', '1', '8'] [0, 3]
  assertBreak (3 % 8) ['K', '1', '8'] [1, 2]

-- Exhaustiveness check: the probabilities of all two-way breaks of a
-- set must sum to 1.
testBreakManyElements = TestCase $ do
    assertAllBreaks [0..100]
  where
    assertAllBreaks :: Ord a => [a] -> Assertion
    assertAllBreaks lst = let set = fromList lst
                              setSize = fromIntegral $ size set
                          in (1 % 1) @=? sum (map (probability . Break set) [[x, setSize - x] | x <- [0..setSize]])

testBreakIntoThreeSubsets = TestCase $ do
  assertBreak (6 % 27) ['K', 'J', '0'] [1, 1, 1]
  assertBreak (3 % 27) ['A', 'Q', 'J'] [2, 1, 0]
  assertBreak (3 % 27) ['A', 'Q', 'J'] [1, 0, 2]

limSpaceBreakTest = TestList [
    testTwoElemsBreak,
    testThreeElemsBreak,
    testSixElemsBreak
  ]

testTwoElemsBreak = TestCase $ do
  assertLimSpaceBreak (1 % 1) ['K', 'Q'] [(1, 1), (1, 1)]
  assertLimSpaceBreak (0 % 1) ['K', 'Q'] [(2, 1), (0, 1)]
  assertLimSpaceBreak (52 % 100) ['K', 'Q'] [(1, 13), (1, 13)]

testThreeElemsBreak = TestCase $ do
  assertLimSpaceBreak (11 % 100) ['K', 'Q', 'J'] [(3, 13), (0, 13)]
  assertLimSpaceBreak (39 % 100) ['K', 'Q', 'J'] [(1, 13), (2, 13)]

testSixElemsBreak = TestCase $ do
  assertLimSpaceBreak (6 % 805) [1..6] [(0, 13), (6, 13)]
  assertLimSpaceBreak (117 % 1610) [1..6] [(1, 13), (5, 13)]
  assertLimSpaceBreak (39 % 161) [1..6] [(2, 13), (4, 13)]
  assertLimSpaceBreak (286 % 805) [1..6] [(3, 13), (3, 13)]
| Sventimir/bridge-tools | Probability/BreakTest.hs | apache-2.0 | 2,850 | 0 | 16 | 679 | 1,184 | 662 | 522 | 58 | 1 |
module StronglyLiveVariables (
runAnalysis
) where
import Data.Graph.Inductive.Graph (labNodes, edges)
import Data.Graph.Inductive.PatriciaTree (Gr)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe (fromMaybe)
import Data.Tuple (swap)
import AttributeGrammar
import Monotone (Context, liftTransfer, mfp)
-- Exposed analysis
-- | Run the (strongly) live-variables analysis over the control-flow
-- graph as a backward MFP problem: edges are reversed before being
-- handed to 'mfp', and the result maps each label to its context-wise
-- entry/exit variable sets.
runAnalysis :: Gr ProcOrStat () -> Int -> Map Int ([(Context, Set String)], [(Context, Set String)])
runAnalysis graph entryLabel = mfp (Map.fromList (labNodes graph)) [entryLabel] extremalValue (map swap (edges graph)) unaryTransfer binaryTransfer merge
-- Internal functions
-- | No variables are live at the program exit.
extremalValue :: Set String
extremalValue = Set.empty
-- | May-analysis: join information from multiple successors by union.
merge :: Set String -> Set String -> Set String
merge = Set.union
unaryTransfer :: ProcOrStat -> [(Context, Set String)] -> [(Context, Set String)]
unaryTransfer (P _) = id -- Ignore procedures
unaryTransfer (S s) = liftTransfer $ transfer s
-- | Edge transfer: look up the statement at the edge's source label
-- and apply its unary transfer.
binaryTransfer :: (Int, Int) -> Map Int ProcOrStat -> Map Int [(Context, Set String)] -> [(Context, Set String)] -> [(Context, Set String)]
binaryTransfer (l, _) nodes _ = unaryTransfer $ nodes Map.! l
-- | Per-statement transfer function.
-- NOTE(review): the usual kill/gen form is (input \\ kill) `union` gen;
-- here gen is additionally reduced by kill, so e.g. in "x := x + 1" the
-- variable x does not become live — confirm this is the intended
-- strongly-live semantics.
transfer :: Stat' -> Set String -> Set String
transfer stat input = (input Set.\\ kill stat) `Set.union` (gen stat Set.\\ kill stat)
-- | Variables (re)defined by a statement.
kill :: Stat' -> Set String
kill (IAssign' _ name _) = Set.singleton name
kill (BAssign' _ name _) = Set.singleton name
kill (Malloc' _ name _) = Set.singleton name
kill _ = Set.empty
-- | Variables read by a statement.
-- NOTE(review): Malloc' ignores its size expression, so variables used
-- in the allocation size are not marked live — verify this is intended.
gen :: Stat' -> Set String
gen (Skip' _) = Set.empty
gen (IfThenElse' _ cond _ _) = freeVarsB cond
gen (While' _ cond _) = freeVarsB cond
gen (IAssign' _ _ val) = freeVarsI val
gen (BAssign' _ _ val) = freeVarsB val
gen (Malloc' _ name size) = Set.empty
gen (Free' _ _) = Set.empty
gen (RefAssign' _ _ val) = freeVarsI val
gen (Continue' _) = Set.empty
gen (Break' _) = Set.empty
gen (Call' _ _ name _ _) = Set.empty
gen (Seq' _ _) = error "Called gen on Seq"
-- | Free variables of an integer expression.
freeVarsI :: IExpr -> Set String
freeVarsI (IConst v) = Set.empty
freeVarsI (Var n) = Set.singleton n
freeVarsI (Plus x y) = Set.union (freeVarsI x) (freeVarsI y)
freeVarsI (Minus x y) = Set.union (freeVarsI x) (freeVarsI y)
freeVarsI (Times x y) = Set.union (freeVarsI x) (freeVarsI y)
freeVarsI (Divide x y) = Set.union (freeVarsI x) (freeVarsI y)
freeVarsI (Deref x) = freeVarsI x
-- | Free variables of a boolean expression.
freeVarsB :: BExpr -> Set String
freeVarsB (BConst v) = Set.empty
freeVarsB (BVar n) = Set.singleton n
freeVarsB (LessThan x y) = Set.union (freeVarsI x) (freeVarsI y)
freeVarsB (GreaterThan x y) = Set.union (freeVarsI x) (freeVarsI y)
freeVarsB (LessEqual x y) = Set.union (freeVarsI x) (freeVarsI y)
freeVarsB (GreaterEqual x y) = Set.union (freeVarsI x) (freeVarsI y)
freeVarsB (IEqual x y) = Set.union (freeVarsI x) (freeVarsI y)
freeVarsB (BEqual x y) = Set.union (freeVarsB x) (freeVarsB y)
freeVarsB (And x y) = Set.union (freeVarsB x) (freeVarsB y)
freeVarsB (Or x y) = Set.union (freeVarsB x) (freeVarsB y)
freeVarsB (Not x) = freeVarsB x
| aochagavia/CompilerConstruction | mf/src/StronglyLiveVariables.hs | apache-2.0 | 3,019 | 0 | 11 | 496 | 1,393 | 712 | 681 | 63 | 1 |
module Interface.Clp where
-- standard modules
import Data.List
import Data.Maybe
import System.IO
import System.IO.Unsafe
import System.Process
-- local modules
import Helpful.Directory
--import Data.Time.Clock (diffUTCTime, getCurrentTime)
--import Debug.Trace
-- | Feed an LP (in .lp format) to the external @clp@ solver and report
-- on the final status line:
--     Nothing     — primal infeasible,
--     Just True   — solved with optimal objective exactly 0,
--     Just False  — dual infeasible, or a nonzero optimal objective.
-- Uses 'unsafePerformIO' (with NOINLINE below) to present the solver
-- call as a pure function; it writes the program to a temp file first.
zeroObjective :: String -> Maybe Bool
zeroObjective p = unsafePerformIO $
  withTempDir "Qstrlib_clp" (\tmpDir -> do
    clpTempFile <- openTempFile tmpDir "clpTempFile.lp"
    hPutStr (snd clpTempFile) p
    hClose $ snd clpTempFile
    clpAnswer <- readProcess "clp" [fst clpTempFile] ""
    -- clp prints its verdict on the last output line
    let answer = last $ lines clpAnswer
    if "PrimalInfeasible" `isPrefixOf` answer then
        return Nothing
    else if "DualInfeasible" `isPrefixOf` answer then
        return $ Just False
    else do
        let value = stripPrefix "Optimal objective " answer
        if isNothing value then
            error $ "clp answered in an unexpected way.\n\
                    \Expected Answer: \"Value of objective function: NUMBER\"\n\
                    \Actual Answer: " ++ clpAnswer
        else if (read $ takeWhile (/= ' ') $ fromJust value :: Float) == 0.0 then
            return $ Just True
        else
            return $ Just False
    )
{-# NOINLINE zeroObjective #-}
| spatial-reasoning/zeno | src/Interface/Clp.hs | bsd-2-clause | 1,232 | 0 | 22 | 306 | 278 | 144 | 134 | 28 | 5 |
module Calculus.Opra16
( module Calculus.Opra
, Opra16(..)
) where
-- standard modules
-- local modules
import Basics
import Calculus.Opra
data Opra16 = Opra16_0_0 | Opra16_0_1 | Opra16_0_2 | Opra16_0_3
| Opra16_0_4 | Opra16_0_5 | Opra16_0_6 | Opra16_0_7
| Opra16_0_8 | Opra16_0_9 | Opra16_0_10 | Opra16_0_11
| Opra16_0_12 | Opra16_0_13 | Opra16_0_14 | Opra16_0_15
| Opra16_0_16 | Opra16_0_17 | Opra16_0_18 | Opra16_0_19
| Opra16_0_20 | Opra16_0_21 | Opra16_0_22 | Opra16_0_23
| Opra16_0_24 | Opra16_0_25 | Opra16_0_26 | Opra16_0_27
| Opra16_0_28 | Opra16_0_29 | Opra16_0_30 | Opra16_0_31
| Opra16_0_32 | Opra16_0_33 | Opra16_0_34 | Opra16_0_35
| Opra16_0_36 | Opra16_0_37 | Opra16_0_38 | Opra16_0_39
| Opra16_0_40 | Opra16_0_41 | Opra16_0_42 | Opra16_0_43
| Opra16_0_44 | Opra16_0_45 | Opra16_0_46 | Opra16_0_47
| Opra16_0_48 | Opra16_0_49 | Opra16_0_50 | Opra16_0_51
| Opra16_0_52 | Opra16_0_53 | Opra16_0_54 | Opra16_0_55
| Opra16_0_56 | Opra16_0_57 | Opra16_0_58 | Opra16_0_59
| Opra16_0_60 | Opra16_0_61 | Opra16_0_62 | Opra16_0_63
| Opra16_1_0 | Opra16_1_1 | Opra16_1_2 | Opra16_1_3
| Opra16_1_4 | Opra16_1_5 | Opra16_1_6 | Opra16_1_7
| Opra16_1_8 | Opra16_1_9 | Opra16_1_10 | Opra16_1_11
| Opra16_1_12 | Opra16_1_13 | Opra16_1_14 | Opra16_1_15
| Opra16_1_16 | Opra16_1_17 | Opra16_1_18 | Opra16_1_19
| Opra16_1_20 | Opra16_1_21 | Opra16_1_22 | Opra16_1_23
| Opra16_1_24 | Opra16_1_25 | Opra16_1_26 | Opra16_1_27
| Opra16_1_28 | Opra16_1_29 | Opra16_1_30 | Opra16_1_31
| Opra16_1_32 | Opra16_1_33 | Opra16_1_34 | Opra16_1_35
| Opra16_1_36 | Opra16_1_37 | Opra16_1_38 | Opra16_1_39
| Opra16_1_40 | Opra16_1_41 | Opra16_1_42 | Opra16_1_43
| Opra16_1_44 | Opra16_1_45 | Opra16_1_46 | Opra16_1_47
| Opra16_1_48 | Opra16_1_49 | Opra16_1_50 | Opra16_1_51
| Opra16_1_52 | Opra16_1_53 | Opra16_1_54 | Opra16_1_55
| Opra16_1_56 | Opra16_1_57 | Opra16_1_58 | Opra16_1_59
| Opra16_1_60 | Opra16_1_61 | Opra16_1_62 | Opra16_1_63
| Opra16_2_0 | Opra16_2_1 | Opra16_2_2 | Opra16_2_3
| Opra16_2_4 | Opra16_2_5 | Opra16_2_6 | Opra16_2_7
| Opra16_2_8 | Opra16_2_9 | Opra16_2_10 | Opra16_2_11
| Opra16_2_12 | Opra16_2_13 | Opra16_2_14 | Opra16_2_15
| Opra16_2_16 | Opra16_2_17 | Opra16_2_18 | Opra16_2_19
| Opra16_2_20 | Opra16_2_21 | Opra16_2_22 | Opra16_2_23
| Opra16_2_24 | Opra16_2_25 | Opra16_2_26 | Opra16_2_27
| Opra16_2_28 | Opra16_2_29 | Opra16_2_30 | Opra16_2_31
| Opra16_2_32 | Opra16_2_33 | Opra16_2_34 | Opra16_2_35
| Opra16_2_36 | Opra16_2_37 | Opra16_2_38 | Opra16_2_39
| Opra16_2_40 | Opra16_2_41 | Opra16_2_42 | Opra16_2_43
| Opra16_2_44 | Opra16_2_45 | Opra16_2_46 | Opra16_2_47
| Opra16_2_48 | Opra16_2_49 | Opra16_2_50 | Opra16_2_51
| Opra16_2_52 | Opra16_2_53 | Opra16_2_54 | Opra16_2_55
| Opra16_2_56 | Opra16_2_57 | Opra16_2_58 | Opra16_2_59
| Opra16_2_60 | Opra16_2_61 | Opra16_2_62 | Opra16_2_63
| Opra16_3_0 | Opra16_3_1 | Opra16_3_2 | Opra16_3_3
| Opra16_3_4 | Opra16_3_5 | Opra16_3_6 | Opra16_3_7
| Opra16_3_8 | Opra16_3_9 | Opra16_3_10 | Opra16_3_11
| Opra16_3_12 | Opra16_3_13 | Opra16_3_14 | Opra16_3_15
| Opra16_3_16 | Opra16_3_17 | Opra16_3_18 | Opra16_3_19
| Opra16_3_20 | Opra16_3_21 | Opra16_3_22 | Opra16_3_23
| Opra16_3_24 | Opra16_3_25 | Opra16_3_26 | Opra16_3_27
| Opra16_3_28 | Opra16_3_29 | Opra16_3_30 | Opra16_3_31
| Opra16_3_32 | Opra16_3_33 | Opra16_3_34 | Opra16_3_35
| Opra16_3_36 | Opra16_3_37 | Opra16_3_38 | Opra16_3_39
| Opra16_3_40 | Opra16_3_41 | Opra16_3_42 | Opra16_3_43
| Opra16_3_44 | Opra16_3_45 | Opra16_3_46 | Opra16_3_47
| Opra16_3_48 | Opra16_3_49 | Opra16_3_50 | Opra16_3_51
| Opra16_3_52 | Opra16_3_53 | Opra16_3_54 | Opra16_3_55
| Opra16_3_56 | Opra16_3_57 | Opra16_3_58 | Opra16_3_59
| Opra16_3_60 | Opra16_3_61 | Opra16_3_62 | Opra16_3_63
| Opra16_4_0 | Opra16_4_1 | Opra16_4_2 | Opra16_4_3
| Opra16_4_4 | Opra16_4_5 | Opra16_4_6 | Opra16_4_7
| Opra16_4_8 | Opra16_4_9 | Opra16_4_10 | Opra16_4_11
| Opra16_4_12 | Opra16_4_13 | Opra16_4_14 | Opra16_4_15
| Opra16_4_16 | Opra16_4_17 | Opra16_4_18 | Opra16_4_19
| Opra16_4_20 | Opra16_4_21 | Opra16_4_22 | Opra16_4_23
| Opra16_4_24 | Opra16_4_25 | Opra16_4_26 | Opra16_4_27
| Opra16_4_28 | Opra16_4_29 | Opra16_4_30 | Opra16_4_31
| Opra16_4_32 | Opra16_4_33 | Opra16_4_34 | Opra16_4_35
| Opra16_4_36 | Opra16_4_37 | Opra16_4_38 | Opra16_4_39
| Opra16_4_40 | Opra16_4_41 | Opra16_4_42 | Opra16_4_43
| Opra16_4_44 | Opra16_4_45 | Opra16_4_46 | Opra16_4_47
| Opra16_4_48 | Opra16_4_49 | Opra16_4_50 | Opra16_4_51
| Opra16_4_52 | Opra16_4_53 | Opra16_4_54 | Opra16_4_55
| Opra16_4_56 | Opra16_4_57 | Opra16_4_58 | Opra16_4_59
| Opra16_4_60 | Opra16_4_61 | Opra16_4_62 | Opra16_4_63
| Opra16_5_0 | Opra16_5_1 | Opra16_5_2 | Opra16_5_3
| Opra16_5_4 | Opra16_5_5 | Opra16_5_6 | Opra16_5_7
| Opra16_5_8 | Opra16_5_9 | Opra16_5_10 | Opra16_5_11
| Opra16_5_12 | Opra16_5_13 | Opra16_5_14 | Opra16_5_15
| Opra16_5_16 | Opra16_5_17 | Opra16_5_18 | Opra16_5_19
| Opra16_5_20 | Opra16_5_21 | Opra16_5_22 | Opra16_5_23
| Opra16_5_24 | Opra16_5_25 | Opra16_5_26 | Opra16_5_27
| Opra16_5_28 | Opra16_5_29 | Opra16_5_30 | Opra16_5_31
| Opra16_5_32 | Opra16_5_33 | Opra16_5_34 | Opra16_5_35
| Opra16_5_36 | Opra16_5_37 | Opra16_5_38 | Opra16_5_39
| Opra16_5_40 | Opra16_5_41 | Opra16_5_42 | Opra16_5_43
| Opra16_5_44 | Opra16_5_45 | Opra16_5_46 | Opra16_5_47
| Opra16_5_48 | Opra16_5_49 | Opra16_5_50 | Opra16_5_51
| Opra16_5_52 | Opra16_5_53 | Opra16_5_54 | Opra16_5_55
| Opra16_5_56 | Opra16_5_57 | Opra16_5_58 | Opra16_5_59
| Opra16_5_60 | Opra16_5_61 | Opra16_5_62 | Opra16_5_63
| Opra16_6_0 | Opra16_6_1 | Opra16_6_2 | Opra16_6_3
| Opra16_6_4 | Opra16_6_5 | Opra16_6_6 | Opra16_6_7
| Opra16_6_8 | Opra16_6_9 | Opra16_6_10 | Opra16_6_11
| Opra16_6_12 | Opra16_6_13 | Opra16_6_14 | Opra16_6_15
| Opra16_6_16 | Opra16_6_17 | Opra16_6_18 | Opra16_6_19
| Opra16_6_20 | Opra16_6_21 | Opra16_6_22 | Opra16_6_23
| Opra16_6_24 | Opra16_6_25 | Opra16_6_26 | Opra16_6_27
| Opra16_6_28 | Opra16_6_29 | Opra16_6_30 | Opra16_6_31
| Opra16_6_32 | Opra16_6_33 | Opra16_6_34 | Opra16_6_35
| Opra16_6_36 | Opra16_6_37 | Opra16_6_38 | Opra16_6_39
| Opra16_6_40 | Opra16_6_41 | Opra16_6_42 | Opra16_6_43
| Opra16_6_44 | Opra16_6_45 | Opra16_6_46 | Opra16_6_47
| Opra16_6_48 | Opra16_6_49 | Opra16_6_50 | Opra16_6_51
| Opra16_6_52 | Opra16_6_53 | Opra16_6_54 | Opra16_6_55
| Opra16_6_56 | Opra16_6_57 | Opra16_6_58 | Opra16_6_59
| Opra16_6_60 | Opra16_6_61 | Opra16_6_62 | Opra16_6_63
| Opra16_7_0 | Opra16_7_1 | Opra16_7_2 | Opra16_7_3
| Opra16_7_4 | Opra16_7_5 | Opra16_7_6 | Opra16_7_7
| Opra16_7_8 | Opra16_7_9 | Opra16_7_10 | Opra16_7_11
| Opra16_7_12 | Opra16_7_13 | Opra16_7_14 | Opra16_7_15
| Opra16_7_16 | Opra16_7_17 | Opra16_7_18 | Opra16_7_19
| Opra16_7_20 | Opra16_7_21 | Opra16_7_22 | Opra16_7_23
| Opra16_7_24 | Opra16_7_25 | Opra16_7_26 | Opra16_7_27
| Opra16_7_28 | Opra16_7_29 | Opra16_7_30 | Opra16_7_31
| Opra16_7_32 | Opra16_7_33 | Opra16_7_34 | Opra16_7_35
| Opra16_7_36 | Opra16_7_37 | Opra16_7_38 | Opra16_7_39
| Opra16_7_40 | Opra16_7_41 | Opra16_7_42 | Opra16_7_43
| Opra16_7_44 | Opra16_7_45 | Opra16_7_46 | Opra16_7_47
| Opra16_7_48 | Opra16_7_49 | Opra16_7_50 | Opra16_7_51
| Opra16_7_52 | Opra16_7_53 | Opra16_7_54 | Opra16_7_55
| Opra16_7_56 | Opra16_7_57 | Opra16_7_58 | Opra16_7_59
| Opra16_7_60 | Opra16_7_61 | Opra16_7_62 | Opra16_7_63
| Opra16_8_0 | Opra16_8_1 | Opra16_8_2 | Opra16_8_3
| Opra16_8_4 | Opra16_8_5 | Opra16_8_6 | Opra16_8_7
| Opra16_8_8 | Opra16_8_9 | Opra16_8_10 | Opra16_8_11
| Opra16_8_12 | Opra16_8_13 | Opra16_8_14 | Opra16_8_15
| Opra16_8_16 | Opra16_8_17 | Opra16_8_18 | Opra16_8_19
| Opra16_8_20 | Opra16_8_21 | Opra16_8_22 | Opra16_8_23
| Opra16_8_24 | Opra16_8_25 | Opra16_8_26 | Opra16_8_27
| Opra16_8_28 | Opra16_8_29 | Opra16_8_30 | Opra16_8_31
| Opra16_8_32 | Opra16_8_33 | Opra16_8_34 | Opra16_8_35
| Opra16_8_36 | Opra16_8_37 | Opra16_8_38 | Opra16_8_39
| Opra16_8_40 | Opra16_8_41 | Opra16_8_42 | Opra16_8_43
| Opra16_8_44 | Opra16_8_45 | Opra16_8_46 | Opra16_8_47
| Opra16_8_48 | Opra16_8_49 | Opra16_8_50 | Opra16_8_51
| Opra16_8_52 | Opra16_8_53 | Opra16_8_54 | Opra16_8_55
| Opra16_8_56 | Opra16_8_57 | Opra16_8_58 | Opra16_8_59
| Opra16_8_60 | Opra16_8_61 | Opra16_8_62 | Opra16_8_63
| Opra16_9_0 | Opra16_9_1 | Opra16_9_2 | Opra16_9_3
| Opra16_9_4 | Opra16_9_5 | Opra16_9_6 | Opra16_9_7
| Opra16_9_8 | Opra16_9_9 | Opra16_9_10 | Opra16_9_11
| Opra16_9_12 | Opra16_9_13 | Opra16_9_14 | Opra16_9_15
| Opra16_9_16 | Opra16_9_17 | Opra16_9_18 | Opra16_9_19
| Opra16_9_20 | Opra16_9_21 | Opra16_9_22 | Opra16_9_23
| Opra16_9_24 | Opra16_9_25 | Opra16_9_26 | Opra16_9_27
| Opra16_9_28 | Opra16_9_29 | Opra16_9_30 | Opra16_9_31
| Opra16_9_32 | Opra16_9_33 | Opra16_9_34 | Opra16_9_35
| Opra16_9_36 | Opra16_9_37 | Opra16_9_38 | Opra16_9_39
| Opra16_9_40 | Opra16_9_41 | Opra16_9_42 | Opra16_9_43
| Opra16_9_44 | Opra16_9_45 | Opra16_9_46 | Opra16_9_47
| Opra16_9_48 | Opra16_9_49 | Opra16_9_50 | Opra16_9_51
| Opra16_9_52 | Opra16_9_53 | Opra16_9_54 | Opra16_9_55
| Opra16_9_56 | Opra16_9_57 | Opra16_9_58 | Opra16_9_59
| Opra16_9_60 | Opra16_9_61 | Opra16_9_62 | Opra16_9_63
| Opra16_10_0 | Opra16_10_1 | Opra16_10_2 | Opra16_10_3
| Opra16_10_4 | Opra16_10_5 | Opra16_10_6 | Opra16_10_7
| Opra16_10_8 | Opra16_10_9 | Opra16_10_10 | Opra16_10_11
| Opra16_10_12 | Opra16_10_13 | Opra16_10_14 | Opra16_10_15
| Opra16_10_16 | Opra16_10_17 | Opra16_10_18 | Opra16_10_19
| Opra16_10_20 | Opra16_10_21 | Opra16_10_22 | Opra16_10_23
| Opra16_10_24 | Opra16_10_25 | Opra16_10_26 | Opra16_10_27
| Opra16_10_28 | Opra16_10_29 | Opra16_10_30 | Opra16_10_31
| Opra16_10_32 | Opra16_10_33 | Opra16_10_34 | Opra16_10_35
| Opra16_10_36 | Opra16_10_37 | Opra16_10_38 | Opra16_10_39
| Opra16_10_40 | Opra16_10_41 | Opra16_10_42 | Opra16_10_43
| Opra16_10_44 | Opra16_10_45 | Opra16_10_46 | Opra16_10_47
| Opra16_10_48 | Opra16_10_49 | Opra16_10_50 | Opra16_10_51
| Opra16_10_52 | Opra16_10_53 | Opra16_10_54 | Opra16_10_55
| Opra16_10_56 | Opra16_10_57 | Opra16_10_58 | Opra16_10_59
| Opra16_10_60 | Opra16_10_61 | Opra16_10_62 | Opra16_10_63
| Opra16_11_0 | Opra16_11_1 | Opra16_11_2 | Opra16_11_3
| Opra16_11_4 | Opra16_11_5 | Opra16_11_6 | Opra16_11_7
| Opra16_11_8 | Opra16_11_9 | Opra16_11_10 | Opra16_11_11
| Opra16_11_12 | Opra16_11_13 | Opra16_11_14 | Opra16_11_15
| Opra16_11_16 | Opra16_11_17 | Opra16_11_18 | Opra16_11_19
| Opra16_11_20 | Opra16_11_21 | Opra16_11_22 | Opra16_11_23
| Opra16_11_24 | Opra16_11_25 | Opra16_11_26 | Opra16_11_27
| Opra16_11_28 | Opra16_11_29 | Opra16_11_30 | Opra16_11_31
| Opra16_11_32 | Opra16_11_33 | Opra16_11_34 | Opra16_11_35
| Opra16_11_36 | Opra16_11_37 | Opra16_11_38 | Opra16_11_39
| Opra16_11_40 | Opra16_11_41 | Opra16_11_42 | Opra16_11_43
| Opra16_11_44 | Opra16_11_45 | Opra16_11_46 | Opra16_11_47
| Opra16_11_48 | Opra16_11_49 | Opra16_11_50 | Opra16_11_51
| Opra16_11_52 | Opra16_11_53 | Opra16_11_54 | Opra16_11_55
| Opra16_11_56 | Opra16_11_57 | Opra16_11_58 | Opra16_11_59
| Opra16_11_60 | Opra16_11_61 | Opra16_11_62 | Opra16_11_63
| Opra16_12_0 | Opra16_12_1 | Opra16_12_2 | Opra16_12_3
| Opra16_12_4 | Opra16_12_5 | Opra16_12_6 | Opra16_12_7
| Opra16_12_8 | Opra16_12_9 | Opra16_12_10 | Opra16_12_11
| Opra16_12_12 | Opra16_12_13 | Opra16_12_14 | Opra16_12_15
| Opra16_12_16 | Opra16_12_17 | Opra16_12_18 | Opra16_12_19
| Opra16_12_20 | Opra16_12_21 | Opra16_12_22 | Opra16_12_23
| Opra16_12_24 | Opra16_12_25 | Opra16_12_26 | Opra16_12_27
| Opra16_12_28 | Opra16_12_29 | Opra16_12_30 | Opra16_12_31
| Opra16_12_32 | Opra16_12_33 | Opra16_12_34 | Opra16_12_35
| Opra16_12_36 | Opra16_12_37 | Opra16_12_38 | Opra16_12_39
| Opra16_12_40 | Opra16_12_41 | Opra16_12_42 | Opra16_12_43
| Opra16_12_44 | Opra16_12_45 | Opra16_12_46 | Opra16_12_47
| Opra16_12_48 | Opra16_12_49 | Opra16_12_50 | Opra16_12_51
| Opra16_12_52 | Opra16_12_53 | Opra16_12_54 | Opra16_12_55
| Opra16_12_56 | Opra16_12_57 | Opra16_12_58 | Opra16_12_59
| Opra16_12_60 | Opra16_12_61 | Opra16_12_62 | Opra16_12_63
| Opra16_13_0 | Opra16_13_1 | Opra16_13_2 | Opra16_13_3
| Opra16_13_4 | Opra16_13_5 | Opra16_13_6 | Opra16_13_7
| Opra16_13_8 | Opra16_13_9 | Opra16_13_10 | Opra16_13_11
| Opra16_13_12 | Opra16_13_13 | Opra16_13_14 | Opra16_13_15
| Opra16_13_16 | Opra16_13_17 | Opra16_13_18 | Opra16_13_19
| Opra16_13_20 | Opra16_13_21 | Opra16_13_22 | Opra16_13_23
| Opra16_13_24 | Opra16_13_25 | Opra16_13_26 | Opra16_13_27
| Opra16_13_28 | Opra16_13_29 | Opra16_13_30 | Opra16_13_31
| Opra16_13_32 | Opra16_13_33 | Opra16_13_34 | Opra16_13_35
| Opra16_13_36 | Opra16_13_37 | Opra16_13_38 | Opra16_13_39
| Opra16_13_40 | Opra16_13_41 | Opra16_13_42 | Opra16_13_43
| Opra16_13_44 | Opra16_13_45 | Opra16_13_46 | Opra16_13_47
| Opra16_13_48 | Opra16_13_49 | Opra16_13_50 | Opra16_13_51
| Opra16_13_52 | Opra16_13_53 | Opra16_13_54 | Opra16_13_55
| Opra16_13_56 | Opra16_13_57 | Opra16_13_58 | Opra16_13_59
| Opra16_13_60 | Opra16_13_61 | Opra16_13_62 | Opra16_13_63
| Opra16_14_0 | Opra16_14_1 | Opra16_14_2 | Opra16_14_3
| Opra16_14_4 | Opra16_14_5 | Opra16_14_6 | Opra16_14_7
| Opra16_14_8 | Opra16_14_9 | Opra16_14_10 | Opra16_14_11
| Opra16_14_12 | Opra16_14_13 | Opra16_14_14 | Opra16_14_15
| Opra16_14_16 | Opra16_14_17 | Opra16_14_18 | Opra16_14_19
| Opra16_14_20 | Opra16_14_21 | Opra16_14_22 | Opra16_14_23
| Opra16_14_24 | Opra16_14_25 | Opra16_14_26 | Opra16_14_27
| Opra16_14_28 | Opra16_14_29 | Opra16_14_30 | Opra16_14_31
| Opra16_14_32 | Opra16_14_33 | Opra16_14_34 | Opra16_14_35
| Opra16_14_36 | Opra16_14_37 | Opra16_14_38 | Opra16_14_39
| Opra16_14_40 | Opra16_14_41 | Opra16_14_42 | Opra16_14_43
| Opra16_14_44 | Opra16_14_45 | Opra16_14_46 | Opra16_14_47
| Opra16_14_48 | Opra16_14_49 | Opra16_14_50 | Opra16_14_51
| Opra16_14_52 | Opra16_14_53 | Opra16_14_54 | Opra16_14_55
| Opra16_14_56 | Opra16_14_57 | Opra16_14_58 | Opra16_14_59
| Opra16_14_60 | Opra16_14_61 | Opra16_14_62 | Opra16_14_63
| Opra16_15_0 | Opra16_15_1 | Opra16_15_2 | Opra16_15_3
| Opra16_15_4 | Opra16_15_5 | Opra16_15_6 | Opra16_15_7
| Opra16_15_8 | Opra16_15_9 | Opra16_15_10 | Opra16_15_11
| Opra16_15_12 | Opra16_15_13 | Opra16_15_14 | Opra16_15_15
| Opra16_15_16 | Opra16_15_17 | Opra16_15_18 | Opra16_15_19
| Opra16_15_20 | Opra16_15_21 | Opra16_15_22 | Opra16_15_23
| Opra16_15_24 | Opra16_15_25 | Opra16_15_26 | Opra16_15_27
| Opra16_15_28 | Opra16_15_29 | Opra16_15_30 | Opra16_15_31
| Opra16_15_32 | Opra16_15_33 | Opra16_15_34 | Opra16_15_35
| Opra16_15_36 | Opra16_15_37 | Opra16_15_38 | Opra16_15_39
| Opra16_15_40 | Opra16_15_41 | Opra16_15_42 | Opra16_15_43
| Opra16_15_44 | Opra16_15_45 | Opra16_15_46 | Opra16_15_47
| Opra16_15_48 | Opra16_15_49 | Opra16_15_50 | Opra16_15_51
| Opra16_15_52 | Opra16_15_53 | Opra16_15_54 | Opra16_15_55
| Opra16_15_56 | Opra16_15_57 | Opra16_15_58 | Opra16_15_59
| Opra16_15_60 | Opra16_15_61 | Opra16_15_62 | Opra16_15_63
| Opra16_16_0 | Opra16_16_1 | Opra16_16_2 | Opra16_16_3
| Opra16_16_4 | Opra16_16_5 | Opra16_16_6 | Opra16_16_7
| Opra16_16_8 | Opra16_16_9 | Opra16_16_10 | Opra16_16_11
| Opra16_16_12 | Opra16_16_13 | Opra16_16_14 | Opra16_16_15
| Opra16_16_16 | Opra16_16_17 | Opra16_16_18 | Opra16_16_19
| Opra16_16_20 | Opra16_16_21 | Opra16_16_22 | Opra16_16_23
| Opra16_16_24 | Opra16_16_25 | Opra16_16_26 | Opra16_16_27
| Opra16_16_28 | Opra16_16_29 | Opra16_16_30 | Opra16_16_31
| Opra16_16_32 | Opra16_16_33 | Opra16_16_34 | Opra16_16_35
| Opra16_16_36 | Opra16_16_37 | Opra16_16_38 | Opra16_16_39
| Opra16_16_40 | Opra16_16_41 | Opra16_16_42 | Opra16_16_43
| Opra16_16_44 | Opra16_16_45 | Opra16_16_46 | Opra16_16_47
| Opra16_16_48 | Opra16_16_49 | Opra16_16_50 | Opra16_16_51
| Opra16_16_52 | Opra16_16_53 | Opra16_16_54 | Opra16_16_55
| Opra16_16_56 | Opra16_16_57 | Opra16_16_58 | Opra16_16_59
| Opra16_16_60 | Opra16_16_61 | Opra16_16_62 | Opra16_16_63
| Opra16_17_0 | Opra16_17_1 | Opra16_17_2 | Opra16_17_3
| Opra16_17_4 | Opra16_17_5 | Opra16_17_6 | Opra16_17_7
| Opra16_17_8 | Opra16_17_9 | Opra16_17_10 | Opra16_17_11
| Opra16_17_12 | Opra16_17_13 | Opra16_17_14 | Opra16_17_15
| Opra16_17_16 | Opra16_17_17 | Opra16_17_18 | Opra16_17_19
| Opra16_17_20 | Opra16_17_21 | Opra16_17_22 | Opra16_17_23
| Opra16_17_24 | Opra16_17_25 | Opra16_17_26 | Opra16_17_27
| Opra16_17_28 | Opra16_17_29 | Opra16_17_30 | Opra16_17_31
| Opra16_17_32 | Opra16_17_33 | Opra16_17_34 | Opra16_17_35
| Opra16_17_36 | Opra16_17_37 | Opra16_17_38 | Opra16_17_39
| Opra16_17_40 | Opra16_17_41 | Opra16_17_42 | Opra16_17_43
| Opra16_17_44 | Opra16_17_45 | Opra16_17_46 | Opra16_17_47
| Opra16_17_48 | Opra16_17_49 | Opra16_17_50 | Opra16_17_51
| Opra16_17_52 | Opra16_17_53 | Opra16_17_54 | Opra16_17_55
| Opra16_17_56 | Opra16_17_57 | Opra16_17_58 | Opra16_17_59
| Opra16_17_60 | Opra16_17_61 | Opra16_17_62 | Opra16_17_63
| Opra16_18_0 | Opra16_18_1 | Opra16_18_2 | Opra16_18_3
| Opra16_18_4 | Opra16_18_5 | Opra16_18_6 | Opra16_18_7
| Opra16_18_8 | Opra16_18_9 | Opra16_18_10 | Opra16_18_11
| Opra16_18_12 | Opra16_18_13 | Opra16_18_14 | Opra16_18_15
| Opra16_18_16 | Opra16_18_17 | Opra16_18_18 | Opra16_18_19
| Opra16_18_20 | Opra16_18_21 | Opra16_18_22 | Opra16_18_23
| Opra16_18_24 | Opra16_18_25 | Opra16_18_26 | Opra16_18_27
| Opra16_18_28 | Opra16_18_29 | Opra16_18_30 | Opra16_18_31
| Opra16_18_32 | Opra16_18_33 | Opra16_18_34 | Opra16_18_35
| Opra16_18_36 | Opra16_18_37 | Opra16_18_38 | Opra16_18_39
| Opra16_18_40 | Opra16_18_41 | Opra16_18_42 | Opra16_18_43
| Opra16_18_44 | Opra16_18_45 | Opra16_18_46 | Opra16_18_47
| Opra16_18_48 | Opra16_18_49 | Opra16_18_50 | Opra16_18_51
| Opra16_18_52 | Opra16_18_53 | Opra16_18_54 | Opra16_18_55
| Opra16_18_56 | Opra16_18_57 | Opra16_18_58 | Opra16_18_59
| Opra16_18_60 | Opra16_18_61 | Opra16_18_62 | Opra16_18_63
| Opra16_19_0 | Opra16_19_1 | Opra16_19_2 | Opra16_19_3
| Opra16_19_4 | Opra16_19_5 | Opra16_19_6 | Opra16_19_7
| Opra16_19_8 | Opra16_19_9 | Opra16_19_10 | Opra16_19_11
| Opra16_19_12 | Opra16_19_13 | Opra16_19_14 | Opra16_19_15
| Opra16_19_16 | Opra16_19_17 | Opra16_19_18 | Opra16_19_19
| Opra16_19_20 | Opra16_19_21 | Opra16_19_22 | Opra16_19_23
| Opra16_19_24 | Opra16_19_25 | Opra16_19_26 | Opra16_19_27
| Opra16_19_28 | Opra16_19_29 | Opra16_19_30 | Opra16_19_31
| Opra16_19_32 | Opra16_19_33 | Opra16_19_34 | Opra16_19_35
| Opra16_19_36 | Opra16_19_37 | Opra16_19_38 | Opra16_19_39
| Opra16_19_40 | Opra16_19_41 | Opra16_19_42 | Opra16_19_43
| Opra16_19_44 | Opra16_19_45 | Opra16_19_46 | Opra16_19_47
| Opra16_19_48 | Opra16_19_49 | Opra16_19_50 | Opra16_19_51
| Opra16_19_52 | Opra16_19_53 | Opra16_19_54 | Opra16_19_55
| Opra16_19_56 | Opra16_19_57 | Opra16_19_58 | Opra16_19_59
| Opra16_19_60 | Opra16_19_61 | Opra16_19_62 | Opra16_19_63
| Opra16_20_0 | Opra16_20_1 | Opra16_20_2 | Opra16_20_3
| Opra16_20_4 | Opra16_20_5 | Opra16_20_6 | Opra16_20_7
| Opra16_20_8 | Opra16_20_9 | Opra16_20_10 | Opra16_20_11
| Opra16_20_12 | Opra16_20_13 | Opra16_20_14 | Opra16_20_15
| Opra16_20_16 | Opra16_20_17 | Opra16_20_18 | Opra16_20_19
| Opra16_20_20 | Opra16_20_21 | Opra16_20_22 | Opra16_20_23
| Opra16_20_24 | Opra16_20_25 | Opra16_20_26 | Opra16_20_27
| Opra16_20_28 | Opra16_20_29 | Opra16_20_30 | Opra16_20_31
| Opra16_20_32 | Opra16_20_33 | Opra16_20_34 | Opra16_20_35
| Opra16_20_36 | Opra16_20_37 | Opra16_20_38 | Opra16_20_39
| Opra16_20_40 | Opra16_20_41 | Opra16_20_42 | Opra16_20_43
| Opra16_20_44 | Opra16_20_45 | Opra16_20_46 | Opra16_20_47
| Opra16_20_48 | Opra16_20_49 | Opra16_20_50 | Opra16_20_51
| Opra16_20_52 | Opra16_20_53 | Opra16_20_54 | Opra16_20_55
| Opra16_20_56 | Opra16_20_57 | Opra16_20_58 | Opra16_20_59
| Opra16_20_60 | Opra16_20_61 | Opra16_20_62 | Opra16_20_63
| Opra16_21_0 | Opra16_21_1 | Opra16_21_2 | Opra16_21_3
| Opra16_21_4 | Opra16_21_5 | Opra16_21_6 | Opra16_21_7
| Opra16_21_8 | Opra16_21_9 | Opra16_21_10 | Opra16_21_11
| Opra16_21_12 | Opra16_21_13 | Opra16_21_14 | Opra16_21_15
| Opra16_21_16 | Opra16_21_17 | Opra16_21_18 | Opra16_21_19
| Opra16_21_20 | Opra16_21_21 | Opra16_21_22 | Opra16_21_23
| Opra16_21_24 | Opra16_21_25 | Opra16_21_26 | Opra16_21_27
| Opra16_21_28 | Opra16_21_29 | Opra16_21_30 | Opra16_21_31
| Opra16_21_32 | Opra16_21_33 | Opra16_21_34 | Opra16_21_35
| Opra16_21_36 | Opra16_21_37 | Opra16_21_38 | Opra16_21_39
| Opra16_21_40 | Opra16_21_41 | Opra16_21_42 | Opra16_21_43
| Opra16_21_44 | Opra16_21_45 | Opra16_21_46 | Opra16_21_47
| Opra16_21_48 | Opra16_21_49 | Opra16_21_50 | Opra16_21_51
| Opra16_21_52 | Opra16_21_53 | Opra16_21_54 | Opra16_21_55
| Opra16_21_56 | Opra16_21_57 | Opra16_21_58 | Opra16_21_59
| Opra16_21_60 | Opra16_21_61 | Opra16_21_62 | Opra16_21_63
| Opra16_22_0 | Opra16_22_1 | Opra16_22_2 | Opra16_22_3
| Opra16_22_4 | Opra16_22_5 | Opra16_22_6 | Opra16_22_7
| Opra16_22_8 | Opra16_22_9 | Opra16_22_10 | Opra16_22_11
| Opra16_22_12 | Opra16_22_13 | Opra16_22_14 | Opra16_22_15
| Opra16_22_16 | Opra16_22_17 | Opra16_22_18 | Opra16_22_19
| Opra16_22_20 | Opra16_22_21 | Opra16_22_22 | Opra16_22_23
| Opra16_22_24 | Opra16_22_25 | Opra16_22_26 | Opra16_22_27
| Opra16_22_28 | Opra16_22_29 | Opra16_22_30 | Opra16_22_31
| Opra16_22_32 | Opra16_22_33 | Opra16_22_34 | Opra16_22_35
| Opra16_22_36 | Opra16_22_37 | Opra16_22_38 | Opra16_22_39
| Opra16_22_40 | Opra16_22_41 | Opra16_22_42 | Opra16_22_43
| Opra16_22_44 | Opra16_22_45 | Opra16_22_46 | Opra16_22_47
| Opra16_22_48 | Opra16_22_49 | Opra16_22_50 | Opra16_22_51
| Opra16_22_52 | Opra16_22_53 | Opra16_22_54 | Opra16_22_55
| Opra16_22_56 | Opra16_22_57 | Opra16_22_58 | Opra16_22_59
| Opra16_22_60 | Opra16_22_61 | Opra16_22_62 | Opra16_22_63
| Opra16_23_0 | Opra16_23_1 | Opra16_23_2 | Opra16_23_3
| Opra16_23_4 | Opra16_23_5 | Opra16_23_6 | Opra16_23_7
| Opra16_23_8 | Opra16_23_9 | Opra16_23_10 | Opra16_23_11
| Opra16_23_12 | Opra16_23_13 | Opra16_23_14 | Opra16_23_15
| Opra16_23_16 | Opra16_23_17 | Opra16_23_18 | Opra16_23_19
| Opra16_23_20 | Opra16_23_21 | Opra16_23_22 | Opra16_23_23
| Opra16_23_24 | Opra16_23_25 | Opra16_23_26 | Opra16_23_27
| Opra16_23_28 | Opra16_23_29 | Opra16_23_30 | Opra16_23_31
| Opra16_23_32 | Opra16_23_33 | Opra16_23_34 | Opra16_23_35
| Opra16_23_36 | Opra16_23_37 | Opra16_23_38 | Opra16_23_39
| Opra16_23_40 | Opra16_23_41 | Opra16_23_42 | Opra16_23_43
| Opra16_23_44 | Opra16_23_45 | Opra16_23_46 | Opra16_23_47
| Opra16_23_48 | Opra16_23_49 | Opra16_23_50 | Opra16_23_51
| Opra16_23_52 | Opra16_23_53 | Opra16_23_54 | Opra16_23_55
| Opra16_23_56 | Opra16_23_57 | Opra16_23_58 | Opra16_23_59
| Opra16_23_60 | Opra16_23_61 | Opra16_23_62 | Opra16_23_63
| Opra16_24_0 | Opra16_24_1 | Opra16_24_2 | Opra16_24_3
| Opra16_24_4 | Opra16_24_5 | Opra16_24_6 | Opra16_24_7
| Opra16_24_8 | Opra16_24_9 | Opra16_24_10 | Opra16_24_11
| Opra16_24_12 | Opra16_24_13 | Opra16_24_14 | Opra16_24_15
| Opra16_24_16 | Opra16_24_17 | Opra16_24_18 | Opra16_24_19
| Opra16_24_20 | Opra16_24_21 | Opra16_24_22 | Opra16_24_23
| Opra16_24_24 | Opra16_24_25 | Opra16_24_26 | Opra16_24_27
| Opra16_24_28 | Opra16_24_29 | Opra16_24_30 | Opra16_24_31
| Opra16_24_32 | Opra16_24_33 | Opra16_24_34 | Opra16_24_35
| Opra16_24_36 | Opra16_24_37 | Opra16_24_38 | Opra16_24_39
| Opra16_24_40 | Opra16_24_41 | Opra16_24_42 | Opra16_24_43
| Opra16_24_44 | Opra16_24_45 | Opra16_24_46 | Opra16_24_47
| Opra16_24_48 | Opra16_24_49 | Opra16_24_50 | Opra16_24_51
| Opra16_24_52 | Opra16_24_53 | Opra16_24_54 | Opra16_24_55
| Opra16_24_56 | Opra16_24_57 | Opra16_24_58 | Opra16_24_59
| Opra16_24_60 | Opra16_24_61 | Opra16_24_62 | Opra16_24_63
| Opra16_25_0 | Opra16_25_1 | Opra16_25_2 | Opra16_25_3
| Opra16_25_4 | Opra16_25_5 | Opra16_25_6 | Opra16_25_7
| Opra16_25_8 | Opra16_25_9 | Opra16_25_10 | Opra16_25_11
| Opra16_25_12 | Opra16_25_13 | Opra16_25_14 | Opra16_25_15
| Opra16_25_16 | Opra16_25_17 | Opra16_25_18 | Opra16_25_19
| Opra16_25_20 | Opra16_25_21 | Opra16_25_22 | Opra16_25_23
| Opra16_25_24 | Opra16_25_25 | Opra16_25_26 | Opra16_25_27
| Opra16_25_28 | Opra16_25_29 | Opra16_25_30 | Opra16_25_31
| Opra16_25_32 | Opra16_25_33 | Opra16_25_34 | Opra16_25_35
| Opra16_25_36 | Opra16_25_37 | Opra16_25_38 | Opra16_25_39
| Opra16_25_40 | Opra16_25_41 | Opra16_25_42 | Opra16_25_43
| Opra16_25_44 | Opra16_25_45 | Opra16_25_46 | Opra16_25_47
| Opra16_25_48 | Opra16_25_49 | Opra16_25_50 | Opra16_25_51
| Opra16_25_52 | Opra16_25_53 | Opra16_25_54 | Opra16_25_55
| Opra16_25_56 | Opra16_25_57 | Opra16_25_58 | Opra16_25_59
| Opra16_25_60 | Opra16_25_61 | Opra16_25_62 | Opra16_25_63
| Opra16_26_0 | Opra16_26_1 | Opra16_26_2 | Opra16_26_3
| Opra16_26_4 | Opra16_26_5 | Opra16_26_6 | Opra16_26_7
| Opra16_26_8 | Opra16_26_9 | Opra16_26_10 | Opra16_26_11
| Opra16_26_12 | Opra16_26_13 | Opra16_26_14 | Opra16_26_15
| Opra16_26_16 | Opra16_26_17 | Opra16_26_18 | Opra16_26_19
| Opra16_26_20 | Opra16_26_21 | Opra16_26_22 | Opra16_26_23
| Opra16_26_24 | Opra16_26_25 | Opra16_26_26 | Opra16_26_27
| Opra16_26_28 | Opra16_26_29 | Opra16_26_30 | Opra16_26_31
| Opra16_26_32 | Opra16_26_33 | Opra16_26_34 | Opra16_26_35
| Opra16_26_36 | Opra16_26_37 | Opra16_26_38 | Opra16_26_39
| Opra16_26_40 | Opra16_26_41 | Opra16_26_42 | Opra16_26_43
| Opra16_26_44 | Opra16_26_45 | Opra16_26_46 | Opra16_26_47
| Opra16_26_48 | Opra16_26_49 | Opra16_26_50 | Opra16_26_51
| Opra16_26_52 | Opra16_26_53 | Opra16_26_54 | Opra16_26_55
| Opra16_26_56 | Opra16_26_57 | Opra16_26_58 | Opra16_26_59
| Opra16_26_60 | Opra16_26_61 | Opra16_26_62 | Opra16_26_63
| Opra16_27_0 | Opra16_27_1 | Opra16_27_2 | Opra16_27_3
| Opra16_27_4 | Opra16_27_5 | Opra16_27_6 | Opra16_27_7
| Opra16_27_8 | Opra16_27_9 | Opra16_27_10 | Opra16_27_11
| Opra16_27_12 | Opra16_27_13 | Opra16_27_14 | Opra16_27_15
| Opra16_27_16 | Opra16_27_17 | Opra16_27_18 | Opra16_27_19
| Opra16_27_20 | Opra16_27_21 | Opra16_27_22 | Opra16_27_23
| Opra16_27_24 | Opra16_27_25 | Opra16_27_26 | Opra16_27_27
| Opra16_27_28 | Opra16_27_29 | Opra16_27_30 | Opra16_27_31
| Opra16_27_32 | Opra16_27_33 | Opra16_27_34 | Opra16_27_35
| Opra16_27_36 | Opra16_27_37 | Opra16_27_38 | Opra16_27_39
| Opra16_27_40 | Opra16_27_41 | Opra16_27_42 | Opra16_27_43
| Opra16_27_44 | Opra16_27_45 | Opra16_27_46 | Opra16_27_47
| Opra16_27_48 | Opra16_27_49 | Opra16_27_50 | Opra16_27_51
| Opra16_27_52 | Opra16_27_53 | Opra16_27_54 | Opra16_27_55
| Opra16_27_56 | Opra16_27_57 | Opra16_27_58 | Opra16_27_59
| Opra16_27_60 | Opra16_27_61 | Opra16_27_62 | Opra16_27_63
| Opra16_28_0 | Opra16_28_1 | Opra16_28_2 | Opra16_28_3
| Opra16_28_4 | Opra16_28_5 | Opra16_28_6 | Opra16_28_7
| Opra16_28_8 | Opra16_28_9 | Opra16_28_10 | Opra16_28_11
| Opra16_28_12 | Opra16_28_13 | Opra16_28_14 | Opra16_28_15
| Opra16_28_16 | Opra16_28_17 | Opra16_28_18 | Opra16_28_19
| Opra16_28_20 | Opra16_28_21 | Opra16_28_22 | Opra16_28_23
| Opra16_28_24 | Opra16_28_25 | Opra16_28_26 | Opra16_28_27
| Opra16_28_28 | Opra16_28_29 | Opra16_28_30 | Opra16_28_31
| Opra16_28_32 | Opra16_28_33 | Opra16_28_34 | Opra16_28_35
| Opra16_28_36 | Opra16_28_37 | Opra16_28_38 | Opra16_28_39
| Opra16_28_40 | Opra16_28_41 | Opra16_28_42 | Opra16_28_43
| Opra16_28_44 | Opra16_28_45 | Opra16_28_46 | Opra16_28_47
| Opra16_28_48 | Opra16_28_49 | Opra16_28_50 | Opra16_28_51
| Opra16_28_52 | Opra16_28_53 | Opra16_28_54 | Opra16_28_55
| Opra16_28_56 | Opra16_28_57 | Opra16_28_58 | Opra16_28_59
| Opra16_28_60 | Opra16_28_61 | Opra16_28_62 | Opra16_28_63
| Opra16_29_0 | Opra16_29_1 | Opra16_29_2 | Opra16_29_3
| Opra16_29_4 | Opra16_29_5 | Opra16_29_6 | Opra16_29_7
| Opra16_29_8 | Opra16_29_9 | Opra16_29_10 | Opra16_29_11
| Opra16_29_12 | Opra16_29_13 | Opra16_29_14 | Opra16_29_15
| Opra16_29_16 | Opra16_29_17 | Opra16_29_18 | Opra16_29_19
| Opra16_29_20 | Opra16_29_21 | Opra16_29_22 | Opra16_29_23
| Opra16_29_24 | Opra16_29_25 | Opra16_29_26 | Opra16_29_27
| Opra16_29_28 | Opra16_29_29 | Opra16_29_30 | Opra16_29_31
| Opra16_29_32 | Opra16_29_33 | Opra16_29_34 | Opra16_29_35
| Opra16_29_36 | Opra16_29_37 | Opra16_29_38 | Opra16_29_39
| Opra16_29_40 | Opra16_29_41 | Opra16_29_42 | Opra16_29_43
| Opra16_29_44 | Opra16_29_45 | Opra16_29_46 | Opra16_29_47
| Opra16_29_48 | Opra16_29_49 | Opra16_29_50 | Opra16_29_51
| Opra16_29_52 | Opra16_29_53 | Opra16_29_54 | Opra16_29_55
| Opra16_29_56 | Opra16_29_57 | Opra16_29_58 | Opra16_29_59
| Opra16_29_60 | Opra16_29_61 | Opra16_29_62 | Opra16_29_63
| Opra16_30_0 | Opra16_30_1 | Opra16_30_2 | Opra16_30_3
| Opra16_30_4 | Opra16_30_5 | Opra16_30_6 | Opra16_30_7
| Opra16_30_8 | Opra16_30_9 | Opra16_30_10 | Opra16_30_11
| Opra16_30_12 | Opra16_30_13 | Opra16_30_14 | Opra16_30_15
| Opra16_30_16 | Opra16_30_17 | Opra16_30_18 | Opra16_30_19
| Opra16_30_20 | Opra16_30_21 | Opra16_30_22 | Opra16_30_23
| Opra16_30_24 | Opra16_30_25 | Opra16_30_26 | Opra16_30_27
| Opra16_30_28 | Opra16_30_29 | Opra16_30_30 | Opra16_30_31
| Opra16_30_32 | Opra16_30_33 | Opra16_30_34 | Opra16_30_35
| Opra16_30_36 | Opra16_30_37 | Opra16_30_38 | Opra16_30_39
| Opra16_30_40 | Opra16_30_41 | Opra16_30_42 | Opra16_30_43
| Opra16_30_44 | Opra16_30_45 | Opra16_30_46 | Opra16_30_47
| Opra16_30_48 | Opra16_30_49 | Opra16_30_50 | Opra16_30_51
| Opra16_30_52 | Opra16_30_53 | Opra16_30_54 | Opra16_30_55
| Opra16_30_56 | Opra16_30_57 | Opra16_30_58 | Opra16_30_59
| Opra16_30_60 | Opra16_30_61 | Opra16_30_62 | Opra16_30_63
| Opra16_31_0 | Opra16_31_1 | Opra16_31_2 | Opra16_31_3
| Opra16_31_4 | Opra16_31_5 | Opra16_31_6 | Opra16_31_7
| Opra16_31_8 | Opra16_31_9 | Opra16_31_10 | Opra16_31_11
| Opra16_31_12 | Opra16_31_13 | Opra16_31_14 | Opra16_31_15
| Opra16_31_16 | Opra16_31_17 | Opra16_31_18 | Opra16_31_19
| Opra16_31_20 | Opra16_31_21 | Opra16_31_22 | Opra16_31_23
| Opra16_31_24 | Opra16_31_25 | Opra16_31_26 | Opra16_31_27
| Opra16_31_28 | Opra16_31_29 | Opra16_31_30 | Opra16_31_31
| Opra16_31_32 | Opra16_31_33 | Opra16_31_34 | Opra16_31_35
| Opra16_31_36 | Opra16_31_37 | Opra16_31_38 | Opra16_31_39
| Opra16_31_40 | Opra16_31_41 | Opra16_31_42 | Opra16_31_43
| Opra16_31_44 | Opra16_31_45 | Opra16_31_46 | Opra16_31_47
| Opra16_31_48 | Opra16_31_49 | Opra16_31_50 | Opra16_31_51
| Opra16_31_52 | Opra16_31_53 | Opra16_31_54 | Opra16_31_55
| Opra16_31_56 | Opra16_31_57 | Opra16_31_58 | Opra16_31_59
| Opra16_31_60 | Opra16_31_61 | Opra16_31_62 | Opra16_31_63
| Opra16_32_0 | Opra16_32_1 | Opra16_32_2 | Opra16_32_3
| Opra16_32_4 | Opra16_32_5 | Opra16_32_6 | Opra16_32_7
| Opra16_32_8 | Opra16_32_9 | Opra16_32_10 | Opra16_32_11
| Opra16_32_12 | Opra16_32_13 | Opra16_32_14 | Opra16_32_15
| Opra16_32_16 | Opra16_32_17 | Opra16_32_18 | Opra16_32_19
| Opra16_32_20 | Opra16_32_21 | Opra16_32_22 | Opra16_32_23
| Opra16_32_24 | Opra16_32_25 | Opra16_32_26 | Opra16_32_27
| Opra16_32_28 | Opra16_32_29 | Opra16_32_30 | Opra16_32_31
| Opra16_32_32 | Opra16_32_33 | Opra16_32_34 | Opra16_32_35
| Opra16_32_36 | Opra16_32_37 | Opra16_32_38 | Opra16_32_39
| Opra16_32_40 | Opra16_32_41 | Opra16_32_42 | Opra16_32_43
| Opra16_32_44 | Opra16_32_45 | Opra16_32_46 | Opra16_32_47
| Opra16_32_48 | Opra16_32_49 | Opra16_32_50 | Opra16_32_51
| Opra16_32_52 | Opra16_32_53 | Opra16_32_54 | Opra16_32_55
| Opra16_32_56 | Opra16_32_57 | Opra16_32_58 | Opra16_32_59
| Opra16_32_60 | Opra16_32_61 | Opra16_32_62 | Opra16_32_63
| Opra16_33_0 | Opra16_33_1 | Opra16_33_2 | Opra16_33_3
| Opra16_33_4 | Opra16_33_5 | Opra16_33_6 | Opra16_33_7
| Opra16_33_8 | Opra16_33_9 | Opra16_33_10 | Opra16_33_11
| Opra16_33_12 | Opra16_33_13 | Opra16_33_14 | Opra16_33_15
| Opra16_33_16 | Opra16_33_17 | Opra16_33_18 | Opra16_33_19
| Opra16_33_20 | Opra16_33_21 | Opra16_33_22 | Opra16_33_23
| Opra16_33_24 | Opra16_33_25 | Opra16_33_26 | Opra16_33_27
| Opra16_33_28 | Opra16_33_29 | Opra16_33_30 | Opra16_33_31
| Opra16_33_32 | Opra16_33_33 | Opra16_33_34 | Opra16_33_35
| Opra16_33_36 | Opra16_33_37 | Opra16_33_38 | Opra16_33_39
| Opra16_33_40 | Opra16_33_41 | Opra16_33_42 | Opra16_33_43
| Opra16_33_44 | Opra16_33_45 | Opra16_33_46 | Opra16_33_47
| Opra16_33_48 | Opra16_33_49 | Opra16_33_50 | Opra16_33_51
| Opra16_33_52 | Opra16_33_53 | Opra16_33_54 | Opra16_33_55
| Opra16_33_56 | Opra16_33_57 | Opra16_33_58 | Opra16_33_59
| Opra16_33_60 | Opra16_33_61 | Opra16_33_62 | Opra16_33_63
| Opra16_34_0 | Opra16_34_1 | Opra16_34_2 | Opra16_34_3
| Opra16_34_4 | Opra16_34_5 | Opra16_34_6 | Opra16_34_7
| Opra16_34_8 | Opra16_34_9 | Opra16_34_10 | Opra16_34_11
| Opra16_34_12 | Opra16_34_13 | Opra16_34_14 | Opra16_34_15
| Opra16_34_16 | Opra16_34_17 | Opra16_34_18 | Opra16_34_19
| Opra16_34_20 | Opra16_34_21 | Opra16_34_22 | Opra16_34_23
| Opra16_34_24 | Opra16_34_25 | Opra16_34_26 | Opra16_34_27
| Opra16_34_28 | Opra16_34_29 | Opra16_34_30 | Opra16_34_31
| Opra16_34_32 | Opra16_34_33 | Opra16_34_34 | Opra16_34_35
| Opra16_34_36 | Opra16_34_37 | Opra16_34_38 | Opra16_34_39
| Opra16_34_40 | Opra16_34_41 | Opra16_34_42 | Opra16_34_43
| Opra16_34_44 | Opra16_34_45 | Opra16_34_46 | Opra16_34_47
| Opra16_34_48 | Opra16_34_49 | Opra16_34_50 | Opra16_34_51
| Opra16_34_52 | Opra16_34_53 | Opra16_34_54 | Opra16_34_55
| Opra16_34_56 | Opra16_34_57 | Opra16_34_58 | Opra16_34_59
| Opra16_34_60 | Opra16_34_61 | Opra16_34_62 | Opra16_34_63
| Opra16_35_0 | Opra16_35_1 | Opra16_35_2 | Opra16_35_3
| Opra16_35_4 | Opra16_35_5 | Opra16_35_6 | Opra16_35_7
| Opra16_35_8 | Opra16_35_9 | Opra16_35_10 | Opra16_35_11
| Opra16_35_12 | Opra16_35_13 | Opra16_35_14 | Opra16_35_15
| Opra16_35_16 | Opra16_35_17 | Opra16_35_18 | Opra16_35_19
| Opra16_35_20 | Opra16_35_21 | Opra16_35_22 | Opra16_35_23
| Opra16_35_24 | Opra16_35_25 | Opra16_35_26 | Opra16_35_27
| Opra16_35_28 | Opra16_35_29 | Opra16_35_30 | Opra16_35_31
| Opra16_35_32 | Opra16_35_33 | Opra16_35_34 | Opra16_35_35
| Opra16_35_36 | Opra16_35_37 | Opra16_35_38 | Opra16_35_39
| Opra16_35_40 | Opra16_35_41 | Opra16_35_42 | Opra16_35_43
| Opra16_35_44 | Opra16_35_45 | Opra16_35_46 | Opra16_35_47
| Opra16_35_48 | Opra16_35_49 | Opra16_35_50 | Opra16_35_51
| Opra16_35_52 | Opra16_35_53 | Opra16_35_54 | Opra16_35_55
| Opra16_35_56 | Opra16_35_57 | Opra16_35_58 | Opra16_35_59
| Opra16_35_60 | Opra16_35_61 | Opra16_35_62 | Opra16_35_63
| Opra16_36_0 | Opra16_36_1 | Opra16_36_2 | Opra16_36_3
| Opra16_36_4 | Opra16_36_5 | Opra16_36_6 | Opra16_36_7
| Opra16_36_8 | Opra16_36_9 | Opra16_36_10 | Opra16_36_11
| Opra16_36_12 | Opra16_36_13 | Opra16_36_14 | Opra16_36_15
| Opra16_36_16 | Opra16_36_17 | Opra16_36_18 | Opra16_36_19
| Opra16_36_20 | Opra16_36_21 | Opra16_36_22 | Opra16_36_23
| Opra16_36_24 | Opra16_36_25 | Opra16_36_26 | Opra16_36_27
| Opra16_36_28 | Opra16_36_29 | Opra16_36_30 | Opra16_36_31
| Opra16_36_32 | Opra16_36_33 | Opra16_36_34 | Opra16_36_35
| Opra16_36_36 | Opra16_36_37 | Opra16_36_38 | Opra16_36_39
| Opra16_36_40 | Opra16_36_41 | Opra16_36_42 | Opra16_36_43
| Opra16_36_44 | Opra16_36_45 | Opra16_36_46 | Opra16_36_47
| Opra16_36_48 | Opra16_36_49 | Opra16_36_50 | Opra16_36_51
| Opra16_36_52 | Opra16_36_53 | Opra16_36_54 | Opra16_36_55
| Opra16_36_56 | Opra16_36_57 | Opra16_36_58 | Opra16_36_59
| Opra16_36_60 | Opra16_36_61 | Opra16_36_62 | Opra16_36_63
| Opra16_37_0 | Opra16_37_1 | Opra16_37_2 | Opra16_37_3
| Opra16_37_4 | Opra16_37_5 | Opra16_37_6 | Opra16_37_7
| Opra16_37_8 | Opra16_37_9 | Opra16_37_10 | Opra16_37_11
| Opra16_37_12 | Opra16_37_13 | Opra16_37_14 | Opra16_37_15
| Opra16_37_16 | Opra16_37_17 | Opra16_37_18 | Opra16_37_19
| Opra16_37_20 | Opra16_37_21 | Opra16_37_22 | Opra16_37_23
| Opra16_37_24 | Opra16_37_25 | Opra16_37_26 | Opra16_37_27
| Opra16_37_28 | Opra16_37_29 | Opra16_37_30 | Opra16_37_31
| Opra16_37_32 | Opra16_37_33 | Opra16_37_34 | Opra16_37_35
| Opra16_37_36 | Opra16_37_37 | Opra16_37_38 | Opra16_37_39
| Opra16_37_40 | Opra16_37_41 | Opra16_37_42 | Opra16_37_43
| Opra16_37_44 | Opra16_37_45 | Opra16_37_46 | Opra16_37_47
| Opra16_37_48 | Opra16_37_49 | Opra16_37_50 | Opra16_37_51
| Opra16_37_52 | Opra16_37_53 | Opra16_37_54 | Opra16_37_55
| Opra16_37_56 | Opra16_37_57 | Opra16_37_58 | Opra16_37_59
| Opra16_37_60 | Opra16_37_61 | Opra16_37_62 | Opra16_37_63
| Opra16_38_0 | Opra16_38_1 | Opra16_38_2 | Opra16_38_3
| Opra16_38_4 | Opra16_38_5 | Opra16_38_6 | Opra16_38_7
| Opra16_38_8 | Opra16_38_9 | Opra16_38_10 | Opra16_38_11
| Opra16_38_12 | Opra16_38_13 | Opra16_38_14 | Opra16_38_15
| Opra16_38_16 | Opra16_38_17 | Opra16_38_18 | Opra16_38_19
| Opra16_38_20 | Opra16_38_21 | Opra16_38_22 | Opra16_38_23
| Opra16_38_24 | Opra16_38_25 | Opra16_38_26 | Opra16_38_27
| Opra16_38_28 | Opra16_38_29 | Opra16_38_30 | Opra16_38_31
| Opra16_38_32 | Opra16_38_33 | Opra16_38_34 | Opra16_38_35
| Opra16_38_36 | Opra16_38_37 | Opra16_38_38 | Opra16_38_39
| Opra16_38_40 | Opra16_38_41 | Opra16_38_42 | Opra16_38_43
| Opra16_38_44 | Opra16_38_45 | Opra16_38_46 | Opra16_38_47
| Opra16_38_48 | Opra16_38_49 | Opra16_38_50 | Opra16_38_51
| Opra16_38_52 | Opra16_38_53 | Opra16_38_54 | Opra16_38_55
| Opra16_38_56 | Opra16_38_57 | Opra16_38_58 | Opra16_38_59
| Opra16_38_60 | Opra16_38_61 | Opra16_38_62 | Opra16_38_63
| Opra16_39_0 | Opra16_39_1 | Opra16_39_2 | Opra16_39_3
| Opra16_39_4 | Opra16_39_5 | Opra16_39_6 | Opra16_39_7
| Opra16_39_8 | Opra16_39_9 | Opra16_39_10 | Opra16_39_11
| Opra16_39_12 | Opra16_39_13 | Opra16_39_14 | Opra16_39_15
| Opra16_39_16 | Opra16_39_17 | Opra16_39_18 | Opra16_39_19
| Opra16_39_20 | Opra16_39_21 | Opra16_39_22 | Opra16_39_23
| Opra16_39_24 | Opra16_39_25 | Opra16_39_26 | Opra16_39_27
| Opra16_39_28 | Opra16_39_29 | Opra16_39_30 | Opra16_39_31
| Opra16_39_32 | Opra16_39_33 | Opra16_39_34 | Opra16_39_35
| Opra16_39_36 | Opra16_39_37 | Opra16_39_38 | Opra16_39_39
| Opra16_39_40 | Opra16_39_41 | Opra16_39_42 | Opra16_39_43
| Opra16_39_44 | Opra16_39_45 | Opra16_39_46 | Opra16_39_47
| Opra16_39_48 | Opra16_39_49 | Opra16_39_50 | Opra16_39_51
| Opra16_39_52 | Opra16_39_53 | Opra16_39_54 | Opra16_39_55
| Opra16_39_56 | Opra16_39_57 | Opra16_39_58 | Opra16_39_59
| Opra16_39_60 | Opra16_39_61 | Opra16_39_62 | Opra16_39_63
| Opra16_40_0 | Opra16_40_1 | Opra16_40_2 | Opra16_40_3
| Opra16_40_4 | Opra16_40_5 | Opra16_40_6 | Opra16_40_7
| Opra16_40_8 | Opra16_40_9 | Opra16_40_10 | Opra16_40_11
| Opra16_40_12 | Opra16_40_13 | Opra16_40_14 | Opra16_40_15
| Opra16_40_16 | Opra16_40_17 | Opra16_40_18 | Opra16_40_19
| Opra16_40_20 | Opra16_40_21 | Opra16_40_22 | Opra16_40_23
| Opra16_40_24 | Opra16_40_25 | Opra16_40_26 | Opra16_40_27
| Opra16_40_28 | Opra16_40_29 | Opra16_40_30 | Opra16_40_31
| Opra16_40_32 | Opra16_40_33 | Opra16_40_34 | Opra16_40_35
| Opra16_40_36 | Opra16_40_37 | Opra16_40_38 | Opra16_40_39
| Opra16_40_40 | Opra16_40_41 | Opra16_40_42 | Opra16_40_43
| Opra16_40_44 | Opra16_40_45 | Opra16_40_46 | Opra16_40_47
| Opra16_40_48 | Opra16_40_49 | Opra16_40_50 | Opra16_40_51
| Opra16_40_52 | Opra16_40_53 | Opra16_40_54 | Opra16_40_55
| Opra16_40_56 | Opra16_40_57 | Opra16_40_58 | Opra16_40_59
| Opra16_40_60 | Opra16_40_61 | Opra16_40_62 | Opra16_40_63
| Opra16_41_0 | Opra16_41_1 | Opra16_41_2 | Opra16_41_3
| Opra16_41_4 | Opra16_41_5 | Opra16_41_6 | Opra16_41_7
| Opra16_41_8 | Opra16_41_9 | Opra16_41_10 | Opra16_41_11
| Opra16_41_12 | Opra16_41_13 | Opra16_41_14 | Opra16_41_15
| Opra16_41_16 | Opra16_41_17 | Opra16_41_18 | Opra16_41_19
| Opra16_41_20 | Opra16_41_21 | Opra16_41_22 | Opra16_41_23
| Opra16_41_24 | Opra16_41_25 | Opra16_41_26 | Opra16_41_27
| Opra16_41_28 | Opra16_41_29 | Opra16_41_30 | Opra16_41_31
| Opra16_41_32 | Opra16_41_33 | Opra16_41_34 | Opra16_41_35
| Opra16_41_36 | Opra16_41_37 | Opra16_41_38 | Opra16_41_39
| Opra16_41_40 | Opra16_41_41 | Opra16_41_42 | Opra16_41_43
| Opra16_41_44 | Opra16_41_45 | Opra16_41_46 | Opra16_41_47
| Opra16_41_48 | Opra16_41_49 | Opra16_41_50 | Opra16_41_51
| Opra16_41_52 | Opra16_41_53 | Opra16_41_54 | Opra16_41_55
| Opra16_41_56 | Opra16_41_57 | Opra16_41_58 | Opra16_41_59
| Opra16_41_60 | Opra16_41_61 | Opra16_41_62 | Opra16_41_63
| Opra16_42_0 | Opra16_42_1 | Opra16_42_2 | Opra16_42_3
| Opra16_42_4 | Opra16_42_5 | Opra16_42_6 | Opra16_42_7
| Opra16_42_8 | Opra16_42_9 | Opra16_42_10 | Opra16_42_11
| Opra16_42_12 | Opra16_42_13 | Opra16_42_14 | Opra16_42_15
| Opra16_42_16 | Opra16_42_17 | Opra16_42_18 | Opra16_42_19
| Opra16_42_20 | Opra16_42_21 | Opra16_42_22 | Opra16_42_23
| Opra16_42_24 | Opra16_42_25 | Opra16_42_26 | Opra16_42_27
| Opra16_42_28 | Opra16_42_29 | Opra16_42_30 | Opra16_42_31
| Opra16_42_32 | Opra16_42_33 | Opra16_42_34 | Opra16_42_35
| Opra16_42_36 | Opra16_42_37 | Opra16_42_38 | Opra16_42_39
| Opra16_42_40 | Opra16_42_41 | Opra16_42_42 | Opra16_42_43
| Opra16_42_44 | Opra16_42_45 | Opra16_42_46 | Opra16_42_47
| Opra16_42_48 | Opra16_42_49 | Opra16_42_50 | Opra16_42_51
| Opra16_42_52 | Opra16_42_53 | Opra16_42_54 | Opra16_42_55
| Opra16_42_56 | Opra16_42_57 | Opra16_42_58 | Opra16_42_59
| Opra16_42_60 | Opra16_42_61 | Opra16_42_62 | Opra16_42_63
| Opra16_43_0 | Opra16_43_1 | Opra16_43_2 | Opra16_43_3
| Opra16_43_4 | Opra16_43_5 | Opra16_43_6 | Opra16_43_7
| Opra16_43_8 | Opra16_43_9 | Opra16_43_10 | Opra16_43_11
| Opra16_43_12 | Opra16_43_13 | Opra16_43_14 | Opra16_43_15
| Opra16_43_16 | Opra16_43_17 | Opra16_43_18 | Opra16_43_19
| Opra16_43_20 | Opra16_43_21 | Opra16_43_22 | Opra16_43_23
| Opra16_43_24 | Opra16_43_25 | Opra16_43_26 | Opra16_43_27
| Opra16_43_28 | Opra16_43_29 | Opra16_43_30 | Opra16_43_31
| Opra16_43_32 | Opra16_43_33 | Opra16_43_34 | Opra16_43_35
| Opra16_43_36 | Opra16_43_37 | Opra16_43_38 | Opra16_43_39
| Opra16_43_40 | Opra16_43_41 | Opra16_43_42 | Opra16_43_43
| Opra16_43_44 | Opra16_43_45 | Opra16_43_46 | Opra16_43_47
| Opra16_43_48 | Opra16_43_49 | Opra16_43_50 | Opra16_43_51
| Opra16_43_52 | Opra16_43_53 | Opra16_43_54 | Opra16_43_55
| Opra16_43_56 | Opra16_43_57 | Opra16_43_58 | Opra16_43_59
| Opra16_43_60 | Opra16_43_61 | Opra16_43_62 | Opra16_43_63
| Opra16_44_0 | Opra16_44_1 | Opra16_44_2 | Opra16_44_3
| Opra16_44_4 | Opra16_44_5 | Opra16_44_6 | Opra16_44_7
| Opra16_44_8 | Opra16_44_9 | Opra16_44_10 | Opra16_44_11
| Opra16_44_12 | Opra16_44_13 | Opra16_44_14 | Opra16_44_15
| Opra16_44_16 | Opra16_44_17 | Opra16_44_18 | Opra16_44_19
| Opra16_44_20 | Opra16_44_21 | Opra16_44_22 | Opra16_44_23
| Opra16_44_24 | Opra16_44_25 | Opra16_44_26 | Opra16_44_27
| Opra16_44_28 | Opra16_44_29 | Opra16_44_30 | Opra16_44_31
| Opra16_44_32 | Opra16_44_33 | Opra16_44_34 | Opra16_44_35
| Opra16_44_36 | Opra16_44_37 | Opra16_44_38 | Opra16_44_39
| Opra16_44_40 | Opra16_44_41 | Opra16_44_42 | Opra16_44_43
| Opra16_44_44 | Opra16_44_45 | Opra16_44_46 | Opra16_44_47
| Opra16_44_48 | Opra16_44_49 | Opra16_44_50 | Opra16_44_51
| Opra16_44_52 | Opra16_44_53 | Opra16_44_54 | Opra16_44_55
| Opra16_44_56 | Opra16_44_57 | Opra16_44_58 | Opra16_44_59
| Opra16_44_60 | Opra16_44_61 | Opra16_44_62 | Opra16_44_63
| Opra16_45_0 | Opra16_45_1 | Opra16_45_2 | Opra16_45_3
| Opra16_45_4 | Opra16_45_5 | Opra16_45_6 | Opra16_45_7
| Opra16_45_8 | Opra16_45_9 | Opra16_45_10 | Opra16_45_11
| Opra16_45_12 | Opra16_45_13 | Opra16_45_14 | Opra16_45_15
| Opra16_45_16 | Opra16_45_17 | Opra16_45_18 | Opra16_45_19
| Opra16_45_20 | Opra16_45_21 | Opra16_45_22 | Opra16_45_23
| Opra16_45_24 | Opra16_45_25 | Opra16_45_26 | Opra16_45_27
| Opra16_45_28 | Opra16_45_29 | Opra16_45_30 | Opra16_45_31
| Opra16_45_32 | Opra16_45_33 | Opra16_45_34 | Opra16_45_35
| Opra16_45_36 | Opra16_45_37 | Opra16_45_38 | Opra16_45_39
| Opra16_45_40 | Opra16_45_41 | Opra16_45_42 | Opra16_45_43
| Opra16_45_44 | Opra16_45_45 | Opra16_45_46 | Opra16_45_47
| Opra16_45_48 | Opra16_45_49 | Opra16_45_50 | Opra16_45_51
| Opra16_45_52 | Opra16_45_53 | Opra16_45_54 | Opra16_45_55
| Opra16_45_56 | Opra16_45_57 | Opra16_45_58 | Opra16_45_59
| Opra16_45_60 | Opra16_45_61 | Opra16_45_62 | Opra16_45_63
| Opra16_46_0 | Opra16_46_1 | Opra16_46_2 | Opra16_46_3
| Opra16_46_4 | Opra16_46_5 | Opra16_46_6 | Opra16_46_7
| Opra16_46_8 | Opra16_46_9 | Opra16_46_10 | Opra16_46_11
| Opra16_46_12 | Opra16_46_13 | Opra16_46_14 | Opra16_46_15
| Opra16_46_16 | Opra16_46_17 | Opra16_46_18 | Opra16_46_19
| Opra16_46_20 | Opra16_46_21 | Opra16_46_22 | Opra16_46_23
| Opra16_46_24 | Opra16_46_25 | Opra16_46_26 | Opra16_46_27
| Opra16_46_28 | Opra16_46_29 | Opra16_46_30 | Opra16_46_31
| Opra16_46_32 | Opra16_46_33 | Opra16_46_34 | Opra16_46_35
| Opra16_46_36 | Opra16_46_37 | Opra16_46_38 | Opra16_46_39
| Opra16_46_40 | Opra16_46_41 | Opra16_46_42 | Opra16_46_43
| Opra16_46_44 | Opra16_46_45 | Opra16_46_46 | Opra16_46_47
| Opra16_46_48 | Opra16_46_49 | Opra16_46_50 | Opra16_46_51
| Opra16_46_52 | Opra16_46_53 | Opra16_46_54 | Opra16_46_55
| Opra16_46_56 | Opra16_46_57 | Opra16_46_58 | Opra16_46_59
| Opra16_46_60 | Opra16_46_61 | Opra16_46_62 | Opra16_46_63
| Opra16_47_0 | Opra16_47_1 | Opra16_47_2 | Opra16_47_3
| Opra16_47_4 | Opra16_47_5 | Opra16_47_6 | Opra16_47_7
| Opra16_47_8 | Opra16_47_9 | Opra16_47_10 | Opra16_47_11
| Opra16_47_12 | Opra16_47_13 | Opra16_47_14 | Opra16_47_15
| Opra16_47_16 | Opra16_47_17 | Opra16_47_18 | Opra16_47_19
| Opra16_47_20 | Opra16_47_21 | Opra16_47_22 | Opra16_47_23
| Opra16_47_24 | Opra16_47_25 | Opra16_47_26 | Opra16_47_27
| Opra16_47_28 | Opra16_47_29 | Opra16_47_30 | Opra16_47_31
| Opra16_47_32 | Opra16_47_33 | Opra16_47_34 | Opra16_47_35
| Opra16_47_36 | Opra16_47_37 | Opra16_47_38 | Opra16_47_39
| Opra16_47_40 | Opra16_47_41 | Opra16_47_42 | Opra16_47_43
| Opra16_47_44 | Opra16_47_45 | Opra16_47_46 | Opra16_47_47
| Opra16_47_48 | Opra16_47_49 | Opra16_47_50 | Opra16_47_51
| Opra16_47_52 | Opra16_47_53 | Opra16_47_54 | Opra16_47_55
| Opra16_47_56 | Opra16_47_57 | Opra16_47_58 | Opra16_47_59
| Opra16_47_60 | Opra16_47_61 | Opra16_47_62 | Opra16_47_63
| Opra16_48_0 | Opra16_48_1 | Opra16_48_2 | Opra16_48_3
| Opra16_48_4 | Opra16_48_5 | Opra16_48_6 | Opra16_48_7
| Opra16_48_8 | Opra16_48_9 | Opra16_48_10 | Opra16_48_11
| Opra16_48_12 | Opra16_48_13 | Opra16_48_14 | Opra16_48_15
| Opra16_48_16 | Opra16_48_17 | Opra16_48_18 | Opra16_48_19
| Opra16_48_20 | Opra16_48_21 | Opra16_48_22 | Opra16_48_23
| Opra16_48_24 | Opra16_48_25 | Opra16_48_26 | Opra16_48_27
| Opra16_48_28 | Opra16_48_29 | Opra16_48_30 | Opra16_48_31
| Opra16_48_32 | Opra16_48_33 | Opra16_48_34 | Opra16_48_35
| Opra16_48_36 | Opra16_48_37 | Opra16_48_38 | Opra16_48_39
| Opra16_48_40 | Opra16_48_41 | Opra16_48_42 | Opra16_48_43
| Opra16_48_44 | Opra16_48_45 | Opra16_48_46 | Opra16_48_47
| Opra16_48_48 | Opra16_48_49 | Opra16_48_50 | Opra16_48_51
| Opra16_48_52 | Opra16_48_53 | Opra16_48_54 | Opra16_48_55
| Opra16_48_56 | Opra16_48_57 | Opra16_48_58 | Opra16_48_59
| Opra16_48_60 | Opra16_48_61 | Opra16_48_62 | Opra16_48_63
| Opra16_49_0 | Opra16_49_1 | Opra16_49_2 | Opra16_49_3
| Opra16_49_4 | Opra16_49_5 | Opra16_49_6 | Opra16_49_7
| Opra16_49_8 | Opra16_49_9 | Opra16_49_10 | Opra16_49_11
| Opra16_49_12 | Opra16_49_13 | Opra16_49_14 | Opra16_49_15
| Opra16_49_16 | Opra16_49_17 | Opra16_49_18 | Opra16_49_19
| Opra16_49_20 | Opra16_49_21 | Opra16_49_22 | Opra16_49_23
| Opra16_49_24 | Opra16_49_25 | Opra16_49_26 | Opra16_49_27
| Opra16_49_28 | Opra16_49_29 | Opra16_49_30 | Opra16_49_31
| Opra16_49_32 | Opra16_49_33 | Opra16_49_34 | Opra16_49_35
| Opra16_49_36 | Opra16_49_37 | Opra16_49_38 | Opra16_49_39
| Opra16_49_40 | Opra16_49_41 | Opra16_49_42 | Opra16_49_43
| Opra16_49_44 | Opra16_49_45 | Opra16_49_46 | Opra16_49_47
| Opra16_49_48 | Opra16_49_49 | Opra16_49_50 | Opra16_49_51
| Opra16_49_52 | Opra16_49_53 | Opra16_49_54 | Opra16_49_55
| Opra16_49_56 | Opra16_49_57 | Opra16_49_58 | Opra16_49_59
| Opra16_49_60 | Opra16_49_61 | Opra16_49_62 | Opra16_49_63
| Opra16_50_0 | Opra16_50_1 | Opra16_50_2 | Opra16_50_3
| Opra16_50_4 | Opra16_50_5 | Opra16_50_6 | Opra16_50_7
| Opra16_50_8 | Opra16_50_9 | Opra16_50_10 | Opra16_50_11
| Opra16_50_12 | Opra16_50_13 | Opra16_50_14 | Opra16_50_15
| Opra16_50_16 | Opra16_50_17 | Opra16_50_18 | Opra16_50_19
| Opra16_50_20 | Opra16_50_21 | Opra16_50_22 | Opra16_50_23
| Opra16_50_24 | Opra16_50_25 | Opra16_50_26 | Opra16_50_27
| Opra16_50_28 | Opra16_50_29 | Opra16_50_30 | Opra16_50_31
| Opra16_50_32 | Opra16_50_33 | Opra16_50_34 | Opra16_50_35
| Opra16_50_36 | Opra16_50_37 | Opra16_50_38 | Opra16_50_39
| Opra16_50_40 | Opra16_50_41 | Opra16_50_42 | Opra16_50_43
| Opra16_50_44 | Opra16_50_45 | Opra16_50_46 | Opra16_50_47
| Opra16_50_48 | Opra16_50_49 | Opra16_50_50 | Opra16_50_51
| Opra16_50_52 | Opra16_50_53 | Opra16_50_54 | Opra16_50_55
| Opra16_50_56 | Opra16_50_57 | Opra16_50_58 | Opra16_50_59
| Opra16_50_60 | Opra16_50_61 | Opra16_50_62 | Opra16_50_63
| Opra16_51_0 | Opra16_51_1 | Opra16_51_2 | Opra16_51_3
| Opra16_51_4 | Opra16_51_5 | Opra16_51_6 | Opra16_51_7
| Opra16_51_8 | Opra16_51_9 | Opra16_51_10 | Opra16_51_11
| Opra16_51_12 | Opra16_51_13 | Opra16_51_14 | Opra16_51_15
| Opra16_51_16 | Opra16_51_17 | Opra16_51_18 | Opra16_51_19
| Opra16_51_20 | Opra16_51_21 | Opra16_51_22 | Opra16_51_23
| Opra16_51_24 | Opra16_51_25 | Opra16_51_26 | Opra16_51_27
| Opra16_51_28 | Opra16_51_29 | Opra16_51_30 | Opra16_51_31
| Opra16_51_32 | Opra16_51_33 | Opra16_51_34 | Opra16_51_35
| Opra16_51_36 | Opra16_51_37 | Opra16_51_38 | Opra16_51_39
| Opra16_51_40 | Opra16_51_41 | Opra16_51_42 | Opra16_51_43
| Opra16_51_44 | Opra16_51_45 | Opra16_51_46 | Opra16_51_47
| Opra16_51_48 | Opra16_51_49 | Opra16_51_50 | Opra16_51_51
| Opra16_51_52 | Opra16_51_53 | Opra16_51_54 | Opra16_51_55
| Opra16_51_56 | Opra16_51_57 | Opra16_51_58 | Opra16_51_59
| Opra16_51_60 | Opra16_51_61 | Opra16_51_62 | Opra16_51_63
| Opra16_52_0 | Opra16_52_1 | Opra16_52_2 | Opra16_52_3
| Opra16_52_4 | Opra16_52_5 | Opra16_52_6 | Opra16_52_7
| Opra16_52_8 | Opra16_52_9 | Opra16_52_10 | Opra16_52_11
| Opra16_52_12 | Opra16_52_13 | Opra16_52_14 | Opra16_52_15
| Opra16_52_16 | Opra16_52_17 | Opra16_52_18 | Opra16_52_19
| Opra16_52_20 | Opra16_52_21 | Opra16_52_22 | Opra16_52_23
| Opra16_52_24 | Opra16_52_25 | Opra16_52_26 | Opra16_52_27
| Opra16_52_28 | Opra16_52_29 | Opra16_52_30 | Opra16_52_31
| Opra16_52_32 | Opra16_52_33 | Opra16_52_34 | Opra16_52_35
| Opra16_52_36 | Opra16_52_37 | Opra16_52_38 | Opra16_52_39
| Opra16_52_40 | Opra16_52_41 | Opra16_52_42 | Opra16_52_43
| Opra16_52_44 | Opra16_52_45 | Opra16_52_46 | Opra16_52_47
| Opra16_52_48 | Opra16_52_49 | Opra16_52_50 | Opra16_52_51
| Opra16_52_52 | Opra16_52_53 | Opra16_52_54 | Opra16_52_55
| Opra16_52_56 | Opra16_52_57 | Opra16_52_58 | Opra16_52_59
| Opra16_52_60 | Opra16_52_61 | Opra16_52_62 | Opra16_52_63
| Opra16_53_0 | Opra16_53_1 | Opra16_53_2 | Opra16_53_3
| Opra16_53_4 | Opra16_53_5 | Opra16_53_6 | Opra16_53_7
| Opra16_53_8 | Opra16_53_9 | Opra16_53_10 | Opra16_53_11
| Opra16_53_12 | Opra16_53_13 | Opra16_53_14 | Opra16_53_15
| Opra16_53_16 | Opra16_53_17 | Opra16_53_18 | Opra16_53_19
| Opra16_53_20 | Opra16_53_21 | Opra16_53_22 | Opra16_53_23
| Opra16_53_24 | Opra16_53_25 | Opra16_53_26 | Opra16_53_27
| Opra16_53_28 | Opra16_53_29 | Opra16_53_30 | Opra16_53_31
| Opra16_53_32 | Opra16_53_33 | Opra16_53_34 | Opra16_53_35
| Opra16_53_36 | Opra16_53_37 | Opra16_53_38 | Opra16_53_39
| Opra16_53_40 | Opra16_53_41 | Opra16_53_42 | Opra16_53_43
| Opra16_53_44 | Opra16_53_45 | Opra16_53_46 | Opra16_53_47
| Opra16_53_48 | Opra16_53_49 | Opra16_53_50 | Opra16_53_51
| Opra16_53_52 | Opra16_53_53 | Opra16_53_54 | Opra16_53_55
| Opra16_53_56 | Opra16_53_57 | Opra16_53_58 | Opra16_53_59
| Opra16_53_60 | Opra16_53_61 | Opra16_53_62 | Opra16_53_63
| Opra16_54_0 | Opra16_54_1 | Opra16_54_2 | Opra16_54_3
| Opra16_54_4 | Opra16_54_5 | Opra16_54_6 | Opra16_54_7
| Opra16_54_8 | Opra16_54_9 | Opra16_54_10 | Opra16_54_11
| Opra16_54_12 | Opra16_54_13 | Opra16_54_14 | Opra16_54_15
| Opra16_54_16 | Opra16_54_17 | Opra16_54_18 | Opra16_54_19
| Opra16_54_20 | Opra16_54_21 | Opra16_54_22 | Opra16_54_23
| Opra16_54_24 | Opra16_54_25 | Opra16_54_26 | Opra16_54_27
| Opra16_54_28 | Opra16_54_29 | Opra16_54_30 | Opra16_54_31
| Opra16_54_32 | Opra16_54_33 | Opra16_54_34 | Opra16_54_35
| Opra16_54_36 | Opra16_54_37 | Opra16_54_38 | Opra16_54_39
| Opra16_54_40 | Opra16_54_41 | Opra16_54_42 | Opra16_54_43
| Opra16_54_44 | Opra16_54_45 | Opra16_54_46 | Opra16_54_47
| Opra16_54_48 | Opra16_54_49 | Opra16_54_50 | Opra16_54_51
| Opra16_54_52 | Opra16_54_53 | Opra16_54_54 | Opra16_54_55
| Opra16_54_56 | Opra16_54_57 | Opra16_54_58 | Opra16_54_59
| Opra16_54_60 | Opra16_54_61 | Opra16_54_62 | Opra16_54_63
| Opra16_55_0 | Opra16_55_1 | Opra16_55_2 | Opra16_55_3
| Opra16_55_4 | Opra16_55_5 | Opra16_55_6 | Opra16_55_7
| Opra16_55_8 | Opra16_55_9 | Opra16_55_10 | Opra16_55_11
| Opra16_55_12 | Opra16_55_13 | Opra16_55_14 | Opra16_55_15
| Opra16_55_16 | Opra16_55_17 | Opra16_55_18 | Opra16_55_19
| Opra16_55_20 | Opra16_55_21 | Opra16_55_22 | Opra16_55_23
| Opra16_55_24 | Opra16_55_25 | Opra16_55_26 | Opra16_55_27
| Opra16_55_28 | Opra16_55_29 | Opra16_55_30 | Opra16_55_31
| Opra16_55_32 | Opra16_55_33 | Opra16_55_34 | Opra16_55_35
| Opra16_55_36 | Opra16_55_37 | Opra16_55_38 | Opra16_55_39
| Opra16_55_40 | Opra16_55_41 | Opra16_55_42 | Opra16_55_43
| Opra16_55_44 | Opra16_55_45 | Opra16_55_46 | Opra16_55_47
| Opra16_55_48 | Opra16_55_49 | Opra16_55_50 | Opra16_55_51
| Opra16_55_52 | Opra16_55_53 | Opra16_55_54 | Opra16_55_55
| Opra16_55_56 | Opra16_55_57 | Opra16_55_58 | Opra16_55_59
| Opra16_55_60 | Opra16_55_61 | Opra16_55_62 | Opra16_55_63
| Opra16_56_0 | Opra16_56_1 | Opra16_56_2 | Opra16_56_3
| Opra16_56_4 | Opra16_56_5 | Opra16_56_6 | Opra16_56_7
| Opra16_56_8 | Opra16_56_9 | Opra16_56_10 | Opra16_56_11
| Opra16_56_12 | Opra16_56_13 | Opra16_56_14 | Opra16_56_15
| Opra16_56_16 | Opra16_56_17 | Opra16_56_18 | Opra16_56_19
| Opra16_56_20 | Opra16_56_21 | Opra16_56_22 | Opra16_56_23
| Opra16_56_24 | Opra16_56_25 | Opra16_56_26 | Opra16_56_27
| Opra16_56_28 | Opra16_56_29 | Opra16_56_30 | Opra16_56_31
| Opra16_56_32 | Opra16_56_33 | Opra16_56_34 | Opra16_56_35
| Opra16_56_36 | Opra16_56_37 | Opra16_56_38 | Opra16_56_39
| Opra16_56_40 | Opra16_56_41 | Opra16_56_42 | Opra16_56_43
| Opra16_56_44 | Opra16_56_45 | Opra16_56_46 | Opra16_56_47
| Opra16_56_48 | Opra16_56_49 | Opra16_56_50 | Opra16_56_51
| Opra16_56_52 | Opra16_56_53 | Opra16_56_54 | Opra16_56_55
| Opra16_56_56 | Opra16_56_57 | Opra16_56_58 | Opra16_56_59
| Opra16_56_60 | Opra16_56_61 | Opra16_56_62 | Opra16_56_63
| Opra16_57_0 | Opra16_57_1 | Opra16_57_2 | Opra16_57_3
| Opra16_57_4 | Opra16_57_5 | Opra16_57_6 | Opra16_57_7
| Opra16_57_8 | Opra16_57_9 | Opra16_57_10 | Opra16_57_11
| Opra16_57_12 | Opra16_57_13 | Opra16_57_14 | Opra16_57_15
| Opra16_57_16 | Opra16_57_17 | Opra16_57_18 | Opra16_57_19
| Opra16_57_20 | Opra16_57_21 | Opra16_57_22 | Opra16_57_23
| Opra16_57_24 | Opra16_57_25 | Opra16_57_26 | Opra16_57_27
| Opra16_57_28 | Opra16_57_29 | Opra16_57_30 | Opra16_57_31
| Opra16_57_32 | Opra16_57_33 | Opra16_57_34 | Opra16_57_35
| Opra16_57_36 | Opra16_57_37 | Opra16_57_38 | Opra16_57_39
| Opra16_57_40 | Opra16_57_41 | Opra16_57_42 | Opra16_57_43
| Opra16_57_44 | Opra16_57_45 | Opra16_57_46 | Opra16_57_47
| Opra16_57_48 | Opra16_57_49 | Opra16_57_50 | Opra16_57_51
| Opra16_57_52 | Opra16_57_53 | Opra16_57_54 | Opra16_57_55
| Opra16_57_56 | Opra16_57_57 | Opra16_57_58 | Opra16_57_59
| Opra16_57_60 | Opra16_57_61 | Opra16_57_62 | Opra16_57_63
| Opra16_58_0 | Opra16_58_1 | Opra16_58_2 | Opra16_58_3
| Opra16_58_4 | Opra16_58_5 | Opra16_58_6 | Opra16_58_7
| Opra16_58_8 | Opra16_58_9 | Opra16_58_10 | Opra16_58_11
| Opra16_58_12 | Opra16_58_13 | Opra16_58_14 | Opra16_58_15
| Opra16_58_16 | Opra16_58_17 | Opra16_58_18 | Opra16_58_19
| Opra16_58_20 | Opra16_58_21 | Opra16_58_22 | Opra16_58_23
| Opra16_58_24 | Opra16_58_25 | Opra16_58_26 | Opra16_58_27
| Opra16_58_28 | Opra16_58_29 | Opra16_58_30 | Opra16_58_31
| Opra16_58_32 | Opra16_58_33 | Opra16_58_34 | Opra16_58_35
| Opra16_58_36 | Opra16_58_37 | Opra16_58_38 | Opra16_58_39
| Opra16_58_40 | Opra16_58_41 | Opra16_58_42 | Opra16_58_43
| Opra16_58_44 | Opra16_58_45 | Opra16_58_46 | Opra16_58_47
| Opra16_58_48 | Opra16_58_49 | Opra16_58_50 | Opra16_58_51
| Opra16_58_52 | Opra16_58_53 | Opra16_58_54 | Opra16_58_55
| Opra16_58_56 | Opra16_58_57 | Opra16_58_58 | Opra16_58_59
| Opra16_58_60 | Opra16_58_61 | Opra16_58_62 | Opra16_58_63
| Opra16_59_0 | Opra16_59_1 | Opra16_59_2 | Opra16_59_3
| Opra16_59_4 | Opra16_59_5 | Opra16_59_6 | Opra16_59_7
| Opra16_59_8 | Opra16_59_9 | Opra16_59_10 | Opra16_59_11
| Opra16_59_12 | Opra16_59_13 | Opra16_59_14 | Opra16_59_15
| Opra16_59_16 | Opra16_59_17 | Opra16_59_18 | Opra16_59_19
| Opra16_59_20 | Opra16_59_21 | Opra16_59_22 | Opra16_59_23
| Opra16_59_24 | Opra16_59_25 | Opra16_59_26 | Opra16_59_27
| Opra16_59_28 | Opra16_59_29 | Opra16_59_30 | Opra16_59_31
| Opra16_59_32 | Opra16_59_33 | Opra16_59_34 | Opra16_59_35
| Opra16_59_36 | Opra16_59_37 | Opra16_59_38 | Opra16_59_39
| Opra16_59_40 | Opra16_59_41 | Opra16_59_42 | Opra16_59_43
| Opra16_59_44 | Opra16_59_45 | Opra16_59_46 | Opra16_59_47
| Opra16_59_48 | Opra16_59_49 | Opra16_59_50 | Opra16_59_51
| Opra16_59_52 | Opra16_59_53 | Opra16_59_54 | Opra16_59_55
| Opra16_59_56 | Opra16_59_57 | Opra16_59_58 | Opra16_59_59
| Opra16_59_60 | Opra16_59_61 | Opra16_59_62 | Opra16_59_63
| Opra16_60_0 | Opra16_60_1 | Opra16_60_2 | Opra16_60_3
| Opra16_60_4 | Opra16_60_5 | Opra16_60_6 | Opra16_60_7
| Opra16_60_8 | Opra16_60_9 | Opra16_60_10 | Opra16_60_11
| Opra16_60_12 | Opra16_60_13 | Opra16_60_14 | Opra16_60_15
| Opra16_60_16 | Opra16_60_17 | Opra16_60_18 | Opra16_60_19
| Opra16_60_20 | Opra16_60_21 | Opra16_60_22 | Opra16_60_23
| Opra16_60_24 | Opra16_60_25 | Opra16_60_26 | Opra16_60_27
| Opra16_60_28 | Opra16_60_29 | Opra16_60_30 | Opra16_60_31
| Opra16_60_32 | Opra16_60_33 | Opra16_60_34 | Opra16_60_35
| Opra16_60_36 | Opra16_60_37 | Opra16_60_38 | Opra16_60_39
| Opra16_60_40 | Opra16_60_41 | Opra16_60_42 | Opra16_60_43
| Opra16_60_44 | Opra16_60_45 | Opra16_60_46 | Opra16_60_47
| Opra16_60_48 | Opra16_60_49 | Opra16_60_50 | Opra16_60_51
| Opra16_60_52 | Opra16_60_53 | Opra16_60_54 | Opra16_60_55
| Opra16_60_56 | Opra16_60_57 | Opra16_60_58 | Opra16_60_59
| Opra16_60_60 | Opra16_60_61 | Opra16_60_62 | Opra16_60_63
| Opra16_61_0 | Opra16_61_1 | Opra16_61_2 | Opra16_61_3
| Opra16_61_4 | Opra16_61_5 | Opra16_61_6 | Opra16_61_7
| Opra16_61_8 | Opra16_61_9 | Opra16_61_10 | Opra16_61_11
| Opra16_61_12 | Opra16_61_13 | Opra16_61_14 | Opra16_61_15
| Opra16_61_16 | Opra16_61_17 | Opra16_61_18 | Opra16_61_19
| Opra16_61_20 | Opra16_61_21 | Opra16_61_22 | Opra16_61_23
| Opra16_61_24 | Opra16_61_25 | Opra16_61_26 | Opra16_61_27
| Opra16_61_28 | Opra16_61_29 | Opra16_61_30 | Opra16_61_31
| Opra16_61_32 | Opra16_61_33 | Opra16_61_34 | Opra16_61_35
| Opra16_61_36 | Opra16_61_37 | Opra16_61_38 | Opra16_61_39
| Opra16_61_40 | Opra16_61_41 | Opra16_61_42 | Opra16_61_43
| Opra16_61_44 | Opra16_61_45 | Opra16_61_46 | Opra16_61_47
| Opra16_61_48 | Opra16_61_49 | Opra16_61_50 | Opra16_61_51
| Opra16_61_52 | Opra16_61_53 | Opra16_61_54 | Opra16_61_55
| Opra16_61_56 | Opra16_61_57 | Opra16_61_58 | Opra16_61_59
| Opra16_61_60 | Opra16_61_61 | Opra16_61_62 | Opra16_61_63
| Opra16_62_0 | Opra16_62_1 | Opra16_62_2 | Opra16_62_3
| Opra16_62_4 | Opra16_62_5 | Opra16_62_6 | Opra16_62_7
| Opra16_62_8 | Opra16_62_9 | Opra16_62_10 | Opra16_62_11
| Opra16_62_12 | Opra16_62_13 | Opra16_62_14 | Opra16_62_15
| Opra16_62_16 | Opra16_62_17 | Opra16_62_18 | Opra16_62_19
| Opra16_62_20 | Opra16_62_21 | Opra16_62_22 | Opra16_62_23
| Opra16_62_24 | Opra16_62_25 | Opra16_62_26 | Opra16_62_27
| Opra16_62_28 | Opra16_62_29 | Opra16_62_30 | Opra16_62_31
| Opra16_62_32 | Opra16_62_33 | Opra16_62_34 | Opra16_62_35
| Opra16_62_36 | Opra16_62_37 | Opra16_62_38 | Opra16_62_39
| Opra16_62_40 | Opra16_62_41 | Opra16_62_42 | Opra16_62_43
| Opra16_62_44 | Opra16_62_45 | Opra16_62_46 | Opra16_62_47
| Opra16_62_48 | Opra16_62_49 | Opra16_62_50 | Opra16_62_51
| Opra16_62_52 | Opra16_62_53 | Opra16_62_54 | Opra16_62_55
| Opra16_62_56 | Opra16_62_57 | Opra16_62_58 | Opra16_62_59
| Opra16_62_60 | Opra16_62_61 | Opra16_62_62 | Opra16_62_63
| Opra16_63_0 | Opra16_63_1 | Opra16_63_2 | Opra16_63_3
| Opra16_63_4 | Opra16_63_5 | Opra16_63_6 | Opra16_63_7
| Opra16_63_8 | Opra16_63_9 | Opra16_63_10 | Opra16_63_11
| Opra16_63_12 | Opra16_63_13 | Opra16_63_14 | Opra16_63_15
| Opra16_63_16 | Opra16_63_17 | Opra16_63_18 | Opra16_63_19
| Opra16_63_20 | Opra16_63_21 | Opra16_63_22 | Opra16_63_23
| Opra16_63_24 | Opra16_63_25 | Opra16_63_26 | Opra16_63_27
| Opra16_63_28 | Opra16_63_29 | Opra16_63_30 | Opra16_63_31
| Opra16_63_32 | Opra16_63_33 | Opra16_63_34 | Opra16_63_35
| Opra16_63_36 | Opra16_63_37 | Opra16_63_38 | Opra16_63_39
| Opra16_63_40 | Opra16_63_41 | Opra16_63_42 | Opra16_63_43
| Opra16_63_44 | Opra16_63_45 | Opra16_63_46 | Opra16_63_47
| Opra16_63_48 | Opra16_63_49 | Opra16_63_50 | Opra16_63_51
| Opra16_63_52 | Opra16_63_53 | Opra16_63_54 | Opra16_63_55
| Opra16_63_56 | Opra16_63_57 | Opra16_63_58 | Opra16_63_59
| Opra16_63_60 | Opra16_63_61 | Opra16_63_62 | Opra16_63_63
| Opra16_s_0 | Opra16_s_1 | Opra16_s_2 | Opra16_s_3
| Opra16_s_4 | Opra16_s_5 | Opra16_s_6 | Opra16_s_7
| Opra16_s_8 | Opra16_s_9 | Opra16_s_10 | Opra16_s_11
| Opra16_s_12 | Opra16_s_13 | Opra16_s_14 | Opra16_s_15
| Opra16_s_16 | Opra16_s_17 | Opra16_s_18 | Opra16_s_19
| Opra16_s_20 | Opra16_s_21 | Opra16_s_22 | Opra16_s_23
| Opra16_s_24 | Opra16_s_25 | Opra16_s_26 | Opra16_s_27
| Opra16_s_28 | Opra16_s_29 | Opra16_s_30 | Opra16_s_31
| Opra16_s_32 | Opra16_s_33 | Opra16_s_34 | Opra16_s_35
| Opra16_s_36 | Opra16_s_37 | Opra16_s_38 | Opra16_s_39
| Opra16_s_40 | Opra16_s_41 | Opra16_s_42 | Opra16_s_43
| Opra16_s_44 | Opra16_s_45 | Opra16_s_46 | Opra16_s_47
| Opra16_s_48 | Opra16_s_49 | Opra16_s_50 | Opra16_s_51
| Opra16_s_52 | Opra16_s_53 | Opra16_s_54 | Opra16_s_55
| Opra16_s_56 | Opra16_s_57 | Opra16_s_58 | Opra16_s_59
| Opra16_s_60 | Opra16_s_61 | Opra16_s_62 | Opra16_s_63
deriving (Eq, Ord, Read, Show, Enum, Bounded)
-- | The granularity parameter of the OPRA-16 calculus is, by definition, 16.
instance Opram Opra16 where
    m = const 16
-- | 'Calculus' instance for OPRA-16.
--
-- OPRA relations are binary, so the rank is 2. Parsing, printing and the
-- SPARQ\/GQR serialisations are shared with all other OPRA-m calculi via the
-- generic @*Opram@ helpers; only the names and the granularity (16) are
-- specific to this instance.
instance Calculus Opra16 where
    rank _ = 2
    -- Canonical calculus name, and the spelling expected by the GQR solver.
    cName _ = "opra-16"
    cNameGqr _ = "opra16"
    -- Relation I/O is delegated to the generic OPRA-m readers/printers.
    cReadRel = readOpram
    cShowRel = showOpram
    -- NOTE(review): GQR serialisation reuses the SPARQ form here, as in the
    -- sibling OPRA instances — presumably the formats coincide for OPRA.
    cSparqifyRel = sparqifyOpram
    cGqrifyRel = sparqifyOpram
    -- Partition of the base relations into areal and non-areal subsets.
    cBaserelationsArealList = areal cBaserelationsList
    cBaserelationsNonArealList = nonAreal cBaserelationsList
    -- Base-relation conversion parameterised by this calculus' granularity.
    bcConvert = opraConvert 16
| spatial-reasoning/zeno | src/Calculus/Opra16.hs | bsd-2-clause | 75,343 | 0 | 6 | 22,142 | 12,634 | 8,405 | 4,229 | 1,059 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.