code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
-----------------------------------------------------------------------------
-- Copyright : (c) Hanzhong Xu, Meng Meng 2016,
-- License : MIT License
--
-- Maintainer : hanzh.xu@gmail.com
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
-- HelloWorld
-----------------------------------------------------------------------------
module HelloWorld where
import Control.Arrow
import Data.SF
import Data.Maybe
-- | x_i denotes the ith element of the input, y_i denotes the ith element of the output.
-- $setup
-- >>> let test0 = [1,2,3,4]::[Int]
-- basic machines
-- | the sum of the history, y_n = \sum_{i=1}^n x_i
-- >>> sfmap sumSF test0
-- [1,3,6,10]
-- | Running sum of the input stream: y_n = x_1 + ... + x_n.
sumSF :: SF Int Int
sumSF = simpleSF step 0
  where
    -- Carry the accumulated total as state and emit it as output.
    step acc x = let acc' = acc + x in (acc', acc')
-- | y_n = x_n + 1
-- >>> sfmap plusOneSF test0
-- [2,3,4,5]
plusOneSF :: SF Int Int
plusOneSF = simpleSF (\() a -> ((), a + 1)) ()
-- | y_n = x_n * 2
-- >>> sfmap timesTwoSF test0
-- [2,4,6,8]
timesTwoSF :: SF Int Int
timesTwoSF = arrSF (\a -> a * 2)
-- combination machine
-- | y_n = \sum_{i=1}^n x_i * 2
-- >>> sfmap ttSF test0
-- [2,6,12,20]
ttSF = timesTwoSF >>> sumSF
-- | y_n = (x_n + 1, x_n * 2)
-- >>> sfmap ptSF test0
-- [(2,2),(3,4),(4,6),(5,8)]
ptSF = plusOneSF &&& timesTwoSF
-- |
-- >>> sfmap mergeOutSF [(2,2),(3,4),(4,6),(5,8)]
-- [4,7,10,13]
--
-- | y_n = (x_n + 1) + (x_n * 2)
-- >>> sfmap ((plusOneSF &&& timesTwoSF) >>> mergeOutSF) test0
-- [4,7,10,13]
-- | Merge a paired output stream by summing the components.
mergeOutSF :: SF (Int, Int) Int
mergeOutSF = arrSF (uncurry (+))
-- | A stack with three operations, push, pop and return the maximum integer in the stack.
data StackOP
= Push Int
| Pop
| Max
deriving Show
{-
-- | A naive implementation, and the max operation takes O(n) running time.
pushStk :: [Int] -> Int -> ([Int], Int)
pushStk [] a = ([a], a)
pushStk s a = (a:s, a)
popStk :: [Int] -> ([Int], Int)
popStk [] = ([], 0)
popStk (x:xs) = (xs, x)
maxStk :: [Int] -> ([Int], Int)
maxStk [] = ([], 0)
maxStk s = (s, maxList s)
maxList :: [Int] -> Int
maxList [] = 0
maxList (x:xs) = max x (maxList xs)
initial = []
-}
-- | A better implementation: the max operation takes O(1) running time, using a second stack to maintain the running maximum.
-- | Push a value onto the stack pair and echo the pushed value.
-- The second stack records running maxima: the new element joins it
-- only when it is at least the current maximum, so its top is always
-- the maximum of the whole stack (enabling an O(1) 'maxStk').
pushStk :: ([Int], [Int]) -> Int -> (([Int], [Int]), Int)
pushStk ([], []) a = (([a], [a]), a)
pushStk (vs@(_:_), ms@(m:_)) a
  | a >= m    = ((a : vs, a : ms), a)
  | otherwise = ((a : vs, ms), a)
-- | Pop the top value (0 when empty).  The auxiliary max-stack is
-- popped only when its top equals the value being removed, keeping its
-- top equal to the maximum of the remaining elements.
popStk :: ([Int], [Int]) -> (([Int], [Int]), Int)
popStk ([], []) = (([], []), 0)
popStk (v : vs, m : ms)
  | v == m    = ((vs, ms), v)
  | otherwise = ((vs, m : ms), v)
-- | Return the maximum element in O(1): it is the top of the auxiliary
-- max-stack.  An empty stack reports 0, mirroring 'popStk'.
maxStk :: ([Int], [Int]) -> (([Int], [Int]), Int)
maxStk ([], []) = (([], []), 0)
maxStk s@(_ : _, m : _) = (s, m)
initial = ([],[])
-- | Switching the algorithm without touching the code outside:
-- dispatch one stack operation to the corresponding implementation.
-- Only these bindings (plus 'initial') need to change to swap in the
-- naive list-based version commented out above.
sf :: ([Int], [Int]) -> StackOP -> (([Int], [Int]), Int)
sf s (Push a) = pushStk s a
sf s Pop = popStk s
sf s Max = maxStk s
-- |
-- >>> sfmap maxSF [Push 5, Push 3, Push 2, Max, Push 7, Max, Pop, Max]
-- [5,3,2,5,7,7,7,5]
maxSF :: SF StackOP Int
maxSF = simpleSF sf initial
| PseudoPower/AFSM | examples/SF/HelloWorld.hs | mit | 3,100 | 0 | 9 | 642 | 786 | 474 | 312 | 34 | 2 |
#!/usr/bin/env stack
-- stack ghci
{-# LANGUAGE LiberalTypeSynonyms #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Lens where
import Control.Lens
import Control.Monad (forM_, replicateM_)
import Control.Monad.Trans.Class
import Control.Monad.Trans.State
type Lens' a b = forall f . (Functor f) => (b -> f b) -> (a -> f a)
type Traversal' a b = forall f . (Applicative f) => (b -> f b) -> (a -> f a)
traversed :: Traversal' [a] a
data Game = Game
{ _score :: Int
, _units :: [Unit]
, _boss :: Unit
} deriving (Show)
data Unit = Unit
{ _health :: Int
, _position :: Point
} deriving (Show)
data Point = Point
{ _x :: Double
, _y :: Double
} deriving (Show)
-- score :: Lens' Game Int
-- score = lens _score (\game v -> game { _score = v })
makeLenses ''Game
makeLenses ''Unit
makeLenses ''Point
initialState :: Game
initialState = Game
{ _score = 0
, _units =
[ Unit
{ _health = 10
, _position = Point { _x = 3.5, _y = 7.0 }
}
, Unit
{ _health = 15
, _position = Point { _x = 1.0, _y = 1.0 }
}
, Unit
{ _health = 8
, _position = Point { _x = 0.0, _y = 2.1 }
}
]
, _boss = Unit
{ _health = 100
, _position = Point { _x = 0.0, _y = 0.0 }
}
}
-- newState <- execStateT strike initialState
-- newState^.boss.health
strike :: StateT Game IO ()
strike = do
lift $ putStrLn "*shrink*"
bossHP -= 10
bossHP :: Lens' Game Int
bossHP = boss.health
-- | Breathe fire at a point, damaging every unit within radius 1.0 by
-- 3 HP.  The original body referenced an unbound name @target@; it is
-- now the function's parameter, matching the call site
-- @fireBreath (Point 0.5 1.5)@ in 'battle'.
fireBreath :: Point -> StateT Game IO ()
fireBreath target = do
    lift $ putStrLn "*rawr*"
    units.traversed.(around target 1.0).health -= 3
-- | Traversal over every party member's health.
-- Fixes a typo in the original (@healt@): @health@ is the lens
-- generated by @makeLenses ''Unit@ above.
partyHP :: Traversal' Game Int
partyHP = units.traversed.health
retreat :: StateT Game IO ()
retreat = do
lift $ putStrLn "Retreat!"
zoom (units.traversed.position) $ do
x += 10
y += 10
toListOf :: Traversal' a b -> a -> [b]
-- toListOf partyHP newState ~ ^..
around :: Point -> Double -> Traversal' Unit Unit
around center radius = filtered (\unit -> (unit^.position.x - center^.x)^2 + (unit^.position.y - center^.y)^2 < radius^2 )
battle :: StateT Game IO ()
battle = do
forM_ ["Take that!", "and that!", "and that!"] $ \taunt -> do
lift $ putStrLn taunt
strike
fireBreath (Point 0.5 1.5)
replicateM_ 3 $ do
retreat
zoom (boss.position) $ do
x += 10
y += 10
| mortum5/programming | haskell/usefull/Lens.hs | mit | 2,516 | 1 | 19 | 820 | 818 | 448 | 370 | -1 | -1 |
module Settings.Config where
import Prelude
import Data.Maybe (fromMaybe)
import Language.Haskell.TH.Syntax
import System.Environment (lookupEnv)
import Yesod.Default.Config2 (configSettingsYml)
-- | Resolve the configuration file path at compile time: use the
-- @RH_CONFIG_PATH@ environment variable when set, otherwise fall back
-- to Yesod's default 'configSettingsYml'.
configPath :: Q FilePath
configPath = do
  path <- runIO $ lookupEnv "RH_CONFIG_PATH"
  return $ fromMaybe configSettingsYml path
| ruHaskell/ruhaskell-yesod | Settings/Config.hs | mit | 315 | 0 | 9 | 56 | 80 | 44 | 36 | 9 | 1 |
{-# LANGUAGE FlexibleInstances #-}
module Ternary.Core.Multiplication (
Triangle, AppliedTriangle, TS,
TriangleState (..),
MultiplicationState (..),
TriangleParam (TriangleParam),
MulState (MulState),
scalar, selfTerms,
initialTS, multKernel, fineStructure) where
import Ternary.Core.Kernel
import Ternary.Core.Digit
import Ternary.Core.Addition
import Control.Arrow (second)
-- Step by step we shall construct an efficient algorithm for exact
-- ternary multiplication. Iteratively, we develop a series of fully
-- functional multiplication algorithms, where each version becomes a
-- stepping stone towards the next version. The first iteration is
-- derived from first principles. Its implementation provides insight
-- because it can be explained, examined and understood. We call this
-- the fine-structure algorithm, because all the internal details of
-- the construction are explicitly modeled. Subsequent versions build
-- on previous versions, replacing inefficient data structures with
-- more efficient ones. These versions can only be explained in terms
-- of their predecessors. The final version will be about an order of
-- magnitude more efficient in both time and space.
-- See explain.txt for a detailed explanation of the basic algorithm.
scalar :: T2 -> Kernel T2 T2 Sa
scalar a = plus . multiplyT2 a
crossTerms :: T2 -> T2 -> Kernel (T2,T2) T2 ((Sa,Sa),Sa)
crossTerms a b = zipKernelsWith addT2 (scalar b) (scalar a) `serial` plus
selfTerms :: T2 -> T2 -> Kernel T2 T2 FirstTwoSteps
selfTerms a b = transformFirstTwo (const (embedT1 c)) (addT1 r . coerceT1)
where (c,r) = carry (multiplyT2 a b)
newtype TS = TS ((((Sa,Sa),Sa), FirstTwoSteps), Sa)
deriving (Show, Eq, Ord)
initialTS :: TS
initialTS = TS ((((Sa0,Sa0),Sa0), Step0), Sa0)
stepMatch :: FirstTwoSteps -> TS -> Bool
stepMatch a (TS ((_,b),_)) = a == b
type Triangle s = Kernel ((T2,T2),T2) T2 s
-- One piece of input (a single digit) comes from the output of the
-- preceding triangle in the chain. The other piece of input is the
-- same throughout the chain. More precisely, it remains constant for
-- one state transition of the complete chain. Here we fix the part
-- that is constant. See explain.txt for details.
type AppliedTriangle s = Kernel T2 T2 s
data TriangleParam = TriangleParam T2 T2
deriving (Show, Eq, Ord)
class TriangleState s where
initialState :: TriangleParam -> s
isSecondState :: s -> Bool
makeTriangle :: TriangleParam -> Triangle s
applyTriangle :: TriangleState s => (T2,T2) -> TriangleParam -> AppliedTriangle s
applyTriangle ab param r state = makeTriangle param (ab,r) state
buildCircuit :: TriangleParam -> Triangle ((((Sa,Sa),Sa), FirstTwoSteps), Sa)
buildCircuit (TriangleParam a b) =
zipKernelsWith addT2 (crossTerms a b) (selfTerms a b) `serial` plus
-- The presence of FirstTwoSteps in TS has two consequences. First,
-- we can never re-enter the initial state. Second, states that can
-- happen on the second step cannot happen at any other time and vice
-- versa.
instance TriangleState TS where
initialState = const initialTS
isSecondState = stepMatch Step1
makeTriangle param input (TS s) = second TS $! buildCircuit param input s
chained :: TriangleState s => (T2,T2) -> [TriangleParam] -> Kernel T2 T2 [s]
chained = chain . applyTriangle
step :: TriangleState s => (T2,T2) -> [TriangleParam] -> [s] -> (T2, [s])
step ab ps = chained ab ps irrelevant
where irrelevant = undefined :: T2
data MulState s = MulState [TriangleParam] [s]
deriving Show
-- | One transition of the multiplication kernel: feed the digit pair
-- @ab@ through the existing chain of triangles, then prepend a fresh
-- triangle for the next round.  Notice the "final cons" that adds an
-- initial state to prepare for the next round of chained transitions.
multKernel :: TriangleState s => Kernel (T2,T2) T2 (MulState s)
multKernel ab (MulState ps us) =
let (out, vs) = step ab ps us
p = uncurry TriangleParam ab
in (out, MulState (p:ps) (initialState p:vs))
class MultiplicationState s where
kernel :: Kernel (T2,T2) T2 s
initialMultiplicationState :: TriangleParam -> s
instance TriangleState s => MultiplicationState (MulState s) where
kernel = multKernel
initialMultiplicationState p = MulState [p] [initialState p]
-- | Algorithm selector for the fine-structure multiplication.
-- Only the /type/ @MulState TS@ matters here: the value exists so
-- callers can pick an instance at the type level, so it is presumably
-- never forced -- NOTE(review): confirm no call site evaluates it.
fineStructure :: MulState TS
fineStructure = undefined
| jeroennoels/exact-real | src/Ternary/Core/Multiplication.hs | mit | 4,286 | 0 | 11 | 799 | 1,078 | 604 | 474 | 68 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RankNTypes #-}
-- |
--
-- defines the syntax tree and template parsers the "Commands.TH" module hierarchy uses.
--
module Commands.TH.Syntax where
import Commands.Text.Parsec
import Commands.Grammar
import Data.List.NonEmpty (NonEmpty(..))
import Prelude hiding (head)
import Control.Applicative hiding (many,(<|>))
import Language.Haskell.TH.Syntax
-- | wraps 'pProductions'
pGrammar :: Parser Char Grammar
pGrammar = Grammar <$> pProductions
-- | we need the 'try', because 'pProductions' consumes 'newline's
--
pProductions :: Parser Char (NonEmpty Production)
pProductions = between whitespace (whitespace <* eof) $ pProduction `sepBy1Slow` whitespace
-- |
-- given the input template:
--
-- > [rule| data Command
-- > ReplaceWith replace Phrase with Phrase
-- > Click Times Button click
-- > Undo undo |]
--
-- 'pProduction' parses it into:
--
-- > Production ''Command [
-- > Variant ''ReplaceWith [Part "replace", Hole ''Phrase, Part "with", Hole ''Phrase],
-- > Variant ''Click [Hole ''Times, Hole ''Button, Part "click"],
-- > Variant ''Undo [Part "undo"]]
--
-- (pretty-printing 'NonEmpty' as @[]@, and eliding the @newtype@ constructors):
--
pProduction :: Parser Char Production
pProduction = Production
<$> (word "data" *> pNonTerminal <* newline)
<*> (pVariant `sepBy1Slow` newline)
<?> "Production"
-- |
-- given the input labeled-production:
--
-- > "ReplaceWith replace Phrase with Phrase"
--
-- 'pVariant' parses it into:
--
-- > Variant ''ReplaceWith (Part "replace" :| [Hole (NonTerminal ''Phrase), Part (Terminal "with"), Hole (NonTerminal ''Phrase)])
--
--
pVariant :: Parser Char Variant
pVariant = Variant
<$> (mkName <$> pCid)
<*> many1 pSymbol
<?> "Variant"
-- |
--
-- an alphanumeric Haskell identifier
pSymbol :: Parser Char Symbol
pSymbol = try (Part <$> pTerminal)
<|> try (Hole <$> pNonTerminal)
<?> "Symbol (alphanumeric Haskell identifier)"
-- |
-- e.g. lifted @(\'w\':"ith")@
pTerminal :: Parser Char Terminal
pTerminal = Terminal <$> pVid
-- |
-- e.g. lifted @(\'P\':"hrase")@
pNonTerminal :: Parser Char NonTerminal
pNonTerminal = (NonTerminal . mkName) <$> pCid
-- | i.e. @V@ariable @id@entifier
--
-- the alphabetic subset of valid Haskell type-level identifiers (no "_" or "'")
--
pVid :: Parser Char String
pVid = spaced ((:)
<$> lower
<*> many letter)
-- | i.e. @C@onstructor @id@entifier
--
-- the alphabetic subset of valid Haskell value-level identifiers (no "_" or "'")
--
pCid :: Parser Char String
pCid = spaced ((:)
<$> upper
<*> many letter)
-- |
-- here and elsewhere in this module hierarchy:
--
-- * "syntactic" versions of functions have a \"E\" suffix (i.e. 'Exp')
-- * "syntactic" versions of operators have a @| |@ circumfix (i.e. @[| |]@ for templates)
--
-- mirroring the target code makes the template code more readable for me.
--
-- the "syntactic" versions should obey (loosely...):
--
-- @let (|+|) :: Exp -> Exp -> Exp; x |+| y = UInfixE x (VarE '(+)) y@
-- @Language.Haskell.Meta.Parse.parseExp "x + y" == Right $ VarE 'x |+| VarE 'y@
--
-- or maybe:
--
-- @let fE :: Exp -> Exp -> Exp; fE x y = (VarE 'f) `AppE` x `AppE` y@
-- @Language.Haskell.Meta.Parse.parseExp "f x y" == Right $ fE (VarE 'x) (VarE 'y)@
--
-- this convention doesn't matter much, and I haven't thought it through, but I've been trying to think about how not to make my macro code illegible.
--
-- | Build the Template Haskell expression @lhs `op` rhs@, i.e. an
-- infix application of the named (left-associative) operator.
infixlE :: Name -> Exp -> Exp -> Exp
infixlE op lhs rhs = InfixE (Just lhs) (VarE op) (Just rhs)
| sboosali/Haskell-DragonNaturallySpeaking | sources/Commands/TH/Syntax.hs | mit | 3,571 | 0 | 11 | 652 | 495 | 303 | 192 | 41 | 1 |
module Unison.Runtime.Vector where
-- |
-- Fast sequence type based on skewed array-indexed tries
-- with large branching factor, `B`.
--
-- Asymptotics (assuming vector of size `N`):
-- O(1) worst-case access to indices [0,B)
-- O(1) average-case access to indices [N-B,N)
-- O(log_B(i)) worst-case access to index i
-- O(1) amortized snoc, O(log_B(N)) worst case
--
-- Some inspiration stolen from: http://julesjacobs.github.io/2014/11/11/immutable-vectors-csharp.html
import Data.List hiding (init,length)
import Prelude hiding (init,length)
import qualified Data.Vector as V
arity :: Int
arity = 64
data Vector a =
Vector { length :: !Int, hd :: !(V.Vector a), tl :: (Vector (V.Vector a)), buf :: !(V.Vector a) }
empty :: Vector a
empty = Vector 0 V.empty empty V.empty
isEmpty :: Vector a -> Bool
isEmpty v = length v == 0
snoc :: Vector a -> a -> Vector a
snoc (Vector n hd tl buf) a = case buf `V.snoc` a of
buf | V.length buf /= arity -> Vector (n+1) hd tl buf
| n == arity-1 -> Vector (n+1) buf tl V.empty
| otherwise -> Vector (n+1) hd (tl `snoc` buf) V.empty
unsnoc :: Vector a -> Maybe (Vector a, a)
unsnoc v | isEmpty v = Nothing
unsnoc v = Just (init v, unsafeLast v)
unsafeIndex :: Vector a -> Int -> a
unsafeIndex (Vector _ hd tl buf) i = case i of
_ | i < V.length hd -> hd `V.unsafeIndex` i
_ | i >= V.length hd + length tl * arity -> buf `V.unsafeIndex` (i - (length tl)*arity - V.length hd)
_ -> case (i - V.length hd) `divMod` arity of
(bucket,offset) -> tl `unsafeIndex` bucket `V.unsafeIndex` offset
unsafeLast :: Vector a -> a
unsafeLast v = unsafeIndex v (length v - 1)
-- | Safe indexing: 'Nothing' when the index is out of bounds.
-- The upper bound check must be strict (@i < length v@); the original
-- used @i <= length v@, so @at v (length v)@ reached 'unsafeIndex'
-- one past the end.
at :: Vector a -> Int -> Maybe a
at v i = if i >= 0 && i < length v then Just (unsafeIndex v i) else Nothing
last :: Vector a -> Maybe a
last v | isEmpty v = Nothing
last v = Just $ unsafeLast v
modifyLast :: (a -> a) -> Vector a -> Vector a
modifyLast f v | isEmpty v = v
| otherwise = init v `snoc` f (unsafeLast v)
-- | Drop the last element from this vector. Returns itself if empty.
init :: Vector a -> Vector a
init v@(Vector n hd tl buf) = case V.null buf of
False -> Vector (n-1) hd tl (V.init buf)
_ | n == V.length hd -> Vector (n-1) V.empty tl (V.init hd)
_ | n == 0 -> v
_ -> Vector (n-1) hd (init tl) (V.init (unsafeLast tl))
-- | Remove elements from the right end for as long as the predicate
-- holds on the last element.
dropRightWhile :: (a -> Bool) -> Vector a -> Vector a
dropRightWhile p v
  | isEmpty v        = v
  | p (unsafeLast v) = dropRightWhile p (init v)
  | otherwise        = v
toList :: Vector a -> [a]
toList v = map (unsafeIndex v) [0 .. length v - 1]
fromList :: [a] -> Vector a
fromList = foldl' snoc empty
instance Show a => Show (Vector a) where
show v = show (toList v)
instance Eq a => Eq (Vector a) where
v1 == v2 = toList v1 == toList v2
instance Ord a => Ord (Vector a) where
v1 `compare` v2 = toList v1 `compare` toList v2
instance Monoid (Vector a) where
mempty = empty
mappend (Vector 0 _ _ _) v2 = v2
mappend v1@(Vector n1 hd1 tl1 buf1) v2@(Vector n2 hd2 tl2 buf2) =
if V.null buf1 then Vector (n1+n2) hd1 (tl1 `snoc` hd2 `mappend` tl2) buf2
else foldl' snoc v1 (toList v2)
instance Functor Vector where
fmap f (Vector n hd tl buf) = Vector n (fmap f hd) (fmap (fmap f) tl) (fmap f buf)
instance Foldable Vector where
foldMap f = foldl' (\acc a -> acc `mappend` f a) mempty
foldl' f z v = foldl' f z (toList v)
instance Traversable Vector where
traverse f v = fromList <$> traverse f (toList v)
| nightscape/platform | node/src/Unison/Runtime/Vector.hs | mit | 3,494 | 0 | 15 | 821 | 1,586 | 801 | 785 | 74 | 4 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
-- | A Shakespearean module for general text processing, introducing type-safe,
-- compile-time variable interpolation.
--
-- Text templates use the same parser as for other shakespearean templates
-- which enables variable interpolation using @#{..}@. The parser also
-- recognize the @@{..}@ and @^{..}@ syntax.
--
-- If it is necessary that your template produces the output containing one of
-- the interpolation syntax you can escape the sequence using a backslash:
--
-- > λ> :set -XQuasiQuotes
-- > λ> let bar = 23 :: Int in [st|#{bar}|] :: Text
--
-- produces "23", but
--
-- > λ> let bar = 23 :: Int in [st|#\{bar}|] :: Text
--
-- returns "#{bar}". The escaping backslash is removed from the output.
--
-- Further reading:
-- Shakespearean templates: <https://www.yesodweb.com/book/shakespearean-templates>
module Text.Shakespeare.Text
( TextUrl
, ToText (..)
, renderTextUrl
, stext
, stextFile
, text
, textFile
, textFileDebug
, textFileReload
, st -- | strict text
, lt -- | lazy text, same as stext :)
, sbt -- | strict text whose left edge is aligned with bar ('|')
, lbt -- | lazy text, whose left edge is aligned with bar ('|')
-- * Yesod code generation
, codegen
, codegenSt
, codegenFile
, codegenFileReload
) where
import Language.Haskell.TH.Quote (QuasiQuoter (..))
import Language.Haskell.TH.Syntax
import Data.Text.Lazy.Builder (Builder, fromText, toLazyText, fromLazyText)
import Data.Text.Lazy.Builder.Int (decimal)
import qualified Data.Text as TS
import qualified Data.Text.Lazy as TL
import Text.Shakespeare
import Data.Int (Int32, Int64)
renderTextUrl :: RenderUrl url -> TextUrl url -> TL.Text
renderTextUrl r s = toLazyText $ s r
type TextUrl url = RenderUrl url -> Builder
class ToText a where
toText :: a -> Builder
instance ToText Builder where toText = id
instance ToText [Char ] where toText = fromLazyText . TL.pack
instance ToText TS.Text where toText = fromText
instance ToText TL.Text where toText = fromLazyText
instance ToText Int32 where toText = decimal
instance ToText Int64 where toText = decimal
instance ToText Int where toText = decimal
settings :: Q ShakespeareSettings
settings = do
toTExp <- [|toText|]
wrapExp <- [|id|]
unWrapExp <- [|id|]
return $ defaultShakespeareSettings { toBuilder = toTExp
, wrap = wrapExp
, unwrap = unWrapExp
}
-- | "Simple text" quasi-quoter. May only be used to generate expressions.
--
-- Generated expressions have type 'TL.Text'.
--
-- @
-- >>> do let x = "world"
-- 'Data.Text.Lazy.IO.putStrLn' ['stext'|Hello, #{x}!|]
-- Hello, world!
-- @
stext :: QuasiQuoter
stext =
QuasiQuoter { quoteExp = \s -> do
rs <- settings
render <- [|toLazyText|]
rendered <- shakespeareFromString rs { justVarInterpolation = True } s
return (render `AppE` rendered)
}
lt, st, text, lbt, sbt :: QuasiQuoter
lt = stext
st =
QuasiQuoter { quoteExp = \s -> do
rs <- settings
render <- [|TL.toStrict . toLazyText|]
rendered <- shakespeareFromString rs { justVarInterpolation = True } s
return (render `AppE` rendered)
}
text = QuasiQuoter { quoteExp = \s -> do
rs <- settings
quoteExp (shakespeare rs) $ filter (/='\r') s
}
-- | Strip the bar ('|') layout used by the 'lbt' / 'sbt' quasiquoters:
-- the first line is kept verbatim, every later line is cut just past
-- its first '|', and the final line (the whitespace before the closing
-- bracket) is dropped entirely.
dropBar :: [TL.Text] -> [TL.Text]
dropBar [] = []
dropBar (c:cx) = c:dropBar' cx
where
-- Reverse so 'drop 1' removes the original *last* line, strip each
-- remaining line up to and including its first '|', reverse back.
dropBar' txt = reverse $ drop 1 $ map (TL.drop 1 . TL.dropWhile (/= '|')) $ reverse txt
lbt =
QuasiQuoter { quoteExp = \s -> do
rs <- settings
render <- [|TL.unlines . dropBar . TL.lines . toLazyText|]
rendered <- shakespeareFromString rs { justVarInterpolation = True } s
return (render `AppE` rendered)
}
sbt =
QuasiQuoter { quoteExp = \s -> do
rs <- settings
render <- [|TL.toStrict . TL.unlines . dropBar . TL.lines . toLazyText|]
rendered <- shakespeareFromString rs { justVarInterpolation = True } s
return (render `AppE` rendered)
}
textFile :: FilePath -> Q Exp
textFile fp = do
rs <- settings
shakespeareFile rs fp
textFileDebug :: FilePath -> Q Exp
textFileDebug = textFileReload
{-# DEPRECATED textFileDebug "Please use textFileReload instead" #-}
textFileReload :: FilePath -> Q Exp
textFileReload fp = do
rs <- settings
shakespeareFileReload rs fp
-- | Like 'stext', but reads an external file at compile-time.
--
-- @since 2.0.22
stextFile :: FilePath -> Q Exp
stextFile fp = do
rs <- settings
[|toLazyText $(shakespeareFile rs { justVarInterpolation = True } fp)|]
-- | codegen is designed for generating Yesod code, including templates
-- So it uses different interpolation characters that won't clash with templates.
codegenSettings :: Q ShakespeareSettings
codegenSettings = do
toTExp <- [|toText|]
wrapExp <- [|id|]
unWrapExp <- [|id|]
return $ defaultShakespeareSettings { toBuilder = toTExp
, wrap = wrapExp
, unwrap = unWrapExp
, varChar = '~'
, urlChar = '*'
, intChar = '&'
, justVarInterpolation = True -- always!
}
-- | codegen is designed for generating Yesod code, including templates
-- So it uses different interpolation characters that won't clash with templates.
-- You can use the normal text quasiquoters to generate code
codegen :: QuasiQuoter
codegen =
QuasiQuoter { quoteExp = \s -> do
rs <- codegenSettings
render <- [|toLazyText|]
rendered <- shakespeareFromString rs { justVarInterpolation = True } s
return (render `AppE` rendered)
}
-- | Generates strict Text
-- codegen is designed for generating Yesod code, including templates
-- So it uses different interpolation characters that won't clash with templates.
codegenSt :: QuasiQuoter
codegenSt =
QuasiQuoter { quoteExp = \s -> do
rs <- codegenSettings
render <- [|TL.toStrict . toLazyText|]
rendered <- shakespeareFromString rs { justVarInterpolation = True } s
return (render `AppE` rendered)
}
codegenFileReload :: FilePath -> Q Exp
codegenFileReload fp = do
rs <- codegenSettings
render <- [|TL.toStrict . toLazyText|]
rendered <- shakespeareFileReload rs{ justVarInterpolation = True } fp
return (render `AppE` rendered)
codegenFile :: FilePath -> Q Exp
codegenFile fp = do
rs <- codegenSettings
render <- [|TL.toStrict . toLazyText|]
rendered <- shakespeareFile rs{ justVarInterpolation = True } fp
return (render `AppE` rendered)
| yesodweb/shakespeare | Text/Shakespeare/Text.hs | mit | 6,519 | 62 | 18 | 1,320 | 1,382 | 791 | 591 | 137 | 1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
module
Nayoro.Web
( runApp
) where
import Web.Apiary
import Web.Apiary.Database.Persist
import Web.Apiary.Logger
import Network.Wai.Handler.Warp
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Class (lift)
import qualified Data.Aeson as A
import Data.Conduit (await, ($$), ($=))
import qualified Data.Conduit.List as CL
import Database.Persist
import Database.Persist.Sqlite
import Data.Time
import Control.Monad.Trans.Resource (liftResourceT, runResourceT, MonadResource)
import qualified Data.Vector as V
import qualified Data.ByteString.Lazy.Char8 as L
import Nayoro.Model
import Nayoro.Util
import qualified Nayoro.Config as Conf
instance MonadResource m => MonadResource (ActionT exts prms m) where
liftResourceT = lift . liftResourceT
mkStub :: IO ()
mkStub = do
runSqlite "test.sqlite" $ do
time <- liftIO getCurrentTime
runMigration migrateAll
johnId <- insert $ Person "john@example.com"
janeId <- insert $ Person "jane@example.com"
johnWId <- insert $ Person "john_watson@example.com"
johnExampleId <- insert $ Handle johnId "example" "John Doe" time
janeExampleId <- insert $ Handle janeId "example" "Jane Doe" time
insert $ Handle johnId "irc" "john_doe" time
johnAnotherId <- insert $ Handle johnWId "another" "John Watson" time
insert $ Handle johnWId "irc" "xwhitex" time
insert $ Uri johnExampleId "calendar" "http://example.com/calendar/john"
insert $ Uri johnExampleId "address book" "http://example.com/address/john"
insert $ Uri johnAnotherId "irc" "http://example.com/calendar/john_watson"
liftIO $ putStrLn "get watson from white"
getHandleAndUris "white" $$ CL.mapM_ (liftIO . print)
runApp :: Conf.AppConfig -> IO ()
runApp _= do
mkStub
runApiaryTWith runResourceT (run 3000)
( initLogger def
+> initPersistPool (withSqlitePool "test.sqlite" 10) migrateAll
) def $ do
[capture|/|] . method GET . action $ do
contentType "text/html"
mapM_ lazyBytes ["<h1>Hello, World!</h1>\n"]
[capture|/age::Int|] . ([key|name|] =: pLazyByteString) . method GET . action $ do
(age, name) <- [params|age,name|]
guard (age >= 18)
contentType "text/html"
mapM_ lazyBytes ["<h1>Hello, ", name, "!</h1>\n"]
[capture|/search|] . ([key|q|] =: pText) . method GET . action $ do
query <- param [key|q|]
contentType "text/plain"
v <- runSql $ getHandleAndUris query $= CL.mapM (lift . lift . return . A.toJSON) $$ sinkVector
lazyBytes $ A.encode (v :: V.Vector A.Value)
| liquidamber/nayoro | src/Nayoro/Web.hs | mit | 2,676 | 0 | 20 | 537 | 779 | 411 | 368 | 63 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cognito-userpool-stringattributeconstraints.html
module Stratosphere.ResourceProperties.CognitoUserPoolStringAttributeConstraints where
import Stratosphere.ResourceImports
-- | Full data type definition for CognitoUserPoolStringAttributeConstraints.
-- See 'cognitoUserPoolStringAttributeConstraints' for a more convenient
-- constructor.
data CognitoUserPoolStringAttributeConstraints =
CognitoUserPoolStringAttributeConstraints
{ _cognitoUserPoolStringAttributeConstraintsMaxLength :: Maybe (Val Text)
, _cognitoUserPoolStringAttributeConstraintsMinLength :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON CognitoUserPoolStringAttributeConstraints where
toJSON CognitoUserPoolStringAttributeConstraints{..} =
object $
catMaybes
[ fmap (("MaxLength",) . toJSON) _cognitoUserPoolStringAttributeConstraintsMaxLength
, fmap (("MinLength",) . toJSON) _cognitoUserPoolStringAttributeConstraintsMinLength
]
-- | Constructor for 'CognitoUserPoolStringAttributeConstraints' containing
-- required fields as arguments.
cognitoUserPoolStringAttributeConstraints
:: CognitoUserPoolStringAttributeConstraints
cognitoUserPoolStringAttributeConstraints =
CognitoUserPoolStringAttributeConstraints
{ _cognitoUserPoolStringAttributeConstraintsMaxLength = Nothing
, _cognitoUserPoolStringAttributeConstraintsMinLength = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cognito-userpool-stringattributeconstraints.html#cfn-cognito-userpool-stringattributeconstraints-maxlength
cupsacMaxLength :: Lens' CognitoUserPoolStringAttributeConstraints (Maybe (Val Text))
cupsacMaxLength = lens _cognitoUserPoolStringAttributeConstraintsMaxLength (\s a -> s { _cognitoUserPoolStringAttributeConstraintsMaxLength = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cognito-userpool-stringattributeconstraints.html#cfn-cognito-userpool-stringattributeconstraints-minlength
cupsacMinLength :: Lens' CognitoUserPoolStringAttributeConstraints (Maybe (Val Text))
cupsacMinLength = lens _cognitoUserPoolStringAttributeConstraintsMinLength (\s a -> s { _cognitoUserPoolStringAttributeConstraintsMinLength = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/CognitoUserPoolStringAttributeConstraints.hs | mit | 2,425 | 0 | 12 | 206 | 265 | 152 | 113 | 27 | 1 |
import qualified Data.Set as S (Set, empty, fromList, insert)
-- | Collect every power @b ^ e@ (for each exponent, over every base)
-- into a set, so duplicate results count only once (Project Euler 29).
sets :: [Integer] -> [Integer] -> S.Set Integer
sets bases exps = S.fromList [b ^ e | e <- exps, b <- bases]
main :: IO ()
main = print . length $ sets [2..100] [2..100]
| tamasgal/haskell_exercises | ProjectEuler/p029_alt.hs | mit | 265 | 0 | 9 | 61 | 157 | 84 | 73 | 7 | 2 |
// Refiner agent rule: each tick, turn ore into metal.
// Host-provided API: query_inventory, produce, consume,
// inventory_is_full, make_room_for; `agent` is presumably a global
// bound by the simulation engine -- NOTE(review): confirm.
var food = query_inventory("food");
var tools = query_inventory("tools");
var ore = query_inventory("ore");
var has_food = food >= 1;
var has_tools = tools >= 1;
var has_ore = ore >= 1;
if(has_food && has_ore){
if(has_tools){
//convert all ore into metal, consume 1 food, break tools with 10% chance
// (the trailing 0.1 argument is presumably the breakage probability,
// matching the comment above -- confirm against consume()'s signature)
produce(agent,"metal",ore);
consume(agent,"ore",ore);
consume(agent,"food",1);
consume(agent,"tools",1,0.1);
}else{
//convert up to 2 ore into metal, consume 1 food
var max = query_inventory("ore");
if(max > 2){ max = 2;}
produce(agent,"metal", max);
consume(agent,"ore", max);
consume(agent,"food",1);
}
}else{
//fined $2 for being idle
consume(agent,"money",2);
// When idle for lack of food with a full inventory, free up space
// so food can be bought next tick.
if(!has_food && inventory_is_full()){
make_room_for(agent,"food",2);
}
}
| larsiusprime/bazaarBot | examples/doran_and_parberry/Assets/scripts/refiner.hs | mit | 777 | 45 | 9 | 131 | 402 | 236 | 166 | -1 | -1 |
module Main where
import Primes (prime')
import Util (truncateRight, digitsToInt)
import Data.List (permutations)
-- | Project Euler 41: find the largest pandigital prime.
main :: IO ()
main =
let
-- Suffixes of [7,6,...,1]: digit pools for 7-, 6-, ..., 1-digit
-- pandigital numbers (8- and 9-digit pandigitals are divisible by 3).
seeds = takeWhile (/= []) $ iterate tail [7,6..1]
-- Every permutation of every pool, read back as an integer
-- (digitsToInt is a project helper -- assumed most-significant-first).
pans = map digitsToInt $ concat $ map permutations seeds
results = filter prime' pans
in
print $ maximum results
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-
Copyright (C) 2008 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- Re-exports Happstack functions needed by gitit, including
replacements for Happstack functions that don't handle UTF-8 properly, and
new functions for setting headers and zipping contents and for looking up IP
addresses.
-}
module Network.Gitit.Server
( module Happstack.Server
, withExpiresHeaders
, setContentType
, setFilename
, lookupIPAddr
, getHost
, compressedResponseFilter
)
where
import Happstack.Server
import Happstack.Server.Parts (compressedResponseFilter)
import Network.Socket (getAddrInfo, defaultHints, addrAddress)
import Control.Monad.Reader
import Data.ByteString.UTF8 as U
-- | Attach a @Cache-Control@ header telling clients they may cache the
-- response for six hours (21600 seconds).
withExpiresHeaders :: ServerMonad m => m Response -> m Response
withExpiresHeaders resp = setHeader "Cache-Control" "max-age=21600" `liftM` resp
-- | Set the @Content-Type@ response header to the given MIME type string.
setContentType :: String -> Response -> Response
setContentType = setHeader "Content-Type"
-- | Mark the response as a download by setting a
-- @Content-Disposition: attachment@ header carrying the given filename.
setFilename :: String -> Response -> Response
setFilename fname =
  setHeader "Content-Disposition" ("attachment; filename=\"" ++ fname ++ "\"")
-- IP lookup
-- | Resolve a hostname to its first address, rendered as a string with
-- any trailing @:port@ part stripped. Returns 'Nothing' when resolution
-- yields no addresses.
--
-- (The original code checked @null addrs@ and then repeated the same
-- case analysis with an unreachable 'error' branch; a single 'case'
-- expresses it without the partiality.)
lookupIPAddr :: String -> IO (Maybe String)
lookupIPAddr hostname = do
  addrs <- getAddrInfo (Just defaultHints) (Just hostname) Nothing
  case addrs of
    []      -> return Nothing
    (a : _) -> return . Just . takeWhile (/= ':') . show $ addrAddress a
-- | The request's @Host@ header, decoded from UTF-8, if present.
-- (@maybe Nothing (Just . f)@ is just @fmap f@ on 'Maybe'.)
getHost :: ServerMonad m => m (Maybe String)
getHost = liftM (fmap U.toString) $ getHeaderM "Host"
| tphyahoo/gititpt | Network/Gitit/Server.hs | gpl-2.0 | 2,455 | 0 | 13 | 573 | 360 | 195 | 165 | 31 | 3 |
-- |
-- Module : Control.Functor.Constrained
-- Copyright : (c) 2014 Justus Sagemüller
-- License : GPL v3 (see COPYING)
-- Maintainer : (@) jsag $ hvl.no
--
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 800
{-# LANGUAGE UndecidableSuperClasses #-}
#endif
module Control.Functor.Constrained
( module Control.Category.Constrained
-- * Functors
, Functor(..)
, (<$>)
, constrainedFmap
-- * [Co]product mapping
, SumToProduct(..)
) where
import Control.Category.Constrained
import Prelude hiding (id, (.), Functor(..), filter, (<$>))
import qualified Prelude
import Data.Void
import Data.Type.Coercion
import Data.Complex
import Control.Category.Discrete
-- | A functor between two (possibly constrained) categories: @r@ is the
--   source, @t@ the target. The functional dependencies let GHC derive
--   one category from the other for a given container @f@.
class ( Category r, Category t, Object t (f (UnitObject r)) )
           => Functor f r t | f r -> t, f t -> r where
  -- | Map a morphism of the source category to one of the target category.
  fmap :: (Object r a, Object t (f a), Object r b, Object t (f b))
     => r a b -> t (f a) (f b)
-- | Ordinary Haskell 'Prelude.Functor's are functors from @(->)@ to @(->)@.
instance (Prelude.Functor f) => Functor f (->) (->) where
  fmap = Prelude.fmap
-- | It is fairly common for functors (typically, container-like) to map 'Either'
-- to tuples in a natural way, thus \"separating the variants\".
-- This is related to 'Data.Foldable.Constrained.Foldable'
-- (with list and tuple monoids), but rather more effective.
class ( CoCartesian r, Cartesian t, Functor f r t, Object t (f (ZeroObject r)) )
           => SumToProduct f r t where
  -- | @
  -- sum2product ≡ mapEither id
  -- @
  sum2product :: ( ObjectSum r a b, ObjectPair t (f a) (f b) )
         => t (f (a+b)) (f a, f b)
  -- | @
  -- mapEither f ≡ sum2product . fmap f
  -- @
  mapEither :: ( Object r a, ObjectSum r b c
               , Object t (f a), ObjectPair t (f b) (f c) )
         => r a (b+c) -> t (f a) (f b, f c)
  -- | Keep only the elements on which the predicate-morphism yields 'True'.
  filter :: ( Object r a, Object r Bool, Object t (f a) )
         => r a Bool -> t (f a) (f a)
-- Lists separate the variants in a single pass; the lazy tuple patterns
-- @~(xs,ys)@ keep both result lists incremental (usable on infinite input).
instance SumToProduct [] (->) (->) where
  sum2product [] = ([],[])
  sum2product (Left x : l) = (x:xs, ys) where ~(xs,ys) = sum2product l
  sum2product (Right y : l) = (xs ,y:ys) where ~(xs,ys) = sum2product l
  mapEither _ [] = ([],[])
  mapEither f (a:l) = case f a of
           Left x -> (x:xs, ys)
           Right y -> (xs ,y:ys)
     where ~(xs,ys) = mapEither f l
  filter = Prelude.filter
infixl 4 <$>
-- | Infix synonym of 'fmap', specialised to functors whose target
-- category is ordinary Hask (@(->)@).
(<$>) :: (Functor f r (->), Object r a, Object r b)
            => r a b -> f a -> f b
(<$>) = fmap
-- | Lift a plain fmap to one between the constrained categories
-- @(o⊢r)@ and @(o⊢t)@: unwrap the morphism, map, and rewrap.
constrainedFmap :: (Category r, Category t, o a, o b, o (f a), o (f b))
      => ( r a b -> t (f a) (f b) )
           -> (o⊢r) a b -> (o⊢t) (f a) (f b)
constrainedFmap q = constrained . q . unconstrained
-- Lifting the plain instances into a constrained subcategory (o⊢k):
-- unwrap the 'ConstrainedMorphism', use the underlying instance, rewrap.
instance (Functor [] k k, o [UnitObject k]) => Functor [] (o⊢k) (o⊢k) where
  fmap (ConstrainedMorphism f) = ConstrainedMorphism $ fmap f
instance (o (), o [()], o Void, o [Void]) => SumToProduct [] (o⊢(->)) (o⊢(->)) where
  sum2product = ConstrainedMorphism sum2product
  mapEither (ConstrainedMorphism f) = ConstrainedMorphism $ mapEither f
  filter (ConstrainedMorphism f) = ConstrainedMorphism $ filter f
-- In the 'Coercion' category, mapping a coercion of the element type is
-- itself a coercion (all zero-cost).
instance Functor [] Coercion Coercion where fmap Coercion = Coercion
instance Functor Maybe Coercion Coercion where fmap Coercion = Coercion
instance Functor (Either a) Coercion Coercion where fmap Coercion = Coercion
instance Functor ((->) a) Coercion Coercion where fmap Coercion = Coercion
instance Functor ((,) a) Coercion Coercion where fmap Coercion = Coercion
instance Functor IO Coercion Coercion where fmap Coercion = Coercion
instance Functor Complex Coercion Coercion where fmap Coercion = Coercion
-- In the 'Discrete' category (identity morphisms only), every container
-- is trivially functorial.
instance Functor [] Discrete Discrete where fmap Refl = Refl
instance Functor Maybe Discrete Discrete where fmap Refl = Refl
instance Functor (Either a) Discrete Discrete where fmap Refl = Refl
instance Functor ((->) a) Discrete Discrete where fmap Refl = Refl
instance Functor ((,) a) Discrete Discrete where fmap Refl = Refl
instance Functor IO Discrete Discrete where fmap Refl = Refl
instance Functor Complex Discrete Discrete where fmap Refl = Refl
| leftaroundabout/constrained-categories | Control/Functor/Constrained.hs | gpl-3.0 | 4,313 | 0 | 12 | 1,036 | 1,583 | 848 | 735 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module GridResponse
(gridResponse) where
import qualified Text.Blaze.Html5 as H
import Happstack.Server
import Utilities
import MasterTemplate
import Scripts
-- | Serve the timetable-grid page (HTTP 200): the page header, the
-- conflict-warning dialog, one row holding the course/search/info
-- panels, the disclaimer, and finally the timetable scripts.
gridResponse :: ServerPart Response
gridResponse =
   ok $ toResponse $
    masterTemplate "Courseography - Grid"
                   []
                   (do header "grid"
                       conflictDialog
                       createTag H.div "" "row main" $ do
                           coursePanel
                           searchPanel
                           infoPanel
                       disclaimer
                   )
                   timetableScripts
-- | Dialog (@#dialog@) warning the user about selecting conflicting courses.
conflictDialog :: H.Html
conflictDialog =
    createTag H.div "dialog" "" "Conflicting courses are difficult to manage. Make sure you understand the added responsibility of having two or more conflicting courses."
-- | Left-hand panel: the selected-course list (@#course-select@) topped
-- by a "Clear All" item.
coursePanel :: H.Html
coursePanel =
    createTag H.div "course-select-wrapper" "col-md-2 col-xs-6" $
        createTag H.ul "course-select" "trapScroll-enabled" $
            createTag H.li "clear-all" "" $
                createTag H.h3 "" "" "Clear All"
-- | Search panel: a course-filter text input plus an empty results list
-- (@#search-list@) — presumably populated client-side by the page
-- scripts; confirm against timetableScripts.
searchPanel :: H.Html
searchPanel =
    createTag H.div "search-layout" "col-md-2 col-xs-6 col-md-push-8" $ do
        createTag H.div "filter-container" "" $
            makeForm "" "" "return false;" $
                makeInput "course-filter" "form-control" "Enter a course!" "off" "text"
        createTag H.div "search-container" "" $
            createTag H.div "search-list" "" ""
-- | Central panel: an empty @#info@ container.
infoPanel :: H.Html
infoPanel =
    createTag H.div "" "col-md-8 col-xs-12 col-md-pull-2" $
        createTag H.div "info" "row" ""
| arkon/courseography | hs/GridResponse.hs | gpl-3.0 | 1,605 | 0 | 12 | 463 | 316 | 157 | 159 | 42 | 1 |
module Web.SpriteIcons.TH (spritesFromJSON, Octicon, toSVGAtSize, toSVGAtWidth, toSVGAtHeight, toSVG) where
import Data.Aeson
import qualified Data.ByteString.Lazy as BSL
import Data.Char
import qualified Data.Map as Map
import qualified Data.Text as T
import Language.Haskell.TH
import Lucid
-- | One icon from the sprite JSON. The JSON object itself carries no
-- name; '_octName' is filled in from the enclosing map key by
-- 'spritesFromJSON'.
data Octicon = Octicon {
      _octName :: String -- ^ icon name (the key in the JSON map)
    , _octKeywords :: [String] -- ^ search keywords
    , _octPath :: String -- ^ raw SVG path markup, injected via 'toHtmlRaw'
    , _octWidth :: Int -- ^ natural viewBox width
    , _octHeight :: Int -- ^ natural viewBox height
    }
-- | Decode an 'Octicon' from its JSON description. The name field is
-- left empty here; 'spritesFromJSON' fills it in from the map key.
instance FromJSON Octicon where
  parseJSON (Object o) =
    Octicon ""
      <$> o .: "keywords"
      <*> o .: "path"
      <*> (fromstr <$> o .: "width")
      <*> (fromstr <$> o .: "height")
    where
      -- Dimensions arrive as JSON *strings* of digits (e.g. "16").
      -- NOTE(review): 'read' still throws on a non-numeric string —
      -- confirm the sprite JSON always uses digit strings here.
      fromstr (String t) = read (T.unpack t) :: Int
      fromstr v = error ("Octicon: expected a string dimension, got " ++ show v)
  -- Fail as a parser instead of crashing with a pattern-match error
  -- when the value is not an object.
  parseJSON _ = fail "Octicon: expected a JSON object"
-- | Read the sprite JSON at compile time and emit one top-level binding
-- per icon. Binding names are the map keys converted from kebab-case to
-- camelCase (e.g. @arrow-down@ becomes @arrowDown@); each binds an
-- 'Octicon' whose name field is the original key.
spritesFromJSON :: FilePath -> DecsQ
spritesFromJSON p = do
    d <- runIO $ BSL.readFile p
    -- A failed decode makes this pattern match fail, aborting compilation.
    let Just h = decode d :: Maybe (Map.Map String Octicon)
    Map.foldrWithKey' f (return []) h
    where
        -- kebab-case -> camelCase: drop each '-' and upcase the following letter.
        t :: String -> String
        t ('-':b:c) = toUpper b : t c
        t (a:c) = a : t c
        t "" = ""
        -- Prepend one value declaration: @name = Octicon key keywords path w h@.
        f :: String -> Octicon -> DecsQ -> DecsQ
        f k Octicon{..} b = do
            ds <- b
            let n = VarP . mkName $ t k
            let kw = ListE $ LitE . StringL <$> _octKeywords
            let p = LitE $ StringL _octPath
            let w = LitE . IntegerL $ toInteger _octWidth
            let h = LitE . IntegerL $ toInteger _octHeight
            let v = NormalB $ foldl AppE (ConE 'Octicon) [LitE $ StringL k, kw, p, w, h]
            return (ValD n v [] : ds)
-- | Render the icon as an inline @\<svg\>@ with the given pixel
-- width/height; the @viewBox@ uses the icon's natural dimensions and
-- the stored path markup is injected unescaped ('toHtmlRaw').
toSVGAtSize :: (Monad m) => Octicon -> Float -> Float -> HtmlT m ()
toSVGAtSize Octicon{..} w h = svg_ [classes_ ["octicon", "octicon-" `T.append` T.pack _octName], term "version" "1.1", width_ (T.pack $ show w), height_ (T.pack $ show h), term "aria-hidden" "true", term "viewBox" . T.pack $ unwords ["0 0", show _octWidth, show _octHeight]]
    $ toHtmlRaw _octPath
-- | Render at a given width, scaling the height to keep the aspect ratio.
toSVGAtWidth :: (Monad m) => Octicon -> Float -> HtmlT m ()
toSVGAtWidth o@Octicon{..} w = toSVGAtSize o w h
    where h = (fromIntegral _octHeight) * w / (fromIntegral _octWidth)
-- | Render at a given height, scaling the width to keep the aspect ratio.
toSVGAtHeight :: (Monad m) => Octicon -> Float -> HtmlT m ()
toSVGAtHeight o@Octicon{..} h = toSVGAtSize o w h
    where w = (fromIntegral _octWidth) * h / (fromIntegral _octHeight)
-- | Render at the icon's natural size.
toSVG :: (Monad m) => Octicon -> HtmlT m ()
toSVG o@Octicon{..} = toSVGAtSize o (fromIntegral _octWidth) (fromIntegral _octHeight)
| typedrat/typedrat-site | app/Web/SpriteIcons/TH.hs | gpl-3.0 | 2,351 | 0 | 16 | 593 | 976 | 504 | 472 | -1 | -1 |
module Main where
--- factorial
--- if < 0 then -1 "error"
-- | Factorial. Returns -1 for negative input (the file's stated error
-- convention). @fac 0 == 1@; the original looped forever on 0 because
-- its stop condition was @a == 1@ and the counter skipped past it.
fac :: Int -> Int
fac a
    | a < 0     = -1
    | otherwise = go 1 a
  where
    -- Accumulator loop: @go acc n == acc * n!@, counting n down to 0.
    go :: Int -> Int -> Int
    go acc 0 = acc
    go acc n = go (acc * n) (n - 1)
main = print (fac 3)
| nastya13/Factorial | src/Main.hs | gpl-3.0 | 298 | 6 | 15 | 129 | 130 | 67 | 63 | 14 | 3 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
module Grav2ty.Protocol where
import Prelude hiding (take)
import Grav2ty.Core
import Grav2ty.Util.Serialization
import Data.Attoparsec.ByteString
import Data.Bits
import Data.ByteString (ByteString (..))
import qualified Data.ByteString as BS
import Data.Char
import Flat
import Data.Int
import Data.Maybe (fromJust, isNothing)
import Data.Word
import Linear.V2
-- | One framed unit on the wire: a tag byte identifying the message
-- kind plus the raw encoded body. Framing itself is done by
-- 'renderPacket' / 'packetParser'.
data Packet
  = Packet
  { pMessageType :: Word8 -- ^ tag byte (0–6 for 'Message', see its 'ToPacket' instance)
  , pPacketContents :: ByteString -- ^ encoded body
  } deriving (Show, Eq, Ord)
-- | Things that can be framed as a 'Packet'. 'fromPacket' must invert
-- 'toPacket', returning 'Nothing' on malformed input.
class ToPacket a where
  toPacket :: a -> Packet
  fromPacket :: Packet -> Maybe a
-- | Version of this wire protocol, exchanged via 'ProtocolVersion'.
protocolVersion :: Word8
protocolVersion = 1
-- | Errors a peer can report.
data ErrorType
  = ErrorServerFull
  | ErrorVersionMismatch
  | ErrorNoParse
  deriving (Show, Eq, Ord, Generic, Flat)
-- | Protocol Version 1
data Message a
  = ProtocolVersion Word8
  | Error ErrorType
  | AssignMods [Id]
  | NewWorld Tick (World a)
  | NewObject Tick Id (Object a)
  | UpdateMod (ModMap a)
  | TicksPerSecond Tick
  deriving (Show, Eq, Ord, Generic, Flat)
-- | Keep the value only when the flag holds.
toMaybe :: Bool -> a -> Maybe a
toMaybe b x = if b then Just x else Nothing

-- | Forget the 'Left' side of an 'Either'.
rightToMaybe :: Either e a -> Maybe a
rightToMaybe = either (const Nothing) Just
-- Tag bytes: 0 ProtocolVersion, 1 Error, 2 AssignMods, 3 NewWorld,
-- 4 NewObject, 5 UpdateMod, 6 TicksPerSecond. 'fromPacket' mirrors
-- 'toPacket' case by case and yields 'Nothing' for unknown tags, wrong
-- body lengths, or bodies that fail to 'unflat'.
instance Flat a => ToPacket (Message a) where
  toPacket (ProtocolVersion v) = Packet 0 (BS.singleton v)
  toPacket (Error e) = Packet 1 (flat e)
  toPacket (AssignMods ids) = Packet 2 (flat ids)
  toPacket (NewWorld tick world) = Packet 3 $ flat (tick, world)
  toPacket (NewObject tick id obj) = Packet 4 $ flat (tick, id, obj)
  toPacket (UpdateMod modmap) = Packet 5 (flat modmap)
  toPacket (TicksPerSecond t) = Packet 6 (flat t)
  fromPacket (Packet 0 v) = toMaybe (BS.length v == 1) (ProtocolVersion $ BS.head v)
  fromPacket (Packet 1 e) = Error <$> rightToMaybe (unflat e)
  fromPacket (Packet 2 m) = AssignMods <$> rightToMaybe (unflat m)
  fromPacket (Packet 3 w) = case unflat w of
                              Left _ -> Nothing
                              Right (tick, world) -> Just $ NewWorld tick world
  fromPacket (Packet 4 o) = case unflat o of
                              Left _ -> Nothing
                              Right (tick, id, obj) -> Just $ NewObject tick id obj
  fromPacket (Packet 5 m) = UpdateMod <$> rightToMaybe (unflat m)
  fromPacket (Packet 6 t) = TicksPerSecond <$> rightToMaybe (unflat t)
  fromPacket (Packet _ _) = Nothing
-- | Big-endian byte decomposition of an 'Int64': always 8 bytes, most
-- significant first. Truncating the arithmetic right-shift to 'Word8'
-- extracts byte @n@ directly (no separate mask needed).
bytes :: Int64 -> [Word8]
bytes i = [ fromIntegral (shift i (-8 * n)) | n <- [7,6..0] ]
-- | Big-endian recomposition, the inverse of 'bytes' for 8-byte lists —
-- which is all it is applied to ('packetParser' always takes exactly 8).
-- NOTE(review): for lists shorter than 8 the second equation down-shifts
-- the accumulator, which is not a meaningful decoding — confirm callers
-- never pass short lists.
unbytes :: [Word8] -> Int64
unbytes l = unbytes' l 7 0
  where unbytes' [x] 0 acc = (fromIntegral x) + acc
        unbytes' [x] n acc = shift (fromIntegral x + acc) (-8 * n)
        unbytes' (x:xs) n acc = unbytes' xs (n - 1) (acc + shift (fromIntegral x) (8 * n))
-- | Frame a 'Packet' for the wire: one tag byte, then the body length
-- as 8 big-endian bytes (via 'bytes'), then the body itself.
renderPacket :: Packet -> ByteString
renderPacket (Packet t content) = BS.pack (t : bytes len) `BS.append` content
  where len :: Int64
        len = fromIntegral $ BS.length content
-- | Run 'packetParser' over raw input.
parsePacket :: ByteString -> Result Packet
parsePacket = parse packetParser

-- | Parse one frame: a tag byte, an 8-byte big-endian length, then
-- exactly that many payload bytes. (Local names chosen so nothing
-- shadows 'Prelude.length'.)
packetParser :: Parser Packet
packetParser = do
  msgType <- anyWord8
  bodyLen <- fromIntegral . unbytes . BS.unpack <$> take 8 -- TODO get rid of unpack
  Packet msgType <$> take bodyLen
-- | Encode a 'Message' into its framed wire representation.
renderMessage :: Flat a => Message a -> ByteString
renderMessage = renderPacket . toPacket
-- | Decode a framed 'Message' from raw bytes.
parseMessage :: Flat a => ByteString -> Result (Message a)
parseMessage = parse messageParser
-- | Frame parser that also decodes the body; fails when the packet does
-- not correspond to a valid 'Message' (unknown tag or undecodable body).
messageParser :: Flat a => Parser (Message a)
messageParser = packetParser >>=
  maybe (fail "Packet is no valid message") pure . fromPacket
| lukasepple/grav1ty | lib/Grav2ty/Protocol.hs | gpl-3.0 | 3,684 | 0 | 16 | 865 | 1,420 | 732 | 688 | 96 | 3 |
{-# LANGUAGE DerivingStrategies #-}
-- |
-- Module : Aura.Settings.External
-- Copyright : (c) Colin Woodbury, 2012 - 2020
-- License : GPL3
-- Maintainer: Colin Woodbury <colin@fosskers.ca>
--
-- A simple parser for .conf files, along with types for aura-specific config
-- files.
module Aura.Settings.External
( -- * Aura Config
AuraConfig(..)
, getAuraConf
, auraConfig
, defaultAuraConf
-- * Parsing
, Config(..)
, config
) where
import Aura.Languages (langFromLocale)
import Aura.Settings
import Aura.Types
import RIO hiding (some, try)
import qualified RIO.ByteString as BS
import RIO.Directory
import qualified RIO.Map as M
import qualified RIO.Text as T
import Text.Megaparsec hiding (single)
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
--------------------------------------------------------------------------------
-- Aura-specific Configuration
-- | Settings read from @aura.conf@; every field is optional and the
-- setting names in the file are documented in 'auraConfig'.
data AuraConfig = AuraConfig
  { acLang :: !(Maybe Language) -- ^ @language@: a locale string
  , acEditor :: !(Maybe FilePath) -- ^ @editor@
  , acUser :: !(Maybe User) -- ^ @user@
  , acBuildPath :: !(Maybe FilePath) -- ^ @buildpath@
  , acASPath :: !(Maybe FilePath) -- ^ @allsourcepath@
  , acVCSPath :: !(Maybe FilePath) -- ^ @vcspath@
    -- @analyse@: set when the file says @False@ (see 'auraConfig').
  , acAnalyse :: !(Maybe BuildSwitch) }
  deriving stock (Show)
-- | Default location of Aura's config file.
defaultAuraConf :: FilePath
defaultAuraConf = "/etc/aura.conf"
-- | Read and parse a config file. Never fails: a missing file or an
-- unparsable one both yield an empty 'Config'.
getAuraConf :: FilePath -> IO Config
getAuraConf fp = do
  exists <- doesFileExist fp
  if not exists
    then pure $ Config mempty
    else do
      file <- decodeUtf8Lenient <$> BS.readFile fp
      pure . either (const $ Config M.empty) id $ parse config "aura config" file
-- | Interpret raw key/value pairs into an 'AuraConfig'. Only the first
-- word of each setting's value is consulted ('one'). For @analyse@:
-- a readable @False@ yields 'NoPkgbuildCheck'; @True@ (or anything
-- 'readMaybe' rejects) leaves the field 'Nothing'.
auraConfig :: Config -> AuraConfig
auraConfig (Config m) = AuraConfig
  { acLang = one "language" >>= langFromLocale
  , acEditor = T.unpack <$> one "editor"
  , acUser = User <$> one "user"
  , acBuildPath = T.unpack <$> one "buildpath"
  , acASPath = T.unpack <$> one "allsourcepath"
  , acVCSPath = T.unpack <$> one "vcspath"
  , acAnalyse = one "analyse" >>= readMaybe . T.unpack >>= bool (Just NoPkgbuildCheck) Nothing
  }
  where
    -- First word of the setting's value, if the key is present at all.
    one x = M.lookup x m >>= listToMaybe
--------------------------------------------------------------------------------
-- Parsing
-- | The (meaningful) contents of a .conf file.
-- Keys map to the whitespace-separated words of their value; if a key
-- occurs twice, 'M.fromList' in 'config' keeps the last occurrence.
newtype Config = Config (Map Text [Text]) deriving (Show)
-- | Whole-file parser: skip leading garbage, then collect lines —
-- @key = value@ lines become entries, anything else is consumed and
-- dropped ('rights' discards the 'single' results).
config :: Parsec Void Text Config
config = do
  garbage
  cs <- some $ fmap Right (try pair) <|> fmap Left single
  eof
  pure . Config . M.fromList $ rights cs
-- | Consume (and discard) one letters-only line — the fallback for
-- lines that are not @key = value@ pairs.
single :: Parsec Void Text ()
single = L.lexeme garbage . void $ manyTill letterChar newline
-- | One @key = value@ pair: the key runs up to the first space; the
-- value is the rest of the line, split into words.
pair :: Parsec Void Text (Text, [Text])
pair = L.lexeme garbage $ do
  n <- takeWhile1P Nothing (/= ' ')
  space
  void $ char '='
  space
  rest <- T.words <$> takeWhile1P Nothing (/= '\n')
  pure (n, rest)
-- Thu 23 Apr 2020 06:57:59 PM PDT
-- Thank you me-from-the-past for documenting this.
-- | All skippable content. Using `[]` as block comment markers is a trick to
-- skip conf file "section" lines.
garbage :: Parsec Void Text ()
garbage = L.space space1 (L.skipLineComment "#") (L.skipBlockComment "[" "]")
| aurapm/aura | aura/lib/Aura/Settings/External.hs | gpl-3.0 | 3,123 | 0 | 16 | 654 | 815 | 443 | 372 | 82 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.ForwardingRules.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified ForwardingRule resource.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.forwardingRules.delete@.
module Network.Google.Resource.Compute.ForwardingRules.Delete
(
-- * REST Resource
ForwardingRulesDeleteResource
-- * Creating a Request
, forwardingRulesDelete
, ForwardingRulesDelete
-- * Request Lenses
, frdRequestId
, frdProject
, frdForwardingRule
, frdRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.forwardingRules.delete@ method which the
-- 'ForwardingRulesDelete' request conforms to.
type ForwardingRulesDeleteResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"forwardingRules" :>
Capture "forwardingRule" Text :>
QueryParam "requestId" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Operation
-- | Deletes the specified ForwardingRule resource.
--
-- /See:/ 'forwardingRulesDelete' smart constructor.
data ForwardingRulesDelete =
  ForwardingRulesDelete'
    { _frdRequestId :: !(Maybe Text) -- ^ optional idempotency token (see 'frdRequestId')
    , _frdProject :: !Text -- ^ project ID
    , _frdForwardingRule :: !Text -- ^ name of the ForwardingRule to delete
    , _frdRegion :: !Text -- ^ region scoping the request
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ForwardingRulesDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'frdRequestId'
--
-- * 'frdProject'
--
-- * 'frdForwardingRule'
--
-- * 'frdRegion'
forwardingRulesDelete
:: Text -- ^ 'frdProject'
-> Text -- ^ 'frdForwardingRule'
-> Text -- ^ 'frdRegion'
-> ForwardingRulesDelete
forwardingRulesDelete pFrdProject_ pFrdForwardingRule_ pFrdRegion_ =
ForwardingRulesDelete'
{ _frdRequestId = Nothing
, _frdProject = pFrdProject_
, _frdForwardingRule = pFrdForwardingRule_
, _frdRegion = pFrdRegion_
}
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
frdRequestId :: Lens' ForwardingRulesDelete (Maybe Text)
frdRequestId
= lens _frdRequestId (\ s a -> s{_frdRequestId = a})
-- | Project ID for this request.
frdProject :: Lens' ForwardingRulesDelete Text
frdProject
= lens _frdProject (\ s a -> s{_frdProject = a})
-- | Name of the ForwardingRule resource to delete.
frdForwardingRule :: Lens' ForwardingRulesDelete Text
frdForwardingRule
= lens _frdForwardingRule
(\ s a -> s{_frdForwardingRule = a})
-- | Name of the region scoping this request.
frdRegion :: Lens' ForwardingRulesDelete Text
frdRegion
= lens _frdRegion (\ s a -> s{_frdRegion = a})
instance GoogleRequest ForwardingRulesDelete where
type Rs ForwardingRulesDelete = Operation
type Scopes ForwardingRulesDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient ForwardingRulesDelete'{..}
= go _frdProject _frdRegion _frdForwardingRule
_frdRequestId
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy ForwardingRulesDeleteResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/ForwardingRules/Delete.hs | mpl-2.0 | 4,821 | 0 | 17 | 1,071 | 552 | 330 | 222 | 87 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.RegionTargetHTTPSProxies.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the specified TargetHttpsProxy resource in the specified region.
-- Gets a list of available target HTTP proxies by making a list() request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.regionTargetHttpsProxies.get@.
module Network.Google.Resource.Compute.RegionTargetHTTPSProxies.Get
(
-- * REST Resource
RegionTargetHTTPSProxiesGetResource
-- * Creating a Request
, regionTargetHTTPSProxiesGet
, RegionTargetHTTPSProxiesGet
-- * Request Lenses
, rthpgProject
, rthpgRegion
, rthpgTargetHTTPSProxy
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.regionTargetHttpsProxies.get@ method which the
-- 'RegionTargetHTTPSProxiesGet' request conforms to.
type RegionTargetHTTPSProxiesGetResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"targetHttpsProxies" :>
Capture "targetHttpsProxy" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] TargetHTTPSProxy
-- | Returns the specified TargetHttpsProxy resource in the specified region.
-- Gets a list of available target HTTP proxies by making a list() request.
--
-- /See:/ 'regionTargetHTTPSProxiesGet' smart constructor.
data RegionTargetHTTPSProxiesGet =
  RegionTargetHTTPSProxiesGet'
    { _rthpgProject :: !Text -- ^ project ID
    , _rthpgRegion :: !Text -- ^ region scoping the request
    , _rthpgTargetHTTPSProxy :: !Text -- ^ name of the TargetHttpsProxy to return
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RegionTargetHTTPSProxiesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rthpgProject'
--
-- * 'rthpgRegion'
--
-- * 'rthpgTargetHTTPSProxy'
regionTargetHTTPSProxiesGet
:: Text -- ^ 'rthpgProject'
-> Text -- ^ 'rthpgRegion'
-> Text -- ^ 'rthpgTargetHTTPSProxy'
-> RegionTargetHTTPSProxiesGet
regionTargetHTTPSProxiesGet pRthpgProject_ pRthpgRegion_ pRthpgTargetHTTPSProxy_ =
RegionTargetHTTPSProxiesGet'
{ _rthpgProject = pRthpgProject_
, _rthpgRegion = pRthpgRegion_
, _rthpgTargetHTTPSProxy = pRthpgTargetHTTPSProxy_
}
-- | Project ID for this request.
rthpgProject :: Lens' RegionTargetHTTPSProxiesGet Text
rthpgProject
= lens _rthpgProject (\ s a -> s{_rthpgProject = a})
-- | Name of the region scoping this request.
rthpgRegion :: Lens' RegionTargetHTTPSProxiesGet Text
rthpgRegion
= lens _rthpgRegion (\ s a -> s{_rthpgRegion = a})
-- | Name of the TargetHttpsProxy resource to return.
rthpgTargetHTTPSProxy :: Lens' RegionTargetHTTPSProxiesGet Text
rthpgTargetHTTPSProxy
= lens _rthpgTargetHTTPSProxy
(\ s a -> s{_rthpgTargetHTTPSProxy = a})
instance GoogleRequest RegionTargetHTTPSProxiesGet
where
type Rs RegionTargetHTTPSProxiesGet =
TargetHTTPSProxy
type Scopes RegionTargetHTTPSProxiesGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient RegionTargetHTTPSProxiesGet'{..}
= go _rthpgProject _rthpgRegion
_rthpgTargetHTTPSProxy
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy RegionTargetHTTPSProxiesGetResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/RegionTargetHTTPSProxies/Get.hs | mpl-2.0 | 4,401 | 0 | 16 | 973 | 468 | 280 | 188 | 81 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Mirror.Subscriptions.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a new subscription.
--
-- /See:/ <https://developers.google.com/glass Google Mirror API Reference> for @mirror.subscriptions.insert@.
module Network.Google.Resource.Mirror.Subscriptions.Insert
(
-- * REST Resource
SubscriptionsInsertResource
-- * Creating a Request
, subscriptionsInsert
, SubscriptionsInsert
-- * Request Lenses
, siPayload
) where
import Network.Google.Mirror.Types
import Network.Google.Prelude
-- | A resource alias for @mirror.subscriptions.insert@ method which the
-- 'SubscriptionsInsert' request conforms to.
type SubscriptionsInsertResource =
"mirror" :>
"v1" :>
"subscriptions" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Subscription :>
Post '[JSON] Subscription
-- | Creates a new subscription.
--
-- /See:/ 'subscriptionsInsert' smart constructor.
newtype SubscriptionsInsert = SubscriptionsInsert'
    { _siPayload :: Subscription -- ^ the subscription to create (request body)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SubscriptionsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'siPayload'
subscriptionsInsert
:: Subscription -- ^ 'siPayload'
-> SubscriptionsInsert
subscriptionsInsert pSiPayload_ =
SubscriptionsInsert'
{ _siPayload = pSiPayload_
}
-- | Multipart request metadata.
siPayload :: Lens' SubscriptionsInsert Subscription
siPayload
= lens _siPayload (\ s a -> s{_siPayload = a})
instance GoogleRequest SubscriptionsInsert where
type Rs SubscriptionsInsert = Subscription
type Scopes SubscriptionsInsert =
'["https://www.googleapis.com/auth/glass.timeline"]
requestClient SubscriptionsInsert'{..}
= go (Just AltJSON) _siPayload mirrorService
where go
= buildClient
(Proxy :: Proxy SubscriptionsInsertResource)
mempty
| rueshyna/gogol | gogol-mirror/gen/Network/Google/Resource/Mirror/Subscriptions/Insert.hs | mpl-2.0 | 2,793 | 0 | 12 | 621 | 304 | 187 | 117 | 49 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.CloudPrivateCatalog
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Enable cloud users to discover enterprise catalogs and products in their
-- organizations.
--
-- /See:/ <https://cloud.google.com/private-catalog/ Cloud Private Catalog API Reference>
module Network.Google.CloudPrivateCatalog
(
-- * Service Configuration
cloudPrivateCatalogService
-- * OAuth Scopes
, cloudPlatformScope
-- * API Declaration
, CloudPrivateCatalogAPI
-- * Resources
-- ** cloudprivatecatalog.folders.catalogs.search
, module Network.Google.Resource.CloudPrivateCatalog.Folders.Catalogs.Search
-- ** cloudprivatecatalog.folders.products.search
, module Network.Google.Resource.CloudPrivateCatalog.Folders.Products.Search
-- ** cloudprivatecatalog.folders.versions.search
, module Network.Google.Resource.CloudPrivateCatalog.Folders.Versions.Search
-- ** cloudprivatecatalog.organizations.catalogs.search
, module Network.Google.Resource.CloudPrivateCatalog.Organizations.Catalogs.Search
-- ** cloudprivatecatalog.organizations.products.search
, module Network.Google.Resource.CloudPrivateCatalog.Organizations.Products.Search
-- ** cloudprivatecatalog.organizations.versions.search
, module Network.Google.Resource.CloudPrivateCatalog.Organizations.Versions.Search
-- ** cloudprivatecatalog.projects.catalogs.search
, module Network.Google.Resource.CloudPrivateCatalog.Projects.Catalogs.Search
-- ** cloudprivatecatalog.projects.products.search
, module Network.Google.Resource.CloudPrivateCatalog.Projects.Products.Search
-- ** cloudprivatecatalog.projects.versions.search
, module Network.Google.Resource.CloudPrivateCatalog.Projects.Versions.Search
-- * Types
-- ** GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse
, GoogleCloudPrivatecatalogV1beta1SearchCatalogsResponse
, googleCloudPrivatecatalogV1beta1SearchCatalogsResponse
, gcpvscrNextPageToken
, gcpvscrCatalogs
-- ** GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata
, GoogleCloudPrivatecatalogV1beta1ProductDisplayMetadata
, googleCloudPrivatecatalogV1beta1ProductDisplayMetadata
, gcpvpdmAddtional
-- ** GoogleCloudPrivatecatalogV1beta1Catalog
, GoogleCloudPrivatecatalogV1beta1Catalog
, googleCloudPrivatecatalogV1beta1Catalog
, gcpvcUpdateTime
, gcpvcName
, gcpvcDisplayName
, gcpvcDescription
, gcpvcCreateTime
-- ** GoogleCloudPrivatecatalogV1beta1SearchProductsResponse
, GoogleCloudPrivatecatalogV1beta1SearchProductsResponse
, googleCloudPrivatecatalogV1beta1SearchProductsResponse
, gcpvsprNextPageToken
, gcpvsprProducts
-- ** Xgafv
, Xgafv (..)
-- ** GoogleCloudPrivatecatalogV1beta1VersionAsset
, GoogleCloudPrivatecatalogV1beta1VersionAsset
, googleCloudPrivatecatalogV1beta1VersionAsset
, gcpvvaAddtional
-- ** GoogleCloudPrivatecatalogV1beta1Version
, GoogleCloudPrivatecatalogV1beta1Version
, googleCloudPrivatecatalogV1beta1Version
, gcpvvAsset
, gcpvvUpdateTime
, gcpvvName
, gcpvvDescription
, gcpvvCreateTime
-- ** GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse
, GoogleCloudPrivatecatalogV1beta1SearchVersionsResponse
, googleCloudPrivatecatalogV1beta1SearchVersionsResponse
, gcpvsvrNextPageToken
, gcpvsvrVersions
-- ** GoogleCloudPrivatecatalogV1beta1Product
, GoogleCloudPrivatecatalogV1beta1Product
, googleCloudPrivatecatalogV1beta1Product
, gcpvpIconURI
, gcpvpUpdateTime
, gcpvpDisplayMetadata
, gcpvpName
, gcpvpAssetType
, gcpvpCreateTime
) where
import Network.Google.Prelude
import Network.Google.CloudPrivateCatalog.Types
import Network.Google.Resource.CloudPrivateCatalog.Folders.Catalogs.Search
import Network.Google.Resource.CloudPrivateCatalog.Folders.Products.Search
import Network.Google.Resource.CloudPrivateCatalog.Folders.Versions.Search
import Network.Google.Resource.CloudPrivateCatalog.Organizations.Catalogs.Search
import Network.Google.Resource.CloudPrivateCatalog.Organizations.Products.Search
import Network.Google.Resource.CloudPrivateCatalog.Organizations.Versions.Search
import Network.Google.Resource.CloudPrivateCatalog.Projects.Catalogs.Search
import Network.Google.Resource.CloudPrivateCatalog.Projects.Products.Search
import Network.Google.Resource.CloudPrivateCatalog.Projects.Versions.Search
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Cloud Private Catalog API service.
type CloudPrivateCatalogAPI =
     -- catalog/product/version search scoped to folders…
     FoldersCatalogsSearchResource :<|>
       FoldersVersionsSearchResource
       :<|> FoldersProductsSearchResource
       -- …to organizations…
       :<|> OrganizationsCatalogsSearchResource
       :<|> OrganizationsVersionsSearchResource
       :<|> OrganizationsProductsSearchResource
       -- …and to projects.
       :<|> ProjectsCatalogsSearchResource
       :<|> ProjectsVersionsSearchResource
       :<|> ProjectsProductsSearchResource
module Test where
f = Just.let x = id in x
| metaborg/jsglr | org.spoofax.jsglr/tests-offside/terms/doaitse/error8.hs | apache-2.0 | 43 | 1 | 6 | 11 | 22 | 12 | 10 | 2 | 1 |
module Step_2_5 where
import Text.Html
-- | The whole page: a title header, plus a body holding the numbered poem
-- and the rendered to-do list.
page = thehtml <<
    [ header << (thetitle << "Output")
    , body <<
        [ h1 << "A poem from last time:"
        , pre << poemText
        , h1 << "A to-do list:"
        , thediv << toDoHtml
        ]
    ]
-- This time we've rewritten the lineNumbers function using "pattern matching":
-- | The poem source text; each literal already carries its trailing newline.
poem :: String
poem = "`Twas brillig, and the slithy toves\n"
    ++ "Did gyre and gimble in the wabe;\n"
    ++ "All mimsy were the borogoves,\n"
    ++ "And the mome raths outgrabe.\n"
-- | Prefix a line with its line number, e.g. @oneNumber 3 "x"@ is @"3: x"@.
oneNumber :: Int -> String -> String
oneNumber lineNo text = concat [show lineNo, ": ", text]
lineNumbers :: Int -> [String] -> [String]
-- Base case: no lines left to number.
lineNumbers n [] = []
-- Number the head with n, then recurse on the tail with n+1.
lineNumbers n (x:xs) = oneNumber n x : lineNumbers (n+1) xs
-- Notice that instead of a single equation (with the function name) =, there
-- are now two. Each repeats the arguments, but with patterns. Like guards, the
-- first equation where all the patterns match will be used.
-- Notice something else: The last expression changed:
-- Both variants produce the same list; they differ only in how the head is
-- attached (singleton-plus-append vs. a single cons), per the text below.
oldWay n (x:xs) = [oneNumber n x] ++ lineNumbers (n+1) xs
newWay n (x:xs) = oneNumber n x : lineNumbers (n+1) xs
-- Rather than build a one element list (in the brackets) and concatentate (++)
-- it with the rest of the processing, the newWay just constructs (:) the new
-- result list with the element, and the list that is the recursive result.
poemText :: String
-- Split the poem into lines, number them starting at 1, then rejoin.
poemText = unlines $ lineNumbers 1 $ lines poem
-- NEXT
-- Here's a version of renderToDo written with guards.
-- Try converting it to using patterns:
-- | The items rendered into the to-do list below.
toDoItems :: [String] -- a list of strings
toDoItems = ["Pick up avocados", "Make snacks", "Clean house", "Have party"]
-- | Render each to-do item as an HTML list item.
-- (The surrounding text asks the reader to convert this from guards to
-- pattern matching, so the guard style is deliberately kept intact.)
renderToDo :: [String] -> [Html]
renderToDo ts
  | ts == [] = []
  | otherwise = [li << head ts] ++ renderToDo (tail ts)
toDoHtml :: Html
-- Wrap the rendered items in an unordered list.
toDoHtml = ulist << renderToDo toDoItems
| mzero/barley | seed/Chapter2/Step_2_5.hs | apache-2.0 | 1,836 | 0 | 9 | 414 | 420 | 228 | 192 | 31 | 1 |
{-# language PolyKinds, TypeFamilies, GADTs,
UndecidableInstances, ConstraintKinds,
StandaloneDeriving
#-}
module Language.SIL.Syntax where
import qualified Data.Coerce
import Names
import Language.Common.Label
import Language.CoreLang
-- | Monads that can map SIL module identifiers to core-expression names
-- (a name-translation environment used during desugaring).
class (CoreLang lang, Monad m) => MapNameMonad m lang where
  -- | Look up the core name already associated with a module identifier.
  lookupIdM :: IdM lang -> m (Name (CoreExpr lang))
  -- | Run the continuation with a core name bound for the identifier.
  extendIdM :: IdM lang -> (Name (CoreExpr lang) -> m a) -> m a
-- | Desugar SIL
--
-- The SIL language of abstract signatures and structures is just a
-- convenient syntactic sugar for certain expressions of a language
-- that is at least as powerful as System F (for the generative
-- fragment, or System Fω with applicative functors). This is the
-- class that witnesses that desugaring.
class CoreLang lang => DesugarSIL lang where
  -- | Translate a semantic SIL module into a core-language expression.
  desugarMod :: (MapNameMonad m lang, Fresh m) => Mod lang -> m (CoreExpr lang)
-- | Concrete semantic signatures
--
-- Σ Each SIL module may be ascribed a semantic signature of the form
-- Ξ ≙ ∃αs.Σ where Σ is a concrete semantic signature.
data Σ lang =
    -- | [τ] concrete signature for a module containing a single value expression
    ValΣ (CoreType lang)
    -- | [=τ:κ] concrete signature for a module containing a single type definition.
    -- In the case of sealed modules where an abstract type is hidden this will be
    -- a type variable bound in an outer scope. For manifest types it will be some type expression.
  | TyΣ (CoreType lang) (CoreKind lang)
    -- | [=Ξ] single manifest signature definition. In SIL, modules may contain signature definitions.
  | SigΣ (Ξ lang)
    -- | {⋯ ℓ : Σ, ⋯} a module containing several named bindings.
  | RecordΣ (ModuleContent (Σ lang))
    -- | ∀αs.Σ₁ → Ξ a generative functor signature: the functor takes
    -- several types and a module with concrete signature Σ₁ (which may
    -- mention αs) and returns a module Ξ=∃βs.Σ₂ where Σ₂ may mention αs
    -- and βs. Thus each application of the functor produces new
    -- distinct abstract types βs while allowing the result to depend on
    -- the abstract types of the argument Σ₁.
  | FunΣ (Bind (TyVarBinds lang) (Σ lang, Ξ lang))
  deriving (Generic)
-- | Abstract semantic signature Ξ = ∃αs.Σ: a concrete signature under a
-- telescope of abstract (kinded) type variables.
newtype Ξ lang =
  Ξ (Bind (TyVarBinds lang) (Σ lang))
  deriving (Generic)
-- | Name of a core-language type variable.
type TyVar lang = Name (CoreType lang)
-- pattern: a telescope of type variables, each carrying its embedded kind
type TyVarBinds lang = [(TyVar lang, Embed (CoreKind lang))]
-- | Labelled components of a module (field label paired with its payload).
type ModuleContent element = [(Label, element)]
-- | bound module identifier
type IdM lang = Name (Mod lang)
-- | Semantic module expressions
data Mod lang =
    -- | X module identifier
    VarM (IdM lang)
    -- | [e] a module containing a single expression
  | ValM (CoreExpr lang)
    -- | [τ:κ] a module containing a single type definition τ
  | TyM (CoreType lang) (CoreKind lang)
    -- | [Ξ] a module containing a single signature definition Ξ
  | SigM (Ξ lang)
    -- | {⋯, ℓ = M, ⋯} a module containing several named bindings
  | RecordM (ModuleContent (Mod lang))
    -- | M.ℓ₁.ℓ₂… projection of a named field from a composite module
  | ProjM (Mod lang) [Label]
    -- | Λ αs:κs . λ X : Σ . pack ⟨τs, M⟩ as ∃βs:κ′s.Σ′ generative functor construction
  | LamM (Bind (Rebind (TyVarBinds lang) ((IdM lang), Embed (Σ lang))) (PackMod lang))
    -- | F [τs] M functor application
  | AppM (Mod lang) [CoreType lang] (Mod lang)
    -- | unpack ⟨αs, X⟩ = M in M' abstract module unpacking
  | UnpackM (Bind ([TyVar lang], IdM lang, Embed (Mod lang)) (Mod lang))
    -- | pack ⟨τs, M'⟩ as Ξ sealing at an abstract signature
  | PackM (PackMod lang)
    -- | ¢@M module subsignature coercion - ¢ is a witness for the
    -- subsignature judgment αs ⊢ Σ ≤ Ξ ⇝ ¢ (these could all be
    -- expressed in terms of packing unpacking record construction and
    -- projection etc, but they tend to produce a lot of boring
    -- administrative redices, so it's better to keep them somewhat
    -- abstract and delay desugaring)
  | CoerM (SubsigCoercion lang) (Mod lang)
  deriving (Generic)
-- | Witnesses for the subsignature judgment, kept abstract to delay
-- desugaring (see the note on 'CoerM').
data SubsigCoercion lang =
    -- | The identity coercion at Σ.
    IdCo (Σ lang)
    -- TODO: more here
  deriving (Generic)
-- | pack ⟨τs, M⟩ as Ξ -- a module sealed at an abstract signature.
data PackMod lang =
  PackMod [CoreType lang] (Mod lang) (Ξ lang)
  deriving (Generic)
-- Standalone Show instances: each depends on Show for the core language's
-- type/kind (and, where modules embed expressions, expression) representations.
deriving instance (Show (CoreType lang), Show (CoreKind lang)) => Show (Σ lang)
deriving instance (Show (CoreType lang), Show (CoreKind lang)) => Show (Ξ lang)
deriving instance (Show (CoreType lang), Show (CoreKind lang)) => Show (SubsigCoercion lang)
deriving instance (Show (CoreType lang), Show (CoreKind lang), Show (CoreExpr lang)) => Show (Mod lang)
deriving instance (Show (CoreType lang), Show (CoreKind lang), Show (CoreExpr lang)) => Show (PackMod lang)
-- Alpha instances (binding-aware equality/freshening) for the semantic types;
-- method implementations come from the Generic defaults.
instance (CoreLang lang, Typeable (CoreKind lang), Typeable (CoreType lang),
          Alpha (CoreKind lang), Alpha (CoreType lang))
         => Alpha (Σ lang)
instance (CoreLang lang, Typeable (CoreKind lang), Typeable (CoreType lang),
          Alpha (CoreKind lang), Alpha (CoreType lang))
         => Alpha (Ξ lang)
instance (CoreLang lang, Typeable (CoreKind lang), Typeable (CoreType lang),
          Typeable (Mod lang),
          Alpha (CoreKind lang), Alpha (CoreType lang), Alpha (CoreExpr lang))
         => Alpha (Mod lang)
instance (CoreLang lang, Typeable (CoreKind lang), Typeable (CoreType lang),
          Typeable (Mod lang),
          Alpha (CoreKind lang), Alpha (CoreType lang), Alpha (CoreExpr lang))
         => Alpha (PackMod lang)
instance (CoreLang lang, Typeable (CoreKind lang), Typeable (CoreType lang),
          Typeable (Mod lang),
          Alpha (CoreKind lang), Alpha (CoreType lang), Alpha (CoreExpr lang))
         => Alpha (SubsigCoercion lang)
-- Capture-avoiding substitution of core types into signatures; method
-- implementations come from the Generic defaults.
instance (CoreLang lang,
          Alpha (CoreKind lang), Alpha (CoreType lang),
          Subst (CoreType lang) (CoreType lang),
          Subst (CoreType lang) (CoreKind lang),
          Generic (CoreType lang),
          Typeable (CoreKind lang), Typeable (CoreType lang)
         )
         => Subst (CoreType lang) (Ξ lang)
instance (CoreLang lang,
          Alpha (CoreKind lang), Alpha (CoreType lang),
          Subst (CoreType lang) (CoreType lang),
          Subst (CoreType lang) (CoreKind lang),
          Generic (CoreType lang),
          Typeable (CoreKind lang), Typeable (CoreType lang)
         )
         => Subst (CoreType lang) (Σ lang)
-- | Build an abstract signature ∃αs.Σ, wrapping each kind annotation in
-- 'Embed' so it can live under the binder.
mkΞ :: (CoreLang lang, Typeable (CoreKind lang), Typeable (CoreType lang),
        Alpha (CoreKind lang), Alpha (CoreType lang))
    => [(TyVar lang, CoreKind lang)]
    -> Σ lang
    -> Ξ lang
mkΞ ακs = Ξ . bind (embedMap ακs)
  where
    -- Zero-cost: Embed is a newtype, so coerce does the rewrapping.
    embedMap :: [(a, b)] -> [(a, Embed b)]
    embedMap = Data.Coerce.coerce
-- | Open an abstract signature: freshen its bound type variables and strip
-- the 'Embed' wrappers from the kinds.
unΞ :: (CoreLang lang, Typeable (CoreKind lang), Typeable (CoreType lang),
        Alpha (CoreKind lang), Alpha (CoreType lang),
        Fresh m)
    => Ξ lang
    -> m ([(TyVar lang, CoreKind lang)], Σ lang)
unΞ (Ξ bnd) = do
  (ακs, σ) <- unbind bnd
  return (unembedMap ακs, σ)
-- | Strip the 'Embed' wrapper from each second component (zero-cost coerce).
unembedMap :: [(a, Embed b)] -> [(a, b)]
unembedMap = Data.Coerce.coerce
-- | Inverse of 'unembedMap': wrap each second component in 'Embed'.
embedMap :: [(a, b)] -> [(a, Embed b)]
embedMap = Data.Coerce.coerce
-- | Smart constructor for @unpack ⟨αs, X⟩ = m1 in body@; the body is
-- supplied by partial application.
unpackM :: (CoreLang lang, Typeable (CoreKind lang), Typeable (CoreType lang), Typeable (Mod lang),
            Alpha (CoreKind lang), Alpha (CoreType lang), Alpha (CoreExpr lang))
        => [(TyVar lang)] -> IdM lang -> Mod lang -> Mod lang -> Mod lang
unpackM αs x m1 = UnpackM . bind (αs, x, embed m1)
-- | Smart constructor for @pack ⟨τs, m⟩ as ξ@.
packM :: [CoreType lang] -> (Mod lang) -> (Ξ lang) -> Mod lang
packM τs m ξ = PackM (PackMod τs m ξ)
| lambdageek/emile | src/Language/SIL/Syntax.hs | bsd-2-clause | 7,579 | 8 | 14 | 1,674 | 2,270 | 1,197 | 1,073 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
A representation of binary relations as pairs of left-image and
right-image functions. -}
module BRC.BinRel where
import BRC.SetOf (SetOf)
-- NOTE(review): this uses a datatype context (Haskell 98 / DatatypeContexts),
-- which is deprecated in modern GHC; kept as-is to avoid changing the API.
data (SetOf e s) => BinRel e s =
  BinRel {
    contains :: e -> e -> Bool,
    -- ^ A simple membership test. This must be consistent with the left image
    -- and right image functions: @contains rho x y@ is @True@ if and only if
    --
    -- * @y@ is an element of @rightOf rho x@
    --
    -- * @x@ is an element of @leftOf rho y@
    leftOf :: e -> s,
    -- ^ The left image function. @leftOf rel x@ is the set of all @y@ for which @y rho x@.
    rightOf :: e -> s
    -- ^ The right image function. @rightOf rel x@ is the set of all @y@ for which @x rho y@.
  }
| kcharter/brc-solver | BRC/BinRel.hs | bsd-2-clause | 765 | 0 | 10 | 203 | 82 | 53 | 29 | 8 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-| Implementation of the Ganeti maintenenace server.
-}
{-
Copyright (C) 2015 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.MaintD.Server
( options
, main
, checkMain
, prepMain
) where
import Control.Applicative ((<|>))
import Control.Concurrent (forkIO)
import Control.Exception.Lifted (bracket)
import Control.Monad (forever, void, unless, when, liftM)
import Control.Monad.IO.Class (liftIO)
import Data.IORef (IORef, newIORef, readIORef)
import qualified Data.Set as Set
import Snap.Core (Snap, method, Method(GET), ifTop, dir, route)
import Snap.Http.Server (httpServe)
import Snap.Http.Server.Config (Config)
import System.IO.Error (tryIOError)
import System.Time (getClockTime)
import qualified Text.JSON as J
import Ganeti.BasicTypes ( GenericResult(..), ResultT, runResultT, mkResultT
, withErrorT, isBad)
import qualified Ganeti.Constants as C
import Ganeti.Daemon ( OptType, CheckFn, PrepFn, MainFn, oDebug
, oNoVoting, oYesDoIt, oPort, oBindAddress, oNoDaemonize)
import Ganeti.Daemon.Utils (handleMasterVerificationOptions)
import qualified Ganeti.HTools.Backend.Luxi as Luxi
import Ganeti.HTools.Loader (ClusterData(..), mergeData, checkData)
import Ganeti.Jobs (waitForJobs)
import Ganeti.Logging.Lifted
import qualified Ganeti.Luxi as L
import Ganeti.MaintD.Autorepairs (harepTasks)
import Ganeti.MaintD.Balance (balanceTask)
import Ganeti.MaintD.CleanupIncidents (cleanupIncidents)
import Ganeti.MaintD.CollectIncidents (collectIncidents)
import Ganeti.MaintD.FailIncident (failIncident)
import Ganeti.MaintD.HandleIncidents (handleIncidents)
import Ganeti.MaintD.MemoryState
import qualified Ganeti.Path as Path
import Ganeti.Runtime (GanetiDaemon(GanetiMaintd))
import Ganeti.Types (JobId(..), JobStatus(..))
import Ganeti.Utils (threadDelaySeconds)
import Ganeti.Utils.Http (httpConfFromOpts, plainJSON, error404)
import Ganeti.WConfd.Client ( runNewWConfdClient, maintenanceRoundDelay
, maintenanceBalancing)
-- | Command-line options accepted by the maintenance daemon.
options :: [OptType]
options =
  [ oNoDaemonize
  , oDebug
  , oPort C.defaultMaintdPort
  , oBindAddress
  , oNoVoting
  , oYesDoIt
  ]
-- | Type alias for checkMain results.
type CheckResult = ()
-- | Type alias for prepMain results (the configured Snap HTTP server).
type PrepResult = Config Snap ()
-- | Load cluster data
--
-- At the moment, only the static data is fetched via luxi;
-- once we support load-based balancing in maintd as well,
-- we also need to query the MonDs for the load data.
loadClusterData :: ResultT String IO ClusterData
loadClusterData = do
  now <- liftIO getClockTime
  socket <- liftIO Path.defaultQuerySocket
  -- Luxi I/O may throw; capture IO errors and turn them into Bad results.
  either_inp <- liftIO . tryIOError $ Luxi.loadData False socket
  input_data <- mkResultT $ case either_inp of
    Left e -> do
      let msg = show e
      logNotice $ "Couldn't read data from luxid: " ++ msg
      return $ Bad msg
    Right r -> return r
  cdata <- mkResultT . return $ mergeData [] [] [] [] now input_data
  -- checkData returns diagnostics plus a corrected node list.
  let (msgs, nl) = checkData (cdNodes cdata) (cdInstances cdata)
  unless (null msgs) . logDebug $ "Cluster data inconsistencies: " ++ show msgs
  return $ cdata { cdNodes = nl }
-- | Perform one round of maintenance: wait out the configured delay,
-- reconcile last round's jobs, refresh cluster data, run incident handling
-- and self-repair, and optionally rebalance the unaffected nodes.
maintenance :: IORef MemoryState -> ResultT String IO ()
maintenance memstate = do
  delay <- withErrorT show $ runNewWConfdClient maintenanceRoundDelay
  liftIO $ threadDelaySeconds delay
  -- Wait for the jobs submitted in the previous round and record failures.
  oldjobs <- getJobs memstate
  logDebug $ "Jobs submitted in the last round: "
             ++ show (map fromJobId oldjobs)
  luxiSocket <- liftIO Path.defaultQuerySocket
  jobresults <- bracket (mkResultT . liftM (either (Bad . show) Ok)
                         . tryIOError $ L.getLuxiClient luxiSocket)
                        (liftIO . L.closeClient)
                $ mkResultT . waitForJobs oldjobs
  let failedjobs = map fst $ filter ((/=) JOB_STATUS_SUCCESS . snd) jobresults
  unless (null failedjobs) $ do
    logInfo . (++) "Failed jobs: " . show $ map fromJobId failedjobs
    mapM_ (failIncident memstate) failedjobs
  unless (null oldjobs)
    . liftIO $ clearJobs memstate
  logDebug "New round of maintenance started"
  cData <- loadClusterData
  let il = cdInstances cData
      nl = cdNodes cData
      gl = cdGroups cData
  -- Node-health bookkeeping, then incident handling and self-repair.
  cleanupIncidents memstate nl
  collectIncidents memstate nl
  nidxs <- handleIncidents memstate (gl, nl, il)
  (nidxs', jobs) <- harepTasks (nl, il) nidxs
  unless (null jobs)
    . liftIO $ appendJobs memstate jobs
  logDebug $ "Nodes unaffected by harep " ++ show (Set.toList nidxs')
             ++ ", jobs submitted " ++ show (map fromJobId jobs)
  (bal, thresh) <- withErrorT show $ runNewWConfdClient maintenanceBalancing
  when (bal && not (Set.null nidxs')) $ do
    logDebug $ "Will balance unaffected nodes, threshold " ++ show thresh
    -- NOTE(review): balanceTask receives the pre-harep set 'nidxs', not the
    -- post-harep 'nidxs'' used in the guard above -- confirm this is intended.
    jobs' <- balanceTask memstate (nl, il) nidxs thresh
    logDebug $ "Balancing jobs submitted: " ++ show (map fromJobId jobs')
    unless (null jobs')
      . liftIO $ appendJobs memstate jobs'
-- | Expose a projection of the in-memory state as a JSON response.
exposeState :: J.JSON a => (MemoryState -> a) -> IORef MemoryState -> Snap ()
exposeState selector ref = do
  state <- liftIO $ readIORef ref
  plainJSON $ selector state
-- | The information to serve via HTTP: "/" reports the API versions,
-- "/1/..." exposes jobs, evacuated instances, and incident status.
httpInterface :: IORef MemoryState -> Snap ()
httpInterface memstate =
  ifTop (method GET $ plainJSON [1 :: Int])
  <|> dir "1" (ifTop (plainJSON J.JSNull)
               <|> route [ ("jobs", exposeState msJobs memstate)
                         , ("evacuated", exposeState msEvacuated memstate)
                         , ("status", exposeState msIncidents memstate)
                         ])
  <|> error404
-- | Check function for maintd (verifies the master-voting options).
checkMain :: CheckFn CheckResult
checkMain = handleMasterVerificationOptions
-- | Prepare function for maintd: build the HTTP server configuration.
prepMain :: PrepFn CheckResult PrepResult
prepMain opts _ = httpConfFromOpts GanetiMaintd opts
-- | Main function: run the maintenance loop in the background and serve
-- the HTTP interface on the main thread.
main :: MainFn CheckResult PrepResult
main _ _ httpConf = do
  memstate <- newIORef emptyMemoryState
  void . forkIO . forever $ do
    res <- runResultT $ maintenance memstate
    -- Failed rounds are logged at a higher level and trigger a back-off.
    (if isBad res then logInfo else logDebug)
      $ "Maintenance round result is " ++ show res
    when (isBad res) $ do
      logDebug "Backing off after a round with internal errors"
      threadDelaySeconds C.maintdDefaultRoundDelay
  httpServe httpConf $ httpInterface memstate
| leshchevds/ganeti | src/Ganeti/MaintD/Server.hs | bsd-2-clause | 7,716 | 0 | 19 | 1,546 | 1,754 | 928 | 826 | 134 | 2 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Home where
import Import
import System.Random (getStdRandom, randomR, StdGen)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import Control.Monad (replicateM)
import Control.Monad.Trans.State
import Data.Time (getCurrentTime)
import Crypto.Hash.SHA512 (hash)
import qualified Data.ByteString as S
import Data.Bits (shiftR, (.&.))
import Yesod.Auth.GoogleEmail2
-- | Form for creating a 'Prediction'. The @token@ argument is the CSRF
-- token Html that Yesod passes to every 'Form'.
-- NOTE(review): hamlet nesting below was reconstructed from flattened
-- source -- verify the widget renders as intended.
predictForm :: Form Prediction
predictForm token = do
    maid <- fmap entityKey <$> lift maybeAuth
    (nameRes, nameView) <- mreq textField "Name"
        { fsAttrs = [("placeholder", "your name"), ("class", "name")]
        } Nothing
    (contentRes, contentView) <- mreq textareaField "Content" Nothing
    (errs, res) <-
        case (,) <$> nameRes <*> contentRes of
            FormSuccess (name, content) -> do
                -- Two independent 10-letter uppercase slugs: the public
                -- (shareable) and private (owner-only) identifiers.
                let go :: State StdGen Text
                    go = T.pack <$> replicateM 10 goC
                    goC :: State StdGen Char
                    goC = state $ randomR ('A', 'Z')
                (public, private) <- liftIO
                    $ getStdRandom
                    $ runState
                    $ (,) <$> go <*> go
                now <- liftIO getCurrentTime
                return ([], FormSuccess $ Prediction name content public private now maid)
            FormFailure errs -> return (errs, FormFailure errs)
            FormMissing -> return ([], FormMissing)
    let widget = [whamlet|
$if not $ null errs
    <p>There were some errors with your submission:
    <ul>
        $forall err <- errs
            <li>#{err}
<div .row>
    <div .span6 .offset2>
        <form method=post>
            <p>I, ^{fvInput nameView}, predict that:
            <p>
                ^{fvInput contentView}
            <p>
                \#{token}
                <input .btn type=submit value="Make my prediction">
|]
    return (res, widget)
-- This is a handler function for the GET request method on the HomeR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
-- | Home page: show the prediction form; on a successful submission, store
-- it and redirect to the private page.
getHomeR :: Handler Html
getHomeR = do
    -- muser is unused here but in scope for the homepage template below.
    muser <- maybeAuth
    ((result, formWidget), _) <- runFormPost predictForm
    case result of
        FormSuccess predict -> do
            _ <- runDB $ insert predict
            setMessage "Your prediction has been made"
            redirect $ PrivateR $ predictionPrivate predict
        _ -> defaultLayout $ do
            setTitle "I predict that..."
            $(widgetFile "homepage")
-- | POST re-runs the GET handler, which processes the submitted form.
postHomeR :: Handler Html
postHomeR = getHomeR
-- | Owner-facing page, looked up by the private slug (404 if unknown).
getPrivateR :: Text -> Handler Html
getPrivateR private = do
    Entity _ predict <- runDB $ getBy404 $ UniquePrivate private
    defaultLayout $ do
        setTitle "Private prediction page"
        $(widgetFile "private")
-- | Public page: exposes only the SHA-512 digest of the prediction text,
-- so the content stays secret until the owner reveals it.
getPublicR :: Text -> Handler Html
getPublicR public = do
    Entity _ predict <- runDB $ getBy404 $ UniquePublic public
    let Textarea raw = predictionContent predict
    let sha512 = decodeUtf8 $ toHex $ hash $ encodeUtf8 raw
    defaultLayout $ do
        setTitle "Public prediction page"
        $(widgetFile "public")
-- | Render a ByteString as lowercase hexadecimal, two digits per byte.
toHex :: S.ByteString -> S.ByteString
toHex bs0 =
    fst $ S.unfoldrN (S.length bs0 * 2) go (Left bs0)
  where
    -- Unfold state: Left = emit the high nibble of the next byte;
    -- Right = the low-nibble digit is still pending.
    go (Left bs) =
        case S.uncons bs of
            Nothing -> Nothing
            Just (w, bs') ->
                let w1 = w `shiftR` 4
                    w2 = w .&. 15
                    c1 = toC w1
                    c2 = toC w2
                 in Just (c1, Right (c2, bs'))
    go (Right (c, bs)) = Just (c, Left bs)
    -- Map a nibble to ASCII: 0-9 -> '0'..'9' (+48), 10-15 -> 'a'..'f' (+87).
    toC w
        | w < 10 = w + 48
        | otherwise = w + 87
-- | List the logged-in user's predictions, newest first.
getMyPredictionsR :: Handler Html
getMyPredictionsR = do
    Entity uid _user <- requireAuth
    predictions <- map entityVal <$> runDB (selectList [PredictionUser ==. Just uid] [Desc PredictionCreated])
    defaultLayout $ do
        setTitle "My Predictions"
        $(widgetFile "my-predictions")
  where
    -- Truncated body text for the listing (used by the template).
    predictionSummary = ellipsis 100 . unTextarea . predictionContent
    ellipsis l t
        | T.length t > l = T.take (l - 3) t `T.append` "..."
        | otherwise = t
| snoyberg/whosawthatcoming | Handler/Home.hs | bsd-2-clause | 4,377 | 0 | 18 | 1,362 | 1,153 | 584 | 569 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Application.YesodCRUD.Client.Job where
import Debug.Trace
import qualified Data.ByteString.Lazy.Char8 as C
import qualified Data.ByteString.Char8 as SC
import Data.Aeson.Types
import Data.Aeson.Encode as E
import Data.Aeson.Parser
import qualified Data.Attoparsec as A
import Network.HTTP.Types -- hiding (statusCode)
import Network.HTTP.Types.Status
import Network.HTTP.Conduit
import System.Directory
import System.FilePath
import Unsafe.Coerce
import Application.YesodCRUD.Client.Config
import Application.YesodCRUD.Type
import Data.UUID
import Data.UUID.V5
import qualified Data.ByteString as B
import Data.Time.Clock
type Url = String
-- | Derive a fresh name-based (v5) UUID in the URL namespace from the
-- client URL plus the current time.
nextUUID :: YesodcrudClientConfiguration -> IO UUID
nextUUID mc = do
  let c = yesodcrudClientURL mc
  t <- getCurrentTime
  return . generateNamed namespaceURL . B.unpack . SC.pack $ c ++ "/" ++ show t
-- | Create a new record on the server (POST) with a freshly minted UUID
-- and print the server's response.
-- (The original fetched the current directory into an unused binding;
-- that dead code has been dropped.)
startCreate :: YesodcrudClientConfiguration -> String -> IO ()
startCreate mc name = do
  putStrLn "job started"
  let url = yesodcrudServerURL mc
  uuid <- nextUUID mc
  let info = YesodcrudInfo { yesodcrud_uuid = uuid, yesodcrud_name = name }
  response <- yesodcrudToServer url "uploadyesodcrud" methodPost info
  putStrLn $ show response
-- | Fetch one record by its identifier (GET) and print the response.
startGet :: YesodcrudClientConfiguration -> String -> IO ()
startGet config recordId = do
  putStrLn ("get " ++ recordId)
  let serverUrl = yesodcrudServerURL config
  result <- jsonFromServer serverUrl ("yesodcrud" </> recordId) methodGet
  putStrLn (show result)
-- | Update an existing record (PUT). The identifier must parse as a UUID,
-- otherwise this aborts with 'error'.
-- (An unused current-directory binding from the original was dropped.)
startPut :: YesodcrudClientConfiguration
         -> String -- ^ yesodcrud idee
         -> String -- ^ yesodcrud name
         -> IO ()
startPut mc idee name = do
  putStrLn "job started"
  let url = yesodcrudServerURL mc
      info = case fromString idee of
               Nothing -> error "strange in startPut"
               Just idee' -> YesodcrudInfo { yesodcrud_uuid = idee', yesodcrud_name = name }
  response <- yesodcrudToServer url ("yesodcrud" </> idee) methodPut info
  putStrLn $ show response
-- | Delete one record by identifier (DELETE) and print the response.
startDelete :: YesodcrudClientConfiguration -> String -> IO ()
startDelete config recordId = do
  putStrLn "job started"
  let serverUrl = yesodcrudServerURL config
  result <- jsonFromServer serverUrl ("yesodcrud" </> recordId) methodDelete
  putStrLn (show result)
-- | Fetch the full record listing (GET) and print the response.
startGetList :: YesodcrudClientConfiguration -> IO ()
startGetList config = do
  putStrLn "getlist: "
  let serverUrl = yesodcrudServerURL config
  result <- jsonFromServer serverUrl "listyesodcrud" methodGet
  putStrLn (show result)
-- | Issue a bodyless request and decode the JSON response; any non-200
-- status is reported as a Left error string.
jsonFromServer :: Url -> String -> Method -> IO (Either String (Result Value))
jsonFromServer url api mthd = do
  request <- parseUrl (url </> api)
  withManager $ \manager -> do
    let requestjson = request {
          method = mthd,
          requestHeaders = [ ("Accept", "application/json; charset=utf-8") ] }
    r <- httpLbs requestjson manager
    if statusCode (responseStatus r) == 200
      then return . parseJson . SC.concat . C.toChunks . responseBody $ r
      else return (Left $ "status code : " ++ show (statusCode (responseStatus r)))
-- | Issue a request whose body is the JSON-encoded 'YesodcrudInfo' and
-- decode the JSON response; any non-200 status becomes a Left error string.
yesodcrudToServer :: Url -> String -> Method -> YesodcrudInfo -> IO (Either String (Result Value))
yesodcrudToServer url api mthd mi = do
  request <- parseUrl (url </> api)
  withManager $ \manager -> do
    let mijson = E.encode (toJSON mi)
        myrequestbody = RequestBodyLBS mijson
    let requestjson = request
          { method = mthd
          , requestHeaders = [ ("Accept", "application/json; charset=utf-8") ]
          , requestBody = myrequestbody }
    r <- httpLbs requestjson manager
    if statusCode (responseStatus r) == 200
      then return . parseJson . SC.concat . C.toChunks . responseBody $ r
      else return (Left $ "status code : " ++ show (statusCode (responseStatus r)))
-- | Parse a strict ByteString as JSON and convert it to the target type.
-- NOTE(review): 'trace' dumps every payload to stderr -- a debugging
-- leftover; consider removing it once the client is stable.
parseJson :: (FromJSON a) => SC.ByteString -> Either String (Result a)
parseJson bs =
  let resultjson = trace (SC.unpack bs) $ A.parse json bs
  in case resultjson of
       (A.Done rest rjson) -> return (parse parseJSON rjson)
       _ -> Left "parseJson"
module Clues
( Clues()
, isValidClues
, clues
, unClues
, clueAt
, updateClueAt
) where
import Data.Array (Array, bounds, (!), (//))
import Axis (Axis)
import Clue (Clue)
-- | Wrapper around a complete Axis-indexed array of Clues; validity is
-- enforced by the 'clues' smart constructor.
data Clues = Clues (Array Axis Clue)
  deriving (Eq, Ord, Show)
-- Checks whether the given Clues are valid: the underlying array must
-- cover the entire Axis enumeration (bounds span minBound..maxBound).
isValidClues :: Clues -> Bool
isValidClues c = bounds (unClues c) == (minBound, maxBound)
-- | Smart constructor for Clues; rejects arrays that do not cover every
-- Axis value.
clues :: Array Axis Clue -> Clues
clues arr | isValidClues c = c
          | otherwise = error "Array does not have complete bounds"
  where c = Clues arr
-- Deconstructor for Clues: expose the underlying Axis-indexed array.
unClues :: Clues -> Array Axis Clue
unClues (Clues arr) = arr
-- | Look up the Clue stored for the given Axis.
clueAt :: Clues -> Axis -> Clue
clueAt (Clues arr) axis = arr ! axis
-- | Replace the Clue stored for the given Axis, returning updated Clues.
updateClueAt :: Clues -> Axis -> Clue -> Clues
updateClueAt (Clues arr) axis clue = Clues (arr // [(axis, clue)])
| jameshales/voltorb-flip | src/Clues.hs | bsd-3-clause | 948 | 0 | 9 | 222 | 301 | 165 | 136 | -1 | -1 |
-- | Pure re-export module: forwards everything from
-- "Data.Char.Properties.CaseData" (written in explicit-brace layout).
module Data.Char.Properties.Case
(
module Data.Char.Properties.CaseData
) where
{
import Data.Char.Properties.CaseData;
}
| seereason/unicode-properties | Data/Char/Properties/Case.hs | bsd-3-clause | 138 | 0 | 5 | 27 | 28 | 21 | 7 | 4 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Constants used throughout the project.
module Stack.Constants
(buildPlanDir
,haskellModuleExts
,stackDotYaml
,stackWorkEnvVar
,stackRootEnvVar
,stackRootOptionName
,deprecatedStackRootOptionName
,inContainerEnvVar
,inNixShellEnvVar
,stackProgName
,stackProgNameUpper
,wiredInPackages
,ghcjsBootPackages
,cabalPackageName
,implicitGlobalProjectDirDeprecated
,implicitGlobalProjectDir
,defaultUserConfigPathDeprecated
,defaultUserConfigPath
,defaultGlobalConfigPathDeprecated
,defaultGlobalConfigPath
,platformVariantEnvVar
,compilerOptionsCabalFlag
,ghcColorForceFlag
,minTerminalWidth
,maxTerminalWidth
,defaultTerminalWidth
)
where
import Data.Char (toUpper)
import qualified Data.HashSet as HashSet
import Path as FL
import Stack.Prelude
import Stack.Types.Compiler
import Stack.Types.PackageName
-- | Extensions for anything that can be a Haskell module.
haskellModuleExts :: [Text]
haskellModuleExts = haskellFileExts ++ haskellPreprocessorExts
-- | Extensions used for Haskell modules. Excludes preprocessor ones.
haskellFileExts :: [Text]
haskellFileExts = ["hs", "hsc", "lhs"]
-- | Extensions for modules that are preprocessed by common preprocessors.
haskellPreprocessorExts :: [Text]
haskellPreprocessorExts = ["gc", "chs", "hsc", "x", "y", "ly", "cpphs"]
-- | Name of the 'stack' program, uppercased.
stackProgNameUpper :: String
stackProgNameUpper = map toUpper stackProgName
-- | Name of the 'stack' program.
stackProgName :: String
stackProgName = "stack"
-- | The filename used for the stack config file.
stackDotYaml :: Path Rel File
stackDotYaml = $(mkRelFile "stack.yaml")
-- | Environment variable used to override the '.stack-work' relative dir.
stackWorkEnvVar :: String
stackWorkEnvVar = "STACK_WORK"
-- | Environment variable used to override the '~/.stack' location.
stackRootEnvVar :: String
stackRootEnvVar = "STACK_ROOT"
-- | Option name for the global stack root.
stackRootOptionName :: String
stackRootOptionName = "stack-root"
-- | Deprecated option name for the global stack root.
--
-- Deprecated since stack-1.1.0.
--
-- TODO: Remove occurrences of this variable and use 'stackRootOptionName' only
-- after an appropriate deprecation period.
deprecatedStackRootOptionName :: String
deprecatedStackRootOptionName = "global-stack-root"
-- | Environment variable used to indicate stack is running in container.
inContainerEnvVar :: String
inContainerEnvVar = stackProgNameUpper ++ "_IN_CONTAINER"
-- | Environment variable used to indicate stack is running in a nix shell.
-- Although STACK_IN_NIX_EXTRA_ARGS is set under the same conditions, it can
-- legitimately be set to the empty string, so a dedicated flag is used.
-- (Rewritten to reuse 'stackProgNameUpper', matching 'inContainerEnvVar';
-- the value is unchanged.)
inNixShellEnvVar :: String
inNixShellEnvVar = stackProgNameUpper ++ "_IN_NIX_SHELL"
-- See https://downloads.haskell.org/~ghc/7.10.1/docs/html/libraries/ghc/src/Module.html#integerPackageKey
-- | Package names wired into GHC itself; parsed once into a set.
wiredInPackages :: HashSet PackageName
wiredInPackages =
  maybe (error "Parse error in wiredInPackages") HashSet.fromList mparsed
  where
    mparsed = mapM parsePackageName
      [ "ghc-prim"
      , "integer-gmp"
      , "integer-simple"
      , "base"
      , "rts"
      , "template-haskell"
      , "dph-seq"
      , "dph-par"
      , "ghc"
      , "interactive"
      ]
-- TODO: Get this unwieldy list out of here and into a datafile
-- generated by GHCJS! See https://github.com/ghcjs/ghcjs/issues/434
ghcjsBootPackages :: HashSet PackageName
ghcjsBootPackages =
  maybe (error "Parse error in ghcjsBootPackages") HashSet.fromList mparsed
  where
    -- NOTE: the original list repeated integer-gmp/pretty/primitive; the
    -- duplicates are dropped here (HashSet.fromList ignored them anyway).
    mparsed = mapM parsePackageName
      -- stage1a
      [ "array"
      , "base"
      , "binary"
      , "bytestring"
      , "containers"
      , "deepseq"
      , "integer-gmp"
      , "pretty"
      , "primitive"
      , "template-haskell"
      , "transformers"
      -- stage1b
      , "directory"
      , "filepath"
      , "old-locale"
      , "process"
      , "time"
      -- stage2
      , "async"
      , "aeson"
      , "attoparsec"
      , "case-insensitive"
      , "dlist"
      , "extensible-exceptions"
      , "hashable"
      , "mtl"
      , "old-time"
      , "parallel"
      , "scientific"
      , "stm"
      , "syb"
      , "text"
      , "unordered-containers"
      , "vector"
      ]
-- | The Cabal package name, kept here to avoid repetition and magic strings.
cabalPackageName :: PackageName
cabalPackageName =
  $(mkPackageName "Cabal")
-- | Deprecated implicit global project directory used when outside of a project.
implicitGlobalProjectDirDeprecated :: Path Abs Dir -- ^ Stack root.
                                   -> Path Abs Dir
implicitGlobalProjectDirDeprecated p =
  p </>
  $(mkRelDir "global")
-- | Implicit global project directory used when outside of a project.
-- Normally, @getImplicitGlobalProjectDir@ should be used instead.
implicitGlobalProjectDir :: Path Abs Dir -- ^ Stack root.
                         -> Path Abs Dir
implicitGlobalProjectDir p =
  p </>
  $(mkRelDir "global-project")
-- | Deprecated default user config path.
defaultUserConfigPathDeprecated :: Path Abs Dir -> Path Abs File
defaultUserConfigPathDeprecated = (</> $(mkRelFile "stack.yaml"))
-- | Default user config path.
-- Normally, @getDefaultUserConfigPath@ should be used instead.
defaultUserConfigPath :: Path Abs Dir -> Path Abs File
defaultUserConfigPath = (</> $(mkRelFile "config.yaml"))
-- | Deprecated default global config path.
-- Note that this will be @Nothing@ on Windows, which is by design.
defaultGlobalConfigPathDeprecated :: Maybe (Path Abs File)
defaultGlobalConfigPathDeprecated = parseAbsFile "/etc/stack/config"
-- | Default global config path.
-- Normally, @getDefaultGlobalConfigPath@ should be used instead.
-- Note that this will be @Nothing@ on Windows, which is by design.
defaultGlobalConfigPath :: Maybe (Path Abs File)
defaultGlobalConfigPath = parseAbsFile "/etc/stack/config.yaml"
-- | Path where build plans are stored.
buildPlanDir :: Path Abs Dir -- ^ Stack root
             -> Path Abs Dir
buildPlanDir = (</> $(mkRelDir "build-plan"))
-- | Environment variable that stores a variant to append to platform-specific directory
-- names. Used to ensure incompatible binaries aren't shared between Docker builds and host.
platformVariantEnvVar :: String
platformVariantEnvVar = stackProgNameUpper ++ "_PLATFORM_VARIANT"
-- | Provides --ghc-options for 'Ghc', and similarly, --ghcjs-options
-- for 'Ghcjs'.
compilerOptionsCabalFlag :: WhichCompiler -> String
compilerOptionsCabalFlag Ghc = "--ghc-options"
compilerOptionsCabalFlag Ghcjs = "--ghcjs-options"
-- | The flag to pass to GHC when we want to force its output to be
-- colorized.
ghcColorForceFlag :: String
ghcColorForceFlag = "-fdiagnostics-color=always"
-- | The minimum allowed terminal width. Used for pretty-printing.
minTerminalWidth :: Int
minTerminalWidth = 40
-- | The maximum allowed terminal width. Used for pretty-printing.
maxTerminalWidth :: Int
maxTerminalWidth = 200
-- | The default terminal width. Used for pretty-printing when we can't
-- automatically detect it and when the user doesn't supply one.
defaultTerminalWidth :: Int
defaultTerminalWidth = 100
| MichielDerhaeg/stack | src/Stack/Constants.hs | bsd-3-clause | 7,452 | 0 | 8 | 1,466 | 914 | 550 | 364 | 150 | 1 |
module WASHGenerator (preprocess, preprocessPIPE) where {
import Control.Exception;
import Data.List (isPrefixOf);
import System.IO;
import WASHData ;
import Text.ParserCombinators.Parsec hiding (try) ;
import qualified WASHParser ;
import qualified WASHExpression ;
import qualified WASHClean ;
import WASHFlags ;
-- import Trace;
-- | Preprocess WASH source file @srcName@ into Haskell file @dstName@,
-- appending @globalDefs@ to the generated code.  Both files are closed
-- again even if parsing or writing fails.
preprocess :: FLAGS -> String -> String -> String -> IO ();
preprocess flags srcName dstName globalDefs =
  bracket (openFile srcName ReadMode) hClose
    (\ srcHandle ->
       bracket (openFile dstName WriteMode) hClose
         (\ dstHandle ->
            preprocessPIPE flags srcName srcHandle dstHandle globalDefs));
-- | Parse WASH source read from @srcHandle@ and write the generated
-- Haskell code to @dstHandle@.  @srcName@ is used for error reporting
-- only; @globalDefs@ is appended verbatim to the generated code.
preprocessPIPE :: FLAGS -> String -> Handle -> Handle -> String -> IO ();
preprocessPIPE flags srcName srcHandle dstHandle globalDefs = do {
  sourceText <- hGetContents srcHandle;
  case parse WASHParser.washfile srcName sourceText of {
    Left parseError -> ioError (userError (show parseError));
    Right washfile ->
      hPutStrLn dstHandle (postprocess (file flags globalDefs washfile ""));
  };
  };
-- | Render a parsed WASH file to Haskell code (as a 'ShowS'), cleaning
-- the code fragments first and appending @globalDefs@ plus a newline.
file :: FLAGS -> String -> [CodeFrag] -> ShowS ;
file flags globalDefs fcode =
  WASHExpression.code flags (WASHClean.cleanCodeFragList fcode)
  . showString globalDefs
  . showString "\n" ;
-- | Render a list of module names as a block of @import@ declarations,
-- one per line, each terminated with a semicolon.
imports :: [String] -> String ;
imports is = concatMap (\moduleName -> "import " ++ moduleName ++ ";\n") is ;
-- | Insert the WASH CGI import in front of the first import declaration
-- of the generated module (line-wise).
postprocess :: String -> String ;
postprocess = unlines . postprocess' . lines ;
-- | Splice @import qualified WASH.CGI.CGI as CGI@ directly before the
-- first line that starts with \"import\"; leave the input unchanged when
-- no such line exists.
postprocess' :: [String] -> [String] ;
postprocess' ls =
  case break ("import" `isPrefixOf`) ls of {
    (_, []) -> ls ;
    (prefix, importLines) ->
      prefix ++ ("import qualified WASH.CGI.CGI as CGI" : importLines) } ;
}
| nh2/WashNGo | washparser/hs/WASHGenerator.hs | bsd-3-clause | 1,717 | 6 | 13 | 339 | 552 | 307 | 245 | 42 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Heed.Vty.MainWidget
( app
, insertInOrder
, MyEvent(WsReceive)
) where
import qualified Brick.AttrMap as BA
import qualified Brick.Main as M
import Brick.Types (Widget)
import qualified Brick.Types as BT
import qualified Brick.Util as BU
import qualified Brick.Widgets.Border as B
import qualified Brick.Widgets.Center as C
import Brick.Widgets.Core
(hBox, hLimit, padLeft, str, txt, vBox, vLimit, withAttr, (<+>),
(<=>), txtWrap)
import qualified Brick.Widgets.List as BL
import Control.Lens
import Control.Monad (forM_, when)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Function ((&))
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.Serialize (encode)
import qualified Data.Text as T
import qualified Data.Time.Format as Time
import qualified Data.Vector as Vec
import qualified Graphics.Vty as V
import Heed.Commands
import Heed.Utils (fork_, silentProc)
import Heed.Vty.AddFeedWidget (addVty)
import Heed.Vty.EditFeedWidget (editVty)
import Heed.Vty.WidgetStates
import qualified Network.WebSockets as WS
import qualified System.Process as Process
import Text.URI (parseURI, uriRegName)
-- | Application events injected into brick's event loop from outside
-- vty: currently only messages received on the websocket connection.
newtype MyEvent = WsReceive Down
-- | Browsers this client knows how to launch for opening links.
data Browser
  = Firefox
  | Chromium
-- | Render the whole UI: a status bar on top, then the feed list on the
-- left and the item list plus a four-line item detail pane on the right.
drawUi :: AppState -> [Widget Name]
drawUi st = [layout]
  where
    layout = C.center $ vBox [topBar, B.hBorder, body]
    topBar =
      txt ("Connected as " <> (st ^. userName)) <+>
      (padLeft BT.Max . txt $ st ^. status)
    body = hBox [feedPane, B.vBorder, itemPane]
    feedPane = hLimit feedListWidth $ BL.renderList feedDrawElement True (st ^. feeds)
    itemPane = vBox [itemList, B.hBorder, itemDetail]
    itemList = BL.renderList itemDrawElement True (st ^. items)
    itemDetail = vLimit 4 $ itemDrawDetail (BL.listSelectedElement $ st ^. items)
-- | Render one row of the feed list: feed name on the left, unread
-- count on the right.  The selected row gets the "selected" attribute.
feedDrawElement :: Bool -> FeFeedInfo -> Widget Name
feedDrawElement isSelected feedInfo = decorate row
  where
    row =
      txtWrap (feedInfo ^. feedListName) <+>
      (padLeft BT.Max . str . show $ feedInfo ^. feedListUnread)
    decorate
      | isSelected = withAttr "selected"
      | otherwise = id
-- | Render one row of the item list: title on the left, timestamp on
-- the right.  The selected row gets the "selected" attribute; rows for
-- already-seen items get "read".
itemDrawElement :: Bool -> FeItemInfo -> Widget Name
itemDrawElement isSelected item = decorate row
  where
    row =
      txtWrap (item ^. itemInfoTitle) <+>
      (padLeft BT.Max . str . stamp $ item)
    stamp i = Time.formatTime euTimeLocale "%T %x" $ i ^. itemInfoDate
    decorate
      | isSelected = withAttr "selected"
      | item ^. itemInfoRead == Seen = withAttr "read"
      | otherwise = id
-- | Render the detail pane for the selected item: title, link, and the
-- comments URL (or a placeholder when the item has no comments link).
itemDrawDetail :: Maybe (Int, FeItemInfo) -> Widget Name
itemDrawDetail selection =
  case selection of
    Nothing -> txt "No item selected"
    Just (_, info) ->
      txtWrap (info ^. itemInfoTitle) <=>
      txtWrap (info ^. itemInfoLink) <=>
      txtWrap (fromMaybe "no comments" $ info ^. itemInfoComments)
-- | Central event handler: keyboard bindings plus websocket messages
-- (delivered as 'WsReceive' application events).  Clauses are tried in
-- order, so the catch-all must stay last.
appEvent :: AppState -> BT.BrickEvent Name MyEvent -> BT.EventM Name (BT.Next AppState)
-- Quit.
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'q') [])) = M.halt st
appEvent st (BT.VtyEvent (V.EvKey V.KEsc [])) = M.halt st
-- Select the next feed and load its items.
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'J') [])) = do
  let moved = st & feeds %~ BL.listMoveDown
  getSelFeedItems moved
  M.continue moved
-- Select the previous feed and load its items.
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'K') [])) = do
  let moved = st & feeds %~ BL.listMoveUp
  getSelFeedItems moved
  M.continue moved
-- Move down the item list, marking the current item as read.
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'j') [])) = do
  updated <- updateUnreadCount (setItemAsRead st)
  M.continue (updated & items %~ BL.listMoveDown)
-- Move up the item list, marking the current item as read.
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'k') [])) = do
  updated <- updateUnreadCount (setItemAsRead st)
  M.continue (updated & items %~ BL.listMoveUp)
-- Open the selected item's link in a browser.
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'o') [])) = openInBrowser st Chromium
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'O') [])) = openInBrowser st Firefox
-- Mark every item of the selected feed as read, then advance.
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'a') [])) = do
  cleared <-
    liftIO $
    case BL.listSelectedElement (st ^. feeds) of
      Nothing -> return st
      Just (i, feedInfo) -> do
        sendAllRead st feedInfo
        return (st & feeds . BL.listElementsL . ix i . feedListUnread .~ 0)
  let moved = cleared & feeds %~ BL.listMoveDown
  getSelFeedItems moved
  M.continue moved
-- Add a new feed (suspends the vty UI while the form is shown).
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'n') [])) = M.suspendAndResume (addVty st)
-- Ask the server for the selected feed's editable info.
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'e') [])) = do
  forM_ (BL.listSelectedElement (st ^. feeds)) $ \(_, feedInfo) ->
    fork_ $
    WS.sendBinaryData (st ^. wsConn) (encode (GetSingleFeedInfo (feedInfo ^. feedListId)))
  M.continue (st & status .~ "Getting feed info to edit")
-- Force-refresh the selected feed on the server.
appEvent st (BT.VtyEvent (V.EvKey (V.KChar 'r') [])) = do
  updated <-
    case st ^. feeds . to BL.listSelectedElement ^? _Just . _2 . feedListId of
      Nothing -> return st
      Just fid -> do
        fork_ $ WS.sendBinaryData (st ^. wsConn) (encode (ForceRefresh fid))
        return (st & status .~ "Refreshing selected feed")
  M.continue updated
-- Messages pushed by the server over the websocket.
appEvent st (BT.AppEvent (WsReceive msg)) = handleMess st msg
-- Ignore everything else.
appEvent st _ = M.continue st
-- | Open the selected item (if any) in the given browser, mark it as
-- read, and advance the item selection.
openInBrowser :: AppState -> Browser -> BT.EventM Name (BT.Next AppState)
openInBrowser st browser = do
  liftIO $ forM_ (BL.listSelectedElement (st ^. items)) (openTab browser . snd)
  updated <- updateUnreadCount (setItemAsRead st)
  M.continue (updated & items %~ BL.listMoveDown)
-- | Tell the server that the currently selected item has been read.
-- Does nothing when no item is selected.
sendRead
  :: (MonadIO m)
  => AppState -> m ()
sendRead st =
  forM_ (BL.listSelectedElement (st ^. items)) $ \(_, selectedItem) ->
    fork_ $
    WS.sendBinaryData (st ^. wsConn) (encode (ItemRead (selectedItem ^. itemInfoId)))
-- | Tell the server that every item of the given feed has been read.
sendAllRead
  :: (MonadIO m)
  => AppState -> FeFeedInfo -> m ()
sendAllRead appState feedInfo =
  fork_ $ WS.sendBinaryData (appState ^. wsConn) (encode (FeedRead (feedInfo ^. feedListId)))
-- | After 'setItemAsRead': when the item was previously unseen, notify
-- the server ('sendRead') and decrement the selected feed's unread
-- counter; items that were already seen need no work.
updateUnreadCount
  :: MonadIO m
  => (Seen, AppState) -> m AppState
updateUnreadCount (Seen, st) = return st
updateUnreadCount (Unseen, st) = do
  sendRead st
  return $
    maybe st
          (\i -> st & feeds . BL.listElementsL . ix i . feedListUnread -~ 1)
          (st ^. feeds . BL.listSelectedL)
-- | Open an item's link in the chosen browser; when the item has a
-- comments page on a *different* domain, open that too (first).
openTab :: Browser -> FeItemInfo -> IO ()
openTab browser item = do
  forM_ (commentsOnOtherDomain link (item ^. itemInfoComments)) launch
  launch link
  where
    link = item ^. itemInfoLink
    browserExe =
      case browser of
        Chromium -> "chromium"
        Firefox -> "firefox"
    launch url = fork_ $ Process.createProcess (silentProc browserExe [T.unpack url])
    -- Runs in the Maybe monad: Nothing when there are no comments, any
    -- URI fails to parse, or the comments share the link's domain;
    -- otherwise Just the comments URL.
    commentsOnOtherDomain l commentsM = do
      comments <- commentsM
      linkUri <- parseURI (T.unpack l)
      commentUri <- parseURI (T.unpack comments)
      linkDomain <- uriRegName linkUri
      commentDomain <- uriRegName commentUri
      if linkDomain == commentDomain
        then Nothing
        else Just comments
-- | Mark the currently selected item as 'Seen', returning the item's
-- *previous* seen-state together with the updated app state; the old
-- state lets callers skip server updates for already-seen items.
setItemAsRead :: AppState -> (Seen, AppState)
setItemAsRead st =
  case st ^. items . BL.listSelectedL of
    Nothing -> (Seen, st)
    -- <<.~ sets the target and also hands back its previous value.
    Just i -> st & items . BL.listElementsL . ix i . itemInfoRead <<.~ Seen
-- | React to a message pushed from the server.
handleMess :: AppState -> Down -> BT.EventM Name (BT.Next AppState)
-- Full feed list: replace ours and fetch the selection's items.
handleMess st (Feeds fi) = do
  let loaded = st & feeds .~ BL.list FeedList (Vec.fromList fi) 1
  getSelFeedItems loaded
  M.continue loaded
-- Items of one feed: replace the item list and sync the unread count.
handleMess st (FeedItems fi) =
  M.continue (st & items .~ BL.list ItemList (Vec.fromList fi) 1 & feeds .~ refreshed)
  where
    refreshed = BL.listModify (feedListUnread .~ (fromIntegral . length) fi) (st ^. feeds)
handleMess st (Status name) = M.continue (st & userName .~ name)
handleMess st (FeedAdded url) = M.continue (st & status .~ ("Added " <> url))
handleMess st (BackendError text) = M.continue (st & status .~ ("Error: " <> text))
-- New items arrived: bump (or insert) the feed's unread counter and,
-- when the selected feed is the one that changed, reload its items.
handleMess st (NewItems feed) = do
  when (needUpdate && sameAsSelected) (getSelFeedItems st)
  M.continue newState
  where
    (newState, needUpdate) =
      case Vec.elemIndex feed (st ^. feeds . BL.listElementsL) of
        Nothing -> (st & feeds %~ insertInOrder feed, False)
        Just i ->
          ( st & feeds . BL.listElementsL . ix i . feedListUnread +~ (feed ^. feedListUnread)
          , True)
    sameAsSelected = Just feed == (st ^. feeds . to BL.listSelectedElement ^? _Just . _2)
handleMess st (EditableFeedInfo feed) = M.suspendAndResume (editVty st feed)
handleMess st (FeedInfoUpdated (fid, name)) = M.continue (updateName st name fid)
handleMess st InvalidSent = M.continue st
-- | Rename the feed with the given id in the feed list; no-op when no
-- feed with that id exists.
updateName :: AppState -> T.Text -> Int -> AppState
updateName st name fid = maybe st setName matchingIndex
  where
    matchingIndex =
      Vec.findIndex (\x -> x ^. feedListId == fid) (st ^. feeds . BL.listElementsL)
    setName ind = st & feeds . BL.listElementsL . ix ind . feedListName .~ name
-- | Insert an element into a (sorted) brick list, keeping it sorted:
-- the insertion point is just past the last element that compares
-- smaller than the new one.
insertInOrder
  :: Ord e
  => e -> BL.List n e -> BL.List n e
insertInOrder newFeed feedList = BL.listInsert insertionPoint newFeed feedList
  where
    insertionPoint =
      Vec.length (Vec.takeWhile (< newFeed) (feedList ^. BL.listElementsL))
-- | Ask the server for the items of the currently selected feed; does
-- nothing when no feed is selected.
getSelFeedItems
  :: (MonadIO m)
  => AppState -> m ()
getSelFeedItems st =
  forM_ (BL.listSelectedElement (st ^. feeds)) $ \(_, feedInfo) ->
    fork_ $
    WS.sendBinaryData (st ^. wsConn) (encode (GetFeedItems (feedInfo ^. feedListId)))
-- | Brick start event: announce ourselves to the server and show the
-- connection status in the status bar.
getInfo :: AppState -> BT.EventM Name AppState
getInfo st = do
  fork_ $ WS.sendBinaryData (st ^. wsConn) (encode Initialized)
  return (st & status .~ "Connected to server")
-- | The brick application definition wiring the draw, start, and event
-- handlers together.
app :: M.App AppState MyEvent Name
app =
  M.App
  { M.appDraw = drawUi
  , M.appStartEvent = getInfo
  , M.appHandleEvent = appEvent
  , M.appAttrMap = const (BA.attrMap V.defAttr myAttrs)
  , M.appChooseCursor = M.neverShowCursor
  }
-- | Widget attributes: selected rows render white, read items red.
myAttrs :: [(BA.AttrName, V.Attr)]
myAttrs = [("selected", BU.fg V.white), ("read", BU.fg V.red)]
-- | Default time locale with day\/month\/year date formatting.
euTimeLocale :: Time.TimeLocale
euTimeLocale = Time.defaultTimeLocale {Time.dateFmt = "%d/%m/%y"}
-- | Fixed width (in columns) of the feed list pane.
feedListWidth :: Int
feedListWidth = 50
| Arguggi/heed | heed-vty/src/Heed/Vty/MainWidget.hs | bsd-3-clause | 10,354 | 0 | 19 | 2,462 | 3,813 | 1,958 | 1,855 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.UserHooks
-- Copyright : Isaac Jones 2003-2005
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This defines the API that @Setup.hs@ scripts can use to customise the way
-- the build works. This module just defines the 'UserHooks' type. The
-- predefined sets of hooks that implement the @Simple@, @Make@ and @Configure@
-- build systems are defined in "Distribution.Simple". The 'UserHooks' is a big
-- record of functions. There are 3 for each action, a pre, post and the action
-- itself. There are few other miscellaneous hooks, ones to extend the set of
-- programs and preprocessors and one to override the function used to read the
-- @.cabal@ file.
--
-- This hooks type is widely agreed to not be the right solution. Partly this
-- is because changes to it usually break custom @Setup.hs@ files and yet many
-- internal code changes do require changes to the hooks. For example we cannot
-- pass any extra parameters to most of the functions that implement the
-- various phases because it would involve changing the types of the
-- corresponding hook. At some point it will have to be replaced.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.UserHooks (
UserHooks(..), Args,
emptyUserHooks,
) where
import Distribution.PackageDescription
(PackageDescription, GenericPackageDescription,
HookedBuildInfo, emptyHookedBuildInfo)
import Distribution.Simple.Program (Program)
import Distribution.Simple.Command (noExtraFlags)
import Distribution.Simple.PreProcess (PPSuffixHandler)
import Distribution.Simple.Setup
(ConfigFlags, BuildFlags, MakefileFlags, CleanFlags, CopyFlags,
InstallFlags, SDistFlags, RegisterFlags, HscolourFlags,
HaddockFlags)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo)
-- | Extra command-line arguments passed through to each hook.
type Args = [String]
-- | Hooks allow authors to add specific functionality before and after a
-- command is run, and also to specify additional preprocessors.
--
-- * WARNING: The hooks interface is under rather constant flux as we try to
-- understand users needs. Setup files that depend on this interface may
-- break in future releases.
data UserHooks = UserHooks {
    -- | Used for @.\/setup test@
    runTests :: Args -> Bool -> PackageDescription -> LocalBuildInfo -> IO (),
    -- | Read the description file
    readDesc :: IO (Maybe PackageDescription),
    -- | Custom preprocessors in addition to and overriding 'knownSuffixHandlers'.
    hookedPreProcessors :: [ PPSuffixHandler ],
    -- | These programs are detected at configure time. Arguments for them are
    -- added to the configure command.
    hookedPrograms :: [Program],
    -- |Hook to run before configure command
    preConf :: Args -> ConfigFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during configure.
    confHook :: ( Either GenericPackageDescription PackageDescription
                , HookedBuildInfo)
             -> ConfigFlags -> IO LocalBuildInfo,
    -- |Hook to run after configure command
    postConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO (),
    -- |Hook to run before build command. Second arg indicates verbosity level.
    preBuild :: Args -> BuildFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during build.
    buildHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> BuildFlags -> IO (),
    -- |Hook to run after build command. Second arg indicates verbosity level.
    postBuild :: Args -> BuildFlags -> PackageDescription -> LocalBuildInfo -> IO (),
    -- |Hook to run before makefile command. Second arg indicates verbosity level.
    preMakefile :: Args -> MakefileFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during makefile.
    makefileHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> MakefileFlags -> IO (),
    -- |Hook to run after makefile command. Second arg indicates verbosity level.
    postMakefile :: Args -> MakefileFlags -> PackageDescription -> LocalBuildInfo -> IO (),
    -- |Hook to run before clean command. Second arg indicates verbosity level.
    preClean :: Args -> CleanFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during clean.
    cleanHook :: PackageDescription -> Maybe LocalBuildInfo -> UserHooks -> CleanFlags -> IO (),
    -- |Hook to run after clean command. Second arg indicates verbosity level.
    postClean :: Args -> CleanFlags -> PackageDescription -> Maybe LocalBuildInfo -> IO (),
    -- |Hook to run before copy command
    preCopy :: Args -> CopyFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during copy.
    copyHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> CopyFlags -> IO (),
    -- |Hook to run after copy command
    postCopy :: Args -> CopyFlags -> PackageDescription -> LocalBuildInfo -> IO (),
    -- |Hook to run before install command
    preInst :: Args -> InstallFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during install.
    instHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> InstallFlags -> IO (),
    -- |Hook to run after install command. postInst should be run
    -- on the target, not on the build machine.
    postInst :: Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO (),
    -- |Hook to run before sdist command. Second arg indicates verbosity level.
    preSDist :: Args -> SDistFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during sdist.
    sDistHook :: PackageDescription -> Maybe LocalBuildInfo -> UserHooks -> SDistFlags -> IO (),
    -- |Hook to run after sdist command. Second arg indicates verbosity level.
    postSDist :: Args -> SDistFlags -> PackageDescription -> Maybe LocalBuildInfo -> IO (),
    -- |Hook to run before register command
    preReg :: Args -> RegisterFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during registration.
    regHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO (),
    -- |Hook to run after register command
    postReg :: Args -> RegisterFlags -> PackageDescription -> LocalBuildInfo -> IO (),
    -- |Hook to run before unregister command
    preUnreg :: Args -> RegisterFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during registration.
    unregHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO (),
    -- |Hook to run after unregister command
    postUnreg :: Args -> RegisterFlags -> PackageDescription -> LocalBuildInfo -> IO (),
    -- |Hook to run before hscolour command. Second arg indicates verbosity level.
    preHscolour :: Args -> HscolourFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during hscolour.
    hscolourHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> HscolourFlags -> IO (),
    -- |Hook to run after hscolour command. Second arg indicates verbosity level.
    postHscolour :: Args -> HscolourFlags -> PackageDescription -> LocalBuildInfo -> IO (),
    -- |Hook to run before haddock command. Second arg indicates verbosity level.
    preHaddock :: Args -> HaddockFlags -> IO HookedBuildInfo,
    -- |Over-ride this hook to get different behavior during haddock.
    haddockHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> HaddockFlags -> IO (),
    -- |Hook to run after haddock command. Second arg indicates verbosity level.
    postHaddock :: Args -> HaddockFlags -> PackageDescription -> LocalBuildInfo -> IO ()
  }
-- |Empty 'UserHooks' which do nothing.
emptyUserHooks :: UserHooks
emptyUserHooks
  = UserHooks {
      runTests = ru,
      readDesc = return Nothing,
      hookedPreProcessors = [],
      hookedPrograms = [],
      preConf = rn,
      confHook = (\_ _ -> return (error "No local build info generated during configure. Over-ride empty configure hook.")),
      postConf = ru,
      preBuild = rn,
      buildHook = ru,
      postBuild = ru,
      preMakefile = rn,
      makefileHook = ru,
      postMakefile = ru,
      preClean = rn,
      cleanHook = ru,
      postClean = ru,
      preCopy = rn,
      copyHook = ru,
      postCopy = ru,
      preInst = rn,
      instHook = ru,
      postInst = ru,
      preSDist = rn,
      sDistHook = ru,
      postSDist = ru,
      preReg = rn,
      regHook = ru,
      postReg = ru,
      preUnreg = rn,
      unregHook = ru,
      postUnreg = ru,
      preHscolour = rn,
      hscolourHook = ru,
      postHscolour = ru,
      preHaddock = rn,
      haddockHook = ru,
      postHaddock = ru
    }
  -- 'rn' is the default pre-hook: it checks that no extra command-line
  -- arguments were passed and yields no hooked build info.
  -- 'ru' is the default hook/post-hook: it accepts any arguments and
  -- does nothing.
  where rn args _ = noExtraFlags args >> return emptyHookedBuildInfo
        ru _ _ _ _ = return ()
| dcreager/cabal | Distribution/Simple/UserHooks.hs | bsd-3-clause | 10,469 | 0 | 14 | 2,238 | 1,367 | 795 | 572 | 97 | 1 |
-- | 'query' variants returning 'V.Vector'.
module Database.PostgreSQL.Simple.Vector where
import Database.PostgreSQL.Simple (Connection, formatQuery, formatMany)
import Database.PostgreSQL.Simple.FromRow (FromRow(..))
import Database.PostgreSQL.Simple.ToRow (ToRow(..))
import Database.PostgreSQL.Simple.Internal (RowParser, exec)
import Database.PostgreSQL.Simple.Internal.PQResultUtils
import Database.PostgreSQL.Simple.Types ( Query (..) )
import qualified Data.Vector as V
-- | Perform a @SELECT@ or other SQL query that is expected to return
-- results. All results are retrieved and converted before this
-- function returns.
-- Results are accumulated into a 'V.Vector' rather than a list.
query :: (ToRow q, FromRow r) => Connection -> Query -> q -> IO (V.Vector r)
query = queryWith fromRow
-- | A version of 'query' that does not perform query substitution.
query_ :: (FromRow r) => Connection -> Query -> IO (V.Vector r)
query_ = queryWith_ fromRow
-- | A version of 'query' taking parser as argument.  Parameters are
-- substituted into the template before execution.
queryWith :: ToRow q => RowParser r -> Connection -> Query -> q -> IO (V.Vector r)
queryWith parser conn template qs = do
    substituted <- formatQuery conn template qs
    result <- exec conn substituted
    finishQueryWithV parser conn template result
-- | A version of 'query_' taking parser as argument.  The query is sent
-- verbatim, with no parameter substitution.
queryWith_ :: RowParser r -> Connection -> Query -> IO (V.Vector r)
queryWith_ parser conn q@(Query que) =
    exec conn que >>= finishQueryWithV parser conn q
-- | Execute @INSERT ... RETURNING@, @UPDATE ... RETURNING@, or other SQL
-- query that accepts multi-row input and is expected to return results.
returning :: (ToRow q, FromRow r) => Connection -> Query -> [q] -> IO (V.Vector r)
returning = returningWith fromRow
-- | A version of 'returning' taking parser as argument
returningWith :: (ToRow q) => RowParser r -> Connection -> Query -> [q] -> IO (V.Vector r)
-- Short-circuit on an empty row list: nothing to send to the server,
-- so return an empty vector without touching the connection.
returningWith _ _ _ [] = return V.empty
returningWith parser conn q qs = do
  result <- exec conn =<< formatMany conn q qs
  finishQueryWithV parser conn q result
| tomjaguarpaw/postgresql-simple | src/Database/PostgreSQL/Simple/Vector.hs | bsd-3-clause | 2,043 | 0 | 13 | 379 | 540 | 289 | 251 | 27 | 1 |
{-# LANGUAGE DoRec #-}
module Main where
import Language.Java.Syntax
import Language.Java.Pretty
import Language.Java.ClassFile
import Data.Binary.Get
import qualified Data.ByteString.Lazy as BL
import Control.Applicative
import Control.Monad (replicateM, forM, forM_, mapM, mapM_)
import Data.Maybe (mapMaybe)
import System.Path
import Codec.Archive.LibZip
import System.IO.HVFS
import Control.DeepSeq
import Debug.Trace
-- | Parse every classfile in the configured jar and print the resulting
-- declarations.
main = do
  decls <- testJar
  mapM_ print decls
-- Alternative: pretty-print each declaration as a compilation unit.
-- forM_ decls $ \(ClassType parts, decl) ->
--   do print $ pretty $ CompilationUnit (Just $ PackageDecl $ Name $ map fst $ init parts) [] [decl]
-- A path names a classfile iff its extension is ".class".
isClassfile path = snd (splitExt path) == ".class"
-- Parse every classfile found under the (hard-coded) class root.
testFiles = do
  files <- filter isClassfile <$> recurseDir SystemFS classRoot
  forM files $ \file -> do
    stream <- BL.readFile file
    let result = runGet getClass stream
    -- Forcing the rendered result with deepseq makes parse errors
    -- surface here rather than lazily at the use site.
    show result `deepseq` return result
  where
    classRoot = "/tmp/jar"
-- Open the (hard-coded) jar and parse every classfile entry in it.
-- NOTE(review): unlike 'testFiles', results are NOT forced here, so
-- parse errors surface lazily at the use site.
testJar = do
  withArchive [CheckConsFlag] jarPath $ do
    classfiles <- filter isClassfile <$> fileNames []
    forM classfiles $ \classfile -> do
      stream <- BL.pack <$> fileContents [] classfile
      let result = runGet getClass stream
      return result
-- show result `deepseq` return result
-- lift $ print result
-- result `deepseq` return result
  where
-- jarPath = "/usr/lib/jvm/java-6-sun-1.6.0.22/jre/lib/rt.jar"
    jarPath = "/home/cactus/pkg/logisim-generic-2.5.1.jar"
-- jarPath = "/tmp/foo.jar"
| gergoerdi/language-java-classfile | Main.hs | bsd-3-clause | 1,526 | 0 | 17 | 335 | 338 | 179 | 159 | 34 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE UnicodeSyntax #-}
{-|
[@ISO639-1@] -
[@ISO639-2@] -
[@ISO639-3@] coo
[@Native name@] -
[@English name@] Comox
-}
module Text.Numeral.Language.COO.TestData (cardinals) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "base" Prelude ( Num )
import "numerals" Text.Numeral.Grammar.Reified ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
--------------------------------------------------------------------------------
-- Test data
--------------------------------------------------------------------------------
{-
Sources:
http://www.languagesandnumbers.com/how-to-count-in-comox/en/coo/
-}
-- | Comox cardinal numbers 1..100 paired with their expected spellings,
-- under the default inflection.
cardinals ∷ (Num i) ⇒ TestData i
cardinals =
  [ ( "default"
    , defaultInflection
    , [ (1, "paʔa")
      , (2, "saʔa")
      , (3, "čɛlas")
      , (4, "mos")
      , (5, "θiyɛčɩs")
      , (6, "t̓əxəm")
      , (7, "tᶿočɩs")
      , (8, "taʔačɩs")
      , (9, "tɩgiχʷ")
      , (10, "opən")
      , (11, "ʔopən hekʷ paʔa")
      , (12, "ʔopən hekʷ saʔa")
      , (13, "ʔopən hekʷ čɛlas")
      , (14, "ʔopən hekʷ mos")
      , (15, "ʔopən hekʷ θiyɛčɩs")
      , (16, "ʔopən hekʷ t̓əxəm")
      , (17, "ʔopən hekʷ tᶿočɩs")
      , (18, "ʔopən hekʷ taʔačɩs")
      , (19, "ʔopən hekʷ tɩgiχʷ")
      , (20, "θamšɛ")
      , (21, "θamšɛ heykʷ paʔa")
      , (22, "θamšɛ heykʷ saʔa")
      , (23, "θamšɛ heykʷ čɛlas")
      , (24, "θamšɛ heykʷ mos")
      , (25, "θamšɛ heykʷ θiyɛčɩs")
      , (26, "θamšɛ heykʷ t̓əxəm")
      , (27, "θamšɛ heykʷ tᶿočɩs")
      , (28, "θamšɛ heykʷ taʔačɩs")
      , (29, "θamšɛ heykʷ tɩgiχʷ")
      , (30, "čɩnuxʷ šɛ")
      , (31, "čɩnuxʷ šɛ heykʷ paʔa")
      , (32, "čɩnuxʷ šɛ heykʷ saʔa")
      , (33, "čɩnuxʷ šɛ heykʷ čɛlas")
      , (34, "čɩnuxʷ šɛ heykʷ mos")
      , (35, "čɩnuxʷ šɛ heykʷ θiyɛčɩs")
      , (36, "čɩnuxʷ šɛ heykʷ t̓əxəm")
      , (37, "čɩnuxʷ šɛ heykʷ tᶿočɩs")
      , (38, "čɩnuxʷ šɛ heykʷ taʔačɩs")
      , (39, "čɩnuxʷ šɛ heykʷ tɩgiχʷ")
      , (40, "mosaɬ šɛ")
      , (41, "mosaɬ šɛ heykʷ paʔa")
      , (42, "mosaɬ šɛ heykʷ saʔa")
      , (43, "mosaɬ šɛ heykʷ čɛlas")
      , (44, "mosaɬ šɛ heykʷ mos")
      , (45, "mosaɬ šɛ heykʷ θiyɛčɩs")
      , (46, "mosaɬ šɛ heykʷ t̓əxəm")
      , (47, "mosaɬ šɛ heykʷ tᶿočɩs")
      , (48, "mosaɬ šɛ heykʷ taʔačɩs")
      , (49, "mosaɬ šɛ heykʷ tɩgiχʷ")
      , (50, "θiyɛčɩsaɬšɛ")
      , (51, "θiyɛčɩsaɬšɛ heykʷ paʔa")
      , (52, "θiyɛčɩsaɬšɛ heykʷ saʔa")
      , (53, "θiyɛčɩsaɬšɛ heykʷ čɛlas")
      , (54, "θiyɛčɩsaɬšɛ heykʷ mos")
      , (55, "θiyɛčɩsaɬšɛ heykʷ θiyɛčɩs")
      , (56, "θiyɛčɩsaɬšɛ heykʷ t̓əxəm")
      , (57, "θiyɛčɩsaɬšɛ heykʷ tᶿočɩs")
      , (58, "θiyɛčɩsaɬšɛ heykʷ taʔačɩs")
      , (59, "θiyɛčɩsaɬšɛ heykʷ tɩgiχʷ")
      , (60, "t̓əχmaɬ šɛ")
      , (61, "t̓əχmaɬ šɛ heykʷ paʔa")
      , (62, "t̓əχmaɬ šɛ heykʷ saʔa")
      , (63, "t̓əχmaɬ šɛ heykʷ čɛlas")
      , (64, "t̓əχmaɬ šɛ heykʷ mos")
      , (65, "t̓əχmaɬ šɛ heykʷ θiyɛčɩs")
      , (66, "t̓əχmaɬ šɛ heykʷ t̓əxəm")
      , (67, "t̓əχmaɬ šɛ heykʷ tᶿočɩs")
      , (68, "t̓əχmaɬ šɛ heykʷ taʔačɩs")
      , (69, "t̓əχmaɬ šɛ heykʷ tɩgiχʷ")
      , (70, "tᶿočɩsaɬ šɛ")
      , (71, "tᶿočɩsaɬ šɛ heykʷ paʔa")
      , (72, "tᶿočɩsaɬ šɛ heykʷ saʔa")
      , (73, "tᶿočɩsaɬ šɛ heykʷ čɛlas")
      , (74, "tᶿočɩsaɬ šɛ heykʷ mos")
      , (75, "tᶿočɩsaɬ šɛ heykʷ θiyɛčɩs")
      , (76, "tᶿočɩsaɬ šɛ heykʷ t̓əxəm")
      , (77, "tᶿočɩsaɬ šɛ heykʷ tᶿočɩs")
      , (78, "tᶿočɩsaɬ šɛ heykʷ taʔačɩs")
      , (79, "tᶿočɩsaɬ šɛ heykʷ tɩgiχʷ")
      , (80, "taʔačɩsaɬ šɛ")
      , (81, "taʔačɩsaɬ šɛ heykʷ paʔa")
      , (82, "taʔačɩsaɬ šɛ heykʷ saʔa")
      , (83, "taʔačɩsaɬ šɛ heykʷ čɛlas")
      , (84, "taʔačɩsaɬ šɛ heykʷ mos")
      , (85, "taʔačɩsaɬ šɛ heykʷ θiyɛčɩs")
      , (86, "taʔačɩsaɬ šɛ heykʷ t̓əxəm")
      , (87, "taʔačɩsaɬ šɛ heykʷ tᶿočɩs")
      , (88, "taʔačɩsaɬ šɛ heykʷ taʔačɩs")
      , (89, "taʔačɩsaɬ šɛ heykʷ tɩgiχʷ")
      , (90, "tɩgixʷaɬ šɛ")
      , (91, "tɩgixʷaɬ šɛ heykʷ paʔa")
      , (92, "tɩgixʷaɬ šɛ heykʷ saʔa")
      , (93, "tɩgixʷaɬ šɛ heykʷ čɛlas")
      , (94, "tɩgixʷaɬ šɛ heykʷ mos")
      , (95, "tɩgixʷaɬ šɛ heykʷ θiyɛčɩs")
      , (96, "tɩgixʷaɬ šɛ heykʷ t̓əxəm")
      , (97, "tɩgixʷaɬ šɛ heykʷ tᶿočɩs")
      , (98, "tɩgixʷaɬ šɛ heykʷ taʔačɩs")
      , (99, "tɩgixʷaɬ šɛ heykʷ tɩgiχʷ")
      , (100, "opən təsɛʔɛč")
      ]
    )
  ]
| telser/numerals | src-test/Text/Numeral/Language/COO/TestData.hs | bsd-3-clause | 5,323 | 0 | 8 | 1,271 | 994 | 665 | 329 | 112 | 1 |
{-# OPTIONS_GHC -fno-warn-tabs #-}
import AES
import Common
import Hex
import Data.List
import System.IO
-- | Challenge 8: detect the AES-ECB encrypted line in @8.txt@.
-- ECB is deterministic per 16-byte block, so the line with the most
-- repeated blocks is almost certainly the ECB ciphertext.
--
-- Uses 'readFile' instead of 'openFile'/'hGetContents' so no handle is
-- left dangling, and fuses the two 'map's in the scoring pipeline.
main = do
  putStrLn "=== Challange8 ==="
  encs <- map hexDecode . lines <$> readFile "8.txt"
  putStr "ECB encrypted string detected: #"
  -- 1-based line index of the highest-scoring ciphertext; 'sortOn' is
  -- stable, so ties resolve to the later line, as before.
  print $ fst $ last $ sortOn snd $ zip [1..] $ map scoreRep encs
  where
    -- Score = sum of squared counts of identical 16-byte blocks;
    -- repeated blocks score super-linearly, plain noise scores ~n.
    scoreRep = sum . map ((^2) . length) . group . sort . chunksOf 16
| andrewcchen/matasano-cryptopals-solutions | set1/Challange8.hs | bsd-3-clause | 440 | 0 | 13 | 86 | 158 | 76 | 82 | 13 | 1 |
module RandomExample2 where
import Control.Applicative (liftA3)
import Control.Monad (replicateM)
import Control.Monad.Trans.State
import System.Random
-- | A six-sided die face.  The 'Enum' instance (DieOne = 0) is what
-- 'intToDie' builds on.
data Die = DieOne
         | DieTwo
         | DieThree
         | DieFour
         | DieFive
         | DieSix
         deriving (Eq, Enum, Show)
-- | Convert a die roll in the range 1..6 to its 'Die' value.
-- Out-of-range input now raises a descriptive error instead of the
-- bare 'toEnum' range error the previous @toEnum . pred@ produced.
intToDie :: Int -> Die
intToDie n
  | 1 <= n && n <= 6 = toEnum (n - 1)
  | otherwise = error $ "intToDie: expected a roll in 1..6, got " ++ show n
-- | Roll one die, threading the generator through 'State'.  The inner
-- @do@ block runs in the @((->) StdGen)@ (function) monad: binding
-- @randomR (1, 6)@, a function @StdGen -> (Int, StdGen)@, yields the
-- rolled number and the successor generator.
rollDie :: State StdGen Die
rollDie = state $ do
  (n, s) <- randomR (1, 6)
  return (intToDie n, s)
-- | Same as 'rollDie', written more directly: lift the generator step
-- into 'State' and map the conversion over the result.
rollDie' :: State StdGen Die
rollDie' = intToDie <$> state (randomR (1, 6))
-- | Roll three dice in sequence, threading one generator through all.
rollDieThreeTimes :: State StdGen (Die, Die, Die)
rollDieThreeTimes = liftA3 (,,) rollDie' rollDie' rollDie'
-- take 6 $ evalState infiniteDie (mkStdGen 0)
-- this is just repeating the Die itself, not the state action that produce the die
-- WARNING: 'repeat' duplicates the single sampled 'Die', so every
-- element of the list is identical; the state action runs only once.
infiniteDie :: State StdGen [Die]
infiniteDie = repeat <$> rollDie'
-- this is what we need
-- 'replicateM' re-runs the state action @n@ times, threading the
-- generator through, so each roll is fresh.
nDie :: Int -> State StdGen [Die]
nDie n = replicateM n rollDie'
-- | Roll repeatedly, accumulating the total, until it reaches @limit@;
-- return how many rolls that took.
go :: Int -> Int -> Int -> StdGen -> Int
go limit total count gen
  | total >= limit = count
  | otherwise = go limit (total + roll) (count + 1) nextGen
  where
    (roll, nextGen) = randomR (1, 6) gen
-- | Number of rolls needed for the running total to reach 20.
-- example: (rollsToGetTwenty . mkStdGen) <$> randomIO
rollsToGetTwenty :: StdGen -> Int
rollsToGetTwenty = rollsToGetN 20
-- | Number of rolls needed for the running total to reach @n@.
rollsToGetN :: Int -> StdGen -> Int
rollsToGetN n = go n 0 0
-- | Like 'go', but also log each 'Die' rolled along the way; the
-- accumulator pairs the roll count with the rolls in order.
go1 :: Int -> Int -> (Int, [Die]) -> StdGen -> (Int, [Die])
go1 limit total acc gen
  | total >= limit = acc
  | otherwise = go1 limit (total + roll) (count + 1, rolls ++ [intToDie roll]) nextGen
  where
    (roll, nextGen) = randomR (1, 6) gen
    (count, rolls) = acc
-- | Rolls (count and faces, in order) needed to reach a total of @n@.
rollsCountLogged :: Int -> StdGen -> (Int, [Die])
rollsCountLogged n g = go1 n 0 (0, []) g
| chengzh2008/hpffp | src/ch23-State/RandomExample2.hs | bsd-3-clause | 1,719 | 0 | 12 | 400 | 632 | 339 | 293 | 44 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE ScopedTypeVariables #-}
------------------------------------------------------------------------------
-- |
-- Module : Semantics
-- Copyright : (c) Greg Hale 2016
-- License : BSD3 (see the file LICENSE)
--
-- Maintainer : Greg Hale <imalsogreg@gmail.com>
-- Stability : experimental
-- Portability : template haskell
--
-- The semantics of the CBaaS network (data proxy servers, browsers,
-- and workers) expressed as a Free Monad. The goal is to make the
-- operation of the network clear. Eventually it would be nice to write
-- the server itself as an interpreter for this module. Other uses for this
-- may be a testing setup, or an demo animation.
--
-- Also this is my time to try to learn about free monad use :)
------------------------------------------------------------------------------
module Semantics where
import Control.Monad.Free
import Control.Monad.Free.TH
import Data.IORef
import Data.UUID.Types
import Data.Map (Map)
import qualified Data.Map as Map
import Model
import BrowserProfile
import EntityID
import Job
import WorkerProfile
-- | One step of the CBaaS network protocol, expressed as a functor so
-- the whole protocol can live in a 'Free' monad.  The parameter @a@ is
-- the continuation; constructors carrying a function receive a result
-- before continuing.
data CBaaSF a
  = BrowserJoin (BrowserProfile -> a)
    -- ^ A browser connects; continue with its profile.
  | BrowserLeave a
    -- ^ A browser disconnects.
  | WorkerJoin WorkerProfile (WorkerProfile -> a)
    -- ^ A worker announces itself; continue with a worker profile
    -- (presumably the registered/assigned one — confirm in interpreter).
  | RequestWork BrowserProfile WorkerProfile Job a
    -- ^ A browser asks a worker to run a job.
  | ReportWorkFinished WorkerProfile Job (JobResult -> a)
    -- ^ A worker reports completion; continue with the job result.
  deriving (Functor)

-- | A CBaaS protocol program: the free monad over 'CBaaSF'.
type CBaaS = Free CBaaSF
-- | Interpret a 'CBaaS' program in 'IO'.  Not yet implemented: the
-- original was a redundant @do undefined@; fail with a descriptive
-- 'error' so the call site is identifiable when it is hit.
simulateCBaaSIO :: CBaaS a -> IO a
simulateCBaaSIO _ = error "Semantics.simulateCBaaSIO: not implemented"
| CBMM/CBaaS | cbaas-lib/src/Semantics.hs | bsd-3-clause | 1,518 | 0 | 8 | 260 | 184 | 118 | 66 | 26 | 1 |
-- |
-- Module : Network.TLS.Handshake.Certificate
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unknown
--
module Network.TLS.Handshake.Certificate
( certificateRejected
, rejectOnException
) where
import Network.TLS.Context.Internal
import Network.TLS.Struct
import Network.TLS.X509
import Control.Monad.State.Strict
import Control.Exception (SomeException)
-- | On certificate reject, throw a TLS protocol exception carrying the
-- alert that matches the rejection reason.
certificateRejected :: MonadIO m => CertificateRejectReason -> m a
certificateRejected reason =
    throwCore $ Error_Protocol $ case reason of
        CertificateRejectRevoked   -> ("certificate is revoked", True, CertificateRevoked)
        CertificateRejectExpired   -> ("certificate has expired", True, CertificateExpired)
        CertificateRejectUnknownCA -> ("certificate has unknown CA", True, UnknownCa)
        CertificateRejectOther s   -> ("certificate rejected: " ++ s, True, CertificateUnknown)
-- | Turn any exception raised during certificate checking into a
-- rejection whose reason is the rendered exception.
rejectOnException :: SomeException -> IO CertificateUsage
rejectOnException =
    return . CertificateUsageReject . CertificateRejectOther . show
| erikd/hs-tls | core/Network/TLS/Handshake/Certificate.hs | bsd-3-clause | 1,286 | 0 | 8 | 174 | 220 | 126 | 94 | 19 | 1 |
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : System.Win32.Types
-- Copyright : (c) Alastair Reid, 1997-2003
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : Esa Ilari Vuokko <ei@vuokko.info>
-- Stability : provisional
-- Portability : portable
--
-- A collection of FFI declarations for interfacing with Win32.
--
-----------------------------------------------------------------------------
module System.Win32.Types
( module System.Win32.Types
, nullPtr
) where
import Control.Exception (throwIO)
import Data.Bits (shiftL, shiftR, (.|.), (.&.))
import Data.Char (isSpace)
import Data.Int (Int32, Int64)
import Data.Maybe (fromMaybe)
import Data.Word (Word8, Word16, Word32, Word64)
import Foreign.C.Error (getErrno, errnoToIOError)
import Foreign.C.String (newCWString, withCWStringLen)
import Foreign.C.String (peekCWString, peekCWStringLen, withCWString)
import Foreign.C.Types (CChar, CUChar, CWchar)
import Foreign.ForeignPtr (ForeignPtr, newForeignPtr, newForeignPtr_)
import Foreign.Ptr (FunPtr, Ptr, nullPtr)
import Numeric (showHex)
import System.IO.Error (ioeSetErrorString)
import System.IO.Unsafe (unsafePerformIO)
#if !MIN_VERSION_base(4,8,0)
import Data.Word (Word)
#endif
#if MIN_VERSION_base(4,7,0)
import Data.Bits (finiteBitSize)
#else
import Data.Bits (Bits, bitSize)
finiteBitSize :: (Bits a) => a -> Int
finiteBitSize = bitSize
#endif
#include "windows_cconv.h"
----------------------------------------------------------------
-- Platform specific definitions
--
-- Most typedefs and prototypes in Win32 are expressed in terms
-- of these types. Try to follow suit - it'll make it easier to
-- get things working on Win64 (or whatever they call it on Alphas).
----------------------------------------------------------------
-- Aliases matching the Win32 C typedefs, so FFI signatures read like
-- the SDK headers.
type BOOL = Bool
type BYTE = Word8
type UCHAR = CUChar
type USHORT = Word16
type UINT = Word32
type INT = Int32
type WORD = Word16
type DWORD = Word32
type LONG = Int32
type FLOAT = Float
type LARGE_INTEGER = Int64
-- | Unsigned integer wide enough to hold a pointer (machine word).
type UINT_PTR = Word
-- Not really a basic type, but used in many places
type DDWORD = Word64
----------------------------------------------------------------
-- Mb* aliases mark values that may be absent.
type MbString = Maybe String
type MbINT = Maybe INT
type ATOM = UINT
type WPARAM = UINT
type LPARAM = LONG
type LRESULT = LONG
type SIZE_T = DWORD
type MbATOM = Maybe ATOM
type HRESULT = LONG
----------------------------------------------------------------
-- Pointers
----------------------------------------------------------------
-- Raw pointer aliases following the Win32 LP*/P* naming style.
type Addr = Ptr ()
type LPVOID = Ptr ()
type LPBOOL = Ptr BOOL
type LPBYTE = Ptr BYTE
type PUCHAR = Ptr UCHAR
type LPDWORD = Ptr DWORD
type LPSTR = Ptr CChar
type LPCSTR = LPSTR
type LPWSTR = Ptr CWchar
type LPCWSTR = LPWSTR
-- LPTSTR follows whichever TCHAR is selected below (UTF-16 here).
type LPTSTR = Ptr TCHAR
type LPCTSTR = LPTSTR
type LPCTSTR_ = LPCTSTR
-- Optional things with defaults

-- | Unwrap an optional pointer, defaulting to 'nullPtr'.
maybePtr :: Maybe (Ptr a) -> Ptr a
maybePtr (Just p) = p
maybePtr Nothing  = nullPtr

-- | Inverse of 'maybePtr': 'nullPtr' becomes 'Nothing'.
ptrToMaybe :: Ptr a -> Maybe (Ptr a)
ptrToMaybe p
  | p == nullPtr = Nothing
  | otherwise    = Just p

-- | Unwrap an optional number, defaulting to 0.
maybeNum :: Num a => Maybe a -> a
maybeNum (Just n) = n
maybeNum Nothing  = 0

-- | Inverse of 'maybeNum': 0 becomes 'Nothing'.
numToMaybe :: (Eq a, Num a) => a -> Maybe a
numToMaybe 0 = Nothing
numToMaybe n = Just n
-- Optional (nullable) variants of the pointer aliases above.
type MbLPVOID = Maybe LPVOID
type MbLPCSTR = Maybe LPCSTR
type MbLPCTSTR = Maybe LPCTSTR
----------------------------------------------------------------
-- Chars and strings
----------------------------------------------------------------
-- | Marshal a 'String' into a temporary NUL-terminated TCHAR buffer
-- passed to the action; freed afterwards.
withTString :: String -> (LPTSTR -> IO a) -> IO a
-- | As 'withTString', but also hands the action the string length.
withTStringLen :: String -> ((LPTSTR, Int) -> IO a) -> IO a
-- | Read a NUL-terminated TCHAR string.
peekTString :: LPCTSTR -> IO String
-- | Read a TCHAR string of the given length.
peekTStringLen :: (LPCTSTR, Int) -> IO String
-- | Allocate a fresh NUL-terminated TCHAR string (caller frees).
newTString :: String -> IO LPCTSTR
-- UTF-16 version:
type TCHAR = CWchar
withTString = withCWString
withTStringLen = withCWStringLen
peekTString = peekCWString
peekTStringLen = peekCWStringLen
newTString = newCWString
{- ANSI version:
type TCHAR = CChar
withTString = withCString
withTStringLen = withCStringLen
peekTString = peekCString
peekTStringLen = peekCStringLen
newTString = newCString
-}
----------------------------------------------------------------
-- Handles
----------------------------------------------------------------
-- | A raw Win32 HANDLE: an opaque pointer.
type HANDLE = Ptr ()
-- | A HANDLE managed by a finaliser.
type ForeignHANDLE = ForeignPtr ()
-- | Wrap a raw HANDLE in a 'ForeignPtr' whose finaliser is the C-side
-- DeleteObjectFinaliser (see the foreign import below).
newForeignHANDLE :: HANDLE -> IO ForeignHANDLE
newForeignHANDLE = newForeignPtr deleteObjectFinaliser
-- | View a HANDLE as an unsigned machine word.
handleToWord :: HANDLE -> UINT_PTR
handleToWord = castPtrToUINTPtr
type HKEY = ForeignHANDLE
type PKEY = HANDLE
-- | The NULL handle.
nullHANDLE :: HANDLE
nullHANDLE = nullPtr
type MbHANDLE = Maybe HANDLE
type HINSTANCE = Ptr ()
type MbHINSTANCE = Maybe HINSTANCE
type HMODULE = Ptr ()
type MbHMODULE = Maybe HMODULE
-- | A 'ForeignPtr' wrapping 'nullPtr' with no finaliser attached.
-- NOINLINE is required for a top-level 'unsafePerformIO' binding:
-- without it GHC may inline the call and allocate a distinct
-- ForeignPtr at each use site.
{-# NOINLINE nullFinalHANDLE #-}
nullFinalHANDLE :: ForeignPtr a
nullFinalHANDLE = unsafePerformIO (newForeignPtr_ nullPtr)
-- | Win32's @INVALID_HANDLE_VALUE@ sentinel: (-1) cast to a pointer.
-- Lower-cased first letter because Haskell values cannot start with a
-- capital.
iNVALID_HANDLE_VALUE :: HANDLE
iNVALID_HANDLE_VALUE = castUINTPtrToPtr (-1)
----------------------------------------------------------------
-- Errors
----------------------------------------------------------------
-- | A Win32 error code, as returned by @GetLastError@.
type ErrCode = DWORD
-- | Run an IO action; when its result satisfies the predicate, raise a
-- Win32 'IOError' (via 'errorWin') tagged with the given API name,
-- otherwise return the result.
failIf :: (a -> Bool) -> String -> IO a -> IO a
failIf p wh act = act >>= \v -> if p v then errorWin wh else return v
-- | Like 'failIf', discarding the successful result.  Defined in terms
-- of 'failIf' instead of duplicating its body.
failIf_ :: (a -> Bool) -> String -> IO a -> IO ()
failIf_ p wh act = failIf p wh act >> return ()
-- | Fail (via 'errorWin') when the action returns 'nullPtr'.
failIfNull :: String -> IO (Ptr a) -> IO (Ptr a)
failIfNull = failIf (== nullPtr)
-- | Fail when the action returns 0.
failIfZero :: (Eq a, Num a) => String -> IO a -> IO a
failIfZero = failIf (== 0)
-- | Fail when the action returns 'False'.
failIfFalse_ :: String -> IO Bool -> IO ()
failIfFalse_ = failIf_ not
-- | Run an action yielding a Win32 error code; succeed on 0
-- (ERROR_SUCCESS), otherwise raise an 'IOError' via 'failWith'.
failUnlessSuccess :: String -> IO ErrCode -> IO ()
failUnlessSuccess fn_name act = do
    r <- act
    case r of
      0 -> return ()
      _ -> failWith fn_name r
-- | Like 'failUnlessSuccess', but additionally tolerate one specific
-- error code @val@: return 'False' on success, 'True' on @val@, and
-- fail on anything else.
failUnlessSuccessOr :: ErrCode -> String -> IO ErrCode -> IO Bool
failUnlessSuccessOr val fn_name act = act >>= classify
  where
    classify r
      | r == 0    = return False
      | r == val  = return True
      | otherwise = failWith fn_name r
-- | Raise an 'IOError' for the named API call, using the thread's
-- current @GetLastError@ code.
errorWin :: String -> IO a
errorWin fn_name = getLastError >>= failWith fn_name
-- | Raise an 'IOError' for the named API function and Win32 error
-- code.  The message text comes from the system message table (via
-- 'getErrorMessage'); when none is available the code is shown in hex.
failWith :: String -> ErrCode -> IO a
failWith fn_name err_code = do
    c_msg <- getErrorMessage err_code
    msg <- if c_msg == nullPtr
             then return $ "Error 0x" ++ Numeric.showHex err_code ""
             else do msg <- peekTString c_msg
                     -- We ignore failure of freeing c_msg, given we're already failing
                     _ <- localFree c_msg
                     return msg
    c_maperrno -- turn GetLastError() into errno, which errnoToIOError knows
               -- how to convert to an IOException we can throw.
               -- XXX we should really do this directly.
    errno <- getErrno
    -- Strip trailing whitespace from the system message (it usually
    -- ends in a newline), then attach it to the errno-based IOError.
    let msg' = reverse $ dropWhile isSpace $ reverse msg -- drop trailing \n
        ioerror = errnoToIOError fn_name errno Nothing Nothing
                    `ioeSetErrorString` msg'
    throwIO ioerror
-- | Copy the current Win32 last-error code into the C runtime's
-- @errno@, so 'errnoToIOError' can translate it.
foreign import ccall unsafe "maperrno" -- in base/cbits/Win32Utils.c
  c_maperrno :: IO ()
----------------------------------------------------------------
-- Misc helpers
----------------------------------------------------------------
-- | Split a 64-bit 'DDWORD' into its (high, low) 32-bit 'DWORD' halves.
ddwordToDwords :: DDWORD -> (DWORD,DWORD)
ddwordToDwords n = (high, low)
  where
    high = fromIntegral (n `shiftR` finiteBitSize (undefined :: DWORD))
    low  = fromIntegral (n .&. fromIntegral (maxBound :: DWORD))
-- | Combine (high, low) 32-bit 'DWORD' halves into one 64-bit 'DDWORD'.
dwordsToDdword :: (DWORD,DWORD) -> DDWORD
dwordsToDdword (hi, low) = shifted .|. fromIntegral low
  where
    shifted = fromIntegral hi `shiftL` finiteBitSize hi
----------------------------------------------------------------
-- Primitives
----------------------------------------------------------------
{-# CFILES cbits/HsWin32.c #-}
-- | C-side finaliser used by 'newForeignHANDLE'.
foreign import ccall "HsWin32.h &DeleteObjectFinaliser"
  deleteObjectFinaliser :: FunPtr (Ptr a -> IO ())
-- | Win32 @LocalFree@; used here to release message buffers.
foreign import WINDOWS_CCONV unsafe "windows.h LocalFree"
  localFree :: Ptr a -> IO (Ptr a)
-- | Win32 @GetLastError@.
foreign import WINDOWS_CCONV unsafe "windows.h GetLastError"
  getLastError :: IO ErrCode
{-# CFILES cbits/errors.c #-}
-- | Fetch the system message text for an error code (see
-- cbits/errors.c); yields a NULL pointer when no message exists.
foreign import ccall unsafe "errors.h"
  getErrorMessage :: DWORD -> IO LPWSTR
{-# CFILES cbits/HsWin32.c #-}
-- C wrappers for the LOWORD/HIWORD macros and pointer casts.
foreign import ccall unsafe "HsWin32.h"
  lOWORD :: DWORD -> WORD
foreign import ccall unsafe "HsWin32.h"
  hIWORD :: DWORD -> WORD
foreign import ccall unsafe "HsWin32.h"
  castUINTPtrToPtr :: UINT_PTR -> Ptr a
foreign import ccall unsafe "HsWin32.h"
  castPtrToUINTPtr :: Ptr s -> UINT_PTR
-- | Locale identifier.
type LCID = DWORD
-- | Language identifier.
type LANGID = WORD
-- | Sort-order identifier.
type SortID = WORD
-- C wrappers for the MAKELCID family of macros (see HsWin32.h).
foreign import ccall unsafe "HsWin32.h prim_MAKELCID"
  mAKELCID :: LANGID -> SortID -> LCID
foreign import ccall unsafe "HsWin32.h prim_LANGIDFROMLCID"
  lANGIDFROMLCID :: LCID -> LANGID
foreign import ccall unsafe "HsWin32.h prim_SORTIDFROMLCID"
  sORTIDFROMLCID :: LCID -> SortID
type SubLANGID = WORD
type PrimaryLANGID = WORD
-- C wrappers for the MAKELANGID family of macros.
foreign import ccall unsafe "HsWin32.h prim_MAKELANGID"
  mAKELANGID :: PrimaryLANGID -> SubLANGID -> LANGID
foreign import ccall unsafe "HsWin32.h prim_PRIMARYLANGID"
  pRIMARYLANGID :: LANGID -> PrimaryLANGID
foreign import ccall unsafe "HsWin32.h prim_SUBLANGID"
  sUBLANGID :: LANGID -> SubLANGID
----------------------------------------------------------------
-- End
----------------------------------------------------------------
| DavidAlphaFox/ghc | libraries/Win32/System/Win32/Types.hs | bsd-3-clause | 9,483 | 37 | 13 | 1,872 | 2,134 | 1,174 | 960 | -1 | -1 |
module Parsers.ColorSpec where
import Test.Hspec
import Text.Parsec (parse)
import Types.Color
import Parsers.Color
-- | Allow running this spec module standalone.
main :: IO ()
main = hspec spec
-- | Specs for the colour parsers: each case feeds a literal through the
-- relevant parser and checks the resulting 'Color'.
spec :: Spec
spec =
  describe "Color Parsers" $ do
    it "Hexadecimal (3 digit)" $ parsesTo hexColor "#333 ignore" (Color 51 51 51 1)
    it "Hexadecimal (6 digit)" $ parsesTo hexColor "#333333 ignore" (Color 51 51 51 1)
    it "RGB" $ parsesTo rgbColor "rgb(51, 51, 51) ignore" (Color 51 51 51 1)
    it "RGBA" $ parsesTo rgbaColor "rgba(51, 51, 51, 0.5) ignore" (Color 51 51 51 0.5)
    it "HSL" $ parsesTo hslColor "hsl(0, 0%, 20%) ignore" (Color 51 51 51 1)
    it "HSLA" $ parsesTo hslaColor "hsla(0, 0%, 20%, 0.5) ignore" (Color 51 51 51 0.5)
    it "Keyword" $ parsesTo keyword "red ignore" (Color 255 0 0 1)
    it "Color (Hexadecimal)" $ parsesTo color "#333 ignore" (Color 51 51 51 1)
    it "Color (RGB)" $ parsesTo color "rgb(51, 51, 51) ignore" (Color 51 51 51 1)
  where
    -- Run a parser over the input and expect the given colour.
    parsesTo p input expected = parse p "" input `shouldBe` Right expected
| cimmanon/classi | test/Parsers/ColorSpec.hs | bsd-3-clause | 1,099 | 18 | 13 | 238 | 429 | 209 | 220 | 28 | 1 |
module Partly.Make where
-- base:
import Data.List (tails)
import Data.Char (toLower)
import Data.Word (Word16)
import Control.Applicative
-- bytestring:
import qualified Data.ByteString as B
-- optparse-applicative:
import Options.Applicative
-- partly:
import System.Disk.Partitions.MBR
import Partly.Json
import Partly.Common
-- | A change we can apply to a boot record.
data Delta = Delta
  { btl :: Maybe FilePath -- ^ file whose bytes become the new bootloader
  , sig :: Maybe Word16   -- ^ new boot signature, if any
  }
  deriving (Eq, Show)
-- | Apply some changes to a boot record: first swap in the bootloader
-- bytes (read from disk) when requested, then override the signature.
applyDelta :: Delta -> BootRecord -> IO BootRecord
applyDelta d original = do
    withLoader <- case btl d of
      Nothing   -> return original
      Just path -> do
        bytes <- B.readFile path
        return original { bootloader = bytes }
    return $ case sig d of
      Nothing -> withLoader
      Just s  -> withLoader { bootSig = s }
-- | The kinds of options the main program will hand us.
data MakeOptions = MakeOptions
  { from :: Maybe FilePath   -- ^ existing boot record (binary or JSON) to start from
  , change :: Delta          -- ^ edits to apply to it
  , displayOpts :: Display } -- ^ how/where to emit the result
  deriving (Eq, Show)
-- | A command-line parser for 'MakeOptions' (uses the old
-- optparse-applicative '&' modifier-combining style).
makeOptions :: Parser MakeOptions
makeOptions = MakeOptions
  <$> maybeOption
      ( long "from"
      & short 'f'
      & metavar "file"
      & help "A bootloader to base this one on, either binary or JSON." )
  <*> (Delta
    <$> maybeOption
        ( long "bootloader"
        & short 'b'
        & metavar "file"
        & help "A file to include as a bootloader." )
    -- (fmap . (=<<)) getSignature: run getSignature over the parsed
    -- Maybe String, collapsing the nested Maybe.
    <*> (fmap . (=<<)) getSignature (maybeOption
        ( long "signature"
        & short 's'
        & metavar "sig"
        & help "Set the boot signature to some uint16; also accepts 't' and 'f'.")))
  <*> parseDisplay
  where
    -- 't'/'true' -> 0xaa55, 'f'/'false' -> 0, else read as a number;
    -- anything unparseable yields Nothing.
    getSignature :: String -> Maybe Word16
    getSignature s = case toLower <$> s of
      "t" -> Just 0xaa55; "true" -> Just 0xaa55;
      "f" -> Just 0x0000; "false" -> Just 0x0000;
      _ -> case reads s of [(v, "")] -> Just v; _ -> Nothing;
-- | A description of the parsing options: the top-level info block for
-- the \"make\" command.
makeParser :: ParserInfo MakeOptions
makeParser = info makeOptions
  ( progDesc "Create an MBR, potentially based on some existing one."
  & fullDesc)
-- | Run the "make" command: load the base boot record (or start from
-- an empty one), apply the requested changes, then display the result.
make :: MakeOptions -> IO ()
make m = do
    base <- case from m of
      Nothing   -> return nullBootRecord
      Just path -> input (Input path Nothing)
    changed <- applyDelta (change m) base
    display (displayOpts m) changed
| startling/partly | Partly/Make.hs | bsd-3-clause | 2,231 | 0 | 16 | 528 | 671 | 353 | 318 | 59 | 6 |
module Data.Char.SScriptSpec where
import Data.Char.SScript
import Control.Monad (ap)
import Test.Hspec
-- A fixed point is a value that doesn't change under application of a function
shouldBeFixed :: (Eq a, Show a) => (a -> a) -> a -> Expectation
shouldBeFixed f x = x `shouldBe` f x
-- | Apply a character transform to the digit characters "0123456789".
convertDecimals :: (Char -> Char) -> String
convertDecimals f = [ f c | d <- [0 .. 9 :: Integer], c <- show d ]
-- | Symbol characters that have subscript/superscript forms.
symbols :: String
symbols = "+-=()"
-- | Specs for subscript/superscript conversion and the formatter.
-- NOTE: the expected strings are Unicode literals; keep them byte-exact.
spec :: Spec
spec = do
  context "Data.Char.SScript.subscript" $ do
    it "should work for a single char" $
      subscript '0' `shouldBe` '₀'
    it "should work for all single digit decimals" $
      convertDecimals subscript `shouldBe` "₀₁₂₃₄₅₆₇₈₉"
    it "should work for +-=() symbols" $
      map subscript symbols `shouldBe` "₊₋₌₍₎"
    it "should work for selected letters" $
      map subscript "aeioruvx" `shouldBe` "ₐₑᵢₒᵣᵤᵥₓ"
    it "should return the same char if it can't be subscripted" $
      shouldBeFixed (map subscript) "AEIORUVX"
  context "Data.Char.SScript.superscript" $ do
    it "should work for a single char" $
      superscript '0' `shouldBe` '⁰'
    it "should work for all single digit decimals" $
      convertDecimals superscript `shouldBe` "⁰¹²³⁴⁵⁶⁷⁸⁹"
    it "should work for +-=() symbols" $
      map superscript symbols `shouldBe` "⁺⁻⁼⁽⁾"
    it "should work for all letters except qCFQSVXYZ" $
      map superscript "abcdefghijklmnoprstuvwxyzABDEGHIJKLMNOPRTUW"
      `shouldBe`
      "ᵃᵇᶜᵈᵉᶠᵍʰⁱʲᵏˡᵐⁿᵒᵖʳˢᵗᵘᵛʷˣʸᶻᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂ"
    it "should return the same char if it can't be superscripted" $
      shouldBeFixed (map superscript) "qCFQSVXYZ"
  context "Data.Char.SScript.formatSS" $ do
    it "should convert chars following an underscore to its subscript, \
       \like in the chemical formula for dravite: \
       \https://en.wikipedia.org/wiki/Tourmaline#Dravite" $
      formatSS "NaMg_{3}(Al,Mg)_6B_3Si_6O_{27}(OH)"
      `shouldBe`
      "NaMg₃(Al,Mg)₆B₃Si₆O₂₇(OH)"
    it "should convert chars following a caret to its superscript" $
      formatSS "(a^n)^{r+s}" `shouldBe` "(aⁿ)ʳ⁺ˢ"
    it "should convert strings containing a mixture of \
       \underscores and carets" $
      formatSS "(x_{12} - x_{21})^{25} + (y_1 - y_2)^2"
      `shouldBe`
      "(x₁₂ - x₂₁)²⁵ + (y₁ - y₂)²"
| khalilfazal/sscript | test/Data/Char/SScriptSpec.hs | bsd-3-clause | 2,632 | 0 | 13 | 674 | 427 | 211 | 216 | 49 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Views.DomainPrivileges (domainPrivilegesT) where
import Data.Text.Lazy (Text)
import Data.Monoid
import Text.Blaze.Html5 hiding (p)
import qualified Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes
import qualified Text.Blaze.Html5.Attributes as A
import Views.Common
-- | Render the admin page for one domain's privileges.  Left column:
-- add/edit/delete privileges; right column: grant or remove group
-- privileges.  The domain name is also injected into a JS variable for
-- the page's script.
domainPrivilegesT
  :: Text           -- ^ domain name
  -> [Text]         -- ^ privileges defined for the domain
  -> [Text]         -- ^ group names available to grant
  -> [(Text, Text)] -- ^ (group, privilege) pairs already granted
  -> Html
domainPrivilegesT domain privileges groups groupPrivs =
  pageT ("Privileges for domain '" <> domain <> "'") $ do
    H.script ! A.type_ "text/javascript" $ do
      "var domain = '"
      toHtml domain
      "';"
    H.div ! A.class_ "row" $ do
      H.div ! A.class_ "col-md-6" $ do
        H.p ! A.class_ "lead text-center" $ do
          "Add, edit or delete privileges for "
          H.b $ toHtml domain
        H.div ! A.class_ "text-center" ! A.style "margin: 10px auto;" $
          H.table ! A.id "edittable"
                  ! A.class_ "table table-condensed" $ do
            H.thead $ do
              H.tr $ do
                H.th ! A.width "50%" ! A.class_ "text-center" $
                  "Privilege"
                H.th ! A.width "30%" $ mempty
                H.th ! A.width "20%" $ mempty
            H.tbody $ do
              mapM_ privToHtml privileges
            H.tfoot $ do
              H.tr $ do
                H.td $ inp "privilege" "Privilege"
                H.td $ H.button ! A.class_ "btn btn-success btn-xs add-btn"
                       $ "Add a privilege"
                H.td $ mempty
        -- Status banners toggled by the page's JS.
        H.div ! A.class_ "alert alert-success" ! A.id "updatesuccess" $
          "Successfully updated."
        H.div ! A.class_ "alert alert-error" ! A.id "updateerror" $
          "An error occured when trying to update the privileges in the DB."
        H.div ! A.class_ "alert alert-error" ! A.id "fieldempty" $
          "You left one of the fields empty."
      H.div ! A.class_ "col-md-6" $ do
        H.p ! A.class_ "lead text-center" $ do
          "Grant or remove group privileges on "
          H.b $ toHtml domain
        H.div ! A.class_ "text-center" ! A.style "margin: 10px auto;" $
          H.table ! A.id "edittable2"
                  ! A.class_ "table table-condensed" $ do
            H.thead $ do
              H.tr $ do
                H.th ! A.width "40%" ! A.class_ "text-center" $
                  "Group"
                H.th ! A.width "40%" ! A.class_ "text-center" $
                  "Privilege"
                H.th ! A.width "20%" $ mempty
            H.tbody $ do
              mapM_ groupPrivToHtml groupPrivs
            H.tfoot $ do
              H.tr $ do
                H.td $ sel "groupSel" groups
                H.td $ sel "privSel" privileges
                H.td $ H.button ! A.class_ "btn btn-success btn-xs grant-btn"
                       $ "Grant access"
        H.div ! A.class_ "alert alert-success" ! A.id "gpsuccess" $
          "Successfully updated."
        H.div ! A.class_ "alert alert-error" ! A.id "gperror" $
          "An error occured when trying to update the privileges in the DB."
    H.script ! A.type_ "text/javascript"
             ! A.src "/sproxy/static/js/domainprivileges.js" $ mempty
  where -- One table row per privilege, with delete and rules buttons.
        privToHtml p = do
          tr $ do
            td ! A.class_ "edit privilege-edit" $ toHtml p
            td $
              a ! A.class_ "delete-btn btn btn-danger btn-xs" $ "Delete"
            td $
              a ! href "#"
                ! class_ "rule-btn btn btn-link btn-xs"
                $ "Rules"
        -- One table row per (group, privilege) grant.
        groupPrivToHtml (group, priv) = do
          tr $ do
            td $ toHtml group
            td $ toHtml priv
            td $
              a ! A.class_ "delete-gp-btn btn btn-danger btn-xs" $ "Delete"
| alpmestan/spw | src/Views/DomainPrivileges.hs | bsd-3-clause | 4,558 | 0 | 37 | 2,193 | 1,043 | 482 | 561 | 87 | 1 |
-- Copyright: Xavier Lange, 2011
-- License: BSD
module ZMQHS.Message
(
getMessage,
parseIdentity,
buildIdentityMessage,
buildMessage,
Identity(..),
Message(..)
)
where
import ZMQHS.Frame
import qualified Data.Attoparsec as AP
import qualified Data.ByteString as BS
import qualified Blaze.ByteString.Builder as BSBuilder
import qualified Blaze.ByteString.Builder.Int as IntBuilder()
import Control.Monad()
import Data.Monoid()
import Control.Applicative hiding (empty)
import Data.Attoparsec((<?>))
-- | A peer identity announced at connection time: the empty identity
-- ('Anonymous') or a named one carrying the identity frame's payload.
data Identity = Anonymous
              | Named FrameData
  deriving (Show)

-- | A message: a list of frame payloads.
data Message = Message [FrameData]
  deriving (Show, Eq)
-- | Parse a complete multi-frame message.
getMessage :: AP.Parser Message
getMessage = fmap Message parseFrames <?> "getMessage"
-- | Parse frame payloads up to and including the final frame.
parseFrames :: AP.Parser [FrameData]
parseFrames = frameParser >>= continue
  where
    continue (MoreFrame payload)  = (payload :) <$> parseFrames
    continue (FinalFrame payload) = return [payload]
-- | Parse one frame and read its payload as a peer identity: an empty
-- payload means 'Anonymous', anything else is 'Named'.
parseIdentity :: AP.Parser Identity
parseIdentity = do
  payload <- frameData <$> frameParser
  return $ case BS.length payload of
    0 -> Anonymous
    _ -> Named payload
-- | Serialise an identity as a single final frame; 'Anonymous' is the
-- empty payload.
buildIdentityMessage :: Identity -> BSBuilder.Builder
buildIdentityMessage ident = buildFrame (FinalFrame payload)
  where
    payload = case ident of
      Anonymous  -> BS.pack []
      Named name -> name
-- | Serialise a message: every frame but the last is a MORE frame.
buildMessage :: Message -> BSBuilder.Builder
buildMessage (Message chunks) = buildAllFrames chunks

-- | Serialise a non-empty list of payloads; errors on an empty list.
buildAllFrames :: [FrameData] -> BSBuilder.Builder
buildAllFrames []     = error "buildAllFrames called with empty array"
buildAllFrames [x]    = buildFrame (FinalFrame x)
buildAllFrames (x:xs) = buildFrame (MoreFrame x) <> buildAllFrames xs
| xrl/zmqhs | ZMQHS/Message.hs | bsd-3-clause | 1,709 | 0 | 11 | 304 | 498 | 275 | 223 | 46 | 2 |
{-# LANGUAGE RankNTypes #-}
module THilbert where
import Control.Monad (replicateM)
import Control.Applicative (liftA, liftA3, (<$>))
import Data.Algorithm.Hilbert.Functions
import Data.Algorithm.Hilbert.Types
import Data.Algorithm.Hilbert.Utility
import Data.Bits
import Data.List (sort)
import Data.Maybe
import Instances ()
import Test.HUnit
import Test.QuickCheck
-- | Test-only shorthand: a 'PrecisionNum' with value @a@ and precision
-- @b@.  'fromJust' is acceptable here: a failed construction aborts the
-- offending test immediately.
np :: Int -> Int -> PrecisionNum
np a b = fromJust $ mkPrecisionNum a b
-- Type / Instance / Utility
-- | Converting a number to its bit list and back is the identity
-- (small values).
prop_boolNumBijectiveSmall :: Property
prop_boolNumBijectiveSmall = forAll (choose(2, 1000)) $ \i ->
    property (i == j i)
  where j x = (value . fromJust . boolToNum . numToBool) (minPrecision x) :: Integer
-- | Converting a number to its bit list and back is the identity for
-- values near 2^128.  The original gave 'choose' its bounds reversed
-- (lo = 2^128 > hi = 2^128 - 1000); 'choose' expects lo <= hi.
prop_boolNumBijectiveLarge :: Property
prop_boolNumBijectiveLarge = forAll (choose(2^(128::Integer) - 1000, 2^(128::Integer))) $ \i ->
    property (i == j i)
  where j x = (value . fromJust . boolToNum . numToBool) (minPrecision x) :: Integer
-- | 13 at 4 bits of precision is [True,False,True,True] (1101, MSB first).
test_NumToBool :: Assertion
test_NumToBool = [True, False, True, True] @=? (numToBool) (fromJust (mkPrecisionNum (13::Integer) (4::Integer)))

-- | Inverse direction of 'test_NumToBool'.
test_BoolToNum :: Assertion
test_BoolToNum = let
    expected = mkPrecisionNum (13::Int) (4::Int)
  in
    expected @=? (boolToNum [True, False, True, True])

-- | A 2-D point maps to a list of hypercube cell indices, one per bit
-- of precision.
test_ConvertPointToHypercubeA :: Assertion
test_ConvertPointToHypercubeA = let
    point = fromJust $ sequence [mkPrecisionNum (5::Integer) (3::Integer), mkPrecisionNum (6::Integer) (3::Integer)]
    hyper = sequence [ mkPrecisionNum (3::Integer) (2::Integer)
                     , mkPrecisionNum (1::Integer) (2::Integer)
                     , mkPrecisionNum (2::Integer) (2::Integer)]
  in
    hyper @=? (convertPointToHypercube point)

-- | Same conversion, three dimensions.
test_ConvertPointToHypercubeB :: Assertion
test_ConvertPointToHypercubeB = let
    point = sequence [mkPrecisionNum (1::Integer) (3::Integer), mkPrecisionNum (2::Integer) (3::Integer), mkPrecisionNum (3::Integer) (3::Integer)]
    hyper = sequence [mkPrecisionNum (0::Integer) (3::Integer), mkPrecisionNum (3::Integer) (3::Integer), mkPrecisionNum (5::Integer) (3::Integer)]
  in
    (point >>= convertPointToHypercube) @=? hyper

-- | 45 (6 bits, 101101b) split into three 2-bit chunks is [2,3,1].
test_ConvertIntegerA :: Assertion
test_ConvertIntegerA = let
    input = mkPrecisionNum (45::Integer) (6::Integer)
    expected = (sequence [mkPrecisionNum (2::Integer) (2::Integer), mkPrecisionNum (3::Integer) (2::Integer), mkPrecisionNum (1::Integer) (2::Integer)]) :: Maybe [PrecisionNum]
  in
    (input >>= \i -> convertInteger i (2::Int) (3::Int)) @=? expected

-- | Splitting into a single full-width chunk is the identity.
test_ConvertIntegerB :: Assertion
test_ConvertIntegerB = let
    input = mkPrecisionNum (5::Integer) (3::Integer)
    expected = (sequence [mkPrecisionNum (5::Integer) (3::Integer)])
  in
    (input >>= \i -> convertInteger i 3 1) @=? expected

-- When we divide an integer into pieces, the precisions
-- are all equal since our aim was to divide into equal size pieces.
prop_ConvertIntegerEqualPieces :: PrecisionNum -> Int -> Int -> Property
prop_ConvertIntegerEqualPieces testNumber chunks chunkSize = value testNumber > 2 &&
    precision testNumber <= chunks * chunkSize &&
    chunks >= 1 && chunks < 10000 &&
    chunkSize >= 1 && chunkSize < 100
    ==>
    -- Pick a size that is slightly too large.
    let converted = convertInteger testNumber chunkSize chunks
        precisions = (map (fromIntegral . precision)) <$> converted
    in
    Just (replicate chunks (chunkSize)) == precisions
---- See section 2.1.3
-- | Left and right rotation (modulo precision) are inverses.
prop_rotationsBijective :: Property
prop_rotationsBijective = forAll (choose (10, 1000::Integer)) $ \val ->
    forAll (choose (5, 20::Int )) $ \amt ->
    let v1 = minPrecision val
    in
    v1 == rotateL (rotateR v1 amt) amt

-- Rotation examples: rotations act within the value's precision.
test_RotateLmodnA :: Assertion
test_RotateLmodnA = (mkPrecisionNum (3::Integer) (4::Integer)) @=?
    do v <- mkPrecisionNum (9::Integer) (4::Integer)
       return $ rotateL v 1
test_RotateLmodnB :: Assertion
test_RotateLmodnB = (mkPrecisionNum (0::Integer) (4::Integer)) @=?
    do v <- mkPrecisionNum (0::Integer) (4::Integer)
       return $ rotateL v 0
test_RotateLmodnC :: Assertion
test_RotateLmodnC = (mkPrecisionNum (0::Integer) (4::Integer)) @=?
    do v <- mkPrecisionNum (0::Integer) (4::Integer)
       return $ rotateL v 1
test_RotateLmodnD :: Assertion
test_RotateLmodnD = (mkPrecisionNum (1::Integer) (4::Integer)) @=?
    do v <- mkPrecisionNum (1::Integer) (4::Integer)
       return $ rotateL v 0
test_RotateRmodnA :: Assertion
test_RotateRmodnA = (mkPrecisionNum (5::Integer) (32::Integer)) @=?
    do v <- mkPrecisionNum (10::Integer) (32::Integer)
       return $ rotateR v 1
test_RotateRmodnB :: Assertion
test_RotateRmodnB = (mkPrecisionNum (12::Integer) (4::Integer)) @=?
    do v <- mkPrecisionNum (9::Integer) (4::Integer)
       return $ rotateR v 1

-- | 'precisionRequired' matches ceil(log2(val + 1)) for val > 1.
prop_precisionRepresents :: Int -> Property
prop_precisionRepresents val = val > 1 ==>
    ((precisionRequired (fromIntegral val)) :: Int)
    == ceiling ((logBase 2 (fromIntegral val + 1)) :: Double)
test_PrecisionRequiredA :: Assertion
test_PrecisionRequiredA = 3 @=? precisionRequired 4
test_PrecisionRequiredB :: Assertion
test_PrecisionRequiredB = 1 @=? precisionRequired 1
test_PrecisionRequiredC :: Assertion
test_PrecisionRequiredC = 1 @=? precisionRequired 0

-- Construction, Num instance and arithmetic-shift examples.
test_MkPrecisionNum :: Assertion
test_MkPrecisionNum = value (fromJust (mkPrecisionNum (18014398509481974::Integer) (54::Integer))) @=? (18014398509481974::Integer)
test_Num :: Assertion
test_Num = PrecisionNum {value = 2^(99 :: Integer), precision=100} @=? (2^(99::Integer) :: PrecisionNum)
test_ShiftRALarge :: Assertion
test_ShiftRALarge = PrecisionNum {value = 2^(99 :: Integer),
    precision=100} @=? shiftRA (PrecisionNum {value = 2^(100::Integer), precision = 102}) (1::Int)
test_ShiftRASmall :: Assertion
test_ShiftRASmall = (np 3 3) @=? shiftRA (np 6 4) (1::Int)
-- Shifting right also drops one bit of precision.
test_ShiftRAPrecision :: Assertion
test_ShiftRAPrecision = 3 @=? precision ( (np 6 4) `shiftRA` (1::Int))
test_ShiftLASmall :: Assertion
test_ShiftLASmall = (np 6 4) @=? shiftLA (np 3 3) (1::Int)
-- Shifting left gains one bit of precision.
test_ShiftLAPrecision :: Assertion
test_ShiftLAPrecision = 4 @=? precision ( (np 3 3) `shiftLA` (1::Int))
---- <--------------------------------> --
---- Utility Functions --
---- Lemma 2.3 and Algorithm 1
---- The bit that changes in progressing from gray code m to its successor
---- gray code n is determined by trailingSetBits m.
prop_SuccessorBitByTrailingBits :: PrecisionNum -> Property
prop_SuccessorBitByTrailingBits i = (i >= 0)
    ==> testBit (oneSetBit i) (fromIntegral $ trailingSetBits i) -- The bit is set, and
     && clearBit (oneSetBit i) (fromIntegral $ trailingSetBits i) == 0
  where oneSetBit x = (grayCode x) `xor` (grayCode (floatingPlus x 1))
---- Corollary 2.5
---- g(i) is symmetric
---- Since trailingSetBits needs either zero or a positive integer,
---- we have the conditions i <= 2^n - 2. This amounts to the condition
---- expressed above, i < 2^n - 1.
prop_trailingSetBitsSymmetric :: Property
prop_trailingSetBitsSymmetric = forAll (choose (10, 1000::Integer)) $ \i ->
    forAll (suchThat (choose (5, 20::Integer)) (\nn -> 0 < 2^nn - 1 - i)) $ \n ->
    let
      v1 = minPrecision i
      v2 = minPrecision (2^n - 2 - i)
    in
    trailingSetBits v1 == trailingSetBits v2
---- Lemma 2.6
prop_entryPointExitPointSymmetric :: Property
prop_entryPointExitPointSymmetric = forAll (choose (10, 1000::Integer)) $ \i ->
    forAll (suchThat (choose (5, 20::Integer)) (\nn -> 0 < 2^nn - 1 - i)) $ \n ->
    let v1 = minPrecision $ (2^n) - 1 - i
        v2 = minPrecision $ (2^(n-1)::Integer)
        v3 = minPrecision i
        v4 = minPrecision n
    in
    entryPoint v3 == ((exitPoint v1 v4) `xor` v2)
---- Corollary 2.7
---- direction is symmetric
prop_directionSymmetric :: Property
prop_directionSymmetric = forAll (choose (10, 1000::Integer)) $ \i ->
    forAll (suchThat (choose (5, 20::Integer)) (\nn -> 0 < 2^nn - 1 - i)) $ \n ->
    let v1 = minPrecision i
        v2 = minPrecision n
        v3 = minPrecision $ 2^n - 1 - i
    in
    direction v1 v2 == direction v3 v2
--
--
---- Further part of Corollary 2.7
----n = 2
---- e(i) d(i) f(i) e(i) `xor` 2^d(i)
----0 [00] 0 [01] 01
----1 [00] 1 [10] 10
----2 [00] 1 [10] 10
----3 [11] 0 [10] 10
prop_entryDirectionExit :: Property
prop_entryDirectionExit = forAll (choose (10, 1000::Integer)) $ \i ->
    forAll (suchThat (choose (5, 20::Integer)) (\nn -> 0 < 2^nn - 1 - i)) $ \n ->
    let v1 = minPrecision i
        v2 = minPrecision n
    in
    ((entryPoint v1) `xor` (floatingTwoExp(direction v1 v2))) == exitPoint v1 v2
--
--
---- This property is described at the top of page 13 of Hamilton.
prop_directionWhenIZero :: Property
prop_directionWhenIZero = forAll (choose (10, 100::Integer)) $ \n ->
    let v1 = minPrecision n
    in
    direction 0 v1 == 0
---- This property is described at the top of page 13 of Hamilton.
---- Note the stricter condition that i < 2^n - 1
prop_directionOneHigherDimension :: Property
prop_directionOneHigherDimension = forAll (choose (10, 1000::Integer)) $ \i ->
    forAll (suchThat (choose (5, 20::Integer)) (\nn -> 0 < 2^nn - 1 - i)) $ \n ->
    let v1 = minPrecision i
        v2 = minPrecision n
        v3 = minPrecision (n+1)
    in
    direction v1 v2 == direction v1 v3
-- | 'transform' and 'inverseTransform' are inverses for any entry
-- point, direction and point within the dimension's range.
prop_transformBijective :: Property
prop_transformBijective = forAll (choose(2::Int, 10)) $ \dimension ->
    forAll (suchThat (choose(1::Int, 1000000)) (\b' -> b' < 2^dimension)) $ \b ->
    forAll (suchThat (choose(1::Int, 1000000)) (\e' -> e' < 2^dimension)) $ \e ->
    forAll (suchThat (choose(1::Int, 1000000)) (\d' -> d' < 2^dimension)) $ \d ->
    let bPrecision = fromJust $ mkPrecisionNum b dimension
        ePrecision = fromJust $ mkPrecisionNum e dimension
        dPrecision = fromJust $ mkPrecisionNum d dimension
    in
    property $ bPrecision == (inverseTransform ePrecision dPrecision (transform ePrecision dPrecision bPrecision))
-- Unit tests for entryPoint at precision 4: e(0)=0, e(1)=0, e(2)=0, e(3)=3.
test_EntryPointA :: Assertion
test_EntryPointA = fromJust $ do v2 <- mkPrecisionNum (0::Integer) (4::Integer)
                                 v1 <- mkPrecisionNum (0::Integer) (4::Integer)
                                 return $ v2 @=? entryPoint v1
test_EntryPointB :: Assertion
test_EntryPointB = fromJust $ do v2 <- mkPrecisionNum (0::Integer) (4::Integer)
                                 v1 <- mkPrecisionNum (1::Integer) (4::Integer)
                                 return $ v2 @=? entryPoint v1
test_EntryPointC :: Assertion
test_EntryPointC = fromJust $ do v2 <- mkPrecisionNum (0::Integer) (4::Integer)
                                 v1 <- mkPrecisionNum (2::Integer) (4::Integer)
                                 return $ v2 @=? entryPoint v1
test_EntryPointD :: Assertion
test_EntryPointD = fromJust $ do v2 <- mkPrecisionNum (3::Integer) (4::Integer)
                                 v1 <- mkPrecisionNum (3::Integer) (4::Integer)
                                 return $ v2 @=? entryPoint v1
-- Unit tests for exitPoint at order 2: f(0)=1, f(1)=2, f(2)=2, f(3)=2.
test_ExitPointA :: Assertion
test_ExitPointA = fromJust $ do v2 <- mkPrecisionNum (1::Integer) (2::Integer)
                                v1 <- mkPrecisionNum (0::Integer) (2::Integer)
                                return $ v2 @=? (exitPoint v1 2)
test_ExitPointB :: Assertion
test_ExitPointB = fromJust $ do v2 <- mkPrecisionNum (2::Integer) (2::Integer)
                                v1 <- mkPrecisionNum (1::Integer) (2::Integer)
                                return $ v2 @=? exitPoint v1 2
test_ExitPointC :: Assertion
test_ExitPointC = (mkPrecisionNum (2::Integer) (2::Integer)) @=?
                  ((mkPrecisionNum (2::Integer) (2::Integer)) >>= (\x -> return $ exitPoint x 2))
test_ExitPointD :: Assertion
test_ExitPointD = (mkPrecisionNum (2::Integer) (2::Integer)) @=?
                  ((mkPrecisionNum (3::Integer) (2::Integer)) >>= (\x -> return $ exitPoint x 2))
-- Unit tests for transform; Maybe-wrapped operands are combined via liftA3.
test_TransformA :: Assertion
test_TransformA = let e = mkPrecisionNum (0::Integer) (2::Integer)
                      d = mkPrecisionNum (0::Integer) (2::Integer)
                      b = mkPrecisionNum (1::Integer) (2::Integer) -- T_(e,d) == 1
                      res = mkPrecisionNum (2::Integer) (2::Integer)
                  in
                    res @=? liftA3 transform e d b
-- Examples from page 18.
test_TransformB :: Assertion
test_TransformB = let e = mkPrecisionNum (0::Integer) (2::Integer)
                      d = mkPrecisionNum (1::Integer) (2::Integer)
                      b = mkPrecisionNum (3::Integer) (2::Integer)
                      res = mkPrecisionNum (3::Integer) (2::Integer)
                  in
                    res @=? liftA3 transform e d b
test_TransformC :: Assertion
test_TransformC = let e = mkPrecisionNum (0::Integer) (2::Integer)
                      d = mkPrecisionNum (1::Integer) (2::Integer)
                      b = mkPrecisionNum (2::Integer) (2::Integer)
                      res = mkPrecisionNum (2::Integer) (2::Integer)
                  in
                    res @=? liftA3 transform e d b
test_TransformD :: Assertion
test_TransformD = let e = mkPrecisionNum (3::Integer) (2::Integer)
                      d = mkPrecisionNum (0::Integer) (2::Integer)
                      b = mkPrecisionNum (1::Integer) (2::Integer)
                      res = mkPrecisionNum (1::Integer) (2::Integer)
                  in
                    res @=? liftA3 transform e d b
-- Unit tests for inverseTransform (inverting the cases above).
test_InverseTransformA :: Assertion
test_InverseTransformA = let e = mkPrecisionNum (1::Integer) (4::Integer)
                             d = mkPrecisionNum (10::Integer) (4::Integer)
                             b = mkPrecisionNum (0::Integer) (4::Integer)
                             res = mkPrecisionNum (1::Integer) (4::Integer)
                         in
                           res @=? liftA3 inverseTransform e d b
test_InverseTransformB :: Assertion
test_InverseTransformB = let e = mkPrecisionNum (1::Integer) (4::Integer)
                             d = mkPrecisionNum (10::Integer) (4::Integer)
                             b = mkPrecisionNum (1::Integer) (4::Integer)
                             res = mkPrecisionNum (9::Integer) (4::Integer)
                         in
                           res @=? liftA3 inverseTransform e d b
test_InverseTransformC :: Assertion
test_InverseTransformC = let e = mkPrecisionNum (0::Integer) (2::Integer)
                             d = mkPrecisionNum (0::Integer) (2::Integer)
                             b = mkPrecisionNum (2::Integer) (2::Integer)
                             res = mkPrecisionNum (1::Integer) (2::Integer)
                         in
                           res @=? liftA3 inverseTransform e d b
test_InverseTransformD :: Assertion
test_InverseTransformD = let e = mkPrecisionNum (3::Integer) (2::Integer)
                             d = mkPrecisionNum (1::Integer) (2::Integer)
                             b = mkPrecisionNum (0::Integer) (2::Integer)
                             res = mkPrecisionNum (3::Integer) (2::Integer)
                         in
                           res @=? liftA3 inverseTransform e d b
test_InverseTransformE :: Assertion
test_InverseTransformE = let e = mkPrecisionNum (3::Integer) (2::Integer)
                             d = mkPrecisionNum (0::Integer) (2::Integer)
                             b = mkPrecisionNum (0::Integer) (2::Integer)
                             res = mkPrecisionNum (3::Integer) (2::Integer)
                         in
                           res @=? liftA3 inverseTransform e d b
-- A PrecisionNum can appear as an exponent: 2^3 == 8.
test_Exp :: Assertion
test_Exp = let a = fromJust $ mkPrecisionNum (3::Integer) (3::Integer)
           in
             2^a @=? (8::Integer)
-- Unit tests for direction.
test_DirectionA :: Assertion
test_DirectionA = fromJust $ do v <- mkPrecisionNum (1::Integer) (2::Integer)
                                p <- mkPrecisionNum (2::Integer) (2::Integer)
                                r <- mkPrecisionNum (1::Integer) (2::Integer)
                                return $ r @=? (direction v p)
test_DirectionB :: Assertion
test_DirectionB = fromJust $ do v <- mkPrecisionNum (0::Integer) (3::Integer)
                                p <- mkPrecisionNum (3::Integer) (3::Integer)
                                r <- mkPrecisionNum (0::Integer) (3::Integer)
                                return $ r @=? (direction v p)
-- <----------------------------> ---
-- Graycode tests ---
---- Transformation is bijective.
prop_graycodeBijective :: PrecisionNum -> Property
prop_graycodeBijective i = (value i >= 0)
                           ==> grayCodeInverse (grayCode i) == i
test_GrayCodeA :: Assertion
test_GrayCodeA = 8943502960915236902818643137753611301881986722824192 @=? grayCode 10000000000000000000000000000000000000000000000000000
-- grayCode must agree with the direct definition i `xor` (i `shiftR` 1).
test_GrayCodeB :: Assertion
test_GrayCodeB = let i = PrecisionNum {value = 10000000000000000000000000000000000000000000000000000, precision = 173}
                     byInstance = i `xor` shifted where shifted = i `shiftR` 1
                     fixedVersion = grayCode i
                 in
                   byInstance @=? fixedVersion
test_GrayCodeC :: Assertion
test_GrayCodeC = 27670116110564327424 @=? grayCode ( 2^(64::Integer) )
test_GrayCodeD :: Assertion
test_GrayCodeD = 55340232221128654848 @=? grayCode ( 2^(65::Integer) )
---- Lemma 2.4
---- Gray code is symmetric
-- | gc(2^n - 1 - i) == gc(i) `xor` 2^(n-1).
prop_grayCodeSymmetric :: Property
prop_grayCodeSymmetric = forAll (choose (10, 1000::Integer)) $ \i ->
  forAll (suchThat (choose (5, 20::Integer)) (\nn -> 0 < 2^nn - 1 - i)) $ \n ->
    let v1 = minPrecision ((2^n) - 1 - i)
        v2 = minPrecision (2^(n-1)::Integer)
        v3 = minPrecision i
    in
      grayCode v1 == (grayCode v3) `xor` v2
-- | The same symmetry, exercised on values around 2^100.
prop_grayCodeSymmetricLarge :: Property
prop_grayCodeSymmetricLarge = forAll (choose (2^(100::Integer)::Integer, 2^(100::Integer)+10000)) $ \i ->
  forAll (suchThat (choose (105, 107::Integer)) (\nn -> 0 < 2^nn - 1 - i)) $ \n ->
    let v1 = minPrecision ((2^n) - 1 - i)
        v2 = minPrecision (2^(n-1)::Integer)
        v3 = minPrecision i
    in
      grayCode v1 == (grayCode v3) `xor` v2
--- Argument to graycode, and its result have the same number of bits.
prop_grayCodeMaintainsPrecision :: Integer -> Property
prop_grayCodeMaintainsPrecision n = n > 0 ==>
  let v = minPrecision n
  in
    precision v == precision (grayCode v)
-- Unit tests for grayCodeInverse; precision must be carried through.
test_iGrayCode1 :: Assertion
test_iGrayCode1 = let expected = mkPrecisionNum (2::Integer) (4::Integer)
                      input = mkPrecisionNum (3::Int) (4::Int)
                  in
                    liftA grayCodeInverse input @=? expected
test_iGrayCode2 :: Assertion
test_iGrayCode2 = let expected = mkPrecisionNum (1::Integer) (4::Integer)
                      input = mkPrecisionNum (1::Integer) (4::Integer)
                  in
                    liftA grayCodeInverse input @=? expected
test_iGrayCode3 :: Assertion
test_iGrayCode3 = let expected = mkPrecisionNum (3::Integer) (40::Integer)
                      input = mkPrecisionNum (2::Integer) (40::Integer)
                  in
                    liftA grayCodeInverse input @=? expected
--- Hilbert Tests.
-- | Round trip index -> point -> index at order 32, dimension 32.
prop_hilbertBijectiveO32D32 :: Int -> Property
prop_hilbertBijectiveO32D32 i = i > 0 ==>
  let order = 32
      dimension = 32
  in
    property $ i == fromJust (pointToIndex order dimension $ fromJust (indexToPoint order dimension (i)))
-- | Round trip for arbitrary small orders and dimensions.
prop_hilbertBijective :: Property
prop_hilbertBijective = forAll (choose (1, 6::Int)) $ \order ->
  forAll (choose (1, 6::Int)) $ \dimension ->
  forAll (suchThat (choose (1, 10000::Int)) (\v -> v < 2^(order*dimension))) $ \va ->
    property $ va == fromJust (pointToIndex order dimension $ fromJust (indexToPoint order dimension va))
-- | Mapping every point of a small grid must yield each index in
-- [0 .. 2^(order*dimension) - 1] exactly once.
prop_hilbertExhaustive :: Property
prop_hilbertExhaustive = forAll (choose(1::Int, 4)) $ \order ->
  forAll (choose(1::Int, 3)) $ \dimension ->
    property $ -- Generate every possible point.
      let indexes = fromJust $ mapM (pointToIndex order dimension) (replicateM dimension [0..(2^order)-1])
      in
        sort indexes == checkRange (order*dimension)
  where checkRange p = [0 .. (2^p-1) ] :: [Int]
-- Fixed-size instance of the exhaustive property (order 2, dimension 3).
test_HilbertExhaustive :: Assertion
test_HilbertExhaustive = let order = 2
                             dimension = 3
                             indexes = fromJust $ mapM (pointToIndex order dimension) (replicateM dimension [0..(2^order)-1])
                         in
                           sort indexes @=? checkRange (order*dimension)
  where checkRange p = [0 .. (2^p-1) ] :: [Int]
-- trailingSetBits examples: 7 -> 3, 15 -> 4, 143 -> 4, 2 -> 0, 128 -> 0.
test_SetTrailingBits :: Assertion
test_SetTrailingBits = fromJust $ do v1 <- mkPrecisionNum (7::Integer) (20::Integer)
                                     v2 <- mkPrecisionNum (3::Integer) (20::Integer)
                                     return $ v2 @=? trailingSetBits v1
test_SetTrailingBits2 :: Assertion
test_SetTrailingBits2 = fromJust $ do v2 <- mkPrecisionNum (4::Integer) (50::Integer)
                                      v1 <- mkPrecisionNum (15::Int) (50::Int)
                                      return $ v2 @=? trailingSetBits v1
test_SetTrailingBits3 :: Assertion
test_SetTrailingBits3 = fromJust $ do v2 <- mkPrecisionNum (4::Int) (100::Int)
                                      v1 <- mkPrecisionNum (15+128::Int) (100::Int)
                                      return $ v2 @=? trailingSetBits v1
test_SetTrailingBits4 :: Assertion
test_SetTrailingBits4 = fromJust $ do v2 <- mkPrecisionNum (0::Integer) (2::Integer)
                                      v1 <- mkPrecisionNum (2::Integer) (2::Integer)
                                      return $ v2 @=? trailingSetBits v1
test_SetTrailingBits5 :: Assertion
test_SetTrailingBits5 = fromJust $ do v2 <- mkPrecisionNum (0::Integer) (20::Integer)
                                      v1 <- mkPrecisionNum (128::Integer) (20::Integer)
                                      return $ v2 @=? trailingSetBits v1
-- Reference-line tests: one test per loop iteration i for transform,
-- grayCodeInverse (w), newE and newD.
-- Line where i = 2
test_TransformRef1 :: Assertion
test_TransformRef1 = let e = mkPrecisionNum (0::Integer) (2::Integer)
                         d = mkPrecisionNum (1::Integer) (2::Integer)
                         b = mkPrecisionNum (3::Integer) (2::Integer) -- l == (3::Integer)
                         res = mkPrecisionNum (3::Integer) (2::Integer) -- T_(e, d) == (3::Integer)
                     in
                       res @=? liftA3 transform e d b
-- Line where i = 1
test_TransformRef2 :: Assertion
test_TransformRef2 = let e = mkPrecisionNum (0::Integer) (2::Integer)
                         d = mkPrecisionNum (1::Integer) (2::Integer)
                         b = mkPrecisionNum (2::Integer) (2::Integer) -- l == (2::Integer)
                         res = mkPrecisionNum (2::Integer) (2::Integer) -- T_(e,d) = (2::Integer)
                     in
                       res @=? liftA3 transform e d b
-- Line where i = 0
test_TransformRef3 :: Assertion
test_TransformRef3 = let e = mkPrecisionNum (3::Integer) (2::Integer)
                         d = mkPrecisionNum (0::Integer) (2::Integer)
                         b = mkPrecisionNum (1::Integer) (2::Integer) -- l == 2
                         res = mkPrecisionNum (1::Integer) (2::Integer) -- T_(e,d) = 1
                     in
                       res @=? liftA3 transform e d b
-- Line where i = 2
test_WRef1 :: Assertion
test_WRef1 = let input = mkPrecisionNum (3::Integer) (2::Integer)
                 out = mkPrecisionNum (2::Integer) (2::Integer)
             in
               out @=? liftA grayCodeInverse input
-- Line where i = 1
test_WRef2 :: Assertion
test_WRef2 = let input = mkPrecisionNum (2::Integer) (2::Integer)
                 out = mkPrecisionNum (3::Integer) (2::Integer)
             in
               out @=? liftA grayCodeInverse input
-- Line where i = 0
test_WRef3 :: Assertion
test_WRef3 = let input = mkPrecisionNum (1::Integer) (2::Integer)
                 out = mkPrecisionNum (1::Integer) (2::Integer)
             in
               out @=? liftA grayCodeInverse input
-- Line where i = 2
test_ERef1 :: Assertion
test_ERef1 = let e = mkPrecisionNum (0::Integer) (2::Integer)
                 w = mkPrecisionNum (2::Integer) (2::Integer)
                 d = mkPrecisionNum (1::Integer) (2::Integer) -- T_(e,d) == 3
                 res = mkPrecisionNum (0::Integer) (2::Integer)
             in
               liftA3 newE e w d @=? res
-- Line where i = 1
test_ERef2 :: Assertion
test_ERef2 = let e = mkPrecisionNum (0::Integer) (2::Integer)
                 w = mkPrecisionNum (3::Integer) (2::Integer)
                 d = mkPrecisionNum (1::Integer) (2::Integer) -- T_(e,d) == 3
                 res = mkPrecisionNum (3::Integer) (2::Integer)
             in
               liftA3 newE e w d @=? res
-- Line where i = 0
test_ERef3 :: Assertion
test_ERef3 = let e = mkPrecisionNum (3::Integer) (2::Integer)
                 w = mkPrecisionNum (1::Integer) (2::Integer)
                 d = mkPrecisionNum (0::Integer) (2::Integer)
                 expected = mkPrecisionNum (3::Integer) (2::Integer)
             in
               expected @=? liftA3 newE e w d
-- Line where i =2
test_DRef1 :: Assertion
test_DRef1 = let w = mkPrecisionNum (2::Integer) (2::Integer)
                 d = mkPrecisionNum (1::Integer) (2::Integer) -- T_(e,d) == 3
                 dimension = mkPrecisionNum (2::Integer) (2::Integer)
                 res = mkPrecisionNum (1::Integer) (2::Integer)
             in
               liftA3 newD d w dimension @=? res
-- Line where i =1
test_DRef2 :: Assertion
test_DRef2 = let w = mkPrecisionNum (3::Integer) (2::Integer)
                 d = mkPrecisionNum (1::Integer) (2::Integer) -- T_(e,d) == 3
                 dimension = mkPrecisionNum (2::Integer) (2::Integer)
                 res = mkPrecisionNum (0::Integer) (2::Integer)
             in
               liftA3 newD d w dimension @=? res
-- Line where i =0
test_DRef3 :: Assertion
test_DRef3 = let w = mkPrecisionNum (1::Integer) (2::Integer)
                 d = mkPrecisionNum (0::Integer) (2::Integer) -- T_(e,d) == 3
                 dimension = mkPrecisionNum (2::Integer) (2::Integer)
                 res = mkPrecisionNum (0::Integer) (2::Integer)
             in
               liftA3 newD d w dimension @=? res
-- The following is a detailed breakdown for pointToIndex 3 2 [5, 6])
-- There are three stages in the calculation.
test_HilbertIndex :: Assertion
test_HilbertIndex = (mkPrecisionNum (45::Integer) (6::Integer)) @=? (pointToIndex 3 2 [5, 6])
-- pointToIndex' 0 0 3 2 [3,1,2]
test_Stage1overall :: Assertion
test_Stage1overall = Just (np 45 6) @=?
                     pointToIndex' (np 0 2) (np 0 2) (np 3 6) (np 2 6) [np 3 2,np 1 2,np 2 2]
test_Stage1overallPrecision :: Assertion
test_Stage1overallPrecision = 6 @=?
                              precision (fromJust (pointToIndex' (np 0 2) (np 0 2) (np 3 6) (np 2 6) [np 3 2, np 1 2, np 2 2] ) )
test_Stage1ShiftL :: Assertion
test_Stage1ShiftL = 32 @=? (np 2 2) `shiftL` 4
-- t = 3 = transform 0 0 3 2
test_Stage1t :: Assertion
test_Stage1t = (np 3 2) @=? transform (np 0 2) (np 0 2) (np 3 2)
test_Stage1tPrecision :: Assertion
test_Stage1tPrecision = 2 @=? precision ( transform (np 0 2) (np 0 2) (np 3 2))
-- w = 2 = grayCodeInverse 3
test_Stage1w :: Assertion
test_Stage1w = (np 2 2) @=? grayCodeInverse (np 3 2)
test_Stage1wPrecision :: Assertion
test_Stage1wPrecision = 2 @=? precision (grayCodeInverse (np 3 2))
-- e' = 0 = newE 0 2 0
test_Stage1e' :: Assertion
test_Stage1e' = (np 0 2) @=? newE (np 0 2) (np 2 2) (np 0 2)
test_Stage1e'Precision :: Assertion
test_Stage1e'Precision = 2 @=? precision (newE (np 0 2) (np 2 2) (np 0 2) )
-- d' = 0 = newD 0 2 2
test_Stage1d' :: Assertion
test_Stage1d' = (np 0 2) @=? newD (np 0 2) (np 2 2) (np 2 2)
test_Stage1d'Precision :: Assertion
test_Stage1d'Precision = 2 @=? precision (newD (np 0 2) (np 2 2) (np 2 2))
-- (2 << (2 * 2)) .|.
-- pointToIndex' 0 0 3 2 [1, 2]
test_Stage2overall :: Assertion
test_Stage2overall = Just (np 13 4) @=? pointToIndex' (np 0 2) (np 0 2) (np 3 6) (np 2 6) [np 1 2, np 2 2]
-- t = 2 = transform 0 0 1 2
test_Stage2t :: Assertion
test_Stage2t = (np 2 2) @=? transform (np 0 2) (np 0 2) (np 1 2)
-- w = 3 = grayCodeInverse 2
test_Stage2w :: Assertion
test_Stage2w = (np 3 2) @=? grayCodeInverse (np 2 2)
-- e' = 3 = newE 0 3 0
test_Stage2e' :: Assertion
test_Stage2e' = (np 3 2) @=? newE (np 0 2) (np 3 2) (np 0 2)
-- d' = 1 = newD 0 3 2
test_Stage2d' :: Assertion
test_Stage2d' = (np 1 2) @=? newD (np 0 2) (np 3 2) (np 2 2)
-- (3 << (1 * 2)) .|.
-- pointToIndex' 3 1 3 2 [2]
test_Stage3overall :: Assertion
test_Stage3overall = Just (np 1 6) @=? pointToIndex' (np 3 2) (np 1 2) (np 3 6) (np 2 6) [np 2 2]
-- t = 1 = transform 3 1 2 2
test_Stage3transform :: Assertion
test_Stage3transform = (np 1 2) @=? transform (np 3 2) (np 1 2) (np 2 2)
test_Stage3transformDim :: Assertion
test_Stage3transformDim = 2 @=? precision (transform (np 3 2) (np 1 2) (np 2 2))
test_Stage3transformXor :: Assertion
test_Stage3transformXor = res @=? (b `xor` e) where
  b = np 2 6
  e = np 3 6
  res = np 1 6
test_Stage3transformRotateR :: Assertion
test_Stage3transformRotateR = res @=? (z `rotateR` amount)
  where
    z = np 1 3
    amount = fromIntegral (d+1)
    d = np 1 3
    res = fromJust $ mkPrecisionNum (2::Integer) (3::Integer)
-- w = 1 = grayCodeInverse 1
test_Stage3w :: Assertion
test_Stage3w = (np 1 2) @=? grayCodeInverse (np 1 2)
-- e' = 3 = newE 3 1 1
test_Stage3e' :: Assertion
test_Stage3e' = (np 3 2) @=? newE (np 3 2) (np 1 2) (np 1 2)
-- d' = 1 = newD 1 1 2  (the dimension argument below is 2, not 1)
test_Stage3d' :: Assertion
test_Stage3d' = (np 1 2) @=? newD (np 1 2) (np 1 2) (np 2 2)
-- 1 << (0 * 2)
-- = 32 .|. 12 .|. 1 = 45
test_HilbertIndex2 :: Assertion
test_HilbertIndex2 = (mkPrecisionNum (2::Integer) (2::Integer)) @=? (pointToIndex 2 2 [1, 1])
test_HilbertIndex3 :: Assertion
test_HilbertIndex3 = (mkPrecisionNum (0::Integer) (5::Integer)) @=? (pointToIndex 10 10 [0,0,0,0,0,0,0,0,0,0])
test_HilbertIndex4 :: Assertion
test_HilbertIndex4 = (mkPrecisionNum (48::Integer) (6::Integer)) @=? (pointToIndex 3 2 [3, 7])
--- Hilbert index inverse of 48, order 3, dimension 2 ----------------
test_HilbertIndexInverse :: Assertion
test_HilbertIndexInverse = ([3, 7]::[Integer]) @=? (fromJust $ indexToPoint (3) (2) (48))
-- Order 3, Dimension 2, Index 48
-- ->>> [1,3,3] == indexToPoint' 0 0 3 2 [3, 0, 0]
test_HilbertIndexInverse' :: Assertion
test_HilbertIndexInverse' = ([minPrecision (1 :: Int), minPrecision (3 :: Int), minPrecision (3 :: Int)]) @=?
                            (fromJust $ sequence (indexToPoint' (minPrecision (0::Int)) (minPrecision (0::Int)) (minPrecision (3::Int)) (minPrecision (2::Int)) [minPrecision (3::Int),minPrecision (0::Int), minPrecision (0::Int) ]))
-- Components of indexToPoint' 0 0 3 2 [3, 0, 0]
-- w = 3
-- grayCode 3 = 2
-- e = 0
-- d = 0
-- order = 3
-- dimension = 2
-- grayCode w
test_HilbertIndexInverseGrayCode :: Assertion
test_HilbertIndexInverseGrayCode = 2 @=? (grayCode 3)
-- l = grayCode w
-- inverseTransform e d l dimension
test_HilbertIndexInverseInverseTransform :: Assertion
test_HilbertIndexInverseInverseTransform = 1 @=? (inverseTransform 0 0 2 )
-- newE e w d
test_HilbertIndexInverseNewE :: Assertion
test_HilbertIndexInverseNewE = 3 @=? (newE 0 3 0)
-- newD d w dimension
test_HilbertIndexInverseNewD :: Assertion
test_HilbertIndexInverseNewD = 1 @=? (newD 0 3 2)
test_HilbertIndexInverseB :: Assertion
test_HilbertIndexInverseB = ([5, 6]::[Integer]) @=? (fromJust $ indexToPoint (3::Int) (2::Int) (45::Integer))
test_HilbertIndexInverse'B :: Assertion
test_HilbertIndexInverse'B = ([minPrecision (3::Integer), minPrecision (1::Integer), minPrecision (2::Integer)]) @=? (fromJust $ sequence (indexToPoint' (minPrecision (0::Integer)) (minPrecision (0::Integer)) (minPrecision (3::Integer)) (minPrecision (2::Integer)) [minPrecision (2::Integer),minPrecision (3::Integer), minPrecision (1::Integer)]))
test_HilbertIndexInverseC :: Assertion
test_HilbertIndexInverseC = ([0, 6, 3]::[Integer]) @=? (fromJust $ indexToPoint (3) (3) 83)
-- If the result of testHilbertIndexInverseC is [0,6,3]
-- testHilbertIndexInverse'C needs to return [2,3,1], since:
-- 0 6 3
-- ---------
-- 0 1 0 | 2
-- 0 1 1 | 3
-- 0 0 1 | 1
test_HilbertIndexInverse'C :: Assertion
test_HilbertIndexInverse'C = [Just (minPrecision (2::Integer)), Just (minPrecision (3::Integer)), Just (minPrecision (1::Integer))]
                             @=? (indexToPoint' 0 0 (minPrecision (3::Integer)) (minPrecision (3::Integer)) [PrecisionNum {value = 1, precision = 3}
                                                                                                            ,PrecisionNum {value = 2, precision = 3}
                                                                                                            ,PrecisionNum {value = 3, precision = 3}])
-- First result list element should be 2 (prior to elements [3,1])
-- This is calculated using, e, d, and the first element in the converted
-- list (1, 2, 3)
test_HilbertIndexInverse'C1a :: Assertion
test_HilbertIndexInverse'C1a = let e = 0
                                   d = 0
                                   w = PrecisionNum {value = 1, precision = 3}
                                   l = grayCode w
                                   t = inverseTransform e d l
                               in
                                 (minPrecision (2::Integer)) @=? t
-- The inverseTransform step alone, with the same inputs.
test_HilbertIndexInverse'C1aInvTransform :: Assertion
test_HilbertIndexInverse'C1aInvTransform = let e = 0
                                               d = 0
                                               dimension = (3::Integer)
                                               l = fromJust (mkPrecisionNum (1::Integer) dimension)
                                           in
                                             (minPrecision (2::Integer)) @=? inverseTransform e d l
-- Tail should be elements [3,1].
-- Again, this is calculated from the list [1,2,3]
test_HilbertIndexInverse'C1b :: Assertion
test_HilbertIndexInverse'C1b = let e = 0
                                   d = 0
                                   order = 3
                                   dimension = 3
                                   w = PrecisionNum {value = 1, precision = 1}
                                   ws = [PrecisionNum {value = 2, precision = 3},PrecisionNum {value = 3, precision = 3}]
                                   e' = newE e w d
                                   d' = newD d w dimension
                               in
                                 [Just (minPrecision (3::Integer)), Just (minPrecision (1::Integer))]
                                 @=? indexToPoint' e' d' order dimension ws
test_HilbertIndexInverse'C2a :: Assertion
test_HilbertIndexInverse'C2a = let e = PrecisionNum {value = 0, precision = 1}
                                   d = PrecisionNum {value = 2, precision = 2}
                                   w = PrecisionNum {value = 2, precision = 3}
                                   l = grayCode w
                                   t = inverseTransform e d l
                               in
                                 minPrecision (3::Integer) @=? t
test_HilbertIndexInverse'C2b :: Assertion
test_HilbertIndexInverse'C2b = let e = PrecisionNum {value = 0, precision = 1}
                                   d = PrecisionNum {value = 2, precision = 2}
                                   order = 3
                                   dimension = 3
                                   w = PrecisionNum {value = 2, precision = 3}
                                   ws = [PrecisionNum {value =3, precision = 3}]
                                   e' = newE e w d
                                   d' = newD d w dimension
                               in
                                 [Just (minPrecision (1::Integer))] @=? indexToPoint' e' d' order dimension ws
test_HilbertIndexInverse'C2newD :: Assertion
test_HilbertIndexInverse'C2newD = let d = PrecisionNum {value=2, precision=2}
                                      w = PrecisionNum {value = 2, precision = 3}
                                      dimension = 3
                                  in
                                    (minPrecision (1::Integer)) @=? newD d w dimension
test_HilbertIndexInverse'C3a :: Assertion
test_HilbertIndexInverse'C3a = let e = PrecisionNum {value = 0, precision = 1}
                                   d = PrecisionNum {value = 1, precision = 1}
                                   w = PrecisionNum {value = 3, precision = 3}
                                   l = grayCode w
                                   t = inverseTransform e d l
                               in
                                 (minPrecision (1::Integer)) @=? t
-- The recursion terminates with an empty result on an empty index list.
test_HilbertIndexInverse'C3b :: Assertion
test_HilbertIndexInverse'C3b = let order = 3
                                   dimension = 3
                                   ws = []
                                   e' = PrecisionNum {value = 5, precision =3}
                                   d' = PrecisionNum {value = 1, precision =3}
                               in
                                 [ ] @=? indexToPoint' e' d' order dimension ws
| cje/hilbert | test/THilbert.hs | bsd-3-clause | 41,962 | 0 | 21 | 15,631 | 11,572 | 6,393 | 5,179 | -1 | -1 |
module System.Console.Haskeline.Monads(
module System.Console.Haskeline.MonadException,
MonadTrans(..),
MonadIO(..),
ReaderT,
runReaderT,
runReaderT',
mapReaderT,
asks,
StateT,
runStateT,
evalStateT',
mapStateT,
gets,
modify,
update,
MonadReader(..),
MonadState(..),
MaybeT(MaybeT),
runMaybeT,
orElse
) where
import Control.Applicative (Applicative(..))
import Control.Monad (ap, liftM)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.Maybe (MaybeT(MaybeT),runMaybeT)
import Control.Monad.Trans.Reader hiding (ask,asks)
import qualified Control.Monad.Trans.Reader as Reader
import Data.IORef
#if __GLASGOW_HASKELL__ < 705
import Prelude hiding (catch)
#endif
import System.Console.Haskeline.MonadException
-- | A minimal local replacement for mtl's @MonadReader@: only 'ask' is
-- needed by this package.
class Monad m => MonadReader r m where
    ask :: m r

instance Monad m => MonadReader r (ReaderT r m) where
    ask = Reader.ask

-- | A 'StateT' computation can read its current state as an environment.
instance Monad m => MonadReader s (StateT s m) where
    ask = get

-- Automatically lift 'ask' through any other transformer.
instance (MonadReader r m, MonadTrans t, Monad (t m)) => MonadReader r (t m) where
    ask = lift ask
-- | Retrieve a projection of the environment using the given selector.
asks :: MonadReader r m => (r -> a) -> m a
asks sel = ask >>= \env -> return (sel env)
-- | A minimal local replacement for mtl's @MonadState@, providing only
-- 'get' and 'put'.
class Monad m => MonadState s m where
    get :: m s
    put :: s -> m ()
-- | Retrieve a projection of the current state.
gets :: MonadState s m => (s -> a) -> m a
gets sel = do
    s <- get
    return (sel s)
-- | Replace the state with the result of applying the given function to it.
modify :: MonadState s m => (s -> s) -> m ()
modify step = do
    old <- get
    put (step old)
-- | Replace the state and return a value, both computed from the old
-- state.  The pair is bound lazily, so the result and new state are only
-- forced on demand.
update :: MonadState s m => (s -> (a,s)) -> m a
update step = do
    old <- get
    let (result, new) = step old
    put new
    return result
-- | 'runReaderT' with its arguments swapped; convenient for partially
-- applying an environment.
runReaderT' :: Monad m => r -> ReaderT r m a -> m a
runReaderT' env m = runReaderT m env
-- | A CPS-encoded state monad transformer: the continuation receives the
-- result and the final state, avoiding a tuple allocation on every bind.
newtype StateT s m a = StateT { getStateTFunc
                                    :: forall r . s -> m ((a -> s -> r) -> r)}
instance Monad m => Functor (StateT s m) where
    fmap = liftM

-- Pre-AMP bootstrapping: Applicative is defined in terms of the Monad
-- instance below (this module still supports old GHCs; see the CPP above).
instance Monad m => Applicative (StateT s m) where
    pure = return
    (<*>) = ap

instance Monad m => Monad (StateT s m) where
    -- Hand the value and the unchanged state straight to the continuation.
    return x = StateT $ \s -> return $ \f -> f x s
    StateT f >>= g = StateT $ \s -> do
        useX <- f s
        useX $ \x s' -> getStateTFunc (g x) s'

instance MonadTrans (StateT s) where
    lift m = StateT $ \s -> do
        x <- m
        return $ \f -> f x s

instance MonadIO m => MonadIO (StateT s m) where
    liftIO = lift . liftIO
-- | Apply a natural transformation to the monad underlying a 'StateT'.
mapStateT :: (forall b . m b -> n b) -> StateT s m a -> StateT s n a
mapStateT nat (StateT run) = StateT $ \s -> nat (run s)
-- | Run a 'StateT' computation, returning the result together with the
-- final state.
runStateT :: Monad m => StateT s m a -> s -> m (a, s)
runStateT f s = do
    useXS <- getStateTFunc f s
    -- Materialise the CPS result as an ordinary pair.
    return $ useXS $ \x s' -> (x,s')

-- | Build a 'StateT' from the conventional @s -> m (a, s)@ representation.
makeStateT :: Monad m => (s -> m (a,s)) -> StateT s m a
makeStateT f = StateT $ \s -> do
    (x,s') <- f s
    return $ \g -> g x s'
instance Monad m => MonadState s (StateT s m) where
    get = StateT $ \s -> return $ \f -> f s s
    -- The new state is forced to WHNF before being stored, preventing
    -- thunk build-up across repeated puts.
    put s = s `seq` StateT $ \_ -> return $ \f -> f () s

-- Automatically lift 'get'/'put' through any other transformer.
instance (MonadState s m, MonadTrans t, Monad (t m)) => MonadState s (t m) where
    get = lift get
    put = lift . put

-- ReaderT (IORef s) is better than StateT s for some applications,
-- since StateT loses its state after an exception such as ctrl-c.
instance MonadIO m => MonadState s (ReaderT (IORef s) m) where
    get = ask >>= liftIO . readIORef
    put s = ask >>= liftIO . flip writeIORef s
-- | Run a 'StateT' computation and keep only its result, discarding the
-- final state.  Note the argument order: the initial state comes first.
evalStateT' :: Monad m => s -> StateT s m a -> m a
evalStateT' s f = runStateT f s >>= \p -> return (fst p)
-- Thread the state through 'controlIO' by converting to and from the
-- conventional @s -> m (a, s)@ representation around the callback.
instance MonadException m => MonadException (StateT s m) where
    controlIO f = makeStateT $ \s -> controlIO $ \run ->
                    fmap (flip runStateT s) $ f $ stateRunIO s run
      where
        stateRunIO :: s -> RunIO m -> RunIO (StateT s m)
        stateRunIO s (RunIO run) = RunIO (\m -> fmap (makeStateT . const)
                                            $ run (runStateT m s))
-- | Run a 'MaybeT' computation, falling back to the second action when
-- the first produces 'Nothing'.
orElse :: Monad m => MaybeT m a -> m a -> m a
orElse (MaybeT f) g = do
    mx <- f
    case mx of
        Nothing -> g
        Just x  -> return x
| DavidAlphaFox/ghc | libraries/haskeline/System/Console/Haskeline/Monads.hs | bsd-3-clause | 4,155 | 0 | 14 | 1,368 | 1,692 | 885 | 807 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-|
IP routing table is a tree of 'AddrRange'
to search one of them on the longest
match base. It is a kind of TRIE with one
way branching removed. Both IPv4 and IPv6
are supported.
-}
module Data.IP.RouteTable.Internal where
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>),(<*>),pure)
#endif
import Control.Monad
import Data.Bits
import Data.Foldable (Foldable(..))
import Data.IP.Addr
import Data.IP.Op
import Data.IP.Range
import Data.IntMap (IntMap, (!))
import qualified Data.IntMap as IM (fromList)
import Data.Monoid
import Data.Traversable
import Data.Word
import Prelude hiding (lookup)
----------------------------------------------------------------
{-|
  A class to contain IPv4 and IPv6.
-}
class Addr a => Routable a where
    {-|
      The 'intToTBit' function takes 'Int' and returns an 'Routable' address
      whose only n-th bit is set.
    -}
    intToTBit :: Int -> a
    {-|
      The 'isZero' function takes an 'Routable' address and an test bit
      'Routable' address and returns 'True' is the bit is unset,
      otherwise returns 'False'.
    -}
    isZero :: a -> a -> Bool

instance Routable IPv4 where
    intToTBit = intToTBitIPv4
    isZero a b = a `masked` b == IP4 0

instance Routable IPv6 where
    intToTBit = intToTBitIPv6
    isZero a b = a `masked` b == IP6 (0,0,0,0)
----------------------------------------------------------------
--
-- Test Bit
--
-- Precomputed test-bit tables: entry n is an address whose single set bit
-- marks position n from the most significant end.

intToTBitIPv4 :: Int -> IPv4
intToTBitIPv4 len = IP4 (intToTBitsIPv4 ! len)

intToTBitIPv6 :: Int -> IPv6
intToTBitIPv6 len = IP6 (intToTBitsIPv6 ! len)

-- An infinite list 0x80000000, 0x40000000, ...; shifting by (-1) moves
-- the bit one position to the right each step.
intToTBitsWord32 :: [Word32]
intToTBitsWord32 = iterate (`shift` (-1)) 0x80000000

intToTBitsIPv4 :: IntMap IPv4Addr
intToTBitsIPv4 = IM.fromList $ zip [0..32] intToTBitsWord32

intToTBitsIPv6 :: IntMap IPv6Addr
intToTBitsIPv6 = IM.fromList $ zip [0..128] bs
  where
    -- Each 32-bit chunk of the 128-bit address takes the moving bit in
    -- turn; entry 128 is all zero.
    bs = b1 ++ b2 ++ b3 ++ b4 ++ b5
    b1 = map (\vbit -> (vbit,all0,all0,all0)) intToTBits
    b2 = map (\vbit -> (all0,vbit,all0,all0)) intToTBits
    b3 = map (\vbit -> (all0,all0,vbit,all0)) intToTBits
    b4 = map (\vbit -> (all0,all0,all0,vbit)) intToTBits
    b5 = [(all0,all0,all0,all0)]
    intToTBits = take 32 intToTBitsWord32
    all0 = 0x00000000
----------------------------------------------------------------
{-|
  The Tree structure for IP routing table based on TRIE with
  one way branching removed. This is an abstract data type,
  so you cannot touch its inside. Please use 'insert' or 'lookup', instead.
-}
data IPRTable k a =
    Nil
    -- Node: key range, cached test bit for the range's prefix length,
    -- optional value ('Nothing' for internal glue nodes), left and right
    -- subtrees.
  | Node !(AddrRange k) !k !(Maybe a) !(IPRTable k a) !(IPRTable k a)
  deriving (Eq, Show)
----------------------------------------------------------------
{-|
  The 'empty' function returns an empty IP routing table.

>>> (empty :: IPRTable IPv4 ()) == fromList []
True
-}
empty :: Routable k => IPRTable k a
empty = Nil

instance Functor (IPRTable k) where
    fmap _ Nil = Nil
    fmap f (Node r a mv b1 b2) = Node r a (f <$> mv) (fmap f b1) (fmap f b2)

instance Foldable (IPRTable k) where
    foldMap _ Nil = mempty
    -- Pre-order: the node's value first, then the left and right subtrees.
    foldMap f (Node _ _ mv b1 b2) = foldMap f mv <> foldMap f b1 <> foldMap f b2

instance Traversable (IPRTable k) where
    traverse _ Nil = pure Nil
    traverse f (Node r a mv b1 b2) = Node r a <$> traverse f mv <*> traverse f b1 <*> traverse f b2
----------------------------------------------------------------
{-|
  The 'insert' function inserts a value with a key of 'AddrRange' to 'IPRTable'
  and returns a new 'IPRTable'.

>>> (insert ("127.0.0.1" :: AddrRange IPv4) () empty) == fromList [("127.0.0.1",())]
True
-}
insert :: (Routable k) => AddrRange k -> a -> IPRTable k a -> IPRTable k a
insert k1 v1 Nil = Node k1 tb1 (Just v1) Nil Nil
  where
    tb1 = keyToTestBit k1
insert k1 v1 s@(Node k2 tb2 v2 l r)
  -- Exact prefix match: overwrite the stored value, keep the subtrees.
  | k1 == k2  = Node k1 tb1 (Just v1) l r
  -- New key lies inside this node's range: descend left or right
  -- according to the node's test bit.
  | k2 >:> k1 = if isLeft k1 tb2 then
                    Node k2 tb2 v2 (insert k1 v1 l) r
                else
                    Node k2 tb2 v2 l (insert k1 v1 r)
  -- New key covers this node: the existing node becomes its child.
  | k1 >:> k2 = if isLeft k2 tb1 then
                    Node k1 tb1 (Just v1) s Nil
                else
                    Node k1 tb1 (Just v1) Nil s
  -- Disjoint ranges: join both subtrees under a fresh glue node.
  | otherwise = let n = Node k1 tb1 (Just v1) Nil Nil
                in link n s
  where
    tb1 = keyToTestBit k1

-- Join two disjoint subtrees under a valueless node keyed by their
-- longest common prefix.
link :: Routable k => IPRTable k a -> IPRTable k a -> IPRTable k a
link s1@(Node k1 _ _ _ _) s2@(Node k2 _ _ _ _)
  | isLeft k1 tbg = Node kg tbg Nothing s1 s2
  | otherwise     = Node kg tbg Nothing s2 s1
  where
    kg = glue 0 k1 k2
    tbg = keyToTestBit kg
link _ _ = error "link"

-- Find the longest common prefix of two ranges by extending the mask one
-- bit at a time until the masked addresses differ.
glue :: (Routable k) => Int -> AddrRange k -> AddrRange k -> AddrRange k
glue n k1 k2
  | addr k1 `masked` mk == addr k2 `masked` mk = glue (n + 1) k1 k2
  | otherwise = makeAddrRange (addr k1) (n - 1)
  where
    mk = intToMask n
-- | The test bit corresponding to a range's prefix length.
keyToTestBit :: Routable k => AddrRange k -> k
keyToTestBit r = intToTBit (mlen r)

-- | Whether a range belongs in the left subtree relative to the given
-- test bit, i.e. the tested bit of the range's address is unset.
isLeft :: Routable k => AddrRange k -> k -> Bool
isLeft adr tb = isZero (addr adr) tb
----------------------------------------------------------------
{-|
  The 'delete' function deletes a value by a key of 'AddrRange' from 'IPRTable'
  and returns a new 'IPRTable'.

>>> delete "127.0.0.1" (insert "127.0.0.1" () empty) == (empty :: IPRTable IPv4 ())
True
-}
delete :: (Routable k) => AddrRange k -> IPRTable k a -> IPRTable k a
delete _ Nil = Nil
delete k1 s@(Node k2 tb2 v2 l r)
  -- Exact match: drop the value; 'node' prunes the node if it became
  -- redundant.
  | k1 == k2  = node k2 tb2 Nothing l r
  | k2 >:> k1 = if isLeft k1 tb2 then
                    node k2 tb2 v2 (delete k1 l) r
                else
                    node k2 tb2 v2 l (delete k1 r)
  -- Key not present: the table is unchanged.
  | otherwise = s

-- Smart constructor: a valueless node with at most one child collapses
-- into that child, keeping one-way branches out of the tree.
node :: (Routable k) => AddrRange k -> k -> Maybe a -> IPRTable k a -> IPRTable k a -> IPRTable k a
node _ _ Nothing Nil r = r
node _ _ Nothing l Nil = l
node k tb v l r = Node k tb v l r
----------------------------------------------------------------
{-|
  The 'lookup' function looks up 'IPRTable' with a key of 'AddrRange'.
  If a routing information in 'IPRTable' matches the key, its value
  is returned.

>>> let v4 = ["133.4.0.0/16","133.5.0.0/16","133.5.16.0/24","133.5.23.0/24"] :: [AddrRange IPv4]
>>> let rt = fromList $ zip v4 v4
>>> lookup "127.0.0.1" rt
Nothing
>>> lookup "133.3.0.1" rt
Nothing
>>> lookup "133.4.0.0" rt
Just 133.4.0.0/16
>>> lookup "133.4.0.1" rt
Just 133.4.0.0/16
>>> lookup "133.5.16.0" rt
Just 133.5.16.0/24
>>> lookup "133.5.16.1" rt
Just 133.5.16.0/24
-}
lookup :: Routable k => AddrRange k -> IPRTable k a -> Maybe a
lookup k s = search k s Nothing

-- Walk down the tree carrying the best (longest) match seen so far as an
-- accumulator.
search :: Routable k => AddrRange k -> IPRTable k a -> Maybe a -> Maybe a
search _ Nil res = res
-- A glue node holds no value: the accumulator is passed on unchanged.
search k1 (Node k2 tb2 Nothing l r) res
  | k1 == k2  = res
  | k2 >:> k1 = if isLeft k1 tb2 then
                    search k1 l res
                else
                    search k1 r res
  | otherwise = res
-- A valued node covering the key becomes the new best match.
search k1 (Node k2 tb2 vl l r) res
  | k1 == k2  = vl
  | k2 >:> k1 = if isLeft k1 tb2 then
                    search k1 l vl
                else
                    search k1 r vl
  | otherwise = res
----------------------------------------------------------------
{-|
  The 'findMatch' function looks up 'IPRTable' with a key of 'AddrRange'.
  If the key matches routing informations in 'IPRTable', they are
  returned.

>>> let v4 = ["133.4.0.0/16","133.5.0.0/16","133.5.16.0/24","133.5.23.0/24"] :: [AddrRange IPv4]
>>> let rt = fromList $ zip v4 $ repeat ()
>>> findMatch "133.4.0.0/15" rt :: [(AddrRange IPv4,())]
[(133.4.0.0/16,()),(133.5.0.0/16,()),(133.5.16.0/24,()),(133.5.23.0/24,())]
-}
findMatch :: MonadPlus m => Routable k => AddrRange k -> IPRTable k a -> m (AddrRange k, a)
findMatch _ Nil = mzero
-- A glue node is never itself a match; recurse whenever the ranges
-- overlap in either direction.
findMatch k1 (Node k2 _ Nothing l r)
  | k1 >:> k2 = findMatch k1 l `mplus` findMatch k1 r
  | k2 >:> k1 = findMatch k1 l `mplus` findMatch k1 r
  | otherwise = mzero
-- A valued node contained in the key is itself a match, in addition to
-- anything found below it.
findMatch k1 (Node k2 _ (Just vl) l r)
  | k1 >:> k2 = return (k2, vl) `mplus` findMatch k1 l `mplus` findMatch k1 r
  | k2 >:> k1 = findMatch k1 l `mplus` findMatch k1 r
  | otherwise = mzero
----------------------------------------------------------------
{-|
  The 'fromList' function creates a new IP routing table from
  a list of a pair of 'IPrange' and value.
-}
fromList :: Routable k => [(AddrRange k, a)] -> IPRTable k a
fromList = foldl' step empty
  where
    -- Insert each association into the table built so far.
    step tbl (k, v) = insert k v tbl
{-|
  The 'toList' function creates a list of a pair of 'AddrRange' and
  value from an IP routing table.
-}
toList :: Routable k => IPRTable k a -> [(AddrRange k, a)]
toList = foldt toL []
  where
    -- Only nodes that actually carry a value contribute; glue nodes are
    -- skipped.
    toL Nil xs = xs
    toL (Node _ _ Nothing _ _) xs = xs
    toL (Node k _ (Just a) _ _) xs = (k,a) : xs

----------------------------------------------------------------
-- Fold over the raw tree: visit the node itself, then the left subtree,
-- then the right subtree.
foldt :: (IPRTable k a -> b -> b) -> b -> IPRTable k a -> b
foldt _ v Nil = v
foldt func v rt@(Node _ _ _ l r) = foldt func (foldt func (func rt v) l) r
| DanielG/iproute | Data/IP/RouteTable/Internal.hs | bsd-3-clause | 8,782 | 0 | 12 | 2,119 | 2,672 | 1,365 | 1,307 | -1 | -1 |
module ResultWorthy.DataTypes where
import Data.Maybe
import Data.Traversable
import Control.Applicative
-- | The kind of Swift container a declaration lives in.
data SwiftContainerType = SwiftStruct
                        | SwiftClass
                          { ctFinal :: Bool } -- ^ whether the class is declared @final@
                        | SwiftExtension
                        deriving (Eq, Show, Read)
-- | Modifiers that may precede a Swift initializer.
data InitializerModifier = Required | Convenience | Optional deriving (Eq, Show, Read)
-- | Modifiers that may precede a Swift function declaration.
data FunctionModifier = Final | Static | Class deriving (Eq, Show, Read)
-- | Modifiers that may precede a function argument.
data ArgumentModifier = Inout deriving (Eq, Show, Read)
-- | Swift access-control levels.
data AccessModifier = Public | Private | Internal deriving (Eq, Show, Read)
-- | Module names imported at the top of a source file.
type Imports = [String]
-- | An optional generic constraint (e.g. a @where@ clause), stored verbatim.
type GenericConstraint = Maybe String
-- | A single function/initializer argument.
data Argument = Argument
  { aModifier :: [ArgumentModifier] -- ^ e.g. @inout@
  , aInternalName :: Maybe String -- ^ name used inside the function body
  , aExternalName :: Maybe String -- ^ argument label used at the call site
  , aType :: SwiftType
  , aDefaultValue :: Maybe String -- ^ default-value expression, verbatim
  } deriving (Eq, Show, Read)
-- | A (possibly structured) Swift type.
data SwiftType = RegularType
    { tName :: String } -- ^ a plain named type
  | OptionalType
    { tType :: SwiftType
    , tForce :: Bool } -- ^ @T?@, or the force-unwrapped @T!@ when 'tForce'
  | GenericType
    { tType :: SwiftType
    , tGeneric :: SwiftType } -- ^ a type applied to a generic parameter
  | VarargsType
    { tType :: SwiftType } -- ^ variadic @T...@
  | ArrayType
    { tType :: SwiftType } -- ^ @[T]@
  | DictionaryType
    { tKeyType :: SwiftType
    , tValueType :: SwiftType } -- ^ @[K: V]@
  | FunctionType
    { tArguments :: [Argument]
    , tReturnType :: SwiftType } -- ^ @(args) -> R@
  | TupleType
    { tTypes :: [SwiftType] } -- ^ @(T1, T2, ...)@
  deriving (Eq, Show, Read)
-- | Header information of a container declaration: access level, the kind
-- of container, its own type, and the types it extends/conforms to.
data ContainerAnnotation = ContainerAnnotation
  { cAccessModifier :: AccessModifier
  , cContainerType :: SwiftContainerType
  , cType :: SwiftType
  , cExtends :: [SwiftType]
  } deriving (Eq, Show, Read)
-- | A declaration appearing inside a Swift container (or at top level).
data Declaration = MemberDecl
    { dAccessModifier :: AccessModifier
    , dVariable :: Bool -- ^ presumably @var@ vs @let@ — TODO confirm
    , dWritable :: Bool
    , dName :: String
    , dType:: SwiftType }
  | AliasDecl
    { dAlias :: SwiftType
    , dSource :: SwiftType } -- ^ @typealias dAlias = dSource@
  | FunctionDecl
    { dAccessModifier :: AccessModifier
    , dFunctionModifiers :: [FunctionModifier]
    , dName :: String
    , dGenericConstraint :: GenericConstraint
    , dFunction :: SwiftType }
  | InitializerDecl
    { dAccessModifier :: AccessModifier
    , dInitializerModifiers :: [InitializerModifier]
    , dGenericConstraint :: GenericConstraint
    , dArguments :: [Argument] }
  | ContainerDecl
    { dContainerAnnotation :: ContainerAnnotation
    , dDeclarations :: [Declaration] } -- ^ a nested container with its members
  | BodyDecl
    { dDecl :: Declaration
    , dBody :: String } -- ^ a declaration paired with its raw body text
  deriving (Eq, Show, Read)
-- | A whole parsed source file: its imports plus its declarations.
data SwiftModule = SwiftModule
  { mImports :: Imports
  , mDeclarations :: [Declaration]
  } deriving (Eq, Show, Read)
| lawrencelomax/ResultWorthy | ResultWorthy/DataTypes.hs | bsd-3-clause | 3,196 | 0 | 9 | 1,264 | 655 | 399 | 256 | 80 | 0 |
{-# LANGUAGE FlexibleContexts #-}
-- | Description : The main module that starts the server.
module Guide.Main
(
-- * Main
main,
-- * All supported commands
runServer,
dryRun,
loadPublic,
apiDocs,
)
where
import Imports
-- Concurrent
import Control.Concurrent.Async
-- Monads and monad transformers
import Control.Monad.Morph
-- Web
import Lucid hiding (for_)
import Network.Wai.Middleware.Static (addBase, staticPolicy)
import Web.Spock hiding (get, head, text)
import Web.Spock.Config
import Web.Spock.Lucid
-- Spock-digestive
import Web.Spock.Digestive (runForm)
-- Highlighting
import CMark.Highlight (pygments, styleToCss)
-- acid-state
import Data.Acid as Acid
import Data.SafeCopy as SafeCopy
import Data.Serialize.Get as Cereal
-- IO
import System.IO
-- Catching Ctrl-C and termination
import System.Signal
-- HVect
import Data.HVect hiding (length)
import Guide.Api (runApiServer, apiSwaggerRendered)
import Guide.App
import Guide.Cli
import Guide.Config
import Guide.Handlers
import Guide.JS (JS (..), allJSFunctions)
import Guide.Logger
import Guide.Routes (authRoute, haskellRoute)
import Guide.ServerStuff
import Guide.Session
import Guide.State
import Guide.Types
import Guide.Uid
import Guide.Views
import Guide.Views.Utils (getCSS, getCsrfHeader, getJS, protectForm)
import Guide.Database.Import (loadIntoPostgres)
import qualified Data.ByteString as BS
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Web.Spock as Spock
{- Note [acid-state]
~~~~~~~~~~~~~~~~~~~~
Until we are done with migrating to PostgreSQL, this app uses acid-state.
Acid-state works as follows:
* Everything is stored as Haskell values (in particular, all data is stored
in 'GlobalState').
* All changes to the state (and all queries) have to be done by using
'dbUpdate'/'dbQuery' and types (GetItem, SetItemName, etc) from the
Types.hs module.
* The data is kept in-memory, but all changes are logged to the disk (which
lets us recover the state in case of a crash by reapplying the changes)
and you can't access the state directly. When the application exits, it
creates a snapshot of the state (called “checkpoint”) and writes it to
the disk. Additionally, a checkpoint is created every hour (grep for
“createCheckpoint”).
* acid-state has a nasty feature – when the state hasn't changed,
'createCheckpoint' appends it to the previous checkpoint. When state
doesn't change for a long time, it means that checkpoints can grow to 100
MB or more. So, we employ a dirty bit and use createCheckpoint' instead
of createCheckpoint. The former only creates the checkpoint if the dirty
bit is set, which is good.
* When any type is changed, we have to write a migration function that
would read the old version of the type and turn it into the new
version. This is done by 'changelog' – you only need to provide the list
of differences between the old type and the new type.
* There are actually ways to access the state directly (GetGlobalState and
SetGlobalState), but the latter should only be used when doing something
one-off (e.g. if you need to migrate all IDs to a different ID scheme).
-}
----------------------------------------------------------------------------
-- Main
----------------------------------------------------------------------------
-- | Parse the command line, then read the config, then dispatch to the
-- requested command (in that order: arguments are validated before any
-- config is loaded).
main :: IO ()
main =
  parseCommandLine >>= \command ->
  readConfig >>= \config ->
  runCommand config command
-- | Run a specific 'Command' with the given 'Config'.
runCommand :: Config -> Command -> IO ()
runCommand config command = case command of
  RunServer        -> runServer config
  DryRun           -> dryRun config
  LoadPublic path  -> loadPublic config path
  ApiDocs          -> apiDocs config
  LoadIntoPostgres -> loadIntoPostgres config
----------------------------------------------------------------------------
-- Commands
----------------------------------------------------------------------------
-- | Start the server: open the acid-state DB and run the checkpointer,
-- the new API server and the old Spock server concurrently, until a
-- termination signal (or exception) cancels everything.
runServer :: Config -> IO ()
runServer config@Config{..} = withLogger config $ \logger -> do
  -- Turn SIGINT/SIGTERM into exceptions on this thread so cleanup runs.
  installTerminationCatcher =<< myThreadId
  workAsync <- async $ withDB (pure ()) $ \db -> do
    hSetBuffering stdout NoBuffering
    -- Run checkpoints creator, new and old server concurrently.
    mapConcurrently_ id
      [ checkPoint db
      , runNewApi logger config db
      , runOldServer logger config db
      ]
  -- Hold processes running and finish on exit or exception.
  forever (threadDelay (1000000 * 60))
    `finally` cancel workAsync
-- | Load database from @state/@, check that it can be loaded successfully,
-- and exit.
dryRun :: Config -> IO ()
dryRun config = withLogger config $ \logger -> do
  -- The initial-state argument is an 'error' thunk, so opening fails
  -- loudly when no saved state exists instead of creating a fresh one.
  db :: DB <- openLocalStateFrom "state/" (error "couldn't load state")
  logDebugIO logger "loaded the database successfully"
  closeAcidState db
-- | Load 'PublicDB' from given file, create acid-state database from it,
-- and exit.
loadPublic :: Config -> FilePath -> IO ()
loadPublic config path = withLogger config $ \logger ->
  -- Decode the file with SafeCopy/cereal; a decode failure aborts.
  (Cereal.runGet SafeCopy.safeGet <$> BS.readFile path) >>= \case
    Left err -> error err
    Right publicDB -> do
      db <- openLocalStateFrom "state/" emptyState
      Acid.update db (ImportPublicDB publicDB)
      -- Persist the imported state and release the DB before exiting.
      createCheckpointAndClose' db
      logDebugIO logger "PublicDB imported to GlobalState"
-- | Dump the rendered Swagger API docs to stdout (the logger is unused).
apiDocs :: Config -> IO ()
apiDocs config = withLogger config (const (T.putStrLn apiSwaggerRendered))
----------------------------------------------------------------------------
-- Helpers
----------------------------------------------------------------------------
-- | Render a Lucid template that needs read access to the server 'Config'.
-- The config is fetched from the Spock state and supplied to the template
-- by discharging its 'ReaderT' layer.
lucidWithConfig
  :: (MonadIO m, HasSpock (ActionCtxT cxt m),
      SpockState (ActionCtxT cxt m) ~ ServerState)
  => HtmlT (ReaderT Config IO) a -> ActionCtxT cxt m a
lucidWithConfig x = do
  cfg <- getConfig
  lucidIO (hoist (flip runReaderT cfg) x)
-- | Create a checkpoint every six hours. Note: if nothing was changed,
-- the checkpoint won't be created (that's what 'createCheckpoint'' is
-- for), which saves us some space.
checkPoint :: DB -> IO b
checkPoint db = forever $ createCheckpoint' db >> threadDelay sixHours
  where
    -- microseconds: 1e6 us/s * 3600 s/h * 6 h
    sixHours = 1000000 * 3600 * 6
-- | Run the API (new server), tagging its log messages with "api".
runNewApi :: Logger -> Config -> AcidState GlobalState -> IO ()
runNewApi logger config db = runApiServer (pushLogger "api" logger) config db
-- | Run Spock (old server): build the server state and session/CSRF
-- configuration, then start Spock on @portMain@ (bound here by the
-- @Config{..}@ record wildcard).
runOldServer :: Logger -> Config -> DB -> IO ()
runOldServer logger config@Config{..} db = do
  let serverState = ServerState {
        _config = config,
        _db = db }
  spockConfig <- do
    cfg <- defaultSpockCfg () PCNoDatabase serverState
    -- Sessions are persisted in the acid-state DB.
    store <- newAcidSessionStore db
    let sessionCfg = SessionCfg {
          sc_cookieName = "spockcookie",
          sc_sessionTTL = 3600,
          sc_sessionIdEntropy = 64,
          sc_sessionExpandTTL = True,
          sc_emptySession = emptyGuideData,
          sc_store = store,
          sc_housekeepingInterval = 60 * 10,
          sc_hooks = defaultSessionHooks
          }
    -- Cap request bodies at 1 MiB and enable CSRF protection.
    return cfg {
      spc_maxRequestSize = Just (1024*1024),
      spc_csrfProtection = True,
      spc_sessionCfg = sessionCfg }
  logDebugIO logger $ format "Spock is running on port {}" portMain
  runSpockNoBanner portMain $ spock spockConfig guideApp
-- TODO: Fix indentation after rebasing.
-- | The complete Spock routing table of the old server: static assets,
-- JS/CSS endpoints, admin pages, static/haskell/category pages, and the
-- authentication routes.
guideApp :: GuideApp ()
guideApp = do
  createAdminUser -- TODO: perhaps it needs to be inside of “prehook
                  -- initHook”? (I don't actually know what “prehook
                  -- initHook” does, feel free to edit.)
  prehook initHook $ do
    middleware (staticPolicy (addBase "static"))
    -- Javascript
    Spock.get "/js.js" $ do
      setHeader "Content-Type" "application/javascript; charset=utf-8"
      (csrfTokenName, csrfTokenValue) <- getCsrfHeader
      let jqueryCsrfProtection =
            format "guidejs.csrfProtection.enable(\"{}\", \"{}\");"
              csrfTokenName csrfTokenValue
      js <- getJS
      Spock.bytes $ toUtf8ByteString (fromJS allJSFunctions <> js <> jqueryCsrfProtection)
    -- CSS
    Spock.get "/highlight.css" $ do
      setHeader "Content-Type" "text/css; charset=utf-8"
      Spock.bytes $ toUtf8ByteString (styleToCss pygments)
    Spock.get "/css.css" $ do
      setHeader "Content-Type" "text/css; charset=utf-8"
      css <- getCSS
      Spock.bytes $ toUtf8ByteString css
    Spock.get "/admin.css" $ do
      setHeader "Content-Type" "text/css; charset=utf-8"
      css <- getCSS
      admincss <- liftIO $ T.readFile "static/admin.css"
      Spock.bytes $ toUtf8ByteString (css <> admincss)
    -- Main page
    Spock.get root $
      lucidWithConfig renderRoot
    -- Admin page (requires login + admin rights, see the two prehooks)
    prehook authHook $ prehook adminHook $ do
      Spock.get "admin" $ do
        s <- dbQuery GetGlobalState
        lucidIO $ renderAdmin s
      adminMethods
      Spock.get ("admin" <//> "links") $ do
        s <- dbQuery GetGlobalState
        lucidIO $ renderAdminLinks s
    -- Static pages
    Spock.get "markdown" $ lucidWithConfig $
      renderStaticMd "Markdown" "markdown.md"
    Spock.get "license" $ lucidWithConfig $
      renderStaticMd "License" "license.md"
    -- Haskell
    Spock.get (haskellRoute <//> root) $ do
      s <- dbQuery GetGlobalState
      q <- param "q"
      lucidWithConfig $ renderHaskellRoot s q
    -- Category pages
    Spock.get (haskellRoute <//> var) $ \path -> do
      -- The links look like /parsers-gao238b1 (because it's nice when
      -- you can find out where a link leads just by looking at it)
      let (_, catId) = T.breakOnEnd "-" path
      when (T.null catId)
        Spock.jumpNext
      mbCategory <- dbQuery (GetCategoryMaybe (Uid catId))
      case mbCategory of
        Nothing -> Spock.jumpNext
        Just category -> do
          -- If the slug in the url is old (i.e. if it doesn't match the
          -- one we would've generated now), let's do a redirect
          when (categorySlug category /= path) $
            -- TODO: this link shouldn't be absolute [absolute-links]
            Spock.redirect ("/haskell/" <> categorySlug category)
          lucidWithConfig $ renderCategoryPage category
    -- The add/set methods return rendered parts of the structure (added
    -- categories, changed items, etc) so that the Javascript part could
    -- take them and inject into the page. We don't want to duplicate
    -- rendering on server side and on client side.
    methods
    -- plain "/auth" logs out a logged-in user and lets a logged-out user
    -- log in (this is not the best idea, granted, and we should just
    -- show logged-in users a “logout” link and logged-out users a
    -- “login” link instead)
    Spock.get (authRoute <//> root) $ do
      user <- getLoggedInUser
      if isJust user
        then Spock.redirect "/auth/logout"
        else Spock.redirect "/auth/login"
    Spock.getpost (authRoute <//> "login") $ authRedirect "/" loginAction
    Spock.get (authRoute <//> "logout") logoutAction
    Spock.getpost (authRoute <//> "register") $ authRedirect "/" signupAction
-- | Handle the login form: re-render the form on validation failure;
-- otherwise check the credentials against the DB, store the user id in
-- the session on success, or show the error above the form on failure.
loginAction :: GuideAction ctx ()
loginAction = do
  r <- runForm "login" loginForm
  case r of
    (v, Nothing) -> do
      formHtml <- protectForm loginFormView v
      lucidWithConfig $ renderRegister formHtml
    (v, Just Login {..}) -> do
      loginAttempt <- dbQuery $
        LoginUser loginEmail (toUtf8ByteString loginUserPassword)
      case loginAttempt of
        Right user -> do
          modifySession (sessionUserID ?~ userID user)
          Spock.redirect "/"
        -- TODO: *properly* show error message/validation of input
        Left err -> do
          formHtml <- protectForm loginFormView v
          lucidWithConfig $ renderRegister $ do
            div_ $ toHtml ("Error: " <> err)
            formHtml
-- | Log the user out by clearing the session's user id, then return to
-- the root page.
logoutAction :: GuideAction ctx ()
logoutAction =
  modifySession (sessionUserID .~ Nothing) >> Spock.redirect "/"
-- | Handle the registration form: re-render on validation failure;
-- otherwise create the user, log them in (session user id) and redirect.
signupAction :: GuideAction ctx ()
signupAction = do
  r <- runForm "register" registerForm
  case r of
    (v, Nothing) -> do
      formHtml <- protectForm registerFormView v
      lucidWithConfig $ renderRegister formHtml
    (v, Just UserRegistration {..}) -> do
      user <- makeUser registerUserName registerUserEmail
                       (toUtf8ByteString registerUserPassword)
      success <- dbUpdate $ CreateUser user
      if success
        then do
          modifySession (sessionUserID ?~ userID user)
          -- NOTE(review): redirects to "" here, unlike loginAction's "/";
          -- possibly unintentional — confirm.
          Spock.redirect ""
        else do
          -- 'CreateUser' refused (e.g. duplicate) — show the form again.
          formHtml <- protectForm registerFormView v
          lucidWithConfig $ renderRegister formHtml
-- | Base Spock prehook: every request starts with an empty context vector.
initHook :: GuideAction () (HVect '[])
initHook = return HNil
-- | Spock prehook requiring a logged-in user; aborts the request with a
-- plain-text message otherwise. On success the 'User' is pushed onto the
-- context vector.
authHook :: GuideAction (HVect xs) (HVect (User ': xs))
authHook = do
  oldCtx <- getContext
  getLoggedInUser >>= \case
    Nothing   -> Spock.text "Not logged in."
    Just user -> return (user :&: oldCtx)
-- | Spock prehook requiring that the already-authenticated user (found in
-- the context vector) is an admin; aborts with a plain-text message
-- otherwise.
adminHook :: ListContains n User xs => GuideAction (HVect xs) (HVect (IsAdmin ': xs))
adminHook = do
  oldCtx <- getContext
  if userIsAdmin (findFirst oldCtx)
    then return (IsAdmin :&: oldCtx)
    else Spock.text "Not authorized."
-- | Redirect to the given path when a user is already logged in;
-- otherwise run the action (used to hide login/register pages from
-- logged-in users).
authRedirect :: Text -> GuideAction ctx a -> GuideAction ctx a
authRedirect path action =
  getLoggedInUser >>= maybe action (const (Spock.redirect path))
-- TODO: a function to find all links to Hackage that have version in them
-- | Reasons for shutting down, thrown to the main thread as exceptions by
-- the signal handlers installed in 'installTerminationCatcher'.
data Quit = CtrlC | ServiceStop
  deriving (Eq, Ord, Show)
instance Exception Quit
-- | Set up a handler that would catch SIGINT (i.e. Ctrl-C) and SIGTERM
-- (i.e. service stop) and throw an exception instead of the signal. This
-- lets us create a checkpoint and close connections on exit.
installTerminationCatcher
  :: ThreadId -- ^ Thread to kill when the signal comes
  -> IO ()
-- 'void' just discards the result of the second 'installHandler'.
installTerminationCatcher thread = void $ do
  installHandler sigINT (\_ -> throwTo thread CtrlC)
  installHandler sigTERM (\_ -> throwTo thread ServiceStop)
-- | Create an admin user (with login “admin”, email “admin@guide.aelve.com”
-- and password specified in the config).
--
-- Note: 'DeleteAllUsers' is called first, so all existing users are wiped
-- and the admin user is recreated from scratch on every startup.
createAdminUser :: GuideApp ()
createAdminUser = do
  -- Wipe the user table first; the admin user is recreated fresh below.
  dbUpdate DeleteAllUsers
  pass <- toUtf8ByteString . adminPassword <$> getConfig
  user <- makeUser "admin" "admin@guide.aelve.com" pass
  void $ dbUpdate $ CreateUser (user & _userIsAdmin .~ True)
| aelve/guide | back/src/Guide/Main.hs | bsd-3-clause | 14,628 | 0 | 23 | 3,316 | 2,942 | 1,474 | 1,468 | -1 | -1 |
-- | This module provides a program step. A step can be an evaluation, a
-- refinement or an abstraction.
module Jat.PState.Step
(
Step (..)
, PStep
, evaluation
, topEvaluation
, topRefinement
, liftStep
, liftPStep
)
where
import Jat.Constraints (PATerm,top)
-- | The type of a step. Each alternative carries the successor value(s)
-- together with the constraint ('PATerm') under which the step is taken.
data Step a b =
  Evaluation (a,PATerm) -- ^ a single evaluation step
  | Refinement [(b,PATerm)] -- ^ a case split into several refined successors
  | Abstraction (a,PATerm) -- ^ an abstraction (generalisation) step
  deriving Show
-- | The type of a program step: source and successor states share one type.
type PStep a = Step a a
-- | Builds an evaluation step from a successor and a constraint.
evaluation :: a -> PATerm -> Step a b
evaluation a c = Evaluation (a, c)
-- | Builds an evaluation step with the trivial constraint 'top'.
topEvaluation :: a -> Step a b
topEvaluation a = Evaluation (a, top)
-- | Builds a refinement step where every successor is constrained by 'top'.
topRefinement :: [b] -> Step a b
topRefinement bs = Refinement [ (b, top) | b <- bs ]
-- Maps the first function over evaluation/abstraction payloads and the
-- second over refinement payloads, keeping the constraints unchanged.
map2 :: (a -> c) -> (b -> d) -> Step a b -> Step c d
map2 f g step = case step of
  Evaluation (a,c)  -> Evaluation (f a, c)
  Refinement bs     -> Refinement [ (g b, c) | (b,c) <- bs ]
  Abstraction (a,c) -> Abstraction (f a, c)
-- | Lifts two functions to a step: the first is applied to evaluation and
-- abstraction results, the second to refinement results.
liftStep :: (a -> c) -> (b -> d) -> Step a b -> Step c d
liftStep = map2
-- | Lifts a single function uniformly to a program step.
liftPStep :: (a -> b) -> PStep a -> PStep b
liftPStep f = liftStep f f
| ComputationWithBoundedResources/jat | src/Jat/PState/Step.hs | bsd-3-clause | 1,289 | 0 | 9 | 296 | 451 | 249 | 202 | 30 | 1 |
module PFDS.Sec9.SparseByWeight where
type Nat = [Int]
-- Insert weight w into the number, merging equal weights upwards (binary
-- carry). Assumes w is at most the smallest weight in ws — TODO confirm
-- against callers.
carry :: Int -> Nat -> Nat
carry w ws = case ws of
  [] -> [w]
  w' : rest
    | w < w'    -> w : ws
    | otherwise -> carry (2 * w) rest
-- Remove weight w from the number, splitting larger weights downwards
-- (binary borrow). Note that the recursion keeps the whole list until a
-- weight equal to w is found.
borrow :: Int -> Nat -> Nat
borrow w ws = case ws of
  [] -> [w]
  w' : rest
    | w == w'   -> rest
    | otherwise -> w : borrow (2 * w) ws
-- | Increment: carry the unit weight into the number.
inc :: Nat -> Nat
inc = carry 1
-- | Decrement: borrow the unit weight from the number.
dec :: Nat -> Nat
dec = borrow 1
-- | Add two sparse binary numbers; equal weights are merged via 'carry'.
add :: Nat -> Nat -> Nat
add ws [] = ws
add [] ws = ws
add xs@(w1 : rest1) ys@(w2 : rest2) = case compare w1 w2 of
  LT -> w1 : add rest1 ys
  GT -> w2 : add xs rest2
  EQ -> carry (2 * w1) (add rest1 rest2)
| matonix/pfds | src/PFDS/Sec9/SparseByWeight.hs | bsd-3-clause | 588 | 0 | 9 | 190 | 349 | 183 | 166 | 21 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : System.Taffybar.Widget.XDGMenu.Menu
-- Copyright : 2017 Ulf Jasper
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Ulf Jasper <ulf.jasper@web.de>
-- Stability : unstable
-- Portability : unportable
--
-- Implementation of version 1.1 of the freedesktop "Desktop Menu
-- Specification", see
-- https://specifications.freedesktop.org/menu-spec/menu-spec-1.1.html
--
-- See also 'MenuWidget'.
-----------------------------------------------------------------------------
module System.Taffybar.Widget.XDGMenu.Menu
( Menu(..)
, MenuEntry(..)
, buildMenu
, getApplicationEntries
) where
import Data.Char (toLower)
import Data.List
import Data.Maybe
import qualified Data.Text as T
import System.Environment.XDG.DesktopEntry
import System.Taffybar.Information.XDG.Protocol
-- | Displayable menu, assembled from the XDG menu definition and the
-- desktop entries found on the system.
data Menu = Menu
  { fmName :: String -- ^ menu title
  , fmComment :: String -- ^ description text
  , fmIcon :: Maybe String -- ^ icon name from the directory entry, if any
  , fmSubmenus :: [Menu] -- ^ nested submenus (sorted case-insensitively)
  , fmEntries :: [MenuEntry] -- ^ application entries directly in this menu
  , fmOnlyUnallocated :: Bool -- ^ XDG OnlyUnallocated flag (see 'fixOnlyUnallocated')
  } deriving (Eq, Show)
-- | Displayable menu entry
data MenuEntry = MenuEntry
  { feName :: T.Text -- ^ entry label
  , feComment :: T.Text -- ^ description (falls back to the command)
  , feCommand :: String -- ^ command to launch
  , feIcon :: Maybe T.Text -- ^ icon name, if any
  } deriving (Eq, Show)
-- | Fetch menus and desktop entries and assemble the displayable menu.
-- A placeholder menu is returned when the XDG menu cannot be parsed.
buildMenu :: Maybe String -> IO Menu
buildMenu mMenuPrefix = do
  mMenuDes <- readXDGMenu mMenuPrefix
  case mMenuDes of
    Nothing -> return failureMenu
    Just (menu, des) -> do
      dt <- getXDGDesktop
      dirDirs <- getDirectoryDirs
      langs <- getPreferredLanguages
      (fm, allocated) <- xdgToMenu dt langs dirDirs des menu
      -- Post-process OnlyUnallocated menus now that all allocated
      -- entries are known.
      return (fixOnlyUnallocated allocated fm)
  where
    failureMenu = Menu "???" "Parsing failed" Nothing [] [] False
-- | Convert xdg menu to displayable menu. Also returns the entries this
-- menu (and its submenus) allocated, for 'fixOnlyUnallocated'.
xdgToMenu
  :: String -- ^ current desktop name (for OnlyShowIn/NotShowIn)
  -> [String] -- ^ preferred languages
  -> [FilePath] -- ^ search paths for directory entries
  -> [DesktopEntry] -- ^ all desktop entries on the system
  -> XDGMenu
  -> IO (Menu, [MenuEntry])
xdgToMenu desktop langs dirDirs des xm = do
  dirEntry <- getDirectoryEntry dirDirs (xmDirectory xm)
  mas <- mapM (xdgToMenu desktop langs dirDirs des) (xmSubmenus xm)
  let (menus, subaes) = unzip mas
      -- Submenus are sorted case-insensitively by name.
      menus' = sortBy (\fm1 fm2 -> compare (map toLower $ fmName fm1)
                       (map toLower $ fmName fm2)) menus
      entries = map (xdgToMenuEntry langs) $
                -- hide NoDisplay
                filter (not . deNoDisplay) $
                -- onlyshowin
                filter (matchesOnlyShowIn desktop) $
                -- excludes
                filter (not . flip matchesCondition (fromMaybe None (xmExclude xm))) $
                -- includes
                filter (`matchesCondition` fromMaybe None (xmInclude xm)) des
      onlyUnallocated = xmOnlyUnallocated xm
      -- OnlyUnallocated menus don't count as allocating their entries.
      aes = if onlyUnallocated then [] else entries ++ concat subaes
  let fm = Menu {fmName = maybe (xmName xm) (deName langs) dirEntry,
                 fmComment = maybe "???" (fromMaybe "???" . deComment langs) dirEntry,
                 fmIcon = deIcon =<< dirEntry,
                 fmSubmenus = menus',
                 fmEntries = entries,
                 fmOnlyUnallocated = onlyUnallocated}
  return (fm, aes)
-- | Implements the OnlyShowIn/NotShowIn visibility check of the desktop
-- entry spec: an entry is visible when the current desktop appears in
-- OnlyShowIn (or that list is empty) and does not appear in NotShowIn.
matchesOnlyShowIn :: String -> DesktopEntry -> Bool
matchesOnlyShowIn desktop de = shownHere && not hiddenHere
  where
    shownHere = null onlyIn || desktop `elem` onlyIn
    hiddenHere = desktop `elem` deNotShowIn de
    onlyIn = deOnlyShowIn de
-- | Convert an xdg desktop entry to a displayable menu entry. The comment
-- falls back to the parenthesised command when no Comment field exists,
-- and to "??" when neither is available.
xdgToMenuEntry :: [String] -> DesktopEntry -> MenuEntry
xdgToMenuEntry langs de =
  MenuEntry
  {feName = name, feComment = comment, feCommand = cmd, feIcon = mIcon}
  where
    -- The command wrapped in parentheses, used as a comment fallback.
    mc =
      case deCommand de of
        Nothing -> Nothing
        Just c -> Just $ "(" ++ c ++ ")"
    comment =
      T.pack $
      fromMaybe "??" $
      case deComment langs de of
        Nothing -> mc
        Just tt -> Just $ tt ++ maybe "" ("\n" ++) mc
    cmd = fromMaybe "FIXME" $ deCommand de
    name = T.pack $ deName langs de
    mIcon = T.pack <$> deIcon de
-- | Post-process OnlyUnallocated menus: drop every entry that was already
-- allocated by a regular menu, recursing through all submenus.
fixOnlyUnallocated :: [MenuEntry] -> Menu -> Menu
fixOnlyUnallocated allocated menu =
  menu { fmEntries = prune (fmEntries menu)
       , fmSubmenus = map (fixOnlyUnallocated allocated) (fmSubmenus menu)
       }
  where
    prune es
      | fmOnlyUnallocated menu = filter (`notElem` allocated) es
      | otherwise = es
| teleshoes/taffybar | src/System/Taffybar/Widget/XDGMenu/Menu.hs | bsd-3-clause | 4,644 | 0 | 19 | 1,276 | 1,172 | 636 | 536 | 98 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
#define HINT
#include "../Server.hs"
| janrain/snap | src/Snap/Extension/Server/Hint.hs | bsd-3-clause | 126 | 0 | 2 | 16 | 7 | 6 | 1 | 3 | 0 |
{-# language QuasiQuotes #-}
{-# language TemplateHaskell #-}
module ExampleExtractor ( render, extractExampleImages ) where
import "base" Control.Arrow
import "base" Control.Monad
import "base" Data.Either
import "base" Data.Maybe
import "base" Data.Monoid
import qualified "containers" Data.Map.Strict as M
import "directory" System.Directory ( canonicalizePath )
import qualified "haskell-src-exts" Language.Haskell.Exts.Extension as Hse
import qualified "haskell-src-exts" Language.Haskell.Exts.Parser as Hse
import qualified "haskell-src-exts" Language.Haskell.Exts.Syntax as Hse
import qualified "Glob" System.FilePath.Glob as G
import qualified "opencv" OpenCV as CV
import qualified "text" Data.Text as T
import qualified "text" Data.Text.IO as T
import qualified "bytestring" Data.ByteString as B ( writeFile )
import "template-haskell" Language.Haskell.TH
import "template-haskell" Language.Haskell.TH.Syntax
import "this" Language.Haskell.Meta.Syntax.Translate ( toDecs )
--------------------------------------------------------------------------------
-- | PNG-encode the image and write it under @doc/generated/@, logging
-- progress to stdout.
render
  :: FilePath -- ^ destination path relative to @doc/generated/@
  -> CV.Mat ('CV.S [height, width]) channels depth
  -> IO ()
render fp img = do
    -- 'exceptError' turns an encoding failure into an exception.
    let bs = CV.exceptError $ CV.imencode (CV.OutputPng CV.defaultPngParams) img
        dest = "doc/generated/" <> fp
    putStr $ "Writing file " <> dest <> " ..."
    B.writeFile dest bs
    putStrLn " OK"
--------------------------------------------------------------------------------
-- | A source position: file plus 1-based line number.
data SrcLoc
   = SrcLoc
     { locFile :: !FilePath
     , locLine :: !Int
     }
-- | Haskell source code containing 0, 1 or more examples.
data ExampleSrc
   = ExampleSrc
     { exsLoc :: !SrcLoc -- ^ location of the example's first line
     , exsSrc :: !T.Text -- ^ source text, still haddock-escaped
     }
-- | An example whose source was parsed into TH declarations.
data ParsedExampleSrc
   = ParsedExampleSrc
     { pexsLoc :: !SrcLoc
     , pexsDecls :: ![Dec]
     }
-- | A single line of Haskell source code.
data SrcLine
   = SrcLine
     { srcLoc :: !SrcLoc
     , srcLine :: !T.Text
     }
-- | Whether a symbol is a pure image or an IO action yielding an image.
data SymbolType
   = SymImage
   | SymImageAction
     deriving (Show, Eq)
data RenderTarget
   = RenderTarget
     { rtDestination :: !FilePath
       -- ^ Relative path where the symbol must be rendered as an image file.
     , rtSymbolName :: !Name
       -- ^ Name of a top level symbol (function or CAF) that is either an image
       -- or an IO action that yields an image.
     , rtSymbolIsIO :: !Bool
       -- ^ Filled in later by inspecting the example's type signature.
     } deriving Show
--------------------------------------------------------------------------------
-- | Walk @srcDir@ for Haskell sources, splice every example block found
-- in them into the current module, and build the render-target list with
-- IO-ness deduced from the examples' type signatures. All scanned files
-- are registered as TH dependencies so edits trigger recompilation.
extractExampleImages :: FilePath -> Q [Dec]
extractExampleImages srcDir = do
    haskellPaths <- runIO $ findHaskellPaths srcDir
    mapM_ (addDependentFile <=< runIO . canonicalizePath) haskellPaths
    ((exampleSrcs, renderTargets) :: ([ExampleSrc], [RenderTarget])) <- runIO $ do
      xs <- mapM findExamples haskellPaths
      pure $ (concat *** concat) $ unzip xs
    let parseErrors :: [String]
        parsedExampleSrcs :: [ParsedExampleSrc]
        (parseErrors, parsedExampleSrcs) = partitionEithers $ map parseExampleSrc exampleSrcs
        -- Each example is preceded by a LINE pragma pointing at its origin.
        examplesTH :: [Dec]
        examplesTH = concatMap (\pexs -> parsedExampleLinePragma pexs : pexsDecls pexs)
                               parsedExampleSrcs
        -- Symbol name -> "does its type signature start with IO".
        exampleMap :: M.Map Name Bool
        exampleMap = M.map typeIsIO $ M.fromList $ mapMaybe asSigD examplesTH
        renderTargets' :: [RenderTarget]
        renderTargets' = do
          renderTarget <- renderTargets
          let isIO = M.findWithDefault False (rtSymbolName renderTarget) exampleMap
          pure renderTarget {rtSymbolIsIO = isIO}
    unless (null parseErrors) $
      error $ show parseErrors
    mdecs <- mkRenderExampleImages renderTargets'
    pure $ examplesTH <> mdecs
-- | A LINE pragma pointing generated declarations back at the file and
-- line the example came from, so GHC errors reference the real source.
parsedExampleLinePragma :: ParsedExampleSrc -> Dec
parsedExampleLinePragma (ParsedExampleSrc loc _decls) =
    PragmaD (LineP (locLine loc) (locFile loc))
-- | Parse one example into TH declarations, un-escaping the haddock
-- markup first. Error messages are prefixed with the source file.
parseExampleSrc :: ExampleSrc -> Either String ParsedExampleSrc
parseExampleSrc exs =
    case parseDecsHse (locFile $ exsLoc exs) $ T.unpack $ haddockToHaskell $ exsSrc exs of
      Left errMsg -> Left $ (locFile $ exsLoc exs) <> ": " <> errMsg
      Right decls -> Right
                     ParsedExampleSrc
                     { pexsLoc = exsLoc exs
                     , pexsDecls = toDecs decls
                     }
-- | Project a type-signature declaration into its name and type;
-- 'Nothing' for every other declaration form.
asSigD :: Dec -> Maybe (Name, Type)
asSigD dec = case dec of
  SigD n t -> Just (n, t)
  _        -> Nothing
-- Really hacky heuristic: walk down the left spine of the type and report
-- whether the head constructor is named "IO" (nameBase ''IO is exactly
-- the string "IO").
typeIsIO :: Type -> Bool
typeIsIO ty = case ty of
  ForallT _ _ t -> typeIsIO t
  AppT t _      -> typeIsIO t
  ConT n        -> nameBase n == "IO"
  _             -> False
-- | Parse a string of top-level declarations with haskell-src-exts,
-- returning the parser's error message on failure.
-- NOTE(review): the 'Hse.ParseOk' match only covers the 'Hse.Module'
-- form; an XmlPage/XmlHybrid result would crash here — presumably
-- impossible for example snippets, but worth confirming.
parseDecsHse :: String -> String -> Either String [Hse.Decl]
parseDecsHse fileName str =
    case Hse.parseModuleWithMode (parseMode fileName) str of
      Hse.ParseFailed _srcLoc err -> Left err
      Hse.ParseOk (Hse.Module _ _ _ _ _ _ decls) -> Right decls
-- | The haskell-src-exts parse mode used for example snippets: Haskell2010
-- plus the extensions examples are expected to use. LANGUAGE and LINE
-- pragmas inside the snippets are honoured (not ignored).
parseMode :: String -> Hse.ParseMode
parseMode fileName =
    Hse.ParseMode
    { Hse.parseFilename = fileName
    , Hse.baseLanguage = Hse.Haskell2010
    , Hse.extensions = map Hse.EnableExtension exts
    , Hse.ignoreLanguagePragmas = False
    , Hse.ignoreLinePragmas = False
    , Hse.fixities = Nothing
    , Hse.ignoreFunctionArity = False
    }
  where
    exts :: [Hse.KnownExtension]
    exts =
      [ Hse.BangPatterns
      , Hse.DataKinds
      , Hse.FlexibleContexts
      , Hse.LambdaCase
      , Hse.OverloadedStrings
      , Hse.PackageImports
      , Hse.PolyKinds
      , Hse.ScopedTypeVariables
      , Hse.TupleSections
      , Hse.TypeFamilies
      , Hse.TypeOperators
      , Hse.PostfixOperators
      , Hse.QuasiQuotes
      , Hse.UnicodeSyntax
      , Hse.MagicHash
      , Hse.PatternSignatures
      , Hse.MultiParamTypeClasses
      , Hse.RankNTypes
      ]
-- | Generate code for every render target
--
-- Executing the generated code will actually render the target.
mkRenderExampleImages :: [RenderTarget] -> Q [Dec]
mkRenderExampleImages renderTargets = [d|
    renderExampleImages :: IO ()
    renderExampleImages = $(pure doRender)
  |]
  where
    -- One statement per target: IO symbols are bound with >>= into
    -- 'render', pure images are passed to 'render' directly.
    doRender :: Exp
    doRender = DoE [ if rtSymbolIsIO rt
                     then NoBindS $ VarE '(>>=) `AppE` sym `AppE` (VarE 'render `AppE` fp)
                     else NoBindS $ VarE 'render `AppE` fp `AppE` sym
                   | rt <- renderTargets
                   , let sym = VarE $ rtSymbolName rt
                         fp = LitE $ StringL $ "examples/" <> rtDestination rt
                   ]
-- | All @.hs@ and @.hsc@ files anywhere under the given directory.
findHaskellPaths :: FilePath -> IO [FilePath]
findHaskellPaths srcDir = do
  (matches, _unmatched) <- G.globDir patterns srcDir
  pure (concat matches)
  where
    patterns = map G.compile ["**/*.hs", "**/*.hsc"]
-- | Undo haddock escaping: turn @\\`@, @\\<@ and @\\/@ back into the raw
-- characters (applied in the same order as the original composition).
haddockToHaskell :: T.Text -> T.Text
haddockToHaskell txt =
  T.replace "\\`" "`" (T.replace "\\<" "<" (T.replace "\\/" "/" txt))
-- | Read a file and extract both the example sources and the requested
-- render targets from its contents.
findExamples :: FilePath -> IO ([ExampleSrc], [RenderTarget])
findExamples fp = do
  contents <- T.readFile fp
  let srcLines = textToSource fp contents
  pure (parseExamples srcLines, parseGeneratedImages srcLines)
-- | Pair every line of the file contents with its 1-based location.
textToSource :: FilePath -> T.Text -> [SrcLine]
textToSource fp txt =
  [ SrcLine { srcLoc = SrcLoc { locFile = fp, locLine = n }, srcLine = line }
  | (n, line) <- zip [1 ..] (T.lines txt)
  ]
-- | Scan the lines for haddock example blocks: a block is opened by the
-- three consecutive lines "Example:", "", "@" and closed by the next "@".
parseExamples :: [SrcLine] -> [ExampleSrc]
parseExamples = findStart
  where
    -- Look for the opening marker; fewer than three remaining lines
    -- cannot contain one.
    findStart :: [SrcLine] -> [ExampleSrc]
    findStart [] = []
    findStart (_:[]) = []
    findStart (_:_:[]) = []
    findStart (a:b:c:ls)
        | srcLine a == "Example:"
        && srcLine b == ""
        && srcLine c == "@"
            = findEnd [] ls
    -- Reached when the guard above fails: slide the window by one line.
    findStart (_:ls) = findStart ls
    -- Accumulate lines (in reverse) until the closing "@".
    findEnd :: [SrcLine] -> [SrcLine] -> [ExampleSrc]
    findEnd _acc [] = []
    findEnd acc (l:ls)
        | srcLine l == "@" =
            case reverse acc of
              [] -> findStart ls
              revAcc@(firstLine:_) ->
                  let exs = ExampleSrc
                            { exsLoc = srcLoc firstLine
                            , exsSrc = T.unlines (map srcLine revAcc)
                            }
                  in exs : findStart ls
        | otherwise = findEnd (l:acc) ls
-- | Collect render targets from lines containing an
-- @<<doc/generated/examples/@ image reference: the first word after the
-- prefix is the destination file, the second the symbol to render (a
-- trailing ">>" on the symbol is stripped).
parseGeneratedImages :: [SrcLine] -> [RenderTarget]
parseGeneratedImages = concatMap $ parseLine . srcLine
  where
    parseLine :: T.Text -> [RenderTarget]
    parseLine line = maybeToList $ do
        let fromPrefix = snd $ T.breakOn prefix line
        rest <- T.stripPrefix prefix fromPrefix
        case take 2 $ T.words rest of
          [fp, funcName] ->
            pure RenderTarget
                 { rtDestination = T.unpack $ fp
                 , rtSymbolName = mkName $ T.unpack $ fromMaybe funcName (T.stripSuffix ">>" funcName)
                   -- Later on we will check whether the symbol is actually (or likely) IO.
                 , rtSymbolIsIO = False
                 }
          _ -> Nothing
    prefix = "<<doc/generated/examples/"
| lukexi/haskell-opencv | doc/ExampleExtractor.hs | bsd-3-clause | 9,060 | 0 | 20 | 2,489 | 2,403 | 1,295 | 1,108 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Geometry_msgs.InertiaStamped where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.HeaderSupport
import qualified Ros.Geometry_msgs.Inertia as Inertia
import qualified Ros.Std_msgs.Header as Header
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
-- | Generated ROS message binding: an 'Inertia.Inertia' payload paired
-- with a standard message 'Header.Header'.
data InertiaStamped = InertiaStamped { _header :: Header.Header
                                     , _inertia :: Inertia.Inertia
                                     } deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)
$(makeLenses ''InertiaStamped)
-- Binary (de)serialisation: header first, then the inertia payload.
instance RosBinary InertiaStamped where
  put obj' = put (_header obj') *> put (_inertia obj')
  get = InertiaStamped <$> get <*> get
  putMsg = putStampedMsg
-- Header accessors used by the stamped-message machinery.
instance HasHeader InertiaStamped where
  getSequence = view (header . Header.seq)
  getFrame = view (header . Header.frame_id)
  getStamp = view (header . Header.stamp)
  setSequence = set (header . Header.seq)
-- Message identity: md5 of the message definition and its ROS type name.
instance MsgInfo InertiaStamped where
  sourceMD5 _ = "ddee48caeab5a966c5e8d166654a9ac7"
  msgTypeName _ = "geometry_msgs/InertiaStamped"
instance D.Default InertiaStamped
| acowley/roshask | msgs/Geometry_msgs/Ros/Geometry_msgs/InertiaStamped.hs | bsd-3-clause | 1,463 | 1 | 9 | 263 | 364 | 215 | 149 | 35 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFunctor #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Dependency.Types
-- Copyright : (c) Duncan Coutts 2008
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Common types for dependency resolution.
-----------------------------------------------------------------------------
module Distribution.Client.Dependency.Types (
PreSolver(..),
Solver(..),
DependencyResolver,
ResolverPackage(..),
AllowNewer(..), isAllowNewer,
PackageConstraint(..),
showPackageConstraint,
PackagePreferences(..),
InstalledPreference(..),
PackagesPreferenceDefault(..),
Progress(..),
foldProgress,
LabeledPackageConstraint(..),
ConstraintSource(..),
unlabelPackageConstraint,
showConstraintSource
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
( Applicative(..) )
#endif
import Control.Applicative
( Alternative(..) )
import Data.Char
( isAlpha, toLower )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
( Monoid(..) )
#endif
import Distribution.Client.Types
( OptionalStanza(..), SourcePackage(..), ConfiguredPackage )
import qualified Distribution.Compat.ReadP as Parse
( pfail, munch1 )
import Distribution.PackageDescription
( FlagAssignment, FlagName(..) )
import Distribution.InstalledPackageInfo
( InstalledPackageInfo )
import qualified Distribution.Client.PackageIndex as PackageIndex
( PackageIndex )
import Distribution.Simple.PackageIndex ( InstalledPackageIndex )
import Distribution.Package
( PackageName )
import Distribution.Version
( VersionRange, simplifyVersionRange )
import Distribution.Compiler
( CompilerInfo )
import Distribution.System
( Platform )
import Distribution.Text
( Text(..), display )
import Text.PrettyPrint
( text )
import Prelude hiding (fail)
-- | All the solvers that can be selected.  Rendered and parsed as
-- @topdown@, @modular@ and @choose@ by the 'Text' instance below.
data PreSolver = AlwaysTopDown | AlwaysModular | Choose
  deriving (Eq, Ord, Show, Bounded, Enum)

-- | All the solvers that can actually be used.  A 'PreSolver' of
-- 'Choose' presumably gets resolved to one of these elsewhere — the
-- mapping is not in this module.
data Solver = TopDown | Modular
  deriving (Eq, Ord, Show, Bounded, Enum)
-- | Render\/parse a 'PreSolver' under the names @topdown@, @modular@
-- and @choose@.  Parsing is case-insensitive; anything else fails.
instance Text PreSolver where
  disp solver = case solver of
    AlwaysTopDown -> text "topdown"
    AlwaysModular -> text "modular"
    Choose        -> text "choose"
  parse = do
      name <- Parse.munch1 isAlpha
      maybe Parse.pfail return (lookup (map toLower name) solverNames)
    where
      solverNames = [ ("topdown", AlwaysTopDown)
                    , ("modular", AlwaysModular)
                    , ("choose",  Choose)
                    ]
-- | A dependency resolver is a function that works out an installation plan
-- given the set of installed and available packages and a set of deps to
-- solve for.
--
-- The reason for this interface is because there are dozens of approaches to
-- solving the package dependency problem and we want to make it easy to swap
-- in alternatives.
--
type DependencyResolver = Platform
                       -> CompilerInfo
                       -> InstalledPackageIndex                    -- packages already installed
                       -> PackageIndex.PackageIndex SourcePackage  -- available source packages
                       -> (PackageName -> PackagePreferences)      -- per-package soft preferences
                       -> [LabeledPackageConstraint]               -- hard constraints (with provenance)
                       -> [PackageName]                            -- the targets to solve for
                       -> Progress String String [ResolverPackage] -- log steps, then failure or a plan
-- | The dependency resolver picks either pre-existing installed packages
-- or it picks source packages along with package configuration.
--
-- This is like the 'InstallPlan.PlanPackage' but with fewer cases.
--
data ResolverPackage = PreExisting InstalledPackageInfo -- ^ use the installed instance as-is
                     | Configured ConfiguredPackage     -- ^ build this source package with this configuration
-- | Per-package constraints. Package constraints must be respected by the
-- solver. Multiple constraints for each package can be given, though obviously
-- it is possible to construct conflicting constraints (eg impossible version
-- range or inconsistent flag assignment).
--
data PackageConstraint
   = PackageConstraintVersion PackageName VersionRange      -- ^ version must lie in the range
   | PackageConstraintInstalled PackageName                 -- ^ must use an installed instance
   | PackageConstraintSource PackageName                    -- ^ must (re)build from source
   | PackageConstraintFlags PackageName FlagAssignment      -- ^ fix this flag assignment
   | PackageConstraintStanzas PackageName [OptionalStanza]  -- ^ enable these optional stanzas
  deriving (Show,Eq)
-- | Provide a textual representation of a package constraint
-- for debugging purposes.
showPackageConstraint :: PackageConstraint -> String
showPackageConstraint constraint = case constraint of
    PackageConstraintVersion pn vr ->
      display pn ++ " " ++ display (simplifyVersionRange vr)
    PackageConstraintInstalled pn ->
      display pn ++ " installed"
    PackageConstraintSource pn ->
      display pn ++ " source"
    PackageConstraintFlags pn fs ->
      -- Each flag is shown as "+name" (on) or "-name" (off).
      "flags " ++ display pn ++ " " ++ unwords [ showFlag f v | (f, v) <- fs ]
    PackageConstraintStanzas pn ss ->
      "stanzas " ++ display pn ++ " " ++ unwords (map showStanza ss)
  where
    showFlag (FlagName f) True  = "+" ++ f
    showFlag (FlagName f) False = "-" ++ f
    showStanza TestStanzas  = "test"
    showStanza BenchStanzas = "bench"
-- | A per-package preference on the version. It is a soft constraint that the
-- 'DependencyResolver' should try to respect where possible. It consists of
-- a 'InstalledPreference' which says if we prefer versions of packages
-- that are already installed. It also has a 'PackageVersionPreference' which
-- is a suggested constraint on the version number. The resolver should try to
-- use package versions that satisfy the suggested version constraint.
--
-- It is not specified if preferences on some packages are more important than
-- others.
--
data PackagePreferences = PackagePreferences VersionRange InstalledPreference

-- | Whether we prefer an installed version of a package or simply the latest
-- version.
--
data InstalledPreference = PreferInstalled | PreferLatest
  deriving Show
-- | Global policy for all packages to say if we prefer package versions that
-- are already installed locally or if we just prefer the latest available.
-- This is the blanket default; per-package 'PackagePreferences' refine it.
--
data PackagesPreferenceDefault =
     -- | Always prefer the latest version irrespective of any existing
     -- installed version.
     --
     -- * This is the standard policy for upgrade.
     --
     PreferAllLatest

     -- | Always prefer the installed versions over ones that would need to be
     -- installed. Secondarily, prefer latest versions (eg the latest installed
     -- version or if there are none then the latest source version).
   | PreferAllInstalled

     -- | Prefer the latest version for packages that are explicitly requested
     -- but prefers the installed version for any other packages.
     --
     -- * This is the standard policy for install.
     --
   | PreferLatestForSelected
  deriving Show
-- | Policy for relaxing upper bounds in dependencies. For example, given
-- 'build-depends: array >= 0.3 && < 0.5', are we allowed to relax the upper
-- bound and choose a version of 'array' that is greater or equal to 0.5? By
-- default the upper bounds are always strictly honored.
data AllowNewer =
  -- | Default: honor the upper bounds in all dependencies, never choose
  -- versions newer than allowed.
  AllowNewerNone

  -- | Ignore upper bounds in dependencies on the given packages.
  | AllowNewerSome [PackageName]

  -- | Ignore upper bounds in dependencies on all packages.
  | AllowNewerAll
-- | Convert 'AllowNewer' to a boolean: is any relaxing of upper
-- bounds permitted at all?  (The specific package list of
-- 'AllowNewerSome' is deliberately discarded here.)
isAllowNewer :: AllowNewer -> Bool
isAllowNewer an = case an of
  AllowNewerNone   -> False
  AllowNewerSome _ -> True
  AllowNewerAll    -> True
-- | A type to represent the unfolding of an expensive long running
-- calculation that may fail. We may get intermediate steps before the final
-- result which may be used to indicate progress and\/or logging messages.
--
-- Structurally this is a list of @step@s terminated by either a
-- failure or a final result.
data Progress step fail done = Step step (Progress step fail done)
                             | Fail fail
                             | Done done
  deriving Functor
-- | Consume a 'Progress' calculation. Much like 'foldr' for lists but with two
-- base cases, one for a final result and one for failure.
--
-- Eg to convert into a simple 'Either' result use:
--
-- > foldProgress (flip const) Left Right
--
foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a)
             -> Progress step fail done -> a
foldProgress step fail done = go
  where
    -- Walk the spine, replacing each constructor with its handler.
    go (Step s rest) = step s (go rest)
    go (Fail f)      = fail f
    go (Done r)      = done r
-- Sequencing for 'Progress': '>>=' replays all the 'Step's of the
-- first computation, short-circuits on 'Fail', and feeds a 'Done'
-- result to the continuation.
instance Monad (Progress step fail) where
  return a = Done a
  p >>= f = foldProgress Step Fail f p

instance Applicative (Progress step fail) where
  pure  a = Done a
  p <*> x = foldProgress Step Fail (flip fmap x) p

-- 'p <|> q' keeps p's steps; if p ultimately fails, its failure value
-- is discarded and q is tried instead.  'empty' fails with 'mempty',
-- hence the 'Monoid' constraint on the failure type.
instance Monoid fail => Alternative (Progress step fail) where
  empty   = Fail mempty
  p <|> q = foldProgress Step (const q) Done p
-- | 'PackageConstraint' labeled with its source, so error messages
-- can say where a conflicting constraint came from.
data LabeledPackageConstraint
   = LabeledPackageConstraint PackageConstraint ConstraintSource
-- | Strip the 'ConstraintSource' label, leaving the bare constraint.
unlabelPackageConstraint :: LabeledPackageConstraint -> PackageConstraint
unlabelPackageConstraint lpc = case lpc of
  LabeledPackageConstraint pc _source -> pc
-- | Source of a 'PackageConstraint'.  Rendered for error messages by
-- 'showConstraintSource'.
data ConstraintSource =

  -- | Main config file, which is ~/.cabal/config by default.
  ConstraintSourceMainConfig FilePath

  -- | Sandbox config file, which is ./cabal.sandbox.config by default.
  | ConstraintSourceSandboxConfig FilePath

  -- | User config file, which is ./cabal.config by default.
  | ConstraintSourceUserConfig FilePath

  -- | Flag specified on the command line.
  | ConstraintSourceCommandlineFlag

  -- | Target specified by the user, e.g., @cabal install package-0.1.0.0@
  -- implies @package==0.1.0.0@.
  | ConstraintSourceUserTarget

  -- | Internal requirement to use installed versions of packages like ghc-prim.
  | ConstraintSourceNonUpgradeablePackage

  -- | Internal requirement to use the add-source version of a package when that
  -- version is installed and the source is modified.
  | ConstraintSourceModifiedAddSourceDep

  -- | Internal constraint used by @cabal freeze@.
  | ConstraintSourceFreeze

  -- | Constraint specified by a config file, a command line flag, or a user
  -- target, when a more specific source is not known.
  | ConstraintSourceConfigFlagOrTarget

  -- | The source of the constraint is not specified.
  | ConstraintSourceUnknown
  deriving (Eq, Show)
-- | Description of a 'ConstraintSource', suitable for inclusion in
-- user-facing error messages.
showConstraintSource :: ConstraintSource -> String
showConstraintSource src = case src of
  ConstraintSourceMainConfig path       -> "main config " ++ path
  ConstraintSourceSandboxConfig path    -> "sandbox config " ++ path
  ConstraintSourceUserConfig path       -> "user config " ++ path
  ConstraintSourceCommandlineFlag       -> "command line flag"
  ConstraintSourceUserTarget            -> "user target"
  ConstraintSourceNonUpgradeablePackage -> "non-upgradeable package"
  ConstraintSourceModifiedAddSourceDep  -> "modified add-source dependency"
  ConstraintSourceFreeze                -> "cabal freeze"
  ConstraintSourceConfigFlagOrTarget    ->
    "config file, command line flag, or user target"
  ConstraintSourceUnknown               -> "unknown source"
| randen/cabal | cabal-install/Distribution/Client/Dependency/Types.hs | bsd-3-clause | 11,429 | 0 | 13 | 2,302 | 1,602 | 923 | 679 | 165 | 3 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Get
-- Copyright : (c) Andrea Vezzosi 2008
-- Duncan Coutts 2011
-- John Millikin 2012
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- The 'cabal get' command.
-----------------------------------------------------------------------------
module Distribution.Client.Get (
get
) where
import Distribution.Package
( PackageId, packageId, packageName )
import Distribution.Simple.Setup
( Flag(..), fromFlag, fromFlagOrDefault )
import Distribution.Simple.Utils
( notice, die, info, writeFileAtomic )
import Distribution.Verbosity
( Verbosity )
import Distribution.Text(display)
import qualified Distribution.PackageDescription as PD
import Distribution.Client.Setup
( GlobalFlags(..), GetFlags(..), RepoContext(..) )
import Distribution.Client.Types
import Distribution.Client.Targets
import Distribution.Client.Dependency
import Distribution.Client.FetchUtils
import qualified Distribution.Client.Tar as Tar (extractTarGzFile)
import Distribution.Client.IndexUtils as IndexUtils
( getSourcePackages )
import Distribution.Client.Compat.Process
( readProcessWithExitCode )
import Distribution.Compat.Exception
( catchIO )
import Distribution.Solver.Types.SourcePackage
import Control.Exception
( finally )
import Control.Monad
( filterM, forM_, unless, when )
import Data.List
( sortBy )
import qualified Data.Map
import Data.Maybe
( listToMaybe, mapMaybe )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
( mempty )
#endif
import Data.Ord
( comparing )
import System.Directory
( createDirectoryIfMissing, doesDirectoryExist, doesFileExist
, getCurrentDirectory, setCurrentDirectory
)
import System.Exit
( ExitCode(..) )
import System.FilePath
( (</>), (<.>), addTrailingPathSeparator )
import System.Process
( rawSystem )
-- | Entry point for the 'cabal get' command.
--
-- Resolves the requested user targets against the source package
-- index, then either unpacks each package's source tarball into
-- @prefix@ or, when a source-repository kind was requested via
-- 'getSourceRepository', clones\/checks out its source repository.
get :: Verbosity
    -> RepoContext
    -> GlobalFlags
    -> GetFlags
    -> [UserTarget]
    -> IO ()
get verbosity _ _ _ [] =
    notice verbosity "No packages requested. Nothing to do."
get verbosity repoCtxt globalFlags getFlags userTargets = do
  -- Forking mode is selected by any non-default --source-repository flag.
  let useFork = case (getSourceRepository getFlags) of
        NoFlag -> False
        _      -> True
  -- Local dirs / .cabal files are only rejected in tarball mode; see
  -- 'checkTarget' for the error text.
  unless useFork $
    mapM_ checkTarget userTargets

  sourcePkgDb <- getSourcePackages verbosity repoCtxt

  pkgSpecifiers <- resolveUserTargets verbosity repoCtxt
                     (fromFlag $ globalWorldFile globalFlags)
                     (packageIndex sourcePkgDb)
                     userTargets

  -- Resolve targets to concrete package versions (no dependency
  -- solving here); any resolution errors abort with one line each.
  pkgs <- either (die . unlines . map show) return $
            resolveWithoutDependencies
              (resolverParams sourcePkgDb pkgSpecifiers)

  unless (null prefix) $
    createDirectoryIfMissing True prefix

  if useFork
    then fork pkgs
    else unpack pkgs

  where
    resolverParams sourcePkgDb pkgSpecifiers =
        --TODO: add command-line constraint and preference args for unpack
        standardInstallPolicy mempty sourcePkgDb pkgSpecifiers

    -- Destination directory ("" means the current directory).
    prefix = fromFlagOrDefault "" (getDestDir getFlags)

    -- Clone/check out each package's source repository.
    fork :: [UnresolvedSourcePackage] -> IO ()
    fork pkgs = do
      let kind = fromFlag . getSourceRepository $ getFlags
      branchers <- findUsableBranchers
      mapM_ (forkPackage verbosity branchers prefix kind) pkgs

    -- Fetch (if necessary) and unpack each package's tarball.
    unpack :: [UnresolvedSourcePackage] -> IO ()
    unpack pkgs = do
      forM_ pkgs $ \pkg -> do
        location <- fetchPackage verbosity repoCtxt (packageSource pkg)
        let pkgid = packageId pkg
            -- When pristine output is requested, skip the cabal-file
            -- revision override from the index.
            descOverride | usePristine = Nothing
                         | otherwise   = packageDescrOverride pkg
        case location of
          LocalTarballPackage tarballPath ->
            unpackPackage verbosity prefix pkgid descOverride tarballPath
          RemoteTarballPackage _tarballURL tarballPath ->
            unpackPackage verbosity prefix pkgid descOverride tarballPath
          RepoTarballPackage _repo _pkgid tarballPath ->
            unpackPackage verbosity prefix pkgid descOverride tarballPath
          LocalUnpackedPackage _ ->
            -- fetchPackage never yields this constructor for the
            -- target kinds accepted above.
            error "Distribution.Client.Get.unpack: the impossible happened."
      where
        usePristine = fromFlagOrDefault False (getPristine getFlags)
-- | Reject targets that cannot be unpacked from a tarball (local
-- directories and local @.cabal@ files); all other targets pass.
checkTarget :: UserTarget -> IO ()
checkTarget target = maybe (return ()) (die . notTarball) (badPath target)
  where
    -- Classify: Just the offending path for non-tarball targets.
    badPath (UserTargetLocalDir dir)        = Just dir
    badPath (UserTargetLocalCabalFile file) = Just file
    badPath _                               = Nothing

    notTarball t =
        "The 'get' command is for tarball packages. "
     ++ "The target '" ++ t ++ "' is not a tarball."
-- ------------------------------------------------------------
-- * Unpacking the source tarball
-- ------------------------------------------------------------

-- | Unpack a fetched tarball into @prefix/<pkgid>/@, refusing to
-- overwrite an existing directory or file, and then (unless the
-- override is 'Nothing') replace the unpacked @.cabal@ file with the
-- revised one from the index.
unpackPackage :: Verbosity -> FilePath -> PackageId
              -> PackageDescriptionOverride
              -> FilePath -> IO ()
unpackPackage verbosity prefix pkgid descOverride pkgPath = do
    let pkgdirname = display pkgid
        pkgdir     = prefix </> pkgdirname
        pkgdir'    = addTrailingPathSeparator pkgdir
    existsDir <- doesDirectoryExist pkgdir
    when existsDir $ die $
      "The directory \"" ++ pkgdir' ++ "\" already exists, not unpacking."
    existsFile <- doesFileExist pkgdir
    when existsFile $ die $
      "A file \"" ++ pkgdir ++ "\" is in the way, not unpacking."
    notice verbosity $ "Unpacking to " ++ pkgdir'
    Tar.extractTarGzFile prefix pkgdirname pkgPath

    case descOverride of
      Nothing -> return ()
      Just pkgtxt -> do
        let descFilePath = pkgdir </> display (packageName pkgid) <.> "cabal"
        info verbosity $
          "Updating " ++ descFilePath
          ++ " with the latest revision from the index."
        -- Atomic write so a crash cannot leave a truncated .cabal file.
        writeFileAtomic descFilePath pkgtxt
-- ------------------------------------------------------------
-- * Forking the source repository
-- ------------------------------------------------------------

-- | A prepared clone\/branch action: given a verbosity and a
-- destination directory, run the VCS command and return its exit code.
data BranchCmd = BranchCmd (Verbosity -> FilePath -> IO ExitCode)

data Brancher = Brancher
  { brancherBinary :: String
    -- ^ Name of the VCS executable to invoke (e.g. @"git"@).
  , brancherBuildCmd :: PD.SourceRepo -> Maybe BranchCmd
    -- ^ Build the clone command for a repo description, if it carries
    -- enough information (at minimum a location).
  }
-- | The set of all supported branch drivers.  One entry per repo
-- type; list order is irrelevant since 'findUsableBranchers' turns
-- this into a 'Data.Map.Map'.
allBranchers :: [(PD.RepoType, Brancher)]
allBranchers =
    [ (PD.Bazaar, branchBzr)
    , (PD.Darcs, branchDarcs)
    , (PD.Git, branchGit)
    , (PD.Mercurial, branchHg)
    , (PD.SVN, branchSvn)
    ]
-- | Find which usable branch drivers (selected from 'allBranchers') are
-- available and usable on the local machine.
--
-- Each driver's main command is run with @--help@, and if the child process
-- exits successfully, that brancher is considered usable.
findUsableBranchers :: IO (Data.Map.Map PD.RepoType Brancher)
findUsableBranchers = do
    -- Any IO failure (e.g. binary not on PATH) counts as "unusable".
    let usable (_, brancher) = flip catchIO (const (return False)) $ do
          let cmd = brancherBinary brancher
          (exitCode, _, _) <- readProcessWithExitCode cmd ["--help"] ""
          return (exitCode == ExitSuccess)
    pairs <- filterM usable allBranchers
    return (Data.Map.fromList pairs)
-- | Fork a single package from a remote source repository to the local
-- file system.
forkPackage :: Verbosity
            -> Data.Map.Map PD.RepoType Brancher
               -- ^ Branchers supported by the local machine.
            -> FilePath
               -- ^ The directory in which new branches or repositories will
               -- be created.
            -> (Maybe PD.RepoKind)
               -- ^ Which repo to choose.
            -> SourcePackage loc
               -- ^ The package to fork.
            -> IO ()
forkPackage verbosity branchers prefix kind src = do
    let desc    = PD.packageDescription (packageDescription src)
        pkgid   = display (packageId src)
        pkgname = display (packageName src)
        destdir = prefix </> pkgname

    -- Refuse to clobber an existing directory or file at the destination.
    destDirExists <- doesDirectoryExist destdir
    when destDirExists $ do
      die ("The directory " ++ show destdir ++ " already exists, not forking.")

    destFileExists <- doesFileExist destdir
    when destFileExists $ do
      die ("A file " ++ show destdir ++ " is in the way, not forking.")

    let repos = PD.sourceRepos desc
    case findBranchCmd branchers repos kind of
      -- Run the chosen VCS command; a non-zero exit is fatal.
      Just (BranchCmd io) -> do
        exitCode <- io verbosity destdir
        case exitCode of
          ExitSuccess   -> return ()
          ExitFailure _ -> die ("Couldn't fork package " ++ pkgid)
      -- Distinguish "no repos at all" from "repos, but none usable here".
      Nothing -> case repos of
        [] -> die ("Package " ++ pkgid
                   ++ " does not have any source repositories.")
        _  -> die ("Package " ++ pkgid
                   ++ " does not have any usable source repositories.")
-- | Given a set of possible branchers, and a set of possible source
-- repositories, find a repository that is both 1) likely to be specific to
-- this source version and 2) is supported by the local machine.
findBranchCmd :: Data.Map.Map PD.RepoType Brancher -> [PD.SourceRepo]
              -> (Maybe PD.RepoKind) -> Maybe BranchCmd
findBranchCmd branchers allRepos maybeKind = cmd where
    -- Sort repositories by kind, from This to Head to Unknown. Repositories
    -- with equivalent kinds are selected based on the order they appear in
    -- the Cabal description file.
    repos' = sortBy (comparing thisFirst) allRepos
    thisFirst r = case PD.repoKind r of
        PD.RepoThis -> 0 :: Int
        PD.RepoHead -> case PD.repoTag r of
            -- If the type is 'head' but the author specified a tag, they
            -- probably meant to create a 'this' repository but screwed up.
            Just _ -> 0
            Nothing -> 1
        PD.RepoKindUnknown _ -> 2

    -- If the user has specified the repo kind, filter out the repositories
    -- she's not interested in.
    repos = maybe repos' (\k -> filter ((==) k . PD.repoKind) repos') maybeKind

    -- A repo yields a command only if it has a type, a locally-usable
    -- brancher exists for that type, and the brancher can build a
    -- command from the repo's fields (all chained in the Maybe monad).
    repoBranchCmd repo = do
        t <- PD.repoType repo
        brancher <- Data.Map.lookup t branchers
        brancherBuildCmd brancher repo

    -- First repository, in preference order, that yields a command.
    cmd = listToMaybe (mapMaybe repoBranchCmd repos)
-- | Branch driver for Bazaar.  Honors an optional tag via @-r tag:@.
branchBzr :: Brancher
branchBzr = Brancher "bzr" $ \repo -> do
    -- Maybe monad: a repo without a location yields no command.
    src <- PD.repoLocation repo
    let args dst = case PD.repoTag repo of
          Just tag -> ["branch", src, dst, "-r", "tag:" ++ tag]
          Nothing  -> ["branch", src, dst]
    return $ BranchCmd $ \verbosity dst -> do
      notice verbosity ("bzr: branch " ++ show src)
      rawSystem "bzr" (args dst)
-- | Branch driver for Darcs.  Honors an optional tag via @-t@.
branchDarcs :: Brancher
branchDarcs = Brancher "darcs" $ \repo -> do
    -- Maybe monad: a repo without a location yields no command.
    src <- PD.repoLocation repo
    let args dst = case PD.repoTag repo of
          Just tag -> ["get", src, dst, "-t", tag]
          Nothing  -> ["get", src, dst]
    return $ BranchCmd $ \verbosity dst -> do
      notice verbosity ("darcs: get " ++ show src)
      rawSystem "darcs" (args dst)
-- | Branch driver for Git.  Clones (optionally with @--branch@), and
-- if a tag is given, checks it out afterwards from inside the clone.
branchGit :: Brancher
branchGit = Brancher "git" $ \repo -> do
    -- Maybe monad: a repo without a location yields no command.
    src <- PD.repoLocation repo
    let branchArgs = case PD.repoBranch repo of
          Just b -> ["--branch", b]
          Nothing -> []
    -- NOTE(review): branchArgs is appended to the post-clone
    -- 'git checkout' as well as to 'git clone'; passing --branch to
    -- checkout looks suspicious — confirm this is intended.
    let postClone dst = case PD.repoTag repo of
          Just t -> do
            -- Temporarily cd into the clone; 'finally' restores the
            -- original working directory even if checkout fails.
            cwd <- getCurrentDirectory
            setCurrentDirectory dst
            finally
              (rawSystem "git" (["checkout", t] ++ branchArgs))
              (setCurrentDirectory cwd)
          Nothing -> return ExitSuccess
    return $ BranchCmd $ \verbosity dst -> do
      notice verbosity ("git: clone " ++ show src)
      code <- rawSystem "git" (["clone", src, dst] ++ branchArgs)
      case code of
        ExitFailure _ -> return code
        ExitSuccess -> postClone dst
-- | Branch driver for Mercurial.  Honors an optional branch
-- (@--branch@) and tag (@--rev@) in a single clone invocation.
branchHg :: Brancher
branchHg = Brancher "hg" $ \repo -> do
    -- Maybe monad: a repo without a location yields no command.
    src <- PD.repoLocation repo
    let branchArgs = case PD.repoBranch repo of
          Just b -> ["--branch", b]
          Nothing -> []
    let tagArgs = case PD.repoTag repo of
          Just t -> ["--rev", t]
          Nothing -> []
    let args dst = ["clone", src, dst] ++ branchArgs ++ tagArgs
    return $ BranchCmd $ \verbosity dst -> do
      notice verbosity ("hg: clone " ++ show src)
      rawSystem "hg" (args dst)
-- | Branch driver for Subversion.  A plain checkout of the repo
-- location; tags and branches are not consulted for svn.
branchSvn :: Brancher
branchSvn = Brancher "svn" $ \repo -> do
    -- Maybe monad: a repo without a location yields no command.
    src <- PD.repoLocation repo
    return $ BranchCmd $ \verbosity dst -> do
      notice verbosity ("svn: checkout " ++ show src)
      rawSystem "svn" ["checkout", src, dst]
| thomie/cabal | cabal-install/Distribution/Client/Get.hs | bsd-3-clause | 12,868 | 0 | 22 | 3,422 | 2,996 | 1,534 | 1,462 | 254 | 6 |
{-# LANGUAGE CPP #-}
--
-- (c) The GRASP/AQUA Project, Glasgow University, 1993-1998
--
--------------------------------------------------------------
-- Converting Core to STG Syntax
--------------------------------------------------------------
-- And, as we have the info in hand, we may convert some lets to
-- let-no-escapes.
module CoreToStg ( coreToStg, coreExprToStg ) where
#include "HsVersions.h"
import CoreSyn
import CoreUtils ( exprType, findDefault )
import CoreArity ( manifestArity )
import StgSyn
import Type
import TyCon
import MkId ( coercionTokenId )
import Id
import IdInfo
import DataCon
import CostCentre ( noCCS )
import VarSet
import VarEnv
import Module
import Name ( getOccName, isExternalName, nameOccName )
import OccName ( occNameString, occNameFS )
import BasicTypes ( Arity )
import TysWiredIn ( unboxedUnitDataCon )
import Literal
import Outputable
import MonadUtils
import FastString
import Util
import DynFlags
import ForeignCall
import Demand ( isSingleUsed )
import PrimOp ( PrimCall(..) )
import Data.Maybe (isJust)
import Control.Monad (liftM, ap)
-- Note [Live vs free]
-- ~~~~~~~~~~~~~~~~~~~
--
-- The actual Stg datatype is decorated with live variable information, as well
-- as free variable information. The two are not the same. Liveness is an
-- operational property rather than a semantic one. A variable is live at a
-- particular execution point if it can be referred to directly again. In
-- particular, a dead variable's stack slot (if it has one):
--
-- - should be stubbed to avoid space leaks, and
-- - may be reused for something else.
--
-- There ought to be a better way to say this. Here are some examples:
--
-- let v = [q] \[x] -> e
-- in
-- ...v... (but no q's)
--
-- Just after the `in', v is live, but q is dead. If the whole of that
-- let expression was enclosed in a case expression, thus:
--
-- case (let v = [q] \[x] -> e in ...v...) of
-- alts[...q...]
--
-- (ie `alts' mention `q'), then `q' is live even after the `in'; because
-- we'll return later to the `alts' and need it.
--
-- Let-no-escapes make this a bit more interesting:
--
-- let-no-escape v = [q] \ [x] -> e
-- in
-- ...v...
--
-- Here, `q' is still live at the `in', because `v' is represented not by
-- a closure but by the current stack state. In other words, if `v' is
-- live then so is `q'. Furthermore, if `e' mentions an enclosing
-- let-no-escaped variable, then its free variables are also live if `v' is.
-- Note [Collecting live CAF info]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- In this pass we also collect information on which CAFs are live for
-- constructing SRTs (see SRT.hs).
--
-- A top-level Id has CafInfo, which is
--
-- - MayHaveCafRefs, if it may refer indirectly to
-- one or more CAFs, or
-- - NoCafRefs if it definitely doesn't
--
-- The CafInfo has already been calculated during the CoreTidy pass.
--
-- During CoreToStg, we then pin onto each binding and case expression, a
-- list of Ids which represents the "live" CAFs at that point. The meaning
-- of "live" here is the same as for live variables, see above (which is
-- why it's convenient to collect CAF information here rather than elsewhere).
--
-- The later SRT pass takes these lists of Ids and uses them to construct
-- the actual nested SRTs, and replaces the lists of Ids with (offset,length)
-- pairs.
-- Note [Interaction of let-no-escape with SRTs]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Consider
--
-- let-no-escape x = ...caf1...caf2...
-- in
-- ...x...x...x...
--
-- where caf1,caf2 are CAFs. Since x doesn't have a closure, we
-- build SRTs just as if x's defn was inlined at each call site, and
-- that means that x's CAF refs get duplicated in the overall SRT.
--
-- This is unlike ordinary lets, in which the CAF refs are not duplicated.
--
-- We could fix this loss of (static) sharing by making a sort of pseudo-closure
-- for x, solely to put in the SRTs lower down.
-- Note [What is a non-escaping let]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Consider:
--
-- let x = fvs \ args -> e
-- in
-- if ... then x else
-- if ... then x else ...
--
-- `x' is used twice (so we probably can't unfold it), but when it is
-- entered, the stack is deeper than it was when the definition of `x'
-- happened. Specifically, if instead of allocating a closure for `x',
-- we saved all `x's fvs on the stack, and remembered the stack depth at
-- that moment, then whenever we enter `x' we can simply set the stack
-- pointer(s) to these remembered (compile-time-fixed) values, and jump
-- to the code for `x'.
--
-- All of this is provided x is:
-- 1. non-updatable - it must have at least one parameter (see Note
-- [Join point abstraction]);
-- 2. guaranteed to be entered before the stack retreats -- ie x is not
-- buried in a heap-allocated closure, or passed as an argument to
-- something;
-- 3. all the enters have exactly the right number of arguments,
-- no more no less;
-- 4. all the enters are tail calls; that is, they return to the
-- caller enclosing the definition of `x'.
--
-- Under these circumstances we say that `x' is non-escaping.
--
-- An example of when (4) does not hold:
--
-- let x = ...
-- in case x of ...alts...
--
-- Here, `x' is certainly entered only when the stack is deeper than when
-- `x' is defined, but here it must return to ...alts... So we can't just
-- adjust the stack down to `x''s recalled points, because that would lost
-- alts' context.
--
-- Things can get a little more complicated. Consider:
--
-- let y = ...
-- in let x = fvs \ args -> ...y...
-- in ...x...
--
-- Now, if `x' is used in a non-escaping way in ...x..., and `y' is used in a
-- non-escaping way in ...y..., then `y' is non-escaping.
--
-- `x' can even be recursive! Eg:
--
-- letrec x = [y] \ [v] -> if v then x True else ...
-- in
-- ...(x b)...
-- --------------------------------------------------------------
-- Setting variable info: top-level, binds, RHSs
-- --------------------------------------------------------------

-- | Convert a whole Core program to STG.  The conversion itself is
-- pure (the body is just 'return'); the 'IO' type mirrors the other
-- compiler passes.  The accumulated env/fvs results are discarded at
-- the top level.
coreToStg :: DynFlags -> Module -> CoreProgram -> IO [StgBinding]
coreToStg dflags this_mod pgm
  = return pgm'
  where (_, _, pgm') = coreTopBindsToStg dflags this_mod emptyVarEnv pgm
-- | Convert a single Core expression to STG, starting the LNE monad
-- from an empty binding environment.  Free-variable and escape info
-- are computed but discarded.
coreExprToStg :: CoreExpr -> StgExpr
coreExprToStg expr = stg_expr
  where
    (stg_expr, _fvs, _escs) = initLne emptyVarEnv (coreToStgExpr expr)
-- | Convert the top-level bindings, threading the 'HowBound'
-- environment downwards and the free-variable info upwards through a
-- lazily tied knot.  The tuple patterns below MUST stay lazy (plain
-- where bindings): forcing fvs1/env1 eagerly would loop.
coreTopBindsToStg
    :: DynFlags
    -> Module
    -> IdEnv HowBound           -- environment for the bindings
    -> CoreProgram
    -> (IdEnv HowBound, FreeVarsInfo, [StgBinding])

coreTopBindsToStg _ _ env [] = (env, emptyFVInfo, [])
coreTopBindsToStg dflags this_mod env (b:bs)
  = (env2, fvs2, b':bs')
  where
        -- Notice the mutually-recursive "knot" here:
        --   env accumulates down the list of binds,
        --   fvs accumulates upwards
        (env1, fvs2, b' ) = coreTopBindToStg dflags this_mod env fvs1 b
        (env2, fvs1, bs') = coreTopBindsToStg dflags this_mod env1 bs
-- | Convert one top-level binding (non-recursive or recursive).
-- Extends the 'HowBound' environment with the binder(s) — recorded as
-- 'LetBound' 'TopLet' with their manifest arity — and unions the
-- RHS's free-variable info into that of the body.
coreTopBindToStg
        :: DynFlags
        -> Module
        -> IdEnv HowBound
        -> FreeVarsInfo         -- Info about the body
        -> CoreBind
        -> (IdEnv HowBound, FreeVarsInfo, StgBinding)

coreTopBindToStg dflags this_mod env body_fvs (NonRec id rhs)
  = let
        env'      = extendVarEnv env id how_bound
        -- $! here forces the arity computation once, up front.
        how_bound = LetBound TopLet $! manifestArity rhs

        (stg_rhs, fvs') =
            initLne env $ do
              (stg_rhs, fvs') <- coreToTopStgRhs dflags this_mod body_fvs (id,rhs)
              return (stg_rhs, fvs')

        bind = StgNonRec id stg_rhs
    in
    ASSERT2(consistentCafInfo id bind, ppr id )
      -- NB: previously the assertion printed 'rhs' and 'bind'
      --     as well as 'id', but that led to a black hole
      --     where printing the assertion error tripped the
      --     assertion again!
    (env', fvs' `unionFVInfo` body_fvs, bind)

coreTopBindToStg dflags this_mod env body_fvs (Rec pairs)
  = ASSERT( not (null pairs) )
    let
        binders = map fst pairs

        -- Recursive group: all binders go into the environment that
        -- the RHSs themselves are converted under (env').
        extra_env' = [ (b, LetBound TopLet $! manifestArity rhs)
                     | (b, rhs) <- pairs ]
        env' = extendVarEnvList env extra_env'

        (stg_rhss, fvs')
          = initLne env' $ do
               (stg_rhss, fvss') <- mapAndUnzipM (coreToTopStgRhs dflags this_mod body_fvs) pairs
               let fvs' = unionFVInfos fvss'
               return (stg_rhss, fvs')

        bind = StgRec (zip binders stg_rhss)
    in
    ASSERT2(consistentCafInfo (head binders) bind, ppr binders)
    (env', fvs' `unionFVInfo` body_fvs, bind)
-- Assertion helper: this checks that the CafInfo on the Id matches
-- what CoreToStg has figured out about the binding's SRT.  The
-- CafInfo will be exact in all cases except when CorePrep has
-- floated out a binding, in which case it will be approximate.
consistentCafInfo :: Id -> GenStgBinding Var Id -> Bool
consistentCafInfo id bind
  = WARN( not (exact || is_sat_thing) , ppr id <+> ppr id_marked_caffy <+> ppr binding_is_caffy )
    safe
  where
    -- Safe direction: marking an Id caffy when the binding isn't is ok;
    -- the reverse would be a real bug.
    safe  = id_marked_caffy || not binding_is_caffy
    exact = id_marked_caffy == binding_is_caffy
    id_marked_caffy  = mayHaveCafRefs (idCafInfo id)
    binding_is_caffy = stgBindHasCafRefs bind
    -- CorePrep-introduced "sat" bindings only get approximate CafInfo
    is_sat_thing = occNameFS (nameOccName (idName id)) == fsLit "sat"
-- | Convert the RHS of one top-level binding, producing the StgRhs and
-- its free-variable info, and asserting the STG arity agrees with the
-- Id's recorded arity.
coreToTopStgRhs
        :: DynFlags
        -> Module
        -> FreeVarsInfo         -- Free var info for the scope of the binding
        -> (Id,CoreExpr)
        -> LneM (StgRhs, FreeVarsInfo)

coreToTopStgRhs dflags this_mod scope_fv_info (bndr, rhs)
  = do { (new_rhs, rhs_fvs, _) <- coreToStgExpr rhs
       ; lv_info <- freeVarsToLiveVars rhs_fvs

       ; let stg_rhs   = mkTopStgRhs dflags this_mod rhs_fvs (mkSRT lv_info) bndr bndr_info new_rhs
             stg_arity = stgRhsArity stg_rhs
       ; return (ASSERT2( arity_ok stg_arity, mk_arity_msg stg_arity) stg_rhs,
                 rhs_fvs) }
  where
    bndr_info = lookupFVInfo scope_fv_info bndr

        -- It's vital that the arity on a top-level Id matches
        -- the arity of the generated STG binding, else an importing
        -- module will use the wrong calling convention
        --      (Trac #2844 was an example where this happened)
        -- NB1: we can't move the assertion further out without
        --      blocking the "knot" tied in coreTopBindsToStg
        -- NB2: the arity check is only needed for Ids with External
        --      Names, because they are externally visible.  The CorePrep
        --      pass introduces "sat" things with Local Names and does
        --      not bother to set their Arity info, so don't fail for those
    arity_ok stg_arity
       | isExternalName (idName bndr) = id_arity == stg_arity
       | otherwise                    = True
    id_arity  = idArity bndr
    mk_arity_msg stg_arity
        = vcat [ppr bndr,
                ptext (sLit "Id arity:") <+> ppr id_arity,
                ptext (sLit "STG arity:") <+> ppr stg_arity]
-- | Build the RHS for a top-level binding.  Unlike nested RHSs
-- (see 'mkStgRhs'), constructor applications referring into a DLL
-- must stay updatable, since they cannot be allocated statically.
mkTopStgRhs :: DynFlags -> Module -> FreeVarsInfo
            -> SRT -> Id -> StgBinderInfo -> StgExpr
            -> StgRhs

mkTopStgRhs dflags this_mod = mkStgRhs' con_updateable
        -- Dynamic StgConApps are updatable
  where con_updateable con args = isDllConApp dflags this_mod con args
-- ---------------------------------------------------------------------------
-- Expressions
-- ---------------------------------------------------------------------------
-- | Convert one Core expression into a decorated STG expression, its
-- free variables, and the subset of those that escape.
coreToStgExpr
        :: CoreExpr
        -> LneM (StgExpr,       -- Decorated STG expr
                 FreeVarsInfo,  -- Its free vars (NB free, not live)
                 EscVarsSet)    -- Its escapees, a subset of its free vars;
                                -- also a subset of the domain of the envt
                                -- because we are only interested in the escapees
                                -- for vars which might be turned into
                                -- let-no-escaped ones.

-- The second and third components can be derived in a simple bottom up pass, not
-- dependent on any decisions about which variables will be let-no-escaped or
-- not.  The first component, that is, the decorated expression, may then depend
-- on these components, but it in turn is not scrutinised as the basis for any
-- decisions.  Hence no black holes.

-- No LitInteger's should be left by the time this is called. CorePrep
-- should have converted them all to a real core representation.
coreToStgExpr (Lit (LitInteger {})) = panic "coreToStgExpr: LitInteger"
coreToStgExpr (Lit l)      = return (StgLit l, emptyFVInfo, emptyVarSet)
coreToStgExpr (Var v)      = coreToStgApp Nothing v [] []
coreToStgExpr (Coercion _) = coreToStgApp Nothing coercionTokenId [] []

coreToStgExpr expr@(App _ _)
  = coreToStgApp Nothing f args ticks
  where
    (f, args, ticks) = myCollectArgs expr

coreToStgExpr expr@(Lam _ _)
  = let
        (args, body) = myCollectBinders expr
        args'        = filterStgBinders args    -- drop type binders
    in
    extendVarEnvLne [ (a, LambdaBound) | a <- args' ] $ do
    (body, body_fvs, body_escs) <- coreToStgExpr body
    let
        fvs  = args' `minusFVBinders` body_fvs
        escs = body_escs `delVarSetList` args'
        -- A type-only lambda leaves no StgLam behind
        result_expr | null args' = body
                    | otherwise  = StgLam args' body

    return (result_expr, fvs, escs)

coreToStgExpr (Tick tick expr)
  = do case tick of
         HpcTick{}    -> return ()
         ProfNote{}   -> return ()
         SourceNote{} -> return ()
         Breakpoint{} -> panic "coreToStgExpr: breakpoint should not happen"
       (expr2, fvs, escs) <- coreToStgExpr expr
       return (StgTick tick expr2, fvs, escs)

coreToStgExpr (Cast expr _)
  = coreToStgExpr expr   -- casts are representation-irrelevant in STG

-- Cases require a little more real work.

coreToStgExpr (Case scrut _ _ [])
  = coreToStgExpr scrut
    -- See Note [Empty case alternatives] in CoreSyn If the case
    -- alternatives are empty, the scrutinee must diverge or raise an
    -- exception, so we can just dive into it.
    --
    -- Of course this may seg-fault if the scrutinee *does* return.  A
    -- belt-and-braces approach would be to move this case into the
    -- code generator, and put a return point anyway that calls a
    -- runtime system error function.

coreToStgExpr (Case scrut bndr _ alts) = do
    (alts2, alts_fvs, alts_escs)
       <- extendVarEnvLne [(bndr, LambdaBound)] $ do
            (alts2, fvs_s, escs_s) <- mapAndUnzip3M vars_alt alts
            return ( alts2,
                     unionFVInfos fvs_s,
                     unionVarSets escs_s )
    let
        -- Determine whether the default binder is dead or not
        -- This helps the code generator to avoid generating an assignment
        -- for the case binder (is extremely rare cases) ToDo: remove.
        bndr' | bndr `elementOfFVInfo` alts_fvs = bndr
              | otherwise                       = bndr `setIdOccInfo` IAmDead

        -- Don't consider the default binder as being 'live in alts',
        -- since this is from the point of view of the case expr, where
        -- the default binder is not free.
        alts_fvs_wo_bndr  = bndr `minusFVBinder` alts_fvs
        alts_escs_wo_bndr = alts_escs `delVarSet` bndr

    alts_lv_info <- freeVarsToLiveVars alts_fvs_wo_bndr

        -- We tell the scrutinee that everything
        -- live in the alts is live in it, too.
    (scrut2, scrut_fvs, _scrut_escs, scrut_lv_info)
       <- setVarsLiveInCont alts_lv_info $ do
            (scrut2, scrut_fvs, scrut_escs) <- coreToStgExpr scrut
            scrut_lv_info <- freeVarsToLiveVars scrut_fvs
            return (scrut2, scrut_fvs, scrut_escs, scrut_lv_info)

    return (
      StgCase scrut2 (getLiveVars scrut_lv_info)
                     (getLiveVars alts_lv_info)
                     bndr'
                     (mkSRT alts_lv_info)
                     (mkStgAltType bndr alts)
                     alts2,
      scrut_fvs `unionFVInfo` alts_fvs_wo_bndr,
      alts_escs_wo_bndr `unionVarSet` getFVSet scrut_fvs
                -- You might think we should have scrut_escs, not
                -- (getFVSet scrut_fvs), but actually we can't call, and
                -- then return from, a let-no-escape thing.
      )
  where
    -- Convert one case alternative.
    vars_alt (con, binders, rhs)
      | DataAlt c <- con, c == unboxedUnitDataCon
      = -- This case is a bit smelly.
        -- See Note [Nullary unboxed tuple] in Type.hs
        -- where a nullary tuple is mapped to (State# World#)
        ASSERT( null binders )
        do { (rhs2, rhs_fvs, rhs_escs) <- coreToStgExpr rhs
           ; return ((DEFAULT, [], [], rhs2), rhs_fvs, rhs_escs) }
      | otherwise
      = let     -- Remove type variables
            binders' = filterStgBinders binders
        in
        extendVarEnvLne [(b, LambdaBound) | b <- binders'] $ do
        (rhs2, rhs_fvs, rhs_escs) <- coreToStgExpr rhs
        let
                -- Records whether each param is used in the RHS
            good_use_mask = [ b `elementOfFVInfo` rhs_fvs | b <- binders' ]

        return ( (con, binders', good_use_mask, rhs2),
                 binders' `minusFVBinders` rhs_fvs,
                 rhs_escs `delVarSetList` binders' )
                -- ToDo: remove the delVarSet;
                -- since escs won't include any of these binders

-- Lets not only take quite a bit of work, but this is where we convert
-- then to let-no-escapes, if we wish.
-- (Meanwhile, we don't expect to see let-no-escapes...)
coreToStgExpr (Let bind body) = do
    -- Knot-tied: the let-no-escape decision feeds back into the
    -- conversion of the same let.  Do not force no_binder_escapes early.
    (new_let, fvs, escs, _)
       <- mfix (\ ~(_, _, _, no_binder_escapes) ->
             coreToStgLet no_binder_escapes bind body
          )

    return (new_let, fvs, escs)

coreToStgExpr e = pprPanic "coreToStgExpr" (ppr e)
-- | Decide the 'AltType' of a case from the (representation) type of
-- its binder, falling back to the alternatives themselves when the
-- TyCon is abstract.
mkStgAltType :: Id -> [CoreAlt] -> AltType
mkStgAltType bndr alts = case repType (idType bndr) of
    UnaryRep rep_ty -> case tyConAppTyCon_maybe rep_ty of
        Just tc | isUnLiftedTyCon tc -> PrimAlt tc
                | isAbstractTyCon tc -> look_for_better_tycon
                | isAlgTyCon tc      -> AlgAlt tc
                | otherwise          -> ASSERT2( _is_poly_alt_tycon tc, ppr tc )
                                        PolyAlt
        Nothing                      -> PolyAlt
    UbxTupleRep rep_tys -> UbxTupAlt (length rep_tys)
    -- NB Nullary unboxed tuples have UnaryRep, and generate a PrimAlt
  where
    _is_poly_alt_tycon tc
        =  isFunTyCon tc
        || isPrimTyCon tc   -- "Any" is lifted but primitive
        || isFamilyTyCon tc -- Type family; e.g. Any, or arising from strict
                            -- function application where argument has a
                            -- type-family type

    -- Sometimes, the TyCon is a AbstractTyCon which may not have any
    -- constructors inside it.  Then we may get a better TyCon by
    -- grabbing the one from a constructor alternative
    -- if one exists.
    look_for_better_tycon
        | ((DataAlt con, _, _) : _) <- data_alts =
                AlgAlt (dataConTyCon con)
        | otherwise =
                ASSERT(null data_alts)
                PolyAlt
        where
            (data_alts, _deflt) = findDefault alts
-- ---------------------------------------------------------------------------
-- Applications
-- ---------------------------------------------------------------------------
-- | Convert an application (or bare variable occurrence) to STG,
-- classifying the head (constructor, primop, foreign call, or plain
-- application) and recording saturation/escape information.
coreToStgApp
         :: Maybe UpdateFlag            -- Just upd <=> this application is
                                        -- the rhs of a thunk binding
                                        --      x = [...] \upd [] -> the_app
                                        -- with specified update flag
        -> Id                           -- Function
        -> [CoreArg]                    -- Arguments
        -> [Tickish Id]                 -- Debug ticks
        -> LneM (StgExpr, FreeVarsInfo, EscVarsSet)

coreToStgApp _ f args ticks = do
    (args', args_fvs, ticks') <- coreToStgArgs args
    how_bound <- lookupVarLne f

    let
        n_val_args       = valArgCount args
        not_letrec_bound = not (isLetBound how_bound)
        fun_fvs          = singletonFVInfo f how_bound fun_occ
            -- e.g. (f :: a -> int) (x :: a)
            -- Here the free variables are "f", "x" AND the type variable "a"
            -- coreToStgArgs will deal with the arguments recursively

        -- Mostly, the arity info of a function is in the fn's IdInfo
        -- But new bindings introduced by CoreSat may not have no
        -- arity info; it would do us no good anyway.  For example:
        --      let f = \ab -> e in f
        -- No point in having correct arity info for f!
        -- Hence the hasArity stuff below.
        -- NB: f_arity is only consulted for LetBound things
        f_arity   = stgArity f how_bound
        saturated = f_arity <= n_val_args

        fun_occ
         | not_letrec_bound         = noBinderInfo  -- Uninteresting variable
         | f_arity > 0 && saturated = stgSatOcc     -- Saturated or over-saturated function call
         | otherwise                = stgUnsatOcc   -- Unsaturated function or thunk

        fun_escs
         | not_letrec_bound      = emptyVarSet  -- Only letrec-bound escapees are interesting
         | f_arity == n_val_args = emptyVarSet  -- A function *or thunk* with an exactly
                                                -- saturated call doesn't escape
                                                -- (let-no-escape applies to 'thunks' too)
         | otherwise             = unitVarSet f -- Inexact application; it does escape

        -- At the moment of the call:
        --   either the function is *not* let-no-escaped, in which case
        --          nothing is live except live_in_cont
        --   or the function *is* let-no-escaped in which case the
        --          variables it uses are live, but still the function
        --          itself is not.  PS.  In this case, the function's
        --          live vars should already include those of the
        --          continuation, but it does no harm to just union the
        --          two regardless.

        res_ty = exprType (mkApps (Var f) args)
        app = case idDetails f of
                DataConWorkId dc | saturated -> StgConApp dc args'

                -- Some primitive operator that might be implemented as a library call.
                PrimOpId op      -> ASSERT( saturated )
                                    StgOpApp (StgPrimOp op) args' res_ty

                -- A call to some primitive Cmm function.
                FCallId (CCall (CCallSpec (StaticTarget _ lbl (Just pkgId) True)
                                          PrimCallConv _))
                                 -> ASSERT( saturated )
                                    StgOpApp (StgPrimCallOp (PrimCall lbl pkgId)) args' res_ty

                -- A regular foreign call.
                FCallId call     -> ASSERT( saturated )
                                    StgOpApp (StgFCallOp call (idUnique f)) args' res_ty

                TickBoxOpId {}   -> pprPanic "coreToStg TickBox" $ ppr (f,args')
                _other           -> StgApp f args'
        fvs  = fun_fvs  `unionFVInfo` args_fvs
        vars = fun_escs `unionVarSet` (getFVSet args_fvs)
                -- All the free vars of the args are disqualified
                -- from being let-no-escaped.

        tapp = foldr StgTick app (ticks ++ ticks')

    -- Forcing these fixes a leak in the code generator, noticed while
    -- profiling for trac #4367
    app `seq` fvs `seq` seqVarSet vars `seq` return (
        tapp,
        fvs,
        vars
     )
-- ---------------------------------------------------------------------------
-- Argument lists
-- This is the guy that turns applications into A-normal form
-- ---------------------------------------------------------------------------
-- | Convert an argument list to STG atoms (A-normal form), dropping
-- type arguments, replacing coercions by a token, and floating ticks
-- out of the arguments.
coreToStgArgs :: [CoreArg] -> LneM ([StgArg], FreeVarsInfo, [Tickish Id])
coreToStgArgs []
  = return ([], emptyFVInfo, [])

coreToStgArgs (Type _ : args) = do     -- Type argument
    (args', fvs, ts) <- coreToStgArgs args
    return (args', fvs, ts)

coreToStgArgs (Coercion _ : args)  -- Coercion argument; replace with place holder
  = do { (args', fvs, ts) <- coreToStgArgs args
       ; return (StgVarArg coercionTokenId : args', fvs, ts) }

coreToStgArgs (Tick t e : args)
  = ASSERT( not (tickishIsCode t) )
    do { (args', fvs, ts) <- coreToStgArgs (e : args)
       ; return (args', fvs, t:ts) }

coreToStgArgs (arg : args) = do         -- Non-type argument
    (stg_args, args_fvs, ticks) <- coreToStgArgs args
    (arg', arg_fvs, _escs) <- coreToStgExpr arg
    let
        fvs = args_fvs `unionFVInfo` arg_fvs

        (aticks, arg'') = stripStgTicksTop tickishFloatable arg'
        -- CorePrep has A-normalised the program, so each argument must
        -- already be an atom (variable, nullary constructor or literal).
        stg_arg = case arg'' of
                       StgApp v []      -> StgVarArg v
                       StgConApp con [] -> StgVarArg (dataConWorkId con)
                       StgLit lit       -> StgLitArg lit
                       _                -> pprPanic "coreToStgArgs" (ppr arg)

        -- WARNING: what if we have an argument like (v `cast` co)
        --          where 'co' changes the representation type?
        --          (This really only happens if co is unsafe.)
        -- Then all the getArgAmode stuff in CgBindery will set the
        -- cg_rep of the CgIdInfo based on the type of v, rather
        -- than the type of 'co'.
        -- This matters particularly when the function is a primop
        -- or foreign call.
        -- Wanted: a better solution than this hacky warning

    let
        arg_ty     = exprType arg
        stg_arg_ty = stgArgType stg_arg
        bad_args = (isUnLiftedType arg_ty && not (isUnLiftedType stg_arg_ty))
                || (map typePrimRep (flattenRepType (repType arg_ty))
                        /= map typePrimRep (flattenRepType (repType stg_arg_ty)))
        -- In GHCi we coerce an argument of type BCO# (unlifted) to HValue (lifted),
        -- and pass it to a function expecting an HValue (arg_ty).  This is ok because
        -- we can treat an unlifted value as lifted.  But the other way round
        -- we complain.
        -- We also want to check if a pointer is cast to a non-ptr etc

    WARN( bad_args, ptext (sLit "Dangerous-looking argument. Probable cause: bad unsafeCoerce#") $$ ppr arg )
     return (stg_arg : stg_args, fvs, ticks ++ aticks)
-- ---------------------------------------------------------------------------
-- The magic for lets:
-- ---------------------------------------------------------------------------
-- | Convert a Core let(rec), deciding whether it can become an STG
-- let-no-escape.  Heavily knot-tied: the bindings need the body's free
-- variables and vice versa, hence the 'mfix' with lazy patterns.
coreToStgLet
         :: Bool        -- True <=> yes, we are let-no-escaping this let
         -> CoreBind    -- bindings
         -> CoreExpr    -- body
         -> LneM (StgExpr,      -- new let
                  FreeVarsInfo, -- variables free in the whole let
                  EscVarsSet,   -- variables that escape from the whole let
                  Bool)         -- True <=> none of the binders in the bindings
                                -- is among the escaping vars

coreToStgLet let_no_escape bind body = do
    (bind2, bind_fvs, bind_escs, bind_lvs,
     body2, body_fvs, body_escs, body_lvs)
       <- mfix $ \ ~(_, _, _, _, _, rec_body_fvs, _, _) -> do

          -- Do the bindings, setting live_in_cont to empty if
          -- we ain't in a let-no-escape world
          live_in_cont <- getVarsLiveInCont
          ( bind2, bind_fvs, bind_escs, bind_lv_info, env_ext)
                <- setVarsLiveInCont (if let_no_escape
                                          then live_in_cont
                                          else emptyLiveInfo)
                                     (vars_bind rec_body_fvs bind)

          -- Do the body
          extendVarEnvLne env_ext $ do
             (body2, body_fvs, body_escs) <- coreToStgExpr body
             body_lv_info <- freeVarsToLiveVars body_fvs

             return (bind2, bind_fvs, bind_escs, getLiveVars bind_lv_info,
                     body2, body_fvs, body_escs, getLiveVars body_lv_info)

    -- Compute the new let-expression
    let
        new_let | let_no_escape = StgLetNoEscape live_in_whole_let bind_lvs bind2 body2
                | otherwise     = StgLet bind2 body2

        free_in_whole_let
          = binders `minusFVBinders` (bind_fvs `unionFVInfo` body_fvs)

        live_in_whole_let
          = bind_lvs `unionVarSet` (body_lvs `delVarSetList` binders)

        real_bind_escs = if let_no_escape then
                            bind_escs
                         else
                            getFVSet bind_fvs
                            -- Everything escapes which is free in the bindings

        let_escs = (real_bind_escs `unionVarSet` body_escs) `delVarSetList` binders

        all_escs = bind_escs `unionVarSet` body_escs    -- Still includes binders of
                                                        -- this let(rec)

        no_binder_escapes = isEmptyVarSet (set_of_binders `intersectVarSet` all_escs)

        -- Debugging code as requested by Andrew Kennedy
        checked_no_binder_escapes
                | debugIsOn && not no_binder_escapes && any is_join_var binders
                = pprTrace "Interesting! A join var that isn't let-no-escaped" (ppr binders)
                  False
                | otherwise = no_binder_escapes

                -- Mustn't depend on the passed-in let_no_escape flag, since
                -- no_binder_escapes is used by the caller to derive the flag!
    return (
        new_let,
        free_in_whole_let,
        let_escs,
        checked_no_binder_escapes
      )
  where
    set_of_binders = mkVarSet binders
    binders        = bindersOf bind

    -- Environment entry for one binder: nested lets record what is
    -- live whenever the binder is live.
    mk_binding bind_lv_info binder rhs
        = (binder, LetBound (NestedLet live_vars) (manifestArity rhs))
        where
           live_vars | let_no_escape = addLiveVar bind_lv_info binder
                     | otherwise     = unitLiveVar binder
                -- c.f. the invariant on NestedLet

    vars_bind :: FreeVarsInfo           -- Free var info for body of binding
              -> CoreBind
              -> LneM (StgBinding,
                       FreeVarsInfo,
                       EscVarsSet,        -- free vars; escapee vars
                       LiveInfo,          -- Vars and CAFs live in binding
                       [(Id, HowBound)])  -- extension to environment

    vars_bind body_fvs (NonRec binder rhs) = do
        (rhs2, bind_fvs, bind_lv_info, escs) <- coreToStgRhs body_fvs [] (binder,rhs)
        let
            env_ext_item = mk_binding bind_lv_info binder rhs

        return (StgNonRec binder rhs2,
                bind_fvs, escs, bind_lv_info, [env_ext_item])

    vars_bind body_fvs (Rec pairs)
      -- Another knot: each RHS's live info feeds the environment under
      -- which all the RHSs of the group are converted.
      = mfix $ \ ~(_, rec_rhs_fvs, _, bind_lv_info, _) ->
           let
                rec_scope_fvs = unionFVInfo body_fvs rec_rhs_fvs
                binders = map fst pairs
                env_ext = [ mk_binding bind_lv_info b rhs
                          | (b,rhs) <- pairs ]
           in
           extendVarEnvLne env_ext $ do
              (rhss2, fvss, lv_infos, escss)
                     <- mapAndUnzip4M (coreToStgRhs rec_scope_fvs binders) pairs
              let
                  bind_fvs     = unionFVInfos fvss
                  bind_lv_info = foldr unionLiveInfo emptyLiveInfo lv_infos
                  escs         = unionVarSets escss

              return (StgRec (binders `zip` rhss2),
                      bind_fvs, escs, bind_lv_info, env_ext)
is_join_var :: Id -> Bool
-- A hack (used only for compiler debugging) to tell if
-- a variable started life as a join point ($j), by inspecting
-- its occurrence name.
is_join_var j = occNameString (getOccName j) == "$j"
-- | Convert the RHS of a nested (non-top-level) binding, also
-- returning the live-variable info used for SRTs and let-no-escape
-- decisions.  The extra [Id] is the group's binders, removed from the
-- RHS free vars before computing liveness.
coreToStgRhs :: FreeVarsInfo            -- Free var info for the scope of the binding
             -> [Id]
             -> (Id,CoreExpr)
             -> LneM (StgRhs, FreeVarsInfo, LiveInfo, EscVarsSet)

coreToStgRhs scope_fv_info binders (bndr, rhs) = do
    (new_rhs, rhs_fvs, rhs_escs) <- coreToStgExpr rhs
    lv_info <- freeVarsToLiveVars (binders `minusFVBinders` rhs_fvs)
    return (mkStgRhs rhs_fvs (mkSRT lv_info) bndr bndr_info new_rhs,
            rhs_fvs, lv_info, rhs_escs)
  where
    bndr_info = lookupFVInfo scope_fv_info bndr
-- | Build the RHS for a nested (non-top-level) binding.  Nested
-- constructor applications are never updatable, so the updatable
-- predicate is constantly False.
mkStgRhs :: FreeVarsInfo -> SRT -> Id -> StgBinderInfo -> StgExpr -> StgRhs
mkStgRhs = mkStgRhs' (\_ _ -> False)
-- | Shared worker for 'mkStgRhs' and 'mkTopStgRhs'.  Lambdas become
-- re-entrant closures; (non-updatable) saturated constructor
-- applications become StgRhsCon; everything else is a thunk whose
-- update flag depends on the binder's demand info.
mkStgRhs' :: (DataCon -> [StgArg] -> Bool)
          -> FreeVarsInfo -> SRT -> Id -> StgBinderInfo -> StgExpr -> StgRhs
mkStgRhs' con_updateable rhs_fvs srt bndr binder_info rhs
  | StgLam bndrs body <- rhs
  = StgRhsClosure noCCS binder_info
                  (getFVs rhs_fvs)
                  ReEntrant
                  srt bndrs body
  | StgConApp con args <- unticked_rhs
  , not (con_updateable con args)
  = StgRhsCon noCCS con args
  | otherwise
  = StgRhsClosure noCCS binder_info
                  (getFVs rhs_fvs)
                  upd_flag srt [] rhs
  where
    (_, unticked_rhs) = stripStgTicksTop (not . tickishIsCode) rhs

    -- A thunk demanded at most once need not be updated after entry
    upd_flag | isSingleUsed (idDemandInfo bndr) = SingleEntry
             | otherwise                        = Updatable

{-
    SDM: disabled.  Eval/Apply can't handle functions with arity zero very
    well; and making these into simple non-updatable thunks breaks other
    assumptions (namely that they will be entered only once).

    upd_flag | isPAP env rhs  = ReEntrant
             | otherwise      = Updatable

-- Detect thunks which will reduce immediately to PAPs, and make them
-- non-updatable.  This has several advantages:
--
--         - the non-updatable thunk behaves exactly like the PAP,
--
--         - the thunk is more efficient to enter, because it is
--           specialised to the task.
--
--         - we save one update frame, one stg_update_PAP, one update
--           and lots of PAP_enters.
--
--         - in the case where the thunk is top-level, we save building
--           a black hole and futhermore the thunk isn't considered to
--           be a CAF any more, so it doesn't appear in any SRTs.
--
-- We do it here, because the arity information is accurate, and we need
-- to do it before the SRT pass to save the SRT entries associated with
-- any top-level PAPs.

isPAP env (StgApp f args) = listLengthCmp args arity == LT -- idArity f > length args
  where
    arity = stgArity f (lookupBinding env f)
isPAP env _               = False
-}

{- ToDo:
          upd = if isOnceDem dem
                    then (if isNotTop toplev
                            then SingleEntry    -- HA!  Paydirt for "dem"
                            else
                     (if debugIsOn then trace "WARNING: SE CAFs unsupported, forcing UPD instead" else id) $
                     Updatable)
                else Updatable
        -- For now we forbid SingleEntry CAFs; they tickle the
        -- ASSERT in rts/Storage.c line 215 at newCAF() re mut_link,
        -- and I don't understand why.  There's only one SE_CAF (well,
        -- only one that tickled a great gaping bug in an earlier attempt
        -- at ClosureInfo.getEntryConvention) in the whole of nofib,
        -- specifically Main.lvl6 in spectral/cryptarithm2.
        -- So no great loss.  KSW 2000-07.
-}
-- ---------------------------------------------------------------------------
-- A little monad for this let-no-escaping pass
-- ---------------------------------------------------------------------------
-- There's a lot of stuff to pass around, so we use this LneM monad to
-- help. All the stuff here is only passed *down*.
-- | The let-no-escape monad: a reader over the binding environment and
-- the variables/CAFs live in the continuation.  Everything is passed
-- *down*; nothing is accumulated.
newtype LneM a = LneM
  { unLneM :: IdEnv HowBound
           -> LiveInfo              -- Vars and CAFs live in continuation
           -> a
  }

type LiveInfo = (StgLiveVars, -- Dynamic live variables;
                              -- i.e. ones with a nested (non-top-level) binding
                 CafSet)      -- Static live variables;
                              -- i.e. top-level variables that are CAFs or refer to them

-- | Variables that escape (cannot be let-no-escaped).
type EscVarsSet = IdSet
-- | Top-level Ids that are CAFs or refer to CAFs.
type CafSet     = IdSet
-- | How a variable in scope was bound; drives occurrence/liveness
-- classification during the conversion.
data HowBound
  = ImportBound         -- Used only as a response to lookupBinding; never
                        -- exists in the range of the (IdEnv HowBound)

  | LetBound            -- A let(rec) in this module
        LetInfo         -- Whether top level or nested
        Arity           -- Its arity (local Ids don't have arity info at this point)

  | LambdaBound         -- Used for both lambda and case

data LetInfo
  = TopLet              -- top level things
  | NestedLet LiveInfo  -- For nested things, what is live if this
                        -- thing is live?  Invariant: the binder
                        -- itself is always a member of
                        -- the dynamic set of its own LiveInfo
-- | True for let(rec)-bound variables, whether top level or nested.
isLetBound :: HowBound -> Bool
isLetBound hb = case hb of
                  LetBound _ _ -> True
                  _            -> False
-- | True for things bound at the top level, including imported Ids.
topLevelBound :: HowBound -> Bool
topLevelBound hb = case hb of
                     ImportBound       -> True
                     LetBound TopLet _ -> True
                     _                 -> False
-- For a let(rec)-bound variable, x, we record LiveInfo, the set of
-- variables that are live if x is live. This LiveInfo comprises
-- (a) dynamic live variables (ones with a non-top-level binding)
-- (b) static live variabes (CAFs or things that refer to CAFs)
--
-- For "normal" variables (a) is just x alone. If x is a let-no-escaped
-- variable then x is represented by a code pointer and a stack pointer
-- (well, one for each stack). So all of the variables needed in the
-- execution of x are live if x is, and are therefore recorded in the
-- LetBound constructor; x itself *is* included.
--
-- The set of dynamic live variables is guaranteed ot have no further
-- let-no-escaped variables in it.
-- | No live variables and no live CAFs.
emptyLiveInfo :: LiveInfo
emptyLiveInfo = (emptyVarSet,emptyVarSet)

-- | A single live dynamic (nested-bound) variable.
unitLiveVar :: Id -> LiveInfo
unitLiveVar lv = (unitVarSet lv, emptyVarSet)

-- | A single live static CAF.
unitLiveCaf :: Id -> LiveInfo
unitLiveCaf caf = (emptyVarSet, unitVarSet caf)

-- | Add one dynamic live variable.
addLiveVar :: LiveInfo -> Id -> LiveInfo
addLiveVar (lvs, cafs) id = (lvs `extendVarSet` id, cafs)

-- | Component-wise union of two LiveInfos.
unionLiveInfo :: LiveInfo -> LiveInfo -> LiveInfo
unionLiveInfo (lv1,caf1) (lv2,caf2) = (lv1 `unionVarSet` lv2, caf1 `unionVarSet` caf2)

-- | The SRT is exactly the set of live CAFs.
mkSRT :: LiveInfo -> SRT
mkSRT (_, cafs) = SRTEntries cafs

-- | Project out just the dynamic live variables.
getLiveVars :: LiveInfo -> StgLiveVars
getLiveVars (lvs, _) = lvs
-- The std monad functions:
-- | Run an LneM computation with the given environment and an empty
-- live-in-continuation set.
initLne :: IdEnv HowBound -> LneM a -> a
initLne env m = unLneM m env emptyLiveInfo

{-# INLINE thenLne #-}
{-# INLINE returnLne #-}

-- | Monadic return: ignore both pieces of reader state.
returnLne :: a -> LneM a
returnLne e = LneM $ \_ _ -> e

-- | Monadic bind: thread the read-only environment and live info
-- through both computations.
thenLne :: LneM a -> (a -> LneM b) -> LneM b
thenLne m k = LneM $ \env lvs_cont
  -> unLneM (k (unLneM m env lvs_cont)) env lvs_cont
instance Functor LneM where
    fmap = liftM

instance Applicative LneM where
    pure  = returnLne
    (<*>) = ap

instance Monad LneM where
    return = pure
    (>>=)  = thenLne

-- MonadFix is what makes all the knot-tying (mfix) in this module work:
-- the result is fed back into the computation lazily.
instance MonadFix LneM where
    mfix expr = LneM $ \env lvs_cont ->
                       let result = unLneM (expr result) env lvs_cont
                       in  result
-- Functions specific to this monad:
-- | Read the variables/CAFs live in the continuation.
getVarsLiveInCont :: LneM LiveInfo
getVarsLiveInCont = LneM $ \_env lvs_cont -> lvs_cont

-- | Locally replace the live-in-continuation info.
setVarsLiveInCont :: LiveInfo -> LneM a -> LneM a
setVarsLiveInCont new_lvs_cont expr
  = LneM $ \env _lvs_cont
  -> unLneM expr env new_lvs_cont

-- | Locally extend the binding environment.
extendVarEnvLne :: [(Id, HowBound)] -> LneM a -> LneM a
extendVarEnvLne ids_w_howbound expr
  = LneM $ \env lvs_cont
  -> unLneM expr (extendVarEnvList env ids_w_howbound) lvs_cont

-- | Look up how a variable was bound (ImportBound if absent).
lookupVarLne :: Id -> LneM HowBound
lookupVarLne v = LneM $ \env _lvs_cont -> lookupBinding env v

lookupBinding :: IdEnv HowBound -> Id -> HowBound
lookupBinding env v = case lookupVarEnv env v of
                        Just xx -> xx
                        Nothing -> ASSERT2( isGlobalId v, ppr v ) ImportBound
-- The result of lookupLiveVarsForSet, a set of live variables, is
-- only ever tacked onto a decorated expression. It is never used as
-- the basis of a control decision, which might give a black hole.
-- | Turn free-variable info into live-variable info, unioning in the
-- continuation's liveness.  The result is only ever attached to the
-- decorated tree, never scrutinised (no black holes).
freeVarsToLiveVars :: FreeVarsInfo -> LneM LiveInfo
freeVarsToLiveVars fvs = LneM freeVarsToLiveVars'
  where
    freeVarsToLiveVars' _env live_in_cont = live_info
      where
        live_info    = foldr unionLiveInfo live_in_cont lvs_from_fvs
        lvs_from_fvs = map do_one (allFreeIds fvs)

        do_one (v, how_bound)
          = case how_bound of
              ImportBound -> unitLiveCaf v      -- Only CAF imports are
                                                -- recorded in fvs
              LetBound TopLet _
                | mayHaveCafRefs (idCafInfo v) -> unitLiveCaf v
                | otherwise                    -> emptyLiveInfo

              LetBound (NestedLet lvs) _ -> lvs -- lvs already contains v
                                                -- (see the invariant on NestedLet)

              _lambda_or_case_binding -> unitLiveVar v -- Bound by lambda or case
-- ---------------------------------------------------------------------------
-- Free variable information
-- ---------------------------------------------------------------------------
-- | Free-variable map: each free Var with how it was bound and how it
-- occurs.
type FreeVarsInfo = VarEnv (Var, HowBound, StgBinderInfo)

        -- The Var is so we can gather up the free variables
        -- as a set.
        --
        -- The HowBound info just saves repeated lookups;
        -- we look up just once when we encounter the occurrence.
        -- INVARIANT: Any ImportBound Ids are HaveCafRef Ids
        --            Imported Ids without CAF refs are simply
        --            not put in the FreeVarsInfo for an expression.
        --            See singletonFVInfo and freeVarsToLiveVars
        --
        -- StgBinderInfo records how it occurs; notably, we
        -- are interested in whether it only occurs in saturated
        -- applications, because then we don't need to build a
        -- curried version.
        -- If f is mapped to noBinderInfo, that means
        -- that f *is* mentioned (else it wouldn't be in the
        -- IdEnv at all), but perhaps in an unsaturated applications.
        --
        -- All case/lambda-bound things are also mapped to
        -- noBinderInfo, since we aren't interested in their
        -- occurrence info.
        --
        -- For ILX we track free var info for type variables too;
        -- hence VarEnv not IdEnv

-- | The empty free-variable map.
emptyFVInfo :: FreeVarsInfo
emptyFVInfo = emptyVarEnv
-- | A one-entry free-variable map for the given occurrence.
singletonFVInfo :: Id -> HowBound -> StgBinderInfo -> FreeVarsInfo
-- Don't record non-CAF imports at all, to keep free-var sets small
singletonFVInfo id ImportBound info
   | mayHaveCafRefs (idCafInfo id) = unitVarEnv id (id, ImportBound, info)
   | otherwise                     = emptyVarEnv
singletonFVInfo id how_bound info  = unitVarEnv id (id, how_bound, info)
-- | Union two free-variable maps, combining the occurrence info of
-- variables present in both.
unionFVInfo :: FreeVarsInfo -> FreeVarsInfo -> FreeVarsInfo
unionFVInfo = plusVarEnv_C plusFVInfo

-- | Union of an arbitrary list of free-variable maps.
unionFVInfos :: [FreeVarsInfo] -> FreeVarsInfo
unionFVInfos = foldr unionFVInfo emptyFVInfo
-- | Remove a list of binders from a free-variable map.
minusFVBinders :: [Id] -> FreeVarsInfo -> FreeVarsInfo
minusFVBinders vs fv = foldr minusFVBinder fv vs

-- | Remove a single binder.
minusFVBinder :: Id -> FreeVarsInfo -> FreeVarsInfo
minusFVBinder v fv = fv `delVarEnv` v
        -- When removing a binder, remember to add its type variables
        -- c.f. CoreFVs.delBinderFV

-- | Membership test on a free-variable map.
elementOfFVInfo :: Id -> FreeVarsInfo -> Bool
elementOfFVInfo id fvs = isJust (lookupVarEnv fvs id)
lookupFVInfo :: FreeVarsInfo -> Id -> StgBinderInfo
-- Find how the given Id is used.
-- Externally visible things may be used any old how
lookupFVInfo fvs id
  | isExternalName (idName id) = noBinderInfo
  | otherwise = case lookupVarEnv fvs id of
                        Nothing         -> noBinderInfo
                        Just (_,_,info) -> info
allFreeIds :: FreeVarsInfo -> [(Id,HowBound)]   -- Both top level and non-top-level Ids
allFreeIds fvs = ASSERT( all (isId . fst) ids ) ids
      where
        ids = [(id,how_bound) | (id,how_bound,_) <- varEnvElts fvs]

-- Non-top-level things only, both type variables and ids
getFVs :: FreeVarsInfo -> [Var]
getFVs fvs = [id | (id, how_bound, _) <- varEnvElts fvs,
                    not (topLevelBound how_bound) ]

-- | The non-top-level free variables, as a set.
getFVSet :: FreeVarsInfo -> VarSet
getFVSet fvs = mkVarSet (getFVs fvs)
-- | Combine two occurrence records for the same variable; the binding
-- info must agree, only the StgBinderInfo is merged.
plusFVInfo :: (Var, HowBound, StgBinderInfo)
           -> (Var, HowBound, StgBinderInfo)
           -> (Var, HowBound, StgBinderInfo)
plusFVInfo (id1,hb1,info1) (id2,hb2,info2)
  = ASSERT(id1 == id2 && hb1 `check_eq_how_bound` hb2)
    (id1, hb1, combineStgBinderInfo info1 info2)
-- The HowBound info for a variable in the FVInfo should be consistent
-- (equality up to the LiveInfo inside NestedLet, which is ignored).
check_eq_how_bound :: HowBound -> HowBound -> Bool
check_eq_how_bound ImportBound        ImportBound        = True
check_eq_how_bound LambdaBound        LambdaBound        = True
check_eq_how_bound (LetBound li1 ar1) (LetBound li2 ar2) = ar1 == ar2 && check_eq_li li1 li2
check_eq_how_bound _                  _                  = False

check_eq_li :: LetInfo -> LetInfo -> Bool
check_eq_li (NestedLet _) (NestedLet _) = True
check_eq_li TopLet        TopLet        = True
check_eq_li _             _             = False
-- Misc.
-- | Keep only the value binders (Ids); type variables have no STG
-- representation and are dropped.
filterStgBinders :: [Var] -> [Var]
filterStgBinders bndrs = [b | b <- bndrs, isId b]
-- | Like CoreSyn.collectBinders, but also looks through casts.
myCollectBinders :: Expr Var -> ([Var], Expr Var)
myCollectBinders expr
  = go [] expr
  where
    go bs (Lam b e)  = go (b:bs) e
    go bs (Cast e _) = go bs e
    go bs e          = (reverse bs, e)
-- | Decompose an application into its head variable, arguments and
-- any ticks encountered, looking through casts and big lambdas.
myCollectArgs :: CoreExpr -> (Id, [CoreArg], [Tickish Id])
        -- We assume that we only have variables
        -- in the function position by now
myCollectArgs expr
  = go expr [] []
  where
    go (Var v)    as ts = (v, as, ts)
    go (App f a)  as ts = go f (a:as) ts
    go (Tick t e) as ts = ASSERT( all isTypeArg as )
                          go e as (t:ts) -- ticks can appear in type apps
    go (Cast e _) as ts = go e as ts
    go (Lam b e)  as ts
       | isTyVar b      = go e as ts -- Note [Collect args]
    go _ _ _            = pprPanic "CoreToStg.myCollectArgs" (ppr expr)
-- Note [Collect args]
-- ~~~~~~~~~~~~~~~~~~~
--
-- This big-lambda case occurred following a rather obscure eta expansion.
-- It all seems a bit yukky to me.
-- | The arity to use for a function occurrence: let-bound things carry
-- the manifest arity recorded at binding time, imports fall back to
-- their IdInfo arity, and lambda/case-bound variables count as arity 0.
stgArity :: Id -> HowBound -> Arity
stgArity f how_bound = case how_bound of
                         LetBound _ arity -> arity
                         ImportBound      -> idArity f
                         LambdaBound      -> 0
| AlexanderPankiv/ghc | compiler/stgSyn/CoreToStg.hs | bsd-3-clause | 46,841 | 0 | 21 | 14,193 | 7,749 | 4,220 | 3,529 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor for the "Requester" add-on (locale sr-SP).
     Declares the map file, table of contents, index, full-text search
     engine and favourites view used by the embedded help viewer. -->
<helpset version="2.0" xml:lang="sr-SP">
  <title>Requester</title>
  <maps>
    <homeID>requester</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
    JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset> | kingthorin/zap-extensions | addOns/requester/src/main/javahelp/help_sr_SP/helpset_sr_SP.hs | apache-2.0 | 960 | 77 | 66 | 155 | 404 | 205 | 199 | -1 | -1 |
-- Warning-test fixture: the bindings below use only 'mapM' (and 'True'
-- via GHC.Base, since Prelude is hidden), so GHC should flag 'Monad'
-- and 'return' as redundant imports but keep the GHC.Base import.
module ShouldCompile where
import Prelude ()
import Control.Monad( Monad(return), mapM )
-- Should report Monad and return as unused imports
import GHC.Base
-- But not their import from here
x = True
y x = mapM
| frantisekfarka/ghc-dsi | testsuite/tests/module/mod176.hs | bsd-3-clause | 217 | 0 | 6 | 42 | 47 | 30 | 17 | 6 | 1 |
{-# LANGUAGE NamedWildCards, ScopedTypeVariables #-}
-- Test fixture: mixes the named wildcard w_ with anonymous wildcards
-- in a partial type signature, exercising GHC's wildcard name tidying.
module TidyClash2 where
barry :: forall w_. _ -> _ -> w_
barry (x :: _) (y :: _) = undefined :: _
| wxwxwwxxx/ghc | testsuite/tests/partial-sigs/should_fail/TidyClash2.hs | bsd-3-clause | 152 | 0 | 7 | 29 | 50 | 29 | 21 | 4 | 1 |
-- Matricula 1: A00368770
-- Matricula 2: A01273613
-- 1.- Mediana calcula la mediana de 5 argumentos
-- que sean numeros enteros, regresa como valor el numero de en medio
--funcion principal
-- | Median of five integers: the value that would occupy the middle
-- position if the arguments were sorted.  The previous version
-- composed medians of three, which is not the median of five
-- (e.g. mediana 1 2 3 100 200 gave 100 instead of 3).
mediana :: Integer -> Integer -> Integer -> Integer -> Integer -> Integer
mediana a b c d e = isort [a, b, c, d, e] !! 2
  where
    -- local insertion sort, so no Data.List import is needed
    isort = foldr ins []
    ins x [] = [x]
    ins x (y:ys)
      | x <= y    = x : y : ys
      | otherwise = y : ins x ys

-- | Median (middle value) of three integers, via the classic
-- min/max formula.  The old guard chain missed the case z <= x <= y
-- (numMedio 2 3 1 returned 1 instead of 2).
numMedio :: Integer -> Integer -> Integer -> Integer
numMedio x y z = max (min x y) (min (max x y) z)

-- | True when b lies between a and c, inclusive.
medio :: Integer -> Integer -> Integer -> Bool
medio a b c = a <= b && b <= c
-- | Build a table of @rows@ rows and @cols@ columns, numbering the
-- cells consecutively starting at @start@.  The previous version had
-- no base case (it never terminated) and appended a row ([Integer])
-- onto a table ([[Integer]]), which does not type-check.
tabla :: Integer -> Integer -> Integer -> [[Integer]]
tabla rows cols start
  | rows <= 0 = []
  | otherwise = printRow cols start : tabla (rows - 1) cols (start + cols)

-- | A single row of @n@ consecutive values beginning at @y@.
printRow :: Integer -> Integer -> [Integer]
printRow n y = [y .. y + n - 1]
-- | Element-wise quotient and remainder: the i-th tuple is
-- (div x_i y_i, mod x_i y_i).  The original base case returned [()]
-- (a type error) and was undefined for lists of unequal length;
-- like zipWith, the longer tail is now ignored.
divide :: [Integer] -> [Integer] -> [(Integer, Integer)]
divide (x:xs) (y:ys) = (x `div` y, x `mod` y) : divide xs ys
divide _ _ = []
-- | Insert a value at every possible position of a flat list,
-- producing one list per position.  The original base case @[xs]@
-- was ill-typed and the recursive case never actually inserted the
-- element anywhere.
intercala :: Integer -> [Integer] -> [[Integer]]
intercala e [] = [[e]]
intercala e (x:xs) = (e : x : xs) : map (x:) (intercala e xs)
-- | All permutations of the list: pick each element in turn as the
-- head and permute the remainder.  The local 'borra' removes the
-- first occurrence of an element; the original code called
-- Data.List.delete, which was never imported here.
permutacion :: [Integer] -> [[Integer]]
permutacion [] = [[]]
permutacion xs = concatMap (\x -> map (x:) (permutacion (borra x xs))) xs
  where
    borra _ [] = []
    borra e (y:ys)
      | e == y    = ys
      | otherwise = y : borra e ys
-- | Binary search tree: an inner node A holds a left subtree, a value
-- and a right subtree; V is the empty tree.
data AB t = A (AB t) t (AB t) | V deriving Show

-- | Example search tree used by the exercises below.
ab :: AB Integer
ab = A (A (A V 2 V)
          5
          (A V 7 V))
       8
       (A V
          9
          (A (A V 11 V)
             15
             V))

-- | Level of a value in the search tree (root = level 1), or -1 when
-- the value is absent.  The original signature omitted the element
-- type of AB, the empty case returned a list, and a miss in a
-- subtree was not propagated as -1.
nivel :: AB Integer -> Integer -> Integer
nivel V _ = -1
nivel (A l v r) valor
  | v == valor = 1
  | otherwise  =
      let sub = nivel (if valor < v then l else r) valor
      in if sub == -1 then -1 else 1 + sub

-- | Sorted list of the tree values inside the inclusive range
-- [lo, hi]: an in-order traversal that prunes subtrees which cannot
-- contain in-range values.  The original only ever recursed right
-- when the node was out of range, losing the entire left subtree.
rango :: AB Integer -> Integer -> Integer -> [Integer]
rango V _ _ = []
rango (A l v r) lo hi
  | v < lo    = rango r lo hi
  | v > hi    = rango l lo hi
  | otherwise = rango l lo hi ++ [v] ++ rango r lo hi
-- | Build an n-by-m matrix in which every cell holds the value @v@.
-- The original produced a flat list of n copies of the value,
-- ignoring the column count, which contradicts its own comment.
valmat :: Integer -> Integer -> Integer -> [[Integer]]
valmat v n m = [ [v | _ <- [1 .. m]] | _ <- [1 .. n] ]
-- | n-by-n checkerboard: cells whose coordinate sum is even hold 1,
-- the rest hold 0 (as the original comment describes).  The previous
-- body used Data.Array functions that were never imported and tested
-- i == j, which builds an identity matrix instead.
chess :: Integer -> [[Integer]]
chess n = [ [ if even (i + j) then 1 else 0 | j <- [1 .. n] ] | i <- [1 .. n] ]
-- | Successive suffixes of a list: the list itself, then the list
-- with its head removed, and so on down to the empty list.  The
-- original guard @drop 1 xs@ was not a Bool (ill-typed) and the
-- signature did not return a list of lists as the comment requires.
reducir :: [a] -> [[a]]
reducir [] = [[]]
reducir xs@(_:rest) = xs : reducir rest
| JorgeRubio96/LenguajesdeProgramacion | a00368770_a01273613_tarea5.hs | mit | 3,696 | 14 | 12 | 888 | 1,213 | 656 | 557 | 58 | 2 |
module Ratio (Ratio, (%)) where
-- 9a)
-- | A rational number p/q.  The smart constructor '%' keeps the
-- invariant q > 0, and every 'Num' operation below preserves it.
data Ratio = Ratio
  { p :: Integer
  , q :: Integer }

-- 9b)
-- | Smart constructor: normalise the sign so the denominator is
-- always positive; division by zero is an error.
(%) :: (Integral a, Integral b) => a -> b -> Ratio
(%) _ 0 = error "Division by zero"
(%) x y
  | y > 0     = Ratio (fromIntegral x) (fromIntegral y)
  | otherwise = Ratio (fromIntegral (-x)) (fromIntegral (-y))

-- 9c)
-- | Exact equality by cross-multiplication: a/b == c/d iff a*d == c*b
-- (valid because denominators are positive).  The previous version
-- compared Double quotients, which wrongly equates distinct rationals
-- whose quotients round to the same floating-point value.
instance Eq Ratio where
  x == y = p x * q y == p y * q x

-- 9d)
-- | Exact ordering by cross-multiplication, again relying on the
-- positive-denominator invariant (no sign flip needed).
instance Ord Ratio where
  x <= y = p x * q y <= p y * q x

-- 9e)
instance Num Ratio where
  (*) x y = Ratio (p x * p y) (q x * q y)
  (+) x y = Ratio (p x * q y + p y * q x) (q x * q y)
  (-) x y = (+) x (negate y)
  negate x = Ratio (- p x) (q x)
  abs x = Ratio (abs (p x)) (q x)
  signum x | x < 0 = negate 1
           | x == 0 = 0
           | otherwise = 1
  fromInteger x = (%) x 1
-- 9f)
instance Show Ratio where
show x = show p' ++ " % " ++ show q'
where p' = p x `div` gc
q' = q x `div` gc
gc = gcd (p x) (q x) | kbiscanic/PUH | hw08/Ratio.hs | mit | 1,121 | 1 | 11 | 387 | 625 | 318 | 307 | 28 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
module StdVector
( stdVectorCtx
, instanceStdVector
, CStdVector
, StdVector()
, StdVector.new
, size
, toVector
, pushBack
)
where
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Unsafe as CU
import qualified Language.C.Inline.Context as C
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Types as C
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Storable.Mutable as VSM
import qualified Data.Map.Strict as M
import Control.Monad
import Data.Maybe
import Language.Haskell.TH.Syntax
import Foreign
import Foreign.C
import Language.Haskell.TH
import Data.Proxy
import Control.Exception (mask_)
-- | Phantom type standing for the C++ @std::vector<a>@ on the C side.
data CStdVector a
-- | inline-c context mapping the C++ name @std::vector@ to
-- 'CStdVector', so it can appear in quasiquoted C++ signatures.
stdVectorCtx :: C.Context
stdVectorCtx = C.cppCtx `mappend` C.cppTypePairs [ ("std::vector", [t| CStdVector |]) ]
-- | Haskell handle to a heap-allocated std::vector; freed by the
-- foreign pointer's finalizer (installed in 'new').
newtype StdVector a = StdVector (ForeignPtr (CStdVector a))
-- | Primitive C++ operations; one instance per element type is
-- generated by 'instanceStdVector'.
class HasStdVector a where
  cNew :: IO (Ptr (CStdVector a))
  cDelete :: FunPtr (Ptr (CStdVector a) -> IO ())
  cSize :: Ptr (CStdVector a) -> IO CSize
  cCopyTo :: Ptr (CStdVector a) -> Ptr a -> IO ()
  cPushBack :: a -> Ptr (CStdVector a) -> IO ()
-- | Template-Haskell splice generating a 'HasStdVector' instance for
-- the given C type name (e.g. @"int"@).  The @T@ / @VEC@ substitutions
-- expand, inside the quoted C++, to the element type and to a typed
-- antiquote for a vector pointer respectively.
instanceStdVector :: String -> DecsQ
instanceStdVector cType = fmap concat $ sequence
  [ C.include "<vector>"
  , C.include "<algorithm>"
  , C.substitute
    [ ( "T", \_ -> cType )
    , ( "VEC", \var -> "$(std::vector<" ++ cType ++ ">* " ++ var ++ ")" )
    ] [d|
      instance HasStdVector $(C.getHaskellType False cType) where
        cNew = [CU.exp| std::vector<@T()>* { new std::vector<@T()>() } |]
        cDelete = [C.funPtr| void deleteStdVector(std::vector<@T()>* vec) { delete vec; } |]
        cSize vec = [CU.exp| size_t { @VEC(vec)->size() } |]
        cCopyTo vec dstPtr = [CU.block| void {
          const std::vector<@T()>* vec = @VEC(vec);
          std::copy(vec->begin(), vec->end(), $(@T()* dstPtr));
        } |]
        cPushBack value vec = [CU.exp| void { @VEC(vec)->push_back($(@T() value)) } |]
      |]
  ]
-- | Allocate an empty vector; the delete finalizer is attached under
-- 'mask_' so an async exception cannot leak the allocation between
-- 'cNew' and 'newForeignPtr'.
new :: forall a. HasStdVector a => IO (StdVector a)
new = mask_ $ do
  ptr <- cNew @a
  StdVector <$> newForeignPtr cDelete ptr

-- | Number of elements currently stored.
size :: HasStdVector a => StdVector a -> IO Int
size (StdVector fptr) = fromIntegral <$> withForeignPtr fptr cSize

-- | Snapshot the C++ vector into an immutable storable Vector by
-- allocating a mutable buffer of the right size, copying on the C++
-- side, and freezing the result.
toVector :: (HasStdVector a, Storable a) => StdVector a -> IO (VS.Vector a)
toVector stdVec@(StdVector stdVecFPtr) = do
  vecSize <- size stdVec
  hsVec <- VSM.new vecSize
  withForeignPtr stdVecFPtr $ \stdVecPtr ->
    VSM.unsafeWith hsVec $ \hsVecPtr ->
      cCopyTo stdVecPtr hsVecPtr
  VS.unsafeFreeze hsVec
pushBack :: HasStdVector a => StdVector a -> a -> IO ()
pushBack (StdVector fptr) value = withForeignPtr fptr (cPushBack value) | fpco/inline-c | inline-c-cpp/test/StdVector.hs | mit | 2,876 | 6 | 13 | 551 | 869 | 495 | 374 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Wikidata.Query.Label where
import Data.ByteString.Lazy (ByteString)
import Data.List (intercalate)
import Network.HTTP.Conduit
{-- A solution to the problem posted at http://lpaste.net/6471988009620209664
@1HaskellADay solution for 2016-03-01
Okay, revision 3 of today's problem.
I just wanna do this one simple thing, but okay.
So, how do you get the States of the United States from wikidata?
Going to the wikidata REST endpoint and entering a SPARQL query gets me three
MEGABYTES of RDF.
* First of all, I hate RDF.
* Second of all, none of the RDF triples had any of the State labels (names)
* Third of all, none of the RDF triples had any of the State wikidata Q-ids.
Just. Great.
So, I go here:
https://wdq.wmflabs.org/api?q=claim[31:35657]
Machine code, yes, but let's extract from it the original SPARQL query:
SELECT ?state ?stateLabel
WHERE
{
?state wdt:P31 wd:Q35657 .
SERVICE wikibase:label {
bd:serviceParam wikibase:language "en" .
}
}
So you now see the origin of the 31:35657? No? Well those are wikidata
values that indicate property 31 ('is a member of') and q (whatever that
means) 35657 ('State of the United States of America').
Now, how you are supposed to know that, I don't know.
But, now you do, and the world is all tea and crumpets again. Yay.
You should see the underlying machine code that created this API.
machine code, n: C++ templated classes containing all deeply nested
if-statements.
*shudder*
Now the reason why I know this: this man behind the curtain, is that this API
is HELLA slow?!? for C++?!? I mean, like: really?!? But whatever, it's a working
API, which I cannot say for the wikidata REST endpoint (3 megabytes of useless
RDF. Just. great), and it returns ...
... wait for it ...
wikidata identifiers.
What do those identifiers mean?
HELL if I know.
Gosh. Just
One.
Simple.
Thing.
States of the United States of America
And it's taken me three revisions of simplifying an original query
"Give me the lat/longs of particular cities of the U.S.A. States"
To a simpler query against the wikidata REST endpoint:
"Just give me the States of the United States of America"
to finally throwing up my hands and using a third-party hella-slow tool that,
well, at least works ... halfway. Given that you can program in machine code.
I mean, can't wikidata provide a REST endpoint that returns the results of
a query in JSON? that I can see right there in my browser? But no.
Fine.
Okay, so now that you went to this machine code service:
https://wdq.wmflabs.org/api?q=claim[31:35657]
and gotten your query results:
{"status":
{"error":"OK", -- error = "OK" ... great error code. Just. great.
"items":50,
"querytime":"2745ms",
"parsed_query":"CLAIM[31:35657]"
},"items":[99,173,724,759,...]}
or, more sanely, because, I swear ...
--}
-- | Wikidata Q-item identifier (numeric part only, without the 'Q').
type QID = Int
-- | Human-readable state name.
type USState = String
-- | Q-ids of the 50 U.S. states, as returned by the wdq query
-- CLAIM[31:35657] quoted in the commentary above.
states :: [QID]
states = [99,173,724,759,771,779,782,797,812,816,824,829,1166,1204,1207,1211,
          1212,1214,1221,1223,1227,1261,1370,1371,1384,1387,1391,1393,1397,
          1400,1408,1415,1428,1439,1454,1456,1494,1509,1522,1527,1537,1546,
          1553,1558,1581,1588,1603,1612,1649,16551]
{--
NOW that you've done and got ALL THAT!
Now today's Haskell problem.
The above machine code result is very pretty, I'm sure. But I'd like to know
the names of the States that these Q-item ids signify? You know, for giggles.
Wikidata does actually provide an endpoint for this very thing, that is:
What is the label for this Q-item? It's called:
https://www.wikidata.org/w/api.php?format=json&action=wbgetentities&props=labels&ids=Q99&languages=en
(Well, it's called that for Q-id 99, anyway)
So, do me a favor, please. Write a REST client that converts the above States-
as-Ints into States-as-Strings.
*mutter-mutter*
useless RDF REST endpoint, have to do everything by hand now
*mutter-mutter*
--}
-- | wbgetentities endpoint returning the English label for whatever
-- Q-ids are appended after @ids=@.
endpoint :: String
endpoint = "https://www.wikidata.org/w/api.php?format=json&action=wbgetentities&props=labels&languages=en&ids="
-- | Fetch the label JSON for a single Q-id — one HTTP request per id.
stateName :: QID -> IO ByteString
stateName = simpleHttp . (endpoint ++) . qname
-- | A wikidata entity id rendered as text: the number prefixed with 'Q'.
type QName = String

-- | Prefix a showable id with 'Q', e.g. @qname 99 == "Q99"@.
qname :: Show a => a -> QName
qname n = 'Q' : show n
-- please show a list of (QID, JSON containing USState). Thanks.
-- if you don't have Network.HTTP.Conduit it's http-conduit via cabal
-- WARNING: READ THIS!
-- You get back a bunch of JSON. That's fine for today's work. Just get back
-- the JSON, tomorrow we'll parse out the state name from that JSON. Today,
-- just get it back. Baby steps.
-- *Main> stateName 99 ~> "{\"entities\":{\"Q99\":{\"type\":\"item\",
-- \"id\":\"Q99\",\"labels\":{\"en\":{\"language\":\"en\",
-- \"value\":\"California\"}}}},\"success\":1}"
{--
So, ya know:
*Main> mapM stateName states
... and wait a while. In fact, a long while, timing this:
*Main Data.Time.Clock Control.Monad> getCurrentTime >>= \start ->
mapM_ (stateName >=> print) states >> getCurrentTime >>=
print . flip diffUTCTime start
gets us a runoff of State JSON results after 18.421259s
Ouch.
Let's better this with just one call to the endpoint:
--}
-- | Fetch the labels for many Q-ids in a single request by joining
-- the ids with '|' in the query string.
statesNames :: [QID] -> IO ByteString
statesNames qids = simpleHttp (endpoint ++ intercalate "|" (map qname qids))
{--
Timing this definition:
*Main Data.Time.Clock> getCurrentTime >>= \start ->
statesNames states >>= print >> getCurrentTime >>=
print . flip diffUTCTime start
gets the JSON back in 1.08088s, or almost 20x faster! WOOT!
--}
| geophf/1HaskellADay | exercises/HAD/Wikidata/Query/Label.hs | mit | 5,540 | 0 | 8 | 951 | 340 | 217 | 123 | 21 | 1 |
module Compiler.Rum.Internal.ExprParser where
import qualified Data.HashMap.Strict as HM
import Data.List (foldl')
import Text.Megaparsec
import Text.Megaparsec.String
import Compiler.Rum.Internal.AST
-- | Parse the literal string @s@, then skip any trailing whitespace.
strSpace :: String -> Parser String
strSpace s = string s <* space

-- | Parse the literal character @c@, then skip any trailing whitespace.
chSpace :: Char -> Parser Char
chSpace c = char c <* space
-- | Reserved words that may not be used as variable names.
keyWords :: [String]
keyWords = ["skip", "write", "if", "then", "else", "fi", "repeat", "until", "do", "od", "while", "for"]
-- | Variable name: starts with '_', '$' or a letter, continues with
-- alphanumerics or "_-$", must not be a keyword, and consumes the
-- whitespace that follows it.
varNameP :: Parser Variable
varNameP = Variable <$> ((((:) <$> (try (oneOf "_$") <|> letterChar)
            <*> many (try alphaNumChar <|> oneOf "_-$")) >>= \x ->
                if x `elem` keyWords
                    then fail "Can not use Key words as variable names"
                    else pure x
            ) <* space)
-- | Wrap a parser in space-consuming parentheses.
parens :: Parser a -> Parser a
parens = between (chSpace '(') (chSpace ')')
-- | One or more elements separated by @opP@, combined
-- right-associatively with @f@ (the recursive call consumes the whole
-- right-hand side, so @rest@ holds at most one element).
rightAssocsP :: (a -> a -> a) -> Parser op -> Parser a -> Parser a
rightAssocsP f opP elP = do
    el <- elP
    rest <- many (opP *> rightAssocsP f opP elP)
    pure $ if null rest then el else foldl' f el rest
-- | Elements separated by @opP@, combined left-associatively with @f@.
-- Note: @rest@ demands at least one operator, so callers that also
-- accept a lone element wrap this in try/<|> (see arithmeticExprP).
leftAssocsP :: (a -> a -> a) -> Parser op -> Parser a -> Parser a
leftAssocsP f opP elP = elP >>= rest
  where
    rest x = opP *> elP >>= \y -> rest' (f x y)
    rest' x = (opP *> elP >>= \y -> rest' (f x y)) <|> pure x
-- | Atomic expressions: an integer literal (optionally negative),
-- @read()@, a variable, or a parenthesised expression.
basicExprP :: Parser Expression
basicExprP = Const <$> numP
         <|> ReadLn <$ strSpace "read()"
         <|> Var <$> varNameP
         <|> parens exprP
  where
    numP :: Parser Int
    numP = (read <$> (try ((:) <$> char '-' <*> some digitChar) <|> some digitChar)) <* space
-- | Arithmetic with the usual precedence: '^' (right-assoc) binds
-- tightest, then '*'/'/'/'%', then '+'/'-' (both left-assoc).
arithmeticExprP :: Parser Expression
arithmeticExprP = prior3
  where
    powP = rightAssocsP (BinOper Pow) (chSpace '^') basicExprP
    p2 c op = leftAssocsP (BinOper c) (chSpace op) powP
    prior2 = try (p2 Mul '*') <|> try (p2 Div '/') <|> try (p2 Mod '%') <|> powP
    p3 c op = leftAssocsP (BinOper c) (chSpace op) prior2
    prior3 = try (p3 Add '+') <|> try (p3 Sub '-') <|> prior2
-- | Comparison: two arithmetic expressions joined by a relational
-- operator.  Each symbol is parsed together with its constructor, so
-- the partial @\(Just s) -> s@ lambda and the HashMap lookup of the
-- original are no longer needed.
compExprP :: Parser Expression
compExprP = do
    le <- arithmeticExprP
    op <- choice [ c <$ strSpace sym | (sym, c) <- table ]
    re <- arithmeticExprP
    return $ CompOper op le re
  where
    -- two-character symbols come before their one-character prefixes,
    -- matching the order of the original alternative list
    table = [("==", Eq), ("!=", NotEq), ("<=", NotGt), ("<", Lt), (">=", NotLt), (">", Gt)]
-- | A comparison (possibly parenthesised) or a plain arithmetic
-- expression.
binExprP :: Parser Expression
binExprP = try (parens compExprP <|> compExprP) <|> parens arithmeticExprP <|> arithmeticExprP
-- | Boolean combinations: '&&' binds tighter than '||', both built on
-- top of 'binExprP'.
logicExprP :: Parser Expression
logicExprP = try lOr <|> try lAnd <|> binExprP
  where
    lAnd = leftAssocsP (LogicOper And) (strSpace "&&") binExprP
    lOr = leftAssocsP (LogicOper Or) (strSpace "||") lAnd
-- | Top-level expression entry point.
exprP :: Parser Expression
exprP = try (parens exprP) <|> try (parens logicExprP <|> logicExprP) <|> parens binExprP
| vrom911/Compiler | src/Compiler/Rum/Internal/ExprParser.hs | mit | 2,880 | 0 | 17 | 743 | 1,145 | 595 | 550 | 58 | 2 |
module Main where

import Parser
import System.Environment

-- | Parse the first command-line argument with 'readExpr' and print
-- the result ('print' is 'putStrLn . show').  The original crashed
-- with a 'head' pattern failure when no argument was given; now it
-- prints a usage hint instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    (src:_) -> print (readExpr src)
    []      -> putStrLn "usage: pass an expression to evaluate"
| ak1t0/48hscheme | Evaluator.hs | mit | 127 | 0 | 8 | 24 | 43 | 24 | 19 | 5 | 1 |
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-dodgy-exports -fno-warn-unused-imports #-}
-- | Reexports "Text.Read.Lex.Compat"
-- from a globally unique namespace.
module Text.Read.Lex.Compat.Repl (
module Text.Read.Lex.Compat
) where
import "this" Text.Read.Lex.Compat
| haskell-compat/base-compat | base-compat/src/Text/Read/Lex/Compat/Repl.hs | mit | 284 | 0 | 5 | 31 | 31 | 24 | 7 | 5 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -O0 #-}
{-# OPTIONS_GHC -Wno-partial-type-signatures #-}
{-# OPTIONS_GHC -fomit-interface-pragmas #-}
module ZoomHub.Storage.PostgreSQL.Schema.Schema1
( migration,
)
where
import Squeal.PostgreSQL
( Definition,
addColumn,
alterTable,
dropColumn,
nullable,
text,
(&),
)
import Squeal.PostgreSQL.Migration (Migration (..))
import ZoomHub.Storage.PostgreSQL.Schema.Schema0 (Schemas0)
-- | Schema migration: adds the nullable @submitter_email@ text column
-- to the @content@ table, with a matching rollback.
migration :: Migration Definition Schemas0 _
migration =
  Migration
    { name = "2021-02-15-1: Add submitter email",
      up = setup,
      down = teardown
    }

-- | Forward step: ALTER TABLE content ADD COLUMN submitter_email TEXT NULL.
setup :: Definition Schemas0 _
setup = alterTable #content (addColumn #submitter_email (text & nullable))

-- | Reverse step: drop the column added by 'setup'.
teardown :: Definition _ Schemas0
teardown = alterTable #content (dropColumn #submitter_email)
| zoomhub/zoomhub | src/ZoomHub/Storage/PostgreSQL/Schema/Schema1.hs | mit | 995 | 0 | 9 | 167 | 181 | 113 | 68 | 31 | 1 |
module Hunch.Options.CommandLine (
withOptions
, showHelpText
) where
import Options.Applicative
import Data.Monoid (mempty)
import Hunch.Constants
import Hunch.Options.Data
-- | Read exactly one character from an option token, failing on empty
-- or multi-character input.
singleChar :: String -> ReadM Char
singleChar []  = readerError "empty token"
singleChar [c] = return c
singleChar s   = readerError $
                   "token is too long (max 1 char.): '" ++ s ++ "'"
-- | Optional positional INPUT argument.
inputOpt :: Parser (Maybe String)
inputOpt = optional . argument str $ metavar "INPUT"
-- | Remaining positional SOURCES arguments.
sourcesOpt :: Parser [String]
sourcesOpt = many . argument str $ metavar "SOURCES"
-- | --root-dir / -r: base directory for operations (default "").
rootDirOpt :: Parser String
rootDirOpt = strOption $
  long "root-dir"
  <> short 'r'
  <> metavar "DIR"
  <> value ""
  <> help "Base directory for operations"
-- | --templates-path / -p: where template files are looked up.
templatesOpt :: Parser String
templatesOpt = strOption $
  long "templates-path"
  <> short 'p'
  <> metavar "DIR"
  <> value ""
  <> help "Base directory for templates files lookup"
-- | --delimiter / -d: separator between names in sources (default ",").
delimOpt :: Parser String
delimOpt = strOption $
  long "delimiter"
  <> short 'd'
  <> metavar "STRING"
  <> value ","
  <> help "Delimiter between names in sources"
  <> showDefault
-- | --number-token / -t: single char replaced by the counter (via 'singleChar').
sigilOpt :: Parser Char
sigilOpt = option (str >>= singleChar) $
  long "number-token"
  <> short 't'
  <> metavar "CHAR"
  <> value '$'
  <> help "Token that will be replaced in duplicate names by an incrementing counter"
  <> showDefault
-- | --start-at / -n: first value of the incrementing counter.
startAtOpt :: Parser Int
startAtOpt = option auto $
  long "start-at"
  <> short 'n'
  <> metavar "INT"
  <> value 0
  <> help "First number used in the incrementing counter (see --number-token)"
  <> showDefault
-- | --override / -o flag.
overrideOpt :: Parser Bool
overrideOpt = switch $
  long "override"
  <> short 'o'
  <> help "Override existing files"
-- | --no-check / -c flag.
noChecksOpt :: Parser Bool
noChecksOpt = switch $
  long "no-check"
  <> short 'c'
  <> help "Do not check input for invalid names or not found templates"
-- | --verbose / -v flag.
verboseOpt :: Parser Bool
verboseOpt = switch $
  long "verbose"
  <> short 'v'
  <> help "Print informations on actions"
-- | --simulate / -s flag: dry-run, only print the tree.
simulateOpt :: Parser Bool
simulateOpt = switch $
  long "simulate"
  <> short 's'
  <> help "Only print the visual representation of the input"
-- | --version flag.
versionOpt :: Parser Bool
versionOpt = switch $
  long "version"
  <> help "Print version information. Can be coupled with --verbose"
-- | Assemble every option parser into the 'Options' record; the order
-- of the applicative chain must match the constructor's field order.
parseOptions :: Parser Options
parseOptions = Options <$>
  inputOpt
  <*> sourcesOpt
  <*> rootDirOpt
  <*> templatesOpt
  <*> delimOpt
  <*> sigilOpt
  <*> startAtOpt
  <*> overrideOpt
  <*> noChecksOpt
  <*> verboseOpt
  <*> simulateOpt
  <*> versionOpt
-- | Parse the command line and hand the resulting options to the
-- given action.
withOptions :: (Options -> IO ()) -> IO ()
withOptions act = execParser (withInfo parseOptions) >>= act
-- | Attach --help support, header, footer and description to a
-- parser; parse failures exit with code 128.
withInfo :: Parser Options -> ParserInfo Options
withInfo opts = info (helper <*> opts) $
  fullDesc
  <> header (projectName ++ " - " ++ projectDesc)
  <> progDesc ""
  <> footer ("Maintainer: " ++ maintainerInfo)
  <> failureCode 128

-- | Print the full help text, as if parsing had failed with --help.
showHelpText :: IO ()
showHelpText = handleParseResult $ Failure failure
  where
    failure = parserFailure pprefs pinfo ShowHelpText mempty
    pprefs = prefs mempty
    pinfo = withInfo parseOptions
| loganbraga/hunch | app/Hunch/Options/CommandLine.hs | mit | 3,247 | 0 | 16 | 843 | 853 | 411 | 442 | 106 | 1 |
module Strategy.Evaluation
(
winningRows,
evaluate
) where
import Control.Monad (join)
import Data.Semigroup (Semigroup (..))
import qualified Data.Vector as V

import Board
-- | Auxiliary pair of numbers: the evaluation function returns the
-- scores for the player and their adversary together.
newtype NumPair a = NumPair { tuple :: (a, a) }

-- | Component-wise addition of two 'NumPair's.
plus :: Num a => NumPair a -> NumPair a -> NumPair a
(NumPair (a, b)) `plus` (NumPair (c, d)) = NumPair (a+c, b+d)

-- | The identity element for 'plus': a pair of zeroes.
zeroPair :: Num a => NumPair a
zeroPair = NumPair (0, 0)

-- 'NumPair a' is a monoid: 'plus' is associative and 'zeroPair' is its
-- identity, so lists of pairs can be reduced with foldMap/mconcat.
-- Since GHC 8.4 Semigroup is a superclass of Monoid, so the binary
-- operation must live in a Semigroup instance; the original defined
-- 'mappend' directly, which no longer compiles on modern GHC.
instance Num a => Semigroup (NumPair a) where
    (<>) = plus

instance Num a => Monoid (NumPair a) where
    mempty = zeroPair
-- |Evaluates the "goodness" of a board for a given player.
-- This function can be paraphrased as "the number of combinations of 4 adjacent
-- squares* that have at least one piece of the player, and none of the
-- adversary."
-- * Here, "adjacent" means that both squares are next to each other
-- horizontally, vertically or diagonally.
-- Returns a pair containing the evaluations for the given player and their
-- adversary, respectively.
-- | Evaluates the "goodness" of a board for a given player: counts,
-- over every run of 4 adjacent squares (in rows, columns and
-- diagonals), how valuable that run is to each side.  Returns the
-- (player, adversary) totals.
winningRows :: Player -> Board -> (Int, Int)
winningRows pl board = tuple . winningRows' $ combs
  where -- all the rows, columns and diagonals of the board
        combs = join $ (V.toList.) <$> [id, extractCols, extractDiagonals]
                                   <*> pure board
        -- map 'countRow' over every line and mconcat the NumPair results
        winningRows' = foldMap countRow
        -- score one line by scoring each of its windows of 4
        countRow = foldMap countFour . groupsOfFour
        -- value a window of 4 squares by how many pieces of each
        -- player it contains (unused counts are matched with _ to
        -- avoid the shadowed/unused bindings of the original)
        countFour f =
            let myPieceCount = howMany pl f
                opponentPieceCount = howMany (nextPlayer pl) f
            in case (myPieceCount, opponentPieceCount) of
                -- four of a kind wins the game: huge score
                (4, 0) -> NumPair (10000, 0)
                (0, 4) -> NumPair (0, 10000)
                -- an empty window favours neither player
                (0, 0) -> zeroPair
                -- a window held by one player alone is a potential win
                (_, 0) -> NumPair (1, 0)
                (0, _) -> NumPair (0, 1)
                -- mixed windows are dead for both players
                (_, _) -> zeroPair
        -- sliding windows of 4 consecutive squares
        groupsOfFour row | V.length row < 4 = V.empty
                         | otherwise = V.cons (V.take 4 row)
                                              (groupsOfFour (V.tail row))
        -- number of pieces of a given player in a vector of squares
        howMany piece = V.length . V.filter (== Just piece)
-- |The heuristic function, as pointed in the paper from
-- http://www.ics.uci.edu/~jlam2/connectk.pdf,
-- should be a linear combination of the 'winningRows' function calculated
-- for each of the players.
-- The 'Int' parameters to this function are the coefficients for said linear
-- combination.
-- | Heuristic score: a linear combination of the 'winningRows'
-- counts for the player and the opponent, weighted by the two
-- coefficients.
evaluate :: Player -> Int -> Int -> Board -> Int
evaluate pl plCoef opCoef board =
    let (winPl, winOp) = winningRows pl board
    in  plCoef * winPl + opCoef * winOp
| joslugd/connect4-haskell | src/Strategy/Evaluation.hs | mit | 4,617 | 0 | 13 | 1,353 | 654 | 372 | 282 | 39 | 6 |
module Sproxy.Logging
( LogLevel(..)
, debug
, error
, info
, level
, start
, warn
) where
import Prelude hiding (error)
import Control.Applicative (empty)
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
import Control.Monad (forever, when)
import Data.Aeson (FromJSON, ToJSON)
import qualified Data.Aeson as JSON
import Data.Char (toLower)
import Data.IORef (IORef, newIORef, readIORef, writeIORef)
import qualified Data.Text as T
import System.IO (hPrint, stderr)
import System.IO.Unsafe (unsafePerformIO)
import Text.Read (readMaybe)
-- | Start the background logger.  With 'None' logging stays disabled;
-- otherwise the requested level is stored and a thread is forked that
-- prints every queued 'Message' to stderr forever.
start :: LogLevel -> IO ()
start None = return ()
start lvl = do
  writeIORef logLevel lvl
  ch <- readIORef chanRef
  _ <- forkIO . forever $ readChan ch >>= hPrint stderr
  return ()
-- | Queue a message at severity Info.
info :: String -> IO ()
info msg = send (Message Info msg)

-- | Queue a message at severity Warning.
warn :: String -> IO ()
warn msg = send (Message Warning msg)

-- | Queue a message at severity Error (shadows Prelude.error here).
error :: String -> IO ()
error msg = send (Message Error msg)

-- | Queue a message at severity Debug.
debug :: String -> IO ()
debug msg = send (Message Debug msg)
-- | Queue a message if its level is at or below the configured
-- threshold (levels are ordered None < Error < Warning < Info < Debug).
send :: Message -> IO ()
send msg@(Message l _) = do
  lvl <- level
  when (l <= lvl) $ do
    ch <- readIORef chanRef
    writeChan ch msg

-- Global message queue shared by all callers.  NOINLINE is essential:
-- inlining the unsafePerformIO would create more than one channel.
{-# NOINLINE chanRef #-}
chanRef :: IORef (Chan Message)
chanRef = unsafePerformIO (newChan >>= newIORef)

-- Global mutable log level, 'None' until 'start' sets it.
{-# NOINLINE logLevel #-}
logLevel :: IORef LogLevel
logLevel = unsafePerformIO (newIORef None)

-- | Read the currently configured log level.
level :: IO LogLevel
level = readIORef logLevel
-- | Logging severities.  The derived Enum/Ord follow constructor
-- order, so a message passes the @l <= setting@ check in 'send'
-- exactly when its level is enabled: None < Error < Warning < Info < Debug.
data LogLevel
  = None
  | Error
  | Warning
  | Info
  | Debug
  deriving (Enum, Ord, Eq)
-- Render levels as the upper-case names used on stderr and in JSON.
instance Show LogLevel where
  show lvl = case lvl of
    None    -> "NONE"
    Error   -> "ERROR"
    Warning -> "WARN"
    Info    -> "INFO"
    Debug   -> "DEBUG"
-- Parse a level name case-insensitively; unknown names yield no parse.
instance Read LogLevel where
  readsPrec _ s =
    case lookup (map toLower s) table of
      Just lvl -> [(lvl, "")]
      Nothing  -> []
    where
      table =
        [ ("none", None), ("error", Error), ("warn", Warning)
        , ("info", Info), ("debug", Debug) ]
-- Serialise as a JSON string, e.g. Debug -> "DEBUG".
instance ToJSON LogLevel where
  toJSON = JSON.String . T.pack . show

-- Accept only JSON strings, parsed case-insensitively via the Read
-- instance; anything else (or an unknown name) fails the parse.
instance FromJSON LogLevel where
  parseJSON (JSON.String s) =
    maybe
      (fail $ "unknown log level: " ++ show s)
      return
      (readMaybe . T.unpack $ s)
  parseJSON _ = empty
-- A log line: severity plus message text.
data Message =
  Message LogLevel
          String

-- Render as "LEVEL: text", exactly what the logger thread prints.
instance Show Message where
  show (Message lvl txt) = concat [show lvl, ": ", txt]
| zalora/sproxy | src/Sproxy/Logging.hs | mit | 2,306 | 0 | 11 | 525 | 888 | 472 | 416 | 86 | 1 |
-- Copyright (c) Microsoft. All rights reserved.
-- Licensed under the MIT license. See LICENSE file in the project root for full license information.
{-# LANGUAGE QuasiQuotes, OverloadedStrings, RecordWildCards #-}
module Language.Bond.Codegen.Cs.Types_cs
( types_cs
, FieldMapping(..)
, StructMapping(..)
, ConstructorOptions(..)
) where
import Data.Monoid
import Prelude
import Data.Text.Lazy (Text, pack)
import Text.Shakespeare.Text
import Language.Bond.Syntax.Types
import Language.Bond.Syntax.Util
import Language.Bond.Syntax.Internal
import Language.Bond.Util
import Language.Bond.Codegen.TypeMapping
import Language.Bond.Codegen.Util
import qualified Language.Bond.Codegen.Cs.Util as CS
-- | C# representation of schema structs
-- NOTE(review): a single-constructor sum; the case expressions in
-- types_cs match it with a wildcard, so new mappings can be added
-- without touching this type's users elsewhere.
data StructMapping =
    Class                      -- ^ public partial class
    deriving Eq

-- | Representation of schema fields in the generated C# types
data FieldMapping =
    PublicFields |             -- ^ public fields
    Properties |               -- ^ auto-properties
    ReadOnlyProperties         -- ^ auto-properties with private setter
    deriving Eq

-- | Options for how constructors should be generated.
data ConstructorOptions =
    DefaultWithProtectedBase | -- ^ The original bond behavior.
    ConstructorParameters      -- ^ Generate a constructor that takes all the fields as parameters.
    deriving Eq
-- | Codegen template for generating definitions of C# types representing the schema.
types_cs
:: StructMapping -- ^ Specifies how to represent schema structs
-> FieldMapping -- ^ Specifies how to represent schema fields
-> ConstructorOptions -- ^ Specifies the constructors that should be generated
-> MappingContext -> String -> [Import] -> [Declaration] -> (String, Text)
types_cs structMapping fieldMapping constructorOptions cs _ _ declarations = (fileSuffix, [lt|
#{CS.disableCscWarnings}
#{CS.disableReSharperWarnings}
namespace #{csNamespace}
{
using System.Collections.Generic;
#{doubleLineSep 1 typeDefinition declarations}
} // #{csNamespace}
|])
where
idl = MappingContext idlTypeMapping [] [] []
-- C# type
csType = getTypeName cs
csNamespace = sepBy "." toText $ getNamespace cs
access = case structMapping of
_ -> [lt|public |]
fileSuffix = case structMapping of
_ -> "_types.cs"
struct = case structMapping of
_ -> [lt|public partial class |]
typeAttributes s = case structMapping of
_ -> CS.typeAttributes cs s
propertyAttributes f = case structMapping of
Class -> CS.propertyAttributes cs f
baseClass x = [lt|
: #{csType x}|]
-- C# type definition for schema struct
typeDefinition s@Struct {..} = [lt|#{typeAttributes s}#{struct}#{declName}#{params}#{maybe interface baseClass structBase}#{constraints}
{
#{doubleLineSep 2 property structFields}#{constructors}
}|]
where
interface = case structMapping of
_ -> mempty
-- type parameters
params = angles $ sepBy ", " paramName declParams
-- constraints
constraints = CS.paramConstraints declParams
-- default value
csDefault = CS.defaultValue cs
metaFields = filter (isMetaName . fieldType) structFields
noMetaFields = null metaFields
-- constructor: DefaultWithProtectedBase option
defaultWithProtectedBaseConstructor = if noCtor then mempty else [lt|
public #{declName}()
: this("#{getDeclTypeName idl s}", "#{declName}")
{}
protected #{declName}(string fullName, string name)#{baseCtor}
{
#{newlineSep 3 initializer structFields}
}|]
where
noCtor = not callBaseCtor && (fieldMapping == PublicFields && noMetaFields || null structFields)
callBaseCtor = getAny $ optional (foldMapFields metaField) structBase
baseCtor = if not callBaseCtor
then mempty
else [lt|
: base(fullName, name)|]
-- constructor: ConstructorParameters option
constructorWithParameters = if not noMetaFields
then error $ "bond_meta usage in Struct " ++ (show declName) ++ " Field " ++ (show $ fieldName $ head metaFields) ++ " is incompatible with --preview--constructor-parameters"
else if (null baseFieldList)
then [lt|
public #{declName}(
#{commaLineSep 3 paramDecl fieldNameList})
{
#{newlineSep 3 paramBasedInitializer fieldNameList}
}
public #{declName}()
{
#{newlineSep 3 initializer structFields}
}|]
else [lt|
public #{declName}(
// Base class parameters
#{commaLineSep 3 paramDecl (zip baseFieldList uniqueBaseFieldNames)}#{thisParamBlock}
) : base(
#{commaLineSep 4 pack uniqueBaseFieldNames})
{
#{newlineSep 3 paramBasedInitializer (zip structFields uniqueThisFieldNames)}
}
public #{declName}()
{
#{newlineSep 3 initializer structFields}
}|]
thisParamBlock = if null structFields
then mempty
else [lt|,
// This class parameters
#{commaLineSep 3 paramDecl (zip structFields uniqueThisFieldNames)}|]
baseFieldList = concat $ baseFields s
uniqueBaseFieldNames = uniqueNames (map fieldName baseFieldList) []
uniqueThisFieldNames = uniqueNames (map fieldName structFields) uniqueBaseFieldNames
paramDecl (f, n) = [lt|#{csType $ fieldType f} #{n}|]
paramBasedInitializer (f, n) = [lt|this.#{fieldName f} = #{n};|]
fieldNameList = map (\f -> (f, fieldName f)) structFields
constructors = case constructorOptions of
DefaultWithProtectedBase -> defaultWithProtectedBaseConstructor
ConstructorParameters -> constructorWithParameters
-- property or field
property f@Field {..} =
[lt|#{propertyAttributes f}#{new}#{access}#{csType fieldType} #{fieldName}#{autoPropertyOrField}|]
where
autoPropertyOrField = case fieldMapping of
PublicFields -> [lt|#{optional fieldInitializer $ csDefault f};|]
Properties -> [lt| { get; set; }|]
ReadOnlyProperties -> [lt| { get; private set; }|]
fieldInitializer x = [lt| = #{x}|]
new = if isBaseField fieldName structBase then "new " else "" :: String
-- initializers in constructor
initializer f@Field {..} = optional fieldInit $ def f
where
fieldInit x = [lt|#{this fieldName} = #{x};|]
this = if fieldName == "name" || fieldName == "fullName" then ("this." ++) else id
def Field {fieldType = BT_MetaName} = Just "name"
def Field {fieldType = BT_MetaFullName} = Just "fullName"
def x = if fieldMapping == PublicFields then Nothing else csDefault x
-- C# enum definition for schema enum
typeDefinition e@Enum {..} = [lt|#{CS.typeAttributes cs e}public enum #{declName}
{
#{newlineSep 2 constant enumConstants}
}|]
where
-- constant
constant Constant {..} = let value x = [lt| = unchecked((int)#{x})|] in
[lt|#{constantName}#{optional value constantValue},|]
typeDefinition _ = mempty
| jdubrule/bond | compiler/src/Language/Bond/Codegen/Cs/Types_cs.hs | mit | 7,490 | 0 | 15 | 2,054 | 1,133 | 669 | 464 | 98 | 16 |
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
import Data.Map as M
import Data.Ratio
-- Symbolic coordinate names ("X","Y","Z").
type Symbol = String
-- A scalar field: a map from coordinate bindings to a value.
type Array = Hash -> Double
-- Pointwise addition of fields; only (+) is defined, the other Num
-- methods are left to fail at runtime if used.
instance Num Array where
  (f + g)(x) = f x + g x
-- A value is a symbol, a rational, a real, or a nested hash.
data ValueType = VS Symbol | VQ Rational | VR Double | VH Hash
-- Partial Num instance: (+) only handles VQ/VQ; other combinations
-- fall through to a runtime pattern-match failure.
instance Num ValueType where
  (VQ x) + (VQ y) = VQ (x+y)
  fromInteger = VQ . fromInteger
instance Fractional ValueType where
  fromRational = VQ . fromRational
type Hash = M.Map Symbol ValueType
-- | Bind the three coordinates "X","Y","Z" in a hash.
mkH :: (ValueType, ValueType, ValueType) -> Hash -> Hash
mkH (x,y,z) h = insert "X" x $ insert "Y" y $ insert "Z" z h
-- | Central finite difference of a field at half-step offsets.
-- NOTE(review): only the "X" equation exists — calling partial "Y"
-- or partial "Z" (as `div` below does) fails at runtime. The "X"
-- body also only offsets the X coordinate; presumably "Y"/"Z"
-- variants were intended — TODO confirm.
partial :: Symbol -> Array -> Array
partial "X" f h = let i = h ! "X"
                      j = h ! "Y"
                      k = h ! "Z"
                  in (f . mkH (i + 1/2,j,k) ) h - (f . mkH (i - 1/2,j,k) ) h
-- | Divergence of a field: sum of the three partials.
-- NOTE(review): shadows Prelude.div; the Symbol argument c is unused.
div :: Symbol -> Array -> Array
div c a = partial "X" a + partial "Y" a + partial "Z" a
main :: IO ()
main = do
  putStrLn "hello"
| nushio3/formura | attic/Semantics.hs | mit | 938 | 1 | 14 | 273 | 427 | 223 | 204 | 26 | 1 |
{-# LANGUAGE GADTs #-}
module Main where
import Control.Monad.State
data M a = Foo a deriving Show
type Stack = [Int]
pop :: State Stack Int
pop = state $ \(x:xs) -> (x,xs)
push :: Int -> State Stack ()
push a = state $ \xs -> ((),a:xs)
stackManip :: State Stack Int
stackManip = do
push 3
a <- pop
pop
stackManip2 =
push 3 >>= (\s -> push 4)
stackManip3 =
push 3
>>= (\s -> pop)
>>= (\s -> pop)
--stackManip2 =
-- push 3 >>= pop >>= pop
--stackManip2 =
-- (push 3) >>= (push 4)
--
--data T a where
-- D1 :: Int -> T String
-- D2 :: T Bool
-- D3 :: (a,a) -> T [a]
--
--
--
----
----data Either2 a b = Left2 a | Right2 b
----
----type X2 a = Either2 a a
--
---- type F [a] = Set a
----
--class IsSimple a
--instance IsSimple Bool
--instance IsSimple Int
--instance IsSimple Double
--
--data Maybe2 a = Just2 a | Nothing2
--
--data Maybe3 a = Just3 a
-- Maybe3 a = Nothing
--
main = do putStrLn "foo" | brodyberg/Notes | GADT.hsproj/Main.hs | mit | 973 | 0 | 8 | 268 | 241 | 145 | 96 | 21 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Data.Extensible.Sum1 where
import Control.Lens
import Data.Functor.Classes
data (f :||: g) a = InL (f a) | InR (g a) deriving (Eq)
-- | Prism focusing on the left alternative of the functor sum.
_InL :: Prism' ( (f :||: g) a) (f a)
_InL = prism' InL project
  where
    project (InL v) = Just v
    project (InR _) = Nothing

-- | Prism focusing on the right alternative of the functor sum.
_InR :: Prism' ( (f :||: g) a) (g a)
_InR = prism' InR project
  where
    project (InR v) = Just v
    project (InL _) = Nothing
-- Equality through the sum: values are compared only when peek1
-- projects both operands into the same component; a left/right
-- mismatch falls through to the 'otherwise' guard and is unequal.
-- NOTE(review): resolution of peek1 relies on the INCOHERENT Sum1
-- instances below — verify the intended projection is picked.
instance (Eq1 f, Eq1 g) => Eq1 (f :||: g) where
  liftEq fxn x y
    | Just (x :: f a) <- peek1 x,
      Just (y :: f b) <- peek1 y = liftEq fxn x y
    | Just (x :: g a) <- peek1 x,
      Just (y :: g b) <- peek1 y = liftEq fxn x y
    | otherwise = False
-- Ordering through the sum: same-component values delegate to the
-- component's Ord1; a left value sorts before a right value (LT/GT
-- guards). The final 'otherwise = LT' appears unreachable given the
-- four guard combinations — TODO confirm.
instance (Ord1 f, Ord1 g) => Ord1 (f :||: g) where
  liftCompare fxn x y
    | Just (x :: f a) <- peek1 x,
      Just (y :: f b) <- peek1 y = liftCompare fxn x y
    | Just (x :: g a) <- peek1 x,
      Just (y :: g b) <- peek1 y = liftCompare fxn x y
    | Just (x :: f a) <- peek1 x,
      Just (y :: g b) <- peek1 y = LT
    | Just (x :: g a) <- peek1 x,
      Just (y :: f b) <- peek1 y = GT
    | otherwise = LT
-- | Map, fold, traverse and show act on whichever side is present.
instance (Functor f, Functor g) => Functor (f :||: g) where
  fmap fn s = case s of
    InL inner -> InL (fmap fn inner)
    InR inner -> InR (fmap fn inner)

instance (Foldable f, Foldable g) => Foldable (f :||: g) where
  foldMap fn s = case s of
    InL inner -> foldMap fn inner
    InR inner -> foldMap fn inner

instance (Traversable f, Traversable g) => Traversable (f :||: g) where
  traverse fn s = case s of
    InL inner -> fmap InL (traverse fn inner)
    InR inner -> fmap InR (traverse fn inner)

-- | Show is transparent: no constructor tag is printed.
instance (Show (f a), Show (g a)) => Show ((f :||: g) a) where
  show s = case s of
    InL inner -> show inner
    InR inner -> show inner
-- | Membership of a component functor s in an open sum c:
-- peek1 projects out of the sum, lft1 injects into it.
class Sum1 c s where
  peek1 :: c a -> Maybe (s a)
  lft1 :: s a -> c a
-- Constraint alias: w contains the component a.
type (w :>||: a) = (Sum1 w a)
-- Reflexive membership: every functor is a sum of itself.
instance Sum1 f f where
  peek1 = Just
  lft1 = id
-- Rightmost component of a sum. INCOHERENT: overlaps with the
-- recursive instance below; GHC picks whichever matches first.
instance {-# INCOHERENT #-} Sum1 (f :||: g) g where
  peek1 (InR f) = Just f
  peek1 _ = Nothing
  lft1 = InR
-- Recursive membership through the left branch of a sum.
instance {-# INCOHERENT #-} (c :>||: a) => Sum1 (c :||: b) a where
  peek1 (InL x) = peek1 x
  peek1 _ = Nothing
  lft1 = InL . lft1
-- | Case analysis on a binary sum: apply f1 to a left value,
-- f2 to a right value.
ajoin :: (m1 a -> b) -> (m2 a -> b) -> ((m1 :||: m2) a -> b)
ajoin f1 _ (InL m) = f1 m
ajoin _ f2 (InR m) = f2 m
| jadaska/extensible-sp | src/Data/Extensible/Sum1.hs | mit | 2,363 | 2 | 13 | 679 | 1,228 | 614 | 614 | 67 | 2 |
module Print where
import General
import Dictionary
import List (intersperse)
-- printing morphological objects as strings
-- | Render a Str as its alternatives joined with '/'.
prStr :: Str -> String
prStr str = concat (intersperse "/" (unStr str))

-- | Render alternatives separated by " / ", or "*" when there are none.
prAlts :: Str -> String
prAlts ss
  | null alts = "*"
  | otherwise = unwords (intersperse "/" alts)
  where
    alts = unStr ss

-- | One-row inflection table for an invariant word.
consTable :: Str -> Table String
consTable str = [("INVAR", str)]

-- | Like 'consTable', but paired with the default compound attribute.
consTableW :: Str -> [(String,(Attr,Str))]
consTableW str = [("INVAR", (noComp, str))]
-- | Print a parameter table, one 'show'n row per line.
putFun0 :: Param a => (a -> Str) -> IO ()
putFun0 f = putStr (unlines (map show (prTable (table f))))

-- | Print a parameter table as "param : alt1 / alt2" lines.
putFun :: Param a => (a -> Str) -> IO ()
putFun f = putStr (unlines (map renderRow (prTable (table f))))
  where
    renderRow (param, alts) = param ++ " : " ++ prAlts alts
-- print a parameter value without hierarchy (= parentheses)
-- | Drop all parentheses from a parameter value, flattening the hierarchy.
prFlat :: String -> String
prFlat = filter (`notElem` "()")
-- show all values for the first parameter
-- | All alternatives of the value for the first parameter.
prFirstForm :: Param a => Table a -> String
prFirstForm tbl = prStr (firstForm tbl)

-- | One alternative of the first-parameter value (used in the dictionary).
prDictForm :: Param a => Table a -> String
prDictForm tbl = prDictStr (firstForm tbl)

-- | First alternative of a Str, or "NONE" when there is none.
prDictStr :: Str -> String
prDictStr t =
  case unStr t of
    []      -> "NONE"
    (s : _) -> s
-- | Human-readable dump: one paragraph per entry (stem, type,
-- inherent features, then one "param: alternatives" line per form).
prDictionary :: Dictionary -> String
prDictionary = unlines . map (unlines . prOne) . removeAttr where
  prOne (stem, typ, inhs, infl) = stem : typ : unwords inhs :
                 [a ++ ": " ++ prStr s | (a,s) <- infl]
-- | Dump a full-form lexicon: "form:analysis" lines, with the
-- compound attribute appended to each analysis.
prFullFormLex :: FullFormLex -> String
prFullFormLex = concat . map prOne where
  prOne (s,ps) = unlines [s ++ ":" ++ a | a <- map prAttr ps]
-- prOne (s,ps) = s ++ " : " ++ unwords (intersperse "/" (map prAttr ps))
  prAttr (a,ss) = ss ++ prCompAttr a
-- | Fallback rendering of a compound attribute.
prCompAttr :: Attr -> String
prCompAttr a = " (" ++ show a ++ ") " -- should not happen...
-- a parser
{-
-- parse full-form lexicon from the format we print; ignore unparsable lines
pFullFormLex :: String -> FullFormLex
pFullFormLex s = [r | l <- s', Just r <- [pOne (words l)]] where
s' = filter nocomment (lines s) where
nocomment l = case l of
'-':'-':_ -> False -- use -- for comment lines
_ -> True
pOne (s:":":ps) = Just (s, (pPs ps))
pPs ws = pCompAttr p : ps where
(p,ps0) = span (/="/") ws
ps = if null ps0 then [] else pPs (tail ps0)
pCompAttr p = case p of
"(P)" : p' -> (atP, unwords p')
"(WP)" : p' -> (atWP, unwords p')
_ -> (atW, unwords p) -- ignoring values >2
-}
-- generate GF source code
-- N.B. syntax errors result from GF-reserved-word identifiers!
-- | Emit GF resource-module opers for every dictionary entry,
-- numbering them to reduce identifier clashes.
prGFRes :: Dictionary -> String
prGFRes dict = (unlines (map prGFOper (zip [0..] (removeAttr dict))))
-- | One GF oper: factors out the longest common prefix of all forms
-- as a stem bound with Predef.tk, then tabulates the endings.
prGFOper :: (Int,(String, Ident, [Ident], Table Ident)) -> String
prGFOper (i,(oper, ty, inhs, tab0)) = begin ++ " : Str -> " ++ ty ++ " = " ++ bind ++ rec ++ end where
 begin = "oper " ++ (oper ++ "_" ++ show i) -- Reduce the number of name clashes!
 bind = "\\" ++ oper ++ " -> " ++
        "\n let " ++
        stemv ++ " = Predef.tk " ++ show lg1 ++ " " ++ oper ++ " in"
 -- longest common prefix of all forms = the invariant stem
 stem = longestPrefix (unStr (formsInTable tab0))
 stemv = if lg == 0 then "x_" else stem ++ "_" -- to avoid clash with res words
 lg1 = length oper - lg
 lg = length stem
 -- rewrite every form as stem + "ending"
 tab = mapInTable
        (\w -> stemv ++ " + \"" ++ drop lg w ++ "\"") tab0
 rec = "\n {s = " ++ tbl ++
       (if null inhs then "" else " ;\n ") ++
       concat (intersperse " ;\n "
         ["h" ++ show i ++ " = " ++ p | (i,p) <- zip [1..] inhs]
         ) ++
       "\n }"
 tbl = case tab of
   [("INVAR",ss)] -> altsGF ss --- a hack to avoid one-branch tables; name-sensit.
   _ -> "table {\n" ++
        concat (intersperse " ;\n"
          [" "++ a ++ " => "++ altsGFRes b | (a,b) <- tab]
          ) ++
        "\n }"
 end = " ;\n"
-- | Emit a GF grammar: one cat per word class, then fun/lin pairs
-- for every entry.
prGF :: Dictionary -> String
prGF dict = cats ++ (unlines (map prGFRule (zip [0..] (removeAttr dict))))
 where cs = unlines ["cat " ++ c ++ ";" | c <- map fst $ classifyDict dict]
       cats = "\n" ++ cs ++ "\n\n"
-- | One fun declaration plus its lin with an inflection table and
-- inherent features h1..hn.
prGFRule :: (Int,(String, Ident, [Ident], Table Ident)) -> String
prGFRule (i,(id,cat,inhs,tab)) = let name = id ++ "_" ++ show i in
 "fun " ++ name ++ " : " ++ cat ++ " ;\n\n" ++
 "lin " ++ name ++ " = {s = table {\n" ++
 concat (intersperse " ;\n"
   [" "++ a ++ " => "++ altsGF b | (a,b) <- tab]) ++
 (if null inhs then "}" else " };\n ") ++
 concat (intersperse " ;\n "
   ["h" ++ show i ++ " = " ++ p | (i,p) <- zip [1..] inhs]
   ) ++
 "\n} ;\n"
-- two GF modes for free variation; old for GF<0.98
-- | Render free-variation alternatives in GF (>= 0.98) syntax:
-- a single quoted form, or a variants block of quoted forms.
-- Fixed: the original had a stray 'where' at the end of the second
-- case alternative followed by a second 'where' line — a parse error.
altsGF xs = case (unStr xs) of
    [x] -> prQ x
    ys -> "variants"++" {" ++ unwords (intersperse ";" (map prQ ys)) ++ "}"
  where
    prQ s = '"' : s ++ "\""

-- | Old GF (< 0.98) style: the quoted slash-separated alternatives.
altsGFOld = show . prAlts
-- | Free variation in GF resource syntax: bare form or variants block.
altsGFRes xs =
  case unStr xs of
    [single] -> single
    several  -> "variants" ++ " {" ++ unwords (intersperse ";" several) ++ "}"
-- code for XML
-- Identifiers and rendered fragments for the tiny XML pretty-printer.
type TagId = String
type XML = String
type Struct = Bool

-- | Wrap a single line as a fragment list.
string :: String -> [XML]
string line = [line]

-- | Join fragments into one newline-terminated text.
render :: [XML] -> String
render = unlines

-- | Surround children with an open/close tag pair, indenting them one step.
tag :: TagId -> [XML] -> [XML]
tag name children =
  ("<" ++ name ++ ">") : map (' ' :) children ++ ["</" ++ name ++ ">"]

-- | Like 'tag', but with a single attribute on the opening tag.
tagA :: TagId -> (String,String) -> [XML] -> [XML]
tagA name (attr, val) children =
  ("<" ++ name ++ " " ++ attr ++ "=\"" ++ val ++ "\"" ++ ">")
    : map (' ' :) children ++ ["</" ++ name ++ ">"]

-- | A self-closing tag with one attribute.
tagA1 :: TagId -> (String,String) -> XML
tagA1 name (attr, val) =
  "<" ++ name ++ " " ++ attr ++ "=\"" ++ val ++ "\"" ++ " />"
-- | Serialize the dictionary as XML: a lexicon of class elements,
-- each holding entries with inherent features and inflection tables.
prXML :: Dictionary -> String
prXML d = "<?xml version=\"1.0\"?>\n" ++ (render (tag "lexicon" (concat (map (uncurry pr) (classifyDict d)))))
 where
 pr cat entries = tagA "class" ("category",cat) (concat (map (prEntry . noAttr) entries))
 prEntry (stem,_,inhs,tbl) = tag "lexicon_entry" $ tagA1 "dictionary_form" ("value",stem) :(prInh inhs ++ prTabl tbl)
 prInh inhs = map (\s -> tagA1 "inherent" ("value",s)) inhs
 prTabl tbl = tag "inflection_table" $
   concat [tagA "inflection_form" ("pos",a) (map (\s -> tagA1 "variant" ("word",s)) (unStr b)) | (a,b) <- existingForms tbl]
-- code for Xerox LexC
-- | Emit a Xerox LexC lexicon: a Root section with one
-- "form:stem+Tags # ;" line per surface form.
prLEXC :: Dictionary -> String
prLEXC = ("LEXICON Root\n" ++) . (++ "END") . unlines . map (uncurry prLEXCRules) . classifyDict
prLEXCRules :: Ident -> [Entry] -> String
prLEXCRules cat entries = unlines $
    ("\n! category " ++ cat ++ "\n") : (map (prEntry . noAttr) entries)
 where
   prEntry (stem,_,inhs,tbl) =
       concat (map (prForm stem inhs) ([(a,unStr b) | (a,b) <- existingForms tbl]))
   prForm stem inhs (a,b) =
       concat [x ++ ":" ++ stem ++ prTags (a:inhs) ++ " # ;\n" | x <- b]
   -- flatten parameter values into +Tag sequences
   prTags ts =
       concat ["+" ++ w | t <- ts, w <- words (prFlat t)]
   altsLEXC cs =
       unwords $ intersperse " # ;" [ s | s <- cs]
-- code for Xerox Finite State Tool
-- | Emit Xerox XFST definitions: one regex per word class, each a
-- disjunction of {stem}+Tags .x. {surface} transductions.
prXFST :: Dictionary -> String
prXFST = unlines . map (uncurry prXFSTRules) . classifyDict
prXFSTRules :: Ident -> [Entry] -> String
prXFSTRules cat entries = unlines $
    ("define " ++ cat ++ " [") :
    intersperse " |" (map (prEntry . noAttr) entries) ++
    [" ] ;"]
 where
   prEntry (stem,_,inhs,tbl) =
       concat (intersperse " |\n" (map (prForm stem inhs)
               ([(a,unStr b) | (a,b) <- existingForms tbl])))
   prForm stem inhs (a,b) =
       " [ {" ++ stem ++ "}" ++ prTags (a:inhs) ++ " .x. " ++ altsXFST b ++"]"
   -- flatten parameter values into %+Tag sequences
   prTags ts =
       unwords [" %+" ++ w | t <- ts, w <- words (prFlat t)]
   altsXFST cs =
       unwords $ intersperse "|" ["{" ++ s ++ "}" | s <- cs]
-- a "book" with LaTeX tables
-- | A LaTeX "book": one two-column inflection table per entry,
-- one entry per page.
prLatex :: Dictionary -> String
prLatex d = unlines (beginLatex ++ map prLatexTable (removeAttr d) ++ endLatex) where
  beginLatex = ["\\documentclass{report}",
                "\\usepackage{isolatin1}",
                "\\begin{document}"]
  endLatex = ["\\end{document}"]
-- | Header line (ident, category, inherent features) followed by a
-- centered tabular of parameter/forms rows.
prLatexTable :: EntryN -> String
prLatexTable (ident,cat,inhs,tab) =
  unwords ((ident ++ ",") : cat : inhs) ++ "\n" ++
  "\\begin{center}\n\\begin{tabular}{|l|l|}\\hline\n" ++
  unlines [a ++ " & {\\em " ++ prAlts b ++ "} \\\\" | (a,b) <- tab] ++
  "\\hline\n\\end{tabular}\n\\end{center}\n\\newpage\n\n"
-- use prValue instead of this!
{-
where
prTag = unpar . unwords . twords -- remove the outermost constructor
twords s = case words s of
(_:w:ws) -> w:ws -- but only if something is left
ws -> ws
unpar s = case s of -- remove the outer parentheses
'(':cs | last cs == ')' -> init cs
_ -> s
-}
-- SQL
---------------------------------------------------------
-- Column width for word-valued VARCHAR columns.
wordLength = 50 :: Int
-- Column width for attribute-valued VARCHAR columns (currently unused here).
attrLength = 30 :: Int
type Schema = String -- The database structure
type Element = String -- the database content
type TableS = String -- a table
type Column = String -- a column (attribute)
type Value = String -- a value of a column (attribute)
type DatabaseName = String
-- | Emit the SQL schema: drop/recreate the database, create the
-- LEXICON table, and grant access.
-- NOTE(review): the Dictionary argument is not used by this body.
prSqlSchema :: Dictionary-> DatabaseName -> String
prSqlSchema dict dname = concat
  [ "\n-- The Morphology Schema.\n\n"
  , "DROP DATABASE IF EXISTS ", dname, ";\n"
  , "CREATE DATABASE ", dname, ";\n"
  , "USE ", dname, ";\n\n"
  , lexicon
  , "GRANT ALL PRIVILEGES ON ", dname, ".* TO PUBLIC ; \n\n"
  ]
-- A instance needs to:
-- * Be put in the lexicon with a unique identifier
-- * Be put in the class schema
-- * Be put in the inherent schema
-- | Emit the schema followed by one INSERT per (entry, form),
-- numbering entries from 1. The parameter string also carries the
-- inherent features ("param - inh1 inh2").
prSQL :: Dictionary -> String
prSQL = (lexicon ++) . unlines . map prSql . zip [1..] . removeAttr
 where
  prSql (i,(stem, cat, inh, table)) = lexic i stem cat (expand table inh)
  lexic i stem cat t =
      unlines [insert "LEXICON" [show i,stem,cat,b,a] | (a,b) <- t]
  expand table inh = [(a ++ " - " ++ (unwords inh) ,s) | (a,b) <- table,
                      s <- unStr b]
{-
prWordsCl :: [(String,[((Int,String),[String])])] -> [String]
prWordsCl [] = []
prWordsCl ((c,((n1,w1),as1):xs):xss)
= (insert c ([show n1,w1,show n1] ++ as1) :
[insert c ([show n,w,show n1] ++as) | ((n,w),as) <- xs]) ++
prWordsCl xss
innerNumber :: [(a,[(b,[c])])] -> Int -> [(a,[((Int,b),[c])])]
innerNumber [] _ = []
innerNumber ((a,xs):xss) n = (a,number xs n) :
innerNumber xss (n+(length xs))
where number xs n = zipWith f [n..] xs
f n (s,zs) = ((n,s),zs)
-}
-----------------------------------------------------
-- | SQL NULL literal.
emptyE :: Value
emptyE = "NULL"

-- | Build an INSERT statement for the given table and values.
-- NOTE(review): values are spliced in unescaped — malformed SQL (or
-- injection) if any value contains a single quote; verify inputs.
insert :: TableS -> [Value] -> Element
insert tbl vals =
  "INSERT INTO " ++ tbl ++ " VALUES ('" ++ joined ++ "');"
  where
    joined = concat (intersperse "','" vals)
type Name = String
type Type = String
type TypeConstraint = String
type Constraint = String

-- | PRIMARY KEY table constraint on the given column.
primaryKey :: Name -> Constraint
primaryKey n = "PRIMARY KEY (" ++ n ++ ")"

-- | FOREIGN KEY table constraint: column n references column n2 of
-- table n1. Fixed: the original emitted "FOREIGN (" — SQL requires
-- the keyword pair "FOREIGN KEY".
foreignKey :: Name -> (Name,Name) -> Constraint
foreignKey n (n1,n2) = "FOREIGN KEY (" ++ n ++ ") REFERENCES " ++
                       n1 ++ "(" ++ n2 ++ ")"

-- | VARCHAR column type of the given width.
varchar :: Int -> Type
varchar n = "VARCHAR(" ++ show n ++ ")"

intType :: Type
intType = "INTEGER"

notNull :: TypeConstraint
notNull = "NOT NULL"

-- | CREATE TABLE statement from column specs and table constraints.
-- Fixed: columns and constraints are joined as one comma-separated
-- list; the original dropped the comma between the last column and
-- the first constraint, producing invalid SQL whenever constraints
-- were supplied. Output for an empty constraint list is unchanged.
createTable :: Name -> [(Name,Type,TypeConstraint)] -> [Constraint] -> TableS
createTable n xs cs =
  "CREATE TABLE " ++ n ++ "\n(\n" ++
  concat (intersperse ",\n" (columns ++ cs)) ++ ");\n\n"
  where
    -- one "name type constraint" fragment per column
    columns = [cn ++ " " ++ t ++ " " ++ tc | (cn,t,tc) <- xs]
-- | The LEXICON table: one row per inflected form
-- (id, dictionary form, word class, surface word, parameter string).
lexicon :: TableS
lexicon = createTable "LEXICON"
          [
           ("ID", intType, notNull),
           ("DICTIONARY",varchar wordLength,notNull),
           ("CLASS",varchar wordLength,notNull),
           ("WORD",varchar wordLength,notNull),
           ("POS",varchar wordLength,notNull)
          ] []
| icemorph/icemorph | bin/FM/lib/Print.hs | cc0-1.0 | 11,492 | 30 | 18 | 3,047 | 3,808 | 2,047 | 1,761 | 210 | 4 |
{-# LANGUAGE BangPatterns #-}
{- BSP.hs; Mun Hon Cheong (mhch295@cse.unsw.edu.au) 2005
A module for loading Quake 3 BSP files
source code in C++ can be found at
http://www.paulsprojects.net/opengl/q3bsp/q3bsp.html
credits also go to Ben Humphrey for his excellent BSP tutorial
I might split this module up. Perhaps rendering performance
could be increased if i used Vertex Buffer Objects instead of
vertex arrays?
-}
module BSP (
BSPMap(..),
readBSP,
renderBSP,
Tree(..),
BSPNode(..),
BSPLeaf(..),
BSPBrush(..),
BSPBrushSide(..),
isObjectVisible
) where
import Data.IORef
import Control.Exception ( bracket )
import Control.Monad ( liftM, when )
import System.IO hiding (withBinaryFile)
import System.IO.Error ( mkIOError, eofErrorType )
import Foreign
import Foreign.C.Types
import Foreign.C.String
import Data.List
import Data.Typeable
import Graphics.UI.GLUT
import BitSet
import Textures
import Data.Array
import qualified Data.Array.MArray as Arr (readArray, newListArray)
import qualified Data.Array.IO as IOArr hiding (readArray, newListArray)
import Frustum
import Matrix
import Curves
import Data.Maybe
-------------------------------------------------------------------------------
-- lump directory indices
-- Stores texture information
kTextures :: Int
kTextures = 1
-- Stores the splitting planes
kPlanes :: Int
kPlanes = 2
-- Stores the BSP nodes
kNodes :: Int
kNodes = 3
-- Stores the leafs of the nodes
kLeafs :: Int
kLeafs = 4
-- Stores the leaf's indices into the faces
kLeafFaces :: Int
kLeafFaces = 5
-- Stores the leaf's indices into the brushes
kLeafBrushes :: Int
kLeafBrushes = 6
-- Stores the brushes info (for collision)
kBrushes :: Int
kBrushes = 8
-- Stores the brush surfaces
kBrushSides :: Int
kBrushSides = 9
-- Stores the level vertices
kVertices :: Int
kVertices = 10
-- Stores the level indices
kIndices :: Int
kIndices = 11
-- Stores the faces for the level
kFaces :: Int
kFaces = 13
-- Stores the lightmaps for the level
kLightmaps :: Int
kLightmaps = 14
-- Stores PVS and cluster info (visibility)
kVisData :: Int
kVisData = 16
-- A constant to store the number of lumps
kMaxLumps :: Int
kMaxLumps = 17
-------------------------------------------------------------------------------
-- types used in this module
data BSPMap = BSPMap {
vertexData :: !VertexArrays,
vindices :: !(Ptr GLint),
leaves :: ![BSPLeaf],
tree :: !Tree,
visData :: !(Maybe BSPVisData),
bitset :: !BitSet
}
type VertexArrays = (Ptr Float,Ptr Float,Ptr Float,Ptr Float,Ptr Word8)
data BSPLeaf = BSPLeaf {
cluster :: !Int,
area :: Int,
leafMin :: (GLdouble,GLdouble,GLdouble),
leafMax :: (GLdouble,GLdouble,GLdouble),
leafface :: Int,
numOfLeafFaces :: Int,
leafBrush :: Int,
numOfLeafBrushes :: Int,
leafFaces :: [BSPFace],
leafBrushes :: [BSPBrush]
} deriving Show
data BSPFace = BSPFace {
-- The index into the texture array
textureObj :: Maybe TextureObject,
-- The index for the effects (or -1 = n/a)
effect :: Int,
-- 1=polygon, 2=patch, 3=mesh, 4=billboard
faceType :: Int,
-- The starting index into this face's first vertex
startVertIndex :: Int,
-- The number of vertices for this face
numOfVerts :: Int,
-- The starting index into the indices array for this face
startIndex :: Int,
-- The number of indices for this face
numOfIndices :: GLint,
-- The texture index for the lightmap
lightmapObj :: Maybe TextureObject,
-- The face's lightmap corner in the image
lMapCorner :: (Int,Int),
-- The size of the lightmap section
lMapSize :: (Int,Int),
-- The 3D origin of lightmap.
lMapPos :: (Float,Float,Float),
-- The 3D space for s and t unit vectors.
lMapVecs :: [(Float,Float,Float)],
-- The face normal.
vNormal :: (Float,Float,Float),
-- The bezier patch dimensions.
size :: (Int,Int),
faceNo :: Int,
patch :: [BSPPatch],
arrayPtrs :: VertexPointers
} deriving Show
data BSPBrush = BSPBrush {
brushSide :: Int,
numOfBrushSides :: Int,
brushSides :: [BSPBrushSide],
bTextureID :: Int,
textureType :: Int
} deriving Show
data BSPBrushSide = BSPBrushSide {
bsPlane :: Int,
bsPlaneNorm :: (GLdouble,GLdouble,GLdouble),
bsPlaneDist :: GLdouble,
bsTextureID :: Int
} deriving Show
data Tree = Leaf BSPLeaf | Branch BSPNode Tree Tree
data BSPNode = BSPNode {
planeNormal :: (GLdouble,GLdouble,GLdouble),
dist :: GLdouble,
front :: Int,
back :: Int,
nodeMin :: (Int,Int,Int),
nodeMax :: (Int,Int,Int)
} deriving Show
data BSPVisData = BSPVisData {
numOfClusters :: Int,
bytesPerCluster :: Int,
bitSets :: IOArr.IOUArray Int Bool
}
data BSPLump = BSPLump {
offset :: Int,
len :: Int
} deriving Show
data BSPHeader = BSPHeader {
strID :: String,
version :: Int
} deriving Show
data BSPTexInfo = BSPTexInfo {
strName :: String,
flags :: Int,
contents :: Int
} deriving Show
type VertexData = ([Float],[Float],[Float],[Float],[Word8])
type VertexPointers = (Ptr GLfloat, Ptr GLfloat, Ptr GLfloat, Ptr GLint)
type BSPLeafFace = Int
data BSPPlane = BSPPlane {
pNormal :: (GLdouble,GLdouble,GLdouble),
distance :: GLdouble
} deriving Show
-------------------------------------------------------------------------------
--BSP rendering
-- | Render the map from the given camera position: enable the two
-- multitexture units (base texture + lightmap), find the leaf the
-- camera is in, and draw the potentially visible set from there.
renderBSP :: IORef(BSPMap) -> (GLdouble,GLdouble,GLdouble)-> IO()
renderBSP mapRef (x,y,z) = do
   activeTexture $= TextureUnit 0
   clientActiveTexture $= TextureUnit 0
   clientState TextureCoordArray $= Enabled
   texture Texture2D $= Enabled
   activeTexture $= TextureUnit 1
   clientActiveTexture $= TextureUnit 1
   clientState TextureCoordArray $= Enabled
   texture Texture2D $= Enabled
   mp <- readIORef mapRef
   leaf <- findLeaf (x,y,z) (tree mp)
   renderBSP' leaf mp
   return ()
-- given a position finds a in the tree where the position lies in
-- | Walk the BSP tree down to the leaf containing the given position.
-- At each branch, the signed distance of the point to the node's
-- splitting plane (normal . point - plane distance) selects the
-- front (>= 0) or back child.
findLeaf :: (GLdouble, GLdouble,GLdouble) -> Tree -> IO BSPLeaf
findLeaf (x,y,z) (Branch node left right) =
   let (px,py,pz) = planeNormal node
       signedDist = (px*x) + (py*y) + (pz*z) - dist node
   in findLeaf (x,y,z) (if signedDist >= 0 then left else right)
findLeaf _ (Leaf leaf) = return leaf
-- we are actually going across all the leaves in the tree
-- instead of walking the tree and pushing the leaves that
-- we want to render into a stack
-- | Draw every leaf whose cluster is visible from the camera's leaf.
-- A fresh bitset tracks faces already drawn this frame (faces are
-- shared between leaves). All leaves are scanned rather than walking
-- the tree with a stack (see module comment above).
renderBSP' :: BSPLeaf -> BSPMap -> IO()
renderBSP' leaf mp = do
   sze <- sizeBS $ bitset mp
   newbs <- emptyBS sze
   frstm <- getFrustum
   mapM_ (renderLeaves frstm newbs visFunc mp) (leaves mp)
   renderBSPCleanUp
   where visFunc = (isClusterVisible (visData mp) (cluster leaf))
-- we have to reset the openGL state after rendering
-- | Reset the OpenGL client state touched by rendering: disable
-- texturing and texcoord arrays on both texture units, leaving
-- unit 0 active.
renderBSPCleanUp :: IO()
renderBSPCleanUp = do
   activeTexture $= TextureUnit 1
   clientState TextureCoordArray $= Disabled
   texture Texture2D $= Disabled
   activeTexture $= TextureUnit 0
   clientActiveTexture $= TextureUnit 0
   clientState TextureCoordArray $= Disabled
   texture Texture2D $= Disabled
-- renders a BSP leaf if it is visible
-- | Render a leaf's faces when its cluster passes the PVS test and
-- its bounding box intersects the view frustum.
renderLeaves ::
   Frustum -> BitSet -> (Int -> IO Bool) -> BSPMap -> BSPLeaf -> IO()
renderLeaves frstm bitSet func mp leaf = do
   clusterVisible <- func (cluster leaf)
   -- 'when' replaces the original nested case-on-Bool dispatch
   when (clusterVisible && boxInFrustum frstm (leafMin leaf) (leafMax leaf)) $
      renderFaces bitSet mp (leafFaces leaf)
-- is an object visible
-- | True when the object's leaf cluster is visible from the camera's
-- leaf cluster according to the PVS data.
isObjectVisible :: BSPMap -> Vec3 -> Vec3 -> IO Bool
isObjectVisible bsp (x,y,z) (ox,oy,oz) = do
   camLeaf <- findLeaf (x,y,z) (tree bsp)
   objLeaf <- findLeaf (ox,oy,oz) (tree bsp)
   isClusterVisible (visData bsp) (cluster camLeaf) (cluster objLeaf)
-- | PVS lookup: is 'target' visible from 'current'? A negative
-- current cluster means "outside the map" (everything visible);
-- a negative target is never visible; otherwise index the bit
-- array at current * bytesPerCluster * 8 + target.
-- Returns False when no vis data was present in the file.
isClusterVisible ::Maybe BSPVisData -> Int -> Int -> IO Bool
isClusterVisible (Just visdata) current target
   | current < 0 = do
      return True
   | target < 0 = do
      return False
   | otherwise = do
      Arr.readArray
        (bitSets visdata)
        (((bytesPerCluster visdata)*current*8) + target)
isClusterVisible _ _ _ = return False
-- | Draw a list of faces, skipping any face already marked in the
-- per-frame bitset. Dispatch on faceType: 1=polygon, 2=bezier patch,
-- 3=mesh; type 4 (billboards) and already-drawn faces are skipped.
renderFaces :: BitSet -> BSPMap -> [BSPFace] -> IO()
renderFaces _ _ [] = return ()
renderFaces bitSet mp (face:faces) = do
   isSet <- (isSetBS bitSet (faceNo face))
   case (isSet, (faceType face)) of
     (False, 1) -> do
       setBS bitSet (faceNo face)
       renderPolygonFace face (vertexData mp) (vindices mp)
       renderFaces bitSet mp faces
     (False, 2) -> do
       setBS bitSet (faceNo face)
       renderPatches face
       renderFaces bitSet mp faces
     (False, 3) -> do
       setBS bitSet (faceNo face)
       renderMeshFace face (vertexData mp) (vindices mp)
       renderFaces bitSet mp faces
     (_ , _) -> do
       renderFaces bitSet mp faces
-------------------------------------------------------------------------------
-- surface rendering
-- renders a polygon surface
-- | Draw a polygon face from its precomputed array pointers:
-- vertices, base texcoords (unit 0), lightmap texcoords (unit 1),
-- and the index pointer, via drawRangeElements.
renderPolygonFace :: BSPFace -> VertexArrays -> Ptr GLint -> IO ()
renderPolygonFace face (_,_,_,_,_) _ = do
   let (a,b,c,d) = arrayPtrs face
   arrayPointer VertexArray $=
     VertexArrayDescriptor 3 Float 0 a
   clientState VertexArray $= Enabled
   activeTexture $= TextureUnit 0
   clientActiveTexture $= TextureUnit 0
   arrayPointer TextureCoordArray $=
     VertexArrayDescriptor 2 Float 0 b
   textureBinding Texture2D $= (textureObj face)
   activeTexture $= TextureUnit 1
   clientActiveTexture $= TextureUnit 1
   arrayPointer TextureCoordArray $=
     VertexArrayDescriptor 2 Float 0 c
   textureBinding Texture2D $= (lightmapObj face)
   drawRangeElements Triangles (0,(numOfIndices face))
     (numOfIndices face) UnsignedInt d
   --drawElements Triangles (numOfIndices face) UnsignedInt d
-- renders a mesh face
-- | Draw a mesh face by offsetting into the shared vertex arrays at
-- the face's start vertex (12/8-byte strides for positions/texcoords)
-- and into the shared index array at startIndex.
-- NOTE(review): texcoords mix 'startVIndex' and '(startVertIndex face)'
-- — they are the same value here, but the inconsistency is worth
-- tidying.
renderMeshFace :: BSPFace -> VertexArrays -> Ptr GLint -> IO ()
renderMeshFace face (vertexPtr,texturePtr,c,_,_) vIndex = do
   startVIndex <- return (startVertIndex face)
   arrayPointer VertexArray $=
     VertexArrayDescriptor 3 Float 0
       (plusPtr vertexPtr (12*(startVIndex)))
   clientState VertexArray $= Enabled
   activeTexture $= TextureUnit 0
   clientActiveTexture $= TextureUnit 0
   arrayPointer TextureCoordArray $=
     VertexArrayDescriptor 2 Float 0
       (advancePtr texturePtr (2*(startVertIndex face)))
   clientState TextureCoordArray $= Enabled
   texture Texture2D $= Enabled
   textureBinding Texture2D $= (textureObj face)
   activeTexture $= TextureUnit 1
   clientActiveTexture $= TextureUnit 1
   arrayPointer TextureCoordArray $=
     VertexArrayDescriptor 2 Float 0 (plusPtr c (8*(startVIndex)))
   clientState TextureCoordArray $= Enabled
   texture Texture2D $= Enabled
   textureBinding Texture2D $= (lightmapObj face)
   drawRangeElements Triangles (0,fromIntegral (numOfVerts face))
     (numOfIndices face ) UnsignedInt (plusPtr vIndex (4*(startIndex face)))
-- renders patch surfaces
-- | Draw all tessellated bezier patches belonging to a patch face.
renderPatches :: BSPFace -> IO()
renderPatches face = do
   mapM_ (renderPatch face) (patch face)
-- | Draw one tessellated patch: interleaved vertex data with a
-- 28-byte stride (position at 0, texcoords at 12, lightmap coords
-- at 20), rendered as triangle strips via multiDrawElements, one
-- strip per tessellation row (patchLOD).
renderPatch :: BSPFace -> BSPPatch -> IO()
renderPatch face bsppatch= do
   arrayPointer VertexArray $=
     VertexArrayDescriptor 3 Float 28 (patchPtr bsppatch)
   clientState VertexArray $= Enabled
   activeTexture $= TextureUnit 0
   clientActiveTexture $= TextureUnit 0
   arrayPointer TextureCoordArray $=
     VertexArrayDescriptor 2 Float 28 (plusPtr (patchPtr bsppatch) 12)
   clientState TextureCoordArray $= Enabled
   texture Texture2D $= Enabled
   textureBinding Texture2D $= (textureObj face)
   activeTexture $= TextureUnit 1
   clientActiveTexture $= TextureUnit 1
   arrayPointer TextureCoordArray $=
     VertexArrayDescriptor 2 Float 28 (plusPtr (patchPtr bsppatch) 20)
   clientState TextureCoordArray $= Enabled
   texture Texture2D $= Enabled
   textureBinding Texture2D $= (lightmapObj face)
   multiDrawElements TriangleStrip (numIndexPtr bsppatch)
     UnsignedInt (indexPtrPtr bsppatch)
       (fromIntegral (patchLOD bsppatch))
-------------------------------------------------------------------------------
-- reading functions
-- reads a BSP file
-- | Load a Quake 3 BSP file: read the header and lump directory,
-- then the vertex/index/node/leaf/vis lumps, build the leaf and node
-- arrays, and construct the traversal tree. The handle is managed by
-- withBinaryFile.
readBSP :: FilePath -> IO(IORef(BSPMap))
readBSP filePath = withBinaryFile filePath $ \handle -> do
   readHeader handle
   lumps <- mapM (readLump handle)
              [ 0 .. (kMaxLumps -1)] :: IO [BSPLump]
   (a,b,c,d,e) <- readVertices handle lumps
   indcs <- readIndices handle lumps
   newbitset <- createBitset lumps
   newVertexArrays <- dataToPointers (a,b,c,d,e)
   indexPtr <- newArray indcs
   newNodes <- readNodes handle lumps
   newLeaves <- readLeaves handle lumps newVertexArrays indexPtr
   newVisData <- readVisData handle lumps
   let leafArray = listArray (0,((length newLeaves)-1)) newLeaves
   let nodeArray = listArray (0,((length newNodes)-1)) newNodes
   ntree <- constructTree nodeArray leafArray 0
   bsp <- (newIORef ( BSPMap {
            vertexData = newVertexArrays,
            vindices = indexPtr,
            leaves = (reverse newLeaves),
            tree = ntree,
            visData = newVisData,
            bitset = newbitset
          }))
   return bsp
-- | Build the traversal tree from the flat node and leaf arrays.
-- Per the Quake 3 BSP format, a non-negative index refers to an
-- internal node and a negative index i refers to leaf -(i+1).
constructTree :: Array Int BSPNode -> Array Int BSPLeaf -> Int -> IO(Tree)
constructTree nodes lvs ind
   | ind >= 0 = do
       let currentNode = nodes ! ind
       leftSub <- constructTree nodes lvs (front currentNode)
       rightSub <- constructTree nodes lvs (back currentNode)
       return (Branch currentNode leftSub rightSub)
   | otherwise = return (Leaf (lvs ! ((-1)*(ind+1))))
-- | Allocate an empty bitset with one bit per face; each face record
-- in the faces lump is 104 bytes.
createBitset :: [BSPLump] -> IO BitSet
createBitset lumps = do
   (_, byteLen) <- getLumpData (lumps !! kFaces)
   emptyBS (byteLen `div` 104)
-- - - - - - - - - - - - - - - - - - -
-- reads the BSP files header information
-- | Read the BSP header: a 4-character magic string followed by a
-- CInt version. NOTE(review): the hGetBuf return values are ignored,
-- so a short read goes undetected — TODO confirm acceptable.
readHeader :: Handle -> IO BSPHeader
readHeader handle = do
   buf <- mallocBytes 4
   hGetBuf handle buf 4
   iD <- mapM (peekByteOff buf) [ 0 .. 3] :: IO [CChar]
   hGetBuf handle buf cIntSize
   ver <- (peek (castPtr buf :: Ptr CInt)) :: IO CInt
   free buf
   return (BSPHeader {
     strID = map castCCharToChar iD,
     version = (fromIntegral ver)})
-- - - - - - - - - - - - - - - - - - -
-- reads the lumps in our bsp
-- | Read one lump directory entry (offset, length) at the current
-- file position; the index argument is unused.
readLump :: Handle -> Int -> IO BSPLump
readLump handle _ = do
   buf <- mallocBytes cIntSize
   hGetBuf handle buf cIntSize
   offs <- (peek (castPtr buf :: Ptr CInt)) :: IO CInt
   hGetBuf handle buf cIntSize
   l <- (peek (castPtr buf :: Ptr CInt)) :: IO CInt
   free buf
   return (BSPLump {offset = (fromIntegral offs),
                    len = (fromIntegral l)})
-- | Project a lump into its (offset, length) pair.
getLumpData :: BSPLump -> IO (Int, Int)
getLumpData lump = return (offset lump,len lump)
-- - - - - - - - - - - - - - - - - - -
-- reads the nodes
-- | Read the nodes lump: first load the planes, then read one
-- 36-byte node record per offset, resolving each node's plane index
-- against the plane array.
readNodes :: Handle -> [BSPLump] -> IO [BSPNode]
readNodes handle lumps = do
   planes <- readPlanes handle lumps
   let planeArray = listArray (0,((length planes)-1)) planes
   (offst,lngth) <- (getLumpData (lumps !! kNodes))
   offs <- getOffsets lngth offst 36
   nodes <- mapM (readNode handle planeArray) offs
   return nodes
-- | Read one node record: plane index, front/back child indices,
-- and the node's bounding box.
-- NOTE(review): the 4-byte scratch buffer is never freed — leak.
readNode :: Handle -> Array Int BSPPlane -> Int -> IO (BSPNode)
readNode handle planeArray offst = do
   hSeek handle AbsoluteSeek (fromIntegral offst)
   buf <- mallocBytes 4
   let getCInt =
         getAndPeek handle (castPtr buf :: Ptr CInt) (undefined :: CInt)
   let getCInts =
         getAndPeeks handle (castPtr buf :: Ptr CInt) (undefined :: CInt)
   let ints = liftM toInts (getCInts 3)
   let get3Ints = liftM get3t ints
   plnIndex <- getCInt
   frt <- getCInt
   bck <- getCInt
   nMin <- get3Ints
   nMax <- get3Ints
   let pln = planeArray ! (fromIntegral plnIndex)
   return $ BSPNode {
     planeNormal = (pNormal pln),
     dist = (distance pln),
     front = fromIntegral frt,
     back = fromIntegral bck,
     nodeMin = nMin,
     nodeMax = nMax
   }
-- - - - - - - - - - - - - - - - - - -
-- reads the planes in the nodes
-- | Read the planes lump: the whole lump is read into one buffer and
-- split into 16-byte records (normal x,y,z + distance).
readPlanes :: Handle -> [BSPLump] -> IO [BSPPlane]
readPlanes handle lumps = do
   (offst,lngth) <- (getLumpData (lumps !! kPlanes))
   hSeek handle AbsoluteSeek (fromIntegral offst)
   buf <- mallocBytes lngth
   hGetBuf handle buf lngth
   let ptrs = getPtrs buf lngth 16
   planes <- mapM readPlane ptrs
   free buf
   return planes
-- | Decode one plane, converting from Quake's coordinate system:
-- (x, y, z) becomes (x, z, -y).
readPlane :: Ptr a -> IO (BSPPlane)
readPlane ptr = do
   [e1,e2,e3,e4] <- getFloats ptr 4
   return $ BSPPlane {
     pNormal = (fromRational (toRational e1),
                fromRational (toRational e3),
                fromRational (toRational ((-1)*e2))),
     distance = (fromRational (toRational e4))
   }
-- - - - - - - - - - - - - - - - - - -
-- reads the leaves
-- | Read every leaf from the kLeafs lump (48 bytes per record).  Faces,
--   leaf-face indices, brushes and leaf-brush indices are all loaded
--   first and indexed into arrays so each leaf can resolve its contents.
readLeaves :: Handle -> [BSPLump] -> VertexArrays -> Ptr GLint -> IO [BSPLeaf]
readLeaves handle lumps vertArrays indcs = do
  faces <- readFaces handle lumps vertArrays indcs
  let faceArray = listArray (0,((length faces)-1)) faces
  leaffaces <- readLeafFaces handle lumps
  let leafFaceArray = listArray (0,((length leaffaces)-1)) leaffaces
  brushes <- readBrushes handle lumps
  let brushArray = listArray (0,((length brushes)-1)) brushes
  leafbrushes <- readLeafBrushes handle lumps
  let leafBrushArray = listArray (0,((length leafbrushes)-1)) leafbrushes
  (offst,lngth) <- getLumpData (lumps !! kLeafs)
  hSeek handle AbsoluteSeek (fromIntegral offst)
  buf <- mallocBytes lngth
  hGetBuf handle buf lngth
  let ptrs = getPtrs buf lngth 48
  nodes <-
    mapM (readLeaf leafFaceArray faceArray leafBrushArray brushArray) ptrs
  free buf
  return nodes
-- | Decode one 48-byte leaf record (12 ints) and resolve its face and
--   brush lists through the lookup arrays.
--   NOTE(review): leaf faces are gathered in *descending* index order
--   (e9+e10-1 down to e9) -- presumably intentional for draw order;
--   confirm against the renderer.  Bounding boxes use the same (x,z,-y)
--   swizzle as 'readPlane'.
readLeaf ::
  Array Int Int -> Array Int BSPFace ->
  Array Int Int -> Array Int BSPBrush ->Ptr a ->IO (BSPLeaf)
readLeaf leafFaceArray faceArray leafBrushArray brushArray ptr = do
  [e1,e2,e3,e4,e5,e6,e7,e8,e9,e10,e11,e12] <- getInts ptr 12
  let leafIndices = map (leafFaceArray !) [((e9+e10)-1),((e9+e10)-2)..e9]
  let faceList = map (faceArray !) leafIndices
  let brushIndices = map (leafBrushArray !) [e11..(e11+e12-1)]
  let brushList = map (brushArray !) brushIndices
  return $ BSPLeaf {
    cluster = e1,
    area = e2,
    leafMin = (realToFrac e3,
               realToFrac e5,
               realToFrac ((-1)*e4)),
    leafMax = (realToFrac e6,
               realToFrac e8,
               realToFrac ((-1)*e7)),
    leafface = e9,
    numOfLeafFaces = e10,
    leafBrush = e11,
    numOfLeafBrushes = e12,
    leafFaces = faceList,
    leafBrushes = brushList
  }
-- - - - - - - - - - - - - - - - - - -
-- huge function for reading the faces in our leaves
-- | Read every face from the kFaces lump (104 bytes per record), first
--   loading the lightmaps and texture info that face records index into.
readFaces :: Handle -> [BSPLump] -> VertexArrays -> Ptr GLint -> IO [BSPFace]
readFaces handle lumps vertArrays indcs = do
  lightMaps <- readLightMaps handle lumps
  let lightMapArray = listArray (0,((length lightMaps)-1)) lightMaps
  texInfos <- readTexInfos handle lumps
  -- was `texFileNames <- return (...)`: a plain let avoids the
  -- redundant monadic bind
  let texFileNames = map strName texInfos
  texObjs <- getAndCreateTextures texFileNames
  let texObjArray = listArray (0,((length texObjs)-1)) texObjs
  (offst,lngth) <- getLumpData (lumps !! kFaces)
  offs <- getOffsets lngth offst 104
  mapM
    (readFace handle offst lightMapArray texObjArray vertArrays indcs) offs
-- | Decode one 104-byte face record at absolute offset offst.
--   origin is the start of the faces lump, used to derive faceNo.
--   The VertexArrays pointers are advanced by the face's start vertex /
--   start index so rendering can address this face's data directly.
readFace ::
  Handle -> Int -> Array Int TextureObject -> Array Int (Maybe TextureObject)->
  VertexArrays -> Ptr GLint -> Int -> IO (BSPFace)
readFace handle origin lightmaps textures
  vertArrays@(a1,b1,c1,_,_) indcs offst = do
  hSeek handle AbsoluteSeek (fromIntegral offst)
  -- 4-byte scratch buffer reused for every field read
  buf <- mallocBytes 4
  let getCInts =
        getAndPeeks handle (castPtr buf :: Ptr CInt) (undefined :: CInt)
  let getCFloats =
        getAndPeeks handle (castPtr buf :: Ptr CFloat) (undefined :: CFloat)
  let ints = liftM toInts (getCInts 4)
  let get4Ints = liftM get4t ints
  let floats = liftM toFloats (getCFloats 3)
  let get3Floats = liftM get3t floats
  let twoInts = liftM toInts (getCInts 2)
  let get2Ints = liftM get2t twoInts
  -- texture id, effect, face type, start vertex
  (a,b,c,d) <- get4Ints
  -- vertex count, start index, index count, lightmap id
  (e,f,g,h) <- get4Ints
  -- lightmap corner (i,j) and size (k,l)
  (i,j,k,l) <- get4Ints
  lMPos <- get3Floats
  lMVec1 <- get3Floats
  lMVec2 <- get3Floats
  norms <- get3Floats
  sz <- get2Ints
  free buf
  bspPatch <- checkForPatch c d sz vertArrays
  return BSPFace {
    textureObj = textures ! a,
    effect = b,
    faceType = c,
    startVertIndex = d,
    numOfVerts = e,
    startIndex = f,
    numOfIndices = fromIntegral g,
    lightmapObj = fixLightmap h lightmaps,
    lMapCorner = (i, j),
    lMapSize = (k, l),
    lMapPos = lMPos,
    lMapVecs = [lMVec1,lMVec2],
    vNormal = norms,
    size = sz,
    -- index of this face within the lump (records are 104 bytes)
    faceNo = (offst - origin)`div` 104,
    patch = bspPatch,
    -- 12 = 3 floats/vertex, 8 = 2 floats/coord, 4 = bytes per GLint
    arrayPtrs = (plusPtr a1 (12*d),
                 plusPtr b1 (8*d),
                 plusPtr c1 (8*d),
                 plusPtr indcs (4*f))
  }
-- - - - - - - - - - - - - - - - - - -
-- reads the leafaces that refer to the faces
-- | Read the kLeafFaces lump: a flat array of 4-byte face indices.
readLeafFaces :: Handle -> [BSPLump] -> IO [BSPLeafFace]
readLeafFaces handle lumps = do
  (offst,lngth) <- (getLumpData (lumps !! kLeafFaces))
  hSeek handle AbsoluteSeek (fromIntegral offst)
  buf <- mallocBytes lngth
  hGetBuf handle buf lngth
  leaffaces <- getInts buf (lngth `div` 4)
  free buf
  return leaffaces
-- - - - - - - - - - - - - - - - - - -
-- reads the brushes
-- | Read every brush from the kBrushes lump (12 bytes per record).
--   Brush sides and texture info are loaded first so each brush can
--   resolve its side list and content flags.
readBrushes :: Handle -> [BSPLump] -> IO [BSPBrush]
readBrushes handle lumps = do
  brushsides <- readBrushSides handle lumps
  let brushSideArray = listArray (0,((length brushsides)-1)) brushsides
  texInfos <- readTexInfos handle lumps
  let texInfoArray = listArray (0,((length texInfos)-1)) texInfos
  (offst,lngth) <- (getLumpData (lumps !! kBrushes))
  hSeek handle AbsoluteSeek (fromIntegral offst)
  buf <- mallocBytes (lngth)
  hGetBuf handle buf (lngth)
  let ptrs = getPtrs buf (lngth) 12
  brushes <- mapM (readBrush brushSideArray texInfoArray) ptrs
  free buf
  return brushes
-- | Decode one 12-byte brush record: first side index, side count,
--   texture id.  The side list is resolved from the side array; the
--   texture type comes from the referenced texture info's content flags.
readBrush :: Array Int BSPBrushSide ->
             Array Int BSPTexInfo -> Ptr a ->IO (BSPBrush)
readBrush brushSideArray texInfos ptr = do
  [e1,e2,e3] <- getInts ptr 3
  let bSides = map (brushSideArray !) [e1..(e1+e2-1)]
  return $ BSPBrush {
    brushSide = e1,
    numOfBrushSides = e2,
    brushSides = bSides,
    bTextureID = e3,
    textureType = (contents (texInfos ! e3))
  }
-- - - - - - - - - - - - - - - - - - -
-- reads the brush sides in our brushes
-- | Read every brush side from the kBrushSides lump (8 bytes per
--   record).  Planes are loaded first so each side can inline its
--   plane's normal and distance.
readBrushSides :: Handle -> [BSPLump] -> IO [BSPBrushSide]
readBrushSides handle lumps = do
  planes <- readPlanes handle lumps
  let planeArray = listArray (0,((length planes)-1)) planes
  (offst,lngth) <- (getLumpData (lumps !! kBrushSides))
  hSeek handle AbsoluteSeek (fromIntegral offst)
  buf <- mallocBytes (lngth)
  hGetBuf handle buf (lngth)
  let ptrs = getPtrs buf (lngth) 8
  brushsides <- mapM (readBrushSide planeArray) ptrs
  free buf
  return brushsides
-- | Decode one 8-byte brush-side record (plane index, texture id) and
--   inline the referenced plane's normal and distance.
readBrushSide :: Array Int BSPPlane -> Ptr a ->IO (BSPBrushSide)
readBrushSide planeArray ptr = do
  [planeIdx, texId] <- getInts ptr 2
  let plane = planeArray ! (fromIntegral planeIdx)
  return BSPBrushSide
    { bsPlane = planeIdx
    , bsPlaneNorm = pNormal plane
    , bsPlaneDist = distance plane
    , bsTextureID = texId
    }
-- - - - - - - - - - - - - - - - - - -
-- reads the leaf brushes that refer to the brushes
-- | Read the kLeafBrushes lump: a flat array of 4-byte brush indices.
readLeafBrushes :: Handle -> [BSPLump] -> IO [BSPLeafFace]
readLeafBrushes handle lumps = do
  (offst,lngth) <- (getLumpData (lumps !! kLeafBrushes))
  hSeek handle AbsoluteSeek (fromIntegral offst)
  buf <- mallocBytes lngth
  hGetBuf handle buf lngth
  leafbrushes <- getInts buf (lngth `div` 4)
  free buf
  return leafbrushes
-- - - - - - - - - - - - - - - - - - -
-- read the PVS visibility information
-- | Read the potentially-visible-set lump.  Returns Nothing when the
--   map ships no visibility data (lump length 0).  Layout: two CInts
--   (cluster count, bytes per cluster) followed by the packed bit sets,
--   which are expanded into a Bool array (LSB first per byte).
readVisData :: Handle -> [BSPLump] -> IO (Maybe BSPVisData)
readVisData handle lumps = do
  (offst,lngth) <- (getLumpData (lumps !! kVisData))
  case lngth of
    0 -> return Nothing
    _ -> do
      hSeek handle AbsoluteSeek (fromIntegral offst)
      buf <- mallocBytes lngth
      hGetBuf handle buf lngth
      cInts <- peekArray 2 (castPtr buf :: Ptr CInt)
      let [numC, bytesPerC] = toInts cInts
      -- bit sets start after the two 4-byte header ints
      bitst <- peekArray (numC*bytesPerC) $ plusPtr (castPtr buf :: Ptr Word8) 8
      bs <-
        Arr.newListArray (0 ,(numC*bytesPerC*8-1)) (toBools bitst)
      -- the original leaked this buffer; everything has been copied out
      free buf
      return (Just BSPVisData {
        numOfClusters = numC,
        bytesPerCluster = bytesPerC,
        bitSets = bs
      })
-- - - - - - - - - - - - - - - - - - -
-- reads vertex information
-- | Read every vertex from the kVertices lump (44 bytes per record) and
--   flatten the per-vertex tuples into five contiguous component lists.
readVertices :: Handle -> [BSPLump] -> IO VertexData
readVertices handle lumps = do
  (offst,lngth) <- getLumpData (lumps !! kVertices)
  offs <- getOffsets lngth offst 44
  verts <- mapM (readVertex handle) offs
  (v,t,l,n,r) <- seperateArrays verts
  return $ toVertexData (concat v, concat t, concat l, concat n, concat r)
-- | Decode one 44-byte vertex record: position (3 floats), texture
--   coords (2), lightmap coords (2), normal (3), RGBA colour (4 bytes).
--   The position is swizzled to (x, z, -y), matching 'readPlane'.
readVertex :: Handle -> Int -> IO ([CFloat],[CFloat],[CFloat],[CFloat],[Word8])
readVertex handle offst = do
  hSeek handle AbsoluteSeek (fromIntegral offst)
  -- 4-byte scratch buffer reused for every field read
  buf <- mallocBytes 4
  let getCFloats =
        getAndPeeks handle (castPtr buf :: Ptr CFloat) (undefined :: CFloat)
  let getWord8s =
        getAndPeeks handle (castPtr buf :: Ptr Word8) (undefined :: Word8)
  let floats = (getCFloats 3)
  let get3Floats = liftM get3t floats
  (x,y,z) <- get3Floats
  texCoords <- getCFloats 2
  lightMapCoords <- getCFloats 2
  normals <- getCFloats 3
  rgbaVal <- getWord8s 4
  free buf
  return ([x,z,(-1)*y],texCoords,lightMapCoords,normals,rgbaVal)
-- | Marshal the five component lists of the vertex data into freshly
--   allocated C arrays, returning the raw pointers for the renderer.
dataToPointers :: VertexData -> IO VertexArrays
dataToPointers (vs, ts, ls, ns, rs) = do
  vPtr <- newArray vs
  tPtr <- newArray ts
  lPtr <- newArray ls
  nPtr <- newArray ns
  rPtr <- newArray rs
  return (vPtr, tPtr, lPtr, nPtr, rPtr)
-- | Split a list of per-vertex 5-tuples into five parallel lists.
--   Wrapped in IO purely for convenience at the call site.
seperateArrays :: [([CFloat],[CFloat],[CFloat],[CFloat],[Word8])] ->
                  IO ([[CFloat]],[[CFloat]],[[CFloat]],[[CFloat]],[[Word8]])
seperateArrays = return . unzip5
-- | Convert the marshalled CFloat component lists to host Floats; the
--   RGBA bytes pass through unchanged.
toVertexData :: ([CFloat],[CFloat],[CFloat],[CFloat],[Word8]) -> VertexData
toVertexData (a,b,c,d,e) = (toFloats a,toFloats b,toFloats c,toFloats d,e)
-- - - - - - - - - - - - - - - - - - -
-- reads lightmaps
-- | Read every lightmap from the kLightmaps lump.  Each lightmap is a
--   raw 128x128 RGB image: 128*128*3 = 49152 bytes.
readLightMaps :: Handle -> [BSPLump] -> IO [TextureObject]
readLightMaps handle lumps = do
  (offst,lngth) <- (getLumpData (lumps !! kLightmaps))
  offs <- getOffsets lngth offst 49152
  mapM (readLightMap handle) offs
-- | Read one 128x128 RGB lightmap (49152 bytes), brighten every texel,
--   and upload it as an OpenGL texture.  The pixel buffer is freed by
--   'createLightmapTexture' after the upload.
readLightMap :: Handle -> Int -> IO TextureObject
readLightMap handle offst = do
  hSeek handle AbsoluteSeek (fromIntegral offst)
  buf <- mallocBytes 49152 :: IO (Ptr Word8)
  hGetBuf handle buf 49152
  -- mapM_ instead of mapM: the unit results were discarded anyway, so
  -- don't build a 16384-element list (128*128 texels)
  mapM_ (adjustRGB buf 5.0) [0..((16384)-1)]
  createLightmapTexture buf
-- | Upload a 128x128 RGB pixel buffer as a mipmapped OpenGL texture and
--   free the buffer.  Ownership of the pointer transfers to this
--   function: callers must not use it afterwards.
createLightmapTexture :: Ptr Word8 -> IO TextureObject
createLightmapTexture ptr = do
  [texName] <- genObjectNames 1
  -- rows are tightly packed (no 4-byte alignment padding)
  rowAlignment Unpack $= 1
  textureBinding Texture2D $= Just texName
  build2DMipmaps
    Texture2D RGB'
    (fromIntegral (128 :: Int)) (fromIntegral (128 :: Int))
    (PixelData RGB UnsignedByte ptr)
  textureFilter Texture2D $= ((Linear', Just Nearest), Linear')
  textureFunction $= Modulate
  free ptr
  return texName
-- adjusts the brightness of the lightmap
-- | Brighten one RGB texel (texel index offst, 3 bytes per texel) in
--   place: scale each channel by factor, then renormalise with a common
--   scale so that no channel overflows 255.
adjustRGB :: Ptr Word8 -> Float -> Int -> IO ()
adjustRGB lightMap factor offst = do
  -- was `ptr <- return (...)`: plain lets replace the redundant
  -- monadic binds throughout this function
  let ptr = advancePtr lightMap (3*offst)
  [r,g,b] <- peekArray 3 ptr
  (r2,tempr) <- scaleRGB (((realToFrac r)*factor)/255) 1
  (g2,tempg) <- scaleRGB (((realToFrac g)*factor)/255) tempr
  (b2,tempb) <- scaleRGB (((realToFrac b)*factor)/255) tempg
  -- tempb is the tightest scale found across all three channels, so it
  -- is applied to each of them
  let byter2 = fromIntegral (truncate (r2 * tempb * 255.0) :: Int)
      byteg2 = fromIntegral (truncate (g2 * tempb * 255.0) :: Int)
      byteb2 = fromIntegral (truncate (b2 * tempb * 255.0) :: Int)
  pokeArray ptr [byter2,byteg2,byteb2]
-- | Clamp helper for 'adjustRGB'.  If a channel exceeds 1.0 and its
--   reciprocal is tighter than the scale accumulated so far, tighten
--   the scale; otherwise keep the current one.  Returns the (unchanged)
--   channel value paired with the updated scale.
scaleRGB :: Float -> Float -> IO (Float,Float)
scaleRGB clr scl
  | clr > 1.0 && (1.0/clr) < scl = return (clr, 1.0/clr)
  | otherwise                    = return (clr, scl)
-- | Negative lightmap indices mean "no lightmap"; anything else is a
--   lookup into the lightmap texture array.
fixLightmap ::
  Int -> Array Int TextureObject -> Maybe TextureObject
fixLightmap ind arr =
  if ind < 0 then Nothing else Just (arr ! ind)
-- - - - - - - - - - - - - - - - - - -
-- reads the texture information
-- | Read every texture-info record from the kTextures lump
--   (72 bytes per record).
readTexInfos :: Handle -> [BSPLump] -> IO [BSPTexInfo]
readTexInfos handle lumps = do
  (offst,lngth) <- (getLumpData (lumps !! kTextures))
  offs <- getOffsets lngth offst 72
  mapM (readTexInfo handle) offs
-- | Decode one 72-byte texture-info record: a 64-byte NUL-padded name
--   followed by two CInts (flags, contents).  The 64-byte name buffer
--   is reused as the scratch buffer for the two ints.
readTexInfo :: Handle -> Int -> IO (BSPTexInfo)
readTexInfo handle offst = do
  hSeek handle AbsoluteSeek (fromIntegral offst)
  buf <- mallocBytes 64 :: IO (Ptr CChar)
  hGetBuf handle buf 64
  str <- peekCAString buf
  hSeek handle AbsoluteSeek ((fromIntegral offst) + 64)
  let getCInt =
        getAndPeek handle (castPtr buf :: Ptr CInt) (undefined :: CInt)
  flgs <- getCInt
  cons <- getCInt
  free buf
  return BSPTexInfo {
    strName = str,
    flags = (fromIntegral flgs),
    contents = (fromIntegral cons)
  }
-- - - - - - - - - - - - - - - - - - -
-- reads the indices to the vertex array
-- | Read the kIndices lump: a flat array of 4-byte vertex indices,
--   converted to GLint for the renderer.
readIndices :: Handle -> [BSPLump] -> IO [GLint]
readIndices handle lumps = do
  (offst,lngth) <- (getLumpData (lumps !! kIndices))
  hSeek handle AbsoluteSeek (fromIntegral offst)
  buf <- mallocBytes lngth
  hGetBuf handle buf lngth
  indces <- mapM
    (peekElemOff (castPtr buf :: Ptr CInt))
    [ 0 .. ((lngth `div` 4)-1)] :: IO [CInt]
  free buf
  return $ map fromIntegral indces
-- - - - - - - - - - - - - - - - - - -
-- | Read exactly @sizeOf be@ bytes from the handle into @buf@ and
--   return the decoded value.  Raises an EOF-flavoured IOError on a
--   short read instead of silently returning stale buffer contents.
getAndPeek :: (Storable a, Typeable a) => Handle -> Ptr a -> a -> IO a
getAndPeek handle buf be = do
  bytesRead <- hGetBuf handle buf (sizeOf be)
  when (bytesRead /= (sizeOf be)) $
    ioError $ mkIOError eofErrorType "hGetBufFully" (Just handle) Nothing
  -- was `val <- peek buf; return val`: the bind was redundant
  peek buf
-- | Decode @i@ consecutive values by repeatedly reading through the
--   same scratch buffer with 'getAndPeek'.
getAndPeeks :: (Storable a, Typeable a) =>
               Handle -> Ptr a -> a -> Int -> IO [a]
getAndPeeks handle buf be i =
  sequence (replicate i (getAndPeek handle buf be))
-- | Run an action with a binary-mode handle, guaranteeing the handle is
--   closed even if the action throws.  NOTE(review): shadows the
--   System.IO function of the same name in modern base -- confirm the
--   import list avoids an ambiguity.
withBinaryFile :: FilePath -> (Handle -> IO a) -> IO a
withBinaryFile filePath = bracket (openBinaryFile filePath ReadMode) hClose
-- | Absolute file offsets of each fixed-size record in a lump:
--   off, off+sze, ... for every whole sze-byte record inside lngth bytes.
getOffsets :: Int -> Int -> Int -> IO [Int]
getOffsets lngth off sze =
  return [ off + sze * i | i <- [0 .. (lngth `div` sze) - 1] ]
-- | Widen/narrow any integral list to host Ints.
toInts :: (Integral a)=>[a] -> [Int]
toInts = map fromIntegral
-- | Convert any Real list to host Floats.
toFloats :: (Real a) => [a] -> [Float]
toFloats = map realToFrac
-- | First two elements as a pair.  Still partial (callers read fixed-size
--   records), but a pattern match with a descriptive error replaces the
--   double list traversal and bare "index too large" of (!!).
get2t :: [a] -> (a, a)
get2t (a:b:_) = (a, b)
get2t _       = error "get2t: list has fewer than 2 elements"
-- | First three elements as a triple.  Pattern match replaces repeated
--   (!!) traversals and gives a descriptive error on short input.
get3t :: [a] -> (a, a, a)
get3t (a:b:c:_) = (a, b, c)
get3t _         = error "get3t: list has fewer than 3 elements"
-- | First four elements as a 4-tuple.  Pattern match replaces repeated
--   (!!) traversals and gives a descriptive error on short input.
get4t :: [a] -> (a, a, a, a)
get4t (a:b:c:d:_) = (a, b, c, d)
get4t _           = error "get4t: list has fewer than 4 elements"
-- | Expand each byte into its 8 bits, least significant bit first.
toBools :: [Word8] -> [Bool]
toBools = concatMap (\w -> map (testBit w) [0..7])
-- | Peek n CInts starting at an arbitrary pointer and widen them to Ints.
getInts :: Ptr a -> Int -> IO [Int]
getInts ptr n = do
  raw <- peekArray n (castPtr ptr :: Ptr CInt)
  return (map fromIntegral raw)
-- | Peek n CFloats starting at an arbitrary pointer and convert them to
--   host Floats.
getFloats :: Ptr a -> Int -> IO [Float]
getFloats ptr n = do
  raw <- peekArray n (castPtr ptr :: Ptr CFloat)
  return (map realToFrac raw)
-- | Size in bytes of a C int on this platform (4 on all common targets).
cIntSize :: Int
cIntSize = (sizeOf (undefined :: CInt))
-- | Pointers to each fixed-size record inside a buffer: one per whole
--   sze-byte record that fits in lngth bytes.
getPtrs :: Ptr a -> Int -> Int -> [Ptr a]
getPtrs ptr lngth sze =
  [ plusPtr ptr (sze * i) | i <- [0 .. (lngth `div` sze) - 1] ]
| kvelicka/frag | src/BSP.hs | gpl-2.0 | 33,668 | 0 | 20 | 9,570 | 10,868 | 5,598 | 5,270 | 769 | 4 |
module HTMLScrapper(
HTMLDoc,
parsePage,
getPage,
fetchTag,
fetchAllLinks
) where
-------------------------------------------------------------------------------
import Text.HandsomeSoup
import Text.XML.HXT.Core
import Network.URI
import Data.Maybe (mapMaybe)
import Data.List (isSuffixOf, nub)
import HTTPClient (downloadURL)
-------------------------------------------------------------------------------
-- | A parsed HTML document (an HXT XML tree).
type HTMLDoc = XmlTree
-- | Download a URL and parse its body into a single document tree.
--   NOTE(review): 'head' assumes parseHtml always yields at least one
--   tree -- confirm HandsomeSoup guarantees this for malformed input.
getPage :: URI -> IO HTMLDoc
getPage url = do
  html <- downloadURL (show url)
  fmap head $ runX $ parseHtml html
-- | Collect the text content of every element matching the given CSS
--   selector (e.g. \"title\" or \"p\").
fetchTag :: String -> HTMLDoc -> [String]
fetchTag tag = runLA (css tag >>> getChildren >>> getText)
-- | Collect every distinct absolute URI found in the document's anchor
--   href attributes.  A single trailing slash is stripped before parsing
--   so "http://x/" and "http://x" deduplicate; hrefs that fail to parse
--   as URIs are dropped.
fetchAllLinks :: HTMLDoc -> [URI]
fetchAllLinks = nub . mapMaybe (parseURI . removeSlash) . runLA (css "a" ! "href")
  where
    removeSlash str
      | "/" `isSuffixOf` str = take (length str - 1) str
      | otherwise = str
-- | Parse an HTML string into a document tree.
--   NOTE(review): 'head' assumes hread always yields at least one tree
--   -- same caveat as 'getPage'.
parsePage :: String -> HTMLDoc
parsePage = head . runLA hread
| carlostome/HCrawler | src/HTMLScrapper.hs | gpl-2.0 | 983 | 0 | 11 | 189 | 286 | 153 | 133 | 26 | 1 |
import qualified Data.ByteString.Lazy as BL
import Control.Monad.Error
import Control.Exception
import Data.List
import System.Console.GetOpt
import System.Environment
import System.Exit
import System.IO
import Text.Printf
import Commit
import Diff
import Index
import Log
import Object
import ObjectStore
import Pack
import Pager
import Refs
import RevParse
import Shared
import State
-- | 'gat ref <name>': resolve a revision name and print its hash.
cmdRef :: [String] -> GitM ()
cmdRef args = do
  unless (length args == 1) $
    fail "'ref' takes one argument"
  let [name] = args
  hash <- resolveRev name
  case hash of
    -- previously a failed resolution printed the raw Left value and
    -- exited successfully; fail like cmdCat does so the exit status
    -- reflects the error
    Left err -> fail err
    Right h  -> liftIO $ print h
-- | 'gat cat [--raw] <name>': pretty-print an object, or dump its raw
--   bytes when --raw is given.  Output goes through the pager.
cmdCat :: [String] -> GitM ()
cmdCat args = do
  (raw, name) <-
    case getOpt Permute options args of
      (opts, [name], []) -> return (not (null opts), name)
      (_, _, []) ->
        fail "expect 1 argument: name of object to cat"
      (_, _, errs) -> fail $ concat errs ++ usage
  hash <- resolveRev name
  case hash of
    Left err -> fail err
    Right hash ->
      if raw
        then do
          (typ, obj) <- getRawObject hash
          redirectThroughPager $ liftIO $ BL.putStr obj
        else getObject hash >>= redirectThroughPager . liftIO . print
  where
    usage = usageInfo "gat cat [options] <object name>" options
    options = [
      Option "" ["raw"] (NoArg True) "dump raw object bytes"
      ]
-- | 'gat dump-index': print every index entry (mode, real mode, name)
--   followed by the cached tree.
cmdDumpIndex :: [String] -> GitM ()
cmdDumpIndex args = liftIO $ do
  -- `null args` instead of `length args == 0`: O(1) and idiomatic
  unless (null args) $
    fail "'dump-index' takes no arguments"
  index <- loadIndex
  forM_ (in_entries index) $ \e -> do
    printf "%s %o %s\n" (show $ ie_mode e) (ie_realMode e) (ie_name e)
  print (in_tree index)
-- | 'gat diff-index': show the diff between the working tree and the
--   index, paged.
cmdDiffIndex :: [String] -> GitM ()
cmdDiffIndex args = do
  -- `null args` instead of `length args == 0`: O(1) and idiomatic
  unless (null args) $
    fail "'diff-index' takes no arguments"
  index <- liftIO loadIndex
  pairs <- liftIO $ diffAgainstIndex index
  redirectThroughPager $ mapM_ showDiff pairs
-- | 'gat diff [rev [rev]]': with no args, diff the working tree against
--   HEAD; with one rev, against that rev's tree; with two revs, diff
--   the two trees against each other.
cmdDiff :: [String] -> GitM ()
cmdDiff args = do
  diffpairs <-
    case args of
      [] -> do
        tree <- revTree "HEAD"
        liftIO $ diffAgainstTree tree
      [name] -> do
        tree <- revTree name
        liftIO $ diffAgainstTree tree
      [name1,name2] -> do
        tree1 <- revTree name1
        tree2 <- revTree name2
        liftIO $ diffTrees tree1 tree2
  redirectThroughPager $ mapM_ showDiff diffpairs
  where
    -- resolve a revision name to its tree, failing on bad names
    revTree :: String -> GitM Tree
    revTree name = do
      hash <- resolveRev name >>= forceError
      findTree hash
-- | 'gat dump-tree <rev>': resolve a revision to its tree and print it,
--   paged.
cmdDumpTree args = do
  unless (length args == 1) $
    fail "expects one arg"
  tree <- resolveRev (head args) >>= forceError >>= findTree
  redirectThroughPager $ liftIO $ print tree
-- | 'gat dump-pack-index <file>': dump a pack index file, paged.
cmdDumpPackIndex args = do
  unless (length args == 1) $
    fail "expects one arg"
  redirectThroughPager $ liftIO $ dumpPackIndex (head args)
-- | 'gat log [options] [startpoint]': show commit history starting from
--   the given revision (default HEAD), with optional count limit,
--   author filter, and per-commit file lists.  Output is paged.
cmdLog :: [String] -> GitM ()
cmdLog args = do
  (opts, args) <-
    case getOpt Permute options args of
      -- fold each option mutator over the defaults, left to right
      (o, a, []) -> return (foldl (flip id) defaultLogOptions o, a)
      (_, _, errs) -> fail $ concat errs ++ usage
  commithash <- do
    commitish <- case args of
      [x] -> return x
      [] -> return "HEAD"
      _ -> fail "expects zero or one arg"
    resolveRev commitish >>= forceError
  redirectThroughPager $ printLog opts commithash
  where
    usage = usageInfo "gat log [options] [startpoint]" options
    options = [
      Option "n" ["limit"]
        (ReqArg (\n opts -> opts { logoptions_limit=(read n) }) "LIMIT")
        "limit number of commits to show"
      , Option "" ["author"]
        (ReqArg (\author opts -> opts { logoptions_filter=authorFilter author })
          "AUTHOR")
        "show only commits by particular author"
      , Option "l" ["name-status"]
        (NoArg (\opts -> opts { logoptions_filelist=True }))
        "show files changed in each commit"
      ]
-- | Keep only commits whose author field contains the given substring.
authorFilter author = isInfixOf author . commit_author
-- | Dispatch table mapping sub-command names to their handlers.
commands = [
    ("cat", cmdCat)
  , ("diff-index", cmdDiffIndex)
  , ("diff", cmdDiff)
  , ("log", cmdLog)
  , ("ref", cmdRef)
  , ("dump-index", cmdDumpIndex)
  , ("dump-pack-index", cmdDumpPackIndex)
  , ("dump-tree", cmdDumpTree)
  ]
-- | Print an error message and the list of known sub-commands to
--   stderr, yielding a failing exit code for the caller.
usage message = do
  hPutStrLn stderr ("Error: " ++ message ++ ".")
  hPutStrLn stderr "Commands:"
  mapM_ (\(name, _) -> hPutStrLn stderr (" " ++ name)) commands
  return (ExitFailure 1)
-- | Entry point: dispatch the first argument through 'commands', turn
--   userError exceptions into a "fatal:" message with exit code 1, and
--   propagate the resulting exit code.
main = do
  argv <- getArgs
  exit <- do
    case argv of
      (cmd:args) -> do
        case lookup cmd commands of
          Just cmdfunc -> do
            catchJust userErrors
              (do runGit (cmdfunc args); return ExitSuccess)
              (\err -> do putStrLn $ "fatal: " ++ err; return (ExitFailure 1))
          _ -> usage $ "unknown command: '" ++ cmd ++ "'"
      _ -> usage $ "must provide command"
  exitWith exit
| martine/gat | Gat.hs | gpl-2.0 | 4,711 | 0 | 27 | 1,250 | 1,619 | 810 | 809 | 146 | 5 |
{-
Copyright 2012, 2013, 2014 Colin Woodbury <colingw@gmail.com>
This file is part of Aura.
Aura is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Aura is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Aura. If not, see <http://www.gnu.org/licenses/>.
-}
-- Agnostically builds packages. They can be either AUR or ABS.
module Aura.Build
( installPkgFiles
, buildPackages ) where
import System.FilePath ((</>))
import Control.Monad (when, void, join)
import Control.Applicative ((<*>), pure)
import Aura.Pacman (pacman)
import Aura.Settings.Base
import Aura.Colour.Text
import Aura.Monad.Aura
import Aura.Languages
import Aura.MakePkg
import Aura.Utils
import Aura.Core
import Utilities
import Shell
---
-- TODO should this be elsewhere
-- | Directory where built source packages are cached when the
--   keep-source option is enabled (see 'moveToSourcePath').
srcPkgStore :: FilePath
srcPkgStore = "/var/cache/aura/src"
-- | Hand a list of built package files (e.g.
--   /var/cache/pacman/pkg/*.pkg.tar.xz) to `pacman -U`, after checking
--   that the pacman database is not locked.  A no-op for an empty list.
installPkgFiles :: [String] -> [FilePath] -> Aura ()
installPkgFiles _ [] = return ()
installPkgFiles pacOpts files = do
  checkDBLock
  pacman ("-U" : pacOpts ++ files)
-- All building occurs within temp directories in the package cache,
-- or in a location specified by the user with flags.
-- | Build each package in turn inside the configured build path,
--   returning the paths of the built package files.
buildPackages :: [Buildable] -> Aura [FilePath]
buildPackages [] = return []
buildPackages pkgs = ask >>= \ss -> do
  let buildPath = buildPathOf ss
  -- run the build inside the build directory, then re-wrap the result
  -- back into the Aura monad
  result <- liftIO $ inDir buildPath (runAura (build [] pkgs) ss)
  wrap result
-- Handles the building of Packages. Fails nicely.
-- Assumed: All dependencies are already installed.
-- | Build the remaining packages, accumulating the paths of those that
--   succeeded.  On failure, 'buildFail' asks the user whether to keep
--   going with what was already built.
build :: [FilePath] -> [Buildable] -> Aura [FilePath]
build built [] = return $ filter notNull built
build built ps@(p:_) = do
  notify $ buildPackages_1 pn
  -- each package builds inside its own temp dir named after its base
  (paths,rest) <- catch (withTempDir pn (build' ps)) (buildFail built ps)
  build (paths ++ built) rest
  where pn = baseNameOf p
-- | Perform the actual build of the head package: fetch its build
--   scripts, overwrite the PKGBUILD if hot-editing/customizepkg is on,
--   run makepkg as the build user, and move the results to the cache.
--   Returns the built file paths and the packages still to build.
build' :: [Buildable] -> Aura ([FilePath],[Buildable])
build' [] = failure "build' : You should never see this."
build' (p:ps) = ask >>= \ss -> do
  let user = buildUserOf ss
  curr <- liftIO pwd
  getBuildScripts p user curr
  overwritePkgbuild p
  pNames <- join (makepkg <*> pure user)
  -- pNames <- makepkg >>= \f -> f user -- Which is better?
  paths <- moveToCachePath pNames
  -- optionally keep the source package too
  when (keepSource ss) $ makepkgSource user >>= void . moveToSourcePath
  liftIO $ cd curr
  return (paths,ps)
-- | Fetch the package's build scripts into the current directory,
--   chown everything to the build user, and cd into the scripts dir.
--   Fails with a scolding when the scripts cannot be obtained.
getBuildScripts :: Buildable -> String -> FilePath -> Aura ()
getBuildScripts pkg user currDir = do
  scriptsDir <- liftIO $ chown user currDir [] >> buildScripts pkg currDir
  case scriptsDir of
    Nothing -> scoldAndFail (buildFail_7 $ baseNameOf pkg)
    Just sd -> liftIO $ do
      chown user sd ["-R"]
      cd sd
{-}
getBuildScripts pkg user currDir = liftIO $ do
  chown user currDir []
  scriptsDir <- buildScripts pkg currDir
  chown user scriptsDir ["-R"]
  cd scriptsDir
-}
-- | Write the (possibly edited) PKGBUILD held in the Buildable to disk,
--   but only when hot-editing or customizepkg is enabled in settings.
overwritePkgbuild :: Buildable -> Aura ()
overwritePkgbuild p = asks (\ss -> any ($ ss) checks) >>=
  flip when (liftIO . writeFile "PKGBUILD" . pkgbuildOf $ p)
  where checks = [mayHotEdit,useCustomizepkg]
-- Inform the user that building failed. Ask them if they want to
-- continue installing previous packages that built successfully.
-- | On "yes" continue with nothing more to build; on "no" scold and
--   abort the whole operation.
buildFail :: [FilePath] -> [Buildable] -> String -> Aura ([FilePath],[Buildable])
buildFail _ [] _ = failure "buildFail : You should never see this message."
buildFail _ (p:_) errors = do -- asks langOf >>= \lang -> do
  scold $ buildFail_1 (baseNameOf p)
  displayBuildErrors errors
  -- printList red cyan (buildFail_2 lang) (map pkgBase ps)
  -- printList yellow cyan (buildFail_3 lang) $ map takeFileName built
  response <- optionalPrompt buildFail_6
  if response
     then return ([],[])
     else scoldAndFail buildFail_5
-- If the user wasn't running Aura with `-x`, then this will
-- show them the suppressed makepkg output.
-- | A short countdown precedes the dump so the user can brace for it.
displayBuildErrors :: Error -> Aura ()
displayBuildErrors errors = ask >>= \ss -> when (suppressMakepkg ss) $ do
  putStrA red (displayBuildErrors_1 $ langOf ss)
  liftIO (timedMessage 1000000 ["3.. ","2.. ","1..\n"] >> putStrLn errors)
-- Moves a file to the pacman package cache and returns its location.
-- | Each file is moved under the configured cache path; the new
--   locations are returned in the original order.
moveToCachePath :: [FilePath] -> Aura [FilePath]
moveToCachePath [] = return []
moveToCachePath (p:ps) = do
  newName <- ((</> p) . cachePathOf) <$> ask
  liftIO $ mv p newName
  (newName :) <$> moveToCachePath ps
-- | Move each file into the aura source-package cache
--   ('srcPkgStore'), returning the new locations in order.
moveToSourcePath :: [FilePath] -> Aura [FilePath]
moveToSourcePath = mapM $ \p -> do
  let dest = srcPkgStore </> p
  liftIO $ mv p dest
  return dest
| vigoos/Farrago-OS | aura-master/aura-master/src/Aura/Build.hs | gpl-2.0 | 5,098 | 0 | 16 | 989 | 1,249 | 643 | 606 | 83 | 2 |
module Main where
import Control.Monad
import qualified Data.Text.Lazy as LT
import Data.Text.Lazy (Text)
import qualified Data.Text.IO as TIO
import Data.Text.Read (decimal)
import Shelly
import Text.Shakespeare.Text (lt)
default (LT.Text, Shelly.FilePath)
-- | Window indices within a screen session.
type Range = [Int]
-- | Name of a GNU screen session.
type ScreenSession = Text
-- | Command mode sends the line for execution (a carriage return is
--   appended); Insert mode sends the keystrokes verbatim.
data Mode = Command | Insert deriving Eq
-- | Lift a pure value into Sh.
shret :: a -> Sh a
shret = shelly.return
-- | Read one line from stdin.
readInput = liftIO TIO.getLine
-- | Current user name, as reported by whoami(1).
whoami :: Sh Text
whoami = shelly $ silently $ run "whoami" []
-- | Strip newline, carriage-return and NUL characters anywhere in the
--   text (used to clean command output such as 'whoami').
trim :: Text -> Text
trim = applyTrims myTrims
  where
    -- delete each unwanted substring in turn
    applyTrims srs str = foldl (flip trim') str srs
    trim' rm' = LT.replace rm' ""
    myTrims = ["\n","\r","\NUL"]
-- | Post-process a command according to the mode: Command mode gets a
--   trailing carriage return so screen executes the line, Insert mode
--   passes the text through untouched.
modecmd :: Mode -> Text -> Text
modecmd Command txt = txt `LT.append` "\r"
modecmd Insert  txt = txt
-- | Send a command string to every window in the given range of a
--   screen session, via `screen -X at <n># stuff`.
cmdSess :: Text -> ScreenSession -> Range -> Mode -> Sh()
cmdSess cmd' s r m = shelly $ do
  echo [lt|Sending [#{cmd'}] to #{s} windows #{show r}|]
  mapM_ (send m) r
  where
    send m' i = run_ "screen" [ "-S" , s , "-X" , "at" , LT.pack $ show i ++ "#"
                              , "stuff" , (modecmd m') cmd' ]
-- | Interactive prompt loop: read a line and either interpret it as an
--   internal command (prefixed with ':') or forward it to the selected
--   session/window range.  NOTE(review): the ":i" branch switches to
--   Insert mode but never switches back, and ":k" currently just exits
--   (see the inline TODO-style comment below).
lineInteract :: Mode -> ScreenSession -> Range -> Sh ()
lineInteract mode sess range = shelly $ silently $ do
  echo_n [lt|#{sess}:#{show range} =<< |]
  cmd' <- readInput
  case cmd' of
    ":q" -> exit 0
    ":k" -> exit 0 -- also send exit to screens
    ":r" -> changeRange mode sess
    ":s" -> changeSession mode range
    ":i" -> lineInteract Insert sess range
    "help" -> echo [lt|Available internal commands are:
:q - quit
:k - killall
:r - pick new window range
:s - pick new session|]
    c -> cmdSess (LT.fromStrict c) sess range mode
  where
    changeRange m s = do
      r <- selectWindowRange
      lineInteract m s r
    changeSession m r = do
      s <- selectSession
      lineInteract m s r
-- | Directory where screen keeps the current user's session sockets
--   (/var/run/screen/S-<user>).
userSessionDir :: Sh Shelly.FilePath
userSessionDir = shelly $ silently $ do
  usr <- whoami
  return $ fromText $ "/var/run/screen/S-" `LT.append` trim usr
-- | Count the user's screen sessions by counting socket files in the
--   session directory.
nrSessions :: Sh Int
nrSessions = shelly $ silently $ do
  usd <- userSessionDir
  findFold fileCount 0 usd
  where
    fileCount int _ = shelly . return $ int+1
-- | List the user's screen sessions, 1-indexed for menu display.
--   Socket files are named "<pid>.<name>"; the part after the first dot
--   is the session name.
lsSessions :: Sh [(Int,ScreenSession)]
lsSessions = shelly $ silently $
  getSessionPaths >>= toSessionNames >>= indexNames
  where
    getSessionPaths = userSessionDir >>= findFold fileList []
    fileList ls' file = shret $ file : ls'
    toSessionNames = mapM (shret.sessionName.toTextIgnore)
    sessionName path' = LT.tail $ snd $ LT.breakOn "." path'
    indexNames names = shret $ zip [1..] names
-- | Interactively pick a session from a numbered menu, re-prompting on
--   non-numeric or out-of-range input.
selectSession :: Sh ScreenSession
selectSession = shelly $ silently $ do
  echo [lt|Please select recieving session:|]
  ss <- lsSessions
  ns <- nrSessions
  mapM_ printSessionList ss
  number <- readInput
  case decimal number of
    Right num -> case fst num of
      index | 0 < index && index <= ns -> resolveSession ss index
      _ -> echo_err [lt|Index out of range|] >> selectSession
    Left err -> echo_err (LT.pack err) >> selectSession
  where
    printSessionList (x,y) = echo [lt|(#{show x}) for #{y}|]
    -- menu entries are 1-indexed, the list is 0-indexed
    resolveSession sessions index = shelly.return $ snd $ sessions !! (index-1)
-- | Interactively read an inclusive first..last window range,
--   restarting the whole prompt on unparsable input.
selectWindowRange :: Sh [Int]
selectWindowRange = shelly $ silently $ do
  echo_n [lt|Enter first window: |]
  first <- readInput
  case decimal first of
    Right first' -> do
      echo_n [lt|Enter last window: |]
      last' <- readInput
      case decimal last' of
        Right last'' -> return [(fst$first')..(fst$last'')]
        _ -> selectWindowRange
    _ -> selectWindowRange
-- | Entry point: pick a session and window range, then loop forever
--   forwarding typed commands in Command mode.
main :: IO ()
main = shelly $ silently $ do
  session <- selectSession
  range <- selectWindowRange
  forever $ lineInteract Command session range
| edwtjo/screen-muxer | src/main/haskell/smuxer.hs | gpl-3.0 | 3,673 | 0 | 20 | 861 | 1,247 | 637 | 610 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
module VSim.Data.TInt where
import Data.Typeable
import Data.Generics
import Data.Int
import Data.Bits
import Foreign.Storable
import Text.Printf
-- | A 32-bit simulator integer.  A newtype over Int32 so VHDL integer
--   values cannot be confused with host Ints, while still deriving the
--   full numeric/bit/storable interface of the underlying type.
newtype TInt = TInt { unTInt :: Int32 }
  deriving (Eq, Ord, Num, Enum, Real, Integral, Storable, Bounded, Bits,
            PrintfArg, Data, Typeable)
-- Show/Read delegate to the wrapped Int32 so TInt reads and prints as
-- a plain number rather than as "TInt {unTInt = n}".
instance Show TInt where
  showsPrec d = showsPrec d . unTInt
instance Read TInt where
  readsPrec d s = map (\(a, r) -> (TInt a, r)) $ readsPrec d s
-- | Size of a TInt in bytes (4, via its Storable instance).
sizeOfTInt :: Int
sizeOfTInt = fromIntegral $ sizeOf (undefined :: TInt)
| grwlf/vsim | src/VSim/Data/TInt.hs | gpl-3.0 | 609 | 0 | 11 | 112 | 202 | 112 | 90 | 18 | 1 |
module Ampersand.Basics
( module Ampersand.Basics.Auxiliaries
, module Ampersand.Basics.Exit
, module Ampersand.Basics.Languages
, module Ampersand.Basics.PandocExtended
, module Ampersand.Basics.Prelude
, module Ampersand.Basics.String
, module Ampersand.Basics.Unique
, module Ampersand.Basics.UTF8
, module Ampersand.Basics.Version
)
where
import Ampersand.Basics.Auxiliaries
import Ampersand.Basics.Exit
import Ampersand.Basics.Languages
import Ampersand.Basics.PandocExtended
import Ampersand.Basics.Prelude
import Ampersand.Basics.String
import Ampersand.Basics.Unique
import Ampersand.Basics.UTF8
import Ampersand.Basics.Version | AmpersandTarski/ampersand | src/Ampersand/Basics.hs | gpl-3.0 | 666 | 0 | 5 | 79 | 125 | 86 | 39 | 19 | 0 |
{-# LANGUAGE TemplateHaskell, DeriveFunctor, FlexibleInstances, MultiParamTypeClasses #-}
module LanguageDef.Data.LanguageDef where
{-
A language defintion builds on many language-definition aspects. Each language definition aspect has its own section, for which it handles the parsing (and meta-data such as documentation)
-}
import Utils.All
import LanguageDef.Utils.ExceptionInfo
import LanguageDef.Utils.Checkable
import LanguageDef.Utils.Grouper
import LanguageDef.Utils.LocationInfo
import qualified LanguageDef.Data.BNF as BNF
import LanguageDef.Data.SyntacticForm
import LanguageDef.Data.ParseTree
import LanguageDef.Data.Expression hiding (choices')
import LanguageDef.Data.SyntFormIndex
import LanguageDef.Data.Function hiding (choices')
import LanguageDef.Data.Relation hiding (choices')
import LanguageDef.Data.Rule
import qualified LanguageDef.Data.Relation as Relations
import LanguageDef.Combiner
import LanguageDef.MetaSyntax (helperSyntax, bnfSyntax, parseSyntax, patchNames, nls, syntaxDecl')
import Graphs.Lattice
import Data.Char
import Data.List as L
import Data.Map as M
import qualified Data.Set as S
import Data.Either
import Data.Maybe
import Data.Bifunctor (first)
import Control.Arrow ((&&&), (***))
import Control.Monad
import System.Directory
import qualified Assets
-- | An import statement in a language definition.  The type parameter
--   tracks the resolution state: () before resolution, the resolved
--   payload (e.g. a FilePath) afterwards.
data Import a = Import
  { _importName :: [Name] -- The written, potentially partially qualified module name
  , _isLocal :: Bool -- If the import is local, thus should start looking from the current module
  , _importMeta :: MetaInfo
  , _importA :: a
  } deriving (Show, Eq, Functor)
makeLenses ''Import
{- | The main definition of a language. Represents an entire langdef file.
The extra type arguments are used to indicate the level of resolution of the definition:
- LanguageDef (indicates resolution of the imports) (indicates resolution of expressions/functions)
-}
data LanguageDef' imported funcResolution
  = LanguageDef
  { _langTitle :: Name -- title of the language
  , _langImports :: [Import imported]
  , _langMeta :: [String] -- The comments just under the title
  , _langLocation :: LocationInfo
  , _langSyntax :: Maybe (Grouper SyntacticForm) -- The syntax of the language, aka the BNF
  , _langSupertypes :: Lattice FQName -- The global supertype relationship; is filled in later on by the langdefs-fixes
  , _langFunctions :: Maybe (Grouper (Function' funcResolution))
  , _langRelations :: Maybe (Grouper Relation)
  , _langRules :: Maybe (Grouper (Rule' funcResolution))
  }
  deriving (Show, Eq)
makeLenses ''LanguageDef'
-- | Replace the function- and rule-groupers of a language definition,
--   carrying every other field over unchanged.  Used to re-annotate a
--   definition with a new function-resolution index.
updateFR :: (Maybe (Grouper (Function' fr1)), Maybe (Grouper (Rule' fr1))) -> LanguageDef' imported fr0 -> LanguageDef' imported fr1
updateFR (funcs, rules) (LanguageDef title imps meta loc synt supers _ rels _)
  = LanguageDef title imps meta loc synt supers funcs rels rules
-- | An import resolved to the file it lives in.
type ResolvedImport = FilePath
-- | A fully resolved language definition.
type LanguageDef = LanguageDef' ResolvedImport SyntFormIndex
{- | Performs various checks. These checks are run post name resolution, so we might assume that no unqualified names still exists
>>> import LanguageDef.API
>>> loadAssetLangDef "TestInput/Faulty" ["FunctionDuplicateNameTest"] & toCoParsable
"| While validating the functions while validating \nError: \n \8226 The function \"not\" is defined multiple times"
>>> loadAssetLangDef "TestInput/Faulty" ["FunctionIncorrectNameTest"] & toCoParsable
"| While validating the functions while validating \n| While checking function \"not\" \nError: \n \8226 Some clauses have a different name. The function name is \"not\", but a clause is named f"
-}
instance Checkable' (FQName -> Failable FQName, FQName -> FQName -> Bool, [Name]) (LanguageDef' ResolvedImport SyntFormIndex) where
	check' extras (LanguageDef title imports meta li syntax superTypes functions rels rules)
		= do	assert' (title /= "") "The title of a language should not be empty"
			checkM' extras syntax & inMsg' "While validating the syntax"
			checkM functions & inMsg' "While validating the functions"
			checkM rels & inMsg' "While validating the relation declarations"
			-- Rules reference relations, so a rules section without a
			-- relation-declaration section is rejected up front.
			assert' (isNothing rules || isJust rels) "When rules are defined, a relation declaration section should be present"
			-- NOTE(review): 'fromJust rels' looks unsafe; it appears to rely on
			-- laziness ('checkM'' presumably not forcing the context when 'rules'
			-- is Nothing) plus the assert above — confirm with Failable's semantics.
			checkM' (fromJust rels) rules & inMsg' "While validating the relation implementation"
-- | Is the first fully qualified name a subtype of the second? The relation
-- is reflexive; otherwise the supertype lattice of the langdef decides.
isSubtypeOf :: LanguageDef' ResolvedImport fr -> FQName -> FQName -> Bool
isSubtypeOf ld sub super
	= sub == super || isSubsetOf (get langSupertypes ld) sub super
-------------------------------- IMPORT FIXING STUFF ------------------------------------
-- | Rewrites every local import into an absolute one by prefixing the given
-- module path and clearing the 'isLocal' flag; non-local imports pass through.
resolveLocalImports :: [Name] -> LanguageDef' x f -> LanguageDef' x f
resolveLocalImports prefixForLocal
	= over (langImports . mapped) resolveOne
	where	resolveOne imp
			| get isLocal imp
				= imp & set isLocal False & over importName (prefixForLocal ++)
			| otherwise
				= imp
-- | Resolves every import of the langdef against the module-to-file map,
-- wrapping any failure in a message naming the language being fixed.
fixImport :: Map [Name] FilePath -> LanguageDef' () fr -> Failable (LanguageDef' ResolvedImport fr)
fixImport resolver ld
 = inMsg' ("While fixing the import annotations for "++show (get langTitle ld)) $
   do let imps = get langImports ld
      imps' <- imps |> _fixImport resolver & allGood
      return $ set langImports imps' ld
-- | Resolves one import; fails at the import's source location when the
-- imported module is absent from the resolver map.
_fixImport :: Map [Name] FilePath -> Import () -> Failable (Import FilePath)
_fixImport resolver imprt
 = inLocation (imprt & get importMeta & get miLoc) $
   do let nm = get importName imprt :: [Name]
      resolved <- checkExists' nm resolver ("The import for "++intercalate "." nm++" was not found")
      return (imprt |> const resolved)
-- | All fully qualified module names this langdef (given with its own FQ
-- name) depends on; local imports are qualified via 'fqForImport'.
dependsOn :: ([Name], LanguageDef' () ()) -> [[Name]]
dependsOn (ldFQ, ld)
 = get langImports ld |> fqForImport ldFQ
-- | Fully qualifies an import's name: a local import is interpreted relative
-- to the current module's parent path (its FQ name minus the last segment);
-- a non-local import is taken as written.
fqForImport :: [Name] -> Import a -> [Name]
fqForImport currentModule imprt =
	if get isLocal imprt
		then init currentModule ++ written
		else written
	where written = get importName imprt
------------------------------ PARSING STUFF --------------------------------------------------------
-- Sanity check: the skeleton combiner must line up with the meta syntaxes.
-- >>> _checkCombiner
_checkCombiner = check' metaSyntaxes (parseLangDef _fullFileCombiner)
-- | Parses the entire file, file should still be checked against it's context!
-- >>> _checkCombiner
-- Success ()
-- >>> import Graphs.Lattice
-- >>> parseFullFile "Test:Assets/TestLang" Assets._TestLanguage_language |> set langSupertypes (emptyLattice ([], "T") ([], "B"))
-- Success ...
parseFullFile :: FilePath -> String -> Failable (LanguageDef' () ())
parseFullFile fp contents
 = do pt <- parse fp (metaSyntaxes, ["ALGT"]) "langDef" contents
      (li, langDef) <- interpret (parseLangDef _fullFileCombiner & withLocation (,)) pt
      let ((title, meta, imports), (syntax, (funcs, (rels, rules)))) = langDef
      -- The supertype lattice cannot be built from a single file; a later
      -- fixing pass replaces the placeholder below. Forcing it early crashes.
      return $ LanguageDef
                title
                imports
                meta
                li
                syntax
                (error "The lattice is not in use yet!") -- filled later on
                funcs
                rels
                rules
-- | Converts the modules from parsetree into all the needed parts
_fullFileCombiner :: Combiner
      (Maybe (Grouper SyntacticForm),
       (Maybe (Grouper (Function' ())),
        (Maybe (Grouper Relation),
         Maybe (Grouper (Rule' ()))
       )))
_fullFileCombiner
 = let s = moduleCombiner "Syntax" syntaxDecl'
       f = moduleCombiner "Functions" functionsCmb
       rels = moduleCombiner "Relations" relations
       rules = moduleCombiner "Rules" Relations.rules
       -- Every section is optional; wrap each result in Just so a missing
       -- section can be represented by Nothing.
       inJ cmb = cmb |> Just
       modules = _optionalCombiners (inJ s) $
                  _optionalCombiners (inJ f) $
                  _optionalCombiners' (inJ rels)
                  [inJ rules]
       -- Flatten the nested Maybe(Maybe ...) layers the optional-combinators
       -- introduce, and order alternatives longest-first.
       modules' = modules & reverse ||>> _chain ||>> (|> _chain)
   in
       choices' "modules" modules'
-- | Collapses one level of optional nesting produced by '_optionalCombiners'.
_chain :: (Maybe (Maybe a), Maybe (Maybe b, Maybe c)) -> (Maybe a, (Maybe b, Maybe c))
_chain (mma, mmbmc)
 = (join mma, distrEffect mmbmc)
-- | Variant of '_optionalCombiners' for already-Maybe-wrapped results;
-- flattens the doubled Maybe on both tuple components.
_optionalCombiners' :: Combiner (Maybe a) -> [Combiner (Maybe b)] -> [Combiner (Maybe a, Maybe b)]
_optionalCombiners' a b
 = _optionalCombiners a b ||>> (join *** join)
-- | All ways to combine an optional leading combiner with each of the given
-- alternatives: the leading one alone, each alternative alone, and the
-- leading one followed by each alternative.
_optionalCombiners :: Combiner a -> [Combiner b] -> [Combiner (Maybe a, Maybe b)]
_optionalCombiners cmbA cmbB
 = let cmbA' = cmbA |> Just
       cmbB' = cmbB ||>> Just
       a = cmbA' |> (\a -> (a, Nothing))
       as = cmbB'
       fstNothing b = (Nothing, b)
   in
       (a : (as ||>> fstNothing)) ++ [ cmbA' <+> a' | a' <- as ]
-- | Enumerates the "optional first element" orderings: the first element
-- alone, each remaining element alone, and the first element combined (via
-- the given operator) with each remaining element, in that order.
_optionalOrder :: (a -> a -> a) -> a -> [a] -> [a]
_optionalOrder combine x rest
 = x : rest ++ map (x `combine`) rest
{- | All the syntaxes needed to parse a language definition file
>>> let resolve = Success :: FQName -> Failable FQName
>>> let subtypeStub = (==) :: FQName -> FQName -> Bool
>>> metaSyntaxes & M.toList |> (\(fq, s) -> check' (resolve, subtypeStub, fq) s) & allGood >> pass
Success ()
-}
metaSyntaxes :: Map [Name] Syntax
metaSyntaxes
 = let	-- The file-skeleton syntax: which module may call into which
	-- (namespace, rule) for each section header.
	syntax = mainSyntax [("Syntax", (["Syntax"], "syntax"))
			, ("Functions", (["Functions"], "functions"))
			, ("Relations", (["Relations"], "relations"))
			, ("Rules", (["Relations"], "rules"))
			]
	-- Every namespace is a singleton module path (hence 'first (:[])').
	syntaxes = [("Helper", helperSyntax)
			, ("ALGT", syntax)
			, ("Syntax", bnfSyntax)
			, ("Functions", functionSyntax)
			, ("Relations", relationSyntax)
			]
			|> first (:[]) & M.fromList
   in
	syntaxes
------------------------------------- EXTERNAL Definitions ----------------------------------------
{- | The syntax that declares metafunctions, as defined in the Assets
>>> functionSyntax
Grouper {_grouperDict = fromList [("arguments",SyntacticForm ...
-}
functionSyntax :: Syntax
functionSyntax
 = loadAssetsSyntax "Functions" Assets._ALGT_Native_Functions_language
{- | The syntax that declares relations, as defined in the Assets
>>> relationSyntax
Grouper {_grouperDict = fromList [("commaSepExpr",SyntacticForm ...
-}
relationSyntax :: Syntax
relationSyntax
 = loadAssetsSyntax "Relations" Assets._ALGT_Native_Relations_language
-- | Parses a compiled-in asset langdef and extracts its syntax section,
-- renaming the forms into the given namespace. Crashes on a malformed asset:
-- these are shipped with the binary, so failure is a build/packaging bug.
loadAssetsSyntax :: Name -> String -> Syntax
loadAssetsSyntax title contents
 = parseFullFile ("Assets."++title++".language") contents
   & crash
   & get langSyntax
   & fromMaybe (error $ title ++ " asset does not contain syntax?")
   & patchNames [title]
------------------------------------------- Explicit BNF (of skeleton etc) -------------------------------------
-- Shorthand: a choice over a rule living in the "ALGT" namespace.
choices' nm = choices (["ALGT"], nm)
-- The syntax of the entire file skeleton, with: [("Syntax to call into", "Title", rule to call")
mainSyntax :: [(Name, FQName)] -> Syntax
mainSyntax subRules
 =([ "stars ::= \"*\" stars | \"*\""
  , "dashes ::= \"-\" dashes | \"-\""
  , "eqs ::= \"=\" eqs | \"=\""
  , "lineContents ::= LineChar lineContents"
  , "\t | \"\\n\""
  , "comment ::= \"#\" $lineContents"
  , "title ::= imports $lineContents $stars Syntax.nls"
  , "\t | $lineContents $stars Syntax.nls"
  , "namespace ~~= IdentifierUpper \".\" namespace | IdentifierUpper"
  , "importStm ::= \"import\" \"local\" | \"import\""
  , "import ::= Syntax.nls importStm namespace | importStm namespace"
  , "imports ::= import imports | Syntax.nls"
  ]
  -- One title rule and one module rule per section, plus the "modules" rule
  -- enumerating every admissible combination of (optional) sections.
  ++ (subRules |> fst |> _title)
  ++ (subRules |> _titledModCall |> snd)
  ++ ["modules ::= "++subRules |> _titledModCall |> fst & allOptional & intercalate "\n\t|"]
  ++ [ "langDef ::= title modules"
     ]
  ) & unlines & parseSyntax "ALGT" & crash
-- | All orderings in which a suffix of the given section names may be left
-- out, preserving relative order; each result is one admissible sequence.
_allOptional :: [Name] -> [[String]]
_allOptional [] = [] -- previously a non-exhaustive-pattern crash
_allOptional [name] = [[name]]
_allOptional (name:names)
 = _optionalOrder (++) [name] $ _allOptional names
-- | The admissible section orderings, rendered longest-first, each as a
-- space-separated alternative for the skeleton BNF.
allOptional :: [Name] -> [String]
allOptional names
 = _allOptional names & reverse |> unwords
-- | BNF body for a module section: its title rule followed by a call into
-- the section's own namespace rule.
_moduleCall :: (Name, FQName) -> String
_moduleCall (modName, calledRule)
 = [ "title"++modName
   , BNF.RuleCall calledRule & toParsable
   ] & unwords
-- | The full BNF rule for one module section: name of the form plus its
-- definition (with and without trailing newlines).
_titledModCall :: (Name, FQName) -> (Name, String)
_titledModCall info@(modName, _)
 = let	formName = "module"++modName
	syntForm = formName ++" ::= "++ _moduleCall info ++ " Syntax.nls | "++ _moduleCall info in
	(formName, syntForm)
-- | BNF rule matching a section header: the literal section name, a newline,
-- a line of '=' signs, another newline.
_title :: Name -> String
_title nm
 = "title"++nm++" \t::= "++show nm++" Syntax.nl eqs Syntax.nl"
-- | Extracts the language title, its doc comments, and its imports from the
-- "title" parse-tree node; the variant without imports yields an empty list.
_titleCombiner :: Combiner (String, [String], [Import ()])
_titleCombiner
 = let	-- 'init' drops the trailing newline captured with the title line.
	base = ((capture |> init) {-lineContents-} <+> (skip {-stars-} **> nls)) in
	choices' "title" [cmb (\imps (nm, doc) -> (nm, doc, imps)) _imports base
			, base |> (\(nm, doc) -> (nm, doc, []))
			]
-- | Zero or more import statements.
_imports :: Combiner [Import ()]
_imports = choices' "imports"
		[ cmb (:) _import _imports
		, skip' []
		]
-- | One import statement, optionally preceded by doc comments which become
-- the import's 'MetaInfo' documentation.
_import :: Combiner (Import ())
_import = let base = (_importStm <+> _nameSpace) & withLocation (,)
			|> (\(li, (local, ns)) doc -> Import ns local (MetaInfo li doc) ())
			:: Combiner (Doc -> Import ())
		in
		choices' "import"
		[ cmb (&) (nls |> concat) base
		, base |> (\f -> f "")
		]
-- | Whether the import is local ("import local") or absolute ("import").
_importStm :: Combiner Bool
_importStm = choices' "importStm"
		[lit "import" **> (lit "local" |> const True)
		, lit "import" |> const False
		]
-- | A dot-separated, upper-case qualified module name.
_nameSpace :: Combiner [Name]
_nameSpace = choices' "namespace"
		[ cmb (:) capture (lit "." **> _nameSpace)
		, capture |> (:[])]
-- | Skips a section header (name, newline, '='-line, newline) entirely.
_modTitleCombiner :: Name -> Combiner ()
_modTitleCombiner nm
 = choices' ("title"++nm) [skip **> skip **> skip **> skip]
-- | Runs the given combiner on the body of the named module section,
-- tolerating optional trailing newlines.
moduleCombiner :: Name -> Combiner la -> Combiner la
moduleCombiner title main
 = choices' ("module"++title)
	[ _modTitleCombiner title **> main <** skip
	, _modTitleCombiner title **> main]
-- | The whole-file combiner: title block followed by the module sections.
parseLangDef :: Combiner parts -> Combiner ((Name, [String], [Import ()]), parts)
parseLangDef parseModules
 = choices' "langDef"
	[_titleCombiner <+> parseModules]
-- Presents a langdef in the generic documentation browser: title, kind tag,
-- meta info (location + doc comments) and its parsable rendering.
instance Infoable (LanguageDef' a b) where
	getInfo ld
	 = let mi = MetaInfo (get langLocation ld) (get langMeta ld & unlines)
	   in
		AllInfo (get langTitle ld) "Language Definition" mi (toParsable ld)
-- Renders the langdef back into the concrete file syntax: imports first,
-- then the starred title header containing docs and each optional section.
instance ToString (LanguageDef' a b) where
	toParsable (LanguageDef title imports langMeta langLoc syntax _ functions rels rules)
	 = let mayb header = maybe "" (inHeader' header . toParsable) in
		(imports |> toParsable & unlines) ++
		inHeader "" title '*' (unlines (
			langMeta |> ("# "++)
			++ [ mayb "Syntax" syntax, mayb "Functions" functions, mayb "Relations" rels, mayb "Rules" rules]
		))
-- Renders one import statement, preceded by its meta info (doc comments).
instance ToString (Import a) where
	toParsable (Import ns local meta _)
	 = [ toParsable meta
	   , "import "++ (if local then "local " else "") ++ ns & intercalate "."] & unlines
| pietervdvn/ALGT2 | src/LanguageDef/Data/LanguageDef.hs | gpl-3.0 | 14,092 | 521 | 15 | 2,556 | 3,895 | 2,204 | 1,691 | 261 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.ProFiles.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Create a new view (profile).
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.profiles.insert@.
module Network.Google.Resource.Analytics.Management.ProFiles.Insert
(
-- * REST Resource
ManagementProFilesInsertResource
-- * Creating a Request
, managementProFilesInsert
, ManagementProFilesInsert
-- * Request Lenses
, mpfiWebPropertyId
, mpfiPayload
, mpfiAccountId
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- | A resource alias for @analytics.management.profiles.insert@ method which the
-- 'ManagementProFilesInsert' request conforms to.
--
-- Servant-style path: POST
-- /analytics/v3/management/accounts/{accountId}/webproperties/{webPropertyId}/profiles
-- with a JSON 'ProFile' body, returning the created 'ProFile'.
type ManagementProFilesInsertResource =
     "analytics" :>
       "v3" :>
         "management" :>
           "accounts" :>
             Capture "accountId" Text :>
               "webproperties" :>
                 Capture "webPropertyId" Text :>
                   "profiles" :>
                     QueryParam "alt" AltJSON :>
                       ReqBody '[JSON] ProFile :> Post '[JSON] ProFile
-- | Create a new view (profile).
--
-- /See:/ 'managementProFilesInsert' smart constructor.
data ManagementProFilesInsert =
  ManagementProFilesInsert'
    { _mpfiWebPropertyId :: !Text -- web property the view belongs to
    , _mpfiPayload :: !ProFile -- request body
    , _mpfiAccountId :: !Text -- owning account
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ManagementProFilesInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mpfiWebPropertyId'
--
-- * 'mpfiPayload'
--
-- * 'mpfiAccountId'
managementProFilesInsert
    :: Text -- ^ 'mpfiWebPropertyId'
    -> ProFile -- ^ 'mpfiPayload'
    -> Text -- ^ 'mpfiAccountId'
    -> ManagementProFilesInsert
managementProFilesInsert webPropertyId payload accountId =
  ManagementProFilesInsert'
    { _mpfiWebPropertyId = webPropertyId
    , _mpfiPayload = payload
    , _mpfiAccountId = accountId
    }
-- | Web property ID to create the view (profile) for.
mpfiWebPropertyId :: Lens' ManagementProFilesInsert Text
mpfiWebPropertyId
  = lens _mpfiWebPropertyId
      (\ s a -> s{_mpfiWebPropertyId = a})
-- | Multipart request metadata.
mpfiPayload :: Lens' ManagementProFilesInsert ProFile
mpfiPayload
  = lens _mpfiPayload (\ s a -> s{_mpfiPayload = a})
-- | Account ID to create the view (profile) for.
mpfiAccountId :: Lens' ManagementProFilesInsert Text
mpfiAccountId
  = lens _mpfiAccountId
      (\ s a -> s{_mpfiAccountId = a})
-- Wires the request record onto the servant resource: the response is the
-- created 'ProFile' and the call needs the analytics.edit OAuth scope.
instance GoogleRequest ManagementProFilesInsert where
        type Rs ManagementProFilesInsert = ProFile
        type Scopes ManagementProFilesInsert =
             '["https://www.googleapis.com/auth/analytics.edit"]
        requestClient ManagementProFilesInsert'{..}
          -- Argument order follows the Captures in the resource type:
          -- accountId, then webPropertyId, then alt=json, then the body.
          = go _mpfiAccountId _mpfiWebPropertyId (Just AltJSON)
              _mpfiPayload
              analyticsService
          where go
                  = buildClient
                      (Proxy :: Proxy ManagementProFilesInsertResource)
                      mempty
| brendanhay/gogol | gogol-analytics/gen/Network/Google/Resource/Analytics/Management/ProFiles/Insert.hs | mpl-2.0 | 3,904 | 0 | 17 | 878 | 470 | 280 | 190 | 78 | 1 |
module ObjD.Link.Composition (
linkFuncOp
)where
import ObjD.Link.Struct
import ObjD.Link.Env
import ObjD.Link.DataType
import ObjD.Link.Conversion
import Data.List
import qualified ObjD.Struct as D
{------------------------------------------------------------------------------------------------------------------------------
- Functional Compositions >> *|* **
------------------------------------------------------------------------------------------------------------------------------}
-- | Links a functional-composition expression. 'D.FuncOpBind' pipes the left
-- function's output into the right one (mapping through an option type via
-- its map/for methods when needed); 'D.FuncOpClone' feeds the same input to
-- both functions and combines their results (braces/tuple depending on
-- voidness). Any type mismatch is reported as an 'ExpDError'.
-- NOTE(review): only the 'D.FuncOp' constructor is matched — presumably the
-- caller dispatches other expression shapes elsewhere; confirm.
linkFuncOp :: Env -> D.Exp -> Exp
linkFuncOp env ex@(D.FuncOp tp l r) =
	let
		l' = envExprCompile env l
		r' = envExprCompile env r
		ltp = exprDataType l'
		-- Input/output types of the left operand; 'Left' carries the error text.
		lInputType = case ltp of
			TPFun ret _ -> Right $ head ret
			_ -> Left $ "Left is not function but " ++ show ltp ++ " in " ++ show l'
		lOutputType = case ltp of
			TPFun _ ret -> Right ret
			_ -> Left $ "Left is not function but " ++ show ltp ++ " in " ++ show l'
		-- For bind the right side consumes the left's output (unwrapped from
		-- an option); for clone both sides consume the same input.
		rInputTypeShouldBe = case tp of
			D.FuncOpBind -> lOutputType >>= \t -> case t of
				TPOption _ o -> return $ unwrapGeneric o
				_ -> return $ t
			D.FuncOpClone -> lInputType
		-- If the right side is not already a function, wrap it in a lambda
		-- binding "_" to the expected input type.
		r'' = case exprDataType r' of
			TPFun{} -> r'
			_ -> case rInputTypeShouldBe of
				Left _ -> r'
				Right ritp ->
					let
						rr = envExprCompile (envAddVals [localVal "_" ritp] env) r
						etp = exprDataType rr
					in Lambda [("_", ritp)] (maybeAddReturn env etp rr) etp
		-- Both operands are bound to local vals so each is evaluated once.
		ldef = localVal "__l" (exprDataType l')
		rdef = localVal "__r" (exprDataType r'')
		rtp = exprDataType r''
		rInputType = case rtp of
			TPFun ret _ -> Right $ head ret
			_ -> Left $ "Right is not function but " ++ show rtp ++ " in " ++ show r''
		rOutputType = case rtp of
			TPFun _ ret -> Right ret
			_ -> Left $ "Right is not function but " ++ show rtp ++ " in " ++ show r''
		-- f/g build an application of the left/right val to expression p,
		-- failing first if the operand is not a function at all.
		f p = do
			lInputType
			return $ Dot (callRef ldef) $ call (applyLambdaDef ltp) [p]
		g p = do
			rInputType
			return $ Dot (callRef rdef) $ call (applyLambdaDef rtp) [p]
		compile :: Either String Exp
		compile = do
			li <- lInputType
			lo <- lOutputType
			ri <- rInputType
			ro <- rOutputType
			let
				lambda o c = Lambda [("_", li)] (maybeAddReturn env o c) o
				-- Bind: \_ -> g(f(_)), going through option map/for when the
				-- left output is optional but the right input is not.
				bind :: Either String Exp
				bind = do
					ff <- f $ callRef $ localVal "_" li
					let
						dotCl = do
							c <- g ff
							return $ lambda ro c
						optClass = dataTypeClass env lo
						mapDef = maybe (Left "map in option didn't find") Right $ find ( (== "map") . defName) $ classDefs optClass
						forDef = maybe (Left "for in option didn't find") Right $ find ( (== "for") . defName) $ classDefs optClass
						-- ff.map(\_ -> g _) — or .for when the right returns void.
						optCall = do
							m <- if ro == TPVoid then forDef else mapDef
							gg <- g $ callRef $ localVal "_" $ wrapGeneric ri
							let c = Dot ff $ call m [Lambda
								[("_", wrapGeneric ri)]
								(maybeAddReturn env (wrapGeneric ro) gg)
								(wrapGeneric ro)]
							return $ lambda (if ro == TPVoid then TPVoid else TPOption False $ wrapGeneric ro) c
					case (lo, ri) of
						(TPOption _ _, TPOption _ _) -> dotCl
						(TPOption _ _, _) -> optCall
						_ -> dotCl
				-- Clone: \_ -> (f _, g _); void results become statements
				-- in braces, non-void results a tuple (or single value).
				clone :: Either String Exp
				clone = do
					ff <- f $ callRef $ localVal "_" li
					gg <- g $ callRef $ localVal "_" li
					case (lo, ro) of
						(TPVoid, TPVoid) -> return $ lambda TPVoid $ Braces [ff, gg]
						(TPVoid, _) -> return $ lambda ro $ Braces [ff, maybeAddReturn env ro gg]
						(_, TPVoid) -> return $ lambda lo $ Braces [gg, maybeAddReturn env lo ff]
						_ -> return $ lambda (TPTuple [lo, ro]) $ Tuple [ff, gg]
			case tp of
				D.FuncOpBind -> bind
				D.FuncOpClone -> clone
	in case compile of
		Left err -> ExpDError err ex
		Right e ->
			-- Declare both operands (with implicit conversions applied),
			-- then yield the composed lambda.
			Braces [
				declareVal env ldef{defBody = implicitConvertsion env ltp l'},
				declareVal env rdef{defBody = implicitConvertsion env rtp r''},
				e
			]
| antonzherdev/objd | src/ObjD/Link/Composition.hs | lgpl-3.0 | 3,843 | 46 | 30 | 1,042 | 1,456 | 725 | 731 | 98 | 18 |
module Quizz.CoreSpec (main, spec) where
import Test.Hspec
import Quizz.Core
-- | Test entry point: run the whole spec with hspec.
main :: IO ()
main = hspec spec
-- | Unit tests for the Quizz.Core datatypes and the 'correct' grading
-- function. The 'correct' cases pin down the four grading outcomes visible
-- below: checked+valid -> Correct, unchecked+valid -> Missed,
-- checked+invalid -> Wrong, unchecked+invalid -> NotCheckedNotAnswer.
spec :: Spec
spec = do
  describe "Proposition datatype" $ do
    describe "isValid getter" $ do
      it "A valid proposition" $ do
        isValid (Proposition "A" True) `shouldBe` True
      it "An invalid proposition" $ do
        isValid (Proposition "A" False) `shouldBe` False
    describe "content getter" $ do
      it "A simple content" $ do
        content (Proposition "A" True) `shouldBe` "A"
  describe "Question datatype" $ do
    describe "title getter" $ do
      it "A simple title" $ do
        title (Question "A" []) `shouldBe` "A"
    describe "propositions getter" $ do
      it "No proposition" $ do
        (length . propositions) (Question "A" []) `shouldBe` 0
      it "One proposition" $ do
        (length . propositions) (Question "A" [Proposition "A" True]) `shouldBe` 1
      it "Two propositions" $ do
        (length . propositions) (Question "A" [Proposition "A" True, Proposition "A" True]) `shouldBe` 2
  describe "Answers datatype" $ do
    describe "answers" $ do
      it "No answer" $ do
        (length . answers) (Answers []) `shouldBe` 0
      it "One answer" $ do
        (length . answers) (Answers [True]) `shouldBe` 1
      it "Four answers" $ do
        (length . answers) (Answers [True, False, False, True]) `shouldBe` 4
  describe "Corrections datatype" $ do
    describe "corrections" $ do
      it "No correction" $ do
        (length . corrections) (Corrections []) `shouldBe` 0
      it "One correction" $ do
        (length . corrections) (Corrections [Correct]) `shouldBe` 1
      it "Two corrections" $ do
        (length . corrections) (Corrections [Wrong, Missed]) `shouldBe` 2
    describe "correct" $ do
      it "One proposition, one good answer, correctly answered" $ do
        correct (Question "A" [Proposition "A" True]) (Answers [True])`shouldBe` (Corrections [Correct])
      it "One proposition, one good answer, not answered" $ do
        correct (Question "A" [Proposition "A" True]) (Answers [False])`shouldBe` (Corrections [Missed])
      it "One proposition, no good answer, badly answered" $ do
        correct (Question "A" [Proposition "A" False]) (Answers [True])`shouldBe` (Corrections [Wrong])
      it "One proposition, no good answer, correctly answered" $ do
        correct (Question "A" [Proposition "A" False]) (Answers [False])`shouldBe` (Corrections [NotCheckedNotAnswer])
      it "Four propositions, two good answers, one correctly answered, one badly answered, one forgot" $ do
        correct (Question "A" [Proposition "A" True, Proposition "A" True, Proposition "A" False, Proposition "A" False]) (Answers [True, False, True, False])`shouldBe` (Corrections [Correct, Missed, Wrong, NotCheckedNotAnswer])
| blackheaven/quizz.hs | test/Quizz/CoreSpec.hs | unlicense | 2,818 | 0 | 21 | 662 | 991 | 487 | 504 | 54 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Quasar.Api.Routing where
import Control.Lens
import Control.Monad.IO.Class
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Data.CaseInsensitive
import Data.Conduit (ResourceT)
import Data.Monoid
import Data.Text
import Network.HTTP.Types.Method
import Network.HTTP.Types.Status
import Network.HTTP.Types.Header
import qualified Network.Wai as W
import Quasar.Api.Http.Request
import Quasar.Api.Http.Response
-- | A route: the HTTP method plus the decoded path segments of a request.
data Route = Route StdMethod [Text]
  deriving (Eq, Show)
-- | A router maps a route and parsed request to a response; 'Nothing' means
-- the router does not handle that route/request.
type Router = Route -> Request BS.ByteString -> Maybe (Response (Maybe LBS.ByteString))
-- | Turn a 'Router' into a WAI application body: read and parse the raw
-- request, dispatch it through the router, and answer with a bad-request
-- response whenever either parsing or routing fails.
routedApplication :: Router -> W.Request -> ResourceT IO W.Response
routedApplication router warpRequest = do
  rawBody <- liftIO $ parseRawRequestBody warpRequest
  let qresponse = maybe badRequestResponse id $ do
        request <- buildRequest warpRequest rawBody
        router (Route (request^.requestMethod) (request^.requestPath)) request
  return $ buildResponse qresponse
return $ buildResponse qresponse | xdcrafts/Quasar | src/Quasar/Api/Routing.hs | apache-2.0 | 1,260 | 0 | 19 | 233 | 320 | 179 | 141 | 32 | 3 |
-- | Core Process code
{-# LANGUAGE ExistentialQuantification, FlexibleInstances,
GeneralizedNewtypeDeriving,
ScopedTypeVariables,
DeriveDataTypeable,
MultiParamTypeClasses, CPP #-}
module Process (
-- * Types
Process
-- * Interface
, runP
, spawnP
, catchP
, cleanupP
, stopP
-- * Log Interface
, Logging(..)
, logP
, infoP
, debugP
, warningP
, criticalP
, errorP
)
where
import Control.Applicative
import Control.Concurrent
import Control.Exception
import Control.Monad.Reader
import Control.Monad.State.Strict
import Data.Typeable
import Prelude hiding (log)
import System.Log.Logger
-- | A @Process a b c@ is the type of processes with access to configuration data @a@, state @b@
-- returning values of type @c@. Usually, the read-only data are configuration parameters and
-- channels, and the state the internal process state. It is implemented by means of a transformer
-- stack on top of IO.
newtype Process a b c = Process (ReaderT a (StateT b IO) c)
  deriving (Functor, Applicative, Monad, MonadIO, MonadState b, MonadReader a)
-- Thrown by 'stopP' to request a graceful stop; handled in 'cleanupP'.
data StopException = StopException
  deriving (Show, Typeable)
instance Exception StopException
-- | Raise 'StopException' from inside a process; the 'catches' in 'cleanupP'
-- turns it into a graceful shutdown (cleanup handler then stop handler).
stopP :: Process a b c
stopP = throw StopException
-- | Run the process monad given a configuation of type @a@ and a initial state of type @b@
runP :: a -> b -> Process a b c -> IO (c, b)
runP c st (Process p) = runStateT (runReaderT p c) st
-- | Spawn a new thread running the process with configuration @c@ and
-- initial state @st@; the process result is discarded.
spawnP :: a -> b -> Process a b () -> IO ThreadId
spawnP c st p = forkIO $ runP c st p >> return ()
-- | Run the process monad for its side effect, with a stopHandler if exceptions
-- are raised in the process
catchP :: Logging a => Process a b () -> Process a b () -> Process a b ()
catchP proc stopH = cleanupP proc stopH (return ())
-- | Run the process monad for its side effect. @cleanupP p sh ch@ describes to
-- run @p@. If @p@ dies by a kill from a supervisor, run @ch@. Otherwise it runs
-- @ch >> sh@ on death.
cleanupP :: Logging a => Process a b () -> Process a b () -> Process a b () -> Process a b ()
cleanupP proc stopH cleanupH = do
  st <- get
  c <- ask
  -- Each handler reruns in a fresh 'runP' with the state captured above,
  -- so state mutated by 'proc' before the exception is NOT visible to the
  -- handlers — only the final (possibly handler-produced) state is kept.
  (a, s') <- liftIO $ runP c st proc `catches`
      [ Handler (\ThreadKilled ->
          runP c st ( do infoP $ "Process Terminated by Supervisor"
                         cleanupH ))
      , Handler (\StopException ->
           runP c st (do infoP $ "Process Terminating gracefully"
                         cleanupH >> stopH)) -- This one is ok
      , Handler (\(ex :: SomeException) ->
          runP c st (do criticalP $ "Process exiting due to ex: " ++ show ex
                        cleanupH >> stopH))
      ]
  put s'
  return a
------ LOGGING
--
-- | The class of types where we have a logger inside them somewhere
class Logging a where
  -- | Returns a channel for logging and an Identifying string to use
  logName :: a -> String
-- | Log @msg@ at priority @prio@, tagged with this process' logger name.
logP :: Logging a => Priority -> String -> Process a b ()
logP prio msg = do
    n <- asks logName
    liftIO $ logM n prio (n ++ ":\t" ++ msg)
-- Convenience wrappers for the standard priorities. With -DNDEBUG,
-- 'debugP' is compiled to a no-op via CPP.
infoP, debugP, criticalP, warningP, errorP :: Logging a => String -> Process a b ()
infoP = logP INFO
#ifdef NDEBUG
debugP _ = return ()
#else
debugP = logP DEBUG
#endif
criticalP = logP CRITICAL
warningP = logP WARNING
errorP = logP ERROR
| jlouis/combinatorrent | src/Process.hs | bsd-2-clause | 3,492 | 0 | 18 | 986 | 851 | 450 | 401 | 69 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Application.HXournal.Command
-- Copyright : (c) 2011, 2012 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <ianwookim@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
-----------------------------------------------------------------------------
module Application.HXournal.Command where
import Application.HXournal.ProgType
import Application.HXournal.Job
import Application.HXournal.Script.Hook
-- | Dispatch the parsed command line: the 'Test' mode starts the main job
-- with an optional file name and script hook.
-- NOTE(review): only the 'Test' constructor is matched — if 'Hxournal' has
-- other constructors this equation is partial; confirm against ProgType.
commandLineProcess :: Hxournal -> Maybe Hook -> IO ()
commandLineProcess (Test mfname) mhook = do
  startJob mfname mhook
| wavewave/hxournal | lib/Application/HXournal/Command.hs | bsd-2-clause | 663 | 0 | 8 | 88 | 82 | 50 | 32 | 7 | 1 |
-- {-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DataKinds, GADTs, TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FunctionalDependencies #-}
module UnitBLAS.Level1(unitTestLevel1BLAS) where
--import Test.HUnit
--import Numerical.Array.Shape as S
import Prelude as P
import Test.Tasty
import Test.Tasty.HUnit
import qualified Data.Vector.Storable as SV
import qualified Data.Vector.Storable.Mutable as SMV
import Data.Complex
import Numerical.HBLAS.MatrixTypes as Matrix
import Numerical.HBLAS.BLAS.Level1 as BLAS
-- | sasum over 6 contiguous floats: expects the sum of absolute values.
vecTest1SASUM :: IO ()
vecTest1SASUM = do
  vec <- Matrix.generateMutableDenseVector 6 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] !! idx)
  res <- BLAS.sasum 6 vec
  res @?= 21.0
-- | sasum with stride 2: only the even-indexed elements are summed.
vecTest2SASUM :: IO ()
vecTest2SASUM = do
  vec <- Matrix.generateMutableDenseVectorWithStride 12 2 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0] !! idx)
  res <- BLAS.sasum 6 vec
  res @?= 36.0
-- | saxpy (alpha = -1) on contiguous vectors: output := -input + output.
vecTest1SAXPY :: IO ()
vecTest1SAXPY = do
  input <- Matrix.generateMutableDenseVector 6 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] !! idx)
  output <- Matrix.generateMutableDenseVector 6 (\idx -> [2.0, 3.0, 4.0, 3.0, 5.0, 6.0] !! idx)
  BLAS.saxpy 6 (-1.0) input output
  resList <- Matrix.mutableVectorToList $ _bufferMutDenseVector output
  resList @?= [1, 1, 1, -1, 0, 0]
-- | saxpy (alpha = 2) with mismatched strides (3 for input, 2 for output);
-- untouched stride-gap slots of the output keep their original values.
vecTest2SAXPY :: IO ()
vecTest2SAXPY = do
  input <- Matrix.generateMutableDenseVectorWithStride 18 3 (\idx -> [2.0, 0.0, 0.0,
                                                                      3.0, 0.0, 0.0,
                                                                      -4.0, 0.0, 0.0,
                                                                      -3.0, 0.0, 0.0,
                                                                      -5.0, 0.0, 0.0,
                                                                      -6.0, 0.0, 0.0] !! idx)
  output <- Matrix.generateMutableDenseVectorWithStride 12 2 (\idx -> [-1.0, 0.0,
                                                                       -2.0, 0.0,
                                                                       3.0, 0.0,
                                                                       4.0, 0.0,
                                                                       5.0, 0.0,
                                                                       6.0, 0.0] !! idx)
  BLAS.saxpy 6 2.0 input output
  resList <- Matrix.mutableVectorToList $ _bufferMutDenseVector output
  resList @?= [3, 0, 4, 0, -5, 0, -2, 0, -5, 0, -6, 0]
-- | dcopy contiguous-to-contiguous: plain element copy.
vecTest1DCOPY :: IO ()
vecTest1DCOPY = do
  input <- Matrix.generateMutableDenseVector 6 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] !! idx)
  output <- Matrix.generateMutableDenseVector 6 (const 0.0)
  BLAS.dcopy 6 input output
  resList <- Matrix.mutableVectorToList $ _bufferMutDenseVector output
  resList @?= [1, 2, 3, 4, 5, 6]
-- | dcopy stride 2 -> stride 3: copied values land every third slot.
vecTest2DCOPY :: IO ()
vecTest2DCOPY = do
  input <- Matrix.generateMutableDenseVectorWithStride 6 2 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] !! idx)
  output <- Matrix.generateMutableDenseVectorWithStride 9 3 (const 0.0)
  BLAS.dcopy 3 input output
  resList <- Matrix.mutableVectorToList $ _bufferMutDenseVector output
  resList @?= [1, 0, 0, 3, 0, 0, 5, 0, 0]
-- | sdot with strides 2 and 4: dot product of the strided views.
vecTest1SDOT :: IO ()
vecTest1SDOT = do
  left <- Matrix.generateMutableDenseVectorWithStride 6 2 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] !! idx)
  right <- Matrix.generateMutableDenseVectorWithStride 12 4 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0] !! idx)
  res <- sdot 3 left right
  res @?= 1 + 15 + 45
-- | ddot, stride 2 against stride 1.
vecTest1DDOT :: IO ()
vecTest1DDOT = do
  left <- Matrix.generateMutableDenseVectorWithStride 12 2 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0] !! idx)
  right <- Matrix.generateMutableDenseVectorWithStride 6 1 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] !! idx)
  res <- ddot 6 left right
  res @?= 1 + 6 + 15 + 28 + 45 + 66
-- | sdsdot: dot product plus the scalar offset (2.0) added to the result.
vecTest1SDSDOT :: IO ()
vecTest1SDSDOT = do
  left <- Matrix.generateMutableDenseVectorWithStride 6 2 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] !! idx)
  right <- Matrix.generateMutableDenseVectorWithStride 12 4 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0] !! idx)
  res <- sdsdot 3 2.0 left right
  res @?= 2 + 1 + 15 + 45
-- | dsdot: single-precision inputs accumulated in double precision.
vecTest1DSDOT :: IO ()
vecTest1DSDOT = do
  left <- Matrix.generateMutableDenseVectorWithStride 12 2 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0] !! idx)
  right <- Matrix.generateMutableDenseVectorWithStride 6 1 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] !! idx)
  res <- dsdot 6 left right
  res @?= 1 + 6 + 15 + 28 + 45 + 66
-- | cdotu: unconjugated complex dot product, result written to a mutable cell.
vecTest1CDOTU :: IO ()
vecTest1CDOTU = do
  left <- Matrix.generateMutableDenseVectorWithStride 6 2 (\idx -> [1:+1, 1:+(-1), 1:+1, 1:+(-1), 1:+1, 1:+(-1)] !! idx)
  right <- Matrix.generateMutableDenseVectorWithStride 9 3 (\idx -> [1:+(-2), 1:+1, 1:+(-1), 1:+1, 1:+(-1), 1:+1, 1:+(-1), 1:+1, 1:+(-1)] !! idx)
  res <- Matrix.generateMutableValue (1:+1)
  cdotu 3 left right res
  resValue <- Matrix.mutableValueToValue res
  resValue @?= 5:+1
-- | cdotc: complex dot product with the left operand conjugated.
vecTest1CDOTC :: IO ()
vecTest1CDOTC = do
  left <- Matrix.generateMutableDenseVectorWithStride 6 2 (\idx -> [2:+3, 1:+(-1), 1:+1, 1:+(-1), 1:+1, 1:+(-1)] !! idx)
  right <- Matrix.generateMutableDenseVectorWithStride 9 3 (\idx -> [1:+(-2), 1:+1, 1:+(-1), 1:+1, 1:+(-1), 1:+1, 1:+(-1), 1:+1, 1:+(-1)] !! idx)
  res <- Matrix.generateMutableValue (1:+1)
  cdotc 3 left right res
  resValue <- Matrix.mutableValueToValue res
  resValue @?= (-2):+(-9)
-- | snrm2: Euclidean norm, compared within a 1e-6 tolerance.
vecTest1SNRM2 :: IO ()
vecTest1SNRM2 = do
  input <- Matrix.generateMutableDenseVector 6 (\idx -> [1.0, -2.0, 3.0, -4.0, 5.0, -6.0] !! idx)
  res <- snrm2 6 input
  True @?= 1e-6 > (abs $ res - (sqrt $ sum $ fmap (\x->x^2) [1, 2, 3, 4, 5, 6]))
-- | dznrm2: Euclidean norm of a strided complex vector (real/imag parts
-- both contribute), compared within 1e-12.
vecTest1DZNRM2 :: IO ()
vecTest1DZNRM2 = do
  input <- Matrix.generateMutableDenseVectorWithStride 8 2 (\idx -> [1:+1, 1:+2, 2:+(-3), 2:+(-2), (-3):+1, (-3):+0, (-4):+2, (-4):+1] !! idx)
  res <- dznrm2 4 input
  True @?= 1e-12 > (abs $ res - (sqrt $ sum $ fmap (\x->x^2) [1, 1, 2, 3, 3, 1, 4, 2]))
-- | srot with c = -1, s = 2: in-place plane rotation of both strided vectors;
-- stride-gap slots stay untouched.
vecTest1SROT :: IO ()
vecTest1SROT = do
  left <- Matrix.generateMutableDenseVectorWithStride 6 2 (\idx -> [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] !! idx)
  right <- Matrix.generateMutableDenseVectorWithStride 6 2 (\idx -> [6.0, 5.0, 4.0, 3.0, 2.0, 1.0] !! idx)
  srot 3 left right (-1) 2
  resLeft <- Matrix.mutableVectorToList $ _bufferMutDenseVector left
  resRight <- Matrix.mutableVectorToList $ _bufferMutDenseVector right
  resLeft @?= [11.0, 2.0, 5.0, 4.0, -1.0, 6.0]
  resRight @?= [-8.0, 5.0, -10.0, 3.0, -12.0, 1.0]
-- | drot with c = 0, s = -2 across different strides.
vecTest1DROT :: IO ()
vecTest1DROT = do
  left <- Matrix.generateMutableDenseVectorWithStride 4 1 (\idx -> [1, 2, 3, 4] !! idx)
  right <- Matrix.generateMutableDenseVectorWithStride 8 2 (\idx -> [8, 7, 6, 5, 4, 3, 2, 1] !! idx)
  drot 4 left right 0 (-2)
  resLeft <- Matrix.mutableVectorToList $ _bufferMutDenseVector left
  resRight <- Matrix.mutableVectorToList $ _bufferMutDenseVector right
  resLeft @?= [-16, -12, -8, -4]
  resRight @?= [2, 7, 4, 5, 6, 3, 8, 1]
-- | srotg on the 3-4-5 triangle: r = 5, cos = 0.6, sin = 0.8; b is
-- overwritten with the reconstruction parameter z (checked with tolerance).
vecTest1SROTG :: IO ()
vecTest1SROTG = do
  a <- Matrix.generateMutableValue 3
  b <- Matrix.generateMutableValue 4
  c <- Matrix.generateMutableValue 0
  s <- Matrix.generateMutableValue 0
  srotg a b c s
  av <- Matrix.mutableValueToValue a
  bv <- Matrix.mutableValueToValue b
  cv <- Matrix.mutableValueToValue c
  sv <- Matrix.mutableValueToValue s
  av @?= 5
  True @?= 1e-6 > (abs $ bv - 1/0.6)
  cv @?= 0.6
  sv @?= 0.8
-- | drotg on a non-Pythagorean pair; all four outputs checked within 1e-12.
vecTest1DROTG :: IO ()
vecTest1DROTG = do
  a <- Matrix.generateMutableValue 5.8
  b <- Matrix.generateMutableValue 3.4
  c <- Matrix.generateMutableValue 0
  s <- Matrix.generateMutableValue 0
  drotg a b c s
  av <- Matrix.mutableValueToValue a
  bv <- Matrix.mutableValueToValue b
  cv <- Matrix.mutableValueToValue c
  sv <- Matrix.mutableValueToValue s
  True @?= 1e-12 > (abs $ av - sqrt(3.4^2 + 5.8^2))
  True @?= 1e-12 > (abs $ bv - 3.4 / sqrt(3.4^2 + 5.8^2))
  True @?= 1e-12 > (abs $ cv - 5.8 / sqrt(3.4^2 + 5.8^2))
  True @?= 1e-12 > (abs $ sv - 3.4 / sqrt(3.4^2 + 5.8^2))
-- | drotm with flag -1 (full 2x2 H matrix encoded in param).
vecTest1DROTM :: IO ()
vecTest1DROTM = do
  x <- Matrix.generateMutableDenseVectorWithStride 4 1 (\idx -> [1, 2, 3, 4] !! idx)
  y <- Matrix.generateMutableDenseVectorWithStride 8 2 (\idx -> [8, 7, 6, 5, 4, 3, 2, 1] !! idx)
  param <- Matrix.generateMutableDenseVector 5 (\idx -> [-1, 0, -1, 1, 0] !! idx)
  drotm 4 x y param
  resX <- Matrix.mutableVectorToList $ _bufferMutDenseVector x
  resY <- Matrix.mutableVectorToList $ _bufferMutDenseVector y
  resX @?= [8, 6, 4, 2]
  resY @?= [-1, 7, -2, 5, -3, 3, -4, 1]
-- | srotm with flag 1 (H with fixed off-diagonal signs).
vecTest1SROTM :: IO ()
vecTest1SROTM = do
  x <- Matrix.generateMutableDenseVectorWithStride 6 2 (\idx -> [1, 2, 3, 4, 5, 6] !! idx)
  y <- Matrix.generateMutableDenseVectorWithStride 9 3 (\idx -> [9, 8, 7, 6, 5, 4, 3, 2, 1] !! idx)
  param <- Matrix.generateMutableDenseVector 5 (\idx -> [1, 1, 2, -2, 1] !! idx)
  srotm 3 x y param
  resX <- Matrix.mutableVectorToList $ _bufferMutDenseVector x
  resY <- Matrix.mutableVectorToList $ _bufferMutDenseVector y
  resX @?= [10, 2, 9, 4, 8, 6]
  resY @?= [8, 8, 7, 3, 5, 4, -2, 2, 1]
-- | srotmg: builds the modified-Givens parameters in place, updating the
-- scale factors d1/d2 and the x coordinate.
vecTest1SROTMG :: IO ()
vecTest1SROTMG = do
  d1 <- Matrix.generateMutableValue 3
  d2 <- Matrix.generateMutableValue 6
  x <- Matrix.generateMutableValue 1
  let y = 1
  param <- Matrix.generateMutableDenseVector 5 (\idx -> [-1, 1, 1, -1, 1] !! idx)
  srotmg d1 d2 x y param
  paramR <- Matrix.mutableVectorToList $ _bufferMutDenseVector param
  updatedD1 <- Matrix.mutableValueToValue d1
  updatedD2 <- Matrix.mutableValueToValue d2
  updatedX <- Matrix.mutableValueToValue x
  paramR @?= [1, 0, 0.5, 0, 1]
  updatedD1 @?= 4
  updatedD2 @?= 2
  updatedX @?= 1.5
-- | Scales 4 logical elements of a stride-2 vector by -2 with @sscal@;
-- odd-index elements must be left untouched.
vecTest1SSCAL :: IO ()
vecTest1SSCAL = do
    vec <- Matrix.generateMutableDenseVectorWithStride 8 2 (\i -> [1, 2, 3, 4, 5, 6, 7, 8] !! i)
    sscal 4 (-2) vec
    scaled <- Matrix.mutableVectorToList (_bufferMutDenseVector vec)
    scaled @?= [-2, 2, -6, 4, -10, 6, -14, 8]
-- | Scales 2 logical elements of a stride-4 complex vector by (2 - 2i)
-- with @cscal@; the other elements must be left untouched.
vecTest1CSCAL :: IO ()
vecTest1CSCAL = do
    vec <- Matrix.generateMutableDenseVectorWithStride 8 4
        (\i -> [1:+1, 1:+2, 2:+(-3), 2:+(-2), (-3):+1, (-3):+0, (-4):+2, (-4):+1] !! i)
    cscal 2 (2:+(-2)) vec
    scaled <- Matrix.mutableVectorToList (_bufferMutDenseVector vec)
    scaled @?= [4:+0, 1:+2, 2:+(-3), 2:+(-2), (-4):+8, (-3):+0, (-4):+2, (-4):+1]
-- | Scales all 8 elements of a complex vector by the real factor -2 with
-- @csscal@ (real scalar times complex vector).
vecTest1CSSCAL :: IO ()
vecTest1CSSCAL = do
    vec <- Matrix.generateMutableDenseVector 8
        (\i -> [1:+1, 1:+2, 2:+(-3), 2:+(-2), (-3):+1, (-3):+0, (-4):+2, (-4):+1] !! i)
    csscal 8 (-2) vec
    scaled <- Matrix.mutableVectorToList (_bufferMutDenseVector vec)
    scaled @?= [(-2):+(-2), (-2):+(-4), (-4):+6, (-4):+4, 6:+(-2), 6:+0, 8:+(-4), 8:+(-2)]
-- | Swaps 4 logical elements between a stride-2 and a stride-1 vector with
-- @sswap@; the skipped elements of the strided vector stay in place.
vecTest1SSWAP :: IO ()
vecTest1SSWAP = do
    lhs <- Matrix.generateMutableDenseVectorWithStride 8 2 (\i -> [1, 2, 3, 4, 5, 6, 7, 8] !! i)
    rhs <- Matrix.generateMutableDenseVectorWithStride 4 1 (\i -> [-1, -2, -3, -4] !! i)
    sswap 4 lhs rhs
    lhsAfter <- Matrix.mutableVectorToList (_bufferMutDenseVector lhs)
    rhsAfter <- Matrix.mutableVectorToList (_bufferMutDenseVector rhs)
    lhsAfter @?= [-1, 2, -2, 4, -3, 6, -4, 8]
    rhsAfter @?= [1, 3, 5, 7]
-- | Swaps 3 logical elements between a stride-3 and a stride-2 complex
-- vector with @cswap@; the skipped elements stay in place.
vecTest1CSWAP :: IO ()
vecTest1CSWAP = do
    lhs <- Matrix.generateMutableDenseVectorWithStride 9 3
        (\i -> [1:+1, 1:+2, 2:+(-3), 2:+(-2), (-3):+1, (-3):+0, (-4):+2, (-4):+1, 0:+9] !! i)
    rhs <- Matrix.generateMutableDenseVectorWithStride 6 2
        (\i -> [1:+2, 1:+3, 3:+(-3), 2:+2, 3:+1, 3:+3] !! i)
    cswap 3 lhs rhs
    lhsAfter <- Matrix.mutableVectorToList (_bufferMutDenseVector lhs)
    rhsAfter <- Matrix.mutableVectorToList (_bufferMutDenseVector rhs)
    lhsAfter @?= [1:+2, 1:+2, 2:+(-3), 3:+(-3), (-3):+1, (-3):+0, 3:+1, (-4):+1, 0:+9]
    rhsAfter @?= [1:+1, 1:+3, 2:+(-2), 2:+2, (-4):+2, 3:+3]
-- | @isamax@ over 4 logical elements of a stride-2 vector; returns the
-- 0-based logical index of the element with the largest absolute value.
vecTest1ISAMAX :: IO ()
vecTest1ISAMAX = do
    vec <- Matrix.generateMutableDenseVectorWithStride 8 2 (\i -> [1, 2, 3, 4, 5, 6, 7, 8] !! i)
    maxIdx <- isamax 4 vec
    maxIdx @?= 3
-- | @icamax@ over a 9-element complex vector; returns the 0-based index of
-- the element with the largest |re| + |im|.
vecTest1ICAMAX :: IO ()
vecTest1ICAMAX = do
    vec <- Matrix.generateMutableDenseVector 9
        (\i -> [1:+1, 1:+2, 2:+(-3), 2:+(-2), (-3):+1, (-3):+0, (-4):+2, (-4):+1, 0:+9] !! i)
    maxIdx <- icamax 9 vec
    maxIdx @?= 8
{-
vecTest1ISAMIN :: IO ()
vecTest1ISAMIN = do
x <- Matrix.generateMutableDenseVector 8 (\idx -> [1, 2, 3, 4, -5, 6, 7, 8] !! idx)
idx <- isamin 4 x 2
idx @?= 2
vecTest1ICAMIN :: IO ()
vecTest1ICAMIN = do
x <- Matrix.generateMutableDenseVector 9 (\idx -> [1:+2, 1:+2, (-2):+(-3), 2:+(-2), (-3):+1, (-2):+0, (-4):+2, (-4):+1, 0:+9] !! idx)
idx <- icamin 9 x 1
idx @?= 5
-}
-- | All of the level-1 BLAS unit tests collected into one test group.
-- Fixes two user-facing label typos ("BlAS" -> "BLAS", "lenghts" ->
-- "lengths") and corrects the comment on the disabled srotmg case, which
-- referred to the wrong routine name.
unitTestLevel1BLAS = testGroup "BLAS Level 1 tests " [
    testCase "sasum on vector of length 6 with incx 1" vecTest1SASUM,
    testCase "sasum on vector of length 12 with incx 2" vecTest2SASUM,
    testCase "saxpy on vectors of lengths 6 and 6 with both incx 1" vecTest1SAXPY,
    testCase "saxpy on vectors of lengths 12 and 18 with incx 2 and 3" vecTest2SAXPY,
    testCase "dcopy on vectors of lengths 6 and 6 with both incx 1" vecTest1DCOPY,
    testCase "dcopy on vectors of lengths 6 and 9 with incx 2 and 3" vecTest2DCOPY,
    testCase "sdot on vectors of lengths 6 and 12 with incx 2 and 4" vecTest1SDOT,
    testCase "ddot on vectors of lengths 12 and 6 with incx 2 and 1" vecTest1DDOT,
    testCase "sdsdot on vectors of lengths 6 and 12 with incx 2 and 4" vecTest1SDSDOT,
    testCase "dsdot on vectors of 12 and 6 with incx 2 and 1" vecTest1DSDOT,
    testCase "cdotu on vectors of 6 and 9 with incx of 2 and 3" vecTest1CDOTU,
    testCase "cdotc on vectors of 6 and 9 with incx of 2 and 3" vecTest1CDOTC,
    testCase "snrm on vector of length 6 with incx of 1" vecTest1SNRM2,
    testCase "dznrm on vector of length 8 with incx of 2" vecTest1DZNRM2,
    testCase "srot on vectors of length 6 and 6 with incx of 2" vecTest1SROT,
    testCase "drot on vectors of length 4 and 8 with incx of 1 and 2" vecTest1DROT,
    testCase "srotg on 3 4" vecTest1SROTG,
    testCase "drotg on 5.8 3.4" vecTest1DROTG,
    testCase "drotm on vectors of 4 and 8 with incx of 1 and 2, param starts with -1" vecTest1DROTM,
    testCase "srotm on vectors of 6 and 9 with incx of 2 and 3, param starts with 1" vecTest1SROTM,
    -- testCase "srotmg" vecTest1SROTMG -- disabled: expected values unconfirmed (really confusing result).
    testCase "sscal on vector of 8 with incx 2" vecTest1SSCAL,
    testCase "cscal on vector of 8 with incx 4" vecTest1CSCAL,
    testCase "csscal on vector of 8 with incx 1" vecTest1CSSCAL,
    testCase "sswap on vector of 8 and 4 with incx 2 and 1" vecTest1SSWAP,
    testCase "cswap on vector of 9 and 6 with incx 3 and 2" vecTest1CSWAP,
    testCase "isamax on vector of 8 with incx 2" vecTest1ISAMAX,
    testCase "icamax on vector of 9 with incx 1" vecTest1ICAMAX
    --testCase "isamin on vector of 8 with incx 2" vecTest1ISAMIN,
    --testCase "icamin on vector of 9 with incx 1" vecTest1ICAMIN
    ]
--unitTestShape = testGroup "Shape Unit tests"
-- [ testCase "foldl on shape" $ ( S.foldl (+) 0 (1:* 2:* 3 :* Nil ) @?= ( P.foldl (+) 0 [1,2,3]) )
-- , testCase "foldr on shape" $ ( S.foldr (+) 0 (1:* 2:* 3 :* Nil ) @?= ( P.foldr (+) 0 [1,2,3]) )
-- , testCase "scanr1 on shape" (S.scanr1 (+) 0 (1:* 1 :* 1:* Nil ) @?= (3:* 2:* 1 :* Nil ) )
-- , testCase "scanl1 on shape" (S.scanl1 (+) 0 (1:* 1 :* 1:* Nil ) @?= (1:* 2:* 3:* Nil ) )
-- ]
{-
import Numerical.HBLAS.BLAS.FFI
import Numerical.HBLAS.BLAS
import Numerical.HBLAS.MatrixTypes
import Data.Vector.Storable.Mutable as M
import qualified Data.Vector.Storable as S
main :: IO ()
main = do
-- Just test that the symbol resolves
--openblas_set_num_threads_unsafe 7
v :: IOVector Double <- M.replicate 10 1.0
res <- unsafeWith v (\ptr-> cblas_ddot_unsafe 10 ptr 1 ptr 1)
-}
| yangjueji/hblas | tests/UnitBLAS/Level1.hs | bsd-3-clause | 16,104 | 0 | 15 | 4,014 | 5,898 | 3,173 | 2,725 | 270 | 1 |
-- | Functionality for detecting and removing left-recursion.
{-# LANGUAGE FlexibleContexts #-} -- for Pretty (V t nt)
{-# LANGUAGE ScopedTypeVariables #-}
module Data.Cfg.LeftRecursion(
LR(..),
SCComp(..),
isLeftRecursive,
removeLeftRecursion,
removeLeftRecursionBounded,
reportLeftRec) where
import Control.Monad(guard)
import Data.Cfg.Cfg
import Data.Cfg.CycleRemoval(SCComp(..))
import qualified Data.Cfg.CycleRemoval as CR
import Data.Cfg.FreeCfg(FreeCfg, bimapCfg)
import Data.Cfg.Item
import Data.List(partition)
import Data.Cfg.LeftRecursion.Cycles
import Data.Cfg.Pretty
import Data.Graph.Inductive.PatriciaTree
import Data.Graph.Inductive.ULGraph hiding (empty)
import qualified Data.Map as M
import qualified Data.Set as S
import Text.PrettyPrint
-- | Nonterminal wrapper to introduce symbols for tails of directly
-- recursive productions.  Direct left-recursion removal rewrites
-- @A -> A alpha | beta@ into @A -> beta A'@ and @A' -> alpha A' | empty@;
-- 'LRTail' plays the role of @A'@.
data LR nt = LR nt -- ^ wrapped original symbols
    | LRTail nt -- ^ tail symbols
    deriving (Eq, Ord, Show)
-- | Is the grammar left-recursive?
--
-- NOTE(review): only the head of the sorted component list is inspected.
-- This is correct only if the 'Ord' instance for 'SCComp' sorts 'SCComp'
-- and 'SelfLoop' values (the recursive components) ahead of 'Singleton'
-- values -- TODO confirm against Data.Cfg.CycleRemoval.
isLeftRecursive :: (Cfg cfg t nt, Ord nt, Ord t) => cfg t nt -> Bool
isLeftRecursive cfg = case S.toList $ lrSccs cfg of
    SCComp _ : _ -> True      -- nontrivial strongly connected component: indirect recursion
    SelfLoop _ _ : _ -> True  -- a nonterminal that reaches itself: direct recursion
    _ -> False
-- | An equivalent grammar without left-recursion.
--
-- Works over the left-recursive strongly connected components of the
-- grammar: 'indirect' substitutes one nonterminal's productions into
-- another to break indirect cycles, and 'direct' rewrites direct
-- recursion @A -> A alpha | beta@ into right recursion via a fresh
-- 'LRTail' nonterminal.
removeLeftRecursion :: forall cfg nt t
                     . (Cfg cfg t nt, Ord nt, Ord t)
                    => cfg t nt -> FreeCfg t (LR nt)
removeLeftRecursion cfg
    = CR.removeCycles' indirect direct (S.toList $ lrSccs cfg') cfg'
    where
    -- The input grammar with every nonterminal wrapped in 'LR' so that
    -- fresh 'LRTail' symbols cannot clash with existing ones.
    cfg' :: FreeCfg t (LR nt)
    cfg' = bimapCfg id LR cfg

    -- Replace each production src -> dst gamma by src -> delta gamma for
    -- every production dst -> delta, eliminating src's left-corner
    -- references to dst.  Leaves the map unchanged when src has no such
    -- production.
    indirect :: LR nt -> LR nt -> ProductionMap t (LR nt)
        -> ProductionMap t (LR nt)
    indirect src dst pm = if null rec'
        then pm
        else M.insert src (S.fromList newSrcRhss) pm
        where
        srcRhss :: [Vs t (LR nt)]
        srcRhss = S.toList $ pm M.! src
        -- rec': rhss of src beginning with dst; nonrec: all others
        (rec', nonrec) = partition isRec srcRhss
            where
            isRec :: Vs t (LR nt) -> Bool
            isRec rhs = case rhs of
                NT (LR nt) : _ -> dst == LR nt
                _ -> False
        dstRhss :: [Vs t (LR nt)]
        dstRhss = S.toList $ pm M.! dst
        newSrcRhss :: [Vs t (LR nt)]
        newSrcRhss = nonrec ++ newRec
            where
            -- substitute each dst rhs for the leading dst symbol
            newRec = do
                srcRhs <- rec'
                dstRhs <- dstRhss
                return (dstRhs ++ tail srcRhs)

    -- Rewrite direct recursion on nt:
    --   nt -> nt alpha | beta  becomes  nt -> beta nt', nt' -> alpha nt' | empty
    -- where nt' is 'LRTail' nt.  'LRTail' arguments are impossible here
    -- because tails are only ever introduced by this step.
    direct :: LR nt -> ProductionMap t (LR nt) -> ProductionMap t (LR nt)
    direct (LRTail _) _ = error "removeLeftRecursion.direct: saw LRTail"
    direct (LR nt) pm = if null rec'
        then pm
        else newPM `M.union` pm
        where
        rhss = S.toList $ pm M.! LR nt
        (rec', nonrec) = partition isRec rhss
            where
            isRec rhs = case rhs of
                NT (LR nt') : _ -> nt == nt'
                _ -> False
        -- non-recursive rhss get the new tail appended
        newNTRhss = S.fromList $ map f nonrec
            where
            f rhs = rhs ++ [NT $ LRTail nt]
        -- recursive rhss lose their leading nt and gain the tail;
        -- the empty rhs terminates the tail recursion
        newNTTailRhss = S.fromList ([] : map f rec')
            where
            f rhs = tail rhs ++ [NT $ LRTail nt]
        newPM :: ProductionMap t (LR nt)
        newPM = M.fromList [(LR nt, newNTRhss),
            (LRTail nt, newNTTailRhss)]
-- | An equivalent grammar without left-recursion, if the number of
-- productions does not exceed the given limit; 'Nothing' once the
-- transformation grows past @maxSize@ right-hand sides.
--
-- The 'indirect' and 'direct' steps are intentionally identical to those
-- in 'removeLeftRecursion'; only the size check is added.
removeLeftRecursionBounded :: forall cfg nt t
                            . (Cfg cfg t nt, Ord nt, Ord t)
                           => Int -> cfg t nt -> Maybe (FreeCfg t (LR nt))
removeLeftRecursionBounded maxSize cfg
    = CR.removeCyclesM' indirectM directM (S.toList $ lrSccs cfg') cfg'
    where
    cfg' :: FreeCfg t (LR nt)
    cfg' = bimapCfg id LR cfg

    -- Total number of right-hand sides across all nonterminals.
    pmSize :: ProductionMap t (LR nt) -> Int
    pmSize = sum . map S.size . M.elems

    -- Succeeds only while the grammar stays within the size bound.
    -- BUG FIX: the guard was inverted (pmSize pm > maxSize), which made
    -- every step fail precisely when the grammar was still small enough;
    -- the documented contract is to fail when the limit is exceeded.
    checkSize :: ProductionMap t (LR nt) -> Maybe (ProductionMap t (LR nt))
    checkSize pm = do
        guard (pmSize pm <= maxSize)
        return pm

    -- Bounded wrappers around the substitution steps.
    indirectM :: LR nt -> LR nt -> ProductionMap t (LR nt)
        -> Maybe (ProductionMap t (LR nt))
    indirectM src dst pm = checkSize $ indirect src dst pm

    directM :: LR nt -> ProductionMap t (LR nt)
        -> Maybe (ProductionMap t (LR nt))
    directM nt pm = checkSize $ direct nt pm

    -- Replace each production src -> dst gamma by src -> delta gamma for
    -- every production dst -> delta (break indirect cycles).
    indirect :: LR nt -> LR nt -> ProductionMap t (LR nt)
        -> ProductionMap t (LR nt)
    indirect src dst pm = if null rec'
        then pm
        else M.insert src (S.fromList newSrcRhss) pm
        where
        srcRhss :: [Vs t (LR nt)]
        srcRhss = S.toList $ pm M.! src
        (rec', nonrec) = partition isRec srcRhss
            where
            isRec :: Vs t (LR nt) -> Bool
            isRec rhs = case rhs of
                NT (LR nt) : _ -> dst == LR nt
                _ -> False
        dstRhss :: [Vs t (LR nt)]
        dstRhss = S.toList $ pm M.! dst
        newSrcRhss :: [Vs t (LR nt)]
        newSrcRhss = nonrec ++ newRec
            where
            newRec = do
                srcRhs <- rec'
                dstRhs <- dstRhss
                return (dstRhs ++ tail srcRhs)

    -- Rewrite direct recursion into right recursion via 'LRTail'.
    direct :: LR nt -> ProductionMap t (LR nt) -> ProductionMap t (LR nt)
    direct (LRTail _) _ = error "removeLeftRecursion.direct: saw LRTail"
    direct (LR nt) pm = if null rec'
        then pm
        else newPM `M.union` pm
        where
        rhss = S.toList $ pm M.! LR nt
        (rec', nonrec) = partition isRec rhss
            where
            isRec rhs = case rhs of
                NT (LR nt') : _ -> nt == nt'
                _ -> False
        newNTRhss = S.fromList $ map f nonrec
            where
            f rhs = rhs ++ [NT $ LRTail nt]
        newNTTailRhss = S.fromList ([] : map f rec')
            where
            f rhs = tail rhs ++ [NT $ LRTail nt]
        newPM :: ProductionMap t (LR nt)
        newPM = M.fromList [(LR nt, newNTRhss),
            (LRTail nt, newNTTailRhss)]
------------------------------------------------------------
-- | Produces a pretty-printed report giving the left-recursion of the
-- grammar: one section per recursive strongly connected component,
-- showing the nonterminals involved and the items witnessing the cycle.
reportLeftRec :: forall cfg t nt
               . (Cfg cfg t nt, Ord nt, Ord t,
                     Pretty (V t nt), Pretty (Item t nt))
                  => cfg t nt
                      -> Doc
reportLeftRec = vcat . map f . S.toList . lrSccs
    where
    -- render a nonterminal by lifting it into the vocabulary type
    prettyNT = pretty . (NT :: nt -> V t nt)

    f :: SCComp Gr nt (Item t nt) -> Doc
    -- non-recursive component: nothing to report
    f (Singleton _) = empty
    -- direct recursion: list the items that loop back to n
    f (SelfLoop n es) = text "direct left-recursion on" <+> prettyNT n <+> text "via"
        $$ nest 4 items'
        where
        items' = vcat [pretty e | e <- S.toList es]
    -- indirect recursion: list the component's nonterminals and each edge
    f (SCComp gr) = text "indirect left-recursion on" <+> hsep (map prettyNT ns)
        $$ nest 4 es'
        where
        ns = S.toList $ nodes gr
        es = edges gr
        es' = vcat $ map g es
        -- one line per graph edge: src -> item -> dst
        g :: Edge nt (Item t nt) -> Doc
        g (src, dst, item) = hsep [prettyNT src,
            arrow,
            pretty item,
            arrow,
            prettyNT dst]
        arrow = text "->"
| nedervold/context-free-grammar | src/Data/Cfg/LeftRecursion.hs | bsd-3-clause | 7,524 | 0 | 17 | 2,964 | 2,418 | 1,245 | 1,173 | 155 | 6 |
----------------------------------------------------------------------
-- |
-- Module : WBXML.DomBuilder
-- Copyright : Mike Limansky, 2012
-- Licencse : BSD3
--
-- Helper module for building different DOM documents
--
----------------------------------------------------------------------
module Wbxml.DomBuilder where
import Wbxml.SAX
import Wbxml.Types
-- | Bundle of constructor callbacks used to assemble a concrete DOM:
-- @d@ is the document type, @t@ the tag type and @c@ the content-node
-- type.  Supplying different callbacks yields different DOM back ends
-- from the same WBXML event stream.
data DomBuilderConfig d t c = DomBuilderConfig
    { makeDocument :: (WbxmlHeader -> String -> String -> String -> t -> d)
      -- ^ builds the document from the header, the three 'Doctype' fields
      -- and the root tag
    , makeTag :: (TagInfo -> [c] -> t)
      -- ^ builds a tag from its info and its child content
    , makeContentTag :: (t -> c)
      -- ^ wraps a tag as a content node
    , makeContentString :: (String -> c)
      -- ^ wraps a string as a content node
    }
-- | Builds a document from a parse-event stream.  Succeeds only when the
-- stream opens with a 'Document' event followed by a 'Doctype' event;
-- everything after those two is handed to 'buildRoot'.
buildDocument conf events = case events of
    Document h : Doctype p r s : rest ->
        Just (makeDocument conf h p r s (buildRoot conf rest))
    _ -> Nothing
-- | Builds the root tag from the events following the document prelude.
-- NOTE(review): the where-bound pattern is partial -- if the first event
-- is not a 'StartTag' this fails at runtime when @tag@ is demanded, and
-- an empty event list fails the head pattern; callers must guarantee a
-- well-formed stream.
buildRoot conf (e:es) = (makeTag conf) tag (buildContent conf es)
    where (StartTag tag _) = e
-- | Folds a flat event list into a list of content nodes, recursing into
-- each open tag until its matching 'EndTag'.
buildContent conf c = fst $ mC [] c
    where
    -- mC acc events = (nodes built so far, events left after the
    -- enclosing tag closes).  The accumulator is appended to, preserving
    -- document order.
    mC c [] = (c, [])
    -- self-closing tag: a tag node with no children
    mC c ((StartTag tag True):es) = let c' = c ++ [makeContentTag conf (makeTag conf tag [])] in mC c' es
    -- open tag: build its interior first, then resume with what remains
    mC c ((StartTag tag False):es) = let c' = c ++ [makeContentTag conf (makeTag conf tag cont)] in mC c' rest
        where (cont, rest) = mC [] es
    -- end of the enclosing tag: return the remaining events to the caller
    mC c ((EndTag _):es) = (c, es)
    -- text node
    mC c ((Text s):es) = let c' = c ++ [makeContentString conf s] in mC c' es
| limansky/wbxml | src/Wbxml/DomBuilder.hs | bsd-3-clause | 1,531 | 0 | 16 | 526 | 508 | 270 | 238 | 18 | 5 |
import Wrecker
import Wrecker.Options
import Network.HTTP.Client
import Options.Applicative.Builder
import Options.Applicative
import Control.Exception
import Control.Monad (void)
import Data.Monoid
-- | Command-line parser: wrecker's partial options followed by the target
-- URL as a single positional argument.
parser :: Parser (PartialOptions, String)
parser = (,) <$> pPartialOptions <*> strArgument mempty
-- | Parses the command line and completes the partial options with the
-- built-in defaults, returning them together with the target URL.
--
-- Throws an 'IOError' if the partial options cannot be completed.
-- Previously the failure path raised @userError \"\"@, which surfaced as
-- a blank, undiagnosable message; it now names the failing step.
runParser' :: IO (Options, String)
runParser' = do
    let opts = info (helper <*> parser)
            ( fullDesc
           <> progDesc "Welcome to wrecker"
           <> header "wrecker - HTTP stress tester and benchmarker"
            )
    (partialOptions, url) <- execParser opts
    options <- case completeOptions partialOptions of
        Nothing -> throwIO $ userError "wrecker: unable to complete command line options"
        Just x -> return x
    return (options, url)
-- | Entry point: parse options and target URL, then run a single wrecker
-- pass that records one HTTP request per environment.
main :: IO ()
main = do
    (options, url) <- runParser'
    -- Size the connection pool to the requested concurrency so workers
    -- don't contend for connections.
    man <- newManager defaultManagerSettings { managerConnCount = concurrency options
        , managerIdleConnectionCount = concurrency options
        }
    req <- parseRequest url
    -- record the timing of each request under the URL as its key
    void $ runOne options $ \env ->
        void $ record (recorder env) url $ httpLbs req man
| skedgeme/wrecker | app/Main.hs | bsd-3-clause | 1,168 | 0 | 14 | 354 | 314 | 160 | 154 | 32 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.