| code (stringlengths, 5 to 1.03M) | repo_name (stringlengths, 5 to 90) | path (stringlengths, 4 to 158) | license (stringclasses, 15 values) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-|
Module : Math.ExpPairs.ProcessMatrix
Copyright : (c) Andrew Lelechenko, 2014-2020
License : GPL-3
Maintainer : andrew.lelechenko@gmail.com
Sequences of \( A \)- and \( B \)-processes
of van der Corput's method of exponential sums.
A good reference can be found in
/Graham S. W., Kolesnik G. A./
Van Der Corput's Method of Exponential Sums,
Cambridge University Press, 1991,
especially Ch. 5.
-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Math.ExpPairs.ProcessMatrix
( Process (..)
, ProcessMatrix ()
, aMatrix
, baMatrix
, evalMatrix
) where
import GHC.Generics (Generic (..))
import Prettyprinter
import Math.ExpPairs.Matrix3
-- | Since \( B \)-process is
-- <https://en.wikipedia.org/wiki/Involution_(mathematics) involutive>,
-- a sequence of \( A \)- and \( B \)-processes can be rewritten as a sequence
-- of 'A' and 'BA'.
data Process
-- | \( A \)-process
= A
-- | \( BA \)-process
| BA
deriving (Eq, Show, Read, Ord, Enum, Generic)
instance Pretty Process where
pretty = pretty . show
-- | Sequence of processes, represented as a matrix \( 3 \times 3 \).
newtype ProcessMatrix = ProcessMatrix (Matrix3 Integer)
deriving (Eq, Num, Show, Pretty)
instance Semigroup ProcessMatrix where
ProcessMatrix a <> ProcessMatrix b = ProcessMatrix $ normalize $ a * b
instance Monoid ProcessMatrix where
mempty = 1
mappend = (<>)
process2matrix :: Process -> ProcessMatrix
process2matrix A = ProcessMatrix $ Matrix3 1 0 0 1 1 1 2 0 2
process2matrix BA = ProcessMatrix $ Matrix3 0 1 0 2 0 1 2 0 2
-- | Return process matrix for \( A \)-process.
aMatrix :: ProcessMatrix
aMatrix = process2matrix A
-- | Return process matrix for \( BA \)-process.
baMatrix :: ProcessMatrix
baMatrix = process2matrix BA
-- | Apply a projective transformation, defined by 'Path',
-- to a given point in two-dimensional projective space.
evalMatrix :: Num t => ProcessMatrix -> (t, t, t) -> (t, t, t)
evalMatrix (ProcessMatrix m) = multCol (fmap fromInteger m)
{-# INLINABLE evalMatrix #-}
{-# SPECIALIZE evalMatrix :: ProcessMatrix -> (Integer, Integer, Integer) -> (Integer, Integer, Integer) #-}
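-- The usage sketch below is an added illustration, not part of the original
-- module. It assumes 'Matrix3' is row-major and that 'multCol' from
-- Math.ExpPairs.Matrix3 is the ordinary matrix-by-column product; under those
-- assumptions, applying the A-process matrix to a projective point (k, l, m)
-- works out as:
--
-- >>> evalMatrix aMatrix (k, l, m)
-- (k, k + l + m, 2 * k + 2 * m)
--
-- Longer process sequences are built by combining matrices with '<>',
-- e.g. @aMatrix <> baMatrix@, before applying 'evalMatrix'.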
| Bodigrim/exp-pairs | Math/ExpPairs/ProcessMatrix.hs | gpl-3.0 | 2,197 | 0 | 8 | 418 | 374 | 211 | 163 | 37 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Ctxt where
import Control.Lens
import Data.Monoid ((<>))
import Data.Pool
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Vault.Lazy as V
import qualified Database.PostgreSQL.Simple as PG
import Network.Wai (Response)
import Network.Wai.Session (Session)
import Web.Fn
import Web.Larceny hiding (renderWith)
import qualified Web.Larceny as L
type SmoochSession = Session IO Text (Maybe Text)
data Ctxt = Ctxt { _req :: FnRequest
, _sessionKey :: V.Key SmoochSession
, _library :: Library Ctxt
, _substitutions :: Substitutions Ctxt
, _pool :: Pool PG.Connection }
makeLenses ''Ctxt
instance RequestContext Ctxt where
requestLens = req
renderWith :: Ctxt -> Path -> Substitutions Ctxt -> IO (Maybe Response)
renderWith ctxt tplPath addSubs = do
mRendered <- L.renderWith (ctxt ^. library)
(ctxt ^. substitutions <> addSubs)
ctxt
tplPath
case mRendered of
Nothing -> return Nothing
Just rendered -> okHtml rendered
tshow :: Int -> Text
tshow = T.pack . show
| huggablemonad/smooch | app/src/Ctxt.hs | gpl-3.0 | 1,456 | 0 | 11 | 547 | 329 | 186 | 143 | 35 | 2 |
{-
Write an implementation of the function qsort. The qsort function must take a list of elements as input and sort it in ascending order using Hoare's quicksort: pick some element x of the original list (usually the first one), split the list into the elements smaller than x and those not smaller than x, and then recurse on both parts.
GHCi> qsort [1,3,2,5]
[1,2,3,5]
Only functions available from the Prelude library may be used.
-}
module Demo where
qsort :: Ord a => [a] -> [a]
qsort [] = []
qsort (p:xs) = (qsort lesser) ++ [p] ++ (qsort greater)
where
lesser = filter (< p) xs
greater = filter (>= p) xs
-- https://wiki.haskell.org/Introduction#Quicksort_in_Haskell
| devtype-blogspot-com/Haskell-Examples | Qsort/Demo.hs | gpl-3.0 | 703 | 0 | 8 | 129 | 108 | 60 | 48 | 6 | 1 |
{-|
Module : Devel.ReverseProxy
Description : Reverse proxies and starts warp on localhost:<PORT>.
Copyright : (c)
License : GPL-3
Maintainer : njagi@urbanslug.com
Stability : experimental
Portability : POSIX
Reverse proxying and other network related activities.
-}
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, TemplateHaskell #-}
module Devel.ReverseProxy
( runServer
, createSocket
, checkPort
) where
import Network.Wai (Application, responseBuilder)
import Network.HTTP.ReverseProxy (WaiProxyResponse(WPRProxyDest), ProxyDest(ProxyDest), waiProxyTo)
import Network.HTTP.Client (newManager, defaultManagerSettings)
import Network.HTTP.Types (status503)
import Text.Hamlet (shamletFile)
import Text.Blaze.Html.Renderer.Utf8 (renderHtmlBuilder)
import Network.Wai.Handler.Warp
import Control.Exception
import Network.Socket
import Data.Streaming.Network
-- local imports
import Devel.Types (SourceError')
-- | run the warp server
runServer :: [SourceError'] -> Socket -> Int -> IO ()
runServer errorList sock destPort = do
app <- reverseProxy errorList destPort
runSettingsSocket defaultSettings sock app
-- | Does reverse proxying to localhost on the given port.
reverseProxy :: [SourceError'] -> Int -> IO Application
reverseProxy errorList destPort = do
mgr <- newManager defaultManagerSettings
errorList `seq` return ()
let error500 :: SomeException -> Application
error500 _ _ respond = respond $
responseBuilder
status503
[("content-type", "text/html; charset=utf-8")]
(renderHtmlBuilder $(shamletFile "error.hamlet"))
return $ waiProxyTo
(const $ return $ WPRProxyDest $ ProxyDest "127.0.0.1" destPort)
error500
mgr
-- | Create the socket that we will use to communicate with
-- localhost:3000 here.
createSocket :: Int -> IO Socket
createSocket port = do
sock <- bindPortTCP port "*4"
-- Tell the OS *not* to reserve the socket after your program exits.
setSocketOption sock ReuseAddr 1
return sock
-- | Check whether a port is available to bind to.
checkPort :: Int -> IO Bool
checkPort port = do
es <- try $ bindPortTCP port "*4"
case es of
Left (_ :: IOException) -> return False
Right s -> do
sClose s
return True
| bitemyapp/wai-devel | src/Devel/ReverseProxy.hs | gpl-3.0 | 2,290 | 0 | 16 | 441 | 480 | 254 | 226 | 47 | 2 |
module SimpleArithmeticTest where
import Lib
import Data.List (sort)
-- 1. for a function
halfIdentity = (*2).half
prop_halfIdentity :: Double -> Bool
prop_halfIdentity n = (halfIdentity n) == n
-- 2. For any list you apply sort to
-- this property should hold
prop_listOrdered :: (Ord a) => [a] -> Bool
prop_listOrdered xs =
snd $ foldr go (Nothing, True) xs
where go _ status@(_, False) = status
go y (Nothing, t) = (Just y, t)
go y (Just x, t) = (Just y, x >= y)
prop_stringListOrdered :: [String] -> Bool
prop_stringListOrdered ls = prop_listOrdered (sort ls)
-- 3. Now we’ll test the associative and commutative properties
-- of addition:
plusAssociative :: Int -> Int -> Int -> Bool
plusAssociative x y z =
x + (y + z) == (x + y) + z
plusCommutative :: Int -> Int -> Bool
plusCommutative x y =
x + y == y + x
-- 4. Now do the same for multiplication.
productAssociative :: Int -> Int -> Int -> Bool
productAssociative x y z =
x * (y * z) == (x * y) * z
productCommutative :: Int -> Int -> Bool
productCommutative x y =
x * y == y * x
-- 5. We mentioned in one of the first chapters that there are
-- some laws involving the relationship of quot and rem and
-- div and mod. Write QuickCheck tests to prove them.
-- quot rem
prop_quotRem :: Int -> Int -> Bool
prop_quotRem 0 _ = True
prop_quotRem _ 0 = True
prop_quotRem x y = (quot x y)*y + (rem x y) == x
prop_divMod :: Int -> Int -> Bool
prop_divMod 0 _ = True
prop_divMod _ 0 = True
prop_divMod x y = (div x y)*y + (mod x y) == x
-- 6. Is (^) associative? Is it commutative? Use QuickCheck to see
-- if the computer can contradict such an assertion.
prop_powerNotAssociative :: Int -> Int -> Int -> Bool
prop_powerNotAssociative x y z = x ^ (y ^ z) /= (x ^ y) ^ z
prop_powerNotCommutative :: Int -> Int -> Bool
prop_powerNotCommutative x y = x ^ y /= y ^ x
-- 7. Test that reversing a list twice is the same as the identity
-- of the list:
prop_listDoubleReverse :: [String] -> Bool
prop_listDoubleReverse ls = (reverse $ reverse ls) == id ls
-- 8. Write a property for the definition of ($).
prop_dollarFunction :: Int -> Bool
prop_dollarFunction x = (id $ x) == (id x)
prop_compositionFunction :: Int -> Bool
prop_compositionFunction x = (id.id $ x) == (id $ id x)
-- 9. See if these two functions are equal:
-- foldr (++) [] == concat
prop_cons :: Int -> Int -> Bool
prop_cons x y = foldr (:) [] [x, y] == [x] ++ [y]
prop_concat :: Int -> Int -> Bool
prop_concat x y = foldr (++) [] [[x], [y]] == concat [[x], [y]]
-- 10. Hm. Is that so?
f :: Int -> [String] -> Bool
f n xs = length (take n xs) == n
-- 11. Finally, this is a fun one. You may remember we had you
-- compose read and show one time to complete a “round
-- trip.” Well, now you can test that it works:
f' :: Char -> Bool
f' x = (read (show x)) == x
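-- An illustrative test runner (added sketch, not part of the original file):
-- it assumes the QuickCheck package is available and that 'half' from Lib
-- behaves as (/ 2). Each property above is a plain function returning Bool,
-- so 'quickCheck' from Test.QuickCheck can exercise it directly:
--
-- import Test.QuickCheck (quickCheck)
--
-- main :: IO ()
-- main = do
--   quickCheck prop_halfIdentity
--   quickCheck prop_stringListOrdered
--   quickCheck plusAssociative
--   quickCheck plusCommutative
--   quickCheck prop_quotRem
--   quickCheck prop_powerNotCommutative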
| nirvinm/Solving-Exercises-in-Haskell-Programming-From-First-Principles | Testing/quickcheck-testing/test/SimpleArithmeticTest.hs | gpl-3.0 | 2,845 | 0 | 9 | 629 | 945 | 512 | 433 | 52 | 3 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
-- |
-- Copyright : (c) 2010-2012 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Benedikt Schmidt <beschmi@gmail.com>
--
-- Term Algebra and related notions.
module Term.Term.Raw (
-- * Terms
Term(..)
, TermView (..)
, viewTerm
, TermView2 (..)
, viewTerm2
, termViewToTerm
-- * Diff Type
, DiffType (..)
-- ** Standard function
, traverseTerm
, fmapTerm
, bindTerm
-- ** Smart constructors
, lit
, fApp
, fAppAC
, fAppC
, fAppNoEq
, fAppList
, unsafefApp
) where
import GHC.Generics (Generic)
import Data.List
-- import Data.Monoid
-- import Data.Foldable (Foldable, foldMap)
-- import Data.Traversable
import Data.Typeable
import Data.Binary
import Data.Data
import Control.DeepSeq
-- import Control.Basics
import qualified Data.ByteString.Char8 as BC
import Extension.Data.ByteString ()
import Term.Term.Classes
import Term.Term.FunctionSymbols
----------------------------------------------------------------------
-- Terms
----------------------------------------------------------------------
-- | A term in T(Sigma,a). Its constructors are kept abstract. Use 'viewTerm'
-- or 'viewTerm2' to inspect it.
data Term a = LIT a -- ^ atomic terms (constants, variables, ..)
| FAPP FunSym [Term a] -- ^ function applications
deriving (Eq, Ord, Typeable, Data, Generic, NFData, Binary )
----------------------------------------------------------------------
-- Diff Type - whether left/right interpretation of diff is desired,
-- or no diff should occur
----------------------------------------------------------------------
data DiffType = DiffLeft | DiffRight | DiffNone | DiffBoth
----------------------------------------------------------------------
-- Views and smart constructors
----------------------------------------------------------------------
-- | View on terms that corresponds to representation.
data TermView a = Lit a
| FApp FunSym [Term a]
deriving (Show, Eq, Ord)
-- | Return the 'TermView' of the given term.
{-# INLINE viewTerm #-}
viewTerm :: Term a -> TermView a
viewTerm (LIT l) = Lit l
viewTerm (FAPP sym ts) = FApp sym ts
-- | Return the term of the given TermView.
termViewToTerm :: TermView a -> Term a
termViewToTerm (Lit l) = LIT l
termViewToTerm (FApp sym ts) = FAPP sym ts
-- | @fApp fsym as@ creates an application of @fsym@ to @as@. The function
-- ensures that the resulting term is in AC-normal-form.
{-# INLINE fApp #-}
fApp :: Ord a => FunSym -> [Term a] -> Term a
fApp (AC acSym) ts = fAppAC acSym ts
fApp (C o) ts = fAppC o ts
fApp List ts = FAPP List ts
fApp s@(NoEq _) ts = FAPP s ts
-- | Smart constructor for AC terms.
fAppAC :: Ord a => ACSym -> [Term a] -> Term a
fAppAC _ [] = error "Term.fAppAC: empty argument list"
fAppAC _ [a] = a
fAppAC acsym as =
FAPP (AC acsym) (sort (o_as ++ non_o_as))
where
o = AC acsym
isOTerm (FAPP o' _) | o' == o = True
isOTerm _ = False
(o_as0, non_o_as) = partition isOTerm as
o_as = [ a | FAPP _ ts <- o_as0, a <- ts ]
-- | Smart constructor for C terms.
fAppC :: Ord a => CSym -> [Term a] -> Term a
fAppC nacsym as = FAPP (C nacsym) (sort as)
-- | Smart constructor for non-AC/C terms.
{-# INLINE fAppNoEq #-}
fAppNoEq :: NoEqSym -> [Term a] -> Term a
fAppNoEq freesym = FAPP (NoEq freesym)
-- | Smart constructor for list terms.
{-# INLINE fAppList #-}
fAppList :: [Term a] -> Term a
fAppList = FAPP List
-- | @lit l@ creates a term from the literal @l@.
{-# INLINE lit #-}
lit :: a -> Term a
lit l = LIT l
-- | @unsafefApp fsym as@ creates an application of @fsym@ to as. The
-- caller has to ensure that the resulting term is in AC-normal-form.
unsafefApp :: FunSym -> [Term a] -> Term a
unsafefApp fsym as = FAPP fsym as
-- | View on terms that distinguishes function application of builtin symbols like exp.
data TermView2 a = FExp (Term a) (Term a) | FInv (Term a) | FMult [Term a] | One
| FPMult (Term a) (Term a) | FEMap (Term a) (Term a)
| FXor [Term a] | Zero
| FUnion [Term a]
| FPair (Term a) (Term a)
| FDiff (Term a) (Term a)
| FAppNoEq NoEqSym [Term a]
| FAppC CSym [Term a]
| FList [Term a]
| Lit2 a
deriving (Show, Eq, Ord)
-- | Returns the 'TermView2' of the given term.
viewTerm2 :: Show a => Term a -> TermView2 a
viewTerm2 (LIT l) = Lit2 l
viewTerm2 (FAPP List ts) = FList ts
viewTerm2 t@(FAPP (AC o) ts)
| length ts < 2 = error $ "viewTerm2: malformed term `"++show t++"'"
| otherwise = (acSymToConstr o) ts
where
acSymToConstr Mult = FMult
acSymToConstr Union = FUnion
acSymToConstr Xor = FXor
viewTerm2 (FAPP (C EMap) [ t1 ,t2 ]) = FEMap t1 t2
viewTerm2 t@(FAPP (C _) _) = error $ "viewTerm2: malformed term `"++show t++"'"
viewTerm2 t@(FAPP (NoEq o) ts) = case ts of
[ t1, t2 ] | o == expSym -> FExp t1 t2 -- ensure here that FExp is always exp, never a user-defined symbol
[ t1, t2 ] | o == pmultSym -> FPMult t1 t2
[ t1, t2 ] | o == pairSym -> FPair t1 t2
[ t1, t2 ] | o == diffSym -> FDiff t1 t2
[ t1 ] | o == invSym -> FInv t1
[] | o == oneSym -> One
_ | o `elem` ssyms -> error $ "viewTerm2: malformed term `"++show t++"'"
_ -> FAppNoEq o ts
where
-- special symbols
ssyms = [ expSym, pairSym, diffSym, invSym, oneSym, pmultSym ]
----------------------------------------------------------------------
-- Instances
----------------------------------------------------------------------
{-# INLINE traverseTerm #-}
traverseTerm :: (Applicative f, Ord a, Ord b) => (a -> f b) -> Term a -> f (Term b)
traverseTerm f (LIT x) = LIT <$> f x
traverseTerm f (FAPP fsym as) = fApp fsym <$> traverse (traverseTerm f) as
{-# INLINE fmapTerm #-}
fmapTerm :: Ord b => (a -> b) -> Term a -> Term b
fmapTerm f = foldTerm (lit . f) fApp
{-# INLINE bindTerm #-}
bindTerm :: Ord b => Term a -> (a -> Term b) -> Term b
bindTerm m f = foldTerm f fApp m
instance Foldable Term where
{-# INLINE foldMap #-}
foldMap f = foldTerm f (const mconcat)
instance Show a => Show (Term a) where
show t =
case viewTerm t of
Lit l -> show l
FApp (NoEq (s,_)) [] -> BC.unpack s
FApp (NoEq (s,_)) as -> BC.unpack s++"("++(intercalate "," (map show as))++")"
FApp (C EMap) as -> BC.unpack emapSymString++"("++(intercalate "," (map show as))++")"
FApp List as -> "LIST"++"("++(intercalate "," (map show as))++")"
FApp (AC o) as -> show o++"("++(intercalate "," (map show as))++")"
-- | The fold function for @Term a@.
{-# INLINE foldTerm #-}
foldTerm :: (t -> b) -> (FunSym -> [b] -> b)
-> Term t -> b
foldTerm fLIT fFAPP t = go t
where go (LIT a) = fLIT a
go (FAPP fsym a) = fFAPP fsym $ map go a
instance Sized a => Sized (Term a) where
size = foldTerm size (const $ \xs -> sum xs + 1)
| rsasse/tamarin-prover | lib/term/src/Term/Term/Raw.hs | gpl-3.0 | 7,525 | 0 | 14 | 2,046 | 2,244 | 1,170 | 1,074 | 138 | 10 |
module Foreign.Storable.FixedArray where
import Control.Monad.Trans.State (StateT, evalStateT, get, put, )
import Control.Monad.Trans.Class (lift, )
import Foreign.Ptr (Ptr, castPtr, )
import Foreign.Storable (Storable(..))
import Foreign.Marshal.Array (advancePtr, )
{-# INLINE roundUp #-}
roundUp :: Int -> Int -> Int
roundUp m x = x + mod (-x) m
{-# INLINE sizeOfArray #-}
sizeOfArray :: Storable a => Int -> a -> Int
sizeOfArray n x =
n * roundUp (alignment x) (sizeOf x)
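-- Worked example (added for illustration; the numbers assume an element type
-- with @sizeOf == 6@ and @alignment == 4@):
--
--   roundUp 4 6      == 6 + mod (-6) 4 == 6 + 2 == 8
--   sizeOfArray 3 el == 3 * roundUp 4 6 == 24
--
-- i.e. each element is padded up to the next multiple of its alignment
-- before being counted n times.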
{-# INLINE pokeNext #-}
pokeNext :: (Storable a) => a -> StateT (Ptr a) IO ()
pokeNext x =
do ptr <- get
lift $ poke ptr x
put (ptr `advancePtr` 1)
-- put (ptr `plusPtr` size x + div (- size x) (alignment x))
{-# INLINE peekNext #-}
peekNext :: (Storable a) => StateT (Ptr a) IO a
peekNext =
do ptr <- get
a <- lift $ peek ptr
put (ptr `advancePtr` 1)
return a
run :: Ptr (t a) -> StateT (Ptr a) IO c -> IO c
run ptr act =
evalStateT act (castPtr ptr)
| elitak/hs-datahand | stm32f10x/hs_src/Foreign/Storable/FixedArray.hs | gpl-3.0 | 983 | 0 | 9 | 225 | 383 | 205 | 178 | 29 | 1 |
func = do
-- complex first step
aaa
-- complex second step
bbb
where
helper :: Helper
helper = helpful
other :: Other
other = True
| lspitzner/brittany | data/Test225.hs | agpl-3.0 | 148 | 1 | 6 | 43 | 52 | 22 | 30 | 7 | 1 |
import Test.Hspec
import System.Random
import Control.Monad (forM_) -- needed for forM_ in the random tests below
import Prelude hiding (succ)
import Church
turn :: Int -> Number
turn 0 = zero
turn n = succ $ turn (n - 1)
randomList :: Int -> Int -> IO [Int]
randomList a b = getStdGen >>= return . randomRs (a, b)
getRand :: Int -> IO [Int]
getRand n = take n <$> randomList 233 666
runTest (a1, b1) = let a = mod a1 15 in
let b = mod b1 5 in do
eval (turn a + turn b) `shouldBe` (toInteger $ a + b)
eval (turn a * turn b) `shouldBe` (toInteger $ a * b)
eval (turn a `pow` turn b) `shouldBe` (toInteger $ a ^ b)
--
main :: IO ()
main = hspec $ do
describe "when using Chruch numbers" $ do
it "1 + 0 equals 1" $ do
eval (one + zero) `shouldBe` 1
it "2 * 2 equals 4" $ do
eval (succ one * succ one) `shouldBe` 4
it "2 `pow` 3 equals 8" $ do
eval (succ one `pow` succ (succ one)) `shouldBe` 8
it "2 + 3 equals 5" $ do
eval (two + three) `shouldBe` 5
it "3 * 2 equals 6" $ do
eval (three * two) `shouldBe` 6
it "3 `pow` 2 equals 9" $ do
eval (3 `pow` 2) `shouldBe` 9
it "random tests" $ do
rand1 <- getRand 100
rand2 <- getRand 100
forM_ (zip rand1 rand2) runTest
--
| ice1000/OI-codes | codewars/authoring/haskell/ChurchSpec.hs | agpl-3.0 | 1,204 | 1 | 20 | 357 | 594 | 287 | 307 | 35 | 1 |
{-
Copyright (C) 2004 - 2009 John Goerzen <jgoerzen@complete.org>
-}
{- |
Module : Test.QuickCheck.Tools
Copyright : Copyright (C) 2004-2005 John Goerzen
License : GNU LGPL, version 2 or above
Maintainer : John Goerzen <jgoerzen@complete.org>
Stability : provisional
Portability: portable
Utilities for HUnit unit testing.
Written by John Goerzen, jgoerzen\@complete.org
-}
module Test.QuickCheck.Tools (-- * Comparisons
(@=?),
(@?=)
)
where
#if MIN_VERSION_QuickCheck(2,6,0)
import Test.QuickCheck.Property (Result(..), callbacks, expect, theException, ok, reason, stamp)
#if MIN_VERSION_QuickCheck(2,7,0)
#else
import Test.QuickCheck.Property (Result(..), callbacks, expect, interrupted, ok, reason, stamp)
#endif
#else
import Test.QuickCheck hiding (Result, reason)
import Test.QuickCheck.Property
#endif
{- | Compare two values. If same, the test passes. If different, the result indicates
what was expected and what was received as part of the error. -}
(@=?) :: (Eq a, Show a) => a -> a -> Result
expected @=? actual =
MkResult {ok = Just (expected == actual),
#if MIN_VERSION_QuickCheck(2,7,0)
expect = True, theException = Nothing,
#else
expect = True, interrupted = False,
#endif
reason = "Result: expected " ++ show expected ++ ", got " ++ show actual,
stamp = [], callbacks = []}
{- | Like '@=?', but with args in a different order. -}
(@?=) :: (Eq a, Show a) => a -> a -> Result
(@?=) = flip (@=?)
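-- A hypothetical usage sketch (added, not part of the original module):
-- because the operators return a QuickCheck 'Result', which is Testable,
-- they can be used directly as the body of a property, e.g.
--
-- prop_reverseTwice :: [Int] -> Result
-- prop_reverseTwice xs = xs @=? reverse (reverse xs)
--
-- and a failure message then spells out the expected and actual values.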
| jgoerzen/testpack | src/Test/QuickCheck/Tools.hs | lgpl-2.1 | 1,654 | 0 | 10 | 453 | 219 | 137 | 82 | 13 | 1 |
--------------------------------------------------------------------------------
-- $Id: BuiltInMap.hs,v 1.5 2003/12/18 18:27:46 graham Exp $
--
-- Copyright (c) 2003, G. KLYNE. All rights reserved.
-- See end of this file for licence information.
--------------------------------------------------------------------------------
-- |
-- Module : BuiltInMap
-- Copyright : (c) 2003, Graham Klyne
-- License : GPL V2
--
-- Maintainer : Graham Klyne
-- Stability : provisional
-- Portability : H98
--
-- This module collects references and provides access to all of the
-- datatypes, variable binding modifiers and variable binding filters
-- built in to Swish.
--
--------------------------------------------------------------------------------
module Swish.HaskellRDF.BuiltInMap
( findRDFOpenVarBindingModifier
, findRDFDatatype
, rdfRulesetMap
, allRulesets, allDatatypeRulesets
)
where
import Swish.HaskellRDF.BuiltInDatatypes
import Swish.HaskellRDF.BuiltInRules
--------------------------------------------------------------------------------
--
-- Copyright (c) 2003, G. KLYNE. All rights reserved.
--
-- This file is part of Swish.
--
-- Swish is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- Swish is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Swish; if not, write to:
-- The Free Software Foundation, Inc.,
-- 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
--------------------------------------------------------------------------------
-- $Source: /file/cvsdev/HaskellRDF/BuiltInMap.hs,v $
-- $Author: graham $
-- $Revision: 1.5 $
-- $Log: BuiltInMap.hs,v $
-- Revision 1.5 2003/12/18 18:27:46 graham
-- Datatyped literal inferences all working
-- (except equivalent literals with different datatypes)
--
-- Revision 1.4 2003/12/17 16:56:39 graham
-- Split content of BuiltInMap into separate modules, to avoid recursive
-- module dependency with RDFProofContext.
--
-- Revision 1.3 2003/12/11 19:11:07 graham
-- Script processor passes all initial tests.
--
-- Revision 1.2 2003/12/10 03:48:57 graham
-- SwishScript nearly complete: BwdChain and PrrofCheck to do.
--
-- Revision 1.1 2003/12/08 23:56:07 graham
-- Various enhancements to variable bindings and proof structure.
-- New module BuiltInMap coded and tested.
-- Script processor is yet to be completed.
--
| amccausl/Swish | Swish/HaskellRDF/BuiltInMap.hs | lgpl-2.1 | 2,914 | 0 | 4 | 540 | 101 | 89 | 12 | 7 | 0 |
import Test.Tasty
import TestSet
import TestTree23
import TestTrie
import TestLightTrie
import TestBTree
import TestLightBTree
import TestFixFile
main :: IO ()
main = defaultMain $
testGroup "FixFile"
[
test23
,testSet
,testTrie
,testLightTrie
,testBTree
,testLightBTree
,testFixFile
]
| revnull/fixfile | tests/Tests.hs | lgpl-3.0 | 352 | 0 | 7 | 101 | 78 | 44 | 34 | 19 | 1 |
data List a = Cons a (List a)
| Nil
deriving (Show)
fromList :: [a] -> List a
fromList (x:xs) = Cons x (fromList xs)
fromList [] = Nil
fromList' :: List a -> [a]
fromList' (Cons x xs) = x:fromList' xs
fromList' Nil = []
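-- A quick round-trip check (added illustration): converting to the custom
-- List type and back is the identity on ordinary lists, e.g.
--
-- >>> fromList' (fromList [1, 2, 3])
-- [1,2,3]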
| EricYT/Haskell | src/chapter-14-3.hs | apache-2.0 | 259 | 2 | 10 | 85 | 138 | 66 | 72 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Actions
( doExports
) where
import Prelude
import Data.Maybe (fromMaybe)
import Haste
import Haste.Foreign
import FormEngine.JQuery
import qualified Bridge as B
doExports :: IO ()
doExports = doExport B.SavePlan savePlan
doExport :: (ToAny a, FFI a) => B.ClientAction -> a -> IO ()
doExport action = export (toJSString $ B.toFnName action)
showMessage :: String -> JQuery -> IO ()
showMessage msg barJq = do
_ <- appearJq barJq >>= setHtml msg
_ <- setTimer (Once 3000) (do
_ <- disappearJq barJq >>= setHtml ""
return ()
)
return ()
showInfo :: JQuery -> String -> IO ()
showInfo _ msg = selectById B.infoBarId >>= showMessage msg
savePlan :: JQuery -> IO ()
savePlan jq = do
form <- selectById B.formId
ajaxSubmitForm form (showInfo jq . fromMaybe "")
| DataStewardshipPortal/ds-elixir-cz | src/Actions.hs | apache-2.0 | 832 | 0 | 14 | 168 | 316 | 156 | 160 | 26 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
-- |Basic downloading and file saving functions.
module Crawling.Hephaestos.Fetch (
-- *Task categories
-- |These categories are used by 'download' to send updates about running
-- downloads. For full book-keeping, the 'downloadStatus' of 'FetchOptions'
-- that are passed to 'download' must have all of the below categories
-- (they don't /have/ to be present, however).
downloadingTasks,
finishedTasks,
failedTasks,
-- * Downloading
simpleDownload,
download,
downloadWhole,
consume,
saveFile,
downloadsFolder,
module Crawling.Hephaestos.Fetch.ErrorHandling,
)where
import Prelude hiding (FilePath)
import Control.Concurrent.STM
import Control.Concurrent.STM.Utils
import Control.Exception (SomeException(..))
import Control.Lens ((&), (%~), (^.), (+~), (.~))
import Control.Monad (when)
import Control.Monad.Catch (throwM)
import Control.Monad.IO.Class
import Control.Monad.Trans.Resource (ResourceT, runResourceT)
import Data.Aeson
import qualified Data.Binary as B
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS (readInteger)
import qualified Data.ByteString.Lazy as BL
import qualified Data.Conduit as Con
import qualified Data.Conduit.Binary as ConB
import qualified Data.Conduit.List as ConL
import Data.Default
import Data.Functor
import Data.Functor.Monadic
import qualified Data.List.Safe as L
import Data.Maybe (isJust)
import Data.Monoid
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.Encoding as T
import Filesystem.Path.CurrentOS' hiding (append, encode)
import Network.HTTP.Conduit hiding (path, withManager)
import Network.HTTP.Types.Header (hContentLength)
import Network.Socket.Internal
import Network.URI (URI)
import System.Directory
import qualified System.Log.Logger as Log
import Crawling.Hephaestos.Fetch.Types
import Crawling.Hephaestos.Fetch.Successor hiding (reqMod)
import Crawling.Hephaestos.Fetch.ErrorHandling
infoM x = liftIO . Log.infoM ("Hephaestos.Fetch." L.++ x)
debugM x = liftIO . Log.debugM ("Hephaestos.Fetch." L.++ x)
alertM x = liftIO . Log.alertM ("Hephaestos.Fetch." L.++ x)
-- |Category identifier for currently running downloads.
downloadingTasks :: TaskCat
downloadingTasks = TaskCat 0
-- |Category identifier for successfully finished downloads.
finishedTasks :: TaskCat
finishedTasks = TaskCat 1
-- |Category identifier for failed downloads.
failedTasks :: TaskCat
failedTasks = TaskCat 2
-- |Gets the content of a URL.
-- @simpleDownload = withSocketsDo . simpleHttp@, and thus the caveats of
-- @simpleHttp@ apply.
simpleDownload :: URL -> IO BL.ByteString
simpleDownload = withSocketsDo . simpleHttp . T.unpack
-- |Downloads a whole resource and returns the contents as soon as the
-- downloading process has finished.
--
-- Calling this function is perfectly fine, but keep in mind that
--
-- 1. the whole resource will be downloaded before it returns and
-- 2. all of it will be kept in memory.
--
-- For large files and more fine-grained control, use 'download'.
downloadWhole :: FetchOptions -> URI -> IO BL.ByteString
downloadWhole opts url = runResourceT $ do
(_,content) <- download opts url
consume content
-- |Consumes an entire conduit and returns the contents.
consume :: Con.Source (ResourceT IO) BL.ByteString -> ResourceT IO BL.ByteString
consume content = content Con.$$ (ConL.map BL.toStrict Con.=$= ConL.consume)
>$> BL.fromChunks
-- |Downloads the contents of a URL and periodically provides information
-- about the download's progress via the 'downloadStatus' field of the
-- 'FetchOptions' argument.
--
-- == Laziness
-- This function always sends a request, but it returns as soon as the response
-- headers arrive. The response body can be accessed via the returned conduit,
-- meaning that large responses can be streamed on demand.
--
-- == Download status
-- When a download starts, it is placed in the
-- 'downloadingTasks' category, where it will be continuously updated as data
-- comes in. If a download fails, it is put in the 'failedTasks' category;
-- if it finishes successfully, it is put into 'finishedTasks'. In either
-- of these two cases, it is not touched again.
--
-- In principle, any other function is free to modify the TVar as it sees fit
-- (i.e. doing so will not cause exceptions), but modifying the data of
-- currently running downloads is not really sensible.
--
-- /Note/: if you do not want download statuses to clog up memory, simply pass
-- a 'FetchOptions' object that lacks any or all of
-- @{downloadingTasks, finishedTasks, failedTasks}@.
download :: FetchOptions
-- ^Array for storing the download progress.
-- 'download' will find the lowest unused key in the range [0..]
-- and insert a new 'Download' item under it, which it will
-- modify as the resource is downloaded.
-> URI -- ^The URL.
-> ResourceT IO (Int, Con.Source (ResourceT IO) BL.ByteString)
-- ^The key for the current download along with the 'Conduit' from
-- which the contents can be fetched.
download opts url = do
req <- (runRequestConfig $ opts ^. reqFunc) <$> parseUrl (show url)
key <- atomicallyM $ insertTask sl downloadingTasks
(def & downloadURL .~ T.pack (show url))
infoM "download" $ "Sending request to " L.++ show url L.++ "."
-- send the request
res <- http req (opts ^. manager)
-- first, we unwrap the source, turning the ResumableSource into a
-- regular one. This is done because ResumableSource doesn't have a bracketP.
debugM "download" $ "Response arrived from " L.++ show url L.++ "."
(src, finalizer) <- Con.unwrapResumable $ responseBody res
let -- we need to do four things in addition to just streaming the data:
-- 1. continuously update the number of downloaded bytes
conduit = src Con.$= reportProgressConduit key
-- 2. set the length (if available) and the download status when the
-- download starts
whenBegin = update key (\s -> s & downloadSize .~ clength res
& downloadStatus .~ InProgress)
-- 3. set the download status to 'Failed' in case of errors.
whenErr (e :: SomeException) = do
atomicallyM (do update key (& downloadStatus .~ Failed (SomeException e))
transferTask sl downloadingTasks key failedTasks)
throwM e
-- 4. set the download status to 'Finished' in the end.
whenEnd = do atomically (do update key (& downloadStatus .~ Finished)
transferTask sl downloadingTasks key finishedTasks)
infoM "download" $ "Download from " L.++ show url L.++ " finished."
-- for some reason, bracketP is specialized to IO. We therefore have to
-- add the finalizer via addCleanup.
conduit' = Con.addCleanup (const finalizer)
$ Con.bracketP (atomically whenBegin)
(const whenEnd)
(const $ Con.handleC whenErr conduit)
-- NOTE: all of this ONLY serves to update the TVar and thereby inform
-- anyone listening of the download progress. 'download' and the conduit
-- it returns make no attempt at dealing with exceptions. That is left
-- to higher-level functions.
return (key, conduit')
where
sl = opts ^. downloadCategories
update = updateTask sl downloadingTasks
reportProgressConduit :: Int -> Con.Conduit BS.ByteString (ResourceT IO) BL.ByteString
reportProgressConduit slot = do
open <- ConL.peek >$> isJust
when open $ do
chunk <- ConB.take 8192
atomicallyM $ update slot (& downloadBytes +~ fromIntegral (BL.length chunk))
Con.yield chunk
reportProgressConduit slot
clength :: Response a -> Maybe Integer
clength r = lookup hContentLength (responseHeaders r)
>>= BS.readInteger
>$> fst
-- |Saves the contents of a HTTP response to a local file.
-- If the save directory does not exist, it will be created.
--
-- This function doesn't catch any exceptions and doesn't wrap exceptions
-- into failure nodes.
--
-- If the result type is a failure and the 'maxFailureNodes' setting
-- is 0, the file won't be saved at all.
--
-- This function is partial; inner nodes will not be saved
-- (except when they are wrapped in failure-nodes).
saveFile :: forall e i.Show e
=> FetchOptions
-> FilePath -- ^The root of the filename under which to save. Should not contain the extension.
-> FetchResult e i -- ^Contents of the file
-> IO (Maybe FilePath) -- ^The actual filename under which the response was saved.
saveFile opts fn response
| isFailure response && (opts ^. maxFailureNodes) <<= 0 = return Nothing
| otherwise = do
let path = opts ^. savePath
createDirectoryIfMissing True (encodeString path)
-- get a conduit and slam everything into the target file
let fn' = encodeString $ path </> fn <.> ext response
runResourceT $ do
content <- action response
-- we first fuse "toStrict" and "sinkFile" and give that as the finaliser
-- for "content".
content Con.$$ toStrict Con.=$= ConB.sinkFile fn'
return $ Just $ fn <.> ext response
where
toStrict = Con.awaitForever $ Con.yield . BL.toStrict
-- downloads a Blob and gets the contents
action :: FetchResult e i -> ResourceT IO (Con.Source (ResourceT IO) BL.ByteString)
action (Blob _ url reqMod) = withTaskLimit (opts ^. taskLimit) $
download (opts & reqFunc %~ (`mappend` reqMod)) url >$> snd
--gets a ByteString out of a non-Blob leaf
action (PlainText _ p) = return' $ T.encodeUtf8 p
action (XmlResult _ p) = return' $ B.encode p
action (BinaryData _ p) = return' p
action (Info _ k v) = return' $ encode $ object ["key" .= k, "value" .= v]
action f@(Failure _ _) = return' $ encode $ action' f (opts ^. maxFailureNodes) 0
action Inner{} = liftIO (alertM "saveFile" "called saveFile with Inner node!")
>> error "called saveFile with Inner node!"
-- saves a chain of failure nodes, omitting some if the maxFailureNodes
-- limit is reached.
-- we have two "modes": the regular one, in which we create a nested
-- object, and the "limit reached" one, in which we go straight to
-- the root and count the number of omitted nodes along the way.
action' :: Show e => FetchResult e i -> Maybe Int -> Int -> Value
action' (Failure _ (Just (orig, _))) (Just 0) omitted = action' orig (Just 0) $ omitted + 1
action' f@(Failure e (Just (orig, name))) limit omitted =
object' ["error" .= show e, "type" .= ext f,
"child" .= action' orig (limit >$> subtract 1) omitted]
[("filename", name >$> encodeString)]
-- the root of a failure chain
action' f _ n = object' ["type" .= ext f]
[("url", getURL f >$> show >$> toJSON),
("omitted", n' >$> toJSON),
("error", getError f >$> show >$> toJSON)]
where
n' = if n == 0 then Nothing else Just n
-- |Gets the user's Downloads folder. This is assumed to be
-- the directory named \"Downloads\" (case sensitive)
-- in the user's home directory.
downloadsFolder :: (MonadIO m, Functor m) => m FilePath
downloadsFolder = liftIO getHomeDirectory
>$> decodeString
>$> (</> decodeString "Downloads")
>$> encodeString
>>= liftIO . canonicalizePath
>$> decodeString
-- Helpers
-------------------------------------------------------------------------------
(<<=) :: Ord a => Maybe a -> a -> Bool
(<<=) Nothing _ = False
(<<=) (Just x) y = x <= y
-- |Creates a new resumable source from a value.
return' :: Monad m => o -> ResourceT IO (Con.Source m o)
return' = return . Con.yield
-- creates a JSON object out of a list of mandatory and a list of
-- optional fields
object' xs = object . (xs L.++) . map (\(k,Just v) -> k.=v) . filter (isJust.snd)
| jtapolczai/Hephaestos | Crawling/Hephaestos/Fetch.hs | apache-2.0 | 12,562 | 0 | 21 | 3,073 | 2,414 | 1,323 | 1,091 | -1 | -1 |
import Prelude
split' :: [a] -> [a] -> Int -> [[a]]
split' firstList xs 0 = (reverse firstList):xs:[] -- return the result
split' firstList [] _ = (reverse firstList):[]:[]
split' firstList (x:xs) n = split' (x:firstList) xs (n - 1)
split :: Eq a => [a] -> Int -> [[a]]
split [] _ = []
split xs n = split' [] xs n
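-- Example behaviour (added illustration, traced from the definitions above):
--
-- >>> split "abcdefgh" 3
-- ["abc","defgh"]
-- >>> split [1, 2] 5
-- [[1,2],[]]
--
-- The accumulator is reversed when the counter reaches 0 or the input runs
-- out, so the first n elements end up in the first sub-list.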
| 2dor/99-problems-Haskell | 11-20-lists-continued/problem-17.hs | bsd-2-clause | 316 | 1 | 11 | 65 | 211 | 105 | 106 | 8 | 1 |
{-# OPTIONS -#include "openssl/md5.h" #-}
--------------------------------------------------------------------
-- |
-- Module : Data.Digest.OpenSSL.MD5
-- Copyright : (c) Galois, Inc. 2007
-- License : BSD3
-- Maintainer: Don Stewart <dons@galois.com>
-- Stability : provisional
-- Portability: Requires FFI
--
--------------------------------------------------------------------
--
-- ByteString-based, zero-copying binding to OpenSSL's md5 interface
--
module Data.Digest.OpenSSL.MD5 where
--
-- A few imports, should tidy these up one day.
--
#if __GLASGOW_HASKELL__ >= 608
import qualified Data.ByteString.Unsafe as B (unsafeUseAsCStringLen)
#else
import qualified Data.ByteString.Base as B (unsafeUseAsCStringLen)
#endif
import qualified Data.ByteString as B
import Foreign
import Foreign.C.Types
import Numeric (showHex)
import System.IO.Unsafe
md5_digest_length :: Int
md5_digest_length = 16
--
-- | Fast md5 using OpenSSL. The md5 hash should be referentially transparent.
-- The ByteString is guaranteed not to be copied.
--
-- The result string should be identical to the output of MD5(1).
-- That is:
--
-- > $ md5 /usr/share/dict/words
-- > MD5 (/usr/share/dict/words) = e5c152147e93b81424c13772330e74b3
--
-- While this md5sum binding will return:
--
md5sum :: B.ByteString -> String
md5sum p = unsafePerformIO $ B.unsafeUseAsCStringLen p $ \(ptr,n) -> do
allocaBytes md5_digest_length $ \digest_ptr -> do
digest <- c_md5 ptr (fromIntegral n) digest_ptr
go digest 0 []
where
-- print it in 0-padded hex format
go :: Ptr Word8 -> Int -> [String] -> IO String
#ifndef __HADDOCK__
go q n acc
| n `seq` q `seq` False = undefined
| n >= 16 = return $ concat (reverse acc)
| otherwise = do w <- peekElemOff q n
go q (n+1) (draw w : acc)
draw w = case showHex w [] of
[x] -> ['0', x]
x -> x
#endif
--
-- unsigned char *MD5(const unsigned char *d, unsigned long n, unsigned char *md);
--
foreign import ccall "openssl/md5.h MD5" c_md5
:: Ptr CChar -> CULong -> Ptr CChar -> IO (Ptr Word8)
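-- A hypothetical usage example (added; requires linking against OpenSSL's
-- libcrypto; the hash below is the well-known MD5 of the ASCII string
-- "hello"):
--
-- >>> md5sum (Data.ByteString.Char8.pack "hello")
-- "5d41402abc4b2a76b9719d911017c592"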
| Axiomatic-/nano-md5 | Data/Digest/OpenSSL/MD5.hs | bsd-3-clause | 2,173 | 0 | 16 | 480 | 402 | 229 | 173 | 26 | 2 |
{-# LANGUAGE RankNTypes, TemplateHaskell, DeriveGeneric #-}
{-| This module defines a data type that can be used as the summary
type for a composite analysis using all of the analyses defined in
this package.
It is useful to have it defined in a common module so it can be
re-used for all of the tests and the driver program.
Additionally, moving it to the library (instead of duplicating it in
each executable) makes it easier to use TemplateHaskell here to
generate lenses.
-}
module Foreign.Inference.Analysis.Util.CompositeSummary (
FunctionMetadata(..),
AnalysisSummary(..),
nullableSummary,
outputSummary,
arraySummary,
returnSummary,
finalizerSummary,
escapeSummary,
allocatorSummary,
refCountSummary,
sapSummary,
sapPTRelSummary,
scalarEffectSummary,
errorHandlingSummary,
transferSummary,
extractSummary
) where
import GHC.Generics
import Control.DeepSeq
import Control.DeepSeq.Generics
import Control.Lens
import Data.Monoid
import LLVM.Analysis
import LLVM.Analysis.BlockReturnValue
import LLVM.Analysis.Dominance
import LLVM.Analysis.CDG
import LLVM.Analysis.CFG
import LLVM.Analysis.NullPointers
import Foreign.Inference.Analysis.Allocator
import Foreign.Inference.Analysis.Array
import Foreign.Inference.Analysis.ErrorHandling
import Foreign.Inference.Analysis.Escape
import Foreign.Inference.Analysis.Finalize
import Foreign.Inference.Analysis.Nullable
import Foreign.Inference.Analysis.Output
import Foreign.Inference.Analysis.RefCount
import Foreign.Inference.Analysis.Return
import Foreign.Inference.Analysis.SAP
import Foreign.Inference.Analysis.SAPPTRel
import Foreign.Inference.Analysis.ScalarEffects
import Foreign.Inference.Analysis.Transfer
import Foreign.Inference.Diagnostics
import Foreign.Inference.Interface
-- | The value we derive from each function during the call graph
-- traversal. For now, it just adds a CFG.
data FunctionMetadata =
FunctionMetadata { functionOriginal :: Function
, functionCFG :: CFG
, functionCDG :: CDG
, functionDomTree :: DominatorTree
, functionPostdomTree :: PostdominatorTree
, functionBlockReturns :: BlockReturns
, functionNullPointers :: NullPointersSummary
}
instance HasNullSummary FunctionMetadata where
getNullSummary = functionNullPointers
instance HasBlockReturns FunctionMetadata where
getBlockReturns = functionBlockReturns
instance HasFunction FunctionMetadata where
getFunction = functionOriginal
instance HasCFG FunctionMetadata where
getCFG = functionCFG
instance HasDomTree FunctionMetadata where
getDomTree = functionDomTree
instance HasPostdomTree FunctionMetadata where
getPostdomTree = functionPostdomTree
instance FuncLike FunctionMetadata where
fromFunction f =
FunctionMetadata { functionOriginal = f
, functionCFG = cfg
, functionCDG = cdg
, functionDomTree = dominatorTree cfg
, functionPostdomTree = postdominatorTree cfg
, functionBlockReturns = labelBlockReturns cfg
, functionNullPointers = nullPointersAnalysis cdg
}
where
cdg = controlDependenceGraph cfg
cfg = controlFlowGraph f
instance HasCDG FunctionMetadata where
getCDG = functionCDG
-- | A type containing all of the sub-summaries.
data AnalysisSummary =
AnalysisSummary { _nullableSummary :: !NullableSummary
, _outputSummary :: !OutputSummary
, _arraySummary :: !ArraySummary
, _returnSummary :: !ReturnSummary
, _finalizerSummary :: !FinalizerSummary
, _escapeSummary :: !EscapeSummary
, _allocatorSummary :: !AllocatorSummary
, _refCountSummary :: !RefCountSummary
, _scalarEffectSummary :: !ScalarEffectSummary
, _errorHandlingSummary :: !ErrorSummary
, _transferSummary :: !TransferSummary
, _sapSummary :: !SAPSummary
, _sapPTRelSummary :: !SAPPTRelSummary
}
deriving (Eq, Generic)
$(makeLenses ''AnalysisSummary)
instance NFData AnalysisSummary where
rnf = genericRnf
instance Monoid AnalysisSummary where
mempty = AnalysisSummary { _nullableSummary = mempty
, _outputSummary = mempty
, _arraySummary = mempty
, _returnSummary = mempty
, _finalizerSummary = mempty
, _escapeSummary = mempty
, _allocatorSummary = mempty
, _refCountSummary = mempty
, _scalarEffectSummary = mempty
, _errorHandlingSummary = mempty
, _transferSummary = mempty
, _sapSummary = mempty
, _sapPTRelSummary = mempty
}
mappend a1 a2 =
AnalysisSummary { _nullableSummary = _nullableSummary a1 `mappend` _nullableSummary a2
, _outputSummary = _outputSummary a1 `mappend` _outputSummary a2
, _arraySummary = _arraySummary a1 `mappend` _arraySummary a2
, _returnSummary = _returnSummary a1 `mappend` _returnSummary a2
, _finalizerSummary = _finalizerSummary a1 `mappend` _finalizerSummary a2
, _escapeSummary = _escapeSummary a1 `mappend` _escapeSummary a2
, _allocatorSummary = _allocatorSummary a1 `mappend` _allocatorSummary a2
, _refCountSummary = _refCountSummary a1 `mappend` _refCountSummary a2
, _scalarEffectSummary = _scalarEffectSummary a1 `mappend` _scalarEffectSummary a2
, _errorHandlingSummary = _errorHandlingSummary a1 `mappend` _errorHandlingSummary a2
, _transferSummary = _transferSummary a1 `mappend` _transferSummary a2
, _sapSummary = _sapSummary a1 `mappend` _sapSummary a2
, _sapPTRelSummary = _sapPTRelSummary a1 `mappend` _sapPTRelSummary a2
}
-- | Apply a function that uniformly summarizes *all* of the
-- individual analysis summaries. Uses so far are extracting
-- diagnostics and producing module summaries.
extractSummary :: AnalysisSummary ->
(forall a . (HasDiagnostics a, SummarizeModule a) => a -> b)
-> [b]
extractSummary summ f =
[ f (_nullableSummary summ)
, f (_outputSummary summ)
, f (_arraySummary summ)
, f (_returnSummary summ)
, f (_finalizerSummary summ)
, f (_escapeSummary summ)
, f (_allocatorSummary summ)
, f (_refCountSummary summ)
, f (_scalarEffectSummary summ)
, f (_errorHandlingSummary summ)
, f (_transferSummary summ)
, f (_sapSummary summ)
, f (_sapPTRelSummary summ)
]
| travitch/foreign-inference | src/Foreign/Inference/Analysis/Util/CompositeSummary.hs | bsd-3-clause | 7,059 | 0 | 10 | 1,956 | 1,155 | 676 | 479 | 166 | 1 |
module Data.Geo.GPX.Lens.VdopL where
import Data.Lens.Common
class VdopL a where
vdopL :: Lens a (Maybe Double)
| tonymorris/geo-gpx | src/Data/Geo/GPX/Lens/VdopL.hs | bsd-3-clause | 117 | 0 | 9 | 20 | 40 | 23 | 17 | 4 | 0 |
module Data.Json.Projection where
import JPrelude
newtype Projection = Projection { unProject :: Value}
-- TODO: Implement
| Prillan/haskell-jsontools | src/Data/Json/Projection.hs | bsd-3-clause | 126 | 0 | 6 | 19 | 24 | 17 | 7 | 3 | 0 |
-- |
-- Module: Database.PostgreSQL.Store
-- Copyright: (c) Ole Krüger 2016
-- License: BSD3
-- Maintainer: Ole Krüger <ole@vprsm.de>
module Database.PostgreSQL.Store (
-- * Errand
Errand,
runErrand,
execute,
execute',
query,
queryWith,
prepare,
beginTransaction,
commitTransaction,
saveTransaction,
rollbackTransaction,
rollbackTransactionTo,
withTransaction,
-- * Query
Query (..),
PrepQuery (..),
pgQuery,
pgPrepQuery,
pgQueryGen,
-- * Types
Oid (..),
-- * Entity
Entity (..),
-- * Tables
Table (..),
TableEntity (..),
-- * Errors
ErrandError (..),
ErrorCode (..),
P.ExecStatus (..),
RowError (..),
RowErrorLocation (..),
RowErrorDetail (..)
) where
import Database.PostgreSQL.Store.Entity
import Database.PostgreSQL.Store.Errand
import Database.PostgreSQL.Store.Query
import Database.PostgreSQL.Store.RowParser
import Database.PostgreSQL.Store.Table
import Database.PostgreSQL.Store.Types
import qualified Database.PostgreSQL.LibPQ as P
| vapourismo/pg-store | src/Database/PostgreSQL/Store.hs | bsd-3-clause | 1,065 | 56 | 5 | 221 | 261 | 175 | 86 | 36 | 0 |
{-# LANGUAGE PolyKinds, DataKinds, TypeFamilies, RankNTypes, TypeOperators, FlexibleContexts, ScopedTypeVariables, MultiParamTypeClasses, FunctionalDependencies, TypeSynonymInstances, FlexibleInstances, GADTs, UndecidableInstances #-}
import GHC.Prim (Any)
import Unsafe.Coerce (unsafeCoerce)
import Prelude (undefined, ($))
type family (~>) :: i -> i -> *
type instance (~>) = (->)
newtype Nat f g = Nat { runNat :: forall a. f a ~> g a }
type instance (~>) = Nat
class Functor (f :: i -> j) where
fmap :: (a ~> b) -> f a ~> f b
-- Either a :: * -> *
instance Functor (Either a) where
fmap f = \case
Left a -> Left a
Right a -> Right (f a)
-- Either :: * -> (* -> *)
instance Functor Either where
fmap f = Nat $ \case
Left a -> Left (f a)
Right a -> Right a
class Profunctor p where
dimap :: (a ~> b) -> (c ~> d) -> p b c ~> p a d
class (Profunctor p, p ~ (~>)) => Category p where
id :: p a a
(.) :: p b c -> p a b -> p a c
evil :: p a b
evil = unsafeCoerce (id :: p a a)
type Iso s t a b = forall p. Profunctor p => p a b -> p s t
data COMPOSE = Compose
type Compose = (Any 'Compose :: (j -> k) -> (i -> j) -> i -> k)
composed :: Category ((~>) :: k -> k -> *) => Iso (Compose f g a :: k) (Compose f' g' a' :: k) (f (g a)) (f' (g' a))
composed = dimap evil evil
data Prod (p :: (i,j)) (q :: (i,j)) where
Prod :: (a ~> b) -> (c ~> d) -> Prod '(a,c) '(b,d)
type instance (~>) = Prod -- :: (i,j) -> (i,j) -> *)
class Functor f where
fmap :: (a ~> b) -> f a ~> f b
instance Category ((~>) :: j -> j -> *) => Functor ('(,) :: i -> j -> (i, j)) where
fmap f = Nat $ Prod f id
instance Category ((~>) :: i -> i -> *) => Functor ('(,) a :: j -> (i, j)) where
fmap = Prod id
-- '(,) 1 2 :: (Nat, Nat)
{-
instance Functor ('Left :: a -> Either a b)
instance Functor ('Right :: b -> Either a b)
instance Functor ('Just :: a -> Maybe a)
-}
data LIM = Lim
type Lim = (Any 'Lim :: (i -> j) -> j)
data CONST = Const
type Const = (Any 'Const :: j -> i -> j)
class f -| g | f -> g, g -> f where
adj :: Iso (f a ~> b) (f a' ~> b') (a ~> g b) (a' ~> g b')
instance Const -| Lim where
adj = dimap todo todo
todo :: a
todo = undefined
newtype Get r a b = Get { runGet :: a ~> r }
| ekmett/hask | wip/Evil.hs | bsd-3-clause | 2,233 | 50 | 11 | 571 | 1,079 | 575 | 504 | -1 | -1 |
--
-- A very simple example application using System.MIDI.
-- It's a basic MIDI monitor: prints all the incoming messages.
--
module Main where
import Control.Monad
import Control.Concurrent
import System.MIDI
-- the essence
mythread conn = do
events <- getEvents conn
mapM_ print events
(threadDelay 5000)
mythread conn
-- source / destination selection
maybeRead :: Read a => String -> Maybe a
maybeRead s = case reads s of
[(x,"")] -> Just x
_ -> Nothing
select srclist = do
names <- mapM getName srclist
forM_ (zip [1..] names) $ \(i,name) -> putStrLn $ show i ++ ": " ++ name
let nsrc = length srclist
src <- case srclist of
[] -> fail "no midi devices found"
[x] -> return x
_ -> do
putStrLn "please select a midi device"
l <- getLine
let k = case maybeRead l of
{ Nothing -> nsrc
; Just m -> if m<1 || m>nsrc then nsrc else m
}
putStrLn $ "device #" ++ show k ++ " selected."
return $ srclist!!(k-1)
return src
-- main
main = do
srclist <- enumerateSources
putStrLn "midi sources:"
src <- select srclist
conn <- openSource src Nothing
putStrLn "connected"
threadid <- forkIO (mythread conn)
start conn ; putStrLn "started. Press 'ENTER' to exit."
getLine
stop conn ; putStrLn "stopped."
killThread threadid
close conn ; putStrLn "closed."
| hanshoglund/hamid | examples/monitor.hs | bsd-3-clause | 1,419 | 0 | 22 | 394 | 463 | 218 | 245 | -1 | -1 |
module Language.Verilog.AST
( Identifier
, Module (..)
, ModuleItem (..)
, Stmt (..)
, LHS (..)
, Expr (..)
, UniOp (..)
, BinOp (..)
, Sense (..)
, Call (..)
, PortBinding
, Case
, Range
) where
import Data.Bits
import Data.List
import Data.Maybe
import Text.Printf
import Data.BitVec
type Identifier = String
data Module = Module Identifier [Identifier] [ModuleItem] deriving Eq
instance Show Module where
show (Module name ports items) = unlines
[ "module " ++ name ++ (if null ports then "" else "(" ++ commas ports ++ ")") ++ ";"
, unlines' $ map show items
, "endmodule"
]
data ModuleItem
= Comment String
| Parameter (Maybe Range) Identifier Expr
| Localparam (Maybe Range) Identifier Expr
| Input (Maybe Range) [Identifier]
| Output (Maybe Range) [Identifier]
| Inout (Maybe Range) [Identifier]
| Wire (Maybe Range) [(Identifier, Maybe Expr)]
| Reg (Maybe Range) [(Identifier, Maybe Range)]
| Integer [Identifier]
| Initial Stmt
| Always (Maybe Sense) Stmt
| Assign LHS Expr
| Instance Identifier [PortBinding] Identifier [PortBinding]
deriving Eq
type PortBinding = (Identifier, Maybe Expr)
instance Show ModuleItem where
show a = case a of
Comment a -> "// " ++ a
Parameter r n e -> printf "parameter %s%s = %s;" (showRange r) n (showExprConst e)
Localparam r n e -> printf "localparam %s%s = %s;" (showRange r) n (showExprConst e)
Input r a -> printf "input %s%s;" (showRange r) (commas a)
Output r a -> printf "output %s%s;" (showRange r) (commas a)
Inout r a -> printf "inout %s%s;" (showRange r) (commas a)
Wire r a -> printf "wire %s%s;" (showRange r) (commas [ a ++ showAssign r | (a, r) <- a ])
Reg r a -> printf "reg %s%s;" (showRange r) (commas [ a ++ showRange r | (a, r) <- a ])
Integer a -> printf "integer %s;" $ commas a
Initial a -> printf "initial\n%s" $ indent $ show a
Always Nothing b -> printf "always\n%s" $ indent $ show b
Always (Just a) b -> printf "always @(%s)\n%s" (show a) $ indent $ show b
Assign a b -> printf "assign %s = %s;" (show a) (show b)
Instance m params i ports
| null params -> printf "%s %s %s;" m i (showPorts show ports)
| otherwise -> printf "%s #%s %s %s;" m (showPorts showExprConst params) i (showPorts show ports)
where
showPorts :: (Expr -> String) -> [(Identifier, Maybe Expr)] -> String
showPorts s ports = printf "(%s)" $ commas [ printf ".%s(%s)" i (if isJust arg then s $ fromJust arg else "") | (i, arg) <- ports ]
showAssign :: Maybe Expr -> String
showAssign a = case a of
Nothing -> ""
Just a -> printf " = %s" $ show a
showRange :: Maybe Range -> String
showRange Nothing = ""
showRange (Just (h, l)) = printf "[%s:%s] " (showExprConst h) (showExprConst l)
indent :: String -> String
indent a = '\t' : f a
where
f [] = []
f (a : rest)
| a == '\n' = "\n\t" ++ f rest
| otherwise = a : f rest
unlines' :: [String] -> String
unlines' = intercalate "\n"
data Expr
= String String
| Number BitVec
| ConstBool Bool
| Ident Identifier
| IdentRange Identifier Range
| IdentBit Identifier Expr
| Repeat Expr [Expr]
| Concat [Expr]
| ExprCall Call
| UniOp UniOp Expr
| BinOp BinOp Expr Expr
| Mux Expr Expr Expr
| Bit Expr Int
deriving Eq
data UniOp = Not | BWNot | UAdd | USub deriving Eq
instance Show UniOp where
show a = case a of
Not -> "!"
BWNot -> "~"
UAdd -> "+"
USub -> "-"
data BinOp
= And
| Or
| BWAnd
| BWXor
| BWOr
| Mul
| Div
| Mod
| Add
| Sub
| ShiftL
| ShiftR
| Eq
| Ne
| Lt
| Le
| Gt
| Ge
deriving Eq
instance Show BinOp where
show a = case a of
And -> "&&"
Or -> "||"
BWAnd -> "&"
BWXor -> "^"
BWOr -> "|"
Mul -> "*"
Div -> "/"
Mod -> "%"
Add -> "+"
Sub -> "-"
ShiftL -> "<<"
ShiftR -> ">>"
Eq -> "=="
Ne -> "!="
Lt -> "<"
Le -> "<="
Gt -> ">"
Ge -> ">="
showBitVecDefault :: BitVec -> String
showBitVecDefault a = printf "%d'h%x" (width a) (value a)
showBitVecConst :: BitVec -> String
showBitVecConst a = show $ value a
instance Show Expr where show = showExpr showBitVecDefault
showExprConst :: Expr -> String
showExprConst = showExpr showBitVecConst
showExpr :: (BitVec -> String) -> Expr -> String
showExpr bv a = case a of
String a -> printf "\"%s\"" a
Number a -> bv a
ConstBool a -> printf "1'b%s" (if a then "1" else "0")
Ident a -> a
IdentBit a b -> printf "%s[%s]" a (showExprConst b)
IdentRange a (b, c) -> printf "%s[%s:%s]" a (showExprConst b) (showExprConst c)
Repeat a b -> printf "{%s {%s}}" (showExprConst a) (commas $ map s b)
Concat a -> printf "{%s}" (commas $ map show a)
ExprCall a -> show a
UniOp a b -> printf "(%s %s)" (show a) (s b)
BinOp a b c -> printf "(%s %s %s)" (s b) (show a) (s c)
Mux a b c -> printf "(%s ? %s : %s)" (s a) (s b) (s c)
Bit a b -> printf "(%s [%d])" (s a) b
where
s = showExpr bv
instance Num Expr where
(+) = BinOp Add
(-) = BinOp Sub
(*) = BinOp Mul
negate = UniOp USub
abs = undefined
signum = undefined
fromInteger = Number . fromInteger
instance Bits Expr where
(.&.) = BinOp BWAnd
(.|.) = BinOp BWOr
xor = BinOp BWXor
complement = UniOp BWNot
isSigned _ = False
shift = error "Not supported: shift"
rotate = error "Not supported: rotate"
bitSize = error "Not supported: bitSize"
bitSizeMaybe = error "Not supported: bitSizeMaybe"
testBit = error "Not supported: testBit"
bit = error "Not supported: bit"
popCount = error "Not supported: popCount"
instance Monoid Expr where
mempty = 0
mappend a b = mconcat [a, b]
mconcat = Concat
data LHS
= LHS Identifier
| LHSBit Identifier Expr
| LHSRange Identifier Range
| LHSConcat [LHS]
deriving Eq
instance Show LHS where
show a = case a of
LHS a -> a
LHSBit a b -> printf "%s[%s]" a (showExprConst b)
LHSRange a (b, c) -> printf "%s[%s:%s]" a (showExprConst b) (showExprConst c)
LHSConcat a -> printf "{%s}" (commas $ map show a)
data Stmt
= Block (Maybe Identifier) [Stmt]
| StmtReg (Maybe Range) [(Identifier, Maybe Range)]
| StmtInteger [Identifier]
| Case Expr [Case] (Maybe Stmt)
| BlockingAssignment LHS Expr
| NonBlockingAssignment LHS Expr
| For (Identifier, Expr) Expr (Identifier, Expr) Stmt
| If Expr Stmt Stmt
| StmtCall Call
| Delay Expr Stmt
| Null
deriving Eq
commas :: [String] -> String
commas = intercalate ", "
instance Show Stmt where
show a = case a of
Block Nothing b -> printf "begin\n%s\nend" $ indent $ unlines' $ map show b
Block (Just a) b -> printf "begin : %s\n%s\nend" a $ indent $ unlines' $ map show b
StmtReg a b -> printf "reg %s%s;" (showRange a) (commas [ a ++ showRange r | (a, r) <- b ])
StmtInteger a -> printf "integer %s;" $ commas a
Case a b Nothing -> printf "case (%s)\n%s\nendcase" (show a) (indent $ unlines' $ map showCase b)
Case a b (Just c) -> printf "case (%s)\n%s\n\tdefault:\n%s\nendcase" (show a) (indent $ unlines' $ map showCase b) (indent $ indent $ show c)
BlockingAssignment a b -> printf "%s = %s;" (show a) (show b)
NonBlockingAssignment a b -> printf "%s <= %s;" (show a) (show b)
For (a, b) c (d, e) f -> printf "for (%s = %s; %s; %s = %s)\n%s" a (show b) (show c) d (show e) $ indent $ show f
If a b Null -> printf "if (%s)\n%s" (show a) (indent $ show b)
If a b c -> printf "if (%s)\n%s\nelse\n%s" (show a) (indent $ show b) (indent $ show c)
StmtCall a -> printf "%s;" (show a)
Delay a b -> printf "#%s %s" (showExprConst a) (show b)
Null -> ";"
type Case = ([Expr], Stmt)
showCase :: Case -> String
showCase (a, b) = printf "%s:\n%s" (commas $ map show a) (indent $ show b)
data Call = Call Identifier [Expr] deriving Eq
instance Show Call where
show (Call a b) = printf "%s(%s)" a (commas $ map show b)
data Sense
= Sense LHS
| SenseOr Sense Sense
| SensePosedge LHS
| SenseNegedge LHS
deriving Eq
instance Show Sense where
show a = case a of
Sense a -> show a
SenseOr a b -> printf "%s or %s" (show a) (show b)
SensePosedge a -> printf "posedge %s" (show a)
SenseNegedge a -> printf "negedge %s" (show a)
type Range = (Expr, Expr)
|
tomahawkins/verilog
|
Language/Verilog/AST.hs
|
bsd-3-clause
| 9,396
| 0
| 15
| 3,202
| 3,390
| 1,741
| 1,649
| 249
| 14
|
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
{-# OPTIONS_GHC -O2 -fenable-rewrite-rules #-}
----------------------------------------------------------------
-- ~ 2021.10.17
-- |
-- Module : Data.Number.Transfinite
-- Copyright : Copyright (c) 2007--2021 wren gayle romano
-- License : BSD3
-- Maintainer : wren@cpan.org
-- Stability : stable
-- Portability : portable
--
-- This module presents a type class for numbers which have
-- representations for transfinite values. The idea originated from
-- the IEEE-754 floating-point special values, used by
-- "Data.Number.LogFloat". However not all 'Fractional' types
-- necessarily support transfinite values. In particular, @Ratio@
-- types including 'Rational' do not have portable representations.
--
-- For the Glasgow compiler (GHC 6.8.2), "GHC.Real" defines @1%0@
-- and @0%0@ as representations for 'infinity' and 'notANumber',
-- but most operations on them will raise exceptions. If 'toRational'
-- is used on an infinite floating value, the result is a rational
-- with a numerator sufficiently large that it will overflow when
-- converted back to a @Double@. If used on NaN, the result would
-- buggily convert back as 'negativeInfinity'. For more discussion
-- on why this approach is problematic, see:
--
-- * <http://www.haskell.org/pipermail/haskell-prime/2006-February/000791.html>
--
-- * <http://www.haskell.org/pipermail/haskell-prime/2006-February/000821.html>
--
-- Hugs (September 2006) stays closer to the haskell98 spec and
-- offers no way of constructing those values, raising arithmetic
-- overflow errors if attempted.
----------------------------------------------------------------
module Data.Number.Transfinite
( Transfinite(..)
, log
) where
import Prelude hiding (log, isInfinite, isNaN)
import qualified Prelude (log)
import qualified Hugs.RealFloat as Prelude (isInfinite, isNaN)
import Data.Number.PartialOrd
----------------------------------------------------------------
-- | Many numbers are not 'Bounded'; yet even though they can
-- represent arbitrarily large values, they are not necessarily
-- able to represent transfinite values such as infinity itself.
-- This class is for types which are capable of representing such
-- values. Notably, this class does not require the type to be
-- 'Fractional' nor 'Floating' since integral types could also have
-- representations for transfinite values. By popular demand the
-- 'Num' restriction has been lifted as well, due to complications
-- of defining 'Show' or 'Eq' for some types.
--
-- In particular, this class extends the ordered projection to have
-- a maximum value 'infinity' and a minimum value 'negativeInfinity',
-- as well as an exceptional value 'notANumber'. All the natural
-- laws regarding @infinity@ and @negativeInfinity@ should pertain.
-- (Some of these are discussed below.)
--
-- Hugs (September 2006) has buggy Prelude definitions for
-- 'Prelude.isNaN' and 'Prelude.isInfinite' on Float and Double.
-- This module provides correct definitions, so long as "Hugs.RealFloat"
-- is compiled correctly.
class (PartialOrd a) => Transfinite a where
-- | A transfinite value which is greater than all finite values.
-- Adding or subtracting any finite value is a no-op. As is
-- multiplying by any non-zero positive value (including
-- @infinity@), and dividing by any positive finite value. Also
-- obeys the law @negate infinity = negativeInfinity@ with all
-- appropriate ramifications.
infinity :: a
-- | A transfinite value which is less than all finite values.
-- Obeys all the same laws as @infinity@ with the appropriate
-- changes for the sign difference.
negativeInfinity :: a
-- | An exceptional transfinite value for dealing with undefined
-- results when manipulating infinite values. The following
-- operations must return @notANumber@, where @inf@ is any value
-- which @isInfinite@:
--
-- * @infinity + negativeInfinity@
--
-- * @negativeInfinity + infinity@
--
-- * @infinity - infinity@
--
-- * @negativeInfinity - negativeInfinity@
--
-- * @inf * 0@
--
-- * @0 * inf@
--
-- * @inf \/ inf@
--
-- * @inf \`div\` inf@
--
-- * @0 \/ 0@
--
-- * @0 \`div\` 0@
--
-- Additionally, any mathematical operations on @notANumber@
-- must also return @notANumber@, and any equality or ordering
-- comparison on @notANumber@ must return @False@ (violating
-- the law of the excluded middle, often assumed but not required
-- for 'Eq'; thus, 'eq' and 'ne' are preferred over ('==') and
-- ('/=')). Since it returns false for equality, there may be
-- more than one machine representation of this `value'.
notANumber :: a
-- | Return true for both @infinity@ and @negativeInfinity@,
-- false for all other values.
isInfinite :: a -> Bool
-- | Return true only for @notANumber@.
isNaN :: a -> Bool
instance Transfinite Double where
infinity = 1/0
negativeInfinity = negate (1/0)
notANumber = 0/0
isInfinite = Prelude.isInfinite
isNaN = Prelude.isNaN
instance Transfinite Float where
infinity = 1/0
negativeInfinity = negate (1/0)
notANumber = 0/0
isInfinite = Prelude.isInfinite
isNaN = Prelude.isNaN
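-- Illustrative sketch (not part of the original module): the laws above,
-- checked at the 'Double' instance. Each follows from the IEEE-754 special
-- values used in the instance definitions.
--
-- >>> isInfinite (infinity :: Double)
-- True
-- >>> isNaN (infinity - infinity :: Double)
-- True
-- >>> (notANumber :: Double) == notANumber
-- False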
----------------------------------------------------------------
-- | Since the normal 'Prelude.log' throws an error on zero, we
-- have to redefine it in order for things to work right. Arguing
-- from limits we can see that @log 0 == negativeInfinity@. Newer
-- versions of GHC have this behavior already, but older versions
-- and Hugs do not.
--
-- This function will raise an error when taking the log of negative
-- numbers, rather than returning 'notANumber' as the newer GHC
-- implementation does. The reason being that typically this is a
-- logical error, and @notANumber@ allows the error to propagate
-- silently.
--
-- In order to improve portability, the 'Transfinite' class is
-- required to indicate that the 'Floating' type does in fact have
-- a representation for negative infinity. Both native floating
-- types ('Double' and 'Float') are supported. If you define your
-- own instance of @Transfinite@, verify the above equation holds
-- for your @0@ and @negativeInfinity@. If it doesn't, then you
-- should avoid importing our @log@ and will probably want converters
-- to handle the discrepancy.
--
-- For GHC, this version of @log@ has rules for fusion with @exp@.
-- These can give different behavior by preventing overflow to
-- @infinity@ and preventing errors for taking the logarithm of
-- negative values. For 'Double' and 'Float' they can also give
-- different answers due to eliminating floating point fuzz. The
-- rules strictly improve mathematical accuracy, however they should
-- be noted in case your code depends on the implementation details.
log :: (Floating a, Transfinite a) => a -> a
{-# INLINE [0] log #-}
-- TODO: should we use NOINLINE or [~0] to avoid the possibility of code bloat?
log x = case x `cmp` 0 of
Just GT -> Prelude.log x
Just EQ -> negativeInfinity
Just LT -> err "argument out of range"
Nothing -> err "argument not comparable to 0"
where
err e = error $! "Data.Number.Transfinite.log: "++e
-- Note, Floating ultimately requires Num, but not Ord. If PartialOrd
-- proves to be an onerous requirement on Transfinite, we could
-- hack our way around without using PartialOrd by using isNaN, (==
-- 0), ((>0).signum) but that would be less efficient.
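-- Illustrative sketch (not part of the original module): behaviour of the
-- redefined 'log' at 'Double'. Zero maps to negative infinity, ordinary
-- positive arguments defer to 'Prelude.log', and negative arguments raise
-- the error described above.
--
-- >>> log (0 :: Double)
-- -Infinity
-- >>> log (1 :: Double)
-- 0.0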
----------------------------------------------------------------
-- These rules moved here from "LogFloat" in v0.11.2
{-# RULES
"log/exp" forall x. log (exp x) = x
"exp/log" forall x. exp (log x) = x
#-}
-- We'd like to be able to take advantage of general rule versions
-- of our operators for 'LogFloat', with rules like @log x + log y
-- = log (x * y)@ and @log x - log y = log (x / y)@. However the
-- problem is that those equations could be driven in either direction
-- depending on whether we think time performance or non-underflow
-- performance is more important, and the answers may be different
-- at every call site.
--
-- Since we implore users to do normal-domain computations whenever
-- it would not degenerate accuracy, we should not rewrite their
-- decisions in any way. The log\/exp fusion strictly improves both
-- time and accuracy, so those are safe. But the buck stops with
-- them.
----------------------------------------------------------------
----------------------------------------------------------- fin.
|
wrengr/logfloat
|
src/Data/Number/Transfinite.hs
|
bsd-3-clause
| 8,775
| 0
| 9
| 1,756
| 484
| 337
| 147
| 38
| 4
|
{-# LANGUAGE OverloadedStrings, PatternGuards #-}
--
-- Author: Sean Seefried
-- Date: Wed 21 Dec 2011
--
module Main where
-- library imports
import Data.Text (Text)
import qualified Data.Text as T
import Text.Printf
import System.Environment (getArgs, getProgName)
import Control.Monad
import System.Exit
import Data.Map (Map)
import qualified Data.Map as Map
-- friends
import StringUtils
import Time
-- The commands
import StartCmd
import ModifyCmd
import ClearCmd
import FinishCmd
import ExportCmd
import CurrentCmd
import ListCmd
import QueryCmd
import InsertCmd
import GetOpt
main :: IO ()
main = do
zt <- getZonedTime
name <- getProgName
args <- getArgs
let cmdMap = commandMap name zt
dispatch (cmd:argsForCmd) =
case Map.lookup cmd cmdMap of
Just cmd -> cmdHandler cmd argsForCmd
Nothing -> printf "%s: '%s' is not a command. See '%s --help'.\n" name cmd name
when (wantsHelp args) $ do
putStr $ topLevelHelp name zt
exitWith ExitSuccess
let (cmd:restArgs) = args
when (cmd == "help" && length restArgs > 0) $ do
putStrLn $ helpFor (head restArgs) cmdMap
exitWith ExitSuccess
-- now dispatch
dispatch args
helpFor :: String -> Map String Command -> String
helpFor cmd cmdMap = case Map.lookup cmd cmdMap of
Just cmd -> printf "%s\n\n%s" (cmdDesc cmd) (cmdHelp cmd)
Nothing -> printf "Can't get help for unknown command '%s'" cmd
--------------
data Command = Cmd { cmdId :: String
, cmdDesc :: String
, cmdHelp :: String
, cmdHandler :: [String] -> IO () }
--
-- Note to maintainer:
--
-- The 'cmdHelp' string should not end in a newline.
--
commands :: String -> ZonedTime -> [Command]
commands name zt =
[ Cmd "start"
"Start a new task"
(usageInfo (printf "Usage: %s start [<flags>...]\n\nFlags:" name)
(startCmdOpts zt))
(startCmd zt)
, Cmd "current"
"Describe the current task"
(usageInfo (printf "Usage: %s current" name) [])
(currentCmd zt)
, Cmd "list"
"List the last few completed tasks"
(usageInfo (printf "Usage: %s list [<flags>...]\n\nFlags:" name)
(listCmdOpts zt))
(listCmd zt)
, Cmd "clear"
"Clear current task"
(printf "Usage: %s clear" name)
clearCmd
, Cmd "insert"
"Insert a task"
(insertCmdUsage name zt)
(insertCmd zt)
, Cmd "finish"
"Finish current task"
(usageInfo (printf "Usage: %s finish [<flags>...]\n\nFlags:" name)
(finishCmdOpts zt))
(finishCmd zt)
, Cmd "modify"
"Modify a task entry"
(error "not defined")
undefined
, Cmd "delete"
"Delete a task entry"
(error "not defined")
undefined
, Cmd "query"
"Query task entries"
(usageInfo (printf "Usage: %s query" name) (queryCmdOpts zt))
(queryCmd zt)
, Cmd "export"
"Export task data in a variety of formats"
(usageInfo (printf "Usage: %s export [<flags>...]\n\nFlags:" name)
(exportCmdOpts zt))
(exportCmd zt)
]
commandMap :: String -> ZonedTime -> Map String Command
commandMap name zt = foldl (\m cmd -> Map.insert (cmdId cmd) cmd m) Map.empty (commands name zt)
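-- Illustrative sketch (not part of the original module): looking up the help
-- text for a single command. The program name "task" is hypothetical; it is
-- only interpolated into the usage messages.
exampleHelp :: IO ()
exampleHelp = do
  zt <- getZonedTime
  putStrLn $ helpFor "list" (commandMap "task" zt)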
-----------
topLevelHelp :: String -> ZonedTime -> String
topLevelHelp name zt = unlines $ [
printf "Usage: %s <command> <flags...>" name
, ""
, "Commands:"
] ++ (indent 2 . twoColumns 4 $ map f $ commands name zt) ++
[ ""
, printf "See '%s help <command>' for more information on a specific command." name]
where f cmd = (cmdId cmd, cmdDesc cmd)
wantsHelp :: [String] -> Bool
wantsHelp args = containsHelp args || length args == 0 || (head args == "help" && length args == 1)
containsHelp :: [String] -> Bool
containsHelp = any pred
where
pred s = any (eq s) ["-h", "--help", "-?"]
eq s s' = strip s == s'
------------------------------
test :: IO ()
test = do
zt <- getZonedTime
let mtime = parseTaskTime zt "00:23"
case mtime of
Just time -> printf "Local: %s\nUTC: %s\n"
(show $ utcToZonedTime (zonedTimeZone zt) time) (show time)
Nothing -> return ()
|
sseefried/task
|
src/Task.hs
|
bsd-3-clause
| 4,256
| 0
| 15
| 1,144
| 1,203
| 617
| 586
| 119
| 2
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
module User.Handlers where
import Servant
import Text.Blaze.Html5 hiding (map)
import Database.Persist.Postgresql
import Crypto.PasswordStore
import System.Random
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.Either
import Data.Maybe
import Data.Time
import Data.Text (Text)
import Data.ByteString.Char8 (pack)
import Blaze.ByteString.Builder (toByteString)
import User.Types
import User.Routes
import User.Views
import User.Links
import Home.Links
import Common.Responses
import AppM
import Web.Cookie
userHandlers :: ServerT UserRoutes AppM
userHandlers = toLogin
:<|> login
:<|> logout
:<|> viewUsers
:<|> toCreateUser
:<|> createUser
:<|> viewUser
:<|> toResetPassword
:<|> resetPassword
:<|> deleteUser
:<|> toAssignRole
:<|> assignRole
:<|> deleteRole
toLogin :: Maybe Bool -> AppM Html
toLogin mValid = let isValid = maybe True id mValid in
return (loginPage isValid)
login :: LoginData -> AppM Text
login (LoginData name pass) = do
mUser <- runDb $ selectFirst [UserName ==. name] []
case mUser of
Nothing -> redirect (toLoginLink' False) >> return undefined
Just (Entity uid user) -> do
if verifyPassword pass (userHash user)
then do
ident <- fmap pack $ liftIO $
sequence $ replicate 32 $ randomRIO ('a', 'z')
loginTime <- liftIO getCurrentTime
runDb $ insert_ (Session ident uid loginTime)
let usernameCookie = toByteString $ renderSetCookie $
def { setCookieName = "username"
, setCookieValue = pack $ userName user }
let identCookie = toByteString $ renderSetCookie $
def { setCookieName = "ident" , setCookieValue = ident }
_ <- lift $ left $
err303 { errHeaders = [ ("location" , pack homePageLink')
, ("set-cookie", usernameCookie )
, ("set-cookie", identCookie ) ] }
return undefined
else redirect (toLoginLink' False) >> return undefined
logout :: Maybe String -> AppM Text
logout mCookie = do
when (isJust mCookie) $
let cookies = parseCookies (pack $ fromJust mCookie)
mIdent = lookup "ident" cookies
in case mIdent of
Nothing -> return ()
Just ident -> runDb $ deleteWhere [SessionIdent ==. ident]
let usernameCookie = toByteString $ renderSetCookie $
def { setCookieName = "username" , setCookieValue = "guest" }
let identCookie = toByteString $ renderSetCookie $
def { setCookieName = "ident" , setCookieValue = "guest" }
_ <- lift $ left $
err303 { errHeaders = [ ("location" , pack $ toLoginLink' True)
, ("set-cookie", usernameCookie )
, ("set-cookie", identCookie ) ] }
return undefined
viewUsers :: AppM Html
viewUsers = do
users <- runDb (selectList [UserHash !=. ""] [Asc UserName])
return (usersPage users)
toCreateUser :: AppM Html
toCreateUser = return userNewPage
createUser :: LoginData -> AppM Text
createUser (LoginData name pass) = do
hash <- liftIO (makePassword pass 17)
mUid <- runDb (insertUnique (User name hash))
case mUid of
Nothing -> lift $ left $ err400 { errBody = "Username is already taken" }
Just uid -> redirect (viewUserLink' uid) >> return undefined
viewUser :: Key User -> AppM Html
viewUser uid = do
mUser <- runDb (get uid)
case mUser of
Nothing -> lift (left err404)
Just user -> do
roles <- runDb (selectList [RoleUser ==. uid] [Asc RoleRole])
return (userPage (Entity uid user) roles)
toResetPassword :: Key User -> AppM Html
toResetPassword uid =
runDb (selectList [UserId ==. uid] [])
>>= maybe (lift (left err404)) (return . resetPasswordPage) . listToMaybe
resetPassword :: Key User -> LoginData -> AppM Text
resetPassword uid (LoginData _ pass) = do
hash <- liftIO (makePassword pass 17)
runDb (update uid [UserHash =. hash])
redirect (viewUserLink' uid)
return undefined
deleteUser :: Key User -> AppM Text
deleteUser uid = do
runDb (update uid [UserHash =. ""])
redirect viewUsersLink'
return "deactivated"
toAssignRole :: Key User -> AppM Html
toAssignRole uid = do
mUser <- runDb (get uid)
case mUser of Nothing -> lift (left err404)
Just user -> return (assignRolePage (Entity uid user))
assignRole :: Key User -> Role -> AppM Text
assignRole _ role = do
mRid <- runDb (insertUnique role)
case mRid of
Nothing -> lift $ left $ err400 { errBody = "Duplicate user role" }
Just _ -> redirect (viewUserLink' (roleUser role)) >> return undefined
deleteRole :: Key User -> Key Role -> AppM Text
deleteRole _ rid = runDb (delete rid) >> return "deleted"
|
hectorhon/autotrace2
|
app/User/Handlers.hs
|
bsd-3-clause
| 4,916
| 0
| 22
| 1,268
| 1,603
| 797
| 806
| 130
| 3
|
-- | Configuring the compiler
module Fay.Config
( Config
( configOptimize
, configFlattenApps
, configExportRuntime
, configExportStdlib
, configExportStdlibOnly
, configPrettyPrint
, configHtmlWrapper
, configSourceMap
, configHtmlJSLibs
, configLibrary
, configWarn
, configFilePath
, configTypecheck
, configWall
, configGClosure
, configPackageConf
, configBasePath
, configStrict
, configTypecheckOnly
, configRuntimePath
, configOptimizeNewtypes
)
, defaultConfig
, defaultConfigWithSandbox
, configDirectoryIncludes
, configDirectoryIncludePaths
, nonPackageConfigDirectoryIncludePaths
, addConfigDirectoryInclude
, addConfigDirectoryIncludes
, addConfigDirectoryIncludePaths
, configPackages
, addConfigPackage
, addConfigPackages
, shouldExportStrictWrapper
) where
import Fay.Compiler.Prelude
import Data.Default
import Data.Maybe ()
import Language.Haskell.Exts.Annotated (ModuleName (..))
import System.Environment
-- | Configuration of the compiler.
-- The fields with a leading underscore are not exported directly; use the
-- read accessors and @addConfig*@ helpers defined below to work with them.
data Config = Config
{ configOptimize :: Bool -- ^ Run optimizations
, configFlattenApps :: Bool -- ^ Flatten function application?
, configExportRuntime :: Bool -- ^ Export the runtime?
, configExportStdlib :: Bool -- ^ Export the stdlib?
, configExportStdlibOnly :: Bool -- ^ Export /only/ the stdlib?
, _configDirectoryIncludes :: [(Maybe String, FilePath)] -- ^ Possibly a fay package name, and a include directory.
, configPrettyPrint :: Bool -- ^ Pretty print the JS output?
, configHtmlWrapper :: Bool -- ^ Output a HTML file including the produced JS.
, configSourceMap :: Bool -- ^ Output a source map file as outfile.map.
, configHtmlJSLibs :: [FilePath] -- ^ Any JS files to link to in the HTML.
, configLibrary :: Bool -- ^ Don't invoke main in the produced JS.
, configWarn :: Bool -- ^ Warn on dubious stuff, not related to typechecking.
, configFilePath :: Maybe FilePath -- ^ File path to output to.
, configTypecheck :: Bool -- ^ Typecheck with GHC.
, configWall :: Bool -- ^ Typecheck with -Wall.
, configGClosure :: Bool -- ^ Run Google Closure on the produced JS.
, configPackageConf :: Maybe FilePath -- ^ The package config e.g. packages-6.12.3.
, _configPackages :: [String] -- ^ Included Fay packages.
, configBasePath :: Maybe FilePath -- ^ Custom source location for fay-base
, configStrict :: [String] -- ^ Produce strict and uncurried JavaScript callable wrappers for all
-- exported functions with type signatures in the given module
, configTypecheckOnly :: Bool -- ^ Only invoke GHC for typechecking, don't produce any output
, configRuntimePath :: Maybe FilePath
, configOptimizeNewtypes :: Bool -- ^ Optimize away newtype constructors?
} deriving (Show)
defaultConfig :: Config
defaultConfig = addConfigPackage "fay-base"
Config
{ configOptimize = False
, configFlattenApps = False
, configExportRuntime = True
, configExportStdlib = True
, configExportStdlibOnly = False
, _configDirectoryIncludes = []
, configPrettyPrint = False
, configHtmlWrapper = False
, configHtmlJSLibs = []
, configLibrary = False
, configWarn = True
, configFilePath = Nothing
, configTypecheck = True
, configWall = False
, configGClosure = False
, configPackageConf = Nothing
, _configPackages = []
, configBasePath = Nothing
, configStrict = []
, configTypecheckOnly = False
, configRuntimePath = Nothing
, configSourceMap = False
, configOptimizeNewtypes = True
}
defaultConfigWithSandbox :: IO Config
defaultConfigWithSandbox = do
packageConf <- fmap (lookup "HASKELL_PACKAGE_SANDBOX") getEnvironment
return defaultConfig { configPackageConf = packageConf }
-- | Default configuration.
instance Default Config where
def = defaultConfig
-- | Reading _configDirectoryIncludes is safe to do.
configDirectoryIncludes :: Config -> [(Maybe String, FilePath)]
configDirectoryIncludes = _configDirectoryIncludes
-- | Get all include directories without the package mapping.
configDirectoryIncludePaths :: Config -> [FilePath]
configDirectoryIncludePaths = map snd . _configDirectoryIncludes
-- | Get all include directories not included through packages.
nonPackageConfigDirectoryIncludePaths :: Config -> [FilePath]
nonPackageConfigDirectoryIncludePaths = map snd . filter (isJust . fst) . _configDirectoryIncludes
-- | Add a mapping from (maybe) a package to a source directory
addConfigDirectoryInclude :: Maybe String -> FilePath -> Config -> Config
addConfigDirectoryInclude pkg fp cfg = cfg { _configDirectoryIncludes = (pkg, fp) : _configDirectoryIncludes cfg }
-- | Add several include directories.
addConfigDirectoryIncludes :: [(Maybe String,FilePath)] -> Config -> Config
addConfigDirectoryIncludes pkgFps cfg = foldl (\c (pkg,fp) -> addConfigDirectoryInclude pkg fp c) cfg pkgFps
-- | Add several include directories without package references.
addConfigDirectoryIncludePaths :: [FilePath] -> Config -> Config
addConfigDirectoryIncludePaths fps cfg = foldl (flip (addConfigDirectoryInclude Nothing)) cfg fps
-- | Reading _configPackages is safe to do.
configPackages :: Config -> [String]
configPackages = _configPackages
-- | Add a package to compilation
addConfigPackage :: String -> Config -> Config
addConfigPackage pkg cfg = cfg { _configPackages = pkg : _configPackages cfg }
-- | Add several packages to compilation
addConfigPackages :: [String] -> Config -> Config
addConfigPackages fps cfg = foldl (flip addConfigPackage) cfg fps
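-- Illustrative sketch (not part of the original module): composing the
-- helpers above into a custom configuration. The "fay-text" package name and
-- the "src" include directory are hypothetical.
exampleConfig :: Config
exampleConfig =
  addConfigPackage "fay-text"
    . addConfigDirectoryIncludePaths ["src"]
    $ defaultConfig { configPrettyPrint = True }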
-- | Should a strict wrapper be generated for this module?
shouldExportStrictWrapper :: ModuleName a -> Config -> Bool
shouldExportStrictWrapper (ModuleName _ m) cs = m `elem` configStrict cs
|
fpco/fay
|
src/Fay/Config.hs
|
bsd-3-clause
| 6,822
| 0
| 11
| 2,036
| 964
| 595
| 369
| 119
| 1
|
-- | Compute PCA from a Matrix
--
-- http://www.cs.otago.ac.nz/cosc453/student_tutorials/principal_components.pdf
module PCA (
-- * PCA Computations
pcaSVD, pca', pca'', pca''',
pcaNipals, fastPCA, fastPCA',
-- * Conversions
toMatrix, toLists
) where
import Debug.Trace
-- TODO remove
import qualified Model.Repa as T
import Model.Types (Layer)
import Numeric.LinearAlgebra
import Numeric.LinearAlgebra.NIPALS
type Vec = Vector Double
type Mat = Matrix Double
-- | Turn a Layer into a Matrix for purpose of PCA.
toMatrix :: Int -> Int -> T.Mat -> Mat
toMatrix r c = (r >< c) . T.layerToList
-----------------------------------------------------
-- * Standard (Full) PCA Computation
-- https://github.com/albertoruiz/hmatrix/blob/master/examples/pca1.hs
-- | Run *Principal Component Analysis* on given Matrix and returns requested number
-- of most significant dimensions.
-- creates the compression and decompression functions from the desired number of components
pcaSVD :: Int -> Mat -> (Mat, Vec, Vec -> Vec , Vec -> Vec)
pcaSVD n dataSet = (vp, m, encode,decode)
where
encode x = vp #> (x - m)
decode x = x <# vp + m
(m,c) = meanCov dataSet
(_,v) = eigSH (trustSym c)
vp = tr $ takeColumns n v
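-- Illustrative sketch (not part of the original module): projecting a small,
-- hypothetical data set (4 observations, 3 variables) onto its two leading
-- principal components with 'pcaSVD'.
examplePcaSVD :: Mat
examplePcaSVD = fromRows (map enc (toRows dat))
  where
    (_, _, enc, _) = pcaSVD 2 dat
    dat = (4 >< 3) [ 1, 2, 0
                   , 2, 1, 1
                   , 0, 1, 3
                   , 3, 0, 1 ]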
-- | Return a function that yields the PCA vector for some index of given matrix
pca' :: Int -> Mat -> (Int -> [Double])
pca' n dataSet = toList . enc . (mat' !!)
where
mat' = toRows dataSet
(_,_,enc,_) = pcaSVD n dataSet
pca'' :: Int -> Mat -> (Int -> Mat -> Mat) -> Mat
pca'' n dataSet pca = tr (pcaMat <> tr dataSet)
where
pcaMat = pca n dataSet
pca''' :: Int -> Mat -> (Int -> Mat -> [Vec]) -> Mat
pca''' n dataSet pca = tr (pcaMat <> tr dataSet)
where
pcaMat = fromRows $ pca n dataSet
--------------------------------------------------
-- * NIPALS Algorithm
pcaNipals :: Int -> Mat -> [ Vec ]
pcaNipals 0 _ = []
pcaNipals n dataSet = let (pc1, _ , residual) = firstPC dataSet
in pc1 : pcaNipals (n - 1) residual
-----------------------------------------------------
-- * Fast (Iterative) PCA Computation
-- https://maxwell.ict.griffith.edu.au/spl/publications/papers/prl07_alok_pca.pdf
-- | Computes a list of top PCA components for given matrix
fastPCA :: Int -> Matrix Double -> [ Vector Double ]
fastPCA n dataSet = fastPCARec n dataSet []
fastPCARec :: Int -> Matrix Double -> [ Vector Double ] -> [ Vector Double ]
fastPCARec 0 _ _ = []
fastPCARec n dataSet phis =
let (_,cov) = meanCov dataSet -- compute covariance matrix
max_iter = 30
phi_p :: Vector Double
phi_p = unitary $ konst 1 (cols dataSet)
gram_schmidt :: Vector Double -> [ Vector Double ] -> Vector Double
gram_schmidt phip phis = phip - sum (map (\ phi_j -> cmap (* (phip <.> phi_j)) phi_j) phis)
go :: Vector Double -> Int -> Vector Double
go phi k | k > max_iter = phi
| otherwise = let phi_p_new = cov #> phi
norm_phi = unitary $ gram_schmidt phi_p_new phis
conv = abs (norm_phi <.> phi - 1) < peps
in if conv
then norm_phi
else go norm_phi (k+1)
new_phi = go phi_p 0
in new_phi : fastPCARec (n-1) dataSet (new_phi:phis)
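-- Illustrative sketch (not part of the original module): the two leading
-- principal directions of the same hypothetical data set as 'examplePcaSVD',
-- computed with the iterative method above (capped at max_iter).
exampleFastPCA :: [Vector Double]
exampleFastPCA = fastPCA 2 ((4 >< 3) [ 1, 2, 0
                                     , 2, 1, 1
                                     , 0, 1, 3
                                     , 3, 0, 1 ])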
----------------------------------------------
-- * Another Fast PCA algorithm
-- Computes top k principal components using power iteration method
-- http://theory.stanford.edu/~tim/s15/l/l8.pdf
fastPCA' :: Int -> Matrix Double -> [ Vector Double ]
fastPCA' n dataSet = fastPCARec' n seed
where
square = uncurry (==) . size
seed = if not (square dataSet)
then tr dataSet <> dataSet
else dataSet
fastPCARec' 0 _ = []
fastPCARec' k mat =
let v_0 = unitary $ konst 1 (cols mat)
go v = let v' = mat #> v
unit_v = unitary v'
stop = abs (unit_v <.> unitary v - 1) < peps
in if stop
then unit_v
else go v'
new_v = go v_0
mat_v = mat #> new_v
mat' = mat - (mat_v `outer` new_v)
in new_v : fastPCARec' (k-1) mat'
|
abailly/hs-word2vec
|
src/PCA.hs
|
bsd-3-clause
| 4,367
| 0
| 20
| 1,306
| 1,218
| 649
| 569
| 74
| 4
|
-- | Labels for data types in the base package. The label types are kept
-- abstract to be fully reusable in custom contexts. Built to be imported
-- qualified.
{-# LANGUAGE NoMonomorphismRestriction, TemplateHaskell, TypeOperators #-}
module Label.Base
(
-- * Labels for lists.
head
, tail
, list
, at
-- * Labels for Either.
, left
, right
-- * Label for Maybe.
, just
-- * Labels for 2-tuples.
, fst
, snd
, swap
, pair
-- * Labels for 3-tuples.
, fst3
, snd3
, trd3
, triple
-- * Read/Show isomorphism.
, readShow
)
where
import Control.Applicative
import Data.Traversable (traverse)
import Label.Core (Iso(..), Label, make)
import Label.Derive (getLabel)
import Label.Mono (Mono)
import Prelude hiding (fst, snd, head, tail)
import qualified Data.Tuple as Tuple
-- | Label pointing to the head of a list's cons cell. (Partial and monomorphic)
head :: (Alternative m, Applicative n)
=> Mono (Label m n) [a] a
-- | Label pointing to the tail of a list's cons cell. (Partial and monomorphic)
tail :: (Alternative m, Applicative n)
=> Mono (Label m n) [a] [a]
(head, tail) = $(getLabel ''[])
-- | Pointwise label for all items in a list.
list :: Applicative m => Label [] m ([o] -> [i]) (o -> i)
list = make id traverse
-- | Partial label for indexed access into a list.
at :: (Alternative m, Applicative n) => Int -> Label m n ([a] -> [a]) (a -> a)
at i = make (\ls -> if length ls > i
then pure (ls !! i)
else empty)
(\f ls -> if length ls > i
then (take i ls ++) <$> ((: drop (i + 1) ls) <$> f (ls !! i))
else pure ls
)
-- | Label pointing to the left value in an Either. (Partial and polymorphic)
left :: (Applicative n, Alternative m)
=> Label m n (Either a b -> Either c b) (a -> c)
-- | Label pointing to the right value in an Either. (Partial and polymorphic)
right :: (Applicative n, Alternative m)
=> Label m n (Either a b -> Either a c) (b -> c)
(left, right) = $(getLabel ''Either)
-- | Label pointing to the value in a Maybe. (Partial and polymorphic)
just :: (Applicative n, Alternative m)
=> Label m n (Maybe a -> Maybe b) (a -> b)
just = $(getLabel ''Maybe)
-- | Label pointing to the first component of a 2-tuple. (Total and polymorphic)
fst :: (Applicative n, Applicative m)
=> Label m n ((a, b) -> (c, b)) (a -> c)
-- | Label pointing to the second component of a 2-tuple. (Total and polymorphic)
snd :: (Applicative n, Applicative m)
=> Label m n ((a, b) -> (a, c)) (b -> c)
(fst, snd) = $(getLabel ''(,))
-- | Polymorphic label that swaps the components of a tuple. (Total and polymorphic)
swap :: Applicative m => Iso m m (a, b) (b, a)
swap = Iso (pure . Tuple.swap) (pure . Tuple.swap)
-- | Pointwise access to the two items in a pair.
pair :: Applicative m
=> Label [] m ((o, o) -> (a, a)) (o -> a)
pair = make (\(a, b) -> [a, b]) (\m (a, b) -> (,) <$> m a <*> m b)
-- | Label pointing to the first component of a 3-tuple. (Total and polymorphic)
fst3 :: (Applicative m, Applicative n)
=> Label m n ((a, b, c) -> (d, b, c)) (a -> d)
-- | Label pointing to the second component of a 3-tuple. (Total and polymorphic)
snd3 :: (Applicative m, Applicative n)
=> Label m n ((a, b, c) -> (a, d, c)) (b -> d)
-- | Label pointing to the third component of a 3-tuple. (Total and polymorphic)
trd3 :: (Applicative m, Applicative n)
=> Label m n ((a, b, c) -> (a, b, d)) (c -> d)
(fst3, snd3, trd3) = $(getLabel ''(,,))
-- | Pointwise access to the three items in a triple.
triple :: Applicative m
=> Label [] m ((o, o, o) -> (a, a, a)) (o -> a)
triple = make (\(a, b, c) -> [a, b, c]) (\m (a, b, c) -> (,,) <$> m a <*> m b <*> m c)
-- | Partial isomorphism for readable and showable values. Can easily be lifted
-- into a label by using `iso`.
readShow :: (Alternative m, Applicative n, Read a, Show a)
=> Iso m n String a
readShow = Iso r s
where r v = case readsPrec 0 v of
(w, _):_ -> pure w
[] -> empty
s = pure . show
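-- Illustrative sketch (not part of the original module): the forward
-- direction of 'readShow', instantiated at 'Maybe' for both effects.
--
-- >>> let Iso r _ = readShow :: Iso Maybe Maybe String Int in r "42"
-- Just 42
-- >>> let Iso r _ = readShow :: Iso Maybe Maybe String Int in r "not a number"
-- Nothing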
|
sebastiaanvisser/labels
|
src/Label/Base.hs
|
bsd-3-clause
| 4,119
| 0
| 15
| 1,059
| 1,441
| 811
| 630
| 75
| 3
|
{-# LANGUAGE OverloadedStrings #-}
module Hrpg.Game.Resources.Mobs.Common
( skeleton
, rat
) where
import Data.Text
import Hrpg.Framework.Level
import Hrpg.Framework.Mobs.MobType
import Hrpg.Framework.Stats
import Hrpg.Game.Resources.Items.LootTables
defaultMobBaseStats = Stats (Str 50) (Agi 50)
skeleton = MobType
{ mobTypeId = 1000
, mobTypeName = "a skeleton"
, mobTypeMinLevel = Level 1
, mobTypeMaxLevel = Level 2
, mobTypeBaseStats = defaultMobBaseStats
, mobTypeMaxHp = 100
, mobTypeLootTable = (Just commonItems)
}
rat = MobType
{ mobTypeId = 1001
, mobTypeName = "a rat"
, mobTypeMinLevel = Level 1
, mobTypeMaxLevel = Level 2
, mobTypeBaseStats = defaultMobBaseStats
, mobTypeMaxHp = 100
, mobTypeLootTable = (Just commonItems)
}
|
cwmunn/hrpg
|
src/Hrpg/Game/Resources/Mobs/Common.hs
|
bsd-3-clause
| 841
| 0
| 8
| 201
| 194
| 120
| 74
| 26
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ExistentialQuantification #-}
module HtmlTemplates where
import Aria.Types
import Aria.Routes
import Web.Routes.PathInfo (toPathInfo)
import Control.Lens
import Data.Maybe (fromJust)
import Data.Data
import Data.Text
import Data.Time (UTCTime(..))
import Text.Blaze ((!), string)
import Data.Monoid ((<>))
import Control.Monad
import qualified Aria.Scripts as AS
import qualified Data.List as DL
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import qualified Text.Blaze.Bootstrap as BH
instance H.ToMarkup AS.ScriptLog where
toMarkup = H.toHtml . fmap H.toHtml
instance H.ToMarkup UTCTime where
toMarkup = H.toHtml . show
instance H.ToMarkup AS.ScriptLogData where
toMarkup logData =
H.div ! A.class_ "arscript-log-data" $
do H.div ! A.class_ "arscript-command" $
do H.span ! A.class_ "exitcode" $
H.toHtml . show $ (logData ^. AS.exitCode)
" "
H.span ! A.class_ "file" $
H.toHtml . show $ (logData ^. AS.scriptFile)
" "
H.span ! A.class_ "args" $
mconcat . fmap H.toHtml . DL.intersperse " " $
(logData ^. AS.scriptArgs)
H.div ! A.class_ "arscript-rundata" $
do H.div ! A.class_ "arscript-runtimes" $
do H.span ! A.class_ "startTime" $
H.toHtml $ (logData ^. AS.scriptStartTime)
" - "
H.span ! A.class_ "endTime" $
H.toHtml $ (logData ^. AS.scriptEndTime)
H.div ! A.class_ "arscript-pipes" $
do H.div ! A.class_ "stdout" $
do H.span $ "stdout" <> H.br <> "---------"
H.pre $ H.toHtml $ (logData ^. AS.stdOut)
H.div ! A.class_ "stderr" $
do H.span $ "stderr" <> H.br <> "---------"
H.pre $ H.toHtml $ (logData ^. AS.stdErr)
racerPageButton :: RacerId -> String -> H.Html
racerPageButton rid msg =
H.a ! A.class_ "btn btn-success btn-large" !
A.href (H.toValue $ racerHomeRoute rid) $
H.string msg
appTemplate :: Text -> H.Html -> AriaWebApp H.Html
appTemplate title page =
return . H.docTypeHtml $
do H.head $
do bootStrapMeta
H.title $ H.text title
importCSS [bootstrapCSS, customCSS]
H.body $
mconcat [BH.container page, importJS [jqueryJS, bootstrapJS, customJS]]
bootstrapCSS :: H.AttributeValue
bootstrapCSS = "/css/bootstrap.min.css"
bootstrapJS :: H.AttributeValue
bootstrapJS = "/js/bootstrap.min.js"
bootStrapMeta :: H.Html
bootStrapMeta =
mconcat $
((H.meta !) . mconcat) <$>
[ [A.charset "utf-8"]
, [A.httpEquiv "X-UA-compatible", A.content "IE=edge"]
, [A.name "viewport", A.content "width=device-width, initial-scale=1"]
]
jqueryJS :: H.AttributeValue
jqueryJS = "/js/jquery-3.1.0.min.js"
customCSS :: H.AttributeValue
customCSS = "/css/custom.css"
customJS :: H.AttributeValue
customJS = "/js/custom.js"
importCSS :: [H.AttributeValue] -> H.Html
importCSS = mconcat . fmap BH.stylesheet
importJS :: [H.AttributeValue] -> H.Html
importJS = mconcat . fmap BH.javascript
|
theNerd247/ariaRacer
|
arweb/app/HtmlTemplates.hs
|
bsd-3-clause
| 3,229
| 0
| 21
| 802
| 970
| 507
| 463
| 86
| 1
|
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.Compatibility33
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.Compatibility33 (
-- * Types
GLbitfield,
GLboolean,
GLbyte,
GLchar,
GLclampd,
GLclampf,
GLdouble,
GLenum,
GLfloat,
GLhalf,
GLint,
GLint64,
GLintptr,
GLshort,
GLsizei,
GLsizeiptr,
GLsync,
GLubyte,
GLuint,
GLuint64,
GLushort,
GLvoid,
-- * Enums
pattern GL_2D,
pattern GL_2_BYTES,
pattern GL_3D,
pattern GL_3D_COLOR,
pattern GL_3D_COLOR_TEXTURE,
pattern GL_3_BYTES,
pattern GL_4D_COLOR_TEXTURE,
pattern GL_4_BYTES,
pattern GL_ACCUM,
pattern GL_ACCUM_ALPHA_BITS,
pattern GL_ACCUM_BLUE_BITS,
pattern GL_ACCUM_BUFFER_BIT,
pattern GL_ACCUM_CLEAR_VALUE,
pattern GL_ACCUM_GREEN_BITS,
pattern GL_ACCUM_RED_BITS,
pattern GL_ACTIVE_ATTRIBUTES,
pattern GL_ACTIVE_ATTRIBUTE_MAX_LENGTH,
pattern GL_ACTIVE_TEXTURE,
pattern GL_ACTIVE_UNIFORMS,
pattern GL_ACTIVE_UNIFORM_BLOCKS,
pattern GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH,
pattern GL_ACTIVE_UNIFORM_MAX_LENGTH,
pattern GL_ADD,
pattern GL_ADD_SIGNED,
pattern GL_ALIASED_LINE_WIDTH_RANGE,
pattern GL_ALIASED_POINT_SIZE_RANGE,
pattern GL_ALL_ATTRIB_BITS,
pattern GL_ALPHA,
pattern GL_ALPHA12,
pattern GL_ALPHA16,
pattern GL_ALPHA4,
pattern GL_ALPHA8,
pattern GL_ALPHA_BIAS,
pattern GL_ALPHA_BITS,
pattern GL_ALPHA_INTEGER,
pattern GL_ALPHA_SCALE,
pattern GL_ALPHA_TEST,
pattern GL_ALPHA_TEST_FUNC,
pattern GL_ALPHA_TEST_REF,
pattern GL_ALREADY_SIGNALED,
pattern GL_ALWAYS,
pattern GL_AMBIENT,
pattern GL_AMBIENT_AND_DIFFUSE,
pattern GL_AND,
pattern GL_AND_INVERTED,
pattern GL_AND_REVERSE,
pattern GL_ANY_SAMPLES_PASSED,
pattern GL_ARRAY_BUFFER,
pattern GL_ARRAY_BUFFER_BINDING,
pattern GL_ATTACHED_SHADERS,
pattern GL_ATTRIB_STACK_DEPTH,
pattern GL_AUTO_NORMAL,
pattern GL_AUX0,
pattern GL_AUX1,
pattern GL_AUX2,
pattern GL_AUX3,
pattern GL_AUX_BUFFERS,
pattern GL_BACK,
pattern GL_BACK_LEFT,
pattern GL_BACK_RIGHT,
pattern GL_BGR,
pattern GL_BGRA,
pattern GL_BGRA_INTEGER,
pattern GL_BGR_INTEGER,
pattern GL_BITMAP,
pattern GL_BITMAP_TOKEN,
pattern GL_BLEND,
pattern GL_BLEND_COLOR,
pattern GL_BLEND_DST,
pattern GL_BLEND_DST_ALPHA,
pattern GL_BLEND_DST_RGB,
pattern GL_BLEND_EQUATION,
pattern GL_BLEND_EQUATION_ALPHA,
pattern GL_BLEND_EQUATION_RGB,
pattern GL_BLEND_SRC,
pattern GL_BLEND_SRC_ALPHA,
pattern GL_BLEND_SRC_RGB,
pattern GL_BLUE,
pattern GL_BLUE_BIAS,
pattern GL_BLUE_BITS,
pattern GL_BLUE_INTEGER,
pattern GL_BLUE_SCALE,
pattern GL_BOOL,
pattern GL_BOOL_VEC2,
pattern GL_BOOL_VEC3,
pattern GL_BOOL_VEC4,
pattern GL_BUFFER_ACCESS,
pattern GL_BUFFER_ACCESS_FLAGS,
pattern GL_BUFFER_MAPPED,
pattern GL_BUFFER_MAP_LENGTH,
pattern GL_BUFFER_MAP_OFFSET,
pattern GL_BUFFER_MAP_POINTER,
pattern GL_BUFFER_SIZE,
pattern GL_BUFFER_USAGE,
pattern GL_BYTE,
pattern GL_C3F_V3F,
pattern GL_C4F_N3F_V3F,
pattern GL_C4UB_V2F,
pattern GL_C4UB_V3F,
pattern GL_CCW,
pattern GL_CLAMP,
pattern GL_CLAMP_FRAGMENT_COLOR,
pattern GL_CLAMP_READ_COLOR,
pattern GL_CLAMP_TO_BORDER,
pattern GL_CLAMP_TO_EDGE,
pattern GL_CLAMP_VERTEX_COLOR,
pattern GL_CLEAR,
pattern GL_CLIENT_ACTIVE_TEXTURE,
pattern GL_CLIENT_ALL_ATTRIB_BITS,
pattern GL_CLIENT_ATTRIB_STACK_DEPTH,
pattern GL_CLIENT_PIXEL_STORE_BIT,
pattern GL_CLIENT_VERTEX_ARRAY_BIT,
pattern GL_CLIP_DISTANCE0,
pattern GL_CLIP_DISTANCE1,
pattern GL_CLIP_DISTANCE2,
pattern GL_CLIP_DISTANCE3,
pattern GL_CLIP_DISTANCE4,
pattern GL_CLIP_DISTANCE5,
pattern GL_CLIP_DISTANCE6,
pattern GL_CLIP_DISTANCE7,
pattern GL_CLIP_PLANE0,
pattern GL_CLIP_PLANE1,
pattern GL_CLIP_PLANE2,
pattern GL_CLIP_PLANE3,
pattern GL_CLIP_PLANE4,
pattern GL_CLIP_PLANE5,
pattern GL_COEFF,
pattern GL_COLOR,
pattern GL_COLOR_ARRAY,
pattern GL_COLOR_ARRAY_BUFFER_BINDING,
pattern GL_COLOR_ARRAY_POINTER,
pattern GL_COLOR_ARRAY_SIZE,
pattern GL_COLOR_ARRAY_STRIDE,
pattern GL_COLOR_ARRAY_TYPE,
pattern GL_COLOR_ATTACHMENT0,
pattern GL_COLOR_ATTACHMENT1,
pattern GL_COLOR_ATTACHMENT10,
pattern GL_COLOR_ATTACHMENT11,
pattern GL_COLOR_ATTACHMENT12,
pattern GL_COLOR_ATTACHMENT13,
pattern GL_COLOR_ATTACHMENT14,
pattern GL_COLOR_ATTACHMENT15,
pattern GL_COLOR_ATTACHMENT16,
pattern GL_COLOR_ATTACHMENT17,
pattern GL_COLOR_ATTACHMENT18,
pattern GL_COLOR_ATTACHMENT19,
pattern GL_COLOR_ATTACHMENT2,
pattern GL_COLOR_ATTACHMENT20,
pattern GL_COLOR_ATTACHMENT21,
pattern GL_COLOR_ATTACHMENT22,
pattern GL_COLOR_ATTACHMENT23,
pattern GL_COLOR_ATTACHMENT24,
pattern GL_COLOR_ATTACHMENT25,
pattern GL_COLOR_ATTACHMENT26,
pattern GL_COLOR_ATTACHMENT27,
pattern GL_COLOR_ATTACHMENT28,
pattern GL_COLOR_ATTACHMENT29,
pattern GL_COLOR_ATTACHMENT3,
pattern GL_COLOR_ATTACHMENT30,
pattern GL_COLOR_ATTACHMENT31,
pattern GL_COLOR_ATTACHMENT4,
pattern GL_COLOR_ATTACHMENT5,
pattern GL_COLOR_ATTACHMENT6,
pattern GL_COLOR_ATTACHMENT7,
pattern GL_COLOR_ATTACHMENT8,
pattern GL_COLOR_ATTACHMENT9,
pattern GL_COLOR_BUFFER_BIT,
pattern GL_COLOR_CLEAR_VALUE,
pattern GL_COLOR_INDEX,
pattern GL_COLOR_INDEXES,
pattern GL_COLOR_LOGIC_OP,
pattern GL_COLOR_MATERIAL,
pattern GL_COLOR_MATERIAL_FACE,
pattern GL_COLOR_MATERIAL_PARAMETER,
pattern GL_COLOR_SUM,
pattern GL_COLOR_WRITEMASK,
pattern GL_COMBINE,
pattern GL_COMBINE_ALPHA,
pattern GL_COMBINE_RGB,
pattern GL_COMPARE_REF_TO_TEXTURE,
pattern GL_COMPARE_R_TO_TEXTURE,
pattern GL_COMPILE,
pattern GL_COMPILE_AND_EXECUTE,
pattern GL_COMPILE_STATUS,
pattern GL_COMPRESSED_ALPHA,
pattern GL_COMPRESSED_INTENSITY,
pattern GL_COMPRESSED_LUMINANCE,
pattern GL_COMPRESSED_LUMINANCE_ALPHA,
pattern GL_COMPRESSED_RED,
pattern GL_COMPRESSED_RED_RGTC1,
pattern GL_COMPRESSED_RG,
pattern GL_COMPRESSED_RGB,
pattern GL_COMPRESSED_RGBA,
pattern GL_COMPRESSED_RG_RGTC2,
pattern GL_COMPRESSED_SIGNED_RED_RGTC1,
pattern GL_COMPRESSED_SIGNED_RG_RGTC2,
pattern GL_COMPRESSED_SLUMINANCE,
pattern GL_COMPRESSED_SLUMINANCE_ALPHA,
pattern GL_COMPRESSED_SRGB,
pattern GL_COMPRESSED_SRGB_ALPHA,
pattern GL_COMPRESSED_TEXTURE_FORMATS,
pattern GL_CONDITION_SATISFIED,
pattern GL_CONSTANT,
pattern GL_CONSTANT_ALPHA,
pattern GL_CONSTANT_ATTENUATION,
pattern GL_CONSTANT_COLOR,
pattern GL_CONTEXT_COMPATIBILITY_PROFILE_BIT,
pattern GL_CONTEXT_CORE_PROFILE_BIT,
pattern GL_CONTEXT_FLAGS,
pattern GL_CONTEXT_FLAG_FORWARD_COMPATIBLE_BIT,
pattern GL_CONTEXT_PROFILE_MASK,
pattern GL_COORD_REPLACE,
pattern GL_COPY,
pattern GL_COPY_INVERTED,
pattern GL_COPY_PIXEL_TOKEN,
pattern GL_COPY_READ_BUFFER,
pattern GL_COPY_WRITE_BUFFER,
pattern GL_CULL_FACE,
pattern GL_CULL_FACE_MODE,
pattern GL_CURRENT_BIT,
pattern GL_CURRENT_COLOR,
pattern GL_CURRENT_FOG_COORD,
pattern GL_CURRENT_FOG_COORDINATE,
pattern GL_CURRENT_INDEX,
pattern GL_CURRENT_NORMAL,
pattern GL_CURRENT_PROGRAM,
pattern GL_CURRENT_QUERY,
pattern GL_CURRENT_RASTER_COLOR,
pattern GL_CURRENT_RASTER_DISTANCE,
pattern GL_CURRENT_RASTER_INDEX,
pattern GL_CURRENT_RASTER_POSITION,
pattern GL_CURRENT_RASTER_POSITION_VALID,
pattern GL_CURRENT_RASTER_SECONDARY_COLOR,
pattern GL_CURRENT_RASTER_TEXTURE_COORDS,
pattern GL_CURRENT_SECONDARY_COLOR,
pattern GL_CURRENT_TEXTURE_COORDS,
pattern GL_CURRENT_VERTEX_ATTRIB,
pattern GL_CW,
pattern GL_DECAL,
pattern GL_DECR,
pattern GL_DECR_WRAP,
pattern GL_DELETE_STATUS,
pattern GL_DEPTH,
pattern GL_DEPTH24_STENCIL8,
pattern GL_DEPTH32F_STENCIL8,
pattern GL_DEPTH_ATTACHMENT,
pattern GL_DEPTH_BIAS,
pattern GL_DEPTH_BITS,
pattern GL_DEPTH_BUFFER_BIT,
pattern GL_DEPTH_CLAMP,
pattern GL_DEPTH_CLEAR_VALUE,
pattern GL_DEPTH_COMPONENT,
pattern GL_DEPTH_COMPONENT16,
pattern GL_DEPTH_COMPONENT24,
pattern GL_DEPTH_COMPONENT32,
pattern GL_DEPTH_COMPONENT32F,
pattern GL_DEPTH_FUNC,
pattern GL_DEPTH_RANGE,
pattern GL_DEPTH_SCALE,
pattern GL_DEPTH_STENCIL,
pattern GL_DEPTH_STENCIL_ATTACHMENT,
pattern GL_DEPTH_TEST,
pattern GL_DEPTH_TEXTURE_MODE,
pattern GL_DEPTH_WRITEMASK,
pattern GL_DIFFUSE,
pattern GL_DITHER,
pattern GL_DOMAIN,
pattern GL_DONT_CARE,
pattern GL_DOT3_RGB,
pattern GL_DOT3_RGBA,
pattern GL_DOUBLE,
pattern GL_DOUBLEBUFFER,
pattern GL_DRAW_BUFFER,
pattern GL_DRAW_BUFFER0,
pattern GL_DRAW_BUFFER1,
pattern GL_DRAW_BUFFER10,
pattern GL_DRAW_BUFFER11,
pattern GL_DRAW_BUFFER12,
pattern GL_DRAW_BUFFER13,
pattern GL_DRAW_BUFFER14,
pattern GL_DRAW_BUFFER15,
pattern GL_DRAW_BUFFER2,
pattern GL_DRAW_BUFFER3,
pattern GL_DRAW_BUFFER4,
pattern GL_DRAW_BUFFER5,
pattern GL_DRAW_BUFFER6,
pattern GL_DRAW_BUFFER7,
pattern GL_DRAW_BUFFER8,
pattern GL_DRAW_BUFFER9,
pattern GL_DRAW_FRAMEBUFFER,
pattern GL_DRAW_FRAMEBUFFER_BINDING,
pattern GL_DRAW_PIXEL_TOKEN,
pattern GL_DST_ALPHA,
pattern GL_DST_COLOR,
pattern GL_DYNAMIC_COPY,
pattern GL_DYNAMIC_DRAW,
pattern GL_DYNAMIC_READ,
pattern GL_EDGE_FLAG,
pattern GL_EDGE_FLAG_ARRAY,
pattern GL_EDGE_FLAG_ARRAY_BUFFER_BINDING,
pattern GL_EDGE_FLAG_ARRAY_POINTER,
pattern GL_EDGE_FLAG_ARRAY_STRIDE,
pattern GL_ELEMENT_ARRAY_BUFFER,
pattern GL_ELEMENT_ARRAY_BUFFER_BINDING,
pattern GL_EMISSION,
pattern GL_ENABLE_BIT,
pattern GL_EQUAL,
pattern GL_EQUIV,
pattern GL_EVAL_BIT,
pattern GL_EXP,
pattern GL_EXP2,
pattern GL_EXTENSIONS,
pattern GL_EYE_LINEAR,
pattern GL_EYE_PLANE,
pattern GL_FALSE,
pattern GL_FASTEST,
pattern GL_FEEDBACK,
pattern GL_FEEDBACK_BUFFER_POINTER,
pattern GL_FEEDBACK_BUFFER_SIZE,
pattern GL_FEEDBACK_BUFFER_TYPE,
pattern GL_FILL,
pattern GL_FIRST_VERTEX_CONVENTION,
pattern GL_FIXED_ONLY,
pattern GL_FLAT,
pattern GL_FLOAT,
pattern GL_FLOAT_32_UNSIGNED_INT_24_8_REV,
pattern GL_FLOAT_MAT2,
pattern GL_FLOAT_MAT2x3,
pattern GL_FLOAT_MAT2x4,
pattern GL_FLOAT_MAT3,
pattern GL_FLOAT_MAT3x2,
pattern GL_FLOAT_MAT3x4,
pattern GL_FLOAT_MAT4,
pattern GL_FLOAT_MAT4x2,
pattern GL_FLOAT_MAT4x3,
pattern GL_FLOAT_VEC2,
pattern GL_FLOAT_VEC3,
pattern GL_FLOAT_VEC4,
pattern GL_FOG,
pattern GL_FOG_BIT,
pattern GL_FOG_COLOR,
pattern GL_FOG_COORD,
pattern GL_FOG_COORDINATE,
pattern GL_FOG_COORDINATE_ARRAY,
pattern GL_FOG_COORDINATE_ARRAY_BUFFER_BINDING,
pattern GL_FOG_COORDINATE_ARRAY_POINTER,
pattern GL_FOG_COORDINATE_ARRAY_STRIDE,
pattern GL_FOG_COORDINATE_ARRAY_TYPE,
pattern GL_FOG_COORDINATE_SOURCE,
pattern GL_FOG_COORD_ARRAY,
pattern GL_FOG_COORD_ARRAY_BUFFER_BINDING,
pattern GL_FOG_COORD_ARRAY_POINTER,
pattern GL_FOG_COORD_ARRAY_STRIDE,
pattern GL_FOG_COORD_ARRAY_TYPE,
pattern GL_FOG_COORD_SRC,
pattern GL_FOG_DENSITY,
pattern GL_FOG_END,
pattern GL_FOG_HINT,
pattern GL_FOG_INDEX,
pattern GL_FOG_MODE,
pattern GL_FOG_START,
pattern GL_FRAGMENT_DEPTH,
pattern GL_FRAGMENT_SHADER,
pattern GL_FRAGMENT_SHADER_DERIVATIVE_HINT,
pattern GL_FRAMEBUFFER,
pattern GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE,
pattern GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE,
pattern GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING,
pattern GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE,
pattern GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE,
pattern GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE,
pattern GL_FRAMEBUFFER_ATTACHMENT_LAYERED,
pattern GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME,
pattern GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE,
pattern GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE,
pattern GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE,
pattern GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE,
pattern GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER,
pattern GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL,
pattern GL_FRAMEBUFFER_BINDING,
pattern GL_FRAMEBUFFER_COMPLETE,
pattern GL_FRAMEBUFFER_DEFAULT,
pattern GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT,
pattern GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER,
pattern GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS,
pattern GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT,
pattern GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE,
pattern GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER,
pattern GL_FRAMEBUFFER_SRGB,
pattern GL_FRAMEBUFFER_UNDEFINED,
pattern GL_FRAMEBUFFER_UNSUPPORTED,
pattern GL_FRONT,
pattern GL_FRONT_AND_BACK,
pattern GL_FRONT_FACE,
pattern GL_FRONT_LEFT,
pattern GL_FRONT_RIGHT,
pattern GL_FUNC_ADD,
pattern GL_FUNC_REVERSE_SUBTRACT,
pattern GL_FUNC_SUBTRACT,
pattern GL_GENERATE_MIPMAP,
pattern GL_GENERATE_MIPMAP_HINT,
pattern GL_GEOMETRY_INPUT_TYPE,
pattern GL_GEOMETRY_OUTPUT_TYPE,
pattern GL_GEOMETRY_SHADER,
pattern GL_GEOMETRY_VERTICES_OUT,
pattern GL_GEQUAL,
pattern GL_GREATER,
pattern GL_GREEN,
pattern GL_GREEN_BIAS,
pattern GL_GREEN_BITS,
pattern GL_GREEN_INTEGER,
pattern GL_GREEN_SCALE,
pattern GL_HALF_FLOAT,
pattern GL_HINT_BIT,
pattern GL_INCR,
pattern GL_INCR_WRAP,
pattern GL_INDEX,
pattern GL_INDEX_ARRAY,
pattern GL_INDEX_ARRAY_BUFFER_BINDING,
pattern GL_INDEX_ARRAY_POINTER,
pattern GL_INDEX_ARRAY_STRIDE,
pattern GL_INDEX_ARRAY_TYPE,
pattern GL_INDEX_BITS,
pattern GL_INDEX_CLEAR_VALUE,
pattern GL_INDEX_LOGIC_OP,
pattern GL_INDEX_MODE,
pattern GL_INDEX_OFFSET,
pattern GL_INDEX_SHIFT,
pattern GL_INDEX_WRITEMASK,
pattern GL_INFO_LOG_LENGTH,
pattern GL_INT,
pattern GL_INTENSITY,
pattern GL_INTENSITY12,
pattern GL_INTENSITY16,
pattern GL_INTENSITY4,
pattern GL_INTENSITY8,
pattern GL_INTERLEAVED_ATTRIBS,
pattern GL_INTERPOLATE,
pattern GL_INT_2_10_10_10_REV,
pattern GL_INT_SAMPLER_1D,
pattern GL_INT_SAMPLER_1D_ARRAY,
pattern GL_INT_SAMPLER_2D,
pattern GL_INT_SAMPLER_2D_ARRAY,
pattern GL_INT_SAMPLER_2D_MULTISAMPLE,
pattern GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY,
pattern GL_INT_SAMPLER_2D_RECT,
pattern GL_INT_SAMPLER_3D,
pattern GL_INT_SAMPLER_BUFFER,
pattern GL_INT_SAMPLER_CUBE,
pattern GL_INT_VEC2,
pattern GL_INT_VEC3,
pattern GL_INT_VEC4,
pattern GL_INVALID_ENUM,
pattern GL_INVALID_FRAMEBUFFER_OPERATION,
pattern GL_INVALID_INDEX,
pattern GL_INVALID_OPERATION,
pattern GL_INVALID_VALUE,
pattern GL_INVERT,
pattern GL_KEEP,
pattern GL_LAST_VERTEX_CONVENTION,
pattern GL_LEFT,
pattern GL_LEQUAL,
pattern GL_LESS,
pattern GL_LIGHT0,
pattern GL_LIGHT1,
pattern GL_LIGHT2,
pattern GL_LIGHT3,
pattern GL_LIGHT4,
pattern GL_LIGHT5,
pattern GL_LIGHT6,
pattern GL_LIGHT7,
pattern GL_LIGHTING,
pattern GL_LIGHTING_BIT,
pattern GL_LIGHT_MODEL_AMBIENT,
pattern GL_LIGHT_MODEL_COLOR_CONTROL,
pattern GL_LIGHT_MODEL_LOCAL_VIEWER,
pattern GL_LIGHT_MODEL_TWO_SIDE,
pattern GL_LINE,
pattern GL_LINEAR,
pattern GL_LINEAR_ATTENUATION,
pattern GL_LINEAR_MIPMAP_LINEAR,
pattern GL_LINEAR_MIPMAP_NEAREST,
pattern GL_LINES,
pattern GL_LINES_ADJACENCY,
pattern GL_LINE_BIT,
pattern GL_LINE_LOOP,
pattern GL_LINE_RESET_TOKEN,
pattern GL_LINE_SMOOTH,
pattern GL_LINE_SMOOTH_HINT,
pattern GL_LINE_STIPPLE,
pattern GL_LINE_STIPPLE_PATTERN,
pattern GL_LINE_STIPPLE_REPEAT,
pattern GL_LINE_STRIP,
pattern GL_LINE_STRIP_ADJACENCY,
pattern GL_LINE_TOKEN,
pattern GL_LINE_WIDTH,
pattern GL_LINE_WIDTH_GRANULARITY,
pattern GL_LINE_WIDTH_RANGE,
pattern GL_LINK_STATUS,
pattern GL_LIST_BASE,
pattern GL_LIST_BIT,
pattern GL_LIST_INDEX,
pattern GL_LIST_MODE,
pattern GL_LOAD,
pattern GL_LOGIC_OP,
pattern GL_LOGIC_OP_MODE,
pattern GL_LOWER_LEFT,
pattern GL_LUMINANCE,
pattern GL_LUMINANCE12,
pattern GL_LUMINANCE12_ALPHA12,
pattern GL_LUMINANCE12_ALPHA4,
pattern GL_LUMINANCE16,
pattern GL_LUMINANCE16_ALPHA16,
pattern GL_LUMINANCE4,
pattern GL_LUMINANCE4_ALPHA4,
pattern GL_LUMINANCE6_ALPHA2,
pattern GL_LUMINANCE8,
pattern GL_LUMINANCE8_ALPHA8,
pattern GL_LUMINANCE_ALPHA,
pattern GL_MAJOR_VERSION,
pattern GL_MAP1_COLOR_4,
pattern GL_MAP1_GRID_DOMAIN,
pattern GL_MAP1_GRID_SEGMENTS,
pattern GL_MAP1_INDEX,
pattern GL_MAP1_NORMAL,
pattern GL_MAP1_TEXTURE_COORD_1,
pattern GL_MAP1_TEXTURE_COORD_2,
pattern GL_MAP1_TEXTURE_COORD_3,
pattern GL_MAP1_TEXTURE_COORD_4,
pattern GL_MAP1_VERTEX_3,
pattern GL_MAP1_VERTEX_4,
pattern GL_MAP2_COLOR_4,
pattern GL_MAP2_GRID_DOMAIN,
pattern GL_MAP2_GRID_SEGMENTS,
pattern GL_MAP2_INDEX,
pattern GL_MAP2_NORMAL,
pattern GL_MAP2_TEXTURE_COORD_1,
pattern GL_MAP2_TEXTURE_COORD_2,
pattern GL_MAP2_TEXTURE_COORD_3,
pattern GL_MAP2_TEXTURE_COORD_4,
pattern GL_MAP2_VERTEX_3,
pattern GL_MAP2_VERTEX_4,
pattern GL_MAP_COLOR,
pattern GL_MAP_FLUSH_EXPLICIT_BIT,
pattern GL_MAP_INVALIDATE_BUFFER_BIT,
pattern GL_MAP_INVALIDATE_RANGE_BIT,
pattern GL_MAP_READ_BIT,
pattern GL_MAP_STENCIL,
pattern GL_MAP_UNSYNCHRONIZED_BIT,
pattern GL_MAP_WRITE_BIT,
pattern GL_MATRIX_MODE,
pattern GL_MAX,
pattern GL_MAX_3D_TEXTURE_SIZE,
pattern GL_MAX_ARRAY_TEXTURE_LAYERS,
pattern GL_MAX_ATTRIB_STACK_DEPTH,
pattern GL_MAX_CLIENT_ATTRIB_STACK_DEPTH,
pattern GL_MAX_CLIP_DISTANCES,
pattern GL_MAX_CLIP_PLANES,
pattern GL_MAX_COLOR_ATTACHMENTS,
pattern GL_MAX_COLOR_TEXTURE_SAMPLES,
pattern GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS,
pattern GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS,
pattern GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS,
pattern GL_MAX_COMBINED_UNIFORM_BLOCKS,
pattern GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS,
pattern GL_MAX_CUBE_MAP_TEXTURE_SIZE,
pattern GL_MAX_DEPTH_TEXTURE_SAMPLES,
pattern GL_MAX_DRAW_BUFFERS,
pattern GL_MAX_DUAL_SOURCE_DRAW_BUFFERS,
pattern GL_MAX_ELEMENTS_INDICES,
pattern GL_MAX_ELEMENTS_VERTICES,
pattern GL_MAX_EVAL_ORDER,
pattern GL_MAX_FRAGMENT_INPUT_COMPONENTS,
pattern GL_MAX_FRAGMENT_UNIFORM_BLOCKS,
pattern GL_MAX_FRAGMENT_UNIFORM_COMPONENTS,
pattern GL_MAX_GEOMETRY_INPUT_COMPONENTS,
pattern GL_MAX_GEOMETRY_OUTPUT_COMPONENTS,
pattern GL_MAX_GEOMETRY_OUTPUT_VERTICES,
pattern GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS,
pattern GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS,
pattern GL_MAX_GEOMETRY_UNIFORM_BLOCKS,
pattern GL_MAX_GEOMETRY_UNIFORM_COMPONENTS,
pattern GL_MAX_INTEGER_SAMPLES,
pattern GL_MAX_LIGHTS,
pattern GL_MAX_LIST_NESTING,
pattern GL_MAX_MODELVIEW_STACK_DEPTH,
pattern GL_MAX_NAME_STACK_DEPTH,
pattern GL_MAX_PIXEL_MAP_TABLE,
pattern GL_MAX_PROGRAM_TEXEL_OFFSET,
pattern GL_MAX_PROJECTION_STACK_DEPTH,
pattern GL_MAX_RECTANGLE_TEXTURE_SIZE,
pattern GL_MAX_RENDERBUFFER_SIZE,
pattern GL_MAX_SAMPLES,
pattern GL_MAX_SAMPLE_MASK_WORDS,
pattern GL_MAX_SERVER_WAIT_TIMEOUT,
pattern GL_MAX_TEXTURE_BUFFER_SIZE,
pattern GL_MAX_TEXTURE_COORDS,
pattern GL_MAX_TEXTURE_IMAGE_UNITS,
pattern GL_MAX_TEXTURE_LOD_BIAS,
pattern GL_MAX_TEXTURE_SIZE,
pattern GL_MAX_TEXTURE_STACK_DEPTH,
pattern GL_MAX_TEXTURE_UNITS,
pattern GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS,
pattern GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS,
pattern GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS,
pattern GL_MAX_UNIFORM_BLOCK_SIZE,
pattern GL_MAX_UNIFORM_BUFFER_BINDINGS,
pattern GL_MAX_VARYING_COMPONENTS,
pattern GL_MAX_VARYING_FLOATS,
pattern GL_MAX_VERTEX_ATTRIBS,
pattern GL_MAX_VERTEX_OUTPUT_COMPONENTS,
pattern GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS,
pattern GL_MAX_VERTEX_UNIFORM_BLOCKS,
pattern GL_MAX_VERTEX_UNIFORM_COMPONENTS,
pattern GL_MAX_VIEWPORT_DIMS,
pattern GL_MIN,
pattern GL_MINOR_VERSION,
pattern GL_MIN_PROGRAM_TEXEL_OFFSET,
pattern GL_MIRRORED_REPEAT,
pattern GL_MODELVIEW,
pattern GL_MODELVIEW_MATRIX,
pattern GL_MODELVIEW_STACK_DEPTH,
pattern GL_MODULATE,
pattern GL_MULT,
pattern GL_MULTISAMPLE,
pattern GL_MULTISAMPLE_BIT,
pattern GL_N3F_V3F,
pattern GL_NAME_STACK_DEPTH,
pattern GL_NAND,
pattern GL_NEAREST,
pattern GL_NEAREST_MIPMAP_LINEAR,
pattern GL_NEAREST_MIPMAP_NEAREST,
pattern GL_NEVER,
pattern GL_NICEST,
pattern GL_NONE,
pattern GL_NOOP,
pattern GL_NOR,
pattern GL_NORMALIZE,
pattern GL_NORMAL_ARRAY,
pattern GL_NORMAL_ARRAY_BUFFER_BINDING,
pattern GL_NORMAL_ARRAY_POINTER,
pattern GL_NORMAL_ARRAY_STRIDE,
pattern GL_NORMAL_ARRAY_TYPE,
pattern GL_NORMAL_MAP,
pattern GL_NOTEQUAL,
pattern GL_NO_ERROR,
pattern GL_NUM_COMPRESSED_TEXTURE_FORMATS,
pattern GL_NUM_EXTENSIONS,
pattern GL_OBJECT_LINEAR,
pattern GL_OBJECT_PLANE,
pattern GL_OBJECT_TYPE,
pattern GL_ONE,
pattern GL_ONE_MINUS_CONSTANT_ALPHA,
pattern GL_ONE_MINUS_CONSTANT_COLOR,
pattern GL_ONE_MINUS_DST_ALPHA,
pattern GL_ONE_MINUS_DST_COLOR,
pattern GL_ONE_MINUS_SRC1_ALPHA,
pattern GL_ONE_MINUS_SRC1_COLOR,
pattern GL_ONE_MINUS_SRC_ALPHA,
pattern GL_ONE_MINUS_SRC_COLOR,
pattern GL_OPERAND0_ALPHA,
pattern GL_OPERAND0_RGB,
pattern GL_OPERAND1_ALPHA,
pattern GL_OPERAND1_RGB,
pattern GL_OPERAND2_ALPHA,
pattern GL_OPERAND2_RGB,
pattern GL_OR,
pattern GL_ORDER,
pattern GL_OR_INVERTED,
pattern GL_OR_REVERSE,
pattern GL_OUT_OF_MEMORY,
pattern GL_PACK_ALIGNMENT,
pattern GL_PACK_IMAGE_HEIGHT,
pattern GL_PACK_LSB_FIRST,
pattern GL_PACK_ROW_LENGTH,
pattern GL_PACK_SKIP_IMAGES,
pattern GL_PACK_SKIP_PIXELS,
pattern GL_PACK_SKIP_ROWS,
pattern GL_PACK_SWAP_BYTES,
pattern GL_PASS_THROUGH_TOKEN,
pattern GL_PERSPECTIVE_CORRECTION_HINT,
pattern GL_PIXEL_MAP_A_TO_A,
pattern GL_PIXEL_MAP_A_TO_A_SIZE,
pattern GL_PIXEL_MAP_B_TO_B,
pattern GL_PIXEL_MAP_B_TO_B_SIZE,
pattern GL_PIXEL_MAP_G_TO_G,
pattern GL_PIXEL_MAP_G_TO_G_SIZE,
pattern GL_PIXEL_MAP_I_TO_A,
pattern GL_PIXEL_MAP_I_TO_A_SIZE,
pattern GL_PIXEL_MAP_I_TO_B,
pattern GL_PIXEL_MAP_I_TO_B_SIZE,
pattern GL_PIXEL_MAP_I_TO_G,
pattern GL_PIXEL_MAP_I_TO_G_SIZE,
pattern GL_PIXEL_MAP_I_TO_I,
pattern GL_PIXEL_MAP_I_TO_I_SIZE,
pattern GL_PIXEL_MAP_I_TO_R,
pattern GL_PIXEL_MAP_I_TO_R_SIZE,
pattern GL_PIXEL_MAP_R_TO_R,
pattern GL_PIXEL_MAP_R_TO_R_SIZE,
pattern GL_PIXEL_MAP_S_TO_S,
pattern GL_PIXEL_MAP_S_TO_S_SIZE,
pattern GL_PIXEL_MODE_BIT,
pattern GL_PIXEL_PACK_BUFFER,
pattern GL_PIXEL_PACK_BUFFER_BINDING,
pattern GL_PIXEL_UNPACK_BUFFER,
pattern GL_PIXEL_UNPACK_BUFFER_BINDING,
pattern GL_POINT,
pattern GL_POINTS,
pattern GL_POINT_BIT,
pattern GL_POINT_DISTANCE_ATTENUATION,
pattern GL_POINT_FADE_THRESHOLD_SIZE,
pattern GL_POINT_SIZE,
pattern GL_POINT_SIZE_GRANULARITY,
pattern GL_POINT_SIZE_MAX,
pattern GL_POINT_SIZE_MIN,
pattern GL_POINT_SIZE_RANGE,
pattern GL_POINT_SMOOTH,
pattern GL_POINT_SMOOTH_HINT,
pattern GL_POINT_SPRITE,
pattern GL_POINT_SPRITE_COORD_ORIGIN,
pattern GL_POINT_TOKEN,
pattern GL_POLYGON,
pattern GL_POLYGON_BIT,
pattern GL_POLYGON_MODE,
pattern GL_POLYGON_OFFSET_FACTOR,
pattern GL_POLYGON_OFFSET_FILL,
pattern GL_POLYGON_OFFSET_LINE,
pattern GL_POLYGON_OFFSET_POINT,
pattern GL_POLYGON_OFFSET_UNITS,
pattern GL_POLYGON_SMOOTH,
pattern GL_POLYGON_SMOOTH_HINT,
pattern GL_POLYGON_STIPPLE,
pattern GL_POLYGON_STIPPLE_BIT,
pattern GL_POLYGON_TOKEN,
pattern GL_POSITION,
pattern GL_PREVIOUS,
pattern GL_PRIMARY_COLOR,
pattern GL_PRIMITIVES_GENERATED,
pattern GL_PRIMITIVE_RESTART,
pattern GL_PRIMITIVE_RESTART_INDEX,
pattern GL_PROGRAM_POINT_SIZE,
pattern GL_PROJECTION,
pattern GL_PROJECTION_MATRIX,
pattern GL_PROJECTION_STACK_DEPTH,
pattern GL_PROVOKING_VERTEX,
pattern GL_PROXY_TEXTURE_1D,
pattern GL_PROXY_TEXTURE_1D_ARRAY,
pattern GL_PROXY_TEXTURE_2D,
pattern GL_PROXY_TEXTURE_2D_ARRAY,
pattern GL_PROXY_TEXTURE_2D_MULTISAMPLE,
pattern GL_PROXY_TEXTURE_2D_MULTISAMPLE_ARRAY,
pattern GL_PROXY_TEXTURE_3D,
pattern GL_PROXY_TEXTURE_CUBE_MAP,
pattern GL_PROXY_TEXTURE_RECTANGLE,
pattern GL_Q,
pattern GL_QUADRATIC_ATTENUATION,
pattern GL_QUADS,
pattern GL_QUADS_FOLLOW_PROVOKING_VERTEX_CONVENTION,
pattern GL_QUAD_STRIP,
pattern GL_QUERY_BY_REGION_NO_WAIT,
pattern GL_QUERY_BY_REGION_WAIT,
pattern GL_QUERY_COUNTER_BITS,
pattern GL_QUERY_NO_WAIT,
pattern GL_QUERY_RESULT,
pattern GL_QUERY_RESULT_AVAILABLE,
pattern GL_QUERY_WAIT,
pattern GL_R,
pattern GL_R11F_G11F_B10F,
pattern GL_R16,
pattern GL_R16F,
pattern GL_R16I,
pattern GL_R16UI,
pattern GL_R16_SNORM,
pattern GL_R32F,
pattern GL_R32I,
pattern GL_R32UI,
pattern GL_R3_G3_B2,
pattern GL_R8,
pattern GL_R8I,
pattern GL_R8UI,
pattern GL_R8_SNORM,
pattern GL_RASTERIZER_DISCARD,
pattern GL_READ_BUFFER,
pattern GL_READ_FRAMEBUFFER,
pattern GL_READ_FRAMEBUFFER_BINDING,
pattern GL_READ_ONLY,
pattern GL_READ_WRITE,
pattern GL_RED,
pattern GL_RED_BIAS,
pattern GL_RED_BITS,
pattern GL_RED_INTEGER,
pattern GL_RED_SCALE,
pattern GL_REFLECTION_MAP,
pattern GL_RENDER,
pattern GL_RENDERBUFFER,
pattern GL_RENDERBUFFER_ALPHA_SIZE,
pattern GL_RENDERBUFFER_BINDING,
pattern GL_RENDERBUFFER_BLUE_SIZE,
pattern GL_RENDERBUFFER_DEPTH_SIZE,
pattern GL_RENDERBUFFER_GREEN_SIZE,
pattern GL_RENDERBUFFER_HEIGHT,
pattern GL_RENDERBUFFER_INTERNAL_FORMAT,
pattern GL_RENDERBUFFER_RED_SIZE,
pattern GL_RENDERBUFFER_SAMPLES,
pattern GL_RENDERBUFFER_STENCIL_SIZE,
pattern GL_RENDERBUFFER_WIDTH,
pattern GL_RENDERER,
pattern GL_RENDER_MODE,
pattern GL_REPEAT,
pattern GL_REPLACE,
pattern GL_RESCALE_NORMAL,
pattern GL_RETURN,
pattern GL_RG,
pattern GL_RG16,
pattern GL_RG16F,
pattern GL_RG16I,
pattern GL_RG16UI,
pattern GL_RG16_SNORM,
pattern GL_RG32F,
pattern GL_RG32I,
pattern GL_RG32UI,
pattern GL_RG8,
pattern GL_RG8I,
pattern GL_RG8UI,
pattern GL_RG8_SNORM,
pattern GL_RGB,
pattern GL_RGB10,
pattern GL_RGB10_A2,
pattern GL_RGB10_A2UI,
pattern GL_RGB12,
pattern GL_RGB16,
pattern GL_RGB16F,
pattern GL_RGB16I,
pattern GL_RGB16UI,
pattern GL_RGB16_SNORM,
pattern GL_RGB32F,
pattern GL_RGB32I,
pattern GL_RGB32UI,
pattern GL_RGB4,
pattern GL_RGB5,
pattern GL_RGB5_A1,
pattern GL_RGB8,
pattern GL_RGB8I,
pattern GL_RGB8UI,
pattern GL_RGB8_SNORM,
pattern GL_RGB9_E5,
pattern GL_RGBA,
pattern GL_RGBA12,
pattern GL_RGBA16,
pattern GL_RGBA16F,
pattern GL_RGBA16I,
pattern GL_RGBA16UI,
pattern GL_RGBA16_SNORM,
pattern GL_RGBA2,
pattern GL_RGBA32F,
pattern GL_RGBA32I,
pattern GL_RGBA32UI,
pattern GL_RGBA4,
pattern GL_RGBA8,
pattern GL_RGBA8I,
pattern GL_RGBA8UI,
pattern GL_RGBA8_SNORM,
pattern GL_RGBA_INTEGER,
pattern GL_RGBA_MODE,
pattern GL_RGB_INTEGER,
pattern GL_RGB_SCALE,
pattern GL_RG_INTEGER,
pattern GL_RIGHT,
pattern GL_S,
pattern GL_SAMPLER_1D,
pattern GL_SAMPLER_1D_ARRAY,
pattern GL_SAMPLER_1D_ARRAY_SHADOW,
pattern GL_SAMPLER_1D_SHADOW,
pattern GL_SAMPLER_2D,
pattern GL_SAMPLER_2D_ARRAY,
pattern GL_SAMPLER_2D_ARRAY_SHADOW,
pattern GL_SAMPLER_2D_MULTISAMPLE,
pattern GL_SAMPLER_2D_MULTISAMPLE_ARRAY,
pattern GL_SAMPLER_2D_RECT,
pattern GL_SAMPLER_2D_RECT_SHADOW,
pattern GL_SAMPLER_2D_SHADOW,
pattern GL_SAMPLER_3D,
pattern GL_SAMPLER_BINDING,
pattern GL_SAMPLER_BUFFER,
pattern GL_SAMPLER_CUBE,
pattern GL_SAMPLER_CUBE_SHADOW,
pattern GL_SAMPLES,
pattern GL_SAMPLES_PASSED,
pattern GL_SAMPLE_ALPHA_TO_COVERAGE,
pattern GL_SAMPLE_ALPHA_TO_ONE,
pattern GL_SAMPLE_BUFFERS,
pattern GL_SAMPLE_COVERAGE,
pattern GL_SAMPLE_COVERAGE_INVERT,
pattern GL_SAMPLE_COVERAGE_VALUE,
pattern GL_SAMPLE_MASK,
pattern GL_SAMPLE_MASK_VALUE,
pattern GL_SAMPLE_POSITION,
pattern GL_SCISSOR_BIT,
pattern GL_SCISSOR_BOX,
pattern GL_SCISSOR_TEST,
pattern GL_SECONDARY_COLOR_ARRAY,
pattern GL_SECONDARY_COLOR_ARRAY_BUFFER_BINDING,
pattern GL_SECONDARY_COLOR_ARRAY_POINTER,
pattern GL_SECONDARY_COLOR_ARRAY_SIZE,
pattern GL_SECONDARY_COLOR_ARRAY_STRIDE,
pattern GL_SECONDARY_COLOR_ARRAY_TYPE,
pattern GL_SELECT,
pattern GL_SELECTION_BUFFER_POINTER,
pattern GL_SELECTION_BUFFER_SIZE,
pattern GL_SEPARATE_ATTRIBS,
pattern GL_SEPARATE_SPECULAR_COLOR,
pattern GL_SET,
pattern GL_SHADER_SOURCE_LENGTH,
pattern GL_SHADER_TYPE,
pattern GL_SHADE_MODEL,
pattern GL_SHADING_LANGUAGE_VERSION,
pattern GL_SHININESS,
pattern GL_SHORT,
pattern GL_SIGNALED,
pattern GL_SIGNED_NORMALIZED,
pattern GL_SINGLE_COLOR,
pattern GL_SLUMINANCE,
pattern GL_SLUMINANCE8,
pattern GL_SLUMINANCE8_ALPHA8,
pattern GL_SLUMINANCE_ALPHA,
pattern GL_SMOOTH,
pattern GL_SMOOTH_LINE_WIDTH_GRANULARITY,
pattern GL_SMOOTH_LINE_WIDTH_RANGE,
pattern GL_SMOOTH_POINT_SIZE_GRANULARITY,
pattern GL_SMOOTH_POINT_SIZE_RANGE,
pattern GL_SOURCE0_ALPHA,
pattern GL_SOURCE0_RGB,
pattern GL_SOURCE1_ALPHA,
pattern GL_SOURCE1_RGB,
pattern GL_SOURCE2_ALPHA,
pattern GL_SOURCE2_RGB,
pattern GL_SPECULAR,
pattern GL_SPHERE_MAP,
pattern GL_SPOT_CUTOFF,
pattern GL_SPOT_DIRECTION,
pattern GL_SPOT_EXPONENT,
pattern GL_SRC0_ALPHA,
pattern GL_SRC0_RGB,
pattern GL_SRC1_ALPHA,
pattern GL_SRC1_COLOR,
pattern GL_SRC1_RGB,
pattern GL_SRC2_ALPHA,
pattern GL_SRC2_RGB,
pattern GL_SRC_ALPHA,
pattern GL_SRC_ALPHA_SATURATE,
pattern GL_SRC_COLOR,
pattern GL_SRGB,
pattern GL_SRGB8,
pattern GL_SRGB8_ALPHA8,
pattern GL_SRGB_ALPHA,
pattern GL_STACK_OVERFLOW,
pattern GL_STACK_UNDERFLOW,
pattern GL_STATIC_COPY,
pattern GL_STATIC_DRAW,
pattern GL_STATIC_READ,
pattern GL_STENCIL,
pattern GL_STENCIL_ATTACHMENT,
pattern GL_STENCIL_BACK_FAIL,
pattern GL_STENCIL_BACK_FUNC,
pattern GL_STENCIL_BACK_PASS_DEPTH_FAIL,
pattern GL_STENCIL_BACK_PASS_DEPTH_PASS,
pattern GL_STENCIL_BACK_REF,
pattern GL_STENCIL_BACK_VALUE_MASK,
pattern GL_STENCIL_BACK_WRITEMASK,
pattern GL_STENCIL_BITS,
pattern GL_STENCIL_BUFFER_BIT,
pattern GL_STENCIL_CLEAR_VALUE,
pattern GL_STENCIL_FAIL,
pattern GL_STENCIL_FUNC,
pattern GL_STENCIL_INDEX,
pattern GL_STENCIL_INDEX1,
pattern GL_STENCIL_INDEX16,
pattern GL_STENCIL_INDEX4,
pattern GL_STENCIL_INDEX8,
pattern GL_STENCIL_PASS_DEPTH_FAIL,
pattern GL_STENCIL_PASS_DEPTH_PASS,
pattern GL_STENCIL_REF,
pattern GL_STENCIL_TEST,
pattern GL_STENCIL_VALUE_MASK,
pattern GL_STENCIL_WRITEMASK,
pattern GL_STEREO,
pattern GL_STREAM_COPY,
pattern GL_STREAM_DRAW,
pattern GL_STREAM_READ,
pattern GL_SUBPIXEL_BITS,
pattern GL_SUBTRACT,
pattern GL_SYNC_CONDITION,
pattern GL_SYNC_FENCE,
pattern GL_SYNC_FLAGS,
pattern GL_SYNC_FLUSH_COMMANDS_BIT,
pattern GL_SYNC_GPU_COMMANDS_COMPLETE,
pattern GL_SYNC_STATUS,
pattern GL_T,
pattern GL_T2F_C3F_V3F,
pattern GL_T2F_C4F_N3F_V3F,
pattern GL_T2F_C4UB_V3F,
pattern GL_T2F_N3F_V3F,
pattern GL_T2F_V3F,
pattern GL_T4F_C4F_N3F_V4F,
pattern GL_T4F_V4F,
pattern GL_TEXTURE,
pattern GL_TEXTURE0,
pattern GL_TEXTURE1,
pattern GL_TEXTURE10,
pattern GL_TEXTURE11,
pattern GL_TEXTURE12,
pattern GL_TEXTURE13,
pattern GL_TEXTURE14,
pattern GL_TEXTURE15,
pattern GL_TEXTURE16,
pattern GL_TEXTURE17,
pattern GL_TEXTURE18,
pattern GL_TEXTURE19,
pattern GL_TEXTURE2,
pattern GL_TEXTURE20,
pattern GL_TEXTURE21,
pattern GL_TEXTURE22,
pattern GL_TEXTURE23,
pattern GL_TEXTURE24,
pattern GL_TEXTURE25,
pattern GL_TEXTURE26,
pattern GL_TEXTURE27,
pattern GL_TEXTURE28,
pattern GL_TEXTURE29,
pattern GL_TEXTURE3,
pattern GL_TEXTURE30,
pattern GL_TEXTURE31,
pattern GL_TEXTURE4,
pattern GL_TEXTURE5,
pattern GL_TEXTURE6,
pattern GL_TEXTURE7,
pattern GL_TEXTURE8,
pattern GL_TEXTURE9,
pattern GL_TEXTURE_1D,
pattern GL_TEXTURE_1D_ARRAY,
pattern GL_TEXTURE_2D,
pattern GL_TEXTURE_2D_ARRAY,
pattern GL_TEXTURE_2D_MULTISAMPLE,
pattern GL_TEXTURE_2D_MULTISAMPLE_ARRAY,
pattern GL_TEXTURE_3D,
pattern GL_TEXTURE_ALPHA_SIZE,
pattern GL_TEXTURE_ALPHA_TYPE,
pattern GL_TEXTURE_BASE_LEVEL,
pattern GL_TEXTURE_BINDING_1D,
pattern GL_TEXTURE_BINDING_1D_ARRAY,
pattern GL_TEXTURE_BINDING_2D,
pattern GL_TEXTURE_BINDING_2D_ARRAY,
pattern GL_TEXTURE_BINDING_2D_MULTISAMPLE,
pattern GL_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY,
pattern GL_TEXTURE_BINDING_3D,
pattern GL_TEXTURE_BINDING_BUFFER,
pattern GL_TEXTURE_BINDING_CUBE_MAP,
pattern GL_TEXTURE_BINDING_RECTANGLE,
pattern GL_TEXTURE_BIT,
pattern GL_TEXTURE_BLUE_SIZE,
pattern GL_TEXTURE_BLUE_TYPE,
pattern GL_TEXTURE_BORDER,
pattern GL_TEXTURE_BORDER_COLOR,
pattern GL_TEXTURE_BUFFER,
pattern GL_TEXTURE_BUFFER_DATA_STORE_BINDING,
pattern GL_TEXTURE_COMPARE_FUNC,
pattern GL_TEXTURE_COMPARE_MODE,
pattern GL_TEXTURE_COMPONENTS,
pattern GL_TEXTURE_COMPRESSED,
pattern GL_TEXTURE_COMPRESSED_IMAGE_SIZE,
pattern GL_TEXTURE_COMPRESSION_HINT,
pattern GL_TEXTURE_COORD_ARRAY,
pattern GL_TEXTURE_COORD_ARRAY_BUFFER_BINDING,
pattern GL_TEXTURE_COORD_ARRAY_POINTER,
pattern GL_TEXTURE_COORD_ARRAY_SIZE,
pattern GL_TEXTURE_COORD_ARRAY_STRIDE,
pattern GL_TEXTURE_COORD_ARRAY_TYPE,
pattern GL_TEXTURE_CUBE_MAP,
pattern GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
pattern GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,
pattern GL_TEXTURE_CUBE_MAP_NEGATIVE_Z,
pattern GL_TEXTURE_CUBE_MAP_POSITIVE_X,
pattern GL_TEXTURE_CUBE_MAP_POSITIVE_Y,
pattern GL_TEXTURE_CUBE_MAP_POSITIVE_Z,
pattern GL_TEXTURE_CUBE_MAP_SEAMLESS,
pattern GL_TEXTURE_DEPTH,
pattern GL_TEXTURE_DEPTH_SIZE,
pattern GL_TEXTURE_DEPTH_TYPE,
pattern GL_TEXTURE_ENV,
pattern GL_TEXTURE_ENV_COLOR,
pattern GL_TEXTURE_ENV_MODE,
pattern GL_TEXTURE_FILTER_CONTROL,
pattern GL_TEXTURE_FIXED_SAMPLE_LOCATIONS,
pattern GL_TEXTURE_GEN_MODE,
pattern GL_TEXTURE_GEN_Q,
pattern GL_TEXTURE_GEN_R,
pattern GL_TEXTURE_GEN_S,
pattern GL_TEXTURE_GEN_T,
pattern GL_TEXTURE_GREEN_SIZE,
pattern GL_TEXTURE_GREEN_TYPE,
pattern GL_TEXTURE_HEIGHT,
pattern GL_TEXTURE_INTENSITY_SIZE,
pattern GL_TEXTURE_INTENSITY_TYPE,
pattern GL_TEXTURE_INTERNAL_FORMAT,
pattern GL_TEXTURE_LOD_BIAS,
pattern GL_TEXTURE_LUMINANCE_SIZE,
pattern GL_TEXTURE_LUMINANCE_TYPE,
pattern GL_TEXTURE_MAG_FILTER,
pattern GL_TEXTURE_MATRIX,
pattern GL_TEXTURE_MAX_LEVEL,
pattern GL_TEXTURE_MAX_LOD,
pattern GL_TEXTURE_MIN_FILTER,
pattern GL_TEXTURE_MIN_LOD,
pattern GL_TEXTURE_PRIORITY,
pattern GL_TEXTURE_RECTANGLE,
pattern GL_TEXTURE_RED_SIZE,
pattern GL_TEXTURE_RED_TYPE,
pattern GL_TEXTURE_RESIDENT,
pattern GL_TEXTURE_SAMPLES,
pattern GL_TEXTURE_SHARED_SIZE,
pattern GL_TEXTURE_STACK_DEPTH,
pattern GL_TEXTURE_STENCIL_SIZE,
pattern GL_TEXTURE_SWIZZLE_A,
pattern GL_TEXTURE_SWIZZLE_B,
pattern GL_TEXTURE_SWIZZLE_G,
pattern GL_TEXTURE_SWIZZLE_R,
pattern GL_TEXTURE_SWIZZLE_RGBA,
pattern GL_TEXTURE_WIDTH,
pattern GL_TEXTURE_WRAP_R,
pattern GL_TEXTURE_WRAP_S,
pattern GL_TEXTURE_WRAP_T,
pattern GL_TIMEOUT_EXPIRED,
pattern GL_TIMEOUT_IGNORED,
pattern GL_TIMESTAMP,
pattern GL_TIME_ELAPSED,
pattern GL_TRANSFORM_BIT,
pattern GL_TRANSFORM_FEEDBACK_BUFFER,
pattern GL_TRANSFORM_FEEDBACK_BUFFER_BINDING,
pattern GL_TRANSFORM_FEEDBACK_BUFFER_MODE,
pattern GL_TRANSFORM_FEEDBACK_BUFFER_SIZE,
pattern GL_TRANSFORM_FEEDBACK_BUFFER_START,
pattern GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN,
pattern GL_TRANSFORM_FEEDBACK_VARYINGS,
pattern GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH,
pattern GL_TRANSPOSE_COLOR_MATRIX,
pattern GL_TRANSPOSE_MODELVIEW_MATRIX,
pattern GL_TRANSPOSE_PROJECTION_MATRIX,
pattern GL_TRANSPOSE_TEXTURE_MATRIX,
pattern GL_TRIANGLES,
pattern GL_TRIANGLES_ADJACENCY,
pattern GL_TRIANGLE_FAN,
pattern GL_TRIANGLE_STRIP,
pattern GL_TRIANGLE_STRIP_ADJACENCY,
pattern GL_TRUE,
pattern GL_UNIFORM_ARRAY_STRIDE,
pattern GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS,
pattern GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES,
pattern GL_UNIFORM_BLOCK_BINDING,
pattern GL_UNIFORM_BLOCK_DATA_SIZE,
pattern GL_UNIFORM_BLOCK_INDEX,
pattern GL_UNIFORM_BLOCK_NAME_LENGTH,
pattern GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER,
pattern GL_UNIFORM_BLOCK_REFERENCED_BY_GEOMETRY_SHADER,
pattern GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER,
pattern GL_UNIFORM_BUFFER,
pattern GL_UNIFORM_BUFFER_BINDING,
pattern GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT,
pattern GL_UNIFORM_BUFFER_SIZE,
pattern GL_UNIFORM_BUFFER_START,
pattern GL_UNIFORM_IS_ROW_MAJOR,
pattern GL_UNIFORM_MATRIX_STRIDE,
pattern GL_UNIFORM_NAME_LENGTH,
pattern GL_UNIFORM_OFFSET,
pattern GL_UNIFORM_SIZE,
pattern GL_UNIFORM_TYPE,
pattern GL_UNPACK_ALIGNMENT,
pattern GL_UNPACK_IMAGE_HEIGHT,
pattern GL_UNPACK_LSB_FIRST,
pattern GL_UNPACK_ROW_LENGTH,
pattern GL_UNPACK_SKIP_IMAGES,
pattern GL_UNPACK_SKIP_PIXELS,
pattern GL_UNPACK_SKIP_ROWS,
pattern GL_UNPACK_SWAP_BYTES,
pattern GL_UNSIGNALED,
pattern GL_UNSIGNED_BYTE,
pattern GL_UNSIGNED_BYTE_2_3_3_REV,
pattern GL_UNSIGNED_BYTE_3_3_2,
pattern GL_UNSIGNED_INT,
pattern GL_UNSIGNED_INT_10F_11F_11F_REV,
pattern GL_UNSIGNED_INT_10_10_10_2,
pattern GL_UNSIGNED_INT_24_8,
pattern GL_UNSIGNED_INT_2_10_10_10_REV,
pattern GL_UNSIGNED_INT_5_9_9_9_REV,
pattern GL_UNSIGNED_INT_8_8_8_8,
pattern GL_UNSIGNED_INT_8_8_8_8_REV,
pattern GL_UNSIGNED_INT_SAMPLER_1D,
pattern GL_UNSIGNED_INT_SAMPLER_1D_ARRAY,
pattern GL_UNSIGNED_INT_SAMPLER_2D,
pattern GL_UNSIGNED_INT_SAMPLER_2D_ARRAY,
pattern GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE,
pattern GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY,
pattern GL_UNSIGNED_INT_SAMPLER_2D_RECT,
pattern GL_UNSIGNED_INT_SAMPLER_3D,
pattern GL_UNSIGNED_INT_SAMPLER_BUFFER,
pattern GL_UNSIGNED_INT_SAMPLER_CUBE,
pattern GL_UNSIGNED_INT_VEC2,
pattern GL_UNSIGNED_INT_VEC3,
pattern GL_UNSIGNED_INT_VEC4,
pattern GL_UNSIGNED_NORMALIZED,
pattern GL_UNSIGNED_SHORT,
pattern GL_UNSIGNED_SHORT_1_5_5_5_REV,
pattern GL_UNSIGNED_SHORT_4_4_4_4,
pattern GL_UNSIGNED_SHORT_4_4_4_4_REV,
pattern GL_UNSIGNED_SHORT_5_5_5_1,
pattern GL_UNSIGNED_SHORT_5_6_5,
pattern GL_UNSIGNED_SHORT_5_6_5_REV,
pattern GL_UPPER_LEFT,
pattern GL_V2F,
pattern GL_V3F,
pattern GL_VALIDATE_STATUS,
pattern GL_VENDOR,
pattern GL_VERSION,
pattern GL_VERTEX_ARRAY,
pattern GL_VERTEX_ARRAY_BINDING,
pattern GL_VERTEX_ARRAY_BUFFER_BINDING,
pattern GL_VERTEX_ARRAY_POINTER,
pattern GL_VERTEX_ARRAY_SIZE,
pattern GL_VERTEX_ARRAY_STRIDE,
pattern GL_VERTEX_ARRAY_TYPE,
pattern GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING,
pattern GL_VERTEX_ATTRIB_ARRAY_DIVISOR,
pattern GL_VERTEX_ATTRIB_ARRAY_ENABLED,
pattern GL_VERTEX_ATTRIB_ARRAY_INTEGER,
pattern GL_VERTEX_ATTRIB_ARRAY_NORMALIZED,
pattern GL_VERTEX_ATTRIB_ARRAY_POINTER,
pattern GL_VERTEX_ATTRIB_ARRAY_SIZE,
pattern GL_VERTEX_ATTRIB_ARRAY_STRIDE,
pattern GL_VERTEX_ATTRIB_ARRAY_TYPE,
pattern GL_VERTEX_PROGRAM_POINT_SIZE,
pattern GL_VERTEX_PROGRAM_TWO_SIDE,
pattern GL_VERTEX_SHADER,
pattern GL_VIEWPORT,
pattern GL_VIEWPORT_BIT,
pattern GL_WAIT_FAILED,
pattern GL_WEIGHT_ARRAY_BUFFER_BINDING,
pattern GL_WRITE_ONLY,
pattern GL_XOR,
pattern GL_ZERO,
pattern GL_ZOOM_X,
pattern GL_ZOOM_Y,
-- * Functions
glAccum,
glActiveTexture,
glAlphaFunc,
glAreTexturesResident,
glArrayElement,
glAttachShader,
glBegin,
glBeginConditionalRender,
glBeginQuery,
glBeginTransformFeedback,
glBindAttribLocation,
glBindBuffer,
glBindBufferBase,
glBindBufferRange,
glBindFragDataLocation,
glBindFragDataLocationIndexed,
glBindFramebuffer,
glBindRenderbuffer,
glBindSampler,
glBindTexture,
glBindVertexArray,
glBitmap,
glBlendColor,
glBlendEquation,
glBlendEquationSeparate,
glBlendFunc,
glBlendFuncSeparate,
glBlitFramebuffer,
glBufferData,
glBufferSubData,
glCallList,
glCallLists,
glCheckFramebufferStatus,
glClampColor,
glClear,
glClearAccum,
glClearBufferfi,
glClearBufferfv,
glClearBufferiv,
glClearBufferuiv,
glClearColor,
glClearDepth,
glClearIndex,
glClearStencil,
glClientActiveTexture,
glClientWaitSync,
glClipPlane,
glColor3b,
glColor3bv,
glColor3d,
glColor3dv,
glColor3f,
glColor3fv,
glColor3i,
glColor3iv,
glColor3s,
glColor3sv,
glColor3ub,
glColor3ubv,
glColor3ui,
glColor3uiv,
glColor3us,
glColor3usv,
glColor4b,
glColor4bv,
glColor4d,
glColor4dv,
glColor4f,
glColor4fv,
glColor4i,
glColor4iv,
glColor4s,
glColor4sv,
glColor4ub,
glColor4ubv,
glColor4ui,
glColor4uiv,
glColor4us,
glColor4usv,
glColorMask,
glColorMaski,
glColorMaterial,
glColorP3ui,
glColorP3uiv,
glColorP4ui,
glColorP4uiv,
glColorPointer,
glCompileShader,
glCompressedTexImage1D,
glCompressedTexImage2D,
glCompressedTexImage3D,
glCompressedTexSubImage1D,
glCompressedTexSubImage2D,
glCompressedTexSubImage3D,
glCopyBufferSubData,
glCopyPixels,
glCopyTexImage1D,
glCopyTexImage2D,
glCopyTexSubImage1D,
glCopyTexSubImage2D,
glCopyTexSubImage3D,
glCreateProgram,
glCreateShader,
glCullFace,
glDeleteBuffers,
glDeleteFramebuffers,
glDeleteLists,
glDeleteProgram,
glDeleteQueries,
glDeleteRenderbuffers,
glDeleteSamplers,
glDeleteShader,
glDeleteSync,
glDeleteTextures,
glDeleteVertexArrays,
glDepthFunc,
glDepthMask,
glDepthRange,
glDetachShader,
glDisable,
glDisableClientState,
glDisableVertexAttribArray,
glDisablei,
glDrawArrays,
glDrawArraysInstanced,
glDrawBuffer,
glDrawBuffers,
glDrawElements,
glDrawElementsBaseVertex,
glDrawElementsInstanced,
glDrawElementsInstancedBaseVertex,
glDrawPixels,
glDrawRangeElements,
glDrawRangeElementsBaseVertex,
glEdgeFlag,
glEdgeFlagPointer,
glEdgeFlagv,
glEnable,
glEnableClientState,
glEnableVertexAttribArray,
glEnablei,
glEnd,
glEndConditionalRender,
glEndList,
glEndQuery,
glEndTransformFeedback,
glEvalCoord1d,
glEvalCoord1dv,
glEvalCoord1f,
glEvalCoord1fv,
glEvalCoord2d,
glEvalCoord2dv,
glEvalCoord2f,
glEvalCoord2fv,
glEvalMesh1,
glEvalMesh2,
glEvalPoint1,
glEvalPoint2,
glFeedbackBuffer,
glFenceSync,
glFinish,
glFlush,
glFlushMappedBufferRange,
glFogCoordPointer,
glFogCoordd,
glFogCoorddv,
glFogCoordf,
glFogCoordfv,
glFogf,
glFogfv,
glFogi,
glFogiv,
glFramebufferRenderbuffer,
glFramebufferTexture,
glFramebufferTexture1D,
glFramebufferTexture2D,
glFramebufferTexture3D,
glFramebufferTextureLayer,
glFrontFace,
glFrustum,
glGenBuffers,
glGenFramebuffers,
glGenLists,
glGenQueries,
glGenRenderbuffers,
glGenSamplers,
glGenTextures,
glGenVertexArrays,
glGenerateMipmap,
glGetActiveAttrib,
glGetActiveUniform,
glGetActiveUniformBlockName,
glGetActiveUniformBlockiv,
glGetActiveUniformName,
glGetActiveUniformsiv,
glGetAttachedShaders,
glGetAttribLocation,
glGetBooleani_v,
glGetBooleanv,
glGetBufferParameteri64v,
glGetBufferParameteriv,
glGetBufferPointerv,
glGetBufferSubData,
glGetClipPlane,
glGetCompressedTexImage,
glGetDoublev,
glGetError,
glGetFloatv,
glGetFragDataIndex,
glGetFragDataLocation,
glGetFramebufferAttachmentParameteriv,
glGetInteger64i_v,
glGetInteger64v,
glGetIntegeri_v,
glGetIntegerv,
glGetLightfv,
glGetLightiv,
glGetMapdv,
glGetMapfv,
glGetMapiv,
glGetMaterialfv,
glGetMaterialiv,
glGetMultisamplefv,
glGetPixelMapfv,
glGetPixelMapuiv,
glGetPixelMapusv,
glGetPointerv,
glGetPolygonStipple,
glGetProgramInfoLog,
glGetProgramiv,
glGetQueryObjecti64v,
glGetQueryObjectiv,
glGetQueryObjectui64v,
glGetQueryObjectuiv,
glGetQueryiv,
glGetRenderbufferParameteriv,
glGetSamplerParameterIiv,
glGetSamplerParameterIuiv,
glGetSamplerParameterfv,
glGetSamplerParameteriv,
glGetShaderInfoLog,
glGetShaderSource,
glGetShaderiv,
glGetString,
glGetStringi,
glGetSynciv,
glGetTexEnvfv,
glGetTexEnviv,
glGetTexGendv,
glGetTexGenfv,
glGetTexGeniv,
glGetTexImage,
glGetTexLevelParameterfv,
glGetTexLevelParameteriv,
glGetTexParameterIiv,
glGetTexParameterIuiv,
glGetTexParameterfv,
glGetTexParameteriv,
glGetTransformFeedbackVarying,
glGetUniformBlockIndex,
glGetUniformIndices,
glGetUniformLocation,
glGetUniformfv,
glGetUniformiv,
glGetUniformuiv,
glGetVertexAttribIiv,
glGetVertexAttribIuiv,
glGetVertexAttribPointerv,
glGetVertexAttribdv,
glGetVertexAttribfv,
glGetVertexAttribiv,
glHint,
glIndexMask,
glIndexPointer,
glIndexd,
glIndexdv,
glIndexf,
glIndexfv,
glIndexi,
glIndexiv,
glIndexs,
glIndexsv,
glIndexub,
glIndexubv,
glInitNames,
glInterleavedArrays,
glIsBuffer,
glIsEnabled,
glIsEnabledi,
glIsFramebuffer,
glIsList,
glIsProgram,
glIsQuery,
glIsRenderbuffer,
glIsSampler,
glIsShader,
glIsSync,
glIsTexture,
glIsVertexArray,
glLightModelf,
glLightModelfv,
glLightModeli,
glLightModeliv,
glLightf,
glLightfv,
glLighti,
glLightiv,
glLineStipple,
glLineWidth,
glLinkProgram,
glListBase,
glLoadIdentity,
glLoadMatrixd,
glLoadMatrixf,
glLoadName,
glLoadTransposeMatrixd,
glLoadTransposeMatrixf,
glLogicOp,
glMap1d,
glMap1f,
glMap2d,
glMap2f,
glMapBuffer,
glMapBufferRange,
glMapGrid1d,
glMapGrid1f,
glMapGrid2d,
glMapGrid2f,
glMaterialf,
glMaterialfv,
glMateriali,
glMaterialiv,
glMatrixMode,
glMultMatrixd,
glMultMatrixf,
glMultTransposeMatrixd,
glMultTransposeMatrixf,
glMultiDrawArrays,
glMultiDrawElements,
glMultiDrawElementsBaseVertex,
glMultiTexCoord1d,
glMultiTexCoord1dv,
glMultiTexCoord1f,
glMultiTexCoord1fv,
glMultiTexCoord1i,
glMultiTexCoord1iv,
glMultiTexCoord1s,
glMultiTexCoord1sv,
glMultiTexCoord2d,
glMultiTexCoord2dv,
glMultiTexCoord2f,
glMultiTexCoord2fv,
glMultiTexCoord2i,
glMultiTexCoord2iv,
glMultiTexCoord2s,
glMultiTexCoord2sv,
glMultiTexCoord3d,
glMultiTexCoord3dv,
glMultiTexCoord3f,
glMultiTexCoord3fv,
glMultiTexCoord3i,
glMultiTexCoord3iv,
glMultiTexCoord3s,
glMultiTexCoord3sv,
glMultiTexCoord4d,
glMultiTexCoord4dv,
glMultiTexCoord4f,
glMultiTexCoord4fv,
glMultiTexCoord4i,
glMultiTexCoord4iv,
glMultiTexCoord4s,
glMultiTexCoord4sv,
glMultiTexCoordP1ui,
glMultiTexCoordP1uiv,
glMultiTexCoordP2ui,
glMultiTexCoordP2uiv,
glMultiTexCoordP3ui,
glMultiTexCoordP3uiv,
glMultiTexCoordP4ui,
glMultiTexCoordP4uiv,
glNewList,
glNormal3b,
glNormal3bv,
glNormal3d,
glNormal3dv,
glNormal3f,
glNormal3fv,
glNormal3i,
glNormal3iv,
glNormal3s,
glNormal3sv,
glNormalP3ui,
glNormalP3uiv,
glNormalPointer,
glOrtho,
glPassThrough,
glPixelMapfv,
glPixelMapuiv,
glPixelMapusv,
glPixelStoref,
glPixelStorei,
glPixelTransferf,
glPixelTransferi,
glPixelZoom,
glPointParameterf,
glPointParameterfv,
glPointParameteri,
glPointParameteriv,
glPointSize,
glPolygonMode,
glPolygonOffset,
glPolygonStipple,
glPopAttrib,
glPopClientAttrib,
glPopMatrix,
glPopName,
glPrimitiveRestartIndex,
glPrioritizeTextures,
glProvokingVertex,
glPushAttrib,
glPushClientAttrib,
glPushMatrix,
glPushName,
glQueryCounter,
glRasterPos2d,
glRasterPos2dv,
glRasterPos2f,
glRasterPos2fv,
glRasterPos2i,
glRasterPos2iv,
glRasterPos2s,
glRasterPos2sv,
glRasterPos3d,
glRasterPos3dv,
glRasterPos3f,
glRasterPos3fv,
glRasterPos3i,
glRasterPos3iv,
glRasterPos3s,
glRasterPos3sv,
glRasterPos4d,
glRasterPos4dv,
glRasterPos4f,
glRasterPos4fv,
glRasterPos4i,
glRasterPos4iv,
glRasterPos4s,
glRasterPos4sv,
glReadBuffer,
glReadPixels,
glRectd,
glRectdv,
glRectf,
glRectfv,
glRecti,
glRectiv,
glRects,
glRectsv,
glRenderMode,
glRenderbufferStorage,
glRenderbufferStorageMultisample,
glRotated,
glRotatef,
glSampleCoverage,
glSampleMaski,
glSamplerParameterIiv,
glSamplerParameterIuiv,
glSamplerParameterf,
glSamplerParameterfv,
glSamplerParameteri,
glSamplerParameteriv,
glScaled,
glScalef,
glScissor,
glSecondaryColor3b,
glSecondaryColor3bv,
glSecondaryColor3d,
glSecondaryColor3dv,
glSecondaryColor3f,
glSecondaryColor3fv,
glSecondaryColor3i,
glSecondaryColor3iv,
glSecondaryColor3s,
glSecondaryColor3sv,
glSecondaryColor3ub,
glSecondaryColor3ubv,
glSecondaryColor3ui,
glSecondaryColor3uiv,
glSecondaryColor3us,
glSecondaryColor3usv,
glSecondaryColorP3ui,
glSecondaryColorP3uiv,
glSecondaryColorPointer,
glSelectBuffer,
glShadeModel,
glShaderSource,
glStencilFunc,
glStencilFuncSeparate,
glStencilMask,
glStencilMaskSeparate,
glStencilOp,
glStencilOpSeparate,
glTexBuffer,
glTexCoord1d,
glTexCoord1dv,
glTexCoord1f,
glTexCoord1fv,
glTexCoord1i,
glTexCoord1iv,
glTexCoord1s,
glTexCoord1sv,
glTexCoord2d,
glTexCoord2dv,
glTexCoord2f,
glTexCoord2fv,
glTexCoord2i,
glTexCoord2iv,
glTexCoord2s,
glTexCoord2sv,
glTexCoord3d,
glTexCoord3dv,
glTexCoord3f,
glTexCoord3fv,
glTexCoord3i,
glTexCoord3iv,
glTexCoord3s,
glTexCoord3sv,
glTexCoord4d,
glTexCoord4dv,
glTexCoord4f,
glTexCoord4fv,
glTexCoord4i,
glTexCoord4iv,
glTexCoord4s,
glTexCoord4sv,
glTexCoordP1ui,
glTexCoordP1uiv,
glTexCoordP2ui,
glTexCoordP2uiv,
glTexCoordP3ui,
glTexCoordP3uiv,
glTexCoordP4ui,
glTexCoordP4uiv,
glTexCoordPointer,
glTexEnvf,
glTexEnvfv,
glTexEnvi,
glTexEnviv,
glTexGend,
glTexGendv,
glTexGenf,
glTexGenfv,
glTexGeni,
glTexGeniv,
glTexImage1D,
glTexImage2D,
glTexImage2DMultisample,
glTexImage3D,
glTexImage3DMultisample,
glTexParameterIiv,
glTexParameterIuiv,
glTexParameterf,
glTexParameterfv,
glTexParameteri,
glTexParameteriv,
glTexSubImage1D,
glTexSubImage2D,
glTexSubImage3D,
glTransformFeedbackVaryings,
glTranslated,
glTranslatef,
glUniform1f,
glUniform1fv,
glUniform1i,
glUniform1iv,
glUniform1ui,
glUniform1uiv,
glUniform2f,
glUniform2fv,
glUniform2i,
glUniform2iv,
glUniform2ui,
glUniform2uiv,
glUniform3f,
glUniform3fv,
glUniform3i,
glUniform3iv,
glUniform3ui,
glUniform3uiv,
glUniform4f,
glUniform4fv,
glUniform4i,
glUniform4iv,
glUniform4ui,
glUniform4uiv,
glUniformBlockBinding,
glUniformMatrix2fv,
glUniformMatrix2x3fv,
glUniformMatrix2x4fv,
glUniformMatrix3fv,
glUniformMatrix3x2fv,
glUniformMatrix3x4fv,
glUniformMatrix4fv,
glUniformMatrix4x2fv,
glUniformMatrix4x3fv,
glUnmapBuffer,
glUseProgram,
glValidateProgram,
glVertex2d,
glVertex2dv,
glVertex2f,
glVertex2fv,
glVertex2i,
glVertex2iv,
glVertex2s,
glVertex2sv,
glVertex3d,
glVertex3dv,
glVertex3f,
glVertex3fv,
glVertex3i,
glVertex3iv,
glVertex3s,
glVertex3sv,
glVertex4d,
glVertex4dv,
glVertex4f,
glVertex4fv,
glVertex4i,
glVertex4iv,
glVertex4s,
glVertex4sv,
glVertexAttrib1d,
glVertexAttrib1dv,
glVertexAttrib1f,
glVertexAttrib1fv,
glVertexAttrib1s,
glVertexAttrib1sv,
glVertexAttrib2d,
glVertexAttrib2dv,
glVertexAttrib2f,
glVertexAttrib2fv,
glVertexAttrib2s,
glVertexAttrib2sv,
glVertexAttrib3d,
glVertexAttrib3dv,
glVertexAttrib3f,
glVertexAttrib3fv,
glVertexAttrib3s,
glVertexAttrib3sv,
glVertexAttrib4Nbv,
glVertexAttrib4Niv,
glVertexAttrib4Nsv,
glVertexAttrib4Nub,
glVertexAttrib4Nubv,
glVertexAttrib4Nuiv,
glVertexAttrib4Nusv,
glVertexAttrib4bv,
glVertexAttrib4d,
glVertexAttrib4dv,
glVertexAttrib4f,
glVertexAttrib4fv,
glVertexAttrib4iv,
glVertexAttrib4s,
glVertexAttrib4sv,
glVertexAttrib4ubv,
glVertexAttrib4uiv,
glVertexAttrib4usv,
glVertexAttribDivisor,
glVertexAttribI1i,
glVertexAttribI1iv,
glVertexAttribI1ui,
glVertexAttribI1uiv,
glVertexAttribI2i,
glVertexAttribI2iv,
glVertexAttribI2ui,
glVertexAttribI2uiv,
glVertexAttribI3i,
glVertexAttribI3iv,
glVertexAttribI3ui,
glVertexAttribI3uiv,
glVertexAttribI4bv,
glVertexAttribI4i,
glVertexAttribI4iv,
glVertexAttribI4sv,
glVertexAttribI4ubv,
glVertexAttribI4ui,
glVertexAttribI4uiv,
glVertexAttribI4usv,
glVertexAttribIPointer,
glVertexAttribP1ui,
glVertexAttribP1uiv,
glVertexAttribP2ui,
glVertexAttribP2uiv,
glVertexAttribP3ui,
glVertexAttribP3uiv,
glVertexAttribP4ui,
glVertexAttribP4uiv,
glVertexAttribPointer,
glVertexP2ui,
glVertexP2uiv,
glVertexP3ui,
glVertexP3uiv,
glVertexP4ui,
glVertexP4uiv,
glVertexPointer,
glViewport,
glWaitSync,
glWindowPos2d,
glWindowPos2dv,
glWindowPos2f,
glWindowPos2fv,
glWindowPos2i,
glWindowPos2iv,
glWindowPos2s,
glWindowPos2sv,
glWindowPos3d,
glWindowPos3dv,
glWindowPos3f,
glWindowPos3fv,
glWindowPos3i,
glWindowPos3iv,
glWindowPos3s,
glWindowPos3sv
) where
import Graphics.GL.Types
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/Compatibility33.hs | bsd-3-clause | 52,653 | 0 | 5 | 7,325 | 8,530 | 5,275 | 3,255 | 2,001 | 0 |
module SubListKata.Day6 (sublist, Result(..)) where
import Data.List(isInfixOf)
data Result = Equal | Unequal | Sublist | Superlist
deriving (Eq, Show)
sublist :: (Ord a) => [a] -> [a] -> Result
sublist [] [] = Equal
sublist [] _ = Sublist
sublist _ [] = Superlist
sublist l1 l2
| l1 == l2 = Equal
| isInfixOf l1 l2 = Sublist
| isInfixOf l2 l1 = Superlist
| otherwise = Unequal
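-- A minimal usage sketch (added for illustration; not part of the original
-- kata file). The calls below use only 'sublist' and the 'Result'
-- constructors defined above:
--
-- >>> sublist [1,2,3] [0,1,2,3,4]
-- Sublist
-- >>> sublist [0,1,2,3,4] [1,2,3]
-- Superlist
-- >>> sublist "abc" "abc"
-- Equal
-- >>> sublist [1,2,3] [3,2,1]
-- Unequal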
| Alex-Diez/haskell-tdd-kata | old-katas/src/SubListKata/Day6.hs | bsd-3-clause | 458 | 0 | 8 | 150 | 178 | 94 | 84 | 13 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Test.Snooze.Arbitrary where
import Disorder.Corpus
import P
import Snooze.Balance.Data
import Snooze.Url
import Test.QuickCheck
import Test.QuickCheck.Instances ()
instance Arbitrary BalanceTable where
arbitrary = BalanceTable
<$> arbitrary
instance Arbitrary BalanceEntry where
arbitrary = BalanceEntry
<$> arbitrary
<*> arbitrary
instance Arbitrary Host where
arbitrary = Host
<$> elements muppets
instance Arbitrary Port where
arbitrary = Port
<$> fmap getPositive arbitrary
instance Arbitrary Path where
arbitrary = path <$> arbitrary
| ambiata/snooze | test/Test/Snooze/Arbitrary.hs | bsd-3-clause | 719 | 0 | 7 | 174 | 139 | 78 | 61 | 24 | 0 |
{-# LANGUAGE EmptyDataDecls, FlexibleInstances, MultiParamTypeClasses, FlexibleContexts #-}
module Lang.JavaScript where
import Generic.Control.Function
import Generic.Data.Bool
import Generic.Data.Either
import Generic.Data.List hiding ((++))
import Generic.Data.Maybe
import Generic.Data.Number
import Generic.Data.Tuple
import Lang.Value
import Prelude ((++))
import qualified Prelude
data JS
type JavaScript a = Val JS a
-- * JavaScript instances for AwesomePrelude datatypes.
instance NameC (Val JS) where
named s a = s `Name` a
instance FunC (Val JS) where
lam f = Lam f
app f g = App f g
fix f = fun1 "fix" (\[v] -> "fix = arguments.callee, " ++ v ++ "(function (i) { return fix(" ++ v ++ ")(i) })") (lam f)
instance BoolC (Val JS) where
true = Con "true"
false = Con "false"
bool x y z = fun3 "bool" (\[t, e, b] -> b ++ " ? " ++ t ++ "(/*force*/) : " ++ e ++ "(/*force*/)") (lam (const x)) (lam (const y)) z
instance NumC (Val JS) where
(+) = fun2 "add" (\[a, b] -> a ++ " + " ++ b)
(-) = fun2 "sub" (\[a, b] -> a ++ " - " ++ b)
(*) = fun2 "mul" (\[a, b] -> a ++ " * " ++ b)
(/) = fun2 "div" (\[a, b] -> a ++ " / " ++ b)
num x = Con (Prelude.show x)
instance MaybeC (Val JS) where
nothing = Con "{ nothing : 1 }"
just = fun1 "just" (\[x] -> "{ just : " ++ x ++ " }")
maybe p q = fun3 "maybe" (\[n, j, m] -> m ++ ".nothing ? " ++ n ++ " : " ++ j ++ "(" ++ m ++ ".just)") p (lam q)
instance TupleC (Val JS) where
mkTuple = fun2 "mkTuple" (\[a, b] -> "{ fst : " ++ a ++ ", snd : " ++ b ++ "}")
tuple p q = fun2 "tuple" (\[f, t] -> f ++ "(" ++ t ++ ".fst, " ++ t ++ ".snd)") (lam2 p) q
instance EitherC (Val JS) where
left = fun1 "left" (\[l] -> "{ left : " ++ l ++ " }")
right = fun1 "right" (\[r] -> "{ right : " ++ r ++ " }")
either p q = fun3 "either" (\[l, r, e] -> e ++ ".left ? " ++ l ++ "(" ++ e ++ ".left) : " ++ r ++ "(" ++ e ++ ".right)") (lam p) (lam q)
instance ListC (Val JS) where
nil = Con "{ nil : 1 }"
cons = fun2 "cons" (\[x, xs] -> "{ head : " ++ x ++ ", tail : " ++ xs ++ " }")
list b f = fun3 "list" (\[n, c, xs] -> xs ++ ".nil ? " ++ n ++ " : " ++ c ++ "(" ++ xs ++ ".head)(" ++ xs ++ ".tail)") b (lam2 f)
-- * JavaScript instances of AwesomePrelude type classes.
instance Eq (Val JS) Bool where
(==) = fun2 "eq" (\[a, b] -> a ++ " == " ++ b)
(/=) = fun2 "neq" (\[a, b] -> a ++ " /= " ++ b)
instance Eq (Val JS) Num where
(==) = fun2 "eq" (\[a, b] -> a ++ " == " ++ b)
(/=) = fun2 "neq" (\[a, b] -> a ++ " /= " ++ b)
instance (Eq (Val JS) a, Eq (Val JS) b) => Eq (Val JS) (a, b) where
(==) = fun2 "eq" (\[a, b] -> a ++ " == " ++ b)
(/=) = fun2 "neq" (\[a, b] -> a ++ " /= " ++ b)
instance Eq (Val JS) a => Eq (Val JS) [a] where
(==) = fun2 "eq" (\[a, b] -> a ++ " == " ++ b)
(/=) = fun2 "neq" (\[a, b] -> a ++ " /= " ++ b)
| sebastiaanvisser/AwesomePrelude | src/Lang/JavaScript.hs | bsd-3-clause | 2,882 | 0 | 17 | 764 | 1,415 | 780 | 635 | -1 | -1 |
module SECD.LambdaCalculusTests(allLambdaCalculusTests) where
import SECD.LambdaCalculus
import SECD.SECDCode
import SECD.TestUtils
allLambdaCalculusTests = do
testFunction (fst . toSECD 0 emptyEnv) toSECDCases
toSECDCases =
[(intVal 5, [int 5]),
(boolVal True, [bool True]),
(floatVal (-12.3), [float (-12.3)]),
(app (var "int_sub") [intVal 4, intVal (-23)],
[int 0, int 4, cons, int (-23), cons, builtin "int_sub", apply]),
(lambda ["no", "yes"] (app (var "int_add") [var "no", var "yes"]),
[closure
[int 0, access 0 0, cons, access 0 1, cons, builtin "int_add", apply, ret]]),
(lambda ["oooh"] (floatVal 9.3), [closure [float 9.3, ret]]),
(ifThenElse (boolVal True) (intVal 0) (intVal 2),
[bool True, condJump 0, int 2, jump 1, label 0, int 0, label 1]),
(letRec [("sub", (lambda ["n"]
(ifThenElse (var "n") (app (var "sub") [var "n"]) (boolVal True))))]
(app (var "sub") [boolVal True]),
[dum, int 0, closure [access 0 0, condJump 0, bool True, jump 1, label 0,
int 0, access 0 0, cons, access 1 0, apply, label 1, ret], cons,
closure [int 0, bool True, cons, access 0 0, apply, ret], rap])]
| dillonhuff/SECD | test/SECD/LambdaCalculusTests.hs | bsd-3-clause | 1,167 | 0 | 17 | 236 | 622 | 336 | 286 | 24 | 1 |
module Channel.Event where
import Type
data Event =
Join UserName
| Message UserName String
| Kick UserName UserName (Maybe String)
| beni55/alonzo | src/Channel/Event.hs | mit | 152 | 0 | 8 | 40 | 40 | 23 | 17 | 6 | 0 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable, DeriveFunctor, DeriveFoldable,
DeriveTraversable #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- Note [Pass sensitive types]
-- in module PlaceHolder
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleInstances #-}
-- | Abstract syntax of global declarations.
--
-- Definitions for: @SynDecl@ and @ConDecl@, @ClassDecl@,
-- @InstDecl@, @DefaultDecl@ and @ForeignDecl@.
module ETA.HsSyn.HsDecls (
-- * Toplevel declarations
HsDecl(..), LHsDecl, HsDataDefn(..),
-- ** Class or type declarations
TyClDecl(..), LTyClDecl,
TyClGroup(..), tyClGroupConcat, mkTyClGroup,
isClassDecl, isDataDecl, isSynDecl, tcdName,
isFamilyDecl, isTypeFamilyDecl, isDataFamilyDecl,
isOpenTypeFamilyInfo, isClosedTypeFamilyInfo,
tyFamInstDeclName, tyFamInstDeclLName,
countTyClDecls, pprTyClDeclFlavour,
tyClDeclLName, tyClDeclTyVars,
hsDeclHasCusk, famDeclHasCusk,
FamilyDecl(..), LFamilyDecl,
-- ** Instance declarations
InstDecl(..), LInstDecl, NewOrData(..), FamilyInfo(..),
TyFamInstDecl(..), LTyFamInstDecl, instDeclDataFamInsts,
DataFamInstDecl(..), LDataFamInstDecl, pprDataFamInstFlavour,
TyFamEqn(..), TyFamInstEqn, LTyFamInstEqn, TyFamDefltEqn, LTyFamDefltEqn,
HsTyPats,
LClsInstDecl, ClsInstDecl(..),
-- ** Standalone deriving declarations
DerivDecl(..), LDerivDecl,
-- ** @RULE@ declarations
LRuleDecls,RuleDecls(..),RuleDecl(..), LRuleDecl, RuleBndr(..),LRuleBndr,
collectRuleBndrSigTys,
flattenRuleDecls,
-- ** @VECTORISE@ declarations
VectDecl(..), LVectDecl,
lvectDeclName, lvectInstDecl,
-- ** @default@ declarations
DefaultDecl(..), LDefaultDecl,
-- ** Template haskell declaration splice
SpliceExplicitFlag(..),
SpliceDecl(..), LSpliceDecl,
-- ** Foreign function interface declarations
ForeignDecl(..), LForeignDecl, ForeignImport(..), ForeignExport(..),
noForeignImportCoercionYet, noForeignExportCoercionYet,
CImportSpec(..),
-- ** Data-constructor declarations
ConDecl(..), LConDecl, ResType(..),
HsConDeclDetails, hsConDeclArgTys,
-- ** Document comments
DocDecl(..), LDocDecl, docDeclDoc,
-- ** Deprecations
WarnDecl(..), LWarnDecl,
WarnDecls(..), LWarnDecls,
-- ** Annotations
AnnDecl(..), LAnnDecl,
AnnProvenance(..), annProvenanceName_maybe,
-- ** Role annotations
RoleAnnotDecl(..), LRoleAnnotDecl, roleAnnotDeclName,
-- * Grouping
HsGroup(..), emptyRdrGroup, emptyRnGroup, appendGroups
) where
-- friends:
import {-# SOURCE #-} ETA.HsSyn.HsExpr( LHsExpr, HsExpr, HsSplice, pprExpr, pprUntypedSplice )
-- Because Expr imports Decls via HsBracket
import ETA.HsSyn.HsBinds
import ETA.HsSyn.HsPat
import ETA.HsSyn.HsTypes
import ETA.HsSyn.HsDoc
import ETA.Types.TyCon
import ETA.BasicTypes.Name
import ETA.BasicTypes.BasicTypes
import ETA.Types.Coercion
import ETA.Prelude.ForeignCall
import ETA.HsSyn.PlaceHolder ( PostTc,PostRn,PlaceHolder(..),DataId )
import ETA.BasicTypes.NameSet
-- others:
import ETA.Types.InstEnv
import ETA.Types.Class
import ETA.Utils.Outputable
import ETA.Utils.Util
import ETA.BasicTypes.SrcLoc
import ETA.Utils.FastString
import ETA.Utils.Bag
import Data.Data hiding (TyCon,Fixity)
#if __GLASGOW_HASKELL__ < 709
import Data.Foldable ( Foldable )
import Data.Traversable ( Traversable )
#endif
import Data.Maybe
{-
************************************************************************
* *
\subsection[HsDecl]{Declarations}
* *
************************************************************************
-}
type LHsDecl id = Located (HsDecl id)
-- ^ When in a list this may have
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi'
--
-- For details on above see note [Api annotations] in ApiAnnotation
-- | A Haskell Declaration
data HsDecl id
= TyClD (TyClDecl id) -- ^ A type or class declaration.
| InstD (InstDecl id) -- ^ An instance declaration.
| DerivD (DerivDecl id)
| ValD (HsBind id)
| SigD (Sig id)
| DefD (DefaultDecl id)
| ForD (ForeignDecl id)
| WarningD (WarnDecls id)
| AnnD (AnnDecl id)
| RuleD (RuleDecls id)
| VectD (VectDecl id)
| SpliceD (SpliceDecl id)
| DocD (DocDecl)
| QuasiQuoteD (HsQuasiQuote id)
| RoleAnnotD (RoleAnnotDecl id)
deriving (Typeable)
deriving instance (DataId id) => Data (HsDecl id)
-- NB: all top-level fixity decls are contained EITHER in SigDs
-- OR in the ClassDecls in TyClDs
--
-- The former covers
-- a) data constructors
-- b) class methods (but they can be also done in the
-- signatures of class decls)
-- c) imported functions (that have an IfacSig)
-- d) top level decls
--
-- The latter is for class methods only
-- | A 'HsDecl' is categorised into a 'HsGroup' before being
-- fed to the renamer.
data HsGroup id
= HsGroup {
hs_valds :: HsValBinds id,
hs_splcds :: [LSpliceDecl id],
hs_tyclds :: [TyClGroup id],
-- A list of mutually-recursive groups
-- No family-instances here; they are in hs_instds
-- Parser generates a singleton list;
-- renamer does dependency analysis
hs_instds :: [LInstDecl id],
-- Both class and family instance declarations in here
hs_derivds :: [LDerivDecl id],
hs_fixds :: [LFixitySig id],
-- Snaffled out of both top-level fixity signatures,
-- and those in class declarations
hs_defds :: [LDefaultDecl id],
hs_fords :: [LForeignDecl id],
hs_warnds :: [LWarnDecls id],
hs_annds :: [LAnnDecl id],
hs_ruleds :: [LRuleDecls id],
hs_vects :: [LVectDecl id],
hs_docs :: [LDocDecl]
} deriving (Typeable)
deriving instance (DataId id) => Data (HsGroup id)
emptyGroup, emptyRdrGroup, emptyRnGroup :: HsGroup a
emptyRdrGroup = emptyGroup { hs_valds = emptyValBindsIn }
emptyRnGroup = emptyGroup { hs_valds = emptyValBindsOut }
emptyGroup = HsGroup { hs_tyclds = [], hs_instds = [],
hs_derivds = [],
hs_fixds = [], hs_defds = [], hs_annds = [],
hs_fords = [], hs_warnds = [], hs_ruleds = [], hs_vects = [],
hs_valds = error "emptyGroup hs_valds: Can't happen",
hs_splcds = [],
hs_docs = [] }
appendGroups :: HsGroup a -> HsGroup a -> HsGroup a
appendGroups
HsGroup {
hs_valds = val_groups1,
hs_splcds = spliceds1,
hs_tyclds = tyclds1,
hs_instds = instds1,
hs_derivds = derivds1,
hs_fixds = fixds1,
hs_defds = defds1,
hs_annds = annds1,
hs_fords = fords1,
hs_warnds = warnds1,
hs_ruleds = rulds1,
hs_vects = vects1,
hs_docs = docs1 }
HsGroup {
hs_valds = val_groups2,
hs_splcds = spliceds2,
hs_tyclds = tyclds2,
hs_instds = instds2,
hs_derivds = derivds2,
hs_fixds = fixds2,
hs_defds = defds2,
hs_annds = annds2,
hs_fords = fords2,
hs_warnds = warnds2,
hs_ruleds = rulds2,
hs_vects = vects2,
hs_docs = docs2 }
=
HsGroup {
hs_valds = val_groups1 `plusHsValBinds` val_groups2,
hs_splcds = spliceds1 ++ spliceds2,
hs_tyclds = tyclds1 ++ tyclds2,
hs_instds = instds1 ++ instds2,
hs_derivds = derivds1 ++ derivds2,
hs_fixds = fixds1 ++ fixds2,
hs_annds = annds1 ++ annds2,
hs_defds = defds1 ++ defds2,
hs_fords = fords1 ++ fords2,
hs_warnds = warnds1 ++ warnds2,
hs_ruleds = rulds1 ++ rulds2,
hs_vects = vects1 ++ vects2,
hs_docs = docs1 ++ docs2 }
instance OutputableBndr name => Outputable (HsDecl name) where
ppr (TyClD dcl) = ppr dcl
ppr (ValD binds) = ppr binds
ppr (DefD def) = ppr def
ppr (InstD inst) = ppr inst
ppr (DerivD deriv) = ppr deriv
ppr (ForD fd) = ppr fd
ppr (SigD sd) = ppr sd
ppr (RuleD rd) = ppr rd
ppr (VectD vect) = ppr vect
ppr (WarningD wd) = ppr wd
ppr (AnnD ad) = ppr ad
ppr (SpliceD dd) = ppr dd
ppr (DocD doc) = ppr doc
ppr (QuasiQuoteD qq) = ppr qq
ppr (RoleAnnotD ra) = ppr ra
instance OutputableBndr name => Outputable (HsGroup name) where
ppr (HsGroup { hs_valds = val_decls,
hs_tyclds = tycl_decls,
hs_instds = inst_decls,
hs_derivds = deriv_decls,
hs_fixds = fix_decls,
hs_warnds = deprec_decls,
hs_annds = ann_decls,
hs_fords = foreign_decls,
hs_defds = default_decls,
hs_ruleds = rule_decls,
hs_vects = vect_decls })
= vcat_mb empty
[ppr_ds fix_decls, ppr_ds default_decls,
ppr_ds deprec_decls, ppr_ds ann_decls,
ppr_ds rule_decls,
ppr_ds vect_decls,
if isEmptyValBinds val_decls
then Nothing
else Just (ppr val_decls),
ppr_ds (tyClGroupConcat tycl_decls),
ppr_ds inst_decls,
ppr_ds deriv_decls,
ppr_ds foreign_decls]
where
ppr_ds :: Outputable a => [a] -> Maybe SDoc
ppr_ds [] = Nothing
ppr_ds ds = Just (vcat (map ppr ds))
vcat_mb :: SDoc -> [Maybe SDoc] -> SDoc
-- Concatenate vertically with white-space between non-blanks
vcat_mb _ [] = empty
vcat_mb gap (Nothing : ds) = vcat_mb gap ds
vcat_mb gap (Just d : ds) = gap $$ d $$ vcat_mb blankLine ds
data SpliceExplicitFlag = ExplicitSplice | -- <=> $(f x y)
ImplicitSplice -- <=> f x y, i.e. a naked top level expression
deriving (Data, Typeable)
type LSpliceDecl name = Located (SpliceDecl name)
data SpliceDecl id
= SpliceDecl -- Top level splice
(Located (HsSplice id))
SpliceExplicitFlag
deriving (Typeable)
deriving instance (DataId id) => Data (SpliceDecl id)
instance OutputableBndr name => Outputable (SpliceDecl name) where
ppr (SpliceDecl (L _ e) _) = pprUntypedSplice e
{-
************************************************************************
* *
\subsection[SynDecl]{@data@, @newtype@ or @type@ (synonym) type declaration}
* *
************************************************************************
--------------------------------
THE NAMING STORY
--------------------------------
Here is the story about the implicit names that go with type, class,
and instance decls. It's a bit tricky, so pay attention!
"Implicit" (or "system") binders
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Each data type decl defines
a worker name for each constructor
to-T and from-T convertors
Each class decl defines
a tycon for the class
a data constructor for that tycon
the worker for that constructor
a selector for each superclass
All have occurrence names that are derived uniquely from their parent
declaration.
None of these get separate definitions in an interface file; they are
fully defined by the data or class decl. But they may *occur* in
interface files, of course. Any such occurrence must haul in the
relevant type or class decl.
Plan of attack:
- Ensure they "point to" the parent data/class decl
when loading that decl from an interface file
(See RnHiFiles.getSysBinders)
- When typechecking the decl, we build the implicit TyCons and Ids.
When doing so we look them up in the name cache (RnEnv.lookupSysName),
to ensure correct module and provenance is set
These are the two places that we have to conjure up the magic derived
names. (The actual magic is in OccName.mkWorkerOcc, etc.)
Default methods
~~~~~~~~~~~~~~~
- Occurrence name is derived uniquely from the method name
E.g. $dmmax
- If there is a default method name at all, it's recorded in
the ClassOpSig (in HsBinds), in the DefMeth field.
(DefMeth is defined in Class.lhs)
Source-code class decls and interface-code class decls are treated subtly
differently, which has given me a great deal of confusion over the years.
Here's the deal. (We distinguish the two cases because source-code decls
have (Just binds) in the tcdMeths field, whereas interface decls have Nothing.
In *source-code* class declarations:
- When parsing, every ClassOpSig gets a DefMeth with a suitable RdrName
This is done by RdrHsSyn.mkClassOpSigDM
- The renamer renames it to a Name
- During typechecking, we generate a binding for each $dm for
which there's a programmer-supplied default method:
class Foo a where
op1 :: <type>
op2 :: <type>
op1 = ...
We generate a binding for $dmop1 but not for $dmop2.
The Class for Foo has a NoDefMeth for op2 and a DefMeth for op1.
The Name for $dmop2 is simply discarded.
In *interface-file* class declarations:
- When parsing, we see if there's an explicit programmer-supplied default method
because there's an '=' sign to indicate it:
class Foo a where
op1 = :: <type> -- NB the '='
op2 :: <type>
We use this info to generate a DefMeth with a suitable RdrName for op1,
and a NoDefMeth for op2
- The interface file has a separate definition for $dmop1, with unfolding etc.
- The renamer renames it to a Name.
- The renamer treats $dmop1 as a free variable of the declaration, so that
the binding for $dmop1 will be sucked in. (See RnHsSyn.tyClDeclFVs)
This doesn't happen for source code class decls, because they *bind* the default method.
Dictionary functions
~~~~~~~~~~~~~~~~~~~~
Each instance declaration gives rise to one dictionary function binding.
The type checker makes up new source-code instance declarations
(e.g. from 'deriving' or generic default methods --- see
TcInstDcls.tcInstDecls1). So we can't generate the names for
dictionary functions in advance (we don't know how many we need).
On the other hand for interface-file instance declarations, the decl
specifies the name of the dictionary function, and it has a binding elsewhere
in the interface file:
instance {Eq Int} = dEqInt
dEqInt :: {Eq Int} <pragma info>
So again we treat source code and interface file code slightly differently.
Source code:
- Source code instance decls have a Nothing in the (Maybe name) field
(see data InstDecl below)
- The typechecker makes up a Local name for the dict fun for any source-code
instance decl, whether it comes from a source-code instance decl, or whether
the instance decl is derived from some other construct (e.g. 'deriving').
- The occurrence name it chooses is derived from the instance decl (just for
documentation really) --- e.g. dNumInt. Two dict funs may share a common
occurrence name, but will have different uniques. E.g.
instance Foo [Int] where ...
instance Foo [Bool] where ...
These might both be dFooList
- The CoreTidy phase externalises the name, and ensures the occurrence name is
unique (this isn't special to dict funs). So we'd get dFooList and dFooList1.
- We can take this relaxed approach (changing the occurrence name later)
because dict fun Ids are not captured in a TyCon or Class (unlike default
methods, say). Instead, they are kept separately in the InstEnv. This
makes it easy to adjust them after compiling a module. (Once we've finished
compiling that module, they don't change any more.)
Interface file code:
- The instance decl gives the dict fun name, so the InstDecl has a (Just name)
in the (Maybe name) field.
- RnHsSyn.instDeclFVs treats the dict fun name as free in the decl, so that we
suck in the dfun binding
-}
type LTyClDecl name = Located (TyClDecl name)
-- | A type or class declaration.
data TyClDecl name
= -- | @type/data family T :: *->*@
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnData',
-- 'ApiAnnotation.AnnFamily','ApiAnnotation.AnnDcolon',
-- 'ApiAnnotation.AnnWhere',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnDcolon',
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
FamDecl { tcdFam :: FamilyDecl name }
| -- | @type@ declaration
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnEqual',
-- For details on above see note [Api annotations] in ApiAnnotation
SynDecl { tcdLName :: Located name -- ^ Type constructor
, tcdTyVars :: LHsTyVarBndrs name -- ^ Type variables; for an associated type
-- these include outer binders
, tcdRhs :: LHsType name -- ^ RHS of type declaration
, tcdFVs :: PostRn name NameSet }
| -- | @data@ declaration
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnData',
-- 'ApiAnnotation.AnnFamily',
-- 'ApiAnnotation.AnnNewType',
-- 'ApiAnnotation.AnnNewType','ApiAnnotation.AnnDcolon'
-- 'ApiAnnotation.AnnWhere',
-- For details on above see note [Api annotations] in ApiAnnotation
DataDecl { tcdLName :: Located name -- ^ Type constructor
, tcdTyVars :: LHsTyVarBndrs name -- ^ Type variables; for an associated type
-- these include outer binders
-- Eg class T a where
-- type F a :: *
-- type F a = a -> a
-- Here the type decl for 'F' includes 'a'
-- in its tcdTyVars
, tcdDataDefn :: HsDataDefn name
, tcdFVs :: PostRn name NameSet }
| ClassDecl { tcdCtxt :: LHsContext name, -- ^ Context...
tcdLName :: Located name, -- ^ Name of the class
tcdTyVars :: LHsTyVarBndrs name, -- ^ Class type variables
tcdFDs :: [Located (FunDep (Located name))],
-- ^ Functional deps
tcdSigs :: [LSig name], -- ^ Methods' signatures
tcdMeths :: LHsBinds name, -- ^ Default methods
tcdATs :: [LFamilyDecl name], -- ^ Associated types;
tcdATDefs :: [LTyFamDefltEqn name], -- ^ Associated type defaults
tcdDocs :: [LDocDecl], -- ^ Haddock docs
tcdFVs :: PostRn name NameSet
}
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnClass',
-- 'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
-- - The tcdFDs will have 'ApiAnnotation.AnnVbar',
-- 'ApiAnnotation.AnnComma'
-- 'ApiAnnotation.AnnRarrow'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId id) => Data (TyClDecl id)
-- This is used in TcTyClsDecls to represent
-- strongly connected components of decls
-- No family instances in here
-- The role annotations must be grouped with their decls for the
-- type-checker to infer roles correctly
data TyClGroup name
= TyClGroup { group_tyclds :: [LTyClDecl name]
, group_roles :: [LRoleAnnotDecl name] }
deriving (Typeable)
deriving instance (DataId id) => Data (TyClGroup id)
tyClGroupConcat :: [TyClGroup name] -> [LTyClDecl name]
tyClGroupConcat = concatMap group_tyclds
mkTyClGroup :: [LTyClDecl name] -> TyClGroup name
mkTyClGroup decls = TyClGroup { group_tyclds = decls, group_roles = [] }
type LFamilyDecl name = Located (FamilyDecl name)
data FamilyDecl name = FamilyDecl
{ fdInfo :: FamilyInfo name -- type or data, closed or open
, fdLName :: Located name -- type constructor
, fdTyVars :: LHsTyVarBndrs name -- type variables
, fdKindSig :: Maybe (LHsKind name) } -- result kind
deriving( Typeable )
deriving instance (DataId id) => Data (FamilyDecl id)
data FamilyInfo name
= DataFamily
| OpenTypeFamily
-- this list might be empty, if we're in an hs-boot file and the user
-- said "type family Foo x where .."
| ClosedTypeFamily [LTyFamInstEqn name]
deriving( Typeable )
deriving instance (DataId name) => Data (FamilyInfo name)
{-
------------------------------
Simple classifiers
-}
-- | @True@ <=> argument is a @data@\/@newtype@
-- declaration.
isDataDecl :: TyClDecl name -> Bool
isDataDecl (DataDecl {}) = True
isDataDecl _other = False
-- | type or type instance declaration
isSynDecl :: TyClDecl name -> Bool
isSynDecl (SynDecl {}) = True
isSynDecl _other = False
-- | type class
isClassDecl :: TyClDecl name -> Bool
isClassDecl (ClassDecl {}) = True
isClassDecl _ = False
-- | type/data family declaration
isFamilyDecl :: TyClDecl name -> Bool
isFamilyDecl (FamDecl {}) = True
isFamilyDecl _other = False
-- | type family declaration
isTypeFamilyDecl :: TyClDecl name -> Bool
isTypeFamilyDecl (FamDecl (FamilyDecl { fdInfo = info })) = case info of
OpenTypeFamily -> True
ClosedTypeFamily {} -> True
_ -> False
isTypeFamilyDecl _ = False
-- | open type family info
isOpenTypeFamilyInfo :: FamilyInfo name -> Bool
isOpenTypeFamilyInfo OpenTypeFamily = True
isOpenTypeFamilyInfo _ = False
-- | closed type family info
isClosedTypeFamilyInfo :: FamilyInfo name -> Bool
isClosedTypeFamilyInfo (ClosedTypeFamily {}) = True
isClosedTypeFamilyInfo _ = False
-- | data family declaration
isDataFamilyDecl :: TyClDecl name -> Bool
isDataFamilyDecl (FamDecl (FamilyDecl { fdInfo = DataFamily })) = True
isDataFamilyDecl _other = False
-- Dealing with names
tyFamInstDeclName :: OutputableBndr name
=> TyFamInstDecl name -> name
tyFamInstDeclName = unLoc . tyFamInstDeclLName
tyFamInstDeclLName :: OutputableBndr name
=> TyFamInstDecl name -> Located name
tyFamInstDeclLName (TyFamInstDecl { tfid_eqn =
(L _ (TyFamEqn { tfe_tycon = ln })) })
= ln
tyClDeclLName :: TyClDecl name -> Located name
tyClDeclLName (FamDecl { tcdFam = FamilyDecl { fdLName = ln } }) = ln
tyClDeclLName decl = tcdLName decl
tcdName :: TyClDecl name -> name
tcdName = unLoc . tyClDeclLName
tyClDeclTyVars :: OutputableBndr name => TyClDecl name -> LHsTyVarBndrs name
tyClDeclTyVars (FamDecl { tcdFam = FamilyDecl { fdTyVars = tvs } }) = tvs
tyClDeclTyVars d = tcdTyVars d
countTyClDecls :: [TyClDecl name] -> (Int, Int, Int, Int, Int)
-- class, synonym decls, data, newtype, family decls
countTyClDecls decls
= (count isClassDecl decls,
count isSynDecl decls, -- excluding...
count isDataTy decls, -- ...family...
count isNewTy decls, -- ...instances
count isFamilyDecl decls)
where
isDataTy DataDecl{ tcdDataDefn = HsDataDefn { dd_ND = DataType } } = True
isDataTy _ = False
isNewTy DataDecl{ tcdDataDefn = HsDataDefn { dd_ND = NewType } } = True
isNewTy _ = False
-- | Does this declaration have a complete, user-supplied kind signature?
-- See Note [Complete user-supplied kind signatures]
hsDeclHasCusk :: TyClDecl name -> Bool
hsDeclHasCusk (FamDecl { tcdFam = fam_decl }) = famDeclHasCusk fam_decl
hsDeclHasCusk (SynDecl { tcdTyVars = tyvars, tcdRhs = rhs })
= hsTvbAllKinded tyvars && rhs_annotated rhs
where
rhs_annotated (L _ ty) = case ty of
HsParTy lty -> rhs_annotated lty
HsKindSig {} -> True
_ -> False
hsDeclHasCusk (DataDecl { tcdTyVars = tyvars }) = hsTvbAllKinded tyvars
hsDeclHasCusk (ClassDecl { tcdTyVars = tyvars }) = hsTvbAllKinded tyvars
-- | Does this family declaration have a complete, user-supplied kind signature?
famDeclHasCusk :: FamilyDecl name -> Bool
famDeclHasCusk (FamilyDecl { fdInfo = ClosedTypeFamily _
, fdTyVars = tyvars
, fdKindSig = m_sig })
= hsTvbAllKinded tyvars && isJust m_sig
famDeclHasCusk _ = True -- all open families have CUSKs!
{-
Note [Complete user-supplied kind signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We kind-check declarations differently if they have a complete, user-supplied
kind signature (CUSK). This is because we can safely generalise a CUSKed
declaration before checking all of the others, supporting polymorphic recursion.
See https://ghc.haskell.org/trac/ghc/wiki/GhcKinds/KindInference#Proposednewstrategy
and #9200 for lots of discussion of how we got here.
A declaration has a CUSK if we can know its complete kind without doing any inference,
at all. Here are the rules:
- A class or datatype is said to have a CUSK if and only if all of its type
variables are annotated. Its result kind is, by construction, Constraint or *
respectively.
- A type synonym has a CUSK if and only if all of its type variables and its
RHS are annotated with kinds.
- A closed type family is said to have a CUSK if and only if all of its type
variables and its return type are annotated.
- An open type family always has a CUSK -- unannotated type variables (and return type) default to *.
-}
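{- A hedged illustration of the rules above (added for this note; the
   declarations are schematic and hypothetical, not part of this module):

     data T1 (a :: *) (f :: * -> *) = MkT1 (f a)   -- every tyvar annotated: CUSK
     data T2 a f                    = MkT2 (f a)   -- unannotated tyvars: no CUSK

     type S1 (a :: *) = (Maybe a :: *)             -- tyvars and RHS annotated: CUSK
     type S2 a        = Maybe a                    -- no CUSK

     type family F1 (a :: *) :: * where ...        -- closed family, fully annotated: CUSK
     type family F2 a where ...                    -- closed family, no CUSK
     type family F3 a                              -- open family: always a CUSK
-}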
instance OutputableBndr name
=> Outputable (TyClDecl name) where
ppr (FamDecl { tcdFam = decl }) = ppr decl
ppr (SynDecl { tcdLName = ltycon, tcdTyVars = tyvars, tcdRhs = rhs })
= hang (ptext (sLit "type") <+>
pp_vanilla_decl_head ltycon tyvars [] <+> equals)
4 (ppr rhs)
ppr (DataDecl { tcdLName = ltycon, tcdTyVars = tyvars, tcdDataDefn = defn })
= pp_data_defn (pp_vanilla_decl_head ltycon tyvars) defn
ppr (ClassDecl {tcdCtxt = context, tcdLName = lclas, tcdTyVars = tyvars,
tcdFDs = fds,
tcdSigs = sigs, tcdMeths = methods,
tcdATs = ats, tcdATDefs = at_defs})
| null sigs && isEmptyBag methods && null ats && null at_defs -- No "where" part
= top_matter
| otherwise -- Laid out
= vcat [ top_matter <+> ptext (sLit "where")
, nest 2 $ pprDeclList (map ppr ats ++
map ppr_fam_deflt_eqn at_defs ++
pprLHsBindsForUser methods sigs) ]
where
top_matter = ptext (sLit "class")
<+> pp_vanilla_decl_head lclas tyvars (unLoc context)
<+> pprFundeps (map unLoc fds)
instance OutputableBndr name => Outputable (TyClGroup name) where
ppr (TyClGroup { group_tyclds = tyclds, group_roles = roles })
= ppr tyclds $$
ppr roles
instance (OutputableBndr name) => Outputable (FamilyDecl name) where
ppr (FamilyDecl { fdInfo = info, fdLName = ltycon,
fdTyVars = tyvars, fdKindSig = mb_kind})
= vcat [ pprFlavour info <+> pp_vanilla_decl_head ltycon tyvars [] <+> pp_kind <+> pp_where
, nest 2 $ pp_eqns ]
where
pp_kind = case mb_kind of
Nothing -> empty
Just kind -> dcolon <+> ppr kind
(pp_where, pp_eqns) = case info of
ClosedTypeFamily eqns -> ( ptext (sLit "where")
, if null eqns
then ptext (sLit "..")
else vcat $ map ppr_fam_inst_eqn eqns )
_ -> (empty, empty)
pprFlavour :: FamilyInfo name -> SDoc
pprFlavour DataFamily = ptext (sLit "data family")
pprFlavour OpenTypeFamily = ptext (sLit "type family")
pprFlavour (ClosedTypeFamily {}) = ptext (sLit "type family")
instance Outputable (FamilyInfo name) where
ppr = pprFlavour
pp_vanilla_decl_head :: OutputableBndr name
=> Located name
-> LHsTyVarBndrs name
-> HsContext name
-> SDoc
pp_vanilla_decl_head thing tyvars context
= hsep [pprHsContext context, pprPrefixOcc (unLoc thing), ppr tyvars]
pp_fam_inst_lhs :: OutputableBndr name
=> Located name
-> HsTyPats name
-> HsContext name
-> SDoc
pp_fam_inst_lhs thing (HsWB { hswb_cts = typats }) context -- explicit type patterns
= hsep [ pprHsContext context, pprPrefixOcc (unLoc thing)
, hsep (map (pprParendHsType.unLoc) typats)]
pprTyClDeclFlavour :: TyClDecl a -> SDoc
pprTyClDeclFlavour (ClassDecl {}) = ptext (sLit "class")
pprTyClDeclFlavour (SynDecl {}) = ptext (sLit "type")
pprTyClDeclFlavour (FamDecl { tcdFam = FamilyDecl { fdInfo = info }})
= pprFlavour info
pprTyClDeclFlavour (DataDecl { tcdDataDefn = HsDataDefn { dd_ND = nd } })
= ppr nd
{-
************************************************************************
* *
\subsection[ConDecl]{A data-constructor declaration}
* *
************************************************************************
-}
data HsDataDefn name -- The payload of a data type defn
-- Used *both* for vanilla data declarations,
-- *and* for data family instances
= -- | Declares a data type or newtype, giving its constructors
-- @
-- data/newtype T a = <constrs>
-- data/newtype instance T [a] = <constrs>
-- @
HsDataDefn { dd_ND :: NewOrData,
dd_ctxt :: LHsContext name, -- ^ Context
dd_cType :: Maybe (Located CType),
dd_kindSig:: Maybe (LHsKind name),
-- ^ Optional kind signature.
--
-- @(Just k)@ for a GADT-style @data@,
-- or @data instance@ decl, with explicit kind sig
--
-- Always @Nothing@ for H98-syntax decls
dd_cons :: [LConDecl name],
-- ^ Data constructors
--
-- For @data T a = T1 | T2 a@
-- the 'LConDecl's all have 'ResTyH98'.
-- For @data T a where { T1 :: T a }@
-- the 'LConDecls' all have 'ResTyGADT'.
dd_derivs :: Maybe (Located [LHsType name])
-- ^ Derivings; @Nothing@ => not specified,
-- @Just []@ => derive exactly what is asked
--
-- These "types" must be of form
-- @
-- forall ab. C ty1 ty2
-- @
-- Typically the foralls and ty args are empty, but they
-- are non-empty for the newtype-deriving case
--
-- - 'ApiAnnotation.AnnKeywordId' :
-- 'ApiAnnotation.AnnDeriving',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
}
deriving( Typeable )
deriving instance (DataId id) => Data (HsDataDefn id)
data NewOrData
= NewType -- ^ @newtype Blah ...@
| DataType -- ^ @data Blah ...@
deriving( Eq, Data, Typeable ) -- Needed because Demand derives Eq
type LConDecl name = Located (ConDecl name)
-- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi' when
-- in a GADT constructor list
-- For details on above see note [Api annotations] in ApiAnnotation
-- |
--
-- @
-- data T b = forall a. Eq a => MkT a b
-- MkT :: forall b a. Eq a => MkT a b
--
-- data T b where
-- MkT1 :: Int -> T Int
--
-- data T = Int `MkT` Int
-- | MkT2
--
-- data T a where
-- Int `MkT` Int :: T Int
-- @
--
-- - 'ApiAnnotation.AnnKeywordId's : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnDotdot','ApiAnnotation.AnnCLose',
-- 'ApiAnnotation.AnnEqual','ApiAnnotation.AnnVbar',
-- 'ApiAnnotation.AnnDarrow','ApiAnnotation.AnnDarrow',
-- 'ApiAnnotation.AnnForall','ApiAnnotation.AnnDot'
-- For details on above see note [Api annotations] in ApiAnnotation
data ConDecl name
= ConDecl
{ con_names :: [Located name]
-- ^ Constructor names. This is used for the DataCon itself, and for
-- the user-callable wrapper Id.
-- It is a list to deal with GADT constructors of the form
-- T1, T2, T3 :: <payload>
, con_explicit :: HsExplicitFlag
        -- ^ Is there a user-written forall? (cf. 'HsTypes.HsForAllTy')
, con_qvars :: LHsTyVarBndrs name
-- ^ Type variables. Depending on 'con_res' this describes the
-- following entities
--
-- - ResTyH98: the constructor's *existential* type variables
-- - ResTyGADT: *all* the constructor's quantified type variables
--
-- If con_explicit is Implicit, then con_qvars is irrelevant
-- until after renaming.
, con_cxt :: LHsContext name
-- ^ The context. This /does not/ include the \"stupid theta\" which
-- lives only in the 'TyData' decl.
, con_details :: HsConDeclDetails name
-- ^ The main payload
, con_res :: ResType (LHsType name)
-- ^ Result type of the constructor
, con_doc :: Maybe LHsDocString
-- ^ A possible Haddock comment.
, con_old_rec :: Bool
-- ^ TEMPORARY field; True <=> user has employed now-deprecated syntax for
-- GADT-style record decl C { blah } :: T a b
-- Remove this when we no longer parse this stuff, and hence do not
     -- need to report deprecated use
} deriving (Typeable)
deriving instance (DataId name) => Data (ConDecl name)
type HsConDeclDetails name
= HsConDetails (LBangType name) (Located [LConDeclField name])
hsConDeclArgTys :: HsConDeclDetails name -> [LBangType name]
hsConDeclArgTys (PrefixCon tys) = tys
hsConDeclArgTys (InfixCon ty1 ty2) = [ty1,ty2]
hsConDeclArgTys (RecCon flds) = map (cd_fld_type . unLoc) (unLoc flds)
data ResType ty
= ResTyH98 -- Constructor was declared using Haskell 98 syntax
| ResTyGADT SrcSpan ty -- Constructor was declared using GADT-style syntax,
-- and here is its result type, and the SrcSpan
-- of the original sigtype, for API Annotations
deriving (Data, Typeable)
instance Outputable ty => Outputable (ResType ty) where
-- Debugging only
ppr ResTyH98 = ptext (sLit "ResTyH98")
ppr (ResTyGADT _ ty) = ptext (sLit "ResTyGADT") <+> ppr ty
pp_data_defn :: OutputableBndr name
=> (HsContext name -> SDoc) -- Printing the header
-> HsDataDefn name
-> SDoc
pp_data_defn pp_hdr (HsDataDefn { dd_ND = new_or_data, dd_ctxt = L _ context
, dd_kindSig = mb_sig
, dd_cons = condecls, dd_derivs = derivings })
| null condecls
= ppr new_or_data <+> pp_hdr context <+> pp_sig
| otherwise
= hang (ppr new_or_data <+> pp_hdr context <+> pp_sig)
2 (pp_condecls condecls $$ pp_derivings)
where
pp_sig = case mb_sig of
Nothing -> empty
Just kind -> dcolon <+> ppr kind
pp_derivings = case derivings of
Nothing -> empty
Just (L _ ds) -> hsep [ptext (sLit "deriving"),
parens (interpp'SP ds)]
instance OutputableBndr name => Outputable (HsDataDefn name) where
ppr d = pp_data_defn (\_ -> ptext (sLit "Naked HsDataDefn")) d
instance Outputable NewOrData where
ppr NewType = ptext (sLit "newtype")
ppr DataType = ptext (sLit "data")
pp_condecls :: OutputableBndr name => [LConDecl name] -> SDoc
pp_condecls cs@(L _ ConDecl{ con_res = ResTyGADT _ _ } : _) -- In GADT syntax
= hang (ptext (sLit "where")) 2 (vcat (map ppr cs))
pp_condecls cs -- In H98 syntax
= equals <+> sep (punctuate (ptext (sLit " |")) (map ppr cs))
instance (OutputableBndr name) => Outputable (ConDecl name) where
ppr = pprConDecl
pprConDecl :: OutputableBndr name => ConDecl name -> SDoc
pprConDecl (ConDecl { con_names = [L _ con] -- NB: non-GADT means 1 con
, con_explicit = expl, con_qvars = tvs
, con_cxt = cxt, con_details = details
, con_res = ResTyH98, con_doc = doc })
= sep [ppr_mbDoc doc, pprHsForAll expl tvs cxt, ppr_details details]
where
ppr_details (InfixCon t1 t2) = hsep [ppr t1, pprInfixOcc con, ppr t2]
ppr_details (PrefixCon tys) = hsep (pprPrefixOcc con
: map (pprParendHsType . unLoc) tys)
ppr_details (RecCon fields) = pprPrefixOcc con
<+> pprConDeclFields (unLoc fields)
pprConDecl (ConDecl { con_names = cons, con_explicit = expl, con_qvars = tvs
, con_cxt = cxt, con_details = PrefixCon arg_tys
, con_res = ResTyGADT _ res_ty })
= ppr_con_names cons <+> dcolon <+>
sep [pprHsForAll expl tvs cxt, ppr (foldr mk_fun_ty res_ty arg_tys)]
where
mk_fun_ty a b = noLoc (HsFunTy a b)
pprConDecl (ConDecl { con_names = cons, con_explicit = expl, con_qvars = tvs
, con_cxt = cxt, con_details = RecCon fields
, con_res = ResTyGADT _ res_ty })
= sep [ppr_con_names cons <+> dcolon <+> pprHsForAll expl tvs cxt,
pprConDeclFields (unLoc fields) <+> arrow <+> ppr res_ty]
pprConDecl decl@(ConDecl { con_details = InfixCon ty1 ty2, con_res = ResTyGADT {} })
= pprConDecl (decl { con_details = PrefixCon [ty1,ty2] })
-- In GADT syntax we don't allow infix constructors
-- so if we ever trip over one (albeit I can't see how that
-- can happen) print it like a prefix one
-- this fallthrough would happen with a non-GADT-syntax ConDecl with more
-- than one constructor, which should indeed be impossible
pprConDecl (ConDecl { con_names = cons }) = pprPanic "pprConDecl" (ppr cons)
ppr_con_names :: (OutputableBndr name) => [Located name] -> SDoc
ppr_con_names = pprWithCommas (pprPrefixOcc . unLoc)
{-
************************************************************************
* *
Instance declarations
* *
************************************************************************
Note [Type family instance declarations in HsSyn]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The data type TyFamEqn represents one equation of a type family instance.
It is parameterised over its tfe_pats field:
* An ordinary type family instance declaration looks like this in source Haskell
type instance T [a] Int = a -> a
(or something similar for a closed family)
It is represented by a TyFamInstEqn, with *type* in the tfe_pats field.
* On the other hand, the *default instance* of an associated type looks like
this in source Haskell
class C a where
type T a b
type T a b = a -> b -- The default instance
   It is represented by a TyFamDefltEqn, with *type variables* in the tfe_pats field.
-}
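{- A rough sketch of the two representations (constructor spellings below are
   illustrative, not exact Haskell values):

     type instance T [a] Int = a -> a
       ~~>  TyFamEqn { tfe_tycon = T
                     , tfe_pats  = the type patterns [a] and Int   -- HsTyPats
                     , tfe_rhs   = a -> a }                        -- a TyFamInstEqn

     class C a where
       type T a b
       type T a b = a -> b
       ~~>  TyFamEqn { tfe_tycon = T
                     , tfe_pats  = the type variables a and b      -- LHsTyVarBndrs
                     , tfe_rhs   = a -> b }                        -- a TyFamDefltEqn
-}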
----------------- Type synonym family instances -------------
type LTyFamInstEqn name = Located (TyFamInstEqn name)
-- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi'
-- when in a list
-- For details on above see note [Api annotations] in ApiAnnotation
type LTyFamDefltEqn name = Located (TyFamDefltEqn name)
type HsTyPats name = HsWithBndrs name [LHsType name]
-- ^ Type patterns (with kind and type bndrs)
-- See Note [Family instance declaration binders]
type TyFamInstEqn name = TyFamEqn name (HsTyPats name)
type TyFamDefltEqn name = TyFamEqn name (LHsTyVarBndrs name)
-- See Note [Type family instance declarations in HsSyn]
-- | One equation in a type family instance declaration
-- See Note [Type family instance declarations in HsSyn]
data TyFamEqn name pats
= TyFamEqn
{ tfe_tycon :: Located name
, tfe_pats :: pats
, tfe_rhs :: LHsType name }
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnEqual'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving( Typeable )
deriving instance (DataId name, Data pats) => Data (TyFamEqn name pats)
type LTyFamInstDecl name = Located (TyFamInstDecl name)
data TyFamInstDecl name
= TyFamInstDecl
{ tfid_eqn :: LTyFamInstEqn name
, tfid_fvs :: PostRn name NameSet }
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnInstance',
-- For details on above see note [Api annotations] in ApiAnnotation
deriving( Typeable )
deriving instance (DataId name) => Data (TyFamInstDecl name)
----------------- Data family instances -------------
type LDataFamInstDecl name = Located (DataFamInstDecl name)
data DataFamInstDecl name
= DataFamInstDecl
{ dfid_tycon :: Located name
, dfid_pats :: HsTyPats name -- LHS
, dfid_defn :: HsDataDefn name -- RHS
, dfid_fvs :: PostRn name NameSet } -- Free vars for
-- dependency analysis
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnData',
-- 'ApiAnnotation.AnnNewType','ApiAnnotation.AnnInstance',
-- 'ApiAnnotation.AnnDcolon'
-- 'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving( Typeable )
deriving instance (DataId name) => Data (DataFamInstDecl name)
----------------- Class instances -------------
type LClsInstDecl name = Located (ClsInstDecl name)
data ClsInstDecl name
= ClsInstDecl
{ cid_poly_ty :: LHsType name -- Context => Class Instance-type
-- Using a polytype means that the renamer conveniently
-- figures out the quantified type variables for us.
, cid_binds :: LHsBinds name -- Class methods
, cid_sigs :: [LSig name] -- User-supplied pragmatic info
, cid_tyfam_insts :: [LTyFamInstDecl name] -- Type family instances
, cid_datafam_insts :: [LDataFamInstDecl name] -- Data family instances
, cid_overlap_mode :: Maybe (Located OverlapMode)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose',
-- For details on above see note [Api annotations] in ApiAnnotation
}
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnInstance',
-- 'ApiAnnotation.AnnWhere',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose',
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId id) => Data (ClsInstDecl id)
----------------- Instances of all kinds -------------
type LInstDecl name = Located (InstDecl name)
data InstDecl name -- Both class and family instances
= ClsInstD
{ cid_inst :: ClsInstDecl name }
| DataFamInstD -- data family instance
{ dfid_inst :: DataFamInstDecl name }
| TyFamInstD -- type family instance
{ tfid_inst :: TyFamInstDecl name }
deriving (Typeable)
deriving instance (DataId id) => Data (InstDecl id)
{-
Note [Family instance declaration binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A {Ty|Data}FamInstDecl is a data/type family instance declaration.
The pats field holds the LHS type patterns, and the tvs of its
    HsWithBndrs wrapper are fv(pat_tys), *including* ones that are already in scope
Eg class C s t where
type F t p :: *
instance C w (a,b) where
type F (a,b) x = x->a
The tcdTyVars of the F decl are {a,b,x}, even though the F decl
is nested inside the 'instance' decl.
However after the renamer, the uniques will match up:
instance C w7 (a8,b9) where
type F (a8,b9) x10 = x10->a8
so that we can compare the type pattern in the 'instance' decl and
in the associated 'type' decl
-}
instance (OutputableBndr name) => Outputable (TyFamInstDecl name) where
ppr = pprTyFamInstDecl TopLevel
pprTyFamInstDecl :: OutputableBndr name => TopLevelFlag -> TyFamInstDecl name -> SDoc
pprTyFamInstDecl top_lvl (TyFamInstDecl { tfid_eqn = eqn })
= ptext (sLit "type") <+> ppr_instance_keyword top_lvl <+> ppr_fam_inst_eqn eqn
ppr_instance_keyword :: TopLevelFlag -> SDoc
ppr_instance_keyword TopLevel = ptext (sLit "instance")
ppr_instance_keyword NotTopLevel = empty
ppr_fam_inst_eqn :: OutputableBndr name => LTyFamInstEqn name -> SDoc
ppr_fam_inst_eqn (L _ (TyFamEqn { tfe_tycon = tycon
, tfe_pats = pats
, tfe_rhs = rhs }))
= pp_fam_inst_lhs tycon pats [] <+> equals <+> ppr rhs
ppr_fam_deflt_eqn :: OutputableBndr name => LTyFamDefltEqn name -> SDoc
ppr_fam_deflt_eqn (L _ (TyFamEqn { tfe_tycon = tycon
, tfe_pats = tvs
, tfe_rhs = rhs }))
= pp_vanilla_decl_head tycon tvs [] <+> equals <+> ppr rhs
instance (OutputableBndr name) => Outputable (DataFamInstDecl name) where
ppr = pprDataFamInstDecl TopLevel
pprDataFamInstDecl :: OutputableBndr name => TopLevelFlag -> DataFamInstDecl name -> SDoc
pprDataFamInstDecl top_lvl (DataFamInstDecl { dfid_tycon = tycon
, dfid_pats = pats
, dfid_defn = defn })
= pp_data_defn pp_hdr defn
where
pp_hdr ctxt = ppr_instance_keyword top_lvl <+> pp_fam_inst_lhs tycon pats ctxt
pprDataFamInstFlavour :: DataFamInstDecl name -> SDoc
pprDataFamInstFlavour (DataFamInstDecl { dfid_defn = (HsDataDefn { dd_ND = nd }) })
= ppr nd
instance (OutputableBndr name) => Outputable (ClsInstDecl name) where
ppr (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = binds
, cid_sigs = sigs, cid_tyfam_insts = ats
, cid_overlap_mode = mbOverlap
, cid_datafam_insts = adts })
| null sigs, null ats, null adts, isEmptyBag binds -- No "where" part
= top_matter
| otherwise -- Laid out
= vcat [ top_matter <+> ptext (sLit "where")
, nest 2 $ pprDeclList $
map (pprTyFamInstDecl NotTopLevel . unLoc) ats ++
map (pprDataFamInstDecl NotTopLevel . unLoc) adts ++
pprLHsBindsForUser binds sigs ]
where
top_matter = ptext (sLit "instance") <+> ppOverlapPragma mbOverlap
<+> ppr inst_ty
ppOverlapPragma :: Maybe (Located OverlapMode) -> SDoc
ppOverlapPragma mb =
case mb of
Nothing -> empty
Just (L _ (NoOverlap _)) -> ptext (sLit "{-# NO_OVERLAP #-}")
Just (L _ (Overlappable _)) -> ptext (sLit "{-# OVERLAPPABLE #-}")
Just (L _ (Overlapping _)) -> ptext (sLit "{-# OVERLAPPING #-}")
Just (L _ (Overlaps _)) -> ptext (sLit "{-# OVERLAPS #-}")
Just (L _ (Incoherent _)) -> ptext (sLit "{-# INCOHERENT #-}")
instance (OutputableBndr name) => Outputable (InstDecl name) where
ppr (ClsInstD { cid_inst = decl }) = ppr decl
ppr (TyFamInstD { tfid_inst = decl }) = ppr decl
ppr (DataFamInstD { dfid_inst = decl }) = ppr decl
-- Extract the declarations of associated data types from an instance
instDeclDataFamInsts :: [LInstDecl name] -> [DataFamInstDecl name]
instDeclDataFamInsts inst_decls
= concatMap do_one inst_decls
where
do_one (L _ (ClsInstD { cid_inst = ClsInstDecl { cid_datafam_insts = fam_insts } }))
= map unLoc fam_insts
do_one (L _ (DataFamInstD { dfid_inst = fam_inst })) = [fam_inst]
do_one (L _ (TyFamInstD {})) = []
{-
************************************************************************
* *
\subsection[DerivDecl]{A stand-alone instance deriving declaration}
* *
************************************************************************
-}
type LDerivDecl name = Located (DerivDecl name)
data DerivDecl name = DerivDecl
{ deriv_type :: LHsType name
, deriv_overlap_mode :: Maybe (Located OverlapMode)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose',
-- 'ApiAnnotation.AnnDeriving',
-- 'ApiAnnotation.AnnInstance'
-- For details on above see note [Api annotations] in ApiAnnotation
}
deriving (Typeable)
deriving instance (DataId name) => Data (DerivDecl name)
instance (OutputableBndr name) => Outputable (DerivDecl name) where
ppr (DerivDecl ty o)
= hsep [ptext (sLit "deriving instance"), ppOverlapPragma o, ppr ty]
{-
************************************************************************
* *
\subsection[DefaultDecl]{A @default@ declaration}
* *
************************************************************************
There can only be one default declaration per module, but it is hard
for the parser to check that; we pass them all through in the abstract
syntax, and that restriction must be checked in the front end.
-}
type LDefaultDecl name = Located (DefaultDecl name)
data DefaultDecl name
= DefaultDecl [LHsType name]
-- ^ - 'ApiAnnotation.AnnKeywordId's : 'ApiAnnotation.AnnDefault',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId name) => Data (DefaultDecl name)
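-- An illustrative sketch (not from the original source): the source
-- declaration
--
--     default (Integer, Double)
--
-- is represented roughly as @DefaultDecl [Integer, Double]@, where the list
-- elements are the 'LHsType's for @Integer@ and @Double@.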
instance (OutputableBndr name)
=> Outputable (DefaultDecl name) where
ppr (DefaultDecl tys)
= ptext (sLit "default") <+> parens (interpp'SP tys)
{-
************************************************************************
* *
\subsection{Foreign function interface declaration}
* *
************************************************************************
-}
-- foreign declarations are distinguished as to whether they define or use a
-- Haskell name
--
-- * the Boolean value indicates whether the pre-standard deprecated syntax
-- has been used
--
type LForeignDecl name = Located (ForeignDecl name)
data ForeignDecl name
= ForeignImport (Located name) -- defines this name
(LHsType name) -- sig_ty
(PostTc name Coercion) -- rep_ty ~ sig_ty
ForeignImport
| ForeignExport (Located name) -- uses this name
(LHsType name) -- sig_ty
(PostTc name Coercion) -- sig_ty ~ rep_ty
ForeignExport
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnForeign',
-- 'ApiAnnotation.AnnImport','ApiAnnotation.AnnExport',
-- 'ApiAnnotation.AnnDcolon'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId name) => Data (ForeignDecl name)
{-
In both ForeignImport and ForeignExport:
sig_ty is the type given in the Haskell code
rep_ty is the representation for this type, i.e. with newtypes
coerced away and type functions evaluated.
Thus if the declaration is valid, then rep_ty will only use types
such as Int and IO that we know how to make foreign calls with.
-}
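{- For example (an illustrative sketch, not part of this module): given

     newtype Fd = Fd CInt
     foreign import ccall "close" c_close :: Fd -> IO CInt

   sig_ty is the written type   Fd -> IO CInt,   while rep_ty has the newtype
   coerced away,   CInt -> IO CInt,   which is the type the foreign call is
   actually made at.
-}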
noForeignImportCoercionYet :: PlaceHolder
noForeignImportCoercionYet = PlaceHolder
noForeignExportCoercionYet :: PlaceHolder
noForeignExportCoercionYet = PlaceHolder
-- Specification Of an imported external entity in dependence on the calling
-- convention
--
data ForeignImport = -- import of a C entity
--
-- * the two strings specifying a header file or library
-- may be empty, which indicates the absence of a
-- header or object specification (both are not used
-- in the case of `CWrapper' and when `CFunction'
-- has a dynamic target)
--
-- * the calling convention is irrelevant for code
-- generation in the case of `CLabel', but is needed
-- for pretty printing
--
-- * `Safety' is irrelevant for `CLabel' and `CWrapper'
--
CImport (Located CCallConv) -- ccall or stdcall
(Located Safety) -- interruptible, safe or unsafe
(Maybe Header) -- name of C header
CImportSpec -- details of the C entity
(Located SourceText) -- original source text for
-- the C entity
deriving (Data, Typeable)
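-- An illustrative sketch (field values are approximate, not exact
-- constructor applications): the declaration
--
--     foreign import ccall unsafe "math.h sin" c_sin :: CDouble -> CDouble
--
-- is parsed into a 'CImport' whose calling convention is @ccall@, whose
-- 'Safety' is @unsafe@, whose header is @Just "math.h"@, and whose
-- 'CImportSpec' is a 'CFunction' with a static target for the C symbol @sin@.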
-- details of an external C entity
--
data CImportSpec = CLabel CLabelString -- import address of a C label
| CFunction CCallTarget -- static or dynamic function
| CWrapper CLabelString -- wrapper to expose closures
Bool -- Abstract class?
deriving (Data, Typeable)
-- specification of an externally exported entity in dependence on the calling
-- convention
--
data ForeignExport = CExport (Located CExportSpec) -- contains the calling
-- convention
(Located SourceText) -- original source text for
-- the C entity
deriving (Data, Typeable)
-- pretty printing of foreign declarations
--
instance OutputableBndr name => Outputable (ForeignDecl name) where
ppr (ForeignImport n ty _ fimport) =
hang (ptext (sLit "foreign import") <+> ppr fimport <+> ppr n)
2 (dcolon <+> ppr ty)
ppr (ForeignExport n ty _ fexport) =
hang (ptext (sLit "foreign export") <+> ppr fexport <+> ppr n)
2 (dcolon <+> ppr ty)
instance Outputable ForeignImport where
ppr (CImport cconv safety mHeader spec _) =
ppr cconv <+> ppr safety <+>
char '"' <> pprCEntity spec <> char '"'
where
pp_hdr = case mHeader of
Nothing -> empty
Just (Header header) -> ftext header
pprCEntity (CLabel lbl) =
ptext (sLit "static") <+> pp_hdr <+> char '&' <> ppr lbl
pprCEntity (CFunction (StaticTarget lbl _ isFun)) =
pp_hdr
<+> (if isFun then empty else ptext (sLit "value"))
<+> ppr lbl
pprCEntity (CFunction (DynamicTarget)) =
ptext (sLit "dynamic")
pprCEntity (CWrapper target isAbstract) =
ptext (sLit "@wrapper") <+> ppr target <+> ppr isAbstract
instance Outputable ForeignExport where
ppr (CExport (L _ (CExportStatic lbl cconv)) _) =
ppr cconv <+> char '"' <> ppr lbl <> char '"'
{-
************************************************************************
* *
\subsection{Transformation rules}
* *
************************************************************************
-}
type LRuleDecls name = Located (RuleDecls name)
-- Note [Pragma source text] in BasicTypes
data RuleDecls name = HsRules { rds_src :: SourceText
, rds_rules :: [LRuleDecl name] }
deriving (Typeable)
deriving instance (DataId name) => Data (RuleDecls name)
type LRuleDecl name = Located (RuleDecl name)
data RuleDecl name
= HsRule -- Source rule
(Located RuleName) -- Rule name
Activation
[LRuleBndr name] -- Forall'd vars; after typechecking this
-- includes tyvars
(Located (HsExpr name)) -- LHS
(PostRn name NameSet) -- Free-vars from the LHS
(Located (HsExpr name)) -- RHS
(PostRn name NameSet) -- Free-vars from the RHS
-- ^
-- - 'ApiAnnotation.AnnKeywordId' :
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnTilde',
-- 'ApiAnnotation.AnnVal',
-- 'ApiAnnotation.AnnClose',
-- 'ApiAnnotation.AnnForall','ApiAnnotation.AnnDot',
-- 'ApiAnnotation.AnnEqual',
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId name) => Data (RuleDecl name)
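-- An illustrative sketch (not from the original source): the pragma
--
--     {-# RULES "map/map" forall f g xs. map f (map g xs) = map (f . g) xs #-}
--
-- becomes, roughly, an 'HsRule' whose name is @"map/map"@, whose binders are
-- @f@, @g@ and @xs@, whose LHS is @map f (map g xs)@ and whose RHS is
-- @map (f . g) xs@, with the two free-variable sets filled in by the renamer.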
flattenRuleDecls :: [LRuleDecls name] -> [LRuleDecl name]
flattenRuleDecls decls = concatMap (rds_rules . unLoc) decls
type LRuleBndr name = Located (RuleBndr name)
data RuleBndr name
= RuleBndr (Located name)
| RuleBndrSig (Located name) (HsWithBndrs name (LHsType name))
-- ^
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnDcolon','ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId name) => Data (RuleBndr name)
collectRuleBndrSigTys :: [RuleBndr name] -> [HsWithBndrs name (LHsType name)]
collectRuleBndrSigTys bndrs = [ty | RuleBndrSig _ ty <- bndrs]
instance OutputableBndr name => Outputable (RuleDecls name) where
ppr (HsRules _ rules) = ppr rules
instance OutputableBndr name => Outputable (RuleDecl name) where
ppr (HsRule name act ns lhs _fv_lhs rhs _fv_rhs)
= sep [text "{-# RULES" <+> doubleQuotes (ftext $ unLoc name)
<+> ppr act,
nest 4 (pp_forall <+> pprExpr (unLoc lhs)),
nest 4 (equals <+> pprExpr (unLoc rhs) <+> text "#-}") ]
where
pp_forall | null ns = empty
| otherwise = forAllLit <+> fsep (map ppr ns) <> dot
instance OutputableBndr name => Outputable (RuleBndr name) where
ppr (RuleBndr name) = ppr name
ppr (RuleBndrSig name ty) = ppr name <> dcolon <> ppr ty
{-
************************************************************************
* *
\subsection{Vectorisation declarations}
* *
************************************************************************
A vectorisation pragma, one of
{-# VECTORISE f = closure1 g (scalar_map g) #-}
{-# VECTORISE SCALAR f #-}
{-# NOVECTORISE f #-}
{-# VECTORISE type T = ty #-}
{-# VECTORISE SCALAR type T #-}
-}
type LVectDecl name = Located (VectDecl name)
data VectDecl name
= HsVect
SourceText -- Note [Pragma source text] in BasicTypes
(Located name)
(LHsExpr name)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnEqual','ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
| HsNoVect
SourceText -- Note [Pragma source text] in BasicTypes
(Located name)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
| HsVectTypeIn -- pre type-checking
SourceText -- Note [Pragma source text] in BasicTypes
Bool -- 'TRUE' => SCALAR declaration
(Located name)
(Maybe (Located name)) -- 'Nothing' => no right-hand side
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnType','ApiAnnotation.AnnClose',
-- 'ApiAnnotation.AnnEqual'
-- For details on above see note [Api annotations] in ApiAnnotation
| HsVectTypeOut -- post type-checking
Bool -- 'TRUE' => SCALAR declaration
TyCon
(Maybe TyCon) -- 'Nothing' => no right-hand side
| HsVectClassIn -- pre type-checking
SourceText -- Note [Pragma source text] in BasicTypes
(Located name)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClass','ApiAnnotation.AnnClose',
-- For details on above see note [Api annotations] in ApiAnnotation
| HsVectClassOut -- post type-checking
Class
| HsVectInstIn -- pre type-checking (always SCALAR) !!!FIXME: should be superfluous now
(LHsType name)
| HsVectInstOut -- post type-checking (always SCALAR) !!!FIXME: should be superfluous now
ClsInst
deriving (Typeable)
deriving instance (DataId name) => Data (VectDecl name)
lvectDeclName :: NamedThing name => LVectDecl name -> Name
lvectDeclName (L _ (HsVect _ (L _ name) _)) = getName name
lvectDeclName (L _ (HsNoVect _ (L _ name))) = getName name
lvectDeclName (L _ (HsVectTypeIn _ _ (L _ name) _)) = getName name
lvectDeclName (L _ (HsVectTypeOut _ tycon _)) = getName tycon
lvectDeclName (L _ (HsVectClassIn _ (L _ name))) = getName name
lvectDeclName (L _ (HsVectClassOut cls)) = getName cls
lvectDeclName (L _ (HsVectInstIn _))
= panic "HsDecls.lvectDeclName: HsVectInstIn"
lvectDeclName (L _ (HsVectInstOut _))
= panic "HsDecls.lvectDeclName: HsVectInstOut"
lvectInstDecl :: LVectDecl name -> Bool
lvectInstDecl (L _ (HsVectInstIn _)) = True
lvectInstDecl (L _ (HsVectInstOut _)) = True
lvectInstDecl _ = False
instance OutputableBndr name => Outputable (VectDecl name) where
ppr (HsVect _ v rhs)
= sep [text "{-# VECTORISE" <+> ppr v,
nest 4 $
pprExpr (unLoc rhs) <+> text "#-}" ]
ppr (HsNoVect _ v)
= sep [text "{-# NOVECTORISE" <+> ppr v <+> text "#-}" ]
ppr (HsVectTypeIn _ False t Nothing)
= sep [text "{-# VECTORISE type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeIn _ False t (Just t'))
= sep [text "{-# VECTORISE type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectTypeIn _ True t Nothing)
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeIn _ True t (Just t'))
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectTypeOut False t Nothing)
= sep [text "{-# VECTORISE type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeOut False t (Just t'))
= sep [text "{-# VECTORISE type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectTypeOut True t Nothing)
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeOut True t (Just t'))
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectClassIn _ c)
= sep [text "{-# VECTORISE class" <+> ppr c <+> text "#-}" ]
ppr (HsVectClassOut c)
= sep [text "{-# VECTORISE class" <+> ppr c <+> text "#-}" ]
ppr (HsVectInstIn ty)
= sep [text "{-# VECTORISE SCALAR instance" <+> ppr ty <+> text "#-}" ]
ppr (HsVectInstOut i)
= sep [text "{-# VECTORISE SCALAR instance" <+> ppr i <+> text "#-}" ]
{-
************************************************************************
* *
\subsection[DocDecl]{Document comments}
* *
************************************************************************
-}
type LDocDecl = Located (DocDecl)
data DocDecl
= DocCommentNext HsDocString
| DocCommentPrev HsDocString
| DocCommentNamed String HsDocString
| DocGroup Int HsDocString
deriving (Data, Typeable)
-- Okay, I need to reconstruct the document comments, but for now:
instance Outputable DocDecl where
ppr _ = text "<document comment>"
docDeclDoc :: DocDecl -> HsDocString
docDeclDoc (DocCommentNext d) = d
docDeclDoc (DocCommentPrev d) = d
docDeclDoc (DocCommentNamed _ d) = d
docDeclDoc (DocGroup _ d) = d
{-
************************************************************************
* *
\subsection[DeprecDecl]{Deprecations}
* *
************************************************************************
We use exported entities for things to deprecate.
-}
type LWarnDecls name = Located (WarnDecls name)
-- Note [Pragma source text] in BasicTypes
data WarnDecls name = Warnings { wd_src :: SourceText
, wd_warnings :: [LWarnDecl name]
}
deriving (Data, Typeable)
type LWarnDecl name = Located (WarnDecl name)
data WarnDecl name = Warning [Located name] WarningTxt
deriving (Data, Typeable)
instance OutputableBndr name => Outputable (WarnDecls name) where
ppr (Warnings _ decls) = ppr decls
instance OutputableBndr name => Outputable (WarnDecl name) where
ppr (Warning thing txt)
= hsep [text "{-# DEPRECATED", ppr thing, doubleQuotes (ppr txt), text "#-}"]
{-
************************************************************************
* *
\subsection[AnnDecl]{Annotations}
* *
************************************************************************
-}
type LAnnDecl name = Located (AnnDecl name)
data AnnDecl name = HsAnnotation
SourceText -- Note [Pragma source text] in BasicTypes
(AnnProvenance name) (Located (HsExpr name))
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnType'
-- 'ApiAnnotation.AnnModule'
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Typeable)
deriving instance (DataId name) => Data (AnnDecl name)
instance (OutputableBndr name) => Outputable (AnnDecl name) where
ppr (HsAnnotation _ provenance expr)
= hsep [text "{-#", pprAnnProvenance provenance, pprExpr (unLoc expr), text "#-}"]
data AnnProvenance name = ValueAnnProvenance (Located name)
| TypeAnnProvenance (Located name)
| ModuleAnnProvenance
deriving (Data, Typeable, Functor)
deriving instance Foldable AnnProvenance
deriving instance Traversable AnnProvenance
annProvenanceName_maybe :: AnnProvenance name -> Maybe name
annProvenanceName_maybe (ValueAnnProvenance (L _ name)) = Just name
annProvenanceName_maybe (TypeAnnProvenance (L _ name)) = Just name
annProvenanceName_maybe ModuleAnnProvenance = Nothing
pprAnnProvenance :: OutputableBndr name => AnnProvenance name -> SDoc
pprAnnProvenance ModuleAnnProvenance = ptext (sLit "ANN module")
pprAnnProvenance (ValueAnnProvenance (L _ name))
= ptext (sLit "ANN") <+> ppr name
pprAnnProvenance (TypeAnnProvenance (L _ name))
= ptext (sLit "ANN type") <+> ppr name
{-
************************************************************************
* *
\subsection[RoleAnnot]{Role annotations}
* *
************************************************************************
-}
type LRoleAnnotDecl name = Located (RoleAnnotDecl name)
-- See #8185 for more info about why role annotations are
-- top-level declarations
data RoleAnnotDecl name
= RoleAnnotDecl (Located name) -- type constructor
[Located (Maybe Role)] -- optional annotations
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnRole'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving (Data, Typeable)
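-- An illustrative sketch (not from the original source): the declaration
--
--     type role Ptr representational
--
-- is represented roughly as @RoleAnnotDecl Ptr [Just Representational]@;
-- an unannotated position is rendered as an underscore by the Outputable
-- instance below.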
instance OutputableBndr name => Outputable (RoleAnnotDecl name) where
ppr (RoleAnnotDecl ltycon roles)
= ptext (sLit "type role") <+> ppr ltycon <+>
hsep (map (pp_role . unLoc) roles)
where
pp_role Nothing = underscore
pp_role (Just r) = ppr r
roleAnnotDeclName :: RoleAnnotDecl name -> name
roleAnnotDeclName (RoleAnnotDecl (L _ name) _) = name
|
pparkkin/eta
|
compiler/ETA/HsSyn/HsDecls.hs
|
bsd-3-clause
| 70,760
| 0
| 16
| 21,135
| 12,162
| 6,628
| 5,534
| 870
| 6
|
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/ByteString/Lazy/Builder/ASCII.hs" #-}
-- | We decided to rename the Builder modules. Sorry about that.
--
-- In addition, the ASCII module has been merged into the main
-- "Data.ByteString.Builder" module.
--
-- The old names will hang about for at least one release cycle before we
-- deprecate them and then later remove them.
--
module Data.ByteString.Lazy.Builder.ASCII (
module Data.ByteString.Builder
, byteStringHexFixed
, lazyByteStringHexFixed
) where
import Data.ByteString.Builder
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
byteStringHexFixed :: S.ByteString -> Builder
byteStringHexFixed = byteStringHex
lazyByteStringHexFixed :: L.ByteString -> Builder
lazyByteStringHexFixed = lazyByteStringHex
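-- A hypothetical usage sketch, not part of the original module: render two
-- bytes as fixed-width hexadecimal via the re-exported Builder API.
--
-- >>> _exampleHex
-- "dead"
_exampleHex :: L.ByteString
_exampleHex = toLazyByteString (byteStringHexFixed (S.pack [0xde, 0xad]))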
|
phischu/fragnix
|
tests/packages/scotty/Data.ByteString.Lazy.Builder.ASCII.hs
|
bsd-3-clause
| 802
| 0
| 6
| 112
| 89
| 62
| 27
| 13
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Main where
-- Import our template "pr"
import Printf ( pr )
-- The splice operator $ takes the Haskell source code
-- generated at compile time by "pr" and splices it into
-- the argument of "putStrLn".
main = putStrLn ( $(pr "Hello") )
|
sol/doctest
|
test/integration/template-haskell-bugfix/Main.hs
|
mit
| 279
| 0
| 9
| 52
| 35
| 22
| 13
| 4
| 1
|
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeApplications #-}
-- This version is shorter than T12734, and should yield a
-- type error message. If things go wrong, you get
-- an infinite loop
module T12734a where
import Prelude
import Control.Applicative
import Control.Monad.Fix
import Control.Monad.Trans.Identity
import Control.Monad.Trans.Class
import Control.Monad.IO.Class
data A
data B
data Net
data Type
data Layer4 t l
data TermStore
data Stack lrs (t :: * -> *) where
SLayer :: t l -> Stack ls t -> Stack (l ': ls) t
SNull :: Stack '[] t
instance ( Con m (t l)
, Con m (Stack ls t)) => Con m (Stack (l ': ls) t)
instance Monad m => Con m (Stack '[] t)
instance ( expr ~ Expr t lrs
, Con m (TStk t lrs)) => Con m (Layer4 expr Type)
newtype Expr t lrs = Expr (TStk t lrs)
type TStk t lrs = Stack lrs (Layer4 (Expr t lrs))
class Con m t
-- HERE IS A FUNNY BEHAVIOR: the commented line raises a context reduction stack overflow
test_gr :: forall m t lrs bind.
( Con m (TStk t lrs)
, bind ~ Expr t lrs
-- ) => m (Expr t lrs) -- GHC 8 worked if you said this...
) => m bind -- but not this!
test_gr = undefined
newtype KT (cls :: *) (t :: k) (m :: * -> *) (a :: *)
= KT (IdentityT m a)
test_ghc_err :: KT A '[Type] IO (Expr Net '[Type])
test_ghc_err = test_gr @(KT A '[Type] IO) @_ @'[Type] @(Expr Net '[Type])
{- Works!
test_ghc_err = test_gr @(KT A '[Type] IO)
@Net
@'[Type]
@(Expr Net '[Type])
-}
{- Some notes. See comment:10 on Trac #12734
[W] Con m (TStk t lrs)
[W] Inferable A lrs m
[W] bind ~ Expr t lrs
[W] m bind ~ KT A '[Type] IO (Expr Net '[Type])
==> m := KT A '[Type] IO
bind := Expr Net '[Type]
t := Net
lrs := '[Type]
[W] Con m (TStk t lrs)
= Con m (Stack lrs (Layer4 bind))
--> inline lrs
[W] Con m (Stack '[Type] (Layer4 bind))
--> instance
[W] Con m (Stack '[] bind)
--> Monad m
+
[W] Con m (Layer4 bind Type)
-->
[W] bind ~ Expr t0 lrs0
[W] Con m (TStk t0 lrs0)
-}
|
ezyang/ghc
|
testsuite/tests/typecheck/should_compile/T12734a.hs
|
bsd-3-clause
| 2,578
| 0
| 10
| 714
| 524
| 301
| 223
| -1
| -1
|
module PackageTests.BuildDeps.InternalLibrary4.Check where
import qualified Data.ByteString.Char8 as C
import PackageTests.PackageTester
import System.FilePath
import Test.HUnit
suite :: FilePath -> FilePath -> Test
suite ghcPath ghcPkgPath = TestCase $ do
let spec = PackageSpec
{ directory = "PackageTests" </> "BuildDeps" </> "InternalLibrary4"
, configOpts = []
, distPref = Nothing
}
let specTI = PackageSpec
{ directory = directory spec </> "to-install"
, configOpts = []
, distPref = Nothing
}
unregister "InternalLibrary4" ghcPkgPath
iResult <- cabal_install specTI ghcPath
assertInstallSucceeded iResult
bResult <- cabal_build spec ghcPath
assertBuildSucceeded bResult
unregister "InternalLibrary4" ghcPkgPath
(_, _, output) <- run (Just $ directory spec) (directory spec </> "dist" </> "build" </> "lemon" </> "lemon") [] []
C.appendFile (directory spec </> "test-log.txt") (C.pack $ "\ndist/build/lemon/lemon\n"++output)
assertEqual "executable should have linked with the installed library" "myLibFunc installed" (concat $ lines output)
|
DavidAlphaFox/ghc
|
libraries/Cabal/Cabal/tests/PackageTests/BuildDeps/InternalLibrary4/Check.hs
|
bsd-3-clause
| 1,195
| 0
| 15
| 279
| 302
| 155
| 147
| 24
| 1
|
-- | Contains a pretty-printer for the
-- Template Haskell datatypes
module Language.Haskell.TH.Ppr where
-- All of the exports from this module should
-- be "public" functions. The main module TH
-- re-exports them all.
import Text.PrettyPrint (render)
import Language.Haskell.TH.PprLib
import Language.Haskell.TH.Syntax
import Data.Word ( Word8 )
import Data.Char ( toLower, chr, ord, isSymbol )
import GHC.Show ( showMultiLineString )
import Data.Ratio ( numerator, denominator )
nestDepth :: Int
nestDepth = 4
type Precedence = Int
appPrec, unopPrec, opPrec, noPrec :: Precedence
appPrec = 3 -- Argument of a function application
opPrec = 2 -- Argument of an infix operator
unopPrec = 1 -- Argument of an unresolved infix operator
noPrec = 0 -- Others
parensIf :: Bool -> Doc -> Doc
parensIf True d = parens d
parensIf False d = d
------------------------------
pprint :: Ppr a => a -> String
pprint x = render $ to_HPJ_Doc $ ppr x
class Ppr a where
ppr :: a -> Doc
ppr_list :: [a] -> Doc
ppr_list = vcat . map ppr
instance Ppr a => Ppr [a] where
ppr x = ppr_list x
------------------------------
instance Ppr Name where
ppr v = pprName v
------------------------------
instance Ppr Info where
ppr (TyConI d) = ppr d
ppr (ClassI d is) = ppr d $$ vcat (map ppr is)
ppr (FamilyI d is) = ppr d $$ vcat (map ppr is)
ppr (PrimTyConI name arity is_unlifted)
= text "Primitive"
<+> (if is_unlifted then text "unlifted" else empty)
<+> text "type constructor" <+> quotes (ppr name)
<+> parens (text "arity" <+> int arity)
ppr (ClassOpI v ty cls)
= text "Class op from" <+> ppr cls <> colon <+> ppr_sig v ty
ppr (DataConI v ty tc)
= text "Constructor from" <+> ppr tc <> colon <+> ppr_sig v ty
ppr (TyVarI v ty)
= text "Type variable" <+> ppr v <+> equals <+> ppr ty
ppr (VarI v ty mb_d)
= vcat [ppr_sig v ty,
case mb_d of { Nothing -> empty; Just d -> ppr d }]
ppr_sig :: Name -> Type -> Doc
ppr_sig v ty = ppr v <+> dcolon <+> ppr ty
pprFixity :: Name -> Fixity -> Doc
pprFixity _ f | f == defaultFixity = empty
pprFixity v (Fixity i d) = ppr_fix d <+> int i <+> ppr v
where ppr_fix InfixR = text "infixr"
ppr_fix InfixL = text "infixl"
ppr_fix InfixN = text "infix"
------------------------------
instance Ppr Module where
ppr (Module pkg m) = text (pkgString pkg) <+> text (modString m)
instance Ppr ModuleInfo where
ppr (ModuleInfo imps) = text "Module" <+> vcat (map ppr imps)
------------------------------
instance Ppr Exp where
ppr = pprExp noPrec
pprPrefixOcc :: Name -> Doc
-- Print operators with parens around them
pprPrefixOcc n = parensIf (isSymOcc n) (ppr n)
isSymOcc :: Name -> Bool
isSymOcc n
= case nameBase n of
[] -> True -- Empty name; weird
(c:_) -> isSymbolASCII c || (ord c > 0x7f && isSymbol c)
-- c.f. OccName.startsVarSym in GHC itself
isSymbolASCII :: Char -> Bool
isSymbolASCII c = c `elem` "!#$%&*+./<=>?@\\^|~-"
pprInfixExp :: Exp -> Doc
pprInfixExp (VarE v) = pprName' Infix v
pprInfixExp (ConE v) = pprName' Infix v
pprInfixExp _ = text "<<Non-variable/constructor in infix context>>"
pprExp :: Precedence -> Exp -> Doc
pprExp _ (VarE v) = pprName' Applied v
pprExp _ (ConE c) = pprName' Applied c
pprExp i (LitE l) = pprLit i l
pprExp i (AppE e1 e2) = parensIf (i >= appPrec) $ pprExp opPrec e1
<+> pprExp appPrec e2
pprExp _ (ParensE e) = parens (pprExp noPrec e)
pprExp i (UInfixE e1 op e2)
= parensIf (i > unopPrec) $ pprExp unopPrec e1
<+> pprInfixExp op
<+> pprExp unopPrec e2
pprExp i (InfixE (Just e1) op (Just e2))
= parensIf (i >= opPrec) $ pprExp opPrec e1
<+> pprInfixExp op
<+> pprExp opPrec e2
pprExp _ (InfixE me1 op me2) = parens $ pprMaybeExp noPrec me1
<+> pprInfixExp op
<+> pprMaybeExp noPrec me2
pprExp i (LamE ps e) = parensIf (i > noPrec) $ char '\\' <> hsep (map (pprPat appPrec) ps)
<+> text "->" <+> ppr e
pprExp i (LamCaseE ms) = parensIf (i > noPrec)
$ text "\\case" $$ nest nestDepth (ppr ms)
pprExp _ (TupE es) = parens $ sep $ punctuate comma $ map ppr es
pprExp _ (UnboxedTupE es) = hashParens $ sep $ punctuate comma $ map ppr es
-- Nesting in Cond is to avoid potential problems in do statements
pprExp i (CondE guard true false)
= parensIf (i > noPrec) $ sep [text "if" <+> ppr guard,
nest 1 $ text "then" <+> ppr true,
nest 1 $ text "else" <+> ppr false]
pprExp i (MultiIfE alts)
= parensIf (i > noPrec) $ vcat $
case alts of
[] -> [text "if {}"]
(alt : alts') -> text "if" <+> pprGuarded arrow alt
: map (nest 3 . pprGuarded arrow) alts'
pprExp i (LetE ds_ e) = parensIf (i > noPrec) $ text "let" <+> pprDecs ds_
$$ text " in" <+> ppr e
where
pprDecs [] = empty
pprDecs [d] = ppr d
pprDecs ds = braces $ sep $ punctuate semi $ map ppr ds
pprExp i (CaseE e ms)
= parensIf (i > noPrec) $ text "case" <+> ppr e <+> text "of"
$$ nest nestDepth (ppr ms)
pprExp i (DoE ss_) = parensIf (i > noPrec) $ text "do" <+> pprStms ss_
where
pprStms [] = empty
pprStms [s] = ppr s
pprStms ss = braces $ sep $ punctuate semi $ map ppr ss
pprExp _ (CompE []) = text "<<Empty CompExp>>"
-- This will probably break with fixity declarations - would need a ';'
pprExp _ (CompE ss) = text "[" <> ppr s
<+> text "|"
<+> (sep $ punctuate comma $ map ppr ss')
<> text "]"
where s = last ss
ss' = init ss
pprExp _ (ArithSeqE d) = ppr d
pprExp _ (ListE es) = brackets $ sep $ punctuate comma $ map ppr es
pprExp i (SigE e t) = parensIf (i > noPrec) $ ppr e <+> dcolon <+> ppr t
pprExp _ (RecConE nm fs) = ppr nm <> braces (pprFields fs)
pprExp _ (RecUpdE e fs) = pprExp appPrec e <> braces (pprFields fs)
pprExp i (StaticE e) = parensIf (i >= appPrec) $
text "static"<+> pprExp appPrec e
pprFields :: [(Name,Exp)] -> Doc
pprFields = sep . punctuate comma . map (\(s,e) -> ppr s <+> equals <+> ppr e)
pprMaybeExp :: Precedence -> Maybe Exp -> Doc
pprMaybeExp _ Nothing = empty
pprMaybeExp i (Just e) = pprExp i e
------------------------------
instance Ppr Stmt where
ppr (BindS p e) = ppr p <+> text "<-" <+> ppr e
ppr (LetS ds) = text "let" <+> ppr ds
ppr (NoBindS e) = ppr e
ppr (ParS sss) = sep $ punctuate (text "|")
$ map (sep . punctuate comma . map ppr) sss
------------------------------
instance Ppr Match where
ppr (Match p rhs ds) = ppr p <+> pprBody False rhs
$$ where_clause ds
------------------------------
pprGuarded :: Doc -> (Guard, Exp) -> Doc
pprGuarded eqDoc (guard, expr) = case guard of
NormalG guardExpr -> char '|' <+> ppr guardExpr <+> eqDoc <+> ppr expr
PatG stmts -> char '|' <+> vcat (punctuate comma $ map ppr stmts) $$
nest nestDepth (eqDoc <+> ppr expr)
------------------------------
pprBody :: Bool -> Body -> Doc
pprBody eq body = case body of
GuardedB xs -> nest nestDepth $ vcat $ map (pprGuarded eqDoc) xs
NormalB e -> eqDoc <+> ppr e
where eqDoc | eq = equals
| otherwise = arrow
------------------------------
instance Ppr Lit where
ppr = pprLit noPrec
pprLit :: Precedence -> Lit -> Doc
pprLit i (IntPrimL x) = parensIf (i > noPrec && x < 0)
(integer x <> char '#')
pprLit _ (WordPrimL x) = integer x <> text "##"
pprLit i (FloatPrimL x) = parensIf (i > noPrec && x < 0)
(float (fromRational x) <> char '#')
pprLit i (DoublePrimL x) = parensIf (i > noPrec && x < 0)
(double (fromRational x) <> text "##")
pprLit i (IntegerL x) = parensIf (i > noPrec && x < 0) (integer x)
pprLit _ (CharL c) = text (show c)
pprLit _ (CharPrimL c) = text (show c) <> char '#'
pprLit _ (StringL s) = pprString s
pprLit _ (StringPrimL s) = pprString (bytesToString s) <> char '#'
pprLit i (RationalL rat) = parensIf (i > noPrec) $
integer (numerator rat) <+> char '/'
<+> integer (denominator rat)
bytesToString :: [Word8] -> String
bytesToString = map (chr . fromIntegral)
pprString :: String -> Doc
-- Print newlines as newlines with Haskell string escape notation,
-- not as '\n'. For other non-printables use regular escape notation.
pprString s = vcat (map text (showMultiLineString s))
------------------------------
instance Ppr Pat where
ppr = pprPat noPrec
pprPat :: Precedence -> Pat -> Doc
pprPat i (LitP l) = pprLit i l
pprPat _ (VarP v) = pprName' Applied v
pprPat _ (TupP ps) = parens $ sep $ punctuate comma $ map ppr ps
pprPat _ (UnboxedTupP ps) = hashParens $ sep $ punctuate comma $ map ppr ps
pprPat i (ConP s ps) = parensIf (i >= appPrec) $ pprName' Applied s
<+> sep (map (pprPat appPrec) ps)
pprPat _ (ParensP p) = parens $ pprPat noPrec p
pprPat i (UInfixP p1 n p2)
= parensIf (i > unopPrec) (pprPat unopPrec p1 <+>
pprName' Infix n <+>
pprPat unopPrec p2)
pprPat i (InfixP p1 n p2)
= parensIf (i >= opPrec) (pprPat opPrec p1 <+>
pprName' Infix n <+>
pprPat opPrec p2)
pprPat i (TildeP p) = parensIf (i > noPrec) $ char '~' <> pprPat appPrec p
pprPat i (BangP p) = parensIf (i > noPrec) $ char '!' <> pprPat appPrec p
pprPat i (AsP v p) = parensIf (i > noPrec) $ ppr v <> text "@"
<> pprPat appPrec p
pprPat _ WildP = text "_"
pprPat _ (RecP nm fs)
= parens $ ppr nm
<+> braces (sep $ punctuate comma $
map (\(s,p) -> ppr s <+> equals <+> ppr p) fs)
pprPat _ (ListP ps) = brackets $ sep $ punctuate comma $ map ppr ps
pprPat i (SigP p t) = parensIf (i > noPrec) $ ppr p <+> dcolon <+> ppr t
pprPat _ (ViewP e p) = parens $ pprExp noPrec e <+> text "->" <+> pprPat noPrec p
------------------------------
instance Ppr Dec where
ppr = ppr_dec True
ppr_dec :: Bool -- declaration on the toplevel?
-> Dec
-> Doc
ppr_dec _ (FunD f cs) = vcat $ map (\c -> pprPrefixOcc f <+> ppr c) cs
ppr_dec _ (ValD p r ds) = ppr p <+> pprBody True r
$$ where_clause ds
ppr_dec _ (TySynD t xs rhs)
= ppr_tySyn empty t (hsep (map ppr xs)) rhs
ppr_dec _ (DataD ctxt t xs cs decs)
= ppr_data empty ctxt t (hsep (map ppr xs)) cs decs
ppr_dec _ (NewtypeD ctxt t xs c decs)
= ppr_newtype empty ctxt t (sep (map ppr xs)) c decs
ppr_dec _ (ClassD ctxt c xs fds ds)
= text "class" <+> pprCxt ctxt <+> ppr c <+> hsep (map ppr xs) <+> ppr fds
$$ where_clause ds
ppr_dec _ (InstanceD ctxt i ds) = text "instance" <+> pprCxt ctxt <+> ppr i
$$ where_clause ds
ppr_dec _ (SigD f t) = pprPrefixOcc f <+> dcolon <+> ppr t
ppr_dec _ (ForeignD f) = ppr f
ppr_dec _ (InfixD fx n) = pprFixity n fx
ppr_dec _ (PragmaD p) = ppr p
ppr_dec isTop (DataFamilyD tc tvs kind)
= text "data" <+> maybeFamily <+> ppr tc <+> hsep (map ppr tvs) <+> maybeKind
where
maybeFamily | isTop = text "family"
| otherwise = empty
maybeKind | (Just k') <- kind = dcolon <+> ppr k'
| otherwise = empty
ppr_dec isTop (DataInstD ctxt tc tys cs decs)
= ppr_data maybeInst ctxt tc (sep (map pprParendType tys)) cs decs
where
maybeInst | isTop = text "instance"
| otherwise = empty
ppr_dec isTop (NewtypeInstD ctxt tc tys c decs)
= ppr_newtype maybeInst ctxt tc (sep (map pprParendType tys)) c decs
where
maybeInst | isTop = text "instance"
| otherwise = empty
ppr_dec isTop (TySynInstD tc (TySynEqn tys rhs))
= ppr_tySyn maybeInst tc (sep (map pprParendType tys)) rhs
where
maybeInst | isTop = text "instance"
| otherwise = empty
ppr_dec isTop (OpenTypeFamilyD tc tvs res inj)
= text "type" <+> maybeFamily <+> ppr tc <+> hsep (map ppr tvs) <+>
ppr res <+> maybeInj
where
maybeFamily | isTop = text "family"
| otherwise = empty
maybeInj | (Just inj') <- inj = ppr inj'
| otherwise = empty
ppr_dec _ (ClosedTypeFamilyD tc tvs res inj eqns)
= hang (hsep [ text "type family", ppr tc, hsep (map ppr tvs), ppr res
, maybeInj, text "where" ])
nestDepth (vcat (map ppr_eqn eqns))
where
maybeInj | (Just inj') <- inj = ppr inj'
| otherwise = empty
ppr_eqn (TySynEqn lhs rhs)
= ppr tc <+> sep (map pprParendType lhs) <+> text "=" <+> ppr rhs
ppr_dec _ (RoleAnnotD name roles)
= hsep [ text "type role", ppr name ] <+> hsep (map ppr roles)
ppr_dec _ (StandaloneDerivD cxt ty)
= hsep [ text "deriving instance", pprCxt cxt, ppr ty ]
ppr_dec _ (DefaultSigD n ty)
= hsep [ text "default", pprPrefixOcc n, dcolon, ppr ty ]
ppr_data :: Doc -> Cxt -> Name -> Doc -> [Con] -> [Name] -> Doc
ppr_data maybeInst ctxt t argsDoc cs decs
= sep [text "data" <+> maybeInst
<+> pprCxt ctxt
<+> ppr t <+> argsDoc,
nest nestDepth (sep (pref $ map ppr cs)),
if null decs
then empty
else nest nestDepth
$ text "deriving"
<+> parens (hsep $ punctuate comma $ map ppr decs)]
where
pref :: [Doc] -> [Doc]
pref [] = [] -- No constructors; can't happen in H98
pref (d:ds) = (char '=' <+> d):map (char '|' <+>) ds
ppr_newtype :: Doc -> Cxt -> Name -> Doc -> Con -> [Name] -> Doc
ppr_newtype maybeInst ctxt t argsDoc c decs
= sep [text "newtype" <+> maybeInst
<+> pprCxt ctxt
<+> ppr t <+> argsDoc,
nest 2 (char '=' <+> ppr c),
if null decs
then empty
else nest nestDepth
$ text "deriving"
<+> parens (hsep $ punctuate comma $ map ppr decs)]
ppr_tySyn :: Doc -> Name -> Doc -> Type -> Doc
ppr_tySyn maybeInst t argsDoc rhs
= text "type" <+> maybeInst <+> ppr t <+> argsDoc <+> text "=" <+> ppr rhs
------------------------------
instance Ppr FunDep where
ppr (FunDep xs ys) = hsep (map ppr xs) <+> text "->" <+> hsep (map ppr ys)
ppr_list [] = empty
ppr_list xs = char '|' <+> sep (punctuate (text ", ") (map ppr xs))
------------------------------
instance Ppr FamFlavour where
ppr DataFam = text "data"
ppr TypeFam = text "type"
------------------------------
instance Ppr FamilyResultSig where
ppr NoSig = empty
ppr (KindSig k) = dcolon <+> ppr k
ppr (TyVarSig bndr) = text "=" <+> ppr bndr
------------------------------
instance Ppr InjectivityAnn where
ppr (InjectivityAnn lhs rhs) =
char '|' <+> ppr lhs <+> text "->" <+> hsep (map ppr rhs)
------------------------------
instance Ppr Foreign where
ppr (ImportF callconv safety impent as typ)
= text "foreign import"
<+> showtextl callconv
<+> showtextl safety
<+> text (show impent)
<+> ppr as
<+> dcolon <+> ppr typ
ppr (ExportF callconv expent as typ)
= text "foreign export"
<+> showtextl callconv
<+> text (show expent)
<+> ppr as
<+> dcolon <+> ppr typ
------------------------------
instance Ppr Pragma where
ppr (InlineP n inline rm phases)
= text "{-#"
<+> ppr inline
<+> ppr rm
<+> ppr phases
<+> ppr n
<+> text "#-}"
ppr (SpecialiseP n ty inline phases)
= text "{-# SPECIALISE"
<+> maybe empty ppr inline
<+> ppr phases
<+> sep [ ppr n <+> dcolon
, nest 2 $ ppr ty ]
<+> text "#-}"
ppr (SpecialiseInstP inst)
= text "{-# SPECIALISE instance" <+> ppr inst <+> text "#-}"
ppr (RuleP n bndrs lhs rhs phases)
= sep [ text "{-# RULES" <+> pprString n <+> ppr phases
, nest 4 $ ppr_forall <+> ppr lhs
, nest 4 $ char '=' <+> ppr rhs <+> text "#-}" ]
where ppr_forall | null bndrs = empty
| otherwise = text "forall"
<+> fsep (map ppr bndrs)
<+> char '.'
ppr (AnnP tgt expr)
= text "{-# ANN" <+> target1 tgt <+> ppr expr <+> text "#-}"
where target1 ModuleAnnotation = text "module"
target1 (TypeAnnotation t) = text "type" <+> ppr t
target1 (ValueAnnotation v) = ppr v
ppr (LineP line file)
= text "{-# LINE" <+> int line <+> text (show file) <+> text "#-}"
------------------------------
instance Ppr Inline where
ppr NoInline = text "NOINLINE"
ppr Inline = text "INLINE"
ppr Inlinable = text "INLINABLE"
------------------------------
instance Ppr RuleMatch where
ppr ConLike = text "CONLIKE"
ppr FunLike = empty
------------------------------
instance Ppr Phases where
ppr AllPhases = empty
ppr (FromPhase i) = brackets $ int i
ppr (BeforePhase i) = brackets $ char '~' <> int i
------------------------------
instance Ppr RuleBndr where
ppr (RuleVar n) = ppr n
ppr (TypedRuleVar n ty) = parens $ ppr n <+> dcolon <+> ppr ty
------------------------------
instance Ppr Clause where
ppr (Clause ps rhs ds) = hsep (map (pprPat appPrec) ps) <+> pprBody True rhs
$$ where_clause ds
------------------------------
instance Ppr Con where
ppr (NormalC c sts) = ppr c <+> sep (map pprStrictType sts)
ppr (RecC c vsts)
= ppr c <+> braces (sep (punctuate comma $ map pprVarStrictType vsts))
ppr (InfixC st1 c st2) = pprStrictType st1
<+> pprName' Infix c
<+> pprStrictType st2
ppr (ForallC ns ctxt con) = text "forall" <+> hsep (map ppr ns)
<+> char '.' <+> sep [pprCxt ctxt, ppr con]
------------------------------
pprVarStrictType :: (Name, Strict, Type) -> Doc
-- Slight infelicity: we print non-atomic types with parens
pprVarStrictType (v, str, t) = ppr v <+> dcolon <+> pprStrictType (str, t)
------------------------------
pprStrictType :: (Strict, Type) -> Doc
-- Prints with parens if not already atomic
pprStrictType (IsStrict, t) = char '!' <> pprParendType t
pprStrictType (NotStrict, t) = pprParendType t
pprStrictType (Unpacked, t) = text "{-# UNPACK #-} !" <> pprParendType t
------------------------------
pprParendType :: Type -> Doc
pprParendType (VarT v) = ppr v
pprParendType (ConT c) = ppr c
pprParendType (TupleT 0) = text "()"
pprParendType (TupleT n) = parens (hcat (replicate (n-1) comma))
pprParendType (UnboxedTupleT n) = hashParens $ hcat $ replicate (n-1) comma
pprParendType ArrowT = parens (text "->")
pprParendType ListT = text "[]"
pprParendType (LitT l) = pprTyLit l
pprParendType (PromotedT c) = text "'" <> ppr c
pprParendType (PromotedTupleT 0) = text "'()"
pprParendType (PromotedTupleT n) = quoteParens (hcat (replicate (n-1) comma))
pprParendType PromotedNilT = text "'[]"
pprParendType PromotedConsT = text "(':)"
pprParendType StarT = char '*'
pprParendType ConstraintT = text "Constraint"
pprParendType (SigT ty k) = parens (ppr ty <+> text "::" <+> ppr k)
pprParendType (WildCardT mbName) = char '_' <> maybe empty ppr mbName
pprParendType (InfixT x n y) = parens (ppr x <+> pprName' Infix n <+> ppr y)
pprParendType t@(UInfixT {}) = parens (pprUInfixT t)
pprParendType (ParensT t) = ppr t
pprParendType other = parens (ppr other)
pprUInfixT :: Type -> Doc
pprUInfixT (UInfixT x n y) = pprUInfixT x <+> pprName' Infix n <+> pprUInfixT y
pprUInfixT t = ppr t
instance Ppr Type where
ppr (ForallT tvars ctxt ty)
= text "forall" <+> hsep (map ppr tvars) <+> text "."
<+> sep [pprCxt ctxt, ppr ty]
ppr ty = pprTyApp (split ty)
-- Works, in a degenerate way, for SigT, and puts parens round (ty :: kind)
-- See Note [Pretty-printing kind signatures]
{- Note [Pretty-printing kind signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC's parser only recognises a kind signature in a type when there are
parens around it. E.g. the parens are required here:
f :: (Int :: *)
type instance F Int = (Bool :: *)
So we always print a SigT with parens (see Trac #10050). -}
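{- An illustrative sketch (the rendered name depends on how 'Int' resolves, so
   the output shown is approximate):

     pprint (SigT (ConT ''Int) StarT)   ==>   "(GHC.Types.Int :: *)"

   i.e. a SigT is always wrapped in parens so GHC can re-parse the result. -}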
pprTyApp :: (Type, [Type]) -> Doc
pprTyApp (ArrowT, [arg1,arg2]) = sep [pprFunArgType arg1 <+> text "->", ppr arg2]
pprTyApp (EqualityT, [arg1, arg2]) =
sep [pprFunArgType arg1 <+> text "~", ppr arg2]
pprTyApp (ListT, [arg]) = brackets (ppr arg)
pprTyApp (TupleT n, args)
| length args == n = parens (sep (punctuate comma (map ppr args)))
pprTyApp (PromotedTupleT n, args)
| length args == n = quoteParens (sep (punctuate comma (map ppr args)))
pprTyApp (fun, args) = pprParendType fun <+> sep (map pprParendType args)
pprFunArgType :: Type -> Doc -- Should really use a precedence argument
-- Everything except forall and (->) binds more tightly than (->)
pprFunArgType ty@(ForallT {}) = parens (ppr ty)
pprFunArgType ty@((ArrowT `AppT` _) `AppT` _) = parens (ppr ty)
pprFunArgType ty@(SigT _ _) = parens (ppr ty)
pprFunArgType ty = ppr ty
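-- Illustrative only (not in the original source): a function type in argument
-- position is parenthesised by 'pprFunArgType'; again assumes 'mkName' is in scope.
_pprFunArgTypeExample :: Doc
_pprFunArgTypeExample =
  pprFunArgType (ArrowT `AppT` ConT (mkName "Int") `AppT` ConT (mkName "Bool"))
-- Renders as "(Int -> Bool)".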
split :: Type -> (Type, [Type]) -- Split into function and args
split t = go t []
where go (AppT t1 t2) args = go t1 (t2:args)
go ty args = (ty, args)
pprTyLit :: TyLit -> Doc
pprTyLit (NumTyLit n) = integer n
pprTyLit (StrTyLit s) = text (show s)
instance Ppr TyLit where
ppr = pprTyLit
------------------------------
instance Ppr TyVarBndr where
ppr (PlainTV nm) = ppr nm
ppr (KindedTV nm k) = parens (ppr nm <+> dcolon <+> ppr k)
instance Ppr Role where
ppr NominalR = text "nominal"
ppr RepresentationalR = text "representational"
ppr PhantomR = text "phantom"
ppr InferR = text "_"
------------------------------
pprCxt :: Cxt -> Doc
pprCxt [] = empty
pprCxt [t] = ppr t <+> text "=>"
pprCxt ts = parens (sep $ punctuate comma $ map ppr ts) <+> text "=>"
------------------------------
instance Ppr Range where
ppr = brackets . pprRange
where pprRange :: Range -> Doc
pprRange (FromR e) = ppr e <> text ".."
pprRange (FromThenR e1 e2) = ppr e1 <> text ","
<> ppr e2 <> text ".."
pprRange (FromToR e1 e2) = ppr e1 <> text ".." <> ppr e2
pprRange (FromThenToR e1 e2 e3) = ppr e1 <> text ","
<> ppr e2 <> text ".."
<> ppr e3
------------------------------
where_clause :: [Dec] -> Doc
where_clause [] = empty
where_clause ds = nest nestDepth $ text "where" <+> vcat (map (ppr_dec False) ds)
showtextl :: Show a => a -> Doc
showtextl = text . map toLower . show
hashParens :: Doc -> Doc
hashParens d = text "(# " <> d <> text " #)"
quoteParens :: Doc -> Doc
quoteParens d = text "'(" <> d <> text ")"
-----------------------------
instance Ppr Loc where
ppr (Loc { loc_module = md
, loc_package = pkg
, loc_start = (start_ln, start_col)
, loc_end = (end_ln, end_col) })
= hcat [ text pkg, colon, text md, colon
, parens $ int start_ln <> comma <> int start_col
, text "-"
, parens $ int end_ln <> comma <> int end_col ]
|
acowley/ghc
|
libraries/template-haskell/Language/Haskell/TH/Ppr.hs
|
bsd-3-clause
| 24,210
| 0
| 14
| 7,312
| 8,977
| 4,376
| 4,601
| 487
| 6
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple
-- Copyright : Isaac Jones 2003-2005
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This is the command line front end to the Simple build system. When given
-- the parsed command-line args and package information, it is able to perform
-- basic commands like configure, build, install, register, etc.
--
-- This module exports the main functions that Setup.hs scripts use. It
-- re-exports the 'UserHooks' type, the standard entry points like
-- 'defaultMain' and 'defaultMainWithHooks' and the predefined sets of
-- 'UserHooks' that custom @Setup.hs@ scripts can extend to add their own
-- behaviour.
--
-- This module isn't called \"Simple\" because it's simple. Far from
-- it. It's called \"Simple\" because it does complicated things to
-- simple software.
--
-- The original idea was that there could be different build systems that all
-- presented the same compatible command line interfaces. There is still a
-- "Distribution.Make" system but in practice no packages use it.
{-
Work around this warning:
libraries/Cabal/Distribution/Simple.hs:78:0:
Warning: In the use of `runTests'
(imported from Distribution.Simple.UserHooks):
Deprecated: "Please use the new testing interface instead!"
-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Distribution.Simple (
module Distribution.Package,
module Distribution.Version,
module Distribution.License,
module Distribution.Simple.Compiler,
module Language.Haskell.Extension,
-- * Simple interface
defaultMain, defaultMainNoRead, defaultMainArgs,
-- * Customization
UserHooks(..), Args,
defaultMainWithHooks, defaultMainWithHooksArgs,
-- ** Standard sets of hooks
simpleUserHooks,
autoconfUserHooks,
defaultUserHooks, emptyUserHooks,
-- ** Utils
defaultHookedPackageDesc
) where
-- local
import Distribution.Simple.Compiler hiding (Flag)
import Distribution.Simple.UserHooks
import Distribution.Package -- must not specify an import list, since we re-export the whole module.
import Distribution.PackageDescription
( PackageDescription(..), GenericPackageDescription, Executable(..)
, updatePackageDescription, hasLibs
, HookedBuildInfo, emptyHookedBuildInfo )
import Distribution.PackageDescription.Parse
( readPackageDescription, readHookedBuildInfo )
import Distribution.PackageDescription.Configuration
( flattenPackageDescription )
import Distribution.Simple.Program
( defaultProgramConfiguration, addKnownPrograms, builtinPrograms
, restoreProgramConfiguration, reconfigurePrograms )
import Distribution.Simple.PreProcess (knownSuffixHandlers, PPSuffixHandler)
import Distribution.Simple.Setup
import Distribution.Simple.Command
import Distribution.Simple.Build ( build, repl )
import Distribution.Simple.SrcDist ( sdist )
import Distribution.Simple.Register
( register, unregister )
import Distribution.Simple.Configure
( getPersistBuildConfig, maybeGetPersistBuildConfig
, writePersistBuildConfig, checkPersistBuildConfigOutdated
, configure, checkForeignDeps, findDistPrefOrDefault )
import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) )
import Distribution.Simple.Bench (bench)
import Distribution.Simple.BuildPaths ( srcPref)
import Distribution.Simple.Test (test)
import Distribution.Simple.Install (install)
import Distribution.Simple.Haddock (haddock, hscolour)
import Distribution.Simple.Utils
(die, notice, info, warn, setupMessage, chattyTry,
defaultPackageDesc, defaultHookedPackageDesc,
rawSystemExitWithEnv, cabalVersion, topHandler )
import Distribution.System
( OS(..), buildOS )
import Distribution.Verbosity
import Language.Haskell.Extension
import Distribution.Version
import Distribution.License
import Distribution.Text
( display )
-- Base
import System.Environment(getArgs, getProgName)
import System.Directory(removeFile, doesFileExist,
doesDirectoryExist, removeDirectoryRecursive)
import System.Exit (exitWith,ExitCode(..))
import System.IO.Error (isDoesNotExistError)
import Control.Exception (throwIO)
import Distribution.Compat.Environment (getEnvironment)
import Distribution.Compat.Exception (catchIO)
import Control.Monad (when)
import Data.Foldable (traverse_)
import Data.List (intercalate, unionBy, nub, (\\))
-- | A simple implementation of @main@ for a Cabal setup script.
-- It reads the package description file using IO, and performs the
-- action specified on the command line.
defaultMain :: IO ()
defaultMain = getArgs >>= defaultMainHelper simpleUserHooks
-- | A version of 'defaultMain' that is passed the command line
-- arguments, rather than getting them from the environment.
defaultMainArgs :: [String] -> IO ()
defaultMainArgs = defaultMainHelper simpleUserHooks
-- | A customizable version of 'defaultMain'.
defaultMainWithHooks :: UserHooks -> IO ()
defaultMainWithHooks hooks = getArgs >>= defaultMainHelper hooks
-- | A customizable version of 'defaultMain' that also takes the command
-- line arguments.
defaultMainWithHooksArgs :: UserHooks -> [String] -> IO ()
defaultMainWithHooksArgs = defaultMainHelper
-- | Like 'defaultMain', but accepts the package description as input
-- rather than using IO to read it.
defaultMainNoRead :: GenericPackageDescription -> IO ()
defaultMainNoRead pkg_descr =
getArgs >>=
defaultMainHelper simpleUserHooks { readDesc = return (Just pkg_descr) }
defaultMainHelper :: UserHooks -> Args -> IO ()
defaultMainHelper hooks args = topHandler $
case commandsRun (globalCommand commands) commands args of
CommandHelp help -> printHelp help
CommandList opts -> printOptionsList opts
CommandErrors errs -> printErrors errs
CommandReadyToGo (flags, commandParse) ->
case commandParse of
_ | fromFlag (globalVersion flags) -> printVersion
| fromFlag (globalNumericVersion flags) -> printNumericVersion
CommandHelp help -> printHelp help
CommandList opts -> printOptionsList opts
CommandErrors errs -> printErrors errs
CommandReadyToGo action -> action
where
printHelp help = getProgName >>= putStr . help
printOptionsList = putStr . unlines
printErrors errs = do
putStr (intercalate "\n" errs)
exitWith (ExitFailure 1)
printNumericVersion = putStrLn $ display cabalVersion
printVersion = putStrLn $ "Cabal library version "
++ display cabalVersion
progs = addKnownPrograms (hookedPrograms hooks) defaultProgramConfiguration
commands =
[configureCommand progs `commandAddAction` \fs as ->
configureAction hooks fs as >> return ()
,buildCommand progs `commandAddAction` buildAction hooks
,replCommand progs `commandAddAction` replAction hooks
,installCommand `commandAddAction` installAction hooks
,copyCommand `commandAddAction` copyAction hooks
,haddockCommand `commandAddAction` haddockAction hooks
,cleanCommand `commandAddAction` cleanAction hooks
,sdistCommand `commandAddAction` sdistAction hooks
,hscolourCommand `commandAddAction` hscolourAction hooks
,registerCommand `commandAddAction` registerAction hooks
,unregisterCommand `commandAddAction` unregisterAction hooks
,testCommand `commandAddAction` testAction hooks
,benchmarkCommand `commandAddAction` benchAction hooks
]
-- | Combine the preprocessors in the given hooks with the
-- preprocessors built into cabal.
allSuffixHandlers :: UserHooks
-> [PPSuffixHandler]
allSuffixHandlers hooks
= overridesPP (hookedPreProcessors hooks) knownSuffixHandlers
where
overridesPP :: [PPSuffixHandler] -> [PPSuffixHandler] -> [PPSuffixHandler]
overridesPP = unionBy (\x y -> fst x == fst y)
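-- Illustrative check (not part of the original module): because 'unionBy'
-- keeps elements of its first argument, hook-supplied preprocessors shadow
-- built-in handlers registered for the same suffix. The values below are
-- made up purely for demonstration.
_overridesPPExample :: Bool
_overridesPPExample =
    unionBy (\x y -> fst x == fst y) [("hs", 'a')] [("hs", 'b'), ("lhs", 'c')]
      == [("hs", 'a'), ("lhs", 'c')]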
configureAction :: UserHooks -> ConfigFlags -> Args -> IO LocalBuildInfo
configureAction hooks flags args = do
distPref <- findDistPrefOrDefault (configDistPref flags)
let flags' = flags { configDistPref = toFlag distPref }
pbi <- preConf hooks args flags'
(mb_pd_file, pkg_descr0) <- confPkgDescr
--get_pkg_descr (configVerbosity flags')
--let pkg_descr = updatePackageDescription pbi pkg_descr0
let epkg_descr = (pkg_descr0, pbi)
--(warns, ers) <- sanityCheckPackage pkg_descr
--errorOut (configVerbosity flags') warns ers
localbuildinfo0 <- confHook hooks epkg_descr flags'
-- remember the .cabal filename if we know it
-- and all the extra command line args
let localbuildinfo = localbuildinfo0 {
pkgDescrFile = mb_pd_file,
extraConfigArgs = args
}
writePersistBuildConfig distPref localbuildinfo
let pkg_descr = localPkgDescr localbuildinfo
postConf hooks args flags' pkg_descr localbuildinfo
return localbuildinfo
where
verbosity = fromFlag (configVerbosity flags)
confPkgDescr :: IO (Maybe FilePath, GenericPackageDescription)
confPkgDescr = do
mdescr <- readDesc hooks
case mdescr of
Just descr -> return (Nothing, descr)
Nothing -> do
pdfile <- defaultPackageDesc verbosity
descr <- readPackageDescription verbosity pdfile
return (Just pdfile, descr)
buildAction :: UserHooks -> BuildFlags -> Args -> IO ()
buildAction hooks flags args = do
distPref <- findDistPrefOrDefault (buildDistPref flags)
let verbosity = fromFlag $ buildVerbosity flags
flags' = flags { buildDistPref = toFlag distPref }
lbi <- getBuildConfig hooks verbosity distPref
progs <- reconfigurePrograms verbosity
(buildProgramPaths flags')
(buildProgramArgs flags')
(withPrograms lbi)
hookedAction preBuild buildHook postBuild
(return lbi { withPrograms = progs })
hooks flags' { buildArgs = args } args
replAction :: UserHooks -> ReplFlags -> Args -> IO ()
replAction hooks flags args = do
distPref <- findDistPrefOrDefault (replDistPref flags)
let verbosity = fromFlag $ replVerbosity flags
flags' = flags { replDistPref = toFlag distPref }
lbi <- getBuildConfig hooks verbosity distPref
progs <- reconfigurePrograms verbosity
(replProgramPaths flags')
(replProgramArgs flags')
(withPrograms lbi)
pbi <- preRepl hooks args flags'
let lbi' = lbi { withPrograms = progs }
pkg_descr0 = localPkgDescr lbi'
pkg_descr = updatePackageDescription pbi pkg_descr0
replHook hooks pkg_descr lbi' hooks flags' args
postRepl hooks args flags' pkg_descr lbi'
hscolourAction :: UserHooks -> HscolourFlags -> Args -> IO ()
hscolourAction hooks flags args = do
distPref <- findDistPrefOrDefault (hscolourDistPref flags)
let verbosity = fromFlag $ hscolourVerbosity flags
flags' = flags { hscolourDistPref = toFlag distPref }
hookedAction preHscolour hscolourHook postHscolour
(getBuildConfig hooks verbosity distPref)
hooks flags' args
haddockAction :: UserHooks -> HaddockFlags -> Args -> IO ()
haddockAction hooks flags args = do
distPref <- findDistPrefOrDefault (haddockDistPref flags)
let verbosity = fromFlag $ haddockVerbosity flags
flags' = flags { haddockDistPref = toFlag distPref }
lbi <- getBuildConfig hooks verbosity distPref
progs <- reconfigurePrograms verbosity
(haddockProgramPaths flags')
(haddockProgramArgs flags')
(withPrograms lbi)
hookedAction preHaddock haddockHook postHaddock
(return lbi { withPrograms = progs })
hooks flags' args
cleanAction :: UserHooks -> CleanFlags -> Args -> IO ()
cleanAction hooks flags args = do
distPref <- findDistPrefOrDefault (cleanDistPref flags)
let flags' = flags { cleanDistPref = toFlag distPref }
pbi <- preClean hooks args flags'
pdfile <- defaultPackageDesc verbosity
ppd <- readPackageDescription verbosity pdfile
let pkg_descr0 = flattenPackageDescription ppd
  -- We don't sanity check for clean, as an error
  -- here would prevent cleaning:
--sanityCheckHookedBuildInfo pkg_descr0 pbi
let pkg_descr = updatePackageDescription pbi pkg_descr0
cleanHook hooks pkg_descr () hooks flags'
postClean hooks args flags' pkg_descr ()
where
verbosity = fromFlag (cleanVerbosity flags)
copyAction :: UserHooks -> CopyFlags -> Args -> IO ()
copyAction hooks flags args = do
distPref <- findDistPrefOrDefault (copyDistPref flags)
let verbosity = fromFlag $ copyVerbosity flags
flags' = flags { copyDistPref = toFlag distPref }
hookedAction preCopy copyHook postCopy
(getBuildConfig hooks verbosity distPref)
hooks flags' args
installAction :: UserHooks -> InstallFlags -> Args -> IO ()
installAction hooks flags args = do
distPref <- findDistPrefOrDefault (installDistPref flags)
let verbosity = fromFlag $ installVerbosity flags
flags' = flags { installDistPref = toFlag distPref }
hookedAction preInst instHook postInst
(getBuildConfig hooks verbosity distPref)
hooks flags' args
sdistAction :: UserHooks -> SDistFlags -> Args -> IO ()
sdistAction hooks flags args = do
distPref <- findDistPrefOrDefault (sDistDistPref flags)
let flags' = flags { sDistDistPref = toFlag distPref }
pbi <- preSDist hooks args flags'
mlbi <- maybeGetPersistBuildConfig distPref
pdfile <- defaultPackageDesc verbosity
ppd <- readPackageDescription verbosity pdfile
let pkg_descr0 = flattenPackageDescription ppd
sanityCheckHookedBuildInfo pkg_descr0 pbi
let pkg_descr = updatePackageDescription pbi pkg_descr0
sDistHook hooks pkg_descr mlbi hooks flags'
postSDist hooks args flags' pkg_descr mlbi
where
verbosity = fromFlag (sDistVerbosity flags)
testAction :: UserHooks -> TestFlags -> Args -> IO ()
testAction hooks flags args = do
distPref <- findDistPrefOrDefault (testDistPref flags)
let verbosity = fromFlag $ testVerbosity flags
flags' = flags { testDistPref = toFlag distPref }
localBuildInfo <- getBuildConfig hooks verbosity distPref
let pkg_descr = localPkgDescr localBuildInfo
-- It is safe to do 'runTests' before the new test handler because the
-- default action is a no-op and if the package uses the old test interface
-- the new handler will find no tests.
runTests hooks args False pkg_descr localBuildInfo
hookedActionWithArgs preTest testHook postTest
(getBuildConfig hooks verbosity distPref)
hooks flags' args
benchAction :: UserHooks -> BenchmarkFlags -> Args -> IO ()
benchAction hooks flags args = do
distPref <- findDistPrefOrDefault (benchmarkDistPref flags)
let verbosity = fromFlag $ benchmarkVerbosity flags
flags' = flags { benchmarkDistPref = toFlag distPref }
hookedActionWithArgs preBench benchHook postBench
(getBuildConfig hooks verbosity distPref)
hooks flags' args
registerAction :: UserHooks -> RegisterFlags -> Args -> IO ()
registerAction hooks flags args = do
distPref <- findDistPrefOrDefault (regDistPref flags)
let verbosity = fromFlag $ regVerbosity flags
flags' = flags { regDistPref = toFlag distPref }
hookedAction preReg regHook postReg
(getBuildConfig hooks verbosity distPref)
hooks flags' args
unregisterAction :: UserHooks -> RegisterFlags -> Args -> IO ()
unregisterAction hooks flags args = do
distPref <- findDistPrefOrDefault (regDistPref flags)
let verbosity = fromFlag $ regVerbosity flags
flags' = flags { regDistPref = toFlag distPref }
hookedAction preUnreg unregHook postUnreg
(getBuildConfig hooks verbosity distPref)
hooks flags' args
hookedAction :: (UserHooks -> Args -> flags -> IO HookedBuildInfo)
-> (UserHooks -> PackageDescription -> LocalBuildInfo
-> UserHooks -> flags -> IO ())
-> (UserHooks -> Args -> flags -> PackageDescription
-> LocalBuildInfo -> IO ())
-> IO LocalBuildInfo
-> UserHooks -> flags -> Args -> IO ()
hookedAction pre_hook cmd_hook =
hookedActionWithArgs pre_hook (\h _ pd lbi uh flags -> cmd_hook h pd lbi uh flags)
hookedActionWithArgs :: (UserHooks -> Args -> flags -> IO HookedBuildInfo)
-> (UserHooks -> Args -> PackageDescription -> LocalBuildInfo
-> UserHooks -> flags -> IO ())
-> (UserHooks -> Args -> flags -> PackageDescription
-> LocalBuildInfo -> IO ())
-> IO LocalBuildInfo
-> UserHooks -> flags -> Args -> IO ()
hookedActionWithArgs pre_hook cmd_hook post_hook get_build_config hooks flags args = do
pbi <- pre_hook hooks args flags
localbuildinfo <- get_build_config
let pkg_descr0 = localPkgDescr localbuildinfo
--pkg_descr0 <- get_pkg_descr (get_verbose flags)
sanityCheckHookedBuildInfo pkg_descr0 pbi
let pkg_descr = updatePackageDescription pbi pkg_descr0
-- TODO: should we write the modified package descr back to the
-- localbuildinfo?
cmd_hook hooks args pkg_descr localbuildinfo hooks flags
post_hook hooks args flags pkg_descr localbuildinfo
sanityCheckHookedBuildInfo :: PackageDescription -> HookedBuildInfo -> IO ()
sanityCheckHookedBuildInfo PackageDescription { library = Nothing } (Just _,_)
= die $ "The buildinfo contains info for a library, "
++ "but the package does not have a library."
sanityCheckHookedBuildInfo pkg_descr (_, hookExes)
| not (null nonExistant)
= die $ "The buildinfo contains info for an executable called '"
         ++ head nonExistant ++ "' but the package does not have an "
++ "executable with that name."
where
pkgExeNames = nub (map exeName (executables pkg_descr))
hookExeNames = nub (map fst hookExes)
nonExistant = hookExeNames \\ pkgExeNames
sanityCheckHookedBuildInfo _ _ = return ()
getBuildConfig :: UserHooks -> Verbosity -> FilePath -> IO LocalBuildInfo
getBuildConfig hooks verbosity distPref = do
lbi_wo_programs <- getPersistBuildConfig distPref
-- Restore info about unconfigured programs, since it is not serialized
let lbi = lbi_wo_programs {
withPrograms = restoreProgramConfiguration
(builtinPrograms ++ hookedPrograms hooks)
(withPrograms lbi_wo_programs)
}
case pkgDescrFile lbi of
Nothing -> return lbi
Just pkg_descr_file -> do
outdated <- checkPersistBuildConfigOutdated distPref pkg_descr_file
if outdated
then reconfigure pkg_descr_file lbi
else return lbi
where
reconfigure :: FilePath -> LocalBuildInfo -> IO LocalBuildInfo
reconfigure pkg_descr_file lbi = do
notice verbosity $ pkg_descr_file ++ " has been changed. "
++ "Re-configuring with most recently used options. "
++ "If this fails, please run configure manually.\n"
let cFlags = configFlags lbi
let cFlags' = cFlags {
-- Since the list of unconfigured programs is not serialized,
-- restore it to the same value as normally used at the beginning
-- of a configure run:
configPrograms = restoreProgramConfiguration
(builtinPrograms ++ hookedPrograms hooks)
(configPrograms cFlags),
-- Use the current, not saved verbosity level:
configVerbosity = Flag verbosity
}
configureAction hooks cFlags' (extraConfigArgs lbi)
-- --------------------------------------------------------------------------
-- Cleaning
clean :: PackageDescription -> CleanFlags -> IO ()
clean pkg_descr flags = do
let distPref = fromFlagOrDefault defaultDistPref $ cleanDistPref flags
notice verbosity "cleaning..."
maybeConfig <- if fromFlag (cleanSaveConf flags)
then maybeGetPersistBuildConfig distPref
else return Nothing
-- remove the whole dist/ directory rather than tracking exactly what files
-- we created in there.
chattyTry "removing dist/" $ do
exists <- doesDirectoryExist distPref
when exists (removeDirectoryRecursive distPref)
-- Any extra files the user wants to remove
mapM_ removeFileOrDirectory (extraTmpFiles pkg_descr)
-- If the user wanted to save the config, write it back
traverse_ (writePersistBuildConfig distPref) maybeConfig
where
removeFileOrDirectory :: FilePath -> IO ()
removeFileOrDirectory fname = do
isDir <- doesDirectoryExist fname
isFile <- doesFileExist fname
if isDir then removeDirectoryRecursive fname
else when isFile $ removeFile fname
verbosity = fromFlag (cleanVerbosity flags)
-- --------------------------------------------------------------------------
-- Default hooks
-- | Hooks that correspond to a plain instantiation of the
-- \"simple\" build system
simpleUserHooks :: UserHooks
simpleUserHooks =
emptyUserHooks {
confHook = configure,
postConf = finalChecks,
buildHook = defaultBuildHook,
replHook = defaultReplHook,
copyHook = \desc lbi _ f -> install desc lbi f, -- has correct 'copy' behavior with params
testHook = defaultTestHook,
benchHook = defaultBenchHook,
instHook = defaultInstallHook,
sDistHook = \p l h f -> sdist p l f srcPref (allSuffixHandlers h),
cleanHook = \p _ _ f -> clean p f,
hscolourHook = \p l h f -> hscolour p l (allSuffixHandlers h) f,
haddockHook = \p l h f -> haddock p l (allSuffixHandlers h) f,
regHook = defaultRegHook,
unregHook = \p l _ f -> unregister p l f
}
where
finalChecks _args flags pkg_descr lbi =
checkForeignDeps pkg_descr lbi (lessVerbose verbosity)
where
verbosity = fromFlag (configVerbosity flags)
-- | Basic autoconf 'UserHooks':
--
-- * 'postConf' runs @.\/configure@, if present.
--
-- * the pre-hooks 'preBuild', 'preClean', 'preCopy', 'preInst',
-- 'preReg' and 'preUnreg' read additional build information from
-- /package/@.buildinfo@, if present.
--
-- Thus @configure@ can use local system information to generate
-- /package/@.buildinfo@ and possibly other files.
{-# DEPRECATED defaultUserHooks
"Use simpleUserHooks or autoconfUserHooks, unless you need Cabal-1.2\n compatibility in which case you must stick with defaultUserHooks" #-}
defaultUserHooks :: UserHooks
defaultUserHooks = autoconfUserHooks {
confHook = \pkg flags -> do
let verbosity = fromFlag (configVerbosity flags)
warn verbosity
"defaultUserHooks in Setup script is deprecated."
confHook autoconfUserHooks pkg flags,
postConf = oldCompatPostConf
}
-- This is the annoying old version that only runs configure if it exists.
-- It's here for compatibility with existing Setup.hs scripts. See:
-- https://github.com/haskell/cabal/issues/158
where oldCompatPostConf args flags pkg_descr lbi
= do let verbosity = fromFlag (configVerbosity flags)
noExtraFlags args
confExists <- doesFileExist "configure"
when confExists $
runConfigureScript verbosity
backwardsCompatHack flags lbi
pbi <- getHookedBuildInfo verbosity
sanityCheckHookedBuildInfo pkg_descr pbi
let pkg_descr' = updatePackageDescription pbi pkg_descr
postConf simpleUserHooks args flags pkg_descr' lbi
backwardsCompatHack = True
autoconfUserHooks :: UserHooks
autoconfUserHooks
= simpleUserHooks
{
postConf = defaultPostConf,
preBuild = \_ flags ->
-- not using 'readHook' here because 'build' takes
-- extra args
getHookedBuildInfo $ fromFlag $ buildVerbosity flags,
preClean = readHook cleanVerbosity,
preCopy = readHook copyVerbosity,
preInst = readHook installVerbosity,
preHscolour = readHook hscolourVerbosity,
preHaddock = readHook haddockVerbosity,
preReg = readHook regVerbosity,
preUnreg = readHook regVerbosity
}
where defaultPostConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO ()
defaultPostConf args flags pkg_descr lbi
= do let verbosity = fromFlag (configVerbosity flags)
noExtraFlags args
confExists <- doesFileExist "configure"
if confExists
then runConfigureScript verbosity
backwardsCompatHack flags lbi
else die "configure script not found."
pbi <- getHookedBuildInfo verbosity
sanityCheckHookedBuildInfo pkg_descr pbi
let pkg_descr' = updatePackageDescription pbi pkg_descr
postConf simpleUserHooks args flags pkg_descr' lbi
backwardsCompatHack = False
readHook :: (a -> Flag Verbosity) -> Args -> a -> IO HookedBuildInfo
readHook get_verbosity a flags = do
noExtraFlags a
getHookedBuildInfo verbosity
where
verbosity = fromFlag (get_verbosity flags)
runConfigureScript :: Verbosity -> Bool -> ConfigFlags -> LocalBuildInfo
-> IO ()
runConfigureScript verbosity backwardsCompatHack flags lbi = do
env <- getEnvironment
let programConfig = withPrograms lbi
(ccProg, ccFlags) <- configureCCompiler verbosity programConfig
-- The C compiler's compilation and linker flags (e.g.
-- "C compiler flags" and "Gcc Linker flags" from GHC) have already
-- been merged into ccFlags, so we set both CFLAGS and LDFLAGS
-- to ccFlags
      -- We don't try to tell configure which ld to use, as we don't have
-- a way to pass its flags too
let env' = appendToEnvironment ("CFLAGS", unwords ccFlags)
env
args' = args ++ ["--with-gcc=" ++ ccProg]
handleNoWindowsSH $
rawSystemExitWithEnv verbosity "sh" args' env'
where
args = "./configure" : configureArgs backwardsCompatHack flags
appendToEnvironment (key, val) [] = [(key, val)]
appendToEnvironment (key, val) (kv@(k, v) : rest)
| key == k = (key, v ++ " " ++ val) : rest
| otherwise = kv : appendToEnvironment (key, val) rest
handleNoWindowsSH action
| buildOS /= Windows
= action
| otherwise
= action
`catchIO` \ioe -> if isDoesNotExistError ioe
then die notFoundMsg
else throwIO ioe
notFoundMsg = "The package has a './configure' script. This requires a "
++ "Unix compatibility toolchain such as MinGW+MSYS or Cygwin."
getHookedBuildInfo :: Verbosity -> IO HookedBuildInfo
getHookedBuildInfo verbosity = do
maybe_infoFile <- defaultHookedPackageDesc
case maybe_infoFile of
Nothing -> return emptyHookedBuildInfo
Just infoFile -> do
info verbosity $ "Reading parameters from " ++ infoFile
readHookedBuildInfo verbosity infoFile
defaultTestHook :: Args -> PackageDescription -> LocalBuildInfo
-> UserHooks -> TestFlags -> IO ()
defaultTestHook args pkg_descr localbuildinfo _ flags =
test args pkg_descr localbuildinfo flags
defaultBenchHook :: Args -> PackageDescription -> LocalBuildInfo
-> UserHooks -> BenchmarkFlags -> IO ()
defaultBenchHook args pkg_descr localbuildinfo _ flags =
bench args pkg_descr localbuildinfo flags
defaultInstallHook :: PackageDescription -> LocalBuildInfo
-> UserHooks -> InstallFlags -> IO ()
defaultInstallHook pkg_descr localbuildinfo _ flags = do
let copyFlags = defaultCopyFlags {
copyDistPref = installDistPref flags,
copyDest = toFlag NoCopyDest,
copyVerbosity = installVerbosity flags
}
install pkg_descr localbuildinfo copyFlags
let registerFlags = defaultRegisterFlags {
regDistPref = installDistPref flags,
regInPlace = installInPlace flags,
regPackageDB = installPackageDB flags,
regVerbosity = installVerbosity flags
}
when (hasLibs pkg_descr) $ register pkg_descr localbuildinfo registerFlags
defaultBuildHook :: PackageDescription -> LocalBuildInfo
-> UserHooks -> BuildFlags -> IO ()
defaultBuildHook pkg_descr localbuildinfo hooks flags =
build pkg_descr localbuildinfo flags (allSuffixHandlers hooks)
defaultReplHook :: PackageDescription -> LocalBuildInfo
-> UserHooks -> ReplFlags -> [String] -> IO ()
defaultReplHook pkg_descr localbuildinfo hooks flags args =
repl pkg_descr localbuildinfo flags (allSuffixHandlers hooks) args
defaultRegHook :: PackageDescription -> LocalBuildInfo
-> UserHooks -> RegisterFlags -> IO ()
defaultRegHook pkg_descr localbuildinfo _ flags =
if hasLibs pkg_descr
then register pkg_descr localbuildinfo flags
else setupMessage verbosity
"Package contains no library to register:" (packageId pkg_descr)
where verbosity = fromFlag (regVerbosity flags)
|
rimmington/cabal
|
Cabal/Distribution/Simple.hs
|
bsd-3-clause
| 30,341
| 0
| 17
| 7,656
| 6,120
| 3,123
| 2,997
| 503
| 8
|
-- | Groups black-box tests of cabal-install and configures them to test
-- the correct binary.
--
-- This file should do nothing but import tests from other modules and run
-- them with the path to the correct cabal-install binary.
module Main
where
-- Modules from Cabal.
import Distribution.Simple.Configure (findDistPrefOrDefault)
import Distribution.Simple.Program.Builtin (ghcPkgProgram)
import Distribution.Simple.Program.Db
(defaultProgramDb, requireProgram, setProgramSearchPath)
import Distribution.Simple.Program.Find
(ProgramSearchPathEntry(ProgramSearchPathDir), defaultProgramSearchPath)
import Distribution.Simple.Program.Types
( Program(..), simpleProgram, programPath)
import Distribution.Simple.Setup ( Flag(..) )
import Distribution.Simple.Utils ( findProgramVersion )
import Distribution.Verbosity (normal)
-- Third party modules.
import qualified Control.Exception.Extensible as E
import Distribution.Compat.Environment ( setEnv )
import System.Directory
( canonicalizePath, getCurrentDirectory, setCurrentDirectory
, removeFile, doesFileExist )
import System.FilePath ((</>))
import Test.Tasty (TestTree, defaultMain, testGroup)
import Control.Monad ( when )
-- Module containing common test code.
import PackageTests.PackageTester ( TestsPaths(..)
, packageTestsDirectory
, packageTestsConfigFile )
-- Modules containing the tests.
import qualified PackageTests.Exec.Check
import qualified PackageTests.Freeze.Check
import qualified PackageTests.MultipleSource.Check
-- List of tests to run. Each test will be called with the path to the
-- cabal binary to use.
tests :: PackageTests.PackageTester.TestsPaths -> TestTree
tests paths = testGroup "Package Tests" $
[ testGroup "Freeze" $ PackageTests.Freeze.Check.tests paths
, testGroup "Exec" $ PackageTests.Exec.Check.tests paths
, testGroup "MultipleSource" $ PackageTests.MultipleSource.Check.tests paths
]
cabalProgram :: Program
cabalProgram = (simpleProgram "cabal") {
programFindVersion = findProgramVersion "--numeric-version" id
}
main :: IO ()
main = do
-- Find the builddir used to build Cabal
distPref <- findDistPrefOrDefault NoFlag
-- Use the default builddir for all of the subsequent package tests
setEnv "CABAL_BUILDDIR" "dist"
buildDir <- canonicalizePath (distPref </> "build/cabal")
let programSearchPath = ProgramSearchPathDir buildDir : defaultProgramSearchPath
(cabal, _) <- requireProgram normal cabalProgram
(setProgramSearchPath programSearchPath defaultProgramDb)
(ghcPkg, _) <- requireProgram normal ghcPkgProgram defaultProgramDb
canonicalConfigPath <- canonicalizePath $ "tests" </> packageTestsDirectory
let testsPaths = TestsPaths {
cabalPath = programPath cabal,
ghcPkgPath = programPath ghcPkg,
configPath = canonicalConfigPath </> packageTestsConfigFile
}
putStrLn $ "Using cabal: " ++ cabalPath testsPaths
putStrLn $ "Using ghc-pkg: " ++ ghcPkgPath testsPaths
cwd <- getCurrentDirectory
let confFile = packageTestsDirectory </> "cabal-config"
removeConf = do
b <- doesFileExist confFile
when b $ removeFile confFile
let runTests = do
setCurrentDirectory "tests"
removeConf -- assert that there is no existing config file
-- (we want deterministic testing with the default
-- config values)
defaultMain $ tests testsPaths
runTests `E.finally` do
-- remove the default config file that got created by the tests
removeConf
-- Change back to the old working directory so that the tests can be
-- repeatedly run in `cabal repl` via `:main`.
setCurrentDirectory cwd
|
x-y-z/cabal
|
cabal-install/tests/PackageTests.hs
|
bsd-3-clause
| 3,921
| 0
| 13
| 863
| 661
| 372
| 289
| 62
| 1
|
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# LANGUAGE CPP, MagicHash, UnboxedTuples #-}
-------------------------------------------------------------------------------
--
-- (c) The University of Glasgow 2007
--
-- | Break Arrays
--
-- An array of bytes, indexed by a breakpoint number (breakpointId in Tickish)
-- There is one of these arrays per module.
--
-- Each byte is
-- 1 if the corresponding breakpoint is enabled
-- 0 otherwise
--
-------------------------------------------------------------------------------
module GHCi.BreakArray
(
BreakArray
(BA) -- constructor is exported only for ByteCodeGen
, newBreakArray
, getBreak
, setBreakOn
, setBreakOff
, showBreakArray
) where
import Control.Monad
import Data.Word
import GHC.Word
import GHC.Exts
import GHC.IO ( IO(..) )
import System.IO.Unsafe ( unsafeDupablePerformIO )
data BreakArray = BA (MutableByteArray# RealWorld)
breakOff, breakOn :: Word8
breakOn = 1
breakOff = 0
showBreakArray :: BreakArray -> IO ()
showBreakArray array = do
forM_ [0 .. (size array - 1)] $ \i -> do
val <- readBreakArray array i
putStr $ ' ' : show val
putStr "\n"
setBreakOn :: BreakArray -> Int -> IO Bool
setBreakOn array index
| safeIndex array index = do
writeBreakArray array index breakOn
return True
| otherwise = return False
setBreakOff :: BreakArray -> Int -> IO Bool
setBreakOff array index
| safeIndex array index = do
writeBreakArray array index breakOff
return True
| otherwise = return False
getBreak :: BreakArray -> Int -> IO (Maybe Word8)
getBreak array index
| safeIndex array index = do
val <- readBreakArray array index
return $ Just val
| otherwise = return Nothing
safeIndex :: BreakArray -> Int -> Bool
safeIndex array index = index < size array && index >= 0
size :: BreakArray -> Int
size (BA array) = size
where
-- We want to keep this operation pure. The mutable byte array
-- is never resized so this is safe.
size = unsafeDupablePerformIO $ sizeofMutableByteArray array
sizeofMutableByteArray :: MutableByteArray# RealWorld -> IO Int
sizeofMutableByteArray arr =
IO $ \s -> case getSizeofMutableByteArray# arr s of
(# s', n# #) -> (# s', I# n# #)
allocBA :: Int -> IO BreakArray
allocBA (I# sz) = IO $ \s1 ->
case newByteArray# sz s1 of { (# s2, array #) -> (# s2, BA array #) }
-- create a new break array and initialise elements to zero
newBreakArray :: Int -> IO BreakArray
newBreakArray entries@(I# sz) = do
BA array <- allocBA entries
case breakOff of
W8# off -> do
let loop n | isTrue# (n ==# sz) = return ()
| otherwise = do writeBA# array n off; loop (n +# 1#)
loop 0#
return $ BA array
writeBA# :: MutableByteArray# RealWorld -> Int# -> Word# -> IO ()
writeBA# array i word = IO $ \s ->
case writeWord8Array# array i word s of { s -> (# s, () #) }
writeBreakArray :: BreakArray -> Int -> Word8 -> IO ()
writeBreakArray (BA array) (I# i) (W8# word) = writeBA# array i word
readBA# :: MutableByteArray# RealWorld -> Int# -> IO Word8
readBA# array i = IO $ \s ->
case readWord8Array# array i s of { (# s, c #) -> (# s, W8# c #) }
readBreakArray :: BreakArray -> Int -> IO Word8
readBreakArray (BA array) (I# i) = readBA# array i
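-- A minimal usage sketch (not part of the original module), intended for
-- experimentation in GHCi or a test harness: allocate four breakpoint slots,
-- enable slot 2, and read it back.
_breakArrayDemo :: IO ()
_breakArrayDemo = do
    ba <- newBreakArray 4
    _ <- setBreakOn ba 2
    getBreak ba 2 >>= print   -- prints: Just 1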
|
tolysz/prepare-ghcjs
|
spec-lts8/ghci/GHCi/BreakArray.hs
|
bsd-3-clause
| 3,419
| 0
| 20
| 839
| 1,005
| 505
| 500
| 79
| 1
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MagicHash #-}
module Main where
import Language.Haskell.TH
import GHC.Prim(Addr#)
import GHC.Ptr
import Foreign.Marshal.Array (peekArray)
import Data.Word (Word8)
check_equal :: [Word8] -> Addr# -> IO ()
check_equal bytes addr = do
bytes' <- peekArray (length bytes) (Ptr addr)
print (bytes == bytes')
main = do
-- check round-trip
check_equal [0..255] $(litE $ stringPrimL [0..255])
-- check printing
let e = LitE (StringPrimL [0..255])
print e
putStrLn (pprint e)
|
manyoo/ghcjs
|
test/ghc/th/tH_StringPrimL.hs
|
mit
| 532
| 0
| 13
| 93
| 193
| 101
| 92
| 17
| 1
|
{-# LANGUAGE TemplateHaskell, DataKinds #-}
module T8455 where
ty = [t| 5 |]
|
forked-upstream-packages-for-ghcjs/ghc
|
testsuite/tests/th/T8455.hs
|
bsd-3-clause
| 78
| 0
| 4
| 14
| 14
| 11
| 3
| 3
| 1
|
--
-- Helper module for mod101.hs
module Mod101_AuxA where
data DataA = ConA Int | ConB Bool
|
urbanslug/ghc
|
testsuite/tests/module/Mod101_AuxA.hs
|
bsd-3-clause
| 94
| 0
| 6
| 18
| 20
| 13
| 7
| 2
| 0
|
module H6502.Util
(
setPC, setS, setA, setX, setY,
setFlag, unsetFlag, setFlags, putFlag,
onState, getPC, getS, getA, getX, getY,
getFlag, getFlags,
wordJoin, wordSplit,
peek, poke, peek16, poke16, peekPC, peekPC16,
pop, push, pop16, push16,
addrZeroPage, addrZeroPageX, addrZeroPageY, addrAbsolute, addrAbsoluteX, addrAbsoluteY, addrIxIndirect, addrIndirectIx
)
where
import Data.Bits
import Data.Word
import H6502.Env
import H6502.State
import H6502.Trans
modifyState :: Monad m => (H6502State -> H6502State) -> H6502T m ()
modifyState f = H6502T $ \e s -> return ((), f s)
-- | Set the program counter.
setPC :: Monad m => Word16 -> H6502T m ()
setPC pc = modifyState $ \s -> s{reg_pc = pc}
-- | Set the stack pointer.
setS :: Monad m => Word8 -> H6502T m ()
setS sp = modifyState $ \s -> s{reg_s = sp}
-- | Set the accumulator.
setA :: Monad m => Word8 -> H6502T m ()
setA a = modifyState $ \s -> s{reg_a = a}
-- | Set the X index.
setX :: Monad m => Word8 -> H6502T m ()
setX x = modifyState $ \s -> s{reg_x = x}
-- | Set the Y index.
setY :: Monad m => Word8 -> H6502T m ()
setY y = modifyState $ \s -> s{reg_y = y}
-- | Turn the given flag on, if not already.
setFlag :: Monad m => Word8 -> H6502T m ()
setFlag x = modifyState $ \s -> s{reg_flags = reg_flags s .|. x}
-- | Turn the given flag off, if not already.
unsetFlag :: Monad m => Word8 -> H6502T m ()
unsetFlag x = modifyState $ \s -> s{reg_flags = reg_flags s .&. complement x}
-- | Set the state of the given flag to the given boolean value.
putFlag :: Monad m => Word8 -> Bool -> H6502T m ()
putFlag x True = setFlag x
putFlag x False = unsetFlag x
-- | Set the flags register.
setFlags :: Monad m => Word8 -> H6502T m ()
setFlags x = H6502T $ \e s -> return ((), s{reg_flags = x .|. reservedFlags})
onState :: Monad m => (H6502State -> a) -> H6502T m a
onState f = H6502T $ \e s -> return (f s, s)
-- | Get the program counter.
getPC :: Monad m => H6502T m Word16
getPC = onState reg_pc
-- | Get the stack pointer.
getS :: Monad m => H6502T m Word8
getS = onState reg_s
-- | Get the accumulator.
getA :: Monad m => H6502T m Word8
getA = onState reg_a
-- | Get the X index.
getX :: Monad m => H6502T m Word8
getX = onState reg_x
-- | Get the Y index.
getY :: Monad m => H6502T m Word8
getY = onState reg_y
-- | Get the state of the given flag.
getFlag :: Monad m => Word8 -> H6502T m Bool
getFlag x = H6502T $ \e s -> return (reg_flags s .&. x /= 0, s)
-- | Get the flags register.
getFlags :: Monad m => H6502T m Word8
getFlags = H6502T $ \e s -> return (reg_flags s, s)
-- | Join two 8-bit bytes into a 16-bit word.
wordJoin :: Word8 -> Word8 -> Word16
wordJoin hi lo = shiftL (fromIntegral hi) 8 .|. fromIntegral lo
-- | Split a 16-bit word into two 8-bit bytes.
wordSplit :: Word16 -> (Word8, Word8)
wordSplit w = (fromIntegral (shiftR w 8), fromIntegral w)
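-- Illustrative property (not in the original module): joining two bytes and
-- splitting the result gives the bytes back, e.g. wordJoin 0x12 0x34 == 0x1234.
_wordRoundTrip :: Bool
_wordRoundTrip = wordSplit (wordJoin 0x12 0x34) == (0x12, 0x34)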
-- | Read a byte from the given address.
peek :: Monad m => Word16 -> H6502T m Word8
peek a = H6502T $ \e s -> do
r <- memoryRead e a
return (r, s)
-- | Write a byte to the given address.
poke :: Monad m => Word16 -> Word8 -> H6502T m ()
poke a x = H6502T $ \e s -> do
memoryWrite e a x
return ((), s)
-- | Read a little-endian word from a given address.
peek16 :: Monad m => Word16 -> H6502T m Word16
peek16 a = H6502T $ \e s -> do
lo <- memoryRead e a
hi <- memoryRead e (a + 1)
return (wordJoin hi lo, s)
-- | Write a little-endian word to a given address.
poke16 :: Monad m => Word16 -> Word16 -> H6502T m ()
poke16 a x = H6502T $ \e s -> do
let ~(hi, lo) = wordSplit x
memoryWrite e a lo
memoryWrite e (a + 1) hi
return ((), s)
onStack :: Word8 -> Word16
onStack x = fromIntegral x .|. 0x0100
onZeroPage :: Word8 -> Word16
onZeroPage x = fromIntegral x
-- | Pop a byte from the stack.
pop :: Monad m => H6502T m Word8
pop = H6502T $ \e s -> do
let sp = reg_s s
r <- memoryRead e (onStack $ sp + 1)
return (r, s{reg_s = sp + 1})
-- | Push a byte onto the stack.
push :: Monad m => Word8 -> H6502T m ()
push x = H6502T $ \e s -> do
let sp = reg_s s
memoryWrite e (onStack sp) x
return ((), s{reg_s = sp - 1})
-- | Pop a little-endian word from the stack.
pop16 :: Monad m => H6502T m Word16
pop16 = H6502T $ \e s -> do
let sp = reg_s s
lo <- memoryRead e (onStack $ sp + 1)
hi <- memoryRead e (onStack $ sp + 2)
return (wordJoin hi lo, s{reg_s = sp + 2})
-- | Push a little-endian word onto the stack.
push16 :: Monad m => Word16 -> H6502T m ()
push16 x = H6502T $ \e s -> do
let ~(hi, lo) = wordSplit x
let sp = reg_s s
memoryWrite e (onStack $ sp - 1) lo
memoryWrite e (onStack sp) hi
return ((), s{reg_s = sp - 2})
-- | Read a byte at the location of the program counter and then increment it.
peekPC :: Monad m => H6502T m Word8
peekPC = H6502T $ \e s -> do
let pc = reg_pc s
r <- memoryRead e pc
return (r, s{reg_pc = pc + 1})
-- | Read a little-endian word at the location of the program counter and then increment it by two.
peekPC16 :: Monad m => H6502T m Word16
peekPC16 = H6502T $ \e s -> do
let pc = reg_pc s
lo <- memoryRead e pc
hi <- memoryRead e (pc + 1)
return (wordJoin hi lo, s{reg_pc = pc + 2})
-- | ZeroPage addressing mode: read a byte from the instruction, and use it as an offset into the zero page.
addrZeroPage :: Monad m => H6502T m Word16
addrZeroPage = do
imm <- peekPC
return (onZeroPage imm)
-- | ZeroPage,X addressing mode: read a byte from the instruction, add it to X (with 8-bit addition), and use it as an offset into the zero page.
addrZeroPageX :: Monad m => H6502T m Word16
addrZeroPageX = do
imm <- peekPC
x <- getX
return (onZeroPage $ imm + x)
-- | ZeroPage,Y addressing mode: read a byte from the instruction, add it to Y (with 8-bit addition), and use it as an offset into the zero page.
addrZeroPageY :: Monad m => H6502T m Word16
addrZeroPageY = do
imm <- peekPC
y <- getY
return (onZeroPage $ imm + y)
-- | Absolute addressing mode: read a word from the instruction, and use it as an address
addrAbsolute :: Monad m => H6502T m Word16
addrAbsolute = peekPC16
-- | Absolute,X addressing mode: read a word from the instruction, add it to X, and use it as an address
addrAbsoluteX :: Monad m => H6502T m Word16
addrAbsoluteX = do
imm <- peekPC16
x <- getX
return (imm + fromIntegral x)
-- | Absolute,Y addressing mode: read a word from the instruction, add it to Y, and use it as an address
addrAbsoluteY :: Monad m => H6502T m Word16
addrAbsoluteY = do
imm <- peekPC16
y <- getY
return (imm + fromIntegral y)
-- | (Indirect,X) addressing mode: read a byte from the instruction, add it to X, use the result as an offset into the zero page, read a word at that offset, and use that as an address
addrIxIndirect :: Monad m => H6502T m Word16
addrIxIndirect = do
imm <- peekPC
x <- getX
peek16 (onZeroPage $ imm + x)
-- | (Indirect),Y addressing mode: read a byte from the instruction, use it as an offset into the zero page, read a word at that offset, add Y, and use that as an address
addrIndirectIx :: Monad m => H6502T m Word16
addrIndirectIx = do
imm <- peekPC
y <- getY
addr <- peek16 (onZeroPage imm)
return (addr + fromIntegral y)
|
mniip/h6502
|
src/H6502/Util.hs
|
mit
| 7,355
| 0
| 13
| 1,785
| 2,489
| 1,264
| 1,225
| 150
| 1
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Execute a computation of type '{IO} () that has been previously added to
-- the codebase, without setting up an interactive environment.
--
-- This allows one to run standalone applications implemented in the Unison
-- language.
module Unison.Codebase.Execute where
import Unison.Prelude
import Unison.Codebase.MainTerm ( getMainTerm )
import qualified Unison.Codebase.MainTerm as MainTerm
import qualified Unison.Codebase as Codebase
import Unison.Parser ( Ann )
import qualified Unison.Codebase.Runtime as Runtime
import Unison.Codebase.Runtime ( Runtime )
import Unison.Var ( Var )
import qualified Unison.PrettyPrintEnv as PPE
import qualified Unison.Names3 as Names3
import qualified Unison.Codebase.Branch as Branch
import System.Exit (die)
import Control.Exception (finally)
execute
:: Var v
=> Codebase.Codebase IO v Ann
-> Runtime v
-> String
-> IO ()
execute codebase runtime mainName =
(`finally` Runtime.terminate runtime) $ do
root <- Codebase.getRootBranch codebase >>= \case
Right r -> pure r
Left Codebase.NoRootBranch ->
die "Couldn't identify a root namespace."
Left (Codebase.CouldntLoadRootBranch h) ->
die ("Couldn't load root branch " ++ show h)
Left (Codebase.CouldntParseRootBranch h) ->
die ("Couldn't parse root branch head " ++ show h)
let parseNames0 = Names3.makeAbsolute0 (Branch.toNames0 (Branch.head root))
loadTypeOfTerm = Codebase.getTypeOfTerm codebase
let mainType = Runtime.mainType runtime
mt <- getMainTerm loadTypeOfTerm parseNames0 mainName mainType
case mt of
MainTerm.NotAFunctionName s -> die ("Not a function name: " ++ s)
MainTerm.NotFound s -> die ("Not found: " ++ s)
MainTerm.BadType s _ -> die (s ++ " is not of type '{IO} ()")
MainTerm.Success _ tm _ -> do
let codeLookup = Codebase.toCodeLookup codebase
ppe = PPE.PrettyPrintEnv (const Nothing) (const Nothing)
void $ Runtime.evaluateTerm codeLookup ppe runtime tm
|
unisonweb/platform
|
parser-typechecker/src/Unison/Codebase/Execute.hs
|
mit
| 2,276
| 0
| 18
| 578
| 518
| 273
| 245
| -1
| -1
|
module GHCJS.DOM.OESVertexArrayObject (
) where
|
manyoo/ghcjs-dom
|
ghcjs-dom-webkit/src/GHCJS/DOM/OESVertexArrayObject.hs
|
mit
| 50
| 0
| 3
| 7
| 10
| 7
| 3
| 1
| 0
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
module Oden.Output.Compiler.Resolution where
import Text.PrettyPrint.Leijen
import Oden.Compiler.Resolution
import Oden.Core.ProtocolImplementation
import Oden.Metadata
import Oden.Output
import Oden.Output.Instantiate ()
import Oden.Pretty ()
instance OdenOutput ResolutionError where
outputType _ = Error
name =
\case
NotInScope{} ->
"Compiler.Resolution.NotInScope"
NoMatchingImplementationInScope{} ->
"Compiler.Resolution.NoMatchingImplementationInScope"
MultipleMatchingImplementationsInScope{} ->
"Compiler.Resolution.MultipleMatchingImplementationsInScope"
ResolutionInstantiateError e ->
name e
header e settings =
case e of
NotInScope ident ->
text "Not in scope:" <+> pretty ident
NoMatchingImplementationInScope _ protocol type' _ ->
text "No matching implementation in scope for"
<+> pretty protocol <+> parens (pretty type')
MultipleMatchingImplementationsInScope{} ->
text "Multiple matching implementations in scope"
ResolutionInstantiateError err ->
header err settings
details e settings =
case e of
NotInScope _ ->
empty
NoMatchingImplementationInScope _ _ _ allImpls ->
vcat (text "The following implementations are in scope:" : map pretty allImpls)
MultipleMatchingImplementationsInScope _ impls ->
vcat (text "The following implementations matched:" : concatMap printImpl impls)
where
printImpl impl@(ProtocolImplementation (Metadata si) _ _ _) =
[ empty
, pretty impl
, text "defined at" <+> pretty si
, empty
]
ResolutionInstantiateError err ->
details err settings
sourceInfo =
\case
NotInScope _ -> Nothing
NoMatchingImplementationInScope si _ _ _ -> Just si
MultipleMatchingImplementationsInScope si _ -> Just si
ResolutionInstantiateError err -> sourceInfo err
|
oden-lang/oden
|
src/Oden/Output/Compiler/Resolution.hs
|
mit
| 2,203
| 0
| 16
| 654
| 433
| 214
| 219
| 54
| 0
|
module Acronym (abbreviate) where
abbreviate :: String -> String
abbreviate xs = error "You need to implement this function."
|
exercism/xhaskell
|
exercises/practice/acronym/src/Acronym.hs
|
mit
| 127
| 0
| 5
| 20
| 29
| 16
| 13
| 3
| 1
|
module AIChallenger.Config
( Config(..)
, defaultConfig
, getConfigFromCommandlineFlags
) where
import Control.Applicative
import Data.Maybe
import Options.Applicative
import Options.Applicative.Builder.Internal (HasValue)
import Path
import System.Directory
import AIChallenger.Types
data Config = Config
{ cfgPort :: Int
, cfgAddress :: String
, cfgTurnLimit :: Turn
, cfgBotExecutables :: [Path Abs File]
}
defaultConfig :: Config
defaultConfig = Config
{ cfgPort = 8081
, cfgAddress = "127.0.0.1"
, cfgTurnLimit = Turn 200
, cfgBotExecutables = []
}
getConfigFromCommandlineFlags :: IO Config
getConfigFromCommandlineFlags = do
cwd <- parseAbsDir =<< getCurrentDirectory
let parseFileName fn = parseAbsFile fn <|> fmap (cwd </>) (parseRelFile fn)
execParser (info (parseConfig parseFileName) (progDesc "ai-challenger"))
parseConfig :: (String -> Maybe (Path Abs File)) -> Parser Config
parseConfig parseFileName = Config
<$> option auto
(long "port" <> def "HTTP port" cfgPort)
<*> strOption
(long "address" <> def "HTTP address" cfgAddress)
<*> fmap Turn (option auto
(long "turn-limit" <> def "turn limit" (fromTurn . cfgTurnLimit)))
<*> fmap (mapMaybe parseFileName) (many (argument str (metavar "BOT_EXECUTABLE")))
def :: (Show a, Options.Applicative.Builder.Internal.HasValue t) => String -> (Config -> a) -> Mod t a
def msg field =
let defaultValue = field defaultConfig
in help (msg <> ", default is " <> show defaultValue) <> value defaultValue
|
ethercrow/ai-challenger
|
src/AIChallenger/Config.hs
|
mit
| 1,584
| 0
| 14
| 325
| 478
| 252
| 226
| 40
| 1
|
{-# LANGUAGE Arrows, NoMonomorphismRestriction, DeriveDataTypeable #-}
--------------------------------------------------------------------
-- |
-- Module : Codemanic.NikeRuns
-- Copyright : (c) Uwe Hoffmann 2009
-- License : LGPL
--
-- Maintainer: Uwe Hoffmann <uwe@codemanic.com>
-- Stability : provisional
-- Portability: portable
--
-- Functions to fetch and process Nike+ run data.
--
--------------------------------------------------------------------
module Codemanic.NikeRuns
(
chartNikeRun,
NikeRun(..),
getNikeRun,
getMostRecentNikeRunId,
renderNikeRun
)
where
import Codemanic.NumericLists
import Codemanic.Weather
import Codemanic.Util
import Graphics.Google.Chart
import Data.Time
import Data.Time.LocalTime
import Data.Time.Clock
import Data.Time.Clock.POSIX
import Data.Time.Format
import System.Locale
import System.Time
import Text.Printf
import Text.Regex
import Text.Regex.Posix
import Text.JSON
import Text.JSON.String
import Text.JSON.Types
import Text.XML.HXT.Arrow
import Network.HTTP
import Network.URI
import Text.StringTemplate
import Data.Generics
import Data.Maybe
import System.FilePath
transformAndSmoothRunData :: [Double] -> [Double]
transformAndSmoothRunData =
flipInRange .
(convolve (gaussianKernel 5)) .
(correlate (movingAverageKernel 6)) .
(map (\x -> (1.0 / (6.0 * x)))) .
(filter (/=0)) .
diff
transformAndAverageRunData :: [Double] -> [Double]
transformAndAverageRunData =
flipInRange .
(correlate (movingAverageKernel 6)) .
(map (\x -> (1.0 / (6.0 * x)))) .
(filter (/=0)) .
diff
scaler :: [Double] -> Double -> (Double -> Double)
scaler xs y = (\x -> (x - minV) * y / d)
where
minV = minInList xs
maxV = maxInList xs
d = maxV - minV
encodeRunData :: [Double] -> ChartData
encodeRunData xs = encodeDataExtended [xs']
where
sc = scaler xs (fromIntegral 4095)
xs' = map (round . sc) xs :: [Int]
sampler :: Int -> Double -> Double -> [Double]
sampler n minV maxV = [(minV + d * (fromIntegral x) / (fromIntegral n)) | x <- [0..n]]
where
d = maxV - minV
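-- Illustrative check (not in the original source): 'sampler' yields n+1
-- evenly spaced values across [minV, maxV].
_samplerExample :: Bool
_samplerExample = sampler 4 0.0 1.0 == [0.0, 0.25, 0.5, 0.75, 1.0]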
yLabels :: Int -> [Double] -> [String]
yLabels n xs = map (\x -> printf "%.2f" x) (reverse (sampler n (minInList xs) (maxInList xs)))
xLabels :: Int -> [Double] -> [String]
xLabels n xs = map (\x -> printf "%.1f" x) (sampler n 0.0 duration)
where
duration = (fromIntegral $ length xs) / 6.0
suffix :: String -> (String -> String)
suffix s = (\x -> x ++ s)
data NikeRun = NikeRun {
userId :: Int,
runId :: Int,
extendedData :: [Double],
calories :: Double,
startTime :: UTCTime
} deriving (Eq,Show)
duration :: NikeRun -> Double
duration nr = (10.0 * (fromIntegral (length (extendedData nr)))) / 60.0
distance :: NikeRun -> Double
distance nr = last (extendedData nr)
pace :: NikeRun -> Double
pace nr = (duration nr) / (distance nr)
chartNikeRun :: Int -> Int -> NikeRun -> String
chartNikeRun w h NikeRun {extendedData = xs, calories = c} =
suffix "&chg=25.0,25.0,3,2" $
chartURL $
setAxisLabelPositions [[0, 25, 50, 75, 100], [50], [0, 25, 50, 75, 100], [50]] $
setAxisLabels [(yLabels 4 ylxs), ["pace (min/km)"], (xLabels 4 xs), ["time (min)"]] $
setAxisTypes [AxisLeft, AxisLeft, AxisBottom, AxisBottom] $
setSize w h $
setData (encodeRunData txs) $
newLineChart
where
ylxs = transformAndAverageRunData xs
txs = transformAndSmoothRunData xs
nikeRunURL :: Int -> Int -> String
nikeRunURL userId runId =
"http://nikerunning.nike.com/nikeplus/v1/services/widget/get_public_run.jsp?userID=" ++ (show userId) ++
"&id=" ++ (show runId)
nikeRunIdsURL :: Int -> String
nikeRunIdsURL userId =
"http://nikerunning.nike.com/nikeplus/v1/services/app/get_public_user_data.jsp?id=" ++ (show userId)
retrieveNikeRun :: Int -> Int -> IO String
retrieveNikeRun userId runId = do
case parseURI (nikeRunURL userId runId) of
Nothing -> ioError . userError $ "Invalid URL"
Just uri -> getHttpResponse uri
retrieveNikeRunIds :: Int -> IO String
retrieveNikeRunIds userId = do
case parseURI (nikeRunIdsURL userId) of
Nothing -> ioError . userError $ "Invalid URL"
Just uri -> getHttpResponse uri
readDoubles :: String -> [Double]
readDoubles s = map (\y -> read y::Double) (splitRegex (mkRegex ",") s)
parseNikeRun uId rId = atTag "sportsData" >>>
proc x -> do
cs <- textAtTag "calories" -< x
exds <- textAtTag "extendedData" -< x
sts <- textAtTag "startTime" -< x
returnA -< NikeRun {
userId = uId,
runId = rId,
extendedData = readDoubles exds,
startTime = readTime defaultTimeLocale "%Y-%m-%dT%H:%M:%S%z" sts,
calories = read cs }
parseNikeRunId = atTag "mostRecentRun" >>>
proc x -> do
runId <- getAttrValue "id" -< x
returnA -< (read runId)::Int
getNikeRun :: Int -> Int -> IO NikeRun
getNikeRun userId runId = do
doc <- retrieveNikeRun userId runId
let xml = parseXML doc
nikeRuns <- runX (xml >>> (parseNikeRun userId runId))
case nikeRuns of
[] -> ioError . userError $ "Failed to parse nike run " ++ show runId
nr:_ -> return nr
getMostRecentNikeRunId :: Int -> IO Int
getMostRecentNikeRunId userId = do
doc <- retrieveNikeRunIds userId
let xml = parseXML doc
nikeRunIds <- runX (xml >>> parseNikeRunId)
case nikeRunIds of
[] -> ioError . userError $ "Failed to parse most recent nike run id " ++ show userId
id:_ -> return id
parseJSONChartSize :: String -> IO (Int, Int)
parseJSONChartSize jsonOptions =
case runGetJSON readJSObject jsonOptions of
Right x -> do
let jo = fromJSONObject x
return (getKey "width" jo, getKey "height" jo)
where
getKey k j = fromJSONRational $ fromJust $ lookup k j
Left _ -> return (0, 0)
extractChartSize :: String -> String -> IO (Int, Int)
extractChartSize templates template = do
let chartOptionsPattern = "\\$\\!chartOptions(\\{.+\\})\\!\\$"
contents <- readFile (templates </> (addExtension template "st"))
let (_, _, _, groups) = (contents =~ chartOptionsPattern :: (String, String, String, [String]))
if ((length groups) > 0)
then parseJSONChartSize $ head groups
else return (0, 0)
renderNikeRun :: String -> String -> NikeRun -> String -> Maybe Weather -> IO String
renderNikeRun templates template nr message weather = do
dirs <- directoryGroup templates
(w, h) <- extractChartSize templates template
let chart = chartNikeRun w h nr
let tpl = fromJust $ getStringTemplate template dirs
timeZone <- getCurrentTimeZone
let localTime = utcToLocalTime timeZone (startTime nr)
return $ render $ setAttribute "chart" chart $
setAttribute "calories" (calories nr) $
setAttribute "duration" (renderDouble (duration nr)) $
setAttribute "distance" (renderDouble (distance nr)) $
setAttribute "pace" (renderDouble (pace nr)) $
setAttribute "startTime" (renderTime localTime) $
setAttribute "message" message $
setAttribute "userId" (userId nr) $
setAttribute "runId" (runId nr) $
setAttribute "weather" (renderWeather weather) tpl
where
renderDouble :: Double -> String
renderDouble x = (printf "%.2f" x)::String
renderTime :: LocalTime -> String
renderTime t = formatTime defaultTimeLocale "%x, %r" t
renderWeather :: Maybe Weather -> String
renderWeather (Just weather) = "Temperature " ++ (temperature weather) ++
", Wind " ++ (wind weather) ++ ", " ++ (humidity weather) ++ " humidity"
renderWeather Nothing = ""
|
uwedeportivo/NikePlus-Cocoa-RunLog
|
haskell/nikepub/Codemanic/NikeRuns.hs
|
mit
| 7,762
| 2
| 19
| 1,758
| 2,504
| 1,307
| 1,197
| 183
| 2
|
{-# LANGUAGE Arrows, NoMonomorphismRestriction, DeriveDataTypeable #-}
--------------------------------------------------------------------
-- |
-- Module : Codemanic.Util
-- Copyright : (c) Uwe Hoffmann 2009
-- License : LGPL
--
-- Maintainer: Uwe Hoffmann <uwe@codemanic.com>
-- Stability : provisional
-- Portability: portable
--
-- Small utility functions shared between other modules of nikepub.
--
--------------------------------------------------------------------
module Codemanic.Util
(
getHttpResponse,
atTag,
text,
textAtTag,
parseXML,
fromJSONRational,
fromJSONObject
)
where
import Data.Time
import Data.Time.LocalTime
import Data.Time.Clock
import Data.Time.Clock.POSIX
import Data.Time.Format
import System.Locale
import System.Time
import Text.Printf
import Text.Regex
import Text.XML.HXT.Arrow
import Text.JSON
import Text.JSON.String
import Text.JSON.Types
import Network.URI
import Network.HTTP
import Data.Generics
import Data.Maybe
getHttpResponse :: URI -> IO String
getHttpResponse uri = do
eresp <- simpleHTTP (Request uri GET [] "")
case eresp of
Left _ -> ioError . userError $ "Failed to get " ++ show uri
Right res -> return $ rspBody res
atTag tag = deep (isElem >>> hasName tag)
text = getChildren >>> getText
textAtTag tag = atTag tag >>> text
parseXML doc = readString [(a_validate,v_0)] doc
fromJSONRational :: JSValue -> Int
fromJSONRational (JSRational _ n) = fromInteger . round $ n
fromJSONObject :: JSValue -> [(String,JSValue)]
fromJSONObject (JSObject o) = fromJSObject o
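-- A minimal usage sketch with a made-up JSON string: pull the "width" key out
-- of a small object using the helpers above.
exampleWidth :: Int
exampleWidth =
  case runGetJSON readJSObject "{\"width\": 640}" of
    Right v -> fromJSONRational . fromJust $ lookup "width" (fromJSONObject v)
    Left _  -> 0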
|
uwedeportivo/NikePlus-Cocoa-RunLog
|
haskell/nikepub/Codemanic/Util.hs
|
mit
| 1,548
| 0
| 12
| 225
| 363
| 205
| 158
| 41
| 2
|
#!/usr/bin/env stack
{-
stack --resolver lts-8.6 script
--package stm
--package mtl
--package network-uri
--package http-client
--package http-client-tls
--package http-types
--package bytestring
--package conduit-combinators
--package containers
--package async
--package optparse-applicative
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiWayIf #-}
-- module Ch28.Check
-- where
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Exception (IOException, catch)
import Control.Monad.Except
import Control.Monad.State
import Data.Char (isControl)
import Network.URI (URI, parseURI)
import System.IO (hFlush, stdout)
import Text.Printf (printf)
import qualified Data.ByteString.Char8 as StrictBS
import qualified Data.Set as Set
import Network.HTTP.Client ( Manager, Response, HttpException(..)
, newManager, httpNoBody, parseRequest, responseStatus
, responseHeaders
)
import Network.HTTP.Types.Status
import Network.HTTP.Types.Header (hLocation)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Conduit
import Options.Applicative ( (<**>), Parser, ParserInfo
, many, argument, metavar, option, auto, short
, value, help, info, helper, fullDesc, progDesc
, header, execParser, str)
import Data.Semigroup ((<>))
type URL =
StrictBS.ByteString
data Task
= Check URL
| Done
main :: IO ()
main = do
opts <- execParser p_opts
let
numFiles =
length (optFiles opts)
N k =
optFlag opts
badCount <- newTVarIO (0 :: Int)
badLinks <- newTChanIO
jobQueue <- newTChanIO
badLinksWriter <- async (writeBadLinks badLinks)
manager <- newManager tlsManagerSettings
workers <- replicateTimes k (worker manager badLinks jobQueue badCount)
stats <- execJob (mapM_ checkURLs (optFiles opts))
(JobState Set.empty 0 jobQueue)
atomically $ replicateM_ k (writeTChan jobQueue Done)
waitAll workers -- mapM_ waitCatch workers
uninterruptibleCancel badLinksWriter
broken <- readTVarIO badCount
printf fmt broken
(linksFound stats)
(Set.size (linksSeen stats))
numFiles
where
fmt =
"Found %d broken links. Checked %d links (%d unique) in %d files.\n"
replicateTimes :: Int -> IO () -> IO [Async ()]
replicateTimes n action =
forM [1..n] $ const (async action)
-- Wait for all workers to be done, ignoring failures
waitAll :: [Async a] -> IO ()
waitAll [] =
return ()
waitAll (a:as) =
waitCatch a *> waitAll as
writeBadLinks :: TChan String -> IO ()
writeBadLinks c =
forever $
atomically (readTChan c) >>= putStrLn . ("BAD LINK: " ++) >> hFlush stdout
-- CHECKING LINKS
getStatusE :: Manager -> URI -> IO (Either String Int)
getStatusE m =
runExceptT . chase (5 :: Int)
where
chase :: Int -> URI -> ExceptT String IO Int
chase 0 _ =
throwError "too many redirects"
chase n u = do
r <- embedEither show =<< liftIO (getHead u `catch` httpExceptionHandler)
case statusIsRedirection (responseStatus r) of
True -> do
u' <- embedMaybe (show r) $ findHeader hLocation
url <- embedMaybe "bad URL" $ parseURI (StrictBS.unpack u')
chase (n - 1) url
where
findHeader name =
lookup name (responseHeaders r)
False ->
return . statusCode . responseStatus $ r
where
httpExceptionHandler :: HttpException -> IO (Either StrictBS.ByteString (Response ()))
httpExceptionHandler (HttpExceptionRequest _ content) =
return . Left . StrictBS.pack $ show content
httpExceptionHandler (InvalidUrlException _ reason) =
return . Left . StrictBS.pack $ reason
getHead :: URI -> IO (Either StrictBS.ByteString (Response ()))
getHead uri = do
request <- parseRequest ("HEAD " ++ show uri)
response <- httpNoBody request m
-- Doesn't work because of the fundep `MonadError IO`
-- `catch` (throwError :: HttpException -> IO (Response ()))
let
status =
responseStatus response
if | statusIsSuccessful status ->
return $ Right response
| statusIsRedirection status ->
return $ Right response
| otherwise ->
return . Left . statusMessage . responseStatus $ response
embedEither :: (MonadError e m) => (s -> e) -> Either s a -> m a
embedEither f =
either (throwError . f) return
embedMaybe :: (MonadError e m) => e -> Maybe a -> m a
embedMaybe err =
maybe (throwError err) return
worker :: Manager -> TChan String -> TChan Task -> TVar Int -> IO ()
worker m badLinks jobQueue badCount =
loop
where
loop = do
job <- atomically $ readTChan jobQueue
case job of
Done ->
return ()
Check x ->
checkOne (StrictBS.unpack x) >> loop
checkOne :: String -> IO ()
checkOne url =
case parseURI url of
Just uri -> do
code <- getStatusE m uri `catch`
(return . Left . show :: IOException -> IO (Either String Int))
case code of
Right 200 ->
return ()
Right n ->
report (show n)
Left err ->
report err
_ ->
report "invalid URL"
where
report :: String -> IO ()
report s =
atomically $ do
modifyTVar' badCount (+1)
writeTChan badLinks (url ++ " " ++ s)
-- FINDING LINKS
data JobState =
JobState { linksSeen :: Set.Set URL
, linksFound :: Int
, linkQueue :: TChan Task
}
newtype Job a =
Job { runJob :: StateT JobState IO a }
deriving (Functor, Applicative, Monad, MonadState JobState, MonadIO)
execJob :: Job a -> JobState -> IO JobState
execJob =
execStateT . runJob
checkURLs :: FilePath -> Job ()
checkURLs fp =
Job $
runConduitRes $ sourceFileBS fp
.| mapC extractLinks
.| setupJob
where
setupJob :: Consumer [URL] (ResourceT (StateT JobState IO)) ()
setupJob =
(getZipConduit $
ZipConduit (filterMCE seenURI
.| (getZipConduit $
ZipConduit (mapM_CE insertURI)
*> ZipConduit (mapM_C enqueueTasks)))
<* ZipConduit (mapM_C (updateStats . length)))
updateStats :: (MonadState JobState m) => Int -> m ()
updateStats n =
modify $ \s ->
s { linksFound = linksFound s + n }
enqueueTasks :: (MonadState JobState m, MonadIO m) => [URL] -> m ()
enqueueTasks urls = do
task <- gets linkQueue
liftIO . atomically $ mapM_ (writeTChan task . Check) urls
insertURI :: (MonadState JobState m) => URL -> m ()
insertURI url = do
modify $ \s ->
s { linksSeen = Set.insert url (linksSeen s) }
seenURI :: (MonadState JobState m) => URL -> m Bool
seenURI url =
(not . Set.member url) <$> gets linksSeen
extractLinks :: StrictBS.ByteString -> [URL]
extractLinks =
concatMap uris . StrictBS.lines
where
uris s =
filter httpSchemes (StrictBS.splitWith isDelim s)
isDelim c =
isControl c || c `elem` (" <>\"{}|\\^[]`" :: String)
httpSchemes s =
"http:" `StrictBS.isPrefixOf` s || "https:" `StrictBS.isPrefixOf` s
-- COMMAND LINE PARSING
newtype Flag =
N Int
deriving Eq
data Options =
Options { optFiles :: [FilePath]
, optFlag :: !Flag
}
p_options :: Parser Options
p_options =
Options <$> p_files <*> p_flag
p_files :: Parser [String]
p_files =
many $ argument str (metavar "FILEPATH")
p_flag :: Parser Flag
p_flag =
N <$> option auto (short 'n'
<> value 16
<> help "Number of concurrent connections (defaults to 16)")
p_opts :: ParserInfo Options
p_opts =
info (p_options <**> helper)
(fullDesc
<> progDesc "Check hyperlinks contained in [FILEPATH ...]"
<> header "urlcheck - a hyperlink checker")
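-- A minimal usage sketch with made-up input text: extractLinks keeps only the
-- http/https URLs found in a chunk of input.
exampleLinks :: [URL]
exampleLinks =
  extractLinks "see https://example.com and <http://example.org/page> for details"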
|
futtetennista/IntroductionToFunctionalProgramming
|
RWH/src/Ch28/Check.hs
|
mit
| 8,204
| 0
| 19
| 2,334
| 2,416
| 1,235
| 1,181
| 215
| 6
|
module ChangeCount where
import Data.List(sort)
changeCount :: Int -> [Int] -> Int
changeCount money coins | money <= 0 = 0
| otherwise = changeCount' money (reverse . sort $ coins)
changeCount' money coins | null coins = 0
| money == 0 = 1
| money < 0 = 0
| otherwise = (changeCount' (money - (head coins)) coins) + (changeCount' money (tail coins))
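-- A minimal usage sketch: with coins 1 and 2 there are three ways to make 4
-- (2+2, 2+1+1, 1+1+1+1), so this evaluates to 3.
exampleChangeCount :: Int
exampleChangeCount = changeCount 4 [1, 2]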
|
slon1024/functional_programming
|
haskell/ChangeCount.hs
|
mit
| 484
| 0
| 12
| 198
| 168
| 83
| 85
| 9
| 1
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{- |
Module : Orville.PostgreSQL.Expr.GroupBy.GroupByClause
Copyright : Flipstone Technology Partners 2016-2021
License : MIT
-}
module Orville.PostgreSQL.Internal.Expr.GroupBy.GroupByClause
( GroupByClause,
groupByClause,
)
where
import Orville.PostgreSQL.Internal.Expr.GroupBy.GroupByExpr (GroupByExpr)
import qualified Orville.PostgreSQL.Internal.RawSql as RawSql
newtype GroupByClause
= GroupByClause RawSql.RawSql
deriving (RawSql.SqlExpression)
groupByClause :: GroupByExpr -> GroupByClause
groupByClause expr = GroupByClause (RawSql.fromString "GROUP BY " <> RawSql.toRawSql expr)
|
flipstone/orville
|
orville-postgresql-libpq/src/Orville/PostgreSQL/Internal/Expr/GroupBy/GroupByClause.hs
|
mit
| 653
| 0
| 9
| 78
| 101
| 63
| 38
| 11
| 1
|
module Utility (module Control.Error, catchIO, foldML, strictIO) where
import Control.Error
import Control.Monad
catchIO :: IO a -> EitherT String IO a
catchIO a = tryIO a `catchT` (left . show)
foldML :: Monad m => [a -> m a] -> a -> m a
foldML = foldl (>=>) return
-- | Run an application with an error possibility. Terminate the
-- application with the error message or exit gracefully
strictIO :: Monad m => EitherT String m a -> m a
strictIO = eitherT error return
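-- A minimal usage sketch: foldML threads a value through a list of monadic
-- steps from left to right; both steps here live in Maybe, so this evaluates
-- to Just 8.
exampleFoldML :: Maybe Int
exampleFoldML = foldML [Just . (+ 1), Just . (* 2)] 3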
|
Prinhotels/goog-closure
|
src/Utility.hs
|
mit
| 476
| 0
| 9
| 92
| 157
| 84
| 73
| 9
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE RebindableSyntax #-}
module Course.Monad(
Monad(..)
, join
, (>>=)
, (<=<)
) where
import Course.Applicative hiding ((<*>))
import Course.Core
import Course.Functor
import Course.Id
import Course.List
import Course.Optional
import qualified Prelude as P((=<<))
-- | All instances of the `Monad` type-class must satisfy one law. This law
-- is not checked by the compiler. This law is given as:
--
-- * The law of associativity
-- `∀f g x. g =<< (f =<< x) ≅ ((g =<<) . f) =<< x`
class Applicative f => Monad f where
-- Pronounced, bind.
(=<<) ::
(a -> f b)
-> f a
-> f b
infixr 1 =<<
-- | Witness that all things with (=<<) and (<$>) also have (<*>).
--
-- >>> Id (+10) <*> Id 8
-- Id 18
--
-- >>> (+1) :. (*2) :. Nil <*> 1 :. 2 :. 3 :. Nil
-- [2,3,4,2,4,6]
--
-- >>> Full (+8) <*> Full 7
-- Full 15
--
-- >>> Empty <*> Full 7
-- Empty
--
-- >>> Full (+8) <*> Empty
-- Empty
--
-- >>> ((+) <*> (+10)) 3
-- 16
--
-- >>> ((+) <*> (+5)) 3
-- 11
--
-- >>> ((+) <*> (+5)) 1
-- 7
--
-- >>> ((*) <*> (+10)) 3
-- 39
--
-- >>> ((*) <*> (+2)) 3
-- 15
(<*>) ::
Monad f =>
f (a -> b)
-> f a
-> f b
(<*>) =
error "todo: Course.Monad#(<*>)"
infixl 4 <*>
-- | Binds a function on the Id monad.
--
-- >>> (\x -> Id(x+1)) =<< Id 2
-- Id 3
instance Monad Id where
(=<<) ::
(a -> Id b)
-> Id a
-> Id b
(=<<) =
error "todo: Course.Monad (=<<)#instance Id"
-- | Binds a function on a List.
--
-- >>> (\n -> n :. n :. Nil) =<< (1 :. 2 :. 3 :. Nil)
-- [1,1,2,2,3,3]
instance Monad List where
(=<<) ::
(a -> List b)
-> List a
-> List b
(=<<) =
error "todo: Course.Monad (=<<)#instance List"
-- | Binds a function on an Optional.
--
-- >>> (\n -> Full (n + n)) =<< Full 7
-- Full 14
instance Monad Optional where
(=<<) ::
(a -> Optional b)
-> Optional a
-> Optional b
(=<<) =
error "todo: Course.Monad (=<<)#instance Optional"
-- | Binds a function on the reader ((->) t).
--
-- >>> ((*) =<< (+10)) 7
-- 119
instance Monad ((->) t) where
(=<<) ::
(a -> ((->) t b))
-> ((->) t a)
-> ((->) t b)
(=<<) =
error "todo: Course.Monad (=<<)#instance ((->) t)"
-- | Flattens a combined structure to a single structure.
--
-- >>> join ((1 :. 2 :. 3 :. Nil) :. (1 :. 2 :. Nil) :. Nil)
-- [1,2,3,1,2]
--
-- >>> join (Full Empty)
-- Empty
--
-- >>> join (Full (Full 7))
-- Full 7
--
-- >>> join (+) 7
-- 14
join ::
Monad f =>
f (f a)
-> f a
join =
error "todo: Course.Monad#join"
-- | Implement a flipped version of @(=<<)@, however, use only
-- @join@ and @(<$>)@.
-- Pronounced, bind flipped.
--
-- >>> ((+10) >>= (*)) 7
-- 119
(>>=) ::
Monad f =>
f a
-> (a -> f b)
-> f b
(>>=) =
error "todo: Course.Monad#(>>=)"
infixl 1 >>=
-- | Implement composition within the @Monad@ environment.
-- Pronounced, kleisli composition.
--
-- >>> ((\n -> n :. n :. Nil) <=< (\n -> n+1 :. n+2 :. Nil)) 1
-- [2,2,3,3]
(<=<) ::
Monad f =>
(b -> f c)
-> (a -> f b)
-> a
-> f c
(<=<) =
error "todo: Course.Monad#(<=<)"
infixr 1 <=<
-----------------------
-- SUPPORT LIBRARIES --
-----------------------
instance Monad IO where
(=<<) =
(P.=<<)
|
harrisi/on-being-better
|
list-expansion/Haskell/course/src/Course/Monad.hs
|
cc0-1.0
| 3,271
| 0
| 11
| 809
| 669
| 407
| 262
| 84
| 1
|
module Ohua.DFLang.Util where
import Ohua.Prelude
import qualified Data.Foldable as F
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import qualified Data.Sequence as DS
import Data.Sequence (Seq, (|>))
import Ohua.DFLang.Lang
-- | Find the usages of a binding
findUsages :: Foldable f => Binding -> f LetExpr -> [LetExpr]
findUsages binding = filter (elem (DFVar binding) . callArguments) . F.toList
-- | Find the definition of a binding
findDefinition :: Binding -> Seq LetExpr -> Maybe LetExpr
findDefinition binding = find ((binding `elem`) . output)
-- | Find the first call site of an expression by function reference.
findExpr :: DFFnRef -> Seq LetExpr -> Maybe LetExpr
findExpr fnRef = find ((== fnRef) . functionRef)
findAllExprs :: DFFnRef -> Seq LetExpr -> Seq LetExpr
findAllExprs fnRef = DS.filter ((== fnRef) . functionRef)
removeAllExprs :: Seq LetExpr -> Seq LetExpr -> Seq LetExpr
removeAllExprs toRemove allExprs =
let t = HS.fromList $ toList $ map callSiteId toRemove
in foldl
(\s e ->
if HS.member (callSiteId e) t
then s
else s |> e)
DS.empty
allExprs
|
ohua-dev/ohua-core
|
core/src/Ohua/DFLang/Util.hs
|
epl-1.0
| 1,218
| 0
| 13
| 287
| 357
| 195
| 162
| 26
| 2
|
module Two where
import Data.List
import Data.Time
fak 0 = 1
fak i = i * (fak $ i - 1)
fact i
| i == 0 = 1
| otherwise = i * (fact $ i - 1)
sum' [] = 0
sum' (x : []) = x
sum' (x : xs) = x + sum xs
range i j
| i == j = [i]
| otherwise = i : (range (succ i) j)
prime' x
| x < 2 = False
| x == 2 = True
| even x = False
| otherwise = loopi 3
where loopi i
| i*i > x = True
| 0 == rem x i = False
| otherwise = loopi $ i + 2
primes = 2 : filter prime' [3,5..]
prime = (!!) primes
fibo = 1:2:zipWith (+) fibo (tail fibo)
sol2 lim = sum $ filter even $ takeWhile (lim > ) fibo
oddPrime :: Int -> Bool
oddPrime n = iter 3
where iter i
| i*i > n = True
| 0 == rem n i = False
| otherwise = iter (i+2)
sol3 :: Int -> Int
sol3 tar = iter 3 tar
where iter i res
| res == i = i
| oddPrime i = if 0 == rem res i
then iter i (div res i)
else iter (i+2) res
| otherwise = iter (i+2) res
numcol :: Int -> [Int]
numcol n
| n < 10 = [n]
| otherwise = numcol (div n 10) ++ [rem n 10]
isPalin :: Int -> Bool
isPalin n = xs == reverse xs
where xs = numcol n
sol4 :: Int -> Int
sol4 lim = maximum [ x | i <- [lim..999], j <- [lim..999], let x = i*j, isPalin x]
sol5 :: [Int] -> Int
sol5 xs = iter [] xs
where iter [] (l : ls) = iter [l] ls
iter l1 [] = product l1
iter l1 (l:ls) = iter (l1 ++ [loko]) ls
where loko = iterone l1 l
where iterone [] l = l
iterone (lx:lxs) i
| 0 == rem i lx = iterone lxs (div i lx)
| otherwise = iterone lxs i
sol25 :: Int -> Int
sol25 lim = iter 1 0 0
where
iter a b i
| a > lim = i
| otherwise = iter (a+b) a (succ i)
time f x = do
start <- getCurrentTime
print $ f x
stop <- getCurrentTime
print $ diffUTCTime stop start
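-- A minimal usage sketch: the even Fibonacci numbers below 100 are 2, 8 and
-- 34, so this evaluates to 44.
exampleSol2 :: Integer
exampleSol2 = sol2 100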
|
zeniuseducation/poly-euler
|
Alfa/haskell/tutorial/two.hs
|
epl-1.0
| 1,976
| 0
| 15
| 757
| 1,069
| 522
| 547
| 70
| 4
|
-- | Combinations.
-- This module is deprecated; it is equivalent to the module "Compositions",
-- but it turns out that \"compositions\" is the accepted name. I will
-- remove this module in the future.
module Math.Combinat.Combinations where
import Math.Combinat.Numbers (factorial,binomial)
-------------------------------------------------------
-- | Combinations fitting into a given shape and having a given degree.
-- The order is lexicographic, that is,
--
-- > sort cs == cs where cs = combinations' shape k
--
combinations'
:: [Int] -- ^ shape
-> Int -- ^ sum
-> [[Int]]
combinations' [] 0 = [[]]
combinations' [] _ = []
combinations' shape@(s:ss) n =
[ x:xs | x <- [0..min s n] , xs <- combinations' ss (n-x) ]
countCombinations' :: [Int] -> Int -> Integer
countCombinations' [] 0 = 1
countCombinations' [] _ = 0
countCombinations' shape@(s:ss) n = sum
[ countCombinations' ss (n-x) | x <- [0..min s n] ]
-- | All combinations fitting into a given shape.
allCombinations' :: [Int] -> [[[Int]]]
allCombinations' shape = map (combinations' shape) [0..d] where d = sum shape
-- | Combinations of a given length.
combinations
:: Int -- ^ length
-> Int -- ^ sum
-> [[Int]]
combinations len d = combinations' (replicate len d) d
-- | # = \\binom { len+d-1 } { len-1 }
countCombinations :: Int -> Int -> Integer
countCombinations len d = binomial (len+d-1) (len-1)
-- | Positive combinations of a given length.
combinations1
:: Int -- ^ length
-> Int -- ^ sum
-> [[Int]]
combinations1 len d
| len > d = []
| otherwise = map plus1 $ combinations len (d-len)
where
plus1 = map (+1)
countCombinations1 :: Int -> Int -> Integer
countCombinations1 len d = countCombinations len (d-len)
-------------------------------------------------------
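-- A minimal usage sketch: the length-2 combinations summing to 3 are
-- [0,3],[1,2],[2,1],[3,0], and countCombinations agrees that there are 4.
exampleCombinations :: ([[Int]], Integer)
exampleCombinations = (combinations 2 3, countCombinations 2 3)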
|
garykrige/project_euler
|
Haskell/Math/Combinat/Combinations.hs
|
gpl-2.0
| 1,841
| 0
| 10
| 389
| 517
| 284
| 233
| 34
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
module Ast where
import Data.Typeable
import Data.Data
import Data.Generics.Uniplate.Direct
data TranslUnit =
TranslUnit [ExtDecl]
deriving (Data, Typeable)
data ExtDecl =
DeclExt CDecl
| FDefExt FunDef
deriving (Data, Typeable)
data CDecl = CDecl Int
deriving (Data, Typeable)
data FunDef = FunDef Int
deriving (Data, Typeable)
{-!
deriving instance UniplateDirect TranslUnit
deriving instance UniplateDirect ExtDecl
deriving instance UniplateDirect CDecl
deriving instance UniplateDirect FunDef
deriving instance UniplateDirect TranslUnit FunDef
deriving instance UniplateDirect ExtDecl FunDef
!-}
-- GENERATED START
instance Uniplate TranslUnit where
{-# INLINE uniplate #-}
uniplate x = plate x
instance Uniplate ExtDecl where
{-# INLINE uniplate #-}
uniplate x = plate x
instance Uniplate CDecl where
{-# INLINE uniplate #-}
uniplate x = plate x
instance Uniplate FunDef where
{-# INLINE uniplate #-}
uniplate x = plate x
instance Biplate TranslUnit FunDef where
{-# INLINE biplate #-}
biplate (TranslUnit x1) = plate TranslUnit ||+ x1
instance Biplate ExtDecl FunDef where
{-# INLINE biplate #-}
biplate (FDefExt x1) = plate FDefExt |* x1
biplate x = plate x
-- GENERATED STOP
|
copton/ocram
|
try/uniplate/src/Ast.hs
|
gpl-2.0
| 1,427
| 0
| 8
| 381
| 268
| 144
| 124
| -1
| -1
|
{-# LANGUAGE NoImplicitPrelude #-}
module Main where
import Control.Concurrent (threadDelay)
import Control.Monad ((>>=))
import Data.Function (($), (.))
import Data.Monoid ((<>))
import Foreign.Marshal.Alloc (alloca)
import Foreign.Ptr (nullPtr)
import Foreign.Storable (peek)
import System.IO
(BufferMode(NoBuffering), IO, hSetBuffering, print, putStrLn, stdout)
import Text.Show (show)
import Control.Monad.IO.Class (liftIO)
import Phone.Internal.FFI
( createPjSua
, codecSetPriority
, destroyPjSua
, pjsuaStart
, printDevices
, setNullSndDev
)
import Phone.Internal.FFI.Account
( credDataPlainPasswd
, isAccountRegistered
, setAccount
, setAccountCredCount
, setAccountData
, setAccountDataType
, setAccountRealm
, setAccountRegUri
, setAccountScheme
, setAccountUsername
, setAccountId
, withAccountConfig
)
import Phone.Internal.FFI.CallManipulation (answerCall, hangupAll, makeCall)
import Phone.Internal.FFI.Common (pjSuccess, pjTrue, pjFalse, runPjIO, liftAlloc)
import Phone.Internal.FFI.Configuration
( OnIncomingCallHandler
, OnMediaStateHandler
, OnRegistrationStateHandler
, initializePjSua
, setOnIncomingCallCallback
, setOnMediaStateCallback
, setOnRegistrationStateCallback
, toOnIncomingCall
, toOnMediaState
, toOnRegistrationState
, withPjConfig
)
import Phone.Internal.FFI.Logging
( withLoggingConfig
, setLogFilename
, setMsgLogging
-- , setConsoleLevel
)
import Phone.Internal.FFI.PjString
( withPjString
, withPjStringPtr
)
import Phone.Internal.FFI.Media (withMediaConfig, setMediaConfigClockRate)
import Phone.Internal.FFI.Transport
( createTransport
, udpTransport
, withTransportConfig
)
incomingCallHandler :: OnIncomingCallHandler
incomingCallHandler _ callId _ = do
res <- answerCall callId 200 nullPtr nullPtr
liftIO $ putStrLn $ "call accept result: " <> show res
onRegistrationHandler :: OnRegistrationStateHandler
onRegistrationHandler id = do
liftIO $ putStrLn "#####################################################"
r <- isAccountRegistered id
liftIO $ putStrLn $ "is account registred: " <> show r
liftIO $ putStrLn "#####################################################"
onMediaState :: OnMediaStateHandler
onMediaState _ =
liftIO $ putStrLn "Media state handler!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
main :: IO ()
main = runPjIO $ do
liftIO $ hSetBuffering stdout NoBuffering
createPjSua >>= liftIO . print
liftIO . putStrLn $ "pjTrue: " <> show pjTrue
liftIO . putStrLn $ "pjSuccess: " <> show pjSuccess
let withLog f =
withPjString "pjsua_log.txt" $ \logFile ->
withLoggingConfig $ \logCfg -> do
setMsgLogging logCfg pjFalse
setLogFilename logCfg logFile
f logCfg
let withMedia f =
withMediaConfig $ \mediaCfg -> do
setMediaConfigClockRate mediaCfg 8000
f mediaCfg
-- Initialize pjsua lib.
_ <- withPjConfig $ \pjCfg -> do
liftIO (toOnIncomingCall incomingCallHandler)
>>= setOnIncomingCallCallback pjCfg
liftIO (toOnMediaState onMediaState)
>>= setOnMediaStateCallback pjCfg
liftIO (toOnRegistrationState onRegistrationHandler)
>>= setOnRegistrationStateCallback pjCfg
withLog $ \logCfg ->
withMedia $ \mediaCfg ->
initializePjSua pjCfg logCfg mediaCfg
withPjStringPtr "PCMU" $ \codecStr -> codecSetPriority codecStr 255
withPjStringPtr "PCMA" $ \codecStr -> codecSetPriority codecStr 255
-- Initialize transport
withTransportConfig $ \transportCfg -> do
-- setPort transportCfg 5060
createTransport udpTransport transportCfg nullPtr >>= liftIO . print
_ <- pjsuaStart
liftIO $ putStrLn "****************************************"
printDevices
liftIO $ putStrLn "****************************************"
-- Create account
accountId <-
withPjString "sip:420242492304@10.120.51.51" $ \accountIdPjStr ->
withPjString "sip:10.120.51.51" $ \registrationUriPjStr ->
withPjString "*" $ \realmPjStr ->
withPjString "digest" $ \schemePjStr ->
withPjString "420242492304" $ \userNamePjStr ->
withPjString "420242492304" $ \passwordPjStr ->
withAccountConfig $ \accCfg -> do
setAccountId accCfg accountIdPjStr
setAccountRegUri accCfg registrationUriPjStr
setAccountCredCount accCfg 1
setAccountRealm accCfg 0 realmPjStr
setAccountScheme accCfg 0 schemePjStr
setAccountUsername accCfg 0 userNamePjStr
setAccountDataType accCfg 0 credDataPlainPasswd
setAccountData accCfg 0 passwordPjStr
liftAlloc alloca $ \accountId -> do
_ <- setAccount accCfg pjTrue accountId
liftIO $ peek accountId
setNullSndDev
liftIO $ threadDelay 1000000
withPjStringPtr "sip:420242492306@10.120.51.51" $ \dstPjStr ->
makeCall accountId dstPjStr nullPtr nullPtr nullPtr nullPtr >>= liftIO . print
liftIO $ threadDelay 10000000
hangupAll
destroyPjSua >>= liftIO . print
|
IxpertaSolutions/hsua
|
test/test.hs
|
gpl-2.0
| 5,319
| 0
| 30
| 1,252
| 1,139
| 590
| 549
| 132
| 1
|
import System.Environment (getArgs)
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import Control.Parallel.Strategies
--encrypt :: Integer -> Integer -> ByteString -> ByteString
-- encrypt :: Integer -> Integer -> Integer -> Integer
-- encrypt n e x = x ^ e `mod` n
p, q, e, n, d, l :: Integer
p = 17
q = 11
e = l - 1
n = p * q
d = let (d',_) = gcdEx e l in if d' < 0 then d' + l else d'
l = (p-1) * (q-1) `div` gcd (p-1) (q-1)
gcdEx :: Integer -> Integer -> (Integer,Integer)
gcdEx a b
| b == 0 = (1,0)
| otherwise =
let q = a `div` b
r = a `mod` b
(x,y) = gcdEx b r
in (y, x - q * y)
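-- A minimal usage sketch with made-up arguments (not the module's e and l):
-- gcdEx returns Bezout coefficients, and 7*23 - 160*1 == 1, so this
-- evaluates to (23, -1).
exampleGcdEx :: (Integer, Integer)
exampleGcdEx = gcdEx 7 160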
byChar :: (Integer -> Integer) -> ByteString -> ByteString
byChar f = B.pack
. map (toEnum . fromIntegral . f . fromIntegral . fromEnum)
. B.unpack
byLine :: ([ByteString] -> [ByteString]) -> ByteString -> ByteString
byLine f = B.unlines . f . B.lines
encrypt_seq :: ByteString -> ByteString
encrypt_seq = B.unlines
. map (byChar encode)
. chunk 1000
decrypt_seq :: ByteString -> ByteString
decrypt_seq = byLine $ map (byChar decode)
encrypt :: ByteString -> ByteString
encrypt = B.unlines
. withStrategy (parList rdeepseq)
. map (byChar encode)
. chunk 1000
encode :: Integer -> Integer
encode x = x ^ e `mod` n
decode :: Integer -> Integer
decode x = x ^ d `mod` n
chunk :: Int -> ByteString -> [ByteString]
chunk n bs
| B.length bs < n = [bs]
| otherwise = h : chunk n t
where
(h, t) = B.splitAt n bs
main :: IO ()
main = do
[command, input] <- getArgs
content <- case input of
"-" -> B.getContents
x -> B.readFile input
case command of
"enc_seq" -> B.putStr . encrypt_seq $ content
"dec_seq" -> B.putStr . decrypt_seq $ content
"enc" -> B.putStr . encrypt $ content
_ -> error "pass `enc`, `enc_seq` or `dec_seq` as first argument"
|
y-kamiya/parallel-concurrent-haskell
|
src/Rsa/Main.hs
|
gpl-2.0
| 1,910
| 0
| 12
| 505
| 767
| 411
| 356
| 56
| 5
|
{- |
Module : $Header$
Description : Parser for symbols in translations and reductions
Copyright : (c) Christian Maeder, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
Parsing symbols for translations and reductions
-}
module CASL.SymbolParser
( symbItems
, symbMapItems
, opOrPredType
, symbKind
) where
import CASL.AS_Basic_CASL
import CASL.Formula
import CASL.ToDoc ()
import Common.AnnoState
import Common.DocUtils
import Common.Id
import Common.Keywords
import Common.Lexer
import Common.Token
import Text.ParserCombinators.Parsec
-- | parsing a possibly qualified identifier
symb :: [String] -> SYMB_KIND -> AParser st SYMB
symb ks k = parseId ks >>= \ i -> case k of
Sorts_kind -> return $ Symb_id i
_ -> case k of
Ops_kind -> do
c <- colonST
o <- opType ks
return $ Qual_id i (O_type o) $ tokPos c
Preds_kind -> do
c <- colonT
p <- predType ks
return $ Qual_id i (P_type p) $ tokPos c
_ -> do
c <- colonST
t <- opOrPredType ks
return (Qual_id i t $ tokPos c)
<|> return (Symb_id i)
-- | parsing a type for an operation or a predicate
opOrPredType :: [String] -> AParser st TYPE
opOrPredType ks =
do (b, s, p) <- opSort ks
if b then return (O_type (Op_type Partial [] s p))
else do
c <- crossT
(ts, ps) <- sortId ks `separatedBy` crossT
fmap O_type (opFunSort ks (s : ts) (c : ps))
<|> return (P_type $ Pred_type (s : ts) $ catRange $ c : ps)
<|> fmap O_type (opFunSort ks [s] [])
<|> return (A_type s)
<|> fmap P_type predUnitType
-- | parsing one symbol or a mapping of one to second symbol
symbMap :: [String] -> SYMB_KIND -> AParser st SYMB_OR_MAP
symbMap ks k =
do s <- symb ks k
do
f <- asKey mapsTo
k2 <- option Implicit $ fmap fst symbKind
case joinSymbKinds k k2 of
Nothing -> fail $ "contradicting symbol kinds '"
++ showDoc k "' and '" ++ showDoc k2 "'"
Just k3 -> do
t <- symb ks k3
return (Symb_map s t $ tokPos f)
<|> return (Symb s)
joinSymbKinds :: SYMB_KIND -> SYMB_KIND -> Maybe SYMB_KIND
joinSymbKinds k1 k2 = case k1 of
Implicit -> Just k2
_ -> if k2 == Implicit || k1 == k2 then Just k1 else Nothing
-- | parse a kind keyword
symbKind :: AParser st (SYMB_KIND, Token)
symbKind =
choice (map (\ (v, s) -> do
q <- pluralKeyword s
return (v, q))
[(Sorts_kind, sortS), (Ops_kind, opS), (Preds_kind, predS)])
<?> "kind"
{- | Parse a possible kinded list of comma separated CASL symbols.
The argument is a list of keywords to avoid as identifiers. -}
symbItems :: [String] -> AParser st SYMB_ITEMS
symbItems ks =
do (is, ps) <- symbs ks Implicit
return (Symb_items Implicit is $ catRange ps)
<|>
do (k, p) <- symbKind
(is, ps) <- symbs ks k
return (Symb_items k is $ catRange $ p : ps)
-- | parse a comma separated list of symbols
symbs :: [String] -> SYMB_KIND -> AParser st ([SYMB], [Token])
symbs ks k =
do s <- symb ks k
do
c <- commaT `followedWith` parseId ks
(is, ps) <- symbs ks k
return (s : is, c : ps)
<|> return ([s], [])
-- | parse a possible kinded list of CASL symbol mappings
symbMapItems :: [String] -> AParser st SYMB_MAP_ITEMS
symbMapItems ks =
do (is, ps) <- symbMaps ks Implicit
return (Symb_map_items Implicit is $ catRange ps)
<|>
do (k, p) <- symbKind
(is, ps) <- symbMaps ks k
return (Symb_map_items k is $ catRange $ p : ps)
-- | parse a comma separated list of symbol mappings
symbMaps :: [String] -> SYMB_KIND -> AParser st ([SYMB_OR_MAP], [Token])
symbMaps ks k =
do s <- symbMap ks k
do
c <- commaT `followedWith` parseId ks
(is, ps) <- symbMaps ks k
return (s : is, c : ps)
<|> return ([s], [])
|
nevrenato/Hets_Fork
|
CASL/SymbolParser.hs
|
gpl-2.0
| 4,127
| 0
| 22
| 1,235
| 1,374
| 690
| 684
| 100
| 4
|
{-|
Module: Network.Ricochet.Protocol.Protobuf.ContactRequest
Description: Lenses for ContactRequestChannel.proto messages
These types and lenses are useful to deal with protobuf messages sent in
relation with @im.ricochet.contact.request@ channels. They are used to
introduce a new client and ask for user approval to send messages.
@im.ricochet.auth.hidden-service@ authentication should be established
beforehand.
-}
module Network.Ricochet.Protocol.Protobuf.ContactRequest
( contact_request
, response
, CR.ContactRequest (CR.ContactRequest)
, nickname
, message_text
, R.Response (R.Response)
, R.status
, RS.Status (..)
, nicknameMaxCharacters
, messageMaxCharacters
) where
import qualified Network.Ricochet.Protocol.Data.ContactRequest as CRE
import qualified Network.Ricochet.Protocol.Data.ContactRequest.ContactRequest as CR
import Network.Ricochet.Protocol.Data.ContactRequest.Limits
import qualified Network.Ricochet.Protocol.Data.ContactRequest.Response as R
import qualified Network.Ricochet.Protocol.Data.ContactRequest.Response.Status as RS
import Network.Ricochet.Protocol.Data.Control.ChannelResult (ChannelResult)
import Network.Ricochet.Protocol.Data.Control.OpenChannel (OpenChannel)
import Network.Ricochet.Protocol.Protobuf (ext, utf8')
import Control.Lens (Lens', Traversal', _Just)
import Data.Text (Text)
-- | Request a hidden service @onion@ domain to be added to the recipient’s
-- contact list. This will usually prompt the recipient user.
contact_request :: Traversal' OpenChannel CR.ContactRequest
contact_request = ext CRE._contact_request . _Just
-- | Respond to a contact request, informing the recipient in what status the
-- request is.
response :: Traversal' ChannelResult R.Response
response = ext CRE._response . _Just
-- | An optional nickname included in the contact request, that will be shown to
-- the recipient user. It is limited to 'nicknameMaxCharacters' characters.
nickname :: Traversal' CR.ContactRequest Text
nickname = CR.nickname . _Just . utf8'
-- | An optional message text included in the contact request, that will be
-- shown to the recipient user. It is limited to 'messageMaxCharacters'
-- characters.
message_text :: Traversal' CR.ContactRequest Text
message_text = CR.message_text . _Just . utf8'
-- | The maximum amount of characters that is allowed in a nickname. This value
-- is specified in the protocol buffer specification files.
nicknameMaxCharacters :: Int
nicknameMaxCharacters = fromEnum NicknameMaxCharacters
-- | The maximum amount of characters that is allowed in a message. This value
-- is specified in the protocol buffer specification files.
messageMaxCharacters :: Int
messageMaxCharacters = fromEnum MessageMaxCharacters
|
Jugendhackt/haskell-ricochet
|
src/Network/Ricochet/Protocol/Protobuf/ContactRequest.hs
|
gpl-3.0
| 2,895
| 2
| 7
| 505
| 335
| 218
| 117
| -1
| -1
|
{-# LANGUAGE LambdaCase,TupleSections #-}
module Main where
import System.Directory
import System.Posix.Files
import System.FilePath (combine,(</>))
import System.Environment ( getArgs )
import System.Posix.Process ( getProcessID)
import Control.Applicative
import Control.Monad
import Control.Concurrent
import Control.Exception
import System.Console.ANSI
import Data.Maybe
import Data.String
import Data.List (sort)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC8
import System.IO
import Data.Word
isNoSpecialDir "." = False
isNoSpecialDir ".." = False
isNoSpecialDir _ = True
catchIO :: IO a -> (IOException -> IO a) -> IO a
catchIO = catch
handleIO = flip catchIO
filterJust = map fromJust . filter isJust
align :: Int -> String -> String
align m n = replicate (min m 2 - length n) ' ' ++ n
watch :: Int -> Int -> IO ()
watch interval pid =
let fds = "/proc" </> show pid </> "fd"
names = filter isNoSpecialDir <$> getDirectoryContents fds
getlink name = handleIO (const $ return Nothing) $
let path = fds </> name in do
islink <- isSymbolicLink <$> getSymbolicLinkStatus path
target <- if islink
then readSymbolicLink path
else return "no symlink?!"
return $ Just (name,target)
in do
names' <- fmap filterJust $ mapM getlink =<< names
let m = maximum $ map (length . fst) names'
output (l,t) = putStrLn $ align m l ++ " -> " ++ t
clearScreen
mapM output names'
threadDelay interval
watch interval pid
numbersOnly :: [String] -> [Int]
numbersOnly = sort . foldl addIfNumber []
where addIfNumber xs x = case reads x of
[(n,"")] -> n:xs
_ -> xs
getCmdLine :: Int -> IO BS.ByteString
getCmdLine pid = bracket (openBinaryFile ("/proc" </> show pid </> "cmdline") ReadMode) hClose
(BS.hGetLine >=> return . BS.map mapNull . BS.init)
where mapNull :: Word8 -> Word8
mapNull 0 = fromIntegral $ fromEnum ' '
mapNull x = x
mapMNonFailing :: (a -> IO b) -> [a] -> IO [b]
mapMNonFailing f = foldr accum (return []) . map f
where accum a b = catchIO ( (:) <$> a <*> b ) $ const b
main = getArgs >>= \args ->
let (interval,arg) = if (head args) == "-i"
then (read (args !! 1), args !! 2)
else (1 , head args)
watch' = watch $ interval * 10^6
selectProcess myPid =
let
allPids = numbersOnly . filter isNoSpecialDir <$> getDirectoryContents "/proc"
addCmdLine pid = getCmdLine pid >>= return . (pid,)
pidsWithCmdlines = allPids >>= mapMNonFailing addCmdLine
myFilter (pid,cmdline) = (fromString arg `BS.isInfixOf` cmdline) && myPid /= pid
in filter myFilter <$> pidsWithCmdlines >>= \case
[] -> putStrLn $ "No process matching \""++arg++"\" found."
x:[] -> watch' $ fst $ x
filtered -> let numbers = take (length filtered) [1..]
alignnumbers = maximum $ map (length.show) numbers
alignpids = maximum $ map (length.show.fst) filtered
output (n,(pid,cmdline)) = putStr (align alignnumbers (show n) ++ ") "
++ align alignpids (show pid)
++ ": ") >> BSC8.putStrLn cmdline
in do mapM output $ zip numbers filtered
reads <$> getLine >>= \case
[(n,"")] -> watch' $ fst $ filtered !! (n - 1)
_ -> selectProcess myPid
in case reads $ arg of
[(pid,"")] -> watch' pid
_ -> fromIntegral <$> getProcessID >>= selectProcess
|
cetu86/watchopenfd
|
Main.hs
|
gpl-3.0
| 4,169
| 0
| 29
| 1,551
| 1,294
| 664
| 630
| -1
| -1
|
{-# LANGUAGE TypeSynonymInstances, OverlappingInstances #-}
module Rebass.Path where
import Data.List
import Rebass.ListUtil
type Path = String
class Pathy a where
pathOf :: a -> Path
instance Pathy Path where
pathOf path = path
samePath :: Pathy a => Pathy b => a -> b -> Bool
samePath a b = pathOf a == pathOf b
parentOf :: Pathy a => Pathy b => a -> b -> Bool
parentOf parent child = (pathOf parent) ++ "/" `elem` inits (pathOf child)
parent :: Pathy a => a -> Path
parent path = reverse $ (drop n) $ reverse $ pathOf path
where n = 1 + length (lastPathElement path)
subPath :: Pathy a => Pathy b => a -> b -> Path
subPath root sub = (pathOf root) ++ "/" ++ (pathOf sub)
lastPathElement :: Pathy a => a -> Path
lastPathElement path = last $ split '/' (pathOf path)
isAbsolutePath :: Pathy a => a -> Bool
isAbsolutePath path = (pathOf path) `startsWith` "/"
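-- A minimal usage sketch with made-up paths: a directory counts as a parent of
-- a file nested beneath it, so this evaluates to True.
exampleParentOf :: Bool
exampleParentOf = "/home/user" `parentOf` "/home/user/song.wav"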
|
codeflows/rebass
|
src/Rebass/Path.hs
|
gpl-3.0
| 896
| 0
| 10
| 200
| 363
| 183
| 180
| -1
| -1
|
module Engine.Graphics.Render.ShadowVolume(renderShadowVolumeToStencil) where
import Control.Monad
import Engine.Graphics.Common
import Foreign.Ptr (nullPtr)
import qualified Graphics.GLUtil as GLUtil
import Graphics.Rendering.OpenGL
import qualified Graphics.Rendering.OpenGL.Raw.ARB.GeometryShader4 as GLRaw
import qualified Graphics.Rendering.OpenGL.Raw.Core31 as GLRaw
import qualified Linear as L
import Model.Classes
import Model.Entity
import Model.Geometry
import Model.Light
import Model.Object
import Model.Types
renderShadowVolumeToStencil :: TransformationMatrix ->
PointLight ->
GLUtil.ShaderProgram -> [Object] ->
IO ()
renderShadowVolumeToStencil viewProjMat pl prog os =
do currentProgram $= (Just $ GLUtil.program prog)
mapM_ (renderObjectToStencil viewProjMat pl prog) os
checkError "renderShadowVolumeToStencil"
renderObjectToStencil :: TransformationMatrix -> PointLight -> GLUtil.ShaderProgram -> Object -> IO ()
renderObjectToStencil viewProjMat pl prog o =
mapM_ (renderEntityToStencil viewProjMat objMat pl prog) $ oEntities o
where objMat = mkTransMat o
renderEntityToStencil :: TransformationMatrix -> TransformationMatrix -> PointLight -> GLUtil.ShaderProgram -> Entity -> IO ()
renderEntityToStencil viewProjMat objMat pl prog e =
unless (eAmbOverride e == Just (1.0))
$ do bindVertexArrayObject $= Just vao
GLUtil.asUniform mvp $ GLUtil.getUniform prog "MVP"
GLUtil.asUniform modelMat $ GLUtil.getUniform prog "M"
GLUtil.asUniform viewProjMat $ GLUtil.getUniform prog "VP"
GLUtil.asUniform lightPosition $ GLUtil.getUniform prog "lightPosition"
vertexAttribArray vPosition $= Enabled
bindBuffer ArrayBuffer $= Just verts
vertexAttribPointer vPosition $= (ToFloat, VertexArrayDescriptor 3 Float 0 GLUtil.offset0)
bindBuffer ElementArrayBuffer $= Just elems
GLRaw.glDrawElements
GLRaw.gl_TRIANGLES_ADJACENCY
nofTris
GLRaw.gl_UNSIGNED_INT nullPtr
vertexAttribArray vPosition $= Disabled
bindBuffer ElementArrayBuffer $= Nothing
bindVertexArrayObject $= Nothing
where entMat = mkTransMat e
modelMat = objMat L.!*! entMat
mvp = viewProjMat L.!*! modelMat
lightPosition = plPosition pl
geometry = eGeometry e
verts = gVertices geometry
elems = gTriAdjElems geometry -- important
nofTris = gNOFAdjs geometry
vao = gVAO geometry
vPosition = GLUtil.getAttrib prog "v_position"
|
halvorgb/AO2D
|
src/Engine/Graphics/Render/ShadowVolume.hs
|
gpl-3.0
| 2,879
| 0
| 11
| 833
| 627
| 317
| 310
| 56
| 1
|
{-# LANGUAGE FlexibleContexts #-}
-- -----------------------------------------------------------------------------
module Main(main) where
-- -----------------------------------------------------------------------------
import System.Environment( getArgs, getProgName )
import qualified Data.Aeson as A
import qualified Text.Parsec as P
import qualified Data.ByteString.Lazy.Char8 as BS
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Vector as V
import Control.Exception( IOException, catch )
import Control.Monad( liftM )
import System.IO( hPutStrLn, stderr )
-- -----------------------------------------------------------------------------
identifier :: P.Stream s m Char => P.ParsecT s u m String
identifier = do
c <- P.letter
cs <- P.many (P.alphaNum P.<|> P.char '_' )
return (c:cs)
arraySubs :: P.Stream s m Char => P.ParsecT s u m Int
arraySubs = do
_ <- P.char '['
_ <- P.spaces
n <- P.many1 P.digit
_ <- P.spaces
_ <- P.char ']'
return . read $ n
expresionVal :: P.Stream s m Char => A.Value -> P.ParsecT s u m A.Value
expresionVal json = do
name <- identifier
val <- case json of
A.Object obj -> return $ HM.lookup (T.pack name) obj
_ -> return Nothing
case val of
Just v -> return v
_ -> P.parserFail "value not found"
arrayVal :: P.Stream s m Char => A.Value -> P.ParsecT s u m A.Value
arrayVal json = do
idx <- arraySubs
val <- case json of
A.Array arr -> return $ arr V.!? idx
_ -> return Nothing
case val of
Just v -> return v
_ -> P.parserFail "value not found"
getValueParser :: P.Stream s m Char => A.Value -> P.ParsecT s u m A.Value
getValueParser json = do
_ <- P.optional (P.char '.')
val1 <- P.choice [P.try (expresionVal json), P.try (arrayVal json)]
P.try (getValueParser val1) P.<|> return val1
-- -----------------------------------------------------------------------------
getValue :: A.Value -> String -> Maybe A.Value
getValue val path = case P.parse (getValueParser val) "" path of
Right a -> Just a
Left _ -> Nothing
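-- A minimal usage sketch with a made-up document and path: decode a small
-- JSON value and walk it with a path expression, which should yield
-- Just (A.String "x").
exampleLookup :: Maybe A.Value
exampleLookup =
  A.decode (BS.pack "{\"values\":[{\"name\":\"x\"}]}") >>= (`getValue` "values[0].name")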
-- -----------------------------------------------------------------------------
printUsageInfo :: IO ()
printUsageInfo = do
prog <- getProgName
putStrLn $ "Usage: " ++ prog ++ " name file"
putStrLn ""
putStrLn " name"
putStrLn " Path to a json element. E.g: values[0].name"
putStrLn " file"
putStrLn " json file"
-- -----------------------------------------------------------------------------
filedata :: String -> IO (Maybe BS.ByteString)
filedata f = catch
(liftM Just (BS.readFile f))
(\e -> do
hPutStrLn stderr ("Error: " ++ show (e :: IOException))
return Nothing)
-- -----------------------------------------------------------------------------
jsonFromFile :: String -> IO (Maybe A.Value)
jsonFromFile f = do
d <- filedata f
return $ maybe Nothing A.decode d
-- -----------------------------------------------------------------------------
cleanUpValue :: Maybe A.Value -> String
cleanUpValue (Just v) = case v of
A.String s -> T.unpack s
A.Number n -> show n
A.Bool b -> show b
A.Null -> "null"
_ -> ""
cleanUpValue _ = ""
-- -----------------------------------------------------------------------------
runGetJson :: String -> String -> IO ()
runGetJson path filename = do
json <- jsonFromFile filename
putStrLn . cleanUpValue . maybe Nothing (`getValue` path) $ json
-- -----------------------------------------------------------------------------
main :: IO ()
main = do
args <- getArgs
if length args /= 2
then printUsageInfo
else runGetJson (head args) (args!!1)
-- -----------------------------------------------------------------------------
|
zhensydow/zhtoolkit
|
src/GetJson.hs
|
gpl-3.0
| 3,933
| 0
| 15
| 868
| 1,187
| 587
| 600
| 89
| 5
|
module Language.Mulang.Inspector.Literal (
isAnything,
isBool,
isChar,
isNil,
isNumber,
isSelf,
isString,
isSymbol,
isLogic,
isMath,
isLiteral,
isSimple,
isCompound,
isNonliteral,
isOther) where
import Data.Function.Extra (orElse)
import Language.Mulang.Ast
import Language.Mulang.Ast.Operator (Operator (..))
import Language.Mulang.Inspector.Primitive (Inspection)
isAnything :: Inspection
isAnything = const True
isNil :: Inspection
isNil = (==) MuNil
isNumber :: Double -> Inspection
isNumber = (==) . MuNumber
isBool :: Bool -> Inspection
isBool = (==) . MuBool
isString :: String -> Inspection
isString = (==) . MuString
isChar :: Char -> Inspection
isChar = (==) . MuChar
isSymbol :: String -> Inspection
isSymbol = (==) . MuSymbol
isSelf :: Inspection
isSelf = (==) Self
isMath :: Inspection
isMath (Primitive Plus) = True
isMath (Primitive Minus) = True
isMath (Primitive Multiply) = True
isMath (Primitive Divide) = True
isMath _ = False
isLogic :: Inspection
isLogic (Primitive Negation) = True
isLogic (Primitive And) = True
isLogic (Primitive Or) = True
isLogic _ = False
isLiteral :: Inspection
isLiteral = isSimple `orElse` isCompound
isSimple :: Inspection
isSimple (MuBool _) = True
isSimple (MuChar _) = True
isSimple (MuNumber _) = True
isSimple (MuString _) = True
isSimple (MuSymbol _) = True
isSimple MuNil = True
isSimple Self = True
isSimple _ = False
isCompound :: Inspection
isCompound (MuDict _) = True
isCompound (MuList _) = True
isCompound (MuObject _) = True
isCompound (MuTuple _) = True
isCompound _ = False
isNonliteral :: Inspection
isNonliteral = not . isLiteral
isOther :: Inspection
isOther (Other _ _) = True
isOther _ = False
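-- A minimal usage sketch: a numeric literal is simple, a string literal counts
-- as a literal, and a primitive arithmetic operator satisfies isMath.
exampleLiteralChecks :: (Bool, Bool, Bool)
exampleLiteralChecks =
  ( isSimple (MuNumber 42)
  , isLiteral (MuString "hello")
  , isMath (Primitive Plus)
  )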
|
mumuki/mulang
|
src/Language/Mulang/Inspector/Literal.hs
|
gpl-3.0
| 1,826
| 0
| 7
| 408
| 594
| 333
| 261
| 69
| 1
|
module Application.Game.Engine where
import Application.Game.Logic (eventHandler)
import Middleware.Gloss.Environment (runEnvironment)
import View.State (runGameStep)
import View.View (drawState)
ticksPerSecond = 10
runEngine state = runEnvironment ticksPerSecond state drawState eventHandler runGameStep
|
EPashkin/gamenumber-gloss
|
src/Application/Game/Engine.hs
|
gpl-3.0
| 308
| 0
| 5
| 30
| 73
| 43
| 30
| 7
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.DeleteApp
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deletes a specified app.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack, or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_DeleteApp.html>
module Network.AWS.OpsWorks.DeleteApp
(
-- * Request
DeleteApp
-- ** Request constructor
, deleteApp
-- ** Request lenses
, daAppId
-- * Response
, DeleteAppResponse
-- ** Response constructor
, deleteAppResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
newtype DeleteApp = DeleteApp
{ _daAppId :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DeleteApp' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'daAppId' @::@ 'Text'
--
deleteApp :: Text -- ^ 'daAppId'
-> DeleteApp
deleteApp p1 = DeleteApp
{ _daAppId = p1
}
-- | The app ID.
daAppId :: Lens' DeleteApp Text
daAppId = lens _daAppId (\s a -> s { _daAppId = a })
data DeleteAppResponse = DeleteAppResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeleteAppResponse' constructor.
deleteAppResponse :: DeleteAppResponse
deleteAppResponse = DeleteAppResponse
instance ToPath DeleteApp where
toPath = const "/"
instance ToQuery DeleteApp where
toQuery = const mempty
instance ToHeaders DeleteApp
instance ToJSON DeleteApp where
toJSON DeleteApp{..} = object
[ "AppId" .= _daAppId
]
instance AWSRequest DeleteApp where
type Sv DeleteApp = OpsWorks
type Rs DeleteApp = DeleteAppResponse
request = post "DeleteApp"
response = nullResponse DeleteAppResponse
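-- A minimal usage sketch with a made-up app id: build a request value via the
-- smart constructor.
exampleDeleteApp :: DeleteApp
exampleDeleteApp = deleteApp "3d6a8bc1-example"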
|
dysinger/amazonka
|
amazonka-opsworks/gen/Network/AWS/OpsWorks/DeleteApp.hs
|
mpl-2.0
| 2,962
| 0
| 9
| 664
| 349
| 214
| 135
| 47
| 1
|
module QuantLib.Prices
( PriceType (..)
, CallPrice (..)
, IntervalPrice (..)
) where
-- | Price types
data PriceType = Bid | Ask | Last | Close | Mid | MidEq | MidSafe
deriving (Show, Eq)
-- | Call price
data CallPrice = DirtyPrice {
cpPrice :: Double
} | CleanPrice {
cpPrice :: Double
} deriving (Show, Eq, Ord)
-- | Interval price
data IntervalPrice = IntervalPrice {
ipOpen :: Double,
ipHigh :: Double,
ipLow :: Double,
ipClose :: Double
} deriving (Show, Eq)
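-- A minimal usage sketch with made-up numbers: a single OHLC bar expressed as
-- an IntervalPrice.
exampleBar :: IntervalPrice
exampleBar = IntervalPrice
  { ipOpen = 1.10
  , ipHigh = 1.15
  , ipLow = 1.08
  , ipClose = 1.12
  }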
|
paulrzcz/hquantlib
|
src/QuantLib/Prices.hs
|
lgpl-3.0
| 599
| 0
| 8
| 219
| 153
| 96
| 57
| 17
| 0
|
module Prover.ProverMonad where
|
kik/ToyPr
|
src/Prover/ProverMonad.hs
|
apache-2.0
| 35
| 0
| 3
| 6
| 6
| 4
| 2
| 1
| 0
|
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Main where
import Control.Concurrent
import System.Directory
import Control.Monad (msum)
import Control.Monad.Zip
import Data.Time.Clock
import Data.Time.Calendar
import Happstack.Server
import Pages
runCleanup :: IO ()
runCleanup = do
pastePaths <- (map $ (++) "pastes/") <$> listDirectory "pastes"
modificationTimes <- mapM getModificationTime pastePaths
pathTimeList <- return $ zip pastePaths modificationTimes
currTime <- getCurrentTime
mapM_
(\(path, tm) ->
let UTCTime dayCurrent _ = currTime
UTCTime dayPaste _ = tm
pasteDiff = diffDays dayCurrent dayPaste
in if pasteDiff >= 7 -- delete after a week
then removeFile path
else return ())
pathTimeList
threadDelay 3600000000 -- an hour
runCleanup
kopasteApp :: ServerPart Response
kopasteApp =
msum
[ do nullDir
indexPage
, dir "static" $ serveDirectory DisableBrowsing [] "static"
, dir "upload" $ uploadPage
, dir "paste" $ path $ showPastePage
]
main :: IO ()
main = (forkIO runCleanup) >> simpleHTTP nullConf {port = 34522} kopasteApp
|
koto-bank/kopaste
|
src/Main.hs
|
bsd-2-clause
| 1,183
| 0
| 14
| 270
| 324
| 166
| 158
| 37
| 2
|
{-# LANGUAGE TemplateHaskell, OverloadedStrings #-}
module Handler.Feed where
import Lounge
import Storage
import Settings
import Common
import Model.Entry
import Data.Time.Clock.POSIX
import Yesod.Helpers.AtomFeed
getFeedR :: Handler RepAtom
getFeedR = do
entries <- liftIO $ runLSM Settings.connStr $ loadEntriesByDate
renderFeed entries
getTagFeedR :: Tag -> Handler RepAtom
getTagFeedR tag = do
entries <- (liftIO $ runLSM Settings.connStr $ loadEntriesByTag tag)
renderFeed entries
renderFeed :: [(EntryId, Entry)] -> Handler RepAtom
renderFeed entries = do
let entries' = take 5 entries
atomFeed $ AtomFeed
{ atomTitle = title
, atomLinkSelf = FeedR
, atomLinkHome = EntriesR
, atomUpdated = if null entries' then posixSecondsToUTCTime 0 else eTimestamp $ snd $ head entries'
, atomEntries = map go entries'
}
where
go e = AtomFeedEntry
{ atomEntryLink = EntryR $ fst e
, atomEntryUpdated = eTimestamp $ snd e
, atomEntryTitle = eTitle $ snd e
, atomEntryContent = preEscapedString $ eText $ snd $ e
}
|
fortytools/lounge
|
Handler/Feed.hs
|
bsd-2-clause
| 1,053
| 8
| 12
| 197
| 317
| 168
| 149
| 31
| 2
|
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings
( widgetFile
, staticRoot
, staticDir
, Extra (..)
, parseExtra
, BitloveEnv (..)
) where
import Prelude
import Text.Shakespeare.Text (st)
import Language.Haskell.TH.Syntax
import Yesod.Default.Config
import qualified Yesod.Default.Util
import Data.Text (Text)
import Data.Yaml
import Control.Applicative
import Settings.Development
import Data.Default
data BitloveEnv = Development
| Production4
| Production6
| Production4SSL
| Production6SSL
deriving (Read, Show, Enum, Bounded)
-- Static setting below. Changing these requires a recompile
-- | The location of static files on your system. This is a file system
-- path. The default value works properly with your scaffolded site.
staticDir :: FilePath
staticDir = "static"
-- | The base URL for your static files. As you can see by the default
-- value, this can simply be "static" appended to your application root.
-- A powerful optimization can be serving static files from a separate
-- domain name. This allows you to use a web server optimized for static
-- files, more easily set expires and cache values, and avoid possibly
-- costly transference of cookies on static files. For more information,
-- please see:
-- http://code.google.com/speed/page-speed/docs/request.html#ServeFromCookielessDomain
--
-- If you change the resource pattern for StaticR in Foundation.hs, you will
-- have to make a corresponding change here.
--
-- To see how this value is used, see urlRenderOverride in Foundation.hs
staticRoot :: AppConfig DefaultEnv x -> Text
staticRoot conf = [st|#{appRoot conf}/static|]
-- The rest of this file contains settings which rarely need changing by a
-- user.
widgetFile :: String -> Q Exp
widgetFile = if development then Yesod.Default.Util.widgetFileReload def
else Yesod.Default.Util.widgetFileNoReload def
data Extra = Extra
{ extraCopyright :: Text
, extraCert :: Maybe String
, extraKey :: Maybe String
} deriving Show
parseExtra :: BitloveEnv -> Object -> Parser Extra
parseExtra _ o = Extra
<$> o .: "copyright"
<*> o .:? "cert"
<*> o .:? "key"
|
jannschu/bitlove-ui
|
Settings.hs
|
bsd-2-clause
| 2,549
| 0
| 10
| 539
| 318
| 197
| 121
| -1
| -1
|
{- |
Use this application to generate the 'Data.Time.Clock.AnnouncedLeapSeconds'
module. Compile and pipe an EOP file from Celestrak through the binary,
e.g.:
curl http://www.celestrak.com/SpaceData/eop19620101.txt | ./MakeLeapSecondTable > Data/Time/Clock/AnnouncedLeapSeconds.hs
-}
import Astro.Celestrak
import Data.List (intercalate)
import Data.Time (Day)
import Data.Time.Format (formatTime)
import System.Locale (defaultTimeLocale)
-- | Converts an 'EOPList' into a minimal list of (day, leapsecond) pairs
-- in reverse chronological order.
eopToLS :: EOPList a -> [(Day, Integer)]
eopToLS = reverse . keepInitial . fmap (fmap deltaAT)
-- | Keeps the first pair with a given snd value while dropping the
-- following pairs with the same snd value.
keepInitial :: Eq a => [(b,a)] -> [(b,a)]
keepInitial (x:xs) = x : keepInitial (dropWhile (sndEq x) xs) where sndEq (_,x) (_,y) = x == y
keepInitial [] = []
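-- A minimal usage sketch: keepInitial keeps only the first pair of each run of
-- equal snd values, so this evaluates to [(1,'a'),(3,'b')].
exampleKeepInitial :: [(Int, Char)]
exampleKeepInitial = keepInitial [(1, 'a'), (2, 'a'), (3, 'b'), (4, 'b')]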
{-
The above and this function are candidates for moving to "Astro.Celestrak":
-- | Converts an 'EOPList' to a light weight 'LeapSecondTable' (its internal
-- data is a short list as opposed to a huge array for the 'LeapSecondTable'
-- provided by "Astro.Celestrak".
eopToLST :: EOPList a -> LeapSecondTable
eopToLST eops d = snd $ headDef (undefined,0) $ dropWhile ((>d).fst) $ eopToLS eops
-}
-- | Convert a day/leapsecond pair into a compilable string.
lsToString :: (Day, Integer) -> String
lsToString (d,s) = formatTime defaultTimeLocale fmt d
where fmt = "(fromGregorian %Y %m %d, " ++ show s ++ ")"
-- | Shows a list in compilable format using the passed function to display
-- the elements of the list.
showL :: (a -> String) -> [a] -> String
showL showf xs = intercalate "\n : " (map showf xs) ++ "\n : []"
-- | Compilable leapsecond module.
showModule :: EOPList a -> String
showModule eops = unlines
[ "-- This file was automatically generated."
, ""
, "{- |"
, " Copyright : Copyright (C) 2009-2015 Bjorn Buckwalter"
, " License : BSD3"
, ""
, " Maintainer : bjorn.buckwalter@gmail.com"
, " Stability : stable"
, " Portability: full"
, ""
, "Provides a static 'Data.Time.Clock.TAI.LeapSecondTable' \\\"containing\\\""
, "the leap seconds announced at library release time. This version"
, "will become invalidated when/if the International Earth Rotation"
, "and Reference Systems Service (IERS) announces a new leap second at"
, "<http://hpiers.obspm.fr/eoppc/bul/bulc/bulletinc.dat>."
, "At that time a new version of the library will be released, against"
, "which any code wishing to remain up to date should be recompiled."
, ""
, "This module is intended to provide a quick-and-dirty leap second solution"
, "for one-off analyses concerned only with the past and present (i.e. up"
, "until the next as of yet unannounced leap second), or for applications"
, "which can afford to be recompiled against an updated library as often"
, "as every six months."
, "-}"
, ""
, "module Data.Time.Clock.AnnouncedLeapSeconds (lst) where"
, ""
, "import Data.Maybe (listToMaybe)"
, "import Data.Time (Day, fromGregorian)"
, "import Data.Time.Clock.TAI (LeapSecondTable)"
, ""
, "-- | List of all leap seconds up to 2015-07-01. An"
, "-- estimate of hypothetical leap seconds prior to 1972-01-01 is"
, "-- included. These can be understood as leap seconds that may have"
, "-- been introduced had UTC used the SI second since its inception in 1961."
, "-- One should be extremely careful in using this information as it is"
, "-- generally not appropriate. One specific case where it may be useful"
, "-- is in reducing the error in computed time differences between UTC time"
, "-- stamps in the 1961--1971 range from the order of 10 SI seconds to 1 SI"
, "-- second."
, "pseudoLeapSeconds :: [(Day, Int)]"
, "pseudoLeapSeconds = " ++ showL lsToString ls
, ""
, "-- | List of all official leap seconds from 1972-01-01 to 2015-07-01."
, "leapSeconds :: [(Day, Int)]"
, "leapSeconds = takeWhile (> introduction) pseudoLeapSeconds ++ [introduction]"
, " where"
, " introduction = (fromGregorian 1972 01 01, 10)"
, ""
, "-- | 'Data.Time.Clock.TAI.LeapSecondTable' containing all leap seconds"
, "-- from 1972-01-01 to " ++ (show.fst.head) ls ++ "."
, "lst :: LeapSecondTable"
, "lst d = fmap snd $ listToMaybe $ dropWhile ((>d).fst) leapSeconds"
] where ls = eopToLS eops
main = do
interact (showModule . parseEOPData)
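-- A minimal sketch (not part of the original program; the name below is
-- hypothetical) illustrating the 'keepInitial' step described above.
keepInitialExample :: [(Int, Integer)]
keepInitialExample = keepInitial [(1, 10), (2, 10), (3, 11), (4, 11), (5, 12)]
-- expected to evaluate to [(1, 10), (3, 11), (5, 12)]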
|
bjornbm/leapseconds-announced
|
MakeLeapSecondTable.hs
|
bsd-3-clause
| 4,479
| 0
| 11
| 846
| 553
| 320
| 233
| 73
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Mismi.IAM.Core.Data (
IamRole (..)
) where
import P
newtype IamRole =
IamRole {
iamRole :: Text
} deriving (Eq, Show, Ord)
|
ambiata/mismi
|
mismi-iam-core/src/Mismi/IAM/Core/Data.hs
|
bsd-3-clause
| 230
| 0
| 6
| 60
| 50
| 33
| 17
| 9
| 0
|
{-# LANGUAGE OverloadedStrings #-}
module Snap.Snaplet.OAuth.Internal.Utils where
import Control.Applicative
import Data.Aeson
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Network.OAuth.OAuth2
import Network.OAuth.OAuth2.HttpClient
import Snap hiding (Response)
import qualified Text.Show.ByteString as TSB
----------------------------------------------------------------------
intToByteString :: Integer -> BS.ByteString
intToByteString = toStrickBS' . TSB.show
sToText :: Show s => s -> T.Text
sToText = T.pack . show
toStrickBS' :: LBS.ByteString -> BS.ByteString
toStrickBS' = BS.concat . LBS.toChunks
sToBS :: String -> BS.ByteString
sToBS = T.encodeUtf8 . T.pack
lbsToText :: LBS.ByteString -> T.Text
lbsToText = T.decodeUtf8 . toStrickBS'
textToBS :: T.Text -> BS.ByteString
textToBS = T.encodeUtf8
decodedParam :: MonadSnap m => BS.ByteString -> m BS.ByteString
decodedParam p = fromMaybe "" <$> getParam p
----------------------------------------------------------------------
apiRequestOAuth :: FromJSON a
=> BS.ByteString -- ^ API URL
-> OAuth2 -- ^ For append access token
-> IO (Maybe a)
apiRequestOAuth uri oa = doJSONGetRequest $ appendAccessToken uri oa
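-- A small usage sketch (hypothetical values, not part of the original
-- module) for the conversion helpers above.
conversionExamples :: (BS.ByteString, T.Text, BS.ByteString)
conversionExamples =
    ( intToByteString 42        -- "42"
    , sToText (3.14 :: Double)  -- "3.14"
    , sToBS "hello"             -- "hello"
    )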
|
HaskellCNOrg/snaplet-oauth
|
src/Snap/Snaplet/OAuth/Internal/Utils.hs
|
bsd-3-clause
| 1,569
| 0
| 10
| 408
| 335
| 193
| 142
| 32
| 1
|
module Test.Util.Util where
sb = " should be "
|
visood/bioalgo
|
test/Test/Util/Util.hs
|
bsd-3-clause
| 48
| 0
| 4
| 10
| 12
| 8
| 4
| 2
| 1
|
{-# LANGUAGE PolyKinds #-}
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeFamilyDependencies #-}
{-# LANGUAGE TypeOperators #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Generics.Internal.VL.Iso
-- Copyright : (C) 2020 Csongor Kiss
-- License : BSD3
-- Maintainer : Csongor Kiss <kiss.csongor.kiss@gmail.com>
-- Stability : experimental
-- Portability : non-portable
--
-- Internal lens helpers. Only exported for Haddock
--
-----------------------------------------------------------------------------
module Data.Generics.Internal.VL.Iso where
import Data.Coerce (coerce)
import Data.Functor.Identity (Identity(..))
import Data.Profunctor
import GHC.Generics
import Data.Generics.Internal.GenericN (Rec (..), GenericN (..), Param (..))
import qualified Data.Generics.Internal.Profunctor.Iso as P
data Exchange a b s t = Exchange (s -> a) (b -> t)
instance Functor (Exchange a b s) where
fmap f (Exchange p q) = Exchange p (f . q)
{-# INLINE fmap #-}
instance Profunctor (Exchange a b) where
dimap f g (Exchange sa bt) = Exchange (sa . f) (g . bt)
{-# INLINE dimap #-}
lmap f (Exchange sa bt) = Exchange (sa . f) bt
{-# INLINE lmap #-}
rmap f (Exchange sa bt) = Exchange sa (f . bt)
{-# INLINE rmap #-}
type Iso' s a
= forall p f. (Profunctor p, Functor f) => p a (f a) -> p s (f s)
type Iso s t a b
= forall p f. (Profunctor p, Functor f) => p a (f b) -> p s (f t)
fromIso :: Iso s t a b -> Iso b a t s
fromIso l = withIso l $ \ sa bt -> iso bt sa
{-# inline fromIso #-}
iso2isovl :: P.Iso s t a b -> Iso s t a b
iso2isovl _iso = P.withIso _iso $ \ sa bt -> iso sa bt
{-# INLINE iso2isovl #-}
-- | Extract the two functions, one from @s -> a@ and
-- one from @b -> t@ that characterize an 'Iso'.
withIso :: Iso s t a b -> ((s -> a) -> (b -> t) -> r) -> r
withIso ai k = case ai (Exchange id Identity) of
Exchange sa bt -> k sa (coerce bt)
{-# inline withIso #-}
-- | A type and its generic representation are isomorphic
repIso :: (Generic a, Generic b) => Iso a b (Rep a x) (Rep b x)
repIso = iso from to
repIsoN :: (GenericN a, GenericN b) => Iso a b (RepN a x) (RepN b x)
repIsoN = iso fromN toN
paramIso :: Iso (Param n a) (Param n b) a b
paramIso = iso getStarParam StarParam
-- | 'M1' is just a wrapper around `f p`
mIso :: Iso (M1 i c f p) (M1 i c g p) (f p) (g p)
mIso = iso unM1 M1
kIso :: Iso (K1 r a p) (K1 r b p) a b
kIso = iso unK1 K1
recIso :: Iso (Rec r a p) (Rec r b p) a b
recIso = iso (unK1 . unRec) (Rec . K1)
prodIso :: Iso ((a :*: b) x) ((a' :*: b') x) (a x, b x) (a' x, b' x)
prodIso = iso (\(a :*: b) -> (a, b)) (\(a, b) -> (a :*: b))
iso :: (s -> a) -> (b -> t) -> Iso s t a b
iso sa bt = dimap sa (fmap bt)
{-# INLINE iso #-}
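-- A minimal sketch (the newtype below is hypothetical, not part of this
-- module) showing how 'iso' and 'withIso' fit together.
newtype Wrapped a = Wrapped { unwrap :: a }

wrappedIso :: Iso (Wrapped a) (Wrapped b) a b
wrappedIso = iso unwrap Wrapped

bumpWrapped :: Wrapped Int -> Wrapped Int
bumpWrapped w = withIso wrappedIso $ \sa bt -> bt (sa w + 1)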
|
kcsongor/generic-lens
|
generic-lens/src/Data/Generics/Internal/VL/Iso.hs
|
bsd-3-clause
| 3,000
| 0
| 11
| 695
| 1,113
| 606
| 507
| 58
| 1
|
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : dave.laing.80@gmail.com
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Fragment.Pair.Rules.Type.Infer.Common (
PairInferTypeHelper(..)
, PairInferTypeConstraint
, pairInferTypeInput
) where
import Data.Proxy (Proxy(..))
import GHC.Exts (Constraint)
import Bound (Bound)
import Control.Lens (preview, review)
import Control.Lens.Wrapped (_Wrapped, _Unwrapped)
import Ast.Type
import Ast.Type.Var
import Ast.Error.Common.Type
import Ast.Pattern
import Ast.Term
import Data.Bitransversable
import Data.Functor.Rec
import Rules.Unification
import Fragment.Pair.Ast.Type
import Fragment.Pair.Ast.Error
import Fragment.Pair.Ast.Pattern
import Fragment.Pair.Ast.Term
import Rules.Type.Infer.Common
import Rules.Type.Infer.SyntaxDirected (ITSyntax)
import Control.Monad.Except (MonadError)
import Rules.Type.Infer.Offline (ITOffline)
import Control.Monad.State (MonadState)
import Data.Equivalence.Monad (classDesc)
class MkInferType i => PairInferTypeHelper i where
type PairInferTypeHelperConstraint e w s r (m :: * -> *) (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) a i :: Constraint
unifyPairRules :: PairInferTypeHelperConstraint e w s r m ki ty a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> [UnificationRule m (TyAst ki ty) (TyAstVar a)]
createPair :: PairInferTypeHelperConstraint e w s r m ki ty a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> Type ki ty a
-> Type ki ty a
-> InferTypeMonad m ki ty a i (Type ki ty a)
expectPair :: PairInferTypeHelperConstraint e w s r m ki ty a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> Type ki ty a
-> InferTypeMonad m ki ty a i (Type ki ty a, Type ki ty a)
instance PairInferTypeHelper ITSyntax where
type PairInferTypeHelperConstraint e w s r m ki ty a ITSyntax =
( AsTyPair ki ty
, MonadError e m
, AsExpectedTyPair e ki ty a
)
unifyPairRules _ _ =
[]
createPair _ _ ty1 ty2 =
return . review _TyPair $ (ty1, ty2)
expectPair _ _ =
expectTyPair
instance PairInferTypeHelper ITOffline where
type PairInferTypeHelperConstraint e w s r m ki ty a ITOffline =
( AsTyPair ki ty
, MonadState s m
, HasTyVarSupply s
, ToTyVar a
, Ord a
, OrdRec ki
, OrdRec (ty ki)
, MonadError e m
, AsUnknownTypeError e
, AsOccursError e (TyAst ki ty) (TyAstVar a)
, AsUnificationMismatch e (TyAst ki ty) (TyAstVar a)
, AsUnificationExpectedEq e (TyAst ki ty) (TyAstVar a)
, Bound ki
, Bound (ty ki)
, Bitransversable ki
, Bitransversable (ty ki)
)
unifyPairRules _ _ =
let
unifyPair unifyMany (UCEq ty1 ty2) = do
let ty1' = review _Wrapped ty1
ty2' = review _Wrapped ty2
(p1a, p1b) <- preview _TyPair ty1'
(p2a, p2b) <- preview _TyPair ty2'
let p1a' = review _Unwrapped p1a
p1b' = review _Unwrapped p1b
p2a' = review _Unwrapped p2a
p2b' = review _Unwrapped p2b
return $ do
c1a <- classDesc p1a'
c1b <- classDesc p1b'
c2a <- classDesc p2a'
c2b <- classDesc p2b'
unifyMany [c1a, c1b] [c2a, c2b]
in
[ UnificationMany unifyPair ]
createPair m i ty1 ty2 = do
tyV <- fmap (review _TyVar) freshTyVar
expectTypeEq m i (review _TyPair (ty1, ty2)) tyV
return tyV
expectPair m i tyP = do
tyP1 <- fmap (review _TyVar) freshTyVar
tyP2 <- fmap (review _TyVar) freshTyVar
expectTypeEq m i tyP (review _TyPair (tyP1, tyP2))
return (tyP1, tyP2)
type PairInferTypeConstraint e w s r m ki ty pt tm a i =
( PairInferConstraint e w s r m ki ty pt tm a i
, PairCheckConstraint e w s r m ki ty pt tm a i
)
type PairInferConstraint e w s r m ki ty pt tm a i =
( BasicInferTypeConstraint e w s r m ki ty pt tm a i
, PairInferTypeHelper i
, PairInferTypeHelperConstraint e w s r m ki ty a i
, AsTmPair ki ty pt tm
, AsTyPair ki ty
)
type PairCheckConstraint e w s r m ki ty pt tm a i =
( BasicInferTypeConstraint e w s r m ki ty pt tm a i
, PairInferTypeHelper i
, PairInferTypeHelperConstraint e w s r m ki ty a i
, AsPtPair pt
, AsTyPair ki ty
)
pairInferTypeInput :: PairInferTypeConstraint e w s r m ki ty pt tm a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> InferTypeInput e w s r m (InferTypeMonad m ki ty a i) ki ty pt tm a
pairInferTypeInput m i =
InferTypeInput
(unifyPairRules m i)
[ InferTypeRecurse $ inferTmPair m i
, InferTypeRecurse $ inferTmFst m i
, InferTypeRecurse $ inferTmSnd m i
]
[ PCheckRecurse $ checkPair m i]
inferTmPair :: PairInferConstraint e w s r m ki ty pt tm a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> (Term ki ty pt tm a -> InferTypeMonad m ki ty a i (Type ki ty a))
-> Term ki ty pt tm a
-> Maybe (InferTypeMonad m ki ty a i (Type ki ty a))
inferTmPair m i inferFn tm = do
(tm1, tm2) <- preview _TmPair tm
return $ do
ty1 <- inferFn tm1
ty2 <- inferFn tm2
createPair m i ty1 ty2
inferTmFst :: PairInferConstraint e w s r m ki ty pt tm a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> (Term ki ty pt tm a -> InferTypeMonad m ki ty a i (Type ki ty a))
-> Term ki ty pt tm a
-> Maybe (InferTypeMonad m ki ty a i (Type ki ty a))
inferTmFst m i inferFn tm = do
tmP <- preview _TmFst tm
return $ do
tyP <- inferFn tmP
(ty1, _) <- expectPair m i tyP
return ty1
inferTmSnd :: PairInferConstraint e w s r m ki ty pt tm a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> (Term ki ty pt tm a -> InferTypeMonad m ki ty a i (Type ki ty a))
-> Term ki ty pt tm a
-> Maybe (InferTypeMonad m ki ty a i (Type ki ty a))
inferTmSnd m i inferFn tm = do
  tmP <- preview _TmSnd tm
return $ do
tyP <- inferFn tmP
(_, ty2) <- expectPair m i tyP
return ty2
checkPair :: PairCheckConstraint e w s r m ki ty pt tm a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> (Pattern pt a -> Type ki ty a -> InferTypeMonad m ki ty a i [Type ki ty a])
-> Pattern pt a
-> Type ki ty a
-> Maybe (InferTypeMonad m ki ty a i [Type ki ty a])
checkPair m i checkFn p ty = do
(p1, p2) <- preview _PtPair p
return $ do
(ty1, ty2) <- expectPair m i ty
mappend <$> checkFn p1 ty1 <*> checkFn p2 ty2
|
dalaing/type-systems
|
src/Fragment/Pair/Rules/Type/Infer/Common.hs
|
bsd-3-clause
| 6,944
| 0
| 16
| 2,069
| 2,607
| 1,331
| 1,276
| 180
| 1
|
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE GADTs, NoImplicitPrelude, UnicodeSyntax, FlexibleContexts, InstanceSigs, UndecidableInstances
, AllowAmbiguousTypes#-}
module Data.Nested.Internal
( -- * Tree and Forest types
Tree (..), Forest (..)
-- * Query
, fruit, forest, trees, treeAssocs
, nullTree, nullForest
, sizeTree, sizeForest
, lookupTree, lookupForest
, memberTree, memberForest
-- * Construction
, emptyTree, emptyForest
, singletonTree, singletonForest
, fromFoldableTree, fromFoldableForest
-- * List
, toListForest, toListTree
, fromListTree, fromListForest
-- * Utils
, unionTree, unionForest
, unionTreeWithKey, unionForestWithKey
, unionTreeWithKey'
, unionTreeWith, unionForestWith
, apTree, apForest
, foldrForestWithAncestors
, foldrForestWithAncestors1
, foldrTreeWithAncestors1
, foldrForestWithAncestorsAndLeafMarker
, foldrForestWithAncestorsAndLeafMarker1
, foldrTreeWithAncestorsAndLeafMarker1
) where
import qualified Data.List as L
import Prelude.Unicode ((⊥))
import Prelude (Num, (+), Eq (..), (.), undefined)
import Data.Maybe (Maybe(Just, Nothing), maybe, isJust)
import Data.Int (Int)
import Data.Bool (Bool, otherwise)
import Data.Ord (Ord)
import Data.Tuple (uncurry, snd)
import Data.Function (flip, ($), const, id)
import Data.Function.Unicode ((∘))
import Data.Functor (Functor, fmap, (<$>))
import Data.Foldable (Foldable, foldr, foldMap)
import Data.Traversable (Traversable, mapAccumL, traverse)
import Data.Monoid (Monoid, mempty, mappend, mconcat)
import Data.Monoid.Unicode ((⊕))
import Text.Show (Show)
import Control.Arrow ((&&&))
import Control.Monad (MonadPlus, (>>=), join, return, mplus)
import Control.Applicative (Applicative(..), (<*>))
import Control.Applicative.Unicode ((⊛))
import Data.Map (Map)
import qualified Data.Map as M
import Data.MonoTraversable (MonoApplicative (..))
data Tree κ α where
Tree ∷ { fruit ∷ α
, forest ∷ Forest κ α
} → Tree κ α
deriving (Show)
data Forest κ α where
Forest ∷ { unForest ∷ Map κ (Tree κ α) } → Forest κ α
deriving (Show)
instance Functor (Forest κ) where
fmap f = Forest ∘ ((f <$>) <$>) ∘ unForest
instance Functor (Tree κ) where
fmap f (Tree v ts) = Tree (f v) (f <$> ts)
instance (Ord κ, Monoid α) ⇒ Monoid (Forest κ α) where
mempty = emptyForest
mappend = unionForestWith (⊕)
instance (Ord κ, Monoid α) ⇒ Monoid (Tree κ α) where
mempty = Tree mempty mempty
t1 `mappend` t2 = Tree (fruit t1 ⊕ fruit t2) (forest t1 ⊕ forest t2)
instance Foldable (Forest κ) where
foldMap f = foldMap (foldMap f) ∘ unForest
foldr f z = foldr (flip $ foldr f) z ∘ unForest
instance Foldable (Tree κ) where
foldMap f = (f ∘ fruit) ⊕ (foldMap f ∘ forest)
foldr f z (Tree v ts) = f v (foldr f z ts)
instance Traversable (Forest κ) where
traverse f = (Forest <$>) <$> traverse (traverse f) ∘ unForest
instance Traversable (Tree κ) where
traverse f (Tree v ts) = Tree <$> f v ⊛ traverse f ts
nullForest ∷ Forest κ α → Bool
nullForest = M.null ∘ unForest
nullTree ∷ Tree κ α → Bool
nullTree = nullForest ∘ forest
trees ∷ Forest κ α → [Tree κ α]
trees = M.elems ∘ unForest
treeAssocs ∷ Forest κ α → [(κ, Tree κ α)]
treeAssocs = M.assocs ∘ unForest
sizeForest ∷ Forest κ α → Int
sizeForest = foldr (const (+1)) 0
sizeTree ∷ Tree κ α → Int
sizeTree = (+1) ∘ sizeForest ∘ forest
-- a more general version would use Foldable φ as input and a user-specifiable Monoid output
lookupForest ∷ (Traversable φ, Ord κ) ⇒ Forest κ α → φ κ → φ (Maybe α)
lookupForest f = snd ∘ mapAccumL (flip lookup) (Just f)
where lookup ∷ Ord κ ⇒ κ → Maybe (Forest κ α) → (Maybe (Forest κ α), Maybe α)
lookup k = (fmap forest &&& fmap fruit) ∘ join ∘ fmap (M.lookup k ∘ unForest)
lookupTree ∷ (Traversable φ, Ord κ) ⇒ Tree κ α → φ κ → (α, φ (Maybe α))
lookupTree t = (fruit t,) ∘ lookupForest (forest t)
memberTree ∷ (Traversable φ, Ord κ) ⇒ Tree κ α → φ κ → φ Bool
memberTree t = (isJust <$>) ∘ snd ∘ lookupTree t
memberForest ∷ (Traversable φ, Ord κ) ⇒ Forest κ α → φ κ → φ Bool
memberForest f = (isJust <$>) ∘ lookupForest f
emptyForest ∷ Forest κ α
emptyForest = Forest M.empty
emptyTree ∷ α → Tree κ α
emptyTree v = Tree v emptyForest
singletonForest ∷ Foldable φ ⇒ φ (κ,α) → Forest κ α
singletonForest = foldr (uncurry singleton) emptyForest
where singleton k v = Forest ∘ M.singleton k ∘ Tree v
singletonTree ∷ Foldable φ ⇒ α → φ (κ,α) → Tree κ α
singletonTree x = Tree x ∘ singletonForest
fromFoldableForest ∷ (Foldable φ, Foldable ψ, Ord κ) ⇒ ψ (φ (κ, α)) → Forest κ α
fromFoldableForest = foldr (unionForest ∘ singletonForest) emptyForest
fromFoldableTree ∷ (Foldable φ, Foldable ψ, Ord κ) ⇒ α → ψ (φ (κ, α)) → Tree κ α
fromFoldableTree x = Tree x ∘ fromFoldableForest
fromListForest ∷ Ord κ ⇒ [[(κ, α)]] → Forest κ α
fromListForest = fromFoldableForest
fromListTree ∷ Ord κ ⇒ α → [[(κ, α)]] → Tree κ α
fromListTree = fromFoldableTree
toListForest ∷ Forest κ α → [[(κ, α)]]
toListForest = fmap L.reverse ∘ foldrForestWithAncestorsAndLeafMarker leafCons []
where leafCons b = if b then (:) else flip const
toListTree ∷ Tree κ α → (α, [[(κ, α)]])
toListTree t = (fruit t, toListForest (forest t))
unionForest ∷ Ord κ ⇒ Forest κ α → Forest κ α → Forest κ α
unionForest (Forest f1) (Forest f2) = Forest $ M.unionWith unionTree f1 f2
unionTree ∷ Ord κ ⇒ Tree κ α → Tree κ α → Tree κ α
unionTree (Tree _x1 f1) (Tree x2 f2) = Tree x2 (unionForest f1 f2)
unionForestWithKey ∷ Ord κ ⇒ (κ → α → α → α) → Forest κ α → Forest κ α → Forest κ α
unionForestWithKey f (Forest m1) (Forest m2) = Forest $ M.unionWithKey (unionTreeWithKey' f) m1 m2
unionForestWith ∷ Ord κ ⇒ (α → α → α) → Forest κ α → Forest κ α → Forest κ α
unionForestWith f = unionForestWithKey (const f)
unionTreeWithKey' ∷ Ord κ ⇒ (κ → α → α → α) → κ → Tree κ α → Tree κ α → Tree κ α
unionTreeWithKey' f k t1 t2 = Tree (f k (fruit t1) (fruit t2)) (unionForestWithKey f (forest t1) (forest t2))
unionTreeWithKey ∷ Ord κ ⇒ (α → α → α) → (κ → α → α → α) → Tree κ α → Tree κ α → Tree κ α
unionTreeWithKey g f t1 t2 = Tree (g (fruit t1) (fruit t2)) (unionForestWithKey f (forest t1) (forest t2))
unionTreeWith ∷ Ord κ ⇒ (α → α → α) → Tree κ α → Tree κ α → Tree κ α
unionTreeWith f = unionTreeWithKey f (const f)
--class Functor f => MonoApplicative f where
--pureM :: a -> f a
--(<#>) :: f (a -> a) -> f a -> f a
--instance (Ord a, Eq a) => MonoApplicative (Map a) where
--pureM = const M.empty
--(<#>) = funcApFull
instance (Ord κ, MonoApplicative (Tree κ)) => MonoApplicative (Forest κ) where
opure = const emptyForest
oap = apForest
instance Ord a => MonoApplicative (Tree a) where
opure = emptyTree
oap = apTree
funcApFull :: Ord k => Map k (a -> a) -> Map k a -> Map k a
funcApFull f a = funcAp (M.union f (M.map (const id) a)) a
funcAp :: Ord k => Map k (a -> a) -> Map k a -> Map k a
funcAp = M.mergeWithKey (\_ f a -> Just $ f a) mapPure mapPure
apTree :: (Ord κ) => Tree κ (a -> a) -> Tree κ a -> Tree κ a
apTree (Tree ax af) (Tree bx bf) = Tree (ax bx) $ af `oap` bf
apForest ∷ (Ord κ, MonoApplicative (Tree κ)) => Forest κ (a -> a) -> Forest κ a -> Forest κ a
apForest (Forest a) (Forest b) = Forest $ M.foldrWithKey (\_ y z -> (y `oap`) <$> z) b a
mapPure = const M.empty
foldrForestWithAncestors ∷ ([(κ, α)] → β → β) → β → Forest κ α → β
foldrForestWithAncestors f = foldrForestWithAncestors1 f []
foldrForestWithAncestors1 ∷ ([(κ, α)] → β → β) → [(κ, α)] → β → Forest κ α → β
foldrForestWithAncestors1 f kvs z = M.foldrWithKey (foldrTreeWithAncestors1 f kvs) z ∘ unForest
foldrTreeWithAncestors1 ∷ ([(κ, α)] → β → β) → [(κ, α)] → κ → Tree κ α → β → β
foldrTreeWithAncestors1 f kvs k t z = f as (foldrForestWithAncestors1 f as z (forest t))
where as = (k, fruit t):kvs
foldrForestWithAncestorsAndLeafMarker ∷ (Bool → [(κ, α)] → β → β) → β → Forest κ α → β
foldrForestWithAncestorsAndLeafMarker f = foldrForestWithAncestorsAndLeafMarker1 f []
foldrForestWithAncestorsAndLeafMarker1 ∷ (Bool → [(κ, α)] → β → β) → [(κ, α)] → β → Forest κ α → β
foldrForestWithAncestorsAndLeafMarker1 f kvs z = M.foldrWithKey (foldrTreeWithAncestorsAndLeafMarker1 f kvs) z ∘ unForest
foldrTreeWithAncestorsAndLeafMarker1 ∷ (Bool → [(κ, α)] → β → β) → [(κ, α)] → κ → Tree κ α → β → β
foldrTreeWithAncestorsAndLeafMarker1 f kvs k t z = f isLeaf as (foldrForestWithAncestorsAndLeafMarker1 f as z (forest t))
where as = (k, fruit t):kvs
isLeaf = nullTree t
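-- A minimal sketch (not part of the original module; plain ASCII signatures)
-- of building a small forest and looking up a path in it.
exampleForest :: Forest Int Int
exampleForest = fromListForest [[(1, 10), (2, 20)], [(1, 10), (3, 30)]]

exampleLookup :: [Maybe Int]
exampleLookup = lookupForest exampleForest [1, 2]
-- expected to evaluate to [Just 10, Just 20]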
|
sheganinans/applicative-nestedmap
|
src/Data/Nested/Internal.hs
|
bsd-3-clause
| 9,307
| 0
| 13
| 1,878
| 3,720
| 1,977
| 1,743
| 166
| 2
|
module Main where
import Support ( inner )
import Control.Monad ( when )
import System.Environment ( getArgs )
main :: IO ( )
main =
do { -- Following stuff has nothing to do with the Lobster use case
-- It is for running and testing this program. We get names for
-- the internal files of top
args <- getArgs
; when ( length args /= 2 ) ( error ( "Arguments bad: " ++ show args ) )
-- Now for the Lobster use case
; inner "A" ( args!!0 ) ( args!!1 )
}
|
GaloisInc/sk-dev-platform
|
libs/symbolic-io/src/Data/Symbolic/UseCases/A.hs
|
bsd-3-clause
| 517
| 0
| 12
| 161
| 120
| 67
| 53
| 10
| 1
|
{-# LANGUAGE GADTs, GeneralizedNewtypeDeriving, FlexibleInstances, CPP #-}
module Data.JSTarget.AST where
import qualified Data.Set as S
#if __GLASGOW_HASKELL__ >= 708
import qualified Data.Map.Strict as M
#else
import qualified Data.Map as M
#endif
import System.IO.Unsafe
import System.Random (randomIO)
import Data.IORef
import Data.Word
import Control.Applicative
import Data.JSTarget.Op
type Arity = Int
type Comment = String
type Reorderable = Bool
-- | Shared statements.
newtype Shared a = Shared Lbl deriving (Eq, Show)
data Name = Name !String !(Maybe (String, String)) deriving (Eq, Ord, Show)
class HasModule a where
moduleOf :: a -> Maybe String
pkgOf :: a -> Maybe String
instance HasModule Name where
moduleOf (Name _ mmod) = fmap snd mmod
pkgOf (Name _ mmod) = fmap fst mmod
instance HasModule Var where
moduleOf (Foreign _) = Nothing
moduleOf (Internal n _) = moduleOf n
pkgOf (Foreign _) = Nothing
pkgOf (Internal n _) = pkgOf n
-- | Representation of variables.
data Var where
Foreign :: !String -> Var
Internal :: !Name -> !Comment -> Var
deriving (Show)
instance Eq Var where
(Foreign f1) == (Foreign f2) = f1 == f2
(Internal i1 _) == (Internal i2 _) = i1 == i2
_ == _ = False
instance Ord Var where
compare (Foreign f1) (Foreign f2) = compare f1 f2
compare (Internal i1 _) (Internal i2 _) = compare i1 i2
compare (Foreign _) (Internal _ _) = Prelude.LT
compare (Internal _ _) (Foreign _) = Prelude.GT
-- | Left hand side of an assignment. Normally we only assign internal vars,
-- but for some primops we need to assign array elements as well.
-- LhsExp is never reorderable.
data LHS where
NewVar :: !Reorderable -> !Var -> LHS
LhsExp :: !Exp -> LHS
deriving (Eq, Show)
-- | Distinguish between normal, optimized and method calls.
-- Normal and optimized calls take a boolean indicating whether the called
-- function should trampoline or not. This defaults to True, and should
-- only be set to False when there is absolutely no possibility whatsoever
-- that the called function will tailcall.
data Call where
Normal :: !Bool -> Call
Fast :: !Bool -> Call
Method :: !String -> Call
deriving (Eq, Show)
-- | Literals; nothing fancy to see here.
data Lit where
LNum :: !Double -> Lit
LStr :: !String -> Lit
LBool :: !Bool -> Lit
LInt :: !Integer -> Lit
LNull :: Lit
deriving (Eq, Show)
-- | Expressions. Completely predictable.
data Exp where
Var :: !Var -> Exp
Lit :: !Lit -> Exp
Not :: !Exp -> Exp
BinOp :: !BinOp -> Exp -> !Exp -> Exp
Fun :: !(Maybe Name) -> ![Var] -> !Stm -> Exp
Call :: !Arity -> !Call -> !Exp -> ![Exp] -> Exp
Index :: !Exp -> !Exp -> Exp
Arr :: ![Exp] -> Exp
AssignEx :: !Exp -> !Exp -> Exp
IfEx :: !Exp -> !Exp -> !Exp -> Exp
Eval :: !Exp -> Exp
Thunk :: !Bool -> !Stm -> Exp -- Thunk may be updatable or not
deriving (Eq, Show)
-- | Statements. The only mildly interesting things here are the Case and Jump
-- constructors, which allow explicit sharing of continuations.
data Stm where
Case :: !Exp -> !Stm -> ![Alt] -> !(Shared Stm) -> Stm
Forever :: !Stm -> Stm
Assign :: !LHS -> !Exp -> !Stm -> Stm
Return :: !Exp -> Stm
Cont :: Stm
Jump :: !(Shared Stm) -> Stm
NullRet :: Stm
Tailcall :: !Exp -> Stm
ThunkRet :: !Exp -> Stm -- Return from a Thunk
deriving (Eq, Show)
-- | Case alternatives - an expression to match and a branch.
type Alt = (Exp, Stm)
-- | Represents a module. A module has a name, an owning
-- package, a dependency map of all its definitions, and a bunch of
-- definitions.
data Module = Module {
modPackageId :: !String,
modName :: !String,
modDeps :: !(M.Map Name (S.Set Name)),
modDefs :: !(M.Map Name (AST Exp))
}
-- | Merge two modules. The module and package IDs of the second argument are
-- used, and the second argument will take precedence for symbols which exist
-- in both.
merge :: Module -> Module -> Module
merge m1 m2 = Module {
modPackageId = modPackageId m2,
modName = modName m2,
modDeps = M.union (modDeps m1) (modDeps m2),
modDefs = M.union (modDefs m1) (modDefs m2)
}
-- | Imaginary module for foreign code that may need one.
foreignModule :: Module
foreignModule = Module {
modPackageId = "",
modName = "",
modDeps = M.empty,
modDefs = M.empty
}
-- | An LHS that's guaranteed to not ever be read, enabling the pretty
-- printer to ignore assignments to it.
blackHole :: LHS
blackHole =
LhsExp $ Var blackHoleVar
-- | The variable of the blackHole LHS.
blackHoleVar :: Var
blackHoleVar = Internal (Name "" (Just ("$blackhole", "$blackhole"))) ""
-- | An AST with local jumps.
data AST a = AST {
astCode :: !a,
astJumps :: !JumpTable
} deriving (Show, Eq)
instance Functor AST where
fmap f (AST ast js) = AST (f ast) js
instance Applicative AST where
pure = return
(AST f js) <*> (AST x js') = AST (f x) (M.union js' js)
instance Monad AST where
return x = AST x M.empty
(AST ast js) >>= f =
case f ast of
AST ast' js' -> AST ast' (M.union js' js)
-- | Returns the precedence of the top level operator of the given expression.
-- Everything that's not an operator has equal precedence, higher than any
-- binary operator.
expPrec :: Exp -> Int
expPrec (BinOp Sub (Lit (LNum 0)) _) = 500 -- 0-n is always printed as -n
expPrec (BinOp op _ _) = opPrec op
expPrec (Not _) = 500
expPrec _ = 1000
type JumpTable = M.Map Lbl Stm
data Lbl = Lbl !Word64 !Word64 deriving (Eq, Ord, Show)
{-# NOINLINE nextLbl #-}
nextLbl :: IORef Word64
nextLbl = unsafePerformIO $ newIORef 0
{-# NOINLINE lblNamespace #-}
-- | Namespace for labels, to avoid collisions when combining modules.
-- We really ought to make this f(package, module) or something, but a random
-- 64 bit unsigned int should suffice.
lblNamespace :: Word64
lblNamespace = unsafePerformIO $ randomIO
{-# NOINLINE lblFor #-}
-- | Produce a local reference to the given statement.
lblFor :: Stm -> AST Lbl
lblFor s = do
(r, s') <- freshRef
AST r (M.singleton r s')
where
freshRef = return $! unsafePerformIO $! do
r <- atomicModifyIORef nextLbl $ \lbl ->
lbl `seq` (lbl+1, Lbl lblNamespace lbl)
-- We need to depend on s, or GHC will hoist us out of lblFor, possibly
-- causing circular dependencies between expressions.
return (r, s)
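-- A small sketch (hypothetical, not part of the original module) building a
-- tiny function expression from the constructors above, roughly the
-- JavaScript `function (x) { return x; }`.
identityFun :: Exp
identityFun = Fun Nothing [Foreign "x"] (Return (Var (Foreign "x")))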
|
joelburget/haste-compiler
|
src/Data/JSTarget/AST.hs
|
bsd-3-clause
| 6,646
| 0
| 15
| 1,681
| 1,812
| 959
| 853
| 255
| 1
|
{-# LANGUAGE QuasiQuotes, ScopedTypeVariables #-}
module Atomo.Kernel.Ports (load) where
import Data.Char (isSpace)
import Data.Maybe (catMaybes)
import System.Console.Haskeline as Haskeline
import System.Directory
import System.FilePath ((</>), (<.>))
import System.IO
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Atomo
import Atomo.Parser
import Atomo.Pretty
load :: VM ()
load = do
([$p|Port|] =::) =<< eval [$e|Object clone|]
([$p|File|] =::) =<< eval [$e|Object clone|]
([$p|Directory|] =::) =<< eval [$e|Object clone|]
sinp <- portObj stdin
soutp <- portObj stdout
serrp <- portObj stderr
[$p|Port standard-input|] =:: sinp
[$p|Port standard-output|] =:: soutp
[$p|Port standard-error|] =:: serrp
[$p|(p: Port) show|] =: do
hdl <- getHandle [$e|p handle|] >>= liftIO . hShow
return (string ("<port " ++ hdl ++ ">"))
[$p|Port new: (fn: String)|] =::: [$e|Port new: fn mode: @read-write|]
[$p|Port new: (fn: String) mode: (m: Particle)|] =: do
fn <- getString [$e|fn|]
(m :: Particle Value) <- here "m" >>= getV
hdl <- case m of
PMSingle "read" ->
liftIO (openFile fn ReadMode)
PMSingle "write" ->
liftIO (openFile fn WriteMode)
PMSingle "append" ->
liftIO (openFile fn AppendMode)
PMSingle "read-write" ->
liftIO (openFile fn ReadWriteMode)
_ ->
error $ "unknown port mode: " ++ show (pretty m) ++ ", must be one of: @read, @write, @append, @read-write"
portObj hdl
[$p|(p: Port) print: x|] =: do
x <- here "x"
port <- here "p"
hdl <- getHandle [$e|p handle|]
c <- liftIO (hIsClosed hdl)
when c (raise ["port-closed", "for"] [port, x])
(s :: T.Text) <- eval [$e|x as: String|] >>= getV
liftIO (TIO.hPutStrLn hdl s)
liftIO (hFlush hdl)
return x
[$p|(p: Port) display: x|] =: do
x <- here "x"
port <- here "p"
hdl <- getHandle [$e|p handle|]
c <- liftIO (hIsClosed hdl)
when c (raise ["port-closed", "for"] [port, x])
(s :: T.Text) <- eval [$e|x as: String|] >>= getV
liftIO (TIO.hPutStr hdl s)
liftIO (hFlush hdl)
return x
[$p|(p: Port) read|] =: do
h <- getHandle [$e|p handle|]
segment <- liftIO (hGetSegment h)
parsed <- continuedParse segment "<read>"
let isPrimitive (Primitive {}) = True
isPrimitive (EParticle { eParticle = PMSingle _ }) = True
isPrimitive (EParticle { eParticle = PMKeyword _ ts }) =
all isPrimitive (catMaybes ts)
isPrimitive (EList { eContents = ts }) = all isPrimitive ts
isPrimitive _ = False
case parsed of
[] -> raise' "no-expressions-parsed"
is | all isPrimitive is -> evalAll is
(i:_) -> return (Expression i)
[$p|(p: Port) read-line|] =: do
h <- getHandle [$e|p handle|]
done <- liftIO (hIsEOF h)
if done
then raise' "end-of-input"
else liftM String $ liftIO (TIO.hGetLine h)
[$p|(p: Port) read-char|] =: do
h <- getHandle [$e|p handle|]
b <- liftIO (hGetBuffering h)
liftIO (hSetBuffering h NoBuffering)
c <- liftIO (hGetChar h)
liftIO (hSetBuffering h b)
return (Char c)
[$p|(p: Port) contents|] =:
getHandle [$e|p handle|] >>= liftM String . liftIO . TIO.hGetContents
[$p|(p: Port) flush|] =:
getHandle [$e|p handle|] >>= liftIO . hFlush
>> return (particle "ok")
[$p|(p: Port) close|] =:
getHandle [$e|p handle|] >>= liftIO . hClose
>> return (particle "ok")
[$p|(p: Port) open?|] =:
getHandle [$e|p handle|] >>= liftM Boolean . liftIO . hIsOpen
[$p|(p: Port) closed?|] =:
getHandle [$e|p handle|] >>= liftM Boolean . liftIO . hIsClosed
[$p|(p: Port) readable?|] =:
getHandle [$e|p handle|] >>= liftM Boolean . liftIO . hIsReadable
[$p|(p: Port) writable?|] =:
getHandle [$e|p handle|] >>= liftM Boolean . liftIO . hIsWritable
[$p|(p: Port) seekable?|] =:
getHandle [$e|p handle|] >>= liftM Boolean . liftIO . hIsSeekable
[$p|(p: Port) ready?|] =:
getHandle [$e|p handle|] >>= liftM Boolean . liftIO . hReady
[$p|(p: Port) eof?|] =:
getHandle [$e|p handle|] >>= liftM Boolean . liftIO . hIsEOF
[$p|File new: (fn: String)|] =::: [$e|Port new: fn|]
[$p|File open: (fn: String)|] =::: [$e|Port new: fn|]
[$p|File read: (fn: String)|] =:::
[$e|Port (new: fn mode: @read) ensuring: @close do: @contents|]
[$p|File delete: (fn: String)|] =: do
fn <- getString [$e|fn|]
liftIO (removeFile fn)
return (particle "ok")
[$p|File move: (from: String) to: (to: String)|] =:::
[$e|File rename: from to: to|]
[$p|File rename: (from: String) to: (to: String)|] =: do
from <- getString [$e|from|]
to <- getString [$e|to|]
liftIO (renameFile from to)
return (particle "ok")
[$p|File copy: (from: String) to: (to: String)|] =: do
from <- getString [$e|from|]
to <- getString [$e|to|]
liftIO (copyFile from to)
return (particle "ok")
[$p|File canonicalize-path: (fn: String)|] =: do
fn <- getString [$e|fn|]
liftM string $ liftIO (canonicalizePath fn)
[$p|File make-relative: (fn: String)|] =: do
fn <- getString [$e|fn|]
liftM string $ liftIO (makeRelativeToCurrentDirectory fn)
[$p|File exists?: (fn: String)|] =: do
fn <- getString [$e|fn|]
liftM Boolean $ liftIO (doesFileExist fn)
[$p|File find-executable: (name: String)|] =: do
name <- getString [$e|name|]
find <- liftIO (findExecutable name)
case find of
Nothing -> return (particle "none")
Just fn -> return (keyParticle ["ok"] [Nothing, Just (string fn)])
[$p|File readable?: (fn: String)|] =:
getString [$e|fn|]
>>= liftM (Boolean . readable) . liftIO . getPermissions
[$p|File writable?: (fn: String)|] =:
getString [$e|fn|]
>>= liftM (Boolean . writable) . liftIO . getPermissions
[$p|File executable?: (fn: String)|] =:
getString [$e|fn|]
>>= liftM (Boolean . executable) . liftIO . getPermissions
[$p|File searchable?: (fn: String)|] =:
getString [$e|fn|]
>>= liftM (Boolean . searchable) . liftIO . getPermissions
[$p|File set-readable: (fn: String) to: (b: Boolean)|] =: do
(r :: Bool) <- here "b" >>= getV
fn <- getString [$e|fn|]
ps <- liftIO (getPermissions fn)
liftIO (setPermissions fn (ps { readable = r }))
return (particle "ok")
[$p|File set-writable: (fn: String) to: (b: Boolean)|] =: do
(w :: Bool) <- here "b" >>= getV
fn <- getString [$e|fn|]
ps <- liftIO (getPermissions fn)
liftIO (setPermissions fn (ps { writable = w }))
return (particle "ok")
[$p|File set-executable: (fn: String) to: (b: Boolean)|] =: do
(x :: Bool) <- here "b" >>= getV
fn <- getString [$e|fn|]
ps <- liftIO (getPermissions fn)
liftIO (setPermissions fn (ps { executable = x }))
return (particle "ok")
[$p|File set-searchable: (fn: String) to: (b: Boolean)|] =: do
(s :: Bool) <- here "b" >>= getV
fn <- getString [$e|fn|]
ps <- liftIO (getPermissions fn)
liftIO (setPermissions fn (ps { searchable = s }))
return (particle "ok")
[$p|Directory create: (path: String)|] =: do
path <- getString [$e|path|]
liftIO (createDirectory path)
return (particle "ok")
[$p|Directory create-if-missing: (path: String)|] =: do
path <- getString [$e|path|]
liftIO (createDirectoryIfMissing False path)
return (particle "ok")
[$p|Directory create-tree-if-missing: (path: String)|] =: do
path <- getString [$e|path|]
liftIO (createDirectoryIfMissing True path)
return (particle "ok")
[$p|Directory remove: (path: String)|] =: do
path <- getString [$e|path|]
liftIO (removeDirectory path)
return (particle "ok")
[$p|Directory remove-recursive: (path: String)|] =: do
path <- getString [$e|path|]
liftIO (removeDirectoryRecursive path)
return (particle "ok")
[$p|Directory move: (from: String) to: (to: String)|] =:::
[$e|Directory rename: from to: to|]
[$p|Directory rename: (from: String) to: (to: String)|] =: do
from <- getString [$e|from|]
to <- getString [$e|to|]
liftIO (renameDirectory from to)
return (particle "ok")
[$p|Directory contents: (path: String)|] =:
liftM (list . map string . filter (`notElem` [".", ".."]))
(getString [$e|path|] >>= liftIO . getDirectoryContents)
[$p|Directory current|] =:
liftM string $ liftIO getCurrentDirectory
[$p|Directory current: (path: String)|] =: do
path <- getString [$e|path|]
liftIO (setCurrentDirectory path)
return (particle "ok")
[$p|Directory home|] =:
liftM string $ liftIO getHomeDirectory
[$p|Directory user-data-for: (app: String)|] =: do
app <- getString [$e|app|]
liftM string $ liftIO (getAppUserDataDirectory app)
[$p|Directory user-documents|] =:
liftM string $ liftIO getUserDocumentsDirectory
[$p|Directory temporary|] =:
liftM string $ liftIO getTemporaryDirectory
[$p|Directory exists?: (path: String)|] =: do
path <- getString [$e|path|]
liftM Boolean $ liftIO (doesDirectoryExist path)
[$p|(a: String) </> (b: String)|] =: do
a <- getString [$e|a|]
b <- getString [$e|b|]
return (string (a </> b))
[$p|(a: String) <.> (b: String)|] =: do
a <- getString [$e|a|]
b <- getString [$e|b|]
return (string (a <.> b))
[$p|interaction: (prompt: String)|] =: do
prompt <- getString [$e|prompt|]
history <- getString [$e|current-history-file|]
line <-
liftIO $ Haskeline.catch
(liftM Just $ runInput history (getInputLine prompt))
(\Interrupt -> return Nothing)
case line of
Just (Just i) -> return (string i)
Just Nothing -> raise' "end-of-input"
Nothing -> raise' "interrupt"
where
runInput h
= runInputT (defaultSettings { historyFile = Just h })
. withInterrupt
portObj hdl = newScope $ do
port <- eval [$e|Port clone|]
define (single "handle" (PMatch port))
(Primitive Nothing $ haskell hdl)
return port
getHandle ex = eval ex >>= fromHaskell
hGetSegment :: Handle -> IO String
hGetSegment h = dropSpaces >> hGetSegment' Nothing
where
dropSpaces = do
c <- hLookAhead h
when (isSpace c) (hGetChar h >> dropSpaces)
hGetSegment' stop = do
end <- hIsEOF h
if end
then return ""
else do
c <- hGetChar h
case c of
'"' -> wrapped '"'
'\'' -> wrapped '\''
'(' -> nested '(' ')'
'{' -> nested '{' '}'
'[' -> nested '[' ']'
s | (stop == Nothing && isSpace s) || (Just s == stop) ->
return [c]
_ -> do
cs <- hGetSegment' stop
return (c:cs)
where
wrapped d = do
w <- liftM (d:) $ hGetUntil h d
rest <- hGetSegment' stop
return (w ++ rest)
nested c end = do
sub <- liftM (c:) $ hGetSegment' (Just end)
rest <- hGetSegment' stop
return (sub ++ rest)
hGetUntil :: Handle -> Char -> IO String
hGetUntil h x = do
c <- hGetChar h
if c == x
then return [c]
else do
cs <- hGetUntil h x
return (c:cs)
|
Mathnerd314/atomo
|
src/Atomo/Kernel/Ports.hs
|
bsd-3-clause
| 12,398
| 187
| 60
| 3,979
| 5,610
| 2,720
| 2,890
| -1
| -1
|
module Main where
import Data.Reflection.Extras
main = print "hey"
|
jfischoff/reflection-extras
|
tests/Main.hs
|
bsd-3-clause
| 67
| 0
| 5
| 9
| 18
| 11
| 7
| 3
| 1
|
module TestData
-- blockchains
( singletonBlockchainUnvalidated
, singletonBlockchain
, blockchain1Block
, blockchain2BlockFork
, blockchain3Block
-- blocks
, genesisBlock
, block1A
, block1ACoinbasePrivateKey
, block1B
, block1BCoinbasePrivateKey
, block2A
, block2ACoinbasePrivateKey
-- utils
, validate'
, addBlock'
, readJSON
, throwLeft
) where
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Lazy as Lazy
import Data.Blockchain
-- Test Data -------------------------------------------------------------------------------------------------
singletonBlockchainUnvalidated :: IO (Blockchain Unvalidated)
singletonBlockchainUnvalidated = readJSON "data/singleton_blockchain.json"
singletonBlockchain :: IO (Blockchain Validated)
singletonBlockchain = validate' <$> singletonBlockchainUnvalidated
blockchain1Block, blockchain2BlockFork, blockchain3Block :: IO (Blockchain Validated)
blockchain1Block = blockchainWithBlock singletonBlockchain block1A
blockchain2BlockFork = blockchainWithBlock blockchain1Block block1B
blockchain3Block = blockchainWithBlock blockchain2BlockFork block2A
genesisBlock :: IO Block
genesisBlock = nodeBlock . blockchainNode <$> singletonBlockchain
block1A, block1B, block2A :: IO Block
block1A = readJSON "data/block_1a.json"
block1B = readJSON "data/block_1b.json"
block2A = readJSON "data/block_2a.json"
block1ACoinbasePrivateKey, block1BCoinbasePrivateKey, block2ACoinbasePrivateKey :: IO PrivateKey
block1ACoinbasePrivateKey = readJSON "data/block_1a_coinbase_private_key.json"
block1BCoinbasePrivateKey = readJSON "data/block_1b_coinbase_private_key.json"
block2ACoinbasePrivateKey = readJSON "data/block_2a_coinbase_private_key.json"
-- Utils -----------------------------------------------------------------------------------------------------
blockchainWithBlock :: IO (Blockchain Validated) -> IO Block -> IO (Blockchain Validated)
blockchainWithBlock chain block = do
c <- chain
b <- block
return (addBlock' b c)
validate' :: Blockchain Unvalidated -> Blockchain Validated
validate' = throwLeft . validate
addBlock' :: Block -> Blockchain Validated -> Blockchain Validated
addBlock' block = throwLeft . addBlock block
readJSON :: Aeson.FromJSON a => FilePath -> IO a
readJSON path = throwLeft . Aeson.eitherDecode <$> Lazy.readFile path
throwLeft :: Show a => Either a b -> b
throwLeft = either (error . show) id
|
TGOlson/blockchain
|
test/TestData.hs
|
bsd-3-clause
| 2,512
| 0
| 9
| 376
| 479
| 258
| 221
| 51
| 1
|
{-# LANGUAGE BangPatterns, CPP, MagicHash, OverloadedStrings #-}
#ifdef USE_MONO_PAT_BINDS
{-# LANGUAGE MonoPatBinds #-}
#endif
-- | Support for HTTP response encoding.
--
-- TODO: Improve documentation.
module Blaze.ByteString.Builder.HTTP (
-- * Chunked HTTP transfer encoding
chunkedTransferEncoding
, chunkedTransferTerminator
) where
import Data.Monoid
import qualified Data.ByteString as S
import Data.ByteString.Char8 ()
import Foreign
import Blaze.ByteString.Builder.Internal
import Blaze.ByteString.Builder.Internal.Types
import Blaze.ByteString.Builder.Internal.UncheckedShifts
import Blaze.ByteString.Builder.ByteString (copyByteString)
import qualified Blaze.ByteString.Builder.Char8 as Char8
-- only required by test-code
-- import qualified Data.ByteString.Lazy as L
-- import qualified Blaze.ByteString.Builder.ByteString as B
-- import Data.ByteString.Char8 ()
-- | Write a CRLF sequence.
writeCRLF :: Write
writeCRLF = Char8.writeChar '\r' `mappend` Char8.writeChar '\n'
{-# INLINE writeCRLF #-}
-- | Execute a write
{-# INLINE execWrite #-}
execWrite :: Write -> Ptr Word8 -> IO ()
execWrite w op = do
_ <- runPoke (getPoke w) op
return ()
------------------------------------------------------------------------------
-- Hex Encoding Infrastructure
------------------------------------------------------------------------------
{-
pokeWord16Hex :: Word16 -> Ptr Word8 -> IO ()
pokeWord16Hex x op = do
pokeNibble 0 12
pokeNibble 1 8
pokeNibble 2 4
pokeNibble 3 0
where
pokeNibble off s
| n < 10 = pokeWord8 off (fromIntegral $ 48 + n)
| otherwise = pokeWord8 off (fromIntegral $ 55 + n)
where
n = shiftr_w16 x s .&. 0xF
pokeWord8 :: Int -> Word8 -> IO ()
pokeWord8 off = poke (op `plusPtr` off)
writeWord16Hex :: Word16 -> Write
writeWord16Hex = exactWrite 4 . pokeWord16Hex
-}
pokeWord32HexN :: Int -> Word32 -> Ptr Word8 -> IO ()
pokeWord32HexN n0 w0 op0 =
go w0 (op0 `plusPtr` (n0 - 1))
where
go !w !op
| op < op0 = return ()
| otherwise = do
let nibble :: Word8
nibble = fromIntegral w .&. 0xF
hex | nibble < 10 = 48 + nibble
| otherwise = 55 + nibble
poke op hex
go (w `shiftr_w32` 4) (op `plusPtr` (-1))
{-# INLINE pokeWord32HexN #-}
iterationsUntilZero :: Integral a => (a -> a) -> a -> Int
iterationsUntilZero f = go 0
where
go !count 0 = count
go !count !x = go (count+1) (f x)
{-# INLINE iterationsUntilZero #-}
-- | Length of the hex-string required to encode the given 'Word32'.
word32HexLength :: Word32 -> Int
word32HexLength = max 1 . iterationsUntilZero (`shiftr_w32` 4)
{-# INLINE word32HexLength #-}
writeWord32Hex :: Word32 -> Write
writeWord32Hex w =
boundedWrite (2 * sizeOf w) (pokeN len $ pokeWord32HexN len w)
where
len = word32HexLength w
{-# INLINE writeWord32Hex #-}
{-
test = flip (toLazyByteStringWith 32 32 32) L.empty
$ chunkedTransferEncoding
$ mconcat $ map oneLine [0..16] ++
[B.insertByteString "hello"] ++
map oneLine [0,1] ++
[B.insertByteString ""] ++
map oneLine [0..16]
where
oneLine x = fromWriteSingleton writeWord32Hex x `mappend` Char8.fromChar ' '
test = print $ toLazyByteString
$ chunkedTransferEncoding body `mappend` chunkedTransferTerminator
body = copyByteString "maa" `mappend` copyByteString "foo" `mappend` copyByteString "bar"
-}
------------------------------------------------------------------------------
-- Chunked transfer encoding
------------------------------------------------------------------------------
-- | Transform a builder such that it uses chunked HTTP transfer encoding.
chunkedTransferEncoding :: Builder -> Builder
chunkedTransferEncoding (Builder b) =
fromBuildStepCont transferEncodingStep
where
finalStep !(BufRange op _) = return $ Done op ()
transferEncodingStep k = go (b (buildStep finalStep))
where
go innerStep !(BufRange op ope)
-- FIXME: Assert that outRemaining < maxBound :: Word32
| outRemaining < minimalBufferSize =
return $ bufferFull minimalBufferSize op (go innerStep)
| otherwise = do
let !brInner@(BufRange opInner _) = BufRange
(op `plusPtr` (chunkSizeLength + 2)) -- leave space for chunk header
(ope `plusPtr` (-maxAfterBufferOverhead)) -- leave space at end of data
-- wraps the chunk, if it is non-empty, and returns the
-- signal constructed with the correct end-of-data pointer
{-# INLINE wrapChunk #-}
wrapChunk :: Ptr Word8 -> (Ptr Word8 -> IO (BuildSignal a))
-> IO (BuildSignal a)
wrapChunk !opInner' mkSignal
| opInner' == opInner = mkSignal op
| otherwise = do
pokeWord32HexN chunkSizeLength
(fromIntegral $ opInner' `minusPtr` opInner)
op
execWrite writeCRLF (opInner `plusPtr` (-2))
execWrite writeCRLF opInner'
mkSignal (opInner' `plusPtr` 2)
-- execute inner builder with reduced boundaries
signal <- runBuildStep innerStep brInner
case signal of
Done opInner' _ ->
wrapChunk opInner' $ \op' -> do
let !br' = BufRange op' ope
k br'
BufferFull minRequiredSize opInner' nextInnerStep ->
wrapChunk opInner' $ \op' ->
return $! bufferFull
(minRequiredSize + maxEncodingOverhead)
op'
(go nextInnerStep)
InsertByteString opInner' bs nextInnerStep
| S.null bs -> -- flush
wrapChunk opInner' $ \op' ->
return $! insertByteString
op' S.empty
(go nextInnerStep)
| otherwise -> -- insert non-empty bytestring
wrapChunk opInner' $ \op' -> do
-- add header for inserted bytestring
-- FIXME: assert(S.length bs < maxBound :: Word32)
!op'' <- (`runPoke` op') $ getPoke $
writeWord32Hex (fromIntegral $ S.length bs)
`mappend` writeCRLF
-- insert bytestring and write CRLF in next buildstep
return $! InsertByteString
op'' bs
(unBuilder (fromWrite writeCRLF) $
buildStep $ go nextInnerStep)
where
        -- Minimal size guaranteed for actual data: no need to require more
        -- than 1 byte to guarantee progress; the larger sizes will
        -- hopefully be provided by the driver or requested by the wrapped
        -- builders.
minimalChunkSize = 1
-- overhead computation
maxBeforeBufferOverhead = sizeOf (undefined :: Int) + 2 -- max chunk size and CRLF after header
maxAfterBufferOverhead = 2 + -- CRLF after data
sizeOf (undefined :: Int) + 2 -- max bytestring size, CRLF after header
maxEncodingOverhead = maxBeforeBufferOverhead + maxAfterBufferOverhead
minimalBufferSize = minimalChunkSize + maxEncodingOverhead
-- remaining and required space computation
outRemaining :: Int
outRemaining = ope `minusPtr` op
chunkSizeLength = word32HexLength $ fromIntegral outRemaining
-- | The zero-length chunk '0\r\n\r\n' signaling the termination of the data transfer.
chunkedTransferTerminator :: Builder
chunkedTransferTerminator = copyByteString "0\r\n\r\n"
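-- A minimal usage sketch, kept as a comment like the test code above because
-- it relies on 'toLazyByteString' from the public Blaze.ByteString.Builder
-- API rather than this module's imports:
--
-- > toLazyByteString $ chunkedTransferEncoding (copyByteString "hello")
-- > `mappend` chunkedTransferTerminator
--
-- which should frame the body as the single chunk "5\r\nhello\r\n" followed
-- by the terminating "0\r\n\r\n".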
|
meiersi/blaze-builder
|
Blaze/ByteString/Builder/HTTP.hs
|
bsd-3-clause
| 8,216
| 0
| 27
| 2,677
| 1,290
| 679
| 611
| 109
| 3
|
import Air.Env
import Air.TH
import Prelude ()
import System.Nemesis.Titan
import Test.Hspec
import Air.Spec
spec :: IO ()
spec = hspec $ do
describe "PlainShow" $ do
it "should show unicode spec" - do
-- (PlainShowWrapper "测试") `shouldBe` (PlainShowWrapper "测试 unicode")
"测试" === "测试 unicode"
"测试" `shouldBe` "测试"
main =
with_spec spec halt
|
nfjinjing/air-spec
|
src/Test.hs
|
bsd-3-clause
| 414
| 2
| 14
| 100
| 105
| 52
| 53
| 14
| 1
|
{-# LANGUAGE OverloadedStrings, TupleSections, TypeFamilies, FlexibleContexts,
PackageImports #-}
module Network.XmlPush.HttpPush.Body (
HttpPush, HttpPushArgs(..), makeHttpPush,
HttpPushTest(..), HttpPushTestArgs(..),
) where
import Prelude hiding (filter)
import Control.Applicative
import Control.Monad
import "monads-tf" Control.Monad.Trans
import Control.Monad.Base
import Control.Monad.Trans.Control
import Control.Concurrent hiding (yield)
import Control.Concurrent.STM
import Data.Maybe
import Data.HandleLike
import Data.Pipe
import Data.Pipe.Flow
import Data.Pipe.TChan
import Text.XML.Pipe
import Network.TigHTTP.Server
import qualified Data.ByteString.Lazy as LBS
import Network.XmlPush
import Network.XmlPush.HttpPush.Common
data HttpPush h = HttpPush {
needReply :: TVar Bool,
clientReadChan :: TChan (XmlNode, Bool),
clientWriteChan :: TChan (Maybe XmlNode),
serverReadChan :: TChan (XmlNode, Bool),
serverWriteChan :: TChan (Maybe XmlNode) }
instance XmlPusher HttpPush where
type NumOfHandle HttpPush = Two
type PusherArgs HttpPush = HttpPushArgs
generate (Two ch sh) = makeHttpPush [] ch sh
-- readFrom hp = fromTChans [clientReadChan hp, serverReadChan hp] =$=
readFrom hp = fromTChans [serverReadChan hp, clientReadChan hp] =$=
setNeedReply (needReply hp)
writeTo hp = (convert ((() ,) . Just) =$=) . toTChansM $ do
nr <- liftBase . atomically . readTVar $ needReply hp
liftBase . atomically $ writeTVar (needReply hp) False
return [
(const nr, serverWriteChan hp),
(const True, clientWriteChan hp) ]
data HttpPushTest h = HttpPushTest (HttpPush h)
data HttpPushTestArgs h = HttpPushTestArgs (HttpPushArgs h) [XmlNode]
instance XmlPusher HttpPushTest where
type NumOfHandle HttpPushTest = Two
type PusherArgs HttpPushTest = HttpPushTestArgs
generate (Two ch sh) (HttpPushTestArgs a p) =
HttpPushTest <$> makeHttpPush p ch sh a
-- readFrom hp = fromTChans [clientReadChan hp, serverReadChan hp] =$=
readFrom (HttpPushTest hp) = fromTChans [serverReadChan hp, clientReadChan hp] =$=
setNeedReply (needReply hp)
writeTo (HttpPushTest hp) = (convert ((() ,) . Just) =$=) . toTChansM $ do
nr <- liftBase . atomically . readTVar $ needReply hp
liftBase . atomically $ writeTVar (needReply hp) False
return [
(const nr, serverWriteChan hp),
(const True, clientWriteChan hp) ]
makeHttpPush :: (HandleLike h, MonadBaseControl IO (HandleMonad h)) =>
[XmlNode] ->
(Maybe h) -> (Maybe h) ->
HttpPushArgs h -> HandleMonad h (HttpPush h)
makeHttpPush pre mch msh (HttpPushArgs gc gs hi gp wr) = do
vch <- liftBase . atomically $ newTVar mch
vsh <- liftBase . atomically $ newTVar msh
v <- liftBase . atomically $ newTVar False
vhi <- liftBase . atomically $ newTVar hi
(ci, co) <- clientC vch vhi gp
(si, so) <- talk pre wr vsh vch vhi gc gs
return $ HttpPush v ci co si so
clientC :: (HandleLike h, MonadBaseControl IO (HandleMonad h)) =>
TVar (Maybe h) -> TVar (Maybe (String, Int, FilePath)) ->
(XmlNode -> FilePath) ->
HandleMonad h (TChan (XmlNode, Bool), TChan (Maybe XmlNode))
clientC vh vhi gp = do
inc <- liftBase $ atomically newTChan
otc <- liftBase $ atomically newTChan
void . liftBaseDiscard forkIO $ do
h <- liftBase . atomically $ do
mh <- readTVar vh
case mh of
Just h -> return h
_ -> retry
(hn, pn, pt) <- liftBase . atomically $ do
mhi <- readTVar vhi
case mhi of
Just hi -> return hi
_ -> retry
runPipe_ $ fromTChan otc
=$= filter isJust
=$= convert fromJust
=$= clientLoop h hn pn pt gp (convert id)
=$= convert (, False)
=$= toTChan inc
return (inc, otc)
talk :: (HandleLike h, MonadBaseControl IO (HandleMonad h)) =>
[XmlNode] ->
(XmlNode -> Bool) -> (TVar (Maybe h)) -> (TVar (Maybe h)) ->
(TVar (Maybe (String, Int, FilePath))) ->
(XmlNode -> Maybe (HandleMonad h h, String, Int, FilePath)) ->
Maybe (HandleMonad h h) ->
HandleMonad h (TChan (XmlNode, Bool), TChan (Maybe XmlNode))
talk pre wr vh vch vhi gc mgs = do
inc <- liftBase $ atomically newTChan
otc <- liftBase $ atomically newTChan
void . liftBaseDiscard forkIO $ do
flip (maybe (return ())) mgs $ \gs -> do
h <- gs
liftBase . atomically $ writeTVar vh (Just h)
h <- liftBase . atomically $ do
mh <- readTVar vh
case mh of
Just h -> return h
_ -> retry
runPipe_ . writeToChan h inc otc pre $
setClient vch vhi gc =$= checkReply wr otc
runPipe_ . forever $ do
req <- lift $ getRequest h
requestBody req
=$= xmlEvent
=$= convert fromJust
=$= xmlNode []
=$= setClient vch vhi gc
=$= checkReply wr otc
=$= toTChan inc
fromTChan otc =$= await >>= maybe (return ()) (\mn ->
lift . putResponse h . responseP $ case mn of
Just n -> LBS.fromChunks [xmlString [n]]
_ -> "")
return (inc, otc)
writeToChan :: (HandleLike h, MonadBase IO (HandleMonad h)) =>
h -> TChan a -> TChan (Maybe XmlNode) -> [XmlNode] ->
Pipe XmlNode a (HandleMonad h) () ->
Pipe () () (HandleMonad h) ()
writeToChan _ _ _ [] _ = return ()
writeToChan h inc otc pre pp = do
mapM yield pre =$= pp =$= toTChan inc
fromTChan otc =$= await >>= maybe (return ()) (\mn ->
lift . putResponse h . responseP $ case mn of
Just n -> LBS.fromChunks [xmlString [n]]
_ -> "")
setClient :: (MonadBase IO (HandleMonad h)) =>
TVar (Maybe h) -> TVar (Maybe (String, Int, FilePath)) ->
(XmlNode -> Maybe (HandleMonad h h, String, Int, FilePath)) ->
Pipe XmlNode XmlNode (HandleMonad h) ()
setClient vch vhi gc = (await >>=) . maybe (return ()) $ \n -> do
yield n
case gc n of
Just (gh, hn, pn, pt) -> do
h <- lift gh
lift . liftBase . atomically . writeTVar vch $ Just h
lift . liftBase . atomically . writeTVar vhi
$ Just (hn, pn, pt)
_ -> return ()
setClient vch vhi gc
|
YoshikuniJujo/xml-push
|
src/Network/XmlPush/HttpPush/Body.hs
|
bsd-3-clause
| 5,760
| 72
| 24
| 1,154
| 2,432
| 1,223
| 1,209
| 153
| 3
|
module Network.OpenFlow.Decode where
import qualified Data.ByteString as B
import Network.OpenFlow.Message (OfpMessage)
import Network.OpenFlow.Parser
decode :: B.ByteString -> Either String OfpMessage
decode = parseOnly ofpMessage
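-- A hypothetical usage sketch (the helper name and the file-based source are
-- assumptions, not part of the original module).
decodeFromFile :: FilePath -> IO (Either String OfpMessage)
decodeFromFile path = fmap decode (B.readFile path)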
|
utky/openflow
|
src/Network/OpenFlow/Decode.hs
|
bsd-3-clause
| 254
| 0
| 6
| 46
| 56
| 34
| 22
| 6
| 1
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.EXT.PointParameters
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- All raw functions and tokens from the EXT_point_parameters extension, see
-- <http://www.opengl.org/registry/specs/EXT/point_parameters.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.EXT.PointParameters (
-- * Functions
glPointParameterf,
glPointParameterfv,
-- * Tokens
gl_POINT_SIZE_MIN,
gl_POINT_SIZE_MAX,
gl_POINT_FADE_THRESHOLD_SIZE,
gl_DISTANCE_ATTENUATION
) where
import Graphics.Rendering.OpenGL.Raw.ARB.Compatibility
import Graphics.Rendering.OpenGL.Raw.Core32
gl_DISTANCE_ATTENUATION :: GLenum
gl_DISTANCE_ATTENUATION = gl_POINT_DISTANCE_ATTENUATION
|
mfpi/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/EXT/PointParameters.hs
|
bsd-3-clause
| 994
| 0
| 4
| 123
| 73
| 56
| 17
| 11
| 1
|