code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Ak.Tasks (
groupByPriority
) where
import Ak.Types
import Data.Function
( on )
import Data.List
( groupBy )
-- | Bucket tasks into @(priority, tasks)@ pairs, one pair per run of
-- tasks sharing the same priority.
--
-- NOTE(review): 'groupBy' only merges /adjacent/ equal elements (it does
-- not sort), so the input is presumably already ordered by priority --
-- confirm with callers.
groupByPriority :: [Task] -> [(Priority,[Task])]
groupByPriority tasks =
    [ (priority t, grp) | grp@(t:_) <- groupBy samePriority tasks ]
  where
    samePriority = (==) `on` priority
| elliottt/ak | src/Ak/Tasks.hs | bsd-3-clause | 335 | 0 | 10 | 77 | 126 | 73 | 53 | 11 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Startups.Cards where
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.MultiSet as MS
import qualified Data.Foldable as F
import qualified Data.Map.Strict as M
import Data.Monoid
import Data.String
import Control.Lens
import Startups.Base
-- | The card categories of the game (mirroring the card colors).
data CardType = BaseResource -- ^ The "brown" cards, provide basic resources
              | AdvancedResource -- ^ The "grey" cards, provide advanced resources
              | Infrastructure -- ^ The "blue" cards, directly give victory points
              | ResearchDevelopment -- ^ The "green" cards, the more you have, the more victory points you get
              | Commercial -- ^ The "gold" cards, mainly get you funding
              | HeadHunting -- ^ The "red" cards, giving poaching power
              | Community -- ^ The "purple" cards, giving victory points according to various conditions
              deriving (Eq, Show, Ord)
-- | Which side a neighboring player sits on.
data Neighbor = NLeft
              | NRight
              deriving (Ord, Eq, Show)
-- | Whom an effect applies to: a neighbor, or the owning player.
data EffectDirection = Neighboring Neighbor
                     | Own
                     deriving (Ord, Eq, Show)
-- | A set of players targeted by an effect.
type Target = S.Set EffectDirection
-- | When / how often a conditional reward is granted.
data Condition = HappensOnce
               | PerCard Target (S.Set CardType)
               | ByPoachingResult Target (S.Set PoachingOutcome)
               | ByStartupStage Target
               deriving (Ord, Eq, Show)
-- | Both neighbors of a player.
neighbors :: Target
neighbors = S.insert (Neighboring NLeft) (S.singleton (Neighboring NRight))

-- | The player himself only.
myself :: Target
myself = S.singleton Own

-- | The player together with both of his neighbors.
everyone :: Target
everyone = myself <> neighbors
-- | Whether a produced resource is usable by neighbors or kept private.
data Sharing = Shared | Kept
             deriving (Ord, Eq, Show)
-- | The three research ("science") symbols.
data ResearchType = Scaling
                  | Programming
                  | CustomSolution
                  deriving (Ord, Eq, Show)
-- | Everything a card can do when in play.
data Effect = ProvideResource Resource Int Sharing
            | ResourceChoice (S.Set Resource) Sharing
            | CheapExchange (S.Set Resource) (S.Set Neighbor)
            | AddVictory VictoryType VictoryPoint Condition
            | GainFunding Funding Condition
            | RnD ResearchType
            | Poaching Poacher
            | ScientificBreakthrough -- gives any science type
            | Recycling -- play a card in the discard pile
            | Opportunity (S.Set Age) -- build for free once per age
            | Efficiency -- play the last card
            | CopyCommunity
            deriving (Ord, Eq, Show)
-- | What must be paid to play a card: a multiset of resources plus funding.
data Cost = Cost (MS.MultiSet Resource) Funding
          deriving (Ord, Eq, Show)
-- | Costs combine component-wise: resource multisets are unioned and
-- funding amounts are summed; the empty cost is free.
--
-- NOTE(review): 'mappend' is defined directly here; on a modern GHC
-- (base >= 4.11) a 'Semigroup' instance would also be required --
-- this code predates that change.
instance Monoid Cost where
    mempty = Cost mempty 0
    Cost r1 f1 `mappend` Cost r2 f2 = Cost (r1 <> r2) (f1 + f2)
-- | Parse a compact cost description: each capital letter denotes one
-- unit of the corresponding resource and @\'$\'@ denotes one unit of
-- funding.  Any other character is a programmer error.
instance IsString Cost where
    fromString = F.foldMap toCost
        where
            resourceCost r = Cost (MS.singleton r) 0
            toCost c = case c of
                'Y' -> resourceCost Youthfulness
                'V' -> resourceCost Vision
                'A' -> resourceCost Adoption
                'D' -> resourceCost Development
                'O' -> resourceCost Operations
                'M' -> resourceCost Marketing
                'F' -> resourceCost Finance
                '$' -> Cost mempty 1
                _   -> error "Invalid cost string"
-- | A playable card, or one of a company's stage "cards".
data Card = Card { _cName :: T.Text -- ^ Card name, used as identifier for free-build chains
                 , _cMinplayers :: PlayerCount -- ^ Minimum player count for this card to be in the deck
                 , _cAge :: Age -- ^ The age this card belongs to
                 , _cType :: CardType
                 , _cCost :: Cost
                 , _cFree :: [T.Text] -- ^ Names of cards that let this one be built for free
                 , _cEffect :: [Effect]
                 }
          | CompanyCard { _cCompany :: CompanyProfile -- ^ Which company this stage belongs to
                        , _cStage :: CompanyStage
                        , _cCost :: Cost
                        , _cEffect :: [Effect]
                        }
          deriving (Ord,Eq,Show)
-- | Resources exchanged with each neighbor during a turn.
type Exchange = M.Map Neighbor (MS.MultiSet Resource)
makePrisms ''CardType
makePrisms ''Effect
makeLenses ''Card
| bitemyapp/7startups | Startups/Cards.hs | bsd-3-clause | 3,980 | 0 | 11 | 1,414 | 903 | 501 | 402 | 88 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Db.Utils where
import Snap
import Database.SQLite.Simple
import qualified Data.Text as T
import Data.Aeson
import Data.Aeson.Types
import Data.Char
import Data.Monoid
import Data.Int
-- | Check whether a table with the given name exists in the SQLite
-- database, by probing the @sqlite_master@ catalog.
tableExists :: Connection -> T.Text -> IO Bool
tableExists conn name = do
    matches <- query conn "SELECT name FROM sqlite_master WHERE type='table' AND name=?" (Only name)
    case matches :: [Only String] of
        [_] -> return True
        _   -> return False
-- | Create a table from its name and @(column, type)@ pairs, unless a
-- table of that name is already present.
createTable :: Connection -> T.Text -> [(T.Text,T.Text)] -> IO ()
createTable conn tblName tblFields = do
    alreadyThere <- tableExists conn tblName
    unless alreadyThere $ do
        let columnDefs = [ n <> " " <> t | (n,t) <- tblFields ]
        execute_ conn $ mkQuery
            [ "CREATE TABLE"
            , tblName
            , wrapPar (commas columnDefs)
            ]
-- | Fetch every row of the table, selecting the listed columns.
getRows :: FromRow a => Connection -> T.Text -> [T.Text] -> IO [a]
getRows conn tblName tblFlds =
    query_ conn . mkQuery $
        [ "SELECT"
        , commas tblFlds
        , "FROM"
        , tblName
        ]
-- | Insert a new row (values bound positionally to the listed columns)
-- and return the rowid SQLite assigned to it.
newRow :: ToRow a => Connection -> T.Text -> [T.Text] -> a -> IO Int64
newRow conn tblName tblFlds row = do
    let placeholders = replicate (length tblFlds) "?"
        insertQuery = mkQuery
            [ "INSERT INTO"
            , tblName
            , wrapPar (commas tblFlds)
            , "VALUES"
            , wrapPar (commas placeholders)
            ]
    execute conn insertQuery row
    lastInsertRowId conn
-- | Overwrite the listed columns of the row whose @idFld@ column equals
-- @idVal@; the id is returned unchanged for convenient chaining.
updateRow :: ToRow a => Connection -> T.Text -> [T.Text] -> a -> T.Text -> Int64 -> IO Int64
updateRow conn tblName tblFlds row idFld idVal = do
    let assignments = map (<> " = ?") tblFlds
        updateQuery = mkQuery
            [ "UPDATE"
            , tblName
            , "SET"
            , commas assignments
            , "WHERE"
            , idFld <> " = ?"
            ]
    execute conn updateQuery (row :. Only idVal)
    return idVal
-- | Delete the row whose @idFld@ column equals @idVal@.
--
-- NOTE(review): the DELETE is issued through 'query' rather than
-- 'execute', so the result is typed @IO [a]@ even though a plain SQLite
-- DELETE returns no rows -- presumably callers ignore the result;
-- confirm before changing this to 'execute'.
deleteRow :: FromRow a => Connection -> T.Text -> T.Text -> Int64 -> IO [a]
deleteRow conn tblName idFld idVal = do
  query conn
    (mkQuery
      [ "DELETE FROM"
      , tblName
      , "WHERE"
      , idFld <> " = ?"
      ])
    (Only idVal)
-- | Column name alias, for readability in table descriptions.
type Field = T.Text
-- | Conventional name of the timestamp column.
fldTimestamp :: Field
fldTimestamp = "timestamp"
-- | SQLite column type for an auto-incrementing primary key.
primaryKeyDBType :: T.Text
primaryKeyDBType = "INTEGER PRIMARY KEY"
-- | SQLite column type for a non-null timestamp defaulting to "now".
timestampDBType :: T.Text
timestampDBType = "TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL"
-- | Raised when serializing a value of the named type that lacks an Id.
noIdError :: String -> a
noIdError typ = error $ "ToJSON: " ++ typ ++ " must have Id"
-- | Wrap a text fragment in parentheses.
wrapPar :: T.Text -> T.Text
wrapPar txt = T.concat [T.singleton '(', txt, T.singleton ')']

-- | Join fragments with bare commas, as SQL column lists expect.
commas :: [T.Text] -> T.Text
commas = T.intercalate (T.singleton ',')

-- | Join fragments with comma-plus-space, for human-readable output.
commaSpaces :: [T.Text] -> T.Text
commaSpaces = T.intercalate (T.pack ", ")

-- | Assemble whitespace-separated fragments into a sqlite-simple 'Query'.
mkQuery :: [T.Text] -> Query
mkQuery fragments = Query (T.unwords fragments)
-- | Parse a numeric field that is encoded as a JSON /string/ of decimal
-- digits, e.g. @{"id": "42"}@.  The parser fails when the string
-- contains anything but digits.
(.:#) :: (Num a) => Object -> T.Text -> Parser a
v .:# f = do
    raw <- v .: f
    if not (all isDigit raw)
        then fail "Expected an integer-representing string"
        else return (fromInteger (read raw))
-- | Wrapper for a database row identifier.
newtype Id = Id Int64 deriving (Show)
-- | Decode from a JSON object of the shape @{"id": <n>}@.
instance FromJSON Id where
  parseJSON (Object v) = Id
    <$> v .: "id"
  parseJSON _ = mzero
-- | Encode as a JSON object of the shape @{"id": <n>}@.
instance ToJSON Id where
  toJSON (Id i) = object
    [ "id" .= i
    ]
-- | Read from a single selected column.
instance FromRow Id where
  fromRow = Id <$> field
| kylcarte/qclib | src/Db/Utils.hs | bsd-3-clause | 2,968 | 0 | 15 | 798 | 1,037 | 539 | 498 | 95 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
import Crypto.Random
import Network.BSD
import Network.Socket (socket, Family(..), SocketType(..), sClose, SockAddr(..), connect)
import Network.TLS
import Network.TLS.Extra.Cipher
import System.Console.GetOpt
import System.IO
import System.Timeout
import qualified Data.ByteString.Lazy.Char8 as LC
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString as B
import Control.Exception
import qualified Control.Exception as E
import Control.Monad
import System.Environment
import System.Exit
import System.X509
import Data.Default.Class
import Data.IORef
import Data.Monoid
import Data.Char (isDigit)
import Numeric (showHex)
import HexDump
-- | Cipher suites offered to the server, in preference order.
ciphers :: [Cipher]
ciphers =
    [ cipher_DHE_RSA_AES256_SHA256
    , cipher_DHE_RSA_AES128_SHA256
    , cipher_DHE_RSA_AES256_SHA1
    , cipher_DHE_RSA_AES128_SHA1
    , cipher_DHE_DSS_AES256_SHA1
    , cipher_DHE_DSS_AES128_SHA1
    , cipher_AES128_SHA1
    , cipher_AES256_SHA1
    , cipher_RC4_128_MD5
    , cipher_RC4_128_SHA1
    , cipher_RSA_3DES_EDE_CBC_SHA1
    , cipher_DHE_RSA_AES128GCM_SHA256
    --, cipher_ECDHE_RSA_AES256GCM_SHA384
    , cipher_ECDHE_RSA_AES256CBC_SHA
    , cipher_ECDHE_RSA_AES128GCM_SHA256
    , cipher_ECDHE_ECDSA_AES128GCM_SHA256
    ]
-- | Default payload size for the send/recv benchmarks (1 MiB).
defaultBenchAmount = 1024 * 1024
-- | Default receive timeout, in milliseconds.
defaultTimeout = 2000
-- | A cipher with a made-up identifier, used to probe server behaviour.
bogusCipher cid = cipher_AES128_SHA1 { cipherID = cid }
-- | Resolve the host, open a TCP connection, run the TLS action @f@ on
-- a fresh context, then close the socket.  @debug@ enables packet-level
-- logging and @ioDebug@ enables raw I/O hexdump logging.
runTLS debug ioDebug params hostname portNumber f = do
    he <- getHostByName hostname
    sock <- socket AF_INET Stream defaultProtocol
    let sockaddr = SockAddrInet portNumber (head $ hostAddresses he)
    -- Any connect failure is turned into a fatal error after cleaning
    -- up the socket.
    E.catch (connect sock sockaddr)
          (\(e :: SomeException) -> sClose sock >> error ("cannot open socket " ++ show sockaddr ++ " " ++ show e))
    ctx <- contextNew sock params
    contextHookSetLogging ctx getLogging
    () <- f ctx
    sClose sock
  where getLogging = ioLogging $ packetLogging $ def
        -- Layer packet logging on top of the default hooks when --debug.
        packetLogging logging
            | debug = logging { loggingPacketSent = putStrLn . ("debug: >> " ++)
                              , loggingPacketRecv = putStrLn . ("debug: << " ++)
                              }
            | otherwise = logging
        -- Layer raw hexdump logging on top when --io-debug.
        ioLogging logging
            | ioDebug = logging { loggingIOSent = mapM_ putStrLn . hexdump ">>"
                                , loggingIORecv = \hdr body -> do
                                    putStrLn ("<< " ++ show hdr)
                                    mapM_ putStrLn $ hexdump "<<" body
                                }
            | otherwise = logging
-- | A one-slot session manager backed by an 'IORef': remembers the last
-- established session and resumes it only when the session id matches.
sessionRef ref = SessionManager
    { sessionEstablish = \sid sdata -> writeIORef ref (sid, sdata)
    , sessionResume = \sid -> do
        (storedId, storedData) <- readIORef ref
        return $ if storedId == sid then Just storedData else Nothing
    , sessionInvalidate = \_ -> return ()
    }
-- | Build the TLS client parameters from the command-line flags:
-- protocol versions, cipher list, SNI, session resumption, optional
-- client credentials and debug seeding.
getDefaultParams flags host store sStorage certCredsRequest session =
    (defaultParamsClient host BC.empty)
        { clientSupported = def { supportedVersions = supportedVers, supportedCiphers = myCiphers }
        , clientWantSessionResume = session
        , clientUseServerNameIndication = not (NoSNI `elem` flags)
        , clientShared = def { sharedSessionManager = sessionRef sStorage
                             , sharedCAStore = store
                             , sharedValidationCache = validateCache
                             , sharedCredentials = maybe mempty fst certCredsRequest
                             }
        , clientHooks = def { onCertificateRequest = maybe (onCertificateRequest def) snd certCredsRequest }
        , clientDebug = def { debugSeed = foldl getDebugSeed Nothing flags
                            , debugPrintSeed = if DebugPrintSeed `elem` flags
                                                   then (\seed -> putStrLn ("seed: " ++ show (seedToInteger seed)))
                                                   else (\_ -> return ())
                            }
        }
    where
            -- With --no-validation, accept every certificate.
            validateCache
                | validateCert = def
                | otherwise    = ValidationCache (\_ _ _ -> return ValidationCachePass)
                                                 (\_ _ _ -> return ())
            -- Selected ciphers, extended with any --bogocipher ids.
            myCiphers = foldl accBogusCipher (filter withUseCipher ciphers) flags
              where accBogusCipher acc (BogusCipher c) =
                        case reads c of
                            [(v, "")] -> acc ++ [bogusCipher v]
                            _ -> acc
                    accBogusCipher acc _ = acc
            -- Cipher ids explicitly requested via --use-cipher.
            getUsedCiphers = foldl f [] flags
              where f acc (UseCipher am) = case readNumber am of
                        Nothing -> acc
                        Just i -> i : acc
                    f acc _ = acc
            -- Keep every cipher unless --use-cipher restricts the set.
            withUseCipher c =
                case getUsedCiphers of
                    [] -> True
                    l -> cipherID c `elem` l
            getDebugSeed :: Maybe Seed -> Flag -> Maybe Seed
            getDebugSeed _ (DebugSeed seed) = seedFromInteger `fmap` readNumber seed
            getDebugSeed acc _ = acc
            -- Highest protocol version requested (TLS 1.2 by default).
            tlsConnectVer
                | Tls12 `elem` flags = TLS12
                | Tls11 `elem` flags = TLS11
                | Ssl3 `elem` flags = SSL3
                | Tls10 `elem` flags = TLS10
                | otherwise = TLS12
            -- Allow downgrade to lower versions unless -x forbids it.
            supportedVers
                | NoVersionDowngrade `elem` flags = [tlsConnectVer]
                | otherwise = filter (<= tlsConnectVer) allVers
            allVers = [SSL3, TLS10, TLS11, TLS12]
            validateCert = not (NoValidateCert `elem` flags)
-- | One parsed command-line option; see 'options' for the flag syntax
-- and per-flag help text.
data Flag = Verbose | Debug | IODebug | NoValidateCert | Session | Http11
          | Ssl3 | Tls10 | Tls11 | Tls12
          | NoSNI
          | Uri String
          | NoVersionDowngrade
          | UserAgent String
          | Output String
          | Timeout String
          | BogusCipher String
          | ClientCert String
          | BenchSend
          | BenchRecv
          | BenchData String
          | UseCipher String
          | ListCiphers
          | DebugSeed String
          | DebugPrintSeed
          | Help
          deriving (Show,Eq)
-- | getopt descriptors mapping command-line switches to 'Flag' values.
options :: [OptDescr Flag]
options =
    [ Option ['v'] ["verbose"] (NoArg Verbose) "verbose output on stdout"
    , Option ['d'] ["debug"] (NoArg Debug) "TLS debug output on stdout"
    , Option [] ["io-debug"] (NoArg IODebug) "TLS IO debug output on stdout"
    , Option ['s'] ["session"] (NoArg Session) "try to resume a session"
    , Option ['O'] ["output"] (ReqArg Output "stdout") "output "
    , Option ['t'] ["timeout"] (ReqArg Timeout "timeout") "timeout in milliseconds (2s by default)"
    , Option [] ["no-validation"] (NoArg NoValidateCert) "disable certificate validation"
    , Option [] ["client-cert"] (ReqArg ClientCert "cert-file:key-file") "add a client certificate to use with the server"
    , Option [] ["http1.1"] (NoArg Http11) "use http1.1 instead of http1.0"
    , Option [] ["ssl3"] (NoArg Ssl3) "use SSL 3.0"
    , Option [] ["no-sni"] (NoArg NoSNI) "don't use server name indication"
    , Option [] ["user-agent"] (ReqArg UserAgent "user-agent") "use a user agent"
    , Option [] ["tls10"] (NoArg Tls10) "use TLS 1.0"
    , Option [] ["tls11"] (NoArg Tls11) "use TLS 1.1"
    , Option [] ["tls12"] (NoArg Tls12) "use TLS 1.2 (default)"
    , Option [] ["bogocipher"] (ReqArg BogusCipher "cipher-id") "add a bogus cipher id for testing"
    , Option ['x'] ["no-version-downgrade"] (NoArg NoVersionDowngrade) "do not allow version downgrade"
    , Option [] ["uri"] (ReqArg Uri "URI") "optional URI requested by default /"
    , Option ['h'] ["help"] (NoArg Help) "request help"
    , Option [] ["bench-send"] (NoArg BenchSend) "benchmark send path. only with compatible server"
    , Option [] ["bench-recv"] (NoArg BenchRecv) "benchmark recv path. only with compatible server"
    , Option [] ["bench-data"] (ReqArg BenchData "amount") "amount of data to benchmark with"
    , Option [] ["use-cipher"] (ReqArg UseCipher "cipher-id") "use a specific cipher"
    , Option [] ["list-ciphers"] (NoArg ListCiphers) "list all ciphers supported and exit"
    , Option [] ["debug-seed"] (ReqArg DebugSeed "debug-seed") "debug: set a specific seed for randomness"
    , Option [] ["debug-print-seed"] (NoArg DebugPrintSeed) "debug: set a specific seed for randomness"
    ]
-- | No session to resume on the first connection.
noSession = Nothing
-- | Drive the requested action: one of the two benchmark modes, or a
-- plain HTTPS GET (optionally repeated once with session resumption
-- when --session is given).
runOn (sStorage, certStore) flags port hostname
    | BenchSend `elem` flags = runBench True
    | BenchRecv `elem` flags = runBench False
    | otherwise = do
        certCredRequest <- getCredRequest
        doTLS certCredRequest noSession
        when (Session `elem` flags) $ do
            session <- readIORef sStorage
            doTLS certCredRequest (Just session)
  where
        -- Handshake, then push (or pull) getBenchAmount bytes in 4 KiB
        -- chunks.
        runBench isSend =
            runTLS (Debug `elem` flags)
                   (IODebug `elem` flags)
                   (getDefaultParams flags hostname certStore sStorage Nothing noSession) hostname port $ \ctx -> do
                handshake ctx
                if isSend
                    then loopSendData getBenchAmount ctx
                    else loopRecvData getBenchAmount ctx
                bye ctx
          where
            dataSend = BC.replicate 4096 'a'
            loopSendData bytes ctx
                | bytes <= 0 = return ()
                | otherwise = do
                    sendData ctx $ LC.fromChunks [(if bytes > B.length dataSend then dataSend else BC.take bytes dataSend)]
                    loopSendData (bytes - B.length dataSend) ctx
            loopRecvData bytes ctx
                | bytes <= 0 = return ()
                | otherwise = do
                    d <- recvData ctx
                    loopRecvData (bytes - B.length d) ctx
        -- Build and send the HTTP request, then stream the response to
        -- stdout or the --output file.
        doTLS certCredRequest sess = do
            let query = LC.pack (
                    "GET "
                    ++ findURI flags
                    ++ (if Http11 `elem` flags then (" HTTP/1.1\r\nHost: " ++ hostname) else " HTTP/1.0")
                    ++ userAgent
                    ++ "\r\n\r\n")
            when (Verbose `elem` flags) (putStrLn "sending query:" >> LC.putStrLn query >> putStrLn "")
            out <- maybe (return stdout) (flip openFile WriteMode) getOutput
            runTLS (Debug `elem` flags)
                   (IODebug `elem` flags)
                   (getDefaultParams flags hostname certStore sStorage certCredRequest sess) hostname port $ \ctx -> do
                handshake ctx
                sendData ctx $ query
                loopRecv out ctx
                bye ctx
                return ()
        -- Read response chunks until EOF (empty chunk) or per-recv timeout.
        loopRecv out ctx = do
            d <- timeout (timeoutMs * 1000) (recvData ctx) -- 2s per recv
            case d of
                Nothing -> when (Debug `elem` flags) (hPutStrLn stderr "timeout") >> return ()
                Just b | BC.null b -> return ()
                       | otherwise -> BC.hPutStrLn out b >> loopRecv out ctx
        -- Load the --client-cert "cert-file:key-file" pair, if any.
        getCredRequest =
            case clientCert of
                Nothing -> return Nothing
                Just s -> do
                    case break (== ':') s of
                        (_ ,"") -> error "wrong format for client-cert, expecting 'cert-file:key-file'"
                        (cert,':':key) -> do
                            ecred <- credentialLoadX509 cert key
                            case ecred of
                                Left err -> error ("cannot load client certificate: " ++ err)
                                Right cred -> do
                                    let certRequest _ = return $ Just cred
                                    return $ Just (Credentials [cred], certRequest)
                        (_ ,_) -> error "wrong format for client-cert, expecting 'cert-file:key-file'"
        -- The helpers below each fold the flag list for one setting,
        -- keeping the last occurrence where it matters.
        findURI [] = "/"
        findURI (Uri u:_) = u
        findURI (_:xs) = findURI xs
        userAgent = maybe "" (\s -> "\r\nUser-Agent: " ++ s) mUserAgent
        mUserAgent = foldl f Nothing flags
          where f _ (UserAgent ua) = Just ua
                f acc _ = acc
        getOutput = foldl f Nothing flags
          where f _ (Output o) = Just o
                f acc _ = acc
        timeoutMs = foldl f defaultTimeout flags
          where f _ (Timeout t) = read t
                f acc _ = acc
        clientCert = foldl f Nothing flags
          where f _ (ClientCert c) = Just c
                f acc _ = acc
        getBenchAmount = foldl f defaultBenchAmount flags
          where f acc (BenchData am) = case readNumber am of
                    Nothing -> acc
                    Just i -> i
                f acc _ = acc
-- | Parse a non-negative decimal number from a string.
--
-- Returns 'Nothing' for anything that is not one-or-more digits.  The
-- previous version accepted the empty string (since
-- @all isDigit "" == True@) and then crashed inside @read ""@; the
-- explicit non-empty check fixes that.
readNumber :: Read a => String -> Maybe a
readNumber s
    | not (null s) && all isDigit s = Just $ read s
    | otherwise = Nothing
-- | Print the getopt-generated usage text for all supported flags.
printUsage =
    putStrLn $ usageInfo "usage: simpleclient [opts] <hostname> [port]\n\n\t(port default to: 443)\noptions:\n" options
-- | Print a table of every supported cipher: padded name, decimal id,
-- and hex id.
printCiphers = do
    putStrLn "Supported ciphers"
    putStrLn "====================================="
    mapM_ describe ciphers
  where
    describe c =
        putStrLn (pad 50 (cipherName c) ++ " = " ++ pad 5 (show $ cipherID c) ++ " 0x" ++ showHex (cipherID c) "")
    -- Right-pad with spaces to the given width; longer strings pass
    -- through unchanged.
    pad n s = s ++ replicate (max 0 (n - length s)) ' '
-- | Entry point: parse flags, honor --help / --list-ciphers, then
-- connect to the given host (the port defaults to 443).
main = do
    args <- getArgs
    let (opts,other,errs) = getOpt Permute options args
    when (not $ null errs) $ do
        putStrLn $ show errs
        exitFailure
    when (Help `elem` opts) $ do
        printUsage
        exitSuccess
    when (ListCiphers `elem` opts) $ do
        printCiphers
        exitSuccess
    certStore <- getSystemCertificateStore
    -- The session slot starts as a bottom; it is only read after a
    -- session has actually been established (see runOn).
    sStorage <- newIORef (error "storage ioref undefined")
    case other of
        [hostname] -> runOn (sStorage, certStore) opts 443 hostname
        [hostname,port] -> runOn (sStorage, certStore) opts (fromInteger $ read port) hostname
        _ -> printUsage >> exitFailure
| tolysz/hs-tls | debug/src/SimpleClient.hs | bsd-3-clause | 14,306 | 0 | 26 | 5,205 | 3,786 | 1,953 | 1,833 | 284 | 17 |
import System.IO
import System.Exit
import Control.Monad
import qualified Data.Text.IO as T
import Text.Unicoder
-- | Run the unicoder sample suite: load the test configuration, run
-- each sample file through 'testFile', print a per-file summary and
-- exit non-zero when any sample fails.
main :: IO ()
main = do
    m_config <- loadConfig "test/test.config"
    -- Bind the partially-applied runner under a fresh name; previously
    -- it was bound as `testFile`, shadowing the top-level function of
    -- the same name in the rest of the do-block.
    runTest <- case m_config of
        Nothing -> die "Could not parse config file."
        Just config -> return $ testFile config
    let files = ["passthrough", "mono", "di"]
    results <- mapM runTest files
    unless (and results) $ do
        mapM_ putStrLn $ zipWith (\name ok -> name ++ ": " ++ if ok then "OK" else "FAILURE") files results
        exitFailure
-- | Run one sample through the unicodizer: read @test/<name>.in@,
-- transform it with the given config, and compare against the expected
-- @test/<name>.out@.
testFile :: Config -> FilePath -> IO Bool
testFile config name = do
    source   <- T.readFile ("test/" ++ name ++ ".in")
    expected <- T.readFile ("test/" ++ name ++ ".out")
    return (unicodize config source == expected)
| Zankoku-Okuno/unicoder | test/test.hs | bsd-3-clause | 770 | 0 | 15 | 186 | 267 | 132 | 135 | 21 | 3 |
{-# LANGUAGE OverloadedStrings, TupleSections, TypeFamilies, FlexibleContexts,
PackageImports #-}
module TestPusherT (
XmlPusher(..), Zero(..), One(..), Two(..),
testPusher, checkFingerprint) where
import Control.Applicative
import Control.Monad
import "monads-tf" Control.Monad.Trans
import Control.Concurrent hiding (yield)
import Data.Maybe
import Data.List
import Data.Char
import Data.HandleLike
import Data.Pipe
import Data.Pipe.ByteString
import Data.X509
import Data.X509.Validation
import System.IO
import Text.XML.Pipe
import Numeric
import Network.PeyoTLS.Client (ValidateHandle)
import qualified Data.ByteString as BS
import Network.XmlPush
-- | Exercise an 'XmlPusher': create it from the given handles and
-- arguments, do one read/write round-trip, then wire its read side to
-- stdout (in a background thread) and feed it XML parsed from stdin.
testPusher :: (XmlPusher xp, ValidateHandle h, HandleMonad h ~ IO) =>
    xp h -> NumOfHandle xp h -> PusherArgs xp h -> IO ()
testPusher tp hs as = do
    -- tp is only a type witness; asTypeOf pins the pusher type.
    xp <- (`asTypeOf` tp) <$> generate hs as
    -- One initial round-trip: print the first incoming node (if any),
    -- then push a single <hoge/> element.
    runPipe_ $ do
        readFrom xp =$= await >>= maybe (return ()) (lift . print)
        yield (XmlNode (nullQ "hoge") [] [] []) =$= writeTo xp
    -- Background: render everything read from the pusher to stdout.
    void . forkIO . runPipe_ $ readFrom xp
        =$= convert (xmlString . (: []))
        =$= toHandle stdout
    -- Foreground: parse stdin as XML and push each node.
    runPipe_ $ fromHandle stdin
        =$= xmlEvent
        =$= convert fromJust
        =$= xmlNode []
        =$= writeTo xp
-- | True when the SHA-256 fingerprint of the certificate, rendered by
-- 'cutFingerprint', appears in the accepted list.
checkFingerprint :: [String] -> SignedCertificate -> Bool
checkFingerprint accepted cert = rendered `elem` accepted
    where rendered = cutFingerprint (getFingerprint cert HashSHA256)
-- | Render a fingerprint as colon-separated upper-case hex bytes and
-- keep only the trailing 29 characters.
cutFingerprint :: Fingerprint -> String
cutFingerprint (Fingerprint raw) =
    lastN 29 (intercalate ":" [ map toUpper (showHex b "") | b <- BS.unpack raw ])
-- | Keep the last @n@ elements of a list; returns the whole list when
-- it has fewer than @n@ elements.
lastN :: Int -> [a] -> [a]
lastN n = reverse . take n . reverse
| YoshikuniJujo/xml-push | examples/TestPusherT.hs | bsd-3-clause | 1,544 | 8 | 15 | 247 | 563 | 299 | 264 | 45 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.NV.ShaderBufferLoad
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/NV/shader_buffer_load.txt NV_shader_buffer_load> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.NV.ShaderBufferLoad (
-- * Enums
gl_BUFFER_GPU_ADDRESS_NV,
gl_GPU_ADDRESS_NV,
gl_MAX_SHADER_BUFFER_ADDRESS_NV,
-- * Functions
glGetBufferParameterui64vNV,
glGetIntegerui64vNV,
glGetNamedBufferParameterui64vNV,
glGetUniformui64vNV,
glIsBufferResidentNV,
glIsNamedBufferResidentNV,
glMakeBufferNonResidentNV,
glMakeBufferResidentNV,
glMakeNamedBufferNonResidentNV,
glMakeNamedBufferResidentNV,
glProgramUniformui64NV,
glProgramUniformui64vNV,
glUniformui64NV,
glUniformui64vNV
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/NV/ShaderBufferLoad.hs | bsd-3-clause | 1,165 | 0 | 4 | 133 | 94 | 70 | 24 | 20 | 0 |
module Yesod.Goodies.PNotify.Types where
import Data.Text (Text)
-- | Target of a notification link.
type URL = Text
-- | Severity class of a notification.
data NotifyType = Notice
                | Info
                | Success
                | Error
                deriving (Eq, Ord, Enum)
-- | Visual theme used to render the notification.
data NotifyStyling = JqueryUI
                   | Bootstrap3
                   | BrightTheme
                   | FontAwesome
                   deriving (Eq, Ord, Enum)
-- | How the notification is animated in and out.
data AnimationType = None
                   | Fade
                   | Slide
                   deriving (Eq, Ord, Enum)
-- | Speed of the show/hide animation.
data AnimateSpeed = Slow
                  | Def
                  | Normal
                  | Fast
                  deriving (Eq, Ord, Enum)
| cutsea110/yesod-pnotify | Yesod/Goodies/PNotify/Types.hs | bsd-3-clause | 665 | 0 | 6 | 339 | 145 | 86 | 59 | 22 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Exec
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- Implementation of the 'exec' command. Runs an arbitrary executable in an
-- environment suitable for making use of the sandbox.
-----------------------------------------------------------------------------
module Distribution.Client.Exec ( exec
) where
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.GHCJS as GHCJS
import Distribution.Client.Sandbox (getSandboxConfigFilePath)
import Distribution.Client.Sandbox.PackageEnvironment (sandboxPackageDBPath)
import Distribution.Client.Sandbox.Types (UseSandbox (..))
import Distribution.Simple.Compiler (Compiler, CompilerFlavor(..), compilerFlavor)
import Distribution.Simple.Program (ghcProgram, ghcjsProgram, lookupProgram)
import Distribution.Simple.Program.Db (ProgramDb, requireProgram, modifyProgramSearchPath)
import Distribution.Simple.Program.Find (ProgramSearchPathEntry(..))
import Distribution.Simple.Program.Run (programInvocation, runProgramInvocation)
import Distribution.Simple.Program.Types ( simpleProgram, ConfiguredProgram(..) )
import Distribution.Simple.Utils (die)
import Distribution.System (Platform)
import Distribution.Verbosity (Verbosity)
import System.FilePath (searchPathSeparator, (</>))
import Control.Applicative ((<$>))
import Data.Monoid (mempty)
-- | Execute the given command in the package's environment.
--
-- The given command is executed with GHC configured to use the correct
-- package database and with the sandbox bin directory added to the PATH.
exec :: Verbosity
     -> UseSandbox
     -> Compiler
     -> Platform
     -> ProgramDb
     -> [String]
     -> IO ()
exec verbosity useSandbox comp platform programDb extraArgs =
    case extraArgs of
        (exe:args) -> do
            program <- requireProgram' verbosity useSandbox programDb exe
            -- Prepend the sandbox variables to whatever overrides the
            -- program already carries.
            env <- ((++) (programOverrideEnv program)) <$> environmentOverrides
            let invocation = programInvocation
                                 program { programOverrideEnv = env }
                                 args
            runProgramInvocation verbosity invocation
        [] -> die "Please specify an executable to run"
  where
    -- No extra environment outside a sandbox.
    environmentOverrides =
        case useSandbox of
            NoSandbox -> return []
            (UseSandbox sandboxDir) ->
                sandboxEnvironment verbosity sandboxDir comp platform programDb
-- | Return the package's sandbox environment.
--
-- The environment sets GHC_PACKAGE_PATH so that GHC will use the sandbox.
sandboxEnvironment :: Verbosity
                   -> FilePath
                   -> Compiler
                   -> Platform
                   -> ProgramDb
                   -> IO [(String, Maybe String)]
sandboxEnvironment verbosity sandboxDir comp platform programDb =
    case compilerFlavor comp of
      GHC   -> env GHC.getGlobalPackageDB   ghcProgram   "GHC_PACKAGE_PATH"
      GHCJS -> env GHCJS.getGlobalPackageDB ghcjsProgram "GHCJS_PACKAGE_PATH"
      _     -> die "exec only works with GHC and GHCJS"
  where
    -- Build the compiler-specific package-path variable plus the
    -- CABAL_SANDBOX_* variables.
    env getGlobalPackageDB hcProgram packagePathEnvVar = do
        -- The compiler program must have been configured by the caller.
        let Just program = lookupProgram hcProgram programDb
        gDb <- getGlobalPackageDB verbosity program
        sandboxConfigFilePath <- getSandboxConfigFilePath mempty
        let compilerPackagePath = hcPackagePath gDb
        return [ (packagePathEnvVar, compilerPackagePath)
               , ("CABAL_SANDBOX_PACKAGE_PATH", compilerPackagePath)
               , ("CABAL_SANDBOX_CONFIG", Just sandboxConfigFilePath)
               ]
    -- Sandbox package db first, then the global db.
    hcPackagePath gDb =
        let s = sandboxPackageDBPath sandboxDir comp platform
        in Just $ prependToSearchPath gDb s
    prependToSearchPath path newValue =
        newValue ++ [searchPathSeparator] ++ path
-- | Check that a program is configured and available to be run. If
-- a sandbox is available check in the sandbox's directory.
-- | Locate and configure the named executable, searching the sandbox's
-- bin directory first when a sandbox is in use.
requireProgram' :: Verbosity
                -> UseSandbox
                -> ProgramDb
                -> String
                -> IO ConfiguredProgram
requireProgram' verbosity useSandbox programDb exe = do
    (prog, _) <- requireProgram verbosity (simpleProgram exe) sandboxAwareDb
    return prog
  where
    sandboxAwareDb = modifyProgramSearchPath extendPath programDb
    -- Put <sandbox>/bin ahead of the existing search path.
    extendPath searchPath =
        case useSandbox of
            NoSandbox -> searchPath
            UseSandbox sandboxDir ->
                ProgramSearchPathDir (sandboxDir </> "bin") : searchPath
| plumlife/cabal | cabal-install/Distribution/Client/Exec.hs | bsd-3-clause | 4,794 | 0 | 15 | 1,228 | 844 | 465 | 379 | 81 | 3 |
module Main where
import System.Nagios.Plugin (runNagiosPlugin)
import System.Nagios.Plugin.Retcond
-- | Entry point: hand control to the nagios-plugin runner, which
-- executes 'checkRetcond' and reports its outcome.
main :: IO ()
main = runNagiosPlugin checkRetcond
| anchor/nagios-plugin-retcond | src/Main.hs | bsd-3-clause | 180 | 0 | 6 | 46 | 40 | 24 | 16 | 5 | 1 |
module Raskell.Test where
| jonathankochems/raskell-git-download | src/Raskell/Test.hs | bsd-3-clause | 26 | 0 | 3 | 3 | 6 | 4 | 2 | 1 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Quantity.HR.Rules
( rules ) where
import Data.String
import Prelude
import qualified Data.Text as Text
import Duckling.Dimensions.Types
import Duckling.Numeral.Types (NumeralData (..))
import Duckling.Quantity.Helpers
import Duckling.Regex.Types
import Duckling.Types
import qualified Duckling.Numeral.Types as TNumeral
import qualified Duckling.Quantity.Types as TQuantity
-- | Match a number followed by a kilogram unit (Croatian spellings such
-- as "kg", "kilograma") and produce the quantity converted to grams.
ruleNumberUnits :: Rule
ruleNumberUnits = Rule
  { name = "<number> <units>"
  , pattern =
    [ dimension Numeral
    , regex "k(il(o|e|a))?(g(rama?)?)?"
    ]
  , prod = \tokens -> case tokens of
      (Token Numeral NumeralData {TNumeral.value = v}:_) ->
        -- Kilograms are normalized to grams.
        Just . Token Quantity $ quantity TQuantity.Gram (1000 * v)
      _ -> Nothing
  }
-- | Attach a product ("meso"/meat or "sol"/salt, in either inflection)
-- to an already-parsed quantity.
ruleQuantityProduct :: Rule
ruleQuantityProduct = Rule
  { name = "<quantity> product"
  , pattern =
    [ dimension Quantity
    , regex "(mes(o|a)|soli?)"
    ]
  , prod = \tokens -> case tokens of
      (Token Quantity qd:
       Token RegexMatch (GroupMatch (match:_)):
       _) -> case Text.toLower match of
        -- Both inflections normalize to the base form of the product.
        "meso" -> Just . Token Quantity $ withProduct "meso" qd
        "mesa" -> Just . Token Quantity $ withProduct "meso" qd
        "sol" -> Just . Token Quantity $ withProduct "sol" qd
        "soli" -> Just . Token Quantity $ withProduct "sol" qd
        _ -> Nothing
      _ -> Nothing
  }
-- | All Croatian quantity rules exported by this module.
rules :: [Rule]
rules =
  [ ruleNumberUnits
  , ruleQuantityProduct
  ]
| rfranek/duckling | Duckling/Quantity/HR/Rules.hs | bsd-3-clause | 1,799 | 0 | 18 | 398 | 423 | 239 | 184 | 44 | 6 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE CPP, DeriveDataTypeable, DeriveFunctor #-}
-- | CoreSyn holds all the main data types for use by for the Glasgow Haskell Compiler midsection
module CoreSyn (
-- * Main data types
Expr(..), Alt, Bind(..), AltCon(..), Arg,
Tickish(..), TickishScoping(..), TickishPlacement(..),
CoreProgram, CoreExpr, CoreAlt, CoreBind, CoreArg, CoreBndr,
TaggedExpr, TaggedAlt, TaggedBind, TaggedArg, TaggedBndr(..), deTagExpr,
-- ** 'Expr' construction
mkLets, mkLams,
mkApps, mkTyApps, mkCoApps, mkVarApps,
mkIntLit, mkIntLitInt,
mkWordLit, mkWordLitWord,
mkWord64LitWord64, mkInt64LitInt64,
mkCharLit, mkStringLit,
mkFloatLit, mkFloatLitFloat,
mkDoubleLit, mkDoubleLitDouble,
mkConApp, mkConApp2, mkTyBind, mkCoBind,
varToCoreExpr, varsToCoreExprs,
isId, cmpAltCon, cmpAlt, ltAlt,
-- ** Simple 'Expr' access functions and predicates
bindersOf, bindersOfBinds, rhssOfBind, rhssOfAlts,
collectBinders, collectTyAndValBinders,
collectArgs, collectArgsTicks, flattenBinds,
exprToType, exprToCoercion_maybe,
applyTypeToArg,
isValArg, isTypeArg, isTyCoArg, valArgCount, valBndrCount,
isRuntimeArg, isRuntimeVar,
tickishCounts, tickishScoped, tickishScopesLike, tickishFloatable,
tickishCanSplit, mkNoCount, mkNoScope,
tickishIsCode, tickishPlace,
tickishContains,
-- * Unfolding data types
Unfolding(..), UnfoldingGuidance(..), UnfoldingSource(..),
-- ** Constructing 'Unfolding's
noUnfolding, evaldUnfolding, mkOtherCon,
unSaturatedOk, needSaturated, boringCxtOk, boringCxtNotOk,
-- ** Predicates and deconstruction on 'Unfolding'
unfoldingTemplate, expandUnfolding_maybe,
maybeUnfoldingTemplate, otherCons,
isValueUnfolding, isEvaldUnfolding, isCheapUnfolding,
isExpandableUnfolding, isConLikeUnfolding, isCompulsoryUnfolding,
isStableUnfolding, hasStableCoreUnfolding_maybe,
isClosedUnfolding, hasSomeUnfolding,
canUnfold, neverUnfoldGuidance, isStableSource,
-- * Annotated expression data types
AnnExpr, AnnExpr'(..), AnnBind(..), AnnAlt,
-- ** Operations on annotated expressions
collectAnnArgs, collectAnnArgsTicks,
-- ** Operations on annotations
deAnnotate, deAnnotate', deAnnAlt, collectAnnBndrs,
-- * Orphanhood
IsOrphan(..), isOrphan, notOrphan, chooseOrphanAnchor,
-- * Core rule data types
CoreRule(..), RuleBase,
RuleName, RuleFun, IdUnfoldingFun, InScopeEnv,
RuleEnv(..), mkRuleEnv, emptyRuleEnv,
-- ** Operations on 'CoreRule's
ruleArity, ruleName, ruleIdName, ruleActivation,
setRuleIdName,
isBuiltinRule, isLocalRule, isAutoRule,
-- * Core vectorisation declarations data type
CoreVect(..)
) where
#include "HsVersions.h"
import CostCentre
import VarEnv( InScopeSet )
import Var
import Type
import Coercion
import Name
import NameEnv( NameEnv, emptyNameEnv )
import Literal
import DataCon
import Module
import TyCon
import BasicTypes
import DynFlags
import Outputable
import Util
import SrcLoc ( RealSrcSpan, containsSpan )
import Binary
import Data.Data hiding (TyCon)
import Data.Int
import Data.Word
-- Fixity declarations for the application constructors/smart constructors.
infixl 4 `mkApps`, `mkTyApps`, `mkVarApps`, `App`, `mkCoApps`
-- Left associative, so that we can say (f `mkTyApps` xs `mkVarApps` ys)
{-
************************************************************************
* *
\subsection{The main data types}
* *
************************************************************************
These data types are the heart of the compiler
-}
-- | This is the data type that represents GHC's core intermediate language. Currently
-- GHC uses System FC <http://research.microsoft.com/~simonpj/papers/ext-f/> for this purpose,
-- which is closely related to the simpler and better known System F <http://en.wikipedia.org/wiki/System_F>.
--
-- We get from Haskell source to this Core language in a number of stages:
--
-- 1. The source code is parsed into an abstract syntax tree, which is represented
-- by the data type 'HsExpr.HsExpr' with the names being 'RdrName.RdrNames'
--
-- 2. This syntax tree is /renamed/, which attaches a 'Unique.Unique' to every 'RdrName.RdrName'
-- (yielding a 'Name.Name') to disambiguate identifiers which are lexically identical.
-- For example, this program:
--
-- @
-- f x = let f x = x + 1
-- in f (x - 2)
-- @
--
-- Would be renamed by having 'Unique's attached so it looked something like this:
--
-- @
-- f_1 x_2 = let f_3 x_4 = x_4 + 1
-- in f_3 (x_2 - 2)
-- @
-- But see Note [Shadowing] below.
--
-- 3. The resulting syntax tree undergoes type checking (which also deals with instantiating
-- type class arguments) to yield a 'HsExpr.HsExpr' type that has 'Id.Id' as its names.
--
-- 4. Finally the syntax tree is /desugared/ from the expressive 'HsExpr.HsExpr' type into
-- this 'Expr' type, which has far fewer constructors and hence is easier to perform
-- optimization, analysis and code generation on.
--
-- The type parameter @b@ is for the type of binders in the expression tree.
--
-- The language consists of the following elements:
--
-- * Variables
--
-- * Primitive literals
--
-- * Applications: note that the argument may be a 'Type'.
--
-- See "CoreSyn#let_app_invariant" for another invariant
--
-- * Lambda abstraction
--
-- * Recursive and non recursive @let@s. Operationally
-- this corresponds to allocating a thunk for the things
-- bound and then executing the sub-expression.
--
-- #top_level_invariant#
-- #letrec_invariant#
--
-- The right hand sides of all top-level and recursive @let@s
-- /must/ be of lifted type (see "Type#type_classification" for
-- the meaning of /lifted/ vs. /unlifted/).
--
-- See Note [CoreSyn let/app invariant]
--
-- #type_let#
-- We allow a /non-recursive/ let to bind a type variable, thus:
--
-- > Let (NonRec tv (Type ty)) body
--
-- This can be very convenient for postponing type substitutions until
-- the next run of the simplifier.
--
-- At the moment, the rest of the compiler only deals with type-let
-- in a Let expression, rather than at top level. We may want to revisit
-- this choice.
--
-- * Case split. Operationally this corresponds to evaluating
-- the scrutinee (expression examined) to weak head normal form
-- and then examining at most one level of resulting constructor (i.e. you
-- cannot do nested pattern matching directly with this).
--
-- The binder gets bound to the value of the scrutinee,
-- and the 'Type' must be that of all the case alternatives
--
-- #case_invariants#
-- This is one of the more complicated elements of the Core language,
-- and comes with a number of restrictions:
--
-- 1. The list of alternatives may be empty;
-- See Note [Empty case alternatives]
--
-- 2. The 'DEFAULT' case alternative must be first in the list,
-- if it occurs at all.
--
-- 3. The remaining cases are in order of increasing
-- tag (for 'DataAlts') or
-- lit (for 'LitAlts').
-- This makes finding the relevant constructor easy,
-- and makes comparison easier too.
--
-- 4. The list of alternatives must be exhaustive. An /exhaustive/ case
-- does not necessarily mention all constructors:
--
-- @
-- data Foo = Red | Green | Blue
-- ... case x of
-- Red -> True
-- other -> f (case x of
-- Green -> ...
-- Blue -> ... ) ...
-- @
--
-- The inner case does not need a @Red@ alternative, because @x@
-- can't be @Red@ at that program point.
--
-- 5. Floating-point values must not be scrutinised against literals.
-- See Trac #9238 and Note [Rules for floating-point comparisons]
-- in PrelRules for rationale.
--
-- * Cast an expression to a particular type.
-- This is used to implement @newtype@s (a @newtype@ constructor or
-- destructor just becomes a 'Cast' in Core) and GADTs.
--
-- * Notes. These allow general information to be added to expressions
-- in the syntax tree
--
-- * A type: this should only show up at the top level of an Arg
--
-- * A coercion
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
data Expr b
  = Var   Id                      -- ^ A variable occurrence
  | Lit   Literal                 -- ^ A primitive literal
  | App   (Expr b) (Arg b)        -- ^ Application; note that the argument may be
                                  -- a 'Type'.  See "CoreSyn#let_app_invariant"
  | Lam   b (Expr b)              -- ^ Lambda abstraction
  | Let   (Bind b) (Expr b)       -- ^ Recursive or non-recursive @let@;
                                  -- see #letrec_invariant# and #type_let#
  | Case  (Expr b) b Type [Alt b] -- ^ Case split; the binder is bound to the value
                                  -- of the scrutinee, and the 'Type' is the type of
                                  -- all the alternatives. See #case_invariants#
  | Cast  (Expr b) Coercion       -- ^ Cast an expression to a particular type
  | Tick  (Tickish Id) (Expr b)   -- ^ Attach a 'Tickish' annotation to an expression
  | Type  Type                    -- ^ A type: should only show up at the top level
                                  -- of an 'Arg'
  | Coercion Coercion             -- ^ A coercion
  deriving (Data, Typeable)
-- | Type synonym for expressions that occur in function argument positions.
-- Only 'Arg' should contain a 'Type' at top level, general 'Expr' should not.
-- (The synonym exists for documentation purposes only; it is not enforced
-- by the type system.)
type Arg b = Expr b
-- | A case split alternative. Consists of the constructor leading to the alternative,
-- the variables bound from the constructor, and the expression to be executed given that binding.
-- The default alternative is @(DEFAULT, [], rhs)@.
--
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
type Alt b = (AltCon, [b], Expr b)
-- | A case alternative constructor (i.e. pattern match)
--
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
data AltCon
  = DataAlt DataCon   -- ^ A plain data constructor: @case e of { Foo x -> ... }@.
                      -- Invariant: the 'DataCon' is always from a @data@ type, and never from a @newtype@
  | LitAlt  Literal   -- ^ A literal: @case e of { 1 -> ... }@
                      -- Invariant: always an *unlifted* literal
                      -- See Note [Literal alternatives]
  | DEFAULT           -- ^ Trivial alternative: @case e of { _ -> ... }@
                      -- If present, it must come first in the list of
                      -- alternatives (see #case_invariants#)
  deriving (Eq, Ord, Data, Typeable)
-- | Binding, used for top level bindings in a module and local bindings in a @let@.
--
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
data Bind b = NonRec b (Expr b)    -- ^ A single non-recursive binding
            | Rec [(b, (Expr b))]  -- ^ A group of mutually recursive bindings
            deriving (Data, Typeable)
{-
Note [Shadowing]
~~~~~~~~~~~~~~~~
While various passes attempt to rename on-the-fly in a manner that
avoids "shadowing" (thereby simplifying downstream optimizations),
neither the simplifier nor any other pass GUARANTEES that shadowing is
avoided. Thus, all passes SHOULD work fine even in the presence of
arbitrary shadowing in their inputs.
In particular, scrutinee variables `x` in expressions of the form
`Case e x t` are often renamed to variables with a prefix
"wild_". These "wild" variables may appear in the body of the
case-expression, and further, may be shadowed within the body.
So the Unique in an Var is not really unique at all. Still, it's very
useful to give a constant-time equality/ordering for Vars, and to give
a key that can be used to make sets of Vars (VarSet), or mappings from
Vars to other things (VarEnv). Moreover, if you do want to eliminate
shadowing, you can give a new Unique to an Id without changing its
printable name, which makes debugging easier.
Note [Literal alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Literal alternatives (LitAlt lit) are always for *un-lifted* literals.
We have one literal, a literal Integer, that is lifted, and we don't
allow in a LitAlt, because LitAlt cases don't do any evaluation. Also
(see Trac #5603) if you say
case 3 of
S# x -> ...
J# _ _ -> ...
(where S#, J# are the constructors for Integer) we don't want the
simplifier calling findAlt with argument (LitAlt 3). No no. Integer
literals are an opaque encoding of an algebraic data type, not of
an unlifted literal, like all the others.
Also, we do not permit case analysis with literal patterns on floating-point
types. See Trac #9238 and Note [Rules for floating-point comparisons] in
PrelRules for the rationale for this restriction.
-------------------------- CoreSyn INVARIANTS ---------------------------
Note [CoreSyn top-level invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #toplevel_invariant#
Note [CoreSyn letrec invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #letrec_invariant#
Note [CoreSyn let/app invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The let/app invariant
the right hand side of a non-recursive 'Let', and
the argument of an 'App',
/may/ be of unlifted type, but only if
the expression is ok-for-speculation.
This means that the let can be floated around
without difficulty. For example, this is OK:
y::Int# = x +# 1#
But this is not, as it may affect termination if the
expression is floated out:
y::Int# = fac 4#
In this situation you should use @case@ rather than a @let@. The function
'CoreUtils.needsCaseBinding' can help you determine which to generate, or
alternatively use 'MkCore.mkCoreLet' rather than this constructor directly,
which will generate a @case@ if necessary
The let/app invariant is initially enforced by DsUtils.mkCoreLet and mkCoreApp
Note [CoreSyn case invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #case_invariants#
Note [CoreSyn let goal]
~~~~~~~~~~~~~~~~~~~~~~~
* The simplifier tries to ensure that if the RHS of a let is a constructor
application, its arguments are trivial, so that the constructor can be
inlined vigorously.
Note [Type let]
~~~~~~~~~~~~~~~
See #type_let#
Note [Empty case alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The alternatives of a case expression should be exhaustive. But
this exhaustive list can be empty!
* A case expression can have empty alternatives if (and only if) the
scrutinee is bound to raise an exception or diverge. When do we know
this? See Note [Bottoming expressions] in CoreUtils.
* The possibility of empty alternatives is one reason we need a type on
the case expression: if the alternatives are empty we can't get the
type from the alternatives!
* In the case of empty types (see Note [Bottoming expressions]), say
data T
we do NOT want to replace
case (x::T) of Bool {} --> error Bool "Inaccessible case"
because x might raise an exception, and *that*'s what we want to see!
(Trac #6067 is an example.) To preserve semantics we'd have to say
x `seq` error Bool "Inaccessible case"
but the 'seq' is just a case, so we are back to square 1. Or I suppose
we could say
x |> UnsafeCoerce T Bool
but that loses all trace of the fact that this originated with an empty
set of alternatives.
* We can use the empty-alternative construct to coerce error values from
one type to another. For example
f :: Int -> Int
f n = error "urk"
g :: Int -> (# Char, Bool #)
g x = case f x of { 0 -> ..., n -> ... }
Then if we inline f in g's RHS we get
case (error Int "urk") of (# Char, Bool #) { ... }
and we can discard the alternatives since the scrutinee is bottom to give
case (error Int "urk") of (# Char, Bool #) {}
This is nicer than using an unsafe coerce between Int ~ (# Char,Bool #),
if for no other reason that we don't need to instantiate the (~) at an
unboxed type.
* We treat a case expression with empty alternatives as trivial iff
its scrutinee is (see CoreUtils.exprIsTrivial). This is actually
important; see Note [Empty case is trivial] in CoreUtils
* An empty case is replaced by its scrutinee during the CoreToStg
conversion; remember STG is un-typed, so there is no need for
the empty case to do the type conversion.
************************************************************************
* *
Ticks
* *
************************************************************************
-}
-- | Allows attaching extra information to points in expressions
--
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
data Tickish id =
    -- | An @{-# SCC #-}@ profiling annotation, either automatically
    -- added by the desugarer as a result of -auto-all, or added by
    -- the user.
    ProfNote {
      profNoteCC    :: CostCentre, -- ^ the cost centre
      profNoteCount :: !Bool,      -- ^ bump the entry count?
      profNoteScope :: !Bool       -- ^ scopes over the enclosed expression
                                   -- (i.e. not just a tick)
    }

  -- | A "tick" used by HPC to track the execution of each
  -- subexpression in the original source code.
  | HpcTick {
      tickModule :: Module,        -- ^ module the tick was inserted for
      tickId     :: !Int           -- ^ tick number, unique within the module
    }

  -- | A breakpoint for the GHCi debugger. This behaves like an HPC
  -- tick, but has a list of free variables which will be available
  -- for inspection in GHCi when the program stops at the breakpoint.
  --
  -- NB. we must take account of these Ids when (a) counting free variables,
  -- and (b) substituting (don't substitute for them)
  | Breakpoint
    { breakpointId  :: !Int        -- ^ breakpoint number, unique within the module
    , breakpointFVs :: [id]        -- ^ the order of this list is important:
                                   -- it matches the order of the lists in the
                                   -- appropriate entry in HscTypes.ModBreaks.
                                   --
                                   -- Careful about substitution!  See
                                   -- Note [substTickish] in CoreSubst.
    }

  -- | A source note.
  --
  -- Source notes are pure annotations: Their presence should neither
  -- influence compilation nor execution. The semantics are given by
  -- causality: The presence of a source note means that a local
  -- change in the referenced source code span will possibly provoke
  -- the generated code to change. On the flip-side, the functionality
  -- of annotated code *must* be invariant against changes to all
  -- source code *except* the spans referenced in the source notes
  -- (see "Causality of optimized Haskell" paper for details).
  --
  -- Therefore extending the scope of any given source note is always
  -- valid. Note that it is still undesirable though, as this reduces
  -- their usefulness for debugging and profiling. Therefore we will
  -- generally try only to make use of this property where it is
  -- necessary to enable optimizations.
  | SourceNote
    { sourceSpan :: RealSrcSpan    -- ^ Source covered
    , sourceName :: String         -- ^ Name for source location
                                   -- (uses same names as CCs)
    }
  deriving (Eq, Ord, Data, Typeable)
-- | A "counting tick" (where tickishCounts is True) is one that
-- counts evaluations in some way.  We cannot discard a counting tick,
-- and the compiler should preserve the number of counting ticks as
-- far as possible.
--
-- However, we still allow the simplifier to increase or decrease
-- sharing, so in practice the actual number of ticks may vary, except
-- that we never change the value from zero to non-zero or vice versa.
tickishCounts :: Tickish id -> Bool
tickishCounts tick = case tick of
  ProfNote{}   -> profNoteCount tick
  HpcTick{}    -> True
  Breakpoint{} -> True
  _            -> False
-- | Specifies the scoping behaviour of ticks. This governs the
-- behaviour of ticks that care about the covered code and the cost
-- associated with it. Important for ticks relating to profiling.
data TickishScoping =
    -- | No scoping: The tick does not care about what code it
    -- covers. Transformations can freely move code inside as well as
    -- outside without any additional annotation obligations
    NoScope

    -- | Soft scoping: We want all code that is covered to stay
    -- covered.  Note that this scope type does not forbid
    -- transformations from happening, as long as all results of
    -- the transformations are still covered by this tick or a copy of
    -- it. For example
    --
    --   let x = tick<...> (let y = foo in bar) in baz
    --     ===>
    --   let x = tick<...> bar; y = tick<...> foo in baz
    --
    -- Is a valid transformation as far as "bar" and "foo" are
    -- concerned, because both still are scoped over by the tick.
    --
    -- Note though that one might object to the "let" not being
    -- covered by the tick any more. However, we are generally lax
    -- with this - constant costs don't matter too much, and given
    -- that the "let" was effectively merged we can view it as having
    -- lost its identity anyway.
    --
    -- Also note that this scoping behaviour allows floating a tick
    -- "upwards" in pretty much any situation. For example:
    --
    --   case foo of x -> tick<...> bar
    --     ==>
    --   tick<...> case foo of x -> bar
    --
    -- While this is always legal, we want to make a best effort to
    -- only make use of this where it exposes transformation
    -- opportunities.
  | SoftScope

    -- | Cost centre scoping: We don't want any costs to move to other
    -- cost-centre stacks. This means we not only want no code or cost
    -- to get moved out of their cost centres, but we also object to
    -- code getting associated with new cost-centre ticks - or
    -- changing the order in which they get applied.
    --
    -- A rule of thumb is that we don't want any code to gain new
    -- annotations. However, there are notable exceptions, for
    -- example:
    --
    --   let f = \y -> foo in tick<...> ... (f x) ...
    --     ==>
    --   tick<...> ... foo[x/y] ...
    --
    -- In-lining lambdas like this is always legal, because inlining a
    -- function does not change the cost-centre stack when the
    -- function is called.
  | CostCentreScope

  deriving (Eq)
-- | Returns the intended scoping rule for a Tickish
tickishScoped :: Tickish id -> TickishScoping
tickishScoped tick = case tick of
  ProfNote{ profNoteScope = scoped }
    | scoped    -> CostCentreScope
    | otherwise -> NoScope
  HpcTick{}     -> NoScope
  -- Breakpoints are scoped: eventually we're going to do call
  -- stacks, but also this helps prevent the simplifier from moving
  -- breakpoints around and changing their result type (see #1531).
  Breakpoint{}  -> CostCentreScope
  SourceNote{}  -> SoftScope
-- | Returns whether the tick scoping rule is at least as permissive
-- as the given scoping rule.
tickishScopesLike :: Tickish id -> TickishScoping -> Bool
tickishScopesLike tick scope = atLeastAsPermissive (tickishScoped tick) scope
  where
    -- Permissiveness order: NoScope > SoftScope > CostCentreScope
    atLeastAsPermissive NoScope         _               = True
    atLeastAsPermissive _               NoScope         = False
    atLeastAsPermissive SoftScope       _               = True
    atLeastAsPermissive _               SoftScope       = False
    atLeastAsPermissive CostCentreScope _               = True
-- | Returns @True@ for ticks that can be floated upwards easily even
-- where it might change execution counts, such as:
--
--   Just (tick<...> foo)
--     ==>
--   tick<...> (Just foo)
--
-- This is a combination of @tickishSoftScope@ and
-- @tickishCounts@. Note that in principle splittable ticks can become
-- floatable using @mkNoTick@ -- even though there's currently no
-- tickish for which that is the case.
tickishFloatable :: Tickish id -> Bool
tickishFloatable tick =
  not (tickishCounts tick) && tick `tickishScopesLike` SoftScope
-- | Returns @True@ for a tick that is both counting /and/ scoping and
-- can be split into its (tick, scope) parts using 'mkNoScope' and
-- 'mkNoTick' respectively.
tickishCanSplit :: Tickish id -> Bool
tickishCanSplit (ProfNote { profNoteCount = counts, profNoteScope = scopes })
  = counts && scopes
tickishCanSplit _ = False
-- | Strip the counting semantics from a tick, keeping only its
-- scoping behaviour.  A no-op on ticks that don't count; panics on a
-- counting tick that cannot be split (see 'tickishCanSplit').
-- NB: the guards of the first equation fall through to the later
-- equations when neither condition holds, so the equation order matters.
mkNoCount :: Tickish id -> Tickish id
mkNoCount n | not (tickishCounts n)   = n
            | not (tickishCanSplit n) = panic "mkNoCount: Cannot split!"
mkNoCount n@ProfNote{}                = n {profNoteCount = False}
mkNoCount _                           = panic "mkNoCount: Undefined split!"
-- | Strip the scoping semantics from a tick, keeping only its
-- counting behaviour.  A no-op on unscoped ticks; panics on a scoped
-- tick that cannot be split (see 'tickishCanSplit').
-- NB: the guards of the first equation fall through to the later
-- equations when neither condition holds, so the equation order matters.
mkNoScope :: Tickish id -> Tickish id
mkNoScope n | tickishScoped n == NoScope = n
            | not (tickishCanSplit n)    = panic "mkNoScope: Cannot split!"
mkNoScope n@ProfNote{}                   = n {profNoteScope = False}
mkNoScope _                              = panic "mkNoScope: Undefined split!"
-- | Return @True@ if this source annotation compiles to some backend
-- code. Without this flag, the tickish is seen as a simple annotation
-- that does not have any associated evaluation code.
--
-- What this means is that we are allowed to disregard the tick if doing
-- so means that we can skip generating any code in the first place. A
-- typical example is top-level bindings:
--
--   foo = tick<...> \y -> ...
--     ==>
--   foo = \y -> tick<...> ...
--
-- Here there is just no operational difference between the first and
-- the second version. Therefore code generation should simply
-- translate the code as if it found the latter.
tickishIsCode :: Tickish id -> Bool
tickishIsCode tick = case tick of
  SourceNote{} -> False
  _            -> True   -- all the rest for now
-- | Governs the kind of expression that the tick gets placed on when
-- annotating for example using @mkTick@. If we find that we want to
-- put a tickish on an expression ruled out here, we try to float it
-- inwards until we find a suitable expression.
data TickishPlacement =
    -- | Place ticks exactly on run-time expressions. We can still
    -- move the tick through pure compile-time constructs such as
    -- other ticks, casts or type lambdas. This is the most
    -- restrictive placement rule for ticks, as all tickishes have in
    -- common that they want to track runtime processes. The only
    -- legal placement rule for counting ticks.
    PlaceRuntime

    -- | As @PlaceRuntime@, but we float the tick through all
    -- lambdas. This makes sense where there is little difference
    -- between annotating the lambda and annotating the lambda's code.
  | PlaceNonLam

    -- | In addition to floating through lambdas, cost-centre style
    -- tickishes can also be moved from constructors, non-function
    -- variables and literals. For example:
    --
    --   let x = scc<...> C (scc<...> y) (scc<...> 3) in ...
    --
    -- Neither the constructor application, the variable or the
    -- literal are likely to have any cost worth mentioning. And even
    -- if y names a thunk, the call would not care about the
    -- evaluation context. Therefore removing all annotations in the
    -- above example is safe.
  | PlaceCostCentre

  deriving (Eq)
-- | Placement behaviour we want for the ticks
tickishPlace :: Tickish id -> TickishPlacement
tickishPlace tick = case tick of
  ProfNote{}
    | profNoteCount tick -> PlaceRuntime
    | otherwise          -> PlaceCostCentre
  HpcTick{}              -> PlaceRuntime
  Breakpoint{}           -> PlaceRuntime
  SourceNote{}           -> PlaceNonLam
-- | Returns whether one tick "contains" the other one, therefore
-- making the second tick redundant.
tickishContains :: Eq b => Tickish b -> Tickish b -> Bool
tickishContains outer inner = case (outer, inner) of
  (SourceNote sp1 n1, SourceNote sp2 n2) -> n1 == n2 && sp1 `containsSpan` sp2
  _                                      -> outer == inner
{-
************************************************************************
* *
Orphans
* *
************************************************************************
-}
-- | Is this instance an orphan?  If it is not an orphan, contains an 'OccName'
-- witnessing the instance's non-orphanhood.
-- See Note [Orphans]
data IsOrphan
  = IsOrphan            -- ^ The instance/rule is an orphan
  | NotOrphan OccName   -- ^ The OccName 'n' witnesses the instance's non-orphanhood
                        -- In that case, the instance is fingerprinted as part
                        -- of the definition of 'n's definition
  deriving (Data, Typeable)
-- | Returns true if 'IsOrphan' is orphan.
isOrphan :: IsOrphan -> Bool
isOrphan orph = case orph of
  IsOrphan    -> True
  NotOrphan{} -> False
-- | Returns true if 'IsOrphan' is not an orphan.
notOrphan :: IsOrphan -> Bool
notOrphan orph = case orph of
  NotOrphan{} -> True
  IsOrphan    -> False
-- | Something (rule, instance) is related to all the Names in this
-- list. Choose one of them to be an "anchor" for the orphan.  We make
-- the choice deterministic to avoid gratuitous changes in the ABI
-- hash (Trac #4012). Specifically, we use lexicographic comparison of
-- OccName rather than comparing Uniques.
--
-- NB: 'minimum' uses Ord, and (Ord OccName) works lexicographically
-- (here 'minimum' is safe: it is only reached on a non-empty list)
chooseOrphanAnchor :: [Name] -> IsOrphan
chooseOrphanAnchor []          = IsOrphan
chooseOrphanAnchor local_names =
  NotOrphan (minimum (map nameOccName local_names))
-- Binary serialisation for 'IsOrphan'.  The encoding is a single tag
-- byte: 0 for 'IsOrphan'; 1 followed by the 'OccName' for 'NotOrphan'.
-- NB: this format is written into interface files, so changing it
-- affects interface-file compatibility.
instance Binary IsOrphan where
    put_ bh IsOrphan = putByte bh 0
    put_ bh (NotOrphan n) = do
        putByte bh 1
        put_ bh n
    get bh = do
        h <- getByte bh
        case h of
            0 -> return IsOrphan
            _ -> do
                n <- get bh
                return $ NotOrphan n
{-
Note [Orphans]
~~~~~~~~~~~~~~
Class instances, rules, and family instances are divided into orphans
and non-orphans. Roughly speaking, an instance/rule is an orphan if
its left hand side mentions nothing defined in this module. Orphan-hood
has two major consequences
* A module that contains orphans is called an "orphan module". If
the module being compiled depends (transitively) on an orphan
module M, then M.hi is read in regardless of whether M is otherwise
needed. This is to ensure that we don't miss any instance decls in
M. But it's painful, because it means we need to keep track of all
the orphan modules below us.
* A non-orphan is not finger-printed separately. Instead, for
fingerprinting purposes it is treated as part of the entity it
mentions on the LHS. For example
data T = T1 | T2
instance Eq T where ....
The instance (Eq T) is incorporated as part of T's fingerprint.
In contrast, orphans are all fingerprinted together in the
mi_orph_hash field of the ModIface.
See MkIface.addFingerprints.
Orphan-hood is computed
* For class instances:
when we make a ClsInst
(because it is needed during instance lookup)
* For rules and family instances:
when we generate an IfaceRule (MkIface.coreRuleToIfaceRule)
or IfaceFamInst (MkIface.instanceToIfaceInst)
-}
{-
************************************************************************
* *
\subsection{Transformation rules}
* *
************************************************************************
The CoreRule type and its friends are dealt with mainly in CoreRules,
but CoreFVs, Subst, PprCore, CoreTidy also inspect the representation.
-}
-- | Gathers a collection of 'CoreRule's. Maps (the name of) an 'Id' to its rules
type RuleBase = NameEnv [CoreRule]
        -- The rules are unordered;
        -- we sort out any overlaps on lookup
-- | A full rule environment which we can apply rules from.  Like a 'RuleBase',
-- but it also includes the set of visible orphans we use to filter out orphan
-- rules which are not visible (even though we can see them...)
data RuleEnv
    = RuleEnv { re_base          :: RuleBase   -- ^ rules, indexed by the head 'Id'
              , re_visible_orphs :: ModuleSet  -- ^ modules whose orphan rules are visible
              }
-- | Build a 'RuleEnv' from a rule base and the list of modules whose
-- orphan rules should be visible.
mkRuleEnv :: RuleBase -> [Module] -> RuleEnv
mkRuleEnv base vis_orphs =
  RuleEnv { re_base = base, re_visible_orphs = mkModuleSet vis_orphs }
-- | A 'RuleEnv' with no rules and no visible orphan modules.
emptyRuleEnv :: RuleEnv
emptyRuleEnv =
  RuleEnv { re_base = emptyNameEnv, re_visible_orphs = emptyModuleSet }
-- | A 'CoreRule' is:
--
-- * \"Local\" if the function it is a rule for is defined in the
--   same module as the rule itself.
--
-- * \"Orphan\" if nothing on the LHS is defined in the same module
--   as the rule itself
data CoreRule
  = Rule {
        ru_name :: RuleName,   -- ^ Name of the rule, for communication with the user
        ru_act  :: Activation, -- ^ When the rule is active

        -- Rough-matching stuff
        -- see comments with InstEnv.ClsInst( is_cls, is_rough )
        ru_fn    :: Name,          -- ^ Name of the 'Id.Id' at the head of this rule
        ru_rough :: [Maybe Name],  -- ^ Name at the head of each argument to the left hand side

        -- Proper-matching stuff
        -- see comments with InstEnv.ClsInst( is_tvs, is_tys )
        ru_bndrs :: [CoreBndr],    -- ^ Variables quantified over
        ru_args  :: [CoreExpr],    -- ^ Left hand side arguments

        -- And the right-hand side
        ru_rhs :: CoreExpr,        -- ^ Right hand side of the rule
                                   -- Occurrence info is guaranteed correct
                                   -- See Note [OccInfo in unfoldings and rules]

        -- Locality
        ru_auto :: Bool,   -- ^ @True@  <=> this rule is auto-generated
                           --   (notably by Specialise or SpecConstr)
                           --   @False@ <=> generated at the user's behest
                           -- See Note [Trimming auto-rules] in TidyPgm
                           -- for the sole purpose of this field.

        ru_origin :: !Module,   -- ^ 'Module' the rule was defined in, used
                                -- to test if we should see an orphan rule.

        ru_orphan :: !IsOrphan, -- ^ Whether or not the rule is an orphan.

        ru_local :: Bool   -- ^ @True@ iff the fn at the head of the rule is
                           -- defined in the same module as the rule
                           -- and is not an implicit 'Id' (like a record selector,
                           -- class operation, or data constructor).  This
                           -- is different from 'ru_orphan', where a rule
                           -- can avoid being an orphan if *any* Name in
                           -- LHS of the rule was defined in the same
                           -- module as the rule.
    }

  -- | Built-in rules are used for constant folding
  -- and suchlike.  They have no free variables.
  -- A built-in rule is always visible (there is no such thing as
  -- an orphan built-in rule.)
  | BuiltinRule {
        ru_name  :: RuleName,  -- ^ As above
        ru_fn    :: Name,      -- ^ As above
        ru_nargs :: Int,       -- ^ Number of arguments that 'ru_try' consumes,
                               -- if it fires, including type arguments
        ru_try   :: RuleFun
                -- ^ This function does the rewrite.  If it is given too many
                -- arguments, it simply discards them; the returned 'CoreExpr'
                -- is just the rewrite of 'ru_fn' applied to the first 'ru_nargs' args
    }
                -- See Note [Extra args in rule matching] in Rules.hs
-- | The type of the function carried by a 'BuiltinRule': given the
-- arguments the rule head is applied to, produce the rewritten
-- expression (or 'Nothing' if the rule does not fire).
type RuleFun = DynFlags -> InScopeEnv -> Id -> [CoreExpr] -> Maybe CoreExpr

-- | The variables in scope, paired with a way to unfold an 'Id'.
type InScopeEnv = (InScopeSet, IdUnfoldingFun)

type IdUnfoldingFun = Id -> Unfolding
        -- A function that embodies how to unfold an Id if you need
        -- to do that in the Rule.  The reason we need to pass this info in
        -- is that whether an Id is unfoldable depends on the simplifier phase
-- | Is this one of the compiler's built-in (e.g. constant-folding) rules?
isBuiltinRule :: CoreRule -> Bool
isBuiltinRule rule = case rule of
                       BuiltinRule {} -> True
                       _              -> False
-- | Was this rule generated automatically (notably by Specialise or
-- SpecConstr)?  Built-in rules never count as auto-generated.
isAutoRule :: CoreRule -> Bool
isAutoRule rule = case rule of
                    BuiltinRule {}          -> False
                    Rule { ru_auto = auto } -> auto
-- | The number of arguments the 'ru_fn' must be applied
-- to before the rule can match on it
ruleArity :: CoreRule -> Int
ruleArity rule = case rule of
                   BuiltinRule { ru_nargs = n } -> n
                   Rule { ru_args = lhs_args }  -> length lhs_args
-- | The user-visible name of the rule.
ruleName :: CoreRule -> RuleName
ruleName rule = ru_name rule

-- | The phases in which the rule may fire; built-in rules are always active.
ruleActivation :: CoreRule -> Activation
ruleActivation rule = case rule of
                        BuiltinRule {}        -> AlwaysActive
                        Rule { ru_act = act } -> act

-- | The 'Name' of the 'Id.Id' at the head of the rule left hand side
ruleIdName :: CoreRule -> Name
ruleIdName rule = ru_fn rule

-- | Is the function at the head of the rule defined in the same module
-- as the rule itself?  NOTE(review): the 'ru_local' field exists only on
-- the 'Rule' constructor, so this selector fails at runtime on a
-- 'BuiltinRule'.
isLocalRule :: CoreRule -> Bool
isLocalRule rule = ru_local rule

-- | Set the 'Name' of the 'Id.Id' at the head of the rule left hand side
setRuleIdName :: Name -> CoreRule -> CoreRule
setRuleIdName nm rule = rule { ru_fn = nm }
{-
************************************************************************
* *
\subsection{Vectorisation declarations}
* *
************************************************************************
Representation of desugared vectorisation declarations that are fed to the vectoriser (via
'ModGuts').
-}
-- | A desugared vectorisation declaration, fed to the vectoriser via 'ModGuts'.
data CoreVect = Vect      Id   CoreExpr  -- an Id paired with its vectorised expression
              | NoVect    Id             -- an Id that must not be vectorised
              | VectType  Bool TyCon (Maybe TyCon)
              | VectClass TyCon                    -- class tycon
              | VectInst  Id                       -- instance dfun (always SCALAR)  !!!FIXME: should be superfluous now
{-
************************************************************************
* *
Unfoldings
* *
************************************************************************
The @Unfolding@ type is declared here to avoid numerous loops
-}
-- | Records the /unfolding/ of an identifier, which is approximately the form the
-- identifier would have if we substituted its definition in for the identifier.
-- This type should be treated as abstract everywhere except in "CoreUnfold"
data Unfolding
  = NoUnfolding         -- ^ We have no information about the unfolding

  | OtherCon [AltCon]   -- ^ It ain't one of these constructors.
                        -- @OtherCon xs@ also indicates that something has been evaluated
                        -- and hence there's no point in re-evaluating it.
                        -- @OtherCon []@ is used even for non-data-type values
                        -- to indicate evaluated-ness.  Notably:
                        --
                        -- > data C = C !(Int -> Int)
                        -- > case x of { C f -> ... }
                        --
                        -- Here, @f@ gets an @OtherCon []@ unfolding.

  | DFunUnfolding {     -- The Unfolding of a DFunId
                        -- See Note [DFun unfoldings]
                        --     df = /\a1..am. \d1..dn. MkD t1 .. tk
                        --                                 (op1 a1..am d1..dn)
                        --                                 (op2 a1..am d1..dn)
        df_bndrs :: [Var],      -- The bound variables [a1..m],[d1..dn]
        df_con   :: DataCon,    -- The dictionary data constructor (never a newtype datacon)
        df_args  :: [CoreExpr]  -- Args of the data con: types, superclasses and methods,
    }                           -- in positional order

  | CoreUnfolding {             -- An unfolding for an Id with no pragma,
                                -- or perhaps a NOINLINE pragma
                                -- (For NOINLINE, the phase, if any, is in the
                                -- InlinePragInfo for this Id.)
        uf_tmpl         :: CoreExpr,          -- Template; occurrence info is correct
        uf_src          :: UnfoldingSource,   -- Where the unfolding came from
        uf_is_top       :: Bool,          -- True <=> top level binding
        uf_is_value     :: Bool,          -- exprIsHNF template (cached); it is ok to discard
                                          --   a `seq` on this variable
        uf_is_conlike   :: Bool,          -- True <=> applicn of constructor or CONLIKE function
                                          --   Cached version of exprIsConLike
        uf_is_work_free :: Bool,          -- True <=> doesn't waste (much) work to expand
                                          --   inside an inlining
                                          --   Cached version of exprIsCheap
        uf_expandable   :: Bool,          -- True <=> can expand in RULE matching
                                          --   Cached version of exprIsExpandable
        uf_guidance     :: UnfoldingGuidance  -- Tells about the *size* of the template.
    }
  -- ^ An unfolding with redundant cached information. Parameters:
  --
  --  uf_tmpl: Template used to perform unfolding;
  --           NB: Occurrence info is guaranteed correct:
  --               see Note [OccInfo in unfoldings and rules]
  --
  --  uf_is_top: Is this a top level binding?
  --
  --  uf_is_value: 'exprIsHNF' template (cached); it is ok to discard a 'seq' on
  --     this variable
  --
  --  uf_is_work_free:  Does this waste only a little work if we expand it inside an inlining?
  --     Basically this is a cached version of 'exprIsWorkFree'
  --
  --  uf_guidance: Tells us about the /size/ of the unfolding template
------------------------------------------------

-- | Where an unfolding came from.  This determines whether the simplifier
-- may overwrite the stored template (see 'isStableSource').
data UnfoldingSource
  = -- See also Note [Historical note: unfoldings for wrappers]

    InlineRhs          -- The current rhs of the function
                       -- Replace uf_tmpl each time around

  | InlineStable       -- From an INLINE or INLINABLE pragma
                       --   INLINE     if guidance is UnfWhen
                       --   INLINABLE  if guidance is UnfIfGoodArgs/UnfoldNever
                       -- (well, technically an INLINABLE might be made
                       -- UnfWhen if it was small enough, and then
                       -- it will behave like INLINE outside the current
                       -- module, but that is the way automatic unfoldings
                       -- work so it is consistent with the intended
                       -- meaning of INLINABLE).
                       --
                       -- uf_tmpl may change, but only as a result of
                       -- gentle simplification, it doesn't get updated
                       -- to the current RHS during compilation as with
                       -- InlineRhs.
                       --
                       -- See Note [InlineRules]

  | InlineCompulsory   -- Something that *has* no binding, so you *must* inline it
                       -- Only a few primop-like things have this property
                       -- (see MkId.hs, calls to mkCompulsoryUnfolding).
                       -- Inline absolutely always, however boring the context.
-- | 'UnfoldingGuidance' says when unfolding should take place
data UnfoldingGuidance
  = UnfWhen {   -- Inline without thinking about the *size* of the uf_tmpl
                -- Used (a) for small *and* cheap unfoldings
                --      (b) for INLINE functions
                -- See Note [INLINE for small functions] in CoreUnfold
      ug_arity     :: Arity,    -- Number of value arguments expected
      ug_unsat_ok  :: Bool,     -- True <=> ok to inline even if unsaturated
      ug_boring_ok :: Bool      -- True <=> ok to inline even if the context is boring
                                -- So True,True means "always"
    }

  | UnfIfGoodArgs {     -- Arose from a normal Id; the info here is the
                        -- result of a simple analysis of the RHS
      ug_args :: [Int], -- Discount if the argument is evaluated.
                        -- (i.e., a simplification will definitely
                        -- be possible).  One elt of the list per *value* arg.
      ug_size :: Int,   -- The "size" of the unfolding.
      ug_res  :: Int    -- Scrutinee discount: the discount to subtract if the thing is in
    }                   -- a context (case (thing args) of ...),
                        -- (where there are the right number of arguments.)

  | UnfNever        -- The RHS is big, so don't inline it
  deriving (Eq)
{-
Note [Historical note: unfoldings for wrappers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used to have a nice clever scheme in interface files for
wrappers. A wrapper's unfolding can be reconstructed from its worker's
id and its strictness. This decreased .hi file size (sometimes
significantly, for modules like GHC.Classes with many high-arity w/w
splits) and had a slight corresponding effect on compile times.
However, when we added the second demand analysis, this scheme led to
some Core lint errors. The second analysis could change the strictness
signatures, which sometimes resulted in a wrapper's regenerated
unfolding applying the wrapper to too many arguments.
Instead of repairing the clever .hi scheme, we abandoned it in favor
of simplicity. The .hi sizes are usually insignificant (excluding the
+1M for base libraries), and compile time barely increases (~+1% for
nofib). The nicer upshot is that the UnfoldingSource no longer mentions
an Id, so, eg, substitutions need not traverse them.
Note [DFun unfoldings]
~~~~~~~~~~~~~~~~~~~~~~
The Arity in a DFunUnfolding is total number of args (type and value)
that the DFun needs to produce a dictionary. That's not necessarily
related to the ordinary arity of the dfun Id, esp if the class has
one method, so the dictionary is represented by a newtype. Example
class C a where { op :: a -> Int }
instance C a -> C [a] where op xs = op (head xs)
The instance translates to
$dfCList :: forall a. C a => C [a] -- Arity 2!
$dfCList = /\a.\d. $copList {a} d |> co
$copList :: forall a. C a => [a] -> Int -- Arity 2!
$copList = /\a.\d.\xs. op {a} d (head xs)
Now we might encounter (op (dfCList {ty} d) a1 a2)
and we want the (op (dfList {ty} d)) rule to fire, because $dfCList
has all its arguments, even though its (value) arity is 2. That's
why we record the number of expected arguments in the DFunUnfolding.
Note that although it's an Arity, it's most convenient for it to give
the *total* number of arguments, both type and value. See the use
site in exprIsConApp_maybe.
-}
-- Constants for the UnfWhen constructor

-- Mnemonic names for the 'ug_unsat_ok' field: may the unfolding be
-- inlined at an unsaturated call site?
needSaturated, unSaturatedOk :: Bool
needSaturated = False
unSaturatedOk = True

-- Mnemonic names for the 'ug_boring_ok' field: may the unfolding be
-- inlined when the calling context is boring?
boringCxtNotOk, boringCxtOk :: Bool
boringCxtOk = True
boringCxtNotOk = False
------------------------------------------------
noUnfolding :: Unfolding
-- ^ There is no known 'Unfolding'
evaldUnfolding :: Unfolding
-- ^ This unfolding marks the associated thing as being evaluated

noUnfolding = NoUnfolding
evaldUnfolding = OtherCon []

-- | @mkOtherCon cs@: the thing is evaluated and is none of the
-- constructors @cs@; see the 'OtherCon' documentation.
mkOtherCon :: [AltCon] -> Unfolding
mkOtherCon = OtherCon
-- | Should the unfolding template be kept as-is rather than replaced
-- by the simplifier?  True for INLINE/INLINABLE and compulsory
-- unfoldings, False for a plain right-hand side.
isStableSource :: UnfoldingSource -> Bool
isStableSource src = case src of
                       InlineCompulsory -> True
                       InlineStable     -> True
                       InlineRhs        -> False
-- | Retrieves the template of an unfolding: panics if none is known
-- (the 'uf_tmpl' selector fails on non-'CoreUnfolding' values).
unfoldingTemplate :: Unfolding -> CoreExpr
unfoldingTemplate unf = uf_tmpl unf

-- | Retrieves the template of an unfolding if possible.
-- maybeUnfoldingTemplate is used mainly when specialising, and we do
-- want to specialise DFuns, so it's important to return a template
-- for DFunUnfoldings (rebuilt from the data-con worker and its args).
maybeUnfoldingTemplate :: Unfolding -> Maybe CoreExpr
maybeUnfoldingTemplate unf
  = case unf of
      CoreUnfolding { uf_tmpl = expr }
        -> Just expr
      DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args }
        -> Just (mkLams bndrs (mkApps (Var (dataConWorkId con)) args))
      _ -> Nothing
-- | The constructors that the unfolding could never be:
-- returns @[]@ if no information is available
otherCons :: Unfolding -> [AltCon]
otherCons unf = case unf of
                  OtherCon cons -> cons
                  _             -> []
-- | Determines if it is certainly the case that the unfolding will
-- yield a value (something in HNF): returns @False@ if unsure.
-- In particular, returns False for OtherCon.
isValueUnfolding :: Unfolding -> Bool
isValueUnfolding unf = case unf of
                         CoreUnfolding { uf_is_value = evald } -> evald
                         _                                     -> False

-- | Determines if it is possibly the case that the unfolding will
-- yield a value. Unlike 'isValueUnfolding' it returns @True@
-- for 'OtherCon'.
isEvaldUnfolding :: Unfolding -> Bool
isEvaldUnfolding unf = case unf of
                         OtherCon _                            -> True
                         CoreUnfolding { uf_is_value = evald } -> evald
                         _                                     -> False

-- | @True@ if the unfolding is a constructor application, the application
-- of a CONLIKE function or 'OtherCon'
isConLikeUnfolding :: Unfolding -> Bool
isConLikeUnfolding unf = case unf of
                           OtherCon _                                -> True
                           CoreUnfolding { uf_is_conlike = conlike } -> conlike
                           _                                         -> False

-- | Is the thing we will unfold into certainly cheap?
-- (Cached 'exprIsCheap' of the template.)
isCheapUnfolding :: Unfolding -> Bool
isCheapUnfolding unf = case unf of
                         CoreUnfolding { uf_is_work_free = wf } -> wf
                         _                                      -> False

-- | Can the template be expanded during RULE matching?
-- (Cached 'exprIsExpandable' of the template.)
isExpandableUnfolding :: Unfolding -> Bool
isExpandableUnfolding unf = case unf of
                              CoreUnfolding { uf_expandable = expable } -> expable
                              _                                         -> False
expandUnfolding_maybe :: Unfolding -> Maybe CoreExpr
-- Expand an expandable unfolding; this is used in rule matching
-- See Note [Expanding variables] in Rules.hs
-- The key point here is that CONLIKE things can be expanded
expandUnfolding_maybe unf
  = case unf of
      CoreUnfolding { uf_expandable = True, uf_tmpl = rhs } -> Just rhs
      _                                                     -> Nothing
hasStableCoreUnfolding_maybe :: Unfolding -> Maybe Bool
-- Just True  <=> has stable inlining, very keen to inline (eg. INLINE pragma)
-- Just False <=> has stable inlining, open to inlining it (eg. INLINEABLE pragma)
-- Nothing    <=> not stable, or cannot inline it anyway
hasStableCoreUnfolding_maybe unf
  = case unf of
      CoreUnfolding { uf_src = src, uf_guidance = guidance }
        | isStableSource src          -- unstable sources fall through to Nothing
        -> case guidance of
             UnfWhen {}       -> Just True
             UnfIfGoodArgs {} -> Just False
             UnfNever         -> Nothing
      _ -> Nothing
-- | Must this thing always be inlined (it has no binding of its own)?
isCompulsoryUnfolding :: Unfolding -> Bool
isCompulsoryUnfolding unf = case unf of
                              CoreUnfolding { uf_src = InlineCompulsory } -> True
                              _                                           -> False

isStableUnfolding :: Unfolding -> Bool
-- True of unfoldings that should not be overwritten
-- by a CoreUnfolding for the RHS of a let-binding
isStableUnfolding unf = case unf of
                          CoreUnfolding { uf_src = src } -> isStableSource src
                          DFunUnfolding {}               -> True
                          _                              -> False

isClosedUnfolding :: Unfolding -> Bool          -- No free variables
isClosedUnfolding unf = case unf of
                          CoreUnfolding {} -> False
                          DFunUnfolding {} -> False
                          _                -> True

-- | Only returns False if there is no unfolding information available at all
hasSomeUnfolding :: Unfolding -> Bool
hasSomeUnfolding unf = case unf of
                         NoUnfolding -> False
                         _           -> True

-- | Does the guidance say "never inline this"?
neverUnfoldGuidance :: UnfoldingGuidance -> Bool
neverUnfoldGuidance guidance = case guidance of
                                 UnfNever -> True
                                 _        -> False

-- | Could this unfolding ever be inlined (has a template whose
-- guidance is not 'UnfNever')?
canUnfold :: Unfolding -> Bool
canUnfold unf = case unf of
                  CoreUnfolding { uf_guidance = g } -> not (neverUnfoldGuidance g)
                  _                                 -> False
{-
Note [InlineRules]
~~~~~~~~~~~~~~~~~
When you say
{-# INLINE f #-}
f x = <rhs>
you intend that calls (f e) are replaced by <rhs>[e/x] So we
should capture (\x.<rhs>) in the Unfolding of 'f', and never meddle
with it. Meanwhile, we can optimise <rhs> to our heart's content,
leaving the original unfolding intact in Unfolding of 'f'. For example
all xs = foldr (&&) True xs
any p = all . map p {-# INLINE any #-}
We optimise any's RHS fully, but leave the InlineRule saying "all . map p",
which deforests well at the call site.
So INLINE pragma gives rise to an InlineRule, which captures the original RHS.
Moreover, it's only used when 'f' is applied to the
specified number of arguments; that is, the number of argument on
the LHS of the '=' sign in the original source definition.
For example, (.) is now defined in the libraries like this
{-# INLINE (.) #-}
(.) f g = \x -> f (g x)
so that it'll inline when applied to two arguments. If 'x' appeared
on the left, thus
(.) f g x = f (g x)
it'd only inline when applied to three arguments. This slightly-experimental
change was requested by Roman, but it seems to make sense.
See also Note [Inlining an InlineRule] in CoreUnfold.
Note [OccInfo in unfoldings and rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In unfoldings and rules, we guarantee that the template is occ-analysed,
so that the occurrence info on the binders is correct. This is important,
because the Simplifier does not re-analyse the template when using it. If
the occurrence info is wrong
  - We may get more simplifier iterations than necessary, because
once-occ info isn't there
- More seriously, we may get an infinite loop if there's a Rec
without a loop breaker marked
************************************************************************
* *
AltCon
* *
************************************************************************
-}
-- The Ord is needed for the FiniteMap used in the lookForConstructor
-- in SimplEnv. If you declared that lookForConstructor *ignores*
-- constructor-applications with LitArg args, then you could get
-- rid of this Ord.

instance Outputable AltCon where
  ppr alt = case alt of
              DataAlt dc -> ppr dc
              LitAlt lit -> ppr lit
              DEFAULT    -> text "__DEFAULT"
-- | Order two case alternatives by their 'AltCon' alone (see 'cmpAltCon').
cmpAlt :: (AltCon, a, b) -> (AltCon, a, b) -> Ordering
cmpAlt (con1, _, _) (con2, _, _) = cmpAltCon con1 con2

-- | Strict "less than" on case alternatives, derived from 'cmpAlt'.
ltAlt :: (AltCon, a, b) -> (AltCon, a, b) -> Bool
ltAlt alt1 alt2 = cmpAlt alt1 alt2 == LT
cmpAltCon :: AltCon -> AltCon -> Ordering
-- ^ Compares 'AltCon's within a single list of alternatives.
-- DEFAULT sorts first; data alternatives are ordered by constructor tag,
-- literal alternatives by the literal ordering.  Comparing a 'DataAlt'
-- against a 'LitAlt' should never happen within one alternative list,
-- hence the warning in the catch-all clause.
cmpAltCon DEFAULT      DEFAULT     = EQ
cmpAltCon DEFAULT      _           = LT
cmpAltCon (DataAlt d1) (DataAlt d2) = dataConTag d1 `compare` dataConTag d2
cmpAltCon (DataAlt _)  DEFAULT      = GT
cmpAltCon (LitAlt  l1) (LitAlt  l2) = l1 `compare` l2
cmpAltCon (LitAlt _)   DEFAULT      = GT
cmpAltCon con1 con2 = WARN( True, text "Comparing incomparable AltCons" <+>
                                  ppr con1 <+> ppr con2 )
                      LT
{-
************************************************************************
* *
\subsection{Useful synonyms}
* *
************************************************************************
Note [CoreProgram]
~~~~~~~~~~~~~~~~~~
The top level bindings of a program, a CoreProgram, are represented as
a list of CoreBind
* Later bindings in the list can refer to earlier ones, but not vice
versa. So this is OK
NonRec { x = 4 }
Rec { p = ...q...x...
; q = ...p...x }
Rec { f = ...p..x..f.. }
NonRec { g = ..f..q...x.. }
But it would NOT be ok for 'f' to refer to 'g'.
* The occurrence analyser does strongly-connected component analysis
on each Rec binding, and splits it into a sequence of smaller
bindings where possible. So the program typically starts life as a
single giant Rec, which is then dependency-analysed into smaller
chunks.
-}
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
type CoreProgram = [CoreBind]   -- See Note [CoreProgram]

-- | The common case for the type of binders and variables when
-- we are manipulating the Core language within GHC
type CoreBndr = Var
-- | Expressions where binders are 'CoreBndr's
type CoreExpr = Expr CoreBndr
-- | Argument expressions where binders are 'CoreBndr's
type CoreArg  = Arg  CoreBndr
-- | Binding groups where binders are 'CoreBndr's
type CoreBind = Bind CoreBndr
-- | Case alternatives where binders are 'CoreBndr's
type CoreAlt  = Alt  CoreBndr
{-
************************************************************************
* *
\subsection{Tagging}
* *
************************************************************************
-}
-- | Binders are /tagged/ with a t
data TaggedBndr t = TB CoreBndr t       -- TB for "tagged binder"

type TaggedBind t = Bind (TaggedBndr t)
type TaggedExpr t = Expr (TaggedBndr t)
type TaggedArg  t = Arg  (TaggedBndr t)
type TaggedAlt  t = Alt  (TaggedBndr t)

instance Outputable b => Outputable (TaggedBndr b) where
  -- Rendered as <binder,tag>
  ppr (TB b l) = char '<' <> ppr b <> comma <> ppr l <> char '>'

instance Outputable b => OutputableBndr (TaggedBndr b) where
  pprBndr _ b = ppr b   -- Simple
  pprInfixOcc  b = ppr b
  pprPrefixOcc b = ppr b
-- | Strip the tags from every binder in an expression.
deTagExpr :: TaggedExpr t -> CoreExpr
deTagExpr expr
  = case expr of
      Var v                       -> Var v
      Lit l                       -> Lit l
      Type ty                     -> Type ty
      Coercion co                 -> Coercion co
      App fun arg                 -> App (deTagExpr fun) (deTagExpr arg)
      Lam (TB b _) body           -> Lam b (deTagExpr body)
      Let bind body               -> Let (deTagBind bind) (deTagExpr body)
      Case scrut (TB b _) ty alts -> Case (deTagExpr scrut) b ty (map deTagAlt alts)
      Tick t e                    -> Tick t (deTagExpr e)
      Cast e co                   -> Cast (deTagExpr e) co

-- | Strip the tags from every binder in a binding group.
deTagBind :: TaggedBind t -> CoreBind
deTagBind (NonRec (TB b _) rhs) = NonRec b (deTagExpr rhs)
deTagBind (Rec pairs)           = Rec [ (b, deTagExpr rhs) | (TB b _, rhs) <- pairs ]

-- | Strip the tags from every binder in a case alternative.
deTagAlt :: TaggedAlt t -> CoreAlt
deTagAlt (con, bndrs, rhs) = (con, [ b | TB b _ <- bndrs ], deTagExpr rhs)
{-
************************************************************************
* *
\subsection{Core-constructing functions with checking}
* *
************************************************************************
-}
-- | Apply a list of argument expressions to a function expression in a nested fashion. Prefer to
-- use 'MkCore.mkCoreApps' if possible
mkApps    :: Expr b -> [Arg b]    -> Expr b
-- | Apply a list of type argument expressions to a function expression in a nested fashion
mkTyApps  :: Expr b -> [Type]     -> Expr b
-- | Apply a list of coercion argument expressions to a function expression in a nested fashion
mkCoApps  :: Expr b -> [Coercion] -> Expr b
-- | Apply a list of type or value variables to a function expression in a nested fashion
mkVarApps :: Expr b -> [Var]      -> Expr b
-- | Apply a list of argument expressions to a data constructor in a nested fashion. Prefer to
-- use 'MkCore.mkCoreConApps' if possible
mkConApp  :: DataCon -> [Arg b]   -> Expr b

mkApps    = foldl App
mkCoApps  = foldl (\fun co -> App fun (Coercion co))
mkVarApps = foldl (\fun v  -> App fun (varToCoreExpr v))

mkConApp con = mkApps (Var (dataConWorkId con))

mkTyApps fun tys = foldl app fun tys
  where
    -- A "type" argument that is secretly a coercion must be wrapped
    -- in a Coercion node rather than a Type node.
    app e ty
      | Just co <- isCoercionTy_maybe ty = App e (Coercion co)
      | otherwise                        = App e (Type ty)
-- | Apply a data constructor worker first to type arguments, then to
-- value variables (converted via 'varToCoreExpr').
mkConApp2 :: DataCon -> [Type] -> [Var] -> Expr b
mkConApp2 con tys arg_ids
  = mkApps (mkApps (Var (dataConWorkId con)) (map Type tys))
           (map varToCoreExpr arg_ids)
-- | Create a machine integer literal expression of type @Int#@ from an @Integer@.
-- If you want an expression of type @Int@ use 'MkCore.mkIntExpr'
mkIntLit :: DynFlags -> Integer -> Expr b
mkIntLit dflags = Lit . mkMachInt dflags

-- | Create a machine integer literal expression of type @Int#@ from an @Int@.
-- If you want an expression of type @Int@ use 'MkCore.mkIntExpr'
mkIntLitInt :: DynFlags -> Int -> Expr b
mkIntLitInt dflags = Lit . mkMachInt dflags . toInteger

-- | Create a machine word literal expression of type @Word#@ from an @Integer@.
-- If you want an expression of type @Word@ use 'MkCore.mkWordExpr'
mkWordLit :: DynFlags -> Integer -> Expr b
mkWordLit dflags = Lit . mkMachWord dflags

-- | Create a machine word literal expression of type @Word#@ from a @Word@.
-- If you want an expression of type @Word@ use 'MkCore.mkWordExpr'
mkWordLitWord :: DynFlags -> Word -> Expr b
mkWordLitWord dflags = Lit . mkMachWord dflags . toInteger

-- | Create a machine literal from a @Word64@.
mkWord64LitWord64 :: Word64 -> Expr b
mkWord64LitWord64 = Lit . mkMachWord64 . toInteger

-- | Create a machine literal from an @Int64@.
mkInt64LitInt64 :: Int64 -> Expr b
mkInt64LitInt64 = Lit . mkMachInt64 . toInteger

-- | Create a machine character literal expression of type @Char#@.
-- If you want an expression of type @Char@ use 'MkCore.mkCharExpr'
mkCharLit :: Char -> Expr b
mkCharLit = Lit . mkMachChar

-- | Create a machine string literal expression of type @Addr#@.
-- If you want an expression of type @String@ use 'MkCore.mkStringExpr'
mkStringLit :: String -> Expr b
mkStringLit = Lit . mkMachString

-- | Create a machine single precision literal expression of type @Float#@ from a @Rational@.
-- If you want an expression of type @Float@ use 'MkCore.mkFloatExpr'
mkFloatLit :: Rational -> Expr b
mkFloatLit = Lit . mkMachFloat

-- | Create a machine single precision literal expression of type @Float#@ from a @Float@.
-- If you want an expression of type @Float@ use 'MkCore.mkFloatExpr'
mkFloatLitFloat :: Float -> Expr b
mkFloatLitFloat = Lit . mkMachFloat . toRational

-- | Create a machine double precision literal expression of type @Double#@ from a @Rational@.
-- If you want an expression of type @Double@ use 'MkCore.mkDoubleExpr'
mkDoubleLit :: Rational -> Expr b
mkDoubleLit = Lit . mkMachDouble

-- | Create a machine double precision literal expression of type @Double#@ from a @Double@.
-- If you want an expression of type @Double@ use 'MkCore.mkDoubleExpr'
mkDoubleLitDouble :: Double -> Expr b
mkDoubleLitDouble = Lit . mkMachDouble . toRational
-- | Bind all supplied binding groups over an expression in a nested let expression. Assumes
-- that the rhs satisfies the let/app invariant. Prefer to use 'MkCore.mkCoreLets' if
-- possible, which does guarantee the invariant
mkLets :: [Bind b] -> Expr b -> Expr b
mkLets bs e = foldr Let e bs

-- | Bind all supplied binders over an expression in a nested lambda expression. Prefer to
-- use 'MkCore.mkCoreLams' if possible
mkLams :: [b] -> Expr b -> Expr b
mkLams vs e = foldr Lam e vs

-- | Create a binding group where a type variable is bound to a type. Per "CoreSyn#type_let",
-- this can only be used to bind something in a non-recursive @let@ expression
mkTyBind :: TyVar -> Type -> CoreBind
mkTyBind tv ty = NonRec tv (Type ty)

-- | Create a binding group where a coercion variable is bound to a coercion.
-- Per "CoreSyn#type_let", this can only be used to bind something in a
-- non-recursive @let@ expression
mkCoBind :: CoVar -> Coercion -> CoreBind
mkCoBind cv co = NonRec cv (Coercion co)
-- | Convert a binder into either a 'Var' or 'Type' 'Expr' appropriately
-- (type variables become 'Type', coercion variables become 'Coercion',
-- ordinary 'Id's become 'Var')
varToCoreExpr :: CoreBndr -> Expr b
varToCoreExpr v | isTyVar v = Type (mkTyVarTy v)
                | isCoVar v = Coercion (mkCoVarCo v)
                | otherwise = ASSERT( isId v ) Var v

-- | 'varToCoreExpr' applied to every variable in the list.
varsToCoreExprs :: [CoreBndr] -> [Expr b]
varsToCoreExprs vs = map varToCoreExpr vs
{-
************************************************************************
* *
Getting a result type
* *
************************************************************************
These are defined here to avoid a module loop between CoreUtils and CoreFVs
-}
applyTypeToArg :: Type -> CoreExpr -> Type
-- ^ Determines the type resulting from applying an expression with given type
-- to a given argument expression
applyTypeToArg fun_ty arg = piResultTy fun_ty (exprToType arg)

-- | If the expression is a 'Type', converts. Otherwise,
-- panics. NB: This does /not/ convert 'Coercion' to 'CoercionTy'.
exprToType :: CoreExpr -> Type
exprToType e = case e of
                 Type ty -> ty
                 _       -> pprPanic "exprToType" empty

-- | If the expression is a 'Coercion', converts.
exprToCoercion_maybe :: CoreExpr -> Maybe Coercion
exprToCoercion_maybe e = case e of
                           Coercion co -> Just co
                           _           -> Nothing
{-
************************************************************************
* *
\subsection{Simple access functions}
* *
************************************************************************
-}
-- | Extract every variable bound by this binding group
bindersOf :: Bind b -> [b]
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
bindersOf (NonRec binder _) = [binder]
bindersOf (Rec pairs)       = map fst pairs

-- | 'bindersOf' applied to a list of binding groups
bindersOfBinds :: [Bind b] -> [b]
bindersOfBinds = concatMap bindersOf

-- | The right-hand side(s) of a binding group.
rhssOfBind :: Bind b -> [Expr b]
rhssOfBind (NonRec _ rhs) = [rhs]
rhssOfBind (Rec pairs)    = map snd pairs

-- | The right-hand sides of a list of case alternatives.
rhssOfAlts :: [Alt b] -> [Expr b]
rhssOfAlts alts = [ rhs | (_, _, rhs) <- alts ]

-- | Collapse all the bindings in the supplied groups into a single
-- list of lhs\/rhs pairs suitable for binding in a 'Rec' binding group
flattenBinds :: [Bind b] -> [(b, Expr b)]
flattenBinds []                  = []
flattenBinds (NonRec b r : rest) = (b, r) : flattenBinds rest
flattenBinds (Rec pairs  : rest) = pairs ++ flattenBinds rest
-- | We often want to strip off leading lambdas before getting down to
-- business. Variants are 'collectTyBinders', 'collectValBinders',
-- and 'collectTyAndValBinders'
collectBinders         :: Expr b   -> ([b],     Expr b)
collectTyBinders       :: CoreExpr -> ([TyVar], CoreExpr)
collectValBinders      :: CoreExpr -> ([Id],    CoreExpr)
collectTyAndValBinders :: CoreExpr -> ([TyVar], [Id], CoreExpr)

collectBinders expr = strip [] expr
  where
    strip acc (Lam b body) = strip (b : acc) body
    strip acc body         = (reverse acc, body)

collectTyBinders expr = strip [] expr
  where
    strip acc (Lam b body) | isTyVar b = strip (b : acc) body
    strip acc body                     = (reverse acc, body)

collectValBinders expr = strip [] expr
  where
    strip acc (Lam b body) | isId b = strip (b : acc) body
    strip acc body                  = (reverse acc, body)

-- Type binders first, then value binders from whatever remains.
collectTyAndValBinders expr = (tvs, ids, body)
  where
    (tvs, rest) = collectTyBinders expr
    (ids, body) = collectValBinders rest
-- | Takes a nested application expression and returns the function
-- being applied and the arguments to which it is applied
collectArgs :: Expr b -> (Expr b, [Arg b])
collectArgs expr = peel expr []
  where
    peel (App f a) args = peel f (a : args)
    peel fun       args = (fun, args)

-- | Like 'collectArgs', but also looks through floatable
-- ticks if it means that we can find more arguments.
collectArgsTicks :: (Tickish Id -> Bool) -> Expr b
                 -> (Expr b, [Arg b], [Tickish Id])
collectArgsTicks skipTick expr = peel expr [] []
  where
    peel e args ticks
      = case e of
          App f a -> peel f (a : args) ticks
          Tick t body | skipTick t -> peel body args (t : ticks)
          -- A non-floatable tick (or anything else) stops the walk here
          _ -> (e, args, reverse ticks)
{-
************************************************************************
* *
\subsection{Predicates}
* *
************************************************************************
At one time we optionally carried type arguments through to runtime.
@isRuntimeVar v@ returns if (Lam v _) really becomes a lambda at runtime,
i.e. if type applications are actual lambdas because types are kept around
at runtime. Similarly isRuntimeArg.
-}
-- | Will this variable exist at runtime?
isRuntimeVar :: Var -> Bool
isRuntimeVar = isId

-- | Will this argument expression exist at runtime?
isRuntimeArg :: CoreExpr -> Bool
isRuntimeArg = isValArg

-- | Returns @True@ for value arguments, false for type args
-- NB: coercions are value arguments (zero width, to be sure,
-- like State#, but still value args).
isValArg :: Expr b -> Bool
isValArg = not . isTypeArg

-- | Returns @True@ iff the expression is a 'Type' or 'Coercion'
-- expression at its top level
isTyCoArg :: Expr b -> Bool
isTyCoArg e = case e of
                Type {}     -> True
                Coercion {} -> True
                _           -> False

-- | Returns @True@ iff the expression is a 'Type' expression at its
-- top level. Note this does NOT include 'Coercion's.
isTypeArg :: Expr b -> Bool
isTypeArg e = case e of
                Type {} -> True
                _       -> False

-- | The number of binders that bind values rather than types
valBndrCount :: [CoreBndr] -> Int
valBndrCount = count isId

-- | The number of argument expressions that are values rather than types at their top level
valArgCount :: [Arg b] -> Int
valArgCount = count isValArg
{-
************************************************************************
* *
\subsection{Annotated core}
* *
************************************************************************
-}
-- | Annotated core: allows annotation at every node in the tree
type AnnExpr bndr annot = (annot, AnnExpr' bndr annot)

-- | A clone of the 'Expr' type but allowing annotation at every tree node
data AnnExpr' bndr annot
  = AnnVar      Id
  | AnnLit      Literal
  | AnnLam      bndr (AnnExpr bndr annot)
  | AnnApp      (AnnExpr bndr annot) (AnnExpr bndr annot)
  | AnnCase     (AnnExpr bndr annot) bndr Type [AnnAlt bndr annot]
  | AnnLet      (AnnBind bndr annot) (AnnExpr bndr annot)
  | AnnCast     (AnnExpr bndr annot) (annot, Coercion)
                   -- Put an annotation on the (root of) the coercion
  | AnnTick     (Tickish Id) (AnnExpr bndr annot)
  | AnnType     Type
  | AnnCoercion Coercion

-- | A clone of the 'Alt' type but allowing annotation at every tree node
type AnnAlt bndr annot = (AltCon, [bndr], AnnExpr bndr annot)

-- | A clone of the 'Bind' type but allowing annotation at every tree node
data AnnBind bndr annot
  = AnnNonRec bndr (AnnExpr bndr annot)
  | AnnRec    [(bndr, AnnExpr bndr annot)]
-- | Takes a nested application expression and returns the function
-- being applied and the arguments to which it is applied
collectAnnArgs :: AnnExpr b a -> (AnnExpr b a, [AnnExpr b a])
collectAnnArgs expr = peel expr []
  where
    peel (_, AnnApp f a) args = peel f (a : args)
    peel fun             args = (fun, args)

-- | Like 'collectAnnArgs', but also looks through ticks accepted by
-- the predicate, collecting them as it goes.
collectAnnArgsTicks :: (Tickish Var -> Bool) -> AnnExpr b a
                    -> (AnnExpr b a, [AnnExpr b a], [Tickish Var])
collectAnnArgsTicks tickishOk expr = peel expr [] []
  where
    peel e args ticks
      = case e of
          (_, AnnApp f a) -> peel f (a : args) ticks
          (_, AnnTick t body) | tickishOk t -> peel body args (t : ticks)
          _ -> (e, args, reverse ticks)
-- | Discard all annotations from an annotated expression.
deAnnotate :: AnnExpr bndr annot -> Expr bndr
deAnnotate (_, e) = deAnnotate' e

-- | Discard all annotations from an annotation-free expression node.
deAnnotate' :: AnnExpr' bndr annot -> Expr bndr
deAnnotate' expr
  = case expr of
      AnnType t              -> Type t
      AnnCoercion co         -> Coercion co
      AnnVar v               -> Var v
      AnnLit lit             -> Lit lit
      AnnLam binder body     -> Lam binder (deAnnotate body)
      AnnApp fun arg         -> App (deAnnotate fun) (deAnnotate arg)
      AnnCast e (_, co)      -> Cast (deAnnotate e) co
      AnnTick tick body      -> Tick tick (deAnnotate body)
      AnnLet bind body       -> Let (deAnnBind bind) (deAnnotate body)
      AnnCase scrut v t alts -> Case (deAnnotate scrut) v t (map deAnnAlt alts)
  where
    deAnnBind (AnnNonRec var rhs) = NonRec var (deAnnotate rhs)
    deAnnBind (AnnRec pairs)      = Rec [ (v, deAnnotate rhs) | (v, rhs) <- pairs ]

-- | Discard all annotations from a case alternative.
deAnnAlt :: AnnAlt bndr annot -> Alt bndr
deAnnAlt (con, args, rhs) = (con, args, deAnnotate rhs)
-- | As 'collectBinders' but for 'AnnExpr' rather than 'Expr'
collectAnnBndrs :: AnnExpr bndr annot -> ([bndr], AnnExpr bndr annot)
collectAnnBndrs expr = peel [] expr
  where
    peel acc (_, AnnLam b body) = peel (b : acc) body
    peel acc body               = (reverse acc, body)
| tjakway/ghcjvm | compiler/coreSyn/CoreSyn.hs | bsd-3-clause | 73,140 | 0 | 14 | 19,873 | 8,533 | 4,850 | 3,683 | 598 | 5 |
module Compose where
-- | The first five integers counting up from 3: [3,4,5,6,7].
thing1 = take 5 (enumFrom 3)
-- | The first five odd integers counting up from 3: [3,5,7,9,11].
thing2 = take 5 (filter odd (enumFrom 3))
| brodyberg/Notes | ProjectRosalind.hsproj/LearnHaskell/lib/HaskellBook/Compose.hs | mit | 99 | 0 | 8 | 25 | 43 | 22 | 21 | 3 | 1 |
{-
Copyright (c) 2014 Joachim Breitner
A data structure for undirected graphs of variables
(or in plain terms: Sets of unordered pairs of numbers)
This is very specifically tailored for the use in CallArity. In particular it
stores the graph as a union of complete and complete bipartite graph, which
would be very expensive to store as sets of edges or as adjacency lists.
It does not normalize the graphs. This means that g `unionUnVarGraph` g is
equal to g, but twice as expensive and large.
-}
module Eta.Utils.UnVarGraph
( UnVarSet
, emptyUnVarSet, mkUnVarSet, varEnvDom, unionUnVarSet, unionUnVarSets
, delUnVarSet
, elemUnVarSet, isEmptyUnVarSet
, UnVarGraph
, emptyUnVarGraph
, unionUnVarGraph, unionUnVarGraphs
, completeGraph, completeBipartiteGraph
, neighbors
, delNode
) where
import Eta.BasicTypes.Id
import Eta.BasicTypes.VarEnv
import Eta.Utils.UniqFM
import Eta.Utils.Outputable
import Data.List
import Eta.Utils.Bag
import Eta.BasicTypes.Unique
import qualified Data.IntSet as S
-- We need a type for sets of variables (UnVarSet).
-- We do not use VarSet, because for that we need to have the actual variable
-- at hand, and we do not have that when we turn the domain of a VarEnv into a UnVarSet.
-- Therefore, use a IntSet directly (which is likely also a bit more efficient).
-- Set of uniques, i.e. for adjacent nodes
-- A set of variables, represented by the variables' uniques.
newtype UnVarSet = UnVarSet (S.IntSet)
    deriving Eq
-- Key for a variable: the integer behind its unique.
k :: Var -> Int
k v = getKey (getUnique v)
emptyUnVarSet :: UnVarSet
emptyUnVarSet = UnVarSet S.empty
-- Membership test, by unique.
elemUnVarSet :: Var -> UnVarSet -> Bool
elemUnVarSet v (UnVarSet s) = k v `S.member` s
isEmptyUnVarSet :: UnVarSet -> Bool
isEmptyUnVarSet (UnVarSet s) = S.null s
-- Remove one variable from the set (no-op if absent).
delUnVarSet :: UnVarSet -> Var -> UnVarSet
delUnVarSet (UnVarSet s) v = UnVarSet $ k v `S.delete` s
mkUnVarSet :: [Var] -> UnVarSet
mkUnVarSet vs = UnVarSet $ S.fromList $ map k vs
-- The domain of a VarEnv as an UnVarSet; this is why the representation
-- is an IntSet of uniques rather than a VarSet (see module comment above).
varEnvDom :: VarEnv a -> UnVarSet
varEnvDom ae = UnVarSet $ ufmToSet_Directly ae
unionUnVarSet :: UnVarSet -> UnVarSet -> UnVarSet
unionUnVarSet (UnVarSet set1) (UnVarSet set2) = UnVarSet (set1 `S.union` set2)
unionUnVarSets :: [UnVarSet] -> UnVarSet
unionUnVarSets = foldr unionUnVarSet emptyUnVarSet
instance Outputable UnVarSet where
    ppr (UnVarSet s) = braces $
        hcat $ punctuate comma [ ppr (getUnique i) | i <- S.toList s]
-- The graph type. A bag of generators, each either a complete bipartite
-- graph between two sets or a complete graph on one set; the represented
-- graph is the union of all generators (see module comment above).
data Gen = CBPG UnVarSet UnVarSet -- complete bipartite
         | CG UnVarSet -- complete
newtype UnVarGraph = UnVarGraph (Bag Gen)
emptyUnVarGraph :: UnVarGraph
emptyUnVarGraph = UnVarGraph emptyBag
-- Union of two graphs: simply concatenate the generator bags.
-- Note (per module comment) that this does not normalize, so unioning
-- a graph with itself doubles its size.
unionUnVarGraph :: UnVarGraph -> UnVarGraph -> UnVarGraph
{-
Premature optimisation, it seems.
unionUnVarGraph (UnVarGraph [CBPG s1 s2]) (UnVarGraph [CG s3, CG s4])
    | s1 == s3 && s2 == s4
    = pprTrace "unionUnVarGraph fired" empty $
      completeGraph (s1 `unionUnVarSet` s2)
unionUnVarGraph (UnVarGraph [CBPG s1 s2]) (UnVarGraph [CG s3, CG s4])
    | s2 == s3 && s1 == s4
    = pprTrace "unionUnVarGraph fired2" empty $
      completeGraph (s1 `unionUnVarSet` s2)
-}
unionUnVarGraph (UnVarGraph g1) (UnVarGraph g2)
    = -- pprTrace "unionUnVarGraph" (ppr (length g1, length g2)) $
      UnVarGraph (g1 `unionBags` g2)
unionUnVarGraphs :: [UnVarGraph] -> UnVarGraph
unionUnVarGraphs = foldl' unionUnVarGraph emptyUnVarGraph
-- completeBipartiteGraph A B = { {a,b} | a ∈ A, b ∈ B }
completeBipartiteGraph :: UnVarSet -> UnVarSet -> UnVarGraph
completeBipartiteGraph s1 s2 = prune (UnVarGraph (unitBag (CBPG s1 s2)))
-- completeGraph A = { {a,b} | a, b ∈ A }
completeGraph :: UnVarSet -> UnVarGraph
completeGraph s = prune (UnVarGraph (unitBag (CG s)))
-- | All nodes adjacent to @v@, gathered from every generator.
neighbors :: UnVarGraph -> Var -> UnVarSet
neighbors (UnVarGraph g) v = unionUnVarSets (concatMap adjacent (bagToList g))
  where
    -- In a complete graph, v's neighbours are the whole set (if v is in it).
    adjacent (CG s) = [ s | v `elemUnVarSet` s ]
    -- In a complete bipartite graph, they are the opposite side(s).
    adjacent (CBPG s1 s2) = [ s2 | v `elemUnVarSet` s1 ]
                         ++ [ s1 | v `elemUnVarSet` s2 ]
-- | Remove a node (and hence all its edges) from the graph.
delNode :: UnVarGraph -> Var -> UnVarGraph
delNode (UnVarGraph g) v = prune $ UnVarGraph $ mapBag remove g
  where
    remove (CG s) = CG (delUnVarSet s v)
    remove (CBPG s1 s2) = CBPG (delUnVarSet s1 v) (delUnVarSet s2 v)
-- | Drop generators that contribute no edges (any side empty).
prune :: UnVarGraph -> UnVarGraph
prune (UnVarGraph g) = UnVarGraph (filterBag keep g)
  where
    keep (CG s) = not (isEmptyUnVarSet s)
    -- De Morgan: both sides must be non-empty for the generator to matter.
    keep (CBPG s1 s2) = not (isEmptyUnVarSet s1 || isEmptyUnVarSet s2)
-- Pretty-printing for debugging: S² for a complete graph on S,
-- "S1 x S2" for a complete bipartite graph.
instance Outputable Gen where
    ppr (CG s) = ppr s <> char '²'
    ppr (CBPG s1 s2) = ppr s1 <+> char 'x' <+> ppr s2
instance Outputable UnVarGraph where
    ppr (UnVarGraph g) = ppr g
| rahulmutt/ghcvm | compiler/Eta/Utils/UnVarGraph.hs | bsd-3-clause | 4,602 | 0 | 12 | 946 | 1,106 | 596 | 510 | 75 | 5 |
-- |
-- Module : Data.BERT.Packet
-- Copyright : (c) marius a. eriksen 2009
--
-- License : BSD3
-- Maintainer : marius@monkey.org
-- Stability : experimental
-- Portability : GHC
--
-- BERP (BERT packets) support.
module Data.BERT.Packet
( Packet(..)
, fromPacket
, packets
) where
import Control.Monad (liftM)
import Data.ByteString.Lazy as L
import Data.Binary (Binary(..), Get(..), encode, decode)
import Data.Binary.Put (putWord32be, putLazyByteString)
import Data.Binary.Get (getWord32be, getLazyByteString, runGet, runGetState)
import Data.BERT.Term
import Data.BERT.Types (Term(..))
-- | A single BERP. Little more than a wrapper for a term.
data Packet
  = Packet Term
    deriving (Show, Ord, Eq)
-- | Extract the wrapped 'Term' from a 'Packet'.
-- (Top-level type signature added; it was previously inferred.)
fromPacket :: Packet -> Term
fromPacket (Packet t) = t
-- A BERP is the Binary encoding of the term preceded by its length
-- as a 4-byte big-endian word.
instance Binary Packet where
  put (Packet term) =
    putWord32be (fromIntegral len) >> putLazyByteString encoded
    where encoded = encode term
          len = L.length encoded
  get = getPacket
-- Read the 4-byte length prefix, take that many bytes, and decode
-- them as a Term.
getPacket =
  liftM fromIntegral getWord32be >>=
  getLazyByteString >>=
  return . Packet . decode
-- | From a lazy bytestring, return a (lazy) list of packets. This is
-- convenient for parsing a stream of adjacent packets. (Eg. by using
-- some form of @getContents@ to get a @ByteString@ out of a data
-- source).
packets :: L.ByteString -> [Packet]
packets bytes
  | L.null bytes = []
  | otherwise = packet : packets rest
  where (packet, rest, _) = runGetState getPacket bytes 0
| mariusae/bert | Data/BERT/Packet.hs | bsd-3-clause | 1,447 | 0 | 9 | 306 | 339 | 197 | 142 | 30 | 1 |
{- Author: Jeff Newbern
Maintainer: Jeff Newbern <jnewbern@nomaware.com>
Time-stamp: <Thu Jul 24 13:39:30 2003>
License: GPL
-}
{- DESCRIPTION
Example 15 - Using the State monad
Usage: Compile the code and execute the command.
It will print two identical random values of type MyType.
The first value is computed without the State monad and
the second is computed using the State monad.
The MyType values are random but obey some internal
invariants:
o the Int value is in the range 1-100.
o the Char value is in the range 'a'-'z'
o the absolute value of the second Int value is
less than or equal to the value of the first Int value
Try: ./ex15
-}
import Monad
import System
import IO
import Random
import Control.Monad.State
-- This is the type that we want to generate random values of.
-- (See the header comment for the invariants the random fields obey.)
data MyType = MT Int Bool Char Int deriving Show
{- Without using the State monad, we would have to thread the
random number generator state by hand. The function would
look like this:
-}
-- Builds a random MyType by threading the generator state by hand:
-- each draw consumes one generator and yields the next.
makeRandomValue :: StdGen -> (MyType, StdGen)
makeRandomValue gen0 = (MT n b c m, gen4)
  where
    (n, gen1) = randomR (1,100) gen0
    (b, gen2) = random gen1
    (c, gen3) = randomR ('a','z') gen2
    (m, gen4) = randomR (-n,n) gen3
{- Using the State monad, we can define a function that returns
a random value and updates the random generator state at
the same time.
-}
-- | Produce a random value of any 'Random' type, keeping the
-- generator in the 'State' monad.
getAny :: (Random a) => State StdGen a
getAny = do g <- get
            -- Pure pattern bind: avoids the redundant
            -- @(x,g') <- return $ random g@ round-trip through the monad.
            let (x, g') = random g
            put g'
            return x
-- | Similar to 'getAny', but it bounds the random value returned.
getOne :: (Random a) => (a,a) -> State StdGen a
getOne bounds = do g <- get
                   -- Pure pattern bind instead of @(x,g') <- return ...@.
                   let (x, g') = randomR bounds g
                   put g'
                   return x
{- Using the State monad with StdGen as the state, we can build
random complex types without manually threading the
random generator states through the code.
-}
-- Same value as 'makeRandomValue', but the generator threading is
-- hidden inside the State monad; the draws happen in the same order.
makeRandomValueST :: StdGen -> (MyType, StdGen)
makeRandomValueST = runState (do n <- getOne (1,100)
                                 b <- getAny
                                 c <- getOne ('a','z')
                                 m <- getOne (-n,n)
                                 return (MT n b c m))
-- Print a random value of MyType twice, once per implementation,
-- to show the two implementations are equivalent.
main :: IO ()
main = do
  g <- getStdGen
  print (fst (makeRandomValue g))
  print (fst (makeRandomValueST g))
-- END OF FILE
| buetow/hsbot | examples/example15.hs | bsd-3-clause | 2,583 | 2 | 12 | 835 | 468 | 246 | 222 | 32 | 1 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "System/Process.hs" #-}
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
{-# LANGUAGE Safe #-}
{-# LANGUAGE InterruptibleFFI #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Process
-- Copyright : (c) The University of Glasgow 2004-2008
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (requires concurrency)
--
-- Operations for creating and interacting with sub-processes.
--
-----------------------------------------------------------------------------
-- ToDo:
-- * Flag to control whether exiting the parent also kills the child.
module System.Process (
-- * Running sub-processes
createProcess,
createProcess_,
shell, proc,
CreateProcess(..),
CmdSpec(..),
StdStream(..),
ProcessHandle,
-- ** Simpler functions for common tasks
callProcess,
callCommand,
spawnProcess,
spawnCommand,
readCreateProcess,
readProcess,
readCreateProcessWithExitCode,
readProcessWithExitCode,
withCreateProcess,
-- ** Related utilities
showCommandForUser,
-- ** Control-C handling on Unix
-- $ctlc-handling
-- * Process completion
waitForProcess,
getProcessExitCode,
terminateProcess,
interruptProcessGroupOf,
-- Interprocess communication
createPipe,
createPipeFd,
-- * Old deprecated functions
-- | These functions pre-date 'createProcess' which is much more
-- flexible.
runProcess,
runCommand,
runInteractiveProcess,
runInteractiveCommand,
system,
rawSystem,
) where
import Prelude hiding (mapM)
import System.Process.Internals
import Control.Concurrent
import Control.DeepSeq (rnf)
import Control.Exception (SomeException, mask, bracket, try, throwIO)
import qualified Control.Exception as C
import Control.Monad
import Data.Maybe
import Foreign
import Foreign.C
import System.Exit ( ExitCode(..) )
import System.IO
import System.IO.Error (mkIOError, ioeSetErrorString)
-- Provide the data constructors for CPid on GHC 7.4 and later
import System.Posix.Types (CPid (..))
import GHC.IO.Exception ( ioException, IOErrorType(..), IOException(..) )
-- ----------------------------------------------------------------------------
-- createProcess
-- | Construct a 'CreateProcess' record for passing to 'createProcess',
-- representing a raw command with arguments.
--
-- See 'RawCommand' for precise semantics of the specified @FilePath@.
proc :: FilePath -> [String] -> CreateProcess
proc cmd args = CreateProcess { cmdspec = RawCommand cmd args,
                                cwd = Nothing,
                                env = Nothing,
                                -- all three standard streams default to
                                -- being inherited from the parent
                                std_in = Inherit,
                                std_out = Inherit,
                                std_err = Inherit,
                                close_fds = False,
                                create_group = False,
                                delegate_ctlc = False,
                                detach_console = False,
                                create_new_console = False,
                                new_session = False,
                                child_group = Nothing,
                                child_user = Nothing,
                                use_process_jobs = False }
-- | Construct a 'CreateProcess' record for passing to 'createProcess',
-- representing a command to be passed to the shell.
shell :: String -> CreateProcess
shell str = CreateProcess { cmdspec = ShellCommand str,
                            cwd = Nothing,
                            env = Nothing,
                            -- same defaults as 'proc' above
                            std_in = Inherit,
                            std_out = Inherit,
                            std_err = Inherit,
                            close_fds = False,
                            create_group = False,
                            delegate_ctlc = False,
                            detach_console = False,
                            create_new_console = False,
                            new_session = False,
                            child_group = Nothing,
                            child_user = Nothing,
                            use_process_jobs = False }
{- |
This is the most general way to spawn an external process. The
process can be a command line to be executed by a shell or a raw command
with a list of arguments. The stdin, stdout, and stderr streams of
the new process may individually be attached to new pipes, to existing
'Handle's, or just inherited from the parent (the default.)
The details of how to create the process are passed in the
'CreateProcess' record. To make it easier to construct a
'CreateProcess', the functions 'proc' and 'shell' are supplied that
fill in the fields with default values which can be overriden as
needed.
'createProcess' returns @(/mb_stdin_hdl/, /mb_stdout_hdl/, /mb_stderr_hdl/, /ph/)@,
where
* if @'std_in' == 'CreatePipe'@, then @/mb_stdin_hdl/@ will be @Just /h/@,
where @/h/@ is the write end of the pipe connected to the child
process's @stdin@.
* otherwise, @/mb_stdin_hdl/ == Nothing@
Similarly for @/mb_stdout_hdl/@ and @/mb_stderr_hdl/@.
For example, to execute a simple @ls@ command:
> r <- createProcess (proc "ls" [])
To create a pipe from which to read the output of @ls@:
> (_, Just hout, _, _) <-
> createProcess (proc "ls" []){ std_out = CreatePipe }
To also set the directory in which to run @ls@:
> (_, Just hout, _, _) <-
> createProcess (proc "ls" []){ cwd = Just "\home\bob",
> std_out = CreatePipe }
Note that @Handle@s provided for @std_in@, @std_out@, or @std_err@ via the
@UseHandle@ constructor will be closed by calling this function. This is not
always the desired behavior. In cases where you would like to leave the
@Handle@ open after spawning the child process, please use 'createProcess_'
instead. All created @Handle@s are initially in text mode; if you need them
to be in binary mode then use 'hSetBinaryMode'.
-}
createProcess
  :: CreateProcess
  -> IO (Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle)
createProcess cp = do
    r <- createProcess_ "createProcess" cp
    -- Caller-supplied handles are closed after spawning (documented above);
    -- use 'createProcess_' to keep them open.
    mapM_ closeCallerHandle [std_in cp, std_out cp, std_err cp]
    return r
  where
    -- Close a caller-supplied handle, but never the standard ones.
    closeCallerHandle :: StdStream -> IO ()
    closeCallerHandle (UseHandle hdl)
      | hdl `notElem` [stdin, stdout, stderr] = hClose hdl
    closeCallerHandle _ = return ()
{-
-- | A 'C.bracket'-style resource handler for 'createProcess'.
--
-- Does automatic cleanup when the action finishes. If there is an exception
-- in the body then it ensures that the process gets terminated and any
-- 'CreatePipe' 'Handle's are closed. In particular this means that if the
-- Haskell thread is killed (e.g. 'killThread'), that the external process is
-- also terminated.
--
-- e.g.
--
-- > withCreateProcess (proc cmd args) { ... } $ \_ _ _ ph -> do
-- > ...
--
-- @since 1.4.3.0
-}
withCreateProcess
  :: CreateProcess
  -> (Maybe Handle -> Maybe Handle -> Maybe Handle -> ProcessHandle -> IO a)
  -> IO a
withCreateProcess c action = C.bracket (createProcess c) cleanupProcess go
  where
    -- Unpack the handle quadruple into the four arguments the body expects.
    go (m_in, m_out, m_err, ph) = action m_in m_out m_err ph
-- Wrapper so we can get exceptions with the appropriate function name.
-- Unlike 'withCreateProcess' this only cleans up on error
-- ('bracketOnError'), matching the behaviour callers rely on.
withCreateProcess_
  :: String
  -> CreateProcess
  -> (Maybe Handle -> Maybe Handle -> Maybe Handle -> ProcessHandle -> IO a)
  -> IO a
withCreateProcess_ fun c action =
    C.bracketOnError (createProcess_ fun c) cleanupProcess go
  where
    go (m_in, m_out, m_err, ph) = action m_in m_out m_err ph
-- | Ask the process to terminate and close any 'CreatePipe' handles;
-- waits for Ctl-C delegation to end synchronously, but reaps the
-- process itself asynchronously (see comments below).
cleanupProcess :: (Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle)
               -> IO ()
cleanupProcess (mb_stdin, mb_stdout, mb_stderr,
                ph@(ProcessHandle _ delegating_ctlc _)) = do
    terminateProcess ph
    -- Note, it's important that other threads that might be reading/writing
    -- these handles also get killed off, since otherwise they might be holding
    -- the handle lock and prevent us from closing, leading to deadlock.
    maybe (return ()) (ignoreSigPipe . hClose) mb_stdin
    maybe (return ()) hClose mb_stdout
    maybe (return ()) hClose mb_stderr
    -- terminateProcess does not guarantee that it terminates the process.
    -- Indeed on Unix it's SIGTERM, which asks nicely but does not guarantee
    -- that it stops. If it doesn't stop, we don't want to hang, so we wait
    -- asynchronously using forkIO.
    -- However we want to end the Ctl-C handling synchronously, so we'll do
    -- that synchronously, and set delegating_ctlc as False for the
    -- waitForProcess (which would otherwise end the Ctl-C delegation itself).
    when delegating_ctlc
      stopDelegateControlC
    _ <- forkIO (waitForProcess (resetCtlcDelegation ph) >> return ())
    return ()
  where
    resetCtlcDelegation (ProcessHandle m _ l) = ProcessHandle m False l
-- ----------------------------------------------------------------------------
-- spawnProcess/spawnCommand
-- | Creates a new process to run the specified raw command with the given
-- arguments. It does not wait for the program to finish, but returns the
-- 'ProcessHandle'.
--
-- @since 1.2.0.0
spawnProcess :: FilePath -> [String] -> IO ProcessHandle
spawnProcess cmd args =
    fourth <$> createProcess_ "spawnProcess" (proc cmd args)
  where fourth (_, _, _, ph) = ph
-- | Creates a new process to run the specified shell command.
-- It does not wait for the program to finish, but returns the 'ProcessHandle'.
--
-- @since 1.2.0.0
spawnCommand :: String -> IO ProcessHandle
spawnCommand cmd =
    fourth <$> createProcess_ "spawnCommand" (shell cmd)
  where fourth (_, _, _, ph) = ph
-- ----------------------------------------------------------------------------
-- callProcess/callCommand
-- | Creates a new process to run the specified command with the given
-- arguments, and wait for it to finish. If the command returns a non-zero
-- exit code, an exception is raised.
--
-- If an asynchronous exception is thrown to the thread executing
-- @callProcess@, the forked process will be terminated and
-- @callProcess@ will wait (block) until the process has been
-- terminated.
--
-- @since 1.2.0.0
callProcess :: FilePath -> [String] -> IO ()
callProcess cmd args =
    withCreateProcess_ "callProcess"
      (proc cmd args) { delegate_ctlc = True }
      (\_ _ _ p -> waitForProcess p)
      >>= checkExit
  where
    -- Raise on any non-zero exit code, as documented above.
    checkExit ExitSuccess = return ()
    checkExit (ExitFailure r) = processFailedException "callProcess" cmd args r
-- | Creates a new process to run the specified shell command. If the
-- command returns a non-zero exit code, an exception is raised.
--
-- If an asynchronous exception is thrown to the thread executing
-- @callCommand@, the forked process will be terminated and
-- @callCommand@ will wait (block) until the process has been
-- terminated.
--
-- @since 1.2.0.0
callCommand :: String -> IO ()
callCommand cmd =
    withCreateProcess_ "callCommand"
      (shell cmd) { delegate_ctlc = True }
      (\_ _ _ p -> waitForProcess p)
      >>= checkExit
  where
    -- Raise on any non-zero exit code, as documented above.
    checkExit ExitSuccess = return ()
    checkExit (ExitFailure r) = processFailedException "callCommand" cmd [] r
-- Throw an 'IOError' describing a child process that exited non-zero.
processFailedException :: String -> String -> [String] -> Int -> IO a
processFailedException fun cmd args exit_code =
    ioError (mkIOError OtherError msg Nothing Nothing)
  where
    -- e.g. "callProcess: ls \"-l\" (exit 2)"
    msg = concat
        [ fun, ": ", cmd
        , concat [ ' ' : show arg | arg <- args ]
        , " (exit ", show exit_code, ")"
        ]
-- ----------------------------------------------------------------------------
-- Control-C handling on Unix
-- $ctlc-handling
--
-- When running an interactive console process (such as a shell, console-based
-- text editor or ghci), we typically want that process to be allowed to handle
-- Ctl-C keyboard interrupts how it sees fit. For example, while most programs
-- simply quit on a Ctl-C, some handle it specially. To allow this to happen,
-- use the @'delegate_ctlc' = True@ option in the 'CreateProcess' options.
--
-- The gory details:
--
-- By default Ctl-C will generate a @SIGINT@ signal, causing a 'UserInterrupt'
-- exception to be sent to the main Haskell thread of your program, which if
-- not specially handled will terminate the program. Normally, this is exactly
-- what is wanted: an orderly shutdown of the program in response to Ctl-C.
--
-- Of course when running another interactive program in the console then we
-- want to let that program handle Ctl-C. Under Unix however, Ctl-C sends
-- @SIGINT@ to every process using the console. The standard solution is that
-- while running an interactive program, ignore @SIGINT@ in the parent, and let
-- it be handled in the child process. If that process then terminates due to
-- the @SIGINT@ signal, then at that point treat it as if we had received the
-- @SIGINT@ ourselves and begin an orderly shutdown.
--
-- This behaviour is implemented by 'createProcess' (and
-- 'waitForProcess' \/ 'getProcessExitCode') when the @'delegate_ctlc' = True@
-- option is set. In particular, the @SIGINT@ signal will be ignored until
-- 'waitForProcess' returns (or 'getProcessExitCode' returns a non-Nothing
-- result), so it becomes especially important to use 'waitForProcess' for every
-- processes created.
--
-- In addition, in 'delegate_ctlc' mode, 'waitForProcess' and
-- 'getProcessExitCode' will throw a 'UserInterrupt' exception if the process
-- terminated with @'ExitFailure' (-SIGINT)@. Typically you will not want to
-- catch this exception, but let it propagate, giving a normal orderly shutdown.
-- One detail to be aware of is that the 'UserInterrupt' exception is thrown
-- /synchronously/ in the thread that calls 'waitForProcess', whereas normally
-- @SIGINT@ causes the exception to be thrown /asynchronously/ to the main
-- thread.
--
-- For even more detail on this topic, see
-- <http://www.cons.org/cracauer/sigint.html "Proper handling of SIGINT/SIGQUIT">.
-- -----------------------------------------------------------------------------
-- | @readProcess@ forks an external process, reads its standard output
-- strictly, blocking until the process terminates, and returns the output
-- string. The external process inherits the standard error.
--
-- If an asynchronous exception is thrown to the thread executing
-- @readProcess@, the forked process will be terminated and @readProcess@ will
-- wait (block) until the process has been terminated.
--
-- Output is returned strictly, so this is not suitable for
-- interactive applications.
--
-- This function throws an 'IOError' if the process 'ExitCode' is
-- anything other than 'ExitSuccess'. If instead you want to get the
-- 'ExitCode' then use 'readProcessWithExitCode'.
--
-- Users of this function should compile with @-threaded@ if they
-- want other Haskell threads to keep running while waiting on
-- the result of readProcess.
--
-- > > readProcess "date" [] []
-- > "Thu Feb 7 10:03:39 PST 2008\n"
--
-- The arguments are:
--
-- * The command to run, which must be in the $PATH, or an absolute or relative path
--
-- * A list of separate command line arguments to the program
--
-- * A string to pass on standard input to the forked process.
--
readProcess
    :: FilePath                 -- ^ Filename of the executable (see 'RawCommand' for details)
    -> [String]                 -- ^ any arguments
    -> String                   -- ^ standard input
    -> IO String                -- ^ stdout
readProcess cmd args input = readCreateProcess (proc cmd args) input
-- | @readCreateProcess@ works exactly like 'readProcess' except that it
-- lets you pass 'CreateProcess' giving better flexibility.
--
-- > > readCreateProcess (shell "pwd" { cwd = "/etc/" }) ""
-- > "/etc\n"
--
-- Note that @Handle@s provided for @std_in@ or @std_out@ via the CreateProcess
-- record will be ignored.
--
-- @since 1.2.3.0
readCreateProcess
    :: CreateProcess
    -> String                   -- ^ standard input
    -> IO String                -- ^ stdout
readCreateProcess cp input = do
    -- Override stdin/stdout with pipes; stderr is left as the caller set it.
    let cp_opts = cp {
                    std_in = CreatePipe,
                    std_out = CreatePipe
                  }
    (ex, output) <- withCreateProcess_ "readCreateProcess" cp_opts $
      \(Just inh) (Just outh) _ ph -> do
        -- fork off a thread to start consuming the output
        output  <- hGetContents outh
        withForkWait (C.evaluate $ rnf output) $ \waitOut -> do
          -- now write any input
          unless (null input) $
            ignoreSigPipe $ hPutStr inh input
          -- hClose performs implicit hFlush, and thus may trigger a SIGPIPE
          ignoreSigPipe $ hClose inh
          -- wait on the output
          waitOut
          hClose outh
        -- wait on the process
        ex <- waitForProcess ph
        return (ex, output)
    case ex of
     ExitSuccess   -> return output
     ExitFailure r -> processFailedException "readCreateProcess" cmd args r
  where
    -- Reconstruct the command name / arguments for the error message.
    cmd = case cp of
            CreateProcess { cmdspec = ShellCommand sc } -> sc
            CreateProcess { cmdspec = RawCommand fp _ } -> fp
    args = case cp of
             CreateProcess { cmdspec = ShellCommand _ } -> []
             CreateProcess { cmdspec = RawCommand _ args' } -> args'
-- | @readProcessWithExitCode@ is like @readProcess@ but with two differences:
--
-- * it returns the 'ExitCode' of the process, and does not throw any
-- exception if the code is not 'ExitSuccess'.
--
-- * it reads and returns the output from process' standard error handle,
-- rather than the process inheriting the standard error handle.
--
-- On Unix systems, see 'waitForProcess' for the meaning of exit codes
-- when the process died as the result of a signal.
--
readProcessWithExitCode
    :: FilePath                 -- ^ Filename of the executable (see 'RawCommand' for details)
    -> [String]                 -- ^ any arguments
    -> String                   -- ^ standard input
    -> IO (ExitCode,String,String) -- ^ exitcode, stdout, stderr
readProcessWithExitCode cmd args input =
    readCreateProcessWithExitCode (proc cmd args) input
-- | @readCreateProcessWithExitCode@ works exactly like 'readProcessWithExitCode' except that it
-- lets you pass 'CreateProcess' giving better flexibility.
--
-- Note that @Handle@s provided for @std_in@, @std_out@, or @std_err@ via the CreateProcess
-- record will be ignored.
--
-- @since 1.2.3.0
readCreateProcessWithExitCode
    :: CreateProcess
    -> String                      -- ^ standard input
    -> IO (ExitCode,String,String) -- ^ exitcode, stdout, stderr
readCreateProcessWithExitCode cp input = do
    -- All three streams are piped, regardless of what the caller set.
    let cp_opts = cp {
                    std_in  = CreatePipe,
                    std_out = CreatePipe,
                    std_err = CreatePipe
                  }
    withCreateProcess_ "readCreateProcessWithExitCode" cp_opts $
      \(Just inh) (Just outh) (Just errh) ph -> do
        out <- hGetContents outh
        err <- hGetContents errh
        -- fork off threads to start consuming stdout & stderr
        withForkWait  (C.evaluate $ rnf out) $ \waitOut ->
         withForkWait (C.evaluate $ rnf err) $ \waitErr -> do
          -- now write any input
          unless (null input) $
            ignoreSigPipe $ hPutStr inh input
          -- hClose performs implicit hFlush, and thus may trigger a SIGPIPE
          ignoreSigPipe $ hClose inh
          -- wait on the output
          waitOut
          waitErr
          hClose outh
          hClose errh
        -- wait on the process
        ex <- waitForProcess ph
        return (ex, out, err)
-- | Fork a thread while doing something else, but kill it if there's an
-- exception.
--
-- This is important in the cases above because we want to kill the thread
-- that is holding the Handle lock, because when we clean up the process we
-- try to close that handle, which could otherwise deadlock.
--
withForkWait :: IO () -> (IO () -> IO a) -> IO a
withForkWait async body = do
  -- The forked thread reports its outcome (or exception) through this MVar.
  waitVar <- newEmptyMVar :: IO (MVar (Either SomeException ()))
  mask $ \restore -> do
    tid <- forkIO $ try (restore async) >>= putMVar waitVar
    -- 'wait' re-raises in this thread any exception the forked thread hit.
    let wait = takeMVar waitVar >>= either throwIO return
    -- If the body throws, kill the forked thread (see comment above).
    restore (body wait) `C.onException` killThread tid
-- Run an action, swallowing only the EPIPE "resource vanished" error
-- (the child closed its end of the pipe); anything else is re-thrown.
ignoreSigPipe :: IO () -> IO ()
ignoreSigPipe = C.handle handler
  where
    handler :: IOException -> IO ()
    handler IOError { ioe_type = ResourceVanished, ioe_errno = Just errno }
      | Errno errno == ePIPE = return ()
    handler e = throwIO e
-- ----------------------------------------------------------------------------
-- showCommandForUser
-- | Given a program @/p/@ and arguments @/args/@,
-- @showCommandForUser /p/ /args/@ returns a string suitable for pasting
-- into @\/bin\/sh@ (on Unix systems) or @CMD.EXE@ (on Windows).
showCommandForUser :: FilePath -> [String] -> String
showCommandForUser cmd args = unwords [ translate s | s <- cmd : args ]
-- ----------------------------------------------------------------------------
-- waitForProcess
{- | Waits for the specified process to terminate, and returns its exit code.
GHC Note: in order to call @waitForProcess@ without blocking all the
other threads in the system, you must compile the program with
@-threaded@.
(/Since: 1.2.0.0/) On Unix systems, a negative value @'ExitFailure' -/signum/@
indicates that the child was terminated by signal @/signum/@.
The signal numbers are platform-specific, so to test for a specific signal use
the constants provided by "System.Posix.Signals" in the @unix@ package.
Note: core dumps are not reported, use "System.Posix.Process" if you need this
detail.
-}
waitForProcess
  :: ProcessHandle
  -> IO ExitCode
waitForProcess ph@(ProcessHandle _ delegating_ctlc _) = lockWaitpid $ do
  -- Read the current handle state without changing it.
  p_ <- modifyProcessHandle ph $ \p_ -> return (p_,p_)
  case p_ of
    ClosedHandle e -> return e
    OpenHandle h  -> do
        e <- alloca $ \pret -> do
          -- don't hold the MVar while we call c_waitForProcess...
          throwErrnoIfMinus1Retry_ "waitForProcess" (c_waitForProcess h pret)
          modifyProcessHandle ph $ \p_' ->
            case p_' of
              ClosedHandle e -> return (p_', e)
              OpenExtHandle{} -> return (p_', ExitFailure (-1))
              OpenHandle ph' -> do
                closePHANDLE ph'
                code <- peek pret
                let e = if (code == 0)
                       then ExitSuccess
                       else (ExitFailure (fromIntegral code))
                return (ClosedHandle e, e)
        -- In delegation mode, finishing the wait also ends Ctl-C
        -- delegation (see the "$ctlc-handling" section above).
        when delegating_ctlc $
          endDelegateControlC e
        return e
    OpenExtHandle _ _job _iocp ->
        return $ ExitFailure (-1)
  where
    -- If more than one thread calls `waitpid` at a time, `waitpid` will
    -- return the exit code to one of them and (-1) to the rest of them,
    -- causing an exception to be thrown.
    -- Cf. https://github.com/haskell/process/issues/46, and
    -- https://github.com/haskell/process/pull/58 for further discussion
    lockWaitpid m = withMVar (waitpidLock ph) $ \() -> m
-- ----------------------------------------------------------------------------
-- getProcessExitCode
{- |
This is a non-blocking version of 'waitForProcess'. If the process is
still running, 'Nothing' is returned. If the process has exited, then
@'Just' e@ is returned where @e@ is the exit code of the process.
On Unix systems, see 'waitForProcess' for the meaning of exit codes
when the process died as the result of a signal.
-}
getProcessExitCode :: ProcessHandle -> IO (Maybe ExitCode)
getProcessExitCode ph@(ProcessHandle _ delegating_ctlc _) = tryLockWaitpid $ do
  -- m_e: the exit code if known; was_open: whether we closed the handle now.
  (m_e, was_open) <- modifyProcessHandle ph $ \p_ ->
    case p_ of
      ClosedHandle e -> return (p_, (Just e, False))
      open -> do
        alloca $ \pExitCode -> do
            case getHandle open of
              Nothing -> return (p_, (Nothing, False))
              Just h  -> do
                res <- throwErrnoIfMinus1Retry "getProcessExitCode" $
                             c_getProcessExitCode h pExitCode
                code <- peek pExitCode
                if res == 0
                  then return (p_, (Nothing, False))
                  else do
                       closePHANDLE h
                       let e  | code == 0 = ExitSuccess
                              | otherwise = ExitFailure (fromIntegral code)
                       return (ClosedHandle e, (Just e, True))
  -- Only end Ctl-C delegation the first time we observe the exit.
  case m_e of
    Just e | was_open && delegating_ctlc -> endDelegateControlC e
    _                                    -> return ()
  return m_e
    where getHandle :: ProcessHandle__ -> Maybe PHANDLE
          getHandle (OpenHandle        h)   = Just h
          getHandle (ClosedHandle      _)   = Nothing
          getHandle (OpenExtHandle h _ _)   = Just h
          -- If somebody is currently holding the waitpid lock, we don't want to
          -- accidentally remove the pid from the process table.
          -- Try acquiring the waitpid lock. If it is held, we are done
          -- since that means the process is still running and we can return
          -- `Nothing`. If it is not held, acquire it so we can run the
          -- (non-blocking) call to `waitpid` without worrying about any
          -- other threads calling it at the same time.
          tryLockWaitpid :: IO (Maybe ExitCode) -> IO (Maybe ExitCode)
          tryLockWaitpid action = bracket acquire release between
            where
              acquire   = tryTakeMVar (waitpidLock ph)
              release m = case m of
                Nothing -> return ()
                Just () -> putMVar (waitpidLock ph) ()
              between m = case m of
                Nothing -> return Nothing
                Just () -> action
-- ----------------------------------------------------------------------------
-- terminateProcess
-- | Attempts to terminate the specified process. This function should
-- not be used under normal circumstances - no guarantees are given regarding
-- how cleanly the process is terminated. To check whether the process
-- has indeed terminated, use 'getProcessExitCode'.
--
-- On Unix systems, 'terminateProcess' sends the process the SIGTERM signal.
-- On Windows systems, the Win32 @TerminateProcess@ function is called, passing
-- an exit code of 1.
--
-- Note: on Windows, if the process was a shell command created by
-- 'createProcess' with 'shell', or created by 'runCommand' or
-- 'runInteractiveCommand', then 'terminateProcess' will only
-- terminate the shell, not the command itself. On Unix systems, both
-- processes are in a process group and will be terminated together.
terminateProcess :: ProcessHandle -> IO ()
terminateProcess ph = do
  withProcessHandle ph $ \p_ ->
    case p_ of
      ClosedHandle _ -> return ()
      OpenExtHandle{} -> error "terminateProcess with OpenExtHandle should not happen on POSIX."
      OpenHandle h -> do
        -- On POSIX this sends SIGTERM via the C shim below.
        throwErrnoIfMinus1Retry_ "terminateProcess" $ c_terminateProcess h
        return ()
        -- does not close the handle, we might want to try terminating it
        -- again, or get its exit code.
-- ----------------------------------------------------------------------------
-- Interface to C bits
-- Send a termination signal to the process (SIGTERM on POSIX).
foreign import ccall unsafe "terminateProcess"
  c_terminateProcess
        :: PHANDLE
        -> IO CInt
-- Non-blocking status poll: returns 0 while the process is running,
-- otherwise writes the exit code through the pointer.
foreign import ccall unsafe "getProcessExitCode"
  c_getProcessExitCode
        :: PHANDLE
        -> Ptr CInt
        -> IO CInt
-- Blocking wait; marked interruptible so async exceptions can cancel it.
foreign import ccall interruptible "waitForProcess" -- NB. safe - can block
  c_waitForProcess
        :: PHANDLE
        -> Ptr CInt
        -> IO CInt
-- ----------------------------------------------------------------------------
-- Old deprecated variants
-- ----------------------------------------------------------------------------
-- TODO: We're not going to mark these functions as DEPRECATED immediately in
-- process-1.2.0.0. That's because some of their replacements have not been
-- around for all that long. But they should eventually be marked with a
-- suitable DEPRECATED pragma after a release or two.
-- ----------------------------------------------------------------------------
-- runCommand
--TODO: in a later release {-# DEPRECATED runCommand "Use 'spawnCommand' instead" #-}
{- | Runs a command using the shell.
-}
-- Delegates to 'createProcess_' with a shell-style 'CreateProcess'
-- spec and keeps only the resulting process handle.
runCommand
  :: String
  -> IO ProcessHandle
runCommand cmdline =
  fmap (\(_, _, _, ph) -> ph) (createProcess_ "runCommand" (shell cmdline))
-- ----------------------------------------------------------------------------
-- runProcess
--TODO: in a later release {-# DEPRECATED runProcess "Use 'spawnProcess' or 'createProcess' instead" #-}
{- | Runs a raw command, optionally specifying 'Handle's from which to
take the @stdin@, @stdout@ and @stderr@ channels for the new
process (otherwise these handles are inherited from the current
process).
Any 'Handle's passed to 'runProcess' are placed immediately in the
closed state.
Note: consider using the more general 'createProcess' instead of
'runProcess'.
-}
runProcess
  :: FilePath                   -- ^ Filename of the executable (see 'RawCommand' for details)
  -> [String]                   -- ^ Arguments to pass to the executable
  -> Maybe FilePath             -- ^ Optional path to the working directory
  -> Maybe [(String,String)]    -- ^ Optional environment (otherwise inherit)
  -> Maybe Handle               -- ^ Handle to use for @stdin@ (Nothing => use existing @stdin@)
  -> Maybe Handle               -- ^ Handle to use for @stdout@ (Nothing => use existing @stdout@)
  -> Maybe Handle               -- ^ Handle to use for @stderr@ (Nothing => use existing @stderr@)
  -> IO ProcessHandle
runProcess cmd args mb_cwd mb_env mb_stdin mb_stdout mb_stderr = do
    (_, _, _, procHandle) <-
        createProcess_ "runProcess"
            (proc cmd args) { cwd     = mb_cwd
                            , env     = mb_env
                            , std_in  = toStd mb_stdin
                            , std_out = toStd mb_stdout
                            , std_err = toStd mb_stderr }
    -- The supplied handles now belong to the child; close our copies,
    -- but never the process-global standard handles.
    mapM_ closeNonStd [mb_stdin, mb_stdout, mb_stderr]
    return procHandle
  where
    closeNonStd :: Maybe Handle -> IO ()
    closeNonStd (Just hdl)
      | hdl /= stdin && hdl /= stdout && hdl /= stderr = hClose hdl
    closeNonStd _ = return ()

    toStd :: Maybe Handle -> StdStream
    toStd = maybe Inherit UseHandle
-- ----------------------------------------------------------------------------
-- runInteractiveCommand
--TODO: in a later release {-# DEPRECATED runInteractiveCommand "Use 'createProcess' instead" #-}
{- | Runs a command using the shell, and returns 'Handle's that may
be used to communicate with the process via its @stdin@, @stdout@,
and @stderr@ respectively.
-}
-- Shell variant of 'runInteractiveProcess': pipe all three std handles.
runInteractiveCommand
  :: String
  -> IO (Handle,Handle,Handle,ProcessHandle)
runInteractiveCommand =
  runInteractiveProcess1 "runInteractiveCommand" . shell
-- ----------------------------------------------------------------------------
-- runInteractiveProcess
--TODO: in a later release {-# DEPRECATED runInteractiveCommand "Use 'createProcess' instead" #-}
{- | Runs a raw command, and returns 'Handle's that may be used to communicate
with the process via its @stdin@, @stdout@ and @stderr@ respectively.
For example, to start a process and feed a string to its stdin:
> (inp,out,err,pid) <- runInteractiveProcess "..."
> forkIO (hPutStr inp str)
-}
runInteractiveProcess
  :: FilePath                   -- ^ Filename of the executable (see 'RawCommand' for details)
  -> [String]                   -- ^ Arguments to pass to the executable
  -> Maybe FilePath             -- ^ Optional path to the working directory
  -> Maybe [(String,String)]    -- ^ Optional environment (otherwise inherit)
  -> IO (Handle,Handle,Handle,ProcessHandle)
runInteractiveProcess cmd args mb_cwd mb_env =
    runInteractiveProcess1 "runInteractiveProcess" spec
  where
    spec = (proc cmd args) { cwd = mb_cwd, env = mb_env }
-- Shared worker: spawn with all three standard streams piped.  The
-- 'fromJust's are safe because 'CreatePipe' guarantees the handles.
runInteractiveProcess1
  :: String
  -> CreateProcess
  -> IO (Handle,Handle,Handle,ProcessHandle)
runInteractiveProcess1 fun cmd = do
  let piped = cmd { std_in  = CreatePipe
                  , std_out = CreatePipe
                  , std_err = CreatePipe }
  (mb_in, mb_out, mb_err, p) <- createProcess_ fun piped
  return (fromJust mb_in, fromJust mb_out, fromJust mb_err, p)
-- ---------------------------------------------------------------------------
-- system & rawSystem
--TODO: in a later release {-# DEPRECATED system "Use 'callCommand' (or 'spawnCommand' and 'waitForProcess') instead" #-}
{-|
Computation @system cmd@ returns the exit code produced when the
operating system runs the shell command @cmd@.
This computation may fail with one of the following
'System.IO.Error.IOErrorType' exceptions:
[@PermissionDenied@]
The process has insufficient privileges to perform the operation.
[@ResourceExhausted@]
Insufficient resources are available to perform the operation.
[@UnsupportedOperation@]
The implementation does not support system calls.
On Windows, 'system' passes the command to the Windows command
interpreter (@CMD.EXE@ or @COMMAND.COM@), hence Unixy shell tricks
will not work.
On Unix systems, see 'waitForProcess' for the meaning of exit codes
when the process died as the result of a signal.
-}
-- Reject the empty command up front; otherwise run it through the
-- shell with ctrl-C delegated to the child, and wait for completion.
system :: String -> IO ExitCode
system "" = ioException (ioeSetErrorString (mkIOError InvalidArgument "system" Nothing Nothing) "null command")
system str =
  createProcess_ "system" (shell str) { delegate_ctlc = True }
    >>= \(_, _, _, p) -> waitForProcess p
--TODO: in a later release {-# DEPRECATED rawSystem "Use 'callProcess' (or 'spawnProcess' and 'waitForProcess') instead" #-}
{-|
The computation @'rawSystem' /cmd/ /args/@ runs the operating system command
@/cmd/@ in such a way that it receives as arguments the @/args/@ strings
exactly as given, with no funny escaping or shell meta-syntax expansion.
It will therefore behave more portably between operating systems than 'system'.
The return codes and possible failures are the same as for 'system'.
-}
-- Like 'system', but passes the argument list verbatim (no shell).
rawSystem :: String -> [String] -> IO ExitCode
rawSystem cmd args =
  createProcess_ "rawSystem" ((proc cmd args) { delegate_ctlc = True })
    >>= \(_, _, _, p) -> waitForProcess p
| phischu/fragnix | tests/packages/scotty/System.Process.hs | bsd-3-clause | 34,759 | 0 | 34 | 8,743 | 4,607 | 2,487 | 2,120 | 388 | 9 |
{-# LANGUAGE BangPatterns,OverloadedStrings #-}
module Core (doCond, evalExpr, evalArgs, subst, coreTests) where
import Common
import qualified TclObj as T
import qualified Data.ByteString.Char8 as B
import TclParse (parseSubst, Subst(..), SubstArgs, allSubstArgs)
import TclErr
import Control.Monad.Error
import RToken
import qualified Expr as E
import Util
import VarName (arrName, NSQual(..), parseNSTag, toBStr, parseVarName, VarName(..))
import Test.HUnit
-- | Objects evaluate via their cached parsed representation.
instance Runnable T.TclObj where
  evalTcl s = asParsed s >>= runCmds
  {-# INLINE evalTcl #-}
-- | Single commands run directly.
instance Runnable Cmd where
  evalTcl = runCmd
-- | Execute a command sequence, returning the result of the final
-- command (or the default return value for an empty sequence).
runCmds :: [Cmd] -> TclM T.TclObj
runCmds []     = ret
runCmds [c]    = runCmd c
runCmds (c:cs) = runCmd c >> runCmds cs
{-# INLINE runCmds #-}
-- | Look up a (possibly namespace-qualified) command and call it.
callProc :: NSQual BString -> [T.TclObj] -> TclM T.TclObj
callProc pn args = getCmdNS pn >>= doCall (toBStr pn) args
-- Evaluate runtime tokens to values; results accumulate in reverse in
-- 'acc' and are reversed once at the end.  'ExpTok' (Tcl {*} argument
-- expansion) splices a list's elements into the argument stream.
evalRTokens [] acc = return $! reverse acc
evalRTokens (x:xs) acc = case x of
        Lit s -> next $ T.fromBStr s
        LitInt i -> next $ T.fromInt i
        CmdTok t -> runCmds t >>= next
        VarRef vn -> varGetNS vn >>= next
        Block s p -> next $ T.fromBlock s p
        ArrRef ns n i -> do
             -- Array reference: evaluate the index token first.
             ni <- evalArgs [i] >>= return . T.asBStr . head
             varGetNS (NSQual ns (arrName n ni)) >>= next
        CatLst l -> evalArgs l >>= next . valConcat
        ExpTok t -> do
             [rs] <- evalArgs [t]
             l <- T.asList rs
             evalRTokens xs ((reverse l) ++ acc)
 where next !r = evalRTokens xs (r:acc)
       {-# INLINE next #-}
valConcat = T.fromBStr . B.concat . map T.asBStr . filter (not . T.isEmpty)
-- | Evaluate a list of argument tokens to concrete Tcl values.
evalArgs :: [RTokCmd] -> TclM [T.TclObj]
evalArgs args = evalRTokens args []
{-# INLINE evalArgs #-}
-- Evaluate a command's argument tokens, then dispatch: statically
-- resolved names go straight to lookup; dynamic commands evaluate
-- their name token first (extra words become leading arguments).
runCmd :: Cmd -> TclM T.TclObj
runCmd (Cmd n args) = do
  evArgs <- evalArgs args
  res <- go n evArgs
  return $! res
 where go (BasicCmd p@(NSQual _ name)) a = getCmdNS p >>= doCall name a
       go (DynCmd rt) a = do
            lst <- evalArgs [rt]
            let (o:rs) = lst ++ a
            let name = T.asBStr o
            getCmd name >>= doCall name rs
-- Invoke a looked-up command; when the lookup failed, fall back to the
-- namespace "unknown" handler with the original name prepended.
doCall pn args !mproc = do
   case mproc of
     Nothing -> do ukproc <- getUnknownNS >>= maybe (return Nothing) getCmd
                   case ukproc of
                      Nothing -> tclErr $ "invalid command name " ++ show pn
                      Just uk -> uk `applyTo` ((T.fromBStr pn):args)
     Just proc -> proc `applyTo` args
{-# INLINE doCall #-}
-- | Evaluate an expression and coerce the result to a boolean.
doCond obj = evalExpr obj >>= T.asBool
{-# INLINE doCond #-}
-- | Evaluate an expression using the interpreter-backed callback below.
evalExpr e = E.runAsExpr e exprCallback
{-# INLINE evalExpr #-}
-- Bridge from the expression evaluator back into the interpreter:
-- variable reads, ::tcl::mathfunc calls, token and command evaluation.
exprCallback !v = case v of
    E.VarRef n -> varGetNS n
    E.FunRef (n,a) -> callProc (NSQual mathfuncTag n) a
    E.TokEval t -> evalArgs [t] >>= return . head
    E.CmdEval cmdl -> runCmds cmdl
-- Namespace tag under which Tcl math functions live.
mathfuncTag = Just (parseNSTag "::tcl::mathfunc")
-- | Perform Tcl-style substitution ($vars, [commands], backslashes)
-- over a byte string, controlled by the flags in 'sargs'.
subst :: SubstArgs -> BString -> TclM BString
subst sargs str = do
   lst <- elift $ parseSubst sargs str
   getSubsts lst >>= return . B.concat
 where
   -- Stop substituting (keeping what we have so far) when 'ef' is raised.
   endIfErr f ef = f `catchError` (\e -> if toEnum (errCode e) == ef then return [] else throwError e)
   getSubsts [] = return []
   getSubsts (x:xs) = good `endIfErr` EBreak
     where good = do fx <- f x
                     fxs <- getSubsts xs
                     return (fx:fxs)
   elift x = case x of
            Left e -> tclErr e
            Right (v,_) -> return v
   -- 'return'/'continue' raised inside a [command] substitution are
   -- captured rather than propagated.
   handleCmdErrs f = f `catchError` handler
     where handler e = case toEnum (errCode e) of
              EReturn -> return (errData e)
              EContinue -> return T.empty
              _ -> throwError e
   f x = case x of
          SStr s -> return s
          SCmd c -> handleCmdErrs (runCmds (subCmdToCmds c)) >>= return . T.asBStr
          SVar v -> do
            -- Array elements substitute their index string recursively.
            val <- case parseVarName v of
                     NSQual ns (VarName n (Just ind)) ->
                            subst allSubstArgs ind >>= \i2 -> varGetNS (NSQual ns (arrName n i2))
                     vn -> varGetNS vn
            return (T.asBStr val)
coreTests = TestList []
| muspellsson/hiccup | Core.hs | lgpl-2.1 | 4,228 | 0 | 22 | 1,411 | 1,572 | 780 | 792 | 104 | 9 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module GHC.Weak (module M) where
import "base" GHC.Weak as M
| Ye-Yong-Chi/codeworld | codeworld-base/src/GHC/Weak.hs | apache-2.0 | 731 | 0 | 4 | 136 | 23 | 17 | 6 | 4 | 0 |
module Data.Streaming.FileReadSpec (spec) where
import Test.Hspec
import qualified Data.ByteString as S
import qualified Data.Streaming.FileRead as F
import Control.Exception (bracket)
spec :: Spec
spec = describe "Data.Streaming.FileRead" $ do
  it "works" $ do
    let fp = "LICENSE"
    -- Reference result: the whole file read strictly in one go.
    expected <- S.readFile fp
    -- Candidate: chunked reads; 'bracket' closes the handle even if a
    -- read throws.
    actual <- bracket (F.openFile fp) F.closeFile $ \fh -> do
      let loop front = do
            bs <- F.readChunk fh
            if S.null bs
                -- EOF: flush the difference-list accumulator.
                then return $ S.concat $ front []
                else loop (front . (bs:))
      loop id
    actual `shouldBe` expected
| phadej/streaming-commons | test/Data/Streaming/FileReadSpec.hs | mit | 663 | 0 | 25 | 229 | 207 | 107 | 100 | 18 | 2 |
import Graphics.Gnuplot.Simple
type Matrix = [[Double]]
-- Kronecker delta
--
--- http://en.wikipedia.org/wiki/Kronecker_delta
--
δ :: (Int, Int) -> Double
δ (i, j) = if i == j then 1.0 else 0.0

-- n×n identity matrix: δ on every (row, column) pair.
eye :: Int -> [[Double]]
eye n = [ [ δ (i, j) | i <- idxs ] | j <- idxs ]
  where idxs = [0 .. n - 1]

-- n×n matrix filled with 1.0.
ones :: Int -> [[Double]]
ones n = replicate n (replicate n 1.0)

-- n×n matrix filled with 0.0.
zeros :: Int -> [[Double]]
zeros n = replicate n (replicate n 0.0)
-- Print a matrix one row per line, followed by a blank line
-- (matching the original's trailing newline from putStrLn).
disp matrix = mapM_ (putStrLn . show) matrix >> putStrLn ""
enumerate seq = zip [0..(length seq)] seq
eleM matrix (i, j) = (matrix !! i) !! j -- eleMent at row i, column j
colM matrix i = map (!!i) matrix -- Matrix column i (comment was wrong: said row)
rowM matrix i = matrix !! i -- Matrix row i (comment was wrong: said column)
-- Element-wise vector sum.
addv va vb = [a + b | (a, b) <- zip va vb]
-- Element-wise vector difference (first minus second).
subv vb va = [b - a | (b, a) <- zip vb va]
-- Element-wise vector product.
mulv vb va = [a * b | (a, b) <- zip va vb]
-- Scale a vector by a scalar.
mulvx x v = [e * x | e <- v]
-- Euclidean (L2) norm.
normv vector = sqrt (sum [e ^^ 2 | e <- vector])
-- Number of rows (outer list length).
nrowsM matrix = length matrix
-- Number of columns (length of row 0; undefined for an empty matrix).
ncolsM matrix = length (matrix !! 0)
-- Element-wise matrix subtraction (first argument minus second);
-- both matrices must have equal dimensions.
subm mb ma = zipWith (\rb ra -> zipWith (-) rb ra) mb ma
-- Element-wise matrix addition.
addm mb ma = zipWith (\rb ra -> zipWith (+) rb ra) mb ma
-- Multiply matrix a and b.
-- FIX: the original comprehension iterated @i@ (rows of a) in the INNER
-- position and @j@ (columns of b) in the OUTER position, so element
-- (i,j) of the product landed at position (j,i) — i.e. it returned the
-- TRANSPOSE of a·b (observable as soon as the product is not symmetric;
-- e.g. multiplying by the identity gave the transpose of @a@).
-- Row i of the result is now the dot product of row i of @a@ with each
-- column of @b@, in order.
mullm ma mb = [ [ dot rowA colB | colB <- colsB ] | rowA <- ma ]
  where
    -- dot product of two equal-length vectors
    dot u v = sum (zipWith (*) u v)
    -- the columns of b, left to right
    colsB = [ map (!! j) mb | j <- [0 .. length (mb !! 0) - 1] ]
mullmx x ma = (map $ map (*x)) ma
mullmcol matrix col = map (\row -> sum $ zipWith (*) row col) matrix
-- Matrix Multiplication Operator
-- Infix alias for 'mullm'.
(@@) ma mb = mullm ma mb
powm matrix n = foldr (mullm) (eye (length matrix)) $ take n $ repeat matrix
-- Transpose: row i of the result is column i of the input.
transpM matrix = [ colM matrix i | i <- [0 .. ncolsM matrix - 1] ]
-- Build a new matrix whose (row j, col i) entry is the original entry
-- scaled by @element_ij (j, i)@ (a 0/1 mask in all callers here).
-- FIX: the column bound was derived from the ROW count ('nrowsM' twice),
-- which truncated or over-ran non-square matrices; it now uses the
-- actual column count.  Behaviour on square matrices is unchanged.
matrixIterator :: ((Int, Int) -> Double) -> [[Double]] -> [[Double]]
matrixIterator element_ij matrix =
    [ [ element_ij (j, i) * ((matrix !! j) !! i) | i <- [0 .. cols - 1] ]
    | j <- [0 .. rows - 1] ]
  where
    rows = length matrix
    cols = length (matrix !! 0)
-- Create Diagonal matrix from a Matrix
-- Cij = Aij * δij
-- Cij = 0 if i /= j, = Aii if i == j   (comment fixed: was "0 if i == j")
--
diagM matrix = matrixIterator δ matrix
-- Strictly lower-triangular part: keep entries below the diagonal,
-- zero the diagonal and everything above it.
matrixL matrix = matrixIterator keepBelow matrix
  where
    keepBelow (i, j) = if i > j then 1 else 0
-- Strictly upper-triangular part: keep entries above the diagonal,
-- zero the diagonal and everything below it.
matrixU matrix = matrixIterator keepAbove matrix
  where
    keepAbove (i, j) = if i < j then 1 else 0
-- Off-diagonal part (L + U): zero the diagonal, keep everything else.
matrixLU matrix = matrixIterator offDiag matrix
  where
    offDiag (i, j) = if i == j then 0 else 1
diagonalM matrix = map (\i -> (eleM matrix)(i, i)) [0..((nrowsM matrix)-1)]
-- Invert a 3×3 matrix via the adjugate (cofactor) formula:
--   inv(M) = adj(M) / det(M)
-- No singularity check: a zero determinant yields Infinity/NaN entries.
inverse3x3 :: [[Double]] -> [[Double]]
inverse3x3 matrix = inversedm
  where
    em = curry $ eleM matrix
    a = em 0 0 ; b = em 0 1 ; c = em 0 2
    d = em 1 0 ; e = em 1 1 ; f = em 1 2
    g = em 2 0 ; h = em 2 1 ; i = em 2 2
    -- Cofactors of each entry (signs included).
    aA = e*i - f*h
    bB = -(d*i -f*g)
    cC = d*h - e*g
    dD = -b*i + c*h
    eE = a*i - c*g
    fF = -a*h + b*g
    gG = b*f - c*e
    hH = -a*f + c*d
    iI = a*e - b*d
    -- Reciprocal of the determinant, expanded along the first row.
    invdet = 1/(a*aA + b*bB + c*cC)
    inversedm = mullmx invdet [[aA, dD, gG], [bB, eE, hH], [cC, fF, iI]]
-- NOTE(review): 'pairs' appears unused in this module.
pairs = tail
-- One Jacobi iteration: x' = (b - (L+U)·x) / diag(A), element-wise.
jacobi_step matrix vector_b vector_x = zipWith (/) (zipWith (-) vector_b (mullmcol lum vector_x)) diag
  where
    lum = matrixLU matrix
    diag = diagonalM matrix
--mat = [[1, 2, 3], [4, 5, 6], [7, 8, 9]] :: [[Double]]
--b = [-1, 2, 3] :: [Double]
--matrixA = [[5, -2, 3], [-3, 9, 1], [2, -1, -7]] :: [[Double]]
-- Jacobi solver for A·x = b: iterate 'jacobi_step' from the initial
-- guess until the residual norm drops below 1e-5 (capped at 200
-- iterations), returning the last iterate produced.
-- FIX: when the initial guess already satisfied the tolerance,
-- 'takeWhile' produced an empty list and @last []@ crashed; we now
-- fall back to the initial guess in that case.
jacobi matrixA vector_b vector_x0 = vector_x
  where
    generator = iterate (jacobi_step matrixA vector_b) vector_x0
    -- residual norm ||A·x - b||
    errorv vx = normv ((matrixA `mullmcol` vx) `subv` vector_b)
    notConverged vx = errorv vx >= 1e-5
    vector_x = case take 200 (takeWhile notConverged generator) of
                 [] -> vector_x0
                 xs -> last xs
--ss_setep :: [[Double]] -> [Double] -> Double -> (Double -> [Double]) -> (Double, [Double]) -> (Double, [Double])
-- One forward-Euler step of the LTI state-space system x' = A·x + B·u:
--   x(t+dt) = (I + dt·A)·x(t) + dt·B·u(t)
-- FIX: the input term previously multiplied u(t) by B instead of dt·B —
-- the local 'db' (= dt·B) was computed but never used — which
-- overweighted the input by a factor of 1/dt.
ss_step a b dt u (t, x) = (tnext, xnext)
  where
    -- (I + dt*A)
    da = (mullmx dt a) `addm` eye (length a)
    -- dt*B
    db = mulvx dt b
    xnext = (mullmcol da x) `addv` ((u t) `mulvx` db)
    tnext = t + dt
-- Example third-order SISO state-space model: x' = A·x + B·u, y = C·x.
a = [[-20, -40, -60], [1, 0, 0], [0, 1, 0]] :: Matrix
b = [1, 0, 0] :: [Double]
c = [0, 0, 1] :: [Double]
d = [0, 0, 0] :: [Double]
-- Input signal: unit step (constant 1.0 for all t).
u :: Double -> Double
u t = 1.0
--u t = 0.0
-- Initial state.
x0 = [0.1, 0.1, 0.1]
-- Euler stepper specialised to this system with dt = 0.01.
ssys = ss_step a b 0.01 u
-- 1000 simulation steps starting at t = 0.
simu = take 1000 $ iterate ssys (0, x0)
t_vector = map fst simu
x_vector = map snd simu
-- Output y is the third state component (C = [0,0,1]); D is all zeros.
y_vector = map (!!2) x_vector
plot_solution = plotList [] (zip t_vector y_vector)
{-
Testing:
a = [[1, 3, 5], [2, 4, 6]] :: [[Double]]
b = [[3, 6], [1, 4], [5, 2]] :: [[Double]]
λ >
λ > [[3.0, 4.0], [5.0, 6.0]] @@ [[-3.0, 2], [5/2, -3/2]]
[[1.0,0.0],[0.0,1.0]]
λ >
mat = [[3.0, 4.0], [5.0, 6.0]] :: [[Double]]
-}
| Khady/Functional-Programming | codes/matrix.hs | unlicense | 5,024 | 0 | 13 | 1,460 | 2,192 | 1,196 | 996 | 95 | 1 |
module Main where
import Control.Exception
import Control.Monad
import System.Mem
import Control.Monad.ST
import Data.Array
import Data.Array.ST
import qualified Data.Array.Unboxed as U
import Control.DeepSeq
import Data.Compact
-- Abort the test by throwing an 'AssertionFailed' carrying @msg@.
assertFail :: String -> IO ()
assertFail = throwIO . AssertionFailed
-- Compare expected vs. actual, raising 'AssertionFailed' (with the
-- same "expected X, got Y" message as before) on mismatch.
assertEquals :: (Eq a, Show a) => a -> a -> IO ()
assertEquals expected actual
  | expected == actual = return ()
  | otherwise =
      throwIO $ AssertionFailed $
        "expected " ++ (show expected) ++ ", got " ++ (show actual)
-- Build a 10-element array indexed 1..10 whose j-th entry is 2*j + 1,
-- in any mutable-array monad.
arrTest :: (Monad m, MArray a e m, Num e) => m (a Int e)
arrTest = do
  arr <- newArray (1, 10) 0
  mapM_ (\j -> writeArray arr j (fromIntegral (2 * j + 1))) [1 .. 10]
  return arr
-- Orphan NFData instance: forcing an unboxed array to WHNF already
-- fully evaluates it, since its elements are stored unboxed.
instance NFData (U.UArray i e) where
  rnf x = seq x ()
-- test :: (Word -> a -> IO (Maybe (Compact a))) -> IO ()
-- Build the same array contents four different ways (listArray, manual
-- ST freeze, runSTArray, runSTUArray), compact the whole tuple, and
-- check both the original and the compacted copy before and after a
-- major GC.
test func = do
  let fromList :: Array Int Int
      fromList = listArray (1, 10) [1..]
      frozen :: Array Int Int
      frozen = runST $ do
        arr <- arrTest :: ST s (STArray s Int Int)
        freeze arr
      stFrozen :: Array Int Int
      stFrozen = runSTArray arrTest
      unboxedFrozen :: U.UArray Int Int
      unboxedFrozen = runSTUArray arrTest
  let val = (fromList, frozen, stFrozen, unboxedFrozen)
  str <- func 4096 val
  -- check that val is still good
  assertEquals (fromList, frozen, stFrozen, unboxedFrozen) val
  -- check the value in the compact
  assertEquals val (getCompact str)
  performMajorGC
  -- check again the value in the compact
  assertEquals val (getCompact str)
-- Exercise both sharing-preserving and no-share compaction.
main = do
  test newCompact
  test newCompactNoShare
| snoyberg/ghc | libraries/compact/tests/compact_simple_array.hs | bsd-3-clause | 1,582 | 0 | 16 | 370 | 554 | 284 | 270 | 45 | 2 |
{-# LANGUAGE BangPatterns, MagicHash #-}
-- |
-- Module : Data.Text.Internal.Fusion
-- Copyright : (c) Tom Harper 2008-2009,
-- (c) Bryan O'Sullivan 2009-2010,
-- (c) Duncan Coutts 2009
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : GHC
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- Text manipulation functions represented as fusible operations over
-- streams.
module Data.Text.Internal.Fusion
(
-- * Types
Stream(..)
, Step(..)
-- * Creation and elimination
, stream
, unstream
, reverseStream
, length
-- * Transformations
, reverse
-- * Construction
-- ** Scans
, reverseScanr
-- ** Accumulating maps
, mapAccumL
-- ** Generation and unfolding
, unfoldrN
-- * Indexing
, index
, findIndex
, countChar
) where
import Prelude (Bool(..), Char, Maybe(..), Monad(..), Int,
Num(..), Ord(..), ($), (&&),
fromIntegral, otherwise)
import Data.Bits ((.&.))
import Data.Text.Internal (Text(..))
import Data.Text.Internal.Private (runText)
import Data.Text.Internal.Unsafe.Char (ord, unsafeChr, unsafeWrite)
import Data.Text.Internal.Unsafe.Shift (shiftL, shiftR)
import qualified Data.Text.Array as A
import qualified Data.Text.Internal.Fusion.Common as S
import Data.Text.Internal.Fusion.Types
import Data.Text.Internal.Fusion.Size
import qualified Data.Text.Internal as I
import qualified Data.Text.Internal.Encoding.Utf16 as U16
default(Int)
-- | /O(n)/ Convert a 'Text' into a 'Stream Char'.
stream :: Text -> Stream Char
stream (Text arr off len) = Stream next off (maxSize len)
    where
      !end = off+len
      -- Decode UTF-16 forwards: a unit in the high-surrogate range
      -- (0xD800-0xDBFF) pairs with the following unit; anything else
      -- is a BMP code point on its own.
      next !i
          | i >= end = Done
          | n >= 0xD800 && n <= 0xDBFF = Yield (U16.chr2 n n2) (i + 2)
          | otherwise = Yield (unsafeChr n) (i + 1)
          where
            n = A.unsafeIndex arr i
            n2 = A.unsafeIndex arr (i + 1)
{-# INLINE [0] stream #-}
-- | /O(n)/ Convert a 'Text' into a 'Stream Char', but iterate
-- backwards.
reverseStream :: Text -> Stream Char
reverseStream (Text arr off len) = Stream next (off+len-1) (maxSize len)
    where
      {-# INLINE next #-}
      -- Walk backwards from the last unit: a low surrogate
      -- (0xDC00-0xDFFF) pairs with the PRECEDING high surrogate.
      next !i
          | i < off = Done
          | n >= 0xDC00 && n <= 0xDFFF = Yield (U16.chr2 n2 n) (i - 2)
          | otherwise = Yield (unsafeChr n) (i - 1)
          where
            n = A.unsafeIndex arr i
            n2 = A.unsafeIndex arr (i - 1)
{-# INLINE [0] reverseStream #-}
-- | /O(n)/ Convert a 'Stream Char' into a 'Text'.
unstream :: Stream Char -> Text
unstream (Stream next0 s0 len) = runText $ \done -> do
  -- Start from the hinted size; double the buffer whenever a write
  -- would overflow it.
  let mlen = upperBound 4 len
  arr0 <- A.new mlen
  let outer arr top = loop
       where
        loop !s !i =
            case next0 s of
              Done -> done arr i
              Skip s' -> loop s' i
              Yield x s'
                | j >= top -> {-# SCC "unstream/resize" #-} do
                        let top' = (top + 1) `shiftL` 1
                        arr' <- A.new top'
                        A.copyM arr' 0 arr 0 top
                        outer arr' top' s i
                | otherwise -> do d <- unsafeWrite arr i x
                                  loop s' (i+d)
                -- j: highest index this code point will touch (two
                -- units for supplementary-plane characters).
                where j | ord x < 0x10000 = i
                        | otherwise = i + 1
  outer arr0 mlen s0 0
{-# INLINE [0] unstream #-}
{-# RULES "STREAM stream/unstream fusion" forall s. stream (unstream s) = s #-}
-- ----------------------------------------------------------------------------
-- * Basic stream functions
-- | /O(n)/ The number of characters in the stream.
length :: Stream Char -> Int
length = S.lengthI
{-# INLINE[0] length #-}
-- | /O(n)/ Reverse the characters of a string.
reverse :: Stream Char -> Text
reverse (Stream next s len0)
    | isEmpty len0 = I.empty
    | otherwise = I.textP arr off' len'
  where
    len0' = upperBound 4 (larger len0 4)
    (arr, (off', len')) = A.run2 (A.new len0' >>= loop s (len0'-1) len0')
    -- Fill the buffer from the END towards the front; 'i' is the next
    -- free index.  When room runs out at the front, double the buffer
    -- and move the already-written suffix to the end of the new one.
    loop !s0 !i !len marr =
        case next s0 of
          Done -> return (marr, (j, len-j))
              where j = i + 1
          Skip s1 -> loop s1 i len marr
          Yield x s1 | i < least -> {-# SCC "reverse/resize" #-} do
                       let newLen = len `shiftL` 1
                       marr' <- A.new newLen
                       A.copyM marr' (newLen-len) marr 0 len
                       write s1 (len+i) newLen marr'
                     | otherwise -> write s1 i len marr
              where n = ord x
                    -- Code points beyond the BMP need two slots
                    -- (surrogate pair), hence the earlier resize check.
                    least | n < 0x10000 = 0
                          | otherwise = 1
                    m = n - 0x10000
                    lo = fromIntegral $ (m `shiftR` 10) + 0xD800
                    hi = fromIntegral $ (m .&. 0x3FF) + 0xDC00
                    write t j l mar
                        | n < 0x10000 = do
                            A.unsafeWrite mar j (fromIntegral n)
                            loop t (j-1) l mar
                        | otherwise = do
                            A.unsafeWrite mar (j-1) lo
                            A.unsafeWrite mar j hi
                            loop t (j-2) l mar
{-# INLINE [0] reverse #-}
-- | /O(n)/ Perform the equivalent of 'scanr' over a list, only with
-- the input and result reversed.
reverseScanr :: (Char -> Char -> Char) -> Char -> Stream Char -> Stream Char
reverseScanr f z0 (Stream next0 s0 len) = Stream next (S1 :*: z0 :*: s0) (len+1) -- HINT maybe too low
  where
    {-# INLINE next #-}
    -- S1: emit the seed first; S2: fold each input element into the
    -- running value (strictly) and emit it.
    next (S1 :*: z :*: s) = Yield z (S2 :*: z :*: s)
    next (S2 :*: z :*: s) = case next0 s of
                              Yield x s' -> let !x' = f x z
                                            in Yield x' (S2 :*: x' :*: s')
                              Skip s' -> Skip (S2 :*: z :*: s')
                              Done -> Done
{-# INLINE reverseScanr #-}
-- | /O(n)/ Like 'unfoldr', 'unfoldrN' builds a stream from a seed
-- value. However, the length of the result is limited by the
-- first argument to 'unfoldrN'. This function is more efficient than
-- 'unfoldr' when the length of the result is known.
-- Delegates to the size-hinted shared implementation.
unfoldrN :: Int -> (a -> Maybe (Char,a)) -> a -> Stream Char
unfoldrN n = S.unfoldrNI n
{-# INLINE [0] unfoldrN #-}
-------------------------------------------------------------------------------
-- ** Indexing streams
-- | /O(n)/ stream index (subscript) operator, starting from 0.
-- Thin wrapper over the shared "Common" implementation; the phase-0
-- INLINE markers let the fusion rules fire first.
index :: Stream Char -> Int -> Char
index = S.indexI
{-# INLINE [0] index #-}
-- | The 'findIndex' function takes a predicate and a stream and
-- returns the index of the first element in the stream
-- satisfying the predicate.
findIndex :: (Char -> Bool) -> Stream Char -> Maybe Int
findIndex = S.findIndexI
{-# INLINE [0] findIndex #-}
-- | /O(n)/ The 'count' function returns the number of times the query
-- element appears in the given stream.
countChar :: Char -> Stream Char -> Int
countChar = S.countCharI
{-# INLINE [0] countChar #-}
-- | /O(n)/ Like a combination of 'map' and 'foldl''. Applies a
-- function to each element of a 'Text', passing an accumulating
-- parameter from left to right, and returns a final 'Text'.
mapAccumL :: (a -> Char -> (a,Char)) -> a -> Stream Char -> (a, Text)
mapAccumL f z0 (Stream next0 s0 len) = (nz,I.textP na 0 nl)
  where
    (na,(nz,nl)) = A.run2 (A.new mlen >>= \arr -> outer arr mlen z0 s0 0)
      where mlen = upperBound 4 len
    -- Same buffer-doubling strategy as 'unstream', but threading the
    -- accumulator 'z' through every yielded element.
    outer arr top = loop
      where
        loop !z !s !i =
            case next0 s of
              Done -> return (arr, (z,i))
              Skip s' -> loop z s' i
              Yield x s'
                | j >= top -> {-# SCC "mapAccumL/resize" #-} do
                        let top' = (top + 1) `shiftL` 1
                        arr' <- A.new top'
                        A.copyM arr' 0 arr 0 top
                        outer arr' top' z s i
                | otherwise -> do d <- unsafeWrite arr i c
                                  loop z' s' (i+d)
                where (z',c) = f z x
                      j | ord c < 0x10000 = i
                        | otherwise = i + 1
{-# INLINE [0] mapAccumL #-}
| fpco/text-stream-decode | text/Data/Text/Internal/Fusion.hs | mit | 8,429 | 0 | 25 | 3,004 | 2,245 | 1,180 | 1,065 | 151 | 4 |
import StackTest
import System.Directory
import Control.Monad
main :: IO ()
main = do
    -- First run: the test-suite is expected to create the file "foo".
    stack ["test"]
    exists1 <- doesFileExist "foo"
    unless exists1 $ error "exists1 should be True"
    removeFile "foo"
    -- Second run with --no-rerun-tests: the unchanged, already-passed
    -- suite must NOT run again, so "foo" must not reappear.
    stack ["test", "--no-rerun-tests"]
    exists2 <- doesFileExist "foo"
    when exists2 $ error "exists2 should be False"
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Prompt.Email
-- Copyright : (c) 2007 Brent Yorgey
-- License : BSD-style (see LICENSE)
--
-- Maintainer : <byorgey@gmail.com>
-- Stability : stable
-- Portability : unportable
--
-- A prompt for sending quick, one-line emails, via the standard GNU
-- \'mail\' utility (which must be in your $PATH). This module is
-- intended mostly as an example of using "XMonad.Prompt.Input" to
-- build an action requiring user input.
--
-----------------------------------------------------------------------------
module XMonad.Prompt.Email (
-- * Usage
-- $usage
emailPrompt
) where
import XMonad.Core
import XMonad.Util.Run
import XMonad.Prompt
import XMonad.Prompt.Input
-- $usage
--
-- You can use this module by importing it, along with
-- "XMonad.Prompt", into your ~\/.xmonad\/xmonad.hs file:
--
-- > import XMonad.Prompt
-- > import XMonad.Prompt.Email
--
-- and adding an appropriate keybinding, for example:
--
-- > , ((modm .|. controlMask, xK_e), emailPrompt defaultXPConfig addresses)
--
-- where @addresses@ is a list of email addresses that should
-- autocomplete, for example:
--
-- > addresses = ["me@me.com", "mr@big.com", "tom.jones@foo.bar"]
--
-- You can still send email to any address, but sending to these
-- addresses will be faster since you only have to type a few
-- characters and then hit \'tab\'.
--
-- For detailed instructions on editing your key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
-- | Prompt the user for a recipient, subject, and body, and send an
-- email via the GNU \'mail\' utility. The second argument is a list
-- of addresses for autocompletion.
-- Prompts (in order) for recipient, subject and body, then pipes the
-- body to @mail -s <subject> <recipient>@ and discards its output.
-- NOTE(review): each '?+' step appears to run its continuation only
-- when the previous prompt produced input, so cancelling any prompt
-- aborts the chain — confirm against XMonad.Prompt.Input.
emailPrompt :: XPConfig -> [String] -> X ()
emailPrompt c addrs =
    inputPromptWithCompl c "To" (mkComplFunFromList addrs) ?+ \to ->
      inputPrompt c "Subject" ?+ \subj ->
      inputPrompt c "Body" ?+ \body ->
      runProcessWithInput "mail" ["-s", subj, to] (body ++ "\n")
      >> return ()
| adinapoli/xmonad-contrib | XMonad/Prompt/Email.hs | bsd-3-clause | 2,147 | 0 | 14 | 458 | 191 | 124 | 67 | 13 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RankNTypes #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Array.ST
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (uses Data.Array.MArray)
--
-- Mutable boxed and unboxed arrays in the 'Control.Monad.ST.ST' monad.
--
-----------------------------------------------------------------------------
module Data.Array.ST (
-- * Boxed arrays
STArray, -- instance of: Eq, MArray
runSTArray,
-- * Unboxed arrays
STUArray, -- instance of: Eq, MArray
runSTUArray,
-- * Overloaded mutable array interface
module Data.Array.MArray,
) where
import Data.Array.Base ( STUArray, UArray, unsafeFreezeSTUArray )
import Data.Array.MArray
import Control.Monad.ST ( ST, runST )
import GHC.Arr ( STArray, Array, unsafeFreezeSTArray )
-- | A safe way to create and work with a mutable array before returning an
-- immutable array for later perusal. This function avoids copying
-- the array before returning it - it uses 'unsafeFreeze' internally, but
-- this wrapper is a safe interface to that function.
--
#if __GLASGOW_HASKELL__ >= 711
runSTArray :: (forall s . ST s (STArray s i e)) -> Array i e
#else
runSTArray :: Ix i => (forall s . ST s (STArray s i e)) -> Array i e
#endif
-- Safe despite 'unsafeFreeze': the ST array cannot escape 'runST',
-- so no later mutation of the frozen array is observable.
runSTArray st = runST (st >>= unsafeFreezeSTArray)
-- | A safe way to create and work with an unboxed mutable array before
-- returning an immutable array for later perusal.  This function
-- avoids copying the array before returning it - it uses
-- 'unsafeFreeze' internally, but this wrapper is a safe interface to
-- that function.
--
#if __GLASGOW_HASKELL__ >= 711
runSTUArray :: (forall s . ST s (STUArray s i e)) -> UArray i e
#else
runSTUArray :: Ix i => (forall s . ST s (STUArray s i e)) -> UArray i e
#endif
-- Safe for the same reason as 'runSTArray'.
runSTUArray st = runST (st >>= unsafeFreezeSTUArray)
-- INTERESTING... this is the type we'd like to give to runSTUArray:
--
-- runSTUArray :: (Ix i, IArray UArray e,
-- forall s. MArray (STUArray s) e (ST s))
-- => (forall s . ST s (STUArray s i e))
-- -> UArray i e
--
-- Note the quantified constraint. We dodged the problem by using
-- unsafeFreezeSTUArray directly in the defn of runSTUArray above, but
-- this essentially constrains us to a single unsafeFreeze for all STUArrays
-- (in theory we might have a different one for certain element types).
| beni55/haste-compiler | libraries/ghc-7.10/array/Data/Array/ST.hs | bsd-3-clause | 2,607 | 0 | 10 | 521 | 240 | 158 | 82 | 16 | 1 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeOperators #-}
module T13938a where
import Data.Kind (Type)
-- | Open data family of singletons, indexed by a value of any kind.
data family Sing (a :: k)
-- | Singletons for type-level lists: one constructor per list constructor.
data instance Sing (z :: [a]) where
  SNil :: Sing '[]
  SCons :: Sing x -> Sing xs -> Sing (x:xs)
-- | Kind of defunctionalization symbols ("arrows" that are data, not functions).
data TyFun :: Type -> Type -> Type
-- | A defunctionalized function kind built from 'TyFun'.
type a ~> b = TyFun a b -> Type
infixr 0 ~>
-- | Apply a defunctionalized symbol to an argument.
type family Apply (f :: k1 ~> k2) (x :: k1) :: k2
-- | Infix shorthand for 'Apply'.
type a @@ b = Apply a b
infixl 9 @@
-- | Promoted tag choosing between the ordinary arrow and the
-- defunctionalized arrow.
data FunArrow = (:->) -- ^ '(->)'
              | (:~>) -- ^ '(~>)'
-- | Maps an arrow tag to the corresponding function kind.
class FunType (arr :: FunArrow) where
  type Fun (k1 :: Type) arr (k2 :: Type) :: Type
-- | Application for each arrow tag, at the kind chosen by 'Fun'.
class FunType arr => AppType (arr :: FunArrow) where
  type App k1 arr k2 (f :: Fun k1 arr k2) (x :: k1) :: k2
-- | Both capabilities bundled as a constraint.
type FunApp arr = (FunType arr, AppType arr)
instance FunType (:->) where
  type Fun k1 (:->) k2 = k1 -> k2
$(return []) -- This is only necessary for GHC 8.0 -- GHC 8.2 is smarter
instance AppType (:->) where
  type App k1 (:->) k2 (f :: k1 -> k2) x = f x
instance FunType (:~>) where
  type Fun k1 (:~>) k2 = k1 ~> k2
-- Empty splice forces a declaration-group boundary so the previous
-- instance is visible to the next one on GHC 8.0.
$(return [])
instance AppType (:~>) where
  type App k1 (:~>) k2 (f :: k1 ~> k2) x = f @@ x
-- | List eliminator whose motive @p@ is an ordinary type-level function.
elimList :: forall (a :: Type) (p :: [a] -> Type) (l :: [a]).
            Sing l
         -> p '[]
         -> (forall (x :: a) (xs :: [a]). Sing x -> Sing xs -> p xs -> p (x:xs))
         -> p l
elimList = elimListPoly @(:->)
-- | List eliminator whose motive @p@ is a defunctionalized symbol.
elimListTyFun :: forall (a :: Type) (p :: [a] ~> Type) (l :: [a]).
                 Sing l
              -> p @@ '[]
              -> (forall (x :: a) (xs :: [a]). Sing x -> Sing xs -> p @@ xs -> p @@ (x:xs))
              -> p @@ l
elimListTyFun = elimListPoly @(:~>) @_ @p
-- | List eliminator polymorphic over the arrow flavour of the motive;
-- both eliminators above are instantiations of this one.
elimListPoly :: forall (arr :: FunArrow) (a :: Type) (p :: ([a] -?> Type) arr) (l :: [a]).
                FunApp arr
             => Sing l
             -> App [a] arr Type p '[]
             -> (forall (x :: a) (xs :: [a]). Sing x -> Sing xs -> App [a] arr Type p xs -> App [a] arr Type p (x:xs))
             -> App [a] arr Type p l
-- Structural recursion on the singleton list; type applications pin the
-- invisible arguments for the recursive call.
elimListPoly SNil pNil _ = pNil
elimListPoly (SCons x (xs :: Sing xs)) pNil pCons = pCons x xs (elimListPoly @arr @a @p @xs xs pNil pCons)
| sdiehl/ghc | testsuite/tests/dependent/should_compile/T13938a.hs | bsd-3-clause | 2,501 | 22 | 20 | 714 | 1,026 | 587 | 439 | -1 | -1 |
module Imp500Aux where
import Imp500
-- NOTE(review): two groups of uninhabited phantom types of arity 1..5;
-- presumably fixtures exercising the renamer/importer -- they carry no data.
data T1 a
data T2 a b
data T3 a b c
data T4 a b c d
data T5 a b c d e
data T6 a
data T7 a b
data T8 a b c
data T9 a b c d
data T10 a b c d e
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/rename/should_compile/Imp500Aux.hs | bsd-3-clause | 190 | 0 | 3 | 67 | 67 | 55 | 12 | -1 | -1 |
-- NOTE(review): this does not compile -- 'foo' is not defined anywhere
-- visible, and as written it is applied to '()' and 'putStrLn' as
-- arguments. The path suggests an intentionally-broken test fixture;
-- confirm before "fixing".
main = do foo() putStrLn "Hello Haskell World!"
| codeboardio/kali | test/src_examples/haskell/error_one_file/Root/Src/Main.hs | mit | 48 | 0 | 8 | 8 | 18 | 8 | 10 | 1 | 1 |
-- | Fake cabal module for local building
module Paths_js_flot(getDataFileName, version) where
import Data.Version
import System.Directory
import System.FilePath
getDataFileName :: FilePath -> IO FilePath
-- | Resolve a data file name against the @javascript@ directory under
-- the current working directory (local stand-in for the cabal-generated
-- Paths module).
getDataFileName name = do
    cwd <- getCurrentDirectory
    return (cwd </> "javascript" </> name)
version :: Version
-- 'makeVersion' is the tag-free smart constructor: identical to
-- @Version [0,8,3] []@.
version = makeVersion [0,8,3]
| ndmitchell/js-flot | src/Paths.hs | mit | 350 | 0 | 9 | 59 | 96 | 53 | 43 | 10 | 1 |
module Main (main) where
import Test.Framework (defaultMain)
import qualified Tests.Properties as Properties
import qualified Tests.Simple as Simple
main :: IO ()
-- | Run every test group under test-framework's default runner.
main = defaultMain suites
  where
    -- All top-level suites to execute.
    suites = [Properties.tests, Simple.tests]
| muhbaasu/hash-id | tests/Tests.hs | mit | 234 | 0 | 7 | 45 | 63 | 39 | 24 | 6 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Scheme.Simplified where
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Text (Text)
import Data.Set (Set)
import Data.Map (Map)
import Control.Lens
import Scheme.IR
-- | Map from a function's name to its definition.
type FunctionTable = Map Text Function
-- | All names declared in the program.
type NameSpace = Set Text
-- | A simplified scheme program: its namespace, its functions, and the
-- remaining top-level statements.
data Simplified = Simplified {
    _namespace :: NameSpace
  , _functions :: FunctionTable
  , _statements :: [AST]
  }
makeLenses ''Simplified
-- | All declared names, as a list.
nameList = Set.toList . _namespace
-- | All function definitions, as (name, definition) pairs.
funcTable = Map.toList . _functions
| AKST/scheme.llvm | src/Scheme/Simplified.hs | mit | 566 | 0 | 9 | 93 | 171 | 100 | 71 | 18 | 1 |
module Validation(Validation, valid, invalid) where
import Control.Applicative
import Control.Monad
import Data.Monoid
-- | A value of type @a@ carrying accumulated validation errors of type
-- @e@.  A value is considered valid exactly when its error part is
-- 'mempty'; binding chains errors together with '<>'.
data Validation e a = Validation a e

-- | Wrap a value with no errors attached.
valid :: Monoid e => a -> Validation e a
valid x = Validation x mempty

-- | Wrap a value together with an error.
-- (Parameter renamed from @error@, which shadowed 'Prelude.error'.)
invalid :: Monoid e => a -> e -> Validation e a
invalid x err = Validation x err

-- | A validation is valid when no errors have accumulated.
isValid :: (Eq e, Monoid e) => Validation e a -> Bool
isValid (Validation _ e) = e == mempty

instance (Show a, Show e, Monoid e, Eq e) => Show (Validation e a) where
    show v@(Validation a e) | isValid v = "OK " ++ show a
                            | otherwise = "INVALID " ++ show a ++ "(" ++ show e ++")"

instance (Monoid e) => Monad (Validation e) where
  return = valid
  -- Keep the new value but accumulate errors from both sides of the bind.
  (Validation a errors) >>= f = case f a of
    Validation b moreErrors -> Validation b (errors `mappend` moreErrors)

instance (Monoid e) => Functor (Validation e) where
  fmap = liftM

instance (Monoid e) => Applicative (Validation e) where
  pure = return
  (<*>) = ap
| raimohanska/Monads | examples/challenges/Validation/Validation.hs | mit | 964 | 0 | 11 | 222 | 417 | 214 | 203 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveGeneric #-}
module Codec.Xlsx.Types.Internal.Relationships where
import Data.List (find)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import GHC.Generics (Generic)
import Network.URI hiding (path)
import Prelude hiding (abs, lookup)
import Safe
import Text.XML
import Text.XML.Cursor
import Codec.Xlsx.Parser.Internal
import Codec.Xlsx.Types.Internal
import Codec.Xlsx.Writer.Internal
-- | A single OPC relationship: its (fully qualified) type URI and the
-- target part's path inside the package.
data Relationship = Relationship
    { relType :: Text
    , relTarget :: FilePath
    } deriving (Eq, Show, Generic)
-- | Describes relationships according to Open Packaging Convention
--
-- See ECMA-376, 4th Edition Office Open XML File Formats — Open Packaging
-- Conventions
newtype Relationships = Relationships
    { relMap :: Map RefId Relationship
    } deriving (Eq, Show, Generic)
-- | Build a relationship table from an association list.
fromList :: [(RefId, Relationship)] -> Relationships
fromList pairs = Relationships (Map.fromList pairs)

-- | The table containing no relationships.
empty :: Relationships
empty = Relationships Map.empty

-- | Number of relationships in the table.
size :: Relationships -> Int
size rels = Map.size (relMap rels)

-- | Build a table entry, prefixing the type with the standard
-- relationship-type base URI.
relEntry :: RefId -> Text -> FilePath -> (RefId, Relationship)
relEntry refId relTy target = (refId, Relationship (stdRelType relTy) target)

-- | Look up a relationship by its reference id.
lookup :: RefId -> Relationships -> Maybe Relationship
lookup rid rels = Map.lookup rid (relMap rels)
-- | Re-anchor every relationship target so it is relative to the given
-- base path (the path of the part owning the relationships).
setTargetsFrom :: FilePath -> Relationships -> Relationships
setTargetsFrom fp (Relationships m) = Relationships (Map.map fixPath m)
  where
    fixPath rel = rel{ relTarget = fp `joinRel` relTarget rel}
-- | joins relative URI (actually a file path as an internal relation target)
joinRel :: FilePath -> FilePath -> FilePath
-- NOTE: 'fromJustNote' is partial -- an unparsable path aborts with the
-- given note instead of returning an error value.
joinRel abs rel = uriToString id (relPath `nonStrictRelativeTo` base) ""
  where
    base = fromJustNote "joinRel base path" $ parseURIReference abs
    relPath = fromJustNote "joinRel relative path" $ parseURIReference rel
-- | Express @path@ relative to @base@ (the inverse direction of 'joinRel').
relFrom :: FilePath -> FilePath -> FilePath
relFrom path base = uriToString id (pathURI `relativeFrom` baseURI) ""
  where
    -- Error notes previously said "joinRel" (copy-paste); they now name
    -- this function so a parse failure points at the right place.
    -- NOTE: 'fromJustNote' is partial and aborts on unparsable paths.
    baseURI = fromJustNote "relFrom base path" $ parseURIReference base
    pathURI = fromJustNote "relFrom relative path" $ parseURIReference path
-- | The first relationship of the given type, if any exists.
findRelByType :: Text -> Relationships -> Maybe Relationship
findRelByType t = find (\rel -> relType rel == t) . Map.elems . relMap

-- | Every relationship of the given type.
allByType :: Text -> Relationships -> [Relationship]
allByType t = filter (\rel -> relType rel == t) . Map.elems . relMap
{-------------------------------------------------------------------------------
Rendering
-------------------------------------------------------------------------------}
instance ToDocument Relationships where
  -- Wrap the rendered element in a standalone document carrying the
  -- package-relationships namespace.
  toDocument = documentFromNsElement "Relationships generated by xlsx" pkgRelNs
             . toElement "Relationships"
instance ToElement Relationships where
  -- One child <Relationship> element per table entry, each tagged with
  -- its reference id.
  toElement nm Relationships{..} = Element
      { elementName = nm
      , elementAttributes = Map.empty
      , elementNodes = map (NodeElement . relToEl "Relationship") $
                       Map.toList relMap
      }
    where
      relToEl nm' (relId, rel) = setAttr "Id" relId (toElement nm' rel)
instance ToElement Relationship where
  -- A relationship renders as an empty element with Target/Type attributes.
  toElement nm Relationship{..} = Element
      { elementName = nm
      , elementAttributes = Map.fromList [ "Target" .= relTarget
                                         , "Type" .= relType ]
      , elementNodes = []
      }
{-------------------------------------------------------------------------------
Parsing
-------------------------------------------------------------------------------}
instance FromCursor Relationships where
  -- Collect every <Relationship> child into the table.
  fromCursor cur = do
      let items = cur $/ element (pr"Relationship") >=> parseRelEntry
      return . Relationships $ Map.fromList items
-- | Parse one <Relationship> element together with its @Id@ attribute.
-- Runs in the list monad: missing attributes yield no entry.
parseRelEntry :: Cursor -> [(RefId, Relationship)]
parseRelEntry cur = do
    rel <- fromCursor cur
    rId <- attribute "Id" cur
    return (RefId rId, rel)
instance FromCursor Relationship where
  -- Type and Target attributes are both required.
  fromCursor cur = do
    ty <- attribute "Type" cur
    trg <- T.unpack <$> attribute "Target" cur
    return $ Relationship ty trg
-- | Qualify a local name with the package-relationships namespace.
pr :: Text -> Name
pr local = Name local (Just pkgRelNs) Nothing

-- | Qualify a local name with the office-document-relationships namespace.
odr :: Text -> Name
odr local = Name local (Just odRelNs) Nothing

-- | Namespace of relationships referenced from an office document part.
odRelNs :: Text
odRelNs = "http://schemas.openxmlformats.org/officeDocument/2006/relationships"

-- | Namespace of package-level relationship parts.
pkgRelNs :: Text
pkgRelNs = "http://schemas.openxmlformats.org/package/2006/relationships"

-- | Prefix a short relationship type with the standard base URI.
stdRelType :: Text -> Text
stdRelType t =
  "http://schemas.openxmlformats.org/officeDocument/2006/relationships/" <> t
| qrilka/xlsx | src/Codec/Xlsx/Types/Internal/Relationships.hs | mit | 4,805 | 0 | 15 | 896 | 1,186 | 647 | 539 | 98 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.EXT2.Inode
-- Copyright : (C) 2015 Braden Walters,
-- 2015 Ricky Elrod
-- License : MIT (see LICENSE file)
-- Maintainer : Braden Walters <vc@braden-walters.info>,
-- Ricky Elrod <ricky@elrod.me>
-- Stability : experimental
-- Portability : ghc
--
-- This module contains functions and types for dealing with ext2\'s concept of
-- inodes.
module Data.EXT2.Inode
( InodeMode(..)
, InodeNumber
, Inode(..)
, fetchInodeTable
, fetchInode
, usedInodes
, fetchInodeBlocks
, fetchDataBlockNumbers
-- * 'Inode' Lenses
, inodeNumber, mode, size, userId, accessTime, creationTime, modifiedTime
, deletedTime, groupId, linkCount, blocks512, flags, osDependentValue
, generation, fileAcl, dirAcl, faddr, osDependentValue2
) where
import Control.Applicative
import Control.Lens
import Control.Monad (replicateM)
import Data.Binary.Get
import Data.Bits
import qualified Data.ByteString as SBS
import qualified Data.ByteString.Lazy as LBS
import Data.EXT2.BlockGroupDescriptor
import Data.EXT2.Internal.LensHacks
import Data.EXT2.Superblock
import Data.EXT2.UsageBitmaps
import Data.EXT2.Internal.Util (createTime)
import Data.Maybe
import Data.UnixTime
import qualified Data.Vector as V
import System.IO
-- | File-format variants encoded in the high nibble of an inode's mode.
data InodeMode =
  SocketInode | SymLinkInode | RegFileInode | BlockDevInode |
  DirectoryInode | CharDevInode | FifoInode deriving (Eq, Show)
-- | Decode the file-format portion of a raw mode word; 'Nothing' for an
-- unrecognized format nibble.
intToFileFormatMode :: Integer -> Maybe InodeMode
intToFileFormatMode raw =
  case raw .&. 0xf000 of
    0xc000 -> Just SocketInode
    0xa000 -> Just SymLinkInode
    0x8000 -> Just RegFileInode
    0x6000 -> Just BlockDevInode
    0x4000 -> Just DirectoryInode
    0x2000 -> Just CharDevInode
    0x1000 -> Just FifoInode
    _      -> Nothing
{-# INLINE intToFileFormatMode #-}
-- | 1-based index of an inode within the filesystem.
type InodeNumber = Integer
-- | A decoded on-disk inode (128-byte layout; see 'getInode').
data Inode =
  Inode
  { inoInodeNumber :: InodeNumber
  , inoMode :: [InodeMode]
  , inoUserId :: Integer
  , inoSize :: Integer
  , inoAccessTime :: UnixTime
  , inoCreationTime :: UnixTime
  , inoModifiedTime :: UnixTime
  , inoDeletedTime :: UnixTime
  , inoGroupId :: Integer
  , inoLinkCount :: Integer
  , inoBlocks512 :: Integer    -- ^ size in 512-byte sectors
  , inoFlags :: Integer
  , inoOsDependentValue :: SBS.ByteString
  , inoDirectBlocks :: [Integer]   -- ^ 12 direct block slots, zero-terminated
  , inoIndirectBlocks :: (Integer, Integer, Integer)  -- ^ single/double/triple
  , inoGeneration :: Integer
  , inoFileAcl :: Integer
  , inoDirAcl :: Integer
  , inoFaddr :: Integer
  , inoOsDependentValue2 :: SBS.ByteString }
  deriving (Eq, Show)
-- Generates lenses named without the "ino" prefix (mode, size, ...).
makeLensesWith namespaceLensRules ''Inode
-- | On-disk size of one inode record, in bytes.
lenInode :: Integral a => a
lenInode = 128
{-# INLINE lenInode #-}
-- | Read the whole inode table of one block group, numbering the inodes
-- from the group's starting inode number.
fetchInodeTable :: Superblock -> BlockGroupDescriptor -> Handle -> IO [Inode]
fetchInodeTable sb bgd handle = do
  let startInodeNumber = bgd ^. groupNumber * sb ^. inodesPerGroup
  hSeek handle AbsoluteSeek $ blockOffset sb $ bgd ^. inodeTblStartAddr
  mapM (\num -> runGet (getInode (startInodeNumber + num)) <$>
                LBS.hGet handle lenInode) [1..sb ^. inodesPerGroup]
-- | Read a single inode by number, locating its block group first;
-- 'Nothing' when the computed group index is out of range.
fetchInode :: Superblock -> V.Vector BlockGroupDescriptor -> Handle ->
              InodeNumber -> IO (Maybe Inode)
fetchInode sb bgdTable handle inodeNum =
  -- Inode numbers are 1-based, hence the (inodeNum - 1) arithmetic.
  let groupIndex = (inodeNum - 1) `quot` sb ^. inodesPerGroup
      localInodeNum = (inodeNum - 1) - (groupIndex * sb ^. inodesPerGroup)
  in case bgdTable V.!? fromIntegral groupIndex of
       Just bgd -> do
         let inodeLoc = blockOffset sb (bgd ^. inodeTblStartAddr) +
                        (lenInode * localInodeNum)
         hSeek handle AbsoluteSeek inodeLoc
         Just <$> runGet (getInode inodeNum) <$> LBS.hGet handle lenInode
       Nothing -> return Nothing
-- | Binary decoder for one 128-byte inode record.  Field order in this
-- applicative chain mirrors the on-disk layout exactly -- do not reorder.
getInode :: InodeNumber -> Get Inode
getInode inodeNum =
  Inode inodeNum <$> maybeToList <$> (intToFileFormatMode <$> getShort)
                 <*> getShort <*> getInt <*> getTime <*> getTime <*> getTime
                 <*> getTime <*> getShort <*> getShort <*> getInt <*> getInt
                 <*> getByteString 4 <*> replicateM 12 getInt
                 <*> liftA3 (,,) getInt getInt getInt <*> getInt <*> getInt
                 <*> getInt <*> getInt <*> getByteString 12
  -- Little-endian words of 32 and 16 bits; times are raw epoch seconds.
  where getInt = toInteger <$> getWord32le
        getShort = toInteger <$> getWord16le
        getTime = createTime <$> (fromIntegral <$> getWord32le :: Get Integer)
-- | Keep only the inodes whose bit is set in the usage bitmap; pairs are
-- matched positionally, truncating at the shorter side.
usedInodes :: InodeUsageBitmap -> [Inode] -> [Inode]
usedInodes (InodeUsageBitmap usageBits) allInodes =
  [ ino | (ino, inUse) <- zip allInodes (V.toList usageBits), inUse ]
{-# INLINE usedInodes #-}
-- | Concatenate the raw contents of every data block owned by an inode.
fetchInodeBlocks :: Handle -> Superblock -> Inode -> IO LBS.ByteString
fetchInodeBlocks handle sb inode = do
  blockNums <- fetchDataBlockNumbers handle sb inode
  LBS.concat <$> mapM readBlock blockNums
  -- Seek to each block and read exactly one filesystem block.
  where readBlock num = do
          hSeek handle AbsoluteSeek $ blockOffset sb (fromIntegral num)
          LBS.hGet handle $ fromIntegral (sb ^. logBlockSize)
-- | All data block numbers referenced by an inode: the used direct
-- blocks followed by everything reachable through the single, double and
-- triple indirect pointers (a zero pointer contributes nothing).
fetchDataBlockNumbers :: Handle -> Superblock -> Inode -> IO [Integer]
fetchDataBlockNumbers handle sb inode = do
  -- Direct slots are zero-terminated; 'takeWhile (/= 0)' replaces the
  -- roundabout 'takeWhile (not . (== 0))'.
  let usedDirect = takeWhile (/= 0) (inode ^. directBlocks)
      (indir1Num, indir2Num, indir3Num) = inode ^. indirectBlocks
  indir1 <- fetchIndirectBlock1 handle sb indir1Num
  indir2 <- fetchIndirectBlock2 handle sb indir2Num
  indir3 <- fetchIndirectBlock3 handle sb indir3Num
  return (usedDirect ++ indir1 ++ indir2 ++ indir3)
-- | Decode one block's worth of 32-bit little-endian words, keeping the
-- prefix before the first zero entry.
get32IntBlockTillZero :: Superblock -> Get [Integer]
get32IntBlockTillZero sb =
  let -- Each entry is 4 bytes.  Exact integer division replaces the
      -- previous lossy round-trip through 'Double' ('floor (x / 4)').
      num32Integers = fromIntegral (sb ^. logBlockSize) `div` 4
      getInt = toInteger <$> getWord32le
  in takeWhile (/= 0) <$> replicateM num32Integers getInt
-- | Block numbers reachable through a single-indirect block; a zero
-- block number means "no indirect block".
fetchIndirectBlock1 :: Handle -> Superblock -> Integer -> IO [Integer]
fetchIndirectBlock1 _ _ 0 = return []
fetchIndirectBlock1 handle sb blockNum = readBlockNumbers handle sb blockNum

-- | Block numbers reachable through a double-indirect block: each entry
-- is itself a single-indirect block.
fetchIndirectBlock2 :: Handle -> Superblock -> Integer -> IO [Integer]
fetchIndirectBlock2 _ _ 0 = return []
fetchIndirectBlock2 handle sb blockNum = do
  indirectBlocks' <- readBlockNumbers handle sb blockNum
  concat <$> mapM (fetchIndirectBlock1 handle sb) indirectBlocks'

-- | Block numbers reachable through a triple-indirect block: each entry
-- is a double-indirect block.
fetchIndirectBlock3 :: Handle -> Superblock -> Integer -> IO [Integer]
fetchIndirectBlock3 _ _ 0 = return []
fetchIndirectBlock3 handle sb blockNum = do
  indirectBlocks' <- readBlockNumbers handle sb blockNum
  concat <$> mapM (fetchIndirectBlock2 handle sb) indirectBlocks'

-- | Seek to a block and decode its zero-terminated list of 32-bit block
-- numbers.  Shared by all three indirection levels (previously the same
-- seek-and-decode code was duplicated in each).
readBlockNumbers :: Handle -> Superblock -> Integer -> IO [Integer]
readBlockNumbers handle sb blockNum = do
  hSeek handle AbsoluteSeek $ blockOffset sb blockNum
  runGet (get32IntBlockTillZero sb) <$>
    LBS.hGet handle (sb ^. logBlockSize . to fromIntegral)
| meoblast001/ext2-info | src/Data/EXT2/Inode.hs | mit | 6,973 | 0 | 25 | 1,309 | 1,933 | 1,017 | 916 | -1 | -1 |
import Hello
main :: IO ()
main = putStrLn hello | freels/haskell-project | bin/HelloMain.hs | mit | 49 | 0 | 6 | 10 | 22 | 11 | 11 | 3 | 1 |
{-# LANGUAGE OverloadedStrings, CPP #-}
{-# LANGUAGE NamedFieldPuns, RecordWildCards #-}
module Network.Wai.Handler.Warp.HTTP2.Types where
import Data.ByteString.Builder (Builder)
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>), (<*>), pure)
#endif
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (MVar)
import Control.Concurrent.STM
import Control.Exception (SomeException)
import Control.Monad (void)
import Control.Reaper
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.IntMap.Strict (IntMap, IntMap)
import qualified Data.IntMap.Strict as M
import qualified Network.HTTP.Types as H
import Network.Wai (Request, FilePart)
import Network.Wai.HTTP2 (PushPromise, Trailers)
import Network.Wai.Handler.Warp.IORef
import Network.Wai.Handler.Warp.Types
import Network.HTTP2
import Network.HTTP2.Priority
import Network.HPACK
----------------------------------------------------------------
-- | The HTTP version reported for HTTP/2 connections.
http2ver :: H.HttpVersion
http2ver = H.HttpVersion 2 0

-- | Did the transport negotiate HTTP/2 via ALPN ("h2" or a draft
-- "h2-NN" identifier)?  Plain TCP is never HTTP/2.
isHTTP2 :: Transport -> Bool
isHTTP2 TCP = False
isHTTP2 tls =
    case tlsNegotiatedProtocol tls of
        Nothing    -> False
        Just proto -> proto == "h2" || "h2-" `BS.isPrefixOf` proto
----------------------------------------------------------------
-- | A request arriving on a stream, handed to the worker pool.
data Input = Input Stream Request
----------------------------------------------------------------
-- | The result of writing data from a stream's queue into the buffer.
data Control a = CFinish Trailers
               -- ^ The stream has ended, and the trailers should be sent.
               | CNext a
               -- ^ The stream has more data immediately available, and we
               -- should re-enqueue it when the stream window becomes open.
               | CNone
               -- ^ The stream queue has been drained and we've handed it off
               -- to its dedicated waiter thread, which will re-enqueue it when
               -- more data is available.
instance Show (Control a) where
    show (CFinish _) = "CFinish"
    show (CNext _)   = "CNext"
    show CNone       = "CNone"
-- | A continuation: given the current window size, write more data.
type DynaNext = WindowSize -> IO Next
-- | Number of bytes written into the buffer by one step.
type BytesFilled = Int
-- | One step of output: how much was written, plus what to do next.
data Next = Next BytesFilled (Control DynaNext)
-- | Items consumed by the sender thread, in rough priority order.
data Output = OFinish
            -- ^ Terminate the connection.
            | OGoaway ByteString
            -- ^ Send a goaway frame and terminate the connection.
            | OSettings ByteString SettingsList
            -- ^ Update settings and send an ack settings frame.
            | OFrame ByteString
            -- ^ Send an entire pre-encoded frame.
            | OResponse Stream H.Status H.ResponseHeaders Aux
            -- ^ Send the headers and as much of the response as is immediately
            --   available.
            | OPush Stream PushPromise (MVar Bool) Stream H.Status H.ResponseHeaders Aux
            -- ^ Send a PUSH_PROMISE frame, then act like OResponse; signal the
            --   MVar whether the promise has been sent.
            | ONext Stream DynaNext
            -- ^ Send a chunk of the response.
outputStream :: Output -> Stream
-- | The stream an output item belongs to.  Partial by design: the
-- connection-level constructors carry no stream.
outputStream out = case out of
    OResponse strm _ _ _   -> strm
    ONext strm _           -> strm
    OPush strm _ _ _ _ _ _ -> strm
    _                      -> error "outputStream"
----------------------------------------------------------------
-- | An element on the queue between a running stream and the sender; the order
-- should consist of any number of 'SFile', 'SBuilder', and 'SFlush', followed
-- by a single 'SFinish'.
data Sequence = SFinish Trailers
              -- ^ The stream is over; its trailers are provided.
              | SFlush
              -- ^ Any buffered data should be sent immediately.
              | SBuilder Builder
              -- ^ Append a chunk of data to the stream.
              | SFile FilePath FilePart
              -- ^ Append a chunk of a file's contents to the stream.
-- | A message from the sender to a stream's dedicated waiter thread.
data Sync = SyncNone
          -- ^ Nothing interesting has happened. Go back to sleep.
          | SyncFinish
          -- ^ The stream has ended.
          | SyncNext Output
          -- ^ The stream's queue has been drained; wait for more to be
          --   available and re-enqueue the given 'Output'.
-- | Auxiliary information needed to communicate with a running stream: a queue
-- of stream elements ('Sequence') and a 'TVar' connected to its waiter thread.
data Aux = Persist (TBQueue Sequence) (TVar Sync)
----------------------------------------------------------------
-- | The context for HTTP/2 connection.
data Context = Context {
    http2settings :: IORef Settings
  -- ^ Our current settings (updated when the peer acknowledges).
  , streamTable :: StreamTable
  -- ^ Live streams, garbage-collected by a reaper.
  -- | Number of active streams initiated by the client; for enforcing our own
  -- max concurrency setting.
  , concurrency :: IORef Int
  -- | Number of active streams initiated by the server; for respecting the
  -- client's max concurrency setting.
  , pushConcurrency :: IORef Int
  , priorityTreeSize :: IORef Int
  -- | RFC 7540 says "Other frames (from any stream) MUST NOT
  -- occur between the HEADERS frame and any CONTINUATION
  -- frames that might follow". This field is used to implement
  -- this requirement.
  , continued :: IORef (Maybe StreamId)
  , currentStreamId :: IORef StreamId
  -- ^ Last client-initiated stream ID we've handled.
  , nextPushStreamId :: IORef StreamId
  -- ^ Next available server-initiated stream ID.
  , inputQ :: TQueue Input
  -- ^ Decoded requests awaiting a worker.
  , outputQ :: PriorityTree Output
  -- ^ Prioritized items for the sender thread.
  , encodeDynamicTable :: IORef DynamicTable
  -- ^ HPACK dynamic table for outgoing headers.
  , decodeDynamicTable :: IORef DynamicTable
  -- ^ HPACK dynamic table for incoming headers.
  , connectionWindow :: TVar WindowSize
  -- ^ Connection-level flow-control window.
  }
----------------------------------------------------------------
-- | Allocate a fresh per-connection context with protocol defaults.
-- The applicative chain must stay in 'Context' field order.
newContext :: IO Context
newContext = Context <$> newIORef defaultSettings
                     <*> initialize 10 -- fixme: hard coding: 10
                     <*> newIORef 0
                     <*> newIORef 0
                     <*> newIORef 0
                     <*> newIORef Nothing
                     <*> newIORef 0
                     <*> newIORef 2 -- first server push stream; 0 is reserved
                     <*> newTQueueIO
                     <*> newPriorityTree
                     <*> (newDynamicTableForEncoding defaultDynamicTableSize >>= newIORef)
                     <*> (newDynamicTableForDecoding defaultDynamicTableSize >>= newIORef)
                     <*> newTVarIO defaultInitialWindowSize
-- | Tear down a connection context by stopping its stream-table reaper.
clearContext :: Context -> IO ()
clearContext = void . reaperStop . streamTable
----------------------------------------------------------------
-- | Sub-states of an open stream, from header arrival through body receipt.
data OpenState =
    JustOpened
  | Continued [HeaderBlockFragment]
              Int  -- Total size
              Int  -- The number of continuation frames
              Bool -- End of stream
              Priority
  | NoBody HeaderList Priority
  | HasBody HeaderList Priority
  | Body (TQueue ByteString)
-- | Why a stream was closed.
data ClosedCode = Finished
                | Killed
                | Reset ErrorCodeId
                | ResetByMe SomeException
                deriving Show
-- | Lifecycle of an HTTP/2 stream (RFC 7540 section 5.1, simplified).
data StreamState =
    Idle
  | Open OpenState
  | HalfClosed
  | Closed ClosedCode
-- | Is the stream still in the 'Idle' state?
isIdle :: StreamState -> Bool
isIdle st = case st of
    Idle -> True
    _    -> False

-- | Is the stream open (in any 'OpenState')?
isOpen :: StreamState -> Bool
isOpen st = case st of
    Open{} -> True
    _      -> False

-- | Is the stream half closed?
isHalfClosed :: StreamState -> Bool
isHalfClosed st = case st of
    HalfClosed -> True
    _          -> False

-- | Is the stream fully closed (for any reason)?
isClosed :: StreamState -> Bool
isClosed st = case st of
    Closed{} -> True
    _        -> False
-- Debug rendering only; the closed reason is the one interesting payload.
instance Show StreamState where
    show Idle       = "Idle"
    show Open{}     = "Open"
    show HalfClosed = "HalfClosed"
    show (Closed e) = "Closed: " ++ show e
----------------------------------------------------------------
-- | Per-stream state shared between receiver, worker and sender.
data Stream = Stream {
    streamNumber :: StreamId
  , streamState :: IORef StreamState
  -- Next two fields are for error checking.
  , streamContentLength :: IORef (Maybe Int)
  , streamBodyLength :: IORef Int
  , streamWindow :: TVar WindowSize
  -- ^ Stream-level flow-control window.
  , streamPrecedence :: IORef Precedence
  -- | The concurrency IORef in which this stream has been counted. The client
  -- and server each have separate concurrency values to respect, so pushed
  -- streams need to decrement a different count when they're closed. This
  -- should be either @concurrency ctx@ or @pushConcurrency ctx@.
  , concurrencyRef :: IORef Int
  }
-- Debug rendering: a stream is identified by its number.
instance Show Stream where
  show s = show (streamNumber s)
-- | Allocate an 'Idle' stream with the given id, initial window size and
-- concurrency counter.  The applicative chain follows 'Stream' field order.
newStream :: IORef Int -> StreamId -> WindowSize -> IO Stream
newStream ref sid win =
    Stream sid <$> newIORef Idle
               <*> newIORef Nothing
               <*> newIORef 0
               <*> newTVarIO win
               <*> newIORef defaultPrecedence
               <*> pure ref
----------------------------------------------------------------
-- | Mark a stream as newly opened: take a slot in its concurrency
-- counter and move its state to 'Open' 'JustOpened'.
opened :: Stream -> IO ()
opened Stream{concurrencyRef,streamState} = do
    atomicModifyIORef' concurrencyRef (\n -> (n + 1, ()))
    writeIORef streamState (Open JustOpened)

-- | Mark a stream as closed with the given reason, releasing its slot in
-- the concurrency counter.
closed :: Stream -> ClosedCode -> IO ()
closed Stream{concurrencyRef,streamState} reason = do
    atomicModifyIORef' concurrencyRef (\n -> (n - 1, ()))
    writeIORef streamState (Closed reason)
----------------------------------------------------------------
-- | Live streams keyed by stream id, periodically pruned by a reaper.
type StreamTable = Reaper (IntMap Stream) (M.Key, Stream)
-- | Start a stream-table reaper that prunes closed streams every
-- @duration@ seconds.
initialize :: Int -> IO StreamTable
initialize duration = mkReaper settings
  where
    settings = defaultReaperSettings {
        reaperAction = clean
      , reaperDelay = duration * 1000000  -- seconds -> microseconds
      , reaperCons = uncurry M.insert
      , reaperNull = M.null
      , reaperEmpty = M.empty
      }
-- | Reaper action: drop streams whose state is 'Closed', returning a
-- merge function for entries added while the reaper ran.
clean :: IntMap Stream -> IO (IntMap Stream -> IntMap Stream)
clean old = do
    new <- M.fromAscList <$> prune oldlist []
    return $ M.union new
  where
    -- Walking the descending list while consing onto the accumulator
    -- yields an ascending list, as 'M.fromAscList' requires.
    oldlist = M.toDescList old
    prune [] lst = return lst
    prune (x@(_,s):xs) lst = do
        st <- readIORef (streamState s)
        if isClosed st then
            prune xs lst
          else
            prune xs (x:lst)
-- | Register a stream under its id (also refreshes the reaper).
insert :: StreamTable -> M.Key -> Stream -> IO ()
insert strmtbl key strm = reaperAdd strmtbl (key, strm)

-- | Look a stream up by id in the live table.
search :: StreamTable -> M.Key -> IO (Maybe Stream)
search strmtbl key = fmap (M.lookup key) (reaperRead strmtbl)
-- INVARIANT: streams in the output queue have non-zero window size.
-- | Block (via STM retry) until the stream's window opens, then enqueue
-- the output at the stream's current precedence.
enqueueWhenWindowIsOpen :: PriorityTree Output -> Output -> IO ()
enqueueWhenWindowIsOpen outQ out = do
    let Stream{..} = outputStream out
    atomically $ do
        x <- readTVar streamWindow
        check (x > 0)  -- retries the transaction until the window is > 0
    pre <- readIORef streamPrecedence
    enqueue outQ streamNumber pre out
-- | Enqueue immediately when the window is open; otherwise hand off to a
-- short-lived waiter thread so the sender is never blocked.
enqueueOrSpawnTemporaryWaiter :: Stream -> PriorityTree Output -> Output -> IO ()
enqueueOrSpawnTemporaryWaiter Stream{..} outQ out = do
    sw <- atomically $ readTVar streamWindow
    if sw == 0 then
        -- This waiter waits only for the stream window.
        void $ forkIO $ enqueueWhenWindowIsOpen outQ out
      else do
        pre <- readIORef streamPrecedence
        enqueue outQ streamNumber pre out
| rgrinberg/wai | warp/Network/Wai/Handler/Warp/HTTP2/Types.hs | mit | 11,061 | 0 | 18 | 2,929 | 2,122 | 1,148 | 974 | 195 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Language.DSKanren
import Test.Tasty.QuickCheck hiding ((===))
import Test.Tasty
import QuickCheckHelper
-- | @appendo l r o@ relates lists so that @o@ is @l@ appended to @r@:
-- either @l@ is nil and @o@ unifies with @r@, or @l@'s head is also
-- @o@'s head and the tails are related recursively.
appendo :: Term -> Term -> Term -> Predicate
appendo l r o =
  conde [ program [l === "nil", o === r]
        , manyFresh $ \h t o' ->
            program [ Pair h t === l
                    , appendo t r o'
                    , Pair h o' === o ]]
-- | @heado l h@ holds when @h@ is the head of the list term @l@.
heado :: Term -> Term -> Predicate
heado l h = fresh (\rest -> Pair h rest === l)

-- | @tailo l t@ holds when @t@ is the tail of the list term @l@.
tailo :: Term -> Term -> Predicate
tailo l t = fresh (\hd -> Pair hd t === l)
-- | 'appendo' on two ground lists must unify its output with the
-- concatenation of the inputs.
isAppend :: TestTree
isAppend = testProperty "Append Works"  -- label was copy-pasted as "Head Works"
         . mapSize (const 3)
         . forAll (two . listOf1 $ mkTerm [])
         $ \(l, r) -> case runN 1 $ appendo (list l) (list r) of
             (t, _) : _ -> t == list (l ++ r)
             _ -> False
-- | 'heado' on a ground non-empty list must produce that list's head.
isHead :: TestTree
isHead = testProperty "Head Works"
       . mapSize (const 3)
       . forAll (listOf1 $ mkTerm [])
       $ \terms -> case runN 1 $ heado (list terms) of
           (t, _) : _ -> t == head terms
           _ -> False
-- | 'tailo' on a ground non-empty list must produce that list's tail.
isTail :: TestTree
isTail = testProperty "Tail Works"
       . mapSize (const 3)
       . forAll (listOf1 $ mkTerm [])
       $ \terms -> case runN 1 $ tailo (list terms) of
           (t, _) : _ -> t == list (tail terms)
           _ -> False
main :: IO ()
-- NOTE(review): 'isAppend' is defined above but never run here --
-- presumably an oversight; confirm before adding it to the group.
main = defaultMain (testGroup "List Tests" [isHead, isTail])
| jozefg/ds-kanren | test/List.hs | mit | 1,483 | 0 | 13 | 543 | 591 | 305 | 286 | 40 | 2 |
{-# htermination isPrefixOf :: [Bool] -> [Bool] -> Bool #-}
import List
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/List_isPrefixOf_6.hs | mit | 72 | 0 | 3 | 12 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html
module Stratosphere.ResourceProperties.CodeDeployDeploymentGroupTriggerConfig where
import Stratosphere.ResourceImports
-- | Full data type definition for CodeDeployDeploymentGroupTriggerConfig. See
-- 'codeDeployDeploymentGroupTriggerConfig' for a more convenient
-- constructor.
data CodeDeployDeploymentGroupTriggerConfig =
  CodeDeployDeploymentGroupTriggerConfig
  { _codeDeployDeploymentGroupTriggerConfigTriggerEvents :: Maybe (ValList Text) -- ^ Event names that fire the trigger
  , _codeDeployDeploymentGroupTriggerConfigTriggerName :: Maybe (Val Text) -- ^ Name of the notification trigger
  , _codeDeployDeploymentGroupTriggerConfigTriggerTargetArn :: Maybe (Val Text) -- ^ SNS topic ARN notified by the trigger
  } deriving (Show, Eq)
instance ToJSON CodeDeployDeploymentGroupTriggerConfig where
  -- Unset ('Nothing') fields are omitted from the JSON object entirely
  -- rather than serialized as null.
  toJSON CodeDeployDeploymentGroupTriggerConfig{..} =
    object $
    catMaybes
    [ fmap (("TriggerEvents",) . toJSON) _codeDeployDeploymentGroupTriggerConfigTriggerEvents
    , fmap (("TriggerName",) . toJSON) _codeDeployDeploymentGroupTriggerConfigTriggerName
    , fmap (("TriggerTargetArn",) . toJSON) _codeDeployDeploymentGroupTriggerConfigTriggerTargetArn
    ]
-- | Constructor for 'CodeDeployDeploymentGroupTriggerConfig' containing
-- required fields as arguments.
-- (All fields here are optional, so every field starts as 'Nothing';
-- set them via the lenses below.)
codeDeployDeploymentGroupTriggerConfig
  :: CodeDeployDeploymentGroupTriggerConfig
codeDeployDeploymentGroupTriggerConfig =
  CodeDeployDeploymentGroupTriggerConfig
  { _codeDeployDeploymentGroupTriggerConfigTriggerEvents = Nothing
  , _codeDeployDeploymentGroupTriggerConfigTriggerName = Nothing
  , _codeDeployDeploymentGroupTriggerConfigTriggerTargetArn = Nothing
  }
-- | Lens for the trigger's event list.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html#cfn-codedeploy-deploymentgroup-triggerconfig-triggerevents
cddgtcTriggerEvents :: Lens' CodeDeployDeploymentGroupTriggerConfig (Maybe (ValList Text))
cddgtcTriggerEvents = lens _codeDeployDeploymentGroupTriggerConfigTriggerEvents (\s a -> s { _codeDeployDeploymentGroupTriggerConfigTriggerEvents = a })
-- | Lens for the trigger's name.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html#cfn-codedeploy-deploymentgroup-triggerconfig-triggername
cddgtcTriggerName :: Lens' CodeDeployDeploymentGroupTriggerConfig (Maybe (Val Text))
cddgtcTriggerName = lens _codeDeployDeploymentGroupTriggerConfigTriggerName (\s a -> s { _codeDeployDeploymentGroupTriggerConfigTriggerName = a })
-- | Lens for the trigger's target SNS topic ARN.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codedeploy-deploymentgroup-triggerconfig.html#cfn-codedeploy-deploymentgroup-triggerconfig-triggertargetarn
cddgtcTriggerTargetArn :: Lens' CodeDeployDeploymentGroupTriggerConfig (Maybe (Val Text))
cddgtcTriggerTargetArn = lens _codeDeployDeploymentGroupTriggerConfigTriggerTargetArn (\s a -> s { _codeDeployDeploymentGroupTriggerConfigTriggerTargetArn = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/CodeDeployDeploymentGroupTriggerConfig.hs | mit | 3,093 | 0 | 12 | 254 | 356 | 203 | 153 | 32 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Main where
import Control.Applicative
import Control.Concurrent (threadDelay)
import Control.Lens
import Control.Monad (when,forever)
import Control.Monad.Catch (try)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (ask)
import Control.Monad.State (modify)
import Control.Monad.Trans.Resource
import Data.Acid
import Data.SafeCopy
import Data.String.Utils
import Data.Typeable
import Language.Haskell.Interpreter (runInterpreter)
import Mueval.ArgsParse
import Mueval.Interpreter
import Web.Twitter.Conduit
import Web.Twitter.Types.Lens
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Tokens
import Common
-- Drop the leading word (assumed to be the bot's screen name in a mention)
-- and return the remainder, trimmed.  A message with no space is returned
-- as a single trimmed word.
getHaskellExpression :: T.Text -> T.Text
getHaskellExpression raw =
    let (firstWord, rest) = T.breakOn (T.singleton ' ') (T.strip raw)
    in if T.null rest then firstWord else T.strip rest
-- | True when the status text begins with the given user name (i.e. the bot
-- is mentioned at the start of the tweet) and the tweet's author is not the
-- bot itself ('botScreenName' is imported from Common).
isHaskellPost :: T.Text -> Status -> Bool
isHaskellPost userName status =
    (T.isPrefixOf userName $ status ^. statusText) &&
    (status ^. statusUser ^. userScreenName) /= botScreenName
-- | Evaluate a Haskell expression inside mueval's sandboxed interpreter.
-- Returns the rendered value on success, or a printable representation of
-- the option-parsing or interpreter error on failure.  (Cleaned up: the
-- original bound names it never used and shadowed the argument @e@.)
evalExpr :: String -> IO String
evalExpr e =
    case getOptions ["--expression", e] of
      -- getOptions failed; show the whole error value it returned.
      Left optErr -> return $ show optErr
      Right opts -> do
        r <- runInterpreter (interpreter opts)
        case r of
          Left err -> return $ show err
          -- Only the rendered value matters here; the echoed expression,
          -- its type, and getResult's truncation flag are discarded.
          Right (_expr, _exprType, val) -> do
            (out, _truncated) <- getResult val
            return out
-- | Render an interpreter result with a 1024-character limit.
-- NOTE(review): the Bool is whatever mueval's 'render' reports alongside the
-- string -- presumably an exactness/truncation flag; confirm against mueval.
getResult :: (Functor m, MonadIO m) => String -> m (String, Bool)
getResult = render 1024
-- | Render a status as "<id>: <screen name>: <text>" for logging.
statusToText :: Status -> T.Text
statusToText status = T.concat [ T.pack . show $ status ^. statusId
                               , ": "
                               , status ^. statusUser . userScreenName
                               , ": "
                               , status ^. statusText
                               ]
-- | Extract the Haskell expression from a mention, HTML-decode it, evaluate
-- it with 'evalExpr', and truncate the result to 140 characters so the
-- reply fits in a tweet.
evalExpression :: MonadIO m => Status -> m String
evalExpression status = do
  r <- liftIO $ evalExpr $ decodeHtml $ T.unpack $ getHaskellExpression $ status ^. statusText
  return $ take 140 r
{-TODO: Make this more comprehensive-}
-- | Decode the HTML entities Twitter escapes in tweet text.  The original
-- replacement table was corrupted (each entity was "replaced" by itself and
-- one string literal was malformed); this restores the real entity names.
-- "&amp;" is decoded last so that e.g. "&amp;lt;" does not double-decode.
decodeHtml :: String -> String
decodeHtml s =
    replaceAll "&amp;" "&" $
    replaceAll "&quot;" "\"" $
    replaceAll "&#39;" "'" $
    replaceAll "&gt;" ">" $
    replaceAll "&lt;" "<" $ s
  where
    -- Replace every non-overlapping occurrence of pat with sub.  Local,
    -- base-only helper, so this block no longer relies on MissingH.
    replaceAll pat sub = go
      where
        n = length pat
        go [] = []
        go str@(c:cs)
          | n > 0 && take n str == pat = sub ++ go (drop n str)
          | otherwise                  = c : go cs
-- res <- call $ update "Hello World"
-- | Build a status update threaded as a reply to the tweet with the given id.
reply :: Integer -> T.Text -> APIRequest StatusesUpdate Status
reply i s =
  Web.Twitter.Conduit.update s & inReplyToStatusId ?~ i
-- | Post @res@ as a reply to @status@: prefix "@<author>" so Twitter treats
-- it as a reply, and truncate the whole message to 140 characters.
postreply :: (MonadResource m, MonadLogger m) => Status -> Integer -> String -> TW m Status
postreply status i res = call (reply i $ (T.take 140 $
                            T.concat ["@",
                                      status ^. statusUser ^. userScreenName,
                                      " ",
                                      T.pack res]))
{-Acid State database to keep track of replies-}
-- | Id of a tweet the bot has already replied to.
data TweetId = TweetId { tweetId :: Integer }
                deriving (Eq, Show, Typeable)
-- | acid-state root value: every tweet id that has been answered so far.
data LambdaTwitDb = LambdaTwitDb { allReplyIds :: [TweetId] }
                     deriving (Typeable)
-- | acid-state query: the full list of already-answered tweet ids.
allReplies :: Query LambdaTwitDb [TweetId]
allReplies = do
    db <- ask
    return (allReplyIds db)
-- | acid-state update: record one more answered tweet id (prepended).
addReply :: TweetId -> Update LambdaTwitDb ()
addReply newId = modify $ \(LambdaTwitDb ids) -> LambdaTwitDb (newId : ids)
{-The Acid State magic-}
-- Template Haskell: derive SafeCopy serialisation for the persisted types
-- and generate the AllReplies / AddReply acidic event types used with
-- 'query' and 'Data.Acid.update' in conduitmain.
deriveSafeCopy 0 'base ''TweetId
deriveSafeCopy 0 'base ''LambdaTwitDb
makeAcidic ''LambdaTwitDb ['allReplies, 'addReply]
-- | Main bot loop: open (or restore) the acid-state reply database, then
-- forever pull up to 100 mentions, evaluate any that look like Haskell
-- posts, reply, and record the reply so the same tweet is never answered
-- twice.  Sleeps 60 s after each processed mention (crude rate limiting).
conduitmain :: IO ()
conduitmain = do
  state <- openLocalState (LambdaTwitDb [])
  forever $ do
  {-TODO: Use Data.Configurator to read in the oauth keys without needing a recompile:
   - http://hackage.haskell.org/package/configurator-}
    runNoLoggingT . runTwitterFromEnv $ do
      sourceWithMaxId mentionsTimeline
          C.$= CL.isolate 100
          C.$$ CL.mapM_ $ \status -> do
              replies <- liftIO $ query state AllReplies
              -- skip tweets we have already answered
              if ((TweetId (status ^. statusId)) `elem` replies)
              then do
                  liftIO $ putStrLn "Already replied to:"
                  liftIO $ T.putStrLn $ statusToText status
                  liftIO $ threadDelay $ 60 * 1000000
              else do
                when (isHaskellPost botScreenName status) $ do
                  liftIO $ T.putStrLn $ statusToText status
                  res <- evalExpression status
                  liftIO $ putStrLn res
                  -- posting can fail in several Twitter-specific ways;
                  -- each failure is merely logged, never retried
                  postres <- try $ postreply status (status ^. statusId) res
                  case postres of
                    Left (FromJSONError e) -> liftIO $ print e
                    Left (TwitterErrorResponse s resH errs) ->
                        liftIO $ print errs
                    Left (TwitterStatusError s resH val) ->
                        liftIO $ print val
                    Right status -> liftIO $ print $ statusToText status
                  -- remember the tweet even if posting the reply failed
                  liftIO $ Data.Acid.update state (AddReply $ TweetId (status ^. statusId))
                -- AA TODO: Better rate limiting, this probably blocks every tweet.
                -- We should only wait for 60 seconds after each mentionsTimeline grab
                liftIO $ threadDelay $ 60 * 1000000
-- | Entry point: run the mention-polling loop.
main :: IO ()
main = conduitmain
{-TODO: Import lens:-}
{-https://twitter.com/relrod6/status/516785803100688384-}
| AshyIsMe/lambdatwit | Main.hs | mit | 5,856 | 0 | 28 | 1,945 | 1,458 | 761 | 697 | -1 | -1 |
{-# LANGUAGE DatatypeContexts #-}
{-# LANGUAGE DatatypeContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# language DatatypeContexts #-}
{-# language FlexibleInstances #-}
{-# language DeriveDataTypeable #-}
module Rewriting.TRS
( module Rewriting.TRS
, module Autolib.Symbol
, module Autolib.TES.Term
, module Autolib.TES.Position
, module Autolib.TES.Rule
, module Autolib.TES.Identifier
) where
import qualified Rewriting.TRS.Raw as Raw
import Autolib.Symbol
import Autolib.TES.Term
import Autolib.TES.Position
import Autolib.TES.Rule
import Autolib.TES.Identifier
import qualified Autolib.TES
import Autolib.Reporter
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Multilingual
import Data.Typeable
import qualified Data.Map as M
import qualified Data.Set as S
-- | A term rewriting system: the declared variable symbols plus the rewrite
-- rules over terms.  (Relies on the DatatypeContexts extension enabled at
-- the top of this module.)
data ( Symbol c, Symbol v ) => TRS v c =
     TRS { variables :: [ v ]
         , rules :: [ Rule ( Term v c ) ]
         }
    deriving ( Eq, Ord, Typeable )
-- | Pretty-printing goes through the Raw representation, which shares the
-- field layout.
instance ( Symbol c, Symbol v ) => ToDoc ( TRS v c ) where
    toDoc trs = toDoc
              $ Raw.TRS
              { Raw.variables = variables trs
              , Raw.rules = rules trs
              }
-- | Parsing first reads the Raw form (where variables are indistinguishable
-- from constants) and then runs 'patch' to mark the declared variables.
instance ( Symbol c ) => Reader ( TRS c c ) where
    reader = do
        trs <- reader
        patch trs
-- | upon reading, the parser does not know what is a variable
-- so the system has to be patched
--
-- Walks every term: nodes whose symbol is in the declared variable list
-- become 'Var' (and must be nullary), all other symbols get their arity set
-- from the argument count.  Afterwards each symbol is checked to occur with
-- one arity only; any mismatch aborts the parse with a diagnostic.
patch :: ( Symbol c )
      => Raw.TRS c c
      -> Parser ( TRS c c )
patch trs = do
    let handle t @ ( Node f xs ) =
            if f `elem` Raw.variables trs
            then if null xs
                 then return $ Var f
                 -- a declared variable applied to arguments is an error
                 else fail
                    $ show
                    $ multitext [(DE, "Variable darf keine Argumente haben:" )
                                ,(UK, "variable cannot have arguments")
                                ]
                      <+> toDoc t
            else do
                ys <- mapM handle xs
                let ar = length xs
                return $ Node ( set_arity ar f ) ys
    rs <- sequence $ do
        rule <- Raw.rules trs
        return $ do
            ll <- handle $ lhs rule
            rr <- handle $ rhs rule
            return $ rule { lhs = ll, rhs = rr }
    -- collect, per symbol, every subterm where the argument count disagrees
    -- with the arity recorded on the symbol
    let wrong = M.fromListWith S.union $ do
          r <- rs ; t <- [lhs r, rhs r]
          s @ ( Node f args ) <- subterms t
          let ar = length args
          guard $ arity f /= ar
          return (f, S.singleton s)
    forM_ (M.toList wrong) $ \ (f, ss) ->
        forM (S.toList ss) $ \ s -> fail $ unlines
            [ unwords [ "symbol", show f, "of arity", show (arity f) ]
            , "occurs with different arity"
            , unwords [ "at root of subterm", show s ]
            ]
    return $ TRS { variables = Raw.variables trs
                 , rules = rs
                 }
-- | Small example system.  Uses the partial 'read' via the Reader instance;
-- it errors at evaluation time if the embedded syntax is wrong.
example :: TRS Identifier Identifier
example = read "TRS { variables = [x, y] , rules = [ f(x,y) -> f(y,x) ] }"
-- | Convert to the generic Autolib.TES rule-set representation (lhs/rhs
-- pairs; the False flag is passed through to from_strict_rules).
pack :: ( Symbol c, Symbol v )
     => TRS v c -> Autolib.TES.RS c ( Term v c )
pack trs = Autolib.TES.from_strict_rules False
         $ do r <- rules trs ; return ( lhs r, rhs r )
| marcellussiegburg/autotool | collection/src/Rewriting/TRS.hs | gpl-2.0 | 3,115 | 4 | 18 | 1,108 | 941 | 498 | 443 | 82 | 3 |
module Data.Matrix where
import Foreign.Vector
-- | Abstract matrix type; values are produced on the BAli-Phy runtime side,
-- no Haskell constructors are exposed.
data Matrix a
-- Build a Matrix from a list of rows by converting to nested vectors.
-- NOTE(review): no type signature in the original; presumably
-- fromLists :: [[a]] -> Matrix a via the fromVectors builtin -- confirm.
fromLists xss = fromVectors $ list_to_vector $ map list_to_vector xss
-- | Scale every matrix entry by a scalar.  'builtin' is BAli-Phy's
-- FFI-style binding syntax: arity 2, native name "scaleMatrix" in "SModel".
scaleMatrix :: (Num a) => a -> Matrix a -> Matrix a
builtin scaleMatrix 2 "scaleMatrix" "SModel"
| bredelings/BAli-Phy | haskell/Data/Matrix.hs | gpl-2.0 | 232 | 0 | 8 | 39 | 76 | 39 | 37 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-|
A reader for the timelog file format generated by timeclock.el.
From timeclock.el 2.6:
@
A timelog contains data in the form of a single entry per line.
Each entry has the form:
CODE YYYY/MM/DD HH:MM:SS [COMMENT]
CODE is one of: b, h, i, o or O. COMMENT is optional when the code is
i, o or O. The meanings of the codes are:
b Set the current time balance, or \"time debt\". Useful when
archiving old log data, when a debt must be carried forward.
The COMMENT here is the number of seconds of debt.
h Set the required working time for the given day. This must
be the first entry for that day. The COMMENT in this case is
the number of hours in this workday. Floating point amounts
are allowed.
i Clock in. The COMMENT in this case should be the name of the
project worked on.
o Clock out. COMMENT is unnecessary, but can be used to provide
a description of how the period went, for example.
O Final clock out. Whatever project was being worked on, it is
now finished. Useful for creating summary reports.
@
Example:
@
i 2007/03/10 12:26:00 hledger
o 2007/03/10 17:26:02
@
-}
module Hledger.Read.TimelogReader (
reader,
tests_Hledger_Read_TimelogReader
)
where
import Control.Monad
import Control.Monad.Error
import Test.HUnit
import Text.ParserCombinators.Parsec hiding (parse)
import Hledger.Data
import Hledger.Read.Utils
import Hledger.Read.JournalReader (ledgerExclamationDirective, ledgerHistoricalPrice,
ledgerDefaultYear, emptyLine, ledgerdatetime)
import Hledger.Utils
-- | The timelog reader: detects by file suffix and parses with 'timelogFile'.
reader :: Reader
reader = Reader format detect parse
-- | Format name; also the file suffix 'detect' looks for.
format :: String
format = "timelog"
-- | Does the given file path and data provide timeclock.el's timelog format?
-- Decided purely by the file suffix; the content argument is ignored.
detect :: FilePath -> String -> Bool
detect path _contents = fileSuffix path == format
-- | Parse and post-process a "Journal" from timeclock.el's timelog
-- format, saving the provided file path and the current time, or give an
-- error.
parse :: FilePath -> String -> ErrorT String IO Journal
parse = parseJournalWith timelogFile
-- | Top-level timelog parser: many items followed by end of input.  Each
-- item yields a JournalUpdate; they are composed right-to-left with (.) and
-- returned together with the final parser context.
timelogFile :: GenParser Char JournalContext (JournalUpdate,JournalContext)
timelogFile = do items <- many timelogItem
                 eof
                 ctx <- getState
                 return (liftM (foldr (.) id) $ sequence items, ctx)
    where
      -- As all ledger line types can be distinguished by the first
      -- character, excepting transactions versus empty (blank or
      -- comment-only) lines, can use choice w/o try
      timelogItem = choice [ ledgerExclamationDirective
                          , liftM (return . addHistoricalPrice) ledgerHistoricalPrice
                          , ledgerDefaultYear
                          , emptyLine >> return (return id)
                          , liftM (return . addTimeLogEntry) timelogentry
                          ] <?> "timelog entry, or default year or historical price directive"
-- | Parse a timelog entry: a code character (b/h/i/o/O), whitespace, a
-- date-time, and an optional comment.
-- NOTE(review): 'read [code]' relies on the Read instance of the timelog
-- code type; 'oneOf' already restricts the character, so it should not
-- fail in practice -- confirm the Read instance covers all five codes.
timelogentry :: GenParser Char JournalContext TimeLogEntry
timelogentry = do
  code <- oneOf "bhioO"
  many1 spacenonewline
  datetime <- ledgerdatetime
  comment <- optionMaybe (many1 spacenonewline >> liftM2 (++) getParentAccount restofline)
  return $ TimeLogEntry (read [code]) datetime (maybe "" rstrip comment)
-- | HUnit test group for this module (currently empty).
tests_Hledger_Read_TimelogReader = TestList [
 ]
| trygvis/hledger | hledger-lib/Hledger/Read/TimelogReader.hs | gpl-3.0 | 3,380 | 0 | 13 | 822 | 430 | 232 | 198 | 40 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module EqCommon where
import Data.List
import Data.Foldable (toList)
import Data.Maybe
import qualified Data.ByteString.Lazy as DBL
import qualified Data.Map as Map
import Data.Map ()
import qualified Data.Sequence as Seq
import Data.Sequence (Seq)
import qualified Data.Text as Text
import Data.Text (Text)
import Text.Pandoc as Doc
import Text.Pandoc.Error (handleError)
import System.Command (rawSystem, inDirectory')
import Codec.Xlsx
import Control.Lens
import System.Time (getClockTime)
-- | Which edition of the Equals assessment is being scored.
data EqVersion = Eq2 | Eq3 deriving (Eq, Ord, Show, Read)
-- Simple aliases used throughout the scoring code: chapter number,
-- section letter ('A'..'E'), names, tags, and the raw lesson score.
type Chapter = Int
type Section = Char
type Name = Text
type Tag = Text
type Score = Int
-- | One scored lesson within an assessment, addressed by
-- (chapter, section, count).
data Lesson = Lesson { chapter :: Chapter -- chapter number
                     , section :: Section -- section letter
                     , count :: Int -- position within the section
                     , lName :: (Name,Name) -- names; snd is the displayed description
                     , tags :: (Seq Tag)
                     , score :: Score -- raw score; newLesson validates -1..1
                     , adapted :: Bool -- whether the lesson was adapted
                     } deriving (Show, Read)
-- | Lessons are identified purely by their (chapter, section, count)
-- coordinates; names, tags and scores do not affect equality.
instance Eq Lesson where
    a == b = (chapter a, section a, count a) == (chapter b, section b, count b)
-- | Lexicographic order on (chapter, section, count); consistent with Eq.
instance Ord Lesson where
    compare a b = compare (chapter a, section a, count a)
                          (chapter b, section b, count b)
-- | Weight of a lesson: 1 for an unadapted pass, 0.5 for an adapted pass,
-- and 0 for anything that is not a pass (score /= 1).
adaptedScore :: Lesson -> Double
adaptedScore lesson =
    if score lesson /= 1
       then 0
       else if adapted lesson then 0.5 else 1
-- | A complete assessment: who was assessed, which Equals version was used,
-- who administered it, and the scored lessons.
data Assessment = Assessment { student :: Name
                             , ver :: EqVersion
                             , teacher :: Name
                             , lessons :: Seq Lesson
                             } deriving (Eq)
-- | Pick the (score, adapted) pair of the lesson with the LOWER adapted
-- score; a single present lesson wins by default, and two absent lessons
-- yield (0, False).  Ties go to the second lesson.
bottomScore :: Maybe Lesson -> Maybe Lesson -> (Score, Bool)
bottomScore ma mb = case (ma, mb) of
    (Nothing, Nothing) -> (0, False)
    (Nothing, Just b)  -> pick b
    (Just a,  Nothing) -> pick a
    (Just a,  Just b)
        | adaptedScore a < adaptedScore b -> pick a
        | otherwise                       -> pick b
  where
    pick lesson = (score lesson, adapted lesson)
-- | Find the lesson at the given (chapter, section, count) coordinates.
-- Equality on Lesson only inspects those three fields, so a dummy probe
-- lesson is enough for the index search.
retrieveLesson :: Seq Lesson -> (Chapter, Section, Int) -> Maybe Lesson
retrieveLesson ls (c, s, o) =
    case Seq.elemIndexL probe ls of
      Nothing -> Nothing
      Just i  -> Just (Seq.index ls i)
  where
    probe = Lesson c s o (Text.empty, Text.empty) Seq.empty 0 False
-- | Render one lesson as a CSV row: "<id>,<description>,<adapted score>".
-- Indices after 11.E.6 are shifted down by one, matching the merging of
-- 11.E.5/11.E.6 performed by 'makeExceptions'.
csLesson :: Lesson -> String
csLesson lesson = intercalate "," [ident, description, scoreStr]
  where
    ch  = chapter lesson
    sec = section lesson
    idx = count lesson
    displayIdx
      | ch == 11 && sec == 'E' && idx > 6 = idx - 1
      | otherwise                         = idx
    ident       = intercalate "." [show ch, [sec], show displayIdx]
    description = Text.unpack . snd $ lName lesson
    scoreStr    = show (adaptedScore lesson)
-- | Render one lesson as a LaTeX longtable row, terminated by \\\hline.
-- Uses the same 11.E index shift as 'csLesson'.
ltLesson :: Lesson -> String
ltLesson lesson = intercalate " & " [ident, description, scoreStr] ++ "\\\\\\hline"
  where
    ch  = chapter lesson
    sec = section lesson
    idx = count lesson
    displayIdx = if ch == 11 && sec == 'E' && idx > 6 then idx - 1 else idx
    ident       = intercalate "." [show ch, [sec], show displayIdx]
    description = Text.unpack (snd (lName lesson))
    scoreStr    = show (adaptedScore lesson)
-- | Excel cells (column, cell) for one lesson: id, description, score.
-- Fix: apply the same 11.E index shift as 'csLesson' and 'ltLesson', so the
-- Excel export shows the same lesson ids as the CSV and LaTeX exports.
exLesson :: Lesson -> [(Int, Cell)]
exLesson l@(Lesson c s o n _ _ _)
       = [(1, sc $ CellText id')
         ,(2, sc . CellText $ snd n)
         ,(3, sc . CellDouble $ adaptedScore l)
         ] where id' = Text.pack $ intercalate "." [show c,[s],show cnt]
                 sc = Cell Nothing . Just
                 -- same renumbering as csLesson/ltLesson (11.E.5/6 merge)
                 cnt | c == 11 && s == 'E' && o > 6 = o - 1
                     | otherwise = o
-- | Apply version-specific scoring exceptions.  For Eq2, lessons 11.E.5 and
-- 11.E.6 are merged: 11.E.5 takes the lower of the two scores (per
-- 'bottomScore') and 11.E.6 is removed from the lesson list.  All other
-- versions are returned unchanged.
makeExceptions :: Assessment -> Assessment
makeExceptions a@(Assessment i v t ls) | ver a == Eq2 = nA
                                       | otherwise = a
    where l = retrieveLesson ls (11,'E',5)
          l' = retrieveLesson ls (11,'E',6)
          (ns,na) = bottomScore l l'
          a' = updateLesson a (11,'E',5) (Just ns, Just na)
          -- if 11.E.6 exists, drop it from the updated assessment
          nls Nothing = lessons a'
          nls (Just lsn) = Seq.filter (/= lsn) $ lessons a'
          nA = Assessment i v t $ nls l'
-- | Render an assessment as text: a CSV document when @ext == "csv"@,
-- otherwise a complete standalone LaTeX document.  Exceptions are applied
-- via 'makeExceptions' and lessons are emitted in sorted order.
toTextOut :: Assessment -> String -> String
toTextOut a ext = unlines lns
    where lns | ext == "csv" = [ concat ["Teacher:,", t]
                               , concat ["Student:,", i]
                               , concat ["Start at:,Chapter ",ch," "
                                        ,"(Adjusted Raw Score: ",sc,")\n"]
                               , "Lesson,Description,Score"
                               , fls
                               ]
              | otherwise = [ "\\documentclass[letterpaper]{article}"
                            , "\\usepackage{ifxetex,longtable}"
                            , "\\usepackage[margin=0.5in]{geometry}"
                            , "\\begin{document}"
                            , "\\section*{Equals Assessment Results}"
                            , "\\noindent"
                            , concat ["Teacher: ",t,"\\\\"]
                            , concat ["Student: ",i,"\\\\"]
                            , concat ["Start at Chapter ",ch, " "
                                     ,"(Adjusted Raw Score: ",sc,")\\\\"]
                            , concat ["\\ifxetex"
                                     ,"\\let\\tabular\\longtable"
                                     ,"\\let\\endtabular\\endlongtable"
                                     ,"\\fi"]
                            , "\\begin{tabular}[c]{|l|l|r|}"
                            , "\\hline"
                            , "Lesson & Description & Score\\\\\\hline"
                            , fls
                            , "\\end{tabular}"
                            , "\\end{document}"
                            ]
          -- one row per lesson, format chosen by extension
          fls | ext == "csv" = unlines $ csLesson <$> sl
              | otherwise = unlines $ ltLesson <$> sl
          na = makeExceptions a
          ls = lessons na
          sl = toList $ Seq.sort ls
          ch = show $ suggestedStart na
          sc = show $ adaptedTotal na
          i = Text.unpack $ student na
          t = Text.unpack $ teacher na
-- | Build an Xlsx workbook with one "Assessment" sheet: a title, teacher /
-- student lines, the suggested starting chapter, a header row, and one row
-- per (sorted, exception-adjusted) lesson.
-- Fix: the "Studnet:" label typo is corrected to "Student:".
toExcel :: Assessment -> Xlsx
toExcel a = def & atSheet "Assessment" ?~ s
  where cs = [(1, [(1, scl "Equals Assessment Results")])
             ,(3, [(1, scl $ mconcat ["Teacher: ", teacher a])])
             ,(4, [(1, scl $ mconcat ["Student: ", student a])])
             ,(5, [(1, scl $ mconcat ["Start at Chapter ",ch," (Adjusted ",
                                      "Raw Score: ",sc,")"])])
             ,(7, zip [1..3] $ scl <$> ["Lesson","Description","Score"])
             ] ++ (zip [8..] $ exLesson <$> sl)
        s = def { _wsCells = fromRows cs
                , _wsColumns = [ColumnsWidth 2 2 68.7 0] }
        na = makeExceptions a
        ls = lessons na
        sl = toList $ Seq.sort ls
        ch = Text.pack . show $ suggestedStart na
        sc = Text.pack . show $ adaptedTotal na
        -- plain text cell with no style
        scl = Cell Nothing . Just . CellText
-- | Export an assessment to exports/<teacher>_<student>.<ext>.
-- "csv"/"htm"/"rtf" are written as text; "docx" and "xlsx" go through
-- binary writers; "pdf" writes a .tex file and shells out to xelatex inside
-- the exports directory.  Unknown binary-ish extensions are silently
-- ignored; other extensions get an empty file.
saveFile :: Assessment -> String -> IO ()
saveFile a ext | ext `elem` ["docx","pdf","xlsx"] = sW
               | otherwise = writeFile n f
    where i = handleError $ readLaTeX def lt
          lt = toTextOut a ext
          n = concat ["exports/",t,"_",s,".",ext]
          n' = concat [t,"_",s,".tex"]
          sW = case ext of
                 "docx" -> writeDocx def i >>= DBL.writeFile n
                 "xlsx" -> do c <- getClockTime
                              DBL.writeFile n . fromXlsx c $ toExcel a
                 "pdf" -> do writeFile ("exports/" ++ n') f
                             let xel = rawSystem "xelatex" [n']
                             _ <- inDirectory' "exports" xel
                             return ()
                 _ -> do return ()
          f = case ext of
                "csv" -> lt
                "htm" -> writeHtmlString def i
                "rtf" -> writeRTF def i
                "pdf" -> lt
                _ -> ""
          t = Text.unpack $ teacher a
          s = Text.unpack $ student a
-- | (chapter, section, position, (name, description)).
type Specifier = (Chapter, Section, Int, (Name,Name))
-- | Construct a lesson, validating the chapter and section against the
-- given version and the score against [-1, 1].
-- NOTE(review): partial -- calls 'error' on invalid input.
newLesson :: EqVersion -> Specifier -> Seq Tag -> Score -> Bool -> Lesson
newLesson v (c,s,o,n) t r a | not vCh = error "Invalid Chapter"
                            | not vSec = error "Invalid Section"
                            | not vScr = error "Invalid Score"
                            | otherwise = Lesson c s o n t r a
    where vCh = c `validChapterIn` v
          vSec = s `validSectionIn` v
          vScr = r `elem` [(-1)..1]
-- | Is the chapter number valid for the given Equals version?
-- Fix: the original unwrapped the Map lookup with the partial 'fromJust';
-- an unknown version now simply yields False instead of crashing.
validChapterIn :: Chapter -> EqVersion -> Bool
validChapterIn c v =
    case Map.lookup v chapters of
      Nothing    -> False
      Just cList -> Seq.elemIndexL c cList /= Nothing
    where chapters = Map.fromList [ (Eq2, Seq.fromList [1..12])
                                  , (Eq3, Seq.fromList [1..10])
                                  ]
-- | Is the section letter valid for the given Equals version?
-- Fix: the original unwrapped the Map lookup with the partial 'fromJust';
-- an unknown version now simply yields False instead of crashing.
validSectionIn :: Section -> EqVersion -> Bool
validSectionIn s v =
    case Map.lookup v sections of
      Nothing    -> False
      Just sList -> Seq.elemIndexL s sList /= Nothing
    where sections = Map.fromList
                     [ (Eq2, Seq.fromList ['A'..'E'])
                     , (Eq3, Seq.fromList ['A'..'E'])
                     ]
-- | Sum of the raw lesson scores.  Uses 'sum' instead of the original
-- 'foldl1 (+)', which crashed on an assessment with no lessons; an empty
-- assessment now totals 0.
rawTotal :: Assessment -> Int
rawTotal = sum . fmap score . lessons
-- | Sum of the adapted lesson scores.  Uses 'sum' instead of the original
-- 'foldl1 (+)', which crashed on an assessment with no lessons; an empty
-- assessment now totals 0.
adaptedTotal :: Assessment -> Double
adaptedTotal = sum . fmap adaptedScore . lessons
-- | Suggested starting chapter: one past the index of the first score
-- boundary that the adapted total does not exceed; chapter 1 when no
-- boundary matches (e.g. an empty bounds table).
suggestedStart :: Assessment -> Chapter
suggestedStart a =
    1 + fromMaybe 0 (Seq.findIndexL (adaptedTotal a <=) (scoreBounds (ver a)))
-- | Per-chapter cumulative score boundaries used by 'suggestedStart'.
-- For Eq2: 27.5 per chapter plus an extra 0.5 for every block of five
-- chapters.  Other versions have no boundary table.
scoreBounds :: EqVersion -> Seq Double
scoreBounds Eq2 = Seq.fromList $ zipWith (+) (map (27.5 *) [1..12]) shifts
  where shifts = concatMap (replicate 5) [0,0.5..]
scoreBounds _ = Seq.empty
-- | Override a lesson's score and/or adapted flag; a 'Nothing' leaves the
-- corresponding field untouched.
updateScore :: Lesson -> Maybe Score -> Maybe Bool -> Lesson
updateScore lesson mScore mAdapted =
    lesson { score   = fromMaybe (score lesson)   mScore
           , adapted = fromMaybe (adapted lesson) mAdapted
           }
-- | Update the score/adapted flag of the lesson at the given coordinates;
-- an assessment without that lesson is returned unchanged.
-- NOTE(review): the probe lesson is built via 'newLesson', which validates
-- the coordinates against the assessment's version and calls 'error' on an
-- invalid chapter or section -- confirm callers only pass valid triples.
updateLesson :: Assessment
             -> (Int,Char,Int)
             -> (Maybe Score,Maybe Bool)
             -> Assessment
updateLesson (Assessment n v t ls) (c,s,o) (r,b) = Assessment n v t $ newLs idx
    where l = newLesson v (c,s,o,ns) Seq.empty 0 False
          ns = (Text.pack "", Text.pack "")
          idx = Seq.elemIndexL l ls
          newL i = updateScore (Seq.index ls i) r b
          newLs Nothing = ls
          newLs (Just i) = Seq.update i (newL i) ls
| AbleNetInc/EqualsAssessment | src/EqCommon.hs | gpl-3.0 | 12,058 | 0 | 16 | 5,630 | 3,677 | 1,948 | 1,729 | 232 | 8 |
{-# LANGUAGE OverloadedStrings #-}
module Hpack(
HeaderField,
HeaderName,
HeaderValue,
Headers,
DynamicTable,
PrefixLength(..),
getHeaderFields,
putHeaderFields,
emptyDynTable
) where
import qualified Control.Monad.State.Lazy as State
import qualified Data.Binary.Get as Get
import qualified Data.Binary.Put as Put
import qualified Data.Bits as Bits
import qualified Data.ByteString.Lazy as ByteString
import qualified Data.Text.Encoding as Encoding
import qualified Huffman
import Control.Monad.State.Lazy(StateT)
import Control.Monad.Trans.Class(lift)
import Data.Binary.Get(Get)
import Data.Binary.Put(Put)
import Data.Bits((.|.), (.&.))
import Data.ByteString.Lazy(ByteString)
import Data.Text(Text)
import ProjectPrelude
-- Use to limit the domain of some functions like putInteger and getInteger
-- | Number of prefix bits (1..8) available in the first octet of an
-- HPACK-encoded integer (RFC 7541, section 5.1).
data PrefixLength = One | Two | Three | Four | Five | Six | Seven | Eight
    deriving Show
-- | Maps each 'PrefixLength' onto its bit count 1..8.  'toEnum' of any
-- other Int is a programmer error; it now reports a descriptive message
-- instead of the original bare 'undefined'.
instance Enum PrefixLength where
    toEnum 1 = One
    toEnum 2 = Two
    toEnum 3 = Three
    toEnum 4 = Four
    toEnum 5 = Five
    toEnum 6 = Six
    toEnum 7 = Seven
    toEnum 8 = Eight
    toEnum n = error ("PrefixLength.toEnum: out of range (expected 1..8): " ++ show n)
    fromEnum One = 1
    fromEnum Two = 2
    fromEnum Three = 3
    fromEnum Four = 4
    fromEnum Five = 5
    fromEnum Six = 6
    fromEnum Seven = 7
    fromEnum Eight = 8
-- | Prefix lengths range over 1..8 bits.
instance Bounded PrefixLength where
    minBound = One
    maxBound = Eight
-- Primitive Type Representations - Integer Representation
-- cf. https://tools.ietf.org/html/rfc7541#section-5.1
-- | Decode an HPACK integer with the given prefix length.  Returns the
-- non-prefix bits of the first octet (so callers can inspect the flag bits)
-- together with the decoded value.  If the prefix bits are all ones, the
-- value continues in subsequent octets, 7 bits each, LSB first, with bit 7
-- as the continuation flag.
getInteger :: PrefixLength -> Get (Word8, Word32)
getInteger pLen =
  let n = fromEnum pLen in
  let m = 2^n - 1 :: Word8 in do
  octet <- Get.getWord8
  let i = octet .&. m
  let prefix = octet .&. Bits.complement m
  if i < m then
    -- small value: fits entirely in the prefix bits
    return (prefix, fromIntegral i)
  else
    -- large value: accumulate 7-bit continuation groups
    let impl :: Word32 -> Word32 -> Get (Word8, Word32)
        impl i m = do
          b <- Get.getWord8
          let i' = i + fromIntegral (b .&. 127) * 2^m
          let m' = m + 7
          if Bits.testBit b 7 then
            impl i' m'
          else
            return (prefix, i') in
    impl (fromIntegral i) 0
-- | Encode an HPACK integer (RFC 7541 section 5.1).  @octet@ carries the
-- flag bits to be merged into the first byte.  A value smaller than
-- 2^prefix - 1 is packed directly into the prefix; otherwise the prefix is
-- filled with ones and the remainder follows in 7-bit groups with bit 7 as
-- the continuation flag.
putInteger :: Word8 -> PrefixLength -> Word32 -> Put
putInteger octet pLen i =
  let n = fromEnum pLen in
  let m = 2^n - 1 in
  if i < m then
    Put.putWord8 (octet .&. Bits.complement (fromIntegral m) .|. fromIntegral i)
  else
    let impl i =
          if i >= 128 then do
            Put.putWord8 (fromIntegral (i `mod` 128) + 128)
            impl (i `div` 128)
          else
            Put.putWord8 (fromIntegral i) in do
    Put.putWord8 (octet .|. fromIntegral m)
    impl (i - m)
-- Primitive Type Representations - String Literal Representation
-- cf. https://tools.ietf.org/html/rfc7541#section-5.2
-- | Read a length-prefixed string literal; bit 7 of the prefix marks a
-- Huffman-coded payload, which is decoded before returning.
getStringLiteral :: Get ByteString
getStringLiteral = do
  (prefix, strLen) <- getInteger Seven
  buf <- Get.getLazyByteString (fromIntegral strLen)
  return (if Bits.testBit prefix 7 then Huffman.decode buf else buf)
-- | Write a string literal (RFC 7541 section 5.2): optionally Huffman-code
-- the payload, then emit its length (with bit 7 flagging Huffman) followed
-- by the bytes themselves.
putStringLiteral :: Bool -> ByteString -> Put
putStringLiteral useHuffman raw = do
  let encoded = if useHuffman then Huffman.encode raw else raw
      flagBit = if useHuffman then 128 else 0
  putInteger flagBit Seven (fromIntegral (ByteString.length encoded))
  Put.putLazyByteString encoded
-- | Read a string literal and decode it as UTF-8 Text.
getTextLiteral :: Get Text
getTextLiteral = do
  bytes <- getStringLiteral
  return (Encoding.decodeUtf8 (ByteString.toStrict bytes))
-- | UTF-8 encode a Text value and write it as a string literal.
putTextLiteral :: Bool -> Text -> Put
putTextLiteral useHuffman txt =
  putStringLiteral useHuffman (ByteString.fromStrict (Encoding.encodeUtf8 txt))
-- Binary Format
-- cf. https://tools.ietf.org/html/rfc7541#section-6
-- | Header names/values and header lists as exchanged over HTTP/2.
type HeaderName = Text
type HeaderValue = Text
type HeaderField = (HeaderName, HeaderValue)
type Headers = [HeaderField]
-- | The HPACK static table (RFC 7541, Appendix A).  The leading ("","")
-- entry pads the list so that HPACK's 1-based indices can be used directly
-- with (!!) in 'getFromTable'.
staticTable :: Headers
staticTable = [
  ("", ""),
  (":authority", ""),
  (":method", "GET"),
  (":method", "POST"),
  (":path", "/"),
  (":path", "/index.html"),
  (":scheme", "http"),
  (":scheme", "https"),
  (":status", "200"),
  (":status", "204"),
  (":status", "206"),
  (":status", "304"),
  (":status", "400"),
  (":status", "404"),
  (":status", "500"),
  ("accept-charset", ""),
  ("accept-encoding", "gzip, deflate "),
  ("accept-language", ""),
  ("accept-ranges", ""),
  ("accept", ""),
  ("access-control-allow-origin", ""),
  ("age", ""),
  ("allow", ""),
  ("authorization", ""),
  ("cache-control", ""),
  ("content-disposition", ""),
  ("content-encoding", ""),
  ("content-language", ""),
  ("content-length", ""),
  ("content-location", ""),
  ("content-range", ""),
  ("content-type", ""),
  ("cookie", ""),
  ("date", ""),
  ("etag", ""),
  ("expect", ""),
  ("expires", ""),
  ("from", ""),
  ("host", ""),
  ("if-match", ""),
  ("if-modified-since", ""),
  ("if-none-match", ""),
  ("if-range", ""),
  ("if-unmodified-since", ""),
  ("last-modified", ""),
  ("link", ""),
  ("location", ""),
  ("max-forwards", ""),
  ("proxy-authenticate", ""),
  ("proxy-authorization", ""),
  ("range", ""),
  ("referer", ""),
  ("refresh", ""),
  ("retry-after", ""),
  ("server", ""),
  ("set-cookie", ""),
  ("strict-transport-security", ""),
  ("transfer-encoding", ""),
  ("user-agent", ""),
  ("vary", ""),
  ("via", ""),
  ("www-authenticate", "")
  ]
-- | Number of entries in 'staticTable' (including the index-0 dummy).
staticTableLength :: Int
staticTableLength = length staticTable
-- | The dynamic table: most recently inserted header first (entries are
-- prepended in getLiteralWithIndexing).
type DynamicTable = Headers
-- | Initial, empty dynamic table.
emptyDynTable :: DynamicTable
emptyDynTable = []
-- | Resolve an HPACK index: indices below 'staticTableLength' address the
-- static table, larger ones address the dynamic table.
-- NOTE(review): uses the partial (!!); an out-of-range index from a
-- malformed peer would crash rather than fail the parse -- confirm indices
-- are validated upstream.
getFromTable :: DynamicTable -> Word32 -> HeaderField
getFromTable dynamicTable index =
  let idx = fromIntegral index in
  if idx < staticTableLength then
    staticTable !! idx
  else
    dynamicTable !! (idx - staticTableLength)
-- Indexed Header Field Representation
-- https://tools.ietf.org/html/rfc7541#section-6.1
-- | Decode a fully-indexed header field: read a 7-bit-prefix index and look
-- it up in the static/dynamic tables.
getIndexed :: StateT DynamicTable Get HeaderField
getIndexed = do
  (_, index) <- lift (getInteger Seven)
  table <- State.get
  return (getFromTable table index)
-- Literal Header Field with Incremental Indexing
-- https://tools.ietf.org/html/rfc7541#section-6.2.1
-- | Decode a literal header field and add it to the dynamic table.  An
-- index of 0 means the name is given as a literal; otherwise it is taken
-- from the tables.  The new field is prepended to the dynamic table.
getLiteralWithIndexing :: StateT DynamicTable Get HeaderField
getLiteralWithIndexing = do
  (_, rawIndex) <- lift (getInteger Six)
  let index = fromIntegral rawIndex
  name <-
    if index == 0
      then lift getTextLiteral
      else do
        table <- State.get
        return (fst (getFromTable table index))
  value <- lift getTextLiteral
  let field = (name, value)
  State.modify (field :)
  return field
-- Literal Header Field without Indexing
-- https://tools.ietf.org/html/rfc7541#section-6.2.2
-- | Decode a literal header field without touching the dynamic table.  An
-- index of 0 means the name is given as a literal; otherwise it is taken
-- from the tables.
getLiteralWithoutIndexing :: StateT DynamicTable Get HeaderField
getLiteralWithoutIndexing = do
  (_, rawIndex) <- lift (getInteger Four)
  let index = fromIntegral rawIndex
  name <-
    if index == 0
      then lift getTextLiteral
      else do
        table <- State.get
        return (fst (getFromTable table index))
  value <- lift getTextLiteral
  return (name, value)
-- Literal Header Field never Index
-- https://tools.ietf.org/html/rfc7541#section-6.2.3
-- | Wire format is identical to the without-indexing form; the
-- "never index" semantics only matter when re-encoding, which this
-- module does not do.
getLiteralNeverIndex :: StateT DynamicTable Get HeaderField
getLiteralNeverIndex = getLiteralWithoutIndexing
-- | Dispatch on the top bits of the next octet (without consuming it) to
-- the matching header-field representation (RFC 7541 section 6).
-- Fix: a Dynamic Table Size Update (section 6.3) previously hit
-- 'undefined', crashing the process with an imprecise exception; it now
-- fails the Get parse with an explicit message instead.
getHeaderField :: StateT DynamicTable Get HeaderField
getHeaderField = do
  byte <- lift $ Get.lookAhead Get.getWord8
  if Bits.testBit byte 7 then
    getIndexed
  else if Bits.testBit byte 6 then
    getLiteralWithIndexing
  else if Bits.testBit byte 5 then
    -- Dynamic Table Size Update
    -- https://tools.ietf.org/html/rfc7541#section-6.3
    lift (fail "Hpack.getHeaderField: dynamic table size update not supported")
  else if Bits.testBit byte 4 then
    getLiteralNeverIndex
  else
    getLiteralWithoutIndexing
-- | Decode header fields until the input is exhausted, preserving their
-- order of appearance.
getHeaderFields :: StateT DynamicTable Get Headers
getHeaderFields = go []
  where
    go acc = do
      done <- lift Get.isEmpty
      if done
        then return (reverse acc)
        else do
          field <- getHeaderField
          go (field : acc)
-- Literal Header Field without Indexing
-- https://tools.ietf.org/html/rfc7541#section-6.2.2
-- | Encode one header field in the without-indexing form: a zero 4-bit
-- prefix integer, then the literal name and value.
putLiteralWithoutIndexing :: HeaderField -> Put
putLiteralWithoutIndexing (name, value) = do
  putInteger 0x00 Four 0
  putTextLiteral False name
  putTextLiteral False value
-- | Encode a header list; every field uses the without-indexing form.
putHeaderFields :: Headers -> Put
putHeaderFields headers = mapM_ putLiteralWithoutIndexing headers
| authchir/SoSe17-FFP-haskell-http2-server | src/Hpack.hs | gpl-3.0 | 8,932 | 6 | 24 | 2,631 | 2,383 | 1,329 | 1,054 | 234 | 5 |
module QFeldspar.Simplification (smp) where
import QFeldspar.MyPrelude
import QFeldspar.Expression.GADTFirstOrder
import QFeldspar.Expression.Utils.Equality.GADTFirstOrder
import QFeldspar.Expression.Utils.GADTFirstOrder
(sucAll,sbs,cntVar)
import QFeldspar.Variable.Typed
import QFeldspar.Singleton
import QFeldspar.ChangeMonad
import qualified QFeldspar.Type.GADT as TG
-- | Simplify an expression by re-applying 'smpOne' until it reports no
-- further change.
smp :: TG.Type a => Exp s g a -> Exp s g a
smp = tilNotChg smpOne
-- | One simplification pass in the change monad: inline let-bound terms
-- used at most once, collapse conditionals with identical branches, and
-- rewrite an identity array rebuild (Ary (Len e) (\i -> e ! i)) back to e.
-- All remaining constructors are traversed generically by the Template
-- Haskell splice at the bottom.
smpOne :: forall s g a. TG.Type a =>
          Exp s g a -> Chg (Exp s g a)
smpOne ee = case ee of
  Prm x ns  -> Prm x <$> TG.mapMC smpOne ns
  -- a let whose bound variable occurs at most once is inlined away
  LeT m n
      | cntVar Zro n <= 1 -> chg (sbs m n)
  -- both branches equal: the condition is irrelevant
  Cnd _ m n
      | eql m n -> chg m
  Ary el ef -> case el of
    Len (e :: Exp s g (Ary te)) -> case ef of
     Abs (Ind (e' :: Exp s (Word32 ': g) (Ary te')) (Var Zro)) -> case eqlSin (sin :: TG.Typ te) (sin :: TG.Typ te') of
       Rgt Rfl -> do if eql (sucAll e) e'
                     then chg e
                     else Ary <$> smpOne el <*> smpOne ef
       _       -> Ary <$> smpOne el <*> smpOne ef
     _ -> Ary <$> smpOne el <*> smpOne ef
    _  -> Ary <$> smpOne el <*> smpOne ef
  _ -> $(genOverloadedM 'ee ''Exp ['Prm]
   (\ tt -> if
    | matchQ tt [t| Exp a a a |] -> [| smpOne |]
    | otherwise                  -> [| pure |]))
| shayan-najd/QFeldspar | QFeldspar/Simplification.hs | gpl-3.0 | 1,391 | 0 | 22 | 472 | 565 | 288 | 277 | -1 | -1 |
{-
hoptics - command line tool for deriving optical constants from transmission
spectra and combining optical constants
freq -> frequency in Hz
omega -> frequency in Hz
nu -> wavenumber in 1/m
lambda -> wavelength in m
alpha  -> extinction coefficient in 1/m
n = n0 + ik
n -> complex index of refraction
n0 -> real part
k -> imaginary part
alpha = 1/d * ln(I0/I)
alpha -> absorption_coeff
d -> sample thickness
I/I0 -> transmission
k = alpha / (4 pi nu)
nu -> wavenumber
n0(nu) = n0_seed + 1/(2 pi^2) * \int \limits_0^\infty (alpha(nu')) / (nu'^2 - nu^2) d nu'
nu -> wavenumber at which the index of refraction is calculated
nu' -> integration variable
Maxwell Garnet formalism in Hoptics
-}
import qualified Data.Text.IO as TextIO
import qualified Data.Text as Text
import Data.Complex
import Numeric.GSL
import Data.Either
import Text.Read
import Data.Attoparsec.Text
import Data.Either.Unwrap (fromRight,fromLeft)
import Text.Printf
import System.Environment
import System.IO
import Control.Exception
-- modules specific for hoptics
import Parsers -- parsers for text data
import Optics -- calculation functions from spectra
-- Unit-conversion factors applied to the x column of a raw spectrum.
-- NOTE(review): no type signatures in the original; both appear to be used
-- as Double scale factors via scaleX -- confirm.
x2meter = 1.0e-9 -- conversion factor from wavelength as in spectrum to wavelength in meter
x2invmeter = 1.0e2 -- conversion for x as wavenumber inverse meter
-- data for unit on x-axis
-- | Unit used on the x axis of the input spectrum.
data XData = Wavenumber | Wavelength deriving (Show,Eq)
-- choose an integration method for the Kramers-Kronig relation.
-- Naive is implemented in Optics, the others are provided by the GNU Scientific library
data IntegrationMethod = Naive | Linear | Polynomial | CSpline | Akima deriving (Show,Eq)
-- main module, only showing the version
-- | Print the program banner, then hand over to the interactive main menu.
main :: IO ()
main = do
  mapM_ putStrLn
    [ " ********************"
    , " *** HOptics v0.1 ***"
    , " ********************"
    , ""
    ]
  -- call the IO routine for the main menu
  mainMenu
-- | Interactive top-level menu: set up default parameters for both
-- sub-menus (spectrum analysis and mixing), guess input file names from the
-- command-line arguments, then dispatch on the user's choice.  Invalid
-- input loops back into this menu.
mainMenu = do
  -- set defaults for other menus
  -- spectrumMenu
  thickness <- return 100.0 -- slab thickness in nano metre
  n_seed <- return 1.0 -- seed value for real part of the index of refraction (Kramers Kronig)
  spectralRange <- return (45000.0,590000.0) -- only applies to integration at Kramers-Kronig, which to which wavenumber
  security_distance <- return 500.0 -- around the poles in Kramers-Kronig relation, how large is the gap where values are ommited for integration
  spectrum_path_raw <- getArgs -- get the arguments supplied with the call of hoptics executable
  spectrum_path <- do
    if ((length spectrum_path_raw) < 1) -- and test if any were supplied
       then do -- if no arguments were found
         return $ "spectrum.dat" -- set dummy file name
       else do
         return $ head spectrum_path_raw -- if arguments were found, use them as guess for the input spectrum
  unitOnX <- return Wavenumber -- set wavenumbers as default unit on x-axis
  integration_method <- return Main.Akima -- use robust akima interpolation/integration for Kramers-Kronig
  -- mixingMenu
  spectrum1_prefix <- do -- guess for prefix of spectrum 1
    if ((length spectrum_path_raw) < 1) -- if no arguments are found
       then do -- use a dummy file name
         return $ "spectrum.dat"
       else do -- if arguments are found, use them as a guess
         return $ spectrum_path_raw !! 0
  spectrum2_prefix <- do -- same as above
    if ((length spectrum_path_raw) < 2)
       then do
         return $ "spectrum.dat"
       else do
         return $ spectrum_path_raw !! 1
  volume_fraction <- return 0.5 -- assume a 1/1 mixture of inclusion and matrix
  magnetic_permittivity <- return 1.0 -- assume a non magnetic material
  -- show infos of the current menu
  putStrLn "\nHoptics"
  putStrLn "∟ Main Menu"
  putStrLn ""
  putStrLn "(1) derive index of refraction from spectrum"
  putStrLn "(2) mix two sets of indices of refraction"
  -- get user selection for this menus
  main_menu_input <- getLine
  -- check if user input is valid and react accordingly
  case main_menu_input of
    "1" -> do
      -- call the menu for analysing measured spectra
      spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
    "2" -> do
      -- call the menu for mixing two already analysed spectra
      mixingMenu spectrum1_prefix spectrum2_prefix volume_fraction magnetic_permittivity
    _ -> do
      -- if no correct answer was given, go back to the main menu
      putStrLn "enter correct number"
      mainMenu
-- the menu and routines for analysing measured spectra
-- | Interactive sub-menu for analysing a single measured spectrum.  Every
-- branch re-enters this menu (or 'mainMenu') with the possibly updated
-- settings; option \"0\" performs the actual computation and writes the
-- transmission, absorption, k and n0 spectra to files next to the input.
spectrumMenu :: String -> Double -> (Double,Double) -> Double -> Double -> XData -> IntegrationMethod -> IO ()
spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method = do
  -- show options for this menu
  putStrLn ""
  putStrLn "Hoptics"
  putStrLn "∟ Main Menu"
  putStrLn "  ∟ analyse spectrum"
  putStrLn ""
  putStrLn "(-1) return to main menu"
  putStrLn $ "(0) start computation"
  putStrLn $ "(1) path to spectrum                                 " ++ (show spectrum_path)
  putStrLn $ "(2) thickness of the sample [nm]                     " ++ (show thickness)
  putStrLn $ "(3) spectral range for the calculation               " ++ (show spectralRange)
  putStrLn $ "(4) security distance around poles and boundaries    " ++ (show security_distance)
  putStrLn $ "(5) seed value for real part of index of refraction  " ++ (show n_seed)
  putStrLn $ "(6) dimension on x axis                              " ++ (show unitOnX)
  putStrLn $ "(7) integration method for Kramers Kronig            " ++ (show integration_method)
  -- read in user selection for menu and apply changes or do the computation
  spectrumMenu_input <- getLine
  case spectrumMenu_input of
    "-1" -> do
      mainMenu
    "0" -> do
      -- this will start with the computation
      -- try reading in the spectrum from the input file.
      -- if it fails, catch the exception, show the user what went wrong
      -- and let him try again.
      spectrum_raw_content <- Control.Exception.try (TextIO.readFile spectrum_path) :: IO (Either SomeException Text.Text)
      case spectrum_raw_content of
        Left exception -> do
          putStrLn ("can't open file " ++ (show exception))
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        Right spectrum_raw_content -> do
          -- parse succesfully read in spectrum
          -- NOTE(review): fromRight crashes on a malformed spectrum file;
          -- parse errors are not reported to the user -- confirm acceptable.
          let spectrum_xUnkwn = fromRight $ parseOnly parse_spectrum spectrum_raw_content
          -- convert to wavenumber and order for increasing wavenumbers
          trans_spectrum_unordered <- if (unitOnX == Wavelength)
            then do
              let trans_spectrum_inMeter = map (scaleX x2meter) spectrum_xUnkwn
              return $ wavelength2wavenumber trans_spectrum_inMeter
            else do
              let trans_spectrum_inInvMeter = map (scaleX x2invmeter) spectrum_xUnkwn
              return $ trans_spectrum_inInvMeter
          let trans_spectrum = order_spectrum trans_spectrum_unordered
          -- calculate the absorption coefficient for every wavelength
              alpha_spectrum = absorption_coeff (thickness * 1.0e-9) trans_spectrum
          -- the imaginary part of the index of refraction
              k_spectrum = indOfRef_k alpha_spectrum
          -- and the real part of the index of refraction
          -- (selected integration method decides how the Kramers-Kronig
          -- integral is evaluated; unknown methods fall back to Akima)
              n0_spectrum
                | integration_method == Naive = indOfRef_n' spectralRange n_seed security_distance alpha_spectrum
                | integration_method == Main.Linear = indOfRef_n Numeric.GSL.Linear spectralRange n_seed security_distance alpha_spectrum
                | integration_method == Main.Polynomial = indOfRef_n Numeric.GSL.Polynomial spectralRange n_seed security_distance alpha_spectrum
                | integration_method == Main.CSpline = indOfRef_n Numeric.GSL.CSpline spectralRange n_seed security_distance alpha_spectrum
                | integration_method == Main.Akima = indOfRef_n Numeric.GSL.Akima spectralRange n_seed security_distance alpha_spectrum
                | otherwise = indOfRef_n Numeric.GSL.Akima spectralRange n_seed security_distance alpha_spectrum
          -- start outputting them, calculation happens here
          putStrLn "\ncalculating index of refraction... (can take some time)"
          -- parse the file name and find its prefix and suffix. use the prefix as prefix for the output
          let spectrum_name = fromRight $ parseOnly parse_filename (Text.pack spectrum_path)
              spectrum_basename = fst spectrum_name
          -- open handles for the output spectra
          trans_handle <- openFile (spectrum_basename ++ "_trans.dat") WriteMode
          alpha_handle <- openFile (spectrum_basename ++ "_alpha.dat") WriteMode
          k_handle <- openFile (spectrum_basename ++ "_k.dat") WriteMode
          n0_handle <- openFile (spectrum_basename ++ "_n0.dat") WriteMode
          -- monadically map a formatting output routine to the spectra
          mapM_ (print_specpoint trans_handle) trans_spectrum
          mapM_ (print_specpoint alpha_handle) alpha_spectrum
          mapM_ (print_specpoint k_handle) k_spectrum
          mapM_ (print_specpoint n0_handle) n0_spectrum
          -- close the handles
          hClose trans_handle
          hClose alpha_handle
          hClose k_handle
          hClose n0_handle
          putStrLn "finished calculation"
          mainMenu
    -- changes to the settings are processed from here
    "1" -> do
      putStrLn ""
      putStrLn "enter file name of the spectrum"
      spectrum_path <- getLine
      spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
    "2" -> do
      putStrLn ""
      putStrLn "enter thickness of the sample [nm]"
      thickness_raw <- getLine
      case ((readMaybe :: String -> Maybe Double) thickness_raw) of
        Just x -> do
          let thickness = x
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        Nothing -> do
          putStrLn "can not read this number, try again"
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
    "3" -> do
      putStrLn ""
      putStrLn "spectral range for the computation (low,high)"
      spectralRange_raw <- getLine
      case ((readMaybe :: String -> Maybe (Double,Double)) spectralRange_raw) of
        Just x -> do
          let spectralRange = x
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        Nothing -> do
          putStrLn "can not read this numbers, try again"
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
    "4" -> do
      putStrLn ""
      putStrLn "security distance around poles and boundaries in the dimension on x"
      security_distance_raw <- getLine
      case ((readMaybe :: String -> Maybe Double) security_distance_raw) of
        Just x -> do
          let security_distance = x
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        Nothing -> do
          putStrLn "can not read this number, try again"
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
    "5" -> do
      putStrLn ""
      -- NOTE(review): typo in the prompt text ("fro" -> "for"); kept as is
      -- because this is a user-facing runtime string.
      putStrLn "seed value fro the index of refraction"
      n_seed_raw <- getLine
      case ((readMaybe :: String -> Maybe Double) n_seed_raw) of
        Just x -> do
          let n_seed = x
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        Nothing -> do
          putStrLn "can not read this number, try again"
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
    "6" -> do
      putStrLn ""
      putStrLn "wavenumber [cm⁻¹] (wn) or wavelength [nm] (wl) on x axis?"
      unitOnX_raw <- getLine
      case unitOnX_raw of
        "wl" -> do
          let unitOnX = Wavelength
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        "wn" -> do
          let unitOnX = Wavenumber
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        -- any other answer silently falls back to wavenumber
        _ -> do
          let unitOnX = Wavenumber
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
    "7" -> do
      putStrLn ""
      putStrLn "integration method for real part"
      putStrLn "Naive | Linear | Polynomial | CSpline | Akima"
      putStrLn "Naive recommended for UVVis, Akima otherwise"
      integration_method_raw <- getLine
      case integration_method_raw of
        "Naive" -> do
          let integration_method = Main.Naive
          -- the naive method does no pole handling, so the security
          -- distance is forced to zero
          let security_distance = 0.0
          putStrLn "security distance does not apply to this method"
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        "Linear" -> do
          let integration_method = Main.Linear
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        "Polynomial" -> do
          let integration_method = Main.Polynomial
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        "CSpline" -> do
          let integration_method = Main.CSpline
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        "Akima" -> do
          let integration_method = Main.Akima
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
        _ -> do
          putStrLn "not a valid choice, try again"
          spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
    _ -> do
      spectrumMenu spectrum_path thickness spectralRange security_distance n_seed unitOnX integration_method
-- menu and routines for mixing dielectric permittivities by Maxwell-Garnet
-- | Interactive sub-menu for mixing the dielectric permittivities of two
-- previously analysed spectra via the Maxwell-Garnett effective-medium
-- formalism.  Expects the "<prefix>_n0.dat" and "<prefix>_k.dat" files
-- written by 'spectrumMenu' for both components.
mixingMenu :: String -> String -> Double -> Double -> IO ()
mixingMenu spectrum1_prefix spectrum2_prefix volume_fraction magnetic_permittivity = do
  -- show options for this menu
  putStrLn ""
  putStrLn "Hoptics"
  putStrLn "∟ Main Menu"
  putStrLn "  ∟ mix spectra"
  putStrLn ""
  putStrLn "(-1) return to main menu"
  putStrLn $ "(0) start computation"
  putStrLn $ "(1) prefix of spectrum of component 1 (inclusion)  " ++ (show spectrum1_prefix)
  putStrLn $ "(2) prefix of spectrum of component 2 (matrix)     " ++ (show spectrum2_prefix)
  putStrLn $ "(3) volume fraction of component 1                 " ++ (show volume_fraction)
  putStrLn $ "(4) magnetic permittivity of the mixture           " ++ (show magnetic_permittivity)
  -- if a strange input is found, echo a warning
  if (volume_fraction > 0.5)
    then do
      putStrLn "WARNING: volume fraction of the inclusion is higher than 0.5. BE SURE YOU WANT TO DO THIS"
    else do
      return ()
  -- get choice of the user
  mixingMenu_input <- getLine
  -- check for its validity and act accordingly
  case mixingMenu_input of
    "-1" -> do
      mainMenu
    "0" -> do
      -- try reading 2 spectra, each consisting of the real part (n0) and the complex part (k), catching possible exceptions
      spectrum1_n0_raw <- Control.Exception.try (TextIO.readFile (spectrum1_prefix ++ "_n0.dat")) :: IO (Either SomeException Text.Text)
      spectrum2_n0_raw <- Control.Exception.try (TextIO.readFile (spectrum2_prefix ++ "_n0.dat")) :: IO (Either SomeException Text.Text)
      spectrum1_k_raw <- Control.Exception.try (TextIO.readFile (spectrum1_prefix ++ "_k.dat")) :: IO (Either SomeException Text.Text)
      spectrum2_k_raw <- Control.Exception.try (TextIO.readFile (spectrum2_prefix ++ "_k.dat")) :: IO (Either SomeException Text.Text)
      if (isLeft spectrum1_n0_raw || isLeft spectrum2_n0_raw || isLeft spectrum1_k_raw || isLeft spectrum2_k_raw)
        -- exception handling
        then do
          putStrLn ("can't open some files, look at the errors and try again")
          if (isLeft spectrum1_n0_raw)
            then do
              print (fromLeft spectrum1_n0_raw)
            else do
              return ()
          if (isLeft spectrum2_n0_raw)
            then do
              print (fromLeft spectrum2_n0_raw)
            else do
              return ()
          if (isLeft spectrum1_k_raw)
            then do
              print (fromLeft spectrum1_k_raw)
            else do
              return ()
          if (isLeft spectrum2_k_raw)
            then do
              print (fromLeft spectrum2_k_raw)
            else do
              return ()
          mixingMenu spectrum1_prefix spectrum2_prefix volume_fraction magnetic_permittivity
        -- doing the actual calculation if everything else went fine
        else do
          -- parse the spectra
          -- NOTE(review): fromRight crashes if a file does not parse; the
          -- parse error is never shown to the user -- confirm acceptable.
          let spectrum1_n0 = fromRight $ parseOnly parse_spectrum (fromRight spectrum1_n0_raw)
              spectrum2_n0 = fromRight $ parseOnly parse_spectrum (fromRight spectrum2_n0_raw)
              -- ("spectrum1_k_=" lexes as "spectrum1_k_ ="; the trailing
              -- underscore is part of the binding's name)
              spectrum1_k_= fromRight $ parseOnly parse_spectrum (fromRight spectrum1_k_raw)
              spectrum2_k = fromRight $ parseOnly parse_spectrum (fromRight spectrum2_k_raw)
              -- combine the two pars (n0 and k) read for each from file to the complex index of refraction
              spectrum1_indOfRef = nkSep2complexIndOfRef spectrum1_n0 spectrum1_k_
              spectrum2_indOfRef = nkSep2complexIndOfRef spectrum2_n0 spectrum2_k
              -- convert index of refraction to dielectric permittivity
              spectrum1_permittivity = n2epsilon magnetic_permittivity spectrum1_indOfRef
              spectrum2_permittivity = n2epsilon magnetic_permittivity spectrum2_indOfRef
              -- apply Maxwell Garnet formalism to the two sets of permittivities
              spectrum_permittivity_comb_MaxwellGarnet = maxwellGarnet volume_fraction spectrum1_permittivity spectrum2_permittivity
              -- convert back to index of refraction
              spectrum_n_comb_MaxwellGarnet = epsilon2n magnetic_permittivity spectrum_permittivity_comb_MaxwellGarnet
              -- get the real part from the index of refraction
              spectrum_n0_comb_MaxwellGarnet = zip (map fst spectrum_n_comb_MaxwellGarnet) (map realPart $ map snd spectrum_n_comb_MaxwellGarnet)
              -- get the complex part from the index of refraction
              spectrum_k_comb_MaxwellGarnet = zip (map fst spectrum_n_comb_MaxwellGarnet) (map imagPart $ map snd spectrum_n_comb_MaxwellGarnet)
          -- start actual calculation
          putStrLn ""
          putStrLn "mixing spectra now by Maxwell Garnet formalism"
          -- open a output handle
          maxwellGarnetHandle_n0 <- openFile (spectrum1_prefix ++ "+" ++ spectrum2_prefix ++ "_n0_MaxwellGarnet.dat") WriteMode
          maxwellGarnetHandle_k <- openFile (spectrum1_prefix ++ "+" ++ spectrum2_prefix ++ "_k_MaxwellGarnet.dat") WriteMode
          -- apply monadic print
          mapM_ (print_specpoint maxwellGarnetHandle_n0) spectrum_n0_comb_MaxwellGarnet
          mapM_ (print_specpoint maxwellGarnetHandle_k) spectrum_k_comb_MaxwellGarnet
          -- close the handles again
          hClose maxwellGarnetHandle_k
          hClose maxwellGarnetHandle_n0
          putStr "finished spectra mixing"
          mainMenu
    -- allow changes to the settings
    "1" -> do
      putStrLn ""
      putStrLn "enter prefix of spectrum 1 (\"spectrum\" for \"spectrum_n.dat\" and \"spectrum_k.dat\")"
      spectrum1_prefix <- getLine
      mixingMenu spectrum1_prefix spectrum2_prefix volume_fraction magnetic_permittivity
    "2" -> do
      putStrLn ""
      putStrLn "enter prefix of spectrum 2 (\"spectrum\" for \"spectrum_n.dat\" and \"spectrum_k.dat\")"
      spectrum2_prefix <- getLine
      mixingMenu spectrum1_prefix spectrum2_prefix volume_fraction magnetic_permittivity
    "3" -> do
      putStrLn ""
      putStrLn "volume fraction of component 1"
      volume_fraction_raw <- getLine
      case ((readMaybe :: String -> Maybe Double) volume_fraction_raw) of
        Just x -> do
          let volume_fraction = x
          mixingMenu spectrum1_prefix spectrum2_prefix volume_fraction magnetic_permittivity
        Nothing -> do
          putStrLn "can not read this number, try again"
          mixingMenu spectrum1_prefix spectrum2_prefix volume_fraction magnetic_permittivity
    "4" -> do
      putStrLn ""
      putStrLn "relative magnetic permittivity"
      magnetic_permittivity_raw <- getLine
      case ((readMaybe :: String -> Maybe Double) magnetic_permittivity_raw) of
        Just x -> do
          let magnetic_permittivity = x
          mixingMenu spectrum1_prefix spectrum2_prefix volume_fraction magnetic_permittivity
        Nothing -> do
          putStrLn "can not read this number, try again"
          mixingMenu spectrum1_prefix spectrum2_prefix volume_fraction magnetic_permittivity
    _ -> do
      mixingMenu spectrum1_prefix spectrum2_prefix volume_fraction magnetic_permittivity
-- | Multiply only the x-component of a point by the given factor.
scaleX :: (Num a) => a -> (a,a) -> (a,a)
scaleX factor point = (factor * fst point, snd point)
-- | Multiply only the y-component of a point by the given factor.
scaleY :: (Num a) => a -> (a,a) -> (a,a)
scaleY factor point = (fst point, factor * snd point)
-- | Ensure a spectrum is sorted by increasing x-value, reversing it when it
-- was recorded in decreasing order.
--
-- Fix: the original guards were non-exhaustive -- an empty spectrum crashed
-- on 'head'/'last', and a spectrum whose first and last x-values are equal
-- (e.g. a single point) fell through both guards with a pattern-match
-- failure.  Those inputs are now returned unchanged.
order_spectrum :: [(Double,Double)] -> [(Double,Double)]
order_spectrum [] = []
order_spectrum spectrum
  -- already non-decreasing (or a single point): keep as is
  | fst (head spectrum) <= fst (last spectrum) = spectrum
  | otherwise = reverse spectrum
-- | Write one spectrum point to the handle as two fixed-width scientific
-- numbers ("%+12.6e"), separated by a space and followed by " \n".
print_specpoint :: Handle -> (Double,Double) -> IO()
print_specpoint handle (xVal, yVal) =
  -- one variadic hPrintf call produces exactly the same bytes as two calls
  hPrintf handle "%+12.6e %+12.6e \n" xVal yVal
| sheepforce/Hoptics | src/Hoptics.hs | gpl-3.0 | 25,768 | 7 | 26 | 9,041 | 3,907 | 1,858 | 2,049 | 340 | 23 |
runCont ka (\a -> let kb = kab a
in runCont kb hb) | hmemcpy/milewski-ctfp-pdf | src/content/3.5/code/haskell/snippet28.hs | gpl-3.0 | 69 | 0 | 12 | 31 | 36 | 16 | 20 | -1 | -1 |
module HsPredictor.Render.FLTK where
import HsPredictor.SQL.Queries
import HsPredictor.Types.Types
import HsPredictor.Render.Text (convertList)
-- | Read all statistics from the database and convert them to rows of
-- strings for the table widget.
getTableData :: DbPath -> IO [[String]]
getTableData dbname = fmap convertList (getStatsAll dbname)
| jacekm-git/HsPredictor | library/HsPredictor/Render/FLTK.hs | gpl-3.0 | 274 | 0 | 10 | 39 | 84 | 45 | 39 | 9 | 1 |
module CatmullClark where
import qualified Data.Vector as V (Vector, fromList, toList, map, (!))
import Data.List (nub)
import Data.Maybe (catMaybes)
import Vec
import VecGL (toVec3, toVertex3, toNormal3)
import Model
import ObjFile
-- Internal data types for the Catmull Clark algorithm
type Point = Vec3
-- A face is the ordered list of its corner points.
data Face = Face [Point] deriving (Eq, Show)
-- A directed edge between two points; under the derived Eq,
-- 'Edge a b' and 'Edge b a' are distinct values.
data Edge = Edge Point Point deriving (Eq, Show)
-- Convert between model and internal data
modelToFaces :: (Model, V.Vector [Int]) -> [Face]
-- Face index lists are 1-based (OBJ convention), hence the (i - 1).
modelToFaces (Model vs _, fs) = V.toList $ V.map (\is -> Face (map toVec3 $ fromIndecies vs is)) fs
  where fromIndecies xs = map (\i -> xs V.! (i - 1))
facesToModel :: [Face] -> (Model, V.Vector [Int])
facesToModel faces =
  let verts = nub . concat $ map (\(Face pl) -> pl) faces
      -- number the unique vertices from 1 (indices are 1-based on output)
      vertDict = zip verts [1..]
      -- faces as index lists; the lookup cannot actually fail because
      -- 'verts' collects every corner, so catMaybes only satisfies the types
      fs = map (\(Face pl) -> FaceCommand $ catMaybes $ map (\v -> lookup v vertDict) pl) faces
      vs = map toVertex3 verts
      ns = map toNormal3 $ calcFaceNormals vs fs
  in (Model (V.fromList vs) (V.fromList ns), (V.fromList $ map faceIndecies fs))
-- Calculate the centroid of a Face or Edge
-- 'centroid' is the component-wise mean of a list of points ('meanV').
centroid = meanV
-- Midpoint of an edge.
midEdge (Edge p1 p2) = meanV [p1,p2]
-- Membership predicates used to find adjacent geometry.
-- | True when the point is one of the face's corners.
pointInFace p (Face corners) = any (== p) corners
-- | True when the point is an endpoint of the edge.
pointInEdge p (Edge a b) = p `elem` [a, b]
-- | True when both endpoints of the edge lie on the face.
edgeInFace (Edge a b) face = all (`pointInFace` face) [a, b]
-- Adjacency queries over explicit face and edge lists.
-- | All faces from the list that contain edge 'e'.
edgeAdjFaces e faces = [ f | f <- faces, edgeInFace e f ]
-- | All edges from the list that touch point 'p'.
pointAdjEdges p es = [ edge | edge <- es, pointInEdge p edge ]
-- | All faces from the list that have 'p' as a corner.
pointAdjFaces p faces = [ f | f <- faces, pointInFace p f ]
-- Face point and edge point of the Catmull-Clark scheme.
-- | Face point: centroid of the face's corners.
facePoint (Face corners) = centroid corners
-- | Edge point: average of the edge midpoint and the centroid of the face
-- points of all faces adjacent to the edge.
edgePoint faces e = centroid [midEdge e, centroid adjFacePoints]
  where adjFacePoints = [ facePoint f | f <- edgeAdjFaces e faces ]
-- Modify the original points
-- Catmull-Clark vertex rule: (F + 2 R + (n - 3) P) / n, where F is the
-- average of the adjacent face points, R the average of the adjacent edge
-- midpoints, P the original point and n the number of adjacent faces.
modPoint faces edges p =
  let pEdges = pointAdjEdges p edges
      pFaces = pointAdjFaces p faces
      n = fromIntegral $ length pFaces
      avgFace = centroid $ map facePoint pFaces
      avgEdge = centroid $ map midEdge pEdges
  in (avgFace +. (avgEdge *. 2.0) +. (p *. (n - 3.0))) /. n
-- Find all of the edges in a face
-- NOTE(review): this yields an 'Edge' for every ordered pair of distinct
-- corners -- both orientations of each edge and, for faces with more than
-- three corners, the diagonals as well.  Confirm this is intended.
edges (Face pl) = nub [ Edge p1 p2 | p1 <- pl, p2 <- pl, p1 /= p2 ]
-- Do one iteration of the catmullClark algorithm
catmullClark :: [Face] -> [Face]
catmullClark faces =
  let elist = concat $ map edges faces
      modFace face@(Face pl) =
        let fp = facePoint face
            -- one edge point per (directed) edge of this face
            eps = (map (edgePoint faces) . edges) face
            -- the original corners, moved by the Catmull-Clark vertex rule
            pl' = map (modPoint faces elist) pl
        -- NOTE(review): the comprehension pairs every edge point with every
        -- edge point, including e == e', so degenerate quads are produced
        -- and only exact duplicates are removed by 'nub' -- confirm intended.
        in nub [ Face [e, p, e', fp] |
                 e <- eps, p <- pl', e' <- eps ]
  in concat $ map modFace faces
| WraithM/HCatmullClark | src/CatmullClark.hs | gpl-3.0 | 2,640 | 0 | 18 | 655 | 1,018 | 533 | 485 | 52 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigQuery.Jobs.Cancel
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Requests that a job be cancelled. This call will return immediately, and
-- the client will need to poll for the job status to see if the cancel
-- completed successfully. Cancelled jobs may still incur costs.
--
-- /See:/ <https://cloud.google.com/bigquery/ BigQuery API Reference> for @bigquery.jobs.cancel@.
module Network.Google.Resource.BigQuery.Jobs.Cancel
(
-- * REST Resource
JobsCancelResource
-- * Creating a Request
, jobsCancel
, JobsCancel
-- * Request Lenses
, jcJobId
, jcLocation
, jcProjectId
) where
import Network.Google.BigQuery.Types
import Network.Google.Prelude
-- | A resource alias for @bigquery.jobs.cancel@ method which the
-- 'JobsCancel' request conforms to.
-- Encodes: POST /bigquery/v2/projects/{projectId}/jobs/{jobId}/cancel
-- with optional ?location= query parameter, JSON response.
type JobsCancelResource =
     "bigquery" :>
       "v2" :>
         "projects" :>
           Capture "projectId" Text :>
             "jobs" :>
               Capture "jobId" Text :>
                 "cancel" :>
                   QueryParam "location" Text :>
                     QueryParam "alt" AltJSON :>
                       Post '[JSON] JobCancelResponse
-- | Requests that a job be cancelled. This call will return immediately, and
-- the client will need to poll for the job status to see if the cancel
-- completed successfully. Cancelled jobs may still incur costs.
--
-- /See:/ 'jobsCancel' smart constructor.
data JobsCancel =
  JobsCancel'
    { _jcJobId :: !Text -- ^ [Required] Job ID of the job to cancel.
    , _jcLocation :: !(Maybe Text) -- ^ Geographic location of the job (required except for US and EU).
    , _jcProjectId :: !Text -- ^ [Required] Project ID of the job to cancel.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'JobsCancel' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'jcJobId'
--
-- * 'jcLocation'
--
-- * 'jcProjectId'
jobsCancel
    :: Text -- ^ 'jcJobId'
    -> Text -- ^ 'jcProjectId'
    -> JobsCancel
-- Build the request from the two required identifiers; the optional
-- location stays unset until 'jcLocation' is used.
jobsCancel jobId projectId =
  JobsCancel'
    { _jcJobId = jobId
    , _jcLocation = Nothing
    , _jcProjectId = projectId
    }
-- Lenses over the 'JobsCancel' request record.
-- | [Required] Job ID of the job to cancel
jcJobId :: Lens' JobsCancel Text
jcJobId = lens _jcJobId (\ s a -> s{_jcJobId = a})
-- | The geographic location of the job. Required except for US and EU. See
-- details at
-- https:\/\/cloud.google.com\/bigquery\/docs\/locations#specifying_your_location.
jcLocation :: Lens' JobsCancel (Maybe Text)
jcLocation
  = lens _jcLocation (\ s a -> s{_jcLocation = a})
-- | [Required] Project ID of the job to cancel
jcProjectId :: Lens' JobsCancel Text
jcProjectId
  = lens _jcProjectId (\ s a -> s{_jcProjectId = a})
-- Dispatch: project ID and job ID become path captures, the optional
-- location a query parameter; accepted OAuth scopes listed below.
instance GoogleRequest JobsCancel where
        type Rs JobsCancel = JobCancelResponse
        type Scopes JobsCancel =
             '["https://www.googleapis.com/auth/bigquery",
               "https://www.googleapis.com/auth/cloud-platform"]
        requestClient JobsCancel'{..}
          = go _jcProjectId _jcJobId _jcLocation (Just AltJSON)
              bigQueryService
          where go
                  = buildClient (Proxy :: Proxy JobsCancelResource)
                      mempty
| brendanhay/gogol | gogol-bigquery/gen/Network/Google/Resource/BigQuery/Jobs/Cancel.hs | mpl-2.0 | 3,802 | 0 | 16 | 895 | 474 | 284 | 190 | 71 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.ResetInstanceAttribute
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Resets an attribute of an instance to its default value. To reset the 'kernel'
-- or 'ramdisk', the instance must be in a stopped state. To reset the 'SourceDestCheck', the instance can be either running or stopped.
--
-- The 'SourceDestCheck' attribute controls whether source/destination checking
-- is enabled. The default value is 'true', which means checking is enabled. This
-- value must be 'false' for a NAT instance to perform NAT. For more information,
-- see <http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_NAT_Instance.html NAT Instances> in the /Amazon Virtual Private Cloud User Guide/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-ResetInstanceAttribute.html>
module Network.AWS.EC2.ResetInstanceAttribute
(
-- * Request
ResetInstanceAttribute
-- ** Request constructor
, resetInstanceAttribute
-- ** Request lenses
, riaAttribute
, riaDryRun
, riaInstanceId
-- * Response
, ResetInstanceAttributeResponse
-- ** Response constructor
, resetInstanceAttributeResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data ResetInstanceAttribute = ResetInstanceAttribute
    { _riaAttribute  :: InstanceAttributeName -- ^ The attribute to reset.
    , _riaDryRun     :: Maybe Bool -- ^ Dry-run flag: only check permissions, do not execute.
    , _riaInstanceId :: Text -- ^ The ID of the instance.
    } deriving (Eq, Read, Show)
-- | 'ResetInstanceAttribute' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'riaAttribute' @::@ 'InstanceAttributeName'
--
-- * 'riaDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'riaInstanceId' @::@ 'Text'
--
resetInstanceAttribute :: Text -- ^ 'riaInstanceId'
                       -> InstanceAttributeName -- ^ 'riaAttribute'
                       -> ResetInstanceAttribute
-- Build the request from the instance ID and the attribute to reset;
-- the dry-run flag defaults to unset (the request really executes).
resetInstanceAttribute instanceId attrName = ResetInstanceAttribute
    { _riaAttribute  = attrName
    , _riaDryRun     = Nothing
    , _riaInstanceId = instanceId
    }
-- Lenses over the 'ResetInstanceAttribute' request record.
-- | The attribute to reset.
riaAttribute :: Lens' ResetInstanceAttribute InstanceAttributeName
riaAttribute = lens _riaAttribute (\s a -> s { _riaAttribute = a })
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have the
-- required permissions, the error response is 'DryRunOperation'. Otherwise, it is 'UnauthorizedOperation'.
riaDryRun :: Lens' ResetInstanceAttribute (Maybe Bool)
riaDryRun = lens _riaDryRun (\s a -> s { _riaDryRun = a })
-- | The ID of the instance.
riaInstanceId :: Lens' ResetInstanceAttribute Text
riaInstanceId = lens _riaInstanceId (\s a -> s { _riaInstanceId = a })
-- The call returns no payload, hence an empty (field-less) response type.
data ResetInstanceAttributeResponse = ResetInstanceAttributeResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'ResetInstanceAttributeResponse' constructor.
resetInstanceAttributeResponse :: ResetInstanceAttributeResponse
resetInstanceAttributeResponse = ResetInstanceAttributeResponse
-- Serialisation: EC2 query API -- fixed path "/", the three record fields
-- become query parameters, and the response body is ignored.
instance ToPath ResetInstanceAttribute where
    toPath = const "/"
instance ToQuery ResetInstanceAttribute where
    toQuery ResetInstanceAttribute{..} = mconcat
        [ "Attribute" =? _riaAttribute
        , "DryRun" =? _riaDryRun
        , "InstanceId" =? _riaInstanceId
        ]
instance ToHeaders ResetInstanceAttribute
instance AWSRequest ResetInstanceAttribute where
    type Sv ResetInstanceAttribute = EC2
    type Rs ResetInstanceAttribute = ResetInstanceAttributeResponse
    request = post "ResetInstanceAttribute"
    response = nullResponse ResetInstanceAttributeResponse
| romanb/amazonka | amazonka-ec2/gen/Network/AWS/EC2/ResetInstanceAttribute.hs | mpl-2.0 | 4,592 | 0 | 9 | 917 | 469 | 288 | 181 | 58 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudTrace.Projects.Traces.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a single trace by its ID.
--
-- /See:/ <https://cloud.google.com/tools/cloud-trace Google Cloud Trace API Reference> for @cloudtrace.projects.traces.get@.
module Network.Google.Resource.CloudTrace.Projects.Traces.Get
(
-- * REST Resource
ProjectsTracesGetResource
-- * Creating a Request
, projectsTracesGet
, ProjectsTracesGet
-- * Request Lenses
, ptgTraceId
, ptgXgafv
, ptgUploadProtocol
, ptgPp
, ptgAccessToken
, ptgUploadType
, ptgBearerToken
, ptgProjectId
, ptgCallback
) where
import Network.Google.CloudTrace.Types
import Network.Google.Prelude
-- | A resource alias for @cloudtrace.projects.traces.get@ method which the
-- 'ProjectsTracesGet' request conforms to.
-- Encodes: GET /v1/projects/{projectId}/traces/{traceId} with the usual
-- optional Google API query parameters, JSON response.
type ProjectsTracesGetResource =
     "v1" :>
       "projects" :>
         Capture "projectId" Text :>
           "traces" :>
             Capture "traceId" Text :>
               QueryParam "$.xgafv" Text :>
                 QueryParam "upload_protocol" Text :>
                   QueryParam "pp" Bool :>
                     QueryParam "access_token" Text :>
                       QueryParam "uploadType" Text :>
                         QueryParam "bearer_token" Text :>
                           QueryParam "callback" Text :>
                             QueryParam "alt" AltJSON :> Get '[JSON] Trace
-- | Gets a single trace by its ID.
--
-- /See:/ 'projectsTracesGet' smart constructor.
data ProjectsTracesGet = ProjectsTracesGet'
    { _ptgTraceId :: !Text -- ^ ID of the trace to return.
    , _ptgXgafv :: !(Maybe Text) -- ^ V1 error format.
    , _ptgUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media.
    , _ptgPp :: !Bool -- ^ Pretty-print response (constructor default: True).
    , _ptgAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _ptgUploadType :: !(Maybe Text) -- ^ Legacy upload protocol for media.
    , _ptgBearerToken :: !(Maybe Text) -- ^ OAuth bearer token.
    , _ptgProjectId :: !Text -- ^ ID of the Cloud project storing the trace.
    , _ptgCallback :: !(Maybe Text) -- ^ JSONP callback.
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProjectsTracesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ptgTraceId'
--
-- * 'ptgXgafv'
--
-- * 'ptgUploadProtocol'
--
-- * 'ptgPp'
--
-- * 'ptgAccessToken'
--
-- * 'ptgUploadType'
--
-- * 'ptgBearerToken'
--
-- * 'ptgProjectId'
--
-- * 'ptgCallback'
projectsTracesGet
    :: Text -- ^ 'ptgTraceId'
    -> Text -- ^ 'ptgProjectId'
    -> ProjectsTracesGet
-- Build the request from the two required identifiers; pretty-printing is
-- enabled by default and every other optional field is left unset.
projectsTracesGet traceId projectId =
    ProjectsTracesGet'
    { _ptgTraceId = traceId
    , _ptgProjectId = projectId
    , _ptgPp = True
    , _ptgXgafv = Nothing
    , _ptgUploadProtocol = Nothing
    , _ptgAccessToken = Nothing
    , _ptgUploadType = Nothing
    , _ptgBearerToken = Nothing
    , _ptgCallback = Nothing
    }
-- Lenses over the 'ProjectsTracesGet' request record.
-- | ID of the trace to return.
ptgTraceId :: Lens' ProjectsTracesGet Text
ptgTraceId
  = lens _ptgTraceId (\ s a -> s{_ptgTraceId = a})
-- | V1 error format.
ptgXgafv :: Lens' ProjectsTracesGet (Maybe Text)
ptgXgafv = lens _ptgXgafv (\ s a -> s{_ptgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ptgUploadProtocol :: Lens' ProjectsTracesGet (Maybe Text)
ptgUploadProtocol
  = lens _ptgUploadProtocol
      (\ s a -> s{_ptgUploadProtocol = a})
-- | Pretty-print response.
ptgPp :: Lens' ProjectsTracesGet Bool
ptgPp = lens _ptgPp (\ s a -> s{_ptgPp = a})
-- | OAuth access token.
ptgAccessToken :: Lens' ProjectsTracesGet (Maybe Text)
ptgAccessToken
  = lens _ptgAccessToken
      (\ s a -> s{_ptgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ptgUploadType :: Lens' ProjectsTracesGet (Maybe Text)
ptgUploadType
  = lens _ptgUploadType
      (\ s a -> s{_ptgUploadType = a})
-- | OAuth bearer token.
ptgBearerToken :: Lens' ProjectsTracesGet (Maybe Text)
ptgBearerToken
  = lens _ptgBearerToken
      (\ s a -> s{_ptgBearerToken = a})
-- | ID of the Cloud project where the trace data is stored.
ptgProjectId :: Lens' ProjectsTracesGet Text
ptgProjectId
  = lens _ptgProjectId (\ s a -> s{_ptgProjectId = a})
-- | JSONP
ptgCallback :: Lens' ProjectsTracesGet (Maybe Text)
ptgCallback
  = lens _ptgCallback (\ s a -> s{_ptgCallback = a})
-- Dispatch: project and trace IDs become path captures, the remaining
-- fields query parameters; accepted OAuth scopes listed below.
instance GoogleRequest ProjectsTracesGet where
        type Rs ProjectsTracesGet = Trace
        type Scopes ProjectsTracesGet =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/trace.readonly"]
        requestClient ProjectsTracesGet'{..}
          = go _ptgProjectId _ptgTraceId _ptgXgafv
              _ptgUploadProtocol
              (Just _ptgPp)
              _ptgAccessToken
              _ptgUploadType
              _ptgBearerToken
              _ptgCallback
              (Just AltJSON)
              cloudTraceService
          where go
                  = buildClient
                      (Proxy :: Proxy ProjectsTracesGetResource)
                      mempty
| rueshyna/gogol | gogol-cloudtrace/gen/Network/Google/Resource/CloudTrace/Projects/Traces/Get.hs | mpl-2.0 | 5,680 | 0 | 20 | 1,466 | 935 | 542 | 393 | 134 | 1 |
module Echo where
import Control.Applicative
import Control.Concurrent
import Control.Monad
import Network.Socket
import System.Environment
import System.IO
-- | Start a line-based TCP echo server on the hard-coded port 3000 (the
-- commented-out line shows how the port was once read from argv).
echo :: IO ()
echo = withSocketsDo $ do
  -- port <- toEnum . read . head <$> getArgs
  let port = 3000
  newSocket <- socket AF_INET Stream defaultProtocol
  -- allow quick restarts of the server on the same port
  setSocketOption newSocket ReuseAddr 1
  -- NOTE(review): bindSocket/iNADDR_ANY are deprecated names in newer
  -- versions of the network package -- confirm the pinned version.
  bindSocket newSocket $ SockAddrInet port iNADDR_ANY
  -- listen with a backlog of 2 pending connections
  listen newSocket 2
  runServer id newSocket
-- | Accept connections forever, handling each client on its own thread
-- with the given line transformation.
runServer :: (String -> String) -> Socket -> IO()
runServer f listener = forever $ do
  (conn, _addr) <- accept listener
  _ <- forkIO (interactWithSocket f conn)
  return ()
-- | Echo loop for one client: read a line, apply the transformation,
-- write the result back, forever (terminates via exception on disconnect).
interactWithSocket :: (String -> String) -> Socket -> IO()
interactWithSocket f s = do
  h <- socketToHandle s ReadWriteMode
  forever $ do
    line <- hGetLine h
    hPutStrLn h (f line)
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/streaming/pipes/src/Echo.hs | unlicense | 799 | 0 | 10 | 140 | 253 | 123 | 130 | 23 | 1 |
import Math.NumberTheory.Powers.Fourth (isFourthPower')
import Data.List (nub, subsequences)
import Helpers.Primes (primePowers)
-- | All ways of assigning each base an exponent of 1, 2 or 3.  Each result
-- keeps the (base, exponent) pairs in the original base order; the empty
-- base list yields the single empty assignment.
raiseUp' :: [Integer] -> [[(Integer, Int)]]
raiseUp' [] = [[]]
raiseUp' (b:bs) = [ (b, e) : rest | rest <- raiseUp' bs, e <- [1, 2, 3] ]
-- Hard-coded table of initial terms (presumably OEIS A328045, per the
-- name -- TODO confirm).  Indexed 0-based with (!!); the table is finite,
-- so any index beyond it crashes at runtime.
a328045_list :: [Integer]
a328045_list = [0, 1, 4, 6, 4, 10, 9, 14, 15, 9, 18, 22, 20, 26, 21, 24, 16, 34, 27, 38, 25, 28, 33, 46, 30, 25, 39, 35, 36, 58, 40, 62, 42, 44, 51, 45, 36, 74, 57, 52, 49, 82, 50, 86, 55, 54, 69, 94, 54, 49, 63, 68, 65, 106]
-- Candidate bases strictly between n and its table value g(n).  A candidate
-- k is rejected when some prime p of k with odd multiplicity (primePowers
-- presumably yields (prime, exponent) pairs -- TODO confirm) satisfies
-- k - p < n and k + p > g(n).
baseCandidates :: Int -> [Integer]
baseCandidates n = filter (not . isInvalid) [n'+1..g_n-1] where
  n' = fromIntegral n
  -- crashes for n beyond the hard-coded table
  g_n = a328045_list !! n
  isInvalid k = any (\p -> k - p < n' && k + p > g_n) unpairedPrimes where
    unpairedPrimes = map fst $ filter (\(_,p) -> p `mod` 2 == 1) $ primePowers $ fromIntegral k
-- | All candidate base sets for index n: the fixed prefix (n and its table
-- value, deduplicated via nub) extended by every subsequence of the
-- candidates between them.
count n = [ prefix ++ rest | rest <- subsequences (baseCandidates n) ]
  where
    n' = fromIntegral n
    g_n = a328045_list !! n
    prefix = nub [n', g_n]
-- Keep only those exponent assignments whose product is a perfect fourth
-- power (isFourthPower' comes from arithmoi / Math.NumberTheory).
a999999' n = filter (isFourthPower' . powerProduct) r where
  r = concatMap raiseUp' $ count n
-- Lazily evaluated list of results for n = 1, 2, ...
a999999'_list = map a999999' [1..]
-- | Multiply out a factorisation given as (base, exponent) pairs;
-- the empty list yields 1.
powerProduct :: [(Integer, Int)] -> Integer
powerProduct factors = product [ base ^ expo | (base, expo) <- factors ]
-- (zero indexed)
-- 0 3 = 2^0 * 3
-- 1 3 = 2^0 * 3
-- 2 2
-- 3 2
-- 4 1
-- 5 12 = 2^2 * 3
-- 6 2
-- 7 12 = 2^2 * 3
-- 8 12 = 2^2 * 3
-- 9 1
-- 10 12 = 2^2 * 3
-- 11 192 = 2^6 * 3
-- 12 12 = 2^2 * 3
-- 13 768 = 2^8 * 3
-- 14 12 = 2^2 * 3
-- 15 12 = 2^2 * 3
-- 16 3 = 2^0 * 3
-- 17 12288 = 2^12 * 3
-- 18 12 = 2^2 * 3
-- 19 49152 = 2^14 * 3
-- 20 2
-- 21 6 = 2^1 * 3
-- 22 48 = 2^4 * 3
-- 23 ******
-- 24 2
-- 25 1
-- 26 48 = 2^4 * 3
-- 27 3 = 2^0 * 3
-- 28 2
-- 29 ******
-- 30 2
-- 31 ******
-- 32 2
-- 33 6 = 2^1 * 3
-- 34 768 = 2^8 * 3
-- 35 2
-- 36 1
| peterokagey/haskellOEIS | src/Sandbox/Z4LinearAlgebra/CountGraham4.hs | apache-2.0 | 1,848 | 0 | 16 | 532 | 658 | 399 | 259 | 22 | 1 |
module Compiler.SymbolTable where
import Data.List (foldl')
import Data.Maybe
import qualified Data.Map as M

import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Pos

import Debug.Trace

import Compiler.Syntax
import Compiler.TypeChecking.Nameable
import Compiler.Positioned
import Compiler.CompileError
type SymbolTable = M.Map String (Positioned Symbol)
-- | Table pre-populated with the runtime primitives
-- @int input(void)@ and @void output(int value)@.  Both carry a
-- synthetic \"prelude\" source position and an empty body.
initSymbolTable = toSymbolTable [inputFn, outputFn]
  where
    preludePos = initialPos "prelude"
    inputFn =
      Positioned preludePos
        (FuncSymbol Unknown
           (Function Int "input" []
              (Positioned preludePos (CompoundStatement [] []))))
    outputFn =
      Positioned preludePos
        (FuncSymbol Unknown
           (Function Void "output"
              [Positioned preludePos (Variable Int "value")]
              (AnyPosition (CompoundStatement [] []))))
-- | All function symbols stored in the table.
-- NOTE(review): the table holds @Positioned Symbol@ values, yet the
-- predicate matches the bare 'FuncSymbol' constructor -- confirm this
-- typechecks against the 'Positioned' definition.
allFunctions table = M.elems (M.filter isFunction table)
  where
    isFunction (FuncSymbol _ _) = True
    isFunction _                = False
insertSymbol table s = M.insert (nameOf s) s table
insertSymbols table syms = foldl insertSymbol table syms
-- | Build a fresh table from a list of symbols (later duplicates win).
toSymbolTable :: [Positioned Symbol] -> SymbolTable
toSymbolTable = insertSymbols M.empty
mergeSymbolTables a b = M.union a b
-- | Wrap a positioned variable as a symbol, keeping its position tag.
symbolizePVar (Positioned pos v) = Positioned pos (symbolizeVar v)
symbolizePVar (AnyPosition v)    = AnyPosition (symbolizeVar v)

-- | Wrap a positioned function as a symbol, keeping its position tag.
symbolizePFunc (Positioned pos f) = Positioned pos (symbolizeFunc f)
symbolizePFunc (AnyPosition f)    = AnyPosition (symbolizeFunc f)

-- | Tag a variable as a symbol with as-yet-unknown type information.
symbolizeVar v = VarSymbol Unknown v

-- | Tag a function as a symbol with as-yet-unknown type information.
symbolizeFunc f = FuncSymbol Unknown f
-- | Top-level symbol table of a whole program.
globalTable (Program decls) = toSymbolTable decls

-- localTable program function =

-- | Symbols local to a function: its parameters plus the local
-- declarations of its compound body.
-- NOTE(review): non-exhaustive -- a function whose body carries
-- 'AnyPosition' makes this crash; confirm bodies are always
-- 'Positioned' by construction.
functionSymbols :: Function -> [Positioned Symbol]
functionSymbols (Function _ _ args
                 (Positioned _ (CompoundStatement locals _))) =
  map symbolizePVar (args ++ locals)
symbolUndefined name table = isNothing $ M.lookup name table
symbolDefined name table = isJust $ M.lookup name table
symbolLookup name table = fromJust $ M.lookup name table
-- | Is the name bound to a function symbol?  Emits 'trace' debug
-- output (development leftover) on both failing paths.
symbolIsFunction name table
  | symbolUndefined name table =
      trace ("symbolIsFunction: " ++ show name ++ " is not defined") False
  | otherwise =
      case symbolLookup name table of
        Positioned _ (FuncSymbol _ _) -> True
        AnyPosition (FuncSymbol _ _)  -> True
        other ->
          trace ("symbolIsFunction: " ++ show name
                 ++ " is not a function, it's " ++ show other) False
-- | Is the name bound to a (positioned) variable symbol?
-- An 'AnyPosition' variable yields False, as in the original guards.
symbolIsVariable :: String -> SymbolTable -> Bool
symbolIsVariable name table =
  case M.lookup name table of
    Just (Positioned _ (VarSymbol _ _)) -> True
    _                                   -> False
-- | Is the name bound to a (positioned) variable of array type?
symbolIsArray :: String -> SymbolTable -> Bool
symbolIsArray name table =
  case M.lookup name table of
    Just (Positioned _ (VarSymbol _ (Variable (Array _ _) _))) -> True
    _                                                          -> False
-- | Validate that @name@ is a defined array.  'Nothing' means OK;
-- otherwise a singleton list with one 'TypeError'.
symbolValidateArray pos name symbols
  | symbolUndefined name symbols = oops (show name ++ " is not defined")
  | symbolIsArray name symbols = Nothing -- We're good
  | otherwise = oops (show name ++ " is not an array")
  where oops msg = Just [TypeError pos msg]

-- | Validate that @name@ is a defined variable.
symbolValidateVar pos name symbols
  | symbolUndefined name symbols = oops (show name ++ " is not defined")
  | symbolIsVariable name symbols = Nothing -- We're good
  | otherwise = oops (show name ++ " is not a variable")
  where oops msg = Just [TypeError pos msg]

-- | Validate that @name@ is a defined function.
symbolValidateFunc pos name symbols
  | symbolUndefined name symbols = oops ("undefined function " ++ show name)
  | symbolIsFunction name symbols = Nothing
  | otherwise = oops ("attempt to call non-function" ++ show name)
  where oops msg = Just [TypeError pos msg]
functionReturnsVoid name symbols | symbolUndefined name symbols = error (name ++ " is not defined")
| not (symbolIsFunction name symbols) = error (name ++ " is not a function")
| otherwise =
case symbolLookup name symbols of
Positioned _ (FuncSymbol _ (Function t _ _ _)) -> t == Void
AnyPosition (FuncSymbol _ (Function t _ _ _)) -> t == Void | michaelmelanson/cminus-compiler | Compiler/SymbolTable.hs | bsd-2-clause | 4,887 | 0 | 15 | 1,726 | 1,340 | 646 | 694 | 68 | 3 |
module Remote.Main where
import qualified Control.Exception as Exception
import Control.Monad (liftM)
import Database.Persist.Sqlite (runMigration)
import Remote.Database (migrateAll, runDB)
import Remote.Server (runServer)
import System.Environment (getEnv)
main :: IO ()
main = do
  -- Bring the database schema up to date before serving requests.
  runDB $ runMigration migrateAll
  -- Read the listen port from the environment.  'fmap' replaces the
  -- legacy 'liftM'.  NOTE(review): 'read' still aborts with an
  -- unhelpful error when PORT is unset or non-numeric.
  port <- fmap read (getEnv "PORT")
  putStrLn ("Starting on port " ++ show port ++ "...")
  -- Run until Ctrl-C.  Only 'UserInterrupt' is handled here; any
  -- other async exception escapes the handler pattern.
  Exception.catch
    (runServer port)
    (\ Exception.UserInterrupt -> putStrLn "\nStopping...")
| tippenein/hasken | lib/Remote/Main.hs | bsd-3-clause | 600 | 0 | 11 | 168 | 159 | 85 | 74 | 15 | 1 |
module UnaryConstructors where
-- | Wrapper distinguishing a goat count from a bare Int.
newtype Goats = Goats Int deriving (Eq, Show)

-- | Wrapper distinguishing a cow count from a bare Int.
newtype Cows = Cows Int deriving (Eq, Show)

-- | Things of which one can have too many.
class TooMany a where
  tooMany :: a -> Bool

-- | More than 42 of anything counted as a plain Int is too many.
instance TooMany Int where
  tooMany n = n > 42

-- | Goats delegate to the underlying Int threshold.
instance TooMany Goats where
  tooMany (Goats n) = tooMany n

-- | Cows delegate to the underlying Int threshold.  Added for
-- consistency with 'Goats'; previously 'Cows' had no instance.
instance TooMany Cows where
  tooMany (Cows n) = tooMany n
| dsaenztagarro/haskellbook | src/chapter11/UnaryConstructors2.hs | bsd-3-clause | 280 | 0 | 8 | 59 | 105 | 56 | 49 | 9 | 0 |
-- | Terrain tile definitions.
module Content.TileKind ( cdefs ) where
import Control.Arrow (first)
import Data.Maybe
import qualified Data.Text as T
import Game.LambdaHack.Common.Color
import Game.LambdaHack.Common.ContentDef
import Game.LambdaHack.Common.Misc
import Game.LambdaHack.Common.Msg
import qualified Game.LambdaHack.Content.ItemKind as IK
import Game.LambdaHack.Content.TileKind
cdefs :: ContentDef TileKind
cdefs = ContentDef
{ getSymbol = tsymbol
, getName = tname
, getFreq = tfreq
, validateSingle = validateSingleTileKind
, validateAll = validateAllTileKind
, content =
[wall, hardRock, pillar, pillarCache, lampPost, burningBush, bush, tree, wallV, wallSuspectV, doorClosedV, doorOpenV, wallH, wallSuspectH, doorClosedH, doorOpenH, stairsUpLit, stairsLit, stairsDownLit, escapeUpLit, escapeDownLit, unknown, floorCorridorLit, floorArenaLit, floorArenaShade, floorActorLit, floorItemLit, floorActorItemLit, floorRedLit, floorBlueLit, floorGreenLit, floorBrownLit]
++ map makeDark [wallV, wallSuspectV, doorClosedV, doorOpenV, wallH, wallSuspectH, doorClosedH, doorOpenH, stairsLit, escapeUpLit, escapeDownLit, floorCorridorLit]
++ map makeDarkColor [stairsUpLit, stairsDownLit, floorArenaLit, floorActorLit, floorItemLit, floorActorItemLit]
}
wall, hardRock, pillar, pillarCache, lampPost, burningBush, bush, tree, wallV, wallSuspectV, doorClosedV, doorOpenV, wallH, wallSuspectH, doorClosedH, doorOpenH, stairsUpLit, stairsLit, stairsDownLit, escapeUpLit, escapeDownLit, unknown, floorCorridorLit, floorArenaLit, floorArenaShade, floorActorLit, floorItemLit, floorActorItemLit, floorRedLit, floorBlueLit, floorGreenLit, floorBrownLit :: TileKind
wall = TileKind
{ tsymbol = ' '
, tname = "bedrock"
, tfreq = [("fillerWall", 1), ("legendLit", 100), ("legendDark", 100)]
, tcolor = defBG
, tcolor2 = defBG
, tfeature = [Dark]
-- Bedrock being dark is bad for AI (forces it to backtrack to explore
-- bedrock at corridor turns) and induces human micromanagement
-- if there can be corridors joined diagonally (humans have to check
-- with the cursor if the dark space is bedrock or unexplored).
-- Lit bedrock would be even worse for humans, because it's harder
-- to guess which tiles are unknown and which can be explored bedrock.
-- The setup of Allure is ideal, with lit bedrock that is easily
-- distinguished from an unknown tile. However, LH follows the NetHack,
-- not the Angband, visual tradition, so we can't improve the situation,
-- unless we turn to subtle shades of black or non-ASCII glyphs,
-- but that is yet different aesthetics and it's inconsistent
-- with console frontends.
}
hardRock = TileKind
{ tsymbol = ' '
, tname = "impenetrable bedrock"
, tfreq = [("basic outer fence", 1)]
, tcolor = BrWhite
, tcolor2 = BrWhite
, tfeature = [Dark, Impenetrable]
}
pillar = TileKind
{ tsymbol = 'O'
, tname = "rock"
, tfreq = [ ("cachable", 70)
, ("legendLit", 100), ("legendDark", 100)
, ("noiseSet", 100), ("skirmishSet", 5)
, ("battleSet", 250) ]
, tcolor = BrWhite
, tcolor2 = defFG
, tfeature = []
}
pillarCache = TileKind
{ tsymbol = '&'
, tname = "cache"
, tfreq = [ ("cachable", 30)
, ("legendLit", 100), ("legendDark", 100) ]
, tcolor = BrWhite
, tcolor2 = defFG
, tfeature = [ Cause $ IK.CreateItem CGround "useful" IK.TimerNone
, ChangeTo "cachable"]
}
lampPost = TileKind
{ tsymbol = 'O'
, tname = "lamp post"
, tfreq = [("lampPostOver_O", 90)]
, tcolor = BrYellow
, tcolor2 = Brown
, tfeature = []
}
burningBush = TileKind
{ tsymbol = 'O'
, tname = "burning bush"
, tfreq = [("lampPostOver_O", 10), ("ambushSet", 3), ("battleSet", 2)]
, tcolor = BrRed
, tcolor2 = Red
, tfeature = []
}
bush = TileKind
{ tsymbol = 'O'
, tname = "bush"
, tfreq = [("ambushSet", 100) ]
, tcolor = Green
, tcolor2 = BrBlack
, tfeature = [Dark]
}
tree = TileKind
{ tsymbol = 'O'
, tname = "tree"
, tfreq = [("skirmishSet", 14), ("battleSet", 20), ("treeShadeOver_O", 1)]
, tcolor = BrGreen
, tcolor2 = Green
, tfeature = []
}
wallV = TileKind
{ tsymbol = '|'
, tname = "granite wall"
, tfreq = [("legendLit", 100)]
, tcolor = BrWhite
, tcolor2 = defFG
, tfeature = [HideAs "suspect vertical wall Lit"]
}
wallSuspectV = TileKind
{ tsymbol = '|'
, tname = "moldy wall"
, tfreq = [("suspect vertical wall Lit", 1)]
, tcolor = BrWhite
, tcolor2 = defFG
, tfeature = [Suspect, RevealAs "vertical closed door Lit"]
}
doorClosedV = TileKind
{ tsymbol = '+'
, tname = "closed door"
, tfreq = [("vertical closed door Lit", 1)]
, tcolor = Brown
, tcolor2 = BrBlack
, tfeature = [ OpenTo "vertical open door Lit"
, HideAs "suspect vertical wall Lit"
]
}
doorOpenV = TileKind
{ tsymbol = '-'
, tname = "open door"
, tfreq = [("vertical open door Lit", 1)]
, tcolor = Brown
, tcolor2 = BrBlack
, tfeature = [ Walkable, Clear, NoItem, NoActor
, CloseTo "vertical closed door Lit"
]
}
wallH = TileKind
{ tsymbol = '-'
, tname = "granite wall"
, tfreq = [("legendLit", 100)]
, tcolor = BrWhite
, tcolor2 = defFG
, tfeature = [HideAs "suspect horizontal wall Lit"]
}
wallSuspectH = TileKind
{ tsymbol = '-'
, tname = "scratched wall"
, tfreq = [("suspect horizontal wall Lit", 1)]
, tcolor = BrWhite
, tcolor2 = defFG
, tfeature = [Suspect, RevealAs "horizontal closed door Lit"]
}
doorClosedH = TileKind
{ tsymbol = '+'
, tname = "closed door"
, tfreq = [("horizontal closed door Lit", 1)]
, tcolor = Brown
, tcolor2 = BrBlack
, tfeature = [ OpenTo "horizontal open door Lit"
, HideAs "suspect horizontal wall Lit"
]
}
doorOpenH = TileKind
{ tsymbol = '|'
, tname = "open door"
, tfreq = [("horizontal open door Lit", 1)]
, tcolor = Brown
, tcolor2 = BrBlack
, tfeature = [ Walkable, Clear, NoItem, NoActor
, CloseTo "horizontal closed door Lit"
]
}
stairsUpLit = TileKind
{ tsymbol = '<'
, tname = "staircase up"
, tfreq = [("legendLit", 100)]
, tcolor = BrWhite
, tcolor2 = defFG
, tfeature = [Walkable, Clear, NoItem, NoActor, Cause $ IK.Ascend 1]
}
stairsLit = TileKind
{ tsymbol = '>'
, tname = "staircase"
, tfreq = [("legendLit", 100)]
, tcolor = BrCyan
, tcolor2 = Cyan -- TODO
, tfeature = [ Walkable, Clear, NoItem, NoActor
, Cause $ IK.Ascend 1
, Cause $ IK.Ascend (-1) ]
}
stairsDownLit = TileKind
{ tsymbol = '>'
, tname = "staircase down"
, tfreq = [("legendLit", 100)]
, tcolor = BrWhite
, tcolor2 = defFG
, tfeature = [Walkable, Clear, NoItem, NoActor, Cause $ IK.Ascend (-1)]
}
escapeUpLit = TileKind
{ tsymbol = '<'
, tname = "exit hatch up"
, tfreq = [("legendLit", 100)]
, tcolor = BrYellow
, tcolor2 = BrYellow
, tfeature = [Walkable, Clear, NoItem, NoActor, Cause $ IK.Escape 1]
}
escapeDownLit = TileKind
{ tsymbol = '>'
, tname = "exit trapdoor down"
, tfreq = [("legendLit", 100)]
, tcolor = BrYellow
, tcolor2 = BrYellow
, tfeature = [Walkable, Clear, NoItem, NoActor, Cause $ IK.Escape (-1)]
}
unknown = TileKind
{ tsymbol = ' '
, tname = "unknown space"
, tfreq = [("unknown space", 1)]
, tcolor = defFG
, tcolor2 = defFG
, tfeature = [Dark]
}
floorCorridorLit = TileKind
{ tsymbol = '#'
, tname = "corridor"
, tfreq = [("floorCorridorLit", 1)]
, tcolor = BrWhite
, tcolor2 = defFG
, tfeature = [Walkable, Clear]
}
floorArenaLit = floorCorridorLit
{ tsymbol = '.'
, tname = "stone floor"
, tfreq = [ ("floorArenaLit", 1)
, ("arenaSet", 1), ("emptySet", 1), ("noiseSet", 50)
, ("battleSet", 1000), ("skirmishSet", 100)
, ("ambushSet", 1000) ]
}
floorActorLit = floorArenaLit
{ tfreq = []
, tfeature = OftenActor : tfeature floorArenaLit
}
floorItemLit = floorArenaLit
{ tfreq = []
, tfeature = OftenItem : tfeature floorArenaLit
}
floorActorItemLit = floorItemLit
{ tfreq = [("legendLit", 100)] -- no OftenItem in legendDark
, tfeature = OftenActor : tfeature floorItemLit
}
floorArenaShade = floorActorLit
{ tname = "stone floor" -- TODO: "shaded ground"
, tfreq = [("treeShadeOver_s", 1)]
, tcolor2 = BrBlack
, tfeature = Dark : tfeature floorActorLit -- no OftenItem
}
floorRedLit = floorArenaLit
{ tname = "brick pavement"
, tfreq = [("trailLit", 30), ("trailChessLit", 30)]
, tcolor = BrRed
, tcolor2 = Red
, tfeature = Trail : tfeature floorArenaLit
}
floorBlueLit = floorRedLit
{ tname = "cobblestone path"
, tfreq = [("trailLit", 100), ("trailChessLit", 70)]
, tcolor = BrBlue
, tcolor2 = Blue
}
floorGreenLit = floorRedLit
{ tname = "mossy stone path"
, tfreq = [("trailLit", 100)]
, tcolor = BrGreen
, tcolor2 = Green
}
floorBrownLit = floorRedLit
{ tname = "rotting mahogany deck"
, tfreq = [("trailLit", 10)]
, tcolor = BrMagenta
, tcolor2 = Magenta
}
-- | Derive the dark variant of a (lit) tile kind: every group name
-- ending in \"Lit\" is renamed to end in \"Dark\" (in frequencies and
-- in the features that reference groups), the 'Dark' feature is
-- prepended, and 'OftenItem' is dropped -- items are not common in
-- the dark.
makeDark :: TileKind -> TileKind
makeDark k = k { tfreq    = map (first toDark) (tfreq k)
               , tfeature = Dark : mapMaybe darkFeature (tfeature k)
               }
  where
    toDark :: GroupName TileKind -> GroupName TileKind
    toDark t = case T.stripSuffix "Lit" (tshow t) of
      Nothing   -> t
      Just stem -> toGroupName (stem <> "Dark")
    darkFeature (OpenTo t)   = Just (OpenTo (toDark t))
    darkFeature (CloseTo t)  = Just (CloseTo (toDark t))
    darkFeature (ChangeTo t) = Just (ChangeTo (toDark t))
    darkFeature (HideAs t)   = Just (HideAs (toDark t))
    darkFeature (RevealAs t) = Just (RevealAs (toDark t))
    darkFeature OftenItem    = Nothing  -- items not common in the dark
    darkFeature feat         = Just feat
-- | Like 'makeDark', but additionally dims the out-of-sight colour
-- ('tcolor2') to 'BrBlack'.
makeDarkColor :: TileKind -> TileKind
makeDarkColor tk =
  let darkened = makeDark tk
  in darkened {tcolor2 = BrBlack}
| tuturto/space-privateers | GameDefinition/Content/TileKind.hs | bsd-3-clause | 10,454 | 0 | 14 | 2,820 | 2,706 | 1,716 | 990 | 252 | 7 |
-- | Client monad for interacting with a human through UI.
module Game.LambdaHack.Client.UI.MsgClient
( msgAdd, msgReset, recordHistory
, SlideOrCmd, failWith, failSlides, failSer, failMsg
, lookAt, itemOverlay
) where
import Prelude ()
import Prelude.Compat
import Control.Exception.Assert.Sugar
import Control.Monad
import qualified Data.EnumMap.Strict as EM
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import qualified Game.LambdaHack.Common.Kind as Kind
import qualified NLP.Miniutter.English as MU
import Game.LambdaHack.Client.CommonClient
import Game.LambdaHack.Client.ItemSlot
import Game.LambdaHack.Client.MonadClient hiding (liftIO)
import Game.LambdaHack.Client.State
import Game.LambdaHack.Client.UI.MonadClientUI
import Game.LambdaHack.Client.UI.WidgetClient
import Game.LambdaHack.Common.Actor
import Game.LambdaHack.Common.ActorState
import Game.LambdaHack.Common.Item
import Game.LambdaHack.Common.ItemDescription
import Game.LambdaHack.Common.Level
import Game.LambdaHack.Common.Misc
import Game.LambdaHack.Common.MonadStateRead
import Game.LambdaHack.Common.Msg
import Game.LambdaHack.Common.Point
import Game.LambdaHack.Common.Request
import Game.LambdaHack.Common.State
import qualified Game.LambdaHack.Common.Tile as Tile
import qualified Game.LambdaHack.Content.TileKind as TK
-- | Append a message to the report currently being accumulated.
msgAdd :: MonadClientUI m => Msg -> m ()
msgAdd msg =
  modifyClient $ \cli -> cli {sreport = addMsg (sreport cli) msg}
-- | Discard the current report and replace it with one containing
-- only the given message.
msgReset :: MonadClientUI m => Msg -> m ()
msgReset msg =
  modifyClient $ \cli -> cli {sreport = singletonReport msg}
-- | Store current report in the history and reset report.
-- Does nothing when the current report is empty.
recordHistory :: MonadClientUI m => m ()
recordHistory = do
  time <- getsState stime
  StateClient{sreport, shistory} <- getClient
  unless (nullReport sreport) $ do
    -- Clear the on-screen report before archiving the old one.
    msgReset ""
    let nhistory = addReport shistory time sreport
    modifyClient $ \cli -> cli {shistory = nhistory}
type SlideOrCmd a = Either Slideshow a
-- | Abort the current command: stop any playback, wrap the message in
-- asterisks and return it as a prompt slideshow in 'Left'.
-- The message must be non-empty (checked with 'assert').
failWith :: MonadClientUI m => Msg -> m (SlideOrCmd a)
failWith msg = do
  stopPlayBack
  let starMsg = "*" <> msg <> "*"
  assert (not $ T.null msg) $ Left <$> promptToSlideshow starMsg
-- | Abort the current command, presenting the given slides instead.
failSlides :: MonadClientUI m => Slideshow -> m (SlideOrCmd a)
failSlides slides = do
  stopPlayBack
  return (Left slides)
-- | Abort the current command, reporting why a server request failed.
failSer :: MonadClientUI m => ReqFailure -> m (SlideOrCmd a)
failSer err = failWith (showReqFailure err)
-- | Like 'failWith', but yields the slideshow directly instead of
-- wrapping it in 'Left'.  The message must be non-empty.
failMsg :: MonadClientUI m => Msg -> m Slideshow
failMsg msg = do
  stopPlayBack
  let starred = "*" <> msg <> "*"
  assert (not $ T.null msg) $ promptToSlideshow starred
-- | Produces a textual description of the terrain and items at an already
-- explored position. Mute for unknown positions.
-- The detailed variant is for use in the targeting mode.
lookAt :: MonadClientUI m
=> Bool -- ^ detailed?
-> Text -- ^ how to start tile description
-> Bool -- ^ can be seen right now?
-> Point -- ^ position to describe
-> ActorId -- ^ the actor that looks
-> Text -- ^ an extra sentence to print
-> m Text
lookAt detailed tilePrefix canSee pos aid msg = do
cops@Kind.COps{cotile=cotile@Kind.Ops{okind}} <- getsState scops
itemToF <- itemToFullClient
b <- getsState $ getActorBody aid
stgtMode <- getsClient stgtMode
let lidV = maybe (blid b) tgtLevelId stgtMode
lvl <- getLevel lidV
localTime <- getsState $ getLocalTime lidV
subject <- partAidLeader aid
is <- getsState $ getCBag $ CFloor lidV pos
let verb = MU.Text $ if pos == bpos b
then "stand on"
else if canSee then "notice" else "remember"
let nWs (iid, kit@(k, _)) = partItemWs k CGround localTime (itemToF iid kit)
isd = case detailed of
_ | EM.size is == 0 -> ""
_ | EM.size is <= 2 ->
makeSentence [ MU.SubjectVerbSg subject verb
, MU.WWandW $ map nWs $ EM.assocs is]
-- TODO: detailed unused here; disabled together with overlay in doLook True -> "\n"
_ -> makeSentence [MU.Cardinal (EM.size is), "items here"]
tile = lvl `at` pos
obscured | knownLsecret lvl
&& tile /= hideTile cops lvl pos = "partially obscured"
| otherwise = ""
tileText = obscured <+> TK.tname (okind tile)
tilePart | T.null tilePrefix = MU.Text tileText
| otherwise = MU.AW $ MU.Text tileText
tileDesc = [MU.Text tilePrefix, tilePart]
if not (null (Tile.causeEffects cotile tile)) then
return $! makeSentence ("activable:" : tileDesc)
<+> msg <+> isd
else if detailed then
return $! makeSentence tileDesc
<+> msg <+> isd
else return $! msg <+> isd
-- | Create a list of item names.
-- One overlay line per occupied slot of the chosen store (organ slots
-- for 'COrgan', item slots otherwise); slots not present in the bag
-- are skipped.
itemOverlay :: MonadClient m
            => CStore -> LevelId -> ItemBag -> m Overlay
itemOverlay c lid bag = do
  localTime <- getsState $ getLocalTime lid
  itemToF <- itemToFullClient
  (itemSlots, organSlots) <- getsClient sslots
  let isOrgan = c == COrgan
      lSlots = if isOrgan then organSlots else itemSlots
  -- Sanity check: every item in the bag must have a slot assigned.
  let !_A = assert (all (`elem` EM.elems lSlots) (EM.keys bag)
                    `blame` (c, lid, bag, lSlots)) ()
  let pr (l, iid) =
        case EM.lookup iid bag of
          Nothing -> Nothing
          Just kit@(k, _) ->
            let itemFull = itemToF iid kit
                -- TODO: add color item symbols as soon as we have a menu
                -- with all items visible on the floor or known to player
                -- symbol = jsymbol $ itemBase itemFull
            in Just $ makePhrase [ slotLabel l, "-" -- MU.String [symbol]
                                 , partItemWs k c localTime itemFull ]
                      <> " "
  return $! toOverlay $ mapMaybe pr $ EM.assocs lSlots
| beni55/LambdaHack | Game/LambdaHack/Client/UI/MsgClient.hs | bsd-3-clause | 5,902 | 0 | 19 | 1,440 | 1,636 | 864 | 772 | -1 | -1 |
{-# LANGUAGE CPP #-}
#define DO_NOT_EDIT (doNotEdit __FILE__ __LINE__)
-- | Generates code for HTML tags.
--
module Util.GenerateHtmlCombinators where
import Control.Arrow ((&&&))
import Data.List (sort, sortBy, intersperse, intercalate)
import Data.Ord (comparing)
import System.Directory (createDirectoryIfMissing)
import System.FilePath ((</>), (<.>))
import Data.Map (Map)
import qualified Data.Map as M
import Data.Char (toLower)
import qualified Data.Set as S
import Util.Sanitize (sanitize, prelude)
-- | Datatype for an HTML variant.
--
data HtmlVariant = HtmlVariant
    { version     :: [String]  -- ^ Hierarchical name, e.g. @["Html4", "Strict"]@.
    , docType     :: [String]  -- ^ Lines of the DOCTYPE declaration.
    , parents     :: [String]  -- ^ Elements that may contain children.
    , leafs       :: [String]  -- ^ Elements that may not contain children.
    , attributes  :: [String]  -- ^ Supported attribute names.
    , selfClosing :: Bool      -- ^ Render leafs as @<br />@ rather than @<br>@?
    } deriving (Eq)
-- Lowercase, dash-separated version string, e.g. @"html4-strict"@;
-- used as the key in 'htmlVariants'.
instance Show HtmlVariant where
    show = map toLower . intercalate "-" . version
-- | Full module name for an HTML variant, e.g. @Text.Blaze.Html5@.
--
getModuleName :: HtmlVariant -> String
getModuleName variant = "Text.Blaze." ++ intercalate "." (version variant)
-- | Attribute module name for an HTML variant,
-- e.g. @Text.Blaze.Html5.Attributes@.
--
getAttributeModuleName :: HtmlVariant -> String
getAttributeModuleName variant = getModuleName variant ++ ".Attributes"
-- | Check if a given name causes a name clash.
-- A clash arises when the name is both an element and an attribute of
-- the variant, or when its sanitized form collides with a Prelude
-- function.
--
isNameClash :: HtmlVariant -> String -> Bool
isNameClash v t
    -- Both an element and an attribute
    | (t `elem` parents v || t `elem` leafs v) && t `elem` attributes v = True
    -- Already a prelude function
    | sanitize t `S.member` prelude = True
    | otherwise = False
-- | Write an HTML variant.
--
writeHtmlVariant :: HtmlVariant -> IO ()
writeHtmlVariant htmlVariant = do
-- Make a directory.
createDirectoryIfMissing True basePath
let tags = zip parents' (repeat makeParent)
++ zip leafs' (repeat (makeLeaf $ selfClosing htmlVariant))
sortedTags = sortBy (comparing fst) tags
appliedTags = map (\(x, f) -> f x) sortedTags
-- Write the main module.
writeFile' (basePath <.> "hs") $ removeTrailingNewlines $ unlines
[ DO_NOT_EDIT
, "{-# LANGUAGE OverloadedStrings #-}"
, "-- | This module exports HTML combinators used to create documents."
, "--"
, exportList modulName $ "module Text.Blaze"
: "docType"
: "docTypeHtml"
: map (sanitize . fst) sortedTags
, DO_NOT_EDIT
, "import Prelude ((>>), (.))"
, ""
, "import Text.Blaze"
, "import Text.Blaze.Internal"
, ""
, makeDocType $ docType htmlVariant
, makeDocTypeHtml $ docType htmlVariant
, unlines appliedTags
]
let sortedAttributes = sort attributes'
-- Write the attribute module.
writeFile' (basePath </> "Attributes.hs") $ removeTrailingNewlines $ unlines
[ DO_NOT_EDIT
, "-- | This module exports combinators that provide you with the"
, "-- ability to set attributes on HTML elements."
, "--"
, "{-# LANGUAGE OverloadedStrings #-}"
, exportList attributeModuleName $ map sanitize sortedAttributes
, DO_NOT_EDIT
, "import Prelude ()"
, ""
, "import Text.Blaze.Internal (Attribute, AttributeValue, attribute)"
, ""
, unlines (map makeAttribute sortedAttributes)
]
where
basePath = "Text" </> "Blaze" </> foldl1 (</>) version'
modulName = getModuleName htmlVariant
attributeModuleName = getAttributeModuleName htmlVariant
attributes' = attributes htmlVariant
parents' = parents htmlVariant
leafs' = leafs htmlVariant
version' = version htmlVariant
removeTrailingNewlines = reverse . drop 2 . reverse
writeFile' file content = do
putStrLn ("Generating " ++ file)
writeFile file content
-- | A run of spaces exactly as long as the argument, used to align
-- continuation lines under a generated function name.
--
spaces :: String -> String
spaces s = replicate (length s) ' '
-- | Join blocks of code with a newline in between.
--
unblocks :: [String] -> String
unblocks blocks = unlines (intersperse "\n" blocks)
-- | Header warning readers that the following block was generated and
-- must not be edited by hand.  No trailing newline.
--
doNotEdit :: FilePath -> Int -> String
doNotEdit fileName lineNumber = intercalate "\n"
    [ "-- WARNING: The next block of code was automatically generated by"
    , "-- " ++ fileName ++ ":" ++ show lineNumber
    , "--"
    ]
-- | Render a Haskell module header with an explicit export list.
-- Calling it with no functions is a programming error.
--
exportList :: String     -- ^ Module name.
           -> [String]   -- ^ List of functions.
           -> String     -- ^ Resulting string.
exportList _ [] = error "exportList without functions."
exportList name (f:functions) = unlines $ concat
    [ ["module " ++ name]
    , ["    ( " ++ f]
    , map ("    , " ++) functions
    , ["    ) where"]
    ]
-- | Generate a function for a doctype.
--
makeDocType :: [String] -> String
makeDocType lines' = unlines
[ DO_NOT_EDIT
, "-- | Combinator for the document type. This should be placed at the top"
, "-- of every HTML page."
, "--"
, "-- Example:"
, "--"
, "-- > docType"
, "--"
, "-- Result:"
, "--"
, unlines (map ("-- > " ++) lines') ++ "--"
, "docType :: Html -- ^ The document type HTML."
, "docType = preEscapedText " ++ show (unlines lines')
, "{-# INLINE docType #-}"
]
-- | Generate a function for the HTML tag (including the doctype).
--
makeDocTypeHtml :: [String] -- ^ The doctype.
-> String -- ^ Resulting combinator function.
makeDocTypeHtml lines' = unlines
[ DO_NOT_EDIT
, "-- | Combinator for the @\\<html>@ element. This combinator will also"
, "-- insert the correct doctype."
, "--"
, "-- Example:"
, "--"
, "-- > docTypeHtml $ span $ text \"foo\""
, "--"
, "-- Result:"
, "--"
, unlines (map ("-- > " ++) lines') ++ "-- > <html><span>foo</span></html>"
, "--"
, "docTypeHtml :: Html -- ^ Inner HTML."
, " -> Html -- ^ Resulting HTML."
, "docTypeHtml inner = docType >> html inner"
, "{-# INLINE docTypeHtml #-}"
]
-- | Generate a function for an HTML tag that can be a parent.
--
makeParent :: String -> String
makeParent tag = unlines
[ DO_NOT_EDIT
, "-- | Combinator for the @\\<" ++ tag ++ ">@ element."
, "--"
, "-- Example:"
, "--"
, "-- > " ++ function ++ " $ span $ text \"foo\""
, "--"
, "-- Result:"
, "--"
, "-- > <" ++ tag ++ "><span>foo</span></" ++ tag ++ ">"
, "--"
, function ++ " :: Html -- ^ Inner HTML."
, spaces function ++ " -> Html -- ^ Resulting HTML."
, function ++ " = Parent \"" ++ tag ++ "\" \"<" ++ tag
++ "\" \"</" ++ tag ++ ">\"" ++ modifier
, "{-# INLINE " ++ function ++ " #-}"
]
where
function = sanitize tag
modifier = if tag `elem` ["style", "script"] then " . external" else ""
-- | Generate a function for an HTML tag that must be a leaf.
--
makeLeaf :: Bool -- ^ Make leaf tags self-closing
-> String -- ^ Tag for the combinator
-> String -- ^ Combinator code
makeLeaf selfClosing tag = unlines
[ DO_NOT_EDIT
, "-- | Combinator for the @\\<" ++ tag ++ " />@ element."
, "--"
, "-- Example:"
, "--"
, "-- > " ++ function
, "--"
, "-- Result:"
, "--"
, "-- > <" ++ tag ++ " />"
, "--"
, function ++ " :: Html -- ^ Resulting HTML."
, function ++ " = Leaf \"" ++ tag ++ "\" \"<" ++ tag ++ "\" " ++ "\""
++ (if selfClosing then " /" else "") ++ ">\""
, "{-# INLINE " ++ function ++ " #-}"
]
where
function = sanitize tag
-- | Generate a function for an HTML attribute.
--
makeAttribute :: String -> String
makeAttribute name = unlines
[ DO_NOT_EDIT
, "-- | Combinator for the @" ++ name ++ "@ attribute."
, "--"
, "-- Example:"
, "--"
, "-- > div ! " ++ function ++ " \"bar\" $ \"Hello.\""
, "--"
, "-- Result:"
, "--"
, "-- > <div " ++ name ++ "=\"bar\">Hello.</div>"
, "--"
, function ++ " :: AttributeValue -- ^ Attribute value."
, spaces function ++ " -> Attribute -- ^ Resulting attribute."
, function ++ " = attribute \"" ++ name ++ "\" \" "
++ name ++ "=\\\"\""
, "{-# INLINE " ++ function ++ " #-}"
]
where
function = sanitize name
-- | HTML 4.01 Strict.
-- A good reference can be found here: http://www.w3schools.com/tags/default.asp
--
html4Strict :: HtmlVariant
html4Strict = HtmlVariant
{ version = ["Html4", "Strict"]
, docType =
[ "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\""
, " \"http://www.w3.org/TR/html4/strict.dtd\">"
]
, parents =
[ "a", "abbr", "acronym", "address", "b", "bdo", "big", "blockquote"
, "body" , "button", "caption", "cite", "code", "colgroup", "dd", "del"
, "dfn", "div" , "dl", "dt", "em", "fieldset", "form", "h1", "h2", "h3"
, "h4", "h5", "h6", "head", "html", "i", "ins" , "kbd", "label"
, "legend", "li", "map", "noscript", "object", "ol", "optgroup"
, "option", "p", "pre", "q", "samp", "script", "select", "small"
, "span", "strong", "style", "sub", "sup", "table", "tbody", "td"
, "textarea", "tfoot", "th", "thead", "title", "tr", "tt", "ul", "var"
]
, leafs =
[ "area", "br", "col", "hr", "link", "img", "input", "meta", "param"
]
, attributes =
[ "abbr", "accept", "accesskey", "action", "align", "alt", "archive"
, "axis", "border", "cellpadding", "cellspacing", "char", "charoff"
, "charset", "checked", "cite", "class", "classid", "codebase"
, "codetype", "cols", "colspan", "content", "coords", "data", "datetime"
, "declare", "defer", "dir", "disabled", "enctype", "for", "frame"
, "headers", "height", "href", "hreflang", "http-equiv", "id", "label"
, "lang", "maxlength", "media", "method", "multiple", "name", "nohref"
, "onabort", "onblur", "onchange", "onclick", "ondblclick", "onfocus"
, "onkeydown", "onkeypress", "onkeyup", "onload", "onmousedown"
, "onmousemove", "onmouseout", "onmouseover", "onmouseup", "onreset"
, "onselect", "onsubmit", "onunload", "profile", "readonly", "rel"
, "rev", "rows", "rowspan", "rules", "scheme", "scope", "selected"
, "shape", "size", "span", "src", "standby", "style", "summary"
, "tabindex", "title", "type", "usemap", "valign", "value", "valuetype"
, "width"
]
, selfClosing = False
}
-- | HTML 4.0 Transitional
--
html4Transitional :: HtmlVariant
html4Transitional = HtmlVariant
{ version = ["Html4", "Transitional"]
, docType =
[ "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\""
, " \"http://www.w3.org/TR/html4/loose.dtd\">"
]
, parents = parents html4Strict ++
[ "applet", "center", "dir", "font", "iframe", "isindex", "menu"
, "noframes", "s", "u"
]
, leafs = leafs html4Strict ++ ["basefont"]
, attributes = attributes html4Strict ++
[ "background", "bgcolor", "clear", "compact", "hspace", "language"
, "noshade", "nowrap", "start", "target", "vspace"
]
, selfClosing = False
}
-- | HTML 4.0 FrameSet
--
html4FrameSet :: HtmlVariant
html4FrameSet = HtmlVariant
{ version = ["Html4", "FrameSet"]
, docType =
[ "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 FrameSet//EN\""
, " \"http://www.w3.org/TR/html4/frameset.dtd\">"
]
, parents = parents html4Transitional ++ ["frameset"]
, leafs = leafs html4Transitional ++ ["frame"]
, attributes = attributes html4Transitional ++
[ "frameborder", "scrolling"
]
, selfClosing = False
}
-- | XHTML 1.0 Strict
--
xhtml1Strict :: HtmlVariant
xhtml1Strict = HtmlVariant
{ version = ["XHtml1", "Strict"]
, docType =
[ "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\""
, " \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">"
]
, parents = parents html4Strict
, leafs = leafs html4Strict
, attributes = attributes html4Strict
, selfClosing = True
}
-- | XHTML 1.0 Transitional
--
xhtml1Transitional :: HtmlVariant
xhtml1Transitional = HtmlVariant
{ version = ["XHtml1", "Transitional"]
, docType =
[ "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\""
, " \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">"
]
, parents = parents html4Transitional
, leafs = leafs html4Transitional
, attributes = attributes html4Transitional
, selfClosing = True
}
-- | XHTML 1.0 FrameSet
--
xhtml1FrameSet :: HtmlVariant
xhtml1FrameSet = HtmlVariant
{ version = ["XHtml1", "FrameSet"]
, docType =
[ "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 FrameSet//EN\""
, " \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">"
]
, parents = parents html4FrameSet
, leafs = leafs html4FrameSet
, attributes = attributes html4FrameSet
, selfClosing = True
}
-- | HTML 5.0
-- A good reference can be found here:
-- http://www.w3schools.com/html5/html5_reference.asp
--
html5 :: HtmlVariant
html5 = HtmlVariant
{ version = ["Html5"]
, docType = ["<!DOCTYPE HTML>"]
, parents =
[ "a", "abbr", "address", "article", "aside", "audio", "b", "base"
, "bdo", "blockquote", "body", "button", "canvas", "caption", "cite"
, "code", "colgroup", "command", "datalist", "dd", "del", "details"
, "dfn", "div", "dl", "dt", "em", "fieldset", "figcaption", "figure"
, "footer", "form", "h1", "h2", "h3", "h4", "h5", "h6", "head", "header"
, "hgroup", "html", "i", "iframe", "ins", "keygen", "kbd", "label"
, "legend", "li", "map", "mark", "menu", "meter", "nav", "noscript"
, "object", "ol", "optgroup", "option", "output", "p", "pre", "progress"
, "q", "rp", "rt", "ruby", "samp", "script", "section", "select"
, "small", "source", "span", "strong", "style", "sub", "summary", "sup"
, "table", "tbody", "td", "textarea", "tfoot", "th", "thead", "time"
, "title", "tr", "ul", "var", "video"
]
, leafs =
[ "area", "br", "col", "embed", "hr", "img", "input", "meta", "link"
, "param"
]
, attributes =
[ "accept", "accept-charset", "accesskey", "action", "alt", "async"
, "autocomplete", "autofocus", "autoplay", "challenge", "charset"
, "checked", "cite", "class", "cols", "colspan", "content"
, "contenteditable", "contextmenu", "controls", "coords", "data"
, "datetime", "defer", "dir", "disabled", "draggable", "enctype", "for"
, "form", "formaction", "formenctype", "formmethod", "formnovalidate"
, "formtarget", "headers", "height", "hidden", "high", "href"
, "hreflang", "http-equiv", "icon", "id", "ismap", "item", "itemprop"
, "keytype", "label", "lang", "list", "loop", "low", "manifest", "max"
, "maxlength", "media", "method", "min", "multiple", "name"
, "novalidate", "onbeforeonload", "onbeforeprint", "onblur", "oncanplay"
, "oncanplaythrough", "onchange", "oncontextmenu", "onclick"
, "ondblclick", "ondrag", "ondragend", "ondragenter", "ondragleave"
, "ondragover", "ondragstart", "ondrop", "ondurationchange", "onemptied"
, "onended", "onerror", "onfocus", "onformchange", "onforminput"
, "onhaschange", "oninput", "oninvalid", "onkeydown", "onkeyup"
, "onload", "onloadeddata", "onloadedmetadata", "onloadstart"
, "onmessage", "onmousedown", "onmousemove", "onmouseout", "onmouseover"
, "onmouseup", "onmousewheel", "ononline", "onpagehide", "onpageshow"
, "onpause", "onplay", "onplaying", "onprogress", "onpropstate"
, "onratechange", "onreadystatechange", "onredo", "onresize", "onscroll"
, "onseeked", "onseeking", "onselect", "onstalled", "onstorage"
, "onsubmit", "onsuspend", "ontimeupdate", "onundo", "onunload"
, "onvolumechange", "onwaiting", "open", "optimum", "pattern", "ping"
, "placeholder", "preload", "pubdate", "radiogroup", "readonly", "rel"
, "required", "reversed", "rows", "rowspan", "sandbox", "scope"
, "scoped", "seamless", "selected", "shape", "size", "sizes", "span"
, "spellcheck", "src", "srcdoc", "start", "step", "style", "subject"
, "summary", "tabindex", "target", "title", "type", "usemap", "value"
, "width", "wrap", "xmlns"
]
, selfClosing = False
}
-- | A map of all supported HTML variants, keyed by their lowercase
-- version string (see the 'Show' instance).
--
htmlVariants :: Map String HtmlVariant
htmlVariants = M.fromList
    [ (show variant, variant)
    | variant <-
        [ html4Strict
        , html4Transitional
        , html4FrameSet
        , xhtml1Strict
        , xhtml1Transitional
        , xhtml1FrameSet
        , html5
        ]
    ]
-- | Generate source files for every known HTML variant.
main :: IO ()
main = mapM_ writeHtmlVariant (M.elems htmlVariants)
| jgm/blaze-html | Util/GenerateHtmlCombinators.hs | bsd-3-clause | 17,122 | 0 | 16 | 4,430 | 3,678 | 2,246 | 1,432 | 346 | 2 |
-- Arthur Miranda Gomes - 14.1.8338
-- Pedro Henrique Mendes Batista - 14.1.8403
module Token where
-- | Symbolic representation of the characters of the surface syntax.
-- Escaped sequences (e.g. @\\+@) map to dedicated constructors; any
-- other character becomes a plain 'TChar'.
data Token = TChar Char
           | TPlus
           | TMinus
           | TTimes
           | TDot
           | TLambda
           | TEnter
           | TBar
           | TError -- ^ Invalid escape sequence, e.g. @\\a@
           deriving (Show, Eq)
-- | Map a string to the equivalent list of tokens.  A backslash starts
-- a two-character escape; a trailing lone backslash is an error.
str2token :: String -> [Token]
str2token []              = []
str2token ['\\']          = [TError]
str2token ('\\':c:rest)   = decode c : str2token rest
  where
    decode '+'  = TPlus
    decode '-'  = TMinus
    decode '*'  = TTimes
    decode '.'  = TDot
    decode 'n'  = TEnter
    decode 'l'  = TLambda
    decode '\\' = TBar
    decode _    = TError   -- unknown escape, e.g. \a
str2token (c:rest)        = TChar c : str2token rest
-- | Render a single token back to its concrete-syntax string.
--
-- NOTE(review): 'TError' has no equation here, so rendering it is a
-- pattern-match failure -- confirm that invalid tokens are never printed.
printToken :: Token -> String
printToken tok = case tok of
  TChar c -> [c]
  TPlus   -> "+"
  TMinus  -> "-"
  TTimes  -> "*"
  TDot    -> "."
  TLambda -> "\\l"
  TEnter  -> "\\n"
  TBar    -> "\\\\"
-- | Render a list of tokens back to a single string.
token2str :: [Token] -> String
token2str ts = concat (map printToken ts)
| arthurmgo/regex-ftc | src/Token.hs | bsd-3-clause | 1,461 | 0 | 12 | 490 | 377 | 198 | 179 | 38 | 10 |
module Releases2016 where
import PlatformDB
import Types
-- | All Haskell Platform releases defined for 2016.
releases2016 :: [Release]
releases2016 = [hp_8_0_0]
-- | Haskell Platform 8.0.0: GHC 7.10.3 core packages plus the curated
-- full-platform library and tool set.
hp_8_0_0 :: Release
hp_8_0_0 =
    releaseWithMinimal "8.0.0"
        [ incGHC "7.10.3"
        , incGHCLib "Cabal" "1.22.5.0"
        , incGHCLib "array" "0.5.1.0"
        , incGHCLib "base" "4.8.2.0"
        , incGHCLib "bytestring" "0.10.6.0"
        , incGHCLib "containers" "0.5.6.2"
        , incGHCLib "deepseq" "1.4.1.1"
        , incGHCLib "directory" "1.2.2.0"
        , incGHCLib "filepath" "1.4.0.0"
        , incGHCLib "hpc" "0.6.0.2"
        , incGHCLib "pretty" "1.1.2.0"
        , incGHCLib "process" "1.2.3.0"
        , incGHCLib "template-haskell" "2.10.0.0"
        , incGHCLib "time" "1.5.0.1"
        , incGHCLib "transformers" "0.4.2.0"
        , incGHCLib "xhtml" "3000.2.1"
        {- These packages are in the GHC distribution, and hence bundeled with
           the Platform. However, they are not officially part of the Platform,
           and as such, do not carry the same stability guaruntees.
        , incGHCLib "bin-package-db" "0.0.0.0"
        , incGHCLib "binary" "0.7.5.0"
        , incGHCLib "ghc-prim" "0.4.0.0"
        , incGHCLib "haskeline" "0.7.2.1"
        , incGHCLib "hoopl" "3.10.0.2"
        , incGHCLib "integer-gmp" "1.0.0.0"
        , incGHCLib "terminfo" "0.4.0.1"
        -}
        , notWindows $ incGHCLib "unix" "2.7.1.0"
        --, onlyWindows $ incGHCLib "Win32" "2.3.1.0"
        , incTool "cabal-install" "1.22.6.0"
        , incTool "alex" "3.1.4"
        , incTool "happy" "1.19.5"
        , incTool "hscolour" "1.23"
        , incGHCTool "haddock" "2.16.1"
        ]
        [
          incLib "async" "2.0.2"
        , incLib "attoparsec" "0.13.0.1"
        , incLib "case-insensitive" "1.2.0.5"
        , incLib "cgi" "3001.2.2.2"
        , incLib "fgl" "5.5.2.3"
        , incLib "GLUT" "2.7.0.3"
        , incLib "GLURaw" "1.5.0.2"
        , incLib "haskell-src" "1.0.2.0"
        , incLib "hashable" "1.2.3.3"
        , incLib "html" "1.0.1.2"
        , incLib "HTTP" "4000.2.20"
        , incLib "HUnit" "1.3.0.0"
        , incLib "mtl" "2.2.1"
        , incLib "network" "2.6.2.1"
        , incLib "OpenGL" "2.13.1.0"
        , incLib "OpenGLRaw" "2.6.0.0"
        , incLib "parallel" "3.2.0.6"
        , incLib "parsec" "3.1.9"
        , incLib "primitive" "0.6.1.0"
        , incLib "QuickCheck" "2.8.1"
        , incLib "random" "1.1"
        , incLib "regex-base" "0.93.2"
        , incLib "regex-compat" "0.95.1"
        , incLib "regex-posix" "0.95.2"
        , incLib "split" "0.2.2"
        , incLib "stm" "2.4.4"
        , incLib "syb" "0.6"
        , incLib "text" "1.2.1.3"
        , incLib "unordered-containers" "0.2.5.1"
        , incLib "vector" "0.11.0.0"
        , incLib "zlib" "0.5.4.2"
        -- held back because cabal-install needs < 0.6 -- ick
        -- Libs required by newer version of stuff - but not cleared for HP
        , incLib "tf-random" "0.5"
        -- needed by alex & QuickCheck
        -- these two were in the old HP
        , incLib "old-locale" "1.0.0.7"
        , incLib "old-time" "1.1.0.3"
        -- needed by cabal-install, cgi, & HTTP
        -- was split out of network, so was in HP, just under different pacakge
        , incLib "network-uri" "2.6.0.3"
        -- needed by cabal-install, cgi, & HTTP
        -- needed by cgi
        , incLib "exceptions" "0.8.0.2"
        , incLib "transformers-compat" "0.4.0.4"
        , incLib "multipart" "0.1.2"
        -- needed by attoparsec, held back due to attoparsec
        , incLib "scientific" "0.3.3.8"
        -- needed by OpenGL
        , incLib "ObjectName" "1.1.0.0"
        , incLib "StateVar" "1.1.0.1"
        , incLib "half" "0.2.2.1"
        ]
| gbaz/haskell-platform | hptool/src/Releases2016.hs | bsd-3-clause | 4,841 | 0 | 8 | 2,259 | 569 | 295 | 274 | 73 | 1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.ARB.HalfFloatVertex
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.ARB.HalfFloatVertex (
-- * Extension Support
glGetARBHalfFloatVertex,
gl_ARB_half_float_vertex,
-- * Types
GLhalf,
-- * Enums
pattern GL_HALF_FLOAT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Types
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/ARB/HalfFloatVertex.hs | bsd-3-clause | 701 | 0 | 5 | 101 | 57 | 43 | 14 | 9 | 0 |
module Jerimum.Tests.Unit.PostgreSQL.Types.BinaryTest
( tests
) where
import Data.ByteString (ByteString, pack)
import Jerimum.PostgreSQL.Types.Binary
import Jerimum.Tests.Unit.PostgreSQL.Types.Helpers
import Test.Tasty
import Test.Tasty.QuickCheck
-- | Local wrapper around 'ByteString' so we can define an 'Arbitrary'
-- instance without creating an orphan.
newtype BinaryG =
  BinaryG ByteString
  deriving (Show, Eq)
-- | All tests of this module.
tests :: TestTree
tests = testGroup "PostgreSQL.Types.Binary" [testCborCodec]
-- | Round-trip property: decoding an encoded binary value yields the
-- original bytes.
testCborCodec :: TestTree
testCborCodec = testGroup "cbor codec" [roundTrip]
  where
    roundTrip = testProperty "identity" $ \(BinaryG bs) ->
      runDecoder binaryDecoderV0 (runEncoder (binaryEncoderV0 bs)) == Right bs
-- | Generate 1 to 20 arbitrary bytes.
instance Arbitrary BinaryG where
  arbitrary = do
    len <- choose (1, 20)
    BinaryG . pack <$> vector len
| dgvncsz0f/nws | test/Jerimum/Tests/Unit/PostgreSQL/Types/BinaryTest.hs | bsd-3-clause | 760 | 0 | 13 | 157 | 205 | 114 | 91 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Xml.XPath.Parser where
import Control.Applicative
import Control.Monad
import Data.Char
import Data.Attoparsec.Text
import Data.Attoparsec.Expr
import Data.Text (Text)
import Prelude hiding (takeWhile)
import qualified Data.Text as T
import Xml.XPath.Types
-- | Parse a complete XPath expression from text.
parser :: Text -> Either String XPath
parser = parseOnly xpath
-- | Top-level XPath parser: one expression consuming all input.
xpath :: Parser XPath
xpath = XPath <$> expr <* endOfInput
-- | A location path is either absolute (starts with @/@ or @//@) or
-- relative.
locationPath :: Parser LocationPath
locationPath
   = Absolute <$> absoluteLocationPath
 <|> Relative <$> relativeLocationPath
-- | Steps separated by @/@, optionally followed by an abbreviated
-- (@//@) tail.
relativeLocationPath :: Parser [Step]
relativeLocationPath
  = (++)
  <$> (step `sepBy1` token "/")
  <*> option [] abbreviatedRelativeLocationPath
-- | @//step@ desugars to @descendant-or-self::node()/step@.
abbreviatedRelativeLocationPath :: Parser [Step]
abbreviatedRelativeLocationPath = (descendantOrSelf :) <$> (token "//" *> (pure <$> step))
absoluteLocationPath :: Parser [Step]
absoluteLocationPath
   = abbreviatedAbsoluteLocationPath
 <|> token "/" *> option [] relativeLocationPath
-- | A path starting with @//@.
abbreviatedAbsoluteLocationPath :: Parser [Step]
abbreviatedAbsoluteLocationPath = (descendantOrSelf :) <$> (token "//" *> relativeLocationPath)
-- | One step: axis, node test, and zero or more predicates -- or an
-- abbreviation (@.@ or @..@).
step :: Parser Step
step = Step <$> axisSpecifier <*> nodeTest <*> many predicate
   <|> abbreviatedStep
-- | @..@ is the parent step, @.@ is the self step.  @..@ must be tried
-- first since @.@ is a prefix of it.
abbreviatedStep :: Parser Step
abbreviatedStep
   = parent <$ token ".."
 <|> self <$ token "."
-- | The step @descendant-or-self::node()@.
descendantOrSelf :: Step
descendantOrSelf = Step (NamedAxis DescendantOrSelf) (NodeType Node) []
-- | The step @parent::node()@.
parent :: Step
parent = Step (NamedAxis Parent) (NodeType Node) []
-- | The step @self::node()@.
self :: Step
self = Step (NamedAxis Self) (NodeType Node) []
-- | An axis: either an explicit @name::@ or the abbreviated form.
axisSpecifier :: Parser AxisSpecifier
axisSpecifier = NamedAxis <$> (axisName <|> abbreviatedAxisSpecifier)

-- | An explicit axis name followed by @::@.
axisName :: Parser AxisName
axisName = foldr1 (<|>) (map named axes)
  where
    named (ctor, name) = ctor <$ (token name <* token "::")
    -- Order preserved from the original: a prefix such as "ancestor"
    -- is tried before "ancestor-or-self"; attoparsec backtracks when
    -- the following "::" is missing, so the longer name still matches.
    axes =
      [ (Ancestor,         "ancestor")
      , (AncestorOrSelf,   "ancestor-or-self")
      , (Attribute,        "attribute")
      , (Child,            "child")
      , (Descendant,       "descendant")
      , (DescendantOrSelf, "descendant-or-self")
      , (Following,        "following")
      , (FollowingSibling, "following-sibling")
      , (Namespace,        "namespace")
      , (Parent,           "parent")
      , (Preceding,        "preceding")
      , (PrecedingSibling, "preceding-sibling")
      , (Self,             "self")
      ]

-- | @\@@ abbreviates the attribute axis; no marker means the child axis.
abbreviatedAxisSpecifier :: Parser AxisName
abbreviatedAxisSpecifier = option Child (Attribute <$ token "@")
-- | A node test: a node-type test (@text()@, ...), a
-- @processing-instruction('target')@ test, or a name test.
nodeTest :: Parser NodeTest
nodeTest
   = NodeType <$> nodeType
 <|> PiTest <$> processingInstruction
 <|> NameTest <$> nameTest
-- | Name tests: @*@, @prefix:*@, or a (possibly qualified) name.
nameTest :: Parser NameTest
nameTest
   = Star <$ token "*"
 <|> NsStar <$> free unqualified <* token ":*"
 <|> QName <$> free qualified
-- | Node-type tests with empty argument lists, e.g. @comment()@.
nodeType :: Parser NodeType
nodeType
   = p Comment "comment"
 <|> p Text "text"
 <|> p ProcessingInstruction "processing-instruction"
 <|> p Node "node"
  where p f t = f <$ token t <* token "(" <* token ")"
-- | Parse a @processing-instruction('target')@ node test, returning the
-- literal target inside the parentheses.
--
-- NOTE(review): the original wrapped the result in 'PiTest', which
-- contradicts the declared type @Parser Text@ -- 'nodeTest' applies
-- 'PiTest' itself.  The wrapper is dropped here.
processingInstruction :: Parser Text
processingInstruction
  = token "processing-instruction"
 *> token "(" *> literal <* token ")"
-- | A string literal in single or double quotes (no escape handling:
-- the literal runs to the first matching quote).
literal :: Parser Text
literal = free (quoted '"') <|> free (quoted '\'')
  where
    quoted q = char q *> takeTill (== q) <* char q
-- | A predicate: an expression in square brackets.
predicate :: Parser Expr
predicate = token "[" *> expr <* token "]"
-- | Full expression grammar, built from the operator 'table'.
expr :: Parser Expr
expr = buildExpressionParser table primaryExpr
-- | Atoms of the expression grammar.  Order matters: 'functionCall'
-- is tried before 'locationPath' so that @name(...)@ parses as a call.
primaryExpr :: Parser Expr
primaryExpr
   = token "(" *> expr <* token ")"
 <|> Literal <$> literal
 <|> Number <$> free number
 <|> Variable <$> variableReference
 <|> functionCall
 <|> Path <$> locationPath
-- | A variable reference: @$name@.
variableReference :: Parser Text
variableReference = free (char '$' *> qualified)
-- | A function call with comma-separated arguments.
functionCall :: Parser Expr
functionCall
  = FunctionCall
 <$> functionName
 <*> (token "(" *> (expr `sepBy` token ",") <* token ")")
-- | A function name; the node-type test names are excluded (they are
-- handled by 'nodeType', not as function calls).
functionName :: Parser Text
functionName = join (valid <$> free qualified)
  where valid n = if n `elem` ["comment", "text", "processing-instruction", "node"]
                  then empty
                  else return n
-- | Operator table for 'buildExpressionParser', tightest-binding first:
-- postfix filters and path continuations, then the binary operators in
-- XPath precedence order (union, arithmetic, comparisons, and/or).
table :: [[Operator Text Expr]]
table =
  [ [ Postfix ( flip Filter       <$> predicate                          ) ]
  , [ Postfix ( flip DeepChildren <$  token "//" <*> relativeLocationPath ) ]
  , [ Postfix ( flip Children     <$  token "/"  <*> relativeLocationPath ) ]
  , [ Infix   ( Union             <$  token "|"   ) AssocLeft ]
  , [ Infix   ( Mod               <$  token "mod" ) AssocLeft ]
  , [ Infix   ( Div               <$  token "div" ) AssocLeft ]
  , [ Infix   ( Mul               <$  token "*"   ) AssocLeft ]
  , [ Infix   ( Sub               <$  token "-"   ) AssocLeft ]
  , [ Infix   ( Add               <$  token "+"   ) AssocLeft ]
  , [ Infix   ( Gte               <$  token ">="  ) AssocLeft ]
  , [ Infix   ( Lte               <$  token "<="  ) AssocLeft ]
  , [ Infix   ( Gt                <$  token ">"   ) AssocLeft ]
  , [ Infix   ( Lt                <$  token "<"   ) AssocLeft ]
  , [ Infix   ( IsNot             <$  token "!="  ) AssocLeft ]
  , [ Infix   ( Is                <$  token "="   ) AssocLeft ]
  , [ Infix   ( And               <$  token "and" ) AssocLeft ]
  , [ Infix   ( Or                <$  token "or"  ) AssocLeft ]
  ]
-- | An XML name without a namespace prefix: a letter or underscore
-- followed by letters, digits, or hyphens.
unqualified :: Parser Text
unqualified = T.cons <$> satisfy startsName <*> takeWhile inName
  where
    startsName d = isAlpha d || d == '_'
    inName d = d == '-' || isAlpha d || isDigit d

-- | A possibly prefixed name: @name@ or @prefix:name@.
qualified :: Parser Text
qualified = T.append <$> unqualified <*> option "" suffix
  where
    suffix = T.cons <$> char ':' <*> unqualified

-- | Run a parser, then skip any trailing whitespace.
free :: Parser a -> Parser a
free p = p <* many space

-- | Case-insensitive literal token with trailing whitespace skipped.
token :: Text -> Parser Text
token t = free (stringCI t)
| silkapp/xmlhtml-xpath | src/Xml/XPath/Parser.hs | bsd-3-clause | 6,006 | 0 | 17 | 1,889 | 1,728 | 886 | 842 | 147 | 2 |
{-# LANGUAGE OverloadedStrings #-}
import Network.Socket hiding (socket, connect, accept, recv, send)
import Network.Socket.Windows
import Control.Concurrent
import Control.Monad
import System.IO
import System.Timeout
-- | Fork a demo TCP server on port 1234.
--
-- For each accepted client it logs both socket names, sends "Hello!"
-- in two pieces with a 3s pause (to exercise partial reads on the
-- client), logs one received chunk, waits 1s, and closes.
server :: IO ()
server =
    void $ forkIO $ do
        sock <- socket AF_INET Stream defaultProtocol
        bind sock $ SockAddrInet 1234 iNADDR_ANY
        listen sock 5
        putStrLn "server: listening"
        forever $ do
            (client, clientAddr) <- accept sock
            putStrLn $ "server: accepted connection from " ++ show clientAddr
            sname <- getSocketName client
            putStrLn $ "server: getSocketName client: " ++ show sname
            pname <- getPeerName client
            putStrLn $ "server: getPeerName client: " ++ show pname
            -- One thread per client; the accept loop keeps running.
            forkIO $ do
                sendAll client "He"
                threadDelay 3000000
                sendAll client "llo!"
                bs <- recv client 4096
                putStrLn $ "server: received " ++ show bs
                threadDelay 1000000
                close client
-- | Exercise the IOCP-based socket operations: start the demo server,
-- then connect with deliberate 'timeout's around 'connect' and 'recv'
-- to check that cancelled operations behave sanely.
main :: IO ()
main = withSocketsDo $ do
    mapM_ (`hSetBuffering` LineBuffering) [stdout, stderr]
    server
    -- An address expected to be unreachable on port 1234, so the first
    -- connect attempt should time out; localhost is the fallback.
    google <- inet_addr "74.125.137.101"
    localhost <- inet_addr "127.0.0.1"
    sock <- socket AF_INET Stream defaultProtocol
    putStrLn $ "client: connecting to google.com:1234"
    timeout 2000000 (connect sock $ SockAddrInet 1234 google)
        >>= \m -> case m of
        Nothing -> do
            putStrLn "client: connect timed out.  Will try connecting to myself instead."
            connect sock $ SockAddrInet 1234 localhost
        Just () ->
            putStrLn "client: connect succeeded first time."
    putStrLn "client: connected to server"
    sname <- getSocketName sock
    putStrLn $ "client: getSocketName: " ++ show sname
    pname <- getPeerName sock
    putStrLn $ "client: getPeerName: " ++ show pname
    sendAll sock "Hello, server."
    bs <- recv sock 4096
    putStrLn $ "client: received " ++ show bs
    -- The server pauses 3s mid-message, so this 1s recv should time out.
    timeout 1000000 (recv sock 4096)
        >>= \m -> case m of
        Nothing -> do
            putStrLn $ "client: recv timed out.  Trying again without timeout."
            bs' <- recv sock 4096
            putStrLn $ "client: received " ++ show bs'
        Just s ->
            putStrLn $ "client: recv succeeded first time: " ++ show s
    threadDelay 1000000
    close sock
| joeyadams/hs-windows-iocp | lab/connect.hs | bsd-3-clause | 2,377 | 0 | 16 | 700 | 617 | 278 | 339 | 62 | 3 |
{-# LANGUAGE ScopedTypeVariables #-}
module PrivacyCA where
import TPM
import Demo3Shared
import VChanUtil
import Data.Binary
import Data.Bits
import Data.ByteString.Lazy (ByteString, cons, empty, pack, toStrict, fromStrict)
import qualified Data.ByteString as B (ByteString, pack)
--import Codec.Crypto.RSA hiding (sign)
import System.Random
import System.IO
import Crypto.Cipher.AES
import CommTools
--import Codec.Crypto.AES
--withOpenSSL
-- | Handle one certification exchange on the given channel: receive a
-- CA request, build the response, and send it back.
caProcess :: LibXenVChan -> IO ()
caProcess chan = do
  --ctrlWait chan
  request <- receiveCARequest chan
  response <- mkCAResponse request
  sendCAResponse chan response
-- | Receive the next shared value from the channel, accepting only a
-- 'CARequest'; anything else is reported as a 'Left' error string.
receiveCARequest :: LibXenVChan -> IO (Either String CARequest)
receiveCARequest chan = do
  shared <- receiveShared chan
  case shared of
    Left err -> return (Left err)
    Right (WCARequest caReq) -> do
      putStrLn ("Received: " ++ show caReq)
      return (Right caReq)
    Right other ->
      return (Left ("I wasn't supposed to get this!. I expected a 'CARequest' but I received this: " ++ show other))
-- | Send a CA response over the channel and log it.
sendCAResponse :: LibXenVChan -> CAResponse -> IO ()
sendCAResponse chan caResp = do
  sendShared' chan (WCAResponse caResp)
  putStrLn ("Sent: " ++ show caResp)
-- | Build a CA response for an identity-key certification request:
-- sign the identity key with the CA key, AES-CTR-encrypt the
-- certificate, and RSA-encrypt the symmetric key material to the EK.
--
-- TODO(review): the request signature ('idSig') is never verified here
-- (the original comment flags this too); confirm before trusting
-- requests.  The AES key and CTR value are a fixed all-ones constant,
-- so this is demo-grade crypto only.
mkCAResponse :: Either String CARequest -> IO CAResponse
-- Previously only the 'Right' case was matched, so a 'Left' request
-- crashed with an opaque pattern-match failure; fail with the error
-- text instead.
mkCAResponse (Left err) = error ("mkCAResponse: invalid CARequest: " ++ err)
mkCAResponse (Right (CARequest id (Signed idContents idSig))) = do
  ekPubKey <- readPubEK
  let iPubKey = identityPubKey idContents
      iDigest = tpm_digest $ encode iPubKey
      asymContents = contents iDigest
      blob = encode asymContents
  encBlob <- tpm_rsa_pubencrypt ekPubKey blob
  let caPriKey = snd generateCAKeyPair
      caCert = signPack caPriKey iPubKey
      certBytes = encode caCert
      strictCert = toStrict certBytes
      encryptedCert = encryptCTR aes ctr strictCert
      enc = fromStrict encryptedCert
  return (CAResponse enc encBlob)
 where
   symKey =
     TPM_SYMMETRIC_KEY
       (tpm_alg_aes128)
       (tpm_es_sym_ctr)
       key
   v :: Word8
   v = 1
   -- 16 bytes of 0x01: used both as the AES key and the initial counter.
   key = Data.ByteString.Lazy.pack $ replicate 16 v
   aes = initAES $ toStrict key
   ctr = toStrict key
   contents dig = TPM_ASYM_CA_CONTENTS symKey dig
-- | Read the exported public endorsement key from 'exportEKFileName'.
-- The file's first line must be a 'show'n 'TPM_PUBKEY'.
--
-- Uses 'withFile' so the handle is closed even if reading throws
-- (the original 'openFile'/'hClose' pair leaked on exception), and
-- forces the parse before the handle is released.
readPubEK :: IO TPM_PUBKEY
readPubEK =
  withFile exportEKFileName ReadMode $ \handle -> do
    pubKeyString <- hGetLine handle
    -- NOTE(review): 'read' is partial; a malformed file aborts with a
    -- Prelude.read parse error.
    let pubKey :: TPM_PUBKEY
        pubKey = read pubKeyString
    return $! pubKey
{-
--"One-time use" export function
exportCAPub :: String -> PubKey -> IO ()
exportCAPub fileName pubKey = do
handle <- openFile fileName WriteMode
hPutStrLn handle $ show pubKey
hClose handle
-}
| armoredsoftware/protocol | tpm/mainline/privacyCA/PrivacyCA.hs | bsd-3-clause | 2,809 | 0 | 16 | 646 | 692 | 349 | 343 | 67 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Html
( Html
, renderResult
, renderResults
, renderResults_
, showInterpreter
, indent, delim, getOne, getTwo
) where
import Result
import Lang
import Data.Data.Compare
import Text.XHtml.Strict
import qualified Data.Text as T
---------------------
-- | Render results to 'T.Text' via 'showHtmlFragment'.
--
-- NOTE(review): @T.intercalate " " . T.splitOn " "@ looks like an
-- identity unless one of the two space literals is actually a
-- non-breaking space (U+00A0) lost in transcription -- confirm against
-- the original source encoding.
renderResults :: [Result] -> T.Text
renderResults
    = T.intercalate " " . T.splitOn " "
    . T.pack
    . showHtmlFragment
    . renderResults_
-- | Render a list of results as one 'Html' fragment, joined with line
-- breaks (via '|-|').
renderResults_ :: [Result] -> Html
renderResults_
    = foldr (|-|) noHtml
    . map renderResult
-- | Render a single 'Result' to HTML, dispatching on its constructor.
renderResult :: Result -> Html
renderResult (ExprType _ e t err)
    = showRes e "::" t $ map mkBott err
renderResult (TypeKind e t err)
    = showRes e "::" t $ map mkBott err
-- Search hits; a trailing "..." marks a truncated result list.
renderResult (SearchResults b l)
    = foldr (|-|) noHtml $ map (showCode_ "search") l ++ [toHtml ("..." :: String) | b]
renderResult (Error _ s)
    = showRes "" "" "" [showLines s]
-- A message, optionally followed by a nested result.
renderResult (Message s r)
    = toHtml s |-| maybe noHtml renderResult r
renderResult (Comparison a x b es)
    = showRes a (showAnswer x) b (map mkBott es)
renderResult (Dia htm err)
    = showResErr htm (map mkBott err)
-- Command-line modifications have no visible rendering.
renderResult (ModifyCommandLine _)
    = noHtml
renderResult (ShowInterpreter lang limit act i prompt exp res)
    = showInterpreter lang limit act i prompt exp res
-- | Join two HTML fragments with a line break, unless either side is
-- empty (then just concatenate).
(|-|) :: Html -> Html -> Html
a |-| b
  | isNoHtml a || isNoHtml b = a +++ b
  | otherwise                = a +++ br +++ b
-- | Wrap a string in @\<code\>@ with the given CSS class (empty class
-- means no class attribute); multi-line strings additionally get a
-- @\<pre\>@ wrapper so line breaks survive.
showCode :: String -> String -> Html
showCode c x
    | isNoHtml x' = x'
    | null c = f $ thecode << x'
    | otherwise = f $ thecode ! [theclass c] << x'
  where
    x' = toHtml x
    f y | elem '\n' x = pre ! [theclass "normal"] << y
        | otherwise = y
-- | Like 'showCode' but takes pre-rendered HTML ('primHtml'), always
-- applies the class, and never wraps in @\<pre\>@.
showCode_ :: String -> String -> Html
showCode_ c x
    = thecode ! [theclass c] << primHtml x
-- | Render a @lhs OP rhs -- comment@ style result line plus errors.
-- The left side may carry a trailing @--@ comment (split off by
-- 'splitComment'); the class of the right side depends on whether the
-- operator is @::@ (a type) or not (a result).
showRes :: String -> String -> String -> [Html] -> Html
showRes e x b err
    = showResErr (p1
        +++ showCode "" (if null x || isNoHtml p1 || isNoHtml p2 then "" else " " ++ x ++ " ")
        +++ p2
        +++ showCode "comment" (if null c then c else " --" ++ c))
        err
  where
    p1 = showCode "result" a
    p2 = showCode (if x == "::" then "type" else "result") b
    (a, c) = splitComment e
-- | Append an error block (if any) after a rendered result.
--
-- NOTE(review): @me@'s argument is ignored in the non-empty case; it
-- folds the outer @err@ instead.  Harmless as called, but confirm.
showResErr :: Html -> [Html] -> Html
showResErr r err = r |-| me err
  where
    me [] = noHtml
    me x = thediv ! [theclass "error"] << foldr (|-|) noHtml err
-- | Render a string, wrapping multi-line text in @<pre>@ so the line
-- breaks are preserved.
showLines :: String -> Html
showLines e
  | '\n' `elem` e = pre ! [theclass "normal"] << toHtml e
  | otherwise     = toHtml e

-- | Render one (label, message) error pair.
mkBott :: (String, String) -> Html
mkBott (label, msg) = toHtml (" " ++ label ++ ": ") +++ showLines msg
-- | Split a trailing @--@ comment off a line; if there is none, the
-- whole input is the code part and the comment is empty.
splitComment :: String -> (String, String)
splitComment x = maybe (x, "") id (splitComment' x)
-- | Find a trailing @--@ comment, skipping over string literals so a
-- @--@ inside quotes is not treated as a comment.  Returns the code
-- part (trailing spaces stripped) and the comment text, or 'Nothing'.
splitComment' :: String -> Maybe (String, String)
splitComment' a = f [] a where
    -- 'ac' accumulates the scanned prefix in reverse.
    f ac ('-':'-':c) | isComment c = Just (reverse $ dropWhile (==' ') ac, c) -- !!!
    f ac ('"':cs) = uncurry f $ skipString ('"':ac) cs
    f ac (c:cs) = f (c:ac) cs
    f ac [] = Nothing
    -- A run of further dashes still counts as a comment opener.
    isComment ('-':c) = isComment c
    isComment (d:_) | isSymbol d = False
    isComment _ = True
    -- NOTE(review): 'isSymbol' is a stub that always returns False, so
    -- the symbol check above is dead -- e.g. "-->" would be treated as
    -- a comment.  The "!!!" markers flag this as unfinished.
    isSymbol d = False --- !!!
    -- Copy a string literal verbatim (handling \\ and \" escapes) into
    -- the accumulator, returning (new accumulator, rest of input).
    skipString a ('"':cs) = ('"':a, cs)
    skipString a ('\\':'\\':cs) = skipString ('\\':'\\':a) cs
    skipString a ('\\':'"':cs) = skipString ('"':'\\':a) cs
    skipString a (c:cs) = skipString (c:a) cs
    skipString a [] = (a, [])
-- | Render an embedded interpreter widget: a prompt, a text input of
-- the given size limit, and a div to hold the evaluation results.
--
-- NOTE(review): 'prompt' appears to encode the widget mode ('R', 'F',
-- 'A', ...) -- e.g. 'R' shows a "Solution" prompt with an empty input,
-- 'A' hides the input row entirely.  Semantics inferred from this code
-- only; confirm against the callers.
showInterpreter :: Language -> Int -> String -> String{-Id-} -> Char -> String -> [Result] -> Html
showInterpreter lang limit act i prompt exp res = indent $
    form
    ! [ theclass $ if prompt == 'R' || null exp then "interpreter" else "resetinterpreter"
      , action act ]
    << (onlyIf (prompt /= 'A')
        [ thecode ! [theclass "prompt"] << (translate lang (if prompt /= 'R' then "Test" else "Solution") ++ "> ")
        , input
          ! [ theclass "interpreter"
            , thetype "text"
            , size $ show limit
            , maxlength 1000
            , identifier $ "tarea" ++ i
            , value $ if prompt == 'R' then "" else exp
            ]
        , br
        ] ++
        [ thediv
          ! [ theclass "answer"
            , identifier $ "res" ++ i
            ] << if prompt `notElem` ['R', 'F'] then renderResults_ res else noHtml
        ])
-- | Keep the list only when the condition holds; otherwise empty.
onlyIf :: Bool -> [a] -> [a]
onlyIf cond xs = if cond then xs else []

-- | Wrap content in an indenting div.
indent :: HTML a => a -> Html
indent content = thediv ! [theclass "indent"] << content

-- | Separator marker used in serialized exchanges.
delim :: String
delim = "-----"
-- | Build a @javascript:getOne(...)@ URL for an AJAX fetch with one
-- target element.
getOne :: String -> String -> String -> String -> String
getOne c f t x =
    "javascript:getOne('c=" ++ c ++ "&f=" ++ f ++ "','" ++ t ++ "','" ++ x ++ "');"

-- | Build a @javascript:getTwo(...)@ URL for an AJAX fetch with two
-- target elements.
getTwo :: String -> String -> String -> String -> String -> String
getTwo c f t x y =
    "javascript:getTwo('c=" ++ c ++ "&f=" ++ f ++ "','" ++ t ++ "','" ++ x ++ "','" ++ y ++ "');"
| divipp/ActiveHs | Html.hs | bsd-3-clause | 4,726 | 0 | 16 | 1,270 | 1,996 | 1,038 | 958 | 126 | 10 |
{-# OPTIONS -XExistentialQuantification -XTypeSynonymInstances
-XFlexibleInstances -XDeriveDataTypeable -XOverloadedStrings #-}
module MFlow.Wai.Response where
import Network.Wai
import MFlow.Cookies
import Data.ByteString.Lazy.UTF8
import MFlow
import Data.Typeable
import Data.Monoid
import System.IO.Unsafe
import Data.Map as M
import Data.CaseInsensitive
import Network.HTTP.Types
import Control.Workflow(WFErrors(..))
--import Data.String
--import Debug.Trace
--
--(!>)= flip trace
-- | Types that can be turned into a WAI 'Response'.
class ToResponse a where
      toResponse :: a -> Response
-- | A response fragment: empty ('TRempty'), a complete response
-- ('TRespR'), or an appendable typed fragment ('TResp').
data TResp = TRempty | forall a.ToResponse a=>TRespR a | forall a.(Typeable a, ToResponse a, Monoid a) => TResp a deriving Typeable
-- | Concatenation of response fragments.
--
-- 'TRempty' is the identity; two 'TResp' fragments append when their
-- payload types match (via 'cast').  Mismatched payload types, or an
-- operand that is a complete response ('TRespR'), raise a descriptive
-- error.
--
-- Fix: the original had no 'TRempty' equations, so @mempty `mappend` x@
-- (and hence @mconcat [x]@) crashed with a pattern-match failure,
-- violating the Monoid identity laws.
instance Monoid TResp where
    mempty = TRempty
    mappend TRempty y = y
    mappend x TRempty = x
    mappend (TResp x) (TResp y)=
       case cast y of
         Just y' -> TResp $ mappend x y'
         Nothing -> error $ "fragment of type " ++ show ( typeOf y) ++ " after fragment of type " ++ show ( typeOf x)
    -- Previously an unmatched pattern (opaque crash at runtime).
    mappend _ _ = error "TResp mappend: cannot append a complete response (TRespR)"
-- | Case-fold every header name in an association list.
-- ('Prelude.map' is qualified because Data.Map is imported unqualified.)
mkParams hs = Prelude.map mkparam hs
-- | Case-fold one header name, leaving its value untouched.
mkparam (name, val) = (mk name, val)
-- | Unwrap a fragment and render its payload.
--
-- NOTE(review): 'TRempty' is not handled, so rendering an empty
-- fragment is a pattern-match failure -- confirm it can never reach
-- here.
instance ToResponse TResp where
    toResponse (TResp x)= toResponse x
    toResponse (TRespR r)= toResponse r
instance ToResponse Response where
    toResponse = id
-- | Raw bytes are served as HTML with status 200.
instance ToResponse ByteString where
    toResponse x= responseLBS status200 [mkparam contentHtml] x
instance ToResponse String where
    toResponse x= responseLBS status200 [mkparam contentHtml] $ fromString x
-- | Full 'HttpData' keeps its headers and cookies; an 'Error' becomes
-- a 404 HTML page.
instance ToResponse HttpData where
    toResponse (HttpData hs cookies x)= responseLBS status200 (mkParams ( hs <> cookieHeaders cookies)) x
    toResponse (Error str)= responseLBS status404 [("Content-Type", "text/html")] str
-- toResponse $ error "FATAL ERROR: HttpData errors should not reach here: MFlow.Forms.Response.hs "
| agocorona/MFlow | src/MFlow/Wai/Response.hs | bsd-3-clause | 1,758 | 0 | 14 | 340 | 487 | 259 | 228 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -Wno-orphans #-}
{-|
Module : AERN2.MP.Ball.Type
Description : Arbitrary precision dyadic balls
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Arbitrary precision dyadic balls
-}
module AERN2.MP.Ball.Type
(
-- * Auxiliary types
module AERN2.MP.Precision
, module AERN2.MP.Accuracy
, module AERN2.MP.Enclosure
-- * The Ball type
, MPBall(..), CanBeMPBall, mpBall, cnMPBall
, CanBeMPBallP, mpBallP, cnMPBallP
, reducePrecionIfInaccurate
-- * Ball construction/extraction functions
, fromMPFloatEndpoints
, mpBallEndpoints, fromMPBallEndpoints
)
where
import MixedTypesNumPrelude
-- import qualified Prelude as P
import qualified Numeric.CollectErrors as CN
import GHC.Generics (Generic)
import Control.DeepSeq
import qualified Data.List as List
import Text.Printf
import AERN2.Normalize
import AERN2.Norm
import AERN2.MP.Dyadic
import qualified AERN2.MP.Float as MPFloat
import AERN2.MP.Float (MPFloat, mpFloat, showMPFloat)
import AERN2.MP.Float.Operators
import AERN2.MP.Precision
import AERN2.MP.Accuracy
import AERN2.MP.ErrorBound (ErrorBound, errorBound)
import AERN2.MP.Enclosure
-- | An arbitrary-precision ball: a dyadic centre ('MPFloat') together
-- with an error radius.  The ball denotes all reals within the radius
-- of the centre.
data MPBall = MPBall
  { ball_value :: MPFloat
  , ball_error :: ErrorBound
  }
  -- { ball_value :: {-# UNPACK #-} ! MPFloat
  -- , ball_error :: {-# UNPACK #-} ! ErrorBound
  -- }
  deriving (Generic)
instance NFData MPBall
instance Show MPBall where
  show = showWithAccuracy (bits 50)
-- | Pretty-print a ball as "[centre ± radius (accuracy)]", truncating
-- the centre's digits according to the requested display accuracy.
--
-- NOTE(review): the "±" literal below appears mojibake'd ("±") in
-- this copy -- likely a UTF-8/Latin-1 transcription artefact; confirm
-- against the original source before relying on the exact output.
instance ShowWithAccuracy MPBall where
  showWithAccuracy displayAC b@(MPBall x e) =
    -- printf "[%s ± %s](prec=%s)" (show x) (showAC $ getAccuracy b) (show $ integer $ getPrecision b)
    printf "[%s ± %s%s]" (dropSomeDigits $ showMPFloat x) eDS (showAC $ getAccuracy b)
    -- "[" ++ show x ++ " ± " ++ show e ++ "](prec=" ++ (show $ integer $ getPrecision x) ++ ")"
    where
    -- Radius, shown approximately as a Double (or "0" when exact).
    eDS
      | e == 0 = "0"
      | otherwise =
        case safeConvert (dyadic e) of
          Right (eD :: Double) -> printf "~%.4g" $ eD
          _ -> ""
    -- Truncate digits after the decimal point to the display accuracy,
    -- appending "..." when digits were dropped.
    dropSomeDigits s =
      case List.findIndex (== '.') s of
        Nothing -> s
        Just ix -> withDotIx ix
      where
      withDotIx ix =
        let maxLength = ix + displayAC_n in
        let sTrimmed = take maxLength s in
        if length sTrimmed < maxLength
          then sTrimmed
          else (take (maxLength - 3) sTrimmed) <> "..."
    -- Number of decimal digits corresponding to the display accuracy
    -- (bits scaled by log10(2)).
    displayAC_n =
      case displayAC of
        Exact -> 1000000000
        NoInformation -> 0
        _ -> round $ (log (double 2)/log (double 10)) * (integer $ ac2prec displayAC)
    showAC Exact = ""
    showAC NoInformation = "(oo)"
    showAC ac = " ~2^(" ++ show (negate $ fromAccuracy ac) ++ ")"
instance CanTestIsIntegerType MPBall -- False by default
-- instance CanTestValid MPBall where
--   isValid = isFinite
-- | A ball counts as NaN exactly when it is not finite.
instance CanTestNaN MPBall where
  isNaN = not . isFinite
-- | Finite iff both centre and radius are finite; never infinite.
instance CanTestFinite MPBall where
  isInfinite = const False
  isFinite (MPBall x e) = isFinite x && isFinite (mpFloat e)
-- | Normalisation only checks validity; the precision reduction is
-- deliberately disabled (see commented call).
instance CanNormalize MPBall where
  normalize b
    | isFinite b =
        b
        -- reducePrecionIfInaccurate b
    | otherwise = error $ "invalid MPBall: " ++ show b
{-|
  Reduce the precision of the ball centre if the
  accuracy of the ball is poor.

  More precisely, reduce the precision of the centre
  so that the ulp is approximately (radius / 1024),
  unless the ulp is already lower than this.

  NOTE(review): the "Precion" typo in the name is part of the exported
  API and is kept for compatibility.
-}
reducePrecionIfInaccurate :: MPBall -> MPBall
reducePrecionIfInaccurate b@(MPBall x _) =
  case (bAcc, bNorm) of
    (Exact, _) -> b
    (_, NormZero) -> b
    _ | p_e_nb < p_x -> setPrecision p_e_nb b
    _ -> b
  where
  bAcc = getAccuracy b
  bNorm = getNormLog b
  p_x = getPrecision x
  p_e_nb = prec $ max 2 (10 + nb + fromAccuracy bAcc)
  -- Lazy partial binding: only forced (and only valid) in the branch
  -- where bNorm is known not to be NormZero.
  (NormBits nb) = bNorm
-- | Give up with a numeric-error potential when the radius exceeds
-- 1000; otherwise pass the ball through.
instance CanGiveUpIfVeryInaccurate MPBall where
  giveUpIfVeryInaccurate = (aux =<<)
    where
    aux b@(MPBall _ e)
      | e > 1000 = CN.noValueNumErrorPotential $ numErrorVeryInaccurate "MPBall" ""
      | otherwise = cn b
-- | Ball containment: the larger ball contains the smaller one iff the
-- smaller's exact (dyadic) interval lies within the larger's.
instance CanTestContains MPBall MPBall where
  contains (MPBall cLarge rLarge) (MPBall cSmall rSmall) =
      lowerLarge <= lowerSmall && upperSmall <= upperLarge
    where
      -- Exact interval endpoints, computed in Dyadic (no rounding).
      lowerLarge = dyadic cLarge - dyadic rLarge
      upperLarge = dyadic cLarge + dyadic rLarge
      lowerSmall = dyadic cSmall - dyadic rSmall
      upperSmall = dyadic cSmall + dyadic rSmall
-- Template Haskell: generate @CanTestContains MPBall t@ instances for
-- the listed scalar types.  A scalar is contained iff it lies within
-- [centre - radius, centre + radius], computed exactly in Dyadic.
$(declForTypes
  [[t| Integer |], [t| Int |], [t| Rational |], [t| Dyadic |]]
  (\ t -> [d|
    instance CanTestContains MPBall $t where
      contains (MPBall c e) x =
        l <= x && x <= r
        where
        l = cDy - eDy
        r = cDy + eDy
        cDy = dyadic c
        eDy = dyadic e
  |]))
{- ball construction/extraction functions -}
-- | View a ball as an interval with 'MPFloat' endpoints.
--
-- The directed-rounding operators from AERN2.MP.Float.Operators are
-- used so the constructed/extracted interval safely encloses the exact
-- one: '+.'/'-.'/'*.' round downward, '+^'/'-^' round upward.
instance IsInterval MPBall where
  type IntervalEndpoint MPBall = MPFloat
  fromEndpoints l u
    | u < l = fromEndpoints u l
    | otherwise =
      MPBall c (errorBound e)
      where
      -- Centre: midpoint (rounded down); radius: the larger distance
      -- to an endpoint (rounded up), so [l,u] is covered.
      c = (l +. u) *. (mpFloat $ dyadic 0.5)
      e = (u -^ c) `max` (c -^ l)
  endpoints (MPBall x e) = (l, u)
    where
    eFl = mpFloat e
    l = x -. eFl
    u = x +^ eFl
fromMPFloatEndpoints :: MPFloat -> MPFloat -> MPBall
fromMPFloatEndpoints = fromEndpoints
-- | Build a ball from two balls taken as interval endpoints.
fromMPBallEndpoints :: MPBall -> MPBall -> MPBall
fromMPBallEndpoints = fromEndpointsAsIntervals
-- | Extract the endpoints of a ball as (exact) balls.
mpBallEndpoints :: MPBall -> (MPBall, MPBall)
mpBallEndpoints = endpointsAsIntervals
-- | Centre/radius view of a ball; the centre is exact (radius 0).
instance IsBall MPBall where
  type CentreType MPBall = Dyadic
  centre (MPBall cMP _e) = dyadic cMP
  centreAsBallAndRadius x = (cB,e)
    where
    (MPBall cMP e) = x
    cB = MPBall cMP (errorBound 0)
  radius (MPBall _ e) = e
  updateRadius updateFn (MPBall c e) = MPBall c (updateFn e)
{--- constructing a ball with a given precision ---}
type CanBeMPBallP t = (ConvertibleWithPrecision t MPBall)
-- | Convert a value to a ball at the given precision.
mpBallP :: (CanBeMPBallP t) => Precision -> t -> MPBall
mpBallP = convertP
-- | 'mpBallP' lifted over collected-errors values.
cnMPBallP :: (CanBeMPBallP a) => Precision -> CN a -> CN MPBall
cnMPBallP p = fmap (mpBallP p)
{--- constructing an exact ball ---}
type CanBeMPBall t = ConvertibleExactly t MPBall
-- | Convert a value to a ball exactly (radius 0).
mpBall :: (CanBeMPBall t) => t -> MPBall
mpBall = convertExactly
-- | 'mpBall' lifted over collected-errors values.
cnMPBall :: (CanBeMPBall a) => CN a -> CN MPBall
cnMPBall = fmap mpBall
{-- extracting approximate information about a ball --}
-- | The ball's accuracy is determined by its radius alone.
instance HasAccuracy MPBall where
  getAccuracy = getAccuracy . ball_error
-- | Norm of the magnitude upper bound of the ball.
instance HasNorm MPBall where
  getNormLog ball = getNormLog boundMP
    where
    (_, MPBall boundMP _) = mpBallEndpoints $ absRaw ball
-- | An approximation of the centre at the requested accuracy, plus a
-- flag saying whether the ball itself fails to reach that accuracy.
-- If the centre is within the radius of an integer, that integer is
-- returned instead.
instance HasApproximate MPBall where
  type Approximate MPBall = (MPFloat, Bool)
  getApproximate ac b@(MPBall x e) =
    (approx, isAccurate)
    where
    isAccurate = getAccuracy b < ac
    approx
      | closeToN = n
      | otherwise = MPFloat.ceduCentre $ MPFloat.setPrecisionCEDU (prec (fromAccuracy ac)) x
      where
      n = mpFloat $ round $ rational x
      closeToN = ((abs $ x -^ n) <= e)
instance HasPrecision MPBall where
  getPrecision = getPrecision . ball_value
-- | Changing the centre's precision: when lowering it, the rounding
-- error of the conversion is absorbed into the radius.
instance CanSetPrecision MPBall where
  setPrecision p (MPBall x e)
    | p >= pPrev = MPBall xC e
    | otherwise = MPBall xC (e + (xErr))
    where
    pPrev = MPFloat.getPrecision x
    (xC, xErr) = MPFloat.ceduCentreErr $ MPFloat.setPrecisionCEDU p x
{- negation & abs -}
-- | Negate the centre; the radius is symmetric so it is unchanged.
instance CanNeg MPBall where
  negate (MPBall x e) = MPBall (-x) e
instance CanAbs MPBall where
  abs = normalize . absRaw
-- | Absolute value without normalisation.  When the ball straddles
-- zero the result is the interval [0, max(-l, r)]; otherwise the ball
-- (or its negation) is returned unchanged.
absRaw :: MPBall -> MPBall
absRaw b
  | l < 0 && 0 < r =
    fromEndpoints (mpFloat 0) (max (-l) r)
  | 0 <= l = b
  | otherwise = -b
  where
  (l,r) = endpoints b
| michalkonecny/aern2 | aern2-mp/src/AERN2/MP/Ball/Type.hs | bsd-3-clause | 7,709 | 97 | 18 | 1,963 | 2,013 | 1,091 | 922 | -1 | -1 |
module System.Termutils.Xid (
getXid
) where
import Graphics.UI.Gtk
import Foreign
import Foreign.C.Types
import Unsafe.Coerce ( unsafeCoerce )
-- | C helper returning the XID of a GTK window (see get_xid in the
-- accompanying C source).
foreign import ccall "get_xid"
  c_get_xid :: Ptr Window -> CULong
-- | Extract the X11 window id (XID) from a GTK 'Window'.
--
-- NOTE(review): relies on 'unsafeCoerce' to view the opaque GTK object
-- as a @ForeignPtr Window@; this assumes gtk2hs represents objects as
-- a wrapper around a ForeignPtr -- confirm against the gtk2hs version
-- in use.
getXid :: Window -> IO Integer
getXid gtkWindow = do
  let ptrWin = unsafeCoerce gtkWindow :: ForeignPtr Window
  withForeignPtr ptrWin $ \realPointer -> do
    return $ fromIntegral $ c_get_xid realPointer
| dagle/hermite | src/System/Termutils/Xid.hs | bsd-3-clause | 444 | 0 | 12 | 93 | 123 | 65 | 58 | 13 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.Rpc (testRpc) where
import Test.QuickCheck
import Test.QuickCheck.Monadic (monadicIO, run, stop)
import Control.Applicative
import qualified Data.Map as Map
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.Objects (genInst)
import qualified Ganeti.Rpc as Rpc
import qualified Ganeti.Objects as Objects
import qualified Ganeti.Types as Types
import qualified Ganeti.JSON as JSON
import Ganeti.Types
-- | Console-info calls are built from generated per-instance params.
instance Arbitrary Rpc.RpcCallInstanceConsoleInfo where
  arbitrary = Rpc.RpcCallInstanceConsoleInfo <$> genConsoleInfoCallParams
-- | Arbitrary payloads, compressed with the production encoder.
instance Arbitrary Rpc.Compressed where
  arbitrary = Rpc.toCompressed <$> arbitrary
-- | Generate one storage unit: a raw (type, key) pair augmented with
-- arbitrary exclusive-storage parameters.
genStorageUnit :: Gen StorageUnit
genStorageUnit = do
  sType <- arbitrary
  sKey <- genName
  sEs <- arbitrary
  return $ addParamsToStorageUnit sEs (SURaw sType sKey)

-- | A short list (0 to 5 elements) of storage units.
genStorageUnits :: Gen [StorageUnit]
genStorageUnits = choose (0, 5) >>= \n -> vectorOf n genStorageUnit
-- | Map from node UUIDs to their storage units (0 to 5 nodes).
genStorageUnitMap :: Gen (Map.Map String [StorageUnit])
genStorageUnitMap = do
  n <- choose (0, 5)
  uuids <- vectorOf n genName
  unitLists <- vectorOf n genStorageUnits
  return . Map.fromList $ zip uuids unitLists

-- FIXME: Generate more interesting hvparams
-- | Generate Hvparams (currently always the empty container).
genHvParams :: Gen Objects.HvParams
genHvParams = pure (JSON.GenericContainer Map.empty)
-- | Generate hypervisor specifications to be used for the NodeInfo call
-- (0..5 hypervisors, each paired with generated parameters).
genHvSpecs :: Gen [(Types.Hypervisor, Objects.HvParams)]
genHvSpecs = do
  nHvs   <- choose (0, 5)
  hvs    <- vectorOf nHvs arbitrary
  params <- vectorOf nHvs genHvParams
  return (zip hvs params)
instance Arbitrary Rpc.RpcCallAllInstancesInfo where
  arbitrary = fmap Rpc.RpcCallAllInstancesInfo genHvSpecs

instance Arbitrary Rpc.RpcCallInstanceList where
  arbitrary = fmap Rpc.RpcCallInstanceList arbitrary

instance Arbitrary Rpc.RpcCallNodeInfo where
  arbitrary = do
    sMap  <- genStorageUnitMap
    specs <- genHvSpecs
    return (Rpc.RpcCallNodeInfo sMap specs)
-- | Generates per-instance console info params for the 'InstanceConsoleInfo'
-- call (0..3 instances, names generated before parameters).
genConsoleInfoCallParams :: Gen [(String, Rpc.InstanceConsoleInfoParams)]
genConsoleInfoCallParams = do
  nInsts <- choose (0, 3)
  zip <$> vectorOf nInsts arbitrary <*> vectorOf nInsts genInstanceConsoleInfoParams
-- | Generates parameters for the console info call, consisting of an instance
-- object, node object, 'HvParams', and 'FilledBeParams'.
genInstanceConsoleInfoParams :: Gen Rpc.InstanceConsoleInfoParams
genInstanceConsoleInfoParams = do
  inst    <- genInst
  node    <- arbitrary
  hv      <- arbitrary
  hvp     <- genHvParams
  beParams <- arbitrary
  return (Rpc.InstanceConsoleInfoParams inst node hv hvp beParams)
-- | Monadic check that, for an offline node and a call that does not support
-- offline nodes, we get a OfflineNodeError response.
runOfflineTest :: (Rpc.Rpc a b, Eq b, Show b) => a -> Property
runOfflineTest call =
  -- Only offline nodes are generated here: the RPC layer is expected to
  -- short-circuit with 'Rpc.OfflineNodeError' instead of contacting them.
  forAll (arbitrary `suchThat` Objects.nodeOffline) $ \node -> monadicIO $ do
      res <- run $ Rpc.executeRpcCall [node] call
      stop $ res ==? [(node, Left Rpc.OfflineNodeError)]
-- Offline nodes must reject each of the calls below ("noffl" = no offline
-- support); all four are thin instantiations of 'runOfflineTest'.
prop_noffl_request_allinstinfo :: Rpc.RpcCallAllInstancesInfo -> Property
prop_noffl_request_allinstinfo = runOfflineTest
prop_noffl_request_instconsinfo :: Rpc.RpcCallInstanceConsoleInfo -> Property
prop_noffl_request_instconsinfo = runOfflineTest
prop_noffl_request_instlist :: Rpc.RpcCallInstanceList -> Property
prop_noffl_request_instlist = runOfflineTest
prop_noffl_request_nodeinfo :: Rpc.RpcCallNodeInfo -> Property
prop_noffl_request_nodeinfo = runOfflineTest
-- | Test that the serialisation of 'Compressed' is idempotent.
prop_Compressed_serialisation :: Rpc.Compressed -> Property
prop_Compressed_serialisation = testSerialisation
-- Template Haskell splice collecting the properties above into the
-- exported 'testRpc' suite.
testSuite "Rpc"
  [ 'prop_noffl_request_allinstinfo
  , 'prop_noffl_request_instconsinfo
  , 'prop_noffl_request_instlist
  , 'prop_noffl_request_nodeinfo
  , 'prop_Compressed_serialisation
  ]
| kawamuray/ganeti | test/hs/Test/Ganeti/Rpc.hs | gpl-2.0 | 4,870 | 0 | 14 | 674 | 858 | 463 | 395 | 80 | 1 |
module Handler.File where
import Import hiding ((==.), on, isNothing)
import Handler.DB
import Yesod.Auth
import Data.Int
import Database.Esqueleto
import Data.Time.Clock
import System.IO (hPutStrLn, stderr)
import Network.HTTP.Types (status200, status206, status404, hRange, HeaderName)
import qualified Data.Aeson as A
import qualified Data.Text as T
import qualified System.FilePath as FP
import qualified System.Directory as D
import qualified System.Posix as SP
import qualified Network.Wai as W
import Control.Exception (tryJust)
import Control.Monad (guard)
import System.IO.Error (isDoesNotExistError)
import Data.Text.Encoding (encodeUtf8)
-- | Serve the on-disk contents of the file identified by 'FileId'.
--
-- Access control: the file row is selected only when it is linked (via
-- 'UserGroupContent') to a user group in which the authenticated user has a
-- non-deleted membership; otherwise a JSON 404 error is returned.
getFileR :: FileId -> Handler()
getFileR fileId = do
    authId <- requireAuthId
    -- Inner-most subquery: the caller's active user groups; middle subquery:
    -- file-content ids reachable through those groups.
    results <- runDB $ select $ from $
        \(f) -> do
            where_ (just (f ^. FileId) `in_`
                (subList_select $ from $ \ugc -> do
                    where_ ((ugc ^. UserGroupContentUserGroupId) `in_`
                        (subList_select $ from $ (\(ug `InnerJoin` ugi) -> do
                            on (ugi ^. UserGroupItemUserGroupId ==. ug ^. UserGroupId)
                            where_ (ugi ^. UserGroupItemUserId ==. (val authId))
                            where_ $ isNothing $ ugi ^. UserGroupItemDeletedVersionId
                            return (ug ^. UserGroupId))))
                    return $ ugc ^. UserGroupContentFileContentId))
            where_ (f ^. FileId ==. (val fileId))
            return f
    case results of
        ((Entity _ f):_) -> do
            -- Files live in the configured upload directory, named by their
            -- database id rendered as a path piece.
            uploadDir <- fmap (appUploadDir . appSettings) getYesod
            let path = FP.joinPath [
                    uploadDir,
                    (T.unpack $ toPathPiece fileId)
                ]
            sendWaiResponse $ W.responseFile status200 [] path Nothing
        _ -> notFoundError
    where notFoundError = sendResponseStatus status404 $ A.object [ "error" .= ("Requested file not found" :: Text) ]
| tlaitinen/sms | backend/Handler/File.hs | gpl-3.0 | 2,018 | 0 | 33 | 635 | 571 | 316 | 255 | 44 | 2 |
{-# OPTIONS -fno-monomorphism-restriction #-}
module GramLab.Statistics ( SetPair(..)
, Scores(..)
, Report(..)
, precision
, recall
, f_score
, f_score'
, toScores
, toReport
, getScores
)
where
import Data.Set hiding (split)
import qualified Data.List as List
import Control.Exception
import System.Random
import System
import Debug.Trace
-- | A pair of result sets: the system output ('testSet') and the gold
-- standard ('goldSet') it is evaluated against.
data SetPair a = SetPair { testSet :: Set a , goldSet :: Set a } deriving (Show)
-- | The three cardinalities precision/recall are computed from: true
-- positives, system-set size and gold-set size.
data Scores a = Scores { truePosCard :: a , testSetCard :: a , goldSetCard :: a } deriving (Show,Eq,Ord)
-- | Reduce a 'SetPair' to the three cardinalities used by the
-- precision/recall computations.
toScores :: (Num b, Ord a,Show a) => SetPair a -> Scores b
toScores pair =
    Scores { truePosCard = card (truePos pair)
           , testSetCard = card (testSet pair)
           , goldSetCard = card (goldSet pair)
           }
  where card = genericSize
-- | Elements reported by the system that are also in the gold standard.
truePos (SetPair test gold) = intersection test gold

-- | Fraction of the system's answers that are correct.
precision s = fromIntegral (truePosCard s) / fromIntegral (testSetCard s)

-- | Fraction of the gold-standard answers the system found.
recall s = fromIntegral (truePosCard s) / fromIntegral (goldSetCard s)

-- | Harmonic mean of precision and recall.
f_score s = f_score' (precision s) (recall s)
-- | Harmonic mean of the given precision and recall values.  Yields NaN
-- when both are zero (0/0 under floating-point division).
f_score' prec rec = (2 * prec * rec) / (prec + rec)
-- | 'size' of a set, generalised to any numeric result type.
genericSize s = fromIntegral (size s)
-- | Map a function over all three counts at once.
instance Functor Scores where
    fmap f (Scores a b c) = Scores (f a) (f b) (f c)
-- | Pointwise arithmetic on the three counts, so per-item score triples can
-- be accumulated with 'sum'.  Note that 'fromInteger' replicates the
-- literal into every field.
instance (Num a) => Num (Scores a) where
    (Scores a b c) + (Scores a' b' c') = Scores (a+a') (b+b') (c+c')
    (Scores a b c) - (Scores a' b' c') = Scores (a-a') (b-b') (c-c')
    (Scores a b c) * (Scores a' b' c') = Scores (a*a') (b*b') (c*c')
    abs = fmap abs
    negate = fmap negate
    signum = fmap signum
    --fromInteger i | trace (show i) False = undefined
    fromInteger i = Scores (fromInteger i) (fromInteger i) (fromInteger i)
-- | Evaluation summary: precision, recall and F-score, in that order.
data Report = Report Double Double Double deriving (Show)
--toReport scores | trace (show scores) False = undefined
-- | Compute the three evaluation metrics from raw counts.
toReport scores = Report (precision scores)
                         (recall scores)
                         (f_score scores)
-- | Pairwise scoring: for corresponding (test, gold) items, apply @result@
-- to both and score the resulting sets.  NOTE(review): 'zipWith' is only
-- partially applied here, so the two item lists are presumably supplied by
-- the caller -- confirm against call sites.
getScores result = zipWith (\t g -> toScores (SetPair (result t) (result g)))
| gchrupala/morfette | src/GramLab/Statistics.hs | bsd-2-clause | 2,523 | 0 | 12 | 874 | 820 | 436 | 384 | 46 | 1 |
-- This module can (and perhaps should) be separate into its own
-- package, it's generally useful.
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Stack.StaticBytes
( Bytes8
, Bytes16
, Bytes32
, Bytes64
, Bytes128
, DynamicBytes
, StaticBytes
, StaticBytesException (..)
, toStaticExact
, toStaticPad
, toStaticTruncate
, toStaticPadTruncate
, fromStatic
) where
import Stack.Prelude hiding (words)
import qualified Data.ByteString as B
import qualified Data.ByteString.Internal as B
import qualified Data.Vector.Primitive as VP
import qualified Data.Vector.Unboxed as VU
import qualified Data.Vector.Unboxed.Base as VU
import qualified Data.Vector.Storable as VS
import System.IO.Unsafe (unsafePerformIO)
import Foreign.Ptr
import Foreign.ForeignPtr
import Foreign.Storable
import Data.Bits
import qualified Data.Primitive.ByteArray as BA
import Data.ByteArray
-- | 8 bytes, stored in a single 'Word64'.
newtype Bytes8 = Bytes8 Word64
  deriving (Eq, Ord, Generic, NFData, Hashable, Data, Store)
instance Show Bytes8 where -- FIXME good enough?
  show (Bytes8 w) = show (fromWordsD 8 [w] :: B.ByteString)
-- Each larger size is a strict pair of the next size down, so every size
-- ultimately bottoms out in 'Word64's.
data Bytes16 = Bytes16 !Bytes8 !Bytes8
  deriving (Show, Eq, Ord, Generic, NFData, Hashable, Data, Store)
data Bytes32 = Bytes32 !Bytes16 !Bytes16
  deriving (Show, Eq, Ord, Generic, NFData, Hashable, Data, Store)
data Bytes64 = Bytes64 !Bytes32 !Bytes32
  deriving (Show, Eq, Ord, Generic, NFData, Hashable, Data, Store)
data Bytes128 = Bytes128 !Bytes64 !Bytes64
  deriving (Show, Eq, Ord, Generic, NFData, Hashable, Data, Store)
-- | Conversion to a fixed size can fail in exactly these two ways.
data StaticBytesException
  = NotEnoughBytes
  | TooManyBytes
  deriving (Show, Eq, Typeable)
instance Exception StaticBytesException
-- All lengths below are given in bytes
-- | Variable-length byte containers that can be read from and built up in
-- 64-bit chunks.
class DynamicBytes dbytes where
  -- | Length of the value, in bytes.
  lengthD :: dbytes -> Int
  -- | Yeah, it looks terrible to use a list here, but fusion should
  -- kick in
  withPeekD :: dbytes -> ((Int -> IO Word64) -> IO a) -> IO a
  -- | May throw a runtime exception if invariants are violated!
  fromWordsD :: Int -> [Word64] -> dbytes
-- | Shared 'fromWordsD' implementation for foreign-pointer backed buffers:
-- allocate @len@ bytes, poke the words in 64-bit steps, then wrap the
-- filled buffer into the result type.
--
-- 'unsafePerformIO' is benign here: the buffer is freshly allocated and
-- only escapes through the returned (pure) wrapped value.
fromWordsForeign
  :: (ForeignPtr a -> Int -> b)
  -> Int
  -> [Word64]
  -> b
fromWordsForeign wrapper len words0 = unsafePerformIO $ do
  fptr <- B.mallocByteString len
  withForeignPtr fptr $ \ptr -> do
    let loop _ [] = return ()
        loop off (w:ws) = do
          -- off counts Word64 elements, not bytes.
          pokeElemOff (castPtr ptr) off w
          loop (off + 1) ws
    loop 0 words0
    return $ wrapper fptr len
-- | Shared 'withPeekD' implementation for foreign-pointer backed buffers.
-- The reader passed to @inner@ takes a byte offset and yields the 64-bit
-- word starting there; reads entirely past the end yield 0, and a partial
-- trailing word is assembled byte-by-byte with zero padding in the high
-- bytes.
withPeekForeign
  :: (ForeignPtr a, Int, Int)
  -> ((Int -> IO Word64) -> IO b)
  -> IO b
withPeekForeign (fptr, off, len) inner =
  withForeignPtr fptr $ \ptr -> do
    let f off'
          | off' >= len = return 0
          | off' + 8 > len = do
              -- Fewer than 8 bytes remain: accumulate them one at a time.
              let loop w64 i
                    | off' + i >= len = return w64
                    | otherwise = do
                        w8 :: Word8 <- peekByteOff ptr (off + off' + i)
                        let w64' = shiftL (fromIntegral w8) (i * 8) .|. w64
                        loop w64' (i + 1)
              loop 0 0
          | otherwise = peekByteOff ptr (off + off')
    inner f
-- ByteStrings and storable vectors are foreign-pointer backed, so they
-- reuse the shared helpers above.
instance DynamicBytes B.ByteString where
  lengthD = B.length
  fromWordsD = fromWordsForeign (\fptr len -> B.fromForeignPtr fptr 0 len)
  withPeekD = withPeekForeign . B.toForeignPtr
instance word8 ~ Word8 => DynamicBytes (VS.Vector word8) where
  lengthD = VS.length
  fromWordsD = fromWordsForeign VS.unsafeFromForeignPtr0
  withPeekD = withPeekForeign . VS.unsafeToForeignPtr
-- Primitive vectors are ByteArray backed, so reading/writing goes through
-- Data.Primitive instead of Foreign pointers.
instance word8 ~ Word8 => DynamicBytes (VP.Vector word8) where
  lengthD = VP.length
  fromWordsD len words0 = unsafePerformIO $ do
    ba <- BA.newByteArray len
    let loop _ [] = do
          ba' <- BA.unsafeFreezeByteArray ba
          return $ VP.Vector 0 len ba'
        loop i (w:ws) = do
          BA.writeByteArray ba i w
          loop (i + 1) ws
    loop 0 words0
  withPeekD (VP.Vector off len ba) inner = do
    let f off'
          | off' >= len = return 0
          | off' + 8 > len = do
              let loop w64 i
                    | off' + i >= len = return w64
                    | otherwise = do
                        let w8 :: Word8 = BA.indexByteArray ba (off + off' + i)
                        let w64' = shiftL (fromIntegral w8) (i * 8) .|. w64
                        loop w64' (i + 1)
              loop 0 0
          -- NOTE(review): 'off' is a byte offset into the vector, while
          -- 'indexByteArray' at result type Word64 takes an index in 8-byte
          -- elements, so 'off + (off' `div` 8)' only looks right when off
          -- is 0 (or a multiple of 8) -- confirm with a sliced vector.
          | otherwise = return $ BA.indexByteArray ba (off + (off' `div` 8))
    inner f
-- Unboxed Word8 vectors are a newtype over primitive vectors; delegate.
instance word8 ~ Word8 => DynamicBytes (VU.Vector word8) where
  lengthD = VU.length
  fromWordsD len words = VU.V_Word8 (fromWordsD len words)
  withPeekD (VU.V_Word8 v) = withPeekD v
-- | Fixed-size byte values, decomposed into and rebuilt from 64-bit words.
class StaticBytes sbytes where
  lengthS :: proxy sbytes -> Int -- use type level literals instead?
  -- difference list
  toWordsS :: sbytes -> [Word64] -> [Word64]
  -- | Rebuild a value by reading words starting at the given byte offset.
  usePeekS :: Int -> (Int -> IO Word64) -> IO sbytes
-- Each composite instance handles its two halves at consecutive byte
-- offsets: the second half starts at half the total length.
instance StaticBytes Bytes8 where
  lengthS _ = 8
  toWordsS (Bytes8 w) = (w:)
  usePeekS off f = Bytes8 <$> f off
instance StaticBytes Bytes16 where
  lengthS _ = 16
  toWordsS (Bytes16 b1 b2) = toWordsS b1 . toWordsS b2
  usePeekS off f = Bytes16 <$> usePeekS off f <*> usePeekS (off + 8) f
instance StaticBytes Bytes32 where
  lengthS _ = 32
  toWordsS (Bytes32 b1 b2) = toWordsS b1 . toWordsS b2
  usePeekS off f = Bytes32 <$> usePeekS off f <*> usePeekS (off + 16) f
instance StaticBytes Bytes64 where
  lengthS _ = 64
  toWordsS (Bytes64 b1 b2) = toWordsS b1 . toWordsS b2
  usePeekS off f = Bytes64 <$> usePeekS off f <*> usePeekS (off + 32) f
instance StaticBytes Bytes128 where
  lengthS _ = 128
  toWordsS (Bytes128 b1 b2) = toWordsS b1 . toWordsS b2
  usePeekS off f = Bytes128 <$> usePeekS off f <*> usePeekS (off + 64) f
-- memory-safe ByteArrayAccess, implemented by materialising a ByteString.
instance ByteArrayAccess Bytes8 where
  length _ = 8
  withByteArray = withByteArrayS
instance ByteArrayAccess Bytes16 where
  length _ = 16
  withByteArray = withByteArrayS
instance ByteArrayAccess Bytes32 where
  length _ = 32
  withByteArray = withByteArrayS
instance ByteArrayAccess Bytes64 where
  length _ = 64
  withByteArray = withByteArrayS
instance ByteArrayAccess Bytes128 where
  length _ = 128
  withByteArray = withByteArrayS
-- | Expose a static value's bytes by converting to a 'ByteString' first.
withByteArrayS :: StaticBytes sbytes => sbytes -> (Ptr p -> IO a) -> IO a
withByteArrayS sbytes = withByteArray (fromStatic sbytes :: ByteString)
-- | Convert to a fixed size, failing unless the input has exactly the
-- target length.
toStaticExact
  :: forall dbytes sbytes.
     (DynamicBytes dbytes, StaticBytes sbytes)
  => dbytes
  -> Either StaticBytesException sbytes
toStaticExact dbytes
  | dynLen < statLen = Left NotEnoughBytes
  | dynLen > statLen = Left TooManyBytes
  | otherwise        = Right (toStaticPadTruncate dbytes)
  where
    dynLen  = lengthD dbytes
    statLen = lengthS (Nothing :: Maybe sbytes)
-- | Convert to a fixed size, zero-padding short input; fails only when the
-- input is longer than the target.
toStaticPad
  :: forall dbytes sbytes.
     (DynamicBytes dbytes, StaticBytes sbytes)
  => dbytes
  -> Either StaticBytesException sbytes
toStaticPad dbytes
  | lengthD dbytes > lengthS (Nothing :: Maybe sbytes) = Left TooManyBytes
  | otherwise = Right (toStaticPadTruncate dbytes)
-- | Convert to a fixed size, truncating long input; fails only when the
-- input is shorter than the target.
toStaticTruncate
  :: forall dbytes sbytes.
     (DynamicBytes dbytes, StaticBytes sbytes)
  => dbytes
  -> Either StaticBytesException sbytes
toStaticTruncate dbytes
  | lengthD dbytes < lengthS (Nothing :: Maybe sbytes) = Left NotEnoughBytes
  | otherwise = Right (toStaticPadTruncate dbytes)
-- | Total conversion: zero-pads short input and drops excess bytes of long
-- input.  'unsafePerformIO' is used because 'withPeekD'\/'usePeekS' live in
-- IO; the peek functions defined above only read from the input.
toStaticPadTruncate
  :: (DynamicBytes dbytes, StaticBytes sbytes)
  => dbytes
  -> sbytes
toStaticPadTruncate dbytes = unsafePerformIO (withPeekD dbytes (usePeekS 0))
-- | Expand a fixed-size value into any 'DynamicBytes' container by
-- flushing its word difference-list into 'fromWordsD'.
fromStatic
  :: forall dbytes sbytes.
     (DynamicBytes dbytes, StaticBytes sbytes)
  => sbytes
  -> dbytes
fromStatic sbytes = fromWordsD (lengthS (Nothing :: Maybe sbytes)) (toWordsS sbytes [])
| MichielDerhaeg/stack | src/Stack/StaticBytes.hs | bsd-3-clause | 7,656 | 0 | 27 | 1,824 | 2,555 | 1,296 | 1,259 | 219 | 3 |
{-# LANGUAGE CPP #-}
#include "MachDeps.h"
module Bench.Options (
Options(..),
ndpMain, failWith
) where
import System.Console.GetOpt
import System.IO
import System.Exit
import System.Environment
import Control.Monad ( when )
import Data.Char ( toUpper )
import GHC.IOArray ( newIOArray )
import Data.Array.Parallel.Unlifted.Distributed
-- | Options common to every DPH benchmark program.
data Options = Options { optRuns :: Int       -- ^ times to repeat each benchmark
                       , optAlloc :: Int      -- ^ bytes to preallocate (0 = none)
                       , optVerbosity :: Int  -- ^ output verbosity level
                       , optHelp :: Bool      -- ^ print usage and exit
                       }
-- | Verbosity used when @-v@ is given without an argument (and initially).
defaultVerbosity :: Int
defaultVerbosity = 1
-- | Defaults: one run, no preallocation, default verbosity, no help.
defaultOptions :: Options
defaultOptions = Options { optRuns = 1
                         , optAlloc = 0
                         , optVerbosity = defaultVerbosity
                         , optHelp = False
                         }
-- | Command-line options understood by every benchmark driver.
options :: [OptDescr (Options -> Options)]
options = [Option ['r'] ["runs"]
                  (ReqArg (\s o -> o { optRuns = read s }) "N")
                  "repeat each benchmark N times"
          ,Option ['A'] ["alloc"]
                  (ReqArg (\s o -> o { optAlloc = nbytes s }) "N")
                  "preallocate memory"
          ,Option ['v'] ["verbose"]
                  (OptArg (\r o -> o { optVerbosity = maybe defaultVerbosity read r })
                          "N")
                  "verbosity level"
          ,Option ['h'] ["help"]
                  (NoArg (\o -> o { optHelp = True }))
                  "show help screen"
          ]
  where
    -- Parse a byte count with an optional, case-insensitive K/M/G suffix.
    -- The previous version had non-exhaustive patterns, so any malformed
    -- argument died with an opaque pattern-match failure; now the offending
    -- argument is reported.
    nbytes s = case reads s of
                 [(n,"")]  -> n
                 [(n,[c])] -> case toUpper c of
                                'K' -> n * 1024
                                'M' -> n * 1024 * 1024
                                'G' -> n * 1024 * 1024 * 1024
                                _   -> badSize
                 _         -> badSize
      where
        badSize = error ("invalid size argument: " ++ show s)
-- Mapping pushes the function inside the argument-handling callbacks.
-- NOTE(review): newer versions of base define Functor for 'OptDescr' and
-- 'ArgDescr' themselves, which would clash with these local instances --
-- confirm against the supported GHC versions.
instance Functor OptDescr where
  fmap f (Option c s d h) = Option c s (fmap f d) h
instance Functor ArgDescr where
  fmap f (NoArg x) = NoArg (f x)
  fmap f (ReqArg g s) = ReqArg (f . g) s
  fmap f (OptArg g s) = OptArg (f . g) s
-- | Common driver for benchmark executables.
--
-- @ndpMain descr hdr run options' dft@ parses the command line using both
-- the shared 'options' and the benchmark-specific @options'@ (each folded
-- over a component of the @(Options, a)@ pair starting from the defaults),
-- prints usage on @-h@, and otherwise invokes @run@ on the leftover
-- arguments.
ndpMain :: String -> String
        -> (Options -> a -> [String] -> IO ())
        -> [OptDescr (a -> a)] -> a
        -> IO ()
ndpMain descr hdr run options' dft =
  do
    args <- getArgs
    case getOpt Permute opts args of
      (fs, files, []) ->
        let (os, os') = foldr ($) (defaultOptions, dft) fs
        in
        if optHelp os
          then do
                 s <- getProgName
                 putStrLn $ usageInfo ("Usage: " ++ s ++ " " ++ hdr ++ "\n"
                                       ++ descr ++ "\n") opts
          else do
                 -- Allocate (and discard) a word array of the requested
                 -- size before running -- presumably to pre-grow the heap
                 -- ahead of timing; confirm against the benchmark harness.
                 when (optAlloc os /= 0)
                   $ do
                       _ <- newIOArray (0, optAlloc os `div` SIZEOF_HSWORD) undefined
                       return ()
                 run os os' files
      (_, _, errs) -> failWith errs
  where
    -- Shared options act on the first, benchmark-specific options on the
    -- second component of the (Options, a) pair.
    opts = [fmap (\f (r,s) -> (f r, s)) d | d <- options]
        ++ [fmap (\f (r,s) -> (r, f s)) d | d <- options']
-- | Report each error on stderr, then abort with a failing exit code.
failWith :: [String] -> IO a
failWith errs = mapM_ (hPutStrLn stderr) errs >> exitFailure
| mainland/dph | icebox/examples/lib/Bench/Options.hs | bsd-3-clause | 3,001 | 0 | 23 | 1,275 | 1,016 | 548 | 468 | 76 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module FileSpec (fileSpec) where
import TestImport
-- | Spec for the directory-cloning endpoint.
fileSpec :: Spec
fileSpec = ydescribe "postCloneDiretoryR" $
    -- Description strings previously misspelled the handler name
    -- ("postCloneDirtoryR") and "directory"; they now match the
    -- 'CloneDiretoryR' route actually exercised below.
    yit "Clone a directory, and ensure the common config files can save to Node Manager" $ do
        postBody CloneDiretoryR (encode testCloneDirRequest)
        printBody >> statusIs 200
| bitemyapp/node-manager | test/FileSpec.hs | bsd-3-clause | 337 | 0 | 10 | 66 | 61 | 31 | 30 | 8 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module UnitTests.NullaryConstructors
(
nullaryConstructors
) where
import Prelude ()
import Prelude.Compat
import Data.Aeson (decode, eitherDecode, fromEncoding, Value)
import Data.Aeson.Internal (IResult (..), iparse)
import Data.Aeson.Types (Parser)
import Data.ByteString.Builder (toLazyByteString)
import Data.Maybe (fromJust)
import Encoders
import Test.HUnit ((@=?), Assertion)
import Types
import qualified Data.ByteString.Lazy.Char8 as L
-- | One assertion per (derivation mechanism, sum-encoding) combination,
-- checking that the Template Haskell ("th") and GHC.Generics ("g") code
-- paths serialise and parse the nullary constructor 'C1' identically for
-- the string, object-with-single-field, 2-element-array and tagged-object
-- encodings -- for 'Value's, 'Encoding's and parsers alike.
nullaryConstructors :: [Assertion]
nullaryConstructors =
  [ dec "\"C1\"" @=? thNullaryToJSONString C1
  , dec "\"C1\"" @=? gNullaryToJSONString C1
  , dec "{\"c1\":[]}" @=? thNullaryToJSONObjectWithSingleField C1
  , dec "{\"c1\":[]}" @=? gNullaryToJSONObjectWithSingleField C1
  , dec "[\"c1\",[]]" @=? gNullaryToJSON2ElemArray C1
  , dec "[\"c1\",[]]" @=? thNullaryToJSON2ElemArray C1
  , dec "{\"tag\":\"c1\"}" @=? thNullaryToJSONTaggedObject C1
  , dec "{\"tag\":\"c1\"}" @=? gNullaryToJSONTaggedObject C1
  , decE "\"C1\"" @=? enc (gNullaryToEncodingString C1)
  , decE "\"C1\"" @=? enc (thNullaryToEncodingString C1)
  , decE "[\"c1\",[]]" @=? enc (gNullaryToEncoding2ElemArray C1)
  , decE "[\"c1\",[]]" @=? enc (thNullaryToEncoding2ElemArray C1)
  , decE "{\"c1\":[]}" @=? enc (thNullaryToEncodingObjectWithSingleField C1)
  , decE "{\"c1\":[]}" @=? enc (gNullaryToEncodingObjectWithSingleField C1)
  , decE "{\"tag\":\"c1\"}" @=? enc (thNullaryToEncodingTaggedObject C1)
  , decE "{\"tag\":\"c1\"}" @=? enc (gNullaryToEncodingTaggedObject C1)
  , ISuccess C1 @=? parse thNullaryParseJSONTaggedObject (dec "{\"tag\":\"c1\"}")
  , ISuccess C1 @=? parse gNullaryParseJSONTaggedObject (dec "{\"tag\":\"c1\"}")
  , ISuccess C1 @=? parse thNullaryParseJSONString (dec "\"C1\"")
  , ISuccess C1 @=? parse gNullaryParseJSONString (dec "\"C1\"")
  , ISuccess C1 @=? parse thNullaryParseJSON2ElemArray (dec "[\"c1\",[]]")
  , ISuccess C1 @=? parse gNullaryParseJSON2ElemArray (dec "[\"c1\",[]]")
  , ISuccess C1 @=? parse thNullaryParseJSONObjectWithSingleField (dec "{\"c1\":[]}")
  , ISuccess C1 @=? parse gNullaryParseJSONObjectWithSingleField (dec "{\"c1\":[]}")
    -- Make sure that the old `"contents" : []' is still allowed
  , ISuccess C1 @=? parse thNullaryParseJSONTaggedObject (dec "{\"tag\":\"c1\",\"contents\":[]}")
  , ISuccess C1 @=? parse gNullaryParseJSONTaggedObject (dec "{\"tag\":\"c1\",\"contents\":[]}")
  ]
  where
    -- Round-trip an 'Encoding' through the decoder so it can be compared
    -- against an expected 'Value'.
    enc = eitherDecode . toLazyByteString . fromEncoding
    dec :: L.ByteString -> Value
    dec = fromJust . decode
    decE :: L.ByteString -> Either String Value
    decE = eitherDecode
    parse :: (a -> Parser b) -> a -> IResult b
    parse parsejson v = iparse parsejson v
| sol/aeson | tests/UnitTests/NullaryConstructors.hs | bsd-3-clause | 2,943 | 0 | 10 | 588 | 701 | 361 | 340 | 51 | 1 |
module Options.Language where
import Types
languageOptions :: [Flag]
languageOptions =
[ flag { flagName = "-fconstraint-solver-iterations=⟨n⟩"
, flagDescription =
"*default: 4.* Set the iteration limit for the type-constraint "++
"solver. Typically one iteration suffices; so please "++
"yell if you find you need to set it higher than the default. "++
"Zero means infinity."
, flagType = DynamicFlag
}
, flag { flagName = "-freduction-depth=⟨n⟩"
, flagDescription =
"*default: 200.* Set the :ref:`limit for type simplification "++
"<undecidable-instances>`. Zero means infinity."
, flagType = DynamicFlag
}
, flag { flagName = "-fcontext-stack=⟨n⟩"
, flagDescription =
"Deprecated. Use ``-freduction-depth=⟨n⟩`` instead."
, flagType = DynamicFlag
}
, flag { flagName = "-fglasgow-exts"
, flagDescription =
"Deprecated. Enable most language extensions; "++
"see :ref:`options-language` for exactly which ones."
, flagType = DynamicFlag
, flagReverse = "-fno-glasgow-exts"
}
, flag { flagName = "-firrefutable-tuples"
, flagDescription = "Make tuple pattern matching irrefutable"
, flagType = DynamicFlag
, flagReverse = "-fno-irrefutable-tuples"
}
, flag { flagName = "-fpackage-trust"
, flagDescription =
"Enable :ref:`Safe Haskell <safe-haskell>` trusted package "++
"requirement for trustworthy modules."
, flagType = DynamicFlag
}
, flag { flagName = "-ftype-function-depth=⟨n⟩"
, flagDescription = "Deprecated. Use ``-freduction-depth=⟨n⟩`` instead."
, flagType = DynamicFlag
}
, flag { flagName = "-XAllowAmbiguousTypes"
, flagDescription =
"Allow the user to write :ref:`ambiguous types <ambiguity>`, and "++
"the type inference engine to infer them."
, flagType = DynamicFlag
, flagReverse = "-XNoAllowAmbiguousTypes"
, flagSince = "7.8.1"
}
, flag { flagName = "-XArrows"
, flagDescription =
"Enable :ref:`arrow notation <arrow-notation>` extension"
, flagType = DynamicFlag
, flagReverse = "-XNoArrows"
, flagSince = "6.8.1"
}
, flag { flagName = "-XApplicativeDo"
, flagDescription =
"Enable :ref:`Applicative do-notation desugaring <applicative-do>`"
, flagType = DynamicFlag
, flagReverse = "-XNoApplicativeDo"
, flagSince = "8.0.1"
}
, flag { flagName = "-XAutoDeriveTypeable"
, flagDescription =
"As of GHC 7.10, this option is not needed, and should not be "++
"used. Previously this would automatically :ref:`derive Typeable "++
"instances for every datatype and type class declaration "++
"<deriving-typeable>`. Implies :ghc-flag:`XDeriveDataTypeable`."
, flagType = DynamicFlag
, flagReverse = "-XNoAutoDeriveTypeable"
, flagSince = "7.8.1"
}
, flag { flagName = "-XBangPatterns"
, flagDescription = "Enable :ref:`bang patterns <bang-patterns>`."
, flagType = DynamicFlag
, flagReverse = "-XNoBangPatterns"
, flagSince = "6.8.1"
}
, flag { flagName = "-XBinaryLiterals"
, flagDescription =
"Enable support for :ref:`binary literals <binary-literals>`."
, flagType = DynamicFlag
, flagReverse = "-XNoBinaryLiterals"
, flagSince = "7.10.1"
}
, flag { flagName = "-XCApiFFI"
, flagDescription =
"Enable :ref:`the CAPI calling convention <ffi-capi>`."
, flagType = DynamicFlag
, flagReverse = "-XNoCAPIFFI"
, flagSince = "7.10.1"
}
, flag { flagName = "-XConstrainedClassMethods"
, flagDescription =
"Enable :ref:`constrained class methods <class-method-types>`."
, flagType = DynamicFlag
, flagReverse = "-XNoConstrainedClassMethods"
, flagSince = "6.8.1"
}
, flag { flagName = "-XConstraintKinds"
, flagDescription =
"Enable a :ref:`kind of constraints <constraint-kind>`."
, flagType = DynamicFlag
, flagReverse = "-XNoConstraintKinds"
, flagSince = "7.4.1"
}
, flag { flagName = "-XCPP"
, flagDescription =
"Enable the :ref:`C preprocessor <c-pre-processor>`."
, flagType = DynamicFlag
, flagReverse = "-XNoCPP"
, flagSince = "6.8.1"
}
, flag { flagName = "-XDataKinds"
, flagDescription = "Enable :ref:`datatype promotion <promotion>`."
, flagType = DynamicFlag
, flagReverse = "-XNoDataKinds"
, flagSince = "7.4.1"
}
, flag { flagName = "-XDefaultSignatures"
, flagDescription =
"Enable :ref:`default signatures <class-default-signatures>`."
, flagType = DynamicFlag
, flagReverse = "-XNoDefaultSignatures"
, flagSince = "7.2.1"
}
, flag { flagName = "-XDeriveAnyClass"
, flagDescription =
"Enable :ref:`deriving for any class <derive-any-class>`."
, flagType = DynamicFlag
, flagReverse = "-XNoDeriveAnyClass"
, flagSince = "7.10.1"
}
, flag { flagName = "-XDeriveDataTypeable"
, flagDescription =
"Enable ``deriving`` for the :ref:`Data class "++
"<deriving-typeable>`. Implied by :ghc-flag:`XAutoDeriveTypeable`."
, flagType = DynamicFlag
, flagReverse = "-XNoDeriveDataTypeable"
, flagSince = "6.8.1"
}
, flag { flagName = "-XDeriveFunctor"
, flagDescription =
"Enable :ref:`deriving for the Functor class <deriving-extra>`. "++
"Implied by :ghc-flag:`XDeriveTraversable`."
, flagType = DynamicFlag
, flagReverse = "-XNoDeriveFunctor"
, flagSince = "7.10.1"
}
, flag { flagName = "-XDeriveFoldable"
, flagDescription =
"Enable :ref:`deriving for the Foldable class <deriving-extra>`. "++
"Implied by :ghc-flag:`XDeriveTraversable`."
, flagType = DynamicFlag
, flagReverse = "-XNoDeriveFoldable"
, flagSince = "7.10.1"
}
, flag { flagName = "-XDeriveGeneric"
, flagDescription =
"Enable :ref:`deriving for the Generic class <deriving-typeable>`."
, flagType = DynamicFlag
, flagReverse = "-XNoDeriveGeneric"
, flagSince = "7.2.1"
}
, flag { flagName = "-XDeriveGeneric"
, flagDescription =
"Enable :ref:`deriving for the Generic class <deriving-typeable>`."
, flagType = DynamicFlag
, flagReverse = "-XNoDeriveGeneric"
, flagSince = "7.2.1"
}
, flag { flagName = "-XDeriveLift"
, flagDescription =
"Enable :ref:`deriving for the Lift class <deriving-lift>`"
, flagType = DynamicFlag
, flagReverse = "-XNoDeriveLift"
, flagSince = "7.2.1"
}
, flag { flagName = "-XDeriveTraversable"
, flagDescription =
"Enable :ref:`deriving for the Traversable class <deriving-extra>`. "++
"Implies :ghc-flag:`XDeriveFunctor` and :ghc-flag:`XDeriveFoldable`."
, flagType = DynamicFlag
, flagReverse = "-XNoDeriveTraversable"
, flagSince = "7.10.1"
}
, flag { flagName = "-XDisambiguateRecordFields"
, flagDescription =
"Enable :ref:`record field disambiguation <disambiguate-fields>`. "++
"Implied by :ghc-flag:`XRecordWildCards`."
, flagType = DynamicFlag
, flagReverse = "-XNoDisambiguateRecordFields"
, flagSince = "6.8.1"
}
, flag { flagName = "-XEmptyCase"
, flagDescription =
"Allow :ref:`empty case alternatives <empty-case>`."
, flagType = DynamicFlag
, flagReverse = "-XNoEmptyCase"
, flagSince = "7.8.1"
}
, flag { flagName = "-XEmptyDataDecls"
, flagDescription = "Enable empty data declarations."
, flagType = DynamicFlag
, flagReverse = "-XNoEmptyDataDecls"
, flagSince = "6.8.1"
}
, flag { flagName = "-XExistentialQuantification"
, flagDescription =
"Enable :ref:`existential quantification <existential-quantification>`."
, flagType = DynamicFlag
, flagReverse = "-XNoExistentialQuantification"
, flagSince = "6.8.1"
}
, flag { flagName = "-XExplicitForAll"
, flagDescription =
"Enable :ref:`explicit universal quantification <explicit-foralls>`."++
" Implied by :ghc-flag:`XScopedTypeVariables`, :ghc-flag:`XLiberalTypeSynonyms`,"++
" :ghc-flag:`XRankNTypes` and :ghc-flag:`XExistentialQuantification`."
, flagType = DynamicFlag
, flagReverse = "-XNoExplicitForAll"
, flagSince = "6.12.1"
}
, flag { flagName = "-XExplicitNamespaces"
, flagDescription =
"Enable using the keyword ``type`` to specify the namespace of "++
"entries in imports and exports (:ref:`explicit-namespaces`). "++
"Implied by :ghc-flag:`XTypeOperators` and :ghc-flag:`XTypeFamilies`."
, flagType = DynamicFlag
, flagReverse = "-XNoExplicitNamespaces"
, flagSince = "7.6.1"
}
, flag { flagName = "-XExtendedDefaultRules"
, flagDescription =
"Use GHCi's :ref:`extended default rules <extended-default-rules>` "++
"in a normal module."
, flagType = DynamicFlag
, flagReverse = "-XNoExtendedDefaultRules"
, flagSince = "6.8.1"
}
, flag { flagName = "-XFlexibleContexts"
, flagDescription =
"Enable :ref:`flexible contexts <flexible-contexts>`. Implied by "++
":ghc-flag:`XImplicitParams`."
, flagType = DynamicFlag
, flagReverse = "-XNoFlexibleContexts"
, flagSince = "6.8.1"
}
, flag { flagName = "-XFlexibleInstances"
, flagDescription =
"Enable :ref:`flexible instances <instance-rules>`. "++
"Implies :ghc-flag:`XTypeSynonymInstances`. "++
"Implied by :ghc-flag:`XImplicitParams`."
, flagType = DynamicFlag
, flagReverse = "-XNoFlexibleInstances"
, flagSince = "6.8.1"
}
, flag { flagName = "-XForeignFunctionInterface"
, flagDescription =
"Enable :ref:`foreign function interface <ffi>`."
, flagType = DynamicFlag
, flagReverse = "-XNoForeignFunctionInterface"
, flagSince = "6.8.1"
}
, flag { flagName = "-XFunctionalDependencies"
, flagDescription =
"Enable :ref:`functional dependencies <functional-dependencies>`. "++
"Implies :ghc-flag:`XMultiParamTypeClasses`."
, flagType = DynamicFlag
, flagReverse = "-XNoFunctionalDependencies"
, flagSince = "6.8.1"
}
, flag { flagName = "-XGADTs"
, flagDescription =
"Enable :ref:`generalised algebraic data types <gadt>`. "++
"Implies :ghc-flag:`XGADTSyntax` and :ghc-flag:`XMonoLocalBinds`."
, flagType = DynamicFlag
, flagReverse = "-XNoGADTs"
, flagSince = "6.8.1"
}
, flag { flagName = "-XGADTSyntax"
, flagDescription =
"Enable :ref:`generalised algebraic data type syntax <gadt-style>`."
, flagType = DynamicFlag
, flagReverse = "-XNoGADTSyntax"
, flagSince = "7.2.1"
}
, flag { flagName = "-XGeneralizedNewtypeDeriving"
, flagDescription =
"Enable :ref:`newtype deriving <newtype-deriving>`."
, flagType = DynamicFlag
, flagReverse = "-XNoGeneralizedNewtypeDeriving"
, flagSince = "6.8.1"
}
, flag { flagName = "-XGenerics"
, flagDescription =
"Deprecated, does nothing. No longer enables "++
":ref:`generic classes <generic-classes>`. See also GHC's support "++
"for :ref:`generic programming <generic-programming>`."
, flagType = DynamicFlag
, flagReverse = "-XNoGenerics"
, flagSince = "6.8.1"
}
, flag { flagName = "-XImplicitParams"
, flagDescription =
"Enable :ref:`Implicit Parameters <implicit-parameters>`. "++
"Implies :ghc-flag:`XFlexibleContexts` and :ghc-flag:`XFlexibleInstances`."
, flagType = DynamicFlag
, flagReverse = "-XNoImplicitParams"
, flagSince = "6.8.1"
}
, flag { flagName = "-XNoImplicitPrelude"
, flagDescription =
"Don't implicitly ``import Prelude``. "++
"Implied by :ghc-flag:`XRebindableSyntax`."
, flagType = DynamicFlag
, flagReverse = "-XImplicitPrelude"
, flagSince = "6.8.1"
}
, flag { flagName = "-XImpredicativeTypes"
, flagDescription =
"Enable :ref:`impredicative types <impredicative-polymorphism>`. "++
"Implies :ghc-flag:`XRankNTypes`."
, flagType = DynamicFlag
, flagReverse = "-XNoImpredicativeTypes"
, flagSince = "6.10.1"
}
, flag { flagName = "-XIncoherentInstances"
, flagDescription =
"Enable :ref:`incoherent instances <instance-overlap>`. "++
"Implies :ghc-flag:`XOverlappingInstances`."
, flagType = DynamicFlag
, flagReverse = "-XNoIncoherentInstances"
, flagSince = "6.8.1"
}
, flag { flagName = "-XTypeFamilyDependencies"
, flagDescription =
"Enable :ref:`injective type families <injective-ty-fams>`. "++
"Implies :ghc-flag:`XTypeFamilies`."
, flagType = DynamicFlag
, flagReverse = "-XNoTypeFamilyDependencies"
, flagSince = "8.0.1"
}
, flag { flagName = "-XInstanceSigs"
, flagDescription =
"Enable :ref:`instance signatures <instance-sigs>`."
, flagType = DynamicFlag
, flagReverse = "-XNoInstanceSigs"
, flagSince = "7.10.1"
}
, flag { flagName = "-XInterruptibleFFI"
, flagDescription = "Enable interruptible FFI."
, flagType = DynamicFlag
, flagReverse = "-XNoInterruptibleFFI"
, flagSince = "7.2.1"
}
, flag { flagName = "-XKindSignatures"
, flagDescription =
"Enable :ref:`kind signatures <kinding>`. "++
"Implied by :ghc-flag:`XTypeFamilies` and :ghc-flag:`XPolyKinds`."
, flagType = DynamicFlag
, flagReverse = "-XNoKindSignatures"
, flagSince = "6.8.1"
}
, flag { flagName = "-XLambdaCase"
, flagDescription =
"Enable :ref:`lambda-case expressions <lambda-case>`."
, flagType = DynamicFlag
, flagReverse = "-XNoLambdaCase"
, flagSince = "7.6.1"
}
, flag { flagName = "-XLiberalTypeSynonyms"
, flagDescription =
"Enable :ref:`liberalised type synonyms <type-synonyms>`."
, flagType = DynamicFlag
, flagReverse = "-XNoLiberalTypeSynonyms"
, flagSince = "6.8.1"
}
, flag { flagName = "-XMagicHash"
, flagDescription =
"Allow ``#`` as a :ref:`postfix modifier on identifiers <magic-hash>`."
, flagType = DynamicFlag
, flagReverse = "-XNoMagicHash"
, flagSince = "6.8.1"
}
, flag { flagName = "-XMonadComprehensions"
, flagDescription =
"Enable :ref:`monad comprehensions <monad-comprehensions>`."
, flagType = DynamicFlag
, flagReverse = "-XNoMonadComprehensions"
, flagSince = "7.2.1"
}
, flag { flagName = "-XMonoLocalBinds"
, flagDescription =
"Enable :ref:`do not generalise local bindings <mono-local-binds>`. "++
"Implied by :ghc-flag:`XTypeFamilies` and :ghc-flag:`XGADTs`."
, flagType = DynamicFlag
, flagReverse = "-XNoMonoLocalBinds"
, flagSince = "6.12.1"
}
, flag { flagName = "-XNoMonomorphismRestriction"
, flagDescription =
"Disable the :ref:`monomorphism restriction <monomorphism>`."
, flagType = DynamicFlag
, flagReverse = "-XMonomorphismRestriction"
, flagSince = "6.8.1"
}
, flag { flagName = "-XMultiParamTypeClasses"
, flagDescription =
"Enable :ref:`multi parameter type classes "++
"<multi-param-type-classes>`. Implied by "++
":ghc-flag:`XFunctionalDependencies`."
, flagType = DynamicFlag
, flagReverse = "-XNoMultiParamTypeClasses"
, flagSince = "6.8.1"
}
, flag { flagName = "-XMultiWayIf"
, flagDescription =
"Enable :ref:`multi-way if-expressions <multi-way-if>`."
, flagType = DynamicFlag
, flagReverse = "-XNoMultiWayIf"
, flagSince = "7.6.1"
}
, flag { flagName = "-XNamedFieldPuns"
, flagDescription = "Enable :ref:`record puns <record-puns>`."
, flagType = DynamicFlag
, flagReverse = "-XNoNamedFieldPuns"
, flagSince = "6.10.1"
}
, flag { flagName = "-XNamedWildCards"
, flagDescription = "Enable :ref:`named wildcards <named-wildcards>`."
, flagType = DynamicFlag
, flagReverse = "-XNoNamedWildCards"
, flagSince = "7.10.1"
}
, flag { flagName = "-XNegativeLiterals"
, flagDescription =
"Enable support for :ref:`negative literals <negative-literals>`."
, flagType = DynamicFlag
, flagReverse = "-XNoNegativeLiterals"
, flagSince = "7.8.1"
}
, flag { flagName = "-XNoNPlusKPatterns"
, flagDescription = "Disable support for ``n+k`` patterns."
, flagType = DynamicFlag
, flagReverse = "-XNPlusKPatterns"
, flagSince = "6.12.1"
}
, flag { flagName = "-XNullaryTypeClasses"
, flagDescription =
"Deprecated, does nothing. :ref:`nullary (no parameter) type "++
"classes <nullary-type-classes>` are now enabled using "++
":ghc-flag:`XMultiParamTypeClasses`."
, flagType = DynamicFlag
, flagReverse = "-XNoNullaryTypeClasses"
, flagSince = "7.8.1"
}
, flag { flagName = "-XNumDecimals"
, flagDescription =
"Enable support for 'fractional' integer literals."
, flagType = DynamicFlag
, flagReverse = "-XNoNumDecimals"
, flagSince = "7.8.1"
}
, flag { flagName = "-XOverlappingInstances"
, flagDescription =
"Enable :ref:`overlapping instances <instance-overlap>`."
, flagType = DynamicFlag
, flagReverse = "-XNoOverlappingInstances"
, flagSince = "6.8.1"
}
, flag { flagName = "-XOverloadedLists"
, flagDescription =
"Enable :ref:`overloaded lists <overloaded-lists>`."
, flagType = DynamicFlag
, flagReverse = "-XNoOverloadedLists"
, flagSince = "7.8.1"
}
, flag { flagName = "-XOverloadedStrings"
, flagDescription =
"Enable :ref:`overloaded string literals <overloaded-strings>`."
, flagType = DynamicFlag
, flagReverse = "-XNoOverloadedStrings"
, flagSince = "6.8.1"
}
, flag { flagName = "-XPackageImports"
, flagDescription =
"Enable :ref:`package-qualified imports <package-imports>`."
, flagType = DynamicFlag
, flagReverse = "-XNoPackageImports"
, flagSince = "6.10.1"
}
, flag { flagName = "-XParallelArrays"
, flagDescription =
"Enable parallel arrays. Implies :ghc-flag:`XParallelListComp`."
, flagType = DynamicFlag
, flagReverse = "-XNoParallelArrays"
, flagSince = "7.4.1"
}
, flag { flagName = "-XParallelListComp"
, flagDescription =
"Enable :ref:`parallel list comprehensions "++
"<parallel-list-comprehensions>`. "++
"Implied by :ghc-flag:`XParallelArrays`."
, flagType = DynamicFlag
, flagReverse = "-XNoParallelListComp"
, flagSince = "6.8.1"
}
, flag { flagName = "-XPartialTypeSignatures"
, flagDescription =
"Enable :ref:`partial type signatures <partial-type-signatures>`."
, flagType = DynamicFlag
, flagReverse = "-XNoPartialTypeSignatures"
, flagSince = "7.10.1"
}
, flag { flagName = "-XPatternGuards"
, flagDescription = "Enable :ref:`pattern guards <pattern-guards>`."
, flagType = DynamicFlag
, flagReverse = "-XNoPatternGuards"
, flagSince = "6.8.1"
}
, flag { flagName = "-XPatternSynonyms"
, flagDescription =
"Enable :ref:`pattern synonyms <pattern-synonyms>`."
, flagType = DynamicFlag
, flagReverse = "-XNoPatternSynonyms"
, flagSince = "7.10.1"
}
, flag { flagName = "-XPolyKinds"
, flagDescription =
"Enable :ref:`kind polymorphism <kind-polymorphism>`. "++
"Implies :ghc-flag:`XKindSignatures`."
, flagType = DynamicFlag
, flagReverse = "-XNoPolyKinds"
, flagSince = "7.4.1"
}
, flag { flagName = "-XPolymorphicComponents"
, flagDescription =
"Enable :ref:`polymorphic components for data constructors "++
"<universal-quantification>`. Synonym for :ghc-flag:`XRankNTypes`."
, flagType = DynamicFlag
, flagReverse = "-XNoPolymorphicComponents"
, flagSince = "6.8.1"
}
, flag { flagName = "-XPostfixOperators"
, flagDescription =
"Enable :ref:`postfix operators <postfix-operators>`."
, flagType = DynamicFlag
, flagReverse = "-XNoPostfixOperators"
, flagSince = "7.10.1"
}
, flag { flagName = "-XQuasiQuotes"
, flagDescription = "Enable :ref:`quasiquotation <th-quasiquotation>`."
, flagType = DynamicFlag
, flagReverse = "-XNoQuasiQuotes"
, flagSince = "6.10.1"
}
, flag { flagName = "-XRank2Types"
, flagDescription =
"Enable :ref:`rank-2 types <universal-quantification>`. "++
"Synonym for :ghc-flag:`XRankNTypes`."
, flagType = DynamicFlag
, flagReverse = "-XNoRank2Types"
, flagSince = "6.8.1"
}
, flag { flagName = "-XRankNTypes"
, flagDescription =
"Enable :ref:`rank-N types <universal-quantification>`. "++
"Implied by :ghc-flag:`XImpredicativeTypes`."
, flagType = DynamicFlag
, flagReverse = "-XNoRankNTypes"
, flagSince = "6.8.1"
}
, flag { flagName = "-XRebindableSyntax"
, flagDescription =
"Employ :ref:`rebindable syntax <rebindable-syntax>`. "++
"Implies :ghc-flag:`XNoImplicitPrelude`."
, flagType = DynamicFlag
, flagReverse = "-XNoRebindableSyntax"
, flagSince = "7.0.1"
}
, flag { flagName = "-XRecordWildCards"
, flagDescription =
"Enable :ref:`record wildcards <record-wildcards>`. "++
"Implies :ghc-flag:`XDisambiguateRecordFields`."
, flagType = DynamicFlag
, flagReverse = "-XNoRecordWildCards"
, flagSince = "6.8.1"
}
, flag { flagName = "-XRecursiveDo"
, flagDescription =
"Enable :ref:`recursive do (mdo) notation <recursive-do-notation>`."
, flagType = DynamicFlag
, flagReverse = "-XNoRecursiveDo"
, flagSince = "6.8.1"
}
, flag { flagName = "-XRelaxedPolyRec"
, flagDescription =
"*(deprecated)* Relaxed checking for :ref:`mutually-recursive "++
"polymorphic functions <typing-binds>`."
, flagType = DynamicFlag
, flagReverse = "-XNoRelaxedPolyRec"
, flagSince = "6.8.1"
}
, flag { flagName = "-XRoleAnnotations"
, flagDescription =
"Enable :ref:`role annotations <role-annotations>`."
, flagType = DynamicFlag
, flagReverse = "-XNoRoleAnnotations"
, flagSince = "7.10.1"
}
, flag { flagName = "-XSafe"
, flagDescription =
"Enable the :ref:`Safe Haskell <safe-haskell>` Safe mode."
, flagType = DynamicFlag
, flagSince = "7.2.1"
}
, flag { flagName = "-XScopedTypeVariables"
, flagDescription =
"Enable :ref:`lexically-scoped type variables "++
"<scoped-type-variables>`."
, flagType = DynamicFlag
, flagReverse = "-XNoScopedTypeVariables"
, flagSince = "6.8.1"
}
, flag { flagName = "-XStandaloneDeriving"
, flagDescription =
"Enable :ref:`standalone deriving <stand-alone-deriving>`."
, flagType = DynamicFlag
, flagReverse = "-XNoStandaloneDeriving"
, flagSince = "6.8.1"
}
, flag { flagName = "-XStrictData"
, flagDescription =
"Enable :ref:`default strict datatype fields <strict-data>`."
, flagType = DynamicFlag
, flagReverse = "-XNoStrictData"
}
, flag { flagName = "-XTemplateHaskell"
, flagDescription =
"Enable :ref:`Template Haskell <template-haskell>`."
, flagType = DynamicFlag
, flagReverse = "-XNoTemplateHaskell"
, flagSince = "6.8.1"
}
, flag { flagName = "-XTemplateHaskellQuotes"
, flagDescription = "Enable quotation subset of "++
":ref:`Template Haskell <template-haskell>`."
, flagType = DynamicFlag
, flagReverse = "-XNoTemplateHaskellQuotes"
, flagSince = "8.0.1"
}
, flag { flagName = "-XNoTraditionalRecordSyntax"
, flagDescription =
"Disable support for traditional record syntax "++
"(as supported by Haskell 98) ``C {f = x}``"
, flagType = DynamicFlag
, flagReverse = "-XTraditionalRecordSyntax"
, flagSince = "7.4.1"
}
, flag { flagName = "-XTransformListComp"
, flagDescription =
"Enable :ref:`generalised list comprehensions "++
"<generalised-list-comprehensions>`."
, flagType = DynamicFlag
, flagReverse = "-XNoTransformListComp"
, flagSince = "6.10.1"
}
, flag { flagName = "-XTrustworthy"
, flagDescription =
"Enable the :ref:`Safe Haskell <safe-haskell>` Trustworthy mode."
, flagType = DynamicFlag
, flagSince = "7.2.1"
}
, flag { flagName = "-XTupleSections"
, flagDescription = "Enable :ref:`tuple sections <tuple-sections>`."
, flagType = DynamicFlag
, flagReverse = "-XNoTupleSections"
, flagSince = "7.10.1"
}
, flag { flagName = "-XTypeFamilies"
, flagDescription =
"Enable :ref:`type families <type-families>`. "++
"Implies :ghc-flag:`XExplicitNamespaces`, :ghc-flag:`XKindSignatures`, "++
"and :ghc-flag:`XMonoLocalBinds`."
, flagType = DynamicFlag
, flagReverse = "-XNoTypeFamilies"
, flagSince = "6.8.1"
}
, flag { flagName = "-XTypeOperators"
, flagDescription =
"Enable :ref:`type operators <type-operators>`. "++
"Implies :ghc-flag:`XExplicitNamespaces`."
, flagType = DynamicFlag
, flagReverse = "-XNoTypeOperators"
, flagSince = "6.8.1"
}
, flag { flagName = "-XTypeSynonymInstances"
, flagDescription =
"Enable :ref:`type synonyms in instance heads "++
"<flexible-instance-head>`. Implied by :ghc-flag:`XFlexibleInstances`."
, flagType = DynamicFlag
, flagReverse = "-XNoTypeSynonymInstances"
, flagSince = "6.8.1"
}
, flag { flagName = "-XUnboxedTuples"
, flagDescription = "Enable :ref:`unboxed tuples <unboxed-tuples>`."
, flagType = DynamicFlag
, flagReverse = "-XNoUnboxedTuples"
, flagSince = "6.8.1"
}
, flag { flagName = "-XUndecidableInstances"
, flagDescription =
"Enable :ref:`undecidable instances <undecidable-instances>`."
, flagType = DynamicFlag
, flagReverse = "-XNoUndecidableInstances"
, flagSince = "6.8.1"
}
, flag { flagName = "-XUnicodeSyntax"
, flagDescription = "Enable :ref:`unicode syntax <unicode-syntax>`."
, flagType = DynamicFlag
, flagReverse = "-XNoUnicodeSyntax"
, flagSince = "6.8.1"
}
, flag { flagName = "-XUnliftedFFITypes"
, flagDescription = "Enable unlifted FFI types."
, flagType = DynamicFlag
, flagReverse = "-XNoUnliftedFFITypes"
, flagSince = "6.8.1"
}
, flag { flagName = "-XUnsafe"
, flagDescription =
"Enable :ref:`Safe Haskell <safe-haskell>` Unsafe mode."
, flagType = DynamicFlag
, flagSince = "7.4.1"
}
, flag { flagName = "-XViewPatterns"
, flagDescription = "Enable :ref:`view patterns <view-patterns>`."
, flagType = DynamicFlag
, flagReverse = "-XNoViewPatterns"
, flagSince = "6.10.1"
}
]
| oldmanmike/ghc | utils/mkUserGuidePart/Options/Language.hs | bsd-3-clause | 29,817 | 0 | 10 | 9,276 | 3,751 | 2,429 | 1,322 | 644 | 1 |
{-|
Module : Idris.Error
Description : Utilities to deal with error reporting.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Error where
import Idris.AbsSyntax
import Idris.Core.Constraints
import Idris.Core.Evaluate (ctxtAlist)
import Idris.Core.TT
import Idris.Core.Typecheck
import Idris.Delaborate
import Idris.Output
import Prelude hiding (catch)
import Control.Monad (when)
import Control.Monad.State.Strict
import Data.Char
import qualified Data.Foldable as Foldable
import Data.List (intercalate, isPrefixOf)
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Traversable as Traversable
import Data.Typeable
import System.Console.Haskeline
import System.Console.Haskeline.MonadException
import System.IO.Error (ioeGetErrorString, isUserError)
-- | Run the universe-consistency check over all constraints collected in
-- the elaborator state.  Skipped entirely when type-in-type is enabled.
-- A failure is reported as a warning at the error's own source span
-- rather than aborting the session.
iucheck :: Idris ()
iucheck = do tit <- typeInType
             ist <- getIState
             let cs = idris_constraints ist
             -- high log level: constraint counts are only debugging output
             logLvl 7 $ "ALL CONSTRAINTS: " ++ show (length (S.toList cs))
             when (not tit) $
                (tclift $ ucheck (idris_constraints ist)) `idrisCatch`
                  (\e -> do let fc = getErrSpan e
                            setErrSpan fc
                            iWarn fc $ pprintErr ist e)
-- | Render an error message using the pretty-printing configuration of
-- the current 'IState'.
showErr :: Err -> Idris String
showErr err = do
    ist <- getIState
    return (pshow ist err)
-- | Produce a human-readable message for an 'IOError'.  Errors raised
-- with 'userError' are unwrapped to their original message string; any
-- other I/O error is rendered with 'show'.
report :: IOError -> String
report err =
    if isUserError err
       then ioeGetErrorString err
       else show err
-- | Run an 'Idris' action, invoking the handler on any raised 'Err'.
-- Alias for 'catchError' kept under the historical local name.
idrisCatch :: Idris a -> (Err -> Idris a) -> Idris a
idrisCatch = catchError
-- | Record the error's source span in the state and print it as a warning.
-- 'ProofSearchFail' is only a bookkeeping wrapper, so it is removed before
-- the error is examined; errors without an 'At' location are reported at
-- 'emptyFC' using whatever span 'getErrSpan' can recover.
setAndReport :: Err -> Idris ()
setAndReport e = do ist <- getIState
                    case (unwrap e) of
                      At fc e -> do setErrSpan fc
                                    iWarn fc $ pprintErr ist e
                      _ -> do setErrSpan (getErrSpan e)
                              iWarn emptyFC $ pprintErr ist e
  where unwrap (ProofSearchFail e) = e -- remove bookkeeping constructor
        unwrap e = e
-- | Abort the current 'Idris' computation with a plain textual message.
ifail :: String -> Idris a
ifail msg = throwError (Msg msg)
-- | Abort the current 'Idris' computation with a structured error value.
ierror :: Err -> Idris a
ierror = throwError
-- | Lift a 'TC' computation into the 'Idris' monad.  For located errors
-- ('At', 'UniverseError') the source span is recorded in the state before
-- the error is rethrown unchanged.
tclift :: TC a -> Idris a
tclift (OK v) = return v
-- Use wildcards for the unexamined payloads; binding them triggers
-- -Wunused-matches and invites accidental shadowing.
tclift (Error err@(At fc _)) = do setErrSpan fc; throwError err
tclift (Error err@(UniverseError fc _ _ _ _)) = do setErrSpan fc; throwError err
tclift (Error err) = throwError err
-- | Like 'tclift', but always record the supplied location 'fc' as the
-- error span, and wrap location-free errors in 'At' so the position is
-- not lost downstream.
tcliftAt :: FC -> TC a -> Idris a
-- The location is irrelevant for success; wildcard the unused bindings
-- ('fc' here, the 'At' payload below) to keep -Wunused-matches clean.
tcliftAt _  (OK v) = return v
tcliftAt fc (Error err@(At _ _)) = do setErrSpan fc; throwError err
tcliftAt fc (Error err@(UniverseError _ _ _ _ _)) = do setErrSpan fc; throwError err
tcliftAt fc (Error err) = do setErrSpan fc; throwError (At fc err)
-- | Try the first 'TC' computation; if it fails, discard its error and
-- lift the second computation instead.
tctry :: TC a -> TC a -> Idris a
tctry tc1 tc2
    = case tc1 of
           OK v    -> return v
           -- the first error is deliberately dropped; a wildcard makes
           -- that explicit and silences -Wunused-matches
           Error _ -> tclift tc2
-- | Extract the source span carried by an error, falling back to
-- 'emptyFC' when the error has no attached location.
getErrSpan :: Err -> FC
getErrSpan err = case err of
    At fc _                  -> fc
    UniverseError fc _ _ _ _ -> fc
    _                        -> emptyFC
--------------------------------------------------------------------
-- Specific warnings not included in elaborator
--------------------------------------------------------------------
-- | Issue a warning on "with"-terms whose namespace is empty or nonexistent.
--
-- This is a structural traversal of 'PTerm': every case simply recurses
-- into subterms (or does nothing for leaves).  The only case that does
-- real work is 'PDisamb', which checks each requested namespace against
-- the global context and raises an error when nothing lives there.
warnDisamb :: IState -> PTerm -> Idris ()
warnDisamb ist (PQuote _) = return ()
warnDisamb ist (PRef _ _ _) = return ()
warnDisamb ist (PInferRef _ _ _) = return ()
warnDisamb ist (PPatvar _ _) = return ()
warnDisamb ist (PLam _ _ _ t b) = warnDisamb ist t >> warnDisamb ist b
warnDisamb ist (PPi _ _ _ t b) = warnDisamb ist t >> warnDisamb ist b
warnDisamb ist (PLet _ _ _ x t b) = warnDisamb ist x >> warnDisamb ist t >> warnDisamb ist b
warnDisamb ist (PTyped x t) = warnDisamb ist x >> warnDisamb ist t
warnDisamb ist (PApp _ t args) = warnDisamb ist t >>
                                 mapM_ (warnDisamb ist . getTm) args
warnDisamb ist (PWithApp _ t a) = warnDisamb ist t >> warnDisamb ist a
warnDisamb ist (PAppBind _ f args) = warnDisamb ist f >>
                                     mapM_ (warnDisamb ist . getTm) args
warnDisamb ist (PMatchApp _ _) = return ()
warnDisamb ist (PCase _ tm cases) = warnDisamb ist tm >>
                                    mapM_ (\(x,y)-> warnDisamb ist x >> warnDisamb ist y) cases
warnDisamb ist (PIfThenElse _ c t f) = mapM_ (warnDisamb ist) [c, t, f]
warnDisamb ist (PTrue _ _) = return ()
warnDisamb ist (PResolveTC _) = return ()
warnDisamb ist (PRewrite _ _ x y z) = warnDisamb ist x >> warnDisamb ist y >>
                                      Foldable.mapM_ (warnDisamb ist) z
warnDisamb ist (PPair _ _ _ x y) = warnDisamb ist x >> warnDisamb ist y
warnDisamb ist (PDPair _ _ _ x y z) = warnDisamb ist x >> warnDisamb ist y >> warnDisamb ist z
warnDisamb ist (PAlternative _ _ tms) = mapM_ (warnDisamb ist) tms
warnDisamb ist (PHidden tm) = warnDisamb ist tm
warnDisamb ist (PType _) = return ()
warnDisamb ist (PUniverse _ _) = return ()
warnDisamb ist (PGoal _ x _ y) = warnDisamb ist x >> warnDisamb ist y
warnDisamb ist (PConstant _ _) = return ()
warnDisamb ist Placeholder = return ()
-- do-blocks: recurse into every expression/binding in each step
warnDisamb ist (PDoBlock steps) = mapM_ wStep steps
  where wStep (DoExp _ x) = warnDisamb ist x
        wStep (DoBind _ _ _ x) = warnDisamb ist x
        wStep (DoBindP _ x y cs) = warnDisamb ist x >> warnDisamb ist y >>
                                   mapM_ (\(x,y) -> warnDisamb ist x >> warnDisamb ist y) cs
        wStep (DoLet _ _ _ x y) = warnDisamb ist x >> warnDisamb ist y
        wStep (DoLetP _ x y) = warnDisamb ist x >> warnDisamb ist y
warnDisamb ist (PIdiom _ x) = warnDisamb ist x
warnDisamb ist (PMetavar _ _) = return ()
warnDisamb ist (PProof tacs) = mapM_ (Foldable.mapM_ (warnDisamb ist)) tacs
warnDisamb ist (PTactics tacs) = mapM_ (Foldable.mapM_ (warnDisamb ist)) tacs
warnDisamb ist (PElabError _) = return ()
warnDisamb ist PImpossible = return ()
warnDisamb ist (PCoerced tm) = warnDisamb ist tm
-- the interesting case: check each disambiguating namespace is inhabited
warnDisamb ist (PDisamb ds tm) = warnDisamb ist tm >>
                                 mapM_ warnEmpty ds
  where warnEmpty d =
          when (not (any (isIn d . fst) (ctxtAlist (tt_ctxt ist)))) $
            ierror . Msg $
              "Nothing found in namespace \"" ++
              intercalate "." (map T.unpack . reverse $ d) ++
              "\"."
        isIn d (NS _ ns) = isPrefixOf d ns
        isIn d _ = False
warnDisamb ist (PUnifyLog tm) = warnDisamb ist tm
warnDisamb ist (PNoImplicits tm) = warnDisamb ist tm
warnDisamb ist (PQuasiquote tm goal) = warnDisamb ist tm >>
                                       Foldable.mapM_ (warnDisamb ist) goal
warnDisamb ist (PUnquote tm) = warnDisamb ist tm
warnDisamb ist (PQuoteName _ _ _) = return ()
warnDisamb ist (PAs _ _ tm) = warnDisamb ist tm
warnDisamb ist (PAppImpl tm _) = warnDisamb ist tm
warnDisamb ist (PRunElab _ tm _) = warnDisamb ist tm
warnDisamb ist (PConstSugar _ tm) = warnDisamb ist tm
| bravit/Idris-dev | src/Idris/Error.hs | bsd-3-clause | 6,869 | 0 | 20 | 1,823 | 2,635 | 1,290 | 1,345 | 138 | 6 |
{-# LANGUAGE RankNTypes, TypeApplications #-}
module VtaCoerce where
import Data.Coerce (coerce)
newtype Age = Age Int
-- | Convert with a zero-cost 'coerce', pinning both type arguments via
-- visible type application (this test exercises -XTypeApplications with
-- 'coerce'; the code must stay in this exact form).
convert :: Int -> Age
convert = coerce @Int @Age
| sdiehl/ghc | testsuite/tests/typecheck/should_compile/VtaCoerce.hs | bsd-3-clause | 172 | 0 | 6 | 30 | 46 | 27 | 19 | 6 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Configure
-- Copyright : (c) David Himmelstrup 2005,
-- Duncan Coutts 2005
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- High level interface to configuring a package.
-----------------------------------------------------------------------------
module Distribution.Client.Configure (
configure,
configureSetupScript,
chooseCabalVersion,
) where
import Distribution.Client.Dependency
import Distribution.Client.Dependency.Types
( AllowNewer(..), isAllowNewer, ConstraintSource(..)
, LabeledPackageConstraint(..) )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.InstallPlan (InstallPlan)
import Distribution.Client.IndexUtils as IndexUtils
( getSourcePackages, getInstalledPackages )
import Distribution.Client.Setup
( ConfigExFlags(..), configureCommand, filterConfigureFlags )
import Distribution.Client.Types as Source
import Distribution.Client.SetupWrapper
( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions )
import Distribution.Client.Targets
( userToPackageConstraint )
import qualified Distribution.Client.ComponentDeps as CD
import Distribution.Package (PackageId)
import Distribution.Client.JobControl (Lock)
import Distribution.Simple.Compiler
( Compiler, CompilerInfo, compilerInfo, PackageDB(..), PackageDBStack )
import Distribution.Simple.Program (ProgramConfiguration )
import Distribution.Simple.Setup
( ConfigFlags(..), fromFlag, toFlag, flagToMaybe, fromFlagOrDefault )
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import Distribution.Simple.Utils
( defaultPackageDesc )
import qualified Distribution.InstalledPackageInfo as Installed
import Distribution.Package
( Package(..), InstalledPackageId, packageName
, Dependency(..), thisPackageVersion
)
import qualified Distribution.PackageDescription as PkgDesc
import Distribution.PackageDescription.Parse
( readPackageDescription )
import Distribution.PackageDescription.Configuration
( finalizePackageDescription )
import Distribution.Version
( anyVersion, thisVersion )
import Distribution.Simple.Utils as Utils
( notice, info, debug, die )
import Distribution.System
( Platform )
import Distribution.Verbosity as Verbosity
( Verbosity )
import Distribution.Version
( Version(..), VersionRange, orLaterVersion )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid(..))
#endif
import Data.Maybe (isJust, fromMaybe)
-- | Choose the Cabal version such that the setup scripts compiled against
-- this version will support the given command-line flags.  An explicitly
-- requested version always wins; otherwise the range is derived from
-- whether '--allow-newer' is in effect.
chooseCabalVersion :: ConfigExFlags -> Maybe Version -> VersionRange
chooseCabalVersion configExFlags maybeVersion =
    case maybeVersion of
      Just v  -> thisVersion v
      Nothing -> defaultVersionRange
  where
    -- Cabal < 1.19.2 doesn't support '--exact-configuration' which is
    -- needed for '--allow-newer' to work.
    allowNewer = fromFlagOrDefault False $
                 fmap isAllowNewer (configAllowNewer configExFlags)
    defaultVersionRange = if allowNewer
                          then orLaterVersion (Version [1,19,2] [])
                          else anyVersion
-- | Configure the package found in the local directory.
--
-- First a local install plan is computed ('planLocalPackage').  If the
-- solver fails, we log the failure and run @Setup configure@ anyway with
-- the user's flags; if it succeeds, the plan must contain exactly one
-- local ready package, which is configured with the solved flags.
configure :: Verbosity
          -> PackageDBStack
          -> [Repo]
          -> Compiler
          -> Platform
          -> ProgramConfiguration
          -> ConfigFlags
          -> ConfigExFlags
          -> [String]
          -> IO ()
configure verbosity packageDBs repos comp platform conf
    configFlags configExFlags extraArgs = do
  installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf
  sourcePkgDb       <- getSourcePackages    verbosity repos
  progress <- planLocalPackage verbosity comp platform configFlags configExFlags
                               installedPkgIndex sourcePkgDb
  notice verbosity "Resolving dependencies..."
  maybePlan <- foldProgress logMsg (return . Left) (return . Right)
                            progress
  case maybePlan of
    Left message -> do
      -- solver failure is not fatal here: fall back to a plain configure
      info verbosity $
           "Warning: solver failed to find a solution:\n"
        ++ message
        ++ "Trying configure anyway."
      setupWrapper verbosity (setupScriptOptions installedPkgIndex Nothing)
        Nothing configureCommand (const configFlags) extraArgs
    Right installPlan -> case InstallPlan.ready installPlan of
      [pkg@(ReadyPackage
             (ConfiguredPackage (SourcePackage _ _ (LocalUnpackedPackage _) _)
                                _ _ _)
             _)] -> do
        configurePackage verbosity
          platform (compilerInfo comp)
          (setupScriptOptions installedPkgIndex (Just pkg))
          configFlags pkg extraArgs
      _ -> die $ "internal error: configure install plan should have exactly "
              ++ "one local ready package."
  where
    setupScriptOptions :: InstalledPackageIndex
                       -> Maybe ReadyPackage
                       -> SetupScriptOptions
    setupScriptOptions =
      configureSetupScript
        packageDBs
        comp
        platform
        conf
        (fromFlagOrDefault
           (useDistPref defaultSetupScriptOptions)
           (configDistPref configFlags))
        (chooseCabalVersion
           configExFlags
           (flagToMaybe (configCabalVersion configExFlags)))
        Nothing
        False
    logMsg message rest = debug verbosity message >> rest
-- | Build the 'SetupScriptOptions' used to compile and run Setup.hs.
-- The package-db stack and the package index passed to the setup script
-- depend on whether the package declares explicit setup dependencies
-- (a @custom-setup@ stanza); see the notes on 'packageDBs'' below.
configureSetupScript :: PackageDBStack
                     -> Compiler
                     -> Platform
                     -> ProgramConfiguration
                     -> FilePath
                     -> VersionRange
                     -> Maybe Lock
                     -> Bool
                     -> InstalledPackageIndex
                     -> Maybe ReadyPackage
                     -> SetupScriptOptions
configureSetupScript packageDBs
                     comp
                     platform
                     conf
                     distPref
                     cabalVersion
                     lock
                     forceExternal
                     index
                     mpkg
  = SetupScriptOptions {
      useCabalVersion = cabalVersion
    , useCompiler = Just comp
    , usePlatform = Just platform
    , usePackageDB = packageDBs'
    , usePackageIndex = index'
    , useProgramConfig = conf
    , useDistPref = distPref
    , useLoggingHandle = Nothing
    , useWorkingDir = Nothing
    , setupCacheLock = lock
    , useWin32CleanHack = False
    , forceExternalSetupMethod = forceExternal
      -- If we have explicit setup dependencies, list them; otherwise, we give
      -- the empty list of dependencies; ideally, we would fix the version of
      -- Cabal here, so that we no longer need the special case for that in
      -- `compileSetupExecutable` in `externalSetupMethod`, but we don't yet
      -- know the version of Cabal at this point, but only find this there.
      -- Therefore, for now, we just leave this blank.
    , useDependencies = fromMaybe [] explicitSetupDeps
    , useDependenciesExclusive = isJust explicitSetupDeps
    }
  where
    -- When we are compiling a legacy setup script without an explicit
    -- setup stanza, we typically want to allow the UserPackageDB for
    -- finding the Cabal lib when compiling any Setup.hs even if we're doing
    -- a global install. However we also allow looking in a specific package
    -- db.
    packageDBs' :: PackageDBStack
    index' :: Maybe InstalledPackageIndex
    (packageDBs', index') =
      case packageDBs of
        (GlobalPackageDB:dbs) | UserPackageDB `notElem` dbs
                              , Nothing <- explicitSetupDeps
            -> (GlobalPackageDB:UserPackageDB:dbs, Nothing)
        -- but if the user is using an odd db stack, don't touch it
        _otherwise -> (packageDBs, Just index)
    -- 'Nothing' when there is no custom-setup stanza; otherwise the
    -- (installed-id, source-id) pairs the solver chose for Setup.hs.
    explicitSetupDeps :: Maybe [(InstalledPackageId, PackageId)]
    explicitSetupDeps = do
      ReadyPackage (ConfiguredPackage (SourcePackage _ gpkg _ _) _ _ _) deps
        <- mpkg
      -- Check if there is an explicit setup stanza
      _buildInfo <- PkgDesc.setupBuildInfo (PkgDesc.packageDescription gpkg)
      -- Return the setup dependencies computed by the solver
      return [ ( Installed.installedPackageId deppkg
               , Installed.sourcePackageId deppkg
               )
             | deppkg <- CD.setupDeps deps
             ]
-- | Make an 'InstallPlan' for the unpacked package in the current directory,
-- and all its dependencies.
--
-- The local .cabal file is wrapped as a 'SourcePackage' and fed to the
-- solver together with preferences and constraints gathered from the
-- config file and command line (versions, flags, test/benchmark stanzas).
planLocalPackage :: Verbosity -> Compiler
                 -> Platform
                 -> ConfigFlags -> ConfigExFlags
                 -> InstalledPackageIndex
                 -> SourcePackageDb
                 -> IO (Progress String String InstallPlan)
planLocalPackage verbosity comp platform configFlags configExFlags
  installedPkgIndex
  (SourcePackageDb _ packagePrefs) = do
  pkg <- readPackageDescription verbosity =<< defaultPackageDesc verbosity
  solver <- chooseSolver verbosity (fromFlag $ configSolver configExFlags)
            (compilerInfo comp)
  let -- We create a local package and ask to resolve a dependency on it
      localPkg = SourcePackage {
        packageInfoId = packageId pkg,
        Source.packageDescription = pkg,
        packageSource = LocalUnpackedPackage ".",
        packageDescrOverride = Nothing
      }
      testsEnabled = fromFlagOrDefault False $ configTests configFlags
      benchmarksEnabled =
        fromFlagOrDefault False $ configBenchmarks configFlags
      resolverParams =
          removeUpperBounds (fromFlagOrDefault AllowNewerNone $
                             configAllowNewer configExFlags)
        . addPreferences
            -- preferences from the config file or command line
            [ PackageVersionPreference name ver
            | Dependency name ver <- configPreferences configExFlags ]
        . addConstraints
            -- version constraints from the config file or command line
            -- TODO: should warn or error on constraints that are not on direct
            -- deps or flag constraints not on the package in question.
            [ LabeledPackageConstraint (userToPackageConstraint uc) src
            | (uc, src) <- configExConstraints configExFlags ]
        . addConstraints
            -- package flags from the config file or command line
            [ let pc = PackageConstraintFlags (packageName pkg)
                       (configConfigurationsFlags configFlags)
              in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget
            ]
        . addConstraints
            -- '--enable-tests' and '--enable-benchmarks' constraints from
            -- the config file or command line
            [ let pc = PackageConstraintStanzas (packageName pkg) $
                       [ TestStanzas | testsEnabled ] ++
                       [ BenchStanzas | benchmarksEnabled ]
              in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget
            ]
        $ standardInstallPolicy
            installedPkgIndex
            (SourcePackageDb mempty packagePrefs)
            [SpecificSourcePackage localPkg]
  return (resolveDependencies platform (compilerInfo comp) solver resolverParams)
-- | Call an installer for an 'SourcePackage' but override the configure
-- flags with the ones given by the 'ReadyPackage'. In particular the
-- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly
-- versioned package dependencies. So we ignore any previous partial flag
-- assignment or dependency constraints and use the new ones.
--
-- NB: when updating this function, don't forget to also update
-- 'installReadyPackage' in D.C.Install.
configurePackage :: Verbosity
                 -> Platform -> CompilerInfo
                 -> SetupScriptOptions
                 -> ConfigFlags
                 -> ReadyPackage
                 -> [String]
                 -> IO ()
configurePackage verbosity platform comp scriptOptions configFlags
  (ReadyPackage (ConfiguredPackage (SourcePackage _ gpkg _ _)
                                   flags stanzas _)
                deps)
  extraArgs =
  setupWrapper verbosity
    scriptOptions (Just pkg) configureCommand configureFlags extraArgs
  where
    configureFlags = filterConfigureFlags configFlags {
      configConfigurationsFlags = flags,
      -- We generate the legacy constraints as well as the new style precise
      -- deps. In the end only one set gets passed to Setup.hs configure,
      -- depending on the Cabal version we are talking to.
      configConstraints = [ thisPackageVersion (packageId deppkg)
                          | deppkg <- CD.nonSetupDeps deps ],
      configDependencies = [ (packageName (Installed.sourcePackageId deppkg),
                              Installed.installedPackageId deppkg)
                           | deppkg <- CD.nonSetupDeps deps ],
      -- Use '--exact-configuration' if supported.
      configExactConfiguration = toFlag True,
      configVerbosity = toFlag verbosity,
      configBenchmarks = toFlag (BenchStanzas `elem` stanzas),
      configTests = toFlag (TestStanzas `elem` stanzas)
    }
    -- Finalise the package description against the solved flag
    -- assignment; failure here indicates an internal inconsistency.
    pkg = case finalizePackageDescription flags
                 (const True)
                 platform comp [] (enableStanzas stanzas gpkg) of
            Left _ -> error "finalizePackageDescription ReadyPackage failed"
            Right (desc, _) -> desc
| rimmington/cabal | cabal-install/Distribution/Client/Configure.hs | bsd-3-clause | 13,893 | 0 | 22 | 4,023 | 2,186 | 1,204 | 982 | 242 | 3 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Prelude hiding (sum)
import Opaleye (Column, Nullable, matchNullable, isNull,
Table(Table), required, optional, queryTable,
Query, QueryArr, restrict, (.==), (.<=), (.&&), (.<),
(.++), ifThenElse, pgString, aggregate, groupBy,
count, avg, sum, leftJoin, runQuery,
showSqlForPostgres, Unpackspec,
PGInt4, PGInt8, PGText, PGDate, PGFloat8, PGBool)
import Data.Profunctor.Product (p2, p3)
import Data.Profunctor.Product.Default (Default, def)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Time.Calendar (Day)
import Control.Arrow (returnA, (<<<))
import qualified Database.PostgreSQL.Simple as PGS
import qualified Opaleye.Internal.Unpackspec as U
-- | The people table: required @name@, @age@ and @address@ columns.
-- Write and read column types coincide because every column is required.
personTable :: Table (Column PGText, Column PGInt4, Column PGText)
                     (Column PGText, Column PGInt4, Column PGText)
personTable = Table "personTable" (p3 ( required "name"
                                      , required "age"
                                      , required "address" ))
-- | Query selecting every row of 'personTable'.
personQuery :: Query (Column PGText, Column PGInt4, Column PGText)
personQuery = queryTable personTable
-- | Print the PostgreSQL SQL generated for a query to stdout.
printSql :: Default Unpackspec a a => Query a -> IO ()
printSql = putStrLn . showSqlForPostgres
-- | Polymorphic birthday record, instantiated both at Haskell types
-- ('Birthday') and at Opaleye column types ('BirthdayColumn').
data Birthday' a b = Birthday { bdName :: a, bdDay :: b }
type Birthday = Birthday' String Day
type BirthdayColumn = Birthday' (Column PGText) (Column PGDate)
-- Template Haskell: derives the product-profunctor adaptor 'pBirthday'.
$(makeAdaptorAndInstance "pBirthday" ''Birthday')
-- | The birthdays table, defined via the record adaptor 'pBirthday'.
birthdayTable :: Table BirthdayColumn BirthdayColumn
birthdayTable = Table "birthdayTable"
                      (pBirthday Birthday { bdName = required "name"
                                          , bdDay = required "birthday" })
-- | Query selecting every row of 'birthdayTable'.
birthdayQuery :: Query BirthdayColumn
birthdayQuery = queryTable birthdayTable
-- projection: keep only the name and age columns of each person row
nameAge :: Query (Column PGText, Column PGInt4)
nameAge = proc () -> do
  (name, age, _) <- personQuery -< ()
  returnA -< (name, age)
-- product: the cartesian product of the two queries (no join condition)
personBirthdayProduct :: Query ((Column PGText, Column PGInt4, Column PGText), BirthdayColumn)
personBirthdayProduct = proc () -> do
  personRow <- personQuery -< ()
  birthdayRow <- birthdayQuery -< ()
  returnA -< (personRow, birthdayRow)
-- restriction: only people aged 18 or under
youngPeople :: Query (Column PGText, Column PGInt4, Column PGText)
youngPeople = proc () -> do
  row@(_, age, _) <- personQuery -< ()
  restrict -< age .<= 18
  returnA -< row
-- | People in their twenties (20 <= age < 30) at one fixed address,
-- expressed with inline restrictions.
twentiesAtAddress :: Query (Column PGText, Column PGInt4, Column PGText)
twentiesAtAddress = proc () -> do
  row@(_, age, address) <- personQuery -< ()
  restrict -< (20 .<= age) .&& (age .< 30)
  restrict -< address .== pgString "1 My Street, My Town"
  returnA -< row
-- | Join each person with their birthday on matching name.
personAndBirthday :: Query (Column PGText, Column PGInt4, Column PGText, Column PGDate)
personAndBirthday = proc () -> do
  (name, age, address) <- personQuery -< ()
  birthday <- birthdayQuery -< ()
  restrict -< name .== bdName birthday
  returnA -< (name, age, address, bdDay birthday)
-- | Employees with a required @name@ and a nullable @boss@ column.
employeeTable :: Table (Column PGText, Column (Nullable PGText))
                       (Column PGText, Column (Nullable PGText))
employeeTable = Table "employeeTable" (p2 ( required "name"
                                          , required "boss" ))
-- | For each employee, a sentence saying whether they have a boss,
-- testing the nullable column with 'isNull'.
hasBoss :: Query (Column PGText)
hasBoss = proc () -> do
  (name, nullableBoss) <- queryTable employeeTable -< ()
  let aOrNo = ifThenElse (isNull nullableBoss) (pgString "no") (pgString "a")
  returnA -< name .++ pgString " has " .++ aOrNo .++ pgString " boss"
-- | Describe an employee's boss, eliminating the nullable column with
-- 'matchNullable' (default sentence when the boss is NULL).
bossQuery :: QueryArr (Column PGText, Column (Nullable PGText)) (Column PGText)
bossQuery = proc (name, nullableBoss) ->
  returnA -< matchNullable (name .++ pgString " has no boss")
                           (\boss -> pgString "The boss of " .++ name .++ pgString " is " .++ boss)
                           nullableBoss
-- | Reusable restriction: age must lie in [20, 30).
restrictIsTwenties :: QueryArr (Column PGInt4) ()
restrictIsTwenties = proc age ->
  restrict -< (20 .<= age) .&& (age .< 30)
-- | Reusable restriction: address must equal the fixed street string.
restrictAddressIs1MyStreet :: QueryArr (Column PGText) ()
restrictAddressIs1MyStreet = proc address ->
  restrict -< address .== pgString "1 My Street, My Town"
-- | Same restrictions as 'twentiesAtAddress', but built from the two
-- reusable restriction arrows above.
twentiesAtAddress' :: Query (Column PGText, Column PGInt4, Column PGText)
twentiesAtAddress' = proc () -> do
  row@(_, age, address) <- personQuery -< ()
  restrictIsTwenties -< age
  restrictAddressIs1MyStreet -< address
  returnA -< row
-- | Print one example query's SQL; the commented alternatives show
-- other queries from this tutorial that can be swapped in.
main :: IO ()
-- main = printSql personQuery >> printSql birthdayQuery
-- main = printSql nameAge
main = printSql personBirthdayProduct
| hnfmr/opaleye-tutorial | Main.hs | mit | 4,877 | 10 | 13 | 1,258 | 1,454 | 779 | 675 | 94 | 1 |
module Zipper (
    BinTree(..),
    Zipper,
    fromTree,
    toTree,
    value,
    left,
    right,
    up,
    setValue,
    setLeft,
    setRight
  ) where

-- | A binary tree: a value plus optional left and right subtrees.
data BinTree a = BT {
  btValue :: a,                -- ^ Value
  btLeft :: Maybe (BinTree a), -- ^ Left child
  btRight :: Maybe (BinTree a) -- ^ Right child
} deriving (Eq, Show)

-- | A zipper for a binary tree: the focused node's contents plus a trail
-- back to the root.
data Zipper a = Z {
  zValue :: a,                 -- ^ Value of focus
  zLeft :: Maybe (BinTree a),  -- ^ Left child of focus
  zRight :: Maybe (BinTree a), -- ^ Right child of focus
  _zTrail :: ZipperTrail a     -- ^ Breadcrumbs back to the root
} deriving (Eq, Show)

-- | Breadcrumbs recording how the focus was reached.  Each step stores
-- the parent's value and the sibling subtree that was not descended into.
data ZipperTrail a = L a (Maybe (BinTree a)) (ZipperTrail a) -- Left path taken
                   | R a (Maybe (BinTree a)) (ZipperTrail a) -- Right path taken
                   | T                                       -- Top level
                   deriving (Eq, Show)

-- | Focus a zipper on the root of a tree.
fromTree :: BinTree a -> Zipper a
fromTree t = Z (btValue t) (btLeft t) (btRight t) T

-- | Rebuild the complete tree from a zipper by walking the trail back up.
toTree :: Zipper a -> BinTree a
toTree (Z v l r trail) = rebuild (BT v l r) trail
  where
    rebuild t T              = t
    rebuild t (L pv pr rest) = rebuild (BT pv (Just t) pr) rest
    rebuild t (R pv pl rest) = rebuild (BT pv pl (Just t)) rest

-- | The value at the focus.
value :: Zipper a -> a
value = zValue

-- | Move the focus to the left child, if there is one.
left :: Zipper a -> Maybe (Zipper a)
left (Z v l r trail) =
  fmap (\(BT cv cl cr) -> Z cv cl cr (L v r trail)) l

-- | Move the focus to the right child, if there is one.
right :: Zipper a -> Maybe (Zipper a)
right (Z v l r trail) =
  fmap (\(BT cv cl cr) -> Z cv cl cr (R v l trail)) r

-- | Move the focus to the parent, if the focus is not the root.
up :: Zipper a -> Maybe (Zipper a)
up (Z v l r trail) =
  case trail of
    T            -> Nothing
    L pv pr rest -> Just (Z pv (Just (BT v l r)) pr rest)
    R pv pl rest -> Just (Z pv pl (Just (BT v l r)) rest)

-- | Replace the value at the focus.
setValue :: a -> Zipper a -> Zipper a
setValue v (Z _ l r trail) = Z v l r trail

-- | Replace the left subtree of the focus.
setLeft :: Maybe (BinTree a) -> Zipper a -> Zipper a
setLeft l (Z v _ r trail) = Z v l r trail

-- | Replace the right subtree of the focus.
setRight :: Maybe (BinTree a) -> Zipper a -> Zipper a
setRight r (Z v l _ trail) = Z v l r trail
| exercism/xhaskell | exercises/practice/zipper/.meta/examples/success-standard/src/Zipper.hs | mit | 2,538 | 0 | 11 | 808 | 959 | 504 | 455 | 52 | 3 |
------------------------------------------------------------------------------
-- | Defines the 'Language' accept header with an 'Accept' instance for use in
-- language negotiation.
module Network.HTTP.Media.Language
( Language
, toParts
) where
import Data.ByteString (ByteString)
import Data.CaseInsensitive (CI)
import Network.HTTP.Media.Language.Internal
------------------------------------------------------------------------------
-- | Converts 'Language' to a list of its language parts.  The wildcard
-- produces an empty list (the newtype wraps the list of case-insensitive
-- subtags directly -- see "Network.HTTP.Media.Language.Internal").
toParts :: Language -> [CI ByteString]
toParts (Language l) = l
| zmthy/http-media | src/Network/HTTP/Media/Language.hs | mit | 684 | 0 | 7 | 147 | 79 | 50 | 29 | 8 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.ITCH
-- Copyright : (c) 2014, Jakub Kozlowski
-- License : MIT
--
-- Maintainer : mail@jakub-kozlowski.com
--
-- This module specifies types used in ITCH messages and their serialisers,
-- as per the specs at
-- <http://www.londonstockexchange.com/products-and-services/millennium-exchange/technicalinformation/technicalinformation.htm>,
-- specifically:
-- <http://www.londonstockexchange.com/products-and-services/millennium-exchange/millennium-exchange-migration/mit303.pdf>.
--
-- Serialisation of dates and times currently relies on `Data.Time.Format`
-- which doesn't seem to handle serialisation of invalid dates.
-- Until I figure out what's going on, I shall generate the test values so
-- that they are correct.
-----------------------------------------------------------------------------
module Data.ITCH.Types (
-- | Types
Alpha, BitField, Date(..), Time(..), UInt8, UInt16, UInt32, UInt64, Byte, Price,
uint32
-- | Utilities
, getMessageLength, putMessageLength, getMessageType, putMessageType, arbitraryAlpha, getAlpha, putAlpha
, skipRemaining
-- | Encoding and decoding
, UnitHeader(..), writeMessages, readMessages
) where
import Control.Applicative (pure, (*>), (<$>))
import Control.Monad (forM_, replicateM)
import Data.Binary (Binary, get, put)
import Data.Binary.Get (Get)
import qualified Data.Binary.Get as Get (getByteString, getWord16le,
getWord32le, getWord64le,
skip)
import Data.Binary.Put (Put)
import qualified Data.Binary.Put as Put (putByteString, putWord16le,
putWord32le, putWord64le,
runPut)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS8 (ByteString, length, pack,
replicate, unpack)
import qualified Data.ByteString.Lazy as BSL
import Data.Decimal (DecimalRaw (..), realFracToDecimal)
import Data.Monoid ((<>))
import Data.Time.Calendar (Day (..))
import Data.Time.Clock (secondsToDiffTime)
import Data.Time.Format (formatTime, parseTime)
import Data.Time.LocalTime (TimeOfDay, timeToTimeOfDay)
import Data.Word
import Foreign.Storable (Storable, sizeOf)
import System.Locale (defaultTimeLocale)
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
import Test.QuickCheck.Gen (Gen, suchThat)
-- * Data types
-- | Data Type | Length | Description
-- | -----------------------------------
-- | Alpha | Variable | These fields use standard ASCII character bytes.
-- | | | They are left justified and padded on the right with spaces.
-- | -----------------------------------
-- An 'Alpha' is a left-justified, space-padded ASCII field of a fixed,
-- message-specific length; the length is supplied at (de)serialisation time.
newtype Alpha = Alpha BS8.ByteString
  deriving (Eq, Show)
-- | Generator for 'Alpha' values of this length.
-- Note: it replicates one arbitrary byte @l@ times, so every position of
-- the generated field holds the same (possibly non-ASCII) byte.
arbitraryAlpha :: Int -> Gen Alpha
arbitraryAlpha l = (Alpha . BS.replicate l) <$> arbitrary
-- | Puts the alpha given its length (padding or trimming via 'padAlpha').
putAlpha :: Int -> Alpha -> Put
putAlpha l (Alpha a) = Put.putByteString $ padAlpha l a
-- | Gets the alpha given its length, consuming exactly @l@ bytes.
getAlpha :: Int -> Get Alpha
getAlpha l = Alpha <$> Get.getByteString l
-- | The character used to right-pad 'Alpha' values.
padAlphaValue :: Char
padAlphaValue = ' '
-- | Force a bytestring to exactly @size@ bytes: longer inputs are
-- truncated, shorter ones are right-padded with 'padAlphaValue'.
padAlpha :: Int -> BS8.ByteString -> BS8.ByteString
padAlpha size alpha =
  case compare (BS8.length alpha) size of
    GT -> BS.take size alpha
    EQ -> alpha
    LT -> BS.append alpha (BS8.replicate (size - BS8.length alpha) padAlphaValue)
-- | Bit Field | 1 | A single byte used to hold up to eight 1-bit flags.
-- |           |   | Each bit will represent a Boolean flag.
-- |           |   | The 0 bit is the lowest significant bit and the 7 bit is the highest significant bit.
type BitField = Word8
-- | Byte | 1 | A single byte used to hold one ASCII character.
type Byte = Word8
-- | Date | 8 | Date specified in the YYYYMMDD format using ASCII characters.
newtype Date = Date Day
  deriving (Eq, Show)
instance Arbitrary Date where
  -- Any Integer is accepted as a Modified Julian Day, so generated dates
  -- may fall outside the YYYYMMDD-representable range; see the module
  -- header note about serialising invalid dates.
  arbitrary = (\x -> Date $ ModifiedJulianDay {toModifiedJulianDay = x} ) <$> arbitrary
instance Binary Date where
  -- Reads exactly 8 ASCII bytes and parses them as %0Y%m%d; a parse
  -- failure becomes a 'Get' failure via 'maybeToFail'.
  get = Date <$> (maybeToFail =<< (parseTime defaultTimeLocale "%0Y%m%d" . BS8.unpack <$> Get.getByteString 8))
  put (Date d) = Put.putByteString . BS8.pack $ formatTime defaultTimeLocale "%0Y%m%d" d
-- | Time | 8 | Time specified in the HH:MM:SS format using ASCII characters.
-- The 'Arbitrary' as well as 'Binary' instances are very specifically implemented to pass
-- tests. There is an inherent loss of information in this encoding, because we only
-- encode time with second precision, whereas 'TimeOfDay' has picosecond precision.
-- Therefore, get and put do not satisfy the identity law, unless we choose the input
-- 'TimeOfDay' very carefully.
newtype Time = Time TimeOfDay
  deriving (Eq, Show)
instance Arbitrary Time where
  -- Generates whole seconds in [0, 86400] only, so encode/decode round-trips.
  arbitrary = Time . timeToTimeOfDay . secondsToDiffTime <$> suchThat arbitrary (\d -> d >= 0 && d <= 60 * 60 * 24)
instance Binary Time where
  -- Reads exactly 8 ASCII bytes ("HH:MM:SS"); a parse failure becomes a
  -- 'Get' failure via 'maybeToFail'.
  get = Time <$> (maybeToFail =<< (parseTime defaultTimeLocale "%H:%M:%S" . BS8.unpack <$> Get.getByteString 8))
  put (Time t) = Put.putByteString . BS8.pack $ formatTime defaultTimeLocale "%H:%M:%S" t
-- | Price | 8 | Signed Little-Endian encoded eight byte integer field with eight implied decimal places.
-- NOTE(review): the spec says "signed", but the mantissa is stored in a
-- 'Word64' here -- confirm negative prices cannot occur on this feed.
newtype Price = Price (DecimalRaw Word64)
  deriving (Eq, Show)
instance Arbitrary Price where
  arbitrary = Price . realFracToDecimal 8 <$> (arbitrary :: Gen Double)
instance Binary Price where
  -- Only the raw mantissa travels on the wire; the 8 decimal places are implied.
  get = Price . Decimal 8 <$> Get.getWord64le
  put (Price (Decimal _ p)) = Put.putWord64le p
-- | UInt8 | 1 | 8 bit unsigned integer.
type UInt8 = Word8
-- | UInt16 | 2 | Little-Endian encoded 16 bit unsigned integer.
newtype UInt16 = UInt16 Word16
  deriving (Eq, Show, Storable, Arbitrary)
instance Binary UInt16 where
  get = UInt16 <$> Get.getWord16le
  put (UInt16 uint16) = Put.putWord16le uint16
-- UInt32 | 4 | Little-Endian encoded 32 bit unsigned integer.
-- | Construct a 'UInt32' from a raw 'Word32' (exported constructor function).
uint32 :: Word32 -> UInt32
uint32 = UInt32
newtype UInt32 = UInt32 Word32
  deriving (Eq, Show, Storable, Arbitrary)
instance Binary UInt32 where
  get = UInt32 <$> Get.getWord32le
  put (UInt32 u) = Put.putWord32le u
-- UInt64 | 8 | Little-Endian encoded 64 bit unsigned integer.
newtype UInt64 = UInt64 Word64
  deriving (Eq, Show, Storable, Arbitrary)
instance Binary UInt64 where
  get = UInt64 <$> Get.getWord64le
  put (UInt64 uint64) = Put.putWord64le uint64
-- * Unit header
-- Field | Offset | Length | Type | Description
-- ----------------------------------------------------------------
data UnitHeader a = UnitHeader {
    -- Length | 0 | 2 | UInt16 | Length of the message block including the header and all payload messages.
    _unitHeaderLength :: !UInt16
    -- Message Count | 2 | 1 | UInt8 | Number of payload messages that will follow the header.
  , _unitHeaderMessageCount :: !UInt8
    -- Market Data Group | 3 | 1 | Byte | Identity of the market data group the payload messages relate to.
    --                   |   |   |      | This field is not validated for client initiated messages.
  , _unitHeaderMarketDataGroup :: !Byte
    -- Sequence Number | 4 | 4 | UInt32 | Sequence number of the first payload message.
  , _unitHeaderSequenceNumber :: !UInt32
    -- Payload | 8 | Variable | - | One or more payload messages.
  , _unitHeaderPayload :: ![a]
  } deriving (Eq, Show)
-- | Size of the header: the fixed fields (length, count, group, sequence
-- number) only; the variable payload is excluded.
sizeOfHeader :: Int
sizeOfHeader = (sizeOf (undefined :: UInt16))
             + (sizeOf (undefined :: UInt8))
             + (sizeOf (undefined :: Byte))
             + (sizeOf (undefined :: UInt32))
-- | Write the messages with unit header prepended.
-- The header's length field counts the fixed header plus all payload bytes.
writeMessages :: Binary a => Byte -> UInt32 -> [a] -> BSL.ByteString
writeMessages marketDataGroup headerSequenceNumber msgs =
  let serialised = Put.runPut $ forM_ msgs put
      size = UInt16 (fromIntegral $ sizeOfHeader + (fromIntegral $ BSL.length serialised))
      unitHeader = Put.runPut $
        put size *>
        put (fromIntegral . length $ msgs :: UInt8) *>
        put marketDataGroup *>
        put headerSequenceNumber
  in unitHeader <> serialised
-- | Reads the list of messages and the unit header.
-- Decodes the four fixed header fields in wire order, then exactly as many
-- payload messages as the message-count field announces.
readMessages :: Binary a => Get (UnitHeader a)
readMessages = do
  headerLength <- get
  msgCount <- get
  marketDataGroup <- get
  sequenceNumber <- get
  msgs <- replicateM (fromIntegral msgCount) get
  return $ UnitHeader headerLength msgCount marketDataGroup sequenceNumber msgs
-- | * Utilities
-- These are monomorphic aliases of 'get'/'put' so that generated code can
-- pin the field type without an explicit annotation.
-- | Simplifies getting the msg type in generated code.
getMessageType :: Get Byte
getMessageType = get
-- | Simplified putting the msg type in generated code.
putMessageType :: Byte -> Put
putMessageType = put
-- | Gets the message length of bytes
getMessageLength :: Get UInt8
getMessageLength = get
-- | Simplifies putting the length of the message in generated code.
putMessageLength :: UInt8 -> Put
putMessageLength = put
-- | Consume any bytes of the message that were announced by the length
-- field but not decoded, so the next message starts at the right offset.
skipRemaining :: UInt8 -> Int -> Get ()
skipRemaining expected actual
    | leftover > 0 = Get.skip leftover
    | otherwise    = return ()
  where
    leftover = fromIntegral expected - actual
-- | Lift a 'Maybe' into the 'Get' monad: 'Just' succeeds with the value,
-- while 'Nothing' aborts the decoder with a descriptive parse failure
-- (the previous message, "Something didn't work", said nothing useful).
maybeToFail :: Maybe a -> Get a
maybeToFail = maybe (fail "maybeToFail: could not parse value") pure
| ggreif/thebook-haskell | src/Data/ITCH/Types.hs | mit | 10,253 | 0 | 15 | 2,584 | 1,884 | 1,063 | 821 | 147 | 2 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.HTMLDataListElement
(js_getOptions, getOptions, HTMLDataListElement,
castToHTMLDataListElement, gTypeHTMLDataListElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
-- Raw FFI binding for the read-only @options@ property.
foreign import javascript unsafe "$1[\"options\"]" js_getOptions ::
        JSRef HTMLDataListElement -> IO (JSRef HTMLCollection)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLDataListElement.options Mozilla HTMLDataListElement.options documentation>
--
-- Yields 'Nothing' when the returned JS value cannot be marshalled.
getOptions ::
           (MonadIO m) => HTMLDataListElement -> m (Maybe HTMLCollection)
getOptions self =
  liftIO (js_getOptions (unHTMLDataListElement self) >>= fromJSRef)
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE CPP #-}
module CommandArgs
( HDevTools(..)
, loadHDevTools
, pathArg
)
where
import Cabal (findFile)
import Data.Version (showVersion)
import Paths_hdevtools (version)
import qualified Config
import System.Console.CmdArgs.Implicit
import System.Console.CmdArgs.Explicit (splitArgs)
import System.Directory (getCurrentDirectory)
import System.Environment (getProgName, withArgs, getArgs)
import System.FilePath (takeDirectory)
import System.Info (arch, os)
-- | Package version string, taken from the cabal file via Paths_hdevtools.
programVersion :: String
programVersion =
    "version " ++ showVersion version
-- | Version of the Cabal library built against; VERSION_Cabal is a CPP
-- macro supplied by the build system.
cabalVersion :: String
cabalVersion =
    "cabal-" ++ VERSION_Cabal
-- | Full version banner: program version plus GHC version, platform and
-- Cabal version.
fullVersion :: String
fullVersion =
    concat
        [ programVersion
        , " ("
        , "ghc-", Config.cProjectVersion, "-", arch, "-", os
        , ", ", cabalVersion
        , ")"
        ]
-- | One constructor per command-line sub-command; cmdargs reflects the
-- record field names into flags (underscores become dashes).
data HDevTools
    = Admin -- ^ Server administration: start\/stop\/status.
        { socket :: Maybe FilePath
        , ghcOpts :: [String]
        , start_server :: Bool
        , cabalOpts :: [String]
        , noDaemon :: Bool
        , status :: Bool
        , stop_server :: Bool
        , debug :: Bool
        , noStack :: Bool
        }
    | Check -- ^ Check a source file for errors and warnings.
        { socket :: Maybe FilePath
        , ghcOpts :: [String]
        , cabalOpts :: [String]
        , path :: Maybe String
        , file :: String
        , json :: Bool
        , debug :: Bool
        , noStack :: Bool
        , noTH :: Bool
        }
    | ModuleFile -- ^ Map a module name to its source file.
        { socket :: Maybe FilePath
        , ghcOpts :: [String]
        , cabalOpts :: [String]
        , module_ :: String
        , debug :: Bool
        , noStack :: Bool
        }
    | Info -- ^ Ask GHC for information about an identifier.
        { socket :: Maybe FilePath
        , ghcOpts :: [String]
        , cabalOpts :: [String]
        , path :: Maybe String
        , file :: String
        , identifier :: String
        , debug :: Bool
        , noStack :: Bool
        , noTH :: Bool
        }
    | Type -- ^ Query the type of the expression at a source position.
        { socket :: Maybe FilePath
        , ghcOpts :: [String]
        , cabalOpts :: [String]
        , path :: Maybe String
        , file :: String
        , line :: Int
        , col :: Int
        , debug :: Bool
        , noStack :: Bool
        , noTH :: Bool
        }
    | FindSymbol -- ^ List the modules that could provide a symbol.
        { socket :: Maybe FilePath
        , ghcOpts :: [String]
        , cabalOpts :: [String]
        , symbol :: String
        , files :: [String]
        , debug :: Bool
        , noStack :: Bool
        , noTH :: Bool
        }
    deriving (Show, Data, Typeable)
-- The dummy* values below are the seed records that the cmdargs
-- annotations ('record') are built on; they double as the per-mode
-- defaults when a flag is not given on the command line.
dummyAdmin :: HDevTools
dummyAdmin = Admin
    { socket = Nothing
    , ghcOpts = []
    , cabalOpts = []
    , start_server = False
    , noDaemon = False
    , status = False
    , stop_server = False
    , debug = False
    , noStack = False
    }
-- | Defaults for the Check mode.
dummyCheck :: HDevTools
dummyCheck = Check
    { socket = Nothing
    , ghcOpts = []
    , cabalOpts = []
    , path = Nothing
    , file = ""
    , json = False
    , debug = False
    , noTH = False
    , noStack = False
    }
-- | Defaults for the ModuleFile mode.
dummyModuleFile :: HDevTools
dummyModuleFile = ModuleFile
    { socket = Nothing
    , ghcOpts = []
    , cabalOpts = []
    , module_ = ""
    , debug = False
    , noStack = False
    }
-- | Defaults for the Info mode.
dummyInfo :: HDevTools
dummyInfo = Info
    { socket = Nothing
    , ghcOpts = []
    , cabalOpts = []
    , path = Nothing
    , file = ""
    , identifier = ""
    , debug = False
    , noStack = False
    , noTH = False
    }
-- | Defaults for the Type mode.
dummyType :: HDevTools
dummyType = Type
    { socket = Nothing
    , ghcOpts = []
    , cabalOpts = []
    , path = Nothing
    , file = ""
    , line = 0
    , col = 0
    , debug = False
    , noStack = False
    , noTH = False
    }
-- | Defaults for the FindSymbol mode.
dummyFindSymbol :: HDevTools
dummyFindSymbol = FindSymbol
    { socket = Nothing
    , ghcOpts = []
    , cabalOpts = []
    , symbol = ""
    , files = []
    , debug = False
    , noStack = False
    , noTH = False
    }
-- | cmdargs annotations for the Admin mode (server administration).
admin :: Annotate Ann
admin = record dummyAdmin
    [ socket := def += typFile += help "socket file to use"
    , ghcOpts := def += typ "OPTION" += help "ghc options"
    , cabalOpts := def += typ "OPTION" += help "cabal options"
    , start_server := def += help "start server"
    , noDaemon := def += help "do not daemonize (only if --start-server)"
    , status := def += help "show status of server"
    , stop_server := def += help "shutdown the server"
    , debug := def += help "enable debug output"
    , noStack := def += name "S" += help "disable stack integration"
    ] += help "Interactions with the server"
-- | cmdargs annotations for the Check mode; the file is positional arg 0.
check :: Annotate Ann
check = record dummyCheck
    [ socket := def += typFile += help "socket file to use"
    , ghcOpts := def += typ "OPTION" += help "ghc options"
    , cabalOpts := def += typ "OPTION" += help "cabal options"
    , path := def += typFile += help "path to target file"
    , file := def += typFile += argPos 0 += opt ""
    , json := def += help "render output as JSON"
    , debug := def += help "enable debug output"
    , noStack := def += name "S" += help "disable stack integration"
    , noTH := def += help "disable template haskell"
    ] += help "Check a haskell source file for errors and warnings"
-- | cmdargs annotations for the ModuleFile mode; the module is arg 0.
moduleFile :: Annotate Ann
moduleFile = record dummyModuleFile
    [ socket := def += typFile += help "socket file to use"
    , ghcOpts := def += typ "OPTION" += help "ghc options"
    , cabalOpts := def += typ "OPTION" += help "cabal options"
    , module_ := def += typ "MODULE" += argPos 0
    , debug := def += help "enable debug output"
    , noStack := def += name "S" += help "disable stack integration"
    ] += help "Get the haskell source file corresponding to a module name"
-- | cmdargs annotations for the Info mode; file is arg 0, identifier arg 1.
info :: Annotate Ann
info = record dummyInfo
    [ socket := def += typFile += help "socket file to use"
    , ghcOpts := def += typ "OPTION" += help "ghc options"
    , cabalOpts := def += typ "OPTION" += help "cabal options"
    , path := def += typFile += help "path to target file"
    , file := def += typFile += argPos 0 += opt ""
    , identifier := def += typ "IDENTIFIER" += argPos 1
    , debug := def += help "enable debug output"
    , noStack := def += name "S" += help "disable stack integration"
    , noTH := def += help "disable template haskell"
    ] += help "Get info from GHC about the specified identifier"
-- | cmdargs annotations for the Type mode; file, line and column are
-- positional args 0, 1 and 2.
type_ :: Annotate Ann
type_ = record dummyType
    [ socket := def += typFile += help "socket file to use"
    , ghcOpts := def += typ "OPTION" += help "ghc options"
    , cabalOpts := def += typ "OPTION" += help "cabal options"
    , debug := def += help "enable debug output"
    , noStack := def += name "S" += help "disable stack integration"
    , path := def += typFile += help "path to target file"
    , file := def += typFile += argPos 0 += opt ""
    , line := def += typ "LINE" += argPos 1
    , col := def += typ "COLUMN" += argPos 2
    , noTH := def += help "disable template haskell"
    ] += help "Get the type of the expression at the specified line and column"
-- | cmdargs annotations for the FindSymbol mode; the symbol is arg 0 and
-- any remaining arguments are taken as files.
findSymbol :: Annotate Ann
findSymbol = record dummyFindSymbol
    [ socket := def += typFile += help "socket file to use"
    , ghcOpts := def += typ "OPTION" += help "ghc options"
    , cabalOpts := def += typ "OPTION" += help "cabal options"
    , symbol := def += typ "SYMBOL" += argPos 0
    , files := def += typFile += args
    , debug := def += help "enable debug output"
    , noStack := def += name "S" += help "disable stack integration"
    , noTH := def += help "disable template haskell"
    ] += help "List the modules where the given symbol could be found"
-- | The complete command-line interface: all modes (Admin is the default
-- via 'auto') plus help and version flags, using the given program name.
full :: String -> Annotate Ann
full progName = modes_ [admin += auto, check, moduleFile, info, type_, findSymbol]
    += helpArg [name "h", groupname "Help"]
    += versionArg [groupname "Help"]
    += program progName
    += summary (progName ++ ": " ++ fullVersion)
-- | The target-file argument of a mode, for the modes that have one.
fileArg :: HDevTools -> Maybe String
fileArg (Admin {}) = Nothing
fileArg (ModuleFile {}) = Nothing
fileArg a@(Check {}) = Just $ file a
fileArg a@(Info {}) = Just $ file a
fileArg a@(Type {}) = Just $ file a
fileArg (FindSymbol {}) = Nothing
-- | The explicit @--path@ override of a mode, for the modes that have one.
pathArg' :: HDevTools -> Maybe String
pathArg' (Admin {}) = Nothing
pathArg' (ModuleFile {}) = Nothing
pathArg' a@(Check {}) = path a
pathArg' a@(Info {}) = path a
pathArg' a@(Type {}) = path a
pathArg' (FindSymbol {}) = Nothing
-- | The path that determines the working directory: the explicit @--path@
-- override when given, otherwise the mode's target-file argument.
pathArg :: HDevTools -> Maybe String
pathArg a = maybe (fileArg a) Just (pathArg' a)
-- | Parse the command line, merging in any per-project settings found in
-- a @.hdevtoolsrc@ file located from the target file's directory.
loadHDevTools :: IO HDevTools
loadHDevTools = do
    progName <- getProgName
    -- First pass: parse the real command line to learn the target path.
    cfg0 <- cmdArgs_ (full progName)
    dir <- maybe getCurrentDirectory (return . takeDirectory) $ pathArg cfg0
    mConfig <- findFile (== ".hdevtoolsrc") dir
    -- NOTE(review): lazy 'readFile' defers reading until 'splitArgs'
    -- forces the contents -- confirm the handle lifetime is acceptable.
    perProject <- maybe (return []) (\f -> splitArgs `fmap` readFile f) mConfig
    args0 <- getArgs
    -- Second pass: re-parse with the per-project options appended.
    withArgs (args0 ++ perProject) $ cmdArgs_ (full progName)
| pacak/hdevtools | src/CommandArgs.hs | mit | 9,350 | 0 | 12 | 3,129 | 2,670 | 1,460 | 1,210 | 254 | 2 |
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.ModuleDependencies
-- Copyright : (c) Phil Freeman 2013
-- License : MIT
--
-- Maintainer : Phil Freeman <paf31@cantab.net>
-- Stability : experimental
-- Portability :
--
-- | Provides the ability to sort modules based on module dependencies
--
-----------------------------------------------------------------------------
module Language.PureScript.ModuleDependencies (
sortModules,
ModuleGraph
) where
import Data.Graph
import Data.List (nub)
import Data.Maybe (mapMaybe)
import Language.PureScript.Declarations
import Language.PureScript.Names
import Language.PureScript.Types
-- |
-- A list of modules with their dependencies
--
type ModuleGraph = [(ModuleName, [ModuleName])]
-- |
-- Sort a collection of modules based on module dependencies.
--
-- Reports an error if the module graph contains a cycle.
--
sortModules :: [Module] -> Either String ([Module], ModuleGraph)
sortModules ms = do
  -- One graph vertex per module, keyed by module name, with edges to every
  -- module its declarations mention (imports and qualified references).
  let verts = map (\m@(Module _ ds _) -> (m, getModuleName m, nub (concatMap usedModules ds))) ms
  -- Strongly connected components come out in topological order; a
  -- multi-module component is a dependency cycle and makes 'toModule' fail.
  ms' <- mapM toModule $ stronglyConnComp verts
  let moduleGraph = map (\(_, mn, deps) -> (mn, deps)) verts
  return (ms', moduleGraph)
-- |
-- Calculate a list of used modules based on explicit imports and qualified names
--
usedModules :: Declaration -> [ModuleName]
-- Collect module names from the whole declaration: only the declaration-
-- and value-level collectors do real work; binders, case alternatives and
-- do-notation elements contribute nothing directly.
usedModules = let (f, _, _, _, _) = everythingOnValues (++) forDecls forValues (const []) (const []) (const []) in nub . f
  where
  -- Explicit imports name a module directly.
  forDecls :: Declaration -> [ModuleName]
  forDecls (ImportDeclaration mn _ _) = [mn]
  forDecls _ = []
  -- Qualified variables, operators and constructors reveal their module;
  -- type annotations are searched via 'forTypes'.
  forValues :: Value -> [ModuleName]
  forValues (Var (Qualified (Just mn) _)) = [mn]
  forValues (BinaryNoParens (Qualified (Just mn) _) _ _) = [mn]
  forValues (Constructor (Qualified (Just mn) _)) = [mn]
  forValues (TypedValue _ _ ty) = forTypes ty
  forValues _ = []
  -- Qualified type constructors and class constraints reveal their module.
  forTypes :: Type -> [ModuleName]
  forTypes (TypeConstructor (Qualified (Just mn) _)) = [mn]
  forTypes (ConstrainedType cs _) = mapMaybe (\(Qualified mn _, _) -> mn) cs
  forTypes _ = []
-- | Extract a module's name.
getModuleName :: Module -> ModuleName
getModuleName (Module mn _ _) = mn
-- |
-- Convert a strongly connected component of the module graph to a module
--
toModule :: SCC Module -> Either String Module
toModule (AcyclicSCC m) = return m
toModule (CyclicSCC [m]) = return m -- a module referring to itself is fine
toModule (CyclicSCC ms) = Left $ "Cycle in module dependencies: " ++ show (map getModuleName ms)
| bergmark/purescript | src/Language/PureScript/ModuleDependencies.hs | mit | 2,480 | 0 | 16 | 426 | 740 | 407 | 333 | 37 | 8 |
module ReverseRead where
-- | Reverse the characters of every word while keeping the words
-- themselves in their original order.
reverseWords :: String -> String
reverseWords s = unwords [reverse w | w <- words s]
-- | Read lines from standard input until an empty line is entered,
-- echoing each non-empty line with its words reversed.
-- (Fix: the top-level binding previously lacked a type signature.)
main :: IO ()
main = do
    line <- getLine
    if null line
        then return ()
        else do
            putStrLn $ reverseWords line
            main
{-# LANGUAGE JavaScriptFFI #-}
-- | An implementation of the NodeJS Cipher API, as documented
-- <https://nodejs.org/api/crypto.html#crypto_class_cipher here>.
module GHCJS.Node.Crypto.Cipher
( module GHCJS.Node.Crypto.Cipher -- FIXME: specific export list
) where
import GHCJS.Array
import GHCJS.Foreign.Callback
import GHCJS.Types
import GHCJS.Node.Buffer
import GHCJS.Node.Stream
-- | An opaque handle to a NodeJS @Cipher@ object.  A 'Cipher' is both a
-- readable and a writable stream, as witnessed by the instances below.
newtype Cipher
  = MkCipher JSVal
instance IsReadStream Cipher where
  toReadStream (MkCipher val) = MkReadStream val
instance IsWriteStream Cipher where
  toWriteStream (MkCipher val) = MkWriteStream val
-- | Create a cipher object using the given algorithm, key, and initialization
-- vector, each of which is a UTF-8 encoded string.
--
-- Valid values for the @algorithm@ argument can be seen by running
-- @openssl list-cipher-algorithm@.
foreign import javascript safe
  "$r = crypto.createCipheriv($1, $2, $3);"
  unsafeCreateCipherIV :: JSString  -- ^ @algorithm@
                       -> JSString  -- ^ @key@
                       -> JSString  -- ^ @iv@
                       -> IO Cipher -- ^ Resultant cipher object.
-- | Returns any remaining enciphered contents from the given 'Cipher'.
foreign import javascript safe
  "$r = $1.final();"
  unsafeFinal :: Cipher    -- ^ The cipher to finalize.
              -> IO Buffer -- ^ Any remaining enciphered bytes.
-- | Update the cipher with the given data.
foreign import javascript safe
  "$r = $1.update($2);"
  unsafeUpdate :: Cipher    -- ^ The cipher to update.
               -> Buffer    -- ^ Plaintext input to encipher.
               -> IO Buffer -- ^ Enciphered output produced so far.
-- | When using an authenticated encryption mode (only @GCM@ is currently
-- supported), this function sets the value used for the additional
-- authenticated data (AAD) input parameter.
foreign import javascript safe
  "$1.setAAD($2);"
  unsafeSetAAD :: Cipher -- ^ The cipher to modify.
               -> Buffer -- ^ The additional authenticated data.
               -> IO ()
-- | When using an authenticated encryption mode (only @GCM@ is currently
-- supported), this function returns a 'Buffer' containing the authentication
-- tag that has been computed from the input data.
--
-- This function should only be called after encryption has been completed
-- using the @cipher.final()@ method (called by 'unsafeFinal' and friends).
foreign import javascript safe
  "$r = $1.getAuthTag();"
  unsafeGetAuthTag :: Cipher    -- ^ The finalized cipher.
                   -> IO Buffer -- ^ The computed authentication tag.
-- | Sets the value of the @auto_padding@ boolean attribute on a 'Cipher'.
--
-- When using block encryption algorithms, a 'Cipher' will automatically add
-- padding to the input data to the appropriate block size.
--
-- When @auto_padding@ is false, the length of the entire input data must be a
-- multiple of the cipher's block size or cipher.final() will throw an error.
-- Disabling automatic padding is useful for non-standard padding, for
-- instance using @0x0@ instead of PKCS padding.
--
-- This function should only be called before encryption has been completed
-- using the @cipher.final()@ method (called by 'unsafeFinal' and friends).
foreign import javascript safe
  "$1.setAutoPadding($2);"
  unsafeSetAutoPadding :: Cipher -- ^ The cipher to modify.
                       -> Bool   -- ^ New value of @auto_padding@.
                       -> IO ()
| taktoa/ghcjs-electron | src/GHCJS/Node/Crypto/Cipher.hs | mit | 3,173 | 19 | 9 | 761 | 297 | 175 | 122 | 37 | 0 |
module Feature.InsertSpec where
import Test.Hspec
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Network.Wai.Test (SResponse(simpleBody,simpleHeaders,simpleStatus))
import SpecHelper
import qualified Data.Aeson as JSON
import Data.Maybe (fromJust)
import Text.Heredoc
import Network.HTTP.Types.Header
import Network.HTTP.Types
import Control.Monad (replicateM_)
import TestTypes(IncPK(..), CompoundPK(..))
spec :: Spec
spec = afterAll_ resetDb $ around withApp $ do
describe "Posting new record" $ do
after_ (clearTable "menagerie") . it "accepts disparate json types" $ do
p <- post "/menagerie"
[json| {
"integer": 13, "double": 3.14159, "varchar": "testing!"
, "boolean": false, "date": "1900-01-01", "money": "$3.99"
, "enum": "foo"
} |]
liftIO $ do
simpleBody p `shouldBe` ""
simpleStatus p `shouldBe` created201
context "with no pk supplied" $ do
context "into a table with auto-incrementing pk" . after_ (clearTable "auto_incrementing_pk") $
it "succeeds with 201 and link" $ do
p <- post "/auto_incrementing_pk" [json| { "non_nullable_string":"not null"} |]
liftIO $ do
simpleBody p `shouldBe` ""
simpleHeaders p `shouldSatisfy` matchHeader hLocation "/auto_incrementing_pk\\?id=eq\\.[0-9]+"
simpleStatus p `shouldBe` created201
let Just location = lookup hLocation $ simpleHeaders p
r <- get location
let [record] = fromJust (JSON.decode $ simpleBody r :: Maybe [IncPK])
liftIO $ do
incStr record `shouldBe` "not null"
incNullableStr record `shouldBe` Nothing
context "into a table with simple pk" $
it "fails with 400 and error" $
post "/simple_pk" [json| { "extra":"foo"} |]
`shouldRespondWith` 400
context "into a table with no pk" . after_ (clearTable "no_pk") $ do
it "succeeds with 201 and a link including all fields" $ do
p <- post "/no_pk" [json| { "a":"foo", "b":"bar" } |]
liftIO $ do
simpleBody p `shouldBe` ""
simpleHeaders p `shouldSatisfy` matchHeader hLocation "/no_pk\\?a=eq.foo&b=eq.bar"
simpleStatus p `shouldBe` created201
it "returns full details of inserted record if asked" $ do
p <- request methodPost "/no_pk"
[("Prefer", "return=representation")]
[json| { "a":"bar", "b":"baz" } |]
liftIO $ do
simpleBody p `shouldBe` [json| { "a":"bar", "b":"baz" } |]
simpleHeaders p `shouldSatisfy` matchHeader hLocation "/no_pk\\?a=eq.bar&b=eq.baz"
simpleStatus p `shouldBe` created201
it "can post nulls" $ do
p <- request methodPost "/no_pk"
[("Prefer", "return=representation")]
[json| { "a":null, "b":"foo" } |]
liftIO $ do
simpleBody p `shouldBe` [json| { "a":null, "b":"foo" } |]
simpleHeaders p `shouldSatisfy` matchHeader hLocation "/no_pk\\?a=is.null&b=eq.foo"
simpleStatus p `shouldBe` created201
context "with compound pk supplied" . after_ (clearTable "compound_pk") $
it "builds response location header appropriately" $
post "/compound_pk" [json| { "k1":12, "k2":42 } |]
`shouldRespondWith` ResponseMatcher {
matchBody = Nothing,
matchStatus = 201,
matchHeaders = ["Location" <:> "/compound_pk?k1=eq.12&k2=eq.42"]
}
context "with invalid json payload" $
it "fails with 400 and error" $
post "/simple_pk" "}{ x = 2" `shouldRespondWith` 400
context "jsonb" . after_ (clearTable "json") $ do
it "serializes nested object" $ do
let inserted = [json| { "data": { "foo":"bar" } } |]
p <- request methodPost "json" [("Prefer", "return=representation")] inserted
liftIO $ do
simpleBody p `shouldBe` inserted
simpleHeaders p `shouldSatisfy` matchHeader hLocation "/json\\?data=eq\\.%7B%22foo%22%3A%22bar%22%7D"
simpleStatus p `shouldBe` created201
it "serializes nested array" $ do
let inserted = [json| { "data": [1,2,3] } |]
p <- request methodPost "json" [("Prefer", "return=representation")] inserted
liftIO $ do
simpleBody p `shouldBe` inserted
simpleHeaders p `shouldSatisfy` matchHeader hLocation "/json\\?data=eq\\.%5B1%2C2%2C3%5D"
simpleStatus p `shouldBe` created201
describe "CSV insert" $ do
after_ (clearTable "menagerie") . context "disparate csv types" $
it "succeeds with multipart response" $ do
p <- request methodPost "/menagerie" [("Content-Type", "text/csv")]
[str|integer,double,varchar,boolean,date,money,enum
|13,3.14159,testing!,false,1900-01-01,$3.99,foo
|12,0.1,a string,true,1929-10-01,12,bar
|]
liftIO $ do
simpleBody p `shouldBe` "Content-Type: application/json\nLocation: /menagerie?integer=eq.13\n\n\n--postgrest_boundary\nContent-Type: application/json\nLocation: /menagerie?integer=eq.12\n\n"
simpleStatus p `shouldBe` created201
after_ (clearTable "no_pk") . context "requesting full representation" $ do
it "returns full details of inserted record" $
request methodPost "/no_pk"
[("Content-Type", "text/csv"), ("Prefer", "return=representation")]
"a,b\nbar,baz"
`shouldRespondWith` ResponseMatcher {
matchBody = Just [json| { "a":"bar", "b":"baz" } |]
, matchStatus = 201
, matchHeaders = ["Content-Type" <:> "application/json",
"Location" <:> "/no_pk?a=eq.bar&b=eq.baz"]
}
it "can post nulls" $
request methodPost "/no_pk"
[("Content-Type", "text/csv"), ("Prefer", "return=representation")]
"a,b\nNULL,foo"
`shouldRespondWith` ResponseMatcher {
matchBody = Just [json| { "a":null, "b":"foo" } |]
, matchStatus = 201
, matchHeaders = ["Content-Type" <:> "application/json",
"Location" <:> "/no_pk?a=is.null&b=eq.foo"]
}
after_ (clearTable "no_pk") . context "with wrong number of columns" $ do
it "fails for too few" $ do
p <- request methodPost "/no_pk" [("Content-Type", "text/csv")] "a,b\nfoo,bar\nbaz"
liftIO $ simpleStatus p `shouldBe` badRequest400
it "fails for too many" $ do
p <- request methodPost "/no_pk" [("Content-Type", "text/csv")] "a,b\nfoo,bar\nbaz,bat,bad"
liftIO $ simpleStatus p `shouldBe` badRequest400
describe "Putting record" $ do
context "to unkonwn uri" $
it "gives a 404" $
request methodPut "/fake" []
[json| { "real": false } |]
`shouldRespondWith` 404
context "to a known uri" $ do
context "without a fully-specified primary key" $
it "is not an allowed operation" $
request methodPut "/compound_pk?k1=eq.12" []
[json| { "k1":12, "k2":42 } |]
`shouldRespondWith` 405
context "with a fully-specified primary key" $ do
context "not specifying every column in the table" $
it "is rejected for lack of idempotence" $
request methodPut "/compound_pk?k1=eq.12&k2=eq.42" []
[json| { "k1":12, "k2":42 } |]
`shouldRespondWith` 400
context "specifying every column in the table" . after_ (clearTable "compound_pk") $ do
it "can create a new record" $ do
p <- request methodPut "/compound_pk?k1=eq.12&k2=eq.42" []
[json| { "k1":12, "k2":42, "extra":3 } |]
liftIO $ do
simpleBody p `shouldBe` ""
simpleStatus p `shouldBe` status204
r <- get "/compound_pk?k1=eq.12&k2=eq.42"
let rows = fromJust (JSON.decode $ simpleBody r :: Maybe [CompoundPK])
liftIO $ do
length rows `shouldBe` 1
let record = head rows
compoundK1 record `shouldBe` 12
compoundK2 record `shouldBe` 42
compoundExtra record `shouldBe` Just 3
it "can update an existing record" $ do
_ <- request methodPut "/compound_pk?k1=eq.12&k2=eq.42" []
[json| { "k1":12, "k2":42, "extra":4 } |]
_ <- request methodPut "/compound_pk?k1=eq.12&k2=eq.42" []
[json| { "k1":12, "k2":42, "extra":5 } |]
r <- get "/compound_pk?k1=eq.12&k2=eq.42"
let rows = fromJust (JSON.decode $ simpleBody r :: Maybe [CompoundPK])
liftIO $ do
length rows `shouldBe` 1
let record = head rows
compoundExtra record `shouldBe` Just 5
context "with an auto-incrementing primary key" . after_ (clearTable "auto_incrementing_pk") $
it "succeeds with 204" $
request methodPut "/auto_incrementing_pk?id=eq.1" []
[json| {
"id":1,
"nullable_string":"hi",
"non_nullable_string":"bye",
"inserted_at": "2020-11-11"
} |]
`shouldRespondWith` ResponseMatcher {
matchBody = Nothing,
matchStatus = 204,
matchHeaders = []
}
describe "Patching record" $ do
context "to unkonwn uri" $
it "gives a 404" $
request methodPatch "/fake" []
[json| { "real": false } |]
`shouldRespondWith` 404
context "on an empty table" $
it "indicates no records found to update" $
request methodPatch "/simple_pk" []
[json| { "extra":20 } |]
`shouldRespondWith` 404
context "in a nonempty table" . before_ (clearTable "items" >> createItems 15) .
after_ (clearTable "items") $ do
it "can update a single item" $ do
g <- get "/items?id=eq.42"
liftIO $ simpleHeaders g
`shouldSatisfy` matchHeader "Content-Range" "\\*/0"
request methodPatch "/items?id=eq.1" []
[json| { "id":42 } |]
`shouldRespondWith` ResponseMatcher {
matchBody = Nothing,
matchStatus = 204,
matchHeaders = ["Content-Range" <:> "0-0/1"]
}
g' <- get "/items?id=eq.42"
liftIO $ simpleHeaders g'
`shouldSatisfy` matchHeader "Content-Range" "0-0/1"
it "can update multiple items" $ do
replicateM_ 10 $ post "/auto_incrementing_pk"
[json| { non_nullable_string: "a" } |]
replicateM_ 10 $ post "/auto_incrementing_pk"
[json| { non_nullable_string: "b" } |]
_ <- request methodPatch
"/auto_incrementing_pk?non_nullable_string=eq.a" []
[json| { non_nullable_string: "c" } |]
g <- get "/auto_incrementing_pk?non_nullable_string=eq.c"
liftIO $ simpleHeaders g
`shouldSatisfy` matchHeader "Content-Range" "0-9/10"
it "can update based on a computed column" $
request methodPatch
"/items?always_true=eq.false"
[("Prefer", "return=representation")]
[json| { id: 100 } |]
`shouldRespondWith` 404
it "can provide a representation" $ do
_ <- post "/items"
[json| { id: 1 } |]
request methodPatch
"/items?id=eq.1"
[("Prefer", "return=representation")]
[json| { id: 99 } |]
`shouldRespondWith` [json| [{id:99}] |]
describe "Row level permission" $
it "set user_id when inserting rows" $ do
_ <- post "/postgrest/users" [json| { "id":"jdoe", "pass": "1234", "role": "postgrest_test_author" } |]
_ <- post "/postgrest/users" [json| { "id":"jroe", "pass": "1234", "role": "postgrest_test_author" } |]
p1 <- request methodPost "/authors_only"
[ authHeaderBasic "jdoe" "1234", ("Prefer", "return=representation") ]
[json| { "secret": "nyancat" } |]
liftIO $ do
simpleBody p1 `shouldBe` [json| { "owner":"jdoe", "secret":"nyancat" } |]
simpleStatus p1 `shouldBe` created201
p2 <- request methodPost "/authors_only"
-- jwt token for jroe
[ authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqcm9lIn0.YuF_VfmyIxWyuceT7crnNKEprIYXsJAyXid3rjPjIow", ("Prefer", "return=representation") ]
[json| { "secret": "lolcat", "owner": "hacker" } |]
liftIO $ do
simpleBody p2 `shouldBe` [json| { "owner":"jroe", "secret":"lolcat" } |]
simpleStatus p2 `shouldBe` created201
| johan--/postgrest | test/Feature/InsertSpec.hs | mit | 12,859 | 0 | 26 | 3,854 | 2,667 | 1,371 | 1,296 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
import Control.Concurrent.ParallelIO
import Control.Exception
import Control.Lens
import qualified Data.Map as Map
import Network.HTTP.Client (newManager)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Network.Wreq hiding (options, header, get)
import Network.Wreq.Session hiding (options)
import Options.Applicative
import System.IO
import HB.Session
import HB.Types
import HB.Utils
import HB.Wreq
-- | Parsed command-line options for the downloader.
data MainOptions = MainOptions { optVerbose :: Bool -- ^ print per-download progress details
                               , optPlatform :: String -- ^ platform whose binaries are fetched (default "Android")
                               , optDestination :: String -- ^ directory the binaries are written to
                               , optHashStorage :: String } -- ^ base name of the hash-storage file (no suffix)
-- | Command-line parser for 'MainOptions'.
--
-- NOTE(review): the hashes option claims the short flag @-h@, which is also
-- the abbreviation installed by 'helper' in 'main' — confirm which one wins
-- when a user passes @-h@.
options :: Parser MainOptions
options =
    MainOptions
        <$> switch
                ( long "verbose"
               <> short 'v'
               <> help "be verbose" )
        <*> strOption
                ( long "platform"
               <> short 'p'
               <> value "Android"
               <> help "platform to download binaries for" )
        <*> strOption
                ( long "destination"
               <> short 'd'
               <> help "where to download binaries" )
        <*> strOption
                ( long "hashes"
               <> short 'h'
               <> value "hashes"
               <> help "file with hashes (without suffix)" )
-- | Entry point: disable stdout buffering (so progress messages appear
-- immediately), parse the command line, and hand over to 'runHB'.
main :: IO ()
main = do
  hSetBuffering stdout NoBuffering
  parsed <- execParser parserInfo
  runHB parsed
  where
    parserInfo =
      info (helper <*> options)
           ( fullDesc
          <> progDesc "Download binaries from HumbleBundle"
          <> header "HumbleBundle downloader"
           )
-- | Drive the whole download: load previously recorded hashes, authenticate
-- against HumbleBundle inside one HTTP session, fetch every owned bundle's
-- description in parallel, download the binaries for the selected platform,
-- and persist the merged hash set.
runHB :: MainOptions -> IO ()
runHB opts = do
  let pl = strToPlatform' $ optPlatform opts
      path = DirAbsName $ optDestination opts
      hashStorage = FileRelName $ optHashStorage opts
  putStrLn $ "Getting hashes from " ++ show hashStorage ++ "..."
  hashes <- loadHashes hashStorage
  -- forM_ (map unFileRelName $ Map.keys . getHashes $ hashes) putStrLn
  putStrLn $ "Total " ++ show (Map.size . getHashes $ hashes) ++ " hashes there"
  -- credentials
  -- One session for the auth handshake and all API calls; hbCatch handles
  -- HTTP failures for the whole bracket.
  bundles <- handle hbCatch . withSession' $ \sess -> do
    resp <- hbInit sess >>=
            hbAuth sess
    let cookies = resp ^. responseCookieJar
    saveCookies cookies
    keys <- hbKeys sess
    putStrLn "Fetching bundles info..."
    let urls = map ("https://www.humblebundle.com/api/v1/order/" ++) keys
    -- mapM_ putStrLn urls
    -- Fetch and decode each order in parallel, then flatten and de-duplicate.
    bundles :: [DL] <- fmap (uniq . concat)
                       . parallelInterleaved
                       . map (\u -> view responseBody <$> (asJSON =<< get sess u))
                       $ urls
    -- print bundles
    pure bundles
  -- fetch all bundles data and extract download information
  let dls = filterPlatform pl bundles
  -- execute downloads
  putStrLn "Downloads on it's way..."
  m <- newManager tlsManagerSettings
  newHashes <- parallelInterleaved
               . map (executeDownload m hashes path (optVerbose opts)) $ dls
  -- forM_ (map (unFileRelName . fst) $ newHashes) putStrLn
  -- NOTE(review): Map.union is left-biased, so on a key collision the OLD
  -- hash is kept and the freshly computed one discarded — confirm intended.
  saveHashes (Hashes $ Map.union (getHashes hashes) . Map.fromList $ newHashes) hashStorage
  -- parallel-io requires draining its global pool before the program exits.
  stopGlobalPool
  putStrLn "All done!"
  -- Logout is GET to
  -- https://www.humblebundle.com/logout?goto=/
-- Logout is GET to
-- https://www.humblebundle.com/logout?goto=/
| lattenwald/humblebundle.hs | src/Main.hs | mit | 3,272 | 0 | 21 | 1,010 | 751 | 375 | 376 | 75 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
module Capnp.Mutability
( Mutability(..)
, MaybeMutable(..)
, create
, createT
) where
import Control.Monad.Primitive (PrimMonad(PrimState))
import Control.Monad.ST (ST, runST)
import Data.Kind (Type)
-- | 'Mutability' is used as a type parameter (with the DataKinds extension)
-- to indicate the mutability of some values in this library; 'Const' denotes
-- an immutable value, while @'Mut' s@ denotes a value that can be mutated
-- in the scope of the state token @s@.
data Mutability = Const | Mut Type
-- | 'MaybeMutable' relates mutable and immutable versions of a type.
class MaybeMutable (f :: Mutability -> Type) where
    -- | Convert an immutable value to a mutable one. This makes a full copy,
    -- so the original remains safe to use.
    thaw :: (PrimMonad m, PrimState m ~ s) => f 'Const -> m (f ('Mut s))
    -- | Convert a mutable value to an immutable one. This makes a full copy,
    -- so the original remains safe to use.
    freeze :: (PrimMonad m, PrimState m ~ s) => f ('Mut s) -> m (f 'Const)
    -- | Like 'thaw', except that the caller is responsible for ensuring that
    -- the original value is not subsequently used; doing so may violate
    -- referential transparency.
    --
    -- The default implementation of this is just the same as 'thaw', but
    -- typically an instance will override this with a trivial (unsafe) cast,
    -- hence the obligation described above.
    unsafeThaw :: (PrimMonad m, PrimState m ~ s) => f 'Const -> m (f ('Mut s))
    unsafeThaw = thaw
    -- | Unsafe version of 'freeze' analogous to 'unsafeThaw'. The caller must
    -- ensure that the original value is not used after this call.
    unsafeFreeze :: (PrimMonad m, PrimState m ~ s) => f ('Mut s) -> m (f 'Const)
    unsafeFreeze = freeze
-- | Build a mutable value inside 'ST' and return it frozen, without a full
-- copy. This is safe even though it uses 'unsafeFreeze' internally: 'runST'
-- guarantees the mutable value can never escape to the caller, so freezing
-- it in place cannot violate referential transparency.
create :: MaybeMutable f => (forall s. ST s (f ('Mut s))) -> f 'Const
create action = runST (unsafeFreeze =<< action)
-- | Variant of 'create' whose 'ST' action produces a whole 'Traversable'
-- container of mutable values; each element is frozen before escaping.
createT :: (Traversable t, MaybeMutable f) => (forall s. ST s (t (f ('Mut s)))) -> t (f 'Const)
createT action = runST (traverse unsafeFreeze =<< action)
| zenhack/haskell-capnp | lib/Capnp/Mutability.hs | mit | 2,390 | 0 | 16 | 520 | 492 | 270 | 222 | 24 | 1 |
module Day11 where
import Data.List
-- | True when the string contains at least two /distinct/ doubled letters
-- (e.g. "aa" and "bb"); the same pair counted twice does not qualify.
differentPairs :: String -> Bool
differentPairs = go []
  where
    -- Once two distinct pairs have been collected we are done, no matter
    -- how much input remains.
    go seen _
      | length seen >= 2 = True
    go seen (a:b:rest)
      | a == b && [a, b] `notElem` seen = go ([a, b] : seen) (b : rest)
      | otherwise                       = go seen (b : rest)
    go _ _ = False
-- | True when the password contains a run of three consecutive ascending
-- letters such as "abc" or "xyz".
--
-- Fixed: the original pattern only matched strings of length >= 2 and
-- crashed with a pattern-match failure on "" or one-character input; those
-- inputs now return False.
increasingStraight :: String -> Bool
increasingStraight s@(_:_:_) = any straight (zip3 s (tail s) (drop 2 s))
  where
    -- A triple is a straight iff it appears verbatim in the alphabet.
    straight (a, b, c) = [a, b, c] `isInfixOf` ['a' .. 'z']
increasingStraight _ = False
-- | Recursive variant of 'increasingStraight': slides a window of three
-- characters along the string looking for a run that is a substring of the
-- alphabet.
--
-- Fixed: the original had no clause for strings shorter than three
-- characters and crashed with a pattern-match failure on them; they now
-- return False.
increasingStraight' :: String -> Bool
increasingStraight' (a:b:c:rest) =
    [a, b, c] `isInfixOf` ['a' .. 'z'] || increasingStraight' (b : c : rest)
increasingStraight' _ = False
-- | True when the password avoids every forbidden letter: i, o and l.
noIOLs :: String -> Bool
noIOLs = all (`notElem` "iol")
-- | Infinite stream of successive password candidates produced by repeatedly
-- applying 'inc'; the first element is the increment of the seed (the seed
-- itself is not included).
genPW :: String -> [String]
genPW = tail . iterate inc
-- | Increment a password string like an odometer over a restricted alphabet:
-- the last character is bumped, with 'z' wrapping to 'a' and carrying into
-- the preceding characters. The confusing letters i, o and l are skipped
-- entirely (h jumps to j, k to m, n to p).
inc :: String -> String
inc [] = []
inc xs =
    case bump (last xs) of
      (c, True)  -> inc (init xs) ++ [c]  -- wrapped: carry into the prefix
      (c, False) -> init xs ++ [c]
  where
    bump 'z' = ('a', True)
    bump 'h' = ('j', False)
    bump 'n' = ('p', False)
    bump 'k' = ('m', False)
    bump ch  = (succ ch, False)
-- | Print the first two valid passwords after the puzzle input "vzbxkghb"
-- (answers to both parts of the puzzle).
part1n2 :: IO ()
part1n2 = print (take 2 valids)
  where
    valids = filter ok (genPW "vzbxkghb")
    ok pw  = differentPairs pw && increasingStraight' pw
module GitHub.Repositories.Keys where
import GitHub.Internal
-- | URL path for a repository's deploy-keys collection:
-- @/repos/:owner/:repo/keys@.
repoKeys o r = ownerRepo o r <> "/keys"
-- | URL path for one deploy key: @/repos/:owner/:repo/keys/:id@.
-- NOTE(review): callers pass the id as an 'Int' and it is appended with
-- '<>'; presumably the path type from "GitHub.Internal" converts numeric
-- segments — confirm there.
repoKey o r i = repoKeys o r <> "/" <> i
-- | GET /repos/:owner/:repo/keys — list all deploy keys of a repository.
listKeys :: OwnerName -> RepoName -> GitHub GroupKeysData
listKeys owner repo = get [] (repoKeys owner repo)
-- | GET /repos/:owner/:repo/keys/:id — fetch a single deploy key by id.
getKey :: OwnerName -> RepoName -> Int -> GitHub GroupKeyData
getKey owner repo keyId = get [] (repoKey owner repo keyId)
-- | POST /repos/:owner/:repo/keys — add a new deploy key to a repository.
createKey :: OwnerName -> RepoName -> NewGroupKey -> GitHub GroupKeyData
createKey owner repo newKey = post [] (repoKeys owner repo) newKey
-- | PATCH /repos/:owner/:repo/keys/:id — update an existing deploy key.
editKey :: OwnerName -> RepoName -> Int -> GroupKeyPatch -> GitHub GroupKeyData
editKey owner repo keyId patchBody = patch [] (repoKey owner repo keyId) patchBody
-- | DELETE /repos/:owner/:repo/keys/:id — remove a deploy key.
deleteKey :: OwnerName -> RepoName -> Int -> GitHub ()
deleteKey owner repo keyId = delete [] $ repoKey owner repo keyId
| SaneApp/github-api | src/GitHub/Repositories/Keys.hs | mit | 878 | 40 | 11 | 170 | 470 | 230 | 240 | -1 | -1 |
module HsPredictor.LoadURL where
--standard
import Control.Exception (catch, throwIO)
import Prelude hiding (catch)
--3rd party
import Data.ByteString.Lazy.Char8 (pack, unpack)
import Data.ByteString.Lazy.Internal (ByteString)
import Network.HTTP.Conduit
-- | Fetch a URL over HTTP(S) and return the response body as a 'String'.
-- On any 'HttpException' (bad URL, connection failure, …) the literal text
-- "wrong url or no connection" is returned instead of raising — callers
-- cannot distinguish this placeholder from a real page, which is the
-- deliberate best-effort behaviour of this module.
getBody :: String -- ^ url
        -> IO String
getBody url = fmap unpack (simpleHttp url `catch` fallback)
  where
    fallback :: HttpException -> IO ByteString
    fallback _ = return $ pack "wrong url or no connection"
| Taketrung/HsPredictor | library/HsPredictor/LoadURL.hs | mit | 624 | 0 | 9 | 184 | 140 | 80 | 60 | 13 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.