code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module X_2_5_Simplification_and_NNF where
import qualified Test.HUnit as U (Counts, Test (TestList),
runTestTT)
import qualified Test.HUnit.Util as U (t)
import X_2_1_Prop_Syntax
import X_2_3_Prop_Validity_Satisfiability_Tautology hiding (tps1)
-- 2.5 Simplification and negation normal form
-- | One simplification step at the root of a formula: eliminate the
-- constants T and F with the usual propositional laws (e.g. @And T p = p@,
-- @Impl F _ = T@, @Not (Not p) = p@).  Subterms are not visited --
-- 'psimplify' drives this bottom-up.  A formula matching no law is
-- returned unchanged.
psimplify1 fs = case fs of
  Not F       -> T
  Not T       -> F
  Not (Not p) -> p
  And _ F     -> F
  And F _     -> F
  And T p     -> p
  And p T     -> p
  Or _ T      -> T
  Or T _      -> T
  Or p F      -> p
  Or F p      -> p
  Impl F _    -> T
  Impl _ T    -> T
  Impl T p    -> p
  Impl p F    -> Not p
  Iff p T     -> p
  Iff T p     -> p
  Iff p F     -> Not p
  Iff F p     -> Not p
  _           -> fs
-- | Fully simplify a formula: recursively simplify all subterms, then apply
-- 'psimplify1' at each level on the way back up.  Atoms and constants fall
-- through unchanged.
psimplify fs = case fs of
  Not p    -> psimplify1 (Not (psimplify p))
  And p q  -> psimplify1 (And (psimplify p) (psimplify q))
  Or p q   -> psimplify1 (Or (psimplify p) (psimplify q))
  Impl p q -> psimplify1 (Impl (psimplify p) (psimplify q))
  Iff p q  -> psimplify1 (Iff (psimplify p) (psimplify q))
  _        -> fs
-- | Simplification collapses every constant subterm of the parsed formula.
tps1 = U.t "tps1"
  (pp (psimplify (pr "(T -> (x <-> F)) -> ~(y v F ^ z)")))
  "~x -> ~y"

-- | A formula that simplifies all the way down to the constant T.
tps2 = U.t "tps2"
  (pp (psimplify (pr "((x -> y) -> T) v ~F")))
  "T"
-- partial: only defined for literals (atoms and negated atoms)
isNegative (Not (Atom _)) = True
isNegative (Atom _) = False

-- | A literal is positive iff it is not negative (partial, like 'isNegative').
isPositive = not . isNegative

-- partial: negate a literal without stacking double negations.
-- NOTE(review): this top-level 'negate' coexists with 'Prelude.negate'
-- (not hidden by the import list), so use sites must disambiguate.
negate (Not a@(Atom _)) = a
negate a@(Atom _) = Not a
-- | Negation normal form: simplify first, then push negations inward until
-- they rest only on atoms, eliminating Impl and Iff along the way.  An Iff
-- expands into a disjunction of conjunctions, so the result can grow
-- exponentially (see 'nenf' for the size-preserving variant).
nnf = nnf' . psimplify
  where
    nnf' fs = case fs of
      And p q        -> And (nnf' p) (nnf' q)
      Or p q         -> Or (nnf' p) (nnf' q)
      Impl p q       -> Or (nnf' (Not p)) (nnf' q)
      Iff p q        -> Or (And (nnf' p) (nnf' q))
                           (And (nnf' (Not p)) (nnf' (Not q)))
      Not (Not p)    -> nnf' p
      Not (And p q)  -> Or (nnf' (Not p)) (nnf' (Not q))
      Not (Or p q)   -> And (nnf' (Not p)) (nnf' (Not q))
      Not (Impl p q) -> And (nnf' p) (nnf' (Not q))
      Not (Iff p q)  -> Or (And (nnf' p) (nnf' (Not q)))
                           (And (nnf' (Not p)) (nnf' q))
      _              -> fs
-- | 'nnf' preserves logical equivalence: the original formula and its NNF
-- form a tautological Iff.  Also pins the (larger) printed NNF output.
tnnf1 = U.t "tnnf1"
  (let fm      = "(p <-> q) <-> ~(r -> s)"
       fmpr    = pr fm
       fmnnf   = nnf fmpr
       taut    = tautology (Iff fmpr fmnnf)
       fmnnfpp = pp fmnnf
   in (fm, fmnnfpp, taut))
  ( "(p <-> q) <-> ~(r -> s)"
  , "(((p ^ q) v (~p ^ ~q)) ^ r ^ ~s) v (((p ^ ~q) v (~p ^ q)) ^ (~r v s))"
  , True
  )
-- only pushes negation down to atoms (does not eliminate Iff)
-- | Like 'nnf' but keeps Iff nodes (negated Iff flips one side instead of
-- expanding), avoiding the potential exponential blow-up of full NNF.
nenf = nenf' . psimplify
  where
    nenf' fs = case fs of
      Not (Not p)    -> nenf' p
      Not (And p q)  -> Or (nenf' (Not p)) (nenf' (Not q))
      Not (Or p q)   -> And (nenf' (Not p)) (nenf' (Not q))
      Not (Impl p q) -> And (nenf' p) (nenf' (Not q))
      Not (Iff p q)  -> Iff (nenf' p) (nenf' (Not q)) -- different from nnf
      And p q        -> And (nenf' p) (nenf' q)
      Or p q         -> Or (nenf' p) (nenf' q)
      Impl p q       -> Or (nenf' (Not p)) (nenf' q)
      Iff p q        -> Iff (nenf' p) (nenf' q)       -- different from nnf
      _              -> fs
-- | 'nenf' also preserves equivalence, and its output stays small because
-- Iff is kept rather than expanded.
tnnf2 = U.t "tnnf2"
  (let fm      = "(p <-> q) <-> ~(r -> s)"
       fmpr    = pr fm
       fmnnf   = nenf fmpr
       taut    = tautology (Iff fmpr fmnnf)
       fmnnfpp = pp fmnnf
   in (fm, fmnnfpp, taut))
  ( "(p <-> q) <-> ~(r -> s)"
  , "p <-> q <-> r ^ ~s"
  , True
  )

-- | Monotonicity of conjunction and disjunction w.r.t. implication.
tttt = U.t "tttt"
  (and (fmap (tautology . pr) [ "(p -> pp) ^ (q -> qq) -> (p ^ q -> pp ^ qq)"
                              , "(p -> pp) ^ (q -> qq) -> (p v q -> pp v qq)"
                              ]))
  True
------------------------------------------------------------------------------
-- test
-- | Run every test defined in this module via HUnit.
test :: IO U.Counts
test =
  U.runTestTT $ U.TestList $
    tps1 ++ tps2 ++ tnnf1 ++ tnnf2 ++ tttt
-- end of file ---
| haroldcarr/learn-haskell-coq-ml-etc | math-and-functional-programming/2009-Handbook_of_Practical_Logic_and_Automated_Reasoning/src/X_2_5_Simplification_and_NNF.hs | unlicense | 4,308 | 0 | 16 | 1,892 | 1,631 | 807 | 824 | 100 | 20 |
module Lycopene.Action.Configure where
import Lycopene.Action
import Lycopene.Environment (createDatabase)
import Lycopene.Core.Project as Project
import Lycopene.Core.Sprint as Sprint
import System.FilePath (dropFileName)
-- | Ensure the parent directory of @target@ exists, creating it when
-- missing, so a file can subsequently be written at @target@.
prepareConfigure :: FilePath -> Action ()
prepareConfigure target = do
  let parent = dropFileName target
  exist <- isDir parent
  if exist then return () else mkdir parent
-- | Attempt to create fresh database.
-- Runs in the domain layer: sets up the schema via 'createDatabase', then
-- seeds the default inbox project and its two sprints.
configure :: Action ()
configure = domain $ createDatabase
                  >> Project.inbox
                  >> Sprint.inboxDefault
                  >> Sprint.inboxBacklog
                  >> return ()
| utky/lycopene | backup/Action/Configure.hs | apache-2.0 | 709 | 0 | 10 | 215 | 158 | 85 | 73 | 17 | 2 |
{-# LANGUAGE PackageImports #-}
import "FruitHools" Application (develMain)
import Prelude (IO)
-- | Development entry point: delegates to the application's 'develMain'
-- (the "FruitHools" package import pins which Application module is used).
main :: IO ()
main = develMain
| SaintOlga/FruitHools | app/devel.hs | apache-2.0 | 128 | 0 | 6 | 19 | 34 | 20 | 14 | 5 | 1 |
-- | Grade one student record @[midterm, final, retest]@.
-- A score of -1 in either exam means the exam was missed and fails the
-- student outright; otherwise the letter is chosen from the exam total,
-- with the retest able to lift a D to a C.
-- NOTE: only the empty list and lists of length >= 3 are handled; input is
-- assumed to come as triples (as produced by @main@) -- TODO confirm.
ans :: [Int] -> String
ans [] = ""
ans (m : f : re : _)
  | m == (-1) || f == (-1) = "F"
  | total >= 80            = "A"
  | total >= 65            = "B"
  | total >= 50            = "C"
  | total >= 30            = if re >= 50 then "C" else "D"
  | otherwise              = "F"
  where
    total = m + f
-- | Read whitespace-separated integer rows from stdin, stop at the
-- terminator row @-1 -1 -1@, and print one grade letter per student.
main = do
  c <- getContents
  let i  = map (map read) $ map words $ lines c :: [[Int]]
      i' = takeWhile (/= [-1,-1,-1]) i
      o  = map ans i'
  mapM_ putStrLn o
| a143753/AOJ | ITP1_7_A.hs | apache-2.0 | 449 | 1 | 14 | 179 | 310 | 159 | 151 | 16 | 1 |
{-# LANGUAGE RankNTypes, OverloadedStrings, FlexibleContexts, NoMonomorphismRestriction #-}
module TF.Parsers.ParserUtils where
import Prelude hiding (Word, last)
import Control.Lens hiding (noneOf,(??), children)
import Control.Error as E
import Data.Maybe (fromJust)
import Data.Data
import qualified TF.Printer as P
import Data.Monoid
import Control.Monad.Except
import Control.Monad.Error.Lens
import qualified Data.Map as M
import TF.Types hiding (state, isSubtypeOf)
import TF.CheckerUtils (withEmpty, withEmpty''')
import TF.Util
import qualified Data.Text as Te
import Text.Parsec hiding (runParser, anyToken)
import TF.Errors
import Data.Tree
import Data.Tree.Zipper hiding (after,before,first)
import Text.PrettyPrint (render)
import Control.Arrow (first)
import TF.Type.Nodes
-- | Split a computation yielding a pair into a pair of computations, one
-- per component.  Note the source action's effects occur in each projection.
munzip action = (liftM fst action, liftM snd action)

-- | Combine a pair of computations into one yielding a pair; the first
-- component's effects run before the second's.
mzip (ma, mb) = liftM2 (,) ma mb
-- | Like 'withTrace' but for a parser returning a pair: the trace wrapper
-- is threaded through the first component.
withTrace' p = mzip . (first withTrace) . munzip $ p

-- | Run parser @p@ while recording it in the trace tree kept in the parser
-- state: a fresh empty node is inserted as last child before running @p@,
-- its label is filled in afterwards with the rendered result, and the
-- cursor then moves back up to the parent (when one exists).
withTrace p = do
  let modState f = modifyState $ _trace._Wrapped %~ f
  modState $ insert (Node "" []) . last . children
  result <- p
  modState $ modifyTree (\t -> t { rootLabel = render $ P.infoNode result })
  modState $ \s ->
    if isContained s then
      fromJust $ parent s
    else
      s
  return result
-- | Succeed only when the next unknown token's name is exactly @keyword@.
parseKeyword :: String -> ExpressionsM ()
parseKeyword keyword = do
  uk <- parseUnknownToken
  guard $ (uk ^. _Wrapped) == keyword
-- Parsers for individual Forth words: each fetches the word parser from the
-- reader environment and applies it to its fixed word name.
parseWordPostpone = view parseWord' >>= \parseWord -> lift $ parseWord "postpone"
parseWordParens = view parseWord' >>= \parseWord -> lift $ parseWord "("
parseWordExecute = view parseWord' >>= \parseWord -> lift $ parseWord "execute"
parseWordTick = view parseWord' >>= \parseWord -> lift $ parseWord "'"
parseWordLeftBracket = view parseWord' >>= \parseWord -> lift $ parseWord "["
parseWordColon = view parseWord' >>= \parseWord -> lift $ parseWord ":"
parseWordComma = view parseWord' >>= \parseWord -> lift $ parseWord ","
parseWordImmediate = view parseWord' >>= \parseWord -> lift $ parseWord "immediate"
parseWordDo = view parseWord' >>= \parseWord -> lift $ parseWord "do"
parseWordIf = view parseWord' >>= \parseWord -> lift $ parseWord "if"
parseWordCreate = view parseWord' >>= \parseWord -> lift $ parseWord "create"
parseWordDoes = view parseWord' >>= \parseWord -> lift $ parseWord "does>"
parseWordBegin = view parseWord' >>= \parseWord -> lift $ parseWord "begin"
parseWordRepeat = view parseWord' >>= \parseWord -> lift $ parseWord "repeat"
parseWordUntil = view parseWord' >>= \parseWord -> lift $ parseWord "until"
parseWordWhile = view parseWord' >>= \parseWord -> lift $ parseWord "while"
-- | Parse nodes (with @without@ words forbidden) up to, but not consuming,
-- the delimiter word @bs@ -- or until end of input.
manyWordsTillExcludingWithout :: Te.Text -> [Word] -> ExpressionsM [Node]
manyWordsTillExcludingWithout bs without = withEmpty''' $ do
  parseWord <- view parseWord'
  parseNodeWithout <- view parseNodeWithout'
  expr <- lift $ manyTill (parseNodeWithout without) ((lookAhead (parseWord bs) *> return ()) <|> eof)
  return expr
-- | Parse nodes up to, but not consuming, the delimiter word @bs@
-- (no forbidden-word filter, and end of input is not accepted here).
manyWordsTillExcluding :: Te.Text -> ExpressionsM [Node]
manyWordsTillExcluding bs = withEmpty''' $ do
  parseWord <- view parseWord'
  parseNode <- view parseNode'
  expr <- lift $ manyTill parseNode (lookAhead (parseWord bs))
  return expr
-- | Parse nodes (with @without@ words forbidden) until the delimiter word
-- @bs@ is parsed and consumed; the delimiter names the parse error.
manyWordsTillWithout :: Te.Text -> [Word] -> ExpressionsM [Node]
manyWordsTillWithout bs without = do
  parseNodeWithout <- view parseNodeWithout'
  parseWord <- view parseWord'
  let errorMsg = Te.unpack bs
  let parseDelimiter = (parseWord bs <?> errorMsg)
  lift $ withEmpty' $ manyTill (parseNodeWithout without) parseDelimiter

-- | Alias for 'withEmpty', kept for symmetry with @withEmpty'''@ above.
withEmpty' = withEmpty

-- | 'manyWordsTillWithout' with no forbidden words.
manyWordsTill :: Te.Text -> ExpressionsM [Node]
manyWordsTill bs = manyWordsTillWithout bs []
-- | Standard list of checker error handlers for a colon definition named
-- @colonName@: each error is rendered to a message (often prefixed with the
-- definition name) and passed to @handlingFunction@.
errorHandler handlingFunction colonName = [
  handler _ClashInWord handlingFunction,
  handler _BeginUntilNoFlag (handlingFunction . (("The body of begin until must produce a flag value!\n") ++)),
  handler (_TypeClashM._IfElseExprNotStatic) (handlingFunction . ((colonName <> ": If-Else branches do not have the same type\n") ++) . uncurry (++)),
  handler_ (_TypeClashM._IfExprNotStatic) (handlingFunction (colonName <> ": An if branch which has an unempty stack effect is not allowed when multiple effects are forbidden")),
  handler_ _MultiEffs (handlingFunction colonName),
  handler_ _UnemptyStack (throwing _UnemptyStack ("df","asdf")),
  handler_ _MultiEffClash (handlingFunction "asdf"),
  handler_ _CastsNotAllowed (handlingFunction (colonName <> ": Casts are not allowed")),
  handler _Clash handlingFunction,
  handler _UnknownWord handlingFunction
  ]
-- handler' :: (Typeable a, Handleable e m h) => Getting (First a) e a -> ((Getting (First a) e a ) -> a -> m r) -> h r
-- | Like 'handler' but the handling function also receives the prism.
handler' l f = handler l (\x -> f l x)

-- handler_' :: (Typeable a, Handleable e m h) => Getting (First a) e a -> ((Getting (First a) e a ) -> m r) -> h r
-- | Like 'handler_' but the handling function also receives the prism.
handler_' l f = handler_ l (f l)

-- errorHandler' :: (forall a r. (Getting (First a) e a ) -> a -> m r) -> String -> [Handler e m a ]
-- | Variant of 'errorHandler' whose handling function also receives the
-- prism that matched, enabling error-specific dispatch at the call site.
errorHandler' :: (AsTypeClash e, AsError' e, AsFeatureErr e, AsMultiEffClash e) => (forall a. Getting (First a) e a -> String -> m r) -> String -> [Handler e m r]
-- errorHandler' :: (AsTypeClash e, AsError' e, AsFeatureErr e, AsMultiEffClash e) => (AReview Error' String -> String -> m r) -> String -> [Handler e m r]
errorHandler' handlingFunction colonName = [
    handler' _ClashInWord handlingFunction
  , handler' _BeginUntilNoFlag (\l s -> flip handlingFunction ("The body of begin until must produce a flag value!\n" ++ s) l)
  , handler' (_TypeClashM._IfElseExprNotStatic) (\l (s1,s2) -> flip handlingFunction (colonName <> ": If-Else branches do not have the same type\n" <> (s1 <> s2)) l)
  , handler_' (_TypeClashM._IfExprNotStatic) (flip handlingFunction (colonName <> ": An if branch which has an unempty stack effect is not allowed when multiple effects are forbidden"))
  , handler_' _MultiEffs (flip handlingFunction colonName)
  , handler_' _MultiEffClash (flip handlingFunction "asdf")
  , handler_' _CastsNotAllowed (flip handlingFunction (colonName <> ": Casts are not allowed"))
  , handler' _Clash handlingFunction
  , handler' _UnknownWord handlingFunction
  ]
-- | Match any token that is an unknown (not-yet-defined) word.
parseUnknownToken' = do
  UnknownToken uk <- satisfy' (has _UnknownToken)
  return uk

-- | 'parseUnknownToken'' lifted into the expressions monad.
parseUnknownToken = do
  lift parseUnknownToken'

-- | The raw name carried by the next unknown token.
parseUnknownName = do
  (Unknown ukName) <- parseUnknownToken
  return ukName

-- | Core words that must not appear inside an if/else branch
-- ("then", ";", "postpone"); looked up in the current words map.
forbiddenInBranch :: ExpressionsM [Word]
forbiddenInBranch = do
  coreWords <- use _wordsMap
  return $ catMaybes $ map (\w -> M.lookup (WordIdentifier w) coreWords) ["then", ";", "postpone"]

-- | Left-biased alternative (synonym for 'mplus').
(</>) = mplus

-- | 'Executed' when interpreting, 'Compiled' when compiling, based on the
-- state variable in the parser state.
compOrExec' = lift $ views _stateVar (\sVar -> if sVar == INTERPRETSTATE then Executed else Compiled) <$> getState
| sleepomeno/TForth | src/TF/Parsers/ParserUtils.hs | apache-2.0 | 7,158 | 0 | 17 | 1,604 | 1,916 | 994 | 922 | -1 | -1 |
-- | <https://tools.ietf.org/html/rfc4511#section-4.2 Bind> operation.
--
-- This operation comes in four flavours:
--
-- * synchronous, exception throwing ('bind')
--
-- * synchronous, returning 'Either' 'ResponseError' @()@ ('bindEither')
--
-- * asynchronous, 'IO' based ('bindAsync')
--
-- * asynchronous, 'STM' based ('bindAsyncSTM')
--
-- Of those, the first one ('bind') is probably the most useful for the typical usecase.
module Ldap.Client.Bind
( Password(..)
, bind
, bindEither
, bindAsync
, bindAsyncSTM
, Async
, wait
, waitSTM
) where
import Control.Monad.STM (STM, atomically)
import Data.ByteString (ByteString)
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Ldap.Asn1.Type as Type
import Ldap.Client.Internal
-- | User's password, sent as-is in a simple bind (transport security is the
-- connection's responsibility).
newtype Password = Password ByteString
  deriving (Show, Eq)

-- | Perform the Bind operation synchronously. Raises 'ResponseError' on failures.
bind :: Ldap -> Dn -> Password -> IO ()
bind l username password =
  raise =<< bindEither l username password
-- | Perform the Bind operation synchronously. Returns @Left e@ where
-- @e@ is a 'ResponseError' on failures.
bindEither :: Ldap -> Dn -> Password -> IO (Either ResponseError ())
bindEither l username password =
  wait =<< bindAsync l username password

-- | Perform the Bind operation asynchronously. Call 'Ldap.Client.wait' to wait
-- for its completion.
bindAsync :: Ldap -> Dn -> Password -> IO (Async ())
bindAsync l username password =
  atomically (bindAsyncSTM l username password)

-- | Perform the Bind operation asynchronously.
--
-- Don't wait for its completion (with 'Ldap.Client.waitSTM') in the
-- same transaction you've performed it in.
bindAsyncSTM :: Ldap -> Dn -> Password -> STM (Async ())
bindAsyncSTM l username password =
  let req = bindRequest username password in sendRequest l (bindResult req) req
-- | Build the raw protocol message for a simple (username/password) bind,
-- pinned to LDAP protocol version 3.
bindRequest :: Dn -> Password -> Request
bindRequest (Dn username) (Password password) =
  Type.BindRequest ldapVersion
                   (Type.LdapDn (Type.LdapString username))
                   (Type.Simple password)
 where
  ldapVersion = 3

-- | Interpret the single expected BindResponse: a success result code maps
-- to @()@, any other code becomes 'ResponseErrorCode'; any other response
-- shape is 'ResponseInvalid'.
bindResult :: Request -> Response -> Either ResponseError ()
bindResult req (Type.BindResponse (Type.LdapResult code (Type.LdapDn (Type.LdapString dn))
                                                  (Type.LdapString msg) _) _ :| [])
  | Type.Success <- code = Right ()
  | otherwise = Left (ResponseErrorCode req code (Dn dn) msg)
bindResult req res = Left (ResponseInvalid req res)
| VictorDenisov/ldap-client | src/Ldap/Client/Bind.hs | bsd-2-clause | 2,559 | 0 | 15 | 534 | 582 | 315 | 267 | 40 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Jobpack
Description : Jobpack creation and submission
Needs debugging and improvements.
-}
module Jobpack
where
--import Pipeline
import Jobutil
import Data.List as DL
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Aeson
import Data.Binary as B --binary serialization of haskell data, big endian order
import Data.Bits (shiftL)
import System.IO
import System.Posix.Files
import System.Posix.User (getEffectiveUserName)
import System.Directory
import qualified Data.Map as Map
--import Codec.Compression.Zlib.Raw --RFC1951 Deflated compression format
import Control.Exception
import qualified Network.HTTP as Http
import qualified Network.TCP as TCP
import Network.URI
import Network.BSD (getHostName)
--TODO get rid of this it's in Pipeline
--TODO get rid of this it's in Pipeline
-- | Grouping strategies for pipeline stages (serialised by the ToJSON
-- instance below).
data Grouping
  = Split
  | Group_label
  | Group_node
  | Group_node_label
  | Group_all

-- | Magic number identifying a jobpack file.
get_magic :: Word16
get_magic = 0xd5c0

-- | Jobpack format version 1 (classic map/reduce jobs).
get_version1 :: Word16
get_version1 = 0x0001

-- | Jobpack format version 2 (pipeline jobs).
get_version2 :: Word16
get_version2 = 0x0002

-- | Fixed size, in bytes, of the jobpack header region.
get_header_size :: Int
get_header_size = 128
-- | A complete jobpack: job dictionary, environment, home archive and data.
data Jobpack = Jobpack {
    jobdict :: Jobdict,
    jobenv :: Jobenv,
    jobhome :: String,
    jobdata :: String
  }

-- | Job environment variables, wrapped so ToJSON can emit a JSON object.
data Jobenv = Jobenv [(String, String)] --needed it for toJSON dict conversion

instance ToJSON Jobenv where
  toJSON (Jobenv env_list) = toJSON $ Map.fromList env_list
-- | Jobpack header: magic, format version, and the byte offsets of the
-- four sections (job dict, job env, job home, job data).
data Header = Header {
    magic :: Word16,
    vs :: Word16,
    jobdict_offset :: Word32,
    jobenv_offset :: Word32,
    jobhome_offset :: Word32,
    jobdata_offset :: Word32
  } deriving (Show)

-- Field-by-field serialisation in declaration order; Data.Binary's
-- primitive instances use big-endian byte order.
instance Binary Header where
  put (Header m v jdo jeo jho jdto) = do
    put m
    put v
    put jdo
    put jeo
    put jho
    put jdto
  get = do
    m <- get
    v <- get
    jdo <- get
    jeo <- get
    jho <- get
    jdto <- get
    return (Header m v jdo jeo jho jdto)
-- | Build a jobpack header for the given format version and section sizes.
-- Sections are laid out contiguously after the fixed-size header, so each
-- offset is 'get_header_size' plus the lengths of all preceding sections.
make_header :: Word16 -> Int -> Int -> Int -> Header
make_header version job_dict_len job_env_len job_home_size =
  Header { magic          = get_magic
         , vs             = version
         , jobdict_offset = offsetAfter 0
         , jobenv_offset  = offsetAfter job_dict_len
         , jobhome_offset = offsetAfter (job_dict_len + job_env_len)
         , jobdata_offset = offsetAfter (job_dict_len + job_env_len + job_home_size)
         }
  where
    -- Absolute offset of a section that starts @preceding@ bytes after
    -- the header.
    offsetAfter :: Int -> Word32
    offsetAfter preceding = fromIntegral (get_header_size + preceding)
-- | One job input: label, size hint and replica URLs.
data Jobinput = Jobinput {
    label :: Int,
    size_hint :: Int,
    url_locs :: [String]
  }

-- Serialised as the JSON triple [label, size_hint, urls].
instance ToJSON Jobinput where
  toJSON (Jobinput l sh urls) = toJSON (l, sh, urls)
--TODO no
-- | Job dictionary: version 1 describes classic map/reduce jobs, version 2
-- describes pipeline jobs.  Inline comments are the original author's.
data Jobdict = Jobdict_vs1 {
    owner1 :: String,
    worker1 :: String, --the path to the worker binary relative to the jobhome
    prefix1 :: String,
    has_map :: Bool,
    has_reduce :: Bool,
    nr_reduces :: Int,
    scheduler :: String, --max_cores, force_local, force_remote
    inputs1 :: [[String]], -- TODO make it work for multiple replicas
    save_results :: Bool,
    save_info :: String
  }
  | Jobdict_vs2 {
    owner2 :: String,
    worker2 :: String,
    prefix2 :: String,
    pipeline :: [(String, Grouping)], --(stage, grouping)
    inputs2 :: [(Int, Int, [String])],
    save_results :: Bool,
    save_info :: String
  }
-- JSON wire format of each jobdict version.
-- NOTE(review): the key "save_reults" looks like a typo for "save_results";
-- confirm against the consumer's jobpack spec before changing it, since the
-- receiving side may already expect (or ignore) the misspelt key.
instance ToJSON Jobdict where
  toJSON (Jobdict_vs1 o w p hm hr nr s i sr si) = object ["owner" .= o,
                                                          "worker" .= w,
                                                          "prefix" .= p,
                                                          "map?" .= hm,
                                                          "reduce?" .= hr,
                                                          "nr_reduces" .= nr,
                                                          "scheduler" .= s,
                                                          "input" .= i,
                                                          "save_reults" .= sr,
                                                          "save_info" .= si
                                                         ]
  toJSON (Jobdict_vs2 o w p pipe i sr si) = object ["owner" .= o,
                                                    "worker" .= w,
                                                    "prefix" .= p,
                                                    "pipeline" .= pipe,
                                                    "input" .= i,
                                                    "save_reults" .= sr,
                                                    "save_info" .= si
                                                   ]

-- Groupings serialise to lower-case snake_case names.
instance ToJSON Grouping where
  toJSON Split = String "split"
  toJSON Group_label = String "group_label"
  toJSON Group_node = String "group_node"
  toJSON Group_node_label = String "group_node_label"
  toJSON Group_all = String "group_all"
-- | The job owner string, formatted as @user\@host@ from the effective
-- user name and the local host name.
get_owner :: IO String
get_owner = do
  usr <- getEffectiveUserName
  host <- getHostName
  return $ usr ++ "@" ++ host
-- | Build and write a version-1 (classic map/reduce) jobpack to the file
-- \"jp\" for the given input URLs and worker binary path.
create_classic_jobpack :: [String] -> FilePath -> IO ()
create_classic_jobpack inputs worker_path = do
  own <- get_owner
  inp <- get_effective_inputs inputs
  let jd1 = Jobdict_vs1 {
        owner1 = own,
        worker1 = "./job",
        prefix1 = "hjob_classic",
        has_map = True,
        has_reduce = True,
        nr_reduces = 1,
        scheduler = "",
        inputs1 = inp,
        save_results = False,
        save_info = "ddfs"
      }
  let jp = Jobpack jd1 (Jobenv []) [] [] --TODO it's hardcoded change that
  zip_encode_jp jp worker_path get_version1
-- | Build and write a version-2 (pipeline) jobpack to the file \"jp\".
-- Inputs are (label, size hint, URL) triples; URLs are resolved through
-- 'get_effective_inputs' before being zipped back with their labels.
create_pipeline_jobpack :: [(String, Grouping)] -> [(Int,Int,String)] -> FilePath -> IO ()
create_pipeline_jobpack pipeln inputs worker_path = do
  own <- get_owner
  let (inpt_labels, inpt_sizes , inpt_urls) = unzip3 inputs
  effective_urls <- get_effective_inputs inpt_urls
  let jobinpts = zip3 inpt_labels inpt_sizes effective_urls
  let jd2 = Jobdict_vs2 {
        owner2 = own,
        worker2 = "./job",
        prefix2 = "hjob_pipeline",
        pipeline = pipeln,
        inputs2 = jobinpts,
        save_results = False,
        save_info = "ddfs"
      }
  let jp = Jobpack jd2 (Jobenv []) [] [] --TODO it's hardcoded change that
  zip_encode_jp jp worker_path get_version2
-- | Serialise a jobpack to the file \"jp\": header, job dict, job env,
-- then the zipped worker archive (expected to already exist on disk as
-- @worker_exe ++ \".zip\"@).
-- NOTE(review): @B.encode (Data.Aeson.encode ...)@ runs the JSON through
-- Data.Binary's lazy-ByteString instance, which prepends a length field;
-- the offsets are computed from those encoded lengths, but confirm the
-- consumer actually expects length-prefixed sections in the file.
zip_encode_jp :: Jobpack -> FilePath -> Word16 -> IO ()
zip_encode_jp jp worker_exe version = do
  let jd = B.encode $ Data.Aeson.encode $ jobdict jp
  let je = B.encode $ Data.Aeson.encode $ jobenv jp
  let jd_len = fromIntegral $ BL.length jd
  let je_len = fromIntegral $ BL.length je
  putStrLn $ BL.unpack $ Data.Aeson.encode $ jobdict jp
  --TODO change it -> need create zip archive from Haskell
  exe_contents <- BL.readFile $ worker_exe ++ ".zip" --TODO hack hardcoded
  -- let zip_compr = compress exe_contents
  jhome_size <- getFileStatus (worker_exe ++ ".zip") >>= \s -> return $ fileSize s
  -- let header = make_header version jd_len je_len (fromIntegral $ BL.length zip_compr)
  let header = make_header version jd_len je_len (fromIntegral $ jhome_size)
  putStrLn $ show header
  withFile "jp" WriteMode $ \h -> writeBins header jd je exe_contents h
-- | Write the header (padded to 128 bytes: the 20-byte Header plus the
-- Binary encoding of 25 zero Word32s, i.e. an 8-byte length + 100 bytes),
-- then job dict, job env and the worker archive, to the given handle.
writeBins :: Header -> BL.ByteString -> BL.ByteString -> BL.ByteString -> Handle -> IO ()
writeBins header jd je zip_compr handle = do
  let bl = B.encode $ (replicate 25 0 :: [Word32]) --TODO
  let encoded_header = BL.append (B.encode header) bl
  let file_prefix = BL.append encoded_header $ BL.append jd je
  let whole_bytestr = BL.append file_prefix (B.encode zip_compr)
  putStrLn $ show $ BL.length whole_bytestr
  BL.hPut handle whole_bytestr

-- | Best-effort removal of the generated \"jp\" file; any IOException
-- (e.g. file missing) is deliberately swallowed.
cleanup :: IO ()
cleanup = do
  try (removeFile "jp") :: IO (Either IOException ())
  return ()
-- | POST the generated \"jp\" file to the local master's job submission
-- endpoint and return the HTTP response body.
submit_job :: IO String
submit_job = do
  let Just addr = parseURI "http://localhost:8989/disco/job/new" --TODO hardcoded
  contents <- BL.readFile "jp" --TODO check if exist, check if it is done reading etc.
  jp_size <- getFileStatus "jp" >>= \s -> return $ fileSize s
  let request = Http.Request {Http.rqURI = addr,
                              Http.rqMethod = Http.POST,
                              Http.rqHeaders = [Http.mkHeader Http.HdrContentType "image/jpg",
                                                Http.mkHeader Http.HdrContentLength (show jp_size)
                                               ],
                              Http.rqBody = BL.unpack contents}
  Http.simpleHTTP request >>= Http.getResponseBody

--main = do
-- create_pipeline_jobpack [("map", Split)] [(0,0,"tag://data:more_chek"), (1,1,"http://cos")] "word_count"
-- submit
-- cleanup
| zuzia/haskell_worker | src/Jobpack.hs | bsd-3-clause | 8,953 | 0 | 15 | 3,151 | 2,125 | 1,143 | 982 | 196 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, MultiParamTypeClasses, FlexibleInstances, TypeFamilies, PackageImports, FlexibleContexts, UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Web.Route.RouteT
-- Copyright : (c) 2010 Jeremy Shaw
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : partners@seereason.com
-- Stability : experimental
-- Portability : portable
--
-- Declaration of the 'RouteT' monad transformer
-----------------------------------------------------------------------------
module Web.Routes.RouteT where
import Control.Applicative (Applicative((<*>), pure), Alternative((<|>), empty))
import Control.Monad (MonadPlus(mzero, mplus))
import Control.Monad.Catch (MonadCatch(catch), MonadThrow(throwM))
import Control.Monad.Cont(MonadCont(callCC))
import Control.Monad.Error (MonadError(throwError, catchError))
import Control.Monad.Fix (MonadFix(mfix))
import Control.Monad.Reader(MonadReader(ask,local))
import Control.Monad.RWS (MonadRWS)
import Control.Monad.State(MonadState(get,put))
import Control.Monad.Trans (MonadTrans(lift), MonadIO(liftIO))
import Control.Monad.Writer(MonadWriter(listen, tell, pass))
import Data.Text (Text)
-- * RouteT Monad Transformer

-- |monad transformer for generating URLs
-- A @RouteT url m a@ is a computation that can consult the URL rendering
-- function (@url -> query params -> Text@) supplied when it is run.
newtype RouteT url m a = RouteT { unRouteT :: (url -> [(Text, Maybe Text)] -> Text) -> m a }

-- | Monads that can supply a URL rendering function for their URL type.
class (Monad m) => MonadRoute m where
  type URL m
  askRouteFn :: m (URL m -> [(Text, Maybe Text)] -> Text)
-- The same rendering function is shared by the action and the handler.
instance MonadCatch m => MonadCatch (RouteT url m) where
  catch action handler =
    RouteT $ \ fn -> catch (action' fn) (\ e -> handler' e fn)
    where
      action' = unRouteT action
      handler' e = unRouteT (handler e)

instance MonadThrow m => MonadThrow (RouteT url m) where
  throwM = throwM'
    where
      throwM' e = RouteT $ \ _fn -> throwM e
-- | convert a 'RouteT' based route handler to a handler that can be used with the 'Site' type
--
-- NOTE: this function used to be the same as 'unRouteT'. If you want the old behavior, just call 'unRouteT'.
runRouteT :: (url -> RouteT url m a)
          -> ((url -> [(Text, Maybe Text)] -> Text) -> url -> m a)
runRouteT r = \f u -> (unRouteT (r u)) f

-- | Transform the computation inside a @RouteT@.
mapRouteT :: (m a -> n b) -> RouteT url m a -> RouteT url n b
mapRouteT f (RouteT m) = RouteT $ f . m

-- | Execute a computation in a modified environment
-- (contravariant: @f@ adapts a renderer for @url'@ into one for @url@).
withRouteT :: ((url' -> [(Text, Maybe Text)] -> Text) -> (url -> [(Text, Maybe Text)] -> Text)) -> RouteT url m a -> RouteT url' m a
withRouteT f (RouteT m) = RouteT $ m . f

-- | Lift a computation into 'RouteT', ignoring the rendering function.
liftRouteT :: m a -> RouteT url m a
liftRouteT m = RouteT (const m)

-- | Retrieve the URL rendering function.
askRouteT :: (Monad m) => RouteT url m (url -> [(Text, Maybe Text)] -> Text)
askRouteT = RouteT return
-- All instances simply distribute the rendering function unchanged to
-- every sub-computation and defer to the underlying monad.
instance (Functor m) => Functor (RouteT url m) where
  fmap f = mapRouteT (fmap f)

instance (Applicative m) => Applicative (RouteT url m) where
  pure = liftRouteT . pure
  f <*> v = RouteT $ \ url -> unRouteT f url <*> unRouteT v url

instance (Alternative m) => Alternative (RouteT url m) where
  empty = liftRouteT empty
  m <|> n = RouteT $ \ url -> unRouteT m url <|> unRouteT n url

instance (Monad m) => Monad (RouteT url m) where
  return = liftRouteT . return
  m >>= k = RouteT $ \ url -> do
    a <- unRouteT m url
    unRouteT (k a) url
  fail msg = liftRouteT (fail msg)

instance (MonadPlus m, Monad (RouteT url m)) => MonadPlus (RouteT url m) where
  mzero = liftRouteT mzero
  m `mplus` n = RouteT $ \ url -> unRouteT m url `mplus` unRouteT n url

instance (MonadCont m) => MonadCont (RouteT url m) where
  callCC f = RouteT $ \url ->
    callCC $ \c ->
      unRouteT (f (\a -> RouteT $ \_ -> c a)) url

instance (MonadError e m) => MonadError e (RouteT url m) where
  throwError = liftRouteT . throwError
  catchError action handler = RouteT $ \f -> catchError (unRouteT action f) (\e -> unRouteT (handler e) f)

instance (MonadFix m) => MonadFix (RouteT url m) where
  mfix f = RouteT $ \ url -> mfix $ \ a -> unRouteT (f a) url

instance (MonadIO m) => MonadIO (RouteT url m) where
  liftIO = lift . liftIO
instance (MonadReader r m) => MonadReader r (RouteT url m) where
  ask = liftRouteT ask
  local f = mapRouteT (local f)

instance (MonadRWS r w s m) => MonadRWS r w s (RouteT url m)

instance (MonadState s m) => MonadState s (RouteT url m) where
  get = liftRouteT get
  put s = liftRouteT $ put s

instance MonadTrans (RouteT url) where
  lift = liftRouteT

instance (MonadWriter w m) => MonadWriter w (RouteT url m) where
  tell w = liftRouteT $ tell w
  listen m = mapRouteT listen m
  pass m = mapRouteT pass m

-- 'RouteT' itself is the canonical provider of the rendering function.
instance (Monad m) => MonadRoute (RouteT url m) where
  type URL (RouteT url m) = url
  askRouteFn = askRouteT
-- | Render a URL with no query parameters, using the monad's routing
-- function.
showURL :: (MonadRoute m) => URL m -> m Text
showURL url = do
  render <- askRouteFn
  return (render url [])

-- | Render a URL together with query parameters.
showURLParams :: (MonadRoute m) => URL m -> [(Text, Maybe Text)] -> m Text
showURLParams url params = do
  render <- askRouteFn
  return (render url params)

-- | Run a @RouteT url1@ action inside @RouteT url2@ by embedding its URLs
-- with @transform@ before they reach the outer rendering function.
nestURL :: (url1 -> url2) -> RouteT url1 m a -> RouteT url2 m a
nestURL transform (RouteT r) =
  RouteT $ \render -> r (\url params -> render (transform url) params)
| shockkolate/web-routes | Web/Routes/RouteT.hs | bsd-3-clause | 5,315 | 0 | 17 | 1,134 | 1,944 | 1,040 | 904 | 93 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Network.Wai.Metrics
License : BSD3
Stability : experimental
A <http://hackage.haskell.org/package/wai WAI> middleware to collect the following <https://ocharles.org.uk/blog/posts/2012-12-11-24-day-of-hackage-ekg.html EKG> metrics from compatible web servers:
* number of requests (counter @wai.request_count@)
* number of response by status code, broken down class (count @wai.response_status_xxx@)
* latency distribution (distribution @wai.latency_distribution@)
Here's an example of reading these metrics from a Scotty server, and displaying them with EKG.
> -- Compile with GHC option `-with-rtsopts=-T` for GC metrics
> import Web.Scotty
> import Control.Applicative
> import System.Remote.Monitoring (serverMetricStore, forkServer)
> import Network.Wai.Metrics
>
> main :: IO()
> main = do
> store <- serverMetricStore <$> forkServer "localhost" 8000
> waiMetrics <- registerWaiMetrics store
> scotty 3000 $ do
> middleware (metrics waiMetrics)
> get "/" $ html "Ping"
Now have a look at <http://localhost:8000 your local EKG instance> and display the request count by clicking on 'wai.request_count'.
WAI metrics can also be stored in a bare EKG store, with no UI and no GC metrics. Use ekg-core's newStore function.
Compatible web servers include the following:
*Yesod
*Scotty
*Spock
*Servant
*Warp
-}
module Network.Wai.Metrics (
registerWaiMetrics,
WaiMetrics(..),
metrics) where
import Network.Wai
import System.Metrics
import Control.Applicative
import Data.Time.Clock
import qualified System.Metrics.Counter as Counter
import qualified System.Metrics.Distribution as Distribution
import Network.HTTP.Types.Status (statusCode)
{-|
The metrics to feed in WAI and register in EKG.
-}
data WaiMetrics = WaiMetrics {
  requestCounter :: Counter.Counter                  -- ^ total requests seen
  ,latencyDistribution :: Distribution.Distribution  -- ^ request latency in seconds
  ,statusCode100Counter :: Counter.Counter           -- ^ responses below 200
  ,statusCode200Counter :: Counter.Counter           -- ^ 2xx responses
  ,statusCode300Counter :: Counter.Counter           -- ^ 3xx responses
  ,statusCode400Counter :: Counter.Counter           -- ^ 4xx responses
  ,statusCode500Counter :: Counter.Counter           -- ^ responses 500 and above
  }
{-|
Register in EKG a number of metrics related to web server activity.
* @wai.request_count@
* @wai.response_status_1xx@
* @wai.response_status_2xx@
* @wai.response_status_3xx@
* @wai.response_status_4xx@
* @wai.response_status_5xx@
* @wai.latency_distribution@
-}
registerWaiMetrics :: Store -> IO WaiMetrics
registerWaiMetrics store =
  WaiMetrics
    -- Applicative order must match the WaiMetrics field order above.
    <$> createCounter "wai.request_count" store
    <*> createDistribution "wai.latency_distribution" store
    <*> createCounter "wai.response_status_1xx" store
    <*> createCounter "wai.response_status_2xx" store
    <*> createCounter "wai.response_status_3xx" store
    <*> createCounter "wai.response_status_4xx" store
    <*> createCounter "wai.response_status_5xx" store
{-|
Create a middleware to be added to a WAI-based webserver.
Increments the request counter when a request arrives, bumps the counter
matching the response's status class, and records the wall-clock time
(seconds, from request arrival to response construction) in the latency
distribution.
-}
metrics :: WaiMetrics -> Middleware
metrics waiMetrics app req respond = do
  Counter.inc (requestCounter waiMetrics)
  start <- getCurrentTime
  app req (respond' start)
  where respond' :: UTCTime -> Response -> IO ResponseReceived
        respond' start res = do
          -- Guards run highest class first, so e.g. 404 lands in 4xx and
          -- anything >= 500 lands in 5xx.
          Counter.inc $ case statusCode $ responseStatus res of
            s | s >= 500 -> statusCode500Counter waiMetrics
              | s >= 400 -> statusCode400Counter waiMetrics
              | s >= 300 -> statusCode300Counter waiMetrics
              | s >= 200 -> statusCode200Counter waiMetrics
              | otherwise -> statusCode100Counter waiMetrics
          end <- getCurrentTime
          Distribution.add (latencyDistribution waiMetrics) (realToFrac $ diffUTCTime end start)
          respond res
| ambiata/wai-middleware-metrics | Network/Wai/Metrics.hs | bsd-3-clause | 3,718 | 0 | 16 | 651 | 452 | 235 | 217 | 46 | 1 |
{-# LANGUAGE EmptyDataDecls #-}
module Code24 where
import Debug.Trace
import Data.List (unfoldr,find)
import Data.Ratio
type Fraction = Ratio Integer
type Interval = (Fraction, Fraction)

-- | Narrow an interval by a sub-interval: the second interval, understood
-- as a fraction of [0,1), is embedded linearly into the first.
(|>) :: Interval -> Interval -> Interval
(lo, hi) |> (lo', hi') = (embed lo', embed hi')
  where
    embed x = lo + (hi - lo) * x

-- | The relative position of a fraction inside an interval (the inverse
-- of the embedding performed by ('|>')).
(<|) :: Fraction -> Interval -> Fraction
x <| (lo, hi) = (x - lo) / (hi - lo)

-- | Map both endpoints of the first interval through ('<|').
(<||) :: Interval -> Interval -> Interval
(lo, hi) <|| j = (lo <| j, hi <| j)
type Symbol = Char
-- | A model assigns each symbol a probability interval within [0,1);
-- intervals are assumed disjoint (lookup relies on this).
type Model = [(Char,Interval)]

interval :: Model -> Symbol -> Interval
symbol :: Model -> Fraction -> Symbol

-- | The interval a symbol occupies; errors when the symbol is unknown.
interval m s = maybe (error "Model does not include the symbol") id (lookup s m)

-- | The symbol whose half-open interval [l,h) contains the fraction;
-- errors when no interval matches.
symbol m f = maybe (error "Model does not include the fraction") fst (find ((\(l,h) -> l <= f && f < h) . snd) m)

-- | Adaptation hook; this model is static, so the symbol is ignored.
adapt :: Model -> Symbol -> Model
adapt m _ = m

-- | The interval of each symbol in turn, adapting the model after each one.
intervals :: Model -> [Symbol] -> [Interval]
intervals _ [] = []
intervals m (x:xs) = interval m x : intervals (adapt m x) xs
-- Encoding
-- | Arithmetic-encode a string as one fraction: narrow [0,1) by each
-- symbol's interval, then 'pick' a representative from the final interval.
encode :: Model -> [Symbol] -> Fraction
encode m = pick . foldl (|>) (0,1) . intervals m

-- Signature only; the (tracing) equations for 'pick' are near the end of
-- the file.
pick :: Interval -> Fraction

-- | Example model over the symbols of \"evergreen\".
samplemd :: Model
samplemd = [('e',(0,3/8)),('g',(3/8,1/2)),('n',(1/2,5/8)),('r',(5/8,7/8)),('v',(7/8,1))]
-- Decoding
-- | Decode a fraction into the (unbounded) symbol stream it encodes:
-- repeatedly locate the fraction within the current interval, emit the
-- matching symbol, and narrow the interval by that symbol.
decode :: Model -> Fraction -> [Symbol]
decode m f = unfoldr step (m,(0,1),f)
  where
    step (m,i,f) = Just (x,(adapt m x, i |> interval m x, f))
      where x = symbol m (f <| i)
-- Incremental encoding and decoding
-- | A bit, represented as a fraction (0 or 1).
type Bit = Fraction

toBits :: Interval -> [Bit]
toFrac :: [Bit] -> Fraction

-- | Like 'encode' but emits the final interval as a bit list.
encode' :: Model -> [Symbol] -> [Bit]
encode' m = toBits . foldl (|>) (0,1) . intervals m

-- | Like 'decode' but starts from a bit list instead of a fraction.
decode' :: Model -> [Bit] -> [Symbol]
decode' m bs = unfoldr step (m,(0,1),toFrac bs)
  where
    step (m,i,f) = Just (x,(adapt m x, i |> interval m x, f))
      where x = symbol m (f <| i)
-- Streaming
-- | Metamorphism driver: emit output with @f@ while possible; when @f@
-- yields nothing, consume one input with @g@ and retry.  Stops when the
-- producer is stuck and the input list is exhausted.
stream :: (a -> Maybe (b, a)) -> (a -> c -> a) -> a -> [c] -> [b]
stream f g s xs = unfoldr step (s,xs)
  where
    step (s,xs) = case f s of
      Just (y,s') -> Just (y,(s',xs))
      Nothing -> case xs of
        x:xs' -> step (g s x,xs')
        [] -> Nothing
-- | Streaming encoder: emits each bit as soon as the accumulated interval
-- determines it, instead of waiting for the whole input.
encode'' m = stream bit (|>) (0,1) . intervals m

-- | Emit a bit when the interval lies entirely in one half of [0,1),
-- rescaling the interval onto that half; otherwise nothing is determined.
bit :: Interval -> Maybe (Bit, Interval)
bit (l,r) | r <= 1/2 = Just (0,(2*l,2*r))
          | 1/2 <= l = Just (1,(2*l-1,2*r-1))
          | otherwise = Nothing

-- | All bits determined by an interval.
toBits = unfoldr bit
-- | Read a finite bit list as the binary fraction 0.b1b2...bn1.
toFrac = foldr (\ b f -> (b+f)/2) (1/2)
-- | Choose a representative fraction from an interval by binary
-- subdivision, emitting each visited interval on stderr via 'trace'
-- (debug instrumentation).
pick (l,r) | r <= 1/2 = trace (show (l,r)) $ pick (2*l,2*r) / 2
           | 1/2 <= l = trace (show (l,r)) $ (1+pick (2*l-1,2*r-1)) / 2
           | otherwise = trace (show (l,r)) $ 1/2
| sampou-org/pfad | Code/Code24.hs | bsd-3-clause | 2,793 | 0 | 15 | 871 | 1,501 | 825 | 676 | 59 | 3 |
-- | Internal operations for the Ops hierarchy.
-- Currently a stub: the export list is intentionally empty.
module Language.Haskell.Modules.Ops.Internal
  (
  ) where
| athanclark/haskell-modules | src/Language/Haskell/Modules/Ops/Internal.hs | bsd-3-clause | 63 | 0 | 3 | 13 | 12 | 9 | 3 | 2 | 0 |
-- Provides some helpers on kinding
module Language.Granule.Checker.KindsHelpers where
import Language.Granule.Syntax.Type
-- | Is this kind a coeffect kind, either directly or on either side of a
-- kind union?
isCoeffectKind :: Kind -> Bool
isCoeffectKind kind =
    case kind of
      KCoeffect          -> True
      KUnion _ KCoeffect -> True
      KUnion KCoeffect _ -> True
      _                  -> False
-- | Is this kind an effect kind, either directly or on either side of a
-- kind union?
isEffectKind :: Kind -> Bool
isEffectKind kind =
    case kind of
      KEffect          -> True
      KUnion _ KEffect -> True
      KUnion KEffect _ -> True
      _                -> False
| dorchard/gram_lang | frontend/src/Language/Granule/Checker/KindsHelpers.hs | bsd-3-clause | 459 | 0 | 7 | 65 | 130 | 70 | 60 | 12 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE RankNTypes #-}
module Language.Lambda.Syntax.Nameless.Exp (
Alpha (Alpha)
, runAlpha
, Exp (Var, App, Lam, Let)
-- , bound
-- , free
, fold
, mapExp
, mapAlpha
, (#)
, (!)
, lam_
, gLam_
, let_
, gLet_
) where
import Control.Monad (ap)
import Bound
import Bound.Scope
import Prelude.Extras
-- | A binder annotation carrying the original (display) name of the
-- bound variable.
data Alpha n = Alpha { runAlpha :: n }
    deriving (Show, Read, Functor, Foldable, Traversable)

-- | Display names are ignored for equality, so terms built on 'Alpha'
-- compare up to alpha-equivalence: any two annotations are equal.
instance Eq n => Eq (Alpha n) where
    _ == _ = True
-- | Nameless (locally-nameless, via the bound library's 'Scope') lambda
-- terms with let bindings, parameterised by the binder-annotation type
-- @n@ and the free-variable type @a@.
data Exp n a =
      Var !a
    | App (Exp n a) (Exp n a)
    | Lam (Alpha n) (Scope () (Exp n) a)
    | Let (Alpha n) (Exp n a) (Scope () (Exp n) a)
    deriving (Eq,Show,Read,Functor,Foldable,Traversable)

-- Lifted-class instances (prelude-extras) with default implementations.
instance (Eq n) => Eq1 (Exp n)
instance (Show n) => Show1 (Exp n)
instance (Read n) => Read1 (Exp n)
-- | Monadic bind is capture-avoiding substitution: each free variable is
-- replaced by a term, with '>>>=' pushing the substitution under binders.
instance Monad (Exp n) where
    return = Var
    -- (>>=) :: Exp n a -> (a -> Exp n b) -> Exp n B
    (Var a) >>= g = g a
    (f `App` a) >>= g = (f >>= g) `App` (a >>= g)
    (Lam n s) >>= g = Lam n (s >>>= g)
    -- the definition is substituted directly; only the body is a scope
    (Let ns d e) >>= g = Let ns (d >>= g) (e >>>= g)

instance Applicative (Exp n) where
    pure = Var
    (<*>) = ap
-- | Catamorphism over expressions: one rank-2 algebra per constructor
-- ('Var', 'App', 'Lam', 'Let'). Scopes are transformed with 'hoistScope'
-- so the fold runs underneath binders as well.
--
-- NOTE(review): in the 'Let' case the definition is handed to the algebra
-- as a raw, unfolded 'Exp' (as its type requires) — callers that need it
-- folded must do so themselves.
fold :: forall n b f .
       (forall a . a -> f a)
    -> (forall a . f a -> f a -> f a)
    -> (forall a . Alpha n -> Scope () f a -> f a)
    -> (forall a . Alpha n -> Exp n a -> Scope () f a -> f a)
    -> (Exp n) b -> f b
fold v _ _ _ (Var n) = v n
fold v a l lt (fun `App` arg) = a (fold v a l lt fun) (fold v a l lt arg)
fold v a l lt (Lam alpha scope) = l alpha scope'
  where
    scope' = (hoistScope (fold v a l lt) scope)
fold v a l lt (Let alpha def scope) = lt alpha def scope'
  where
    scope' = hoistScope (fold v a l lt) scope
-- | Rename every binder annotation with the given function, leaving
-- variables untouched.
mapAlpha :: (n -> m) -> (Exp n) a -> (Exp m) a
mapAlpha f = fold Var App onLam onLet
  where
    onLam alpha scope = Lam (fmap f alpha) scope
    onLet alpha def scope = Let (fmap f alpha) (mapAlpha f def) scope
-- | Map over both the binder annotations and the free variables.
mapExp :: (n -> m) -> (a -> b) -> Exp n a -> Exp m b
mapExp f g = mapAlpha f . fmap g
-- | a smart constructor for abstractions
infixl 9 #
(#) :: Exp n a -> Exp n a -> Exp n a
(#) = App
infixr 6 !
(!) :: Eq a => a -> Exp a a -> Exp a a
(!) = lam_
-- | Smart constructor for abstraction over a named variable, reusing the
-- variable itself as the display name.
lam_ :: Eq a => a -> Exp a a -> Exp a a
lam_ v = gLam_ v id

-- | General abstraction: bind @v@ in the body and record @f v@ as the
-- display name.
gLam_ :: Eq a => a -> (a -> n) -> Exp n a -> Exp n a
gLam_ v f body = Lam (Alpha (f v)) (abstract1 v body)
-- | A smart constructor for let bindings.
let_ :: Eq a => (a, Exp a a) -> Exp a a -> Exp a a
let_ binding = gLet_ binding id

-- | General let: bind the name in the body only (the definition is not
-- in scope inside itself) and record @f v@ as the display name.
gLet_ :: Eq a => (a, Exp n a) -> (a -> n) -> Exp n a -> Exp n a
gLet_ (v, def) f body = Let (Alpha (f v)) def (abstract1 v body)
| julmue/UntypedLambda | src/Language/Lambda/Syntax/Nameless/Exp.hs | bsd-3-clause | 2,613 | 0 | 16 | 790 | 1,435 | 748 | 687 | 82 | 1 |
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wall -Werror #-}
module IRTS.CLaSH.NetlistTypes where
import Control.Applicative ((<$>))
import Control.Monad.Trans.Error (ErrorT(..))
import Unbound.LocallyNameless (name2String)
import CLaSH.Core.TyCon (tyConName)
import CLaSH.Core.Type (LitTy(..),Type(..),TypeView(..),tyView)
import CLaSH.Netlist.Types (HWType(..))
import CLaSH.Netlist.Util (coreTypeToHWType)
-- | Translate an Idris core type to a CLaSH netlist 'HWType'.
-- 'Nothing' means the type is not translatable at all; @Just (Left e)@
-- carries a translation error; @Just (Right hw)@ is a successful result.
idrisTypeToHWType ::
  Type
  -> Maybe (Either String HWType)
idrisTypeToHWType (tyView -> TyConApp tc args) = runErrorT $
  case (name2String $ tyConName tc) of
    "__INT__" -> return Integer
    -- Signals are transparent: translate the element type.
    "CLaSH.Signal.Signal" -> ErrorT $ return $ coreTypeToHWType idrisTypeToHWType (head args)
    "Prelude.Vect.Vect" -> do
      -- NOTE(review): irrefutable pattern assumes Vect is applied to
      -- exactly two arguments (size, element); it crashes otherwise.
      let [szTy,elTy] = args
      sz <- tyNatSize szTy
      elHWTy <- ErrorT $ return $ coreTypeToHWType idrisTypeToHWType elTy
      return $ Vector sz elHWTy
    -- Unknown constructor: fail without an error message (outer Maybe).
    _ -> ErrorT $ Nothing
idrisTypeToHWType _ = Nothing
-- | Evaluate a type-level natural to an 'Int'. Handles literal naturals,
-- the unary Z/S constructors, and several frontend encodings of
-- 'fromInteger' applications.
tyNatSize ::
  Type
  -> ErrorT String Maybe Int
tyNatSize (LitTy (NumTy i)) = return i
tyNatSize t@(tyView -> TyConApp tc args) = case name2String (tyConName tc) of
  "Prelude.Nat.Z" -> return 0
  "Prelude.Nat.S" -> succ <$> tyNatSize (head args)
  "Builtins.fromInteger" -> tyNatSize (last args)
  "@Builtins.Num$[Nat].0.Builtins.#!fromInteger" -> tyNatSize (last args)
  "@Builtins.Num$[Nat].0.Builtins.#!fromInteger.0.#fromInteger'" -> tyNatSize (last args)
  -- NOTE(review): partial — unrecognised constructors (and any other
  -- shape, below) abort with 'error' instead of using the ErrorT channel.
  _ -> error $ "Can't convert tyNat: " ++ show t
tyNatSize t = error $ "Can't convert tyNat: " ++ show t
| christiaanb/Idris-dev | src/IRTS/CLaSH/NetlistTypes.hs | bsd-3-clause | 1,636 | 0 | 14 | 363 | 474 | 249 | 225 | 36 | 6 |
import Distribution.Simple
import Distribution.Simple.Program
import Distribution.Simple.Setup
import Distribution.Simple.LocalBuildInfo
import Distribution.PackageDescription
import Distribution.Verbosity
main :: IO ()
main = defaultMainWithHooks hooks
hooks :: UserHooks
hooks = simpleUserHooks {
confHook = confHook'
}
-- | Configure hook that queries the @sundials-config@ helper program for
-- the CPP flags and link libraries of the IDA solver, then appends the
-- discovered @-L@ / @-I@ directories to the configure flags before
-- delegating to the default hook.
confHook' :: (GenericPackageDescription, HookedBuildInfo) -> ConfigFlags -> IO LocalBuildInfo
confHook' param cfs = do
  let sundials_config = simpleProgram "sundials-config"
  conf <- configureAllKnownPrograms silent (addKnownPrograms [sundials_config] defaultProgramConfiguration)
  -- NOTE(review): flag meanings assumed from sundials-config conventions
  -- (-m module, -t s serial, -l c C bindings, -s section) — confirm.
  cppFlags <- rawSystemProgramStdoutConf verbose sundials_config conf ["-m","ida","-t","s","-l","c","-s","cppflags"]
  libs <- rawSystemProgramStdoutConf verbose sundials_config conf ["-m","ida","-t","s","-l","c","-s","libs"]
  let cfs' = cfs { configExtraLibDirs = configExtraLibDirs cfs ++ extraLDs (words libs)
                 , configExtraIncludeDirs = configExtraIncludeDirs cfs ++ extraIDs (words cppFlags)
                 }
  (confHook simpleUserHooks) param cfs'
-- | Collect the directory of every @-L@ flag, dropping all other words.
extraLDs :: [String] -> [String]
extraLDs = foldr keep []
  where
    keep ('-' : 'L' : dir) acc = dir : acc
    keep _                 acc = acc
extraIDs :: [String] -> [String]
extraIDs [] = []
extraIDs (('-' : 'I' : d) : ss) = d : extraIDs ss
extraIDs (_ : ss) = extraIDs ss | giorgidze/Hydra | hydra-solver-sundials/Setup.hs | bsd-3-clause | 1,464 | 0 | 15 | 324 | 442 | 235 | 207 | 28 | 1 |
module TestBlashImpl where
import Data.Coerce (coerce)
import qualified Data.Vector.Storable as VS
import Data.Vector.Storable ((!))
import Foreign.C.Types
import Test.QuickCheck (Positive(..), NonZero(..))
import Test.QuickCheck.Property (Property)
import Test.QuickCheck.Monadic
import Data.AEq
import qualified BlashImpl as BI
import OpenBLAS1
import TestBlashData
-- | The pure-Haskell 'BI.copyM' must agree element-wise with the CBLAS
-- reference @copy@ on the same (size, strides, vectors) arguments.
prop_copyM :: BlasArgs Double -> Property
prop_copyM (BlasArgs (Positive n) xs (NonZero incx) ys (NonZero incy)) = monadicIO $ do
  -- expected uses the CBLAS implementation via inline-c
  -- NOTE(review): the foreign call appears to mutate expected' in place
  -- through its storable buffer despite the immutable type — confirm
  -- that cblas_copyW is documented to do so.
  expected <- run $ do
    let expected' = VS.fromList (coerce ys)
        xs' = VS.fromList (coerce xs)
        n' = fromIntegral n
        incx' = fromIntegral incx
        incy' = fromIntegral incy
    cblas_copyW n' xs' incx' expected' incy'
    return expected'
  -- actual calls the monadic haskell implementation directly
  actual <- run $ do
    let xs' = VS.fromList xs
    actual' <- VS.thaw $ VS.fromList ys
    BI.copyM n xs' incx actual' incy
    VS.freeze actual'
  -- invariant: same size as ys always
  assert (length ys == VS.length actual)
  assert (length ys == VS.length expected)
  -- invariant: both methods give same answer (approximate float equality)
  let ass = VS.toList actual
      ess = VS.toList expected
  assert $ and $ zipWith (\a e -> a ~== coerce e) ass ess
prop_axpyM :: BlasArgs Double -> Double -> Property
prop_axpyM (BlasArgs (Positive n) xs (NonZero incx) ys (NonZero incy)) da = monadicIO $ do
-- expected uses the CBLAS implementation via inline-c
expected <- run $ do
let expected' = VS.fromList (coerce ys)
xs' = VS.fromList (coerce xs)
n' = fromIntegral n
incx' = fromIntegral incx
incy' = fromIntegral incy
da' = coerce da
cblas_axpyW n' da' xs' incx' expected' incy'
return expected'
-- actual calls the monadic haskell implementation directly
actual <- run $ do
let xs' = VS.fromList xs
actual' <- VS.thaw $ VS.fromList ys
BI.axpyM n da xs' incx actual' incy
VS.freeze actual'
-- invariant: same size as ys always
assert (length ys == VS.length actual)
assert (length ys == VS.length expected)
-- invariant: both methods give same answer
let ass = VS.toList actual
ess = VS.toList expected
assert $ and $ zipWith (\a e -> a ~== coerce e) ass ess
-- --
prop_scalM :: BlasArgs Double -> Double -> Property
prop_scalM (BlasArgs (Positive n) xs (NonZero incx) _ _) da = monadicIO $ do
-- expected uses the CBLAS implementation via inline-c
expected <- run $ do
let expected' = VS.fromList (coerce xs)
da' = coerce da
n' = fromIntegral n
incx' = fromIntegral incx
cblas_scalW n' da' expected' incx'
return expected'
-- actual calls the monadic haskell implementation directly
actual <- run $ do
actual' <- VS.thaw $ VS.fromList xs
BI.scalM n da actual' incx
VS.freeze actual'
-- invariant: same size as ys always
assert (length xs == VS.length actual)
assert (length xs == VS.length expected)
-- invariant: both methods give same answer
let ass = VS.toList actual
ess = VS.toList expected
assert $ and $ zipWith (\a e -> a ~== coerce e) ass ess
-- --
prop_swapM :: BlasArgs Double -> Property
prop_swapM (BlasArgs (Positive n) xs (NonZero incx) ys (NonZero incy)) = monadicIO $ do
-- expected uses the CBLAS implementation via inline-c
(expectedXS, expectedYS) <- run $ do
let
expectedXS' = VS.fromList (coerce xs)
expectedYS' = VS.fromList (coerce ys)
n' = fromIntegral n
incx' = fromIntegral incx
incy' = fromIntegral incy
cblas_swapW n' expectedXS' incx' expectedYS' incy'
return (expectedXS', expectedYS')
-- actual calls the monadic haskell implementation directly
(actualXS, actualYS) <- run $ do
actualXS' <- VS.thaw $ VS.fromList xs
actualYS' <- VS.thaw $ VS.fromList ys
BI.swapM n actualXS' incx actualYS' incy
actualXS'' <- VS.freeze actualXS'
actualYS'' <- VS.freeze actualYS'
return (actualXS'', actualYS'')
-- invariant: same size as ys always
assert (length xs == VS.length actualXS)
assert (length xs == VS.length expectedXS)
assert (length ys == VS.length actualYS)
assert (length ys == VS.length expectedYS)
-- invariant: both methods give same answer
assert $ and $ zipWith (\a e -> a ~== coerce e) (VS.toList actualXS) (VS.toList expectedXS)
assert $ and $ zipWith (\a e -> a ~== coerce e) (VS.toList actualYS) (VS.toList expectedYS)
-- | The pure 'BI.rotg' (Givens rotation setup) must agree with the CBLAS
-- reference @rotg@, which returns its four results by overwriting the
-- packed input vector.
prop_rotg :: Double -> Double -> Double -> Double -> Property
prop_rotg da db c s = monadicIO $ do
  -- expected uses the CBLAS implementation via inline-c
  -- NOTE(review): reading dx' after the call relies on the C routine
  -- mutating the immutable storable vector in place — confirm cblas_rotgW.
  (e0, e1, e2, e3) <- run $ do
    let
      dx = [da, db, c, s]
      dx' = VS.fromList (coerce dx)
    cblas_rotgW dx'
    return ( dx' ! 0
           , dx' ! 1
           , dx' ! 2
           , dx' ! 3
           )
  -- approximate float equality against the pure implementation below
  assert $ a0 ~== coerce e0
  assert $ a1 ~== coerce e1
  assert $ a2 ~== coerce e2
  assert $ a3 ~== coerce e3
  where
    (a0, a1, a2, a3) = BI.rotg da db c s
| wyn/blash | tests/TestBlashImpl.hs | bsd-3-clause | 5,172 | 0 | 17 | 1,341 | 1,721 | 839 | 882 | 111 | 1 |
{-# LANGUAGE DataKinds
, FlexibleContexts
, NegativeLiterals
, ForeignFunctionInterface
#-}
module Main where
import Prelude hiding (product, exp, log, (**))
import Language.Hakaru.Runtime.LogFloatPrelude
import Language.Hakaru.Types.Sing
import System.CPUTime
import Data.Time.Clock
import System.Environment
import System.IO.Unsafe
import qualified System.Random.MWC as MWC
import qualified System.Random.MWC.Distributions as MWCD
import Control.Monad
import Data.List (permutations)
import qualified Data.Vector as V
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Storable as SV
import qualified Data.Number.LogFloat as LF
import Text.Printf (printf)
--import qualified GmmBase
--import GmmGibbs
import GmmGibbs2
--import GmmGibbs3
import qualified GmmBucket
clusters = 3
as = G.replicate clusters 1.0
-- | Convert a time difference to seconds as a 'Double', going through
-- 'Rational' for an exact intermediate representation.
diffToDouble :: NominalDiffTime -> Double
diffToDouble dt = fromRational (toRational dt)
t_ dataSize =
let_ (lam $ \ as1 ->
(plate (unsafeNat (nat2int (size as1) +
negate (nat2int (nat_ 1)))) $
\ i3 ->
beta (summate (i3 + nat_ 1) (size as1) (\ j4 -> as1 ! j4))
(as1 ! i3)) >>= \ xs2 ->
dirac (array (size as1) $
\ i5 ->
let_ (product (nat_ 0) i5 (\ j7 -> xs2 ! j7)) $ \ x6 ->
x6 *
case_ (i5 + nat_ 1 == size as1)
[branch ptrue (nat2prob (nat_ 1)),
branch pfalse
(unsafeProb (nat2real (nat_ 1) +
negate (fromProb (xs2 ! i5))))])) $ \ dirichlet0 ->
let_ (arrayLit [ prob_ 1, prob_ 1, prob_ 1 ]) $ \ as8 ->
let_ (nat_ dataSize) $ \ data_size10 ->
dirichlet0 `app` as8 >>= \ theta11 ->
let_ (arrayLit [ int_ -7, int_ 3, int_ 10 ]) $ \ phi12 ->
(plate data_size10 $
\ i14 ->
categorical theta11 >>= \ z15 ->
normal (fromInt (phi12 ! z15)) (nat2prob (nat_ 1)) >>= \ w16 ->
dirac (pair z15 w16))
zInit_ dataSize =
(plate (nat_ dataSize) $
\ i0 ->
categorical (arrayLit [ prob_ 1, prob_ 1, prob_ 1 ]))
-- | Fraction of positions where the two label vectors agree.
--
-- NOTE(review): assumes both vectors have the same length — 'G.zipWith'
-- silently truncates to the shorter one while the denominator uses
-- @length x@ — and yields NaN for an empty @x@. Confirm callers never
-- pass mismatched or empty vectors.
accuracy
  :: U.Vector Int
  -> U.Vector Int
  -> Double
accuracy x y = G.sum z / (fromIntegral $ G.length x)
  where z = G.zipWith (\a b -> if a == b then 1 else 0) x y
relabel :: [Int] -> U.Vector Int -> U.Vector Int
relabel key = G.map (key !!)
-- | Run the step @n@ times, threading an accumulator; the step also
-- receives the remaining-step index, counting down from @n-1@ to 0.
iterateM :: Monad m => Int -> (a -> b -> Int -> m b) -> a -> b -> m b
iterateM count f env acc = case count of
  0 -> return acc
  n -> f env acc (n - 1) >>= iterateM (n - 1) f env
-- | Compose a monadic action with itself @n@ times.
iterateM2 :: Monad m => Int -> (a -> m a) -> a -> m a
iterateM2 count act x = case count of
  0 -> return x
  n -> act x >>= iterateM2 (n - 1) act
oneUpdateB
:: (MWC.GenIO, U.Vector Double)
-> U.Vector Int
-> Int
-> IO (U.Vector Int)
oneUpdateB (g,t) z i = do
Just zNew <- unMeasure (GmmBucket.prog as z t i) g
--print (z, i, zNew)
return (G.unsafeUpd z [(i, zNew)])
oneSweepB
:: MWC.GenIO
-> U.Vector Int
-> U.Vector Double
-> IO (U.Vector Int)
oneSweepB g z t = iterateM size oneUpdateB (g, t) z
where size = G.length z
oneUpdate
:: (MWC.GenIO, U.Vector Double)
-> U.Vector Int
-> Int
-> IO (U.Vector Int)
oneUpdate (g,t) z i = do
--print $ G.map LF.logFromLogFloat (gmmTestArray as z t i) -- DEBUG
Just zNew <- unMeasure (gmmGibbs as z t i) g
return (G.unsafeUpd z [(i, zNew)])
oneSweep
:: MWC.GenIO
-> U.Vector Int
-> U.Vector Double
-> IO (U.Vector Int)
oneSweep g z t = iterateM size oneUpdate (g, t) z
where size = G.length z
data Experiment = NoBucket | Bucket deriving (Show)
runExperiment :: Experiment
-> Int
-> Int
-> Int
-> MWC.GenIO
-> IO ()
runExperiment e dataSize sweeps trial g = do
Just z <- unMeasure (zInit_ dataSize) g
Just d <- unMeasure (t_ dataSize) g
let (zG, t') = G.unzip d
t1 <- getCurrentTime
zPred <- iterateM2 sweeps (\z ->
case e of
NoBucket -> oneSweep g z t'
Bucket -> oneSweepB g z t') z
t2 <- getCurrentTime
putStrLn (show e ++ "," ++
show dataSize ++ "," ++
show trial ++ "," ++
(show . maximum $
map (\key -> accuracy zG (relabel key zPred))
(permutations [0 .. clusters - 1])) ++ "," ++
show (diffToDouble $ diffUTCTime t2 t1))
-- | Entry point: @gmm <dataSize> <sweeps> <trial>@.
-- Runs the experiment once without bucketing and once with it, printing
-- one CSV result line per run (see 'runExperiment').
main :: IO ()
main = do
  args <- getArgs
  -- Pattern matching on the parsed list replaces the previous
  -- boolean-blind 'length args == 3' check followed by the partial
  -- irrefutable pattern 'let [dataSize, sweeps, trial] = map read args'.
  -- Matching the list spine does not force the reads, so a wrong number
  -- of arguments still prints the usage message without a parse error.
  case map read args :: [Int] of
    [dataSize, sweeps, trial] -> do
      g <- MWC.createSystemRandom
      --putStrLn ("inf_method, dataSize, time")
      runExperiment NoBucket dataSize sweeps trial g
      runExperiment Bucket dataSize sweeps trial g
    _ -> putStrLn "./gmm <dataSize> <sweeps> <trial>"
| zaxtax/gmm-example | GMMRunner.hs | bsd-3-clause | 5,236 | 0 | 30 | 1,845 | 1,877 | 971 | 906 | 134 | 2 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-overlapping-patterns #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
-- The above warning suppression flags are a temporary kludge.
-- While working on this module you are encouraged to remove it and fix
-- any warnings in the module. See
-- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#Warnings
-- for details
module Main(main) where
#include "../../includes/ghcconfig.h"
#include "../../includes/stg/HaskellMachRegs.h"
#include "../../includes/rts/Constants.h"
-- Needed for TAG_BITS
#include "../../includes/MachDeps.h"
import Text.PrettyPrint
import Data.Word
import Data.Bits
import Data.List ( intersperse, nub, sort )
import System.Exit
import System.Environment
import System.IO
import Control.Arrow ((***))
-- -----------------------------------------------------------------------------
-- Argument kinds (rougly equivalent to PrimRep)
data ArgRep
= N -- non-ptr
| P -- ptr
| V -- void
| F -- float
| D -- double
| L -- long (64-bit)
| V16 -- 16-byte (128-bit) vectors
| V32 -- 32-byte (256-bit) vectors
| V64 -- 64-byte (512-bit) vectors
-- size of a value in *words*
argSize :: ArgRep -> Int
argSize N = 1
argSize P = 1
argSize V = 0
argSize F = 1
argSize D = (SIZEOF_DOUBLE `quot` SIZEOF_VOID_P :: Int)
argSize L = (8 `quot` SIZEOF_VOID_P :: Int)
argSize V16 = (16 `quot` SIZEOF_VOID_P :: Int)
argSize V32 = (32 `quot` SIZEOF_VOID_P :: Int)
argSize V64 = (64 `quot` SIZEOF_VOID_P :: Int)
-- | Short tag used to build the stg_ap_* entry-point names.
showArg :: ArgRep -> String
showArg rep = case rep of
  N   -> "n"
  P   -> "p"
  V   -> "v"
  F   -> "f"
  D   -> "d"
  L   -> "l"
  V16 -> "v16"
  V32 -> "v32"
  V64 -> "v64"
-- is a value a pointer?
isPtr :: ArgRep -> Bool
isPtr rep = case rep of
  P -> True
  _ -> False
-- -----------------------------------------------------------------------------
-- Registers
data RegStatus = Registerised | Unregisterised
type Reg = String
availableRegs :: RegStatus -> ([Reg],[Reg],[Reg],[Reg])
availableRegs Unregisterised = ([],[],[],[])
availableRegs Registerised =
( vanillaRegs MAX_REAL_VANILLA_REG,
floatRegs MAX_REAL_FLOAT_REG,
doubleRegs MAX_REAL_DOUBLE_REG,
longRegs MAX_REAL_LONG_REG
)
-- | Names of the real machine registers of each class, up to the given
-- maximum index.
vanillaRegs, floatRegs, doubleRegs, longRegs :: Int -> [Reg]
vanillaRegs n = map (\m -> "R" ++ show m) [2..n] -- never use R1
floatRegs n = map (\m -> "F" ++ show m) [1..n]
doubleRegs n = map (\m -> "D" ++ show m) [1..n]
longRegs n = map (\m -> "L" ++ show m) [1..n]
-- -----------------------------------------------------------------------------
-- Loading/saving register arguments to the stack
loadRegArgs :: RegStatus -> Int -> [ArgRep] -> (Doc,Int)
loadRegArgs regstatus sp args
= (loadRegOffs reg_locs, sp')
where (reg_locs, _, sp') = assignRegs regstatus sp args
loadRegOffs :: [(Reg,Int)] -> Doc
loadRegOffs = vcat . map (uncurry assign_stk_to_reg)
saveRegOffs :: [(Reg,Int)] -> Doc
saveRegOffs = vcat . map (uncurry assign_reg_to_stk)
-- a bit like assignRegs in CgRetConv.lhs
assignRegs
:: RegStatus -- are we registerised?
-> Int -- Sp of first arg
-> [ArgRep] -- args
-> ([(Reg,Int)], -- regs and offsets to load
[ArgRep], -- left-over args
Int) -- Sp of left-over args
assignRegs regstatus sp args = assign sp args (availableRegs regstatus) []
assign sp [] regs doc = (doc, [], sp)
assign sp (V : args) regs doc = assign sp args regs doc
assign sp (arg : args) regs doc
= case findAvailableReg arg regs of
Just (reg, regs') -> assign (sp + argSize arg) args regs'
((reg, sp) : doc)
Nothing -> (doc, (arg:args), sp)
findAvailableReg N (vreg:vregs, fregs, dregs, lregs) =
Just (vreg, (vregs,fregs,dregs,lregs))
findAvailableReg P (vreg:vregs, fregs, dregs, lregs) =
Just (vreg, (vregs,fregs,dregs,lregs))
findAvailableReg F (vregs, freg:fregs, dregs, lregs) =
Just (freg, (vregs,fregs,dregs,lregs))
findAvailableReg D (vregs, fregs, dreg:dregs, lregs) =
Just (dreg, (vregs,fregs,dregs,lregs))
findAvailableReg L (vregs, fregs, dregs, lreg:lregs) =
Just (lreg, (vregs,fregs,dregs,lregs))
findAvailableReg _ _ = Nothing
assign_reg_to_stk reg sp
= loadSpWordOff (regRep reg) sp <> text " = " <> text reg <> semi
assign_stk_to_reg reg sp
= text reg <> text " = " <> loadSpWordOff (regRep reg) sp <> semi
regRep ('F':_) = "F_"
regRep ('D':_) = "D_"
regRep ('L':_) = "L_"
regRep _ = "W_"
loadSpWordOff :: String -> Int -> Doc
loadSpWordOff rep off = text rep <> text "[Sp+WDS(" <> int off <> text ")]"
-- Make a jump
mkJump :: RegStatus -- Registerised status
-> Doc -- Jump target
-> [Reg] -- Registers that are definitely live
-> [ArgRep] -- Jump arguments
-> Doc
mkJump regstatus jump live args =
text "jump " <> jump <+> brackets (hcat (punctuate comma (map text regs)))
where
(reg_locs, _, _) = assignRegs regstatus 0 args
regs = (nub . sort) (live ++ map fst reg_locs)
-- make a ptr/non-ptr bitmap from a list of argument types
-- A pointer contributes a single 0 bit; a non-pointer contributes one
-- 1 bit per word it occupies. Built with foldr so the first argument's
-- bits end up in the least significant positions.
mkBitmap :: [ArgRep] -> Word32
mkBitmap args = foldr f 0 args
  where f arg bm | isPtr arg = bm `shiftL` 1
                 | otherwise = (bm `shiftL` size) .|. ((1 `shiftL` size) - 1)
          where size = argSize arg
-- -----------------------------------------------------------------------------
-- Generating the application functions
-- A SUBTLE POINT about stg_ap functions (can't think of a better
-- place to put this comment --SDM):
--
-- The entry convention to an stg_ap_ function is as follows: all the
-- arguments are on the stack (we might revisit this at some point,
-- but it doesn't make any difference on x86), and THERE IS AN EXTRA
-- EMPTY STACK SLOT at the top of the stack.
--
-- Why? Because in several cases, stg_ap_* will need an extra stack
-- slot, eg. to push a return address in the THUNK case, and this is a
-- way of pushing the stack check up into the caller which is probably
-- doing one anyway. Allocating the extra stack slot in the caller is
-- also probably free, because it will be adjusting Sp after pushing
-- the args anyway (this might not be true of register-rich machines
-- when we start passing args to stg_ap_* in regs).
mkApplyName args
= text "stg_ap_" <> text (concatMap showArg args)
mkApplyRetName args
= mkApplyName args <> text "_ret"
mkApplyFastName args
= mkApplyName args <> text "_fast"
mkApplyInfoName args
= mkApplyName args <> text "_info"
mb_tag_node arity | Just tag <- tagForArity arity = mkTagStmt tag <> semi
| otherwise = empty
mkTagStmt tag = text ("R1 = R1 + "++ show tag)
type StackUsage = (Int, Int) -- PROFILING, normal
maxStack :: [StackUsage] -> StackUsage
maxStack = (maximum *** maximum) . unzip
stackCheck
:: RegStatus -- Registerised status
-> [ArgRep]
-> Bool -- args in regs?
-> Doc -- fun_info_label
-> StackUsage
-> Doc
stackCheck regstatus args args_in_regs fun_info_label (prof_sp, norm_sp) =
let
(reg_locs, leftovers, sp_offset) = assignRegs regstatus 1 args
cmp_sp n
| n > 0 =
text "if (Sp - WDS(" <> int n <> text ") < SpLim) {" $$
nest 4 (vcat [
if args_in_regs
then
text "Sp_adj" <> parens (int (-sp_offset)) <> semi $$
saveRegOffs reg_locs
else
empty,
text "Sp(0) = " <> fun_info_label <> char ';',
mkJump regstatus (text "__stg_gc_enter_1") ["R1"] [] <> semi
]) $$
char '}'
| otherwise = empty
in
vcat [ text "#ifdef PROFILING",
cmp_sp prof_sp,
text "#else",
cmp_sp norm_sp,
text "#endif"
]
genMkPAP :: RegStatus -- Register status
-> String -- Macro
-> String -- Jump target
-> [Reg] -- Registers that are definitely live
-> String -- Ticker
-> String -- Disamb
-> Bool -- Don't load argument registers before jump if True
-> Bool -- Arguments already in registers if True
-> Bool -- Is a PAP if True
-> [ArgRep] -- Arguments
-> Int -- Size of all arguments
-> Doc -- info label
-> Bool -- Is a function
-> (Doc, StackUsage)
genMkPAP regstatus macro jump live ticker disamb
no_load_regs -- don't load argument regs before jumping
args_in_regs -- arguments are already in regs
is_pap args all_args_size fun_info_label
is_fun_case
= (doc, stack_usage)
where
doc = vcat smaller_arity_doc $$ exact_arity_case $$ larger_arity_doc
stack_usage = maxStack (larger_arity_stack : smaller_arity_stack)
n_args = length args
-- offset of arguments on the stack at slow apply calls.
stk_args_slow_offset = 1
stk_args_offset
| args_in_regs = 0
| otherwise = stk_args_slow_offset
-- The SMALLER ARITY cases:
-- if (arity == 1) {
-- Sp[0] = Sp[1];
-- Sp[1] = (W_)&stg_ap_1_info;
-- JMP_(GET_ENTRY(R1.cl));
(smaller_arity_doc, smaller_arity_stack)
= unzip [ smaller_arity i | i <- [1..n_args-1] ]
smaller_arity arity = (doc, stack_usage)
where
(save_regs, stack_usage)
| overflow_regs = save_extra_regs
| otherwise = shuffle_extra_args
doc =
text "if (arity == " <> int arity <> text ") {" $$
nest 4 (vcat [
-- text "TICK_SLOW_CALL_" <> text ticker <> text "_TOO_MANY();",
-- load up regs for the call, if necessary
load_regs,
-- If we have more args in registers than are required
-- for the call, then we must save some on the stack,
-- and set up the stack for the follow-up call.
-- If the extra arguments are on the stack, then we must
-- instead shuffle them down to make room for the info
-- table for the follow-on call.
save_regs,
-- for a PAP, we have to arrange that the stack contains a
-- return address in the event that stg_PAP_entry fails its
-- heap check. See stg_PAP_entry in Apply.hc for details.
if is_pap
then text "R2 = " <> mkApplyInfoName this_call_args <> semi
else empty,
if is_fun_case then mb_tag_node arity else empty,
if overflow_regs
then text "jump_SAVE_CCCS" <> parens (text jump) <> semi
else mkJump regstatus (text jump) live (if no_load_regs then [] else args) <> semi
]) $$
text "}"
-- offsets in case we need to save regs:
(reg_locs, _, _)
= assignRegs regstatus stk_args_offset args
-- register assignment for *this function call*
(reg_locs', reg_call_leftovers, reg_call_sp_stk_args)
= assignRegs regstatus stk_args_offset (take arity args)
load_regs
| no_load_regs || args_in_regs = empty
| otherwise = loadRegOffs reg_locs'
(this_call_args, rest_args) = splitAt arity args
-- the offset of the stack args from initial Sp
sp_stk_args
| args_in_regs = stk_args_offset
| no_load_regs = stk_args_offset
| otherwise = reg_call_sp_stk_args
-- the stack args themselves
this_call_stack_args
| args_in_regs = reg_call_leftovers -- sp offsets are wrong
| no_load_regs = this_call_args
| otherwise = reg_call_leftovers
stack_args_size = sum (map argSize this_call_stack_args)
overflow_regs = args_in_regs && length reg_locs > length reg_locs'
save_extra_regs = (doc, (size,size))
where
-- we have extra arguments in registers to save
extra_reg_locs = drop (length reg_locs') (reverse reg_locs)
adj_reg_locs = [ (reg, off - adj + 1) |
(reg,off) <- extra_reg_locs ]
adj = case extra_reg_locs of
(reg, fst_off):_ -> fst_off
size = snd (last adj_reg_locs) + 1
doc =
text "Sp_adj(" <> int (-size) <> text ");" $$
saveRegOffs adj_reg_locs $$
loadSpWordOff "W_" 0 <> text " = " <>
mkApplyInfoName rest_args <> semi
shuffle_extra_args = (doc, (shuffle_prof_stack, shuffle_norm_stack))
where
doc = vcat [ text "#ifdef PROFILING",
shuffle_prof_doc,
text "#else",
shuffle_norm_doc,
text "#endif"]
(shuffle_prof_doc, shuffle_prof_stack) = shuffle True
(shuffle_norm_doc, shuffle_norm_stack) = shuffle False
-- Sadly here we have to insert an stg_restore_cccs frame
-- just underneath the stg_ap_*_info frame if we're
-- profiling; see Note [jump_SAVE_CCCS]
shuffle prof = (doc, -sp_adj)
where
sp_adj = sp_stk_args - 1 - offset
offset = if prof then 2 else 0
doc =
vcat (map (shuffle_down (offset+1))
[sp_stk_args .. sp_stk_args+stack_args_size-1]) $$
(if prof
then
loadSpWordOff "W_" (sp_stk_args+stack_args_size-3)
<> text " = stg_restore_cccs_info;" $$
loadSpWordOff "W_" (sp_stk_args+stack_args_size-2)
<> text " = CCCS;"
else empty) $$
loadSpWordOff "W_" (sp_stk_args+stack_args_size-1)
<> text " = "
<> mkApplyInfoName rest_args <> semi $$
text "Sp_adj(" <> int sp_adj <> text ");"
shuffle_down j i =
loadSpWordOff "W_" (i-j) <> text " = " <>
loadSpWordOff "W_" i <> semi
-- The EXACT ARITY case
--
-- if (arity == 1) {
-- Sp++;
-- JMP_(GET_ENTRY(R1.cl));
exact_arity_case
= text "if (arity == " <> int n_args <> text ") {" $$
let
(reg_doc, sp')
| no_load_regs || args_in_regs = (empty, stk_args_offset)
| otherwise = loadRegArgs regstatus stk_args_offset args
in
nest 4 (vcat [
-- text "TICK_SLOW_CALL_" <> text ticker <> text "_CORRECT();",
reg_doc,
text "Sp_adj(" <> int sp' <> text ");",
if is_pap
then text "R2 = " <> fun_info_label <> semi
else empty,
if is_fun_case then mb_tag_node n_args else empty,
mkJump regstatus (text jump) live (if no_load_regs then [] else args) <> semi
])
-- The LARGER ARITY cases:
--
-- } else /* arity > 1 */ {
-- BUILD_PAP(1,0,(W_)&stg_ap_v_info);
-- }
(larger_arity_doc, larger_arity_stack) = (doc, stack)
where
-- offsets in case we need to save regs:
(reg_locs, leftovers, sp_offset)
= assignRegs regstatus stk_args_slow_offset args
-- BUILD_PAP assumes args start at offset 1
stack | args_in_regs = (sp_offset, sp_offset)
| otherwise = (0,0)
doc =
text "} else {" $$
let
save_regs
| args_in_regs =
text "Sp_adj(" <> int (-sp_offset) <> text ");" $$
saveRegOffs reg_locs
| otherwise =
empty
in
nest 4 (vcat [
-- text "TICK_SLOW_CALL_" <> text ticker <> text "_TOO_FEW();",
save_regs,
-- Before building the PAP, tag the function closure pointer
if is_fun_case then
vcat [
text "if (arity < " <> int tAG_BITS_MAX <> text ") {",
text " R1 = R1 + arity" <> semi,
text "}"
]
else empty
,
text macro <> char '(' <> int n_args <> comma <>
int all_args_size <>
text "," <> fun_info_label <>
text "," <> text disamb <>
text ");"
]) $$
char '}'
-- Note [jump_SAVE_CCCS]
-- when profiling, if we have some extra arguments to apply that we
-- save to the stack, we must also save the current cost centre stack
-- and restore it when applying the extra arguments. This is all
-- handled by the macro jump_SAVE_CCCS(target), defined in
-- rts/AutoApply.h.
--
-- At the jump, the stack will look like this:
--
-- ... extra args ...
-- stg_ap_pp_info
-- CCCS
-- stg_restore_cccs_info
-- --------------------------------------
-- Examine tag bits of function pointer and enter it
-- directly if needed.
-- TODO: remove the redundant case in the original code.
enterFastPath regstatus no_load_regs args_in_regs args
| Just tag <- tagForArity (length args)
= enterFastPathHelper tag regstatus no_load_regs args_in_regs args
enterFastPath _ _ _ _ = empty
-- Copied from Constants.lhs & CgUtils.hs, i'd rather have this imported:
-- (arity,tag)
tAG_BITS = (TAG_BITS :: Int)
tAG_BITS_MAX = ((1 `shiftL` tAG_BITS) :: Int)
tagForArity :: Int -> Maybe Int
tagForArity i | i < tAG_BITS_MAX = Just i
| otherwise = Nothing
enterFastPathHelper :: Int
-> RegStatus
-> Bool
-> Bool
-> [ArgRep]
-> Doc
enterFastPathHelper tag regstatus no_load_regs args_in_regs args =
text "if (GETTAG(R1)==" <> int tag <> text ") {" $$
nest 4 (vcat [
reg_doc,
text "Sp_adj(" <> int sp' <> text ");",
-- enter, but adjust offset with tag
mkJump regstatus (text "%GET_ENTRY(R1-" <> int tag <> text ")") ["R1"] args <> semi
]) $$
text "}"
-- I don't totally understand this code, I copied it from
-- exact_arity_case
-- TODO: refactor
where
-- offset of arguments on the stack at slow apply calls.
stk_args_slow_offset = 1
stk_args_offset
| args_in_regs = 0
| otherwise = stk_args_slow_offset
(reg_doc, sp')
| no_load_regs || args_in_regs = (empty, stk_args_offset)
| otherwise = loadRegArgs regstatus stk_args_offset args
tickForArity arity
| True
= empty
| Just tag <- tagForArity arity
= vcat [
text "W_[TOTAL_CALLS] = W_[TOTAL_CALLS] + 1;",
text "W_[SLOW_CALLS_" <> int arity <> text "] = W_[SLOW_CALLS_" <> int arity <> text "] + 1;",
text "if (TO_W_(StgFunInfoExtra_arity(%FUN_INFO(%INFO_PTR(UNTAG(R1))))) == " <> int arity <> text " ) {",
text " W_[RIGHT_ARITY_" <> int arity <> text "] = W_[RIGHT_ARITY_" <> int arity <> text "] + 1;",
text " if (GETTAG(R1)==" <> int tag <> text ") {",
text " W_[TAGGED_PTR_" <> int arity <> text "] = W_[TAGGED_PTR_" <> int arity <> text "] + 1;",
text " } else {",
-- force a halt when not tagged!
-- text " W_[0]=0;",
text " }",
text "}"
]
tickForArity _ = text "W_[TOTAL_CALLS] = W_[TOTAL_CALLS] + 1;"
-- -----------------------------------------------------------------------------
-- generate an apply function
-- args is a list of 'p', 'n', 'f', 'd' or 'l'
formalParam :: ArgRep -> Int -> Doc
formalParam V _ = empty
formalParam arg n =
formalParamType arg <> space <>
text "arg" <> int n <> text ", "
formalParamType arg = argRep arg
argRep F = text "F_"
argRep D = text "D_"
argRep L = text "L_"
argRep P = text "gcptr"
argRep V16 = text "V16_"
argRep V32 = text "V32_"
argRep V64 = text "V64_"
argRep _ = text "W_"
genApply regstatus args =
let
fun_ret_label = mkApplyRetName args
fun_info_label = mkApplyInfoName args
all_args_size = sum (map argSize args)
(bco_doc, bco_stack) =
genMkPAP regstatus "BUILD_PAP" "ENTRY_LBL(stg_BCO)" ["R1"] "FUN" "BCO"
True{-stack apply-} False{-args on stack-} False{-not a PAP-}
args all_args_size fun_info_label {- tag stmt -}False
(fun_doc, fun_stack) =
genMkPAP regstatus "BUILD_PAP" "%GET_ENTRY(UNTAG(R1))" ["R1"] "FUN" "FUN"
False{-reg apply-} False{-args on stack-} False{-not a PAP-}
args all_args_size fun_info_label {- tag stmt -}True
(pap_doc, pap_stack) =
genMkPAP regstatus "NEW_PAP" "stg_PAP_apply" ["R1", "R2"] "PAP" "PAP"
True{-stack apply-} False{-args on stack-} True{-is a PAP-}
args all_args_size fun_info_label {- tag stmt -}False
stack_usage = maxStack [bco_stack, fun_stack, pap_stack]
in
vcat [
text "INFO_TABLE_RET(" <> mkApplyName args <> text ", " <>
text "RET_SMALL, W_ info_ptr, " <> (cat $ zipWith formalParam args [1..]) <>
text ")\n{",
nest 4 (vcat [
text "W_ info;",
text "W_ arity;",
text "unwind Sp = Sp + WDS(" <> int (1+all_args_size) <> text ");",
-- if fast == 1:
-- print "static void *lbls[] ="
-- print " { [FUN] &&fun_lbl,"
-- print " [FUN_1_0] &&fun_lbl,"
-- print " [FUN_0_1] &&fun_lbl,"
-- print " [FUN_2_0] &&fun_lbl,"
-- print " [FUN_1_1] &&fun_lbl,"
-- print " [FUN_0_2] &&fun_lbl,"
-- print " [FUN_STATIC] &&fun_lbl,"
-- print " [PAP] &&pap_lbl,"
-- print " [THUNK] &&thunk_lbl,"
-- print " [THUNK_1_0] &&thunk_lbl,"
-- print " [THUNK_0_1] &&thunk_lbl,"
-- print " [THUNK_2_0] &&thunk_lbl,"
-- print " [THUNK_1_1] &&thunk_lbl,"
-- print " [THUNK_0_2] &&thunk_lbl,"
-- print " [THUNK_STATIC] &&thunk_lbl,"
-- print " [THUNK_SELECTOR] &&thunk_lbl,"
-- print " [IND] &&ind_lbl,"
-- print " [IND_STATIC] &&ind_lbl,"
-- print " [IND_PERM] &&ind_lbl,"
-- print " };"
tickForArity (length args),
text "",
text "IF_DEBUG(apply,foreign \"C\" debugBelch(\"" <> fun_ret_label <>
text "... \"); foreign \"C\" printClosure(R1 \"ptr\"));",
text "IF_DEBUG(sanity,foreign \"C\" checkStackFrame(Sp+WDS(" <> int (1 + all_args_size)
<> text ")\"ptr\"));",
-- text "IF_DEBUG(sanity,checkStackChunk(Sp+" <> int (1 + all_args_size) <>
-- text ", CurrentTSO->stack + CurrentTSO->stack_size));",
-- text "TICK_SLOW_CALL(" <> int (length args) <> text ");",
let do_assert [] _ = []
do_assert (arg:args) offset
| isPtr arg = this : rest
| otherwise = rest
where this = text "ASSERT(LOOKS_LIKE_CLOSURE_PTR(Sp("
<> int offset <> text ")));"
rest = do_assert args (offset + argSize arg)
in
vcat (do_assert args 1),
text "again:",
-- if pointer is tagged enter it fast!
enterFastPath regstatus False False args,
stackCheck regstatus args False{-args on stack-}
fun_info_label stack_usage,
-- Functions can be tagged, so we untag them!
text "R1 = UNTAG(R1);",
text "info = %INFO_PTR(R1);",
-- if fast == 1:
-- print " goto *lbls[info->type];";
-- else:
text "switch [INVALID_OBJECT .. N_CLOSURE_TYPES] (TO_W_(%INFO_TYPE(%STD_INFO(info)))) {",
nest 4 (vcat [
-- if fast == 1:
-- print " bco_lbl:"
-- else:
text "case BCO: {",
nest 4 (vcat [
text "arity = TO_W_(StgBCO_arity(R1));",
text "ASSERT(arity > 0);",
bco_doc
]),
text "}",
-- if fast == 1:
-- print " fun_lbl:"
-- else:
text "case FUN,",
text " FUN_1_0,",
text " FUN_0_1,",
text " FUN_2_0,",
text " FUN_1_1,",
text " FUN_0_2,",
text " FUN_STATIC: {",
nest 4 (vcat [
text "arity = TO_W_(StgFunInfoExtra_arity(%FUN_INFO(info)));",
text "ASSERT(arity > 0);",
fun_doc
]),
text "}",
-- if fast == 1:
-- print " pap_lbl:"
-- else:
text "case PAP: {",
nest 4 (vcat [
text "arity = TO_W_(StgPAP_arity(R1));",
text "ASSERT(arity > 0);",
pap_doc
]),
text "}",
text "",
-- if fast == 1:
-- print " thunk_lbl:"
-- else:
text "case AP,",
text " AP_STACK,",
text " BLACKHOLE,",
text " WHITEHOLE,",
text " THUNK,",
text " THUNK_1_0,",
text " THUNK_0_1,",
text " THUNK_2_0,",
text " THUNK_1_1,",
text " THUNK_0_2,",
text " THUNK_STATIC,",
text " THUNK_SELECTOR: {",
nest 4 (vcat [
-- text "TICK_SLOW_CALL_UNEVALD(" <> int (length args) <> text ");",
text "Sp(0) = " <> fun_info_label <> text ";",
-- CAREFUL! in SMP mode, the info table may already have been
-- overwritten by an indirection, so we must enter the original
-- info pointer we read, don't read it again, because it might
-- not be enterable any more.
text "jump_SAVE_CCCS(%ENTRY_CODE(info));",
-- see Note [jump_SAVE_CCCS]
text ""
]),
text "}",
-- if fast == 1:
-- print " ind_lbl:"
-- else:
text "case IND,",
text " IND_STATIC,",
text " IND_PERM: {",
nest 4 (vcat [
text "R1 = StgInd_indirectee(R1);",
-- An indirection node might contain a tagged pointer
text "goto again;"
]),
text "}",
text "",
-- if fast == 0:
text "default: {",
nest 4 (
text "foreign \"C\" barf(\"" <> fun_ret_label <> text "\") never returns;"
),
text "}"
]),
text "}"
]),
text "}"
]
-- -----------------------------------------------------------------------------
-- Making a fast unknown application, args are in regs
-- | Generate the fast entry point for an unknown function application:
-- the arguments are already in registers (as opposed to 'genApply',
-- where they arrive on the stack).  If the scrutinised closure is not a
-- FUN of the right arity, the registers are flushed to the stack and we
-- jump to the corresponding slow (stack-based) apply entry.
genApplyFast regstatus args =
   let
    fun_fast_label = mkApplyFastName args
    fun_ret_label = text "RET_LBL" <> parens (mkApplyName args)
    fun_info_label = mkApplyInfoName args
    all_args_size = sum (map argSize args)

    -- code to build a PAP when the function's arity doesn't match;
    -- here the arguments live in registers ("args in regs")
    (fun_doc, fun_stack) =
       genMkPAP regstatus "BUILD_PAP" "%GET_ENTRY(UNTAG(R1))" ["R1"] "FUN" "FUN"
          False{-reg apply-} True{-args in regs-} False{-not a PAP-}
          args all_args_size fun_info_label {- tag stmt -}True

    -- stack slots the register-borne arguments would occupy if we have
    -- to spill them in the default branch below
    (reg_locs, leftovers, sp_offset) = assignRegs regstatus 1 args

    stack_usage = maxStack [fun_stack, (sp_offset,sp_offset)]
   in
    vcat [
     fun_fast_label,
     char '{',
     nest 4 (vcat [
        text "W_ info;",
        text "W_ arity;",

        tickForArity (length args),

        -- if pointer is tagged enter it fast!
        enterFastPath regstatus False True args,

        stackCheck regstatus args True{-args in regs-}
           fun_info_label stack_usage,

        -- Functions can be tagged, so we untag them!
        text "R1 = UNTAG(R1);",
        text "info = %GET_STD_INFO(R1);",
        text "switch [INVALID_OBJECT .. N_CLOSURE_TYPES] (TO_W_(%INFO_TYPE(info))) {",
        nest 4 (vcat [
           text "case FUN,",
           text " FUN_1_0,",
           text " FUN_0_1,",
           text " FUN_2_0,",
           text " FUN_1_1,",
           text " FUN_0_2,",
           text " FUN_STATIC: {",
           nest 4 (vcat [
              text "arity = TO_W_(StgFunInfoExtra_arity(%GET_FUN_INFO(R1)));",
              text "ASSERT(arity > 0);",
              fun_doc
           ]),
           char '}',

           -- not a FUN: spill the register arguments to the stack and
           -- retry via the slow apply entry point
           text "default: {",
           nest 4 (vcat [
              text "Sp_adj" <> parens (int (-sp_offset)) <> semi,
              saveRegOffs reg_locs,
              mkJump regstatus fun_ret_label [] [] <> semi
           ]),
           char '}'
        ]),
        char '}'
     ]),
     char '}'
    ]
-- -----------------------------------------------------------------------------
-- Making a stack apply
-- These little functions are like slow entry points. They provide
-- the layer between the PAP entry code and the function's fast entry
-- point: namely they load arguments off the stack into registers (if
-- available) and jump to the function's entry code.
--
-- On entry: R1 points to the function closure
-- arguments are on the stack starting at Sp
--
-- Invariant: the list of arguments never contains void. Since we're only
-- interested in loading arguments off the stack here, we can ignore
-- void arguments.
-- | Entry-point label for the stack-apply code of the given argument
-- pattern (e.g. @stg_ap_stk_pp@ for two pointer arguments).
mkStackApplyEntryLabel :: [ArgRep] -> Doc
mkStackApplyEntryLabel args = text ("stg_ap_stk_" ++ concatMap showArg args)
-- | Generate one stack-apply entry point: load as many of the given
-- arguments off the stack into registers as will fit, pop them, and
-- jump to the function's entry code.
genStackApply :: RegStatus -> [ArgRep] -> Doc
genStackApply regstatus args = vcat
    [ mkStackApplyEntryLabel args
    , text "{"
    , nest 4 body
    , text "}"
    ]
  where
    -- register assignments plus the resulting stack-pointer adjustment
    (assign_regs, sp') = loadRegArgs regstatus 0 args
    body = vcat
        [ assign_regs
        , text "Sp_adj" <> parens (int sp') <> semi
        , mkJump regstatus (text "%GET_ENTRY(UNTAG(R1))") ["R1"] args <> semi
        ]
-- -----------------------------------------------------------------------------
-- Stack save entry points.
--
-- These code fragments are used to save registers on the stack at a heap
-- check failure in the entry code for a function. We also have to save R1
-- and the return address (stg_gc_fun_info) on the stack. See stg_gc_fun_gen
-- in HeapStackCheck.hc for more details.
-- | Entry-point label for the stack-save code of the given argument
-- pattern (e.g. @stg_stk_save_pp@).
mkStackSaveEntryLabel :: [ArgRep] -> Doc
mkStackSaveEntryLabel args = text ("stg_stk_save_" ++ concatMap showArg args)
-- | Generate one stack-save entry point: save the register-borne
-- arguments onto the stack together with R1 and the stg_gc_fun_info
-- return frame, then enter the garbage collector.
genStackSave :: RegStatus -> [ArgRep] -> Doc
genStackSave regstatus args =
  let fn_entry_label= mkStackSaveEntryLabel args in
  vcat [
    fn_entry_label,
    text "{", nest 4 body, text "}"
   ]
 where
   body = vcat [text "Sp_adj" <> parens (int (-sp_offset)) <> semi,
                saveRegOffs reg_locs,
                text "Sp(2) = R1;",
                text "Sp(1) =" <+> int stk_args <> semi,
                text "Sp(0) = stg_gc_fun_info;",
                text "jump stg_gc_noregs [];"
               ]

   std_frame_size = 3 -- the std bits of the frame. See StgRetFun in Closures.h,
                      -- and the comment on stg_fun_gc_gen in HeapStackCheck.hc.

   -- which registers hold which args, and how far Sp must move
   (reg_locs, leftovers, sp_offset) = assignRegs regstatus std_frame_size args

   -- number of words of arguments on the stack.
   stk_args = sum (map argSize leftovers) + sp_offset - std_frame_size
-- -----------------------------------------------------------------------------
-- The prologue...
-- | Program entry point: parse the (optional) @-u@ flag, then render
-- the complete AutoApply.cmm source to stdout — the apply entry points,
-- their fast variants, the stack apply/save fragments, and the lookup
-- arrays over 'stackApplyTypes'.
main = do
  args <- getArgs
  regstatus <- case args of
                 [] -> return Registerised
                 ["-u"] -> return Unregisterised
                 _other -> do hPutStrLn stderr "syntax: genapply [-u]"
                              exitWith (ExitFailure 1)
  let the_code = vcat [
        text "// DO NOT EDIT!",
        text "// Automatically generated by GenApply.hs",
        text "",
        text "#include \"Cmm.h\"",
        text "#include \"AutoApply.h\"",
        text "",
        vcat (intersperse (text "") $
           map (genApply regstatus) applyTypes),
        vcat (intersperse (text "") $
           map (genStackFns regstatus) stackApplyTypes),

        vcat (intersperse (text "") $
           map (genApplyFast regstatus) applyTypes),

        genStackApplyArray stackApplyTypes,
        genStackSaveArray stackApplyTypes,
        genBitmapArray stackApplyTypes,

        text "" -- add a newline at the end of the file
      ]
  -- in
  putStr (render the_code)
-- These have been shown to cover about 99% of cases in practice...
-- Used by 'main' for both the stack-based ('genApply') and the
-- register-based ('genApplyFast') unknown-application entry points.
applyTypes = [
        [V],
        [F],
        [D],
        [L],
        [V16],
        [V32],
        [V64],
        [N],
        [P],
        [P,V],
        [P,P],
        [P,P,V],
        [P,P,P],
        [P,P,P,V],
        [P,P,P,P],
        [P,P,P,P,P],
        [P,P,P,P,P,P]
   ]
-- No need for V args in the stack apply cases.
-- ToDo: the stack apply and stack save code doesn't make a distinction
-- between N and P (they both live in the same register), only the bitmap
-- changes, so we could share the apply/save code between lots of cases.
--
-- NOTE: other places to change if you change stackApplyTypes:
-- - includes/rts/storage/FunTypes.h
-- - compiler/codeGen/CgCallConv.lhs: stdPattern
-- Argument patterns for the stack apply/save fragments generated by
-- 'genStackFns', and for the lookup arrays and bitmaps emitted by
-- 'genStackApplyArray', 'genStackSaveArray' and 'genBitmapArray'.
stackApplyTypes = [
        [],
        [N],
        [P],
        [F],
        [D],
        [L],
        [V16],
        [V32],
        [V64],
        [N,N],
        [N,P],
        [P,N],
        [P,P],
        [N,N,N],
        [N,N,P],
        [N,P,N],
        [N,P,P],
        [P,N,N],
        [P,N,P],
        [P,P,N],
        [P,P,P],
        [P,P,P,P],
        [P,P,P,P,P],
        [P,P,P,P,P,P],
        [P,P,P,P,P,P,P],
        [P,P,P,P,P,P,P,P]
   ]
-- | Both stack-based code fragments for one argument pattern: the
-- stack-apply entry followed by the stack-save entry.
genStackFns regstatus args = vcat
    [ genStackApply regstatus args
    , genStackSave regstatus args
    ]
-- | Emit the @stg_ap_stack_entries@ table: one entry label per argument
-- pattern, preceded by three zero words for the generic closure types.
genStackApplyArray types =
   vcat [
     text "section \"relrodata\" {",
     text "stg_ap_stack_entries:",
     text "W_ 0; W_ 0; W_ 0;", -- ARG_GEN, ARG_GEN_BIG, ARG_BCO
     vcat (map arr_ent types),
     text "}"
   ]
 where
  arr_ent ty = text "W_" <+> mkStackApplyEntryLabel ty <> semi
-- | Emit the @stg_stack_save_entries@ table: one save-entry label per
-- argument pattern, preceded by three zero words for the generic types.
genStackSaveArray types =
   vcat [
     text "section \"relrodata\" {",
     text "stg_stack_save_entries:",
     text "W_ 0; W_ 0; W_ 0;", -- ARG_GEN, ARG_GEN_BIG, ARG_BCO
     vcat (map arr_ent types),
     text "}"
   ]
 where
  arr_ent ty = text "W_" <+> mkStackSaveEntryLabel ty <> semi
-- | Emit the @stg_arg_bitmaps@ table.  Each word packs the pointerhood
-- bitmap of an argument pattern (shifted by BITMAP_BITS_SHIFT) together
-- with its total size in words in the low bits.
genBitmapArray :: [[ArgRep]] -> Doc
genBitmapArray types =
   vcat [
     text "section \"rodata\" {",
     text "stg_arg_bitmaps:",
     text "W_ 0; W_ 0; W_ 0;", -- ARG_GEN, ARG_GEN_BIG, ARG_BCO
     vcat (map gen_bitmap types),
     text "}"
   ]
  where
   gen_bitmap ty = text "W_" <+> int bitmap_val <> semi
     where bitmap_val =
              (fromIntegral (mkBitmap ty) `shiftL` BITMAP_BITS_SHIFT)
              .|. sum (map argSize ty)
| gridaphobe/ghc | utils/genapply/Main.hs | bsd-3-clause | 35,182 | 0 | 29 | 11,391 | 7,425 | 3,982 | 3,443 | 649 | 11 |
-- |
-- Module: Control.Wire.Trans.Embed
-- Copyright: (c) 2012 Ertugrul Soeylemez
-- License: BSD3
-- Maintainer: Ertugrul Soeylemez <es@ertes.de>
--
-- Combinators for embedding wires.
module Control.Wire.Trans.Embed
( -- * Embedding wires
embed
)
where
import Control.Wire.Wire
-- | Performs the argument wire with the input time delta. It is
-- stepped often enough to catch up with the main wire. The individual
-- results are combined as given by the fold (second and third
-- argument).
--
-- * Complexity: O(n) time wrt stepping the subwire, where n is the
-- number of times the subwire is stepped.
--
-- * Depends: like argument wire, if stepped.
--
-- * Inhibits: When the fold results in a 'Left'.
embed ::
    (Monad m)
    => (a -> Time) -- ^ Time delta for the subwire.
    -> (Either e c -> Either e b -> Either e c) -- ^ Folding function.
    -> Either e c -- ^ Fold base value.
    -> Wire e m a b -- ^ Subwire to step.
    -> Wire e m a c
embed delta fold z = embed' 0
    where
    -- 'rdt' is the residual time carried over from the previous frame:
    -- the part of the accumulated delta not yet consumed by a whole
    -- number of subwire steps.
    embed' rdt w0 =
        mkGen $ \dt x' ->
            let idt = delta x'
                -- Step the subwire once per 'idt' that fits into the
                -- outstanding time budget, folding each intermediate
                -- result into the accumulator 'r'.
                -- NOTE(review): assumes delta x' > 0; a non-positive
                -- delta would make this loop diverge — confirm.
                loop odt r w'
                    | odt >= idt = do
                        (mx, w) <- stepWire w' idt x'
                        loop (odt - idt) (fold r mx) w
                    | otherwise = return (r, embed' odt w')
            in loop (rdt + dt) z w0
| MaxDaten/netwire | Control/Wire/Trans/Embed.hs | bsd-3-clause | 1,434 | 0 | 18 | 492 | 284 | 153 | 131 | 21 | 1 |
module Tunagui
(
withTunagui
, Tunagui, TunaguiT, runTuna
, withWindow, WinConfig (..)
, WidgetTree (Container)
, Direction (..)
-- * Features
, onClick
) where
import Tunagui.General.Data (withWindow, WinConfig (..), WidgetTree (..), Direction (..))
import Tunagui.General.Initialize (withTunagui)
import Tunagui.General.Base (Tunagui, TunaguiT, runTuna)
import Tunagui.Widget (onClick)
| masatoko/tunagui | src/Tunagui.hs | bsd-3-clause | 455 | 0 | 6 | 112 | 116 | 78 | 38 | 14 | 0 |
{- |
Copyright: 2006, Bjorn Bringert.
Copyright: 2009, Henning Thielemann.
-}
module Network.MoHWS.Part.CGI (
Configuration, desc,
mkCGIEnv, mkCGIResponse,
) where
import qualified Network.MoHWS.Module as Module
import qualified Network.MoHWS.Module.Description as ModuleDesc
import qualified Network.MoHWS.HTTP.Header as Header
import qualified Network.MoHWS.HTTP.Request as Request
import qualified Network.MoHWS.HTTP.Response as Response
import qualified Network.MoHWS.Stream as Stream
import qualified Network.MoHWS.Server.Request as ServerRequest
import qualified Network.MoHWS.Server.Context as ServerContext
import Network.MoHWS.Logger.Error (debug, abort, debugOnAbort, logError, )
import qualified Network.MoHWS.Utility as Util
import qualified Network.MoHWS.Configuration as Config
import qualified Network.MoHWS.Configuration.Accessor as ConfigA
import qualified Network.MoHWS.Configuration.Parser as ConfigParser
import qualified Data.Accessor.Basic as Accessor
import Data.Accessor.Basic ((.>))
import qualified Text.ParserCombinators.Parsec as Parsec
import Network.MoHWS.ParserUtility (trimLWS, )
import Data.Maybe.HT (toMaybe, )
import Data.Tuple.HT (mapFst, )
import Data.Bool.HT (if', )
import Control.Monad.Trans.Maybe (MaybeT, )
import Control.Concurrent (forkIO, )
import qualified Control.Exception as Exception
import Control.Monad.Trans.Class (lift, )
import Control.Monad (when, mzero, )
import Data.Char (toUpper, )
import Data.List (isSuffixOf, )
import Network.BSD (hostName, )
import Network.Socket (inet_ntoa, )
import Network.URI (uriQuery, )
import qualified System.IO as IO
import System.IO.Error (isEOFError, )
import System.Posix (isDirectory, isRegularFile, isSymbolicLink, )
import System.Process (runInteractiveProcess, waitForProcess, )
import Text.ParserCombinators.Parsec (parse, )
-- | Module description for the CGI module: registers it under the name
-- \"cgi\" with its request handler, configuration parser and default
-- configuration.
desc :: (Stream.C body) => ModuleDesc.T body Configuration
desc =
   ModuleDesc.empty {
      ModuleDesc.name = "cgi",
      ModuleDesc.load = return . funs,
      ModuleDesc.configParser = parser,
      ModuleDesc.setDefltConfig = const defltConfig
   }
-- | CGI-specific part of the server configuration.
data Configuration =
   Configuration {
      suffixes_ :: [String] -- ^ file suffixes that are executed as CGI programs
   }
-- | Default configuration: only files ending in @.cgi@ are executed.
defltConfig :: Configuration
defltConfig =
   Configuration {
      suffixes_ = [".cgi"]
   }
-- | Accessor for the list of CGI suffixes in a 'Configuration'.
suffixes :: Accessor.T Configuration [String]
suffixes =
   Accessor.fromSetGet (\x c -> c{suffixes_ = x}) suffixes_
-- | Configuration-file parser: recognises the @cgisuffixes@ directive.
parser :: ConfigParser.T st Configuration
parser =
   ConfigParser.field "cgisuffixes" p_suffixes
-- | Parse the argument of @cgisuffixes@: zero or more string literals,
-- stored via the 'suffixes' accessor.
p_suffixes :: ConfigParser.T st Configuration
p_suffixes =
   ConfigParser.set (ConfigA.extension .> suffixes) $
   Parsec.many ConfigParser.stringLiteral
-- | Instantiate the module: the only hook provided is 'handleRequest'.
funs :: (Stream.C body) =>
   ServerContext.T Configuration -> Module.T body
funs st =
   Module.empty {
      Module.handleRequest = handleRequest st
   }
-- | Decide whether this request is for a CGI program and, if so, run
-- it.  Aborts (letting other modules handle the request) when no
-- program file is found or its suffix is not a configured CGI suffix.
-- Only GET and POST are executed; POST additionally feeds the request
-- body to the program's stdin.  Other methods get "not implemented".
handleRequest :: (Stream.C body) =>
   ServerContext.T Configuration -> ServerRequest.T body -> MaybeT IO (Response.T body)
handleRequest st sreq =
   do let conf = ServerContext.config st
      (pathProg, pathInfo) <-
         debugOnAbort st ("CGI: not handling " ++ ServerRequest.serverFilename sreq) $
         findProg st (ServerRequest.serverFilename sreq)
      let sufs = suffixes_ $ Config.extension conf
      when (not $ any (flip isSuffixOf pathProg) sufs)
         (abort st $ "CGI: not handling " ++ ServerRequest.serverFilename sreq ++ ", wrong suffix")
      let hndle = handleRequest2 st sreq pathProg pathInfo
      lift $
         case Request.command (ServerRequest.clientRequest sreq) of
            Request.GET -> hndle False
            Request.POST -> hndle True
            _ -> return $ Response.makeNotImplemented conf
-- | Actually execute the CGI program: build the environment, spawn the
-- process in the program's directory, optionally stream the request
-- body to its stdin, forward its stderr to the error log, and turn its
-- stdout into an HTTP response.  A malformed CGI header block yields an
-- internal server error.
handleRequest2 :: (Stream.C body) =>
   ServerContext.T ext -> ServerRequest.T body -> FilePath -> String -> Bool -> IO (Response.T body)
handleRequest2 st sreq pathProg pathInfo useReqBody =
   do let conf = ServerContext.config st
      let req = ServerRequest.clientRequest sreq
      env <- mkCGIEnv st sreq pathInfo
      let wdir = Util.dirname pathProg
          prog = "./" ++ Util.basename pathProg
      debug st $ "Running CGI program: " ++ prog ++ " in " ++ wdir
      (inp,out,err,pid)
          <- runInteractiveProcess prog [] (Just wdir) (Just env)
      -- feed the request body to the program's stdin in the background,
      -- or close stdin right away when there is no body to send
      if useReqBody
        then forkIO (writeBody inp req) >> return ()
        else IO.hClose inp
      -- log process stderr to the error log
      _ <- forkIO (logErrorsFromHandle st err)
      -- FIXME: exception handling
      -- FIXME: close handle?
      output <- Stream.readAll (Config.chunkSize conf) out
      -- wait in a separate thread, so that this thread can continue.
      -- this is needed since output is lazy.
      _ <- forkIO (waitForProcess pid >> return ())
      case parseCGIOutput output of
        Left errp ->
           do logError st errp
              return $ Response.makeInternalServerError conf
        Right (outputHeaders, content) ->
           mkCGIResponse outputHeaders content out
-- | Build an HTTP response from the program's CGI headers and body.
-- The status code comes from the @Status@ header if present; otherwise
-- it is 302 when a @Location@ header exists and 200 when not.  The
-- handle is the program's stdout; closing the response closes it.
mkCGIResponse :: Header.Group -> body -> IO.Handle -> IO (Response.T body)
mkCGIResponse outputHeaders content h =
   do let stat = Header.lookup (Header.HdrCustom "Status") outputHeaders
          loc = Header.lookup Header.HdrLocation outputHeaders
      (code,dsc) <-
         case stat of
           Nothing -> let c = maybe 200 (\_ -> 302) loc
                      in return (c, Response.descriptionFromCode c)
           Just s -> case reads s of
                       [(c,r)] -> return (c, trimLWS r)
                       _ -> fail "Bad Status line"
      let body =
             Response.Body {
                Response.size = Nothing,
                Response.source = "CGI script",
                Response.close = IO.hClose h,
                Response.content = content
             }
      -- FIXME: don't use response constructor directly
      return $
         Response.Cons code dsc outputHeaders [Header.ChunkedTransferCoding] True body
-- Split the requested file system path into the path to an
-- existing file, and some extra path info
-- | Split the requested file-system path into the path of an existing
-- file and the remaining "path info" suffix.  Empty paths abort; a
-- leading empty component means the path was absolute.
findProg :: ServerContext.T ext -> FilePath -> MaybeT IO (FilePath,String)
findProg st filename =
   case Util.splitPath filename of
     [] -> mzero -- this should never happen
     [""] -> mzero -- we got an empty path
     "":p -> firstFile st "/" p -- absolute path
     p:r -> firstFile st p r -- relative path
-- similar to Module.File.handleRequest
-- | Walk down the path components in @pis@ starting at @p@ until a
-- regular file is found; everything left over becomes the path info.
-- Directories descend one component, symlinks are followed only when
-- the configuration allows it, and anything else aborts.
firstFile :: ServerContext.T ext -> FilePath -> [String] -> MaybeT IO (FilePath,String)
firstFile st p pis =
   let conf = ServerContext.config st
       -- join two path pieces without doubling the separator
       mkPath x y =
          if Util.hasTrailingSlash x
            then x ++ y
            else x ++ "/" ++ y
       mkPathInfo [] = ""
       mkPathInfo q = "/" ++ Util.glue "/" q
       checkStat stat =
          if' (isDirectory stat)
             (case pis of
                 [] -> abort st $ "findProg: " ++ show p ++ " is a directory"
                 f:pis' -> firstFile st (mkPath p f) pis') $
          if' (isRegularFile stat) (return (p,mkPathInfo pis)) $
          if' (isSymbolicLink stat)
             (if Config.followSymbolicLinks conf
                then Util.statFile p >>= checkStat
                else abort st ("findProg: Not following symlink: " ++ show p)) $
          (abort st $ "Strange file: " ++ show p)
   in debugOnAbort st ("findProg: Not found: " ++ show p) (Util.statSymLink p) >>=
      checkStat
-- | Assemble the CGI/1.1 environment for the child process: the static
-- server variables, the per-request variables, and (currently none)
-- HTTP_* header variables.
mkCGIEnv :: ServerContext.T ext -> ServerRequest.T body -> String -> IO [(String,String)]
mkCGIEnv _st sreq pathInfo =
   do let req = ServerRequest.clientRequest sreq
      remoteAddr <- inet_ntoa (ServerRequest.clientAddress sreq)
      let -- the script's URI path is the request path minus the path info
          scriptName = ServerRequest.serverURIPath sreq `Util.dropSuffix` pathInfo
          -- FIXME: use canonical name if there is no ServerName
          serverEnv =
             [
              ("SERVER_SOFTWARE", Config.serverSoftware
                                  ++ "/" ++ Config.serverVersion),
              ("SERVER_NAME", hostName (ServerRequest.requestHostName sreq)),
              ("GATEWAY_INTERFACE", "CGI/1.1")
             ]
          requestEnv =
             [
              ("SERVER_PROTOCOL", show (Request.httpVersion req)),
              ("SERVER_PORT", show (ServerRequest.serverPort sreq)),
              ("REQUEST_METHOD", show (Request.command req)),
              ("PATH_TRANSLATED", ServerRequest.serverFilename sreq),
              ("SCRIPT_NAME", scriptName),
              ("QUERY_STRING", uriQuery (Request.uri req) `Util.dropPrefix` "?"),
              ("REMOTE_ADDR", remoteAddr),
              ("PATH_INFO", pathInfo),
              ("PATH", "/usr/local/bin:/usr/bin:/bin")
             ]
             ++ maybeHeader "AUTH_TYPE" Nothing -- FIXME
             ++ maybeHeader "REMOTE_USER" Nothing -- FIXME
             ++ maybeHeader "REMOTE_IDENT" Nothing -- FIXME
             ++ maybeHeader "REMOTE_HOST" (fmap hostName (ServerRequest.clientName sreq))
             ++ maybeHeader "CONTENT_TYPE" (Header.getContentType req)
             ++ maybeHeader "CONTENT_LENGTH" (fmap show $ Header.getContentLength req)
          hs = [] -- FIXME: convert headers to (name,value) pairs
          headerEnv = [("HTTP_"++ map toUpper n, v) | (n,v) <- hs]
      return $ serverEnv ++ requestEnv ++ headerEnv
-- Writes the body of a request to a handle.
writeBody :: (Stream.C body) =>
IO.Handle -> Request.T body -> IO ()
writeBody h req =
Stream.write h (Request.body req)
`Exception.finally`
IO.hClose h
-- | Reads lines form the given 'Handle' and log them with 'logError'.
-- | Read lines from the handle (the CGI program's stderr) until EOF and
-- log each one via 'logError'.  EOF terminates silently; any other
-- exception is itself logged.  The handle is always closed at the end.
logErrorsFromHandle :: ServerContext.T ext -> IO.Handle -> IO ()
logErrorsFromHandle st h =
   (Exception.catchJust (\ e -> toMaybe (isEOFError e) e)
       loop (const $ return ())
    `Exception.catch`
    \(Exception.SomeException e) -> logError st $ "CGI:" ++ show e)
   `Exception.finally` IO.hClose h
  where loop = do l <- IO.hGetLine h
                  logError st l
                  loop
-- | Turn an optional header value into a zero- or one-element
-- environment list for the given variable name.
maybeHeader :: String -> Maybe String -> [(String,String)]
maybeHeader name mval =
    case mval of
      Just v -> [(name, v)]
      Nothing -> []
{-
expects CRLF line endings, which is too strict
parseCGIOutput :: B.ByteString -> Either String (Header.Group, B.ByteString)
parseCGIOutput s =
let (hdrsStr, body) = breakHeaders s
in case parse Header.pGroup "CGI output" hdrsStr of
Left err -> Left (show err)
Right hdrs -> Right (hdrs, body)
breakHeaders :: B.ByteString -> (String, B.ByteString)
breakHeaders =
(\(hdrs, body) ->
mapFst (map B.head hdrs ++) $
if B.null $ head body
then ("", B.empty)
else (crLf, body!!4)) .
break (\suffix -> B.isPrefixOf (B.pack (crLf++crLf)) suffix || B.null suffix) .
B.tails
-}
-- | Split CGI output into its header block and body (via
-- 'breakHeaders') and parse the headers; 'Left' carries the parse
-- error rendered as a string.
parseCGIOutput :: (Stream.C body) => body -> Either String (Header.Group, body)
parseCGIOutput s =
   let (hdrLines, body) = breakHeaders s
   in -- parse headers in one go in order to handle multi-line headers correctly
      case parse Header.pGroup "CGI output" $ unlines hdrLines of
        Left err -> Left (show err)
        Right hdrs -> Right (hdrs, body)
-- | Split a stream into header lines and the remaining body.  Accepts
-- both CR-LF and bare LF (or LF-CR) line endings; an empty line ends
-- the header block.
breakHeaders :: (Stream.C body) => body -> ([String], body)
breakHeaders str =
   let (hdr,rest0) = Stream.break (\c -> c=='\r' || c=='\n') str
       -- length of the line terminator: 2 for a CR-LF/LF-CR pair, else 1
       skip =
          if Stream.isPrefixOf (Stream.fromString 2 "\r\n") rest0 ||
             Stream.isPrefixOf (Stream.fromString 2 "\n\r") rest0
            then 2 else 1
       rest1 = Stream.drop skip rest0
   in if Stream.isEmpty hdr
        then ([], rest1) -- blank line: headers are finished
        else mapFst (Stream.toString hdr :) $ breakHeaders rest1
| xpika/mohws | src/Network/MoHWS/Part/CGI.hs | bsd-3-clause | 11,773 | 0 | 22 | 3,170 | 3,107 | 1,661 | 1,446 | 220 | 5 |
-- |
-- Abstract Syntax for SIMPLE Languguage
module Language.SIMPLE.AbstractSyntax (
Expr(..)
,isAtom
,Stm(..)
,isCompound
) where
-- |
-- Expression
--
data Expr a = Number Int                  -- ^ integer literal
            | Boolean Bool                -- ^ boolean literal
            | Variable a                  -- ^ variable named by @a@
            | Add (Expr a) (Expr a)       -- ^ integer addition
            | Multiply (Expr a) (Expr a)  -- ^ integer multiplication
            | And (Expr a) (Expr a)       -- ^ boolean conjunction
            | Or (Expr a) (Expr a)        -- ^ boolean disjunction
            | Not (Expr a)                -- ^ boolean negation
            | LessThan (Expr a) (Expr a)  -- ^ integer comparison
            deriving (Show)
-- |
-- Predicate whether the specified expression is atomic or not.
--
-- | True exactly for the atomic expressions: literals and variables.
-- All compound expressions (arithmetic, boolean, comparison) are
-- non-atomic.
isAtom :: Expr a -> Bool
isAtom (Number _)   = True
isAtom (Boolean _)  = True
isAtom (Variable _) = True
isAtom _            = False
-- |
-- Statement
--
data Stm a = DoNothing                    -- ^ no-op statement
           | Assign a (Expr a)            -- ^ assignment to a variable
           | If (Expr a) (Stm a) (Stm a)  -- ^ conditional with both branches
           | Sequence (Stm a) (Stm a)     -- ^ sequential composition
           | While (Expr a) (Stm a)       -- ^ loop
           deriving (Show)
-- |
-- Predicate whether the specified statement is compound or not.
--
-- | True only for 'Sequence' statements; every other statement form is
-- considered simple.
isCompound :: Stm a -> Bool
isCompound (Sequence _ _) = True
isCompound _              = False
| nobsun/hs-uc | src/Language/SIMPLE/AbstractSyntax.hs | bsd-3-clause | 1,143 | 0 | 8 | 449 | 367 | 203 | 164 | 31 | 4 |
{-
-----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 2001-2003
--
-- Access to system tools: gcc, cp, rm etc
--
-----------------------------------------------------------------------------
-}
{-# LANGUAGE CPP, MultiWayIf, ScopedTypeVariables #-}
module SysTools (
-- * Initialisation
initSysTools,
initLlvmTargets,
-- * Interface to system tools
module SysTools.Tasks,
module SysTools.Info,
linkDynLib,
copy,
copyWithHeader,
-- * General utilities
Option(..),
expandTopDir,
-- * Platform-specifics
libmLinkOpts,
-- * Mac OS X frameworks
getPkgFrameworkOpts,
getFrameworkOpts
) where
#include "HsVersions.h"
import GhcPrelude
import Module
import Packages
import Config
import Outputable
import ErrUtils
import Platform
import Util
import DynFlags
import System.FilePath
import System.IO
import System.Directory
import SysTools.ExtraObj
import SysTools.Info
import SysTools.Tasks
import SysTools.BaseDir
{-
Note [How GHC finds toolchain utilities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
SysTools.initSysProgs figures out exactly where all the auxiliary programs
are, and initialises mutable variables to make it easy to call them.
To do this, it makes use of definitions in Config.hs, which is a Haskell
file containing variables whose value is figured out by the build system.
Config.hs contains two sorts of things
cGCC, The *names* of the programs
cCPP e.g. cGCC = gcc
cUNLIT cCPP = gcc -E
etc They do *not* include paths
cUNLIT_DIR The *path* to the directory containing unlit, split etc
cSPLIT_DIR *relative* to the root of the build tree,
for use when running *in-place* in a build tree (only)
---------------------------------------------
NOTES for an ALTERNATIVE scheme (i.e *not* what is currently implemented):
Another hair-brained scheme for simplifying the current tool location
nightmare in GHC: Simon originally suggested using another
configuration file along the lines of GCC's specs file - which is fine
except that it means adding code to read yet another configuration
file. What I didn't notice is that the current package.conf is
general enough to do this:
Package
{name = "tools", import_dirs = [], source_dirs = [],
library_dirs = [], hs_libraries = [], extra_libraries = [],
include_dirs = [], c_includes = [], package_deps = [],
extra_ghc_opts = ["-pgmc/usr/bin/gcc","-pgml${topdir}/bin/unlit", ... etc.],
extra_cc_opts = [], extra_ld_opts = []}
Which would have the advantage that we get to collect together in one
place the path-specific package stuff with the path-specific tool
stuff.
End of NOTES
---------------------------------------------
************************************************************************
* *
\subsection{Initialisation}
* *
************************************************************************
-}
-- | Read and parse the @llvm-targets@ file found under the top
-- directory (located via the optional @-B@ argument).  Each entry's
-- attribute string is split on whitespace to build an 'LlvmTarget'.
-- An unparseable file is a fatal 'pgmError'.
initLlvmTargets :: Maybe String
                -> IO LlvmTargets
initLlvmTargets mbMinusB
  = do top_dir <- findTopDir mbMinusB
       let llvmTargetsFile = top_dir </> "llvm-targets"
       llvmTargetsStr <- readFile llvmTargetsFile
       case maybeReadFuzzy llvmTargetsStr of
         Just s -> return (fmap mkLlvmTarget <$> s)
         Nothing -> pgmError ("Can't parse " ++ show llvmTargetsFile)
  where
    mkLlvmTarget :: (String, String, String) -> LlvmTarget
    mkLlvmTarget (dl, cpu, attrs) = LlvmTarget dl cpu (words attrs)
-- | Locate and parse the @settings@ and @platformConstants@ files under
-- the top directory, resolve all toolchain programs and flags named
-- there, and build the 'Settings' record used by the rest of the
-- compiler.  Any missing or unparseable entry is a fatal 'pgmError'.
initSysTools :: Maybe String -- Maybe TopDir path (without the '-B' prefix)
             -> IO Settings -- Set all the mutable variables above, holding
                            -- (a) the system programs
                            -- (b) the package-config file
                            -- (c) the GHC usage message
initSysTools mbMinusB
  = do top_dir <- findTopDir mbMinusB
       -- see Note [topdir: How GHC finds its files]
       -- NB: top_dir is assumed to be in standard Unix
       -- format, '/' separated
       let settingsFile = top_dir </> "settings"
           platformConstantsFile = top_dir </> "platformConstants"
           installed :: FilePath -> FilePath
           installed file = top_dir </> file
           libexec :: FilePath -> FilePath
           libexec file = top_dir </> "bin" </> file
       settingsStr <- readFile settingsFile
       platformConstantsStr <- readFile platformConstantsFile
       mySettings <- case maybeReadFuzzy settingsStr of
                     Just s ->
                         return s
                     Nothing ->
                         pgmError ("Can't parse " ++ show settingsFile)
       platformConstants <- case maybeReadFuzzy platformConstantsStr of
                            Just s ->
                                return s
                            Nothing ->
                                pgmError ("Can't parse " ++
                                          show platformConstantsFile)
       -- Three lookup flavours over the settings association list:
       -- raw strings (with $topdir expansion), YES/NO booleans, and
       -- Read-able values.  All failures are fatal.
       let getSetting key = case lookup key mySettings of
                            Just xs -> return $ expandTopDir top_dir xs
                            Nothing -> pgmError ("No entry for " ++ show key ++ " in " ++ show settingsFile)
           getBooleanSetting key = case lookup key mySettings of
                                   Just "YES" -> return True
                                   Just "NO" -> return False
                                   Just xs -> pgmError ("Bad value for " ++ show key ++ ": " ++ show xs)
                                   Nothing -> pgmError ("No entry for " ++ show key ++ " in " ++ show settingsFile)
           readSetting key = case lookup key mySettings of
                             Just xs ->
                                 case maybeRead xs of
                                   Just v -> return v
                                   Nothing -> pgmError ("Failed to read " ++ show key ++ " value " ++ show xs)
                             Nothing -> pgmError ("No entry for " ++ show key ++ " in " ++ show settingsFile)
       crossCompiling <- getBooleanSetting "cross compiling"
       targetArch <- readSetting "target arch"
       targetOS <- readSetting "target os"
       targetWordSize <- readSetting "target word size"
       targetUnregisterised <- getBooleanSetting "Unregisterised"
       targetHasGnuNonexecStack <- readSetting "target has GNU nonexec stack"
       targetHasIdentDirective <- readSetting "target has .ident directive"
       targetHasSubsectionsViaSymbols <- readSetting "target has subsections via symbols"
       myExtraGccViaCFlags <- getSetting "GCC extra via C opts"
       -- On Windows, mingw is distributed with GHC,
       -- so we look in TopDir/../mingw/bin
       -- It would perhaps be nice to be able to override this
       -- with the settings file, but it would be a little fiddly
       -- to make that possible, so for now you can't.
       gcc_prog <- getSetting "C compiler command"
       gcc_args_str <- getSetting "C compiler flags"
       gccSupportsNoPie <- getBooleanSetting "C compiler supports -no-pie"
       cpp_prog <- getSetting "Haskell CPP command"
       cpp_args_str <- getSetting "Haskell CPP flags"
       -- extra gcc flags needed for unregisterised builds
       let unreg_gcc_args = if targetUnregisterised
                            then ["-DNO_REGS", "-DUSE_MINIINTERPRETER"]
                            else []
           -- TABLES_NEXT_TO_CODE affects the info table layout.
           tntc_gcc_args
            | mkTablesNextToCode targetUnregisterised
            = ["-DTABLES_NEXT_TO_CODE"]
            | otherwise = []
           cpp_args= map Option (words cpp_args_str)
           gcc_args = map Option (words gcc_args_str
                                  ++ unreg_gcc_args
                                  ++ tntc_gcc_args)
       ldSupportsCompactUnwind <- getBooleanSetting "ld supports compact unwind"
       ldSupportsBuildId <- getBooleanSetting "ld supports build-id"
       ldSupportsFilelist <- getBooleanSetting "ld supports filelist"
       ldIsGnuLd <- getBooleanSetting "ld is GNU ld"
       perl_path <- getSetting "perl command"
       let pkgconfig_path = installed "package.conf.d"
           ghc_usage_msg_path = installed "ghc-usage.txt"
           ghci_usage_msg_path = installed "ghci-usage.txt"
           -- For all systems, unlit, split, mangle are GHC utilities
           -- architecture-specific stuff is done when building Config.hs
           unlit_path = libexec cGHC_UNLIT_PGM
           -- split is a Perl script
           split_script = libexec cGHC_SPLIT_PGM
       windres_path <- getSetting "windres command"
       libtool_path <- getSetting "libtool command"
       ar_path <- getSetting "ar command"
       ranlib_path <- getSetting "ranlib command"
       tmpdir <- getTemporaryDirectory
       touch_path <- getSetting "touch command"
       let -- On Win32 we don't want to rely on #!/bin/perl, so we prepend
           -- a call to Perl to get the invocation of split.
           -- On Unix, scripts are invoked using the '#!' method. Binary
           -- installations of GHC on Unix place the correct line on the
           -- front of the script at installation time, so we don't want
           -- to wire-in our knowledge of $(PERL) on the host system here.
           (split_prog, split_args)
             | isWindowsHost = (perl_path, [Option split_script])
             | otherwise = (split_script, [])
       mkdll_prog <- getSetting "dllwrap command"
       let mkdll_args = []
       -- cpp is derived from gcc on all platforms
       -- HACK, see setPgmP below. We keep 'words' here to remember to fix
       -- Config.hs one day.
       -- Other things being equal, as and ld are simply gcc
       gcc_link_args_str <- getSetting "C compiler link flags"
       let as_prog = gcc_prog
           as_args = gcc_args
           ld_prog = gcc_prog
           ld_args = gcc_args ++ map Option (words gcc_link_args_str)
       -- We just assume on command line
       lc_prog <- getSetting "LLVM llc command"
       lo_prog <- getSetting "LLVM opt command"
       lcc_prog <- getSetting "LLVM clang command"
       let iserv_prog = libexec "ghc-iserv"
       -- target platform description assembled from the settings above
       let platform = Platform {
                          platformArch = targetArch,
                          platformOS = targetOS,
                          platformWordSize = targetWordSize,
                          platformUnregisterised = targetUnregisterised,
                          platformHasGnuNonexecStack = targetHasGnuNonexecStack,
                          platformHasIdentDirective = targetHasIdentDirective,
                          platformHasSubsectionsViaSymbols = targetHasSubsectionsViaSymbols,
                          platformIsCrossCompiling = crossCompiling
                      }
       return $ Settings {
                    sTargetPlatform = platform,
                    sTmpDir = normalise tmpdir,
                    sGhcUsagePath = ghc_usage_msg_path,
                    sGhciUsagePath = ghci_usage_msg_path,
                    sTopDir = top_dir,
                    sRawSettings = mySettings,
                    sExtraGccViaCFlags = words myExtraGccViaCFlags,
                    sSystemPackageConfig = pkgconfig_path,
                    sLdSupportsCompactUnwind = ldSupportsCompactUnwind,
                    sLdSupportsBuildId = ldSupportsBuildId,
                    sLdSupportsFilelist = ldSupportsFilelist,
                    sLdIsGnuLd = ldIsGnuLd,
                    sGccSupportsNoPie = gccSupportsNoPie,
                    sProgramName = "ghc",
                    sProjectVersion = cProjectVersion,
                    sPgm_L = unlit_path,
                    sPgm_P = (cpp_prog, cpp_args),
                    sPgm_F = "",
                    sPgm_c = (gcc_prog, gcc_args),
                    sPgm_s = (split_prog,split_args),
                    sPgm_a = (as_prog, as_args),
                    sPgm_l = (ld_prog, ld_args),
                    sPgm_dll = (mkdll_prog,mkdll_args),
                    sPgm_T = touch_path,
                    sPgm_windres = windres_path,
                    sPgm_libtool = libtool_path,
                    sPgm_ar = ar_path,
                    sPgm_ranlib = ranlib_path,
                    sPgm_lo = (lo_prog,[]),
                    sPgm_lc = (lc_prog,[]),
                    sPgm_lcc = (lcc_prog,[]),
                    sPgm_i = iserv_prog,
                    sOpt_L = [],
                    sOpt_P = [],
                    sOpt_F = [],
                    sOpt_c = [],
                    sOpt_a = [],
                    sOpt_l = [],
                    sOpt_windres = [],
                    sOpt_lcc = [],
                    sOpt_lo = [],
                    sOpt_lc = [],
                    sOpt_i = [],
                    sPlatformConstants = platformConstants
             }
{- Note [Windows stack usage]
See: Trac #8870 (and #8834 for related info) and #12186
On Windows, occasionally we need to grow the stack. In order to do
this, we would normally just bump the stack pointer - but there's a
catch on Windows.
If the stack pointer is bumped by more than a single page, then the
pages between the initial pointer and the resulting location must be
properly committed by the Windows virtual memory subsystem. This is
only needed in the event we bump by more than one page (i.e. 4097 bytes
or more).
Windows compilers solve this by emitting a call to a special function
called _chkstk, which does this committing of the pages for you.
The reason this was causing a segfault is that the
new code generator tends to generate larger functions, so we needed more
stack space in GHC itself. In the x86 codegen, we needed approximately
~12kb of stack space in one go, which caused the process to segfault,
as the intervening pages were not committed.
GCC can emit such a check for us automatically but only when the flag
-fstack-check is used.
See https://gcc.gnu.org/onlinedocs/gnat_ugn/Stack-Overflow-Checking.html
for more information.
-}
-- | Copy a file, logging the reason for the copy as a compiler pass.
copy :: DynFlags -> String -> FilePath -> FilePath -> IO ()
copy dflags purpose from to = copyWithHeader dflags purpose Nothing from to

-- | Copy a file, optionally prepending a header string.
--
-- The header (e.g. a @LINE@ pragma) is written in UTF-8, while the rest of
-- the file is copied in binary mode so the payload is passed through
-- untouched.  Note the handles are only closed after 'hPutStr' has fully
-- consumed the lazy contents of @hin@, so the ordering below matters.
copyWithHeader :: DynFlags -> String -> Maybe String -> FilePath -> FilePath
               -> IO ()
copyWithHeader dflags purpose maybe_header from to = do
  showPass dflags purpose

  hout <- openBinaryFile to WriteMode
  hin  <- openBinaryFile from ReadMode
  ls <- hGetContents hin -- inefficient, but it'll do for now. ToDo: speed up
  maybe (return ()) (header hout) maybe_header
  hPutStr hout ls
  hClose hout
  hClose hin
 where
  -- write the header string in UTF-8. The header is something like
  -- {-# LINE "foo.hs" #-}
  -- and we want to make sure a Unicode filename isn't mangled.
  header h str = do
    hSetEncoding h utf8
    hPutStr h str
    -- switch back to binary mode for the file payload
    hSetBinaryMode h True
{-
************************************************************************
* *
\subsection{Support code}
* *
************************************************************************
-}
-- | Link the given object files and package dependencies into a shared
-- library: a DLL on Windows, a dylib on Darwin, and an ELF DSO elsewhere.
-- The link is performed with the same RTS way flags that GHC itself was
-- linked with (see the hack note below).
linkDynLib :: DynFlags -> [String] -> [InstalledUnitId] -> IO ()
linkDynLib dflags0 o_files dep_packages
 = do
    let -- This is a rather ugly hack to fix dynamically linked
        -- GHC on Windows. If GHC is linked with -threaded, then
        -- it links against libHSrts_thr. But if base is linked
        -- against libHSrts, then both end up getting loaded,
        -- and things go wrong. We therefore link the libraries
        -- with the same RTS flags that we link GHC with.
        dflags1 = if cGhcThreaded then addWay' WayThreaded dflags0
                                  else dflags0
        dflags2 = if cGhcDebugged then addWay' WayDebug dflags1
                                  else dflags1
        dflags = updateWays dflags2

        verbFlags = getVerbFlags dflags
        o_file = outputFile dflags

    pkgs <- getPreloadPackagesAnd dflags dep_packages

    let pkg_lib_paths = collectLibraryPaths dflags pkgs
    let pkg_lib_path_opts = concatMap get_pkg_lib_path_opts pkg_lib_paths
        get_pkg_lib_path_opts l
         | ( osElfTarget (platformOS (targetPlatform dflags)) ||
             osMachOTarget (platformOS (targetPlatform dflags)) ) &&
           dynLibLoader dflags == SystemDependent &&
           WayDyn `elem` ways dflags
            = ["-L" ++ l, "-Xlinker", "-rpath", "-Xlinker", l]
              -- See Note [-Xlinker -rpath vs -Wl,-rpath]
         | otherwise = ["-L" ++ l]

    let lib_paths = libraryPaths dflags
    let lib_path_opts = map ("-L"++) lib_paths

    -- We don't want to link our dynamic libs against the RTS package,
    -- because the RTS lib comes in several flavours and we want to be
    -- able to pick the flavour when a binary is linked.
    -- On Windows we need to link the RTS import lib as Windows does
    -- not allow undefined symbols.
    -- The RTS library path is still added to the library search path
    -- above in case the RTS is being explicitly linked in (see #3807).
    let platform = targetPlatform dflags
        os = platformOS platform
        pkgs_no_rts = case os of
                      OSMinGW32 ->
                          pkgs
                      _ ->
                          filter ((/= rtsUnitId) . packageConfigId) pkgs
    let pkg_link_opts = let (package_hs_libs, extra_libs, other_flags) = collectLinkOpts dflags pkgs_no_rts
                        in package_hs_libs ++ extra_libs ++ other_flags

    -- probably _stub.o files
    -- and last temporary shared object file
    let extra_ld_inputs = ldInputs dflags

    -- frameworks
    pkg_framework_opts <- getPkgFrameworkOpts dflags platform
                                              (map unitId pkgs)
    let framework_opts = getFrameworkOpts dflags platform

    case os of
        OSMinGW32 -> do
            -------------------------------------------------------------
            -- Making a DLL
            -------------------------------------------------------------
            let output_fn = case o_file of
                            Just s -> s
                            Nothing -> "HSdll.dll"

            runLink dflags (
                    map Option verbFlags
                 ++ [ Option "-o"
                    , FileOption "" output_fn
                    , Option "-shared"
                    ] ++
                    [ FileOption "-Wl,--out-implib=" (output_fn ++ ".a")
                    | gopt Opt_SharedImplib dflags
                    ]
                 ++ map (FileOption "") o_files

                 -- Permit the linker to auto link _symbol to _imp_symbol
                 -- This lets us link against DLLs without needing an "import library"
                 ++ [Option "-Wl,--enable-auto-import"]

                 ++ extra_ld_inputs
                 ++ map Option (
                    lib_path_opts
                 ++ pkg_lib_path_opts
                 ++ pkg_link_opts
                ))
        _ | os == OSDarwin -> do
            -------------------------------------------------------------------
            -- Making a darwin dylib
            -------------------------------------------------------------------
            -- About the options used for Darwin:
            -- -dynamiclib
            --   Apple's way of saying -shared
            -- -undefined dynamic_lookup:
            --   Without these options, we'd have to specify the correct
            --   dependencies for each of the dylibs. Note that we could
            --   (and should) do without this for all libraries except
            --   the RTS; all we need to do is to pass the correct
            --   HSfoo_dyn.dylib files to the link command.
            --   This feature requires Mac OS X 10.3 or later; there is
            --   a similar feature, -flat_namespace -undefined suppress,
            --   which works on earlier versions, but it has other
            --   disadvantages.
            -- -single_module
            --   Build the dynamic library as a single "module", i.e. no
            --   dynamic binding nonsense when referring to symbols from
            --   within the library. The NCG assumes that this option is
            --   specified (on i386, at least).
            -- -install_name
            --   Mac OS/X stores the path where a dynamic library is (to
            --   be) installed in the library itself. It's called the
            --   "install name" of the library. Then any library or
            --   executable that links against it before it's installed
            --   will search for it in its ultimate install location.
            --   By default we set the install name to the absolute path
            --   at build time, but it can be overridden by the
            --   -dylib-install-name option passed to ghc. Cabal does
            --   this.
            -------------------------------------------------------------------

            let output_fn = case o_file of { Just s -> s; Nothing -> "a.out"; }

            instName <- case dylibInstallName dflags of
                Just n -> return n
                Nothing -> return $ "@rpath" `combine` (takeFileName output_fn)
            runLink dflags (
                    map Option verbFlags
                 ++ [ Option "-dynamiclib"
                    , Option "-o"
                    , FileOption "" output_fn
                    ]
                 ++ map Option o_files
                 ++ [ Option "-undefined",
                      Option "dynamic_lookup",
                      Option "-single_module" ]
                 ++ (if platformArch platform == ArchX86_64
                     then [ ]
                     else [ Option "-Wl,-read_only_relocs,suppress" ])
                 ++ [ Option "-install_name", Option instName ]
                 ++ map Option lib_path_opts
                 ++ extra_ld_inputs
                 ++ map Option framework_opts
                 ++ map Option pkg_lib_path_opts
                 ++ map Option pkg_link_opts
                 ++ map Option pkg_framework_opts
              )
        _ -> do
            -------------------------------------------------------------------
            -- Making a DSO
            -------------------------------------------------------------------

            let output_fn = case o_file of { Just s -> s; Nothing -> "a.out"; }
            let bsymbolicFlag = -- we need symbolic linking to resolve
                                -- non-PIC intra-package-relocations
                                ["-Wl,-Bsymbolic"]

            runLink dflags (
                    map Option verbFlags
                 ++ libmLinkOpts
                 ++ [ Option "-o"
                    , FileOption "" output_fn
                    ]
                 ++ map Option o_files
                 ++ [ Option "-shared" ]
                 ++ map Option bsymbolicFlag
                    -- Set the library soname. We use -h rather than -soname as
                    -- Solaris 10 doesn't support the latter:
                 ++ [ Option ("-Wl,-h," ++ takeFileName output_fn) ]
                 ++ extra_ld_inputs
                 ++ map Option lib_path_opts
                 ++ map Option pkg_lib_path_opts
                 ++ map Option pkg_link_opts
              )
-- | Some platforms require that we explicitly link against @libm@ if any
-- math-y things are used (which we assume to include all programs). See #14022.
libmLinkOpts :: [Option]
libmLinkOpts =
#if defined(HAVE_LIBM)
  -- HAVE_LIBM is determined by the configure script at build time.
  [Option "-lm"]
#else
  []
#endif
-- | Framework search-path (@-F@) and @-framework@ flags contributed by the
-- given packages.  Only meaningful on platforms that use frameworks
-- (Darwin); everywhere else the result is empty.
getPkgFrameworkOpts :: DynFlags -> Platform -> [InstalledUnitId] -> IO [String]
getPkgFrameworkOpts dflags platform dep_packages
  | platformUsesFrameworks platform = do
      fwPaths <- getPackageFrameworkPath dflags dep_packages
      fws     <- getPackageFrameworks dflags dep_packages
      let pathOpts = map ("-F" ++) fwPaths
          fwOpts   = concatMap (\fw -> ["-framework", fw]) fws
      return (pathOpts ++ fwOpts)
  | otherwise = return []
-- | Framework flags requested on the command line, plus their search
-- paths.  Empty on platforms without framework support.
getFrameworkOpts :: DynFlags -> Platform -> [String]
getFrameworkOpts dflags platform
  | not (platformUsesFrameworks platform) = []
  | otherwise = pathOpts ++ fwOpts
  where
    pathOpts = map ("-F" ++) (frameworkPaths dflags)
    -- reverse because the frameworks were accumulated in reverse order
    -- from the command line:
    fwOpts = concatMap (\fw -> ["-framework", fw])
                       (reverse (cmdlineFrameworks dflags))
| ezyang/ghc | compiler/main/SysTools.hs | bsd-3-clause | 25,394 | 0 | 27 | 8,883 | 3,362 | 1,731 | 1,631 | 325 | 11 |
{-# LANGUAGE PolyKinds, Rank2Types, TypeFamilies, DefaultSignatures, BangPatterns, GADTs, ScopedTypeVariables #-}
module Indexed.Map
( IMap(..)
, lookup
, null
, size
, empty
, singleton
, toList
, KV(..)
) where
import Control.Category
import Indexed.Show
import Indexed.Ord
import Prelude hiding ((.), id, lookup, null)
-- | A size-annotated binary search tree from indexed keys @k a@ to indexed
-- values @v a@; the index @a@ is existentially bound per entry.
data IMap k v where
  -- | Interior node: cached subtree size, key, value, left and right subtrees.
  Bin :: {-# UNPACK #-} !Int -> !(k a) -> v a -> !(IMap k v) -> !(IMap k v) -> IMap k v
  -- | The empty tree.
  Tip :: IMap k v
-- | Look up a key in the map.  'icompare' selects one of the three result
-- arguments (less / equal / greater), so on an index-preserving match it
-- can hand back the stored value at the right index @a@.
lookup :: forall k v a. IOrd k => k a -> IMap k v -> Maybe (v a)
lookup = go
  where
    go :: k a -> IMap k v -> Maybe (v a)
    go !_ !Tip = Nothing
    go !k (Bin _ kx x l r) = icompare k kx (go k l) (Just x) (go k r)
{-# INLINEABLE lookup #-}
-- | /O(1)/. Is the map empty?
null :: IMap k v -> Bool
null Tip = True
null Bin{} = False
{-# INLINE null #-}

-- | /O(1)/. Number of entries, read from the size cached in the root node.
size :: IMap k v -> Int
size Tip = 0
size (Bin sz _ _ _ _) = sz
{-# INLINE size #-}

-- | /O(1)/. The empty map.
empty :: IMap k v
empty = Tip
{-# INLINE empty #-}

-- | /O(1)/. A map holding a single key/value pair.
singleton :: k a -> v a -> IMap k v
singleton k x = Bin 1 k x Tip Tip
{-# INLINE singleton #-}
-- | A key/value pair with the shared index existentially hidden.
data KV k v where
  KV :: k a -> v a -> KV k v

-- | Equality via the indexed 'ieq': key and value must both match
-- (at the same index) for the pairs to be equal.
instance (IEq k, IEq v) => Eq (KV k v) where
  KV ka va == KV kb vb = ieq ka kb (ieq va vb True False) False

-- | Lexicographic ordering: first by key, then by value.
instance (IOrd k, IOrd v) => Ord (KV k v) where
  compare (KV ka va) (KV kb vb) = icompare ka kb LT (icompare va vb LT EQ GT) GT

instance (IShow k, IShow v) => Show (KV k v) where
  showsPrec d (KV ka va) = showParen (d > 10) $
    showString "KV " . ishowsPrec 11 ka . showChar ' ' . ishowsPrec 11 va

-- | Shown as @fromList [...]@ over the in-order pair list.
instance (IShow k, IShow v) => Show (IMap k v) where
  showsPrec d m = showParen (d > 10) $
    showString "fromList " . showsPrec 11 (toList m)
-- | In-order traversal of the map, yielding the key/value pairs in
-- ascending key order.  Uses an accumulator so the tail of the list is
-- built lazily as the right spine is walked.
toList :: IMap k v -> [KV k v]
toList t = walk t []
  where
    walk !(Bin _ k v l r) acc = walk l (KV k v : walk r acc)
    walk !Tip acc = acc
{-# INLINEABLE toList #-}
| ekmett/indexed | src/Indexed/Map.hs | bsd-3-clause | 1,839 | 0 | 13 | 504 | 894 | 455 | 439 | 58 | 2 |
module OrdUni where
import Uni (Uni(Uni))
-- | 'Uni' has a single nullary constructor, so any two values are equal.
instance Ord Uni where
  compare Uni Uni = EQ
| phischu/fragnix | tests/quick/ImplicitInstances/OrdUni.hs | bsd-3-clause | 92 | 0 | 6 | 21 | 34 | 20 | 14 | 4 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE UnboxedTuples, MagicHash #-}
-- |
--
-- In order to provide slowloris protection, Warp provides timeout handlers. We
-- follow these rules:
--
-- * A timeout is created when a connection is opened.
--
-- * When all request headers are read, the timeout is tickled.
--
-- * Every time at least 2048 bytes of the request body are read, the timeout
-- is tickled.
--
-- * The timeout is paused while executing user code. This will apply to both
-- the application itself, and a ResponseSource response. The timeout is
-- resumed as soon as we return from user code.
--
-- * Every time data is successfully sent to the client, the timeout is tickled.
module Network.Wai.Handler.Warp.Timeout (
-- * Types
Manager
, TimeoutAction
, Handle
-- * Manager
, initialize
, stopManager
, withManager
-- * Registration
, register
, registerKillThread
-- * Control
, tickle
, cancel
, pause
, resume
-- * Exceptions
, TimeoutThread (..)
) where
#if MIN_VERSION_base(4,6,0)
import Control.Concurrent (mkWeakThreadId, ThreadId)
#else
import GHC.Conc (ThreadId(..))
import GHC.Exts (mkWeak#)
import GHC.IO (IO (IO))
#endif
import Control.Concurrent (threadDelay, myThreadId)
import qualified Control.Exception as E
import GHC.Weak (Weak (..))
import Network.Wai.Handler.Warp.IORef (IORef)
import qualified Network.Wai.Handler.Warp.IORef as I
import Network.Wai.Handler.Warp.Thread
import System.Mem.Weak (deRefWeak)
import Data.Typeable (Typeable)
----------------------------------------------------------------
-- | A timeout manager: a shared list of all currently registered handles,
-- swept periodically by the reaper thread started in 'initialize'.
newtype Manager = Manager (IORef [Handle])

-- | An action to be performed on timeout.
type TimeoutAction = IO ()

-- | A handle used by 'Manager': the action to fire on timeout, paired
-- with the handle's current 'State'.
data Handle = Handle TimeoutAction (IORef State)

-- | Lifecycle of a 'Handle', driven jointly by the reaper and the client.
data State = Active    -- Manager turns it to Inactive.
           | Inactive  -- Manager removes it with timeout action.
           | Paused    -- Manager does not change it.
           | Canceled  -- Manager removes it without timeout action.
----------------------------------------------------------------
-- | Creating timeout manager which works every N micro seconds
-- where N is the first argument.
initialize :: Int -> IO Manager
initialize timeout = do
    ref' <- forkIOwithBreakableForever [] $ \ref -> do
        threadDelay timeout
        -- Atomically take the whole handle list (leaving it empty) so
        -- concurrent 'register' calls during the sweep are not lost.
        old <- I.atomicModifyIORef' ref (\x -> ([], x))
        merge <- prune old id
        -- Put the surviving handles back, in front of anything that was
        -- registered while we were pruning.
        I.atomicModifyIORef' ref (\new -> (merge new, ()))
    return $ Manager ref'
  where
    -- Walk the handles: fire Inactive ones, drop Canceled ones, keep the
    -- rest ('front' is a difference list of survivors).
    prune [] front = return front
    prune (m@(Handle onTimeout iactive):rest) front = do
        -- Demote Active to Inactive; the value returned is the *old* state.
        state <- I.atomicModifyIORef' iactive (\x -> (inactivate x, x))
        case state of
            Inactive -> do
                onTimeout `E.catch` ignoreAll
                prune rest front
            Canceled -> prune rest front
            _ -> prune rest (front . (:) m)
    inactivate Active = Inactive
    inactivate x = x
----------------------------------------------------------------
-- | Stopping timeout manager: break the reaper loop and fire every
-- still-registered timeout action once.
stopManager :: Manager -> IO ()
stopManager (Manager ref) = E.mask_ $ do
    -- mask_ so we cannot be interrupted between taking the handle list
    -- and firing the actions.
    !handles <- breakForever ref
    mapM_ fire handles
  where
    fire (Handle onTimeout _) = onTimeout `E.catch` ignoreAll

-- | Swallow any exception: a failing timeout action must not take down
-- the reaper or the stopping thread.
ignoreAll :: E.SomeException -> IO ()
ignoreAll _ = return ()
----------------------------------------------------------------
-- | Register a timeout action with the manager.  The returned 'Handle'
-- starts out 'Active' and is prepended to the manager's handle list.
register :: Manager -> TimeoutAction -> IO Handle
register (Manager ref) onTimeout = do
    stateRef <- I.newIORef Active
    let handle = Handle onTimeout stateRef
    I.atomicModifyIORef' ref (\hs -> (handle : hs, ()))
    return handle
-- | Registering a timeout action of killing this thread.
registerKillThread :: Manager -> IO Handle
registerKillThread m = do
    wtid <- myThreadId >>= mkWeakThreadId
    register m $ killIfExist wtid

-- If the ThreadId were held via a strong reference, it would leak even
-- after the thread is killed.
-- So, let's use a weak reference so that GC can throw ThreadId away.
-- deRefWeak checks if ThreadId referenced by the weak reference
-- exists. If it exists, it means that the thread is alive.
killIfExist :: Weak ThreadId -> TimeoutAction
killIfExist wtid = deRefWeak wtid >>= maybe (return ()) (flip E.throwTo TimeoutThread)
-- | The exception thrown into a handler thread by 'registerKillThread'
-- when its connection times out.
data TimeoutThread = TimeoutThread
    deriving Typeable
instance E.Exception TimeoutThread
instance Show TimeoutThread where
    show TimeoutThread = "Thread killed by Warp's timeout reaper"
#if !MIN_VERSION_base(4,6,0)
-- | Compatibility shim: base ships 'mkWeakThreadId' from version 4.6 on.
mkWeakThreadId :: ThreadId -> IO (Weak ThreadId)
mkWeakThreadId t@(ThreadId t#) = IO $ \s ->
   case mkWeak# t# t Nothing s of
      (# s1, w #) -> (# s1, Weak w #)
#endif
----------------------------------------------------------------
-- | Setting the state to active.
-- 'Manager' turns active to inactive repeatedly.
tickle :: Handle -> IO ()
tickle (Handle _ iactive) = I.writeIORef iactive Active

-- | Setting the state to canceled.
-- 'Manager' eventually removes this without timeout action.
cancel :: Handle -> IO ()
cancel (Handle _ iactive) = I.writeIORef iactive Canceled

-- | Setting the state to paused.
-- 'Manager' does not change the value while paused (used while running
-- user code so a long computation does not time out).
pause :: Handle -> IO ()
pause (Handle _ iactive) = I.writeIORef iactive Paused

-- | Setting the paused state to active.
-- This is an alias to 'tickle'.
resume :: Handle -> IO ()
resume = tickle
----------------------------------------------------------------
-- | Call the inner function with a timeout manager.
-- The manager is stopped (firing any pending timeout actions) when the
-- inner function returns or throws, so the reaper thread is not leaked.
withManager :: Int -- ^ timeout in microseconds
            -> (Manager -> IO a)
            -> IO a
withManager timeout f =
    -- 'bracket' resolves the old FIXME: 'stopManager' is available above
    -- and guarantees cleanup even on exception.
    E.bracket (initialize timeout) stopManager f
| sol/wai | warp/Network/Wai/Handler/Warp/Timeout.hs | mit | 5,774 | 0 | 17 | 1,187 | 1,156 | 632 | 524 | 97 | 5 |
module Data.Time.DurationSpec
( main
, spec
) where
import Prelude
import Test.Hspec
import Data.Time.Duration
-- | Run the spec standalone.
main :: IO ()
main = hspec spec

-- | Smoke tests for 'since' and 'priorTo' against fixed instants.
spec :: Spec
spec = describe "Data.Time.Duration" $ do
    describe "since" $ it "works" $ do
        -- adding 6 seconds to a fixed instant
        let t1 = mkTime "2015-12-31 05:15:00"
            t2 = (6 :: Second) `since` t1
        t2 `shouldBe` mkTime "2015-12-31 05:15:06"

    describe "priorTo" $ it "works" $ do
        -- subtracting 2 days from a fixed instant
        let t1 = mkTime "2015-12-31 05:15:00"
            t2 = (2 :: Day) `priorTo` t1
        t2 `shouldBe` mkTime "2015-12-29 05:15:00"

-- | Parse a @YYYY-MM-DD HH:MM:SS@ timestamp; errors on malformed input
-- (acceptable in test code).
mkTime :: String -> UTCTime
mkTime = parseTimeOrError False defaultTimeLocale "%F %T"
| mrb/tee-io | test/Data/Time/DurationSpec.hs | mit | 660 | 0 | 15 | 176 | 201 | 106 | 95 | 20 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./OMDoc/DataTypes.hs
Description : The OMDoc Data Types
Copyright : (c) Ewaryst Schulz, DFKI 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Ewaryst.Schulz@dfki.de
Stability : provisional
Portability : portable
Datatypes for an intermediate OMDoc representation.
-}
module OMDoc.DataTypes where
import Common.Utils
import Common.Doc
import Common.DocUtils
import Common.Amalgamate (readShow)
import Common.Id
import Common.Lexer
import Common.AnnoParser
import Common.Percent (encodeBut)
import Data.List
import Data.Typeable
import qualified Data.Map as Map
{-
OMDoc represented in 3 layers:
1. toplevel (theory, view)
2. theory constitutive (axiom, symbol)
3. subelements (insort, ...) and OpenMath
-}
-- -------------------- Datatypes for Representation ----------------------
-- | OMDoc root element with libname and a list of toplevel elements
data OMDoc = OMDoc String [TLElement] deriving (Show, Read, Eq, Ord)

{- | Toplevel elements for OMDoc: a theory with name, optional meta theory
and content, or a view with from, to and morphism -}
data TLElement = TLTheory String (Maybe OMCD) [TCElement]
               | TLView String OMCD OMCD TCMorphism
                 deriving (Show, Read, Eq, Ord)
{-
NOTATIONS
OMDoc currently supports two kinds of notations: smart and flexible.
a) The smart ones look like this:
<notation for="??+" role="application" fixity="f" precedence="p" implicit="i"/>
Here f \in {in, pre, post}, p is an integer (higher precedence =
higher binding), and i is the number of implicit arguments (0 by
default).
In this case, you would additionally give
<notation for="??+" role="constant"><text value="+"/></notation>
This notation is called to render operation itself, i.e.,
produces the operator symbol.
b) The flexible ones look like this
<notation for="??something" role="application" precedence="P">
<component index="1" precedence="p1"/>
<text value="["/>
<component index="2" precedence="p2"/>
<text value="/"/>
<component index="3" precedence="p3"/>
<text value="]"/>
</notation>
Here <component index="i"/> recurses into argument number i.
P is the output precedence, p1,p2,p3 are the input precedences.
You can also use <component index="0"/>. That renders the operator
symbol by calling the notation
<notation for="??+" role="constant">...</notation>
2) The smart ones have two major advantages:
- They can be read back easily.
- They are independent of the output format.
In 1b) above, we would need one notation for Hets-syntax, one for MathML etc
-}
-- | Theory constitutive elements for OMDoc
data TCElement =
    -- | Symbol to represent sorts, constants, predicate symbols, etc.
    TCSymbol String OMElement SymbolRole (Maybe OMElement)
    -- | A notation for the given symbol with an optional style
  | TCNotation OMQualName String (Maybe String)
    {- | A smart notation for the given symbol with fixity, associativity,
    precedence and the number of implicit arguments -}
  | TCSmartNotation OMQualName Fixity Assoc Int Int
    {- | A flexible notation for the given symbol; the argument- and
    text-components have to alternate in the component list -}
  | TCFlexibleNotation OMQualName Int [NotationComponent]
    -- | Algebraic Data Type represents free/generated types
  | TCADT [OmdADT]
    -- | Import statements for referencing other theories
  | TCImport String OMCD TCMorphism
    -- | A comment, only for development purposes
  | TCComment String
    deriving (Show, Read, Eq, Ord)
-- | return type for sentence translation (ADT or formula)
type TCorOMElement = Either TCElement OMElement

-- | Morphisms to specify signature mappings
type TCMorphism = [(OMName, OMImage)]

{- | The target type of a mapping is just an alias or an assignment to
a symbol -}
type OMImage = Either String OMElement

-- | The flattened structure of an Algebraic Data Type
data OmdADT =
    -- | A single sort given by name, type and a list of constructors
    ADTSortDef String ADTType [OmdADT]
    -- | A constructor given by its name and a list of arguments
  | ADTConstr String [OmdADT]
    -- | An argument with type and optionally a selector
  | ADTArg OMElement (Maybe OmdADT)
    -- | The selector has a name and is total (Yes) or partial (No)
  | ADTSelector String Totality
    -- | Insort elements point to other sortdefs and inherit their structure
  | ADTInsort OMQualName
    deriving (Show, Read, Eq, Ord)
-- | Roles of the declared symbols can be object or type
data SymbolRole = Obj | Typ | Axiom | Theorem deriving (Eq, Ord)

-- | Fixity of notation patterns
data Fixity = Infix | Prefix | Postfix deriving (Eq, Ord)

-- | Associativity of notation patterns
data Assoc = LeftAssoc | RightAssoc | NoneAssoc deriving (Eq, Ord)

-- | Type of the algebraic data type
data ADTType = Free | Generated deriving (Eq, Ord)

-- | Totality for selectors of an adt
data Totality = Yes | No deriving (Eq, Ord)

{- | A component can be a text-component, e.g., <text value="["/>, or an
argument-component such as <component index="1" precedence="p1"/> -}
data NotationComponent = TextComp String | ArgComp Int Int
                         deriving (Show, Read, Eq, Ord)
-- The Show instances below give the exact keywords written to the OMDoc
-- output; the Read instances further down invert them via 'readShow'.
instance Show SymbolRole where
    show Obj = "object"
    show Typ = "type"
    show Axiom = "axiom"
    show Theorem = "theorem"

instance Show ADTType where
    show Free = "free"
    show Generated = "generated"

instance Show Totality where
    show Yes = "yes"
    show No = "no"

instance Show Fixity where
    show Infix = "in"
    show Prefix = "pre"
    show Postfix = "post"

instance Show Assoc where
    show LeftAssoc = "left"
    show RightAssoc = "right"
    show NoneAssoc = "none"
-- 'readShow' parses exactly the strings produced by the Show instances
-- above, keeping Show and Read mutually inverse.
instance Read SymbolRole where
    readsPrec _ = readShow [Obj, Typ, Axiom, Theorem]

instance Read ADTType where
    readsPrec _ = readShow [Free, Generated]

instance Read Totality where
    readsPrec _ = readShow [Yes, No]

instance Read Fixity where
    readsPrec _ = readShow [Infix, Prefix, Postfix]

instance Read Assoc where
    readsPrec _ = readShow [LeftAssoc, RightAssoc, NoneAssoc]
-- | Names used for OpenMath variables and symbols
data OMName = OMName { name :: String, path :: [String] }
              deriving (Show, Read, Eq, Ord, Typeable)

{- | Attribute-name/attribute-value pair used to represent the type
of a type-annotated term -}
data OMAttribute = OMAttr OMElement OMElement
                   deriving (Show, Read, Eq, Ord)

{- | CD contains the reference to the content dictionary
and eventually the cdbase entry -}
data OMCD = CD [String] deriving (Show, Read, Eq, Ord)

-- | A name qualified by its content dictionary.
type OMQualName = (OMCD, OMName)

-- | Elements for Open Math
data OMElement =
    -- | Symbol
    OMS OMQualName
    -- | Simple variable
  | OMV OMName
    -- | Attributed element needed for type annotations of elements
  | OMATTT OMElement OMAttribute
    {- | Application to a list of arguments,
    first argument is usually the functionhead -}
  | OMA [OMElement]
    -- | Bindersymbol, bound vars, body
  | OMBIND OMElement [OMElement] OMElement
    deriving (Show, Read, Eq, Ord)
-- * Hets Utils
-- | Parse a string into a Hets 'Id' via the annotation-id parser.
nameToId :: String -> Id
nameToId = parseString parseAnnoId

-- | Wrap a string into a simple Hets 'Token'.
nameToToken :: String -> Token
nameToToken = mkSimpleId
-- * Utils for Translation
-- | A name plus a disambiguation index (0 means the name is unique as-is).
type UniqName = (String, Int)
type NameMap a = Map.Map a UniqName

-- | Mapping of symbols and sentence names to unique ids (used in export)
data SigMap a = SigMap (NameMap a) (NameMap String)

{- | Mapping of OMDoc names to hets strings, for signature creation,
and strings to symbols, for lookup in terms (used in import) -}
data SigMapI a = SigMapI { sigMapISymbs :: Map.Map OMName a
                         , sigMapINotations :: Map.Map OMName String }

-- | Project the symbol map out of a 'SigMap'.
sigMapSymbs :: SigMap a -> NameMap a
sigMapSymbs (SigMap sm _) = sm
-- | Build an 'OMCD' from a two-element list @[base, cd]@, dropping empty
-- components.  Partial: errors on lists of any other length.
cdFromList :: [String] -> OMCD
cdFromList ["", ""] = CD []
cdFromList ["", cd] = CD [cd]
cdFromList [base, cd] = CD [cd, base]
cdFromList _ = error "cdFromList: Malformed list. I need exactly 2 elements!"

-- | True iff the content dictionary has neither a name nor a base.
cdIsEmpty :: OMCD -> Bool
cdIsEmpty cd = ["", ""] == cdToList cd

-- | The result list has always two elements: [base, modul]
cdToList :: OMCD -> [String]
cdToList (CD [cd, base]) = [base, cd]
cdToList (CD [cd]) = ["", cd]
cdToList _ = ["", ""]

-- | Like 'cdToList', but missing components are 'Nothing' rather than @""@.
cdToMaybeList :: OMCD -> [Maybe String]
cdToMaybeList (CD [cd, base]) = [Just base, Just cd]
cdToMaybeList (CD [cd]) = [Nothing, Just cd]
cdToMaybeList _ = [Nothing, Nothing]
-- * Name handling: encoding, decoding, unique names
{- | The closing paren + percent can be used neither in ordinary Hets-names
nor in sentence names hence it is used here for encodings. -}
uniqPrefix :: String
uniqPrefix = "%()%"

{- | Special name encoding in order to be able to recognize these names
while reading.  The result is @uniqPrefix ++ kind ++ ":" ++ values@ with
the values themselves separated by 'uniqPrefix'. -}
nameEncode :: String -- ^ the kind of the encoding, may not contain colons
           -> [String] -- ^ the values to encode
           -> String
nameEncode kind vals =
    uniqPrefix ++ kind ++ ":" ++ intercalate uniqPrefix vals
{- | This invariant should hold:
@(x, l) = fromJust $ nameDecode $ nameEncode x l@ -}
nameDecode :: String -> Maybe (String, [String])
nameDecode s =
    case stripPrefix uniqPrefix s of
      Nothing -> Nothing
      Just s' ->
          -- the encoding kind precedes the first colon
          let (kind, r) = break (== ':') s'
          in if null r
             then error $ "nameDecode: missing colon in " ++ s
             else Just (kind, splitByList uniqPrefix $ tail r)
-- | Render a unique name: percent-encode the reserved characters and, if
-- the disambiguation index is positive, wrap it via 'nameEncode'.
nameToString :: UniqName -> String
nameToString (s, i) =
    let s' = encodeBut (`notElem` "/?%#") s
    in if i > 0 then nameEncode ("over_" ++ show i) [s'] else s'
-- * Constructing/Extracting Values
{- | name of the theory constitutive element, error if not TCSymbol, TCNotation,
or TCImport -}
tcName :: TCElement -> OMName
tcName tc = case tc of
              TCSymbol s _ _ _ -> mkSimpleName s
              TCNotation qn _ _ -> unqualName qn
              TCImport s _ _ -> mkSimpleName s
              _ -> error "tcName: No name for this value."
-- | Strip the content-dictionary qualification from a name.
unqualName :: OMQualName -> OMName
unqualName = snd

-- | A content dictionary with neither name nor base.
emptyCD :: OMCD
emptyCD = CD []

-- | Turn a unique name into an (unqualified) 'OMName'.
omName :: UniqName -> OMName
omName = mkSimpleName . nameToString

-- | An 'OMName' with an empty path.
mkSimpleName :: String -> OMName
mkSimpleName s = OMName s []

-- | Qualify a unique name with the empty content dictionary.
mkSimpleQualName :: UniqName -> OMQualName
mkSimpleQualName un = (CD [], omName un)

-- | A symbol element referring to the given unique name.
simpleOMS :: UniqName -> OMElement
simpleOMS = OMS . mkSimpleQualName
-- * Lookup utils for Import and Export
-- | Look up the notation for a name, falling back to the name itself.
lookupNotation :: SigMapI a -> OMName -> String
lookupNotation smi = lookupNotationInMap $ sigMapINotations smi

lookupNotationInMap :: Map.Map OMName String -> OMName -> String
lookupNotationInMap m n = Map.findWithDefault (name n) n m
-- * Pretty instances
instance Pretty OMName where
    -- rendered via the derived Show instance
    pretty = text . show
| spechub/Hets | OMDoc/DataTypes.hs | gpl-2.0 | 10,665 | 0 | 14 | 2,210 | 2,010 | 1,113 | 897 | 154 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
--f
-- Module : IDE.Pane.Files
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
-- | The pane of ide that shows a list of all the files in the workspace
--
-------------------------------------------------------------------------------
module IDE.Pane.Files (
IDEFiles(..)
, FilesState(..)
) where
import Prelude hiding (catch)
import Graphics.UI.Gtk
import Data.Maybe (fromMaybe, maybeToList, listToMaybe, isJust)
import Control.Monad (forM, void, forM_, when)
import Data.Typeable (Typeable)
import IDE.Core.State
(catchIDE, window, getIDE, MessageLevel(..), ipdPackageId,
wsPackages, workspace, readIDE, IDEAction, ideMessage, reflectIDE,
reifyIDE, IDEM, IDEPackage, ipdSandboxSources)
import IDE.Pane.SourceBuffer (fileNew, goToSourceDefinition')
import IDE.Sandbox
import Control.Applicative ((<$>))
import System.FilePath ((</>), takeFileName, dropFileName)
import Distribution.Package (PackageIdentifier(..))
import System.Directory
(removeDirectoryRecursive, removeDirectory, createDirectory,
doesFileExist, removeFile, doesDirectoryExist,
getDirectoryContents, getPermissions, readable)
import IDE.Core.CTypes
(Location(..), packageIdentifierToString)
import Graphics.UI.Frame.Panes
(RecoverablePane(..), PanePath, RecoverablePane, Pane(..))
import Graphics.UI.Frame.ViewFrame (getMainWindow, getNotebook)
import Graphics.UI.Editor.Basics (Connection(..))
import Graphics.UI.Gtk.General.Enums
(ShadowType(..), PolicyType(..), SelectionMode(..),
TreeViewColumnSizing(..))
import System.Glib.Attributes (set, AttrOp(..))
import Control.Monad.IO.Class (MonadIO(..))
import IDE.Utils.GUIUtils
(showErrorDialog, showInputDialog, treeViewContextMenu', __,
showDialogOptions)
import Control.Exception (SomeException(..), catch)
import Data.Text (Text)
import qualified Data.Text as T
(isPrefixOf, words, isSuffixOf, unpack, pack)
import Data.Monoid ((<>))
import IDE.Core.Types
(ipdLib, WorkspaceAction, Workspace(..), wsAllPackages, WorkspaceM,
runPackage, runWorkspace, PackageAction, PackageM, IDEPackage(..),
IDE(..), Prefs(..), MonadIDE(..), ipdPackageDir)
import System.Glib.Properties (newAttrFromMaybeStringProperty)
import System.FilePath
(addTrailingPathSeparator, takeDirectory, takeExtension,
makeRelative, splitDirectories)
import Control.Monad.Reader.Class (MonadReader(..))
import IDE.Workspaces
(makePackage, workspaceAddPackage', workspaceRemovePackage,
workspaceActivatePackage, workspaceTry, workspaceTryQuiet,
packageTry)
import Data.List
(isSuffixOf, find, stripPrefix, isPrefixOf, sortBy, sort)
import Data.Ord (comparing)
import Data.Char (toUpper, toLower)
import System.Log.Logger (debugM)
import Data.Tree (Forest, Tree(..))
import Graphics.UI.Gtk.MenuComboToolbar.MenuItem
(menuItemActivate, menuItemNewWithLabel)
import IDE.Pane.Modules (addModule)
import Graphics.UI.Gtk.Windows.MessageDialog
(ButtonsType(..), MessageType(..), messageDialogNew)
import Graphics.UI.Gtk.ModelView.CellRenderer
(CellRendererMode(..), cellMode)
import IDE.Pane.PackageEditor (packageEditText)
import IDE.Utils.GtkBindings (treeViewSetActiveOnSingleClick)
import IDE.Package (packageTest, packageRun, packageClean)
-- * The Files pane
-- | The representation of the Files pane.  The pane is deprecated: it now
-- holds only a label directing users to the Workspace pane (see 'builder',
-- which constructs exactly that notice).
data IDEFiles = IDEFiles {
    deprecatedLabel :: Label -- ^ label widget showing the deprecation notice
} deriving Typeable
-- | The additional state used when recovering the pane
-- (none, the package directories come from the IDE state).
-- NOTE(review): the derived 'Read'/'Show' presumably serve session
-- persistence via the frame library — confirm against 'RecoverablePane'.
data FilesState = FilesState
    deriving(Eq,Ord,Read,Show,Typeable)
-- | Basic pane identity for the frame library.
instance Pane IDEFiles IDEM where
    primPaneName _ = __ "Files"
    getAddedIndex _ = 0
    getTopWidget = castToWidget . deprecatedLabel
    -- The pane id is constant; use a wildcard instead of binding an unused
    -- name (the original `paneId b` triggers an unused-binding warning and
    -- was inconsistent with 'primPaneName' above).
    paneId _ = "*Files"
-- | Recovery support: the pane carries no real state, so saving is trivial
-- and building it just creates the deprecation-notice label.
instance RecoverablePane IDEFiles FilesState IDEM where
    saveState _ = return (Just FilesState)
    recoverState panePath FilesState = do
        notebook <- getNotebook panePath
        buildPane panePath notebook builder
    builder _panePath _notebook _windows = reifyIDE $ \_ideR -> do
        noticeLabel <- labelNew $ Just ("The Files pane is deprecated and has been combined with the Workspace pane"::Text)
        return (Just IDEFiles { deprecatedLabel = noticeLabel }, [])
| jaccokrijnen/leksah | src/IDE/Pane/Files.hs | gpl-2.0 | 4,767 | 0 | 13 | 770 | 1,052 | 671 | 381 | 94 | 0 |
module Revision.Deltum.Db
( withDB, DB.defaultOptions, DB.Options(..)
) where
import qualified Data.ByteString.Extended as BS
import Data.UUID.Types (UUID)
import qualified Data.UUID.Types as UUID
import Database.LevelDB.Base (DB)
import qualified Database.LevelDB.Base as DB
import Revision.Deltum.Transaction (Store(..))
import System.Random (randomIO)
import Lamdu.Prelude hiding (lookup)
-- | Fetch the value stored under the given UUID, if any.
lookup :: DB -> UUID -> IO (Maybe ByteString)
lookup db uuid =
    DB.get db DB.defaultReadOptions (BS.strictify (UUID.toByteString uuid))
-- | Atomically apply a batch of writes: a 'Nothing' value deletes the key,
-- a 'Just' value stores it.
transaction :: DB -> [(UUID, Maybe ByteString)] -> IO ()
transaction db ops = DB.write db DB.defaultWriteOptions (map toBatchOp ops)
    where
        keyBytes = BS.strictify . UUID.toByteString
        toBatchOp (key, Nothing) = DB.Del (keyBytes key)
        toBatchOp (key, Just value) = DB.Put (keyBytes key) value
-- | Package a LevelDB handle as the generic 'Store' interface used by the
-- transaction layer.
store :: DB -> Store IO
store db =
    Store
    { storeNewKey = randomIO -- fresh random UUID for newly created objects
    , storeLookup = lookup db -- single-key read
    , storeAtomicWrite = transaction db -- batched atomic writes/deletes
    }
-- | Open the LevelDB database at the given path and run the action with a
-- 'Store' wrapping it; the database is closed when the action finishes.
withDB :: FilePath -> DB.Options -> (Store IO -> IO a) -> IO a
withDB path opts act =
    DB.withDB path opts $ \db -> act (store db)
| lamdu/lamdu | src/Revision/Deltum/Db.hs | gpl-3.0 | 1,159 | 0 | 11 | 258 | 400 | 222 | 178 | 25 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
module Thrift.Transport.Framed
( module Thrift.Transport
, FramedTransport
, openFramedTransport
) where
import Thrift.Transport
import Thrift.Transport.IOBuffer
import Data.Int (Int32)
import qualified Data.Binary as B
import qualified Data.ByteString.Lazy as LBS
-- | FramedTransport wraps a given transport in framed mode.
-- Writes accumulate in 'writeBuffer' and are sent as one frame on 'tFlush';
-- reads pull whole frames into 'readBuffer' (see 'readFrame').
data FramedTransport t = FramedTransport {
    wrappedTrans :: t, -- ^ Underlying transport.
    writeBuffer :: WriteBuffer, -- ^ Write buffer.
    readBuffer :: ReadBuffer -- ^ Read buffer.
}
-- | Wrap an existing transport in framed mode, allocating a fresh pair of
-- read and write buffers for it.
openFramedTransport :: Transport t => t -> IO (FramedTransport t)
openFramedTransport trans = do
  writeB <- newWriteBuffer
  readB <- newReadBuffer
  return FramedTransport { wrappedTrans = trans
                         , writeBuffer = writeB
                         , readBuffer = readB
                         }
instance Transport t => Transport (FramedTransport t) where
    -- Closing/open-checking simply delegate to the wrapped transport.
    tClose = tClose . wrappedTrans
    tRead trans n = do
        -- First, check the read buffer for any data.
        bs <- readBuf (readBuffer trans) n
        if LBS.null bs
            then
                -- When the buffer is empty, read another frame from the
                -- underlying transport.
                do len <- readFrame trans
                   if len > 0
                       then tRead trans n
                       -- A zero-length frame yields the (empty) bytes as-is.
                       else return bs
            else return bs
    tPeek trans = do
        mw <- peekBuf (readBuffer trans)
        case mw of
            Just _ -> return mw
            Nothing -> do
                -- Buffer exhausted: fetch the next frame and retry the peek.
                len <- readFrame trans
                if len > 0
                    then tPeek trans
                    else return Nothing
    -- Writes only fill the local buffer; nothing hits the wire until tFlush.
    tWrite = writeBuf . writeBuffer
    tFlush trans = do
        -- Emit one frame: the payload length as a binary-encoded Int32,
        -- followed by the buffered payload itself.
        bs <- flushBuf (writeBuffer trans)
        let szBs = B.encode $ (fromIntegral $ LBS.length bs :: Int32)
        tWrite (wrappedTrans trans) szBs
        tWrite (wrappedTrans trans) bs
        tFlush (wrappedTrans trans)
    tIsOpen = tIsOpen . wrappedTrans
-- | Read one complete frame from the underlying transport into the read
-- buffer and return its payload size, so the caller knows whether any data
-- is now available.
readFrame :: Transport t => FramedTransport t -> IO Int
readFrame trans = do
    -- The frame header is the payload size as a binary-encoded Int32.
    header <- tReadAll underlying 4
    let frameSize = fromIntegral (B.decode header :: Int32)
    -- Pull the whole payload and hand it to the read buffer.
    payload <- tReadAll underlying frameSize
    fillBuf (readBuffer trans) payload
    return frameSize
  where underlying = wrappedTrans trans
| dcelasun/thrift | lib/hs/src/Thrift/Transport/Framed.hs | apache-2.0 | 3,269 | 0 | 15 | 807 | 603 | 317 | 286 | 54 | 1 |
-- -*- Mode: Haskell; -*-
--
-- QuickCheck tests for Megaparsec's textual source positions.
--
-- Copyright © 2015 Megaparsec contributors
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
--
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- This software is provided by the copyright holders "as is" and any
-- express or implied warranties, including, but not limited to, the implied
-- warranties of merchantability and fitness for a particular purpose are
-- disclaimed. In no event shall the copyright holders be liable for any
-- direct, indirect, incidental, special, exemplary, or consequential
-- damages (including, but not limited to, procurement of substitute goods
-- or services; loss of use, data, or profits; or business interruption)
-- however caused and on any theory of liability, whether in contract,
-- strict liability, or tort (including negligence or otherwise) arising in
-- any way out of the use of this software, even if advised of the
-- possibility of such damage.
{-# OPTIONS -fno-warn-orphans #-}
module Pos (tests) where
import Data.Char (isAlphaNum)
import Data.List (intercalate, isInfixOf, elemIndices)
import Test.Framework
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck
import Text.Megaparsec.Pos
-- | All QuickCheck properties for textual source positions, bundled into a
-- single test-framework group.
tests :: Test
tests = testGroup "Textual source positions"
        [ testProperty "components" prop_components
        , testProperty "show file name in source positions" prop_showFileName
        , testProperty "show line in source positions" prop_showLine
        , testProperty "show column in source positions" prop_showColumn
        , testProperty "initial position" prop_initialPos
        , testProperty "increment source line" prop_incSourceLine
        , testProperty "increment source column" prop_incSourceColumn
        , testProperty "set source name" prop_setSourceName
        , testProperty "set source line" prop_setSourceLine
        , testProperty "set source column" prop_setSourceColumn
        , testProperty "position updating" prop_updating ]
-- | Generate positions with lines in 1..1000 and columns in 0..100.
-- (An orphan instance, acceptable since it is confined to the test suite.)
instance Arbitrary SourcePos where
  arbitrary = newPos <$> fileName <*> choose (1, 1000) <*> choose (0, 100)
-- | Generator for plausible file names: usually a path of alphanumeric
-- components joined by @/@ or @\\@ plus an extension, occasionally empty.
fileName :: Gen SourceName
fileName = do
  sep <- pure <$> elements "/\\"
  parts <- listOf1 component
  ext <- component
  let fullName = intercalate sep parts ++ "." ++ ext
  frequency [ (1, return [])
            , (7, return fullName) ]
  where component = listOf1 (arbitrary `suchThat` isAlphaNum)
-- | Rebuilding a position from its three accessors yields an equal position.
prop_components :: SourcePos -> Bool
prop_components pos =
  newPos (sourceName pos) (sourceLine pos) (sourceColumn pos) == pos
-- | The file name appears quoted in the rendering exactly when non-empty.
prop_showFileName :: SourcePos -> Bool
prop_showFileName pos
  | null name = '"' `notElem` rendered
  | otherwise = ("\"" ++ name ++ "\"") `isInfixOf` rendered
  where
    name = sourceName pos
    rendered = show pos
-- | The rendering mentions the line number after the word "line".
prop_showLine :: SourcePos -> Bool
prop_showLine pos =
  ("line " ++ show (sourceLine pos)) `isInfixOf` show pos
-- | The rendering mentions the column number after the word "column".
prop_showColumn :: SourcePos -> Bool
prop_showColumn pos =
  ("column " ++ show (sourceColumn pos)) `isInfixOf` show pos
-- | 'initialPos' keeps the given name and starts at line 1, column 1.
prop_initialPos :: SourceName -> Bool
prop_initialPos n = and [ sourceName pos == n
                        , sourceLine pos == 1
                        , sourceColumn pos == 1 ]
  where pos = initialPos n
-- | 'incSourceLine' bumps only the line component.
prop_incSourceLine :: SourcePos -> NonNegative Int -> Bool
prop_incSourceLine pos (NonNegative n) =
  d sourceName id pos bumped
    && d sourceLine (+ n) pos bumped
    && d sourceColumn id pos bumped
  where bumped = incSourceLine pos n
-- | 'incSourceColumn' bumps only the column component.
prop_incSourceColumn :: SourcePos -> NonNegative Int -> Bool
prop_incSourceColumn pos (NonNegative n) =
  d sourceName id pos bumped
    && d sourceLine id pos bumped
    && d sourceColumn (+ n) pos bumped
  where bumped = incSourceColumn pos n
-- | 'setSourceName' replaces only the name component.
prop_setSourceName :: SourcePos -> SourceName -> Bool
prop_setSourceName pos n =
  d sourceName (const n) pos renamed
    && d sourceLine id pos renamed
    && d sourceColumn id pos renamed
  where renamed = setSourceName pos n
-- | 'setSourceLine' replaces only the line component.
prop_setSourceLine :: SourcePos -> Positive Int -> Bool
prop_setSourceLine pos (Positive n) =
  d sourceName id pos adjusted
    && d sourceLine (const n) pos adjusted
    && d sourceColumn id pos adjusted
  where adjusted = setSourceLine pos n
-- | 'setSourceColumn' replaces only the column component.
prop_setSourceColumn :: SourcePos -> NonNegative Int -> Bool
prop_setSourceColumn pos (NonNegative n) =
  d sourceName id pos adjusted
    && d sourceLine id pos adjusted
    && d sourceColumn (const n) pos adjusted
  where adjusted = setSourceColumn pos n
-- | 'updatePosString' keeps the file name, adds one line per @'\n'@ in the
-- input, and lands on a plausible column: at least 'mincols', and after a
-- final tab the (1-based) column must sit on a tab stop of width 8.
prop_updating :: SourcePos -> String -> Bool
prop_updating pos "" = updatePosString pos "" == pos
prop_updating pos s =
  d sourceName id pos updated &&
  d sourceLine (+ inclines) pos updated &&
  cols >= mincols && ((last s /= '\t') || ((cols - 1) `rem` 8 == 0))
  -- 'last s' is safe here: the empty-string case is matched above.
  where updated = updatePosString pos s
        cols = sourceColumn updated
        newlines = elemIndices '\n' s
        creturns = elemIndices '\r' s
        inclines = length newlines
        total = length s
        allctrls = newlines ++ creturns
        -- Lower bound for the final column: with no line-control characters
        -- every character advances the column by at least one; otherwise
        -- only the characters after the last '\n' or '\r' can count.
        mincols = if null allctrls
                  then total + sourceColumn pos
                  else total - maximum allctrls
-- | Check that the projection @f@ of @y@ equals the projection of @x@
-- transformed by @g@ — i.e. exactly that component changed as expected.
d :: Eq b => (a -> b) -> (b -> b) -> a -> a -> Bool
d f g x y = f y == g (f x)
| tulcod/megaparsec | tests/Pos.hs | bsd-2-clause | 5,735 | 0 | 13 | 1,296 | 1,323 | 691 | 632 | 105 | 2 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.SetupWrapper
-- Copyright : (c) The University of Glasgow 2006,
-- Duncan Coutts 2008
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : alpha
-- Portability : portable
--
-- An interface to building and installing Cabal packages.
-- If the @Built-Type@ field is specified as something other than
-- 'Custom', and the current version of Cabal is acceptable, this performs
-- setup actions directly. Otherwise it builds the setup script and
-- runs it with the given arguments.
module Distribution.Client.SetupWrapper (
setupWrapper,
SetupScriptOptions(..),
defaultSetupScriptOptions,
) where
import qualified Distribution.Make as Make
import qualified Distribution.Simple as Simple
import Distribution.Version
( Version(..), VersionRange, anyVersion
, intersectVersionRanges, orLaterVersion
, withinRange )
import Distribution.InstalledPackageInfo (installedComponentId)
import Distribution.Package
( ComponentId(..), PackageIdentifier(..), PackageId,
PackageName(..), Package(..), packageName
, packageVersion, Dependency(..) )
import Distribution.PackageDescription
( GenericPackageDescription(packageDescription)
, PackageDescription(..), specVersion
, BuildType(..), knownBuildTypes, defaultRenaming )
import Distribution.PackageDescription.Parse
( readPackageDescription )
import Distribution.Simple.Configure
( configCompilerEx )
import Distribution.Compiler
( buildCompilerId, CompilerFlavor(GHC, GHCJS) )
import Distribution.Simple.Compiler
( Compiler(compilerId), compilerFlavor, PackageDB(..), PackageDBStack )
import Distribution.Simple.PreProcess
( runSimplePreProcessor, ppUnlit )
import Distribution.Simple.Build.Macros
( generatePackageVersionMacros )
import Distribution.Simple.Program
( ProgramConfiguration, emptyProgramConfiguration
, getProgramSearchPath, getDbProgramOutput, runDbProgram, ghcProgram
, ghcjsProgram )
import Distribution.Simple.Program.Find
( programSearchPathAsPATHVar )
import Distribution.Simple.Program.Run
( getEffectiveEnvironment )
import qualified Distribution.Simple.Program.Strip as Strip
import Distribution.Simple.BuildPaths
( defaultDistPref, exeExtension )
import Distribution.Simple.Command
( CommandUI(..), commandShowOptions )
import Distribution.Simple.Program.GHC
( GhcMode(..), GhcOptions(..), renderGhcOptions )
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import Distribution.Client.Config
( defaultCabalDir )
import Distribution.Client.IndexUtils
( getInstalledPackages )
import Distribution.Client.JobControl
( Lock, criticalSection )
import Distribution.Simple.Setup
( Flag(..) )
import Distribution.Simple.Utils
( die, debug, info, cabalVersion, tryFindPackageDesc, comparing
, createDirectoryIfMissingVerbose, installExecutableFile
, copyFileVerbose, rewriteFile, intercalate )
import Distribution.Client.Utils
( inDir, tryCanonicalizePath
, existsAndIsMoreRecentThan, moreRecentFile
#if mingw32_HOST_OS
, canonicalizePathNoThrow
#endif
)
import Distribution.System ( Platform(..), buildPlatform )
import Distribution.Text
( display )
import Distribution.Utils.NubList
( toNubListR )
import Distribution.Verbosity
( Verbosity )
import Distribution.Compat.Exception
( catchIO )
import System.Directory ( doesFileExist )
import System.FilePath ( (</>), (<.>) )
import System.IO ( Handle, hPutStr )
import System.Exit ( ExitCode(..), exitWith )
import System.Process ( runProcess, waitForProcess )
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ( (<$>), (<*>) )
import Data.Monoid ( mempty )
#endif
import Control.Monad ( when, unless )
import Data.List ( foldl1' )
import Data.Maybe ( fromMaybe, isJust )
import Data.Char ( isSpace )
import Distribution.Client.Compat.ExecutablePath ( getExecutablePath )
#ifdef mingw32_HOST_OS
import Distribution.Simple.Utils
( withTempDirectory )
import Control.Exception ( bracket )
import System.FilePath ( equalFilePath, takeDirectory )
import System.Directory ( doesDirectoryExist )
import qualified System.Win32 as Win32
#endif
-- | Everything needed to decide how to build and run a setup script.
data SetupScriptOptions = SetupScriptOptions {
    useCabalVersion          :: VersionRange,   -- ^ acceptable Cabal library versions
    useCompiler              :: Maybe Compiler, -- ^ compiler for building Setup.hs, if already configured
    usePlatform              :: Maybe Platform, -- ^ target platform, if known
    usePackageDB             :: PackageDBStack, -- ^ package DBs to search for a Cabal library
    usePackageIndex          :: Maybe InstalledPackageIndex, -- ^ cached installed-package index
    useProgramConfig         :: ProgramConfiguration, -- ^ configured external programs (ghc etc.)
    useDistPref              :: FilePath,       -- ^ dist directory prefix (setup script lives under it)
    useLoggingHandle         :: Maybe Handle,   -- ^ redirect setup output here, if set
    useWorkingDir            :: Maybe FilePath, -- ^ package directory to run in (default ".")
    forceExternalSetupMethod :: Bool,           -- ^ never run the setup in-process
    -- | List of dependencies to use when building Setup.hs
    useDependencies :: [(ComponentId, PackageId)],
    -- | Is the list of setup dependencies exclusive?
    --
    -- This is here for legacy reasons. Before the introduction of the explicit
    -- setup stanza in .cabal files we compiled Setup.hs scripts with all
    -- packages in the environment visible, but we still needed to restrict
    -- _some_ packages; in particular, we need to restrict the version of Cabal
    -- that the setup script gets linked against (this was the only "dependency
    -- constraint" that we had previously for Setup scripts).
    useDependenciesExclusive :: Bool,
    -- Used only by 'cabal clean' on Windows.
    --
    -- Note: win32 clean hack
    -------------------------
    -- On Windows, running './dist/setup/setup clean' doesn't work because the
    -- setup script will try to delete itself (which causes it to fail horribly,
    -- unlike on Linux). So we have to move the setup exe out of the way first
    -- and then delete it manually. This applies only to the external setup
    -- method.
    useWin32CleanHack :: Bool,
    -- Used only when calling setupWrapper from parallel code to serialise
    -- access to the setup cache; should be Nothing otherwise.
    --
    -- Note: setup exe cache
    ------------------------
    -- When we are installing in parallel, we always use the external setup
    -- method. Since compiling the setup script each time adds noticeable
    -- overhead, we use a shared setup script cache
    -- ('~/.cabal/setup-exe-cache'). For each (compiler, platform, Cabal
    -- version) combination the cache holds a compiled setup script
    -- executable. This only affects the Simple build type; for the Custom,
    -- Configure and Make build types we always compile the setup script anew.
    setupCacheLock :: Maybe Lock
  }
-- | Permissive defaults: accept any Cabal version, search the global and
-- user package DBs, no compiler/platform preconfigured, run in the current
-- directory, and allow the in-process setup method.
defaultSetupScriptOptions :: SetupScriptOptions
defaultSetupScriptOptions = SetupScriptOptions {
    useCabalVersion          = anyVersion,
    useCompiler              = Nothing,
    usePlatform              = Nothing,
    usePackageDB             = [GlobalPackageDB, UserPackageDB],
    usePackageIndex          = Nothing,
    useDependencies          = [],
    useDependenciesExclusive = False,
    useProgramConfig         = emptyProgramConfiguration,
    useDistPref              = defaultDistPref,
    useLoggingHandle         = Nothing,
    useWorkingDir            = Nothing,
    useWin32CleanHack        = False,
    forceExternalSetupMethod = False,
    setupCacheLock           = Nothing
  }
-- | Run a setup command for the given package: read the package description
-- if one was not supplied, narrow the acceptable Cabal version range with
-- the package's @cabal-version@ lower bound, pick a setup method based on
-- the build type, and run it with the rendered command-line arguments.
setupWrapper :: Verbosity
             -> SetupScriptOptions
             -> Maybe PackageDescription
             -> CommandUI flags
             -> (Version -> flags)
             -> [String]
             -> IO ()
setupWrapper verbosity options mpkg cmd flags extraArgs = do
  pkg <- maybe getPkg return mpkg
  let setupMethod = determineSetupMethod options' buildType'
      -- The spec-version of the package is a lower bound on the Cabal
      -- library version the setup script may be linked against.
      options'   = options {
                     useCabalVersion = intersectVersionRanges
                                         (useCabalVersion options)
                                         (orLaterVersion (specVersion pkg))
                   }
      buildType' = fromMaybe Custom (buildType pkg)
      -- Arguments depend on the Cabal library version actually selected,
      -- since option rendering may differ between versions.
      mkArgs cabalLibVersion = commandName cmd
                             : commandShowOptions cmd (flags cabalLibVersion)
                            ++ extraArgs
  checkBuildType buildType'
  setupMethod verbosity options' (packageId pkg) buildType' mkArgs
  where
    getPkg = tryFindPackageDesc (fromMaybe "." (useWorkingDir options))
         >>= readPackageDescription verbosity
         >>= return . packageDescription
    -- Fail early with a helpful message on an unrecognised build-type.
    checkBuildType (UnknownBuildType name) =
      die $ "The build-type '" ++ name ++ "' is not known. Use one of: "
         ++ intercalate ", " (map display knownBuildTypes) ++ "."
    checkBuildType _ = return ()
-- | Decide if we're going to be able to do a direct internal call to the
-- entry point in the Cabal library or if we're going to have to compile
-- and execute an external Setup.hs script.
--
-- Note that both the self-exec and the external methods run a separate
-- process; they differ in whether that process is this very executable
-- re-invoked ("act-as-setup") or a freshly compiled Setup.hs.
determineSetupMethod :: SetupScriptOptions -> BuildType -> SetupMethod
determineSetupMethod options buildType'
  -- This order is picked so that it's stable. The build type and
  -- required cabal version are external info, coming from .cabal
  -- files and the command line. Those do switch between the
  -- external and self & internal methods, but that info itself can
  -- be considered stable. The logging and force-external conditions
  -- are internally generated choices but now these only switch
  -- between the self and internal setup methods, which are
  -- consistent with each other.
  | buildType' == Custom      = externalSetupMethod
  | not (cabalVersion `withinRange`
         useCabalVersion options)    = externalSetupMethod
  | isJust (useLoggingHandle options)
    -- Forcing is done to use an external process e.g. due to parallel
    -- build concerns.
    || forceExternalSetupMethod options = selfExecSetupMethod
  | otherwise                           = internalSetupMethod
-- | A strategy for running a setup command: given verbosity, options, the
-- package identifier, the build type, and a function that renders the
-- command-line arguments for a chosen Cabal library version.
type SetupMethod = Verbosity
                -> SetupScriptOptions
                -> PackageIdentifier
                -> BuildType
                -> (Version -> [String]) -> IO ()
-- ------------------------------------------------------------
-- * Internal SetupMethod
-- ------------------------------------------------------------
-- | Run the setup command in-process, against the Cabal library this
-- executable was itself built with (hence rendering args for 'cabalVersion').
internalSetupMethod :: SetupMethod
internalSetupMethod verbosity options _ bt mkargs = do
  let args = mkargs cabalVersion
  debug verbosity $ "Using internal setup method with build-type " ++ show bt
                 ++ " and args:\n  " ++ show args
  -- Run relative to the package directory, as an external Setup.hs would.
  inDir (useWorkingDir options) $
    buildTypeAction bt args
-- | Dispatch to the Cabal/Make library entry point matching the package's
-- build type.  The 'Custom' and unknown cases are never reached here:
-- 'determineSetupMethod' routes 'Custom' to the external method first.
buildTypeAction :: BuildType -> ([String] -> IO ())
buildTypeAction bt = case bt of
  Simple             -> Simple.defaultMainArgs
  Configure          -> Simple.defaultMainWithHooksArgs
                          Simple.autoconfUserHooks
  Make               -> Make.defaultMainArgs
  Custom             -> error "buildTypeAction Custom"
  UnknownBuildType _ -> error "buildTypeAction UnknownBuildType"
-- ------------------------------------------------------------
-- * Self-Exec SetupMethod
-- ------------------------------------------------------------
-- | Run the setup command by re-executing this very cabal binary with the
-- hidden @act-as-setup@ command, so the work happens in a separate process
-- (needed e.g. when output must be redirected to a log file).
selfExecSetupMethod :: SetupMethod
selfExecSetupMethod verbosity options _pkg bt mkargs = do
  let args = ["act-as-setup",
              "--build-type=" ++ display bt,
              "--"] ++ mkargs cabalVersion
  debug verbosity $ "Using self-exec internal setup method with build-type "
                 ++ show bt ++ " and args:\n  " ++ show args
  path <- getExecutablePath
  info verbosity $ unwords (path : args)
  case useLoggingHandle options of
    Nothing        -> return ()
    Just logHandle -> info verbosity $ "Redirecting build log to "
                                    ++ show logHandle
  -- Make the configured program search path visible to the child process.
  searchpath <- programSearchPathAsPATHVar
                (getProgramSearchPath (useProgramConfig options))
  env <- getEffectiveEnvironment [("PATH", Just searchpath)]
  -- stdout and stderr both go to the logging handle, when one is set.
  process <- runProcess path args
             (useWorkingDir options) env Nothing
             (useLoggingHandle options) (useLoggingHandle options)
  exitCode <- waitForProcess process
  -- Propagate the child's failure exit code to our caller.
  unless (exitCode == ExitSuccess) $ exitWith exitCode
-- ------------------------------------------------------------
-- * External SetupMethod
-- ------------------------------------------------------------
externalSetupMethod :: SetupMethod
externalSetupMethod verbosity options pkg bt mkargs = do
debug verbosity $ "Using external setup method with build-type " ++ show bt
debug verbosity $ "Using explicit dependencies: "
++ show (useDependenciesExclusive options)
createDirectoryIfMissingVerbose verbosity True setupDir
(cabalLibVersion, mCabalLibInstalledPkgId, options') <- cabalLibVersionToUse
debug verbosity $ "Using Cabal library version " ++ display cabalLibVersion
path <- if useCachedSetupExecutable
then getCachedSetupExecutable options'
cabalLibVersion mCabalLibInstalledPkgId
else compileSetupExecutable options'
cabalLibVersion mCabalLibInstalledPkgId False
invokeSetupScript options' path (mkargs cabalLibVersion)
where
workingDir = case fromMaybe "" (useWorkingDir options) of
[] -> "."
dir -> dir
setupDir = workingDir </> useDistPref options </> "setup"
setupVersionFile = setupDir </> "setup" <.> "version"
setupHs = setupDir </> "setup" <.> "hs"
setupProgFile = setupDir </> "setup" <.> exeExtension
platform = fromMaybe buildPlatform (usePlatform options)
useCachedSetupExecutable = (bt == Simple || bt == Configure || bt == Make)
maybeGetInstalledPackages :: SetupScriptOptions -> Compiler
-> ProgramConfiguration -> IO InstalledPackageIndex
maybeGetInstalledPackages options' comp conf =
case usePackageIndex options' of
Just index -> return index
Nothing -> getInstalledPackages verbosity
comp (usePackageDB options') conf
cabalLibVersionToUse :: IO (Version, (Maybe ComponentId)
,SetupScriptOptions)
cabalLibVersionToUse = do
savedVer <- savedVersion
case savedVer of
Just version | version `withinRange` useCabalVersion options
-> do updateSetupScript version bt
-- Does the previously compiled setup executable still exist and
-- is it up-to date?
useExisting <- canUseExistingSetup version
if useExisting
then return (version, Nothing, options)
else installedVersion
_ -> installedVersion
where
-- This check duplicates the checks in 'getCachedSetupExecutable' /
-- 'compileSetupExecutable'. Unfortunately, we have to perform it twice
-- because the selected Cabal version may change as a result of this
-- check.
canUseExistingSetup :: Version -> IO Bool
canUseExistingSetup version =
if useCachedSetupExecutable
then do
(_, cachedSetupProgFile) <- cachedSetupDirAndProg options version
doesFileExist cachedSetupProgFile
else
(&&) <$> setupProgFile `existsAndIsMoreRecentThan` setupHs
<*> setupProgFile `existsAndIsMoreRecentThan` setupVersionFile
installedVersion :: IO (Version, Maybe ComponentId
,SetupScriptOptions)
installedVersion = do
(comp, conf, options') <- configureCompiler options
(version, mipkgid, options'') <- installedCabalVersion options' comp conf
updateSetupScript version bt
writeFile setupVersionFile (show version ++ "\n")
return (version, mipkgid, options'')
savedVersion :: IO (Maybe Version)
savedVersion = do
versionString <- readFile setupVersionFile `catchIO` \_ -> return ""
case reads versionString of
[(version,s)] | all isSpace s -> return (Just version)
_ -> return Nothing
-- | Update a Setup.hs script, creating it if necessary.
updateSetupScript :: Version -> BuildType -> IO ()
updateSetupScript _ Custom = do
useHs <- doesFileExist customSetupHs
useLhs <- doesFileExist customSetupLhs
unless (useHs || useLhs) $ die
"Using 'build-type: Custom' but there is no Setup.hs or Setup.lhs script."
let src = (if useHs then customSetupHs else customSetupLhs)
srcNewer <- src `moreRecentFile` setupHs
when srcNewer $ if useHs
then copyFileVerbose verbosity src setupHs
else runSimplePreProcessor ppUnlit src setupHs verbosity
where
customSetupHs = workingDir </> "Setup.hs"
customSetupLhs = workingDir </> "Setup.lhs"
updateSetupScript cabalLibVersion _ =
rewriteFile setupHs (buildTypeScript cabalLibVersion)
buildTypeScript :: Version -> String
buildTypeScript cabalLibVersion = case bt of
Simple -> "import Distribution.Simple; main = defaultMain\n"
Configure -> "import Distribution.Simple; main = defaultMainWithHooks "
++ if cabalLibVersion >= Version [1,3,10] []
then "autoconfUserHooks\n"
else "defaultUserHooks\n"
Make -> "import Distribution.Make; main = defaultMain\n"
Custom -> error "buildTypeScript Custom"
UnknownBuildType _ -> error "buildTypeScript UnknownBuildType"
installedCabalVersion :: SetupScriptOptions -> Compiler -> ProgramConfiguration
-> IO (Version, Maybe ComponentId
,SetupScriptOptions)
installedCabalVersion options' _ _ | packageName pkg == PackageName "Cabal" =
return (packageVersion pkg, Nothing, options')
installedCabalVersion options' compiler conf = do
index <- maybeGetInstalledPackages options' compiler conf
let cabalDep = Dependency (PackageName "Cabal") (useCabalVersion options')
options'' = options' { usePackageIndex = Just index }
case PackageIndex.lookupDependency index cabalDep of
[] -> die $ "The package '" ++ display (packageName pkg)
++ "' requires Cabal library version "
++ display (useCabalVersion options)
++ " but no suitable version is installed."
pkgs -> let ipkginfo = head . snd . bestVersion fst $ pkgs
in return (packageVersion ipkginfo
,Just . installedComponentId $ ipkginfo, options'')
bestVersion :: (a -> Version) -> [a] -> a
bestVersion f = firstMaximumBy (comparing (preference . f))
where
-- Like maximumBy, but picks the first maximum element instead of the
-- last. In general, we expect the preferred version to go first in the
-- list. For the default case, this has the effect of choosing the version
-- installed in the user package DB instead of the global one. See #1463.
--
-- Note: firstMaximumBy could be written as just
-- `maximumBy cmp . reverse`, but the problem is that the behaviour of
-- maximumBy is not fully specified in the case when there is not a single
-- greatest element.
firstMaximumBy :: (a -> a -> Ordering) -> [a] -> a
firstMaximumBy _ [] =
error "Distribution.Client.firstMaximumBy: empty list"
firstMaximumBy cmp xs = foldl1' maxBy xs
where
maxBy x y = case cmp x y of { GT -> x; EQ -> x; LT -> y; }
preference version = (sameVersion, sameMajorVersion
,stableVersion, latestVersion)
where
sameVersion = version == cabalVersion
sameMajorVersion = majorVersion version == majorVersion cabalVersion
majorVersion = take 2 . versionBranch
stableVersion = case versionBranch version of
(_:x:_) -> even x
_ -> False
latestVersion = version
configureCompiler :: SetupScriptOptions
-> IO (Compiler, ProgramConfiguration, SetupScriptOptions)
configureCompiler options' = do
(comp, conf) <- case useCompiler options' of
Just comp -> return (comp, useProgramConfig options')
Nothing -> do (comp, _, conf) <-
configCompilerEx (Just GHC) Nothing Nothing
(useProgramConfig options') verbosity
return (comp, conf)
-- Whenever we need to call configureCompiler, we also need to access the
-- package index, so let's cache it in SetupScriptOptions.
index <- maybeGetInstalledPackages options' comp conf
return (comp, conf, options' { useCompiler = Just comp,
usePackageIndex = Just index,
useProgramConfig = conf })
-- | Path to the setup exe cache directory and path to the cached setup
-- executable.
cachedSetupDirAndProg :: SetupScriptOptions -> Version
-> IO (FilePath, FilePath)
cachedSetupDirAndProg options' cabalLibVersion = do
cabalDir <- defaultCabalDir
let setupCacheDir = cabalDir </> "setup-exe-cache"
cachedSetupProgFile = setupCacheDir
</> ("setup-" ++ buildTypeString ++ "-"
++ cabalVersionString ++ "-"
++ platformString ++ "-"
++ compilerVersionString)
<.> exeExtension
return (setupCacheDir, cachedSetupProgFile)
where
buildTypeString = show bt
cabalVersionString = "Cabal-" ++ (display cabalLibVersion)
compilerVersionString = display $
fromMaybe buildCompilerId
(fmap compilerId . useCompiler $ options')
platformString = display platform
-- | Look up the setup executable in the cache; update the cache if the setup
-- executable is not found.
getCachedSetupExecutable :: SetupScriptOptions
-> Version -> Maybe ComponentId
-> IO FilePath
getCachedSetupExecutable options' cabalLibVersion
maybeCabalLibInstalledPkgId = do
(setupCacheDir, cachedSetupProgFile) <-
cachedSetupDirAndProg options' cabalLibVersion
cachedSetupExists <- doesFileExist cachedSetupProgFile
if cachedSetupExists
then debug verbosity $
"Found cached setup executable: " ++ cachedSetupProgFile
else criticalSection' $ do
-- The cache may have been populated while we were waiting.
cachedSetupExists' <- doesFileExist cachedSetupProgFile
if cachedSetupExists'
then debug verbosity $
"Found cached setup executable: " ++ cachedSetupProgFile
else do
debug verbosity $ "Setup executable not found in the cache."
src <- compileSetupExecutable options'
cabalLibVersion maybeCabalLibInstalledPkgId True
createDirectoryIfMissingVerbose verbosity True setupCacheDir
installExecutableFile verbosity src cachedSetupProgFile
-- Do not strip if we're using GHCJS, since the result may be a script
when (maybe True ((/=GHCJS).compilerFlavor) $ useCompiler options') $
Strip.stripExe verbosity platform (useProgramConfig options')
cachedSetupProgFile
return cachedSetupProgFile
where
criticalSection' = fromMaybe id
(fmap criticalSection $ setupCacheLock options')
-- | If the Setup.hs is out of date wrt the executable then recompile it.
-- Currently this is GHC/GHCJS only. It should really be generalised.
--
compileSetupExecutable :: SetupScriptOptions
                       -> Version -> Maybe ComponentId -> Bool
                       -> IO FilePath
compileSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId
                       forceCompile = do
  -- Recompile when either Setup.hs or the recorded Cabal version file is
  -- newer than the existing setup binary, or when the caller forces it.
  setupHsNewer      <- setupHs          `moreRecentFile` setupProgFile
  cabalVersionNewer <- setupVersionFile `moreRecentFile` setupProgFile
  let outOfDate = setupHsNewer || cabalVersionNewer
  when (outOfDate || forceCompile) $ do
    debug verbosity "Setup executable needs to be updated, compiling..."
    (compiler, conf, options'') <- configureCompiler options'
    let cabalPkgid = PackageIdentifier (PackageName "Cabal") cabalLibVersion
        -- GHCJS produces a runner script rather than a native binary.
        (program, extraOpts)
          = case compilerFlavor compiler of
              GHCJS -> (ghcjsProgram, ["-build-runner"])
              _     -> (ghcProgram,   ["-threaded"])
        cabalDep = maybe [] (\ipkgid -> [(ipkgid, cabalPkgid)])
                   maybeCabalLibInstalledPkgId
        -- We do a few things differently once packages opt-in and declare
        -- a custom-setup stanza. In particular we then enforce the deps
        -- specified, but also let the Setup.hs use the version macros.
        newPedanticDeps = useDependenciesExclusive options'
        selectedDeps
          | newPedanticDeps = useDependencies options'
          | otherwise       = useDependencies options' ++ cabalDep
        addRenaming (ipid, pid) = (ipid, pid, defaultRenaming)
        cppMacrosFile = setupDir </> "setup_macros.h"
        ghcOptions = mempty {
            ghcOptVerbosity       = Flag verbosity
          , ghcOptMode            = Flag GhcModeMake
          , ghcOptInputFiles      = toNubListR [setupHs]
          , ghcOptOutputFile      = Flag setupProgFile
          , ghcOptObjDir          = Flag setupDir
          , ghcOptHiDir           = Flag setupDir
          , ghcOptSourcePathClear = Flag True
          , ghcOptSourcePath      = toNubListR [workingDir]
          , ghcOptPackageDBs      = usePackageDB options''
          , ghcOptHideAllPackages = Flag newPedanticDeps
          , ghcOptCabal           = Flag newPedanticDeps
          , ghcOptPackages        = toNubListR $ map addRenaming selectedDeps
          , ghcOptCppIncludes     = toNubListR [ cppMacrosFile
                                               | newPedanticDeps ]
          , ghcOptExtra           = toNubListR extraOpts
          }
    let ghcCmdLine = renderGhcOptions compiler ghcOptions
    -- Under pedantic deps, expose MIN_VERSION_* macros to Setup.hs.
    when newPedanticDeps $
      rewriteFile cppMacrosFile (generatePackageVersionMacros
                                   [ pid | (_ipid, pid) <- selectedDeps ])
    -- NOTE(review): this reads 'options' (presumably from the enclosing
    -- scope), not the local 'options''; confirm that is intentional.
    case useLoggingHandle options of
      Nothing          -> runDbProgram verbosity program conf ghcCmdLine
      -- If build logging is enabled, redirect compiler output to the log file.
      (Just logHandle) -> do output <- getDbProgramOutput verbosity program
                                       conf ghcCmdLine
                             hPutStr logHandle output
  return setupProgFile
-- | Run the compiled (or cached) setup executable with the given arguments,
-- honouring the working dir, logging handles and the Win32 clean hack.
invokeSetupScript :: SetupScriptOptions -> FilePath -> [String] -> IO ()
invokeSetupScript options' path args = do
  info verbosity $ unwords (path : args)
  case useLoggingHandle options' of
    Nothing        -> return ()
    Just logHandle -> info verbosity $ "Redirecting build log to "
                                    ++ show logHandle
  -- Since useWorkingDir can change the relative path, the path argument must
  -- be turned into an absolute path. On some systems, runProcess will take
  -- path as relative to the new working directory instead of the current
  -- working directory.
  path' <- tryCanonicalizePath path
  -- See 'Note: win32 clean hack' above.
#if mingw32_HOST_OS
  -- setupProgFile may not exist if we're using a cached program
  setupProgFile' <- canonicalizePathNoThrow setupProgFile
  let win32CleanHackNeeded = (useWin32CleanHack options')
                             -- Skip when a cached setup script is used.
                             && setupProgFile' `equalFilePath` path'
  if win32CleanHackNeeded then doWin32CleanHack path' else doInvoke path'
#else
  doInvoke path'
#endif

  where
    -- Run the executable with PATH extended by the program search path and
    -- both stdout/stderr wired to the logging handle; propagate a non-zero
    -- exit code to the caller via 'exitWith'.
    doInvoke path' = do
      searchpath <- programSearchPathAsPATHVar
                    (getProgramSearchPath (useProgramConfig options'))
      env        <- getEffectiveEnvironment [("PATH", Just searchpath)]
      process <- runProcess path' args
                 (useWorkingDir options') env Nothing
                 (useLoggingHandle options') (useLoggingHandle options')
      exitCode <- waitForProcess process
      unless (exitCode == ExitSuccess) $ exitWith exitCode

#if mingw32_HOST_OS
    doWin32CleanHack path' = do
      info verbosity $ "Using the Win32 clean hack."
      -- Recursively removes the temp dir on exit.
      withTempDirectory verbosity workingDir "cabal-tmp" $ \tmpDir ->
          bracket (moveOutOfTheWay tmpDir path')
                  (maybeRestore path')
                  doInvoke

    -- Move the setup executable aside so 'setup clean' can delete 'dist'.
    moveOutOfTheWay tmpDir path' = do
      let newPath = tmpDir </> "setup" <.> exeExtension
      Win32.moveFile path' newPath
      return newPath

    -- Restore the executable unless 'setup clean' removed its directory.
    maybeRestore oldPath path' = do
      let oldPathDir = takeDirectory oldPath
      oldPathDirExists <- doesDirectoryExist oldPathDir
      -- 'setup clean' didn't complete, 'dist/setup' still exists.
      when oldPathDirExists $
        Win32.moveFile path' oldPath
#endif
| randen/cabal | cabal-install/Distribution/Client/SetupWrapper.hs | bsd-3-clause | 29,915 | 0 | 23 | 8,368 | 5,128 | 2,742 | 2,386 | 440 | 28 |
-- | Minimal placeholder module; provides no real regex functionality.
module Util.Regex where

-- Dummy exported value used to prove this module was compiled/loaded.
regex_stuff = "I'm a regex module!"
{-# LANGUAGE StandaloneDeriving ,DeriveAnyClass #-}
-- | Standalone 'Binary' instances for the enumeration types in "Test.E".
-- DeriveAnyClass picks up binary's default (presumably Generic-based)
-- implementation for each type — TODO confirm the E types derive Generic.
module Test.E.Binary where
import Test.E
import Data.Binary

deriving instance Binary E2
deriving instance Binary E4
deriving instance Binary E8
deriving instance Binary E16
deriving instance Binary E32
deriving instance Binary E256
deriving instance Binary E258

-- fs =
--   [ Binary E2_1
--   , Binary E32_1
--   , Binary E256_255
--   , Binary E256_254
--   , Binary E256_253
--   , Binary E256_256
--   ]
| tittoassini/flat | test/Test/E/Binary.hs | bsd-3-clause | 499 | 0 | 5 | 122 | 82 | 48 | 34 | 11 | 0 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Servant.Server.Auth.Token.LevelDB.Schema where
import Control.Concurrent.RLock
import Control.Lens
import Control.Monad
import Control.Monad.Catch
import Control.Monad.IO.Class
import Data.Aeson.WithField
import Data.ByteString (ByteString)
import Data.Int
import Data.List (sort, sortBy)
import Data.Map.Strict (Map)
import Data.Maybe
import Data.Ord
import Data.SafeCopy.Store
import Data.SafeCopy.Store.Internal
import Data.Set (Set)
import Data.Store
import Data.Text (Text)
import Data.Time
import Data.Typeable hiding (Proxy)
import Database.LevelDB
import Safe
import Servant.API.Auth.Token
import Servant.API.Auth.Token.Pagination
import Servant.Server.Auth.Token.Common
import Servant.Server.Auth.Token.Model(
UserImplId
, UserImpl(..)
, UserPermId
, UserPerm(..)
, AuthTokenId
, AuthToken(..)
, UserRestoreId
, UserRestore(..)
, UserSingleUseCodeId
, UserSingleUseCode(..)
, AuthUserGroupId
, AuthUserGroup(..)
, AuthUserGroupUsersId
, AuthUserGroupUsers(..)
, AuthUserGroupPermsId
, AuthUserGroupPerms(..)
)
import qualified Data.Foldable as F
import qualified Data.Map.Strict as M
import qualified Data.Set as S
-- | ID of global model index
newtype ModelId = ModelId { unModelId :: Int64 }
  deriving (Show, Read, Ord, Eq)

-- | Global id of model index.  There is exactly one 'Model' record in the
-- store, always kept under this fixed key.
modelId :: ModelId
modelId = ModelId 0
-- | Holds all data for auth server in acid-state container.
--
-- Only entity /ids/ live in this index; the records themselves are stored
-- under their own LevelDB keys (see the 'Key' class below).  The @Next...Id@
-- counters are monotonically increasing and never reused.
data Model = Model {
  -- | Holds users by id
  _modelUsers :: !(Set UserImplId)
  -- | Holds users by login (same content as 'modelUsers')
, _modelUsersByLogin :: !(Map Login UserImplId)
  -- | Holds 'UserPerm'
, _modelUserPerms :: !(Set UserPermId)
  -- | Holds 'AuthToken'
, _modelAuthTokens :: !(Set AuthTokenId)
  -- | Holds 'UserRestore'
, _modelUserRestores :: !(Set UserRestoreId)
  -- | Holds 'UserSingleUseCode'
, _modelUserSingleUseCodes :: !(Set UserSingleUseCodeId)
  -- | Holds 'AuthUserGroup'
, _modelAuthUserGroups :: !(Set AuthUserGroupId)
  -- | Holds 'AuthUserGroupUsers'
, _modelAuthUserGroupUsers :: !(Set AuthUserGroupUsersId)
  -- | Holds 'AuthUserGroupPerms'
, _modelAuthUserGroupPerms :: !(Set AuthUserGroupPermsId)
  -- | Holds next id for entities
, _modelNextUserImplId :: !Int64
  -- | Holds next id for entities
, _modelNextUserPermId :: !Int64
  -- | Holds next id for entities
, _modelNextAuthTokenId :: !Int64
  -- | Holds next id for entities
, _modelNextUserRestoreId :: !Int64
  -- | Holds next id for entities
, _modelNextUserSingleUseCodeId :: !Int64
  -- | Holds next id for entities
, _modelNextAuthUserGroupId :: !Int64
  -- | Holds next id for entities
, _modelNextAuthUserGroupUserId :: !Int64
  -- | Holds next id for entities
, _modelNextAuthUserGroupPermId :: !Int64
}

-- Generates lenses 'modelUsers', 'modelUsersByLogin', ... used throughout.
makeLenses ''Model
-- | Defines empty model for new database: all registries empty and every
-- id counter starting from zero.
newModel :: Model
newModel = Model {
    _modelUsers = mempty
  , _modelUsersByLogin = mempty
  , _modelUserPerms = mempty
  , _modelAuthTokens = mempty
  , _modelUserRestores = mempty
  , _modelUserSingleUseCodes = mempty
  , _modelAuthUserGroups = mempty
  , _modelAuthUserGroupUsers = mempty
  , _modelAuthUserGroupPerms = mempty
  , _modelNextUserImplId = 0
  , _modelNextUserPermId = 0
  , _modelNextAuthTokenId = 0
  , _modelNextUserRestoreId = 0
  , _modelNextUserSingleUseCodeId = 0
  , _modelNextAuthUserGroupId = 0
  , _modelNextAuthUserGroupUserId = 0
  , _modelNextAuthUserGroupPermId = 0
  }
-- | Helper that defines bijection between key and record.
--
-- The default 'encodeKey' prefixes the serialised id with the key type's
-- name, so ids of different entity types never collide in the flat LevelDB
-- keyspace.
class Key i a | i -> a, a -> i where
  encodeKey :: i -> ByteString
  default encodeKey :: (SafeCopy i, Typeable i) => i -> ByteString
  encodeKey i = runEncode $ do
    _ <- pokeE tname
    safePut i
    where
      tname = show $ typeRep (Proxy :: Proxy i)
-- All entity types use the default type-name-prefixed key encoding.
instance Key AuthTokenId AuthToken
instance Key AuthUserGroupId AuthUserGroup
instance Key AuthUserGroupPermsId AuthUserGroupPerms
instance Key AuthUserGroupUsersId AuthUserGroupUsers
instance Key ModelId Model
instance Key UserImplId UserImpl
instance Key UserPermId UserPerm
instance Key UserRestoreId UserRestore
instance Key UserSingleUseCodeId UserSingleUseCode
-- | Holds together db reference and options for read/write and mutex
data LevelDBEnv = LevelDBEnv !DB !ReadOptions !WriteOptions !RLock

-- | Make new environment for execution of LevelDB operations.  A fresh
-- reentrant lock is allocated; it serialises updates of the global index.
newLevelDBEnv :: MonadIO m => DB -> ReadOptions -> WriteOptions -> m LevelDBEnv
newLevelDBEnv db readOpts writeOpts =
  LevelDBEnv db readOpts writeOpts <$> liftIO new
-- | Load object by id from leveldb; 'Nothing' when the key is absent.
load :: (MonadResource m, Key i a, SafeCopy a) => LevelDBEnv -> i -> m (Maybe a)
load (LevelDBEnv db readOpts _ _) key =
  fmap (fmap (decodeExWith safeGet)) (get db readOpts (encodeKey key))
-- | Store object by id in leveldb, overwriting any previous value.
store :: (MonadResource m, Key i a, SafeCopy a) => LevelDBEnv -> i -> a -> m ()
store (LevelDBEnv db _ wopts _) i a = put db wopts (encodeKey i) (runEncode $ safePut a)

-- | Remove object by given id in leveldb (no-op if absent).
remove :: (MonadResource m, Key i a) => LevelDBEnv -> i -> m ()
remove (LevelDBEnv db _ wopts _) i = delete db wopts (encodeKey i)
-- | Modify value by id in leveldb with a pure function, under the
-- environment's lock.  Missing keys are left untouched.
modify :: (MonadResource m, MonadMask m, Key i a, SafeCopy a) => LevelDBEnv -> i -> (a -> a) -> m ()
modify env key f = modifyM env key (pure . f)
-- | Modify value by id in leveldb with an effectful function.  The whole
-- read-modify-write runs inside the environment's reentrant lock so
-- concurrent modifications of the same key do not lose updates.
modifyM :: (MonadResource m, MonadMask m, Key i a, SafeCopy a) => LevelDBEnv -> i -> (a -> m a) -> m ()
modifyM db@(LevelDBEnv _ _ _ mut) i f = bracket_ (liftIO $ acquire mut) (liftIO $ release mut) $ do
  ma <- load db i
  case ma of
    Nothing -> return ()  -- nothing to modify
    Just a  -> store db i =<< f a
-- | Load global index from leveldb, falling back to the empty 'newModel'
-- for a freshly created database.
loadModel :: MonadResource m => LevelDBEnv -> m Model
loadModel db = fromMaybe newModel <$> load db modelId
-- | Store global index to leveldb under the fixed 'modelId' key.
storeModel :: MonadResource m => LevelDBEnv -> Model -> m ()
storeModel db = store db modelId
-- | Modify global index with a pure function.
-- NOTE(review): unlike 'modifyModelM' this does not take the environment's
-- lock, so the read-modify-write is not atomic — confirm callers rely on
-- external serialisation.
modifyModel :: MonadResource m => LevelDBEnv -> (Model -> Model) -> m ()
modifyModel db f = loadModel db >>= storeModel db . f
-- | Modify global index with an effectful function, atomically with respect
-- to other lock-taking operations; returns the extra result of the action.
modifyModelM :: (MonadResource m, MonadMask m) => LevelDBEnv -> (Model -> m (Model, a)) -> m a
modifyModelM db@(LevelDBEnv _ _ _ mut) f = bracket_ (liftIO $ acquire mut) (liftIO $ release mut) $ do
  m <- loadModel db
  (m', a) <- f m
  storeModel db m'
  return a
-- | Helper to get paged list of entities.  Pages are zero-based; ids are
-- sorted ascending before slicing.  The returned count is the total number
-- of ids in the registry, not the page size.  Ids whose records are missing
-- from the db are silently dropped from the page.
getPagedList :: (MonadResource m, Ord i, Key i a, SafeCopy a) => LevelDBEnv -> Page -> PageSize -> Set i -> m ([WithId i a], Word)
getPagedList db p s is = do
  let is' = take (fromIntegral s) . drop (fromIntegral $ p * s) . sort . F.toList $ is
  es <- traverse (\i -> fmap (i,) <$> load db i) is'
  return (fmap (uncurry WithField) . catMaybes $ es, fromIntegral $ F.length is)
-- | Generic way to insert record in the leveldb with track in global
-- registry.  Runs under the model lock: allocates the next id, bumps the
-- counter, registers the id and stores the record.  Returns the new id.
insertRecord :: (MonadResource m, MonadMask m, Key i a, ConvertableKey i, Ord i, SafeCopy a)
  => Lens' Model Int64 -- ^ Field of model that store counter of the record ids
  -> Lens' Model (Set i) -- ^ Field of model that store a registry of the record ids
  -> a -> LevelDBEnv -> m i
insertRecord counterL registryL v db = modifyModelM db $ \m -> do
  let
    i = toKey $ view counterL m
    m' = m & over counterL (+1)
           & over registryL (S.insert i)
  store db i v
  return (m', i)
-- | Generic way to select all records that satisfy given predicate.
-- Iterates the whole registry (O(n) loads); registered ids whose records
-- are missing from the db are skipped.  No particular result ordering is
-- guaranteed beyond set traversal order.
selectRecords :: (MonadResource m, Key i a, SafeCopy a)
  => Lens' Model (Set i) -- ^ Model field with registry of the records
  -> (i -> a -> Bool) -- ^ Predicate
  -> LevelDBEnv -> m [WithId i a]
selectRecords registryL f db = do
  is <- view registryL <$> loadModel db
  fmap catMaybes $ forM (F.toList is) $ \i -> do
    ma <- load db i
    return $ case ma of
      Just a | f i a -> Just $ WithField i a
      _ -> Nothing
-- | Generic way to delete several records with respect of global registry.
-- Removes each record from the db, then unregisters all the ids from the
-- model in one update (under the model lock).
deleteRecords :: (MonadResource m, MonadMask m, Key i a, Ord i, Foldable f)
  => Lens' Model (Set i) -- ^ Model field with registry of the records
  -> f i -- ^ Set of ids of records that should be deleted
  -> LevelDBEnv -> m ()
deleteRecords registryL is db = modifyModelM db $ \m -> do
  F.traverse_ (remove db) is
  return . (, ()) $ m & over registryL (`S.difference` (S.fromList . F.toList) is)
-- | Generic way to replace record in registry and leveldb.  Also inserts
-- the id into the registry, so this doubles as an upsert.
replaceRecord :: (MonadResource m, MonadMask m, Key i a, Ord i, SafeCopy a)
  => Lens' Model (Set i) -- ^ Model field with registry of the records
  -> i -- ^ ID of record
  -> a -- ^ Value of record
  -> LevelDBEnv -> m ()
replaceRecord registryL i v db = modifyModelM db $ \m -> do
  store db i v
  return . (, ()) $ m & over registryL (S.insert i)
-- | Project the id component out of an id-tagged record.
withId :: WithField s i a -> i
withId (WithField key _value) = key

-- | Project the payload component out of an id-tagged record.
withVal :: WithField s i a -> a
withVal (WithField _key value) = value
--------------------------------------------------------------------------------
-- End of generic helpers
--------------------------------------------------------------------------------
-- | Getting user from storage by login, via the model's login index.
getUserImplByLogin :: MonadResource m => Login -> LevelDBEnv -> m (Maybe (WithId UserImplId UserImpl))
getUserImplByLogin login db = do
  m <- loadModel db
  maybe (return Nothing)
        (\uid -> fmap (WithField uid) <$> load db uid)
        (M.lookup login (_modelUsersByLogin m))
-- | Get paged list of users and total count of users.
listUsersPaged :: MonadResource m => Page -> PageSize -> LevelDBEnv -> m ([WithId UserImplId UserImpl], Word)
listUsersPaged p s db = getPagedList db p s =<< (_modelUsers <$> loadModel db)

-- | Get user permissions.
-- NOTE(review): despite the original "ascending by tag" haddock, no sorting
-- is applied here — order follows registry traversal.
getUserImplPermissions :: MonadResource m => UserImplId -> LevelDBEnv -> m [WithId UserPermId UserPerm]
getUserImplPermissions i = selectRecords modelUserPerms $ \ _ perm -> userPermUser perm == i
-- | Delete all permissions attached to the given user.
deleteUserPermissions :: (MonadResource m, MonadMask m) => UserImplId -> LevelDBEnv -> m ()
deleteUserPermissions i db = do
  perms <- getUserImplPermissions i db
  deleteRecords modelUserPerms (map withId perms) db
-- | Insertion of new user permission; returns the freshly allocated id.
insertUserPerm :: (MonadResource m, MonadMask m) => UserPerm -> LevelDBEnv -> m UserPermId
insertUserPerm = insertRecord modelNextUserPermId modelUserPerms
-- | Insertion of new user.  Unlike generic 'insertRecord' this also updates
-- the login -> id index, overwriting any existing mapping for the login.
insertUserImpl :: (MonadResource m, MonadMask m) => UserImpl -> LevelDBEnv -> m UserImplId
insertUserImpl v db = modifyModelM db $ \m -> do
  let
    i = toKey $ view modelNextUserImplId m
    m' = m & over modelNextUserImplId (+1)
           & over modelUsers (S.insert i)
           & over modelUsersByLogin (M.insert (userImplLogin v) i)
  store db i v
  return (m', i)
-- | Replace user with new value, keeping the login index consistent: the
-- previous login mapping (if the user existed) is removed before the new
-- login is indexed.
replaceUserImpl :: (MonadResource m, MonadMask m) => UserImplId -> UserImpl -> LevelDBEnv -> m ()
replaceUserImpl i v db = modifyModelM db $ \m -> do
  muser <- load db i
  let cleanOld = case muser of
        Nothing -> id
        Just v' -> M.delete (userImplLogin v')
  store db i v
  return . (, ()) $ m & over modelUsersByLogin (M.insert (userImplLogin v) i . cleanOld)
-- | Delete user by id, together with all of the user's permissions and the
-- login index entry.  A missing id is a no-op.
-- NOTE(review): the initial 'load' happens outside the model lock taken by
-- 'modifyModelM' — confirm this race is acceptable.
deleteUserImpl :: (MonadResource m, MonadMask m) => UserImplId -> LevelDBEnv -> m ()
deleteUserImpl i db = do
  muser <- load db i
  case muser of
    Nothing -> return ()
    Just u -> modifyModelM db $ \m -> do
      deleteUserPermissions i db
      remove db i
      return . (, ()) $ m
        & over modelUsers (S.delete i)
        & over modelUsersByLogin (M.delete (userImplLogin u))
-- | Check whether the user has particular permission.
hasPerm :: MonadResource m => UserImplId -> Permission -> LevelDBEnv -> m Bool
hasPerm i p db = do
  ps <- getUserImplPermissions i db
  -- 'any' short-circuits on the first match, unlike the previous
  -- @(> 0) . length . filter@ pipeline which forced the whole list.
  -- (The @userPermUser p' == i@ check is kept even though
  -- 'getUserImplPermissions' already filters by user.)
  return $ any (\(WithField _ p') -> userPermUser p' == i && userPermPermission p' == p) ps
-- | Get any user with given permission.  Folds over all permission ids;
-- once a match is found ('Just') the remaining ids are skipped without
-- further db loads.  No particular choice among multiple matches.
getFirstUserByPerm :: MonadResource m => Permission -> LevelDBEnv -> m (Maybe (WithId UserImplId UserImpl))
getFirstUserByPerm perm db = do
  ps <- view modelUserPerms <$> loadModel db
  let
    go _ v@Just{} = pure v  -- already found: keep the result
    go i Nothing = do
      mp <- load db i
      case mp of
        Just p | userPermPermission p == perm -> fmap (WithField (userPermUser p)) <$> load db (userPermUser p)
        _ -> pure Nothing
  F.foldrM go Nothing ps
-- | Select group-membership records of the given user.
-- NOTE(review): despite the original "sort them by ascending name" haddock,
-- no sorting is applied here.
selectUserImplGroups :: MonadResource m => UserImplId -> LevelDBEnv -> m [WithId AuthUserGroupUsersId AuthUserGroupUsers]
selectUserImplGroups i = selectRecords modelAuthUserGroupUsers $ \_ g -> authUserGroupUsersUser g == i

-- | Remove user from all groups (deletes the membership records).
clearUserImplGroups :: (MonadResource m, MonadMask m) => UserImplId -> LevelDBEnv -> m ()
clearUserImplGroups i db = do
  is <- fmap withId <$> selectUserImplGroups i db
  deleteRecords modelAuthUserGroupUsers is db
-- | Add new user group; returns the freshly allocated id.
insertAuthUserGroup :: (MonadResource m, MonadMask m) => AuthUserGroup -> LevelDBEnv -> m AuthUserGroupId
insertAuthUserGroup = insertRecord modelNextAuthUserGroupId modelAuthUserGroups

-- | Add user to given group (creates a membership record).
insertAuthUserGroupUsers :: (MonadResource m, MonadMask m) => AuthUserGroupUsers -> LevelDBEnv -> m AuthUserGroupUsersId
insertAuthUserGroupUsers = insertRecord modelNextAuthUserGroupUserId modelAuthUserGroupUsers

-- | Add permission to given group (creates a group-permission record).
insertAuthUserGroupPerms :: (MonadResource m, MonadMask m) => AuthUserGroupPerms -> LevelDBEnv -> m AuthUserGroupPermsId
insertAuthUserGroupPerms = insertRecord modelNextAuthUserGroupPermId modelAuthUserGroupPerms
-- | Get list of permissions of given group (unsorted).
listAuthUserGroupPermissions :: MonadResource m => AuthUserGroupId -> LevelDBEnv -> m [WithId AuthUserGroupPermsId AuthUserGroupPerms]
listAuthUserGroupPermissions i = selectRecords modelAuthUserGroupPerms $ \_ p -> authUserGroupPermsGroup p == i

-- | Get list of all users of the group (unsorted).
listAuthUserGroupUsers :: MonadResource m => AuthUserGroupId -> LevelDBEnv -> m [WithId AuthUserGroupUsersId AuthUserGroupUsers]
listAuthUserGroupUsers i = selectRecords modelAuthUserGroupUsers $ \_ p -> authUserGroupUsersGroup p == i

-- | Replace record of user group (upsert via 'replaceRecord').
replaceAuthUserGroup :: (MonadResource m, MonadMask m) => AuthUserGroupId -> AuthUserGroup -> LevelDBEnv -> m ()
replaceAuthUserGroup = replaceRecord modelAuthUserGroups
-- | Remove all users from group (deletes membership records).
clearAuthUserGroupUsers :: (MonadResource m, MonadMask m) => AuthUserGroupId -> LevelDBEnv -> m ()
clearAuthUserGroupUsers i db = do
  is <- fmap withId <$> listAuthUserGroupUsers i db
  deleteRecords modelAuthUserGroupUsers is db

-- | Remove all permissions from group (deletes group-permission records).
clearAuthUserGroupPerms :: (MonadResource m, MonadMask m) => AuthUserGroupId -> LevelDBEnv -> m ()
clearAuthUserGroupPerms i db = do
  is <- fmap withId <$> listAuthUserGroupPermissions i db
  deleteRecords modelAuthUserGroupPerms is db
-- | Delete user group from storage together with its memberships and
-- group permissions, then unregister the id from the model.
deleteAuthUserGroup :: (MonadResource m, MonadMask m) => AuthUserGroupId -> LevelDBEnv -> m ()
deleteAuthUserGroup i db = modifyModelM db $ \m -> do
  clearAuthUserGroupUsers i db
  clearAuthUserGroupPerms i db
  remove db i
  return . (, ()) $ m & over modelAuthUserGroups (S.delete i)
-- | Get paged list of user groups with total count.
listGroupsPaged :: MonadResource m => Page -> PageSize -> LevelDBEnv -> m ([WithId AuthUserGroupId AuthUserGroup], Word)
listGroupsPaged p s db = getPagedList db p s =<< (view modelAuthUserGroups <$> loadModel db)

-- | Set group name (no-op if the group id does not exist).
setAuthUserGroupName :: (MonadResource m, MonadMask m) => AuthUserGroupId -> Text -> LevelDBEnv -> m ()
setAuthUserGroupName i n db = modify db i $ \v -> v { authUserGroupName = n }

-- | Set group parent; 'Nothing' detaches the group from any parent.
setAuthUserGroupParent :: (MonadResource m, MonadMask m) => AuthUserGroupId -> Maybe AuthUserGroupId -> LevelDBEnv -> m ()
setAuthUserGroupParent i p db = modify db i $ \v -> v { authUserGroupParent = p }
-- | Add new single use code; returns the freshly allocated id.
insertSingleUseCode :: (MonadResource m, MonadMask m) => UserSingleUseCode -> LevelDBEnv -> m UserSingleUseCodeId
insertSingleUseCode = insertRecord modelNextUserSingleUseCodeId modelUserSingleUseCodes

-- | Set usage time of the single use code; 'Nothing' marks it unused again.
setSingleUseCodeUsed :: (MonadResource m, MonadMask m) => UserSingleUseCodeId -> Maybe UTCTime -> LevelDBEnv -> m ()
setSingleUseCodeUsed i mt db = modify db i $ \v -> v { userSingleUseCodeUsed = mt }
-- | Find unused code for the user whose expiration time is either absent
-- (permanent code) or not earlier than the given time.  Among matches the
-- one with the latest expiration is returned.
getUnusedCode :: MonadResource m => SingleUseCode -> UserImplId -> UTCTime -> LevelDBEnv -> m (Maybe (WithId UserSingleUseCodeId UserSingleUseCode))
getUnusedCode c i t db = headMay . sorting <$> selectRecords modelUserSingleUseCodes f db
  where
    -- Use 'withVal' for the projection, consistent with
    -- 'selectLastRestoreCode' (was an inline lambda).
    sorting = sortBy (comparing $ Down . userSingleUseCodeExpire . withVal)
    f _ usc =
         userSingleUseCodeValue usc == c
      && userSingleUseCodeUser usc == i
      && isNothing (userSingleUseCodeUsed usc)
      && (isNothing (userSingleUseCodeExpire usc) || userSingleUseCodeExpire usc >= Just t)
-- | Invalidate all permanent (no expiration) unused codes for the user by
-- stamping them with the given use time.
-- Name keeps the original "Permament" spelling [sic] for API compatibility.
invalidatePermamentCodes :: (MonadResource m, MonadMask m) => UserImplId -> UTCTime -> LevelDBEnv -> m ()
invalidatePermamentCodes i t db = do
  cs <- view modelUserSingleUseCodes <$> loadModel db
  forM_ (F.toList cs) $ \cid -> do
    mc <- load db cid
    case mc of
      Just usc | isPermament usc -> modify db cid invalidate
      _ -> return ()
  where
    invalidate su = su { userSingleUseCodeUsed = Just t }
    -- Permanent means: belongs to the user, unused, and has no expiry.
    isPermament usc =
         userSingleUseCodeUser usc == i
      && isNothing (userSingleUseCodeUsed usc)
      && isNothing (userSingleUseCodeExpire usc)
-- | Select last valid restoration code by the given current time: the
-- user's non-expired code with the latest expiration wins.
selectLastRestoreCode :: MonadResource m => UserImplId -> UTCTime -> LevelDBEnv -> m (Maybe (WithId UserRestoreId UserRestore))
selectLastRestoreCode i t db = headMay . sorting <$> selectRecords modelUserRestores (const f) db
  where
    sorting = sortBy (comparing $ Down . userRestoreExpire . withVal)
    f ur = userRestoreUser ur == i && userRestoreExpire ur > t

-- | Insert new restore code; returns the freshly allocated id.
insertUserRestore :: (MonadResource m, MonadMask m) => UserRestore -> LevelDBEnv -> m UserRestoreId
insertUserRestore = insertRecord modelNextUserRestoreId modelUserRestores
-- | Find unexpired (by the given time) restore code with the given value
-- for the user; among matches the latest-expiring one is returned.
findRestoreCode :: MonadResource m => UserImplId -> RestoreCode -> UTCTime -> LevelDBEnv -> m (Maybe (WithId UserRestoreId UserRestore))
findRestoreCode i rc t db = headMay . sorting <$> selectRecords modelUserRestores (const f) db
  where
    -- Use 'withVal' for the projection, consistent with
    -- 'selectLastRestoreCode' (was an inline lambda).
    sorting = sortBy (comparing $ Down . userRestoreExpire . withVal)
    f ur = userRestoreUser ur == i && userRestoreValue ur == rc && userRestoreExpire ur > t
-- | Replace restore code with new value (upsert via 'replaceRecord').
replaceRestoreCode :: (MonadResource m, MonadMask m) => UserRestoreId -> UserRestore -> LevelDBEnv -> m ()
replaceRestoreCode = replaceRecord modelUserRestores

-- | Find first non-expired by the time token for user; no particular
-- choice among multiple live tokens (registry traversal order).
findAuthToken :: MonadResource m => UserImplId -> UTCTime -> LevelDBEnv -> m (Maybe (WithId AuthTokenId AuthToken))
findAuthToken i t db = headMay <$> selectRecords modelAuthTokens (const f) db
  where
    f atok = authTokenUser atok == i && authTokenExpire atok > t
-- | Find token by value (linear scan of the token registry).
findAuthTokenByValue :: MonadResource m => SimpleToken -> LevelDBEnv -> m (Maybe (WithId AuthTokenId AuthToken))
findAuthTokenByValue v db = headMay <$> selectRecords modelAuthTokens (const f) db
  where
    f atok = authTokenValue atok == v

-- | Insert new token; returns the freshly allocated id.
insertAuthToken :: (MonadResource m, MonadMask m) => AuthToken -> LevelDBEnv -> m AuthTokenId
insertAuthToken = insertRecord modelNextAuthTokenId modelAuthTokens

-- | Replace auth token with new value (upsert via 'replaceRecord').
replaceAuthToken :: (MonadResource m, MonadMask m) => AuthTokenId -> AuthToken -> LevelDBEnv -> m ()
replaceAuthToken = replaceRecord modelAuthTokens
-- Template Haskell: derive version-0 'base' SafeCopy instances for every
-- persisted entity type and for the global 'Model' index.  Bump the version
-- and switch to 'extension' when any of these types changes shape.
deriveSafeCopy 0 'base ''UserImplId
deriveSafeCopy 0 'base ''UserImpl
deriveSafeCopy 0 'base ''UserPermId
deriveSafeCopy 0 'base ''UserPerm
deriveSafeCopy 0 'base ''AuthTokenId
deriveSafeCopy 0 'base ''AuthToken
deriveSafeCopy 0 'base ''UserRestoreId
deriveSafeCopy 0 'base ''UserRestore
deriveSafeCopy 0 'base ''UserSingleUseCodeId
deriveSafeCopy 0 'base ''UserSingleUseCode
deriveSafeCopy 0 'base ''AuthUserGroupId
deriveSafeCopy 0 'base ''AuthUserGroup
deriveSafeCopy 0 'base ''AuthUserGroupUsersId
deriveSafeCopy 0 'base ''AuthUserGroupUsers
deriveSafeCopy 0 'base ''AuthUserGroupPermsId
deriveSafeCopy 0 'base ''AuthUserGroupPerms
deriveSafeCopy 0 'base ''ModelId
deriveSafeCopy 0 'base ''Model
-- | Orphan SafeCopy instance for 'WithField': serialises the key then the
-- value, with no version tag of its own (components carry their own).
-- NOTE(review): 'putCopy' returns the original value from the serialiser
-- monad, which is unusual — presumably required by this safecopy-store
-- variant's types; confirm.
instance (SafeCopy k, SafeCopy v) => SafeCopy (WithField i k v) where
  putCopy a@(WithField k v) = contain $ do
    _ <- safePut k
    _ <- safePut v
    return a
  getCopy = contain $ WithField
    <$> safeGet
    <*> safeGet
| ivan-m/servant-auth-token | servant-auth-token-leveldb/src/Servant/Server/Auth/Token/LevelDB/Schema.hs | bsd-3-clause | 21,194 | 0 | 21 | 4,018 | 6,370 | 3,225 | 3,145 | -1 | -1 |
-- | A bounds-checked version of 'Data.Primitive.ByteArray'.
-- See that module for documentation.
{-# LANGUAGE ScopedTypeVariables #-}
module Data.Primitive.ByteArray.Checked(
module Data.Primitive.ByteArray,
module Data.Primitive.ByteArray.Checked) where
import Control.Monad.Primitive
import qualified Data.Primitive.ByteArray as P
import Data.Primitive(Prim)
import Data.Primitive.ByteArray(
ByteArray(..), MutableByteArray(..),
newByteArray, newPinnedByteArray, newAlignedPinnedByteArray,
byteArrayContents, mutableByteArrayContents,
sameMutableByteArray,
unsafeFreezeByteArray, unsafeThawByteArray,
sizeofByteArray, sizeofMutableByteArray)
import Data.Primitive.Checked
import Data.Word
-- Wire the primitive array size queries into the bounds-checking machinery.
instance Sized ByteArray where
  size = sizeofByteArray
instance Sized (MutableByteArray m) where
  size = sizeofMutableByteArray
{-# INLINE readByteArray #-}
-- | Bounds-checked 'P.readByteArray': checks index @n@ against the array
-- size for the element type before delegating to the unchecked primitive.
readByteArray :: forall m a. (PrimMonad m, Prim a) => MutableByteArray (PrimState m) -> Int -> m a
readByteArray arr n =
  checkPrim (undefined :: a) arr n $
  P.readByteArray arr n

{-# INLINE writeByteArray #-}
-- | Bounds-checked 'P.writeByteArray'.  The written value itself carries
-- the 'Prim' evidence used for the size check.
writeByteArray :: (PrimMonad m, Prim a) => MutableByteArray (PrimState m) -> Int -> a -> m ()
writeByteArray arr n x =
  checkPrim x arr n $
  P.writeByteArray arr n x

{-# INLINE indexByteArray #-}
-- | Bounds-checked pure 'P.indexByteArray'.
indexByteArray :: forall a. Prim a => ByteArray -> Int -> a
indexByteArray arr n =
  checkPrim (undefined :: a) arr n $
  P.indexByteArray arr n
{-# INLINE copyByteArray #-}
-- | Bounds-checked 'P.copyByteArray': both the destination and source
-- ranges are validated before delegating.
copyByteArray :: PrimMonad m => MutableByteArray (PrimState m) -> Int -> ByteArray -> Int -> Int -> m ()
copyByteArray arr1 n1 arr2 n2 len =
  range arr1 n1 len $
  range arr2 n2 len $
  P.copyByteArray arr1 n1 arr2 n2 len

{-# INLINE moveByteArray #-}
-- | Bounds-checked 'P.moveByteArray' (memmove-style, ranges may overlap).
moveByteArray :: PrimMonad m => MutableByteArray (PrimState m) -> Int -> MutableByteArray (PrimState m) -> Int -> Int -> m ()
moveByteArray arr1 n1 arr2 n2 len =
  range arr1 n1 len $
  range arr2 n2 len $
  P.moveByteArray arr1 n1 arr2 n2 len

{-# INLINE copyMutableByteArray #-}
-- | Bounds-checked 'P.copyMutableByteArray'.
copyMutableByteArray :: PrimMonad m => MutableByteArray (PrimState m) -> Int -> MutableByteArray (PrimState m) -> Int -> Int -> m ()
copyMutableByteArray arr1 n1 arr2 n2 len =
  range arr1 n1 len $
  range arr2 n2 len $
  P.copyMutableByteArray arr1 n1 arr2 n2 len
{-# INLINE setByteArray #-}
-- | Bounds-checked 'P.setByteArray': validates the element range (scaled by
-- the 'Prim' size of @x@) before filling.
setByteArray :: (Prim a, PrimMonad m) => MutableByteArray (PrimState m) -> Int -> Int -> a -> m ()
setByteArray arr n len x =
  rangePrim x arr n len $
  P.setByteArray arr n len x

{-# INLINE fillByteArray #-}
-- | Byte-level fill; just 'setByteArray' specialised to 'Word8'.
fillByteArray :: PrimMonad m => MutableByteArray (PrimState m) -> Int -> Int -> Word8 -> m ()
fillByteArray = setByteArray
| nick8325/kbc | src/Data/Primitive/ByteArray/Checked.hs | bsd-3-clause | 2,621 | 0 | 12 | 438 | 817 | 426 | 391 | 61 | 1 |
module Tests.Buffer (
tests
) where
import Control.Exception (ErrorCall(..), catch)
import Control.Monad (forM_)
import Data.Primitive.ByteArray
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS8
import Data.Word (Word8)
import Test.Framework.Providers.HUnit (testCase)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck (Property, ioProperty, (==>))
import Test.Framework (Test, testGroup)
import Test.HUnit ((@=?), assertFailure)
import GHCJS.Buffer
-- | Property: a buffer created with size @i@ reports 'byteLength' @i@.
-- Restricted to non-negative sizes via '==>'.
byteLengthWorks :: Int -> Property
byteLengthWorks i = (i >= 0) ==> (ioProperty $ do
  buf <- create i
  return (i == byteLength buf))
-- | 'fromByteString' on an unsliced ByteString yields offset 0, the full
-- length, and a backing buffer whose bytes match the original.
fromByteStringNoOffset :: IO ()
fromByteStringNoOffset = do
  let bs = BS8.pack ['a'..'z']
      (buf, offset, len) = fromByteString bs
  0  @=? offset
  26 @=? len
  26 @=? byteLength buf
  let ba = toByteArray buf
  forM_ [0..25] $ \i -> do
    BS.index bs i @=? indexByteArray ba i
-- | 'fromByteString' on a sliced ByteString exposes the slice's offset and
-- length while the backing buffer still holds all 26 original bytes
-- (slicing shares the underlying storage rather than copying).
fromByteStringWithOffset :: IO ()
fromByteStringWithOffset = do
  let bs = BS8.pack ['a'..'z']
      sliced = BS.take 10 . BS.drop 5 $ bs
      (buf, offset, len) = fromByteString sliced
  5  @=? offset
  10 @=? len
  26 @=? byteLength buf
  let ba = toByteArray buf
  forM_ [0..25] $ \i -> do
    BS.index bs i @=? indexByteArray ba i
-- | Build a frozen 26-byte array containing ASCII @'A'..'Z'@
-- (byte i is 65 + i).
atoz :: IO ByteArray
atoz = do
  ba <- newByteArray 26
  forM_ [0..25] $ \i -> writeByteArray ba i (fromIntegral i + 65 :: Word8)
  unsafeFreezeByteArray ba
-- | 'toByteString' honours the offset and optional length arguments:
-- full buffer, suffix from an offset, and a bounded slice.
wrapIntoByteString :: IO ()
wrapIntoByteString = do
  buf <- fromByteArray `fmap` atoz
  BS8.pack ['A'..'Z'] @=? toByteString 0  Nothing  buf
  BS8.pack ['K'..'Z'] @=? toByteString 10 Nothing  buf
  BS8.pack ['K'..'O'] @=? toByteString 10 (Just 5) buf
-- | Out-of-range offsets/lengths for 'toByteString' must raise 'ErrorCall'
-- with the exact message tested here; producing a value instead is a
-- test failure.
toByteStringOffset :: IO ()
toByteStringOffset = do
  buf <- fromByteArray `fmap` atoz
  tst (toByteString (-1) Nothing buf)     "toByteString: negative offset"
  tst (toByteString 100 Nothing buf)      "toByteString: offset past end of buffer"
  tst (toByteString 0 (Just (-1)) buf)    "toByteString: negative length"
  tst (toByteString 0 (Just (-1)) buf)    "toByteString: negative length"
  tst (toByteString 0 (Just 27) buf)      "toByteString: length past end of buffer"
  tst (toByteString 20 (Just 7) buf)      "toByteString: length past end of buffer"
  -- 'ign' forces the expression; the 'catch' handler checks the message.
  where tst expr expected = ign expr expected `catch` \(ErrorCall msg) -> expected @=? msg
        ign expr expected = seq expr $ assertFailure ("Expected error " ++ show expected ++ " but got nothing")
-- | All Buffer tests collected for the test-framework runner.
tests :: Test
tests =
  testGroup "Buffer" [
      testProperty "byte length works" byteLengthWorks,
      testCase "fromByteString - no offset" fromByteStringNoOffset,
      testCase "fromByteString - offset" fromByteStringWithOffset,
      testCase "toByteString" wrapIntoByteString,
      testCase "toByteString - offset" toByteStringOffset
    ]
| tavisrudd/ghcjs-base | test/Tests/Buffer.hs | mit | 2,767 | 0 | 13 | 504 | 961 | 489 | 472 | 69 | 1 |
module System.Mesos.Raw.Scheduler where
import System.Mesos.Internal
import System.Mesos.Raw.Credential
import System.Mesos.Raw.ExecutorId
import System.Mesos.Raw.Filters
import System.Mesos.Raw.FrameworkId
import System.Mesos.Raw.FrameworkInfo
import System.Mesos.Raw.MasterInfo
import System.Mesos.Raw.Offer
import System.Mesos.Raw.OfferId
import System.Mesos.Raw.Request
import System.Mesos.Raw.SlaveId
import System.Mesos.Raw.TaskId
import System.Mesos.Raw.TaskInfo
import System.Mesos.Raw.TaskStatus
type SchedulerPtr = Ptr Scheduler

-- | Bundle of the C-side scheduler object plus the wrapped Haskell
-- callbacks registered with it.  The 'FunPtr's are retained here so they
-- can be freed when the scheduler is destroyed (FunPtrs from "wrapper"
-- imports are not garbage collected automatically).
data Scheduler = Scheduler
  { schedulerImpl :: SchedulerPtr
  , rawSchedulerRegistered :: FunPtr RawSchedulerRegistered
  , rawSchedulerReRegistered :: FunPtr RawSchedulerReRegistered
  , rawSchedulerDisconnected :: FunPtr RawSchedulerDisconnected
  , rawSchedulerResourceOffers :: FunPtr RawSchedulerResourceOffers
  , rawSchedulerOfferRescinded :: FunPtr RawSchedulerOfferRescinded
  , rawSchedulerStatusUpdate :: FunPtr RawSchedulerStatusUpdate
  , rawSchedulerFrameworkMessage :: FunPtr RawSchedulerFrameworkMessage
  , rawSchedulerSlaveLost :: FunPtr RawSchedulerSlaveLost
  , rawSchedulerExecutorLost :: FunPtr RawSchedulerExecutorLost
  , rawSchedulerError :: FunPtr RawSchedulerError
  }
-- | Type representing the connection from a scheduler to Mesos. This
-- handle is used both to manage the scheduler's lifecycle (start
-- it, stop it, or wait for it to finish) and to interact with Mesos
-- (e.g., launch tasks, kill tasks, etc.).
newtype SchedulerDriver = SchedulerDriver { fromSchedulerDriver :: SchedulerDriverPtr }
  deriving (Show, Eq)

type SchedulerDriverPtr = Ptr SchedulerDriver
-- Callback signatures invoked from C. Each mirrors a Mesos scheduler
-- event; the FunPtr wrappers below convert Haskell closures into these.
-- NOTE(review): 'RawSchedulerFrameworkMessage' takes a plain 'Int' for
-- the message length while all other callbacks use 'CInt' -- looks
-- inconsistent, confirm against ext/scheduler.h before changing.
type RawSchedulerRegistered = SchedulerDriverPtr -> FrameworkIDPtr -> MasterInfoPtr -> IO ()
type RawSchedulerReRegistered = SchedulerDriverPtr -> MasterInfoPtr -> IO ()
type RawSchedulerDisconnected = SchedulerDriverPtr -> IO ()
type RawSchedulerResourceOffers = SchedulerDriverPtr -> Ptr OfferPtr -> CInt -> IO ()
type RawSchedulerOfferRescinded = SchedulerDriverPtr -> OfferIDPtr -> IO ()
type RawSchedulerStatusUpdate = SchedulerDriverPtr -> TaskStatusPtr -> IO ()
type RawSchedulerFrameworkMessage = SchedulerDriverPtr -> ExecutorIDPtr -> SlaveIDPtr -> Ptr CChar -> Int -> IO ()
type RawSchedulerSlaveLost = SchedulerDriverPtr -> SlaveIDPtr -> IO ()
type RawSchedulerExecutorLost = SchedulerDriverPtr -> ExecutorIDPtr -> SlaveIDPtr -> CInt -> IO ()
type RawSchedulerError = SchedulerDriverPtr -> Ptr CChar -> CInt -> IO ()
-- "wrapper" imports: each turns a Haskell callback into a C-callable
-- 'FunPtr'.  The resulting pointers are stored in 'Scheduler' and must
-- eventually be released with freeHaskellFunPtr.
foreign import ccall "wrapper" wrapSchedulerRegistered
  :: RawSchedulerRegistered
  -> IO (FunPtr RawSchedulerRegistered)
foreign import ccall "wrapper" wrapSchedulerReRegistered
  :: RawSchedulerReRegistered
  -> IO (FunPtr RawSchedulerReRegistered)
foreign import ccall "wrapper" wrapSchedulerDisconnected
  :: RawSchedulerDisconnected
  -> IO (FunPtr RawSchedulerDisconnected)
foreign import ccall "wrapper" wrapSchedulerResourceOffers
  :: RawSchedulerResourceOffers
  -> IO (FunPtr RawSchedulerResourceOffers)
foreign import ccall "wrapper" wrapSchedulerOfferRescinded
  :: RawSchedulerOfferRescinded
  -> IO (FunPtr RawSchedulerOfferRescinded)
foreign import ccall "wrapper" wrapSchedulerStatusUpdate
  :: RawSchedulerStatusUpdate
  -> IO (FunPtr RawSchedulerStatusUpdate)
foreign import ccall "wrapper" wrapSchedulerFrameworkMessage
  :: RawSchedulerFrameworkMessage
  -> IO (FunPtr RawSchedulerFrameworkMessage)
foreign import ccall "wrapper" wrapSchedulerSlaveLost
  :: RawSchedulerSlaveLost
  -> IO (FunPtr RawSchedulerSlaveLost)
foreign import ccall "wrapper" wrapSchedulerExecutorLost
  :: RawSchedulerExecutorLost
  -> IO (FunPtr RawSchedulerExecutorLost)
foreign import ccall "wrapper" wrapSchedulerError
  :: RawSchedulerError
  -> IO (FunPtr RawSchedulerError)
-- Safe C entry points from ext/scheduler.h.  "safe" calls may call back
-- into Haskell (the scheduler callbacks fire during driver operation).

-- Scheduler object lifecycle.
foreign import ccall safe "ext/scheduler.h createScheduler" c_createScheduler
  :: FunPtr RawSchedulerRegistered
  -> FunPtr RawSchedulerReRegistered
  -> FunPtr RawSchedulerDisconnected
  -> FunPtr RawSchedulerResourceOffers
  -> FunPtr RawSchedulerOfferRescinded
  -> FunPtr RawSchedulerStatusUpdate
  -> FunPtr RawSchedulerFrameworkMessage
  -> FunPtr RawSchedulerSlaveLost
  -> FunPtr RawSchedulerExecutorLost
  -> FunPtr RawSchedulerError
  -> IO SchedulerPtr
foreign import ccall safe "ext/scheduler.h destroyScheduler" c_destroyScheduler
  :: SchedulerPtr
  -> IO ()
-- Driver construction (the Ptr CChar / CInt pair is a length-tagged
-- master address string).
foreign import ccall safe "ext/scheduler.h createSchedulerDriver" c_createSchedulerDriver
  :: SchedulerPtr
  -> FrameworkInfoPtr
  -> Ptr CChar
  -> CInt
  -> IO SchedulerDriverPtr
foreign import ccall safe "ext/scheduler.h createSchedulerDriverWithCredentials" c_createSchedulerDriverWithCredentials
  :: SchedulerPtr
  -> FrameworkInfoPtr
  -> Ptr CChar
  -> CInt
  -> CredentialPtr
  -> IO SchedulerDriverPtr
foreign import ccall safe "ext/scheduler.h destroySchedulerDriver" c_destroySchedulerDriver
  :: SchedulerDriverPtr
  -> IO ()
-- Driver lifecycle; the returned CInt is a mesos Status code.
foreign import ccall safe "ext/scheduler.h startSchedulerDriver" c_startSchedulerDriver
  :: SchedulerDriverPtr
  -> IO CInt
foreign import ccall safe "ext/scheduler.h stopSchedulerDriver" c_stopSchedulerDriver
  :: SchedulerDriverPtr
  -> CInt
  -> IO CInt
foreign import ccall safe "ext/scheduler.h abortSchedulerDriver" c_abortSchedulerDriver
  :: SchedulerDriverPtr
  -> IO CInt
foreign import ccall safe "ext/scheduler.h joinSchedulerDriver" c_joinSchedulerDriver
  :: SchedulerDriverPtr
  -> IO CInt
foreign import ccall safe "ext/scheduler.h runSchedulerDriver" c_runSchedulerDriver
  :: SchedulerDriverPtr
  -> IO CInt
-- Framework actions (offers, tasks, messages).
foreign import ccall safe "ext/scheduler.h requestResources" c_requestResources
  :: SchedulerDriverPtr
  -> Ptr RequestPtr
  -> CInt
  -> IO CInt
foreign import ccall safe "ext/scheduler.h launchTasks" c_launchTasks
  :: SchedulerDriverPtr
  -> Ptr OfferIDPtr
  -> CInt
  -> Ptr TaskInfoPtr
  -> CInt
  -> FiltersPtr
  -> IO CInt
foreign import ccall safe "ext/scheduler.h killTask" c_killTask
  :: SchedulerDriverPtr
  -> TaskIDPtr
  -> IO CInt
foreign import ccall safe "ext/scheduler.h declineOffer" c_declineOffer
  :: SchedulerDriverPtr
  -> OfferIDPtr
  -> FiltersPtr
  -> IO CInt
foreign import ccall safe "ext/scheduler.h reviveOffers" c_reviveOffers
  :: SchedulerDriverPtr
  -> IO CInt
foreign import ccall safe "ext/scheduler.h schedulerDriverSendFrameworkMessage" c_sendFrameworkMessage
  :: SchedulerDriverPtr
  -> ExecutorIDPtr
  -> SlaveIDPtr
  -> Ptr CChar
  -> CInt
  -> IO CInt
foreign import ccall safe "ext/scheduler.h reconcileTasks" c_reconcileTasks
  :: SchedulerDriverPtr
  -> Ptr TaskStatusPtr
  -> CInt
  -> IO CInt
| iand675/hs-mesos | src/System/Mesos/Raw/Scheduler.hs | mit | 6,770 | 0 | 16 | 1,101 | 1,277 | 682 | 595 | 155 | 0 |
{- |
Module : $Header$
Description : XML processing for the CMDL interface
Copyright : uni-bremen and DFKI
License : GPLv2 or higher, see LICENSE.txt
Maintainer : r.pascanu@jacobs-university.de
Stability : provisional
Portability : portable
PGIP.XMLparsing contains commands for parsing or creating XML messages
-}
module PGIP.XMLparsing where
import PGIP.XMLstate
import CMDL.DataTypes
import CMDL.DataTypesUtils
import CMDL.DgCommands (cUse)
import CMDL.ProcessScript
import CMDL.Interface (cmdlRunShell)
import Interfaces.DataTypes
import Interfaces.Command
import Interfaces.Utils (emptyIntIState)
import Driver.Options
import Driver.ReadFn
import qualified Static.ToXml as ToXml
import Static.DevGraph
import Common.LibName
import Common.ToXml
import Text.XML.Light as XML
import Network (connectTo, PortID (PortNumber), accept, listenOn)
import System.IO
import Data.List (isInfixOf)
{- | Generates the XML packet that contains information about what
commands can the interface respond to -}
-- In XML mode this appends an @acceptedpgipelems@ element (PGIP 2.0)
-- listing every element name the broker may send; in plain-text mode the
-- state is returned unchanged.
addPGIPHandshake :: CmdlPgipState -> CmdlPgipState
addPGIPHandshake pgipData = if useXML pgipData
  then addPGIPElement pgipData
    $ add_attr (mkAttr "version" "2.0")
    $ unode "acceptedpgipelems" $ map genPgipElem
      [ "askpgip"
      , "askpgml"
      , "askprefs"
      , "getprefs"
      , "setprefs"
      , "proverinit"
      , "proverexit"
      , "startquiet"
      , "stopquiet"
      , "pgmlsymbolon"
      , "pgmlsymboloff"
      , "dostep"
      , "undostep"
      , "redostep"
      , "abortgoal"
      , "forget"
      , "restoregoal"
      , "askids"
      , "showid"
      , "askguise"
      , "parsescript"
      , "showproofstate"
      , "showctxt"
      , "searchtheorems"
      , "setlinewidth"
      , "viewdoc"
      , "doitem"
      , "undoitem"
      , "redoitem"
      , "aborttheory"
      , "retracttheory"
      , "loadfile"
      , "openfile"
      , "closefile"
      , "abortfile"
      , "changecwd"
      , "systemcmd"]
  else pgipData
{- | The function executes a communication step, i.e. waits for input,
processes the message and outputs the answer -}
-- On EOF the PGIP state is flagged to stop; on timeout it either resends
-- the last packet or keeps waiting, depending on 'resendMsgIfTimeout'.
communicationStep :: CmdlPgipState -> CmdlState -> IO (CmdlPgipState, CmdlState)
communicationStep pgD st = do
  -- tries to read a packet from the input
  b <- hIsEOF (hin pgD)
  if b then return (pgD { stop = True }, st) else do
    tmp <- timeoutReadPacket (maxWaitTime pgD) pgD
    case tmp of
      Nothing -> if resendMsgIfTimeout pgD
        {- if the interface receives nothing in the given timeframe
        described by maxWaitTime and the flag resendMsgIfTimeout is
        set, that the interface resends last packet assuming that last
        send was a fail -}
        then do
          nwpgD <- sendPGIPData (hetsOpts st) pgD
          communicationStep nwpgD st
        {- if the flag is not set, that the network waits some more for the
        broker to respond or give a new command -}
        else communicationStep pgD st
      {- if something is received, that the commands are parsed and executed
      and a response is generated -}
      Just smtxt ->
       do
        let cmds = parseMsg pgD smtxt
            refseqNb = getRefseqNb smtxt
        -- the broker's reference sequence number is echoed in the reply
        (nwSt, nwPgD) <- processCmds cmds st $ resetPGIPData $
          pgD { refSeqNb = refseqNb }
        if useXML pgD then do
            nwPgipSt <- sendPGIPData (hetsOpts nwSt) nwPgD
            return (nwPgipSt, nwSt)
          else do
            nwPgD' <- sendMSGData (hetsOpts nwSt) nwPgD
            return (nwPgD', nwSt)
-- | Comunicate over a port
-- Either listens on the configured port (server mode) or connects to a
-- remote host/port (client mode); exactly one of the two must be set
-- (-1 marks "unset"), otherwise this raises an 'error'.
cmdlListenOrConnect2Port :: HetcatsOpts -> CmdlState -> IO CmdlState
cmdlListenOrConnect2Port opts state = do
  let portNb = listen opts
      conPN = connectP opts
      hostName = connectH opts
      swXML = xmlFlag opts
  servH <- if portNb /= -1 then do
      putIfVerbose opts 1 $ "Starting hets. Listen to port " ++ show portNb
      servSock <- listenOn $ PortNumber $ fromIntegral portNb
      (servH, _, _) <- accept servSock
      return servH
    else if conPN /= -1 then do
      putIfVerbose opts 1 $ "Starting hets. Connecting to port "
        ++ show conPN ++ " on host " ++ hostName
      connectTo hostName $ PortNumber $ fromIntegral conPN
    else error "cmdlListenOrConnect2Port: missing port number"
  -- the same handle serves as both input and output of the loop
  cmdlStartLoop swXML servH servH 1000 state
{- | Reads from a handle, it waits only for a certain amount of time,
if no input comes it will return Nothing -}
-- In XML mode a whole <pgip> packet is read; otherwise a single line.
-- NOTE(review): 'hWaitForInput' can throw on EOF -- presumably guarded
-- by the hIsEOF check in 'communicationStep'; confirm.
timeoutReadPacket :: Int -> CmdlPgipState -> IO (Maybe String)
timeoutReadPacket untilTimeout st = do
  let h = hin st
  smtmp <- hWaitForInput h untilTimeout
  if smtmp then do
      ms <- if useXML st
        then readPacket [] h
        else hGetLine h
      return $ Just ms
    else return Nothing
-- | Read lines from the handle until one containing the closing
-- @</pgip>@ tag arrives, returning the accumulated text (each line is
-- re-terminated with a newline).
readPacket :: String -> Handle -> IO String
readPacket acc hf = do
  ln <- hGetLine hf
  let acc' = acc ++ ln ++ "\n"
  if "</pgip>" `isInfixOf` ln
    then return acc'
    else readPacket acc' hf
-- | Initialise the PGIP state over the given handles, perform the
-- ready/handshake exchange, then enter the communication loop.
cmdlStartLoop :: Bool -> Handle -> Handle -> Int -> CmdlState
  -> IO CmdlState
cmdlStartLoop swXML h_in h_out timeOut state = do
  pgData <- genCMDLPgipState swXML h_in h_out timeOut
  let pgD = addPGIPReady $ addPGIPHandshake $ resetPGIPData pgData
  pgD' <- sendPGIPData (hetsOpts state) pgD
  waitLoop pgD' state
-- | Repeat communication steps until the PGIP state requests shutdown,
-- returning the final command-line state.
waitLoop :: CmdlPgipState -> CmdlState -> IO CmdlState
waitLoop pgData state =
  communicationStep pgData state >>= \ (pgData', state') ->
    if stop pgData'
      then return state'
      else waitLoop pgData' state'
{- | Runs a shell in which the communication is expected to be
through XML packets -}
-- Uses stdin/stdout with no timeout (-1 blocks indefinitely).
cmdlRunXMLShell :: CmdlState -> IO CmdlState
cmdlRunXMLShell = cmdlStartLoop True stdin stdout (-1)
-- | Processes a list of input files
-- Proof scripts are executed; anything else is loaded with 'cUse'.
-- The state threads left-to-right through all files.
processInput :: HetcatsOpts -> [FilePath] -> CmdlState -> IO CmdlState
processInput opts ls state = case ls of
    [] -> return state
    l : ll -> (case guess l GuessIn of
      ProofCommand -> cmdlProcessScriptFile True
      _ -> cUse) l state >>= processInput opts ll
-- | Top-level entry point: load the input files, then dispatch to the
-- appropriate interaction mode (remote port, XML shell, interactive
-- shell, or batch -- i.e. just return the state).
cmdlRun :: HetcatsOpts -> IO CmdlState
cmdlRun opts =
  processInput opts (infiles opts) (emptyCmdlState opts) >>=
  if isRemote opts
  then cmdlListenOrConnect2Port opts
  else if interactive opts
       then if xmlFlag opts
            then cmdlRunXMLShell
            else cmdlRunShell
       else return
-- | Run a raw command string through the CMDL interpreter and feed the
-- result (and remaining queued commands) into 'postProcessCmd'.
processString :: [CmdlXMLcommands] -> String -> CmdlState -> CmdlPgipState
  -> IO (CmdlState, CmdlPgipState)
processString pl str st pgSt = do
  (nwSt, mCmd) <- cmdlProcessString "" 0 str st
  postProcessCmd pl nwSt pgSt mCmd
-- | Move the accumulated output/warning/error messages of the CMDL
-- state into the PGIP state.  An error (there should be at most one)
-- takes precedence over the answer/warning pair; the error text is also
-- returned so callers can short-circuit.
processMsgs :: CmdlState -> CmdlPgipState -> (CmdlPgipState, String)
processMsgs nwSt pgSt = (annotated, es)
  where
    o = output nwSt
    ms = outputMsg o
    ws = warningMsg o
    es = errorMsg o
    annotated
      | null es = addPGIPAnswer ms ws pgSt
      | otherwise = addPGIPError es pgSt
-- | Run a structured 'Command' through the CMDL interpreter and feed
-- the result into 'postProcessCmd'.
processCommand :: [CmdlXMLcommands] -> Command -> CmdlState -> CmdlPgipState
  -> IO (CmdlState, CmdlPgipState)
processCommand pl cmd st pgSt = do
  nwSt <- cmdlProcessCmd cmd st
  postProcessCmd pl nwSt pgSt (Just cmd)
-- postprocess a previously run command and recurse
-- On success the remaining commands are processed (after emitting any
-- informational elements); on error the loop stops and a ready element
-- is appended so the broker is not left waiting.
postProcessCmd :: [CmdlXMLcommands] -> CmdlState -> CmdlPgipState
  -> Maybe Command -> IO (CmdlState, CmdlPgipState)
postProcessCmd pl nwSt0 pgSt mCmd = let
  (pgSt1, es) = processMsgs nwSt0 pgSt
  nwSt = nwSt0 { output = emptyCmdlMessage } -- remove messages form cmdl state
  in if null es then processCmds pl nwSt $ informCmd nwSt mCmd pgSt1 else
     return (nwSt, addPGIPReady pgSt1)
-- | After selected commands, attach extra information for the broker:
-- a @informfileloaded@ element once a library file was selected, and the
-- current development graph after global commands preceding proving.
informCmd :: CmdlState -> Maybe Command -> CmdlPgipState -> CmdlPgipState
informCmd nwSt mCmd pgSt1 = let opts = hetsOpts nwSt in
  case (getMaybeLib $ intState nwSt, mCmd) of
    (Just (lN, lEnv), Just cmd) -> case cmd of
      SelectCmd LibFile _ ->
        informDGraph opts lN lEnv $ addPGIPElement pgSt1
          $ add_attr (mkAttr "url" $ libNameToFile lN)
          $ unode "informfileloaded" ()
      GlobCmd g | g < ProveCurrent ->
        informDGraph opts lN lEnv pgSt1
      _ -> pgSt1
    _ -> pgSt1
-- | Append an @informdevelopmentgraph@ element carrying the XML dump of
-- the named library's development graph.
informDGraph :: HetcatsOpts -> LibName -> LibEnv -> CmdlPgipState
  -> CmdlPgipState
informDGraph opts lN lEnv pgSt =
  addPGIPElement pgSt $ unode "informdevelopmentgraph"
    $ ToXml.dGraph opts lEnv lN
    $ lookupDGraph lN lEnv
-- | Executes given commands and returns output message and the new state
-- Each alternative handles one PGIP element; most delegate to
-- 'processString' / 'processCommand', which recurse back here with the
-- remaining command list.
processCmds :: [CmdlXMLcommands] -> CmdlState -> CmdlPgipState
  -> IO (CmdlState, CmdlPgipState)
processCmds cmds state pgipSt = do
  let opts = hetsOpts state
  case cmds of
    [] -> return (state, addPGIPReady pgipSt)
    {- ensures that the response is ended with a ready element
    such that the broker does wait for more input -}
    XmlExecute str : l -> processString l str state (resetPGIPData pgipSt)
    XmlExit : l -> processCmds l state $
      addPGIPAnswer "Exiting prover" [] pgipSt { stop = True }
    XmlAskpgip : l -> processCmds l state $ addPGIPHandshake pgipSt
    XmlProverInit : l -> processCmds l (emptyCmdlState opts) $
      addPGIPAnswer "Prover state was reset" [] pgipSt
    XmlStartQuiet : l -> do
      {- To inform that quiet mode is enabled we need to send this with the
      old options. -}
      let pgD = addPGIPReady $ addPGIPAnswer "Quiet mode enabled" [] pgipSt
      pgipSt' <- if useXML pgD
        then sendPGIPData opts pgD
        else sendMSGData opts pgD
      processCmds l (state { hetsOpts = opts { verbose = 0 } }) pgipSt'
    XmlStopQuiet : l ->
      processCmds l (state { hetsOpts = opts { verbose = 1 } }) $
      addPGIPAnswer "Quiet mode disabled" [] pgipSt
    XmlOpenGoal str : l -> processCommand l (SelectCmd Goal str) state pgipSt
    -- closing a goal proves it first, then drops it via XmlGiveUpGoal
    XmlCloseGoal str : l -> processCommand (XmlGiveUpGoal str : l)
      (GlobCmd ProveCurrent) state pgipSt
    XmlGiveUpGoal str : l -> processString l ("del goals " ++ str) state pgipSt
    XmlUnknown str : l -> processCmds l state $
      addPGIPAnswer [] ("Unknown command: " ++ str) pgipSt
    XmlUndo : l -> processCommand l (GlobCmd UndoCmd) state pgipSt
    XmlRedo : l -> processCommand l (GlobCmd RedoCmd) state pgipSt
    XmlForget str : l -> processString l ("del axioms " ++ str) state pgipSt
    XmlOpenTheory str : l -> processString l str state pgipSt
    XmlCloseTheory _ : l -> let
      nwSt = case i_state $ intState state of
        Nothing -> state
        Just ist -> add2hist [IStateChange $ Just ist] $ state
          { intState = (intState state)
            { i_state = Just $ emptyIntIState (i_libEnv ist)
              $ i_ln ist }}
      in processCmds l nwSt $ addPGIPAnswer "Theory closed" [] pgipSt
    XmlCloseFile _ : l -> processCmds l (emptyCmdlState opts)
      (addPGIPAnswer "File closed" [] pgipSt)
    XmlParseScript str : _ ->
      processCmds [] state . addPGIPElement pgipSt $ addPGIPMarkup str
    XmlLoadFile str : l ->
      processCommand l (SelectCmd LibFile str) state pgipSt
    {- deleting axioms or goals should be implemented via a select command after
    inspecting the current axioms or goals. The current strings do not work. -}
| keithodulaigh/Hets | PGIP/XMLparsing.hs | gpl-2.0 | 11,750 | 0 | 26 | 3,407 | 2,846 | 1,428 | 1,418 | 238 | 21 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | Nix types.
module Stack.Types.Nix where
import Data.Aeson.Extended
import Stack.Prelude
import Generics.Deriving.Monoid (mappenddefault, memptydefault)
-- | Nix configuration. Parameterize by resolver type to avoid cyclic
-- dependency.
-- Fully-resolved (non-monoidal) form of the options; see 'NixOptsMonoid'
-- for the cascading configuration representation.
data NixOpts = NixOpts
  {nixEnable :: !Bool
  ,nixPureShell :: !Bool
  ,nixPackages :: ![Text]
    -- ^ The system packages to be installed in the environment before it runs
  ,nixInitFile :: !(Maybe FilePath)
    -- ^ The path of a file containing preconfiguration of the environment (e.g shell.nix)
  ,nixShellOptions :: ![Text]
    -- ^ Options to be given to the nix-shell command line
  ,nixAddGCRoots :: !Bool
    -- ^ Should we register gc roots so running nix-collect-garbage doesn't remove nix dependencies
  }
  deriving (Show)
-- | An uninterpreted representation of nix options.
-- Configurations may be "cascaded" using mappend (left-biased).
-- 'First' wrappers mean the first configuration source to set a field
-- wins; 'Any' tracks mere presence of a @nix:@ section.
data NixOptsMonoid = NixOptsMonoid
  {nixMonoidDefaultEnable :: !Any
    -- ^ Should nix-shell be defaulted to enabled (does @nix:@ section exist in the config)?
  ,nixMonoidEnable :: !(First Bool)
    -- ^ Is using nix-shell enabled?
  ,nixMonoidPureShell :: !(First Bool)
    -- ^ Should the nix-shell be pure
  ,nixMonoidPackages :: !(First [Text])
    -- ^ System packages to use (given to nix-shell)
  ,nixMonoidInitFile :: !(First FilePath)
    -- ^ The path of a file containing preconfiguration of the environment (e.g shell.nix)
  ,nixMonoidShellOptions :: !(First [Text])
    -- ^ Options to be given to the nix-shell command line
  ,nixMonoidPath :: !(First [Text])
    -- ^ Override parts of NIX_PATH (notably 'nixpkgs')
  ,nixMonoidAddGCRoots :: !(First Bool)
    -- ^ Should we register gc roots so running nix-collect-garbage doesn't remove nix dependencies
  }
  deriving (Eq, Show, Generic)
-- | Decode uninterpreted nix options from JSON/YAML.
-- All fields are optional (..:? collects warnings for unknown keys);
-- 'nixMonoidDefaultEnable' is set by the caller, not the document,
-- hence the constant @Any False@ here.
instance FromJSON (WithJSONWarnings NixOptsMonoid) where
  parseJSON = withObjectWarnings "NixOptsMonoid"
    (\o -> do nixMonoidDefaultEnable <- pure (Any False)
              nixMonoidEnable <- First <$> o ..:? nixEnableArgName
              nixMonoidPureShell <- First <$> o ..:? nixPureShellArgName
              nixMonoidPackages <- First <$> o ..:? nixPackagesArgName
              nixMonoidInitFile <- First <$> o ..:? nixInitFileArgName
              nixMonoidShellOptions <- First <$> o ..:? nixShellOptsArgName
              nixMonoidPath <- First <$> o ..:? nixPathArgName
              nixMonoidAddGCRoots <- First <$> o ..:? nixAddGCRootsArgName
              return NixOptsMonoid{..})
-- | Left-biased combine Nix options
-- Generic-derived via DerivingVia helpers; left bias comes from the
-- 'First' fields.  NOTE(review): GHC >= 8.4 requires a Semigroup
-- instance alongside Monoid -- presumably provided elsewhere or this
-- targets an older compiler; confirm.
instance Monoid NixOptsMonoid where
  mempty = memptydefault
  mappend = mappenddefault
-- YAML key names for the @nix:@ config section, shared by the parser
-- above and the command-line option definitions.

-- | Nix enable argument name.
nixEnableArgName :: Text
nixEnableArgName = "enable"
-- | Nix run in pure shell argument name.
nixPureShellArgName :: Text
nixPureShellArgName = "pure"
-- | Nix packages (build inputs) argument name.
nixPackagesArgName :: Text
nixPackagesArgName = "packages"
-- | shell.nix file path argument name.
nixInitFileArgName :: Text
nixInitFileArgName = "shell-file"
-- | Extra options for the nix-shell command argument name.
nixShellOptsArgName :: Text
nixShellOptsArgName = "nix-shell-options"
-- | NIX_PATH override argument name
nixPathArgName :: Text
nixPathArgName = "path"
-- | Add GC roots arg name
nixAddGCRootsArgName :: Text
nixAddGCRootsArgName = "add-gc-roots"
| MichielDerhaeg/stack | src/Stack/Types/Nix.hs | bsd-3-clause | 3,603 | 0 | 14 | 710 | 516 | 294 | 222 | 83 | 1 |
module PatIn1 where
--Default parameters can be added to definition of functions and simple constants.
--In this example: add parameter 'x' to 'foo'
-- | Ignores its argument and returns the sum of the components of the
-- first pair produced by zipping the two ranges (1 + 3 = 4).
foo :: a -> Int
foo _unused = uncurry (+) (head (zip [1 .. 10] [3 .. 15]))
-- Placeholder default argument for 'foo' (never forced, since 'foo'
-- ignores its parameter).
foo_x = undefined
-- 'main' here is a plain Int (refactoring test fixture, not a program
-- entry point); evaluates to 4 because 'foo' never forces its argument.
main :: Int
main = (foo foo_x)
module Meas () where
import Language.Haskell.Liquid.Prelude
-- | Length of a list, phrased as a right fold (one @1 +@ per element,
-- matching the laziness of the original explicit recursion).
mylen :: [a] -> Int
mylen = foldr (\_ rest -> 1 + rest) 0
-- | Structure-preserving map, written with a local worker so the
-- function argument is bound only once.
mymap g = go
  where
    go [] = []
    go (y : ys) = g y : go ys
-- Shared fixture list for the length property below.
zs = [1..100]
-- LiquidHaskell check: 'mymap' preserves length ('mylen' measures it).
prop2 = liquidAssertB (n1 == n2)
  where n1 = mylen zs
        n2 = mylen $ mymap (+ 1) zs
| ssaavedra/liquidhaskell | tests/pos/meas6.hs | bsd-3-clause | 308 | 0 | 9 | 97 | 160 | 87 | 73 | 11 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE UnliftedFFITypes #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
module GHC.Integer.Logarithms
( wordLog2#
, integerLog2#
, integerLogBase#
) where
#include "MachDeps.h"
#if WORD_SIZE_IN_BITS == 32
# define LD_WORD_SIZE_IN_BITS 5
#elif WORD_SIZE_IN_BITS == 64
# define LD_WORD_SIZE_IN_BITS 6
#else
# error unsupported WORD_SIZE_IN_BITS
#endif
import GHC.Integer.Type
import GHC.Prim
default ()
-- | Calculate the integer logarithm for an arbitrary base.
--
-- The base must be greater than @1@, the second argument, the number
-- whose logarithm is sought, shall be positive, otherwise the
-- result is meaningless.
--
-- The following property holds
--
-- @base ^ 'integerLogBase#' base m <= m < base ^('integerLogBase#' base m + 1)@
--
-- for @base > 1@ and @m > 0@.
--
-- Note: Internally uses 'integerLog2#' for base 2
integerLogBase# :: Integer -> Integer -> Int#
integerLogBase# (S# 2#) m = integerLog2# m
integerLogBase# b m = e'
  where
    !(# _, e' #) = go b
    -- Repeated squaring: go pw returns (m reduced by pw-powers, exponent),
    -- doubling the exponent per squaring level on the way back up.
    go pw | m `ltInteger` pw = (# m, 0# #)
    go pw = case go (sqrInteger pw) of
              (# q, e #) | q `ltInteger` pw -> (# q, 2# *# e #)
              (# q, e #) -> (# q `quotInteger` pw, 2# *# e +# 1# #)
-- | Calculate the integer base 2 logarithm of an 'Integer'. The
-- calculation is more efficient than for the general case, on
-- platforms with 32- or 64-bit words much more efficient.
--
-- The argument must be strictly positive, that condition is /not/ checked.
integerLog2# :: Integer -> Int#
integerLog2# (S# i#) = wordLog2# (int2Word# i#)
integerLog2# (Jn# _) = -1#
integerLog2# (Jp# bn) = go (s -# 1#)
  where
    s = sizeofBigNat# bn
    -- Scan limbs from most significant; skip leading zero limbs, then
    -- combine the limb's log2 with its bit offset in the number.
    go i = case indexBigNat# bn i of
             0## -> go (i -# 1#)
             w -> wordLog2# w +# (uncheckedIShiftL# i LD_WORD_SIZE_IN_BITS#)
-- | Compute base-2 log of 'Word#'
--
-- This is internally implemented as count-leading-zeros machine instruction.
-- (log2 w == bit width - 1 - clz w; yields -1 for a zero argument.)
wordLog2# :: Word# -> Int#
wordLog2# w# = (WORD_SIZE_IN_BITS# -# 1#) -# (word2Int# (clz# w#))
| ezyang/ghc | libraries/integer-gmp/src/GHC/Integer/Logarithms.hs | bsd-3-clause | 2,131 | 0 | 13 | 462 | 408 | 228 | 180 | -1 | -1 |
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings where
import ClassyPrelude.Yesod
import Control.Exception (throw)
import Data.Aeson (Result (..), fromJSON, withObject,
(.!=), (.:?))
import Data.FileEmbed (embedFile)
import Data.Yaml (decodeEither')
import Database.Persist.Postgresql (PostgresConf)
import Language.Haskell.TH.Syntax (Exp, Name, Q)
import Network.Wai.Handler.Warp (HostPreference)
import Yesod.Default.Config2 (applyEnvValue, configSettingsYml)
import Yesod.Default.Util (WidgetFileSettings,
widgetFileNoReload,
widgetFileReload)
-- | Runtime settings to configure this application. These settings can be
-- loaded from various sources: defaults, environment variables, config files,
-- theoretically even a database.
data AppSettings = AppSettings
    { appStaticDir              :: String
    -- ^ Directory from which to serve static files.
    , appDatabaseConf           :: PostgresConf
    -- ^ Configuration settings for accessing the database.
    , appRoot                   :: Text
    -- ^ Base for all generated URLs.
    , appHost                   :: HostPreference
    -- ^ Host/interface the server should bind to.
    , appPort                   :: Int
    -- ^ Port to listen on
    , appIpFromHeader           :: Bool
    -- ^ Get the IP address from the header when logging. Useful when sitting
    -- behind a reverse proxy.
    , appDetailedRequestLogging :: Bool
    -- ^ Use detailed request logging system
    , appShouldLogAll           :: Bool
    -- ^ Should all log messages be displayed?
    , appReloadTemplates        :: Bool
    -- ^ Use the reload version of templates
    , appMutableStatic          :: Bool
    -- ^ Assume that files in the static dir may change after compilation
    , appSkipCombining          :: Bool
    -- ^ Perform no stylesheet/script combining

    -- Example app-specific configuration values.
    , appCopyright              :: Text
    -- ^ Copyright text to appear in the footer of the page
    , appAnalytics              :: Maybe Text
    -- ^ Google Analytics code
    , appAdmin                  :: Text
    -- ^ Identifier of the site administrator (used by the LambdaCMS setup).
    , appLambdacmsVersion       :: Text
    -- ^ LambdaCMS version string displayed by the application.
    }
-- | Parse 'AppSettings' from config/settings.yml.  The @.:?@ fields
-- fall back to @defaultDev@, which is fixed at compile time by the
-- DEVELOPMENT CPP flag.
instance FromJSON AppSettings where
    parseJSON = withObject "AppSettings" $ \o -> do
        let defaultDev =
#if DEVELOPMENT
                True
#else
                False
#endif
        appStaticDir              <- o .: "static-dir"
        appDatabaseConf           <- o .: "database"
        appRoot                   <- o .: "approot"
        appHost                   <- fromString <$> o .: "host"
        appPort                   <- o .: "port"
        appIpFromHeader           <- o .: "ip-from-header"
        appAdmin                  <- o .: "admin"
        appLambdacmsVersion       <- o .: "lambdacms-version"
        appDetailedRequestLogging <- o .:? "detailed-logging" .!= defaultDev
        appShouldLogAll           <- o .:? "should-log-all"   .!= defaultDev
        appReloadTemplates        <- o .:? "reload-templates" .!= defaultDev
        appMutableStatic          <- o .:? "mutable-static"   .!= defaultDev
        appSkipCombining          <- o .:? "skip-combining"   .!= defaultDev
        appCopyright              <- o .: "copyright"
        appAnalytics              <- o .:? "analytics"
        return AppSettings {..}
-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
--
-- For more information on modifying behavior, see:
--
-- https://github.com/yesodweb/yesod/wiki/Overriding-widgetFile
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
-- | How static files should be combined.
combineSettings :: CombineSettings
combineSettings = def
-- The rest of this file contains settings which rarely need changing by a
-- user.

-- | Template-splicing entry point; picks the reloading variant when
-- reload-templates is enabled in the compile-time settings.
widgetFile :: String -> Q Exp
widgetFile = (if appReloadTemplates compileTimeAppSettings
                then widgetFileReload
                else widgetFileNoReload)
              widgetFileSettings
-- | Raw bytes at compile time of @config/settings.yml@
configSettingsYmlBS :: ByteString
configSettingsYmlBS = $(embedFile configSettingsYml)
-- | @config/settings.yml@, parsed to a @Value@.
-- 'throw' here fires at program start if the embedded YAML is invalid.
configSettingsYmlValue :: Value
configSettingsYmlValue = either throw id $ decodeEither' configSettingsYmlBS
-- | A version of @AppSettings@ parsed at compile time from @config/settings.yml@.
-- Environment variables are NOT applied here (applyEnvValue False).
compileTimeAppSettings :: AppSettings
compileTimeAppSettings =
    case fromJSON $ applyEnvValue False mempty configSettingsYmlValue of
        Error e -> error e
        Success settings -> settings
-- The following two functions can be used to combine multiple CSS or JS files
-- at compile time to decrease the number of http requests.
-- Sample usage (inside a Widget):
--
-- > $(combineStylesheets 'StaticR [style1_css, style2_css])

combineStylesheets :: Name -> [Route Static] -> Q Exp
combineStylesheets = combineStylesheets'
    (appSkipCombining compileTimeAppSettings)
    combineSettings

combineScripts :: Name -> [Route Static] -> Q Exp
combineScripts = combineScripts'
    (appSkipCombining compileTimeAppSettings)
    combineSettings
| lambdacms/lambdacms.org | lambdacmsorg-base/Settings.hs | mit | 5,722 | 0 | 12 | 1,695 | 735 | 420 | 315 | -1 | -1 |
-- 600851475143
import Utils.Prime
-- Print the answer to problem 3; 'print' is putStrLn-of-show.
main = print getProblem3Value
-- | Largest prime factor of 600851475143, relying on 'primeFactors'
-- returning factors in ascending order (assumption from Utils.Prime --
-- confirm).
getProblem3Value :: Integer
getProblem3Value = last $ primeFactors 600851475143
| jchitel/ProjectEuler.hs | Problems/Problem0003.hs | mit | 159 | 0 | 6 | 23 | 36 | 19 | 17 | 4 | 1 |
module Main
( main )
where
import Bot.NetHack.FightingTactics ( debugSimulation )
-- | Entry point: run the fighting-tactics debug simulation.
main :: IO ()
main = debugSimulation
| Noeda/adeonbot | bot/fighting-simulation/Main.hs | mit | 125 | 0 | 6 | 24 | 35 | 21 | 14 | 5 | 1 |
{-# LANGUAGE LambdaCase, FlexibleInstances #-}
module Halt.Printing.Pretty where
import Data.List
import Data.Char
import Halt.AST
-- | Human-readable rendering of AST nodes (as opposed to derived Show).
class PrettyShow a where
    prettyShow :: a -> String
unlines' :: [String] -> String --Avoids the newline at the end
unlines' = intercalate "\n"
-- | Wrap a string in round parentheses.
inParentheses :: String -> String
inParentheses s = "(" ++ s ++ ")"
-- | Re-split the joined text into lines and prefix each with one indent
-- step, so multi-line elements indent uniformly.
indent :: [String] -> String
indent = unlines' . map ("    " ++) . lines . unlines'
-- | Render a statement block, one indented statement per line.
showStatements :: [Statement] -> String
showStatements = indent . map prettyShow
-- | Parenthesize only when the flag is set.
parenthesesIf :: Bool -> String -> String
parenthesesIf b = if b then inParentheses else id
-- | Parenthesize only when the flag is NOT set (dual of 'parenthesesIf').
parenthesesUnless :: Bool -> String -> String
parenthesesUnless b = if not b then inParentheses else id
-- | True exactly for function-application nodes.
isFunctionApp :: Expression -> Bool
isFunctionApp e = case e of
    FunctionApp _ _ -> True
    _ -> False
-- | Render an expression, parenthesizing it when it would otherwise
-- bind wrongly inside an application: operator identifiers and nested
-- applications.
parenthesesIfFunctionApp :: Expression -> String
parenthesesIfFunctionApp e = parenthesesIf (condition e) (prettyShow e)
    where condition (Identifier s) | isOperator s = True
                                   | otherwise = False
          condition (FunctionApp _ _) = True
          condition _ = False
-- | An identifier counts as an operator when every character is a
-- Unicode symbol.  NOTE(review): 'all' on "" is True, so the empty
-- string is classified as an operator; also chars like ':' are
-- punctuation, not symbols, under 'isSymbol' -- confirm intended.
isOperator :: String -> Bool
isOperator = all isSymbol
-- | "Simple" types render without parentheses; generics and function
-- types do not.
isSimpleType :: TypeLiteral -> Bool
isSimpleType (Generic _ _) = False
isSimpleType (Function _ _) = False
isSimpleType _ = True
-- | True exactly for function-type nodes.
isFunction :: TypeLiteral -> Bool
isFunction (Function _ _) = True
isFunction _ = False
-- | Top-level declarations; records indent their fields with a tab.
instance PrettyShow Declaration where
    prettyShow = \case
        Import s -> "import " ++ s
        ImportAs m a -> "import " ++ m ++ " as " ++ a
        FunctionType n t -> n ++ " :: " ++ prettyShow t
        FunctionDecl n a b -> n ++ " " ++ unwords a ++ " ->\n" ++ showStatements b
        -- generic parameters are single chars, spread with spaces
        Data n g c -> "data " ++ n ++ " " ++ intersperse ' ' g
                   ++ " = " ++ intercalate " | " (map prettyShow c)
        Record n f -> "record " ++ n ++ " =\n"
            ++ unlines' (map (("\t" ++) . (\(fn, t) -> fn ++ " :: " ++ prettyShow t)) f)
-- | A data constructor: name followed by its argument types, each
-- parenthesized unless simple.
instance PrettyShow (String, [TypeLiteral]) where
    prettyShow (c, ts) = c ++ " "
        ++ unwords (map (\t -> parenthesesUnless (isSimpleType t) $ prettyShow t) ts)
-- | Types; arrows associate right, so only a function on the LEFT of
-- an arrow needs parentheses.  @return c@ is the one-char String [c].
instance PrettyShow TypeLiteral where
    prettyShow = \case
        Parameter c -> return c
        Concrete s -> s
        Generic s t -> prettyShow (s, t)
        Function f t -> parenthesesIf (isFunction t) (prettyShow f) ++ " -> " ++ prettyShow t
        Var -> "var"
        Unit -> "()"
-- | Statements; block bodies are rendered via 'showStatements'.
instance PrettyShow Statement where
    prettyShow = \case
        Assignment t n v -> parenthesesUnless (isSimpleType t) (prettyShow t)
            ++ " " ++ n ++ " = " ++ prettyShow v
        If c t e -> "if " ++ prettyShow c ++ " then\n" ++ showStatements t
            ++ maybe "" (("\nelse\n" ++) . showStatements) e
        For v s bnd bdy -> "for " ++ v ++ " from " ++ prettyShow s ++ " to " ++ prettyShow bnd
            ++ "\n" ++ showStatements bdy
        Return v -> "return " ++ prettyShow v
        NakedExpr e -> prettyShow e
-- | Loop bounds; a dynamic bound shows its static fallback after '|'.
instance PrettyShow Bound where
    prettyShow (StaticBound e) = prettyShow e
    prettyShow (DynamicWithStaticBound d s) = prettyShow d ++ " | " ++ prettyShow s
-- | Expressions; application arguments are parenthesized when they are
-- themselves applications or operators.
instance PrettyShow Expression where
    prettyShow = \case
        FunctionApp f a -> parenthesesIfFunctionApp f ++ " "
            ++ unwords (map parenthesesIfFunctionApp a)
        IntLiteral i -> show i
        DoubleLiteral d -> show d
        StringLiteral s -> show s
        Identifier s -> s
-- | A whole module: declarations, one per line (trailing newline kept).
instance PrettyShow [Declaration] where
    prettyShow d = unlines $ map prettyShow d
{-
testPrint = do
basic <- basicAST
putStrLn $ prettyShow basic
testParse = do
basic <- basicAST
let str = prettyShow basic
putStrLn "Original:"
print basic
putStrLn "New:"
let new = parseHelper program str
print new
putStrLn $ "Equal: " ++ show (basic == new)
-}
| LukaHorvat/Halt | src/Halt/Printing/Pretty.hs | mit | 4,057 | 0 | 18 | 1,268 | 1,219 | 607 | 612 | 81 | 3 |
module Main where
import System.Environment
-- | Prompt for the user's name and greet them.
main :: IO ()
main =
  putStrLn "What is your name?" >>
  getLine >>= \name ->
    putStrLn ("Hello " ++ name)
module ProjectEuler.Problem59
( problem
) where
import Data.Word
import Data.List.Split
import Data.List
import Control.Arrow
import Data.Function
import Data.Char
import Data.Bits
import qualified Data.Text as T
import ProjectEuler.GetData
-- | Problem 59: XOR decryption of p059_cipher.txt (status: solved).
problem :: Problem
problem = pureProblemWithData "p059_cipher.txt" 59 Solved compute
-- | Parse the comma-separated byte list from the data file by wrapping
-- it in brackets and reusing the standard list 'read'.
getCipherText :: T.Text -> [Word8]
getCipherText raw = read (concat ["[", T.unpack raw, "]"])
-- | Byte frequency table, most frequent byte first (stable sort, so
-- ties keep ascending byte order from the grouping step).
getFreq :: [Word8] -> [(Word8, Int)]
getFreq ws = sortBy moreFrequentFirst counts
  where
    -- 'group' never yields an empty run, so the pattern is total here.
    counts = [ (b, length run) | run@(b : _) <- group (sort ws) ]
    moreFrequentFirst a b = compare (snd b) (snd a)
guessKey :: [(Word8, Int)] -> Word8
-- The plaintext's most common character is assumed to be a space, so
-- XOR-ing the most frequent cipher byte with ' ' recovers the key byte.
guessKey freqs = mostFrequent `xor` fromIntegral (ord ' ')
  where
    mostFrequent = fst (head freqs)
-- for XOR encryptions, encryption is the same as decryption
-- (XOR is commutative, so pairing the text against the cycled key in
-- either order gives the same stream; output length follows the text).
encrypt :: [Word8] -> [Word8] -> [Word8]
encrypt key text = zipWith xor text (cycle key)
-- | Answer: sum of the plaintext bytes after recovering the 3-byte key.
compute :: T.Text -> Int
compute raw = sum $ fromIntegral <$> plaintext
  where
    cipher = getCipherText raw
    -- key is three lowercase letters
    -- we divide ciphertext into 3 chunks
    -- and do frequence analysis to see if that helps
    cipherGroups = transpose $ chunksOf 3 cipher
    key = map (guessKey . getFreq) cipherGroups
    plaintext :: [Word8]
    plaintext = encrypt key cipher
| Javran/Project-Euler | src/ProjectEuler/Problem59.hs | mit | 1,232 | 0 | 11 | 238 | 359 | 202 | 157 | 29 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module App.User where
import App.Common
import Data.IntMap as IM
-- | A user record.  'makeLenses' derives the 'firstName' and 'lastName'
-- lenses from the underscore-prefixed fields.
data User = User
  { _firstName :: Text
  , _lastName :: Text
  } deriving (Eq, Ord, Typeable, Show)
makeLenses ''User
-- Version 0 of the on-disk SafeCopy format; 'base' = no migration ancestor.
deriveSafeCopy 0 'base ''User
-- | The user table, keyed by the integer ids handed out by 'addNext'.
newtype Users = Users {unUsers :: IntMap User}
    deriving (Eq, Typeable)
deriveSafeCopy 0 'base ''Users
-- | Acidic update: overwrite the stored first name.
setFirstName :: Text -> Update User ()
setFirstName f = modify (firstName .~ f)
-- | Acidic update: overwrite the stored last name.
setLastName :: Text -> Update User ()
setLastName f = modify (lastName .~ f)
-- | Query: read the first name.
getFirst :: Query User Text
getFirst = asks _firstName
-- | Query: read the last name.
--
-- Fixed: this previously returned '_firstName' (copy-paste bug from
-- 'getFirst'), so callers could never observe the stored last name.
getLast :: Query User Text
getLast = asks _lastName
-- Register the acidic events for 'User'.  Only the two setters are events;
-- 'getFirst'/'getLast' are not exposed through makeAcidic here.
makeAcidic ''User ['setFirstName, 'setLastName]
-- | Insert a user under the next free key and return that key: one past the
-- current maximum key, or 0 when the table is empty.
addNext :: User -> Update Users Int
addNext u = do
  Users im <- get
  case maxViewWithKey im of
    Nothing -> put (Users $ IM.singleton 0 u) >> return 0
    Just ((k,_),_) -> put (Users $ IM.insert (k+1) u im) >> return (k+1)
-- | Look up a user by key, if present.
getValue :: Int -> Query Users (Maybe User)
getValue k = asks (IM.lookup k . unUsers)
-- Register the acidic events for the 'Users' table.
makeAcidic ''Users ['addNext, 'getValue]
| axman6/BelSES-time-tracker | src/App/User.hs | mit | 1,113 | 10 | 16 | 221 | 448 | 232 | 216 | 33 | 2 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingVia #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RecordWildCards #-}
module Unison.Codebase.SqliteCodebase.Branch.Dependencies where
import Data.Foldable (toList)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid.Generic (GenericMonoid (..), GenericSemigroup (..))
import Data.Set (Set)
import qualified Data.Set as Set
import GHC.Generics (Generic)
import Unison.Codebase.Branch (Branch (Branch), Branch0, EditHash)
import qualified Unison.Codebase.Branch as Branch
import qualified Unison.Codebase.Causal as Causal
import Unison.Codebase.Patch (Patch)
import Unison.NameSegment (NameSegment)
import Unison.Reference (Reference, pattern Derived)
import Unison.Referent (Referent)
import qualified Unison.Referent as Referent
import qualified Unison.Util.Relation as R
import qualified Unison.Util.Star3 as Star3
import Unison.Hash (Hash)
-- | Child branches paired with the (deferred) action that loads each one.
type Branches m = [(Branch.Hash, m (Branch m))]
-- | The set of objects a branch refers to: patches, term components and
-- decl components, each identified by hash.  The derived 'Semigroup' and
-- 'Monoid' take field-wise set unions.
data Dependencies = Dependencies
  { patches :: Set EditHash
  , terms :: Set Hash
  , decls :: Set Hash
  }
  deriving Show
  deriving Generic
  deriving Semigroup via GenericSemigroup Dependencies
  deriving Monoid via GenericMonoid Dependencies
-- | List-based counterpart of 'Dependencies'; see 'to''.
data Dependencies' = Dependencies'
  { patches' :: [EditHash]
  , terms' :: [Hash]
  , decls' :: [Hash]
  }
  deriving (Eq, Show)
  deriving Generic
  deriving Semigroup via GenericSemigroup Dependencies'
  deriving Monoid via GenericMonoid Dependencies'
-- | Flatten the set-based 'Dependencies' into its list-based counterpart.
to' :: Dependencies -> Dependencies'
to' deps =
  Dependencies'
    (toList (patches deps))
    (toList (terms deps))
    (toList (decls deps))
-- | Dependencies of a branch's head node, plus the causal tails (ancestor
-- branches) still to be explored.
fromBranch :: Applicative m => Branch m -> (Branches m, Dependencies)
fromBranch (Branch c) = case c of
  Causal.One _hh e -> fromBranch0 e
  Causal.Cons _hh e (h, m) -> fromBranch0 e <> fromTails (Map.singleton h m)
  Causal.Merge _hh e tails -> fromBranch0 e <> fromTails tails
  where
    -- Tails contribute only pending branch loads, no direct dependencies.
    fromTails m = ([(h, Branch <$> mc) | (h, mc) <- Map.toList m], mempty)
-- | Direct dependencies of a single branch snapshot: its children (as
-- pending loads) and the hashes referenced by its terms, types and patches.
fromBranch0 :: Applicative m => Branch0 m -> (Branches m, Dependencies)
fromBranch0 b =
  ( fromChildren (Branch._children b)
  , fromTermsStar (Branch._terms b)
    <> fromTypesStar (Branch._types b)
    <> fromEdits (Branch._edits b) )
  where
  fromChildren :: Applicative m => Map NameSegment (Branch m) -> Branches m
  fromChildren m = [ (Branch.headHash b, pure b) | b <- toList m ]
  -- References in the name dimension of a star.
  references :: Branch.Star r NameSegment -> [r]
  references = toList . R.dom . Star3.d1
  -- Metadata values attached in the star's third dimension.
  mdValues :: Branch.Star r NameSegment -> [Reference]
  mdValues = fmap snd . toList . R.ran . Star3.d3
  fromTermsStar :: Branch.Star Referent NameSegment -> Dependencies
  fromTermsStar s = Dependencies mempty terms decls where
    -- Derived term refs and derived metadata values are term components;
    -- constructor references point at their decl component.
    terms = Set.fromList $
      [ h | Referent.Ref (Derived h _ _) <- references s] ++
      [ h | (Derived h _ _) <- mdValues s]
    decls = Set.fromList $
      [ h | Referent.Con (Derived h _i _n) _ _ <- references s ]
  fromTypesStar :: Branch.Star Reference NameSegment -> Dependencies
  fromTypesStar s = Dependencies mempty terms decls where
    terms = Set.fromList [ h | (Derived h _ _) <- mdValues s ]
    decls = Set.fromList [ h | (Derived h _ _) <- references s ]
  fromEdits :: Map NameSegment (EditHash, m Patch) -> Dependencies
  fromEdits m = Dependencies (Set.fromList . fmap fst $ toList m) mempty mempty
| unisonweb/platform | parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Dependencies.hs | mit | 3,305 | 0 | 16 | 590 | 1,139 | 617 | 522 | -1 | -1 |
module Sudoku.Strategy where
import Sudoku
import Prelude
import Data.Maybe
import Data.List
-- | A solving strategy: annotates every cell of the grid with the string of
-- candidate characters still possible for it.
type Solver = (Sudoku -> [[(Char, String)]])
-- | Characters that may legally be placed at cell (i, j).
-- An already-taken cell has no candidates.
findCandidates :: Sudoku -> Int -> Int -> [Char]
findCandidates su i j | isTaken su i j = ""
                      | otherwise = filter ((isAllowed') su i j) (allowedChars su)
-- | Locate the first cell with exactly one remaining candidate, returning
-- its (row, column) position and the character to place there.  The flat
-- index into the concatenated rows is converted back to 2-D coordinates.
findResolvableCell :: Sudoku -> Solver -> Maybe (Int, Int, Char)
findResolvableCell su f =
  case findIndex singleCandidate cells of
    Nothing -> Nothing
    Just ix ->
      let (_, cs) = cells !! ix
      in Just (div ix (rowCount su), mod ix (columnCount su), head cs)
  where
    cells = concat (f su)
    singleCandidate (_, cs) = length cs == 1
-- | Try each solver in turn and apply the first resolvable cell found;
-- the grid is returned unchanged when no strategy makes progress.
step :: Sudoku -> [Solver] -> Sudoku
step su [] = su
step su (f:fs) =
  case findResolvableCell su f of
    Nothing -> step su fs
    Just (i, j, c) -> update su i j c
run :: Sudoku -> Solver -> Sudoku
run su f | outcome /= su = run outcome f
| otherwise = outcome
where
outcome = map (\r -> (map resolve r)) (f su)
resolve (s, cs) | length cs == 1 = head cs
| otherwise = s
| thomasbrus/sudoku-solver | src/Sudoku/Strategy.hs | mit | 1,184 | 0 | 12 | 390 | 541 | 273 | 268 | 32 | 1 |
module ReSpec where
import Test.Hspec
import Re
-- | Unit tests for 'match', covering malformed patterns, literal patterns,
-- and each supported quantifier (@*@, @?@, @+@).
-- NOTE(review): the "charater" typo below is in a test label (a runtime
-- string), so it is deliberately left unchanged here.
spec :: Spec
spec = do
  describe "match" $ do
    it "broken pattern" $ do
      (match "++" "12345") `shouldBe` Nothing
      (match "1++" "12345") `shouldBe` Nothing
    it "no special charater pattern" $ do
      (match "12345" "12345") `shouldBe` Just True
      (match "12345" "1234") `shouldBe` Just False
    it "*" $ do
      (match "a*b" "b") `shouldBe` Just True
      (match "a*b" "ab") `shouldBe` Just True
      (match "a*b" "aab") `shouldBe` Just True
    it "?" $ do
      (match "a?b" "b") `shouldBe` Just True
      (match "a?b" "ab") `shouldBe` Just True
      (match "a?b" "aab") `shouldBe` Just False
    it "+" $ do
      (match "a+b" "b") `shouldBe` Just False
      (match "a+b" "ab") `shouldBe` Just True
      (match "a+b" "aab") `shouldBe` Just True
-- | Entry point: run the whole 'spec'.
main :: IO()
main = hspec spec
| czchen/haskell-regular-expression-virtual-machine | test/ReSpec.hs | mit | 971 | 0 | 15 | 337 | 357 | 176 | 181 | 27 | 1 |
module Game.Shared.Concurrency where
import Control.Concurrent.MVar
import Data.Serialize
import Data.List
-----------------
-- Concurrency --
-----------------
-- |Initialises a list-type MVar holding the empty list.
initMVar :: IO (MVar [a])
initMVar = newMVar []
-- |Prepends a single value onto the list held by an MVar.
pushMVar :: MVar [a] -- ^MVar to push value to
         -> a -- ^The value to push
         -> IO ()
pushMVar box value = modifyMVar_ box (return . (value :))
-- |Prepends a whole list of values onto the list held by an MVar.
pushAllMVar :: MVar [a] -- ^MVar to push values to
            -> [a] -- ^List of values to push
            -> IO ()
pushAllMVar box values = modifyMVar_ box (return . (values ++))
-- |Atomically empties an MVar, returning everything it held.
popAllMVar :: MVar [a] -- ^The MVar to pop values from
           -> IO [a] -- ^The values that were in the MVar
popAllMVar box = modifyMVar box (\held -> return ([], held))
-- |Removes the first occurrence of a specific value from an MVar's list.
removeFromMVar :: Eq a => MVar [a] -- ^MVar to remove value from
               -> a -- ^The value to remove
               -> IO ()
removeFromMVar box value = modifyMVar_ box (return . delete value)
{-# LANGUAGE DeriveFunctor, DeriveFoldable,
DeriveTraversable, TupleSections #-}
-- |This module contains the AST of Zeno's internal functional syntax,
-- which is essentially GHC core.
module Zeno.Expression (
Expr (..), Bindings (..), Alt (..),
AltCon (..), ExprTraversable (..),
Binding, Term, ExprSubstitution,
isTerm, isVar, fromVar, isApp, isCse, isErr, isLam,
flattenBindings, freeVariables, isRec,
flattenApp, unflattenApp, termFunction,
flattenLambdas, unflattenLambdas,
boundVars, boundExprs,
mapExprMaybe, mapExpr,
isOperator,
) where
import Prelude ()
import Zeno.Prelude
import Zeno.Id
import Zeno.Traversing
import Zeno.Utils
import Zeno.Unification
import qualified Data.Map as Map
import qualified Data.Set as Set
-- |This is an expression for which 'isTerm' should be 'True'.
type Term a = Expr a
-- | A variable paired with the expression bound to it.
type Binding a = (a, Expr a)
-- | A substitution mapping expressions to expressions.
type ExprSubstitution a = Substitution (Expr a) (Expr a)
-- |Expressions in Zeno, essentially GHC core syntax.
data Expr a
  = Err
  | Var !a
  | App !(Expr a) !(Expr a)
  | Let !(Bindings a) !(Expr a)
  | Lam !a !(Expr a)
  -- |Case analysis has an identifier for our 'CriticalTerm' technique
  | Cse !Id !(Expr a) ![Alt a]
  deriving ( Eq, Ord, Functor, Foldable, Traversable )
-- | Non-recursive or (mutually) recursive let-bindings.
data Bindings a
  = NonRec !(Binding a)
  | Rec ![Binding a]
  deriving ( Eq, Ord, Functor, Foldable, Traversable )
-- | One alternative of a case analysis: a constructor pattern,
-- the variables it binds, and the branch expression.
data Alt a
  = Alt { altCon :: !(AltCon a),
          altVars :: ![a],
          altExpr :: !(Expr a) }
  deriving ( Eq, Ord, Functor, Foldable, Traversable )
-- | Either a named constructor pattern or the default (wildcard) branch.
data AltCon a
  = AltCon !a
  | AltDefault
  deriving ( Eq, Ord, Functor, Foldable, Traversable )
-- | Structures whose immediate sub-expressions can be traversed
-- monadically; 'exprList' collects them via a Writer by default.
class ExprTraversable f where
  mapExprM :: Monad m => (Expr a -> m (Expr a)) -> f a -> m (f a)
  exprList :: f a -> [Expr a]
  exprList = execWriter . mapExprM (\x -> tell [x] >> return x)
-- | Pure version of 'mapExprM'.
mapExpr :: ExprTraversable f => (Expr a -> Expr a) -> f a -> f a
mapExpr = mapM_to_fmap mapExprM
-- | 'Maybe'-valued version of 'mapExprM'.
mapExprMaybe :: ExprTraversable f =>
  (Expr a -> Maybe (Expr a)) -> f a -> Maybe (f a)
mapExprMaybe = mapM_to_mapMaybe mapExprM
-- An expression's only "immediate sub-expression" is itself.
instance ExprTraversable Expr where
  mapExprM = id
  exprList = return
-- Traverses the right-hand side of every binding.
instance ExprTraversable Bindings where
  mapExprM f (NonRec (var, expr)) =
    return (NonRec . (var, )) `ap` f expr
  mapExprM f (Rec binds) =
    return Rec `ap` mapM mapBind binds
    where
    mapBind (var, expr) = liftM (var, ) (f expr)
  exprList = boundExprs
-- | View any 'Bindings' as a flat list of variable/expression pairs.
flattenBindings :: Bindings a -> [Binding a]
flattenBindings (NonRec bind) = [bind]
flattenBindings (Rec binds) = binds
-- | The variables bound by a binding group.
boundVars :: Bindings a -> [a]
boundVars = map fst . flattenBindings
-- | The right-hand sides of a binding group.
boundExprs :: Bindings a -> [Expr a]
boundExprs = map snd . flattenBindings
-- | Whether the binding group is (mutually) recursive.
isRec :: Bindings a -> Bool
isRec (Rec {}) = True
isRec _ = False
-- | All variables that occur free, computed as every variable in the
-- structure minus those bound by a lambda or a case alternative.
-- NOTE(review): variables bound by 'Let' are not collected as binders
-- here — confirm that this is intended.
freeVariables :: (Ord a, WithinTraversable (Expr a) (f a), Foldable f) =>
  f a -> [a]
freeVariables expr = Set.toList freeVars
  where
  allVars = Set.fromList (toList expr)
  -- Collected as a side effect of a (no-op) traversal.
  boundVars = execWriter (mapWithinM writeBound expr)
  freeVars = allVars `Set.difference` boundVars
  writeBound expr@(Cse _ _ alts) = do
    let bound = concatMap altVars alts
    tell (Set.fromList bound)
    return expr
  writeBound expr@(Lam var _) = do
    tell (Set.singleton var)
    return expr
  writeBound expr =
    return expr
-- |Is this expression a /term/: built only from variables, 'Err' and
-- application, with a term in every function position?
isTerm :: Expr a -> Bool
isTerm expr =
  case expr of
    App fun _ -> isTerm fun
    Err -> True
    Var _ -> True
    _ -> False
-- | Is this a variable?
isVar :: Expr a -> Bool
isVar (Var _) = True
isVar _ = False
-- | Is this the error value?
isErr :: Expr a -> Bool
isErr Err = True
isErr _ = False
-- | Is this an application?
isApp :: Expr a -> Bool
isApp (App _ _) = True
isApp _ = False
-- | Is this a case analysis?
isCse :: Expr a -> Bool
isCse (Cse {}) = True
isCse _ = False
-- | Is this a lambda?
isLam :: Expr a -> Bool
isLam (Lam {}) = True
isLam _ = False
-- | Extract the variable from a 'Var'.
-- WARNING: partial — callers must check 'isVar' first.
fromVar :: Expr a -> a
fromVar (Var v) = v
-- | Spine of an application: @f x y@ becomes @[f, x, y]@.
-- Always returns a non-empty list.
flattenApp :: Expr a -> [Expr a]
flattenApp (App lhs rhs) = flattenApp lhs ++ [rhs]
flattenApp expr = [expr]
-- | Inverse of 'flattenApp'; the empty list yields 'Err'.
unflattenApp :: [Expr a] -> Expr a
unflattenApp [] = Err
unflattenApp xs = foldl1 App xs
-- | Peel off all leading lambdas, returning the bound variables and body.
flattenLambdas :: Expr a -> ([a], Expr a)
flattenLambdas (Lam v rhs) =
  let (vs, rhs') = flattenLambdas rhs in (v : vs, rhs')
flattenLambdas expr = ([], expr)
-- | Inverse of 'flattenLambdas': wrap a body in lambdas for each variable.
unflattenLambdas :: [a] -> Expr a -> Expr a
unflattenLambdas = flip (foldr Lam)
-- | The variable in head (function) position of a term, if any.
termFunction :: Term a -> Maybe a
termFunction term
  | Var v : _ <- flattenApp term = Just v
  | otherwise = Nothing
-- One-way (matching) unification: variables on the left may be bound to
-- arbitrary expressions on the right; all other mismatches fail.
instance Ord a => Unifiable (Expr a) where
  type Names (Expr a) = Expr a

  unify Err Err = mempty
  unify (Var v1) (Var v2)
    | v1 == v2 = mempty
  unify var@(Var _) expr =
    Unifier (Map.singleton var expr)
  unify (App f1 a1) (App f2 a2) =
    unify f1 f2 `mappend` unify a1 a2
  unify _ _ = NoUnifier
-- Bottom-up traversal: sub-expressions are transformed first, then the
-- rebuilt node itself is passed to the visitor.
instance WithinTraversable (Expr a) (Expr a) where
  mapWithinM f (App lhs rhs) =
    f =<< return App `ap` mapWithinM f lhs `ap` mapWithinM f rhs
  mapWithinM f (Cse id lhs alts) =
    f =<< return (Cse id) `ap` mapWithinM f lhs `ap` mapM (mapWithinM f) alts
  mapWithinM f (Let bind rhs) =
    f =<< return Let `ap` mapWithinM f bind `ap` mapWithinM f rhs
  mapWithinM f (Lam var rhs) =
    f =<< return (Lam var) `ap` mapWithinM f rhs
  mapWithinM f expr =
    f =<< return expr
-- | Traverse within a single binding's right-hand side.
mwBindingM :: Monad m =>
  (Expr a -> m (Expr a)) -> Binding a -> m (Binding a)
mwBindingM f (b, x) = return (b,) `ap` mapWithinM f x
instance WithinTraversable (Expr a) (Bindings a) where
  mapWithinM f (NonRec b) =
    return NonRec `ap` mwBindingM f b
  mapWithinM f (Rec bs) =
    return Rec `ap` mapM (mwBindingM f) bs
-- Note: only the branch expression is traversed, not the pattern.
instance WithinTraversable (Expr a) (Alt a) where
  mapWithinM f (Alt con binds rhs) =
    return (Alt con binds) `ap` mapWithinM f rhs
-- Rendering runs the 'Indented' reader starting at indentation level 0.
instance Show a => Show (Expr a) where
  show = flip runReader 0 . showExpr
instance Show a => Show (Bindings a) where
  show = flip runReader 0 . showBindings
-- | Render one case alternative as "pattern -> body" on a fresh line.
showAlt :: Show a => Alt a -> Indented String
showAlt (Alt con binds rhs) = do
  i <- indentation
  rhs_s <- indent $ showExpr rhs
  let con_s = case con of
        AltDefault -> "_"
        AltCon var -> show var ++ concatMap ((" " ++) . show) binds
  return $ i ++ con_s ++ " -> " ++ rhs_s
-- | Render a single binding as "var = rhs".
showBinding :: Show a => (a, Expr a) -> Indented String
showBinding (var, rhs) = do
  rhs' <- indent (showExpr rhs)
  return $ show var ++ " = " ++ rhs'
-- | Render a binding group; recursive groups are joined with "and".
showBindings :: Show a => Bindings a -> Indented String
showBindings (Rec []) = return ""
showBindings (NonRec bind) = do
  bind' <- showBinding bind
  i <- indentation
  return $ i ++ "let " ++ bind'
showBindings (Rec binds) = do
  i <- indentation
  binds' <- intercalate (i ++ "and ") <$> mapM showBinding binds
  return $ i ++ "let rec " ++ binds'
-- | True when a name contains any character outside the ordinary
-- identifier set (alphanumerics, underscore, dot), i.e. it should be
-- rendered infix.  '$' was deliberately left out of the normal set.
isOperator :: String -> Bool
isOperator = not . all isNormalChar
  where
    isNormalChar :: Char -> Bool
    isNormalChar c = c == '_' || c == '.' || isAlphaNum c
-- | Pretty-print an expression inside the 'Indented' reader.
-- Special cases: tuple constructors are printed with comma syntax,
-- operator applications are printed infix, and singleton cons chains
-- are printed as list literals.
showExpr :: Show a => Expr a -> Indented String
showExpr Err = return "undefined"
showExpr (Var var) = (return . stripModuleName . show) var
-- Saturated tuple constructors: render "(a, b, ...)".
showExpr (flattenApp -> Var fun : args)
  | (show fun == "(,)" && length args == 2)
  || (show fun == "(,,)" && length args == 3)
  || (show fun == "(,,,)" && length args == 4) = do
    args' <- mapM showExpr args
    return $
      "(" ++ intercalate ", " args' ++ ")"
-- Binary operator applied to two terms: render infix.
showExpr (App (App (Var fun) arg1) arg2)
  | isOperator fun_s && isTerm arg1 && isTerm arg2 = do
    arg1' <- (indent . showExpr) arg1
    arg2' <- (indent . showExpr) arg2
    let arg1_s = if isTerm arg1 then arg1' else "(" ++ arg1' ++ ")"
        arg2_s = if isTerm arg2 then arg2' else "(" ++ arg2' ++ ")"
    if fun_s == ":" && arg2_s == "[]"
      then return $ "[" ++ arg1_s ++ "]"
      else return $ "(" ++ arg1_s ++ " " ++ fun_s ++ " " ++ arg2_s ++ ")"
  where
  fun_s = show fun
-- General application: parenthesise non-atomic sides.
showExpr (App lhs rhs) = do
  lhs' <- (indent . showExpr) lhs
  rhs' <- (indent . showExpr) rhs
  let lhs_s = if isVar lhs || isApp lhs then lhs' else "(" ++ lhs' ++ ")"
      rhs_s = if isVar rhs then rhs' else "(" ++ rhs' ++ ")"
  return $ lhs_s ++ " " ++ rhs_s
-- Lambdas: collapse nested binders into one "\x y -> ..." form.
showExpr expr@(Lam {}) = do
  let (vars, rhs) = flattenLam expr
      vars_s = intercalate " " (map show vars)
  rhs_s <- showExpr rhs
  return $ "\\" ++ vars_s ++ " -> " ++ rhs_s
  where
  flattenLam :: Expr a -> ([a], Expr a)
  flattenLam (Lam var rhs) = (var : vars, real_rhs)
    where (vars, real_rhs) = flattenLam rhs
  flattenLam expr = ([], expr)
showExpr (Cse id lhs alts) = do
  alts' <- indent . concatMapM showAlt $ alts
  lhs' <- indent . showExpr $ lhs
  let lhs'' = if isTerm lhs then lhs' else "(" ++ lhs' ++ ")"
  i <- indentation
  return $ i ++ "case " ++ lhs'' ++ " of" ++ alts'
showExpr (Let binds rhs) = do
  binds' <- showBindings binds
  rhs' <- showExpr rhs
  return $ binds' ++ " in " ++ rhs'
| trenta3/zeno-0.2.0.1 | src/Zeno/Expression.hs | mit | 9,053 | 0 | 17 | 2,389 | 3,715 | 1,878 | 1,837 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-missing-fields #-}
-- overlapping instances is for automatic lifting
-- while avoiding an orphan of Lift for Text
{-# LANGUAGE OverlappingInstances #-}
-- | This module provides utilities for creating backends. Regular users do not
-- need to use this module.
module Database.Persist.TH
( -- * Parse entity defs
persistWith
, persistUpperCase
, persistLowerCase
, persistFileWith
-- * Turn @EntityDef@s into types
, mkPersist
, MkPersistSettings
, mpsBackend
, mpsGeneric
, mpsPrefixFields
, mpsEntityJSON
, mpsGenerateLenses
, EntityJSON(..)
, mkPersistSettings
, sqlSettings
, sqlOnlySettings
-- * Various other TH functions
, mkMigrate
, mkSave
, mkDeleteCascade
, share
, derivePersistField
, derivePersistFieldJSON
, persistFieldFromEntity
-- * Internal
, packPTH
, lensPTH
) where
import Prelude hiding ((++), take, concat, splitAt, exp)
import Database.Persist
import Database.Persist.Sql (Migration, migrate, SqlBackend, PersistFieldSql)
import Database.Persist.Quasi
import Language.Haskell.TH.Lib (varE)
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Syntax
import Data.Char (toLower, toUpper)
import Control.Monad (forM, (<=<), mzero)
import qualified System.IO as SIO
import Data.Text (pack, Text, append, unpack, concat, uncons, cons, stripPrefix, stripSuffix)
import Data.Text.Encoding (decodeUtf8)
import qualified Data.Text.IO as TIO
import Data.List (foldl')
import Data.Maybe (isJust, listToMaybe, mapMaybe, fromMaybe)
import Data.Monoid (mappend, mconcat)
import Text.Read (readPrec, lexP, step, prec, parens, Lexeme(Ident))
import qualified Data.Map as M
import qualified Data.HashMap.Strict as HM
import Data.Aeson
( ToJSON (toJSON), FromJSON (parseJSON), (.=), object
, Value (Object), (.:), (.:?)
, eitherDecodeStrict'
)
import Control.Applicative (pure, (<$>), (<*>))
import Database.Persist.Sql (sqlType)
import Data.Proxy (Proxy (Proxy))
import Web.PathPieces (PathPiece, toPathPiece, fromPathPiece)
import GHC.Generics (Generic)
import qualified Data.Text.Encoding as TE
-- | Converts a quasi-quoted syntax into a list of entity definitions, to be
-- used as input to the template haskell generation code (mkPersist).
--
-- Only 'quoteExp' is supplied; the module is compiled with
-- -fno-warn-missing-fields, so using this quoter in a pattern, type or
-- declaration context would fail at runtime.
persistWith :: PersistSettings -> QuasiQuoter
persistWith ps = QuasiQuoter
    { quoteExp = parseReferences ps . pack
    }
-- | Apply 'persistWith' to 'upperCaseSettings'.
persistUpperCase :: QuasiQuoter
persistUpperCase = persistWith upperCaseSettings
-- | Apply 'persistWith' to 'lowerCaseSettings'.
persistLowerCase :: QuasiQuoter
persistLowerCase = persistWith lowerCaseSettings
-- | Same as 'persistWith', but uses an external file instead of a
-- quasiquotation.
persistFileWith :: PersistSettings -> FilePath -> Q Exp
persistFileWith ps fp = do
#ifdef GHC_7_4
    -- Recompile when the entity file changes (needs GHC >= 7.4).
    qAddDependentFile fp
#endif
    h <- qRunIO $ SIO.openFile fp SIO.ReadMode
    -- Read as BOM-tolerant UTF-8 so files saved by Windows editors parse.
    qRunIO $ SIO.hSetEncoding h SIO.utf8_bom
    s <- qRunIO $ TIO.hGetContents h
    parseReferences ps s
-- | Parse entity definitions and resolve the cross-entity references:
-- calls parse to Quasi.parse individual entities in isolation,
-- afterwards sets references to other entities.  The result is a lifted
-- expression of type @[EntityDef]@ (via 'EntityDefSqlTypeExp').
parseReferences :: PersistSettings -> Text -> Q Exp
parseReferences ps s = lift $
     map (mkEntityDefSqlTypeExp entityMap) entsWithEmbeds
  where
    -- every EntityDef could reference each-other (as an EmbedRef)
    -- let Haskell tie the knot
    entityMap = M.fromList $ map (\ent -> (entityHaskell ent, toEmbedEntityDef ent)) entsWithEmbeds
    entsWithEmbeds = map setEmbedEntity rawEnts
    setEmbedEntity ent = ent
        { entityFields = map (setEmbedField entityMap) $ entityFields ent
        }
    rawEnts = parse ps s
-- | @FooId -> Just "Foo"@ for a plain (unqualified) type constructor name
-- ending in \"Id\"; 'Nothing' otherwise.
stripId :: FieldType -> Maybe Text
stripId ft =
    case ft of
        FTTypeCon Nothing t -> stripSuffix "Id" t
        _ -> Nothing
-- | The entity a field points at, when the field is a foreign reference.
foreignReference :: FieldDef -> Maybe HaskellName
foreignReference fd
    | ForeignRef name _ <- fieldReference fd = Just name
    | otherwise = Nothing
-- fieldSqlType at parse time can be an Exp
-- This helps delay setting fieldSqlType until lift time
data EntityDefSqlTypeExp = EntityDefSqlTypeExp EntityDef SqlTypeExp [SqlTypeExp]
                         deriving Show
-- | Either a deferred sql type (computed at splice time from the Haskell
-- type via 'sqlType') or one that is already known.
data SqlTypeExp = SqlTypeExp FieldType
                | SqlType' SqlType
                deriving Show
instance Lift SqlTypeExp where
    lift (SqlType' t) = lift t
    -- Deferred case: emit @sqlType (Proxy :: Proxy <type>)@ so the sql
    -- type is resolved once the generated datatype exists.
    lift (SqlTypeExp ftype) = return st
        where
          typ = ftToType ftype
          mtyp = (ConT ''Proxy `AppT` typ)
          typedNothing = SigE (ConE 'Proxy) mtyp
          st = VarE 'sqlType `AppE` typedNothing
-- | Fields paired positionally with their (possibly deferred) sql types.
data FieldsSqlTypeExp = FieldsSqlTypeExp [FieldDef] [SqlTypeExp]
instance Lift FieldsSqlTypeExp where
    lift (FieldsSqlTypeExp fields sqlTypeExps) =
        lift $ zipWith FieldSqlTypeExp fields sqlTypeExps
-- | One field paired with its (possibly deferred) sql type.
data FieldSqlTypeExp = FieldSqlTypeExp FieldDef SqlTypeExp
instance Lift FieldSqlTypeExp where
    lift (FieldSqlTypeExp (FieldDef{..}) sqlTypeExp) =
        [|FieldDef fieldHaskell fieldDB fieldType $(lift sqlTypeExp) fieldAttrs fieldStrict fieldReference|]
instance Lift EntityDefSqlTypeExp where
    -- Splice the resolved sql types back into the entity's id and fields.
    lift (EntityDefSqlTypeExp ent sqlTypeExp sqlTypeExps) =
        [|ent { entityFields = $(lift $ FieldsSqlTypeExp (entityFields ent) sqlTypeExps)
              , entityId = $(lift $ FieldSqlTypeExp (entityId ent) sqlTypeExp)
              }
        |]
instance Lift ReferenceDef where
    lift NoReference = [|NoReference|]
    lift (ForeignRef name ft) = [|ForeignRef name ft|]
    lift (EmbedRef em) = [|EmbedRef em|]
    lift (CompositeRef cdef) = [|CompositeRef cdef|]
instance Lift EmbedEntityDef where
    lift (EmbedEntityDef name fields) = [|EmbedEntityDef name fields|]
instance Lift EmbedFieldDef where
    lift (EmbedFieldDef name em) = [|EmbedFieldDef name em|]
-- | Lookup table from entity name to its embeddable form.
type EntityMap = M.Map HaskellName EmbedEntityDef
-- | Does this field type (possibly under a list or type application) name
-- an embeddable entity?  Qualified type constructors never match.
mEmbedded :: EntityMap -> FieldType -> Maybe EmbedEntityDef
mEmbedded _ (FTTypeCon Just{} _) = Nothing
mEmbedded ents (FTTypeCon Nothing n) = let name = HaskellName n in
    M.lookup name ents
mEmbedded ents (FTList x) = mEmbedded ents x
-- For applications, prefer a match on the head; fall back to the argument.
mEmbedded ents (FTApp x y) = maybe (mEmbedded ents y) Just (mEmbedded ents x)
-- | Classify a field's reference: embedded entity, foreign key (via the
-- \"Id\" naming convention), or no reference.  Existing classifications
-- are left untouched.
setEmbedField :: EntityMap -> FieldDef -> FieldDef
setEmbedField allEntities field = field
    { fieldReference = case fieldReference field of
        NoReference -> case mEmbedded allEntities (fieldType field) of
            Nothing -> case stripId $ fieldType field of
                Nothing -> NoReference
                Just name -> if M.member (HaskellName name) allEntities
                    then ForeignRef (HaskellName name)
                        -- the EmbedEntityDef does not contain FieldType information
                        -- but we shouldn't need this anyway
                        (FTTypeCon Nothing $ pack $ nameBase ''Int)
                    else NoReference
            Just em -> EmbedRef em
        existing@_ -> existing
    }
-- | Attach a (possibly deferred) sql type to the entity id and each field,
-- honouring an explicit @sqltype=@ attribute when present.
mkEntityDefSqlTypeExp :: EntityMap -> EntityDef -> EntityDefSqlTypeExp
mkEntityDefSqlTypeExp allEntities ent = EntityDefSqlTypeExp ent
    (getSqlType $ entityId ent)
    $ (map getSqlType $ entityFields ent)
  where
    getSqlType field = maybe
        (defaultSqlTypeExp field)
        (SqlType' . SqlOther)
        (listToMaybe $ mapMaybe (stripPrefix "sqltype=") $ fieldAttrs field)
    -- In the case of embedding, there won't be any datatype created yet.
    -- We just use SqlString, as the data will be serialized to JSON.
    defaultSqlTypeExp field
        | isJust (mEmbedded allEntities ftype) = SqlType' SqlString
        | otherwise = case fieldReference field of
            ForeignRef _ ft -> SqlTypeExp ft
            CompositeRef _ -> SqlType' $ SqlOther "Composite Reference"
            _ -> case ftype of
                -- In the case of lists, we always serialize to a string
                -- value (via JSON).
                --
                -- Normally, this would be determined automatically by
                -- SqlTypeExp. However, there's one corner case: if there's
                -- a list of entity IDs, the datatype for the ID has not
                -- yet been created, so the compiler will fail. This extra
                -- clause works around this limitation.
                FTList _ -> SqlType' SqlString
                _ -> SqlTypeExp ftype
      where
        ftype = fieldType field
-- | Create data types and appropriate 'PersistEntity' instances for the given
-- 'EntityDef's. Works well with the persist quasi-quoter.
mkPersist :: MkPersistSettings -> [EntityDef] -> Q [Dec]
mkPersist mps ents' = do
    -- Three generation passes: PersistField instances, the entity
    -- datatypes/instances, and the optional JSON instances.
    x <- fmap mconcat $ mapM (persistFieldFromEntity mps) ents
    y <- fmap mconcat $ mapM (mkEntity mps) ents
    z <- fmap mconcat $ mapM (mkJSON mps) ents
    return $ mconcat [x, y, z]
  where
    ents = map fixEntityDef ents'
-- | Implement special preprocessing on EntityDef as necessary for 'mkPersist'.
-- For example, strip out any fields marked as MigrationOnly.
fixEntityDef :: EntityDef -> EntityDef
fixEntityDef ed = ed { entityFields = kept }
  where
    kept = filter keep (entityFields ed)
    -- A field survives unless it carries either migration-control attribute.
    keep fd = all (`notElem` fieldAttrs fd) ["MigrationOnly", "SafeToRemove"]
-- | Settings to be passed to the 'mkPersist' function.
data MkPersistSettings = MkPersistSettings
    { mpsBackend :: Type
    -- ^ Which database backend we\'re using.
    --
    -- When generating data types, each type is given a generic version- which
    -- works with any backend- and a type synonym for the commonly used
    -- backend. This is where you specify that commonly used backend.
    , mpsGeneric :: Bool
    -- ^ Create generic types that can be used with multiple backends. Good for
    -- reusable code, but makes error messages harder to understand. Default:
    -- True.
    , mpsPrefixFields :: Bool
    -- ^ Prefix field names with the model name. Default: True.
    , mpsEntityJSON :: Maybe EntityJSON
    -- ^ Generate @ToJSON@/@FromJSON@ instances for each model types. If it's
    -- @Nothing@, no instances will be generated. Default:
    --
    -- @
    --  Just EntityJSON
    --      { entityToJSON = 'keyValueEntityToJSON
    --      , entityFromJSON = 'keyValueEntityFromJSON
    --      }
    -- @
    , mpsGenerateLenses :: !Bool
    -- ^ Instead of generating normal field accessors, generator lens-style accessors.
    --
    -- Default: False
    --
    -- Since 1.3.1
    }
-- | Names of the functions implementing JSON conversion for @Entity a@.
data EntityJSON = EntityJSON
    { entityToJSON :: Name
    -- ^ Name of the @toJSON@ implementation for @Entity a@.
    , entityFromJSON :: Name
    -- ^ Name of the @fromJSON@ implementation for @Entity a@.
    }
-- | Create an @MkPersistSettings@ with default values.
mkPersistSettings :: Type -- ^ Value for 'mpsBackend'
                  -> MkPersistSettings
mkPersistSettings t = MkPersistSettings
    { mpsBackend = t
    , mpsGeneric = False
    , mpsPrefixFields = True
    , mpsEntityJSON = Just EntityJSON
        { entityToJSON = 'entityIdToJSON
        , entityFromJSON = 'entityIdFromJSON
        }
    , mpsGenerateLenses = False
    }
-- | Use the 'SqlPersist' backend.
sqlSettings :: MkPersistSettings
sqlSettings = mkPersistSettings $ ConT ''SqlBackend
-- | Same as 'sqlSettings'.
--
-- Since 1.1.1
sqlOnlySettings :: MkPersistSettings
sqlOnlySettings = sqlSettings
{-# DEPRECATED sqlOnlySettings "use sqlSettings" #-}
-- | Record-field name for a field: @entityName ++ FieldName@ when field
-- prefixing is on, otherwise just the lower-cased field name.
recNameNoUnderscore :: MkPersistSettings -> HaskellName -> HaskellName -> Text
recNameNoUnderscore mps dt f
  | mpsPrefixFields mps = lowerFirst (unHaskellName dt) ++ upperFirst ft
  | otherwise = lowerFirst ft
  where ft = unHaskellName f
-- | Like 'recNameNoUnderscore', but prefixed with an underscore when
-- lens-style accessors are generated (the lens then takes the plain name).
recName :: MkPersistSettings -> HaskellName -> HaskellName -> Text
recName mps dt f =
    addUnderscore $ recNameNoUnderscore mps dt f
  where
    addUnderscore
        | mpsGenerateLenses mps = ("_" ++)
        | otherwise = id
-- | Lower-case the first character of a 'Text'; empty input is unchanged.
lowerFirst :: Text -> Text
lowerFirst t = maybe t recase (uncons t)
  where
    recase (c, rest) = cons (toLower c) rest
-- | Upper-case the first character of a 'Text'; empty input is unchanged.
upperFirst :: Text -> Text
upperFirst t = maybe t recase (uncons t)
  where
    recase (c, rest) = cons (toUpper c) rest
-- | Build the entity's data declaration: a record for ordinary entities or
-- one constructor per field for sum entities; generic entities get a
-- backend type parameter and a \"Generic\" name suffix.
dataTypeDec :: MkPersistSettings -> EntityDef -> Dec
dataTypeDec mps t =
    DataD [] nameFinal paramsFinal constrs
    $ map (mkName . unpack) $ entityDerives t
  where
    mkCol x fd@FieldDef {..} =
        (mkName $ unpack $ recName mps x fieldHaskell,
         if fieldStrict then IsStrict else NotStrict,
         maybeIdType mps fd Nothing Nothing
        )
    (nameFinal, paramsFinal)
        | mpsGeneric mps = (nameG, [PlainTV backend])
        | otherwise = (name, [])
    nameG = mkName $ unpack $ unHaskellName (entityHaskell t) ++ "Generic"
    name = mkName $ unpack $ unHaskellName $ entityHaskell t
    cols = map (mkCol $ entityHaskell t) $ entityFields t
    backend = backendName
    constrs
        | entitySum t = map sumCon $ entityFields t
        | otherwise = [RecC name cols]
    sumCon fd = NormalC
        (sumConstrName mps t fd)
        [(NotStrict, maybeIdType mps fd Nothing Nothing)]
-- | Constructor name for one field of a sum entity:
-- @[EntityName]FieldNameSum@.
sumConstrName :: MkPersistSettings -> EntityDef -> FieldDef -> Name
sumConstrName mps t FieldDef {..} = mkName $ unpack $ concat
    [ if mpsPrefixFields mps
        then unHaskellName $ entityHaskell t
        else ""
    , upperFirst $ unHaskellName fieldHaskell
    , "Sum"
    ]
-- | Build the @data instance Unique <Entity>@ declaration with one
-- constructor per uniqueness constraint.
uniqueTypeDec :: MkPersistSettings -> EntityDef -> Dec
uniqueTypeDec mps t =
    DataInstD [] ''Unique
        [genericDataType mps (entityHaskell t) backendT]
        (map (mkUnique mps t) $ entityUniques t)
        []
-- | Build one 'Unique' constructor from a uniqueness definition.
-- Nullable columns are rejected unless the \"!force\" attribute is given.
mkUnique :: MkPersistSettings -> EntityDef -> UniqueDef -> Con
mkUnique mps t (UniqueDef (HaskellName constr) _ fields attrs) =
    NormalC (mkName $ unpack constr) types
  where
    types = map (go . flip lookup3 (entityFields t))
          $ map (unHaskellName . fst) fields
    force = "!force" `elem` attrs
    go :: (FieldDef, IsNullable) -> (Strict, Type)
    go (_, Nullable _) | not force = error nullErrMsg
    go (fd, y) = (NotStrict, maybeIdType mps fd Nothing (Just y))
    lookup3 :: Text -> [FieldDef] -> (FieldDef, IsNullable)
    lookup3 s [] =
        error $ unpack $ "Column not found: " ++ s ++ " in unique " ++ constr
    lookup3 x (fd@FieldDef {..}:rest)
        | x == unHaskellName fieldHaskell = (fd, nullable fieldAttrs)
        | otherwise = lookup3 x rest
    nullErrMsg =
      mconcat [ "Error:  By default we disallow NULLables in an uniqueness "
              , "constraint.  The semantics of how NULL interacts with those "
              , "constraints is non-trivial:  two NULL values are not "
              , "considered equal for the purposes of an uniqueness "
              , "constraint.  If you understand this feature, it is possible "
              , "to use it your advantage.    *** Use a \"!force\" attribute "
              , "on the end of the line that defines your uniqueness "
              , "constraint in order to disable this check. ***" ]
-- | The Haskell type of a field, wrapped in 'Maybe' when the field (or the
-- supplied nullability override) says it is nullable.
maybeIdType :: MkPersistSettings
            -> FieldDef
            -> Maybe Name -- ^ backend
            -> Maybe IsNullable
            -> Type
maybeIdType mps fd mbackend mnull = maybeTyp mayNullable idtyp
  where
    mayNullable = case mnull of
        (Just (Nullable ByMaybeAttr)) -> True
        _ -> maybeNullable fd
    idtyp = idType mps fd mbackend
-- | The backend type used in generated signatures: a type variable in
-- generic mode, otherwise the configured concrete backend.
backendDataType :: MkPersistSettings -> Type
backendDataType mps
    | mpsGeneric mps = backendT
    | otherwise = mpsBackend mps
-- | The entity's datatype as a TH 'Type'; in generic mode the
-- \"Generic\"-suffixed type applied to the backend.
genericDataType :: MkPersistSettings
                -> HaskellName -- ^ entity name
                -> Type -- ^ backend
                -> Type
genericDataType mps (HaskellName typ') backend
    | mpsGeneric mps = ConT (mkName $ unpack $ typ' ++ "Generic") `AppT` backend
    | otherwise = ConT $ mkName $ unpack typ'
-- | Field type: @Key <Entity>@ for foreign references, otherwise the
-- declared field type translated to TH.
idType :: MkPersistSettings -> FieldDef -> Maybe Name -> Type
idType mps fd mbackend =
    case foreignReference fd of
        Just typ ->
            ConT ''Key
            `AppT` genericDataType mps typ (VarT $ fromMaybe backendName mbackend)
        Nothing -> ftToType $ fieldType fd
-- | Guard against an empty clause list: a TH function declaration may not
-- be empty, so substitute a single catch-all clause that raises an error.
degen :: [Clause] -> [Clause]
degen [] = [normalClause [WildP] fallback]
  where
    fallback =
        VarE 'error `AppE` LitE (StringL "Degenerate case, should never happen")
degen clauses = clauses
-- | Generate the 'toPersistFields' method.  Record entities serialise all
-- fields; sum entities serialise 'PersistNull' everywhere except the slot
-- of the active constructor.
mkToPersistFields :: MkPersistSettings -> String -> EntityDef -> Q Dec
mkToPersistFields mps constr ed@EntityDef { entitySum = isSum, entityFields = fields } = do
    clauses <-
        if isSum
            then sequence $ zipWith goSum fields [1..]
            else fmap return go
    return $ FunD 'toPersistFields clauses
  where
    go :: Q Clause
    go = do
        xs <- sequence $ replicate fieldCount $ newName "x"
        let pat = ConP (mkName constr) $ map VarP xs
        sp <- [|SomePersistField|]
        let bod = ListE $ map (AppE sp . VarE) xs
        return $ normalClause [pat] bod
    fieldCount = length fields
    goSum :: FieldDef -> Int -> Q Clause
    goSum fd idx = do
        let name = sumConstrName mps ed fd
        enull <- [|SomePersistField PersistNull|]
        let beforeCount = idx - 1
            afterCount = fieldCount - idx
            before = replicate beforeCount enull
            after = replicate afterCount enull
        x <- newName "x"
        sp <- [|SomePersistField|]
        let body = ListE $ mconcat
                [ before
                , [sp `AppE` VarE x]
                , after
                ]
        return $ normalClause [ConP name [VarP x]] body
-- | Generate 'persistUniqueToFieldNames': one clause per unique
-- constructor, matched with an (empty) record pattern.
mkToFieldNames :: [UniqueDef] -> Q Dec
mkToFieldNames pairs = do
    pairs' <- mapM go pairs
    return $ FunD 'persistUniqueToFieldNames $ degen pairs'
  where
    go (UniqueDef constr _ names _) = do
        names' <- lift names
        return $
            normalClause
                [RecP (mkName $ unpack $ unHaskellName constr) []]
                names'
-- | Generate 'persistUniqueToValues': destructure each unique constructor
-- and convert every component with 'toPersistValue'.
mkUniqueToValues :: [UniqueDef] -> Q Dec
mkUniqueToValues pairs = do
    pairs' <- mapM go pairs
    return $ FunD 'persistUniqueToValues $ degen pairs'
  where
    go :: UniqueDef -> Q Clause
    go (UniqueDef constr _ names _) = do
        xs <- mapM (const $ newName "x") names
        let pat = ConP (mkName $ unpack $ unHaskellName constr) $ map VarP xs
        tpv <- [|toPersistValue|]
        let bod = ListE $ map (AppE tpv . VarE) xs
        return $ normalClause [pat] bod
-- | True for every 'PersistValue' except 'PersistNull'.
isNotNull :: PersistValue -> Bool
isNotNull pv =
    case pv of
        PersistNull -> False
        _ -> True
-- | Apply a function to the 'Left' value of an 'Either'; 'Right' values
-- pass through unchanged.
mapLeft :: (a -> c) -> Either a b -> Either c b
mapLeft f = either (Left . f) Right
-- | Prefix a conversion error message with the offending field's name,
-- e.g. @field age: expected PersistInt64@.
fieldError :: Text -> Text -> Text
fieldError fieldName err = "field " `mappend` fieldName `mappend` ": " `mappend` err
-- | Generate the clauses of 'fromPersistValues'.
--
-- Record entities delegate to 'fromValues'.  Sum entities expect a list
-- of values that is 'PersistNull' everywhere except at the position of
-- the active constructor; one guarded clause is generated per field,
-- plus a final catch-all clause returning a 'Left' error for the
-- all-nulls (ambiguous) case.
mkFromPersistValues :: MkPersistSettings -> EntityDef -> Q [Clause]
mkFromPersistValues _ t@(EntityDef { entitySum = False }) =
    fromValues t "fromPersistValues" entE $ entityFields t
  where
    entE = ConE $ mkName $ unpack entName
    entName = unHaskellName $ entityHaskell t
mkFromPersistValues mps t@(EntityDef { entitySum = True }) = do
    nothing <- [|Left ("Invalid fromPersistValues input: sum type with all nulls. Entity: " `mappend` entName)|]
    clauses <- mkClauses [] $ entityFields t
    return $ clauses `mappend` [normalClause [WildP] nothing]
  where
    entName = unHaskellName $ entityHaskell t
    mkClauses _ [] = return []
    -- @before@ accumulates the fields already passed, so the pattern can
    -- place nulls on both sides of the bound variable.
    mkClauses before (field:after) = do
        x <- newName "x"
        let null' = ConP 'PersistNull []
            pat = ListP $ mconcat
                [ map (const null') before
                , [VarP x]
                , map (const null') after
                ]
            constr = ConE $ sumConstrName mps t field
        fs <- [|fromPersistValue $(return $ VarE x)|]
        -- the guard rejects PersistNull here so an all-null list falls
        -- through to the error clause instead of matching ambiguously
        let guard' = NormalG $ VarE 'isNotNull `AppE` VarE x
        let clause = Clause [pat] (GuardedB [(guard', InfixE (Just constr) fmapE (Just fs))]) []
        clauses <- mkClauses (field : before) after
        return $ clause : clauses
-- | Standard van Laarhoven lens synonym.
type Lens s t a b = forall f. Functor f => (a -> f b) -> s -> f t
-- | Build a lens from a getter and a setter; used by the generated
-- 'fieldLens' implementations.
lensPTH :: (s -> a) -> (s -> b -> t) -> Lens s t a b
lensPTH sa sbt afb s = fmap (sbt s) (afb $ sa s)
-- | TH expression for 'fmap', shared by several generators.
fmapE :: Exp
fmapE = VarE 'fmap
-- | Generate the clauses of 'fieldLens': a clause for the Id pseudo-field
-- (lensing onto 'entityKey') plus one clause per entity field.  Record
-- entities use a record-update setter; sum entities pattern match on the
-- active constructor and 'error' when the wrong constructor is present.
mkLensClauses :: MkPersistSettings -> EntityDef -> Q [Clause]
mkLensClauses mps t = do
    lens' <- [|lensPTH|]
    getId <- [|entityKey|]
    setId <- [|\(Entity _ value) key -> Entity key value|]
    getVal <- [|entityVal|]
    dot <- [|(.)|]
    keyVar <- newName "key"
    valName <- newName "value"
    xName <- newName "x"
    let idClause = normalClause
            [ConP (keyIdName t) []]
            (lens' `AppE` getId `AppE` setId)
    if entitySum t
        then return $ idClause : map (toSumClause lens' keyVar valName xName) (entityFields t)
        else return $ idClause : map (toClause lens' getVal dot keyVar valName xName) (entityFields t)
  where
    toClause lens' getVal dot keyVar valName xName f = normalClause
        [ConP (filterConName mps t f) []]
        (lens' `AppE` getter `AppE` setter)
      where
        fieldName = mkName $ unpack $ recName mps (entityHaskell t) (fieldHaskell f)
        -- getter composes the record accessor with entityVal
        getter = InfixE (Just $ VarE fieldName) dot (Just getVal)
        setter = LamE
            [ ConP 'Entity [VarP keyVar, VarP valName]
            , VarP xName
            ]
            $ ConE 'Entity `AppE` VarE keyVar `AppE` RecUpdE
                (VarE valName)
                [(fieldName, VarE xName)]
    toSumClause lens' keyVar valName xName f = normalClause
        [ConP (filterConName mps t f) []]
        (lens' `AppE` getter `AppE` setter)
      where
        emptyMatch = Match WildP (NormalB $ VarE 'error `AppE` LitE (StringL "Tried to use fieldLens on a Sum type")) []
        getter = LamE
            [ ConP 'Entity [WildP, VarP valName]
            ] $ CaseE (VarE valName)
            $ Match (ConP (sumConstrName mps t f) [VarP xName]) (NormalB $ VarE xName) []
            -- FIXME It would be nice if the types expressed that the Field is
            -- a sum type and therefore could result in Maybe.
            : if length (entityFields t) > 1 then [emptyMatch] else []
        -- setter discards the old sum value entirely and rebuilds it
        setter = LamE
            [ ConP 'Entity [VarP keyVar, WildP]
            , VarP xName
            ]
            $ ConE 'Entity `AppE` VarE keyVar `AppE` (ConE (sumConstrName mps t f) `AppE` VarE xName)
-- | Declare the key type and associated instances.
-- A PathPiece instance is only generated for a Key with one field.
--
-- BUG FIX: the generated @Eq@ instance in 'genericNewtypeInstances'
-- previously defined @x /= y@ in terms of @(==)@, so inequality always
-- agreed with equality (violating the Eq law @x /= y = not (x == y)@).
-- The definition now uses @(/=)@ on the unwrapped backend keys.
mkKeyTypeDec :: MkPersistSettings -> EntityDef -> Q (Dec, [Dec])
mkKeyTypeDec mps t = do
    (instDecs, i) <-
      if mpsGeneric mps
        then if not useNewtype
               then do pfDec <- pfInstD
                       return (pfDec, [''Generic])
               else do gi <- genericNewtypeInstances
                       return (gi, [])
        else if not useNewtype
               then do pfDec <- pfInstD
                       return (pfDec, [''Show, ''Read, ''Eq, ''Ord, ''Generic])
               else do
                   let allInstances = [''Show, ''Read, ''Eq, ''Ord, ''PathPiece, ''PersistField, ''PersistFieldSql, ''ToJSON, ''FromJSON]
                   if customKeyType
                     then return ([], allInstances)
                     else do
                         bi <- backendKeyI
                         return (bi, allInstances)
    let kd = if useNewtype
               then NewtypeInstD [] k [recordType] dec i
               else DataInstD    [] k [recordType] [dec] i
    return (kd, instDecs)
  where
    keyConE = keyConExp t
    unKeyE = unKeyExp t
    dec = RecC (keyConName t) keyFields
    k = ''Key
    recordType = genericDataType mps (entityHaskell t) backendT
    pfInstD = -- FIXME: generate a PersistMap instead of PersistList
      [d|instance PersistField (Key $(pure recordType)) where
            toPersistValue = PersistList . keyToValues
            fromPersistValue (PersistList l) = keyFromValues l
            fromPersistValue got = error $ "fromPersistValue: expected PersistList, got: " `mappend` show got
         instance PersistFieldSql (Key $(pure recordType)) where
            sqlType _ = SqlString
         instance ToJSON (Key $(pure recordType))
         instance FromJSON (Key $(pure recordType))
      |]

    keyStringL = StringL . keyString
    -- ghc 7.6 cannot parse the left arrow Ident $() <- lexP
    keyPattern = BindS (ConP 'Ident [LitP $ keyStringL t])

    backendKeyGenericI =
        [d| instance PersistStore $(pure backendT) =>
              ToBackendKey $(pure backendT) $(pure recordType) where
                toBackendKey   = $(return unKeyE)
                fromBackendKey = $(return keyConE)
        |]
    backendKeyI = let bdt = backendDataType mps in
        [d| instance ToBackendKey $(pure bdt) $(pure recordType) where
              toBackendKey   = $(return unKeyE)
              fromBackendKey = $(return keyConE)
        |]

    -- truly unfortunate that TH doesn't support standalone deriving
    -- https://ghc.haskell.org/trac/ghc/ticket/8100
    genericNewtypeInstances = do
      instances <- [|lexP|] >>= \lexPE -> [| step readPrec >>= return . ($(pure keyConE) )|] >>= \readE -> do
        alwaysInstances <-
          [d|instance Show (BackendKey $(pure backendT)) => Show (Key $(pure recordType)) where
              showsPrec i x = showParen (i > app_prec) $
                  (showString $ $(pure $ LitE $ keyStringL t) `mappend` " ") .
                  showsPrec i ($(return unKeyE) x)
                where app_prec = (10::Int)
             instance Read (BackendKey $(pure backendT)) => Read (Key $(pure recordType)) where
               readPrec = parens $ (prec app_prec $ $(pure $ DoE [keyPattern lexPE, NoBindS readE]))
                 where app_prec = (10::Int)
             instance Eq (BackendKey $(pure backendT)) => Eq (Key $(pure recordType)) where
               x == y =
                   ($(return unKeyE) x) ==
                   ($(return unKeyE) y)
               -- fixed: was (==), which made (/=) always mirror (==)
               x /= y =
                   ($(return unKeyE) x) /=
                   ($(return unKeyE) y)
             instance Ord (BackendKey $(pure backendT)) => Ord (Key $(pure recordType)) where
               compare x y = compare
                   ($(return unKeyE) x)
                   ($(return unKeyE) y)
             instance PathPiece (BackendKey $(pure backendT)) => PathPiece (Key $(pure recordType)) where
               toPathPiece = toPathPiece . $(return unKeyE)
               fromPathPiece = fmap $(return keyConE) . fromPathPiece
             instance PersistField (BackendKey $(pure backendT)) => PersistField (Key $(pure recordType)) where
               toPersistValue = toPersistValue . $(return unKeyE)
               fromPersistValue = fmap $(return keyConE) . fromPersistValue
             instance PersistFieldSql (BackendKey $(pure backendT)) => PersistFieldSql (Key $(pure recordType)) where
               sqlType = sqlType . fmap $(return unKeyE)
             instance ToJSON (BackendKey $(pure backendT)) => ToJSON (Key $(pure recordType)) where
               toJSON = toJSON . $(return unKeyE)
             instance FromJSON (BackendKey $(pure backendT)) => FromJSON (Key $(pure recordType)) where
                parseJSON = fmap $(return keyConE) . parseJSON
              |]
        if customKeyType then return alwaysInstances
          else fmap (alwaysInstances `mappend`) backendKeyGenericI
      return instances

    -- a single-field key can be a newtype; composite keys need data
    useNewtype = length keyFields < 2
    defaultIdType = fieldType (entityId t) == FTTypeCon Nothing (keyIdText t)
    keyFields = case entityPrimary t of
      Just pdef -> map primaryKeyVar $ (compositeFields pdef)
      Nothing   -> if defaultIdType
        then [idKeyVar backendKeyType]
        else [idKeyVar $ ftToType $ fieldType $ entityId t]

    customKeyType = not defaultIdType || not useNewtype || isJust (entityPrimary t)

    primaryKeyVar fd = (keyFieldName t fd, NotStrict, ftToType $ fieldType fd)
    idKeyVar ft = (unKeyName t, NotStrict, ft)

    backendKeyType
        | mpsGeneric mps = ConT ''BackendKey `AppT` backendT
        | otherwise      = ConT ''BackendKey `AppT` mpsBackend mps
-- | TH name of the @\<Entity\>Id@ type synonym.
keyIdName :: EntityDef -> Name
keyIdName = mkName . unpack . keyIdText
-- | Textual form of the @\<Entity\>Id@ synonym.
keyIdText :: EntityDef -> Text
keyIdText t = (unHaskellName $ entityHaskell t) `mappend` "Id"
-- | Name of the key unwrapping accessor, e.g. @unPersonKey@.
unKeyName :: EntityDef -> Name
unKeyName t = mkName $ "un" `mappend` keyString t
-- | TH expression for the key unwrapping accessor.
unKeyExp :: EntityDef -> Exp
unKeyExp = VarE . unKeyName
-- | The shared backend type variable used in generic declarations.
backendT :: Type
backendT = VarT backendName
backendName :: Name
backendName = mkName "backend"
-- | Key constructor name, with a prime appended when an entity field
-- named @key@ would otherwise clash with it.
keyConName :: EntityDef -> Name
keyConName t = mkName $ resolveConflict $ keyString t
  where
    resolveConflict kn = if conflict then kn `mappend` "'" else kn
    conflict = any ((== HaskellName "key") . fieldHaskell) $ entityFields t
-- | TH expression for the key constructor.
keyConExp :: EntityDef -> Exp
keyConExp = ConE . keyConName
keyString :: EntityDef -> String
keyString = unpack . keyText
-- | e.g. @PersonKey@ for entity @Person@.
keyText :: EntityDef -> Text
keyText t = unHaskellName (entityHaskell t) ++ "Key"
-- | Record-field name within a composite key, e.g. @personKeyName@.
keyFieldName :: EntityDef -> FieldDef -> Name
keyFieldName t fd = mkName $ unpack $ lowerFirst (keyText t) `mappend` (unHaskellName $ fieldHaskell fd)
-- | Generate 'keyToValues'.  A default (single-field) key serialises to
-- a singleton list; a composite primary key serialises each key field
-- in declaration order.
mkKeyToValues :: MkPersistSettings -> EntityDef -> Q Dec
mkKeyToValues _mps t = do
    (p, e) <- case entityPrimary t of
      Nothing  ->
        ([],) <$> [|(:[]) . toPersistValue . $(return $ unKeyExp t)|]
      Just pdef ->
        return $ toValuesPrimary pdef
    return $ FunD 'keyToValues $ return $ normalClause p e
  where
    toValuesPrimary pdef =
      ( [VarP recordName]
      , ListE $ map (\fd -> VarE 'toPersistValue `AppE` (VarE (keyFieldName t fd) `AppE` VarE recordName)) $ compositeFields pdef
      )
    recordName = mkName "record"
-- | Build a 'Clause' with an unguarded body and no local declarations.
normalClause :: [Pat] -> Exp -> Clause
normalClause pats body = Clause pats (NormalB body) []
-- | Generate 'keyFromValues', the inverse of 'keyToValues'.  A default
-- key deserialises the single element (via 'headNote'); composite keys
-- delegate to the shared 'fromValues' machinery.
mkKeyFromValues :: MkPersistSettings -> EntityDef -> Q Dec
mkKeyFromValues _mps t = do
    clauses <- case entityPrimary t of
        Nothing  -> do
            e <- [|fmap $(return $ keyConE) . fromPersistValue . headNote|]
            return $ [normalClause [] e]
        Just pdef ->
            fromValues t "keyFromValues" keyConE (compositeFields pdef)
    return $ FunD 'keyFromValues clauses
  where
    keyConE = keyConExp t
-- | Extract the sole element of a singleton list, raising a descriptive
-- runtime error for any other list shape (deliberately partial: the
-- generated key code guarantees a one-element list).
headNote :: [PersistValue] -> PersistValue
headNote [x] = x
headNote xs =
    error $ "mkKeyFromValues: expected a list of one element, got: "
            `mappend` show xs
-- | Shared generator for the @fromPersistValues@/@keyFromValues@ style
-- functions.  Produces two clauses: a success clause that pattern
-- matches an exact-length list and applies the constructor applicatively
-- over each 'fromPersistValue' result, and a fallback clause returning a
-- 'Left' describing the unexpected input.
fromValues :: EntityDef -> Text -> Exp -> [FieldDef] -> Q [Clause]
fromValues t funName conE fields = do
    x <- newName "x"
    let funMsg = entityText t `mappend` ": " `mappend` funName `mappend` " failed on: "
    patternMatchFailure <-
      [|Left $ mappend funMsg (pack $ show $(return $ VarE x))|]
    suc <- patternSuccess fields
    return [ suc, normalClause [VarP x] patternMatchFailure ]
  where
    -- zero fields: match the empty list and return the bare constructor
    patternSuccess [] = do
        rightE <- [|Right|]
        return $ normalClause [ListP []] (rightE `AppE` conE)
    patternSuccess fieldsNE = do
        x1 <- newName "x1"
        restNames <- mapM (\i -> newName $ "x" `mappend` show i) [2..length fieldsNE]
        -- fieldsNE is non-empty here, so this pattern cannot fail
        (fpv1:mkPersistValues) <- mapM mkPvFromFd fieldsNE
        app1E <- [|(<$>)|]
        let conApp = infixFromPersistValue app1E fpv1 conE x1
        applyE <- [|(<*>)|]
        let applyFromPersistValue = infixFromPersistValue applyE
        -- builds: Con <$> f1 x1 <*> f2 x2 <*> ...
        return $ normalClause
            [ListP $ map VarP (x1:restNames)]
            (foldl' (\exp (name, fpv) -> applyFromPersistValue fpv exp name) conApp (zip restNames mkPersistValues))
      where
        infixFromPersistValue applyE fpv exp name =
            UInfixE exp applyE (fpv `AppE` VarE name)
        mkPvFromFd = mkPersistValue . unHaskellName . fieldHaskell
        -- each converter tags failures with the field name
        mkPersistValue fieldName = [|mapLeft (fieldError fieldName) . fromPersistValue|]
-- | Top-level generator for a single entity: produces the
-- 'PersistEntity' instance (key type, serialisation methods, the
-- @EntityField@ GADT-style data family, field lenses) plus optional
-- lenses and, in generic mode, a backend-specialised type synonym.
mkEntity :: MkPersistSettings -> EntityDef -> Q [Dec]
mkEntity mps t = do
    t' <- lift t
    let nameT = unHaskellName entName
    let nameS = unpack nameT
    let clazz = ConT ''PersistEntity `AppT` genericDataType mps entName backendT
    tpf <- mkToPersistFields mps nameS t
    fpv <- mkFromPersistValues mps t
    utv <- mkUniqueToValues $ entityUniques t
    puk <- mkUniqueKeys t
    fkc <- mapM (mkForeignKeysComposite mps t) $ entityForeigns t
    -- the synthetic Id field is treated like a regular field here
    let primaryField = entityId t
    fields <- mapM (mkField mps t) $ primaryField : entityFields t
    toFieldNames <- mkToFieldNames $ entityUniques t
    (keyTypeDec, keyInstanceDecs) <- mkKeyTypeDec mps t
    keyToValues' <- mkKeyToValues mps t
    keyFromValues' <- mkKeyFromValues mps t
    let addSyn -- FIXME maybe remove this
            | mpsGeneric mps = (:) $
                TySynD (mkName nameS) [] $
                    genericDataType mps entName $ mpsBackend mps
            | otherwise = id
    lensClauses <- mkLensClauses mps t
    lenses <- mkLenses mps t
    -- generic entities need the PersistStore constraint on the backend
    let instanceConstraint = if not (mpsGeneric mps) then [] else
          [ClassP ''PersistStore [backendT]]
    return $ addSyn $
       dataTypeDec mps t : mconcat fkc `mappend`
      ([ TySynD (keyIdName t) [] $
            ConT ''Key `AppT` ConT (mkName nameS)
      , InstanceD instanceConstraint clazz $
        [ uniqueTypeDec mps t
        , keyTypeDec
        , keyToValues'
        , keyFromValues'
        , FunD 'entityDef [normalClause [WildP] t']
        , tpf
        , FunD 'fromPersistValues fpv
        , toFieldNames
        , utv
        , puk
        , DataInstD
            []
            ''EntityField
            [ genDataType
            , VarT $ mkName "typ"
            ]
            (map fst fields)
            []
        , FunD 'persistFieldDef (map snd fields)
        , TySynInstD
            ''PersistEntityBackend
#if MIN_VERSION_template_haskell(2,9,0)
            (TySynEqn
               [genDataType]
               (backendDataType mps))
#else
            [genDataType]
            (backendDataType mps)
#endif
        , FunD 'persistIdField [normalClause [] (ConE $ keyIdName t)]
        , FunD 'fieldLens lensClauses
        ]
      ] `mappend` lenses) `mappend` keyInstanceDecs
  where
    genDataType = genericDataType mps entName backendT
    entName = entityHaskell t
-- | The entity's Haskell-side name as 'Text'.
entityText :: EntityDef -> Text
entityText = unHaskellName . entityHaskell
-- | Generate a van Laarhoven lens per entity field (only when
-- 'mpsGenerateLenses' is set; sum entities get none).  Each lens has an
-- explicit forall'd signature so the functor constraint is visible.
mkLenses :: MkPersistSettings -> EntityDef -> Q [Dec]
mkLenses mps _ | not (mpsGenerateLenses mps) = return []
mkLenses _ ent | entitySum ent = return []
mkLenses mps ent = fmap mconcat $ forM (entityFields ent) $ \field -> do
    let lensName' = recNameNoUnderscore mps (entityHaskell ent) (fieldHaskell field)
        lensName = mkName $ unpack lensName'
        -- with lenses on, the real record selector is underscore-prefixed
        fieldName = mkName $ unpack $ "_" ++ lensName'
    needleN <- newName "needle"
    setterN <- newName "setter"
    fN <- newName "f"
    aN <- newName "a"
    yN <- newName "y"
    let needle = VarE needleN
        setter = VarE setterN
        f = VarE fN
        a = VarE aN
        y = VarE yN
        fT = mkName "f"
        -- FIXME if we want to get really fancy, then: if this field is the
        -- *only* Id field present, then set backend1 and backend2 to different
        -- values
        backend1 = backendName
        backend2 = backendName
        aT = maybeIdType mps field (Just backend1) Nothing
        bT = maybeIdType mps field (Just backend2) Nothing
        mkST backend = genericDataType mps (entityHaskell ent) (VarT backend)
        sT = mkST backend1
        tT = mkST backend2
        t1 `arrow` t2 = ArrowT `AppT` t1 `AppT` t2
        vars = PlainTV fT
             : (if mpsGeneric mps then [PlainTV backend1{-, PlainTV backend2-}] else [])
    return
        -- lensName :: forall f backend. Functor f => (a -> f b) -> s -> f t
        [ SigD lensName $ ForallT vars [ClassP ''Functor [VarT fT]] $
            (aT `arrow` (VarT fT `AppT` bT)) `arrow`
            (sT `arrow` (VarT fT `AppT` tT))
        , FunD lensName $ return $ Clause
            [VarP fN, VarP aN]
            (NormalB $ fmapE
                `AppE` setter
                `AppE` (f `AppE` needle))
            [ FunD needleN [normalClause [] (VarE fieldName `AppE` a)]
            , FunD setterN $ return $ normalClause
                [VarP yN]
                (RecUpdE a
                    [ (fieldName, y)
                    ])
            ]
        ]
-- | For a composite foreign-key declaration, generate a helper function
-- @\<constraintName\> :: Entity -> (Maybe) Key Referenced@ that packs the
-- referencing fields into the target's key constructor.  When the
-- reference is nullable the result is wrapped in 'Maybe' via 'fmap'.
mkForeignKeysComposite :: MkPersistSettings -> EntityDef -> ForeignDef -> Q [Dec]
mkForeignKeysComposite mps t ForeignDef {..} = do
    let fieldName f = mkName $ unpack $ recName mps (entityHaskell t) f
    let fname = fieldName foreignConstraintNameHaskell
    let reftableString = unpack $ unHaskellName $ foreignRefTableHaskell
    let reftableKeyName = mkName $ reftableString `mappend` "Key"
    let tablename = mkName $ unpack $ entityText t
    recordName <- newName "record"
    -- one accessor application per (local, foreign) field pair
    let fldsE = map (\((foreignName, _),_) -> VarE (fieldName $ foreignName)
                  `AppE` VarE recordName) foreignFields
    let mkKeyE = foldl' AppE (maybeExp foreignNullable $ ConE reftableKeyName) fldsE
    let fn = FunD fname [normalClause [VarP recordName] mkKeyE]

    let t2 = maybeTyp foreignNullable $ ConT ''Key `AppT` ConT (mkName reftableString)
    let sig = SigD fname $ (ArrowT `AppT` (ConT tablename)) `AppT` t2
    return [sig, fn]
-- | When the flag is set, lift the expression through 'fmap' (for
-- Maybe-wrapped values); otherwise return it unchanged.
maybeExp :: Bool -> Exp -> Exp
maybeExp may e = if may then fmapE `AppE` e else e
-- | When the flag is set, wrap the type in 'Maybe'; otherwise return it
-- unchanged.
maybeTyp :: Bool -> Type -> Type
maybeTyp may t = if may then ConT ''Maybe `AppT` t else t
-- | produce code similar to the following:
--
-- @
--   instance PersistEntity e => PersistField e where
--      toPersistValue = PersistMap $ zip columNames (map toPersistValue . toPersistFields)
--      fromPersistValue (PersistMap o) =
--          let columns = HM.fromList o
--           in fromPersistValues $ map (\name ->
--             case HM.lookup name columns of
--               Just v -> v
--               Nothing -> PersistNull
--      fromPersistValue x = Left $ "Expected PersistMap, received: " ++ show x
--      sqlType _ = SqlString
-- @
--
-- This lets an entire entity be embedded as a single column value.
persistFieldFromEntity :: MkPersistSettings -> EntityDef -> Q [Dec]
persistFieldFromEntity mps e = do
    ss <- [|SqlString|]
    obj <- [|\ent -> PersistMap $ zip (map pack columnNames) (map toPersistValue $ toPersistFields ent)|]
    -- missing columns decode as PersistNull rather than failing outright
    fpv <- [|\x -> let columns = HM.fromList x
                    in fromPersistValues $ map
                         (\(name) ->
                            case HM.lookup (pack name) columns of
                              Just v -> v
                              Nothing -> PersistNull)
                         $ columnNames
           |]
    compose <- [|(<=<)|]
    getPersistMap' <- [|getPersistMap|]
    return
        [ persistFieldInstanceD (mpsGeneric mps) typ
            [ FunD 'toPersistValue [ normalClause [] obj ]
            , FunD 'fromPersistValue
                [ normalClause [] (InfixE (Just fpv) compose $ Just getPersistMap')
                ]
            ]
        , persistFieldSqlInstanceD (mpsGeneric mps) typ
            [ sqlTypeFunD ss
            ]
        ]
  where
    typ = genericDataType mps (entityHaskell e) backendT
    entFields = entityFields e
    columnNames = map (unpack . unHaskellName . fieldHaskell) entFields
-- | Apply the given list of functions to the same @EntityDef@s and
-- concatenate all the declarations they produce.
--
-- This function is useful for cases such as:
--
-- >>> share [mkSave "myDefs", mkPersist sqlSettings] [persistLowerCase|...|]
share :: [[EntityDef] -> Q [Dec]] -> [EntityDef] -> Q [Dec]
share actions defs = fmap mconcat (mapM (\mk -> mk defs) actions)
-- | Save the @EntityDef@s passed in under the given name.
--
-- Generates a top-level binding @name :: [EntityDef]@ whose body is the
-- lifted list of definitions, so they can be reused at runtime (e.g. by
-- 'mkMigrate' in another module).
mkSave :: String -> [EntityDef] -> Q [Dec]
mkSave name' defs' = do
    let name = mkName name'
    defs <- lift defs'
    return [ SigD name $ ListT `AppT` ConT ''EntityDef
           , FunD name [normalClause [] defs]
           ]
-- | A foreign-key dependency edge, used by 'mkDeleteCascade' to find
-- every field in the schema that references a given entity.
data Dep = Dep
    { depTarget :: HaskellName      -- ^ entity being referenced
    , depSourceTable :: HaskellName -- ^ entity holding the reference
    , depSourceField :: HaskellName -- ^ the referencing field
    , depSourceNull :: IsNullable   -- ^ whether the reference is nullable
    }
-- | Generate a 'DeleteCascade' instance for the given @EntityDef@s.
--
-- For each entity, every field anywhere in the schema that references it
-- becomes a 'deleteCascadeWhere' statement (recursively cascading), and
-- the final statement deletes the record itself.
mkDeleteCascade :: MkPersistSettings -> [EntityDef] -> Q [Dec]
mkDeleteCascade mps defs = do
    let deps = concatMap getDeps defs
    mapM (go deps) defs
  where
    -- collect one Dep per foreign-referencing field of an entity
    getDeps :: EntityDef -> [Dep]
    getDeps def =
        concatMap getDeps' $ entityFields $ fixEntityDef def
      where
        getDeps' :: FieldDef -> [Dep]
        getDeps' field@FieldDef {..} =
            case foreignReference field of
                Just name ->
                     return Dep
                        { depTarget = name
                        , depSourceTable = entityHaskell def
                        , depSourceField = fieldHaskell
                        , depSourceNull  = nullable fieldAttrs
                        }
                Nothing -> []
    go :: [Dep] -> EntityDef -> Q Dec
    go allDeps EntityDef{entityHaskell = name} = do
        let deps = filter (\x -> depTarget x == name) allDeps
        key <- newName "key"
        let del = VarE 'delete
        let dcw = VarE 'deleteCascadeWhere
        just <- [|Just|]
        filt <- [|Filter|]
        eq <- [|Eq|]
        left <- [|Left|]
        let mkStmt :: Dep -> Stmt
            mkStmt dep = NoBindS
                $ dcw `AppE`
                  ListE
                    [ filt `AppE` ConE filtName
                           `AppE` (left `AppE` val (depSourceNull dep))
                           `AppE` eq
                    ]
              where
                filtName = filterConName' mps (depSourceTable dep) (depSourceField dep)
                -- nullable references compare against (Just key)
                val (Nullable ByMaybeAttr) = just `AppE` VarE key
                val _                      =                VarE key
        let stmts :: [Stmt]
            stmts = map mkStmt deps `mappend`
                    [NoBindS $ del `AppE` VarE key]
        let entityT = genericDataType mps name backendT
        return $
            InstanceD
            [ ClassP ''PersistQuery [backendT]
            , EqualP (ConT ''PersistEntityBackend `AppT` entityT) backendT
            ]
            (ConT ''DeleteCascade `AppT` entityT `AppT` backendT)
            [ FunD 'deleteCascade
                [normalClause [VarP key] (DoE stmts)]
            ]
-- | Generate the 'persistUniqueKeys' method: destructure the record and
-- build one unique-key value per declared constraint.  Sum entities
-- cannot have unique keys, so they get a constant empty list.
mkUniqueKeys :: EntityDef -> Q Dec
mkUniqueKeys def | entitySum def =
    return $ FunD 'persistUniqueKeys [normalClause [WildP] (ListE [])]
mkUniqueKeys def = do
    c <- clause
    return $ FunD 'persistUniqueKeys [c]
  where
    clause = do
        -- bind every record field so each constraint can pick the ones
        -- it needs by name
        xs <- forM (entityFields def) $ \fd -> do
            let x = fieldHaskell fd
            x' <- newName $ '_' : unpack (unHaskellName x)
            return (x, x')
        let pcs = map (go xs) $ entityUniques def
        let pat = ConP
                (mkName $ unpack $ unHaskellName $ entityHaskell def)
                (map (VarP . snd) xs)
        return $ normalClause [pat] (ListE pcs)

    go :: [(HaskellName, Name)] -> UniqueDef -> Exp
    go xs (UniqueDef name _ cols _) =
        foldl' (go' xs) (ConE (mkName $ unpack $ unHaskellName name)) (map fst cols)

    -- lookup cannot fail for well-formed definitions: every constraint
    -- column is an entity field
    go' :: [(HaskellName, Name)] -> Exp -> HaskellName -> Exp
    go' xs front col =
        let Just col' = lookup col xs
         in front `AppE` VarE col'
-- | A constant 'sqlType' method definition ignoring its argument.
sqlTypeFunD :: Exp -> Dec
sqlTypeFunD st = FunD 'sqlType
                [ normalClause [WildP] st ]
-- | Build an instance declaration for the given class and type, adding a
-- @PersistStore backend@ constraint when generating generic code.
typeInstanceD :: Name
              -> Bool -- ^ include PersistStore backend constraint
              -> Type -> [Dec] -> Dec
typeInstanceD clazz hasBackend typ =
    InstanceD ctx (ConT clazz `AppT` typ)
  where
    ctx
        | hasBackend = [ClassP ''PersistStore [backendT]]
        | otherwise = []
-- | 'typeInstanceD' specialised to 'PersistField'.
persistFieldInstanceD :: Bool -- ^ include PersistStore backend constraint
                      -> Type -> [Dec] -> Dec
persistFieldInstanceD = typeInstanceD ''PersistField
-- | 'typeInstanceD' specialised to 'PersistFieldSql'.
persistFieldSqlInstanceD :: Bool -- ^ include PersistStore backend constraint
                         -> Type -> [Dec] -> Dec
persistFieldSqlInstanceD = typeInstanceD ''PersistFieldSql
-- | Automatically creates a valid 'PersistField' instance for any datatype
-- that has valid 'Show' and 'Read' instances. Can be very convenient for
-- 'Enum' types.
--
-- Values are stored as 'PersistText' of their 'show' output and parsed
-- back with 'reads'; the SQL column type is string.
derivePersistField :: String -> Q [Dec]
derivePersistField s = do
    ss <- [|SqlString|]
    tpv <- [|PersistText . pack . show|]
    -- dt is the type name, baked in below, used only for error messages
    fpv <- [|\dt v ->
                case fromPersistValue v of
                    Left e -> Left e
                    Right s' ->
                        case reads $ unpack s' of
                            (x, _):_ -> Right x
                            [] -> Left $ pack "Invalid " ++ pack dt ++ pack ": " ++ s'|]
    return
        [ persistFieldInstanceD False (ConT $ mkName s)
            [ FunD 'toPersistValue
                [ normalClause [] tpv
                ]
            , FunD 'fromPersistValue
                [ normalClause [] (fpv `AppE` LitE (StringL s))
                ]
            ]
        , persistFieldSqlInstanceD False (ConT $ mkName s)
            [ sqlTypeFunD ss
            ]
        ]
-- | Automatically creates a valid 'PersistField' instance for any datatype
-- that has valid 'ToJSON' and 'FromJSON' instances. For a datatype @T@ it
-- generates instances similar to these:
--
-- @
--    instance PersistField T where
--        toPersistValue = PersistByteString . L.toStrict . encode
--        fromPersistValue = (left T.pack) . eitherDecodeStrict' <=< fromPersistValue
--    instance PersistFieldSql T where
--        sqlType _ = SqlString
-- @
derivePersistFieldJSON :: String -> Q [Dec]
derivePersistFieldJSON s = do
    ss <- [|SqlString|]
    tpv <- [|PersistText . toJsonText|]
    -- dt is the type name, baked in below, used only for error messages
    fpv <- [|\dt v -> do
                text <- fromPersistValue v
                let bs' = TE.encodeUtf8 text
                case eitherDecodeStrict' bs' of
                    Left e -> Left $ pack "JSON decoding error for " ++ pack dt ++ pack ": " ++ pack e ++ pack ". On Input: " ++ decodeUtf8 bs'
                    Right x -> Right x|]
    return
        [ persistFieldInstanceD False (ConT $ mkName s)
            [ FunD 'toPersistValue
                [ normalClause [] tpv
                ]
            , FunD 'fromPersistValue
                [ normalClause [] (fpv `AppE` LitE (StringL s))
                ]
            ]
        , persistFieldSqlInstanceD False (ConT $ mkName s)
            [ sqlTypeFunD ss
            ]
        ]
-- | Creates a single function to perform all migrations for the entities
-- defined here. One thing to be aware of is dependencies: if you have entities
-- with foreign references, make sure to place those definitions after the
-- entities they reference.
--
-- Entities carrying the @no-migrate@ attribute are excluded.  The body
-- binds the full definition list once in a @let@ and then runs 'migrate'
-- for each entity against it.
mkMigrate :: String -> [EntityDef] -> Q [Dec]
mkMigrate fun allDefs = do
    body' <- body
    return
        [ SigD (mkName fun) typ
        , FunD (mkName fun) [normalClause [] body']
        ]
  where
    defs = filter isMigrated allDefs
    isMigrated def = not $ "no-migrate" `elem` entityAttrs def
    typ = ConT ''Migration
    body :: Q Exp
    body =
        case defs of
            [] -> [|return ()|]
            _  -> do
              defsName <- newName "defs"
              defsStmt <- do
                defs' <- mapM lift defs
                let defsExp = ListE defs'
                return $ LetS [ValD (VarP defsName) (NormalB defsExp) []]
              stmts <- mapM (toStmt $ VarE defsName) defs
              return (DoE $ defsStmt : stmts)
    toStmt :: Exp -> EntityDef -> Q Stmt
    toStmt defsExp ed = do
        u <- lift ed
        m <- [|migrate|]
        return $ NoBindS $ m `AppE` defsExp `AppE` u
-- | Lift an 'EntityDef' into a TH expression.  RecordWildCards brings
-- every field into scope; each one is lifted recursively by the quote.
instance Lift EntityDef where
    lift EntityDef{..} =
        [|EntityDef
            entityHaskell
            entityDB
            entityId
            entityAttrs
            entityFields
            entityUniques
            entityForeigns
            entityDerives
            entityExtra
            entitySum
            |]
-- Mechanical 'Lift' instances for the remaining schema-description
-- types: each constructor is re-applied inside a quote so its fields are
-- lifted recursively.
instance Lift FieldDef where
    lift (FieldDef a b c d e f g) = [|FieldDef a b c d e f g|]
instance Lift UniqueDef where
    lift (UniqueDef a b c d) = [|UniqueDef a b c d|]
instance Lift CompositeDef where
    lift (CompositeDef a b) = [|CompositeDef a b|]
instance Lift ForeignDef where
    lift (ForeignDef a b c d e f g) = [|ForeignDef a b c d e f g|]
-- | A hack to avoid orphans.
class Lift' a where
    lift' :: a -> Q Exp
instance Lift' Text where
    lift' = liftT
instance Lift' a => Lift' [a] where
    lift' xs = do { xs' <- mapM lift' xs; return (ListE xs') }
instance (Lift' k, Lift' v) => Lift' (M.Map k v) where
    lift' m = [|M.fromList $(fmap ListE $ mapM liftPair $ M.toList m)|]
-- auto-lifting, means instances are overlapping
instance Lift' a => Lift a where
    lift = lift'
-- | Monomorphic 'pack' used inside quotes so lifted strings become Text.
packPTH :: String -> Text
packPTH = pack
#if !MIN_VERSION_text(0, 11, 2)
{-# NOINLINE packPTH #-}
#endif
liftT :: Text -> Q Exp
liftT t = [|packPTH $(lift (unpack t))|]
liftPair :: (Lift' k, Lift' v) => (k, v) -> Q Exp
liftPair (k, v) = [|($(lift' k), $(lift' v))|]
instance Lift HaskellName where
    lift (HaskellName t) = [|HaskellName t|]
instance Lift DBName where
    lift (DBName t) = [|DBName t|]
instance Lift FieldType where
    lift (FTTypeCon Nothing t)  = [|FTTypeCon Nothing t|]
    lift (FTTypeCon (Just x) t) = [|FTTypeCon (Just x) t|]
    lift (FTApp x y) = [|FTApp x y|]
    lift (FTList x) = [|FTList x|]
instance Lift PersistFilter where
    lift Eq = [|Eq|]
    lift Ne = [|Ne|]
    lift Gt = [|Gt|]
    lift Lt = [|Lt|]
    lift Ge = [|Ge|]
    lift Le = [|Le|]
    lift In = [|In|]
    lift NotIn = [|NotIn|]
    lift (BackendSpecificFilter x) = [|BackendSpecificFilter x|]
instance Lift PersistUpdate where
    lift Assign = [|Assign|]
    lift Add = [|Add|]
    lift Subtract = [|Subtract|]
    lift Multiply = [|Multiply|]
    lift Divide = [|Divide|]
    lift (BackendSpecificUpdate x) = [|BackendSpecificUpdate x|]
instance Lift SqlType where
    lift SqlString = [|SqlString|]
    lift SqlInt32 = [|SqlInt32|]
    lift SqlInt64 = [|SqlInt64|]
    lift SqlReal = [|SqlReal|]
    -- go through Integer so the quoted literals are unambiguous, then
    -- rebuild the original (likely Word32) values with fromInteger
    lift (SqlNumeric x y) =
        [|SqlNumeric (fromInteger x') (fromInteger y')|]
      where
        x' = fromIntegral x :: Integer
        y' = fromIntegral y :: Integer
    lift SqlBool = [|SqlBool|]
    lift SqlDay = [|SqlDay|]
    lift SqlTime = [|SqlTime|]
    lift SqlDayTime = [|SqlDayTime|]
    lift SqlBlob = [|SqlBlob|]
    lift (SqlOther a) = [|SqlOther a|]
-- Ent
--   fieldName FieldType
--
-- forall . typ ~ FieldType => EntFieldName
--
-- EntFieldName = FieldDef ....
-- | Build one @EntityField@ constructor (with its @typ ~ FieldType@
-- equality constraint) and the matching 'persistFieldDef' clause that
-- returns the lifted 'FieldDef'.
mkField :: MkPersistSettings -> EntityDef -> FieldDef -> Q (Con, Clause)
mkField mps et cd = do
    let con = ForallC
                []
                [EqualP (VarT $ mkName "typ") $ maybeIdType mps cd Nothing Nothing]
                $ NormalC name []
    bod <- lift cd
    let cla = normalClause
                [ConP name []]
                bod
    return (con, cla)
  where
    name = filterConName mps et cd
-- | True when the field was declared nullable via a @Maybe@ attribute.
maybeNullable :: FieldDef -> Bool
maybeNullable fd = nullable (fieldAttrs fd) == Nullable ByMaybeAttr
-- | Name of the @EntityField@ constructor for a field of an entity.
filterConName :: MkPersistSettings
              -> EntityDef
              -> FieldDef
              -> Name
filterConName mps entity field = filterConName' mps (entityHaskell entity) (fieldHaskell field)
-- | As 'filterConName', but working on raw names.  The entity name is
-- prefixed when field prefixing is enabled, and always for the special
-- @Id@ field (so it reads e.g. @PersonId@).
filterConName' :: MkPersistSettings
               -> HaskellName -- ^ table
               -> HaskellName -- ^ field
               -> Name
filterConName' mps entity field = mkName $ unpack $ concat
    [ if mpsPrefixFields mps || field == HaskellName "Id"
        then unHaskellName entity
        else ""
    , upperFirst $ unHaskellName field
    ]
-- | Translate a parsed 'FieldType' into a TH 'Type': plain and
-- module-qualified constructors, applications, and list types.
ftToType :: FieldType -> Type
ftToType (FTTypeCon Nothing t) = ConT $ mkName $ unpack t
ftToType (FTTypeCon (Just m) t) = ConT $ mkName $ unpack $ concat [m, ".", t]
ftToType (FTApp x y) = ftToType x `AppT` ftToType y
ftToType (FTList x) = ListT `AppT` ftToType x
-- | 'Text' append; intentionally shadows the Prelude's list @(++)@
-- within this module so string-ish code reads naturally.
infixr 5 ++
(++) :: Text -> Text -> Text
(++) = append
-- | Generate 'ToJSON'/'FromJSON' instances for entities carrying the
-- @json@ attribute.  'toJSON' serialises each field under its Haskell
-- name; 'parseJSON' rebuilds the record applicatively, using @.:?@ for
-- Maybe-nullable fields and @.:@ otherwise.  When 'mpsEntityJSON' is
-- set, instances for @Entity a@ are generated as well.
mkJSON :: MkPersistSettings -> EntityDef -> Q [Dec]
mkJSON _ def | not ("json" `elem` entityAttrs def) = return []
mkJSON mps def = do
    pureE <- [|pure|]
    apE' <- [|(<*>)|]
    packE <- [|pack|]
    dotEqualE <- [|(.=)|]
    dotColonE <- [|(.:)|]
    dotColonQE <- [|(.:?)|]
    objectE <- [|object|]
    obj <- newName "obj"
    mzeroE <- [|mzero|]

    -- one variable per entity field, named after the field
    xs <- mapM (newName . unpack . unHaskellName . fieldHaskell)
        $ entityFields def

    let conName = mkName $ unpack $ unHaskellName $ entityHaskell def
        typ = genericDataType mps (entityHaskell def) backendT
        toJSONI = typeInstanceD ''ToJSON (mpsGeneric mps) typ [toJSON']
        toJSON' = FunD 'toJSON $ return $ normalClause
            [ConP conName $ map VarP xs]
            (objectE `AppE` ListE pairs)
        pairs = zipWith toPair (entityFields def) xs
        toPair f x = InfixE
            (Just (packE `AppE` LitE (StringL $ unpack $ unHaskellName $ fieldHaskell f)))
            dotEqualE
            (Just $ VarE x)
        fromJSONI = typeInstanceD ''FromJSON (mpsGeneric mps) typ [parseJSON']
        parseJSON' = FunD 'parseJSON
            [ normalClause [ConP 'Object [VarP obj]]
                (foldl'
                    (\x y -> InfixE (Just x) apE' (Just y))
                    (pureE `AppE` ConE conName)
                    pulls
                )
            -- non-Object values fail the parse
            , normalClause [WildP] mzeroE
            ]
        pulls = map toPull $ entityFields def
        toPull f = InfixE
            (Just $ VarE obj)
            (if maybeNullable f then dotColonQE else dotColonE)
            (Just $ AppE packE $ LitE $ StringL $ unpack $ unHaskellName $ fieldHaskell f)
    case mpsEntityJSON mps of
        Nothing -> return [toJSONI, fromJSONI]
        Just entityJSON -> do
            entityJSONIs <- if mpsGeneric mps
              then [d|
                #if MIN_VERSION_base(4, 6, 0)
                instance PersistStore backend => ToJSON (Entity $(pure typ)) where
                    toJSON = $(varE (entityToJSON entityJSON))
                instance PersistStore backend => FromJSON (Entity $(pure typ)) where
                    parseJSON = $(varE (entityFromJSON entityJSON))
                #endif
                |]
              else [d|
                instance ToJSON (Entity $(pure typ)) where
                    toJSON = $(varE (entityToJSON entityJSON))
                instance FromJSON (Entity $(pure typ)) where
                    parseJSON = $(varE (entityFromJSON entityJSON))
                |]
            return $ toJSONI : fromJSONI : entityJSONIs
-- entityUpdates :: EntityDef -> [(HaskellName, FieldType, IsNullable, PersistUpdate)]
-- entityUpdates =
-- concatMap go . entityFields
-- where
-- go FieldDef {..} = map (\a -> (fieldHaskell, fieldType, nullable fieldAttrs, a)) [minBound..maxBound]
-- mkToUpdate :: String -> [(String, PersistUpdate)] -> Q Dec
-- mkToUpdate name pairs = do
-- pairs' <- mapM go pairs
-- return $ FunD (mkName name) $ degen pairs'
-- where
-- go (constr, pu) = do
-- pu' <- lift pu
-- return $ normalClause [RecP (mkName constr) []] pu'
-- mkToFieldName :: String -> [(String, String)] -> Dec
-- mkToFieldName func pairs =
-- FunD (mkName func) $ degen $ map go pairs
-- where
-- go (constr, name) =
-- normalClause [RecP (mkName constr) []] (LitE $ StringL name)
-- mkToValue :: String -> [String] -> Dec
-- mkToValue func = FunD (mkName func) . degen . map go
-- where
-- go constr =
-- let x = mkName "x"
-- in normalClause [ConP (mkName constr) [VarP x]]
-- (VarE 'toPersistValue `AppE` VarE x)
| junjihashimoto/persistent | persistent-template/Database/Persist/TH.hs | mit | 55,662 | 0 | 21 | 16,439 | 14,030 | 7,366 | 6,664 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude, CPP, OverloadedStrings, DoAndIfThenElse, FlexibleContexts #-}
{- |
Description: Generates inspections when asked for by the frontend.
-}
module IHaskell.Eval.Inspect (inspect) where
import IHaskellPrelude
import qualified Prelude as P
import Data.List.Split (splitOn)
#if MIN_VERSION_ghc(9,0,0)
import qualified Control.Monad.Catch as MC
#else
import Exception (ghandle)
#endif
import IHaskell.Eval.Evaluate (Interpreter)
import IHaskell.Display
import IHaskell.Eval.Util (getType)
-- | Characters used in Haskell operators.
operatorChars :: String
operatorChars = "!#$%&*+./<=>?@\\^|-~:"

-- | Whitespace characters.
whitespace :: String
whitespace = " \t\n"

-- | Compute the identifier that is being queried.
--
-- Fix: the previous implementation split the input with
-- @splitOn whitespace@, which splits on the literal three-character
-- sequence @\" \\t\\n\"@ rather than on any whitespace character, so a
-- multi-word cell was never split and the whole line was returned.  We
-- now use 'words', which splits on any whitespace.  This also removes
-- the partial @last@ on empty input (empty input now yields @\"\"@
-- instead of crashing).  Operators are wrapped in parentheses so they
-- form a valid expression for @:type@-style queries.
getIdentifier :: String -> Int -> String
getIdentifier code _pos =
  case reverse (words code) of
    [] -> ""
    (lastChunk:_) ->
      if all (`elem` operatorChars) lastChunk
        then "(" ++ lastChunk ++ ")"
        else lastChunk
-- | Look up type information for the identifier at the cursor and format
-- it as @name :: type@ for the frontend.  Any exception raised while
-- querying GHC (e.g. an unknown identifier) yields 'Nothing'.
inspect :: String -- ^ Code in the cell
        -> Int -- ^ Cursor position in the cell
        -> Interpreter (Maybe Display)
inspect code pos = do
  let identifier = getIdentifier code pos
      handler :: SomeException -> Interpreter (Maybe a)
      handler _ = return Nothing
#if MIN_VERSION_ghc(9,0,0)
  response <- MC.handle handler (Just <$> getType identifier)
#else
  response <- ghandle handler (Just <$> getType identifier)
#endif
  let prefix = identifier ++ " :: "
      fmt str = Display [plain $ prefix ++ str]
  return $ fmt <$> response
| gibiansky/IHaskell | src/IHaskell/Eval/Inspect.hs | mit | 1,659 | 0 | 13 | 370 | 326 | 184 | 142 | 32 | 2 |
-- | Copyright 2013-2015 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
-- /Description/
-- This module defines unit of measurement for metric reports.
--
{-# OPTIONS -fno-warn-missing-signatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
module Borel.Types.UOM
( -- * Unit of measurement
UOM(..), BaseUOM(..), Prefix(..)
-- * Convenient constructors
, sec, nanosec, byte, megabyte, gigabyte
, countCPU, countVCPU, countInstance, countIP
-- * Conversions
, convert, tryConvert
, nanosecToSec, byteToGigabyte, nanosecToHour
-- * Utilities
, pUOM, pPrefixUOM, pBaseUOM
, flattenUOM, mapUOM
) where
import Control.Applicative
import Control.Error.Util
import Control.Lens (Prism', preview, prism', re, review,
(^.), (^?))
import Control.Monad
import Data.Aeson
import qualified Data.Attoparsec.Text as AT
import Data.Csv (FromField, ToField, parseField, toField)
import Data.Maybe
import Data.Monoid
import Data.MultiSet (MultiSet)
import qualified Data.MultiSet as S
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import Data.Word
--------------------------------------------------------------------------------
-- * Our UOM parser is only total for UOMs made up of
-- these components. This is because the format we need
-- to work with is restrictive.
-- Dimensionless resource counts.
countCPU = UOM Base CPU
countVCPU = UOM Base VCPU
countInstance = UOM Base Instance
countIP = UOM Base IPAddress
-- Time units.
nanosec = UOM Nano Second
sec = UOM Base Second
-- Data-size units.
byte = UOM Base Byte
megabyte = UOM Mega Byte
gigabyte = UOM Giga Byte
-- Used by 'nanosecToHour'; not in the module's export list.
hour = UOM Base Hour
--------------------------------------------------------------------------------
-- | A unit of measurement: a prefixed base unit, or a product of two
-- units (rendered joined by @-@ by 'pUOM').
data UOM
  = UOM Prefix BaseUOM
  | Times UOM UOM
  deriving (Eq, Ord)

-- | Metric/binary prefixes understood by the parser and the
-- conversion machinery ('Weighed').
data Prefix
  = Base -- ^ no prefix
  | Giga
  | Nano
  | Mebi
  | Mega
  deriving (Eq, Ord, Enum, Bounded)

-- | The base units Borel knows about.
data BaseUOM
  = Second
  | Hour
  | Byte
  | Instance
  | IPAddress
  | CPU
  | VCPU
  deriving (Eq, Ord, Enum, Bounded)
-- | Prism between a base unit and its textual rendering.  Rendering is
-- total; parsing succeeds only on the exact strings listed here.
pBaseUOM :: Prism' Text BaseUOM
pBaseUOM = prism' render match
  where
    -- Single source of truth for both directions.
    table :: [(BaseUOM, Text)]
    table =
      [ (Second, "s"), (Hour, "h"), (Byte, "B"), (Instance, "instance")
      , (IPAddress, "ip"), (CPU, "cpu"), (VCPU, "vcpu") ]
    render b = fromMaybe "" (lookup b table)
    match t = lookup t [ (txt, b) | (b, txt) <- table ]
-- | Prism between a prefix and its textual rendering; 'Base' renders
-- as the empty string.
pPrefixUOM :: Prism' Text Prefix
pPrefixUOM = prism' render match
  where
    -- Single source of truth for both directions.
    table :: [(Prefix, Text)]
    table =
      [ (Base, ""), (Giga, "G"), (Nano, "n"), (Mebi, "Mi"), (Mega, "M") ]
    render p = fromMaybe "" (lookup p table)
    match t = lookup t [ (txt, p) | (p, txt) <- table ]
-- | Prism between a (possibly compound) 'UOM' and its wire format:
-- components rendered via 'pPrefixUOM' / 'pBaseUOM' and joined by
-- @\"-\"@.  Parsing only accepts the prefixes and base units spelled
-- out below; @Times@ chains are rebuilt left-associatively.
pUOM :: Prism' Text UOM
pUOM = prism' pretty parse
  where dash = "-"
        pretty (u `Times` v) = (u ^. re pUOM) <> dash <> (v ^. re pUOM)
        pretty (UOM p b) = (p ^. re pPrefixUOM) <> (b ^. re pBaseUOM)
        -- treat UOM @Times@ as left-associative.
        parse = hush . AT.parseOnly (parser <* AT.endOfInput)
        parser = do
          uoms <- puom `AT.sepBy` AT.string dash
          case uoms of [] -> mzero
                       -- construct left-associative UOMs
                       (u:us) -> return $ foldl Times u us
        -- One prefixed base unit; a missing prefix means 'Base'.
        puom = do
          pre <- AT.option (Just Base) ppre
          base <- pbase
          case (pre, base) of (Just p, Just b) -> return $ UOM p b
                              _ -> mzero
        -- NOTE: alternative order matters, "Mi" must be tried
        -- before "M" or it would never match.
        ppre = (preview pPrefixUOM <$> AT.string "G")
           <|> (preview pPrefixUOM <$> AT.string "n")
           <|> (preview pPrefixUOM <$> AT.string "Mi")
           <|> (preview pPrefixUOM <$> AT.string "M")
        pbase = (preview pBaseUOM <$> AT.string "s")
            <|> (preview pBaseUOM <$> AT.string "h")
            <|> (preview pBaseUOM <$> AT.string "B")
            <|> (preview pBaseUOM <$> AT.string "instance")
            <|> (preview pBaseUOM <$> AT.string "ip")
            <|> (preview pBaseUOM <$> AT.string "cpu")
            <|> (preview pBaseUOM <$> AT.string "vcpu")
-- All textual instances round-trip through the prisms above, so the
-- Show/Read/JSON/CSV representations agree with each other.
instance Show BaseUOM where
  show = T.unpack . review pBaseUOM
instance Show Prefix where
  show = T.unpack . review pPrefixUOM
instance Show UOM where
  show = T.unpack . review pUOM
instance Read UOM where
  -- Parses the whole input or fails; never consumes a partial prefix.
  readsPrec _ (T.pack -> x) = maybe [] (pure . (,"")) $ x ^? pUOM
instance FromJSON UOM where
  parseJSON (String t) = maybe mzero return $ t ^? pUOM
  parseJSON _ = mzero
instance ToJSON UOM where
  toJSON x = String $ x ^. re pUOM
instance FromField UOM where
  parseField (E.decodeUtf8 -> t) = maybe mzero return $ t ^? pUOM
instance ToField UOM where
  toField x = E.encodeUtf8 $ x ^. re pUOM
--------------------------------------------------------------------------------
-- | Rewrite every occurrence of one unit inside a (possibly compound)
-- UOM and convert the attached value accordingly.  If the resulting
-- unit is not dimensionally compatible the value is left untouched
-- (see 'tryConvert').  Private helper deduplicating the three public
-- conversions below, which previously repeated this logic verbatim.
replaceUOM :: UOM -> UOM -> (UOM, Word64) -> (UOM, Word64)
replaceUOM from to (old, v) =
  let new = mapUOM swap' old
  in  (new, tryConvert old new v)
  where swap' p b | UOM p b == from = to
                  | otherwise       = UOM p b

-- | Convert nanoseconds to seconds, adjusting the value.
nanosecToSec :: (UOM, Word64) -> (UOM, Word64)
nanosecToSec = replaceUOM nanosec sec

-- | Convert bytes to gigabytes, adjusting the value.
byteToGigabyte :: (UOM, Word64) -> (UOM, Word64)
byteToGigabyte = replaceUOM byte gigabyte

-- | Convert nanoseconds to hours, adjusting the value.
nanosecToHour :: (UOM, Word64) -> (UOM, Word64)
nanosecToHour = replaceUOM nanosec hour
-- Not a functor: the supplied function may replace a prefixed leaf
-- with an arbitrary UOM, so structure is not necessarily preserved.
mapUOM :: (Prefix -> BaseUOM -> UOM) -> UOM -> UOM
mapUOM f (UOM p b) = f p b
mapUOM f (Times x y) = Times (mapUOM f x) (mapUOM f y)

-- | All leaf (non-'Times') units of a UOM, left to right.
flattenUOM :: UOM -> [UOM]
flattenUOM x@(UOM _ _) = [x]
flattenUOM (Times x y) = flattenUOM x ++ flattenUOM y
-- | Numeric weight of a unit relative to its unprefixed base; the
-- ratio of two weights gives the conversion factor used by 'convert'.
class Weighed a where
  weigh :: a -> Double
instance Weighed Prefix where
  weigh Base = 1
  weigh Giga = 10^^(9 :: Int)
  weigh Nano = 10^^(-9 :: Int)
  weigh Mebi = 1024^^(2 :: Int)
  weigh Mega = 10^^(6 :: Int)
instance Weighed BaseUOM where
  -- Hours weigh 3600 so time units convert through seconds.
  weigh Hour = 60 * 60
  weigh _ = 1
instance Weighed UOM where
  -- The weight of a compound unit is the product of its parts.
  weigh (UOM p b) = weigh p * weigh b
  weigh (a `Times` b) = weigh a * weigh b
-- | The physical dimension of a base unit.  Two UOMs are convertible
-- exactly when their multisets of dimensions coincide (see 'convert').
data Dimension
  = CTime
  | CData
  | CInstance
  | CIPAddress
  | CCPU
  | CVCPU
  deriving (Eq, Ord)

-- | Dimension of each base unit; 'Second' and 'Hour' share 'CTime'.
dimension :: BaseUOM -> Dimension
dimension Second = CTime
dimension Hour = CTime
dimension Byte = CData
dimension Instance = CInstance
dimension IPAddress = CIPAddress
dimension CPU = CCPU
dimension VCPU = CVCPU

-- | Multiset of dimensions of a (possibly compound) UOM; the order of
-- 'Times' factors is deliberately irrelevant.
dimensions :: UOM -> MultiSet Dimension
dimensions (UOM _ b) = S.singleton $ dimension b
dimensions (a `Times` b) = dimensions a <> dimensions b
-- | A conversion function between two UOMs, if they are dimensionally
-- compatible; 'Nothing' otherwise.  The factor is the ratio of the
-- units' weights and the result is truncated ('floor') into 'Word64'.
convert :: UOM -> UOM -> Maybe (Word64 -> Word64)
convert old new
  | dimensions old == dimensions new
  = let factor = weigh old / weigh new
    in Just (floor . (*) (toRational factor) . toRational)
  | otherwise = Nothing

-- | Like 'convert', but leaves the value unchanged when the units are
-- not compatible.
tryConvert :: UOM -> UOM -> Word64 -> Word64
tryConvert old new = fromMaybe id $ convert old new
| anchor/borel | lib/Borel/Types/UOM.hs | mit | 7,799 | 0 | 16 | 2,414 | 2,461 | 1,289 | 1,172 | 197 | 14 |
module ListSort where
import Data.Ord
import Data.List as L
-- | Sort a list of lists by length, shortest first (stable).
listSort :: [[a]] -> [[a]]
listSort = L.sortOn L.length
| lpenz/realworldhaskell-exercises | ch03/ListSort.hs | mit | 129 | 0 | 7 | 23 | 51 | 31 | 20 | 5 | 1 |
module CaseFirstExamples where
-- | Judge a balance: strictly more than 1000 is great.
verdictOfMoneyBalance :: (Num a, Ord a) => a -> [Char]
verdictOfMoneyBalance balance
  | balance > 1000 = "Great!"
  | otherwise      = "Not so great."
-- | Say whether a string reads the same forwards and backwards.
tellIsItPalindrome :: [Char] -> [Char]
tellIsItPalindrome xs =
  if xs == reverse xs
    then "It is palindrome."
    else "No, it is not palindrome."
-- functionC x y = if (x > y) then x else y
-- | Larger of two values; on ties the second argument is returned,
-- exactly as in the @if (x > y)@ formulation above.
functionC :: Ord a => a -> a -> a
functionC x y
  | x > y     = x
  | otherwise = y
-- ifEvenAdd2 x = if ( even x ) then x 2 else x
-- | Add 2 to even numbers; leave odd numbers unchanged (guard form).
ifEvenAdd2First :: Integral a => a -> a
ifEvenAdd2First n
  | even n    = n + 2
  | otherwise = n
-- | Add 2 to even numbers; leave odd numbers unchanged.
ifEvenAdd2Second :: Integral a => a -> a
ifEvenAdd2Second n = if even n then n + 2 else n
| Lyapunov/haskell-programming-from-first-principles | chapter_7/case_first.hs | mit | 767 | 0 | 9 | 209 | 259 | 136 | 123 | 24 | 2 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.HTMLBodyElement (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.HTMLBodyElement
#else
module Graphics.UI.Gtk.WebKit.DOM.HTMLBodyElement
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.HTMLBodyElement
#else
import Graphics.UI.Gtk.WebKit.DOM.HTMLBodyElement
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/HTMLBodyElement.hs | mit | 460 | 0 | 5 | 39 | 33 | 26 | 7 | 4 | 0 |
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) 2014 Matthew Pickering <matthewtpickering@gmail.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Parses MathML in conformance with the MathML3 specification.
Unimplemented features:
- mpadded
- malignmark
- maligngroup
- Elementary Math
To Improve:
- Handling of menclose
- Handling of mstyle
-}
module Text.TeXMath.Readers.MathML (readMathML) where
import Text.XML.Light hiding (onlyText)
import Text.TeXMath.Types
import Text.TeXMath.Readers.MathML.MMLDict (getMathMLOperator)
import Text.TeXMath.Readers.MathML.EntityMap (getUnicode)
import Text.TeXMath.Shared (getTextType, readLength, getOperator, fixTree,
getSpaceWidth, isEmpty, empty)
import Text.TeXMath.Unicode.ToTeX (getSymbolType)
import Text.TeXMath.Unicode.ToUnicode (fromUnicode)
import Control.Monad.Except (throwError, Except, runExcept, MonadError)
import Control.Arrow ((&&&))
import Data.Char (toLower)
import Data.Maybe (fromMaybe, listToMaybe, isJust)
import Data.Monoid (First(..), getFirst)
import Data.List (transpose)
import Control.Applicative ((<|>))
import qualified Data.Text as T
import Control.Monad (filterM, guard)
import Control.Monad.Reader (ReaderT, runReaderT, asks, local)
import Data.Either (rights)
-- | Parse a MathML expression to a list of 'Exp'.
-- The input must be a well-formed XML document whose root is a
-- @math@ element; each parsed expression is post-processed with
-- 'fixTree'.
readMathML :: T.Text -> Either T.Text [Exp]
readMathML inp = map fixTree <$>
  (runExcept (flip runReaderT defaultState (i >>= parseMathML)))
  where
    -- Fail early with a generic message if the input is not even XML.
    i = maybeToEither "Invalid XML" (parseXMLDoc inp)
data MMLState = MMLState { attrs :: [Attr]
, position :: Maybe FormType
, inAccent :: Bool
, curStyle :: TextType }
type MML = ReaderT MMLState (Except T.Text)
data SupOrSub = Sub | Sup deriving (Show, Eq)
data IR a = Stretchy TeXSymbolType (T.Text -> Exp) T.Text
| Trailing (Exp -> Exp -> Exp) Exp
| E a
instance Show a => Show (IR a) where
show (Stretchy t _ s) = "Stretchy " ++ show t ++ " " ++ show s
show (Trailing _ s) = "Trailing " ++ show s
show (E s) = "E " ++ show s
parseMathML :: Element -> MML [Exp]
parseMathML e@(name -> "math") = do
e' <- row e
return $
case e' of
EGrouped es -> es
_ -> [e']
parseMathML _ = throwError "Root must be math element"
expr :: Element -> MML [IR Exp]
expr e = local (addAttrs (elAttribs e)) (expr' e)
expr' :: Element -> MML [IR Exp]
expr' e =
case name e of
"mi" -> mkE <$> ident e
"mn" -> mkE <$> number e
"mo" -> (:[]) <$> op e
"mtext" -> mkE <$> text e
"ms" -> mkE <$> literal e
"mspace" -> mkE <$> space e
"mrow" -> mkE <$> row e
"mstyle" -> mkE <$> style e
"mfrac" -> mkE <$> frac e
"msqrt" -> mkE <$> msqrt e
"mroot" -> mkE <$> kroot e
"merror" -> return (mkE empty)
"mpadded" -> mkE <$> row e
"mphantom" -> mkE <$> phantom e
"mfenced" -> mkE <$> fenced e
"menclose" -> mkE <$> enclosed e
"msub" -> sub e
"msup" -> sup e
"msubsup" -> mkE <$> subsup e
"munder" -> mkE <$> under e
"mover" -> mkE <$> over e
"munderover" -> mkE <$> underover e
"mtable" -> mkE <$> table e
"maction" -> mkE <$> action e
"semantics" -> mkE <$> semantics e
"maligngroup" -> return $ mkE empty
"malignmark" -> return $ mkE empty
"mmultiscripts" -> mkE <$> multiscripts e
_ -> throwError $ "Unexpected element " <> err e
where
mkE :: Exp -> [IR Exp]
mkE = (:[]) . E
-- Tokens
ident :: Element -> MML Exp
ident e = do
s <- getString e
let base = case getOperator (EMathOperator s) of
Just _ -> EMathOperator s
Nothing -> EIdentifier s
mbVariant <- findAttrQ "mathvariant" e
curstyle <- asks curStyle
case mbVariant of
Nothing -> return base
Just v
| curstyle == getTextType v -> return base
| otherwise -> return $ EStyled (getTextType v) [base]
number :: Element -> MML Exp
number e = ENumber <$> getString e
op :: Element -> MML (IR Exp)
op e = do
mInferredPosition <- (<|>) <$> (getFormType <$> findAttrQ "form" e)
<*> asks position
inferredPosition <- case mInferredPosition of
Just inferredPosition -> pure inferredPosition
Nothing -> throwError "Did not find an inferred position"
opString <- getString e
let dummy = Operator opString "" inferredPosition 0 0 0 []
let opLookup = getMathMLOperator opString inferredPosition
let opDict = fromMaybe dummy opLookup
props <- filterM (checkAttr (properties opDict))
["fence", "accent", "stretchy"]
let objectPosition = getPosition $ form opDict
inScript <- asks inAccent
let ts = [("accent", ESymbol Accent), ("fence", ESymbol objectPosition)]
let fallback = case T.unpack opString of
[t] -> ESymbol (getSymbolType t)
_ -> if isJust opLookup
then ESymbol Ord
else EMathOperator
let constructor =
fromMaybe fallback
(getFirst . mconcat $ map (First . flip lookup ts) props)
if ("stretchy" `elem` props) && not inScript
then return $ Stretchy objectPosition constructor opString
else do
return $ (E . constructor) opString
where
checkAttr ps v = maybe (v `elem` ps) (=="true") <$> findAttrQ (T.unpack v) e
text :: Element -> MML Exp
text e = do
textStyle <- maybe TextNormal getTextType
<$> (findAttrQ "mathvariant" e)
s <- getString e
-- mathml seems to use mtext for spacing often; we get
-- more idiomatic math if we replace these with ESpace:
return $ case (textStyle, T.unpack s) of
(TextNormal, [c]) ->
case getSpaceWidth c of
Just w -> ESpace w
Nothing -> EText textStyle s
_ -> EText textStyle s
literal :: Element -> MML Exp
literal e = do
lquote <- fromMaybe "\x201C" <$> findAttrQ "lquote" e
rquote <- fromMaybe "\x201D" <$> findAttrQ "rquote" e
textStyle <- maybe TextNormal getTextType
<$> (findAttrQ "mathvariant" e)
s <- getString e
return $ EText textStyle $ lquote <> s <> rquote
space :: Element -> MML Exp
space e = do
width <- fromMaybe "0.0em" <$> (findAttrQ "width" e)
return $ ESpace (widthToNum width)
-- Layout
style :: Element -> MML Exp
style e = do
tt <- maybe TextNormal getTextType <$> findAttrQ "mathvariant" e
curstyle <- asks curStyle
-- We do not want to propagate the mathvariant else
-- we end up with nested EStyled applying the same
-- style
result <- local (filterMathVariant . enterStyled tt) (row e)
return $ if curstyle == tt
then result
else EStyled tt [result]
row :: Element -> MML Exp
row e = mkExp <$> group e
-- 1. matchNesting strips all additional IR
-- 2. toEDelim
-- 3. toExp makes sure that no additional nesting happens
mkExp :: [IR Exp] -> Exp
mkExp = toExp . toEDelim . matchNesting
toExp :: [InEDelimited] -> Exp
toExp [] = empty
toExp xs =
if any isStretchy xs
then case xs of
[x] -> either (ESymbol Ord) id x
_ -> EDelimited "" "" xs
else
case xs of
[Right x] -> x
_ -> EGrouped (rights xs)
toEDelim :: [IR InEDelimited] -> [InEDelimited]
toEDelim [] = []
toEDelim [Stretchy _ con s] = [Right $ con s]
toEDelim (xs) = map removeIR xs
-- Strips internal representation from processed list
removeIR :: IR a -> a
removeIR (E e) = e
removeIR _ = error "removeIR, should only be ever called on processed lists"
-- Convers stretch to InEDelimited element
removeStretch :: [IR Exp] -> [IR InEDelimited]
removeStretch [Stretchy _ constructor s] = [E $ Right (constructor s)]
removeStretch xs = map f xs
where
f (Stretchy _ _ s) = E $ Left s
f (E e) = E $ Right e
f (Trailing a b) = Trailing a b
isStretchy :: InEDelimited -> Bool
isStretchy (Left _) = True
isStretchy (Right _) = False
-- If at the end of a delimiter we need to apply the script to the whole
-- expression. We only insert Trailing when reordering Stretchy
trailingSup :: Maybe (T.Text, T.Text -> Exp) -> Maybe (T.Text, T.Text -> Exp) -> [IR InEDelimited] -> Exp
trailingSup open close es = go es
where
go [] = case (open, close) of
(Nothing, Nothing) -> empty
(Just (openFence, conOpen), Nothing) -> conOpen openFence
(Nothing, Just (closeFence, conClose)) -> conClose closeFence
(Just (openFence, conOpen), Just (closeFence, conClose)) ->
EGrouped [conOpen openFence, conClose closeFence]
go es'@(last -> Trailing constructor e) = (constructor (go (init es')) e)
go es' = EDelimited (getFence open) (getFence close) (toEDelim es')
getFence = fromMaybe "" . fmap fst
-- TODO: Break this into two functions
-- Matches open and closing brackets
-- The result of this function is a list with only E elements.
matchNesting :: [IR Exp] -> [IR InEDelimited]
matchNesting ((break isFence) -> (inis, rest)) =
let inis' = removeStretch inis in
case rest of
[] -> inis'
((Stretchy Open conOpen opens): rs) ->
let jOpen = Just (opens, conOpen)
(body, rems) = go rs 0 []
body' = matchNesting body in
case rems of
[] -> inis' ++ [E $ Right $ trailingSup jOpen Nothing body']
(Stretchy Close conClose closes : rs') ->
let jClose = Just (closes, conClose) in
inis' ++ (E $ Right $ trailingSup jOpen jClose body') : matchNesting rs'
_ -> (error "matchNesting: Logical error 1")
((Stretchy Close conClose closes): rs) ->
let jClose = Just (closes, conClose) in
(E $ Right $ trailingSup Nothing jClose (matchNesting inis)) : matchNesting rs
_ -> error "matchNesting: Logical error 2"
where
isOpen (Stretchy Open _ _) = True
isOpen _ = False
isClose (Stretchy Close _ _) = True
isClose _ = False
go :: [IR a] -> Int -> [IR a] -> ([IR a], [IR a])
go (x:xs) 0 a | isClose x = (reverse a, x:xs)
go (x:xs) n a | isOpen x = go xs (n + 1) (x:a)
go (x:xs) n a | isClose x = go xs (n - 1) (x:a)
go (x:xs) n a = go xs n (x:a)
go [] _ a = (reverse a, [])
isFence :: IR a -> Bool
isFence (Stretchy Open _ _) = True
isFence (Stretchy Close _ _) = True
isFence _ = False
group :: Element -> MML [IR Exp]
group e = do
front <- concat <$> mapM expr frontSpaces
middle <- local resetPosition (row' body)
end <- concat <$> local resetPosition (mapM expr endSpaces)
return $ (front ++ middle ++ end)
where
cs = elChildren e
(frontSpaces, noFront) = span spacelike cs
(endSpaces, body) = let (as, bs) = span spacelike (reverse noFront) in
(reverse as, reverse bs)
row' :: [Element] -> MML [IR Exp]
row' [] = return []
row' [x] = do
pos <- maybe FInfix (const FPostfix) <$> asks position
local (setPosition pos) (expr x)
row' (x:xs) =
do
pos <- maybe FPrefix (const FInfix) <$> asks position
e <- local (setPosition pos) (expr x)
es <- local (setPosition pos) (row' xs)
return (e ++ es)
-- Indicates the closure of scope
safeExpr :: Element -> MML Exp
safeExpr e = mkExp <$> expr e
frac :: Element -> MML Exp
frac e = do
(num, denom) <- mapPairM safeExpr =<< (checkArgs2 e)
rawThick <- findAttrQ "linethickness" e
return $
if thicknessZero rawThick
then EFraction NoLineFrac num denom
else EFraction NormalFrac num denom
msqrt :: Element -> MML Exp
msqrt e = ESqrt <$> (row e)
kroot :: Element -> MML Exp
kroot e = do
(base, index) <- mapPairM safeExpr =<< (checkArgs2 e)
return $ ERoot index base
phantom :: Element -> MML Exp
phantom e = EPhantom <$> row e
fenced :: Element -> MML Exp
fenced e = do
open <- fromMaybe "(" <$> (findAttrQ "open" e)
close <- fromMaybe ")" <$> (findAttrQ "close" e)
sep <- fromMaybe "," <$> (findAttrQ "separators" e)
let expanded =
case sep of
"" -> elChildren e
_ ->
let seps = map (\x -> unode "mo" [x]) $ T.unpack sep
sepsList = seps ++ repeat (last seps) in
fInterleave (elChildren e) (sepsList)
safeExpr $ unode "mrow"
([tunode "mo" open | not $ T.null open] ++
[unode "mrow" expanded] ++
[tunode "mo" close | not $ T.null close])
-- This could approximate the variants
enclosed :: Element -> MML Exp
enclosed e = do
mbNotation <- findAttrQ "notation" e
case mbNotation of
Just "box" -> EBoxed <$> row e
_ -> row e
action :: Element -> MML Exp
action e = do
selection <- maybe 1 (read . T.unpack) <$> (findAttrQ "selection" e) -- 1-indexing
safeExpr =<< maybeToEither ("Selection out of range")
(listToMaybe $ drop (selection - 1) (elChildren e))
-- Scripts and Limits
sub :: Element -> MML [IR Exp]
sub e = do
(base, subs) <- checkArgs2 e
reorderScripts base subs ESub
-- Handles case with strethy elements in the base of sub/sup
reorderScripts :: Element -> Element -> (Exp -> Exp -> Exp) -> MML [IR Exp]
reorderScripts e subs c = do
baseExpr <- expr e
subExpr <- postfixExpr subs
return $
case baseExpr of
[s@(Stretchy Open _ _)] -> [s, E $ c empty subExpr] -- Open
[s@(Stretchy Close _ _)] -> [Trailing c subExpr, s] -- Close
[s@(Stretchy _ _ _)] -> [s, E $ ESub empty subExpr] -- Middle
_ -> [E $ c (mkExp baseExpr) subExpr] -- No stretch
sup :: Element -> MML [IR Exp]
sup e = do
(base, sups) <- checkArgs2 e
reorderScripts base sups ESuper
subsup :: Element -> MML Exp
subsup e = do
(base, subs, sups) <- checkArgs3 e
ESubsup <$> safeExpr base <*> (postfixExpr subs)
<*> (postfixExpr sups)
under :: Element -> MML Exp
under e = do
(base, below) <- checkArgs2 e
EUnder False <$> safeExpr base <*> postfixExpr below
over :: Element -> MML Exp
over e = do
(base, above) <- checkArgs2 e
EOver False <$> safeExpr base <*> postfixExpr above
underover :: Element -> MML Exp
underover e = do
(base, below, above) <- checkArgs3 e
EUnderover False <$> safeExpr base <*> (postfixExpr below)
<*> (postfixExpr above)
-- Other
semantics :: Element -> MML Exp
semantics e = do
guard (not $ null cs)
first <- safeExpr (head cs)
if isEmpty first
then fromMaybe empty . getFirst . mconcat <$> mapM annotation (tail cs)
else return first
where
cs = elChildren e
annotation :: Element -> MML (First Exp)
annotation e = do
encoding <- findAttrQ "encoding" e
case encoding of
Just "application/mathml-presentation+xml" ->
First . Just <$> row e
Just "MathML-Presentation" ->
First . Just <$> row e
_ -> return (First Nothing)
multiscripts :: Element -> MML Exp
multiscripts e = do
let (xs, pres) = break ((== "mprescripts") . name) (elChildren e)
let row'' e' = if name e' == "none"
then return $ EGrouped []
else row e'
xs' <- mapM row'' xs
let base =
case xs' of
[x] -> x
[x,y] -> ESub x y
(x:y:z:_) -> ESubsup x y z
[] -> EGrouped []
pres' <- mapM row'' $ drop 1 pres
return $
case pres' of
(x:y:_) -> EGrouped [ESubsup (EGrouped []) x y, base]
[x] -> EGrouped [ESub x (EGrouped []), base]
[] -> base
-- Table
table :: Element -> MML Exp
table e = do
defAlign <- maybe AlignCenter toAlignment <$> (findAttrQ "columnalign" e)
rs <- mapM (tableRow defAlign) (elChildren e)
let (onlyAligns, exprs) = (map .map) fst &&& (map . map) snd $ rs
let rs' = map (pad (maximum (map length rs))) exprs
let aligns = map findAlign (transpose onlyAligns)
return $ EArray aligns rs'
where
findAlign xs = if null xs then AlignCenter
else foldl1 combine xs
combine x y = if x == y then x else AlignCenter
tableRow :: Alignment -> Element -> MML [(Alignment, [Exp])]
tableRow a e = do
align <- maybe a toAlignment <$> (findAttrQ "columnalign" e)
case name e of
"mtr" -> mapM (tableCell align) (elChildren e)
"mlabeledtr" -> mapM (tableCell align) (tail $ elChildren e)
_ -> throwError $ "Invalid Element: Only expecting mtr elements " <> err e
tableCell :: Alignment -> Element -> MML (Alignment, [Exp])
tableCell a e = do
align <- maybe a toAlignment <$> (findAttrQ "columnalign" e)
case name e of
"mtd" -> (,) align . (:[]) <$> row e
_ -> throwError $ "Invalid Element: Only expecting mtd elements " <> err e
-- Fixup
-- Library Functions
-- | Lift a 'Maybe' into any 'MonadError', raising the supplied error
-- on 'Nothing'.
maybeToEither :: (MonadError e m) => e -> Maybe a -> m a
maybeToEither e = maybe (throwError e) return
--interleave up to end of shorter list
-- | Alternate elements of the two lists, starting with the first.
-- An element is emitted only while the *other* list is still
-- non-empty, so the trailing partner of the final pair may be
-- dropped (see the original recursive formulation's base cases).
fInterleave :: [a] -> [a] -> [a]
fInterleave xs ys =
  case (xs, ys) of
    (x : rest, _ : _) -> x : fInterleave ys rest
    _                 -> []
-- MMLState helper functions
defaultState :: MMLState
defaultState = MMLState [] Nothing False TextNormal
addAttrs :: [Attr] -> MMLState -> MMLState
addAttrs as s = s {attrs = (map renameAttr as) ++ attrs s }
renameAttr :: Attr -> Attr
renameAttr v@(qName . attrKey -> "accentunder") =
Attr (unqual "accent") (attrVal v)
renameAttr a = a
filterMathVariant :: MMLState -> MMLState
filterMathVariant s@(attrs -> as) =
s{attrs = filter ((/= unqual "mathvariant") . attrKey) as}
setPosition :: FormType -> MMLState -> MMLState
setPosition p s = s {position = Just p}
resetPosition :: MMLState -> MMLState
resetPosition s = s {position = Nothing}
enterAccent :: MMLState -> MMLState
enterAccent s = s{ inAccent = True }
enterStyled :: TextType -> MMLState -> MMLState
enterStyled tt s = s{ curStyle = tt }
-- Utility
getString :: Element -> MML T.Text
getString e = do
tt <- asks curStyle
return $ fromUnicode tt $ stripSpaces $ T.pack $ concatMap cdData
$ onlyText $ elContent $ e
-- Finds only text data and replaces entity references with corresponding
-- characters
onlyText :: [Content] -> [CData]
onlyText [] = []
onlyText ((Text c):xs) = c : onlyText xs
onlyText (CRef s : xs) = (CData CDataText (fromMaybe s $ getUnicode' s) Nothing) : onlyText xs
where getUnicode' = fmap T.unpack . getUnicode . T.pack
onlyText (_:xs) = onlyText xs
checkArgs2 :: Element -> MML (Element, Element)
checkArgs2 e = case elChildren e of
[a, b] -> return (a, b)
_ -> throwError ("Incorrect number of arguments for " <> err e)
checkArgs3 :: Element -> MML (Element, Element, Element)
checkArgs3 e = case elChildren e of
[a, b, c] -> return (a, b, c)
_ -> throwError ("Incorrect number of arguments for " <> err e)
mapPairM :: Monad m => (a -> m b) -> (a, a) -> m (b, b)
mapPairM f (a, b) = (,) <$> (f a) <*> (f b)
err :: Element -> T.Text
err e = name e <> maybe "" (\x -> " line " <> T.pack (show x)) (elLine e)
-- Kept as String for Text.XML.Light
findAttrQ :: String -> Element -> MML (Maybe T.Text)
findAttrQ s e = do
inherit <- case (name e, s) of
("mfenced", "open") -> return Nothing
("mfenced", "close") -> return Nothing
("mfenced", "separators") -> return Nothing
_ -> asks (lookupAttrQ s . attrs)
return $ fmap T.pack $
findAttr (QName s Nothing Nothing) e
<|> inherit
-- Kept as String for Text.XML.Light
lookupAttrQ :: String -> [Attr] -> Maybe String
lookupAttrQ s = lookupAttr (QName (map toLower s) Nothing Nothing)
-- | Element name, lower-cased, with namespace information discarded.
name :: Element -> T.Text
name (elName -> (QName n _ _)) = T.toLower $ T.pack n
-- Kept as String for Text.XML.Light
-- | Build an unqualified element node with 'T.Text' content.
tunode :: String -> T.Text -> Element
tunode s = unode s . T.unpack
-- | Trim leading and trailing whitespace (space, tab, newline only;
-- see the local 'isSpace').
stripSpaces :: T.Text -> T.Text
stripSpaces = T.dropAround isSpace
-- | Map a MathML @columnalign@ value to an alignment; unknown values
-- default to centered.
toAlignment :: T.Text -> Alignment
toAlignment "left" = AlignLeft
toAlignment "center" = AlignCenter
toAlignment "right" = AlignRight
toAlignment _ = AlignCenter

-- | Symbol class of a stretchy operator by its form: prefix operators
-- open a group, postfix operators close one, infix are plain operators.
getPosition :: FormType -> TeXSymbolType
getPosition (FPrefix) = Open
getPosition (FPostfix) = Close
getPosition (FInfix) = Op

-- | Parse the @form@ attribute of an @mo@ element; any other value
-- (including a missing attribute) yields 'Nothing'.
getFormType :: Maybe T.Text -> Maybe FormType
getFormType (Just "infix") = (Just FInfix)
getFormType (Just "prefix") = (Just FPrefix)
getFormType (Just "postfix") = (Just FPostfix)
getFormType _ = Nothing
-- | Right-pad a list of rows with empty rows up to length @n@; input
-- already at least @n@ rows long is returned unchanged.
pad :: Int -> [[a]] -> [[a]]
pad n rows = rows ++ replicate (n - length rows) []
-- | Local whitespace predicate: exactly space, tab and newline (a
-- deliberately narrower set than 'Data.Char.isSpace').
isSpace :: Char -> Bool
isSpace = (`elem` " \t\n")
-- | Element names that are inherently space-like, and container names
-- that are space-like when all their children are.
spacelikeElems, cSpacelikeElems :: [T.Text]
spacelikeElems = ["mtext", "mspace", "maligngroup", "malignmark"]
cSpacelikeElems = ["mrow", "mstyle", "mphantom", "mpadded"]

-- | Is the element space-like per the MathML notion? Guards make the
-- precedence explicit: inherent names win outright; container names
-- require every child to be space-like as well.
spacelike :: Element -> Bool
spacelike e
  | n `elem` spacelikeElems  = True
  | n `elem` cSpacelikeElems = all spacelike (elChildren e)
  | otherwise                = False
  where
    n = name e
thicknessZero :: Maybe T.Text -> Bool
thicknessZero (Just s) = thicknessToNum s == 0.0
thicknessZero Nothing = False
-- | Convert a MathML width attribute to a width in em.  The named
-- mathspace constants are multiples of 1/18 em (negative for the
-- @negative…@ variants); anything else is delegated to 'readLength',
-- falling back to 0 when it cannot be read.
widthToNum :: T.Text -> Rational
widthToNum s =
  case s of
    "veryverythinmathspace" -> 1/18
    "verythinmathspace" -> 2/18
    "thinmathspace" -> 3/18
    "mediummathspace" -> 4/18
    "thickmathspace" -> 5/18
    "verythickmathspace" -> 6/18
    "veryverythickmathspace" -> 7/18
    "negativeveryverythinmathspace" -> -1/18
    "negativeverythinmathspace" -> -2/18
    "negativethinmathspace" -> -3/18
    "negativemediummathspace" -> -4/18
    "negativethickmathspace" -> -5/18
    "negativeverythickmathspace" -> -6/18
    "negativeveryverythickmathspace" -> -7/18
    _ -> fromMaybe 0 (readLength s)
-- | Convert a @linethickness@ attribute to a number; the named values
-- are fixed, anything else goes through 'readLength' with a default
-- of 0.5 (the "medium" thickness).
thicknessToNum :: T.Text -> Rational
thicknessToNum s =
  case s of
    "thin" -> (3/18)
    "medium" -> (1/2)
    "thick" -> 1
    v -> fromMaybe 0.5 (readLength v)
postfixExpr :: Element -> MML Exp
postfixExpr e = local (setPosition FPostfix . enterAccent) (safeExpr e)
| jgm/texmath | src/Text/TeXMath/Readers/MathML.hs | gpl-2.0 | 22,513 | 1 | 22 | 5,737 | 8,216 | 4,135 | 4,081 | 519 | 29 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-
Copyright (C) 2014-2016 Albert Krewinkel <tarleb+pandoc@moltkeplatz.de>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Readers.Org.Options
Copyright : Copyright (C) 2014-2016 Albert Krewinkel
License : GNU GPL, version 2 or above
Maintainer : Albert Krewinkel <tarleb+pandoc@moltkeplatz.de>
Define the Org-mode parser state.
-}
module Text.Pandoc.Readers.Org.ParserState
( OrgParserState(..)
, OrgParserLocal(..)
, OrgNoteRecord
, F(..)
, askF
, asksF
, trimInlinesF
, runF
, returnF
, ExportSettingSetter
, exportSubSuperscripts
, setExportSubSuperscripts
, modifyExportSettings
) where
import Control.Monad (liftM, liftM2)
import Control.Monad.Reader (Reader, runReader, ask, asks, local)
import Data.Default (Default(..))
import qualified Data.Map as M
import qualified Data.Set as Set
import Text.Pandoc.Builder ( Inlines, Blocks, HasMeta(..),
trimInlines )
import Text.Pandoc.Definition ( Meta(..), nullMeta )
import Text.Pandoc.Options ( ReaderOptions(..) )
import Text.Pandoc.Parsing ( HasHeaderMap(..)
, HasIdentifierList(..)
, HasLastStrPosition(..)
, HasQuoteContext(..)
, HasReaderOptions(..)
, ParserContext(..)
, QuoteContext(..)
, SourcePos )
-- | An inline note / footnote containing the note key and its (inline) value.
type OrgNoteRecord = (String, F Blocks)
-- | Table of footnotes
type OrgNoteTable = [OrgNoteRecord]
-- | Map of org block attributes (e.g. LABEL, CAPTION, NAME, etc)
type OrgBlockAttributes = M.Map String String
-- | Map of functions for link transformations. The map key is refers to the
-- link-type, the corresponding function transforms the given link string.
type OrgLinkFormatters = M.Map String (String -> String)
-- | Export settings <http://orgmode.org/manual/Export-settings.html>
-- These settings can be changed via OPTIONS statements.
data ExportSettings = ExportSettings
{ exportSubSuperscripts :: Bool -- ^ TeX-like syntax for sub- and superscripts
}
-- | Org-mode parser state
data OrgParserState = OrgParserState
{ orgStateOptions :: ReaderOptions
, orgStateAnchorIds :: [String]
, orgStateBlockAttributes :: OrgBlockAttributes
, orgStateEmphasisCharStack :: [Char]
, orgStateEmphasisNewlines :: Maybe Int
, orgStateExportSettings :: ExportSettings
, orgStateLastForbiddenCharPos :: Maybe SourcePos
, orgStateLastPreCharPos :: Maybe SourcePos
, orgStateLastStrPos :: Maybe SourcePos
, orgStateLinkFormatters :: OrgLinkFormatters
, orgStateMeta :: Meta
, orgStateMeta' :: F Meta
, orgStateNotes' :: OrgNoteTable
, orgStateParserContext :: ParserContext
, orgStateIdentifiers :: Set.Set String
, orgStateHeaderMap :: M.Map Inlines String
}
-- | Parser-local state; currently only the quote context.
data OrgParserLocal = OrgParserLocal { orgLocalQuoteContext :: QuoteContext }
-- | Start outside of any quote.
instance Default OrgParserLocal where
  def = OrgParserLocal NoQuote
instance HasReaderOptions OrgParserState where
  extractReaderOptions = orgStateOptions
-- | Meta manipulation delegates to 'orgStateMeta'.
instance HasMeta OrgParserState where
  setMeta field val st =
    st{ orgStateMeta = setMeta field val $ orgStateMeta st }
  deleteMeta field st =
    st{ orgStateMeta = deleteMeta field $ orgStateMeta st }
instance HasLastStrPosition OrgParserState where
  getLastStrPos = orgStateLastStrPos
  setLastStrPos pos st = st{ orgStateLastStrPos = Just pos }
-- | Quote context lives in the local (Reader) state, not the parser state.
instance HasQuoteContext st (Reader OrgParserLocal) where
  getQuoteContext = asks orgLocalQuoteContext
  withQuoteContext q = local (\s -> s{orgLocalQuoteContext = q})
instance HasIdentifierList OrgParserState where
  extractIdentifierList = orgStateIdentifiers
  updateIdentifierList f s = s{ orgStateIdentifiers = f (orgStateIdentifiers s) }
instance HasHeaderMap OrgParserState where
  extractHeaderMap = orgStateHeaderMap
  updateHeaderMap f s = s{ orgStateHeaderMap = f (orgStateHeaderMap s) }
instance Default ExportSettings where
  def = defaultExportSettings
instance Default OrgParserState where
  def = defaultOrgParserState
-- | Initial parser state: empty collections, no remembered source
-- positions, and default reader/export options.
defaultOrgParserState :: OrgParserState
defaultOrgParserState = OrgParserState
  { orgStateOptions = def
  , orgStateAnchorIds = []
  , orgStateBlockAttributes = M.empty
  , orgStateEmphasisCharStack = []
  , orgStateEmphasisNewlines = Nothing
  , orgStateExportSettings = def
  , orgStateLastForbiddenCharPos = Nothing
  , orgStateLastPreCharPos = Nothing
  , orgStateLastStrPos = Nothing
  , orgStateLinkFormatters = M.empty
  , orgStateMeta = nullMeta
  , orgStateMeta' = return nullMeta
  , orgStateNotes' = []
  , orgStateParserContext = NullState
  , orgStateIdentifiers = Set.empty
  , orgStateHeaderMap = M.empty
  }
-- | Default export settings: sub-/superscript syntax is enabled.
defaultExportSettings :: ExportSettings
defaultExportSettings = ExportSettings
  { exportSubSuperscripts = True
  }
--
-- Setter for exporting options
--
-- | A setter updates one field of the export settings with a new value.
type ExportSettingSetter a = a -> ExportSettings -> ExportSettings
-- | Set whether TeX-like sub-/superscript syntax should be recognized.
setExportSubSuperscripts :: ExportSettingSetter Bool
setExportSubSuperscripts val es = es { exportSubSuperscripts = val }
-- | Apply an export-setting setter to the settings stored inside a parser
-- state, leaving all other state fields unchanged.
modifyExportSettings :: ExportSettingSetter a -> a -> OrgParserState -> OrgParserState
modifyExportSettings setter val st =
  st { orgStateExportSettings = setter val (orgStateExportSettings st) }
--
-- Parser state reader
--
-- | Reader monad wrapping the parser state. This is used to delay evaluation
-- until all relevant information has been parsed and made available in the
-- parser state. See also the newtype of the same name in
-- Text.Pandoc.Parsing.
newtype F a = F { unF :: Reader OrgParserState a
                } deriving (Functor, Applicative, Monad)
-- | Monoid results are combined inside the delayed computation.
instance Monoid a => Monoid (F a) where
  mempty = return mempty
  mappend = liftM2 mappend
  mconcat = fmap mconcat . sequence
-- | Run a delayed computation against a (final) parser state.
runF :: F a -> OrgParserState -> a
runF = runReader . unF
-- | Delayed computation returning the whole parser state.
askF :: F OrgParserState
askF = F ask
-- | Delayed computation projecting a value out of the parser state.
asksF :: (OrgParserState -> a) -> F a
asksF f = F $ asks f
-- | Trim leading and trailing whitespace of a delayed 'Inlines' value.
trimInlinesF :: F Inlines -> F Inlines
trimInlinesF = liftM trimInlines
-- | Wrap a pure value in both the outer monad and the delaying 'F' monad.
returnF :: Monad m => a -> m (F a)
returnF = return . return
| janschulz/pandoc | src/Text/Pandoc/Readers/Org/ParserState.hs | gpl-2.0 | 7,218 | 0 | 10 | 1,619 | 1,233 | 725 | 508 | 126 | 1 |
module WildFire.RunWFStatic where
import WildFire.WildFireModelStatic
import Control.Monad.STM
import Control.Concurrent.STM.TVar
import qualified Data.Map as Map
import qualified PureAgents2DDiscrete as Front
import System.Random
import System.IO
import Data.Maybe
import Data.List
import Debug.Trace
import qualified Graphics.Gloss as GLO
import Graphics.Gloss.Interface.IO.Simulate
import qualified PureAgentsAct as PA
-- | Window title used by the Gloss frontend.
winTitle = "WildFire Static ACT"
-- | Window size in pixels (width, height).
winSize = (800, 800)
-- | Entry point: create a random 50x50 grid of agents, ignite the centre
-- cell (25, 25), start the simulation and open the rendering loop.
runWFStaticRendering :: IO ()
runWFStaticRendering = do
    let dt = 1.0
    let xCells = 50
    let yCells = 50
    let rngSeed = 42
    let cells = (xCells, yCells)
    let g = mkStdGen rngSeed
    -- NOTE(review): the advanced RNG g' is discarded — confirm it is not
    -- needed after agent creation.
    (as, g') <- atomically $ createRandomWFAgents g cells
    let ignitedAs = initialIgnition as (25, 25) cells
    hdl <- PA.startSimulation ignitedAs dt ()
    stepWithRendering hdl dt cells
-- | Ignite the agent occupying the given cell position; if no agent maps to
-- that cell, the list is returned unchanged.
--
-- The previous implementation looked the agent up with 'fromJust' and then
-- spliced the list with @splitAt agentAtPosId@, which silently assumed that
-- an agent's id equals its list index and crashed ('tail' of @[]@) when the
-- id pointed past the end. Replacing the agent in place via 'map' needs
-- neither assumption and is total.
initialIgnition :: [WFAgent] -> (Int, Int) -> (Int, Int) -> [WFAgent]
initialIgnition as pos cells = map igniteAtPos as
    where
        igniteAtPos a
            | pos == agentToCell a cells = igniteAgent a
            | otherwise = a
-- | Run the Gloss simulateIO loop: 30 steps per second, white background,
-- rendering via 'modelToPicture' and stepping via 'stepIteration'.
stepWithRendering :: WFSimHandle -> Double -> (Int, Int) -> IO ()
stepWithRendering hdl dt cells = simulateIO (Front.display winTitle winSize)
                GLO.white
                30
                hdl
                (modelToPicture cells)
                (stepIteration dt)
-- | Render the currently observable agent states to a Gloss picture.
-- Uses 'winSize' for the frame dimensions instead of repeating the literal
-- (800, 800), keeping it consistent with 'stepWithRendering' (same value,
-- single source of truth).
modelToPicture :: (Int, Int) -> WFSimHandle -> IO GLO.Picture
modelToPicture cells hdl = do
    as <- PA.observeAgentStates hdl
    let observableAgentStates = map (wfAgentToObservableState cells) as
    return (Front.renderFrame observableAgentStates winSize cells)
-- | Gloss step callback. Intentionally returns the handle unchanged —
-- presumably the agents advance concurrently inside the simulation handle
-- (PureAgentsAct), so the render loop only re-observes state.
-- NOTE(review): fixedDt, viewport and dtRendering are all unused — confirm.
stepIteration :: Double -> ViewPort -> Float -> WFSimHandle -> IO WFSimHandle
stepIteration fixedDt viewport dtRendering hdl = return hdl
-- | Map an agent's id and state to a renderable cell.
--
-- Fix: the column must wrap at the grid *width* (xCells); the original
-- @mod aid yCells@ used the height and only happened to work for square
-- grids. The row now uses integer 'div' instead of a float round-trip
-- (identical for the non-negative agent ids used here).
wfAgentToObservableState :: (Int, Int) -> (PA.AgentId, Double, WFAgentState) -> Front.RenderCell
wfAgentToObservableState (xCells, _) (aid, _, s) = Front.RenderCell { Front.renderCellCoord = (x, y),
                                                                      Front.renderCellColor = cs }
    where
        y = aid `div` xCells
        x = aid `mod` xCells
        shade = burnable s
        cs = case (wfState s) of
                        Living -> (0.0, shade, 0.0)
                        Burning -> (shade, 0.0, 0.0)
                        Dead -> (0.5, 0.5, 0.5) | thalerjonathan/phd | public/ArtIterating/code/haskell/PureAgentsAct/src/WildFire/RunWFStatic.hs | gpl-3.0 | 3,017 | 0 | 12 | 1,047 | 809 | 436 | 373 | 61 | 3 |
-- let in: guards are evaluated top to bottom, the first True guard wins
-- factorial n = let loop acc n' | n' > 1 = loop (acc * n') (n' - 1)
-- | otherwise = acc
-- in loop 1 n
-- where clauses
-- Accumulator-passing helper in a where clause; returns 1 for n < 1.
factorial n = go 1 n
  where go acc m | m > 1 = go (acc * m) (m - 1)
                 | otherwise = acc
-- Print factorial of 5 (expected output: 120).
main = print $ factorial 5
| lamontu/learning_haskell | guards.hs | gpl-3.0 | 399 | 0 | 10 | 161 | 83 | 42 | 41 | 4 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE PatternGuards #-}
module MachineLearning.Classifier.LibLinear.LibLinear ( TrainParams(..)
, Solver(..)
, Feature(..)
, readLabelFile
, labelSource
, featureNode
, train
, predict
, findParameterC
, problem
, problemMaxInd
, libLinearRead
, libLinearWrite
, debugSink) where
import Classifier.LibLinear.Bindings
import Classifier.LibLinear.Solver
import qualified Control.Monad as M
import Control.Monad.IO.Class (liftIO)
import Data.Binary.Get
import Data.Binary.IEEE754
import Data.Binary.Put
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy as BL
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Either
import qualified Data.Foldable as F
import qualified Data.List as L
import Data.Maybe
import qualified Data.Sequence as S
import qualified Data.Vector as V
import qualified Data.Vector.Storable.Mutable as MVec
import qualified Data.Vector.Unboxed as VU
import Foreign as F
import Foreign.C.String
import Foreign.C.Types
import GHC.Float
import System.IO
import System.Mem
{-Syntax-}
-- | A single feature: (feature index, value).
type Feature = (Int,Double)
-- | Parameters controlling a liblinear training run.
data TrainParams = TrainParams
  { trainSolver :: Solver -- ^ which liblinear solver to use
  , trainC :: Double -- ^ cost parameter C
  , trainNumExamples :: Int -- ^ expected number of training examples
  , trainModel :: String -- ^ file path the trained model is saved to
  } deriving (Show)
{-Error Information-}
-- | Errors raised while reading labels or assembling a training problem.
data LibLinearError = InvalidLabel | WrongNumExamples Int deriving (Show)
{-Functions for Label-}
-- | Parse a ByteString that must consist solely of a decimal integer label;
-- trailing characters make the whole label invalid.
readLabel :: B.ByteString -> Either LibLinearError Int
readLabel bs =
  case B.readInt bs of
    Just (n, rest) | B.null rest -> Right n
    _ -> Left InvalidLabel
-- | Read a label file (one integer per line) strictly into a list.
-- Calls 'error' on the first malformed label, like the streaming variant.
readLabelFile :: FilePath -> IO [Int]
readLabelFile filePath = do
  h <- openFile filePath ReadMode
  contents <- B.hGetContents h
  let rows = B.lines contents
  hClose h
  return (map parse rows)
  where parse bs = case readLabel bs of
                     Left msg -> error $ show msg
                     Right label -> label
-- | Stream the labels of a label file (one integer per line) as Doubles.
-- Calls 'error' on the first malformed label.
labelSource :: FilePath -> Source IO Double
labelSource filePath = do h <- liftIO $ openFile filePath ReadMode
                          !bs <- liftIO $ B.hGetContents h
                          let bsList = B.lines bs
                          M.mapM_ (yield.fromIntegral.read) bsList
                          liftIO $ hClose h
  where read x = case readLabel x of
                   Left msg -> error $ show msg
                   Right label -> label
{-Parameter Functions-}
-- | Build a liblinear parameter record for the given solver and cost C.
-- eps and p are fixed at 0.1; no class weights and no initial solution
-- are supplied (null pointers).
newParameter :: Solver -> Double -> C'parameter
newParameter solver c = C'parameter
  { c'parameter'solver_type = fromIntegral $ fromEnum solver
  , c'parameter'eps = 0.1
  , c'parameter'C = realToFrac c
  , c'parameter'nr_weight = 0
  , c'parameter'weight_label = nullPtr
  , c'parameter'weight = nullPtr
  , c'parameter'p = 0.1
  , c'parameter'init_sol = nullPtr
  }
{-Problem Functions-}
-- | Convert a feature vector into liblinear feature nodes, terminated by
-- the sentinel node (index -1) the C API requires.
featuresToNodeList
  :: VU.Vector Feature -> V.Vector C'feature_node
featuresToNodeList features = V.snoc nodes sentinel
  where toNode (i,v) =
          C'feature_node {c'feature_node'index = fromIntegral i
                         ,c'feature_node'value = realToFrac v}
        nodes = V.map toNode (VU.convert features)
        sentinel = toNode (-1, 0.0)
-- | Write each label into the targets vector at its list position.
labelToTargetList
  :: [Int] -> MVec.IOVector CDouble -> IO ()
labelToTargetList label vec =
  mapM_ (\(i, x) -> MVec.write vec i (fromIntegral x))
        (zip [0 ..] label)
-- | Copy the feature nodes of one example into its mutable destination
-- vector, element by element.
updateFeatureListRankOne :: V.Vector C'feature_node
                         -> MVec.IOVector C'feature_node
                         -> IO ()
updateFeatureListRankOne xs vec = V.imapM_ (MVec.write vec) xs
-- | Fold step: register one example (label, max feature index, node vector)
-- into the problem's feature-pointer and target vectors. Returns the next
-- example index, the running maximum feature index, and the accumulated node
-- vectors (the sequence keeps the vectors reachable until 'extractPtr' pins
-- them for the C call).
updateFeatureNodeList
  :: MVec.IOVector (Ptr C'feature_node)
  -> MVec.IOVector CDouble
  -> (Int,Int, S.Seq (MVec.IOVector C'feature_node))
  -> (Double,Int,MVec.IOVector C'feature_node)
  -> IO (Int,Int, S.Seq (MVec.IOVector C'feature_node))
updateFeatureNodeList !featureIndex !targetsVec (!i,!maxInd,!vecs') (!label,!maxInd',!vec) =
  do let maxIndex = max maxInd maxInd'
     MVec.unsafeWith vec
                     (\basePtr -> MVec.write featureIndex i basePtr)
     MVec.write targetsVec
                i
                (realToFrac label)
     return (i + 1,maxIndex, vecs' S.|> vec)
-- | Conduit turning (label, features) pairs into (label, max feature index,
-- mutable node vector) triples; the extra slot holds the sentinel node.
-- NOTE(review): 'VU.maximum' errors on an empty feature vector — confirm
-- upstream never yields empty examples.
problemMaxInd
  :: Conduit (Double,VU.Vector Feature) IO (Double,Int,MVec.IOVector C'feature_node)
problemMaxInd =
  awaitForever
    (\(label,x) ->
       let !maxInd = (VU.maximum . fst . VU.unzip) x
       in do !vec <- liftIO $ MVec.new $ (VU.length x + 1)
             liftIO $
               updateFeatureListRankOne (featuresToNodeList x)
                                        vec
             yield (label,maxInd,vec))
-- | Like 'problemMaxInd' but without tracking the maximum feature index:
-- converts each (label, features) pair into a mutable node vector.
problem
  :: Conduit (Double,VU.Vector Feature) IO (Double,MVec.IOVector C'feature_node)
problem =
  awaitForever
    (\(label,x) ->
       do !vec <- liftIO $ MVec.new $ (VU.length x + 1)
          liftIO $
            updateFeatureListRankOne (featuresToNodeList x)
                                     vec
          yield (label,vec))
-- | Conduit converting bare feature vectors (no label) into mutable node
-- vectors, sentinel slot included.
featureNode
  :: Conduit (VU.Vector Feature) IO (MVec.IOVector C'feature_node)
featureNode =
  awaitForever
    (\x ->
       do !vec <- liftIO $ MVec.new $ (VU.length x + 1)
          liftIO $
            updateFeatureListRankOne (featuresToNodeList x)
                                     vec
          yield vec)
{-Train and Predict Functions-}
-- | Run an IO action while keeping every foreign pointer alive for its
-- whole duration (nested 'withForeignPtr' calls).
extractPtr :: [F.ForeignPtr a] -> IO b -> IO b
extractPtr ptrs action = foldr keepAlive action ptrs
  where keepAlive p k = F.withForeignPtr p (const k)
-- | Sink consuming the whole training set, then invoking liblinear's
-- c'train and saving the model under 'trainModel'. Returns 'Left' when the
-- number of consumed examples differs from 'trainNumExamples'; otherwise
-- 'Right' an IO action performing the actual training. All example vectors
-- are pinned via 'extractPtr' for the duration of the C call.
train
  :: TrainParams
  -> Sink (Double,Int,MVec.IOVector C'feature_node) IO (Either LibLinearError (IO ()))
train TrainParams{trainSolver,trainC,trainNumExamples,trainModel} =
  do !targets <- liftIO $ MVec.new $ trainNumExamples
     !featureIndex <- liftIO $ MVec.new trainNumExamples
     !(count,trainFeatureMax,vecs) <-
       CL.foldM (updateFeatureNodeList featureIndex targets)
                (0,0,S.empty)
     if count /= trainNumExamples
        then return $ Left (WrongNumExamples count)
        else return $
             Right $
             extractPtr
               (map (\x -> fst $ MVec.unsafeToForeignPtr0 x)
                    (F.toList vecs))
               (MVec.unsafeWith targets $
                \targets' ->
                  MVec.unsafeWith featureIndex $
                  \features' ->
                    -- bias -1.0 disables the bias term in liblinear.
                    do let p =
                             C'problem {c'problem'l =
                                          fromIntegral trainNumExamples
                                       ,c'problem'n =
                                          fromIntegral trainFeatureMax
                                       ,c'problem'y = targets'
                                       ,c'problem'x = features'
                                       ,c'problem'bias = -1.0}
                       model <-
                         with p $
                         \problem' ->
                           with (newParameter trainSolver trainC) $
                           \param' -> c'train problem' param'
                       modelName <- newCString trainModel
                       c'save_model modelName model)
--train' :: TrainParams
-- -> MVec.IOVector CDouble
-- -> MVec.IOVector (Ptr C'feature_node)
-- -> (Int,Int, S.Seq (MVec.IOVector C'feature_node))
-- -> Int
-- -> Int
-- -> Sink (Double,Int, MVec.IOVector C'feature_node) IO ()
--train' TrainParams{trainSolver,trainC,trainNumExamples,trainModel} targets featureIndex (count',trainFeatureMax',vecs') trunkSize n =
-- do !trunk <- CL.take trunkSize
-- if length trunk == 0
-- then return ()
-- else do !(count,trainFeatureMax,vecs) <-
-- liftIO $
-- F.foldlM (updateFeatureNodeList featureIndex targets)
-- (count',trainFeatureMax',vecs')
-- trunk
-- liftIO $
-- extractPtr
-- (map (\x -> fst $ MVec.unsafeToForeignPtr0 x)
-- (F.toList vecs))
-- (MVec.unsafeWith targets $
-- \targets' ->
-- MVec.unsafeWith featureIndex $
-- \features' ->
-- do let p =
-- C'problem {c'problem'l =
-- fromIntegral (n * trunkSize)
-- ,c'problem'n =
-- fromIntegral trainFeatureMax
-- ,c'problem'y = targets'
-- ,c'problem'x = features'
-- ,c'problem'bias = -1.0}
-- model <-
-- with p $
-- \problem' ->
-- with (newParameter trainSolver trainC) $
-- \param' -> c'train problem' param'
-- modelName <-
-- newCString
-- (show (n * trunkSize) ++ "_" ++ trainModel)
-- c'save_model modelName model)
-- train' TrainParams {trainSolver
-- ,trainC
-- ,trainNumExamples
-- ,trainModel}
-- targets
-- featureIndex
-- (count,trainFeatureMax,vecs)
-- trunkSize
-- (n + 1)
-- | Sink that loads a saved model, classifies each incoming example,
-- prints the percentage of correct predictions and writes it to @output@.
-- NOTE(review): an empty stream makes @total@ zero and the percentage NaN —
-- confirm this cannot occur in practice.
predict
  :: String -> FilePath -> Sink (Double,MVec.IOVector C'feature_node) IO ()
predict predictModel output =
  do modelName <- liftIO $ newCString predictModel
     model <- liftIO $ c'load_model modelName
     (correct,total) <-
       CL.foldM (func model)
                (0,0)
     let percent = (fromIntegral correct) / (fromIntegral total) * 100
         str = show percent
     liftIO $ putStrLn str
     h <- liftIO $ openFile output WriteMode
     liftIO $ hPutStrLn h str
     liftIO $ hClose h
  where func :: Ptr C'model
             -> (Int,Int)
             -> (Double,MVec.IOVector C'feature_node)
             -> IO (Int,Int)
        -- Count a hit when the model's prediction matches the true label.
        func model (correct,total) (t,vec) =
          do prediction <- MVec.unsafeWith vec $ \vec' -> c'predict model vec'
             if realToFrac t == prediction
               then return (correct + 1,total + 1)
               else return (correct,total + 1)
{-Cross validation -}
-- | Sink consuming the whole training set and running liblinear's
-- c'find_parameter_C (5-fold cross validation, starting at 'trainC', upper
-- bound 1024) to search for the best cost parameter; prints the best C and
-- its CV accuracy. Returns 'Left' when the number of consumed examples
-- differs from 'trainNumExamples'.
findParameterC
  :: TrainParams
  -> Sink (Double,Int,MVec.IOVector C'feature_node) IO (Either LibLinearError (IO ()))
findParameterC TrainParams{trainSolver,trainC,trainNumExamples,trainModel} =
  do targets <- liftIO $ MVec.new $ trainNumExamples
     featureIndex <- liftIO $ MVec.new trainNumExamples
     (count,trainFeatureMax,vecs) <-
       CL.foldM (updateFeatureNodeList featureIndex targets)
                (0,0,S.empty)
     if count /= trainNumExamples
        then return $ Left (WrongNumExamples count)
        else return $
             Right $
             extractPtr
               (map (\x -> fst $ MVec.unsafeToForeignPtr0 x)
                    (F.toList vecs))
               (MVec.unsafeWith targets $
                \targets' ->
                  MVec.unsafeWith featureIndex $
                  \features' ->
                    do let p =
                             C'problem {c'problem'l =
                                          fromIntegral trainNumExamples
                                       ,c'problem'n =
                                          fromIntegral trainFeatureMax
                                       ,c'problem'y = targets'
                                       ,c'problem'x = features'
                                       ,c'problem'bias = -1.0}
                       with p $
                         \problem' ->
                           with (newParameter trainSolver trainC) $
                           \param' ->
                             -- 8 bytes each for the out-parameters (double).
                             F.allocaBytes 8 $
                             \bestC' ->
                               F.allocaBytes 8 $
                               \bestRate' ->
                                 do c'find_parameter_C problem'
                                                       param'
                                                       5
                                                       (realToFrac trainC)
                                                       1024
                                                       bestC'
                                                       bestRate'
                                    bestC <- F.peek bestC'
                                    bestRate <- F.peek bestRate'
                                    putStrLn $
                                      "Best C = " ++
                                      show bestC ++
                                      " CV accuracy = " ++
                                      show (100 * bestRate) ++ "%")
{-IO Functions-}
-- | Serialize one (label, features) record: float32 label followed by
-- (word32 index, float32 value) pairs. (The name keeps the historical
-- typo "putFeautre" because 'libLinearWrite' calls it under that name.)
putFeautre :: (Double,[Feature]) -> Put
putFeautre (label,feature) =
  do putFloat32le $ double2Float label
     M.mapM_ (\(i,v) ->
                do putWord32le (fromIntegral i)
                   putFloat32le $ double2Float v)
             feature
-- | Sink writing a binary dataset file: a word32 record count header,
-- then per record a word32 feature length followed by 'putFeautre' output.
-- The inverse of 'libLinearRead'.
libLinearWrite
  :: FilePath -> Int -> Sink (Double,[Feature]) IO ()
libLinearWrite filePath len =
  do handle <- liftIO $ openBinaryFile filePath WriteMode
     liftIO $ BL.hPut handle $ runPut $ putWord32le (fromIntegral len)
     go handle
  where go h =
          do node <- await
             case node of
               Nothing -> liftIO $ hClose h
               Just (label,feature) ->
                 do liftIO $
                      BL.hPut h $
                      runPut $ putWord32le $ fromIntegral $ length feature
                    liftIO $ BL.hPut h $ runPut $ putFeautre (label,feature)
                    go h
-- | Read a little-endian 32-bit word and widen it to Int.
getLength :: Get Int
getLength = fmap fromIntegral getWord32le
-- | Read one feature: a word32 index followed by a float32 value.
getFeature :: Get Feature
getFeature =
  do idx <- getWord32le
     val <- getFloat32le
     return (fromIntegral idx, float2Double val)
-- | Read one record: a float32 label followed by @len@ features.
getNode :: Int -> Get (Double,VU.Vector Feature)
getNode len =
  do lbl <- getFloat32le
     feats <- M.replicateM len getFeature
     return (float2Double lbl, VU.fromList feats)
-- | Source streaming a binary dataset file written by 'libLinearWrite':
-- word32 record count, then per record a word32 feature length followed by
-- 4 bytes of label and 8 bytes per feature.
libLinearRead
  :: FilePath -> Source IO (Double,VU.Vector Feature)
libLinearRead filePath =
  do handle <- liftIO $ openBinaryFile filePath ReadMode
     lenBs <- liftIO $ BL.hGet handle 4
     let len = runGet getLength lenBs
     go handle len
  where go h n =
          if n > 0
             then do featureLenBs <- liftIO $ BL.hGet h 4
                     let featureLen = runGet getLength featureLenBs
                     bs <- liftIO $ BL.hGet h (4 + 8 * featureLen)
                     let (label,vec) = runGet (getNode featureLen) bs
                     yield (label,vec)
                     go h (n - 1)
             else liftIO $ hClose h
-- | Print a single (label, features) element, if one is available.
-- NOTE(review): this consumes only the first element of the stream and then
-- finishes — confirm that is intended for debugging.
debugSink
  :: Sink (Double,VU.Vector Feature) IO ()
debugSink =
  do x <- await
     case x of
       Just (label,vec) ->
         do liftIO $ print label
            liftIO $ print vec
       Nothing -> liftIO $ return ()
| XinhuaZhang/PetaVisionHaskell | Classifier/LibLinear/LibLinear.hs | gpl-3.0 | 16,691 | 0 | 31 | 7,299 | 3,663 | 1,898 | 1,765 | -1 | -1 |
module Filter.ProofCheckers (makeProofChecker) where
import Text.Pandoc
import Data.List.Split (splitOn)
import Data.Map (Map, unions, fromList, toList)
import Filter.Util (splitIt, intoChunks,formatChunk,unlines')
import Prelude
-- | Turn "ProofChecker" code blocks into one exercise div per chunk, turn
-- "Playground" code blocks into a single playground div, and leave every
-- other block untouched.
makeProofChecker :: Block -> Block
makeProofChecker cb@(CodeBlock (_,classes,extra) contents)
  | "ProofChecker" `elem` classes = Div ("",[],[]) $ map (activate classes extra) $ intoChunks contents
  | "Playground" `elem` classes = Div ("",[],[]) [toPlayground classes extra contents]
  | otherwise = cb
makeProofChecker x = x
-- | Render one exercise chunk as an exercise div. The guard table maps the
-- block's class to the matching proof system plus per-system guides and
-- option flags; the chunk's first line supplies the exercise number
-- (before the first space) and the goal sequent (after it).
activate cls extra chunk
    | "Prop" `elem` cls = exTemplate [("system", "prop"),("guides","montague"),("options","resize")]
    | "FirstOrder" `elem` cls = exTemplate [("system", "firstOrder"),("guides","montague"),("options","resize")]
    | "SecondOrder" `elem` cls = exTemplate [("system", "secondOrder")]
    | "PolySecondOrder" `elem` cls = exTemplate [("system", "polyadicSecondOrder")]
    | "ElementaryST" `elem` cls = exTemplate [("system", "elementarySetTheory"),("options","resize render")]
    | "SeparativeST" `elem` cls = exTemplate [("system", "separativeSetTheory"),("options","resize render")]
    | "MontagueSC" `elem` cls = exTemplate [("system", "montagueSC"),("options","resize")]
    | "MontagueQC" `elem` cls = exTemplate [("system", "montagueQC"),("options","resize")]
    | "LogicBookSD" `elem` cls = exTemplate [("system", "LogicBookSD")]
    | "LogicBookSDPlus" `elem` cls = exTemplate [("system", "LogicBookSDPlus")]
    | "LogicBookPD" `elem` cls = exTemplate [("system", "LogicBookPD")]
    | "LogicBookPDPlus" `elem` cls = exTemplate [("system", "LogicBookPDPlus")]
    | "HausmanSL" `elem` cls = exTemplate [("system", "hausmanSL"), ("guides","hausman"), ("options", "resize fonts") ]
    | "HausmanPL" `elem` cls = exTemplate [("system", "hausmanPL"), ("guides","hausman"), ("options", "resize fonts") ]
    | "GamutMPND" `elem` cls = exTemplate [("system", "gamutMPND"), ("guides","hausman"), ("options", "resize fonts") ]
    | "GamutIPND" `elem` cls = exTemplate [("system", "gamutIPND"), ("guides","hausman"), ("options", "resize fonts") ]
    | "GamutPND" `elem` cls = exTemplate [("system", "gamutPND"), ("guides","hausman"), ("options", "resize fonts") ]
    | "GamutPNDPlus" `elem` cls = exTemplate [("system", "gamutPNDPlus"), ("guides","hausman"), ("options", "resize fonts") ]
    | "GamutND" `elem` cls = exTemplate [("system", "gamutND"), ("guides","hausman"), ("options", "resize fonts") ]
    | "HowardSnyderSL" `elem` cls = exTemplate [("system", "howardSnyderSL"), ("guides","howardSnyder"), ("options", "resize fonts") ]
    | "HowardSnyderPL" `elem` cls = exTemplate [("system", "howardSnyderPL"), ("guides","howardSnyder"), ("options", "resize fonts") ]
    | "ForallxSL" `elem` cls = exTemplate [("system", "magnusSL"), ("options","render")]
    | "ForallxSLPlus" `elem` cls = exTemplate [("system", "magnusSLPlus"), ("options","render")]
    | "ForallxQL" `elem` cls = exTemplate [("system", "magnusQL"), ("options","render")]
    | "IchikawaJenkinsSL"`elem` cls = exTemplate [("system", "ichikawaJenkinsSL"), ("options","render")]
    | "IchikawaJenkinsQL"`elem` cls = exTemplate [("system", "ichikawaJenkinsQL"), ("options","render")]
    | "TomassiPL" `elem` cls = exTemplate [("system", "tomassiPL"), ("options","resize render hideNumbering")]
    | "GoldfarbND" `elem` cls = exTemplate [("system", "goldfarbND")]
    | "GoldfarbAltND" `elem` cls = exTemplate [("system", "goldfarbAltND")]
    | "GoldfarbNDPlus" `elem` cls = exTemplate [("system", "goldfarbNDPlus")]
    | "GoldfarbAltNDPlus"`elem` cls = exTemplate [("system", "goldfarbAltNDPlus")]
    | "ZachTFL" `elem` cls = exTemplate [("system", "thomasBolducAndZachTFL"), ("options","render")]
    | "ZachTFL2019" `elem` cls = exTemplate [("system", "thomasBolducAndZachTFL2019"), ("options","render")]
    | "ZachFOL" `elem` cls = exTemplate [("system", "thomasBolducAndZachFOL"), ("options","render")]
    | "ZachFOL2019" `elem` cls = exTemplate [("system", "thomasBolducAndZachFOL2019"), ("options","render")]
    | "ZachFOLPlus2019" `elem` cls = exTemplate [("system", "thomasBolducAndZachFOLPlus2019"), ("options","render")]
    | "EbelsDugganTFL" `elem` cls = exTemplate [("system", "ebelsDugganTFL"), ("guides", "fitch"), ("options", "fonts resize")]
    | "EbelsDugganFOL" `elem` cls = exTemplate [("system", "ebelsDugganFOL"), ("guides", "fitch"), ("options", "fonts resize")]
    | "HardegreeSL" `elem` cls = exTemplate [("system", "hardegreeSL"), ("options", "render")]
    | "HardegreePL" `elem` cls = exTemplate [("system", "hardegreePL"), ("options", "render")]
    | "HardegreeWTL" `elem` cls = exTemplate [("system", "hardegreeWTL"), ("guides", "montague"), ("options", "render fonts")]
    | "HardegreeL" `elem` cls = exTemplate [("system", "hardegreeL"), ("guides", "montague"), ("options", "fonts")]
    | "HardegreeK" `elem` cls = exTemplate [("system", "hardegreeK"), ("guides", "montague"), ("options", "fonts")]
    | "HardegreeT" `elem` cls = exTemplate [("system", "hardegreeT"), ("guides", "montague"), ("options", "fonts")]
    | "HardegreeB" `elem` cls = exTemplate [("system", "hardegreeB"), ("guides", "montague"), ("options", "fonts")]
    | "HardegreeD" `elem` cls = exTemplate [("system", "hardegreeD"), ("guides", "montague"), ("options", "fonts")]
    | "Hardegree4" `elem` cls = exTemplate [("system", "hardegree4"), ("guides", "montague"), ("options", "fonts")]
    | "Hardegree5" `elem` cls = exTemplate [("system", "hardegree5"), ("guides", "montague"), ("options", "fonts")]
    | "HardegreeMPL" `elem` cls = exTemplate [("system", "hardegreeMPL"), ("guides", "montague"), ("options", "fonts")]
    | otherwise = exTemplate []
    where numof = takeWhile (/= ' ')
          seqof = dropWhile (/= ' ')
          -- NOTE(review): (h:t) is a partial pattern — an empty chunk would
          -- crash here; confirm 'intoChunks'/'formatChunk' never yield [].
          (h:t) = formatChunk chunk
          fixed = [("type","proofchecker"),("goal",seqof h),("submission","saveAs:" ++ numof h)]
          -- Per-exercise attributes override the system defaults, which in
          -- turn override the fixed attributes (left-biased 'unions').
          exTemplate opts = template (unions [fromList extra, fromList opts, fromList fixed]) ("exercise " ++ numof h) (unlines' t)
-- | Render a "Playground" code block: same class-to-system guard table as
-- 'activate', but with no goal, no submission target, and the fixed
-- heading "Playground".
toPlayground cls extra content
    | "Prop" `elem` cls = playTemplate [("system", "prop")]
    | "FirstOrder" `elem` cls = playTemplate [("system", "firstOrder")]
    | "SecondOrder" `elem` cls = playTemplate [("system", "secondOrder")]
    | "PolySecondOrder" `elem` cls = playTemplate [("system", "polyadicSecondOrder")]
    | "ElementaryST" `elem` cls = playTemplate [("system", "elementarySetTheory"), ("options","resize render")]
    | "SeparativeST" `elem` cls = playTemplate [("system", "separativeSetTheory"), ("options","resize render")]
    | "MontagueSC" `elem` cls = playTemplate [("system", "montagueSC"),("options","resize")]
    | "MontagueQC" `elem` cls = playTemplate [("system", "montagueQC"),("options","resize")]
    | "LogicBookSD" `elem` cls = playTemplate [("system", "LogicBookSD")]
    | "LogicBookSDPlus" `elem` cls = playTemplate [("system", "LogicBookSDPlus")]
    | "LogicBookPD" `elem` cls = playTemplate [("system", "LogicBookPD")]
    | "LogicBookPDPlus" `elem` cls = playTemplate [("system", "LogicBookPDPlus")]
    | "HausmanSL" `elem` cls = playTemplate [("system", "hausmanSL"), ("guides","hausman"), ("options","fonts resize")]
    | "HausmanPL" `elem` cls = playTemplate [("system", "hausmanPL"), ("guides","hausman"), ("options","fonts resize")]
    | "GamutMPND" `elem` cls = playTemplate [("system", "gamutMPND"), ("guides","hausman"), ("options", "resize fonts") ]
    | "GamutIPND" `elem` cls = playTemplate [("system", "gamutIPND"), ("guides","hausman"), ("options", "resize fonts") ]
    | "GamutPND" `elem` cls = playTemplate [("system", "gamutPND"), ("guides","hausman"), ("options", "resize fonts") ]
    | "GamutPNDPlus" `elem` cls = playTemplate [("system", "gamutPNDPlus"), ("guides","hausman"), ("options", "resize fonts") ]
    | "GamutND" `elem` cls = playTemplate [("system", "gamutND"), ("guides","hausman"), ("options", "resize fonts") ]
    | "HowardSnyderSL" `elem` cls = playTemplate [("system", "howardSnyderSL"), ("guides","howardSnyder"), ("options","fonts resize")]
    | "HowardSnyderPL" `elem` cls = playTemplate [("system", "howardSnyderPL"), ("guides","howardSnyder"), ("options","fonts resize")]
    | "ForallxSL" `elem` cls = playTemplate [("system", "magnusSL"), ("options","render")]
    | "ForallxSLPlus" `elem` cls = playTemplate [("system", "magnusSLPlus"), ("options","render")]
    | "ForallxQL" `elem` cls = playTemplate [("system", "magnusQL"), ("options","render")]
    | "IchikawaJenkinsSL"`elem` cls = playTemplate [("system", "ichikawaJenkinsSL"), ("options","render")]
    | "IchikawaJenkinsQL"`elem` cls = playTemplate [("system", "ichikawaJenkinsQL"), ("options","render")]
    | "TomassiPL" `elem` cls = playTemplate [("system", "tomassiPL"), ("options","resize render hideNumbering")]
    | "GoldfarbND" `elem` cls = playTemplate [("system", "goldfarbND"),("options","resize")]
    | "GoldfarbAltND" `elem` cls = playTemplate [("system", "goldfarbAltND"),("options","resize")]
    | "GoldfarbNDPlus" `elem` cls = playTemplate [("system", "goldfarbNDPlus"),("options","resize")]
    | "GoldfarbAltNDPlus"`elem` cls = playTemplate [("system", "goldfarbAltNDPlus"),("options","resize")]
    | "ZachTFL" `elem` cls = playTemplate [("system", "thomasBolducAndZachTFL"), ("options","render")]
    | "ZachTFL2019" `elem` cls = playTemplate [("system", "thomasBolducAndZachTFL2019"), ("options","render")]
    | "ZachFOL" `elem` cls = playTemplate [("system", "thomasBolducAndZachFOL"), ("options","render")]
    | "ZachFOL2019" `elem` cls = playTemplate [("system", "thomasBolducAndZachFOL2019"), ("options","render")]
    | "ZachFOLPlus2019" `elem` cls = playTemplate [("system", "thomasBolducAndZachFOLPlus2019"), ("options","render")]
    | "EbelsDugganTFL" `elem` cls = playTemplate [("system", "ebelsDugganTFL"), ("guides", "fitch"), ("options", "fonts resize")]
    | "EbelsDugganFOL" `elem` cls = playTemplate [("system", "ebelsDugganFOL"), ("guides", "fitch"), ("options", "fonts resize")]
    | "HardegreeSL" `elem` cls = playTemplate [("system", "hardegreeSL"), ("options", "render")]
    | "HardegreePL" `elem` cls = playTemplate [("system", "hardegreePL"), ("options", "render")]
    | "HardegreeWTL" `elem` cls = playTemplate [("system", "hardegreeWTL"), ("guides", "montague"), ("options", "render fonts")]
    | "HardegreeL" `elem` cls = playTemplate [("system", "hardegreeL"), ("guides", "montague"), ("options", "fonts")]
    | "HardegreeK" `elem` cls = playTemplate [("system", "hardegreeK"), ("guides", "montague"), ("options", "fonts")]
    | "HardegreeT" `elem` cls = playTemplate [("system", "hardegreeT"), ("guides", "montague"), ("options", "fonts")]
    | "HardegreeB" `elem` cls = playTemplate [("system", "hardegreeB"), ("guides", "montague"), ("options", "fonts")]
    | "HardegreeD" `elem` cls = playTemplate [("system", "hardegreeD"), ("guides", "montague"), ("options", "fonts")]
    | "Hardegree4" `elem` cls = playTemplate [("system", "hardegree4"), ("guides", "montague"), ("options", "fonts")]
    | "Hardegree5" `elem` cls = playTemplate [("system", "hardegree5"), ("guides", "montague"), ("options", "fonts")]
    | "HardegreeMPL" `elem` cls = playTemplate [("system", "hardegreeMPL"), ("guides", "montague"), ("options", "fonts")]
    | otherwise = playTemplate []
    where fixed = [("type","proofchecker")]
          playTemplate opts = template (unions [fromList extra, fromList opts, fromList fixed]) "Playground" (unlines' $ formatChunk content)
-- | Build the exercise div: a heading span followed by a raw HTML div whose
-- data-carnap-* attributes come from the options map.
template :: Map String String -> String -> String -> Block
template opts heading content = Div ("", ["exercise"], [])
    [ Plain [Span ("", [], []) [Str heading]]
    -- A RawBlock is needed here to keep the linebreaks in @content@ intact.
    , RawBlock "html" (concat ["<div", optString, ">", content, "</div>"])
    ]
    where optString =
            concatMap (\(k, v) -> " data-carnap-" ++ k ++ "=\"" ++ v ++ "\"")
                      (toList opts)
| opentower/carnap | Carnap-Server/Filter/ProofCheckers.hs | gpl-3.0 | 12,518 | 0 | 14 | 2,261 | 4,373 | 2,539 | 1,834 | 131 | 1 |
{-
Copyright (C) 2015 Leon Medvinsky
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 3
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-}
{-|
Module : Neet.Examples.XOR
Description : Testing the algorithm on XOR
Copyright : (c) Leon Medvinsky, 2015
License : GPL-3
Maintainer : lmedvinsky@hotmail.com
Stability : experimental
Portability : ghc
-}
module Neet.Examples.XOR (xorFit, andFit, orFit, xorExperiment) where
import Neet
import Neet.Species
import Data.Monoid
import qualified Data.Map.Strict as M
import System.Random
import Data.List (intercalate)
-- | The four boolean input pairs, encoded as doubles, in truth-table order.
boolQuestions :: [[Double]]
boolQuestions = [ [0, 0]
                , [0, 1]
                , [1, 0]
                , [1, 1]
                ]
-- | XOR truth table matching 'boolQuestions'.
xorAnswers :: [Bool]
xorAnswers = [False, True, True, False]
-- | Build a 'GenScorer' from sample inputs and expected boolean outputs.
-- A genome's fitness is (#samples - sum of |output - expected|)^2; the win
-- criterion requires every network output to fall on the correct side
-- of 0.5.
sampleFit :: [[Double]] -> [Bool] -> GenScorer [Double]
sampleFit questions answers = GS intermed ff criteria
  where intermed g = map try questions
          where try samp = head $ pushThrough net samp
                net = mkPhenotype g
        ff ds = (fromIntegral (length answers) - sumDiffs)**2
          where sumDiffs = sum $ zipWith (\x y -> abs (x - y)) ds binarized
                binarized = map (\b -> if b then 1 else 0) answers
        bounds = map (\b -> if b then (>0.5) else (<0.5)) answers
        criteria ds = and $ zipWith id bounds ds
-- | Scorer for XOR.
xorFit :: GenScorer [Double]
xorFit = sampleFit boolQuestions xorAnswers
-- | AND truth table matching 'boolQuestions'.
andAnswers :: [Bool]
andAnswers = [False, False, False, True]
-- | Scorer for AND.
andFit :: GenScorer [Double]
andFit = sampleFit boolQuestions andAnswers
-- | OR truth table matching 'boolQuestions'.
orAnswers :: [Bool]
orAnswers = [False, True, True, True]
-- | Scorer for OR.
orFit :: GenScorer [Double]
orFit = sampleFit boolQuestions orAnswers
-- | Automated XOR experiment: evolve a population of 150 genomes (2 inputs,
-- 1 output) until a genome satisfies the XOR win criterion, then report the
-- solution and optionally write its network as a dot file.
xorExperiment :: IO ()
xorExperiment = do
  putStrLn $ "XOR Input list is: " ++ show boolQuestions
  putStrLn "Press Enter to start learning"
  _ <- getLine
  putStrLn "Running XOR experiment with 150 population and default parameters"
  seed <- randomIO
  let pp = Just (PhaseParams 10 10)
      pop = newPop seed (PS 150 2 1 params Nothing pp)
      params = defParams { specParams = sp, mutParams = mp, mutParamsS = mpS }
      mp = defMutParams { delConnChance = 0.3, delNodeChance = 0.03 }
      mpS = defMutParamsS { addConnRate = 0.05, delConnChance = 0.05 }
      sp = Target dp (SpeciesTarget (14,17) 0.1)
      dp = defDistParams { delta_t = 5 }
  (pop', sol) <- xorLoop pop
  printInfo pop'
  putStrLn $ "Solution found in generation " ++ show (popGen pop')
  let score = gScorer xorFit sol
  putStrLn $ "\nOutputs to XOR inputs are: " ++ show score
  putStrLn $ "Fitness (Out of 16): " ++ show (fitnessFunction xorFit score)
  putStrLn $ "Final distance threshold: " ++ show (distParams . specParams $ popParams pop')
  putStrLn "\nSpecify a path to write the network dot file"
  putStrLn "Otherwise press Enter to view network"
  dotPath <- getLine
  if dotPath == ""
    then renderGenome sol
    else writeGenomeDot dotPath sol
-- | One-line summary of the population's species: "S<id> P<size>" entries
-- joined with commas.
mkSpecInfo :: Population -> String
mkSpecInfo pop =
  intercalate ", "
    [ "S" ++ show k ++ " P" ++ show (specSize sp)
    | (SpecId k, sp) <- M.toList (popSpecs pop) ]
-- | Train one generation at a time, printing progress, until a winning
-- genome appears; returns the final population and the winner.
xorLoop :: Population -> IO (Population, Genome)
xorLoop pop = do
  printInfo pop
  let (First mg, pop') = trainOnce (winTrain xorFit) pop
  case mg of
   Nothing -> xorLoop pop'
   Just g -> return (pop',g)
-- | Print a short per-generation report: generation number, species
-- summary, and the best score so far, followed by a blank line.
printInfo :: Population -> IO ()
printInfo pop =
  mapM_ putStrLn
    [ "Generation " ++ show (popGen pop)
    , "Species: " ++ mkSpecInfo pop
    , "High Score: " ++ show (popBScore pop)
    , ""
    ]
| raymoo/NEET | src/Neet/Examples/XOR.hs | gpl-3.0 | 4,184 | 0 | 15 | 949 | 1,116 | 588 | 528 | 78 | 3 |
-- This program is free software: you can redistribute it and/or modify it under
-- the terms of the GNU General Public License as published by the Free Software
-- Foundation, either version 3 of the License, or (at your option) any later
-- version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-- details.
--
-- You should have received a copy of the GNU General Public License along with
-- this program. If not, see <http://www.gnu.org/licenses/>.
-- |
-- Module : Text.Comarkdown.Combinators
-- Description : Fancy combinators for comarkdown
-- Copyright : Copyright 2015 Peter Harpending
-- License : GPL-3
-- Maintainer : peter@harpending.org
-- Stability : experimental
-- Portability : portable
module Text.Comarkdown.Combinators
( (!)
, module Text.Comarkdown.Combinators
, module Text.Comarkdown.Combinators.Primitives
) where
import Text.Comarkdown.Combinators.Primitives
import qualified Text.Comarkdown.Stdlib as L
import Text.Comarkdown.Parser
import Text.Comarkdown.Types
import Control.Exceptional
import Control.Lens hiding (parts)
import Control.Monad.State
import Data.HashMap.Lazy ((!))
import Data.Monoid ((<>))
import Text.Parsec
import Text.Pandoc
-- ** Missing operators from other modules
-- |Infix alias for 'mappend'
infixl 5 <+>
(<+>) :: Monoid m => m -> m -> m
x <+> y = mappend x y
-- * Comarkdown combinators!
-- |Parse a String into the current document.
--
-- The source name is required for error messages.
--
-- Parse failures surface through 'runExceptional' in 'DocumentM'; on
-- success the freshly parsed parts are merged into the document state.
parse :: SourceName -> String -> DocumentM ()
parse sn bs =
  do doc <- get
     exceptionalDocument <- liftIO $ parse' doc sn bs
     mDocument <- runExceptional exceptionalDocument
     put mDocument
-- |Parse a String, given an existing document (with definitions and stuff),
-- the name of the source, and a String to parse.
--
-- On a parse error, the failure is reported via 'fail' in the
-- 'Exceptional' monad; on success the new parts are appended (via '<>')
-- to the document's existing 'parts'.
parse' :: Document -> SourceName -> String -> IO (Exceptional Document)
parse' doc sn bs =
  runParserT documentParser doc sn bs >>=
  return .
  \case
    Left parseError -> fail (show parseError)
    Right parts' -> return (over parts (<> parts') doc)
-- |Parse a file into the current document
parseFile :: (MonadState Document m, MonadIO m) => FilePath -> m ()
parseFile fp = do
  currentDoc <- get
  excNewDoc <- liftIO (parseFile' currentDoc fp)
  newDoc <- runExceptional excNewDoc
  put newDoc
-- |Runs 'parse\'' on the contents of a file, using the 'FilePath' as the
-- 'SourceName'
--
-- NOTE(review): uses lazy 'readFile', so the handle stays open until the
-- parser forces the whole contents; consider a strict read if handle
-- exhaustion ever becomes an issue.
parseFile' :: Document -> FilePath -> IO (Exceptional Document)
parseFile' doc fp =
  do contents <- readFile fp
     parse' doc fp contents
-- |Run the document action with the 'stdlib' loaded first
withStdlib :: DocumentM x -> DocumentM x
withStdlib action = do
  L.stdlib
  action
-- |Wrapper around 'runDocument' and 'stdlib'
runWithStdlib :: DocumentM x -> IO Pandoc
runWithStdlib action = runDocument (withStdlib action)
-- |Parse a comarkdown file, send the result through Pandoc, and render
-- it all as Markdown
comdToMd :: FilePath -> IO String
comdToMd fp = writeMarkdown def <$> runWithStdlib (parseFile fp)
-- |Parse a comarkdown file, send the result through Pandoc, and render
-- it all as plain-text
comdToPlain :: FilePath -> IO String
comdToPlain fp = writePlain def <$> runWithStdlib (parseFile fp)
-- |Run a Document against the empty 'nullDocument', compile it, and
-- return the resulting Pandoc (the final document state is discarded)
runDocument :: DocumentM x -> IO Pandoc
runDocument d = fst <$> runStateT (d >> compile) nullDocument
-- -- |Get a list of commands in the current document
-- commandNames :: DocumentM (Vector String)
-- commandNames =
-- do cmds <- fmap definedCommands get
-- return (foldMap (\accum cmd ->
-- mappend accum
-- (V.cons (cmdPrimary cmd)
-- (cmdAliases cmd)))
-- cmds)
-- |Insert an 'Ignore' into the 'Document'
ignore :: String -> DocumentM ()
ignore txt = insertPart (Ignore txt)
-- |Insert a 'Comment' into the 'Document'
comment :: String -> DocumentM ()
comment txt = insertPart (Comment txt)
-- |Append a 'DocumentPart' to the end of the document's parts
insertPart :: DocumentPart -> DocumentM ()
insertPart p = parts %= (++ [p])
| pharpend/comarkdown | lib/Text/Comarkdown/Combinators.hs | gpl-3.0 | 4,449 | 0 | 12 | 989 | 782 | 426 | 356 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceManagement.Services.Consumers.TestIAMPermissions
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns permissions that a caller has on the specified resource. If the
-- resource does not exist, this will return an empty set of permissions,
-- not a \`NOT_FOUND\` error. Note: This operation is designed to be used
-- for building permission-aware UIs and command-line tools, not for
-- authorization checking. This operation may \"fail open\" without
-- warning.
--
-- /See:/ <https://cloud.google.com/service-management/ Service Management API Reference> for @servicemanagement.services.consumers.testIamPermissions@.
module Network.Google.Resource.ServiceManagement.Services.Consumers.TestIAMPermissions
(
-- * REST Resource
ServicesConsumersTestIAMPermissionsResource
-- * Creating a Request
, servicesConsumersTestIAMPermissions
, ServicesConsumersTestIAMPermissions
-- * Request Lenses
, sctipXgafv
, sctipUploadProtocol
, sctipAccessToken
, sctipUploadType
, sctipPayload
, sctipResource
, sctipCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceManagement.Types
-- | A resource alias for @servicemanagement.services.consumers.testIamPermissions@ method which the
-- 'ServicesConsumersTestIAMPermissions' request conforms to.
--
-- Shape: POST @\/v1\/{resource}:testIamPermissions@ with the standard
-- Google API query parameters, a JSON 'TestIAMPermissionsRequest' body,
-- returning a JSON 'TestIAMPermissionsResponse'.
type ServicesConsumersTestIAMPermissionsResource =
     "v1" :>
       CaptureMode "resource" "testIamPermissions" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :>
                     ReqBody '[JSON] TestIAMPermissionsRequest :>
                       Post '[JSON] TestIAMPermissionsResponse
-- | Returns permissions that a caller has on the specified resource. If the
-- resource does not exist, this will return an empty set of permissions,
-- not a \`NOT_FOUND\` error. Note: This operation is designed to be used
-- for building permission-aware UIs and command-line tools, not for
-- authorization checking. This operation may \"fail open\" without
-- warning.
--
-- /See:/ 'servicesConsumersTestIAMPermissions' smart constructor.
data ServicesConsumersTestIAMPermissions =
  ServicesConsumersTestIAMPermissions'
    { _sctipXgafv :: !(Maybe Xgafv) -- ^ V1 error format (see 'sctipXgafv')
    , _sctipUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media
    , _sctipAccessToken :: !(Maybe Text) -- ^ OAuth access token
    , _sctipUploadType :: !(Maybe Text) -- ^ Legacy upload protocol for media
    , _sctipPayload :: !TestIAMPermissionsRequest -- ^ Request body
    , _sctipResource :: !Text -- ^ Resource whose permissions are queried
    , _sctipCallback :: !(Maybe Text) -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ServicesConsumersTestIAMPermissions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sctipXgafv'
--
-- * 'sctipUploadProtocol'
--
-- * 'sctipAccessToken'
--
-- * 'sctipUploadType'
--
-- * 'sctipPayload'
--
-- * 'sctipResource'
--
-- * 'sctipCallback'
servicesConsumersTestIAMPermissions
    :: TestIAMPermissionsRequest -- ^ 'sctipPayload'
    -> Text -- ^ 'sctipResource'
    -> ServicesConsumersTestIAMPermissions
servicesConsumersTestIAMPermissions payload resource =
  ServicesConsumersTestIAMPermissions'
    { _sctipXgafv = Nothing
    , _sctipUploadProtocol = Nothing
    , _sctipAccessToken = Nothing
    , _sctipUploadType = Nothing
    , _sctipPayload = payload
    , _sctipResource = resource
    , _sctipCallback = Nothing
    }
-- | V1 error format.
sctipXgafv :: Lens' ServicesConsumersTestIAMPermissions (Maybe Xgafv)
sctipXgafv = lens _sctipXgafv (\record new -> record {_sctipXgafv = new})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
sctipUploadProtocol :: Lens' ServicesConsumersTestIAMPermissions (Maybe Text)
sctipUploadProtocol =
  lens _sctipUploadProtocol (\record new -> record {_sctipUploadProtocol = new})

-- | OAuth access token.
sctipAccessToken :: Lens' ServicesConsumersTestIAMPermissions (Maybe Text)
sctipAccessToken =
  lens _sctipAccessToken (\record new -> record {_sctipAccessToken = new})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
sctipUploadType :: Lens' ServicesConsumersTestIAMPermissions (Maybe Text)
sctipUploadType =
  lens _sctipUploadType (\record new -> record {_sctipUploadType = new})

-- | Multipart request metadata.
sctipPayload :: Lens' ServicesConsumersTestIAMPermissions TestIAMPermissionsRequest
sctipPayload = lens _sctipPayload (\record new -> record {_sctipPayload = new})

-- | REQUIRED: The resource for which the policy detail is being requested.
-- See the operation documentation for the appropriate value for this
-- field.
sctipResource :: Lens' ServicesConsumersTestIAMPermissions Text
sctipResource =
  lens _sctipResource (\record new -> record {_sctipResource = new})

-- | JSONP
sctipCallback :: Lens' ServicesConsumersTestIAMPermissions (Maybe Text)
sctipCallback =
  lens _sctipCallback (\record new -> record {_sctipCallback = new})
instance GoogleRequest
           ServicesConsumersTestIAMPermissions
         where
        type Rs ServicesConsumersTestIAMPermissions =
             TestIAMPermissionsResponse
        type Scopes ServicesConsumersTestIAMPermissions =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloud-platform.read-only",
               "https://www.googleapis.com/auth/service.management",
               "https://www.googleapis.com/auth/service.management.readonly"]
        -- Argument order must line up with the parameters of
        -- 'ServicesConsumersTestIAMPermissionsResource'.
        requestClient
          ServicesConsumersTestIAMPermissions'{..}
          = go _sctipResource _sctipXgafv _sctipUploadProtocol
              _sctipAccessToken
              _sctipUploadType
              _sctipCallback
              (Just AltJSON)
              _sctipPayload
              serviceManagementService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ServicesConsumersTestIAMPermissionsResource)
                      mempty
| brendanhay/gogol | gogol-servicemanagement/gen/Network/Google/Resource/ServiceManagement/Services/Consumers/TestIAMPermissions.hs | mpl-2.0 | 6,791 | 0 | 16 | 1,404 | 798 | 471 | 327 | 123 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.S3.Types.Product
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.S3.Types.Product where
import Network.AWS.Prelude
import Network.AWS.S3.Internal
import Network.AWS.S3.Types.Sum
-- | An access control policy: an optional owner plus a list of grants.
--
-- /See:/ 'accessControlPolicy' smart constructor.
data AccessControlPolicy = AccessControlPolicy'
    { _acpGrants :: !(Maybe [Grant])
    , _acpOwner :: !(Maybe Owner)
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'AccessControlPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'acpGrants'
--
-- * 'acpOwner'
accessControlPolicy
    :: AccessControlPolicy
accessControlPolicy =
    AccessControlPolicy' {_acpGrants = Nothing, _acpOwner = Nothing}

-- | A list of grants.
acpGrants :: Lens' AccessControlPolicy [Grant]
acpGrants =
    lens _acpGrants (\obj new -> obj {_acpGrants = new}) . _Default . _Coerce

-- | Undocumented member.
acpOwner :: Lens' AccessControlPolicy (Maybe Owner)
acpOwner = lens _acpOwner (\obj new -> obj {_acpOwner = new})

instance ToXML AccessControlPolicy where
    toXML AccessControlPolicy'{..} =
        mconcat
            [ "AccessControlList" @= toXML (toXMLList "Grant" <$> _acpGrants)
            , "Owner" @= _acpOwner
            ]
-- | An S3 bucket: its name and creation date.
--
-- /See:/ 'bucket' smart constructor.
data Bucket = Bucket'
    { _bCreationDate :: !RFC822
    , _bName :: !BucketName
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'Bucket' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bCreationDate'
--
-- * 'bName'
bucket
    :: UTCTime -- ^ 'bCreationDate'
    -> BucketName -- ^ 'bName'
    -> Bucket
bucket creationDate name =
    Bucket' {_bCreationDate = _Time # creationDate, _bName = name}

-- | Date the bucket was created.
bCreationDate :: Lens' Bucket UTCTime
bCreationDate =
    lens _bCreationDate (\obj new -> obj {_bCreationDate = new}) . _Time

-- | The name of the bucket.
bName :: Lens' Bucket BucketName
bName = lens _bName (\obj new -> obj {_bName = new})

instance FromXML Bucket where
    parseXML x = Bucket' <$> (x .@ "CreationDate") <*> (x .@ "Name")
-- | A bucket's logging configuration, if any.
--
-- /See:/ 'bucketLoggingStatus' smart constructor.
newtype BucketLoggingStatus = BucketLoggingStatus'
    { _blsLoggingEnabled :: Maybe LoggingEnabled
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'BucketLoggingStatus' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'blsLoggingEnabled'
bucketLoggingStatus
    :: BucketLoggingStatus
bucketLoggingStatus = BucketLoggingStatus' {_blsLoggingEnabled = Nothing}

-- | Undocumented member.
blsLoggingEnabled :: Lens' BucketLoggingStatus (Maybe LoggingEnabled)
blsLoggingEnabled =
    lens _blsLoggingEnabled (\obj new -> obj {_blsLoggingEnabled = new})

instance ToXML BucketLoggingStatus where
    toXML BucketLoggingStatus'{..} =
        mconcat ["LoggingEnabled" @= _blsLoggingEnabled]
-- | The full CORS configuration for a bucket: zero or more 'CORSRule's.
--
-- /See:/ 'corsConfiguration' smart constructor.
newtype CORSConfiguration = CORSConfiguration'
    { _ccCORSRules :: Maybe [CORSRule]
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'CORSConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccCORSRules'
corsConfiguration
    :: CORSConfiguration
corsConfiguration = CORSConfiguration' {_ccCORSRules = Nothing}

-- | Undocumented member.
ccCORSRules :: Lens' CORSConfiguration [CORSRule]
ccCORSRules =
    lens _ccCORSRules (\obj new -> obj {_ccCORSRules = new}) . _Default . _Coerce

instance ToXML CORSConfiguration where
    toXML CORSConfiguration'{..} =
        mconcat [toXML (toXMLList "CORSRule" <$> _ccCORSRules)]
-- | A single CORS rule: which origins, methods, and headers are allowed,
-- which response headers are exposed, and how long the preflight response
-- may be cached.
--
-- /See:/ 'corsRule' smart constructor.
data CORSRule = CORSRule'
    { _crAllowedMethods :: !(Maybe [Text])
    , _crMaxAgeSeconds :: !(Maybe Int)
    , _crAllowedHeaders :: !(Maybe [Text])
    , _crAllowedOrigins :: !(Maybe [Text])
    , _crExposeHeaders :: !(Maybe [Text])
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'CORSRule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'crAllowedMethods'
--
-- * 'crMaxAgeSeconds'
--
-- * 'crAllowedHeaders'
--
-- * 'crAllowedOrigins'
--
-- * 'crExposeHeaders'
corsRule
    :: CORSRule
corsRule =
    CORSRule'
    { _crAllowedMethods = Nothing
    , _crMaxAgeSeconds = Nothing
    , _crAllowedHeaders = Nothing
    , _crAllowedOrigins = Nothing
    , _crExposeHeaders = Nothing
    }

-- | Identifies HTTP methods that the domain\/origin specified in the rule is
-- allowed to execute.
crAllowedMethods :: Lens' CORSRule [Text]
crAllowedMethods = lens _crAllowedMethods (\ s a -> s{_crAllowedMethods = a}) . _Default . _Coerce;

-- | The time in seconds that your browser is to cache the preflight response
-- for the specified resource.
crMaxAgeSeconds :: Lens' CORSRule (Maybe Int)
crMaxAgeSeconds = lens _crMaxAgeSeconds (\ s a -> s{_crMaxAgeSeconds = a});

-- | Specifies which headers are allowed in a pre-flight OPTIONS request.
crAllowedHeaders :: Lens' CORSRule [Text]
crAllowedHeaders = lens _crAllowedHeaders (\ s a -> s{_crAllowedHeaders = a}) . _Default . _Coerce;

-- | One or more origins you want customers to be able to access the bucket
-- from.
crAllowedOrigins :: Lens' CORSRule [Text]
crAllowedOrigins = lens _crAllowedOrigins (\ s a -> s{_crAllowedOrigins = a}) . _Default . _Coerce;

-- | One or more headers in the response that you want customers to be able
-- to access from their applications (for example, from a JavaScript
-- XMLHttpRequest object).
crExposeHeaders :: Lens' CORSRule [Text]
crExposeHeaders = lens _crExposeHeaders (\ s a -> s{_crExposeHeaders = a}) . _Default . _Coerce;

-- Each list field maps to repeated singular XML elements (e.g. the
-- AllowedMethods list is one <AllowedMethod> element per entry).
instance FromXML CORSRule where
        parseXML x
          = CORSRule' <$>
              (may (parseXMLList "AllowedMethod") x) <*>
                (x .@? "MaxAgeSeconds")
                <*> (may (parseXMLList "AllowedHeader") x)
                <*> (may (parseXMLList "AllowedOrigin") x)
                <*> (may (parseXMLList "ExposeHeader") x)

instance ToXML CORSRule where
        toXML CORSRule'{..}
          = mconcat
              [toXML
                 (toXMLList "AllowedMethod" <$> _crAllowedMethods),
               "MaxAgeSeconds" @= _crMaxAgeSeconds,
               toXML
                 (toXMLList "AllowedHeader" <$> _crAllowedHeaders),
               toXML
                 (toXMLList "AllowedOrigin" <$> _crAllowedOrigins),
               toXML
                 (toXMLList "ExposeHeader" <$> _crExposeHeaders)]
-- | A key prefix shared by a group of listed objects.
--
-- /See:/ 'commonPrefix' smart constructor.
newtype CommonPrefix = CommonPrefix'
    { _cpPrefix :: Maybe Text
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'CommonPrefix' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cpPrefix'
commonPrefix
    :: CommonPrefix
commonPrefix = CommonPrefix' {_cpPrefix = Nothing}

-- | Undocumented member.
cpPrefix :: Lens' CommonPrefix (Maybe Text)
cpPrefix = lens _cpPrefix (\obj new -> obj {_cpPrefix = new})

instance FromXML CommonPrefix where
    parseXML x = CommonPrefix' <$> (x .@? "Prefix")
-- | The (non-empty, when present) set of parts used to complete a
-- multipart upload.
--
-- /See:/ 'completedMultipartUpload' smart constructor.
newtype CompletedMultipartUpload = CompletedMultipartUpload'
    { _cmuParts :: Maybe (List1 CompletedPart)
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'CompletedMultipartUpload' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cmuParts'
completedMultipartUpload
    :: CompletedMultipartUpload
completedMultipartUpload = CompletedMultipartUpload' {_cmuParts = Nothing}

-- | Undocumented member.
cmuParts :: Lens' CompletedMultipartUpload (Maybe (NonEmpty CompletedPart))
cmuParts = lens _cmuParts (\obj new -> obj {_cmuParts = new}) . mapping _List1

instance ToXML CompletedMultipartUpload where
    toXML CompletedMultipartUpload'{..} =
        mconcat [toXML (toXMLList "Part" <$> _cmuParts)]
-- | One uploaded part, identified by its number and ETag.
--
-- /See:/ 'completedPart' smart constructor.
data CompletedPart = CompletedPart'
    { _cpPartNumber :: !Int
    , _cpETag :: !ETag
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'CompletedPart' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cpPartNumber'
--
-- * 'cpETag'
completedPart
    :: Int -- ^ 'cpPartNumber'
    -> ETag -- ^ 'cpETag'
    -> CompletedPart
completedPart partNumber etag =
    CompletedPart' {_cpPartNumber = partNumber, _cpETag = etag}

-- | Part number that identifies the part. This is a positive integer between
-- 1 and 10,000.
cpPartNumber :: Lens' CompletedPart Int
cpPartNumber = lens _cpPartNumber (\obj new -> obj {_cpPartNumber = new})

-- | Entity tag returned when the part was uploaded.
cpETag :: Lens' CompletedPart ETag
cpETag = lens _cpETag (\obj new -> obj {_cpETag = new})

instance ToXML CompletedPart where
    toXML CompletedPart'{..} =
        mconcat ["PartNumber" @= _cpPartNumber, "ETag" @= _cpETag]
-- | A website routing-rule condition, matched against the object key
-- prefix and\/or the returned HTTP error code.
--
-- /See:/ 'condition' smart constructor.
data Condition = Condition'
    { _cKeyPrefixEquals :: !(Maybe Text)
    , _cHTTPErrorCodeReturnedEquals :: !(Maybe Text)
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'Condition' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cKeyPrefixEquals'
--
-- * 'cHTTPErrorCodeReturnedEquals'
condition
    :: Condition
condition =
    Condition'
    { _cKeyPrefixEquals = Nothing
    , _cHTTPErrorCodeReturnedEquals = Nothing
    }

-- | The object key name prefix when the redirect is applied. For example, to
-- redirect requests for ExamplePage.html, the key prefix will be
-- ExamplePage.html. To redirect request for all pages with the prefix
-- docs\/, the key prefix will be \/docs, which identifies all objects in
-- the docs\/ folder. Required when the parent element Condition is
-- specified and sibling HttpErrorCodeReturnedEquals is not specified. If
-- both conditions are specified, both must be true for the redirect to be
-- applied.
cKeyPrefixEquals :: Lens' Condition (Maybe Text)
cKeyPrefixEquals = lens _cKeyPrefixEquals (\ s a -> s{_cKeyPrefixEquals = a});

-- | The HTTP error code when the redirect is applied. In the event of an
-- error, if the error code equals this value, then the specified redirect
-- is applied. Required when parent element Condition is specified and
-- sibling KeyPrefixEquals is not specified. If both are specified, then
-- both must be true for the redirect to be applied.
cHTTPErrorCodeReturnedEquals :: Lens' Condition (Maybe Text)
cHTTPErrorCodeReturnedEquals = lens _cHTTPErrorCodeReturnedEquals (\ s a -> s{_cHTTPErrorCodeReturnedEquals = a});

instance FromXML Condition where
        parseXML x
          = Condition' <$>
              (x .@? "KeyPrefixEquals") <*>
                (x .@? "HttpErrorCodeReturnedEquals")

instance ToXML Condition where
        toXML Condition'{..}
          = mconcat
              ["KeyPrefixEquals" @= _cKeyPrefixEquals,
               "HttpErrorCodeReturnedEquals" @=
                 _cHTTPErrorCodeReturnedEquals]
-- | Result metadata for a copied object: its ETag and last-modified time.
--
-- /See:/ 'copyObjectResult' smart constructor.
data CopyObjectResult = CopyObjectResult'
    { _corETag :: !(Maybe ETag)
    , _corLastModified :: !(Maybe RFC822)
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'CopyObjectResult' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'corETag'
--
-- * 'corLastModified'
copyObjectResult
    :: CopyObjectResult
copyObjectResult =
    CopyObjectResult' {_corETag = Nothing, _corLastModified = Nothing}

-- | Undocumented member.
corETag :: Lens' CopyObjectResult (Maybe ETag)
corETag = lens _corETag (\obj new -> obj {_corETag = new})

-- | Undocumented member.
corLastModified :: Lens' CopyObjectResult (Maybe UTCTime)
corLastModified =
    lens _corLastModified (\obj new -> obj {_corLastModified = new}) .
    mapping _Time

instance FromXML CopyObjectResult where
    parseXML x =
        CopyObjectResult' <$> (x .@? "ETag") <*> (x .@? "LastModified")
-- | Result metadata for a copied part: its ETag and upload time.
--
-- /See:/ 'copyPartResult' smart constructor.
data CopyPartResult = CopyPartResult'
    { _cprETag :: !(Maybe ETag)
    , _cprLastModified :: !(Maybe RFC822)
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'CopyPartResult' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cprETag'
--
-- * 'cprLastModified'
copyPartResult
    :: CopyPartResult
copyPartResult =
    CopyPartResult' {_cprETag = Nothing, _cprLastModified = Nothing}

-- | Entity tag of the object.
cprETag :: Lens' CopyPartResult (Maybe ETag)
cprETag = lens _cprETag (\obj new -> obj {_cprETag = new})

-- | Date and time at which the object was uploaded.
cprLastModified :: Lens' CopyPartResult (Maybe UTCTime)
cprLastModified =
    lens _cprLastModified (\obj new -> obj {_cprLastModified = new}) .
    mapping _Time

instance FromXML CopyPartResult where
    parseXML x =
        CopyPartResult' <$> (x .@? "ETag") <*> (x .@? "LastModified")
-- | Optional region constraint used when creating a bucket.
--
-- /See:/ 'createBucketConfiguration' smart constructor.
newtype CreateBucketConfiguration = CreateBucketConfiguration'
    { _cbcLocationConstraint :: Maybe Region
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'CreateBucketConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cbcLocationConstraint'
createBucketConfiguration
    :: CreateBucketConfiguration
createBucketConfiguration =
    CreateBucketConfiguration' {_cbcLocationConstraint = Nothing}

-- | Specifies the region where the bucket will be created. If you don\'t
-- specify a region, the bucket will be created in US Standard.
cbcLocationConstraint :: Lens' CreateBucketConfiguration (Maybe Region)
cbcLocationConstraint =
    lens _cbcLocationConstraint (\obj new -> obj {_cbcLocationConstraint = new})

instance ToXML CreateBucketConfiguration where
    toXML CreateBucketConfiguration'{..} =
        mconcat ["LocationConstraint" @= _cbcLocationConstraint]
-- | A multi-object delete request: the keys to remove plus a quiet flag.
--
-- /See:/ 'delete'' smart constructor.
data Delete = Delete'
    { _dQuiet :: !(Maybe Bool)
    , _dObjects :: ![ObjectIdentifier]
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'Delete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dQuiet'
--
-- * 'dObjects'
delete'
    :: Delete
delete' = Delete' {_dQuiet = Nothing, _dObjects = mempty}

-- | Element to enable quiet mode for the request. When you add this element,
-- you must set its value to true.
dQuiet :: Lens' Delete (Maybe Bool)
dQuiet = lens _dQuiet (\obj new -> obj {_dQuiet = new})

-- | Undocumented member.
dObjects :: Lens' Delete [ObjectIdentifier]
dObjects = lens _dObjects (\obj new -> obj {_dObjects = new}) . _Coerce

instance ToXML Delete where
    toXML Delete'{..} =
        mconcat ["Quiet" @= _dQuiet, toXMLList "Object" _dObjects]
-- | A delete-marker entry as returned by object-version listings.
--
-- /See:/ 'deleteMarkerEntry' smart constructor.
data DeleteMarkerEntry = DeleteMarkerEntry'
    { _dmeVersionId :: !(Maybe ObjectVersionId)
    , _dmeIsLatest :: !(Maybe Bool)
    , _dmeOwner :: !(Maybe Owner)
    , _dmeKey :: !(Maybe ObjectKey)
    , _dmeLastModified :: !(Maybe RFC822)
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'DeleteMarkerEntry' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dmeVersionId'
--
-- * 'dmeIsLatest'
--
-- * 'dmeOwner'
--
-- * 'dmeKey'
--
-- * 'dmeLastModified'
deleteMarkerEntry
    :: DeleteMarkerEntry
deleteMarkerEntry =
    DeleteMarkerEntry'
    { _dmeVersionId = Nothing
    , _dmeIsLatest = Nothing
    , _dmeOwner = Nothing
    , _dmeKey = Nothing
    , _dmeLastModified = Nothing
    }

-- | Version ID of an object.
dmeVersionId :: Lens' DeleteMarkerEntry (Maybe ObjectVersionId)
dmeVersionId = lens _dmeVersionId (\ s a -> s{_dmeVersionId = a});

-- | Specifies whether the object is (true) or is not (false) the latest
-- version of an object.
dmeIsLatest :: Lens' DeleteMarkerEntry (Maybe Bool)
dmeIsLatest = lens _dmeIsLatest (\ s a -> s{_dmeIsLatest = a});

-- | Undocumented member.
dmeOwner :: Lens' DeleteMarkerEntry (Maybe Owner)
dmeOwner = lens _dmeOwner (\ s a -> s{_dmeOwner = a});

-- | The object key.
dmeKey :: Lens' DeleteMarkerEntry (Maybe ObjectKey)
dmeKey = lens _dmeKey (\ s a -> s{_dmeKey = a});

-- | Date and time the object was last modified.
dmeLastModified :: Lens' DeleteMarkerEntry (Maybe UTCTime)
dmeLastModified = lens _dmeLastModified (\ s a -> s{_dmeLastModified = a}) . mapping _Time;

instance FromXML DeleteMarkerEntry where
        parseXML x
          = DeleteMarkerEntry' <$>
              (x .@? "VersionId") <*> (x .@? "IsLatest") <*>
                (x .@? "Owner")
                <*> (x .@? "Key")
                <*> (x .@? "LastModified")
-- | One entry of a multi-object delete response: the deleted key plus
-- any delete-marker bookkeeping.
--
-- /See:/ 'deletedObject' smart constructor.
data DeletedObject = DeletedObject'
    { _dVersionId :: !(Maybe ObjectVersionId)
    , _dDeleteMarker :: !(Maybe Bool)
    , _dDeleteMarkerVersionId :: !(Maybe Text)
    , _dKey :: !(Maybe ObjectKey)
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'DeletedObject' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dVersionId'
--
-- * 'dDeleteMarker'
--
-- * 'dDeleteMarkerVersionId'
--
-- * 'dKey'
deletedObject
    :: DeletedObject
deletedObject =
    DeletedObject'
    { _dVersionId = Nothing
    , _dDeleteMarker = Nothing
    , _dDeleteMarkerVersionId = Nothing
    , _dKey = Nothing
    }

-- | Undocumented member.
dVersionId :: Lens' DeletedObject (Maybe ObjectVersionId)
dVersionId = lens _dVersionId (\ s a -> s{_dVersionId = a});

-- | Undocumented member.
dDeleteMarker :: Lens' DeletedObject (Maybe Bool)
dDeleteMarker = lens _dDeleteMarker (\ s a -> s{_dDeleteMarker = a});

-- | Undocumented member.
dDeleteMarkerVersionId :: Lens' DeletedObject (Maybe Text)
dDeleteMarkerVersionId = lens _dDeleteMarkerVersionId (\ s a -> s{_dDeleteMarkerVersionId = a});

-- | Undocumented member.
dKey :: Lens' DeletedObject (Maybe ObjectKey)
dKey = lens _dKey (\ s a -> s{_dKey = a});

instance FromXML DeletedObject where
        parseXML x
          = DeletedObject' <$>
              (x .@? "VersionId") <*> (x .@? "DeleteMarker") <*>
                (x .@? "DeleteMarkerVersionId")
                <*> (x .@? "Key")
-- | Replication destination: the bucket that receives object replicas.
--
-- /See:/ 'destination' smart constructor.
newtype Destination = Destination'
    { _dBucket :: BucketName
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'Destination' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dBucket'
destination
    :: BucketName -- ^ 'dBucket'
    -> Destination
destination bucketName = Destination' {_dBucket = bucketName}

-- | Amazon resource name (ARN) of the bucket where you want Amazon S3 to
-- store replicas of the object identified by the rule.
dBucket :: Lens' Destination BucketName
dBucket = lens _dBucket (\obj new -> obj {_dBucket = new})

instance FromXML Destination where
    parseXML x = Destination' <$> (x .@ "Bucket")

instance ToXML Destination where
    toXML Destination'{..} = mconcat ["Bucket" @= _dBucket]
-- | The website error document: which object key to serve on 4XX errors.
--
-- /See:/ 'errorDocument' smart constructor.
newtype ErrorDocument = ErrorDocument'
    { _edKey :: ObjectKey
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'ErrorDocument' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'edKey'
errorDocument
    :: ObjectKey -- ^ 'edKey'
    -> ErrorDocument
errorDocument key = ErrorDocument' {_edKey = key}

-- | The object key name to use when a 4XX class error occurs.
edKey :: Lens' ErrorDocument ObjectKey
edKey = lens _edKey (\obj new -> obj {_edKey = new})

instance FromXML ErrorDocument where
    parseXML x = ErrorDocument' <$> (x .@ "Key")

instance ToXML ErrorDocument where
    toXML ErrorDocument'{..} = mconcat ["Key" @= _edKey]
-- | Container for key value pair that defines the criteria for the filter
-- rule.
--
-- /See:/ 'filterRule' smart constructor.
data FilterRule = FilterRule'
    { _frValue :: !(Maybe Text)
    , _frName :: !(Maybe FilterRuleName)
    } deriving (Eq, Read, Show, Data, Typeable, Generic)

-- | Creates a value of 'FilterRule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'frValue'
--
-- * 'frName'
filterRule
    :: FilterRule
filterRule = FilterRule' {_frValue = Nothing, _frName = Nothing}

-- | Undocumented member.
frValue :: Lens' FilterRule (Maybe Text)
frValue = lens _frValue (\obj new -> obj {_frValue = new})

-- | Object key name prefix or suffix identifying one or more objects to
-- which the filtering rule applies. Maximum prefix length can be up to
-- 1,024 characters. Overlapping prefixes and suffixes are not supported.
-- For more information, go to
-- <http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html Configuring Event Notifications>
-- in the Amazon Simple Storage Service Developer Guide.
frName :: Lens' FilterRule (Maybe FilterRuleName)
frName = lens _frName (\obj new -> obj {_frName = new})

instance FromXML FilterRule where
    parseXML x = FilterRule' <$> (x .@? "Value") <*> (x .@? "Name")

instance ToXML FilterRule where
    toXML FilterRule'{..} = mconcat ["Value" @= _frValue, "Name" @= _frName]
-- | /See:/ 'grant' smart constructor.
data Grant = Grant'
{ _gPermission :: !(Maybe Permission)
, _gGrantee :: !(Maybe Grantee)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Grant' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gPermission'
--
-- * 'gGrantee'
grant
:: Grant
grant =
Grant'
{ _gPermission = Nothing
, _gGrantee = Nothing
}
-- | Specifies the permission given to the grantee.
gPermission :: Lens' Grant (Maybe Permission)
gPermission = lens _gPermission (\ s a -> s{_gPermission = a});
-- | Undocumented member.
gGrantee :: Lens' Grant (Maybe Grantee)
gGrantee = lens _gGrantee (\ s a -> s{_gGrantee = a});
-- | Decode a 'Grant' from its optional @Permission@ and @Grantee@ XML elements.
instance FromXML Grant where
parseXML x
= Grant' <$>
(x .@? "Permission") <*> (x .@? "Grantee")
-- | Encode a 'Grant' as @Permission@ and @Grantee@ XML elements.
instance ToXML Grant where
toXML Grant'{..}
= mconcat
["Permission" @= _gPermission,
"Grantee" @= _gGrantee]
-- | /See:/ 'grantee' smart constructor.
data Grantee = Grantee'
{ _gURI :: !(Maybe Text)
, _gEmailAddress :: !(Maybe Text)
, _gDisplayName :: !(Maybe Text)
, _gId :: !(Maybe Text)
, _gType :: !Type
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Grantee' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gURI'
--
-- * 'gEmailAddress'
--
-- * 'gDisplayName'
--
-- * 'gId'
--
-- * 'gType'
grantee
:: Type -- ^ 'gType'
-> Grantee
grantee pType_ =
Grantee'
{ _gURI = Nothing
, _gEmailAddress = Nothing
, _gDisplayName = Nothing
, _gId = Nothing
, _gType = pType_
}
-- | URI of the grantee group.
gURI :: Lens' Grantee (Maybe Text)
gURI = lens _gURI (\ s a -> s{_gURI = a});
-- | Email address of the grantee.
gEmailAddress :: Lens' Grantee (Maybe Text)
gEmailAddress = lens _gEmailAddress (\ s a -> s{_gEmailAddress = a});
-- | Screen name of the grantee.
gDisplayName :: Lens' Grantee (Maybe Text)
gDisplayName = lens _gDisplayName (\ s a -> s{_gDisplayName = a});
-- | The canonical user ID of the grantee.
gId :: Lens' Grantee (Maybe Text)
gId = lens _gId (\ s a -> s{_gId = a});
-- | Type of grantee
gType :: Lens' Grantee Type
gType = lens _gType (\ s a -> s{_gType = a});
-- | Decode a 'Grantee' from XML. Note the required grantee type is read
-- from the @xsi:type@ node, while the other fields are optional elements.
instance FromXML Grantee where
parseXML x
= Grantee' <$>
(x .@? "URI") <*> (x .@? "EmailAddress") <*>
(x .@? "DisplayName")
<*> (x .@? "ID")
<*> (x .@ "xsi:type")
-- | Encode a 'Grantee' as XML, mirroring the element names used by 'FromXML'.
instance ToXML Grantee where
toXML Grantee'{..}
= mconcat
["URI" @= _gURI, "EmailAddress" @= _gEmailAddress,
"DisplayName" @= _gDisplayName, "ID" @= _gId,
"xsi:type" @= _gType]
-- | /See:/ 'indexDocument' smart constructor.
newtype IndexDocument = IndexDocument'
    { _idSuffix :: Text
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'IndexDocument' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'idSuffix'
indexDocument
    :: Text -- ^ 'idSuffix'
    -> IndexDocument
indexDocument pSuffix_ = IndexDocument' {_idSuffix = pSuffix_}

-- | A suffix that is appended to a request that is for a directory on the
-- website endpoint (e.g. if the suffix is index.html and you make a
-- request to samplebucket\/images\/ the data that is returned will be for
-- the object with the key name images\/index.html) The suffix must not be
-- empty and must not include a slash character.
idSuffix :: Lens' IndexDocument Text
idSuffix = lens _idSuffix setSuffix
  where
    setSuffix doc s = doc {_idSuffix = s}

-- | Decode an 'IndexDocument' from its @Suffix@ XML element.
instance FromXML IndexDocument where
    parseXML x = IndexDocument' <$> (x .@ "Suffix")

-- | Encode an 'IndexDocument' as its @Suffix@ XML element.
instance ToXML IndexDocument where
    toXML IndexDocument'{..} = mconcat ["Suffix" @= _idSuffix]
-- | /See:/ 'initiator' smart constructor.
data Initiator = Initiator'
{ _iDisplayName :: !(Maybe Text)
, _iId :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Initiator' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'iDisplayName'
--
-- * 'iId'
initiator
:: Initiator
initiator =
Initiator'
{ _iDisplayName = Nothing
, _iId = Nothing
}
-- | Name of the Principal.
iDisplayName :: Lens' Initiator (Maybe Text)
iDisplayName = lens _iDisplayName (\ s a -> s{_iDisplayName = a});
-- | If the principal is an AWS account, it provides the Canonical User ID.
-- If the principal is an IAM User, it provides a user ARN value.
iId :: Lens' Initiator (Maybe Text)
iId = lens _iId (\ s a -> s{_iId = a});
-- | Decode an 'Initiator' from its optional @DisplayName@ and @ID@ XML
-- elements. No 'ToXML' instance is defined for this type here.
instance FromXML Initiator where
parseXML x
= Initiator' <$>
(x .@? "DisplayName") <*> (x .@? "ID")
-- | Container for specifying the AWS Lambda notification configuration.
--
-- /See:/ 'lambdaFunctionConfiguration' smart constructor.
data LambdaFunctionConfiguration = LambdaFunctionConfiguration'
{ _lfcId :: !(Maybe Text)
, _lfcFilter :: !(Maybe NotificationConfigurationFilter)
, _lfcLambdaFunctionARN :: !Text
, _lfcEvents :: ![Event]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'LambdaFunctionConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lfcId'
--
-- * 'lfcFilter'
--
-- * 'lfcLambdaFunctionARN'
--
-- * 'lfcEvents'
lambdaFunctionConfiguration
:: Text -- ^ 'lfcLambdaFunctionARN'
-> LambdaFunctionConfiguration
lambdaFunctionConfiguration pLambdaFunctionARN_ =
LambdaFunctionConfiguration'
{ _lfcId = Nothing
, _lfcFilter = Nothing
, _lfcLambdaFunctionARN = pLambdaFunctionARN_
, _lfcEvents = mempty
}
-- | Undocumented member.
lfcId :: Lens' LambdaFunctionConfiguration (Maybe Text)
lfcId = lens _lfcId (\ s a -> s{_lfcId = a});
-- | Undocumented member.
lfcFilter :: Lens' LambdaFunctionConfiguration (Maybe NotificationConfigurationFilter)
lfcFilter = lens _lfcFilter (\ s a -> s{_lfcFilter = a});
-- | Lambda cloud function ARN that Amazon S3 can invoke when it detects
-- events of the specified type.
lfcLambdaFunctionARN :: Lens' LambdaFunctionConfiguration Text
lfcLambdaFunctionARN = lens _lfcLambdaFunctionARN (\ s a -> s{_lfcLambdaFunctionARN = a});
-- | Undocumented member.
lfcEvents :: Lens' LambdaFunctionConfiguration [Event]
lfcEvents = lens _lfcEvents (\ s a -> s{_lfcEvents = a}) . _Coerce;
-- | Decode from @Id@, @Filter@, @CloudFunction@ and repeated @Event@
-- elements. Note the ARN lives under the element name @CloudFunction@,
-- not @LambdaFunctionArn@.
instance FromXML LambdaFunctionConfiguration where
parseXML x
= LambdaFunctionConfiguration' <$>
(x .@? "Id") <*> (x .@? "Filter") <*>
(x .@ "CloudFunction")
<*> (parseXMLList "Event" x)
-- | Encode using the same element names that 'FromXML' reads.
instance ToXML LambdaFunctionConfiguration where
toXML LambdaFunctionConfiguration'{..}
= mconcat
["Id" @= _lfcId, "Filter" @= _lfcFilter,
"CloudFunction" @= _lfcLambdaFunctionARN,
toXMLList "Event" _lfcEvents]
-- | /See:/ 'lifecycleConfiguration' smart constructor.
newtype LifecycleConfiguration = LifecycleConfiguration'
{ _lcRules :: [Rule]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'LifecycleConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lcRules'
lifecycleConfiguration
:: LifecycleConfiguration
lifecycleConfiguration =
LifecycleConfiguration'
{ _lcRules = mempty
}
-- | Undocumented member.
lcRules :: Lens' LifecycleConfiguration [Rule]
lcRules = lens _lcRules (\ s a -> s{_lcRules = a}) . _Coerce;
-- | Encode as a flat list of @Rule@ elements. This type is request-only:
-- no 'FromXML' instance is defined here.
instance ToXML LifecycleConfiguration where
toXML LifecycleConfiguration'{..}
= mconcat [toXMLList "Rule" _lcRules]
-- | /See:/ 'lifecycleExpiration' smart constructor.
data LifecycleExpiration = LifecycleExpiration'
{ _leDays :: !(Maybe Int)
, _leDate :: !(Maybe RFC822)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'LifecycleExpiration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'leDays'
--
-- * 'leDate'
lifecycleExpiration
:: LifecycleExpiration
lifecycleExpiration =
LifecycleExpiration'
{ _leDays = Nothing
, _leDate = Nothing
}
-- | Indicates the lifetime, in days, of the objects that are subject to the
-- rule. The value must be a non-zero positive integer.
leDays :: Lens' LifecycleExpiration (Maybe Int)
leDays = lens _leDays (\ s a -> s{_leDays = a});
-- | Indicates at what date the object is to be moved or deleted. Should be
-- in GMT ISO 8601 Format.
--
-- The stored field is an 'RFC822' wrapper; @mapping _Time@ exposes it to
-- callers as a plain 'UTCTime'.
leDate :: Lens' LifecycleExpiration (Maybe UTCTime)
leDate = lens _leDate (\ s a -> s{_leDate = a}) . mapping _Time;
-- | Decode from optional @Days@ and @Date@ XML elements.
instance FromXML LifecycleExpiration where
parseXML x
= LifecycleExpiration' <$>
(x .@? "Days") <*> (x .@? "Date")
-- | Encode as @Days@ and @Date@ XML elements.
instance ToXML LifecycleExpiration where
toXML LifecycleExpiration'{..}
= mconcat ["Days" @= _leDays, "Date" @= _leDate]
-- | /See:/ 'loggingEnabled' smart constructor.
data LoggingEnabled = LoggingEnabled'
{ _leTargetBucket :: !(Maybe Text)
, _leTargetGrants :: !(Maybe [TargetGrant])
, _leTargetPrefix :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'LoggingEnabled' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'leTargetBucket'
--
-- * 'leTargetGrants'
--
-- * 'leTargetPrefix'
loggingEnabled
:: LoggingEnabled
loggingEnabled =
LoggingEnabled'
{ _leTargetBucket = Nothing
, _leTargetGrants = Nothing
, _leTargetPrefix = Nothing
}
-- | Specifies the bucket where you want Amazon S3 to store server access
-- logs. You can have your logs delivered to any bucket that you own,
-- including the same bucket that is being logged. You can also configure
-- multiple buckets to deliver their logs to the same target bucket. In
-- this case you should choose a different TargetPrefix for each source
-- bucket so that the delivered log files can be distinguished by key.
leTargetBucket :: Lens' LoggingEnabled (Maybe Text)
leTargetBucket = lens _leTargetBucket (\ s a -> s{_leTargetBucket = a});
-- | Undocumented member.
--
-- The field is @Maybe [TargetGrant]@ but @_Default@ presents it as a
-- plain list to callers ('Nothing' behaves as the empty list).
leTargetGrants :: Lens' LoggingEnabled [TargetGrant]
leTargetGrants = lens _leTargetGrants (\ s a -> s{_leTargetGrants = a}) . _Default . _Coerce;
-- | This element lets you specify a prefix for the keys that the log files
-- will be stored under.
leTargetPrefix :: Lens' LoggingEnabled (Maybe Text)
leTargetPrefix = lens _leTargetPrefix (\ s a -> s{_leTargetPrefix = a});
-- | Decode from XML; grants are nested @Grant@ elements inside an optional
-- @TargetGrants@ wrapper (an absent wrapper yields an empty list).
instance FromXML LoggingEnabled where
parseXML x
= LoggingEnabled' <$>
(x .@? "TargetBucket") <*>
(x .@? "TargetGrants" .!@ mempty >>=
may (parseXMLList "Grant"))
<*> (x .@? "TargetPrefix")
-- | Encode to XML, mirroring the nested @TargetGrants@/@Grant@ structure.
instance ToXML LoggingEnabled where
toXML LoggingEnabled'{..}
= mconcat
["TargetBucket" @= _leTargetBucket,
"TargetGrants" @=
toXML (toXMLList "Grant" <$> _leTargetGrants),
"TargetPrefix" @= _leTargetPrefix]
-- | /See:/ 'multipartUpload' smart constructor.
data MultipartUpload = MultipartUpload'
{ _muInitiated :: !(Maybe RFC822)
, _muInitiator :: !(Maybe Initiator)
, _muOwner :: !(Maybe Owner)
, _muKey :: !(Maybe ObjectKey)
, _muStorageClass :: !(Maybe StorageClass)
, _muUploadId :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'MultipartUpload' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'muInitiated'
--
-- * 'muInitiator'
--
-- * 'muOwner'
--
-- * 'muKey'
--
-- * 'muStorageClass'
--
-- * 'muUploadId'
multipartUpload
:: MultipartUpload
multipartUpload =
MultipartUpload'
{ _muInitiated = Nothing
, _muInitiator = Nothing
, _muOwner = Nothing
, _muKey = Nothing
, _muStorageClass = Nothing
, _muUploadId = Nothing
}
-- | Date and time at which the multipart upload was initiated.
muInitiated :: Lens' MultipartUpload (Maybe UTCTime)
muInitiated = lens _muInitiated (\ s a -> s{_muInitiated = a}) . mapping _Time;
-- | Identifies who initiated the multipart upload.
muInitiator :: Lens' MultipartUpload (Maybe Initiator)
muInitiator = lens _muInitiator (\ s a -> s{_muInitiator = a});
-- | Undocumented member.
muOwner :: Lens' MultipartUpload (Maybe Owner)
muOwner = lens _muOwner (\ s a -> s{_muOwner = a});
-- | Key of the object for which the multipart upload was initiated.
muKey :: Lens' MultipartUpload (Maybe ObjectKey)
muKey = lens _muKey (\ s a -> s{_muKey = a});
-- | The class of storage used to store the object.
muStorageClass :: Lens' MultipartUpload (Maybe StorageClass)
muStorageClass = lens _muStorageClass (\ s a -> s{_muStorageClass = a});
-- | Upload ID that identifies the multipart upload.
muUploadId :: Lens' MultipartUpload (Maybe Text)
muUploadId = lens _muUploadId (\ s a -> s{_muUploadId = a});
-- | Decode a 'MultipartUpload' from XML; all six elements are optional.
-- No 'ToXML' instance is defined for this type here.
instance FromXML MultipartUpload where
parseXML x
= MultipartUpload' <$>
(x .@? "Initiated") <*> (x .@? "Initiator") <*>
(x .@? "Owner")
<*> (x .@? "Key")
<*> (x .@? "StorageClass")
<*> (x .@? "UploadId")
-- | Specifies when noncurrent object versions expire. Upon expiration,
-- Amazon S3 permanently deletes the noncurrent object versions. You set
-- this lifecycle configuration action on a bucket that has versioning
-- enabled (or suspended) to request that Amazon S3 delete noncurrent
-- object versions at a specific period in the object\'s lifetime.
--
-- /See:/ 'noncurrentVersionExpiration' smart constructor.
newtype NoncurrentVersionExpiration = NoncurrentVersionExpiration'
    { _nveNoncurrentDays :: Int
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'NoncurrentVersionExpiration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'nveNoncurrentDays'
noncurrentVersionExpiration
    :: Int -- ^ 'nveNoncurrentDays'
    -> NoncurrentVersionExpiration
noncurrentVersionExpiration pNoncurrentDays_ =
    NoncurrentVersionExpiration' {_nveNoncurrentDays = pNoncurrentDays_}

-- | Specifies the number of days an object is noncurrent before Amazon S3
-- can perform the associated action. For information about the noncurrent
-- days calculations, see
-- </AmazonS3/latest/dev/s3-access-control.html How Amazon S3 Calculates When an Object Became Noncurrent>
-- in the Amazon Simple Storage Service Developer Guide.
nveNoncurrentDays :: Lens' NoncurrentVersionExpiration Int
nveNoncurrentDays = lens _nveNoncurrentDays setDays
  where
    setDays nve d = nve {_nveNoncurrentDays = d}

-- | Decode from the @NoncurrentDays@ XML element.
instance FromXML NoncurrentVersionExpiration where
    parseXML x = NoncurrentVersionExpiration' <$> (x .@ "NoncurrentDays")

-- | Encode as the @NoncurrentDays@ XML element.
instance ToXML NoncurrentVersionExpiration where
    toXML NoncurrentVersionExpiration'{..} =
        mconcat ["NoncurrentDays" @= _nveNoncurrentDays]
-- | Container for the transition rule that describes when noncurrent objects
-- transition to the GLACIER storage class. If your bucket is
-- versioning-enabled (or versioning is suspended), you can set this action
-- to request that Amazon S3 transition noncurrent object versions to the
-- GLACIER storage class at a specific period in the object\'s lifetime.
--
-- /See:/ 'noncurrentVersionTransition' smart constructor.
data NoncurrentVersionTransition = NoncurrentVersionTransition'
{ _nvtNoncurrentDays :: !Int
, _nvtStorageClass :: !TransitionStorageClass
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'NoncurrentVersionTransition' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'nvtNoncurrentDays'
--
-- * 'nvtStorageClass'
noncurrentVersionTransition
:: Int -- ^ 'nvtNoncurrentDays'
-> TransitionStorageClass -- ^ 'nvtStorageClass'
-> NoncurrentVersionTransition
noncurrentVersionTransition pNoncurrentDays_ pStorageClass_ =
NoncurrentVersionTransition'
{ _nvtNoncurrentDays = pNoncurrentDays_
, _nvtStorageClass = pStorageClass_
}
-- | Specifies the number of days an object is noncurrent before Amazon S3
-- can perform the associated action. For information about the noncurrent
-- days calculations, see
-- </AmazonS3/latest/dev/s3-access-control.html How Amazon S3 Calculates When an Object Became Noncurrent>
-- in the Amazon Simple Storage Service Developer Guide.
nvtNoncurrentDays :: Lens' NoncurrentVersionTransition Int
nvtNoncurrentDays = lens _nvtNoncurrentDays (\ s a -> s{_nvtNoncurrentDays = a});
-- | The class of storage used to store the object.
nvtStorageClass :: Lens' NoncurrentVersionTransition TransitionStorageClass
nvtStorageClass = lens _nvtStorageClass (\ s a -> s{_nvtStorageClass = a});
-- | Decode from required @NoncurrentDays@ and @StorageClass@ XML elements.
instance FromXML NoncurrentVersionTransition where
parseXML x
= NoncurrentVersionTransition' <$>
(x .@ "NoncurrentDays") <*> (x .@ "StorageClass")
-- | Encode as @NoncurrentDays@ and @StorageClass@ XML elements.
instance ToXML NoncurrentVersionTransition where
toXML NoncurrentVersionTransition'{..}
= mconcat
["NoncurrentDays" @= _nvtNoncurrentDays,
"StorageClass" @= _nvtStorageClass]
-- | Container for specifying the notification configuration of the bucket.
-- If this element is empty, notifications are turned off on the bucket.
--
-- /See:/ 'notificationConfiguration' smart constructor.
data NotificationConfiguration = NotificationConfiguration'
{ _ncQueueConfigurations :: !(Maybe [QueueConfiguration])
, _ncTopicConfigurations :: !(Maybe [TopicConfiguration])
, _ncLambdaFunctionConfigurations :: !(Maybe [LambdaFunctionConfiguration])
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'NotificationConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ncQueueConfigurations'
--
-- * 'ncTopicConfigurations'
--
-- * 'ncLambdaFunctionConfigurations'
notificationConfiguration
:: NotificationConfiguration
notificationConfiguration =
NotificationConfiguration'
{ _ncQueueConfigurations = Nothing
, _ncTopicConfigurations = Nothing
, _ncLambdaFunctionConfigurations = Nothing
}
-- | Undocumented member.
ncQueueConfigurations :: Lens' NotificationConfiguration [QueueConfiguration]
ncQueueConfigurations = lens _ncQueueConfigurations (\ s a -> s{_ncQueueConfigurations = a}) . _Default . _Coerce;
-- | Undocumented member.
ncTopicConfigurations :: Lens' NotificationConfiguration [TopicConfiguration]
ncTopicConfigurations = lens _ncTopicConfigurations (\ s a -> s{_ncTopicConfigurations = a}) . _Default . _Coerce;
-- | Undocumented member.
ncLambdaFunctionConfigurations :: Lens' NotificationConfiguration [LambdaFunctionConfiguration]
ncLambdaFunctionConfigurations = lens _ncLambdaFunctionConfigurations (\ s a -> s{_ncLambdaFunctionConfigurations = a}) . _Default . _Coerce;
-- | Decode from repeated configuration elements. Note the Lambda
-- configurations use the element name @CloudFunctionConfiguration@, not
-- @LambdaFunctionConfiguration@.
instance FromXML NotificationConfiguration where
parseXML x
= NotificationConfiguration' <$>
(may (parseXMLList "QueueConfiguration") x) <*>
(may (parseXMLList "TopicConfiguration") x)
<*>
(may (parseXMLList "CloudFunctionConfiguration") x)
-- | Encode using the same element names that 'FromXML' reads.
instance ToXML NotificationConfiguration where
toXML NotificationConfiguration'{..}
= mconcat
[toXML
(toXMLList "QueueConfiguration" <$>
_ncQueueConfigurations),
toXML
(toXMLList "TopicConfiguration" <$>
_ncTopicConfigurations),
toXML
(toXMLList "CloudFunctionConfiguration" <$>
_ncLambdaFunctionConfigurations)]
-- | Container for object key name filtering rules. For information about key
-- name filtering, go to
-- <http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html Configuring Event Notifications>
-- in the Amazon Simple Storage Service Developer Guide.
--
-- /See:/ 'notificationConfigurationFilter' smart constructor.
newtype NotificationConfigurationFilter = NotificationConfigurationFilter'
{ _ncfKey :: Maybe S3KeyFilter
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'NotificationConfigurationFilter' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ncfKey'
notificationConfigurationFilter
:: NotificationConfigurationFilter
notificationConfigurationFilter =
NotificationConfigurationFilter'
{ _ncfKey = Nothing
}
-- | Undocumented member.
ncfKey :: Lens' NotificationConfigurationFilter (Maybe S3KeyFilter)
ncfKey = lens _ncfKey (\ s a -> s{_ncfKey = a});
-- | Decode from an optional @S3Key@ XML element.
instance FromXML NotificationConfigurationFilter
where
parseXML x
= NotificationConfigurationFilter' <$>
(x .@? "S3Key")
-- | Encode as an @S3Key@ XML element.
instance ToXML NotificationConfigurationFilter where
toXML NotificationConfigurationFilter'{..}
= mconcat ["S3Key" @= _ncfKey]
-- | /See:/ 'object'' smart constructor.
data Object = Object'
{ _oOwner :: !(Maybe Owner)
, _oETag :: !ETag
, _oSize :: !Int
, _oKey :: !ObjectKey
, _oStorageClass :: !ObjectStorageClass
, _oLastModified :: !RFC822
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Object' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oOwner'
--
-- * 'oETag'
--
-- * 'oSize'
--
-- * 'oKey'
--
-- * 'oStorageClass'
--
-- * 'oLastModified'
object'
:: ETag -- ^ 'oETag'
-> Int -- ^ 'oSize'
-> ObjectKey -- ^ 'oKey'
-> ObjectStorageClass -- ^ 'oStorageClass'
-> UTCTime -- ^ 'oLastModified'
-> Object
object' pETag_ pSize_ pKey_ pStorageClass_ pLastModified_ =
Object'
{ _oOwner = Nothing
, _oETag = pETag_
, _oSize = pSize_
, _oKey = pKey_
, _oStorageClass = pStorageClass_
-- @_Time #@ reviews the caller's 'UTCTime' into the stored 'RFC822' wrapper.
, _oLastModified = _Time # pLastModified_
}
-- | Undocumented member.
oOwner :: Lens' Object (Maybe Owner)
oOwner = lens _oOwner (\ s a -> s{_oOwner = a});
-- | Undocumented member.
oETag :: Lens' Object ETag
oETag = lens _oETag (\ s a -> s{_oETag = a});
-- | Undocumented member.
oSize :: Lens' Object Int
oSize = lens _oSize (\ s a -> s{_oSize = a});
-- | Undocumented member.
oKey :: Lens' Object ObjectKey
oKey = lens _oKey (\ s a -> s{_oKey = a});
-- | The class of storage used to store the object.
oStorageClass :: Lens' Object ObjectStorageClass
oStorageClass = lens _oStorageClass (\ s a -> s{_oStorageClass = a});
-- | Undocumented member.
oLastModified :: Lens' Object UTCTime
oLastModified = lens _oLastModified (\ s a -> s{_oLastModified = a}) . _Time;
-- | Decode an 'Object' from XML; only @Owner@ is optional, the remaining
-- five elements are required. No 'ToXML' instance is defined here.
instance FromXML Object where
parseXML x
= Object' <$>
(x .@? "Owner") <*> (x .@ "ETag") <*> (x .@ "Size")
<*> (x .@ "Key")
<*> (x .@ "StorageClass")
<*> (x .@ "LastModified")
-- | /See:/ 'objectIdentifier' smart constructor.
data ObjectIdentifier = ObjectIdentifier'
    { _oiVersionId :: !(Maybe ObjectVersionId)
    , _oiKey :: !ObjectKey
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'ObjectIdentifier' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oiVersionId'
--
-- * 'oiKey'
objectIdentifier
    :: ObjectKey -- ^ 'oiKey'
    -> ObjectIdentifier
objectIdentifier pKey_ =
    ObjectIdentifier' {_oiVersionId = Nothing, _oiKey = pKey_}

-- | VersionId for the specific version of the object to delete.
oiVersionId :: Lens' ObjectIdentifier (Maybe ObjectVersionId)
oiVersionId = lens _oiVersionId setVersion
  where
    setVersion oi v = oi {_oiVersionId = v}

-- | Key name of the object to delete.
oiKey :: Lens' ObjectIdentifier ObjectKey
oiKey = lens _oiKey setKey
  where
    setKey oi k = oi {_oiKey = k}

-- | Encode as @VersionId@ and @Key@ XML elements. This type is
-- request-only: no 'FromXML' instance is defined here.
instance ToXML ObjectIdentifier where
    toXML ObjectIdentifier'{..} =
        mconcat ["VersionId" @= _oiVersionId, "Key" @= _oiKey]
-- | /See:/ 'objectVersion' smart constructor.
data ObjectVersion = ObjectVersion'
{ _ovETag :: !(Maybe ETag)
, _ovVersionId :: !(Maybe ObjectVersionId)
, _ovSize :: !(Maybe Int)
, _ovIsLatest :: !(Maybe Bool)
, _ovOwner :: !(Maybe Owner)
, _ovKey :: !(Maybe ObjectKey)
, _ovStorageClass :: !(Maybe ObjectVersionStorageClass)
, _ovLastModified :: !(Maybe RFC822)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ObjectVersion' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ovETag'
--
-- * 'ovVersionId'
--
-- * 'ovSize'
--
-- * 'ovIsLatest'
--
-- * 'ovOwner'
--
-- * 'ovKey'
--
-- * 'ovStorageClass'
--
-- * 'ovLastModified'
objectVersion
:: ObjectVersion
objectVersion =
ObjectVersion'
{ _ovETag = Nothing
, _ovVersionId = Nothing
, _ovSize = Nothing
, _ovIsLatest = Nothing
, _ovOwner = Nothing
, _ovKey = Nothing
, _ovStorageClass = Nothing
, _ovLastModified = Nothing
}
-- | Undocumented member.
ovETag :: Lens' ObjectVersion (Maybe ETag)
ovETag = lens _ovETag (\ s a -> s{_ovETag = a});
-- | Version ID of an object.
ovVersionId :: Lens' ObjectVersion (Maybe ObjectVersionId)
ovVersionId = lens _ovVersionId (\ s a -> s{_ovVersionId = a});
-- | Size in bytes of the object.
ovSize :: Lens' ObjectVersion (Maybe Int)
ovSize = lens _ovSize (\ s a -> s{_ovSize = a});
-- | Specifies whether the object is (true) or is not (false) the latest
-- version of an object.
ovIsLatest :: Lens' ObjectVersion (Maybe Bool)
ovIsLatest = lens _ovIsLatest (\ s a -> s{_ovIsLatest = a});
-- | Undocumented member.
ovOwner :: Lens' ObjectVersion (Maybe Owner)
ovOwner = lens _ovOwner (\ s a -> s{_ovOwner = a});
-- | The object key.
ovKey :: Lens' ObjectVersion (Maybe ObjectKey)
ovKey = lens _ovKey (\ s a -> s{_ovKey = a});
-- | The class of storage used to store the object.
ovStorageClass :: Lens' ObjectVersion (Maybe ObjectVersionStorageClass)
ovStorageClass = lens _ovStorageClass (\ s a -> s{_ovStorageClass = a});
-- | Date and time the object was last modified.
ovLastModified :: Lens' ObjectVersion (Maybe UTCTime)
ovLastModified = lens _ovLastModified (\ s a -> s{_ovLastModified = a}) . mapping _Time;
-- | Decode an 'ObjectVersion' from XML; all eight elements are optional.
-- No 'ToXML' instance is defined here.
instance FromXML ObjectVersion where
parseXML x
= ObjectVersion' <$>
(x .@? "ETag") <*> (x .@? "VersionId") <*>
(x .@? "Size")
<*> (x .@? "IsLatest")
<*> (x .@? "Owner")
<*> (x .@? "Key")
<*> (x .@? "StorageClass")
<*> (x .@? "LastModified")
-- | /See:/ 'owner' smart constructor.
data Owner = Owner'
    { _oDisplayName :: !(Maybe Text)
    , _oId :: !(Maybe Text)
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'Owner' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oDisplayName'
--
-- * 'oId'
owner
    :: Owner
owner = Owner' {_oDisplayName = Nothing, _oId = Nothing}

-- | Undocumented member.
oDisplayName :: Lens' Owner (Maybe Text)
oDisplayName = lens _oDisplayName setName
  where
    setName o n = o {_oDisplayName = n}

-- | Undocumented member.
oId :: Lens' Owner (Maybe Text)
oId = lens _oId setId
  where
    setId o i = o {_oId = i}

-- | Decode an 'Owner' from its optional @DisplayName@ and @ID@ XML elements.
instance FromXML Owner where
    parseXML x = Owner' <$> (x .@? "DisplayName") <*> (x .@? "ID")

-- | Encode an 'Owner' as @DisplayName@ and @ID@ XML elements.
instance ToXML Owner where
    toXML Owner'{..} =
        mconcat ["DisplayName" @= _oDisplayName, "ID" @= _oId]
-- | /See:/ 'part' smart constructor.
data Part = Part'
{ _pETag :: !(Maybe ETag)
, _pSize :: !(Maybe Int)
, _pPartNumber :: !(Maybe Int)
, _pLastModified :: !(Maybe RFC822)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Part' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pETag'
--
-- * 'pSize'
--
-- * 'pPartNumber'
--
-- * 'pLastModified'
part
:: Part
part =
Part'
{ _pETag = Nothing
, _pSize = Nothing
, _pPartNumber = Nothing
, _pLastModified = Nothing
}
-- | Entity tag returned when the part was uploaded.
pETag :: Lens' Part (Maybe ETag)
pETag = lens _pETag (\ s a -> s{_pETag = a});
-- | Size of the uploaded part data.
pSize :: Lens' Part (Maybe Int)
pSize = lens _pSize (\ s a -> s{_pSize = a});
-- | Part number identifying the part. This is a positive integer between 1
-- and 10,000.
pPartNumber :: Lens' Part (Maybe Int)
pPartNumber = lens _pPartNumber (\ s a -> s{_pPartNumber = a});
-- | Date and time at which the part was uploaded.
pLastModified :: Lens' Part (Maybe UTCTime)
pLastModified = lens _pLastModified (\ s a -> s{_pLastModified = a}) . mapping _Time;
-- | Decode a 'Part' from XML; all four elements are optional.
-- No 'ToXML' instance is defined here.
instance FromXML Part where
parseXML x
= Part' <$>
(x .@? "ETag") <*> (x .@? "Size") <*>
(x .@? "PartNumber")
<*> (x .@? "LastModified")
-- | Container for specifying an configuration when you want Amazon S3 to
-- publish events to an Amazon Simple Queue Service (Amazon SQS) queue.
--
-- /See:/ 'queueConfiguration' smart constructor.
data QueueConfiguration = QueueConfiguration'
{ _qcId :: !(Maybe Text)
, _qcFilter :: !(Maybe NotificationConfigurationFilter)
, _qcQueueARN :: !Text
, _qcEvents :: ![Event]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'QueueConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'qcId'
--
-- * 'qcFilter'
--
-- * 'qcQueueARN'
--
-- * 'qcEvents'
queueConfiguration
:: Text -- ^ 'qcQueueARN'
-> QueueConfiguration
queueConfiguration pQueueARN_ =
QueueConfiguration'
{ _qcId = Nothing
, _qcFilter = Nothing
, _qcQueueARN = pQueueARN_
, _qcEvents = mempty
}
-- | Undocumented member.
qcId :: Lens' QueueConfiguration (Maybe Text)
qcId = lens _qcId (\ s a -> s{_qcId = a});
-- | Undocumented member.
qcFilter :: Lens' QueueConfiguration (Maybe NotificationConfigurationFilter)
qcFilter = lens _qcFilter (\ s a -> s{_qcFilter = a});
-- | Amazon SQS queue ARN to which Amazon S3 will publish a message when it
-- detects events of specified type.
qcQueueARN :: Lens' QueueConfiguration Text
qcQueueARN = lens _qcQueueARN (\ s a -> s{_qcQueueARN = a});
-- | Undocumented member.
qcEvents :: Lens' QueueConfiguration [Event]
qcEvents = lens _qcEvents (\ s a -> s{_qcEvents = a}) . _Coerce;
-- | Decode from @Id@, @Filter@, @Queue@ and repeated @Event@ elements.
-- Note the ARN lives under the element name @Queue@, not @QueueArn@.
instance FromXML QueueConfiguration where
parseXML x
= QueueConfiguration' <$>
(x .@? "Id") <*> (x .@? "Filter") <*> (x .@ "Queue")
<*> (parseXMLList "Event" x)
-- | Encode using the same element names that 'FromXML' reads.
instance ToXML QueueConfiguration where
toXML QueueConfiguration'{..}
= mconcat
["Id" @= _qcId, "Filter" @= _qcFilter,
"Queue" @= _qcQueueARN, toXMLList "Event" _qcEvents]
-- | /See:/ 'redirect' smart constructor.
data Redirect = Redirect'
{ _rHostName :: !(Maybe Text)
, _rProtocol :: !(Maybe Protocol)
, _rHTTPRedirectCode :: !(Maybe Text)
, _rReplaceKeyWith :: !(Maybe Text)
, _rReplaceKeyPrefixWith :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Redirect' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rHostName'
--
-- * 'rProtocol'
--
-- * 'rHTTPRedirectCode'
--
-- * 'rReplaceKeyWith'
--
-- * 'rReplaceKeyPrefixWith'
redirect
:: Redirect
redirect =
Redirect'
{ _rHostName = Nothing
, _rProtocol = Nothing
, _rHTTPRedirectCode = Nothing
, _rReplaceKeyWith = Nothing
, _rReplaceKeyPrefixWith = Nothing
}
-- | The host name to use in the redirect request.
rHostName :: Lens' Redirect (Maybe Text)
rHostName = lens _rHostName (\ s a -> s{_rHostName = a});
-- | Protocol to use (http, https) when redirecting requests. The default is
-- the protocol that is used in the original request.
rProtocol :: Lens' Redirect (Maybe Protocol)
rProtocol = lens _rProtocol (\ s a -> s{_rProtocol = a});
-- | The HTTP redirect code to use on the response. Not required if one of
-- the siblings is present.
rHTTPRedirectCode :: Lens' Redirect (Maybe Text)
rHTTPRedirectCode = lens _rHTTPRedirectCode (\ s a -> s{_rHTTPRedirectCode = a});
-- | The specific object key to use in the redirect request. For example,
-- redirect request to error.html. Not required if one of the sibling is
-- present. Can be present only if ReplaceKeyPrefixWith is not provided.
rReplaceKeyWith :: Lens' Redirect (Maybe Text)
rReplaceKeyWith = lens _rReplaceKeyWith (\ s a -> s{_rReplaceKeyWith = a});
-- | The object key prefix to use in the redirect request. For example, to
-- redirect requests for all pages with prefix docs\/ (objects in the
-- docs\/ folder) to documents\/, you can set a condition block with
-- KeyPrefixEquals set to docs\/ and in the Redirect set
-- ReplaceKeyPrefixWith to \/documents. Not required if one of the siblings
-- is present. Can be present only if ReplaceKeyWith is not provided.
rReplaceKeyPrefixWith :: Lens' Redirect (Maybe Text)
rReplaceKeyPrefixWith = lens _rReplaceKeyPrefixWith (\ s a -> s{_rReplaceKeyPrefixWith = a});
-- | Decode a 'Redirect' from XML; all five elements are optional. Note the
-- wire element name is @HttpRedirectCode@ (mixed case), while the Haskell
-- field is '_rHTTPRedirectCode'.
instance FromXML Redirect where
parseXML x
= Redirect' <$>
(x .@? "HostName") <*> (x .@? "Protocol") <*>
(x .@? "HttpRedirectCode")
<*> (x .@? "ReplaceKeyWith")
<*> (x .@? "ReplaceKeyPrefixWith")
-- | Encode using the same element names that 'FromXML' reads.
instance ToXML Redirect where
toXML Redirect'{..}
= mconcat
["HostName" @= _rHostName, "Protocol" @= _rProtocol,
"HttpRedirectCode" @= _rHTTPRedirectCode,
"ReplaceKeyWith" @= _rReplaceKeyWith,
"ReplaceKeyPrefixWith" @= _rReplaceKeyPrefixWith]
-- | /See:/ 'redirectAllRequestsTo' smart constructor.
-- Only '_rartHostName' is required (strict 'Text'); the protocol is
-- optional and defaults to 'Nothing' in the smart constructor.
data RedirectAllRequestsTo = RedirectAllRequestsTo'
    { _rartProtocol :: !(Maybe Protocol)
    , _rartHostName :: !Text
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'RedirectAllRequestsTo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rartProtocol'
--
-- * 'rartHostName'
redirectAllRequestsTo
    :: Text -- ^ 'rartHostName'
    -> RedirectAllRequestsTo
redirectAllRequestsTo pHostName_ =
    RedirectAllRequestsTo'
    { _rartProtocol = Nothing
    , _rartHostName = pHostName_
    }

-- | Protocol to use (http, https) when redirecting requests. The default is
-- the protocol that is used in the original request.
rartProtocol :: Lens' RedirectAllRequestsTo (Maybe Protocol)
rartProtocol = lens _rartProtocol (\ s a -> s{_rartProtocol = a});

-- | Name of the host where requests will be redirected.
rartHostName :: Lens' RedirectAllRequestsTo Text
rartHostName = lens _rartHostName (\ s a -> s{_rartHostName = a});

instance FromXML RedirectAllRequestsTo where
    parseXML x
      = RedirectAllRequestsTo' <$>
          (x .@? "Protocol") <*> (x .@ "HostName")

instance ToXML RedirectAllRequestsTo where
    toXML RedirectAllRequestsTo'{..}
      = mconcat
          ["Protocol" @= _rartProtocol,
           "HostName" @= _rartHostName]
-- | Container for replication rules. You can add as many as 1,000 rules.
-- Total replication configuration size can be up to 2 MB.
--
-- /See:/ 'replicationConfiguration' smart constructor.
-- Both fields are required; '_rcRules' starts as 'mempty' (an empty
-- list) in the smart constructor and is serialized as repeated "Rule"
-- elements.
data ReplicationConfiguration = ReplicationConfiguration'
    { _rcRole  :: !Text
    , _rcRules :: ![ReplicationRule]
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'ReplicationConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rcRole'
--
-- * 'rcRules'
replicationConfiguration
    :: Text -- ^ 'rcRole'
    -> ReplicationConfiguration
replicationConfiguration pRole_ =
    ReplicationConfiguration'
    { _rcRole = pRole_
    , _rcRules = mempty
    }

-- | Amazon Resource Name (ARN) of an IAM role for Amazon S3 to assume when
-- replicating the objects.
rcRole :: Lens' ReplicationConfiguration Text
rcRole = lens _rcRole (\ s a -> s{_rcRole = a});

-- | Container for information about a particular replication rule.
-- Replication configuration must have at least one rule and can contain up
-- to 1,000 rules.
rcRules :: Lens' ReplicationConfiguration [ReplicationRule]
rcRules = lens _rcRules (\ s a -> s{_rcRules = a}) . _Coerce;

instance FromXML ReplicationConfiguration where
    parseXML x
      = ReplicationConfiguration' <$>
          (x .@ "Role") <*> (parseXMLList "Rule" x)

instance ToXML ReplicationConfiguration where
    toXML ReplicationConfiguration'{..}
      = mconcat
          ["Role" @= _rcRole, toXMLList "Rule" _rcRules]
-- | /See:/ 'replicationRule' smart constructor.
-- Only '_rrId' is optional; the prefix, status and destination are
-- required constructor arguments.  The identifier round-trips through
-- the XML element "ID" (upper case), not "Id".
data ReplicationRule = ReplicationRule'
    { _rrId          :: !(Maybe Text)
    , _rrPrefix      :: !Text
    , _rrStatus      :: !ReplicationRuleStatus
    , _rrDestination :: !Destination
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'ReplicationRule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrId'
--
-- * 'rrPrefix'
--
-- * 'rrStatus'
--
-- * 'rrDestination'
replicationRule
    :: Text -- ^ 'rrPrefix'
    -> ReplicationRuleStatus -- ^ 'rrStatus'
    -> Destination -- ^ 'rrDestination'
    -> ReplicationRule
replicationRule pPrefix_ pStatus_ pDestination_ =
    ReplicationRule'
    { _rrId = Nothing
    , _rrPrefix = pPrefix_
    , _rrStatus = pStatus_
    , _rrDestination = pDestination_
    }

-- | Unique identifier for the rule. The value cannot be longer than 255
-- characters.
rrId :: Lens' ReplicationRule (Maybe Text)
rrId = lens _rrId (\ s a -> s{_rrId = a});

-- | Object keyname prefix identifying one or more objects to which the rule
-- applies. Maximum prefix length can be up to 1,024 characters.
-- Overlapping prefixes are not supported.
rrPrefix :: Lens' ReplicationRule Text
rrPrefix = lens _rrPrefix (\ s a -> s{_rrPrefix = a});

-- | The rule is ignored if status is not Enabled.
rrStatus :: Lens' ReplicationRule ReplicationRuleStatus
rrStatus = lens _rrStatus (\ s a -> s{_rrStatus = a});

-- | Undocumented member.
rrDestination :: Lens' ReplicationRule Destination
rrDestination = lens _rrDestination (\ s a -> s{_rrDestination = a});

instance FromXML ReplicationRule where
    parseXML x
      = ReplicationRule' <$>
          (x .@? "ID") <*> (x .@ "Prefix") <*> (x .@ "Status")
            <*> (x .@ "Destination")

instance ToXML ReplicationRule where
    toXML ReplicationRule'{..}
      = mconcat
          ["ID" @= _rrId, "Prefix" @= _rrPrefix,
           "Status" @= _rrStatus,
           "Destination" @= _rrDestination]
-- | /See:/ 'requestPaymentConfiguration' smart constructor.
-- Request-only payload: note that only a 'ToXML' instance exists for
-- this type in the visible code (no 'FromXML').
newtype RequestPaymentConfiguration = RequestPaymentConfiguration'
    { _rpcPayer :: Payer
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'RequestPaymentConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rpcPayer'
requestPaymentConfiguration
    :: Payer -- ^ 'rpcPayer'
    -> RequestPaymentConfiguration
requestPaymentConfiguration pPayer_ =
    RequestPaymentConfiguration'
    { _rpcPayer = pPayer_
    }

-- | Specifies who pays for the download and request fees.
rpcPayer :: Lens' RequestPaymentConfiguration Payer
rpcPayer = lens _rpcPayer (\ s a -> s{_rpcPayer = a});

instance ToXML RequestPaymentConfiguration where
    toXML RequestPaymentConfiguration'{..}
      = mconcat ["Payer" @= _rpcPayer]
-- | /See:/ 'restoreRequest' smart constructor.
-- Request-only payload: only a 'ToXML' instance exists for this type in
-- the visible code (no 'FromXML').
newtype RestoreRequest = RestoreRequest'
    { _rrDays :: Int
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'RestoreRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrDays'
restoreRequest
    :: Int -- ^ 'rrDays'
    -> RestoreRequest
restoreRequest pDays_ =
    RestoreRequest'
    { _rrDays = pDays_
    }

-- | Lifetime of the active copy in days
rrDays :: Lens' RestoreRequest Int
rrDays = lens _rrDays (\ s a -> s{_rrDays = a});

instance ToXML RestoreRequest where
    toXML RestoreRequest'{..}
      = mconcat ["Days" @= _rrDays]
-- | /See:/ 'routingRule' smart constructor.
-- The condition is optional (a rule without one always applies its
-- redirect); the redirect itself is required.
data RoutingRule = RoutingRule'
    { _rrCondition :: !(Maybe Condition)
    , _rrRedirect  :: !Redirect
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'RoutingRule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrCondition'
--
-- * 'rrRedirect'
routingRule
    :: Redirect -- ^ 'rrRedirect'
    -> RoutingRule
routingRule pRedirect_ =
    RoutingRule'
    { _rrCondition = Nothing
    , _rrRedirect = pRedirect_
    }

-- | A container for describing a condition that must be met for the
-- specified redirect to apply. For example, 1. If request is for pages in
-- the \/docs folder, redirect to the \/documents folder. 2. If request
-- results in HTTP error 4xx, redirect request to another host where you
-- might process the error.
rrCondition :: Lens' RoutingRule (Maybe Condition)
rrCondition = lens _rrCondition (\ s a -> s{_rrCondition = a});

-- | Container for redirect information. You can redirect requests to another
-- host, to another page, or with another protocol. In the event of an
-- error, you can can specify a different error code to return.
rrRedirect :: Lens' RoutingRule Redirect
rrRedirect = lens _rrRedirect (\ s a -> s{_rrRedirect = a});

instance FromXML RoutingRule where
    parseXML x
      = RoutingRule' <$>
          (x .@? "Condition") <*> (x .@ "Redirect")

instance ToXML RoutingRule where
    toXML RoutingRule'{..}
      = mconcat
          ["Condition" @= _rrCondition,
           "Redirect" @= _rrRedirect]
-- | /See:/ 'rule' smart constructor.
-- Lifecycle rule: five optional fields plus the required prefix and
-- status.  As with 'ReplicationRule', the identifier uses XML element
-- "ID" (upper case).
data Rule = Rule'
    { _rNoncurrentVersionExpiration :: !(Maybe NoncurrentVersionExpiration)
    , _rTransition :: !(Maybe Transition)
    , _rExpiration :: !(Maybe LifecycleExpiration)
    , _rNoncurrentVersionTransition :: !(Maybe NoncurrentVersionTransition)
    , _rId :: !(Maybe Text)
    , _rPrefix :: !Text
    , _rStatus :: !ExpirationStatus
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'Rule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rNoncurrentVersionExpiration'
--
-- * 'rTransition'
--
-- * 'rExpiration'
--
-- * 'rNoncurrentVersionTransition'
--
-- * 'rId'
--
-- * 'rPrefix'
--
-- * 'rStatus'
rule
    :: Text -- ^ 'rPrefix'
    -> ExpirationStatus -- ^ 'rStatus'
    -> Rule
rule pPrefix_ pStatus_ =
    Rule'
    { _rNoncurrentVersionExpiration = Nothing
    , _rTransition = Nothing
    , _rExpiration = Nothing
    , _rNoncurrentVersionTransition = Nothing
    , _rId = Nothing
    , _rPrefix = pPrefix_
    , _rStatus = pStatus_
    }

-- | Undocumented member.
rNoncurrentVersionExpiration :: Lens' Rule (Maybe NoncurrentVersionExpiration)
rNoncurrentVersionExpiration = lens _rNoncurrentVersionExpiration (\ s a -> s{_rNoncurrentVersionExpiration = a});

-- | Undocumented member.
rTransition :: Lens' Rule (Maybe Transition)
rTransition = lens _rTransition (\ s a -> s{_rTransition = a});

-- | Undocumented member.
rExpiration :: Lens' Rule (Maybe LifecycleExpiration)
rExpiration = lens _rExpiration (\ s a -> s{_rExpiration = a});

-- | Undocumented member.
rNoncurrentVersionTransition :: Lens' Rule (Maybe NoncurrentVersionTransition)
rNoncurrentVersionTransition = lens _rNoncurrentVersionTransition (\ s a -> s{_rNoncurrentVersionTransition = a});

-- | Unique identifier for the rule. The value cannot be longer than 255
-- characters.
rId :: Lens' Rule (Maybe Text)
rId = lens _rId (\ s a -> s{_rId = a});

-- | Prefix identifying one or more objects to which the rule applies.
rPrefix :: Lens' Rule Text
rPrefix = lens _rPrefix (\ s a -> s{_rPrefix = a});

-- | If \'Enabled\', the rule is currently being applied. If \'Disabled\',
-- the rule is not currently being applied.
rStatus :: Lens' Rule ExpirationStatus
rStatus = lens _rStatus (\ s a -> s{_rStatus = a});

instance FromXML Rule where
    parseXML x
      = Rule' <$>
          (x .@? "NoncurrentVersionExpiration") <*>
            (x .@? "Transition")
            <*> (x .@? "Expiration")
            <*> (x .@? "NoncurrentVersionTransition")
            <*> (x .@? "ID")
            <*> (x .@ "Prefix")
            <*> (x .@ "Status")

instance ToXML Rule where
    toXML Rule'{..}
      = mconcat
          ["NoncurrentVersionExpiration" @=
             _rNoncurrentVersionExpiration,
           "Transition" @= _rTransition,
           "Expiration" @= _rExpiration,
           "NoncurrentVersionTransition" @=
             _rNoncurrentVersionTransition,
           "ID" @= _rId, "Prefix" @= _rPrefix,
           "Status" @= _rStatus]
-- | Container for object key name prefix and suffix filtering rules.
--
-- /See:/ 's3KeyFilter' smart constructor.
-- The field is 'Maybe [FilterRule]', but 'skfFilterRules' presents it
-- as a plain list via '_Default . _Coerce' (absent collapses to []).
newtype S3KeyFilter = S3KeyFilter'
    { _skfFilterRules :: Maybe [FilterRule]
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'S3KeyFilter' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'skfFilterRules'
s3KeyFilter
    :: S3KeyFilter
s3KeyFilter =
    S3KeyFilter'
    { _skfFilterRules = Nothing
    }

-- | Undocumented member.
skfFilterRules :: Lens' S3KeyFilter [FilterRule]
skfFilterRules = lens _skfFilterRules (\ s a -> s{_skfFilterRules = a}) . _Default . _Coerce;

instance FromXML S3KeyFilter where
    parseXML x
      = S3KeyFilter' <$>
          (may (parseXMLList "FilterRule") x)

instance ToXML S3KeyFilter where
    toXML S3KeyFilter'{..}
      = mconcat
          [toXML (toXMLList "FilterRule" <$> _skfFilterRules)]
-- | /See:/ 's3ServiceError' smart constructor.
-- Response-only payload: only a 'FromXML' instance exists for this type
-- in the visible code (no 'ToXML').  Every field is optional.
data S3ServiceError = S3ServiceError'
    { _sseVersionId :: !(Maybe ObjectVersionId)
    , _sseKey       :: !(Maybe ObjectKey)
    , _sseCode      :: !(Maybe Text)
    , _sseMessage   :: !(Maybe Text)
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'S3ServiceError' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sseVersionId'
--
-- * 'sseKey'
--
-- * 'sseCode'
--
-- * 'sseMessage'
s3ServiceError
    :: S3ServiceError
s3ServiceError =
    S3ServiceError'
    { _sseVersionId = Nothing
    , _sseKey = Nothing
    , _sseCode = Nothing
    , _sseMessage = Nothing
    }

-- | Undocumented member.
sseVersionId :: Lens' S3ServiceError (Maybe ObjectVersionId)
sseVersionId = lens _sseVersionId (\ s a -> s{_sseVersionId = a});

-- | Undocumented member.
sseKey :: Lens' S3ServiceError (Maybe ObjectKey)
sseKey = lens _sseKey (\ s a -> s{_sseKey = a});

-- | Undocumented member.
sseCode :: Lens' S3ServiceError (Maybe Text)
sseCode = lens _sseCode (\ s a -> s{_sseCode = a});

-- | Undocumented member.
sseMessage :: Lens' S3ServiceError (Maybe Text)
sseMessage = lens _sseMessage (\ s a -> s{_sseMessage = a});

instance FromXML S3ServiceError where
    parseXML x
      = S3ServiceError' <$>
          (x .@? "VersionId") <*> (x .@? "Key") <*>
            (x .@? "Code")
            <*> (x .@? "Message")
-- | /See:/ 'tag' smart constructor.
-- Both fields are required; the key is an 'ObjectKey', not plain 'Text'.
data Tag = Tag'
    { _tagKey   :: !ObjectKey
    , _tagValue :: !Text
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'Tag' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tagKey'
--
-- * 'tagValue'
tag
    :: ObjectKey -- ^ 'tagKey'
    -> Text -- ^ 'tagValue'
    -> Tag
tag pKey_ pValue_ =
    Tag'
    { _tagKey = pKey_
    , _tagValue = pValue_
    }

-- | Name of the tag.
tagKey :: Lens' Tag ObjectKey
tagKey = lens _tagKey (\ s a -> s{_tagKey = a});

-- | Value of the tag.
tagValue :: Lens' Tag Text
tagValue = lens _tagValue (\ s a -> s{_tagValue = a});

instance FromXML Tag where
    parseXML x = Tag' <$> (x .@ "Key") <*> (x .@ "Value")

instance ToXML Tag where
    toXML Tag'{..}
      = mconcat ["Key" @= _tagKey, "Value" @= _tagValue]
-- | /See:/ 'tagging' smart constructor.
-- Request-only payload (only 'ToXML' is visible); the tags serialize as
-- a "TagSet" element wrapping repeated "Tag" children.
newtype Tagging = Tagging'
    { _tTagSet :: [Tag]
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'Tagging' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tTagSet'
tagging
    :: Tagging
tagging =
    Tagging'
    { _tTagSet = mempty
    }

-- | Undocumented member.
tTagSet :: Lens' Tagging [Tag]
tTagSet = lens _tTagSet (\ s a -> s{_tTagSet = a}) . _Coerce;

instance ToXML Tagging where
    toXML Tagging'{..}
      = mconcat ["TagSet" @= toXMLList "Tag" _tTagSet]
-- | /See:/ 'targetGrant' smart constructor.
-- Both fields are optional; the smart constructor takes no arguments.
data TargetGrant = TargetGrant'
    { _tgPermission :: !(Maybe BucketLogsPermission)
    , _tgGrantee    :: !(Maybe Grantee)
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'TargetGrant' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tgPermission'
--
-- * 'tgGrantee'
targetGrant
    :: TargetGrant
targetGrant =
    TargetGrant'
    { _tgPermission = Nothing
    , _tgGrantee = Nothing
    }

-- | Logging permissions assigned to the Grantee for the bucket.
tgPermission :: Lens' TargetGrant (Maybe BucketLogsPermission)
tgPermission = lens _tgPermission (\ s a -> s{_tgPermission = a});

-- | Undocumented member.
tgGrantee :: Lens' TargetGrant (Maybe Grantee)
tgGrantee = lens _tgGrantee (\ s a -> s{_tgGrantee = a});

instance FromXML TargetGrant where
    parseXML x
      = TargetGrant' <$>
          (x .@? "Permission") <*> (x .@? "Grantee")

instance ToXML TargetGrant where
    toXML TargetGrant'{..}
      = mconcat
          ["Permission" @= _tgPermission,
           "Grantee" @= _tgGrantee]
-- | Container for specifying the configuration when you want Amazon S3 to
-- publish events to an Amazon Simple Notification Service (Amazon SNS)
-- topic.
--
-- /See:/ 'topicConfiguration' smart constructor.
-- The ARN field round-trips through the XML element "Topic", not
-- "TopicArn"; events serialize as repeated "Event" elements.
data TopicConfiguration = TopicConfiguration'
    { _tcId       :: !(Maybe Text)
    , _tcFilter   :: !(Maybe NotificationConfigurationFilter)
    , _tcTopicARN :: !Text
    , _tcEvents   :: ![Event]
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'TopicConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tcId'
--
-- * 'tcFilter'
--
-- * 'tcTopicARN'
--
-- * 'tcEvents'
topicConfiguration
    :: Text -- ^ 'tcTopicARN'
    -> TopicConfiguration
topicConfiguration pTopicARN_ =
    TopicConfiguration'
    { _tcId = Nothing
    , _tcFilter = Nothing
    , _tcTopicARN = pTopicARN_
    , _tcEvents = mempty
    }

-- | Undocumented member.
tcId :: Lens' TopicConfiguration (Maybe Text)
tcId = lens _tcId (\ s a -> s{_tcId = a});

-- | Undocumented member.
tcFilter :: Lens' TopicConfiguration (Maybe NotificationConfigurationFilter)
tcFilter = lens _tcFilter (\ s a -> s{_tcFilter = a});

-- | Amazon SNS topic ARN to which Amazon S3 will publish a message when it
-- detects events of specified type.
tcTopicARN :: Lens' TopicConfiguration Text
tcTopicARN = lens _tcTopicARN (\ s a -> s{_tcTopicARN = a});

-- | Undocumented member.
tcEvents :: Lens' TopicConfiguration [Event]
tcEvents = lens _tcEvents (\ s a -> s{_tcEvents = a}) . _Coerce;

instance FromXML TopicConfiguration where
    parseXML x
      = TopicConfiguration' <$>
          (x .@? "Id") <*> (x .@? "Filter") <*> (x .@ "Topic")
            <*> (parseXMLList "Event" x)

instance ToXML TopicConfiguration where
    toXML TopicConfiguration'{..}
      = mconcat
          ["Id" @= _tcId, "Filter" @= _tcFilter,
           "Topic" @= _tcTopicARN, toXMLList "Event" _tcEvents]
-- | /See:/ 'transition' smart constructor.
-- '_tDate' is stored as 'RFC822' but exposed as 'UTCTime' by the 'tDate'
-- lens via 'mapping _Time'.
data Transition = Transition'
    { _tDays         :: !(Maybe Int)
    , _tDate         :: !(Maybe RFC822)
    , _tStorageClass :: !(Maybe TransitionStorageClass)
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'Transition' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tDays'
--
-- * 'tDate'
--
-- * 'tStorageClass'
transition
    :: Transition
transition =
    Transition'
    { _tDays = Nothing
    , _tDate = Nothing
    , _tStorageClass = Nothing
    }

-- | Indicates the lifetime, in days, of the objects that are subject to the
-- rule. The value must be a non-zero positive integer.
tDays :: Lens' Transition (Maybe Int)
tDays = lens _tDays (\ s a -> s{_tDays = a});

-- | Indicates at what date the object is to be moved or deleted. Should be
-- in GMT ISO 8601 Format.
tDate :: Lens' Transition (Maybe UTCTime)
tDate = lens _tDate (\ s a -> s{_tDate = a}) . mapping _Time;

-- | The class of storage used to store the object.
tStorageClass :: Lens' Transition (Maybe TransitionStorageClass)
tStorageClass = lens _tStorageClass (\ s a -> s{_tStorageClass = a});

instance FromXML Transition where
    parseXML x
      = Transition' <$>
          (x .@? "Days") <*> (x .@? "Date") <*>
            (x .@? "StorageClass")

instance ToXML Transition where
    toXML Transition'{..}
      = mconcat
          ["Days" @= _tDays, "Date" @= _tDate,
           "StorageClass" @= _tStorageClass]
-- | /See:/ 'versioningConfiguration' smart constructor.
-- Request-only payload (only 'ToXML' is visible).  Note the wire
-- element is "MfaDelete" while the field is '_vcMFADelete'.
data VersioningConfiguration = VersioningConfiguration'
    { _vcStatus    :: !(Maybe BucketVersioningStatus)
    , _vcMFADelete :: !(Maybe MFADelete)
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'VersioningConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vcStatus'
--
-- * 'vcMFADelete'
versioningConfiguration
    :: VersioningConfiguration
versioningConfiguration =
    VersioningConfiguration'
    { _vcStatus = Nothing
    , _vcMFADelete = Nothing
    }

-- | The versioning state of the bucket.
vcStatus :: Lens' VersioningConfiguration (Maybe BucketVersioningStatus)
vcStatus = lens _vcStatus (\ s a -> s{_vcStatus = a});

-- | Specifies whether MFA delete is enabled in the bucket versioning
-- configuration. This element is only returned if the bucket has been
-- configured with MFA delete. If the bucket has never been so configured,
-- this element is not returned.
vcMFADelete :: Lens' VersioningConfiguration (Maybe MFADelete)
vcMFADelete = lens _vcMFADelete (\ s a -> s{_vcMFADelete = a});

instance ToXML VersioningConfiguration where
    toXML VersioningConfiguration'{..}
      = mconcat
          ["Status" @= _vcStatus, "MfaDelete" @= _vcMFADelete]
-- | /See:/ 'websiteConfiguration' smart constructor.
-- Request-only payload (only 'ToXML' is visible).  '_wcRoutingRules' is
-- 'Maybe [RoutingRule]' internally, but 'wcRoutingRules' exposes it as
-- a plain list via '_Default . _Coerce'.
data WebsiteConfiguration = WebsiteConfiguration'
    { _wcRedirectAllRequestsTo :: !(Maybe RedirectAllRequestsTo)
    , _wcErrorDocument :: !(Maybe ErrorDocument)
    , _wcIndexDocument :: !(Maybe IndexDocument)
    , _wcRoutingRules :: !(Maybe [RoutingRule])
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'WebsiteConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'wcRedirectAllRequestsTo'
--
-- * 'wcErrorDocument'
--
-- * 'wcIndexDocument'
--
-- * 'wcRoutingRules'
websiteConfiguration
    :: WebsiteConfiguration
websiteConfiguration =
    WebsiteConfiguration'
    { _wcRedirectAllRequestsTo = Nothing
    , _wcErrorDocument = Nothing
    , _wcIndexDocument = Nothing
    , _wcRoutingRules = Nothing
    }

-- | Undocumented member.
wcRedirectAllRequestsTo :: Lens' WebsiteConfiguration (Maybe RedirectAllRequestsTo)
wcRedirectAllRequestsTo = lens _wcRedirectAllRequestsTo (\ s a -> s{_wcRedirectAllRequestsTo = a});

-- | Undocumented member.
wcErrorDocument :: Lens' WebsiteConfiguration (Maybe ErrorDocument)
wcErrorDocument = lens _wcErrorDocument (\ s a -> s{_wcErrorDocument = a});

-- | Undocumented member.
wcIndexDocument :: Lens' WebsiteConfiguration (Maybe IndexDocument)
wcIndexDocument = lens _wcIndexDocument (\ s a -> s{_wcIndexDocument = a});

-- | Undocumented member.
wcRoutingRules :: Lens' WebsiteConfiguration [RoutingRule]
wcRoutingRules = lens _wcRoutingRules (\ s a -> s{_wcRoutingRules = a}) . _Default . _Coerce;

instance ToXML WebsiteConfiguration where
    toXML WebsiteConfiguration'{..}
      = mconcat
          ["RedirectAllRequestsTo" @= _wcRedirectAllRequestsTo,
           "ErrorDocument" @= _wcErrorDocument,
           "IndexDocument" @= _wcIndexDocument,
           "RoutingRules" @=
             toXML (toXMLList "RoutingRule" <$> _wcRoutingRules)]
| fmapfmapfmap/amazonka | amazonka-s3/gen/Network/AWS/S3/Types/Product.hs | mpl-2.0 | 82,135 | 0 | 15 | 17,995 | 16,436 | 9,404 | 7,032 | 1,595 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.LiveChatModerators.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Inserts a new resource into this collection.
--
-- /See:/ <https://developers.google.com/youtube/ YouTube Data API v3 Reference> for @youtube.liveChatModerators.insert@.
module Network.Google.Resource.YouTube.LiveChatModerators.Insert
(
-- * REST Resource
LiveChatModeratorsInsertResource
-- * Creating a Request
, liveChatModeratorsInsert
, LiveChatModeratorsInsert
-- * Request Lenses
, lXgafv
, lPart
, lUploadProtocol
, lAccessToken
, lUploadType
, lPayload
, lCallback
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.liveChatModerators.insert@ method which the
-- 'LiveChatModeratorsInsert' request conforms to.
-- "part" is a repeated query parameter ('QueryParams'); the remaining
-- parameters are optional singletons ('QueryParam').  Both the request
-- and response bodies are 'LiveChatModerator' encoded as JSON.
type LiveChatModeratorsInsertResource =
     "youtube" :>
       "v3" :>
         "liveChat" :>
           "moderators" :>
             QueryParams "part" Text :>
               QueryParam "$.xgafv" Xgafv :>
                 QueryParam "upload_protocol" Text :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] LiveChatModerator :>
                             Post '[JSON] LiveChatModerator
-- | Inserts a new resource into this collection.
--
-- /See:/ 'liveChatModeratorsInsert' smart constructor.
-- '_lPart' and '_lPayload' are the only required fields (see the smart
-- constructor 'liveChatModeratorsInsert'); all others default to
-- 'Nothing'.
data LiveChatModeratorsInsert =
  LiveChatModeratorsInsert'
    { _lXgafv :: !(Maybe Xgafv)
    , _lPart :: ![Text]
    , _lUploadProtocol :: !(Maybe Text)
    , _lAccessToken :: !(Maybe Text)
    , _lUploadType :: !(Maybe Text)
    , _lPayload :: !LiveChatModerator
    , _lCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LiveChatModeratorsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lXgafv'
--
-- * 'lPart'
--
-- * 'lUploadProtocol'
--
-- * 'lAccessToken'
--
-- * 'lUploadType'
--
-- * 'lPayload'
--
-- * 'lCallback'
liveChatModeratorsInsert
    :: [Text] -- ^ 'lPart'
    -> LiveChatModerator -- ^ 'lPayload'
    -> LiveChatModeratorsInsert
liveChatModeratorsInsert pLPart_ pLPayload_ =
  LiveChatModeratorsInsert'
    { _lXgafv = Nothing
    -- The part list is passed through the '_Coerce' isomorphism (via
    -- the review '#' operator) before being stored.
    , _lPart = _Coerce # pLPart_
    , _lUploadProtocol = Nothing
    , _lAccessToken = Nothing
    , _lUploadType = Nothing
    , _lPayload = pLPayload_
    , _lCallback = Nothing
    }
-- Request lenses ------------------------------------------------------
-- One lens per field of 'LiveChatModeratorsInsert'; 'lPart' additionally
-- composes with '_Coerce' to match the smart constructor's storage.

-- | V1 error format.
lXgafv :: Lens' LiveChatModeratorsInsert (Maybe Xgafv)
lXgafv = lens _lXgafv (\ s a -> s{_lXgafv = a})

-- | The *part* parameter serves two purposes in this operation. It
-- identifies the properties that the write operation will set as well as
-- the properties that the API response returns. Set the parameter value to
-- snippet.
lPart :: Lens' LiveChatModeratorsInsert [Text]
lPart
  = lens _lPart (\ s a -> s{_lPart = a}) . _Coerce

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
lUploadProtocol :: Lens' LiveChatModeratorsInsert (Maybe Text)
lUploadProtocol
  = lens _lUploadProtocol
      (\ s a -> s{_lUploadProtocol = a})

-- | OAuth access token.
lAccessToken :: Lens' LiveChatModeratorsInsert (Maybe Text)
lAccessToken
  = lens _lAccessToken (\ s a -> s{_lAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
lUploadType :: Lens' LiveChatModeratorsInsert (Maybe Text)
lUploadType
  = lens _lUploadType (\ s a -> s{_lUploadType = a})

-- | Multipart request metadata.
lPayload :: Lens' LiveChatModeratorsInsert LiveChatModerator
lPayload = lens _lPayload (\ s a -> s{_lPayload = a})

-- | JSONP
lCallback :: Lens' LiveChatModeratorsInsert (Maybe Text)
lCallback
  = lens _lCallback (\ s a -> s{_lCallback = a})
instance GoogleRequest LiveChatModeratorsInsert where
        type Rs LiveChatModeratorsInsert = LiveChatModerator
        type Scopes LiveChatModeratorsInsert =
          '["https://www.googleapis.com/auth/youtube",
            "https://www.googleapis.com/auth/youtube.force-ssl"]
        -- The argument order below must mirror the parameter order of
        -- 'LiveChatModeratorsInsertResource'; the response format is
        -- pinned to JSON via @Just AltJSON@.
        requestClient LiveChatModeratorsInsert'{..}
          = go _lPart _lXgafv _lUploadProtocol _lAccessToken
              _lUploadType
              _lCallback
              (Just AltJSON)
              _lPayload
              youTubeService
          where go
                  = buildClient
                      (Proxy :: Proxy LiveChatModeratorsInsertResource)
                      mempty
| brendanhay/gogol | gogol-youtube/gen/Network/Google/Resource/YouTube/LiveChatModerators/Insert.hs | mpl-2.0 | 5,305 | 0 | 19 | 1,263 | 809 | 472 | 337 | 113 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module: System.Delta.FSEvents
--
-- Uses the FSEvents API on MacOS to detect file changes.
--------------------------------------------------------------------------------
module System.Delta.FSEvents ( createFSEventsWatcher
) where
import Control.Monad
import Data.Bits
import Data.Word
import qualified FRP.Sodium as Sodium
import FRP.Sodium (sync,merge)
import System.Delta.Class
import System.OSX.FSEvents
-- | Flag test: @ev =<= mask@ is 'True' when the event's flag word has
-- at least one of the bits in @mask@ set.
(=<=) :: Event -> Word64 -> Bool
ev =<= mask = eventFlags ev .&. mask /= 0
-- | Does the event concern a regular file?
--
-- All five predicates below are thin wrappers around the '(=<=)' flag
-- test; 'itemIsFile' is written point-free here for consistency with
-- its siblings (it was previously the only pointed definition).
itemIsFile :: Event -> Bool
itemIsFile = (=<= eventFlagItemIsFile)

-- | Does the event concern a directory?
itemIsDir :: Event -> Bool
itemIsDir = (=<= eventFlagItemIsDir)

-- | Was the item created?
itemIsCreated :: Event -> Bool
itemIsCreated = (=<= eventFlagItemCreated)

-- | Was the item removed?
itemIsRemoved :: Event -> Bool
itemIsRemoved = (=<= eventFlagItemRemoved)

-- | Was the item modified?
itemIsChanged :: Event -> Bool
itemIsChanged = (=<= eventFlagItemModified)
-- Builds a 'FileWatcher' backed by the macOS FSEvents API for @path@.
-- NOTE(review): no type signature; presumably
-- @createFSEventsWatcher :: FilePath -> IO (FileWatcher ...)@ --
-- confirm against 'System.Delta.Class'.
createFSEventsWatcher path = do
  -- One Sodium event/push pair for each kind of change we report.
  (changedEvent, pushChanged) <- sync $ Sodium.newEvent
  (deletedEvent, pushDeleted) <- sync $ Sodium.newEvent
  (newFileEvent, pushNewFile) <- sync $ Sodium.newEvent
  -- Only file events are forwarded (guarded by 'itemIsFile'); directory
  -- events are dropped.  A single FSEvents event can carry several
  -- flags, so created/removed/changed are each tested independently.
  let callback = \e ->
        when (itemIsFile e) $ do
          when (itemIsCreated e) (sync $ pushNewFile $ eventPath e)
          when (itemIsRemoved e) (sync $ pushDeleted $ eventPath e)
          when (itemIsChanged e) (sync $ pushChanged $ eventPath e)
  -- NOTE(review): the literal arguments (1, False, False, True) are
  -- positional settings of 'eventStreamCreate' (latency and stream
  -- flags) -- confirm their meaning against the hfsevents docs.
  evStream <- eventStreamCreate [path] 1 False False True callback
  -- The stream's destroy action is handed to the watcher as its
  -- clean-up hook.
  return $ FileWatcher
             newFileEvent
             deletedEvent
             changedEvent
             (eventStreamDestroy evStream)
| kryoxide/delta | src/main/delta/System/Delta/FSEvents.hs | lgpl-3.0 | 1,744 | 0 | 17 | 398 | 430 | 235 | 195 | 35 | 1 |
module Data.Truthful where
import Data.Maybe (isJust)
import Data.Either (isRight)
-- | Types that can be read as a truth value.
--
-- Instances need to supply either 'isTruthful' or 'notTruthful'; each
-- method defaults to the negation of the other.
class Truthful a where
  isTruthful :: a -> Bool
  isTruthful = not . notTruthful
  {-# INLINE isTruthful #-}

  notTruthful :: a -> Bool
  notTruthful = not . isTruthful
  {-# INLINE notTruthful #-}

  {-# MINIMAL isTruthful | notTruthful #-}

-- | A 'Bool' is its own truth value.
instance Truthful Bool where
  isTruthful = id

-- | A list is truthful exactly when it is non-empty.
instance Truthful [a] where
  notTruthful = null

-- | A 'Maybe' is truthful exactly when it is a 'Just'.
instance Truthful (Maybe a) where
  isTruthful = isJust

-- | An 'Either' is truthful exactly when it is a 'Right'.
instance Truthful (Either l r) where
  isTruthful = isRight
| shockkolate/hs-truthful | src/Data/Truthful.hs | unlicense | 548 | 0 | 7 | 111 | 143 | 80 | 63 | 15 | 0 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset for the "HTTPS Info Add-on" (Italian locale, it-IT).
     Declares the home topic map (map.jhm) and the TOC / Index / Search /
     Favorites navigation views. -->
<helpset version="2.0" xml:lang="it-IT">
  <title>HTTPS Info Add-on</title>
  <maps>
    <homeID>httpsinfo</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Main where
import Control.Applicative
import Control.Parallel.Strategies
import qualified Data.ByteString.Lazy as B
import Data.Foldable
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Data.Text.Format
import Data.Text.Format.Params
import qualified Data.Text.Lazy as TL
import Data.Text.Lazy.Encoding
import Data.Traversable
import Prelude hiding (concat, map)
import Debug.Trace
-- | Chunk size constant.
-- NOTE(review): appears unused in the visible code — confirm before removing.
chunkSize :: Int
chunkSize = 4096
-- | Debug helper: render a format string with its parameters and emit it
-- via 'Debug.Trace.trace', returning the third argument unchanged.
tracef :: Params ps => Format -> ps -> a -> a
tracef f ps = trace (TL.unpack $ format f ps)
-- | Clean every line of the dump, dropping lines for which 'cleanLine'
-- yields 'Nothing'.  Evaluated in parallel in chunks of 500 lines.
cleanDime :: [TL.Text] -> [TL.Text]
cleanDime = withStrategy (parListChunk 500 rdeepseq)
          . mapMaybe (fmap TL.fromStrict . cleanLine . TL.toStrict)
-- | Tags whose quoted occurrences get collapsed by 'replaceTag'.
tags :: [T.Text]
tags = [ "NNE"
       , "PAC"
       , "CIT"
       , "CRV"
       , "NON"
       , "OTH"
       ]
-- | Rewrite one tag occurrence on a line: both the long form
-- @"TAG","",@ and the short form @"TAG",@ collapse to @"",@.
-- 'Nothing' when the tag does not occur in the line.
replaceTag :: T.Text -> T.Text -> Maybe T.Text
replaceTag tag line
  | long `T.isInfixOf` line = Just $ T.replace long repl line
  | short `T.isInfixOf` line = Just $ T.replace short repl line
  | otherwise = Nothing
  where
    long = "\"" <> tag <> "\",\"\","
    short = "\"" <> tag <> "\","
    repl = "\"\","
-- | Try every tag in 'tags'; the first one present wins
-- (via 'Alternative' on 'Maybe').
replaceTags :: T.Text -> Maybe T.Text
replaceTags line = foldl' step empty tags
  where
    step accum tag = accum <|> replaceTag tag line
-- | Repair COMM lines that fail 'isValidComm'; all other lines are
-- left alone ('Nothing').
invalidLine :: T.Text -> Maybe T.Text
invalidLine line
  | ("\"COMM\"" `T.isInfixOf` line) && not (isValidComm line)
  = Just $ recipComm line
  | otherwise = Nothing
-- | A COMM line is considered valid when the field preceding it is one
-- of the listed values (empty, 100, 200, 402 or 0).
isValidComm :: T.Text -> Bool
isValidComm line | "\"\",\"COMM\"" `T.isInfixOf` line = True
                 | "\"100\",\"COMM\"" `T.isInfixOf` line = True
                 | "\"200\",\"COMM\"" `T.isInfixOf` line = True
                 | "\"402\",\"COMM\"" `T.isInfixOf` line = True
                 | "\"0\",\"COMM\"" `T.isInfixOf` line = True
                 | otherwise = False
-- | Accumulator step used with 'mapAccumL': record 33054178 is split
-- over two physical lines, so its first half is carried in the
-- accumulator until the continuation line is seen, at which point the
-- two halves are cleaned and glued back together.
-- Result: (new accumulator, line to emit if any).
joinLines :: Maybe TL.Text -> TL.Text -> (Maybe TL.Text, Maybe TL.Text)
joinLines prev line
  | "33054178" `isInd2012` line = (Just line, Nothing)
  | "one\"\",\"\"none\"" `TL.isPrefixOf` line
  = justPair $ maybe "" clean33054178a prev <> clean33054178b line
  | otherwise = (prev, Just line)
-- | Clean a single line; 'Nothing' drops it entirely.  A few specific
-- malformed records (identified by hash or record id) get dedicated
-- fixups; everything else goes through 'replaceTags' / 'invalidLine',
-- falling back to the unchanged line.
cleanLine :: T.Text -> Maybe T.Text
cleanLine line
  -- I just can't make sense of these lines.
  | "48d749ff199f19b3f8a9487d9648e33b" `T.isInfixOf` line = Nothing
  | "30233408" `isInd2012'` line = Just $ clean30233408 line
  | "30233412" `isInd2012'` line = Just $ clean30233412 line
  | "\"100\",\"\",\"COMM\"" `T.isInfixOf` line = Just $ recipComm100 line
  | "\"UNK\",\"COMM\"" `T.isInfixOf` line = Just $ recipCommUNK line
  | otherwise = replaceTags line <|> invalidLine line <|> pure line
-- | Wrap a value as an \"emit this, reset the accumulator\" result for
-- 'joinLines' (uses TupleSections).
justPair :: a -> (Maybe a, Maybe a)
justPair = (Nothing,) . Just
-- | Does the (lazy) line start the 2012 record with the given id?
isInd2012 :: TL.Text -> TL.Text -> Bool
isInd2012 n line = ("2012,\"e:ind:2012:" <> n <> "\",") `TL.isPrefixOf` line
-- | Strict-'T.Text' variant of 'isInd2012'.
isInd2012' :: T.Text -> T.Text -> Bool
isInd2012' n line = ("2012,\"e:ind:2012:" <> n <> "\",") `T.isPrefixOf` line
-- | First half of split record 33054178: strip trailing whitespace and
-- repair the misplaced fields around \"ballwin\".
clean33054178a :: TL.Text -> TL.Text
clean33054178a = TL.replace "\"ballwin\"" "\"\",\"ballwin\""
               . TL.replace ",0,\"" ",\""
               . TL.stripEnd
-- | Second half of split record 33054178: drop a stray id, fix the
-- CAND field and remove two empty quoted fields.
clean33054178b :: TL.Text -> TL.Text
clean33054178b = TL.replace "\"142902012\"," ""
               . TL.replace "\"CAND\"" "\"200\",\"CAND\""
               . federa
               . TL.intercalate "\"" . removeN 2 "" . TL.splitOn "\""
-- | Field-level repairs for record 30233408.
clean30233408 :: T.Text -> T.Text
clean30233408 = T.replace "\"I\",\"F\",\"\",\"35 fuller pl\","
                          "\"I\",\"F\",\"35 fuller pl\","
              . T.replace "\"iatse local 52\",\",\",\",\","
                          "\"iatse local 52\",\"\",\"\",\"\",\"\","
-- | Field-level repairs for record 30233412.
clean30233412 :: T.Text -> T.Text
clean30233412 = T.replace "\"I\",\"F\"" "\"I\",\"F\",\"\""
              . T.replace "\"NY\",\"12\",0," "\"NY\",\"12\","
              . T.replace "\"\",\"\"j" "\",\"j"
              . T.replace "\"C003700072012\"," ""
              . recipComm
              . federa'
-- | Collapse @"100","","COMM"@ to @"","COMM"@.
recipComm100 :: T.Text -> T.Text
recipComm100 = recipCommTag "100"
-- | Collapse @"UNK","COMM"@ to @"","COMM"@.
recipCommUNK :: T.Text -> T.Text
recipCommUNK = T.replace "\"UNK\",\"COMM\"" "\"\",\"COMM\""
-- | Collapse @"TAG","","COMM"@ (for the given tag) to @"","COMM"@.
recipCommTag :: T.Text -> T.Text -> T.Text
recipCommTag tag = T.replace ( TL.toStrict . format "\"{}\",\"\",\"COMM\""
                             $ Only tag
                             )
                             "\"\",\"COMM\""
-- | Collapse @"TAG","COMM"@ (for the given tag) to @"","COMM"@.
recipCommTag' :: T.Text -> T.Text -> T.Text
recipCommTag' tag = T.replace (TL.toStrict . format "\"{}\",\"COMM\"" $ Only tag)
                              "\"\",\"COMM\""
-- | Drop the spurious empty field following \"federa\" (lazy text).
federa :: TL.Text -> TL.Text
federa = TL.replace "\"federa\",\"\"" "\"federa\""
-- | Strict-text variant of 'federa'.
federa' :: T.Text -> T.Text
federa' = T.replace "\"federa\",\"\"" "\"federa\""
-- | Insert the missing empty field before @"COMM"@.
recipComm :: T.Text -> T.Text
recipComm = T.replace ",\"COMM\"," ",\"\",\"COMM\","
-- | Drop the first @n@ occurrences of the given element from a list,
-- keeping every other element in its original order.
removeN :: Eq a => Int -> a -> [a] -> [a]
removeN count victim = go count
  where
    go 0 ys = ys
    go _ [] = []
    go n (y:ys)
      | y == victim = go (n - 1) ys
      | otherwise   = y : go n ys
-- | Read the dump from stdin (decoded as latin-1), re-join split
-- records, clean each line, and write the result to stdout as UTF-8.
main :: IO ()
main = B.interact ( encodeUtf8
                  . TL.unlines
                  . cleanDime
                  . catMaybes
                  . snd
                  . mapAccumL joinLines Nothing
                  . TL.lines
                  . decodeLatin1
                  )
| erochest/popvox-scrape | CleanDime.hs | apache-2.0 | 5,577 | 0 | 14 | 1,686 | 1,639 | 848 | 791 | 126 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeOperators #-}
module CI.ProgName where
import Control.Monad.Eff
import Control.Monad.Eff.Lift
import System.Environment
import Data.Text (Text)
import qualified Data.Text as T
-- | Effect signature: a single request that yields the program name.
data ProgName x where
    ProgName :: ProgName Text
-- | Ask for the program name within an 'Eff' computation.
progName :: Member ProgName r => Eff r Text
progName = send ProgName
-- | Test handler: answer every 'ProgName' request with the fixed name.
runTestProgName :: Text -> Eff (ProgName ': r) a -> Eff r a
runTestProgName pn = handleRelay return (\ProgName k -> k pn)
-- | Production handler: answer via 'System.Environment.getProgName'.
runProgName :: MemberU2 Lift (Lift IO) r => Eff (ProgName ': r) a -> Eff r a
runProgName = handleRelay return (\ProgName k -> lift (fmap T.pack getProgName) >>= k)
| lancelet/bored-robot | src/CI/ProgName.hs | apache-2.0 | 749 | 0 | 12 | 176 | 226 | 123 | 103 | 18 | 1 |
module Helpers.BraxtonHelper (allSequences, enumerateSequences, SymmetricRelation(..), ReflexiveRelation(..)) where
-- | Whether symmetric adjacent pairs are forbidden ('Symmetric') or
-- allowed ('NonSymmetric'); see 'isValidExtension'.
data SymmetricRelation = Symmetric | NonSymmetric deriving (Eq)
-- | Whether reversed repeats of a pair are forbidden ('Reflexive') or
-- allowed ('NonReflexive'); see 'isValidExtension'.
data ReflexiveRelation = Reflexive | NonReflexive deriving (Eq)
-- | All sequences of the requested length over @[1..upperBound]@ that
-- satisfy the relation constraints, built breadth-first by prepending
-- one valid term at a time to every known sequence.
allSequences :: ReflexiveRelation -> SymmetricRelation -> Int -> Int -> [[Int]]
allSequences reflexivity symmetry upperBound = f [[]] where
  f knownSequences 0 = knownSequences
  f knownSequences stepCount = f newSequences (stepCount - 1) where
    newSequences = concatMap (childSequences reflexivity symmetry upperBound) knownSequences
-- | Keep only the sequences minimising the reduction function @f@,
-- paired with their reduction value.
-- NOTE(review): 'minimum' makes this a runtime error when no sequence
-- exists for the given parameters — confirm callers guarantee at least one.
enumerateSequences :: ReflexiveRelation ->
                      SymmetricRelation ->
                      ([Integer] -> Integer) -> -- e.g. `product`, `sum`, `maximum`
                      Int -> -- upper bound
                      Int -> -- sequence length
                      [(Integer, [Integer])] -- number of sequences
enumerateSequences reflexivity symmetry f upperBound numberOfTerms = filter ((==minReduction) . fst) reductionTuples where
  reductionTuples = map (\s -> (f s, s)) theSequences
  minReduction = minimum $ map fst reductionTuples
  theSequences = map (map fromIntegral) $ allSequences reflexivity symmetry upperBound numberOfTerms
-- | Every one-term extension of @as@ (prepended) that passes
-- 'isValidExtension'.
childSequences :: ReflexiveRelation -> SymmetricRelation -> Int -> [Int] -> [[Int]]
childSequences reflexivity symmetry upperBound as = map (:as) validTerms where
  validTerms = filter (isValidExtension reflexivity symmetry as) [1..upperBound]
-- | May @c@ be prepended to @as@?  The symmetry check compares @c@ with
-- the current head only; the reflexivity check walks all adjacent pairs
-- of @as@ (via @zip as t@) and rejects repeats of the new pair.
isValidExtension :: ReflexiveRelation -> SymmetricRelation -> [Int] -> Int -> Bool
isValidExtension _ _ [] _ = True
isValidExtension reflexivity symmetry as@(a:t) c = checkSymmetric && checkReflexive where
  checkSymmetric = (symmetry == NonSymmetric) || a /= c
  checkReflexive = all validNeighbor $ zip as t where
    validNeighbor pair = pair /= (c, a) && (reflexivity == NonReflexive || pair /= (a, c))
| peterokagey/haskellOEIS | src/Helpers/BraxtonHelper.hs | apache-2.0 | 1,939 | 0 | 13 | 407 | 550 | 300 | 250 | 27 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Transform where
import Control.Arrow
import qualified Data.Aeson as JSON
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy.Char8 as B8L
import qualified Data.HashMap.Strict as HM
import qualified Data.Scientific as SC
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Vector as V
import qualified EDN
-- | Translate an EDN value to its closest JSON equivalent.  Keywords
-- keep a leading @:@ when they appear as values; characters become
-- one-character strings; rationals go through 'Double'; lists, vectors
-- and sets all flatten to JSON arrays (their distinction is lost).
ednToJson :: EDN.Value -> JSON.Value
ednToJson EDN.Nil = JSON.Null
ednToJson (EDN.Boolean b) = JSON.Bool b
ednToJson (EDN.String s) = JSON.String s
ednToJson (EDN.Character c) = JSON.String (T.pack [c])
ednToJson (EDN.Symbol sym) = JSON.String sym
ednToJson (EDN.Keyword kw) = JSON.String (T.concat [":", kw])
ednToJson (EDN.WholeNumber i) = JSON.Number (SC.scientific i 0)
ednToJson (EDN.RationalNumber r) = JSON.Number (SC.fromFloatDigits f) where f = fromRational r :: Double
ednToJson (EDN.RealNumber f) = JSON.Number (SC.fromFloatDigits f)
ednToJson (EDN.List vs) = jsonArray vs
ednToJson (EDN.Vector vs) = jsonArray vs
ednToJson (EDN.Set vs) = jsonArray vs
ednToJson (EDN.Map vs) = JSON.Object (HM.fromList $ map (stringify *** ednToJson) vs)
  where
    -- Map keys must be JSON strings: strings, keywords (colon dropped)
    -- and symbols are used directly; any other key is JSON-encoded.
    stringify :: EDN.Value -> T.Text
    stringify (EDN.String s) = s
    stringify (EDN.Keyword kw) = kw -- minus the :
    stringify (EDN.Symbol sym) = sym
    stringify v = (TE.decodeUtf8 . toStrict . JSON.encode . ednToJson) v
-- | Force a lazy 'B8L.ByteString' into a strict one.
toStrict :: B8L.ByteString -> B8.ByteString
toStrict = B8.concat . B8L.toChunks
-- | Convert a list of EDN values to a JSON array.
jsonArray :: [EDN.Value] -> JSON.Value
jsonArray = JSON.Array . V.fromList . map ednToJson
| uswitch/ej | Transform.hs | bsd-2-clause | 1,593 | 0 | 12 | 246 | 595 | 319 | 276 | 35 | 4 |
-- | Haskell98
--
-- <http://okmij.org/ftp/Algorithms.html#pure-cyclic-list>
--
-- Pure functional, mutation-free, constant-time-access double-linked
-- lists
--
-- Note that insertions, deletions, lookups have
-- a worst-case complexity of O(min(n,W)), where W is either 32 or 64
-- (depending on the paltform). That means the access time is bounded
-- by a small constant (32 or 64).
--
--
-- /Pure functional, mutation-free, efficient double-linked lists/
--
-- It is always an interesting challenge to write a pure functional and efficient implementation of
-- an imperative algorithm destructively operating a data structure. The functional implementation
-- has a significant benefit of equational reasoning and modularity. We can comprehend the algorithm
-- without keeping the implicit global state in mind. The mutation-free, functional realization has
-- practical benefits: the ease of adding checkpointing, undo and redo. The absence of mutations
-- makes the code multi-threading-safe and helps in porting to distributed or non-shared-memory
-- parallel architectures. On the other hand, an imperative implementation has the advantage of
-- optimality: mutating a component in a complex data structure is a constant-time operation, at
-- least on conventional architectures. Imperative code makes sharing explicit, and so permits
-- efficient implementation of cyclic data structures.
--
-- We show a simple example of achieving all the benefits of an imperative data structure --
-- including sharing and the efficiency of updates -- in a pure functional program. Our data
-- structure is a doubly-linked, possibly cyclic list, with the standard operations of adding,
-- deleting and updating elements; traversing the list in both directions; iterating over the list,
-- with cycle detection. The code:
--
-- □ uniformly handles both cyclic and terminated lists;
-- □ does not rebuild the whole list on updates;
-- □ updates the value in the current node in time bound by a small constant;
-- □ does not use or mention any monads;
-- □ does not use any IORef, STRef, TVars, or any other destructive updates;
-- □ permits the logging, undoing and redoing of updates, checkpointing;
-- □ easily generalizes to two-dimensional meshes.
--
-- The algorithm is essentially imperative, thus permitting identity checking and in-place
-- `updates', but implemented purely functionally. Although the code uses many local, type safe
-- `heaps', there is emphatically no global heap and no global state.
--
-- Version: The current version is 1.2, Jan 7, 2009.
--
-- References
--
-- Haskell-Cafe discussion ``Updating doubly linked lists''. January 2009
--
module Data.FDList where
import qualified Data.IntMap as IM
-- | Representation of the double-linked list
type Ref = Int -- positive, we shall treat 0 specially
-- | One cell of the list: its payload plus the 'Ref's of its left and
-- right neighbours (0 meaning \"no neighbour\").
data Node a = Node{node_val :: a,
                   node_left :: Ref,
                   node_right :: Ref}
-- | Because DList contains the `pointer' to the current element, DList
-- is also a Zipper
data DList a = DList{dl_counter :: Ref, -- to generate new Refs
                     dl_current :: Ref, -- current node
                     dl_mem :: IM.IntMap (Node a)} -- main `memory'
-- | The empty list: no nodes, current pointer is the sentinel 0,
-- fresh-Ref counter starts at 1.
empty :: DList a
empty = DList{dl_counter = 1, dl_current = 0, dl_mem = IM.empty}
-- | In a well-formed list, dl_current must point to a valid node
-- All operations below preserve well-formedness
well_formed :: DList a -> Bool
well_formed dl | IM.null (dl_mem dl) = dl_current dl == 0
well_formed dl = IM.member (dl_current dl) (dl_mem dl)
-- | A list is empty iff its memory holds no nodes.
is_empty :: DList a -> Bool
is_empty dl = IM.null (dl_mem dl)
-- | Fetch the current node; errors on an ill-formed (e.g. empty) list.
get_curr_node :: DList a -> Node a
get_curr_node DList{dl_current=curr,dl_mem=mem} =
  maybe (error "not well-formed") id $ IM.lookup curr mem
-- | The insert operation below makes a cyclic list
-- The other operations don't care
-- Insert to the right of the current element, if any
-- Return the DL where the inserted node is the current one
-- | Insert @x@ to the right of the current node and make the new node
-- current.  Inserting into an empty list creates a one-element cycle.
insert_right :: a -> DList a -> DList a
insert_right x dl | is_empty dl =
  let ref = dl_counter dl
      -- the following makes the list cyclic
      node = Node{node_val = x, node_left = ref, node_right = ref}
  in DList{dl_counter = succ ref,
           dl_current = ref,
           dl_mem = IM.insert ref node (dl_mem dl)}
-- Non-empty case: splice the new node between the current node and its
-- right neighbour, patching both neighbours' links.  The @next == curr@
-- special case handles a one-element cycle, where the current node is
-- its own right neighbour and must only be written once.
insert_right x dl@DList{dl_counter = ref, dl_current = curr, dl_mem = mem} =
  DList{dl_counter = succ ref, dl_current = ref,
        dl_mem = IM.insert ref new_node $
                 IM.insert next next_node' $
                 (if next == curr then mem else IM.insert curr curr_node' mem)}
  where
    curr_node = get_curr_node dl
    curr_node'= curr_node{node_right = ref}
    next = node_right curr_node
    next_node = if next == curr then curr_node'
                else maybe (error "ill-formed DList") id $ IM.lookup next mem
    new_node = Node{node_val = x, node_left = curr, node_right = next}
    next_node'= next_node{node_left = ref}
-- | Delete the current element from a non-empty list
-- We can handle both cyclic and terminated lists
-- The right node becomes the current node.
-- If the right node does not exists, the left node becomes current
-- | Remove the current node, re-linking its neighbours.  The case
-- analysis distinguishes terminated ends (ref 0), self-cycles
-- (neighbour == deleted node) and two-node cycles (l == r).
-- NOTE(review): the irrefutable @Just@ bind below makes 'delete' on an
-- empty list a pattern-match error — callers must ensure non-emptiness.
delete :: DList a -> DList a
delete dl@DList{dl_current = curr, dl_mem = mem_old} =
  case () of
    _ | notexist l && notexist r -> empty
    _ | r == 0 ->
        dl{dl_current = l, dl_mem = upd l (\x -> x{node_right=r}) mem}
    _ | r == curr -> -- it was a cycle on the right
        dl{dl_current = l, dl_mem = upd l (\x -> x{node_right=l}) mem}
    _ | l == 0 ->
        dl{dl_current = r, dl_mem = upd r (\x -> x{node_left=l}) mem}
    _ | l == curr ->
        dl{dl_current = r, dl_mem = upd r (\x -> x{node_left=r}) mem}
    _ | l == r ->
        dl{dl_current = r, dl_mem = upd r (\x -> x{node_left=r,
                                                   node_right=r}) mem}
    _ ->
        dl{dl_current = r, dl_mem = upd r (\x -> x{node_left=l}) .
                           upd l (\x -> x{node_right=r}) $ mem}
  where
    -- Remove the current node from memory while looking it up.
    (Just curr_node, mem) = IM.updateLookupWithKey (\_ _ -> Nothing) curr mem_old
    l = node_left curr_node
    r = node_right curr_node
    notexist x = x == 0 || x == curr
    upd ref f mem = IM.adjust f ref mem
-- | Payload of the current node (errors on an empty list).
get_curr :: DList a -> a
get_curr = node_val . get_curr_node
-- | Step to the right neighbour; 'Nothing' at a terminated right end.
move_right :: DList a -> Maybe (DList a)
move_right dl = if next == 0 then Nothing else Just (dl{dl_current=next})
  where
    next = node_right $ get_curr_node dl
-- | Total variant of 'move_right': when there is no node to the right,
-- stay on the current node instead of failing.
move_right' :: DList a -> DList a
move_right' dl =
  case move_right dl of
    Nothing  -> dl
    Just dl' -> dl'
-- | Step to the left neighbour; 'Nothing' at a terminated left end.
move_left :: DList a -> Maybe (DList a)
move_left dl = if next == 0 then Nothing else Just (dl{dl_current=next})
  where
    next = node_left $ get_curr_node dl
-- | Total variant of 'move_left': when there is no node to the left,
-- stay on the current node instead of failing.
move_left' :: DList a -> DList a
move_left' dl =
  case move_left dl of
    Nothing  -> dl
    Just dl' -> dl'
-- | Build a cyclic list by inserting each element to the right; the
-- current node ends up on the last element.
-- NOTE(review): lazy 'foldl' — consider 'foldl'' for long inputs.
fromList :: [a] -> DList a
fromList = foldl (flip insert_right) empty
-- | Take up to @n@ elements moving right; does not stop on cycles.
takeDL :: Int -> DList a -> [a]
takeDL 0 _ = []
takeDL n dl | is_empty dl = []
takeDL n dl = get_curr dl : (maybe [] (takeDL (pred n)) $ move_right dl)
-- | Reverse taking: we move left
takeDLrev :: Int -> DList a -> [a]
takeDLrev 0 _ = []
takeDLrev n dl | is_empty dl = []
takeDLrev n dl = get_curr dl : (maybe [] (takeDLrev (pred n)) $ move_left dl)
-- | Update the current node `inplace'
update :: a -> DList a -> DList a
update x dl@(DList{dl_current = curr, dl_mem = mem}) =
  dl{dl_mem = IM.insert curr (curr_node{node_val = x}) mem}
  where
    curr_node = get_curr_node dl
-- | This one watches for a cycle and terminates when it detects one:
-- collection stops when the walk returns to the starting 'Ref'.
toList :: DList a -> [a]
toList dl | is_empty dl = []
toList dl = get_curr dl : collect (dl_current dl) (move_right dl)
  where
    collect ref0 Nothing = []
    collect ref0 (Just DList{dl_current = curr}) | curr == ref0 = []
    collect ref0 (Just dl) = get_curr dl : collect ref0 (move_right dl)
-- Examples / regression checks; expected results in trailing comments.
test1l = insert_right 1 $ empty
test1l_r = takeDL 5 test1l -- [1,1,1,1,1]
test1l_l = takeDLrev 5 test1l -- [1,1,1,1,1]
test1l_c = toList test1l -- [1]
test2l = insert_right 2 $ test1l
test2l_r = takeDL 5 test2l -- [2,1,2,1,2]
test2l_l = takeDLrev 5 test2l -- [2,1,2,1,2]
test2l_l'= takeDLrev 5 (move_left' test2l) -- [1,2,1,2,1]
test2l_c = toList test2l -- [2,1]
test3l = insert_right 3 $ test2l
test3l_r = takeDL 7 test3l -- [3,1,2,3,1,2,3]
test3l_l = takeDLrev 7 test3l -- [3,2,1,3,2,1,3]
test3l_l'= takeDLrev 7 (move_left' test3l) -- [2,1,3,2,1,3,2]
test3l_c = toList (move_right' test3l) -- [1,2,3]
test31l = delete test3l
test31l_r = takeDL 7 test31l -- [1,2,1,2,1,2,1]
test31l_l = takeDLrev 7 test31l -- [1,2,1,2,1,2,1]
test31l_c = toList test31l -- [1,2]
test32l = delete test31l
test32l_r = takeDL 5 test32l -- [2,2,2,2,2]
test32l_l = takeDLrev 5 test32l -- [2,2,2,2,2]
test32l_c = toList test32l -- [2]
test33l = delete test32l
test33l_r = takeDL 5 test33l -- []
testl = fromList [1..5]
testl_r = takeDL 11 testl -- [5,1,2,3,4,5,1,2,3,4,5]
testl_l = takeDLrev 11 testl -- [5,4,3,2,1,5,4,3,2,1,5]
testl_c = toList testl -- [5,1,2,3,4]
testl1 = update (-1) testl
testl1_r = takeDL 11 testl1 -- [-1,1,2,3,4,-1,1,2,3,4,-1]
testl1_c = toList testl1 -- [-1,1,2,3,4]
testl2 = update (-2) . move_right' . move_right' $ testl1
testl2_r = takeDL 11 testl2 -- [-2,3,4,-1,1,-2,3,4,-1,1,-2]
testl2_l = takeDLrev 11 testl2 -- [-2,1,-1,4,3,-2,1,-1,4,3,-2]
testl2_c = toList testl2 -- [-2,3,4,-1,1]
-- | Old testl is still available: there are no destructive updates
testl3 = update (-2) . move_right' . move_right' $ testl
testl3_r = takeDL 11 testl3 -- [-2,3,4,5,1,-2,3,4,5,1,-2]
testl3_c = toList testl3 -- [-2,3,4,5,1]
| suhailshergill/liboleg | Data/FDList.hs | bsd-3-clause | 10,176 | 0 | 16 | 2,484 | 2,333 | 1,265 | 1,068 | 131 | 7 |
module Idris.ASTUtils where
-- This implements just a few basic lens-like concepts to ease state updates.
-- Similar to fclabels in approach, just without the extra dependency.
--
-- We don't include an explicit export list
-- because everything here is meant to be exported.
--
-- Short synopsis:
-- ---------------
--
-- f :: Idris ()
-- f = do
-- -- these two steps:
-- detaggable <- fgetState (opt_detaggable . ist_optimisation typeName)
-- fputState (opt_detaggable . ist_optimisation typeName) (not detaggable)
--
-- -- are equivalent to:
-- fmodifyState (opt_detaggable . ist_optimisation typeName) not
--
-- -- of course, the long accessor can be put in a variable;
-- -- everything is first-class
-- let detag n = opt_detaggable . ist_optimisation n
-- fputState (detag n1) True
-- fputState (detag n2) False
--
-- -- Note that all these operations handle missing items consistently
-- -- and transparently, as prescribed by the default values included
-- -- in the definitions of the ist_* functions.
-- --
-- -- Especially, it's no longer necessary to have initial values of
-- -- data structures copied (possibly inconsistently) all over the compiler.
import Control.Category
import Control.Applicative
import Control.Monad.State.Class
import Data.Maybe
import Prelude hiding (id, (.))
import Idris.Core.TT
import Idris.AbsSyntaxTree
-- | A first-class lens: a getter and a setter focusing field @fld@
-- inside record @rec@.
data Field rec fld = Field
    { fget :: rec -> fld
    , fset :: fld -> rec -> rec
    }
-- | Apply a function to the focused field.
fmodify :: Field rec fld -> (fld -> fld) -> rec -> rec
fmodify field f x = fset field (f $ fget field x) x
-- | Fields compose like functions: @outer . inner@ focuses through
-- nested records.
instance Category Field where
    id = Field id const
    Field g2 s2 . Field g1 s1 = Field (g2 . g1) (\v2 x1 -> s1 (s2 v2 $ g1 x1) x1)
-- | Read the focused field of the monadic state.
fgetState :: MonadState s m => Field s a -> m a
fgetState field = gets $ fget field
-- | Overwrite the focused field of the monadic state.
fputState :: MonadState s m => Field s a -> a -> m ()
fputState field x = fmodifyState field (const x)
-- | Apply a function to the focused field of the monadic state.
fmodifyState :: MonadState s m => Field s a -> (a -> a) -> m ()
fmodifyState field f = modify $ fmodify field f
-- Exact-name context lookup; uses Nothing for deleted values (read+write!).
--
-- Reading a non-existing value yields Nothing,
-- writing Nothing deletes the value (if it existed).
ctxt_lookup :: Name -> Field (Ctxt a) (Maybe a)
ctxt_lookup n = Field
    { fget = lookupCtxtExact n
    , fset = \newVal -> case newVal of
        Just x -> addDef n x          -- writing Just inserts/overwrites
        Nothing -> deleteDefExact n   -- writing Nothing deletes
    }
-- | Lens into a 'Maybe' that substitutes a default on 'Nothing';
-- writing always stores 'Just' the new value.
maybe_default :: a -> Field (Maybe a) a
maybe_default dflt = Field getter setter
  where
    getter = maybe dflt id
    setter new _ = Just new
-----------------------------------
-- Individual records and fields --
-----------------------------------
--
-- These could probably be generated; let's use lazy addition for now.
--
-- OptInfo
----------
-- the optimisation record for the given (exact) name
-- | Lens to the optimisation record of the given (exact) name, with a
-- default of no inaccessible args and not detaggable.
ist_optimisation :: Name -> Field IState OptInfo
ist_optimisation n =
    maybe_default Optimise
        { inaccessible = []
        , detaggable = False
        }
    . ctxt_lookup n
    . Field idris_optimisation (\v ist -> ist{ idris_optimisation = v })
-- two fields of the optimisation record
opt_inaccessible :: Field OptInfo [(Int, Name)]
opt_inaccessible = Field inaccessible (\v opt -> opt{ inaccessible = v })
opt_detaggable :: Field OptInfo Bool
opt_detaggable = Field detaggable (\v opt -> opt{ detaggable = v })
-- | Lens to the callgraph record of the given (exact) name, defaulting
-- to an all-empty 'CGInfo'.
ist_callgraph :: Name -> Field IState CGInfo
ist_callgraph n =
    maybe_default CGInfo
        { argsdef = [], calls = [], scg = []
        , argsused = [], usedpos = []
        }
    . ctxt_lookup n
    . Field idris_callgraph (\v ist -> ist{ idris_callgraph = v })
-- some fields of the CGInfo record
cg_usedpos :: Field CGInfo [(Int, [UsageReason])]
cg_usedpos = Field usedpos (\v cg -> cg{ usedpos = v })
-- | Lens to the command-line options stored in the elaborator state.
opts_idrisCmdline :: Field IState [Opt]
opts_idrisCmdline =
    Field opt_cmdline (\v opts -> opts{ opt_cmdline = v })
    . Field idris_options (\v ist -> ist{ idris_options = v })
| DanielWaterworth/Idris-dev | src/Idris/ASTUtils.hs | bsd-3-clause | 4,106 | 0 | 12 | 902 | 938 | 523 | 415 | 54 | 2 |
module Control.Monad.Ref (
MonadRef(..)
) where
import Control.Monad.ST
import Data.IORef
import Data.STRef
-- | Monads that support mutable references, abstracting over the
-- concrete reference type ('IORef' for 'IO', 'STRef' for 'ST').
class Monad m => MonadRef m where
    type Ref m :: * -> *
    -- | Create a new reference
    newRef :: a -> m (Ref m a)
    -- | Read a reference
    readRef :: Ref m a -> m a
    -- | Write a reference
    writeRef :: Ref m a -> a -> m ()
    -- | Modify a reference (lazily: the application is not forced)
    modifyRef :: Ref m a -> (a -> a) -> m ()
    modifyRef ref f = readRef ref >>= \x -> writeRef ref (f x)
    -- | Modify a reference strictly, evaluating the function application immediately
    modifyRef' :: Ref m a -> (a -> a) -> m ()
    modifyRef' ref f = readRef ref >>= \x -> let x' = f x in x' `seq` writeRef ref x'
-- NOTE(review): 'modifyIORef' / 'modifySTRef' are lazy; prefer
-- 'modifyRef'' in loops to avoid thunk build-up.
instance MonadRef IO where
    type Ref IO = IORef
    newRef = newIORef
    readRef = readIORef
    writeRef = writeIORef
    modifyRef = modifyIORef
instance MonadRef (ST s) where
    type Ref (ST s) = STRef s
    newRef = newSTRef
    readRef = readSTRef
    writeRef = writeSTRef
    modifyRef = modifySTRef
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
module Pushbullet.API (
listPushes,
createPush,
updatePush,
deletePush
) where
import Control.Monad.Trans.Either (EitherT)
import Data.Proxy (Proxy (..))
import Data.Text (Text)
import qualified Pushbullet.API.Models as M
import Servant.API
import Servant.Client
-- | The pushes endpoints of Pushbullet's v2 REST API:
-- list, create, update-by-iden, delete-by-iden.
type API = Header "Access-Token" AccessToken :> "v2" :> "pushes" :> Get '[JSON] M.Pushes
      :<|> Header "Access-Token" AccessToken :> "v2" :> "pushes" :> ReqBody '[JSON] M.PushRequest :> Post '[JSON] M.Push
      :<|> Header "Access-Token" AccessToken :> "v2" :> "pushes" :> Capture "iden" Text :> ReqBody '[JSON] M.PushUpdate :> Post '[JSON] M.Push
      :<|> Header "Access-Token" AccessToken :> "v2" :> "pushes" :> Capture "iden" Text :> Delete '[JSON] ()
-- | API access token, sent verbatim in the @Access-Token@ header.
newtype AccessToken = AccessToken Text
instance ToText AccessToken where
  toText (AccessToken x) = x
instance FromText AccessToken where
  fromText x = Just $ AccessToken x
api :: Proxy API
api = Proxy
listPushes :: Maybe AccessToken -> EitherT ServantError IO M.Pushes
createPush :: Maybe AccessToken -> M.PushRequest -> EitherT ServantError IO M.Push
updatePush :: Maybe AccessToken -> Text -> M.PushUpdate -> EitherT ServantError IO M.Push
deletePush :: Maybe AccessToken -> Text -> EitherT ServantError IO ()
-- Client functions derived from the API type, all against the live host.
(listPushes :<|> createPush :<|> updatePush :<|> deletePush) = client api (BaseUrl Https "api.pushbullet.com" 443)
| joshbohde/pushbullet | src/Pushbullet/API.hs | bsd-3-clause | 1,522 | 0 | 26 | 324 | 467 | 243 | 224 | 29 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.TextureCompression
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- All raw functions and tokens from the ARB_texture_compression extension, see
-- <http://www.opengl.org/registry/specs/ARB/texture_compression.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.TextureCompression (
-- * Functions
glCompressedTexImage3D,
glCompressedTexImage2D,
glCompressedTexImage1D,
glCompressedTexSubImage3D,
glCompressedTexSubImage2D,
glCompressedTexSubImage1D,
glGetCompressedTexImage,
-- * Tokens
gl_COMPRESSED_ALPHA,
gl_COMPRESSED_LUMINANCE,
gl_COMPRESSED_LUMINANCE_ALPHA,
gl_COMPRESSED_INTENSITY,
gl_COMPRESSED_RGB,
gl_COMPRESSED_RGBA,
gl_TEXTURE_COMPRESSION_HINT,
gl_TEXTURE_COMPRESSED_IMAGE_SIZE,
gl_TEXTURE_COMPRESSED,
gl_NUM_COMPRESSED_TEXTURE_FORMATS,
gl_COMPRESSED_TEXTURE_FORMATS
) where
import Graphics.Rendering.OpenGL.Raw.ARB.Compatibility
import Graphics.Rendering.OpenGL.Raw.Core32
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/TextureCompression.hs | bsd-3-clause | 1,290 | 0 | 4 | 164 | 99 | 74 | 25 | 21 | 0 |
module Main where
import Control.Monad
import Data.List
import Data.Maybe
import Distribution.Package
import qualified Distribution.PackageDescription as P
import Fibon.Benchmarks
import Fibon.BenchmarkInstance
--import Fibon.InputSize
import Fibon.Run.Actions
import Fibon.Run.BenchmarkBundle
import Fibon.Run.Config
import qualified Fibon.Run.Log as Log
import Fibon.Run.Manifest
import qualified Fibon.Run.Config.Default
import Util (readProcessOutAndErr, inDirectory, findCabalFile, parsePackage)
import System.Directory
import System.Environment
import System.Exit
import System.FilePath
import System.IO
import System.Log.Logger
import System.Log.Handler.Simple
import System.Process
-- | Generate nofib-style Makefiles for the benchmarks named on the
-- command line (all benchmarks when none are given).
main :: IO ()
main = do
  (bmRoot, benchmarks) <- parseArgs
  setupLogger
  mapM_ (process bmRoot runConfig) benchmarks
  where
    runConfig = selectConfig "nofib"
-- | Route log output to stdout.
setupLogger :: IO ()
setupLogger = do
  ch <- streamHandler stdout INFO
  updateGlobalLogger rootLoggerName (setLevel DEBUG. setHandlers [ch])
-- | Default location of the benchmark sources.
defaultBmRoot :: FilePath
defaultBmRoot = "benchmarks" </> "Fibon" </> "Benchmarks"
-- | Look up a run configuration by id, falling back to the default.
selectConfig :: ConfigId -> RunConfig
selectConfig configName =
  case find ((== configName) . configId) configManifest of
    Just c -> c
    Nothing -> Fibon.Run.Config.Default.config
-- | Full pipeline for one benchmark: build the bundle, stage its data
-- files, discover the sources via a cabal build, emit the Makefile.
process :: FilePath -> RunConfig -> FibonBenchmark -> IO ()
process bmRoot rc bm = do
  let bundle = mkBundle rc bm "None" bmRoot "None" Ref Peak []
  putStrLn (show bundle)
  copyFilesToBenchDir bundle
  srcs <- getListOfNeededSourceFiles bundle
  createMakefile srcs bundle
-- | Stage the benchmark's data files and rename its expected-stdout
-- file to the name nofib looks for.  Staging failures are only logged.
copyFilesToBenchDir :: BenchmarkBundle -> IO ()
copyFilesToBenchDir bundle = do
  -- Copy files from Fibon/data to the root of the benchmark dir
  prep <- prepNofibBundle bundle
  case prep of
    Left err -> Log.error $ "ERROR: "++(show err)
    Right _ -> return ()
  -- Rename stdout file to "Bench.stdout" to match nofib expectations
  case lookup Stdout $ (output . benchDetails) bundle of
    Just (Diff f) -> do
      let benchName = show $ benchmark bundle
          srcPath = (pathToBench bundle) </> f
          dstPath = (pathToBench bundle) </> benchName <.> ".stdout"
      Log.info $ "Renaming stdout file to "++dstPath
      renameFile srcPath dstPath
    Just _ -> return ()
    Nothing -> return ()
-- | Run @cabal clean/configure/build@ in the benchmark directory and
-- scrape the compiled source files out of the build output.
-- The @ExitSuccess <-@ pattern binds abort the action (pattern-match
-- failure) when either cabal step returns a non-zero exit code.
getListOfNeededSourceFiles :: BenchmarkBundle -> IO [FilePath]
getListOfNeededSourceFiles bundle =
  inDirectory (pathToBench bundle) $ do
    ExitSuccess <- system "cabal clean"
    ExitSuccess <- system $ "cabal configure "++cFlags
    out <- readProcessOutAndErr "cabal" (["build"]++bFlags) ""
    Log.info out
    return (parseGhcMakeOutput out)
  where
    cFlags = concatMap (' ':) $ configureFlags flgs
    bFlags = buildFlags flgs
    flgs = fullFlags bundle
-- | Extract source-file names from @ghc --make@ style build output.
-- Progress lines look like
--
-- > [1 of 3] Compiling Foo ( src/Foo.hs, dist/.../Foo.o )
--
-- and we keep the text between the opening parenthesis and the first
-- comma (GHC's padding space is preserved; it is harmless in the
-- generated Makefile).
parseGhcMakeOutput :: String -> [FilePath]
parseGhcMakeOutput = parse
  where
    parse = map parseLine . filter isCompileLine . lines
    -- Only the bracketed progress lines describe compiled modules.
    isCompileLine ('[':_) = True
    isCompileLine _ = False
    -- 'drop 1' instead of 'tail': a malformed '['-line without a '('
    -- now yields "" instead of crashing on 'tail []'.
    parseLine line = takeWhile (/= ',') (drop 1 $ dropWhile (/= '(') line)
-- | Write the nofib Makefile for a benchmark: SRCS from the discovered
-- source list, PROG_ARGS/STDIN_FILE from the bundle, HC_OPTS from the
-- cabal file (source dirs, packages) plus explicit --ghc-option flags.
createMakefile :: [FilePath] -> BenchmarkBundle -> IO ()
createMakefile srcFiles bundle = do
  let args = intercalate " " $ snd (benchExeAndArgs bundle)
      srcs = intercalate " \\\n       " srcFiles
  Log.info "Creating Makefile"
  mfh <- openFile ((pathToBench bundle) </> "Makefile") WriteMode
  --set TOP
  putL mfh "TOP = ../../.."
  --include boilerplate
  putL mfh "include $(TOP)/mk/boilerplate.mk"
  --set SRCS
  putL mfh $ "SRCS ="++srcs
  --set PROG_ARGS
  when (not (null args)) $ putL mfh $ "PROG_ARGS += "++args
  --set STDIN_FILE
  maybe (return ())
        (\f -> putL mfh $ "STDIN_FILE = "++f)
        (stdinInput . benchDetails $ bundle)
  --set HC_OPTS
  (srcDirIncs, packages) <- getCabalInfo bundle
  let ghcOptions = getGhcOpts bundle
      hcOpts = intercalate " " (ghcOptions ++ srcDirIncs ++ packages)
  when (not (null hcOpts)) $ putL mfh $ "HC_OPTS += "++hcOpts
  --include targets
  putL mfh "include $(TOP)/mk/target.mk"
  hClose mfh
  where
    -- Log each Makefile line as it is written.
    putL h l = Log.info l >> hPutStrLn h l
-- | Parse the benchmark's cabal file and return its hs-source-dirs
-- (as @-i@ flags) and build-depends (as @-package@ flags) for the
-- executable matching the bundle; 'error' when no executable matches.
getCabalInfo:: BenchmarkBundle -> IO ([FilePath], [String])
getCabalInfo bundle =
  inDirectory (pathToBench bundle) $ do
    cf <- findCabalFile
    pkg <- parsePackage cf
    return $ getInfo pkg
  where
    getInfo pkg =
      case find exeMatches (P.executables pkg) of
        Nothing -> error "No matching executable"
        Just e ->
          let buildInfo = P.buildInfo e
              packages = map ("-package "++) $ getPackages pkg
              srcDirs = map ("-i"++) $ P.hsSourceDirs buildInfo
          in
          (srcDirs, packages)
    -- Sorted, de-duplicated dependency names.
    -- NOTE(review): 'nub' after 'sort' is O(n^2); 'map head . group'
    -- would do, though dependency lists are tiny here.
    getPackages pkg = (nub . sort) $ map getName $ P.buildDepends pkg
      where getName (Dependency (PackageName n) _) = n
    exeMatches e = (P.exeName e) == (exeName.benchDetails $ bundle)
-- | Extract the raw GHC options from the bundle's configure and build
-- flags: every flag of the form @--ghc-option=X@ contributes @X@.
getGhcOpts :: BenchmarkBundle -> [String]
getGhcOpts bundle =
  -- mapMaybe is the idiomatic fusion of catMaybes . map.
  mapMaybe (stripPrefix ghcOptionFlag) opts
  where
    ghcOptionFlag = "--ghc-option="
    opts = configureFlags flgs ++ buildFlags flgs
    flgs = fullFlags bundle
-- | Parse the command line: an optional @-b PATH@ benchmark root
-- followed by benchmark names (parsed via 'reads'); no names means all
-- benchmarks.  An unrecognised name aborts the program.
parseArgs :: IO (FilePath, [FibonBenchmark])
parseArgs = do
  args <- getArgs
  case args of
    [] -> return (defaultBmRoot, allBenchmarks)
    _ -> do
      let (root, rest) = parseRoot args
      case rest of
        [] -> return (root, allBenchmarks)
        _ -> mapM parseBm rest >>= \bms -> return (root, concat bms)
  where
    parseBm bm = do
      case reads bm of
        [(b, "")] -> return [b]
        _ -> (putStrLn $"Unknown benchmark "++show bm) >> exitFailure
    parseRoot args =
      case args of
        "-b":path:bms -> (path, bms)
        _ -> (defaultBmRoot, args)
| dmpots/fibon | tools/fibon-nofib/Main.hs | bsd-3-clause | 5,719 | 0 | 19 | 1,325 | 1,786 | 900 | 886 | 140 | 5 |
module Data.Geo.GPX.Lens(
module Data.Geo.GPX.Lens.AgeofdgpsdataL
, module Data.Geo.GPX.Lens.AuthorL
, module Data.Geo.GPX.Lens.BoundsL
, module Data.Geo.GPX.Lens.CmtL
, module Data.Geo.GPX.Lens.CopyrightL
, module Data.Geo.GPX.Lens.CreatorL
, module Data.Geo.GPX.Lens.DescL
, module Data.Geo.GPX.Lens.DgpsidL
, module Data.Geo.GPX.Lens.DomainL
, module Data.Geo.GPX.Lens.EleL
, module Data.Geo.GPX.Lens.EmailL
, module Data.Geo.GPX.Lens.ExtensionsL
, module Data.Geo.GPX.Lens.FixL
, module Data.Geo.GPX.Lens.GeoidheightL
, module Data.Geo.GPX.Lens.HdopL
, module Data.Geo.GPX.Lens.HrefL
, module Data.Geo.GPX.Lens.IdL
, module Data.Geo.GPX.Lens.KeywordsL
, module Data.Geo.GPX.Lens.LatL
, module Data.Geo.GPX.Lens.LicenseL
, module Data.Geo.GPX.Lens.LinkL
, module Data.Geo.GPX.Lens.LinksL
, module Data.Geo.GPX.Lens.LonL
, module Data.Geo.GPX.Lens.MagvarL
, module Data.Geo.GPX.Lens.MaxlatL
, module Data.Geo.GPX.Lens.MaxlonL
, module Data.Geo.GPX.Lens.MetadataL
, module Data.Geo.GPX.Lens.MinlatL
, module Data.Geo.GPX.Lens.MinlonL
, module Data.Geo.GPX.Lens.NameL
, module Data.Geo.GPX.Lens.NumberL
, module Data.Geo.GPX.Lens.PdopL
, module Data.Geo.GPX.Lens.PtsL
, module Data.Geo.GPX.Lens.RteptsL
, module Data.Geo.GPX.Lens.RtesL
, module Data.Geo.GPX.Lens.SatL
, module Data.Geo.GPX.Lens.SrcL
, module Data.Geo.GPX.Lens.SymL
, module Data.Geo.GPX.Lens.TextL
, module Data.Geo.GPX.Lens.TimeL
, module Data.Geo.GPX.Lens.TrkptsL
, module Data.Geo.GPX.Lens.TrksegsL
, module Data.Geo.GPX.Lens.TrksL
, module Data.Geo.GPX.Lens.TypeL
, module Data.Geo.GPX.Lens.VdopL
, module Data.Geo.GPX.Lens.VersionL
, module Data.Geo.GPX.Lens.WptsL
, module Data.Geo.GPX.Lens.YearL
) where
import Data.Geo.GPX.Lens.AgeofdgpsdataL
import Data.Geo.GPX.Lens.AuthorL
import Data.Geo.GPX.Lens.BoundsL
import Data.Geo.GPX.Lens.CmtL
import Data.Geo.GPX.Lens.CopyrightL
import Data.Geo.GPX.Lens.CreatorL
import Data.Geo.GPX.Lens.DescL
import Data.Geo.GPX.Lens.DgpsidL
import Data.Geo.GPX.Lens.DomainL
import Data.Geo.GPX.Lens.EleL
import Data.Geo.GPX.Lens.EmailL
import Data.Geo.GPX.Lens.ExtensionsL
import Data.Geo.GPX.Lens.FixL
import Data.Geo.GPX.Lens.GeoidheightL
import Data.Geo.GPX.Lens.HdopL
import Data.Geo.GPX.Lens.HrefL
import Data.Geo.GPX.Lens.IdL
import Data.Geo.GPX.Lens.KeywordsL
import Data.Geo.GPX.Lens.LatL
import Data.Geo.GPX.Lens.LicenseL
import Data.Geo.GPX.Lens.LinkL
import Data.Geo.GPX.Lens.LinksL
import Data.Geo.GPX.Lens.LonL
import Data.Geo.GPX.Lens.MagvarL
import Data.Geo.GPX.Lens.MaxlatL
import Data.Geo.GPX.Lens.MaxlonL
import Data.Geo.GPX.Lens.MetadataL
import Data.Geo.GPX.Lens.MinlatL
import Data.Geo.GPX.Lens.MinlonL
import Data.Geo.GPX.Lens.NameL
import Data.Geo.GPX.Lens.NumberL
import Data.Geo.GPX.Lens.PdopL
import Data.Geo.GPX.Lens.PtsL
import Data.Geo.GPX.Lens.RteptsL
import Data.Geo.GPX.Lens.RtesL
import Data.Geo.GPX.Lens.SatL
import Data.Geo.GPX.Lens.SrcL
import Data.Geo.GPX.Lens.SymL
import Data.Geo.GPX.Lens.TextL
import Data.Geo.GPX.Lens.TimeL
import Data.Geo.GPX.Lens.TrkptsL
import Data.Geo.GPX.Lens.TrksegsL
import Data.Geo.GPX.Lens.TrksL
import Data.Geo.GPX.Lens.TypeL
import Data.Geo.GPX.Lens.VdopL
import Data.Geo.GPX.Lens.VersionL
import Data.Geo.GPX.Lens.WptsL
import Data.Geo.GPX.Lens.YearL
| tonymorris/geo-gpx | src/Data/Geo/GPX/Lens.hs | bsd-3-clause | 3,233 | 0 | 5 | 248 | 826 | 631 | 195 | 97 | 0 |
module Control.Reactive.Midi (
module Codec.Midi,
-- * Basic types
Midi.MidiTime,
Midi.MidiMessage,
-- * Sources and destinations
MidiSource,
MidiDestination,
midiSources,
midiDestinations,
findSource,
findDestination,
-- * Sending and receiving
midiIn,
midiIn',
midiOut,
) where
import Data.Monoid
import Data.Maybe
import Control.Monad
import Control.Applicative
import Control.Concurrent (forkIO, threadDelay)
import System.IO.Unsafe (unsafePerformIO)
import Control.Reactive
import Control.Reactive.Util
import Codec.Midi hiding (Time, Track)
import qualified System.Midi as Midi
-- | Alias for a MIDI input endpoint from "System.Midi".
type MidiSource = Midi.Source
-- | Alias for a MIDI output endpoint from "System.Midi".
type MidiDestination = Midi.Destination
-- | The list of available MIDI sources, refreshed by polling.
midiSources :: Reactive [MidiSource]
midiSources =
    eventToReactive . pollE $ threadDelay 1 >> fmap Just Midi.sources
-- | The list of available MIDI destinations, refreshed by polling.
midiDestinations :: Reactive [MidiDestination]
midiDestinations =
    eventToReactive . pollE $ threadDelay 1 >> fmap Just Midi.destinations
-- | Look up a MIDI source whose (normalized) name contains the given
-- string.  Re-evaluated whenever the name or the source list changes.
findSource :: Reactive String -> Reactive (Maybe MidiSource)
findSource nm = match <$> nm <*> midiSources
  where
    match needle =
        listToMaybe . filter (\d ->
            isSubstringOfNormalized needle (unsafePerformIO (Midi.name d)))
-- | Look up a MIDI destination whose (normalized) name contains the given
-- string.  Re-evaluated whenever the name or the destination list changes.
findDestination :: Reactive String -> Reactive (Maybe MidiDestination)
findDestination nm = match <$> nm <*> midiDestinations
  where
    match needle =
        listToMaybe . filter (\d ->
            isSubstringOfNormalized needle (unsafePerformIO (Midi.name d)))
-- | Messages arriving on the given source, with timestamps discarded.
midiIn :: MidiSource -> Event Midi.MidiMessage
midiIn = fmap snd . midiIn'
-- | Like 'midiIn' but also delivers the device timestamp of each message.
--
-- The event stream is created with 'unsafePerformIO' (it opens and starts
-- the source as a side effect); the @NOINLINE@ pragma prevents GHC from
-- inlining or duplicating that effect, as required by the
-- 'System.IO.Unsafe.unsafePerformIO' documentation.
midiIn' :: MidiSource -> Event (Midi.MidiTime, Midi.MidiMessage)
midiIn' dev = unsafePerformIO $ do
    (k, e) <- newSource
    str <- Midi.openSource dev (Just $ curry k)
    Midi.start str
    return e
{-# NOINLINE midiIn' #-}
-- | Forward every occurrence of the event to the given MIDI destination,
-- passing the event through unchanged.
--
-- NOTE(review): the destination is opened lazily via 'unsafePerformIO'
-- without a @NOINLINE@ pragma — confirm the connection cannot be opened
-- more than once under optimization.
midiOut :: MidiDestination -> Event Midi.MidiMessage -> Event Midi.MidiMessage
midiOut dest = putE $ \msg -> do
    Midi.send dest' msg
  where
    dest' = unsafePerformIO $ do
        -- putStrLn "Midi.openDestination"
        Midi.openDestination dest
---------
-- | Hold the most recent event value; bottoms (with an 'error' call)
-- if sampled before the first occurrence.
eventToReactive :: Event a -> Reactive a
eventToReactive ev = stepper (error "eventToReactive: ") ev
| hanshoglund/reenact | src/Control/Reactive/Midi.hs | bsd-3-clause | 2,228 | 0 | 17 | 523 | 619 | 331 | 288 | 52 | 1 |
{-# LANGUAGE FlexibleContexts,
ScopedTypeVariables,
MultiParamTypeClasses #-}
-- |
-- Module : Core.Matcher
-- Copyright : (c) Radek Micek 2010
-- License : BSD3
-- Stability : experimental
--
-- Finds matching prefixes of the given string.
--
module Core.Matcher
(
Matcher(..)
, Length
, BinSearchMatcher(..)
, BTransitionTab
, CompAlphabetMatcher(..)
, TranslationTable
, TransitionTable
, TabIdx
, TSymbol
, toBinSearchMatcher
, toCompAlphabetMatcher
) where
import Core.Rule
import Core.DFA
import Data.Array
import Data.Array.Unboxed (UArray)
import qualified Data.Array.Unboxed as U
import Data.Monoid
import Core.Partition
import Data.List (partition)
import Data.Word (Word8)
import Core.Utils
infixl 9 !!!

-- | Infix shorthand for unboxed-array indexing, identical to @('U.!')@.
(!!!) :: (U.IArray UArray b, Ix a) => UArray a b -> a -> b
arr !!! i = arr U.! i
-- | Length of a matched prefix, counted in input symbols.
type Length = Int

-- | Things that can scan an input for rule matches.
class Matcher m s where
    -- | For each rule that matches, its number paired with the lengths at
    -- which it matched.
    findWords :: m s -> [s] -> [(RuNum, [Length])]

instance Matcher BinSearchMatcher Char where
    findWords = findWordsBSM

instance Matcher BinSearchMatcher Word8 where
    findWords = findWordsBSM
-- | 'findWords' for 'BinSearchMatcher': transitions are resolved by binary
-- search over the state's symbol table.
findWordsBSM :: (Ord s, U.IArray UArray s)
             => BinSearchMatcher s -> [s] -> [(RuNum, [Length])]
findWordsBSM bsm
  = findWordsGeneric fWhatMatches fMatchPrio fReachablePrio fNextState
  where
    fWhatMatches = (bsmWhatMatches bsm!)
    fMatchPrio = (bsmMatchPrio bsm!)
    fReachablePrio = (bsmReachablePrio bsm!)
    -- Binary search finds the interval containing @symb@; the parallel
    -- array gives the successor state.
    fNextState st symb = ttStates !!! (binSearch lo hi symb ttSymbols)
      where
        (ttSymbols, ttStates) = bsmTransitionTabs bsm ! st
        (lo, hi) = U.bounds ttSymbols
-- | Matching over compressed-alphabet tables.
instance Matcher CompAlphabetMatcher Char where
    findWords = findWordsCAM

instance Matcher CompAlphabetMatcher Word8 where
    findWords = findWordsCAM
-- | 'findWords' for 'CompAlphabetMatcher': each symbol is first mapped
-- through the state's translation table into the compressed alphabet and
-- then looked up in the state's transition table.
findWordsCAM :: Ix s => CompAlphabetMatcher s -> [s] -> [(RuNum, [Length])]
findWordsCAM cam
  = findWordsGeneric fWhatMatches fMatchPrio fReachablePrio fNextState
  where
    fWhatMatches = (camWhatMatches cam!)
    fMatchPrio = (camMatchPrio cam!)
    fReachablePrio = (camReachablePrio cam!)
    fNextState st symb = (camTransitionTabs cam ! st) !!! translSymb
      where
        translTabIdx = camSymbolTranslation cam !!! st
        translTab = camTranslationTabs cam ! translTabIdx
        translSymb = translTab !!! symb
-- | Core matching loop, parameterised by the automaton's lookup functions:
-- which rules match in a state, the highest matching priority, the highest
-- priority reachable by a nonempty word, and the transition function.
--
-- The walk cuts off early once nothing reachable can beat the best
-- priority matched so far; results are grouped per rule with every match
-- length.
findWordsGeneric :: forall s. (State -> [RuNum])
                 -> (State -> Maybe Priority)
                 -> (State -> Maybe Priority)
                 -> (State -> s -> State)
                 -> [s]
                 -> [(RuNum, [Length])]
findWordsGeneric fWhatMatches fMatchPrio fReachablePrio fNextState
  = map (\xs -> (snd $ head xs, map fst xs)) .
    sortAndGroupBySnd .
    concat .
    runDFA 0 1 Nothing
  where
    runDFA :: State {- old state -}
           -> Length
           -> Maybe Priority
           -> [s]
           -> [[(Length, RuNum)]]
    runDFA _ _ _ [] = []
    runDFA st len maxPrio (symb:symbols)
      -- Early exit: no reachable priority can improve on what we have.
      | reachablePrio < maxPrio' = [whatMatches]
      | otherwise = whatMatches:runDFA newState (succ len) maxPrio' symbols
      where
        whatMatches = zip (repeat len) $ fWhatMatches newState
        reachablePrio = fReachablePrio st
        maxPrio' = maxPrio `max` (fMatchPrio st)
        newState = fNextState st symb
-- | @'binSearch' lo hi symb arr@ returns the index of the smallest element
-- @el@ in @arr[lo..hi]@ such that @symb <= el@.
--
-- Such an element must exist.
binSearch :: (U.IArray UArray a, Ord a)
          => Int -> Int -> a -> UArray Int a -> Int
binSearch lo hi symb arr
  | lo == hi  = lo
  | otherwise =
      let mid = (lo + hi) `div` 2
      in if arr U.! mid < symb
           -- Midpoint is too small: answer lies strictly to the right.
           then binSearch (mid + 1) hi symb arr
           else binSearch lo mid symb arr
-- --------------------------------------------------------------------------
-- | Representation of a transition table. The first array is searched by
-- binary search and the second array contains the corresponding next state.
type BTransitionTab a = (UArray Int a, UArray Int State)
-- | A matcher whose per-state transitions are resolved by binary search
-- (see 'findWordsBSM').
data BinSearchMatcher a
  = BSM {
         -- | Transition tables, one per state.
         bsmTransitionTabs :: Array State (BTransitionTab a)
         -- | Which rules match in each state.
       , bsmWhatMatches :: Array State [RuNum]
         -- | Highest priority which matches in each state.
       , bsmMatchPrio :: Array State (Maybe Priority)
         -- | Highest priority reachable by some nonempty word.
       , bsmReachablePrio :: Array State (Maybe Priority)
       }
-- | Build a 'BinSearchMatcher' from a DFA by flattening each state's
-- transition partition into a pair of parallel arrays (symbols / states).
toBinSearchMatcher :: (U.IArray UArray a, Symbol a)
                   => DFA a -> BinSearchMatcher a
toBinSearchMatcher dfa
  = BSM (fmap (listsToTransTab . unzip . toList . sdTrans) dfa)
        (fmap sdMatches dfa)
        (fmap sdMatchPrio dfa)
        (fmap sdReachablePrio dfa)
  where
    -- Pack the (state, symbol) pairs into two parallel unboxed arrays.
    listsToTransTab (sts, symbols) = (U.listArray bnds symbols,
                                      U.listArray bnds sts)
      where
        bnds = (0, pred $ length symbols)
-- ---------------------------------------------------------------------------
-- Alphabet compression.
--
-- We use algorithm described in article
-- "Efficient Signature Matching with Multiple Alphabet Compression Tables"
-- by Shijin Kong, Randy Smith and Cristian Estan
-- | Translation of input symbols into the compressed alphabet.
type TranslationTable a = UArray a TSymbol

-- | Transition table for one state, indexed by translated symbols.
type TransitionTable = UArray TSymbol State

-- | Index of a translation table.
type TabIdx = Int

-- | Translated symbol.
type TSymbol = Int
-- | A matcher using multiple alphabet compression tables (see the article
-- cited above): each state is assigned one of several translation tables.
data CompAlphabetMatcher a
  = CAM {
         -- | Tables for symbol translation.
         camTranslationTabs :: Array TabIdx (TranslationTable a)
         -- | Transition tables, one per state.
       , camTransitionTabs :: Array State TransitionTable
         -- | Indices of the table for symbol translation, per state.
       , camSymbolTranslation :: UArray State TabIdx
         -- | Which rules match in each state.
       , camWhatMatches :: Array State [RuNum]
         -- | Highest priority which matches in each state.
       , camMatchPrio :: Array State (Maybe Priority)
         -- | Highest priority reachable by some nonempty word.
       , camReachablePrio :: Array State (Maybe Priority)
       }
-- | States forming the partition block just extracted.
type NewPartition = [State]
-- | States not yet assigned to any partition block.
type Rest = [State]
-- | Converts automaton to matcher.
--
-- The DFA's states are split into @numPartitions@ groups; each group gets
-- its own alphabet-compression (translation) table, and each state gets a
-- transition table over the compressed alphabet.
toCompAlphabetMatcher :: (Ix a, Symbol a)
                      => Int -> DFA a -> CompAlphabetMatcher a
toCompAlphabetMatcher numPartitions dfa
  = CAM (listArray (0, pred numPartitions) $ map fst tabs)
        (array (bounds dfa) (concatMap snd tabs))
        (U.array (bounds dfa) $ concatMap (\(i, sts) -> zip sts (repeat i))
                              $ zip [0..] statePartition)
        (fmap sdMatches dfa)
        (fmap sdMatchPrio dfa)
        (fmap sdReachablePrio dfa)
  where
    -- One (translation table, per-state transition tables) pair per group.
    tabs = map (\states ->
                  let (transl, invTransl) = mkTranslationTab states
                      transitions = [ (s, mkTransitionTab s invTransl)
                                    | s <- states]
                  in (transl, transitions))
               statePartition
    -- Returns two tables (symbol -> tsymbol, tsymbol -> symbol).
    mkTranslationTab states
      = (U.array (minBound, maxBound) $
           concatMap (\(b, u, v) -> [(symbol, b) | symbol <- [u..v]]) $
           intervals,
         U.array (0, lastTSymbol)
           -- Works only in GHC since indices may repeat.
           [(b, u) | (b, u, _) <- intervals])
      where
        intervals = toIntervals alphaPartit
        alphaPartit = mconcat $ map (sdTrans . (dfa!)) states
        lastTSymbol = fst $ maximum (toList alphaPartit)
    mkTransitionTab st invTranslTab
      = U.array (bounds invTranslTab)
          [ (t, getBlock s $ sdTrans $ dfa!st)
          | (t, s) <- U.assocs invTranslTab]
    -- Each block in partition of states will have its translation table.
    statePartition :: [[State]]
    statePartition
      = let (x, xs) =
              until (\(_, ps) -> length ps + 1 >= numPartitions)
                    (\(sts, ps) -> let (p, sts') = extractOnePartition sts
                                   in (sts', p:ps))
                    (dfaStates, [])
        in x:xs
    dfaStates = let (lo, hi) = bounds dfa in [lo..hi]
    -- Size of partition <= (size of remaining) / 2
    extractOnePartition :: [State] -> (NewPartition, Rest)
    extractOnePartition remaining
      = rmFromPartit (length remaining `div` 2) remaining []
      where
        rmFromPartit maxSize part rest
          | length part > maxSize = let (cut, r) = statesCut part
                                    in rmFromPartit maxSize r (cut ++ rest)
          | otherwise = (part, rest)
        -- Returns (cut, rest): the split that cuts away the fewest states
        -- for some pair of differently-behaving symbols.
        statesCut states
          -- Partition of the alphabet has only one block.
          | null statePartitions = (states, [])
          | otherwise = snd $ minimum statePartitionsWithLen
          where
            -- [(symbol, symbol)]
            pairsOfSymbs = combinations2 $ map snd
                                         $ representatives
                                         $ mconcat
                                         $ map (sdTrans . (dfa!)) states
            -- List of pairs (states where symbols behave differently, rest).
            statePartitions = map (\p -> partition (behavesDifferently p)
                                                   states)
                                  pairsOfSymbs
              where
                behavesDifferently (a, b) state
                  = let ts = sdTrans $ dfa!state
                    in getBlock a ts /= getBlock b ts
            statePartitionsWithLen = map (\a -> (length $ fst a, a))
                                         statePartitions
-- | All unordered pairs of distinct positions, in list order:
-- @combinations2 [1,2,3] == [(1,2),(1,3),(2,3)]@.
combinations2 :: [a] -> [(a, a)]
combinations2 []     = []
combinations2 (x:xs) = map ((,) x) xs ++ combinations2 xs
| radekm/crep | Core/Matcher.hs | bsd-3-clause | 9,917 | 0 | 18 | 3,142 | 2,559 | 1,396 | 1,163 | 185 | 2 |
{-# LANGUAGE DeriveDataTypeable, RecordWildCards #-}
module Internal.DUE (main) where
import System.Console.CmdArgs
import System.Hardware.Serialport
import Control.Concurrent
-- | Command-line options parsed by cmdargs.
data Options = Options
  { port :: FilePath  -- ^ serial device path (defaults to @"COM6"@ in 'main')
  } deriving (Show, Data, Typeable)
-- | Open the serial port named on the command line (default @"COM6"@)
-- at 1200 baud and hand the open handle to 'run'.
main :: IO ()
main = do
    Options{..} <- cmdArgs $ Options "COM6"
    withSerial port defaultSerialSettings { commSpeed = CS1200 } $ \sp ->
        run sp 0
-- NOTE(review): both arguments are unused and the body only sleeps 10 ms;
-- this looks truncated — confirm against the upstream source before
-- relying on it.
run port i = do
    threadDelay 10000
| marangisto/karakul | src/Internal/DUE.hs | bsd-3-clause | 459 | 0 | 10 | 88 | 140 | 75 | 65 | 14 | 1 |
{-# LANGUAGE RankNTypes #-}
module Alienator.State
( PSpriteState'
, BulletState
, Meter(..)
, meterRead
, meterMax
, PlayerShipState(..)
, health
, attack
, defense
, pSprite
, GamePlaySceneState(..)
, bulletPool
, playerShip
, enemyShipPool
, initPlayerShipState
, initBulletPool
, initGamePlaySceneState
, bullet
)
where
import Data.Default
import Diagrams (unr2, (^&), P2)
import Diagrams.TwoD.Shapes
import Linear
import Linear.Affine
import Control.Lens
import qualified Data.Pool as P
import Alienator.PhysicsSprite
import Alienator.Actuator
import Alienator.Constants
import Reflex.Cocos2d
-- | Physics-sprite state specialised to this game's 'CollisionCategory'.
type PSpriteState' ac = PhysicsSpriteState ac CollisionCategory
-- | A bullet is a physics sprite driven by an 'AnyActuator'.
type BulletState m = PSpriteState' (AnyActuator m)
-- | A bounded gauge: a current reading together with its maximum.
data Meter a = Meter { _meterRead :: a , _meterMax :: a } deriving (Show, Read, Eq)

-- | Lens onto the maximum value of a 'Meter'.
meterMax :: Lens' (Meter a) a
meterMax f (Meter r m) = Meter r <$> f m
{-# INLINE meterMax #-}

-- | Lens onto the current reading of a 'Meter'.
meterRead :: Lens' (Meter a) a
meterRead f (Meter r m) = (\r' -> Meter r' m) <$> f r
{-# INLINE meterRead #-}
-- | State of the player's ship.
data PlayerShipState = PlayerShipState
    { _health :: Meter Int -- ^ hit points: current reading / maximum
    , _attack :: Float -- ^ attack stat
    , _defense :: Float -- ^ defense stat
    , _pSprite :: PSpriteState' VelActuator -- ^ sprite driven by a velocity actuator
    } deriving (Show, Eq)
-- | Lens onto '_attack'.
attack :: Lens' PlayerShipState Float
attack f (PlayerShipState h a d s) =
    (\a' -> PlayerShipState h a' d s) <$> f a
{-# INLINE attack #-}

-- | Lens onto '_defense'.
defense :: Lens' PlayerShipState Float
defense f (PlayerShipState h a d s) =
    (\d' -> PlayerShipState h a d' s) <$> f d
{-# INLINE defense #-}

-- | Lens onto '_health'.
health :: Lens' PlayerShipState (Meter Int)
health f (PlayerShipState h a d s) =
    (\h' -> PlayerShipState h' a d s) <$> f h
{-# INLINE health #-}

-- | Lens onto '_pSprite'.
pSprite :: Lens' PlayerShipState (PSpriteState' VelActuator)
pSprite f (PlayerShipState h a d s) =
    PlayerShipState h a d <$> f s
{-# INLINE pSprite #-}
-- | The state definition of the GamePlay scene.
data GamePlaySceneState m = GamePlaySceneState
    { -- | a set of bullets to be reused by the ships
      _bulletPool :: P.Pool (BulletState m)
      -- | the player's ship state
    , _playerShip :: PlayerShipState
      -- | reusable enemy-ship sprites
    , _enemyShipPool :: P.Pool (PSpriteState' VelActuator)
    } deriving (Show)
-- | Lens onto '_bulletPool'.
bulletPool :: Lens' (GamePlaySceneState m) (P.Pool (BulletState m))
bulletPool f (GamePlaySceneState bp ps ep) =
    (\bp' -> GamePlaySceneState bp' ps ep) <$> f bp
{-# INLINE bulletPool #-}

-- | Lens onto '_enemyShipPool'.
enemyShipPool :: Lens' (GamePlaySceneState m) (P.Pool (PSpriteState' VelActuator))
enemyShipPool f (GamePlaySceneState bp ps ep) =
    GamePlaySceneState bp ps <$> f ep
{-# INLINE enemyShipPool #-}

-- | Lens onto '_playerShip'.
playerShip :: Lens' (GamePlaySceneState m) PlayerShipState
playerShip f (GamePlaySceneState bp ps ep) =
    (\ps' -> GamePlaySceneState bp ps' ep) <$> f ps
{-# INLINE playerShip #-}
-- | Initial player ship: full health, placed 200 px from the left edge at
-- half the window height, with the player sprite and collision category.
initPlayerShipState :: V2 Float -> PlayerShipState
initPlayerShipState winSize = PlayerShipState
    { _health = Meter 100 100
    , _attack = 20
    , _defense = 30
    , _pSprite = def & actuator.pos .~ playerStartPos
                     & sCategory .~ PlayerShip
                     & sGeometry .~ Polygon (reverse $ uncurry rect $ unr2 playerShipContour)
                     & sMass .~ 5000
                     & sprName .~ "res/img/player.png"
                     & enabled .~ True
    }
  where playerStartPos = 0 .+^ (winSize & _x .~ 200
                                        & _y //~ 2)
        -- Bounding rectangle of the player sprite (width x height).
        playerShipContour = 140^&40
-- | We want to start with a pool of some capacity (25) but WITHOUT any
-- slots taken.
initBulletPool :: P.Pool (BulletState m)
initBulletPool = P.idling 25
-- | Initial scene state: idle bullet/enemy pools plus the initial player
-- ship for the given window size.
initGamePlaySceneState ::
     V2 Float -- ^ Win size
  -> GamePlaySceneState m
initGamePlaySceneState winSize = GamePlaySceneState
    { _bulletPool = initBulletPool
    , _playerShip = initPlayerShipState winSize
    , _enemyShipPool = P.idling 10
    }
-- | Create a standard bullet 'PSpriteState' with the given collision
-- category, initial position, velocity and acceleration.
bullet :: Monad m => CollisionCategory -> P2 Float -> V2 Float -> V2 Float -> BulletState m
bullet ct p v acc = def & actuator .~ AnyActuator accelAct
                        & sCategory .~ ct
                        & sGeometry .~ Polygon (reverse $ square 5)
                        & sMass .~ 5
                        & sprName .~ "res/img/bullet.png"
                        & enabled .~ True
  where accelAct :: AccelActuator
        -- Acceleration-driven actuator seeded with the given kinematics.
        accelAct = def & pos .~ p
                       & vel .~ v
                       & accel .~ acc
| lynnard/alienator | hs-src/Alienator/State.hs | bsd-3-clause | 5,251 | 0 | 19 | 1,312 | 1,329 | 715 | 614 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Data.Type.Vector.Util where
import Control.DeepSeq
import Data.Bifunctor
import Data.Distributive
import Data.Foldable
import Data.Monoid
import Data.Type.Combinator
import Data.Type.Conjunction
import Data.Type.Fin
import Data.Type.Nat
import Data.Type.Vector
import Type.Class.Known
import Type.Class.Witness
import Type.Family.Nat
import Type.Family.Nat.Util
instance (Known Nat n, Distributive f) => Distributive (VecT n f) where
distribute xs = vgen_ $ \i -> distribute $ index i <$> xs
instance NFData (f a) => NFData (VecT n f a) where
rnf = \case
ØV -> ()
x :* xs -> x `deepseq` xs `deepseq` ()
splitVec
:: Nat n
-> VecT (n + m) f a
-> (VecT n f a, VecT m f a)
splitVec = \case
Z_ -> (ØV,)
S_ n -> \case
x :* xs -> first (x :*) (splitVec n xs)
{-# INLINE splitVec #-}
zipVecs
:: (Traversable g, Applicative g, Known Nat n)
=> (VecT m g a -> b)
-> VecT m g (VecT n g a)
-> VecT n g b
zipVecs = liftVec
{-# INLINE zipVecs #-}
liftVec
:: (Applicative f, Traversable g)
=> (VecT m g a -> b)
-> VecT m g (f a)
-> f b
liftVec f xs = f <$> sequenceA xs
{-# INLINE liftVec #-}
zipVecsD
:: (Distributive g, Known Nat n)
=> (VecT m g a -> b)
-> VecT m g (VecT n g a)
-> VecT n g b
zipVecsD = liftVecD
{-# INLINE zipVecsD #-}
liftVecD
:: (Distributive f, Distributive g)
=> (VecT m g a -> b)
-> VecT m g (f a)
-> f b
liftVecD f xs = f <$> distribute xs
{-# INLINE liftVecD #-}
curryV
:: (VecT ('S n) f a -> b)
-> f a
-> VecT n f a
-> b
curryV f x xs = f (x :* xs)
{-# INLINE curryV #-}
curryV'
:: (Vec ('S n) a -> b)
-> a
-> Vec n a
-> b
curryV' f x xs = f (I x :* xs)
{-# INLINE curryV' #-}
curryV2'
:: (Vec N2 a -> b)
-> a -> a -> b
curryV2' f x y = f (I x :* I y :* ØV)
{-# INLINE curryV2' #-}
curryV3'
:: (Vec N3 a -> b)
-> a -> a -> a -> b
curryV3' f x y z = f (I x :* I y :* I z :* ØV)
{-# INLINE curryV3' #-}
uncurryV
:: (f a -> VecT n f a -> b)
-> VecT ('S n) f a
-> b
uncurryV f = \case
x :* xs -> f x xs
{-# INLINE uncurryV #-}
uncurryV'
:: (a -> Vec n a -> b)
-> Vec ('S n) a
-> b
uncurryV' f = \case
I x :* xs -> f x xs
{-# INLINE uncurryV' #-}
append'
:: VecT n f a
-> VecT m f a
-> VecT (n + m) f a
append' = \case
ØV -> id
x :* xs -> (x :*) . append' xs
{-# INLINE append' #-}
vecFunc
:: Known Nat n
=> (a -> Vec n b)
-> Vec n (a -> b)
vecFunc f = vgen_ (\i -> I $ index' i . f)
{-# INLINE vecFunc #-}
unVecFunc
:: Vec n (a -> b)
-> a
-> Vec n b
unVecFunc xs x = fmap ($ x) xs
{-# INLINE unVecFunc #-}
vgenA
:: Applicative g
=> Nat n
-> (Fin n -> g (f a))
-> g (VecT n f a)
vgenA = \case
Z_ -> \_ -> pure ØV
S_ n -> \f -> (:*) <$> f FZ <*> vgenA n (f . FS)
{-# INLINE vgenA #-}
uniformVec
:: Eq (f a)
=> VecT ('S m) f a
-> Maybe (f a)
uniformVec = \case
x :* xs | getAll (vfoldMap (All . (== x)) xs) -> Just x
| otherwise -> Nothing
{-# INLINE uniformVec #-}
uncons'
:: VecT ('S n) f a
-> (f a, VecT n f a)
uncons' (x :* xs) = (x, xs)
len
:: VecT n f a
-> Nat n
len = \case
ØV -> Z_
_ :* xs -> S_ (len xs)
select
:: forall n f a. ()
=> VecT ('S n) f a
-> VecT ('S n) (f :&: VecT n f) a
select xs0 = go Z_ ØV (len xs0) xs0
where
go :: forall m o. ()
=> Nat m
-> VecT m f a
-> Nat ('S o)
-> VecT ('S o) f a
-> VecT ('S o) (f :&: VecT (m + o) f) a
go m xs = \case
S_ Z_ -> \case
y :* ØV -> (y :&: xs) :* ØV
\\ addZero m
S_ o@(S_ p) -> \case
y :* ys -> (y :&: (xs `append'` ys)) :* go (S_ m) (y :* xs) o ys
\\ succAssoc m p
sumV
:: Num a
=> Vec f a
-> a
sumV = \case
ØV -> 0
xs@(_ :* _) -> foldl1' (+) xs
{-# INLINE sumV #-}
foldl1'
:: (a -> a -> a)
-> Vec ('S n) a
-> a
foldl1' f = \case
I x :* ØV -> x
I x :* ys@(_ :* _) -> foldl' f x ys
{-# INLINE foldl1' #-}
reverse'
:: forall n f a. ()
=> VecT n f a
-> VecT n f a
reverse' v0 = go Z_ ØV (known \\ v0) v0
where
go :: forall m o. ()
=> Nat m
-> VecT m f a
-> Nat o
-> VecT o f a
-> VecT (m + o) f a
go m xs = \case
Z_ -> \case
ØV -> xs \\ addZero m
S_ o -> \case
y :* ys -> go (S_ m) (y :* xs) o ys
\\ succAssoc m o
{-# INLINE reverse' #-}
unzip'
:: Vec n (a, b)
-> (Vec n a, Vec n b)
unzip' = \case
ØV -> (ØV, ØV)
I (x,y) :* xsys -> let (xs, ys) = unzip' xsys
in (I x :* xs, I y :* ys)
| mstksg/tensor-ops | src/Data/Type/Vector/Util.hs | bsd-3-clause | 5,286 | 0 | 18 | 1,968 | 2,389 | 1,230 | 1,159 | 211 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Geometry.TwoD.Path
-- Copyright : (c) 2011-2017 diagrams team (see LICENSE)
-- License : BSD-style (see LICENSE)
-- Maintainer : diagrams-discuss@googlegroups.com
--
-- Paths in two dimensions are special since we may stroke them to
-- create a 2D diagram, and perform operations such as intersection
-- and union. They also have a trace, whereas paths in higher
-- dimensions do not.
--
-----------------------------------------------------------------------------
module Geometry.TwoD.Path
(
-- ** Inside/outside testing
Crossings (..)
, isInsideWinding
, isInsideEvenOdd
-- * Intersections
, intersectPoints, intersectPoints'
, intersectPointsP, intersectPointsP'
, intersectPointsT, intersectPointsT'
) where
import Control.Lens hiding (at, transform)
import Geometry.Located (Located)
import Geometry.Path
import Geometry.Segment
import Geometry.Space
import Geometry.Trail
import Geometry.TwoD.Segment
import Geometry.TwoD.Types
------------------------------------------------------------
-- Intersections -----------------------------------------
------------------------------------------------------------
-- | Find the intersect points of two objects that can be converted to a
-- path, using the default tolerance of @1e-8@.
intersectPoints :: (InSpace V2 n t, SameSpace t s, ToPath t, ToPath s, OrderedField n)
  => t -> s -> [P2 n]
intersectPoints = intersectPoints' 1e-8
-- | Find the intersect points of two objects that can be converted to a path
-- within the given tolerance.
intersectPoints' :: (InSpace V2 n t, SameSpace t s, ToPath t, ToPath s, OrderedField n)
  => n -> t -> s -> [P2 n]
intersectPoints' eps t s = intersectPointsP' eps (toPath t) (toPath s)
-- | Compute the intersect points between two paths, using the default
-- tolerance of @1e-8@.
intersectPointsP :: OrderedField n => Path V2 n -> Path V2 n -> [P2 n]
intersectPointsP = intersectPointsP' 1e-8
-- | Compute the intersect points between two paths within given tolerance.
intersectPointsP' :: OrderedField n => n -> Path V2 n -> Path V2 n -> [P2 n]
intersectPointsP' eps as bs = do
a <- toListOf each as
b <- toListOf each bs
intersectPointsT' eps a b
-- | Compute the intersect points between two located trails, using the
-- default tolerance of @1e-8@.
intersectPointsT :: OrderedField n => Located (Trail V2 n) -> Located (Trail V2 n) -> [P2 n]
intersectPointsT = intersectPointsT' 1e-8
-- | Compute the intersect points between two located trails within the
-- given tolerance: every fixed segment of one trail is intersected with
-- every fixed segment of the other.
intersectPointsT' :: OrderedField n => n -> Located (Trail V2 n) -> Located (Trail V2 n) -> [P2 n]
intersectPointsT' eps as bs =
  [ p
  | a <- fixTrail as
  , b <- fixTrail bs
  , p <- intersectPointsS' eps a b
  ]
| cchalmers/geometry | src/Geometry/TwoD/Path.hs | bsd-3-clause | 3,282 | 0 | 11 | 681 | 572 | 310 | 262 | 47 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module UI where
import Brick
import Brick.Widgets.List
import Brick.Widgets.Edit
import Graphics.Vty
import Control.Lens
import Data.Text hiding (intercalate)
import Control.Monad
import Control.Monad.Trans.State
import Control.Monad.IO.Class
import Data.Monoid
import Network.Wreq
import Data.Vector as V hiding (replicate, mapM_)
import Control.Concurrent
import Data.Default
import Text.Printf
import System.IO.Temp
import System.Directory
import System.IO
import Data.Maybe
import Data.List (intercalate)
import System.FilePath
import Constant
import Operations
import Types
import Player
-- | Pad a widget on the left by a fixed number of columns.
padLeft' :: Int -> Widget n -> Widget n
padLeft' n = padLeft (Pad n)
-- | Pad a widget on the top by a fixed number of rows.
padTop' :: Int -> Widget n -> Widget n
padTop' n = padTop (Pad n)
-- | Render the player bar, or nothing while 'stflag' is unset.
playerUI :: St -> Widget n
playerUI st
    | st ^. stflag = playerUI' st
    | otherwise    = emptyWidget
-- | Format a duration in seconds as a zero-padded @MM:SS@ string,
-- e.g. @doubleToTime 125 == "02:05"@.
doubleToTime :: Double -> String
doubleToTime total = twoDigits mins <> ":" <> twoDigits secs
  where
    mins = truncate (total / 60) :: Int
    secs = truncate (total - fromIntegral (mins * 60)) :: Int
    twoDigits :: Int -> String
    twoDigits = printf "%02d"
-- | Render the player bar: a 50-column progress bar, elapsed/total time,
-- the current song name and a "(paused)" marker.
--
-- NOTE(review): @unplayed@ is computed from the unclamped @played@, so it
-- can go negative when @a/b*50 > 50@ — 'replicate' then yields "", which
-- looks intended but confirm the bar width stays at 50.
playerUI' :: St -> Widget n
playerUI' st =
  let (a, b) = st ^. sttimeline
      w1 =
        let played = truncate (a / b * 50)
            -- Clamp the filled portion to the bar width.
            played' = if played > 50 then 50 else played
            unplayed = 50 - played
            pb1 = withAttr progressBarAttr $ str $ Prelude.replicate played' ' '
            pb2 = withAttr progressBarInAttr $ str $ Prelude.replicate unplayed ' '
            pb = pb1 <+> pb2
        in str "[" <+> pb <+> str "]"
      w2 = str $ doubleToTime a <> "/" <> doubleToTime b
      filename = withAttr songNameAttr $ str $
        fromMaybe "" (st ^. stcurrentSong)
      pa = withAttr pausedAttr $ if st ^. stisplaying then str "" else str "(paused)"
  in padLeft' 6 $ (str " " <=> (w1 <+> str " " <+> w2) <=> str " ") <=>
     padLeft' 5 (filename <+> str " " <+> pa)
-- | Render the main menu list above the player bar.
renderMainLayout :: St -> [Widget Text]
renderMainLayout st =
  let ls = st ^. stmain
  in [padTop' 1 (vLimit 17 $ vBox [renderList rend True ls]) <=>
      playerUI st] where
  -- Selected rows get the "selected" attribute and one extra column of
  -- left padding.
  rend b t = if b
    then padLeft' 10 $ withAttr listSelectedAttr $ txt t
    else padLeft' 9 $ withAttr listAttr $ txt t
-- | Render the user's playlists, each row prefixed with its index
-- (@"0. name"@), above the player bar.
renderPlaylistLayout :: St -> [Widget Text]
renderPlaylistLayout st =
  let ls'' = fst <$> (st ^. stplaylist)
      ls' = ls'' ^. listElementsL
      -- Prefix every entry with its zero-based index.
      ls = V.zipWith (\a b -> pack (show b) <> ". " <> a)
             ls' (fromList [0..V.length ls' - 1] :: Vector Int)
  in [vBox [padTop' 1 $ vLimit 17 $ vBox
       [renderList rend True $ ls'' & listElementsL .~ ls], playerUI st]] where
  rend b t = if b then padLeft' 10 $ withAttr listSelectedAttr $ txt t
             else padLeft' 9 $ withAttr listAttr $ txt t
-- | Render the songs of the selected playlist, each row prefixed with its
-- index, above the player bar.
--
-- NOTE(review): duplicates 'renderPlaylistLayout' except for the list
-- lens ('stalbumDetail') — a shared helper would remove the duplication.
renderPlaylistDetailLayout :: St -> [Widget Text]
renderPlaylistDetailLayout st =
  let ls'' = fst <$> (st ^. stalbumDetail)
      ls' = ls'' ^. listElementsL
      ls = V.zipWith (\a b -> pack (show b) <> ". " <> a)
             ls' (fromList [0..V.length ls' - 1] :: Vector Int)
  in [vBox [padTop' 1 $ vLimit 17 $ vBox
       [renderList rend True $ ls'' & listElementsL .~ ls], playerUI st]] where
  rend b t = if b then padLeft' 10 $ withAttr listSelectedAttr $ txt t
             else padLeft' 9 $ withAttr listAttr $ txt t
-- | Render the login form: account and password editors, Confirm/Cancel
-- "buttons", and a failure message.
--
-- Fix: @padTop' 1wfailmessage@ relied on Haskell's maximal-munch lexing to
-- split into @1@ and @wfailmessage@; the space is now explicit.
--
-- NOTE(review): 'stloginfailed' reads inverted here — the failure text is
-- shown when the flag is 'False'.  Behavior kept as-is; confirm the flag's
-- intended polarity against its initialisation.
renderLoginLayout :: St -> [Widget Text]
renderLoginLayout st = [padAll 5 w] where
  w = waccount <=> str " " <=> str " " <=> wpassword <=> str " " <=>
      padTop' 1 (padLeft' 8 $ wconfirm <+> str " " <+> wcancel)
      <=> padTop' 1 wfailmessage
  waccount = vLimit 1 $ wastr <+> renderEditor True (fst $ st ^. steditlogin)
  wpassword = vLimit 1 $ wapwd <+> renderwpwd
  -- Mask the password: one '*' per character of the first edit line.
  renderwpwd = str $ Prelude.replicate (Prelude.length . Prelude.head
    . getEditContents . snd $ st ^. steditlogin) '*'
  selected = st ^. steditloginselect
  wastr' = str "Account: "
  wapwd' = str "Password: "
  wconfirm' = str "Confirm"
  wcancel' = str "Cancel"
  choices = [wastr', wapwd', wconfirm', wcancel']
  -- Highlight whichever of the four form elements is currently selected.
  [wastr, wapwd, wconfirm, wcancel] =
    choices & ix selected %~ withAttr editSelected
  wfailmessage = if st ^. stloginfailed then str "" else
    withAttr pausedAttr (str "login failed, pls try again.")
-- | Feed a vty event to the list addressed by the lens and store the
-- updated list back into the app state.
monadSet :: (Ord n, Show n) => St -> Lens' St (List n e) -> Event -> EventM n (Next St)
monadSet st len ev =
    handleListEvent ev (st ^. len) >>= \updated ->
        continue (st & len .~ updated)
-- | Events for the main menu: vi-style list navigation ('j'/'k',
-- Ctrl-d/Ctrl-u) and 'l' to enter the selected item (0 = login screen,
-- 1 = playlists, requires a logged-in session).
handleMainLayoutEvent :: St -> BrickEvent Text CustomEvent -> EventM Text (Next St)
handleMainLayoutEvent st ev =
  case ev of
    VtyEvent (EvKey (KChar 'j') []) -> monadSet st stmain (EvKey KDown [])
    VtyEvent (EvKey (KChar 'k') []) -> monadSet st stmain (EvKey KUp [])
    VtyEvent (EvKey (KChar 'd') [MCtrl]) -> monadSet st stmain (EvKey KPageDown [])
    VtyEvent (EvKey (KChar 'u') [MCtrl]) -> monadSet st stmain (EvKey KPageUp [])
    VtyEvent (EvKey (KChar 'l') []) -> -- switch from MainLayout to PlayListLayout
      case st ^. stmain . listSelectedL of
        Just 0 -> continue $ st & stcurrentLayout .~ LoginLayout
        Just 1 ->
          if st ^. stlogined then do
            -- Fetch the playlists, threading the session state through.
            (r, l) <- liftIO $ runStateT getUserPlaylist (st ^. stlog)
            continue $ st & stlog .~ l & stcurrentLayout .~ PlayListLayout
                          & stplaylist .~ list "playlist" (fromList r) 1
          else continue st
        _ -> continue st
    _ -> genericHandler st ev
-- | Events for the playlist list: vi-style navigation, 'h' to go back to
-- the main menu, 'l' to open the selected playlist's songs.
handlePlaylistLayoutEvent :: St -> BrickEvent Text CustomEvent -> EventM Text (Next St)
handlePlaylistLayoutEvent st ev =
  case ev of
    VtyEvent (EvKey (KChar 'j') []) -> monadSet st stplaylist (EvKey KDown [])
    VtyEvent (EvKey (KChar 'k') []) -> monadSet st stplaylist (EvKey KUp [])
    VtyEvent (EvKey (KChar 'd') [MCtrl]) -> monadSet st stplaylist (EvKey KPageDown [])
    VtyEvent (EvKey (KChar 'u') [MCtrl]) -> monadSet st stplaylist (EvKey KPageUp [])
    VtyEvent (EvKey (KChar 'h') []) -> continue $ st & stcurrentLayout .~ MainLayout
    VtyEvent (EvKey (KChar 'l') []) -> do -- switch from PlayListLayout to PlayListDetailLayout
      let selected = st ^. stplaylist . listSelectedL
      case selected of
        Just n -> do
          -- Fetch the playlist's songs, threading the session state through.
          (r, l) <- liftIO $ runStateT (getPlaylistDetail n) (st ^. stlog)
          continue $ st & stlog .~ l & stcurrentLayout .~ PlayListDetailLayout
                        & stalbumDetail .~ list "albumdetail" (fromList r) 1
        Nothing -> continue st
    _ -> genericHandler st ev
-- | Events for the song list: vi-style navigation, 'h' to go back to the
-- playlist view, 'l' to resolve the selected song's URL and start playing
-- it via mplayer.  (The commented-out block is an abandoned
-- whole-playlist variant using a temporary mplayer playlist file.)
handlePlaylistDetailLayoutEvent :: St -> BrickEvent Text CustomEvent -> EventM Text (Next St)
handlePlaylistDetailLayoutEvent st ev =
  case ev of
    VtyEvent (EvKey (KChar 'j') []) -> monadSet st stalbumDetail (EvKey KDown [])
    VtyEvent (EvKey (KChar 'k') []) -> monadSet st stalbumDetail (EvKey KUp [])
    VtyEvent (EvKey (KChar 'd') [MCtrl]) -> monadSet st stalbumDetail (EvKey KPageDown [])
    VtyEvent (EvKey (KChar 'u') [MCtrl]) -> monadSet st stalbumDetail (EvKey KPageUp [])
    VtyEvent (EvKey (KChar 'h') []) -> continue $ st & stcurrentLayout .~ PlayListLayout
    VtyEvent (EvKey (KChar 'l') []) -> do -- play music
      let selected = st ^. stalbumDetail . listSelectedL
      case selected of
        Just n -> do
          let total = V.length $ st ^. stalbumDetail . listElementsL
          if n <= total - 1
            then do -- add a single song at a time
              uri <- liftIO $ evalStateT (getMusicUrl n _currentList) (st ^. stlog)
              -- Start playback and record the filename -> title mapping
              -- plus the playing list snapshot.
              exeMplayer (playMusic uri) $ st & stisplaying .~ True
                                              & stflag .~ True
                                              & stfilenameMap .~ [(takeFileName uri,
                                                  unpack . fst $ (st^.stlog.currentList) !! n)]
                                              & stcurrentSongNumber .~ n
                                              & stnextsongappended .~ False
                                              & stlog . currentPlayingList .~
                                                  st ^. stlog . currentList
              {- r <- liftIO $ evalStateT (getMusicsUrl n (total - 1)) -}
                   {- (st ^. stlog) -}
              {- let urls = fst <$> r -}
                  {- ids = snd <$> r -}
              {- tdir <- liftIO getTemporaryDirectory -}
              {- (fp, h) <- liftIO $ openTempFile tdir "mplayer.playlist" -}
              {- liftIO $ hPutStrLn h (intercalate "\n" urls) >> hFlush h -}
              {- let fns = Prelude.map takeFileName urls -}
                  {- albummap = (\(a,b) -> (b, a)) <$> st^.stlog.currentList -}
                  {- songs = Prelude.map -}
                      {- (\i -> unpack . fromJust $ lookup i albummap) ids -}
              {- result <- exeMplayer (playMusicFile fp) -}
                  {- (st & stisplaying .~ True -}
                      {- & stfilenameMap .~ Prelude.zip fns songs -}
                      {- & stflag .~ True) -}
              {- liftIO $ threadDelay 300000 >> removeFile fp -}
              {- return result -}
            else continue st
        Nothing -> continue st
    _ -> genericHandler st ev
-- | Events for the login form: Esc cancels back to the main menu,
-- Tab/Shift-Tab cycle through the four form elements (account, password,
-- Confirm, Cancel), Enter advances or submits, and any other vty event is
-- fed to the focused editor.
--
-- NOTE(review): forward cycling uses `rem` and backward uses `mod` — both
-- are correct here (the backward index can be -1, where `mod` wraps to 3),
-- but using `mod` in both places would be more uniform.
handleLoginLayoutEvent :: St -> BrickEvent Text CustomEvent -> EventM Text (Next St)
handleLoginLayoutEvent st ev = do
  let (a, b) = st ^. steditlogin
  case ev of
    VtyEvent (EvKey KEsc []) -> continue $ st & stcurrentLayout .~ MainLayout
      & stloginfailed .~ True & steditlogin .~ emptyEditPair
    VtyEvent (EvKey (KChar '\t') []) -> continue $
      st & steditloginselect %~ (\i -> (i + 1) `rem` 4)
    VtyEvent (EvKey KBackTab []) -> continue $
      st & steditloginselect %~ (\i -> (i - 1) `mod` 4)
    VtyEvent (EvKey KEnter []) ->
      case st ^. steditloginselect of
        0 -> continue $ st & steditloginselect .~ 1  -- move to password
        1 -> performLogin st
        2 -> performLogin st                          -- Confirm button
        _ -> continue $ st & stcurrentLayout .~ MainLayout
          & stloginfailed .~ True & steditlogin .~ emptyEditPair
    VtyEvent event ->
      case st ^. steditloginselect of
        0 -> do
          newa <- handleEditorEvent event a
          continue $ st & steditlogin .~ (newa, b)
        1 -> do
          newb <- handleEditorEvent event b
          continue $ st & steditlogin .~ (a, newb)
        _ -> continue st
    _ -> continue st
-- | Fallback handler shared by every layout.
--
-- Handles the global hot keys (quit, seek, volume, pause toggle) and
-- the periodic timeline\/song refresh events.  Seek, volume and the
-- refresh events are no-ops unless a song is currently playing.
genericHandler :: St -> BrickEvent Text CustomEvent -> EventM Text (Next St)
genericHandler st ev =
  case ev of
    VtyEvent (EvKey (KChar 'q') []) -> halt st
    VtyEvent (EvKey KEsc [])        -> halt st
    VtyEvent (EvKey KRight [])      -> whenPlaying $ exeMplayer (seekRelative 15) st
    VtyEvent (EvKey KLeft [])       -> whenPlaying $ exeMplayer (seekRelative (-15)) st
    VtyEvent (EvKey KUp [])         -> whenPlaying $ exeMplayer (addVolume 1) st
    VtyEvent (EvKey KDown [])       -> whenPlaying $ exeMplayer (addVolume (-1)) st
    -- toggle pause, flipping the playing flag to match
    VtyEvent (EvKey (KChar 'p') []) -> exeMplayer pauseMplayer (st & stisplaying %~ not)
    -- periodic events emitted by the app's tick thread
    AppEvent UpdateTimeline -> whenPlaying $ updateTime st
    AppEvent UpdateSong     -> whenPlaying $ updateSong st
    _ -> continue st
  where
    -- run the action only while a song is playing; otherwise do nothing
    whenPlaying act = if st ^. stisplaying then act else continue st
-- | Log the user in with the contents of the two login editors.
--
-- Phone login is attempted first; if it fails we fall back to e-mail
-- login.  On success the session is written to the on-disk cache and
-- we return to the main layout with 'stlogined' set; on failure the
-- editors are cleared.
--
-- NOTE(review): a brick editor's 'getEditContents' yields at least one
-- line, so 'Prelude.head' here is safe — confirm against brick docs.
performLogin :: St -> EventM Text (Next St)
performLogin st = do
  let account  = Prelude.head $ getEditContents (fst $ st ^. steditlogin)
      password = Prelude.head $ getEditContents (snd $ st ^. steditlogin)
  (result, ok) <- tryPhoneLogin account password st
  if ok
    then liftIO (writeCache result)
           >> continue (result & stcurrentLayout .~ MainLayout
                               & stlogined .~ True)
    else do
      (result', ok') <- tryLogin account password st
      if ok'
        -- BUG FIX: previously this cached 'result' (the state from the
        -- *failed* phone-login attempt) instead of 'result'', the state
        -- from the successful e-mail login, so the cookies/user id on
        -- disk were stale.
        then liftIO (writeCache result')
               >> continue (result' & stcurrentLayout .~ MainLayout
                                    & stlogined .~ True)
        else continue $ st & stloginfailed .~ False & steditlogin .~ emptyEditPair
-- | Persist the logged-in session to the cache file, one value per
-- line: the user id followed by the session cookies.  Both fields are
-- assumed present ('fromJust') because this is only called after a
-- successful login.
writeCache :: St -> IO ()
writeCache st =
  withFile (st ^. stcachefile) WriteMode $ \h -> do
    hPrint h (fromJust (st ^. stlog . userId))
    hPrint h (fromJust (st ^. stlog . option . cookies))
-- | Attempt phone-number login.  Returns the (possibly updated) state
-- together with a success flag; the session in 'stlog' is only
-- replaced when the login succeeded.
tryPhoneLogin :: String -> String -> St -> EventM n (St, Bool)
tryPhoneLogin phone pwd st = do
  (succeeded, session) <- liftIO $ runStateT (loginPhone phone pwd) (st ^. stlog)
  return $ if succeeded
             then (st & stlog .~ session, True)
             else (st, False)
-- | Attempt e-mail\/account login.  Mirrors 'tryPhoneLogin': the
-- session in 'stlog' is only replaced when the login succeeded.
tryLogin :: String -> String -> St -> EventM n (St, Bool)
tryLogin account pwd st = do
  (succeeded, session) <- liftIO $ runStateT (login account pwd) (st ^. stlog)
  return $ if succeeded
             then (st & stlog .~ session, True)
             else (st, False)
-- | Periodic timeline refresh (driven by the 'UpdateTimeline' event).
--
-- Reads position\/length from mplayer and updates 'sttimeline'.  Near
-- the end of a track (>90%) it pre-appends the next song to mplayer's
-- queue exactly once, tracked by 'stnextsongappended'.
updateTime :: St -> EventM n (Next St)
updateTime st = case st ^. stmplayer of
  -- invariant: the caller ('genericHandler') only dispatches this while
  -- playing, and playback always has an mplayer handle
  Nothing -> error "this should not happen!!!"
  Just mp -> do
    -- query position and length; the lambda combines the two Maybe
    -- results so t :: Maybe (position, length), Nothing if either query
    -- failed
    t <- liftIO $
      ( \a b -> do
          tp <- a
          tl <- b
          return (tp, tl) ) <$> getTimePosition mp <*> getTimeLength mp
    case t of
      Nothing -> continue st
      Just (tp', tl') -> do
        -- flag: 0 = just update the timeline, 1 = append the next song,
        -- 2 = the appended song has started, clear the flag.
        -- The guards are exhaustive, so the numeric case below is total.
        let flag | tp' / tl' > 0.9 = if st ^. stnextsongappended
                                       then 0 -- do nothing
                                       else 1 -- append song
                 | st ^. stnextsongappended = 2 -- appended song playing
                                                -- clear it
                 | otherwise = 0 -- do nothing
        case flag of
          0 -> continue $ st & sttimeline .~ (tp', tl')
          1 -> do
            let n = st ^. stcurrentSongNumber
            -- only append if there is a next song in the playing list
            if n < Prelude.length (st ^. stlog . currentPlayingList) - 1
              then do
                uri <- liftIO $ evalStateT
                  (getMusicUrl (n+1) _currentPlayingList) (st ^. stlog)
                let newTuple = (takeFileName uri,
                      unpack . fst $ (st^.stlog.currentPlayingList) !! (n+1))
                -- queue the next song and keep only the two live entries
                -- (next + current) in the filename map
                exeMplayer (appendMusic uri) $ st & stisplaying .~ True
                                                  & stflag .~ True
                                                  & stfilenameMap .~
                                                      [newTuple, Prelude.head $
                                                        st^.stfilenameMap]
                                                  & stcurrentSongNumber .~ (n + 1)
                                                  & sttimeline .~ (tp', tl')
                                                  & stnextsongappended .~ True
              -- last song: mark appended so we stop trying to queue more
              else continue $ st & sttimeline .~ (tp', tl')
                                 & stnextsongappended .~ True
          2 -> continue $ st & sttimeline .~ (tp', tl')
                             & stnextsongappended .~ False
-- | Periodic song-title refresh (driven by the 'UpdateSong' event).
--
-- Asks mplayer for the file name it is currently playing and maps it
-- back to a human-readable title through 'stfilenameMap'; an unknown
-- file name simply clears 'stcurrentSong' (lookup returns Nothing).
updateSong :: St -> EventM n (Next St)
updateSong st = case st ^. stmplayer of
  -- invariant: only dispatched while playing, which implies a live
  -- mplayer handle.  (Error message fixed: was a garbled
  -- "this should not happend!!!!".)
  Nothing -> error "updateSong: no mplayer handle, this should not happen"
  Just mp -> do
    fn <- liftIO (getFileName mp)
    case fn of
      Nothing  -> continue st
      Just fn' -> continue $ st & stcurrentSong .~ lookup fn' (st ^. stfilenameMap)
-- | Run a query against the live mplayer instance and return its
-- result.  Calling this with no mplayer running is a programming
-- error and aborts.
getMplayerProperty :: (MVar MpObject -> IO a) -> St -> EventM n a
getMplayerProperty action st =
  maybe (error "invalid calling") (liftIO . action) (st ^. stmplayer)
-- | Run a command against the mplayer instance if one is running
-- (silently a no-op otherwise), then continue the event loop with the
-- given state.
exeMplayer :: (MVar MpObject -> IO ()) -> St -> EventM n (Next St)
exeMplayer action st = do
  maybe (return ()) (liftIO . action) (st ^. stmplayer)
  continue st
-- | Top-level draw function: delegate to the renderer for whichever
-- layout is currently active.
uiAppDraw :: St -> [Widget Text]
uiAppDraw st = renderer st
  where
    renderer = case st ^. stcurrentLayout of
      MainLayout           -> renderMainLayout
      PlayListLayout       -> renderPlaylistLayout
      PlayListDetailLayout -> renderPlaylistDetailLayout
      LoginLayout          -> renderLoginLayout
-- | Top-level event dispatch: route the event to the handler for
-- whichever layout is currently active.
uiAppHandleEvent :: St -> BrickEvent Text CustomEvent -> EventM Text (Next St)
uiAppHandleEvent st ev = handler st ev
  where
    handler = case st ^. stcurrentLayout of
      MainLayout           -> handleMainLayoutEvent
      PlayListLayout       -> handlePlaylistLayoutEvent
      PlayListDetailLayout -> handlePlaylistDetailLayoutEvent
      LoginLayout          -> handleLoginLayoutEvent
-- | The brick application record: drawing and event handling dispatch
-- on the current layout, no cursor is ever shown, start-up is a no-op,
-- and styling comes from 'customAttrMap'.
theApp :: App St CustomEvent Text
theApp = App {
    appDraw = uiAppDraw,
    appChooseCursor = neverShowCursor,
    appHandleEvent = uiAppHandleEvent,
    appStartEvent = return,
    appAttrMap = const $ attrMap defAttr customAttrMap
  }
-- Attribute names referenced by the widgets and styled in
-- 'customAttrMap' below.

-- | Completed portion of the playback progress bar.
progressBarAttr :: AttrName
progressBarAttr = "progressbar"

progressBarInAttr :: AttrName -- progressbar-incomplete
progressBarInAttr = "progressbarin"

-- | The currently playing song's title.
songNameAttr :: AttrName
songNameAttr = "songname"

-- | Indicator shown while playback is paused.
pausedAttr :: AttrName
pausedAttr = "paused"

-- | The login editor that currently has focus.
editSelected :: AttrName
editSelected = "focused"
-- | Styling for list widgets, the progress bar, the song title, the
-- paused indicator and the focused login editor.  Fed to 'attrMap' in
-- 'theApp'.
customAttrMap :: [(AttrName, Attr)]
customAttrMap = [(listSelectedAttr, fg brightGreen `withStyle` bold),
                 (listAttr, fg brightYellow),
                 (progressBarAttr, bg brightBlack),
                 (progressBarInAttr, fg brightBlack),
                 (songNameAttr, fg brightCyan `withStyle` bold),
                 (pausedAttr, fg red),
                 (editSelected, fg brightGreen `withStyle` bold)]
| Frefreak/hnem | src/UI.hs | bsd-3-clause | 18,580 | 0 | 38 | 6,777 | 5,602 | 2,818 | 2,784 | -1 | -1 |
module Sexy.Functions.Maybe (
fromMaybe
, isJust
, isNothing
, listToMaybe
, maybeToList
, catMaybes
, mapMaybe
) where
import Sexy.Core
-- NOTE(review): 'maybe' here comes from Sexy.Core and — judging by
-- 'isJust'\/'isNothing' below — appears to take the Just-continuation
-- first and the Nothing-default second, i.e. flipped relative to
-- 'Prelude.maybe'.  TODO confirm against Sexy.Core.

-- | Convert between two MaybeC-style containers: a present value is
-- re-wrapped with 'just', an absent one becomes 'nothing'.
fromMaybe :: (MaybeC m, MaybeC f) => f a -> m a
fromMaybe = maybe just nothing

-- | True when a value is present.
isJust :: MaybeC m => m a -> Bool
isJust = maybe (const True) False

-- | True when no value is present.
isNothing :: MaybeC m => m a -> Bool
isNothing = maybe (const False) True

-- | Safe head: the first element of the sequence, if any.
listToMaybe :: UnconsMay l => l a -> Maybe a
listToMaybe xs = fst <$> unconsMay xs

-- | A singleton list for a present value, the empty list otherwise.
maybeToList :: MaybeC m => m a -> [a]
maybeToList = maybe (\x -> [x]) []

-- | Keep only the present values, preserving order.
catMaybes :: (List l, MaybeC m) => l (m a) -> l a
catMaybes = foldr step empty
  where step v acc = maybe (<| acc) acc v

-- | Map and keep only the present results ('catMaybes' fused with a map).
mapMaybe :: (List l, MaybeC m) => (a -> m b) -> l a -> l b
mapMaybe f = foldr step empty
  where step v acc = maybe (<| acc) acc (f v)
| DanBurton/sexy | src/Sexy/Functions/Maybe.hs | bsd-3-clause | 797 | 0 | 9 | 193 | 382 | 196 | 186 | 25 | 1 |
{-# language QuasiQuotes #-}
{-# language TemplateHaskell #-}
{-# language MultiParamTypeClasses #-}
module OpenCV.ImgProc.CascadeClassifier
( CascadeClassifier
, newCascadeClassifier
, cascadeClassifierDetectMultiScale
) where
import "base" Data.Int
import "base" Foreign.ForeignPtr (ForeignPtr, withForeignPtr)
import "base" Foreign.C.String (withCString)
import "base" System.IO.Unsafe (unsafePerformIO)
import "base" Data.Word
import "base" Foreign.Marshal.Alloc (alloca)
import "base" Foreign.Ptr (Ptr)
import "base" Control.Exception (mask_)
import "base" Foreign.Storable (peek)
import "base" Foreign.Marshal.Array (peekArray)
import qualified "inline-c" Language.C.Inline as C
import qualified "inline-c" Language.C.Inline.Unsafe as CU
import qualified "inline-c-cpp" Language.C.Inline.Cpp as C
import qualified "vector" Data.Vector as V
import "linear" Linear (V2(..))
import "this" OpenCV.Core.Types
import "this" OpenCV.Internal.C.Inline ( openCvCtx )
import "this" OpenCV.Internal.C.Types
import "this" OpenCV.Internal
import "this" OpenCV.TypeLevel
C.context openCvCtx
C.include "opencv2/core.hpp"
C.include "opencv2/objdetect.hpp"
C.using "namespace cv"
-- | Haskell-side handle to an OpenCV @cv::CascadeClassifier@.  The
-- foreign pointer's finalizer (installed via 'fromPtr' below) deletes
-- the underlying C++ object.
newtype CascadeClassifier = CascadeClassifier {unCascadeClassifier :: ForeignPtr (C CascadeClassifier)}

type instance C CascadeClassifier = C'CascadeClassifier

instance WithPtr CascadeClassifier where
    withPtr = withForeignPtr . unCascadeClassifier

instance FromPtr CascadeClassifier where
    -- attach a C++ @delete@ as the finalizer when wrapping a raw pointer
    fromPtr = objFromPtr CascadeClassifier $ \ptr ->
        [CU.exp| void { delete $(CascadeClassifier * ptr) }|]
-- | Create a new cascade classifier. Returns 'Nothing' if the classifier
-- is empty after initialization. This usually means that the file could
-- not be loaded (e.g. it doesn't exist, is corrupt, etc.)
--
-- The argument is the path to an OpenCV cascade XML file (e.g. one of
-- the bundled Haar cascades).
newCascadeClassifier :: FilePath -> IO (Maybe CascadeClassifier)
newCascadeClassifier fp = do
    -- construct the C++ object from the file path; OpenCV signals load
    -- failure only through empty(), not through an error return
    cc <- withCString fp $ \c'fp -> fromPtr
      [CU.exp| CascadeClassifier * { new CascadeClassifier(cv::String($(const char * c'fp))) } |]
    -- TODO: empty() seems to return bogus numbers when the classifier is not
    -- empty, and I'm not sure why. This is also why I'm not using toBool.
    empty <- fmap (== 1) (withPtr cc (\ccPtr -> [CU.exp| bool { $(CascadeClassifier * ccPtr)->empty() } |]))
    return $ if empty
             then Nothing
             else Just cc
{- |
Example:
@
cascadeClassifierArnold
:: forall (width :: Nat)
(height :: Nat)
(channels :: Nat)
(depth :: * )
. (Mat (ShapeT [height, width]) ('S channels) ('S depth) ~ Arnold_small)
=> IO (Mat (ShapeT [height, width]) ('S channels) ('S depth))
cascadeClassifierArnold = do
-- Create two classifiers from data files.
Just ccFrontal <- newCascadeClassifier "data/haarcascade_frontalface_default.xml"
Just ccEyes <- newCascadeClassifier "data/haarcascade_eye.xml"
-- Detect some features.
let eyes = ccDetectMultiscale ccEyes arnoldGray
faces = ccDetectMultiscale ccFrontal arnoldGray
-- Draw the result.
pure $ exceptError $
withMatM (Proxy :: Proxy [height, width])
(Proxy :: Proxy channels)
(Proxy :: Proxy depth)
white $ \imgM -> do
void $ matCopyToM imgM (V2 0 0) arnold_small Nothing
forM_ eyes $ \eyeRect -> lift $ rectangle imgM eyeRect blue 2 LineType_8 0
forM_ faces $ \faceRect -> lift $ rectangle imgM faceRect green 2 LineType_8 0
where
arnoldGray = exceptError $ cvtColor bgr gray arnold_small
ccDetectMultiscale cc = cascadeClassifierDetectMultiScale cc Nothing Nothing minSize maxSize
minSize = Nothing :: Maybe (V2 Int32)
maxSize = Nothing :: Maybe (V2 Int32)
@
<<doc/generated/examples/cascadeClassifierArnold.png cascadeClassifierArnold>>
-}
cascadeClassifierDetectMultiScale
    :: (IsSize size Int32)
    => CascadeClassifier
    -> Maybe Double -- ^ Scale factor, default is 1.1
    -> Maybe Int32 -- ^ Min neighbours, default 3
    -> Maybe (size Int32) -- ^ Minimum size. Default: no minimum.
    -> Maybe (size Int32) -- ^ Maximum size. Default: no maximum.
    -> Mat ('S [w, h]) ('S 1) ('S Word8)
    -> V.Vector (Rect Int32)
-- NOTE(review): unsafePerformIO presents detection as a pure function;
-- this relies on detectMultiScale being deterministic for a fixed
-- classifier and input — confirm against the OpenCV docs.
cascadeClassifierDetectMultiScale cc scaleFactor minNeighbours minSize maxSize src = unsafePerformIO $
    withPtr cc $ \ccPtr ->
    withPtr src $ \srcPtr ->
    withPtr c'minSize $ \minSizePtr ->
    withPtr c'maxSize $ \maxSizePtr ->
    -- out-parameters: the number of detections, and a heap array of
    -- pointers to heap-copied cv::Rect results
    alloca $ \(numRectsPtr :: Ptr Int32) ->
    alloca $ \(rectsPtrPtr :: Ptr (Ptr (Ptr (C'Rect Int32)))) ->
    -- mask_ so an async exception cannot strike between the C++ side
    -- allocating the rect array and the cleanup below
    mask_ $ do
      [CU.block| void {
        std::vector<cv::Rect> rects;
        $(CascadeClassifier * ccPtr)->detectMultiScale(
            *$(Mat * srcPtr),
            rects,
            $(double c'scaleFactor),
            $(int32_t c'minNeighbours),
            0,
            *$(Size2i * minSizePtr),
            *$(Size2i * maxSizePtr));
        *$(int32_t * numRectsPtr) = rects.size();
        cv::Rect * * rectsPtr = new cv::Rect * [rects.size()];
        *$(Rect2i * * * rectsPtrPtr) = rectsPtr;
        for (std::vector<cv::Rect>::size_type i = 0; i != rects.size(); i++) {
          rectsPtr[i] = new cv::Rect(rects[i]);
        }
      } |]
      numRects <- fromIntegral <$> peek numRectsPtr
      rectsPtr <- peek rectsPtrPtr
      -- wrap each heap-allocated cv::Rect; 'fromPtr' installs the
      -- finalizer that eventually deletes it
      rects :: [Rect Int32] <- peekArray numRects rectsPtr >>= mapM (fromPtr . return)
      -- the array of pointers itself is no longer needed
      [CU.block| void { delete [] *$(Rect2i * * * rectsPtrPtr); }|]
      return (V.fromList rects)
  where
    -- fill in OpenCV's documented defaults for omitted parameters;
    -- a (0,0) size means "no constraint" on the OpenCV side
    c'scaleFactor   = maybe 1.1 realToFrac scaleFactor
    c'minNeighbours = maybe 3 fromIntegral minNeighbours
    c'minSize       = maybe (toSize (V2 0 0)) toSize minSize
    c'maxSize       = maybe (toSize (V2 0 0)) toSize maxSize
| lukexi/haskell-opencv | src/OpenCV/ImgProc/CascadeClassifier.hs | bsd-3-clause | 5,671 | 0 | 25 | 1,189 | 866 | 487 | 379 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
module TestSimple (testSimple) where
import Control.Monad
import Control.Concurrent
import Data.Maybe
import Data.Pipe
import Data.Pipe.ByteString
import System.IO
import Text.XML.Pipe
import Network.XmlPush
import Network.XmlPush.Simple
-- | Exercise a 'SimplePusher' over a single handle: a background
-- thread renders every XML node read from the pusher to stdout, while
-- the main thread parses stdin into XML nodes and writes them to the
-- pusher.
testSimple :: Handle -> IO ()
testSimple h = do
    (sp :: SimplePusher Handle) <- generate (One h) ()
    -- reader thread: pull nodes and echo them as XML text
    void . forkIO . runPipe_ $ readFrom sp
        =$= convert (xmlString . (: []))
        =$= toHandle stdout
    -- writer (main thread): stdin -> XML events -> nodes -> pusher.
    -- NOTE(review): 'fromJust' makes a failed xmlEvent parse an error;
    -- acceptable for a test driver.
    runPipe_ $ fromHandle stdin
        =$= xmlEvent
        =$= convert fromJust
        =$= xmlNode []
        =$= writeTo sp
| YoshikuniJujo/xml-push | examples/TestSimple.hs | bsd-3-clause | 576 | 0 | 13 | 95 | 192 | 100 | 92 | 22 | 1 |
module T136b where
import Data.Bool.Singletons
import Data.Singletons.TH
-- Promote and single a minimal one-method class.
$(singletons [d|
  class C a where
    meth :: a -> a
  |])

-- The instance lives in a separate, later splice — presumably this is
-- the regression scenario (singling an instance of a class from an
-- earlier splice); compiling successfully is the test.
$(singletons [d|
  instance C Bool where
    meth = not
  |])
| goldfirere/singletons | singletons-base/tests/compile-and-dump/Singletons/T136b.hs | bsd-3-clause | 198 | 0 | 7 | 48 | 44 | 27 | 17 | -1 | -1 |
module Pickler.Text where
import Prelude
import Fay.Text (Text)
import FFI (ffi)
-- Utility functions on Text.
-- | Lower-case via JavaScript's @toLowerCase@.
lower :: Text -> Text
lower = ffi "%1.toLowerCase()"

-- | @split sep t@ splits @t@ on @sep@ (JavaScript @String.split@).
-- Note the separator is the first argument, the subject the second.
split :: Text -> Text -> [Text]
split = ffi "%2.split(%1)"

-- | @join sep ts@ concatenates @ts@ with @sep@ between elements
-- (JavaScript @Array.join@).
join :: Text -> [Text] -> Text
join = ffi "%2.join(%1)"

-- | Length via JavaScript's @length@, i.e. UTF-16 code units, not
-- Unicode code points.
textLength :: Text -> Int
textLength = ffi "%1.length"

-- | Text concatenation (JavaScript @+@).
(<>) :: Text -> Text -> Text
(<>) = ffi "%1 + %2"
| silkapp/pickler.js | src/Pickler/Text.hs | bsd-3-clause | 392 | 0 | 7 | 77 | 135 | 77 | 58 | 14 | 1 |
{-# LANGUAGE CPP, DeriveGeneric, OverloadedStrings, ScopedTypeVariables, TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module UnitTests (ioTests, tests) where
import Control.Monad (forM)
import Data.Aeson (decode, eitherDecode, encode, genericToJSON, genericToEncoding)
import Data.Aeson.TH ( deriveJSON )
import Data.Aeson.Encode (encodeToTextBuilder)
import Data.Aeson.Types (ToJSON(..), FromJSON, Value, camelTo, camelTo2, defaultOptions, omitNothingFields)
import Data.Char (toUpper)
import Data.Time (UTCTime)
import Data.Time.Format (parseTime)
import GHC.Generics (Generic)
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (Assertion, assertFailure, assertEqual)
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.Text.Lazy.Builder as TLB
import qualified Data.Text.Lazy.Encoding as TLE
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
#if MIN_VERSION_time(1,5,0)
import Data.Time.Format (defaultTimeLocale)
#else
import System.Locale (defaultTimeLocale)
#endif
-- | The pure unit-test tree: camel-case conversion round trips, lazy
-- encoding behaviour, and UTCTime parsing.  IO-dependent tests live in
-- 'ioTests'.
tests :: Test
tests = testGroup "unit" [
    testGroup "camelCase" [
      testCase "camelTo" $ roundTripCamel "aName"
    , testCase "camelTo" $ roundTripCamel "another"
    , testCase "camelTo" $ roundTripCamel "someOtherName"
      -- consecutive capitals ("API") collapse: camelTo is lossy here
    , testCase "camelTo" $
        assertEqual "" "camel_apicase" (camelTo '_' "CamelAPICase")
    , testCase "camelTo2" $ roundTripCamel2 "aName"
    , testCase "camelTo2" $ roundTripCamel2 "another"
    , testCase "camelTo2" $ roundTripCamel2 "someOtherName"
      -- camelTo2 keeps an underscore before the trailing word
    , testCase "camelTo2" $
        assertEqual "" "camel_api_case" (camelTo2 '_' "CamelAPICase")
    ]
  , testGroup "encoding" [
      testCase "goodProducer" $ goodProducer
    ]
  , testGroup "utctime" [
      testCase "good" $ utcTimeGood
    , testCase "bad" $ utcTimeBad
    ]
  ]
-- | Assert that converting a camelCase name to snake_case with
-- 'camelTo' and back with 'camelFrom' is the identity.
roundTripCamel :: String -> Assertion
roundTripCamel name =
  let rebuilt = camelFrom '_' (camelTo '_' name)
  in assertEqual "" name rebuilt
-- | Assert that 'camelTo2' followed by 'camelFrom' is the identity,
-- mirroring 'roundTripCamel' for the newer conversion.
roundTripCamel2 :: String -> Assertion
roundTripCamel2 name =
  let rebuilt = camelFrom '_' (camelTo2 '_' name)
  in assertEqual "" name rebuilt
-- | Inverse of 'camelTo': rebuild a camelCase name from a
-- separator-delimited one, e.g. @camelFrom '_' "some_name" == "someName"@.
--
-- Made total: the original crashed on the empty string ('L.split'
-- returns @[]@ there, defeating the irrefutable @(p:ps)@ pattern) and
-- on consecutive separators (@head@\/@tail@ on an empty segment).
camelFrom :: Char -> String -> String
camelFrom c s = case split c s of
    []     -> ""
    (p:ps) -> concat $ p : map capitalize ps
  where
    split c' s' = map L.unpack $ L.split c' $ L.pack s'
    -- total capitalize: empty segments (from consecutive separators)
    -- stay empty instead of crashing
    capitalize []     = []
    capitalize (t:ts) = toUpper t : ts
-- | Test fixture with a string and an int field; 'goodProducer' below
-- deliberately leaves 'wibbleInt' undefined to probe encoding laziness.
data Wibble = Wibble {
    wibbleString :: String
  , wibbleInt :: Int
  } deriving (Generic, Show)

instance ToJSON Wibble where
    toJSON = genericToJSON defaultOptions
    toEncoding = genericToEncoding defaultOptions
-- Test that if we put a bomb in a data structure, but only demand
-- part of it via lazy encoding, we do not unexpectedly fail.
goodProducer :: Assertion
goodProducer = assertEqual "partial encoding should not explode on undefined"
                           '{' (L.head (encode wibble))
  where
    -- the 4030-char string pushes the encoder past its first buffer
    -- chunk before the undefined field would be forced
    wibble = Wibble {
                 wibbleString = replicate 4030 'a'
               , wibbleInt = undefined
               }
-- Test decoding various UTC time formats
--
-- Note: the incomplete pattern matches for UTCTimes are completely
-- intentional. The test expects these parses to succeed. If the
-- pattern matches fails, there's a bug in either the test or in aeson
-- and needs to be investigated.
utcTimeGood :: Assertion
utcTimeGood = do
  -- fractional seconds, whole seconds, and space instead of 'T'
  let ts1 = "2015-01-01T12:13:00.00Z" :: LT.Text
  let ts2 = "2015-01-01T12:13:00Z" :: LT.Text
  -- 'T' between date and time is not required, can be space
  let ts3 = "2015-01-03 12:13:00.00Z" :: LT.Text
  let ts4 = "2015-01-03 12:13:00.125Z" :: LT.Text
  let (Just (t1 :: UTCTime)) = parseWithAeson ts1
  let (Just (t2 :: UTCTime)) = parseWithAeson ts2
  let (Just (t3 :: UTCTime)) = parseWithAeson ts3
  let (Just (t4 :: UTCTime)) = parseWithAeson ts4
  -- each aeson parse must agree with time's reference parser
  assertEqual "utctime" (parseWithRead "%FT%T%QZ" ts1) t1
  assertEqual "utctime" (parseWithRead "%FT%T%QZ" ts2) t2
  assertEqual "utctime" (parseWithRead "%F %T%QZ" ts3) t3
  assertEqual "utctime" (parseWithRead "%F %T%QZ" ts4) t4
  -- Time zones. Both +HHMM and +HH:MM are allowed for timezone
  -- offset, and MM may be omitted.
  let ts5 = "2015-01-01T12:30:00.00+00" :: LT.Text
  let ts6 = "2015-01-01T12:30:00.00+01:15" :: LT.Text
  let ts7 = "2015-01-01T12:30:00.00-02" :: LT.Text
  let ts8 = "2015-01-01T22:00:00.00-03" :: LT.Text
  let ts9 = "2015-01-01T22:00:00.00-04:30" :: LT.Text
  let (Just (t5 :: UTCTime)) = parseWithAeson ts5
  let (Just (t6 :: UTCTime)) = parseWithAeson ts6
  let (Just (t7 :: UTCTime)) = parseWithAeson ts7
  let (Just (t8 :: UTCTime)) = parseWithAeson ts8
  let (Just (t9 :: UTCTime)) = parseWithAeson ts9
  -- expected values are the offsets applied manually, expressed in UTC
  assertEqual "utctime" (parseWithRead "%FT%T%QZ" "2015-01-01T12:30:00.00Z") t5
  assertEqual "utctime" (parseWithRead "%FT%T%QZ" "2015-01-01T11:15:00.00Z") t6
  assertEqual "utctime" (parseWithRead "%FT%T%QZ" "2015-01-01T14:30:00Z") t7
  -- ts8 wraps around to the next day in UTC
  assertEqual "utctime" (parseWithRead "%FT%T%QZ" "2015-01-02T01:00:00Z") t8
  assertEqual "utctime" (parseWithRead "%FT%T%QZ" "2015-01-02T02:30:00Z") t9
  -- Seconds in Time can be omitted
  let ts10 = "2015-01-03T12:13Z" :: LT.Text
  let ts11 = "2015-01-03 12:13Z" :: LT.Text
  let ts12 = "2015-01-01T12:30-02" :: LT.Text
  let (Just (t10 :: UTCTime)) = parseWithAeson ts10
  let (Just (t11 :: UTCTime)) = parseWithAeson ts11
  let (Just (t12 :: UTCTime)) = parseWithAeson ts12
  assertEqual "utctime" (parseWithRead "%FT%H:%MZ" ts10) t10
  assertEqual "utctime" (parseWithRead "%F %H:%MZ" ts11) t11
  assertEqual "utctime" (parseWithRead "%FT%T%QZ" "2015-01-01T14:30:00Z") t12
  where
    -- reference parser: time's own parseTime with an explicit format;
    -- a malformed fixture string is a test bug, hence error
    parseWithRead :: String -> LT.Text -> UTCTime
    parseWithRead f s =
      case parseTime defaultTimeLocale f . LT.unpack $ s of
        Nothing -> error "parseTime input malformed"
        Just t  -> t
    -- run the string through aeson's FromJSON UTCTime by wrapping it
    -- in JSON quotes and decoding
    parseWithAeson :: LT.Text -> Maybe UTCTime
    parseWithAeson s = decode . LT.encodeUtf8 $ (LT.concat ["\"", s, "\""])
-- Test that a few non-timezone qualified timestamp formats get
-- rejected if decoding to UTCTime.
utcTimeBad :: Assertion
utcTimeBad = do
  verifyFailParse "2000-01-01T12:13:00" -- missing Zulu time not allowed (some TZ required)
  verifyFailParse "2000-01-01 12:13:00" -- missing Zulu time not allowed (some TZ required)
  verifyFailParse "2000-01-01" -- date only not OK
  verifyFailParse "2000-01-01Z" -- date only not OK
  verifyFailParse "2015-01-01T12:30:00.00+00Z" -- no Zulu if offset given
  verifyFailParse "2015-01-01T12:30:00.00+00:00Z" -- no Zulu if offset given
  verifyFailParse "2015-01-03 12:13:00.Z" -- decimal at the end but no digits
  verifyFailParse "2015-01-03 12:13.000Z" -- decimal at the end, but no seconds
  where
    -- wrap in JSON quotes, decode as UTCTime, and require Nothing
    verifyFailParse (s :: LT.Text) =
      let (dec :: Maybe UTCTime) = decode . LT.encodeUtf8 $ (LT.concat ["\"", s, "\""]) in
      assertEqual "verify failure" Nothing dec
------------------------------------------------------------------------------
-- Comparison between bytestring and text encoders
------------------------------------------------------------------------------
-- | IO-dependent test groups (these read files from the benchmark
-- data directory, so they cannot live in the pure 'tests' tree).
ioTests :: IO [Test]
ioTests = fmap (: []) encoderComparisonTests
-- | For each benchmark JSON file, check that the direct bytestring
-- encoder and the text-builder encoder produce identical output.
encoderComparisonTests :: IO Test
encoderComparisonTests = do
  encoderTests <- forM testFiles $ \file0 -> do
      let file = "benchmarks/json-data/" ++ file0
      return $ testCase file $ do
          inp <- L.readFile file
          case eitherDecode inp of
            Left  err -> assertFailure $ "Decoding failure: " ++ err
            Right val -> assertEqual "" (encode val) (encodeViaText val)
  return $ testGroup "encoders" encoderTests
 where
  -- the alternative pipeline: Value -> text Builder -> lazy Text -> UTF-8
  encodeViaText :: Value -> L.ByteString
  encodeViaText =
      TLE.encodeUtf8 . TLB.toLazyText . encodeToTextBuilder . toJSON
  -- a spread of document shapes and sizes from the benchmark corpus
  testFiles =
    [ "example.json"
    , "integers.json"
    , "jp100.json"
    , "numbers.json"
    , "twitter10.json"
    , "twitter20.json"
    , "geometry.json"
    , "jp10.json"
    , "jp50.json"
    , "twitter1.json"
    , "twitter100.json"
    , "twitter50.json"
    ]
-- A regression test for: https://github.com/bos/aeson/issues/293
-- The test is that these declarations compile: TH-derived instances
-- with omitNothingFields on a record of Maybe fields...
data MyRecord = MyRecord {_field1 :: Maybe Int, _field2 :: Maybe Bool}
deriveJSON defaultOptions{omitNothingFields=True} ''MyRecord

-- ...and the Generic-derived equivalents with default options.
data MyRecord2 = MyRecord2 {_field3 :: Maybe Int, _field4 :: Maybe Bool}
  deriving Generic
instance ToJSON MyRecord2
instance FromJSON MyRecord2
| neobrain/aeson | tests/UnitTests.hs | bsd-3-clause | 8,392 | 0 | 20 | 1,605 | 2,066 | 1,062 | 1,004 | 155 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.