code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="es-ES">
<title>HTTPS Info Add-on</title>
<maps>
<homeID>httpsinfo</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
secdec/zap-extensions
|
addOns/httpsInfo/src/main/javahelp/org/zaproxy/zap/extension/httpsinfo/resources/help_es_ES/helpset_es_ES.hs
|
apache-2.0
| 968
| 77
| 67
| 157
| 413
| 209
| 204
| -1
| -1
|
-- | Sequence A169853 — presumably the OEIS entry of that number generated
-- by the shared EKG builder; confirm against Helpers.EKGBuilder.
module EKG.A169853 (a169853) where

import Helpers.EKGBuilder (buildEKG)

-- | The n-th term of the sequence, using 1-based indexing
-- (so @a169853 1@ is the first term).
a169853 :: Int -> Integer
a169853 n = a169853_list !! (n - 1)

-- | The list of terms, produced by 'buildEKG' seeded with @[11]@.
a169853_list :: [Integer]
a169853_list = buildEKG [11]
|
peterokagey/haskellOEIS
|
src/EKG/A169853.hs
|
apache-2.0
| 191
| 0
| 7
| 29
| 68
| 39
| 29
| 6
| 1
|
-- | x and y coordinates (or lengths).
data Cartesian2D = Cartesian2D Double Double
    deriving (Eq, Show)

-- | Angle and distance (magnitude).
data Polar2D = Polar2D Double Double
    deriving (Eq, Show)
|
hungaikev/learning-haskell
|
AlgebraicVector.hs
|
apache-2.0
| 222
| 0
| 6
| 63
| 47
| 26
| 21
| 4
| 0
|
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | Description: Extract and apply patches on JSON documents.
--
-- This module implements data types and operations to represent the
-- differences between JSON documents (i.e. a patch), to compare JSON documents
-- and extract such a patch, and to apply such a patch to a JSON document.
module Data.Aeson.Diff (
-- * Patches
Patch(..),
Pointer,
Key(..),
Operation(..),
Config(..),
-- * Functions
diff,
diff',
patch,
applyOperation,
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Error.Class
import Data.Aeson
import Data.Aeson.Types (modifyFailure, typeMismatch)
import qualified Data.ByteString.Lazy.Char8 as BS
import Data.Foldable (foldlM)
import Data.Hashable
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HM
import Data.List (groupBy, intercalate)
import Data.Maybe
import Data.Monoid
import Data.Scientific
import Data.Text (Text)
import qualified Data.Text as T
import Data.Vector (Vector)
import qualified Data.Vector as V
import Data.Vector.Distance
import Data.Aeson.Patch
import Data.Aeson.Pointer
-- * Configuration

-- | Configuration for the diff algorithm.
newtype Config = Config
    { configTstBeforeRem :: Bool
      -- ^ When 'True', every generated 'Rem' is preceded by a 'Tst' of the
      -- value being removed (see 'del').
    }

-- | Default configuration: removals are not guarded by 'Tst' operations.
defaultConfig :: Config
defaultConfig = Config False
-- * Costs

-- | Calculate the cost of an operation: operations that carry a value
-- ('Add', 'Rep', 'Tst') cost the 'valueSize' of that value, all others
-- cost one.
operationCost :: Operation -> Int
operationCost op@Add{} = valueSize (changeValue op)
operationCost Rem{}    = 1
operationCost op@Rep{} = valueSize (changeValue op)
operationCost Mov{}    = 1
operationCost Cpy{}    = 1
operationCost op@Tst{} = valueSize (changeValue op)
-- | Estimate the size of a JSON 'Value': scalars count one, objects and
-- arrays contribute the sum of their children's sizes.
valueSize :: Value -> Int
valueSize (Object obj) = sum . fmap valueSize . HM.elems $ obj
valueSize (Array arr)  = V.sum (V.map valueSize arr)
valueSize _            = 1
-- * Atomic patches

-- | Construct a patch with a single 'Add' operation.
--
-- The 'Config' argument is accepted (but not used) so that 'ins' has the
-- same shape as 'del' and 'rep'.
ins :: Config -> Pointer -> Value -> [Operation]
ins _ p v = [Add p v]
-- | Construct a patch with a single 'Rem' operation, guarded by a 'Tst'
-- of the removed value when 'configTstBeforeRem' is set.
del :: Config -> Pointer -> Value -> [Operation]
del Config{..} p v
    | configTstBeforeRem = [Tst p v, Rem p]
    | otherwise          = [Rem p]
-- | Construct a patch with a single 'Rep' operation.
--
-- The 'Config' argument is accepted (but not used) so that 'rep' has the
-- same shape as 'del'; the previous definition destructured it with
-- RecordWildCards without using any field.
rep :: Config -> Pointer -> Value -> [Operation]
rep _ p v = [Rep p v]
-- * Diff

-- | Compare two JSON documents and generate a patch describing the differences.
--
-- Uses the 'defaultConfig' (i.e. no 'Tst' guards before removals).
diff
    :: Value
    -> Value
    -> Patch
diff = diff' defaultConfig
-- | Compare two JSON documents and generate a patch describing the differences.
diff'
    :: Config
    -> Value
    -> Value
    -> Patch
diff' cfg@Config{..} v v' = Patch (worker mempty v v')
  where
    -- Emit 'mempty' when the equality test holds, otherwise the given changes.
    check :: Monoid m => Bool -> m -> m
    check b v = if b then mempty else v

    -- Compare two values under a pointer prefix, producing the operations
    -- that turn the first into the second.
    worker :: Pointer -> Value -> Value -> [Operation]
    worker p v1 v2 = case (v1, v2) of
        -- For atomic values of the same type, emit changes iff they differ.
        (Null, Null)           -> mempty
        (Bool b1, Bool b2)     -> check (b1 == b2) $ rep cfg p v2
        (Number n1, Number n2) -> check (n1 == n2) $ rep cfg p v2
        (String s1, String s2) -> check (s1 == s2) $ rep cfg p v2

        -- For structured values of the same type, walk them.
        (Array a1, Array a2)   -> check (a1 == a2) $ workArray p a1 a2
        (Object o1, Object o2) -> check (o1 == o2) $ workObject p o1 o2

        -- For values of different types, replace v1 with v2.
        _                      -> rep cfg p v2

    -- Walk the keys in two objects, producing a 'Patch'.
    workObject :: Pointer -> Object -> Object -> [Operation]
    workObject path o1 o2 =
        let k1 = HM.keys o1
            k2 = HM.keys o2
            -- Deletions: keys present only in the first object.
            del_keys :: [Text]
            del_keys = filter (not . (`elem` k2)) k1
            deletions :: [Operation]
            deletions = concatMap
                (\k -> del cfg (Pointer [OKey k]) (fromJust $ HM.lookup k o1))
                del_keys
            -- Insertions: keys present only in the second object.
            ins_keys = filter (not . (`elem` k1)) k2
            insertions :: [Operation]
            insertions = concatMap
                (\k -> ins cfg (Pointer [OKey k]) (fromJust $ HM.lookup k o2))
                ins_keys
            -- Changes: keys present in both objects, compared recursively.
            chg_keys = filter (`elem` k2) k1
            changes :: [Operation]
            changes = concatMap
                (\k -> worker (Pointer [OKey k])
                    (fromJust $ HM.lookup k o1)
                    (fromJust $ HM.lookup k o2))
                chg_keys
        in modifyPointer (path <>) <$> (deletions <> insertions <> changes)

    -- Use an adaption of the Wagner-Fischer algorithm to find the shortest
    -- sequence of changes between two JSON arrays.
    workArray :: Pointer -> Array -> Array -> [Operation]
    workArray path ss tt = fmap (modifyPointer (path <>)) . snd . fmap concat $ leastChanges params ss tt
      where
        params :: Params Value [Operation] (Sum Int)
        params = Params{..}
        equivalent = (==)
        delete i = del cfg (Pointer [AKey i])
        insert i = ins cfg (Pointer [AKey i])
        substitute i = worker (Pointer [AKey i])
        cost = Sum . sum . fmap operationCost
        -- Position is advanced by grouping operations with same "head" index:
        -- + groups of many operations advance one
        -- + singletons with |pointer|>1 advance one
        -- + other singletons advance according to 'pos'
        positionOffset = sum . fmap adv . groupBy related
        -- Two operations are "related" when their pointers share a head key.
        -- NOTE(review): this case is non-exhaustive — an operation with an
        -- empty pointer path would crash here; confirm that cannot occur.
        related :: Operation -> Operation -> Bool
        related o1 o2 =
            let p1 = pointerPath (changePointer o1)
                p2 = pointerPath (changePointer o2)
            in case (p1, p2) of
                 ([i1], [i2]) -> False
                 (i1:_, i2:_) | i1 == i2 -> True
                              | otherwise -> False
        -- A group of operations has a peculiar (i.e. given by 'pos') advance
        -- when it's a single op and |changePointer| = 1; otherwise it's a
        -- bunch of changes inside the head key.
        adv :: [Operation] -> Int
        adv [op]
            | (length . pointerPath . changePointer $ op) == 1 = pos op
        adv _ = 1
        -- NOTE(review): both guards of the 'Rem' case yield 0, so the guard
        -- is redundant as written — confirm whether the singleton case was
        -- intended to differ.
        pos :: Operation -> Int
        pos Rem{changePointer=Pointer path}
            | length path == 1 = 0
            | otherwise = 0
        pos Add{changePointer=Pointer path}
            | length path == 1 = 1
            | otherwise = 0
        pos Rep{changePointer=Pointer path}
            | length path == 1 = 1
            | otherwise = 0
        pos Cpy{changePointer=Pointer path}
            | length path == 1 = 1
            | otherwise = 0
        pos Mov{changePointer=Pointer path}
            | length path == 1 = 1
            | otherwise = 0
        pos Tst{changePointer=Pointer path} = 0
-- * Patching

-- | Apply a patch to a JSON document, threading the document through each
-- operation in order. An empty patch leaves the document unchanged (the
-- fold over an empty operation list simply returns it).
patch
    :: Patch
    -> Value
    -> Result Value
patch (Patch ops) document = foldlM (flip applyOperation) document ops
-- | Apply an 'Operation' to a 'Value' by dispatching to the per-operation
-- handler.
applyOperation
    :: Operation
    -> Value
    -> Result Value
applyOperation op json = case op of
    Add path v'   -> applyAdd path v' json
    Rem path      -> applyRem path json
    Rep path v'   -> applyRep path v' json
    Tst path v    -> applyTst path v json
    Cpy path from -> applyCpy path from json
    -- Delegate to 'applyMov' (whose body was previously inlined here),
    -- matching how the 'Cpy' case delegates to 'applyCpy'.
    Mov path from -> applyMov path from json
-- | Apply an 'Add' operation to a document.
--
-- http://tools.ietf.org/html/rfc6902#section-4.1
--
-- - An empty 'Path' replaces the document.
-- - A single 'OKey' inserts or replaces the corresponding member in an object.
-- - A single 'AKey' inserts at the corresponding location.
-- - Longer 'Paths' traverse if they can and fail otherwise.
applyAdd :: Pointer -> Value -> Value -> Result Value
applyAdd pointer = go pointer
  where
    -- Empty pointer: the new value replaces the whole document.
    go (Pointer []) val _ =
        return val
    -- Single array key: insert at that index.
    -- (A dead, unused local `fn` binding was removed from this clause.)
    go (Pointer [AKey i]) v' (Array v) =
        return (Array $ vInsert i v' v)
    -- Deeper array key: traverse into the element, failing if it is absent.
    go (Pointer (AKey i : path)) v' (Array v) =
        let fn :: Maybe Value -> Result (Maybe Value)
            fn Nothing  = cannot "insert" "array" i pointer
            fn (Just d) = Just <$> go (Pointer path) v' d
        in Array <$> vModify i fn v
    -- Single object key: insert or replace the member.
    go (Pointer [OKey n]) v' (Object m) =
        return . Object $ HM.insert n v' m
    -- Deeper object key: traverse into the member, failing if it is absent.
    go (Pointer (OKey n : path)) v' (Object o) =
        let fn :: Maybe Value -> Result (Maybe Value)
            fn Nothing  = cannot "insert" "object" n pointer
            fn (Just d) = Just <$> go (Pointer path) v' d
        in Object <$> hmModify n fn o
    -- The "-" key on an array means "the end of the array".
    go (Pointer (OKey n : path)) v' array@(Array v)
        | n == "-" = go (Pointer (AKey (V.length v) : path)) v' array
    -- Any other combination is a type mismatch.
    go path _ v = pointerFailure path v
-- | Apply a 'Rem' operation to a document.
--
-- http://tools.ietf.org/html/rfc6902#section-4.2
--
-- - The target location MUST exist.
applyRem :: Pointer -> Value -> Result Value
applyRem from@(Pointer path) = go path
  where
    -- Empty pointer: removing the whole document leaves 'Null'.
    go [] _ = return Null
    -- Single array key: delete that element; it must exist.
    -- (An unused `d@` as-pattern and a shadowing `Just v` binding were
    -- cleaned up here, matching the object clause below.)
    go [AKey i] (Array v) =
        let fn :: Maybe Value -> Result (Maybe Value)
            fn Nothing  = cannot "delete" "array" i from
            fn (Just _) = return Nothing
        in Array <$> vModify i fn v
    -- Deeper array key: traverse into the element.
    go (AKey i : path) (Array v) =
        let fn :: Maybe Value -> Result (Maybe Value)
            fn Nothing  = cannot "traverse" "array" i from
            fn (Just o) = Just <$> go path o
        in Array <$> vModify i fn v
    -- Single object key: delete that member; it must exist.
    go [OKey n] (Object m) =
        let fn :: Maybe Value -> Result (Maybe Value)
            fn Nothing  = cannot "delete" "object" n from
            fn (Just _) = return Nothing
        in Object <$> hmModify n fn m
    -- Deeper object key: traverse into the member.
    go (OKey n : path) (Object m) =
        let fn :: Maybe Value -> Result (Maybe Value)
            fn Nothing  = cannot "traverse" "object" n from
            fn (Just o) = Just <$> go path o
        in Object <$> hmModify n fn m
    -- Dodgy hack for "-" key which means "the end of the array".
    go (OKey n : path) array@(Array v)
        | n == "-" = go (AKey (V.length v) : path) array
    -- Type mismatch: clearly the thing we're deleting isn't here.
    go _ value = pointerFailure from value
-- | Apply a 'Rep' operation to a document.
--
-- http://tools.ietf.org/html/rfc6902#section-4.3
--
-- - Functionally identical to a 'Rem' followed by an 'Add'.
applyRep :: Pointer -> Value -> Value -> Result Value
applyRep from v doc = do
    cleared <- applyRem from doc
    applyAdd from v cleared
-- | Apply a 'Mov' operation to a document: fetch the source value, remove
-- it, then add it at the destination.
--
-- http://tools.ietf.org/html/rfc6902#section-4.4
applyMov :: Pointer -> Pointer -> Value -> Result Value
applyMov path from doc =
    get from doc >>= \v -> applyRem from doc >>= applyAdd path v
-- | Apply a 'Cpy' operation to a document.
--
-- http://tools.ietf.org/html/rfc6902#section-4.5
--
-- - The location must exist.
-- - Identical to an add with the appropriate value.
applyCpy :: Pointer -> Pointer -> Value -> Result Value
applyCpy path from doc =
    get from doc >>= \v -> applyAdd path v doc
-- | Apply a 'Tst' operation to a document.
--
-- http://tools.ietf.org/html/rfc6902#section-4.6
--
-- - The location must exist.
-- - The value must be equal to the supplied value.
applyTst :: Pointer -> Value -> Value -> Result Value
applyTst path v doc = do
    found <- get path doc
    when (v /= found) . Error . T.unpack $
        "Element at \"" <> formatPointer path <> "\" fails test."
    return doc
-- * Utilities

-- $ These are some utility functions used in the functions defined
-- above. Mostly they just fill gaps in the APIs of the "Data.Vector"
-- and "Data.HashMap.Strict" modules.

-- | Delete the element at index @i@ of a vector by splicing together the
-- slices on either side of it.
vDelete :: Int -> Vector a -> Vector a
vDelete i v = front <> back
  where
    len   = V.length v
    front = V.slice 0 i v
    back  = V.slice (i + 1) (len - i - 1) v
-- | Insert an element into a vector at index @i@, clamping the index to
-- the front or back of the vector when it is out of range.
vInsert :: Int -> a -> Vector a -> Vector a
vInsert i a v
    | i <= 0          = V.cons a v
    | i >= V.length v = V.snoc v a
    | otherwise       = V.take i v <> V.singleton a <> V.drop i v
-- | Modify the element at an index in a 'Vector'.
--
-- The function is passed the value at index @i@, or 'Nothing' if there is no
-- such element. The function should return 'Nothing' if it wants to have no
-- value corresponding to the index, or 'Just' if it wants a value.
--
-- Depending on the vector and the function, we will either:
--
-- - leave the vector unchanged;
-- - delete an existing element;
-- - insert a new element; or
-- - replace an existing element.
vModify
    :: Int
    -> (Maybe a -> Result (Maybe a))
    -> Vector a
    -> Result (Vector a)
vModify i f v = case f current of
    Error e -> Error e
    Success Nothing
        | isJust current -> return (vDelete i v)
        | otherwise      -> return v
    Success (Just n)
        | isJust current -> return (V.update v (V.singleton (i, n)))
        | otherwise      -> return (vInsert i n v)
  where
    -- The existing element, if the index is in range.
    current = v V.!? i
-- | Modify the value associated with a key in a 'HashMap'.
--
-- The function is passed the value defined for @k@, or 'Nothing'. If the
-- function returns 'Nothing', the key and value are deleted from the map;
-- otherwise the value replaces the existing value in the returned map.
hmModify
    :: (Eq k, Hashable k)
    => k
    -> (Maybe v -> Result (Maybe v))
    -> HashMap k v
    -> Result (HashMap k v)
hmModify k f m =
    case f (HM.lookup k m) of
        Error err         -> Error err
        Success Nothing   -> pure (HM.delete k m)
        Success (Just v') -> pure (HM.insert k v' m)
-- | Report an error about being unable to use a pointer key.
cannot
    :: (Show ix)
    => String -- ^ The operation attempted: "delete", "traverse", etc.
    -> String -- ^ The container type: "array" or "object".
    -> ix     -- ^ The key or index that could not be used.
    -> Pointer
    -> Result a
cannot op ty ix p =
    Error ("Cannot " <> op <> " missing " <> ty <> " member at index "
           <> show ix <> " in pointer \"" <> T.unpack (formatPointer p) <> "\".")
|
thsutton/aeson-diff
|
lib/Data/Aeson/Diff.hs
|
bsd-2-clause
| 14,552
| 0
| 18
| 4,482
| 4,277
| 2,187
| 2,090
| 273
| 15
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
module Web.Twitter.Conduit.Status
(
-- * Timelines
StatusesMentionsTimeline
, mentionsTimeline
, StatusesUserTimeline
, userTimeline
, StatusesHomeTimeline
, homeTimeline
, StatusesRetweetsOfMe
, retweetsOfMe
-- * Tweets
, StatusesRetweetsId
, retweetsId
, StatusesShowId
, showId
, StatusesDestroyId
, destroyId
, StatusesUpdate
, update
, StatusesRetweetId
, retweetId
, MediaData (..)
, StatusesUpdateWithMedia
, updateWithMedia
-- , oembed
-- , retweetersIds
, StatusesLookup
, lookup
) where
import Prelude hiding ( lookup )
import Web.Twitter.Conduit.Base
import Web.Twitter.Conduit.Request
import Web.Twitter.Conduit.Request.Internal
import Web.Twitter.Conduit.Parameters
import Web.Twitter.Types
import qualified Data.Text as T
import Network.HTTP.Client.MultipartFormData
import Data.Default
-- $setup
-- >>> :set -XOverloadedStrings -XOverloadedLabels
-- >>> import Control.Lens
-- * Timelines
-- | Returns query data asks the most recent mentions for the authenticating user.
--
-- You can perform a query using 'call':
--
-- @
-- res <- 'call' 'mentionsTimeline'
-- @
--
-- >>> mentionsTimeline
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/mentions_timeline.json" []
mentionsTimeline :: APIRequest StatusesMentionsTimeline [Status]
mentionsTimeline = APIRequest "GET" uri def
  where
    uri = endpoint ++ "statuses/mentions_timeline.json"

-- | Valid optional parameters for 'mentionsTimeline'.
type StatusesMentionsTimeline = '[
      "count" ':= Integer
    , "since_id" ':= Integer
    , "max_id" ':= Integer
    , "trim_user" ':= Bool
    , "contributor_details" ':= Bool
    , "include_entities" ':= Bool
    , "tweet_mode" ':= T.Text
    ]
-- | Returns query data asks a collection of the most recent Tweets posted by the user indicated by the screen_name or user_id parameters.
--
-- You can perform a search query using 'call':
--
-- @
-- res <- 'call' $ 'userTimeline' ('ScreenNameParam' \"thimura\")
-- @
--
-- >>> userTimeline (ScreenNameParam "thimura")
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/user_timeline.json" [("screen_name","thimura")]
-- >>> userTimeline (ScreenNameParam "thimura") & #include_rts ?~ True & #count ?~ 200
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/user_timeline.json" [("count","200"),("include_rts","true"),("screen_name","thimura")]
userTimeline :: UserParam -> APIRequest StatusesUserTimeline [Status]
userTimeline q = APIRequest "GET" uri (mkUserParam q)
  where
    uri = endpoint ++ "statuses/user_timeline.json"

-- | Valid optional parameters for 'userTimeline'.
type StatusesUserTimeline = '[
      "count" ':= Integer
    , "since_id" ':= Integer
    , "max_id" ':= Integer
    , "trim_user" ':= Bool
    , "exclude_replies" ':= Bool
    , "contributor_details" ':= Bool
    , "include_rts" ':= Bool
    , "tweet_mode" ':= T.Text
    ]
-- | Returns query data asks a collection of the most recentTweets and retweets posted by the authenticating user and the users they follow.
--
-- You can perform a search query using 'call':
--
-- @
-- res <- 'call' 'homeTimeline'
-- @
--
-- >>> homeTimeline
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/home_timeline.json" []
-- >>> homeTimeline & #count ?~ 200
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/home_timeline.json" [("count","200")]
homeTimeline :: APIRequest StatusesHomeTimeline [Status]
homeTimeline = APIRequest "GET" uri def
  where
    uri = endpoint ++ "statuses/home_timeline.json"

-- | Valid optional parameters for 'homeTimeline'.
type StatusesHomeTimeline = '[
      "count" ':= Integer
    , "since_id" ':= Integer
    , "max_id" ':= Integer
    , "trim_user" ':= Bool
    , "exclude_replies" ':= Bool
    , "contributor_details" ':= Bool
    , "include_entities" ':= Bool
    , "tweet_mode" ':= T.Text
    ]
-- | Returns query data asks the most recent tweets authored by the authenticating user that have been retweeted by others.
--
-- You can perform a search query using 'call':
--
-- @
-- res <- 'call' 'retweetsOfMe'
-- @
--
-- >>> retweetsOfMe
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/retweets_of_me.json" []
-- >>> retweetsOfMe & #count ?~ 100
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/retweets_of_me.json" [("count","100")]
retweetsOfMe :: APIRequest StatusesRetweetsOfMe [Status]
retweetsOfMe = APIRequest "GET" uri def
  where
    uri = endpoint ++ "statuses/retweets_of_me.json"

-- | Valid optional parameters for 'retweetsOfMe'.
type StatusesRetweetsOfMe = '[
      "count" ':= Integer
    , "since_id" ':= Integer
    , "max_id" ':= Integer
    , "trim_user" ':= Bool
    , "include_entities" ':= Bool
    , "include_user_entities" ':= Bool
    , "tweet_mode" ':= T.Text
    ]
-- * Tweets
-- | Returns query data that asks for the most recent retweets of the specified tweet
--
-- You can perform a search query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'retweetsId' 1234567890
-- @
--
-- >>> retweetsId 1234567890
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/retweets/1234567890.json" []
-- >>> retweetsId 1234567890 & #count ?~ 100
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/retweets/1234567890.json" [("count","100")]
retweetsId :: StatusId -> APIRequest StatusesRetweetsId [RetweetedStatus]
retweetsId status_id =
    APIRequest "GET" (endpoint ++ "statuses/retweets/" ++ show status_id ++ ".json") def

-- | Valid optional parameters for 'retweetsId'.
type StatusesRetweetsId = '[
      "count" ':= Integer
    , "trim_user" ':= Bool
    ]
-- | Returns query data asks a single Tweet, specified by the id parameter.
--
-- You can perform a search query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'showId' 1234567890
-- @
--
-- >>> showId 1234567890
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/show/1234567890.json" []
-- >>> showId 1234567890 & #include_my_retweet ?~ True
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/show/1234567890.json" [("include_my_retweet","true")]
showId :: StatusId -> APIRequest StatusesShowId Status
showId status_id =
    APIRequest "GET" (endpoint ++ "statuses/show/" ++ show status_id ++ ".json") def

-- | Valid optional parameters for 'showId'.
type StatusesShowId = '[
      "trim_user" ':= Bool
    , "include_my_retweet" ':= Bool
    , "include_entities" ':= Bool
    , "include_ext_alt_text" ':= Bool
    , "tweet_mode" ':= T.Text
    ]
-- | Returns post data which destroys the status specified by the require ID parameter.
--
-- You can perform a search query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'destroyId' 1234567890
-- @
--
-- >>> destroyId 1234567890
-- APIRequest "POST" "https://api.twitter.com/1.1/statuses/destroy/1234567890.json" []
destroyId :: StatusId -> APIRequest StatusesDestroyId Status
destroyId status_id =
    APIRequest "POST" (endpoint ++ "statuses/destroy/" ++ show status_id ++ ".json") def

-- | Valid optional parameters for 'destroyId'.
type StatusesDestroyId = '[
      "trim_user" ':= Bool
    , "tweet_mode" ':= T.Text
    ]
-- | Returns post data which updates the authenticating user's current status.
-- To upload an image to accompany the tweet, use 'updateWithMedia'.
--
-- You can perform a search query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'update' \"Hello World\"
-- @
--
-- >>> update "Hello World"
-- APIRequest "POST" "https://api.twitter.com/1.1/statuses/update.json" [("status","Hello World")]
-- >>> update "Hello World" & #in_reply_to_status_id ?~ 1234567890
-- APIRequest "POST" "https://api.twitter.com/1.1/statuses/update.json" [("in_reply_to_status_id","1234567890"),("status","Hello World")]
update :: T.Text -> APIRequest StatusesUpdate Status
update status =
    APIRequest "POST" (endpoint ++ "statuses/update.json") [("status", PVString status)]

-- | Valid optional parameters for 'update'.
type StatusesUpdate = '[
      "in_reply_to_status_id" ':= Integer
    -- , "lat_long"
    -- , "place_id"
    , "display_coordinates" ':= Bool
    , "trim_user" ':= Bool
    , "media_ids" ':= [Integer]
    , "tweet_mode" ':= T.Text
    ]
-- | Returns post data which retweets a tweet, specified by ID.
--
-- You can perform a search query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'retweetId' 1234567890
-- @
--
-- >>> retweetId 1234567890
-- APIRequest "POST" "https://api.twitter.com/1.1/statuses/retweet/1234567890.json" []
retweetId :: StatusId -> APIRequest StatusesRetweetId RetweetedStatus
retweetId status_id =
    APIRequest "POST" (endpoint ++ "statuses/retweet/" ++ show status_id ++ ".json") def

-- | Valid optional parameters for 'retweetId'.
type StatusesRetweetId = '[
      "trim_user" ':= Bool
    ]
-- | Returns post data which updates the authenticating user's current status and attaches media for upload.
--
-- You can perform a search query using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'updateWithMedia' \"Hello World\" ('MediaFromFile' \"/home/thimura/test.jpeg\")
-- @
--
-- >>> updateWithMedia "Hello World" (MediaFromFile "/home/fuga/test.jpeg")
-- APIRequestMultipart "POST" "https://api.twitter.com/1.1/statuses/update_with_media.json" [("status","Hello World")]
updateWithMedia :: T.Text
                -> MediaData
                -> APIRequest StatusesUpdateWithMedia Status
updateWithMedia tweet mediaData =
    APIRequestMultipart "POST" uri [("status", PVString tweet)] [toPart mediaData]
  where
    uri = endpoint ++ "statuses/update_with_media.json"
    -- Wrap the media payload as a multipart form part named "media[]".
    toPart (MediaFromFile fp) = partFileSource "media[]" fp
    toPart (MediaRequestBody filename filebody) =
        partFileRequestBody "media[]" filename filebody

-- | Valid optional parameters for 'updateWithMedia'.
type StatusesUpdateWithMedia = '[
      "possibly_sensitive" ':= Bool
    , "in_reply_to_status_id" ':= Integer
    -- , "lat_long"
    -- , "place_id"
    , "display_coordinates" ':= Bool
    , "tweet_mode" ':= T.Text
    ]
-- | Returns fully-hydrated tweet objects for up to 100 tweets per request, as specified by comma-separated values passed to the id parameter.
--
-- You can perform a request using 'call':
--
-- @
-- res <- 'call' twInfo mgr '$' 'lookup' [20, 432656548536401920]
-- @
--
-- >>> lookup [10]
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/lookup.json" [("id","10")]
-- >>> lookup [10, 432656548536401920]
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/lookup.json" [("id","10,432656548536401920")]
-- >>> lookup [10, 432656548536401920] & #include_entities ?~ True
-- APIRequest "GET" "https://api.twitter.com/1.1/statuses/lookup.json" [("include_entities","true"),("id","10,432656548536401920")]
lookup :: [StatusId] -> APIRequest StatusesLookup [Status]
lookup ids = APIRequest "GET" uri [("id", PVIntegerArray ids)]
  where
    uri = endpoint ++ "statuses/lookup.json"

-- | Valid optional parameters for 'lookup'.
type StatusesLookup = '[
      "include_entities" ':= Bool
    , "trim_user" ':= Bool
    , "map" ':= Bool
    , "tweet_mode" ':= T.Text
    ]
|
Javran/twitter-conduit
|
Web/Twitter/Conduit/Status.hs
|
bsd-2-clause
| 10,705
| 0
| 9
| 1,878
| 1,442
| 863
| 579
| 134
| 2
|
module Example where
import MixFix.Expr
import MixFix.Parser
import MixFix.Dic
import qualified Data.Map as Map
import Text.Parsec.Error (ParseError(..))
-- | Pair a name with its attached information, forming an assumption.
(=>>) :: String -> a -> Assump a
(=>>) name info = (name, info)
-- Placeholder attachment values: the example dictionary carries no real
-- payload, so every operator, identifier and constant is tagged with unit.
infoUnInt = ()
infoOpNum = ()
infoOpRel = ()
infoNum = ()
infoBool = ()
infoFun = ()
infoCont = ()
infoMVar = ()
infoIf = ()
-- | Base example dictionary: a set-brace container, the usual boolean,
-- arithmetic and relational operators, and starting assumptions for
-- identifiers, metavariables and constants.
initialState' :: Dictionary ()
initialState' =
    -- Operators and container list
    addOperator (Closed [Part "{",Place,Part "}"] infoCont)
    . addOperator (Operator [Place,Part "/\\",Place] [Assoc,Ident "True"] 30 infoOpNum)
    . addOperator (Operator [Place,Part "\\/",Place] [Assoc,Ident "False"] 20 infoOpNum)
    -- Juxtaposition (application) and composition at maximum precedence.
    . addOperator (Operator [Place,Place] [AssocLeft] maxPrec infoOpNum)
    . addOperator (Operator [Place,Part ".",Place] [Assoc] maxPrec infoOpNum)
    . addOperator (Operator [Place,Part "!"] [Assoc] 100 infoUnInt)
    . addOperator (Operator [Place,Part "^",Place] [AssocRight] 80 infoOpNum)
    . addOperator (Operator [Place,Part "+",Place] [Assoc,Symmetric,Ident "0"] 80 infoOpNum)
    . addOperator (Operator [Place,Part "*",Place] [Assoc,Symmetric,Ident "1"] 100 infoOpNum)
    . addOperator (Operator [Place,Part "<",Place] [Conjuntive] 40 infoOpRel)
    . addOperator (Operator [Place,Part "<=",Place] [Conjuntive] 40 infoOpRel)
    . addOperator (Operator [Place,Part "==",Place] [Conjuntive] 10 infoOpRel)
    . addOperator (Operator [Place,Part "=",Place] [Conjuntive] 40 infoOpRel)
    . addOperator (Operator [Place,Part ">",Place] [Conjuntive] 40 infoOpRel)
    . addOperator (Operator [Place,Part ">=",Place] [Conjuntive] 40 infoOpRel)
    . addOperator (Operator [Part "if",Place,Part "then",Place,Part "else",Place] [] 0 infoIf)
    --, ("H",[Prefix [Part "H",Place,Place] tsOpInt])
    $ Dictionary Map.empty
        -- Identifier assumptions.
        ["f" =>> infoFun,"g" =>> infoFun,"b" =>> infoBool, "p" =>> infoBool,"q" =>> infoBool,"r" =>> infoBool, "n" =>> infoNum, "m" =>> infoNum]
        -- Metavariable assumptions.
        ["E" =>> infoMVar,"R" =>> infoMVar,"Q" =>> infoMVar]
        -- Constant assumptions.
        [ "true" =>> infoBool,"false" =>> infoBool
        , "0" =>> infoNum,"1" =>> infoNum,"2" =>> infoNum,"3" =>> infoNum,"4" =>> infoNum,"5" =>> infoNum
        ]
        []
-- | Full example dictionary: extends 'initialState'' with if-then-else
-- variants, an angle-bracket container, and an alias mapping the prefix
-- "All" form onto conjunction.
initialState :: Dictionary ()
initialState =
    addOperator (Operator [Part "if",Place,Part "then",Place,Part "else",Place,Part "fi"] [] 0 infoIf)
    . addOperator (Closed [Part "〈",Place,Part "〉"] infoCont)
    . addOperator (Operator [Part "if",Place,Part "then",Place] [] 0 infoIf)
    -- . addOperator (Operator [Place,Place,Part "K"] [Assoc] maxPrec infoOpInt)
    -- . addOperator (Operator [Part "H",Place,Place] [Assoc] maxPrec infoOpInt)
    -- . (\d -> addProperty d [Part "All",Place,Place] Symmetric)
    . addOperator (Alias [Part "All",Place,Place] [Place,Part "/\\",Place])
    -- . addOperator (Operator [Place,Part "+",Place,Part "-",Place] [] 90 tsOpInt)
    $ initialState'
{-
typeState :: Dictionary ()
typeState =
addOperator (Operator [Place,Part "->",Place] [] 0 infoOpRel)
.
-}
-- To run some Examples:
-- runParse initialState (flip parseExpr (0,"")) "if b then if p then q else r"
-- runParse initialState (flip parseExpr (0,"")) "p /\\ q \\/ r"
-- runParse initialState (flip parseExpr (0,"")) "p \\/ q /\\ r"
-- runParse initialState (flip parseExpr (0,"")) "p[q := r]"
-- | Parse an expression string with the example dictionary 'initialState'.
parse :: String -> Either Text.Parsec.Error.ParseError Expr
parse = runParse initialState (flip parseExpr (0,""))

-- The example values below previously each repeated the whole
-- @runParse initialState (flip parseExpr (0,""))@ pipeline even though
-- 'parse' already names it; they now share that single definition.

-- | Dangling-else example.
ifAmbig :: Either Text.Parsec.Error.ParseError Expr
ifAmbig = parse "if b then if p then q else r"

boolExpr1 :: Either Text.Parsec.Error.ParseError Expr
boolExpr1 = parse "p /\\ q \\/ r"

boolExpr2 :: Either Text.Parsec.Error.ParseError Expr
boolExpr2 = parse "p \\/ q /\\ r"

-- | Substitution-syntax example.
sustExpr :: Either Text.Parsec.Error.ParseError Expr
sustExpr = parse "p[q := r]"
|
pachopepe/mfxparser
|
Example.hs
|
bsd-3-clause
| 3,996
| 15
| 13
| 745
| 1,247
| 690
| 557
| 58
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Minecraft.Format.Schematic.PrettyPrinting
-- Copyright : (c) Tamar Christina 2012
-- License : BSD3
--
-- Maintainer : tamar@zhox.com
-- Stability : experimental
-- Portability : portable
--
-- Pretty printing for the Schematic file format
--
-----------------------------------------------------------------------------
module Minecraft.Format.Schematic.PrettyPrinting where
|
Mistuke/CraftGen
|
Minecraft/Format/Schematic/PrettyPrinting.hs
|
bsd-3-clause
| 492
| 0
| 3
| 66
| 21
| 19
| 2
| 1
| 0
|
import System.Environment (getArgs)
-- | The eight knight-move offsets, each encoded as a pair of
-- (file-shift, rank-shift) functions over the character coordinates.
moves :: [(Char -> Char, Char -> Char)]
moves = [(pred . pred, pred), (pred . pred, succ), (pred, pred . pred), (pred, succ . succ),
         (succ, pred . pred), (succ, succ . succ), (succ . succ, pred), (succ . succ, succ)]
-- | Apply each candidate move to the square (x, y) and keep only the
-- targets that stay on the board (files 'a'..'h', ranks '1'..'8').
knigh :: Char -> Char -> [(Char -> Char, Char -> Char)] -> [String]
knigh x y candidates =
    [ [fx x, fy y]
    | (fx, fy) <- candidates
    , fx x `elem` ['a' .. 'h']
    , fy y `elem` ['1' .. '8']
    ]
-- | All legal knight moves from a square written as a two-character
-- string (file then rank, e.g. @"g1"@). The original definition was
-- partial and crashed on any other input; malformed squares now simply
-- yield no moves.
knight :: String -> [String]
knight [x, y] = knigh x y moves
knight _      = []
-- | Read the input file named on the command line and print, one output
-- line per input line, the legal knight moves for each listed square.
main :: IO ()
main = do
    [inpFile] <- getArgs
    contents  <- readFile inpFile
    mapM_ (putStrLn . unwords . knight) (lines contents)
|
nikai3d/ce-challenges
|
easy/knight_moves.hs
|
bsd-3-clause
| 727
| 0
| 11
| 199
| 403
| 217
| 186
| 15
| 1
|
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.InstallSymlink
-- Copyright : (c) Duncan Coutts 2008
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Managing installing binaries with symlinks.
-----------------------------------------------------------------------------
module Distribution.Client.InstallSymlink (
symlinkBinaries,
symlinkBinary,
) where
#if mingw32_HOST_OS
import Distribution.Package (PackageIdentifier)
import Distribution.Client.InstallPlan (InstallPlan)
import Distribution.Client.Setup (InstallFlags)
import Distribution.Simple.Setup (ConfigFlags)
import Distribution.Simple.Compiler
-- | Windows stub (selected by the @mingw32_HOST_OS@ branch above):
-- symlinking is not supported, so no symlinks are attempted and the
-- empty result list is returned.
symlinkBinaries :: Compiler
                -> ConfigFlags
                -> InstallFlags
                -> InstallPlan
                -> IO [(PackageIdentifier, String, FilePath)]
symlinkBinaries _ _ _ _ = return []

-- | Windows stub: always fails, since symlinks are unavailable.
symlinkBinary :: FilePath -> FilePath -> String -> String -> IO Bool
symlinkBinary _ _ _ _ = fail "Symlinking feature not available on Windows"
#else
import Distribution.Client.Types
( SourcePackage(..), ReadyPackage(..), enableStanzas )
import Distribution.Client.Setup
( InstallFlags(installSymlinkBinDir) )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.InstallPlan (InstallPlan)
import Distribution.Package
( PackageIdentifier, Package(packageId), mkPackageKey
, packageKeyLibraryName, LibraryName )
import Distribution.Compiler
( CompilerId(..) )
import qualified Distribution.PackageDescription as PackageDescription
import qualified Distribution.Client.ComponentDeps as CD
import Distribution.PackageDescription
( PackageDescription )
import Distribution.PackageDescription.Configuration
( finalizePackageDescription )
import Distribution.Simple.Setup
( ConfigFlags(..), fromFlag, fromFlagOrDefault, flagToMaybe )
import qualified Distribution.Simple.InstallDirs as InstallDirs
import qualified Distribution.InstalledPackageInfo as Installed
import Distribution.Simple.Compiler
( Compiler, CompilerInfo(..), packageKeySupported )
import System.Posix.Files
( getSymbolicLinkStatus, isSymbolicLink, createSymbolicLink
, removeLink )
import System.Directory
( canonicalizePath )
import System.FilePath
( (</>), splitPath, joinPath, isAbsolute )
import Prelude hiding (ioError)
import System.IO.Error
( isDoesNotExistError, ioError )
import Distribution.Compat.Exception ( catchIO )
import Control.Exception
( assert )
import Data.Maybe
( catMaybes )
-- | We would like by default to install binaries into some location that is on
-- the user's PATH. For per-user installations on Unix systems that basically
-- means the @~/bin/@ directory. On the majority of platforms the @~/bin/@
-- directory will be on the user's PATH. However some people are a bit nervous
-- about letting a package manager install programs into @~/bin/@.
--
-- A compromise solution is that instead of installing binaries directly into
-- @~/bin/@, we could install them in a private location under @~/.cabal/bin@
-- and then create symlinks in @~/bin/@. We can be careful when setting up the
-- symlinks that we do not overwrite any binary that the user installed. We can
-- check if it was a symlink we made because it would point to the private dir
-- where we install our binaries. This means we can install normally without
-- worrying and in a later phase set up symlinks, and if that fails then we
-- report it to the user, but even in this case the package is still in an OK
-- installed state.
--
-- This is an optional feature that users can choose to use or not. It is
-- controlled from the config file. Of course it only works on POSIX systems
-- with symlinks so is not available to Windows users.
--
symlinkBinaries :: Compiler
                -> ConfigFlags
                -> InstallFlags
                -> InstallPlan
                -> IO [(PackageIdentifier, String, FilePath)]
symlinkBinaries comp configFlags installFlags plan =
  case flagToMaybe (installSymlinkBinDir installFlags) of
    Nothing -> return []
    Just symlinkBinDir
           | null exes -> return []
           | otherwise -> do
      publicBinDir <- canonicalizePath symlinkBinDir
--    TODO: do we want to do this here? :
--      createDirectoryIfMissing True publicBinDir
      -- Collect the (pkgid, exe, path) triples for which symlinking FAILED;
      -- successes yield Nothing and are filtered out by catMaybes.
      fmap catMaybes $ sequence
        [ do privateBinDir <- pkgBinDir pkg libname
             ok <- symlinkBinary
                     publicBinDir privateBinDir
                     publicExeName privateExeName
             if ok
                then return Nothing
                else return (Just (pkgid, publicExeName,
                                   privateBinDir </> privateExeName))
        | (ReadyPackage _ _flags _ deps, pkg, exe) <- exes
        , let pkgid = packageId pkg
              -- package key depends on the library names of non-setup deps
              pkg_key = mkPackageKey (packageKeySupported comp) pkgid
                        (map Installed.libraryName (CD.nonSetupDeps deps))
              libname = packageKeyLibraryName pkgid pkg_key
              publicExeName = PackageDescription.exeName exe
              -- private name carries the configured prefix/suffix templates
              privateExeName = prefix ++ publicExeName ++ suffix
              prefix = substTemplate pkgid libname prefixTemplate
              suffix = substTemplate pkgid libname suffixTemplate ]
  where
    -- All buildable executables of packages installed by this plan.
    exes =
      [ (cpkg, pkg, exe)
      | InstallPlan.Installed cpkg _ <- InstallPlan.toList plan
      , let pkg = pkgDescription cpkg
      , exe <- PackageDescription.executables pkg
      , PackageDescription.buildable (PackageDescription.buildInfo exe) ]
    pkgDescription :: ReadyPackage -> PackageDescription
    pkgDescription (ReadyPackage (SourcePackage _ pkg _ _) flags stanzas _) =
      case finalizePackageDescription flags
             (const True)
             platform cinfo [] (enableStanzas stanzas pkg) of
        Left _ -> error "finalizePackageDescription ReadyPackage failed"
        Right (desc, _) -> desc
    -- This is sadly rather complicated. We're kind of re-doing part of the
    -- configuration for the package. :-(
    pkgBinDir :: PackageDescription -> LibraryName -> IO FilePath
    pkgBinDir pkg libname = do
      defaultDirs <- InstallDirs.defaultInstallDirs
                       compilerFlavor
                       (fromFlag (configUserInstall configFlags))
                       (PackageDescription.hasLibs pkg)
      let templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault
                           defaultDirs (configInstallDirs configFlags)
          absoluteDirs = InstallDirs.absoluteInstallDirs
                           (packageId pkg) libname
                           cinfo InstallDirs.NoCopyDest
                           platform templateDirs
      canonicalizePath (InstallDirs.bindir absoluteDirs)
    -- Expand a prefix/suffix path template for the given package id.
    substTemplate pkgid libname = InstallDirs.fromPathTemplate
                                . InstallDirs.substPathTemplate env
      where env = InstallDirs.initialPathTemplateEnv pkgid libname
                  cinfo platform
    fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "")
    prefixTemplate = fromFlagTemplate (configProgPrefix configFlags)
    suffixTemplate = fromFlagTemplate (configProgSuffix configFlags)
    platform = InstallPlan.planPlatform plan
    cinfo = InstallPlan.planCompiler plan
    (CompilerId compilerFlavor _) = compilerInfoId cinfo
-- | Place (or refresh) the public symlink pointing at the private binary.
--
-- Returns 'True' on success and 'False' when the target location holds a
-- file we did not create (we never clobber user files).  Any other IO
-- error, such as a permission failure, propagates as an exception.
symlinkBinary :: FilePath -- ^ Canonical public bin dir, eg @/home/user/bin@
              -> FilePath -- ^ Canonical private bin dir,
                          --   eg @/home/user/.cabal/bin@
              -> String   -- ^ Executable name in the public dir, eg @foo@
              -> String   -- ^ Executable name in the private dir, eg @foo-1.0@
              -> IO Bool
symlinkBinary publicBindir privateBindir publicName privateName =
    targetOkToOverwrite publicLink privateTarget >>= \status ->
      case status of
        NotOurFile    -> return False
        NotExists     -> mkLink >> return True
        OkToOverwrite -> rmLink >> mkLink >> return True
  where
    publicLink    = publicBindir  </> publicName
    privateTarget = privateBindir </> privateName
    -- The link target is made relative so relocating the whole tree
    -- together keeps the link valid.
    mkLink = createSymbolicLink
               (makeRelative publicBindir privateBindir </> privateName)
               publicLink
    rmLink = removeLink publicLink
-- | Check a file path of a symlink that we would like to create to see if it
-- is OK. For it to be OK to overwrite it must either not already exist yet or
-- be a symlink to our target (in which case we can assume ownership).
--
targetOkToOverwrite :: FilePath -- ^ The file path of the symlink to the private
                                -- binary that we would like to create
                    -> FilePath -- ^ The canonical path of the private binary.
                                -- Use 'canonicalizePath' to make this.
                    -> IO SymlinkStatus
targetOkToOverwrite symlink target = handleNotExist $ do
  -- lstat, so we can tell a symlink apart from a regular file
  status <- getSymbolicLinkStatus symlink
  if not (isSymbolicLink status)
    then return NotOurFile
    else do target' <- canonicalizePath symlink
            -- This relies on canonicalizePath handling symlinks
            if target == target'
              then return OkToOverwrite
              else return NotOurFile
  where
    handleNotExist action = catchIO action $ \ioexception ->
      -- If the target doesn't exist then there's no problem overwriting it!
      if isDoesNotExistError ioexception
        then return NotExists
        else ioError ioexception
-- | The possible states of the public file path where we want a symlink.
data SymlinkStatus
   = NotExists     -- ^ The file doesn't exist so we can make a symlink.
   | OkToOverwrite -- ^ A symlink already exists, though it is ours. We'll
                   -- have to delete it first before we make a new symlink.
   | NotOurFile    -- ^ A file already exists and it is not one of our existing
                   -- symlinks (either because it is not a symlink or because
                   -- it points somewhere other than our managed space).
  deriving Show
-- | Compute a relative path that leads from the first canonical absolute
-- path to the second, inserting @..@ components where necessary.
-- Both arguments must be absolute paths (checked with 'assert').
makeRelative :: FilePath -> FilePath -> FilePath
makeRelative a b = assert (isAbsolute a && isAbsolute b) relative
  where
    aParts   = splitPath a
    bParts   = splitPath b
    shared   = length (takeWhile id (zipWith (==) aParts bParts))
    -- one ".." for every component of 'a' beyond the shared prefix
    ups      = map (const "..") (drop shared aParts)
    relative = joinPath (ups ++ drop shared bParts)
#endif
|
fugyk/cabal
|
cabal-install/Distribution/Client/InstallSymlink.hs
|
bsd-3-clause
| 11,261
| 0
| 11
| 3,100
| 177
| 107
| 70
| 151
| 4
|
import BasicPrelude
import Test.Tasty
-- | The (currently empty) top-level test group.
tests :: TestTree
tests = testGroup "Tests"
  [
  ]
-- | Run the tasty suite.
main :: IO ()
main = defaultMain tests
|
mfine/template
|
test/Test.hs
|
bsd-3-clause
| 131
| 0
| 6
| 27
| 44
| 23
| 21
| 7
| 1
|
import Distribution.ArchLinux.Report
import Distribution.ArchLinux.AUR
import Distribution.ArchLinux.PkgBuild
import System.FilePath
import Control.Monad
import Data.List
import Distribution.Text
import Distribution.Version
import System.Directory
import Text.Printf
import Control.DeepSeq
import GHC.Conc (numCapabilities)
import Control.Concurrent
import qualified Control.OldException as C
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import Control.Parallel.Strategies
{-
Generate the report of mappings of hackage to distro urls for Hackage
-}
-- Oversubscription factor: k jobs per capability are forked below.
k = 32
-- Parallel work queue, similar to forM: runs 'f' over 'tests' on n*k
-- worker threads, returning the results (order not preserved) and running
-- each worker's logging action as results arrive.
parM tests f = do
    let n = numCapabilities
    chan <- newChan
    ps <- getChanContents chan -- results
    work <- newMVar tests -- where to take jobs from
    forM_ [1..n*k] $ forkIO . thread work chan -- how many threads to fork
    -- wait on i threads to close
    -- logging them as they go
    let wait xs i acc
        | i >= (n*k) = return acc -- done
        | otherwise = case xs of
            Nothing : xs -> wait xs (i+1) acc
            Just (s,a) : xs -> do a ; wait xs i (s : acc)
    wait ps 0 []
  where
    -- thread :: MVar [Test] -> Chan (Maybe String) -> Int -> IO ()
    -- Each worker pops one job at a time from the shared MVar queue and
    -- writes Nothing when the queue is drained, so 'wait' can count exits.
    thread work chan me = loop
      where
        loop = do
            job <- modifyMVar work $ \jobs -> return $ case jobs of
                [] -> ([], Nothing)
                (j:js) -> (js, Just j)
            case job of
                Nothing -> writeChan chan Nothing -- done
                Just name -> do
                    v <- f name
                    writeChan chan . Just $ (v, printf "%d: %-25s\n" me name)
                    loop
-- Single output writer: drains the channel, appending each string to the
-- map file, until a Nothing sentinel arrives.
writer ch = do
    v <- readChan ch
    case v of
        Nothing -> return ()
        Just s -> do appendFile "cabalArchMap.txt" s
                     writer ch
-- AUR maintainer account whose packages we enumerate.
me = "arch-haskell"
-- Build cabalArchMap.txt: the known 'community' entries first, then, in
-- parallel, one entry per package maintained by 'me' on AUR.
main = do
    -- best-effort removal of a previous run's output; errors are ignored
    C.handle (\e -> return ()) (removeFile "cabalArchMap.txt")
    -- todo: replace this with call to AUR json interface.
    -- s <- lines `fmap` readFile "arch-haskell-packages.txt"
    packages <- maintainer me
    let s = sort $ map packageName packages
    -- just write out the ones we already know.
    forM community $ \p@(name, vers, url) -> do
        appendFile "cabalArchMap.txt" $ show p ++ "\n"
    -- one output writer
    out <- newChan
    forkIO $ writer out
    -- start with the Arch Package names, deriving the Cabal names, the
    -- version, and the URL to find them.
    --
    parM s $ \p -> do
        k <- info p
        case k of
            Left err -> do putStrLn $ "Couldn't find package: " ++ show p
                           return ()
            Right aur -> do
                let name = takeFileName (packageURL aur)
                    vers = case packageVersion aur of
                        Left _ -> ""
                        Right (v,_) -> display v
                    url = packageURLinAUR aur
                -- (Agda,"2.2.4+dfsg","http://packages.debian.org/source/sid/agda")
                let s = show (name, vers, Just url) ++ "\n"
                -- force the string before handing it to the writer thread
                rnf s `seq` writeChan out . Just $ s
    -- terminate the writer thread
    writeChan out Nothing
    putStrLn "copy the map to http://code.haskell.org/arch/cabalArchMap.txt:"
    putStrLn "scp cabalArchMap.txt community.haskell.org/srv/code/arch"
-- hand search:
--
-- http://www.archlinux.org/packages/?sort=&arch=x86_64&repo=Extra&q=haskell&last_update=&limit=all
--
-- Hand-maintained (cabal name, version, package URL) triples for Haskell
-- packages already in the official Arch repositories.
community =
    [ ("xmonad", "0.9.1", Just "http://www.archlinux.org/packages/community/i686/xmonad/")
    , ("packedstring", "0.1.0.1", Just "http://www.archlinux.org/packages/extra/i686/haskell-packedstring/")
    , ("deepseq", "1.1.0.0", Just "http://www.archlinux.org/packages/extra/i686/haskell-deepseq/")
    , ("haskell-src", "1.0.1.3", Just "http://www.archlinux.org/packages/extra/i686/haskell-haskell-src/")
    , ("HUnit", "1.2.2.1", Just "http://www.archlinux.org/packages/extra/i686/haskell-hunit/")
    , ("parallel", "2.2.0.1", Just "http://www.archlinux.org/packages/extra/i686/haskell-parallel/")
    , ("QuickCheck", "2.1.0.3", Just "http://www.archlinux.org/packages/extra/i686/haskell-quickcheck/")
    , ("stm", "2.1.1.2", Just "http://www.archlinux.org/packages/extra/i686/haskell-stm/")
    , ("xhtml", "3000.2.0.1", Just "http://www.archlinux.org/packages/extra/i686/haskell-xhtml/")
    , ("extensible-exceptions", "0.1.1.0", Just "http://www.archlinux.org/packages/extra/i686/haskell-extensible-exceptions/")
    , ("haskeline", "0.6.2.2", Just "http://www.archlinux.org/packages/extra/i686/haskell-haskeline/")
    , ("terminfo", "0.3.0.2", Just "http://www.archlinux.org/packages/extra/i686/haskell-terminfo/")
    , ("mtl", "1.1.0.2", Just "http://www.archlinux.org/packages/extra/i686/haskell-mtl/")
    , ("network", "2.2.1.7", Just "http://www.archlinux.org/packages/extra/i686/haskell-network/")
    , ("dataenc", "0.13.0.2", Just "http://www.archlinux.org/packages/extra/i686/haskell-dataenc/")
    , ("hashed-storage", "0.4.13", Just "http://www.archlinux.org/packages/extra/i686/haskell-hashed-storage/")
    , ("html", "1.0.1.2", Just "http://www.archlinux.org/packages/extra/i686/haskell-html/")
    , ("mmap", "0.4.1", Just "http://www.archlinux.org/packages/extra/i686/haskell-mmap/")
    , ("parsec", "3.0.1", Just "http://www.archlinux.org/packages/extra/i686/haskell-parsec/")
    , ("regex-base", "0.93.1", Just "http://www.archlinux.org/packages/extra/i686/haskell-regex-base/")
    , ("regex-compat", "0.92", Just "http://www.archlinux.org/packages/extra/i686/haskell-regex-compat/")
    , ("regex-posix", "0.94.1", Just "http://www.archlinux.org/packages/extra/i686/haskell-regex-posix/")
    , ("utf8-string","0.3.6", Just "http://www.archlinux.org/packages/extra/i686/haskell-utf8-string/")
    , ("zlib", "0.5.2.0", Just "http://www.archlinux.org/packages/community/i686/haskell-zlib/")
    , ("happy" ,"1.18.4", Just "http://www.archlinux.org/packages/extra/i686/happy/")
    , ("alex", "2.3.1", Just "http://www.archlinux.org/packages/community/i686/alex/")
    , ("X11-xft", "0.3", Just "http://www.archlinux.org/packages/community/i686/haskell-x11-xft/")
    , ("X11", "1.5.0.0", Just "http://www.archlinux.org/packages/community/i686/haskell-x11/")
    , ("HTTP", "4000.0.9", Just "http://www.archlinux.org/packages/community/i686/haskell-http/")
    , ("gtk2hs", "0.10.1", Just "http://www.archlinux.org/packages/community/i686/gtk2hs/")
    , ("darcs", "2.3.1", Just "http://www.archlinux.org/packages/extra/i686/darcs/")
    , ("cabal-install", "0.8.0", Just "http://www.archlinux.org/packages/community/i686/cabal-install/")
    ]
|
archhaskell/archlinux-web
|
scripts/distro-map.hs
|
bsd-3-clause
| 6,756
| 0
| 23
| 1,516
| 1,360
| 727
| 633
| 105
| 4
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Data.Text
import Data.Set (Set)
import qualified Data.Set as Set
import System.Random
import System.IO
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.Ingredients
import Test.Tasty.Runners.AntXML
import Test.Tasty.QuickCheck
import qualified Data.ByteString.Lazy as B
import Text.Parsing.ElectSON
#if !MIN_VERSION_base(4,8,0)
import Data.Functor
#endif
-- | Build the test tree and run it through tasty with our ingredient list.
main :: IO ()
main = tests >>= defaultMainWithIngredients ingrs
-- | Standard tasty ingredients plus the Ant-style XML reporter.
ingrs :: [Ingredient]
ingrs =
  [ antXMLRunner
  ]
  ++
  defaultIngredients
-- | All test groups; QuickCheck properties run 10000 cases each.
tests :: IO TestTree
tests = localOption (QuickCheckTests 10000) . testGroup "ElectSON Tests" <$> sequence
  [ test_deserialize_succeeds
  ]
-- | Check that the trivial string-candidate fixture deserializes into the
-- expected candidate set and ballot list.
test_deserialize_succeeds :: IO TestTree
test_deserialize_succeeds = do
  -- B.readFile replaces the previous openFile/B.hGetContents pair, which
  -- relied on lazy IO to eventually close the handle.
  f <- B.readFile "test-data/electson_trivial_stringcandidates.json"
  return $ testCase "ElectSON Deserialize"
         $ checkElection (getElection f)
                         (Set.fromList ["Alice", "Betty"])
                         [ Ballot ["Alice", "Betty"]
                         , Ballot ["Alice"]
                         , Ballot ["Betty", "Alice"]
                         ]
-- | Assert that a parse result succeeded and carries exactly the given
-- candidates and ballots; a 'Left' parse error fails the test outright.
checkElection :: Either String (Election Text)
              -> Set Text
              -> [Ballot Text]
              -> Assertion
checkElection (Left msg) _ _ = assertFailure (show msg)
checkElection (Right elec) cs bs = do
  assertEqual "candidates" (electionCandidates elec) cs
  assertEqual "ballots" (electionBallots elec) bs
|
cjerdonek/formal-rcv
|
src/extracted/tests/ElectSONUnitTest.hs
|
bsd-3-clause
| 1,577
| 0
| 11
| 322
| 404
| 219
| 185
| 46
| 1
|
{-# LANGUAGE RankNTypes #-}
module Llvm.Pass.Changer where
import Llvm.Hir.Data
-- | A record of rewriting functions applied over an IR, one field per kind
-- of syntactic element that can be changed (global ids are mapped g -> h).
data Changer g h = Changer { change_GlobalId :: g -> h
                           , change_LocalId :: Lname -> Lname
                           , change_Label :: Label -> Label
                           , change_Const :: Const h -> Const h
                           , change_Ftype :: Type CodeFunB X -> Type CodeFunB X
                           , change_ParamsForDefine :: [FunOperand Lname] -> [FunOperand Lname]
                           , change_ParamsForDeclare :: [FunOperand ()] -> [FunOperand ()]
                           , change_ParamsForCall :: [FunOperand (Value h)] -> [FunOperand (Value h)]
                           }
-- | The identity changer: every field is 'id', so applying it leaves the
-- IR untouched.  Override individual fields to build a specific rewrite.
defaultChanger :: Changer g g
defaultChanger = Changer { change_GlobalId = id
                         , change_LocalId = id
                         , change_Label = id
                         , change_Const = id -- non-recursive version
                         , change_Ftype = id
                         , change_ParamsForDefine = id
                         , change_ParamsForDeclare = id
                         , change_ParamsForCall = id
                         }
|
mlite/hLLVM
|
src/Llvm/Pass/Changer.hs
|
bsd-3-clause
| 1,268
| 0
| 13
| 611
| 239
| 139
| 100
| 20
| 1
|
{-# LANGUAGE RankNTypes #-}
module Graphics.Gloss.Internals.Interface.Game
( playWithBackendIO
, Event(..) )
where
import Graphics.Gloss.Data.Color
import Graphics.Gloss.Data.Picture
import Graphics.Gloss.Data.ViewPort
import Graphics.Gloss.Rendering
import Graphics.Gloss.Internals.Interface.Event
import Graphics.Gloss.Internals.Interface.Backend
import Graphics.Gloss.Internals.Interface.Window
import Graphics.Gloss.Internals.Interface.Common.Exit
import Graphics.Gloss.Internals.Interface.ViewState.Reshape
import Graphics.Gloss.Internals.Interface.Animate.Timing
import Graphics.Gloss.Internals.Interface.Simulate.Idle
import qualified Graphics.Gloss.Internals.Interface.Callback as Callback
import qualified Graphics.Gloss.Internals.Interface.Simulate.State as SM
import qualified Graphics.Gloss.Internals.Interface.Animate.State as AN
import Data.IORef
import System.Mem
-- | Drive the game loop on a given backend: set up simulation, world, view
-- and render state refs, then install display/idle/input callbacks and
-- open the window.
playWithBackendIO
        :: forall world a
        .  Backend a
        => a          -- ^ Initial state of the backend
        -> Display    -- ^ Display mode.
        -> Color      -- ^ Background color.
        -> Int        -- ^ Number of simulation steps to take for each second of real time.
        -> world      -- ^ The initial world.
        -> (world -> IO Picture)
                -- ^ A function to convert the world to a picture.
        -> (Event -> world -> IO world)
                -- ^ A function to handle input events.
        -> (Float -> world -> IO world)
                -- ^ A function to step the world one iteration.
                --   It is passed the period of time (in seconds) needing to be advanced.
        -> Bool       -- ^ Whether to use the callback_exit or not.
        -> IO ()
playWithBackendIO
        backend
        display
        backgroundColor
        simResolution
        worldStart
        worldToPicture
        worldHandleEvent
        worldAdvance
        withCallbackExit
 = do
        let singleStepTime = 1
        -- make the simulation state
        stateSR <- newIORef $ SM.stateInit simResolution
        -- make a reference to the initial world
        worldSR <- newIORef worldStart
        -- make the initial GL view and render states
        viewSR <- newIORef viewPortInit
        animateSR <- newIORef AN.stateInit
        renderS_ <- initState
        renderSR <- newIORef renderS_
        let displayFun backendRef
             = do
                -- convert the world to a picture
                world <- readIORef worldSR
                picture <- worldToPicture world
                -- display the picture in the current view
                renderS <- readIORef renderSR
                viewPort <- readIORef viewSR
                windowSize <- getWindowDimensions backendRef
                -- render the frame
                displayPicture
                        windowSize
                        backgroundColor
                        renderS
                        (viewPortScale viewPort)
                        (applyViewPortToPicture viewPort picture)
                -- perform GC every frame to try and avoid long pauses
                performGC
        -- display callbacks are bracketed by animate begin/end for timing
        let callbacks
             = [ Callback.Display (animateBegin animateSR)
               , Callback.Display displayFun
               , Callback.Display (animateEnd animateSR)
               , Callback.Idle (callback_simulate_idle
                        stateSR animateSR (readIORef viewSR)
                        worldSR (\_ -> worldAdvance)
                        singleStepTime)
               , callback_keyMouse worldSR viewSR worldHandleEvent
               , callback_motion worldSR worldHandleEvent
               , callback_reshape worldSR worldHandleEvent]
        let exitCallback
             = if withCallbackExit then [callback_exit ()] else []
        createWindow backend display backgroundColor $ callbacks ++ exitCallback
-- | Callback for KeyMouse events.
callback_keyMouse
        :: IORef world                  -- ^ ref to world state
        -> IORef ViewPort
        -> (Event -> world -> IO world) -- ^ fn to handle input events
        -> Callback
callback_keyMouse worldRef viewRef eventFn
        = KeyMouse (handle_keyMouse worldRef viewRef eventFn)
-- | Convert the raw backend key/mouse data to an 'Event', feed it to the
-- world's handler, and store the updated world.  The second argument is
-- unused here.
handle_keyMouse
        :: IORef a
        -> t
        -> (Event -> a -> IO a)
        -> KeyboardMouseCallback
handle_keyMouse worldRef _ eventFn backendRef key keyState keyMods pos
 = do  ev <- keyMouseEvent backendRef key keyState keyMods pos
       world <- readIORef worldRef
       world' <- eventFn ev world
       writeIORef worldRef world'
-- | Callback for Motion events.
callback_motion
        :: IORef world                  -- ^ ref to world state
        -> (Event -> world -> IO world) -- ^ fn to handle input events
        -> Callback
callback_motion worldRef eventFn
        = Motion (handle_motion worldRef eventFn)
-- | Convert a pointer-motion position to an 'Event', feed it to the
-- world's handler, and store the updated world.
handle_motion
        :: IORef a
        -> (Event -> a -> IO a)
        -> MotionCallback
handle_motion worldRef eventFn backendRef pos
 = do  ev <- motionEvent backendRef pos
       world <- readIORef worldRef
       world' <- eventFn ev world
       writeIORef worldRef world'
-- | Callback for Reshape events.
callback_reshape
        :: IORef world                  -- ^ ref to world state
        -> (Event -> world -> IO world) -- ^ fn to handle input events
        -> Callback
-- parameter renamed eventFN -> eventFn for consistency with the sibling
-- callback_keyMouse / callback_motion definitions
callback_reshape worldRef eventFn
        = Reshape (handle_reshape worldRef eventFn)
-- | Pass the new window size to the world's handler as an 'EventResize',
-- store the updated world, then let the view state react to the reshape.
handle_reshape
        :: IORef world
        -> (Event -> world -> IO world)
        -> ReshapeCallback
handle_reshape worldRef eventFn stateRef (width,height)
 = do  world <- readIORef worldRef
       world' <- eventFn (EventResize (width, height)) world
       writeIORef worldRef world'
       viewState_reshape stateRef (width, height)
|
ardumont/snake
|
deps/gloss/Graphics/Gloss/Internals/Interface/Game.hs
|
bsd-3-clause
| 6,030
| 0
| 18
| 2,101
| 1,080
| 567
| 513
| 126
| 2
|
module Wikirick.Backends.URLMapper
( module Wikirick.URLMapper
, initURLMapper
, initURLReceiver
) where
import qualified Data.ByteString.Lens as BL
import qualified Heist.Interpreted as I
import Snap
import System.FilePath
import Wikirick.Repository
import Wikirick.Import
import Wikirick.URLMapper
-- | Build a 'URLMapper' that renders application URLs beneath @basePath@:
-- articles map to @base/wiki/<title>@ and edit pages to
-- @base/wiki/<title>/edit@.
initURLMapper :: String -> URLMapper
initURLMapper basePath = URLMapper $ \url ->
  basePath </> case url of
    ArticlePath a -> "wiki" </> a ^. articleTitle . unpacked
    EditPath a -> "wiki" </> a ^. articleTitle . unpacked </> "edit"
-- | Wrap a 'URLMapper' in a snaplet exposing a Heist splice that renders a
-- URL as text and a handler action that redirects to a URL.
initURLReceiver :: URLMapper -> SnapletInit a URLReceiver
initURLReceiver mapper = makeSnaplet "urlmapper" "Provide a URL mapper" Nothing $ do
  return URLReceiver
    { _urlSplice = \url ->
        I.textSplice $ expandURL mapper url ^. packed
    , _redirectTo = \url ->
        redirect $ expandURL mapper url ^. BL.packedChars
    }
|
keitax/wikirick
|
src/Wikirick/Backends/URLMapper.hs
|
bsd-3-clause
| 889
| 0
| 14
| 169
| 234
| 128
| 106
| -1
| -1
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
-- | 'Cocoyoneda' gives a cofree cofunctor for any type constructor.
module Data.Cofunctor.Cocoyoneda
( Cocoyoneda(..)
, liftCocoyoneda
, lowerCocoyoneda
, hoistCocoyoneda
) where
import Data.Cofunctor (Cofunctor(..))
-- | Pairs an @f a@ with a function to be applied later; 'cofmap' only
-- composes onto the stored function and never touches the structure.
data Cocoyoneda f a where
    Cocoyoneda :: (a -> b) -> f a -> Cocoyoneda f b
instance Cofunctor (Cocoyoneda f) where
    cofmap f (Cocoyoneda g x) = Cocoyoneda (f . g) x
-- | Embed a value with the identity as the (so far trivial) stored map.
liftCocoyoneda :: f a -> Cocoyoneda f a
liftCocoyoneda = Cocoyoneda id
-- | Apply the accumulated function in one go via the underlying 'cofmap'.
lowerCocoyoneda :: Cofunctor f => Cocoyoneda f a -> f a
lowerCocoyoneda (Cocoyoneda f x) = cofmap f x
-- | Apply a natural transformation to the wrapped @f a@, keeping the
-- stored function unchanged.
hoistCocoyoneda :: (forall a. f a -> g a) -> Cocoyoneda f b -> Cocoyoneda g b
hoistCocoyoneda f (Cocoyoneda g x) = Cocoyoneda g (f x)
|
jaspervdj/acme-cofunctor
|
src/Data/Cofunctor/Cocoyoneda.hs
|
bsd-3-clause
| 774
| 0
| 9
| 160
| 260
| 137
| 123
| 18
| 1
|
module Config.Type(
Severity(..), Classify(..), HintRule(..), Note(..), Setting(..),
Restrict(..), RestrictType(..), RestrictIdents(..), SmellType(..),
defaultHintName, isUnifyVar, showNotes, getSeverity, getRestrictType, getSmellType
) where
import Data.Char
import Data.List.Extra
import Prelude
import qualified GHC.Hs
import Fixity
import GHC.Util
import Language.Haskell.GhclibParserEx.GHC.Hs.ExtendInstances
-- | Map a configuration keyword to its 'Severity'; unknown keywords give
-- 'Nothing'.  Note the synonyms: @warn@/@warning@, and
-- @suggest@/@suggestion@/@hint@.
getSeverity :: String -> Maybe Severity
getSeverity name = lookup name
    [("ignore", Ignore)
    ,("warn", Warning)
    ,("warning", Warning)
    ,("suggest", Suggestion)
    ,("suggestion", Suggestion)
    ,("error", Error)
    ,("hint", Suggestion)
    ]
-- | Map a configuration keyword to its 'RestrictType'; unknown keywords
-- give 'Nothing'.
getRestrictType :: String -> Maybe RestrictType
getRestrictType name = lookup name
    [("modules", RestrictModule)
    ,("extensions", RestrictExtension)
    ,("flags", RestrictFlag)
    ,("functions", RestrictFunction)
    ]
-- | Name used for a hint rule when the configuration gives none.
defaultHintName :: String
defaultHintName = "Use alternative"
-- | How severe an issue is.  The derived 'Ord' follows constructor order,
-- so 'Ignore' < 'Suggestion' < 'Warning' < 'Error'.
data Severity
  = Ignore -- ^ The issue has been explicitly ignored and will usually be hidden (pass @--show@ on the command line to see ignored ideas).
  | Suggestion -- ^ Suggestions are things that some people may consider improvements, but some may not.
  | Warning -- ^ Warnings are suggestions that are nearly always a good idea to apply.
  | Error -- ^ Available as a setting for the user. Only parse errors have this setting by default.
    deriving (Eq,Ord,Show,Read,Bounded,Enum)
-- | Is this name a unification variable in a hint rule?  A one-character
-- name qualifies when it is alphabetic or @?@; a longer name only when it
-- consists entirely of @?@ characters.  The empty string never qualifies.
isUnifyVar :: String -> Bool
isUnifyVar name = case name of
    []  -> False
    [c] -> c == '?' || isAlpha c
    _   -> all ('?' ==) name
---------------------------------------------------------------------
-- TYPE
-- | A note describing the impact of the replacement.
data Note
  = IncreasesLaziness -- ^ The replacement is increases laziness, for example replacing @reverse (reverse x)@ with @x@ makes the code lazier.
  | DecreasesLaziness -- ^ The replacement is decreases laziness, for example replacing @(fst x, snd x)@ with @x@ makes the code stricter.
  | RemovesError String -- ^ The replacement removes errors, for example replacing @foldr1 (+)@ with @sum@ removes an error on @[]@, and might contain the text @\"on []\"@.
  | ValidInstance String String -- ^ The replacement assumes standard type class lemmas, a hint with the note @ValidInstance \"Eq\" \"x\"@ might only be valid if
                                -- the @x@ variable has a reflexive @Eq@ instance.
  | RequiresExtension String -- ^ The replacement requires this extension to be available.
  | Note String -- ^ An arbitrary note.
    deriving (Eq,Ord)
-- Human-readable rendering of each note, used when reporting ideas.
instance Show Note where
    show IncreasesLaziness = "increases laziness"
    show DecreasesLaziness = "decreases laziness"
    show (RemovesError x) = "removes error " ++ x
    show (ValidInstance x y) = "requires a valid `" ++ x ++ "` instance for `" ++ y ++ "`"
    show (RequiresExtension x) = "may require `{-# LANGUAGE " ++ x ++ " #-}` adding to the top of the file"
    show (Note x) = x
-- | Render a list of notes as a comma-separated string, dropping
-- 'ValidInstance' notes which are not worth showing to an end user.
showNotes :: [Note] -> String
showNotes notes = intercalate ", " [show n | n <- notes, relevant n]
  where
    relevant ValidInstance{} = False -- Not important enough to tell an end user
    relevant _ = True
-- | How to classify an 'Idea'. If any matching field is @\"\"@ then it matches everything.
data Classify = Classify
    {classifySeverity :: Severity -- ^ Severity to set the 'Idea' to.
    ,classifyHint :: String -- ^ Match on 'Idea' field 'ideaHint'.
    ,classifyModule :: String -- ^ Match on 'Idea' field 'ideaModule'.
    ,classifyDecl :: String -- ^ Match on 'Idea' field 'ideaDecl'.
    }
    deriving Show
-- | A @LHS ==> RHS@ style hint rule.
data HintRule = HintRule
    {hintRuleSeverity :: Severity -- ^ Default severity for the hint.
    ,hintRuleName :: String -- ^ Name for the hint.
    ,hintRuleNotes :: [Note] -- ^ Notes about application of the hint.
    ,hintRuleScope :: Scope -- ^ Module scope in which the hint operates (GHC parse tree).
    -- We wrap these GHC elements in 'HsExtendInstances' in order that we may derive 'Show'.
    ,hintRuleLHS :: HsExtendInstances (GHC.Hs.LHsExpr GHC.Hs.GhcPs) -- ^ LHS (GHC parse tree).
    ,hintRuleRHS :: HsExtendInstances (GHC.Hs.LHsExpr GHC.Hs.GhcPs) -- ^ RHS (GHC parse tree).
    ,hintRuleSide :: Maybe (HsExtendInstances (GHC.Hs.LHsExpr GHC.Hs.GhcPs)) -- ^ Side condition (GHC parse tree).
    }
    deriving Show
-- | What kind of item a restriction applies to.
data RestrictType = RestrictModule | RestrictExtension | RestrictFlag | RestrictFunction deriving (Show,Eq,Ord)
data RestrictIdents
  = NoRestrictIdents -- No restrictions on module imports
  | ForbidIdents [String] -- Forbid importing the given identifiers from this module
  | OnlyIdents [String] -- Forbid importing all identifiers from this module, except the given identifiers
    deriving Show
-- NB: this instance is partial -- combining a ForbidIdents with an
-- OnlyIdents calls 'error', since the two policies cannot be merged.
instance Semigroup RestrictIdents where
  NoRestrictIdents <> ri = ri
  ri <> NoRestrictIdents = ri
  ForbidIdents x1 <> ForbidIdents y1 = ForbidIdents $ x1 <> y1
  OnlyIdents x1 <> OnlyIdents x2 = OnlyIdents $ x1 <> x2
  ri1 <> ri2 = error $ "Incompatible restrictions: " ++ show (ri1, ri2)
-- | A single restriction from the configuration file.
data Restrict = Restrict
    {restrictType :: RestrictType
    ,restrictDefault :: Bool
    ,restrictName :: [String]
    ,restrictAs :: [String] -- for RestrictModule only, what module names you can import it as
    ,restrictWithin :: [(String, String)]
    ,restrictIdents :: RestrictIdents -- for RestrictModule only, what identifiers can be imported from it
    ,restrictMessage :: Maybe String
    } deriving Show
-- | The code-smell checks that can be enabled with a threshold.
data SmellType = SmellLongFunctions | SmellLongTypeLists | SmellManyArgFunctions | SmellManyImports
    deriving (Show,Eq,Ord)
-- | Map a configuration keyword to its 'SmellType'; unknown keywords give
-- 'Nothing'.
getSmellType :: String -> Maybe SmellType
getSmellType name = lookup name
    [("long functions", SmellLongFunctions)
    ,("long type lists", SmellLongTypeLists)
    ,("many arg functions", SmellManyArgFunctions)
    ,("many imports", SmellManyImports)
    ]
-- | One entry from the configuration, after parsing.
data Setting
  = SettingClassify Classify
  | SettingMatchExp HintRule
  | SettingRestrict Restrict
  | SettingArgument String -- ^ Extra command-line argument
  | SettingSmell SmellType Int
  | Builtin String -- use a builtin hint set
  | Infix FixityInfo
    deriving Show
|
ndmitchell/hlint
|
src/Config/Type.hs
|
bsd-3-clause
| 6,459
| 0
| 14
| 1,240
| 1,165
| 659
| 506
| 110
| 2
|
-- | This module provides type synonyms used by models.
module Model.Types (
-- * SI units
Watt
, Second
, Joule
, KiloWattHour
, jouleToKiloWattHour
, kiloWattHourToJoule
) where
-- | Unit of time.
type Second = Rational
-- | Unit of power.
type Watt = Rational
-- | Unit of energy.
type Joule = Rational
-- | Unit of energy.
type KiloWattHour = Rational
-- | Converts from Joules to Kilo Watts per Hour.  One kilowatt-hour is
-- exactly 3 600 000 joules; 'Rational' arithmetic keeps this exact.
jouleToKiloWattHour :: Joule -> KiloWattHour
jouleToKiloWattHour j = j / 3600000
-- | Converts from Kilo Watts per Hour to Joules.
kiloWattHourToJoule :: KiloWattHour -> Joule
kiloWattHourToJoule kwh = kwh * 3600000
|
redelmann/e-zimod-server
|
Model/Types.hs
|
bsd-3-clause
| 654
| 0
| 5
| 144
| 94
| 63
| 31
| 15
| 1
|
--------------------------------------------------------------------
-- |
-- Module : Text.PDF.Document
-- Description : Functions for manipulating PDF content.
-- Copyright : (c) Dylan McNamee, 2008, 2009, 2011
-- License : BSD3
--
-- Maintainer: Dylan McNamee <dylan@galois.com>
-- Stability : provisional
-- Portability: portable
--
-- Data functions for constructing PDF documents in Haskell.
--------------------------------------------------------------------
module Text.PDF.Document where
import Data.Map as Map
import qualified Data.Traversable as T
import qualified Control.Monad.State as State
import Text.PDF.Types hiding ( parent, dictMap )
import Text.PDF.Utils
-- import Text.PDF.Parser
import System.IO
import Data.Maybe
-- | The file header emitted at the start of every generated PDF 1.4 document.
header14 :: String
header14 = "%PDF-1.4\n"
-- | @unDigestDocument dp@ takes a parsed document and turns all of the parsed
-- objects into generic objects. Most particularly, PDFPageParsed into PDFDict's
-- by inserting the salient page attributes into their stylized key/value pairs
-- (like Contents, MediaBox, etc.)
unDigestDocument :: PDFDocumentParsed -> PDFTreeExploded
unDigestDocument (PDFDocumentParsed parsedPageList {- globals -} ) =
    -- previously this wrapped the list in a PDFArray only to pattern-match
    -- it straight back out; pass the page list directly instead
    catalogDictFromArray (Prelude.map unParsePage parsedPageList)
-- | @flattenDocument treeObj@ takes @treeObj@, a nested representation of a PDF document tree, and
-- flattens it by replacing nesting with PDF references to an object list it accumulates as it goes.
-- In theory, the exploded tree is "very close" to a valid PDF structure, because nesting is
-- okay according to the spec. However, flattening objects gives us the opportunity to discover
-- identical items and collapse them, and also, for some crazy reason, PDFStreams have to be
-- referenced inside of a page - they can't be nested.
flattenDocument :: PDFObject -> PDFObjectTreeFlattened
flattenDocument root = (PDFObjectTreeFlattened newRoot unNestState) where
    -- the accumulated object map starts empty and is threaded via State
    (newRoot, unNestState) = State.runState (traverseAndUnNest root) Map.empty
-- | Build a parsed page record from a content stream, a media box and a
-- resource dictionary.  The crop box is initialised to the media box.
newPage :: PDFObject -> PDFBox -> PDFDictionaryMap -> PDFPageParsed
newPage stream mbox rscDict = PDFPageParsed {
        contents = stream,
        resources = rscDict,
        mediaBox = mbox,
        cropBox = mbox
    }
-- | @printFlatTree file objectTree@ is the pretty printer that actually produces valid PDF
-- from @objectTree@ on to the file handle @file@ (when we're lucky).
-- The header is written first so the object offsets computed by
-- 'printFlatTree'' start after it ('prefixLen').
printFlatTree :: Handle -> PDFObjectTreeFlattened -> IO PDFObjectTreeFlattened
printFlatTree h d@(PDFObjectTreeFlattened _ _) = do
    let prefixLen = length header14
    hPutStr h header14
    let d' = enpointerifyRoot d
    -- previously bound the result and immediately 'return'ed it;
    -- the call itself is the final action
    printFlatTree' h d' (ObjectIndices []) prefixLen 1
-- appendPage pageTree newPage returns a new Page Tree with newPage appended to the end
appendPage :: PDFObject -> PDFObject -> PDFObject
appendPage (PDFArray pages) pg = PDFArray (pages ++ [pg])
appendPage bad1 bad2 = PDFArray [PDFError "BadCallToAppendPage with:", bad1, bad2]
-- eventually want to balance the tree of pages
-- deletePage pageTree n returns a new Page Tree with the nth page deleted
-- Still a stub: raise a located error instead of a bare 'undefined'
-- so a caller hitting it gets an actionable message.
deletePage :: PDFObject -> Int -> PDFObject
deletePage _ _ = error "Text.PDF.Document.deletePage: not implemented"
-- insertPage pageTree n newPage returns a new Page Tree with newPage inserted at page N
-- Still a stub: raise a located error instead of a bare 'undefined'
-- so a caller hitting it gets an actionable message.
insertPage :: PDFObject -> Int -> PDFObject -> PDFObject
insertPage _ _ _ = error "Text.PDF.Document.insertPage: not implemented"
-- | Wrap a list of page objects in a document catalog dictionary whose
-- \"Pages\" entry is a page tree built from that list.
catalogDictFromArray :: [PDFObject] -> PDFObject
catalogDictFromArray objs = PDFDict catalogMap
  where
    catalogMap = fromList
        [ (PDFKey "Type",  PDFSymbol "Catalog")
        , (PDFKey "Pages", pageTreeFromArray objs)
        ]
-- build a simple page tree from an array of page object references
-- TODO: build a tree with > depth 1 for big documents. Where's the guidance on this?
pageTreeFromArray :: [PDFObject] -> PDFObject
pageTreeFromArray kids = PDFDict (fromList
    [ (PDFKey "Type",  PDFSymbol "Pages")
    , (PDFKey "Count", PDFInt (length kids))
    , (PDFKey "Kids",  PDFArray kids)
    ])
-- | Turn a parsed page back into its generic dictionary form, writing the
-- stylized /Type, /Resources, /Contents, /MediaBox and /CropBox entries.
unParsePage :: PDFPageParsed -> PDFObject
unParsePage pg = PDFDict (fromList entries)
  where
    entries =
        [ (PDFKey "Type",      PDFSymbol "Page")
        , (PDFKey "Resources", PDFDict (resources pg))
        , (PDFKey "Contents",  contents pg)
        , (PDFKey "MediaBox",  boxToPDFObject (mediaBox pg))
        , (PDFKey "CropBox",   boxToPDFObject (cropBox pg))
        ]
-- | A 'Quad' becomes the four-element PDF rectangle array;
-- anything else becomes 'PDFNull'.
boxToPDFObject :: PDFBox -> PDFObject
boxToPDFObject (Quad a b c d) = PDFArray (Prelude.map PDFInt [a, b, c, d])
boxToPDFObject _              = PDFNull
-- the Unparsing functions. It's nice how short this is.
-- | Render a 'PDFObject' in PDF file syntax.  Every byte this emits
-- ends up in the output file, so the exact spacing matters.
showPDFObject :: PDFObject -> String
showPDFObject (PDFString s) = "(" ++ (escapeString s) ++ ")"
showPDFObject (PDFSymbol s) = "/" ++ s
showPDFObject (PDFDict m) =
    "<<" ++ (foldrWithKey showKeyObject "" m) ++ " >>"
showPDFObject (PDFFloat f) = (show f)
showPDFObject (PDFInt i) = (show i)
showPDFObject (PDFArray a) =
    "[" ++ (foldl showArrayObject "" a) ++ "]"
showPDFObject (PDFReference n g) = (show n) ++ " " ++ (show g) ++ " R"
-- a stream is rendered as its length dictionary followed by the
-- stream/endstream keywords wrapping the raw bytes
showPDFObject (PDFStream s) =
    showPDFObject (PDFDict (fromList [((PDFKey "Length"),(PDFInt (length s)))])) ++
    "\nstream\n" ++ s ++ "\nendstream"
showPDFObject (PDFNull) = "null"
showPDFObject (PDFPageRaw obj) = (showPDFObject obj)
showPDFObject (PDFError str) = (show str)
showPDFObject (PDFComment str) = "%" ++ (show str)
showPDFObject (PDFXObject _) = ("pdfxobject ??")
-- NOTE(review): this catch-all aborts on any remaining constructor;
-- the message text is stale (it predates the cases above)
showPDFObject _ = error "can't showPDFObject a non PDFString object (yet)"
-- | Fold step for rendering one dictionary entry: append
-- \"/key value\\n\" to the accumulated string.
showKeyObject :: PDFKey -> PDFObject -> String -> String
showKeyObject (PDFKey key) obj acc =
    acc ++ "/" ++ key ++ " " ++ showPDFObject obj ++ "\n"
-- | Fold step for rendering one array element, separated by a space.
showArrayObject :: String -> PDFObject -> String
showArrayObject acc obj = acc ++ showPDFObject obj ++ " "
-- Components for creating a PDF document
-- | Build a Type1 font dictionary for the font called @name@, registered
-- under the resource shortcut @shortcut@, using MacRomanEncoding.
fontDict :: String -> String -> PDFObject
fontDict name shortcut = PDFDict (fromList
    [ (PDFKey "Type",     PDFSymbol "Font")
    , (PDFKey "Subtype",  PDFSymbol "Type1")
    , (PDFKey "BaseFont", PDFSymbol name)
    , (PDFKey "Name",     PDFSymbol shortcut)
    , (PDFKey "Encoding", PDFSymbol "MacRomanEncoding")
    ])
-- | Register a font under a shortcut in an existing font dictionary and
-- return the extended dictionary.
addFontToDict :: String -> String -> PDFObject -> PDFObject
addFontToDict fontName shortcut dict =
    addToDict dict shortcut (fontDict fontName shortcut)
-- someday silently don't add this if this shortcut's already in the dict
-- | The page-building DSL: a State monad carrying the builder state below.
type PDF = State.State PDFState

-- | Accumulated state for the embedded page-description DSL.
data PDFState =
    PDFState {
        -- masterDocument :: PDFObjectTreeFlattened,
        streamAccum :: PDFObject,       -- content stream of the page being built
        rsrcDict :: PDFObject,          -- resource dictionary for the current page
        fontsDict :: PDFObject,         -- fonts registered on the current page
        pagesArray :: [PDFPageParsed]   -- completed pages, in document order
    }
-- a wrapper for put to make my silly mistakes result in sensible errors (thanks, sof!)
-- (its monomorphic signature pins the state type, catching misuse at compile time)
putPDF :: PDFState -> PDF ()
putPDF = State.put
-- TODO: provide a "withPage" combinator that wraps beginPage/endPage
-- around a group of PDF () operations.

-- | Run a page-building program from the empty initial state and package
-- the pages it produced as a parsed document.  'State.execState' discards
-- the unit result and keeps only the final 'PDFState'.
rundoc :: PDF () -> PDFDocumentParsed
rundoc prog =
    PDFDocumentParsed { pageList = pagesArray finalState
                      -- , globals = error "undefined globals"
                      }
  where
    finalState = State.execState prog newPDFState
-- | Append a string to the contents of a 'PDFStream'; any other
-- object yields a 'PDFError'.
appendStream :: PDFObject -> String -> PDFObject
appendStream (PDFStream existing) extra = PDFStream (existing ++ extra)
appendStream _ _ = PDFError "Non-stream argument to appendStream"
-- | The empty builder state: no content, no resources, no fonts, no pages.
newPDFState :: PDFState
newPDFState = PDFState
    { streamAccum = PDFStream ""
    , rsrcDict    = PDFDict Map.empty
    , fontsDict   = PDFDict Map.empty
    , pagesArray  = []
    }
-- todo: I like putting the media box as an arg to beginPage. HMMMM.
-- | Start a fresh page: reset the content stream (opening a text object
-- with "BT ") and clear the page's resource dictionary.
beginPage :: PDF ()
beginPage = State.modify $ \st ->
    st { streamAccum = PDFStream "BT "
       , rsrcDict    = PDFDict Map.empty
       }
-- at the end of the page, we complete the page's resource dictionary,
-- add the page to the end of the document, and reset streamAccum
-- Andy A-M took one look at this code and said "get out of the Monad, dude"
-- too many lets are an indication that you belong in pure code. Okay!
endPage :: PDF ()
endPage = do
    myState <- State.get
    -- close the text object opened by beginPage
    let s' = appendStream (streamAccum myState) " ET"
    -- final page resources = accumulated dict + ProcSet + Font entries;
    -- partial match on PDFDict here relies on addToDict returning a dict
    let (PDFDict pageRsrc) = addToDict
            (addToDict (rsrcDict myState) "ProcSet" globalProcSet)
            "Font" (fontsDict myState)
    -- append the finished page and reset all per-page accumulators
    let myState' = myState {
        streamAccum = (PDFStream ""),
        rsrcDict = PDFDict (fromList []),
        fontsDict = PDFDict (fromList []),
        pagesArray = (pagesArray myState) ++ [(newPage s' globalPageBox pageRsrc)]
    }
    putPDF (myState')
-- | Finish the document.  There is currently no end-of-document
-- bookkeeping, so this is a no-op; the original read the state and
-- wrote it straight back, which is semantically identical.
endDocument :: PDF ()
endDocument = return ()
-- Now for a bunch of imaging operations.
-- | Emit a "Td" operator moving the text cursor to (x, y).
moveTo :: Int -> Int -> PDF ()
moveTo x y = do
    st <- State.get
    let op = " " ++ show x ++ " " ++ show y ++ " Td "
    putPDF st { streamAccum = appendStream (streamAccum st) op }
-- | Emit a "Tj" operator drawing the literal string @s@ at the cursor.
-- NOTE(review): @s@ is not escaped here; parentheses in @s@ would
-- corrupt the stream -- confirm callers pass clean text.
printString :: String -> PDF ()
printString s = do
    st <- State.get
    let op = "(" ++ s ++ ") Tj"
    putPDF st { streamAccum = appendStream (streamAccum st) op }
-- | Emit a "Tf" operator selecting @shortcut@ at @fontSize@, and register
-- the font under that shortcut in the page's font dictionary.
setFont :: String -> String -> Int -> PDF ()
setFont name shortcut fontSize = do
    st <- State.get
    let op = "/" ++ shortcut ++ " " ++ show fontSize ++ " Tf"
    State.put st { streamAccum = appendStream (streamAccum st) op
                 , fontsDict   = addFontToDict name shortcut (fontsDict st)
                 }
-- | Procedure sets advertised in every page's resource dictionary;
-- PDF and Text cover the operators this builder emits.
globalProcSet :: PDFObject
globalProcSet = PDFArray [(PDFSymbol "PDF"), (PDFSymbol "Text") ]

-- | Fixed media box used for every generated page (300x300 points).
globalPageBox :: PDFBox
globalPageBox = Quad 0 0 300 300
-- The state monad is helping us thread the accumulated ObjectMap through the flattening process
-- without having to pass it all over the place.
type UnNest = State.State PDFObjectMap

-- | Start un-nesting at the document root.  The root's children are
-- replaced by references; the root itself is kept inline here, and its
-- "parent" is 'PDFNull' since it has none.
traverseAndUnNest :: PDFObject -> UnNest PDFObject
traverseAndUnNest (PDFArray objs) = PDFArray `fmap` mapM (enPointerify PDFNull) objs
traverseAndUnNest (PDFDict myDict) = PDFDict `fmap` T.mapM (enPointerify PDFNull) myDict
traverseAndUnNest a = (enPointerify PDFNull) a
-- | Recursively replace nested objects with references into the
-- accumulating object map.  @parent@ is the reference to insert as the
-- /Parent of page-tree dictionaries encountered below.
enPointerify :: PDFObject -> PDFObject -> UnNest PDFObject
enPointerify parent (PDFArray objs) = do
    objs' <- mapM (enPointerify parent) objs -- could also use T.mapM here
    -- arrays longer than 4 elements are floated out as indirect objects
    -- (an arbitrary size heuristic); short ones stay inline
    case (length objs > 4) of
        True -> do
            reference (PDFArray objs')
        False -> return (PDFArray objs')
enPointerify parent node@(PDFDict objs) = do
    myReference <- reference PDFNull -- get a placeholder reference for my kids' "parent" reference
    objs' <- T.mapM (enPointerify myReference) objs
    -- only page-tree nodes (/Type /Page or /Pages) get a Parent entry
    let objs'' = case (isPageTreeNode node) of
            True -> addParentPointer objs' parent
            False -> objs'
            -- False -> (Map.insert (PDFKey "NOTPARENT") (PDFString (show node)) objs')
    -- overwrite the placeholder slot with the finished dictionary
    clobberReference (PDFDict objs'') myReference
-- ok, this is wack: if I don't "enpointerify" streams, it's not a valid PDF.
-- I'm having a hard time finding where this is stated in the spec. Sigh. that's
-- a day of my life I'd like back. :-/
enPointerify _parent str@(PDFStream _) = do
    reference str
enPointerify _parent o = return o
-- | True when a dictionary is a node of the page tree, i.e. its
-- /Type entry is /Page or /Pages.  Only such nodes get a Parent link.
isPageTreeNode :: PDFObject -> Bool
isPageTreeNode (PDFDict dictMap) =
    case Map.lookup (PDFKey "Type") dictMap of
        Just (PDFSymbol sym) -> sym == "Page" || sym == "Pages"
        _                    -> False
isPageTreeNode _ = False
-- | Insert a /Parent reference into a page dictionary's map; a
-- 'PDFNull' parent (the root has none) leaves the map untouched.
addParentPointer :: PDFDictionaryMap -> PDFObject -> PDFDictionaryMap
addParentPointer dict PDFNull   = dict
addParentPointer dict parentRef = Map.insert (PDFKey "Parent") parentRef dict
-- | Add a /Parent entry to a dictionary object.
-- (Partial: any non-dictionary argument fails to match, as before.)
addParent :: PDFObject -> PDFObject -> PDFObject
addParent dict@(PDFDict _) parentRef = addToDict dict "Parent" parentRef
-- | Add @obj@ to the accumulating object map and hand back an
-- indirect reference to its slot.
reference :: PDFObject -> UnNest PDFObject
reference obj = do
    objMap <- State.get
    let (objMap', ref) = addObjectGetRef objMap obj
    State.put objMap'
    return ref
-- | Overwrite the slot named by @ref@ with @object@, returning @ref@.
-- Used to fill in placeholder slots allocated before a dict's children
-- were processed.
clobberReference :: PDFObject -> PDFObject -> UnNest PDFObject
clobberReference object ref = do
    State.modify (\objMap -> clobberObjectWithRef objMap object ref)
    return ref
-- | Add an object to a flattened document's object map, returning the
-- new reference and the updated document.
addDocObjectGetRef :: PDFObject -> PDFObjectTreeFlattened -> (PDFObject, PDFObjectTreeFlattened)
addDocObjectGetRef obj (PDFObjectTreeFlattened root objMap) =
    (ref, PDFObjectTreeFlattened root objMap')
  where
    (objMap', ref) = addObjectGetRef objMap obj
-- this should be in my monad for recursivelyUnNest
-- | Insert an object at the next free slot (size + 1) and return the
-- reference to it; an object that is already a reference is returned
-- unchanged so we never create references to references.
addObjectGetRef :: PDFObjectMap -> PDFObject -> (PDFObjectMap, PDFObject)
addObjectGetRef objMap ref@(PDFReference _ _) = (objMap, ref)
addObjectGetRef objMap obj = (Map.insert objNum obj objMap, PDFReference objNum 0)
  where
    objNum = Map.size objMap + 1
-- | Replace the slot a reference points at with a new object.
clobberObjectWithRef :: PDFObjectMap -> PDFObject -> PDFObject -> PDFObjectMap
clobberObjectWithRef objMap newObject (PDFReference slot _) =
    Map.insert slot newObject objMap
clobberObjectWithRef _ _ _ =
    error ("internal error: bad args to clobberObjectWithRef")
-- | Move the root dictionary itself into the object map, leaving a
-- reference in its place (the trailer's /Root must be indirect).
enpointerifyRoot :: PDFObjectTreeFlattened -> PDFObjectTreeFlattened
enpointerifyRoot (PDFObjectTreeFlattened rootDict objMap) =
    PDFObjectTreeFlattened rootRef objMap'
  where
    (objMap', rootRef) = addObjectGetRef objMap rootDict
-- | Byte offsets (from the start of the file) of each emitted object,
-- in object-number order; these become the xref table.
data ObjectIndices = ObjectIndices [Int] deriving (Show)

-- empties the PDFObjectList, printing each object as we go, adding
-- its offset in the output file to "ObjectIndices"
-- @currIx@ is the running byte offset; @objNum@ the next object to emit.
printFlatTree' :: Handle -> PDFObjectTreeFlattened -> ObjectIndices -> Int -> Int -> IO PDFObjectTreeFlattened
printFlatTree'
    h
    (PDFObjectTreeFlattened _a objectMap )
    (ObjectIndices ixs)
    currIx
    objNum = case (mapSize >= objNum) of
        True -> do
            hPutStr h (prefixStr)
            hPutStr h (str)
            hPutStr h (postFixStr)
            -- record this object's offset and advance past its bytes
            printFlatTree' h (PDFObjectTreeFlattened _a objectMap ) (ObjectIndices (ixs ++ [currIx])) newIx (objNum + 1)
        -- all objects emitted: hand off to the xref/trailer writer
        False -> printFlatTree'' h (PDFObjectTreeFlattened _a objectMap ) (ObjectIndices ixs) currIx
        where
            mapSize = Map.size objectMap
            o = fromMaybe (PDFError ("Unable to lookup object # " ++ (show objNum))) (Map.lookup objNum objectMap)
            prefixStr = (show (objNum)) ++ " 0 obj\n"
            str = showPDFObject (o)
            postFixStr = "\nendobj\n"
            -- offset bookkeeping must match exactly what was written above
            newIx = currIx + length str + length prefixStr + length postFixStr
-- empty PDFObjectList -> transition from printing objects to
-- printing the xref table, which is the list of object indices
-- so this function prints the xref table header, kicks off the
-- xref table output, then prints the trailer dict and trailer
printFlatTree'' :: Handle -> PDFObjectTreeFlattened -> ObjectIndices -> Int -> IO PDFObjectTreeFlattened
printFlatTree''
    h
    inDoc@(PDFObjectTreeFlattened rootRef _ )
    (ObjectIndices (ixs))
    currIx = do
        printXRefIndexes h ixs 0
        -- +1 accounts for the mandatory free object 0 in the xref table
        let numObjects = 1 + length ixs
        -- currIx is now the byte offset of the xref table itself
        printTrailer h rootRef numObjects currIx
        return inDoc
-- | Write the xref table: the header and free-object-0 line (when the
-- counter is 0), then one "nnnnnnnnnn 00000 n " line per object offset.
-- NOTE: clause order matters -- the empty-list clause fires first, so an
-- empty index list writes no xref header at all (never happens in
-- practice, since the root is always present -- TODO confirm).
printXRefIndexes :: Handle -> [Int] -> Int -> IO ()
printXRefIndexes _ [] _ = do
    return ()
printXRefIndexes h a 0 = do
    hPutStrLn h ("xref\n" ++ "0 " ++ show (1 + length a))
    -- object 0 is the head of the free list, per the xref table format
    hPutStrLn h ((padTo "" 10) ++ " 65535 f ")
    printXRefIndexes h a 1
printXRefIndexes h (ix:ixs) n = do
    hPutStrLn h ((padTo (show ix) 10) ++ " 00000 n ")
    printXRefIndexes h ixs (n + 1)
-- | Write the trailer dictionary (/Root and /Size), the startxref
-- offset (@currIx@, the byte position of the xref table), and the
-- %%EOF marker.
printTrailer :: Handle -> PDFObject -> Int -> Int -> IO ()
printTrailer h rootRef numObjects currIx = do
    hPutStrLn h ("trailer")
    hPutStrLn h (showPDFObject ((trailerObj numObjects) rootRef ))
    hPutStrLn h ("startxref\n" ++ (show (currIx)))
    hPutStrLn h ("%%EOF")
    where
        trailerObj n rootR = PDFDict
            (fromList [((PDFKey "Root"), rootR),
                ((PDFKey "Size"), (PDFInt n))])
|
dylanmc/Haskell-PDF-Parsing-Library
|
Text/PDF/Document.hs
|
bsd-3-clause
| 16,708
| 0
| 18
| 3,650
| 4,199
| 2,181
| 2,018
| 274
| 4
|
{-# LANGUAGE OverloadedStrings #-}
module Network.Cloudprint (
addPrintJob
)where
import Control.Monad.IO.Class
import Data.ByteString
import qualified Data.ByteString.Char8 as B8
import qualified Data.Either.Validation as V
import Data.Monoid
import Network.Google.OAuth2
import Network.HTTP.Client.MultipartFormData
import Network.HTTP.Conduit
import Network.HTTP.Types (hAuthorization)
import Network.HTTP.Types.Status
-- Readable aliases for the raw ByteStrings passed to the Cloud Print API.
type PrinterId = ByteString        -- Cloud Print printer identifier
type PrintContentType = ByteString -- MIME type of the submitted document
type PrintJobName = ByteString     -- human-readable job title
type FileName = ByteString         -- NOTE(review): unused; 'addPrintJob' takes a FilePath instead
-- | Attach the Cloud Print submit form fields (printer id, job ticket,
-- title, content type, and the file body) to an existing 'Request'.
formData :: MonadIO m => PrinterId -- ^ Cloudprint ID for the printer
         -> PrintContentType -- ^ MIME type for the print job
         -> PrintJobName -- ^ Name of the print job
         -> FilePath -- ^ Fake filename reported for the uploaded file
         -> ByteString -- ^ File contents to print
         -> Request -- ^ Request to add the form data to
         -> m Request -- ^ Resulting request
formData printerId contentType name fn conts = formDataBody [
      -- partBS "xsrf" -- hidden text
      partBS "printerid" printerId -- hidden text
    , partBS "jobid" "" -- hidden text; empty means "new job"
    , partBS "ticket" "{\"version\": \"1.0\",\"print\": {}}" -- text; default print ticket
    , partBS "title" name
    , partBS "contentType" contentType
    , partFileRequestBody "content" fn $ RequestBodyBS conts -- file
    ]
-- | Add a print job to a given oauth session on google print.
-- Loads an OAuth2 access token from @tokenFp@, POSTs the multipart form
-- to the Cloud Print submit endpoint, and maps the outcome into a
-- 'V.Validation': 'V.Success' on HTTP 200, 'V.Failure' otherwise.
addPrintJob :: OAuth2Client
            -> FilePath -- ^ Filepath for the Oauth token
            -> PrinterId -- ^ Cloudprint ID for the printer
            -> PrintContentType -- ^ MIME type for the print job
            -> PrintJobName -- ^ Name of the print job
            -> FilePath -- ^ Fake filename for the uploaded file
            -> ByteString -- ^ File contents to print
            -> IO (V.Validation String ())
addPrintJob client tokenFp pid ctype name fn conts = do
  let addFd = formData pid ctype name fn conts
  token <- useAccessToken client tokenFp
  case token of
    (Just t) -> do
      -- NOTE(review): parseUrl/withManager are deprecated in newer
      -- http-conduit; kept as-is to match the pinned dependency.
      req <- (addFd . (authorize t)) =<< parseUrl "https://www.google.com/cloudprint/submit"
      resp <- withManager $ httpLbs req
      case responseStatus resp of
        (Status 200 _) -> return $ V.Success ()
        -- any non-200 response is reported whole, for diagnosis
        _ -> return $ V.Failure (show resp)
    Nothing -> return $ V.Failure "Failure to load token"
  where authorize token request = request
          { requestHeaders = [(hAuthorization, B8.pack $ "Bearer " <> token)] }
|
plow-technologies/cloudprint-api
|
src/Network/Cloudprint.hs
|
bsd-3-clause
| 2,967
| 0
| 18
| 1,128
| 509
| 278
| 231
| 52
| 3
|
{-
(c) The University of Glasgow, 1994-2006
Core pass to saturate constructors and PrimOps
-}
{-# LANGUAGE BangPatterns, CPP #-}
module CorePrep (
corePrepPgm, corePrepExpr, cvtLitInteger,
lookupMkIntegerName, lookupIntegerSDataConName
) where
#include "HsVersions.h"
import OccurAnal
import HscTypes
import PrelNames
import MkId ( realWorldPrimId )
import CoreUtils
import CoreArity
import CoreFVs
import CoreMonad ( CoreToDo(..) )
import CoreLint ( endPassIO )
import CoreSyn
import CoreSubst
import MkCore hiding( FloatBind(..) ) -- We use our own FloatBind here
import Type
import Literal
import Coercion
import TcEnv
import TyCon
import Demand
import Var
import VarSet
import VarEnv
import Id
import IdInfo
import TysWiredIn
import DataCon
import PrimOp
import BasicTypes
import Module
import UniqSupply
import Maybes
import OrdList
import ErrUtils
import DynFlags
import Util
import Pair
import Outputable
import Platform
import FastString
import Config
import Name ( NamedThing(..), nameSrcSpan )
import SrcLoc ( SrcSpan(..), realSrcLocSpan, mkRealSrcLoc )
import Data.Bits
import MonadUtils ( mapAccumLM )
import Data.List ( mapAccumL )
import Control.Monad
{-
-- ---------------------------------------------------------------------------
-- Overview
-- ---------------------------------------------------------------------------
The goal of this pass is to prepare for code generation.
1. Saturate constructor and primop applications.
2. Convert to A-normal form; that is, function arguments
are always variables.
* Use case for strict arguments:
f E ==> case E of x -> f x
(where f is strict)
* Use let for non-trivial lazy arguments
f E ==> let x = E in f x
(where f is lazy and x is non-trivial)
3. Similarly, convert any unboxed lets into cases.
[I'm experimenting with leaving 'ok-for-speculation'
rhss in let-form right up to this point.]
4. Ensure that *value* lambdas only occur as the RHS of a binding
(The code generator can't deal with anything else.)
Type lambdas are ok, however, because the code gen discards them.
5. [Not any more; nuked Jun 2002] Do the seq/par munging.
6. Clone all local Ids.
This means that all such Ids are unique, rather than the
weaker guarantee of no clashes which the simplifier provides.
And that is what the code generator needs.
We don't clone TyVars or CoVars. The code gen doesn't need that,
and doing so would be tiresome because then we'd need
to substitute in types and coercions.
7. Give each dynamic CCall occurrence a fresh unique; this is
rather like the cloning step above.
8. Inject bindings for the "implicit" Ids:
* Constructor wrappers
* Constructor workers
We want curried definitions for all of these in case they
aren't inlined by some caller.
9. Replace (lazy e) by e. See Note [lazyId magic] in MkId.hs
10. Convert (LitInteger i t) into the core representation
for the Integer i. Normally this uses mkInteger, but if
we are using the integer-gmp implementation then there is a
special case where we use the S# constructor for Integers that
are in the range of Int.
11. Uphold tick consistency while doing this: We move ticks out of
(non-type) applications where we can, and make sure that we
annotate according to scoping rules when floating.
This is all done modulo type applications and abstractions, so that
when type erasure is done for conversion to STG, we don't end up with
any trivial or useless bindings.
Invariants
~~~~~~~~~~
Here is the syntax of the Core produced by CorePrep:
Trivial expressions
triv ::= lit | var
| triv ty | /\a. triv
| triv co | /\c. triv | triv |> co
Applications
app ::= lit | var | app triv | app ty | app co | app |> co
Expressions
body ::= app
| let(rec) x = rhs in body -- Boxed only
| case body of pat -> body
| /\a. body | /\c. body
| body |> co
Right hand sides (only place where value lambdas can occur)
rhs ::= /\a.rhs | \x.rhs | body
We define a synonym for each of these non-terminals. Functions
with the corresponding name produce a result in that syntax.
-}
-- Synonyms for the output-grammar non-terminals described in the
-- overview above; functions named after one produce results in that
-- sub-syntax.
type CpeTriv = CoreExpr    -- Non-terminal 'triv'
type CpeApp  = CoreExpr    -- Non-terminal 'app'
type CpeBody = CoreExpr    -- Non-terminal 'body'
type CpeRhs  = CoreExpr    -- Non-terminal 'rhs'
{-
************************************************************************
* *
Top level stuff
* *
************************************************************************
-}
-- | Run CorePrep over a whole module: the user bindings plus the
-- implicit data-constructor worker bindings for the module's tycons.
corePrepPgm :: HscEnv -> ModLocation -> CoreProgram -> [TyCon] -> IO CoreProgram
corePrepPgm hsc_env mod_loc binds data_tycons = do
    let dflags = hsc_dflags hsc_env
    showPass dflags "CorePrep"
    us <- mkSplitUniqSupply 's'
    initialCorePrepEnv <- mkInitialCorePrepEnv dflags hsc_env

    let implicit_binds = mkDataConWorkers dflags mod_loc data_tycons
        -- NB: we must feed mkImplicitBinds through corePrep too
        -- so that they are suitably cloned and eta-expanded

        binds_out = initUs_ us $ do
                      floats1 <- corePrepTopBinds initialCorePrepEnv binds
                      floats2 <- corePrepTopBinds initialCorePrepEnv implicit_binds
                      return (deFloatTop (floats1 `appendFloats` floats2))

    endPassIO hsc_env alwaysQualify CorePrep binds_out []
    return binds_out
-- | Run CorePrep over a single expression (used e.g. for GHCi),
-- producing a CpeBody and dumping it when -ddump-prep is on.
corePrepExpr :: DynFlags -> HscEnv -> CoreExpr -> IO CoreExpr
corePrepExpr dflags hsc_env expr = do
    showPass dflags "CorePrep"
    uniq_supply <- mkSplitUniqSupply 's'
    prep_env <- mkInitialCorePrepEnv dflags hsc_env
    let prepped_expr = initUs_ uniq_supply (cpeBodyNF prep_env expr)
    dumpIfSet_dyn dflags Opt_D_dump_prep "CorePrep" (ppr prepped_expr)
    return prepped_expr
corePrepTopBinds :: CorePrepEnv -> [CoreBind] -> UniqSM Floats
-- Note [Floating out of top level bindings]
-- The environment is threaded left-to-right so each binding sees the
-- clones introduced by the bindings before it.
corePrepTopBinds initialCorePrepEnv binds
  = go initialCorePrepEnv binds
  where
    go _   []             = return emptyFloats
    go env (bind : binds) = do (env', bind') <- cpeBind TopLevel env bind
                               binds' <- go env' binds
                               return (bind' `appendFloats` binds')
-- | Build the curried worker binding @$wC = \\x y -> $wC x y@ for every
-- data constructor of the given tycons, optionally source-noted for
-- debug info.
mkDataConWorkers :: DynFlags -> ModLocation -> [TyCon] -> [CoreBind]
-- See Note [Data constructor workers]
-- c.f. Note [Injecting implicit bindings] in TidyPgm
mkDataConWorkers dflags mod_loc data_tycons
  = [ NonRec id (tick_it (getName data_con) (Var id))
    -- The ice is thin here, but it works
    | tycon <- data_tycons,     -- CorePrep will eta-expand it
      data_con <- tyConDataCons tycon,
      let id = dataConWorkId data_con
    ]
  where
    -- If we want to generate debug info, we put a source note on the
    -- worker. This is useful, especially for heap profiling.
    -- Fall back through: name's own span, the module's source file, "???".
    tick_it name
      | debugLevel dflags == 0               = id
      | RealSrcSpan span <- nameSrcSpan name = tick span
      | Just file <- ml_hs_file mod_loc      = tick (span1 file)
      | otherwise                            = tick (span1 "???")
      where tick span  = Tick (SourceNote span $ showSDoc dflags (ppr name))
            span1 file = realSrcLocSpan $ mkRealSrcLoc (mkFastString file) 1 1
{-
Note [Floating out of top level bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NB: we do need to float out of top-level bindings
Consider x = length [True,False]
We want to get
s1 = False : []
s2 = True : s1
x = length s2
We return a *list* of bindings, because we may start with
x* = f (g y)
where x is demanded, in which case we want to finish with
a = g y
x* = f a
And then x will actually end up case-bound
Note [CafInfo and floating]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
What happens when we try to float bindings to the top level? At this
point all the CafInfo is supposed to be correct, and we must make certain
that is true of the new top-level bindings. There are two cases
to consider
a) The top-level binding is marked asCafRefs. In that case we are
basically fine. The floated bindings had better all be lazy lets,
so they can float to top level, but they'll all have HasCafRefs
(the default) which is safe.
b) The top-level binding is marked NoCafRefs. This really happens
Example. CoreTidy produces
$fApplicativeSTM [NoCafRefs] = D:Alternative retry# ...blah...
Now CorePrep has to eta-expand to
$fApplicativeSTM = let sat = \xy. retry x y
in D:Alternative sat ...blah...
So what we *want* is
sat [NoCafRefs] = \xy. retry x y
$fApplicativeSTM [NoCafRefs] = D:Alternative sat ...blah...
So, gruesomely, we must set the NoCafRefs flag on the sat bindings,
*and* substitute the modified 'sat' into the old RHS.
It should be the case that 'sat' is itself [NoCafRefs] (a value, no
cafs) else the original top-level binding would not itself have been
marked [NoCafRefs]. The DEBUG check in CoreToStg for
consistentCafInfo will find this.
This is all very gruesome and horrible. It would be better to figure
out CafInfo later, after CorePrep. We'll do that in due course.
Meanwhile this horrible hack works.
Note [Data constructor workers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create any necessary "implicit" bindings for data con workers. We
create the rather strange (non-recursive!) binding
$wC = \x y -> $wC x y
i.e. a curried constructor that allocates. This means that we can
treat the worker for a constructor like any other function in the rest
of the compiler. The point here is that CoreToStg will generate a
StgConApp for the RHS, rather than a call to the worker (which would
give a loop). As Lennart says: the ice is thin here, but it works.
Hmm. Should we create bindings for dictionary constructors? They are
always fully applied, and the bindings are just there to support
partial applications. But it's easier to let them through.
Note [Dead code in CorePrep]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Imagine that we got an input program like this (see Trac #4962):
f :: Show b => Int -> (Int, b -> Maybe Int -> Int)
f x = (g True (Just x) + g () (Just x), g)
where
g :: Show a => a -> Maybe Int -> Int
g _ Nothing = x
g y (Just z) = if z > 100 then g y (Just (z + length (show y))) else g y unknown
After specialisation and SpecConstr, we would get something like this:
f :: Show b => Int -> (Int, b -> Maybe Int -> Int)
f x = (g$Bool_True_Just x + g$Unit_Unit_Just x, g)
where
{-# RULES g $dBool = g$Bool
g $dUnit = g$Unit #-}
g = ...
{-# RULES forall x. g$Bool True (Just x) = g$Bool_True_Just x #-}
g$Bool = ...
{-# RULES forall x. g$Unit () (Just x) = g$Unit_Unit_Just x #-}
g$Unit = ...
g$Bool_True_Just = ...
g$Unit_Unit_Just = ...
Note that the g$Bool and g$Unit functions are actually dead code: they
are only kept alive by the occurrence analyser because they are
referred to by the rules of g, which is being kept alive by the fact
that it is used (unspecialised) in the returned pair.
However, at the CorePrep stage there is no way that the rules for g
will ever fire, and it really seems like a shame to produce an output
program that goes to the trouble of allocating a closure for the
unreachable g$Bool and g$Unit functions.
The way we fix this is to:
* In cloneBndr, drop all unfoldings/rules
* In deFloatTop, run a simple dead code analyser on each top-level
RHS to drop the dead local bindings. For that call to OccAnal, we
disable the binder swap, else the occurrence analyser sometimes
introduces new let bindings for cased binders, which lead to the bug
in #5433.
The reason we don't just OccAnal the whole output of CorePrep is that
the tidier ensures that all top-level binders are GlobalIds, so they
don't show up in the free variables any longer. So if you run the
occurrence analyser on the output of CoreTidy (or later) you e.g. turn
this program:
Rec {
f = ... f ...
}
Into this one:
f = ... f ...
(Since f is not considered to be free in its own RHS.)
************************************************************************
* *
The main code
* *
************************************************************************
-}
-- | Prepare one binding group: clone the binders, prepare each RHS via
-- 'cpePair', and return the extended environment together with the
-- floats (including the binding itself) to emit.
cpeBind :: TopLevelFlag -> CorePrepEnv -> CoreBind
        -> UniqSM (CorePrepEnv, Floats)
cpeBind top_lvl env (NonRec bndr rhs)
  = do { (_, bndr1) <- cpCloneBndr env bndr
       ; let dmd         = idDemandInfo bndr
             is_unlifted = isUnliftedType (idType bndr)
       ; (floats, bndr2, rhs2) <- cpePair top_lvl NonRecursive
                                          dmd
                                          is_unlifted
                                          env bndr1 rhs
       ; let new_float = mkFloat dmd is_unlifted bndr2 rhs2
        -- We want bndr'' in the envt, because it records
        -- the evaluated-ness of the binder
       ; return (extendCorePrepEnv env bndr bndr2,
                 addFloat floats new_float) }

cpeBind top_lvl env (Rec pairs)
  = do { let (bndrs,rhss) = unzip pairs
       ; (env', bndrs1) <- cpCloneBndrs env (map fst pairs)
       ; stuff <- zipWithM (cpePair top_lvl Recursive topDmd False env') bndrs1 rhss

       ; let (floats_s, bndrs2, rhss2) = unzip3 stuff
             all_pairs = foldrOL add_float (bndrs2 `zip` rhss2)
                                           (concatFloats floats_s)

       ; return (extendCorePrepEnvList env (bndrs `zip` bndrs2),
                 unitFloat (FloatLet (Rec all_pairs))) }
  where
        -- Flatten all the floats, and the current
        -- group into a single giant Rec
    add_float (FloatLet (NonRec b r)) prs2 = (b,r) : prs2
    add_float (FloatLet (Rec prs1))   prs2 = prs1 ++ prs2
    add_float b                       _    = pprPanic "cpeBind" (ppr b)
---------------
-- | Prepare one binder/RHS pair: prepare the RHS, decide whether its
-- floats may escape, eta-expand to match the binder's recorded arity,
-- wrap floating ticks, and record evaluated-ness on the binder.
cpePair :: TopLevelFlag -> RecFlag -> Demand -> Bool
        -> CorePrepEnv -> Id -> CoreExpr
        -> UniqSM (Floats, Id, CpeRhs)
-- Used for all bindings
cpePair top_lvl is_rec dmd is_unlifted env bndr rhs
  = do { (floats1, rhs1) <- cpeRhsE env rhs

       -- See if we are allowed to float this stuff out of the RHS
       ; (floats2, rhs2) <- float_from_rhs floats1 rhs1

       -- Make the arity match up
       ; (floats3, rhs3)
            <- if manifestArity rhs1 <= arity
               then return (floats2, cpeEtaExpand arity rhs2)
               else WARN(True, text "CorePrep: silly extra arguments:" <+> ppr bndr)
                               -- Note [Silly extra arguments]
                    (do { v <- newVar (idType bndr)
                        ; let float = mkFloat topDmd False v rhs2
                        ; return ( addFloat floats2 float
                                 , cpeEtaExpand arity (Var v)) })

        -- Wrap floating ticks
       ; let (floats4, rhs4) = wrapTicks floats3 rhs3

        -- Record if the binder is evaluated
        -- and otherwise trim off the unfolding altogether
        -- It's not used by the code generator; getting rid of it reduces
        -- heap usage and, since we may be changing uniques, we'd have
        -- to substitute to keep it right
       ; let bndr' | exprIsHNF rhs3 = bndr `setIdUnfolding` evaldUnfolding
                   | otherwise      = bndr `setIdUnfolding` noUnfolding

       ; return (floats4, bndr', rhs4) }
  where
    is_strict_or_unlifted = (isStrictDmd dmd) || is_unlifted
    platform = targetPlatform (cpe_dynFlags env)
    arity = idArity bndr        -- We must match this arity

    ---------------------
    -- Dispatch on binding level: top-level floats need the CafInfo dance.
    float_from_rhs floats rhs
      | isEmptyFloats floats = return (emptyFloats, rhs)
      | isTopLevel top_lvl   = float_top    floats rhs
      | otherwise            = float_nested floats rhs

    ---------------------
    float_nested floats rhs
      | wantFloatNested is_rec is_strict_or_unlifted floats rhs
                  = return (floats, rhs)
      | otherwise = dont_float floats rhs

    ---------------------
    float_top floats rhs        -- Urhgh!  See Note [CafInfo and floating]
      | mayHaveCafRefs (idCafInfo bndr)
      , allLazyTop floats
      = return (floats, rhs)

      -- So the top-level binding is marked NoCafRefs
      | Just (floats', rhs') <- canFloatFromNoCaf platform floats rhs
      = return (floats', rhs')

      | otherwise
      = dont_float floats rhs

    ---------------------
    dont_float floats rhs
      -- Non-empty floats, but do not want to float from rhs
      -- So wrap the rhs in the floats
      -- But: rhs1 might have lambdas, and we can't
      --      put them inside a wrapBinds
      = do { body <- rhsToBodyNF rhs
           ; return (emptyFloats, wrapBinds floats body) }
{- Note [Silly extra arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we had this
f{arity=1} = \x\y. e
We *must* match the arity on the Id, so we have to generate
f' = \x\y. e
f = \x. f' x
It's a bizarre case: why is the arity on the Id wrong? Reason
(in the days of __inline_me__):
f{arity=0} = __inline_me__ (let v = expensive in \xy. e)
When InlineMe notes go away this won't happen any more. But
it seems good for CorePrep to be robust.
-}
-- ---------------------------------------------------------------------------
-- CpeRhs: produces a result satisfying CpeRhs
-- ---------------------------------------------------------------------------
cpeRhsE :: CorePrepEnv -> CoreExpr -> UniqSM (Floats, CpeRhs)
-- If
--      e  ===>  (bs, e')
-- then
--      e = let bs in e'        (semantically, that is!)
--
-- For example
--      f (g x)   ===>   ([v = g x], f v)

cpeRhsE _env expr@(Type {})      = return (emptyFloats, expr)
cpeRhsE _env expr@(Coercion {})  = return (emptyFloats, expr)
-- Integer literals are lowered to their library representation here
cpeRhsE env (Lit (LitInteger i _))
    = cpeRhsE env (cvtLitInteger (cpe_dynFlags env) (getMkIntegerId env)
                   (cpe_integerSDataCon env) i)
cpeRhsE _env expr@(Lit {})       = return (emptyFloats, expr)
cpeRhsE env expr@(Var {})        = cpeApp env expr

cpeRhsE env (Var f `App` _{-type-} `App` arg)
  | f `hasKey` lazyIdKey          -- Replace (lazy a) by a
  = cpeRhsE env arg               -- See Note [lazyId magic] in MkId

cpeRhsE env (Var f `App` _runtimeRep `App` _type `App` arg)
    -- See Note [runRW magic] in MkId
  | f `hasKey` runRWKey           -- Replace (runRW# f) by (f realWorld#),
  = case arg of                   -- beta reducing if possible
      Lam s body -> cpeRhsE (extendCorePrepEnv env s realWorldPrimId) body
      _          -> cpeRhsE env (arg `App` Var realWorldPrimId)
             -- See Note [runRW arg]

{- Note [runRW arg]
~~~~~~~~~~~~~~~~~~~
If we got, say
   runRW# (case bot of {})
which happened in Trac #11291, we do /not/ want to turn it into
   (case bot of {}) realWorldPrimId#
because that gives a panic in CoreToStg.myCollectArgs, which expects
only variables in function position.  But if we are sure to make
runRW# strict (which we do in MkId), this can't happen
-}

cpeRhsE env expr@(App {}) = cpeApp env expr

cpeRhsE env (Let bind expr)
  = do { (env', new_binds) <- cpeBind NotTopLevel env bind
       ; (floats, body) <- cpeRhsE env' expr
       ; return (new_binds `appendFloats` floats, body) }

-- Soft-scoped ticks in non-lambda position float with the expression;
-- anything else forces the body into normal form under the tick.
cpeRhsE env (Tick tickish expr)
  | tickishPlace tickish == PlaceNonLam && tickish `tickishScopesLike` SoftScope
  = do { (floats, body) <- cpeRhsE env expr
         -- See [Floating Ticks in CorePrep]
       ; return (unitFloat (FloatTick tickish) `appendFloats` floats, body) }
  | otherwise
  = do { body <- cpeBodyNF env expr
       ; return (emptyFloats, mkTick tickish' body) }
  where
    -- Breakpoint free variables must be renamed through the env too
    tickish' | Breakpoint n fvs <- tickish
             = Breakpoint n (map (lookupCorePrepEnv env) fvs)
             | otherwise
             = tickish

cpeRhsE env (Cast expr co)
   = do { (floats, expr') <- cpeRhsE env expr
        ; return (floats, Cast expr' co) }

cpeRhsE env expr@(Lam {})
   = do { let (bndrs,body) = collectBinders expr
        ; (env', bndrs') <- cpCloneBndrs env bndrs
        ; body' <- cpeBodyNF env' body
        ; return (emptyFloats, mkLams bndrs' body') }

cpeRhsE env (Case scrut bndr ty alts)
  = do { (floats, scrut') <- cpeBody env scrut
       ; let bndr1 = bndr `setIdUnfolding` evaldUnfolding
             -- Record that the case binder is evaluated in the alternatives
       ; (env', bndr2) <- cpCloneBndr env bndr1
       ; alts' <- mapM (sat_alt env') alts
       ; return (floats, Case scrut' bndr2 ty alts') }
  where
    sat_alt env (con, bs, rhs)
       = do { (env2, bs') <- cpCloneBndrs env bs
            ; rhs' <- cpeBodyNF env2 rhs
            ; return (con, bs', rhs') }
cvtLitInteger :: DynFlags -> Id -> Maybe DataCon -> Integer -> CoreExpr
-- Here we convert a literal Integer to the low-level
-- representation. Exactly how we do this depends on the
-- library that implements Integer.  If it's GMP we
-- use the S# data constructor for small literals.
-- See Note [Integer literals] in Literal
cvtLitInteger dflags _ (Just sdatacon) i
  | inIntRange dflags i -- Special case for small integers
    = mkConApp sdatacon [Lit (mkMachInt dflags i)]

-- General case: mkInteger applied to a sign flag and a list of
-- 31-bit limbs of the absolute value, least significant first.
cvtLitInteger dflags mk_integer _ i
    = mkApps (Var mk_integer) [isNonNegative, ints]
  where isNonNegative = if i < 0 then mkConApp falseDataCon []
                                 else mkConApp trueDataCon  []
        ints = mkListExpr intTy (f (abs i))
        f 0 = []
        f x = let low  = x .&. mask
                  high = x `shiftR` bits
              in mkConApp intDataCon [Lit (mkMachInt dflags low)] : f high
        bits = 31
        mask = 2 ^ bits - 1
-- ---------------------------------------------------------------------------
-- CpeBody: produces a result satisfying CpeBody
-- ---------------------------------------------------------------------------
-- | Like 'cpeBody', but wraps any floated bindings straight back
-- around the body, yielding a self-contained CpeBody ("normal form").
cpeBodyNF :: CorePrepEnv -> CoreExpr -> UniqSM CpeBody
cpeBodyNF env expr = fmap (uncurry wrapBinds) (cpeBody env expr)
--------
-- | Produce a result in 'CpeBody' syntax: prepare the RHS, then strip
-- top-level value lambdas via 'rhsToBody', concatenating the floats
-- from both steps in order.
cpeBody :: CorePrepEnv -> CoreExpr -> UniqSM (Floats, CpeBody)
cpeBody env expr
  = do { (rhs_floats, rhs)   <- cpeRhsE env expr
       ; (body_floats, body) <- rhsToBody rhs
       ; return (rhs_floats `appendFloats` body_floats, body) }
--------
-- | Like 'rhsToBody', but wraps the resulting floats back around the body.
rhsToBodyNF :: CpeRhs -> UniqSM CpeBody
rhsToBodyNF = fmap (uncurry wrapBinds) . rhsToBody
--------
rhsToBody :: CpeRhs -> UniqSM (Floats, CpeBody)
-- Remove top level lambdas by let-binding

rhsToBody (Tick t expr)
  | tickishScoped t == NoScope  -- only float out of non-scoped annotations
  = do { (floats, expr') <- rhsToBody expr
       ; return (floats, mkTick t expr') }

rhsToBody (Cast e co)
        -- You can get things like
        --      case e of { p -> coerce t (\s -> ...) }
  = do { (floats, e') <- rhsToBody e
       ; return (floats, Cast e' co) }

rhsToBody expr@(Lam {})
  -- First try eta-reduction; type-only lambdas may stay; otherwise
  -- let-bind the lambda so only a variable remains in body position.
  | Just no_lam_result <- tryEtaReducePrep bndrs body
  = return (emptyFloats, no_lam_result)
  | all isTyVar bndrs           -- Type lambdas are ok
  = return (emptyFloats, expr)
  | otherwise                   -- Some value lambdas
  = do { fn <- newVar (exprType expr)
       ; let rhs   = cpeEtaExpand (exprArity expr) expr
             float = FloatLet (NonRec fn rhs)
       ; return (unitFloat float, Var fn) }
  where
    (bndrs,body) = collectBinders expr

rhsToBody expr = return (emptyFloats, expr)
-- ---------------------------------------------------------------------------
-- CpeApp: produces a result satisfying CpeApp
-- ---------------------------------------------------------------------------
cpeApp :: CorePrepEnv -> CoreExpr -> UniqSM (Floats, CpeRhs)
-- May return a CpeRhs because of saturating primops
-- Walks down an application chain, ANF-ising each value argument and
-- possibly saturating the head if it is a primop or data constructor.
cpeApp env expr
  = do { (app, head, _, floats, ss) <- collect_args expr 0
       ; MASSERT(null ss) -- make sure we used all the strictness info
       -- Now deal with the function
       ; case head of
           Just (fn_id, depth) -> do { sat_app <- maybeSaturate fn_id app depth
                                     ; return (floats, sat_app) }
           _other -> return (floats, app) }
  where
    -- Deconstruct and rebuild the application, floating any non-atomic
    -- arguments to the outside. We collect the type of the expression,
    -- the head of the application, and the number of actual value arguments,
    -- all of which are used to possibly saturate this application if it
    -- has a constructor or primop at the head.
    collect_args
     :: CoreExpr
     -> Int -- Current app depth
     -> UniqSM (CpeApp, -- The rebuilt expression
                Maybe (Id, Int), -- The head of the application,
                                 -- and no. of args it was applied to
                Type, -- Type of the whole expr
                Floats, -- Any floats we pulled out
                [Demand]) -- Remaining argument demands
    -- Type and coercion arguments are atomic: keep them in place and do
    -- not bump the value-argument depth.
    collect_args (App fun arg@(Type arg_ty)) depth
      = do { (fun',hd,fun_ty,floats,ss) <- collect_args fun depth
           ; return (App fun' arg, hd, piResultTy fun_ty arg_ty, floats, ss) }
    collect_args (App fun arg@(Coercion {})) depth
      = do { (fun',hd,fun_ty,floats,ss) <- collect_args fun depth
           ; return (App fun' arg, hd, funResultTy fun_ty, floats, ss) }
    collect_args (App fun arg) depth
      = do { (fun',hd,fun_ty,floats,ss) <- collect_args fun (depth+1)
           ; let (ss1, ss_rest) -- See Note [lazyId magic] in MkId
                   = case (ss, isLazyExpr arg) of
                       (_ : ss_rest, True) -> (topDmd, ss_rest)
                       (ss1 : ss_rest, False) -> (ss1, ss_rest)
                       ([], _) -> (topDmd, [])
                 (arg_ty, res_ty) = expectJust "cpeBody:collect_args" $
                                    splitFunTy_maybe fun_ty
           ; (fs, arg') <- cpeArg env ss1 arg arg_ty
           ; return (App fun' arg', hd, res_ty, fs `appendFloats` floats, ss_rest) }
    collect_args (Var v) depth
      = do { v1 <- fiddleCCall v
           ; let v2 = lookupCorePrepEnv env v1
           ; return (Var v2, Just (v2, depth), idType v2, emptyFloats, stricts) }
      where
        stricts = case idStrictness v of
                    StrictSig (DmdType _ demands _)
                      | listLengthCmp demands depth /= GT -> demands
                            -- length demands <= depth
                      | otherwise -> []
              -- If depth < length demands, then we have too few args to
              -- satisfy strictness info so we have to ignore all the
              -- strictness info, e.g. + (error "urk")
              -- Here, we can't evaluate the arg strictly, because this
              -- partial application might be seq'd
    collect_args (Cast fun co) depth
      = do { let Pair _ty1 ty2 = coercionKind co
           ; (fun', hd, _, floats, ss) <- collect_args fun depth
           ; return (Cast fun' co, hd, ty2, floats, ss) }
    collect_args (Tick tickish fun) depth
      | tickishPlace tickish == PlaceNonLam
        && tickish `tickishScopesLike` SoftScope
      = do { (fun',hd,fun_ty,floats,ss) <- collect_args fun depth
             -- See [Floating Ticks in CorePrep]
           ; return (fun',hd,fun_ty,addFloat floats (FloatTick tickish),ss) }
    -- N-variable fun, better let-bind it
    collect_args fun _
      = do { (fun_floats, fun') <- cpeArg env evalDmd fun ty
                       -- The evalDmd says that it's sure to be evaluated,
                       -- so we'll end up case-binding it
           ; return (fun', Nothing, ty, fun_floats, []) }
      where
        ty = exprType fun
isLazyExpr :: CoreExpr -> Bool
-- See Note [lazyId magic] in MkId
-- Recognise (possibly casted/ticked) two-argument applications whose head
-- is GHC's magic 'lazy' Id; such arguments must not be treated strictly.
isLazyExpr = go
  where
    go (Cast e _)              = go e
    go (Tick _ e)              = go e
    go (Var f `App` _ `App` _) = f `hasKey` lazyIdKey
    go _                       = False
-- ---------------------------------------------------------------------------
-- CpeArg: produces a result satisfying CpeArg
-- ---------------------------------------------------------------------------
-- This is where we arrange that a non-trivial argument is let-bound
cpeArg :: CorePrepEnv -> Demand
       -> CoreArg -> Type -> UniqSM (Floats, CpeTriv)
-- ANF-ise one argument: a non-trivial argument is let-bound (or, for
-- strict/unlifted ones, case-bound via mkFloat) and replaced by a variable.
cpeArg env dmd arg arg_ty
  = do { (floats1, arg1) <- cpeRhsE env arg -- arg1 can be a lambda
       ; (floats2, arg2) <- if want_float floats1 arg1
                            then return (floats1, arg1)
                            else do { body1 <- rhsToBodyNF arg1
                                    ; return (emptyFloats, wrapBinds floats1 body1) }
                -- Else case: arg1 might have lambdas, and we can't
                -- put them inside a wrapBinds
       ; if cpe_ExprIsTrivial arg2 -- Do not eta expand a trivial argument
         then return (floats2, arg2)
         else do
       { v <- newVar arg_ty
       ; let arg3 = cpeEtaExpand (exprArity arg2) arg2
             arg_float = mkFloat dmd is_unlifted v arg3
       ; return (addFloat floats2 arg_float, varToCoreExpr v) } }
  where
    is_unlifted = isUnliftedType arg_ty
    is_strict = isStrictDmd dmd
    want_float = wantFloatNested NonRecursive (is_strict || is_unlifted)
{-
Note [Floating unlifted arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider    C (let v* = expensive in v)
where the "*" indicates "will be demanded". Usually v will have been
inlined by now, but let's suppose it hasn't (see Trac #2756). Then we
do *not* want to get
     let v* = expensive in C v
because that has different strictness. Hence the use of 'allLazy'.
(NB: the let v* turns into a FloatCase, in mkLocalNonRec.)
------------------------------------------------------------------------------
-- Building the saturated syntax
-- ---------------------------------------------------------------------------
maybeSaturate deals with saturating primops and constructors.
The type is the type of the entire application.
-}
maybeSaturate :: Id -> CpeApp -> Int -> UniqSM CpeRhs
-- Eta-expand applications whose head has no binding (primops, data-con
-- workers) so they appear fully saturated; anything else passes through.
maybeSaturate fn expr n_args
  | Just DataToTagOp <- isPrimOpId_maybe fn -- DataToTag must have an evaluated arg
    -- A gruesome special case
  = saturateDataToTag sat_expr
  | hasNoBinding fn -- There's no binding
  = return sat_expr
  | otherwise
  = return expr
  where
    fn_arity = idArity fn
    excess_arity = fn_arity - n_args
    sat_expr = cpeEtaExpand excess_arity expr
-------------
saturateDataToTag :: CpeApp -> UniqSM CpeApp
-- See Note [dataToTag magic]
-- Ensure the (saturated) argument of dataToTag# is evaluated, wrapping a
-- default-only case around it if necessary.
saturateDataToTag sat_expr
  = do { let (eta_bndrs, eta_body) = collectBinders sat_expr
       ; eta_body' <- eval_data2tag_arg eta_body
       ; return (mkLams eta_bndrs eta_body') }
  where
    eval_data2tag_arg :: CpeApp -> UniqSM CpeBody
    eval_data2tag_arg app@(fun `App` arg)
      | exprIsHNF arg -- Includes nullary constructors
      = return app -- The arg is evaluated
      | otherwise -- Arg not evaluated, so evaluate it
      = do { arg_id <- newVar (exprType arg)
           ; let arg_id1 = setIdUnfolding arg_id evaldUnfolding
           ; return (Case arg arg_id1 (exprType app)
                          [(DEFAULT, [], fun `App` Var arg_id1)]) }
    eval_data2tag_arg (Tick t app) -- Scc notes can appear
      = do { app' <- eval_data2tag_arg app
           ; return (Tick t app') }
    eval_data2tag_arg other -- Should not happen
      = pprPanic "eval_data2tag" (ppr other)
{-
Note [dataToTag magic]
~~~~~~~~~~~~~~~~~~~~~~
Horrid: we must ensure that the arg of data2TagOp is evaluated
(data2tag x) --> (case x of y -> data2tag y)
(yuk yuk) take into account the lambdas we've now introduced
How might it not be evaluated? Well, we might have floated it out
of the scope of a `seq`, or dropped the `seq` altogether.
************************************************************************
* *
Simple CoreSyn operations
* *
************************************************************************
-}
cpe_ExprIsTrivial :: CoreExpr -> Bool
-- Version that doesn't consider an scc annotation to be trivial.
cpe_ExprIsTrivial e0 = case e0 of
  Var _         -> True
  Type _        -> True
  Coercion _    -> True
  Lit _         -> True
  App e arg     -> not (isRuntimeArg arg) && cpe_ExprIsTrivial e
  Lam b e       -> not (isRuntimeVar b) && cpe_ExprIsTrivial e
  Tick t e      -> not (tickishIsCode t) && cpe_ExprIsTrivial e
  Cast e _      -> cpe_ExprIsTrivial e
  Case e _ _ [] -> cpe_ExprIsTrivial e
                   -- See Note [Empty case is trivial] in CoreUtils
  _             -> False
{-
-- -----------------------------------------------------------------------------
-- Eta reduction
-- -----------------------------------------------------------------------------
Note [Eta expansion]
~~~~~~~~~~~~~~~~~~~~~
Eta expand to match the arity claimed by the binder. Remember,
CorePrep must not change arity.
Eta expansion might not have happened already, because it is done by
the simplifier only when there at least one lambda already.
NB1:we could refrain when the RHS is trivial (which can happen
for exported things). This would reduce the amount of code
generated (a little) and make things a little worse for
code compiled without -O. The case in point is data constructor
wrappers.
NB2: we have to be careful that the result of etaExpand doesn't
invalidate any of the assumptions that CorePrep is attempting
to establish. One possible cause is eta expanding inside of
an SCC note - we're now careful in etaExpand to make sure the
SCC is pushed inside any new lambdas that are generated.
Note [Eta expansion and the CorePrep invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It turns out to be much much easier to do eta expansion
*after* the main CorePrep stuff. But that places constraints
on the eta expander: given a CpeRhs, it must return a CpeRhs.
For example here is what we do not want:
f = /\a -> g (h 3) -- h has arity 2
After ANFing we get
f = /\a -> let s = h 3 in g s
and now we do NOT want eta expansion to give
f = /\a -> \ y -> (let s = h 3 in g s) y
Instead CoreArity.etaExpand gives
f = /\a -> \y -> let s = h 3 in g s y
-}
cpeEtaExpand :: Arity -> CpeRhs -> CpeRhs
-- Eta-expand to the requested arity; a no-op when the arity is zero.
-- See Note [Eta expansion] and Note [Eta expansion and the CorePrep invariants].
cpeEtaExpand arity expr =
  if arity == 0 then expr else etaExpand arity expr
{-
-- -----------------------------------------------------------------------------
-- Eta reduction
-- -----------------------------------------------------------------------------
Why try eta reduction? Hasn't the simplifier already done eta?
But the simplifier only eta reduces if that leaves something
trivial (like f, or f Int). But for deLam it would be enough to
get to a partial application:
case x of { p -> \xs. map f xs }
==> case x of { p -> map f }
-}
tryEtaReducePrep :: [CoreBndr] -> CoreExpr -> Maybe CoreExpr
-- Try to eta-reduce (\bndrs -> expr); Just the reduced expression if the
-- trailing arguments are exactly the binders, the binders do not occur
-- free elsewhere, and the residual expression is still a value.
tryEtaReducePrep bndrs expr@(App _ _)
  | ok_to_eta_reduce f
  , n_remaining >= 0
  , and (zipWith ok bndrs last_args)
  , not (any (`elemVarSet` fvs_remaining) bndrs)
  , exprIsHNF remaining_expr -- Don't turn value into a non-value
                             -- else the behaviour with 'seq' changes
  = Just remaining_expr
  where
    (f, args) = collectArgs expr
    remaining_expr = mkApps f remaining_args
    fvs_remaining = exprFreeVars remaining_expr
    (remaining_args, last_args) = splitAt n_remaining args
    n_remaining = length args - length bndrs
    ok bndr (Var arg) = bndr == arg
    ok _ _ = False
    -- We can't eta reduce something which must be saturated.
    ok_to_eta_reduce (Var f) = not (hasNoBinding f)
    ok_to_eta_reduce _ = False -- Safe. ToDo: generalise
-- A let that does not capture the binders can be pushed through.
tryEtaReducePrep bndrs (Let bind@(NonRec _ r) body)
  | not (any (`elemVarSet` fvs) bndrs)
  = case tryEtaReducePrep bndrs body of
      Just e -> Just (Let bind e)
      Nothing -> Nothing
  where
    fvs = exprFreeVars r
tryEtaReducePrep bndrs (Tick tickish e)
  = fmap (mkTick tickish) $ tryEtaReducePrep bndrs e
tryEtaReducePrep _ _ = Nothing
{-
************************************************************************
* *
Floats
* *
************************************************************************
Note [Pin demand info on floats]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We pin demand info on floated lets so that we can see the one-shot thunks.
-}
-- | A binding waiting to be placed while floating during CorePrep.
data FloatingBind
  = FloatLet CoreBind -- Rhs of bindings are CpeRhss
                      -- They are always of lifted type;
                      -- unlifted ones are done with FloatCase
  | FloatCase
      Id CpeBody
      Bool -- The bool indicates "ok-for-speculation"
  -- | See Note [Floating Ticks in CorePrep]
  | FloatTick (Tickish Id)
-- | An ordered collection of floats plus a cached speculation flag.
data Floats = Floats OkToSpec (OrdList FloatingBind)
instance Outputable FloatingBind where
  ppr (FloatLet b) = ppr b
  ppr (FloatCase b r ok) = brackets (ppr ok) <+> ppr b <+> equals <+> ppr r
  ppr (FloatTick t) = ppr t
instance Outputable Floats where
  ppr (Floats flag fs) = text "Floats" <> brackets (ppr flag) <+>
                         braces (vcat (map ppr (fromOL fs)))
instance Outputable OkToSpec where
  ppr OkToSpec = text "OkToSpec"
  ppr IfUnboxedOk = text "IfUnboxedOk"
  ppr NotOkToSpec = text "NotOkToSpec"
-- Can we float these binds out of the rhs of a let? We cache this decision
-- to avoid having to recompute it in a non-linear way when there are
-- deeply nested lets.
data OkToSpec
  = OkToSpec -- Lazy bindings of lifted type
  | IfUnboxedOk -- A mixture of lazy lifted bindings and
                -- ok-to-speculate unlifted bindings
  | NotOkToSpec -- Some not-ok-to-speculate unlifted bindings
mkFloat :: Demand -> Bool -> Id -> CpeRhs -> FloatingBind
-- Decide how a binding floats: a strict FloatCase when it is unlifted, or
-- demanded and not already a value; otherwise an ordinary FloatLet.
mkFloat dmd is_unlifted bndr rhs
  | use_case  = FloatCase bndr rhs (exprOkForSpeculation rhs)
  | otherwise = FloatLet (NonRec let_bndr rhs)
  where
    -- See Note [Pin demand info on floats]
    let_bndr | is_hnf    = bndr
             | otherwise = setIdDemandInfo bndr dmd
    is_hnf    = exprIsHNF rhs
    is_strict = isStrictDmd dmd
    -- Don't make a case for a value binding, even if it's strict.
    -- Otherwise we get  case (\x -> e) of ...!
    use_case  = is_unlifted || (is_strict && not is_hnf)
-- | No floats at all; trivially speculation-safe.
emptyFloats :: Floats
emptyFloats = Floats OkToSpec nilOL
-- | True when there are no floated bindings (flag is ignored).
isEmptyFloats :: Floats -> Bool
isEmptyFloats (Floats _ bs) = isNilOL bs
-- | Wrap the floats around a body: lets become Let, strict floats become
-- a default-only Case, and tick floats are re-applied with mkTick.
wrapBinds :: Floats -> CpeBody -> CpeBody
wrapBinds (Floats _ binds) body
  = foldrOL mk_bind body binds
  where
    mk_bind (FloatCase bndr rhs _) body = Case rhs bndr (exprType body) [(DEFAULT, [], body)]
    mk_bind (FloatLet bind) body = Let bind body
    mk_bind (FloatTick tickish) body = mkTick tickish body
-- | Append one float, updating the cached speculation flag.
addFloat :: Floats -> FloatingBind -> Floats
addFloat (Floats ok_to_spec floats) new_float
  = Floats (combine ok_to_spec (check new_float)) (floats `snocOL` new_float)
  where
    check (FloatLet _) = OkToSpec
    check (FloatCase _ _ ok_for_spec)
      | ok_for_spec = IfUnboxedOk
      | otherwise = NotOkToSpec
    check FloatTick{} = OkToSpec
    -- The ok-for-speculation flag says that it's safe to
    -- float this Case out of a let, and thereby do it more eagerly
    -- We need the top-level flag because it's never ok to float
    -- an unboxed binding to the top level
-- | A single float as a Floats collection.
unitFloat :: FloatingBind -> Floats
unitFloat = addFloat emptyFloats
-- | Concatenate two groups of floats, merging their speculation flags.
appendFloats :: Floats -> Floats -> Floats
appendFloats (Floats spec1 floats1) (Floats spec2 floats2)
  = Floats (combine spec1 spec2) (floats1 `appOL` floats2)
-- | Flatten many float groups into one ordered list (flags discarded).
concatFloats :: [Floats] -> OrdList FloatingBind
concatFloats = foldr add nilOL
  where
    add (Floats _ bs1) bs2 = bs1 `appOL` bs2
-- | Least-permissive-wins: NotOkToSpec beats IfUnboxedOk beats OkToSpec.
combine :: OkToSpec -> OkToSpec -> OkToSpec
combine x y = case (x, y) of
  (NotOkToSpec, _) -> NotOkToSpec
  (_, NotOkToSpec) -> NotOkToSpec
  (IfUnboxedOk, _) -> IfUnboxedOk
  (_, IfUnboxedOk) -> IfUnboxedOk
  _                -> OkToSpec
deFloatTop :: Floats -> [CoreBind]
-- For top level only; we don't expect any FloatCases
deFloatTop (Floats _ floats)
  = foldrOL get [] floats
  where
    get (FloatLet b) bs = occurAnalyseRHSs b : bs
    get b _ = pprPanic "corePrepPgm" (ppr b)
    -- See Note [Dead code in CorePrep]
    occurAnalyseRHSs (NonRec x e) = NonRec x (occurAnalyseExpr_NoBinderSwap e)
    occurAnalyseRHSs (Rec xes) = Rec [(x, occurAnalyseExpr_NoBinderSwap e) | (x, e) <- xes]
---------------------------------------------------------------------------
canFloatFromNoCaf :: Platform -> Floats -> CpeRhs -> Maybe (Floats, CpeRhs)
-- Note [CafInfo and floating]
-- Try to mark every floated binder NoCafRefs (threading a substitution so
-- later RHSs and the final RHS see the updated binders); Nothing if any
-- float's RHS is not static.
canFloatFromNoCaf platform (Floats ok_to_spec fs) rhs
  | OkToSpec <- ok_to_spec -- Worth trying
  , Just (subst, fs') <- go (emptySubst, nilOL) (fromOL fs)
  = Just (Floats OkToSpec fs', subst_expr subst rhs)
  | otherwise
  = Nothing
  where
    subst_expr = substExpr (text "CorePrep")
    go :: (Subst, OrdList FloatingBind) -> [FloatingBind]
       -> Maybe (Subst, OrdList FloatingBind)
    go (subst, fbs_out) [] = Just (subst, fbs_out)
    go (subst, fbs_out) (FloatLet (NonRec b r) : fbs_in)
      | rhs_ok r
      = go (subst', fbs_out `snocOL` new_fb) fbs_in
      where
        (subst', b') = set_nocaf_bndr subst b
        new_fb = FloatLet (NonRec b' (subst_expr subst r))
    go (subst, fbs_out) (FloatLet (Rec prs) : fbs_in)
      | all rhs_ok rs
      = go (subst', fbs_out `snocOL` new_fb) fbs_in
      where
        (bs,rs) = unzip prs
        (subst', bs') = mapAccumL set_nocaf_bndr subst bs
        -- NB: recursive group, so substitute with the extended subst'
        rs' = map (subst_expr subst') rs
        new_fb = FloatLet (Rec (bs' `zip` rs'))
    go (subst, fbs_out) (ft@FloatTick{} : fbs_in)
      = go (subst, fbs_out `snocOL` ft) fbs_in
    go _ _ = Nothing -- Encountered a caffy binding
    ------------
    set_nocaf_bndr subst bndr
      = (extendIdSubst subst bndr (Var bndr'), bndr')
      where
        bndr' = bndr `setIdCafInfo` NoCafRefs
    ------------
    rhs_ok :: CoreExpr -> Bool
    -- We can only float to top level from a NoCaf thing if
    -- the new binding is static. However it can't mention
    -- any non-static things or it would *already* be Caffy
    rhs_ok = rhsIsStatic platform (\_ -> False)
                         (\i -> pprPanic "rhsIsStatic" (integer i))
                         -- Integer literals should not show up
wantFloatNested :: RecFlag -> Bool -> Floats -> CpeRhs -> Bool
-- Should these floats stay outside a nested RHS?  Strict/unlifted contexts
-- always float; otherwise only all-lazy floats around a value RHS may.
-- Why the allLazyNested test?  For  v = f (x `divInt#` y)  we must not
-- float the case, even if f has arity 2, because floating the case would
-- make it evaluated too early.
wantFloatNested is_rec strict_or_unlifted floats rhs =
  isEmptyFloats floats
    || strict_or_unlifted
    || (allLazyNested is_rec floats && exprIsHNF rhs)
-- | At top level only a fully speculation-safe group counts as lazy.
allLazyTop :: Floats -> Bool
allLazyTop (Floats flag _) = case flag of
  OkToSpec -> True
  _        -> False
-- | Nested: IfUnboxedOk groups are acceptable only for non-recursive binds.
allLazyNested :: RecFlag -> Floats -> Bool
allLazyNested is_rec (Floats flag _) = case flag of
  OkToSpec    -> True
  NotOkToSpec -> False
  IfUnboxedOk -> isNonRec is_rec
{-
************************************************************************
* *
Cloning
* *
************************************************************************
-}
-- ---------------------------------------------------------------------------
-- The environment
-- ---------------------------------------------------------------------------
-- | Read-mostly environment threaded through CorePrep.
data CorePrepEnv
  = CPE { cpe_dynFlags :: DynFlags
        , cpe_env :: IdEnv Id -- Clone local Ids
        , cpe_mkIntegerId :: Id
        , cpe_integerSDataCon :: Maybe DataCon
        }
-- | Look up the integer library's mkInteger Id once, up front.
lookupMkIntegerName :: DynFlags -> HscEnv -> IO Id
lookupMkIntegerName dflags hsc_env
  = guardIntegerUse dflags $ liftM tyThingId $
    lookupGlobal hsc_env mkIntegerName
-- | The small-Integer data constructor, if the integer library has one.
lookupIntegerSDataConName :: DynFlags -> HscEnv -> IO (Maybe DataCon)
lookupIntegerSDataConName dflags hsc_env = case cIntegerLibraryType of
  IntegerGMP -> guardIntegerUse dflags $ liftM (Just . tyThingDataCon) $
                lookupGlobal hsc_env integerSDataConName
  IntegerSimple -> return Nothing
-- | Helper for 'lookupMkIntegerName' and 'lookupIntegerSDataConName'
guardIntegerUse :: DynFlags -> IO a -> IO a
guardIntegerUse dflags act
  | thisPackage dflags == primUnitId
  = return $ panic "Can't use Integer in ghc-prim"
  | thisPackage dflags == integerUnitId
  = return $ panic "Can't use Integer in integer-*"
  | otherwise = act
mkInitialCorePrepEnv :: DynFlags -> HscEnv -> IO CorePrepEnv
mkInitialCorePrepEnv dflags hsc_env
  = do mkIntegerId <- lookupMkIntegerName dflags hsc_env
       integerSDataCon <- lookupIntegerSDataConName dflags hsc_env
       return $ CPE {
                  cpe_dynFlags = dflags,
                  cpe_env = emptyVarEnv,
                  cpe_mkIntegerId = mkIntegerId,
                  cpe_integerSDataCon = integerSDataCon
                }
-- | Record a single Id clone.
extendCorePrepEnv :: CorePrepEnv -> Id -> Id -> CorePrepEnv
extendCorePrepEnv cpe id id'
  = cpe { cpe_env = extendVarEnv (cpe_env cpe) id id' }
extendCorePrepEnvList :: CorePrepEnv -> [(Id,Id)] -> CorePrepEnv
extendCorePrepEnvList cpe prs
  = cpe { cpe_env = extendVarEnvList (cpe_env cpe) prs }
-- | Resolve an Id to its clone; unseen Ids map to themselves.
lookupCorePrepEnv :: CorePrepEnv -> Id -> Id
lookupCorePrepEnv cpe id
  = case lookupVarEnv (cpe_env cpe) id of
      Nothing -> id
      Just id' -> id'
getMkIntegerId :: CorePrepEnv -> Id
getMkIntegerId = cpe_mkIntegerId
------------------------------------------------------------------------------
-- Cloning binders
-- ---------------------------------------------------------------------------
-- | Clone a list of binders, accumulating the clones in the environment.
cpCloneBndrs :: CorePrepEnv -> [Var] -> UniqSM (CorePrepEnv, [Var])
cpCloneBndrs env bs = mapAccumLM cpCloneBndr env bs
-- | Give a local Id a fresh unique (recorded in the env); global Ids,
-- tyvars and coercion variables pass through unchanged.
cpCloneBndr :: CorePrepEnv -> Var -> UniqSM (CorePrepEnv, Var)
cpCloneBndr env bndr
  | isLocalId bndr, not (isCoVar bndr)
  = do bndr' <- setVarUnique bndr <$> getUniqueM
       -- We are going to OccAnal soon, so drop (now-useless) rules/unfoldings
       -- so that we can drop more stuff as dead code.
       -- See also Note [Dead code in CorePrep]
       let bndr'' = bndr' `setIdUnfolding` noUnfolding
                          `setIdSpecialisation` emptyRuleInfo
       return (extendCorePrepEnv env bndr bndr'', bndr'')
  | otherwise -- Top level things, which we don't want
              -- to clone, have become GlobalIds by now
              -- And we don't clone tyvars, or coercion variables
  = return (env, bndr)
------------------------------------------------------------------------------
-- Cloning ccall Ids; each must have a unique name,
-- to give the code generator a handle to hang it on
-- ---------------------------------------------------------------------------
fiddleCCall :: Id -> UniqSM Id
-- Give every foreign-call Id a fresh unique so the code generator has a
-- per-occurrence handle to hang things on; other Ids are unchanged.
fiddleCCall id
  | isFCallId id = do uniq <- getUniqueM
                      return (id `setVarUnique` uniq)
  | otherwise    = return id
------------------------------------------------------------------------------
-- Generating new binders
-- ---------------------------------------------------------------------------
-- | Make a fresh system-local Id (named "sat") of the given type.
-- The type is deeply forced first to avoid retaining large type thunks.
newVar :: Type -> UniqSM Id
newVar ty
 = seqType ty `seq` do
     uniq <- getUniqueM
     return (mkSysLocalOrCoVar (fsLit "sat") uniq ty)
------------------------------------------------------------------------------
-- Floating ticks
-- ---------------------------------------------------------------------------
--
-- Note [Floating Ticks in CorePrep]
--
-- It might seem counter-intuitive to float ticks by default, given
-- that we don't actually want to move them if we can help it. On the
-- other hand, nothing gets very far in CorePrep anyway, and we want
-- to preserve the order of let bindings and tick annotations in
-- relation to each other. For example, if we just wrapped let floats
-- when they pass through ticks, we might end up performing the
-- following transformation:
--
-- src<...> let foo = bar in baz
-- ==> let foo = src<...> bar in src<...> baz
--
-- Because the let-binding would float through the tick, and then
-- immediately materialize, achieving nothing but decreasing tick
-- accuracy. The only special case is the following scenario:
--
-- let foo = src<...> (let a = b in bar) in baz
-- ==> let foo = src<...> bar; a = src<...> b in baz
--
-- Here we would not want the source tick to end up covering "baz" and
-- therefore refrain from pushing ticks outside. Instead, we copy them
-- into the floating binds (here "a") in cpePair. Note that where "b"
-- or "bar" are (value) lambdas we have to push the annotations
-- further inside in order to uphold our rules.
--
-- All of this is implemented below in @wrapTicks@.
-- | Like wrapFloats, but only wraps tick floats
-- Tick floats are pushed onto the expression with mkTick, while every
-- float still pending underneath them has the tick copied onto its RHS
-- (see Note [Floating Ticks in CorePrep]).
wrapTicks :: Floats -> CoreExpr -> (Floats, CoreExpr)
wrapTicks (Floats flag floats0) expr = (Floats flag floats1, expr')
  where (floats1, expr') = foldrOL go (nilOL, expr) floats0
        go (FloatTick t) (fs, e) = ASSERT(tickishPlace t == PlaceNonLam)
                                   (mapOL (wrap t) fs, mkTick t e)
        go other (fs, e) = (other `consOL` fs, e)
        wrap t (FloatLet bind) = FloatLet (wrapBind t bind)
        wrap t (FloatCase b r ok) = FloatCase b (mkTick t r) ok
        wrap _ other = pprPanic "wrapTicks: unexpected float!"
                                (ppr other)
        wrapBind t (NonRec binder rhs) = NonRec binder (mkTick t rhs)
        wrapBind t (Rec pairs) = Rec (mapSnd (mkTick t) pairs)
|
mcschroeder/ghc
|
compiler/coreSyn/CorePrep.hs
|
bsd-3-clause
| 51,140
| 53
| 21
| 14,079
| 8,945
| 4,721
| 4,224
| 591
| 10
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-
This module is the primary interface for signing and verifying http-client
TSRP requests with zodiac and should provide everything needed to do so.
-}
module Zodiac.HttpClient(
KeyId
, TSRPKey
, RequestTimestamp(..)
, SymmetricProtocol(..)
, Verified(..)
, parseTSRPKey
, parseSymmetricProtocol
, parseKeyId
, renderKeyId
, renderTSRPKey
, renderSymmetricProtocol
-- * Key generation
, genKeyId
, genTSRPKey
-- * Timestamps
, RequestExpiry(..)
, parseRequestExpiry
, renderRequestExpiry
, timestampRequest
-- * http-client interface
, authedHttpClientRequest
, httpClientKeyId
, verifyHttpClientRequest
, verifyHttpClientRequest'
, RequestError(..)
, renderRequestError
) where
import Tinfoil.Data (Verified(..))
import Zodiac.Core.Data.Protocol
import Zodiac.Core.Data.Time
import Zodiac.Core.Time
import Zodiac.HttpClient.Error
import Zodiac.HttpClient.TSRP
import Zodiac.TSRP.Data.Key
import Zodiac.TSRP.Key
|
ambiata/zodiac
|
zodiac-http-client/src/Zodiac/HttpClient.hs
|
bsd-3-clause
| 1,165
| 0
| 6
| 258
| 163
| 113
| 50
| 35
| 0
|
-- |
-- Module : Data.ASN1.Get
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unknown
--
-- Simple get module with really simple accessor for ASN1.
--
-- Original code is pulled from the Get module from cereal
-- which is covered by:
-- Copyright : Lennart Kolmodin, Galois Inc. 2009
-- License : BSD3-style (see LICENSE)
--
-- The original code has been tailored and reduced to only cover the useful
-- case for asn1 and augmented by a position.
--
{-# LANGUAGE Rank2Types #-}
module Data.ASN1.Get
( Result(..)
, Input
, Get
, runGetPos
, runGet
, getBytes
, getBytesCopy
, getWord8
) where
import Control.Applicative (Applicative(..),Alternative(..))
import Control.Monad (ap,MonadPlus(..))
import Data.Maybe (fromMaybe)
import Foreign
import qualified Data.ByteString as B
import qualified Data.ByteString.Unsafe as B
-- | The result of a parse.
data Result r = Fail String
              -- ^ The parse failed. The 'String' is the
              -- message describing the error, if any.
              | Partial (B.ByteString -> Result r)
              -- ^ Supply this continuation with more input so that
              -- the parser can resume. To indicate that no more
              -- input is available, use an 'B.empty' string.
              | Done r Position B.ByteString
              -- ^ The parse succeeded. The 'B.ByteString' is the
              -- input that had not yet been consumed (if any) when
              -- the parse succeeded.
instance Show r => Show (Result r) where
    show (Fail msg) = "Fail " ++ show msg
    show (Partial _) = "Partial _"
    show (Done r pos bs) = "Done " ++ show r ++ " " ++ show pos ++ " " ++ show bs
instance Functor Result where
    fmap _ (Fail msg) = Fail msg
    fmap f (Partial k) = Partial (fmap f . k)
    fmap f (Done r p bs) = Done (f r) p bs
-- The raw input being parsed.
type Input = B.ByteString
-- Input consumed so far and saved for backtracking (Nothing = no buffer).
type Buffer = Maybe B.ByteString
-- CPS continuations: one for failure, one for success.
type Failure r = Input -> Buffer -> More -> Position -> String -> Result r
type Success a r = Input -> Buffer -> More -> Position -> a -> Result r
-- Byte offset from the start of the whole input.
type Position = Word64
-- | Have we read all available input?
data More = Complete
          | Incomplete (Maybe Int)
          deriving (Eq)
-- | The Get monad is an Exception and State monad.
newtype Get a = Get
  { unGet :: forall r. Input -> Buffer -> More -> Position -> Failure r -> Success a r -> Result r }
-- Concatenate two buffers; Nothing if either side is absent.
append :: Buffer -> Buffer -> Buffer
append l r = B.append `fmap` l <*> r
{-# INLINE append #-}
-- Extract the buffered bytes, treating an absent buffer as empty.
bufferBytes :: Buffer -> B.ByteString
bufferBytes = fromMaybe B.empty
{-# INLINE bufferBytes #-}
instance Functor Get where
    -- Post-compose the success continuation with the pure function.
    fmap p m =
      Get $ \s0 b0 m0 p0 kf ks ->
        let ks' s1 b1 m1 p1 a = ks s1 b1 m1 p1 (p a)
         in unGet m s0 b0 m0 p0 kf ks'
instance Applicative Get where
    pure = return
    (<*>) = ap
instance Alternative Get where
    empty = failDesc "empty"
    (<|>) = mplus
-- Definition directly from Control.Monad.State.Strict
instance Monad Get where
    return a = Get $ \ s0 b0 m0 p0 _ ks -> ks s0 b0 m0 p0 a
    m >>= g = Get $ \s0 b0 m0 p0 kf ks ->
        let ks' s1 b1 m1 p1 a = unGet (g a) s1 b1 m1 p1 kf ks
         in unGet m s0 b0 m0 p0 kf ks'
    fail = failDesc
instance MonadPlus Get where
    mzero = failDesc "mzero"
    -- Backtracking choice: run 'a' with an empty fresh buffer; if it fails,
    -- rebuild the input from the original plus whatever 'a' buffered and
    -- run 'b' on it instead.
    mplus a b =
      Get $ \s0 b0 m0 p0 kf ks ->
        let kf' _ b1 m1 p1 _ = unGet b (s0 `B.append` bufferBytes b1)
                                    (b0 `append` b1) m1 p1 kf ks
         in unGet a s0 (Just B.empty) m0 p0 kf' ks
------------------------------------------------------------------------
-- Replace the remaining input with @s@ and advance the position by @pos@.
put :: Position -> B.ByteString -> Get ()
put pos s = Get (\_ b0 m p0 _ k -> k s b0 m (p0+pos) ())
{-# INLINE put #-}
-- Terminal success continuation: package up the final 'Done'.
finalK :: B.ByteString -> t -> t1 -> Position -> r -> Result r
finalK s _ _ p a = Done a p s
-- Terminal failure continuation: report the error with its position.
failK :: Failure a
failK _ _ _ p s = Fail (show p ++ ":" ++ s)
-- | Run a 'Get' parser on the input ByteString, starting at the given position.
runGetPos :: Position -> Get a -> B.ByteString -> Result a
runGetPos pos m str = unGet m str Nothing (Incomplete Nothing) pos failK finalK
{-# INLINE runGetPos #-}
-- | Run a 'Get' parser from position 0.
runGet :: Get a -> B.ByteString -> Result a
runGet = runGetPos 0
{-# INLINE runGet #-}
-- | If at least @n@ bytes of input are available, return the current
-- input, otherwise fail.
ensure :: Int -> Get B.ByteString
ensure n = n `seq` Get $ \ s0 b0 m0 p0 kf ks ->
    if B.length s0 >= n
    then ks s0 b0 m0 p0 s0
    else unGet (demandInput >> ensureRec n) s0 b0 m0 p0 kf ks
{-# INLINE ensure #-}
-- | If at least @n@ bytes of input are available, return the current
-- input, otherwise fail.
-- Non-INLINE recursive twin of 'ensure': keeping the recursion here lets
-- the INLINE pragma on 'ensure' actually fire at call sites.
ensureRec :: Int -> Get B.ByteString
ensureRec n = Get $ \s0 b0 m0 p0 kf ks ->
    if B.length s0 >= n
    then ks s0 b0 m0 p0 s0
    else unGet (demandInput >> ensureRec n) s0 b0 m0 p0 kf ks
-- | Immediately demand more input via a 'Partial' continuation
-- result.
demandInput :: Get ()
demandInput = Get $ \s0 b0 m0 p0 kf ks ->
  case m0 of
    Complete -> kf s0 b0 m0 p0 "too few bytes"
    Incomplete mb -> Partial $ \s ->
      -- An empty chunk is the caller's signal for end-of-input.
      if B.null s
      then kf s0 b0 m0 p0 "too few bytes"
      else let update l = l - B.length s
               s1 = s0 `B.append` s
               b1 = b0 `append` Just s
            in ks s1 b1 (Incomplete (update `fmap` mb)) p0 ()
-- Fail with a standard "Failed reading" prefix on the description.
failDesc :: String -> Get a
failDesc err = Get (\s0 b0 m0 p0 kf _ -> kf s0 b0 m0 p0 ("Failed reading: " ++ err))
------------------------------------------------------------------------
-- Utility with ByteStrings
-- | An efficient 'get' method for strict ByteStrings. Fails if fewer
-- than @n@ bytes are left in the input. This function creates a fresh
-- copy of the underlying bytes.
getBytesCopy :: Int -> Get B.ByteString
-- Read @n@ bytes and force a fresh copy, detached from the input buffer.
getBytesCopy n = getBytes n >>= \bs -> return $! B.copy bs
------------------------------------------------------------------------
-- Helpers
-- | Pull @n@ bytes from the input, as a strict ByteString.
getBytes :: Int -> Get B.ByteString
getBytes n = do
  chunk <- ensure n
  put (fromIntegral n) (B.unsafeDrop n chunk)
  return (B.unsafeTake n chunk)
-- | Pull a single byte from the input.
getWord8 :: Get Word8
getWord8 = do
  chunk <- ensure 1
  put 1 (B.unsafeTail chunk)
  return (B.unsafeHead chunk)
|
mboes/hs-asn1
|
encoding/Data/ASN1/Get.hs
|
bsd-3-clause
| 6,288
| 0
| 19
| 1,722
| 1,852
| 974
| 878
| 118
| 3
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
-- | Generate HPC (Haskell Program Coverage) reports
module Stack.Coverage
( deleteHpcReports
, updateTixFile
, generateHpcReport
, HpcReportOpts(..)
, generateHpcReportForTargets
, generateHpcUnifiedReport
, generateHpcMarkupIndex
) where
import Control.Applicative
import Control.Exception.Lifted
import Control.Monad (liftM, when, unless, void)
import Control.Monad.Catch (MonadCatch)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Resource
import qualified Data.ByteString.Char8 as S8
import Data.Foldable (forM_, asum, toList)
import Data.Function
import Data.List
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Maybe.Extra (mapMaybeM)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy as LT
import Data.Traversable (forM)
import Network.HTTP.Download (HasHttpManager)
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO
import Prelude hiding (FilePath, writeFile)
import Stack.Build.Source (parseTargetsFromBuildOpts)
import Stack.Build.Target
import Stack.Constants
import Stack.Package
import Stack.Types
import qualified System.Directory as D
import System.FilePath (dropExtension, isPathSeparator)
import System.Process.Read
import Text.Hastache (htmlEscape)
import Trace.Hpc.Tix
-- | Remove any pre-existing HPC reports. Invoked at the beginning of a run
-- with "--coverage" so stale reports never survive into a fresh build.
deleteHpcReports :: (MonadIO m, MonadThrow m, MonadReader env m, HasEnvConfig env)
                 => m ()
deleteHpcReports = hpcReportDir >>= removeTreeIfExists
-- | Move a tix file into a sub-directory of the hpc report directory. Deletes the old one if one is
-- present. A no-op when the source tix file does not exist.
updateTixFile :: (MonadIO m,MonadReader env m,HasConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,HasEnvConfig env)
              => PackageName -> Path Abs File -> m ()
updateTixFile pkgName tixSrc = do
    exists <- fileExists tixSrc
    when exists $ do
        -- Destination is keyed by package name and the tix file's base name.
        tixDest <- tixFilePath pkgName (dropExtension (toFilePath (filename tixSrc)))
        removeFileIfExists tixDest
        createTree (parent tixDest)
        -- Remove exe modules because they are problematic. This could be revisited if there's a GHC
        -- version that fixes https://ghc.haskell.org/trac/ghc/ticket/1853
        mtix <- readTixOrLog tixSrc
        case mtix of
            Nothing -> $logError $ "Failed to read " <> T.pack (toFilePath tixSrc)
            Just tix -> do
                -- Write the filtered tix to the report dir, then remove the
                -- original so it is not picked up twice.
                liftIO $ writeTix (toFilePath tixDest) (removeExeModules tix)
                removeFileIfExists tixSrc
-- | Directory under the hpc report root that holds reports for the
-- given package.
hpcPkgPath :: (MonadIO m,MonadReader env m,HasConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,HasEnvConfig env)
           => PackageName -> m (Path Abs Dir)
hpcPkgPath pkgName = do
    reportRoot <- hpcReportDir
    nameDir <- parseRelDir (packageNameString pkgName)
    return (reportRoot </> nameDir)
-- | Location of a tix file, given the file's base name (no extension) and
-- its package. The file lives in a directory of the same name:
-- @\<pkg\>/\<tixName\>/\<tixName\>.tix@.
tixFilePath :: (MonadIO m,MonadReader env m,HasConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,HasEnvConfig env)
            => PackageName -> String -> m (Path Abs File)
tixFilePath pkgName tixName = do
    base <- hpcPkgPath pkgName
    rel <- parseRelFile (tixName ++ "/" ++ tixName ++ ".tix")
    return (base </> rel)
-- | Generates the HTML coverage report and shows a textual coverage summary for a package,
-- one report per listed test-suite name.
generateHpcReport :: (MonadIO m,MonadReader env m,HasConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,HasEnvConfig env)
                  => Path Abs Dir -> Package -> [Text] -> m ()
generateHpcReport pkgDir package tests = do
    -- If we're using > GHC 7.10, the hpc 'include' parameter must specify a ghc package key. See
    -- https://github.com/commercialhaskell/stack/issues/785
    let pkgName = packageNameText (packageName package)
        pkgId = packageIdentifierString (packageIdentifier package)
    compilerVersion <- asks (envConfigCompilerVersion . getEnvConfig)
    -- 'eincludeName' is Left with an error message when the package key
    -- lookup fails, otherwise Right with an optional module-name prefix.
    eincludeName <-
        -- Pre-7.8 uses plain PKG-version in tix files.
        if getGhcVersion compilerVersion < $(mkVersion "7.10") then return $ Right $ Just pkgId
        -- We don't expect to find a package key if there is no library.
        else if not (packageHasLibrary package) then return $ Right Nothing
        -- Look in the inplace DB for the package key.
        -- See https://github.com/commercialhaskell/stack/issues/1181#issuecomment-148968986
        else do
            mghcPkgKey <- findPackageKeyForBuiltPackage pkgDir (packageIdentifier package)
            case mghcPkgKey of
                Nothing -> do
                    let msg = "Failed to find GHC package key for " <> pkgName
                    $logError msg
                    return $ Left msg
                Just ghcPkgKey -> return $ Right $ Just $ T.unpack ghcPkgKey
    forM_ tests $ \testName -> do
        tixSrc <- tixFilePath (packageName package) (T.unpack testName)
        let report = "coverage report for " <> pkgName <> "'s test-suite \"" <> testName <> "\""
            reportDir = parent tixSrc
        case eincludeName of
            -- Key lookup failed earlier: emit an HTML error page instead.
            Left err -> generateHpcErrorReport reportDir (sanitize (T.unpack err))
            -- Restrict to just the current library code, if there is a library in the package (see
            -- #634 - this will likely be customizable in the future)
            Right mincludeName -> do
                let extraArgs = case mincludeName of
                        Just includeName -> ["--include", includeName ++ ":"]
                        Nothing -> []
                -- Same --include filter is used for both "hpc markup" and "hpc report".
                generateHpcReportInternal tixSrc reportDir report extraArgs extraArgs
-- | Shared worker for report generation: runs @hpc report@ for the textual
-- summary and @hpc markup@ for the HTML, writing results under 'reportDir'.
-- Logs (rather than throws) when the tix file is missing, and converts
-- process failures into an HTML error report.
generateHpcReportInternal :: (MonadIO m,MonadReader env m,HasConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,HasEnvConfig env)
                          => Path Abs File -> Path Abs Dir -> Text -> [String] -> [String] -> m ()
generateHpcReportInternal tixSrc reportDir report extraMarkupArgs extraReportArgs = do
    -- If a .tix file exists, move it to the HPC output directory and generate a report for it.
    tixFileExists <- fileExists tixSrc
    if not tixFileExists
        then $logError $ T.concat
            [ "Didn't find .tix for "
            , report
            , " - expected to find it at "
            , T.pack (toFilePath tixSrc)
            , "."
            ]
        else (`catch` \err -> do
                 let msg = show (err :: ReadProcessException)
                 $logError (T.pack msg)
                 generateHpcErrorReport reportDir $ sanitize msg) $
             (`onException` $logError ("Error occurred while producing " <> report)) $ do
            -- Directories for .mix files.
            hpcRelDir <- hpcRelativeDir
            -- Compute arguments used for both "hpc markup" and "hpc report".
            pkgDirs <- Map.keys . envConfigPackages <$> asks getEnvConfig
            let args =
                    -- Use index files from all packages (allows cross-package coverage results).
                    concatMap (\x -> ["--srcdir", toFilePathNoTrailingSep x]) pkgDirs ++
                    -- Look for index files in the correct dir (relative to each pkgdir).
                    ["--hpcdir", toFilePathNoTrailingSep hpcRelDir, "--reset-hpcdirs"]
            menv <- getMinimalEnvOverride
            $logInfo $ "Generating " <> report
            outputLines <- liftM S8.lines $ readProcessStdout Nothing menv "hpc"
                ( "report"
                : toFilePath tixSrc
                : (args ++ extraReportArgs)
                )
            -- Every module reporting "(0/0)" means no code was considered at all.
            if all ("(0/0)" `S8.isSuffixOf`) outputLines
                then do
                    let msg html = T.concat
                            [ "Error: The "
                            , report
                            , " did not consider any code. One possible cause of this is"
                            , " if your test-suite builds the library code (see stack "
                            , if html then "<a href='https://github.com/commercialhaskell/stack/issues/1008'>" else ""
                            , "issue #1008"
                            , if html then "</a>" else ""
                            , "). It may also indicate a bug in stack or"
                            , " the hpc program. Please report this issue if you think"
                            , " your coverage report should have meaningful results."
                            ]
                    $logError (msg False)
                    generateHpcErrorReport reportDir (msg True)
                else do
                    -- Print output, stripping @\r@ characters because Windows.
                    forM_ outputLines ($logInfo . T.decodeUtf8 . S8.filter (not . (=='\r')))
                    $logInfo
                        ("The " <> report <> " is available at " <>
                         T.pack (toFilePath (reportDir </> $(mkRelFile "hpc_index.html"))))
                    -- Generate the markup.
                    void $ readProcessStdout Nothing menv "hpc"
                        ( "markup"
                        : toFilePath tixSrc
                        : ("--destdir=" ++ toFilePathNoTrailingSep reportDir)
                        : (args ++ extraMarkupArgs)
                        )
-- | Options for the @hpc report@ command.
data HpcReportOpts = HpcReportOpts
    { hroptsInputs :: [Text] -- ^ Mixture of build targets and .tix file paths
    , hroptsAll :: Bool -- ^ Use results from all packages/components
    , hroptsDestDir :: Maybe String -- ^ Override the output directory
    } deriving (Show)
-- | Generate a combined HPC report for the requested targets and/or
-- explicitly listed .tix files.
--
-- Inputs ending in @.tix@ are treated as literal tix file paths; everything
-- else is parsed as a build target. Fails when the result would cover no
-- tix files at all.
generateHpcReportForTargets :: (MonadIO m, HasHttpManager env, MonadReader env m, MonadBaseControl IO m, MonadCatch m, MonadLogger m, HasEnvConfig env)
                            => HpcReportOpts -> m ()
generateHpcReportForTargets opts = do
    let (tixFiles, targetNames) = partition (".tix" `T.isSuffixOf`) (hroptsInputs opts)
    targetTixFiles <-
         -- When there aren't any package component arguments, then
         -- don't default to all package components.
         if not (hroptsAll opts) && null targetNames
             then return []
             else do
                 when (hroptsAll opts && not (null targetNames)) $
                     $logWarn $ "Since --all is used, it is redundant to specify these targets: " <> T.pack (show targetNames)
                 (_,_,targets) <- parseTargetsFromBuildOpts
                     AllowNoTargets
                     defaultBuildOpts
                         { boptsTargets = if hroptsAll opts then [] else targetNames
                         }
                 liftM concat $ forM (Map.toList targets) $ \(name, target) ->
                     case target of
                         -- Fixed message typo: was "known local page".
                         STUnknown -> fail $
                             packageNameString name ++ " isn't a known local package"
                         STNonLocal -> fail $
                             "Expected a local package, but " ++
                             packageNameString name ++
                             " is either an extra-dep or in the snapshot."
                         -- Only test-suite components produce tix files we can report on.
                         STLocalComps comps -> do
                             pkgPath <- hpcPkgPath name
                             forM (toList comps) $ \nc ->
                                 case nc of
                                     CTest testName ->
                                         liftM (pkgPath </>) $ parseRelFile (T.unpack testName ++ ".tix")
                                     _ -> fail $
                                         "Can't specify anything except test-suites as hpc report targets (" ++
                                         packageNameString name ++
                                         " is used with a non test-suite target)"
                         -- Whole package: pick up every .tix found under its report dir.
                         STLocalAll -> do
                             pkgPath <- hpcPkgPath name
                             exists <- dirExists pkgPath
                             if exists
                                 then do
                                     (_, files) <- listDirectory pkgPath
                                     return (filter ((".tix" `isSuffixOf`) . toFilePath) files)
                                 else return []
    tixPaths <- liftM (++ targetTixFiles) $ mapM (parseRelAsAbsFile . T.unpack) tixFiles
    when (null tixPaths) $
        fail "Not generating combined report, because no targets or tix files are specified."
    reportDir <- case hroptsDestDir opts of
        Nothing -> liftM (</> $(mkRelDir "combined/custom")) hpcReportDir
        Just destDir -> do
            liftIO $ D.createDirectoryIfMissing True destDir
            parseRelAsAbsDir destDir
    generateUnionReport "combined report" reportDir tixPaths
-- | Produce a single unified report from every tix file found under the hpc
-- report directory (excluding previously combined results). Skipped with a
-- log message when fewer than two tix files exist.
generateHpcUnifiedReport :: (MonadIO m,MonadReader env m,HasConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,HasEnvConfig env)
                         => m ()
generateHpcUnifiedReport = do
    outputDir <- hpcReportDir
    createTree outputDir
    (dirs, _) <- listDirectory outputDir
    -- Walk two directory levels (<pkg>/<testsuite>) collecting .tix files,
    -- skipping the "combined" dir so prior unified output isn't re-unioned.
    tixFiles <- liftM (concat . concat) $ forM (filter (("combined" /=) . dirnameString) dirs) $ \dir -> do
        (dirs', _) <- listDirectory dir
        forM dirs' $ \dir' -> do
            (_, files) <- listDirectory dir'
            return (filter ((".tix" `isSuffixOf`) . toFilePath) files)
    let reportDir = outputDir </> $(mkRelDir "combined/all")
    if length tixFiles < 2
        then $logInfo $ T.concat
            [ if null tixFiles then "No tix files" else "Only one tix file"
            , " found in "
            , T.pack (toFilePath outputDir)
            , ", so not generating a unified coverage report."
            ]
        else generateUnionReport "unified report" reportDir tixFiles
-- | Union the given tix files into one and generate a report for the result.
-- Modules dropped because of version mismatches are warned about, not fatal.
generateUnionReport :: (MonadIO m,MonadReader env m,HasConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,HasEnvConfig env)
                    => Text -> Path Abs Dir -> [Path Abs File] -> m ()
generateUnionReport report reportDir tixFiles = do
    (errs, tix) <- fmap (unionTixes . map removeExeModules) (mapMaybeM readTixOrLog tixFiles)
    $logDebug $ "Using the following tix files: " <> T.pack (show tixFiles)
    unless (null errs) $ $logWarn $ T.concat $
        "The following modules are left out of the " : report : " due to version mismatches: " :
        intersperse ", " (map T.pack errs)
    -- The combined tix is named after its report directory.
    tixDest <- liftM (reportDir </>) $ parseRelFile (dirnameString reportDir ++ ".tix")
    createTree (parent tixDest)
    liftIO $ writeTix (toFilePath tixDest) tix
    generateHpcReportInternal tixDest reportDir report [] []
-- | Read a tix file, logging (rather than throwing) on failure and
-- returning 'Nothing'. 'readTix' signals failure both by 'Nothing' and by
-- calling 'error', hence the 'ErrorCall' handler.
readTixOrLog :: (MonadLogger m, MonadIO m, MonadBaseControl IO m) => Path b File -> m (Maybe Tix)
readTixOrLog path = do
    mtix <- liftIO (readTix (toFilePath path)) `catch` \(ErrorCall err) -> do
        $logError $ "Error while reading tix: " <> T.pack err
        return Nothing
    when (isNothing mtix) $
        $logError $ "Failed to read tix file " <> T.pack (toFilePath path)
    return mtix
-- | Keep only modules built from libraries. Module names which contain '/'
-- have a package name prefix, and so they weren't built into the
-- executable; anything without a '/' is dropped.
removeExeModules :: Tix -> Tix
removeExeModules (Tix ms) = Tix (filter fromLibrary ms)
  where
    fromLibrary (TixModule name _ _ _) = '/' `elem` name
-- | Merge a list of tix results into one, summing tick counts per module.
-- Modules whose hash or length differ between inputs cannot be merged;
-- their names are returned as the first component.
unionTixes :: [Tix] -> ([String], Tix)
unionTixes tixes = (Map.keys errs, Tix (Map.elems outputs))
  where
    -- Right = mergeable so far, Left () = conflicting versions seen.
    (errs, outputs) = Map.mapEither id $ Map.unionsWith merge $ map toMap tixes
    toMap (Tix ms) = Map.fromList (map (\x@(TixModule k _ _ _) -> (k, Right x)) ms)
    merge (Right (TixModule k hash1 len1 tix1))
          (Right (TixModule _ hash2 len2 tix2))
        | hash1 == hash2 && len1 == len2 = Right (TixModule k hash1 len1 (zipWith (+) tix1 tix2))
    merge _ _ = Left ()
-- | Write an @index.html@ into the hpc report directory linking to every
-- per-testsuite @hpc_index.html@ found two levels below it.
--
-- Fixes an HTML typo in the emitted markup: the table previously used a
-- misspelled @boder@ attribute instead of @border@.
generateHpcMarkupIndex :: (MonadIO m,MonadReader env m,MonadLogger m,MonadCatch m,HasEnvConfig env)
                       => m ()
generateHpcMarkupIndex = do
    outputDir <- hpcReportDir
    let outputFile = outputDir </> $(mkRelFile "index.html")
    createTree outputDir
    (dirs, _) <- listDirectory outputDir
    -- One table row per <package>/<testsuite> dir containing hpc_index.html.
    rows <- liftM (catMaybes . concat) $ forM dirs $ \dir -> do
        (subdirs, _) <- listDirectory dir
        forM subdirs $ \subdir -> do
            let indexPath = subdir </> $(mkRelFile "hpc_index.html")
            exists' <- fileExists indexPath
            if not exists' then return Nothing else do
                relPath <- stripDir outputDir indexPath
                let package = dirname dir
                    testsuite = dirname subdir
                return $ Just $ T.concat
                    [ "<tr><td>"
                    , pathToHtml package
                    , "</td><td><a href=\""
                    , pathToHtml relPath
                    , "\">"
                    , pathToHtml testsuite
                    , "</a></td></tr>"
                    ]
    liftIO $ T.writeFile (toFilePath outputFile) $ T.concat $
        [ "<html><head><meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">"
        -- Part of the css from HPC's output HTML
        , "<style type=\"text/css\">"
        , "table.dashboard { border-collapse: collapse; border: solid 1px black }"
        , ".dashboard td { border: solid 1px black }"
        , ".dashboard th { border: solid 1px black }"
        , "</style>"
        , "</head>"
        , "<body>"
        ] ++
        (if null rows
            then
                [ "<b>No hpc_index.html files found in \""
                , pathToHtml outputDir
                , "\".</b>"
                ]
            else
                [ "<table class=\"dashboard\" width=\"100%\" border=\"1\"><tbody>"
                , "<p><b>NOTE: This is merely a listing of the html files found in the coverage reports directory. Some of these reports may be old.</b></p>"
                , "<tr><th>Package</th><th>TestSuite</th><th>Modification Time</th></tr>"
                ] ++
                rows ++
                ["</tbody></table>"]) ++
        ["</body></html>"]
    unless (null rows) $
        $logInfo $ "\nAn index of the generated HTML coverage reports is available at " <>
            T.pack (toFilePath outputFile)
-- | Replace the would-be coverage report with a small HTML page showing
-- the given (already-escaped) error message.
generateHpcErrorReport :: MonadIO m => Path Abs Dir -> Text -> m ()
generateHpcErrorReport dir err = do
    createTree dir
    let page = T.concat
            [ "<html><head><meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\"></head><body>"
            , "<h1>HPC Report Generation Error</h1>"
            , "<p>"
            , err
            , "</p>"
            , "</body></html>"
            ]
    liftIO (T.writeFile (toFilePath (dir </> $(mkRelFile "hpc_index.html"))) page)
-- | Render a path for HTML output: escape it and drop any trailing slashes.
pathToHtml :: Path b t -> Text
pathToHtml p = T.dropWhileEnd (== '/') (sanitize (toFilePath p))
-- | HTML-escape a 'String', producing strict 'Text'.
sanitize :: String -> Text
sanitize s = LT.toStrict (htmlEscape (LT.pack s))
-- | The final directory component as a 'String', without a trailing
-- path separator.
dirnameString :: Path r Dir -> String
dirnameString p = dropWhileEnd isPathSeparator (toFilePath (dirname p))
-- | Look up the GHC package key of a locally built package by scanning its
-- inplace package conf for a "key: " line. Returns 'Nothing' when the conf
-- file does not exist or contains no such line.
findPackageKeyForBuiltPackage :: (MonadIO m, MonadReader env m, MonadThrow m, HasEnvConfig env)
                              => Path Abs Dir -> PackageIdentifier -> m (Maybe Text)
findPackageKeyForBuiltPackage pkgDir pkgId = do
    distDir <- distDirFromDir pkgDir
    path <- liftM (distDir </>) $
        parseRelFile ("package.conf.inplace/" ++ packageIdentifierString pkgId ++ "-inplace.conf")
    exists <- fileExists path
    if exists
        then do
            contents <- liftIO $ T.readFile (toFilePath path)
            -- 'asum' picks the first line carrying the "key: " prefix.
            return $ asum (map (T.stripPrefix "key: ") (T.lines contents))
        else return Nothing
|
rvion/stack
|
src/Stack/Coverage.hs
|
bsd-3-clause
| 19,985
| 0
| 28
| 6,387
| 4,575
| 2,322
| 2,253
| 341
| 9
|
import System.IO
import Data.Char(toUpper)
-- Copy input.txt to output.txt, upper-casing every character. The read is
-- lazy ('hGetContents'), so the file streams through rather than loading
-- all at once.
main =
    do
        inputH <- openFile "input.txt" ReadMode
        outputH <- openFile "output.txt" WriteMode
        contents <- hGetContents inputH
        hPutStr outputH (map toUpper contents)
        hClose inputH
        hClose outputH
|
zhangjiji/real-world-haskell
|
ch7/to-upper-lazy2.hs
|
mit
| 241
| 0
| 9
| 63
| 83
| 37
| 46
| 10
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Lambda.Waiters
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.Lambda.Waiters where
import Network.AWS.Lambda.Types
import Network.AWS.Prelude
import Network.AWS.Waiter
|
fmapfmapfmap/amazonka
|
amazonka-lambda/gen/Network/AWS/Lambda/Waiters.hs
|
mpl-2.0
| 621
| 0
| 4
| 122
| 39
| 31
| 8
| 7
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SSM.DescribeAssociation
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes the associations for the specified configuration document or
-- instance.
--
-- <http://docs.aws.amazon.com/ssm/latest/APIReference/API_DescribeAssociation.html>
module Network.AWS.SSM.DescribeAssociation
(
-- * Request
DescribeAssociation
-- ** Request constructor
, describeAssociation
-- ** Request lenses
, daInstanceId
, daName
-- * Response
, DescribeAssociationResponse
-- ** Response constructor
, describeAssociationResponse
-- ** Response lenses
, darAssociationDescription
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.SSM.Types
import qualified GHC.Exts
-- | Request type for the DescribeAssociation operation.
data DescribeAssociation = DescribeAssociation
    { _daInstanceId :: Text -- ^ ID of the instance
    , _daName       :: Text -- ^ Name of the configuration document
    } deriving (Eq, Ord, Read, Show)
-- | 'DescribeAssociation' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'daInstanceId' @::@ 'Text'
--
-- * 'daName' @::@ 'Text'
--
describeAssociation :: Text -- ^ 'daName'
                    -> Text -- ^ 'daInstanceId'
                    -> DescribeAssociation
describeAssociation name instId = DescribeAssociation
    { _daInstanceId = instId
    , _daName       = name
    }
-- | The ID of the instance.
daInstanceId :: Lens' DescribeAssociation Text
daInstanceId = lens _daInstanceId (\s a -> s { _daInstanceId = a })

-- | The name of the configuration document.
daName :: Lens' DescribeAssociation Text
daName = lens _daName (\s a -> s { _daName = a })

-- | Response type; carries the association description when found.
newtype DescribeAssociationResponse = DescribeAssociationResponse
    { _darAssociationDescription :: Maybe AssociationDescription
    } deriving (Eq, Read, Show)
-- | 'DescribeAssociationResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'darAssociationDescription' @::@ 'Maybe' 'AssociationDescription'
--
describeAssociationResponse :: DescribeAssociationResponse
describeAssociationResponse = DescribeAssociationResponse
    { _darAssociationDescription = Nothing
    }

-- | Information about the association.
darAssociationDescription :: Lens' DescribeAssociationResponse (Maybe AssociationDescription)
darAssociationDescription =
    lens _darAssociationDescription
        (\s a -> s { _darAssociationDescription = a })
-- Request/response wiring: the operation posts a JSON body to "/".
instance ToPath DescribeAssociation where
    toPath = const "/"

instance ToQuery DescribeAssociation where
    toQuery = const mempty

instance ToHeaders DescribeAssociation

instance ToJSON DescribeAssociation where
    toJSON DescribeAssociation{..} = object
        [ "Name"       .= _daName
        , "InstanceId" .= _daInstanceId
        ]

instance AWSRequest DescribeAssociation where
    type Sv DescribeAssociation = SSM
    type Rs DescribeAssociation = DescribeAssociationResponse

    request  = post "DescribeAssociation"
    response = jsonResponse

instance FromJSON DescribeAssociationResponse where
    parseJSON = withObject "DescribeAssociationResponse" $ \o -> DescribeAssociationResponse
        -- "AssociationDescription" is optional in the response payload.
        <$> o .:? "AssociationDescription"
|
kim/amazonka
|
amazonka-ssm/gen/Network/AWS/SSM/DescribeAssociation.hs
|
mpl-2.0
| 4,111
| 0
| 9
| 873
| 514
| 312
| 202
| 65
| 1
|
-- Compile this with 'ghc -o Game Game.hs' and run it with './Game'.
import Data.List
import Graphics.Gloss.Game
-- Window size in pixels.
width = 600
height = 400

-- A sprite representing our character, and its on-screen dimensions after
-- the 50% scaling applied in 'draw'.
slimeSprite = bmp "Slime.bmp"
slimeWidth = fst (snd (boundingBox (scale 0.5 0.5 slimeSprite)))
slimeHeight = snd (snd (boundingBox (scale 0.5 0.5 slimeSprite)))

-- Our game world consists of both the location and the vertical velocity of our character as well as a list of all
-- currently pressed keys.
data World = World Point Float [Char]
-- This starts our game in a window with a given size, running at 30 frames per second.
--
-- The argument 'World (0, 0) 0 []' is the initial state of our game world, where our character is at the centre of the
-- window, has no velocity, and no keys are pressed.
--
main
  = play (InWindow "Slime is here!" (round width, round height) (50, 50)) white 30 (World (0, 0) 0 []) draw handle
         [applyMovement, applyVelocity, applyGravity]
-- To draw a frame, we position the character sprite at the location as determined by the current state of the world.
-- We shrink the sprite by 50%.
draw (World (x, y) _v _keys) = translate x y (scale 0.5 0.5 slimeSprite)

-- Pressing the spacebar makes the character jump (sets upward velocity to 8,
-- discarding the current velocity). All character keys are tracked in the world state.
handle (EventKey (Char ch) Down _ _) (World (x, y) v keys) = World (x, y) v (ch : keys)
handle (EventKey (Char ch) Up _ _) (World (x, y) v keys) = World (x, y) v (delete ch keys)
handle (EventKey (SpecialKey KeySpace) Down _ _) (World (x, y) v keys) = World (x, y) 8 keys
handle event world = world -- don't change the world in case of any other events
-- Move horizontally by 'offset', clamping so the character stays boxed in
-- at the window's left and right boundaries.
moveX (x, y) offset
  | x + offset < (-width / 2) + slimeWidth / 2 = ((-width / 2) + slimeWidth / 2, y)
  | x + offset > width / 2 - slimeWidth / 2    = (width / 2 - slimeWidth / 2, y)
  | otherwise                                  = (x + offset, y)
-- Move vertically by 'offset', clamping so the character stays boxed in
-- at the window's top and bottom boundaries.
moveY (x, y) offset
  | y + offset < (-height / 2) + slimeHeight / 2 = (x, (-height / 2) + slimeHeight / 2)
  | y + offset > height / 2 - slimeHeight / 2    = (x, height / 2 - slimeHeight / 2)
  | otherwise                                    = (x, y + offset)
-- A pressed 'a' or 'd' key moves the character a fixed distance left or
-- right ('a' wins when both are held).
applyMovement _time (World (x, y) v keys)
  | 'a' `elem` keys = World (moveX (x, y) (-10)) v keys
  | 'd' `elem` keys = World (moveX (x, y) 10) v keys
  | otherwise       = World (x, y) v keys
-- Each frame, add the velocity to the vertical position (y-axis). (A negative velocity corresponds to a downward movement.)
applyVelocity _time (World (x, y) v keys) = World (moveY (x, y) v) v keys

-- We simulate gravity by decreasing the velocity slightly on each frame, corresponding to a downward acceleration.
--
-- We bounce off the bottom edge by reverting the velocity (with a damping factor of 0.5).
--
applyGravity _time (World (x, y) v keys) = World (x, y) (if y <= (-200) + slimeHeight / 2 then v * (-0.5) else v - 0.5) keys
|
mchakravarty/lets-program
|
step4/Game.hs
|
bsd-3-clause
| 3,407
| 1
| 12
| 996
| 938
| 514
| 424
| 33
| 3
|
{-# LANGUAGE Trustworthy, BangPatterns #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Buffer
-- Copyright : (c) The University of Glasgow 2008
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- Buffers used in the IO system
--
-----------------------------------------------------------------------------
module GHC.IO.Buffer (
-- * Buffers of any element
Buffer(..), BufferState(..), CharBuffer, CharBufElem,
-- ** Creation
newByteBuffer,
newCharBuffer,
newBuffer,
emptyBuffer,
-- ** Insertion/removal
bufferRemove,
bufferAdd,
slideContents,
bufferAdjustL,
-- ** Inspecting
isEmptyBuffer,
isFullBuffer,
isFullCharBuffer,
isWriteBuffer,
bufferElems,
bufferAvailable,
summaryBuffer,
-- ** Operating on the raw buffer as a Ptr
withBuffer,
withRawBuffer,
-- ** Assertions
checkBuffer,
-- * Raw buffers
RawBuffer,
readWord8Buf,
writeWord8Buf,
RawCharBuffer,
peekCharBuf,
readCharBuf,
writeCharBuf,
readCharBufPtr,
writeCharBufPtr,
charSize,
) where
import GHC.Base
-- import GHC.IO
import GHC.Num
import GHC.Ptr
import GHC.Word
import GHC.Show
import GHC.Real
import Foreign.C.Types
import Foreign.ForeignPtr
import Foreign.Storable
-- Char buffers use either UTF-16 or UTF-32, with the endianness matching
-- the endianness of the host.
--
-- Invariants:
-- * a Char buffer consists of *valid* UTF-16 or UTF-32
-- * only whole characters: no partial surrogate pairs
-- #define CHARBUF_UTF16
--
-- NB. it won't work to just change this to CHARBUF_UTF16. Some of
-- the code to make this work is there, and it has been tested with
-- the Iconv codec, but there are some pieces that are known to be
-- broken. In particular, the built-in codecs
-- e.g. GHC.IO.Encoding.UTF{8,16,32} need to use isFullCharBuffer or
-- similar in place of the ow >= os comparisons.
-- ---------------------------------------------------------------------------
-- Raw blocks of data

-- | A raw buffer is just a foreign pointer to a block of elements.
type RawBuffer e = ForeignPtr e

-- | Read the byte at the given offset.
readWord8Buf :: RawBuffer Word8 -> Int -> IO Word8
readWord8Buf arr ix = withForeignPtr arr $ \p -> peekByteOff p ix

-- | Write a byte at the given offset.
writeWord8Buf :: RawBuffer Word8 -> Int -> Word8 -> IO ()
writeWord8Buf arr ix w = withForeignPtr arr $ \p -> pokeByteOff p ix w

type CharBufElem = Char

type RawCharBuffer = RawBuffer CharBufElem
-- | Read the 'Char' at the given offset without consuming it.
peekCharBuf :: RawCharBuffer -> Int -> IO Char
peekCharBuf arr ix = withForeignPtr arr $ \p -> do
                        (c,_) <- readCharBufPtr p ix
                        return c

{-# INLINE readCharBuf #-}
-- | Read a 'Char' and return it with the offset of the next element.
readCharBuf :: RawCharBuffer -> Int -> IO (Char, Int)
readCharBuf arr ix = withForeignPtr arr $ \p -> readCharBufPtr p ix

{-# INLINE writeCharBuf #-}
-- | Write a 'Char' and return the offset of the next element.
writeCharBuf :: RawCharBuffer -> Int -> Char -> IO Int
writeCharBuf arr ix c = withForeignPtr arr $ \p -> writeCharBufPtr p ix c

{-# INLINE readCharBufPtr #-}
readCharBufPtr :: Ptr CharBufElem -> Int -> IO (Char, Int)
readCharBufPtr p ix = do c <- peekElemOff (castPtr p) ix; return (c, ix+1)

{-# INLINE writeCharBufPtr #-}
writeCharBufPtr :: Ptr CharBufElem -> Int -> Char -> IO Int
writeCharBufPtr p ix ch = do pokeElemOff (castPtr p) ix ch; return (ix+1)

-- | Size in bytes of one char-buffer element (UTF-32 here, see the
-- CHARBUF_UTF16 note above).
charSize :: Int
charSize = 4
-- ---------------------------------------------------------------------------
-- Buffers

-- | A mutable array of bytes that can be passed to foreign functions.
--
-- The buffer is represented by a record, where the record contains
-- the raw buffer and the start/end points of the filled portion. The
-- buffer contents itself is mutable, but the rest of the record is
-- immutable. This is a slightly odd mix, but it turns out to be
-- quite practical: by making all the buffer metadata immutable, we
-- can have operations on buffer metadata outside of the IO monad.
--
-- The "live" elements of the buffer are those between the 'bufL' and
-- 'bufR' offsets. In an empty buffer, 'bufL' is equal to 'bufR', but
-- they might not be zero: for example, the buffer might correspond to
-- a memory-mapped file and in which case 'bufL' will point to the
-- next location to be written, which is not necessarily the beginning
-- of the file.
data Buffer e
  = Buffer {
        bufRaw   :: !(RawBuffer e),
        bufState :: BufferState,
        bufSize  :: !Int,          -- in elements, not bytes
        bufL     :: !Int,          -- offset of first item in the buffer
        bufR     :: !Int           -- offset of last item + 1
  }

type CharBuffer = Buffer Char

data BufferState = ReadBuffer | WriteBuffer deriving (Eq)
-- | Run an action with a pointer to the buffer's raw storage.
withBuffer :: Buffer e -> (Ptr e -> IO a) -> IO a
withBuffer buf f = withForeignPtr (castForeignPtr (bufRaw buf)) f

-- | Run an action with a pointer to a raw buffer's storage.
withRawBuffer :: RawBuffer e -> (Ptr e -> IO a) -> IO a
withRawBuffer raw = withForeignPtr (castForeignPtr raw)
-- | True when the live region is empty (left offset equals right offset).
isEmptyBuffer :: Buffer e -> Bool
isEmptyBuffer Buffer{ bufL=l, bufR=r } = l == r

-- | True when the write offset has reached the buffer's size.
isFullBuffer :: Buffer e -> Bool
isFullBuffer Buffer{ bufR=w, bufSize=s } = s == w

-- if a Char buffer does not have room for a surrogate pair, it is "full"
isFullCharBuffer :: Buffer e -> Bool
isFullCharBuffer = isFullBuffer

isWriteBuffer :: Buffer e -> Bool
isWriteBuffer buf = case bufState buf of
                        WriteBuffer -> True
                        ReadBuffer  -> False

-- | Number of live elements (between 'bufL' and 'bufR').
bufferElems :: Buffer e -> Int
bufferElems Buffer{ bufR=w, bufL=r } = w - r

-- | Number of free element slots past 'bufR'.
bufferAvailable :: Buffer e -> Int
bufferAvailable Buffer{ bufR=w, bufSize=s } = s - w
-- | Drop @i@ elements from the front of the live region.
bufferRemove :: Int -> Buffer e -> Buffer e
bufferRemove i buf@Buffer{ bufL=r } = bufferAdjustL (r+i) buf

-- | Set the left offset; resets both offsets to zero when the buffer
-- becomes empty.
bufferAdjustL :: Int -> Buffer e -> Buffer e
bufferAdjustL l buf@Buffer{ bufR=w }
  | l == w    = buf{ bufL=0, bufR=0 }
  | otherwise = buf{ bufL=l, bufR=w }

-- | Extend the live region by @i@ elements at the end.
bufferAdd :: Int -> Buffer e -> Buffer e
bufferAdd i buf@Buffer{ bufR=w } = buf{ bufR=w+i }

-- | Wrap a raw buffer of @sz@ elements as an empty 'Buffer'.
emptyBuffer :: RawBuffer e -> Int -> BufferState -> Buffer e
emptyBuffer raw sz state =
  Buffer{ bufRaw=raw, bufState=state, bufR=0, bufL=0, bufSize=sz }

-- | Allocate a byte buffer (element size = 1 byte).
newByteBuffer :: Int -> BufferState -> IO (Buffer Word8)
newByteBuffer c st = newBuffer c c st

-- | Allocate a char buffer of @c@ elements ('charSize' bytes each).
newCharBuffer :: Int -> BufferState -> IO CharBuffer
newCharBuffer c st = newBuffer (c * charSize) c st

-- | Allocate @bytes@ bytes and wrap them as an empty buffer of @sz@ elements.
newBuffer :: Int -> Int -> BufferState -> IO (Buffer e)
newBuffer bytes sz state = do
  fp <- mallocForeignPtrBytes bytes
  return (emptyBuffer fp sz state)
-- | slides the contents of the buffer to the beginning
slideContents :: Buffer Word8 -> IO (Buffer Word8)
slideContents buf@Buffer{ bufL=l, bufR=r, bufRaw=raw } = do
  let elems = r - l
  withRawBuffer raw $ \p ->
      -- memmove handles the overlap between source and destination.
      do _ <- memmove p (p `plusPtr` l) (fromIntegral elems)
         return ()
  return buf{ bufL=0, bufR=elems }

foreign import java unsafe "@static eta.base.Utils.c_memmove"
   memmove :: Ptr a -> Ptr a -> CSize -> IO (Ptr a)

-- | One-line description of a buffer's size and live region, for debugging.
summaryBuffer :: Buffer a -> String
summaryBuffer !buf  -- Strict => slightly better code
  = "buf" ++ show (bufSize buf) ++ "(" ++ show (bufL buf) ++ "-" ++ show (bufR buf) ++ ")"
-- INVARIANTS on Buffers:
--   * r <= w
--   * if r == w, and the buffer is for reading, then r == 0 && w == 0
--   * a write buffer is never full.  If an operation
--     fills up the buffer, it will always flush it before
--     returning.
--   * a read buffer may be full as a result of hLookAhead.  In normal
--     operation, a read buffer always has at least one character of space.

-- | Assert the buffer invariants listed above; calls
-- 'errorWithoutStackTrace' on violation.
checkBuffer :: Buffer a -> IO ()
checkBuffer buf@Buffer{ bufState = state, bufL=r, bufR=w, bufSize=size } = do
     check buf (
        size > 0
        && r <= w
        && w <= size
        && ( r /= w || state == WriteBuffer || (r == 0 && w == 0) )
        && ( state /= WriteBuffer || w < size ) -- write buffer is never full
      )

-- | Helper for 'checkBuffer': raise an error with the buffer summary when
-- the condition is False.
check :: Buffer a -> Bool -> IO ()
check _   True  = return ()
check buf False = errorWithoutStackTrace ("buffer invariant violation: " ++ summaryBuffer buf)
|
rahulmutt/ghcvm
|
libraries/base/GHC/IO/Buffer.hs
|
bsd-3-clause
| 8,093
| 7
| 16
| 1,788
| 1,928
| 1,046
| 882
| -1
| -1
|
#!/usr/bin/env stack -- stack --install-ghc --resolver lts-5.13 runghc --package text --package foundation

-- Micro-benchmarks comparing foundation's 'F.breakElem' against text's
-- 'T.break' on the same English input: best case ('S' near the start),
-- middle hit ('2'), and no match ('z').
--
-- NOTE(review): the two sides are not measured identically -- the
-- foundation side uses 'whnf' while the text side uses 'nf'; confirm
-- this asymmetry is intentional before comparing absolute numbers.
module Main where

import qualified Foundation as F
import qualified Foundation.Collection as F
import Criterion.Main
import qualified Data.Text as T

-- English sample; break targets: 'S' (start), '2' (middle), 'z' (absent).
textEnglish = "Set in the year 0 F.E. (\"Foundation Era\"), The Psychohistorians opens on Trantor, the capital of the 12,000-year-old Galactic Empire. Though the empire appears stable and powerful, it is slowly decaying in ways that parallel the decline of the Western Roman Empire. Hari Seldon, a mathematician and psychologist, has developed psychohistory, a new field of science and psychology that equates all possibilities in large societies to mathematics, allowing for the prediction of future events."

-- Japanese sample; only referenced by the commented-out group below.
textJapanese = "数学者ハリ・セルダンは、膨大な集団の行動を予測する心理歴史学を作りあげ発展させることで、銀河帝国が近いうちに崩壊することを予言する[1]。セルダンは、帝国崩壊後に3万年続くはずの暗黒時代を、あらゆる知識を保存することで千年に縮めようとし、知識の集大成となる銀河百科事典 (Encyclopedia Galactica) を編纂するグループ「ファウンデーション」をつくったが、帝国崩壊を公言し平和を乱したという罪で裁判にかけられ、グループは銀河系辺縁部にある資源の乏しい無人惑星ターミナスへ追放されることになった。しかし、この追放劇すらもセルダンの計画に予定されていた事柄であった。病で死期をさとっていたセルダンは、己の仕事が終わったことを確信する。"

main = defaultMain
  [ bgroup "break"
    [ bgroup "#english-start"
      [ bench "foundation" $ whnf (fst . F.breakElem 'S') (F.fromList textEnglish :: F.String)
      , bench "text" $ nf (fst . T.break (== 'S')) (T.pack textEnglish)
      ]
    , bgroup "#english-middle"
      [ bench "foundation" $ whnf (fst . F.breakElem '2') (F.fromList textEnglish :: F.String)
      , bench "text" $ nf (fst . T.break (== '2')) (T.pack textEnglish)
      ]
    , bgroup "#english-notfound"
      [ bench "foundation" $ whnf (fst . F.breakElem 'z') (F.fromList textEnglish :: F.String)
      , bench "text" $ nf (fst . T.break (== 'z')) (T.pack textEnglish)
      ]
{-
    , bgroup "#japanese"
      [ bench "foundation" $ whnf (fst . F.breakElem '帝') (F.fromList textJapanese :: F.String)
      , bench "text" $ whnf (fst . T.break (== '帝')) (T.pack textJapanese)
      ]
-}
    ]
  ]
|
vincenthz/hs-foundation
|
foundation/benchs/compare-libs/Text.hs
|
bsd-3-clause
| 2,678
| 0
| 16
| 437
| 320
| 171
| 149
| 18
| 1
|
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
-- | A module containing common structures for both AST and Syntax.
module Language.Salt.Surface.Common(
Assoc(..),
Fixity(..),
FieldName(..),
BuilderKind(..),
ContextKind(..),
TruthKind(..),
AbstractionKind(..),
Visibility(..),
Literal(..),
BasicPosition(..),
Position,
literalDot,
getNodeID,
) where
import Control.Monad.Positions
import Control.Monad.Symbols
import Control.Monad.State
import Data.Array
import Data.ByteString(ByteString)
import Data.Hashable
import Data.Ratio
import Data.Position.BasicPosition
import Data.PositionElement
import Data.Symbol
import Data.Word
import Language.Salt.Format
import Prelude hiding (concat)
import Text.Format
import Text.XML.Expat.Pickle
import Text.XML.Expat.Tree(NodeG)
-- | Concrete source-position type used throughout the surface language.
type Position = BasicPosition

-- | A newtype to discriminate field names from symbols.
newtype FieldName = FieldName { fieldSym :: Symbol }
  deriving (Ord, Eq)

-- | Associativity for syntax directives.
data Assoc = LeftAssoc | RightAssoc | NonAssoc
  deriving (Ord, Eq, Enum, Show)

-- | Fixity for syntax directives.
data Fixity =
    -- | Operator written before its argument.
    Prefix
    -- | Operator written between its arguments.
  | Infix {
      -- | The associativity of the infix operator.
      infixAssoc :: !Assoc
    }
    -- | Operator written after its argument.
  | Postfix
  deriving (Ord, Eq, Show)

-- | Scope classes.  These define the exact semantics of a scoped
-- entity declaration.
data BuilderKind =
    -- | Signatures are similar to SML signatures, or Fortress traits.
    -- A signature declares a type, which may not be directly
    -- instantiated, and whose mutable fields are not automatically
    -- inherited by scopes which declare the class as a supertype.
    -- Multiple inheritence of signatures is allowed.
    Signature
    -- | Interfaces are classes that are only allowed to contain
    -- function declarations and/or definitions.  They behave exactly
    -- like Java interfaces (as of JDK 8).
  | Interface
    -- | Modules are similar to SML modules.  Unlike SML, a module may
    -- be used at runtime.  Modules declare a named instance of an
    -- anonymous record type
    --
    -- A module declaration is tantamount to declaring a structure
    -- type anonymously, followed by a single instance of the type,
    -- then initializing the instance.
    --
    -- Put in terms of classes, modules are like declaring a class
    -- anonymously, then instantiating it.
  | Module
    -- | Classes are similar to Java classes.  A Class defines a type
    -- which may be directly instantiated, and whose mutable fields
    -- are automatically inherited by scopes which declare the class
    -- as a supertype.  Multiple inheritence of classes is not
    -- allowed.
  | Class
    -- | NOTE(review): undocumented in the original; presumably a
    -- typeclass declaration -- confirm against the language spec.
  | Typeclass
    -- | NOTE(review): undocumented in the original; presumably a
    -- typeclass instance declaration -- confirm against the spec.
  | Instance
  deriving (Ord, Eq, Enum)
-- | Context kind for definition.  This indicates the context to
-- which a given definition is relative.
data ContextKind =
    -- | Static definition.  These are relative only to the global
    -- context (meaning they can access other statically-defined
    -- definitions by name).
    Static
    -- | Local definitions.  These are relative to a given function
    -- body, and can access other local definitions in that function
    -- by name.
  | Local
    -- | Object definitions.  These are relative to a given object,
    -- and can access other definitions relative to that object by
    -- name.
  | Object
  deriving (Ord, Eq, Enum)

-- | Truth classes.  These define the exact semantics of a truth
-- declaration.
data TruthKind =
    -- | Theorems are propositions that are added to the proof
    -- environments of every proof obligation generated from within
    -- this scope.  Abstract declarations of theorems do not need to
    -- be proven; however, concrete theorems will generate a proof
    -- obligation.
    Theorem
    -- | Invariants are propositions that are added automatically to
    -- the pre- and post-conditions of every state transition in the
    -- current scope.  They do not need to be proven.
  | Invariant
    -- | NOTE(review): undocumented in the original; presumably a
    -- proposition assumed true without generating a proof obligation
    -- -- confirm against the language semantics.
  | Axiom
  deriving (Ord, Eq, Enum)

-- | The kind of a binding abstraction.
data AbstractionKind =
    -- | A function abstraction.
    Lambda
    -- | A forall proposition.
  | Forall
    -- | An exists proposition.
  | Exists
  deriving (Ord, Eq, Enum)

-- | Visibilities.  Controls when accesses to a given element are legal.
data Visibility =
    -- | Hidden access.  Used internally to add synthetic elements.
    Hidden
    -- | Private access.  The given element may only be accessed
    -- within the scope in which it is defined.
  | Private
    -- | Protected access.  The given element may only be accessed
    -- within the scope in which it is defined, or in any derived
    -- scope derived from that one.
  | Protected
    -- | Public access.  The given element may be accessed anywhere.
  | Public
  deriving (Ord, Eq, Enum, Ix)
-- | A literal value.
--
-- Note: the 'Eq', 'Ord', and 'Hashable' instances defined later in
-- this module compare only the payload fields; the source positions
-- are deliberately ignored.
data Literal =
    -- | A number literal.
    Num {
      -- | The number value.
      numVal :: !Rational,
      -- | The position in source from which this arises.
      numPos :: !Position
    }
    -- | A string literal.
  | Str {
      -- | The string value.
      strVal :: !ByteString,
      -- | The position in source from which this arises.
      strPos :: !Position
    }
    -- | A Character literal.
  | Char {
      -- | The character value.
      charVal :: !Char,
      -- | The position in source from which this arises.
      charPos :: !Position
    }
    -- | A unit value.
  | Unit {
      -- | The position in source from which this arises.
      unitPos :: !Position
    }
-- | Produce a fresh graphviz node name (@"node0"@, @"node1"@, ...)
-- from the counter threaded through the state monad, bumping the
-- counter strictly for the next caller.
getNodeID :: Monad m => StateT Word m String
getNodeID =
  do
    current <- get
    put $! current + 1
    return ("node" ++ show current)
-- Every literal constructor carries exactly one position field.
instance PositionElement Literal where
  position Num { numPos = pos } = pos
  position Str { strPos = pos } = pos
  position Char { charPos = pos } = pos
  position Unit { unitPos = pos } = pos

-- | Render a literal as a graphviz record node.  Returns the node
-- definition 'Doc' (terminated by @;@) together with the freshly
-- allocated node name from 'getNodeID', so callers can attach edges.
literalDot :: Monad m => Literal -> StateT Word m (Doc, String)
literalDot Num { numVal = num } =
  do
    nodeid <- getNodeID
    return (dquoted (string nodeid) <+>
            brackets (string "label = " <>
                      dquoted (string "Num | " <>
                               string (show num)) <!>
                      string "shape = \"record\"") <> char ';', nodeid)
literalDot Str { strVal = str } =
  do
    nodeid <- getNodeID
    -- the \\\" sequences emit escaped quotes around the payload in the
    -- generated dot label
    return (dquoted (string nodeid) <+>
            brackets (string "label = " <>
                      dquoted (string "Str | " <>
                               string "\\\"" <> bytestring str <>
                               string "\\\"") <!>
                      string "shape = \"record\"") <> char ';', nodeid)
literalDot Char { charVal = chr } =
  do
    nodeid <- getNodeID
    return (dquoted (string nodeid) <+>
            brackets (string "label = " <>
                      dquoted (string "Char | " <>
                               squoted (char chr)) <!>
                      string "shape = \"record\"") <> char ';', nodeid)
literalDot Unit {} =
  do
    nodeid <- getNodeID
    return (dquoted (string nodeid) <+>
            brackets (string "label = " <> dquoted (string "Unit") <!>
                      string "shape = \"record\"") <> char ';', nodeid)
instance Hashable Assoc where
  hashWithSalt s = hashWithSalt s . fromEnum

-- 'Fixity' is not an 'Enum' (because 'Infix' carries a payload), so
-- it mixes an explicit per-constructor tag into the hash.
instance Hashable Fixity where
  hashWithSalt s Prefix = hashWithSalt s (0 :: Int)
  hashWithSalt s (Infix assoc) =
    s `hashWithSalt` (1 :: Int) `hashWithSalt` assoc
  hashWithSalt s Postfix = hashWithSalt s (2 :: Int)

instance Hashable FieldName where
  hashWithSalt s FieldName { fieldSym = sym } = s `hashWithSalt` sym

instance MonadSymbols m => FormatM m FieldName where
  formatM = formatM . fieldSym

instance Format Assoc where format = string . show
instance Format Fixity where format = string . show

-- Field names pickle exactly like the symbol they wrap.
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
         XmlPickler [NodeG [] tag text] FieldName where
  xpickle = xpWrap (FieldName, fieldSym) xpickle

instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
         XmlPickler [(tag, text)] FieldName where
  xpickle = xpWrap (FieldName, fieldSym) xpickle

-- Equality on literals compares payloads only; positions are ignored.
instance Eq Literal where
  Num { numVal = num1 } == Num { numVal = num2 } = num1 == num2
  Str { strVal = str1 } == Str { strVal = str2 } = str1 == str2
  Char { charVal = chr1 } == Char { charVal = chr2 } = chr1 == chr2
  Unit {} == Unit {} = True
  _ == _ = False

-- Ordering also ignores positions.  Note that the constructor order is
-- the REVERSE of the declaration order: Unit < Char < Str < Num.
instance Ord Literal where
  compare Num { numVal = num1 } Num { numVal = num2 } = compare num1 num2
  compare Num {} _ = GT
  compare _ Num {} = LT
  compare Str { strVal = str1 } Str { strVal = str2 } = compare str1 str2
  compare Str {} _ = GT
  compare _ Str {} = LT
  compare Char { charVal = chr1 } Char { charVal = chr2 } = compare chr1 chr2
  compare Char {} _ = GT
  compare _ Char {} = LT
  compare Unit {} Unit {} = EQ

-- Hashing mixes a per-constructor tag with the payload; positions are
-- ignored, consistent with 'Eq' above.
instance Hashable Literal where
  hashWithSalt s Num { numVal = num } =
    s `hashWithSalt` (1 :: Int) `hashWithSalt` num
  hashWithSalt s Str { strVal = str } =
    s `hashWithSalt` (2 :: Int) `hashWithSalt` str
  hashWithSalt s Char { charVal = chr } =
    s `hashWithSalt` (3 :: Int) `hashWithSalt` chr
  hashWithSalt s Unit {} = s `hashWithSalt` (4 :: Int)
-- Pretty/debug formatting: each literal is rendered as a compound
-- application with its value and its position (resolved through
-- 'MonadPositions').
instance MonadPositions m => FormatM m Literal where
  formatM Num { numVal = num, numPos = pos } =
    do
      posdoc <- formatM pos
      return (compoundApplyDoc (string "Num")
                               [(string "val", string (show num)),
                                (string "pos", posdoc)])
  formatM Str { strVal = str, strPos = pos } =
    do
      posdoc <- formatM pos
      return (compoundApplyDoc (string "Str")
                               [(string "val", bytestring str),
                                (string "pos", posdoc)])
  formatM Char { charVal = chr, charPos = pos } =
    do
      posdoc <- formatM pos
      return (compoundApplyDoc (string "Char")
                               [(string "val", char chr),
                                (string "pos", posdoc)])
  formatM Unit { unitPos = pos } =
    do
      posdoc <- formatM pos
      return (compoundApplyDoc (string "Unit") [(string "pos", posdoc)])
-- | Pickler for 'Num' literals: the rational is split into separate
-- @numerator@/@denominator@ attributes of a @Num@ element.  Each of
-- these per-constructor picklers is only ever applied to its own
-- constructor ('xpAlt' in the instance below dispatches by
-- constructor), so the 'error' branch of @revfunc@ is unreachable in
-- normal use.
numPickler :: (GenericXMLString tag, Show tag,
               GenericXMLString text, Show text) =>
              PU [NodeG [] tag text] Literal
numPickler =
  let
    revfunc Num { numVal = num, numPos = pos } =
      ((numerator num, denominator num), pos)
    revfunc _ = error $! "Can't convert"
  in
    xpWrap (\((numer, denom), pos) -> Num { numVal = numer % denom,
                                            numPos = pos }, revfunc)
           (xpElem (gxFromString "Num")
                   (xpPair (xpAttr (gxFromString "numerator") xpPrim)
                           (xpAttr (gxFromString "denominator") xpPrim))
                   (xpElemNodes (gxFromString "pos") xpickle))

-- | Pickler for 'Str' literals: the payload travels as the text
-- content of a @value@ child element, converted with
-- gxTo\/FromByteString.
strPickler :: (GenericXMLString tag, Show tag,
               GenericXMLString text, Show text) =>
              PU [NodeG [] tag text] Literal
strPickler =
  let
    revfunc Str { strVal = str, strPos = pos } = (gxFromByteString str, pos)
    revfunc _ = error $! "Can't convert"
  in
    xpWrap (\(str, pos) -> Str { strVal = gxToByteString str,
                                 strPos = pos }, revfunc)
           (xpElemNodes (gxFromString "Str")
                        (xpPair (xpElemNodes (gxFromString "value")
                                             (xpContent xpText0))
                                (xpElemNodes (gxFromString "pos") xpickle)))

-- | Pickler for 'Char' literals: the character is a @value@ attribute.
charPickler :: (GenericXMLString tag, Show tag,
                GenericXMLString text, Show text) =>
               PU [NodeG [] tag text] Literal
charPickler =
  let
    revfunc Char { charVal = chr, charPos = pos } = (chr, pos)
    revfunc _ = error $! "Can't convert"
  in
    xpWrap (\(chr, pos) -> Char { charVal = chr, charPos = pos }, revfunc)
           (xpElem (gxFromString "Char")
                   (xpAttr (gxFromString "value") xpPrim)
                   (xpElemNodes (gxFromString "pos") xpickle))

-- | Pickler for 'Unit' literals: only the position is stored.
unitPickler :: (GenericXMLString tag, Show tag,
                GenericXMLString text, Show text) =>
               PU [NodeG [] tag text] Literal
unitPickler = xpWrap (Unit, unitPos)
                     (xpElemNodes (gxFromString "Unit")
                                  (xpElemNodes (gxFromString "pos") xpickle))

instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
         XmlPickler [NodeG [] tag text] Literal where
  xpickle =
    let
      -- dispatch index; must agree with the order of the pickler list
      picker Num {} = 0
      picker Str {} = 1
      picker Char {} = 2
      picker Unit {} = 3
    in
      xpAlt picker [numPickler, strPickler, charPickler, unitPickler]
instance Hashable BuilderKind where
  hashWithSalt s = hashWithSalt s . fromEnum

instance Hashable ContextKind where
  hashWithSalt s = hashWithSalt s . fromEnum

instance Hashable AbstractionKind where
  hashWithSalt s = hashWithSalt s . fromEnum

-- All of the attribute picklers below follow the same scheme: each
-- constructor is serialized as a fixed-value attribute, and 'xpAlt'
-- keyed on 'fromEnum' selects the matching alternative -- so the order
-- of each list must match the constructor declaration order.
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
         XmlPickler (Attributes tag text) BuilderKind where
  xpickle = xpAlt fromEnum
                  [xpWrap (const Signature, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Signature")),
                   xpWrap (const Interface, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Interface")),
                   xpWrap (const Module, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Module")),
                   xpWrap (const Class, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Class")),
                   xpWrap (const Typeclass, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Typeclass")),
                   xpWrap (const Instance, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Instance"))]

instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
         XmlPickler (Attributes tag text) ContextKind where
  xpickle = xpAlt fromEnum
                  [xpWrap (const Static, const ())
                          (xpAttrFixed (gxFromString "context")
                                       (gxFromString "static")),
                   xpWrap (const Local, const ())
                          (xpAttrFixed (gxFromString "context")
                                       (gxFromString "local")),
                   xpWrap (const Object, const ())
                          (xpAttrFixed (gxFromString "context")
                                       (gxFromString "object"))]

-- 'Show' here yields the surface-syntax keyword (not a Haskell-style
-- rendering); the 'Format' instances reuse it.
instance Show BuilderKind where
  show Signature = "signature"
  show Interface = "interface"
  show Module = "module"
  show Class = "class"
  show Typeclass = "typeclass"
  show Instance = "instance"

instance Show ContextKind where
  show Static = "static"
  show Local = "local"
  show Object = "object"

instance Format BuilderKind where format = string . show
instance Format ContextKind where format = string . show

instance Hashable TruthKind where
  hashWithSalt s = hashWithSalt s . fromEnum

instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
         XmlPickler (Attributes tag text) TruthKind where
  xpickle = xpAlt fromEnum
                  [xpWrap (const Theorem, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Theorem")),
                   xpWrap (const Invariant, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Invariant")),
                   xpWrap (const Axiom, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Axiom"))]

instance Show TruthKind where
  show Theorem = "theorem"
  show Invariant = "invariant"
  show Axiom = "axiom"

instance Format TruthKind where format = string . show

instance Hashable Visibility where
  hashWithSalt s = hashWithSalt s . fromEnum

instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
         XmlPickler (Attributes tag text) Visibility where
  xpickle = xpAlt fromEnum
                  [xpWrap (const Hidden, const ())
                          (xpAttrFixed (gxFromString "visibility")
                                       (gxFromString "Hidden")),
                   xpWrap (const Private, const ())
                          (xpAttrFixed (gxFromString "visibility")
                                       (gxFromString "Private")),
                   xpWrap (const Protected, const ())
                          (xpAttrFixed (gxFromString "visibility")
                                       (gxFromString "Protected")),
                   xpWrap (const Public, const ())
                          (xpAttrFixed (gxFromString "visibility")
                                       (gxFromString "Public"))]

instance Show Visibility where
  show Hidden = "hidden"
  show Private = "private"
  show Protected = "protected"
  show Public = "public"

instance Format Visibility where format = string . show

instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
         XmlPickler (Attributes tag text) AbstractionKind where
  xpickle = xpAlt fromEnum
                  [xpWrap (const Lambda, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Lambda")),
                   xpWrap (const Forall, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Forall")),
                   xpWrap (const Exists, const ())
                          (xpAttrFixed (gxFromString "kind")
                                       (gxFromString "Exists"))]

instance Show AbstractionKind where
  show Lambda = "lambda"
  show Forall = "forall"
  show Exists = "exists"

instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
         XmlPickler (Attributes tag text) Assoc where
  xpickle = xpAlt fromEnum
                  [xpWrap (const LeftAssoc, const ())
                          (xpAttrFixed (gxFromString "assoc")
                                       (gxFromString "Left")),
                   xpWrap (const RightAssoc, const ())
                          (xpAttrFixed (gxFromString "assoc")
                                       (gxFromString "Right")),
                   xpWrap (const NonAssoc, const ())
                          (xpAttrFixed (gxFromString "assoc")
                                       (gxFromString "NonAssoc"))]

-- 'Fixity' is not a plain enumeration ('Infix' carries its
-- associativity), so its pickler pairs the fixed @fixity@ attribute
-- with an optional 'Assoc' attribute: present for 'Infix', absent
-- (via @xpOption xpZero@) for 'Prefix'/'Postfix'.
instance (GenericXMLString tag, Show tag, GenericXMLString text, Show text) =>
         XmlPickler (Attributes tag text) Fixity where
  xpickle =
    let
      picker Prefix = 0
      picker (Infix _) = 1
      picker Postfix = 2
      unpackInfix (Infix a) = ((), Just a)
      unpackInfix _ = error "Can't unpack"
      packInfix ((), Just a) = Infix a
      packInfix _ = error "Need associativity for infix"
    in
      xpAlt picker [xpWrap (const Prefix, const ((), Nothing))
                           (xpPair (xpAttrFixed (gxFromString "fixity")
                                                (gxFromString "Prefix"))
                                   (xpOption xpZero)),
                    xpWrap (packInfix, unpackInfix)
                           (xpPair (xpAttrFixed (gxFromString "fixity")
                                                (gxFromString "Infix"))
                                   (xpOption xpickle)),
                    xpWrap (const Postfix, const ((), Nothing))
                           (xpPair (xpAttrFixed (gxFromString "fixity")
                                                (gxFromString "Postfix"))
                                   (xpOption xpZero))]

instance Format AbstractionKind where format = string . show
|
emc2/saltlang
|
src/salt/Language/Salt/Surface/Common.hs
|
bsd-3-clause
| 21,895
| 0
| 20
| 7,032
| 5,112
| 2,710
| 2,402
| 409
| 2
|
{-# LANGUAGE Haskell98, MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances #-}
{-# LINE 1 "Control/Monad/Reader/Class.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE UndecidableInstances #-}
-- Search for UndecidableInstances to see why this is needed
{- |
Module : Control.Monad.Reader.Class
Copyright : (c) Andy Gill 2001,
(c) Oregon Graduate Institute of Science and Technology 2001,
(c) Jeff Newbern 2003-2007,
(c) Andriy Palamarchuk 2007
License : BSD-style (see the file LICENSE)
Maintainer : libraries@haskell.org
Stability : experimental
Portability : non-portable (multi-param classes, functional dependencies)
[Computation type:] Computations which read values from a shared environment.
[Binding strategy:] Monad values are functions from the environment to a value.
The bound function is applied to the bound value, and both have access
to the shared environment.
[Useful for:] Maintaining variable bindings, or other shared environment.
[Zero and plus:] None.
[Example type:] @'Reader' [(String,Value)] a@
The 'Reader' monad (also called the Environment monad).
Represents a computation, which can read values from
a shared environment, pass values from function to function,
and execute sub-computations in a modified environment.
Using 'Reader' monad for such computations is often clearer and easier
than using the 'Control.Monad.State.State' monad.
Inspired by the paper
/Functional Programming with Overloading and Higher-Order Polymorphism/,
Mark P Jones (<http://web.cecs.pdx.edu/~mpj/>)
Advanced School of Functional Programming, 1995.
-}
module Control.Monad.Reader.Class (
MonadReader(..),
asks,
) where
import Control.Monad.Trans.Cont as Cont
import Control.Monad.Trans.Except
import Control.Monad.Trans.Error
import Control.Monad.Trans.Identity
import Control.Monad.Trans.List
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Reader (ReaderT)
import qualified Control.Monad.Trans.Reader as ReaderT (ask, local, reader)
import qualified Control.Monad.Trans.RWS.Lazy as LazyRWS (RWST, ask, local, reader)
import qualified Control.Monad.Trans.RWS.Strict as StrictRWS (RWST, ask, local, reader)
import Control.Monad.Trans.State.Lazy as Lazy
import Control.Monad.Trans.State.Strict as Strict
import Control.Monad.Trans.Writer.Lazy as Lazy
import Control.Monad.Trans.Writer.Strict as Strict
import Control.Monad.Trans.Class (lift)
import Control.Monad
import Data.Monoid
-- ----------------------------------------------------------------------------
-- class MonadReader
-- asks for the internal (non-mutable) state.
-- | See examples in "Control.Monad.Reader".
-- Note, the partially applied function type @(->) r@ is a simple reader monad.
-- See the @instance@ declaration below.
class Monad m => MonadReader r m | m -> r where
    -- A lawful instance must define 'local' plus at least one of
    -- 'ask'/'reader'; each of the latter has a default in terms of
    -- the other.
    {-# MINIMAL (ask | reader), local #-}
    -- | Retrieves the monad environment.
    ask :: m r
    ask = reader id
    -- | Executes a computation in a modified environment.
    local :: (r -> r) -- ^ The function to modify the environment.
          -> m a      -- ^ @Reader@ to run in the modified environment.
          -> m a
    -- | Retrieves a function of the current environment.
    reader :: (r -> a) -- ^ The selector function to apply to the environment.
           -> m a
    reader f = do
      r <- ask
      return (f r)

-- | Retrieves a function of the current environment.
-- A top-level synonym for 'reader', kept under its traditional name.
asks :: MonadReader r m
     => (r -> a) -- ^ The selector function to apply to the environment.
     -> m a
asks = reader
-- ----------------------------------------------------------------------------
-- The partially applied function type is a simple reader monad
-- The function type @(->) r@ is the simplest reader monad: the
-- environment is the function's own argument, so 'ask' is 'id' and
-- 'local' pre-composes the modifier.
instance MonadReader r ((->) r) where
    ask = id
    local f m = m . f
    reader = id

-- 'ReaderT' and the two RWS variants provide these operations
-- natively; just re-export them.
instance Monad m => MonadReader r (ReaderT r m) where
    ask = ReaderT.ask
    local = ReaderT.local
    reader = ReaderT.reader

instance (Monad m, Monoid w) => MonadReader r (LazyRWS.RWST r w s m) where
    ask = LazyRWS.ask
    local = LazyRWS.local
    reader = LazyRWS.reader

instance (Monad m, Monoid w) => MonadReader r (StrictRWS.RWST r w s m) where
    ask = StrictRWS.ask
    local = StrictRWS.local
    reader = StrictRWS.reader
-- ---------------------------------------------------------------------------
-- Instances for other mtl transformers
--
-- All of these instances need UndecidableInstances,
-- because they do not satisfy the coverage condition.
-- Pass-through instances: 'ask'/'reader' lift from the underlying
-- monad, and 'local' maps the environment modification through the
-- transformer with the corresponding @map*T@ function.
-- 'ContT' is the exception: it uses 'Cont.liftLocal' instead of a
-- simple map.
instance MonadReader r' m => MonadReader r' (ContT r m) where
    ask = lift ask
    local = Cont.liftLocal ask local
    reader = lift . reader

instance (Error e, MonadReader r m) => MonadReader r (ErrorT e m) where
    ask = lift ask
    local = mapErrorT . local
    reader = lift . reader

instance MonadReader r m => MonadReader r (ExceptT e m) where
    ask = lift ask
    local = mapExceptT . local
    reader = lift . reader

instance MonadReader r m => MonadReader r (IdentityT m) where
    ask = lift ask
    local = mapIdentityT . local
    reader = lift . reader

instance MonadReader r m => MonadReader r (ListT m) where
    ask = lift ask
    local = mapListT . local
    reader = lift . reader

instance MonadReader r m => MonadReader r (MaybeT m) where
    ask = lift ask
    local = mapMaybeT . local
    reader = lift . reader

instance MonadReader r m => MonadReader r (Lazy.StateT s m) where
    ask = lift ask
    local = Lazy.mapStateT . local
    reader = lift . reader

instance MonadReader r m => MonadReader r (Strict.StateT s m) where
    ask = lift ask
    local = Strict.mapStateT . local
    reader = lift . reader

instance (Monoid w, MonadReader r m) => MonadReader r (Lazy.WriterT w m) where
    ask = lift ask
    local = Lazy.mapWriterT . local
    reader = lift . reader

instance (Monoid w, MonadReader r m) => MonadReader r (Strict.WriterT w m) where
    ask = lift ask
    local = Strict.mapWriterT . local
    reader = lift . reader
|
phischu/fragnix
|
tests/packages/scotty/Control.Monad.Reader.Class.hs
|
bsd-3-clause
| 6,056
| 0
| 11
| 1,294
| 1,159
| 654
| 505
| 96
| 1
|
module Main where

-- NOTE(review): judging from the @*_start@/@*_end@ binding pairs, this
-- module looks like a fixture for HERMIT rewrite tests: each @_start@
-- binding is the input of a transformation and the matching @_end@
-- binding is the expected result.  The exact shape of every definition
-- (including the NOINLINE pragma and the commented-out one) therefore
-- appears to be significant -- confirm with the test harness before
-- restructuring anything here.

main = print "Test"

------------------------ beta reduction ---------------------
beta_reduce_start :: Int
beta_reduce_start = f 1
  where
    f = \ x -> x + 2 :: Int  -- is auto-inlined

beta_reduce_end :: Int
beta_reduce_end = 1 + 2

------------------------ case reduction -------------------------
data Foo a = Bar Int Float a | Baz String

case_reduce_start = case bar of
                      Bar x f a -> show x
                      Baz s -> s
  where {-# NOINLINE bar #-}
        bar = Bar 5 2.1 'a'

case_reduce_end = show (5 :: Int)

------------------------ adding and using a rule ----------------
--{-# NOINLINE capture_me #-}
capture_me :: Int
capture_me = 99

new_rule_start = capture_me
new_rule_end = 99 :: Int

------------------------ fold -----------------------------------
double :: Int -> Int
double x = x + x

fold_start :: Int
fold_start = 5 + 5 + 6

fold_end = double 5 + 6

------------------------ ticks in names -------------------------
ones' :: [Int]
ones' = 1 : ones'

ones'_start :: [Int]
ones'_start = 2 : ones'

ones'_end :: [Int]
ones'_end = 2 : 1 : ones'
|
ku-fpg/hermit
|
tests/prims/Test.hs
|
bsd-2-clause
| 1,128
| 0
| 8
| 264
| 276
| 156
| 120
| 29
| 2
|
{-# LANGUAGE PartialTypeSignatures, TypeFamilies, InstanceSigs #-}
-- NOTE(review): GHC testsuite fixture; its path places it under
-- partial-sigs/should_fail, i.e. the wildcard on the type-family
-- instance LHS below is EXPECTED to be rejected by the compiler.
-- Keep the code exactly as-is.
module WildcardInTypeFamilyInstanceLHS where

class Foo k where
  type Dual k :: *

instance Foo Int where
  -- the offending wildcard under test
  type Dual _ = Maybe Int
|
urbanslug/ghc
|
testsuite/tests/partial-sigs/should_fail/WildcardInTypeFamilyInstanceLHS.hs
|
bsd-3-clause
| 200
| 0
| 6
| 34
| 41
| 23
| 18
| 6
| 0
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[WwLib]{A library for the ``worker\/wrapper'' back-end to the strictness analyser}
-}
{-# LANGUAGE CPP #-}
module WwLib ( mkWwBodies, mkWWstr, mkWorkerArgs
, deepSplitProductType_maybe, findTypeShape
) where
#include "HsVersions.h"
import CoreSyn
import CoreUtils ( exprType, mkCast )
import Id ( Id, idType, mkSysLocal, idDemandInfo, setIdDemandInfo,
setIdUnfolding,
setIdInfo, idOneShotInfo, setIdOneShotInfo
)
import IdInfo ( vanillaIdInfo )
import DataCon
import Demand
import MkCore ( mkRuntimeErrorApp, aBSENT_ERROR_ID )
import MkId ( voidArgId, voidPrimId )
import TysPrim ( voidPrimTy )
import TysWiredIn ( tupleDataCon )
import Type
import Coercion hiding ( substTy, substTyVarBndr )
import FamInstEnv
import BasicTypes ( Boxity(..), OneShotInfo(..), worstOneShot )
import Literal ( absentLiteralOf )
import TyCon
import UniqSupply
import Unique
import Maybes
import Util
import Outputable
import DynFlags
import FastString
{-
************************************************************************
* *
\subsection[mkWrapperAndWorker]{@mkWrapperAndWorker@}
* *
************************************************************************
Here's an example. The original function is:
\begin{verbatim}
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
From this, we want to produce:
\begin{verbatim}
-- wrapper (an unfolding)
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
I# x# -> $wg a x# ys
-- call the worker; don't forget the type args!
-- worker
$wg :: forall a . Int# -> [a] -> a
$wg = \/\ a -> \ x# ys ->
let
x = I# x#
in
case x of -- note: body of g moved intact
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
Something we have to be careful about: Here's an example:
\begin{verbatim}
-- "f" strictness: U(P)U(P)
f (I# a) (I# b) = a +# b
g = f -- "g" strictness same as "f"
\end{verbatim}
\tr{f} will get a worker all nice and friendly-like; that's good.
{\em But we don't want a worker for \tr{g}}, even though it has the
same strictness as \tr{f}. Doing so could break laziness, at best.
Consequently, we insist that the number of strictness-info items is
exactly the same as the number of lambda-bound arguments. (This is
probably slightly paranoid, but OK in practice.) If it isn't the
same, we ``revise'' the strictness info, so that we won't propagate
the unusable strictness-info into the interfaces.
************************************************************************
* *
\subsection{The worker wrapper core}
* *
************************************************************************
@mkWwBodies@ is called when doing the worker\/wrapper split inside a module.
-}
-- | Build the worker and wrapper bodies for a function, driven by its
-- demand signature and CPR result information.  Returns Nothing when
-- the split would not be worthwhile (see the usefulness test at the
-- end of the function).
mkWwBodies :: DynFlags
           -> FamInstEnvs
           -> Type                         -- Type of original function
           -> [Demand]                     -- Strictness of original function
           -> DmdResult                    -- Info about function result
           -> [OneShotInfo]                -- One-shot-ness of the function, value args only
           -> UniqSM (Maybe ([Demand],              -- Demands for worker (value) args
                             Id -> CoreExpr,        -- Wrapper body, lacking only the worker Id
                             CoreExpr -> CoreExpr)) -- Worker body, lacking the original function rhs

-- wrap_fn_args E = \x y -> E
-- work_fn_args E = E x y
-- wrap_fn_str E = case x of { (a,b) ->
--                 case a of { (a1,a2) ->
--                 E a1 a2 b y }}
-- work_fn_str E = \a1 a2 b y ->
--                 let a = (a1,a2) in
--                 let x = (a,b) in
--                 E

mkWwBodies dflags fam_envs fun_ty demands res_info one_shots
  = do  { let arg_info = demands `zip` (one_shots ++ repeat NoOneShotInfo)
              all_one_shots = foldr (worstOneShot . snd) OneShotLam arg_info
        -- Eta-expand to the full arity, then add unboxing (strictness w/w)
        ; (wrap_args, wrap_fn_args, work_fn_args, res_ty) <- mkWWargs emptyTvSubst fun_ty arg_info
        ; (useful1, work_args, wrap_fn_str, work_fn_str) <- mkWWstr dflags fam_envs wrap_args

        -- Do CPR w/w.  See Note [Always do CPR w/w]
        ; (useful2, wrap_fn_cpr, work_fn_cpr,  cpr_res_ty)
              <- mkWWcpr (gopt Opt_CprAnal dflags) fam_envs res_ty res_info

        ; let (work_lam_args, work_call_args) = mkWorkerArgs dflags work_args all_one_shots cpr_res_ty
              worker_args_dmds = [idDemandInfo v | v <- work_call_args, isId v]
              wrapper_body = wrap_fn_args . wrap_fn_cpr . wrap_fn_str . applyToVars work_call_args . Var
              worker_body = mkLams work_lam_args. work_fn_str . work_fn_cpr . work_fn_args

        -- Only split if either the strictness part or the CPR part
        -- achieved something (and the function isn't just \void -> e)
        ; if useful1 && not (only_one_void_argument) || useful2
          then return (Just (worker_args_dmds, wrapper_body, worker_body))
          else return Nothing
        }
        -- We use an INLINE unconditionally, even if the wrapper turns out to be
        -- something trivial like
        --      fw = ...
        --      f = __inline__ (coerce T fw)
        -- The point is to propagate the coerce to f's call sites, so even though
        -- f's RHS is now trivial (size 1) we still want the __inline__ to prevent
        -- fw from being inlined into f's RHS
  where
    -- See Note [Do not split void functions]
    only_one_void_argument
      | [d] <- demands
      , Just (arg_ty1, _) <- splitFunTy_maybe fun_ty
      , isAbsDmd d && isVoidTy arg_ty1
      = True
      | otherwise
      = False
{-
Note [Always do CPR w/w]
~~~~~~~~~~~~~~~~~~~~~~~~
At one time we refrained from doing CPR w/w for thunks, on the grounds that
we might duplicate work. But that is already handled by the demand analyser,
which doesn't give the CPR property if w/w might waste work: see
Note [CPR for thunks] in DmdAnal.
And if something *has* been given the CPR property and we don't w/w, it's
a disaster, because then the enclosing function might say it has the CPR
property, but now doesn't, and there is a cascade of disaster. A good example
is Trac #5920.
************************************************************************
* *
\subsection{Making wrapper args}
* *
************************************************************************
During worker-wrapper stuff we may end up with an unlifted thing
which we want to let-bind without losing laziness. So we
add a void argument. E.g.
f = /\a -> \x y z -> E::Int# -- E does not mention x,y,z
==>
fw = /\ a -> \void -> E
f = /\ a -> \x y z -> fw realworld
We use the state-token type which generates no code.
-}
-- | Possibly add a void argument to the worker's argument list, so that
-- dropping all value arguments cannot accidentally turn the worker into
-- a thunk (or make it return an unboxed value with no lambda).
-- Returns the binders for the worker's definition and the arguments to
-- pass at the wrapper's call site.
mkWorkerArgs :: DynFlags -> [Var]
             -> OneShotInfo  -- Whether all arguments are one-shot
             -> Type         -- Type of body
             -> ([Var],      -- Lambda bound args
                 [Var])      -- Args at call site
mkWorkerArgs dflags args all_one_shot res_ty
  | any isId args || not needsAValueLambda = (args, args)
  | otherwise                              = (args ++ [lam_arg], args ++ [voidPrimId])
  where
    -- A value lambda is needed unless -ffun-to-thunk permits thunks and
    -- the result type is lifted.
    -- see Note [Protecting the last value argument]
    needsAValueLambda
      = isUnLiftedType res_ty || not (gopt Opt_FunToThunk dflags)

    -- see Note [All One-Shot Arguments of a Worker]
    lam_arg = setIdOneShotInfo voidArgId all_one_shot
{-
Note [Protecting the last value argument]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the user writes (\_ -> E), they might be intentionally disallowing
the sharing of E. Since absence analysis and worker-wrapper are keen
to remove such unused arguments, we add in a void argument to prevent
the function from becoming a thunk.
The user can avoid adding the void argument with the -ffun-to-thunk
flag. However, this can create sharing, which may be bad in two ways. 1) It can
create a space leak. 2) It can prevent inlining *under a lambda*. If w/w
removes the last argument from a function f, then f now looks like a thunk, and
so f can't be inlined *under a lambda*.
Note [All One-Shot Arguments of a Worker]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Sometimes, derived join-points are just lambda-lifted thunks, whose
only argument is of the unit type and is never used. This might
interfere with the absence analysis, basing on which results these
never-used arguments are eliminated in the worker. The additional
argument `all_one_shot` of `mkWorkerArgs` is to prevent this.
Example. Suppose we have
foo = \p(one-shot) q(one-shot). y + 3
Then we drop the unused args to give
foo = \pq. $wfoo void#
$wfoo = \void(one-shot). y + 3
But suppose foo didn't have all one-shot args:
foo = \p(not-one-shot) q(one-shot). expensive y + 3
Then we drop the unused args to give
foo = \pq. $wfoo void#
$wfoo = \void(not-one-shot). y + 3
If we made the void-arg one-shot we might inline an expensive
computation for y, which would be terrible!
************************************************************************
* *
\subsection{Coercion stuff}
* *
************************************************************************
We really want to "look through" coerces.
Reason: I've seen this situation:
let f = coerce T (\s -> E)
in \x -> case x of
p -> coerce T' f
q -> \s -> E2
r -> coerce T' f
If only we w/w'd f, we'd get
let f = coerce T (\s -> fw s)
fw = \s -> E
in ...
Now we'll inline f to get
let fw = \s -> E
in \x -> case x of
p -> fw
q -> \s -> E2
r -> fw
Now we'll see that fw has arity 1, and will arity expand
the \x to get what we want.
-}
-- mkWWargs just does eta expansion; it
-- is driven off the function type and arity.
-- It chomps bites off foralls, arrows, and newtypes,
-- and keeps repeating that until it has satisfied the supplied arity
mkWWargs :: TvSubst            -- Freshening substitution to apply to the type
                               -- See Note [Freshen type variables]
         -> Type               -- The type of the function
         -> [(Demand,OneShotInfo)]  -- Demands and one-shot info for value arguments
         -> UniqSM ([Var],              -- Wrapper args
                    CoreExpr -> CoreExpr,       -- Wrapper fn
                    CoreExpr -> CoreExpr,       -- Worker fn
                    Type)                       -- Type of wrapper body

mkWWargs subst fun_ty arg_info
  -- No more demands: we are done
  | null arg_info
  = return ([], id, id, substTy subst fun_ty)

  -- Value argument: make a wrapper binder carrying the demand/one-shot
  -- info, and eta-expand through the arrow
  | ((dmd,one_shot):arg_info') <- arg_info
  , Just (arg_ty, fun_ty') <- splitFunTy_maybe fun_ty
  = do  { uniq <- getUniqueM
        ; let arg_ty' = substTy subst arg_ty
              id = mk_wrap_arg uniq arg_ty' dmd one_shot
        ; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
              <- mkWWargs subst fun_ty' arg_info'
        ; return (id : wrap_args,
                  Lam id . wrap_fn_args,
                  work_fn_args . (`App` varToCoreExpr id),
                  res_ty) }

  -- Type argument: eta-expand through the forall
  | Just (tv, fun_ty') <- splitForAllTy_maybe fun_ty
  = do  { let (subst', tv') = substTyVarBndr subst tv
                -- This substTyVarBndr clones the type variable when necessary
                -- See Note [Freshen type variables]
        ; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
             <- mkWWargs subst' fun_ty' arg_info
        ; return (tv' : wrap_args,
                  Lam tv' . wrap_fn_args,
                  work_fn_args . (`App` Type (mkTyVarTy tv')),
                  res_ty) }

  | Just (co, rep_ty) <- topNormaliseNewType_maybe fun_ty
        -- The newtype case is for when the function has
        -- a newtype after the arrow (rare)
        --
        -- It's also important when we have a function returning (say) a pair
        -- wrapped in a newtype, at least if CPR analysis can look
        -- through such newtypes, which it probably can since they are
        -- simply coerces.
  = do { (wrap_args, wrap_fn_args, work_fn_args, res_ty)
            <-  mkWWargs subst rep_ty arg_info
       ; return (wrap_args,
                 \e -> Cast (wrap_fn_args e) (mkSymCo co),
                 \e -> work_fn_args (Cast e co),
                 res_ty) }

  | otherwise
  = WARN( True, ppr fun_ty )                    -- Should not happen: if there is a demand
    return ([], id, id, substTy subst fun_ty)   -- then there should be a function arrow
-- | Apply an expression to a list of variables (type and value args alike).
applyToVars :: [Var] -> CoreExpr -> CoreExpr
applyToVars = flip mkVarApps
-- | Make a fresh wrapper binder (named "w"), carrying the given demand
-- and one-shot information.
mk_wrap_arg :: Unique -> Type -> Demand -> OneShotInfo -> Id
mk_wrap_arg uniq ty dmd one_shot
  = setIdOneShotInfo (setIdDemandInfo (mkSysLocal (fsLit "w") uniq ty) dmd) one_shot
{-
Note [Freshen type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we do a worker/wrapper split, we must not use shadowed names,
else we'll get
f = /\ a /\a. fw a a
which is obviously wrong. Type variables can in principle shadow,
within a type (e.g. forall a. a -> forall a. a->a). But type
variables *are* mentioned in <blah>, so we must substitute.
That's why we carry the TvSubst through mkWWargs
************************************************************************
* *
\subsection{Strictness stuff}
* *
************************************************************************
-}
-- | The strictness (unboxing) half of the worker/wrapper split:
-- process each wrapper argument in turn and compose the per-argument
-- unboxing (wrapper side) and reboxing (worker side) transformations.
mkWWstr :: DynFlags
        -> FamInstEnvs
        -> [Var]                -- Wrapper args; have their demand info on them
                                -- *Includes type variables*
        -> UniqSM (Bool,        -- Is this useful
                   [Var],       -- Worker args
                   CoreExpr -> CoreExpr,  -- Wrapper body, lacking the worker call
                                          -- and without its lambdas
                                          -- This fn adds the unboxing
                   CoreExpr -> CoreExpr)  -- Worker body, lacking the original body of the function,
                                          -- and lacking its lambdas.
                                          -- This fn does the reboxing
mkWWstr _ _ [] = return (False, [], nop_fn, nop_fn)
mkWWstr dflags fam_envs (arg : args)
  = do { (usefulHd, argsHd, wrapHd, workHd) <- mkWWstr_one dflags fam_envs arg
       ; (usefulTl, argsTl, wrapTl, workTl) <- mkWWstr dflags fam_envs args
       ; return ( usefulHd || usefulTl
                , argsHd ++ argsTl
                , wrapHd . wrapTl
                , workHd . workTl ) }
{-
Note [Unpacking arguments with product and polymorphic demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The argument is unpacked in a case if it has a product type and has a
strict *and* used demand put on it. I.e., arguments, with demands such
as the following ones:
<S,U(U, L)>
<S(L,S),U>
will be unpacked, but
<S,U> or <B,U>
will not, because the pieces aren't used. This is quite important otherwise
we end up unpacking massive tuples passed to the bottoming function. Example:
f :: ((Int,Int) -> String) -> (Int,Int) -> a
f g pr = error (g pr)
main = print (f fst (1, error "no"))
Does 'main' print "error 1" or "error no"? We don't really want 'f'
to unbox its second argument. This actually happened in GHC's own
source code, in Packages.applyPackageFlag, which ended up un-boxing
the enormous DynFlags tuple, and being strict in the
as-yet-un-filled-in pkgState files.
-}
----------------------
-- mkWWstr_one wrap_arg = (useful, work_args, wrap_fn, work_fn)
--   *  wrap_fn assumes wrap_arg is in scope,
--        brings into scope work_args (via cases)
--   * work_fn assumes work_args are in scope, and
--        brings into scope wrap_arg (via lets)
mkWWstr_one :: DynFlags -> FamInstEnvs -> Var
            -> UniqSM (Bool, [Var], CoreExpr -> CoreExpr, CoreExpr -> CoreExpr)
mkWWstr_one dflags fam_envs arg
  -- Type variables are passed through unchanged
  | isTyVar arg
  = return (False, [arg], nop_fn, nop_fn)

  -- See Note [Worker-wrapper for bottoming functions]
  | isAbsDmd dmd
  , Just work_fn <- mk_absent_let dflags arg
     -- Absent case.  We can't always handle absence for arbitrary
     -- unlifted types, so we need to choose just the cases we can
     --- (that's what mk_absent_let does)
  = return (True, [], nop_fn, work_fn)

  -- See Note [Worthy functions for Worker-Wrapper split]
  | isSeqDmd dmd  -- `seq` demand; evaluate in wrapper in the hope
                  -- of dropping seqs in the worker
  = let arg_w_unf = arg `setIdUnfolding` evaldUnfolding
        -- Tell the worker arg that it's sure to be evaluated
        -- so that internal seqs can be dropped
    in return (True, [arg_w_unf], mk_seq_case arg, nop_fn)
        -- Pass the arg, anyway, even if it is in theory discarded
        -- Consider
        --      f x y = x `seq` y
        -- x gets a (Eval (Poly Abs)) demand, but if we fail to pass it to the worker
        -- we ABSOLUTELY MUST record that x is evaluated in the wrapper.
        -- Something like:
        --      f x y = x `seq` fw y
        --      fw y = let x{Evald} = error "oops" in (x `seq` y)
        -- If we don't pin on the "Evald" flag, the seq doesn't disappear, and
        -- we end up evaluating the absent thunk.
        -- But the Evald flag is pretty weird, and I worry that it might disappear
        -- during simplification, so for now I've just nuked this whole case

  -- Strict product demand: unbox in the wrapper, rebox in the worker,
  -- and recurse on the components
  | isStrictDmd dmd
  , Just cs <- splitProdDmd_maybe dmd
      -- See Note [Unpacking arguments with product and polymorphic demands]
  , Just (data_con, inst_tys, inst_con_arg_tys, co)
             <- deepSplitProductType_maybe fam_envs (idType arg)
  , cs `equalLength` inst_con_arg_tys
      -- See Note [mkWWstr and unsafeCoerce]
  = do { (uniq1:uniqs) <- getUniquesM
        ; let   unpk_args      = zipWith mk_ww_local uniqs inst_con_arg_tys
                unpk_args_w_ds = zipWithEqual "mkWWstr" set_worker_arg_info unpk_args cs
                unbox_fn       = mkUnpackCase (Var arg) co uniq1
                                              data_con unpk_args
                rebox_fn       = Let (NonRec arg con_app)
                con_app        = mkConApp2 data_con inst_tys unpk_args `mkCast` mkSymCo co
         ; (_, worker_args, wrap_fn, work_fn) <- mkWWstr dflags fam_envs unpk_args_w_ds
         ; return (True, worker_args, unbox_fn . wrap_fn, work_fn . rebox_fn) }
                           -- Don't pass the arg, rebox instead

  | otherwise   -- Other cases
  = return (False, [arg], nop_fn, nop_fn)

  where
    dmd = idDemandInfo arg
    one_shot = idOneShotInfo arg
        -- If the wrapper argument is a one-shot lambda, then
        -- so should (all) the corresponding worker arguments be
        -- This bites when we do w/w on a case join point
    set_worker_arg_info worker_arg demand
      = worker_arg `setIdDemandInfo` demand
                   `setIdOneShotInfo` one_shot
----------------------
-- | The no-op transformation: used where no unboxing/reboxing is needed.
nop_fn :: CoreExpr -> CoreExpr
nop_fn = id
{-
Note [mkWWstr and unsafeCoerce]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
By using unsafeCoerce, it is possible to make the number of demands fail to
match the number of constructor arguments; this happened in Trac #8037.
If so, the worker/wrapper split doesn't work right and we get a Core Lint
bug. The fix here is simply to decline to do w/w if that happens.
************************************************************************
* *
             Type scrutiny that is specific to demand analysis
* *
************************************************************************
Note [Do not unpack class dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have
f :: Ord a => [a] -> Int -> a
{-# INLINABLE f #-}
and we worker/wrapper f, we'll get a worker with an INLINABLE pragma
(see Note [Worker-wrapper for INLINABLE functions] in WorkWrap), which
can still be specialised by the type-class specialiser, something like
fw :: Ord a => [a] -> Int# -> a
BUT if f is strict in the Ord dictionary, we might unpack it, to get
fw :: (a->a->Bool) -> [a] -> Int# -> a
and the type-class specialiser can't specialise that. An example is
Trac #6056.
Moreover, dictionaries can have a lot of fields, so unpacking them can
increase closure sizes.
Conclusion: don't unpack dictionaries.
-}
deepSplitProductType_maybe :: FamInstEnvs -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If    deepSplitProductType_maybe ty = Just (dc, tys, arg_tys, co)
-- then  dc @ tys (args::arg_tys) :: rep_ty
--       co :: ty ~ rep_ty
-- Looks through type families (via topNormaliseType_maybe) and only
-- succeeds for single-constructor data types that are not class
-- dictionaries.
deepSplitProductType_maybe fam_envs ty
  | let (co, ty1) = topNormaliseType_maybe fam_envs ty
                    `orElse` (mkReflCo Representational ty, ty)
  , Just (tc, tc_args) <- splitTyConApp_maybe ty1
  , Just con <- isDataProductTyCon_maybe tc
  , not (isClassTyCon tc)  -- See Note [Do not unpack class dictionaries]
  = Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitProductType_maybe _ _ = Nothing
deepSplitCprType_maybe :: FamInstEnvs -> ConTag -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If    deepSplitCprType_maybe n ty = Just (dc, tys, arg_tys, co)
-- then  dc @ tys (args::arg_tys) :: rep_ty
--       co :: ty ~ rep_ty
-- Like deepSplitProductType_maybe, but selects the constructor with the
-- given tag from a (possibly multi-constructor) data type.
deepSplitCprType_maybe fam_envs con_tag ty
  | let (co, ty1) = topNormaliseType_maybe fam_envs ty
                    `orElse` (mkReflCo Representational ty, ty)
  , Just (tc, tc_args) <- splitTyConApp_maybe ty1
  , isDataTyCon tc
  , let cons = tyConDataCons tc
  , cons `lengthAtLeast` con_tag  -- This might not be true if we import the
                                  -- type constructor via a .hs-boot file (#8743)
  , let con = cons !! (con_tag - fIRST_TAG)  -- safe: guarded by lengthAtLeast above
  = Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitCprType_maybe _ _ _ = Nothing
findTypeShape :: FamInstEnvs -> Type -> TypeShape
-- Uncover the arrow and product shape of a type
-- The data type TypeShape is defined in Demand
-- See Note [Trimming a demand to a type] in Demand
findTypeShape fam_envs ty
  -- Look through foralls
  | Just (_, ty') <- splitForAllTy_maybe ty
  = findTypeShape fam_envs ty'

  -- Single-constructor data type: a product shape over its fields
  | Just (tc, tc_args)  <- splitTyConApp_maybe ty
  , Just con <- isDataProductTyCon_maybe tc
  = TsProd (map (findTypeShape fam_envs) $ dataConInstArgTys con tc_args)

  -- Function: only the result shape matters
  | Just (_, res) <- splitFunTy_maybe ty
  = TsFun (findTypeShape fam_envs res)

  -- Look through newtypes/type families
  | Just (_, ty') <- topNormaliseType_maybe fam_envs ty
  = findTypeShape fam_envs ty'

  | otherwise
  = TsUnk
{-
************************************************************************
* *
\subsection{CPR stuff}
* *
************************************************************************
@mkWWcpr@ takes the worker/wrapper pair produced from the strictness
info and adds in the CPR transformation. The worker returns an
unboxed tuple containing non-CPR components. The wrapper takes this
tuple and re-produces the correct structured output.
The non-CPR results appear ordered in the unboxed tuple as if by a
left-to-right traversal of the result structure.
-}
-- | The CPR half of the worker/wrapper split: if the result has the CPR
-- property, make the worker return an unboxed tuple and the wrapper
-- rebuild the boxed result.
mkWWcpr :: Bool
        -> FamInstEnvs
        -> Type                              -- function body type
        -> DmdResult                         -- CPR analysis results
        -> UniqSM (Bool,                     -- Is w/w'ing useful?
                   CoreExpr -> CoreExpr,     -- New wrapper
                   CoreExpr -> CoreExpr,     -- New worker
                   Type)                     -- Type of worker's body

mkWWcpr opt_CprAnal fam_envs body_ty res
    -- CPR explicitly turned off (or in -O0)
  | not opt_CprAnal = return (False, id, id, body_ty)
    -- CPR is turned on by default for -O and O2
  | otherwise
  = case returnsCPR_maybe res of
       Nothing      -> return (False, id, id, body_ty)  -- No CPR info
       Just con_tag | Just stuff <- deepSplitCprType_maybe fam_envs con_tag body_ty
                    -> mkWWcpr_help stuff
                    |  otherwise
                       -- See Note [non-algebraic or open body type warning]
                    -> WARN( True, text "mkWWcpr: non-algebraic or open body type" <+> ppr body_ty )
                       return (False, id, id, body_ty)
-- | Build the CPR wrapper/worker transformations, given the constructor
-- (with instantiated type and field types) that the body provably
-- returns, plus a coercion from the body type to its representation.
mkWWcpr_help :: (DataCon, [Type], [Type], Coercion)
             -> UniqSM (Bool, CoreExpr -> CoreExpr, CoreExpr -> CoreExpr, Type)

mkWWcpr_help (data_con, inst_tys, arg_tys, co)
  | [arg_ty1] <- arg_tys
  , isUnLiftedType arg_ty1
        -- Special case when there is a single result of unlifted type
        --
        -- Wrapper:     case (..call worker..) of x -> C x
        -- Worker:      case (   ..body..    ) of C x -> x
  = do { (work_uniq : arg_uniq : _) <- getUniquesM
       ; let arg     = mk_ww_local arg_uniq arg_ty1
             con_app = mkConApp2 data_con inst_tys [arg] `mkCast` mkSymCo co

       ; return ( True
                , \ wkr_call -> Case wkr_call arg (exprType con_app) [(DEFAULT, [], con_app)]
                , \ body     -> mkUnpackCase body co work_uniq data_con [arg] (varToCoreExpr arg)
                                -- varToCoreExpr important here: arg can be a coercion
                                -- Lacking this caused Trac #10658
                , arg_ty1 ) }

  | otherwise   -- The general case
        -- Wrapper: case (..call worker..) of (# a, b #) -> C a b
        -- Worker:  case (   ...body...  ) of C a b -> (# a, b #)
  = do { (work_uniq : uniqs) <- getUniquesM
       ; let (wrap_wild : args) = zipWith mk_ww_local uniqs (ubx_tup_ty : arg_tys)
             ubx_tup_con  = tupleDataCon Unboxed (length arg_tys)
             ubx_tup_ty   = exprType ubx_tup_app
             ubx_tup_app  = mkConApp2 ubx_tup_con arg_tys args
             con_app      = mkConApp2 data_con   inst_tys args `mkCast` mkSymCo co

       ; return (True
                , \ wkr_call -> Case wkr_call wrap_wild (exprType con_app) [(DataAlt ubx_tup_con, args, con_app)]
                , \ body     -> mkUnpackCase body co work_uniq data_con args ubx_tup_app
                , ubx_tup_ty ) }
mkUnpackCase ::  CoreExpr -> Coercion -> Unique -> DataCon -> [Id] -> CoreExpr -> CoreExpr
-- (mkUnpackCase e co uniq Con args body)
--      returns
-- case e |> co of bndr { Con args -> body }

-- Float ticks outside the case; see Note [Profiling and unpacking]
mkUnpackCase (Tick tickish e) co uniq con args body
  = Tick tickish (mkUnpackCase e co uniq con args body)
mkUnpackCase scrut co uniq boxing_con unpk_args body
  = Case casted_scrut bndr (exprType body)
         [(DataAlt boxing_con, unpk_args, body)]
  where
    casted_scrut = scrut `mkCast` co
    -- Fresh case binder at the scrutinee's (representation) type
    bndr = mk_ww_local uniq (exprType casted_scrut)
{-
Note [non-algebraic or open body type warning]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are a few cases where the W/W transformation is told that something
returns a constructor, but the type at hand doesn't really match this. One
real-world example involves unsafeCoerce:
foo = IO a
foo = unsafeCoerce c_exit
foreign import ccall "c_exit" c_exit :: IO ()
Here CPR will tell you that `foo` returns a () constructor for sure, but trying
to create a worker/wrapper for type `a` obviously fails.
(This was a real example until ee8e792 in libraries/base.)
It does not seem feasible to avoid all such cases already in the analyser (and
after all, the analysis is not really wrong), so we simply do nothing here in
mkWWcpr. But we still want to emit warning with -DDEBUG, to hopefully catch
other cases where something went avoidably wrong.
Note [Profiling and unpacking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the original function looked like
f = \ x -> {-# SCC "foo" #-} E
then we want the CPR'd worker to look like
\ x -> {-# SCC "foo" #-} (case E of I# x -> x)
and definitely not
\ x -> case ({-# SCC "foo" #-} E) of I# x -> x)
This transform doesn't move work or allocation
from one cost centre to another.
Later [SDM]: presumably this is because we want the simplifier to
eliminate the case, and the scc would get in the way? I'm ok with
including the case itself in the cost centre, since it is morally
part of the function (post transformation) anyway.
************************************************************************
* *
\subsection{Utilities}
* *
************************************************************************
Note [Absent errors]
~~~~~~~~~~~~~~~~~~~~
We make a new binding for Ids that are marked absent, thus
let x = absentError "x :: Int"
The idea is that this binding will never be used; but if it
buggily is used we'll get a runtime error message.
Coping with absence for *unlifted* types is important; see, for
example, Trac #4306. For these we find a suitable literal,
using Literal.absentLiteralOf. We don't have literals for
every primitive type, so the function is partial.
[I did try the experiment of using an error thunk for unlifted
things too, relying on the simplifier to drop it as dead code,
by making absentError
(a) *not* be a bottoming Id,
(b) be "ok for speculation"
But that relies on the simplifier finding that it really
is dead code, which is fragile, and indeed failed when
profiling is on, which disables various optimisations. So
using a literal will do.]
-}
mk_absent_let :: DynFlags -> Id -> Maybe (CoreExpr -> CoreExpr)
-- Make a let-binding for an argument the worker discards as absent;
-- see Note [Absent errors].  Returns Nothing when no suitable "absent"
-- value exists for the argument's (unlifted) type.
mk_absent_let dflags arg
  -- Lifted type: bind to an absentError thunk (never forced if truly absent)
  | not (isUnLiftedType arg_ty)
  = Just (Let (NonRec arg abs_rhs))
  -- Unlifted type with a known dummy literal
  | Just tc <- tyConAppTyCon_maybe arg_ty
  , Just lit <- absentLiteralOf tc
  = Just (Let (NonRec arg (Lit lit)))
  -- Void#: use its canonical value
  | arg_ty `eqType` voidPrimTy
  = Just (Let (NonRec arg (Var voidPrimId)))
  | otherwise
  = WARN( True, ptext (sLit "No absent value for") <+> ppr arg_ty )
    Nothing
  where
    arg_ty  = idType arg
    abs_rhs = mkRuntimeErrorApp aBSENT_ERROR_ID arg_ty msg
    msg     = showSDoc dflags (ppr arg <+> ppr (idType arg))
-- | Evaluate @arg@ with a default-only case before running @body@
-- (i.e. @case arg of _ -> body@), used to realise a `seq` demand in
-- the wrapper.
mk_seq_case :: Id -> CoreExpr -> CoreExpr
mk_seq_case arg body
  = Case scrut case_bndr (exprType body) [(DEFAULT, [], body)]
  where
    scrut     = Var arg
    case_bndr = sanitiseCaseBndr arg
sanitiseCaseBndr :: Id -> Id
-- The argument we are scrutinising has the right type to be
-- a case binder, so it's convenient to re-use it for that purpose.
-- But we *must* throw away all its IdInfo.  In particular, the argument
-- will have demand info on it, and that demand info may be incorrect for
-- the case binder.  e.g.   case ww_arg of ww_arg { I# x -> ... }
-- Quite likely ww_arg isn't used in '...'.  The case may get discarded
-- if the case binder says "I'm demanded".  This happened in a situation
-- like         (x+y) `seq` ....
sanitiseCaseBndr bndr = setIdInfo bndr vanillaIdInfo
-- | Fresh worker-local binder, named "ww".
mk_ww_local :: Unique -> Type -> Id
mk_ww_local = mkSysLocal (fsLit "ww")
|
acowley/ghc
|
compiler/stranal/WwLib.hs
|
bsd-3-clause
| 32,112
| 0
| 15
| 9,621
| 3,879
| 2,115
| 1,764
| 268
| 2
|
{-# LANGUAGE LambdaCase #-}
module TcLambdaCase where
import Data.Bits ((.|.))
-- Identity on functions; the \case form (not the behaviour) is what
-- this typechecking test exercises, so keep the LambdaCase syntax.
f1 :: (a -> a) -> (a -> a)
f1 = \case g -> g
-- Doubling via \case; keeps the LambdaCase syntax under test.
f2 :: Num a => a -> a
f2 = \case n -> n + n
-- \case composed with a bit-or; restated with a local binding but the
-- same result: y = x .|. 12, then (y+y, y*y).
f3 :: Int -> (Int, Int)
f3 = \case x -> let y = x .|. 12 in (y + y, y * y)
-- Deliberately unsignatured: checks that a \case with only a wildcard
-- alternative is still accepted by type inference.
f4 = \case _ -> undefined
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_compile/TcLambdaCase.hs
|
bsd-3-clause
| 269
| 0
| 11
| 74
| 152
| 87
| 65
| 10
| 1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Todo.App (
newTodoApp,
defaultTodo,
listTodos,
addTodo,
removeTodo,
updateTodo,
findTodoById,
clearTodos,
runTodoGrammar,
incrTodoAppCounter
) where
import Control.Lens
import Control.Monad.State.Lazy
import qualified Data.Map as M
import Data.Text (Text)
import Todo.Types
-- | newTodoApp
--
-- A fresh application state: no todos stored, id counter at zero.
newTodoApp :: TodoApp
newTodoApp = TodoApp { _todoAppTodos = M.empty, _todoAppCounter = 0 }
-- | listTodos
--
-- All stored todos, in ascending key order.
listTodos :: TodoAppState [Todo]
listTodos = M.elems <$> use todoAppTodos
-- | addTodo
--
-- Store a todo under a freshly generated id (any id carried by the
-- input is overwritten) and return the stored todo.
addTodo :: Todo -> TodoAppState Todo
addTodo todo = do
  tid <- incrTodoAppCounter
  let stored = todo & todoId .~ tid
  todoAppTodos . at tid ?= stored
  pure stored
-- | removeTodo
--
-- Delete a todo by id: @Just@ the id on success, @Nothing@ if no todo
-- with that id exists.
removeTodo :: TodoId -> TodoAppState (Maybe TodoId)
removeTodo tid = do
  present <- use (todoAppTodos . to (M.member tid))
  if present
    then do
      todoAppTodos %= M.delete tid
      pure (Just tid)
    else pure Nothing
-- | updateTodo
--
-- Replace an existing todo, forcing its id to match the key.
-- @Nothing@ if no todo with that id exists.
updateTodo :: TodoId -> Todo -> TodoAppState (Maybe Todo)
updateTodo tid updated_todo = do
  existing <- findTodoById tid
  case existing of
    Nothing -> pure Nothing
    Just _  -> do
      let stored = updated_todo & todoId .~ tid
      -- the key is known to be present, so insert behaves as an update
      todoAppTodos %= M.insert tid stored
      pure (Just stored)
-- | findTodoById
--
-- Look a todo up by its id.
findTodoById :: TodoId -> TodoAppState (Maybe Todo)
findTodoById tid = use (todoAppTodos . to (M.lookup tid))
-- | clearTodos
--
-- Drop every stored todo; always reports success.
clearTodos :: TodoAppState Bool
clearTodos = True <$ (todoAppTodos .= M.empty)
-- | incrTodoAppCounter
--
-- Bump the id counter and return its new value
-- (lens '<+=' increments and yields the updated value).
incrTodoAppCounter :: TodoAppState TodoId
incrTodoAppCounter = todoAppCounter <+= 1
-- | defaultTodo
--
-- An 'Active' todo with a placeholder id of 0; the real id is assigned
-- by 'addTodo'.
defaultTodo :: Text -> Todo
defaultTodo txt = Todo 0 txt Active
-- | runTodoGrammar
--
-- Interpret one request of the todo application grammar against the
-- current state, producing the matching response.
runTodoGrammar :: TodoActionRequest -> TodoAppState TodoActionResponse
runTodoGrammar req = case req of
  ReqListTodos           -> RespListTodos        <$> listTodos
  ReqAddTodo todo        -> (RespAddTodo . Just) <$> addTodo todo
  ReqRemoveTodo tid      -> RespRemoveTodo       <$> removeTodo tid
  ReqUpdateTodo tid todo -> RespUpdateTodo       <$> updateTodo tid todo
  ReqFindTodoById tid    -> RespFindTodoById     <$> findTodoById tid
  ReqClearTodos          -> RespClearTodos       <$> clearTodos
|
adarqui/todomvc-haskell-servant-purescript
|
haskell_src/Todo/App.hs
|
mit
| 2,555
| 0
| 14
| 580
| 682
| 353
| 329
| 61
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE Strict #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Coordinate representation
--
-- I use @Float32Array@s for representing coordinates.
--
-- All points and normals are represented as homogenious 4D vectors.
--
-- If 2D coordinates are given, they are padded with zeroes
-- (`PaddedZeros` flag is added during parsing if at least one vector was padded).
--
-- All normals are inferred from coordinates.
--
-- PaddedZeros flag can be used to infer if I should try to extrude 2D objects into 3D
-- (by inspecting @height@ feature property or using default extrusion height).
--
module Model.GeoJSON.Coordinates
( PaddedZeros (..)
, bestFittingPlaneN -- , bestFittingLineN
, varX, meanX
, ObjectCentres (..)
, getScenarioStatistics
, setNormalsAndComputeIndices
) where
import Control.Monad (zipWithM, foldM)
import Data.Word
--import Data.List (fromListN)
import Numeric.DataFrame
import Numeric.DataFrame.IO
import Numeric.Dimensions
import Numeric.Dimensions.Traverse.IO
import Numeric.TypeLits
import Commons.NoReflex
import JavaScript.JSON.Types.Internal
import JavaScript.JSON.Types.Instances
import JavaScript.Array (JSArray)
import qualified JavaScript.Array as JSArray
import Unsafe.Coerce
import Model.Scenario.Statistics
import Model.Scenario.Object.Geometry
-- | Flag telling if the fromJSON parsing function appended zeros as third coordinates
--   to all vertices of a Geometry (i.e. the original input was 2D).
newtype PaddedZeros = PaddedZeros Bool
  deriving (Eq, Show)
----------------------------------------------------------------------------------------------------
-- * Getting normals and indices
----------------------------------------------------------------------------------------------------
-- | Compute a vertex index buffer for a geometry.
--
--   * Points need no index buffer -> Nothing.
--   * Lines: indices pair up consecutive vertices of each polyline
--     (segments: [0,1, 1,2, ...] per line, offset by preceding lines).
--   * Polygons: triangulated via the JS earcut library; plane normals
--     are written into the vertex buffers as a side effect.
setNormalsAndComputeIndices :: Geometry -> IO (Maybe (SomeIODataFrame Word16 '[XN 0]))
setNormalsAndComputeIndices (Points _) = pure Nothing
setNormalsAndComputeIndices (Lines xs) = pure $ case fromList $ map scalar iss of
    -- NOTE(review): the unsafeCoerce below fabricates evidence that the
    -- frame is one-dimensional; assumed safe since iss is a flat list.
    SomeDataFrame (df :: DataFrame Word16 ns) ->
      case unsafeCoerce (Evidence :: Evidence (ns ~ ns)) of
        (Evidence :: Evidence ('[n] ~ ns)) ->
           Just $ SomeIODataFrame
                (unsafeCoerce df :: IODataFrame Word16 '[n])
  where
    -- Accumulate (vertex count so far, segment indices).
    -- NOTE(review): lazy foldl; a strict foldl' would avoid thunk build-up.
    (_, iss) = foldl f (0,[]) xs
    f :: (Word16, [Word16]) -> SomeIODataFrame Float '[N 4,XN 2] -> (Word16, [Word16])
    f (n0,is) (SomeIODataFrame (_ :: IODataFrame Float ns))
      | (Evidence :: Evidence ('[4,n] ~ ns, 2 <= n))
           <- unsafeCoerce (Evidence :: Evidence (ns~ns, n <= n))
      , n <- fromIntegral $ dimVal' @n
      -- each consecutive pair of vertices forms one line segment
      = (n + n0, is ++ ([0..n-2] >>= \i -> [n0+i,n0+i+1]) )
setNormalsAndComputeIndices (Polygons ns) = do
    -- triangulate every polygon and concatenate the JS index arrays
    l0 <- js_emptyList
    l1 <- foldM (\l p -> triangulateAndSetNormal p >>= js_concat l) l0 ns
    (n, df) <- js_wratIds l1
    return $ case someIntNatVal n of
      Nothing -> Nothing
      Just (SomeIntNat (_::Proxy n)) -> Just $ SomeIODataFrame
            (unsafeCoerce df :: IODataFrame Word16 '[n])
-- | Triangulate one polygon (points + hole start indices) via earcut,
--   first computing its best-fitting-plane normal, writing that normal
--   into the second row of every vertex (in place), and projecting the
--   points into 2D for the triangulator.  Returns a JS index array.
triangulateAndSetNormal :: (SomeIODataFrame Float '[N 4, N 2, XN 3], [Int]) -> IO JSVal
triangulateAndSetNormal (SomeIODataFrame (sdf :: IODataFrame Float ns), holes)
    -- NOTE(review): unsafeCoerce fabricates the '[4,2,n] shape evidence;
    -- assumed to match the '[N 4, N 2, XN 3] wrapper -- TODO confirm.
    = case unsafeCoerce (Evidence :: Evidence (ns ~ ns, 2 <= 3)) of
    (Evidence :: Evidence ('[4,2,n] ~ ns, 2 <= n)) -> do
      df <- unsafeFreezeDataFrame sdf
      let onlyPoints = ewmap @_ @'[4] @'[n] (1!.) df  -- row 1: positions
          n = bestFittingPlaneN onlyPoints
          n' = n <+:> 0          -- pad normal to homogeneous 4D (w = 0)
          projected = project2D onlyPoints n
      jprojected <- toJSVal projected
      jholes <- toJSVal holes
      -- write the (shared) plane normal into row 2 of every vertex
      overDimIdx_ (dim @'[n]) $ \i -> copyDataFrame n' (1:!2:!i) sdf
      js_earcut (unsafeCoerce sdf) jprojected jholes >>= fromJSValUnchecked
-- Triangulate a 2D polygon with earcut. $1 is only used to compute a global
-- vertex offset: byteOffset / 32 = offset in 8-float (32-byte) vertex records.
foreign import javascript unsafe
    "var off = Math.floor($1.byteOffset / 32); $r = (self['earcut']($2, $3)).map(function(e){return off + e;});"
    js_earcut :: JSVal -> JSVal -> JSVal -> IO JSVal
-- Concatenate two JS arrays (non-destructive).
foreign import javascript unsafe
    "$1.concat($2)"
    js_concat :: JSVal -> JSVal -> IO JSVal
-- Allocate a fresh, empty JS array.
foreign import javascript unsafe
    "$r = [];"
    js_emptyList :: IO JSVal
-- Wrap an index list into a Uint16Array, also returning its length.
foreign import javascript unsafe
    "$r1 = $1.length; $r2 = new Uint16Array($1);"
    js_wratIds :: JSVal -> IO (Int, JSVal)
-- | Project homogeneous vertices onto the 2D plane orthogonal to @norm@.
--   Builds an orthonormal in-plane basis (nx, ny) from the normal and returns
--   each centralized vertex's coordinates in that basis.
project2D :: forall n . (KnownDim n, 2 <= n)
          => DataFrame Float '[4, n]
          -> Vec3f
          -> DataFrame Float '[2, n]
project2D df' norm = ewmap proj df
  where
    proj x = vec2 (unScalar $ dot x nx) (unScalar $ dot x ny)
    -- first basis vector: pick a cross product that is not near-degenerate
    -- (falls back to another axis when norm is almost parallel to (0,1,0))
    nx = let x' = vec3 0 1 0 `cross` norm
             x'' = if dot x' x' < 0.01
                   then norm `cross` vec3 0 0 1
                   else x'
         in x'' / fromScalar (normL2 x'')
    -- second basis vector completes the in-plane frame
    ny = let y' = norm `cross` nx
         in y' / fromScalar (normL2 y')
    df = centralizeNFromHom df'
-- Use this not-so-good check until we implement a proper SVD.
-- | Guess a normal of the plane roughly fitting the vertex cloud.
--   If the variance along one axis is (near) zero, that axis is the normal;
--   otherwise take the cross product of the first two centralized vertices —
--   a heuristic, not a least-squares fit (see commented-out code below).
bestFittingPlaneN :: forall n . (KnownDim n, 2 <= n) => DataFrame Float '[4, n] -> Vec3f
bestFittingPlaneN df' =
    case (\(x,y,z) -> (x > 0.001, y > 0.001, z > 0.001)) . unpackV3 $ varX df of
      (_, _, False) -> vec3 0 0 1
      (_, False, _) -> vec3 0 1 0
      (False, _, _) -> vec3 1 0 0
      (True, True, True) -> normalized ( (1:!Z !. df) `cross` (2:!Z !. df))
  where
    df = centralizeNFromHom df'
---- https://math.stackexchange.com/q/2306029
--bestFittingPlaneN :: forall n . (KnownDim n, 2 <= n) => DataFrame Float '[4, n] -> Vec3f
--bestFittingPlaneN df'
-- | -- normalized frame in 3D
-- df <- centralizeNFromHom df'
-- -- solve Ax = B
-- , b <- ewmap @Float @'[] (3:!Z !. ) df :: DataFrame Float '[n]
-- , a <- ewmap @Float @'[3] @'[n] @'[3,n]
-- (\v -> vec3 (unScalar $ 1:!Z !. v) (unScalar $ 2:!Z !. v) 1 ) df
-- , aT <- transpose a :: DataFrame Float '[n, 3]
-- , aaT <- a %* aT
-- , aX <- inverse aaT
-- , r3 <- aX %* a %* b
-- , df2d <- ewmap (\v -> vec2 (unScalar $ 1:!Z !. v) (unScalar $ 2:!Z !. v) ) df
-- = case (\(x,y,z) -> (x > 0.001, y > 0.001, z > 0.001)) . unpackV3 $ varX df of
-- (_, _, False) -> vec3 0 0 1
-- (_, False, _) -> vec3 0 1 0
-- (False, _, _) -> vec3 1 0 0
-- (True, True, True) ->
-- if abs (det aaT) > 0.001
-- then let v = vec3 (negate . unScalar $ 1 !. r3) (negate . unScalar $ 2 !. r3) 1
-- in v / fromScalar (normL2 v)
-- else bestFittingLineN df2d <+:> 0
--
--
---- Here we assume it is already normalized
--bestFittingLineN :: forall n . (KnownDim n, 2 <= n) => DataFrame Float '[2, n] -> Vec2f
--bestFittingLineN df
-- | -- solve Ax = B
-- b <- ewmap @Float @'[] @'[n] @'[n] (2:!Z !. ) df
-- , a <- ewmap @Float @'[2] @'[n] @'[2,n]
-- (\v -> vec2 (unScalar $ 1:!Z !. v) 1 ) df
-- , aT <- transpose a :: DataFrame Float '[n, 2]
-- , aaT <- a %* aT
-- , aX <- inverse aaT
-- , r2 <- aX %* a %* b
-- = let v = vec2 (unScalar $ 1 !. r2) (-1) in v / fromScalar (normL2 v)
-- | Column-wise mean of an @n x m@ frame: sum of the @m@ columns over @m@.
meanX :: forall n m . (KnownDim n, KnownDim m) => DataFrame Float '[n, m] -> Vector Float n
meanX df = total / count
  where
    total = ewfoldl (+) 0 df
    count = fromIntegral (dimVal' @m)
-- | Column-wise sample variance of the columns (unbiased, @m-1@ denominator).
varX :: forall n m . (KnownDim n, KnownDim m) => DataFrame Float '[n, m] -> Vector Float n
varX x = ewfoldl accum 0 x / fromIntegral (dimVal' @m - 1)
  where
    mu = meanX x
    accum acc v = let d = v - mu in acc + d * d
--var1 :: forall n . KnownDim n => Vector Float n -> Scf
--var1 x = ewfoldl (\a v -> let v' = (v - m) in a + v' * v' ) 0 x / fromIntegral (dimVal' @n - 1)
-- where
-- m = ewfoldl (+) 0 x / fromIntegral (dimVal' @n)
-- | Convert homogeneous 4D coordinates to plain 3D coordinates and subtract
--   the centroid, so the resulting point cloud is centred at the origin.
centralizeNFromHom :: forall n . KnownDim n => DataFrame Float '[4,n] -> DataFrame Float '[3,n]
centralizeNFromHom homDf = ewmap (subtract centre) pts
  where
    pts    = ewmap fromHom homDf
    centre = meanX pts
----------------------------------------------------------------------------------------------------
-- * Converting from JSON
----------------------------------------------------------------------------------------------------
-- | Parse a GeoJSON geometry object into a 'Geometry' plus a flag telling
--   whether z coordinates had to be padded with zeros (2D input).
--
--   The \"coordinates\" array nesting level must agree with the declared
--   \"type\": Point = 1, MultiPoint\/LineString = 2, MultiLineString\/Polygon
--   = 3, MultiPolygon = 4; anything else is a parse failure.
instance FromJSON (Geometry, PaddedZeros) where
    parseJSON v = flip (withObject "GeoJSON Geometry object") v $ \obj -> do
        gType <- obj .: "type"
        obj .: "coordinates" >>= \a -> flip (withArray "GeoJSON Geometry coordinates") a $ \arr ->
          case (js_arrayNestingLvl arr, gType) of
            (1, "Point") ->
                pure $ (Points . SomeIODataFrame) *** PaddedZeros
                     $ js_parsePoint arr
            (2, "MultiPoint") ->
                case js_parsePointSeq arr of
                  (df, n, padded) -> flip (,) (PaddedZeros padded) . Points <$> converDF df n
            (2, "LineString") ->
                case js_parsePointSeq arr of
                  (df, n, padded) -> flip (,) (PaddedZeros padded) . Lines . (:|[]) <$> converDF df n
            (3, "MultiLineString") ->
                case js_parseMultiLineString arr of
                  (dfs', ns', padded) -> do
                     dfs <- parseJSON dfs'
                     ns <- parseJSON ns'
                     rs <- zipWithM converDF dfs ns
                     case rs of
                       [] -> fail "MultiLineString seems to be empty"
                       (x:xs) -> pure ( Lines $ x :| xs
                                      , PaddedZeros padded
                                      )
            (3, "Polygon") ->
                case js_parsePolygon arr of
                  (df, n, holes', padded) -> do
                     poly <- converDF df n
                     holes <- parseJSON $ arrayValue holes'
                     return ( Polygons ( (poly, holes) :| [])
                            , PaddedZeros padded
                            )
            (4, "MultiPolygon") ->
                case js_parseMultiPolygon arr of
                  (dfs', ns', holes', padded) -> do
                     dfs <- parseJSON dfs'
                     ns <- parseJSON ns'
                     holes <- parseJSON holes'
                     rs <- zipWithM converDF dfs ns
                     case zip rs holes of
                       -- BUGFIX: previously reported "MultiLineString seems to
                       -- be empty" here (copy-paste from the branch above).
                       [] -> fail "MultiPolygon seems to be empty"
                       (x:xs) -> pure ( Polygons $ x :| xs
                                      , PaddedZeros padded
                                      )
            (lvl, s) -> fail $
              "Wrong geometry type (" <> s <> ") or array nesting level (" <> show lvl <> ")."
-- | Wrap a raw JS Float32Array (already holding @product ns * n@ floats) into
--   a typed 'SomeIODataFrame', where @n@ is the runtime outer dimension.
--   The two unsafeCoerce'd Evidences discharge the FixedDim\/FixedXDim
--   constraints that GHC cannot derive for a runtime-chosen @n@.
converDF :: forall ns k
          . ( Dimensions ns, ArraySizeInference ns)
         => JSVal -> Int -> Parser (SomeIODataFrame Float (AsXDims ns +: XN k))
converDF jsv n = case someIntNatVal n of
    Nothing -> fail "Could not read dataframe length"
    Just (SomeIntNat (_::Proxy n)) ->
      case ( unsafeCoerce (Evidence :: Evidence (ns ~ ns)) :: Evidence
               (FixedDim (AsXDims ns +: XN k) (ns +: n) ~ (ns +: n)) )
       +!+ ( unsafeCoerce (Evidence :: Evidence (ns ~ ns)) :: Evidence
               (FixedXDim (AsXDims ns +: XN k) (ns +: n) ~ (AsXDims ns +: XN k)) )
       +!+ inferSnocDimensions @ns @n
       +!+ inferSnocArrayInstance (undefined :: IODataFrame Float ns) (Proxy @n)
       of
        Evidence -> case inferNumericFrame @Float @(ns +: n) of
          -- no copy: the JSVal *is* the frame's backing typed array
          Evidence -> pure $ SomeIODataFrame (coerce jsv :: IODataFrame Float (ns +: n))
-- Depth of array nesting of a GeoJSON coordinates value (Point = 1, ...).
foreign import javascript unsafe
    "h$geojson_nestingLvl($1)"
    js_arrayNestingLvl :: JSArray -> Int
-- Parse a single position into a 4-float frame; Bool = z was zero-padded.
foreign import javascript unsafe
    "var a = h$geojson_parseVec4($1); $r1 = a[0]; $r2 = a[1];"
    js_parsePoint :: JSArray -> (IODataFrame Float '[4,1], Bool)
-- Parse a position sequence: (raw Float32Array, point count, padded flag).
foreign import javascript unsafe
    "var a = h$geojson_parsePointSeq($1); $r1 = new Float32Array(a[0]); $r2 = a[1]; $r3 = a[2];"
    js_parsePointSeq :: JSArray -> (JSVal, Int, Bool)
-- Parse nested line strings: (arrays, per-line point counts, padded flag).
foreign import javascript unsafe
    "var a = h$geojson_parseMultiLineString($1); $r1 = a[0]; $r2 = a[1]; $r3 = a[2];"
    js_parseMultiLineString :: JSArray -> (Value, Value, Bool)
-- Parse one polygon: (array, point count, hole offsets, padded flag).
foreign import javascript unsafe
    "var a = h$geojson_parsePolygon($1);$r1 = new Float32Array(a[0]);$r2 = a[1];$r3 = a[2];$r4 = a[3];"
    js_parsePolygon :: JSArray -> (JSVal, Int, JSArray, Bool)
-- Parse many polygons: (arrays, counts, hole offsets, padded flag).
foreign import javascript unsafe
    "var a = h$geojson_parseMultiPolygon($1); $r1 = a[0];$r2 = a[1];$r3 = a[2];$r4 = a[3];"
    js_parseMultiPolygon :: JSArray -> (Value, Value, Value, Bool)
----------------------------------------------------------------------------------------------------
-- * Converting to JSON
----------------------------------------------------------------------------------------------------
-- | Serialize a 'Geometry' back to a GeoJSON geometry object. Vertices are
--   stored 4 floats apiece (polygons: 8, position + normal); only the first
--   3 components are emitted as GeoJSON coordinates.
instance ToJSON Geometry where
    toJSON (Points (SomeIODataFrame (sdf :: IODataFrame Float ns)))
      | (Evidence :: Evidence ([4,n] ~ ns, 1 <= n))
          <- unsafeCoerce (Evidence :: Evidence (ns ~ ns, 1 <= 1))
      , n <- dimVal' @n
      -- a single point serializes as "Point", several as "MultiPoint"
      = if n == 1
        then objectValue $ object
          [ ("type", toJSON ("Point" :: JSString))
          , ("coordinates", js_vecToJSArray 3 $ unsafeCoerce sdf )
          ]
        else objectValue $ object
          [ ("type", toJSON ("MultiPoint" :: JSString))
          , ("coordinates", js_vecToJSArray2Stride 4 3 $ unsafeCoerce sdf )
          ]
    toJSON (Lines (SomeIODataFrame sdf :| []))
      = objectValue $ object
          [ ("type", toJSON ("LineString" :: JSString))
          , ("coordinates", js_vecToJSArray2Stride 4 3 $ unsafeCoerce sdf )
          ]
    toJSON (Lines (x:|xs))
      = objectValue $ object
          [ ("type", toJSON ("MultiLineString" :: JSString))
          , ("coordinates", toJSON $ map f (x:xs) )
          ]
      where
        f :: SomeIODataFrame Float '[N 4, XN 2] -> Value
        f (SomeIODataFrame sdf) = js_vecToJSArray2Stride 4 3 (unsafeCoerce sdf)
    -- polygons: stride 8 (position + normal per vertex); hole offsets are
    -- vertex indices, scaled to float offsets by (8*)
    toJSON (Polygons ((SomeIODataFrame sdf, holes) :| []))
      = objectValue $ object
          [ ("type", toJSON ("Polygon" :: JSString))
          , ("coordinates", sdf `seq` js_vecToJSArray3StrideNRings 8 3
                             (toJSON (0: map (8*) holes)) (unsafeCoerce sdf))
          ]
    toJSON (Polygons (x:|xs))
      = objectValue $ object
          [ ("type", toJSON ("MultiPolygon" :: JSString))
          , ("coordinates", toJSON $ map f (x:xs))
          ]
      where
        f :: (SomeIODataFrame Float '[N 4, N 2, XN 3], [Int]) -> Value
        f (SomeIODataFrame sdf, holes)
          = sdf `seq`
            js_vecToJSArray3StrideNRings 8 3 (toJSON (0: map (8*) holes)) (unsafeCoerce sdf)
-- Take the first $1 components of a typed array as a plain JS array.
foreign import javascript unsafe "Array.prototype.slice.call($2, 0, $1)"
    js_vecToJSArray :: Int -> JSVal -> Value
-- Walk $3 with stride $1, emitting the first $2 components of each record.
foreign import javascript unsafe
    "var a = [], i = -$1;\
    \ while(i < $3.length - $1){\
    \   i+=$1; a.push(Array.prototype.slice.call($3, i, i + $2));\
    \ } $r = a;"
    js_vecToJSArray2Stride :: Int -> Int -> JSVal -> Value
-- Like js_vecToJSArray2Stride but splits into rings at offsets $3 and closes
-- each ring by repeating its first point (GeoJSON linear-ring convention).
foreign import javascript unsafe
    "var r, j, is = $3.concat([$4.length]); $r = [];\
    \ for(var i = 0; i < is.length - 1; i++){\
    \   r = []; j = is[i]-$1;\
    \   while(j < is[i+1] - $1){\
    \     j+=$1; r.push(Array.prototype.slice.call($4, j, j + $2));\
    \   }\
    \   r.push(r[0]);\
    \   $r.push(r);\
    \ }"
    js_vecToJSArray3StrideNRings :: Int -> Int -> Value -> JSVal -> Value
----------------------------------------------------------------------------------------------------
-- * Gathering object statistics
----------------------------------------------------------------------------------------------------
-- | Go over a 2D point set and derive necessary statistics.
--   Each centre contributes a singleton 'ScenarioStatistics'; the monoidal
--   fold combines them (bounds, count, centroid — per that type's Monoid).
getScenarioStatistics :: ObjectCentres -> ScenarioStatistics
getScenarioStatistics (ObjectCentres (SomeDataFrame centres))
    = ewfoldMap (\v -> ScenarioStatistics v v 1 v) centres
-- | Try hard to parse JSON containing feature collection or geometry
--   and return collection of object centres in 2D
newtype ObjectCentres = ObjectCentres (DataFrame Float '[N 2, XN 0])
-- | Extract all object centres straight from the raw JS value (delegating
--   traversal of the GeoJSON structure to h$geojson_getObjectCentres) and
--   wrap them in a 2 x n frame without copying per element.
instance FromJSON ObjectCentres where
    parseJSON (SomeValue jsv) = case someIntNatVal n of
        Nothing -> fail $ "Could not set DataFrame length: " ++ show n ++ "."
        Just (SomeIntNat (_ :: Proxy n)) ->
          pure (ObjectCentres $ SomeDataFrame
                 (unsafeCoerce (js_wrapFloat32ArrayVec xs) :: DataFrame Float '[2,n])
               )
      where
        xs = js_getObjectCentres jsv
        n = JSArray.length xs
-- Collect per-object 2D centres from a GeoJSON value (one [x,y] per object).
foreign import javascript unsafe
    "h$geojson_getObjectCentres($1)"
    js_getObjectCentres :: JSVal -> JSArray
-- Flatten an array of [x,y] pairs into one Float32Array.
foreign import javascript unsafe
    "new Float32Array([].concat.apply([], $1))"
    js_wrapFloat32ArrayVec :: JSArray -> JSVal
|
achirkin/qua-view
|
src/Model/GeoJSON/Coordinates.hs
|
mit
| 16,824
| 231
| 18
| 4,662
| 4,149
| 2,257
| 1,892
| -1
| -1
|
module Network.CryptoNote.P2P.Command.Chain.Response where
import Data.Word (Word64)
import Network.CryptoNote.Crypto.Hash (Hash, Id)
-- cryptonote_protocol_handler.h
-- cryptonote_protocol_defs.h
-- #define BC_COMMANDS_POOL_BASE 2000
-- const static int ID = BC_COMMANDS_POOL_BASE + 7;
-- | Payload of the CryptoNote P2P NOTIFY_RESPONSE_CHAIN_ENTRY command
-- (BC_COMMANDS_POOL_BASE + 7, see the headers referenced above).
-- NOTE(review): field semantics mirror the C++ struct of the same name —
-- presumably startHeight is the height of the first id in blockIds and
-- totalHeight the responder's full chain height; confirm against the C++.
data ResponseChainEntry = ResponseChainEntry
  { startHeight :: Word64
  , totalHeight :: Word64
  , blockIds :: [Hash Id]
  } deriving (Show, Eq)
|
nvmd/hs-cryptonote
|
src/Network/CryptoNote/P2P/Command/Chain/Response.hs
|
mit
| 449
| 0
| 10
| 71
| 83
| 54
| 29
| 8
| 0
|
{-# LANGUAGE TypeFamilies, GADTs #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Text.Regex.Applicative.Interface where
import Control.Applicative hiding (empty)
import qualified Control.Applicative
import Control.Arrow
import Data.Traversable
import Data.String
import Data.Maybe
import Text.Regex.Applicative.Types
import Text.Regex.Applicative.Object
-- | Mapping over the match result. @f <$ x@ routes through 'Void' so the
-- engine need not build the discarded result of @x@.
instance Functor (RE s) where
    fmap f x = Fmap f x
    f <$ x = pure f <* x
instance Applicative (RE s) where
    pure x = const x <$> Eps
    a1 <*> a2 = App a1 a2
    -- (*>) / (<*) wrap the ignored side in 'Void' (result never constructed)
    a *> b = pure (const id) <*> Void a <*> b
    a <* b = pure const <*> a <*> Void b
instance Alternative (RE s) where
    a1 <|> a2 = Alt a1 a2
    empty = Fail
    -- 'many' accumulates with a reversed cons and flips at the end;
    -- 'some' requires at least one occurrence
    many a = reverse <$> Rep Greedy (flip (:)) [] a
    some a = (:) <$> a <*> many a
-- | Sequence two regexes and combine their results with '(<>)'.
-- Added because 'Semigroup' is a superclass of 'Monoid' since base-4.11
-- (GHC 8.4); without it the 'Monoid' instance below fails to compile there.
instance Semigroup a => Semigroup (RE s a) where
    (<>) = liftA2 (<>)
-- | 'mempty' matches the empty input and yields 'mempty' of the result type;
-- 'mappend' sequences the two regexes, combining their results.
instance Monoid a => Monoid (RE s a) where
    mempty = pure mempty
    mappend = liftA2 mappend
-- | With @OverloadedStrings@, a string literal denotes 'string' of itself,
-- so @\"foo\" :: RE Char String@ matches the literal text \"foo\".
instance (char ~ Char, string ~ String) => IsString (RE char string) where
    fromString = string
-- | 'RE' is a profunctor: this is its contravariant map, re-targeting a
-- regex over symbols @s1@ to run on symbols @s2@ via @f :: s2 -> s1@.
--
-- (A dependency on the @profunctors@ package doesn't seem justified.)
comap :: (s2 -> s1) -> RE s1 a -> RE s2 a
comap _ Eps               = Eps
comap f (Symbol t p)      = Fmap f $ Symbol t (p . f)
comap f (Alt l r)         = Alt (comap f l) (comap f r)
comap f (App l r)         = App (comap f l) (comap f r)
comap f (Fmap g r)        = Fmap g (comap f r)
comap _ Fail              = Fail
comap f (Rep gr step z r) = Rep gr step z (comap f r)
comap f (Void r)          = Void (comap f r)
-- | Match and return a single symbol which satisfies the predicate
psym :: (s -> Bool) -> RE s s
-- The thread id is only a placeholder: symbols appear to be (re)numbered
-- during compilation, so the 'error' is not meant to be forced — TODO confirm
-- against Text.Regex.Applicative.Object.
psym p = Symbol (error "Not numbered symbol") p
-- | Match and return the given symbol
sym :: Eq s => s -> RE s s
sym s = psym (s ==)
-- | Match and return any single symbol
anySym :: RE s s
anySym = psym (const True)
-- | Match and return the given sequence of symbols.
--
-- Note that there is an 'IsString' instance for regular expression, so
-- if you enable the @OverloadedStrings@ language extension, you can write
-- @string \"foo\"@ simply as @\"foo\"@.
--
-- Example:
--
-- >{-# LANGUAGE OverloadedStrings #-}
-- >import Text.Regex.Applicative
-- >
-- >number = "one" *> pure 1 <|> "two" *> pure 2
-- >
-- >main = print $ "two" =~ number
-- Implemented as an applicative traversal: symbols matched left to right.
string :: Eq a => [a] -> RE a [a]
string = traverse sym
-- | Match zero or more occurrences of the given expression, combining them
-- left-fold style with @f@ starting from the seed @b@.
--
-- The 'Greediness' argument controls whether as many as possible ('Greedy')
-- or as few as possible ('NonGreedy') occurrences are consumed.
reFoldl :: Greediness -> (b -> a -> b) -> b -> RE s a -> RE s b
reFoldl = Rep
-- | Match zero or more occurrences of the given expression, consuming as
-- few of them as possible (/non-greedy/ matching). The greedy counterpart
-- is 'many'.
--
-- Examples:
--
-- >Text.Regex.Applicative> findFirstPrefix (few anySym <* "b") "ababab"
-- >Just ("a","abab")
-- >Text.Regex.Applicative> findFirstPrefix (many anySym <* "b") "ababab"
-- >Just ("ababa","")
few :: RE s a -> RE s [a]
few re = reverse <$> reFoldl NonGreedy (flip (:)) [] re
-- | Return matched symbols as part of the return value
--
-- Rebuilds the regex so every node also accumulates the list of input
-- symbols it consumed, concatenated in match order.
withMatched :: RE s a -> RE s (a, [s])
withMatched Eps = flip (,) [] <$> Eps
withMatched x@(Symbol _ _) = (id &&& pure) <$> x
withMatched (Alt a b) = withMatched a <|> withMatched b
withMatched (App a b) =
    (\(f, s) (x, t) -> (f x, s ++ t)) <$>
    withMatched a <*>
    withMatched b
withMatched Fail = Fail
withMatched (Fmap f x) = (f *** id) <$> withMatched x
withMatched (Rep gr f a0 x) =
    Rep gr (\(a, s) (x, t) -> (f a x, s ++ t)) (a0, []) (withMatched x)
-- N.B.: this ruins the Void optimization
withMatched (Void x) = (const () *** id) <$> withMatched x
-- | @s =~ a = match a s@
--
-- Operator version of 'match' with flipped arguments, for
-- @\"input\" =~ regex@ style usage.
(=~) :: [s] -> RE s a -> Maybe a
(=~) = flip match
infix 2 =~
-- | Attempt to match a string of symbols against the regular expression.
-- Note that the whole string (not just some part of it) should be matched.
--
-- Examples:
--
-- >Text.Regex.Applicative> match (sym 'a' <|> sym 'b') "a"
-- >Just 'a'
-- >Text.Regex.Applicative> match (sym 'a' <|> sym 'b') "ab"
-- >Nothing
--
match :: RE s a -> [s] -> Maybe a
match re = \str -> listToMaybe (results (foldl (flip step) compiled str))
  where
    -- bound outside the lambda, so partially applying 'match' to a regex
    -- compiles it once and shares it across many input strings
    compiled = compile re
-- | Find a string prefix which is matched by the regular expression.
--
-- Of all matching prefixes, pick one using left bias (prefer the left part of
-- '<|>' to the right part) and greediness.
--
-- This is the match which a backtracking engine (such as Perl's one) would find
-- first.
--
-- If match is found, the rest of the input is also returned.
--
-- Examples:
--
-- >Text.Regex.Applicative> findFirstPrefix ("a" <|> "ab") "abc"
-- >Just ("a","bc")
-- >Text.Regex.Applicative> findFirstPrefix ("ab" <|> "a") "abc"
-- >Just ("ab","c")
-- >Text.Regex.Applicative> findFirstPrefix "bc" "abc"
-- >Nothing
findFirstPrefix :: RE s a -> [s] -> Maybe (a, [s])
findFirstPrefix re str = go (compile re) str Nothing
  where
    -- scan threads in priority order; the first accepting thread wins this
    -- step and all lower-priority threads are dropped (backtracking order)
    walk obj [] = (obj, Nothing)
    walk obj (t:ts) =
        case getResult t of
          Just r -> (obj, Just r)
          Nothing -> walk (addThread t obj) ts
    go obj str resOld =
        case walk emptyObject $ threads obj of
          (obj', resThis) ->
            -- a result found at this step supersedes any earlier one
            let res = ((flip (,) str) <$> resThis) <|> resOld
            in
              case str of
                _ | failed obj' -> res
                [] -> res
                (s:ss) -> go (step s obj') ss res
-- | Find the longest string prefix which is matched by the regular expression.
--
-- Submatches are still determined using left bias and greediness, so this is
-- different from POSIX semantics.
--
-- If match is found, the rest of the input is also returned.
--
-- Examples:
--
-- >Text.Regex.Applicative Data.Char> let keyword = "if"
-- >Text.Regex.Applicative Data.Char> let identifier = many $ psym isAlpha
-- >Text.Regex.Applicative Data.Char> let lexeme = (Left <$> keyword) <|> (Right <$> identifier)
-- >Text.Regex.Applicative Data.Char> findLongestPrefix lexeme "if foo"
-- >Just (Left "if"," foo")
-- >Text.Regex.Applicative Data.Char> findLongestPrefix lexeme "iffoo"
-- >Just (Right "iffoo","")
findLongestPrefix :: RE s a -> [s] -> Maybe (a, [s])
findLongestPrefix re str = go (compile re) str Nothing
  where
    -- keep the latest (i.e. longest) result seen; stop when all threads fail
    go obj str resOld =
        let res = (fmap (flip (,) str) $ listToMaybe $ results obj) <|> resOld
        in
          case str of
            _ | failed obj -> res
            [] -> res
            (s:ss) -> go (step s obj) ss res
-- | Find the shortest prefix (analogous to 'findLongestPrefix')
--
-- Returns as soon as any thread accepts, so the very first (shortest)
-- accepting prefix wins.
findShortestPrefix :: RE s a -> [s] -> Maybe (a, [s])
findShortestPrefix re str = go (compile re) str
  where
    go obj str =
        case results obj of
          r : _ -> Just (r, str)
          _ | failed obj -> Nothing
          _ ->
            case str of
              [] -> Nothing
              s:ss -> go (step s obj) ss
-- | Find the leftmost substring that is matched by the regular expression.
-- Otherwise behaves like 'findFirstPrefix'. Returns the result together with
-- the prefix and suffix of the string surrounding the match.
findFirstInfix :: RE s a -> [s] -> Maybe ([s], a, [s])
findFirstInfix re str =
    fmap (\((first, res), last) -> (first, res, last)) $
    findFirstPrefix ((,) <$> few anySym <*> re) str
-- Auxiliary function for findExtremeInfix
-- Non-greedily consumes a prefix, returning its length and its symbols;
-- the length is forced strictly ($!) to avoid a thunk chain.
prefixCounter :: RE s (Int, [s])
prefixCounter = second reverse <$> reFoldl NonGreedy f (0, []) anySym
  where
    f (i, prefix) s = ((,) $! (i+1)) $ s:prefix
-- Intermediate state of an infix search: either a candidate match with its
-- surrounding prefix/postfix, or no result yet.
data InfixMatchingState s a = GotResult
    { prefixLen :: !Int
    , prefixStr :: [s]
    , result :: a
    , postfixStr :: [s]
    }
    | NoResult
-- a `preferOver` b picks between two intermediate results, biased toward a;
-- b only wins when its match starts strictly earlier (shorter prefix).
preferOver
    :: InfixMatchingState s a
    -> InfixMatchingState s a
    -> InfixMatchingState s a
preferOver NoResult b = b
preferOver a NoResult = a
preferOver a b
    | prefixLen a > prefixLen b = b
    | otherwise                 = a
-- Turn one engine thread into a search state: a 'GotResult' if the thread
-- has accepted (unpacking the prefix bookkeeping), 'NoResult' otherwise.
mkInfixMatchingState
    :: [s] -- rest of input
    -> Thread s ((Int, [s]), a)
    -> InfixMatchingState s a
mkInfixMatchingState rest thread =
    case getResult thread of
      Just ((pLen, pStr), res) ->
        GotResult
          { prefixLen = pLen
          , prefixStr = pStr
          , result = res
          , postfixStr = rest
          }
      Nothing -> NoResult
-- Whether the search state carries a candidate match.
gotResult :: InfixMatchingState s a -> Bool
gotResult GotResult {} = True
gotResult _ = False
-- Algorithm for finding leftmost longest infix match:
--
-- 1. Add a thread /.*?/ to the begginning of the regexp
-- 2. As soon as we get first accept, we delete that thread
-- 3. When we get more than one accept, we choose one by the following criteria:
-- 3.1. Compare by the length of prefix (since we are looking for the leftmost
--      match)
-- 3.2. If they are produced on the same step, choose the first one (left-biased
--      choice)
-- 3.3. If they are produced on the different steps, choose the later one (since
--      they have the same prefixes, later means longer)
findExtremalInfix
    :: -- function to combine a later result (first arg) to an earlier one (second
       -- arg)
       (InfixMatchingState s a -> InfixMatchingState s a -> InfixMatchingState s a)
    -> RE s a
    -> [s]
    -> Maybe ([s], a, [s])
findExtremalInfix newOrOld re str =
    case go (compile $ (,) <$> prefixCounter <*> re) str NoResult of
      NoResult -> Nothing
      r@GotResult{} ->
        Just (prefixStr r, result r, postfixStr r)
    where
    {-
    go :: ReObject s ((Int, [s]), a)
       -> [s]
       -> InfixMatchingState s a
       -> InfixMatchingState s a
    -}
    go obj str resOld =
        let resThis =
                -- best accepting thread at this step (left-biased via preferOver)
                foldl
                    (\acc t -> acc `preferOver` mkInfixMatchingState str t)
                    NoResult $
                    threads obj
            res = resThis `newOrOld` resOld
            obj' =
                -- If we just found the first result, kill the "prefixCounter" thread.
                -- We rely on the fact that it is the last thread of the object.
                if gotResult resThis && not (gotResult resOld)
                    then fromThreads $ init $ threads obj
                    else obj
        in
            case str of
                [] -> res
                _ | failed obj -> res
                (s:ss) -> go (step s obj') ss res
-- | Find the leftmost substring that is matched by the regular expression.
-- Otherwise behaves like 'findLongestPrefix'. Returns the result together with
-- the prefix and suffix of the string surrounding the match.
findLongestInfix :: RE s a -> [s] -> Maybe ([s], a, [s])
findLongestInfix = findExtremalInfix preferOver
-- | Find the leftmost substring that is matched by the regular expression.
-- Otherwise behaves like 'findShortestPrefix'. Returns the result together with
-- the prefix and suffix of the string surrounding the match.
findShortestInfix :: RE s a -> [s] -> Maybe ([s], a, [s])
findShortestInfix = findExtremalInfix $ flip preferOver
|
mitchellwrosen/regex-applicative
|
Text/Regex/Applicative/Interface.hs
|
mit
| 11,316
| 0
| 19
| 3,044
| 2,868
| 1,529
| 1,339
| 176
| 8
|
{-# htermination maxBound :: Ordering #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_maxBound_6.hs
|
mit
| 42
| 0
| 2
| 6
| 3
| 2
| 1
| 1
| 0
|
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.WebGPURenderPipelineColorAttachmentDescriptor
(setPixelFormat, getPixelFormat,
WebGPURenderPipelineColorAttachmentDescriptor(..),
gTypeWebGPURenderPipelineColorAttachmentDescriptor)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGPURenderPipelineColorAttachmentDescriptor.pixelFormat Mozilla WebGPURenderPipelineColorAttachmentDescriptor.pixelFormat documentation>
-- Generated binding: writes the JS @pixelFormat@ property of the descriptor.
setPixelFormat ::
               (MonadDOM m) =>
                 WebGPURenderPipelineColorAttachmentDescriptor -> Word -> m ()
setPixelFormat self val
  = liftDOM (self ^. jss "pixelFormat" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGPURenderPipelineColorAttachmentDescriptor.pixelFormat Mozilla WebGPURenderPipelineColorAttachmentDescriptor.pixelFormat documentation>
-- Generated binding: reads the JS @pixelFormat@ property, rounded to 'Word'.
getPixelFormat ::
               (MonadDOM m) =>
                 WebGPURenderPipelineColorAttachmentDescriptor -> m Word
getPixelFormat self
  = liftDOM (round <$> ((self ^. js "pixelFormat") >>= valToNumber))
|
ghcjs/jsaddle-dom
|
src/JSDOM/Generated/WebGPURenderPipelineColorAttachmentDescriptor.hs
|
mit
| 1,964
| 0
| 12
| 260
| 414
| 260
| 154
| 30
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
module Data.Matrix.Dense
(DenseMatrix
,fromColMajorVector)
where
import Data.Tensor hiding (generate,generateM)
import Data.Tensor.Dense.VTensor (VTensor(..),MD_VTensor)
import qualified Data.Permutation as P
import qualified Data.Vector.Generic as GV
-- | Synonym for a matrix-like multivector.
type DenseMatrix v a = MD_VTensor LinearStorageC v a
-- | Create a dense matrix from column-major raw vector data.
--   @rs@ and @cs@ are the row and column counts; the permutation @[1,0]@
--   over the (rows, cols) shape presumably encodes column-major (Fortran)
--   element order — TODO confirm against 'fromShapeOrder'.
fromColMajorVector :: (GV.Vector v a) => Int -> Int -> v a -> DenseMatrix v a
fromColMajorVector rs cs v =
  VTensor { _vStorageScheme = fromShapeOrder (rs,cs) (P.fromList [1,0])
          , _vData = v }
|
lensky/hs-matrix
|
lib/Data/Matrix/Dense.hs
|
mit
| 776
| 0
| 10
| 134
| 175
| 108
| 67
| 16
| 1
|
{-
Copyright (c) 2008, 2009
Russell O'Connor
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-}
-- |Datatypes for representing the human perception of colour.
-- Includes common operations for blending and compositing colours.
-- The most common way of creating colours is either by name
-- (see "Data.Colour.Names") or by giving an sRGB triple
-- (see "Data.Colour.SRGB").
--
-- Methods of specifying Colours can be found in
--
-- - "Data.Colour.SRGB"
--
-- - "Data.Colour.SRGB.Linear"
--
-- - "Data.Colour.CIE"
--
-- Colours can be specified in a generic 'Data.Colour.RGBSpace.RGBSpace'
-- by using
--
-- - "Data.Colour.RGBSpace"
--TODO
-- - "Data.Colour.HDTV"
--
-- - "Data.Colour.SDTV"
module Data.Colour
(
-- *Interfacing with Other Libraries\' Colour Spaces
--
-- |Executive summary: Always use "Data.Colour.SRGB" when interfacing with
-- other libraries.
-- Use 'Data.Colour.SRGB.toSRGB24' \/ 'Data.Colour.SRGB.sRGB24' when
-- interfacing with libraries wanting 'Data.Word.Word8' per channel.
-- Use 'Data.Colour.SRGB.toSRGB' \/ 'Data.Colour.SRGB.sRGB' when
-- interfacing with libraries wanting 'Double' or 'Float' per channel.
--
-- Interfacing with the colour for other libraries, such as cairo
-- (<http://www.haskell.org/gtk2hs/archives/category/cairo/>) and OpenGL
-- (<http://hackage.haskell.org/cgi-bin/hackage-scripts/package/OpenGL>),
-- can be a challenge because these libraries often do not use colour spaces
-- in a consistent way.
-- The problem is that these libraries work in a device dependent colour
-- space and give no indication what the colour space is.
-- For most devices this colours space is implicitly the non-linear sRGB
-- space.
-- However, to make matters worse, these libraries also do their
-- compositing and blending in the device colour space.
-- Blending and compositing ought to be done in a linear colour space,
-- but since the device space is typically non-linear sRGB, these libraries
-- typically produce colour blends that are too dark.
--
-- (Note that "Data.Colour" is a device /independent/ colour space, and
-- produces correct blends.
-- e.g. compare @toSRGB (blend 0.5 lime red)@ with @RGB 0.5 0.5 0@)
--
-- Because these other colour libraries can only blend in device colour
-- spaces, they are fundamentally broken and there is no \"right\" way
-- to interface with them.
-- For most libraries, the best one can do is assume they are working
-- with an sRGB colour space and doing incorrect blends.
-- In these cases use "Data.Colour.SRGB" to convert to and from the
-- colour coordinates. This is the best advice for interfacing with cairo.
--
-- When using OpenGL, the choice is less clear.
-- Again, OpenGL usually does blending in the device colour space.
-- However, because blending is an important part of proper shading, one
-- may want to consider that OpenGL is working in a linear colour space,
-- and the resulting rasters are improperly displayed.
-- This is born out by the fact that OpenGL extensions that support
-- sRGB do so by converting sRGB input\/output to linear colour coordinates
-- for processing by OpenGL.
--
-- The best way to use OpenGL, is to use proper sRGB surfaces for textures
-- and rendering.
-- These surfaces will automatically convert to and from OpenGL's linear
-- colour space.
-- In this case, use "Data.Colour.SRGB.Linear" to interface OpenGL's linear
-- colour space.
--
-- If not using proper surfaces with OpenGL, then you have a choice between
-- having OpenGL do improper blending or improper display
-- If you are using OpenGL for 3D shading, I recommend using
-- "Data.Colour.SRGB.Linear" (thus choosing improper OpenGL display).
-- If you are not using OpenGL for 3D shading, I recommend using
-- "Data.Colour.SRGB" (thus choosing improper OpenGL blending).
-- *Colour type
Colour
,colourConvert
,black
,AlphaColour
,opaque, withOpacity
,transparent
,alphaColourConvert
,alphaChannel
-- *Colour operations
-- |These operations allow combine and modify existing colours
,AffineSpace(..), blend
,ColourOps(..)
,dissolve, atop
)
where
import Data.Char
import Data.Colour.Internal
import qualified Data.Colour.SRGB.Linear
import Data.Colour.CIE.Chromaticity (app_prec, infix_prec)
-- | Shows a colour as a fully qualified application of
-- 'Data.Colour.SRGB.Linear.rgb' to its linear channels, so the output is
-- valid Haskell and round-trips through the 'Read' instance below.
instance (Fractional a, Show a) => Show (Colour a) where
  showsPrec d c = showParen (d > app_prec) showStr
   where
    showStr = showString linearConstructorQualifiedName
            . showString " " . (showsPrec (app_prec+1) r)
            . showString " " . (showsPrec (app_prec+1) g)
            . showString " " . (showsPrec (app_prec+1) b)
    Data.Colour.SRGB.Linear.RGB r g b = Data.Colour.SRGB.Linear.toRGB c
-- | Accepts both the bare and the fully qualified constructor name
-- ("rgb" / "Data.Colour.SRGB.Linear.rgb"); a custom lexer is needed because
-- standard 'lex' does not treat dotted qualified names as one token.
instance (Fractional a, Read a) => Read (Colour a) where
  readsPrec d r = readParen (d > app_prec)
                  (\r -> [(Data.Colour.SRGB.Linear.rgb r0 g0 b0,t)
                         |(name,s) <- mylex r
                         ,name `elem` [linearConstructorName
                                      ,linearConstructorQualifiedName]
                         ,(r0,s0) <- readsPrec (app_prec+1) s
                         ,(g0,s1) <- readsPrec (app_prec+1) s0
                         ,(b0,t)  <- readsPrec (app_prec+1) s1]) r
   where
    mylex = return
          . span (\c -> isAlphaNum c || c `elem` "._'")
          . dropWhile isSpace
linearConstructorQualifiedName = "Data.Colour.SRGB.Linear.rgb"
linearConstructorName = "rgb"
-- | Fully transparent colours show as "transparent"; otherwise the colour is
-- shown as an infix 'withOpacity' application (valid Haskell, matching the
-- 'Read' instance below).
instance (Fractional a, Show a, Eq a) => Show (AlphaColour a) where
  showsPrec d ac | a == 0 = showString "transparent"
                 | otherwise = showParen (d > infix_prec) showStr
   where
    showStr = showsPrec (infix_prec+1) c
            . showString " `withOpacity` "
            . showsPrec (infix_prec+1) a
    a = alphaChannel ac
    c = colourChannel ac
-- | Inverse of the 'Show' instance: parses either the literal "transparent"
-- or a backticked `withOpacity` application.
instance (Fractional a, Read a) => Read (AlphaColour a) where
  readsPrec d r = [(transparent,s)|("transparent",s) <- lex r]
               ++ readParen (d > infix_prec)
                  (\r -> [(c `withOpacity` o,s)
                         |(c,r0) <- readsPrec (infix_prec+1) r
                         ,("`",r1) <- lex r0
                         ,("withOpacity",r2) <- lex r1
                         ,("`",r3) <- lex r2
                         ,(o,s) <- readsPrec (infix_prec+1) r3]) r
|
haasn/colour
|
Data/Colour.hs
|
mit
| 7,228
| 0
| 16
| 1,531
| 932
| 546
| 386
| 55
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html
module Stratosphere.Resources.RDSDBSecurityGroupIngress where
import Stratosphere.ResourceImports
-- | Full data type definition for RDSDBSecurityGroupIngress. See
-- 'rdsdbSecurityGroupIngress' for a more convenient constructor.
data RDSDBSecurityGroupIngress =
  RDSDBSecurityGroupIngress
  { _rDSDBSecurityGroupIngressCIDRIP :: Maybe (Val Text)
    -- ^ Optional CIDR range to authorize.
  , _rDSDBSecurityGroupIngressDBSecurityGroupName :: Val Text
    -- ^ Required: the DB security group receiving the ingress rule.
  , _rDSDBSecurityGroupIngressEC2SecurityGroupId :: Maybe (Val Text)
  , _rDSDBSecurityGroupIngressEC2SecurityGroupName :: Maybe (Val Text)
  , _rDSDBSecurityGroupIngressEC2SecurityGroupOwnerId :: Maybe (Val Text)
  } deriving (Show, Eq)

-- | Serialise to the CloudFormation resource properties; optional
-- fields are omitted entirely (via 'catMaybes') rather than emitted as
-- nulls.
instance ToResourceProperties RDSDBSecurityGroupIngress where
  toResourceProperties RDSDBSecurityGroupIngress{..} =
    ResourceProperties
    { resourcePropertiesType = "AWS::RDS::DBSecurityGroupIngress"
    , resourcePropertiesProperties =
        hashMapFromList $ catMaybes
        [ fmap (("CIDRIP",) . toJSON) _rDSDBSecurityGroupIngressCIDRIP
        , (Just . ("DBSecurityGroupName",) . toJSON) _rDSDBSecurityGroupIngressDBSecurityGroupName
        , fmap (("EC2SecurityGroupId",) . toJSON) _rDSDBSecurityGroupIngressEC2SecurityGroupId
        , fmap (("EC2SecurityGroupName",) . toJSON) _rDSDBSecurityGroupIngressEC2SecurityGroupName
        , fmap (("EC2SecurityGroupOwnerId",) . toJSON) _rDSDBSecurityGroupIngressEC2SecurityGroupOwnerId
        ]
    }
-- | Constructor for 'RDSDBSecurityGroupIngress' containing required fields as
-- arguments. All optional fields start as 'Nothing'; set them through
-- the lenses below.
rdsdbSecurityGroupIngress
  :: Val Text -- ^ 'rdsdbsgiDBSecurityGroupName'
  -> RDSDBSecurityGroupIngress
rdsdbSecurityGroupIngress dBSecurityGroupNamearg =
  RDSDBSecurityGroupIngress
  { _rDSDBSecurityGroupIngressCIDRIP = Nothing
  , _rDSDBSecurityGroupIngressDBSecurityGroupName = dBSecurityGroupNamearg
  , _rDSDBSecurityGroupIngressEC2SecurityGroupId = Nothing
  , _rDSDBSecurityGroupIngressEC2SecurityGroupName = Nothing
  , _rDSDBSecurityGroupIngressEC2SecurityGroupOwnerId = Nothing
  }
-- Lenses for each resource property; one per record field.

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-cidrip
rdsdbsgiCIDRIP :: Lens' RDSDBSecurityGroupIngress (Maybe (Val Text))
rdsdbsgiCIDRIP = lens _rDSDBSecurityGroupIngressCIDRIP (\s a -> s { _rDSDBSecurityGroupIngressCIDRIP = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-dbsecuritygroupname
rdsdbsgiDBSecurityGroupName :: Lens' RDSDBSecurityGroupIngress (Val Text)
rdsdbsgiDBSecurityGroupName = lens _rDSDBSecurityGroupIngressDBSecurityGroupName (\s a -> s { _rDSDBSecurityGroupIngressDBSecurityGroupName = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-ec2securitygroupid
rdsdbsgiEC2SecurityGroupId :: Lens' RDSDBSecurityGroupIngress (Maybe (Val Text))
rdsdbsgiEC2SecurityGroupId = lens _rDSDBSecurityGroupIngressEC2SecurityGroupId (\s a -> s { _rDSDBSecurityGroupIngressEC2SecurityGroupId = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-ec2securitygroupname
rdsdbsgiEC2SecurityGroupName :: Lens' RDSDBSecurityGroupIngress (Maybe (Val Text))
rdsdbsgiEC2SecurityGroupName = lens _rDSDBSecurityGroupIngressEC2SecurityGroupName (\s a -> s { _rDSDBSecurityGroupIngressEC2SecurityGroupName = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-security-group-ingress.html#cfn-rds-securitygroup-ingress-ec2securitygroupownerid
rdsdbsgiEC2SecurityGroupOwnerId :: Lens' RDSDBSecurityGroupIngress (Maybe (Val Text))
rdsdbsgiEC2SecurityGroupOwnerId = lens _rDSDBSecurityGroupIngressEC2SecurityGroupOwnerId (\s a -> s { _rDSDBSecurityGroupIngressEC2SecurityGroupOwnerId = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/Resources/RDSDBSecurityGroupIngress.hs
|
mit
| 4,153
| 0
| 15
| 405
| 552
| 313
| 239
| 45
| 1
|
module Utils
( handleArgs, chooseFileCreator, filterFiles, createOptions
) where
import DirTree (DirTree (..), filterDirTreeByFSO)
import FSO (CreateOptions, FSO (..), FileCreator)
import Options (Options (..), on')
import BasePrelude
import System.Directory (canonicalizePath, copyFile, doesDirectoryExist,
getTemporaryDirectory, removeFile, renameFile)
import System.FilePath (isValid, joinPath, splitDirectories, splitFileName,
(</>))
import System.IO (IOMode (..), openBinaryFile,
openBinaryTempFile)
import System.Posix.Files (createLink, createSymbolicLink)
import System.Process (CreateProcess (..), StdStream (..), createProcess,
shell, waitForProcess)
import Text.Regex (Regex, matchRegex, mkRegex, subRegex)
{- |Parses command-line arguments minus command-line options.
Expects paths of either one or two directories, the first of
which must exist. Throws errors when conditions not met.-}
handleArgs :: [String] -> IO (FilePath, Maybe FilePath)
handleArgs [] = error "missing file operand"
handleArgs (_:_:_:_) = error "too many operands"
handleArgs (srcDir:others) = do
  let mDest = msum $ map Just others
  dirExists <- doesDirectoryExist srcDir
  unless dirExists $ ioError $
    mkIOError doesNotExistErrorType "" Nothing (Just srcDir)
  -- Only validate the destination path when one was supplied.
  forM_ mDest $ \d ->
    unless (isValid d) $ error $ d ++ ": invalid filepath"
  return (srcDir, mDest)
-- | Pick the file-creation action implied by the command-line options.
-- Priority order: hard link, relative symlink, symlink, conversion
-- command; 'Nothing' means no special creator was requested. The
-- string paired with each action is an arrow glyph
-- ("=>", "<-", "~>") -- presumably used when reporting operations;
-- confirm against the 'FileCreator' consumers.
chooseFileCreator :: Options -> Maybe FileCreator
chooseFileCreator o =
  case ap [optLink, optRelative, optSymbolic, on'.optConvert] [o] of
    (True:_)        -> Just ("=>", createLink)
    (_:True:True:_) -> Just ("<-", createRelativeLink)
    (_:_:True:_)    -> Just ("<-", createSymbolicLink)
    (_:_:_:True:_)  -> (,) "~>" . convertFile <$> optConvert o
    _               -> Nothing
-- | Project the (force, verbose, dry-run) triple used during FSO creation.
createOptions :: Options -> CreateOptions
createOptions o = (optForce o, optVerbose o, optDryRun o)
-- | Create a symbolic link whose target is expressed relative to the
-- link's own (canonicalised) directory, rather than as an absolute path.
createRelativeLink :: FilePath -> FilePath -> IO ()
createRelativeLink orig link = do
  orig' <- canonicalizePath orig
  let (dir, file) = splitFileName link
  dir' <- canonicalizePath dir
  let link' = dir' </> file
      rel = relativePath link' orig'
  createSymbolicLink rel link
-- | Compute the relative path leading from one path to another: climb
-- out of the directories unique to the start point, then descend into
-- those unique to the end point.
relativePath :: FilePath -- ^start point
             -> FilePath -- ^end point
             -> FilePath -- ^path from \"start\" to \"end\"
relativePath start end = joinPath (climb ++ descend)
  where
    startParts = splitDirectories start
    endParts   = splitDirectories end
    -- Longest shared prefix of the two component lists.
    shared     = last $ takeWhile (`elem` inits endParts) $ inits startParts
    -- One ".." per remaining start component (dropping the final,
    -- file-level component), then the remainder of the end path.
    climb      = map (const "..") $ init $ fromJust $ stripPrefix shared startParts
    descend    = fromJust $ stripPrefix shared endParts
{- |Takes a string containing an external command with optional {in}
and {out} file markers. If these are present, source and
destination filepaths are substituted into the command.
Otherwise, the file contents are piped in and/or out,
respectively, when the command is run. -}
convertFile :: String -> FilePath -> FilePath -> IO ()
convertFile converter source dest = do
  tempDir <- getTemporaryDirectory
  (tempPath, tempHandle) <- openBinaryTempFile tempDir "ttree"
  inHandle <- openBinaryFile source ReadMode
  let inRegex = mkRegex "\\{in\\}"
      outRegex = mkRegex "\\{out\\}"
  let command = substitute [(inRegex, source), (outRegex, tempPath)] converter
  -- No {in} marker: feed the source file through the process's stdin.
  -- No {out} marker: capture the process's stdout into the temp file.
  let process = (if not $ match inRegex converter then
                  (\c -> c {std_in = UseHandle inHandle}) else id)
            <<< (if not $ match outRegex converter then
                  (\c -> c {std_out = UseHandle tempHandle}) else id)
            <<< shell $ command
  (_,_,_, procHandle) <- createProcess process
  _ <- waitForProcess procHandle
  hClose inHandle >> hClose tempHandle
  -- The converted output lands in the temp file; move it into place.
  moveFile tempPath dest
-- | Move a file, falling back to copy-then-delete when a direct rename
-- is reported as unsupported (e.g. across filesystem boundaries).
moveFile :: FilePath -> FilePath -> IO ()
moveFile s d = catch (renameFile s d) $ \e ->
  if isUnsupportedOperation e
    then copyFile s d >> removeFile s
    else ioError e
-- | Helper for 'moveFile': does this IOException denote an operation
-- the underlying filesystem refused to perform?
isUnsupportedOperation :: IOException -> Bool
isUnsupportedOperation err
  | UnsupportedOperation <- ioe_type err = True
  | otherwise                            = False
-- | Does the regular expression match anywhere in the string?
match :: Regex -> String -> Bool
match re str = isJust (matchRegex re str)
-- | Apply each (pattern, replacement) pair to the input string in turn.
substitute :: [(Regex, String)] -> String -> String
substitute subs str = foldl step str subs
  where step acc (pat, rep) = subRegex pat acc rep
-- | Keep all directories, and keep only the files whose name matches
-- the given regular expression.
filterFiles :: String -- ^regex
            -> DirTree -> DirTree
filterFiles = filterDirTreeByFSO . f
  where f _ (Dir _) = True
        f s (File name _) = match (mkRegex s) name
|
pavelkogan/transform-tree
|
Utils.hs
|
mit
| 4,589
| 0
| 19
| 1,074
| 1,367
| 721
| 646
| 90
| 5
|
-- | Describe the third letter of a string.
-- Fixed: the original pattern @(x:y:_)@ also matched one- and
-- two-character strings, which then crashed -- "a" with a
-- non-exhaustive-pattern error and "ab" on the @!! 2@ index.
-- Strings shorter than three characters now get a message instead.
third :: String -> String
third "" = "Empty string, how fuggin lame"
third ltrs@(_:_:c:_) = [c] ++ " is the 3rd letter of " ++ ltrs
third ltrs = ltrs ++ " is too short to have a 3rd letter"
-- | BMI commentary from weight (kg) and height (m), with the BMI
-- computed once in a where clause.
fug :: (RealFloat a) => a -> a -> String
fug weight height
  | bodyMass <= 18.5 = "Shiiiiit you skinny"
  | bodyMass <= 25.0 = "Perfect, fuggin showoff"
  | bodyMass <= 30.0 = "Grand stand chunky monkey"
  | otherwise = "Not even light can escape your pull"
  where bodyMass = weight / height ^ 2
-- | Same BMI commentary as 'fug', with the thresholds bound as a
-- single named tuple.
fug' :: (RealFloat a) => a -> a -> String
fug' weight height
  | bmi <= skinnyCap = "Shiiiiit you skinny"
  | bmi <= normalCap = "Perfect, fuggin showoff"
  | bmi <= fatCap    = "Grand stand chunky monkey"
  | otherwise        = "Not even light can escape your pull"
  where
    bmi = weight / height ^ 2
    (skinnyCap, normalCap, fatCap) = (18.5, 25.0, 30.0)
|
onicrypt/noauth
|
noauth-hs/thlt.hs
|
gpl-2.0
| 757
| 19
| 10
| 179
| 273
| 139
| 134
| 19
| 1
|
{-# LANGUAGE FlexibleContexts #-}
module Yi.Users.JP.Experimental where
-- This is an attempt at a completely "normalized" keymap.
-- Choose your mode/unit with the left hand;
-- Perform commands with the right hand.
import Prelude (zipWith)
import Control.Monad.State
import Data.Char
import Yi.Keymap.Emacs.Utils
import Yi.Rectangle
import Yi
import qualified Yi.Interact as I (choice, I())
-- | Enhanced keymap type, where the current unit is remembered using a StateT
-- (the 'TextUnit' being operated on, plus its display name for the
-- status line).
type KM a = (StateT (TextUnit, String) (I.I Event Action)) a
{-
We'll assume QWERTY layout:
qqq www eee rrr ttt yyy uuu iii ooo ppp
aaa sss ddd fff ggg hhh jjj kkk lll ;;;
zzz xxx ccc vvv bbb nnn mmm ,,, ... ///
-}
-- | Keyboard layout definition.
-- Left-hand keys select the text unit; right-hand keys run commands.
leftHand, rightHand :: [String]
leftHand = ["qwert", "asdfg", "zxcvb"]
rightHand = ["yuiop", "hjkl;", "nm,./"]
-- data Mark = Paste | SetMark | Cut | Copy | SwitchMark
-- data Special = Complete | Undo | Indent | Search
-- | Special shift for events that understand the QWERTY layout:
-- letters are upcased, punctuation follows a US keyboard. Partial:
-- any event not covered below (e.g. digits) hits the 'error' case,
-- so callers must only apply it to keys listed here.
shi_ :: Event -> Event
shi_ (Event (KASCII c) ms) | isAlpha c = Event (KASCII (toUpper c)) ms
shi_ (Event (KASCII ',') ms) = Event (KASCII '<') ms
shi_ (Event (KASCII '.') ms) = Event (KASCII '>') ms
shi_ (Event (KASCII '/') ms) = Event (KASCII '?') ms
shi_ (Event (KASCII ';') ms) = Event (KASCII ':') ms
shi_ (Event (KASCII '\'') ms) = Event (KASCII '"') ms
shi_ (Event (KASCII '[') ms) = Event (KASCII '{') ms
shi_ (Event (KASCII ']') ms) = Event (KASCII '}') ms
shi_ _ = error "shi_: unhandled event"
-- | Insert any printable character at point.
selfInsertKeymap :: KM ()
selfInsertKeymap = do
  c <- printableChar
  write (insertB c)

-- | Insert a newline on Enter.
retKeymap :: KM ()
retKeymap = do
  Event KEnter [] <- anyEvent
  write (insertB '\n')

-- | Insert mode, entered with @g@: self-insert (plus quick commands)
-- until one of the 'quitInsert' keys is pressed.
insertKeymap :: KM ()
insertKeymap = do
  event $ char 'g'
  write $ msgEditor "-- INSERT --"
  many $ do
    write $ msgEditor "-- INSERT --"
    (selfInsertKeymap <|> retKeymap <|> quickCmdKeymap) <|| unrecognized
  quitInsert
  return ()

-- | Keys that leave insert mode.
quitInsert :: KM Event
quitInsert = oneOf [ctrl $ spec KEnter, spec KEsc, ctrlCh '\\']

-- | Commands available while inserting: Ctrl runs character-unit
-- commands, Ctrl+Shift runs word-unit commands.
quickCmdKeymap :: KM ()
quickCmdKeymap = mkCmdKeymap (return Character) ctrl
             <|> mkCmdKeymap (return unitWord) (ctrl . shi_)

-- | Escape quits the editor.
quitKeymap :: KM ()
quitKeymap = do
  Event KEsc [] <- anyEvent
  write quitEditor

-- | Fallback: report any event no other keymap handled.
unrecognized :: KM ()
unrecognized = do
  e <- anyEvent
  write (msgEditor $ "unrecognized: " ++ show e)
-- | Top-level command mode: show the current unit in the status line,
-- then accept either a command key (right hand, via 'mkCmdKeymap'),
-- a unit-selection key (left hand), or the insert-mode entry key.
commandsKeymap :: KM ()
commandsKeymap = do
  (_, unitName) <- get
  write $ msgEditor $ "-- CMD: " ++ unitName
  quitKeymap <|| (I.choice $ insertKeymap : cmds : concat unts)
 where
  cmds = mkCmdKeymap (fst <$> get) id
  -- Bind each left-hand key to selecting the unit at the same grid
  -- position in 'units'.
  unts = zipWith (zipWith mkUnt) units leftHand
  mkUnt unt ch = do
    event $ char ch
    put unt

-- | Build a command keymap: bind every right-hand key (sent through
-- the given event modifier) to the command at the same grid position
-- in 'commands', applied to the unit produced by the given action.
mkCmdKeymap :: KM TextUnit -> (Event -> Event) -> KM ()
mkCmdKeymap getUnit mods = I.choice $ concat $ zipWith (zipWith mkCmd) commands rightHand
 where mkCmd cmd ch = do
         event $ mods $ char ch
         unt <- getUnit
         write (cmd unt)

-- | The exported keymap: M-x, the command grid, and the C-x prefix map.
keymap :: Keymap
keymap = runKM $ forever $ choice
  [
   metaCh 'x' ?>>! executeExtendedCommandE,
   commandsKeymap,
   ctrlCh 'x' ?>> ctrlX
  ]
{-
Commands: (right hand)
cop cut del del com ???
pop pas mov mov sea '''
mpp mxp xpo xpo und
com: complete
und: undo
sea: start incremental search of the Unit at point
pop: pop-yank
pas: paste
xpo: transpose in given direction
''': search start from empty
mxp: exchange point and mark
mpp: mark pop
cop: copy
-}
-- | The 3x5 command grid, mirroring 'rightHand'. Many entries are
-- still unimplemented placeholders ('todo' is a no-op).
commands :: [[TextUnit -> BufferM ()]]
commands = [[copy, cut, del b, del f, complete],
            [pop, paste, move b, move f, search],
            [mpp, mxp, xpo b, xpo f, undo]]
 where copy = todo
       cut = todo
       pop = todo
       mpp = todo
       mxp = todo
       complete = todo
       paste = todo
       search = todo
       undo = const undoB
       move dir u = moveB u dir
       del dir u = deleteB u dir
       xpo dir u = transposeB u dir
       b = Backward
       f = Forward
       -- Placeholder for commands not yet written.
       todo = const $ return ()
{-
Units: (left hand)
doc pag col ver ovr
par lin wor cha ins
*** *** *** sea buf
-}
-- Placeholder units: document, page and column are not implemented
-- yet and currently behave as 'Character'.
document, page, column :: TextUnit
document = Character
page = Character
column = Character

-- | The unit-selection grid (mirrors 'leftHand'), pairing each unit
-- with the name shown in the status line.
units :: [[(TextUnit, String)]]
units = [
  [(document, "DOC"), (page, "PAGE"), (column, "COL"), (VLine, "VER")], -- ↕
  [(unitParagraph, "PARA"), (Line, "Line"), (unitWord, "Word"), (Character, "Char")]
 ]

-- | Run a 'KM', starting in Character unit.
runKM :: KM () -> Keymap
runKM p = fmap fst $ runStateT p (Character, "Char")
{-
ins: go to insert mode
ovr: go to overwrite mode
sea: navigate searched items.
... free
*** reserved for normal emacs usage.
----------
C-: briefly switch to character mode
M-: briefly switch to word mode
C-mode: go to that mode
-}
------------
-- C-x commands borrowed from emacs.
-- | C-x prefix commands, borrowed from the emacs keymap.
ctrlX :: KM ()
ctrlX =
  choice [ ctrlCh 'o' ?>>! deleteBlankLinesB
         , char '0' ?>>! closeWindow
         , char '1' ?>>! closeOtherE
         , char '2' ?>>! splitE
         , char 's' ?>>! askSaveEditor
         , ctrlCh 'c' ?>>! askQuitEditor
         , ctrlCh 'f' ?>>! findFile
         , ctrlCh 's' ?>>! fwriteE
         , ctrlCh 'w' ?>>! promptFile "Write file:" fwriteToE
           -- C-x C-x: swap point and mark, and highlight the selection.
         , ctrlCh 'x' ?>>! (exchangePointAndMarkB >>
                            putA highlightSelectionA True)
         , char 'b' ?>>! switchBufferE
         , char 'd' ?>>! dired
         , char 'e' ?>>
           char 'e' ?>>! evalRegionE
         , char 'o' ?>>! nextWinE
         , char 'k' ?>>! killBufferE
         , char 'r' ?>> rectangleFuntions
         , char 'u' ?>>! undoB
         , char 'v' ?>>! shrinkWinE
         ]
-- | C-x r rectangle commands. (The name keeps its historical
-- misspelling because 'ctrlX' refers to it.)
rectangleFuntions :: KM ()
rectangleFuntions = choice [char 'a' ?>>! alignRegionOn,
                            char 'o' ?>>! openRectangle,
                            char 't' ?>>! stringRectangle,
                            char 'k' ?>>! killRectangle,
                            char 'y' ?>>! yankRectangle
                           ]
|
codemac/yi-editor
|
src/Yi/Users/JP/Experimental.hs
|
gpl-2.0
| 6,330
| 0
| 13
| 2,015
| 1,720
| 898
| 822
| 132
| 1
|
-- Copyright (c) 2011-14, Nicola Bonelli
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of University of Pisa nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-- ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
-- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-- POSSIBILITY OF SUCH DAMAGE.
--
--
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.PFq.Default
(
-- * Predicates
-- | Collection of predicates used in conditional expressions.
is_ip,
is_udp,
is_tcp,
is_icmp,
is_ip6,
is_udp6,
is_tcp6,
is_icmp6,
is_flow,
is_l3_proto,
is_l4_proto,
is_frag,
is_first_frag,
is_more_frag,
has_port,
has_src_port,
has_dst_port,
has_addr,
has_src_addr,
has_dst_addr,
has_state,
has_mark,
has_vlan,
has_vid,
vlan_id,
-- * Properties
ip_tos ,
ip_tot_len ,
ip_id ,
ip_frag ,
ip_ttl ,
get_mark ,
get_state ,
tcp_source ,
tcp_dest ,
tcp_hdrlen ,
udp_source ,
udp_dest ,
udp_len ,
icmp_type ,
icmp_code ,
-- * Combinators
(.||.),
(.&&.),
(.^^.),
not',
inv ,
par',
-- * Comparators
-- | Take a NetProperty, a value, and return a predicate that compares the values.
(.<.),
(.<=.),
(.==.),
(./=.),
(.>.),
(.>=.),
any_bit,
all_bit,
-- * Conditionals
conditional ,
when' ,
unless' ,
-- * Filters
-- | A collection of monadic NetFunctions.
filter' ,
ip ,
ip6 ,
udp ,
tcp ,
icmp ,
udp6 ,
tcp6 ,
icmp6 ,
vlan ,
l3_proto ,
l4_proto ,
flow ,
rtp ,
vlan_id_filter,
no_frag ,
no_more_frag,
port ,
src_port ,
dst_port ,
addr ,
src_addr ,
dst_addr ,
-- * Steering functions
-- | Monadic functions used to dispatch packets across sockets.
-- They evaluate to /Steer Hash Skbuff/, if the packet has a certain property, /Drop/ otherwise.
steer_link ,
steer_vlan ,
steer_ip ,
steer_ip6 ,
steer_flow ,
steer_rtp ,
steer_net ,
steer_field,
-- * Forwarders
kernel ,
broadcast ,
drop' ,
forward ,
forwardIO ,
bridge ,
tee ,
tap ,
-- * Logging
log_msg ,
log_buff ,
log_packet ,
-- * Bloom Filters
bloom ,
bloom_src ,
bloom_dst ,
bloom_filter,
bloom_src_filter,
bloom_dst_filter,
bloomCalcN ,
bloomCalcM ,
bloomCalcP ,
-- * Miscellaneous
unit ,
inc ,
dec ,
mark ,
put_state ,
) where
import Data.Int
import Network.PFq.Lang
import Data.Word
import Network.Socket
import System.IO.Unsafe
import Foreign.C.Types
import Foreign.Storable.Tuple ()
-- Default combinators.
-- Each combinator builds an expression node carrying the name of the
-- corresponding in-kernel primitive.

-- | Combine two predicate expressions with a specific boolean /or/ operation.
(.||.) :: NetPredicate -> NetPredicate -> NetPredicate

-- | Combine two predicate expressions with a specific boolean /and/ operation.
(.&&.) :: NetPredicate -> NetPredicate -> NetPredicate

-- | Combine two predicate expressions with a specific boolean /xor/ operation.
(.^^.) :: NetPredicate -> NetPredicate -> NetPredicate

-- | Return a new predicate that evaluates to /True/, when the given one evaluates to
-- false, and vice versa.
not' :: NetPredicate -> NetPredicate
not' p = Combinator1 "not" p
p1 .||. p2 = Combinator2 "or" p1 p2
p1 .&&. p2 = Combinator2 "and" p1 p2
p1 .^^. p2 = Combinator2 "xor" p1 p2

-- Same relative precedences as the standard boolean operators.
infixl 7 .&&.
infixl 6 .^^.
infixl 5 .||.
-- | Return a predicate that evaluates to /True/, if the property is less than
-- the given value. Example:
--
-- > when' (ip_ttl .<. 64) drop'
(.<.) :: NetProperty -> Word64 -> NetPredicate
(.<=.) :: NetProperty -> Word64 -> NetPredicate
(.==.) :: NetProperty -> Word64 -> NetPredicate
(./=.) :: NetProperty -> Word64 -> NetPredicate
(.>.) :: NetProperty -> Word64 -> NetPredicate
(.>=.) :: NetProperty -> Word64 -> NetPredicate

-- The trailing () arguments fill the unused slots of the fixed-arity
-- 'Predicate' constructor.
p .<. x = Predicate "less" p x () () () () () ()
p .<=. x = Predicate "less_eq" p x () () () () () ()
p .==. x = Predicate "equal" p x () () () () () ()
p ./=. x = Predicate "not_equal" p x () () () () () ()
p .>. x = Predicate "greater" p x () () () () () ()
p .>=. x = Predicate "greater_eq" p x () () () () () ()

-- Non-associative, like the standard comparison operators.
infix 4 .<.
infix 4 .<=.
infix 4 .>.
infix 4 .>=.
infix 4 .==.
infix 4 ./=.

-- | Return a predicate that evaluates to /True/, if the property has at least
-- one bit set among those specified by the given mask.
any_bit :: NetProperty
        -> Word64 -- ^ comparison mask
        -> NetPredicate

-- | Return a predicate that evaluates to /True/, if the property has all bits
-- set among those specified in the given mask.
all_bit :: NetProperty
        -> Word64 -- ^ comparison mask
        -> NetPredicate

p `any_bit` x = Predicate "any_bit" p x () () () () () ()
p `all_bit` x = Predicate "all_bit" p x () () () () () ()
-- Packet predicates. Each is an application of the 'Predicate'
-- constructor naming an in-kernel check; the () arguments fill its
-- unused fixed-arity slots.

-- | Evaluate to /True/ if the SkBuff is an IPv4 packet.
is_ip = Predicate "is_ip" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is an IPv6 packet.
is_ip6 = Predicate "is_ip6" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is an UDP packet.
is_udp = Predicate "is_udp" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is a TCP packet.
is_tcp = Predicate "is_tcp" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is an ICMP packet.
is_icmp = Predicate "is_icmp" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is an UDP packet, on top of IPv6.
is_udp6 = Predicate "is_udp6" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is a TCP packet, on top of IPv6.
is_tcp6 = Predicate "is_tcp6" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is an ICMP packet, on top of IPv6.
is_icmp6 = Predicate "is_icmp6" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is an UDP or TCP packet.
is_flow = Predicate "is_flow" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff has a vlan tag.
has_vlan = Predicate "has_vlan" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is a TCP fragment.
is_frag = Predicate "is_frag" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is the first TCP fragment.
is_first_frag = Predicate "is_first_frag" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is a TCP fragment, but the first.
is_more_frag = Predicate "is_more_frag" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff has the given vlan id.
--
-- > has_vid 42
has_vid :: CInt -> NetPredicate
has_vid x = Predicate "has_vid" x () () () () () () ()

-- | Evaluate to /True/ if the SkBuff has the given mark, set by 'mark' function.
--
-- > has_mark 11
has_mark :: Word32 -> NetPredicate
has_mark x = Predicate "has_mark" x () () () () () () ()

-- | Evaluate to /True/ if the state of the computation is set to the given value, possibly by 'put_state' function.
--
-- > has_state 11
has_state :: Word32 -> NetPredicate
has_state x = Predicate "has_state" x () () () () () () ()

-- | Evaluate to /True/ if the SkBuff has the given Layer3 protocol.
is_l3_proto :: Int16 -> NetPredicate
is_l3_proto x = Predicate "is_l3_proto" x () () () () () () ()

-- | Evaluate to /True/ if the SkBuff has the given Layer4 protocol.
is_l4_proto :: Int8 -> NetPredicate
is_l4_proto x = Predicate "is_l4_proto" x () () () () () () ()

has_port, has_src_port, has_dst_port :: Int16 -> NetPredicate

-- | Evaluate to /True/ if the SkBuff has the given source or destination port.
--
-- If the transport protocol is not present or has no port, the predicate evaluates to False.
--
-- > has_port 80
has_port x = Predicate "has_port" x () () () () () () ()

-- | Evaluate to /True/ if the SkBuff has the given source port.
--
-- If the transport protocol is not present or has no port, the predicate evaluates to False.
has_src_port x = Predicate "has_src_port" x () () () () () () ()

-- | Evaluate to /True/ if the SkBuff has the given destination port.
--
-- If the transport protocol is not present or has no port, the predicate evaluates to False.
has_dst_port x = Predicate "has_dst_port" x () () () () () () ()

-- | Evaluate to /True/ if the source or destination IP address matches the given network address. I.e.,
--
-- > has_addr "192.168.0.0" 24
has_addr :: IPv4 -> CInt -> NetPredicate

-- | Evaluate to /True/ if the source IP address matches the given network address.
has_src_addr :: IPv4 -> CInt -> NetPredicate

-- | Evaluate to /True/ if the destination IP address matches the given network address.
has_dst_addr :: IPv4 -> CInt -> NetPredicate

has_addr a p = Predicate "has_addr" a p () () () () () ()
has_src_addr a p = Predicate "has_src_addr" a p () () () () () ()
has_dst_addr a p = Predicate "has_dst_addr" a p () () () () () ()
-- Packet/computation properties: each names an in-kernel accessor via
-- the 'Property' constructor.

-- | Evaluate to the mark set by 'mark' function. By default packets are marked with 0.
get_mark = Property "get_mark" () () () () () () () ()

-- | Evaluate to the state of the computation (possibly set by 'state' function).
get_state = Property "get_state" () () () () () () () ()

-- | Evaluate to the /tos/ field of the IP header.
ip_tos = Property "ip_tos" () () () () () () () ()

-- | Evaluate to the /tot_len/ field of the IP header.
ip_tot_len = Property "ip_tot_len" () () () () () () () ()

-- | Evaluate to the /ip_id/ field of the IP header.
ip_id = Property "ip_id" () () () () () () () ()

-- | Evaluate to the /frag/ field of the IP header.
ip_frag = Property "ip_frag" () () () () () () () ()

-- | Evaluate to the /TTL/ field of the IP header.
ip_ttl = Property "ip_ttl" () () () () () () () ()

-- | Evaluate to the /source port/ of the TCP header.
tcp_source = Property "tcp_source" () () () () () () () ()

-- | Evaluate to the /destination port/ of the TCP header.
tcp_dest = Property "tcp_dest" () () () () () () () ()

-- | Evaluate to the /length/ field of the TCP header.
tcp_hdrlen = Property "tcp_hdrlen" () () () () () () () ()

-- | Evaluate to the /source port/ of the UDP header.
udp_source = Property "udp_source" () () () () () () () ()

-- | Evaluate to the /destination port/ of the UDP header.
udp_dest = Property "udp_dest" () () () () () () () ()

-- | Evaluate to the /length/ field of the UDP header.
udp_len = Property "udp_len" () () () () () () () ()

-- | Evaluate to the /type/ field of the ICMP header.
icmp_type = Property "icmp_type" () () () () () () () ()

-- | Evaluate to the /code/ field of the ICMP header.
icmp_code = Property "icmp_code" () () () () () () () ()
-- Predefined in-kernel computations:

-- | Dispatch the packet across the sockets
-- with a randomized algorithm that maintains the integrity of
-- physical links.
--
-- > ip >-> steer_link
steer_link = MFunction "steer_link" () () () () () () () () :: NetFunction

-- | Dispatch the packet across the sockets
-- with a randomized algorithm that maintains the integrity of
-- vlan links.
--
-- > steer_vlan
steer_vlan = MFunction "steer_vlan" () () () () () () () () :: NetFunction

-- | Dispatch the packet across the sockets
-- with a randomized algorithm that maintains the integrity of
-- IP flows.
--
-- > steer_ip
steer_ip = MFunction "steer_ip" () () () () () () () () :: NetFunction

-- | Dispatch the packet across the sockets
-- with a randomized algorithm that maintains the integrity of
-- IPv6 flows.
--
-- > steer_ip6 >-> log_msg "Steering an IPv6 packet"
steer_ip6 = MFunction "steer_ip6" () () () () () () () () :: NetFunction

-- | Dispatch the packet across the sockets
-- with a randomized algorithm that maintains the integrity of
-- TCP/UDP flows.
--
-- > steer_flow >-> log_msg "Steering a flow"
steer_flow = MFunction "steer_flow" () () () () () () () () :: NetFunction

-- | Dispatch the packet across the sockets
-- with a randomized algorithm that maintains the integrity of
-- RTP/RTCP flows.
--
-- > steer_rtp
steer_rtp = MFunction "steer_rtp" () () () () () () () () :: NetFunction

-- | Dispatch the packet across the sockets
-- with a randomized algorithm that maintains the integrity of
-- sub networks.
--
-- > steer_net "192.168.0.0" 16 24
steer_net :: IPv4 -> CInt -> CInt -> NetFunction
steer_net net p sub = MFunction "steer_net" net p sub () () () () ()

-- | Dispatch the packet across the sockets
-- with a randomized algorithm. The function uses as /hash/ the field
-- of /size/ bits taken at /offset/ bytes from the beginning of the packet.
steer_field :: CInt -- ^ offset from the beginning of the packet, in bytes
            -> CInt -- ^ sizeof field in bits
            -> NetFunction
steer_field off size = MFunction "steer_field" off size () () () () () ()
-- Predefined filters: each passes matching packets through and drops
-- the rest.

-- | Transform the given predicate in its counterpart monadic version.
-- Example:
--
-- > filter' is_udp >-> kernel
--
-- is logically equivalent to:
--
-- > udp >-> kernel
filter' :: NetPredicate -> NetFunction
filter' p = MFunction "filter" p () () () () () () ()

-- | Evaluate to /Pass SkBuff/ if it is an IPv4 packet, /Drop/ it otherwise.
ip = MFunction "ip" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is an IPv6 packet, /Drop/ it otherwise.
ip6 = MFunction "ip6" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is an UDP packet, /Drop/ it otherwise.
udp = MFunction "udp" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is a TCP packet, /Drop/ it otherwise.
tcp = MFunction "tcp" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is an ICMP packet, /Drop/ it otherwise.
icmp = MFunction "icmp" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is an UDP packet (on top of IPv6), /Drop/ it otherwise.
udp6 = MFunction "udp6" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is a TCP packet (on top of IPv6), /Drop/ it otherwise.
tcp6 = MFunction "tcp6" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is an ICMP packet (on top of IPv6), /Drop/ it otherwise.
icmp6 = MFunction "icmp6" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it has a vlan tag, /Drop/ it otherwise.
vlan = MFunction "vlan" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is a TCP or UDP packet, /Drop/ it otherwise.
flow = MFunction "flow" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is a RTP/RTCP packet, /Drop/ it otherwise.
rtp = MFunction "rtp" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is not a fragment, /Drop/ it otherwise.
no_frag = MFunction "no_frag" () () () () () () () () :: NetFunction

-- | Evaluate to /Pass SkBuff/ if it is not a fragment or if it's the first fragment, /Drop/ it otherwise.
no_more_frag = MFunction "no_more_frag" () () () () () () () () :: NetFunction
-- Forwarders.

-- | Forward the packet to the given device.
-- This function is lazy, in that the action is logged and performed
-- when the computation is completely evaluated.
--
-- > forward "eth1"
forward :: String -> NetFunction
forward d = MFunction "forward" d () () () () () () ()

-- | Forward the packet to the given device and evaluates to /Drop/. Example:
--
-- > when' is_udp (bridge "eth1") >-> kernel
--
-- Conditional bridge, forward the packet to eth1 if UDP, send it to the kernel
-- otherwise.
bridge :: String -> NetFunction
bridge d = MFunction "bridge" d () () () () () () ()

-- | Forward the packet to the given device and, evaluates to /Pass SkBuff/ or /Drop/,
-- depending on the value returned by the predicate. Example:
--
-- > tee "eth1" is_udp >-> kernel
--
-- Logically equivalent to:
--
-- > forward "eth1" >-> udp >-> kernel
--
-- Only a little bit more efficient.
tee :: String -> NetPredicate -> NetFunction
tee d p = MFunction "tee" d p () () () () () ()

-- | Evaluate to /Pass SkBuff/, or forward the packet to the given device and evaluate to /Drop/,
-- depending on the value returned by the predicate. Example:
--
-- > tap "eth1" is_udp >-> kernel
--
-- Logically equivalent to:
--
-- > unless' is_udp (forward "eth1" >-> drop') >-> kernel
--
-- Only a little bit more efficient.
tap :: String -> NetPredicate -> NetFunction
tap d p = MFunction "tap" d p () () () () () ()

-- | Forward the packet to the given device. This operation breaks the purity of the language,
-- and it is possibly slower than the lazy "forward" counterpart.
--
-- > forwardIO "eth1"
forwardIO :: String -> NetFunction
forwardIO d = MFunction "forwardIO" d () () () () () () ()

-- | Send a copy of the packet to the kernel (the sk_buff may have been captured directly
-- by PFQ).
--
-- To avoid loop, this function is ignored for packets sniffed from the kernel.
kernel = MFunction "kernel" () () () () () () () () :: NetFunction

-- | Broadcast the packet to all the sockets that have joined the group for which this computation
-- is specified.
broadcast = MFunction "broadcast" () () () () () () () () :: NetFunction

-- | Drop the packet. The computation evaluates to /Drop/.
drop'= MFunction "drop" () () () () () () () () :: NetFunction
-- | Unit operation implements left- and right-identity for Action monad.
unit = MFunction "unit" () () () () () () () () :: NetFunction
-- | Log a message to syslog.
--
-- > udp >-> log_msg "This is an UDP packet"
log_msg :: String -> NetFunction
log_msg msg = MFunction "log_msg" msg () () () () () () ()
-- | Dump the payload of packet to syslog.
--
-- > icmp >-> log_buff
log_buff = MFunction "log_buff" () () () () () () () () :: NetFunction
-- | Log the packet to syslog, with a syntax similar to tcpdump.
--
-- > icmp >-> log_msg "This is an ICMP packet:" >-> log_packet
log_packet = MFunction "log_packet" () () () () () () () () :: NetFunction
-- | Increment the i-th counter of the current group.
--
-- > inc 10
inc :: CInt -> NetFunction
inc idx = MFunction "inc" idx () () () () () () ()

-- | Decrement the i-th counter of the current group.
--
-- > dec 10
dec :: CInt -> NetFunction
dec idx = MFunction "dec" idx () () () () () () ()

-- | Mark the packet with the given value.
-- This function is unsafe in that it breaks the pure functional paradigm;
-- consider using 'put_state' instead.
--
-- > mark 42
mark :: Word32 -> NetFunction
mark value = MFunction "mark" value () () () () () () ()

-- | Set the state of the computation to the given value.
--
-- > state 42
put_state :: Word32 -> NetFunction
put_state value = MFunction "put_state" value () () () () () () ()
-- | Monadic version of 'is_l3_proto' predicate.
--
-- Predicates are used in conditional expressions, while monadic functions
-- are composed with the Kleisli operator:
--
-- > l3_proto 0x842 >-> log_msg "Wake-on-LAN packet!"
l3_proto :: Int16 -> NetFunction
l3_proto proto = MFunction "l3_proto" proto () () () () () () ()

-- | Monadic version of 'is_l4_proto' predicate.
--
-- > l4_proto 89 >-> log_msg "OSFP packet!"
l4_proto :: Int8 -> NetFunction
l4_proto proto = MFunction "l4_proto" proto () () () () () () ()

-- | Monadic version of 'has_port' predicate (matches source or
-- destination port).
--
-- > port 80 >-> log_msg "http packet!"
port :: Int16 -> NetFunction
port pt = MFunction "port" pt () () () () () () ()

-- | Monadic version of 'has_src_port' predicate.
src_port :: Int16 -> NetFunction
src_port pt = MFunction "src_port" pt () () () () () () ()

-- | Monadic version of 'has_dst_port' predicate.
dst_port :: Int16 -> NetFunction
dst_port pt = MFunction "dst_port" pt () () () () () () ()
-- | Monadic version of 'has_addr' predicate.
--
-- Predicates are used in conditional expressions, while monadic functions
-- are composed with the Kleisli operator:
--
-- > addr "192.168.0.0" 24 >-> log_packet
addr :: IPv4 -> CInt -> NetFunction
addr net prefix = MFunction "addr" net prefix () () () () () ()

-- | Monadic version of 'has_src_addr' predicate.
src_addr :: IPv4 -> CInt -> NetFunction
src_addr net prefix = MFunction "src_addr" net prefix () () () () () ()

-- | Monadic version of 'has_dst_addr' predicate.
dst_addr :: IPv4 -> CInt -> NetFunction
dst_addr net prefix = MFunction "dst_addr" net prefix () () () () () ()
-- | Conditional execution of a monadic NetFunction.
--
-- Runs the given NetFunction when the predicate evaluates to /True/,
-- otherwise does nothing. Example:
--
-- > when' is_tcp (log_msg "This is a TCP Packet")
when' :: NetPredicate -> NetFunction -> NetFunction
when' prd fun = MFunction "when" prd fun () () () () () ()

-- | The reverse of 'when'': runs the NetFunction when the predicate
-- evaluates to /False/.
unless' :: NetPredicate -> NetFunction -> NetFunction
unless' prd fun = MFunction "unless" prd fun () () () () () ()

-- | Conditional execution of monadic NetFunctions.
--
-- Evaluates to the first or the second expression, depending on the value
-- returned by the predicate. Example:
--
-- > conditional is_udp (forward "eth1") (forward "eth2")
conditional :: NetPredicate -> NetFunction -> NetFunction -> NetFunction
conditional prd onTrue onFalse = MFunction "conditional" prd onTrue onFalse () () () () ()

-- | Invert a monadic NetFunction. Useful to invert filters:
--
-- > inv ip >-> log_msg "This is not an IPv4 Packet"
inv :: NetFunction -> NetFunction
inv fun = MFunction "inv" fun () () () () () () ()

-- | Parallel composition of two monadic NetFunctions.
--
-- Logical \'or\' for monadic filters:
--
-- > par' udp icmp >-> log_msg "This is an UDP or ICMP Packet"
par' :: NetFunction -> NetFunction -> NetFunction
par' lhs rhs = MFunction "par" lhs rhs () () () () () ()
-- | Predicate that evaluates to /True/ when the packet carries one of the
-- vlan ids in the given list. Example:
--
-- > when' (vland_id [1,13,42,43]) (msg_log "Got a packet!")
vlan_id :: [CInt] -> NetPredicate
vlan_id vids = Predicate "vlan_id" vids () () () () () () ()

-- | Monadic function, counterpart of the 'vlan_id' predicate.
vlan_id_filter :: [CInt] -> NetFunction
vlan_id_filter vids = MFunction "vlan_id_filter" vids () () () () () () ()
-- | Predicate that evaluates to /True/ when the source or the destination address
-- of the packet matches the ones specified by the bloom list.
--
-- The first 'CInt' argument specifies the size of the bloom filter. Example:
--
-- > when' (bloom 1024 ["192.168.0.13", "192.168.0.42"] 32) log_packet >-> kernel
--
-- NOTE: the host names are resolved with 'inet_addr' via 'unsafePerformIO';
-- the NOINLINE pragmas keep each resolution from being duplicated or
-- re-ordered by the inliner.
{-# NOINLINE bloom #-}
bloom :: CInt -- ^ Hint: size of bloom filter (M)
      -> [HostName] -- ^ List of Host/Network address to match
      -> CInt -- ^ Network prefix
      -> NetPredicate
-- | Similarly to 'bloom', evaluates to /True/ when the source address
-- of the packet matches the ones specified by the bloom list.
{-# NOINLINE bloom_src #-}
bloom_src :: CInt -> [HostName] -> CInt -> NetPredicate
-- | Similarly to 'bloom', evaluates to /True/ when the destination address
-- of the packet matches the ones specified by the bloom list.
{-# NOINLINE bloom_dst #-}
bloom_dst :: CInt -> [HostName] -> CInt -> NetPredicate
-- | Monadic counterpart of 'bloom' function.
{-# NOINLINE bloom_filter #-}
bloom_filter :: CInt -> [HostName] -> CInt -> NetFunction
-- | Monadic counterpart of 'bloom_src' function.
{-# NOINLINE bloom_src_filter #-}
bloom_src_filter :: CInt -> [HostName] -> CInt -> NetFunction
-- | Monadic counterpart of 'bloom_dst' function.
{-# NOINLINE bloom_dst_filter #-}
bloom_dst_filter :: CInt -> [HostName] -> CInt -> NetFunction
-- Each definition resolves its HostName list once and embeds the resulting
-- addresses into the Predicate/MFunction descriptor.
bloom m hs p = let ips = unsafePerformIO (mapM inet_addr hs) in Predicate "bloom" m ips p () () () () ()
bloom_src m hs p = let ips = unsafePerformIO (mapM inet_addr hs) in Predicate "bloom_src" m ips p () () () () ()
bloom_dst m hs p = let ips = unsafePerformIO (mapM inet_addr hs) in Predicate "bloom_dst" m ips p () () () () ()
bloom_filter m hs p = let ips = unsafePerformIO (mapM inet_addr hs) in MFunction "bloom_filter" m ips p () () () () ()
bloom_src_filter m hs p = let ips = unsafePerformIO (mapM inet_addr hs) in MFunction "bloom_src_filter" m ips p () () () () ()
bloom_dst_filter m hs p = let ips = unsafePerformIO (mapM inet_addr hs) in MFunction "bloom_dst_filter" m ips p () () () () ()
-- bloom filter, utility functions:

-- | Number of hash functions (k) assumed by the calculators below.
-- An explicit signature is given: previously the type was fixed only by
-- the monomorphism restriction and distant use sites.
-- NOTE(review): presumably this must match the k used by the in-kernel
-- bloom implementation -- confirm.
bloomK :: Int
bloomK = 4

-- | Bloom filter: utility function that computes the optimal /M/ (filter
-- size in bits), given the expected number of elements /N/ and the desired
-- false-positive probability /p/.
bloomCalcM :: Int -> Double -> Int
bloomCalcM n p = ceiling $ fromIntegral (-bloomK * n) / log (1 - p ** (1 / fromIntegral bloomK))

-- | Bloom filter: utility function that computes the optimal /N/
-- (capacity), given the filter size /M/ and the desired false-positive
-- probability /p/.
bloomCalcN :: Int -> Double -> Int
bloomCalcN m p = ceiling $ fromIntegral (-m) * log (1 - p ** (1 / fromIntegral bloomK)) / fromIntegral bloomK

-- | Bloom filter: utility function that computes the false-positive
-- probability /P/, given the /N/ and /M/ parameters.
bloomCalcP :: Int -> Int -> Double
bloomCalcP n m = (1 - (1 - 1 / fromIntegral m) ** fromIntegral (n * bloomK)) ^ bloomK
|
pandaychen/PFQ
|
user/Haskell/Network/PFq/Default.hs
|
gpl-2.0
| 27,254
| 0
| 12
| 6,492
| 6,263
| 3,393
| 2,870
| 320
| 1
|
{-# LANGUAGE StandaloneDeriving, DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
import Control.Applicative
import Control.Monad
import qualified Data.Foldable as F
import qualified Data.Traversable as T
import Data.List
import Data.Maybe
import System.Directory
import System.Directory.Tree
import System.Environment
import System.FilePath
import System.Process
-- | A labelled rose tree: each node carries a label of type @a@ (used
-- below to accumulate absolute directory paths) and a list of children.
data LTree a = B a [LTree a]
deriving instance (Show a) => Show (LTree a)
deriving instance Functor LTree
deriving instance F.Foldable LTree
-- deriving instance T.Traversable LTree
-- | True when the path has a @.hdl@ extension.
isHdl p = takeExtension p == ".hdl"

-- | True for 'File' nodes of a directory tree.
isFile (File _ _) = True
isFile _ = False

-- | Extract the payload of a 'File' node, if any.
takeFile x = if isFile x then Just (file x) else Nothing

-- | True for 'Dir' nodes of a directory tree.
isDirectory (Dir _ _) = True
isDirectory _ = False

-- | Extract the name of a 'Dir' node, if any.
takeDir x = if isDirectory x then Just (name x) else Nothing
-- | A labelled tree whose root label is the empty path.
emptyB = B "" []

-- | A labelled tree rooted at the given directory, with no children yet.
startB dir = B dir []

-- | Convert a 'DirTree' into an 'LTree' of absolute paths, threading the
-- parent path down via the accumulator node. Files and failed reads are
-- dropped ('Nothing').
buildLTree _ (Failed _ _) = Nothing
buildLTree _ (File _ _) = Nothing
buildLTree (B ctxt _) (Dir n ds) =
    Just (B path (mapMaybe (buildLTree (B path [])) ds))
  where
    path = ctxt </> n
-- | Split a path into @(prefix, rest)@ when it starts with the given
-- prefix; otherwise return the whole path in 'Left'.
splitprefix :: FilePath -> FilePath -> Either FilePath (FilePath,FilePath)
splitprefix pre path
  | pre `isPrefixOf` path = Right (pre, drop (length pre) path)
  | otherwise             = Left path

-- | Replace the prefix @old@ by @new@; paths that do not start with @old@
-- are returned unchanged.
replaceprefix :: (FilePath,FilePath) -> FilePath -> FilePath
replaceprefix (old, new) path =
  case splitprefix old path of
    Right (_, rest) -> new ++ rest
    Left untouched  -> untouched

-- | Split a path into @(front, suffix)@ when it ends with the given
-- suffix; otherwise return the whole path in 'Left'.
splitsuffix :: FilePath -> FilePath -> Either FilePath (FilePath,FilePath)
splitsuffix suf path
  | suf `isSuffixOf` path = Right (take (length path - length suf) path, suf)
  | otherwise             = Left path

-- | Replace the suffix @old@ by @new@; paths that do not end with @old@
-- are returned unchanged.
replacesuffix :: (FilePath,FilePath) -> FilePath -> FilePath
replacesuffix (old, new) path =
  case splitsuffix old path of
    Right (front, _) -> front ++ new
    Left untouched   -> untouched
-- | Create the directory at @fp@ unless it already exists.
--
-- Uses 'createDirectoryIfMissing' instead of the previous
-- check-then-create pair ('doesDirectoryExist' followed by
-- 'createDirectory'), which was racy: another process could create the
-- directory between the check and the create, making 'createDirectory'
-- throw. As before, the parent directory must already exist (the 'False'
-- argument means parents are not created).
createDirIfNotExist fp = createDirectoryIfMissing False fp
makePDF (x,y) = system $ "/home/wavewave/repo/src/hoodle-parser/examples/parsetest atto " ++ x ++ " " ++ y
-- | Walk the current directory tree, mirror its directory structure under
-- the hard-coded target root, and run 'makePDF' over every @.hdl@ file.
main = do
  -- args <- getArgs
  -- let newbase = args !! 0
  cwd <- getCurrentDirectory
  -- Read the whole directory tree anchored at cwd.
  (r :/ r') <- build cwd
  let files = catMaybes . map takeFile . flattenDir $ r'
      hdlfiles = filter isHdl $ files
      -- Rebase the .hdl paths from the Dropbox tree into the test tree,
      -- then swap the extension for the output file names.
      nhdlfiles = map (replaceprefix ("/home/wavewave/Dropbox","/home/wavewave/test")) hdlfiles
      npdffiles = map (replacesuffix ("hdl","pdf")) nhdlfiles
  let dirs = catMaybes . map takeDir . flattenDir $ r'
  -- Collect every directory path via the LTree of absolute paths and
  -- recreate the same hierarchy under the target root.
  -- NOTE(review): fromJust crashes if buildLTree yields Nothing (root not
  -- a Dir) -- TODO confirm this cannot happen for `build cwd`.
  let ltree = buildLTree (startB r) r'
      ltreelst = F.foldr (:) [] (fromJust ltree) -- F.toList ltree
      ntreelst = map (replaceprefix ("/home/wavewave/Dropbox","/home/wavewave/test")) ltreelst
  mapM_ print dirs
  mapM_ print ntreelst
  mapM_ createDirIfNotExist ntreelst
  -- Pair each source .hdl with its target .pdf path and convert.
  let cplist = zipWith (,) hdlfiles npdffiles
  mapM_ print cplist
  -- mapM_ (uncurry copyFile) cplist
  mapM_ makePDF cplist
  -- mapM_ print ltreelst
  -- print fromJust ltree
  -- putStrLn $ "length = " ++ show (length ltreelst)
|
wavewave/hoodle-tools
|
exe/filerecurse.hs
|
gpl-2.0
| 3,048
| 0
| 13
| 688
| 1,051
| 534
| 517
| 63
| 1
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Lamdu.Sugar.Internal.EntityId
( EntityId
, bs
, ofValI, ofIRef
, ofLambdaParam
, ofLambdaTagParam
, ofInjectTag
, ofGetFieldTag
, ofRecExtendTag
, ofCaseTag
, ofTId
, randomizeExprAndParams
) where
import Data.ByteString (ByteString)
import Data.Hashable (Hashable)
import Data.Store.Guid (Guid)
import qualified Data.Store.Guid as Guid
import Data.Store.IRef (IRef)
import qualified Lamdu.Expr.GenIds as GenIds
import qualified Lamdu.Expr.IRef as ExprIRef
import qualified Lamdu.Expr.Type as T
import qualified Lamdu.Expr.UniqueId as UniqueId
import Lamdu.Expr.Val (Val)
import qualified Lamdu.Expr.Val as V
import System.Random (RandomGen)
-- | Opaque identifier for sugar entities, backed by a 'Guid'.
newtype EntityId = EntityId Guid
    deriving (Eq, Hashable, Show)

-- | The raw bytes of the underlying 'Guid'.
bs :: EntityId -> ByteString
bs (EntityId g) = Guid.bs g

-- | Assign fresh ids to an expression and its parameters; every generated
-- 'Guid' is handed to the payload function together with its 'EntityId'.
randomizeExprAndParams ::
    RandomGen gen => gen -> Val (Guid -> EntityId -> a) -> Val a
randomizeExprAndParams gen val =
    GenIds.randomizeExprAndParams gen (fmap withEntityId val)
    where
        withEntityId f guid = f guid (EntityId guid)

-- | Derive a new id from an existing one by augmenting its guid with a tag.
augment :: String -> EntityId -> EntityId
augment tag (EntityId g) = EntityId (Guid.augment tag g)

ofIRef :: IRef m a -> EntityId
ofIRef iref = EntityId (UniqueId.toGuid iref)

ofValI :: ExprIRef.ValI m -> EntityId
ofValI = ofIRef . ExprIRef.unValI

ofTId :: T.Id -> EntityId
ofTId tid = EntityId (UniqueId.toGuid tid)

ofLambdaParam :: V.Var -> EntityId
ofLambdaParam var = EntityId (UniqueId.toGuid var)

ofLambdaTagParam :: V.Var -> T.Tag -> EntityId
ofLambdaTagParam var tag =
    EntityId (Guid.combine (UniqueId.toGuid var) (UniqueId.toGuid tag))

ofInjectTag :: EntityId -> EntityId
ofInjectTag = augment "tag"

ofGetFieldTag :: EntityId -> EntityId
ofGetFieldTag = augment "tag"

ofRecExtendTag :: EntityId -> EntityId
ofRecExtendTag = augment "tag"

ofCaseTag :: EntityId -> EntityId
ofCaseTag = augment "tag"
|
rvion/lamdu
|
Lamdu/Sugar/Internal/EntityId.hs
|
gpl-3.0
| 1,961
| 0
| 11
| 396
| 551
| 311
| 240
| 55
| 1
|
{-# LANGUAGE CPP #-}
{- | UIState operations. -}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Hledger.UI.UIState
where
#if !MIN_VERSION_brick(0,19,0)
import Brick
#endif
import Brick.Widgets.Edit
import Data.List
import Data.Text.Zipper (gotoEOL)
import Data.Time.Calendar (Day)
import Hledger
import Hledger.Cli.CliOptions
import Hledger.UI.UITypes
import Hledger.UI.UIOptions
-- | Toggle between showing only unmarked items or all items.
-- The exact toggle semantics depend on the --status-toggles style; see
-- 'reportOptsToggleStatusSomehow'.
toggleUnmarked :: UIState -> UIState
toggleUnmarked ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=reportOptsToggleStatusSomehow Unmarked copts ropts}}}

-- | Toggle between showing only pending items or all items.
togglePending :: UIState -> UIState
togglePending ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=reportOptsToggleStatusSomehow Pending copts ropts}}}

-- | Toggle between showing only cleared items or all items.
toggleCleared :: UIState -> UIState
toggleCleared ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=reportOptsToggleStatusSomehow Cleared copts ropts}}}
-- TODO testing different status toggle styles

-- | Generate zero or more indicators of the status filters currently active,
-- which will be shown comma-separated as part of the indicators list.
uiShowStatus :: CliOpts -> [Status] -> [String]
uiShowStatus copts ss =
  case style of
    -- in style 2, instead of "Y, Z" show "not X"
    Just 2 | length ss == numstatuses-1
      -> map (("not "++). showstatus) $ sort $ complement ss -- should be just one
    _ -> map showstatus $ sort ss
  where
    numstatuses = length [minBound..maxBound::Status]
    -- toggle style is chosen with the --status-toggles raw option
    style = maybeintopt "status-toggles" $ rawopts_ copts
    showstatus Cleared = "cleared"
    showstatus Pending = "pending"
    showstatus Unmarked = "unmarked"
-- | Dispatch to one of the status-toggle behaviours below, chosen by the
-- --status-toggles raw option (default: style 1).
reportOptsToggleStatusSomehow :: Status -> CliOpts -> ReportOpts -> ReportOpts
reportOptsToggleStatusSomehow s copts ropts =
  case maybeintopt "status-toggles" $ rawopts_ copts of
    Just 2 -> reportOptsToggleStatus2 s ropts
    Just 3 -> reportOptsToggleStatus3 s ropts
    -- Just 4 -> reportOptsToggleStatus4 s ropts
    -- Just 5 -> reportOptsToggleStatus5 s ropts
    _      -> reportOptsToggleStatus1 s ropts

-- 1 UPC toggles only X/all
reportOptsToggleStatus1 s ropts@ReportOpts{statuses_=ss}
  | ss == [s] = ropts{statuses_=[]}
  | otherwise = ropts{statuses_=[s]}

-- 2 UPC cycles X/not-X/all
-- repeatedly pressing X cycles:
-- [] U [u]
-- [u] U [pc]
-- [pc] U []
-- pressing Y after first or second step starts new cycle:
-- [u] P [p]
-- [pc] P [p]
reportOptsToggleStatus2 s ropts@ReportOpts{statuses_=ss}
  | ss == [s]            = ropts{statuses_=complement [s]}
  | ss == complement [s] = ropts{statuses_=[]}
  | otherwise            = ropts{statuses_=[s]} -- XXX assume only three values

-- 3 UPC toggles each X
reportOptsToggleStatus3 s ropts@ReportOpts{statuses_=ss}
  | s `elem` ss = ropts{statuses_=filter (/= s) ss}
  | otherwise   = ropts{statuses_=simplifyStatuses (s:ss)}

-- 4 upc sets X, UPC sets not-X
--reportOptsToggleStatus4 s ropts@ReportOpts{statuses_=ss}
-- | s `elem` ss = ropts{statuses_=filter (/= s) ss}
-- | otherwise = ropts{statuses_=simplifyStatuses (s:ss)}
--
-- 5 upc toggles X, UPC toggles not-X
--reportOptsToggleStatus5 s ropts@ReportOpts{statuses_=ss}
-- | s `elem` ss = ropts{statuses_=filter (/= s) ss}
-- | otherwise = ropts{statuses_=simplifyStatuses (s:ss)}
-- | Given a list of unique enum values, list the other possible values of
-- that enum (in 'Enum' order).
complement :: (Bounded a, Enum a, Eq a) => [a] -> [a]
complement xs = filter (`notElem` xs) [minBound .. maxBound]
--
-- | Toggle between showing all and showing only nonempty (more precisely,
-- nonzero) items.
toggleEmpty :: UIState -> UIState
toggleEmpty ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=flipEmpty ropts}}}
  where
    -- renamed from a local "toggleEmpty", which shadowed the top-level
    -- name and looked recursive
    flipEmpty ropts' = ropts'{empty_=not $ empty_ ropts'}

-- | Toggle between flat and tree mode. If in the third "default" mode, go to flat mode.
toggleFlat :: UIState -> UIState
toggleFlat ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=toggleFlatMode ropts}}}
  where
    toggleFlatMode ropts'@ReportOpts{accountlistmode_=ALFlat} = ropts'{accountlistmode_=ALTree}
    toggleFlatMode ropts' = ropts'{accountlistmode_=ALFlat}

-- | Toggle between historical balances and period balances.
toggleHistorical :: UIState -> UIState
toggleHistorical ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{balancetype_=b}}}}
  where
    b | balancetype_ ropts == HistoricalBalance = PeriodChange
      | otherwise                               = HistoricalBalance

-- | Toggle between showing all and showing only real (non-virtual) items.
toggleReal :: UIState -> UIState
toggleReal ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=flipReal ropts}}}
  where
    -- renamed from a local "toggleReal" (same shadowing issue as above)
    flipReal ropts' = ropts'{real_=not $ real_ ropts'}
-- | Toggle the ignoring of balance assertions.
toggleIgnoreBalanceAssertions :: UIState -> UIState
toggleIgnoreBalanceAssertions ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{inputopts_=iopts}}} =
  ui{aopts=uopts{cliopts_=copts{inputopts_=iopts{ignore_assertions_=not $ ignore_assertions_ iopts}}}}

-- | Step through larger report periods, up to all.
-- (The Day argument is unused here, unlike 'shrinkReportPeriod'.)
growReportPeriod :: Day -> UIState -> UIState
growReportPeriod _d ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{period_=periodGrow $ period_ ropts}}}}

-- | Step through smaller report periods, down to a day.
-- The given day anchors which smaller period is chosen.
shrinkReportPeriod :: Day -> UIState -> UIState
shrinkReportPeriod d ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{period_=periodShrink d $ period_ ropts}}}}

-- | Step the report start/end dates to the next period of same duration,
-- remaining inside the given enclosing span.
nextReportPeriod :: DateSpan -> UIState -> UIState
nextReportPeriod enclosingspan ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts@ReportOpts{period_=p}}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{period_=periodNextIn enclosingspan p}}}}

-- | Step the report start/end dates to the previous period of same duration,
-- remaining inside the given enclosing span.
-- (The original comment said "next" -- a copy-paste slip; this calls
-- periodPreviousIn.)
previousReportPeriod :: DateSpan -> UIState -> UIState
previousReportPeriod enclosingspan ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts@ReportOpts{period_=p}}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{period_=periodPreviousIn enclosingspan p}}}}

-- | If a standard report period is set, step it forward/backward if needed so that
-- it encloses the given date.
moveReportPeriodToDate :: Day -> UIState -> UIState
moveReportPeriodToDate d ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts@ReportOpts{period_=p}}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{period_=periodMoveTo d p}}}}

-- | Get the report period.
reportPeriod :: UIState -> Period
reportPeriod UIState{aopts=UIOpts{cliopts_=CliOpts{reportopts_=ReportOpts{period_=p}}}} =
  p

-- | Set the report period.
setReportPeriod :: Period -> UIState -> UIState
setReportPeriod p ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{period_=p}}}}
-- | Apply a new filter query.
setFilter :: String -> UIState -> UIState
setFilter s ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{query_=s}}}}

-- | Clear all filters/flags: back to tree mode, empty items shown,
-- no status filters, virtual postings included, empty query.
resetFilter :: UIState -> UIState
resetFilter ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{
     accountlistmode_=ALTree
    ,empty_=True
    ,statuses_=[]
    ,real_=False
    ,query_=""
    --,period_=PeriodAll
    }}}}

-- | Remove any depth limit.
resetDepth :: UIState -> UIState
resetDepth ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}} =
  ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{depth_=Nothing}}}}
-- | Get the maximum account depth in the current journal.
--
-- Returns 0 for a journal with no accounts. (The previous implementation
-- applied the partial 'maximum' directly to the level list, which throws
-- on an empty journal.)
maxDepth :: UIState -> Int
maxDepth UIState{ajournal=j} = maximum $ 0 : map accountNameLevel (journalAccountNames j)
-- | Decrement the current depth limit towards 0. If there was no depth limit,
-- set it to one less than the maximum account depth.
decDepth :: UIState -> UIState
decDepth ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts@ReportOpts{..}}}}
  = ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{depth_=dec depth_}}}}
  where
    dec (Just d) = Just $ max 0 (d-1)
    dec Nothing  = Just $ maxDepth ui - 1

-- | Increment the current depth limit. If this makes it equal to the
-- the maximum account depth, remove the depth limit.
incDepth :: UIState -> UIState
incDepth ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts@ReportOpts{..}}}}
  = ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{depth_=inc depth_}}}}
  where
    inc (Just d) | d < (maxDepth ui - 1) = Just $ d+1
    inc _ = Nothing

-- | Set the current depth limit to the specified depth, or remove the depth limit.
-- Also remove the depth limit if the specified depth is greater than the current
-- maximum account depth. If the specified depth is negative, reset the depth limit
-- to whatever was specified at startup.
setDepth :: Maybe Int -> UIState -> UIState
setDepth mdepth ui@UIState{aopts=uopts@UIOpts{cliopts_=copts@CliOpts{reportopts_=ropts}}}
  = ui{aopts=uopts{cliopts_=copts{reportopts_=ropts{depth_=mdepth'}}}}
  where
    mdepth' = case mdepth of
                Nothing -> Nothing
                Just d | d < 0 -> depth_ ropts
                       | d >= maxDepth ui -> Nothing
                       | otherwise -> mdepth

-- | Get the current depth limit, if any.
getDepth :: UIState -> Maybe Int
getDepth UIState{aopts=UIOpts{cliopts_=CliOpts{reportopts_=ropts}}} = depth_ ropts
-- | Open the minibuffer, setting its content to the current query with the cursor at the end.
showMinibuffer :: UIState -> UIState
showMinibuffer ui = setMode (Minibuffer e) ui
  where
    -- brick's editor API changed in 0.19; CPP selects the right call.
#if MIN_VERSION_brick(0,19,0)
    e = applyEdit gotoEOL $ editor MinibufferEditor (Just 1) oldq
#else
    e = applyEdit gotoEOL $ editor MinibufferEditor (str . unlines) (Just 1) oldq
#endif
    oldq = query_ $ reportopts_ $ cliopts_ $ aopts ui

-- | Close the minibuffer, discarding any edit in progress.
closeMinibuffer :: UIState -> UIState
closeMinibuffer = setMode Normal

-- | Replace the current UI mode (Normal, Minibuffer, ...).
setMode :: Mode -> UIState -> UIState
setMode m ui = ui{aMode=m}
-- | Regenerate the content for the current and previous screens, from a new journal and current date.
regenerateScreens :: Journal -> Day -> UIState -> UIState
regenerateScreens j d ui@UIState{aScreen=s,aPrevScreens=ss} =
  -- XXX clumsy due to entanglement of UIState and Screen.
  -- sInit operates only on an appstate's current screen, so
  -- remove all the screens from the appstate and then add them back
  -- one at a time, regenerating as we go.
  let
    -- s:ss is never empty, so this irrefutable pattern is safe
    first:rest = reverse $ s:ss :: [Screen]
    ui0 = ui{ajournal=j, aScreen=first, aPrevScreens=[]} :: UIState
    ui1 = (sInit first) d False ui0 :: UIState
    ui2 = foldl' (\ui s -> (sInit s) d False $ pushScreen s ui) ui1 rest :: UIState
  in
    ui2

-- | Push a screen onto the navigation history, making it current.
pushScreen :: Screen -> UIState -> UIState
pushScreen scr ui = ui{aPrevScreens=(aScreen ui:aPrevScreens ui)
                      ,aScreen=scr
                      }

-- | Pop the most recent screen from the history, if any; otherwise no-op.
popScreen :: UIState -> UIState
popScreen ui@UIState{aPrevScreens=s:ss} = ui{aScreen=s, aPrevScreens=ss}
popScreen ui = ui

-- | Discard all screens but the oldest, clear filters/depth/minibuffer,
-- and reinitialise that screen for the given day.
resetScreens :: Day -> UIState -> UIState
resetScreens d ui@UIState{aScreen=s,aPrevScreens=ss} =
  (sInit topscreen) d True $ resetDepth $ resetFilter $ closeMinibuffer ui{aScreen=topscreen, aPrevScreens=[]}
  where
    topscreen = case ss of _:_ -> last ss
                           []  -> s

-- | Enter a new screen, saving the old screen & state in the
-- navigation history and initialising the new screen's state.
screenEnter :: Day -> Screen -> UIState -> UIState
screenEnter d scr ui = (sInit scr) d True $
                       pushScreen scr
                       ui
|
ony/hledger
|
hledger-ui/Hledger/UI/UIState.hs
|
gpl-3.0
| 12,612
| 0
| 21
| 2,054
| 3,529
| 1,985
| 1,544
| 160
| 4
|
module Data.DotsAndBoxes.Test.StringsAndCoins where
-- | For every edge (i, j) with j /= 0, also add the reversed edge (j, i);
-- edges into node 0 stay one-directional (node 0 is presumably the
-- "ground" node of the strings-and-coins position -- confirm against the
-- game encoding).
andBack :: [(Int, Int)] -> [(Int, Int)]
andBack es = es ++ [ (b, a) | (a, b) <- es, b /= 0 ]
-- Small strings-and-coins test positions as undirected-style fgl graphs
-- (unit node and edge labels).
-- NOTE(review): 'mkUGraph' and 'Gr' come from fgl (Data.Graph.Inductive);
-- their imports are not visible in this chunk -- confirm they exist above.
c1 = mkUGraph [0..1] $ andBack [(1, 0)] :: Gr () ()
c2 = mkUGraph [0..3] $ andBack [(1, 2), (2, 0), (2, 3)] :: Gr () ()
c3 = mkUGraph [0..2] $ andBack [(1, 2)] :: Gr () ()
c4 = mkUGraph [0..3] $ andBack [(1, 2), (2, 3)] :: Gr () ()
l1 = mkUGraph [0..2] $ andBack [(1, 2), (2, 0)] :: Gr () ()
l2 = mkUGraph [0..3] $ andBack [(1, 2), (2, 3), (3, 0)] :: Gr () ()
lp = mkUGraph [0..4] $ andBack [(1, 2), (1, 3), (2, 4), (3, 4)] :: Gr () ()
-- ns3 has parallel edges to node 0; note it is NOT included in b below.
ns3 = mkUGraph [0..4] $ andBack [(1, 2), (1, 3), (2, 4), (3, 4), (1, 0), (1, 0), (2, 0)] :: Gr () ()
-- The batch of test graphs used elsewhere.
b = [c1, c2, c3, c4, l1, l2, lp]
|
mcapodici/dotsandboxes
|
test/Data/DotsAndBoxes/Test/StringsAndCoins.hs
|
gpl-3.0
| 719
| 0
| 9
| 166
| 567
| 330
| 237
| 12
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Network.Refraction.FairExchange.Types
( KeyPair
, Secret
, numSecrets
, numChallenges
, BobKeyMessage(..)
, AliceKeysMessage(..)
) where
import Data.Aeson (FromJSON, ToJSON)
import Data.Text (Text)
import GHC.Generics
import Network.Haskoin.Crypto
-- | A private/public key pair.
type KeyPair = (PrvKey, PubKey)

-- | A secret value exchanged during the fair-exchange protocol.
type Secret = Integer

-- Protocol parameters: how many secrets are generated and how many are
-- challenged. NOTE(review): nothing here enforces a relationship between
-- the two (e.g. numChallenges < numSecrets) -- confirm at the use sites.
numSecrets = 4 :: Int
numChallenges = 2 :: Int

-- | Bob's key message: his two public keys plus hash values.
data BobKeyMessage = BobKeyMessage {
    bKey1 :: PubKey
  , bKey2 :: PubKey
  , bHashes :: [Text] -- ^ text for the hex-encoded bytestring
  , bSumHashes :: [Text] -- ^ text for the hex-encoded bytestring
  } deriving (Generic)
instance ToJSON BobKeyMessage
instance FromJSON BobKeyMessage

-- | Alice's reply: her two public keys plus revealed secrets.
data AliceKeysMessage = AliceKeysMessage {
    aKey1 :: PubKey
  , aKey2 :: PubKey
  , aSecrets :: [Secret]
  } deriving (Generic)
instance ToJSON AliceKeysMessage
instance FromJSON AliceKeysMessage
|
hudon/refraction-hs
|
src/Network/Refraction/FairExchange/Types.hs
|
gpl-3.0
| 959
| 0
| 9
| 191
| 219
| 136
| 83
| 32
| 1
|
{-# LANGUAGE BangPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module :
-- Copyright : (c) 2013 Boyun Tang
-- License : BSD-style
-- Maintainer : tangboyun@hotmail.com
-- Stability : experimental
-- Portability : ghc
--
--
--
-----------------------------------------------------------------------------
module MiRanda.CeRNA where
import MiRanda.Storage.Type
import MiRanda.Types (UTR(..),GapSeq(..),Pair(..),ContextScorePlus(..))
import qualified Data.ByteString.Char8 as B8
import Data.Char (isAlpha)
import qualified Data.HashMap.Strict as H
import Control.Arrow
import Data.Function
import Data.List
import Data.ByteString (ByteString)
import qualified Data.Vector.Unboxed as UV
import Data.Maybe
import Control.DeepSeq
-- | Combined targeting score for one UTR (the product of four factors
-- described inline below).
mutameScore :: Int -- ^ UTR length
            -> Int -- ^ number of miRNAs considered
            -> [[(Int,Int)]] -- ^ sites for microRNAs predicted to target
            -> Double
{-# INLINE mutameScore #-}
mutameScore !len !nmiR bss =
    if null bss'
    then 0
    else c1 * -- shared miRNAs / total miRNAs
         c2 * -- sum for all miRNA,
              -- number of sites / distance span
              -- for 1 site
              -- 1 / min (beg from 5',end from 3')
         c3 * -- sum for all miRNA,
              -- (distance spaned)^2 / sum of the squared distances between
              -- successive MREs
         c4   -- (#MREs in X for all considered microRNAs - #microRNAs predicted to target X + 1) /
              -- #MREs in X for all considered microRNAs
  where
    -- keep only miRNAs with at least one site, each site list ascending
    bss' = map sort $ filter (not . null) bss
    nRNA = length bss'
    totalN = sum nForEach
    nForEach = map (fromIntegral . length) bss'
    !c1 = (fromIntegral nRNA) / fromIntegral nmiR
    !c2 = sum $ zipWith (/) nForEach $ map f1 bss'
    !c3 = sum $ zipWith (/) (map ((\x -> x*x) . f1) bss') (map f2 bss') -- unclear
    !c4 = (totalN - (fromIntegral nRNA) + 1) / totalN
    -- f1: span between the first and last site midpoints; for a single
    -- site, its distance to the farther UTR end. Partial (no [] case) --
    -- safe because bss' excludes empty lists.
    f1 ((a,b):[]) = let l = (fromIntegral (a + b)) / 2
                        leng = fromIntegral len
                    in max (leng-l) l
    f1 xs = let (a1,b1) = head xs
                (a2,b2) = last xs
            in (fromIntegral (a2+b2)) / 2 -
               (fromIntegral (a1+b1)) / 2
    -- f2: sum of squared distances between successive site midpoints; for
    -- a single site, squared distances to both UTR ends. Partial like f1.
    f2 ((a,b):[]) = let l = (fromIntegral (a + b)) / 2
                        leng = fromIntegral len
                    in l*l + (leng-l) * (leng-l)
    f2 (x:xs) = fst $
                foldl' (\(acc,(a1,b1)) (a2,b2) ->
                          let l1 = fromIntegral (a1+b1) / 2
                              l2 = fromIntegral (a2+b2) / 2
                              acc' = acc + (l2-l1)*(l2-l1)
                          in (acc',(a2,b2))
                       ) (0,x) xs
-- | Score the second record as a ceRNA for the first: uses the second
-- record's (ungapped) UTR length and its sites for the miRNAs shared with
-- the first record.
ceRNAScore :: GeneRecord -> GeneRecord -> Double
{-# INLINE ceRNAScore #-}
ceRNAScore (GR _ mirSs1) (GR (GI _ _ uStr _) mirSs2) =
    let -- ungapped UTR length: drop gap characters from the alignment
        l = B8.length $ B8.filter isAlpha $
            unGS $ alignment uStr
        total = length mirSs1
        ss = map snd $ intersection mirSs2 mirSs1
    in mutameScore l total ss

-- | The miRNAs (keyed by identity) present in both site lists, paired
-- with the (beg,end) site ranges taken from the FIRST argument
-- (HashMap.intersection is left-biased).
intersection :: [MiRSites] -> [MiRSites] -> [(ByteString,[(Int,Int)])]
{-# INLINE intersection #-}
intersection !ss1 ss2 =
    ((H.toList .) .
     (H.intersection `on`
      (H.fromList . map
       ((identity . mir) &&&
        (map ((beg &&& end) . siteRange) . sites)
       )))) ss1 ss2

-- | True when at least fraction @p@ of the first record's miRNAs also
-- appear among the second record's miRNAs.
sharePercentageGE :: Double -> GeneRecord -> GeneRecord -> Bool
{-# INLINE sharePercentageGE #-}
sharePercentageGE !p !gr1 gr2 =
    let i = length $ (intersection `on` mirSites) gr2 gr1
        n = length $ mirSites gr1
    in ((/) `on` fromIntegral) i n >= p

-- | The ceRNA score together with, per miRNA of gr1 (in order): the
-- shared-site count and the summed context+ scores (0 where absent).
toLine :: GeneRecord -> GeneRecord
       -> (Double,(UV.Vector Int,UV.Vector Double))
{-# INLINE toLine #-}
toLine !gr1 gr2 =
    let -- h1: shared miRNA -> site ranges (values from gr2's sites)
        h1 = (H.intersection `on`
              (H.fromList . map
               ((identity . mir) &&&
                (map ((beg &&& end) . siteRange) . sites)
               ) . mirSites)) gr2 gr1
        -- h2: shared miRNA -> summed context+ score over its sites
        h2 = (H.intersection `on`
              (H.fromList .
               map
               ((identity . mir) &&&
                (foldl1' (+) . map (fromMaybe 0 . fmap contextPlus) . map contextScorePlus . sites)
               ) . mirSites)) gr2 gr1
        vi = UV.fromList $
             map
             (fromMaybe 0 . fmap length .
              (`H.lookup` h1) . identity . mir) $
             mirSites gr1
        vd = UV.fromList $
             map
             (fromMaybe 0 .
              (`H.lookup` h2) . identity . mir) $
             mirSites gr1
        m = ceRNAScore gr1 gr2
    in m `deepseq` vi `deepseq` vd `deepseq` (m,(vi,vd))
|
tangboyun/miranda
|
src/MiRanda/CeRNA.hs
|
gpl-3.0
| 4,580
| 0
| 25
| 1,499
| 1,480
| 814
| 666
| 101
| 4
|
--- |
--- | type definitions for FixedQueue.hs to prevent circular imports
--- |
--- Copyright : (c) Florian Richter 2011
--- License : GPL
---
module FixedQueueTypes where
import qualified Data.Sequence as S
import Control.Concurrent.STM.TVar (TVar, newTVarIO)
-- | A queue in a TVar: an element count alongside the sequence contents.
-- NOTE(review): nothing in this module enforces the "fixed" bound; the
-- Int is presumably maintained by FixedQueue.hs -- confirm there.
type FixedQueue t = TVar (Int, S.Seq t)
-- | Create an empty queue (count 0, empty sequence).
newFixedQueue :: IO (FixedQueue t)
newFixedQueue = newTVarIO (0, S.empty)
-- vim: sw=4 expandtab
|
f1ori/hadcc
|
FixedQueueTypes.hs
|
gpl-3.0
| 433
| 0
| 8
| 76
| 86
| 55
| 31
| 6
| 1
|
module Pearl.SWB.Section03 where
-- | Least fixed point of a functor (carrier of the initial algebra).
newtype Lf v = In { insideI :: v (Lf v )}
-- | Greatest fixed point of a functor (carrier of the final coalgebra).
newtype Gf x = OutO { insideO :: x (Gf x )}
-- | The key type being sorted; only its 'Ord' instance matters.
data K = K
     deriving (Ord, Eq)
-- | List functor for as-yet-unsorted ("K") lists.
data KList l = KNil
             | KCons K l
-- | List functor for sorted ("S") lists — same shape, different intent.
data SList l = SNil
             | SCons K l
-- | Catamorphism: consume an 'Lf' structure with an algebra.
fold :: (Functor f) => (f a -> a) -> Lf f -> a
-- | Anamorphism: grow a 'Gf' structure from a seed with a coalgebra.
unfold :: (Functor x) => ( a -> x a) -> a -> Gf x
unfold coalg seed = OutO (fmap (unfold coalg) (coalg seed))
fold   alg   t    = alg (fmap (fold alg) (insideI t))
-- Both list functors map only over the tail slot, leaving the key alone.
instance Functor KList where
  fmap _ KNil        = KNil
  fmap f (KCons k t) = KCons k (f t)
instance Functor SList where
  fmap _ SNil        = SNil
  fmap f (SCons k t) = SCons k (f t)
-- |
--
-- bub's forward type shows you that both:
--
-- nIns :: KList (Gf SList ..........) > SList (KList (Gf SList))
-- bub :: KList (.. SList (Lf ... )) -> SList (..... (Lf .....))
nIns :: KList (Gf SList) -> SList (KList (Gf SList))
bub :: KList (SList (Lf KList)) -> SList (Lf KList)
swap :: KList (SList x ) -> SList (KList x )
--However, the (difunctor-reversed) signature has the identical shape to bub's:
-- bubInv :: SList (Lf KList) -> KList (SList (Lf KList))
-- nIns :: KList (Gf SList) -> SList (KList (Gf SList))
--
-- -- And furthermore, when you do the following (dual) interchange:
-- --
-- -- Gf <-> Lf
-- -- KList <-> SList
-- --
-- -- Scripted out here as a series of regexes:
-- --
-- -- { . <-- bubInv, s/SList/Klist/
-- -- , s/SList/Klist/
-- -- , s/Gf/LF/
-- -- , s/Lf/GF/
-- -- , s/list/List/
-- -- , s/GF/Gf/
-- -- , s/LF/Lf/
-- -- }
-- nIns :: KList (Gf SList) -> SList (KList (Gf SList))
-- bubInv :: KList (Gf SList) -> SList (KList (Gf SList))
-- | Algebra for bubble sort: emit the smaller key, push the larger
-- back down (rewrapped with 'In').
bub ks = case ks of
  KNil                -> SNil
  KCons a SNil        -> SCons a (In KNil)
  KCons a (SCons b x)
    | a <= b          -> SCons a (In (KCons b x))
    | otherwise       -> SCons b (In (KCons a x))
-- | Coalgebra for naive insertion: same swapping step as 'bub', but
-- unwrapping the codata layer with 'OutO' instead of wrapping with 'In'.
nIns ks = case ks of
  KNil                       -> SNil
  KCons a (OutO SNil)        -> SCons a KNil
  KCons a (OutO (SCons b x))
    | a <= b                 -> SCons a (KCons b x)
    | otherwise              -> SCons b (KCons a x)
-- | The shared core of 'bub' and 'nIns': order the two visible keys,
-- with no fixpoint wrapping at all.
swap ks = case ks of
  KNil                -> SNil
  KCons a SNil        -> SCons a KNil
  KCons a (SCons b x)
    | a <= b          -> SCons a (KCons b x)
    | otherwise       -> SCons b (KCons a x)
-- | Bubble sort: repeatedly unfold the sorted output, each step
-- folding the whole input with the 'bub' algebra.
bubbleSort :: Lf KList -> Gf SList
bubbleSort = unfold (fold bub)
-- | Naive insertion sort: fold the input, inserting each key by
-- unfolding with the 'nIns' coalgebra.  Type signature added for
-- consistency with 'bubbleSort' (it was previously inferred).
naiveInsertSort :: Lf KList -> Gf SList
naiveInsertSort = fold nInsert
  where nInsert = unfold nIns
-- | 'bubbleSort' with 'bub' factored through 'swap' (@fmap In . swap@).
-- Signature added; previously inferred.
bubbleSort' :: Lf KList -> Gf SList
bubbleSort' = unfold ( fold (fmap In . swap) )
-- | 'naiveInsertSort' with 'nIns' factored through 'swap'
-- (@swap . fmap insideO@).  Signature added; previously inferred.
naiveInsertSort' :: Lf KList -> Gf SList
naiveInsertSort' = fold (unfold (swap . fmap insideO))
{- ||| 3.1 Algebra and Co-Algebra Homomorphisms -}
-- ...
{- |||| (1) bubble . In =.= bub . fmap bubble -}
-- ...
{- ||||| (catd) {- TODO category diagram -} -}
-- ...
{- |||| (1) bubble . In =.= bub . fmap bubble -}
{- ||||| {- bub is replacable in fmap by In . swap -} -}
{- |||| (2) bubble . In =.= fmap In . swap . fmap bubble -}
{- ||||| (2.cat) {- TODO category diagram -} -}
{- ||||| (2.cat2) {- TODO category diagram -} -}
{- |||| (3) insideOut . naiveInsert = fmap naiveInsert . nIns -}
{- ||||| {- nIns is replacable (outside fmap) by swap . fmap insideO -} -}
{- |||| (2) bubble . In =.= fmap nIns . swap . fmap insideO -}
|
technogeeky/pearl-sorting-with-bialgebras
|
src/Pearl/SWB/Section03.hs
|
gpl-3.0
| 4,225
| 0
| 11
| 1,963
| 905
| 477
| 428
| 44
| 1
|
module Hive.Game.Board
-- (Board
-- , allOccupiedPositions
-- , isOccupiedAt
-- , isUnoccupiedAt
-- , occupiedNeighbors
-- , unoccupiedNeighbors
-- , topPieceAt
-- , piecesAt
-- , pieceIsFree
-- , removePiecesAt
-- , removeTopPieceAt
-- , example1
-- )
where
import Hive.Game.Piece
import Hive.Game.HexGrid (AxialPoint(..))
import qualified Hive.Game.HexGrid as Grid
import qualified Data.Graph as Graph
import Data.List (nub)
import Data.Maybe (isJust, listToMaybe, fromJust, fromMaybe)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Monoid ((<>))
import qualified Data.Tree as Tree
-- defensive programming: hide the details of the Board type
-- such that only this module can see them, so i don't forget
-- to deal with stacks (happened a lot in the clojure version)
-- | A board maps occupied positions to their piece stacks.
-- Invariant (maintained by 'removeTopPieceAt'): no key is ever mapped
-- to an empty list.  Head of the list is the top of the stack.
newtype Board = Board { unBoard :: Map AxialPoint [Piece] }
    deriving (Eq, Show)
-- | The stack of pieces at a position, topmost first; @[]@ if vacant.
piecesAt :: Board -> AxialPoint -> [Piece]
piecesAt (Board bmap) pos = Map.findWithDefault [] pos bmap
-- | True when two or more pieces occupy the position.
isStacked :: Board -> AxialPoint -> Bool
isStacked board pos
  | (_:_:_) <- piecesAt board pos = True
  | otherwise                     = False
-- | Push a piece onto the top of the stack at @pos@ (creating the
-- stack if the position was vacant).
addPiece :: Piece -> AxialPoint -> Board -> Board
addPiece piece pos (Board bmap) =
    Board (Map.insertWith (++) pos [piece] bmap)
-- | Move the top piece at @from@ onto the top of the stack at @to@.
-- NOTE(review): partial — errors via 'unsafeTopPieceAt' when @from@ is
-- vacant; callers are expected to have validated the move first.
movePieceTo :: Board -> AxialPoint -> AxialPoint -> Board
movePieceTo board from to = addPiece piece to $ board `removeTopPieceAt` from
    where piece = unsafeTopPieceAt board from
-- | Pop the top piece at @pos@.  The map entry is deleted (rather than
-- keeping an empty list) when the last piece is removed, preserving the
-- "no empty stacks" invariant.  No-op if the position is vacant.
removeTopPieceAt :: Board -> AxialPoint -> Board
removeTopPieceAt (Board bmap) pos = Board (Map.update pop pos bmap)
  where
    -- Returning Nothing deletes the key outright.
    pop (_:rest@(_:_)) = Just rest
    pop _              = Nothing
-- | The top piece at a position, or 'Nothing' if the position is vacant.
topPieceAt :: Board -> AxialPoint -> Maybe Piece
topPieceAt board pos =
    case board `piecesAt` pos of
        []      -> Nothing
        (p : _) -> Just p
unsafeTopPieceAt = (fromJust .) . topPieceAt
-- | Is there at least one piece at this position?
isOccupiedAt :: Board -> AxialPoint -> Bool
isOccupiedAt board pos = not (null (board `piecesAt` pos))
-- | Negation of 'isOccupiedAt'.
isUnoccupiedAt :: Board -> AxialPoint -> Bool
isUnoccupiedAt board pos = not (board `isOccupiedAt` pos)
-- | Adjacent positions currently holding at least one piece.
occupiedNeighbors :: Board -> AxialPoint -> [AxialPoint]
occupiedNeighbors board pos = filter (board `isOccupiedAt`) (Grid.neighbors pos)
-- | Adjacent positions with no pieces on them.
unoccupiedNeighbors :: Board -> AxialPoint -> [AxialPoint]
unoccupiedNeighbors board pos = filter (board `isUnoccupiedAt`) (Grid.neighbors pos)
-- | Every position holding at least one piece.
allOccupiedPositions :: Board -> [AxialPoint]
allOccupiedPositions = Map.keys . unBoard
-- | Every piece on the board, stacks flattened (topmost first within
-- each stack).  Type signature added (previously inferred).
allPiecesOnBoard :: Board -> [Piece]
allPiecesOnBoard = concat . Map.elems . unBoard
-- | All (position, stack) entries where any piece in the stack
-- (not only the top one) satisfies the predicate.
findPieces :: (Piece -> Bool) -> Board -> [(AxialPoint, [Piece])]
findPieces f (Board bmap) = Map.toList (Map.filter (any f) bmap)
-- | Positions whose /top/ piece satisfies the predicate.
-- The 'error' branch can only fire if the "no empty stacks" invariant
-- (maintained by 'removeTopPieceAt') has been violated elsewhere.
findTopPieces :: (Piece -> Bool) -> Board -> [AxialPoint]
findTopPieces f board = Map.foldlWithKey
    (\acc pos pieces ->
        case pieces of
            [] -> error $ "found a board site with empty piecelist! "
                        <> show pos <> "\nboard with empty site: "
                        <> show board <> "\n"
            (pc:_) -> if f pc
                        then pos:acc
                        else acc)
    []
    (unBoard board)
findTopPiecesBySpecies spec = findTopPieces (\pc -> spec == pieceSpecies pc)
-- | N.B. this is really a check that the given piece is not an articulation point
-- in the board graph; a "free" position found by this fn could still be
-- prevented from moving by being surrounded etc.
-- | May the piece be lifted without splitting the hive?  True iff the
-- board minus that piece still satisfies 'isOneHive'.
pieceIsFree :: Board -> AxialPoint -> Bool
pieceIsFree board pos = isOneHive (board `removeTopPieceAt` pos)
-- | Occupied positions whose top piece is free to move (per 'pieceIsFree').
allFreePiecePositions :: Board -> [AxialPoint]
allFreePiecePositions board =
    [ pos | pos <- allOccupiedPositions board, pieceIsFree board pos ]
-- | The top pieces at all free positions.  'fromJust' is safe here:
-- every position comes from 'allOccupiedPositions'.
allFreePieces :: Board -> [Piece]
allFreePieces board =
    map (fromJust . topPieceAt board) (allFreePiecePositions board)
-- | Free positions whose top piece belongs to the given team.
freePiecePositionsForTeam :: Team -> Board -> [AxialPoint]
freePiecePositionsForTeam team board = filter belongsAndFree (allOccupiedPositions board)
  where
    belongsAndFree p =
        pieceTeam (unsafeTopPieceAt board p) == team && pieceIsFree board p
-- convert a board (map of axial coords to Pieces) into a Data.Graph style adjacency list
-- each vertex is a piece's axial coordinates
-- each piece has edges to its occupied neighbors
-- adjlist format is [(node, key, [keys of other nodes this node has directed edges to])]
-- | The board graph in Data.Graph adjacency-list form: each occupied
-- position is a node keyed by itself, with edges to its occupied
-- neighbors.  Neighbor edges are symmetric, so the graph is effectively
-- undirected.
boardToAdjacencyList :: Board -> [(AxialPoint, AxialPoint, [AxialPoint])]
boardToAdjacencyList board@(Board bm) = map convert $ Map.keys bm
    where convert pos = (pos, pos, occupiedNeighbors board pos)
-- | Is the board a contiguous set of pieces? (Does it satisfy the One Hive Rule?)
-- firstly, we must have no more than one connected component in the board graph
-- | One Hive Rule check: exactly one connected component of pieces.
-- NOTE(review): an *empty* board yields no components and is reported
-- as False here — confirm that is the intended reading of the rule.
isOneHive :: Board -> Bool
isOneHive b =
    case connectedComponents $ boardToAdjacencyList b of
        [_] -> True -- nonempty must have exactly one connected component o' pieces
        _ -> False -- empty or multiple disconnected islands of pieces -> bad board
-- | Sanity check a board: one hive, no duplicate pieces, and every
-- piece sitting *above* the bottom of a stack is of a stackable species.
-- Notes:
--   * 'nub' is O(n^2) but fine for board-sized lists.
--   * 'init' is safe: 'isStacked' guarantees at least two pieces, and
--     it drops the bottom piece, which may be of any species.
isValidBoard :: Board -> Bool
isValidBoard b =
    isOneHive b
        && length allPieces == length (nub allPieces)
        && stackedPiecesAreStackable
  where
    allPieces = allPiecesOnBoard b
    isStackable pc = let species = pieceSpecies pc
                      in species == Beetle || species == Mosquito
    stackedPiecesAreStackable = all (all isStackable . init) $
                                    map (piecesAt b) $
                                    filter (isStacked b) $
                                    allOccupiedPositions b
-- | Group an adjacency list into its connected components, returning
-- the node payloads of each component.
connectedComponents :: Ord key => [(node,key,[key])] -> [[node]]
connectedComponents adjlist = deforest $ Graph.components g
    where
        (g,vertexInfo,_) = Graph.graphFromEdges adjlist
        -- Flatten each component tree back to its node payloads.
        deforest = map (map (fst3 . vertexInfo) . Tree.flatten)
        fst3 (x,_,_) = x
--------------------------------------------------------------------------------
-- Debug Helpers and REPL scratch
-- NOTE(review): orphan instance (neither 'Show' nor 'SCC' is defined
-- here); kept as a debug convenience, but it will clash if Data.Graph
-- ever ships its own Show instance for SCC.
instance Show a => Show (Graph.SCC a) where
    show (Graph.AcyclicSCC x) = "AcyclicSCC " ++ show x
    show (Graph.CyclicSCC xs) = "CyclicSCC " ++ show xs
-- | Build a board from @((p,q), pieceName)@ pairs.
-- XXX total hack, does not support stacks!
-- Changed 'Map.fromAscList' to 'Map.fromList': several example boards
-- below are NOT in ascending key order, which silently violates
-- 'fromAscList''s unchecked precondition and corrupts the map.
-- 'fromList' behaves identically on ascending input.
boardFromAscList = Board . Map.fromList . map (\((p,q), name) -> (Axial p q, [piece name]))
-- | A board with no pieces at all.
emptyBoard = Board mempty
-- and here is where i miss clojure
-- ghci destroys all bindings when you :r
-- REPL fixtures.  NOTE(review): the pairs below are not in ascending
-- key order even though they feed 'boardFromAscList' — see the note on
-- that function.
example1 = boardFromAscList
    [ ((3,4), "bQ")
    , ((2,5), "bS1")
    , ((2,6), "wS1")
    , ((1,7), "wQ")
    , ((1,6), "wG1")
    , ((2,4), "bL")
    , ((4,4), "bP")
    , ((0,7), "wA1")
    , ((4,5), "bA2")
    , ((4,6), "bA3")
    , ((3,7), "wS2")
    ]
example2 = boardFromAscList
    [ ((0,0), "wQ")
    , ((0,1), "bQ")
    , ((1,0), "wS1")
    ]
example3 = boardFromAscList
    [ ((0,0), "wQ")
    , ((0,1), "bQ")
    , ((0,2), "wS1")
    , ((0,3), "bS1")
    ]
example4 = boardFromAscList
    [ ((0,0), "wQ")
    , ((0,1), "bQ")
    , ((0,2), "wS1")
    , ((0,3), "bS1")
    , ((1,2), "wS2")
    ]
example5 = boardFromAscList
    [ ((1,4), "wQ")
    , ((1,5), "wA1")
    , ((2,3), "wL")
    , ((2,4), "wS1")
    , ((3,4), "bS1")
    , ((3,5), "bA1")
    , ((4,3), "bQ")
    , ((4,4), "bG1")
    ]
-- YO NATHAN
-- READ THIS
-- the next thing you need to do is write some tests
-- because probably a lot of this shit is wrong
-- though testing would probably be easier if i had a move parser
-- and it'd be fun to get parsec out...
-- https://en.wikipedia.org/wiki/Strongly_connected_component
-- Adjacency-list fixture (the Wikipedia SCC example plus an extra
-- x->y island) for experimenting with 'connectedComponents' in GHCi.
al = [('a', 'a', ['b'])
     ,('b', 'b', ['c','e','f'])
     ,('c', 'c', ['d','g'])
     ,('d', 'd', ['c','h'])
     ,('e', 'e', ['a','f'])
     ,('f', 'f', ['g'])
     ,('g', 'g', ['f'])
     ,('h', 'h', ['d','g'])
     ,('x', 'x', ['y'])
     ,('y', 'y', [])
     ]
|
nathanic/hive-hs
|
src/Hive/Game/Board.hs
|
gpl-3.0
| 7,785
| 0
| 15
| 1,964
| 2,287
| 1,324
| 963
| 145
| 3
|
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
-----------------------------------------------------------------------------
--
-- Module : Math.IMM
-- Copyright : Christian Gosch
-- License : BSD3
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Numeric.Jalla.IMM (
IMM (..)
) where
import Data.Ix
{-| Indexable objects modification monad class.
Monads in this type class are used to modify and create indexable objects
such as matrices or vectors. This is to provide a common interface
for such 'modification monads'. -}
class (Ix i) => IMM m i o e | m -> o, m -> i, m -> e where
  -- These three lead to functional dependency collisions since
  -- the type of the result can not be decided by the compiler.
  --create :: i -> m a -> o
  --modify :: o -> m a -> o
  --getO :: m o
  -- | Write one element at the given index.
  setElem :: i -> e -> m ()
  -- | Write many (index, element) pairs in one action.
  setElems :: [(i,e)] -> m ()
  -- | Overwrite every element of the object with the given value.
  fill :: e -> m ()
  -- | Read the element at the given index.
  getElem :: i -> m e
|
cgo/jalla
|
Numeric/Jalla/IMM.hs
|
gpl-3.0
| 1,064
| 0
| 10
| 254
| 148
| 93
| 55
| 9
| 0
|
module PowerDivisibility.A004709 (a004709) where
import Helpers.Primes (primePowers)
-- | @a004709 n@ is the n-th cubefree number, 1-indexed per OEIS
-- convention (so @a004709 1 == 1@).
a004709 :: Int -> Integer
a004709 n = a004709_list !! (n - 1)
-- | All cubefree numbers in ascending order: no prime appears with
-- exponent 3 or more in the factorisation.
a004709_list :: [Integer]
a004709_list = [n | n <- [1..], cubefree n]
  where
    cubefree = all ((< 3) . snd) . primePowers
|
peterokagey/haskellOEIS
|
src/PowerDivisibility/A004709.hs
|
apache-2.0
| 271
| 0
| 11
| 44
| 101
| 56
| 45
| 7
| 1
|
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QMimeData.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:32
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Core.QMimeData (
qMimeData
,QcolorData(..), QcolorData_nf(..)
,hasColor
,QhasFormat(..)
,hasHtml
,hasImage
,hasText
,hasUrls
,html
,QimageData(..), QimageData_nf(..)
,QretrieveData(..)
,setColorData
,setImageData
,urls
,qMimeData_delete
,qMimeData_deleteLater
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Core.QVariant
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
instance QuserMethod (QMimeData ()) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QMimeData_userMethod cobj_qobj (toCInt evid)
foreign import ccall "qtc_QMimeData_userMethod" qtc_QMimeData_userMethod :: Ptr (TQMimeData a) -> CInt -> IO ()
instance QuserMethod (QMimeDataSc a) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QMimeData_userMethod cobj_qobj (toCInt evid)
instance QuserMethod (QMimeData ()) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QMimeData_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
foreign import ccall "qtc_QMimeData_userMethodVariant" qtc_QMimeData_userMethodVariant :: Ptr (TQMimeData a) -> CInt -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
instance QuserMethod (QMimeDataSc a) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QMimeData_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
-- | Construct a new, empty QMimeData object (wraps the C++
-- @QMimeData()@ constructor via the qtc C shim).
-- NOTE: this file is machine generated; edits will be lost on regen.
qMimeData :: () -> IO (QMimeData ())
qMimeData ()
  = withQMimeDataResult $
    qtc_QMimeData
foreign import ccall "qtc_QMimeData" qtc_QMimeData :: IO (Ptr (TQMimeData ()))
instance Qclear (QMimeData a) (()) where
clear x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_clear cobj_x0
foreign import ccall "qtc_QMimeData_clear" qtc_QMimeData_clear :: Ptr (TQMimeData a) -> IO ()
class QcolorData x0 x1 where
colorData :: x0 -> x1 -> IO (QVariant ())
class QcolorData_nf x0 x1 where
colorData_nf :: x0 -> x1 -> IO (QVariant ())
instance QcolorData (QMimeData ()) (()) where
colorData x0 ()
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_colorData cobj_x0
foreign import ccall "qtc_QMimeData_colorData" qtc_QMimeData_colorData :: Ptr (TQMimeData a) -> IO (Ptr (TQVariant ()))
instance QcolorData (QMimeDataSc a) (()) where
colorData x0 ()
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_colorData cobj_x0
instance QcolorData_nf (QMimeData ()) (()) where
colorData_nf x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_colorData cobj_x0
instance QcolorData_nf (QMimeDataSc a) (()) where
colorData_nf x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_colorData cobj_x0
instance Qqdata (QMimeData a) ((String)) (IO (String)) where
qdata x0 (x1)
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_data cobj_x0 cstr_x1
foreign import ccall "qtc_QMimeData_data" qtc_QMimeData_data :: Ptr (TQMimeData a) -> CWString -> IO (Ptr (TQString ()))
hasColor :: QMimeData a -> (()) -> IO (Bool)
hasColor x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_hasColor cobj_x0
foreign import ccall "qtc_QMimeData_hasColor" qtc_QMimeData_hasColor :: Ptr (TQMimeData a) -> IO CBool
class QhasFormat x0 x1 where
hasFormat :: x0 -> x1 -> IO (Bool)
instance QhasFormat (QMimeData ()) ((String)) where
hasFormat x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_hasFormat_h cobj_x0 cstr_x1
foreign import ccall "qtc_QMimeData_hasFormat_h" qtc_QMimeData_hasFormat_h :: Ptr (TQMimeData a) -> CWString -> IO CBool
instance QhasFormat (QMimeDataSc a) ((String)) where
hasFormat x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_hasFormat_h cobj_x0 cstr_x1
hasHtml :: QMimeData a -> (()) -> IO (Bool)
hasHtml x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_hasHtml cobj_x0
foreign import ccall "qtc_QMimeData_hasHtml" qtc_QMimeData_hasHtml :: Ptr (TQMimeData a) -> IO CBool
hasImage :: QMimeData a -> (()) -> IO (Bool)
hasImage x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_hasImage cobj_x0
foreign import ccall "qtc_QMimeData_hasImage" qtc_QMimeData_hasImage :: Ptr (TQMimeData a) -> IO CBool
hasText :: QMimeData a -> (()) -> IO (Bool)
hasText x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_hasText cobj_x0
foreign import ccall "qtc_QMimeData_hasText" qtc_QMimeData_hasText :: Ptr (TQMimeData a) -> IO CBool
hasUrls :: QMimeData a -> (()) -> IO (Bool)
hasUrls x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_hasUrls cobj_x0
foreign import ccall "qtc_QMimeData_hasUrls" qtc_QMimeData_hasUrls :: Ptr (TQMimeData a) -> IO CBool
html :: QMimeData a -> (()) -> IO (String)
html x0 ()
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_html cobj_x0
foreign import ccall "qtc_QMimeData_html" qtc_QMimeData_html :: Ptr (TQMimeData a) -> IO (Ptr (TQString ()))
class QimageData x0 x1 where
imageData :: x0 -> x1 -> IO (QVariant ())
class QimageData_nf x0 x1 where
imageData_nf :: x0 -> x1 -> IO (QVariant ())
instance QimageData (QMimeData ()) (()) where
imageData x0 ()
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_imageData cobj_x0
foreign import ccall "qtc_QMimeData_imageData" qtc_QMimeData_imageData :: Ptr (TQMimeData a) -> IO (Ptr (TQVariant ()))
instance QimageData (QMimeDataSc a) (()) where
imageData x0 ()
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_imageData cobj_x0
instance QimageData_nf (QMimeData ()) (()) where
imageData_nf x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_imageData cobj_x0
instance QimageData_nf (QMimeDataSc a) (()) where
imageData_nf x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_imageData cobj_x0
class QretrieveData x0 x1 where
retrieveData :: x0 -> x1 -> IO (QVariant ())
instance QretrieveData (QMimeData ()) ((String, QVariantType)) where
retrieveData x0 (x1, x2)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_retrieveData cobj_x0 cstr_x1 (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QMimeData_retrieveData" qtc_QMimeData_retrieveData :: Ptr (TQMimeData a) -> CWString -> CLong -> IO (Ptr (TQVariant ()))
instance QretrieveData (QMimeDataSc a) ((String, QVariantType)) where
retrieveData x0 (x1, x2)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_retrieveData cobj_x0 cstr_x1 (toCLong $ qEnum_toInt x2)
setColorData :: QMimeData a -> ((QVariant t1)) -> IO ()
setColorData x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMimeData_setColorData cobj_x0 cobj_x1
foreign import ccall "qtc_QMimeData_setColorData" qtc_QMimeData_setColorData :: Ptr (TQMimeData a) -> Ptr (TQVariant t1) -> IO ()
instance QsetData (QMimeData a) ((String, String)) (IO ()) where
setData x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
qtc_QMimeData_setData cobj_x0 cstr_x1 cstr_x2
foreign import ccall "qtc_QMimeData_setData" qtc_QMimeData_setData :: Ptr (TQMimeData a) -> CWString -> CWString -> IO ()
instance QsetHtml (QMimeData a) ((String)) where
setHtml x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_setHtml cobj_x0 cstr_x1
foreign import ccall "qtc_QMimeData_setHtml" qtc_QMimeData_setHtml :: Ptr (TQMimeData a) -> CWString -> IO ()
setImageData :: QMimeData a -> ((QVariant t1)) -> IO ()
setImageData x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMimeData_setImageData cobj_x0 cobj_x1
foreign import ccall "qtc_QMimeData_setImageData" qtc_QMimeData_setImageData :: Ptr (TQMimeData a) -> Ptr (TQVariant t1) -> IO ()
instance QsetText (QMimeData a) ((String)) where
setText x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_setText cobj_x0 cstr_x1
foreign import ccall "qtc_QMimeData_setText" qtc_QMimeData_setText :: Ptr (TQMimeData a) -> CWString -> IO ()
instance Qtext (QMimeData a) (()) (IO (String)) where
text x0 ()
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_text cobj_x0
foreign import ccall "qtc_QMimeData_text" qtc_QMimeData_text :: Ptr (TQMimeData a) -> IO (Ptr (TQString ()))
-- | The list of URLs carried by the mime data (marshalled out of the C
-- shim's result array).
-- NOTE: this file is machine generated; edits will be lost on regen.
urls :: QMimeData a -> (()) -> IO ([QUrl ()])
urls x0 ()
  = withQListObjectRefResult $ \arr ->
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QMimeData_urls cobj_x0 arr
foreign import ccall "qtc_QMimeData_urls" qtc_QMimeData_urls :: Ptr (TQMimeData a) -> Ptr (Ptr (TQUrl ())) -> IO CInt
qMimeData_delete :: QMimeData a -> IO ()
qMimeData_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_delete cobj_x0
foreign import ccall "qtc_QMimeData_delete" qtc_QMimeData_delete :: Ptr (TQMimeData a) -> IO ()
qMimeData_deleteLater :: QMimeData a -> IO ()
qMimeData_deleteLater x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_deleteLater cobj_x0
foreign import ccall "qtc_QMimeData_deleteLater" qtc_QMimeData_deleteLater :: Ptr (TQMimeData a) -> IO ()
instance QchildEvent (QMimeData ()) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMimeData_childEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QMimeData_childEvent" qtc_QMimeData_childEvent :: Ptr (TQMimeData a) -> Ptr (TQChildEvent t1) -> IO ()
instance QchildEvent (QMimeDataSc a) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMimeData_childEvent cobj_x0 cobj_x1
instance QconnectNotify (QMimeData ()) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_connectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QMimeData_connectNotify" qtc_QMimeData_connectNotify :: Ptr (TQMimeData a) -> CWString -> IO ()
instance QconnectNotify (QMimeDataSc a) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_connectNotify cobj_x0 cstr_x1
instance QcustomEvent (QMimeData ()) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMimeData_customEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QMimeData_customEvent" qtc_QMimeData_customEvent :: Ptr (TQMimeData a) -> Ptr (TQEvent t1) -> IO ()
instance QcustomEvent (QMimeDataSc a) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMimeData_customEvent cobj_x0 cobj_x1
instance QdisconnectNotify (QMimeData ()) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_disconnectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QMimeData_disconnectNotify" qtc_QMimeData_disconnectNotify :: Ptr (TQMimeData a) -> CWString -> IO ()
instance QdisconnectNotify (QMimeDataSc a) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_disconnectNotify cobj_x0 cstr_x1
instance Qevent (QMimeData ()) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMimeData_event_h cobj_x0 cobj_x1
foreign import ccall "qtc_QMimeData_event_h" qtc_QMimeData_event_h :: Ptr (TQMimeData a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent (QMimeDataSc a) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMimeData_event_h cobj_x0 cobj_x1
instance QeventFilter (QMimeData ()) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QMimeData_eventFilter_h cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QMimeData_eventFilter_h" qtc_QMimeData_eventFilter_h :: Ptr (TQMimeData a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter (QMimeDataSc a) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QMimeData_eventFilter_h cobj_x0 cobj_x1 cobj_x2
instance Qreceivers (QMimeData ()) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_receivers cobj_x0 cstr_x1
foreign import ccall "qtc_QMimeData_receivers" qtc_QMimeData_receivers :: Ptr (TQMimeData a) -> CWString -> IO CInt
instance Qreceivers (QMimeDataSc a) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QMimeData_receivers cobj_x0 cstr_x1
instance Qsender (QMimeData ()) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_sender cobj_x0
foreign import ccall "qtc_QMimeData_sender" qtc_QMimeData_sender :: Ptr (TQMimeData a) -> IO (Ptr (TQObject ()))
instance Qsender (QMimeDataSc a) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMimeData_sender cobj_x0
instance QtimerEvent (QMimeData ()) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMimeData_timerEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QMimeData_timerEvent" qtc_QMimeData_timerEvent :: Ptr (TQMimeData a) -> Ptr (TQTimerEvent t1) -> IO ()
instance QtimerEvent (QMimeDataSc a) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMimeData_timerEvent cobj_x0 cobj_x1
|
uduki/hsQt
|
Qtc/Core/QMimeData.hs
|
bsd-2-clause
| 15,006
| 0
| 14
| 2,630
| 5,003
| 2,543
| 2,460
| -1
| -1
|
{-# LANGUAGE UnicodeSyntax #-}
module Arith.Semantics (Term(..), isNumericVal, isVal, eval) where
-- | Abstract syntax of the untyped arithmetic language (TAPL-style):
-- booleans, conditionals, and Peano numerals with pred/iszero.
data Term = TmTrue
          | TmFalse
          | TmIf Term Term Term
          | TmZero
          | TmSucc Term
          | TmPred Term
          | TmIsZero Term
-- | Convert a numeral built solely from 'TmZero'/'TmSucc' to an 'Int'.
-- Applying it to any other term is a programming error; the explicit
-- catch-all replaces the old silent pattern-match failure with a
-- descriptive message.  Behaviour on well-formed numerals is unchanged.
digitize ∷ Term → Int
digitize TmZero = 0
digitize (TmSucc t) = 1 + digitize t
digitize _ = error "Arith.Semantics.digitize: term is not a numeral"
-- | Concrete-syntax printer.  Numerals render as decimal via
-- 'digitize'.  The 'TmIf', 'TmPred' and 'TmIsZero' cases were
-- previously missing, so 'show' crashed with a pattern-match failure
-- on any non-value term.
-- NOTE(review): a 'TmSucc' whose argument is not a numeral still
-- errors inside 'digitize'.
instance Show Term where
  show TmTrue = "true"
  show TmFalse = "false"
  show TmZero = "0"
  show num@(TmSucc _) = show $ digitize num
  show (TmIf c t e) = "if " ++ show c ++ " then " ++ show t ++ " else " ++ show e
  show (TmPred t) = "pred " ++ show t
  show (TmIsZero t) = "iszero " ++ show t
-- | Is the term a numeric value (a chain of 'TmSucc' over 'TmZero')?
-- Returns a Term-encoded boolean ('TmTrue'/'TmFalse'), not a 'Bool'.
isNumericVal ∷ Term → Term
isNumericVal TmZero = TmTrue
isNumericVal (TmSucc t) = isNumericVal t
isNumericVal _ = TmFalse
-- | Is the term a value (a boolean literal or a numeric value)?
-- Also Term-encoded, like 'isNumericVal'.
isVal ∷ Term → Term
isVal TmTrue = TmTrue
isVal TmFalse = TmTrue
isVal t = isNumericVal t
-- | Evaluate a term using TAPL's small-step rules.
-- NOTE(review): this is not a full big-step evaluator — the congruence
-- cases (e.g. 'TmIf' with a non-value guard) reduce the subterm but do
-- not re-examine the parent, so callers may need to iterate 'eval'
-- until a fixed point to reach a value.  Stuck terms are returned
-- unchanged by the final catch-all clause.
eval ∷ Term → Term
eval (TmIf TmTrue t₂ t₃) = t₂
eval (TmIf TmFalse t₂ t₃) = t₃
eval (TmIf t₁ t₂ t₃) = let t₁' = eval t₁
                        in TmIf t₁' t₂ t₃
eval (TmSucc t₁) = let t₁' = eval t₁
                    in TmSucc t₁'
eval (TmPred TmZero) = TmZero
eval (TmPred (TmSucc nv₁)) = nv₁
eval (TmPred t₁) = let t₁' = eval t₁
                    in TmPred t₁'
eval (TmIsZero TmZero) = TmTrue
eval (TmIsZero _) = TmFalse
eval t = t
|
ayberkt/TAPL
|
src/Arith/Semantics.hs
|
bsd-3-clause
| 1,334
| 50
| 9
| 468
| 504
| 259
| 245
| 39
| 1
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.TimeGrain.EN.Rules
( rules ) where
import Data.Text (Text)
import Prelude
import Data.String
import Duckling.Dimensions.Types
import qualified Duckling.TimeGrain.Types as TG
import Duckling.Types
-- | (rule name, regex, grain) triples for English time-grain words.
-- The patterns are lower-case only; the Duckling engine presumably
-- matches case-insensitively — confirm before relying on it.
-- Note the trailing space in "second (grain) " is preserved as-is.
grains :: [(Text, String, TG.Grain)]
grains = [ ("second (grain) ", "sec(ond)?s?", TG.Second)
         , ("minute (grain)" , "min(ute)?s?", TG.Minute)
         , ("hour (grain)"   , "h(((ou)?rs?)|r)?", TG.Hour)
         , ("day (grain)"    , "days?", TG.Day)
         , ("week (grain)"   , "weeks?", TG.Week)
         , ("month (grain)"  , "months?", TG.Month)
         , ("quarter (grain)", "(quarter|qtr)s?", TG.Quarter)
         , ("year (grain)"   , "y(ea)?rs?", TG.Year)
         ]
-- | One Duckling rule per entry of 'grains': match the regex, produce
-- the corresponding 'TimeGrain' token.
rules :: [Rule]
rules =
  [ Rule { name = name
         , pattern = [regex regexPattern]
         , prod = const (Just (Token TimeGrain grain))
         }
  | (name, regexPattern, grain) <- grains
  ]
|
rfranek/duckling
|
Duckling/TimeGrain/EN/Rules.hs
|
bsd-3-clause
| 1,306
| 0
| 11
| 347
| 271
| 172
| 99
| 24
| 1
|
module Room where
import Rumpus
-- Cube room: one shared edge length for all three dimensions.
-- NOTE(review): units are not stated anywhere in this file — confirm.
roomSize = 400
(roomW, roomH, roomD) = (roomSize,roomSize,roomSize)
-- Wall thickness; shelfH appears unused in this module.
wallD = 1
shelfH = 0.15
--roomOffset = (roomH/2 - wallD/2)
roomOffset = 0
-- | Spawn the six walls of the room as static (mass 0), ungrabbable
-- animated cubes; floor and ceiling additionally allow teleporting.
start :: Start
start = do
    let makeWall pos size hue extraProps = spawnChild $ do
            myPose      ==> position (pos & _y +~ roomOffset)
            myShape     ==> Cube
            myBody      ==> Animated
            myBodyFlags ==> extraProps ++ [Ungrabbable]
            mySize      ==> size
            myColor     ==> colorHSL hue 0.8 0.9
            myMass      ==> 0
    -- One tuple per wall: centre, extents, hue, extra body flags.
    mapM_ (\(pos, size, hue, props) -> makeWall pos size hue props)
        [ (V3 0 0 (-roomD/2), V3 roomW roomH wallD, 0.1, [])             -- back
        , (V3 0 0 (roomD/2) , V3 roomW roomH wallD, 0.2, [])             -- front
        , (V3 (-roomW/2) 0 0, V3 wallD roomH roomD, 0.3, [])             -- left
        , (V3 (roomW/2) 0 0 , V3 wallD roomH roomD, 0.4, [])             -- right
        , (V3 0 (-roomH/2) 0, V3 roomW wallD roomD, 0.5, [Teleportable]) -- floor
        , (V3 0 (roomH/2) 0 , V3 roomW wallD roomD, 0.6, [Teleportable]) -- ceiling
        ]
|
lukexi/rumpus
|
pristine/Attraction/Room.hs
|
bsd-3-clause
| 1,069
| 0
| 17
| 339
| 432
| 220
| 212
| 24
| 1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module GithubWebhook.Types.PullRequest
( PullRequest(..)
) where
import qualified Data.Char as Ch
import qualified Data.Text as T
import qualified Data.Aeson as A
import qualified Data.Aeson.TH as A
import GHC.Generics
import qualified Utils
-- | The @pull_request@ fragment of a GitHub webhook payload; only the
-- URL fields are captured here.
data PullRequest = PullRequest
    { url :: T.Text       -- ^ API URL of the pull request
    , htmlUrl :: T.Text   -- ^ browser-facing URL
    , diffUrl :: T.Text   -- ^ raw diff URL
    , patchUrl :: T.Text  -- ^ patch-format diff URL
    } deriving (Eq, Generic, Show)
-- Derive To/FromJSON, renaming fields (e.g. @htmlUrl@ -> @html_url@)
-- to match GitHub's snake_case JSON keys.
$(A.deriveJSON
    A.defaultOptions
    {A.fieldLabelModifier = Utils.camelCaseToSnakeCase}
    ''PullRequest)
|
bgwines/hueue
|
src/GithubWebhook/Types/PullRequest.hs
|
bsd-3-clause
| 582
| 0
| 10
| 108
| 146
| 92
| 54
| 20
| 0
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Time.UK.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Test.Tasty.HUnit
import Duckling.Dimensions.Types
import Duckling.Locale
import Duckling.Resolve
import Duckling.Testing.Asserts
import Duckling.Testing.Types (testContext, testOptions)
import Duckling.Time.UK.Corpus
import Duckling.Types (Range(..))
-- | Runs the positive and negative example corpora for this locale's
-- Time dimension (corpora defined in "Duckling.Time.UK.Corpus").
tests :: TestTree
tests = testGroup "UK Tests"
  [ makeCorpusTest [Seal Time] corpus
  , makeNegativeCorpusTest [Seal Time] negativeCorpus
  ]
|
facebookincubator/duckling
|
tests/Duckling/Time/UK/Tests.hs
|
bsd-3-clause
| 711
| 0
| 9
| 98
| 137
| 85
| 52
| 17
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE PolyKinds #-}
module Data.Params.Vector.Unboxed
( Vector
, module Data.Params.Vector
)
where
import Control.Category
import Prelude hiding ((.),id)
import Control.Monad
import Control.Monad.Primitive
import Control.DeepSeq
import Data.Primitive
import Data.Primitive.ByteArray
-- import Data.Primitive.Types
-- import GHC.Ptr
-- import Foreign.Ptr
-- import Foreign.ForeignPtr hiding (unsafeForeignPtrToPtr)
-- import Foreign.ForeignPtr.Unsafe
-- import Foreign.Marshal.Array
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Generic.Mutable as VGM
import qualified Data.Vector.Unboxed as VU
import qualified Data.Vector.Unboxed.Mutable as VUM
import qualified Data.Vector.Primitive as VP
import qualified Data.Vector.Primitive.Mutable as VPM
import GHC.Base (Int (..))
import GHC.Int
import GHC.Prim
import GHC.TypeLits
import Data.Params
import Data.Params.Vector
import Data.Params.PseudoPrim
import Debug.Trace
-------------------------------------------------------------------------------
-- immutable automatically sized vector
-- | Length-configurable unboxed vector.  The 'Config' index selects the
-- representation: 'Static' (length in the type), 'RunTime' (length supplied
-- via a type-lens parameter) or 'Automatic' (length stored in the value).
data family Vector (len::Config Nat) elem

-- Template Haskell: derive the type-lens parameter machinery for 'Vector'
-- (Param_len, Param_elem, _len, ...).
mkParams ''Vector
-- | Rendered like Data.Vector: @fromList [..]@.
instance (Show elem, VG.Vector (Vector len) elem) => Show (Vector len elem) where
    show xs = "fromList " ++ show (VG.toList xs)

-- | Element-wise equality, via conversion to lists.
instance (Eq elem, VG.Vector (Vector len) elem) => Eq (Vector len elem) where
    x == y = asList x == asList y
        where asList = VG.toList

-- | Lexicographic ordering, via conversion to lists.
instance (Ord elem, VG.Vector (Vector len) elem) => Ord (Vector len elem) where
    compare x y = VG.toList x `compare` VG.toList y
---------------------------------------
-- Static size
-- | Statically sized vector: element offset into the backing array, the
-- element's pseudo-prim dictionary, and the 'ByteArray' itself.  The length
-- is carried purely at the type level.
data instance Vector (Static len) elem = Vector
    {-#UNPACK#-}!Int
    {-#UNPACK#-}!(PseudoPrimInfo elem)
    {-#UNPACK#-}!ByteArray
-- | Demote the type-level length to a runtime field, turning a Static
-- vector into an Automatic one without copying the payload.
instance
    ( KnownNat len
    , PseudoPrim elem
    ) => StaticToAutomatic
        Param_len
        (Vector (Static len) elem)
        (Vector Automatic elem)
        where

    staticToAutomatic _ (Vector off ppi arr) = Vector_Automatic off len ppi arr
        where
            len = fromIntegral $ natVal (Proxy::Proxy len)

    mkPseudoPrimInfoFromStatic _ (PseudoPrimInfo_VectorStatic ppi)
        = PseudoPrimInfo_VectorAutomatic len (len*size) ppi
        where
            len = fromIntegral $ natVal (Proxy::Proxy len)
            size = pp_sizeOf ppi

-- | Push a StaticToAutomatic conversion through the element parameter while
-- the vector's own length stays static; only the element's
-- 'PseudoPrimInfo' is rebuilt.
instance
    ( KnownNat len
    , StaticToAutomatic p elem elem'
    ) => StaticToAutomatic
        (Param_elem p)
        (Vector (Static len) elem)
        (Vector (Static len) elem')
        where

    staticToAutomatic _ (Vector off ppi arr) = Vector off ppi' arr
        where
            ppi' = mkPseudoPrimInfoFromStatic (TypeLens::TypeLens Base p) ppi

    mkPseudoPrimInfoFromStatic _ (PseudoPrimInfo_VectorStatic ppi)
        = PseudoPrimInfo_VectorStatic $ mkPseudoPrimInfoFromStatic (TypeLens :: TypeLens Base p) ppi
-- | Freeze a reflection-supplied RunTime length into an Automatic vector by
-- measuring the vector under the reified parameter.
instance
    ( PseudoPrim elem
    ) => RunTimeToAutomatic
        Param_len
        (Vector RunTime elem)
        (Vector Automatic elem)
        where

    runTimeToAutomatic lens p v = mkApWith1Param
        (Proxy::Proxy (Vector RunTime elem))
        (Proxy::Proxy (Vector Automatic elem))
        lens
        p
        go
        v
        where
            -- VG.length only works here because mkApWith1Param has reified
            -- the length parameter into scope
            go v@(Vector_RunTime off ppi arr) = Vector_Automatic off len ppi arr
                where
                    len = VG.length v

    mkPseudoPrimInfoFromRuntime _ len (PseudoPrimInfo_VectorRunTime ppi)
        = PseudoPrimInfo_VectorAutomatic len (len*pp_sizeOf ppi) ppi

-- | Push a RunTimeToAutomatic conversion through the element parameter of a
-- statically sized vector; only the element info is rebuilt.
instance
    ( RunTimeToAutomatic p elem elem'
    , HasDictionary p
    , ReifiableConstraint (ApplyConstraint_GetConstraint p)
    ) => RunTimeToAutomatic
        (Param_elem p)
        (Vector (Static len) elem)
        (Vector (Static len) elem')
        where

    runTimeToAutomatic lens p v = mkApWith1Param
        (Proxy::Proxy (Vector (Static len) elem))
        (Proxy::Proxy (Vector (Static len) elem'))
        lens
        p
        go
        v
        where
            go :: Vector (Static len) elem -> Vector (Static len) elem'
            go (Vector off ppi arr) = Vector off ppi' arr
                where
                    ppi' = mkPseudoPrimInfoFromRuntime (TypeLens::TypeLens Base p) p ppi
                        :: PseudoPrimInfo elem'

    mkPseudoPrimInfoFromRuntime _ p (PseudoPrimInfo_VectorStatic ppi)
        = PseudoPrimInfo_VectorStatic $ mkPseudoPrimInfoFromRuntime (TypeLens::TypeLens Base p) p ppi

-- | Same elementwise conversion for a RunTime-length vector.
instance
    ( RunTimeToAutomatic p elem elem'
    , HasDictionary p
    , ReifiableConstraint (ApplyConstraint_GetConstraint p)
    ) => RunTimeToAutomatic
        (Param_elem p)
        (Vector RunTime elem)
        (Vector RunTime elem')
        where

    runTimeToAutomatic lens p v = mkApWith1Param
        (Proxy::Proxy (Vector RunTime elem))
        (Proxy::Proxy (Vector RunTime elem'))
        lens
        p
        go
        v
        where
            go :: Vector RunTime elem -> Vector RunTime elem'
            go (Vector_RunTime off ppi arr) = Vector_RunTime off ppi' arr
                where
                    ppi' = mkPseudoPrimInfoFromRuntime (TypeLens::TypeLens Base p) p ppi
                        :: PseudoPrimInfo elem'

    mkPseudoPrimInfoFromRuntime _ p (PseudoPrimInfo_VectorRunTime ppi)
        = PseudoPrimInfo_VectorRunTime $ mkPseudoPrimInfoFromRuntime (TypeLens::TypeLens Base p) p ppi

-- | Same elementwise conversion for an Automatic-length vector; the cached
-- length/size fields are preserved.
instance
    ( RunTimeToAutomatic p elem elem'
    , HasDictionary p
    , ReifiableConstraint (ApplyConstraint_GetConstraint p)
    ) => RunTimeToAutomatic
        (Param_elem p)
        (Vector Automatic elem)
        (Vector Automatic elem')
        where

    runTimeToAutomatic lens p v = mkApWith1Param
        (Proxy::Proxy (Vector Automatic elem))
        (Proxy::Proxy (Vector Automatic elem'))
        lens
        p
        go
        v
        where
            go :: Vector Automatic elem -> Vector Automatic elem'
            go (Vector_Automatic len off ppi arr) = Vector_Automatic len off ppi' arr
                where
                    ppi' = mkPseudoPrimInfoFromRuntime (TypeLens::TypeLens Base p) p ppi
                        :: PseudoPrimInfo elem'

    mkPseudoPrimInfoFromRuntime _ p (PseudoPrimInfo_VectorAutomatic len size ppi)
        = PseudoPrimInfo_VectorAutomatic
            len
            size
            (mkPseudoPrimInfoFromRuntime (TypeLens::TypeLens Base p) p ppi)
-------------------
-- | WHNF is enough: all fields are strict and unboxed.
instance NFData (Vector (Static len) elem) where
    rnf v = v `seq` ()
-- | Generic-vector interface for the statically sized representation.
-- Freezing/thawing is O(1); slicing is forbidden because the length is fixed
-- by the type.
instance
    ( PseudoPrim elem
    , KnownNat len
    ) => VG.Vector (Vector (Static len)) elem
        where

    {-# INLINE basicUnsafeFreeze #-}
    basicUnsafeFreeze (MVector i ppi marr) = Vector i ppi `liftM` unsafeFreezeByteArray marr

    {-# INLINE basicUnsafeThaw #-}
    basicUnsafeThaw (Vector i ppi arr) = MVector i ppi `liftM` unsafeThawByteArray arr

    -- phase-2 INLINE: the length is a type-level constant
    {-# INLINE [2] basicLength #-}
    basicLength _ = viewParam _len (undefined::Vector (Static len) elem)

    -- a Static vector cannot change size, so only the identity slice is legal
    {-# INLINE basicUnsafeSlice #-}
    basicUnsafeSlice j n v = if n /= viewParam _len (undefined::Vector (Static len) elem) || j /= 0
        then error $ "Vector.basicUnsafeSlice not allowed to change size"
        else v

    {-# INLINE basicUnsafeIndexM #-}
    basicUnsafeIndexM (Vector i ppi arr) j = return $! pp_indexByteArray ppi arr (i+j)

--     {-# INLINE basicUnsafeCopy #-}
--     basicUnsafeCopy (MVector i ppi1 dst) (Vector j ppi2src) =
--         copyByteArray dst (i*sz) src (j*sz) (len*sz)
--         where
--             sz = pp_sizeOf (undefined :: elem)
--             len = getParam_len (undefined::Vector (Static len) elem)

    {-# INLINE elemseq #-}
    elemseq _ = seq
-------------------
-- primitive instance allows unboxing unboxed vectors
-- | Unwrap a boxed 'Int' to the primitive 'Int#' (for GHC.Prim arithmetic).
unInt :: Int -> Int#
unInt (I# i) = i
-- | 'Prim' instance so that statically sized vectors can themselves be
-- stored unboxed inside other unboxed vectors.  Reads are O(1) re-views of
-- the array; writes copy the elements one by one.
instance
    ( Prim elem
    , PseudoPrim elem
    , KnownNat len
    ) => Prim (Vector (Static len) elem)
        where

    {-# INLINE sizeOf# #-}
    sizeOf# _ =
        unInt (sizeOf (undefined::elem)* (intparam (Proxy::Proxy len)))

    {-# INLINE alignment# #-}
    alignment# _ =
        unInt (alignment (undefined :: elem))
--         unInt (sizeOf ppi * (intparam (Proxy::Proxy len)))

    {-# INLINE indexByteArray# #-}
    indexByteArray# arr# i# =
        Vector ((I# i#)*(intparam (Proxy::Proxy len))) (emptyInfo::PseudoPrimInfo elem) (ByteArray arr#)

    {-# INLINE readByteArray# #-}
    readByteArray# marr# i# s# =
        (# s#, Vector (I# i#) (emptyInfo::PseudoPrimInfo elem) (ByteArray (unsafeCoerce# marr#)) #)

    -- element-by-element copy, threading the state token through each write
    {-# INLINE writeByteArray# #-}
    writeByteArray# marr# i# x s# = go 0 s#
        where
            go i s = ( if i >= intparam (Proxy::Proxy len)
                then s
                else go (i+1)
                    (writeByteArray# marr#
                        (i# *# (unInt ( intparam (Proxy::Proxy len))) +# (unInt i))
--                         (x VG.! i)
                        (x `VG.unsafeIndex` i)
                        s
                    )
                )
                where
                    iii = I# (i# *# (sizeOf# (undefined::elem)) +# (unInt i))
-- | 'PseudoPrim' instance: like 'Prim' above, but the element dictionary is
-- carried in a wrapped 'PseudoPrimInfo' instead of a class constraint.
instance
    ( PseudoPrim elem
    , KnownNat len
    , Show elem
    ) => PseudoPrim (Vector (Static len) elem)
        where

    newtype PseudoPrimInfo (Vector (Static len) elem) =
        PseudoPrimInfo_VectorStatic (PseudoPrimInfo elem)

    {-# INLINE pp_sizeOf# #-}
    pp_sizeOf# (PseudoPrimInfo_VectorStatic ppi) =
        unInt (pp_sizeOf ppi * (intparam (Proxy::Proxy len)))

    {-# INLINE pp_alignment# #-}
    pp_alignment# (PseudoPrimInfo_VectorStatic ppi) =
        unInt (pp_alignment ppi)
--         unInt (pp_sizeOf ppi * (intparam (Proxy::Proxy len)))

    {-# INLINE pp_indexByteArray# #-}
    pp_indexByteArray# (PseudoPrimInfo_VectorStatic ppi) arr# i# =
        Vector ((I# i#)*(intparam (Proxy::Proxy len))) ppi (ByteArray arr#)

    {-# INLINE pp_readByteArray# #-}
    pp_readByteArray# (PseudoPrimInfo_VectorStatic ppi) marr# i# s# =
        (# s#, Vector (I# i#) ppi (ByteArray (unsafeCoerce# marr#)) #)

    -- element-by-element copy, threading the state token through each write
    {-# INLINE pp_writeByteArray# #-}
    pp_writeByteArray# (PseudoPrimInfo_VectorStatic ppi) marr# i# x s# = go 0 s#
        where
            go i s = ( if i >= intparam (Proxy::Proxy len)
                then s
                else go (i+1)
                    (pp_writeByteArray# ppi marr#
                        (i# *# (unInt ( intparam (Proxy::Proxy len))) +# (unInt i))
--                         (x VG.! i)
                        (x `VG.unsafeIndex` i)
                        s
                    )
                )
                where
                    iii = I# (i# *# (pp_sizeOf# ppi) +# (unInt i))

    {-# INLINE seqInfo #-}
    seqInfo _ = seqInfo (undefined::elem)

    {-# INLINE emptyInfo #-}
    emptyInfo = PseudoPrimInfo_VectorStatic emptyInfo
----------------------------------------
-- RunTime size
-- | RunTime-sized vector: same layout as the static one (offset, element
-- info, array); the length is supplied externally via a type-lens parameter.
data instance Vector RunTime elem = Vector_RunTime
    {-#UNPACK#-}!Int
    {-#UNPACK#-}!(PseudoPrimInfo elem)
    {-#UNPACK#-}!ByteArray

-- | WHNF is enough: all fields are strict and unboxed.
instance NFData (Vector RunTime elem) where
    rnf a = seq a ()
-- | Generic-vector interface for the RunTime representation; the length
-- comes from the reified parameter, so freeze/slice enforce that it does
-- not change.
instance
    ( PseudoPrim elem
--     , GetParam_len (Vector RunTime elem)
--     , ViewParam GetParam_len (Vector RunTime elem)
    , ViewParam Param_len (Vector RunTime elem)
    ) => VG.Vector (Vector RunTime) elem
        where

    {-# INLINE basicUnsafeFreeze #-}
--     basicUnsafeFreeze (MVector_RunTime len i marr) = if len==getParam_len (undefined::Vector RunTime elem)
    basicUnsafeFreeze (MVector_RunTime len i ppi marr) =
        if len == viewParam _len (undefined::Vector RunTime elem)
            then Vector_RunTime i ppi `liftM` unsafeFreezeByteArray marr
            else error $ "basicUnsafeFreeze cannot change RunTime vector size"
                ++ "; len="++show len
                ++ "; getParam_len="++show (viewParam _len (undefined::Vector RunTime elem))

    {-# INLINE basicUnsafeThaw #-}
    basicUnsafeThaw (Vector_RunTime i ppi arr) =
        MVector_RunTime (viewParam _len (undefined::Vector RunTime elem)) i ppi `liftM` unsafeThawByteArray arr
--         MVector_RunTime (getParam_len (undefined::Vector RunTime elem)) i `liftM` unsafeThawByteArray arr

    {-# INLINE [2] basicLength #-}
    basicLength _ = viewParam _len (undefined::Vector RunTime elem)
--     basicLength _ = getParam_len (undefined::Vector RunTime elem)

    {-# INLINE basicUnsafeSlice #-}
--     basicUnsafeSlice j n v = if n /= getParam_len (undefined::Vector RunTime elem) || j /= 0
    basicUnsafeSlice j n v =
        if n /= viewParam _len (undefined::Vector RunTime elem) || j /= 0
            then error $ "Vector_RunTime.basicUnsafeSlice not allowed to change size"
            else v

    {-# INLINE basicUnsafeIndexM #-}
    basicUnsafeIndexM (Vector_RunTime i ppi arr) j = return $! pp_indexByteArray ppi arr (i+j)

--     {-# INLINE basicUnsafeCopy #-}
--     basicUnsafeCopy (MVector_RunTime n i dst) (Vector_RunTime j src) = if n==len
--         then copyByteArray dst (i*sz) src (j*sz) (len*sz)
--         else error "basicUnsafeCopy cannot change RunTime vector size"
--         where
--             sz = pp_sizeOf (undefined :: elem)
--             -- len = getParam_len (undefined::Vector RunTime elem)
--             len = viewParam _len (undefined::Vector RunTime elem)

    {-# INLINE elemseq #-}
    elemseq _ = seq
-------------------
-- | Nesting support for RunTime vectors; the length is pulled from the
-- reified parameter on every size computation.
instance
    ( PseudoPrim elem
--     , GetParam_len (Vector RunTime elem)
    , ViewParam Param_len (Vector RunTime elem)
    ) => PseudoPrim (Vector RunTime elem)
        where

    newtype PseudoPrimInfo (Vector RunTime elem) =
        PseudoPrimInfo_VectorRunTime (PseudoPrimInfo elem)

    {-# INLINE pp_sizeOf# #-}
    pp_sizeOf# (PseudoPrimInfo_VectorRunTime ppi) =
--         unInt (pp_sizeOf ppi * (getParam_len (undefined::Vector RunTime elem)))
        unInt (pp_sizeOf ppi * (viewParam _len (undefined::Vector RunTime elem)))

    {-# INLINE pp_alignment# #-}
    pp_alignment# (PseudoPrimInfo_VectorRunTime ppi) =
        unInt (pp_alignment ppi)
--         unInt (pp_sizeOf (undefined::elem) * (getParam_len (undefined::Vector RunTime elem)))

    {-# INLINE pp_indexByteArray# #-}
    pp_indexByteArray# (PseudoPrimInfo_VectorRunTime ppi)arr# i# =
--         Vector_RunTime ((I# i#)*(getParam_len (undefined::Vector RunTime elem))) ppi (ByteArray arr#)
        Vector_RunTime ((I# i#)*(viewParam _len (undefined::Vector RunTime elem))) ppi (ByteArray arr#)

    {-# INLINE pp_readByteArray# #-}
    pp_readByteArray# (PseudoPrimInfo_VectorRunTime ppi) marr# i# s# =
        (# s#, Vector_RunTime (I# i#) ppi (ByteArray (unsafeCoerce# marr#)) #)

    -- element-by-element copy, threading the state token through each write
    {-# INLINE pp_writeByteArray# #-}
    pp_writeByteArray# (PseudoPrimInfo_VectorRunTime ppi) marr# i# x s# = go 0 s#
        where
            go i s = ( if i >= len
                then s
                else go (i+1)
                    (pp_writeByteArray# ppi marr#
                        (i# *# (unInt len) +# (unInt i))
                        (x VG.! i)
                        s
                    )
                )
                where
                    len = viewParam _len (undefined::Vector RunTime elem)
--                     len = getParam_len (undefined::Vector RunTime elem)
                    iii = I# (i# *# (pp_sizeOf# ppi) +# (unInt i))

    {-# INLINE seqInfo #-}
    seqInfo _ = seqInfo (undefined::elem)

    {-# INLINE emptyInfo #-}
    emptyInfo = PseudoPrimInfo_VectorRunTime emptyInfo
---------------------------------------
-- Automatic sized
-- | Automatic-sized vector: offset, length, element info and the backing
-- array are all stored in the value, like an ordinary unboxed vector.
data instance Vector Automatic elem = Vector_Automatic
    {-#UNPACK#-}!Int
    {-#UNPACK#-}!Int
    {-#UNPACK#-}!(PseudoPrimInfo elem)
    {-#UNPACK#-}!ByteArray

-- | WHNF is enough: all fields are strict and unboxed.
instance NFData (Vector Automatic elem) where
    rnf v = seq v ()
-- | Generic-vector interface for the Automatic representation; this is the
-- only variant that permits genuine O(1) slicing.
instance PseudoPrim elem => VG.Vector (Vector Automatic) elem where

    {-# INLINE basicUnsafeFreeze #-}
    basicUnsafeFreeze (MVector_Automatic i n ppi marr)
        = Vector_Automatic i n ppi `liftM` unsafeFreezeByteArray marr

    {-# INLINE basicUnsafeThaw #-}
    basicUnsafeThaw (Vector_Automatic i n ppi arr)
        = MVector_Automatic i n ppi `liftM` unsafeThawByteArray arr

    {-# INLINE basicLength #-}
    basicLength (Vector_Automatic _ n _ _) = n

    {-# INLINE basicUnsafeSlice #-}
    basicUnsafeSlice j n (Vector_Automatic i _ ppi arr) = Vector_Automatic (i+j) n ppi arr

    {-# INLINE basicUnsafeIndexM #-}
    basicUnsafeIndexM (Vector_Automatic i _ ppi arr) j = return $! pp_indexByteArray ppi arr (i+j)

--     {-# INLINE basicUnsafeCopy #-}
--     basicUnsafeCopy (MVector_Automatic i n dst) (Vector_Automatic j _ src)
--         = copyByteArray dst (i*sz) src (j*sz) (n*sz)
--         where
--             sz = sizeOf (indefinido :: a)

    {-# INLINE elemseq #-}
    elemseq _ = seq
-- | Nesting support for Automatic vectors.  Because the element size is not
-- statically known, the info record caches both the length and the total
-- byte size; 'seqInfo' is False and 'emptyInfo' is an error, so a real info
-- value must always be threaded in.
instance PseudoPrim elem => PseudoPrim (Vector Automatic elem) where

    data PseudoPrimInfo (Vector Automatic elem) = PseudoPrimInfo_VectorAutomatic
        {-#UNPACK#-}!(Int) -- length
        {-#UNPACK#-}!(Int) -- sizeOf
        {-#UNPACK#-}!(PseudoPrimInfo elem)

    {-# INLINE pp_sizeOf# #-}
    pp_sizeOf# (PseudoPrimInfo_VectorAutomatic _ s _) = unInt s

    {-# INLINE pp_alignment# #-}
    pp_alignment# (PseudoPrimInfo_VectorAutomatic _ _ ppi) =
        unInt (pp_alignment ppi)

    {-# INLINE pp_indexByteArray# #-}
    pp_indexByteArray# (PseudoPrimInfo_VectorAutomatic len _ ppi) arr# i# =
        Vector_Automatic ((I# i#)*len) len ppi (ByteArray arr#)

    {-# INLINE pp_readByteArray# #-}
    pp_readByteArray# (PseudoPrimInfo_VectorAutomatic len _ ppi) marr# i# s# =
        (# s#, Vector_Automatic (I# i#) len ppi (ByteArray (unsafeCoerce# marr#)) #)

    -- element-by-element copy, threading the state token through each write
    {-# INLINE pp_writeByteArray# #-}
    pp_writeByteArray# (PseudoPrimInfo_VectorAutomatic len _ ppi) marr# i# x s# = go 0 s#
        where
            go i s = ( if i >= len
                then s
                else go (i+1)
                    (pp_writeByteArray# ppi marr#
                        (i# *# (unInt len) +# (unInt i))
                        (x VG.! i)
                        s
                    )
                )
                where
                    iii = I# (i# *# (pp_sizeOf# ppi) +# (unInt i))

    {-# INLINE seqInfo #-}
    seqInfo _ = False

    {-# INLINE emptyInfo #-}
    emptyInfo = error "emptyInfo of PseudoPrimInfo_VectorAutomatic"
-------------------------------------------------------------------------------
-- mutable vector
-- | Mutable counterpart of 'Vector', indexed by the same length 'Config'.
data family MVector (len::Config Nat) s elem

-- Ties each immutable representation to its mutable twin for the
-- vector-generic freeze/thaw machinery.
type instance VG.Mutable (Vector len) = MVector len
---------------------------------------
-- static size
-- | Mutable statically sized vector: offset, element info, mutable array.
data instance MVector (Static len) s elem = MVector
    {-#UNPACK#-}!Int
    {-#UNPACK#-}!(PseudoPrimInfo elem)
    {-#UNPACK#-}!(MutableByteArray s)

-- | Mutable generic-vector interface for the static representation.
-- Allocation requires an element type whose info is 'emptyInfo'-safe
-- (seqInfo False); slicing may only be the identity.
instance
    ( PseudoPrim elem
    , KnownNat len
    ) => VGM.MVector (MVector (Static len)) elem
        where

    {-# INLINE basicLength #-}
    basicLength _ = fromIntegral $ natVal (Proxy::Proxy len)

    {-# INLINE basicUnsafeSlice #-}
    basicUnsafeSlice i m v = if m /= len
        then error $ "MVector (Static len) .basicUnsafeSlice not allowed to change size"
            ++"; i="++show i
            ++"; m="++show m
            ++"; len="++show len
        else v
        where
            -- len = getParam_len (undefined::MVector (Static len) s elem)
            len = intparam (Proxy::Proxy len)

    {-# INLINE basicOverlaps #-}
    basicOverlaps (MVector i ppi1 arr1) (MVector j ppi2 arr2)
        = sameMutableByteArray arr1 arr2
        && (between i j (j+len) || between j i (i+len))
        where
            len = intparam (Proxy::Proxy len)
            between x y z = x >= y && x < z

    {-# INLINE basicUnsafeNew #-}
    basicUnsafeNew n = if seqInfo (undefined::elem)
        then error "basicUnsafeNew: seqInfo"
        else do
            arr <- newPinnedByteArray (len * pp_sizeOf (emptyInfo :: PseudoPrimInfo elem))
            return $ MVector 0 (emptyInfo::PseudoPrimInfo elem) arr
        where
            len = intparam (Proxy::Proxy len)

    {-# INLINE basicUnsafeRead #-}
    basicUnsafeRead (MVector i ppi arr) j = pp_readByteArray ppi arr (i+j)

    {-# INLINE basicUnsafeWrite #-}
    basicUnsafeWrite (MVector i ppi arr) j x = pp_writeByteArray ppi arr (i+j) x

--     {-# INLINE basicUnsafeCopy #-}
--     basicUnsafeCopy (MVector i ppi dst) (MVector j ppi src) =
--         copyMutableByteArray ppi dst (i*sz) src (j*sz) (len*sz)
--         where
--             sz = pp_sizeOf (undefined :: elem)
--             len = intparam (Proxy::Proxy len)
--
--     {-# INLINE basicUnsafeMove #-}
--     basicUnsafeMove (MVector i dst) (MVector j src) = moveByteArray dst (i*sz) src (j*sz) (len * sz)
--         where
--             sz = pp_sizeOf (undefined :: elem)
--             len = intparam (Proxy::Proxy len)
--
--     {-# INLINE basicSet #-}
--     basicSet (MVector i arr) x = setByteArray arr i (intparam(Proxy::Proxy len)) x
---------------------------------------
-- RunTime size
-- | Mutable RunTime-sized vector: length, offset, element info, array.
data instance MVector RunTime s elem = MVector_RunTime
    {-#UNPACK#-}!Int
    {-#UNPACK#-}!Int
    {-#UNPACK#-}!(PseudoPrimInfo elem)
    {-#UNPACK#-}!(MutableByteArray s)

-- | Mutable generic-vector interface for the RunTime representation.
-- Unlike the immutable type, slicing is permitted here.
instance
    ( PseudoPrim elem
    ) => VGM.MVector (MVector RunTime) elem
        where

    {-# INLINE basicLength #-}
    basicLength (MVector_RunTime n _ ppi _) = n

    {-# INLINE basicUnsafeSlice #-}
    basicUnsafeSlice i m (MVector_RunTime n j ppi v) = MVector_RunTime m (i+j) ppi v
--     basicUnsafeSlice i m v = if m /= len
--         then error $ "MVector.basicUnsafeSlice not allowed to change size"
--             ++"; i="++show i
--             ++"; m="++show m
--             ++"; len="++show len
--         else v
--         where
--             len = VGM.length v

    {-# INLINE basicOverlaps #-}
    basicOverlaps (MVector_RunTime m i ppi1 arr1) (MVector_RunTime n j ppi2 arr2)
        = sameMutableByteArray arr1 arr2
        && (between i j (j+m) || between j i (i+n))
        where
            between x y z = x >= y && x < z

    {-# INLINE basicUnsafeNew #-}
    basicUnsafeNew n = if seqInfo (undefined::elem)
        then error "basicUnsafeNew: seqInfo"
        else do
            arr <- newPinnedByteArray (n * pp_sizeOf (emptyInfo :: PseudoPrimInfo elem))
            return $ MVector_RunTime n 0 emptyInfo arr

    {-# INLINE basicUnsafeRead #-}
    basicUnsafeRead (MVector_RunTime _ i ppi arr) j = pp_readByteArray ppi arr (i+j)

    {-# INLINE basicUnsafeWrite #-}
    basicUnsafeWrite (MVector_RunTime _ i ppi arr) j x = pp_writeByteArray ppi arr (i+j) x

--     {-# INLINE basicUnsafeCopy #-}
--     basicUnsafeCopy (MVector_RunTime n i dst) (MVector_RunTime m j src)
--         = if n==m
--             then copyMutableByteArray dst (i*sz) src (j*sz) (n*sz)
--             else error "basicUnsafeCopy cannot change size of RunTime MVector"
--         where
--             sz = pp_sizeOf (undefined :: elem)
--
--     {-# INLINE basicUnsafeMove #-}
--     basicUnsafeMove (MVector_RunTime n i dst) (MVector_RunTime m j src)
--         = if n==m
--             then moveByteArray dst (i*sz) src (j*sz) (n * sz)
--             else error "basicUnsafeMove cannot change size of RunTime MVector"
--         where
--             sz = pp_sizeOf (undefined :: elem)
--
--     {-# INLINE basicSet #-}
--     basicSet (MVector_RunTime n i arr) x = setByteArray arr i n x
---------------------------------------
-- Automatic size
-- | Mutable Automatic-sized vector: offset, length, element info, array.
data instance MVector Automatic s elem = MVector_Automatic
    {-#UNPACK#-}!Int
    {-#UNPACK#-}!Int
    {-#UNPACK#-}!(PseudoPrimInfo elem)
    {-#UNPACK#-}!(MutableByteArray s)

-- | Mutable generic-vector interface for the Automatic representation;
-- structurally identical to the RunTime instance but with the field order
-- (offset, length) swapped.
instance
    ( PseudoPrim elem
    ) => VGM.MVector (MVector Automatic) elem
        where

    {-# INLINE basicLength #-}
    basicLength (MVector_Automatic _ n ppi _) = n

    {-# INLINE basicUnsafeSlice #-}
    basicUnsafeSlice i m (MVector_Automatic j n ppi v) = MVector_Automatic (i+j) m ppi v
--     basicUnsafeSlice i m v = if m /= len
--         then error $ "MVector.basicUnsafeSlice not allowed to change size"
--             ++"; i="++show i
--             ++"; m="++show m
--             ++"; len="++show len
--         else v
--         where
--             len = VGM.length v

    {-# INLINE basicOverlaps #-}
    basicOverlaps (MVector_Automatic i m ppi1 arr1) (MVector_Automatic j n ppi2 arr2)
        = sameMutableByteArray arr1 arr2
        && (between i j (j+m) || between j i (i+n))
        where
            between x y z = x >= y && x < z

    {-# INLINE basicUnsafeNew #-}
    basicUnsafeNew n = if seqInfo (undefined::elem)
        then error "basicUnsafeNew: seqInfo"
        else do
            arr <- newPinnedByteArray (n * pp_sizeOf (emptyInfo :: PseudoPrimInfo elem))
            return $ MVector_Automatic 0 n emptyInfo arr

    {-# INLINE basicUnsafeRead #-}
    basicUnsafeRead (MVector_Automatic i _ ppi arr) j = pp_readByteArray ppi arr (i+j)

    {-# INLINE basicUnsafeWrite #-}
    basicUnsafeWrite (MVector_Automatic i _ ppi arr) j x = pp_writeByteArray ppi arr (i+j) x

--     {-# INLINE basicUnsafeCopy #-}
--     basicUnsafeCopy (MVector_Automatic i n dst) (MVector_Automatic j m src)
--         = if n==m
--             then copyMutableByteArray dst (i*sz) src (j*sz) (n*sz)
--             else error "basicUnsafeCopy cannot change size of Automatic MVector"
--         where
--             sz = pp_sizeOf (undefined :: elem)
--
--     {-# INLINE basicUnsafeMove #-}
--     basicUnsafeMove (MVector_Automatic i n dst) (MVector_Automatic j m src)
--         = if n==m
--             then moveByteArray dst (i*sz) src (j*sz) (n * sz)
--             else error "basicUnsafeMove cannot change size of Automatic MVector"
--         where
--             sz = pp_sizeOf (undefined :: elem)
--
--     {-# INLINE basicSet #-}
--     basicSet (MVector_Automatic i n arr) x = setByteArray arr i n x
|
mikeizbicki/typeparams
|
src/Data/Params/Vector/Unboxed.hs
|
bsd-3-clause
| 26,179
| 0
| 21
| 7,602
| 5,803
| 3,066
| 2,737
| -1
| -1
|
{-# LANGUAGE FlexibleContexts #-}
module Sync.GitLike (
GitLike(..),
enumGitLike, gitLike, remoteGitLike,
markGitLike, remoteMarkGitLike,
module Sync.Base,
module Sync.Repo
) where
import Prelude.Unicode
import Control.Arrow
import Control.Lens
import Control.Monad.Except
import Data.Either (partitionEithers)
import Data.List (nub, intercalate)
import Data.Maybe (mapMaybe, listToMaybe, catMaybes)
import Data.Time.Clock
import Data.Tuple (swap)
import System.Directory
import System.FilePath.Posix
import System.Process
import Text.Read (readMaybe)
import Text.Regex.PCRE ((=~))
import Sync.Base
import Sync.Base.Internal (mapKeys)
import Sync.Repo
import Sync.Dir
import Sync.Ssh
-- | A git-compatible VCS invocation: the executable name plus the fixed
-- arguments that precede every subcommand.
data GitLike = GitLike {
    gitLikeCommand ∷ String,
    gitLikeArgs ∷ [String] }

-- | Full argument vector: the command followed by its fixed arguments.
gitLikeCmds ∷ GitLike → [String]
gitLikeCmds g = gitLikeCommand g : gitLikeArgs g

-- | The same invocation rendered as a single shell-command string.
gitLikeShellCmd ∷ GitLike → String
gitLikeShellCmd g = unwords (gitLikeCmds g)
-- | Enumerate repository state at a 'Location', dispatching to the local or
-- remote scanner; returns the tracked-change patch and the untracked-file
-- snapshot with modification times.
enumGitLike ∷ GitLike → Location → Bool → IO (Patch Entity (Maybe UTCTime), Repo Entity UTCTime)
enumGitLike cvs = location (gitLike cvs) (remoteGitLike cvs)
-- | Scan a local working copy: run @status@ in @fpath@ and split the output
-- into tracked changes (a 'Patch' with mtimes) and untracked files/dirs (a
-- 'Repo' of mtimes).  @untracked@ chooses @-s@ (list untracked) versus
-- @-suno@ (tracked only); symlinks among untracked entries are skipped.
gitLike ∷ GitLike → FilePath → Bool → IO (Patch Entity (Maybe UTCTime), Repo Entity UTCTime)
gitLike cvs fpath untracked = withDir fpath $ do
    status ← lines <$> readProcess (gitLikeCommand cvs) (gitLikeArgs cvs ++ ["status", if untracked then "-s" else "-suno", "."]) ""
    let
        (trackedList, untrackedList) = parseGitLikeStatus status
    rgit ← traverse (uncurry getStat) trackedList
    ugit ← filterM (fmap not ∘ pathIsSymbolicLink) untrackedList >>= traverse stat'
    udirs ← fmap concat ∘ mapM (untrackedDir ∘ view entityPath) ∘ filter isDir ∘ map fst $ ugit
    return (repo rgit, repo $ ugit ++ udirs)
    where
        -- mtime plus a directory flag for one path
        stat' f = do
            tm ← getMTime f
            isDir' ← doesDirectoryExist f
            return (Entity isDir' f, tm)
        -- translate a parsed status action into an (Entity, action) pair
        getStat (Create _) f = second (Create ∘ Just) <$> stat' f
        getStat (Update _ _) f = second (Update Nothing ∘ Just) <$> stat' f
        getStat (Delete _) f = return (Entity False f, Delete Nothing)
        -- list the contents of an untracked directory, re-rooted under it
        untrackedDir d = do
            dirCts ← dir d
            return $ toList ∘ mapKeys (over entityPath (normalise ∘ (d </>))) $ dirCts
-- | Scan a working copy on a remote host over SSH; mirrors 'gitLike' but
-- runs @status@ and @find@ through the 'ProcessM' session, and silently
-- drops entries whose stat fails (e.g. racing deletions).
remoteGitLike ∷ GitLike → String → FilePath → Bool → IO (Patch Entity (Maybe UTCTime), Repo Entity UTCTime)
remoteGitLike cvs host fpath untracked = ssh host $ do
    cd fpath
    out ← invoke $ unwords $ gitLikeCmds cvs ++ ["status", if untracked then "-s" else "-suno", "."]
    let
        (trackedList, untrackedList) = parseGitLikeStatus out
    rgit ← fmap catMaybes (traverse ((`catchError` const (return Nothing)) ∘ fmap Just ∘ uncurry getStat) trackedList)
    ugit ← filterM (fmap not ∘ isLink) untrackedList >>= traverse stat
    udirs ← fmap concat ∘ mapM (untrackedDir ∘ view entityPath) ∘ filter isDir ∘ map fst $ ugit
    return (repo rgit, repo $ ugit ++ udirs)
    where
        -- translate a parsed status action into an (Entity, action) pair
        getStat (Create _) f = second (Create ∘ Just) <$> stat f
        getStat (Update _ _) f = second (Update Nothing ∘ Just) <$> stat f
        getStat (Delete _) f = return (Entity False f, Delete Nothing)
        -- recursively list an untracked directory via remote find
        untrackedDir d = do
            cts ← invoke $ "find '" ++ d ++ "' -mindepth 1 -type f -or -type d"
            r ← repo <$> fmap catMaybes (mapM (\f → fmap Just (stat f) `catchError` const (return Nothing)) cts)
            return $ toList ∘ mapKeys (over entityPath normalise) $ r
-- | Mark git file according to action performed
markGitLike ∷ GitLike → Entity → Action a → IO ()
markGitLike _ (Entity True _) _ = return ()
markGitLike _ _ (Update _ _) = return ()
markGitLike cvs (Entity False fpath) (Delete _) = void $ readProcess (gitLikeCommand cvs) (gitLikeArgs cvs ++ ["rm", "--cached", fpath]) ""
markGitLike cvs (Entity False fpath) _ = void $ readProcess (gitLikeCommand cvs) (gitLikeArgs cvs ++ ["add", fpath]) ""
-- | Mark remote git file according to action performed
remoteMarkGitLike ∷ GitLike → Entity → Action a → ProcessM ()
remoteMarkGitLike _ (Entity True _) _ = return ()
remoteMarkGitLike _ _ (Update _ _) = return ()
remoteMarkGitLike cvs (Entity False fpath) (Delete _) = invoke_ $ gitLikeShellCmd cvs ++ " rm --cached " ++ quote fpath
remoteMarkGitLike cvs (Entity False fpath) _ = invoke_ $ gitLikeShellCmd cvs ++ " add " ++ quote fpath
-- | One-letter porcelain status codes from @git status -s@ (X/Y columns).
data GitLikeStatus = Ignored | Untracked | Added | Unmerged | Modified | Renamed | Deleted | Copied deriving (Eq, Ord, Enum, Bounded)
-- | Lookup table between status constructors and their porcelain letters;
-- 'Show'/'Read' below are both driven by this table.
gitStates ∷ [(GitLikeStatus, Char)]
gitStates = [
    (Ignored, '!'),
    (Untracked, '?'),
    (Added, 'A'),
    (Unmerged, 'U'),
    (Modified, 'M'),
    (Renamed, 'R'),
    (Deleted, 'D'),
    (Copied, 'C')]
-- | Render a status as its single porcelain letter.  Every constructor is
-- present in 'gitStates', so the fallback is unreachable; it now carries a
-- diagnostic message instead of a bare 'undefined'.
instance Show GitLikeStatus where
    show s = case lookup s gitStates of
        Just c → [c]
        Nothing → error "GitLikeStatus.show: constructor missing from gitStates table"
-- | Inverse of 'Show': consume exactly one status letter from the input,
-- yielding no parse for the empty string or an unknown letter.
instance Read GitLikeStatus where
    readsPrec _ [] = []
    readsPrec _ (c:rest) = case lookup c (map swap gitStates) of
        Just st → [(st, rest)]
        Nothing → []
-- | Parse @git status -s@ output lines into tracked actions (Left) and
-- untracked paths (Right).  Each line's two status letters are simplified,
-- de-duplicated and merged pairwise according to git's XY semantics; a
-- rename line carries both the old and the new path.
parseGitLikeStatus ∷ [String] → ([(Action (), FilePath)], [FilePath])
parseGitLikeStatus = partitionEithers ∘ concatMap parse' where
    -- match "XY from -> to" (the "-> to" part only appears for renames)
    parse' f = maybe [] (uncurry toStatus) $ do
        [_, mods, from', to'] ← listToMaybe (f =~ "^([!?AUMRDC ]{2}) (.*?)(?: -> (.*?))?$" ∷ [[String]])
        let
            mod' = listToMaybe $ merge' $ nub $ map simplifyStatus $ mapMaybe (readMaybe ∘ return) mods
            files = filter (not ∘ null) [from', to']
        return (mod', files)
    -- Leaves only `Added`, `Modified`, `Deleted`, `Renamed` and `Copied`
    simplifyStatus Ignored = Added
    simplifyStatus Unmerged = Modified
    simplifyStatus s = s
    -- collapse an index/worktree status pair into one effective action;
    -- combinations git cannot emit are hard errors
    merge' [Added, Modified] = [Added]
    merge' [Added, Deleted] = error "Impossible git status: AD - added, then deleted"
    merge' [Added, Renamed] = error "Impossible git status: AR - added, then renamed (should be added with new name)"
    merge' [Added, Copied] = error "Unknown git status: AC - added, then copied"
    merge' [Modified, Added] = error "Impossible git status: MA - modified, then added"
    merge' [Modified, Deleted] = [Deleted]
    merge' [Modified, Renamed] = error "Impossible git status: MR - modified, then renamed (should be deleted and added with new name)"
    merge' [Modified, Copied] = error "Unknown git status: MC - modified, then copied"
    merge' [Deleted, _] = error "Impossible git status: Dx - deleted, then smth else"
    merge' [Renamed, Added] = error "Impossible git status: RA - renamed, then added"
    merge' [Renamed, Modified] = [Renamed]
    merge' [Renamed, Deleted] = [Deleted]
    merge' [Renamed, Copied] = error "Unknown git status: RC - renamed, then copied"
    merge' [Copied, Added] = error "Impossible git status: CA - copied, then added"
    merge' [Copied, Modified] = [Copied]
    merge' [Copied, Deleted] = [Deleted]
    merge' [Copied, Renamed] = error "Unknown git status: CR - copied, then renamed"
    merge' [s] = [s]
    merge' s = error $ "Impossible git status: " ++ show s
    -- convert the merged status plus file list into patch actions
    toStatus Nothing _ = []
    toStatus (Just Added) [f] = return $ Left (Create (), f)
    toStatus (Just Modified) [f] = return $ Left (Update () (), f)
    toStatus (Just Renamed) [f, t] = [Left (Delete (), f), Left (Create (), t)]
    toStatus (Just Copied) [_, t] = return $ Left (Update () (), t)
    toStatus (Just Deleted) [f] = return $ Left (Delete (), f)
    toStatus (Just Untracked) [f] = return (Right f)
    toStatus (Just s) fs = error $ "Don't know how to convert this git status to actions, status: " ++ show s ++ ", files: " ++ intercalate ", " fs
|
mvoidex/hsync
|
src/Sync/GitLike.hs
|
bsd-3-clause
| 7,240
| 32
| 19
| 1,267
| 2,732
| 1,421
| 1,311
| -1
| -1
|
-- FIXME rename module
module Util.BufferedIOx
( BufferedIOx(..)
, runGetBuffered
, runPutBuffered
, module Util.Binary
) where
import Control.Monad.IO.Class
import Data.Binary
import qualified Data.ByteString as BS ( ByteString )
import qualified Data.ByteString.Lazy as LBS ( ByteString )
import Util.Binary
import Util.IOExtra
-- | Abstraction over handle-like values supporting buffered reads with
-- push-back and lazy writes.
class BufferedIOx a where
    -- | Read up to the given number of bytes.
    readBuffered :: (MonadIO m) => a -> Int -> m BS.ByteString
    -- | Push bytes back so the next read yields them first.
    unreadBuffered :: (MonadIO m) => a -> BS.ByteString -> m ()
    -- | Write a lazy chunk to the handle.
    writeBuffered :: (MonadIO m) => a -> LBS.ByteString -> m ()
    -- | Release the underlying resource.
    closeBuffered :: (MonadIO m) => a -> m ()
-- | Decode one 'Binary' value from the handle, pushing unconsumed input
-- back via 'unreadBuffered'; decode failures are rethrown ('throwLeftM').
runGetBuffered :: (MonadIO m, BufferedIOx s, Binary a, MonadMask m, MonadLogger m) => s -> m a
runGetBuffered s =
    throwLeftM (runGetA (liftIO . readBuffered s) (liftIO . unreadBuffered s) get)
-- | Serialise a value with its 'Binary' instance and write it to the handle.
runPutBuffered :: (MonadIO m, BufferedIOx s, Binary a) => s -> a -> m ()
runPutBuffered s x = runPutA (liftIO . writeBuffered s) (put x)
|
LTI2000/hinterface
|
src/Util/BufferedIOx.hs
|
bsd-3-clause
| 994
| 0
| 11
| 234
| 344
| 187
| 157
| 21
| 1
|
module Foreign.Mms.MappedVector
( MappedVector(..)
, null
, mappedVectorSize
, mappedVectorAlignment
, mappedVectorReadFields
, mappedVectorWriteFields
) where
import Prelude hiding(length, null)
import Control.Monad(liftM2)
import Foreign.Mms.Class(Mms(..), Storage(..))
import Foreign.Mms.Get(Get, getPointer)
import Foreign.Mms.Instances
import Foreign.Mms.Put(Put, writeOffset, loadOffset)
import Foreign.Mms.Vector(Vector(..))
import Foreign.Ptr(Ptr, plusPtr)
import GHC.Int(Int64)
import qualified Data.Foldable as F
-- | A memory-mapped vector: a raw pointer to the first mapped element plus
-- the element count.  The GADT pins the element to its Mms-mapped form @m@.
data MappedVector a where
    MappedVector :: Mms a m => Ptr m -> Int64 -> MappedVector m
-- | On-disk footprint of the vector header: an 8-byte element offset plus
-- an 8-byte length.
mappedVectorSize :: Int
mappedVectorSize = 16

-- | Header alignment in bytes (one 64-bit word).
mappedVectorAlignment :: Int
mappedVectorAlignment = 8
-- | Read the vector header fields: the element pointer, then the length.
mappedVectorReadFields :: Mms a m => Get (MappedVector m)
mappedVectorReadFields = MappedVector <$> getPointer <*> readFields
-- | Write the vector header: the current offset as a pointer, then the
-- element count.
mappedVectorWriteFields :: Int64 -> Put ()
mappedVectorWriteFields len = do
    loadOffset >>= writeOffset
    writeFields len
-- | True when the mapped vector holds no elements.
null :: MappedVector a -> Bool
null (MappedVector _ len) = len == 0
-- | Random access into the mapped region.  Note the knot: the element size
-- is taken from the result being read ('mmsSize' is shape-independent), so
-- the stride is available before the read completes.
instance Vector MappedVector a where
    length (MappedVector _ length) = fromIntegral length

    (!) (MappedVector p length) index = let
        size = mmsSize result
        result = readMms (p `plusPtr` (size * index))
        in result
-- | Shown through the element list, parenthesised like an applied
-- constructor.
instance Show a => Show (MappedVector a) where
    showsPrec d v = showParen (d > 10) $
        showString "MappedVector ". shows (F.toList v)
-- | Fold over the mapped elements by walking the pointer with the element
-- stride.  Note: the final @length = length@ delegates to the 'Vector'
-- class method — Prelude's length is hidden in this module.
instance F.Foldable MappedVector where
    toList (MappedVector p length) = let
        xs = take (fromIntegral length) $ map readMms $
            iterate (`plusPtr` elementSize) p
        -- element stride is read off the first decoded element
        elementSize = mmsSize . head $ xs
        in xs

    foldr f z v = foldr f z (F.toList v)

    length = length
|
eeight/haskell-mms
|
src/Foreign/Mms/MappedVector.hs
|
bsd-3-clause
| 1,748
| 0
| 14
| 358
| 577
| 316
| 261
| -1
| -1
|
module Agon.Agon where
import Agon.Types
import Control.Lens
import Control.Monad.Reader
-- | Application monad: a reader over the global 'Agon' environment.
type AgonM = ReaderT Agon

-- | Project a field out of the environment through a lens.
-- (Idiomatic replacement for the @ask >>= return . view lens@ pattern.)
viewA lens = fmap (view lens) ask

-- | Run an 'AgonM' computation against an environment.
runAgonM = runReaderT
|
Feeniks/Agon
|
app/Agon/Agon.hs
|
bsd-3-clause
| 183
| 0
| 6
| 32
| 55
| 31
| 24
| 7
| 1
|
{-# LANGUAGE DeriveDataTypeable, RankNTypes, FlexibleInstances, FlexibleContexts,
KindSignatures, ScopedTypeVariables #-}
module Game.DeckBuild.Dominion.Lib where
{-
Contains boilerplate / useful monadic state transformation Game operations, as well
as non-monadic helper functions for making queries about a specific Game state.
TODO: figure out a good (if exists) nomenclature for splitting up the monadic operations
from the non-monadic ones (separate files?)
-}
import Language.DeckBuild.Syntax hiding (Card, cID, cType, cDescr, cCost)
import Game.DeckBuild.Dominion.Types
import Control.Monad.State
import Game.Sample.Sample
import Data.List (delete, find)
import Data.Char (toUpper)
import Examples.BaseQuote
-------------------------------------------------------------------------------
-- | Add @n@ coins to the current player's spendable money.
addMoney :: forall (m :: * -> *). MonadState Game m => Int -> m ()
addMoney n = modify $ \g -> g { p1 = (p1 g) { amtMoney = amtMoney (p1 g) + n } }
-- | Grant the current player @n@ additional actions.
addActions :: forall (m :: * -> *). MonadState Game m => Int -> m ()
addActions n = modify $ \g -> g { p1 = (p1 g) { numActions = numActions (p1 g) + n } }
-- | Grant the current player @n@ additional buys.
addBuys :: forall (m :: * -> *). MonadState Game m => Int -> m ()
addBuys n = modify $ \g -> g { p1 = (p1 g) { numBuys = numBuys (p1 g) + n } }
-- | Do nothing; used as the handler for effects with no immediate
-- game-state change.
nop :: forall (m :: * -> *). MonadState Game m => m ()
nop = return ()
-- | Remove a card from the game.  Currently a no-op.
trashCard :: forall (m :: * -> *). MonadState Game m => CardName -> m ()
trashCard c = nop -- TODO - source and destination
-- | Whether at least one copy of card @c@ remains in the supply piles
-- (each pile pairs a card name with its remaining count).  The hand-rolled
-- recursion is replaced by the equivalent 'any'.
canBuySupply :: [(CardName,Int)] -> CardName -> Bool
canBuySupply piles c = any (\(c', cnt) -> c' == c && cnt > 0) piles
-- | The current player can buy card @c@ when she can afford it and the
-- supply still has a copy.
canBuy :: Game -> CardName -> Bool
canBuy g c = affordable && inStock
  where
    affordable = cost c <= amtMoney (p1 g)
    inStock    = canBuySupply (piles (supply g)) c
-- | The current player can play card @c@ only if it is in her hand.
canPlay :: Game -> CardName -> Bool
canPlay g c = c `elem` cards (hand (p1 g))
-- Takes all of player #1's discarded cards and shuffles them back into her deck:
--shuffle :: forall m. MonadState Game m => m ()
shuffleCards :: forall (m :: * -> *). (MonadState Game m, MonadIO m) => m ()
shuffleCards = do
  g <- get
  -- The new deck is a shuffle of discard pile ++ remaining deck (note the
  -- remaining deck cards are reshuffled too); IO supplies the randomness.
  newDeck <- liftIO $ shuffleList $ ((cards . discardPile . p1) g) ++ ((cards . deck . p1) g)
  put g { p1 = (p1 g)
          { deck= ((deck.p1) g) {cards=newDeck}
          , discardPile=((discardPile.p1) g) {cards=[]}
          }
        }
-- Player #1 draws n cards from her deck
--draw :: Int -> State (GameState Game) ()
draw :: forall (m :: * -> *). (MonadState Game m, MonadIO m) => Int -> m ()
draw 0 = return ()
draw n = do
  g <- get
  let cs = (cards . deck . p1) g
  case () of
    _ | 0 == length cs -> do
        -- Deck exhausted: fold the discard pile back in, then retry.
        -- NOTE(review): if the discard pile is empty too this recurses
        -- forever -- presumably callers never over-draw; verify.
        shuffleCards
        draw n
      | otherwise -> do
        -- Move the top deck card into the hand, then draw the remainder.
        put (g { p1 = (p1 g) { hand = ((hand.p1) g) { cards = (head cs) : ((cards . hand . p1) g) }
                             , deck = ((deck.p1) g) { cards = tail $ cs } }})
        draw $ n - 1
-- Player #1 discards a specific card from her hand
-- TODO: Error handling when card is not in hand - probably use a Maybe
discard :: forall (m :: * -> *). MonadState Game m => CardName -> m ()
discard c = do
  g <- get
  -- Prepend the card to the discard pile and delete the first matching
  -- copy from the hand.  NOTE(review): if c is not actually in the hand it
  -- is still added to the discard pile, duplicating a card -- verify
  -- callers always check 'canPlay' first.
  let newDiscard = c : (cards.discardPile.p1) g
  let newHand = delete c ((cards.hand.p1) g)
  put $ g { p1 = (p1 g)
          { hand = ((hand.p1) g) {cards=newHand}
          , discardPile = ((discardPile.p1) g) {cards=newDiscard}
          }
        }
-- Player #1 discards all remaining cards from her hand and play
discardAll :: forall (m :: * -> *). MonadState Game m => m ()
discardAll = do
  g <- get
  -- End-of-turn cleanup: everything in hand and in play moves onto the
  -- discard pile.
  let newDiscard = (cards . hand . p1) g ++ (cards . inPlay . p1) g ++ (cards . discardPile . p1) g
  put $ g { p1 = (p1 g)
          { hand = ((hand.p1) g) {cards=[]}
          , inPlay = ((inPlay.p1) g) {cards=[]}
          , discardPile = ((discardPile.p1) g) {cards=newDiscard}
          }
        }
-- | Exchange the two players so that p1 always denotes the player whose
-- turn it currently is.
swapPlayers :: forall (m :: * -> *). MonadState Game m => m ()
swapPlayers = modify $ \g -> g { p1 = p2 g, p2 = p1 g }
-- Fold step used by 'gain': keep the accumulator pair unless the current
-- pile matches card c, in which case that pile with its count decremented
-- becomes the new accumulator.
findAndDecr c (c',cnt') (c'',cnt'') = if c'' == c then (c'',cnt'' - 1) else (c',cnt')
-- Player #1 buys card c, removing one from the supply and putting into her discard pile
gain :: forall (m :: * -> *). MonadState Game m => CardName -> m ()
gain c = do
  g <- get
  -- Locate c's pile and decrement it: the fold seeds with the first pile
  -- already decremented, and 'findAndDecr' replaces the accumulator with
  -- any later matching pile.  NOTE(review): if c is not in the supply at
  -- all, the *first* pile gets decremented instead -- presumably 'canBuy'
  -- is always checked before calling this; verify.
  let (c0,cnt0):ss = (piles . supply) g
  let newPilePair = foldl (findAndDecr c) (c0,cnt0 - 1) ss
  -- Drop c's old pile and put the decremented pair at the front.
  let newSupply = filter (\(c',_) -> c /= c') $ (piles . supply) g
  put $ g { supply=Supply { piles=newPilePair:newSupply }
          , p1 = (p1 g)
            { discardPile = ((discardPile.p1) g)
              { cards = c : ((cards . discardPile . p1) g)
              }
            , amtMoney = ((amtMoney.p1) g) - (cost c)
            }
          }
-- | Apply one primary card effect to the current player: add coins,
-- actions, or buys, or draw cards.  Victory points carry no immediate
-- in-turn effect, so they are a no-op here.  (The unused @g <- get@
-- binding of the original has been removed.)
doBasicEffect :: forall (m :: * -> *). (MonadIO m, MonadState Game m) => Effect -> m ()
doBasicEffect e =
  case effectType e of
    COINS -> addMoney $ (amount e)
    ACTIONS -> addActions $ (amount e)
    BUYS -> addBuys $ (amount e)
    CARDS -> draw $ (amount e)
    VICTORYPOINTS -> nop -- TODO: ???
-- | Play card @c@: move it from the current player's hand into play,
-- apply each of its primary effects, then run its scripted card effects
-- against the updated game state.  Fixes: the original's dead
-- @let c0:cs = ...@ binding is removed, and 'mapM_' replaces 'mapM'
-- since the effect results are discarded.
playCard :: forall (m :: * -> *). (MonadIO m, MonadState Game m) => CardName -> m ()
playCard c = do
  g <- get
  let newHand = delete c $ (cards . hand . p1) g
  put $ g { p1 = (p1 g)
          { hand = ((hand.p1) g) {cards=newHand}
          , inPlay = ((inPlay.p1) g) {cards=c : ((cards . inPlay . p1) g)}
          }
        }
  mapM_ doBasicEffect $ (primary.cDescr) (getCard kcs c)
  g' <- get
  (doCardEffects g') c
-- | The treasure cards of a hand.
filterMoney = filter isTreasure
-- | Everything in a hand except the treasure cards.
filterNotMoney = filter (not . isTreasure)
-- | Total coin value of the treasure cards in the given hand.
-- NOTE(review): the 'undefined' guard fires for any card whose primary
-- effect list is empty, not only treasures -- same as the original.
-- ('null' replaces the original O(n) @length ... == 0@ test.)
countMoney :: [CardName] -> Int
countMoney [] = 0
countMoney (c:cs)
  | null ((primary.cDescr) (getCard kcs c)) = undefined -- TODO: invalid treasure card
  | isTreasure c = (amount.head.primary.cDescr) (getCard kcs c) + countMoney cs
  | otherwise = countMoney cs
--countMoney (COPPER:xs) = 1 + countMoney xs
--countMoney (SILVER:xs) = 2 + countMoney xs
--countMoney (GOLD:xs) = 3 + countMoney xs
--countMoney (x:xs) = countMoney xs
{-
-- Player #1 players all of her money:
playMoney :: forall (m :: * -> *). MonadState Game m => m ()
playMoney = do
g <- get
let newInPlay = (filterMoney $ (cards . hand . p1) g) ++ (cards . inPlay . p1) g
let newHand = filterNotMoney $ (cards . hand . p1) g
let newAmtMoney = ((amtMoney . p1) g) + (countMoney newInPlay)
put $ g { p1 = (p1 g)
{ inPlay = ((inPlay.p1) g) { cards=newInPlay }
, hand = ((hand.p1) g) { cards=newHand }
, amtMoney = newAmtMoney
}
}
-}
-- | Consume @n@ of the current player's remaining buys.
decrBuys :: forall (m :: * -> *). (MonadState Game m, MonadIO m) => Int -> m ()
decrBuys n = modify $ \g -> g { p1 = (p1 g) { numBuys = numBuys (p1 g) - n } }
-- | Total victory-point value of the cards in the given pile.
-- NOTE(review): the 'undefined' guard fires for any card whose primary
-- effect list is empty, not only victory cards -- same as the original.
-- ('null' replaces the original O(n) @length ... == 0@ test.)
countVictory :: [CardName] -> Int
countVictory [] = 0
countVictory (c:cs)
  | null ((primary.cDescr) (getCard kcs c)) = undefined -- TODO: invalid victory card
  | isVictory c = (amount.head.primary.cDescr) (getCard kcs c) + countVictory cs
  | otherwise = countVictory cs
--countVictory (ESTATE:xs) = 1 + countVictory xs
--countVictory (DUCHY:xs) = 3 + countVictory xs
--countVictory (PROVINCE:xs) = 6 + countVictory xs
--countVictory (x:xs) = 0 + countVictory xs
-- | The game ends when the scenario's ending condition holds for the
-- current state, or when the configured turn limit has been exceeded.
gameOver :: forall (m :: * -> *). MonadState Game m => m Bool
gameOver = do
  g <- get
  let turnsExhausted = turn g >= 0 && turn g > maxTurns g
  return (endCndn g g || turnsExhausted)
|
cronburg/deckbuild
|
Game/DeckBuild/Dominion/Lib.hs
|
bsd-3-clause
| 7,909
| 0
| 26
| 2,222
| 2,791
| 1,484
| 1,307
| 122
| 5
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Pages.BuildExistsPage where
import Aria.Types
import Aria.Routes
import HtmlTemplates
import Web.Routes.PathInfo (toPathInfo)
import Control.Lens
import Data.Maybe (fromJust)
import Data.Data
import Data.Text
import Data.Time (UTCTime(..))
import Text.Blaze ((!), string)
import Data.Monoid ((<>))
import Control.Monad
import qualified Aria.Scripts as AS
import qualified Data.List as DL
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import qualified Text.Blaze.Bootstrap as BH
-- | Page shown when the requested build already exists and is the
-- currently selected one; renders a jumbotron with a link back to the
-- racer's page.
buildExistsPage :: RacerId -> AriaWebApp H.Html
buildExistsPage rid =
  appTemplate "Build Exists" $
  do BH.jumbotron
       (H.string "Build Already Exists and Is Already Selected")
       (racerPageButton rid "Go back")
|
theNerd247/ariaRacer
|
arweb/app/Pages/BuildExistsPage.hs
|
bsd-3-clause
| 898
| 0
| 11
| 122
| 204
| 128
| 76
| 28
| 1
|
module Channel where
import qualified Prelude
import Feldspar.Data.Vector
import Feldspar.Multicore
primitives :: Multicore ()
primitives = do
c0 <- newChan hostId 0 one
c1 <- newChan 0 1 one
c2 <- newChan 1 hostId one
onHost $ do
onCore 0 (f c0 c1)
onCore 1 (g c1 c2)
forever $ do
item <- lift $ fget stdin
writeChan c0 item
slot <- newSlot c2
readChan c2 slot
item :: Data Int32 <- getSlot slot
printf "> %d\n" item
closeChan c0
closeChan c2
-- | Pipeline stage run on core 0: forever take one value from the input
-- channel and forward it, incremented by one, on the output channel.
f :: CoreChan (Data Int32) -> CoreChan (Data Int32) -> CoreComp ()
f input output = forever $ do
    slot <- newSlot input
    readChan input slot
    elem <- getSlot slot
    void $ writeChan output (elem + 1)
-- | Pipeline stage run on core 1: forever take one value from the input
-- channel and forward it, doubled, on the output channel.
g :: CoreChan (Data Int32) -> CoreChan (Data Int32) -> CoreComp ()
g input output = forever $ do
    slot <- newSlot input
    readChan input slot
    elem <- getSlot slot
    void $ writeChan output (elem * 2)
------------------------------------------------------------
vectors :: Length -> Multicore ()
vectors vecSize = do
let chanSize = Prelude.fromIntegral vecSize
c0 <- newChan hostId 0 chanSize
c1 <- newChan 0 1 chanSize
c2 <- newChan 1 hostId chanSize
onHost $ do
onCore 0 (inc c0 c1)
onCore 1 (twice c1 c2)
while (return $ true) $ do
arr <- newArr $ value vecSize
for (0, 1, Excl $ value vecSize) $ \i -> do
item <- lift $ fget stdin
setArr arr i item
lenRef <- initRef $ value vecSize
let input = Store (lenRef, arr)
writeChan c0 input
slot <- newSlot c2
readChan c2 slot
store :: Store (DPull Int32) <- getSlot slot
output <- unsafeFreezeStore store
for (0, 1, Excl $ value vecSize) $ \i -> do
let item = output ! i
printf "> %d\n" item
closeChan c0
closeChan c2
inc :: CoreChan (Store (DPull Int32)) -> CoreChan (Store (DPull Int32)) -> CoreComp ()
inc inp out = forever $ do
slot <- newSlot inp
readChan inp slot
store <- getSlot slot
v <- unsafeFreezeStore store
v' <- initStore $ fmap (+1) v
writeChan out v'
twice :: CoreChan (Store (DPull Int32)) -> CoreChan (Store (DPull Int32)) -> CoreComp ()
twice inp out = forever $ do
slot <- newSlot inp
readChan inp slot
store <- getSlot slot
v <- unsafeFreezeStore store
v' <- initStore $ fmap (*2) v
writeChan out v'
------------------------------------------------------------
test = primitives
-- test = vectors 5
testAll = do
icompileAll `onParallella` test
let modules = compileAll `onParallella` test
forM_ modules $ \(name, contents) -> do
let name' = if name Prelude.== "main" then "host" else name
writeFile (name' Prelude.++ ".c") contents
runTestCompiled = runCompiled' def opts test
where
opts = def
{ externalFlagsPre = [ "-I../imperative-edsl/include"
, "../imperative-edsl/csrc/chan.c"]
, externalFlagsPost = [ "-lpthread" ]
}
|
kmate/raw-feldspar-mcs
|
examples/Channel.hs
|
bsd-3-clause
| 3,232
| 0
| 20
| 1,060
| 1,147
| 531
| 616
| -1
| -1
|
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
{-|
-}
module LivingFlame.Monad (
LivingFlameEnv (..), LivingFlame,
mkDefaultEnv,
runLivingFlame,
--
ask, asks,
--
putLog, rotateLog,
--
getConfig, getConfigDefault) where
import qualified Blaze.ByteString.Builder as BZ
import qualified Blaze.ByteString.Builder.Char.Utf8 as BZ
import Control.Applicative ((<$>))
import Control.Monad.Trans (liftIO)
import Control.Monad.Trans.Reader (ReaderT, asks, ask, runReaderT)
import Control.Monad.Trans.Resource (ResourceT, runResourceT)
import Data.Monoid (mappend)
import Control.Concurrent.MVar (MVar, withMVar, newMVar, modifyMVar_)
import Control.Concurrent.Chan.Lifted (Chan, newChan)
import Control.Concurrent (ThreadId)
import Data.Text (Text)
import Data.Time (UTCTime,formatTime,getCurrentTime)
import qualified Data.Configurator as CF
import qualified Data.Configurator.Types as CF
import System.IO (Handle, openFile, IOMode (..), hClose)
import System.Locale (defaultTimeLocale)
import System.FilePath ((</>))
import System.Directory (setCurrentDirectory)
import System.Log.FastLogger
data LivingFlameEnv
= LivingFlameEnv {
systemBaseDir :: FilePath,
systemStartupTimestamp :: UTCTime,
systemLogSpec :: FileLogSpec,
systemLogHandle :: MVar Handle,
systemRunningMonitors :: MVar [ThreadId],
systemMonitorChan :: Chan Text,
systemConfig :: (CF.Config, ThreadId)
}
type LivingFlame = ReaderT LivingFlameEnv (ResourceT IO)
-- | Build the default runtime environment rooted at @baseDir@.
-- Side effects: changes the process working directory, opens the log file
-- in append mode, and starts the auto-reloading configuration watcher.
mkDefaultEnv :: String -> IO LivingFlameEnv
mkDefaultEnv baseDir = do
  setCurrentDirectory baseDir
  tm <- getCurrentTime
  let logpath = baseDir </> "logs" </> "LivingFlame.log"
  -- Assemble the environment pieces (log handle, monitor state, config).
  logh <- openFile logpath AppendMode >>= newMVar
  mons <- newMVar []
  monchan <- newChan
  cfg <- CF.autoReload CF.autoConfig [CF.Required "config"]
  return $ LivingFlameEnv {
      systemBaseDir = baseDir,
      systemStartupTimestamp = tm,
      systemLogSpec = FileLogSpec {
          log_file = logpath,
          log_file_size = 128 * 1024 * 1024,
          log_backup_number = 9
        },
      systemLogHandle = logh,
      systemRunningMonitors = mons,
      systemMonitorChan = monchan,
      systemConfig = cfg
    }
runLivingFlame :: LivingFlame a -> LivingFlameEnv -> IO a
runLivingFlame m env = runResourceT $ runReaderT m env
{-| Look up a value in the (auto-reloading) configuration; 'Nothing' when
    the key is absent. -}
getConfig :: CF.Configured a => Text -> LivingFlame (Maybe a)
getConfig x = do
  (cfg, _) <- asks systemConfig
  liftIO $ CF.lookup cfg x
{-| Like 'getConfig', but yields the given default when the key is absent. -}
getConfigDefault :: CF.Configured a => a -> Text -> LivingFlame a
getConfigDefault dft x = do
  (cfg, _) <- asks systemConfig
  liftIO $ CF.lookupDefault dft cfg x
{- Logging
-}
{-| Append a timestamped line to the operation log.  The handle 'MVar'
    serializes concurrent writers. -}
putLog :: BZ.Builder -> LivingFlame ()
putLog x = do
  stamp <- liftIO $ BZ.fromString <$> (formatTime defaultTimeLocale "%F %T " <$> getCurrentTime)
  mv <- asks systemLogHandle
  liftIO $ withMVar mv (\h -> hPutBuilder h $ stamp `mappend` x `mappend` BZ.fromString "\n")
{-| Rotate the operation log: close the current handle, rotate the files
    according to the log spec, then reopen the log for appending. -}
rotateLog :: LivingFlame ()
rotateLog = do
  mv <- asks systemLogHandle
  ls <- asks systemLogSpec
  liftIO $ modifyMVar_ mv (\h -> hClose h >> rotate ls >> openFile (log_file ls) AppendMode)
|
seagull-kamome/living-flame
|
src/LivingFlame/Monad.hs
|
bsd-3-clause
| 3,414
| 4
| 14
| 688
| 927
| 519
| 408
| 77
| 1
|
{-# language CPP #-}
-- No documentation found for Chapter "ColorComponentFlagBits"
module Vulkan.Core10.Enums.ColorComponentFlagBits ( ColorComponentFlags
, ColorComponentFlagBits( COLOR_COMPONENT_R_BIT
, COLOR_COMPONENT_G_BIT
, COLOR_COMPONENT_B_BIT
, COLOR_COMPONENT_A_BIT
, ..
)
) where
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import GHC.Show (showString)
import Numeric (showHex)
import Vulkan.Zero (Zero)
import Data.Bits (Bits)
import Data.Bits (FiniteBits)
import Foreign.Storable (Storable)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Vulkan.Core10.FundamentalTypes (Flags)
type ColorComponentFlags = ColorComponentFlagBits
-- | VkColorComponentFlagBits - Bitmask controlling which components are
-- written to the framebuffer
--
-- = Description
--
-- The color write mask operation is applied regardless of whether blending
-- is enabled.
--
-- The color write mask operation is applied only if
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#framebuffer-color-write-enable Color Write Enable>
-- is enabled for the respective attachment. Otherwise the color write mask
-- is ignored and writes to all components of the attachment are disabled.
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'ColorComponentFlags'
newtype ColorComponentFlagBits = ColorComponentFlagBits Flags
deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits)
-- | 'COLOR_COMPONENT_R_BIT' specifies that the R value is written to the
-- color attachment for the appropriate sample. Otherwise, the value in
-- memory is unmodified.
pattern COLOR_COMPONENT_R_BIT = ColorComponentFlagBits 0x00000001
-- | 'COLOR_COMPONENT_G_BIT' specifies that the G value is written to the
-- color attachment for the appropriate sample. Otherwise, the value in
-- memory is unmodified.
pattern COLOR_COMPONENT_G_BIT = ColorComponentFlagBits 0x00000002
-- | 'COLOR_COMPONENT_B_BIT' specifies that the B value is written to the
-- color attachment for the appropriate sample. Otherwise, the value in
-- memory is unmodified.
pattern COLOR_COMPONENT_B_BIT = ColorComponentFlagBits 0x00000004
-- | 'COLOR_COMPONENT_A_BIT' specifies that the A value is written to the
-- color attachment for the appropriate sample. Otherwise, the value in
-- memory is unmodified.
pattern COLOR_COMPONENT_A_BIT = ColorComponentFlagBits 0x00000008
-- Constructor name reported by the generic enum Show/Read helpers.
conNameColorComponentFlagBits :: String
conNameColorComponentFlagBits = "ColorComponentFlagBits"
-- Common prefix stripped from / prepended to the bit names below.
enumPrefixColorComponentFlagBits :: String
enumPrefixColorComponentFlagBits = "COLOR_COMPONENT_"
-- Known bits and their prefix-stripped printed names.
showTableColorComponentFlagBits :: [(ColorComponentFlagBits, String)]
showTableColorComponentFlagBits =
  [ (COLOR_COMPONENT_R_BIT, "R_BIT")
  , (COLOR_COMPONENT_G_BIT, "G_BIT")
  , (COLOR_COMPONENT_B_BIT, "B_BIT")
  , (COLOR_COMPONENT_A_BIT, "A_BIT")
  ]
-- Unknown bits are rendered as a hex literal via the fallback shower.
instance Show ColorComponentFlagBits where
  showsPrec = enumShowsPrec enumPrefixColorComponentFlagBits
                            showTableColorComponentFlagBits
                            conNameColorComponentFlagBits
                            (\(ColorComponentFlagBits x) -> x)
                            (\x -> showString "0x" . showHex x)
instance Read ColorComponentFlagBits where
  readPrec = enumReadPrec enumPrefixColorComponentFlagBits
                          showTableColorComponentFlagBits
                          conNameColorComponentFlagBits
                          ColorComponentFlagBits
|
expipiplus1/vulkan
|
src/Vulkan/Core10/Enums/ColorComponentFlagBits.hs
|
bsd-3-clause
| 4,052
| 1
| 10
| 1,049
| 419
| 259
| 160
| -1
| -1
|
module FibLargeMod where
import Data.Char
import Control.Applicative
import System.IO
-- | Read n and m from stdin and print F(n) modulo m.
-- (do-notation replaces the @>>= \_ ->@ chain; 'print' replaces
-- @putStrLn $ show@.)
main :: IO ()
main = do
  hSetBuffering stdin NoBuffering
  n <- nextNum
  m <- nextNum
  print (fibsModN n m)
-- | F(n) modulo m, computed by reducing n modulo the Pisano period of m
-- so that only a short prefix of the sequence is ever materialized.
fibsModN n m = fibsMod m !! fromInteger (n `rem` pisanoPeriod m)

-- | Length of the repeating cycle of the Fibonacci sequence taken
-- modulo m.  Walks the sequence from F2 onward (two elements at a time,
-- with a one-step adjustment around zeros) until the "0, 1" restart
-- marker is found; the accumulator is forced to stay strict.
pisanoPeriod m = go (drop 2 (fibsMod m)) 0
  where
    go (0:1:_) acc = acc + 2
    go (_:0:t) acc = let acc' = acc + 1 in acc' `seq` go (0:t) acc'
    go (_:_:t) acc = let acc' = acc + 2 in acc' `seq` go t acc'

-- | The Fibonacci sequence with every element reduced modulo m.
fibsMod m = map (`mod` m) fibs

-- | The infinite Fibonacci sequence: 0, 1, 1, 2, 3, 5, ...
fibs = scanl (+) 0 (1 : fibs)
-- | Read the next run of digit characters from stdin as a number,
-- skipping any leading non-digit characters.
nextNum = nextNum' ""
-- | Worker: accumulates digits (most recent first) until a non-digit is
-- read, then returns the parsed number.
nextNum' acc = do
  c <- getChar
  if isDigit c
    then nextNum' (c : acc)
    else if null acc
      then nextNum' acc
      else pure (read (reverse acc))
|
msosnicki/algorithms
|
app/week2/FibLargeMod.hs
|
bsd-3-clause
| 785
| 0
| 17
| 220
| 401
| 205
| 196
| 27
| 3
|
{-# LANGUAGE MagicHash, UnboxedTuples #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, TypeFamilies, DataKinds #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -pgmP cpphs -optP-traditional -optP--cpp #-}
#if defined(ghcjs_HOST_OS)
{-# LANGUAGE ScopedTypeVariables, TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
#else
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Geometry.Types
-- Copyright : Copyright (C) 2015 Artem M. Chirkin <chirkin@arch.ethz.ch>
-- License : BSD3
--
-- Maintainer : Artem M. Chirkin <chirkin@arch.ethz.ch>
-- Stability : Experimental
--
--
-----------------------------------------------------------------------------
module Data.Geometry.Types where
import Data.Geometry.VectorMath
#if defined(ghcjs_HOST_OS)
import Data.Coerce (coerce)
import GHC.TypeLits
import Data.Geometry.Prim.JSNum
{-# INLINE fromHom #-}
foreign import javascript unsafe "$r = $1.slice(); var l = $r.pop(); if(l !== 0){$r = $r.map(function(e){return e/l;});}"
fromHom :: Vector (n+1) t -> Vector n t
{-# INLINE unit #-}
foreign import javascript unsafe "var l = Math.hypot.apply(null,$1); if(l !== 0){$r = $1.map(function(e){return e/l;});} else {$r = $1.slice();}"
unit :: Vector n t -> Vector n t
-- | Cross-product of two 3D vectors
{-# INLINE cross #-}
cross :: Vector 3 t -> Vector 3 t -> Vector 3 t
cross a b = coerce $ js_cross (coerce a) (coerce b)
-- | Determinant of a matrix composed of two 2D vectors
-- NOTE(review): reuses js_cross -- the scalar 2D "cross product" equals
-- the 2x2 determinant; presumably the JS helper handles length-2 inputs
-- by returning that scalar -- verify against the FFI implementation.
{-# INLINE det2 #-}
det2 :: JSNum t => Vector 2 t -> Vector 2 t -> t
det2 a b = toNum $ js_cross (coerce a) (coerce b)
{-# INLINE [2] resizeVector #-}
resizeVector :: (KnownNat n, KnownNat m) => Vector n t -> Vector m t
resizeVector v = r
where r = coerce $ resizeJSVec (coerce v) (dim r)
{-# RULES "resizeVector/id" resizeVector = id :: Vector n t -> Vector n t #-}
{-# INLINE [2] resizeMatrix #-}
resizeMatrix :: (KnownNat n, KnownNat m) => Matrix n t -> Matrix m t
resizeMatrix m = r
where r = coerce $ resizeJSMat (coerce m) (dim m) (dim r)
{-# RULES "resizeMatrix/id" resizeMatrix = id :: Matrix n t -> Matrix n t #-}
instance (KnownNat n, JSNum a) => VectorMath n a where
{-# SPECIALIZE instance VectorMath 4 Int #-}
{-# SPECIALIZE instance VectorMath 4 Float #-}
{-# SPECIALIZE instance VectorMath 4 Double #-}
{-# INLINE broadcastVector #-}
broadcastVector a = coerce $ broadcastJSVec (fromNum a) (dim (undefined :: Vector n a))
{-# INLINE broadcastMatrix #-}
broadcastMatrix a = coerce $ broadcastJSVec (fromNum a) (n*n)
where n = dim (undefined :: Vector n a)
{-# INLINE eye #-}
eye = coerce . eyeJSMat $ dim (undefined :: Matrix n a)
{-# INLINE diag #-}
diag x = coerce . diagJSMat (fromNum x) $ dim (undefined :: Matrix n a)
{-# INLINE transpose #-}
transpose m = coerce . transposeJSMat (coerce m) . dim $ m
{-# INLINE det #-}
det m = toNum . detJSMat (coerce m) . dim $ m
{-# INLINE trace #-}
trace m = toNum . traceJSMat (coerce m) . dim $ m
{-# INLINE fromDiag #-}
fromDiag m = coerce . fromDiagJSMat (coerce m) . dim $ m
{-# INLINE toDiag #-}
toDiag = coerce . toDiagJSMat . coerce
{-# INLINE (.*.) #-}
a .*. b = coerce $ dotBJSVec (coerce a) (coerce b)
{-# INLINE dot #-}
dot a b = toNum $ dotJSVec (coerce a) (coerce b)
{-# INLINE indexVector #-}
indexVector i v = toNum $ indexJSVec i (coerce v)
{-# INLINE indexMatrix #-}
indexMatrix i j m = toNum $ indexJSVec (i + j * dim m) (coerce m)
{-# INLINE normL1 #-}
normL1 = toNum . js_normL1 . coerce
{-# INLINE normL2 #-}
normL2 = toNum . js_normL2 . coerce
{-# INLINE normLPInf #-}
normLPInf = toNum . js_normLPInf . coerce
{-# INLINE normLNInf #-}
normLNInf = toNum . js_normLNInf . coerce
{-# INLINE normLP #-}
normLP p = toNum . js_normLP p . coerce
instance JSNum a => Vector4Math a where
{-# SPECIALIZE instance Vector4Math Int #-}
{-# SPECIALIZE instance Vector4Math Float #-}
{-# SPECIALIZE instance Vector4Math Double #-}
{-# INLINE vector4 #-}
vector4 a b c d = coerce $ jsVector4 (fromNum a) (fromNum b) (fromNum c) (fromNum d)
{-# INLINE matrix4x4 #-}
matrix4x4 a b c d = coerce $ jsMatrix4 (coerce a) (coerce b) (coerce c) (coerce d)
{-# INLINE unpackV4 #-}
unpackV4 v = case unpackJSVec4 (coerce v) of
(# a, b, c, d #) -> ( toNum a, toNum b, toNum c, toNum d )
{-# INLINE colsOfM4 #-}
colsOfM4 m = case unpackJSVec4 $ matColsJS (coerce m) 4 of
(# a, b, c, d #) -> ( coerce a, coerce b, coerce c, coerce d )
{-# INLINE rowsOfM4 #-}
rowsOfM4 m = case unpackJSVec4 $ matRowsJS (coerce m) 4 of
(# a, b, c, d #) -> ( coerce a, coerce b, coerce c, coerce d )
instance JSNum a => Vector3Math a where
{-# SPECIALIZE instance Vector3Math Int #-}
{-# SPECIALIZE instance Vector3Math Float #-}
{-# SPECIALIZE instance Vector3Math Double #-}
{-# INLINE vector3 #-}
vector3 a b c = coerce $ jsVector3 (fromNum a) (fromNum b) (fromNum c)
{-# INLINE matrix3x3 #-}
matrix3x3 a b c = coerce $ jsMatrix3 (coerce a) (coerce b) (coerce c)
{-# INLINE unpackV3 #-}
unpackV3 v = case unpackJSVec3 (coerce v) of
(# a, b, c #) -> ( toNum a, toNum b, toNum c )
{-# INLINE colsOfM3 #-}
colsOfM3 m = case unpackJSVec3 $ matColsJS (coerce m) 3 of
(# a, b, c #) -> ( coerce a, coerce b, coerce c )
{-# INLINE rowsOfM3 #-}
rowsOfM3 m = case unpackJSVec3 $ matRowsJS (coerce m) 3 of
(# a, b, c #) -> ( coerce a, coerce b, coerce c )
instance JSNum a => Vector2Math a where
{-# SPECIALIZE instance Vector2Math Int #-}
{-# SPECIALIZE instance Vector2Math Float #-}
{-# SPECIALIZE instance Vector2Math Double #-}
{-# INLINE vector2 #-}
vector2 a b = coerce $ jsVector2 (fromNum a) (fromNum b)
{-# INLINE matrix2x2 #-}
matrix2x2 a b = coerce $ jsMatrix2 (coerce a) (coerce b)
{-# INLINE unpackV2 #-}
unpackV2 v = case unpackJSVec2 (coerce v) of
(# a, b #) -> ( toNum a, toNum b )
{-# INLINE colsOfM2 #-}
colsOfM2 m = case unpackJSVec2 $ matColsJS (coerce m) 2 of
(# a, b #) -> ( coerce a, coerce b )
{-# INLINE rowsOfM2 #-}
rowsOfM2 m = case unpackJSVec2 $ matRowsJS (coerce m) 2 of
(# a, b #) -> ( coerce a, coerce b )
instance (JSNum a, KnownNat n) => MatrixProduct Matrix n a where
prod a b = coerce $ prodJSMM (coerce a) (coerce b) (dim b)
instance (JSNum a, KnownNat n) => MatrixProduct Vector n a where
prod a b = coerce $ prodJSMV (coerce a) (coerce b)
instance JSNum a => VectorFracMath 4 a where
{-# SPECIALIZE instance VectorFracMath 4 Float #-}
{-# SPECIALIZE instance VectorFracMath 4 Double #-}
{-# INLINE inverse #-}
inverse = coerce . inverseJSM4 . coerce
instance JSNum a => VectorFracMath 3 a where
{-# SPECIALIZE instance VectorFracMath 3 Float #-}
{-# SPECIALIZE instance VectorFracMath 3 Double #-}
{-# INLINE inverse #-}
inverse = coerce . inverseJSM3 . coerce
instance JSNum a => VectorFracMath 2 a where
{-# SPECIALIZE instance VectorFracMath 2 Float #-}
{-# SPECIALIZE instance VectorFracMath 2 Double #-}
{-# INLINE inverse #-}
inverse = coerce . inverseJSM2 . coerce
#else
import GHC.Exts
import GHC.Int
import Foreign.C.Types
import Data.Geometry.Prim.Int32X4
import Data.Geometry.Prim.FloatX3
import Data.Geometry.Prim.FloatX4
#define emptyc(x) x
-- params: type, vectortype, Vector constr, Matrix constr
-- , Elem constr, num ending, plusOp, Elem newtype
#define VECTORMATH4(T,VT,VC,MC,EC,e,pOp, EC2) \
instance VectorMath 4 T where { \
data Vector 4 T = VC VT; \
data Matrix 4 T = MC VT VT VT VT; \
{-# INLINE eye #-}; \
eye = MC (pack/**/VT (# 1/**/e, 0/**/e, 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, 1/**/e, 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, 0/**/e, 1/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, 0/**/e, 0/**/e, 1/**/e #)); \
{-# INLINE diag #-}; \
diag (EC2(EC x)) = MC (pack/**/VT (# x , 0/**/e, 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, x , 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, 0/**/e, x , 0/**/e #)) \
(pack/**/VT (# 0/**/e, 0/**/e, 0/**/e, x #)); \
{-# INLINE transpose #-}; \
transpose (MC c1 c2 c3 c4) = case transposeM/**/VT c1 c2 c3 c4 of \
{(# r1, r2, r3, r4 #) -> MC r1 r2 r3 r4}; \
{-# INLINE det #-}; \
det (MC c1 c2 c3 c4) = case unpack/**/VT (detM/**/VT c1 c2 c3 c4) of \
{(# r1, _, _, _ #) -> EC2(EC r1)}; \
{-# INLINE trace #-}; \
trace (MC a1 a2 a3 a4) = case (# unpack/**/VT a1 \
, unpack/**/VT a2 \
, unpack/**/VT a3 \
, unpack/**/VT a4 #) of \
{(#(# x11, _ , _ , _ #) \
,(# _ , x22, _ , _ #) \
,(# _ , _ , x33, _ #) \
,(# _ , _ , _ , x44 #) \
#) -> EC2(EC (x11 pOp x22 pOp x33 pOp x44))}; \
{-# INLINE fromDiag #-}; \
fromDiag (MC a1 a2 a3 a4) = case (# unpack/**/VT a1 \
, unpack/**/VT a2 \
, unpack/**/VT a3 \
, unpack/**/VT a4 #) of \
{(#(# x11, _ , _ , _ #) \
,(# _ , x22, _ , _ #) \
,(# _ , _ , x33, _ #) \
,(# _ , _ , _ , x44 #) \
#) -> VC (pack/**/VT (# x11, x22, x33, x44 #))}; \
{-# INLINE toDiag #-}; \
toDiag (VC a) = case unpack/**/VT a of \
{(# x11, x22, x33, x44 #) -> MC (pack/**/VT (# x11 , 0/**/e, 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, x22 , 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, 0/**/e, x33 , 0/**/e #)) \
(pack/**/VT (# 0/**/e, 0/**/e, 0/**/e, x44 #))}; \
{-# INLINE (.*.) #-}; \
VC a .*. VC b = VC (dot/**/VT a b); \
{-# INLINE dot #-} ; \
dot (VC a) (VC b) = case unpack/**/VT (dot/**/VT a b) of \
{(# r1, _, _, _ #) -> EC2(EC r1)} }
VECTORMATH4(Int32,Int32X4#,V4I32,M4I32,I32#,#,+#, emptyc)
VECTORMATH4(Int,Int32X4#,V4I,M4I,I#,#,+#, emptyc)
VECTORMATH4(CInt,Int32X4#,V4CI,M4CI,I32#,#,+#,CInt)
VECTORMATH4(Float,FloatX4#,V4F,M4F,F#,.0#,`plusFloat#`,emptyc)
VECTORMATH4(CFloat,FloatX4#,V4CF,M4CF,F#,.0#,`plusFloat#`,CFloat)
-- params: type, vectortype, Vector constr, Matrix constr, Elem constr, Elem newtype
#define VECTOR4MATH(T,VT,VC,MC,EC,EC2) \
instance Vector4Math T where { \
{-# INLINE vector4 #-}; \
vector4 (EC2(EC x)) (EC2(EC y)) (EC2(EC z)) (EC2(EC t)) \
= VC (pack/**/VT (# x, y, z, t #)); \
{-# INLINE matrix4x4 #-}; \
matrix4x4 (VC c1) (VC c2) (VC c3) (VC c4) = MC c1 c2 c3 c4 }
VECTOR4MATH(Int32,Int32X4#,V4I32,M4I32,I32#,emptyc)
VECTOR4MATH(Int,Int32X4#,V4I,M4I,I#,emptyc)
VECTOR4MATH(CInt,Int32X4#,V4CI,M4CI,I32#,CInt)
VECTOR4MATH(Float,FloatX4#,V4F,M4F,F#,emptyc)
VECTOR4MATH(CFloat,FloatX4#,V4CF,M4CF,F#,CFloat)
#define MATRIXPRODUCT4(T,VT,VC,MC) \
instance MatrixProduct Matrix 4 T where { \
{-# INLINE prod #-}; \
prod (MC a1 a2 a3 a4) (MC b1 b2 b3 b4) \
= case prodMM/**/VT a1 a2 a3 a4 b1 b2 b3 b4 of \
{(# r1, r2, r3, r4 #) -> MC r1 r2 r3 r4}}; \
instance MatrixProduct Vector 4 T where { \
{-# INLINE prod #-}; \
prod (MC a1 a2 a3 a4) (VC b) = VC (prodMV/**/VT a1 a2 a3 a4 b) }
MATRIXPRODUCT4(Int32,Int32X4#,V4I32,M4I32)
MATRIXPRODUCT4(Int,Int32X4#,V4I,M4I)
MATRIXPRODUCT4(CInt,Int32X4#,V4CI,M4CI)
MATRIXPRODUCT4(Float,FloatX4#,V4F,M4F)
MATRIXPRODUCT4(CFloat,FloatX4#,V4CF,M4CF)
#define VECTORFRACMATH4(T,VT,VC,MC) \
instance VectorFracMath 4 T where { \
{-# INLINE inverse #-}; \
inverse (MC c1 c2 c3 c4) = case inverseM/**/VT c1 c2 c3 c4 of \
(# r1, r2, r3, r4 #) -> MC r1 r2 r3 r4 }
VECTORFRACMATH4(Float,FloatX4#,V4F,M4F)
VECTORFRACMATH4(CFloat,FloatX4#,V4CF,M4CF)
-- params: type, vectortype, Vector constr, Matrix constr
-- , Elem constr, num ending, plusOp, Elem newtype
#define VECTORMATH3(T,VT,VC,MC,EC,e,pOp, EC2) \
instance VectorMath 3 T where { \
data Vector 3 T = VC VT; \
data Matrix 3 T = MC VT VT VT; \
{-# INLINE eye #-}; \
eye = MC (pack/**/VT (# 1/**/e, 0/**/e, 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, 1/**/e, 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, 0/**/e, 1/**/e, 0/**/e #)); \
{-# INLINE diag #-}; \
diag (EC2(EC x)) = MC (pack/**/VT (# x , 0/**/e, 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, x , 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, 0/**/e, x , 0/**/e #)); \
{-# INLINE transpose #-}; \
transpose (MC c1 c2 c3) = case transposeM/**/VT c1 c2 c3 of \
{(# r1, r2, r3 #) -> MC r1 r2 r3}; \
{-# INLINE det #-}; \
det (MC c1 c2 c3) = case unpack/**/VT (detM/**/VT c1 c2 c3) of \
{(# r1, _, _, _ #) -> EC2(EC r1)}; \
{-# INLINE trace #-}; \
trace (MC a1 a2 a3) = case (# unpack/**/VT a1 \
, unpack/**/VT a2 \
, unpack/**/VT a3 #) of \
{(#(# x11, _ , _ , _ #) \
,(# _ , x22, _ , _ #) \
,(# _ , _ , x33, _ #) \
#) -> EC2(EC (x11 pOp x22 pOp x33))}; \
{-# INLINE fromDiag #-}; \
fromDiag (MC a1 a2 a3) = case (# unpack/**/VT a1 \
, unpack/**/VT a2 \
, unpack/**/VT a3 #) of \
{(#(# x11, _ , _ , _ #) \
,(# _ , x22, _ , _ #) \
,(# _ , _ , x33, _ #) \
#) -> VC (pack/**/VT (# x11, x22, x33, 0/**/e #))}; \
{-# INLINE toDiag #-}; \
toDiag (VC a) = case unpack/**/VT a of \
{(# x11, x22, x33, _ #) -> MC (pack/**/VT (# x11 , 0/**/e, 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, x22 , 0/**/e, 0/**/e #)) \
(pack/**/VT (# 0/**/e, 0/**/e, x33 , 0/**/e #))}; \
{-# INLINE (.*.) #-}; \
VC a .*. VC b = VC (dot/**/VT a b); \
{-# INLINE dot #-} ; \
dot (VC a) (VC b) = case unpack/**/VT (dot/**/VT a b) of \
{(# r1, _, _, _ #) -> EC2(EC r1)} }
VECTORMATH3(Float,FloatX3#,V3F,M3F,F#,.0#,`plusFloat#`,emptyc)
VECTORMATH3(CFloat,FloatX3#,V3CF,M3CF,F#,.0#,`plusFloat#`,CFloat)
-- params: type, vectortype, Vector constr, Matrix constr, Elem constr, Elem newtype
#define VECTOR3MATH(T,VT,VC,MC,EC,EC2,e) \
instance Vector3Math T where { \
{-# INLINE vector3 #-}; \
vector3 (EC2(EC x)) (EC2(EC y)) (EC2(EC z)) \
= VC (pack/**/VT (# x, y, z, 0/**/e #)); \
{-# INLINE matrix3x3 #-}; \
matrix3x3 (VC c1) (VC c2) (VC c3) = MC c1 c2 c3; \
{-# INLINE unpackV3 #-}; \
unpackV3 (VC c) = case unpack/**/VT c of \
{(# r1, r2, r3, _ #) -> (EC2(EC r1), EC2(EC r2), EC2(EC r3))}; }
VECTOR3MATH(Float,FloatX3#,V3F,M3F,F#,emptyc,.0#)
VECTOR3MATH(CFloat,FloatX3#,V3CF,M3CF,F#,CFloat,.0#)
#define MATRIXPRODUCT3(T,VT,VC,MC) \
instance MatrixProduct Matrix 3 T where { \
{-# INLINE prod #-}; \
prod (MC a1 a2 a3) (MC b1 b2 b3) \
= case prodMM/**/VT a1 a2 a3 b1 b2 b3 of \
{(# r1, r2, r3 #) -> MC r1 r2 r3}}; \
instance MatrixProduct Vector 3 T where { \
{-# INLINE prod #-}; \
prod (MC a1 a2 a3) (VC b) = VC (prodMV/**/VT a1 a2 a3 b) }
MATRIXPRODUCT3(Float,FloatX3#,V3F,M3F)
MATRIXPRODUCT3(CFloat,FloatX3#,V3CF,M3CF)
#define VECTORFRACMATH3(T,VT,VC,MC) \
instance VectorFracMath 3 T where { \
{-# INLINE inverse #-}; \
inverse (MC c1 c2 c3) = case inverseM/**/VT c1 c2 c3 of \
(# r1, r2, r3 #) -> MC r1 r2 r3 }
VECTORFRACMATH3(Float,FloatX3#,V3F,M3F)
VECTORFRACMATH3(CFloat,FloatX3#,V3CF,M3CF)
#endif
|
achirkin/fastvec
|
src/Data/Geometry/Types.hs
|
bsd-3-clause
| 20,783
| 154
| 9
| 9,189
| 1,942
| 1,038
| 904
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
{- |
Module : Network.Mail.Mime.Parser.Util
Copyright : (c) 2015 Alberto Valverde
License : BSD3
Maintainer : alberto@toscat.net
Stability : provisional
Portability : unknown
This module provides accessors to Message parts
-}
module Network.Mail.Mime.Parser.Util (
getContentType
, getAttachments
, getPlainText
, getHtmlText
, getFilename
, getContentDisposition
, getFrom
, getTo
, getSubject
) where
import Data.Text (Text)
import Control.Lens
import Control.Monad (join)
import Network.Mail.Mime.Parser.Types
import Network.Mail.Mime.Parser.Internal.Common (
getAttachments
, getTextBody
, getContentType
, getContentDisposition
, getFilename
, firstJust
)
-- | The addresses from the first @From@ header of the message, if present.
getFrom :: Message -> Maybe [NameAddr]
getFrom msg = firstJust [h ^? _From | h <- msg ^. msgHeaders]

-- | The addresses from the first @To@ header of the message, if present.
getTo :: Message -> Maybe [NameAddr]
getTo msg = firstJust [h ^? _To | h <- msg ^. msgHeaders]

-- | The first @Subject@ header of the message, if present.
getSubject :: Message -> Maybe Text
getSubject msg = firstJust [h ^? _Subject | h <- msg ^. msgHeaders]
-- | The decoded body of the first @text/html@ part, if any.
getHtmlText :: Message -> Maybe Text
getHtmlText msg = getTextBody "html" msg >>= (^? _TextBody)

-- | The decoded body of the first @text/plain@ part, if any.
getPlainText :: Message -> Maybe Text
getPlainText msg = getTextBody "plain" msg >>= (^? _TextBody)
|
meteogrid/mime-mail-parser
|
Network/Mail/Mime/Parser/Util.hs
|
bsd-3-clause
| 1,256
| 0
| 8
| 242
| 286
| 169
| 117
| 32
| 1
|
{-
- Hacq (c) 2013 NEC Laboratories America, Inc. All rights reserved.
-
- This file is part of Hacq.
- Hacq is distributed under the 3-clause BSD license.
- See the LICENSE file for more details.
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Control.Monad.Quantum.ApproxSequence.Counter (
module Control.Monad.Quantum.ApproxSequence.Class,
ApproxSequenceCounterT(..)) where
import Control.Applicative (Applicative)
import Control.Monad.Trans (MonadTrans(lift))
import Control.Monad.Memo.Class
import Control.Monad.Quantum.Class
import Control.Monad.Quantum.Counter.Class
import Control.Monad.Quantum.ApproxSequence.Class
import Data.Quantum.ApproxSequence.Count
-- | A transparent transformer: every operation of the underlying quantum
-- monad is passed through unchanged (via GeneralizedNewtypeDeriving), except
-- that 'applyOneQubitUnitary' additionally records one
-- 'singletonApproxSequenceCount' in the underlying counter monad.
newtype ApproxSequenceCounterT w c m a = ApproxSequenceCounterT {
    runApproxSequenceCounterT :: m a
  } deriving (Functor, Applicative, Monad, MonadQuantumBase w, MonadToffoli w, MonadQuantum w, MonadQuantumCounter w c, MonadMemo k)

instance MonadTrans (ApproxSequenceCounterT w c) where
  lift = ApproxSequenceCounterT

instance (IsApproxSequenceCount c, MonadApproxSequence w m, MonadQuantumCounter w c m) => MonadApproxSequence w (ApproxSequenceCounterT w c m) where
  applyOneQubitUnitary a c d w = do
    -- Count this gate towards the approximate-sequence statistics, then
    -- delegate the actual unitary to the inner monad.
    rawRecord singletonApproxSequenceCount
    lift $ applyOneQubitUnitary a c d w
|
ti1024/hacq
|
src/Control/Monad/Quantum/ApproxSequence/Counter.hs
|
bsd-3-clause
| 1,358
| 0
| 9
| 185
| 264
| 156
| 108
| 22
| 0
|
module Main where
import Test.Framework.Providers.DocTest
import Test.Framework
main = docTest ["Test/Cucumber.hs"] [] >>= defaultMain . return
|
Erkan-Yilmaz/haskell-cucumber
|
tests/MainDocTest.hs
|
bsd-3-clause
| 146
| 0
| 8
| 18
| 39
| 23
| 16
| 4
| 1
|
{- |
Module : ./VSE/Fold.hs
Description : folding functions for VSE progams
Copyright : (c) Christian Maeder, DFKI Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
folding functions for VSE progams
-}
module VSE.Fold where
import qualified Data.Set as Set
import CASL.AS_Basic_CASL
import VSE.As
-- | Fold record: one handler per 'Program' constructor.  'foldProg'
-- dispatches on the constructor, passing the whole ranged program plus the
-- already-folded sub-programs to the matching handler.
data FoldRec a = FoldRec
  { foldAbort :: Program -> a -- ^ handler for 'Abort'
  , foldSkip :: Program -> a -- ^ handler for 'Skip'
  , foldAssign :: Program -> VAR -> TERM () -> a -- ^ variable and assigned term
  , foldCall :: Program -> FORMULA () -> a -- ^ call, encoded as a formula
  , foldReturn :: Program -> TERM () -> a -- ^ returned term
  , foldBlock :: Program -> [VAR_DECL] -> a -> a -- ^ declarations and folded body
  , foldSeq :: Program -> a -> a -> a -- ^ folded first and second statement
  , foldIf :: Program -> FORMULA () -> a -> a -> a -- ^ condition, then- and else-result
  , foldWhile :: Program -> FORMULA () -> a -> a } -- ^ condition and folded body
-- | Fold a 'Program' bottom-up: sub-programs are folded first and their
-- results handed to the handler for the enclosing constructor.
foldProg :: FoldRec a -> Program -> a
foldProg fr prg = case unRanged prg of
    Abort -> foldAbort fr prg
    Skip -> foldSkip fr prg
    Assign v t -> foldAssign fr prg v t
    Call f -> foldCall fr prg f
    Return t -> foldReturn fr prg t
    Block vs q -> foldBlock fr prg vs (go q)
    Seq q1 q2 -> foldSeq fr prg (go q1) (go q2)
    If f q1 q2 -> foldIf fr prg f (go q1) (go q2)
    While f q -> foldWhile fr prg f (go q)
  where go = foldProg fr
-- | Identity fold: rebuild every program node unchanged, preserving ranges.
mapRec :: FoldRec Program
mapRec = FoldRec
  { foldAbort = id
  , foldSkip = id
  , foldAssign = \ (Ranged _ rg) v t -> Ranged (Assign v t) rg
  , foldCall = \ (Ranged _ rg) f -> Ranged (Call f) rg
  , foldReturn = \ (Ranged _ rg) t -> Ranged (Return t) rg
  , foldBlock = \ (Ranged _ rg) vs p -> Ranged (Block vs p) rg
  , foldSeq = \ (Ranged _ rg) q1 q2 -> Ranged (Seq q1 q2) rg
  , foldIf = \ (Ranged _ rg) c q1 q2 -> Ranged (If c q1 q2) rg
  , foldWhile = \ (Ranged _ rg) c p -> Ranged (While c p) rg }

-- | Apply a term mapping and a formula mapping to every term/formula inside
-- a program, leaving structure and ranges untouched.
mapProg :: (TERM () -> TERM ()) -> (FORMULA () -> FORMULA ())
  -> FoldRec Program
mapProg mt mf = mapRec
  { foldAssign = \ (Ranged _ rg) v t -> Ranged (Assign v (mt t)) rg
  , foldCall = \ (Ranged _ rg) f -> Ranged (Call (mf f)) rg
  , foldReturn = \ (Ranged _ rg) t -> Ranged (Return (mt t)) rg
  , foldIf = \ (Ranged _ rg) c q1 q2 -> Ranged (If (mf c) q1 q2) rg
  , foldWhile = \ (Ranged _ rg) c p -> Ranged (While (mf c) p) rg }
-- | Collecting fold, e.g. for variables to be universally bound on the top
-- level: terms map via @ft@, formulas via @ff@, sub-results are combined
-- with @comb@, and leaf statements yield the constant @leaf@.
constProg :: (TERM () -> a) -> (FORMULA () -> a) -> ([a] -> a) -> a -> FoldRec a
constProg ft ff comb leaf = FoldRec
  { foldAbort = const leaf
  , foldSkip = const leaf
  , foldAssign = \ _ _ t -> ft t
  , foldCall = \ _ f -> ff f
  , foldReturn = \ _ t -> ft t
  , foldBlock = \ _ _ p -> p
  , foldSeq = \ _ p1 p2 -> comb [p1, p2]
  , foldIf = \ _ f p1 p2 -> comb [ff f, p1, p2]
  , foldWhile = \ _ f p -> comb [ff f, p] }
-- | Specialise 'constProg' to collecting sets: union the sub-results and
-- start from the empty set at the leaves.
progToSetRec :: Ord a => (TERM () -> Set.Set a) -> (FORMULA () -> Set.Set a)
  -> FoldRec (Set.Set a)
progToSetRec onTerm onForm = constProg onTerm onForm Set.unions Set.empty
|
spechub/Hets
|
VSE/Fold.hs
|
gpl-2.0
| 2,867
| 0
| 13
| 766
| 1,309
| 681
| 628
| 58
| 9
|
import Distribution.PackageDescription
import Distribution.PackageDescription.Parse
import Distribution.Verbosity
import Distribution.System
import Distribution.Simple
import Distribution.Simple.Utils
import Distribution.Simple.Setup
import Distribution.Simple.Command
import Distribution.Simple.Program
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.PreProcess hiding (ppC2hs)
import Control.Exception
import Control.Monad
import System.Exit (ExitCode (ExitSuccess, ExitFailure))
import System.FilePath
import System.Directory
import System.Environment
import System.IO.Error hiding (catch)
import Prelude hiding (catch)
-- Replicate the invocation of the postConf script, so that we can insert the
-- arguments of --extra-include-dirs and --extra-lib-dirs as paths in CPPFLAGS
-- and LDFLAGS into the environment
--
-- | Cabal entry point: the stock autoconf hooks, with our pre/post-configure
-- hooks and a c2hs preprocessor that honours extra options from .buildinfo.
main :: IO ()
main = defaultMainWithHooks customHooks
  where
    -- Keep every stock preprocessor except "chs", which we replace.
    otherPPs = filter ((/= "chs") . fst) (hookedPreProcessors autoconfUserHooks)
    customHooks = autoconfUserHooks
      { preConf = preConfHook
      , postConf = postConfHook
      , hookedPreProcessors = ("chs", ppC2hs) : otherPPs
      }
-- | Pre-configure hook: ensure a @configure@ script exists, generating it by
-- running @autoconf@ when missing, then defer to the stock autoconf hook.
preConfHook :: Args -> ConfigFlags -> IO HookedBuildInfo
preConfHook args flags = do
  let verbosity = fromFlag (configVerbosity flags)
  confExists <- doesFileExist "configure"
  -- Only regenerate when the script is absent (e.g. building from VCS).
  unless confExists $ do
    code <- rawSystemExitCode verbosity "autoconf" []
    case code of
      ExitSuccess -> return ()
      ExitFailure c -> die $ "autoconf exited with code " ++ show c
  preConf autoconfUserHooks args flags
-- | Post-configure hook: run the @configure@ script ourselves (so that the
-- extra include/lib dirs reach it via the environment, see
-- 'runConfigureScript'), merge the generated .buildinfo into the package
-- description, and finish with the /simple/ hooks -- not the autoconf ones,
-- which would run configure a second time.
postConfHook :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO ()
postConfHook args flags pkg_descr lbi
    = let verbosity = fromFlag (configVerbosity flags)
      in do
        noExtraFlags args
        confExists <- doesFileExist "configure"
        if confExists
            then runConfigureScript verbosity False flags lbi
            else die "configure script not found."
        pbi <- getHookedBuildInfo verbosity
        let pkg_descr' = updatePackageDescription pbi pkg_descr
        postConf simpleUserHooks args flags pkg_descr' lbi
-- | Run the @./configure@ script via @sh@, exporting the C compiler and the
-- @--extra-include-dirs@ / @--extra-lib-dirs@ flags through the environment
-- (CC, CFLAGS, CPPFLAGS, LDFLAGS) so the script can pick them up.
runConfigureScript :: Verbosity -> Bool -> ConfigFlags -> LocalBuildInfo -> IO ()
runConfigureScript verbosity backwardsCompatHack flags lbi = do
  env <- getEnvironment
  (ccProg, ccFlags) <- configureCCompiler verbosity (withPrograms lbi)
  -- Append to any pre-existing user values rather than clobbering them.
  let env' = foldr appendToEnvironment env
        [("CC", ccProg)
        ,("CFLAGS", unwords ccFlags)
        ,("CPPFLAGS", unwords $ map ("-I"++) (configExtraIncludeDirs flags))
        ,("LDFLAGS", unwords $ map ("-L"++) (configExtraLibDirs flags))
        ]
  handleNoWindowsSH $ rawSystemExitWithEnv verbosity "sh" args env'
  where
    args = "configure" : configureArgs backwardsCompatHack flags

    -- Append val to an existing key (space-separated) or add a new entry.
    appendToEnvironment (key, val) [] = [(key, val)]
    appendToEnvironment (key, val) (kv@(k, v) : rest)
        | key == k = (key, v ++ " " ++ val) : rest
        | otherwise = kv : appendToEnvironment (key, val) rest

    -- On Windows, a missing "sh" gets a hint about installing MSYS/Cygwin
    -- instead of a bare does-not-exist error.
    handleNoWindowsSH action
      | buildOS /= Windows
      = action
      | otherwise
      = action
          `catch` \ioe -> if isDoesNotExistError ioe
                            then die notFoundMsg
                            else throwIO ioe

    notFoundMsg = "The package has a './configure' script. This requires a "
               ++ "Unix compatibility toolchain such as MinGW+MSYS or Cygwin."
-- | Read the package's .buildinfo file if one exists, otherwise return the
-- empty hooked build info.
getHookedBuildInfo :: Verbosity -> IO HookedBuildInfo
getHookedBuildInfo verbosity = do
  mInfoFile <- defaultHookedPackageDesc
  maybe (return emptyHookedBuildInfo) readIt mInfoFile
  where
    readIt f = do
      info verbosity ("Reading parameters from " ++ f)
      readHookedBuildInfo verbosity f
-- Replicate the default C2HS preprocessor hook here, and inject a value for
-- extra-c2hs-options, if it was present in the buildinfo file
--
-- Everything below copied from Distribution.Simple.PreProcess
--
-- | The c2hs preprocessor, identical to the stock Cabal one except that it
-- additionally honours an @x-extra-c2hs-options@ field from .buildinfo.
ppC2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppC2hs bi lbi
    = PreProcessor {
        platformIndependent = False,
        runPreProcessor = \(inBaseDir, inRelativeFile)
                           (outBaseDir, outRelativeFile) verbosity ->
          rawSystemProgramConf verbosity c2hsProgram (withPrograms lbi) . filter (not . null) $
            -- extra options from the .buildinfo file, if any
            maybe [] words (lookup "x-extra-c2hs-options" (customFieldsBI bi))
            ++ ["--include=" ++ outBaseDir]
            ++ ["--cppopts=" ++ opt | opt <- getCppOptions bi lbi]
            ++ ["--output-dir=" ++ outBaseDir,
                "--output=" ++ outRelativeFile,
                inBaseDir </> inRelativeFile]
      }
-- | CPP options for c2hs: one @-I@ per include directory of the package plus
-- any @-D@/@-I@/@-U@ flags taken from its cc-options.
getCppOptions :: BuildInfo -> LocalBuildInfo -> [String]
getCppOptions bi _lbi =
    map ("-I" ++) (includeDirs bi) ++ filter isCppFlag (ccOptions bi)
  where
    isCppFlag ('-':c:_) = c `elem` "DIU"
    isCppFlag _ = False
|
kathawala/symdiff
|
cublas/Setup.hs
|
gpl-3.0
| 5,068
| 0
| 16
| 1,310
| 1,191
| 624
| 567
| 98
| 3
|
-- Test fixture: the 'Eq' constraint is deliberately superfluous (it is
-- implied by 'Ord'), so the checker should emit a superfluous-predicate
-- warning here.  Do not "fix" the signature.
main :: (Eq aap, Ord aap) => aap -> aap -> aap
main = undefined
|
roberth/uu-helium
|
test/staticwarnings/SuperfluousPreds.hs
|
gpl-3.0
| 64
| 0
| 7
| 15
| 34
| 18
| 16
| 2
| 1
|
-- Copyright 2016 TensorFlow authors.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-} -- For the Render class
module TensorFlow.Tensor where
import Data.ByteString (ByteString)
import Data.String (IsString(..))
import qualified Data.Text as Text
import Lens.Family2 ((^.))
import Lens.Family2.State ((%=), use)
import Proto.Tensorflow.Core.Framework.NodeDef (device)
import TensorFlow.Build
import TensorFlow.Output (Output, NodeName, outputNodeName, Device(..))
import TensorFlow.Types
( TensorData(..)
, ListOf(..)
)
import qualified TensorFlow.Internal.FFI as FFI
-- | A named output of a TensorFlow operation.
--
-- The type parameter @a@ is the type of the elements in the 'Tensor'. The
-- parameter @v@ is either:
--
-- * 'Build': An unrendered, immutable value.
-- * 'Value': A rendered, immutable value.
-- * 'Ref': A rendered stateful handle (e.g., a variable).
--
-- Note that 'expr', 'value', 'render' and 'renderValue' can help convert between
-- the different types of 'Tensor'.
data Tensor v a where
    Tensor :: TensorKind v => {tensorOutput :: v Output} -> Tensor v a

-- | Wrapper marking a rendered, immutable tensor output; behaves as the
-- identity functor/monad.
newtype Value a = Value {runValue :: a}
    deriving Functor

instance Applicative Value where
    pure = Value
    Value f <*> Value x = Value $ f x

instance Monad Value where
    f >>= g = g $ runValue f

-- | Wrapper marking a rendered, stateful handle (e.g. a variable); also an
-- identity functor/monad.
newtype Ref a = Ref {runRef :: a}
    deriving Functor

instance Applicative Ref where
    pure = Ref
    Ref f <*> Ref x = Ref $ f x

instance Monad Ref where
    f >>= g = g $ runRef f
-- | Cast a 'Tensor Ref' into a 'Tensor Value'. This behaves like a no-op:
-- only the kind wrapper changes, the underlying output is untouched.
value :: Tensor Ref a -> Tensor Value a
value (Tensor (Ref o)) = Tensor (Value o)

-- | Render (if necessary) any kind of tensor, producing an immutable 'Value'.
renderValue :: MonadBuild m => Tensor v a -> m (Tensor Value a)
renderValue (Tensor o) = render (Tensor (toBuild o))
-- | A pair of a 'Tensor' and some data that should be fed into that 'Tensor'
-- when running the graph.
data Feed = Feed Output FFI.TensorData

-- | A class ensuring that a given tensor is rendered, i.e., has a fixed
-- name, device, etc.
class TensorKind v => Rendered v where
    -- | Extract the already-rendered payload from the kind wrapper.
    rendered :: v a -> a

instance Rendered Value where
    rendered = runValue

instance Rendered Ref where
    rendered = runRef
-- | The rendered 'Output' underlying a rendered tensor.
renderedOutput :: Rendered v => Tensor v a -> Output
renderedOutput t = rendered (tensorOutput t)

-- | The graph node name of a rendered tensor.
tensorNodeName :: Rendered v => Tensor v a -> NodeName
tensorNodeName t = outputNodeName (renderedOutput t)

-- | Create a 'Feed' for feeding the given data into a 'Tensor' when running
-- the graph.
--
-- Note that if a 'Tensor' is rendered, its identity may change; so feeding the
-- rendered 'Tensor' may be different than feeding the original 'Tensor'.
feed :: Rendered v => Tensor v a -> TensorData a -> Feed
feed tensor (TensorData raw) = Feed (renderedOutput tensor) raw
-- | Create a 'Tensor' for a given name. This can be used to reference nodes
-- in a 'GraphDef' that was loaded via 'addGraphDef'.
-- TODO(judahjacobson): add more safety checks here.
tensorFromName :: TensorKind v => Text.Text -> Tensor v a
tensorFromName name = Tensor (pure (fromString (Text.unpack name)))

-- | Like 'tensorFromName', but type-restricted to 'Value'.
tensorValueFromName :: Text.Text -> Tensor Value a
tensorValueFromName = tensorFromName

-- | Like 'tensorFromName', but type-restricted to 'Ref'.
tensorRefFromName :: Text.Text -> Tensor Ref a
tensorRefFromName = tensorFromName

-- | A heterogeneous list of tensors that all share the kind @v@.
type TensorList v = ListOf (Tensor v)

-- | The rendered outputs of every tensor in the list.
tensorListOutputs :: Rendered v => TensorList v as -> [Output]
tensorListOutputs ts = case ts of
    Nil -> []
    t :/ rest -> renderedOutput t : tensorListOutputs rest
-- | Places all nodes rendered in the given 'Build' action on the same
-- device as the given Tensor (see also 'withDevice'). Make sure that
-- the action has side effects of rendering the desired tensors. A pure
-- return would not have the desired effect.
colocateWith :: (MonadBuild m, Rendered v) => Tensor v b -> m a -> m a
colocateWith t x = do
    -- Look up the device the already-rendered tensor was placed on ...
    d <- build $ Device . (^. device)
             <$> lookupNode (outputNodeName $ renderedOutput t)
    -- ... and make it the default device while running the action.
    withDevice (Just d) x
-- | Render a 'Tensor', fixing its name, scope, device and control inputs
-- from the 'MonadBuild' context. Also renders any not-yet-rendered
-- dependencies of the 'Tensor'.
--
-- Idempotent within a single context; rendering the same @Tensor Build@ in
-- two different contexts may, however, yield two different @Tensor Value@s.
render :: MonadBuild m => Tensor Build a -> m (Tensor Value a)
render (Tensor t) = fmap (Tensor . Value) (build t)

-- | Forget a tensor's kind, viewing it as an unrendered 'Build' tensor.
-- TODO: better name.
expr :: TensorKind v => Tensor v a -> Tensor Build a
expr (Tensor o) = Tensor (toBuild o)
-- | Records the given summary action in Build for retrieval with
-- Summary protocol buffer in string form. For safety, use the
-- pre-composed functions: Logging.scalarSummary and
-- Logging.histogramSummary.
addSummary :: (MonadBuild m, TensorKind v) => Tensor v ByteString -- ^ A 'SummaryTensor'
           -> m ()
addSummary t = build $ do
    -- TODO: more generic way
    -- Render the output if needed and prepend it to the collected summaries.
    o <- toBuild $ tensorOutput t
    summaries %= (o :)
-- | Retrieves the summary ops collected thus far. Typically this only
-- happens once, but if 'TensorFlow.Session.buildWithSummary' is used
-- repeatedly, the values accumulate.
collectAllSummaries :: MonadBuild m => m [SummaryTensor]
collectAllSummaries = build $ do
    os <- use summaries
    return (map (Tensor . Value) os)

-- | Synonym for the tensors that return serialized Summary proto.
type SummaryTensor = Tensor Value ByteString
-- | An internal class for kinds of Tensors.
class Monad v => TensorKind v where
    -- | Embed the wrapped output into the 'Build' monad. 'Value' and 'Ref'
    -- are already rendered, so this is just 'return'; for 'Build' it is 'id'.
    toBuild :: v a -> Build a

instance TensorKind Value where
    toBuild = return . rendered

instance TensorKind Ref where
    toBuild = return . rendered

instance TensorKind Build where
    toBuild = id
|
cem3394/haskell
|
tensorflow/src/TensorFlow/Tensor.hs
|
apache-2.0
| 6,742
| 0
| 12
| 1,275
| 1,257
| 688
| 569
| -1
| -1
|
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QStyleOptionViewItemV3.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:15
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QStyleOptionViewItemV3 (
QqStyleOptionViewItemV3(..)
,QqStyleOptionViewItemV3_nf(..)
,qStyleOptionViewItemV3_delete
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
-- | Overloaded constructor for 'QStyleOptionViewItemV3' (garbage-collected
-- result).  NOTE(review): this module is machine generated; regenerate
-- rather than edit by hand.
class QqStyleOptionViewItemV3 x1 where
  qStyleOptionViewItemV3 :: x1 -> IO (QStyleOptionViewItemV3 ())
instance QqStyleOptionViewItemV3 (()) where
qStyleOptionViewItemV3 ()
= withQStyleOptionViewItemV3Result $
qtc_QStyleOptionViewItemV3
foreign import ccall "qtc_QStyleOptionViewItemV3" qtc_QStyleOptionViewItemV3 :: IO (Ptr (TQStyleOptionViewItemV3 ()))
instance QqStyleOptionViewItemV3 ((QStyleOptionViewItem t1)) where
qStyleOptionViewItemV3 (x1)
= withQStyleOptionViewItemV3Result $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QStyleOptionViewItemV31 cobj_x1
foreign import ccall "qtc_QStyleOptionViewItemV31" qtc_QStyleOptionViewItemV31 :: Ptr (TQStyleOptionViewItem t1) -> IO (Ptr (TQStyleOptionViewItemV3 ()))
instance QqStyleOptionViewItemV3 ((QStyleOptionViewItemV3 t1)) where
qStyleOptionViewItemV3 (x1)
= withQStyleOptionViewItemV3Result $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QStyleOptionViewItemV32 cobj_x1
foreign import ccall "qtc_QStyleOptionViewItemV32" qtc_QStyleOptionViewItemV32 :: Ptr (TQStyleOptionViewItemV3 t1) -> IO (Ptr (TQStyleOptionViewItemV3 ()))
-- | Overloaded constructor variant whose result is NOT finalized
-- automatically ("_nf"); the caller is responsible for deletion.
class QqStyleOptionViewItemV3_nf x1 where
  qStyleOptionViewItemV3_nf :: x1 -> IO (QStyleOptionViewItemV3 ())
instance QqStyleOptionViewItemV3_nf (()) where
qStyleOptionViewItemV3_nf ()
= withObjectRefResult $
qtc_QStyleOptionViewItemV3
instance QqStyleOptionViewItemV3_nf ((QStyleOptionViewItem t1)) where
qStyleOptionViewItemV3_nf (x1)
= withObjectRefResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QStyleOptionViewItemV31 cobj_x1
instance QqStyleOptionViewItemV3_nf ((QStyleOptionViewItemV3 t1)) where
qStyleOptionViewItemV3_nf (x1)
= withObjectRefResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QStyleOptionViewItemV32 cobj_x1
instance Qlocale (QStyleOptionViewItemV3 a) (()) where
locale x0 ()
= withQLocaleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QStyleOptionViewItemV3_locale cobj_x0
foreign import ccall "qtc_QStyleOptionViewItemV3_locale" qtc_QStyleOptionViewItemV3_locale :: Ptr (TQStyleOptionViewItemV3 a) -> IO (Ptr (TQLocale ()))
instance QsetLocale (QStyleOptionViewItemV3 a) ((QLocale t1)) where
setLocale x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QStyleOptionViewItemV3_setLocale cobj_x0 cobj_x1
foreign import ccall "qtc_QStyleOptionViewItemV3_setLocale" qtc_QStyleOptionViewItemV3_setLocale :: Ptr (TQStyleOptionViewItemV3 a) -> Ptr (TQLocale t1) -> IO ()
instance QsetWidget (QStyleOptionViewItemV3 a) ((QWidget t1)) where
setWidget x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QStyleOptionViewItemV3_setWidget cobj_x0 cobj_x1
foreign import ccall "qtc_QStyleOptionViewItemV3_setWidget" qtc_QStyleOptionViewItemV3_setWidget :: Ptr (TQStyleOptionViewItemV3 a) -> Ptr (TQWidget t1) -> IO ()
-- | Delete the underlying C++ object; the wrapper must not be used afterwards.
qStyleOptionViewItemV3_delete :: QStyleOptionViewItemV3 a -> IO ()
qStyleOptionViewItemV3_delete x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QStyleOptionViewItemV3_delete cobj_x0

foreign import ccall "qtc_QStyleOptionViewItemV3_delete" qtc_QStyleOptionViewItemV3_delete :: Ptr (TQStyleOptionViewItemV3 a) -> IO ()
|
keera-studios/hsQt
|
Qtc/Gui/QStyleOptionViewItemV3.hs
|
bsd-2-clause
| 3,961
| 0
| 12
| 523
| 881
| 458
| 423
| -1
| -1
|
{-# LANGUAGE BangPatterns, PatternGuards #-}
-- | The list version of the solver also builds the bounding box at every
-- node of the tree, which is good for visualisation.
module Solver.VectorBH.Solver
( MassPoint (..)
, BoundingBox (..)
, BHTree (..)
, calcAccels
, buildTree
, findBounds)
where
import Common.Body
import Data.Vector.Unboxed (Vector)
import qualified Data.Vector.Unboxed as V
type BoundingBox
= (Double, Double, Double, Double)
-- | The smaller of a box's width and height.
sizeOfBox :: BoundingBox -> Double
{-# INLINE sizeOfBox #-}
sizeOfBox (llx, lly, rux, ruy) = min width height
  where
    width = abs (rux - llx)
    height = abs (ruy - lly)
-- | The Barnes-Hut tree we use to organise the points.
data BHTree
        = BHT
        { bhTreeSize :: {-# UNPACK #-} !Double -- minimum of hight and width of cell
        , bhTreeCenterX :: {-# UNPACK #-} !Double -- ^ x of the cell's centre of mass
        , bhTreeCenterY :: {-# UNPACK #-} !Double -- ^ y of the cell's centre of mass
        , bhTreeMass :: {-# UNPACK #-} !Double -- ^ total mass of points in the cell
        , bhTreeBranch :: ![BHTree] } -- ^ subtrees; empty for a leaf
        deriving Show
-- | Compute the accelerations on all these points, approximating distant
-- groups of points by the centroids stored in a Barnes-Hut tree.
calcAccels :: Double -> Vector MassPoint -> Vector Accel
calcAccels epsilon mpts = V.map (calcAccel epsilon tree) mpts
  where tree = buildTree mpts

-- | Build a Barnes-Hut tree from these points.
buildTree :: Vector MassPoint -> BHTree
buildTree pts = buildTreeWithBox (findBounds pts) pts
-- | Find the coordinates of the bounding box that contains these points.
-- Precondition: the vector is non-empty ('V.!' 0 fails otherwise).
findBounds :: Vector MassPoint -> (Double, Double, Double, Double)
{-# INLINE findBounds #-}
findBounds mpts = V.foldl' grow (x0, y0, x0, y0) mpts
  where
    (x0, y0, _) = mpts V.! 0
    grow (!llx, !lly, !rux, !ruy) (x, y, _) =
        (min llx x, min lly y, max rux x, max ruy y)
-- | Given a bounding box that contains all the points,
-- build the Barnes-Hut tree for them.
--
-- NOTE(review): points that coincide exactly can never be separated by
-- quadrant splitting, so this recursion looks like it may not terminate for
-- duplicate points -- confirm against the callers.
buildTreeWithBox
        :: BoundingBox -- ^ bounding box containing all the points.
        -> Vector MassPoint -- ^ points in the box.
        -> BHTree
buildTreeWithBox bb mpts
  | V.length mpts <= 1 = BHT s x y m []
  | otherwise = BHT s x y m subTrees
  where
    s = sizeOfBox bb
    (x, y, m) = calcCentroid mpts
    (boxes, splitPnts) = splitPoints bb mpts
    -- Recurse into each non-empty quadrant.
    subTrees = [buildTreeWithBox bb' ps
               | (bb', ps) <- zip boxes splitPnts]
-- | Split massPoints according to their locations in the quadrants.
splitPoints
    :: BoundingBox -- ^ bounding box containing all the points.
    -> Vector MassPoint -- ^ points in the box.
    -> ( [BoundingBox] -- ^ quadrants that received at least one point
       , [Vector MassPoint]) -- ^ the points of each such quadrant
splitPoints b@(llx, lly, rux, ruy) mpts
  | noOfPoints <= 1 = ([b], [mpts])
  | otherwise
  = unzip [ (b,p)
          | (b,p) <- zip boxes splitPars
          , V.length p > 0]
  where
    noOfPoints = V.length mpts

    -- The midpoint of the parent bounding box.
    (midx, midy) = ((llx + rux) / 2.0 , (lly + ruy) / 2.0)

    -- Split the parent bounding box into four quadrants.
    b1 = (llx, lly, midx, midy)
    b2 = (llx, midy, midx, ruy)
    b3 = (midx, midy, rux, ruy)
    b4 = (midx, lly, rux, midy)
    boxes = [b1, b2, b3, b4]

    -- Sort the particles into the smaller boxes.
    lls = V.filter (inBox b1) mpts
    lus = V.filter (inBox b2) mpts
    rus = V.filter (inBox b3) mpts
    rls = V.filter (inBox b4) mpts
    splitPars = [lls, lus, rus, rls]
-- | Check if a particle is in box (excluding left and lower border).
inBox:: BoundingBox -> MassPoint -> Bool
{-# INLINE inBox #-}
inBox (llx, lly, rux, ruy) (px, py, _) =
    px > llx && px <= rux && py > lly && py <= ruy

-- | Calculate the mass-weighted centroid of some points.
calcCentroid :: Vector MassPoint -> MassPoint
{-# INLINE calcCentroid #-}
calcCentroid mpts = (V.sum wxs / mass, V.sum wys / mass, mass)
  where
    mass = V.sum (V.map (\(_, _, m) -> m) mpts)
    (wxs, wys) = V.unzip (V.map (\(x, y, m) -> (m * x, m * y)) mpts)
-- | Calculate the accelleration of a point due to the points in the given tree.
--
-- Leaf cells, and cells far enough away, are approximated by their centroid;
-- otherwise we descend into the subtrees and sum the partial accelerations.
calcAccel:: Double -> BHTree -> MassPoint -> (Double, Double)
calcAccel !epsilon (BHT s x y m subtrees) mpt
  | [] <- subtrees
  = accel epsilon mpt (x, y, m)

  | isFar mpt s x y
  = accel epsilon mpt (x, y, m)

  | otherwise
  = let (xs, ys) = unzip [ calcAccel epsilon st mpt | st <- subtrees]
    in (sum xs, sum ys)
-- | If the point is far from a cell in the tree then we can use
-- its centroid as an approximation of all the points in the region.
isFar :: MassPoint -- ^ point being accelerated
      -> Double -- ^ size of region
      -> Double -- ^ x of the cell's centre of mass
      -> Double -- ^ y of the cell's centre of mass
      -> Bool
{-# INLINE isFar #-}
isFar (px, py, _) s cx cy = (s / dist) < 1
  where
    !dx = cx - px
    !dy = cy - py
    !dist = sqrt (dx * dx + dy * dy)
|
mainland/dph
|
dph-examples/examples/real/NBody/Solver/VectorBH/Solver.hs
|
bsd-3-clause
| 4,692
| 141
| 13
| 1,151
| 1,592
| 885
| 707
| 109
| 1
|
-- Renders the shared Page/List component followed by this page's title and
-- its raw HTML body (inserted as CDATA) taken from the controller view data.
markup = <div>
          <% insertComponent "Page" "List" [] %>
          <div class="page-content">
            <h1><% getViewDataValue_u "page-title" :: View String %></h1>
            <% (getViewDataValue_u "page-content" :: View String) >>= (return . cdata) %>
          </div>
         </div>
|
alsonkemp/turbinado-website
|
App/Views/Test/Showxml.hs
|
bsd-3-clause
| 304
| 22
| 10
| 100
| 102
| 54
| 48
| -1
| -1
|
{-# LANGUAGE TemplateHaskell, OverloadedStrings #-}
module Monto.ProductDependency where
import Data.Aeson
import Data.Text (Text,unpack)
import Monto.Types
-- | A dependency needed to (re)compute a product: either a specific version
-- of a source document, or another product derived from one.
data ProductDependency
  = Version (VersionID,Source,Language)
  -- ^ depends on a source document version
  | Product (VersionID,ProductID,Source,Language,Product)
  -- ^ depends on another product
  deriving (Eq,Ord,Show)
-- | Serialize with a "tag" discriminator ("version" / "product") so the
-- 'FromJSON' instance can tell the two constructors apart.
instance ToJSON ProductDependency where
  toJSON (Version (vid,s,l)) = object
    [ "tag" .= ("version" :: Text)
    , "version_id" .= vid
    , "source" .= s
    , "language" .= l
    ]
  toJSON (Product (vid,pid,s,l,p)) = object
    [ "tag" .= ("product" :: Text)
    , "version_id" .= vid
    , "product_id" .= pid
    , "source" .= s
    , "language" .= l
    , "product" .= p
    ]
-- | Inverse of the 'ToJSON' instance: dispatch on the "tag" field.
--
-- Fix: the "version" branch wrapped a single 'return' in a redundant
-- 'do' block; both branches now return directly.
instance FromJSON ProductDependency where
  parseJSON = withObject "ProductDependency" $ \obj -> do
    -- Fields common to both variants.
    tag <- obj .: "tag"
    vid <- obj .: "version_id"
    s <- obj .: "source"
    l <- obj .: "language"
    case unpack tag of
      "version" -> return $ Version (vid,s,l)
      "product" -> do
        pid <- obj .: "product_id"
        p <- obj .: "product"
        return $ Product (vid,pid,s,l,p)
      _ -> fail "tag has to be version or product"
-- | NOTE(review): presumably a dependency whose product has become invalid
-- and must be recomputed -- confirm against the broker code.
type Invalid = ProductDependency
-- | NOTE(review): presumably an edge in the reversed dependency graph
-- (dependents of a product) -- confirm against the broker code.
type ReverseProductDependency = ProductDependency
|
svenkeidel/monto-broker
|
src/Monto/ProductDependency.hs
|
bsd-3-clause
| 1,314
| 0
| 17
| 368
| 409
| 224
| 185
| 38
| 0
|
{-# LANGUAGE BangPatterns #-}
{-| Monitoring daemon backend
This module holds implements the querying of the monitoring daemons
for dynamic utilisation data.
-}
{-
Copyright (C) 2015 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Backend.MonD
( queryAllMonDDCs
, pMonDData
) where
import Control.Monad
import Control.Monad.Writer
import qualified Data.List as L
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
import Data.Maybe (catMaybes, mapMaybe)
import qualified Data.Set as Set
import Network.Curl
import qualified Text.JSON as J
import Ganeti.BasicTypes
import qualified Ganeti.Constants as C
import Ganeti.Cpu.Types
import qualified Ganeti.DataCollectors.XenCpuLoad as XenCpuLoad
import qualified Ganeti.DataCollectors.CPUload as CPUload
import Ganeti.DataCollectors.Types ( DCReport, DCCategory
, dcReportData, dcReportName
, getCategoryName )
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.HTools.Loader (ClusterData(..))
import Ganeti.HTools.Types
import Ganeti.HTools.CLI
import Ganeti.JSON (fromJVal, tryFromObj, JSRecord, loadJSArray, maybeParseMap)
import Ganeti.Logging.Lifted (logWarning)
import Ganeti.Utils (exitIfBad)
-- * General definitions
-- | The actual data types for MonD's Data Collectors.
data Report = CPUavgloadReport CPUavgload -- ^ node-total CPU load average
            | InstanceCpuReport (Map.Map String Double) -- ^ CPU use per instance name

-- | Type describing a data collector basic information.
data DataCollector = DataCollector
  { dName :: String -- ^ Name of the data collector
  , dCategory :: Maybe DCCategory -- ^ The name of the category
  , dMkReport :: DCReport -> Maybe Report -- ^ How to parse a monitor report
  , dUse :: [(Node.Node, Report)]
         -> (Node.List, Instance.List)
         -> Result (Node.List, Instance.List)
    -- ^ How the collector reports are to be used to bring dynamic
    -- data into a cluster
  }
-- * Node-total CPU load average data collector
-- | Parse a DCReport for the node-total CPU collector.
mkCpuReport :: DCReport -> Maybe Report
mkCpuReport report =
  case fromJVal (dcReportData report) :: Result CPUavgload of
    Bad _ -> Nothing
    Ok load -> Just (CPUavgloadReport load)

-- | Take reports of node CPU values and update a node accordingly;
-- reports of any other kind leave the node untouched.
updateNodeCpuFromReport :: (Node.Node, Report) -> Node.Node
updateNodeCpuFromReport (node, CPUavgloadReport cav) =
  node { Node.utilLoad = (Node.utilLoad node) { cpuWeight = cavCpuTotal cav } }
updateNodeCpuFromReport (node, _) = node
-- | Update the instance CPU-utilization data, assuming that each virtual
-- CPU contributes equally to the node CPU load.
updateCpuUtilDataFromNode :: Instance.List -> Node.Node -> Instance.List
updateCpuUtilDataFromNode il node =
  let ct = cpuWeight (Node.utilLoad node)
      n_uCpu = Node.uCpu node
      -- Only instances whose primary node is this node are updated; each
      -- gets a share of the node load proportional to its vCPU count.
      upd inst =
        if Node.idx node == Instance.pNode inst
          then
            let i_vcpus = Instance.vcpus inst
                i_util = ct / fromIntegral n_uCpu * fromIntegral i_vcpus
                i_du = Instance.util inst
                i_du' = i_du {cpuWeight = i_util}
            in inst {Instance.util = i_du'}
          else inst
  in Container.map upd il
-- | Update cluster data from node CPU load reports: refresh each node's
-- CPU weight and redistribute the load onto its primary instances.
--
-- Fix: use the strict 'L.foldl'' instead of the lazy 'foldl', which builds
-- an unevaluated thunk chain proportional to the number of reports.
useNodeTotalCPU :: [(Node.Node, Report)]
                -> (Node.List, Instance.List)
                -> Result (Node.List, Instance.List)
useNodeTotalCPU reports (nl, il) =
  let newnodes = map updateNodeCpuFromReport reports
      il' = L.foldl' updateCpuUtilDataFromNode il newnodes
      nl' = zip (Container.keys nl) newnodes
  in return (Container.fromList nl', il')
-- | The node-total CPU collector: parses 'CPUavgload' reports and spreads
-- the node load over the instances via 'useNodeTotalCPU'.
totalCPUCollector :: DataCollector
totalCPUCollector = DataCollector { dName = CPUload.dcName
                                  , dCategory = CPUload.dcCategory
                                  , dMkReport = mkCpuReport
                                  , dUse = useNodeTotalCPU
                                  }
-- * Xen instance CPU-usage collector
-- | Parse results of the Xen-Cpu-load data collector.
mkXenCpuReport :: DCReport -> Maybe Report
mkXenCpuReport =
liftM InstanceCpuReport . maybeParseMap . dcReportData
-- | Update cluster data based on the per-instance CPU usage
-- reports.
useInstanceCpuData :: [(Node.Node, Report)]
                   -> (Node.List, Instance.List)
                   -> Result (Node.List, Instance.List)
useInstanceCpuData reports (nl, il) = do
  -- Only per-instance CPU reports carry usable data here.
  let toMap (InstanceCpuReport m) = Just m
      toMap _ = Nothing
  let usage = Map.unions $ mapMaybe (toMap . snd) reports
      missingData = (Set.fromList . map Instance.name $ IntMap.elems il)
                    Set.\\ Map.keysSet usage
  -- Fail early unless every instance has CPU information.
  unless (Set.null missingData)
    . Bad . (++) "No CPU information available for "
    . show $ Set.elems missingData
  -- Copy the observed per-instance CPU usage into each instance's
  -- dynamic-utilisation record (leaving it unchanged if absent).
  let updateInstance inst =
        let cpu = Map.lookup (Instance.name inst) usage
            dynU = Instance.util inst
            dynU' = maybe dynU (\c -> dynU { cpuWeight = c }) cpu
        in inst { Instance.util = dynU' }
  let il' = IntMap.map updateInstance il
  -- A node's CPU load is the sum of the loads of its primary instances.
  let updateNode node =
        let cpu = sum
                  . map (\ idx -> maybe 0 (cpuWeight . Instance.util)
                                  $ IntMap.lookup idx il')
                  $ Node.pList node
            dynU = Node.utilLoad node
            dynU' = dynU { cpuWeight = cpu }
        in node { Node.utilLoad = dynU' }
  let nl' = IntMap.map updateNode nl
  return (nl', il')
-- | Collector for per-instance CPU data as observed by Xen; uses the
-- per-instance usage reports to derive both instance and node load.
xenCPUCollector :: DataCollector
xenCPUCollector = DataCollector { dName = XenCpuLoad.dcName
                                , dCategory = XenCpuLoad.dcCategory
                                , dMkReport = mkXenCpuReport
                                , dUse = useInstanceCpuData
                                }
-- * Collector choice

-- | The list of Data Collectors used by hail and hbal: none when
-- dynamic utilisation is ignored, otherwise exactly one collector,
-- chosen by the MonD-Xen option.
collectors :: Options -> [DataCollector]
collectors opts
  | optIgnoreDynu opts = []
  | otherwise =
      [ if optMonDXen opts then xenCPUCollector else totalCPUCollector ]
-- * Querying infrastructure

-- | Run a Data Collector's report parser over an optional raw report,
-- yielding 'Nothing' when either the raw report is missing or it does
-- not parse.
mkReport :: DataCollector -> Maybe DCReport -> Maybe Report
mkReport dc mRaw = mRaw >>= dMkReport dc
-- | MonD data as parsed from a mock file: a node name together with
-- the list of reports produced by that node's MonD Data Collectors.
type MonDData = (String, [DCReport])
-- | A map from node name to that node's Data Collector reports.
type MapMonDData = Map.Map String [DCReport]
-- | Look up, in the mock-data map, the report produced by the given
-- Data Collector for the given node, if any.
fromFile :: DataCollector -> Node.Node -> MapMonDData -> Maybe DCReport
fromFile dc node m =
  Map.lookup (Node.name node) m >>= L.find ((dName dc ==) . dcReportName)
-- | Render a collector category as its name, falling back to
-- "default" for uncategorised collectors.
getDCCName :: Maybe DCCategory -> String
getDCCName = maybe "default" getCategoryName
-- | Build the URL under which a node's MonD serves the latest report
-- of a single collector: @host:port/version/report/category/name@.
prepareUrl :: DataCollector -> Node.Node -> URLString
prepareUrl dc node = concat
  [ Node.name node, ":", show C.defaultMondPort
  , "/", show C.mondLatestApiVersion, "/report/"
  , getDCCName (dCategory dc), "/", dName dc
  ]
-- | Query a specified MonD for a Data Collector.
--
-- All failure modes (connection error, non-OK curl code, undecodable
-- JSON) are mapped to 'Nothing'; only the contact failure is logged.
fromCurl :: DataCollector -> Node.Node -> IO (Maybe DCReport)
fromCurl dc node = do
  -- The bang forces the body so the HTTP exchange completes here.
  (code, !body) <- curlGetString (prepareUrl dc node) []
  case code of
    CurlOK ->
      case J.decodeStrict body :: J.Result DCReport of
        J.Ok r -> return $ Just r
        J.Error _ -> return Nothing
    _ -> do
      logWarning $ "Failed to contact node's " ++ Node.name node
                   ++ " MonD for DC " ++ dName dc
      return Nothing
-- | Parse a node's JSON record: the \"node\" field gives the node
-- name, the \"reports\" field the list of its collector reports.
pMonDN :: JSRecord -> Result MonDData
pMonDN a = do
  node <- tryFromObj "Parsing node's name" a "node"
  reports <- tryFromObj "Parsing node's reports" a "reports"
  return (node, reports)
-- | Parse the full MonD mock-data file: a JSON array of per-node
-- records, each handled by 'pMonDN'.
pMonDData :: String -> Result [MonDData]
pMonDData input =
  mapM (pMonDN . J.fromJSObject) =<< loadJSArray "Parsing MonD's answer" input
-- | Obtain one parsed report for one node: taken from the mock-data
-- map when one is supplied, otherwise by querying the node's MonD
-- over HTTP.
queryAMonD :: Maybe MapMonDData -> DataCollector -> Node.Node
           -> IO (Maybe Report)
queryAMonD m dc node =
  fmap (mkReport dc) $ maybe (fromCurl dc node) (return . fromFile dc node) m
-- | Query all MonDs for a single Data Collector. Return the updated
-- cluster, as well as a bit indicating whether the collector succeeded.
queryAllMonDs :: Maybe MapMonDData -> (Node.List, Instance.List)
              -> DataCollector -> WriterT All IO (Node.List, Instance.List)
queryAllMonDs m (nl, il) dc = do
  elems <- liftIO $ mapM (queryAMonD m dc) (Container.elems nl)
  let elems' = catMaybes elems
  -- Only use the data when every node answered; otherwise fall back
  -- to the unmodified cluster and record the failure in the writer.
  if length elems == length elems'
    then
      let results = zip (Container.elems nl) elems'
      in case dUse dc results (nl, il) of
           Ok (nl', il') -> return (nl', il')
           Bad s -> do
             logWarning s
             tell $ All False
             return (nl, il)
    else do
      logWarning $ "Didn't receive an answer by all MonDs, " ++ dName dc
                   ++ "'s data will be ignored."
      tell $ All False
      return (nl,il)
-- | Query all MonDs for all Data Collectors. Return the cluster
-- enriched by dynamic data, as well as a bit indicating whether all
-- collectors could be queried successfully.
queryAllMonDDCs :: ClusterData -> Options -> WriterT All IO ClusterData
queryAllMonDDCs cdata opts = do
  -- When a MonD mock file is given, parse it once up front; a parse
  -- failure aborts the program via 'exitIfBad'.
  map_mDD <-
    case optMonDFile opts of
      Nothing -> return Nothing
      Just fp -> do
        monDData_contents <- liftIO $ readFile fp
        monDData <- liftIO . exitIfBad "can't parse MonD data"
                    . pMonDData $ monDData_contents
        return . Just $ Map.fromList monDData
  let (ClusterData _ nl il _ _) = cdata
  -- Fold each configured collector over the cluster in turn.
  (nl', il') <- foldM (queryAllMonDs map_mDD) (nl, il) (collectors opts)
  return $ cdata {cdNodes = nl', cdInstances = il'}
|
mbakke/ganeti
|
src/Ganeti/HTools/Backend/MonD.hs
|
bsd-2-clause
| 11,551
| 0
| 23
| 2,849
| 2,552
| 1,346
| 1,206
| 194
| 3
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.FetchUtils
-- Copyright : (c) David Himmelstrup 2005
-- Duncan Coutts 2011
-- License : BSD-like
--
-- Maintainer : cabal-devel@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- Functions for fetching packages
-----------------------------------------------------------------------------
{-# LANGUAGE RecordWildCards #-}
module Distribution.Client.FetchUtils (
-- * fetching packages
fetchPackage,
isFetched,
checkFetched,
-- ** specifically for repo packages
checkRepoTarballFetched,
fetchRepoTarball,
-- ** fetching packages asynchronously
asyncFetchPackages,
waitAsyncFetchPackage,
AsyncFetchMap,
-- * fetching other things
downloadIndex,
) where
import Distribution.Client.Types
import Distribution.Client.HttpUtils
( downloadURI, isOldHackageURI, DownloadResult(..)
, HttpTransport(..), transportCheckHttps, remoteRepoCheckHttps )
import Distribution.Package
( PackageId, packageName, packageVersion )
import Distribution.Simple.Utils
( notice, info, debug, setupMessage )
import Distribution.Text
( display )
import Distribution.Verbosity
( Verbosity, verboseUnmarkOutput )
import Distribution.Client.GlobalFlags
( RepoContext(..) )
import Data.Maybe
import Data.Map (Map)
import qualified Data.Map as Map
import Control.Monad
import Control.Exception
import Control.Concurrent.Async
import Control.Concurrent.MVar
import System.Directory
( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory )
import System.IO
( openTempFile, hClose )
import System.FilePath
( (</>), (<.>) )
import qualified System.FilePath.Posix as FilePath.Posix
( combine, joinPath )
import Network.URI
( URI(uriPath) )
import qualified Hackage.Security.Client as Sec
-- ------------------------------------------------------------
-- * Actually fetch things
-- ------------------------------------------------------------
-- | Returns @True@ if the package has already been fetched
-- or does not need fetching.
--
isFetched :: UnresolvedPkgLoc -> IO Bool
isFetched loc = case loc of
    -- Locally available sources never need fetching.
    LocalUnpackedPackage _dir       -> return True
    LocalTarballPackage  _file      -> return True
    -- A remote tarball counts as fetched once a local copy is recorded.
    RemoteTarballPackage _uri local -> return (isJust local)
    -- A repo tarball is fetched iff its cached file exists on disk.
    RepoTarballPackage repo pkgid _ -> doesFileExist (packageFile repo pkgid)
-- | Checks if the package has already been fetched (or does not need
-- fetching) and if so returns evidence in the form of a 'PackageLocation'
-- with a resolved local file location.
--
checkFetched :: UnresolvedPkgLoc
             -> IO (Maybe ResolvedPkgLoc)
checkFetched loc = case loc of
    LocalUnpackedPackage dir ->
      return (Just $ LocalUnpackedPackage dir)
    LocalTarballPackage file ->
      return (Just $ LocalTarballPackage file)
    RemoteTarballPackage uri (Just file) ->
      return (Just $ RemoteTarballPackage uri file)
    RepoTarballPackage repo pkgid (Just file) ->
      return (Just $ RepoTarballPackage repo pkgid file)
    RemoteTarballPackage _uri Nothing -> return Nothing
    -- A repo tarball may be in the on-disk cache even when the
    -- location itself carries no path, so check the cache too.
    RepoTarballPackage repo pkgid Nothing ->
      fmap (fmap (RepoTarballPackage repo pkgid))
           (checkRepoTarballFetched repo pkgid)
-- | Like 'checkFetched' but for the specific case of a
-- 'RepoTarballPackage': the package is considered fetched exactly
-- when its cached tarball exists on disk.
--
checkRepoTarballFetched :: Repo -> PackageId -> IO (Maybe FilePath)
checkRepoTarballFetched repo pkgid = do
    let tarball = packageFile repo pkgid
    present <- doesFileExist tarball
    return (if present then Just tarball else Nothing)
-- | Fetch a package if we don't have it already.
--
fetchPackage :: Verbosity
             -> RepoContext
             -> UnresolvedPkgLoc
             -> IO ResolvedPkgLoc
-- Already-local locations are returned unchanged; only the last two
-- alternatives perform network I/O.
fetchPackage verbosity repoCtxt loc = case loc of
    LocalUnpackedPackage dir ->
      return (LocalUnpackedPackage dir)
    LocalTarballPackage file ->
      return (LocalTarballPackage file)
    RemoteTarballPackage uri (Just file) ->
      return (RemoteTarballPackage uri file)
    RepoTarballPackage repo pkgid (Just file) ->
      return (RepoTarballPackage repo pkgid file)
    RemoteTarballPackage uri Nothing -> do
      path <- downloadTarballPackage uri
      return (RemoteTarballPackage uri path)
    RepoTarballPackage repo pkgid Nothing -> do
      local <- fetchRepoTarball verbosity repoCtxt repo pkgid
      return (RepoTarballPackage repo pkgid local)
  where
    -- Download an arbitrary remote tarball into a fresh temp file.
    downloadTarballPackage uri = do
      transport <- repoContextGetTransport repoCtxt
      transportCheckHttps verbosity transport uri
      notice verbosity ("Downloading " ++ show uri)
      tmpdir <- getTemporaryDirectory
      (path, hnd) <- openTempFile tmpdir "cabal-.tar.gz"
      -- The handle is only needed to reserve the name; downloadURI
      -- writes to the path itself.
      hClose hnd
      _ <- downloadURI transport verbosity uri path
      return path
-- | Fetch a repo package if we don't have it already.
--
-- Returns the path of the cached tarball in all cases.
fetchRepoTarball :: Verbosity -> RepoContext -> Repo -> PackageId -> IO FilePath
fetchRepoTarball verbosity repoCtxt repo pkgid = do
  fetched <- doesFileExist (packageFile repo pkgid)
  if fetched
    then do info verbosity $ display pkgid ++ " has already been downloaded."
            return (packageFile repo pkgid)
    else do setupMessage verbosity "Downloading" pkgid
            downloadRepoPackage
  where
    downloadRepoPackage = case repo of
      -- Local repos keep their tarballs in place; nothing to download.
      RepoLocal{..} -> return (packageFile repo pkgid)
      RepoRemote{..} -> do
        transport <- repoContextGetTransport repoCtxt
        remoteRepoCheckHttps verbosity transport repoRemote
        let uri = packageURI repoRemote pkgid
            dir = packageDir repo pkgid
            path = packageFile repo pkgid
        createDirectoryIfMissing True dir
        _ <- downloadURI transport verbosity uri path
        return path
      -- Secure repos go through hackage-security, which verifies the
      -- download; client errors are unchecked here by design.
      RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \rep -> do
        let dir = packageDir repo pkgid
            path = packageFile repo pkgid
        createDirectoryIfMissing True dir
        Sec.uncheckClientErrors $ do
          info verbosity ("writing " ++ path)
          Sec.downloadPackage' rep pkgid path
        return path
-- | Downloads an index file to [config-dir/packages/serv-id] without
-- hackage-security. You probably don't want to call this directly;
-- use 'updateRepo' instead.
--
downloadIndex :: HttpTransport -> Verbosity -> RemoteRepo -> FilePath -> IO DownloadResult
downloadIndex transport verbosity remoteRepo cacheDir = do
  remoteRepoCheckHttps verbosity transport remoteRepo
  -- Fetch <repo-uri>/00-index.tar.gz into <cacheDir>/00-index.tar.gz.
  let uri = (remoteRepoURI remoteRepo) {
        uriPath = uriPath (remoteRepoURI remoteRepo)
                    `FilePath.Posix.combine` "00-index.tar.gz"
      }
      path = cacheDir </> "00-index" <.> "tar.gz"
  -- Make sure the cache directory exists before writing into it.
  createDirectoryIfMissing True cacheDir
  downloadURI transport verbosity uri path
-- ------------------------------------------------------------
-- * Async fetch wrapper utilities
-- ------------------------------------------------------------
type AsyncFetchMap = Map UnresolvedPkgLoc
(MVar (Either SomeException ResolvedPkgLoc))
-- | Fork off an async action to download the given packages (by location).
--
-- The downloads are initiated in order, so you can arrange for packages that
-- will likely be needed sooner to be earlier in the list.
--
-- The body action is passed a map from those packages (identified by their
-- location) to a completion var for that package. So the body action should
-- lookup the location and use 'asyncFetchPackage' to get the result.
--
asyncFetchPackages :: Verbosity
                   -> RepoContext
                   -> [UnresolvedPkgLoc]
                   -> (AsyncFetchMap -> IO a)
                   -> IO a
asyncFetchPackages verbosity repoCtxt pkglocs body = do
  --TODO: [nice to have] use parallel downloads?
  -- One completion MVar per requested location, preserving list order.
  asyncDownloadVars <- sequence [ do v <- newEmptyMVar
                                     return (pkgloc, v)
                                | pkgloc <- pkglocs ]
  let fetchPackages :: IO ()
      fetchPackages =
        -- Downloads run sequentially in a single background thread;
        -- each result (or exception) is published via its MVar.
        forM_ asyncDownloadVars $ \(pkgloc, var) -> do
          -- Suppress marking here, because 'withAsync' means
          -- that we get nondeterministic interleaving
          result <- try $ fetchPackage (verboseUnmarkOutput verbosity)
                                       repoCtxt pkgloc
          putMVar var result
  -- 'withAsync' cancels the downloader if 'body' finishes (or throws)
  -- before all downloads complete.
  withAsync fetchPackages $ \_ ->
    body (Map.fromList asyncDownloadVars)
-- | Expect to find a download in progress in the given 'AsyncFetchMap'
-- and wait on it to finish.
--
-- If the download failed with an exception then this will be thrown.
--
-- Note: This function is supposed to be idempotent, as our install plans
-- can now use the same tarball for many builds, e.g. different
-- components and/or qualified goals, and these all go through the
-- download phase so we end up using 'waitAsyncFetchPackage' twice on
-- the same package. C.f. #4461.
-- | Block until the download registered for the given location has
-- finished, rethrowing its exception if it failed.  Safe to call more
-- than once for the same location ('readMVar' does not empty the var).
waitAsyncFetchPackage :: Verbosity
                      -> AsyncFetchMap
                      -> UnresolvedPkgLoc
                      -> IO ResolvedPkgLoc
waitAsyncFetchPackage verbosity downloadMap srcloc =
    case Map.lookup srcloc downloadMap of
      Nothing  -> fail "waitAsyncFetchPackage: package not being downloaded"
      Just var -> do
        debug verbosity ("Waiting for download of " ++ show srcloc)
        outcome <- readMVar var
        either throwIO return outcome
-- ------------------------------------------------------------
-- * Path utilities
-- ------------------------------------------------------------
-- | Generate the full path to the locally cached copy of
-- the tarball for a given @PackageIdentifer@.
--
-- | Full path of the locally cached tarball for a package id:
-- @\<packageDir\>/\<pkgid\>.tar.gz@.
packageFile :: Repo -> PackageId -> FilePath
packageFile repo pkgid = packageDir repo pkgid </> tarballName
  where
    tarballName = display pkgid <.> "tar.gz"
-- | Generate the full path to the directory where the local cached copy of
-- the tarball for a given @PackageIdentifer@ is stored.
--
-- | Directory holding the local cached tarball of a package:
-- @\<repo-dir\>/\<name\>/\<version\>@.
packageDir :: Repo -> PackageId -> FilePath
packageDir repo pkgid =
    repoLocalDir repo
      </> display (packageName pkgid)
      </> display (packageVersion pkgid)
-- | Generate the URI of the tarball for a given package.
--
packageURI :: RemoteRepo -> PackageId -> URI
-- Old-style Hackage layout: the tarball lives under
-- <repo>/<name>/<version>/<pkgid>.tar.gz.
packageURI repo pkgid | isOldHackageURI (remoteRepoURI repo) =
  (remoteRepoURI repo) {
    uriPath = FilePath.Posix.joinPath
                [uriPath (remoteRepoURI repo)
                ,display (packageName pkgid)
                ,display (packageVersion pkgid)
                ,display pkgid <.> "tar.gz"]
  }
-- Current layout: <repo>/package/<pkgid>.tar.gz.
packageURI repo pkgid =
  (remoteRepoURI repo) {
    uriPath = FilePath.Posix.joinPath
                [uriPath (remoteRepoURI repo)
                ,"package"
                ,display pkgid <.> "tar.gz"]
  }
|
mydaum/cabal
|
cabal-install/Distribution/Client/FetchUtils.hs
|
bsd-3-clause
| 10,919
| 0
| 19
| 2,606
| 2,053
| 1,057
| 996
| 186
| 6
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fil-PH">
<title>SAML Support</title>
<maps>
<homeID>saml</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/saml/src/main/javahelp/help_fil_PH/helpset_fil_PH.hs
|
apache-2.0
| 959
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
{-# LANGUAGE FlexibleContexts, TypeFamilies #-}
module T8978 where
import Data.Kind (Type)
-- | Synonym of the associated type; 'foo' uses it to exercise
-- expansion of synonyms over associated type families.
type Syn a = Associated a

class Eq (Associated a) => Foo a where
  type Associated a :: Type
  foo :: a -> Syn a -> Bool

-- | @Associated () = Int@ has an 'Eq' instance, so the superclass
-- constraint is satisfied and @x == x@ type-checks.
instance Foo () where
  type Associated () = Int
  foo _ x = x == x
|
sdiehl/ghc
|
testsuite/tests/indexed-types/should_compile/T8978.hs
|
bsd-3-clause
| 292
| 0
| 9
| 74
| 105
| 57
| 48
| -1
| -1
|
{-|
Purely functional top-down splay sets.
* D.D. Sleator and R.E. Rarjan,
\"Self-Adjusting Binary Search Tree\",
Journal of the Association for Computing Machinery,
Vol 32, No 3, July 1985, pp 652-686.
<http://www.cs.cmu.edu/~sleator/papers/self-adjusting.pdf>
-}
module Data.Set.Splay (
-- * Data structures
Splay(..)
-- * Creating sets
, empty
, singleton
, insert
, fromList
-- * Converting a list
, toList
-- * Membership
, member
-- * Deleting
, delete
, deleteMin
, deleteMax
-- * Checking
, null
-- * Set operations
, union
, intersection
, difference
-- * Helper functions
, split
, minimum
, maximum
, valid
, (===)
, showSet
, printSet
) where
import Data.List (foldl')
import Prelude hiding (minimum, maximum, null)
import Language.Haskell.Liquid.Prelude
----------------------------------------------------------------
-- LIQUID left depends on value, so their order had to be changed
{-@
data Splay a <l :: root:a -> a -> Bool, r :: root:a -> a -> Bool>
= Node (value :: a)
(left :: Splay <l, r> (a <l value>))
(right :: Splay <l, r> (a <r value>))
| Leaf
@-}
data Splay a = Leaf | Node a (Splay a) (Splay a) deriving Show
{-@ type OSplay a = Splay <{v:a | v < root}, {v:a | v > root}> a @-}
{-@ type MinSPair a = (a, OSplay a) <{v : Splay {v:a|v>fld} | 0=0}> @-}
{-@ type MinEqSPair a = (a, OSplay a) <{v : Splay {v:a|v>=fld}| 0=0}> @-}
{-@ type MaxSPair a = (a, OSplay a) <{v : Splay {v:a|v<fld} | 0=0}> @-}
{-@ type MaxEqSPair a = (a, OSplay a) <{v : Splay {v:a|v<=fld}| 0=0}> @-}
instance (Eq a) => Eq (Splay a) where
  -- Extensional equality: two sets are equal when they contain the
  -- same elements, regardless of tree shape.
  t1 == t2 = toList t1 == toList t2
{-| Checking if two splay sets are exactly the same shape.
-}
-- Structural equality, in contrast to the 'Eq' instance above.
(===) :: Eq a => Splay a -> Splay a -> Bool
Leaf === Leaf = True
(Node x1 l1 r1) === (Node x2 l2 r2) = x1 == x2 && l1 === l2 && r1 === r2
_ === _ = False
----------------------------------------------------------------
{-| Splitting smaller and bigger with splay.
Since this is a set implementation, members must be unique.
-}
{-@ split :: Ord a => x:a -> OSplay a
-> (OSplay {v:a | v<x}, Bool, OSplay {v:a | v>x})
@-}
split :: Ord a => a -> Splay a -> (Splay a, Bool, Splay a)
-- Top-down splay (Sleator/Tarjan): the Bool reports whether @k@ was
-- present; the two result trees hold all smaller resp. bigger
-- elements, restructured according to the zig / zig-zig / zig-zag
-- cases below.
split _ Leaf = (Leaf,False,Leaf)
split k (Node xk xl xr) = case compare k xk of
    EQ -> (xl, True, xr)
    GT -> case xr of
      Leaf -> (Node xk xl Leaf, False, Leaf)
      Node yk yl yr -> case compare k yk of
        EQ -> (Node xk xl yl, True, yr)             -- R  :zig
        GT -> let (lt, b, gt) = split k yr          -- RR :zig zig
              in (Node yk (Node xk xl yl) lt, b, gt)
        LT -> let (lt, b, gt) = split k yl
              in (Node xk xl lt, b, Node yk gt yr)  -- RL :zig zag
    LT -> case xl of
      Leaf -> (Leaf, False, Node xk Leaf xr)
      Node yk yl yr -> case compare k yk of
        EQ -> (yl, True, Node xk yr xr)             -- L  :zig
        GT -> let (lt, b, gt) = split k yr          -- LR :zig zag
              in (Node yk yl lt, b, Node xk gt xr)
        LT -> let (lt, b, gt) = split k yl          -- LL :zig zig
              in (lt, b, Node yk gt (Node xk yr xr))
----------------------------------------------------------------
{-| Empty set.
-}
{-@ empty :: OSplay a @-}
empty :: Splay a
empty = Leaf
{-|
See if the splay set is empty.  O(1).

>>> Data.Set.Splay.null empty
True
>>> Data.Set.Splay.null (singleton 1)
False
-}
null :: Splay a -> Bool
null Leaf = True
null _ = False
{-| Singleton set.  O(1).
-}
{-@ singleton :: a -> OSplay a @-}
singleton :: a -> Splay a
singleton x = Node x Leaf Leaf
----------------------------------------------------------------
{-| Insertion.
>>> insert 5 (fromList [5,3]) == fromList [3,5]
True
>>> insert 7 (fromList [5,3]) == fromList [3,5,7]
True
>>> insert 5 empty == singleton 5
True
-}
{-@ insert :: Ord a => a -> OSplay a -> OSplay a @-}
-- Split the tree around @x@ and make @x@ the new root; if @x@ was
-- already present it simply becomes the root again, so the set stays
-- duplicate-free.
insert :: Ord a => a -> Splay a -> Splay a
insert x t = Node x smaller bigger
  where
    (smaller, _, bigger) = split x t
----------------------------------------------------------------
{-| Creating a set from a list.
>>> empty == fromList []
True
>>> singleton 'a' == fromList ['a']
True
>>> fromList [5,3,5] == fromList [5,3]
True
-}
{-@ fromList :: Ord a => [a] -> OSplay a @-}
-- Strict left fold inserting every element into the empty set.
fromList :: Ord a => [a] -> Splay a
fromList = foldl' (\acc x -> insert x acc) empty
----------------------------------------------------------------
{-| Creating a list from a set.
>>> toList (fromList [5,3])
[3,5]
>>> toList empty
[]
-}
-- In-order traversal with an accumulator list, so each node costs
-- only a single cons.
toList :: Splay a -> [a]
toList t = go t []
  where
    go Leaf acc = acc
    go (Node v l r) acc = go l (v : go r acc)
----------------------------------------------------------------
{-| Checking if this element is a member of a set?
>>> fst $ member 5 (fromList [5,3])
True
>>> fst $ member 1 (fromList [5,3])
False
-}
{- member :: Ord a => a -> OSplay a -> (Bool, OSplay a) @-}
-- Membership test that also returns the re-splayed tree (splay trees
-- restructure even on lookups, hence the tree in the result).
member :: Ord a => a -> Splay a -> (Bool, Splay a)
member x t = case split x t of
    (l,True,r) -> (True, Node x l r)  -- found: x becomes the new root
    (Leaf,_,r) -> (False, r)          -- absent, no smaller part
    (l,_,Leaf) -> (False, l)          -- absent, no bigger part
    (l,_,r) -> let (m,l') = deleteMax l
               in (False, Node m l' r) -- rejoin with x's predecessor as root
----------------------------------------------------------------
{-| Finding the minimum element.
>>> fst $ minimum (fromList [3,5,1])
1
>>> minimum empty
*** Exception: minimum
-}
{-@ minimum :: OSplay a -> MinEqSPair a @-}
-- Partial by design: errors on the empty set (see doctest above).
-- The returned tree has the minimum splayed to the root.
minimum :: Splay a -> (a, Splay a)
minimum Leaf = error "minimum"
minimum t = let (x,mt) = deleteMin t in (x, Node x Leaf mt)
{-| Finding the maximum element.

>>> fst $ maximum (fromList [3,5,1])
5
>>> maximum empty
*** Exception: maximum
-}
{-@ maximum :: OSplay a -> MaxEqSPair a @-}
-- Dual of 'minimum'; also partial on the empty set.
maximum :: Splay a -> (a, Splay a)
maximum Leaf = error "maximum"
maximum t = let (x,mt) = deleteMax t in (x, Node x mt Leaf)
----------------------------------------------------------------
{-| Deleting the minimum element.
>>> snd (deleteMin (fromList [5,3,7])) == fromList [5,7]
True
>>> deleteMin empty
*** Exception: deleteMin
-}
{-@ deleteMin :: OSplay a -> MinSPair a @-}
-- Remove and return the minimum element along with the remaining,
-- partially re-splayed tree.  Partial: errors on the empty set.
deleteMin :: Splay a -> (a, Splay a)
deleteMin Leaf = error "deleteMin"
deleteMin (Node x Leaf r) = (x,r)
deleteMin (Node x (Node lx Leaf lr) r) = (lx, Node x lr r)
deleteMin (Node x (Node lx ll lr) r) = let (k,mt) = deleteMin ll
                                       in (k, Node lx mt (Node x lr r))
{-| Deleting the maximum
>>> snd (deleteMax (fromList [(5,"a"), (3,"b"), (7,"c")])) == fromList [(3,"b"), (5,"a")]
True
>>> deleteMax empty
*** Exception: deleteMax
-}
{-@ deleteMax :: OSplay a -> MaxSPair a @-}
-- | Remove and return the maximum element along with the remaining,
-- partially re-splayed tree.  Mirror image of 'deleteMin'; the result
-- type is spelled @(a, Splay a)@ (instead of the equivalent
-- @(,) a (Splay a)@) for consistency with it.  Partial: errors on the
-- empty set.
deleteMax :: Splay a -> (a, Splay a)
deleteMax Leaf = error "deleteMax"
deleteMax (Node x l Leaf) = (x,l)
deleteMax (Node x l (Node rx rl Leaf)) = (rx, Node x l rl)
deleteMax (Node x l (Node rx rl rr)) = let (k,mt) = deleteMax rr
                                       in (k, Node rx (Node x l rl) mt)
----------------------------------------------------------------
{-| Deleting this element from a set.
>>> delete 5 (fromList [5,3]) == singleton 3
True
>>> delete 7 (fromList [5,3]) == fromList [3,5]
True
>>> delete 5 empty == empty
True
-}
-- Liquid TOPROVE
--  delete :: Ord a => x:a -> OSplay a -> OSplay {v:a| v!=x}
{-@ delete :: Ord a => x:a -> OSplay a -> OSplay a @-}
-- Split around @x@; if it was present, join the two halves, otherwise
-- return the original tree unchanged.
delete :: Ord a => a -> Splay a -> Splay a
delete x t = case split x t of
    (l, True, r) -> union l r
    _ -> t
----------------------------------------------------------------
{-| Creating a union set from two sets.
>>> union (fromList [5,3]) (fromList [5,7]) == fromList [3,5,7]
True
-}
{-@ union :: Ord a => OSplay a -> OSplay a -> OSplay a@-}
union :: Ord a => Splay a -> Splay a -> Splay a
union Leaf t = t
-- Split the second tree around the root of the first and recurse on
-- the matching (smaller/bigger) halves.
union (Node x a b) t = Node x (union ta a) (union tb b)
  where
    (ta,_,tb) = split x t
{-| Creating a intersection set from sets.
>>> intersection (fromList [5,3]) (fromList [5,7]) == singleton 5
True
-}
{-@ intersection :: Ord a => OSplay a -> OSplay a -> OSplay a @-}
intersection :: Ord a => Splay a -> Splay a -> Splay a
intersection Leaf _ = Leaf
intersection _ Leaf = Leaf
-- Split the first tree around the second tree's root; keep that root
-- only when it occurs in both trees, then recurse on matching halves.
intersection t1 (Node x l r) = case split x t1 of
    (l', True, r') -> Node x (intersection l' l) (intersection r' r)
    (l', False, r') -> union (intersection l' l) (intersection r' r)
{-| Creating a difference set from sets.
>>> difference (fromList [5,3]) (fromList [5,7]) == singleton 3
True
-}
{-@ difference :: Ord a => OSplay a -> OSplay a -> OSplay a @-}
difference :: Ord a => Splay a -> Splay a -> Splay a
difference Leaf _ = Leaf
difference t1 Leaf = t1
-- Drop the second tree's root from the first (split discards it) and
-- recurse on the matching halves.
difference t1 (Node x l r) = union (difference l' l) (difference r' r)
  where
    (l',_,r') = split x t1
----------------------------------------------------------------
-- Basic operations
----------------------------------------------------------------
{-| Checking validity of a set.
-}
valid :: Ord a => Splay a -> Bool
valid = isOrdered

isOrdered :: Ord a => Splay a -> Bool
-- A set is well-formed when its inorder listing is strictly ascending
-- (fully ordered and duplicate-free).
isOrdered t = strictlyAscending (toList t)
  where
    -- Vacuously true for fewer than two elements.
    strictlyAscending xs = and (zipWith (<) xs (drop 1 xs))
-- | Render a set as an indented tree, starting with no indentation.
showSet :: Show a => Splay a -> String
showSet = showSet' ""

-- | Pre-order rendering: the root value on its own line, then both
-- children prefixed with @+ @ and indented two more spaces per level.
showSet' :: Show a => String -> Splay a -> String
showSet' _ Leaf = "\n"
showSet' pref (Node x l r) = show x ++ "\n"
                          ++ pref ++ "+ " ++ showSet' pref' l
                          ++ pref ++ "+ " ++ showSet' pref' r
  where
    pref' = "  " ++ pref

-- | Print the rendering of 'showSet' to stdout.
printSet :: Show a => Splay a -> IO ()
printSet = putStr . showSet
{-
Demo: http://www.link.cs.cmu.edu/splay/
Paper: http://www.cs.cmu.edu/~sleator/papers/self-adjusting.pdf
TopDown: http://www.cs.umbc.edu/courses/undergraduate/341/fall02/Lectures/Splay/TopDownSplay.ppt
Blog: http://chasen.org/~daiti-m/diary/?20061223
http://www.geocities.jp/m_hiroi/clisp/clispb07.html
fromList minimum delMin member
Balanced Tree  N log N   log N           log N           log N
Skew Heap N log N 1 log N(???) N/A
Splay Heap N log N or A(N)? log N or A(N)? log N or A(N)?
-}
|
mightymoose/liquidhaskell
|
benchmarks/llrbtree-0.1.1/Data/Set/Splay.hs
|
bsd-3-clause
| 10,365
| 0
| 19
| 2,798
| 2,476
| 1,303
| 1,173
| 129
| 9
|
module Signature (plugin) where
-- This plugin replaces $SIG$ with the username and timestamp
-- of the last edit, prior to saving the page in the repository.
import Network.Gitit.Interface
import Data.DateTime (getCurrentTime, formatDateTime)
-- | Register 'replacedate' to transform page text just before it is
-- committed to the repository (cf. the module comment above).
plugin :: Plugin
plugin = PreCommitTransform replacedate
-- | Replace every occurrence of the literal @$SIG$@ with a signature
-- of the form @-- user (timestamp)@, leaving all other text intact.
-- Falls back to "???" when no user is logged in.
replacedate :: String -> PluginM String
replacedate [] = return ""
replacedate ('$':'S':'I':'G':'$':rest) = do
  now <- liftIO getCurrentTime
  mbUser <- askUser
  let author = maybe "???" uUsername mbUser
      sig = "-- " ++ author ++ " (" ++ formatDateTime "%c" now ++ ")"
  rest' <- replacedate rest
  return (sig ++ rest')
replacedate (c:cs) = fmap (c :) (replacedate cs)
|
bergmannf/gitit
|
plugins/Signature.hs
|
gpl-2.0
| 745
| 0
| 13
| 162
| 225
| 116
| 109
| 16
| 2
|
{-# LANGUAGE TypeFamilies #-}
module T11381 where
-- ensure that this code does not compile without TypeFamilyDependencies and that
-- injectivity error is not reported.
type family F a = r | r -> a
-- The two instances below deliberately conflict (F Int cannot be both
-- Bool and Char); the test checks which error GHC reports -- see the
-- module comment above.  Do not "fix" the duplication.
type instance F Int = Bool
type instance F Int = Char
|
sdiehl/ghc
|
testsuite/tests/driver/T11381.hs
|
bsd-3-clause
| 255
| 0
| 4
| 49
| 41
| 28
| 13
| 5
| 0
|
import StackTest
-- | Integration test entry point: succeeds exactly when @stack build@
-- fails for the package under test.
main :: IO ()
main = stackErr ["build"]
|
juhp/stack
|
test/integration/tests/drop-packages/Main.hs
|
bsd-3-clause
| 58
| 0
| 6
| 11
| 25
| 13
| 12
| 3
| 1
|
{-
Counting Sundays
Problem 19
You are given the following information, but you may prefer to do some research for yourself.
1 Jan 1900 was a Monday.
Thirty days has September,
April, June and November.
All the rest have thirty-one,
Saving February alone,
Which has twenty-eight, rain or shine.
And on leap years, twenty-nine.
A leap year occurs on any year evenly divisible by 4, but not on a century unless it is divisible by 400.
How many Sundays fell on the first of the month during the twentieth century (1 Jan 1901 to 31 Dec 2000)?
-}
-- Calendar model: plain Int synonyms plus a Date ordered
-- lexicographically (year, month, day), which matches chronology.
type Year = Int
type Day = Int
type Month = Int
data Date = Date Year Month Day deriving (Eq, Ord, Show)
data WeekDay = Monday | Tuesday | Wednesday | Thursday | Friday | Saturday | Sunday deriving (Enum, Eq, Show)

-- | Gregorian leap-year rule: divisible by 400, or divisible by 4 but
-- not by 100.
isLeapYear :: Year -> Bool
isLeapYear y = divisibleBy 400 || (divisibleBy 4 && not (divisibleBy 100))
  where
    divisibleBy n = y `mod` n == 0

-- | The calendar day following a valid date.
next :: Date -> Date
next (Date y m d)
  | d < lastDay = Date y m (d + 1)   -- still inside the month
  | m == 12     = Date (y + 1) 1 1   -- Dec 31st rolls over the year
  | otherwise   = Date y (m + 1) 1   -- otherwise roll over the month
  where
    lastDay
      | m == 2                 = if isLeapYear y then 29 else 28
      | m `elem` [4, 6, 9, 11] = 30
      | otherwise              = 31

-- | The anchor given in the problem statement: a known Monday.
initialMonday :: Date
initialMonday = Date 1900 1 1

-- | Every date from the anchor onwards (infinite).
calendar :: [Date]
calendar = iterate next initialMonday

-- | The days of the week repeated forever, starting on Monday.
weekdays :: [WeekDay]
weekdays = cycle [Monday .. Sunday]

-- | Each date paired with its day of the week.
days :: [(WeekDay, Date)]
days = zip weekdays calendar

-- | Only the entries inside the twentieth century (1901-2000); the
-- 'takeWhile' keeps the list finite.
period :: [(WeekDay, Date)]
period =
  [ entry
  | entry@(_, date) <- takeWhile ((<= Date 2000 12 31) . snd) days
  , date >= Date 1901 1 1
  ]

-- | The Sundays falling on the first day of a month.
sundayFirsts :: [(WeekDay, Date)]
sundayFirsts =
  [ entry
  | entry@(weekday, Date _ _ dayOfMonth) <- period
  , weekday == Sunday
  , dayOfMonth == 1
  ]

-- | Project Euler 19: count those Sundays.
euler19 :: Int
euler19 = length sundayFirsts
{-
instance Enum Date where
toEnum date =
let y = (date `div` 10000)
m = (date `div` 100 `mod` 100)
d = (date `mod` 100)
test1 = m `elem` [4, 6, 9, 11] && d > 30
test2 = m == 2 && d > 28 && not (isLeapYear y)
test3 = m >= 1 && m <= 12
in if test1 && test2 && test3
then Date y m d
else InvalidDate date
fromEnum (Date y m d) = y*10000 + m*100 + d
succ date = next date
-}
|
feliposz/project-euler-solutions
|
haskell/euler19original.hs
|
mit
| 2,613
| 0
| 12
| 622
| 627
| 345
| 282
| 29
| 2
|
{-# OPTIONS #-}
-- ------------------------------------------------------------
module Holumbus.Crawler.URIs
where
import qualified Data.StringMap.Strict as S
-- ------------------------------------------------------------
-- | A URI is represented as a String
type URI = String
-- | A URI paired with an 'Int' level (a priority or click level,
-- cf. 'URIsWithLevel').
type URIWithLevel = (URI, Int)
-- | A set of URIs implemented as a prefix tree. This implementation
-- is space efficient, because of many equal prefixes in the crawled set of URIs
type URIs = URIs' ()
type URIsWithLevel = URIs' Int -- URIs with a priority or clicklevel
-- | The underlying representation: a StringMap from URI to payload.
type URIs' a = S.StringMap a
-- ------------------------------------------------------------
-- | The empty URI set/map.
emptyURIs :: URIs' a
emptyURIs = S.empty

-- | A set containing exactly one URI.
singletonURIs :: URI -> URIs
singletonURIs = flip S.singleton ()

-- | A one-element URI map with an attached value.
singletonURIs' :: URI -> a -> URIs' a
singletonURIs' = S.singleton

-- | Is the set empty?
nullURIs :: URIs' a -> Bool
nullURIs = S.null

-- | Is the URI contained in the set?
memberURIs :: URI -> URIs' a -> Bool
memberURIs = S.member

-- | Number of URIs in the set.
cardURIs :: URIs' a -> Int
cardURIs = S.size

-- | The first URI (as enumerated by 'toListURIs'') with its value.
-- WARNING: partial -- 'head' errors on an empty set; callers must
-- check 'nullURIs' first.
nextURI :: URIs' a -> (URI, a)
nextURI = head . toListURIs'

-- | At most @n@ URIs (as enumerated by 'toListURIs'') with values.
nextURIs :: Int -> URIs' a -> [(URI, a)]
nextURIs n = take n . toListURIs'

-- | Insert a URI into a plain URI set.
insertURI :: URI -> URIs -> URIs
insertURI = flip S.insert ()

-- | Insert a URI together with an associated value.
insertURI' :: URI -> a -> URIs' a -> URIs' a
insertURI' = S.insert

-- | Remove a single URI.
deleteURI :: URI -> URIs' a -> URIs' a
deleteURI = S.delete

-- | Remove all URIs of the first set from the second one.
-- Note the argument order: @deleteURIs toRemove from@.
deleteURIs :: URIs' b -> URIs' a -> URIs' a
deleteURIs = flip S.difference

-- | Union of two plain URI sets.
unionURIs :: URIs -> URIs -> URIs
unionURIs = S.union

-- | Union combining the values of duplicate URIs with the given
-- function.
unionURIs' :: (a -> a -> a) -> URIs' a -> URIs' a -> URIs' a
unionURIs' = S.unionWith

-- | Set difference.
diffURIs :: URIs' a -> URIs' a -> URIs' a
diffURIs = S.difference

-- | Build a URI set from a list.
fromListURIs :: [URI] -> URIs
fromListURIs = S.fromList . map (\ x -> (x, ()))

-- | Build a URI map from (URI, value) pairs.
fromListURIs' :: [(URI, a)] -> URIs' a
fromListURIs' = S.fromList

-- | All URIs of the set as a list.
toListURIs :: URIs' a -> [URI]
toListURIs = S.keys -- map fst . S.toList

-- | All (URI, value) pairs of the map as a list.
toListURIs' :: URIs' a -> [(URI, a)]
toListURIs' = S.toList

-- | Right fold over the URIs of a plain set (values are ignored).
foldURIs :: (URI -> b -> b) -> b -> URIs -> b
foldURIs f = S.foldrWithKey (\ x _ r -> f x r)

-- | Right fold over URIs together with their values.
foldURIs' :: (URI -> a -> b -> b) -> b -> URIs' a -> b
foldURIs' = S.foldrWithKey
-- ------------------------------------------------------------
|
ichistmeinname/holumbus
|
src/Holumbus/Crawler/URIs.hs
|
mit
| 2,784
| 0
| 10
| 1,056
| 693
| 376
| 317
| 50
| 1
|
module Y2016.M08.D17.Exercise where
import Data.Aeson
import Data.Set (Set)
import qualified Data.Set as Set
-- the below imported modules are available on this github repository
import Data.MultiMap (MultiMap)
import qualified Data.MultiMap as MM
import Data.Twitter
import Graph.JSON.Cypher.Read.Graphs
import Graph.JSON.Cypher.Read.Tweets
import Y2016.M08.D15.Exercise (twitterGraphUrl)
{--
So we now have tweets from graph data, but they are not connected to any of
the external information about the tweets, such as URLs and hashtags or
people tweeting or liking these tweets. The URLs and hashtags can be parsed
from the tweet-text, but this is the 'hard-way' of doing things, particularly
as twitter provides these data as nodes related to their tweets.
So, today, instead of doing a deep-dive into the related information, let's
focus on the relationships to the tweets.
The GraphJ-type has a relationships value which has (e.g.) this structure:
"relationships":[{"id":"1447",
"type":"USING",
"startNode":"255","endNode":"1000",
"properties":{}}]}}
and from the above import you were able to read these relationships into
Haskell-typed values. Now, let's take the next step.
read in the graph-JSON from the twitterGraphUrl, get the unique set of tweets
and for each tweet, show the associated values (URL, user, hashtag) as raw
values (we'll parse them out later) and how these values are related to the
tweets.
--}
-- first up we want a set of indexed tweets from our graph-JSON
-- recall readGraphJSON twitterGraphUrl gives us the graph-JSON
-- note that I've generalized the Tweet-type in Data.Tweet
-- | Collect the distinct tweets found across a list of graph-JSON values.
-- Per the exercise notes above, the NodeJ @idn@ value serves as the tweet
-- index. Deliberately left 'undefined': this is the exercise to solve.
uniqueTweets :: [GraphJ] -> Set (Tweet String)
uniqueTweets = undefined
{-- For example:
*Y2016.M08.D17.Solution> readGraphJSON twitterGraphUrl ~> tweets
*Y2016.M08.D17.Solution> let unqt = uniqueTweets tweets ~> length ~> 29
*Y2016.M08.D17.Solution> head (Set.toList unqt)
IndexedT {index = "1134", tt = TT {date = 2016-05-20, ...}}
--}
-- use the NodeJ's idn value as the tweet index
-- Now, using the relationships derived from the graph-JSON, create a mapping
-- from (unique) tweets to id's of related data, but also include the kind
-- of the relation to that related id:
-- | Orientation of a relationship edge relative to a tweet node:
-- the edge either leaves the tweet ('GoingTo') or points at it ('ComingFrom').
data Direction = GoingTo | ComingFrom
   deriving (Eq, Ord)
-- | Render a direction as the arrowhead character used when pretty-printing
-- edges: @>@ for outgoing, @<@ for incoming.
instance Show Direction where
   show dir = case dir of
                GoingTo    -> ">"
                ComingFrom -> "<"
-- | A labelled, directed edge endpoint ("dart"): the direction of the
-- relation, its label, and the value (here: node id) at the far end.
data Dart a = Drt Direction Label a
   deriving (Eq, Ord)
-- | Pretty-print a dart as e.g. @ USING> "1400"@ (outgoing) or
-- @ <USING "1400"@ (incoming).
instance Show a => Show (Dart a) where
   show (Drt dir lbl val) =
      case dir of
         GoingTo    -> ' ' : lbl ++ "> " ++ show val
         ComingFrom -> " <" ++ lbl ++ " " ++ show val
-- | Map each (unique) tweet to the set of darts describing its related
-- node ids, derived from the graph-JSON relationships.
-- Deliberately left 'undefined': this is the exercise to solve.
relatedData :: Set (Tweet String) -> [GraphJ]
            -> MultiMap (Tweet String) (Dart String) (Set (Dart String))
relatedData = undefined
{--
For example:
*Y2016.M08.D17.Solution> let reld = relatedData unqt tweets ~> head . MM.toList ~>
(IndexedT {index = "1134", tt = TT {date = 2016-05-20,...}},
fromList [CONTAINS> "1494", REPLY_TO> "1241", TAGS> "1011", USING> "1400"])
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2016/M08/D17/Exercise.hs
|
mit
| 3,011
| 0
| 11
| 553
| 337
| 192
| 145
| 25
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.