code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Matrix(
modelMatrix
, cameraMatrix
, projMatrix
, gridModelMatrix
) where
import Linear
import qualified LambdaCube.Linear as LC
import Game.GoreAndAsh.Math
import Camera
-- | Convert from the @linear@ matrix format to the LambdaCube format.
-- Both are 4x4 row-of-vectors matrices of 'Float'; the bang patterns
-- force every component so no thunks are handed to LambdaCube.
convLC :: M44 Float -> LC.M44F
convLC (V4 !a !b !c !d) = LC.V4 (cv a) (cv b) (cv c) (cv d)
  where
    -- Convert a single row vector.
    cv (V4 !x !y !z !w) = LC.V4 x y z w
-- | Model matrix, maps from local model coords to world coords.
-- The 'Float' parameter (presumably time) is currently ignored: the
-- model is static (identity transform). The commented-out code shows a
-- previously-used rotation.
modelMatrix :: Float -> LC.M44F
modelMatrix _ = convLC identity -- . quatMatrix $ axisAngle (normalize $ V3 1 1 3) 0.1
-- | Grid is static relative to the camera: the camera eye position is
-- snapped to the nearest multiple of @gridSize@ (divide, round, multiply
-- back, component-wise via @\<$\>@) and the grid is translated there.
-- NOTE(review): 'translate' comes from Game.GoreAndAsh.Math — assumed to
-- build a translation matrix from a vector; confirm against that module.
gridModelMatrix :: Camera -> Float -> LC.M44F
gridModelMatrix Camera{..} gridSize = convLC $ translate ((* gridSize) . fromIntegral . (round :: Float -> Int) . (/ gridSize) <$> cameraEye)
-- | Camera (view) matrix, maps from world coords to camera coords.
-- Standard look-at construction: eye position, a target one unit along
-- the camera's forward vector, and the camera's up vector.
cameraMatrix :: Camera -> LC.M44F
cameraMatrix Camera{..} = convLC $ lookAt cameraEye (cameraEye + cameraForward) cameraUp
-- | Projection matrix, maps from camera coords to device normalized
-- coords. Perspective projection with a 60 degree (@pi/3@) vertical
-- field of view for the given aspect ratio, near plane at 0.1 and far
-- plane at 100 world units.
projMatrix :: Float -> LC.M44F
projMatrix !aspect = convLC $ perspective (pi/3) aspect 0.1 100
-- | Transform a quaternion to a homogeneous 4x4 rotation matrix
-- (standard Shoemake-style conversion). Currently unused — see the
-- commented-out call in 'modelMatrix'.
-- NOTE(review): the usual formula normalizes by the squared magnitude
-- (@quadrance q@), not @norm q@. For unit quaternions the two agree,
-- but for non-unit input this scale looks wrong; confirm intent before
-- reviving the commented-out caller.
quatMatrix :: Quaternion Float -> M44 Float
quatMatrix q@(Quaternion !w (V3 !x !y !z)) = V4
    (V4 m00 m01 m02 0)
    (V4 m10 m11 m12 0)
    (V4 m20 m21 m22 0)
    (V4 0 0 0 1)
  where
    s = 2 / norm q
    -- Scaled components; the pairwise products below are shared between
    -- matrix entries.
    x2 = x * s
    y2 = y * s
    z2 = z * s
    xx = x * x2
    xy = x * y2
    xz = x * z2
    yy = y * y2
    yz = y * z2
    zz = z * z2
    wx = w * x2
    wy = w * y2
    wz = w * z2
    m00 = 1 - (yy + zz)
    m10 = xy - wz
    m20 = xz + wy
    m01 = xy + wz
    m11 = 1 - (xx + zz)
    m21 = yz - wx
    m02 = xz - wy
    m12 = yz + wx
    m22 = 1 - (xx + yy)
|
Teaspot-Studio/model-gridizer
|
src/Matrix.hs
|
bsd-3-clause
| 1,757
| 0
| 12
| 478
| 664
| 351
| 313
| -1
| -1
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# Language TemplateHaskell #-}
module Types (
Bucket
, Value
, Type (..)
, Consolidation (..)
, Stats (..)
, Statskell
, Settings (..)
, runStatskellT
, bucket
, value
, consolidate
, type_
, module Data.Text.Lazy
, module Control.Concurrent.STM
) where
import Data.Text.Lazy (Text)
import Data.Map (Map)
import Control.Concurrent.STM (TVar, TChan)
import Data.Lens.Template
import Control.Monad.Reader
import Control.Monad
import Network.Socket (PortNumber)
-- | Name of a statistics bucket, as sent by clients.
type Bucket = Text
-- | A single measured value.
type Value = Double
-- | How a value is interpreted: 'Gauge' or 'Absolute'.
data Type = Gauge | Absolute deriving Show
-- | How multiple values within the same bucket are consolidated.
data Consolidation = Average | Max | Min | Sum deriving Show
-- | One incoming statistics sample. Fields carry a leading underscore
-- because lenses are generated for them by the 'makeLens' splice at the
-- bottom of this module.
data Stats = Stats {
    _bucket :: Bucket
  , _value :: Value
  , _type_ :: Type
  , _consolidate :: Consolidation
  } deriving Show
-- | Shared runtime environment threaded through 'Statskell' via
-- 'ReaderT'.
data Settings = Settings {
    -- | Current samples, keyed by bucket.
    stats :: TVar (Map Bucket [Stats])
    -- | Channel for reporting errors asynchronously.
  , errorChan :: TChan Text
    -- | Network port (see 'Network.Socket.PortNumber'); presumably the
    -- port the server listens on — confirm against the server code.
  , port :: PortNumber
    -- | Directory where the on-disk database files live.
  , databaseDir :: FilePath
  }
-- | The application monad: a 'ReaderT' over 'Settings' on an arbitrary
-- base monad, with instances derived from the underlying 'ReaderT'.
newtype Statskell m a = Statskell (ReaderT Settings m a) deriving (Monad, MonadIO, MonadReader Settings)
-- | Run a 'Statskell' computation with the given settings.
runStatskellT :: Monad m => Statskell m a -> Settings -> m a
runStatskellT (Statskell statskell) = runReaderT statskell
-- Generate lenses (bucket, value, type_, consolidate) for 'Stats'.
$(makeLens ''Stats)
|
MasseR/statskell
|
Types.hs
|
bsd-3-clause
| 1,209
| 0
| 12
| 234
| 360
| 218
| 142
| 43
| 1
|
{-# LANGUAGE OverloadedStrings, OverloadedLists #-}
-- | For use in a REPL.
--
-- Merely by doing an
--
-- > :set -XOverloadedStrings -XOverloadedLists
-- > import Penny.Repl
--
-- the user should be able to do common tasks in the REPL. This
-- module will do whatever re-exports are necessary to make this
-- happen.
--
-- Also, the Haddocks for this module must be readable, so keep this
-- in mind when re-exporting other functions and modules.
module Penny.Repl
(
-- * Number types
Exponential
, DecUnsigned
-- * Data types
, Text
, SubAccount
, Seq
, Account
, Commodity
, Pole
-- ** Serials
, Serset
, forward
, backward
-- ** Time and date types
, Time.ZonedTime
, Time.Day
, Time.TimeOfDay
, Time.TimeZone
-- * Clatcher types
, Clatcher
, Loader
, Report
-- * Quasi quoters
, qDay
, qTime
, qUnsigned
, qNonNeg
-- * Loading files
, copopen
, load
-- * Converting commodities
, Converter
, convert
, converter
-- * Filtering
, prefilt
, postfilt
-- * Sorting
, sort
, comparing
-- * Reports
-- ** Dump
, Dump.dump
-- ** Columns
, Column
, Columns
, checkbook
, table
-- ** Acctree
, acctree
, byQty
, bySubAccountCmp
, bySubAccount
-- ** Pctree
, pctree
, Penny.Polar.debit
, Penny.Polar.credit
-- * Output
, Colors
, colors
, light
, dark
, report
-- * Running the clatcher
, clatcher
, presets
-- * Accessing fields
-- ** Transaction fields
, zonedTime
, day
, timeOfDay
, timeZone
, timeZoneMinutes
, payee
-- ** Posting fields
, birth
, number
, flag
, account
, fitid
, tags
, commodity
, AP.reconciled
, AP.cleared
, AP.side
, AP.qty
, AP.magnitude
, AP.isDebit
, AP.isCredit
, AP.isZero
-- * Comparison helpers
, NonNegative
, cmpUnsigned
, (&&&)
, (|||)
-- * Lens operators
, (&)
, (.~)
, (^.)
-- * Monoid operators
, (<>)
-- * Time
, zonedTimeToUTC
) where
import Penny.Account
import Penny.Acctree
import Penny.BalanceMap
import Penny.Commodity
import qualified Penny.Clatch.Access.Posting as AP
import qualified Penny.Clatch.Access.TransactionX as AT
import Penny.Clatch.Types
import Penny.Clatcher
import Penny.Colors
import Penny.Table (Column, Columns, checkbook, table)
import Penny.Converter
import Penny.Copper (copopen)
import Penny.Decimal
import qualified Penny.Dump as Dump
import Penny.NonNegative
import Penny.Pctree
import Penny.Polar
import Penny.Quasi
import Penny.Report
import Penny.Serial (Serset)
import qualified Penny.Serial as Serial
import Penny.Unix
import Control.Lens (view, (&), (.~), (^.))
import Data.Foldable (toList)
import Data.Monoid ((<>))
import Data.Ord (comparing)
import Data.Sequence (Seq)
import Data.Text (Text)
import Data.Time (zonedTimeToUTC)
import qualified Data.Time as Time
import Turtle.Bytes (procs)
import Turtle.Shell (select)
import Rainbow (chunksToByteStrings, toByteStringsColors256)
-- | Runs the clatcher with the specified settings. Sends output to
-- @less@ with 256 colors.
clatcher :: Clatcher -> IO ()
clatcher cltch = do
  -- Produce the report chunks, then stream them (rendered as
  -- 256-color byte strings) through @less@.
  chks <- runClatcher cltch
  procs "less" lessOpts
    (select . chunksToByteStrings toByteStringsColors256
            . toList $ chks)
-- | A set of reasonable presets:
--
-- * Colorful output is sent to @less@
--
-- * the light color scheme is used
--
-- * the checkbook column report is used
--
-- * postings are sorted by date and time, after converting the zoned
-- time to UTC time
presets :: Clatcher
-- Start from the monoid identity and override only the fields above.
presets = mempty
  & colors .~ light
  & report .~ table checkbook
  & sort .~ comparing (zonedTimeToUTC . zonedTime)
-- # Helpers
-- | A point-free version of '&&': holds exactly when both predicates
-- hold for the same argument.
(&&&) :: (a -> Bool) -> (a -> Bool) -> a -> Bool
(p &&& q) x = p x && q x
infixr 3 &&&
-- | A point-free version of '||': holds when at least one of the
-- predicates holds for the argument.
(|||) :: (a -> Bool) -> (a -> Bool) -> a -> Bool
(p ||| q) x = p x || q x
infixr 2 |||
-- # Accessing fields
-- ## Transaction fields

-- | The zoned time of the transaction.
zonedTime :: Sliced l a -> Time.ZonedTime
zonedTime = view AT.zonedTime
-- | The day of the transaction.
day :: Sliced l a -> Time.Day
day = view AT.day
-- | The time of day of the transaction.
timeOfDay :: Sliced l a -> Time.TimeOfDay
timeOfDay = view AT.timeOfDay
-- | The time zone of the transaction.
timeZone :: Sliced l a -> Time.TimeZone
timeZone = view AT.timeZone
-- | The time zone offset, in minutes.
timeZoneMinutes :: Sliced l a -> Int
timeZoneMinutes = view AT.timeZoneMinutes
-- | The payee of the transaction.
payee :: Sliced l a -> Text
payee = view AT.payee
-- ## Posting fields

-- | How this single posting relates to its sibling postings; that is,
-- its \"birth order\". Numbering restarts with every transaction.
birth :: Sliced l a -> Serset
birth = view AP.birth
-- | A number assigned by the user.
number :: Sliced l a -> Maybe Integer
number = view AP.number
-- | The posting's flag.
flag :: Sliced l a -> Text
flag = view AP.flag
-- | The account of this posting.
account :: Sliced l a -> Account
account = view AP.account
-- | Financial institution ID; often provided in OFX files.
fitid :: Sliced l a -> Text
fitid = view AP.fitid
-- | List of tags assigned by the user.
tags :: Sliced l a -> Seq Text
tags = view AP.tags
-- | The commodity of this posting.
commodity :: Sliced l a -> Text
commodity = view AP.commodity
-- # Functions to access 'Serset' components.

-- | The forward serial of a 'Serset'.
forward :: Serset -> NonNegative
forward = view Serial.forward
-- | The backward serial of a 'Serset'.
backward :: Serset -> NonNegative
backward = view Serial.backward
|
massysett/penny
|
penny/lib/Penny/Repl.hs
|
bsd-3-clause
| 5,281
| 0
| 12
| 1,149
| 1,231
| 738
| 493
| 163
| 1
|
{-# LANGUAGE OverloadedStrings, RecordWildCards
#-}
module Test.WebDriver.Capabilities where
import Test.WebDriver.Firefox.Profile
import Test.WebDriver.Chrome.Extension
import Test.WebDriver.JSON
import Data.Aeson
import Data.Aeson.Types (Parser, typeMismatch, Pair)
import qualified Data.HashMap.Strict as HM (delete, toList)
import Data.Text (Text, toLower, toUpper)
import Data.Default (Default(..))
import Data.Word (Word16)
import Data.Maybe (fromMaybe, catMaybes)
import Data.String (fromString)
import Control.Applicative
import Control.Exception.Lifted (throw)
{- |A structure describing the capabilities of a session. This record
serves dual roles.
* It's used to specify the desired capabilities for a session before
it's created. In this usage, fields that are set to Nothing indicate
that we have no preference for that capability.
* When received from the server, it's used to
describe the actual capabilities given to us by the WebDriver
server. Here a value of Nothing indicates that the server doesn't
support the capability. Thus, for Maybe Bool fields, both Nothing and
Just False indicate a lack of support for the desired capability.
-}
data Capabilities =
Capabilities { -- |Browser choice and browser specific settings.
browser :: Browser
-- |Browser version to use.
, version :: Maybe String
-- |Platform on which the browser should run.
, platform :: Platform
-- |Proxy configuration settings.
, proxy :: ProxyType
-- |Whether the session supports executing JavaScript via
-- 'executeJS' and 'asyncJS'.
, javascriptEnabled :: Maybe Bool
-- |Whether the session supports taking screenshots of the
-- current page with the 'screenshot' command
, takesScreenshot :: Maybe Bool
-- |Whether the session can interact with modal popups,
-- such as window.alert and window.confirm via
-- 'acceptAlerts', 'dismissAlerts', etc.
, handlesAlerts :: Maybe Bool
-- |Whether the session can interact with database storage.
, databaseEnabled :: Maybe Bool
-- |Whether the session can set and query the browser's
-- location context with 'setLocation' and 'getLocation'.
, locationContextEnabled :: Maybe Bool
-- |Whether the session can interact with the application cache
-- .
, applicationCacheEnabled :: Maybe Bool
-- |Whether the session can query for the browser's
-- connectivity and disable it if desired
, browserConnectionEnabled :: Maybe Bool
-- |Whether the session supports CSS selectors when searching
-- for elements.
, cssSelectorsEnabled :: Maybe Bool
-- |Whether Web Storage ('getKey', 'setKey', etc) support is
-- enabled
, webStorageEnabled :: Maybe Bool
-- |Whether the session can rotate the current page's current
-- layout between 'Portrait' and 'Landscape' orientations.
, rotatable :: Maybe Bool
-- |Whether the session should accept all SSL certs by default
, acceptSSLCerts :: Maybe Bool
-- |Whether the session is capable of generating native OS
-- events when simulating user input.
, nativeEvents :: Maybe Bool
-- |How the session should handle unexpected alerts.
, unexpectedAlertBehavior :: Maybe UnexpectedAlertBehavior
-- |A list of ('Text', 'Value') pairs specifying additional non-standard capabilities.
, additionalCaps :: [Pair]
} deriving (Eq, Show)
instance Default Capabilities where
def = Capabilities { browser = firefox
, version = Nothing
, platform = Any
, javascriptEnabled = Nothing
, takesScreenshot = Nothing
, handlesAlerts = Nothing
, databaseEnabled = Nothing
, locationContextEnabled = Nothing
, applicationCacheEnabled = Nothing
, browserConnectionEnabled = Nothing
, cssSelectorsEnabled = Nothing
, webStorageEnabled = Nothing
, rotatable = Nothing
, acceptSSLCerts = Nothing
, nativeEvents = Nothing
, proxy = UseSystemSettings
, unexpectedAlertBehavior = Nothing
, additionalCaps = []
}
-- |Default capabilities. This is the same as the 'Default' instance, but with
-- less polymorphism. By default, we use 'firefox' of an unspecified 'version'
-- with default system-wide 'proxy' settings on whatever 'platform' is available
-- . All 'Maybe' capabilities are set to 'Nothing' (no preference).
defaultCaps :: Capabilities
defaultCaps = def
-- |Same as 'defaultCaps', but with all 'Maybe' 'Bool' capabilities set to
-- 'Just' 'True'.
allCaps :: Capabilities
allCaps = defaultCaps { javascriptEnabled = Just True
, takesScreenshot = Just True
, handlesAlerts = Just True
, databaseEnabled = Just True
, locationContextEnabled = Just True
, applicationCacheEnabled = Just True
, browserConnectionEnabled = Just True
, cssSelectorsEnabled = Just True
, webStorageEnabled = Just True
, rotatable = Just True
, acceptSSLCerts = Just True
, nativeEvents = Just True
}
instance ToJSON Capabilities where
toJSON Capabilities{..} =
object $ [ "browserName" .= browser
, "version" .= version
, "platform" .= platform
, "proxy" .= proxy
, "javascriptEnabled" .= javascriptEnabled
, "takesScreenshot" .= takesScreenshot
, "handlesAlerts" .= handlesAlerts
, "databaseEnabled" .= databaseEnabled
, "locationContextEnabled" .= locationContextEnabled
, "applicationCacheEnabled" .= applicationCacheEnabled
, "browserConnectionEnabled" .= browserConnectionEnabled
, "cssSelectorsEnabled" .= cssSelectorsEnabled
, "webStorageEnabled" .= webStorageEnabled
, "rotatable" .= rotatable
, "acceptSslCerts" .= acceptSSLCerts
, "nativeEvents" .= nativeEvents
, "unexpectedAlertBehavior" .= unexpectedAlertBehavior
]
++ browserInfo
++ additionalCaps
where
browserInfo = case browser of
Firefox {..}
-> ["firefox_profile" .= ffProfile
,"loggingPrefs" .= object ["driver" .= ffLogPref]
,"firefox_binary" .= ffBinary
]
Chrome {..}
-> catMaybes [ opt "chrome.chromedriverVersion" chromeDriverVersion
, opt "chrome.binary" chromeBinary
]
++ ["chrome.switches" .= chromeOptions
,"chrome.extensions" .= chromeExtensions
]
IE {..}
-> ["ignoreProtectedModeSettings" .= ieIgnoreProtectedModeSettings
,"ignoreZoomSetting" .= ieIgnoreZoomSetting
,"initialBrowserUrl" .= ieInitialBrowserUrl
,"elementScrollBehavior" .= ieElementScrollBehavior
,"enablePersistentHover" .= ieEnablePersistentHover
,"enableElementCacheCleanup" .= ieEnableElementCacheCleanup
,"requireWindowFocus" .= ieRequireWindowFocus
,"browserAttachTimeout" .= ieBrowserAttachTimeout
,"logFile" .= ieLogFile
,"logLevel" .= ieLogLevel
,"host" .= ieHost
,"extractPath" .= ieExtractPath
,"silent" .= ieSilent
,"forceCreateProcess" .= ieForceCreateProcess
,"internetExplorerSwitches" .= ieSwitches
]
Opera{..}
-> catMaybes [ opt "opera.binary" operaBinary
, opt "opera.display" operaDisplay
, opt "opera.product" operaProduct
, opt "opera.launcher" operaLauncher
, opt "opera.host" operaHost
, opt "opera.logging.file" operaLogFile
]
++ ["opera.detatch" .= operaDetach
,"opera.no_quit" .= operaDetach --backwards compatability
,"opera.autostart" .= operaAutoStart
, "opera.idle" .= operaIdle
-- ,"opera.profile" .= operaProfile
,"opera.port" .= fromMaybe (-1) operaPort
--note: consider replacing operaOptions with a list of options
,"opera.arguments" .= operaOptions
,"opera.logging.level" .= operaLogPref
]
_ -> []
where
opt k = fmap (k .=)
-- | Parse 'Capabilities' from the JSON object returned by the server.
-- Standard capabilities are read by their wire-protocol names; any keys
-- not recognized for the given browser are preserved in 'additionalCaps'.
instance FromJSON Capabilities where
  parseJSON (Object o) = do
    browser <- req "browserName"
    Capabilities <$> getBrowserCaps browser
                 <*> opt "version" Nothing
                 <*> req "platform"
                 <*> opt "proxy" NoProxy
                 <*> b "javascriptEnabled"
                 <*> b "takesScreenshot"
                 <*> b "handlesAlerts"
                 <*> b "databaseEnabled"
                 <*> b "locationContextEnabled"
                 <*> b "applicationCacheEnabled"
                 <*> b "browserConnectionEnabled"
                 -- fixed: was "cssSelectorEnabled", which never matched the
                 -- "cssSelectorsEnabled" key the ToJSON instance (and the
                 -- JSON Wire Protocol) uses
                 <*> b "cssSelectorsEnabled"
                 <*> b "webStorageEnabled"
                 <*> b "rotatable"
                 <*> b "acceptSslCerts"
                 <*> b "nativeEvents"
                 -- NOTE(review): the ToJSON instance emits the American
                 -- spelling "unexpectedAlertBehavior"; the wire protocol
                 -- documents the British spelling read here. The two
                 -- instances do not round-trip — confirm which is intended.
                 <*> opt "unexpectedAlertBehaviour" Nothing
                 <*> pure (additionalCapabilities browser)
    where --some helpful JSON accessor shorthands
      req :: FromJSON a => Text -> Parser a
      req = (o .:) -- required field
      opt :: FromJSON a => Text -> a -> Parser a
      opt k d = o .:? k .!= d -- optional field
      b :: Text -> Parser (Maybe Bool)
      b k = opt k Nothing -- Maybe Bool field
      -- produce additionalCaps by removing known capabilities from the JSON object
      additionalCapabilities = HM.toList . foldr HM.delete o . knownCapabilities
      -- All keys parsed into dedicated fields (and so excluded from
      -- 'additionalCaps'). Kept in sync with 'getBrowserCaps' below.
      knownCapabilities browser =
        ["browserName", "version", "platform", "proxy"
        ,"javascriptEnabled", "takesScreenshot", "handlesAlerts"
        ,"databaseEnabled", "locationContextEnabled"
        ,"applicationCacheEnabled", "browserConnectionEnabled"
        -- fixed: was "cssSelectorEnabled" (see parser above)
        ,"cssSelectorsEnabled", "webStorageEnabled", "rotatable"
        -- fixed: was the typo "unexpectedBrowserBehaviour", which left the
        -- parsed key in additionalCaps as a duplicate
        ,"acceptSslCerts", "nativeEvents", "unexpectedAlertBehaviour"]
        ++ case browser of
          Firefox {} -> ["firefox_profile", "loggingPrefs", "firefox_binary"]
          -- fixed: "chrome.extensions" was listed twice and "chrome.binary"
          -- was missing
          Chrome {} -> ["chrome.chromedriverVersion", "chrome.binary", "chrome.switches", "chrome.extensions"]
          IE {} -> ["ignoreProtectedModeSettings", "ignoreZoomSetting", "initialBrowserUrl", "elementScrollBehavior"
                   ,"enablePersistentHover", "enableElementCacheCleanup", "requireWindowFocus", "browserAttachTimeout"
                   ,"logFile", "logLevel", "host", "extractPath", "silent", "forceCreateProcess", "internetExplorerSwitches"]
          Opera {} -> ["opera.binary", "opera.product", "opera.no_quit", "opera.autostart", "opera.idle", "opera.display"
                      ,"opera.launcher", "opera.port", "opera.host", "opera.arguments", "opera.logging.file", "opera.logging.level"]
          _ -> []
      -- Parse the browser-specific settings for the browser named in
      -- "browserName"; browsers without settings are returned as-is.
      getBrowserCaps browser =
        case browser of
          Firefox {} -> Firefox <$> opt "firefox_profile" Nothing
                                <*> opt "loggingPrefs" def
                                <*> opt "firefox_binary" Nothing
          Chrome {} -> Chrome <$> opt "chrome.chromedriverVersion" Nothing
                              -- fixed: chromeBinary was read from the wrong
                              -- key ("chrome.extensions"); the ToJSON
                              -- instance writes it as "chrome.binary"
                              <*> opt "chrome.binary" Nothing
                              <*> opt "chrome.switches" []
                              <*> opt "chrome.extensions" []
          IE {} -> IE <$> opt "ignoreProtectedModeSettings" True
                      -- fixed: was the plural "ignoreZoomSettings"; the
                      -- ToJSON instance and the IE driver use the singular
                      <*> opt "ignoreZoomSetting" False
                      <*> opt "initialBrowserUrl" Nothing
                      <*> opt "elementScrollBehavior" def
                      <*> opt "enablePersistentHover" True
                      <*> opt "enableElementCacheCleanup" True
                      <*> opt "requireWindowFocus" False
                      <*> opt "browserAttachTimeout" 0
                      <*> opt "logFile" Nothing
                      <*> opt "logLevel" def
                      <*> opt "host" Nothing
                      <*> opt "extractPath" Nothing
                      <*> opt "silent" False
                      <*> opt "forceCreateProcess" False
                      <*> opt "internetExplorerSwitches" Nothing
          Opera {} -> Opera <$> opt "opera.binary" Nothing
                            <*> opt "opera.product" Nothing
                            <*> opt "opera.no_quit" False
                            <*> opt "opera.autostart" True
                            <*> opt "opera.idle" False
                            <*> opt "opera.display" Nothing
                            <*> opt "opera.launcher" Nothing
                            <*> opt "opera.port" (Just 0)
                            <*> opt "opera.host" Nothing
                            <*> opt "opera.arguments" Nothing
                            <*> opt "opera.logging.file" Nothing
                            <*> opt "opera.logging.level" def
          _ -> return browser
  parseJSON v = typeMismatch "Capabilities" v
-- |This constructor simultaneously specifies which browser the session will
-- use, while also providing browser-specific configuration. Default
-- configuration is provided for each browser by 'firefox', 'chrome', 'opera',
-- 'ie', etc.
--
-- This library uses 'firefox' as its 'Default' browser configuration, when no
-- browser choice is specified.
data Browser = Firefox { -- |The firefox profile to use. If Nothing,
-- a default temporary profile is automatically created
-- and used.
ffProfile :: Maybe (PreparedProfile Firefox)
-- |Firefox logging preference
, ffLogPref :: LogLevel
-- |Server-side path to Firefox binary. If Nothing,
-- use a sensible system-based default.
, ffBinary :: Maybe FilePath
}
| Chrome { -- |Version of the Chrome Webdriver server server to use
--
-- for more information on chromedriver see
-- <http://code.google.com/p/selenium/wiki/ChromeDriver>
chromeDriverVersion :: Maybe String
-- |Server-side path to Chrome binary. If Nothing,
-- use a sensible system-based default.
, chromeBinary :: Maybe FilePath
-- |A list of command-line options to pass to the
-- Chrome binary.
, chromeOptions :: [String]
-- |A list of extensions to use.
, chromeExtensions :: [ChromeExtension]
}
| IE { -- |Whether to skip the protected mode check. If set, tests
-- may become flaky, unresponsive, or browsers may hang. If
-- not set, and protected mode settings are not the same for
-- all zones, an exception will be thrown on driver
-- construction.
ieIgnoreProtectedModeSettings :: Bool
-- |Indicates whether to skip the check that the browser's zoom
-- level is set to 100%. Value is set to false by default.
, ieIgnoreZoomSetting :: Bool
-- |Allows the user to specify the initial URL loaded when IE
-- starts. Intended to be used with ignoreProtectedModeSettings
-- to allow the user to initialize IE in the proper Protected Mode
-- zone. Using this capability may cause browser instability or
-- flaky and unresponsive code. Only \"best effort\" support is
-- provided when using this capability.
, ieInitialBrowserUrl :: Maybe Text
-- |Allows the user to specify whether elements are scrolled into
-- the viewport for interaction to align with the top or bottom
-- of the viewport. The default value is to align with the top of
-- the viewport.
, ieElementScrollBehavior :: IEElementScrollBehavior
-- |Determines whether persistent hovering is enabled (true by
-- default). Persistent hovering is achieved by continuously firing
-- mouse over events at the last location the mouse cursor has been
-- moved to.
, ieEnablePersistentHover :: Bool
-- |Determines whether the driver should attempt to remove obsolete
-- elements from the element cache on page navigation (true by
-- default). This is to help manage the IE driver's memory footprint
-- , removing references to invalid elements.
, ieEnableElementCacheCleanup :: Bool
-- |Determines whether to require that the IE window have focus
-- before performing any user interaction operations (mouse or
-- keyboard events). This capability is false by default, but
-- delivers much more accurate native events interactions.
, ieRequireWindowFocus :: Bool
-- |The timeout, in milliseconds, that the driver will attempt to
-- locate and attach to a newly opened instance of Internet Explorer
-- . The default is zero, which indicates waiting indefinitely.
, ieBrowserAttachTimeout :: Integer
-- |The path to file where server should write log messages to.
-- By default it writes to stdout.
, ieLogFile :: Maybe FilePath
-- |The log level used by the server. Defaults to 'IELogFatal'
, ieLogLevel :: IELogLevel
-- |The address of the host adapter on which the server will listen
-- for commands.
, ieHost :: Maybe Text
-- |The path to the directory used to extract supporting files used
-- by the server. Defaults to the TEMP directory if not specified.
, ieExtractPath :: Maybe Text
-- |Suppresses diagnostic output when the server is started.
, ieSilent :: Bool
-- |Forces launching Internet Explorer using the CreateProcess API.
-- If this option is not specified, IE is launched using the
-- IELaunchURL, if it is available. For IE 8 and above, this option
-- requires the TabProcGrowth registry value to be set to 0.
, ieForceCreateProcess :: Bool
-- |Specifies command-line switches with which to launch Internet
-- Explorer. This is only valid when used with the
-- forceCreateProcess.
, ieSwitches :: Maybe Text
}
| Opera { -- |Server-side path to the Opera binary
operaBinary :: Maybe FilePath
--, operaNoRestart :: Maybe Bool
-- |Which Opera product we're using, e.g. \"desktop\",
-- \"core\"
, operaProduct :: Maybe String
-- |Whether the Opera instance should stay open after
-- we close the session. If false, closing the session
-- closes the browser.
, operaDetach :: Bool
-- |Whether to auto-start the Opera binary. If false,
-- OperaDriver will wait for a connection from the
-- browser. By default this is True.
, operaAutoStart :: Bool
-- |Whether to use Opera's alternative implicit wait
-- implementation. It will use an in-browser heuristic
-- to guess when a page has finished loading. This
-- feature is experimental, and disabled by default.
, operaIdle :: Bool
-- |(*nix only) which X display to use.
, operaDisplay :: Maybe Int
--, operaProfile :: Maybe (PreparedProfile Opera)
-- |Path to the launcher binary to use. The launcher
-- is a gateway between OperaDriver and the Opera
-- browser. If Nothing, OperaDriver will use the
-- launcher supplied with the package.
, operaLauncher :: Maybe FilePath
-- |The port we should use to connect to Opera. If Just 0
-- , use a random port. If Nothing, use the default
-- Opera port. The default 'opera' constructor uses
-- Just 0, since Nothing is likely to cause "address
-- already in use" errors.
, operaPort :: Maybe Word16
-- |The host Opera should connect to. Unless you're
-- starting Opera manually you won't need this.
, operaHost :: Maybe String
-- |Command-line arguments to pass to Opera.
, operaOptions :: Maybe String
-- |Where to send the log output. If Nothing, logging is
-- disabled.
, operaLogFile :: Maybe FilePath
-- |Log level preference. Defaults to 'LogInfo'
, operaLogPref :: LogLevel
}
| HTMLUnit
| IPhone
| IPad
| Android
-- |some other browser, specified by a string name
| Browser Text
deriving (Eq, Show)
instance Default Browser where
def = firefox
instance ToJSON Browser where
toJSON Firefox {} = String "firefox"
toJSON Chrome {} = String "chrome"
toJSON Opera {} = String "opera"
toJSON IE {} = String "internet explorer"
toJSON (Browser b) = String b
toJSON b = String . toLower . fromString . show $ b
-- | Parse a browser from its wire-protocol name (matched
-- case-insensitively). Unknown names do not fail: they are preserved
-- verbatim in the 'Browser' catch-all constructor.
instance FromJSON Browser where
  parseJSON (String jStr) = case toLower jStr of
    "firefox" -> return firefox
    "chrome" -> return chrome
    "internet explorer" -> return ie
    "opera" -> return opera
    -- "safari" -> return safari
    "iphone" -> return iPhone
    "ipad" -> return iPad
    "android" -> return android
    "htmlunit" -> return htmlUnit
    other -> return (Browser other)
  parseJSON v = typeMismatch "Browser" v
-- |Default Firefox settings. All Maybe fields are set to Nothing. ffLogPref
-- is set to 'LogInfo'.
firefox :: Browser
firefox = Firefox Nothing def Nothing
-- |Default Chrome settings. All Maybe fields are set to Nothing, no options are
-- specified, and no extensions are used.
chrome :: Browser
chrome = Chrome Nothing Nothing [] []
-- |Default IE settings. See the 'IE' constructor for more details on
-- individual defaults
ie :: Browser
ie = IE { ieIgnoreProtectedModeSettings = True
, ieIgnoreZoomSetting = False
, ieInitialBrowserUrl = Nothing
, ieElementScrollBehavior = def
, ieEnablePersistentHover = True
, ieEnableElementCacheCleanup = True
, ieRequireWindowFocus = False
, ieBrowserAttachTimeout = 0
, ieLogFile = Nothing
, ieLogLevel = def
, ieHost = Nothing
, ieExtractPath = Nothing
, ieSilent = False
, ieForceCreateProcess = False
, ieSwitches = Nothing
}
-- |Default Opera settings. See the 'Opera' constructor for more details on
-- individual defaults.
opera :: Browser
opera = Opera { operaBinary = Nothing
--, operaNoRestart = Nothing
, operaProduct = Nothing
, operaDetach = False
, operaAutoStart = True
, operaDisplay = Nothing
, operaIdle = False
-- , operaProfile = Nothing
, operaLauncher = Nothing
, operaHost = Nothing
, operaPort = Just 0
, operaOptions = Nothing
, operaLogFile = Nothing
, operaLogPref = def
}
--safari :: Browser
--safari = Safari
htmlUnit :: Browser
htmlUnit = HTMLUnit
iPhone :: Browser
iPhone = IPhone
iPad :: Browser
iPad = IPad
android :: Browser
android = Android
-- |Represents platform options supported by WebDriver. The value Any represents
-- no preference.
data Platform = Windows | XP | Vista | Mac | Linux | Unix | Any
deriving (Eq, Show, Ord, Bounded, Enum)
instance ToJSON Platform where
toJSON = String . toUpper . fromString . show
instance FromJSON Platform where
parseJSON (String jStr) = case toLower jStr of
"windows" -> return Windows
"xp" -> return XP
"vista" -> return Vista
"mac" -> return Mac
"linux" -> return Linux
"unix" -> return Unix
"any" -> return Any
err -> fail $ "Invalid Platform string " ++ show err
parseJSON v = typeMismatch "Platform" v
-- |Available settings for the proxy 'Capabilities' field
data ProxyType = NoProxy
| UseSystemSettings
| AutoDetect
-- |Use a proxy auto-config file specified by URL
| PAC { autoConfigUrl :: String }
-- |Manually specify proxy hosts as hostname:port strings.
-- Note that behavior is undefined for empty strings.
| Manual { ftpProxy :: String
, sslProxy :: String
, httpProxy :: String
}
deriving (Eq, Show)
-- | Parse a proxy configuration from its JSON object form; the
-- @proxyType@ field selects the constructor (case-insensitively).
-- NOTE(review): 'AutoDetect' (emitted as \"AUTODETECT\" by the ToJSON
-- instance) has no case here, so it does not round-trip — confirm
-- whether that is intentional.
instance FromJSON ProxyType where
  parseJSON (Object obj) = do
    pTyp <- f "proxyType"
    case toLower pTyp of
      "direct" -> return NoProxy
      "system" -> return UseSystemSettings
      "pac" -> PAC <$> f "autoConfigUrl"
      "manual" -> Manual <$> f "ftpProxy"
                         <*> f "sslProxy"
                         <*> f "httpProxy"
      _ -> fail $ "Invalid ProxyType " ++ show pTyp
    where
      f :: FromJSON a => Text -> Parser a
      f = (obj .:)
  parseJSON v = typeMismatch "ProxyType" v
instance ToJSON ProxyType where
toJSON pt = object $ case pt of
NoProxy ->
["proxyType" .= ("DIRECT" :: String)]
UseSystemSettings ->
["proxyType" .= ("SYSTEM" :: String)]
AutoDetect ->
["proxyType" .= ("AUTODETECT" :: String)]
PAC{autoConfigUrl = url} ->
["proxyType" .= ("PAC" :: String)
,"autoConfigUrl" .= url
]
Manual{ftpProxy = ftp, sslProxy = ssl, httpProxy = http} ->
["proxyType" .= ("MANUAL" :: String)
,"ftpProxy" .= ftp
,"sslProxy" .= ssl
,"httpProxy" .= http
]
data UnexpectedAlertBehavior = AcceptAlert | DismissAlert | IgnoreAlert
deriving (Bounded, Enum, Eq, Ord, Read, Show)
instance ToJSON UnexpectedAlertBehavior where
toJSON AcceptAlert = String "accept"
toJSON DismissAlert = String "dismiss"
toJSON IgnoreAlert = String "ignore"
-- | Parse an alert-handling policy from its wire name.
-- NOTE(review): an unrecognized string 'throw's a 'BadJSON' exception
-- from pure code instead of using the parser's 'fail'; kept as-is since
-- sibling instances in this module follow the same convention.
instance FromJSON UnexpectedAlertBehavior where
  parseJSON (String s) =
    return $ case s of
      "accept" -> AcceptAlert
      "dismiss" -> DismissAlert
      "ignore" -> IgnoreAlert
      err -> throw . BadJSON
             $ "Invalid string value for UnexpectedAlertBehavior: " ++ show err
  parseJSON v = typeMismatch "UnexpectedAlertBehavior" v
-- |Indicates a log verbosity level. Used in 'Firefox' and 'Opera' configuration.
data LogLevel = LogOff | LogSevere | LogWarning | LogInfo | LogConfig
| LogFine | LogFiner | LogFinest | LogAll
deriving (Eq, Show, Read, Ord, Bounded, Enum)
instance Default LogLevel where
def = LogInfo
instance ToJSON LogLevel where
toJSON p= String $ case p of
LogOff -> "OFF"
LogSevere -> "SEVERE"
LogWarning -> "WARNING"
LogInfo -> "INFO"
LogConfig -> "CONFIG"
LogFine -> "FINE"
LogFiner -> "FINER"
LogFinest -> "FINEST"
LogAll -> "ALL"
-- | Parse a logging level from its upper-case wire name (the inverse of
-- the ToJSON instance above).
-- NOTE(review): unknown strings 'throw' a 'BadJSON' exception from pure
-- code rather than reporting through the parser's 'fail'; kept as-is to
-- match the sibling instances.
instance FromJSON LogLevel where
  parseJSON (String s) = return $ case s of
    "OFF" -> LogOff
    "SEVERE" -> LogSevere
    "WARNING" -> LogWarning
    "INFO" -> LogInfo
    "CONFIG" -> LogConfig
    "FINE" -> LogFine
    "FINER" -> LogFiner
    "FINEST" -> LogFinest
    "ALL" -> LogAll
    _ -> throw . BadJSON $ "Invalid logging preference: " ++ show s
  parseJSON other = typeMismatch "LogLevel" other
-- |Logging levels for Internet Explorer
data IELogLevel = IELogTrace | IELogDebug | IELogInfo | IELogWarn | IELogError
| IELogFatal
deriving (Eq, Show, Read, Ord, Bounded, Enum)
instance Default IELogLevel where
def = IELogFatal
instance ToJSON IELogLevel where
toJSON p= String $ case p of
IELogTrace -> "TRACE"
IELogDebug -> "DEBUG"
IELogInfo -> "INFO"
IELogWarn -> "WARN"
IELogError -> "ERROR"
IELogFatal -> "FATAL"
-- | Parse an IE logging level from its upper-case wire name (the
-- inverse of the ToJSON instance above, which emits \"DEBUG\" for
-- 'IELogDebug').
instance FromJSON IELogLevel where
  parseJSON (String s) = return $ case s of
    "TRACE" -> IELogTrace
    -- fixed: was the typo "DEBIG", so the "DEBUG" value produced by the
    -- ToJSON instance could never be parsed back
    "DEBUG" -> IELogDebug
    "INFO" -> IELogInfo
    "WARN" -> IELogWarn
    "ERROR" -> IELogError
    "FATAL" -> IELogFatal
    _ -> throw . BadJSON $ "Invalid logging preference: " ++ show s
  parseJSON other = typeMismatch "IELogLevel" other
-- |Specifies how elements scroll into the viewport. (see 'ieElementScrollBehavior')
data IEElementScrollBehavior = AlignTop | AlignBottom
  deriving (Eq, Ord, Show, Read, Enum, Bounded)
-- |Elements are aligned to the top of the viewport by default.
instance Default IEElementScrollBehavior where
  def = AlignTop
instance ToJSON IEElementScrollBehavior where
  -- Encoded as an integer: AlignTop = 0, AlignBottom = 1 (the Enum order).
  toJSON = toJSON . fromEnum
instance FromJSON IEElementScrollBehavior where
  parseJSON v = do
    n <- parseJSON v
    case n :: Integer of
      0 -> pure AlignTop
      1 -> pure AlignBottom
      _ -> fail $ "Invalid integer for IEElementScrollBehavior: " ++ show n
|
begriffs/hs-webdriver
|
src/Test/WebDriver/Capabilities.hs
|
bsd-3-clause
| 32,067
| 0
| 26
| 11,866
| 4,413
| 2,449
| 1,964
| 455
| 1
|
module Obsidian.Coordination where
|
svenssonjoel/GCDObsidian
|
Obsidian/Coordination.hs
|
bsd-3-clause
| 40
| 0
| 3
| 8
| 6
| 4
| 2
| 1
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Web.UAParser.Core
( UAConfig (..)
, loadConfig
, parseUA
, UAResult (..)
, uarVersion
, parseOS
, OSResult (..)
, osrVersion
) where
-------------------------------------------------------------------------------
import Control.Applicative
import Control.Monad
import Data.Aeson
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import Data.Default
import Data.Generics
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Yaml
import Text.Regex.PCRE.Light
-------------------------------------------------------------------------------
-- Sample User-Agent strings for exercising the parsers from GHCi.
-- NOTE(review): not exported and unused by the library itself; several
-- entries end with a stray apostrophe (copy-paste artifact) -- kept verbatim
-- since these are test data, not behavior.
test :: [ByteString]
test =
    ["SonyEricssonK750i/R1L Browser/SEMC-Browser/4.2 Profile/MIDP-2.0 Configuration/CLDC-1.1"
    , "Mozilla/5.0 (Windows; U; Windows NT 5.2; en-GB; rv:1.8.1.18) Gecko/20081029 Firefox/2.0.0.18"
    , "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_5; en-us) AppleWebKit/525.26.2 (KHTML, like Gecko) Version/3.2 Safari/525.26.12'"
    , "Mozilla/4.0 (compatible; MSIE 6.0; Windows XP 5.1) Lobo/0.98.4"
    , "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; )'"
    , "Opera/9.80 (Windows NT 5.1; U; cs) Presto/2.2.15 Version/10.00'"
    , "boxee (alpha/Darwin 8.7.1 i386 - 0.9.11.5591)'"
    , "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; CSM-NEWUSER; GTB6; byond_4.0; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 1.1.4322; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; InfoPath.1)'"
    , "Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)'"
    ]
---------------
-- UA Parser --
---------------
-------------------------------------------------------------------------------
parseUA
    :: UAConfig
    -- ^ Loaded parser data
    -> ByteString
    -- ^ User-Agent string to be parsed
    -> Maybe UAResult
parseUA UAConfig{..} bs = msum $ map go uaParsers
  where
    -- Try one parser; Nothing when its regex does not match the UA string.
    go UAParser{..} = mkRes . map T.decodeUtf8 =<< match uaRegex bs []
      where
        -- First capture is the whole match; the rest are family/v1/v2/v3.
        mkRes [] = Nothing
        mkRes [_,f,v1,v2,v3] = Just $ UAResult (repF f) (repV1 v1) (Just v2) (Just v3)
        mkRes [_,f,v1,v2] = Just $ UAResult (repF f) (repV1 v1) (Just v2) Nothing
        mkRes [_,f,v1] = Just $ UAResult (repF f) (repV1 v1) Nothing Nothing
        mkRes [_, f] = Just $ UAResult (repF f) Nothing Nothing Nothing
        mkRes _ = Nothing -- unexpected capture count: treat as no match
        -- Apply the optional replacement strings from the parser definition.
        repV1 x = uaV1Rep `mplus` Just x
        repF x = fromMaybe x uaFamRep
-------------------------------------------------------------------------------
-- | Result type for 'parseUA'.
data UAResult = UAResult {
      uarFamily :: Text       -- ^ browser family name
    , uarV1 :: Maybe Text     -- ^ first version capture, if present
    , uarV2 :: Maybe Text     -- ^ second version capture, if present
    , uarV3 :: Maybe Text     -- ^ third version capture, if present
    } deriving (Show, Eq, Typeable, Data)
-------------------------------------------------------------------------------
-- | Construct a browser version string from 'UAResult': join the leading
-- defined components with dots, stopping at the first missing one.
uarVersion :: UAResult -> Text
uarVersion UAResult{..} =
    T.intercalate "." (catMaybes (takeWhile isJust [uarV1, uarV2, uarV3]))
-------------------------------------------------------------------------------
-- | All-empty result: empty family, no version components.
instance Default UAResult where
    def = UAResult "" Nothing Nothing Nothing
---------------
-- OS Parser --
---------------
-------------------------------------------------------------------------------
parseOS
    :: UAConfig
    -- ^ Loaded parser data
    -> ByteString
    -- ^ User-Agent string to be parsed
    -> Maybe OSResult
parseOS UAConfig{..} bs = msum $ map go osParsers
  where
    -- Try one parser; Nothing when its regex does not match the UA string.
    go OSParser{..} = mkRes . map T.decodeUtf8 =<< match osRegex bs []
      where
        -- First capture is the whole match; the rest are family/v1..v4.
        mkRes [] = Nothing
        mkRes [_,f,v1,v2,v3,v4] = Just $ OSResult (repF f) (Just v1) (Just v2) (Just v3) (Just v4)
        mkRes [_,f,v1,v2,v3] = Just $ OSResult (repF f) (Just v1) (Just v2) (Just v3) Nothing
        mkRes [_,f,v1,v2] = Just $ OSResult (repF f) (Just v1) (Just v2) Nothing Nothing
        mkRes [_,f,v1] = Just $ OSResult (repF f) (Just v1) Nothing Nothing Nothing
        mkRes [_, f] = Just $ OSResult (repF f) Nothing Nothing Nothing Nothing
        mkRes _ = Nothing -- unexpected capture count: treat as no match
        -- Apply the optional family replacement from the parser definition.
        repF x = fromMaybe x osFamRep
-------------------------------------------------------------------------------
-- | Result type for 'parseOS'
data OSResult = OSResult {
      osrFamily :: Text       -- ^ OS family name
    , osrV1 :: Maybe Text     -- ^ first version capture, if present
    , osrV2 :: Maybe Text     -- ^ second version capture, if present
    , osrV3 :: Maybe Text     -- ^ third version capture, if present
    , osrV4 :: Maybe Text     -- ^ fourth version capture, if present
    } deriving (Show,Eq,Typeable,Data)
-- | All-empty result: empty family, no version components.
instance Default OSResult where
    def = OSResult "" Nothing Nothing Nothing Nothing
-------------------------------------------------------------------------------
-- | Construct a version string from 'OSResult': join the leading defined
-- components with dots, stopping at the first missing one.
osrVersion :: OSResult -> Text
osrVersion OSResult{..} =
    T.intercalate "." (catMaybes (takeWhile isJust [osrV1, osrV2, osrV3, osrV4]))
-------------------
-- Parser Config --
-------------------
-------------------------------------------------------------------------------
-- | Read and decode the YAML parser-definition file, failing hard (via
-- 'error') when the file is missing or malformed.
loadConfig :: FilePath -> IO UAConfig
loadConfig path =
    decodeFile path >>= maybe (error "Can't load UA config file") return
-------------------------------------------------------------------------------
-- | User-Agent string parser data, as decoded from the ua-parser YAML
-- definitions ("user_agent_parsers", "os_parsers", "device_parsers").
data UAConfig = UAConfig {
      uaParsers :: [UAParser]    -- ^ browser/user-agent patterns
    , osParsers :: [OSParser]    -- ^ operating-system patterns
    , devParsers :: [DevParser]  -- ^ device patterns (not consumed by 'parseUA'/'parseOS')
    } deriving (Eq,Show)
-------------------------------------------------------------------------------
data UAParser = UAParser {
      uaRegex :: Regex           -- ^ compiled PCRE pattern
    , uaFamRep :: Maybe Text     -- ^ optional family-name replacement
    , uaV1Rep :: Maybe Text      -- ^ optional v1 replacement
    } deriving (Eq,Show)
-------------------------------------------------------------------------------
data OSParser = OSParser {
      osRegex :: Regex           -- ^ compiled PCRE pattern
    , osFamRep :: Maybe Text     -- ^ optional family-name replacement
    , osRep1 :: Maybe Text       -- ^ "os_v1_replacement" field; unused by 'parseOS'
    , osRep2 :: Maybe Text       -- ^ "os_v2_replacement" field; unused by 'parseOS'
    } deriving (Eq,Show)
data DevParser = DevParser {
      devRegex :: Regex          -- ^ compiled PCRE pattern
    , devRep :: Maybe Text       -- ^ optional device replacement
    } deriving (Eq,Show)
-------------------------------------------------------------------------------
-- Compile the mandatory "regex" field of a parser object into a PCRE
-- 'Regex' with no options. No type signature on purpose: it is reused at
-- several aeson parse sites (the file enables NoMonomorphismRestriction).
parseRegex v = flip compile [] `liftM` (v .: "regex")
-------------------------------------------------------------------------------
-- NOTE(review): all four instances below call 'error' (a crash) rather than
-- aeson's 'fail'/'typeMismatch' when given a non-Object value. 'loadConfig'
-- already turns any decode failure into a fatal 'error', so this may be
-- intentional -- confirm before changing.
instance FromJSON UAConfig where
  parseJSON (Object v) =
    UAConfig
      <$> v .: "user_agent_parsers"
      <*> v .: "os_parsers"
      <*> v .: "device_parsers"
  parseJSON _ = error "Object expected when parsing JSON"
-------------------------------------------------------------------------------
instance FromJSON UAParser where
  -- Missing replacement fields decode to Nothing (the <|> fallback also
  -- absorbs a present-but-null field).
  parseJSON (Object v) =
    UAParser <$> parseRegex v
             <*> (v .:? "family_replacement" <|> return Nothing)
             <*> (v .:? "v1_replacement" <|> return Nothing)
  parseJSON _ = error "Object expected when parsing JSON"
-------------------------------------------------------------------------------
instance FromJSON OSParser where
  parseJSON (Object v) =
    OSParser <$> parseRegex v
             <*> (v .:? "os_replacement" <|> return Nothing)
             <*> (v .:? "os_v1_replacement" <|> return Nothing)
             <*> (v .:? "os_v2_replacement" <|> return Nothing)
  parseJSON _ = error "Object expected when parsing JSON"
-------------------------------------------------------------------------------
instance FromJSON DevParser where
  parseJSON (Object v) =
    DevParser <$> parseRegex v
              <*> v .:? "device_replacement"
  parseJSON _ = error "Object expected when parsing JSON"
|
ozataman/ua-parser-standalone
|
src/Web/UAParser/Core.hs
|
bsd-3-clause
| 8,215
| 0
| 12
| 2,022
| 1,719
| 939
| 780
| 137
| 7
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
-----------------------------------------------------------------
-- Auto-generated by regenClassifiers
--
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ranking.Classifiers.JA_XX (classifiers) where
import Data.String
import Prelude
import qualified Data.HashMap.Strict as HashMap
import Duckling.Ranking.Types
-- | Empty classifier table: no corpus-trained ranking data for JA yet, so
-- every feature lookup misses. (File is @generated -- do not edit by hand.)
classifiers :: Classifiers
classifiers = HashMap.fromList []
|
facebookincubator/duckling
|
Duckling/Ranking/Classifiers/JA_XX.hs
|
bsd-3-clause
| 828
| 0
| 6
| 105
| 66
| 47
| 19
| 8
| 1
|
{-|
Module : Numeric.AERN.RealArithmetic.Basis.MPFR.ExactOps
Description : one, zero, negation etc for MPFR numbers
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
One, zero, negation etc for MPFR numbers.
This is a private module reexported publicly via its parent.
-}
module Numeric.AERN.RealArithmetic.Basis.MPFR.ExactOps where
import Numeric.AERN.RealArithmetic.Basis.MPFR.Basics
import Numeric.AERN.RealArithmetic.ExactOps
import qualified Numeric.Rounded as R
-- Each constant is built with 'withPrec' at the precision of the supplied
-- sample value, so results match the sample's working precision.
instance HasSampleFromContext MPFR where
    sampleFromContext = withPrec defaultPrecision 0 -- useless...
instance HasZero MPFR where
    zero sample = withPrec (getPrecision sample) 0
instance HasOne MPFR where
    one sample = withPrec (getPrecision sample) 1
instance HasInfinities MPFR where
    -- +/- infinity obtained via IEEE division by zero at the sample's precision.
    plusInfinity sample = withPrec (getPrecision sample) $ 1/0
    minusInfinity sample = withPrec (getPrecision sample) $ -1/0
    excludesPlusInfinity a = (a /= plusInfinity a)
    excludesMinusInfinity a = (a /= minusInfinity a)
instance Neg MPFR where
    -- Negation is exact; lifted through the rounded-arithmetic wrapper.
    neg = liftRoundedToMPFR1 negate
|
michalkonecny/aern
|
aern-mpfr-rounded/src/Numeric/AERN/RealArithmetic/Basis/MPFR/ExactOps.hs
|
bsd-3-clause
| 1,226
| 0
| 10
| 258
| 220
| 118
| 102
| 17
| 0
|
module Tct.Trs.Data.RuleSet where
import Tct.Trs.Data.Rules (Rules, empty)
-- * ruleset
-- | Partition of a rewrite problem into strict\/weak dependency pairs and
-- strict\/weak rewrite rules.
data RuleSet f v = RuleSet
  { sdps :: Rules f v -- ^ strict dependency pairs
  , wdps :: Rules f v -- ^ weak dependency pairs
  , strs :: Rules f v -- ^ strict rules
  , wtrs :: Rules f v -- ^ weak rules
  }
-- | A rule set with no rules in any of the four components.
emptyRuleSet :: RuleSet f v
emptyRuleSet = RuleSet { sdps = empty, wdps = empty, strs = empty, wtrs = empty }
|
ComputationWithBoundedResources/tct-trs
|
src/Tct/Trs/Data/RuleSet.hs
|
bsd-3-clause
| 376
| 0
| 9
| 85
| 107
| 64
| 43
| 9
| 1
|
{-# LANGUAGE MultiParamTypeClasses #-}
module SAWServer.NoParams (NoParams(..)) where
import Data.Aeson
( withObject, object, FromJSON(parseJSON), ToJSON(toJSON) )
import qualified Argo.Doc as Doc
-- | Placeholder parameter type for RPC methods that take no arguments.
data NoParams = NoParams
-- Serializes as the empty JSON object.
instance ToJSON NoParams where
  toJSON NoParams = object []
-- Accepts any JSON object, ignoring all of its fields.
instance FromJSON NoParams where
  parseJSON = withObject "no parameters" (const (pure NoParams))
-- No parameter fields to document for the generated API reference.
instance Doc.DescribedMethod NoParams () where
  parameterFieldDescription = []
|
GaloisInc/saw-script
|
saw-remote-api/src/SAWServer/NoParams.hs
|
bsd-3-clause
| 470
| 0
| 10
| 69
| 130
| 75
| 55
| 12
| 0
|
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Applicative
import Control.Concurrent
import Control.Exception as E hiding (handle)
import Control.Monad
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Cont
import Data.Unique
import System.Environment (getArgs)
import System.Exit (die)
import System.IO
import Text.Read (readMaybe)

import Lib
main :: IO ()
main = do
  print =<< bracket_demo
  -- Previously: @port:_ <- map read <$> getArgs@, which died with an opaque
  -- pattern-match / Prelude.read error on a missing or non-numeric argument.
  args <- getArgs
  case args of
    (a:_) -> case readMaybe a of
      Just port -> launchServer port
      Nothing -> die ("invalid port: " ++ a)
    [] -> die "usage: <prog> PORT"
-- | Demonstrate nested resource acquisition with 'bracket' lifted into
-- 'ContT': the file handle and both demo "resources" are released in reverse
-- acquisition order when the computation finishes (or is interrupted).
-- Appends a trace of acquire/release events to the file \"tmp\" and returns
-- the sum of the two acquired values (42 + 13).
-- Added an explicit signature (was missing, relying on numeric defaulting)
-- and renamed the finalizer lambdas, which shadowed @n@.
bracket_demo :: IO Integer
bracket_demo = (`runContT` return) $ do
  fh <- ContT $ bracket
    (openFile "tmp" AppendMode)
    hClose
  n <- ContT $ bracket
    (hPutStrLn fh "Gain n" >> return 42)
    (\v -> hPutStrLn fh $ "Finalize n: " ++ show v)
  l <- ContT $ bracket
    (hPutStrLn fh "Gain l" >> return 13)
    (\v -> hPutStrLn fh $ "Finalize l: " ++ show v)
  -- liftIO $ throwIO (ErrorCall "heyhey")
  return $ n + l
-- sketch
-- | Skeleton of the server pipeline: each stage is a CPS-style function from
-- 'Lib' that manages its own resource lifetime around the continuation.
-- NOTE(review): the semantics of 'acceptLoop'/'login'/'joinRoom'/'chat' are
-- defined in 'Lib' and not visible here -- presumably each invokes its
-- continuation per accepted client; confirm before restructuring.
launchServer :: Port -> IO ()
launchServer port = (`runContT` return) $ do
    client <- ContT $ acceptLoop port
    loginedClient <- ContT $ login client
    roomId <- ContT $ joinRoom loginedClient
    ContT $ chat loginedClient roomId
---- NPC
--data NPC = NPC { _npcId :: Int }
--
---- NPC spawner
--launchNpcServer :: Port -> IO ()
--launchNpcServer port (`runContT` return) $ do
-- npc <- ContT $ genNpcLoop port
-- loop npc
-- where
-- loop npc = forever $ do
-- mUsers <- ContT $ searchUser npc
-- case mUsers of
-- Nothing -> return ()
-- Just users -> forM_ users $ \user -> do
-- mChan <- ContT $ npc `tryConnect` user
-- case mChan of
-- Nothing -> return ()
-- Just chan = do
-- ContT $ greet chan npc "Heyhey!"
-- mResponse <- ContT $ wait chan
-- case mResponse of
-- Nothing -> return ()
-- Just response -> do
-- ContT $ greet chan "Oh thanks for your response, human living. Take care!"
-- kill npc
|
ruicc/concurrent-world
|
app/Main.hs
|
bsd-3-clause
| 2,157
| 0
| 15
| 730
| 400
| 220
| 180
| 35
| 1
|
{-# LANGUAGE OverloadedStrings #-}
-- | This module provides a bridge between the low-level text protocol that
-- IRC uses and the high-level events in the "Irc.Model" module.
module Irc.Core
( MsgFromServer(..)
, IrcError(..)
, ircMsgToServerMsg
) where
import Control.Lens (over, _2)
import Data.ByteString (ByteString)
import Data.Time
import Data.Time.Clock.POSIX
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B8
import Irc.Format
-- | 'MsgFromServer' provides a typed view of the various IRC protocol messages.
-- There are more messages defined for IRC (and many of those overlap) than
-- are in common use. Please report a bug if a common message is missing
-- from this type.
data MsgFromServer
-- 001-099 Client-server connection messages
= RplWelcome ByteString -- ^ 001 "Welcome to the Internet Relay Network \<nick\>!\<user\>\@\<host\>"
| RplYourHost ByteString -- ^ 002 "Your host is \<servername\>, running version \<ver\>"
| RplCreated ByteString -- ^ 003 "This server was created \<date\>"
| RplMyInfo ByteString ByteString [ByteString] -- ^ 004 servername version *(modes)
| RplISupport [(ByteString,ByteString)] -- ^ 005 *(KEY=VALUE)
| RplSnoMask ByteString -- ^ 008 snomask
| RplYourId ByteString -- ^ 042 unique-id
-- 200-399 Command responses
| RplStatsLinkInfo [ByteString] -- ^ 211 arguments
| RplStatsCommands [ByteString] -- ^ 212 arguments
| RplStatsCLine [ByteString] -- ^ 213 arguments
| RplStatsNLine [ByteString] -- ^ 214 arguments
| RplStatsILine [ByteString] -- ^ 215 arguments
| RplStatsKLine [ByteString] -- ^ 216 arguments
| RplStatsQLine [ByteString] -- ^ 217 arguments
| RplStatsYLine [ByteString] -- ^ 218 arguments
| RplEndOfStats Char -- ^ 219 mode
| RplStatsPLine [ByteString] -- ^ 220 arguments
| RplUmodeIs ByteString [ByteString] -- ^ 221 modes *(params)
| RplStatsDLine [ByteString] -- ^ 225
| RplStatsVLine [ByteString] -- ^ 240
| RplStatsLLine [ByteString] -- ^ 241
| RplStatsUptime ByteString -- ^ 242
| RplStatsOLine [ByteString] -- ^ 243
| RplStatsHLine [ByteString] -- ^ 244
| RplStatsSLine [ByteString] -- ^ 245
| RplStatsPing [ByteString] -- ^ 246
| RplStatsXLine [ByteString] -- ^ 247
| RplStatsULine [ByteString] -- ^ 248
| RplStatsDebug [ByteString] -- ^ 249
| RplStatsConn ByteString -- ^ 250 connection
| RplLuserClient ByteString -- ^ 251 "There are \<integer\> users and \<integer\> services on \<integer\> servers"
| RplLuserOp ByteString -- ^ 252 number-of-ops
| RplLuserUnknown ByteString -- ^ 253 number-of-unknown
| RplLuserChannels ByteString -- ^ 254 number-of-channels
| RplLuserMe ByteString -- ^ 255 "I have \<integer\> clients and \<integer\> servers"
| RplLuserAdminMe ByteString -- ^ 256 server
| RplLuserAdminLoc1 ByteString -- ^ 257 admin-info-1
| RplLuserAdminLoc2 ByteString -- ^ 258 admin-info-2
| RplLuserAdminEmail ByteString -- ^ 259 admin-email
| RplLoadTooHigh ByteString -- ^ 263 command
| RplLocalUsers [ByteString] -- ^ 265 [local] [max] txt
| RplGlobalUsers [ByteString] -- ^ 266 [global] [max] txt
| RplPrivs ByteString -- ^ 270 privstring
| RplWhoisCertFp Identifier ByteString -- ^ 276 nick txt
| RplAcceptList Identifier -- ^ 281
| RplEndOfAccept -- ^ 282
| RplAway Identifier ByteString -- ^ 301 nick away_message
| RplUserHost [ByteString] -- ^ 302 *(user hosts)
| RplIsOn [Identifier] -- ^ 303 *(nick)
| RplSyntax ByteString -- ^ (inspircd) 304 text
| RplUnAway -- ^ 305
| RplNowAway -- ^ 306
| RplWhoisUser Identifier ByteString ByteString ByteString -- ^ 311 nick user host realname
| RplWhoisServer Identifier ByteString ByteString -- ^ 312 nick server serverinfo
| RplWhoisOperator Identifier ByteString -- ^ 313 nick "is an IRC operator"
| RplWhoWasUser Identifier ByteString ByteString ByteString -- ^ 314 nick user host realname
| RplEndOfWho Identifier -- ^ 315 channel
| RplWhoisIdle Identifier Integer (Maybe UTCTime) -- ^ 317 nick idle signon
| RplEndOfWhois Identifier -- ^ 318 nick
| RplWhoisChannels Identifier ByteString -- ^ 319 nick channels
| RplListStart -- ^ 321
| RplList Identifier Integer ByteString -- ^ 322 channel usercount topic
| RplListEnd -- ^ 323
| RplChannelModeIs Identifier ByteString [ByteString] -- ^ 324 channel modes *(params)
| RplNoTopicSet Identifier -- ^ 331 channel
| RplTopic Identifier ByteString -- ^ 332 channel topic
| RplChannelUrl Identifier ByteString -- ^ 328 channel url
| RplCreationTime Identifier UTCTime -- ^ 329 channel timestamp
| RplWhoisAccount Identifier ByteString -- ^ 330 nick account
| RplTopicWhoTime Identifier ByteString UTCTime -- ^ 333 channel nickname timestamp
| RplInviting Identifier Identifier -- ^ 341 nick channel
| RplInviteList Identifier ByteString ByteString UTCTime -- ^ 346 channel mask who timestamp
| RplEndOfInviteList Identifier -- ^ 347 channel
| RplExceptionList Identifier ByteString ByteString UTCTime -- ^ 348 channel mask who timestamp
| RplEndOfExceptionList Identifier -- ^ 349 channel
| RplVersion [ByteString] -- ^ 351 version server comments
| RplWhoReply Identifier ByteString ByteString ByteString Identifier ByteString ByteString -- ^ 352 channel user host server nick flags txt
| RplNameReply ChannelType Identifier [ByteString] -- ^ 353 channeltype channel names
| RplLinks ByteString ByteString ByteString -- ^ 364 mask server info
| RplEndOfLinks ByteString -- ^ 365 mask
| RplEndOfNames Identifier -- ^ 366 channel
| RplBanList Identifier ByteString ByteString UTCTime -- ^ 367 channel banned banner timestamp
| RplEndOfBanList Identifier -- ^ 368 channel
| RplEndOfWhoWas Identifier -- ^ 369 nick
| RplMotd ByteString -- ^ 372 line-of-motd
| RplMotdStart -- ^ 375
| RplEndOfMotd -- ^ 376
| RplTime ByteString ByteString -- ^ 391 server "\<string showing server's local time\>"
| RplInfo ByteString -- ^ 371 info
| RplEndOfInfo -- ^ 374
| RplWhoisHost Identifier ByteString -- ^ 378 nick host
| RplWhoisModes Identifier ByteString [ByteString] -- ^ 379 nick modes *(args)
| RplYoureOper ByteString -- ^ 381 text
| RplHostHidden ByteString -- ^ 396 hostname
| Err Identifier IrcError
-- Random high-numbered stuff
| RplWhoisSecure Identifier -- ^ 671 nick
| RplHelpStart ByteString ByteString -- ^ 704 topic text
| RplHelp ByteString ByteString -- ^ 705 topic text
| RplEndOfHelp ByteString -- ^ 706 topic text
| RplKnock Identifier UserInfo -- ^ 710 channel
| RplKnockDelivered Identifier -- ^ 711 channel
| RplTargNotify Identifier -- ^ 717 nick
| RplUmodeGMsg Identifier ByteString -- ^ 718 nick mask
| RplQuietList Identifier Char ByteString ByteString UTCTime -- ^ 728 channel mode mask who timestamp
| RplEndOfQuietList Identifier Char -- ^ 729 channel mode
-- SASL stuff
| RplLoggedIn ByteString -- ^ 900 account
| RplLoggedOut -- ^ 901
| RplNickLocked -- ^ 902
| RplSaslSuccess -- ^ 903
| RplSaslFail -- ^ 904
| RplSaslTooLong -- ^ 905
| RplSaslAborted -- ^ 906
| RplSaslAlready -- ^ 907
| RplSaslMechs ByteString -- ^ 908 comma-sep-mechs
| Away UserInfo (Maybe ByteString)
| Ping ByteString
| Pong ByteString (Maybe ByteString)
| Notice UserInfo Identifier ByteString
| Topic UserInfo Identifier ByteString
| PrivMsg UserInfo Identifier ByteString
| ExtJoin UserInfo Identifier (Maybe ByteString) ByteString
| Join UserInfo Identifier
| Nick UserInfo Identifier
| Mode UserInfo Identifier [ByteString]
| Quit UserInfo ByteString
| Cap ByteString ByteString
| Kick UserInfo Identifier Identifier ByteString
| Part UserInfo Identifier ByteString
| Invite UserInfo Identifier
| Error ByteString
| Authenticate ByteString
| Account UserInfo (Maybe ByteString)
deriving (Read, Show)
data IrcError
-- 400-499 Errors
= ErrNoSuchNick -- ^ 401
| ErrNoSuchServer ByteString -- ^ 402 server
| ErrNoSuchChannel -- ^ 403
| ErrCannotSendToChan -- ^ 404
| ErrTooManyChannels -- ^ 405
| ErrWasNoSuchNick -- ^ 406
| ErrTooManyTargets -- ^ 407
| ErrNoOrigin -- ^ 409
| ErrNoRecipient -- ^ 411
| ErrNoTextToSend -- ^ 412
| ErrUnknownCommand ByteString -- ^ 421 command
| ErrNoMotd -- ^ 422
| ErrNoAdminInfo ByteString -- ^ 423 server
| ErrNoNicknameGiven -- ^ 431
| ErrErroneousNickname ByteString -- ^ 432 badnick
| ErrNicknameInUse Identifier -- ^ 433 nick
| ErrBanNickChange -- ^ 435
| ErrUnavailResource -- ^ 437
| ErrNickTooFast -- ^ 438
| ErrServicesDown -- ^ 440
| ErrUserNotInChannel Identifier -- ^ 441 nick
| ErrNotOnChannel -- ^ 442 channel
| ErrUserOnChannel Identifier -- ^ 443 nick
| ErrNotRegistered -- ^ 451
| ErrAcceptFull -- ^ 456
| ErrAcceptExist -- ^ 457
| ErrAcceptNot -- ^ 458
| ErrNeedMoreParams ByteString -- ^ 461 command
| ErrAlreadyRegistered -- ^ 462
| ErrNoPermForHost -- ^ 463
| ErrPasswordMismatch -- ^ 464
| ErrYoureBannedCreep -- ^ 465
| ErrLinkChannel Identifier -- ^ 470 dstchannel
| ErrChannelFull -- ^ 471 channel
| ErrUnknownMode Char -- ^ 472 mode
| ErrInviteOnlyChan -- ^ 473
| ErrBannedFromChan -- ^ 474
| ErrBadChannelKey -- ^ 475
| ErrNeedReggedNick -- ^ 477
| ErrBanListFull Char -- ^ 478 mode
| ErrBadChanName ByteString -- ^ 479 name
| ErrThrottle -- ^ 480
| ErrNoPrivileges -- ^ 481
| ErrChanOpPrivsNeeded -- ^ 482
| ErrCantKillServer -- ^ 483
| ErrIsChanService Identifier -- ^ 484 nick
| ErrNoNonReg -- ^ 486
| ErrVoiceNeeded -- ^ 489
| ErrNoOperHost -- ^ 491
| ErrOwnMode -- ^ 494
| ErrUnknownUmodeFlag Char -- ^ 501 mode
| ErrUsersDontMatch -- ^ 502
| ErrHelpNotFound ByteString -- ^ 524 topic
| ErrTooManyKnocks -- ^ 713
| ErrChanOpen -- ^ 713
| ErrKnockOnChan -- ^ 714
| ErrTargUmodeG -- ^ 716
| ErrNoPrivs ByteString -- ^ 723 priv
| ErrMlockRestricted Char ByteString -- ^ 742 mode setting
deriving (Read, Show)
-- | Channel visibility class carried in the RPL_NAMREPLY (353) prefix:
-- \"\@\" maps to 'SecretChannel', \"*\" to 'PrivateChannel', \"=\" to
-- 'PublicChannel' (see the \"353\" case of 'ircMsgToServerMsg').
data ChannelType = SecretChannel | PrivateChannel | PublicChannel
  deriving (Read, Show)
ircMsgToServerMsg :: RawIrcMsg -> Maybe MsgFromServer
ircMsgToServerMsg ircmsg =
case (msgCommand ircmsg, msgParams ircmsg) of
("001",[_,txt]) -> Just (RplWelcome txt)
("002",[_,txt]) -> Just (RplYourHost txt)
("003",[_,txt]) -> Just (RplCreated txt)
("004", _:host:version:modes) ->
Just (RplMyInfo host version modes)
("005",_:params)
| not (null params) ->
let parse1 = over _2 (B.drop 1) . B8.break (=='=')
in Just (RplISupport (map parse1 (init params)))
("008",[_,snomask,_]) ->
Just (RplSnoMask (B.tail snomask))
("042",[_,yourid,_]) ->
Just (RplYourId yourid)
("211", _:linkinfo) -> Just (RplStatsLinkInfo linkinfo)
("212", _:commands) -> Just (RplStatsCommands commands)
("213", _:cline ) -> Just (RplStatsCLine cline)
("214", _:nline ) -> Just (RplStatsNLine nline)
("215", _:iline ) -> Just (RplStatsILine iline)
("216", _:kline ) -> Just (RplStatsKLine kline)
("217", _:qline ) -> Just (RplStatsQLine qline)
("218", _:yline ) -> Just (RplStatsYLine yline)
("219",[_,mode,_] ) -> Just (RplEndOfStats (B8.head mode))
("220", _:pline ) -> Just (RplStatsPLine pline)
("221", _:mode:params) -> Just (RplUmodeIs mode params)
("225", _:dline ) -> Just (RplStatsDLine dline)
("240", _:vline ) -> Just (RplStatsVLine vline)
("241", _:lline ) -> Just (RplStatsLLine lline)
("242", [_,uptime]) -> Just (RplStatsUptime uptime)
("243", _:oline ) -> Just (RplStatsOLine oline)
("244", _:hline ) -> Just (RplStatsHLine hline)
("245", _:sline ) -> Just (RplStatsSLine sline)
("246", _:ping ) -> Just (RplStatsPing ping )
("247", _:xline ) -> Just (RplStatsXLine xline)
("248", _:uline ) -> Just (RplStatsULine uline)
("249", _:debug ) -> Just (RplStatsDebug debug)
("250",[_,stats]) ->
Just (RplStatsConn stats)
("251",[_,stats]) ->
Just (RplLuserClient stats)
("252",[_,num,_]) ->
Just (RplLuserOp num)
("253",[_,num,_]) ->
Just (RplLuserUnknown num)
("254",[_,num,_]) ->
Just (RplLuserChannels num)
("255",[_,txt]) -> Just (RplLuserMe txt)
("256",[_,server]) -> Just (RplLuserAdminMe server)
("257",[_,txt]) -> Just (RplLuserAdminLoc1 txt)
("258",[_,txt]) -> Just (RplLuserAdminLoc2 txt)
("259",[_,txt]) -> Just (RplLuserAdminEmail txt)
("263",[_,cmd,_]) ->
Just (RplLoadTooHigh cmd)
("265", _:params) ->
Just (RplLocalUsers params)
("266", _:params ) ->
Just (RplGlobalUsers params)
("270",[_,txt]) ->
Just (RplPrivs txt)
("276",[_,nick,txt]) ->
Just (RplWhoisCertFp (mkId nick) txt)
("281",[_,nick]) ->
Just (RplAcceptList (mkId nick))
("282",[_,_]) ->
Just RplEndOfAccept
("301",[_,nick,message]) ->
Just (RplAway (mkId nick) message)
("302",[_,txt]) ->
Just (RplUserHost (filter (not . B.null) (B8.split ' ' txt)))
("303",[_,txt]) ->
Just (RplIsOn (map mkId (filter (not . B.null) (B8.split ' ' txt))))
("304",[_,txt]) ->
Just (RplSyntax txt)
("305",[_,_]) ->
Just RplUnAway
("306",[_,_]) ->
Just RplNowAway
("311",[_,nick,user,host,_star,txt]) ->
Just (RplWhoisUser (mkId nick) user host txt)
("312",[_,nick,server,txt]) ->
Just (RplWhoisServer (mkId nick) server txt)
("314",[_,nick,user,host,_star,txt]) ->
Just (RplWhoWasUser (mkId nick) user host txt)
("319",[_,nick,txt]) ->
Just (RplWhoisChannels (mkId nick) txt)
("313",[_,nick,txt]) ->
Just (RplWhoisOperator (mkId nick) txt)
("315",[_,chan,_]) ->
Just (RplEndOfWho (mkId chan))
("317",[_,nick,idle,signon,_txt]) ->
Just (RplWhoisIdle (mkId nick) (asNumber idle) (Just (asTimeStamp signon)))
("317",[_,nick,idle,_txt]) ->
Just (RplWhoisIdle (mkId nick) (asNumber idle) Nothing)
("318",[_,nick,_txt]) ->
Just (RplEndOfWhois (mkId nick))
("321",[_,_,_]) ->
Just RplListStart
("322",[_,chan,num,topic]) ->
Just (RplList (mkId chan) (asNumber num) topic)
("323",[_,_]) ->
Just RplListEnd
("324",_:chan:modes:params) ->
Just (RplChannelModeIs (mkId chan) modes params)
("328",[_,chan,url]) ->
Just (RplChannelUrl (mkId chan) url)
("329",[_,chan,time]) ->
Just (RplCreationTime (mkId chan) (asTimeStamp time))
("330",[_,nick,account,_txt]) ->
Just (RplWhoisAccount (mkId nick) account)
("331",[_,chan,_]) ->
Just (RplNoTopicSet (mkId chan))
("332",[_,chan,txt]) ->
Just (RplTopic (mkId chan) txt)
("333",[_,chan,who,time]) ->
Just (RplTopicWhoTime (mkId chan) who (asTimeStamp time))
("341",[_,nick,chan,_]) ->
Just (RplInviting (mkId nick) (mkId chan))
("346",[_,chan,mask,who,time]) ->
Just (RplInviteList (mkId chan) mask who (asTimeStamp time))
("347",[_,chan,_txt]) ->
Just (RplEndOfInviteList (mkId chan))
("348",[_,chan,mask,who,time]) ->
Just (RplExceptionList (mkId chan) mask who (asTimeStamp time))
("349",[_,chan,_txt]) ->
Just (RplEndOfExceptionList (mkId chan))
("351", _:version) ->
Just (RplVersion version)
("352",[_,chan,user,host,server,nick,flags,txt]) ->
Just (RplWhoReply (mkId chan) user host server (mkId nick) flags txt)
-- trailing is: <hop> <realname>
("353",[_,ty,chan,txt]) ->
do ty' <- case ty of
"=" -> Just PublicChannel
"*" -> Just PrivateChannel
"@" -> Just SecretChannel
_ -> Nothing
Just (RplNameReply ty' (mkId chan) (filter (not . B.null) (B8.split ' ' txt)))
("364",[_,mask,server,info]) -> Just (RplLinks mask server info)
("365",[_,mask,_] ) -> Just (RplEndOfLinks mask)
("366",[_,chan,_]) -> Just (RplEndOfNames (mkId chan))
("367",[_,chan,banned,banner,time]) ->
Just (RplBanList (mkId chan) banned banner (asTimeStamp time))
("368",[_,chan,_txt]) ->
Just (RplEndOfBanList (mkId chan))
("369",[_,nick,_]) ->
Just (RplEndOfWhoWas (mkId nick))
("371",[_,txt]) ->
Just (RplInfo txt)
("374",[_,_]) ->
Just RplEndOfInfo
("375",[_,_]) -> Just RplMotdStart
("372",[_,txt]) -> Just (RplMotd txt)
("376",[_,_]) -> Just RplEndOfMotd
("379",_:nick:modes:args) ->
Just (RplWhoisModes (mkId nick) modes args)
("378",[_,nick,txt]) ->
Just (RplWhoisHost (mkId nick) txt)
("381",[_,txt]) ->
Just (RplYoureOper txt)
("391",[_,server,txt]) ->
Just (RplTime server txt)
("396",[_,host,_]) ->
Just (RplHostHidden host)
("401",[_,nick,_]) ->
Just (Err (mkId nick) ErrNoSuchNick)
("402",[_,server,_]) ->
Just (Err "" (ErrNoSuchServer server))
("403",[_,channel,_]) ->
Just (Err (mkId channel) ErrNoSuchChannel)
("404",[_,channel,_]) ->
Just (Err (mkId channel) ErrCannotSendToChan)
("405",[_,channel,_]) ->
Just (Err (mkId channel) ErrTooManyChannels)
("406",[_,nick,_]) ->
Just (Err (mkId nick) ErrWasNoSuchNick)
("407",[_,target,_]) ->
Just (Err (mkId target) ErrTooManyTargets)
("409",[_,_]) ->
Just (Err "" ErrNoOrigin)
("411",[_,_]) ->
Just (Err "" ErrNoRecipient)
("412",[_,_]) ->
Just (Err "" ErrNoTextToSend)
("421",[_,cmd,_]) ->
Just (Err "" (ErrUnknownCommand cmd))
("422",[_,_]) ->
Just (Err "" ErrNoMotd)
("423",[_,server,_]) ->
Just (Err "" (ErrNoAdminInfo server))
("431",[_,_]) -> Just (Err "" ErrNoNicknameGiven)
("432",[_,nick,_]) -> Just (Err "" (ErrErroneousNickname nick))
("433",[_,nick,_]) -> Just (Err "" (ErrNicknameInUse (mkId nick)))
("435",[_,chan,_]) -> Just (Err (mkId chan) ErrBanNickChange)
("437",[_,ident,_]) -> Just (Err (mkId ident) ErrUnavailResource)
("438",[_,_,_,_]) -> Just (Err "" ErrNickTooFast)
("441",[_,nick,_]) ->
Just (Err (mkId nick) ErrServicesDown)
("441",[_,nick,chan,_]) ->
Just (Err (mkId chan) (ErrUserNotInChannel (mkId nick)))
("442",[_,chan,_]) ->
Just (Err (mkId chan) ErrNotOnChannel)
("443",[_,nick,chan,_]) ->
Just (Err (mkId chan) (ErrUserOnChannel (mkId nick)))
("451",[_,_]) ->
Just (Err "" ErrNotRegistered)
("456",[_,_]) ->
Just (Err "" ErrAcceptFull)
("457",[_,nick,_]) ->
Just (Err (mkId nick) ErrAcceptExist)
("458",[_,nick,_]) ->
Just (Err (mkId nick) ErrAcceptNot)
("461",[_,cmd,_]) ->
Just (Err "" (ErrNeedMoreParams cmd))
("462",[_,_]) ->
Just (Err "" ErrAlreadyRegistered)
("463",[_,_]) ->
Just (Err "" ErrNoPermForHost)
("464",[_,_]) ->
Just (Err "" ErrPasswordMismatch)
("465",[_,_]) ->
Just (Err "" ErrYoureBannedCreep)
("470",[_,chan1,chan2,_]) ->
Just (Err (mkId chan1) (ErrLinkChannel (mkId chan2)))
("471",[_,chan,_]) ->
Just (Err (mkId chan) ErrChannelFull)
("472",[_,mode,_]) ->
Just (Err "" (ErrUnknownMode (B8.head mode)))
("473",[_,chan,_]) ->
Just (Err (mkId chan) ErrInviteOnlyChan)
("474",[_,chan,_]) ->
Just (Err (mkId chan) ErrBannedFromChan)
("475",[_,chan,_]) ->
Just (Err (mkId chan) ErrBadChannelKey)
("477",[_,chan,_]) ->
Just (Err (mkId chan) ErrNeedReggedNick)
("478",[_,chan,mode,_]) ->
Just (Err (mkId chan) (ErrBanListFull (B8.head mode)))
("479",[_,chan,_]) ->
Just (Err "" (ErrBadChanName chan))
("480",[_,chan,_]) ->
Just (Err (mkId chan) ErrThrottle)
("481",[_,_]) ->
Just (Err "" ErrNoPrivileges)
("482",[_,chan,_]) ->
Just (Err (mkId chan) ErrChanOpPrivsNeeded)
("483",[_,_]) ->
Just (Err "" ErrCantKillServer)
("484",[_,nick,chan,_]) ->
Just (Err (mkId chan) (ErrIsChanService (mkId nick)))
("486",[_,nick,_]) ->
Just (Err (mkId nick) ErrNoNonReg)
("489",[_,chan,_]) ->
Just (Err (mkId chan) ErrVoiceNeeded)
("491",[_,_]) ->
Just (Err "" ErrNoOperHost)
("494",[_,nick,_]) ->
Just (Err (mkId nick) ErrOwnMode)
("501",[_,mode,_]) ->
Just (Err "" (ErrUnknownUmodeFlag (B8.head mode)))
("502",[_,_]) ->
Just (Err "" ErrUsersDontMatch)
("524",[_,topic,_]) ->
Just (Err "" (ErrHelpNotFound topic))
("671",[_,nick,_]) ->
Just (RplWhoisSecure (mkId nick))
("704",[_,topic,txt]) ->
Just (RplHelpStart topic txt)
("705",[_,topic,txt]) ->
Just (RplHelp topic txt)
("706",[_,topic,_]) ->
Just (RplEndOfHelp topic)
("710",[_,chan,who,_]) ->
Just (RplKnock (mkId chan) (parseUserInfo who))
("711",[_,chan,_]) ->
Just (RplKnockDelivered (mkId chan))
("712",[_,chan,_]) ->
Just (Err (mkId chan) ErrTooManyKnocks)
("713",[_,chan,_]) ->
Just (Err (mkId chan) ErrChanOpen)
("714",[_,chan,_]) ->
Just (Err (mkId chan) ErrKnockOnChan)
("716",[_,nick,_]) ->
Just (Err (mkId nick) ErrTargUmodeG)
("723",[_,priv,_]) ->
Just (Err "" (ErrNoPrivs priv))
("717",[_,nick,_]) ->
Just (RplTargNotify (mkId nick))
("718",[_,nick,mask,_]) ->
Just (RplUmodeGMsg (mkId nick) mask)
("728",[_,chan,mode,banned,banner,time]) ->
Just (RplQuietList (mkId chan) (B8.head mode) banned banner (asTimeStamp time))
("729",[_,chan,mode,_]) ->
Just (RplEndOfQuietList (mkId chan) (B8.head mode))
("742",[_,chan,mode,setting,_]) ->
Just (Err (mkId chan) (ErrMlockRestricted (B8.head mode) setting))
("900",[_,_,account,_]) ->
Just (RplLoggedIn account)
("901",[_,_,_]) ->
Just RplLoggedOut
("902",[_,_]) ->
Just RplNickLocked
("903",[_,_]) ->
Just RplSaslSuccess
("904",[_,_]) ->
Just RplSaslFail
("905",[_,_]) ->
Just RplSaslTooLong
("906",[_,_]) ->
Just RplSaslAborted
("907",[_,_]) ->
Just RplSaslAlready
("908",[_,mechs,_]) ->
Just (RplSaslMechs mechs)
("PING",[txt]) -> Just (Ping txt)
("PONG",[server ]) -> Just (Pong server Nothing)
("PONG",[server,txt]) -> Just (Pong server (Just txt))
("PRIVMSG",[dst,txt]) ->
do src <- msgPrefix ircmsg
Just (PrivMsg src (mkId dst) txt)
("NOTICE",[dst,txt]) ->
do src <- msgPrefix ircmsg
Just (Notice src (mkId dst) txt)
("TOPIC",[chan,txt]) ->
do who <- msgPrefix ircmsg
Just (Topic who (mkId chan) txt)
("JOIN",[chan,account,real]) ->
do who <- msgPrefix ircmsg
Just (ExtJoin who (mkId chan) (if account == "*" then Nothing else Just account) real)
("JOIN",[chan]) ->
do who <- msgPrefix ircmsg
Just (Join who (mkId chan))
("NICK",[newnick]) ->
do who <- msgPrefix ircmsg
Just (Nick who (mkId newnick))
("MODE",tgt:modes) ->
do who <- msgPrefix ircmsg
Just (Mode who (mkId tgt) modes)
("PART",[chan]) ->
do who <- msgPrefix ircmsg
Just (Part who (mkId chan) "")
("PART",[chan,txt]) ->
do who <- msgPrefix ircmsg
Just (Part who (mkId chan) txt)
("AWAY",[txt]) ->
do who <- msgPrefix ircmsg
Just (Away who (Just txt))
("AWAY",[]) ->
do who <- msgPrefix ircmsg
Just (Away who Nothing)
("QUIT",[txt]) ->
do who <- msgPrefix ircmsg
Just (Quit who txt)
("KICK",[chan,tgt,txt]) ->
do who <- msgPrefix ircmsg
Just (Kick who (mkId chan) (mkId tgt) txt)
("INVITE",[_,chan]) ->
do who <- msgPrefix ircmsg
Just (Invite who (mkId chan))
("CAP",[_,cmd,txt]) ->
Just (Cap cmd txt)
("ERROR",[txt]) ->
Just (Error txt)
("AUTHENTICATE",[txt]) ->
Just (Authenticate txt)
("ACCOUNT",[acct]) ->
do who <- msgPrefix ircmsg
Just (Account who (if acct == "*" then Nothing else Just acct))
_ -> Nothing
-- | Interpret an IRC numeric argument as seconds since the Unix epoch.
asTimeStamp :: ByteString -> UTCTime
asTimeStamp = posixSecondsToUTCTime . fromInteger . asNumber

-- | Read the leading decimal integer of an argument; input with no leading
-- digits yields 0 rather than failing.
asNumber :: ByteString -> Integer
asNumber = maybe 0 fst . B8.readInteger
|
bitemyapp/irc-core
|
src/Irc/Core.hs
|
bsd-3-clause
| 24,602
| 0
| 17
| 5,836
| 8,621
| 4,839
| 3,782
| 585
| 200
|
{-# LANGUAGE RankNTypes #-}
module Network.Xmpp.IM.PresenceTracker where
import Control.Applicative
import Control.Concurrent
import Control.Concurrent.STM
import Control.Monad
import qualified Data.Foldable as Foldable
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe
import Lens.Family2
import Lens.Family2.Stock
import Network.Xmpp.Concurrent.Types
import Network.Xmpp.IM.Presence
import Network.Xmpp.Lens hiding (Lens, Traversal)
import Network.Xmpp.Types
import Prelude hiding (mapM)
import Network.Xmpp.IM.PresenceTracker.Types
-- | Iso between the 'Peers' wrapper and its underlying nested map
-- (bare JID -> full JID -> optional IM presence).
_peers :: Iso Peers (Map Jid (Map Jid (Maybe IMPresence)))
_peers = mkIso unPeers Peers
-- | Prism focusing on the 'PeerAvailable' constructor of 'PeerStatus'.
_PeerAvailable :: Prism PeerStatus (Maybe IMPresence)
_PeerAvailable = prism' PeerAvailable matchAvailable
  where
    matchAvailable status = case status of
        PeerAvailable pres -> Just pres
        _ -> Nothing
-- | Prism matching the 'PeerUnavailable' constructor of 'PeerStatus'.
_PeerUnavailable :: Prism PeerStatus ()
_PeerUnavailable = prism' (const PeerUnavailable) matchUnavailable
  where
    matchUnavailable status = case status of
        PeerUnavailable -> Just ()
        _ -> Nothing
-- | Iso between the result of a map lookup (@Maybe (Maybe IMPresence)@)
-- and 'PeerStatus': a missing entry means the peer is unavailable.
_PeerStatus :: Iso (Maybe (Maybe IMPresence)) PeerStatus
_PeerStatus = mkIso toStatus fromStatus
  where
    toStatus = maybe PeerUnavailable PeerAvailable
    fromStatus status = case status of
        PeerUnavailable -> Nothing
        PeerAvailable imp -> Just imp
-- | Iso treating @Nothing@ as the empty map and collapsing empty maps
-- back to @Nothing@, so no empty inner maps linger in the outer map.
maybeMap :: Iso (Maybe (Map a b)) (Map a b)
maybeMap = mkIso (fromMaybe Map.empty) collapse
  where
    collapse m = if Map.null m then Nothing else Just m
-- | Lens to the status of the given full JID: look up the bare JID in
-- the outer map, then the full JID in the inner map, translating a
-- missing entry to 'PeerUnavailable' via '_PeerStatus'.
peerStatusL :: Jid -> Lens' Peers PeerStatus
peerStatusL j = _peers . at (toBare j) . maybeMap . at j . _PeerStatus
-- | Whether the given JID counts as available in the peer map: a full
-- JID must have a 'PeerAvailable' status; a bare JID is available when
-- it has any entry (i.e. at least one known resource) in the outer map.
peerMapPeerAvailable :: Jid -> Peers -> Bool
peerMapPeerAvailable j | isFull j = not . nullOf (peerStatusL j . _PeerAvailable)
                       | otherwise = not . nullOf (_peers . at j . _Just)
-- | Stanza handler that keeps the 'TVar Peers' in sync with incoming
-- presence stanzas.
--
-- Only presence stanzas from full JIDs are acted upon: Available and
-- Unavailable update the stored status; other presence types are
-- ignored.  When the stored status actually changed, the optional
-- callback is invoked in a fresh thread (so it cannot block the
-- handler).  The stanza is always passed through unmodified.
handlePresence :: Maybe (Jid -> PeerStatus -> PeerStatus -> IO ())
               -> TVar Peers
               -> StanzaHandler
handlePresence onChange peers _ st _ = do
    let mbPr = do
            pr <- st ^? _Presence -- Only act on presence stanzas
            fr <- pr ^? from . _Just . _isFull -- Only act on full JIDs
            return (pr, fr)
    Foldable.forM_ mbPr $ \(pr, fr) ->
      case presenceType pr of
       Available -> setStatus fr (PeerAvailable (getIMPresence pr))
       Unavailable -> setStatus fr PeerUnavailable
       _ -> return ()
    return [(st, [])]
  where
    -- Atomically swap in the new status, returning the old one; only
    -- notify the callback when the status really changed.
    setStatus fr newStatus = do
        os <- atomically $ do
            ps <- readTVar peers
            let oldStatus = ps ^. peerStatusL fr
            writeTVar peers $ ps & set (peerStatusL fr) newStatus
            return oldStatus
        unless (os == newStatus) $ case onChange of
            Nothing -> return ()
            Just oc -> void . forkIO $ oc fr os newStatus
        return ()
-- | Check whether a given jid is available.
isPeerAvailable :: Jid -> Session -> STM Bool
isPeerAvailable j sess = do
    ps <- readTVar (presenceRef sess)
    return (peerMapPeerAvailable j ps)
-- | Get the status of the given full JID.
getEntityStatus :: Jid -> Session -> STM PeerStatus
getEntityStatus j sess = (^. peerStatusL j) <$> readTVar (presenceRef sess)
-- | Get the list of (bare) JIDs that have available entities.
getAvailablePeers :: Session -> STM [Jid]
getAvailablePeers sess = Map.keys . unPeers <$> readTVar (presenceRef sess)
-- | Get all known full JIDs (with their optional IM presence) for the
-- bare JID of the given JID; empty map when the peer is unknown.
getPeerEntities :: Jid -> Session -> STM (Map Jid (Maybe IMPresence))
getPeerEntities j sess = do
    ps <- unPeers <$> readTVar (presenceRef sess)
    return $ Map.findWithDefault Map.empty (toBare j) ps
|
Philonous/pontarius-xmpp
|
source/Network/Xmpp/IM/PresenceTracker.hs
|
bsd-3-clause
| 4,016
| 0
| 17
| 1,077
| 1,188
| 593
| 595
| -1
| -1
|
{-# LANGUAGE TemplateHaskell #-}
module Haskell99Pointfree.P71
(
) where
import Control.Monad.Fix (fix)
import Control.Lens.Prism
import Control.Lens
import Data.Maybe
import Control.Applicative
-- | Multiway (rose) tree: a node payload plus a list of subtrees.
data Tree a = Node a [Tree a] deriving (Eq, Show)
-- Template-Haskell-generated prisms for the constructors (e.g. '_Node').
makePrisms ''Tree
-- | Point-free solution to problem 71 (internal path length of a
-- multiway tree).
-- NOTE(review): the point-free form is intentionally obfuscated; the
-- exact recursion scheme (depth accumulator threaded via 'fix', summing
-- over children extracted through the '_Node' prism) is hard to confirm
-- from the composition alone — verify against a pointed reference
-- implementation before relying on it.
p71_1 :: Tree a -> Int
p71_1 = fix fixFn 0
  where
    fixFn = ((( . ( view _2 .fromJust . preview _Node)) . ) . ( . ) . ( . sum ) . (+) <*> ) . (map . ) . ( . (+1))
|
SvenWille/Haskell99Pointfree
|
src/Haskell99Pointfree/P71.hs
|
bsd-3-clause
| 445
| 1
| 17
| 101
| 177
| 104
| 73
| 13
| 1
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.SV.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Resolve
import Duckling.Testing.Types hiding (examples)
import Duckling.Time.Corpus
import Duckling.Time.Types hiding (Month)
import Duckling.TimeGrain.Types hiding (add)
-- | Swedish (SV) time-expression corpus: the shared test context with
-- the SV locale installed, default test options, and the examples.
corpus :: Corpus
corpus = (testContext {locale = makeLocale SV Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (datetime (2013, 2, 12, 4, 30, 0) Second)
[ "nu"
, "just nu"
]
, examples (datetime (2013, 2, 12, 0, 0, 0) Day)
[ "idag"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Day)
[ "igår"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "imorgon"
, "i morgon"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "måndag"
, "mån"
, "på måndag"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "Måndag den 18 februari"
, "Mån, 18 februari"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "tisdag"
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "torsdag"
, "tors"
, "tors."
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "fredag"
, "fre"
, "fre."
]
, examples (datetime (2013, 2, 16, 0, 0, 0) Day)
[ "lördag"
, "lör"
, "lör."
]
, examples (datetime (2013, 2, 17, 0, 0, 0) Day)
[ "söndag"
, "sön"
, "sön."
]
, examples (datetime (2013, 3, 1, 0, 0, 0) Day)
[ "Den förste mars"
, "Den första mars"
, "1:a mars"
, "Den 1:a mars"
]
, examples (datetime (2013, 3, 3, 0, 0, 0) Day)
[ "3 mars"
, "den tredje mars"
, "den 3:e mars"
]
, examples (datetime (2015, 3, 3, 0, 0, 0) Day)
[ "3 mars 2015"
, "tredje mars 2015"
, "3:e mars 2015"
, "3-3-2015"
, "03-03-2015"
, "3/3/2015"
, "3/3/15"
, "2015-3-3"
, "2015-03-03"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "På den 15:e"
, "På den 15"
, "Den 15:e"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "den 15:e februari"
, "15:e februari"
, "februari 15"
, "15-02"
, "15/02"
]
, examples (datetime (2013, 8, 8, 0, 0, 0) Day)
[ "8 Aug"
]
, examples (datetime (2014, 10, 0, 0, 0, 0) Month)
[ "Oktober 2014"
]
, examples (datetime (1974, 10, 31, 0, 0, 0) Day)
[ "31/10/1974"
, "31/10/74"
, "31-10-74"
]
, examples (datetime (2015, 4, 14, 0, 0, 0) Day)
[ "14april 2015"
, "April 14, 2015"
, "fjortonde April 15"
]
, examples (datetime (2013, 2, 22, 0, 0, 0) Day)
[ "nästa fredag igen"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "nästa mars"
]
, examples (datetime (2014, 3, 0, 0, 0, 0) Month)
[ "nästa mars igen"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "Söndag, 10 feb"
, "Söndag 10 Feb"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "Ons, Feb13"
, "Ons feb13"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "Måndag, Feb 18"
, "Mån, februari 18"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Week)
[ "denna vecka"
]
, examples (datetime (2013, 2, 4, 0, 0, 0) Week)
[ "förra vecka"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Week)
[ "nästa vecka"
]
, examples (datetime (2013, 1, 0, 0, 0, 0) Month)
[ "förra månad"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "nästa månad"
]
, examples (datetime (2013, 1, 1, 0, 0, 0) Quarter)
[ "detta kvartal"
]
, examples (datetime (2013, 4, 1, 0, 0, 0) Quarter)
[ "nästa kvartal"
]
, examples (datetime (2013, 7, 1, 0, 0, 0) Quarter)
[ "tredje kvartalet"
, "3:e kvartal"
]
, examples (datetime (2018, 10, 1, 0, 0, 0) Quarter)
[ "4:e kvartal 2018"
, "fjärde kvartalet 2018"
]
, examples (datetime (2012, 1, 1, 0, 0, 0) Year)
[ "förra år"
, "förra året"
, "föregående år"
]
, examples (datetime (2012, 1, 1, 0, 0, 0) Year)
[ "i fjol"
]
, examples (datetime (2013, 1, 1, 0, 0, 0) Year)
[ "i år"
, "detta år"
]
, examples (datetime (2014, 1, 1, 0, 0, 0) Year)
[ "nästa år"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "förra söndag"
, "söndag i förra veckan"
, "söndag förra veckan"
]
, examples (datetime (2013, 2, 5, 0, 0, 0) Day)
[ "förra tisdag"
, "i tisdags"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "nästa tisdag"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "nästa onsdag"
]
, examples (datetime (2013, 2, 20, 0, 0, 0) Day)
[ "onsdag i nästa vecka"
, "onsdag nästa vecka"
, "nästa onsdag igen"
]
, examples (datetime (2013, 2, 22, 0, 0, 0) Day)
[ "nästa fredag igen"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Day)
[ "måndag denna veckan"
]
, examples (datetime (2013, 2, 12, 0, 0, 0) Day)
[ "tisdag denna vecka"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "onsdag denna vecka"
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "i överimorgon"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "i förrgår"
]
, examples (datetime (2013, 3, 25, 0, 0, 0) Day)
[ "sista måndag i mars"
]
, examples (datetime (2014, 3, 30, 0, 0, 0) Day)
[ "sista söndag i mars 2014"
]
, examples (datetime (2013, 10, 3, 0, 0, 0) Day)
[ "tredje dagen i oktober"
, "tredje dagen i Oktober"
]
, examples (datetime (2014, 10, 6, 0, 0, 0) Week)
[ "första veckan i oktober 2014"
, "första veckan i Oktober 2014"
]
, examples (datetime (2015, 10, 31, 0, 0, 0) Day)
[ "sista dagen i oktober 2015"
, "sista dagen i Oktober 2015"
]
, examples (datetime (2014, 9, 22, 0, 0, 0) Week)
[ "sista veckan i september 2014"
, "sista veckan i September 2014"
]
, examples (datetime (2013, 10, 1, 0, 0, 0) Day)
[ "första tisdag i oktober"
, "första tisdagen i Oktober"
]
, examples (datetime (2014, 9, 16, 0, 0, 0) Day)
[ "tredje tisdagen i september 2014"
, "tredje tisdagen i September 2014"
]
, examples (datetime (2014, 10, 1, 0, 0, 0) Day)
[ "första onsdagen i oktober 2014"
, "första onsdagen i Oktober 2014"
]
, examples (datetime (2014, 10, 8, 0, 0, 0) Day)
[ "andra onsdagen i oktober 2014"
, "andra onsdagen i Oktober 2014"
]
, examples (datetime (2013, 2, 13, 3, 0, 0) Hour)
[ "klockan 3"
, "kl. 3"
]
, examples (datetime (2013, 2, 13, 3, 18, 0) Minute)
[ "3:18"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
[ "klockan 15"
, "kl. 15"
, "15h"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
[ "ca. kl. 15"
, "cirka kl. 15"
, "omkring klockan 15"
]
, examples (datetime (2013, 2, 13, 17, 0, 0) Hour)
[ "imorgon klockan 17 exakt"
, "imorgon kl. 17 precis"
]
, examples (datetime (2013, 2, 12, 15, 15, 0) Minute)
[ "kvart över 15"
, "15:15"
]
, examples (datetime (2013, 2, 12, 15, 20, 0) Minute)
[ "kl. 20 över 15"
, "klockan 20 över 15"
, "tjugo över 15"
, "kl. 15:20"
, "15:20"
]
, examples (datetime (2013, 2, 12, 15, 30, 0) Minute)
[ "15:30"
]
, examples (datetime (2013, 2, 12, 15, 23, 24) Second)
[ "15:23:24"
]
, examples (datetime (2013, 2, 12, 11, 45, 0) Minute)
[ "kvart i 12"
, "kvart i tolv"
, "11:45"
]
, examples (datetime (2013, 2, 16, 9, 0, 0) Hour)
[ "klockan 9 på lördag"
]
, examples (datetime (2014, 7, 18, 19, 0, 0) Minute)
[ "Fre, Jul 18, 2014 19:00"
]
, examples (datetime (2014, 7, 18, 0, 0, 0) Day)
[ "Fre, Jul 18"
, "Jul 18, Fre"
]
, examples (datetime (2014, 9, 20, 19, 30, 0) Minute)
[ "kl. 19:30, Lör, 20 sep"
]
, examples (datetime (2013, 2, 12, 4, 30, 1) Second)
[ "om 1 sekund"
, "om en sekund"
, "en sekund från nu"
]
, examples (datetime (2013, 2, 12, 4, 31, 0) Second)
[ "om 1 minut"
, "om en minut"
]
, examples (datetime (2013, 2, 12, 4, 32, 0) Second)
[ "om 2 minuter"
, "om två minuter"
, "om 2 minuter mer"
, "om två minuter mer"
--, "2 minuter från nu" t14892978
, "två minuter från nu"
]
, examples (datetime (2013, 2, 12, 5, 30, 0) Second)
[ "om 60 minuter"
]
, examples (datetime (2013, 2, 12, 5, 0, 0) Second)
[ "om en halv timme"
]
, examples (datetime (2013, 2, 12, 7, 0, 0) Second)
[ "om 2,5 timme"
, "om 2 och en halv timme"
, "om två och en halv timme"
]
, examples (datetime (2013, 2, 12, 5, 30, 0) Minute)
[ "om en timme"
, "om 1 timme"
, "om 1t"
]
, examples (datetime (2013, 2, 12, 6, 30, 0) Minute)
[ "om ett par timmar"
]
, examples (datetime (2013, 2, 13, 4, 30, 0) Minute)
[ "om 24 timmar"
, "2013-02-13 kl. 4:30"
, "2013-02-13 kl 04:30"
]
, examples (datetime (2013, 2, 13, 4, 0, 0) Hour)
[ "om en dag"
]
, examples (datetime (2016, 2, 0, 0, 0, 0) Month)
[ -- "3 år från idag" t14892978
]
, examples (datetime (2013, 2, 19, 4, 0, 0) Hour)
[ "om 7 dagar"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "om en vecka"
]
, examples (datetime (2013, 2, 12, 5, 0, 0) Second)
[ "om ca. en halv timme"
, "om cirka en halv timme"
]
, examples (datetime (2013, 2, 5, 4, 0, 0) Hour)
[ "7 dagar sedan"
, "sju dagar sedan"
]
, examples (datetime (2013, 1, 29, 4, 0, 0) Hour)
[ "14 dagar sedan"
, "fjorton dagar sedan"
]
, examples (datetime (2013, 2, 5, 0, 0, 0) Day)
[ "en vecka sedan"
, "1 vecka sedan"
]
, examples (datetime (2013, 1, 22, 0, 0, 0) Day)
[ "3 veckor sedan"
, "tre veckor sedan"
]
, examples (datetime (2012, 11, 12, 0, 0, 0) Day)
[ "3 månader sedan"
, "tre månader sedan"
]
, examples (datetime (2011, 2, 0, 0, 0, 0) Month)
[ "två år sedan"
, "2 år sedan"
]
, examples (datetime (1954, 0, 0, 0, 0, 0) Year)
[ "1954"
]
, examples (datetimeInterval ((2013, 6, 21, 0, 0, 0), (2013, 9, 24, 0, 0, 0)) Day)
[ "denna sommaren"
, "den här sommaren"
]
, examples (datetimeInterval ((2012, 12, 21, 0, 0, 0), (2013, 3, 21, 0, 0, 0)) Day)
[ "denna vintern"
, "den här vintern"
]
, examples (datetime (2013, 12, 25, 0, 0, 0) Day)
[ "juldagen"
]
, examples (datetime (2013, 12, 31, 0, 0, 0) Day)
[ "nyårsafton"
]
, examples (datetime (2014, 1, 1, 0, 0, 0) Day)
[ "nyårsdagen"
, "nyårsdag"
]
, examples (datetimeInterval ((2013, 2, 12, 18, 0, 0), (2013, 2, 13, 0, 0, 0)) Hour)
[ "ikväll"
]
, examples (datetimeInterval ((2013, 2, 8, 18, 0, 0), (2013, 2, 11, 0, 0, 0)) Hour)
[ "förra helg"
]
, examples (datetimeInterval ((2013, 2, 13, 18, 0, 0), (2013, 2, 14, 0, 0, 0)) Hour)
[ "imorgon kväll"
]
, examples (datetimeInterval ((2013, 2, 13, 12, 0, 0), (2013, 2, 13, 14, 0, 0)) Hour)
[ "imorgon lunch"
]
, examples (datetimeInterval ((2013, 2, 11, 18, 0, 0), (2013, 2, 12, 0, 0, 0)) Hour)
[ "igår kväll"
]
, examples (datetimeInterval ((2013, 2, 15, 18, 0, 0), (2013, 2, 18, 0, 0, 0)) Hour)
[ "denna helgen"
, "denna helg"
, "i helgen"
]
, examples (datetimeInterval ((2013, 2, 18, 4, 0, 0), (2013, 2, 18, 12, 0, 0)) Hour)
[ "måndag morgon"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 29, 58), (2013, 2, 12, 4, 30, 0)) Second)
[ "senaste 2 sekunder"
, "senaste två sekunderna"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 1), (2013, 2, 12, 4, 30, 4)) Second)
[ "nästa 3 sekunder"
, "nästa tre sekunder"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 28, 0), (2013, 2, 12, 4, 30, 0)) Minute)
[ "senaste 2 minuter"
, "senaste två minuter"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 31, 0), (2013, 2, 12, 4, 34, 0)) Minute)
[ "nästa 3 minuter"
, "nästa tre minuter"
]
, examples (datetimeInterval ((2013, 2, 12, 3, 0, 0), (2013, 2, 12, 4, 0, 0)) Hour)
[ "senaste 1 timme"
]
, examples (datetimeInterval ((2013, 2, 12, 5, 0, 0), (2013, 2, 12, 8, 0, 0)) Hour)
[ "nästa 3 timmar"
, "nästa tre timmar"
]
, examples (datetimeInterval ((2013, 2, 10, 0, 0, 0), (2013, 2, 12, 0, 0, 0)) Day)
[ "senaste 2 dagar"
, "senaste två dagar"
, "senaste 2 dagar"
]
, examples (datetimeInterval ((2013, 2, 13, 0, 0, 0), (2013, 2, 16, 0, 0, 0)) Day)
[ "nästa 3 dagar"
, "nästa tre dagar"
]
, examples (datetimeInterval ((2013, 1, 28, 0, 0, 0), (2013, 2, 11, 0, 0, 0)) Week)
[ "senaste 2 veckor"
, "senaste två veckorna"
, "senaste två veckor"
]
, examples (datetimeInterval ((2013, 2, 18, 0, 0, 0), (2013, 3, 11, 0, 0, 0)) Week)
[ "nästa 3 veckor"
, "nästa tre veckorna"
]
, examples (datetimeInterval ((2012, 12, 0, 0, 0, 0), (2013, 2, 0, 0, 0, 0)) Month)
[ "senaste 2 månader"
, "senaste två månader"
, "senaste två månader"
]
, examples (datetimeInterval ((2013, 3, 0, 0, 0, 0), (2013, 6, 0, 0, 0, 0)) Month)
[ "nästa 3 månader"
, "nästa tre månader"
]
, examples (datetimeInterval ((2011, 0, 0, 0, 0, 0), (2013, 0, 0, 0, 0, 0)) Year)
[ "senaste 2 år"
, "senaste två år"
, "senaste 2 år"
]
, examples (datetimeInterval ((2014, 0, 0, 0, 0, 0), (2017, 0, 0, 0, 0, 0)) Year)
[ "nästa 3 år"
, "nästa tre år"
]
, examples (datetimeInterval ((2013, 7, 13, 0, 0, 0), (2013, 7, 16, 0, 0, 0)) Day)
[ "13-15 juli"
, "13-15 Juli"
, "13 till 15 Juli"
, "13 juli till 15 juli"
]
, examples (datetimeInterval ((2013, 8, 8, 0, 0, 0), (2013, 8, 13, 0, 0, 0)) Day)
[ "8 Aug - 12 Aug"
, "8 Aug - 12 aug"
, "8 aug - 12 aug"
, "8 augusti - 12 augusti"
]
, examples (datetimeInterval ((2013, 2, 12, 9, 30, 0), (2013, 2, 12, 11, 1, 0)) Minute)
[ "9:30 - 11:00"
, "9:30 till 11:00"
]
, examples (datetimeInterval ((2013, 2, 14, 9, 30, 0), (2013, 2, 14, 11, 1, 0)) Minute)
[ "från 9:30 - 11:00 på torsdag"
, "från 9:30 till 11:00 på torsdag"
, "mellan 9:30 och 11:00 på torsdag"
, "9:30 - 11:00 på torsdag"
, "9:30 till 11:00 på torsdag"
, "efter 9:30 men före 11:00 på torsdag"
, "torsdag från 9:30 till 11:00"
, "torsdag mellan 9:30 och 11:00"
, "från 9:30 till 11:00 på torsdag"
]
, examples (datetimeInterval ((2013, 2, 14, 9, 0, 0), (2013, 2, 14, 12, 0, 0)) Hour)
[ "torsdag från 9 till 11"
]
, examples (datetimeInterval ((2013, 2, 12, 11, 30, 0), (2013, 2, 12, 13, 31, 0)) Minute)
[ "11:30-13:30"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 0), (2013, 2, 26, 0, 0, 0)) Second)
[ "inom 2 veckor"
]
, examples (datetimeOpenInterval Before (2013, 2, 12, 14, 0, 0) Hour)
[ "innan kl. 14"
, "innan klockan 14"
]
, examples (datetime (2013, 2, 12, 13, 0, 0) Minute)
[ "@ 16 CET"
, "kl. 16 CET"
, "klockan 16 CET"
]
, examples (datetime (2013, 2, 14, 6, 0, 0) Minute)
[ "torsdag kl. 8:00 GMT"
, "torsdag kl. 8:00 gmt"
, "torsdag klockan 8:00 GMT"
, "torsdag klockan 8:00 gmt"
, "torsdag 08:00 GMT"
, "torsdag 08:00 gmt"
]
, examples (datetime (2013, 2, 12, 14, 0, 0) Hour)
[ "idag kl. 14"
, "idag klockan 14"
, "kl. 14"
, "klockan 14"
]
, examples (datetime (2013, 4, 25, 16, 0, 0) Minute)
[ "25/4 kl. 16:00"
, "25/4 klockan 16:00"
, "25-04 klockan 16:00"
, "25-4 kl. 16:00"
, "2013-04-25 kl 16:00"
]
, examples (datetime (2013, 2, 13, 15, 0, 0) Minute)
[ "15:00 imorgon"
, "kl. 15:00 imorgon"
, "klockan 15:00 imorgon"
]
, examples (datetimeOpenInterval After (2013, 2, 12, 14, 0, 0) Hour)
[ "efter kl. 14"
, "efter klockan 14"
]
, examples (datetimeOpenInterval After (2013, 2, 17, 4, 0, 0) Hour)
[ "efter 5 dagar"
, "efter fem dagar"
]
, examples (datetime (2013, 2, 17, 4, 0, 0) Hour)
[ "om 5 dagar"
, "om fem dagar"
]
, examples (datetimeOpenInterval After (2013, 2, 13, 14, 0, 0) Hour)
[ "efter imorgon kl. 14"
, "efter imorgon klockan 14"
, "imorgon efter kl. 14"
, "imorgon efter klockan 14"
]
, examples (datetimeOpenInterval Before (2013, 2, 12, 11, 0, 0) Hour)
[ "före kl. 11"
, "före klockan 11"
]
, examples (datetimeOpenInterval Before (2013, 2, 13, 11, 0, 0) Hour)
[ "imorgon före kl. 11"
, "imorgon före klockan 11"
]
, examples (datetimeInterval ((2013, 2, 12, 12, 0, 0), (2013, 2, 12, 19, 0, 0)) Hour)
[ "under eftermiddagen"
]
, examples (datetime (2013, 2, 12, 13, 30, 0) Minute)
[ "kl. 13:30"
, "klockan 13:30"
]
, examples (datetime (2013, 2, 12, 4, 45, 0) Second)
[ "om 15 minuter"
]
, examples (datetimeInterval ((2013, 2, 12, 13, 0, 0), (2013, 2, 12, 17, 0, 0)) Hour)
[ "efter lunch"
]
, examples (datetime (2013, 2, 12, 10, 30, 0) Minute)
[ "10:30"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "nästa måndag"
]
]
|
facebookincubator/duckling
|
Duckling/Time/SV/Corpus.hs
|
bsd-3-clause
| 21,621
| 0
| 11
| 9,169
| 6,772
| 4,121
| 2,651
| 461
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Codec.Compression.Zlib
( compressWith, decompress, defaultCompressParams, compressionLevel
, CompressParams(..))
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString.Lazy.Char8 as BC
import Data.Digest.Pure.SHA (showDigest, sha1)
import System.Directory (createDirectoryIfMissing, doesFileExist)
import System.Environment (getArgs)
import System.Exit (exitFailure)
import System.FilePath ((</>))
-- | Entry point: with no arguments, store stdin as a Git-style blob and
-- print its SHA1; with a single SHA1 argument, write that blob's
-- contents to stdout; anything else prints usage and fails.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> storeStdin
    [sha] -> restoreStdout sha
    _ -> do
      -- The original message was just "Usage." — say what the usage is.
      BC.putStrLn "Usage: hush            (store stdin, print its SHA1)"
      BC.putStrLn "       hush <sha1>     (write blob contents to stdout)"
      exitFailure
-- | Oversimple content-addressable storage:
-- Get content from stdin, store it as a Git blob under `/objects` and print
-- the Git SHA1 of the stored content to stdout.
-- The stored bytes are @"blob <len>\0<content>"@, zlib-compressed —
-- mirroring how Git stores loose objects.
-- TODO Use sha-streams instead of strict reading.
-- TODO Share code with buh.hs.
storeStdin :: IO ()
storeStdin = do
  content <- B.getContents
  let l = B.length content
      content' = B.concat ["blob ", BC.pack (show l), "\x00", content]
      sha = showDigest (sha1 content')
      compressed = compress' content'
  createDirectoryIfMissing True (sha1ToDirPath sha)
  B.writeFile (sha1ToPath sha) compressed
  putStrLn sha
-- | Given a SHA1, print to stdout the blob content, if any.
-- Decompresses the stored object and strips the @"blob <len>\0"@ header
-- (everything up to and including the first NUL byte) before writing.
restoreStdout :: String -> IO ()
restoreStdout sha = do
  b <- doesFileExist (sha1ToPath sha)
  -- TODO Use also try/catch.
  if b
    then do
      content <- B.readFile (sha1ToPath sha)
      -- TODO Validate content (header, length and SHA1).
      let decompressed = decompress content
          content' = B.drop 1 (BC.dropWhile (/= '\x00') decompressed)
      B.putStr content'
    else do
      BC.putStrLn "No such blob."
      exitFailure
-- | File path of the loose object for a SHA1: the first two hex
-- characters name the directory, the remainder names the file.
sha1ToPath :: String -> FilePath
sha1ToPath sha =
  let (dir, rest) = splitAt 2 sha
  in "/objects" </> dir </> rest
-- | Directory under which the object for the given SHA1 lives
-- (first two hex characters of the SHA1).
sha1ToDirPath :: String -> FilePath
sha1ToDirPath = ("/objects" </>) . take 2
-- | Same compression as Git, at least on a simple "hello" string
-- (zlib, compression level 1).  Also, maybe Git can be configured to
-- change the compression level it uses.
compress' :: B.ByteString -> B.ByteString
compress' =
  compressWith defaultCompressParams { compressLevel = compressionLevel 1 }
|
noteed/buh
|
bin/hush.hs
|
bsd-3-clause
| 2,231
| 0
| 16
| 439
| 538
| 286
| 252
| 52
| 3
|
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
module Nix.Cited.Basic where
import Control.Comonad ( Comonad )
import Control.Comonad.Env ( ComonadEnv )
import Control.Monad.Catch hiding ( catchJust )
import Control.Monad.Reader
import Data.Fix
import GHC.Generics
import Nix.Cited
import Nix.Eval as Eval
import Nix.Exec
import Nix.Expr
import Nix.Frames
import Nix.Options
import Nix.Thunk
import Nix.Utils
import Nix.Value
-- | A value paired with provenance (citation) information, wrapping
-- 'NCited' so that thunk/citation instances can be attached to it.
newtype Cited t f m a = Cited { getCited :: NCited m (NValue t f m) a }
  deriving
    ( Generic
    , Typeable
    , Functor
    , Applicative
    , Foldable
    , Traversable
    , Comonad
    , ComonadEnv [Provenance m (NValue t f m)]
    )
-- | Citation bookkeeping simply delegates to the wrapped 'NCited'.
instance HasCitations1 m (NValue t f m) (Cited t f m) where
  citations1 (Cited c) = citations c
  addProvenance1 x (Cited c) = Cited (addProvenance x c)
-- | Thunks that, when the @thunks@ option is enabled, record the
-- evaluation context ('Provenance') active at the moment the thunk was
-- created, and replay it as a stack frame when the thunk is forced.
instance ( Has e Options
         , Framed e m
         , MonadThunk t m v
         , Typeable m
         , Typeable f
         , Typeable u
         , MonadCatch m
         )
  => MonadThunk (Cited u f m t) m v where
  thunk mv = do
    opts :: Options <- asks (view hasLens)
    if thunks opts
      then do
        frames :: Frames <- asks (view hasLens)
        -- Gather the current evaluation context at the time of thunk
        -- creation, and record it along with the thunk.
        let go (fromException ->
                  Just (EvaluatingExpr scope
                          (Fix (Compose (Ann s e))))) =
              let e' = Compose (Ann s (Nothing <$ e))
              in [Provenance scope e']
            go _ = []
            ps = concatMap (go . frame) frames
        fmap (Cited . NCited ps) . thunk $ mv
      else fmap (Cited . NCited []) . thunk $ mv
  thunkId (Cited (NCited _ t)) = thunkId @_ @m t
  queryM (Cited (NCited _ t)) = queryM t
  -- | The ThunkLoop exception is thrown as an exception with MonadThrow,
  -- which does not capture the current stack frame information to provide
  -- it in a NixException, so we catch and re-throw it here using
  -- 'throwError' from Frames.hs.
  force (Cited (NCited ps t)) f =
    catch go (throwError @ThunkLoop)
   where
    -- Re-enter the recorded provenance frame (if any) before forcing.
    go = case ps of
      [] -> force t f
      Provenance scope e@(Compose (Ann s _)) : _ ->
        withFrame Info (ForcingExpr scope (wrapExprLoc s e)) (force t f)
  forceEff (Cited (NCited ps t)) f = catch
    go
    (throwError @ThunkLoop)
   where
    go = case ps of
      [] -> forceEff t f
      Provenance scope e@(Compose (Ann s _)) : _ ->
        withFrame Info (ForcingExpr scope (wrapExprLoc s e)) (forceEff t f)
  further (Cited (NCited ps t)) f = Cited . NCited ps <$> further t f
|
jwiegley/hnix
|
src/Nix/Cited/Basic.hs
|
bsd-3-clause
| 3,218
| 12
| 18
| 1,033
| 920
| 484
| 436
| 79
| 0
|
module Rubik.Negate where
import Prelude hiding (negate)
import qualified Prelude as P
-- | Types with a negation operation; expected to be an involution
-- (see 'prop_negate').
class Negate a where
  negate :: a -> a
-- | 'Integer' negation is ordinary arithmetic negation.
instance Negate Integer where
  negate = P.negate
-- | Negation is an involution: applying it twice yields the original.
prop_negate :: (Eq a, Negate a) => a -> Bool
prop_negate x = (negate . negate) x == x
|
andygill/rubik-solver
|
src/Rubik/Negate.hs
|
bsd-3-clause
| 269
| 0
| 8
| 58
| 99
| 55
| 44
| 9
| 1
|
module Data.Povray where
import Data.Povray.Base
import Data.Povray.Types
import Data.Povray.Object
import Data.Povray.Texture
import Data.Povray.Transformation
-- | Print a POV-Ray @#include@ directive for the given name
-- (rendered with 'show', so it comes out quoted).
include :: Str -> IO ()
include name = putStrLn ("#include " `mappend` show name)
-- | Render any 'Povray' value to stdout in POV-Ray syntax.
put :: Povray a => a -> IO ()
put = putStrLn . toPov
|
lesguillemets/hspov_proto
|
src/Data/Povray.hs
|
bsd-3-clause
| 293
| 0
| 8
| 45
| 99
| 58
| 41
| 10
| 1
|
{-# LANGUAGE Arrows, TypeFamilies #-}
module GLUi (newGLUi) where
import Control.Monad
import Control.Concurrent (threadDelay)
import Data.IORef
import qualified Data.Set as S
import Graphics.UI.GLFW
import qualified Graphics.Rendering.OpenGL as GL
import qualified Graphics.Rendering.FTGL as Font
import Ui
import Prelude hiding ((.), id, until)
import GLUtils
import qualified GLConfig as A
-- | GLFW/OpenGL backend state: the window handle, a mutable set of
-- currently-pressed keys, the texture atlas and the FTGL font.
data GLUi = GLUi { win :: Window, keysRef :: IORef Keys
                 , atlas :: GL.TextureObject, font :: Font.Font }
-- Shorthand for this backend's associated input/output types.
type GLOutput = Output GLUi
type GLInput = Input GLUi
-- | Wire the generic Ui machinery to this GLFW backend: input is the
-- pressed-key set, output is a render queue, and each operation
-- delegates to the top-level primed functions below.
instance UiImpl GLUi where
    data Input GLUi = GLInput { keys :: !Keys }
    data Output GLUi = GLOutput { renderQueue :: !RenderQueue }
    --data GameState GLUi = GameMode
    handleInput = getInput
    handleOutput = draw
    sleep = sleep'
    outputFreq = outputFreq'
    worldFreq = worldFreq'
    processWorld = processWorld'
    coQuit = coQuit'
    coMoveCursorRel = coMoveCursorRel'
    coStartSelect = coStartSelect'
    coCancelSelect = coCancelSelect'
    coDig = coDig'
type GLWire a b = GameWire GLUi a b
--Drawing, maybe other stuff later
-- | Turn the generic 'UiWorld' into a render queue: tiles plus the
-- cursor focus marker.  Creatures, buildings, items and the selection
-- position are received but not rendered yet.
processWorld' :: GLWire UiWorld GLOutput
processWorld' = arr (\ UiWorld{tiles = t, creatures = c, buildings = b, items = i
                              ,focusPos = f, selectPos = sel} ->
    GLOutput (mkTiles t ++ mkFocus f))
  where
    --draw creatures, buildings, items here
    mkTiles = foldWithIndices drawTile []
    mkFocus (x,y,_) = [drawImage (fromIntegral x) (fromIntegral y) A.Focus]
--Commands
-- | Quit on Q.
coQuit' :: GLWire a (Event a)
coQuit' = keyDown Key'Q
-- | Relative cursor movement: arrow keys move in the plane,
-- period/comma move between levels.  Each event carries a direction
-- and a step of 1; '<&' picks the first wire that produces an event.
coMoveCursorRel' :: GLWire a (Event (Dir, Int))
coMoveCursorRel' = mRight <& mLeft <& mUp <& mDown <& mTop <& mBottom
  where
    mkDir k d = keyDown k . mkConst (Right (d, 1))
    mRight = mkDir Key'Right DRight
    mLeft = mkDir Key'Left DLeft
    mUp = mkDir Key'Up DUp
    mDown = mkDir Key'Down DDown
    mTop = mkDir Key'Period DTop
    mBottom = mkDir Key'Comma DBottom
-- | Dig command on D.
coDig' :: GLWire a (Event a)
coDig' = keyDown Key'D
-- | Begin a selection with Space.
coStartSelect' :: GLWire a (Event a)
coStartSelect' = keyDown Key'Space
-- | Cancel the selection with Escape.
coCancelSelect' :: GLWire a (Event a)
coCancelSelect' = keyDown Key'Escape
--fps stuff
-- | Render tick rate: one output event every 0.02 s (~50 fps).
outputFreq' :: GLUi -> MWire IO a (Event a)
outputFreq' _ = periodic 0.02
-- | World-update tick rate: one event every 0.06 s.
worldFreq' :: GLWire a (Event a)
worldFreq' = periodic 0.06
--IO stuff
-- | Construct the GLFW backend state and run the Ui main loop with it.
newGLUi :: IO ()
newGLUi = setupUi GLUi >>= runUiImpl
-- | Poll GLFW events and read the current key set.  Returns 'Nothing'
-- when the window was asked to close, which ends the Ui loop.
getInput :: GLUi -> IO (Maybe (GLInput, GLUi))
getInput s@GLUi{win = win', keysRef = keysRef'} = do
    pollEvents
    k <- readIORef keysRef'
    c <- windowShouldClose win'
    return (if c then Nothing else Just (GLInput k, s))
-- | Render one frame: clear the color buffer, draw all queued quads
-- using the texture atlas, render a text overlay with FTGL, and swap
-- buffers.
draw :: (GLOutput, GLUi) -> IO ()
draw (GLOutput{renderQueue = r}, GLUi{win = w, atlas = tex, font = font'}) = do
    GL.clear [GL.ColorBuffer]
    GL.textureBinding GL.Texture2D GL.$= Just tex
    GL.renderPrimitive GL.Quads $
        foldM execRenderFunc () r
    GL.preservingAttrib [GL.AllServerAttributes] $
        Font.renderFont font' "Hello World!" Font.All
    swapBuffers w
  where
    -- Run one queued render action with fixed resolution/atlas sizes.
    execRenderFunc :: () -> RenderFunc -> IO ()
    execRenderFunc _ f = f (Resolution 64 64) (AtlasSize 32 32) NormalRender
-- | Frame-loop sleep: 10 ms via 'threadDelay' (argument is microseconds).
sleep' :: GLUi -> IO ()
sleep' _ = threadDelay 10000
--Keyboard utils, needs to see GLInput
-- | Edge-triggered key wire: fires a single event on the transition
-- from released to pressed, then stays silent until the key is
-- released again.  Implemented as a two-state machine ('off'/'on')
-- that re-selects its successor wire each step.
keyDown :: Key -> GLWire a (Event a)
keyDown k = off
  where
    pressed = asks (S.member k . keys)
    off = mkGenN $ \ a -> pressed >>= (\p -> if p then e a else ne off)
    on = mkGenN $ \ _ -> pressed >>= (\p -> ne (if p then on else off))
    e a = return (Right (Event a), on)
    ne w = return (Right NoEvent, w)
-- | Level-triggered key wire: passes its input through while the key
-- is held down, inhibits otherwise.
keyPressed :: Key -> GLWire a a
keyPressed k = mkGen_ $ \ x ->
    asks (S.member k . keys) >>= \ held ->
        return (if held then Right x else Left mempty)
-- | Complement of 'keyPressed': passes its input through while the key
-- is NOT held down, inhibits while it is.
keyNotPressed :: Key -> GLWire a a
keyNotPressed k = mkGen_ $ \ x ->
    asks (S.member k . keys) >>= \ held ->
        return (if held then Left mempty else Right x)
|
Laquendi/neflEFortress
|
GLUi.hs
|
mit
| 4,004
| 0
| 14
| 998
| 1,389
| 739
| 650
| 99
| 3
|
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
-- |
-- Copyright : (c) 2010-2012 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Portability : GHC only
--
-- The constraint reduction rules, which are not enforced as invariants in
-- "Theory.Constraint.Solver.Reduction".
--
-- A goal represents a possible application of a rule that may result in
-- multiple cases or even non-termination (if applied repeatedly). These goals
-- are computed as the list of 'openGoals'. See
-- "Theory.Constraint.Solver.ProofMethod" for the public interface to solving
-- goals and the implementation of heuristics.
module Theory.Constraint.Solver.Goals (
Usefulness(..)
, AnnotatedGoal
, openGoals
, solveGoal
) where
-- import Debug.Trace
import Prelude hiding (id, (.))
import qualified Data.ByteString.Char8 as BC
import qualified Data.DAG.Simple as D (reachableSet)
-- import Data.Foldable (foldMap)
import qualified Data.Map as M
import qualified Data.Monoid as Mono
import qualified Data.Set as S
import Control.Basics
import Control.Category
import Control.Monad.Disj
import Control.Monad.State (gets)
import Control.Monad.Trans.State.Lazy hiding (get,gets)
import Control.Monad.Trans.FastFresh -- GHC7.10 needs: hiding (get,gets)
import Control.Monad.Trans.Reader -- GHC7.10 needs: hiding (get,gets)
import Extension.Data.Label
import Theory.Constraint.Solver.Contradictions (substCreatesNonNormalTerms)
import Theory.Constraint.Solver.Reduction
import Theory.Constraint.System
import Theory.Tools.IntruderRules (mkDUnionRule, isDExpRule, isDPMultRule, isDEMapRule)
import Theory.Model
import Utils.Misc (twoPartitions)
------------------------------------------------------------------------------
-- Extracting Goals
------------------------------------------------------------------------------
-- | Classification of how promising it is to solve a goal now; used by
-- the proof heuristics to order and delay goals.
data Usefulness =
    Useful
  -- ^ A goal that is likely to result in progress.
  | LoopBreaker
  -- ^ A goal that is delayed to avoid immediate termination.
  | ProbablyConstructible
  -- ^ A goal that is likely to be constructible by the adversary.
  | CurrentlyDeducible
  -- ^ A message that is deducible for the current solution.
  deriving (Show, Eq, Ord)
-- | Goals annotated with their number (creation age) and usefulness.
type AnnotatedGoal = (Goal, (Integer, Usefulness))
-- Instances
------------
-- | The list of goals that must be solved before a solution can be extracted.
-- Each goal is annotated with its age and an indicator for its usefulness.
--
-- Runs in the list monad: one result per goal of the system that is still
-- open according to the per-goal-kind criteria in the 'guard' below.
openGoals :: System -> [AnnotatedGoal]
openGoals sys = do
    (goal, status) <- M.toList $ get sGoals sys
    let solved = get gsSolved status
    -- check whether the goal is still open
    guard $ case goal of
      ActionG i (kFactView -> Just (UpK, m)) ->
          if get sDiffSystem sys
            -- In a diff proof, all action goals need to be solved.
            then not (solved)
            else
              not $ solved
                -- message variables are not solved, except if the node already exists in the system -> facilitates finding contradictions
                || (isMsgVar m && Nothing == M.lookup i (get sNodes sys)) || sortOfLNTerm m == LSortPub
                -- handled by 'insertAction'
                || isPair m || isInverse m || isProduct m -- || isXor m
                || isUnion m || isNullaryPublicFunction m
      ActionG _ _  -> not solved
      PremiseG _ _ -> not solved
      -- Technically the 'False' disj would be a solvable goal. However, we
      -- have a separate proof method for this, i.e., contradictions.
      DisjG (Disj []) -> False
      DisjG _         -> not solved
      ChainG c p ->
          case kFactView (nodeConcFact c sys) of
            Just (DnK, viewTerm2 -> FUnion args) ->
                not solved && allMsgVarsKnownEarlier c args
            -- open chains for msg vars are only solved if N5'' is applicable
            Just (DnK, m) | isMsgVar m -> (not solved) &&
                                          (chainToEquality m c p)
                          | otherwise  -> not solved
            fa -> error $ "openChainGoals: impossible fact: " ++ show fa
      -- FIXME: Split goals may be duplicated, we always have to check
      -- explicitly if they still exist.
      SplitG idx -> splitExists (get sEqStore sys) idx

    -- rank the surviving goal (see 'Usefulness')
    let useful = case goal of
          _ | get gsLoopBreaker status -> LoopBreaker
          ActionG i (kFactView -> Just (UpK, m))
            -- if there are KU-guards then all knowledge goals are useful
            | hasKUGuards             -> Useful
            | currentlyDeducible i m  -> CurrentlyDeducible
            | probablyConstructible m -> ProbablyConstructible
          _                           -> Useful

    return (goal, (get gsNr status, useful))
  where
    -- the raw temporal order (graph edges and less-atoms) of the system
    existingDeps = rawLessRel sys
    -- whether any guarded formula of the system mentions a KU fact
    hasKUGuards  =
        any ((KUFact `elem`) . guardFactTags) $ S.toList $ get sFormulas sys

    -- True iff the sort of every literal in the term satisfies @p@.
    checkTermLits :: (LSort -> Bool) -> LNTerm -> Bool
    checkTermLits p =
        Mono.getAll . foldMap (Mono.All . p . sortOfLit)

    -- KU goals of messages that are likely to be constructible by the
    -- adversary. These are terms that do not contain a fresh name or a fresh
    -- name variable. For protocols without loops they are very likely to be
    -- constructible. For protocols with loops, such terms have to be given
    -- similar priority as loop-breakers.
    probablyConstructible m = checkTermLits (LSortFresh /=) m
                              && not (containsPrivate m)

    -- KU goals of messages that are currently deducible. Either because they
    -- are composed of public names only and do not contain private function
    -- symbols or because they can be extracted from a sent message using
    -- unpairing or inversion only.
    currentlyDeducible i m = (checkTermLits (LSortPub ==) m
                              && not (containsPrivate m))
                             || extractible i m

    extractible i m = or $ do
        (j, ru) <- M.toList $ get sNodes sys
        -- We cannot deduce a message from a last node.
        guard (not $ isLast sys j)
        let derivedMsgs = concatMap toplevelTerms $
                [ t | Fact OutFact _ [t] <- get rConcs ru] <|>
                [ t | Just (DnK, t) <- kFactView <$> get rConcs ru]
        -- m is deducible from j without an immediate contradiction
        -- if it is a derived message of 'ru' and the dependency does
        -- not make the graph cyclic.
        return $ m `elem` derivedMsgs &&
                 not (j `S.member` D.reachableSet [i] existingDeps)

    -- subterms reachable from the top by unpairing/inversion only
    toplevelTerms t@(viewTerm2 -> FPair t1 t2) =
        t : toplevelTerms t1 ++ toplevelTerms t2
    toplevelTerms t@(viewTerm2 -> FInv t1) = t : toplevelTerms t1
    toplevelTerms t = [t]

    -- every msg-var argument of the union must occur as a KU action
    -- provably before node @i@
    allMsgVarsKnownEarlier (i,_) args =
        all (`elem` earlierMsgVars) (filter isMsgVar args)
      where earlierMsgVars = do (j, _, t) <- allKUActions sys
                                guard $ isMsgVar t && alwaysBefore sys j i
                                return t

    -- check whether we have a chain that fits N5'' (an open chain between an
    -- equality rule and a simple msg var conclusion that exists as a K up
    -- previously) which needs to be resolved even if it is an open chain
    chainToEquality :: LNTerm -> NodeConc -> NodePrem -> Bool
    chainToEquality t_start conc p = is_msg_var && is_equality && ku_before
      where
        -- check whether it is a msg var
        is_msg_var  = isMsgVar t_start
        -- and whether we do have an equality rule instance at the end
        is_equality = isIEqualityRule $ nodeRule (fst p) sys
        -- get all KU-facts with the same msg var
        ku_start    = filter (\x -> (fst x) == t_start) $
                          map (\(i, _, m) -> (m, i)) $ allKUActions sys
        -- and check whether any of them happens before the KD-conclusion
        ku_before   = any (\(_, x) -> alwaysBefore sys x (fst conc)) ku_start
------------------------------------------------------------------------------
-- Solving 'Goal's
------------------------------------------------------------------------------
-- | @solveGoal goal@ enumerates all possible cases of how this goal could be
-- solved in the context of the rules from the proof context. For each case,
-- a sensible case name is returned. The goal is marked as solved up front,
-- before unification can change its representation.
solveGoal :: Goal -> Reduction String
solveGoal goal = do
    -- mark before solving, as representation might change due to unification
    markGoalAsSolved "directly" goal
    ruleCtxt <- askM pcRules
    dispatch ruleCtxt
  where
    -- one solver per goal kind
    dispatch ruleCtxt = case goal of
        DisjG disj    -> solveDisjunction disj
        SplitG i      -> solveSplit i
        ChainG c p    -> solveChain (get crDestruct ruleCtxt) (c, p)
        PremiseG p fa ->
            solvePremise (get crProtocol ruleCtxt ++ get crConstruct ruleCtxt) p fa
        ActionG i fa  -> solveAction (nonSilentRules ruleCtxt) (i, fa)
-- The following functions are internal to 'solveGoal'. Use them with great
-- care.
-- | CR-rule *S_at*: solve an action goal.
--
-- If the node is not yet labelled with a rule, one is chosen: KU-facts over
-- XOR terms are treated specially by enumerating two-partitions of the XOR
-- arguments; any other fact is solved by labelling the node with a rule
-- carrying a unifiable action. If the node is already labelled, the action
-- is unified with one of that rule's actions.
solveAction :: [RuleAC]          -- ^ All rules labelled with an action
            -> (NodeId, LNFact)  -- ^ The action we are looking for.
            -> Reduction String  -- ^ A sensible case name.
solveAction rules (i, fa@(Fact _ ann _)) = do
    mayRu <- M.lookup i <$> getM sNodes
    showRuleCaseName <$> case mayRu of
        Nothing -> case fa of
            (Fact KUFact _ [m@(viewTerm2 -> FXor ts)]) -> do
                -- enumerate all ways to split the XOR arguments in two
                partitions <- disjunctionOfList $ twoPartitions ts
                case partitions of
                    (_, []) -> do
                        -- empty second half: coerce the whole term up (KD -> KU)
                        let ru = Rule (IntrInfo CoerceRule) [kdFact m] [fa] [fa] []
                        modM sNodes (M.insert i ru)
                        insertGoal (PremiseG (i, PremIdx 0) (kdFact m)) False
                        return ru
                    (a', b') -> do
                        -- both halves non-empty: construct via the _xor rule
                        -- from the two KU subgoals
                        let a = fAppAC Xor a'
                        let b = fAppAC Xor b'
                        let ru = Rule (IntrInfo (ConstrRule $ BC.pack "_xor")) [(kuFact a),(kuFact b)] [fa] [fa] []
                        modM sNodes (M.insert i ru)
                        mapM_ requiresKU [a, b] *> return ru
            _ -> do
                -- general case: label the node with some action-carrying rule
                -- and unify 'fa' with one of its actions
                ru  <- labelNodeId i (annotatePrems <$> rules) Nothing
                act <- disjunctionOfList $ get rActs ru
                void (solveFactEqs SplitNow [Equal fa act])
                return ru
        Just ru -> do
            -- node already labelled: unify with one of its actions unless
            -- 'fa' is already among them
            unless (fa `elem` get rActs ru) $ do
                act <- disjunctionOfList $ get rActs ru
                void (solveFactEqs SplitNow [Equal fa act])
            return ru
  where
    -- If the fact in the action goal has annotations, then consider annotated
    -- versions of intruder rules (this allows high or low priority intruder knowledge
    -- goals to propagate to intruder knowledge of subterms)
    annotatePrems ru@(Rule ri ps cs as nvs) =
        if not (S.null ann) && isIntruderRule ru then
            Rule ri (annotateFact ann <$> ps) cs (annotateFact ann <$> as) nvs
        else ru
    -- require 't' to be adversary-known (KU) strictly before node 'i'
    requiresKU t = do
        j <- freshLVar "vk" LSortNode
        let faKU = kuFact t
        insertLess j i
        void (insertAction j faKU)
-- | CR-rules *DG_{2,P}* and *DG_{2,d}*: solve a premise with a direct edge
-- from a unifying conclusion or using a destruction chain.
--
-- Note that *In*, *Fr*, and *KU* facts are solved directly when adding a
-- 'ruleNode'.
--
solvePremise :: [RuleAC]         -- ^ All rules with a non-K-fact conclusion.
             -> NodePrem         -- ^ Premise to solve.
             -> LNFact           -- ^ Fact required at this premise.
             -> Reduction String -- ^ Case name to use.
solvePremise rules p faPrem
  | isKDFact faPrem = do
      -- KD premises are provided by a fresh 'IRecvRule' node; its Out premise
      -- is then solved recursively, and a chain links it to the KD premise.
      iLearn <- freshLVar "vl" LSortNode
      mLearn <- varTerm <$> freshLVar "t" LSortMsg
      let concLearn = kdFact mLearn
          premLearn = outFact mLearn
          -- !! Make sure that you construct the correct rule!
          ruLearn = Rule (IntrInfo IRecvRule) [premLearn] [concLearn] [] []
          cLearn  = (iLearn, ConcIdx 0)
          pLearn  = (iLearn, PremIdx 0)
      modM sNodes (M.insert iLearn ruLearn)
      insertChain cLearn p
      solvePremise rules pLearn premLearn

  | otherwise = do
      -- other premises: direct edge from the conclusion of a fresh rule node
      (ru, c, faConc) <- insertFreshNodeConc rules
      insertEdges [(c, faConc, faPrem, p)]
      return $ showRuleCaseName ru
-- | CR-rule *DG2_chain*: solve a chain constraint.
--
-- A chain is either closed by a direct edge from its conclusion to its
-- premise, or ('disjunction') extended by one destruction-rule step.
solveChain :: [RuleAC]             -- ^ All destruction rules.
           -> (NodeConc, NodePrem) -- ^ The chain to extend by one step.
           -> Reduction String     -- ^ Case name to use.
solveChain rules (c, p) = do
    faConc <- gets $ nodeConcFact c
    (do -- solve it by a direct edge
        cRule <- gets $ nodeRule (nodeConcNode c)
        pRule <- gets $ nodeRule (nodePremNode p)
        faPrem <- gets $ nodePremFact p
        contradictoryIf (forbiddenEdge cRule pRule)
        insertEdges [(c, faConc, faPrem, p)]
        let mPrem = case kFactView faConc of
                        Just (DnK, m') -> m'
                        _ -> error $ "solveChain: impossible"
            -- name the case after the head symbol of the closing message
            caseName (viewTerm -> FApp o _) = showFunSymName o
            caseName (viewTerm -> Lit l)    = showLitName l
            caseName t                      = show t
        contradictoryIf (illegalCoerce pRule mPrem)
        return (caseName mPrem)
     `disjunction`
     -- extend it with one step
     case kFactView faConc of
         Just (DnK, viewTerm2 -> FUnion args) ->
             do -- If the chain starts at a union message, we
                -- compute the applicable destruction rules directly.
                i <- freshLVar "vr" LSortNode
                let rus = map (ruleACIntrToRuleACInst . mkDUnionRule args)
                              (filter (not . isMsgVar) args)
                -- NOTE: We rely on the check that the chain is open here.
                ru <- disjunctionOfList rus
                modM sNodes (M.insert i ru)
                -- FIXME: Do we have to add the PremiseG here so it
                -- marked as solved?
                let v = PremIdx 0
                faPrem <- gets $ nodePremFact (i,v)
                extendAndMark i ru v faPrem faConc
         Just (DnK, m) ->
             do -- If the chain does not start at a union message,
                -- the usual *DG2_chain* extension is perfomed.
                -- But we ignore open chains, as we only resolve
                -- open chains with a direct chain
                contradictoryIf (isMsgVar m)
                cRule <- gets $ nodeRule (nodeConcNode c)
                (i, ru) <- insertFreshNode rules (Just cRule)
                contradictoryIf (forbiddenEdge cRule ru)
                -- This requires a modified chain constraint def:
                -- path via first destruction premise of rule ...
                (v, faPrem) <- disjunctionOfList $ take 1 $ enumPrems ru
                extendAndMark i ru v faPrem faConc
         _ -> error "solveChain: not a down fact"
     )
  where
    -- Insert the new edge, mark the consumed premise of the destruction rule
    -- as solved, and re-open the chain from the new node's conclusion.
    extendAndMark :: NodeId -> RuleACInst -> PremIdx -> LNFact -> LNFact
                  -> Control.Monad.Trans.State.Lazy.StateT System
                       (Control.Monad.Trans.FastFresh.FreshT
                         (DisjT (Control.Monad.Trans.Reader.Reader ProofContext))) String
    extendAndMark i ru v faPrem faConc = do
        insertEdges [(c, faConc, faPrem, (i, v))]
        markGoalAsSolved "directly" (PremiseG (i, v) faPrem)
        insertChain (i, ConcIdx 0) p
        return (showRuleCaseName ru)

    -- contradicts normal form condition:
    -- no edge from dexp to dexp KD premise, no edge from dpmult
    -- to dpmult KD premise, and no edge from dpmult to demap KD premise
    -- (this condition replaces the exp/noexp tags)
    -- no more than the allowed consecutive rule applications
    forbiddenEdge :: RuleACInst -> RuleACInst -> Bool
    forbiddenEdge cRule pRule = isDExpRule   cRule && isDExpRule   pRule ||
                                isDPMultRule cRule && isDPMultRule pRule ||
                                isDPMultRule cRule && isDEMapRule  pRule ||
                                (getRuleName cRule == getRuleName pRule)
                                  && (getRemainingRuleApplications cRule == 1)

    -- Contradicts normal form condition N2:
    -- No coerce of a pair of inverse.
    illegalCoerce pRule mPrem = isCoerceRule pRule && isPair    mPrem ||
                                isCoerceRule pRule && isInverse mPrem ||
                                -- Also: Coercing of products is unnecessary, since the protocol is *-restricted.
                                isCoerceRule pRule && isProduct mPrem
-- | Solve an equation split. There is no corresponding CR-rule in the rule
-- system on paper because there we eagerly split over all variants of a rule.
-- In practice, this is too expensive and we therefore use the equation store
-- to delay these splits.
solveSplit :: SplitId -> Reduction String
solveSplit splitId = do
    mbStores <- gets ((`performSplit` splitId) . get sEqStore)
    -- FIXME: Simplify this interaction with the equation store
    store <- case mbStores of
        Nothing     -> error "solveSplit: inexistent split-id"
        Just stores -> disjunctionOfList stores
    hnd        <- getMaudeHandle
    substCheck <- gets (substCreatesNonNormalTerms hnd)
    simplified <- simp hnd substCheck store
    contradictoryIf (eqsIsFalse simplified)
    sEqStore =: simplified
    return "split"
-- | CR-rule *S_disj*: solve a disjunction of guarded formulas using a case
-- distinction.
--
-- In contrast to the paper, we use n-ary disjunctions and also split over all
-- of them at once.
solveDisjunction :: Disj LNGuarded -> Reduction String
solveDisjunction disj = do
    let numbered = zip [(1::Int)..] (getDisj disj)
    (caseIdx, formula) <- disjunctionOfList numbered
    insertFormula formula
    return ("case_" ++ show caseIdx)
|
rsasse/tamarin-prover
|
lib/theory/src/Theory/Constraint/Solver/Goals.hs
|
gpl-3.0
| 18,815
| 0
| 31
| 6,296
| 3,767
| 1,939
| 1,828
| 248
| 14
|
-- | The widget ids of exposed hole components
{-# LANGUAGE OverloadedStrings #-}
module Lamdu.GUI.ExpressionEdit.HoleEdit.WidgetIds
( WidgetIds(..), make
) where
import qualified Graphics.UI.Bottle.Widget as Widget
import qualified Lamdu.GUI.WidgetIds as WidgetIds
import Lamdu.Sugar.EntityId (EntityId)
-- | The widget ids of the components a hole exposes (see 'make' for how
-- each id is derived from the hole's root id).
data WidgetIds = WidgetIds
    { hidHole             :: Widget.Id -- ^ root id of the hole itself
    , hidWrapper          :: Widget.Id -- ^ sub-id @["Wrapper"]@
    , hidClosedSearchArea :: Widget.Id -- ^ sub-id @["SearchArea","SearchTerm"]@
    , hidOpen             :: Widget.Id -- ^ sub-id @["SearchArea","Open"]@
    , hidOpenSearchTerm   :: Widget.Id -- ^ sub-id @["SearchArea","Open","SearchTerm"]@
    , hidResultsPrefix    :: Widget.Id -- ^ sub-id @["SearchArea","Open","Results"]@
    }
-- | Build the full set of hole widget ids, all rooted at the widget id
-- derived from the given entity.
make :: EntityId -> WidgetIds
make entityId =
    WidgetIds
    { hidHole             = root
    , hidWrapper          = sub ["Wrapper"]
    , hidClosedSearchArea = sub ["SearchArea", "SearchTerm"]
    , hidOpen             = sub ["SearchArea", "Open"]
    , hidOpenSearchTerm   = sub ["SearchArea", "Open", "SearchTerm"]
    , hidResultsPrefix    = sub ["SearchArea", "Open", "Results"]
    }
    where
        root = WidgetIds.fromEntityId entityId
        sub = Widget.joinId root
|
da-x/lamdu
|
Lamdu/GUI/ExpressionEdit/HoleEdit/WidgetIds.hs
|
gpl-3.0
| 1,079
| 0
| 9
| 232
| 245
| 151
| 94
| 22
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>Relatório em HTML personalizável</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Conteúdo</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Índice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Busca</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoritos</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/customreport/src/main/javahelp/org/zaproxy/zap/extension/customreport/resources/help_pt_BR/helpset_pt_BR.hs
|
apache-2.0
| 982
| 79
| 66
| 159
| 420
| 212
| 208
| -1
| -1
|
-- CIS 194 Homework 2
module Log where
import Control.Applicative
-- | Severity of a log message; 'Error' carries a numeric severity level.
data MessageType = Info
                 | Warning
                 | Error Int
                 deriving (Show, Eq)

-- | Timestamps are plain integers.
type TimeStamp = Int

-- | A parsed log line: either a well-formed message, or the raw text of a
-- line that could not be parsed.
data LogMessage = LogMessage MessageType TimeStamp String
                | Unknown String
                deriving (Show, Eq)

-- | A binary tree of log messages; 'Leaf' is the empty tree.
data MessageTree = Leaf
                 | Node MessageTree LogMessage MessageTree
                 deriving (Show, Eq)
-- | @testParse p n f@ tests the log file parser @p@ by running it on the
-- contents of file @f@ and keeping only the first @n@ parsed messages.
testParse :: (String -> [LogMessage])
          -> Int
          -> FilePath
          -> IO [LogMessage]
testParse parser count path = do
    contents <- readFile path
    return (take count (parser contents))
-- | @testWhatWentWrong p w f@ tests the log file parser @p@ and
-- warning message extractor @w@ by running them on the log file
-- @f@.
testWhatWentWrong :: (String -> [LogMessage])
                  -> ([LogMessage] -> [String])
                  -> FilePath
                  -> IO [String]
testWhatWentWrong parser extract path = do
    contents <- readFile path
    return (extract (parser contents))
|
hungaikev/learning-haskell
|
cis194/week2/Log.hs
|
apache-2.0
| 1,072
| 0
| 9
| 323
| 231
| 129
| 102
| 24
| 1
|
{-# LANGUAGE TemplateHaskell #-}
import Data.FileEmbed
-- | Prints the value produced by the 'dummySpace' Template Haskell splice
-- (file-embed package) with a reserved size of 100 bytes.
-- NOTE(review): this serves as a compile-time test of the splice; the exact
-- printed value is determined by file-embed, not by this file.
main = print $(dummySpace 100)
|
phadej/file-embed
|
template.hs
|
bsd-2-clause
| 87
| 0
| 8
| 12
| 22
| 11
| 11
| 3
| 1
|
{-# LANGUAGE OverloadedStrings #-}
import Reflex.Dom
-- | Minimal reflex-dom example: render the static text "Hello, world!".
main = mainWidget $ text "Hello, world!"
|
reflex-frp/reflex-platform
|
examples/WorkOnTest/Main.hs
|
bsd-3-clause
| 95
| 0
| 6
| 14
| 19
| 10
| 9
| 3
| 1
|
{-# LANGUAGE TupleSections, RecordWildCards, FlexibleContexts #-}
module Analysis where
import qualified Data.Map as M
import Data.Map ((!))
import Data.Function
import Data.List (sortBy)
-- import Debug.Trace
import Control.Arrow
import Types
import Unification
import Propositions
import Scopes
import Unbound.LocallyNameless
-- | This function turns a proof into something that is easier to work on in
-- unfication:
-- * Constants have been turned into variables,
-- * Local variables have been renamed
-- * Scopes have been calculated
-- * Free variables have their scoped variables as arguments
--
-- After unification, ScopedProofs contains the fully unified result
data ScopedProof = ScopedProof
    { spProps      :: M.Map PortSpec Term  -- ^ the proposition at each port
    , spScopedVars :: M.Map PortSpec [Var] -- ^ variables in scope at each port
    , spFreeVars   :: [Var]                -- ^ all (localized) free variables of the proof
    }
-- | Build a 'ScopedProof' (see the comment above): rename rule-local
-- variables per block, compute the variables scoped over each block, and
-- apply scoped variables as arguments to free variables.
prepare :: Context -> Task -> Proof -> ScopedProof
prepare ctxt task proof = ScopedProof {..}
  where
    scopes = calculateScopes ctxt task proof
    -- for each block key, the port specs whose scope contains that block
    scopeMap = M.fromListWith (++) [ (k, [pdom]) | (ks, pdom) <- scopes, k <- ks ]

    -- Rename a rule variable so it is unique to the given block (suffix is
    -- the block number).
    localize :: Block -> Var -> Var
    localize block n = makeName (name2String n) (fromIntegral (blockNum block))

    -- All (localized) variables scoped over the given block.
    scopesOverBlock :: Key Block -> [Var]
    scopesOverBlock blockKey = [ v'
        | BlockPort pdBlockKey pdPortKey <- M.findWithDefault [] blockKey scopeMap
        , let pdBlock = blocks proof ! pdBlockKey
        , let pdRule = block2Rule ctxt task pdBlock
        , let port = ports pdRule ! pdPortKey
        , v <- portScopes port
        , let v' = localize pdBlock v
        ]

    -- Every port of every block in the proof.
    allPortSpecs :: [PortSpec]
    allPortSpecs =
        [ BlockPort blockKey portKey
        | (blockKey, block) <- M.toList (blocks proof)
        , portKey <- M.keys $ ports (block2Rule ctxt task block)
        ]

    -- The proposition at a port, with local variables localized (s1) and
    -- free variables applied to the block's scoped variables (s2).
    propAtPortSpec :: PortSpec -> Term
    propAtPortSpec (BlockPort blockKey portKey) = prop'
      where
        block = blocks proof ! blockKey
        rule = block2Rule ctxt task block
        prop = portProp (ports rule ! portKey)
        scopes = scopesOverBlock blockKey
        f = freeVars rule
        l = localVars rule
        -- localize everything
        s1 = [ (a, V a') | a <- l, let a' = localize block a]
        -- add scopes
        s2 = [ (a', mkApps (V a') (map V scopes)) | a <- f, let a' = localize block a]
        prop' = substs s2 (substs s1 prop)

    spProps :: M.Map PortSpec Term
    spProps = M.fromList $ map (id &&& propAtPortSpec) allPortSpecs

    -- Variables in scope at a port: those scoped over the whole block plus
    -- the port's own scoped variables.
    scopedVarsAtPortSpec :: PortSpec -> [Var]
    scopedVarsAtPortSpec (BlockPort blockKey portKey) =
        scopesOverBlock blockKey ++ map (localize block) (portScopes port)
      where
        block = blocks proof ! blockKey
        port = ports (block2Rule ctxt task block) ! portKey

    spScopedVars :: M.Map PortSpec [Var]
    spScopedVars = M.fromList $ map (id &&& scopedVarsAtPortSpec) allPortSpecs

    -- All localized free variables of all blocks of the proof.
    spFreeVars =
        [ localize block v
        | (_, block) <- M.toList (blocks proof)
        , v <- freeVars (block2Rule ctxt task block)
        ]
-- | Unification outcome per connection of the proof.
type UnificationResults = M.Map (Key Connection) UnificationResult

-- | Unify the propositions at the two ends of every fully-connected
-- connection, returning the proof with the resulting binding applied to
-- 'spProps' plus the per-connection unification results.
unifyScopedProof :: Proof -> ScopedProof -> (ScopedProof, UnificationResults)
unifyScopedProof proof (ScopedProof {..}) =
    (ScopedProof spProps' spScopedVars spFreeVars, M.fromList unificationResults)
  where
    -- one equation per connection with both ends present, in a
    -- deterministic order (sorted by the connection value)
    equations =
        [ (connKey, (prop1, prop2))
        | (connKey, conn) <- sortBy (compare `on` snd) $ M.toList (connections proof)
        , Just psFrom <- return $ connFrom conn
        , Just psTo <- return $ connTo conn
        , Just prop1 <- return $ M.lookup psFrom spProps
        , Just prop2 <- return $ M.lookup psTo spProps
        ]

    (final_bind, unificationResults) = unifyLiberally spFreeVars equations

    -- It is far too costly to compute this in every invocation of
    -- applyBinding' below, so do it once up front.
    highest = firstFree (M.toList final_bind, M.elems spProps)

    spProps' = M.map (applyBinding' highest final_bind) spProps
|
eccstartup/incredible
|
logic/Analysis.hs
|
mit
| 3,958
| 0
| 14
| 995
| 1,169
| 616
| 553
| 73
| 1
|
module Control.Gruppe.CGI where
import Control.Types
import Gateway.CGI
import Control.Gruppe.Typ as T
import Control.Gruppe.DB
import Control.Monad
-- | Edit form for a group ('Gruppe'): shows text fields pre-filled from the
-- given group (or "?" when none is given) and, when "update" is submitted,
-- writes the entered values back via 'put'.
edit :: VNr -> Maybe Gruppe -> Form IO ()
edit v mg = do
    open btable
    -- text field defaulted to the selected attribute of the group, if any
    let dtf label select =
          defaulted_textfield label $ case mg of
              Just g -> toString $ select g ; Nothing -> "?"
    n <- dtf "Name" T.name
    r <- dtf "Referent" T.referent
    m <- dtf "Plätze" T.maxStudents
    close -- btable
    up <- submit "update"
    when up $ do
        -- key is the group number of the existing group, if any
        io $ put ( fmap T.gnr mg )
           $ Gruppe { vnr = v
                    , name = fromCGI n
                    , referent = fromCGI r
                    , maxStudents = fromCGI m
                    }
|
florianpilz/autotool
|
src/Control/Gruppe/CGI.hs
|
gpl-2.0
| 683
| 4
| 15
| 212
| 252
| 124
| 128
| 23
| 2
|
module Reddit.Routes.Subreddit where
import Reddit.Types.Subreddit hiding (title)
import Reddit.Types.SubredditSettings
import Network.API.Builder.Routes
-- | Route for a subreddit's about page: @GET /r/<sub>/about@.
aboutSubreddit :: SubredditName -> Route
aboutSubreddit (R sub) =
    Route ["r", sub, "about"] [] "GET"
-- | Route for a subreddit's settings: @GET /r/<sub>/about/edit@.
subredditSettings :: SubredditName -> Route
subredditSettings (R sub) =
    Route ["r", sub, "about", "edit"] [] "GET"
-- | Route for updating a subreddit's settings: @POST /api/site_admin@.
-- Each field of 'SubredditSettings' is mapped to the corresponding form
-- parameter of the endpoint; the parameter names are fixed by the reddit
-- API and must not be changed.
setSubredditSettings :: SubredditID -> SubredditSettings -> Route
setSubredditSettings sr settings =
  Route ["api", "site_admin"]
        [ "sr" =. sr
        , "description" =. sidebarText settings
        , "public_description" =. descriptionText settings
        , "title" =. title settings
        , "link_type" =. linkType settings
        , "comment_score_hide_mins" =. hideScoreMins settings
        , "submit_link_label" =. submitLinkLabel settings
        , "submit_text_label" =. submitTextLabel settings
        , "domain_css" =. domainCSS settings
        , "domain_sidebar" =. domainSidebar settings
        , "show_media" =. showMedia settings
        , "over_18" =. over18 settings
        , "language" =. language settings
        , "wiki_edit_karma" =. wikiEditKarma settings
        , "wiki_edit_age" =. wikiEditAge settings
        , "wikimode" =. wikiEditMode settings
        , "spam_comments" =. spamComments settings
        , "spam_selfposts" =. spamSelfposts settings
        , "spam_links" =. spamLinks settings
        , "public_traffic" =. publicTrafficStats settings
        , "type" =. subredditType settings ]
        "POST"
|
FranklinChen/reddit
|
src/Reddit/Routes/Subreddit.hs
|
bsd-2-clause
| 1,662
| 0
| 8
| 475
| 360
| 191
| 169
| 37
| 1
|
{-# LANGUAGE EmptyDataDecls #-}
module ReservedWords where
-- | Compiler test fixture (Fay): binds every JavaScript reserved word as a
-- Haskell identifier and prints it, checking that the code generator encodes
-- such names safely in the emitted JavaScript. The exact @let ... in@ shape
-- of each line is the point of the test, so it must not be "simplified".
main = do
  -- All reserved words
  let break = "break" in putStrLn break
  let catch = "catch" in putStrLn catch
  let const = "const" in putStrLn const
  let continue = "continue" in putStrLn continue
  let debugger = "debugger" in putStrLn debugger
  let delete = "delete" in putStrLn delete
  let enum = "enum" in putStrLn enum
  let export = "export" in putStrLn export
  let extends = "extends" in putStrLn extends
  let finally = "finally" in putStrLn finally
  let for = "for" in putStrLn for
  let function = "function" in putStrLn function
  let implements = "implements" in putStrLn implements
  let instanceof = "instanceof" in putStrLn instanceof
  let interface = "interface" in putStrLn interface
  let new = "new" in putStrLn new
  let null = "null" in putStrLn null
  let package = "package" in putStrLn package
  let private = "private" in putStrLn private
  let protected = "protected" in putStrLn protected
  let public = "public" in putStrLn public
  let return = "return" in putStrLn return
  let static = "static" in putStrLn static
  let super = "super" in putStrLn super
  let switch = "switch" in putStrLn switch
  let this = "this" in putStrLn this
  let throw = "throw" in putStrLn throw
  let try = "try" in putStrLn try
  let typeof = "typeof" in putStrLn typeof
  let undefined = "undefined" in putStrLn undefined
  let var = "var" in putStrLn var
  let void = "void" in putStrLn void
  let while = "while" in putStrLn while
  let with = "with" in putStrLn with
  let yield = "yield" in putStrLn yield
  putStrLn ""
  -- Stdlib functions that need to be encoded
  -- (the local 'const' bindings above are out of scope here, so this is
  -- Prelude.const)
  putStrLn $ const "stdconst" 2
|
fpco/fay
|
tests/reservedWords.hs
|
bsd-3-clause
| 1,690
| 0
| 10
| 358
| 625
| 261
| 364
| 40
| 1
|
--
-- Copyright (c) 2010 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE OverloadedStrings #-}
module Backend.Camel where
import Data.List
import qualified Data.Text.Lazy as T
import qualified Data.Map as M
import qualified DBus.Types as D
import qualified DBus.Introspection as I
import Text.Printf
import Backend
import Tools
import Template
-- | The OCaml ("Camel") backend: bundles the server- and client-stub
-- generators defined below.
backend :: Backend
backend = Backend { genServer = genServer_
                  , genClient = genClient_ }
-- | File name of the generated server module for an object.
outputServerFile :: String -> String
outputServerFile object = object ++ "_server.ml"

-- | Key of the server-side template in the input's template map.
serverMLFileTemplate :: String
serverMLFileTemplate = "template_server.ml"

-- | File name of the generated client module for an object.
outputClientFile :: String -> String
outputClientFile object = object ++ "_client.ml"

-- | Key of the client-side template in the input's template map.
clientMLFileTemplate :: String
clientMLFileTemplate = "template_client.ml"
-- | Look up a code-generation template by name, failing loudly when the
-- input does not provide it.
lookupTemplate :: String -> Input -> String
lookupTemplate name input =
    maybe missing id (M.lookup name (templates input))
  where
    missing = error $ "No such template: " ++ name
-- | The D-Bus type of a parameter: the first type of its signature.
-- NOTE(review): partial ('head') — assumes every introspected parameter
-- signature contains at least one type; confirm against the XML source.
paramType :: I.Parameter -> D.Type
paramType (I.Parameter _ sig) = head $ D.signatureTypes sig

-- | The D-Bus types of a list of parameters.
paramTypes :: [I.Parameter] -> [D.Type]
paramTypes = map paramType

-- | The name of a parameter, as a plain 'String'.
paramName :: I.Parameter -> String
paramName (I.Parameter n _) = T.unpack n
-- | OCaml expression denoting the DBus signature constant for a D-Bus type.
-- Object paths and signatures are both emitted as 'DBus.SigString' /
-- 'DBus.SigObjectPath' per the cases below; compound types recurse.
typeSig :: D.Type -> String
typeSig D.DBusBoolean = "DBus.SigBool"
typeSig D.DBusByte = "DBus.SigByte"
typeSig D.DBusInt16 = "DBus.SigInt16"
typeSig D.DBusInt32 = "DBus.SigInt32"
typeSig D.DBusInt64 = "DBus.SigInt64"
typeSig D.DBusWord16 = "DBus.SigUInt16"
typeSig D.DBusWord32 = "DBus.SigUInt32"
typeSig D.DBusWord64 = "DBus.SigUInt64"
typeSig D.DBusDouble = "DBus.SigDouble"
typeSig D.DBusString = "DBus.SigString"
typeSig D.DBusObjectPath = "DBus.SigObjectPath"
-- signatures are represented as strings on the OCaml side
typeSig D.DBusSignature = "DBus.SigString"
typeSig D.DBusVariant = "DBus.SigVariant"
typeSig (D.DBusArray elemT) = "DBus.SigArray (" ++ typeSig elemT ++ ")"
typeSig (D.DBusStructure elemTs) = "DBus.SigStruct [" ++ (concat . intersperse ";" $ map typeSig elemTs) ++ "]"
typeSig (D.DBusDictionary keyT elemT) = "DBus.SigDict ((" ++ typeSig keyT ++ "),(" ++ typeSig elemT ++ "))"
-- | OCaml expression wrapping the variable @var_name@ (holding the array
-- payload) in the DBus array-element constructor matching the element type.
arrayConstructor :: D.Type -> String -> String
arrayConstructor elemT var_name =
    cons elemT
  where
    n = var_name
    cons D.DBusBoolean = "DBus.Bools " ++ n
    cons D.DBusByte = "DBus.Bytes " ++ n
    cons D.DBusInt16 = "DBus.Int16s " ++ n
    cons D.DBusInt32 = "DBus.Int32s " ++ n
    cons D.DBusInt64 = "DBus.Int64s " ++ n
    cons D.DBusWord16 = "DBus.UInt16s " ++ n
    cons D.DBusWord32 = "DBus.UInt32s " ++ n
    cons D.DBusWord64 = "DBus.UInt64s " ++ n
    cons D.DBusDouble = "DBus.Doubles " ++ n
    -- object paths and signatures are carried as strings
    cons D.DBusString = "DBus.Strings " ++ n
    cons D.DBusObjectPath = "DBus.Strings " ++ n
    cons D.DBusSignature = "DBus.Strings " ++ n
    cons D.DBusVariant = "DBus.Variants " ++ n
    -- structs carry their element signature list alongside the payload
    cons (D.DBusStructure types) = printf "DBus.Structs (%s,%s)" siglist n
        where siglist = concat . intersperse ";" . map typeSig $ types
    cons (D.DBusArray elemT) = printf "DBus.Arrays (%s,%s)" (typeSig elemT) subarrays
        where subarrays = n
    cons (D.DBusDictionary keyT elemT) = printf "DBus.Dicts ((%s,%s),%s)" (typeSig keyT) (typeSig elemT) n
-- | OCaml expression wrapping the variable @var_name@ in the DBus value
-- constructor matching the given D-Bus type. Compound payloads (arrays,
-- dictionaries) delegate to 'arrayConstructor'.
typeConstructor :: D.Type -> String -> String
typeConstructor typ var_name =
    cons typ
  where
    n = var_name
    cons D.DBusBoolean = "DBus.Bool " ++ n
    cons D.DBusByte = "DBus.Byte " ++ n
    cons D.DBusInt16 = "DBus.Int16 " ++ n
    cons D.DBusInt32 = "DBus.Int32 " ++ n
    cons D.DBusInt64 = "DBus.Int64 " ++ n
    cons D.DBusWord16 = "DBus.UInt16 " ++ n
    cons D.DBusWord32 = "DBus.UInt32 " ++ n
    cons D.DBusWord64 = "DBus.UInt64 " ++ n
    cons D.DBusDouble = "DBus.Double " ++ n
    cons D.DBusString = "DBus.String " ++ n
    cons D.DBusObjectPath = "DBus.ObjectPath " ++ n
    -- signatures are carried as strings
    cons D.DBusSignature = "DBus.String " ++ n
    cons D.DBusVariant = "DBus.Variant " ++ n
    -- unused pattern variables replaced by wildcards (silences -Wall
    -- unused-binding warnings; behavior unchanged)
    cons (D.DBusStructure _)
        = "DBus.Struct " ++ n
    cons (D.DBusArray elemT)
        = "DBus.Array " ++ "(" ++ arrayConstructor elemT n ++ ")"
    -- dictionaries are marshalled as arrays of their dict payload
    cons t@(D.DBusDictionary _ _)
        = "DBus.Array " ++ "(" ++ arrayConstructor t n ++ ")"
-- | 'typeConstructor' applied to a parameter's own type and name.
typeConstructor' :: I.Parameter -> String
typeConstructor' param = typeConstructor ty nm
  where
    ty = paramType param
    nm = paramName param
-- | One OCaml @match@ arm per method of the interface: dispatch an incoming
-- D-Bus call on @(interface, member)@, unmarshal the arguments, call the
-- implementation in the @<object>Methods@ module, and marshal the reply.
handlerStubs :: String -> I.Interface -> [String]
handlerStubs object (I.Interface iname methods _ _) =
    map stub methods
  where
    stub (I.Method name inparams outparams) =
        let mname = T.unpack $ D.strMemberName name
            -- OCaml module implementing the method bodies, e.g. @FooMethods@
            call_module = capitalise . decamelise $ object ++ "Methods"
            call_name = replace "." "_" (interface ++ "_" ++ mname)
            call_args = concat . intersperse " " . ("msg" :) . map get_pname $ inparams
        in
        unlines $ [ printf "\t\t\t| \"%s\", \"%s\", args -> (try " interface mname
                  , printf "\t\t\t\tlet [%s] = args in" (concat . intersperse "; " . map typeConstructor' $ inparams)
                  , printf "\t\t\t\tlet (%s) = %s.%s %s in" (outvars outparams) call_module call_name call_args
                  , "\t\t\t\tlet reply = DBus.Message.new_method_return msg in"
                  ]
                  ++ reply_appends (nameSequence outparams)
                  ++ [ "\t\t\t\treply"
                     , "\t\t\t\t with Match_failure _ -> DBus.Message.new_error msg DBus.ERR_INVALID_SIGNATURE \"invalid arguments\""
                     , "\t\t\t\t | Failure s -> DBus.Message.new_error msg DBus.ERR_FAILED s"
                     , "\t\t\t\t | _ -> DBus.Message.new_error msg DBus.ERR_FAILED \"exception occured\")"
                     ]
    -- comma-separated names binding the method's out-parameters
    outvars pms = concat . intersperse "," $ map get_pname (nameSequence pms)
    -- rename out-parameters to out_1, out_2, ... (keeping their signatures)
    nameSequence pms = aux 1 pms
      where
        aux _ [] = []
        aux id (p:ps) =
            let (I.Parameter _ sig) = p in
            I.Parameter (T.pack $ "out_" ++ show id) sig : aux (id+1) ps
    -- one DBus.Message.append line per out-parameter
    reply_appends params = map append params
    append param = "\t\t\t\tDBus.Message.append reply [" ++ typeConstructor' param ++ "];"
    interface = T.unpack $ D.strInterfaceName iname
    get_pname (I.Parameter pname _) = T.unpack pname
-- | One OCaml client stub per method of the interface: each stub performs
-- the D-Bus call with marshalled arguments and unmarshals the reply into a
-- tuple. NOTE(review): the 'object' parameter is unused here; kept for
-- signature symmetry with 'handlerStubs'.
callStubs :: String -> I.Interface -> [String]
callStubs object (I.Interface iname methods _ _) =
    map stub methods
  where
    stub (I.Method name inparams outparams) =
        let mname = T.unpack $ D.strMemberName name
            stub_name = replace "." "_" (interface ++ "_" ++ mname)
            stub_args = concat . intersperse " " . map paramName $ inparams
            call_args = concat . intersperse "; " . map typeConstructor' $ inparams
            out_cons = concat . intersperse "; " $ outConstructors outparams
            out_values = concat . intersperse ", " $ take (length outparams) outnames
        in
        unlines $ [ printf "let %s ?(timeout=(-1)) service_ obj_path_ %s = " stub_name stub_args
                  , printf "\tlet reply = call_dbus ~timeout ~service:service_ ~obj:obj_path_ ~interface:\"%s\" ~member:\"%s\" ~args:[ %s ] in" interface mname call_args
                  , printf "\tmatch Message.get reply with"
                  , printf "\t| [ %s ] -> (%s)" out_cons out_values
                  , printf "\t| _ -> failwith \"unexpected reply\""
                  ]
    -- an infinite supply of result names out_1, out_2, ...
    indices = [1..]
    outnames = map name indices
      where name i = "out_" ++ show i
    -- patterns binding each out-parameter value to its result name
    outConstructors pms = map constructor (zip (paramTypes pms) outnames)
      where constructor (typ,name) = typeConstructor typ name
    interface = T.unpack $ D.strInterfaceName iname
-- | Generate the server-side OCaml module: instantiate the server template
-- with the (quoted) introspection XML, the interface name, and the method
-- handler stubs. NOTE(review): only the first interface of the
-- introspection document gets handler stubs; 'head' and the irrefutable
-- 'Just' pattern fail on empty/unparsable XML.
genServer_ :: Input -> IO Output
genServer_ input =
    return [ (outputServerFile object, server_contents) ]
  where
    object = objectname input
    server_contents =
        substRules [ ("@QUOTED_INTROSPECT_XML@", quoted_introspect_xml)
                   , ("@INTERFACE@", interface_name interface)
                   , ("@METHOD_HANDLER_STUBS@", handlers)
                   , ("@object@", object)
                   ]
        $ lookupTemplate serverMLFileTemplate input
    -- escape double quotes so the XML can be embedded in an OCaml string
    quoted_introspect_xml = replace "\"" "\\\"" (xml input)
    interface_name (I.Interface n _ _ _) = T.unpack $ D.strInterfaceName n
    interface = head interfaces
    interfaces = let Just intro_obj = I.fromXML "/" (T.pack (xml input))
                     (I.Object _ ifs _) = intro_obj
                 in
                     ifs
    handlers = concat . intersperse "\n" $ handlerStubs object interface
-- | Generate the client-side OCaml module: instantiate the client template
-- with call stubs for every interface of the introspection document.
-- NOTE(review): the irrefutable 'Just' pattern fails on unparsable XML.
genClient_ :: Input -> IO Output
genClient_ input =
    return [ (outputClientFile object, client_contents) ]
  where
    object = objectname input
    client_contents =
        substRules [ ("@STUBS@", stubs) ]
        $ lookupTemplate clientMLFileTemplate input
    stubs = concat . intersperse "\n" . concat . map (callStubs object) $ interfaces
    interfaces = let Just intro_obj = I.fromXML "/" (T.pack (xml input))
                     (I.Object _ ifs _) = intro_obj
                 in
                     ifs
|
jean-edouard/idl
|
rpcgen/Backend/Camel.hs
|
gpl-2.0
| 9,672
| 0
| 18
| 2,586
| 2,454
| 1,250
| 1,204
| 169
| 16
|
{-# LANGUAGE OverloadedStrings #-}
module Parser (
parseExpr,
parseModule
) where
import Text.Parsec
import Text.Parsec.Text.Lazy (Parser)
import qualified Text.Parsec.Expr as Ex
import qualified Text.Parsec.Token as Tok
import qualified Data.Text.Lazy as L
import Lexer
import Syntax
import Control.Applicative ((<$>))
-- | Integer-literal parser taken from the shared token parser in "Lexer".
integer :: Parser Integer
integer = Tok.integer lexer
-- | Parse a variable reference, tagging it with the source line reached
-- after the identifier has been consumed.
variable :: Parser Expr
variable = do
  name <- identifier
  pos  <- getPosition
  return (Var (Located (sourceLine pos)) name)
-- | Parse an integer literal into a 'Lit' node tagged with its source line.
number :: Parser Expr
number = do
  value <- integer
  pos   <- getPosition
  return (Lit (Located (sourceLine pos)) (fromIntegral value))
-- | Parse @\\x y -> body@, desugaring multiple parameters into nested
-- single-argument lambdas (right fold).
lambda :: Parser Expr
lambda = do
  reservedOp "\\"
  params <- many identifier
  reservedOp "->"
  body <- expr
  pos  <- getPosition
  return (foldr (Lam (Located (sourceLine pos))) body params)
-- | Atomic expressions: a parenthesised expression, a lambda, a numeric
-- literal, or a variable — tried in that order.
aexp :: Parser Expr
aexp = choice [parens expr, lambda, number, variable]
-- | One or more atoms folded into left-associative application.
expr :: Parser Expr
expr = do
  atoms <- many1 aexp
  pos   <- getPosition
  -- many1 guarantees a non-empty list, so foldl1 is safe here.
  return (foldl1 (App (Located (sourceLine pos))) atoms)
type Binding = (String, Expr)

-- | A bare top-level expression is bound to the conventional name "it".
val :: Parser Binding
val = do
  body <- expr
  return ("it", body)
-- | A top-level binding, optionally terminated by a semicolon.
top :: Parser Binding
top = do
  binding <- val
  optional semi
  return binding
-- | A module is a sequence of top-level bindings.
modl :: Parser [Binding]
modl = many top
-- | Parse a single expression from REPL input.
parseExpr :: L.Text -> Either ParseError Expr
parseExpr = parse (contents expr) "<stdin>"
-- | Parse a whole module, reporting errors against the given file name.
parseModule :: FilePath -> L.Text -> Either ParseError [(String, Expr)]
parseModule = parse (contents modl)
|
yupferris/write-you-a-haskell
|
chapter9/provenance/Parser.hs
|
mit
| 1,477
| 0
| 13
| 301
| 552
| 283
| 269
| 58
| 1
|
-- Expected-output fixture for HaRe's "lift one level" refactoring test;
-- the code must stay exactly as the tool emits it.
module PatBindIn1 where
main :: Int
main = foo 3
foo :: Int -> Int
foo x = (h_1 + t) + (snd tup_1)
tup_1 :: (Int, Int)
-- 'head' is safe here: the zipped literal ranges are non-empty.
tup_1@(h_1, t) = head $ (zip [1 .. 10] [3 .. 15])
tup = 10
h = 17
|
kmate/HaRe
|
old/testing/liftOneLevel/PatBindIn1_AstOut.hs
|
bsd-3-clause
| 200
| 0
| 8
| 61
| 111
| 64
| 47
| 9
| 1
|
module Div where
-- LiquidHaskell positive test: each property is annotated with the refined
-- type Valid, asserting the returned Bool is provably true over the reals
-- (the "--real" flag enables real-arithmetic reasoning in the solver).
-- The {-@ ... @-} pragmas are read by LiquidHaskell and must not change.
{-@ LIQUID "--real" @-}
{-@ type Valid = {v:Bool | ( (Prop v) <=> true ) } @-}
{-@ mulAssoc :: Double -> Double -> Double -> Valid @-}
mulAssoc :: Double -> Double -> Double -> Bool
mulAssoc x y z = (x * y) * z == x * (y * z)
{-@ mulCommut :: Double -> Double -> Valid @-}
mulCommut :: Double -> Double -> Bool
mulCommut x y = x * y == y * x
{-@ mulId :: Double -> Valid @-}
mulId :: Double -> Bool
mulId x = x == 1 * x
{-@ mulDistr :: Double -> Double -> Double -> Valid @-}
mulDistr :: Double -> Double -> Double -> Bool
mulDistr x y z = x * (y + z) == (x * y) + (x * z)
{-@ divId :: Double -> Valid @-}
divId :: Double -> Bool
divId x = x / 1.0 == x
-- Division by x is only sound for non-zero x, hence the refined argument.
{-@ inverse :: {v:Double | v != 0.0} -> Valid @-}
inverse :: Double -> Bool
inverse x = 1.0 == x * (1.0 / x)
|
mightymoose/liquidhaskell
|
tests/pos/RealProps.hs
|
bsd-3-clause
| 792
| 0
| 9
| 201
| 246
| 134
| 112
| 13
| 1
|
{-# LANGUAGE DuplicateRecordFields #-}
{-# OPTIONS_GHC -Werror=duplicate-exports #-}
-- This should warn about the duplicate export of foo, but not the
-- exports of the two different bar fields.
-- GHC testsuite fixture: `foo` is exported twice (once via T(..), once
-- standalone) to trigger -Wduplicate-exports; the two distinct `bar`
-- fields are legal under DuplicateRecordFields and must not warn.
module Export (T(foo, bar), foo, S(bar)) where
data T = MkT { foo :: Int, bar :: Int }
data S = MkS { bar :: Int }
|
sdiehl/ghc
|
testsuite/tests/overloadedrecflds/should_fail/DuplicateExports.hs
|
bsd-3-clause
| 313
| 0
| 8
| 59
| 65
| 44
| 21
| 10
| 0
|
{-# LANGUAGE FlexibleContexts #-}
module JSDOM.Custom.Database (
module Generated
, changeVersion'
, changeVersion
, transaction'
, transaction
, readTransaction'
, readTransaction
) where
import Data.Maybe (fromJust, maybe)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Concurrent.MVar (takeMVar, putMVar, newEmptyMVar)
import JSDOM.Types
(MonadDOM, SQLTransaction, SQLError, DOM,
SQLTransactionCallback(..), ToJSString(..), Callback(..), withCallback,
SQLTransactionErrorCallback(..), VoidCallback(..))
import JSDOM.Custom.SQLError (throwSQLException)
import JSDOM.Generated.SQLTransactionCallback (newSQLTransactionCallbackSync)
import JSDOM.Generated.Database as Generated hiding (changeVersion, transaction, readTransaction)
import qualified JSDOM.Generated.Database as Generated (changeVersion, transaction, readTransaction)
import JSDOM.Generated.SQLTransactionErrorCallback
(newSQLTransactionErrorCallback)
import JSDOM.Generated.VoidCallback
(newVoidCallback)
-- | Wrap a Haskell transaction action as a JS 'SQLTransactionCallback'
-- for the duration of the continuation.
withSQLTransactionCallback :: MonadDOM m => (SQLTransaction -> DOM ()) -> (SQLTransactionCallback -> DOM a) -> m a
withSQLTransactionCallback = withCallback . newSQLTransactionCallbackSync
-- | Run an action that takes an error callback and a success callback,
-- and block until one of them fires: the error callback deposits
-- @Just err@ in an MVar, the success callback deposits @Nothing@.
withSQLErrorCallbacks :: MonadDOM m => (Maybe SQLTransactionErrorCallback -> Maybe VoidCallback -> DOM ()) -> m (Maybe SQLError)
withSQLErrorCallbacks action = do
  resultVar <- liftIO newEmptyMVar
  withCallback (newSQLTransactionErrorCallback (liftIO . putMVar resultVar . Just)) $ \onError ->
    withCallback (newVoidCallback $ liftIO $ putMVar resultVar Nothing) $ \onSuccess -> do
      action (Just onError) (Just onSuccess)
      liftIO $ takeMVar resultVar
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Database.changeVersion Mozilla Database.changeVersion documentation>
--
-- Returns @Just err@ on failure instead of throwing.
changeVersion' :: (MonadDOM m, ToJSString oldVersion, ToJSString newVersion) =>
                 Database -> oldVersion -> newVersion -> Maybe (SQLTransaction -> DOM ()) -> m (Maybe SQLError)
changeVersion' self old new Nothing =
  withSQLErrorCallbacks $ Generated.changeVersion self old new Nothing
changeVersion' self old new (Just callback) =
  withSQLTransactionCallback callback $ \txn ->
    withSQLErrorCallbacks $ \onError onSuccess ->
      Generated.changeVersion self old new (Just txn) onError onSuccess
-- | Like 'changeVersion'', but rethrows a returned 'SQLError'.
changeVersion :: (MonadDOM m, ToJSString oldVersion, ToJSString newVersion) =>
                Database -> oldVersion -> newVersion -> Maybe (SQLTransaction -> DOM ()) -> m ()
changeVersion self oldVersion newVersion callback = do
  merr <- changeVersion' self oldVersion newVersion callback
  case merr of
    Nothing  -> return ()
    Just err -> throwSQLException err
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Database.transaction Mozilla Database.transaction documentation>
--
-- Run the callback in a read-write transaction; failure is returned as
-- @Just@ rather than thrown.
transaction' :: (MonadDOM m) => Database -> (SQLTransaction -> DOM ()) -> m (Maybe SQLError)
transaction' self callback =
  withSQLTransactionCallback callback $ \txn ->
    withSQLErrorCallbacks $ \onError onSuccess ->
      Generated.transaction self txn onError onSuccess
-- | Like 'transaction'', but rethrows a returned 'SQLError'.
transaction :: (MonadDOM m) => Database -> (SQLTransaction -> DOM ()) -> m ()
transaction self callback = do
  merr <- transaction' self callback
  maybe (return ()) throwSQLException merr
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Database.readTransaction Mozilla Database.readTransaction documentation>
--
-- Run the callback in a read-only transaction; failure is returned as
-- @Just@ rather than thrown.
readTransaction' :: (MonadDOM m) => Database -> (SQLTransaction -> DOM ()) -> m (Maybe SQLError)
readTransaction' self callback =
  withSQLTransactionCallback callback $ \txn ->
    withSQLErrorCallbacks $ \onError onSuccess ->
      Generated.readTransaction self txn onError onSuccess
-- | Like 'readTransaction'', but rethrows a returned 'SQLError'.
readTransaction :: (MonadDOM m) => Database -> (SQLTransaction -> DOM ()) -> m ()
readTransaction self callback = do
  merr <- readTransaction' self callback
  case merr of
    Nothing  -> return ()
    Just err -> throwSQLException err
|
ghcjs/jsaddle-dom
|
src/JSDOM/Custom/Database.hs
|
mit
| 3,898
| 0
| 15
| 610
| 1,019
| 541
| 478
| 58
| 1
|
-- :l C:\Local\Dev\haskell\learn_chapter5.hs
-- Recursion
-- | Maximum of a list via guards.
--
-- Fix: the original had no equation for the empty list, so @fsmaximum1 []@
-- died with an opaque non-exhaustive-pattern error; it now fails with a
-- descriptive message (mirroring 'fsmaximum2').
fsmaximum1 :: Ord a => [a] -> a
fsmaximum1 [] = error "fsmaximum1: empty list"
fsmaximum1 (x:xs)
  | null xs = x
  | otherwise = max x (fsmaximum1 xs)
-- | Maximum of a list; errors on the empty list.
fsmaximum2 :: Ord a => [a] -> a
fsmaximum2 [] = error "maximum of empty list"
fsmaximum2 (y:ys) = foldr max y ys
-- | Build a list of @count@ copies of @item@; non-positive counts yield [].
fsreplicate :: (Num n, Ord n) => n -> e -> [e]
fsreplicate count item =
  if count <= 0 then [] else item : fsreplicate (count - 1) item
-- | Take the first @k@ elements; stops at the end of the list or when the
-- count runs out, whichever comes first.
fstake :: (Num n, Ord n) => n -> [x] -> [x]
fstake _ [] = []
fstake k (y:ys)
  | k <= 0 = []
  | otherwise = y : fstake (k - 1) ys
-- | Same contract as 'fstake', written with an if-expression instead of
-- guards.
fstake2 :: (Num n, Ord n) => n -> [x] -> [x]
fstake2 _ [] = []
fstake2 k (y:ys) =
  if k <= 0 then [] else y : fstake2 (k - 1) ys
{-
take 3 [4,3,2,1]
take n = 3, x = 4, xs = [3,2,1]
return 4 : take 2 [3,2,1]
take n = 2, x = 3, xs = [2,1]
return 3 : take 1 [2,1]
take n = 1, x = 2, xs = [1]
return 2 : take 0 [1]
[]
4 : 3 : 2 : []
4:3:2:[]
[4,3,2]
-}
-- | Reverse a list.
--
-- Fix: the original appended with @++ [x]@ at every step, which is O(n²);
-- this version uses an accumulator and runs in O(n). Same results.
fsreverse :: [a] -> [a]
fsreverse = go []
  where
    -- cons each element onto the accumulator, which ends up reversed
    go acc []     = acc
    go acc (x:xs) = go (x:acc) xs
-- | Infinite list of a single value; the self-referential binding shares
-- one cons cell instead of allocating a new one per element.
fsrepeat :: a -> [a]
fsrepeat x = xs where xs = x : xs
-- | Pair up two lists, truncating at the shorter one.
fszip :: [a] -> [b] -> [(a,b)]
fszip (a:as) (b:bs) = (a, b) : fszip as bs
fszip _ _ = []
-- Challenge - DONE! GREAT!
-- | Split a list of pairs into a pair of lists, using a tail-recursive
-- helper that accumulates both lists in reverse and fixes the order once
-- at the end.
fsunzip :: [(a, b)] -> ([a], [b])
fsunzip = go [] []
  where
    go accA accB []             = (reverse accA, reverse accB)
    go accA accB ((a, b):rest)  = go (a:accA) (b:accB) rest
-- Even better!!! =D
-- Unzip elements and assemble the lists in order, but no tail recursion opt. possible (right?!)
-- | Split a list of pairs, assembling both output lists in order via
-- ordinary (non-tail) recursion.
fsunzip2 :: [(a, b)] -> ([a], [b])
fsunzip2 [] = ([], [])
fsunzip2 ((a, b):rest) =
  let (as, bs) = fsunzip2 rest
  in (a:as, b:bs)
-- | Membership test; (||) short-circuits exactly like the original guards.
fselem :: Eq e => e -> [e] -> Bool
fselem _ [] = False
fselem needle (x:xs) = needle == x || fselem needle xs
-- | Naive quicksort: head as pivot, duplicates kept on the "lesser" side.
fsqsort :: (Ord a) => [a] -> [a]
fsqsort [] = []
fsqsort (pivot:rest) = fsqsort lesser ++ pivot : fsqsort greater
  where
    lesser  = [e | e <- rest, e <= pivot]
    greater = [e | e <- rest, e > pivot]
-- | Sum via a right fold — same traversal as the explicit recursion.
fssum :: (Num a) => [a] -> a
fssum = foldr (+) 0
-- | Product via a right fold; the empty product is 1.
fsproduct :: (Num a) => [a] -> a
fsproduct = foldr (*) 1
-- | Length as a fold.  Note the (unusual) original signature is kept: the
-- element type and the numeric result type are the same type variable.
fslength :: (Num a) => [a] -> a
fslength = foldr (\_ acc -> 1 + acc) 0
-- main = putStr $ show $ fsunzip2 [(1, True), (0, False)]
|
feliposz/learning-stuff
|
haskell/learn_chapter5.hs
|
mit
| 2,568
| 0
| 10
| 800
| 1,177
| 621
| 556
| 57
| 2
|
{-# LANGUAGE OverloadedStrings #-}
module Unison.Test.Typechecker where
import Data.Functor
import Test.Tasty
import Test.Tasty.HUnit
import Unison.Codebase.MemCodebase ()
import Unison.Symbol (Symbol)
import Unison.Term as E
import Unison.Paths (Path)
import Unison.Type as T
import Unison.Typechecker as Typechecker
import Unison.View (DFO)
import qualified Unison.Codebase as Codebase
import qualified Unison.Note as Note
import qualified Unison.Parsers as Parsers
import qualified Unison.Paths as Paths
import qualified Unison.Test.Common as Common
import qualified Unison.Test.Term as Term
-- Shorthand aliases used throughout these tests.
type V = Symbol DFO
type TTerm = Term.TTerm
type TType = Type V
type TEnv f = T.Env f V
type TCodebase = IO Common.TCodebase
infixr 1 -->
-- | Right-associative function-type constructor for test types.
(-->) :: TType -> TType -> TType
(-->) = T.arrow
-- | Wrapper whose Eq is full type equivalence (alpha-equivalence via the
-- typechecker) rather than structural equality.
data StrongEq = StrongEq TType
instance Eq StrongEq where StrongEq t1 == StrongEq t2 = Typechecker.equals t1 t2
instance Show StrongEq where show (StrongEq t) = show t
-- | Type-lookup environment backed by the shared test codebase: resolves
-- a reference to its stored type.
env :: TCodebase -> TEnv IO
env codebase r = do
  (cb, _, _, _) <- Note.lift codebase
  Codebase.typeAt cb (E.ref r) mempty
-- | Infer the local variable typings visible at @path@ within @e@.
--
-- Fix: the original bound the result and immediately returned it
-- (@t2 <- m; pure t2@); the redundant bind is removed.
localsAt :: TCodebase -> Path -> TTerm -> IO [(V, Type V)]
localsAt codebase path e =
  Note.run (Typechecker.locals (env codebase) path e)
-- | Assert that the stored type at @path@ is equivalent to @t@
-- (mutual subtyping in both directions).
synthesizesAt :: TCodebase -> Path -> TTerm -> TType -> Assertion
synthesizesAt codebase path e t = Note.run $ do
  (cb, _, _, _) <- Note.lift codebase
  inferred <- Codebase.typeAt cb e path
  _ <- Note.fromEither (Typechecker.subtype inferred t)
  _ <- Note.fromEither (Typechecker.subtype t inferred)
  pure ()
-- | Assert that the subterm at @path@ checks against @t@: the subterm is
-- wrapped with an explicit annotation and the whole term re-synthesized.
checksAt :: TCodebase -> Path -> TTerm -> TType -> Assertion
checksAt codebase path e t = Note.run . void $
  Typechecker.synthesize (env codebase) (Paths.modifyTerm' (\tm -> E.wrapV (E.ann tm t)) path e)
-- | Both directions of 'synthesizesAt' and 'checksAt' at a path.
synthesizesAndChecksAt :: TCodebase -> Path -> TTerm -> TType -> Assertion
synthesizesAndChecksAt codebase path e t = do
  synthesizesAt codebase path e t
  checksAt codebase path e t
-- | Assert the synthesized type of @e@ is equivalent to @t@ (mutual
-- subtyping).
synthesizes :: TCodebase -> TTerm -> TType -> Assertion
synthesizes codebase e t = Note.run $ do
  inferred <- Typechecker.synthesize (env codebase) e
  _ <- Note.fromEither (Typechecker.subtype inferred t)
  _ <- Note.fromEither (Typechecker.subtype t inferred)
  pure ()
-- | Assert that @e@ checks against @t@.
checks :: TCodebase -> TTerm -> TType -> Assertion
checks codebase e t = void (Note.run (Typechecker.check (env codebase) e t))
-- | Assert @t1@ is a subtype of @t2@, failing the test with the
-- typechecker's error message otherwise.
checkSubtype :: TType -> TType -> Assertion
checkSubtype t1 t2 =
  either (\err -> assertFailure ("subtype failure:\n" ++ show err))
         (const (pure ()))
         (Typechecker.subtype t1 t2)
-- | Assert both synthesis and checking succeed for @e@ at type @t@.
synthesizesAndChecks :: TCodebase -> TTerm -> TType -> Assertion
synthesizesAndChecks codebase e t = do
  synthesizes codebase e t
  checks codebase e t
--singleTest = withResource Common.node (\_ -> pure ()) $ \node -> testGroup "Typechecker"
-- [
-- testTerm "f -> let x = (let saved = f; 42); 1" $ \tms ->
-- testCase ("synthesize/check ("++tms++")") $ synthesizesAndChecks node
-- (unsafeParseTerm tms)
-- (unsafeParseType "forall x. x -> Number")
-- ]
-- | Build a test from a source string by handing the string to the
-- continuation (a hook point so the string appears once per test).
testTerm :: String -> (String -> TestTree) -> TestTree
testTerm src mkTest = mkTest src
-- Re-exported partial parsers; presumably they error out on parse
-- failure (per the "unsafe" naming) — acceptable inside tests only.
unsafeParseTerm :: String -> TTerm
unsafeParseTerm = Parsers.unsafeParseTerm
unsafeParseType :: String -> TType
unsafeParseType = Parsers.unsafeParseType
tests :: TestTree
tests = withResource Common.codebase (\_ -> pure ()) $ \node -> testGroup "Typechecker"
[
testCase "alpha equivalence (type)" $ assertEqual "const"
(unsafeParseType "forall a b. a -> b -> a")
(unsafeParseType "forall x y. x -> y -> x")
, testCase "subtype (1)" $ checkSubtype
(unsafeParseType "Number")
(unsafeParseType "Number")
, testCase "subtype (2)" $ checkSubtype
(unsafeParseType "forall a. a")
(unsafeParseType "Number")
, testCase "subtype (3)" $ checkSubtype
(unsafeParseType "forall a. a")
(unsafeParseType "forall a. a")
, testCase "strong equivalence (type)" $ assertEqual "types were not equal"
(StrongEq (unsafeParseType "forall a b. a -> b -> a"))
(StrongEq (unsafeParseType "forall y x. x -> y -> x"))
, testTerm "42" $ \tms -> testCase ("synthesize/check" ++ tms) $ synthesizesAndChecks node
(unsafeParseTerm tms)
(unsafeParseType "Number")
, testCase "synthesize/check Term.id" $ synthesizesAndChecks node
(unsafeParseTerm "a -> a")
(unsafeParseType "forall b. b -> b")
, testCase "synthesize/check Term.const" $ synthesizesAndChecks node
(unsafeParseTerm "x y -> x")
(unsafeParseType "forall a b. a -> b -> a")
, testCase "synthesize/check (x y -> y)" $ synthesizesAndChecks node
(unsafeParseTerm "x y -> y")
(unsafeParseType "forall a b. a -> b -> b")
, testCase "synthesize/check (let f = (+); f 1;;)" $ synthesizesAndChecks node
(unsafeParseTerm "let f = (+); f 1;;")
(T.lit T.Number --> T.lit T.Number)
, testCase "synthesize/check (let blank x = _; blank 1;;)" $ synthesizesAndChecks node
(unsafeParseTerm "let blank x = _; blank 1;;")
(forall' ["a"] $ T.v' "a")
, testCase "synthesize/check Term.fix" $ synthesizesAndChecks node
(unsafeParseTerm "let rec fix f = f (fix f); fix;;")
(forall' ["a"] $ (T.v' "a" --> T.v' "a") --> T.v' "a")
, testCase "synthesize/check Term.pingpong1" $ synthesizesAndChecks node
Term.pingpong1
(forall' ["a"] $ T.v' "a")
, testCase "synthesize/check [1,2,1+1]" $ synthesizesAndChecks node
(unsafeParseTerm "[1, 2, 1 + 1]")
(T.lit T.Vector `T.app` T.lit T.Number)
, testTerm "[1, 2, 1 + 1]" $ \tms ->
testCase ("synthesize/checkAt "++tms++"@[Paths.Arg, Index 2]") $ synthesizesAndChecksAt node
[Paths.Arg, Paths.Index 2] (unsafeParseTerm tms) (T.lit T.Number)
, testTerm "let x = _; _;;" $ \tms ->
testCase ("synthesize/checkAt ("++tms++")@[Binding 0,Body]") $ synthesizesAndChecksAt node
[Paths.Binding 0, Paths.Body] (unsafeParseTerm tms) unconstrained
-- fails
, testTerm "f -> let x = (let saved = f; 42;;); 1;;" $ \tms ->
testCase ("synthesize/check ("++tms++")") $ synthesizesAndChecks node
(unsafeParseTerm tms)
(unsafeParseType "forall x. x -> Number")
, testTerm "f -> let x = (b a -> b) 42 f; 1;;" $ \tms ->
testCase ("synthesize/check ("++tms++")") $ synthesizesAndChecks node
(unsafeParseTerm tms) (unsafeParseType "forall x. x -> Number")
, testTerm "f x y -> (x y -> y) f _ + _" $ \tms ->
testCase ("synthesize/check ("++tms++")") $ do
synthesizesAndChecks node
(unsafeParseTerm tms)
(unsafeParseType "forall a b c. a -> b -> c -> Number")
, testTerm "(id -> let x = id 42; y = id \"hi\"; 43;;) : (forall a . a -> a) -> Number" $ \tms ->
testCase ("higher rank checking: " ++ tms) $
let
t = unsafeParseType "(forall a . a -> a) -> Number"
tm = unsafeParseTerm tms
in synthesizesAndChecks node tm t
-- Let generalization not implemented yet; this test fails
--, testCase "let generalization: let id a = a; x = id 42; y = id 'hi'; 23" $
-- let
-- tm = E.let1'
-- [ ("id", E.lam' ["a"] (E.var' "a") `E.ann` T.forall' ["a"] (T.v' "a")),
-- ("id@Number", E.var' "id" `E.app` E.num 42),
-- ("id@Text", E.var' "id" `E.app` E.text "hi")
-- ] (E.num 43)
-- in synthesizesAndChecks node tm $ T.lit T.Number
, testTerm "x y -> _ + _" $ \tms ->
testCase ("locals ("++tms++")@[Body,Body,Fn,Arg]") $ do
let tm = unsafeParseTerm tms
[(_,xt), (_,yt)] <- localsAt node [Paths.Body, Paths.Body, Paths.Fn, Paths.Arg] tm
assertEqual "xt unconstrainted" unconstrained (T.generalize xt)
assertEqual "yt unconstrainted" unconstrained (T.generalize yt)
, testTerm "let x = _; _;;" $ \tms ->
testCase ("locals ("++tms++")") $ do
let tm = unsafeParseTerm tms
[(_,xt)] <- localsAt node [Paths.Body] tm
[] <- localsAt node [Paths.Binding 0, Paths.Body] tm
assertEqual "xt unconstrainted" unconstrained (T.generalize xt)
, testTerm "let x = _; y = _; _;;" $ \tms ->
testCase ("locals ("++tms++")@[Body,Body]") $ do
let tm = unsafeParseTerm tms
[(_,xt), (_,yt)] <- localsAt node [Paths.Body, Paths.Body] tm
assertEqual "xt unconstrained" unconstrained (T.generalize xt)
assertEqual "yt unconstrained" unconstrained (T.generalize yt)
, testTerm "let x = _; y = _; _;;" $ \tms ->
-- testTerm "let x = 42; y = _; _" $ \tms ->
-- testTerm "let x = 42; y = 43; _" $ \tms ->
-- testTerm "let x = 42; y = 43; 4224" $ \tms ->
testCase ("locals ("++tms++")@[Body,Binding 0,Body]") $ do
let tm = unsafeParseTerm tms
[(_,xt)] <- localsAt node [Paths.Body, Paths.Binding 0, Paths.Body] tm
assertEqual "xt unconstrainted" unconstrained (T.generalize xt)
]
-- | The fully polymorphic type; used to assert a local is unconstrained.
unconstrained :: TType
unconstrained = unsafeParseType "forall a. a"
main :: IO ()
main = defaultMain tests
-- main = defaultMain singleTest
|
nightscape/platform
|
shared/tests/Unison/Test/Typechecker.hs
|
mit
| 8,844
| 0
| 18
| 1,868
| 2,495
| 1,277
| 1,218
| 167
| 2
|
{- |
Typed AST for Stan programs
-}
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, FunctionalDependencies, OverloadedStrings, UndecidableInstances #-}
module Math.Stan.TAST where
import Control.Monad.State.Strict
import Math.Stan.AST
import Data.String
import Data.List
------------------------------------------------
---- Typed Expressions
-----------------------------------------------
-- | A Stan expression carrying its inferred Stan type; the phantom
-- parameter tracks the Haskell-side result type.
data Expr a = Expr
  { exprTy :: T
  , unExpr :: E
  } deriving (Show)

-- | A distribution/probability expression, typed like 'Expr'.
data Prob a = Prob T E
  deriving (Show)

-- | A typed assignment target (pattern).
newtype Pat a = Pat P
  deriving (Show)

-- | A Stan type tagged with the Haskell type it represents.
newtype TyT a = TyT T
  deriving (Show)
-- String literals denote un-indexed variable patterns (OverloadedStrings).
instance IsString (Pat a) where
  fromString nm = Pat (nm,[])
{-instance IsString (Expr a) where
fromString nm = Expr $ EVar nm -}
-- | Arithmetic builds symbolic Stan AST nodes; the result reuses the left
-- operand's type.  NOTE(review): 'signum' is not defined (and 'negate'
-- falls back to @0 - x@ via the default), so calling 'signum' on an
-- 'Expr' fails at runtime — confirm this is intentional.
instance Num a => Num (Expr a) where
  (Expr t a) + (Expr _ b) = Expr t $ EBin "+" a b
  (Expr t a) - (Expr _ b) = Expr t $ EBin "-" a b
  (Expr t a) * (Expr _ b) = Expr t $ EBin "*" a b
  abs (Expr t a) = Expr t $ EApp "abs" [a]
  fromInteger = Expr (fromBase TInt) . EInt . fromInteger
-- | Division and rational literals as symbolic Stan expressions.
instance Fractional a => Fractional (Expr a) where
  (Expr t a) / (Expr _ b) = Expr t $ EBin "/" a b
  fromRational = Expr (fromBase TReal) . EReal . fromRational
infixl 7 !

-- | Overloaded indexing: patterns, expressions and types can each be
-- indexed by an integer expression, stripping one list layer from the
-- phantom type (or adding one, for types).
class Indexable a b | a -> b, b -> a where
  (!) :: a -> Expr Int -> b

instance Indexable (Pat [a]) (Pat a) where
  (Pat (nm, ixs)) ! ix = Pat (nm, ixs ++ [unExpr ix])

instance Indexable (Expr [a]) (Expr a) where
  (Expr t e) ! ix = Expr (reduceDims t) $ EIx e [unExpr ix]

instance Indexable (TyT a) (TyT [a]) where
  (TyT (T base bnds dims)) ! (Expr _ dime) = TyT $ T base bnds $ dims ++ [dime]
-- | Unbounded scalar Stan types.
int :: TyT Int
int = TyT (T TInt (Nothing, Nothing) [])

real :: TyT Double
real = TyT (T TReal (Nothing, Nothing) [])
-- | Vector type of the given (symbolic) length.
--
-- Fix: the original bound the length expression's type to an unused name
-- @t@ (an -Wunused-matches warning); a wildcard documents the intent.
vec :: Expr Int -> TyT [Double]
vec (Expr _ n) = TyT $ T (TVector n) (Nothing, Nothing) []
infixl 1 .:

-- | Pair an identifier with the underlying Stan type of a typed type,
-- as used in declaration lists.
(.:) :: Id -> TyT a -> (Id,T)
name .: (TyT t) = (name, t)
-- | View a pattern as an expression: a plain variable when un-indexed,
-- otherwise an indexed variable reference.
pToExpr :: P -> T -> Expr a
pToExpr (nm, ixs) t
  | null ixs  = Expr t (EVar nm)
  | otherwise = Expr t (EIx (EVar nm) ixs)
-- | Lift a value out of a loop body by recording the loop extent as an
-- extra array dimension on its type.
class LoopWrap a b | a -> b where
  loopwrap :: Expr Int -> a -> b

instance LoopWrap (Expr a) (Expr [a]) where
  loopwrap (Expr _ n) (Expr t e) = Expr (addDim n t) e

instance (LoopWrap a b, LoopWrap c d) => LoopWrap (a,c) (b,d) where
  loopwrap n (a, b) = (loopwrap n a, loopwrap n b)
|
openbrainsrc/hstan
|
src/Math/Stan/TAST.hs
|
mit
| 2,350
| 0
| 10
| 542
| 1,114
| 581
| 533
| -1
| -1
|
module Server.Test.TestCompiler (compile) where
-- | Assemble a runnable hspec test module: fixed pragma/import header,
-- then the submitted content and extra sources, then a main that runs the
-- test source with the structured formatter.
compile :: String -> String -> String -> String
compile testSrc extraSrc contentSrc =
  unlines (header ++ [contentSrc, extraSrc] ++ footer)
  where
    header =
      [ "{-# OPTIONS_GHC -fdefer-type-errors #-}"
      , "import Test.Hspec"
      , "import Test.Hspec.Formatters.Structured"
      , "import Test.Hspec.Runner (hspecWith, defaultConfig, Config (configFormatter))"
      , "import Test.QuickCheck"
      , "import qualified Control.Exception as Exception"
      ]
    footer =
      [ "main :: IO ()"
      , "main = hspecWith defaultConfig {configFormatter = Just structured} $ do"
      , testSrc
      ]
|
mumuki/mumuki-hspec-server
|
src/Server/Test/TestCompiler.hs
|
mit
| 594
| 0
| 7
| 120
| 77
| 46
| 31
| 14
| 1
|
module Light.Cameras (module X)
where
import Light.Camera as X
import Light.Camera.OrthographicCamera as X
import Light.Camera.PerspectiveCamera as X
|
jtdubs/Light
|
src/Light/Cameras.hs
|
mit
| 151
| 0
| 4
| 18
| 35
| 25
| 10
| 4
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Main where
import Control.Concurrent (forkIO)
import Control.Exception (SomeException(..), handle)
import Control.Monad (forM_)
import Control.Monad.Fix (fix)
import Data.ByteString (ByteString, hGet, hPut, length)
import Data.Int (Int32, Int64)
import qualified Data.Pool as Pool
import Data.Serialize.Get (getWord32be, runGet)
import Data.Serialize.Put (putWord32be, runPut)
import qualified Database.PostgreSQL.Simple as PG
import Database.PostgreSQL.Simple.SqlQQ (sql)
import Network.Socket
import RequestHandlers (handleRequest)
import System.IO (Handle, IOMode(ReadWriteMode), hClose)
-- | Read one length-prefixed (4-byte big-endian) message from the handle.
-- Returns Nothing when the length header cannot be decoded (e.g. EOF).
getFramedMessage :: Handle -> IO (Maybe ByteString)
getFramedMessage hdl = do
  header <- hGet hdl 4
  case runGet getWord32be header of
    Left _ -> return Nothing
    Right lenWord -> do
      -- NOTE(review): lengths >= 2^31 would go negative via Int32 —
      -- assumed not to occur with well-behaved clients (as before).
      let msgLen = fromIntegral lenWord :: Int32
      body <- hGet hdl (fromIntegral msgLen)
      return (Just body)
-- | Serve one client connection: read framed requests in a loop, write
-- framed responses.  A request-handling error is printed and ends the
-- loop (as in the original); any exception is printed by 'handle'.
runConn :: Pool.Pool PG.Connection -> (Socket, SockAddr) -> IO ()
runConn pool (sock, _) = do
  hdl <- socketToHandle sock ReadWriteMode
  handle (\(SomeException e) -> print e) $
    fix $ \loop -> do
      incoming <- getFramedMessage hdl
      case incoming of
        Nothing -> return ()
        Just msg ->
          case handleRequest pool msg of
            Left err -> print err
            Right getResponseBytes -> do
              responseBytes <- getResponseBytes
              -- Frame the response with its big-endian length prefix.
              let frame = runPut . putWord32be . fromIntegral . Data.ByteString.length $ responseBytes
              hPut hdl frame
              hPut hdl responseBytes
              loop
  hClose hdl
-- | Accept clients forever, serving each on its own thread.
mainLoop :: Pool.Pool PG.Connection -> Socket -> IO ()
mainLoop pool listenSock = do
  accepted <- accept listenSock
  _ <- forkIO (runConn pool accepted)
  mainLoop pool listenSock
-- | Idempotently create the schema (topics, partitions, records plus two
-- covering indexes) and seed a fixed set of topics with their partition
-- counts.
createTables :: PG.Connection -> IO ()
createTables conn = do
  _ <-
    PG.execute_
      conn
      [sql| CREATE TABLE IF NOT EXISTS topics
            ( id SERIAL PRIMARY KEY
            , name text NOT NULL UNIQUE ) |]
  _ <-
    PG.execute_
      conn
      [sql| CREATE TABLE IF NOT EXISTS partitions
            ( topic_id int NOT NULL REFERENCES topics (id)
            , partition_id int NOT NULL
            , next_offset bigint NOT NULL
            , total_bytes bigint NOT NULL
            , PRIMARY KEY (topic_id, partition_id) ) |]
  _ <-
    PG.execute_
      conn
      [sql| CREATE TABLE IF NOT EXISTS records
            ( topic_id int NOT NULL
            , partition_id int NOT NULL
            , record bytea NOT NULL
            , log_offset bigint NOT NULL
            , byte_offset bigint NOT NULL
            , FOREIGN KEY (topic_id, partition_id) REFERENCES partitions ) |]
  _ <-
    PG.execute_
      conn
      [sql| CREATE INDEX ON records (topic_id, partition_id, log_offset) |]
  _ <-
    PG.execute_
      conn
      [sql| CREATE INDEX ON records (topic_id, partition_id, byte_offset) |]
  let seedTopics =
        [ ("topic-a", 2)
        , ("topic-b", 4)
        , ("test", 1)
        , ("kafka-test-topic", 3)
        , ("__consumer_offsets", 8)
        ] :: [(String, Int)]
  forM_ seedTopics $ \(topicName, partitionCount) -> do
    _ <-
      PG.execute
        conn
        "INSERT INTO topics (name) VALUES (?) ON CONFLICT DO NOTHING" $
        PG.Only topicName
    -- Look the id back up; the name is UNIQUE so exactly one row matches.
    [PG.Only topicId] <-
      PG.query conn "SELECT id FROM topics WHERE name = ?" (PG.Only topicName) :: IO [PG.Only Int32]
    forM_ [0 .. (partitionCount - 1)] $ \partitionId ->
      PG.execute
        conn
        "INSERT INTO partitions (topic_id, partition_id, next_offset, total_bytes) VALUES (?, ?, ?, ?) ON CONFLICT DO NOTHING"
        (topicId, partitionId, 0 :: Int64, 0 :: Int64)
-- | Entry point: build a PostgreSQL connection pool, ensure the schema
-- exists, then accept Kafka-protocol clients on port 9092.
main :: IO ()
main = do
  let mkConn =
        PG.connect
          PG.defaultConnectInfo
            {PG.connectDatabase = "kafkaesque", PG.connectUser = "kafkaesque"}
  pool <- Pool.createPool mkConn PG.close 1 10 8
  Pool.withResource pool createTables
  listenSock <- socket AF_INET Stream 0
  setSocketOption listenSock ReuseAddr 1
  bind listenSock (SockAddrInet 9092 iNADDR_ANY)
  listen listenSock 2
  mainLoop pool listenSock
|
cjlarose/kafkaesque
|
app/Main.hs
|
mit
| 4,262
| 0
| 28
| 1,255
| 1,064
| 552
| 512
| 112
| 3
|
{-# LANGUAGE OverloadedStrings #-}
module Network.Wai.ParseSpec (main, spec) where
import Test.Hspec
import Test.HUnit
import System.IO
import Data.Monoid
import qualified Data.IORef as I
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as L
import qualified Data.Text as TS
import qualified Data.Text.Encoding as TE
import Control.Monad.Trans.Resource (withInternalState, runResourceT)
import Network.Wai
import Network.Wai.Test
import Network.Wai.Parse
import WaiExtraSpec (toRequest)
-- | Run the spec with hspec's default runner.
main :: IO ()
main = hspec spec
-- | Top-level spec: content-type parsing table, Accept-header parsing,
-- request-body parsing, and the dalvik multipart regression cases.
spec :: Spec
spec = do
    describe "parseContentType" $ do
        let check (input, expectedType, expectedAttrs) =
                it (TS.unpack $ TE.decodeUtf8 input) $
                    parseContentType input `shouldBe` (expectedType, expectedAttrs)
        mapM_ check
            [ ("text/plain", "text/plain", [])
            , ("text/plain; charset=UTF-8 ", "text/plain", [("charset", "UTF-8")])
            , ("text/plain; charset=UTF-8 ; boundary = foo", "text/plain", [("charset", "UTF-8"), ("boundary", "foo")])
            , ("text/plain; charset=UTF-8 ; boundary = \"quoted\"", "text/plain", [("charset", "UTF-8"), ("boundary", "quoted")])
            ]
    it "parseHttpAccept" caseParseHttpAccept
    describe "parseRequestBody" $ do
        caseParseRequestBody
        it "multipart with plus" caseMultipartPlus
        it "multipart with multiple attributes" caseMultipartAttrs
        it "urlencoded with plus" caseUrlEncPlus
    describe "dalvik multipart" $ do
        it "non-chunked" $ dalvikHelper True
        it "chunked" $ dalvikHelper False
-- | Accept-header entries must come back sorted by quality (q), most
-- specific first within equal quality.
caseParseHttpAccept :: Assertion
caseParseHttpAccept = expected @=? parseHttpAccept input
  where
    input = "text/plain; q=0.5, text/html;charset=utf-8, text/*;q=0.8;ext=blah, text/x-dvi; q=0.8, text/x-c"
    expected = ["text/html;charset=utf-8", "text/x-c", "text/x-dvi", "text/*", "text/plain"]
-- | Feed a simulated request body to 'sinkRequestBody': the lazy body is
-- split into chunks behind an IORef, and each read pops one chunk
-- (S.empty signals end of input).
parseRequestBody' :: BackEnd file
                  -> SRequest
                  -> IO ([(S.ByteString, S.ByteString)], [(S.ByteString, FileInfo file)])
parseRequestBody' sink (SRequest req bod) =
    case getRequestBodyType req of
        Nothing -> return ([], [])
        Just rbt -> do
            chunksRef <- I.newIORef (L.toChunks bod)
            let popChunk = I.atomicModifyIORef chunksRef $ \cs ->
                    case cs of
                        [] -> ([], S.empty)
                        c:rest -> (rest, c)
            sinkRequestBody sink rbt popChunk
caseParseRequestBody :: Spec
caseParseRequestBody = do
it "parsing post x-www-form-urlencoded" $ do
let content1 = "foo=bar&baz=bin"
let ctype1 = "application/x-www-form-urlencoded"
result1 <- parseRequestBody' lbsBackEnd $ toRequest ctype1 content1
result1 `shouldBe` ([("foo", "bar"), ("baz", "bin")], [])
let ctype2 = "multipart/form-data; boundary=AaB03x"
let expectedsmap2 =
[ ("title", "A File")
, ("summary", "This is my file\nfile test")
]
let textPlain = "text/plain; charset=iso-8859-1"
let expectedfile2 =
[("document", FileInfo "b.txt" textPlain "This is a file.\nIt has two lines.")]
let expected2 = (expectedsmap2, expectedfile2)
it "parsing post multipart/form-data" $ do
result2 <- parseRequestBody' lbsBackEnd $ toRequest ctype2 content2
result2 `shouldBe` expected2
it "parsing post multipart/form-data 2" $ do
result2' <- parseRequestBody' lbsBackEnd $ toRequest' ctype2 content2
result2' `shouldBe` expected2
let ctype3 = "multipart/form-data; boundary=----WebKitFormBoundaryB1pWXPZ6lNr8RiLh"
let expectedsmap3 = []
let expectedfile3 = [("yaml", FileInfo "README" "application/octet-stream" "Photo blog using Hack.\n")]
let expected3 = (expectedsmap3, expectedfile3)
let def = defaultParseRequestBodyOptions
it "parsing actual post multipart/form-data" $ do
result3 <- parseRequestBody' lbsBackEnd $ toRequest ctype3 content3
result3 `shouldBe` expected3
it "parsing actual post multipart/form-data 2" $ do
result3' <- parseRequestBody' lbsBackEnd $ toRequest' ctype3 content3
result3' `shouldBe` expected3
it "parsing with memory limit" $ do
SRequest req4 _bod4 <- toRequest'' ctype3 content3
result4' <- parseRequestBodyEx ( setMaxRequestNumFiles 1 $ setMaxRequestKeyLength 14 def ) lbsBackEnd req4
result4' `shouldBe` expected3
it "exceeding number of files" $ do
SRequest req4 _bod4 <- toRequest'' ctype3 content3
(parseRequestBodyEx ( setMaxRequestNumFiles 0 def ) lbsBackEnd req4) `shouldThrow` anyErrorCall
it "exceeding parameter length" $ do
SRequest req4 _bod4 <- toRequest'' ctype3 content3
(parseRequestBodyEx ( setMaxRequestKeyLength 2 def ) lbsBackEnd req4) `shouldThrow` anyErrorCall
it "exceeding file size" $ do
SRequest req4 _bod4 <- toRequest'' ctype3 content3
(parseRequestBodyEx ( setMaxRequestFileSize 2 def ) lbsBackEnd req4) `shouldThrow` anyErrorCall
it "exceeding total file size" $ do
SRequest req4 _bod4 <- toRequest'' ctype3 content3
(parseRequestBodyEx ( setMaxRequestFilesSize 20 def ) lbsBackEnd req4) `shouldThrow` anyErrorCall
SRequest req5 _bod5 <- toRequest'' ctype3 content5
(parseRequestBodyEx ( setMaxRequestFilesSize 20 def ) lbsBackEnd req5) `shouldThrow` anyErrorCall
it "exceeding max parm value size" $ do
SRequest req4 _bod4 <- toRequest'' ctype2 content2
(parseRequestBodyEx ( setMaxRequestParmsSize 10 def ) lbsBackEnd req4) `shouldThrow` anyErrorCall
it "exceeding max header lines" $ do
SRequest req4 _bod4 <- toRequest'' ctype2 content2
(parseRequestBodyEx ( setMaxHeaderLines 1 def ) lbsBackEnd req4) `shouldThrow` anyErrorCall
it "exceeding header line size" $ do
SRequest req4 _bod4 <- toRequest'' ctype3 content4
(parseRequestBodyEx ( setMaxHeaderLineLength 8190 def ) lbsBackEnd req4) `shouldThrow` anyErrorCall
it "Testing parseRequestBodyEx with application/x-www-form-urlencoded" $ do
let content = "thisisalongparameterkey=andthisbeanevenlongerparametervaluehelloworldhowareyou"
let ctype = "application/x-www-form-urlencoded"
SRequest req _bod <- toRequest'' ctype content
result <- parseRequestBodyEx def lbsBackEnd req
result `shouldBe` ([( "thisisalongparameterkey"
, "andthisbeanevenlongerparametervaluehelloworldhowareyou" )], [])
it "exceeding max parm value size with x-www-form-urlencoded mimetype" $ do
let content = "thisisalongparameterkey=andthisbeanevenlongerparametervaluehelloworldhowareyou"
let ctype = "application/x-www-form-urlencoded"
SRequest req _bod <- toRequest'' ctype content
(parseRequestBodyEx ( setMaxRequestParmsSize 10 def ) lbsBackEnd req) `shouldThrow` anyErrorCall
where
content2 =
"--AaB03x\n"
<> "Content-Disposition: form-data; name=\"document\"; filename=\"b.txt\"\n"
<> "Content-Type: text/plain; charset=iso-8859-1\n\n"
<> "This is a file.\n"
<> "It has two lines.\n"
<> "--AaB03x\n"
<> "Content-Disposition: form-data; name=\"title\"\n"
<> "Content-Type: text/plain; charset=iso-8859-1\n\n"
<> "A File\n"
<> "--AaB03x\n"
<> "Content-Disposition: form-data; name=\"summary\"\n"
<> "Content-Type: text/plain; charset=iso-8859-1\n\n"
<> "This is my file\n"
<> "file test\n"
<> "--AaB03x--"
content3 =
"------WebKitFormBoundaryB1pWXPZ6lNr8RiLh\r\n"
<> "Content-Disposition: form-data; name=\"yaml\"; filename=\"README\"\r\n"
<> "Content-Type: application/octet-stream\r\n\r\n"
<> "Photo blog using Hack.\n\r\n"
<> "------WebKitFormBoundaryB1pWXPZ6lNr8RiLh--\r\n"
content4 =
"------WebKitFormBoundaryB1pWXPZ6lNr8RiLh\r\n"
<> "Content-Disposition: form-data; name=\"alb\"; filename=\"README\"\r\n"
<> "Content-Type: application/octet-stream\r\n\r\n"
<> "Photo blog using Hack.\r\n\r\n"
<> "------WebKitFormBoundaryB1pWXPZ6lNr8RiLh\r\n"
<> "Content-Disposition: form-data; name=\"bla\"; filename=\"riedmi"
<> S8.replicate 8190 'e' <> "\"\r\n"
<> "Content-Type: application/octet-stream\r\n\r\n"
<> "Photo blog using Hack.\r\n\r\n"
<> "------WebKitFormBoundaryB1pWXPZ6lNr8RiLh--\r\n"
content5 =
"------WebKitFormBoundaryB1pWXPZ6lNr8RiLh\r\n"
<> "Content-Disposition: form-data; name=\"yaml\"; filename=\"README\"\r\n"
<> "Content-Type: application/octet-stream\r\n\r\n"
<> "Photo blog using Hack.\n\r\n"
<> "------WebKitFormBoundaryB1pWXPZ6lNr8RiLh\r\n"
<> "Content-Disposition: form-data; name=\"yaml2\"; filename=\"MEADRE\"\r\n"
<> "Content-Type: application/octet-stream\r\n\r\n"
<> "Photo blog using Hack.\n\r\n"
<> "------WebKitFormBoundaryB1pWXPZ6lNr8RiLh--\r\n"
-- | A literal @+@ inside a multipart field value must survive parsing
-- unchanged (no url-decoding of multipart bodies).
caseMultipartPlus :: Assertion
caseMultipartPlus = do
    parsed <- parseRequestBody' lbsBackEnd $ toRequest ctype payload
    parsed @?= ([("email", "has+plus")], [])
  where
    ctype = "multipart/form-data; boundary=AaB03x"
    payload = mconcat
        [ "--AaB03x\n"
        , "Content-Disposition: form-data; name=\"email\"\n"
        , "Content-Type: text/plain; charset=iso-8859-1\n\n"
        , "has+plus\n"
        , "--AaB03x--"
        ]
-- | Extra attributes (here @charset@) appearing before @boundary@ in the
-- Content-Type header must not confuse boundary extraction.
caseMultipartAttrs :: Assertion
caseMultipartAttrs =
    parseRequestBody' lbsBackEnd (toRequest ctype payload)
        >>= (@?= ([("email", "has+plus")], []))
  where
    payload = mconcat
        [ "--AaB03x\n"
        , "Content-Disposition: form-data; name=\"email\"\n"
        , "Content-Type: text/plain; charset=iso-8859-1\n\n"
        , "has+plus\n"
        , "--AaB03x--"
        ]
    ctype = "multipart/form-data; charset=UTF-8; boundary=AaB03x"
-- | @%2B@ in a url-encoded body must decode to a literal @+@.
-- Uses the temp-file back end so the resource-managed code path
-- (runResourceT/withInternalState) is exercised as well.
caseUrlEncPlus :: Assertion
caseUrlEncPlus = do
    result <- runResourceT $ withInternalState $ \state ->
        parseRequestBody' (tempFileBackEnd state) $ toRequest ctype content
    result @?= ([("email", "has+plus")], [])
  where
    content = "email=has%2Bplus"
    ctype = "application/x-www-form-urlencoded"
-- | Replay a captured multipart upload from a Dalvik (Android) client and
-- check that the expected parameters and exactly one file are parsed.
-- The flag controls whether a content-length header is present, so both
-- the known-length and chunked parsing paths are covered.
dalvikHelper :: Bool -> Assertion
dalvikHelper includeLength = do
    let headers' =
            [ ("content-type", "multipart/form-data;boundary=*****")
            , ("GATEWAY_INTERFACE", "CGI/1.1")
            , ("PATH_INFO", "/")
            , ("QUERY_STRING", "")
            , ("REMOTE_ADDR", "192.168.1.115")
            , ("REMOTE_HOST", "ganjizza")
            , ("REQUEST_URI", "http://192.168.1.115:3000/")
            , ("REQUEST_METHOD", "POST")
            , ("HTTP_CONNECTION", "Keep-Alive")
            , ("HTTP_COOKIE", "_SESSION=fgUGM5J/k6mGAAW+MMXIJZCJHobw/oEbb6T17KQN0p9yNqiXn/m/ACrsnRjiCEgqtG4fogMUDI+jikoFGcwmPjvuD5d+MDz32iXvDdDJsFdsFMfivuey2H+n6IF6yFGD")
            , ("HTTP_USER_AGENT", "Dalvik/1.1.0 (Linux; U; Android 2.1-update1; sdk Build/ECLAIR)")
            , ("HTTP_HOST", "192.168.1.115:3000")
            , ("HTTP_ACCEPT", "*, */*")
            , ("HTTP_VERSION", "HTTP/1.1")
            , ("REQUEST_PATH", "/")
            ]
        headers
            | includeLength = ("content-length", "12098") : headers'
            | otherwise = headers'
    let request' = defaultRequest
            { requestHeaders = headers
            }
    (params, files) <-
        case getRequestBodyType request' of
            -- Not a body we know how to parse: nothing to collect.
            Nothing -> return ([], [])
            -- Feed the captured request body in 2048-byte chunks to
            -- exercise incremental parsing.
            Just rbt -> withFile "test/requests/dalvik-request" ReadMode $ \h ->
                sinkRequestBody lbsBackEnd rbt $ S.hGetSome h 2048
    lookup "scannedTime" params @?= Just "1.298590056748E9"
    lookup "geoLong" params @?= Just "0"
    lookup "geoLat" params @?= Just "0"
    length files @?= 1
-- | Build a request with the given content type, delivering the body
-- one byte per chunk to stress-test streaming parsers.
toRequest' :: S8.ByteString -> S8.ByteString -> SRequest
toRequest' ctype content = SRequest request chunkedBody
  where
    request = defaultRequest { requestHeaders = [("Content-Type", ctype)] }
    chunkedBody = L.fromChunks (map S.singleton (S.unpack content))
-- | Like 'toRequest'' but also installs a live 'requestBody' action
-- (via 'mkRB') so code reading the body directly can be tested too.
toRequest'' :: S8.ByteString -> S8.ByteString -> IO SRequest
toRequest'' ctype content = do
    bodyAction <- mkRB content
    let request = defaultRequest
            { requestHeaders = [("Content-Type", ctype)], requestBody = bodyAction
            }
    return $ SRequest request (L.fromChunks (map S.singleton (S.unpack content)))
-- | Produce a request-body action that yields the whole payload on the
-- first call and the empty string on every call after that.
mkRB :: S8.ByteString -> IO (IO S8.ByteString)
mkRB content = do
    remaining <- I.newIORef content
    let takeAll = I.atomicModifyIORef remaining $ \chunk -> (S8.empty, chunk)
    return takeAll
|
creichert/wai
|
wai-extra/test/Network/Wai/ParseSpec.hs
|
mit
| 12,301
| 0
| 20
| 2,687
| 2,691
| 1,384
| 1,307
| 243
| 3
|
module Network.API.Twitter
( getResponse
) where
import Data.Maybe (fromJust)
import Network.OAuth.Consumer
import Network.OAuth.Http.Request
import Network.OAuth.Http.Response
import Network.OAuth.Http.CurlHttpClient
import Data.Aeson
import Network.API.Twitter.Shim
import Data.API.Twitter.Query
import Data.API.Twitter.QueryResponsePair
-- reqUrl = fromJust $ parseURL "https://api.twitter.com/oauth/request_token"
-- accUrl = fromJust $ parseURL "https://api.twitter.com/oauth/access_token"
-- srvUrl = fromJust $ parseURL "http://api.twitter.com/1/people/~"
-- authUrl = ("https://api.twitter.com/oauth/authorize?oauth_token="++) . findWithDefault ("oauth_token","ERROR") . oauthParams
-- app = Application "y7mcOjAr18BvMV5KVKNW0g" "uOP2Ij1IYBwVJYotgkWj11XvtUP5aStutQieczCnuo" OOB
-- tokenM :: IO Token
-- tokenM = runOAuthM (fromApplication app) $ do
-- ignite app
-- signRq2 HMACSHA1 Nothing reqUrl >>= oauthRequest CurlClient
-- cliAskAuthorization authUrl
-- signRq2 HMACSHA1 Nothing accUrl >>= oauthRequest CurlClient
-- getToken
-- response = do
-- token <- tokenM
-- runOAuthM token $ do
-- signRq2 HMACSHA1 Nothing srvUrl >>= serviceRequest CurlClient
-- | Send the query to Twitter and JSON-decode the response payload into
-- the response type paired with the query by 'QueryResponsePair'.
-- Returns 'Nothing' when the payload does not decode.
getResponse :: ( QueryResponsePair q r
               , Query q
               , FromJSON r
               ) => q -> IO (Maybe r)
getResponse q = fmap (decode . rspPayload) $ sendRequest $ mkRequest q
-- | Sign an already-formed request with OAuth headers (HMAC-SHA1 using
-- the shim token) and send it over Curl.
sendRequest :: Request -> IO Response
sendRequest req = runOAuthM shimToken $
    signRq2 HMACSHA1 Nothing req >>= serviceRequest CurlClient
-- |This function creates an HTTP request from any type in the Query
-- class, filling in the path components and query string on 'baseURL'.
mkRequest :: (Query q) => q -> Request
mkRequest query = baseURL { pathComps = toPathComps query
                          , qString = toQueryStr query
                          }

-- | Root of the Twitter REST API; every request is derived from it.
-- NOTE(review): 'fromJust' is safe only because this literal is known
-- to parse; a malformed constant would crash at first use.
baseURL :: Request
baseURL = fromJust (parseURL "http://api.twitter.com/")
|
whittle/twitter-api
|
Network/API/Twitter.hs
|
mit
| 1,968
| 0
| 9
| 370
| 268
| 159
| 109
| 22
| 1
|
module Data.Language
( Language(..)
, LanguageBranch(..)
) where
import ClassyPrelude
import Data.Phoneme
import Data.Word
import Data.Grammar
import Data.Inflection
import Data.Soundchange
import Data.Other
-- Language trees
-- | A node in the language family tree: a language, its descendant
-- branches, and an integer tag (meaning not evident here — presumably
-- a size/weight; TODO confirm against the tree-building code).
data LanguageBranch = LanguageBranch
  { getLanguage :: Language
  , getChildren :: [LanguageBranch]
  , getN :: Int
  }
-- Language
-- | A generated language: phonology, morphology, grammar, and writing
-- system bundled into one record.
data Language = Language
  { getName :: Text
  , getNameMod :: (Text, Text)
  -- Phoneme inventories: consonants, vowels, diphthongs.
  , getCInv :: [Phoneme]
  , getVInv :: [Phoneme]
  , getDInv :: [Phoneme]
  -- Feature grids the inventories are drawn from.
  , getCMap :: ([Place], [Manner], [Phonation], [Airstream])
  , getVMap :: ([Height], [Backness], [Roundedness], [Length])
  , getTones :: [Tone]
  , getStresses :: [Stress]
  , getSonHier :: Int
  -- Syllable structure: permitted onsets, nuclei, and codas.
  , getOnsetCCs :: [ConsCluster]
  , getNuclei :: [Phoneme]
  , getCodaCCs :: [ConsCluster]
  -- Morphology: inflection map plus the various morpheme stores.
  , getInflMap :: InflectionMap
  , getInflMorphemes :: [Morpheme]
  , getLemmaMorphemes :: [Morpheme]
  , getDerivMorphemes :: [Morpheme]
  , getCompoundMorphemes :: [Morpheme]
  , getPronouns :: [Morpheme]
  , getRootMorphemes :: [Morpheme]
  , getGrammar :: Grammar
  -- Writing system: glyphs for phonemes, syllables, and morphemes.
  , getWriting :: ([(Phoneme, (Int, CharPath))], [(Syllable, (Int, CharPath))], [(Morpheme, (Int, CharPath))])
  -- Diachronic sound-change rules.
  , getRules :: [Rule]
  } deriving (Show)
|
Brightgalrs/con-lang-gen
|
src/Data/Language.hs
|
mit
| 1,585
| 0
| 12
| 577
| 394
| 259
| 135
| 39
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures, ScopedTypeVariables #-}
module Symbol where
import Data.Singletons.TH
-- | An alphabet of single-letter symbols (note: Y is absent).
data Symbol = A | B | C | D | E | F | G | H | I | J | K | L | M | N | O | P | Q | R | S | T | U | V | W | X | Z
  deriving (Eq, Ord, Show)
-- Generate the singleton types/values (SSymbol etc.) for promotion
-- of 'Symbol' to the type level.
genSingletons [''Symbol]
|
vladfi1/hs-misc
|
Symbol.hs
|
mit
| 381
| 0
| 7
| 91
| 120
| 77
| 43
| 10
| 0
|
{-# LANGUAGE GADTs, DataKinds, PolyKinds, TypeFamilies, KindSignatures #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
{-# LANGUAGE InstanceSigs, DefaultSignatures #-}
module Binary where
import Data.Singletons.TH
import Data.Singletons.Prelude
import Data.Singletons.Prelude.Enum
import Data.Singletons.Prelude.Num
-- | A bit, encoded as 'Bool' so it promotes to the type level.
type Bit = Bool
-- | Little-endian binary natural: least significant bit first,
-- with the empty list denoting zero.
type BiNat = [Bit]
-- Promote an 'Enum' instance for 'BiNat' to the type level via
-- singletons; the commented-out sections document attempts that the
-- type-level Prelude cannot currently express (no quotRem/div).
$(singletons [d|
  instance Enum BiNat where
    -- succ: increment with carry, LSB first.
    succ [] = [True]
    succ (False:as) = True : as
    succ (True:as) = False : succ as
    -- pred: decrement with borrow; zero has no predecessor.
    pred [] = error "pred 0"
    pred (False:as) = True : pred as
    pred (True:as) = False : as
    {- no type-level quoteRem or div :(
    toEnum i | i < 0 = error "negative toEnum"
             | i == 0 = []
             | otherwise = let (q, r) = quotRem i 2 in r /= 0 : toEnum q
    -}
    {-
    toEnum i | i < 0 = error "negative toEnum"
             | i == 0 = []
             | otherwise = succ (toEnum (pred i))
    -}
    -- fromEnum: Horner evaluation of the little-endian bits.
    fromEnum [] = 0
    fromEnum (False:as) = 2 * fromEnum as
    fromEnum (True:as) = 1 + 2 * fromEnum as
  {-
  xor a b = if a then not b else b
  sumBit a b c = a `xor` b `xor` c
  carryBit a b c = (a && b) || ((a || b) && c)
  add :: Bit -> BiNat -> BiNat -> BiNat
  add c as [] = if c then succ as else as
  add c [] bs = if c then succ bs else bs
  add c (a:as) (b:bs) = sumBit a b c : add (carryBit a b c) as bs
  instance Num BiNat where
    a + b = add False a b
    a - b = error "unimplemented"
    a * b = error "unimplemented"
    fromInteger i | i < 0 = error "negative toEnum"
                  | i == 0 = []
                  | otherwise = succ (toEnum (pred i))
    abs a = a
    negate a = error "negate BiNat"
  -}
  |])
|
vladfi1/hs-misc
|
Binary.hs
|
mit
| 1,811
| 0
| 7
| 552
| 65
| 45
| 20
| 24
| 0
|
{-# LANGUAGE ScopedTypeVariables #-}
module Fractional
( fractional
) where
import Data.Ratio ((%))
import Test.QuickCheck (Arbitrary)
import Test.QuickCheck.Checkers (EqProp, (=-=))
import Test.QuickCheck.Classes.Extra (field)
import Test.QuickCheck.Modifiers (NonZero(..))
import Test.QuickCheck.Extra ()
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.QuickCheck (testProperty)
import Num (numAuxTests)
-- TODO: Reduce Ord to Eq on the new quickcheck release
-- TODO: Write a program to email me for todo's like that when the conditions
-- are met
-- | Test tree checking the 'Fractional' laws for the type of the
-- (unused) witness argument: field laws, auxiliary 'Num' laws, and the
-- recip/fromRational identities.
fractional :: forall a. (Arbitrary a, EqProp a, Show a, Fractional a, Ord a) =>
  a -> TestTree
fractional _ = testGroup "Test Fractional instance"
  [ field "field" witness
  , numAuxTests witness
  , testProperty "x * recip y = x / y"
      (\x (NonZero (y :: a)) -> x * recip y =-= x / y)
  , testProperty "fromRational (x % y) = fromInteger x / fromInteger y"
      (\x (NonZero y) -> fromRational (x % y) =-= fromInteger x / (fromInteger y :: a))
  ]
  where
    witness = undefined :: a
|
expipiplus1/exact-real
|
test/Fractional.hs
|
mit
| 1,101
| 0
| 15
| 237
| 312
| 181
| 131
| 20
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
module XDCC
( Env(..)
, Pack(..)
, initialState
, putDccState
, onAbort
, dispatcher
) where
import qualified DCC
import IRC.Types
import Control.Concurrent.STM (atomically, modifyTVar)
import Control.Monad.Base (MonadBase)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Trans.Control (MonadBaseControl(..), liftBaseOp)
import qualified Data.CaseInsensitive as CI (mk)
import Data.Monoid ((<>))
import qualified Data.Text as T (Text, pack)
import qualified Network.IRC.Client as IRC
import System.Console.Concurrent (outputConcurrent)
import System.Signal (Signal, installHandler, sigINT)
-- | Commands understood by XDCC bots; 'toText' renders the literal
-- message text sent over IRC.
class XdccCommand a where
  toText :: a -> T.Text
-- | Request transmission of a numbered pack.
data XdccSend
  = Send !Pack
instance XdccCommand XdccSend where
  toText (Send p) = "XDCC SEND #" <> packToText p
-- | Cancel the current transfer.
data XdccCancel
  = Cancel
instance XdccCommand XdccCancel where
  toText Cancel = "XDCC CANCEL"
-- | A pack number as advertised by the bot.
newtype Pack = Pack { unpack :: Int }
  deriving (Eq, Show)
-- | Render a pack number as text (no leading '#').
packToText :: Pack -> T.Text
packToText = T.pack . show . unpack
-- | Newtype over the stateful IRC monad so we can hang a
-- 'MonadBaseControl' instance on it (needed by 'liftBaseOp' for
-- installing signal handlers).
newtype XdccIO a = XdccIO { runXdccIO :: IRC.StatefulIRC Stati a }
  deriving (Functor, Applicative, Monad, MonadIO, MonadBase IO)
instance MonadBaseControl IO XdccIO where
  type StM XdccIO a = a
  liftBaseWith f = XdccIO $ liftBaseWith $ \q -> f (q . runXdccIO)
  restoreM = XdccIO . restoreM
-- | Replace the XDCC part of the shared session state.
-- NOTE(review): uses lazy 'modifyTVar'; 'modifyTVar'' would avoid
-- building thunks in the TVar — confirm before changing.
putState :: Status -> XdccIO ()
putState newS = XdccIO $ do
  state <- IRC.stateTVar
  liftIO . atomically . modifyTVar state $ \s -> s { xdccStatus = newS }
-- | Register an additional IRC event handler from within 'XdccIO'.
addHandler :: IRC.EventHandler Stati -> XdccIO ()
addHandler = XdccIO . IRC.addHandler
-- | Send an XDCC command to the given nick as a PRIVMSG, echoing the
-- message to the console first.
sendXdcc :: XdccCommand a => Nickname -> a -> XdccIO ()
sendXdcc nick cmd = XdccIO $ do
  liftIO $ outputConcurrent (show msg <> "\n")
  IRC.send msg
  where
    msg = IRC.Privmsg nick (Right $ toText cmd)
-- | Combined session state: where the XDCC workflow is, and where the
-- underlying DCC transfer is.
data Stati = Stati { xdccStatus :: Status
                   , dccStatus :: DCC.Status
                   }
-- | Fresh state: waiting to join the channel, DCC side still requesting.
initialState :: Channel -> Stati
initialState chan = Stati { xdccStatus = WaitingForJoin chan
                          , dccStatus = DCC.Requesting
                          }
-- | Static configuration for one download: the pack to fetch and the
-- DCC-layer environment.
data Env = Env { packNumber :: !Pack
               , dccEnv :: !(DCC.Env Stati) }
-- | XDCC workflow phase.
data Status
  = WaitingForJoin !Channel
  | Joined
  deriving (Eq, Show)
-- | Top-level event handler: while we are still waiting to join the
-- channel, route every event to 'joinedHandler'; afterwards do nothing
-- (follow-up work is handled by handlers installed later).
dispatcher :: Env -> IRC.EventHandler Stati
dispatcher env = IRC.EventHandler
  { _description = "XDCC SEND workflow handling"
  , _matchType = IRC.EEverything
  , _eventFunc = \ev -> do
      status <- xdccStatus <$> IRC.state
      case status of
        WaitingForJoin chan -> runXdccIO $ joinedHandler env chan ev
        _ -> return ()
  }
-- | On our own JOIN of the awaited channel: mark the state Joined,
-- install the DCC dispatcher, hook SIGINT to send a cancel, and issue
-- the XDCC SEND request.  All other events are ignored.
joinedHandler :: Env -> Channel -> IRC.UnicodeEvent -> XdccIO ()
joinedHandler Env {..} channel IRC.Event { _message = IRC.Join joined }
  | CI.mk joined == channel = do
      putState Joined
      liftIO $ outputConcurrent ( "Joined " <> joined <> ".\n")
      liftIO $ outputConcurrent
          ( "Requesting pack #" <> packToText packNumber <> " from " <> rNick
            <> ", awaiting file offer…\n" )
      addHandler (dispatcherDcc dccEnv)
      -- Cancel the request if the user interrupts before the offer.
      onInterruptSignal $ const $ sendXdcc rNick Cancel
      sendXdcc rNick (Send packNumber)
  where
    rNick = DCC.remoteNick dccEnv
joinedHandler _ _ _ = return ()
-- | Abort hook: tell the bot to cancel the transfer.
onAbort :: Nickname -> IRC.StatefulIRC Stati ()
onAbort rNick = runXdccIO $ sendXdcc rNick Cancel
-- | Install a SIGINT handler that runs the given action and then
-- disconnects from IRC.
onInterruptSignal :: (Signal -> XdccIO ()) -> XdccIO ()
onInterruptSignal f = installHandler' sigINT $ \s -> do
    f s
    XdccIO IRC.disconnect
  where
    installHandler' = liftBaseOp . installHandler
-- | Replace the DCC part of the shared session state.
-- NOTE(review): lazy 'modifyTVar', same caveat as 'putState'.
putDccState :: DCC.Status -> IRC.StatefulIRC Stati ()
putDccState newS = do
  state <- IRC.stateTVar
  liftIO . atomically . modifyTVar state $ \s -> s { dccStatus = newS }
-- | Event handler for the DCC side: route events to the offer/resume
-- handlers according to the current DCC status.
dispatcherDcc :: DCC.Env Stati -> IRC.EventHandler Stati
dispatcherDcc env = IRC.EventHandler
  { _description = "DCC SEND workflow handling"
  , _matchType = IRC.EEverything
  , _eventFunc = \ev -> do
      status <- dccStatus <$> IRC.state
      case status of
        DCC.Requesting -> DCC.runDccIO env $ DCC.offerReceivedHandler ev
        DCC.TryResuming offer -> DCC.runDccIO env $ DCC.acceptResumeHandler offer ev
        _ -> return ()
  }
|
JanGe/xdcc
|
src/XDCC.hs
|
mit
| 4,691
| 0
| 15
| 1,313
| 1,338
| 706
| 632
| 118
| 3
|
{-# LANGUAGE OverloadedStrings #-}
module Jira.API.Types.Assignee where
import Data.Aeson
-- | Issue assignee choices as accepted by the Jira REST API:
-- a concrete user, automatic assignment, or unassigned.
data Assignee = AssigneeUsername String
              | AssigneeAutomatic
              | AssigneeNobody

instance ToJSON Assignee where
  -- Jira's wire format: "-1" means automatic, JSON null means nobody.
  toJSON (AssigneeUsername name) = object [ "name" .= name ]
  toJSON AssigneeAutomatic       = object [ "name" .= ("-1" :: String) ]
  toJSON AssigneeNobody          = object [ "name" .= Null ]
|
dsmatter/jira-api
|
src/Jira/API/Types/Assignee.hs
|
mit
| 438
| 0
| 9
| 118
| 103
| 57
| 46
| 10
| 0
|
---------------------------------------------------------------------
module NASDAQ where
import Model
-- | Stub exchange record.  Only the shape is provided: 'sellAt' raises
-- a descriptive error, there are never pending orders, and 'buyAt' /
-- 'cancelOrder' are left 'undefined' (calling them crashes).
nasdaq = Exchange {
           sellAt = error "NASDAQ doesn't sell." ,
           getPendingOrders = Nothing,
           buyAt = undefined,
           cancelOrder = undefined
         }
---------------------------------------------------------------------
|
dimitri-xyz/interfaces-in-haskell
|
record-solution/NASDAQ.hs
|
cc0-1.0
| 358
| 0
| 7
| 80
| 43
| 28
| 15
| 7
| 1
|
-- 单纯的functor没有办法做到将一个函数也放在最小上下文中然后将包裹在上下文中的函数作用在其他functor上 所以出现了applicative functor
class (Functor f) => Applicative f where
pure :: a -> f a
(<*>) :: f (a -> b) -> f a -> f b
(<$>) :: (Functor f) => (a -> b) -> f a -> f b
f <$> x = fmap f x
-- pure用来将一个类型包在一个applicative functor的上下文中
-- <*>用来将包裹在上下文中的函数作用在其他的functor上 通常叫做apply
-- <$>就是fmap的语法糖 用来和<*>一起组成lift操作
--Maybe Applicative Functor
instance Applicative Maybe where
pure = Just
Nothing <*> _ = Nothing
(Just f) <*> something = fmap f something
a = Just (+3) <*> Just 9 --Just 12
b = pure (+) <*> Just 3 <*> Just 5 --Just 8
-- <*>在List Applicative Functor中就是list comprehension
instance Applicative [] where
pure x = [x]
fs <*> xs = [f x | f <- fs, x <- xs]
-- Fixed: opening bracket of the function list was missing.
c = [(*0),(+100),(^2)] <*> [1,2,3] --[0,0,0,101,102,103,1,4,9]
d = [(+),(*)] <*> [1,2] <*> [3,4] --[4,5,5,6,3,4,6,8]
e = (++) <$> ["ha","heh","hmm"] <*> ["?","!","."] --["ha?","ha!","ha.","heh?","heh!","heh.","hmm?","hmm!","hmm."]
-- 所以下面两个表达式的值是一样的 都是[16,20,22,40,50,55,80,100,110]
f = (*) <$> [2,5,10] <*> [8,10,11]
g = [ x*y | x <- [2,5,10], y <- [8,10,11]]
-- applicative functor中还有一个非常有用的函数
liftA2 :: (Applicative f) => (a -> b -> c) -> f a -> f b -> f c
liftA2 f a b = f <$> a <*> b
h = liftA2 (:) (Just 3) (Just [4]) --Just [3,4]
i = (:) <$> Just 3 <*> Just [4] --Just [3,4]
-- 所以 上面的list comprehension可以用lift重写
import Control.Applicative
h = liftA2 (*) [2,5,10] [8,10,11] --[16,20,22,40,50,55,80,100,110]
-- applicative functor law
-- Identity
pure f <*> v = fmap f v
-- Composition
pure (.) <*> u <*> v <*> w = u <*> (v <*> w)
-- Homomorphism
pure f <*> pure x = pure (f x)
-- Interchange
u <*> pure y = pure ($ y) <*> u
-- $ in haskell
-- 一种用来代替括号
-- 一种是用来yield住参数将yield住的参数传给传入的lambda 详细可以看clojure版的$来解释这个奇葩的概念
(x+) = \y -> x + y
(+y) = \x -> x + y
(+) = \x y -> x + y
-- $ 运算符也一样
($ x) = \y -> y x
:t ($)
($) :: (a -> b) -> a -> b
(\x -> x * 2) $ 10 --20
:t ($ 10)
($ 10) :: Num a => (a -> b) -> b
($ 10) (\x -> x * 2) --20
|
zjhmale/monadme
|
monad/src/monad/haskell/applicativefunctor.hs
|
epl-1.0
| 2,404
| 20
| 10
| 439
| 925
| 513
| 412
| -1
| -1
|
module Moonbase.Panel.Gtk.Item
( date
, spacer
, quitButton
, xmonadLog
, dbusLabel
, cpuGraph
, defaultGraphConfig
) where
import Moonbase.Util.Gtk.Widget.Graph (defaultGraphConfig)
import Moonbase.Panel.Gtk.Item.Spacer
import Moonbase.Panel.Gtk.Item.Date
import Moonbase.Panel.Gtk.Item.QuitButton
import Moonbase.Panel.Gtk.Item.DbusLabel
import Moonbase.Panel.Gtk.Item.CpuGraph
|
felixsch/moonbase-gtk
|
src/Moonbase/Panel/Gtk/Item.hs
|
gpl-2.0
| 415
| 0
| 5
| 66
| 84
| 60
| 24
| 14
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Keymap.Vim.Ex.Commands.BufferDelete
-- License : GPL-2
--
-- :reg[isters] ex command to list yanked texts.
module Yi.Keymap.Vim.Ex.Commands.Registers (printRegisters, parse) where
import Control.Applicative (Alternative ((<|>)), (<*))
import Control.Monad (void)
import Data.Monoid ((<>))
import Yi.Keymap (Action (EditorA))
import Yi.Keymap.Vim.Ex.Types (ExCommand (cmdAction, cmdShow))
import Yi.Keymap.Vim.Common (EventString, RegisterName, Register (regContent), VimState (vsRegisterMap))
import Yi.Editor (EditorM, getEditorDyn, newBufferE)
import Yi.Rope (YiString)
import Yi.Types (withEditor, BufferId (MemBuffer))
import qualified Data.Attoparsec.Text as P (string, try, endOfInput)
import qualified Data.HashMap.Strict as HM (toList)
import qualified Yi.Keymap.Vim.Ex.Commands.Common as Common (parse, pureExCommand)
import qualified Yi.Rope as R (concat, toString, fromString)
-- | Open a new in-memory buffer listing every register and its content,
-- one @name | content@ line per register.  The unnamed register's
-- @\\NUL@ name and embedded newlines are rendered printably.
printRegisters :: EditorM ()
printRegisters = do
    xs <- HM.toList . vsRegisterMap <$> getEditorDyn
    let xs' = visualizeConvert xs
        registers = flip map xs' $ \(nameWithSep, content) -> nameWithSep <> content <> "\n"
        bufDetail = "--- Register ---\n" <> R.concat registers
    void $ newBufferE (MemBuffer "Register list") bufDetail
    where
      replaceName n | n == '\NUL' = "\\NUL | "
                    | otherwise = ['"', n] ++ " | " -- Straighten diff of \NUL
      -- Render newlines inside register content as "^J" (Vim style).
      replaceContent = let replaceContentChar c | c == '\n' = "^J"
                                                | otherwise = [c]
                       in concatMap replaceContentChar
      -- Turn raw (name, register) pairs into displayable text pairs.
      visualizeConvert :: [(RegisterName, Register)] -> [(YiString, YiString)]
      visualizeConvert = map $ \(name, reg) ->
          let content = R.toString . regContent $ reg
          in ( R.fromString . replaceName $ name
             , R.fromString . replaceContent $ content
             )
-- | See :help :registers on Vim.
-- Accepts every prefix of ":registers" down to ":reg" (the alternatives
-- are tried longest first so the parser consumes as much as possible),
-- and maps it to the 'printRegisters' action.
parse :: EventString -> Maybe ExCommand
parse = Common.parse $ do
    P.string "reg" <* ( P.try (P.string "isters")
                    <|> P.try (P.string "ister")
                    <|> P.try (P.string "iste")
                    <|> P.try (P.string "ist")
                    <|> P.try (P.string "is")
                    <|> P.try (P.string "i")
                    <|> P.string ""
                      )
                   <* P.endOfInput
    return Common.pureExCommand
      { cmdShow = "registers"
      , cmdAction = EditorA $ withEditor printRegisters
      }
|
siddhanathan/yi
|
yi-keymap-vim/src/Yi/Keymap/Vim/Ex/Commands/Registers.hs
|
gpl-2.0
| 2,919
| 0
| 20
| 984
| 715
| 407
| 308
| 46
| 1
|
import Control.Monad
-- | Ask for a colour for each of the numbers 1..4, then echo the
-- answers back.  Fixes: redundant @color <- getLine; return color@
-- collapsed to @getLine@, and @mapM@ (whose [()] result was discarded)
-- replaced by @mapM_@ so main is plain IO ().
main :: IO ()
main = do
  colors <- forM [1,2,3,4] $ \a -> do
    putStrLn $ "Which color do you associate with the number " ++ show a ++ "?"
    getLine
  putStrLn "The colors that you associate with 1, 2, 3 and 4 are: "
  mapM_ putStrLn colors
|
softwaremechanic/Miscellaneous
|
Haskell/14.hs
|
gpl-2.0
| 278
| 0
| 15
| 68
| 88
| 42
| 46
| 8
| 1
|
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
{-# LANGUAGE ForeignFunctionInterface #-}
module MEnv.System.IOS
(
systemHandleFrontBegin,
systemHandleFrontEnd,
systemHandleBackBegin,
systemHandleBackEnd,
) where
import MyPrelude
import MEnv
import Foreign.Ptr
import Foreign.C.Types
import Foreign.C.String
import Foreign.Marshal.Array
-- | application: begin frontground.  Returns the first argument when
-- the native call reports 0, otherwise the second.
systemHandleFrontBegin :: a -> a -> MEnv res a
systemHandleFrontBegin onZero onNonZero = io $ do
    code <- ios_systemHandleFrontBegin
    return (if code == 0 then onZero else onNonZero)
foreign import ccall unsafe "ios_systemHandleFrontBegin" ios_systemHandleFrontBegin
    :: IO CUInt
-- | application: end frontground.  Selects between the two arguments on
-- the native call's result (0 -> first, otherwise second).
systemHandleFrontEnd :: a -> a -> MEnv res a
systemHandleFrontEnd a a' = io $
    ios_systemHandleFrontEnd >>= \value -> case value of
        0 -> return a
        _ -> return a'
foreign import ccall unsafe "ios_systemHandleFrontEnd" ios_systemHandleFrontEnd
    :: IO CUInt
-- | application: begin background.  Same 0/non-0 selection.
systemHandleBackBegin :: a -> a -> MEnv res a
systemHandleBackBegin a a' = io $
    ios_systemHandleBackBegin >>= \value -> case value of
        0 -> return a
        _ -> return a'
foreign import ccall unsafe "ios_systemHandleBackBegin" ios_systemHandleBackBegin
    :: IO CUInt
-- | application: end background.  Same 0/non-0 selection.
systemHandleBackEnd :: a -> a -> MEnv res a
systemHandleBackEnd a a' = io $
    ios_systemHandleBackEnd >>= \value -> case value of
        0 -> return a
        _ -> return a'
foreign import ccall unsafe "ios_systemHandleBackEnd" ios_systemHandleBackEnd
    :: IO CUInt
|
karamellpelle/grid
|
source/MEnv/System/IOS.hs
|
gpl-3.0
| 2,352
| 0
| 10
| 495
| 394
| 217
| 177
| 41
| 2
|
module LangDefs.DFA (DFA (..), accept) where
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Maybe
import Control.Monad
-- | Transition function as a finite map from (state, symbol) to the
-- successor state; absent keys mean the transition is undefined.
type Delta s a = Map.Map (s, a) s
-- | Deterministic finite automaton over states @s@ and alphabet @a@.
data DFA s a = DFA
  { states :: Set.Set s
  , sigma :: Set.Set a
  , delta :: Delta s a
  , startState :: s
  , acceptStates :: Set.Set s
  } deriving (Show, Eq)
-- | One transition step: the successor of @st@ on input @sym@,
-- or 'Nothing' when the DFA defines no such transition.
trans :: (Ord s, Ord a) => s -> a -> DFA s a -> Maybe s
trans st sym automaton = Map.lookup (st, sym) (delta automaton)
-- | Run the whole input word from the start state, short-circuiting to
-- 'Nothing' as soon as a transition is missing.
run :: (Ord s, Ord a) => [a] -> DFA s a -> Maybe s
run input automaton = foldM step (startState automaton) input
  where step st sym = trans st sym automaton
-- | Does the DFA accept the input word?  True iff the run completes and
-- ends in an accept state; a missing transition rejects.
-- Rewritten with 'maybe' to drop the partial 'fromJust'/'isNothing'
-- pattern of the original.
accept :: (Ord s, Ord a) => [a] -> DFA s a -> Bool
accept input dfa = maybe False (`Set.member` acceptStates dfa) (run input dfa)
|
aplavin/formal-languages
|
src/LangDefs/DFA.hs
|
gpl-3.0
| 845
| 0
| 10
| 219
| 379
| 205
| 174
| 24
| 2
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Hledger.Cli.Commands.Tags (
tagsmode
,tags
)
where
import Data.List
import Data.String.Here
import qualified Data.Text as T
import Safe
import Hledger
import Hledger.Cli.CliOptions
-- | Command-line mode for the @tags@ command: help text, no
-- command-specific flags, the general flag group, and a usage pattern
-- accepting an optional TAGREGEX plus query arguments.
tagsmode = hledgerCommandMode
  [here| tags
List all the tag names used in the journal. With a TAGREGEX argument,
only tag names matching the regular expression (case insensitive) are shown.
With QUERY arguments, only transactions matching the query are considered.
Reads the default journal file, or another specified with -f.
FLAGS
  |]
  [] -- [flagNone ["strict"] (\opts -> setboolopt "strict" opts) "makes date comparing strict"] --
  [generalflagsgroup1]
  []
  ([], Just $ argsFlag "[TAGREGEX [QUERY...]]")
-- | The tags command: print the sorted, de-duplicated tag names used by
-- the transactions matching the query, optionally filtered by TAGREGEX.
-- NOTE(review): the local binding deliberately shadows the top-level
-- name @tags@; 'nub' here is O(n^2) on the (already sorted) tag list.
tags CliOpts{rawopts_=rawopts,reportopts_=ropts} j = do
  d <- getCurrentDay
  let
    args = listofstringopt "args" rawopts
    -- First argument (if any) is the tag-name regex; the rest is a query.
    mtagpats = headMay args
    queryargs = drop 1 args
    q = queryFromOpts d $ ropts{query_ = unwords queryargs}
    txns = filter (q `matchesTransaction`) $ jtxns $ journalSelectingAmountFromOpts ropts j
    tags =
      nub $ sort $
      (maybe id (filter . regexMatchesCI) mtagpats) $
      map (T.unpack . fst) $ concatMap transactionAllTags txns
  mapM_ putStrLn tags
|
ony/hledger
|
hledger/Hledger/Cli/Commands/Tags.hs
|
gpl-3.0
| 1,290
| 0
| 16
| 252
| 269
| 149
| 120
| 30
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS_GHC -fno-warn-unused-top-binds #-}
-------------------------------------------------------------------------------
-- |
-- Module : OpenSandbox.Data.Yggdrasil
-- Copyright : (c) 2016 Michael Carpenter
-- License : GPL3
-- Maintainer : Michael Carpenter <oldmanmike.dev@gmail.com>
-- Stability : experimental
-- Portability : portable
--
-------------------------------------------------------------------------------
module OpenSandbox.Data.Yggdrasil
( ClientAuth
, ServerAuth
, authDigest
, twosComplement
, AuthenticatePayload
, AuthenticateResponse
, RefreshPayload
, RefreshResponse
, ValidatePayload
, SignoutPayload
, InvalidatePayload
, Agent
, AvailableProfile
, SelectedProfile
, YggdrasilResponseError
, YggdrasilError
) where
import Crypto.Hash
import Data.Aeson
import Data.Aeson.Types
import Data.Bits
import Data.ByteArray
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Builder as Encode
import Data.Monoid
import qualified Data.Text as T
import Data.Word
import GHC.Generics (Generic)
-- | Payload the client sends to the session server to join a server.
data ClientAuth = ClientAuth
  { accessToken :: T.Text
  , selectedProfile :: SelectedProfile
  , serverId :: T.Text
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | Session-server response identifying the authenticated player.
data ServerAuth = ServerAuth
  { id :: T.Text
  , name :: T.Text
  , properties :: [AuthProperty]
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | A signed name/value property attached to a server auth response.
data AuthProperty = AuthProperty
  { name :: T.Text
  , value :: T.Text
  , signature :: T.Text
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | SHA-1 digest of the input interpreted as a signed big-endian
-- number: if the high bit is set the magnitude is taken via
-- 'twosComplement', and leading zero bytes are stripped either way.
authDigest :: B.ByteString -> B.ByteString
authDigest input = B.dropWhile (== 0) adjusted
  where
    digest = B.pack (unpack (hashWith SHA1 input))
    isNegative = B.index digest 0 .&. 0x80 == 0x80
    adjusted = if isNegative then twosComplement digest else digest
-- | Two's complement (byte-wise negation) of a big-endian byte string:
-- every byte is complemented and 1 is added at the least significant
-- byte, propagating the carry leftwards.
--
-- Fix: the original computed the complemented byte @b'@ but then
-- emitted the ORIGINAL byte (@b@ / @b + 1@) into the builder, so the
-- output was never actually complemented; @b'@ was only used for the
-- carry test.  We now emit @b' + 1@ / @b'@ as intended.
twosComplement :: B.ByteString -> B.ByteString
twosComplement bs = BL.toStrict $ Encode.toLazyByteString (go (B.length bs - 1) True mempty)
  where
    -- Walk from the last (least significant) byte to the first,
    -- prepending each output byte; carry starts True (the "+1").
    go :: Int -> Bool -> Encode.Builder -> Encode.Builder
    go (-1) _ acc = acc
    go i carry acc =
      let b' = complement (B.index bs i) .&. 0xff
      in if carry
           -- b' == 0xff means b' + 1 wraps to 0, so the carry continues.
           then go (i - 1) (b' == 0xff) (Encode.word8 (b' + 1) <> acc)
           else go (i - 1) carry (Encode.word8 b' <> acc)
-- | Request body for POST /authenticate.
data AuthenticatePayload = AuthenticatePayload
  { agent :: Agent
  , username :: T.Text
  , password :: T.Text
  , clientToken :: Maybe T.Text
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | Response of /authenticate.
data AuthenticateResponse = AuthenticateResponse
  { accessToken :: T.Text
  , clientToken :: T.Text
  , availableProfiles :: Maybe [AvailableProfile]
  , selectedProfile :: Maybe SelectedProfile
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | Request body for POST /refresh.
data RefreshPayload = RefreshPayload
  { accessToken :: T.Text
  , clientToken :: T.Text
  , selectedProfile :: SelectedProfile
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | Response of /refresh.
data RefreshResponse = RefreshResponse
  { accessToken :: T.Text
  , clientToken :: T.Text
  , selectedProfile :: SelectedProfile
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | Request body for POST /validate.
data ValidatePayload = ValidatePayload
  { accessToken :: T.Text
  , clientToken :: T.Text
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | Request body for POST /signout.
data SignoutPayload = SignoutPayload
  { username :: T.Text
  , password :: T.Text
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | Request body for POST /invalidate.
data InvalidatePayload = InvalidatePayload
  { accessToken :: T.Text
  , clientToken :: T.Text
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | Client identification sent with authentication requests.
data Agent = Agent
  { name :: T.Text
  , version :: Word8
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | A profile the account may select.
data AvailableProfile = AvailableProfile
  { id :: T.Text
  , name :: T.Text
  , legacy :: Maybe Bool
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | The currently selected profile.
data SelectedProfile = SelectedProfile
  { id :: T.Text
  , name :: T.Text
  , legacy :: Maybe Bool
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | Error envelope returned by the Yggdrasil service.
data YggdrasilResponseError = YggdrasilResponseError
  { error :: YggdrasilError
  , errorMessage :: T.Text
  , cause :: Maybe T.Text
  } deriving (Show,Eq,Generic,ToJSON,FromJSON)
-- | The error kinds the Yggdrasil service reports.
data YggdrasilError
  = MethodNotAllowed
  | NotFound
  | ForbiddenOperationException
  | IllegalArgumentException
  | UnsupportedMediaType
  deriving (Show,Eq)
-- | Serialise a 'YggdrasilError' to the exact wire strings the
-- Yggdrasil service uses.
instance ToJSON YggdrasilError where
  toJSON ye =
    case ye of
      MethodNotAllowed -> String "Method Not Allowed"
      NotFound -> String "Not Found"
      ForbiddenOperationException -> String "ForbiddenOperationException"
      IllegalArgumentException -> String "IllegalArgumentException"
      UnsupportedMediaType -> String "Unsupported Media Type"

-- | Parse a 'YggdrasilError' from its wire string.
-- Fix: an unknown error string previously hit @undefined@ and crashed;
-- it now fails the parse cleanly via 'typeMismatch', matching the
-- non-String fallback below.
instance FromJSON YggdrasilError where
  parseJSON (String s) =
    case s of
      "Method Not Allowed" -> return MethodNotAllowed
      "Not Found" -> return NotFound
      "ForbiddenOperationException" -> return ForbiddenOperationException
      "IllegalArgumentException" -> return IllegalArgumentException
      "Unsupported Media Type" -> return UnsupportedMediaType
      _ -> typeMismatch "YggdrasilError" (String s)
  parseJSON x = typeMismatch "Unknown Yggdrasil error!" x
|
oldmanmike/opensandbox
|
src/OpenSandbox/Data/Yggdrasil.hs
|
gpl-3.0
| 5,474
| 0
| 15
| 1,205
| 1,431
| 796
| 635
| 144
| 3
|
module Reactive.Banana.SDL.Util ( addHandler, fire, sdlEvent, tickEvent, tickDiffEvent
, keyEvent, keyDownEvent, keyUpEvent, mouseEvent, mouseButtonEvent
, filterEq, keyFilter, keyUpFilter
, mouseEventWithin, keyPressed, buttonClick
, whileM, successive ) where
import Reactive.Banana as R
import Graphics.UI.SDL as SDL
import Reactive.Banana.SDL.Types
import Control.Monad (when,liftM)
-- | Run an IO action repeatedly for as long as it keeps returning 'True'.
whileM :: IO Bool -> IO ()
whileM act = do
  again <- act
  when again (whileM act)
-- | Extract the 'AddHandler' half of an event source pair.
addHandler :: EventSource a -> AddHandler a
addHandler = fst
-- | Extract the firing action half of an event source pair.
fire :: EventSource a -> a -> IO ()
fire = snd
-- | SDL events pushed through the event source, as a banana event stream.
sdlEvent :: SDLEventSource -> NetworkDescription t (WrappedEvent t)
sdlEvent src = fromAddHandler (addHandler (getSDLEvent src))
-- | SDL tick events, as a banana event stream.
tickEvent :: SDLEventSource -> NetworkDescription t (TickEvent t)
tickEvent src = fromAddHandler (addHandler (getTickEvent src))
-- | event carrying the difference between the last two SDL ticks
tickDiffEvent :: SDLEventSource -> NetworkDescription t (TickEvent t)
tickDiffEvent src = liftM (successive delta) (tickEvent src)
  where delta prev cur = if cur > prev then Just (cur - prev) else Nothing
-- | Filter a wrapped SDL event stream down to keyboard events only.
keyEvent :: WrappedEvent t -> WrappedEvent t
keyEvent = collect . filterE isKey . spill
  where
    -- The original used @otherwise@ as a case *pattern*; that is a
    -- shadowing wildcard binding, not the Prelude guard. Use equations
    -- with @_@ instead.
    isKey (KeyUp _) = True
    isKey (KeyDown _) = True
    isKey _ = False
-- | event carrying the key pressed down
keyDownEvent :: WrappedEvent t -> R.Event t SDL.Keysym
keyDownEvent = filterJust . fmap downKey . spill . keyEvent
  where downKey (SDL.KeyDown k) = Just k
        downKey _ = Nothing
-- | event carrying the key released
keyUpEvent :: WrappedEvent t -> R.Event t SDL.Keysym
keyUpEvent = filterJust . fmap upKey . spill . keyEvent
  where upKey (SDL.KeyUp k) = Just k
        upKey _ = Nothing
-- | Filter a wrapped SDL event stream down to mouse motion plus button events.
mouseEvent :: WrappedEvent t -> WrappedEvent t
mouseEvent esdl = mouseMotion `union` mouseButtonEvent esdl
  where
    mouseMotion = collect . filterE isMotion $ spill esdl
    -- @_@ instead of the misleading @otherwise@ case pattern (a wildcard
    -- binding that shadows the Prelude guard)
    isMotion MouseMotion {} = True
    isMotion _ = False
-- | Filter a wrapped SDL event stream down to mouse button events only.
mouseButtonEvent :: WrappedEvent t -> WrappedEvent t
mouseButtonEvent = collect . filterE isButton . spill
  where
    -- @_@ instead of the misleading @otherwise@ case pattern
    isButton MouseButtonDown {} = True
    isButton MouseButtonUp {} = True
    isButton _ = False
-- | Keep only mouse events whose coordinates fall inside the given rectangle
-- (inclusive on all edges).
mouseEventWithin :: Rect -> WrappedEvent t -> WrappedEvent t
mouseEventWithin ~(Rect x y w h) = collect . filterE isWithin . spill
  where
    within mx' my' =
      let (mx, my) = (fromIntegral mx', fromIntegral my')
      in (mx >= x && mx <= x + w) && (my >= y && my <= y + h)
    -- @_@ instead of the misleading @otherwise@ case pattern
    isWithin (MouseMotion mx my _ _) = within mx my
    isWithin (MouseButtonDown mx my _) = within mx my
    isWithin (MouseButtonUp mx my _) = within mx my
    isWithin _ = False
-- | Drop occurrences equal to the immediately preceding occurrence,
-- keeping only changes of value.
filterEq :: Eq a => R.Event t a -> R.Event t a
filterEq = filterJust . fst . mapAccum Nothing . fmap step
  where
    -- accumulator holds the last seen value; emit only on change
    step new (Just old)
      | old == new = (Nothing, Just old)
      | otherwise = (Just new, Just new)
    step new Nothing = (Just new, Just new)
-- | True exactly for a key-down event of the given key.
keyFilter :: SDL.SDLKey -> SDL.Event -> Bool
keyFilter k (KeyDown (Keysym code _ _)) = k == code
keyFilter _ _ = False
-- | True exactly for a key-up event of the given key.
keyUpFilter :: SDL.SDLKey -> SDL.Event -> Bool
keyUpFilter k (KeyUp (Keysym code _ _)) = k == code
keyUpFilter _ _ = False
-- | Combine each event occurrence with its predecessor via @f@, firing only
-- when @f@ yields 'Just'. The behavior starts as @const Nothing@ and is
-- stepped *after* each occurrence, so the very first occurrence never fires.
successive :: (a -> a -> Maybe b) -> R.Event t a -> R.Event t b
successive f e = filterJust (b <@> e)
    where b = stepper (const Nothing) (f <$> e)
-- | fires when the given key is pressed (down + up)
keyPressed :: SDL.SDLKey -> WrappedEvent t -> WrappedEvent t
keyPressed k = collect . successive fullPress . spill . keyEvent
  where fullPress prev cur
          | keyFilter k prev && keyUpFilter k cur = Just cur
          | otherwise = Nothing
-- | fires when the given mouse button is clicked (down then up)
buttonClick :: MouseButton -> WrappedEvent t -> WrappedEvent t
buttonClick b = collect . successive sameButton . spill . mouseButtonEvent
  where sameButton (MouseButtonDown _ _ b1) e@(MouseButtonUp _ _ b2)
          | b1 == b && b2 == b = Just e
        sameButton _ _ = Nothing
|
orclev/reactive-banana-sdl
|
src/Reactive/Banana/SDL/Util.hs
|
gpl-3.0
| 4,021
| 0
| 14
| 1,076
| 1,429
| 728
| 701
| 74
| 4
|
{-# LANGUAGE TemplateHaskell #-}
module Data.Ephys.OldMWL.ParsePFile where
import Data.Ephys.Position
import Data.Ephys.OldMWL.FileInfo
import Data.Ephys.OldMWL.Parse (decodeTime, encodeTime, dropResult, getMany)
import Data.Ephys.OldMWL.Header
import Control.Lens
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString as BS
import qualified Pipes.ByteString as PBS
import qualified Pipes.Binary as PBinary
import Pipes
import qualified Pipes.Prelude as PP
import qualified Data.Binary as Binary
import Data.Binary.Put
import Data.Binary.Get (getWord32le, getWord16le, Get(..))
-- | One tracker sample from an MWL p-file: a timestamp and two (x, y)
-- pixel pairs. The f\/b suffixes presumably mean front\/back diode --
-- TODO confirm against the MWL file format docs.
data MWLPos = MWLPos { _mwlPosTime :: !Double -- ^ sample time in seconds
                     , _mwlPxf :: !Int -- ^ x pixel, "f" pair
                     , _mwlPyf :: !Int -- ^ y pixel, "f" pair
                     , _mwlPxb :: !Int -- ^ x pixel, "b" pair
                     , _mwlPyb :: !Int -- ^ y pixel, "b" pair
                     } deriving (Eq, Show)
$(makeLenses ''MWLPos)
-- | Stream 'MWLPos' records from the raw bytes of a p-file; the MWL
-- header still present in the first chunk is dropped before decoding.
produceMWLPos :: BSL.ByteString -> Producer MWLPos IO ()
produceMWLPos f =
  let bytes = PBS.fromLazy . dropHeaderInFirstChunk $ f
  in dropResult $ getMany Binary.get bytes
-- | Calibration parameters used to convert MWL pixel coordinates into
-- positions in the arte frame.
data PosMWLShim = PosMWLShim {
    shimOriginXPixel :: !Int -- ^ pixel x of the coordinate origin
  , shimOriginYPixel :: !Int -- ^ pixel y of the coordinate origin
  , shimPxPerMeter :: !Double -- ^ camera scale: pixels per meter
  , shimTrackHeight :: !Double -- ^ height passed through to each position
  }
-- | Stream calibrated 'Position's from an MWL p-file, applying the shim's
-- origin, scale, and height to every raw record.
producePosition :: PosMWLShim -> FilePath -> Producer Position IO ()
producePosition shim fp =
  produceMWLPosFromFile fp >->
  runningPosition (originX, originY) pxPerMeter trackH nullPosition
  where originX = fromIntegral (shimOriginXPixel shim)
        originY = fromIntegral (shimOriginYPixel shim)
        pxPerMeter = shimPxPerMeter shim
        trackH = shimTrackHeight shim
-- | Stream 'MWLPos' records from the named p-file. Calls 'error' with the
-- parse failure message if the MWL header cannot be read.
produceMWLPosFromFile :: FilePath -> Producer MWLPos IO ()
produceMWLPosFromFile fn = do
  r <- liftIO $ loadRawMWL fn
  case r of
    Right (_,_) -> do
      -- only open/read the data once the header has parsed successfully;
      -- the original read the whole file unconditionally before checking
      f <- liftIO $ BSL.readFile fn
      dropResult $ produceMWLPos f
    Left e -> error $ "Couldn't open mwl p file: " ++
                      fn ++ " error: " ++ e
-- | Decoder for one little-endian p-file record: a 32-bit tick count
-- (converted to seconds by 'decodeTime') followed by four 16-bit pixel
-- coordinates.
parsePRecord :: Binary.Get MWLPos
parsePRecord = do
  recTs <- getWord32le
  recXf <- getWord16le
  recYf <- getWord16le
  recXb <- getWord16le
  recYb <- getWord16le
  return $ MWLPos (decodeTime recTs)
    (fI recXf) (fI recYf) (fI recXb) (fI recYb)
-- | Serialization mirrors 'parsePRecord': 'encodeTime'\/'decodeTime'
-- round-trip the timestamp; each coordinate is written back as 16 bits.
instance Binary.Binary MWLPos where
  get = parsePRecord
  put (MWLPos t xf yf xb yb) = do
    putWord32le $ encodeTime t
    putWord16le $ fromIntegral xf
    putWord16le $ fromIntegral yf
    putWord16le $ fromIntegral xb
    putWord16le $ fromIntegral yb
-- | Convert one raw MWL sample into an arte 'Position', stepping from the
-- previous position @p@.
mwlToArtePos :: (Double,Double) -- ^ pixel coordinates of the origin
             -> Double -- ^ pixels per meter
             -> Double -- ^ track height
             -> MWLPos -- ^ raw sample
             -> Position -- ^ previous position
             -> Position
mwlToArtePos (pX0,pY0) pixelsPerMeter height m p =
  let s = 1/pixelsPerMeter
      -- shift by the origin, then scale pixels down to meters
      pXToArte = (*s) . subtract pX0
      pYToArte = (*s) . subtract pY0
      fX = fI $ m^.mwlPxf :: Double
      fY = fI $ m^.mwlPyf :: Double
      bX = fI $ m^.mwlPxb :: Double
      bY = fI $ m^.mwlPyb :: Double
      -- location is the midpoint of the two diode pairs
      xArte = pXToArte $ avg2 fX bX
      yArte = pYToArte $ avg2 fY bY
      loc = Location xArte yArte height
      -- heading: direction from the b pair toward the f pair
      angleArte = Angle (atan2 (fY - bY) (fX - bX)) 0 0
      -- nonpositive pixels presumably mean lost tracking -- TODO confirm
      conf = if all (> 0) [fX,bX,fY,bY] then ConfSure else ConfUnsure
  in
   stepPos p (m^.mwlPosTime) loc angleArte conf
-- | Fold a stream of raw MWL records into calibrated 'Position's, threading
-- each produced position into the next conversion step.
runningPosition :: (Monad m) =>
                   (Double,Double) ->
                   Double ->
                   Double ->
                   Position -> Pipe MWLPos Position m r
runningPosition (pX0, pY0) pixPerMeter height = go
  where
    go prev = do
      raw <- await
      let next = mwlToArtePos (pX0,pY0) pixPerMeter height raw prev :: Position
      yield next
      go next
-- | Midpoint of two values.
avg2 :: Double -> Double -> Double
avg2 x y = (x + y) / 2
-- | Shorthand for 'fromIntegral'.
fI :: (Num a, Integral b) => b -> a
fI n = fromIntegral n
-- | Identity pipe: re-emit every 'MWLPos' unchanged (same behavior as
-- Pipes' @cat@, specialized to this element type).
catPos :: Monad m => Pipe MWLPos MWLPos m r
catPos = do
  p <- await
  yield p
  catPos
|
imalsogreg/arte-ephys
|
tetrode-ephys/src/Data/Ephys/OldMWL/ParsePFile.hs
|
gpl-3.0
| 3,791
| 0
| 13
| 1,112
| 1,213
| 643
| 570
| 123
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.People.People.Connections.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Provides a list of the authenticated user\'s contacts merged with any
-- linked profiles.
--
-- /See:/ <https://developers.google.com/people/ Google People API Reference> for @people.people.connections.list@.
module Network.Google.Resource.People.People.Connections.List
(
-- * REST Resource
PeopleConnectionsListResource
-- * Creating a Request
, peopleConnectionsList
, PeopleConnectionsList
-- * Request Lenses
, pclSyncToken
, pclXgafv
, pclUploadProtocol
, pclResourceName
, pclRequestMaskIncludeField
, pclPp
, pclAccessToken
, pclUploadType
, pclSortOrder
, pclBearerToken
, pclPageToken
, pclPageSize
, pclCallback
) where
import Network.Google.People.Types
import Network.Google.Prelude
-- | A resource alias for @people.people.connections.list@ method which the
-- 'PeopleConnectionsList' request conforms to.
-- NOTE: auto-generated; the query-parameter order here fixes the argument
-- order of @go@ in the 'GoogleRequest' instance below.
type PeopleConnectionsListResource =
     "v1" :>
       Capture "resourceName" Text :>
         "connections" :>
           QueryParam "syncToken" Text :>
             QueryParam "$.xgafv" Text :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "requestMask.includeField" Text :>
                   QueryParam "pp" Bool :>
                     QueryParam "access_token" Text :>
                       QueryParam "uploadType" Text :>
                         QueryParam "sortOrder" Text :>
                           QueryParam "bearer_token" Text :>
                             QueryParam "pageToken" Text :>
                               QueryParam "pageSize" (Textual Int32) :>
                                 QueryParam "callback" Text :>
                                   QueryParam "alt" AltJSON :>
                                     Get '[JSON] ListConnectionsResponse
-- | Provides a list of the authenticated user\'s contacts merged with any
-- linked profiles.
--
-- /See:/ 'peopleConnectionsList' smart constructor.
-- Fields mirror, one-for-one, the query parameters of the REST resource;
-- only the resource name and pretty-print flag are mandatory.
data PeopleConnectionsList = PeopleConnectionsList'
    { _pclSyncToken :: !(Maybe Text)
    , _pclXgafv :: !(Maybe Text)
    , _pclUploadProtocol :: !(Maybe Text)
    , _pclResourceName :: !Text
    , _pclRequestMaskIncludeField :: !(Maybe Text)
    , _pclPp :: !Bool
    , _pclAccessToken :: !(Maybe Text)
    , _pclUploadType :: !(Maybe Text)
    , _pclSortOrder :: !(Maybe Text)
    , _pclBearerToken :: !(Maybe Text)
    , _pclPageToken :: !(Maybe Text)
    , _pclPageSize :: !(Maybe (Textual Int32))
    , _pclCallback :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PeopleConnectionsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pclSyncToken'
--
-- * 'pclXgafv'
--
-- * 'pclUploadProtocol'
--
-- * 'pclResourceName'
--
-- * 'pclRequestMaskIncludeField'
--
-- * 'pclPp'
--
-- * 'pclAccessToken'
--
-- * 'pclUploadType'
--
-- * 'pclSortOrder'
--
-- * 'pclBearerToken'
--
-- * 'pclPageToken'
--
-- * 'pclPageSize'
--
-- * 'pclCallback'
-- Smart constructor: every optional parameter starts at Nothing; the only
-- non-default is _pclPp, which starts True (pretty-printed responses).
peopleConnectionsList
    :: Text -- ^ 'pclResourceName'
    -> PeopleConnectionsList
peopleConnectionsList pPclResourceName_ =
  PeopleConnectionsList'
    { _pclSyncToken = Nothing
    , _pclXgafv = Nothing
    , _pclUploadProtocol = Nothing
    , _pclResourceName = pPclResourceName_
    , _pclRequestMaskIncludeField = Nothing
    , _pclPp = True
    , _pclAccessToken = Nothing
    , _pclUploadType = Nothing
    , _pclSortOrder = Nothing
    , _pclBearerToken = Nothing
    , _pclPageToken = Nothing
    , _pclPageSize = Nothing
    , _pclCallback = Nothing
    }
-- | A sync token, returned by a previous call to
-- \`people.connections.list\`. Only resources changed since the sync token
-- was created are returned.
pclSyncToken :: Lens' PeopleConnectionsList (Maybe Text)
pclSyncToken = lens _pclSyncToken (\rq v -> rq {_pclSyncToken = v})

-- | V1 error format.
pclXgafv :: Lens' PeopleConnectionsList (Maybe Text)
pclXgafv = lens _pclXgafv (\rq v -> rq {_pclXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pclUploadProtocol :: Lens' PeopleConnectionsList (Maybe Text)
pclUploadProtocol = lens _pclUploadProtocol (\rq v -> rq {_pclUploadProtocol = v})

-- | The resource name to return connections for. Only \`people\/me\` is
-- valid.
pclResourceName :: Lens' PeopleConnectionsList Text
pclResourceName = lens _pclResourceName (\rq v -> rq {_pclResourceName = v})

-- | Comma-separated list of fields to be included in the response. Omitting
-- this field will include all fields. Each path should start with
-- \`person.\`: for example, \`person.names\` or \`person.photos\`.
pclRequestMaskIncludeField :: Lens' PeopleConnectionsList (Maybe Text)
pclRequestMaskIncludeField = lens _pclRequestMaskIncludeField (\rq v -> rq {_pclRequestMaskIncludeField = v})

-- | Pretty-print response.
pclPp :: Lens' PeopleConnectionsList Bool
pclPp = lens _pclPp (\rq v -> rq {_pclPp = v})

-- | OAuth access token.
pclAccessToken :: Lens' PeopleConnectionsList (Maybe Text)
pclAccessToken = lens _pclAccessToken (\rq v -> rq {_pclAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pclUploadType :: Lens' PeopleConnectionsList (Maybe Text)
pclUploadType = lens _pclUploadType (\rq v -> rq {_pclUploadType = v})

-- | The order in which the connections should be sorted. Defaults to
-- \`LAST_MODIFIED_ASCENDING\`.
pclSortOrder :: Lens' PeopleConnectionsList (Maybe Text)
pclSortOrder = lens _pclSortOrder (\rq v -> rq {_pclSortOrder = v})

-- | OAuth bearer token.
pclBearerToken :: Lens' PeopleConnectionsList (Maybe Text)
pclBearerToken = lens _pclBearerToken (\rq v -> rq {_pclBearerToken = v})

-- | The token of the page to be returned.
pclPageToken :: Lens' PeopleConnectionsList (Maybe Text)
pclPageToken = lens _pclPageToken (\rq v -> rq {_pclPageToken = v})

-- | The number of connections to include in the response. Valid values are
-- between 1 and 500, inclusive. Defaults to 100.
pclPageSize :: Lens' PeopleConnectionsList (Maybe Int32)
pclPageSize = lens _pclPageSize (\rq v -> rq {_pclPageSize = v}) . mapping _Coerce

-- | JSONP
pclCallback :: Lens' PeopleConnectionsList (Maybe Text)
pclCallback = lens _pclCallback (\rq v -> rq {_pclCallback = v})
instance GoogleRequest PeopleConnectionsList where
        type Rs PeopleConnectionsList =
             ListConnectionsResponse
        type Scopes PeopleConnectionsList =
             '["https://www.googleapis.com/auth/contacts",
               "https://www.googleapis.com/auth/contacts.readonly"]
        -- Argument order must match the Capture/QueryParam order of
        -- 'PeopleConnectionsListResource'; pp and alt are always sent.
        requestClient PeopleConnectionsList'{..}
          = go _pclResourceName _pclSyncToken _pclXgafv
              _pclUploadProtocol
              _pclRequestMaskIncludeField
              (Just _pclPp)
              _pclAccessToken
              _pclUploadType
              _pclSortOrder
              _pclBearerToken
              _pclPageToken
              _pclPageSize
              _pclCallback
              (Just AltJSON)
              peopleService
          where go
                  = buildClient
                      (Proxy :: Proxy PeopleConnectionsListResource)
                      mempty
|
rueshyna/gogol
|
gogol-people/gen/Network/Google/Resource/People/People/Connections/List.hs
|
mpl-2.0
| 8,204
| 0
| 23
| 2,122
| 1,284
| 740
| 544
| 177
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Directory.Roles.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a role.
--
-- /See:/ <https://developers.google.com/admin-sdk/directory/ Admin Directory API Reference> for @directory.roles.get@.
module Network.Google.Resource.Directory.Roles.Get
(
-- * REST Resource
RolesGetResource
-- * Creating a Request
, rolesGet
, RolesGet
-- * Request Lenses
, rgRoleId
, rgCustomer
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @directory.roles.get@ method which the
-- 'RolesGet' request conforms to.
-- NOTE: auto-generated; path segment order fixes the @go@ argument order
-- in the 'GoogleRequest' instance below (customer before roleId).
type RolesGetResource =
     "admin" :>
       "directory" :>
         "v1" :>
           "customer" :>
             Capture "customer" Text :>
               "roles" :>
                 Capture "roleId" Text :>
                   QueryParam "alt" AltJSON :> Get '[JSON] Role
-- | Retrieves a role.
--
-- /See:/ 'rolesGet' smart constructor.
-- Both fields are mandatory path components of the request URL.
data RolesGet = RolesGet'
    { _rgRoleId :: !Text
    , _rgCustomer :: !Text
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RolesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rgRoleId'
--
-- * 'rgCustomer'
-- Smart constructor: both path parameters are required, so there are no
-- defaulted fields here.
rolesGet
    :: Text -- ^ 'rgRoleId'
    -> Text -- ^ 'rgCustomer'
    -> RolesGet
rolesGet pRgRoleId_ pRgCustomer_ =
  RolesGet'
    { _rgRoleId = pRgRoleId_
    , _rgCustomer = pRgCustomer_
    }
-- | Immutable ID of the role.
rgRoleId :: Lens' RolesGet Text
rgRoleId = lens _rgRoleId (\rg v -> rg {_rgRoleId = v})

-- | Immutable ID of the Google Apps account.
rgCustomer :: Lens' RolesGet Text
rgCustomer = lens _rgCustomer (\rg v -> rg {_rgCustomer = v})
instance GoogleRequest RolesGet where
        type Rs RolesGet = Role
        type Scopes RolesGet =
             '["https://www.googleapis.com/auth/admin.directory.rolemanagement",
               "https://www.googleapis.com/auth/admin.directory.rolemanagement.readonly"]
        -- customer precedes roleId, matching the Capture order in
        -- 'RolesGetResource'.
        requestClient RolesGet'{..}
          = go _rgCustomer _rgRoleId (Just AltJSON)
              directoryService
          where go
                  = buildClient (Proxy :: Proxy RolesGetResource)
                      mempty
|
rueshyna/gogol
|
gogol-admin-directory/gen/Network/Google/Resource/Directory/Roles/Get.hs
|
mpl-2.0
| 2,968
| 0
| 15
| 738
| 388
| 233
| 155
| 62
| 1
|
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Model.Factories where
import Data.Aeson
import qualified Data.ByteString as BS
import Data.Maybe
import Data.Fixed
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.Time
import qualified Data.Vector as V
import Hedgehog
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Network.URI
-- import Test.Tasty.HUnit
import Model.Age
import Model.Asset
import Model.Category
import Model.Citation.Types
import Model.Container
import Model.Format
import Model.GeoNames
import Model.Id
-- import Model.Identity.Types
import Model.Metric
import Model.Offset
import Model.Party.Types
import Model.Permission
import Model.Record.Types
-- import Model.Release.Types
import Model.Slot.Types
import Model.Time
-- import Model.Transcode
import Model.TypeOrphans ()
-- import Model.Slot.Types
import Model.Volume.Types
import Model.VolumeAccess.Types
----- general utilities ------
-- | Arbitrary calendar date between 1990 and 2015; day-of-month is capped
-- at 28 so every year\/month combination is valid.
genDate :: Gen Date
genDate = do
  y <- Gen.integral (Range.constant 1990 2015)
  m <- Gen.integral (Range.constant 1 12)
  d <- Gen.integral (Range.constant 1 28)
  pure (fromGregorian y m d)
{-
genVolumeCreationTime :: Gen UTCTime
genVolumeCreationTime =
UTCTime
<$> (fromGregorian
<$> Gen.integral (Range.constant 2000 2018)
<*> Gen.integral (Range.constant 1 12)
<*> Gen.integral (Range.constant 1 28))
<*> (secondsToDiffTime <$> Gen.integral (Range.constant 0 86399))
-}
-- | Random absolute URL of the form @http(s)://www.<domain>.com/<seg>@.
genGeneralURI :: Gen URI
genGeneralURI = do
  domain <- Gen.string (Range.constant 1 20) Gen.alphaNum
  pathSeg1 <- Gen.string (Range.constant 1 20) Gen.alphaNum -- TODO: generate multiple segs, allowed chars?
  scheme1 <- Gen.element ["http:", "https:"]
  pure
    (nullURI {
        uriScheme = scheme1
      , uriAuthority = Just (URIAuth "" ("www." <> domain <> ".com") "") -- TODO: choose on prefix and suffix?
      , uriPath = "/" <> pathSeg1
      })
----- value objects ------
-- id -- usually have db generate db
-- maskeddate
-- | Random date with a coin-flip choice of whether it is masked.
genMaskedDate :: Gen MaskedDate
genMaskedDate = do
  dt <- genDate
  mask <- Gen.bool
  pure (maskDateIf mask dt)
-- geoname
-- | One of two fixed geoname (id, name) pairs.
genGeoName :: Gen GeoName
genGeoName = do
  -- TODO: better generator, longer list from a csv file?
  (i, nm) <- Gen.element [(3041565, "Andorra"), (3351879, "Angola")]
  pure (GeoName (Id i) nm)
-- orcid
-- url
----- gen doi value
----- gen hdl value
----- gen doi url
----- gen hdl url
-- release
-- offset
-- | Offset somewhere inside a clip of the given total length.
genOffset :: Milli -> Gen Offset
genOffset totalLength =
  Offset <$> Gen.realFrac_ (Range.constant 0 totalLength)
-- segment
-- age
-- | Age in days, from newborn up to 14 years.
genAge :: Gen Age
genAge =
  let maxAgeTypicallyStudied = 14
  in Age <$> Gen.integral (Range.constant 0 (maxAgeTypicallyStudied*365))
-- format
-- | Any known file format.
genFormat :: Gen Format
genFormat = Gen.element allFormats
-- | Only audio\/video formats.
genAVFormat :: Gen Format
genAVFormat = Gen.element (filter formatIsAV allFormats)
-- | Only non-audio\/video formats.
genNotAVFormat :: Gen Format
genNotAVFormat = Gen.element (filter formatNotAV allFormats)
-- | Transcode targets of the transcodable formats.
genTranscodeOutputFormat :: Gen Format
-- mapMaybe replaces the catMaybes-of-fmap round trip (Data.Maybe is in scope)
genTranscodeOutputFormat = Gen.element (mapMaybe formatTranscodable allFormats)
-- category
-- | Any known record category.
genCategory :: Gen Category
genCategory = Gen.element allCategories
-- ...
-- funder
-- tag
----- entities ------
-- party
-- | Random party id; lower bound of 3 presumably skips the built-in
-- parties -- TODO confirm.
genPartyId :: Gen (Id Party)
genPartyId = Id <$> Gen.integral (Range.constant 3 5000)
-- | Surname-like text, up to 80 letters (may be empty).
genPartySortName :: Gen Text
genPartySortName = Gen.text (Range.constant 0 80) Gen.alpha
-- | Given-name-like text, up to 80 letters (may be empty).
genPartyPreName :: Gen Text
genPartyPreName = Gen.text (Range.constant 0 80) Gen.alpha
-- | Affiliation text, up to 150 letters (may be empty).
genPartyAffiliation :: Gen Text
genPartyAffiliation = Gen.text (Range.constant 0 150) Gen.alpha
-- | Party row with random id and names; ORCID and URL are always Nothing.
genPartyRowSimple :: Gen PartyRow
genPartyRowSimple =
  PartyRow
    <$> genPartyId
    <*> genPartySortName
    <*> Gen.maybe genPartyPreName
    <*> pure Nothing
    <*> Gen.maybe genPartyAffiliation
    <*> pure Nothing
-- TODO: split into group, ai, collaborator, lab manager, lab staff
-- | Fixed institution URL. 'parseURI' already yields @Maybe URI@, so the
-- old @fromJust@ round trip was a needless partial function; this produces
-- the identical value totally.
genInstitutionUrl :: Gen (Maybe URI)
genInstitutionUrl = pure (parseURI "https://www.nyu.edu")
-- | Fixed account email address.
genAccountEmail :: Gen BS.ByteString
genAccountEmail = pure "adam.smith@nyu.edu"
-- | Party with an attached account; the final let is a recursive knot so
-- the party references the account and the account references the party.
genPartySimple :: Gen Party
genPartySimple = do
   let gPerm = pure PermissionPUBLIC
   let gAccess = pure Nothing
   p <- Party <$> genPartyRowSimple <*> pure Nothing <*> pure NotLoaded <*> gPerm <*> gAccess
   a <- Account <$> genAccountEmail <*> pure p
   (let p2 = p { partyAccount = Just a2 } -- account expected below
        a2 = a { accountParty = p2 }
    in pure p2)
-- TODO: get rid of this and genPartySimplex
-- | Account whose email is derived from the generated names; as in
-- 'genPartySimple', party and account are tied with a recursive let knot.
genAccountSimple :: Gen Account
genAccountSimple = do
  firstName <- genPartyPreName
  lastName <- genPartySortName
  email <- (\d -> TE.encodeUtf8 (firstName <> "." <> lastName <> "@" <> d)) <$> Gen.element ["nyu.edu", "wm.edu"]
  (let pr = (partyRow blankParty) { partySortName = lastName , partyPreName = Just firstName }
       p = blankParty { partyRow = pr, partyAccount = Just a }
       a = blankAccount { accountParty = p, accountEmail = email }
   in pure a)
-- | Institution party built from 'blankParty', with a fixed URL, an
-- optional \"The\" prefix, and a random sort name.
genCreateInstitutionParty :: Gen Party
genCreateInstitutionParty = do
  url <- genInstitutionUrl
  mPre <- Gen.maybe (pure "The")
  sortNm <- genPartySortName
  let base = blankParty
      row = (partyRow base) {
              partySortName = sortNm
            , partyPreName = mPre
            , partyURL = url
            }
  pure (base { partyRow = row })
{-
-- identity
genInitialIdentNeedAuthRoutes :: SiteAuth -> Gen Identity
genInitialIdentNeedAuthRoutes =
Gen.choice
[ pure NotLoggedIn
, Identified <$> undefined -- TODO: finish gen session in Token types
]
genInitialIdentOpenRoutes :: Gen Identity
genInitialIdentOpenRoutes =
pure IdentityNotNeeded
genReIdentified :: SiteAuth -> Gen Identity
genReIdentified =
ReIdentified <$> genSiteAuthSimple -- TODO: come up with a better site auth generator
-}
-- token
-- authorize
---- genCreateAuthorizeReq :: Party -> Party -> Gen Authorize
-- volume / citation
-- | Volume title, up to 200 alphanumerics.
genVolumeName :: Gen Text -- Verify this and next two with real data profile
genVolumeName = Gen.text (Range.constant 0 200) Gen.alphaNum
-- | Volume description body, up to 300 alphanumerics.
genVolumeBody :: Gen Text
genVolumeBody = Gen.text (Range.constant 0 300) Gen.alphaNum
-- | Volume alias, up to 60 alphanumerics.
genVolumeAlias :: Gen Text
genVolumeAlias = Gen.text (Range.constant 0 60) Gen.alphaNum
-- citation only uses head, url, and year fields of citation; start with a doi like 10.1145/2897518.2897542 which get site converts to hdl,
-- hdl result example: hdl:10.1145/2897518.2897542
{-
genVolumeDOI :: Gen BS.ByteString
genVolumeDOI = pure "10.17910/B7159Q" -- TODO: good generator for this?
-}
-- Note: keep this in sync with changes in Controller.createVolume
-- Note: keep this in sync with changes in Controller.createVolume
-- | Volume built from 'blankVolume' with random name, body, and alias.
genVolumeCreateSimple :: Gen Volume
genVolumeCreateSimple = do
  let bv = blankVolume
  name <- genVolumeName
  mBody <- Gen.maybe genVolumeBody
  mAlias <- Gen.maybe genVolumeAlias
  pure
    (bv {
        volumeRow = (volumeRow bv) {
          volumeName = name
        , volumeBody = mBody
        , volumeAlias = mAlias
        }
    })
-- vol acc
-- | Permission\/share-full choices valid for the two special group parties
-- (root and nobody). Calls 'error' for any other party.
genGroupPermission :: Party -> Gen (Permission, Maybe Bool)
genGroupPermission p
  | partyRow p == partyRow rootParty =
      Gen.element [(PermissionNONE, Nothing), (PermissionSHARED, Just False), (PermissionSHARED, Just True)]
  | partyRow p == partyRow nobodyParty =
      Gen.element [(PermissionNONE, Nothing), (PermissionPUBLIC, Just False), (PermissionPUBLIC, Just True)]
  | otherwise = error "only known group parties that should get volume access are root party and nobody party"
-- | Volume access for a group party (chosen randomly when not supplied);
-- individual and children permissions are set to the same value.
genGroupVolumeAccess :: Maybe Party -> Volume -> Gen VolumeAccess
genGroupVolumeAccess mGroup vol = do
  group <- maybe (Gen.element [nobodyParty, rootParty]) pure mGroup
  (perm, mShareFull) <- genGroupPermission group
  VolumeAccess
    <$> pure perm
    <*> pure perm
    <*> Gen.maybe (Gen.integral (Range.constant 1 20)) -- TODO: what does this field mean?
    <*> pure mShareFull
    <*> pure group
    <*> pure vol
-- container / slot
-- | Test day for containers, 2000-2018 with day-of-month capped at 28.
genContainerTestDay :: Gen Day
genContainerTestDay = do
  yr <- Gen.integral (Range.constant 2000 2018)
  mo <- Gen.integral (Range.constant 1 12)
  dy <- Gen.integral (Range.constant 1 28)
  pure (fromGregorian yr mo dy)
-- | Container name, up to 80 alphanumerics (may be empty).
genContainerName :: Gen Text
genContainerName = Gen.text (Range.constant 0 80) Gen.alphaNum
-- | Container row for creation: the id field is a deliberate 'error' thunk
-- until the database assigns one on save.
genCreateContainerRow :: Gen ContainerRow
genCreateContainerRow =
  -- some redundancy with blankContainer
  ContainerRow
    <$> (pure . error) "container id created after save"
    <*> pure False -- Gen.bool -- TODO: when is top valid?
    <*> Gen.maybe genContainerName
    <*> Gen.maybe genContainerTestDay
-- | Container for creation: volume is likewise an 'error' thunk until set.
genCreateContainer :: Gen Container
genCreateContainer =
  -- some redundancy with blankContainer
  Container
    <$> genCreateContainerRow
    <*> Gen.maybe Gen.enumBounded
    <*> (pure . error) "container volume not specified"
-- upload / asset / assetslot / assetsegment / assetrevision
-- | File name with a random alphanumeric prefix and the format's first
-- extension. NOTE(review): 'head' assumes every 'Format' has at least one
-- extension -- TODO confirm.
genUploadFileName :: Format -> Gen Text
genUploadFileName fmt = do
  let ext = (TE.decodeUtf8 . head . formatExtension) fmt
  prefix <- Gen.text (Range.constant 0 80) Gen.alphaNum -- include spaces?
  pure (prefix <> "." <> ext)
---- genCreateUpload :: Volume -> Gen Upload
---- genSendFileChunk :: File -> Gen Chunk
---- genFileContents :: Format -> Gen BS.ByteString
-- | Asset (plus its CSV file contents) as it would exist right after an
-- upload: fixed csv format, random optional release, generated name;
-- duration\/sha1\/size stay Nothing as in 'blankAsset'.
genCreateAssetAfterUpload :: Volume -> Gen (Asset, BS.ByteString)
genCreateAssetAfterUpload vol = do -- modeled after processAsset (AssetTargetVolume ..) w/name,container,upload,maybe release
  -- TODO: who should create the asset file?
  let ba = blankAsset vol
  fmt <- pure (getFormat' (Id 2)) -- csv; TODO: general format + file contents
  mName <- Just <$> genUploadFileName fmt
  mRel <- Gen.maybe Gen.enumBounded
  contents <- pure "col1,col2\n123,456\n"
  -- duration, sha1, size remain nothing from blankAsset
  pure
    (ba {
        assetRow = (assetRow ba) {
          assetFormat = fmt
        , assetRelease = mRel
        , assetName = mName
        }
    }
    , contents)
-- TODO: this assumes that the asset has been updated with real name
-- | NOTE(review): unimplemented stub -- always produces bottom
-- ('undefined'); any use of the generated value will crash.
genCreateSlotAssetAfterUpload :: Asset -> Slot -> Gen Asset
genCreateSlotAssetAfterUpload _ _ = do -- modeled after processAsset (AssetTargetVolume ..) w/name,container,upload, maybe release
  -- assetNoSlot (blankAsset v)
  -- lookupVolContainer -> slotContainer -> build up segment into slot
  -- fix asset slot duration
  -- assetName = read name
  --
  pure undefined
-- mkTranscodeFromInputAsset :: Asset -> a -> Transcode
-- mkTranscodeFromInputAsset orig probe =
-- undefined
-- transcode
---- genCreateTranscode :: Asset -> ...
-- probe <- fileuploadprobe upfile
-- trans <- model.add transcode
-- (asset with duration, no sha1 and no size; name is upload name; has rel and fmt)
-- fullSegment
-- defaultTranscodeOptions
-- probe
-- starttranscode trans
-- the script eventually uses postback to remoteTranscode (during tests, ignore postback, but simulate its actions)
-- lookupTranscode
-- collectTranscode with submitted exit code, sha1, logs (need to recreate sha1 using sha1sum command)
-- updateTranscode
-- maketempfile
-- ctlTranscode w/tempfile
-- updatetranscode
-- avprobe tempfile
-- a <- changeAsset w/sha1, (dur probe), tempfile
-- changeAssetSlotDuration a
-- excerpt
---- genCreateExcerpt :: Asset -> Gen Excerpt
-- measure / metric
----- TODO: expand these to really generate random measure values
-- | Fixed birthdate metric\/value pair.
genBirthdateMeasure :: Gen (Metric, BS.ByteString)
genBirthdateMeasure =
  pure (participantMetricBirthdate, "1990-01-02")
-- | Fixed gender metric\/value pair.
genGenderMeasure :: Gen (Metric, BS.ByteString)
genGenderMeasure =
  pure (participantMetricGender, "Male")
-- TODO: genCreateMeasures :: Gen [(Metric, BS.ByteString)] -- will ensure each measure metric is distinct
-- | Gender measure whose record field is an 'error' thunk until attached.
genCreateGenderMeasure :: Gen Measure
genCreateGenderMeasure = do
  (mtrc, val) <- genGenderMeasure
  Measure
    <$> (pure . error) "measure record not set yet"
    <*> pure mtrc
    <*> pure val
-- | Birthdate measure whose record field is an 'error' thunk until attached.
genCreateBirthdateMeasure :: Gen Measure
genCreateBirthdateMeasure = do
  (mtrc, val) <- genBirthdateMeasure
  Measure
    <$> (pure . error) "measure record not set yet"
    <*> pure mtrc
    <*> pure val
-- record
-- | Record in the given volume with a random category and optional release,
-- no measures, and an id that errors until the database assigns one.
genCreateRecord :: Volume -> Gen Record
genCreateRecord vol =
  -- repeats some logic from blankRecord
  -- (the original wrapped this in a redundant bind-free 'do')
  Record
    <$> (RecordRow <$> (pure . error) "Id set after saved" <*> genCategory)
    <*> pure []
    <*> Gen.maybe Gen.enumBounded
    <*> pure vol
----- genAddVolumeCategory
----- genAddVolumeMetric
-- recordslot
----- genCreateRecordSlot :: Slot -> Record -> Gen RecordSlot
-- funding
----- genCreateVolumeFunding :: Gen Funding
-- links
-- | Citation used as a volume link: random head text, a generated URL,
-- and no year or DOI.
genVolumeLink :: Gen Citation
genVolumeLink =
  -- TODO: Create and use more realistic generators for head and uri values. Generate them together.
  -- TODO: This logic repeats behavior from C.Volume.postVolumeLinks, create blankLink helper
  -- function instead of repeating.
  Citation
    <$> Gen.text (Range.constant 0 50) Gen.alpha
    <*> (Just <$> genGeneralURI)
    <*> pure Nothing
    <*> pure Nothing
-- notification
-- audit
-- ingest
-- | Minimal ingest JSON: the named volume with a single container
-- ("cont1"\/"key1") holding no records and no assets.
mkIngestInput :: T.Text -> Value
mkIngestInput volName =
  object
    [ ("name", String volName)
    , ("containers", Array (V.fromList [container1]))
    ]
  where
    container1 =
      object
        [ ("name", "cont1")
        , ("key", "key1")
        , ("records", Array (V.fromList []))
        , ("assets", Array (V.fromList []))
        ]
-- vol state
---- genCreateVolumeState :: Volume -> Gen VolumeState
---- generate key value pair and is public
{-
volumeState1 :: VolumeState
volumeState1 =
VolumeState {
volumeStateKey = "key1"
, volumeStateValue = Null
, volumeStatePublic = True
, stateVolume = vol
}
-}
-- activity
-- stats
-- comment
----- genCreateComment :: Slot -> Maybe Parent -> Comment
-- tag use
----- genCreateTagUse
----- genCreateKeywordUse
|
databrary/databrary
|
test/Model/Factories.hs
|
agpl-3.0
| 14,457
| 0
| 18
| 3,271
| 2,869
| 1,532
| 1,337
| 253
| 1
|
-- Huffman encoding
-- huffman [('a', 24), ('b', 10), ('c', 3)] ->
-- [('a', "0"), ('b', "10"), ('c', "100")]
-- huff "go go gophers" -> [('g', "00"), ('o', "01"), (' ', "110"),
-- ('p', "111"), ('h', "100") ..]
-- | Assign each distinct element a binary prefix code, giving shorter codes
-- to heavier elements (Huffman-style greedy merging of the two lightest
-- trees). The original body was syntactically invalid (a dangling
-- @let ... in@ before @where@) and its Eq constraint could not support the
-- 'sort' it relied on, hence the Ord constraint here.
huff :: Ord a => [a] -> [(a, [Char])]
huff [] = []
huff [x] = [(x, "0")]
huff xs = snd (merge (foldr insertT [] (map seed (count (sort xs)))))
  where
    -- one leaf per distinct element: its weight and an (empty) code
    seed (s, n) = (n, [(s, "")])
    -- insert a weighted tree into a weight-ascending list
    insertT t [] = [t]
    insertT t@(w, _) (u@(w', _) : us)
      | w <= w' = t : u : us
      | otherwise = u : insertT t us
    -- repeatedly merge the two lightest trees, prefixing a bit to each side
    merge [] = (0, [])
    merge [t] = t
    merge ((w1, c1) : (w2, c2) : ts) =
      merge (insertT (w1 + w2, addBit '0' c1 ++ addBit '1' c2) ts)
    addBit b = map (\(s, code) -> (s, b : code))
-- count "aaabbba" -> [('a', 3), ('b', 3), ('a', 1)]
-- | Run-length counts of *consecutive* equal elements, not global counts:
-- count "aaabbba" -> [('a', 3), ('b', 3), ('a', 1)].
count :: Eq a => [a] -> [(a, Int)]
count [] = []
count [x] = [(x, 1)]
count (x:xs)
  | x == fst run = (x, snd run + 1) : rest
  | otherwise = (x, 1) : run : rest
  where (run : rest) = count xs -- xs non-empty here, so count xs is too
-- sequential sort i.e. O(n^2)
-- | Insertion sort. The original only compared the new element against the
-- head of the sorted tail and then dropped it in second place, so e.g.
-- sort [3,1,2] produced [1,3,2]; this version inserts at the correct spot.
sort :: Ord a => [a] -> [a]
sort [] = []
sort (x:xs) = insert (sort xs)
  where
    insert [] = [x]
    insert (y:ys)
      | x <= y = x : y : ys
      | otherwise = y : insert ys
|
ekalosak/haskell-practice
|
Pr50.hs
|
lgpl-3.0
| 905
| 1
| 11
| 283
| 383
| 208
| 175
| -1
| -1
|
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
module ValidResponses where
-- | Compare leader and follower responses; print a success line when they
-- all agree, otherwise report every conflicting pair.
useIt :: (Eq a, Show a) => [a] -> [a] -> IO ()
useIt xs ys
  | null conflicts = putStrLn "success: pretend you are doing real blockchain work"
  | otherwise = complain conflicts
  where conflicts = invalidResponses xs ys
-- | Pairs of positionally-matched responses that disagree; extra elements
-- of the longer list are ignored (zip semantics).
invalidResponses :: Eq a => [a] -> [a] -> [(a,a)]
invalidResponses leader follower =
  [ (l, f) | (l, f) <- zip leader follower, l /= f ]
-- | Report each conflicting leader\/follower pair via 'debugFn'.
complain :: (Eq a, Show a) => [(a,a)] -> IO ()
complain =
  mapM_ (debugFn .
         (\(x,y) ->
            "CONFLICTING RESPONSES: " ++
            "leader: '" ++ show x ++ "' " ++
            -- fixed: follower value now closes its quote like leader's does
            "follower: '" ++ show y ++ "'"))
------
-- example usage:
-- | Debug sink used by 'complain'; just prints.
debugFn :: Show a => a -> IO ()
debugFn = print
-- | Sample command log (the "leader's" view).
cmds1 :: [Integer]
cmds1 = [1,2,3]
-- | Sample command log with two entries swapped (the "follower's" view).
cmds2 :: [Integer]
cmds2 = [1,3,2]
-- | Agreement case: identical logs, prints the success line.
tryIt1 :: IO ()
tryIt1 = useIt cmds1 cmds1
-- | Conflict case: positions 2 and 3 disagree, prints two complaints.
tryIt2 :: IO ()
tryIt2 = useIt cmds1 cmds2
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/playpen/misc/src/ValidResponses.hs
|
unlicense
| 836
| 0
| 14
| 217
| 354
| 192
| 162
| 27
| 2
|
{-
Copyright 2020 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
-- Test program: two animations combined with (&).
program = animationOf(a & b)
-- a: unit square rotated by 45 degrees per unit of time.
a(t) = rotated(solidRectangle(1, 1), 45 * t)
-- b: circle whose radius grows with time.
b(t) = circle(t)
|
google/codeworld
|
codeworld-compiler/test/testcases/combiningAnimation/source.hs
|
apache-2.0
| 707
| 0
| 8
| 136
| 67
| 36
| 31
| 3
| 1
|
module Handler.ShowSpec (spec) where
import TestImport
-- | Handler specs for ShowR.  Both cases are intentional placeholders that
-- fail loudly ('error') until real expectations are written.
spec :: Spec
spec = withApp $ do
    describe "getShowR" $ do
        error "Spec not implemented: getShowR"
    describe "postShowR" $ do
        error "Spec not implemented: postShowR"
|
Drezil/FFF
|
test/Handler/ShowSpec.hs
|
apache-2.0
| 248
| 0
| 11
| 62
| 60
| 29
| 31
| 8
| 1
|
-- http://www.codewars.com/kata/53223653a191940f2b000877
module Graph where
import qualified Data.IntMap.Strict as IM
import qualified Data.IntSet as IS
import Data.Char
import Control.Arrow
import Data.Foldable
import Data.Monoid
import Prelude hiding (any)
type Node = Char
type Arc = (Node, Node)

-- | Is the end node reachable from the start node along the directed arcs?
-- Grows the reachable set one expansion at a time until it stops changing,
-- and checks whether the target ever appears.  Note: with no arcs a node is
-- not considered reachable from itself (the fixpoint is hit immediately).
solveGraph :: Node -> Node -> [Arc] -> Bool
solveGraph startC endC arcList = any (target `IS.member`) growth
  where
    source = ord startC
    target = ord endC
    -- adjacency: node code -> set of successor node codes
    adjacency = IM.fromListWith IS.union
      [ (ord a, IS.singleton (ord b)) | (a, b) <- arcList ]
    -- one step: union in every successor set of an already-reached node
    expand seen =
      seen <> foldMap id (IM.filterWithKey (\n _ -> n `IS.member` seen) adjacency)
    frontier = iterate expand (IS.singleton source)
    -- successive distinct sets; empty once the fixpoint is reached
    growth = map snd $ takeWhile (uncurry (/=)) $ zip frontier (tail frontier)
|
Bodigrim/katas
|
src/haskell/4-Determining-if-a-graph-has-a-solution.hs
|
bsd-2-clause
| 698
| 0
| 13
| 125
| 267
| 150
| 117
| -1
| -1
|
module Main where
import Graphics.Blank
-- | Open a blank-canvas server on port 3000 and start the animation loop
-- at time 0.
main = blankCanvas 3000 $ \ context -> loop context (0 :: Float)
-- | One frame per iteration, advancing time by 0.01.  Template only: the
-- literal "..." lines are placeholders, so this file does not compile as-is.
loop context n = do
send context $ do
-- clear the canvas
...
-- draw the square
...
loop context (n + 0.01)
|
ku-fpg/talks
|
blank-canvas/examples/AnimationTemplate.hs
|
bsd-2-clause
| 314
| 1
| 10
| 142
| 78
| 42
| 36
| -1
| -1
|
{-# LANGUAGE PackageImports #-}
import "lemonstand" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, settingsPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
-- | Boot the development server: start Warp on the port chosen by
-- 'getApplicationDev' in a background thread, then poll for the
-- terminate marker file via 'loop'.
main :: IO ()
main = do
  putStrLn "Starting devel application"
  (port, app) <- getApplicationDev
  -- Bind the discarded ThreadId explicitly so -Wall's unused-do-bind
  -- warning stays quiet; the thread is intentionally fire-and-forget.
  _ <- forkIO $ runSettings defaultSettings
         { settingsPort = port
         } app
  loop
-- | Poll every 100ms for the marker file Yesod's devel tooling writes
-- when the app should shut down; exit when it appears.
loop :: IO ()
loop = do
  threadDelay 100000
  stopRequested <- doesFileExist "dist/devel-terminate"
  if stopRequested
    then terminateDevel
    else loop
-- | Shut down the devel process cleanly (exit code 0).
terminateDevel :: IO ()
terminateDevel = exitSuccess
|
pbrisbin/lemonstand
|
devel.hs
|
bsd-2-clause
| 704
| 0
| 10
| 123
| 186
| 101
| 85
| 23
| 2
|
module Text.ICalendar.DataType.TextSpec
( main
, spec
) where
-- foreign libraries
import Test.Hspec
-- native libraries
import SpecHelper
import Text.ICalendar.DataType.Text
-- | Standalone entry point so this spec file can be run on its own.
main :: IO ()
main = hspec spec
-- | Run the 'asText' parser over a raw content line (project test helper).
parse :: String -> TestParser String
parse = parseLineWith asText
-- | Specs for the asText parser: single-line content passes through
-- unchanged, and folded (continuation) lines -- CRLF followed by a space
-- or a tab -- are unfolded with a single joining space.
spec :: Spec
spec = do
  describe "asText" $ do
    let line1  = "first line"
        line2  = "second line"
        joined = line1 ++ " " ++ line2
    describe "parsing a single line of content" $ do
      it "parses the line of text" $ do
        parse line1 `shouldParseTo` line1
    describe "parsing multiple lines of content" $ do
      context "split via newline followed by a space" $ do
        it "parses the lines and joins them with a space" $ do
          parse (line1 ++ "\r\n " ++ line2) `shouldParseTo` joined
      context "split via newline followed by a tab" $ do
        it "parses the lines and joins them with a space" $ do
          parse (line1 ++ "\r\n\t" ++ line2) `shouldParseTo` joined
|
begriffs/iCalendar
|
test/Text/ICalendar/DataType/TextSpec.hs
|
bsd-3-clause
| 1,051
| 0
| 23
| 286
| 262
| 129
| 133
| 28
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module Futhark.CodeGen.Backends.PyOpenCL.Boilerplate
( openClInit
, openClPrelude
) where
import Data.FileEmbed
import qualified Data.Text as T
import NeatInterpolation (text)
import Futhark.Representation.AST.Attributes.Constants (value)
import Futhark.CodeGen.OpenCL.Kernels
import Futhark.CodeGen.Backends.GenericPython.AST
import Futhark.Util.Pretty (pretty)
-- | Contents of rts/python/opencl.py, embedded at compile time via
-- Template Haskell ('embedStringFile').
openClPrelude :: String
openClPrelude = $(embedStringFile "rts/python/opencl.py")
-- | Generate the Python initialisation code for the OpenCL context.  The
-- generated $set_lockstep_width heuristics and the caller-supplied
-- $assign' statements are interpolated into the template below; the
-- template body itself is emitted verbatim as Python.
openClInit :: String -> String
openClInit assign = T.unpack [text|
self.ctx = get_prefered_context(interactive, platform_pref, device_pref)
self.queue = cl.CommandQueue(self.ctx)
# XXX: Assuming just a single device here.
platform_name = self.ctx.get_info(cl.context_info.DEVICES)[0].platform.name
device_type = self.ctx.get_info(cl.context_info.DEVICES)[0].type
lockstep_width = 1
$set_lockstep_width
if (len(fut_opencl_src) >= 0):
program = cl.Program(self.ctx, fut_opencl_src).build(["-DFUT_BLOCK_DIM={}".format(FUT_BLOCK_DIM), "-DLOCKSTEP_WIDTH={}".format(lockstep_width)])
$assign'
|]
-- caller's assignments, as Text for interpolation
where assign' = T.pack assign
-- one generated Python `if` per entry in the heuristics table
set_lockstep_width =
T.pack $ unlines $
map (pretty . lockstepWidthHeuristicsCode) lockstepWidthHeuristicsTable
-- | Compile one heuristics-table entry into a Python `if` that sets
-- lockstep_width when both the platform name and device type match.
lockstepWidthHeuristicsCode :: LockstepWidthHeuristic -> PyStmt
lockstepWidthHeuristicsCode
  (LockstepWidthHeuristic platform_name device_type width) =
    If cond [assignWidth] []
  where
    cond            = BinOp "and" platformMatches deviceMatches
    platformMatches = BinOp "==" (Var "platform_name") (StringLiteral platform_name)
    deviceMatches   = BinOp "==" (Var "device_type") (clDeviceType device_type)
    assignWidth     = Assign (Var "lockstep_width")
                             (Constant (value (fromIntegral width::Int32)))
    -- Python-side name of each OpenCL device type.
    clDeviceType DeviceGPU = Var "cl.device_type.GPU"
    clDeviceType DeviceCPU = Var "cl.device_type.CPU"
|
mrakgr/futhark
|
src/Futhark/CodeGen/Backends/PyOpenCL/Boilerplate.hs
|
bsd-3-clause
| 1,831
| 0
| 13
| 231
| 311
| 174
| 137
| 31
| 2
|
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, PatternGuards, TypeSynonymInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Tabbed
-- Copyright : (c) 2007 David Roundy, Andrea Rossato
-- License : BSD-style (see xmonad/LICENSE)
--
-- Maintainer : andrea.rossato@unibz.it
-- Stability : unstable
-- Portability : unportable
--
-- A tabbed layout for the Xmonad Window Manager
--
-----------------------------------------------------------------------------
module XMonad.Layout.Tabbed
( -- * Usage:
-- $usage
simpleTabbed, tabbed, addTabs
, simpleTabbedAlways, tabbedAlways, addTabsAlways
, simpleTabbedBottom, tabbedBottom, addTabsBottom
, simpleTabbedBottomAlways, tabbedBottomAlways, addTabsBottomAlways
, Theme (..)
, defaultTheme
, TabbedDecoration (..)
, shrinkText, CustomShrink(CustomShrink)
, Shrinker(..)
) where
import Data.List
import XMonad
import qualified XMonad.StackSet as S
import XMonad.Layout.Decoration
import XMonad.Layout.Simplest ( Simplest(Simplest) )
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Layout.Tabbed
--
-- Then edit your @layoutHook@ by adding the Tabbed layout:
--
-- > myLayout = simpleTabbed ||| Full ||| etc..
--
-- or, if you want a specific theme for you tabbed layout:
--
-- > myLayout = tabbed shrinkText defaultTheme ||| Full ||| etc..
--
-- and then:
--
-- > main = xmonad defaultConfig { layoutHook = myLayout }
--
-- This layout has hardcoded behaviour for mouse clicks on tab decorations:
-- Left click on the tab switches focus to that window.
-- Middle click on the tab closes the window.
--
-- The default Tabbar behaviour is to hide it when only one window is open
-- on the workspace. To have it always shown, use one of the layouts or
-- modifiers ending in @Always@.
--
-- For more detailed instructions on editing the layoutHook see:
--
-- "XMonad.Doc.Extending#Editing_the_layout_hook"
--
-- You can also edit the default configuration options.
--
-- > myTabConfig = defaultTheme { inactiveBorderColor = "#FF0000"
-- > , activeTextColor = "#00FF00"}
--
-- and
--
-- > mylayout = tabbed shrinkText myTabConfig ||| Full ||| etc..
-- Layouts
-- | A tabbed layout with the default xmonad Theme.
--
-- This is a minimal working configuration:
--
-- > import XMonad
-- > import XMonad.Layout.Tabbed
-- > main = xmonad defaultConfig { layoutHook = simpleTabbed }
simpleTabbed :: ModifiedLayout (Decoration TabbedDecoration DefaultShrinker) Simplest Window
simpleTabbed = tabbed shrinkText defaultTheme
-- | Like 'simpleTabbed', but the tab bar is shown even for a lone window.
simpleTabbedAlways :: ModifiedLayout (Decoration TabbedDecoration DefaultShrinker) Simplest Window
simpleTabbedAlways = tabbedAlways shrinkText defaultTheme
-- | A bottom-tabbed layout with the default xmonad Theme.
simpleTabbedBottom :: ModifiedLayout (Decoration TabbedDecoration DefaultShrinker) Simplest Window
simpleTabbedBottom = tabbedBottom shrinkText defaultTheme
-- | A bottom-tabbed layout with the default xmonad Theme; the tab bar is
-- shown even for a lone window.
simpleTabbedBottomAlways :: ModifiedLayout (Decoration TabbedDecoration DefaultShrinker) Simplest Window
simpleTabbedBottomAlways = tabbedBottomAlways shrinkText defaultTheme
-- | A layout decorated with tabs and the possibility to set a custom
-- shrinker and theme.
tabbed :: (Eq a, Shrinker s) => s -> Theme
-> ModifiedLayout (Decoration TabbedDecoration s) Simplest a
tabbed s c = addTabs s c Simplest
-- | Like 'tabbed', but the tab bar stays visible for a single window.
tabbedAlways :: (Eq a, Shrinker s) => s -> Theme
-> ModifiedLayout (Decoration TabbedDecoration s) Simplest a
tabbedAlways s c = addTabsAlways s c Simplest
-- | A layout decorated with tabs at the bottom and the possibility to set a custom
-- shrinker and theme.
tabbedBottom :: (Eq a, Shrinker s) => s -> Theme
-> ModifiedLayout (Decoration TabbedDecoration s) Simplest a
tabbedBottom s c = addTabsBottom s c Simplest
-- | Like 'tabbedBottom', but the tab bar stays visible for a single window.
tabbedBottomAlways :: (Eq a, Shrinker s) => s -> Theme
-> ModifiedLayout (Decoration TabbedDecoration s) Simplest a
tabbedBottomAlways s c = addTabsBottomAlways s c Simplest
-- Layout Modifiers
-- | A layout modifier that uses the provided shrinker and theme to add tabs to any layout.
addTabs :: (Eq a, LayoutClass l a, Shrinker s) => s -> Theme -> l a
-> ModifiedLayout (Decoration TabbedDecoration s) l a
addTabs = createTabs WhenPlural Top
-- | Like 'addTabs', but the tab bar stays visible for a single window.
addTabsAlways :: (Eq a, LayoutClass l a, Shrinker s) => s -> Theme -> l a
-> ModifiedLayout (Decoration TabbedDecoration s) l a
addTabsAlways = createTabs Always Top
-- | A layout modifier that uses the provided shrinker and theme to add tabs to the bottom of any layout.
addTabsBottom :: (Eq a, LayoutClass l a, Shrinker s) => s -> Theme -> l a
-> ModifiedLayout (Decoration TabbedDecoration s) l a
addTabsBottom = createTabs WhenPlural Bottom
-- | Like 'addTabsBottom', but the tab bar stays visible for a single window.
addTabsBottomAlways :: (Eq a, LayoutClass l a, Shrinker s) => s -> Theme -> l a
-> ModifiedLayout (Decoration TabbedDecoration s) l a
addTabsBottomAlways = createTabs Always Bottom
-- Tab creation abstractions. Internal use only.
-- | Attach a tab bar (shown per the given policy, at the given location,
-- drawn with the given shrinker and theme) to the supplied layout.
createTabs ::(Eq a, LayoutClass l a, Shrinker s) => TabbarShown -> TabbarLocation -> s
-> Theme -> l a -> ModifiedLayout (Decoration TabbedDecoration s) l a
createTabs shown location shrinker theme =
  decoration shrinker theme (Tabbed location shown)
-- | Where the tab bar is drawn.
data TabbarLocation = Top | Bottom deriving (Read,Show)
-- | Whether the tab bar appears for a single window too ('Always') or only
-- when the frame holds more than one ('WhenPlural').
data TabbarShown = Always | WhenPlural deriving (Read, Show, Eq)
-- | Decoration style carrying the bar location and visibility policy.
data TabbedDecoration a = Tabbed TabbarLocation TabbarShown deriving (Read, Show)
instance Eq a => DecorationStyle TabbedDecoration a where
    describeDeco (Tabbed Top _ ) = "Tabbed"
    describeDeco (Tabbed Bottom _ ) = "Tabbed Bottom"
    -- Hardcoded mouse behaviour on a tab: middle click (button2) kills the
    -- window, any other press focuses it.
    decorationEventHook _ ds ButtonEvent { ev_window = ew
                                         , ev_event_type = et
                                         , ev_button = eb }
        | et == buttonPress
        , Just ((w,_),_) <- findWindowByDecoration ew ds =
            if eb == button2
               then killWindow w
               else focus w
    decorationEventHook _ _ _ = return ()
    -- Rectangle of this window's tab, or Nothing when the bar is hidden
    -- (WhenPlural with at most one window in this frame).
    pureDecoration (Tabbed lc sh) _ ht _ s wrs (w,r@(Rectangle x y wh hh))
        = if ((sh == Always && numWindows > 0) || numWindows > 1)
          then Just $ case lc of
                        Top -> upperTab
                        Bottom -> lowerTab
          else Nothing
        -- windows sharing this exact rectangle, in stack order
        where ws = filter (`elem` map fst (filter ((==r) . snd) wrs)) (S.integrate s)
              -- left edge of the i-th tab: frame width divided evenly
              loc i = x + fi ((wh * fi i) `div` max 1 (fi $ length ws))
              -- width and x position of this window's tab
              wid = fi $ maybe x (\i -> loc (i+1) - loc i) $ w `elemIndex` ws
              nx = maybe x loc $ w `elemIndex` ws
              upperTab = Rectangle nx y wid (fi ht)
              lowerTab = Rectangle nx (y+fi(hh-ht)) wid (fi ht)
              numWindows = length ws
    -- Shrink the window rectangle to make room for the bar at its location.
    shrink (Tabbed loc _ ) (Rectangle _ _ _ dh) (Rectangle x y w h)
        = case loc of
            Top -> Rectangle x (y + fi dh) w (h - dh)
            Bottom -> Rectangle x y w (h - dh)
|
MasseR/xmonadcontrib
|
XMonad/Layout/Tabbed.hs
|
bsd-3-clause
| 7,299
| 0
| 17
| 1,727
| 1,582
| 864
| 718
| 87
| 1
|
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Pipes.Text.Tutorial (
-- * Effectful Text
-- $intro
-- ** @Pipes.Text@
-- $pipestext
-- ** @Pipes.Text.IO@
-- $pipestextio
-- ** @Pipes.Text.Encoding@
-- $pipestextencoding
-- ** Implicit chunking
-- $chunks
-- * Lenses
-- $lenses
-- ** @view@ \/ @(^.)@
-- $view
-- ** @over@ \/ @(%~)@
-- $over
-- ** @zoom@
-- $zoom
-- * Special types: @Producer Text m (Producer Text m r)@ and @FreeT (Producer Text m) m r@
-- $special
) where
import Pipes
import Pipes.Text
import Pipes.Text.IO
import Pipes.Text.Encoding
{- $intro
This package provides @pipes@ utilities for /character streams/,
realized as streams of 'Text' chunks. The individual chunks are uniformly /strict/,
and thus the @Text@ type we are using is the one from @Data.Text@, not @Data.Text.Lazy@
But the type @Producer Text m r@, as we are using it, is a sort of /pipes/ equivalent of
the lazy @Text@ type.
-}
{- $pipestext
The main @Pipes.Text@ module provides many functions equivalent
in one way or another to the pure functions in
<https://hackage.haskell.org/package/text-1.1.0.0/docs/Data-Text-Lazy.html Data.Text.Lazy>
(and the corresponding @Prelude@ functions for @String@ s): they transform,
divide, group and fold text streams. Though @Producer Text m r@
is the type of \'effectful Text\', the functions in @Pipes.Text@ are \'pure\'
in the sense that they are uniformly monad-independent.
-}
{- $pipestextencoding
In the @text@ library, @Data.Text.Lazy.Encoding@
handles inter-operation with @Data.ByteString.Lazy@. Here, @Pipes.Text.Encoding@
provides for interoperation with the \'effectful ByteStrings\' of @Pipes.ByteString@.
-}
{- $pipestextio
Simple /IO/ operations are defined in @Pipes.Text.IO@ - as lazy IO @Text@
operations are in @Data.Text.Lazy.IO@.
-}
{- $chunks
Remember that the @Text@ type exported by @Data.Text.Lazy@ is basically
that of a lazy list of strict @Text@: the implementation is arranged so that
the individual strict 'Text' chunks are kept to a reasonable size; the user
is not aware of the divisions between the connected 'Text' chunks, but uses
operations akin to those for strict text.
So also here: the operations in @Pipes.Text@ are designed to operate on character streams that
in a way that is independent of the boundaries of the underlying @Text@ chunks.
This means that they may freely split text into smaller texts and /discard empty texts/.
The objective, though, is that they should not /concatenate texts/ in order to provide strict upper
bounds on memory usage.
For example, to stream only the first three lines of 'stdin' to 'stdout' you
might write:
> import Pipes
> import qualified Pipes.Text as Text
> import qualified Pipes.Text.IO as Text
> import Pipes.Group (takes')
> import Lens.Family (view, (%~)) -- or, Control.Lens
>
> main = runEffect $ takeLines 3 Text.stdin >-> Text.stdout
> where
> takeLines n = view Text.unlines . takes' n . view Text.lines
> -- or equivalently: Text.unlines %~ takes' n
This program will not bring more into memory than what @Text.stdin@ considers
one chunk of text (~ 32 KB), even if individual lines are split
across many chunks. The division into lines does not join Text fragments.
-}
{- $lenses
As the use of @view@ in this example shows, one superficial difference from @Data.Text.Lazy@
is that many of the operations, like 'lines', are \'lensified\'; this has a
number of advantages; in particular it facilitates their use with 'Parser's of Text
(in the general <http://hackage.haskell.org/package/pipes-parse-3.0.1/docs/Pipes-Parse-Tutorial.html pipes-parse>
sense.) The remarks that follow in this section are for non-lens adepts.
Each lens exported here, e.g. 'lines', 'chunksOf' or 'splitAt', reduces to the
intuitively corresponding function when used with @view@ or @(^.)@. Instead of
writing:
> splitAt 17 producer
as we would with the Prelude or Text functions called @splitAt@, we write
> view (splitAt 17) producer
or equivalently
> producer ^. splitAt 17
This may seem a little indirect, but note that many equivalents of
@Text -> Text@ functions are exported here as 'Pipe's. Here too we recover the intuitively
corresponding functions by prefixing them with @(>->)@. Thus something like
> stripLines = view Text.unlines . Group.maps (>-> Text.stripStart) . view Text.lines
would drop the leading white space from each line.
The lenses in this library are marked as /improper/; this just means that
they don't admit all the operations of an ideal lens, but only /getting/ and /focusing/.
Just for this reason, though, the magnificent complexities of the lens libraries
are a distraction. The lens combinators to keep in mind, the ones that make sense for
our lenses, are @view@, @over@, and @zoom@.
One need only keep in mind that if @l@ is a @Lens' a b@, then:
-}
{- $view
@view l@ is a function @a -> b@ . Thus @view l a@ (also written @a ^. l@ )
is the corresponding @b@; as was said above, this function will typically be
the pipes equivalent of the function you think it is, given its name. So for example
> view (Text.splitAt 300) :: Producer Text m r -> Producer Text m (Producer Text m r)
> Text.stdin ^. splitAt 300 :: Producer Text IO (Producer Text IO r)
I.e., it produces the first 300 characters, and returns the rest of the producer.
Thus to uppercase the first n characters
of a Producer, leaving the rest the same, we could write:
> upper n p = do p' <- p ^. Text.splitAt n >-> Text.toUpper
> p'
or equivalently:
> upper n p = join (p ^. Text.splitAt n >-> Text.toUpper)
-}
{- $over
If @l@ is a @Lens a b@, @over l@ is a function @(b -> b) -> a -> a@.
Thus, given a function that modifies
@b@s, the lens lets us modify an @a@ by applying @f :: b -> b@ to
the @b@ that we \"see\" in the @a@ through the lens.
So the type of @over l f@ is @a -> a@ for the concrete type @a@
(it can also be written @l %~ f@).
For any particular @a@, then, @over l f a@ or @(l %~ f) a@ is a revised @a@.
So above we might have written things like these:
> stripLines = over Text.lines (maps (>-> Text.stripStart))
> stripLines = Text.lines %~ maps (>-> Text.stripStart)
> upper n = Text.splitAt n %~ (>-> Text.toUpper)
-}
{- $zoom
@zoom l@, finally, is a function from a @Parser b m r@
to a @Parser a m r@ (or more generally a @StateT (Producer b m x) m r@).
Its use is easiest to see with a decoding lens like 'utf8', which
\"sees\" a Text producer hidden inside a ByteString producer:
@drawChar@ is a Text parser, returning a @Maybe Char@, @zoom utf8 drawChar@ is
a /ByteString/ parser, returning a @Maybe Char@. @drawAll@ is a Parser that returns
a list of everything produced from a Producer, leaving only the return value; it would
usually be unreasonable to use it. But @zoom (splitAt 17) drawAll@
returns a list of Text chunks containing the first seventeen Chars, and returns the rest of
the Text Producer for further parsing. Suppose that we want, inexplicably, to
modify the casing of a Text Producer according to any instruction it might
contain at the start. Then we might write something like this:
> obey :: Monad m => Producer Text m b -> Producer Text m b
> obey p = do (ts, p') <- lift $ runStateT (zoom (Text.splitAt 7) drawAll) p
> let seven = T.concat ts
> case T.toUpper seven of
> "TOUPPER" -> p' >-> Text.toUpper
> "TOLOWER" -> p' >-> Text.toLower
> _ -> do yield seven
> p'
> -- > let doc = each ["toU","pperTh","is document.\n"]
> -- > runEffect $ obey doc >-> Text.stdout
> -- THIS DOCUMENT.
The purpose of exporting lenses is the mental economy achieved with this three-way
applicability. That one expression, e.g. @lines@ or @splitAt 17@ can have these
three uses is no more surprising than that a pipe can act as a function modifying
the output of a producer, namely by using @>->@ to its left: @producer >-> pipe@
-- but can /also/ modify the inputs to a consumer by using @>->@ to its right:
@pipe >-> consumer@
The three functions, @view@ \/ @(^.)@, @over@ \/ @(%~)@ and @zoom@ are supplied by
both <http://hackage.haskell.org/package/lens lens> and
<http://hackage.haskell.org/package/lens-family lens-family> The use of 'zoom' is explained
in <http://hackage.haskell.org/package/pipes-parse-3.0.1/docs/Pipes-Parse-Tutorial.html Pipes.Parse.Tutorial>
and to some extent in the @Pipes.Text.Encoding@ module here.
-}
{- $special
The simple programs using the 'lines' lens reveal a more important difference from @Data.Text.Lazy@ .
This is in the types that are most closely associated with our central text type,
@Producer Text m r@. In @Data.Text@ and @Data.Text.Lazy@ we find functions like
> splitAt :: Int -> Text -> (Text, Text)
> lines :: Text -> [Text]
> chunksOf :: Int -> Text -> [Text]
which relate a Text with a pair of Texts or a list of Texts.
The corresponding functions here (taking account of \'lensification\') are
> view . splitAt :: (Monad m, Integral n) => n -> Producer Text m r -> Producer Text m (Producer Text m r)
> view lines :: Monad m => Producer Text m r -> FreeT (Producer Text m) m r
> view . chunksOf :: (Monad m, Integral n) => n -> Producer Text m r -> FreeT (Producer Text m) m r
Some of the types may be more readable if you imagine that we have introduced
our own type synonyms
> type Text m r = Producer T.Text m r
> type Texts m r = FreeT (Producer T.Text m) m r
Then we would think of the types above as
> view . splitAt :: (Monad m, Integral n) => n -> Text m r -> Text m (Text m r)
> view lines :: (Monad m) => Text m r -> Texts m r
> view . chunksOf :: (Monad m, Integral n) => n -> Text m r -> Texts m r
which brings one closer to the types of the similar functions in @Data.Text.Lazy@
In the type @Producer Text m (Producer Text m r)@ the second
element of the \'pair\' of effectful Texts cannot simply be retrieved
with something like 'snd'. This is an \'effectful\' pair, and one must work
through the effects of the first element to arrive at the second Text stream, even
if you are proposing to throw the Text in the first element away.
Note that we use Control.Monad.join to fuse the pair back together, since it specializes to
> join :: Monad m => Producer Text m (Producer Text m r) -> Producer Text m r
The return type of 'lines', 'words', 'chunksOf' and the other /splitter/ functions,
@FreeT (Producer m Text) m r@ -- our @Texts m r@ -- is the type of (effectful)
lists of (effectful) texts. The type @([Text],r)@ might be seen to gather
together things of the forms:
> r
> (Text,r)
> (Text, (Text, r))
> (Text, (Text, (Text, r)))
> (Text, (Text, (Text, (Text, r))))
> ...
(We might also have identified the sum of those types with @Free ((,) Text) r@
-- or, more absurdly, @FreeT ((,) Text) Identity r@.)
Similarly, our type @Texts m r@, or @FreeT (Text m) m r@ -- in fact called
@FreeT (Producer Text m) m r@ here -- encompasses all the members of the sequence:
> m r
> Text m r
> Text m (Text m r)
> Text m (Text m (Text m r))
> Text m (Text m (Text m (Text m r)))
> ...
We might have used a more specialized type in place of @FreeT (Producer a m) m r@,
or indeed of @FreeT (Producer Text m) m r@, but it is clear that the correct
result type of 'lines' will be isomorphic to @FreeT (Producer Text m) m r@ .
One might think that
> lines :: Monad m => Lens' (Producer Text m r) (FreeT (Producer Text m) m r)
> view . lines :: Monad m => Producer Text m r -> FreeT (Producer Text m) m r
should really have the type
> lines :: Monad m => Pipe Text Text m r
as e.g. 'toUpper' does. But this would spoil the control we are
attempting to maintain over the size of chunks. It is in fact just
as unreasonable to want such a pipe as to want
> Data.Text.Lazy.lines :: Text -> Text
to 'rechunk' the strict Text chunks inside the lazy Text to respect
line boundaries. In fact we have
> Data.Text.Lazy.lines :: Text -> [Text]
> Prelude.lines :: String -> [String]
where the elements of the list are themselves lazy Texts or Strings; the use
of @FreeT (Producer Text m) m r@ is simply the 'effectful' version of this.
The @Pipes.Group@ module, which can generally be imported without qualification,
provides many functions for working with things of type @FreeT (Producer a m) m r@.
In particular it conveniently exports the constructors for @FreeT@ and the associated
@FreeF@ type -- a fancy form of @Either@, namely
> data FreeF f a b = Pure a | Free (f b)
for pattern-matching. Consider the implementation of the 'words' function, or
of the part of the lens that takes us to the words; it is compact but exhibits many
of the points under discussion, including explicit handling of the @FreeT@ and @FreeF@
constructors. Keep in mind that
> newtype FreeT f m a = FreeT (m (FreeF f a (FreeT f m a)))
> next :: Monad m => Producer a m r -> m (Either r (a, Producer a m r))
Thus the @do@ block after the @FreeT@ constructor is in the base monad, e.g. 'IO' or 'Identity';
the later subordinate block, opened by the @Free@ constructor, is in the @Producer@ monad:
> words :: Monad m => Producer Text m r -> FreeT (Producer Text m) m r
> words p = FreeT $ do -- With 'next' we will inspect p's first chunk, excluding spaces;
> x <- next (p >-> dropWhile isSpace) -- note that 'dropWhile isSpace' is a pipe, and is thus *applied* with '>->'.
> return $ case x of -- We use 'return' and so need something of type 'FreeF (Text m) r (Texts m r)'
> Left r -> Pure r -- 'Left' means we got no Text chunk, but only the return value; so we are done.
> Right (txt, p') -> Free $ do -- If we get a chunk and the rest of the producer, p', we enter the 'Producer' monad
> p'' <- view (break isSpace) -- When we apply 'break isSpace', we get a Producer that returns a Producer;
> (yield txt >> p') -- so here we yield everything up to the next space, and get the rest back.
> return (words p'') -- We then carry on with the rest, which is likely to begin with space.
-}
|
bitemyapp/text-pipes
|
Pipes/Text/Tutorial.hs
|
bsd-3-clause
| 14,976
| 0
| 4
| 3,681
| 61
| 51
| 10
| 6
| 0
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS -fno-warn-orphans #-}
module Dixi.Markup where
import Control.Lens
import Data.Foldable (toList)
import Data.Maybe (fromMaybe)
import Data.Monoid
import Data.Patch (Hunks, HunkStatus(..))
import Data.Proxy
import Data.Text (Text)
import Servant.API
import Servant.HTML.Blaze
import Text.Blaze
import Text.Blaze.Renderer.Utf8 (renderMarkup)
import Text.Hamlet (shamlet, Html)
import Text.Heredoc
import Text.Lucius
import Text.Pandoc.Error
import qualified Data.Text as T
import qualified Data.Text.Lazy as L
import qualified Data.ByteString.Lazy as B
import Dixi.API
import Dixi.Common
import Dixi.Config
import Dixi.Page
import Dixi.Hamlet
import Dixi.PatchUtils
-- | Build a type-safe URL for an endpoint of the Dixi servant API.
link :: (IsElem endpoint Dixi, HasLink endpoint) => Proxy endpoint -> MkLink endpoint
link = safeLink dixi
-- | Turn a wiki page key into a display title: underscores become spaces.
-- Uses 'T.map' directly instead of round-tripping through String
-- (the old @T.pack . map f . T.unpack@ allocated two intermediate lists).
renderTitle :: Text -> Text
renderTitle = T.map (\c -> if c == '_' then ' ' else c)
-- Typed routes, used with 'link' to build URLs from the markup below.
-- | Rendered page at a specific version.
prettyUrl :: Proxy ( Capture "page" Key :> "history"
:> Capture "version" Version
:> Get '[HTML, JSON] PrettyPage
)
prettyUrl = Proxy
-- | Latest rendered version of a page.
latestUrl :: Proxy (Capture "page" Key :> Get '[HTML, JSON] PrettyPage)
latestUrl = Proxy
-- | Raw (editable) source of a specific version.
rawUrl :: Proxy ( Capture "page" Key :> "history"
:> Capture "version" Version
:> "raw" :> Get '[HTML, JSON] RawPage
)
rawUrl = Proxy
-- | Post a new body amending a given version.
amendUrl :: Proxy ( Capture "page" Key :> "history"
:> Capture "version" Version
:> ReqBody '[FormUrlEncoded, JSON] NewBody
:> Post '[HTML, JSON] PrettyPage
)
amendUrl = Proxy
-- | Diff between two versions selected by the history form.
diffUrl :: Proxy (Capture "page" Key :> "history" :> "diff" :> Get '[HTML, JSON] DiffPage)
diffUrl = Proxy
-- | Full edit history of a page.
historyUrl :: Proxy (Capture "page" Key :> "history" :> Get '[HTML, JSON] History)
historyUrl = Proxy
-- | Revert a page across a version range.
revertUrl :: Proxy (Capture "page" Key :> "history" :> "revert" :> ReqBody '[FormUrlEncoded, JSON] RevReq :> Post '[HTML, JSON] PrettyPage)
revertUrl = Proxy
-- | Shared page chrome: doctype, web fonts, the given stylesheet block,
-- title and header bar, wrapped around the supplied body markup.
outerMatter :: Html -> Text -> Html -> Html
outerMatter ss title bod = [shamlet|
$doctype 5
<html>
<head>
<link href="http://fonts.googleapis.com/css?family=PT+Serif:400,700" rel="stylesheet" type="text/css">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/font-awesome/4.4.0/css/font-awesome.min.css">
#{ss}
<title> #{title}
<body>
<div .header> #{title}
#{bod}
|]
-- | Unwrap a 'Last', falling back to the given default when it is empty.
unlast :: a -> Last a -> a
unlast fallback = fromMaybe fallback . getLast
-- | Substitute the first argument for the second when the second is empty;
-- otherwise the non-empty text wins.
guardText :: Text -> Text -> Text
guardText fallback t = if T.null t then fallback else t
-- | Render an application error as a complete HTML page (serialized lazy
-- ByteString), one clause per 'DixiError' constructor.
dixiError :: Html -> DixiError -> B.ByteString
dixiError header (VersionNotFound k v) = renderMarkup $ outerMatter header (renderTitle k)
[shamlet|
#{pageHeader k "Error"}
<div .body>
<h1> Error
<span.error> Version #{v} not found!
|]
dixiError header (PatchNotApplicable k) = renderMarkup $ outerMatter header (renderTitle k)
[shamlet|
#{pageHeader k "Error"}
<div .body>
<h1> Internal Error
<span.error> Patch not Applicable!
|]
-- | URIs render via their Show instance.
instance ToMarkup URI where
toMarkup u = [shamlet|#{show u}|]
-- | (insertions, deletions, replacements) badge triple.
instance ToMarkup PatchSummary where
toMarkup (i,d,r) = [hml|
<span .fa .fa-plus-square-o .addition-sum> #{show i}
<span .fa .fa-minus-square-o .subtraction-sum> #{show d}
<span .fa .fa-pencil-square-o .replacement-sum> #{show r}
|]
-- | Diff view between two versions of a page, with a revert form.
instance ToMarkup DiffPage where
toMarkup (DP (Renders {..}) k v1 v2 p) = outerMatter headerBlock (renderTitle k) $ [shamlet|
#{pageHeader k vString}
<div .body>
<div>
#{renderHunks d}
<br>
<hr>
<form method="POST" action="/#{link revertUrl k}">
<input type="hidden" name="from" value="#{show v1}">
<input type="hidden" name="to" value="#{show v2}">
<input type="text" name="comment" value="revert #{show v1} - #{show v2}">
<button type="submit">
<span .fa .fa-undo> Revert
|]
where
-- diff hunks carried in the page body
d = p ^. body
-- render each hunk as a span styled by its status
renderHunks :: Hunks Char -> Html
renderHunks ps = [hml|
$forall (x, s) <- ps
<span class="hunk #{styleFor s}">#{toList x}
|]
-- CSS class for each hunk status
styleFor :: HunkStatus -> String
styleFor Inserted = "hunk-inserted"
styleFor Deleted = "hunk-deleted"
styleFor Replaced = "hunk-replaced"
styleFor Unchanged = "hunk-unchanged"
-- page subtitle, e.g. "diff 3 - 5"
vString :: Text
vString = "diff " <> T.pack (show v1) <> " - " <> T.pack (show v2)
-- | Edit-history table for a page, newest first, with radio buttons
-- feeding the diff form.  An empty history renders just the header.
instance ToMarkup History where
toMarkup (H (Renders {..}) k []) = outerMatter headerBlock (renderTitle k) $ pageHeader k "history"
toMarkup (H (Renders {..}) k ps) = outerMatter headerBlock (renderTitle k) $ [shamlet|
#{pageHeader k "history"}
<div .body>
<form method="GET" action="/#{link diffUrl k}">
<table .history>
<tr>
<th .histh-version> Version
<th .histh-fromto> From/To
<th .histh-changes> Changes
<th .histh-comment> Comment
<th .histh-comment> Time
$forall (v, p) <- ps'
<tr>
<td .hist-version>
#{show v}.
<td .hist-fromto>
<input type="radio" checked style="position:relative; top:1em;" name="from" value="#{show (v - 1)}">
<input type="radio" checked name="to" value="#{show v}">
<td>
#{(p ^. body)}
<td>
<a .histlink href="/#{link prettyUrl k v}">#{guardText "no comment" (unlast "no comment" (p ^. comment))}
<td>
<span .timestamp>#{renderTime (p ^. time)}
<tr>
<td>
<tr>
<td>
<td>
<button type="submit">
<span .fa .fa-files-o>
\ Diff
<td>
<td>
<small> (to revert a change, view the diff first)
|]
-- number versions from 1 and show newest first
where ps' = reverse $ zip [1..] ps
-- | Subtitle + toolbar for a single version view (edit/view/history/latest).
versionHeader :: Key -> Version -> Text -> Html
versionHeader k v com = [shamlet|
<div .subtitle>
version #{v} (#{com'})
<div .toolbar>
<a href="/#{link rawUrl k v}" .fa .fa-edit> edit
<a href="/#{link prettyUrl k v}" .fa .fa-eye> view
<a href="/#{link historyUrl k}" .fa .fa-history> history
<a href="/#{link latestUrl k}" .fa .fa-fast-forward> latest
|]
-- same empty-comment fallback as 'guardText'
where com' = if com == "" then "no comment" else com
-- | Subtitle + reduced toolbar for non-version pages (history, errors).
pageHeader :: Key -> Text -> Html
pageHeader k com = [shamlet|
<div .subtitle>
#{com}
<div .toolbar>
<a href="/#{link historyUrl k}" .fa .fa-history> history
<a href="/#{link latestUrl k}" .fa .fa-fast-forward> latest
|]
-- | Render pandoc parse failures inline in the page body.
instance ToMarkup PandocError where
toMarkup (ParseFailure s) = [shamlet| <b> Parse Failure: </b> #{s}|]
toMarkup (ParsecError _ e) = [shamlet| <b> Parse Error: </b> #{show e} |]
-- | Convenience wrapper: a PandocError as standalone Html.
writePandocError :: PandocError -> Html
writePandocError err = [shamlet|#{err}|]
-- | Rendered (read-only) view of one page version with its timestamp.
instance ToMarkup PrettyPage where
toMarkup (PP (Renders {..}) k v p)
= let
-- last comment on this version (empty when none)
com = p ^. comment . traverse
tim = renderTime $ p ^. time
in outerMatter headerBlock (renderTitle k)
[shamlet|
#{versionHeader k v com}
<div .body>
#{p ^. body}
<div .timestamp> This version was last edited at #{tim}
|]
-- | Editable (raw source) view of one page version: a textarea posting
-- to the amend endpoint.
instance ToMarkup RawPage where
toMarkup (RP (Renders {..}) k v p )
= let
com = p ^. comment . traverse
bod = p ^. body
in outerMatter headerBlock (renderTitle k)
[shamlet|
#{versionHeader k v com}
<div .body>
<form method="POST" action="/#{link amendUrl k v}">
<textarea name="content" cols=80 rows=24 style="font-family:monospace">#{bod}
<br>
<input type="text" name="comment" value="no comment">
<input type="submit">
|]
-- | Site-wide stylesheet rendered from a Lucius template at load time.
-- Previously bound via a partial top-level pattern
-- (@Right defaultStylesheet = luciusRT ...@), which on a template mistake
-- would fail with an opaque irrefutable-pattern error; it now fails with a
-- descriptive message instead.  The CSS itself is unchanged.
defaultStylesheet :: L.Text
defaultStylesheet =
  either (\e -> error ("Dixi.Markup.defaultStylesheet: invalid lucius: " ++ show e)) id $
  luciusRT [here|
div.body {
margin: 1em;
}
table.history {
border: 0px;
td {
border: 0px;
button {
width: 100%;
padding: 4px;
}
}
tr {
border: 0px;
}
}
.hist-version {
text-align:right;
}
.histh-comment {
text-align:left;
}
.histh-version {
padding-right:5px;
}
.hist-fromto {
text-align:center;
}
body {
font-family: PT Serif, Palatino, Georgia, Times, serif;
margin: 0px;
}
.toolbar {
background: #BBBBAA;
border-top: 1px solid #888877;
border-bottom: 1px solid #EEEEDD;
a:hover {
background: #F1F1D9;
border: 1px outset #F1F1D9;
}
a:active {
background: #F1F1D9;
border: 1px inset #F1F1D9;
}
a {
background: #DCDCCB;
border: 1px outset #F1F1D9;
text-decoration: none;
color: black;
padding: 2px;
margin-top: 2px;
margin-bottom: 2px;
margin-left: 2px;
}
}
.header {
background: #FFFFDD;
font-size: 1.5em;
font-weight: bold;
padding-left: 0.5em;
padding-top: 0.5em;
padding-bottom: 0.5em;
}
.subtitle {
float:right;
font-size: 0.8em;
margin-right: 0.5em;
color: gray;
position: relative;
top: -2.5em;
}
.addition-sum {
background: #B5F386;
padding: 3px;
border-radius: 6px 0px 0px 6px;
margin-top:1px;
margin-bottom:1px;
}
.subtraction-sum {
background: #EC8160;
padding: 3px;
margin-top:1px;
margin-bottom:1px;
}
.replacement-sum {
background: #F3E686;
padding: 3px;
border-radius: 0px 6px 6px 0px;
margin-top:1px;
margin-bottom:1px;
}
.hunk {
white-space: pre;
font-family:monospace;
border-radius: 4px;
}
.hunk-inserted {
background: #B5F386;
}
.hunk-deleted {
background: #EC8160;
text-decoration: line-through;;
}
.hunk-replaced {
background: #F3E686;
}
.timestamp {
color: #444444;
font-size: small;
}
div.timestamp {
margin-left: 0.5em;
margin-top: 2em;
}
|] []
|
liamoc/dixi
|
Dixi/Markup.hs
|
bsd-3-clause
| 10,519
| 0
| 13
| 3,129
| 1,536
| 843
| 693
| -1
| -1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE RankNTypes #-}
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
#if __GLASGOW_HASKELL__ < 708
{-# LANGUAGE Trustworthy #-}
#endif
#include "lens-common.h"
-------------------------------------------------------------------------------
-- |
-- Module : Control.Lens.Zoom
-- Copyright : (C) 2012-16 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : Rank2Types
--
-------------------------------------------------------------------------------
module Control.Lens.Zoom
( Magnified
, Magnify(..)
, Zoom(..)
, Zoomed
) where
import Prelude ()
import Control.Lens.Getter
import Control.Lens.Internal.Coerce
import Control.Lens.Internal.Prelude
import Control.Lens.Internal.Zoom
import Control.Lens.Type
import Control.Monad
import Control.Monad.Reader as Reader
import Control.Monad.State as State
import Control.Monad.Trans.State.Lazy as Lazy
import Control.Monad.Trans.State.Strict as Strict
import Control.Monad.Trans.Writer.Lazy as Lazy
import Control.Monad.Trans.Writer.Strict as Strict
import Control.Monad.Trans.RWS.Lazy as Lazy
import Control.Monad.Trans.RWS.Strict as Strict
import Control.Monad.Trans.Error
import Control.Monad.Trans.Except
import Control.Monad.Trans.List
import Control.Monad.Trans.Identity
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Free
#ifdef HLINT
{-# ANN module "HLint: ignore Use fmap" #-}
#endif
-- $setup
-- >>> import Control.Lens
-- >>> import Control.Monad.State
-- >>> import Data.Map as Map
-- >>> import Debug.SimpleReflect.Expr as Expr
-- >>> import Debug.SimpleReflect.Vars as Vars
-- >>> let f :: Expr -> Expr; f = Debug.SimpleReflect.Vars.f
-- >>> let g :: Expr -> Expr; g = Debug.SimpleReflect.Vars.g
-- >>> let h :: Expr -> Expr -> Expr; h = Debug.SimpleReflect.Vars.h
-- Chosen so that they have lower fixity than ('%='), and to match ('<~').
infixr 2 `zoom`, `magnify`
------------------------------------------------------------------------------
-- Zoomed
------------------------------------------------------------------------------
-- | This type family is used by 'Control.Lens.Zoom.Zoom' to describe the common effect type.
type family Zoomed (m :: * -> *) :: * -> * -> *
-- State monads are the base cases: zooming focuses the state directly.
type instance Zoomed (Strict.StateT s z) = Focusing z
type instance Zoomed (Lazy.StateT s z) = Focusing z
-- Transformers that add no state of their own simply defer to the inner monad.
type instance Zoomed (ReaderT e m) = Zoomed m
type instance Zoomed (IdentityT m) = Zoomed m
-- RWST carries a writer component along with the focused state.
type instance Zoomed (Strict.RWST r w s z) = FocusingWith w z
type instance Zoomed (Lazy.RWST r w s z) = FocusingWith w z
-- The remaining transformers wrap the inner effect with a helper that
-- threads their extra structure (writer output, [], Maybe, errors, Free).
type instance Zoomed (Strict.WriterT w m) = FocusingPlus w (Zoomed m)
type instance Zoomed (Lazy.WriterT w m) = FocusingPlus w (Zoomed m)
type instance Zoomed (ListT m) = FocusingOn [] (Zoomed m)
type instance Zoomed (MaybeT m) = FocusingMay (Zoomed m)
type instance Zoomed (ErrorT e m) = FocusingErr e (Zoomed m)
type instance Zoomed (ExceptT e m) = FocusingErr e (Zoomed m)
type instance Zoomed (FreeT f m) = FocusingFree f m (Zoomed m)
------------------------------------------------------------------------------
-- Magnified
------------------------------------------------------------------------------
-- | This type family is used by 'Control.Lens.Zoom.Magnify' to describe the common effect type.
type family Magnified (m :: * -> *) :: * -> * -> *
type instance Magnified (ReaderT b m) = Effect m
-- Plain functions magnify via 'Const' (i.e. 'views').
type instance Magnified ((->)b) = Const
type instance Magnified (Strict.RWST a w s m) = EffectRWS w s m
type instance Magnified (Lazy.RWST a w s m) = EffectRWS w s m
-- IdentityT adds nothing, so defer to the inner monad.
type instance Magnified (IdentityT m) = Magnified m
------------------------------------------------------------------------------
-- Zoom
------------------------------------------------------------------------------
-- | This class allows us to use 'zoom' in, changing the 'State' supplied by
-- many different 'Control.Monad.Monad' transformers, potentially quite
-- deep in a 'Monad' transformer stack.
--
-- The functional dependencies let GHC infer the zoomed-out monad @n@ from
-- @m@ and the outer state @t@, and vice versa.
class (MonadState s m, MonadState t n) => Zoom m n s t | m -> s, n -> t, m t -> n, n s -> m where
  -- | Run a monadic action in a larger 'State' than it was defined in,
  -- using a 'Lens'' or 'Control.Lens.Traversal.Traversal''.
  --
  -- This is commonly used to lift actions in a simpler 'State'
  -- 'Monad' into a 'State' 'Monad' with a larger 'State' type.
  --
  -- When applied to a 'Control.Lens.Traversal.Traversal'' over
  -- multiple values, the actions for each target are executed sequentially
  -- and the results are aggregated.
  --
  -- This can be used to edit pretty much any 'Monad' transformer stack with a 'State' in it!
  --
  -- >>> flip State.evalState (a,b) $ zoom _1 $ use id
  -- a
  --
  -- >>> flip State.execState (a,b) $ zoom _1 $ id .= c
  -- (c,b)
  --
  -- >>> flip State.execState [(a,b),(c,d)] $ zoom traverse $ _2 %= f
  -- [(a,f b),(c,f d)]
  --
  -- >>> flip State.runState [(a,b),(c,d)] $ zoom traverse $ _2 <%= f
  -- (f b <> f d <> mempty,[(a,f b),(c,f d)])
  --
  -- >>> flip State.evalState (a,b) $ zoom both (use id)
  -- a <> b
  --
  -- @
  -- 'zoom' :: 'Monad' m => 'Lens'' s t -> 'StateT' t m a -> 'StateT' s m a
  -- 'zoom' :: ('Monad' m, 'Monoid' c) => 'Control.Lens.Traversal.Traversal'' s t -> 'StateT' t m c -> 'StateT' s m c
  -- 'zoom' :: ('Monad' m, 'Monoid' w) => 'Lens'' s t -> 'RWST' r w t m c -> 'RWST' r w s m c
  -- 'zoom' :: ('Monad' m, 'Monoid' w, 'Monoid' c) => 'Control.Lens.Traversal.Traversal'' s t -> 'RWST' r w t m c -> 'RWST' r w s m c
  -- 'zoom' :: ('Monad' m, 'Monoid' w, 'Error' e) => 'Lens'' s t -> 'ErrorT' e ('RWST' r w t m) c -> 'ErrorT' e ('RWST' r w s m) c
  -- 'zoom' :: ('Monad' m, 'Monoid' w, 'Monoid' c, 'Error' e) => 'Control.Lens.Traversal.Traversal'' s t -> 'ErrorT' e ('RWST' r w t m) c -> 'ErrorT' e ('RWST' r w s m) c
  -- ...
  -- @
  zoom :: LensLike' (Zoomed m c) t s -> m c -> n c
-- Base cases: unwrap the state function, run the lens through the
-- 'Focusing' newtype, and rewrap.
instance Monad z => Zoom (Strict.StateT s z) (Strict.StateT t z) s t where
  zoom l (Strict.StateT m) = Strict.StateT $ unfocusing #. l (Focusing #. m)
  {-# INLINE zoom #-}
instance Monad z => Zoom (Lazy.StateT s z) (Lazy.StateT t z) s t where
  zoom l (Lazy.StateT m) = Lazy.StateT $ unfocusing #. l (Focusing #. m)
  {-# INLINE zoom #-}
-- Reader and Identity transformers carry no state, so zooming just
-- recurses into the inner monad.
instance Zoom m n s t => Zoom (ReaderT e m) (ReaderT e n) s t where
  zoom l (ReaderT m) = ReaderT (zoom l . m)
  {-# INLINE zoom #-}
instance Zoom m n s t => Zoom (IdentityT m) (IdentityT n) s t where
  zoom l (IdentityT m) = IdentityT (zoom l m)
  {-# INLINE zoom #-}
-- RWST: thread the reader environment @r@ through, focusing the state
-- while carrying the writer output with 'FocusingWith'.
instance (Monoid w, Monad z) => Zoom (Strict.RWST r w s z) (Strict.RWST r w t z) s t where
  zoom l (Strict.RWST m) = Strict.RWST $ \r -> unfocusingWith #. l (FocusingWith #. m r)
  {-# INLINE zoom #-}
instance (Monoid w, Monad z) => Zoom (Lazy.RWST r w s z) (Lazy.RWST r w t z) s t where
  zoom l (Lazy.RWST m) = Lazy.RWST $ \r -> unfocusingWith #. l (FocusingWith #. m r)
  {-# INLINE zoom #-}
-- WriterT: zoom the inner monad, wrapping the lens so the writer output
-- is threaded alongside via 'FocusingPlus'.
instance (Monoid w, Zoom m n s t) => Zoom (Strict.WriterT w m) (Strict.WriterT w n) s t where
  zoom l = Strict.WriterT . zoom (\afb -> unfocusingPlus #.. l (FocusingPlus #.. afb)) . Strict.runWriterT
  {-# INLINE zoom #-}
instance (Monoid w, Zoom m n s t) => Zoom (Lazy.WriterT w m) (Lazy.WriterT w n) s t where
  zoom l = Lazy.WriterT . zoom (\afb -> unfocusingPlus #.. l (FocusingPlus #.. afb)) . Lazy.runWriterT
  {-# INLINE zoom #-}
-- The remaining instances follow the same pattern: unwrap the transformer,
-- zoom the inner monad with the lens adapted to carry the transformer's
-- result structure ([], Maybe, Either), then rewrap.
instance Zoom m n s t => Zoom (ListT m) (ListT n) s t where
  zoom l = ListT . zoom (\afb -> unfocusingOn . l (FocusingOn . afb)) . runListT
  {-# INLINE zoom #-}
instance Zoom m n s t => Zoom (MaybeT m) (MaybeT n) s t where
  zoom l = MaybeT . liftM getMay . zoom (\afb -> unfocusingMay #.. l (FocusingMay #.. afb)) . liftM May . runMaybeT
  {-# INLINE zoom #-}
instance (Error e, Zoom m n s t) => Zoom (ErrorT e m) (ErrorT e n) s t where
  zoom l = ErrorT . liftM getErr . zoom (\afb -> unfocusingErr #.. l (FocusingErr #.. afb)) . liftM Err . runErrorT
  {-# INLINE zoom #-}
instance Zoom m n s t => Zoom (ExceptT e m) (ExceptT e n) s t where
  zoom l = ExceptT . liftM getErr . zoom (\afb -> unfocusingErr #.. l (FocusingErr #.. afb)) . liftM Err . runExceptT
  {-# INLINE zoom #-}
-- | Zooming under 'FreeT' must also re-zoom the functor layer of any
-- remaining free structure, hence the extra @fmap ('zoom' l)@ on the result.
instance (Functor f, Zoom m n s t) => Zoom (FreeT f m) (FreeT f n) s t where
  zoom l = FreeT . liftM (fmap (zoom l) . getFreed) . zoom (\afb -> unfocusingFree #.. l (FocusingFree #.. afb)) . liftM Freed . runFreeT
  -- INLINE added for consistency: every other Zoom instance in this
  -- module carries the pragma; omitting it here was an oversight.
  {-# INLINE zoom #-}
------------------------------------------------------------------------------
-- Magnify
------------------------------------------------------------------------------
-- TODO: instance Zoom m m a a => Zoom (ContT r m) (ContT r m) a a where
-- | This class allows us to use 'magnify' part of the environment, changing the environment supplied by
-- many different 'Monad' transformers. Unlike 'zoom' this can change the environment of a deeply nested 'Monad' transformer.
--
-- Also, unlike 'zoom', this can be used with any valid 'Getter', but cannot be used with a 'Traversal' or 'Fold'.
--
-- The @Magnified m ~ Magnified n@ superclass ties the two stacks to the
-- same effect type; the functional dependencies let GHC infer one monad
-- from the other plus an environment type.
class (Magnified m ~ Magnified n, MonadReader b m, MonadReader a n) => Magnify m n b a | m -> b, n -> a, m a -> n, n b -> m where
  -- | Run a monadic action in a larger environment than it was defined in, using a 'Getter'.
  --
  -- This acts like 'Control.Monad.Reader.Class.local', but can in many cases change the type of the environment as well.
  --
  -- This is commonly used to lift actions in a simpler 'Reader' 'Monad' into a 'Monad' with a larger environment type.
  --
  -- This can be used to edit pretty much any 'Monad' transformer stack with an environment in it:
  --
  -- >>> (1,2) & magnify _2 (+1)
  -- 3
  --
  -- >>> flip Reader.runReader (1,2) $ magnify _1 Reader.ask
  -- 1
  --
  -- >>> flip Reader.runReader (1,2,[10..20]) $ magnify (_3._tail) Reader.ask
  -- [11,12,13,14,15,16,17,18,19,20]
  --
  -- The type can be read as
  --
  -- @
  -- magnify :: LensLike' (Magnified m c) a b -> m c -> n c
  -- @
  --
  -- but the higher-rank constraints make it easier to apply @magnify@ to a
  -- 'Getter' in highly-polymorphic code.
  --
  -- @
  -- 'magnify' :: 'Getter' s a -> (a -> r) -> s -> r
  -- 'magnify' :: 'Monoid' r => 'Fold' s a -> (a -> r) -> s -> r
  -- @
  --
  -- @
  -- 'magnify' :: 'Monoid' w => 'Getter' s t -> 'RWS' t w st c -> 'RWS' s w st c
  -- 'magnify' :: ('Monoid' w, 'Monoid' c) => 'Fold' s a -> 'RWS' a w st c -> 'RWS' s w st c
  -- ...
  -- @
  magnify :: ((Functor (Magnified m c), Contravariant (Magnified m c))
               => LensLike' (Magnified m c) a b)
          -> m c -> n c
-- Wrap the reader function in 'Effect' so the getter can run through it.
instance Monad m => Magnify (ReaderT b m) (ReaderT a m) b a where
  magnify l (ReaderT m) = ReaderT $ getEffect #. l (Effect #. m)
  {-# INLINE magnify #-}
-- | @
-- 'magnify' = 'views'
-- @
instance Magnify ((->) b) ((->) a) b a where
  magnify l = views l
  {-# INLINE magnify #-}
-- RWST instances: the state and writer components ride along in 'EffectRWS'.
instance (Monad m, Monoid w) => Magnify (Strict.RWST b w s m) (Strict.RWST a w s m) b a where
  magnify l (Strict.RWST m) = Strict.RWST $ getEffectRWS #. l (EffectRWS #. m)
  {-# INLINE magnify #-}
instance (Monad m, Monoid w) => Magnify (Lazy.RWST b w s m) (Lazy.RWST a w s m) b a where
  magnify l (Lazy.RWST m) = Lazy.RWST $ getEffectRWS #. l (EffectRWS #. m)
  {-# INLINE magnify #-}
-- IdentityT adds no environment of its own; recurse into the inner monad.
instance Magnify m n b a => Magnify (IdentityT m) (IdentityT n) b a where
  magnify l (IdentityT m) = IdentityT (magnify l m)
  {-# INLINE magnify #-}
|
ddssff/lens
|
src/Control/Lens/Zoom.hs
|
bsd-3-clause
| 11,559
| 0
| 15
| 2,309
| 2,678
| 1,485
| 1,193
| -1
| -1
|
--------------------------------------------------------------------
-- |
-- Module : Flickr.Photosets
-- Description : flickr.photosets - navigating and managing sets.
-- Copyright : (c) Sigbjorn Finne, 2008
-- License : BSD3
--
-- Maintainer : Sigbjorn Finne <sof@forkIO.com>
-- Stability : provisional
-- Portability : portable
--
-- flickr.photosets API, navigating and managing photo sets.
--------------------------------------------------------------------
module Flickr.Photosets where
import Flickr.Monad
import Flickr.Types
import Flickr.Utils
import Flickr.Types.Import
import Data.List
-- | Add a photo to the end of an existing photoset.
-- Requires write permission and is issued as a POST.
addPhoto :: PhotosetID -> PhotoID -> FM ()
addPhoto psid pid =
  withWritePerm (postMethod call)
  where
    call = flickCall_ "flickr.photosets.addPhoto"
                      [ ("photoset_id", psid)
                      , ("photo_id", pid)
                      ]
-- | Create a new photoset for the calling user.
-- The primary photo must already exist; an optional description may be
-- supplied. Requires write permission (POST).
create :: String -> Maybe String -> PhotoID -> FM Photoset
create title mbDesc primPid = withWritePerm $ postMethod $
  flickTranslate toPhotoset $
    flickrCall "flickr.photosets.create"
      -- mbArg only adds the "description" key when mbDesc is Just.
      (mbArg "description" mbDesc $
        [ ("title", title)
        , ("primary_photo_id", primPid)
        ])
-- | Delete a photoset. Requires write permission and is issued as a POST.
delete :: PhotosetID -> FM ()
delete psid =
  withWritePerm . postMethod $
    flickCall_ "flickr.photosets.delete" [ ("photoset_id", psid) ]
-- | Modify the meta-data for a photoset.
-- Sets a new title and, when given, a new description. Requires write
-- permission (POST).
editMeta :: PhotosetID -> String -> Maybe String -> FM ()
editMeta psid title mbDesc = withWritePerm $ postMethod $
  flickCall_ "flickr.photosets.editMeta"
    -- mbArg only adds the "description" key when mbDesc is Just.
    (mbArg "description" mbDesc $
      [ ("photoset_id", psid)
      , ("title", title)
      ])
-- | Modify the photos in a photoset. Use this method to add, remove and re-order photos.
-- The photo id list is sent as a single comma-separated value. Requires
-- write permission (POST).
editPhotos :: PhotosetID -> PhotoID -> [PhotoID] -> FM ()
editPhotos psid primPhoto pids = withWritePerm $ postMethod $
  flickCall_ "flickr.photosets.editPhotos"
    [ ("photoset_id", psid)
    , ("primary_photo_id", primPhoto)
    , ("photo_ids", intercalate "," pids)
    ]
-- | Returns next and previous photos for a photo in a set.
-- The pair is (previous, next) as decoded by toPhotoPair — order assumed
-- from the helper's name; TODO confirm against Flickr.Types.Import.
getContext :: PhotosetID -> PhotoID -> FM (Photo, Photo)
getContext psid pid =
  flickTranslate toPhotoPair $
    flickrCall "flickr.photosets.getContext"
      [ ("photo_id", pid)
      , ("photoset_id", psid)
      ]
-- | Gets information about a photoset.
getInfo :: PhotosetID -> FM Photoset
getInfo psid = flickTranslate toPhotoset call
  where
    call = flickrCall "flickr.photosets.getInfo" [ ("photoset_id", psid) ]
-- | Returns the photosets belonging to the specified user
-- (or, when no user is given, presumably the calling user — the choice
-- is made server-side; confirm against the Flickr API docs).
-- The Bool in the result is the response's "cancreate" flag.
getList :: Maybe UserID -> FM (Bool, [Photoset])
getList mbUser =
  flickTranslate toRes $
    flickrCall "flickr.photosets.getList" (mbArg "user_id" mbUser [])
  where
    -- Parse the XML response: the "cancreate" attribute plus the list of
    -- <photoset> child elements.
    toRes s = parseDoc eltRes s
    eltRes e = do
      u <- eltBool "cancreate" e
      ls <- mapM eltPhotoset (pNodes "photoset" (children e))
      return (u,ls)
-- | Get the list of photos in a set.
-- Optional privacy and media-type filters are only sent when present;
-- the requested extra attributes are sent as a list argument.
--
-- Fix: the original ended with a stray unmatched @)@ after the argument
-- list, which made the module fail to parse.
getPhotos :: PhotosetID
          -> [PhotoInfo]     -- ^ extra per-photo attributes to request
          -> Maybe Privacy   -- ^ optional privacy filter
          -> Maybe MediaType -- ^ optional media-type filter
          -> FM Photoset
getPhotos psid extras priv med =
  flickTranslate toPhotoset $
    flickrCall "flickr.photosets.getPhotos" $
      mbArg "privacy_filter" (fmap (show.fromEnum) priv) $
        mbArg "media" (fmap show med) $
          lsArg "extras" (map show extras)
            [ ("photoset_id", psid) ]
-- | Set the order of photosets for the calling user.
-- The ids are joined into one comma-separated value. Requires write
-- permission and is issued as a POST.
orderSets :: [PhotosetID] -> FM ()
orderSets psids =
  withWritePerm . postMethod $
    flickCall_ "flickr.photosets.orderSets"
               [ ("photoset_ids", intercalate "," psids) ]
-- | Remove a photo from a photoset.
-- Requires write permission and is issued as a POST.
removePhoto :: PhotosetID -> PhotoID -> FM ()
removePhoto psid pid = withWritePerm $ postMethod $
  flickCall_ "flickr.photosets.removePhoto"
    [ ("photoset_id", psid)
    , ("photo_id", pid)
    ]
|
BeautifulDestinations/hs-flickr
|
Flickr/Photosets.hs
|
bsd-3-clause
| 3,791
| 1
| 15
| 751
| 876
| 468
| 408
| -1
| -1
|
module Matterhorn.State.PostListOverlay
( enterFlaggedPostListMode
, enterPinnedPostListMode
, enterSearchResultPostListMode
, postListJumpToCurrent
, postListSelectUp
, postListSelectDown
, postListUnflagSelected
, exitPostListMode
)
where
import GHC.Exts ( IsList(..) )
import Prelude ()
import Matterhorn.Prelude
import qualified Data.Foldable as F
import qualified Data.Text as T
import Lens.Micro.Platform ( (.=) )
import Network.Mattermost.Endpoints
import Network.Mattermost.Types
import Matterhorn.State.Messages ( jumpToPost )
import Matterhorn.State.Common
import Matterhorn.State.MessageSelect
import Matterhorn.State.Messages ( addObtainedMessages
, asyncFetchMessagesSurrounding )
import Matterhorn.Types
import Matterhorn.Types.DirectionalSeq (emptyDirSeq)
-- | Create a PostListOverlay with the given content description and
-- with a specified list of messages.
--
-- Stores the messages, pre-selects the latest post (if any), switches the
-- UI mode, and kicks off a background fetch of messages around the
-- selection so context is available.
enterPostListMode :: PostListContents -> Messages -> MH ()
enterPostListMode contents msgs = do
  csCurrentTeam.tsPostListOverlay.postListPosts .= msgs
  let mlatest = getLatestPostMsg msgs
      pId = mlatest >>= messagePostId
      cId = mlatest >>= \m -> m^.mChannelId
  csCurrentTeam.tsPostListOverlay.postListSelected .= pId
  setMode $ PostListOverlay contents
  -- Only fetch surrounding context when we know both the channel and post.
  case (pId, cId) of
    (Just p, Just c) -> asyncFetchMessagesSurrounding c p
    _ -> return ()
-- | Clear out the state of a PostListOverlay.
-- Empties the stored posts, drops the selection, and returns to the
-- main UI mode.
exitPostListMode :: MH ()
exitPostListMode = do
  csCurrentTeam.tsPostListOverlay.postListPosts .= emptyDirSeq
  csCurrentTeam.tsPostListOverlay.postListSelected .= Nothing
  setMode Main
-- | Fetch posts with the given server operation and enter the post list
-- overlay with the results. The fetch runs asynchronously (preempting
-- other queued work); each returned post is folded into its channel's
-- message state before the overlay is shown.
createPostList :: TeamId -> PostListContents -> (Session -> IO Posts) -> MH ()
createPostList tId contentsType fetchOp = do
  session <- getSession
  doAsyncWith Preempt $ do
    posts <- fetchOp session
    return $ Just $ do
      messages <- installMessagesFromPosts (Just tId) posts
      -- n.b. do not use addNewPostedMessage because these messages
      -- are not new, and so no notifications or channel highlighting
      -- or other post-processing should be performed.
      let plist = F.toList $ postsPosts posts
          -- Wrap a single post back up as a one-element Posts record so it
          -- can be handed to addObtainedMessages per channel.
          postsSpec p = Posts { postsPosts = fromList [(postId p, p)]
                              , postsOrder = fromList [postId p]
                              }
      mapM_ (\p -> addObtainedMessages (postChannelId p) 0 False $ postsSpec p) plist
      enterPostListMode contentsType messages
-- | Create a PostListOverlay with flagged messages from the server.
enterFlaggedPostListMode :: MH ()
enterFlaggedPostListMode = do
  tId <- use csCurrentTeamId
  createPostList tId PostListFlagged $
    mmGetListOfFlaggedPosts UserMe defaultFlaggedPostsQuery
-- | Create a PostListOverlay with pinned messages from the server for
-- the current channel.
enterPinnedPostListMode :: MH ()
enterPinnedPostListMode = do
  tId <- use csCurrentTeamId
  cId <- use (csCurrentChannelId tId)
  createPostList tId (PostListPinned cId) $ mmGetChannelPinnedPosts cId
-- | Create a PostListOverlay with post search result messages from the
-- server.
--
-- Rejects blank queries with an informational message. Otherwise an
-- empty overlay is shown immediately (the True flag marks the search as
-- pending — inferred from the False flag used once results arrive;
-- confirm against the PostListSearch definition), then the search runs
-- asynchronously via 'createPostList'.
enterSearchResultPostListMode :: Text -> MH ()
enterSearchResultPostListMode terms
  | T.null (T.strip terms) = postInfoMessage "Search command requires at least one search term."
  | otherwise = do
      enterPostListMode (PostListSearch terms True) noMessages
      tId <- use csCurrentTeamId
      createPostList tId (PostListSearch terms False) $
        mmSearchForTeamPosts tId (SearchPosts terms False)
-- | Select the chronologically /newer/ adjacent message in the
-- PostListOverlay (via 'getNextMessage').
--
-- NOTE(review): the original comment said "Move the selection up" on a
-- function named ...Down; whether newer means visually up or down
-- depends on the overlay's rendering order — confirm against the UI.
postListSelectDown :: MH ()
postListSelectDown = do
  selId <- use (csCurrentTeam.tsPostListOverlay.postListSelected)
  posts <- use (csCurrentTeam.tsPostListOverlay.postListPosts)
  let nextMsg = getNextMessage (MessagePostId <$> selId) posts
  case nextMsg of
    Nothing -> return ()
    Just m -> do
      let pId = m^.mMessageId >>= messageIdPostId
      csCurrentTeam.tsPostListOverlay.postListSelected .= pId
      -- Prefetch context around the new selection; log if the message
      -- lacks a channel or post id.
      case (m^.mChannelId, pId) of
        (Just c, Just p) -> asyncFetchMessagesSurrounding c p
        o -> mhLog LogError
               (T.pack $ "postListSelectDown" <>
                         " unable to get channel or post ID: " <> show o)
-- | Select the chronologically /older/ adjacent message in the
-- PostListOverlay (via 'getPrevMessage').
--
-- NOTE(review): the original comment said "Move the selection down" on a
-- function named ...Up; whether older means visually up or down depends
-- on the overlay's rendering order — confirm against the UI.
postListSelectUp :: MH ()
postListSelectUp = do
  selId <- use (csCurrentTeam.tsPostListOverlay.postListSelected)
  posts <- use (csCurrentTeam.tsPostListOverlay.postListPosts)
  let prevMsg = getPrevMessage (MessagePostId <$> selId) posts
  case prevMsg of
    Nothing -> return ()
    Just m -> do
      let pId = m^.mMessageId >>= messageIdPostId
      csCurrentTeam.tsPostListOverlay.postListSelected .= pId
      -- Prefetch context around the new selection; log if the message
      -- lacks a channel or post id.
      case (m^.mChannelId, pId) of
        (Just c, Just p) -> asyncFetchMessagesSurrounding c p
        o -> mhLog LogError
               (T.pack $ "postListSelectUp" <>
                         " unable to get channel or post ID: " <> show o)
-- | If a post is currently selected in the PostListOverlay, clear its
-- flagged status on the server; do nothing when there is no selection.
postListUnflagSelected :: MH ()
postListUnflagSelected = do
  selected <- use (csCurrentTeam.tsPostListOverlay.postListSelected)
  case selected of
    Just postId -> flagMessage postId False
    Nothing -> return ()
-- | Jump to the currently selected message in its main channel display,
-- switching to message-selection state; do nothing when there is no
-- selection.
postListJumpToCurrent :: MH ()
postListJumpToCurrent = do
  selected <- use (csCurrentTeam.tsPostListOverlay.postListSelected)
  case selected of
    Just postId -> jumpToPost postId
    Nothing -> return ()
|
matterhorn-chat/matterhorn
|
src/Matterhorn/State/PostListOverlay.hs
|
bsd-3-clause
| 5,784
| 0
| 21
| 1,265
| 1,301
| 653
| 648
| -1
| -1
|
{-# LANGUAGE CPP #-}
-- | Cross-platform operations for manipulating terminal console windows.
module Language.Haskell.Ghcid.Terminal(
terminalTopmost,
withWindowIcon, WindowIcon(..), setWindowIcon
) where
#if defined(mingw32_HOST_OS)
import Data.Word
import Data.Bits
import Control.Exception
import Graphics.Win32.Misc
import Graphics.Win32.Window
import Graphics.Win32.Message
import Graphics.Win32.GDI.Types
import System.Win32.Types
wM_SETICON = 0x0080 :: WindowMessage
wM_GETICON = 0x007F :: WindowMessage
iCON_BIG = 1
iCON_SMALL = 0
#ifdef x86_64_HOST_ARCH
#define CALLCONV ccall
#else
#define CALLCONV stdcall
#endif
foreign import CALLCONV unsafe "windows.h GetConsoleWindow"
getConsoleWindow :: IO HWND
foreign import CALLCONV unsafe "windows.h SetWindowPos"
setWindowPos :: HWND -> HWND -> Int -> Int -> Int -> Int -> Word32 -> IO Bool
#endif
-- | Raise the current terminal on top of all other screens, if you can.
terminalTopmost :: IO ()
#if defined(mingw32_HOST_OS)
terminalTopmost = do
    wnd <- getConsoleWindow
    -- hWND_TOPMOST keeps the window above non-topmost windows;
    -- SWP_NOMOVE | SWP_NOSIZE leave its position and size unchanged.
    -- The Bool result of SetWindowPos is deliberately ignored (best effort).
    setWindowPos wnd hWND_TOPMOST 0 0 0 0 (sWP_NOMOVE .|. sWP_NOSIZE)
    pure ()
#else
-- No portable way to raise a terminal on non-Windows platforms: no-op.
terminalTopmost = pure ()
#endif
-- | The three icon states shown for a load: success, warnings only,
-- or at least one error.
data WindowIcon = IconOK | IconWarning | IconError
-- | Change the window icon to green, yellow or red depending on whether the file was errorless, contained only warnings or contained at least one error.
setWindowIcon :: WindowIcon -> IO ()
#if defined(mingw32_HOST_OS)
setWindowIcon x = do
    -- Map each state to a stock Windows icon resource.
    ico <- pure $ case x of
        IconOK -> iDI_ASTERISK
        IconWarning -> iDI_EXCLAMATION
        IconError -> iDI_HAND
    icon <- loadIcon Nothing ico
    wnd <- getConsoleWindow
    -- SMALL is the system tray, BIG is the taskbar and Alt-Tab screen
    sendMessage wnd wM_SETICON iCON_SMALL $ fromIntegral $ castPtrToUINTPtr icon
    sendMessage wnd wM_SETICON iCON_BIG $ fromIntegral $ castPtrToUINTPtr icon
    pure ()
#else
-- Icon manipulation is Windows-only: no-op elsewhere.
setWindowIcon _ = pure ()
#endif
-- | Run an operation in which you call setWindowIcon.
-- On Windows, the current small and big icons are captured first and
-- restored afterwards ('finally' guarantees restoration even on
-- exception). Elsewhere the action runs unwrapped.
withWindowIcon :: IO a -> IO a
#if defined(mingw32_HOST_OS)
withWindowIcon act = do
    wnd <- getConsoleWindow
    icoBig <- sendMessage wnd wM_GETICON iCON_BIG 0
    icoSmall <- sendMessage wnd wM_GETICON iCON_SMALL 0
    act `finally` do
        sendMessage wnd wM_SETICON iCON_BIG icoBig
        sendMessage wnd wM_SETICON iCON_SMALL icoSmall
        pure ()
#else
withWindowIcon act = act
#endif
|
ndmitchell/ghcid
|
src/Language/Haskell/Ghcid/Terminal.hs
|
bsd-3-clause
| 2,405
| 36
| 6
| 457
| 443
| 253
| 190
| 11
| 1
|
module SIGyM.IO (
module SIGyM.IO.Raster
) where
import SIGyM.IO.Raster
|
meteogrid/sigym-core
|
src/SIGyM/IO.hs
|
bsd-3-clause
| 77
| 0
| 5
| 14
| 21
| 14
| 7
| 3
| 0
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Files.Structure (
unfoldFileTree,
downloadTree,
renameRoot,
isSingleNode,
SIO
) where
import Control.Monad.Trans.Except (ExceptT, runExceptT)
import Data.Aeson
import Data.Foldable
import Data.Tree
import System.FilePath ((</>))
import Files.Node.FSNode
import Files.Node.NodeJSON
import Files.State
import Files.Tree
import Settings.Monad.Exception
import Settings.Monad.State
import Settings.Network
type TreeSeed = Either FileJSON FolderJSON
type SIO m = (MonadIO m, MonadEnvState m)
-- | Unfold a full file tree from a single file/folder JSON seed,
-- expanding folders by fetching their contents (see 'genTreeSeeds').
unfoldFileTree :: RIOE' m => TreeSeed -> m (Tree FSNode)
unfoldFileTree = unfoldTreeM genTreeSeeds
-- | Give the root node of a file tree a new relative path, leaving all
-- subtrees untouched.
renameRoot :: String -> Tree FSNode -> Tree FSNode
renameRoot newName (Node rootNode subtrees) =
  Node (rootNode { relativePath = newName }) subtrees
-- | Download every node of the tree under the given parent directory and
-- fold the per-node summaries into a single 'DownloadSummary'.
downloadTree :: SIO m => FilePath -> Tree FSNode -> m DownloadSummary
downloadTree parent rootNode = fold <$> downloadTreeUncounted parent rootNode
-- | Download the tree in parallel, producing one 'DownloadSummary' per
-- node. The channel obtained from the state (EnvS — presumably a Chan of
-- progress events; confirm in Settings.Monad.State) is threaded to each
-- node writer for progress reporting.
downloadTreeUncounted :: SIO m => FilePath -> Tree FSNode -> m (Tree DownloadSummary)
downloadTreeUncounted fp tree = do
  chan <- get
  liftIO $ traverseTreeFoldPar combinator (writeAndCount chan) fp tree
  where
    -- Build each node's on-disk path from its parent's path.
    combinator l r = l </> relativePath r
-- | Write one node to disk (unless it already exists), publish the
-- resulting state on the progress channel, and return its summary.
writeAndCount :: MonadIO m => EnvS -> FilePath -> FSNode -> m DownloadSummary
writeAndCount chan parent node = do
  exist <- doesExist parent node
  -- Existing files are skipped and reported as "existing" state.
  if exist then spitState $ singleExState path else do
    result <- liftIO $ runExceptT downloadExceptT
    -- Failure carries the exception; success is reported path-only.
    let state = either (singleFaStateWith path) (const $ singleSuState path) result
    spitState state
  where
    -- Publish a state as JSON on the channel, then reduce it to a summary.
    spitState state = do
      liftIO $ writeChan chan $ toJSON state
      return $ summarize state
    downloadExceptT :: ExceptT SomeException IO ()
    downloadExceptT = writeNode parent node
    path = parent </> relativePath node
-- | Unfolding step for 'unfoldFileTree': a file seed becomes a leaf node;
-- a folder seed becomes a folder node whose children are fetched from the
-- folder's files and subfolders endpoints.
genTreeSeeds :: RIOE' m => TreeSeed -> m (FSNode, [TreeSeed])
genTreeSeeds (Left filej) = return (filejsonToNode filej, [])
genTreeSeeds (Right folderj) = do
  filejsons <- canvasJSON $ files_url folderj
  folderjsons <- canvasJSON $ folders_url folderj
  let seeds = map Left (filejsons :: [FileJSON])
              ++ map Right (folderjsons :: [FolderJSON])
  return (FolderNode $ name folderj, seeds)
|
Evan-Zhao/FastCanvas
|
src/Files/Structure.hs
|
bsd-3-clause
| 2,440
| 0
| 16
| 581
| 710
| 362
| 348
| 55
| 2
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module : Data.Array.Accelerate.Trafo.Sharing
-- Copyright : [2008..2014] Manuel M T Chakravarty, Gabriele Keller
-- [2009..2014] Trevor L. McDonell
-- [2013..2014] Robert Clifton-Everest
-- License : BSD3
--
-- Maintainer : Manuel M T Chakravarty <chak@cse.unsw.edu.au>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- This module implements HOAS to de Bruijn conversion of array expressions
-- while incorporating sharing information.
--
module Data.Array.Accelerate.Trafo.Sharing (
-- * HOAS -> de Bruijn conversion
convertAcc, convertAfun, Afunction, AfunctionR,
convertExp, convertFun, Function, FunctionR
) where
-- standard library
import Control.Applicative hiding ( Const )
import Control.Monad.Fix
import Data.List
import Data.Maybe
import Data.Hashable
import Data.Typeable
import qualified Data.HashTable.IO as Hash
import qualified Data.IntMap as IntMap
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import System.IO.Unsafe ( unsafePerformIO )
import System.Mem.StableName
-- friends
import Data.Array.Accelerate.Error
import Data.Array.Accelerate.Smart
import Data.Array.Accelerate.Array.Sugar as Sugar
import Data.Array.Accelerate.Tuple hiding ( Tuple )
import Data.Array.Accelerate.AST hiding (
PreOpenAcc(..), OpenAcc(..), Acc, Stencil(..), PreOpenExp(..), OpenExp, PreExp, Exp,
showPreAccOp, showPreExpOp )
import qualified Data.Array.Accelerate.AST as AST
import qualified Data.Array.Accelerate.Tuple as Tuple
import qualified Data.Array.Accelerate.Debug as Debug
-- Configuration
-- -------------
-- Perhaps the configuration should be passed as a reader monad or some such,
-- but that's a little inconvenient.
--
-- | Knobs controlling the sharing-recovery conversion.
data Config = Config
  {
    recoverAccSharing :: Bool -- ^ Recover sharing of array computations ?
  , recoverExpSharing :: Bool -- ^ Recover sharing of scalar expressions ?
  , floatOutAcc :: Bool -- ^ Always float array computations out of expressions ?
  }
-- Layouts
-- -------
-- A layout of an environment has an entry for each entry of the environment.
-- Each entry in the layout holds the de Bruijn index that refers to the
-- corresponding entry in the environment.
--
-- | A typed environment layout: for each entry of @env'@ it records the
-- de Bruijn index of the corresponding entry in @env@. 'Typeable' on the
-- pushed type enables the runtime cast performed by 'prjIdx'.
data Layout env env' where
  EmptyLayout :: Layout env ()
  PushLayout :: Typeable t
             => Layout env env' -> Idx env t -> Layout env (env', t)
-- Project the nth index out of an environment layout.
--
-- The first argument provides context information for error messages in the case of failure.
--
-- | Project the nth index out of an environment layout, casting it to the
-- expected type @t@. The context string is used in the error messages
-- produced on a failed cast (typically a symptom of nested data
-- parallelism) or an out-of-range index.
prjIdx :: forall t env env'. Typeable t => String -> Int -> Layout env env' -> Idx env t
prjIdx ctxt 0 (PushLayout _ (ix :: Idx env0 t0))
  -- gcast checks at runtime that the stored index really has type t.
  = flip fromMaybe (gcast ix)
  $ possiblyNestedErr ctxt $
      "Couldn't match expected type `" ++ show (typeOf (undefined::t)) ++
      "' with actual type `" ++ show (typeOf (undefined::t0)) ++ "'" ++
      "\n Type mismatch"
prjIdx ctxt n (PushLayout l _) = prjIdx ctxt (n - 1) l
prjIdx ctxt _ EmptyLayout = possiblyNestedErr ctxt "Environment doesn't contain index"
-- | Abort with a 'prjIdx' failure message, suggesting the most common
-- cause: nested data parallelism.
possiblyNestedErr :: String -> String -> a
possiblyNestedErr ctxt failreason
  = error $ "Fatal error in Sharing.prjIdx:"
         ++ "\n " ++ failreason ++ " at " ++ ctxt
         ++ "\n Possible reason: nested data parallelism — array computation that depends on a"
         ++ "\n scalar variable of type 'Exp a'"
-- Add an entry to a layout, incrementing all indices
--
-- | Add an entry to the underlying environment: every stored de Bruijn
-- index is incremented so it still refers to the same entry.
incLayout :: Layout env env' -> Layout (env, t) env'
incLayout EmptyLayout = EmptyLayout
incLayout (PushLayout lyt ix) = PushLayout (incLayout lyt) (SuccIdx ix)
-- | Number of entries recorded in an environment layout.
sizeLayout :: Layout env env' -> Int
sizeLayout layout =
  case layout of
    EmptyLayout -> 0
    PushLayout rest _ -> 1 + sizeLayout rest
-- Conversion from HOAS to de Bruijn computation AST
-- =================================================
-- Array computations
-- ------------------
-- | Convert a closed array expression to de Bruijn form while also incorporating sharing
-- information.
--
-- | Convert a closed array expression to de Bruijn form while also
-- incorporating sharing information. Note that floating only takes
-- effect when array-computation sharing recovery is itself enabled.
convertAcc
    :: Arrays arrs
    => Bool -- ^ recover sharing of array computations ?
    -> Bool -- ^ recover sharing of scalar expressions ?
    -> Bool -- ^ always float array computations out of expressions?
    -> Acc arrs
    -> AST.Acc arrs
convertAcc shareAcc shareExp floatAcc acc
  = let config = Config shareAcc shareExp (shareAcc && floatAcc)
    in
    -- Closed term: start at level 0 with no free variables and an empty layout.
    convertOpenAcc config 0 [] EmptyLayout acc
-- | Convert a closed function over array computations, while incorporating
-- sharing information.
--
-- | Convert a closed function over array computations, while incorporating
-- sharing information. The flags mirror 'convertAcc'.
convertAfun :: Afunction f => Bool -> Bool -> Bool -> f -> AST.Afun (AfunctionR f)
convertAfun shareAcc shareExp floatAcc =
  let config = Config shareAcc shareExp (shareAcc && floatAcc)
  in aconvert config EmptyLayout
-- Convert a HOAS fragment into de Bruijn form, binding variables into the typed
-- environment layout one binder at a time.
--
-- NOTE: Because we convert one binder at a time left-to-right, the bound
-- variables ('vars') will have de Bruijn index _zero_ as the outermost
-- binding, and thus go to the end of the list.
--
-- | Convert a HOAS function fragment into de Bruijn form, binding one
-- variable at a time into the typed environment layout. Because binders
-- are converted left-to-right, the outermost binding ends up with de
-- Bruijn index zero relative to its own scope.
class Afunction f where
  type AfunctionR f
  aconvert :: Config -> Layout aenv aenv -> f -> AST.OpenAfun aenv (AfunctionR f)
instance (Arrays a, Afunction r) => Afunction (Acc a -> r) where
  type AfunctionR (Acc a -> r) = a -> AfunctionR r
  --
  aconvert config alyt f
    -- Tag the new argument with the current layout size, extend the
    -- layout, and recurse into the function body under a lambda.
    = let a = Acc $ Atag (sizeLayout alyt)
          alyt' = incLayout alyt `PushLayout` ZeroIdx
      in
      Alam $ aconvert config alyt' (f a)
instance Arrays b => Afunction (Acc b) where
  type AfunctionR (Acc b) = b
  --
  aconvert config alyt body
    -- Base case: all binders consumed; the free-variable levels are
    -- lvl-1 .. 0, newest first.
    = let lvl = sizeLayout alyt
          vars = [lvl-1, lvl-2 .. 0]
      in
      Abody $ convertOpenAcc config lvl vars alyt body
-- | Convert an open array expression to de Bruijn form while also incorporating sharing
-- information.
--
-- Runs Phases One and Two of sharing recovery ('recoverSharingAcc') and then
-- Phase Three ('convertSharingAcc'), which produces the let-bound de Bruijn
-- term using the initial sharing environment computed by the first phases.
convertOpenAcc
    :: Arrays arrs
    => Config
    -> Level            -- level of currently bound array variables
    -> [Level]          -- tags of bound array variables
    -> Layout aenv aenv
    -> Acc arrs
    -> AST.OpenAcc aenv arrs
convertOpenAcc config lvl fvs alyt acc
  = let (sharingAcc, initialEnv) = recoverSharingAcc config lvl fvs acc
    in
    convertSharingAcc config alyt initialEnv sharingAcc
-- | Convert an array expression with given array environment layout and sharing information into
-- de Bruijn form while recovering sharing at the same time (by introducing appropriate let
-- bindings). The latter implements the third phase of sharing recovery.
--
-- The sharing environment 'env' keeps track of all currently bound sharing variables, keeping them
-- in reverse chronological order (outermost variable is at the end of the list).
--
convertSharingAcc
    :: forall aenv arrs. Arrays arrs
    => Config
    -> Layout aenv aenv
    -> [StableSharingAcc]
    -> ScopedAcc arrs
    -> AST.OpenAcc aenv arrs
-- Pruned occurrence of a shared subtree: look the stable name up in the
-- sharing environment and emit the corresponding de Bruijn variable. An empty
-- environment here means the node was never let-bound, which can only happen
-- for a cyclic (recursive) definition.
convertSharingAcc _ alyt aenv (ScopedAcc lams (AvarSharing sa))
  | Just i <- findIndex (matchStableAcc sa) aenv'
  = AST.OpenAcc $ AST.Avar (prjIdx (ctxt ++ "; i = " ++ show i) i alyt)
  | null aenv'
  = error $ "Cyclic definition of a value of type 'Acc' (sa = " ++
            show (hashStableNameHeight sa) ++ ")"
  | otherwise
  = $internalError "convertSharingAcc" err
  where
    aenv' = lams ++ aenv
    ctxt = "shared 'Acc' tree with stable name " ++ show (hashStableNameHeight sa)
    err = "inconsistent valuation @ " ++ ctxt ++ ";\n aenv = " ++ show aenv'
-- Binding point of a shared subtree: introduce an 'Alet' and convert the body
-- with the layout and sharing environment both extended by the new binder.
convertSharingAcc config alyt aenv (ScopedAcc lams (AletSharing sa@(StableSharingAcc _ boundAcc) bodyAcc))
  = AST.OpenAcc
  $ let alyt' = incLayout alyt `PushLayout` ZeroIdx
        aenv' = lams ++ aenv
    in
    AST.Alet (convertSharingAcc config alyt aenv' (ScopedAcc [] boundAcc))
             (convertSharingAcc config alyt' (sa:aenv') bodyAcc)
-- Ordinary (unshared or first-occurrence) node: convert each constructor of
-- the HOAS 'PreAcc' to its typed AST counterpart, recursing with the helpers
-- below for sub-computations, scalar expressions, and embedded functions.
convertSharingAcc config alyt aenv (ScopedAcc lams (AccSharing _ preAcc))
  = AST.OpenAcc
  $ let aenv' = lams ++ aenv
        cvtA :: Arrays a => ScopedAcc a -> AST.OpenAcc aenv a
        cvtA = convertSharingAcc config alyt aenv'
        cvtE :: Elt t => ScopedExp t -> AST.Exp aenv t
        cvtE = convertSharingExp config EmptyLayout alyt [] aenv'
        cvtF1 :: (Elt a, Elt b) => (Exp a -> ScopedExp b) -> AST.Fun aenv (a -> b)
        cvtF1 = convertSharingFun1 config alyt aenv'
        cvtF2 :: (Elt a, Elt b, Elt c) => (Exp a -> Exp b -> ScopedExp c) -> AST.Fun aenv (a -> b -> c)
        cvtF2 = convertSharingFun2 config alyt aenv'
        cvtAfun1 :: (Arrays a, Arrays b) => (Acc a -> ScopedAcc b) -> AST.OpenAfun aenv (a -> b)
        cvtAfun1 = convertSharingAfun1 config alyt aenv'
    in
    case preAcc of
      Atag i
        -> AST.Avar (prjIdx ("de Bruijn conversion tag " ++ show i) i alyt)
      -- Pipe is desugared into a let binding the first stage's result, with
      -- the second stage applied to the bound variable. The body is converted
      -- under a dummy sharing entry for the fresh binder.
      Pipe afun1 afun2 acc
        -> let noStableSharing = StableSharingAcc noStableAccName (undefined :: SharingAcc acc exp ())
               alyt' = incLayout alyt `PushLayout` ZeroIdx
               boundAcc = cvtAfun1 afun1 `AST.Apply` cvtA acc
               bodyAcc = convertSharingAfun1 config alyt' (noStableSharing : aenv') afun2
                         `AST.Apply`
                         AST.OpenAcc (AST.Avar AST.ZeroIdx)
           in
           AST.Alet (AST.OpenAcc boundAcc) (AST.OpenAcc bodyAcc)
      -- The foreign function's fallback is a closed function and is converted
      -- from scratch with the same sharing configuration.
      Aforeign ff afun acc
        -> let a = recoverAccSharing config
               e = recoverExpSharing config
               f = floatOutAcc config
           in
           AST.Aforeign ff (convertAfun a e f afun) (cvtA acc)
      Acond b acc1 acc2 -> AST.Acond (cvtE b) (cvtA acc1) (cvtA acc2)
      Awhile pred iter init -> AST.Awhile (cvtAfun1 pred) (cvtAfun1 iter) (cvtA init)
      Atuple arrs -> AST.Atuple (convertSharingAtuple config alyt aenv' arrs)
      Aprj ix a -> AST.Aprj ix (cvtA a)
      Use array -> AST.Use (fromArr array)
      Unit e -> AST.Unit (cvtE e)
      Generate sh f -> AST.Generate (cvtE sh) (cvtF1 f)
      Reshape e acc -> AST.Reshape (cvtE e) (cvtA acc)
      Replicate ix acc -> mkReplicate (cvtE ix) (cvtA acc)
      Slice acc ix -> mkIndex (cvtA acc) (cvtE ix)
      Map f acc -> AST.Map (cvtF1 f) (cvtA acc)
      ZipWith f acc1 acc2 -> AST.ZipWith (cvtF2 f) (cvtA acc1) (cvtA acc2)
      Fold f e acc -> AST.Fold (cvtF2 f) (cvtE e) (cvtA acc)
      Fold1 f acc -> AST.Fold1 (cvtF2 f) (cvtA acc)
      FoldSeg f e acc1 acc2 -> AST.FoldSeg (cvtF2 f) (cvtE e) (cvtA acc1) (cvtA acc2)
      Fold1Seg f acc1 acc2 -> AST.Fold1Seg (cvtF2 f) (cvtA acc1) (cvtA acc2)
      Scanl f e acc -> AST.Scanl (cvtF2 f) (cvtE e) (cvtA acc)
      Scanl' f e acc -> AST.Scanl' (cvtF2 f) (cvtE e) (cvtA acc)
      Scanl1 f acc -> AST.Scanl1 (cvtF2 f) (cvtA acc)
      Scanr f e acc -> AST.Scanr (cvtF2 f) (cvtE e) (cvtA acc)
      Scanr' f e acc -> AST.Scanr' (cvtF2 f) (cvtE e) (cvtA acc)
      Scanr1 f acc -> AST.Scanr1 (cvtF2 f) (cvtA acc)
      Permute f dftAcc perm acc -> AST.Permute (cvtF2 f) (cvtA dftAcc) (cvtF1 perm) (cvtA acc)
      Backpermute newDim perm acc -> AST.Backpermute (cvtE newDim) (cvtF1 perm) (cvtA acc)
      -- The source array arguments are passed to the stencil conversion
      -- functions purely to fix the stencil's type variables.
      Stencil stencil boundary acc
        -> AST.Stencil (convertSharingStencilFun1 config acc alyt aenv' stencil)
                       (convertBoundary boundary)
                       (cvtA acc)
      Stencil2 stencil bndy1 acc1 bndy2 acc2
        -> AST.Stencil2 (convertSharingStencilFun2 config acc1 acc2 alyt aenv' stencil)
                        (convertBoundary bndy1)
                        (cvtA acc1)
                        (convertBoundary bndy2)
                        (cvtA acc2)
-- | Convert a unary array function with sharing information into a de Bruijn
-- 'OpenAfun'. The HOAS function is applied to 'undefined' — per the Note on
-- traversing functions, the argument tag was already embedded during Phase
-- One, so the argument itself is never inspected here.
convertSharingAfun1
    :: forall aenv a b. (Arrays a, Arrays b)
    => Config
    -> Layout aenv aenv
    -> [StableSharingAcc]
    -> (Acc a -> ScopedAcc b)
    -> OpenAfun aenv (a -> b)
convertSharingAfun1 config alyt aenv f
  = Alam (Abody (convertSharingAcc config alyt' aenv body))
  where
    alyt' = incLayout alyt `PushLayout` ZeroIdx
    body = f undefined
-- | Convert an array tuple with sharing information, converting each
-- component with 'convertSharingAcc' under the same layout and environment.
convertSharingAtuple
    :: forall aenv a.
       Config
    -> Layout aenv aenv
    -> [StableSharingAcc]
    -> Tuple.Atuple ScopedAcc a
    -> Tuple.Atuple (AST.OpenAcc aenv) a
convertSharingAtuple config alyt aenv = cvt
  where
    cvt :: Tuple.Atuple ScopedAcc a' -> Tuple.Atuple (AST.OpenAcc aenv) a'
    cvt NilAtup = NilAtup
    cvt (SnocAtup t a) = cvt t `SnocAtup` convertSharingAcc config alyt aenv a
-- | Convert a boundary condition from the surface ('Elt') type to its
-- representation type. Only the payload of 'Constant' needs converting.
convertBoundary :: Elt e => Boundary e -> Boundary (EltRepr e)
convertBoundary bndy =
  case bndy of
    Clamp      -> Clamp
    Mirror     -> Mirror
    Wrap       -> Wrap
    Constant e -> Constant (fromElt e)
-- Smart constructors to represent AST forms
--
-- | Build an 'AST.Slice' node. The 'undefined :: slix' value exists only to
-- name the slice type for 'sliceIndex'; it is never evaluated.
mkIndex :: forall slix e aenv. (Slice slix, Elt e)
        => AST.OpenAcc aenv (Array (FullShape slix) e)
        -> AST.Exp aenv slix
        -> AST.PreOpenAcc AST.OpenAcc aenv (Array (SliceShape slix) e)
mkIndex = AST.Slice (sliceIndex slix)
  where
    slix = undefined :: slix
-- | Build an 'AST.Replicate' node. As in 'mkIndex', the 'undefined :: slix'
-- value exists only to name the slice type for 'sliceIndex'.
mkReplicate :: forall slix e aenv. (Slice slix, Elt e)
            => AST.Exp aenv slix
            -> AST.OpenAcc aenv (Array (SliceShape slix) e)
            -> AST.PreOpenAcc AST.OpenAcc aenv (Array (FullShape slix) e)
mkReplicate = AST.Replicate (sliceIndex slix)
  where
    slix = undefined :: slix
-- Scalar functions
-- ----------------
-- | Convert a closed scalar function to de Bruijn form while incorporating
-- sharing information.
--
-- The current design requires all free variables to be bound at the outermost
-- level --- we have no general apply term, and so lambdas are always outermost.
-- In higher-order abstract syntax, this represents an n-ary, polyvariadic
-- function.
--
-- Array-sharing recovery and floating are disabled in the 'Config' since we
-- are converting a purely scalar function.
convertFun :: Function f => Bool -> f -> AST.Fun () (FunctionR f)
convertFun shareExp =
  let config = Config False shareExp False
  in convert config EmptyLayout
-- | Class of n-ary HOAS scalar functions convertible to typed de Bruijn form;
-- the scalar analogue of 'Afunction'. 'FunctionR' computes the corresponding
-- plain (unwrapped) function type.
class Function f where
  type FunctionR f
  convert :: Config -> Layout env env -> f -> AST.OpenFun env () (FunctionR f)
-- Inductive case: peel one 'Exp' argument off. The argument is replaced by a
-- tagged placeholder whose tag is the current environment size, and the
-- layout is extended with 'ZeroIdx' for the new binder.
instance (Elt a, Function r) => Function (Exp a -> r) where
  type FunctionR (Exp a -> r) = a -> FunctionR r
  --
  convert config lyt f
    = let x = Exp $ Tag (sizeLayout lyt)
          lyt' = incLayout lyt `PushLayout` ZeroIdx
      in
      Lam $ convert config lyt' (f x)
-- Base case: no more arguments; convert the body. The tags of all enclosing
-- lambda-bound variables are [lvl-1 .. 0], outermost binder first.
instance Elt b => Function (Exp b) where
  type FunctionR (Exp b) = b
  --
  convert config lyt body
    = let lvl = sizeLayout lyt
          vars = [lvl-1, lvl-2 .. 0]
      in
      Body $ convertOpenExp config lvl vars lyt body
-- Scalar expressions
-- ------------------
-- | Convert a closed scalar expression to de Bruijn form while incorporating
-- sharing information.
--
-- Array sharing recovery and floating are irrelevant here, so only the
-- scalar-expression flag is honoured in the 'Config'.
convertExp
    :: Elt e
    => Bool -- ^ recover sharing of scalar expressions ?
    -> Exp e -- ^ expression to be converted
    -> AST.Exp () e
convertExp shareExp exp
  = let config = Config False shareExp False
    in
    convertOpenExp config 0 [] EmptyLayout exp
-- | Convert an open scalar expression to de Bruijn form while incorporating
-- sharing information: run sharing recovery ('recoverSharingExp') and then
-- the third phase ('convertSharingExp') with the resulting initial sharing
-- environment. The array environment is empty.
convertOpenExp
    :: Elt e
    => Config
    -> Level -- level of currently bound scalar variables
    -> [Level] -- tags of bound scalar variables
    -> Layout env env
    -> Exp e
    -> AST.OpenExp env () e
convertOpenExp config lvl fvar lyt exp
  = let (sharingExp, initialEnv) = recoverSharingExp config lvl fvar exp
    in
    convertSharingExp config lyt EmptyLayout initialEnv [] sharingExp
-- | Convert an open expression with given environment layouts and sharing information into
-- de Bruijn form while recovering sharing at the same time (by introducing appropriate let
-- bindings). The latter implements the third phase of sharing recovery.
--
-- The sharing environments 'env' and 'aenv' keep track of all currently bound sharing variables,
-- keeping them in reverse chronological order (outermost variable is at the end of the list).
--
convertSharingExp
    :: forall t env aenv. Elt t
    => Config
    -> Layout env env -- scalar environment
    -> Layout aenv aenv -- array environment
    -> [StableSharingExp] -- currently bound sharing variables of expressions
    -> [StableSharingAcc] -- currently bound sharing variables of array computations
    -> ScopedExp t -- expression to be converted
    -> AST.OpenExp env aenv t
convertSharingExp config lyt alyt env aenv exp@(ScopedExp lams _) = cvt exp
  where
    -- scalar environment with any lambda bound variables this expression is rooted in
    env' = lams ++ env
    -- Scalar analogue of 'convertSharingAcc': variables are looked up in the
    -- sharing environment, 'LetSharing' becomes 'AST.Let', and ordinary nodes
    -- are converted constructor by constructor.
    cvt :: Elt t' => ScopedExp t' -> AST.OpenExp env aenv t'
    cvt (ScopedExp _ (VarSharing se))
      | Just i <- findIndex (matchStableExp se) env'
      = AST.Var (prjIdx (ctxt ++ "; i = " ++ show i) i lyt)
      | null env'
      = error $ "Cyclic definition of a value of type 'Exp' (sa = " ++ show (hashStableNameHeight se) ++ ")"
      | otherwise
      = $internalError "convertSharingExp" err
      where
        ctxt = "shared 'Exp' tree with stable name " ++ show (hashStableNameHeight se)
        err = "inconsistent valuation @ " ++ ctxt ++ ";\n env' = " ++ show env'
    cvt (ScopedExp _ (LetSharing se@(StableSharingExp _ boundExp) bodyExp))
      = let lyt' = incLayout lyt `PushLayout` ZeroIdx
        in
        AST.Let (cvt (ScopedExp [] boundExp)) (convertSharingExp config lyt' alyt (se:env') aenv bodyExp)
    cvt (ScopedExp _ (ExpSharing _ pexp))
      = case pexp of
          Tag i -> AST.Var (prjIdx ("de Bruijn conversion tag " ++ show i) i lyt)
          Const v -> AST.Const (fromElt v)
          Tuple tup -> AST.Tuple (cvtT tup)
          Prj idx e -> AST.Prj idx (cvt e)
          IndexNil -> AST.IndexNil
          IndexCons ix i -> AST.IndexCons (cvt ix) (cvt i)
          IndexHead i -> AST.IndexHead (cvt i)
          IndexTail ix -> AST.IndexTail (cvt ix)
          IndexAny -> AST.IndexAny
          ToIndex sh ix -> AST.ToIndex (cvt sh) (cvt ix)
          FromIndex sh e -> AST.FromIndex (cvt sh) (cvt e)
          Cond e1 e2 e3 -> AST.Cond (cvt e1) (cvt e2) (cvt e3)
          While p it i -> AST.While (cvtFun1 p) (cvtFun1 it) (cvt i)
          PrimConst c -> AST.PrimConst c
          PrimApp f e -> cvtPrimFun f (cvt e)
          Index a e -> AST.Index (cvtA a) (cvt e)
          LinearIndex a i -> AST.LinearIndex (cvtA a) (cvt i)
          Shape a -> AST.Shape (cvtA a)
          ShapeSize e -> AST.ShapeSize (cvt e)
          Intersect sh1 sh2 -> AST.Intersect (cvt sh1) (cvt sh2)
          Foreign ff f e -> AST.Foreign ff (convertFun (recoverExpSharing config) f) (cvt e)
    cvtA :: Arrays a => ScopedAcc a -> AST.OpenAcc aenv a
    cvtA = convertSharingAcc config alyt aenv
    cvtT :: Tuple.Tuple ScopedExp tup -> Tuple.Tuple (AST.OpenExp env aenv) tup
    cvtT = convertSharingTuple config lyt alyt env' aenv
    -- As elsewhere, applying the HOAS function to 'undefined' is safe: the
    -- argument tag was already embedded during Phase One.
    cvtFun1 :: (Elt a, Elt b) => (Exp a -> ScopedExp b) -> AST.OpenFun env aenv (a -> b)
    cvtFun1 f = Lam (Body (convertSharingExp config lyt' alyt env' aenv body))
      where
        lyt' = incLayout lyt `PushLayout` ZeroIdx
        body = f undefined
    -- Push primitive function applications down through let bindings so that
    -- they are adjacent to their arguments. It looks a bit nicer this way.
    --
    cvtPrimFun :: (Elt a, Elt r)
               => AST.PrimFun (a -> r) -> AST.OpenExp env' aenv' a -> AST.OpenExp env' aenv' r
    cvtPrimFun f e = case e of
      AST.Let bnd body -> AST.Let bnd (cvtPrimFun f body)
      x -> AST.PrimApp f x
-- | Convert a tuple expression, converting each component with
-- 'convertSharingExp' under the same layouts and sharing environments.
--
convertSharingTuple
    :: Config
    -> Layout env env
    -> Layout aenv aenv
    -> [StableSharingExp] -- currently bound scalar sharing-variables
    -> [StableSharingAcc] -- currently bound array sharing-variables
    -> Tuple.Tuple ScopedExp t
    -> Tuple.Tuple (AST.OpenExp env aenv) t
convertSharingTuple config lyt alyt env aenv tup =
  case tup of
    NilTup -> NilTup
    SnocTup t e -> convertSharingTuple config lyt alyt env aenv t
                   `SnocTup` convertSharingExp config lyt alyt env aenv e
-- | Convert a unary function with sharing information. The scalar layout is
-- built from scratch with a single 'ZeroIdx' entry for the lambda-bound
-- argument; the function is applied to a dummy since the argument's 'Tag'
-- was already embedded during Phase One.
--
convertSharingFun1
    :: forall a b aenv. (Elt a, Elt b)
    => Config
    -> Layout aenv aenv
    -> [StableSharingAcc] -- currently bound array sharing-variables
    -> (Exp a -> ScopedExp b)
    -> AST.Fun aenv (a -> b)
convertSharingFun1 config alyt aenv f = Lam (Body openF)
  where
    a = Exp undefined -- the 'tag' was already embedded in Phase 1
    lyt = EmptyLayout
          `PushLayout`
          (ZeroIdx :: Idx ((), a) a)
    openF = convertSharingExp config lyt alyt [] aenv (f a)
-- | Convert a binary function with sharing information. The scalar layout
-- contains the two lambda-bound arguments: the first argument at de Bruijn
-- index one ('SuccIdx ZeroIdx') and the second, innermost argument at index
-- zero, matching the nesting of the two 'Lam's.
--
convertSharingFun2
    :: forall a b c aenv. (Elt a, Elt b, Elt c)
    => Config
    -> Layout aenv aenv
    -> [StableSharingAcc] -- currently bound array sharing-variables
    -> (Exp a -> Exp b -> ScopedExp c)
    -> AST.Fun aenv (a -> b -> c)
convertSharingFun2 config alyt aenv f = Lam (Lam (Body openF))
  where
    a = Exp undefined
    b = Exp undefined
    lyt = EmptyLayout
          `PushLayout`
          (SuccIdx ZeroIdx :: Idx (((), a), b) a)
          `PushLayout`
          (ZeroIdx :: Idx (((), a), b) b)
    openF = convertSharingExp config lyt alyt [] aenv (f a b)
-- | Convert a unary stencil function. The function operates on the stencil's
-- representation type; 'stencilPrj' rebuilds the surface-level stencil view
-- from the (never-evaluated) representation placeholder. The first 'ScopedAcc'
-- argument is ignored at runtime — it only fixes the type variables.
--
convertSharingStencilFun1
    :: forall sh a stencil b aenv. (Elt a, Stencil sh a stencil, Elt b)
    => Config
    -> ScopedAcc (Array sh a) -- just passed to fix the type variables
    -> Layout aenv aenv
    -> [StableSharingAcc] -- currently bound array sharing-variables
    -> (stencil -> ScopedExp b)
    -> AST.Fun aenv (StencilRepr sh stencil -> b)
convertSharingStencilFun1 config _ alyt aenv stencilFun = Lam (Body openStencilFun)
  where
    stencil = Exp undefined :: Exp (StencilRepr sh stencil)
    lyt = EmptyLayout
          `PushLayout`
          (ZeroIdx :: Idx ((), StencilRepr sh stencil)
                          (StencilRepr sh stencil))
    body = stencilFun (stencilPrj (undefined::sh) (undefined::a) stencil)
    openStencilFun = convertSharingExp config lyt alyt [] aenv body
-- | Convert a binary stencil function; the two-argument analogue of
-- 'convertSharingStencilFun1'. The first stencil argument sits at de Bruijn
-- index one and the second at index zero, matching the nested 'Lam's. The
-- two 'ScopedAcc' arguments are ignored at runtime — they only fix types.
--
convertSharingStencilFun2
    :: forall sh a b stencil1 stencil2 c aenv.
       (Elt a, Stencil sh a stencil1,
        Elt b, Stencil sh b stencil2,
        Elt c)
    => Config
    -> ScopedAcc (Array sh a) -- just passed to fix the type variables
    -> ScopedAcc (Array sh b) -- just passed to fix the type variables
    -> Layout aenv aenv
    -> [StableSharingAcc] -- currently bound array sharing-variables
    -> (stencil1 -> stencil2 -> ScopedExp c)
    -> AST.Fun aenv (StencilRepr sh stencil1 -> StencilRepr sh stencil2 -> c)
convertSharingStencilFun2 config _ _ alyt aenv stencilFun = Lam (Lam (Body openStencilFun))
  where
    stencil1 = Exp undefined :: Exp (StencilRepr sh stencil1)
    stencil2 = Exp undefined :: Exp (StencilRepr sh stencil2)
    lyt = EmptyLayout
          `PushLayout`
          (SuccIdx ZeroIdx :: Idx (((), StencilRepr sh stencil1),
                                   StencilRepr sh stencil2)
                                  (StencilRepr sh stencil1))
          `PushLayout`
          (ZeroIdx :: Idx (((), StencilRepr sh stencil1),
                           StencilRepr sh stencil2)
                          (StencilRepr sh stencil2))
    body = stencilFun (stencilPrj (undefined::sh) (undefined::a) stencil1)
                      (stencilPrj (undefined::sh) (undefined::b) stencil2)
    openStencilFun = convertSharingExp config lyt alyt [] aenv body
-- Sharing recovery
-- ================
-- Sharing recovery proceeds in two phases:
--
-- /Phase One: build the occurrence map/
--
-- This is a top-down traversal of the AST that computes a map from AST nodes to the number of
-- occurrences of that AST node in the overall Accelerate program. An occurrence count of two or
-- more indicates sharing.
--
-- IMPORTANT: To avoid unfolding the sharing, we do not descend into subtrees that we have
-- previously encountered. Hence, the complexity is proportional to the number of nodes in the
-- tree /with/ sharing. Consequently, the occurrence count is that in the tree with sharing
-- as well.
--
-- During computation of the occurrences, the tree is annotated with stable names on every node
-- using 'AccSharing' constructors and all but the first occurrence of shared subtrees are pruned
-- using 'AvarSharing' constructors (see 'SharingAcc' below). This phase is impure as it is based
-- on stable names.
--
-- We use a hash table (instead of 'Data.Map') as computing stable names forces us to live in IO
-- anyway. Once, the computation of occurrence counts is complete, we freeze the hash table into
-- a 'Data.Map'.
--
-- (Implemented by 'makeOccMap*'.)
--
-- /Phase Two: determine scopes and inject sharing information/
--
-- This is a bottom-up traversal that determines the scope for every binding to be introduced
-- to share a subterm. It uses the occurrence map to determine, for every shared subtree, the
-- lowest AST node at which the binding for that shared subtree can be placed (using a
-- 'AletSharing' constructor)— it's the meet of all the shared subtree occurrences.
--
-- The second phase is also replacing the first occurrence of each shared subtree with a
-- 'AvarSharing' node and floats the shared subtree up to its binding point.
--
-- (Implemented by 'determineScopes*'.)
--
-- /Sharing recovery for expressions/
--
-- We recover sharing for each expression (including function bodies) independently of any other
-- expression — i.e., we cannot share scalar expressions across array computations. Hence, during
-- Phase One, we mark all scalar expression nodes with a stable name and compute one occurrence map
-- for every scalar expression (including functions) that occurs in an array computation. These
-- occurrence maps are added to the root of scalar expressions using 'RootExp'.
--
-- NB: We do not need to worry sharing recovery will try to float a shared subexpression past a
-- binder that occurs in that subexpression. Why? Otherwise, the binder would already occur
-- out of scope in the original source program.
--
-- /Lambda bound variables/
--
-- During sharing recovery, lambda bound variables appear in the form of 'Atag' and 'Tag' data
-- constructors. The tag values are determined during Phase One of sharing recovery by computing
-- the /level/ of each variable at its binding occurrence. The level at the root of the AST is 0
-- and increases by one with each lambda on each path through the AST.
-- Stable names
-- ------------
-- Opaque stable name for AST nodes — used to key the occurrence map.
--
-- | Opaque stable name for an AST node of some constructor family 'c',
-- hiding the node's payload type 't' existentially so names of differently
-- typed nodes can live in one map.
data StableASTName c where
  StableASTName :: (Typeable c, Typeable t) => StableName (c t) -> StableASTName c
instance Show (StableASTName c) where
  show (StableASTName sn) = show $ hashStableName sn
-- Equality must first recover the hidden payload type with 'gcast'; names of
-- nodes with different payload types are never equal.
instance Eq (StableASTName c) where
  StableASTName sn1 == StableASTName sn2
    | Just sn1' <- gcast sn1 = sn1' == sn2
    | otherwise = False
instance Hashable (StableASTName c) where
  hashWithSalt s (StableASTName sn) = hashWithSalt s sn
-- | Make a stable name for an AST node. The node is forced to WHNF first so
-- the stable name identifies the evaluated node, not an intermediate thunk.
makeStableAST :: c t -> IO (StableName (c t))
makeStableAST e = e `seq` makeStableName e
-- Stable name for an AST node including the height of the AST representing the array computation.
--
data StableNameHeight t = StableNameHeight (StableName t) Int
-- Equality considers the stable name only; the height is derived metadata.
instance Eq (StableNameHeight t) where
  (StableNameHeight sn1 _) == (StableNameHeight sn2 _) = sn1 == sn2
-- | True if the first AST is strictly taller than the second.
higherSNH :: StableNameHeight t1 -> StableNameHeight t2 -> Bool
StableNameHeight _ h1 `higherSNH` StableNameHeight _ h2 = h1 > h2
hashStableNameHeight :: StableNameHeight t -> Int
hashStableNameHeight (StableNameHeight sn _) = hashStableName sn
-- Mutable occurrence map
-- ----------------------
-- Hash table keyed on the stable names of array computations.
--
type HashTable key val = Hash.BasicHashTable key val
type ASTHashTable c v = HashTable (StableASTName c) v
-- Mutable hashtable version of the occurrence map, which associates each AST node with an
-- occurrence count and the height of the AST.
--
type OccMapHash c = ASTHashTable c (Int, Int)
-- Create a new hash table keyed on AST nodes.
--
newASTHashTable :: IO (ASTHashTable c v)
newASTHashTable = Hash.new
-- Enter one AST node occurrence into an occurrence map. Returns 'Just h' if this is a repeated
-- occurrence and the height of the repeatedly occurring AST is 'h'.
--
-- If this is the first occurrence, the 'height' *argument* must provide the height of the AST;
-- otherwise, the height will be *extracted* from the occurrence map. In the latter case, this
-- function yields the AST height.
--
enterOcc :: OccMapHash c -> StableASTName c -> Int -> IO (Maybe Int)
enterOcc occMap sa height
  = do
      entry <- Hash.lookup occMap sa
      case entry of
        -- First sighting: record a count of 1 with the caller-supplied height.
        Nothing -> Hash.insert occMap sa (1 , height) >> return Nothing
        -- Repeat sighting: bump the count, keep the originally stored height.
        Just (n, heightS) -> Hash.insert occMap sa (n + 1, heightS) >> return (Just heightS)
-- Immutable occurrence map
-- ------------------------
-- Immutable version of the occurrence map (storing the occurrence count only, not the height). We
-- use the 'StableName' hash to index an 'IntMap' and disambiguate 'StableName's with identical
-- hashes explicitly, storing them in a list in the 'IntMap'.
--
type OccMap c = IntMap.IntMap [(StableASTName c, Int)]
-- Turn a mutable into an immutable occurrence map.
--
freezeOccMap :: OccMapHash c -> IO (OccMap c)
freezeOccMap oc
  = do
      ocl <- Hash.toList oc
      traceChunk "OccMap" (show ocl)
      -- Group entries by stable-name hash; 'head' is safe because 'groupBy'
      -- only ever produces non-empty groups.
      return . IntMap.fromList
             . map (\kvs -> (key (head kvs), kvs))
             . groupBy sameKey
             . map dropHeight
             $ ocl
  where
    key (StableASTName sn, _) = hashStableName sn
    sameKey kv1 kv2 = key kv1 == key kv2
    dropHeight (k, (cnt, _)) = (k, cnt)
-- Look up the occurrence map keyed by array computations using a stable name. If the key does
-- not exist in the map, return an occurrence count of '1'.
--
lookupWithASTName :: OccMap c -> StableASTName c -> Int
lookupWithASTName oc sa@(StableASTName sn)
  = fromMaybe 1 $ IntMap.lookup (hashStableName sn) oc >>= Prelude.lookup sa
-- Look up the occurrence map keyed by array computations using a sharing array computation. If
-- the key does not exist in the map, return an occurrence count of '1'.
--
lookupWithSharingAcc :: OccMap Acc -> StableSharingAcc -> Int
lookupWithSharingAcc oc (StableSharingAcc (StableNameHeight sn _) _)
  = lookupWithASTName oc (StableASTName sn)
-- Look up the occurrence map keyed by scalar expressions using a sharing expression. If
-- the key does not exist in the map, return an occurrence count of '1'.
--
lookupWithSharingExp :: OccMap Exp -> StableSharingExp -> Int
lookupWithSharingExp oc (StableSharingExp (StableNameHeight sn _) _)
  = lookupWithASTName oc (StableASTName sn)
-- Stable 'Acc' nodes
-- ------------------
-- Stable name for 'Acc' nodes including the height of the AST.
--
type StableAccName arrs = StableNameHeight (Acc arrs)
-- Interleave sharing annotations into an array computation AST. Subtrees can be marked as being
-- represented by variable (binding a shared subtree) using 'AvarSharing' and as being prefixed by
-- a let binding (for a shared subtree) using 'AletSharing'.
--
data SharingAcc acc exp arrs where
  AvarSharing :: Arrays arrs
              => StableAccName arrs -> SharingAcc acc exp arrs
  AletSharing :: StableSharingAcc -> acc arrs -> SharingAcc acc exp arrs
  AccSharing :: Arrays arrs
             => StableAccName arrs -> PreAcc acc exp arrs -> SharingAcc acc exp arrs
-- Array expression with sharing but shared values have not been scoped; i.e. no let bindings. If
-- the expression is rooted in a function, the list contains the tags of the variables bound by the
-- immediate surrounding lambdas.
data UnscopedAcc t = UnscopedAcc [Int] (SharingAcc UnscopedAcc RootExp t)
-- Array expression with sharing. For expressions rooted in functions the list holds a sorted
-- environment corresponding to the variables bound in the immediate surrounding lambdas.
data ScopedAcc t = ScopedAcc [StableSharingAcc] (SharingAcc ScopedAcc ScopedExp t)
-- Stable name for an array computation associated with its sharing-annotated version.
--
data StableSharingAcc where
  StableSharingAcc :: Arrays arrs
                   => StableAccName arrs
                   -> SharingAcc ScopedAcc ScopedExp arrs
                   -> StableSharingAcc
instance Show StableSharingAcc where
  show (StableSharingAcc sn _) = show $ hashStableNameHeight sn
-- Equality recovers the hidden array type with 'gcast' and then compares the
-- underlying stable names; differently typed computations are never equal.
instance Eq StableSharingAcc where
  StableSharingAcc sn1 _ == StableSharingAcc sn2 _
    | Just sn1' <- gcast sn1 = sn1' == sn2
    | otherwise = False
-- | True if the first computation's AST is strictly taller than the second's.
higherSSA :: StableSharingAcc -> StableSharingAcc -> Bool
StableSharingAcc sn1 _ `higherSSA` StableSharingAcc sn2 _ = sn1 `higherSNH` sn2
-- Test whether the given stable name matches an array computation with sharing.
--
matchStableAcc :: Typeable arrs => StableAccName arrs -> StableSharingAcc -> Bool
matchStableAcc sn1 (StableSharingAcc sn2 _)
  | Just sn1' <- gcast sn1 = sn1' == sn2
  | otherwise = False
-- Dummy entry for environments to be used for unused variables. The
-- 'unsafePerformIO' only allocates a fresh stable name once; the payload is
-- 'undefined' and must never be forced.
--
noStableAccName :: StableAccName arrs
noStableAccName = unsafePerformIO $ StableNameHeight <$> makeStableName undefined <*> pure 0
-- Stable 'Exp' nodes
-- ------------------
-- Stable name for 'Exp' nodes including the height of the AST.
--
type StableExpName t = StableNameHeight (Exp t)
-- Interleave sharing annotations into a scalar expressions AST in the same manner as 'SharingAcc'
-- does for array computations.
--
data SharingExp (acc :: * -> *) exp t where
  VarSharing :: Elt t
             => StableExpName t -> SharingExp acc exp t
  LetSharing :: StableSharingExp -> exp t -> SharingExp acc exp t
  ExpSharing :: Elt t
             => StableExpName t -> PreExp acc exp t -> SharingExp acc exp t
-- Specifies a scalar expression AST with sharing annotations but no scoping; i.e. no LetSharing
-- constructors. If the expression is rooted in a function, the list contains the tags of the
-- variables bound by the immediate surrounding lambdas.
data UnscopedExp t = UnscopedExp [Int] (SharingExp UnscopedAcc UnscopedExp t)
-- Specifies a scalar expression AST with sharing. For expressions rooted in functions the list
-- holds a sorted environment corresponding to the variables bound in the immediate surrounding
-- lambdas.
data ScopedExp t = ScopedExp [StableSharingExp] (SharingExp ScopedAcc ScopedExp t)
-- Expressions rooted in 'Acc' computations.
--
-- * When counting occurrences, the root of every expression embedded in an 'Acc' is annotated by
-- an occurrence map for that one expression (excluding any subterms that are rooted in embedded
-- 'Acc's.)
--
data RootExp t = RootExp (OccMap Exp) (UnscopedExp t)
-- Stable name for an expression associated with its sharing-annotated version.
--
data StableSharingExp where
  StableSharingExp :: Elt t => StableExpName t -> SharingExp ScopedAcc ScopedExp t -> StableSharingExp
instance Show StableSharingExp where
  show (StableSharingExp sn _) = show $ hashStableNameHeight sn
-- Equality recovers the hidden element type with 'gcast' and then compares
-- the underlying stable names; differently typed expressions are never equal.
instance Eq StableSharingExp where
  StableSharingExp sn1 _ == StableSharingExp sn2 _
    | Just sn1' <- gcast sn1 = sn1' == sn2
    | otherwise = False
-- | True if the first expression's AST is strictly taller than the second's.
higherSSE :: StableSharingExp -> StableSharingExp -> Bool
StableSharingExp sn1 _ `higherSSE` StableSharingExp sn2 _ = sn1 `higherSNH` sn2
-- Test whether the given stable name matches an expression with sharing.
--
matchStableExp :: Typeable t => StableExpName t -> StableSharingExp -> Bool
matchStableExp sn1 (StableSharingExp sn2 _)
  | Just sn1' <- gcast sn1 = sn1' == sn2
  | otherwise = False
-- Dummy entry for environments to be used for unused variables. As with
-- 'noStableAccName', the 'undefined' payload must never be forced.
--
noStableExpName :: StableExpName t
noStableExpName = unsafePerformIO $ StableNameHeight <$> makeStableName undefined <*> pure 0
-- Occurrence counting
-- ===================
-- Compute the 'Acc' occurrence map, marks all nodes (both 'Acc' and 'Exp' nodes) with stable names,
-- and drop repeated occurrences of shared 'Acc' and 'Exp' subtrees (Phase One).
--
-- We compute a single 'Acc' occurrence map for the whole AST, but one 'Exp' occurrence map for each
-- sub-expression rooted in an 'Acc' operation. This is as we cannot float 'Exp' subtrees across
-- 'Acc' operations, but we can float 'Acc' subtrees out of 'Exp' expressions.
--
-- Note [Traversing functions and side effects]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- We need to descend into function bodies to build the 'OccMap' with all occurrences in the
-- function bodies. Due to the side effects in the construction of the occurrence map and, more
-- importantly, the dependence of the second phase on /global/ occurrence information, we may not
-- delay the body traversals by putting them under a lambda. Hence, we apply each function, to
-- traverse its body and use a /dummy abstraction/ of the result.
--
-- For example, given a function 'f', we traverse 'f (Tag 0)', which yields a transformed body 'e'.
-- As the result of the traversal of the overall function, we use 'const e'. Hence, it is crucial
-- that the 'Tag' supplied during the initial traversal is already the one required by the HOAS to
-- de Bruijn conversion in 'convertSharingAcc' — any subsequent application of 'const e' will only
-- yield 'e' with the embedded 'Tag 0' of the original application. During sharing recovery, we
-- float /all/ free variables ('Atag' and 'Tag') out to construct the initial environment for
-- producing de Bruijn indices, which replaces them by 'AvarSharing' or 'VarSharing' nodes. Hence,
-- the tag values only serve the purpose of determining the ordering in that initial environment.
-- They are /not/ directly used to compute the de Brujin indices.
--
-- | Phase One entry point for array computations: annotate the AST with
-- stable names, count occurrences into a fresh mutable hash table, and
-- return the annotated tree together with the frozen (immutable) 'OccMap'.
makeOccMapAcc
    :: Typeable arrs
    => Config
    -> Level
    -> Acc arrs
    -> IO (UnscopedAcc arrs, OccMap Acc)
makeOccMapAcc config lvl acc = do
  traceLine "makeOccMapAcc" "Enter"
  accOccMap <- newASTHashTable
  (acc', _) <- makeOccMapSharingAcc config accOccMap lvl acc
  frozenAccOccMap <- freezeOccMap accOccMap
  traceLine "makeOccMapAcc" "Exit"
  return (acc', frozenAccOccMap)
-- | Generate occurrence information for an array computation (Phase One of
--   sharing recovery).  Every node is entered into the given occurrence map,
--   keyed by its stable name; the result is the computation in unscoped
--   sharing form together with its AST height.
--
--   NOTE(review): this relies on 'mfix' laziness — the @height@ handed to
--   'enterOcc' is the height of the *current* node, which is only computed by
--   the remainder of the traversal; it must not be forced inside 'enterOcc'.
makeOccMapSharingAcc
    :: Typeable arrs
    => Config
    -> OccMapHash Acc
    -> Level
    -> Acc arrs
    -> IO (UnscopedAcc arrs, Int)
makeOccMapSharingAcc config accOccMap = traverseAcc
  where
    -- Traversals for the other syntactic categories; they all share the same
    -- array-level occurrence map 'accOccMap'.
    traverseFun1 :: (Elt a, Typeable b) => Level -> (Exp a -> Exp b) -> IO (Exp a -> RootExp b, Int)
    traverseFun1 = makeOccMapFun1 config accOccMap

    traverseFun2 :: (Elt a, Elt b, Typeable c)
                 => Level
                 -> (Exp a -> Exp b -> Exp c)
                 -> IO (Exp a -> Exp b -> RootExp c, Int)
    traverseFun2 = makeOccMapFun2 config accOccMap

    traverseAfun1 :: (Arrays a, Typeable b) => Level -> (Acc a -> Acc b) -> IO (Acc a -> UnscopedAcc b, Int)
    traverseAfun1 = makeOccMapAfun1 config accOccMap

    traverseExp :: Typeable e => Level -> Exp e -> IO (RootExp e, Int)
    traverseExp = makeOccMapExp config accOccMap

    -- Workhorse: traverse one array computation, returning the rebuilt term
    -- and its height.  'lvl' is the number of enclosing (array) lambdas.
    traverseAcc :: forall arrs. Typeable arrs => Level -> Acc arrs -> IO (UnscopedAcc arrs, Int)
    traverseAcc lvl acc@(Acc pacc)
      = mfix $ \ ~(_, height) -> do
          -- Compute stable name and enter it into the occurrence map
          --
          sn                         <- makeStableAST acc
          heightIfRepeatedOccurrence <- enterOcc accOccMap (StableASTName sn) height

          traceLine (showPreAccOp pacc) $ do
            let hash = show (hashStableName sn)
            case heightIfRepeatedOccurrence of
              Just height -> "REPEATED occurrence (sn = " ++ hash ++ "; height = " ++ show height ++ ")"
              Nothing     -> "first occurrence (sn = " ++ hash ++ ")"

          -- Reconstruct the computation in shared form.
          --
          -- In case of a repeated occurrence, the height comes from the occurrence map; otherwise
          -- it is computed by the traversal function passed in 'newAcc'. See also 'enterOcc'.
          --
          -- NB: This function can only be used in the case alternatives below; outside of the
          -- case we cannot discharge the 'Arrays arrs' constraint.
          --
          let reconstruct :: Arrays arrs
                          => IO (PreAcc UnscopedAcc RootExp arrs, Int)
                          -> IO (UnscopedAcc arrs, Int)
              reconstruct newAcc
                = case heightIfRepeatedOccurrence of
                    Just height | recoverAccSharing config
                      -> return (UnscopedAcc [] (AvarSharing (StableNameHeight sn height)), height)
                    _ -> do (acc, height) <- newAcc
                            return (UnscopedAcc [] (AccSharing (StableNameHeight sn height) acc), height)

          case pacc of
            Atag i                  -> reconstruct $ return (Atag i, 0)         -- height is 0!
            Pipe afun1 afun2 acc    -> reconstruct $ do
                                         (afun1', h1) <- traverseAfun1 lvl afun1
                                         (afun2', h2) <- traverseAfun1 lvl afun2
                                         (acc', h3)   <- traverseAcc lvl acc
                                         return (Pipe afun1' afun2' acc'
                                                , h1 `max` h2 `max` h3 + 1)
            Aforeign ff afun acc    -> reconstruct $ travA (Aforeign ff afun) acc
            Acond e acc1 acc2       -> reconstruct $ do
                                         (e'   , h1) <- traverseExp lvl e
                                         (acc1', h2) <- traverseAcc lvl acc1
                                         (acc2', h3) <- traverseAcc lvl acc2
                                         return (Acond e' acc1' acc2', h1 `max` h2 `max` h3 + 1)
            Awhile pred iter init   -> reconstruct $ do
                                         (pred', h1) <- traverseAfun1 lvl pred
                                         (iter', h2) <- traverseAfun1 lvl iter
                                         (init', h3) <- traverseAcc lvl init
                                         return (Awhile pred' iter' init'
                                                , h1 `max` h2 `max` h3 + 1)
            Atuple tup              -> reconstruct $ do
                                         (tup', h) <- travAtup tup
                                         return (Atuple tup', h)
            Aprj ix a               -> reconstruct $ travA (Aprj ix) a
            Use arr                 -> reconstruct $ return (Use arr, 1)
            Unit e                  -> reconstruct $ do
                                         (e', h) <- traverseExp lvl e
                                         return (Unit e', h + 1)
            Generate e f            -> reconstruct $ do
                                         (e', h1) <- traverseExp lvl e
                                         (f', h2) <- traverseFun1 lvl f
                                         return (Generate e' f', h1 `max` h2 + 1)
            Reshape e acc           -> reconstruct $ travEA Reshape e acc
            Replicate e acc         -> reconstruct $ travEA Replicate e acc
            Slice acc e             -> reconstruct $ travEA (flip Slice) e acc
            Map f acc               -> reconstruct $ do
                                         (f'  , h1) <- traverseFun1 lvl f
                                         (acc', h2) <- traverseAcc lvl acc
                                         return (Map f' acc', h1 `max` h2 + 1)
            ZipWith f acc1 acc2     -> reconstruct $ travF2A2 ZipWith f acc1 acc2
            Fold f e acc            -> reconstruct $ travF2EA Fold f e acc
            Fold1 f acc             -> reconstruct $ travF2A Fold1 f acc
            FoldSeg f e acc1 acc2   -> reconstruct $ do
                                         (f'   , h1) <- traverseFun2 lvl f
                                         (e'   , h2) <- traverseExp lvl e
                                         (acc1', h3) <- traverseAcc lvl acc1
                                         (acc2', h4) <- traverseAcc lvl acc2
                                         return (FoldSeg f' e' acc1' acc2',
                                                 h1 `max` h2 `max` h3 `max` h4 + 1)
            Fold1Seg f acc1 acc2    -> reconstruct $ travF2A2 Fold1Seg f acc1 acc2
            Scanl f e acc           -> reconstruct $ travF2EA Scanl f e acc
            Scanl' f e acc          -> reconstruct $ travF2EA Scanl' f e acc
            Scanl1 f acc            -> reconstruct $ travF2A Scanl1 f acc
            Scanr f e acc           -> reconstruct $ travF2EA Scanr f e acc
            Scanr' f e acc          -> reconstruct $ travF2EA Scanr' f e acc
            Scanr1 f acc            -> reconstruct $ travF2A Scanr1 f acc
            Permute c acc1 p acc2   -> reconstruct $ do
                                         (c'   , h1) <- traverseFun2 lvl c
                                         (p'   , h2) <- traverseFun1 lvl p
                                         (acc1', h3) <- traverseAcc lvl acc1
                                         (acc2', h4) <- traverseAcc lvl acc2
                                         return (Permute c' acc1' p' acc2',
                                                 h1 `max` h2 `max` h3 `max` h4 + 1)
            Backpermute e p acc     -> reconstruct $ do
                                         (e'  , h1) <- traverseExp lvl e
                                         (p'  , h2) <- traverseFun1 lvl p
                                         (acc', h3) <- traverseAcc lvl acc
                                         return (Backpermute e' p' acc', h1 `max` h2 `max` h3 + 1)
            Stencil s bnd acc       -> reconstruct $ do
                                         (s'  , h1) <- makeOccMapStencil1 config accOccMap acc lvl s
                                         (acc', h2) <- traverseAcc lvl acc
                                         return (Stencil s' bnd acc', h1 `max` h2 + 1)
            Stencil2 s bnd1 acc1
                     bnd2 acc2      -> reconstruct $ do
                                         (s'   , h1) <- makeOccMapStencil2 config accOccMap acc1 acc2 lvl s
                                         (acc1', h2) <- traverseAcc lvl acc1
                                         (acc2', h3) <- traverseAcc lvl acc2
                                         return (Stencil2 s' bnd1 acc1' bnd2 acc2',
                                                 h1 `max` h2 `max` h3 + 1)
      where
        -- Traverse a node with one array subterm; height is one above it.
        travA :: Arrays arrs'
              => (UnscopedAcc arrs' -> PreAcc UnscopedAcc RootExp arrs)
              -> Acc arrs' -> IO (PreAcc UnscopedAcc RootExp arrs, Int)
        travA c acc
          = do
              (acc', h) <- traverseAcc lvl acc
              return (c acc', h + 1)

        -- Node with one scalar expression and one array subterm.
        travEA :: (Typeable b, Arrays arrs')
               => (RootExp b -> UnscopedAcc arrs' -> PreAcc UnscopedAcc RootExp arrs)
               -> Exp b -> Acc arrs' -> IO (PreAcc UnscopedAcc RootExp arrs, Int)
        travEA c exp acc
          = do
              (exp', h1) <- traverseExp lvl exp
              (acc', h2) <- traverseAcc lvl acc
              return (c exp' acc', h1 `max` h2 + 1)

        -- Node with one binary scalar function and one array subterm.
        travF2A :: (Elt b, Elt c, Typeable d, Arrays arrs')
                => ((Exp b -> Exp c -> RootExp d) -> UnscopedAcc arrs'
                    -> PreAcc UnscopedAcc RootExp arrs)
                -> (Exp b -> Exp c -> Exp d) -> Acc arrs'
                -> IO (PreAcc UnscopedAcc RootExp arrs, Int)
        travF2A c fun acc
          = do
              (fun', h1) <- traverseFun2 lvl fun
              (acc', h2) <- traverseAcc lvl acc
              return (c fun' acc', h1 `max` h2 + 1)

        -- Node with a binary scalar function, a scalar expression, and an
        -- array subterm (the fold/scan family).
        travF2EA :: (Elt b, Elt c, Typeable d, Typeable e, Arrays arrs')
                 => ((Exp b -> Exp c -> RootExp d) -> RootExp e -> UnscopedAcc arrs' -> PreAcc UnscopedAcc RootExp arrs)
                 -> (Exp b -> Exp c -> Exp d) -> Exp e -> Acc arrs'
                 -> IO (PreAcc UnscopedAcc RootExp arrs, Int)
        travF2EA c fun exp acc
          = do
              (fun', h1) <- traverseFun2 lvl fun
              (exp', h2) <- traverseExp lvl exp
              (acc', h3) <- traverseAcc lvl acc
              return (c fun' exp' acc', h1 `max` h2 `max` h3 + 1)

        -- Node with a binary scalar function and two array subterms.
        travF2A2 :: (Elt b, Elt c, Typeable d, Arrays arrs1, Arrays arrs2)
                 => ((Exp b -> Exp c -> RootExp d) -> UnscopedAcc arrs1 -> UnscopedAcc arrs2 -> PreAcc UnscopedAcc RootExp arrs)
                 -> (Exp b -> Exp c -> Exp d) -> Acc arrs1 -> Acc arrs2
                 -> IO (PreAcc UnscopedAcc RootExp arrs, Int)
        travF2A2 c fun acc1 acc2
          = do
              (fun' , h1) <- traverseFun2 lvl fun
              (acc1', h2) <- traverseAcc lvl acc1
              (acc2', h3) <- traverseAcc lvl acc2
              return (c fun' acc1' acc2', h1 `max` h2 `max` h3 + 1)

        -- Traverse an array tuple; the empty tuple has height 1.
        travAtup :: Tuple.Atuple Acc a
                 -> IO (Tuple.Atuple UnscopedAcc a, Int)
        travAtup NilAtup          = return (NilAtup, 1)
        travAtup (SnocAtup tup a) = do
          (tup', h1) <- travAtup tup
          (a',   h2) <- traverseAcc lvl a
          return (SnocAtup tup' a', h1 `max` h2 + 1)
-- Occurrence analysis for a unary array function: apply it to a fresh 'Atag'
-- carrying the current level, then analyse the resulting body.  The returned
-- function ignores its argument; the free variable's tag is recorded in the
-- 'UnscopedAcc' wrapper instead.
makeOccMapAfun1 :: (Arrays a, Typeable b)
                => Config
                -> OccMapHash Acc
                -> Level
                -> (Acc a -> Acc b)
                -> IO (Acc a -> UnscopedAcc b, Int)
makeOccMapAfun1 config accOccMap lvl afun = do
  let boundVar = Acc (Atag lvl)
  (UnscopedAcc [] body, height) <- makeOccMapSharingAcc config accOccMap (lvl + 1) (afun boundVar)
  pure (\_ -> UnscopedAcc [lvl] body, height)
-- Generate occurrence information for scalar functions and expressions.
-- Helper functions wrapping around 'makeOccMapRootExp' with more specific
-- types.
--
-- See Note [Traversing functions and side effects]
--
makeOccMapExp
    :: Typeable e
    => Config
    -> OccMapHash Acc
    -> Level
    -> Exp e
    -> IO (RootExp e, Int)
makeOccMapExp config accOccMap lvl e
  = makeOccMapRootExp config accOccMap lvl [] e    -- no free scalar variables introduced here
-- Occurrence analysis for a unary scalar function: apply it to a fresh 'Tag'
-- carrying the current level, and analyse the resulting body.
makeOccMapFun1
    :: (Elt a, Typeable b)
    => Config
    -> OccMapHash Acc
    -> Level
    -> (Exp a -> Exp b)
    -> IO (Exp a -> RootExp b, Int)
makeOccMapFun1 config accOccMap lvl fun = do
  (body, height) <- makeOccMapRootExp config accOccMap (lvl + 1) [lvl] (fun (Exp (Tag lvl)))
  pure (\_ -> body, height)
-- Occurrence analysis for a binary scalar function: the first argument is
-- tagged with the higher level (lvl+1), the second with lvl.
makeOccMapFun2
    :: (Elt a, Elt b, Typeable c)
    => Config
    -> OccMapHash Acc
    -> Level
    -> (Exp a -> Exp b -> Exp c)
    -> IO (Exp a -> Exp b -> RootExp c, Int)
makeOccMapFun2 config accOccMap lvl fun = do
  let arg1 = Exp (Tag (lvl + 1))
      arg2 = Exp (Tag lvl)
  (body, height) <- makeOccMapRootExp config accOccMap (lvl + 2) [lvl, lvl + 1] (fun arg1 arg2)
  pure (\_ _ -> body, height)
-- Occurrence analysis for a unary stencil function: project a tagged dummy
-- array access through 'stencilPrj' to build the stencil argument, then
-- analyse the function's body.
makeOccMapStencil1
    :: forall sh a b stencil. (Stencil sh a stencil, Typeable b)
    => Config
    -> OccMapHash Acc
    -> Acc (Array sh a)                  {- dummy -}
    -> Level
    -> (stencil -> Exp b)
    -> IO (stencil -> RootExp b, Int)
makeOccMapStencil1 config accOccMap _ lvl stencil = do
  let stencilArg = stencilPrj (undefined::sh) (undefined::a) (Exp (Tag lvl))
  (body, height) <- makeOccMapRootExp config accOccMap (lvl + 1) [lvl] (stencil stencilArg)
  pure (\_ -> body, height)
-- Occurrence analysis for a binary stencil function; analogous to
-- 'makeOccMapStencil1', with the first stencil argument tagged lvl+1 and the
-- second tagged lvl (mirroring 'makeOccMapFun2').
makeOccMapStencil2
    :: forall sh a b c stencil1 stencil2. (Stencil sh a stencil1, Stencil sh b stencil2, Typeable c)
    => Config
    -> OccMapHash Acc
    -> Acc (Array sh a)                  {- dummy -}
    -> Acc (Array sh b)                  {- dummy -}
    -> Level
    -> (stencil1 -> stencil2 -> Exp c)
    -> IO (stencil1 -> stencil2 -> RootExp c, Int)
makeOccMapStencil2 config accOccMap _ _ lvl stencil = do
  let arg1 = stencilPrj (undefined::sh) (undefined::a) (Exp (Tag (lvl + 1)))
      arg2 = stencilPrj (undefined::sh) (undefined::b) (Exp (Tag lvl))
  (body, height) <- makeOccMapRootExp config accOccMap (lvl + 2) [lvl, lvl + 1] (stencil arg1 arg2)
  pure (\_ _ -> body, height)
-- Generate sharing information for expressions embedded in Acc computations.
-- Expressions are annotated with:
--
--  1) the tags of free scalar variables (for scalar functions)
--  2) a local occurrence map for that expression.
--
makeOccMapRootExp
    :: Typeable e
    => Config
    -> OccMapHash Acc
    -> Level              -- ^ level of currently bound scalar variables
    -> [Int]              -- ^ tags of newly introduced free scalar variables
    -> Exp e
    -> IO (RootExp e, Int)
makeOccMapRootExp config accOccMap lvl fvs exp = do
  traceLine "makeOccMapRootExp" "Enter"
  -- each root expression gets its own, local occurrence map
  localOccMap <- newASTHashTable
  (UnscopedExp [] body, height) <- makeOccMapSharingExp config accOccMap localOccMap lvl exp
  frozenLocalOccMap <- freezeOccMap localOccMap
  traceLine "makeOccMapRootExp" "Exit"
  pure (RootExp frozenLocalOccMap (UnscopedExp fvs body), height)
-- Generate sharing information for an open scalar expression.
--
-- Scalar analogue of 'makeOccMapSharingAcc': every node is entered into the
-- *expression* occurrence map, while embedded array computations are handed
-- back to 'makeOccMapSharingAcc' with the shared array-level map.
--
-- NOTE(review): like the array traversal, this ties a knot with 'mfix' — the
-- height passed to 'enterOcc' must not be forced inside it.
makeOccMapSharingExp
    :: Typeable e
    => Config
    -> OccMapHash Acc
    -> OccMapHash Exp
    -> Level                            -- The level of currently bound variables
    -> Exp e
    -> IO (UnscopedExp e, Int)
makeOccMapSharingExp config accOccMap expOccMap = travE
  where
    travE :: forall a. Typeable a => Level -> Exp a -> IO (UnscopedExp a, Int)
    travE lvl exp@(Exp pexp)
      = mfix $ \ ~(_, height) -> do
          -- Compute stable name and enter it into the occurrence map
          --
          sn                         <- makeStableAST exp
          heightIfRepeatedOccurrence <- enterOcc expOccMap (StableASTName sn) height

          traceLine (showPreExpOp pexp) $ do
            let hash = show (hashStableName sn)
            case heightIfRepeatedOccurrence of
              Just height -> "REPEATED occurrence (sn = " ++ hash ++ "; height = " ++ show height ++ ")"
              Nothing     -> "first occurrence (sn = " ++ hash ++ ")"

          -- Reconstruct the computation in shared form.
          --
          -- In case of a repeated occurrence, the height comes from the occurrence map; otherwise
          -- it is computed by the traversal function passed in 'newExp'. See also 'enterOcc'.
          --
          -- NB: This function can only be used in the case alternatives below; outside of the
          -- case we cannot discharge the 'Elt a' constraint.
          --
          let reconstruct :: Elt a
                          => IO (PreExp UnscopedAcc UnscopedExp a, Int)
                          -> IO (UnscopedExp a, Int)
              reconstruct newExp
                = case heightIfRepeatedOccurrence of
                    Just height | recoverExpSharing config
                      -> return (UnscopedExp [] (VarSharing (StableNameHeight sn height)), height)
                    _ -> do (exp, height) <- newExp
                            return (UnscopedExp [] (ExpSharing (StableNameHeight sn height) exp), height)

          case pexp of
            Tag i               -> reconstruct $ return (Tag i, 0)          -- height is 0!
            Const c             -> reconstruct $ return (Const c, 1)
            Tuple tup           -> reconstruct $ do
                                     (tup', h) <- travTup tup
                                     return (Tuple tup', h)
            Prj i e             -> reconstruct $ travE1 (Prj i) e
            IndexNil            -> reconstruct $ return (IndexNil, 1)
            IndexCons ix i      -> reconstruct $ travE2 IndexCons ix i
            IndexHead i         -> reconstruct $ travE1 IndexHead i
            IndexTail ix        -> reconstruct $ travE1 IndexTail ix
            IndexAny            -> reconstruct $ return (IndexAny, 1)
            ToIndex sh ix       -> reconstruct $ travE2 ToIndex sh ix
            FromIndex sh e      -> reconstruct $ travE2 FromIndex sh e
            Cond e1 e2 e3       -> reconstruct $ travE3 Cond e1 e2 e3
            While p iter init   -> reconstruct $ do
                                     (p'   , h1) <- traverseFun1 lvl p
                                     (iter', h2) <- traverseFun1 lvl iter
                                     (init', h3) <- travE lvl init
                                     return (While p' iter' init', h1 `max` h2 `max` h3 + 1)
            PrimConst c         -> reconstruct $ return (PrimConst c, 1)
            PrimApp p e         -> reconstruct $ travE1 (PrimApp p) e
            Index a e           -> reconstruct $ travAE Index a e
            LinearIndex a i     -> reconstruct $ travAE LinearIndex a i
            Shape a             -> reconstruct $ travA Shape a
            ShapeSize e         -> reconstruct $ travE1 ShapeSize e
            Intersect sh1 sh2   -> reconstruct $ travE2 Intersect sh1 sh2
            Foreign ff f e      -> reconstruct $ do
                                     (e', h) <- travE lvl e
                                     return (Foreign ff f e', h+1)
      where
        -- Embedded array computations are analysed with the shared
        -- array-level occurrence map.
        traverseAcc :: Typeable arrs => Level -> Acc arrs -> IO (UnscopedAcc arrs, Int)
        traverseAcc = makeOccMapSharingAcc config accOccMap

        -- Local scalar function traversal (used by 'While'); the bound
        -- variable's tag is recorded in the 'UnscopedExp' wrapper.
        traverseFun1 :: (Elt a, Typeable b)
                     => Level
                     -> (Exp a -> Exp b)
                     -> IO (Exp a -> UnscopedExp b, Int)
        traverseFun1 lvl f
          = do
              let x = Exp (Tag lvl)
              (UnscopedExp [] body, height) <- travE (lvl+1) (f x)
              return (const (UnscopedExp [lvl] body), height + 1)

        -- Node with one scalar subterm; height is one above it.
        travE1 :: Typeable b => (UnscopedExp b -> PreExp UnscopedAcc UnscopedExp a) -> Exp b
               -> IO (PreExp UnscopedAcc UnscopedExp a, Int)
        travE1 c e
          = do
              (e', h) <- travE lvl e
              return (c e', h + 1)

        -- Node with two scalar subterms.
        travE2 :: (Typeable b, Typeable c)
               => (UnscopedExp b -> UnscopedExp c -> PreExp UnscopedAcc UnscopedExp a)
               -> Exp b -> Exp c
               -> IO (PreExp UnscopedAcc UnscopedExp a, Int)
        travE2 c e1 e2
          = do
              (e1', h1) <- travE lvl e1
              (e2', h2) <- travE lvl e2
              return (c e1' e2', h1 `max` h2 + 1)

        -- Node with three scalar subterms.
        travE3 :: (Typeable b, Typeable c, Typeable d)
               => (UnscopedExp b -> UnscopedExp c -> UnscopedExp d -> PreExp UnscopedAcc UnscopedExp a)
               -> Exp b -> Exp c -> Exp d
               -> IO (PreExp UnscopedAcc UnscopedExp a, Int)
        travE3 c e1 e2 e3
          = do
              (e1', h1) <- travE lvl e1
              (e2', h2) <- travE lvl e2
              (e3', h3) <- travE lvl e3
              return (c e1' e2' e3', h1 `max` h2 `max` h3 + 1)

        -- Node with one embedded array subterm.
        travA :: Typeable b => (UnscopedAcc b -> PreExp UnscopedAcc UnscopedExp a) -> Acc b
              -> IO (PreExp UnscopedAcc UnscopedExp a, Int)
        travA c acc
          = do
              (acc', h) <- traverseAcc lvl acc
              return (c acc', h + 1)

        -- Node with one array and one scalar subterm (indexing).
        travAE :: (Typeable b, Typeable c)
               => (UnscopedAcc b -> UnscopedExp c -> PreExp UnscopedAcc UnscopedExp a)
               -> Acc b -> Exp c
               -> IO (PreExp UnscopedAcc UnscopedExp a, Int)
        travAE c acc e
          = do
              (acc', h1) <- traverseAcc lvl acc
              (e'  , h2) <- travE lvl e
              return (c acc' e', h1 `max` h2 + 1)

        -- Traverse a scalar tuple; the empty tuple has height 1.
        travTup :: Tuple.Tuple Exp tup -> IO (Tuple.Tuple UnscopedExp tup, Int)
        travTup NilTup          = return (NilTup, 1)
        travTup (SnocTup tup e) = do
          (tup', h1) <- travTup tup
          (e'  , h2) <- travE lvl e
          return (SnocTup tup' e', h1 `max` h2 + 1)
-- Type used to maintain how often each shared subterm has occurred, so far, during a bottom-up
-- sweep, as well as the relation between subterms. It is comprised of a list of terms and a graph
-- giving their relation.
--
-- Invariants of the list:
-- - If one shared term 's' is itself a subterm of another shared term 't', then 's' must occur
--   *after* 't' in the list.
-- - No shared term occurs twice.
-- - A term may have a final occurrence count of only 1 iff it is either a free variable ('Atag'
--   or 'Tag') or an array computation lifted out of an expression.
-- - All 'Exp' node counts precede all 'Acc' node counts as we don't share 'Exp' nodes across 'Acc'
--   nodes.
--
-- We determine the subterm property by using the tree height in 'StableNameHeight'. Trees get
-- smaller towards the end of a 'NodeCounts' list. The height of free variables ('Atag' or 'Tag')
-- is 0, whereas other leaves have height 1. This guarantees that all free variables are at the end
-- of the 'NodeCounts' list.
--
-- The graph is represented as a map where a stable name 'a' is mapped to a set of stable names 'b'
-- such that if there exists an edge from 'a' to 'c', then 'c' is contained within 'b'.
--
-- Properties of the graph:
-- - There exists an edge from 'a' to 'b' if the term 'a' names is a subterm of the term named by
--   'b'.
--
-- To ensure the list invariant and the graph properties are preserved over merging node counts from
-- sibling subterms, the function '(+++)' must be used.
--
type NodeCounts = ([NodeCount], Map.HashMap NodeName (Set.HashSet NodeName))
-- | Name of an AST node: an existentially quantified stable name, usable as a
--   key in the subterm-relation graph of 'NodeCounts'.
data NodeName where
  NodeName :: Typeable a => StableName a -> NodeName
-- | Two node names are equal iff their stable names coincide at the same
--   type; names of differently-typed terms are never equal.
instance Eq NodeName where
  (NodeName sn1) == (NodeName sn2) = maybe False (sn1 ==) (gcast sn2)
-- | Fold the stable-name hash into the salt by addition (kept in sync with
--   the 'Eq' instance: equal names have equal stable-name hashes).
instance Hashable NodeName where
  hashWithSalt salt (NodeName sn) = salt + hashStableName sn
-- | Rendered as the hash of the underlying stable name (tracing output only).
instance Show NodeName where
  show (NodeName sn) = show (hashStableName sn)
-- | Occurrence count for one shared subterm, either at the array level
--   ('AccNodeCount') or at the scalar level ('ExpNodeCount').
data NodeCount = AccNodeCount StableSharingAcc Int
               | ExpNodeCount StableSharingExp Int
               deriving Show
-- | Empty node counts: no shared subterms and an empty subterm-relation graph.
--
noNodeCounts :: NodeCounts
noNodeCounts = ([], Map.empty)
-- Insert an Acc node into the node counts, assuming that it is a superterm of
-- all the existing nodes.
--
-- TODO: Perform cycle detection here.
insertAccNode :: StableSharingAcc -> NodeCounts -> NodeCounts
insertAccNode ssa@(StableSharingAcc (StableNameHeight sn _) _) (subterms, g) =
  ([AccNodeCount ssa 1], newEdges) +++ (subterms, g)
  where
    newNode  = NodeName sn
    -- the new node is a superterm of every existing node, so every existing
    -- node gains an edge pointing at it
    newEdges = Map.fromList
             $ (newNode, Set.empty)
             : [ (nodeName sub, Set.singleton newNode) | sub <- subterms ]
-- Insert an Exp node into the node counts, assuming that it is a superterm of
-- all the existing nodes.
--
-- TODO: Perform cycle detection here.
insertExpNode :: StableSharingExp -> NodeCounts -> NodeCounts
insertExpNode sse@(StableSharingExp (StableNameHeight sn _) _) (subterms, g) =
  ([ExpNodeCount sse 1], newEdges) +++ (subterms, g)
  where
    newNode  = NodeName sn
    -- the new node is a superterm of every existing node, so every existing
    -- node gains an edge pointing at it
    newEdges = Map.fromList
             $ (newNode, Set.empty)
             : [ (nodeName sub, Set.singleton newNode) | sub <- subterms ]
-- Remove nodes that aren't in the list from the graph.
--
-- Fixed an accidental O(n²): the original tested membership with
-- @flip elem hs@ over a *list* for every edge; we now build a 'Set.HashSet'
-- once and test membership in it ('NodeName' is 'Eq' and 'Hashable').
--
-- NOTE(review): @g Map.! h@ assumes every listed node is present in the
-- graph — this holds by construction in 'insertAccNode' / 'insertExpNode'.
--
-- RCE: This is no longer necessary when NDP is supported.
cleanCounts :: NodeCounts -> NodeCounts
cleanCounts (ns, g) = (ns, Map.fromList [ (h, Set.filter (`Set.member` keep) (g Map.! h)) | h <- hs ])
  where
    hs   = map nodeName ns
    keep = Set.fromList hs
-- | Extract the stable name identifying a node count entry (the graph key).
nodeName :: NodeCount -> NodeName
nodeName (AccNodeCount (StableSharingAcc (StableNameHeight sn _) _) _) = NodeName sn
nodeName (ExpNodeCount (StableSharingExp (StableNameHeight sn _) _) _) = NodeName sn
-- Combine node counts that belong to the same node.
--
-- * We assume that the list invariant —subterms follow their parents— holds for both arguments and
--   guarantee that it still holds for the result.
-- * In the same manner, we assume that all 'Exp' node counts precede 'Acc' node counts and
--   guarantee that this also holds for the result.
--
-- RCE: The list combination should be able to be performed as a more efficient merge.
--
(+++) :: NodeCounts -> NodeCounts -> NodeCounts
(ns1,g1) +++ (ns2,g2) = (foldr insert ns1 ns2, Map.unionWith Set.union g1 g2)
  where
    -- Ordered insertion: merge the counts of identical nodes; otherwise keep
    -- larger (higher) terms towards the front of the list.
    insert x []              = [x]
    insert x@(AccNodeCount sa1 count1) ys@(y@(AccNodeCount sa2 count2) : ys')
      | sa1 == sa2           = AccNodeCount (sa1 `pickNoneAvar` sa2) (count1 + count2) : ys'
      | sa1 `higherSSA` sa2  = x : ys
      | otherwise            = y : insert x ys'
    insert x@(ExpNodeCount se1 count1) ys@(y@(ExpNodeCount se2 count2) : ys')
      | se1 == se2           = ExpNodeCount (se1 `pickNoneVar` se2) (count1 + count2) : ys'
      | se1 `higherSSE` se2  = x : ys
      | otherwise            = y : insert x ys'
    -- keep all 'Exp' node counts in front of all 'Acc' node counts
    insert x@(AccNodeCount _ _) (y@(ExpNodeCount _ _) : ys')
      = y : insert x ys'
    insert x@(ExpNodeCount _ _) (y@(AccNodeCount _ _) : ys')
      = x : insert y ys'

    -- When merging two entries for the same node, prefer the one that carries
    -- a full definition over a mere sharing-variable reference.
    (StableSharingAcc _ (AvarSharing _)) `pickNoneAvar` sa2  = sa2
    sa1                                  `pickNoneAvar` _sa2 = sa1

    (StableSharingExp _ (VarSharing _))  `pickNoneVar`  sa2  = sa2
    sa1                                  `pickNoneVar`  _sa2 = sa1
-- Build an initial environment for the tag values given in the first argument for traversing an
-- array expression. The 'StableSharingAcc's for all tags /actually used/ in the expressions are
-- in the second argument. (Tags are not used if a bound variable has no usage occurrence.)
--
-- Bail out if any tag occurs multiple times as this indicates that the sharing of an argument
-- variable was not preserved and we cannot build an appropriate initial environment (cf. comments
-- at 'determineScopesAcc').
--
buildInitialEnvAcc :: [Level] -> [StableSharingAcc] -> [StableSharingAcc]
buildInitialEnvAcc tags sas = map (lookupSA sas) tags
  where
    -- Find the unique sharing entry whose 'Atag' matches the given tag.
    lookupSA sas tag1
      = case filter hasTag sas of
          []   -> noStableSharing    -- tag is not used in the analysed expression
          [sa] -> sa                 -- tag has a unique occurrence
          sas2 -> $internalError "buildInitialEnvAcc"
                  $ "Encountered duplicate 'ATag's\n " ++ intercalate ", " (map showSA sas2)
      where
        hasTag (StableSharingAcc _ (AccSharing _ (Atag tag2))) = tag1 == tag2
        hasTag sa
          = $internalError "buildInitialEnvAcc"
          $ "Encountered a node that is not a plain 'Atag'\n " ++ showSA sa

        -- Placeholder entry for an unused tag.
        -- NOTE(review): the 'undefined' payload is presumably never forced
        -- for unused tags — confirm before relying on it.
        noStableSharing :: StableSharingAcc
        noStableSharing = StableSharingAcc noStableAccName (undefined :: SharingAcc acc exp ())

    -- Render a sharing entry for error messages.
    showSA (StableSharingAcc _ (AccSharing sn acc)) = show (hashStableNameHeight sn) ++ ": " ++
                                                      showPreAccOp acc
    showSA (StableSharingAcc _ (AvarSharing sn))    = "AvarSharing " ++ show (hashStableNameHeight sn)
    showSA (StableSharingAcc _ (AletSharing sa _ )) = "AletSharing " ++ show sa ++ "..."
-- Build an initial environment for the tag values given in the first argument for traversing a
-- scalar expression. The 'StableSharingExp's for all tags /actually used/ in the expressions are
-- in the second argument. (Tags are not used if a bound variable has no usage occurrence.)
--
-- Bail out if any tag occurs multiple times as this indicates that the sharing of an argument
-- variable was not preserved and we cannot build an appropriate initial environment (cf. comments
-- at 'determineScopesAcc').
--
buildInitialEnvExp :: [Level] -> [StableSharingExp] -> [StableSharingExp]
buildInitialEnvExp tags ses = map (lookupSE ses) tags
  where
    -- Find the unique sharing entry whose 'Tag' matches the given tag.
    lookupSE ses tag1
      = case filter hasTag ses of
          []   -> noStableSharing    -- tag is not used in the analysed expression
          [se] -> se                 -- tag has a unique occurrence
          ses2 -> $internalError "buildInitialEnvExp"
                    ("Encountered a duplicate 'Tag'\n " ++ intercalate ", " (map showSE ses2))
      where
        hasTag (StableSharingExp _ (ExpSharing _ (Tag tag2))) = tag1 == tag2
        hasTag se
          = $internalError "buildInitialEnvExp"
              ("Encountered a node that is not a plain 'Tag'\n " ++ showSE se)

        -- Placeholder entry for an unused tag.
        -- NOTE(review): the 'undefined' payload is presumably never forced
        -- for unused tags — confirm before relying on it.
        noStableSharing :: StableSharingExp
        noStableSharing = StableSharingExp noStableExpName (undefined :: SharingExp acc exp ())

    -- Render a sharing entry for error messages.
    showSE (StableSharingExp _ (ExpSharing sn exp)) = show (hashStableNameHeight sn) ++ ": " ++
                                                      showPreExpOp exp
    showSE (StableSharingExp _ (VarSharing sn))     = "VarSharing " ++ show (hashStableNameHeight sn)
    showSE (StableSharingExp _ (LetSharing se _ ))  = "LetSharing " ++ show se ++ "..."
-- Determine whether a 'NodeCount' is for an 'Atag' or 'Tag', which represent free variables.
--
isFreeVar :: NodeCount -> Bool
isFreeVar (AccNodeCount (StableSharingAcc _ (AccSharing _ (Atag _))) _) = True
isFreeVar (ExpNodeCount (StableSharingExp _ (ExpSharing _ (Tag _))) _)  = True
isFreeVar _                                                             = False
-- Determine scope of shared subterms
-- ==================================

-- Determine the scopes of all variables representing shared subterms (Phase Two) in a bottom-up
-- sweep. The first argument determines whether array computations are floated out of expressions
-- irrespective of whether they are shared or not — 'True' implies floating them out.
--
-- In addition to the AST with sharing information, yield the 'StableSharingAcc's for all free
-- variables of 'rootAcc', which are represented by 'Atag' leaves in the tree. They are in order of
-- the tag values — i.e., in the same order that they need to appear in an environment to use the
-- tag for indexing into that environment.
--
-- Precondition: there are only 'AvarSharing' and 'AccSharing' nodes in the argument.
--
determineScopesAcc
    :: Typeable a
    => Config
    -> [Level]
    -> OccMap Acc
    -> UnscopedAcc a
    -> (ScopedAcc a, [StableSharingAcc])
determineScopesAcc config fvs accOccMap rootAcc
  = let (sharingAcc, (counts, _)) = determineScopesSharingAcc config accOccMap rootAcc
        unboundTrees              = filter (not . isFreeVar) counts
    in
    -- after scoping, the only node counts left over must be free variables
    if all isFreeVar counts
      then (sharingAcc, buildInitialEnvAcc fvs [sa | AccNodeCount sa _ <- counts])
      else $internalError "determineScopesAcc" ("unbound shared subtrees" ++ show unboundTrees)
determineScopesSharingAcc
:: Config
-> OccMap Acc
-> UnscopedAcc a
-> (ScopedAcc a, NodeCounts)
determineScopesSharingAcc config accOccMap = scopesAcc
where
scopesAcc :: forall arrs. UnscopedAcc arrs -> (ScopedAcc arrs, NodeCounts)
scopesAcc (UnscopedAcc _ (AletSharing _ _))
= $internalError "determineScopesSharingAcc: scopesAcc" "unexpected 'AletSharing'"
scopesAcc (UnscopedAcc _ (AvarSharing sn))
= (ScopedAcc [] (AvarSharing sn), StableSharingAcc sn (AvarSharing sn) `insertAccNode` noNodeCounts)
scopesAcc (UnscopedAcc _ (AccSharing sn pacc))
= case pacc of
Atag i -> reconstruct (Atag i) noNodeCounts
Pipe afun1 afun2 acc -> let
(afun1', accCount1) = scopesAfun1 afun1
(afun2', accCount2) = scopesAfun1 afun2
(acc', accCount3) = scopesAcc acc
in
reconstruct (Pipe afun1' afun2' acc')
(accCount1 +++ accCount2 +++ accCount3)
Aforeign ff afun acc -> let
(acc', accCount) = scopesAcc acc
in
reconstruct (Aforeign ff afun acc') accCount
Acond e acc1 acc2 -> let
(e' , accCount1) = scopesExp e
(acc1', accCount2) = scopesAcc acc1
(acc2', accCount3) = scopesAcc acc2
in
reconstruct (Acond e' acc1' acc2')
(accCount1 +++ accCount2 +++ accCount3)
Awhile pred iter init -> let
(pred', accCount1) = scopesAfun1 pred
(iter', accCount2) = scopesAfun1 iter
(init', accCount3) = scopesAcc init
in
reconstruct (Awhile pred' iter' init')
(accCount1 +++ accCount2 +++ accCount3)
Atuple tup -> let (tup', accCount) = travAtup tup
in reconstruct (Atuple tup') accCount
Aprj ix a -> travA (Aprj ix) a
Use arr -> reconstruct (Use arr) noNodeCounts
Unit e -> let
(e', accCount) = scopesExp e
in
reconstruct (Unit e') accCount
Generate sh f -> let
(sh', accCount1) = scopesExp sh
(f' , accCount2) = scopesFun1 f
in
reconstruct (Generate sh' f') (accCount1 +++ accCount2)
Reshape sh acc -> travEA Reshape sh acc
Replicate n acc -> travEA Replicate n acc
Slice acc i -> travEA (flip Slice) i acc
Map f acc -> let
(f' , accCount1) = scopesFun1 f
(acc', accCount2) = scopesAcc acc
in
reconstruct (Map f' acc') (accCount1 +++ accCount2)
ZipWith f acc1 acc2 -> travF2A2 ZipWith f acc1 acc2
Fold f z acc -> travF2EA Fold f z acc
Fold1 f acc -> travF2A Fold1 f acc
FoldSeg f z acc1 acc2 -> let
(f' , accCount1) = scopesFun2 f
(z' , accCount2) = scopesExp z
(acc1', accCount3) = scopesAcc acc1
(acc2', accCount4) = scopesAcc acc2
in
reconstruct (FoldSeg f' z' acc1' acc2')
(accCount1 +++ accCount2 +++ accCount3 +++ accCount4)
Fold1Seg f acc1 acc2 -> travF2A2 Fold1Seg f acc1 acc2
Scanl f z acc -> travF2EA Scanl f z acc
Scanl' f z acc -> travF2EA Scanl' f z acc
Scanl1 f acc -> travF2A Scanl1 f acc
Scanr f z acc -> travF2EA Scanr f z acc
Scanr' f z acc -> travF2EA Scanr' f z acc
Scanr1 f acc -> travF2A Scanr1 f acc
Permute fc acc1 fp acc2 -> let
(fc' , accCount1) = scopesFun2 fc
(acc1', accCount2) = scopesAcc acc1
(fp' , accCount3) = scopesFun1 fp
(acc2', accCount4) = scopesAcc acc2
in
reconstruct (Permute fc' acc1' fp' acc2')
(accCount1 +++ accCount2 +++ accCount3 +++ accCount4)
Backpermute sh fp acc -> let
(sh' , accCount1) = scopesExp sh
(fp' , accCount2) = scopesFun1 fp
(acc', accCount3) = scopesAcc acc
in
reconstruct (Backpermute sh' fp' acc')
(accCount1 +++ accCount2 +++ accCount3)
Stencil st bnd acc -> let
(st' , accCount1) = scopesStencil1 acc st
(acc', accCount2) = scopesAcc acc
in
reconstruct (Stencil st' bnd acc') (accCount1 +++ accCount2)
Stencil2 st bnd1 acc1 bnd2 acc2
-> let
(st' , accCount1) = scopesStencil2 acc1 acc2 st
(acc1', accCount2) = scopesAcc acc1
(acc2', accCount3) = scopesAcc acc2
in
reconstruct (Stencil2 st' bnd1 acc1' bnd2 acc2')
(accCount1 +++ accCount2 +++ accCount3)
where
travEA :: Arrays arrs
=> (ScopedExp e -> ScopedAcc arrs' -> PreAcc ScopedAcc ScopedExp arrs)
-> RootExp e
-> UnscopedAcc arrs'
-> (ScopedAcc arrs, NodeCounts)
travEA c e acc = reconstruct (c e' acc') (accCount1 +++ accCount2)
where
(e' , accCount1) = scopesExp e
(acc', accCount2) = scopesAcc acc
travF2A :: (Elt a, Elt b, Arrays arrs)
=> ((Exp a -> Exp b -> ScopedExp c) -> ScopedAcc arrs'
-> PreAcc ScopedAcc ScopedExp arrs)
-> (Exp a -> Exp b -> RootExp c)
-> UnscopedAcc arrs'
-> (ScopedAcc arrs, NodeCounts)
travF2A c f acc = reconstruct (c f' acc') (accCount1 +++ accCount2)
where
(f' , accCount1) = scopesFun2 f
(acc', accCount2) = scopesAcc acc
travF2EA :: (Elt a, Elt b, Arrays arrs)
=> ((Exp a -> Exp b -> ScopedExp c) -> ScopedExp e
-> ScopedAcc arrs' -> PreAcc ScopedAcc ScopedExp arrs)
-> (Exp a -> Exp b -> RootExp c)
-> RootExp e
-> UnscopedAcc arrs'
-> (ScopedAcc arrs, NodeCounts)
travF2EA c f e acc = reconstruct (c f' e' acc') (accCount1 +++ accCount2 +++ accCount3)
where
(f' , accCount1) = scopesFun2 f
(e' , accCount2) = scopesExp e
(acc', accCount3) = scopesAcc acc
travF2A2 :: (Elt a, Elt b, Arrays arrs)
=> ((Exp a -> Exp b -> ScopedExp c) -> ScopedAcc arrs1
-> ScopedAcc arrs2 -> PreAcc ScopedAcc ScopedExp arrs)
-> (Exp a -> Exp b -> RootExp c)
-> UnscopedAcc arrs1
-> UnscopedAcc arrs2
-> (ScopedAcc arrs, NodeCounts)
travF2A2 c f acc1 acc2 = reconstruct (c f' acc1' acc2')
(accCount1 +++ accCount2 +++ accCount3)
where
(f' , accCount1) = scopesFun2 f
(acc1', accCount2) = scopesAcc acc1
(acc2', accCount3) = scopesAcc acc2
travAtup :: Tuple.Atuple UnscopedAcc a
-> (Tuple.Atuple ScopedAcc a, NodeCounts)
travAtup NilAtup = (NilAtup, noNodeCounts)
travAtup (SnocAtup tup a) = let (tup', accCountT) = travAtup tup
(a', accCountA) = scopesAcc a
in
(SnocAtup tup' a', accCountT +++ accCountA)
travA :: Arrays arrs
=> (ScopedAcc arrs' -> PreAcc ScopedAcc ScopedExp arrs)
-> UnscopedAcc arrs'
-> (ScopedAcc arrs, NodeCounts)
travA c acc = reconstruct (c acc') accCount
where
(acc', accCount) = scopesAcc acc
-- Occurrence count of the currently processed node
accOccCount = let StableNameHeight sn' _ = sn
in
lookupWithASTName accOccMap (StableASTName sn')
-- Reconstruct the current tree node.
--
-- * If the current node is being shared ('accOccCount > 1'), replace it by a 'AvarSharing'
-- node and float the shared subtree out wrapped in a 'NodeCounts' value.
-- * If the current node is not shared, reconstruct it in place.
-- * Special case for free variables ('Atag'): Replace the tree by a sharing variable and
-- float the 'Atag' out in a 'NodeCounts' value. This is independent of the number of
-- occurrences.
--
-- In either case, any completed 'NodeCounts' are injected as bindings using 'AletSharing'
-- node.
--
reconstruct :: Arrays arrs
=> PreAcc ScopedAcc ScopedExp arrs -> NodeCounts
-> (ScopedAcc arrs, NodeCounts)
reconstruct newAcc@(Atag _) _subCount
-- free variable => replace by a sharing variable regardless of the number of
-- occurrences
= let thisCount = StableSharingAcc sn (AccSharing sn newAcc) `insertAccNode` noNodeCounts
in
tracePure "FREE" (show thisCount)
(ScopedAcc [] (AvarSharing sn), thisCount)
reconstruct newAcc subCount
-- shared subtree => replace by a sharing variable (if 'recoverAccSharing' enabled)
| accOccCount > 1 && recoverAccSharing config
= let allCount = (StableSharingAcc sn sharingAcc `insertAccNode` newCount)
in
tracePure ("SHARED" ++ completed) (show allCount)
(ScopedAcc [] (AvarSharing sn), allCount)
-- neither shared nor free variable => leave it as it is
| otherwise
= tracePure ("Normal" ++ completed) (show newCount)
(ScopedAcc [] sharingAcc, newCount)
where
-- Determine the bindings that need to be attached to the current node...
(newCount, bindHere) = filterCompleted subCount
-- ...and wrap them in 'AletSharing' constructors
lets = foldl (flip (.)) id . map (\x y -> AletSharing x (ScopedAcc [] y)) $ bindHere
sharingAcc = lets $ AccSharing sn newAcc
-- trace support
completed | null bindHere = ""
| otherwise = "(" ++ show (length bindHere) ++ " lets)"
-- Extract *leading* nodes that have a complete node count (i.e., their node count is equal
-- to the number of occurrences of that node in the overall expression).
--
-- Nodes with a completed node count should be let bound at the currently processed node.
--
-- NB: Only extract leading nodes (i.e., the longest run at the *front* of the list that is
-- complete). Otherwise, we would let-bind subterms before their parents, which leads
-- scope errors.
--
filterCompleted :: NodeCounts -> (NodeCounts, [StableSharingAcc])
filterCompleted (ns, graph)
= let bindable = map (isBindable bindable (map nodeName ns)) ns
(bind, rest) = partition fst $ zip bindable ns
in ((map snd rest, graph), [sa | AccNodeCount sa _ <- map snd bind])
where
-- a node is not yet complete while the node count 'n' is below the overall number
-- of occurrences for that node in the whole program, with the exception that free
-- variables are never complete
isCompleted nc@(AccNodeCount sa n) | not . isFreeVar $ nc = lookupWithSharingAcc accOccMap sa == n
isCompleted _ = False
isBindable :: [Bool] -> [NodeName] -> NodeCount -> Bool
isBindable bindable nodes nc@(AccNodeCount _ _) =
let superTerms = Set.toList $ graph Map.! nodeName nc
unbound = mapMaybe (`elemIndex` nodes) superTerms
in isCompleted nc
&& all (bindable !!) unbound
isBindable _ _ (ExpNodeCount _ _) = False
scopesExp :: RootExp t -> (ScopedExp t, NodeCounts)
scopesExp = determineScopesExp config accOccMap
-- The lambda bound variable is at this point already irrelevant; for details, see
-- Note [Traversing functions and side effects]
--
scopesAfun1 :: Arrays a1 => (Acc a1 -> UnscopedAcc a2) -> (Acc a1 -> ScopedAcc a2, NodeCounts)
scopesAfun1 f = (const (ScopedAcc ssa body'), (counts',graph))
where
body@(UnscopedAcc fvs _) = f undefined
((ScopedAcc [] body'), (counts,graph)) = scopesAcc body
ssa = buildInitialEnvAcc fvs [sa | AccNodeCount sa _ <- freeCounts]
(freeCounts, counts') = partition isBoundHere counts
isBoundHere (AccNodeCount (StableSharingAcc _ (AccSharing _ (Atag i))) _) = i `elem` fvs
isBoundHere _ = False
-- The lambda bound variable is at this point already irrelevant; for details, see
-- Note [Traversing functions and side effects]
--
scopesFun1 :: Elt e1 => (Exp e1 -> RootExp e2) -> (Exp e1 -> ScopedExp e2, NodeCounts)
scopesFun1 f = (const body, counts)
where
(body, counts) = scopesExp (f undefined)
-- The lambda bound variable is at this point already irrelevant; for details, see
-- Note [Traversing functions and side effects]
--
scopesFun2 :: (Elt e1, Elt e2)
=> (Exp e1 -> Exp e2 -> RootExp e3)
-> (Exp e1 -> Exp e2 -> ScopedExp e3, NodeCounts)
scopesFun2 f = (\_ _ -> body, counts)
where
(body, counts) = scopesExp (f undefined undefined)
-- The lambda bound variable is at this point already irrelevant; for details, see
-- Note [Traversing functions and side effects]
--
scopesStencil1 :: forall sh e1 e2 stencil. Stencil sh e1 stencil
=> UnscopedAcc (Array sh e1){-dummy-}
-> (stencil -> RootExp e2)
-> (stencil -> ScopedExp e2, NodeCounts)
scopesStencil1 _ stencilFun = (const body, counts)
where
(body, counts) = scopesExp (stencilFun undefined)
-- The lambda bound variable is at this point already irrelevant; for details, see
-- Note [Traversing functions and side effects]
--
scopesStencil2 :: forall sh e1 e2 e3 stencil1 stencil2.
(Stencil sh e1 stencil1, Stencil sh e2 stencil2)
=> UnscopedAcc (Array sh e1){-dummy-}
-> UnscopedAcc (Array sh e2){-dummy-}
-> (stencil1 -> stencil2 -> RootExp e3)
-> (stencil1 -> stencil2 -> ScopedExp e3, NodeCounts)
scopesStencil2 _ _ stencilFun = (\_ _ -> body, counts)
where
(body, counts) = scopesExp (stencilFun undefined undefined)
-- Determine the scopes of all variables in a root scalar expression. Scalar
-- node counts belonging to free variables of the expression are bound into
-- the initial environment of the result; only array ('Acc') node counts are
-- propagated upward.
determineScopesExp
    :: Config
    -> OccMap Acc
    -> RootExp t
    -> (ScopedExp t, NodeCounts)          -- Root (closed) expression plus Acc node counts
determineScopesExp config accOccMap (RootExp expOccMap exp@(UnscopedExp fvs _))
  = let
        ((ScopedExp [] expWithScopes), (nodeCounts,graph)) = determineScopesSharingExp config accOccMap expOccMap exp
        -- Scalar counts come first in 'nodeCounts'; split them off from the
        -- array counts that still need binding further out.
        (expCounts, accCounts) = break isAccNodeCount nodeCounts

        isAccNodeCount AccNodeCount{} = True
        isAccNodeCount _              = False
    in
    (ScopedExp (buildInitialEnvExp fvs [se | ExpNodeCount se _ <- expCounts]) expWithScopes, cleanCounts (accCounts,graph))
-- Determine the scopes of all variables within a scalar expression, floating
-- shared subterms out as 'NodeCounts' and injecting 'LetSharing' bindings at
-- the node where a subterm's count is complete.
determineScopesSharingExp
    :: Config
    -> OccMap Acc
    -> OccMap Exp
    -> UnscopedExp t
    -> (ScopedExp t, NodeCounts)
determineScopesSharingExp config accOccMap expOccMap = scopesExp
  where
    scopesAcc :: UnscopedAcc a -> (ScopedAcc a, NodeCounts)
    scopesAcc = determineScopesSharingAcc config accOccMap

    -- Scope analysis of a scalar function: 'Tag' counts whose tag is among
    -- the function's free variables are bound here into the body's initial
    -- environment; all other counts propagate outward.
    scopesFun1 :: (Exp a -> UnscopedExp b) -> (Exp a -> ScopedExp b, NodeCounts)
    scopesFun1 f = tracePure ("LAMBDA " ++ (show ssa)) (show counts) (const (ScopedExp ssa body'), (counts',graph))
      where
        body@(UnscopedExp fvs _) = f undefined
        ((ScopedExp [] body'), (counts, graph)) = scopesExp body
        ssa = buildInitialEnvExp fvs [se | ExpNodeCount se _ <- freeCounts]
        (freeCounts, counts') = partition isBoundHere counts

        isBoundHere (ExpNodeCount (StableSharingExp _ (ExpSharing _ (Tag i))) _) = i `elem` fvs
        isBoundHere _ = False

    scopesExp :: forall t. UnscopedExp t -> (ScopedExp t, NodeCounts)
    scopesExp (UnscopedExp _ (LetSharing _ _))
      = $internalError "determineScopesSharingExp: scopesExp" "unexpected 'LetSharing'"
    scopesExp (UnscopedExp _ (VarSharing sn))
      = (ScopedExp [] (VarSharing sn), StableSharingExp sn (VarSharing sn) `insertExpNode` noNodeCounts)
    scopesExp (UnscopedExp _ (ExpSharing sn pexp))
      = case pexp of
          Tag i             -> reconstruct (Tag i) noNodeCounts
          Const c           -> reconstruct (Const c) noNodeCounts
          Tuple tup         -> let (tup', accCount) = travTup tup
                               in
                               reconstruct (Tuple tup') accCount
          Prj i e           -> travE1 (Prj i) e
          IndexNil          -> reconstruct IndexNil noNodeCounts
          IndexCons ix i    -> travE2 IndexCons ix i
          IndexHead i       -> travE1 IndexHead i
          IndexTail ix      -> travE1 IndexTail ix
          IndexAny          -> reconstruct IndexAny noNodeCounts
          ToIndex sh ix     -> travE2 ToIndex sh ix
          FromIndex sh e    -> travE2 FromIndex sh e
          Cond e1 e2 e3     -> travE3 Cond e1 e2 e3
          While p it i      -> let
                                 (p' , accCount1) = scopesFun1 p
                                 (it', accCount2) = scopesFun1 it
                                 (i' , accCount3) = scopesExp i
                               in reconstruct (While p' it' i') (accCount1 +++ accCount2 +++ accCount3)
          PrimConst c       -> reconstruct (PrimConst c) noNodeCounts
          PrimApp p e       -> travE1 (PrimApp p) e
          Index a e         -> travAE Index a e
          LinearIndex a e   -> travAE LinearIndex a e
          Shape a           -> travA Shape a
          ShapeSize e       -> travE1 ShapeSize e
          Intersect sh1 sh2 -> travE2 Intersect sh1 sh2
          Foreign ff f e    -> travE1 (Foreign ff f) e
      where
        travTup :: Tuple.Tuple UnscopedExp tup -> (Tuple.Tuple ScopedExp tup, NodeCounts)
        travTup NilTup          = (NilTup, noNodeCounts)
        travTup (SnocTup tup e) = let
                                    (tup', accCountT) = travTup tup
                                    (e'  , accCountE) = scopesExp e
                                  in
                                  (SnocTup tup' e', accCountT +++ accCountE)

        travE1 :: (ScopedExp a -> PreExp ScopedAcc ScopedExp t) -> UnscopedExp a
               -> (ScopedExp t, NodeCounts)
        travE1 c e = reconstruct (c e') accCount
          where
            (e', accCount) = scopesExp e

        travE2 :: (ScopedExp a -> ScopedExp b -> PreExp ScopedAcc ScopedExp t)
               -> UnscopedExp a
               -> UnscopedExp b
               -> (ScopedExp t, NodeCounts)
        travE2 c e1 e2 = reconstruct (c e1' e2') (accCount1 +++ accCount2)
          where
            (e1', accCount1) = scopesExp e1
            (e2', accCount2) = scopesExp e2

        travE3 :: (ScopedExp a -> ScopedExp b -> ScopedExp c -> PreExp ScopedAcc ScopedExp t)
               -> UnscopedExp a
               -> UnscopedExp b
               -> UnscopedExp c
               -> (ScopedExp t, NodeCounts)
        travE3 c e1 e2 e3 = reconstruct (c e1' e2' e3') (accCount1 +++ accCount2 +++ accCount3)
          where
            (e1', accCount1) = scopesExp e1
            (e2', accCount2) = scopesExp e2
            (e3', accCount3) = scopesExp e3

        travA :: (ScopedAcc a -> PreExp ScopedAcc ScopedExp t) -> UnscopedAcc a
              -> (ScopedExp t, NodeCounts)
        travA c acc = maybeFloatOutAcc c acc' accCount
          where
            (acc', accCount) = scopesAcc acc

        travAE :: (ScopedAcc a -> ScopedExp b -> PreExp ScopedAcc ScopedExp t)
               -> UnscopedAcc a
               -> UnscopedExp b
               -> (ScopedExp t, NodeCounts)
        travAE c acc e = maybeFloatOutAcc (`c` e') acc' (accCountA +++ accCountE)
          where
            (acc', accCountA) = scopesAcc acc
            (e'  , accCountE) = scopesExp e

        -- If enabled in the configuration, replace an embedded array
        -- computation by a variable and float its definition out as a node
        -- count; an 'AvarSharing' is already a variable, so nothing to do.
        maybeFloatOutAcc :: (ScopedAcc a -> PreExp ScopedAcc ScopedExp t)
                         -> ScopedAcc a
                         -> NodeCounts
                         -> (ScopedExp t, NodeCounts)
        maybeFloatOutAcc c acc@(ScopedAcc _ (AvarSharing _)) accCount        -- nothing to float out
          = reconstruct (c acc) accCount
        maybeFloatOutAcc c acc accCount
          | floatOutAcc config = reconstruct (c var) ((stableAcc `insertAccNode` noNodeCounts) +++ accCount)
          | otherwise          = reconstruct (c acc) accCount
          where
            (var, stableAcc) = abstract acc (\(ScopedAcc _ s) -> s)

        -- Turn an array computation into a variable, returning the variable
        -- together with the computation it stands for (re-applying any
        -- 'AletSharing' bindings that were peeled off on the way down).
        abstract :: ScopedAcc a -> (ScopedAcc a -> SharingAcc ScopedAcc ScopedExp a)
                 -> (ScopedAcc a, StableSharingAcc)
        abstract (ScopedAcc _ (AvarSharing _))        _    = $internalError "sharingAccToVar" "AvarSharing"
        abstract (ScopedAcc ssa (AletSharing sa acc)) lets = abstract acc (lets . (\x -> ScopedAcc ssa (AletSharing sa x)))
        abstract acc@(ScopedAcc ssa (AccSharing sn _)) lets = (ScopedAcc ssa (AvarSharing sn), StableSharingAcc sn (lets acc))

        -- Occurrence count of the currently processed node
        expOccCount = let StableNameHeight sn' _ = sn
                      in
                      lookupWithASTName expOccMap (StableASTName sn')

        -- Reconstruct the current tree node.
        --
        -- * If the current node is being shared ('expOccCount > 1'), replace it by a 'VarSharing'
        --   node and float the shared subtree out wrapped in a 'NodeCounts' value.
        -- * If the current node is not shared, reconstruct it in place.
        -- * Special case for free variables ('Tag'): Replace the tree by a sharing variable and
        --   float the 'Tag' out in a 'NodeCounts' value.  This is independent of the number of
        --   occurrences.
        --
        -- In either case, any completed 'NodeCounts' are injected as bindings using 'LetSharing'
        -- node.
        --
        reconstruct :: PreExp ScopedAcc ScopedExp t -> NodeCounts
                    -> (ScopedExp t, NodeCounts)
        reconstruct newExp@(Tag _) _subCount
          -- free variable => replace by a sharing variable regardless of the number of
          -- occurrences
          = let thisCount = StableSharingExp sn (ExpSharing sn newExp) `insertExpNode` noNodeCounts
            in
            tracePure "FREE" (show thisCount)
              (ScopedExp [] (VarSharing sn), thisCount)
        reconstruct newExp subCount
          -- shared subtree => replace by a sharing variable (if 'recoverExpSharing' enabled)
          | expOccCount > 1 && recoverExpSharing config
          = let allCount = StableSharingExp sn sharingExp `insertExpNode` newCount
            in
            tracePure ("SHARED" ++ completed) (show allCount)
              (ScopedExp [] (VarSharing sn), allCount)
          -- neither shared nor free variable => leave it as it is
          | otherwise
          = tracePure ("Normal" ++ completed) (show newCount)
              (ScopedExp [] sharingExp, newCount)
          where
            -- Determine the bindings that need to be attached to the current node...
            (newCount, bindHere) = filterCompleted subCount

            -- ...and wrap them in 'LetSharing' constructors
            lets       = foldl (flip (.)) id . map (\x y -> LetSharing x (ScopedExp [] y)) $ bindHere
            sharingExp = lets $ ExpSharing sn newExp

            -- trace support
            completed | null bindHere = ""
                      | otherwise     = "(" ++ show (length bindHere) ++ " lets)"

        -- Extract *leading* nodes that have a complete node count (i.e., their node count is equal
        -- to the number of occurrences of that node in the overall expression).
        --
        -- Nodes with a completed node count should be let bound at the currently processed node.
        --
        -- NB: Only extract leading nodes (i.e., the longest run at the *front* of the list that is
        --     complete).  Otherwise, we would let-bind subterms before their parents, which leads
        --     to scope errors.
        --
        filterCompleted :: NodeCounts -> (NodeCounts, [StableSharingExp])
        filterCompleted (ns,graph)
          = let bindable       = map (isBindable bindable (map nodeName ns)) ns
                (bind, unbind) = partition fst $ zip bindable ns
            in ((map snd unbind, graph), [se | ExpNodeCount se _ <- map snd bind])
          where
            -- a node is not yet complete while the node count 'n' is below the overall number
            -- of occurrences for that node in the whole program, with the exception that free
            -- variables are never complete
            isCompleted nc@(ExpNodeCount sa n) | not . isFreeVar $ nc = lookupWithSharingExp expOccMap sa == n
            isCompleted _                                             = False

            -- a node may be bound here only if it is complete and every term
            -- containing it (its super terms) is bindable as well
            isBindable :: [Bool] -> [NodeName] -> NodeCount -> Bool
            isBindable bindable nodes nc@(ExpNodeCount _ _) =
              let superTerms = Set.toList $ graph Map.! nodeName nc
                  unbound    = mapMaybe (`elemIndex` nodes) superTerms
              in  isCompleted nc
               && all (bindable !!) unbound
            isBindable _ _ (AccNodeCount _ _) = False
-- |Recover sharing information and annotate the HOAS AST with variable and let binding
-- annotations. The first argument determines whether array computations are floated out of
-- expressions irrespective of whether they are shared or not — 'True' implies floating them out.
--
-- Also returns the 'StableSharingAcc's of all 'Atag' leaves in environment order — they represent
-- the free variables of the AST.
--
-- NB: Strictly speaking, this function is not deterministic, as it uses stable pointers to
-- determine the sharing of subterms. The stable pointer API does not guarantee its
-- completeness; i.e., it may miss some equalities, which implies that we may fail to discover
-- some sharing. However, sharing does not affect the denotational meaning of an array
-- computation; hence, we do not compromise denotational correctness.
--
-- There is one caveat: We currently rely on the 'Atag' and 'Tag' leaves representing free
-- variables to be shared if any of them is used more than once. If one is duplicated, the
-- environment for de Bruijn conversion will have a duplicate entry, and hence, be of the wrong
-- size, which is fatal. (The 'buildInitialEnv*' functions will already bail out.)
--
recoverSharingAcc
    :: Typeable a
    => Config
    -> Level            -- The level of currently bound array variables
    -> [Level]          -- The tags of newly introduced free array variables
    -> Acc a
    -> (ScopedAcc a, [StableSharingAcc])
-- NOINLINE is essential: the body relies on 'unsafePerformIO' for stable
-- names, and inlining could duplicate the effect and destroy sharing.
{-# NOINLINE recoverSharingAcc #-}
recoverSharingAcc config alvl avars acc
  = let (acc', occMap)
          = unsafePerformIO          -- to enable stable pointers; this is safe as explained above
          $ makeOccMapAcc config alvl acc
    in
    determineScopesAcc config avars occMap acc'
-- Recover sharing for a standalone scalar expression; the same caveats as for
-- array computations (see the comment block above) apply.
recoverSharingExp
    :: Typeable e
    => Config
    -> Level            -- The level of currently bound scalar variables
    -> [Level]          -- The tags of newly introduced free scalar variables
    -> Exp e
    -> (ScopedExp e, [StableSharingExp])
-- NOINLINE protects the 'unsafePerformIO' below from being duplicated.
{-# NOINLINE recoverSharingExp #-}
recoverSharingExp config lvl fvar exp
  = let
        -- Build the occurrence maps impurely (stable names require IO).
        (rootExp, accOccMap) = unsafePerformIO $ do
          accOccMap <- newASTHashTable
          (exp', _) <- makeOccMapRootExp config accOccMap lvl fvar exp
          frozenAccOccMap <- freezeOccMap accOccMap
          return (exp', frozenAccOccMap)

        (ScopedExp sse sharingExp, _) =
          determineScopesExp config accOccMap rootExp
    in
    -- The initial environment 'sse' is returned separately as the list of
    -- free-variable sharing entries.
    (ScopedExp [] sharingExp, sse)
-- Debugging
-- ---------
-- | Emit one sharing-recovery trace line of the form @header: msg@
-- (only when the 'Debug.dump_sharing' flag is on).
traceLine :: String -> String -> IO ()
traceLine header msg =
  Debug.traceMessage Debug.dump_sharing (header ++ ": " ++ msg)
-- | Emit a multi-line trace chunk: the header on its own line, the message
-- indented underneath (gated on 'Debug.dump_sharing').
traceChunk :: String -> String -> IO ()
traceChunk header msg =
  Debug.traceMessage Debug.dump_sharing (header ++ "\n  " ++ msg)
-- | Pure variant of 'traceLine': attach a trace message to a value without
-- changing it.
tracePure :: String -> String -> a -> a
tracePure header msg x =
  Debug.tracePure Debug.dump_sharing (header ++ ": " ++ msg) x
|
kumasento/accelerate
|
Data/Array/Accelerate/Trafo/Sharing.hs
|
bsd-3-clause
| 103,177
| 0
| 25
| 34,159
| 25,126
| 12,813
| 12,313
| 1,413
| 36
|
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE ExplicitForAll #-}
--{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RankNTypes #-}
--{-# LANGUAGE RebindableSyntax #-}
--{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedLists #-}
--{-# LANGUAGE NamedFieldPuns #-}
module Test.FVT (testFVT) where
import Prelude.Extended
-- import Control.Monad.Eff (Eff)
-- import Control.Monad.Eff.Console (CONSOLE, log, logShow)
-- import Control.Monad.Eff.Random ( RANDOM )
import Data.Monoid ( mempty )
-- import Data.Tuple ( Tuple(..) )
-- import Data.Array ( length, zip, foldl )
import Data.Foldable (sum, traverse_)
import FV.Types
( VHMeas, HMeas, QMeas
, XMeas, Prong (..), Chi2 (Chi2)
, vertex, helices, hFilter, fromHMeas, fromQMeas, vBlowup, distance, invMass
)
import FV.Fit ( fit )
-- | One-line rendering of a helix's momentum parameters.
showMomentum :: HMeas -> String
showMomentum h = "pt,pz,fi,E ->" <> show (fromHMeas h)
-- | One-line rendering of a raw helix measurement.
showHelix :: HMeas -> String
showHelix h = "Helix ->" <> show h
-- | Render a fitted 'Prong': total chi2 and ndof, distance of the fitted
-- vertex from the origin, the per-track chi2 list, and the invariant mass.
showProng :: Prong -> String
showProng (Prong {nProng= n, fitVertex= v, fitMomenta= ql, fitChi2s= cl}) =
  let
    showCl :: String -> List Chi2 -> String
    showCl = foldl (\s (Chi2 x) -> s <> to1fix x)
    -- Irrefutable binding: relies on 'sum' over 'Chi2' yielding a 'Chi2'.
    Chi2 chi2tot = sum cl
    sc = "chi2tot ->" <> to1fix chi2tot <> ", ndof " <> show (n*2)
    -- distance of fitted vertex from the origin ('mempty' vertex)
    sd = ", r ->" <> (show $ distance v mempty)
    scl = showCl ", chi2s ->" cl
    sm = ", Mass ->" <> show (invMass (map fromQMeas ql))
  in sc <> sd <> scl <> sm
-- | Vertex-fit smoke test: dump every helix and its momentum, run the
-- detailed diagnostics in 'doFitTest', then fit the subset of tracks
-- selected by 'l5' after blowing up the vertex covariance by 10000.
testFVT :: List Int -> VHMeas -> IO ()
testFVT l5 vm = do
  let hel = helices vm
  traverse_ (putStrLn <<< showHelix) hel
  traverse_ (putStrLn <<< showMomentum) hel
  doFitTest vm l5
  putStrLn $ showProng <<< fit <<< hFilter l5 <<< vBlowup 10000.0 $ vm
  pure ()
-- | Detailed fit diagnostics: print the initial vertex, invariant masses
-- before the fit (all tracks, then the 'l5' subset), the fitted vertex with
-- per-track chi2s, and finally a refit restricted to the 'l5' tracks.
doFitTest :: VHMeas
          -> List Int
          -> IO ()
doFitTest vm' l5 = do
  -- inflate the vertex covariance so the fit is driven by the tracks
  let vm = vBlowup 10000.0 vm'
  let showLen xs = show $ length xs
      showQChi2 :: (QMeas, Chi2) -> String
      showQChi2 (qm, (Chi2 chi2)) = "q"
        <> " chi2 ->" <> to1fix chi2
        <> " pt,pz,fi,E ->"
        <> show qm

  putStrLn $ "initial vertex position -> " <> show ((vertex vm)::XMeas)

  let pl = map (fromQMeas <<< fromHMeas) $ helices vm
  putStrLn $ "Inv Mass " <> showLen pl <> " helix" <> show (invMass pl)
  let pl5 = map (fromQMeas <<< fromHMeas) (helices <<< hFilter l5 $ vm)
  putStrLn $ "Inv Mass " <> showLen pl5 <> " helix" <> show (invMass pl5)

  putStrLn "Fitting Vertex --------------------"
  let -- pr = fit vm
      Prong {fitVertex= vf, fitMomenta= ql, fitChi2s= cl} = fit vm
  putStrLn $ "Fitted vertex -> " <> show vf
  traverse_ (putStrLn <<< showQChi2) $ zip ql cl
  putStrLn $ "Inv Mass " <> show (length ql) <> " fit"
    <> show (invMass (map fromQMeas ql))
  let m5 = invMass <<< map fromQMeas <<< iflt l5 $ ql
  putStrLn $ "Inv Mass " <> show (length l5) <> " fit" <> show m5

  putStrLn $ "Refitting Vertex-----------------"
  let Prong {fitVertex=fv, fitMomenta=fqs, fitChi2s=fcs, nProng=np} = fit <<< hFilter l5 $ vm
  putStrLn $ "Refitted vertex -> " <> show fv
  traverse_ (putStrLn <<< showQChi2) $ zip fqs fcs
  putStrLn $ "Inv Mass " <> show np <> " refit"
    <> (show <<< invMass <<< map fromQMeas $ fqs)
  putStrLn $ "Final vertex -> " <> show fv
  putStrLn $ "end of doFitTest------------------------------------------"
|
LATBauerdick/fv.hs
|
src/Test/FVT.hs
|
bsd-3-clause
| 3,635
| 0
| 14
| 957
| 1,112
| 563
| 549
| 72
| 1
|
module TeleHash.Packet
(
NetworkPacket(..)
, Packet(..)
, Head (..)
, Body(..)
, unBody
, newPacket
, headLen
, bodyLen
, packetLen
, networkPacketLen
, LinePacket(..)
, unLP
, toNetworkPacket
, fromNetworkPacket
, toLinePacket
, fromLinePacket
-- debug
, p1, p2
, myencode
) where
import Crypto.Number.Serialize
import Data.Binary
import Data.Bits
import Data.Binary.Get
import Data.Binary.Put
import TeleHash.Convert
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as BL
-- | A network level packet.
-- See https://github.com/telehash/telehash.org/blob/master/network.md
-- This is either an `open` or a 'line' packet
-- The open is coded as 0x00 0x01 followed by the crypto set id, followed by the body
-- The line is coded as 0x00 0x00 followed by the encrypted line packet
data NetworkPacket = OpenPacket Word8 BC.ByteString -- ^ crypto-set id and open payload
                   | LinePacket BC.ByteString       -- ^ encrypted line payload
  deriving (Eq,Show)
-- | Wire format: two discriminator bytes (0x00 0x01 = open, 0x00 0x00 = line)
-- followed by the raw payload (and, for open packets, the crypto-set byte).
-- This matches 'toNetworkPacket', which writes the payload without any
-- length prefix.
instance Binary NetworkPacket where
  put (OpenPacket cs bs) = do put (0::Word8)
                              put (1::Word8)
                              put (cs::Word8)
                              -- raw bytes, no length prefix: the previous
                              -- 'put bs' used the Binary ByteString instance,
                              -- which writes a length prefix that 'get'
                              -- (and 'toNetworkPacket') never produces/reads
                              putByteString bs
  put (LinePacket bs)    = do put (0::Word8)
                              put (0::Word8)
                              putByteString bs
  get = do h <- get
           case h::Word16 of
             0 -> do pb <- getRemainingLazyByteString
                     return (LinePacket $ lbsTocbs pb)
             1 -> do cs <- get
                     pb <- getRemainingLazyByteString
                     return (OpenPacket cs (lbsTocbs pb))
             -- previously non-exhaustive: corrupt input crashed with an
             -- opaque pattern-match failure instead of a parse error
             n -> fail ("NetworkPacket.get: unknown packet type " ++ show n)
-- ---------------------------------------------------------------------
{-
HEAD
A length of 0 means there is no HEAD included and the packet is all
binary (only BODY).
A length of 1 means there is a single byte value that is not JSON.
A length of 2+ means the HEAD must be a UTF-8 encoded JSON object or
array (not any bare string/bool/number value). If the JSON parsing
fails, the parser must return an error.
-}
-- | Packet HEAD: either absent, or a UTF-8 encoded JSON blob (see the
-- format notes in the comment block above).
data Head = HeadEmpty | HeadJson BC.ByteString
  deriving (Show,Eq)
{-
BODY
The optional BODY is always a raw binary of the remainder bytes
between the packet's total length and that of the HEAD.
Often packets are attached inside other packets as the BODY, enabling
simple packet wrapping/relaying usage patterns.
The BODY is also used as the raw content transport for channels and
any app-specific usage.
-}
-- | Raw BODY bytes: everything after the HEAD in a serialised packet.
data Body = Body BC.ByteString
  deriving Show

-- | Extract the raw bytes from a 'Body'. (Type signature added; the
-- binding previously compiled only via defaulting-free inference.)
unBody :: Body -> BC.ByteString
unBody (Body b) = b
-- | A packet is carried encrypted inside the NetworkPacket
data Packet = Packet { paHead :: Head -- ^ optional JSON head
                     , paBody :: Body -- ^ raw payload bytes
                     }
  deriving Show
-- | The empty packet: no HEAD and a zero-length BODY.
newPacket :: Packet
newPacket =
  Packet { paHead = HeadEmpty
         , paBody = Body BC.empty
         }
-- | Serialised length of the HEAD: the 2-byte length prefix, plus the JSON
-- bytes when a head is present.
headLen :: Packet -> Int
headLen (Packet HeadEmpty _) = 2
-- headLen (Packet (HeadByte _) _) = 3
headLen (Packet (HeadJson bs) _) = 2 + (fromIntegral $ BC.length bs)
-- | Number of bytes in the packet BODY.
bodyLen :: Packet -> Int
bodyLen (Packet _ (Body payload)) = fromIntegral (BC.length payload)
-- | Total serialised length of a packet: HEAD bytes plus BODY bytes.
packetLen :: Packet -> Int
packetLen pkt = headLen pkt + bodyLen pkt
-- | On-the-wire length of a network packet: two discriminator bytes, plus
-- the crypto-set byte for open packets, plus the payload.
networkPacketLen :: NetworkPacket -> Int
networkPacketLen pkt = case pkt of
  OpenPacket _ payload -> 3 + BC.length payload
  LinePacket payload   -> BC.length payload
-- ---------------------------------------------------------------------
instance Binary Packet where
  -- HEAD first, then BODY, each via its own 'Binary' instance. BODY has no
  -- length prefix, so it must always come last.
  put p = do put (paHead p)
             put (paBody p)
  get = do h <- get
           -- pb <- getRemainingLazyByteString
           -- return (newPacket { paHead = h, paBody = lbsTocbs pb})
           pb <- get
           return (newPacket { paHead = h, paBody = pb})
-- ---------------------------------------------------------------------
instance Binary Head where
  -- HEAD wire format: big-endian 16-bit length, then the JSON bytes; a zero
  -- length means no head at all.
  put HeadEmpty = put (0 :: Word16)
  -- put (HeadByte b) = do put (1 :: Word16)
  --                       put b
  -- NOTE(review): 'put' on each Char uses the Binary Char encoding, which is
  -- multi-byte for code points >= 0x80 and would then disagree with the
  -- byte-count prefix — confirm heads are ASCII-only, or see 'myencode'.
  put (HeadJson x) = do put ((fromIntegral $ BC.length x) :: Word16)
                        mapM_ put $ BC.unpack x
  get = do hb <- get :: Get Word16
           h <- case hb of
             0 -> return HeadEmpty
             -- 1 -> do b <- get
             --         return (HeadByte b)
             x -> do b <- getLazyByteString (fromIntegral x)
                     return (HeadJson (lbsTocbs b))
           return h
instance Binary Body where
  -- NOTE(review): 'put' encodes each Char via the Binary Char instance
  -- (multi-byte above 0x7F) — confirm bodies are ASCII/raw-octet only.
  put (Body bs) = mapM_ put $ BC.unpack bs
  -- The BODY is simply all remaining input bytes.
  get = do bs <- getRemainingLazyByteString
           return (Body $ lbsTocbs bs)
-- ---------------------------------------------------------------------
-- | Fully serialised packet bytes, ready to be sent on the wire.
data LinePacket = LP BC.ByteString
  deriving Show
-- | Unwrap the raw bytes of a serialised packet.
unLP :: LinePacket -> BC.ByteString
unLP (LP payload) = payload
-- ---------------------------------------------------------------------
-- | Serialise a 'NetworkPacket': the two discriminator bytes (and crypto-set
-- byte for open packets) followed by the raw payload.
toNetworkPacket :: NetworkPacket -> LinePacket
toNetworkPacket pkt = case pkt of
  OpenPacket cs body -> LP (BC.append (lbsTocbs (BL.pack [0, 1, cs])) body)
  LinePacket body    -> LP (BC.append (lbsTocbs (BL.pack [0, 0])) body)
-- | Parse a raw network packet. NOTE(review): despite the 'Maybe', a failed
-- 'decode' throws rather than returning 'Nothing' — confirm callers can
-- tolerate the exception.
fromNetworkPacket :: LinePacket -> Maybe NetworkPacket
fromNetworkPacket (LP bs) = Just $ decode (cbsTolbs bs)
-- ---------------------------------------------------------------------
-- | Serialise a 'Packet': the encoded HEAD followed directly by the raw
-- BODY bytes.
toLinePacket :: Packet -> LinePacket
toLinePacket (Packet hd (Body payload)) = LP (myencode hd `BC.append` payload)
-- | Encode a packet HEAD as a big-endian 2-byte length prefix followed by
-- the raw JSON bytes (an empty head is just the two zero bytes).
myencode :: Head -> BC.ByteString
myencode (HeadEmpty) = lbsTocbs $ BL.pack [0,0]
-- myencode (HeadByte b) = BL.pack [0,1,b]
myencode (HeadJson x) = BC.append (bb) x
  where
    xlen :: Integer
    xlen = fromIntegral (BC.length x)
    -- Partial: 'i2ospOf 2' yields 'Nothing' (so this match fails at runtime)
    -- when the JSON head is 65536 bytes or longer.
    Just bb = i2ospOf 2 xlen
--
-- ---------------------------------------------------------------------
-- | Deserialise a 'Packet' from raw bytes.
-- Note: this will throw an exception if the decode fails — the 'Maybe'
-- never actually carries 'Nothing'.
fromLinePacket :: LinePacket -> Maybe Packet
fromLinePacket (LP bs) = Just $ decode (cbsTolbs bs)
-- ---------------------------------------------------------------------
{-
Examples
First packet out
This encodes a startup packet of
{"type":"seek","c":0,"seek":"89a4cbc6c27eb913c1bcaf06bac2d8b872c7cbef626b35b6d7eaf993590d37de"}
-}
p1 :: [Word8]
p1 =
[
0x00, 0x01, -- head length = 1 (BE 16 bit)
0x1a, -- Single byte head means 'open' packet, using crypto "1a"
-- HMAC (20 bytes)
0x70, 0xf0, 0xd6, 0x5a, 0xc1, 0xae, 0xae, 0x58, 0xe4, 0xaf,
0x0e, 0x58, 0x27, 0xa4, 0x4b, 0x4b, 0x0b, 0x0d, 0x39, 0x41,
-- Public Key (40 bytes)
0x15, 0x97, 0xb6, 0x35, 0x55, 0xf0, 0xf0, 0x99, 0x48, 0xce,
0x81, 0xf5, 0xba, 0xd9, 0xdc, 0x3b, 0x05, 0xc5, 0x81, 0xce,
0x2e, 0x6d, 0xc9, 0x1a, 0xb9, 0x87, 0xdc, 0xd9, 0x13, 0x44,
0x37, 0xb0, 0x68, 0x25, 0x62, 0xac, 0xc7, 0x07, 0x1e, 0x27,
-- inner packet, AES encrypted
0xff,
0xb5, 0x15, 0x64, 0x2e, 0x1a, 0x38, 0xaa, 0x33, 0xe2, 0xaf, 0x1d, 0x74, 0x46, 0xef, 0x89, 0xdc,
0xa8, 0x15, 0x66, 0x7a, 0x5f, 0xa6, 0x45, 0x9f, 0xbb, 0xdb, 0x7a, 0x27, 0xb5, 0xa9, 0x48, 0xff,
0xc3, 0xf6, 0xc3, 0x1e, 0xf6, 0x83, 0xf5, 0x1e, 0x06, 0xb4, 0xb3, 0x13, 0xfc, 0x57, 0xa1, 0x2a,
0xdf, 0x96, 0xdf, 0x90, 0x2d, 0x14, 0x24, 0x11, 0xa6, 0x01, 0x4b, 0xed, 0xf1, 0xd1, 0x32, 0x88,
0x15, 0xb4, 0x25, 0x0f, 0xa8, 0xda, 0x19, 0xc4, 0xb1, 0xf3, 0xe3, 0x4c, 0x31, 0x4d, 0xfe, 0x36,
0xcf, 0x76, 0xc8, 0x46, 0x04, 0x30, 0xd2, 0x96, 0x46, 0xec, 0x45, 0xd3, 0x06, 0xb7, 0x92, 0x61,
0xe8, 0xcf, 0x57, 0xd7, 0x20, 0xc7, 0xf4, 0xcb, 0xab, 0x66, 0x73, 0x39, 0xc5, 0xe4, 0xb4, 0x11,
0x34, 0xd3, 0x45, 0x4f, 0x06, 0x4e, 0x75, 0xa1, 0xa6, 0x33, 0x91, 0x71, 0x49, 0xeb, 0x6c, 0xd9,
0x6b, 0xf3, 0x8b, 0x3f, 0x96, 0xe1, 0x2e, 0xad, 0xbc, 0xf0, 0x81, 0x60, 0xae, 0x3d, 0x7d, 0x59,
0xad, 0x1a, 0x0f, 0xdb, 0x1f, 0xa7, 0x6b, 0x36, 0x24, 0xfc, 0x6a, 0x0c, 0x15, 0xe9, 0x32, 0x64,
0xe4, 0x55, 0x3f, 0x19, 0xd9, 0x20, 0x4d, 0x80, 0x27, 0x50, 0x68, 0x77, 0x32, 0x27, 0x34, 0x66,
0xc2, 0x76, 0x02, 0x8f, 0x14, 0xda, 0xe8, 0xfb, 0x89, 0x28, 0x27, 0xfd, 0xbd, 0x8f, 0x41, 0x3f,
0x71, 0xaa, 0x50, 0xca, 0x21, 0x98, 0x0e, 0x44, 0x69, 0x49, 0xc7, 0x74, 0xf0, 0xa0, 0xc9, 0x0b,
0x30, 0x8f, 0x99, 0x60, 0x87, 0xec, 0x35, 0x25, 0x0d, 0xeb, 0xa5, 0x0a, 0x29, 0xec, 0x22, 0x13,
0xae, 0xae, 0xdb, 0x32, 0xf9
]
{-
Second packet out
-}
p2 :: [Word8]
p2 =
[
0x00, 0x00, -- head length 0
0x16, 0x60, 0xef, 0x04, 0x2e, 0x32, 0x1e, 0xfb, 0x11, 0x0d, 0xb8, 0x9f, 0xe7, 0x05,
0x72, 0xf6, 0x06, 0x48, 0xe2, 0x9c, 0x00, 0x00, 0x00, 0x00, 0x08, 0xf2, 0x87, 0x9e, 0xb5, 0xb2,
0x4c, 0x3f, 0xf3, 0xca, 0x4c, 0xa3, 0x18, 0xdc, 0x16, 0xac, 0x33, 0x94, 0x9a, 0xaa, 0xcc, 0x01,
0xdf, 0xb8, 0x16, 0x7f, 0x48, 0xe1, 0x4c, 0xe4, 0x45, 0xa8, 0x4b, 0x61, 0xfa, 0x1e, 0xdb, 0x99,
0xee, 0x83, 0xdb, 0xb0, 0xbf, 0x83, 0x33, 0x72, 0xbc, 0xf0, 0xbc, 0xfd, 0xda, 0x4a, 0x5c, 0x40,
0x9d, 0xb6, 0xe1, 0x33, 0x38, 0xc3, 0x9a, 0x54, 0x3e, 0x9e, 0xf6, 0xbe, 0x11, 0x39, 0x2c, 0x0f,
0x57, 0xb0, 0xc9, 0x27, 0x97, 0x20, 0x8e, 0xf5, 0xf2, 0x38, 0x0a, 0xc1, 0xb9, 0x95, 0xf1, 0xe4,
0x68, 0x34, 0xd0, 0xc8, 0x55, 0x9b, 0x8a, 0x87, 0xa5, 0xc5, 0xe3
]
-- | Debug helper: decode the sample open packet 'p1' and print it together
-- with its body length. (Signature added; unused head binding dropped.)
testp1 :: IO ()
testp1 = do
  let bytes = BL.pack p1
  let pkt@(Packet _ (Body b)) = decode bytes :: Packet
  print pkt
  print (BC.length b)
-- | Debug helper: decode the sample line packet 'p2' and print it together
-- with its body length. (Signature added; unused head binding dropped.)
testp2 :: IO ()
testp2 = do
  let bytes = BL.pack p2
  let pkt@(Packet _ (Body b)) = decode bytes :: Packet
  print pkt
  print (BC.length b)
-- ---------------------------------------------------------------------
-- Received open packet
-- RECV from IPP "10.0.0.42:42424":"00011adee339dc7ca4227333401b8d2dc460dfa78317b6c5dea168b4679c59fbc93a2267e1c2b7cf4bfe832f0fb07221f8654a758d6a63200979f9367e046379aa1f4d27f74be6ae9367f4ff655820f2e0dedea70c6a8e32084180a464993e625803fa9774ac99a50c2e63fa637a07a2ae52860a1961f630c51d4f6779c7409c80497f52c91c69ed812261f2dcb5c1675b24d978a94fb55d9d55ecb772b542aa21c32d9dc704374dcbf53b32579e68cc3a01da6f9fd44ee1a1753919c50a09790c168d2a22069e0bd1f7e7db5410ec540c90f893956ddbdf01fc9ae5a7c82fc832ae72f846a2b1dc3a911dc13aa641fcf83f68ed1d3e6f445f5b82814649b9a127c7ad6fd2e3a8d5b986852c8bca221931e7a09ea1a2e7aff7ea090fdc8eebdd8664bb926909c396c3f7dd01ac38819a6cf7b947a855f8bdc87593e20bda115913056d6935b188308fad9a7873fb95395216d487cb5173a20296b86103715005e1ccbe3bcaae8ee64e4806928dd654a08ed8a7818d4eff2052aaa62c300c7661e678febaf34378a32028e0a3eea83cc87bc9c18742d4daafa3029df15030d7fc2cf916eab082e2424e4f912cadd319aaa39d6a8dc32c4282" at
b16_rx_open = "00011adee339dc7ca4227333401b8d2dc460dfa78317b6c5dea168b4679c59fbc93a2267e1c2b7cf4bfe832f0fb07221f8654a758d6a63200979f9367e046379aa1f4d27f74be6ae9367f4ff655820f2e0dedea70c6a8e32084180a464993e625803fa9774ac99a50c2e63fa637a07a2ae52860a1961f630c51d4f6779c7409c80497f52c91c69ed812261f2dcb5c1675b24d978a94fb55d9d55ecb772b542aa21c32d9dc704374dcbf53b32579e68cc3a01da6f9fd44ee1a1753919c50a09790c168d2a22069e0bd1f7e7db5410ec540c90f893956ddbdf01fc9ae5a7c82fc832ae72f846a2b1dc3a911dc13aa641fcf83f68ed1d3e6f445f5b82814649b9a127c7ad6fd2e3a8d5b986852c8bca221931e7a09ea1a2e7aff7ea090fdc8eebdd8664bb926909c396c3f7dd01ac38819a6cf7b947a855f8bdc87593e20bda115913056d6935b188308fad9a7873fb95395216d487cb5173a20296b86103715005e1ccbe3bcaae8ee64e4806928dd654a08ed8a7818d4eff2052aaa62c300c7661e678febaf34378a32028e0a3eea83cc87bc9c18742d4daafa3029df15030d7fc2cf916eab082e2424e4f912cadd319aaa39d6a8dc32c4282"
rx_open = b16ToLbs b16_rx_open
lp_rx_open = LP (lbsTocbs rx_open)
-- | Decode a hex (base16) string into a lazy ByteString; any trailing
-- undecodable input is silently dropped, as before.
b16ToLbs str = cbsTolbs decoded
  where
    (decoded, _invalid) = B16.decode (BC.pack str)
-- Debug fixture: the leading bytes of the captured open packet (2-byte
-- length prefix 0x0001 plus the single head byte 0x1a) decoded as a 'Head'.
headok :: Head
headok = decode $ b16ToLbs "00011a"
bodyok :: Body
bodyok = decode $ b16ToLbs "dee339dc7ca4227333401b8d2dc460dfa78317b6c5dea168b4679c59fbc93a2267e1c2b7cf4bfe832f0fb07221f8654a758d6a63200979f9367e046379aa1f4d27f74be6ae9367f4ff655820f2e0dedea70c6a8e32084180a464993e625803fa9774ac99a50c2e63fa637a07a2ae52860a1961f630c51d4f6779c7409c80497f52c91c69ed812261f2dcb5c1675b24d978a94fb55d9d55ecb772b542aa21c32d9dc704374dcbf53b32579e68cc3a01da6f9fd44ee1a1753919c50a09790c168d2a22069e0bd1f7e7db5410ec540c90f893956ddbdf01fc9ae5a7c82fc832ae72f846a2b1dc3a911dc13aa641fcf83f68ed1d3e6f445f5b82814649b9a127c7ad6fd2e3a8d5b986852c8bca221931e7a09ea1a2e7aff7ea090fdc8eebdd8664bb926909c396c3f7dd01ac38819a6cf7b947a855f8bdc87593e20bda115913056d6935b188308fad9a7873fb95395216d487cb5173a20296b86103715005e1ccbe3bcaae8ee64e4806928dd654a08ed8a7818d4eff2052aaa62c300c7661e678febaf34378a32028e0a3eea83cc87bc9c18742d4daafa3029df15030d7fc2cf916eab082e2424e4f912cadd319aaa39d6a8dc32c4282"
-- Debug fixture: should parse the full captured open packet.
decodeok = fromLinePacket lp_rx_open
-- Debug fixture: malformed length prefix — 'decode' throws here despite the
-- 'Maybe' result type.
decodefail = fromLinePacket (LP (lbsTocbs $ b16ToLbs "08011adee339dc7ca422"))
|
alanz/htelehash
|
src/Network/TeleHash/Old/Packet.hs
|
bsd-3-clause
| 12,400
| 0
| 18
| 2,325
| 2,944
| 1,726
| 1,218
| 167
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Network.AWS.AWSConnection
-- Copyright : (c) Greg Heartsfield 2007
-- License : BSD3
--
-- Connection and authentication info for an Amazon AWS request.
-----------------------------------------------------------------------------
module Network.AWS.AWSConnection (
-- * Constants
defaultAmazonSimpleDBHost, defaultAmazonSimpleDBPort,
-- * Function Types
amazonSimpleDBConnection, amazonConnectionFromEnv, amazonConnectionFromEnv',
-- * Data Types
AWSConnection(..)
) where
import System.Environment
-- | An Amazon Web Services connection. Everything needed to connect
-- and authenticate requests.
data AWSConnection =
    AWSConnection { awsHost :: String, -- ^ Service provider hostname
                    awsPort :: Int, -- ^ Service provider port number
                    awsAccessKey :: String, -- ^ Access Key ID
                    awsSecretKey :: String -- ^ Secret Access Key
                    -- NOTE(review): the derived 'Show' exposes the secret
                    -- key; avoid printing whole connections in logs.
                  } deriving (Show)
-- | Hostname used for connecting to Amazon's production S3 service (@s3.amazonaws.com@).
-- (Not in this module's export list; kept for parity with the SimpleDB constants.)
defaultAmazonS3Host :: String
defaultAmazonS3Host = "s3.amazonaws.com"

-- | Hostname used for connecting to Amazon's production SimpleDB service (@sdb.amazonaws.com@).
defaultAmazonSimpleDBHost :: String
defaultAmazonSimpleDBHost = "sdb.amazonaws.com"

-- | Port number used for connecting to Amazon's production S3 service (@80@).
-- (Not in this module's export list.)
defaultAmazonS3Port :: Int
defaultAmazonS3Port = 80

-- | Port number used for connecting to Amazon's production SimpleDB service (@80@).
defaultAmazonSimpleDBPort :: Int
defaultAmazonSimpleDBPort = 80
-- | Create an 'AWSConnection' to Amazon from credentials, targeting the
-- production SimpleDB host and port.
amazonSimpleDBConnection :: String        -- ^ Access Key ID
                         -> String        -- ^ Secret Access Key
                         -> AWSConnection -- ^ Connection to Amazon SimpleDB
amazonSimpleDBConnection accessKey secretKey =
  AWSConnection defaultAmazonSimpleDBHost defaultAmazonSimpleDBPort accessKey secretKey
-- | Retrieve Access and Secret keys from environment variables
-- AWS_ACCESS_KEY_ID and AWS_ACCESS_KEY_SECRET respectively, with
-- AWS_SECRET_ACCESS_KEY accepted as a fallback for the secret.
-- Either variable being undefined or empty will result in 'Nothing'.
amazonConnectionFromEnv :: IO (Maybe AWSConnection)
amazonConnectionFromEnv =
    do ak  <- getEnvKey "AWS_ACCESS_KEY_ID"
       sk0 <- getEnvKey "AWS_ACCESS_KEY_SECRET"
       sk1 <- getEnvKey "AWS_SECRET_ACCESS_KEY"
       return $ case (ak, sk0, sk1) of
                  ("",  _,  _) -> Nothing
                  ( _, "", "") -> Nothing
                  ( _, "",  _) -> Just (amazonSimpleDBConnection ak sk1)
                  ( _,  _,  _) -> Just (amazonSimpleDBConnection ak sk0)
    where -- 'lookupEnv' replaces the unqualified Prelude 'catch' (removed
          -- from modern 'base'); a missing variable yields the empty string,
          -- preserving the original behaviour.
          getEnvKey s = maybe "" id <$> lookupEnv s
-- | Like 'amazonConnectionFromEnv', but an undefined or empty variable
-- raises an error instead of returning 'Nothing'.
amazonConnectionFromEnv' :: IO AWSConnection
amazonConnectionFromEnv' = do
  mbConn <- amazonConnectionFromEnv
  maybe (error "Please define the AWS_ACCESS_KEY_ID and AWS_ACCESS_KEY_SECRET environment variables.")
        return
        mbConn
|
pheaver/hSimpleDB
|
src/Network/AWS/AWSConnection.hs
|
bsd-3-clause
| 3,306
| 0
| 13
| 713
| 403
| 235
| 168
| 40
| 4
|
-- | Placeholder module for the DataViz library; it currently exports nothing.
module Data.Viz (
) where
|
adarqui/DataViz
|
src/Data/Viz.hs
|
bsd-3-clause
| 26
| 0
| 3
| 5
| 9
| 6
| 3
| 1
| 0
|
module Test.Arbitrary where
import Control.Monad (liftM2)
import Data.String (fromString)
import Test.QuickCheck (Arbitrary (..), choose, elements, frequency, getSmall,
suchThat)
import Universum
import Toy.Base (Value (..), Var (..))
import Toy.Exp
import Toy.Lang (Stmt (..))
import qualified Toy.Lang as L
-- | Variables are single lowercase letters, keeping generated programs small
-- and collisions between generated names likely (useful for shrinking).
instance Arbitrary Var where
    arbitrary = fromString . pure <$> choose ('a', 'z')
-- | Generate a 'Value' restricted to the representable range.
-- NOTE(review): if the underlying numeric type is itself 'Bounded', the
-- 'suchThat' predicate is trivially true -- presumably 'Value' wraps a wider
-- type whose 'Bounded' instance narrows it; confirm against 'Toy.Base'.
instance Arbitrary Value where
    arbitrary = (Value <$> arbitrary) `suchThat` (\x -> minBound <= x && x <= maxBound)
-- | Expression generator: heavily weighted toward leaves (small literals and
-- reads) so that generated expression trees stay shallow; each operator node
-- recurses with weight 1.
instance Arbitrary Exp where
    arbitrary = frequency
        [ (50, ValueE . getSmall <$> arbitrary)
        , (10, pure readE)
        , (1, liftM2 (+:) arbitrary arbitrary)
        , (1, liftM2 (-:) arbitrary arbitrary)
        , (1, liftM2 (*:) arbitrary arbitrary)
        , (1, liftM2 (/:) arbitrary arbitrary)
        , (1, liftM2 (%:) arbitrary arbitrary)
        , (1, notE <$> arbitrary)
        , (1, liftM2 (&&:) arbitrary arbitrary)
        , (1, liftM2 (||:) arbitrary arbitrary)
        , (1, liftM2 (^:) arbitrary arbitrary)
        , (1, liftM2 (&:) arbitrary arbitrary)
        , (1, liftM2 (|:) arbitrary arbitrary)
        , (1, liftM2 (>:) arbitrary arbitrary)
        , (1, liftM2 (<:) arbitrary arbitrary)
        , (1, liftM2 (>=:) arbitrary arbitrary)
        , (1, liftM2 (<=:) arbitrary arbitrary)
        , (1, liftM2 (==:) arbitrary arbitrary)
        , (1, liftM2 (!=:) arbitrary arbitrary)
        ]
-- | Statement generator: weighted toward 'Skip' and sequencing so programs
-- terminate quickly.  For-loops are desugared to an initialising assignment
-- followed by a while-loop ('forLoop' below); the loop counter gets a
-- distinctive @_i@-suffixed name to avoid clobbering program variables.
instance Arbitrary Stmt where
    arbitrary = frequency
        [ (3, liftM2 (:=) arbitrary arbitrary)
        , (2, L.writeS <$> arbitrary)
        , (1, If <$> arbitrary <*> arbitrary <*> arbitrary)
        , (1, forLoop <$> (Var . toText . (:"_i") <$> choose ('a', 'z'))
                      <*> elements [0, 1] <*> arbitrary)
        , (4, Seq <$> arbitrary <*> arbitrary)
        , (8, pure Skip)
        ]
      where
        -- Desugar @for i in [0..n]@: initialise the counter, then loop while
        -- the counter is <= the bound, incrementing after the body.
        forLoop i n body = mconcat
            [ i := 0
            , L.whileS (VarE i <=: n) $ mconcat
              [ body
              , i := VarE i + 1
              ]
            ]
|
Martoon-00/toy-compiler
|
test/Test/Arbitrary.hs
|
bsd-3-clause
| 2,203
| 0
| 15
| 769
| 751
| 443
| 308
| 49
| 0
|
{-# LANGUAGE RecursiveDo #-}
-- |
-- Module : Simulation.Aivika.Branch.Internal.BR
-- Copyright : Copyright (c) 2016-2017, David Sorokin <david.sorokin@gmail.com>
-- License : BSD3
-- Maintainer : David Sorokin <david.sorokin@gmail.com>
-- Stability : experimental
-- Tested with: GHC 7.10.3
--
-- This module defines a branching computation.
--
module Simulation.Aivika.Branch.Internal.BR
(BRParams(..),
BR(..),
invokeBR,
runBR,
newBRParams,
newRootBRParams,
branchLevel) where
import Data.IORef
import Data.Maybe
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Fix
import Control.Exception (throw, catch, finally)
import Simulation.Aivika.Trans.Exception
-- | The branching computation: a reader over the current branch
-- parameters, threaded unchanged through every bind.
newtype BR m a = BR { unBR :: BRParams -> m a
                      -- ^ Unwrap the computation.
                    }
-- | The parameters of the computation.
data BRParams =
  BRParams { brId :: !Int,
             -- ^ The branch identifier (unique per generator).
             brIdGenerator :: IORef Int,
             -- ^ The generator of identifiers, shared by all branches
             -- descended from the same root.
             brLevel :: !Int,
             -- ^ The branch level (0 for the root).
             brParent :: Maybe BRParams,
             -- ^ The branch parent ('Nothing' for the root).
             brUniqueRef :: IORef ()
             -- ^ The unique reference to which
             -- the finalizers are attached to
             -- be garbage collected.
           }
-- Reader-style plumbing: every instance simply passes the same 'BRParams'
-- to each sub-computation and delegates to the underlying monad @m@.
instance Monad m => Monad (BR m) where
  {-# INLINE return #-}
  return = BR . const . return
  {-# INLINE (>>=) #-}
  (BR m) >>= k = BR $ \ps ->
    m ps >>= \a ->
    let m' = unBR (k a) in m' ps
instance Applicative m => Applicative (BR m) where
  {-# INLINE pure #-}
  pure = BR . const . pure
  {-# INLINE (<*>) #-}
  (BR f) <*> (BR m) = BR $ \ps -> f ps <*> m ps
instance Functor m => Functor (BR m) where
  {-# INLINE fmap #-}
  fmap f (BR m) = BR $ fmap f . m
instance MonadIO m => MonadIO (BR m) where
  {-# INLINE liftIO #-}
  liftIO = BR . const . liftIO
instance MonadTrans BR where
  {-# INLINE lift #-}
  lift = BR . const
-- Requires RecursiveDo: the result @a@ is fed back into @f@ lazily.
instance MonadFix m => MonadFix (BR m) where
  mfix f =
    BR $ \ps ->
    do { rec { a <- invokeBR ps (f a) }; return a }
-- Exception handling is delegated verbatim to the underlying monad.
instance MonadException m => MonadException (BR m) where
  catchComp (BR m) h = BR $ \ps ->
    catchComp (m ps) (\e -> unBR (h e) ps)
  finallyComp (BR m1) (BR m2) = BR $ \ps ->
    finallyComp (m1 ps) (m2 ps)
  throwComp e = BR $ \ps ->
    throwComp e
-- | Invoke the computation with the given branch parameters
-- (i.e. 'unBR' with the arguments flipped).
invokeBR :: BRParams -> BR m a -> m a
{-# INLINE invokeBR #-}
invokeBR ps (BR m) = m ps
-- | Run the branching computation, creating a fresh root branch for it.
runBR :: MonadIO m => BR m a -> m a
{-# INLINABLE runBR #-}
runBR m =
  do ps <- liftIO newRootBRParams
     unBR m ps
-- | Create a new child branch of the specified branch.
--
-- The identifier is drawn atomically from the shared generator, so
-- branches created concurrently still receive unique identifiers.
newBRParams :: BRParams -> IO BRParams
newBRParams ps =
  -- Renamed the binding from @id@ to avoid shadowing 'Prelude.id'.
  do newId <- atomicModifyIORef (brIdGenerator ps) $ \a ->
       let b = a + 1 in b `seq` (b, b)
     let level = 1 + brLevel ps
     uniqueRef <- newIORef ()
     -- 'brLevel' is a strict field, so constructing the record forces
     -- @level@; the original @level `seq` level@ was a no-op
     -- (@x `seq` x@ is semantically just @x@).
     return BRParams { brId = newId,
                       brIdGenerator = brIdGenerator ps,
                       brLevel = level,
                       brParent = Just ps,
                       brUniqueRef = uniqueRef }
-- | Create the parameters of a fresh root branch: identifier 0, level 0,
-- no parent, and a brand-new identifier generator.
newRootBRParams :: IO BRParams
newRootBRParams =
  do genId     <- newIORef 0
     uniqueRef <- newIORef ()
     return BRParams { brId          = 0
                     , brIdGenerator = genId
                     , brLevel       = 0
                     , brParent      = Nothing
                     , brUniqueRef   = uniqueRef
                     }
-- | Return the current branch level starting from 0.
branchLevel :: Monad m => BR m Int
{-# INLINABLE branchLevel #-}
branchLevel = BR (return . brLevel)
|
dsorokin/aivika-branches
|
Simulation/Aivika/Branch/Internal/Br.hs
|
bsd-3-clause
| 3,786
| 0
| 16
| 1,194
| 1,042
| 559
| 483
| 89
| 1
|
--
-- Copyright © 2013-2015 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | This module provides an AST for druid's query language that has ToJSON
-- instances for actually building these queries.
module Network.Druid.Query.AST
(
-- * Query AST
Query(..),
Threshold(..),
DataSourceName(..),
Granularity(..),
Filter(..),
DimensionName(..),
Aggregation(..),
PostAggregation(..),
NumericalValue(..),
ArithmeticFunction(..),
PostAggregationOrdering(..),
Interval(..),
MetricName(..),
OutputName(..),
UTCTime(..),
LimitSpec(..),
Having(..),
OrderByColumnSpec(..),
Direction(..),
Bound(..),
JS(..),
) where
import Data.Aeson
import Data.Maybe
import Data.Monoid
import Data.Scientific (Scientific (..))
import Data.String
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Clock (UTCTime (..))
import Data.Time.Format (formatTime)
import Data.Time.Locale.Compat (defaultTimeLocale)
-- | Druid has numerous query types for various use cases. Queries are composed
-- of various JSON properties and Druid has different types of queries for
-- different use cases.
--
-- NOTE(review): the record selectors are shared between constructors, so a
-- selector such as '_queryThreshold' is partial -- it raises when applied to
-- a constructor that lacks the field.
data Query
    -- | These types of queries take a timeseries query object and return an
    -- array of JSON objects where each object represents a value asked for by
    -- the timeseries query.
    = QueryTimeSeries
        { _queryDataSourceName   :: DataSourceName
        , _queryGranularity      :: Granularity
        , _queryFilter           :: Maybe Filter
        , _queryAggregations     :: [Aggregation]
        , _queryPostAggregations :: Maybe [PostAggregation]
        , _queryIntervals        :: [Interval]
        }
    -- | TopN queries return a sorted set of results for the values in a given
    -- dimension according to some criteria. Conceptually, they can be thought
    -- of as an approximate GroupByQuery over a single dimension with an
    -- Ordering spec. TopNs are much faster and resource efficient than
    -- GroupBys for this use case. These types of queries take a topN query
    -- object and return an array of JSON objects where each object represents
    -- a value asked for by the topN query.
    --
    -- TopNs are approximate in that each node will rank their top K results
    -- and only return those top K results to the broker. K, by default in
    -- Druid, is max(1000, threshold). In practice, this means that if you ask
    -- for the top 1000 items ordered, the correctness of the first ~900 items
    -- will be 100%, and the ordering of the results after that is not
    -- guaranteed. TopNs can be made more accurate by increasing the threshold.
    | QueryTopN
        { _queryDataSourceName   :: DataSourceName
        , _queryGranularity      :: Granularity
        , _queryFilter           :: Maybe Filter
        , _queryAggregations     :: [Aggregation]
        , _queryPostAggregations :: Maybe [PostAggregation]
        , _queryIntervals        :: [Interval]
        , _queryDimensionName    :: DimensionName
        , _queryThreshold        :: Threshold
        , _queryMetric           :: MetricName
        }
    -- | These types of queries take a groupBy query object and return an array
    -- of JSON objects where each object represents a grouping asked for by the
    -- query. Note: If you only want to do straight aggregates for some time
    -- range, we highly recommend using TimeseriesQueries instead. The
    -- performance will be substantially better. If you want to do an ordered
    -- groupBy over a single dimension, please look at TopN queries. The
    -- performance for that use case is also substantially better.
    | QueryGroupBy
        { _queryDataSourceName   :: DataSourceName
        , _queryGranularity      :: Granularity
        , _queryFilter           :: Maybe Filter
        , _queryAggregations     :: [Aggregation]
        , _queryPostAggregations :: Maybe [PostAggregation]
        , _queryIntervals        :: [Interval]
        , _queryDimensionNames   :: [DimensionName]
        , _queryLimitSpec        :: Maybe LimitSpec
        , _queryHaving           :: Maybe Having
        }
    -- | Time boundary queries return the earliest and latest data points of a
    -- data set. '_queryBound' defaults to both if not set.'
    | QueryTimeBoundary
        { _queryDataSourceName :: DataSourceName
        , _queryBound          :: Maybe Bound
        }
  deriving (Eq, Show)
-- | Set to 'MaxTime' or 'MinTime' to return only the latest or earliest
-- timestamp.
data Bound = MaxTime | MinTime
  deriving (Eq, Show)
-- | The limitSpec field provides the functionality to sort and limit the set
-- of results from a groupBy query. If you group by a single dimension and are
-- ordering by a single metric, we highly recommend using 'QueryTopN' instead.
-- The performance will be substantially better. Available options are:
data LimitSpec = LimitSpecDefault
    { _limitSpecLimit   :: Integer
    , _limitSpecColumns :: [OrderByColumnSpec]
    }
  deriving (Eq, Show)
-- | OrderByColumnSpecs indicate how to do order by operations.
-- The simple form leaves the direction to Druid's default.
data OrderByColumnSpec
    = OrderByColumnSpecDirected
        { _orderByColumnSpecDimensionName :: DimensionName
        , _orderByColumnSpecDirection     :: Direction
        }
    | OrderByColumnSpecSimple
        { _orderByColumnSpecDimensionName :: DimensionName }
  deriving (Eq, Show)
-- | Sort direction for 'OrderByColumnSpecDirected'.
data Direction = Ascending | Descending
  deriving (Eq, Show)
-- | A having clause is a JSON object identifying which rows from a groupBy
-- query should be returned, by specifying conditions on aggregated values.
--
-- It is essentially the equivalent of the HAVING clause in SQL.
data Having
    = HavingEqualTo
        { _havingAggregation :: MetricName
        , _havingValue       :: Integer
        }
    | HavingGreaterThan
        { _havingAggregation :: MetricName
        , _havingValue       :: Integer
        }
    | HavingLessThan
        { _havingAggregation :: MetricName
        , _havingValue       :: Integer
        }
    | HavingAnd
        { _havingSpecs :: [Having] }
    | HavingOr
        { _havingSpecs :: [Having] }
    | HavingNot
        { _havingSpec :: Having }
  deriving (Eq, Show)
-- | Maximum number of results a 'QueryTopN' returns per granularity bucket.
newtype Threshold = Threshold { unThreshold :: Integer }
  deriving (Num, ToJSON, Eq, Show)
-- | A data source is the Druid equivalent of a database table. However, a
-- query can also masquerade as a data source, providing subquery-like
-- functionality. Query data sources are currently supported only by GroupBy
-- queries.
newtype DataSourceName = DataSourceName { unDataSourceName :: Text }
  deriving (IsString, Eq, Show)
-- | The granularity field determines how data gets bucketed across the time
-- dimension, or how it gets aggregated by hour, day, minute, etc.
--
-- It can be specified either as a string for simple granularities or as an
-- object for arbitrary granularities.
data Granularity
    = GranularityAll
    | GranularityNone
    | GranularityMinute
    | GranularityFifteenMinute
    | GranularityThirtyMinute
    | GranularityHour
    | GranularityDay
  deriving (Eq, Show)
-- | A filter is a JSON object indicating which rows of data should be included
-- in the computation for a query. It’s essentially the equivalent of the WHERE
-- clause in SQL. Druid supports the following types of filters.
data Filter
    -- | The simplest filter is a selector filter. The selector filter will
    -- match a specific dimension with a specific value. Selector filters can
    -- be used as the base filters for more complex Boolean expressions of
    -- filters.
    = FilterSelector
        { _selectorDimensionName :: DimensionName
        , _selectorValue         :: Text
        }
    | FilterRegularExpression
        { _selectorDimensionName :: DimensionName
        , _selectorPattern       :: Text }
    | FilterJS
        { _selectorDimensionName :: DimensionName
        , _selectorFunction      :: JS }
    | FilterAnd { _selectorFields :: [Filter] }
    | FilterOr { _selectorFields :: [Filter] }
    | FilterNot { _selectorField :: Filter }
  deriving (Eq, Show)
-- | Name of a Druid dimension (a queryable column of the data source).
newtype DimensionName = DimensionName { unDimensionName :: Text }
  deriving (IsString, ToJSON, Eq, Show)
-- | Raw JavaScript source text, embedded verbatim in queries.
newtype JS = JS { unJS :: Text }
  deriving (IsString, ToJSON, Eq, Show)
-- | Name under which an aggregation/post-aggregation result is returned.
newtype OutputName = OutputName { unOutputName :: Text }
  deriving (IsString, ToJSON, Eq, Show)
-- | Name of a metric column in the data source.
newtype MetricName = MetricName { unMetricName :: Text }
  deriving (IsString, ToJSON, Eq, Show)
-- | Aggregations are specifications of processing over metrics available in
-- Druid. Every aggregator except 'AggregationFiltered' names its output via
-- an 'OutputName'; most also name the source metric they fold over.
-- Available aggregations are:
data Aggregation
    = AggregationCount { _aggregationName :: OutputName }
    | AggregationLongSum
        { _aggregationName      :: OutputName
        , _aggregationFieldName :: MetricName }
    | AggregationDoubleSum
        { _aggregationName      :: OutputName
        , _aggregationFieldName :: MetricName }
    | AggregationMin
        { _aggregationName      :: OutputName
        , _aggregationFieldName :: MetricName }
    | AggregationMax
        { _aggregationName      :: OutputName
        , _aggregationFieldName :: MetricName }
    | AggregationHyperUnique
        { _aggregationName      :: OutputName
        , _aggregationFieldName :: MetricName }
    | AggregationJS
        { _aggregationName              :: OutputName
        , _aggregationFieldNames        :: [MetricName]
        , _aggregationFunctionAggregate :: JS
        , _aggregationFunctionCombine   :: JS
        , _aggregationFunctionReset     :: JS
        }
    | AggregationCardinality
        { _aggregationName       :: OutputName
        , _aggregationFieldNames :: [MetricName]
        , _aggregationByRow      :: Maybe Bool
        }
    -- | Wraps another aggregator so that it only folds over rows matching
    -- the filter.
    | AggregationFiltered
        { _aggregationFilter     :: Filter
        , _aggregationAggregator :: Aggregation
        }
  deriving (Eq, Show)
-- | Post-aggregations are specifications of processing that should happen on
-- aggregated values as they come out of Druid. If you include a post
-- aggregation as part of a query, make sure to include all aggregators the
-- post-aggregator requires.
data PostAggregation
    -- | The arithmetic post-aggregator applies the provided function to the
    -- given fields from left to right. The fields can be aggregators or other
    -- post aggregators.
    --
    -- Supported functions are 'APlus', 'AMinus', 'AMult', 'ADiv', and
    -- 'AQuot'.
    --
    -- Note:
    --
    -- Division always returns 0 if dividing by 0, regardless of the numerator.
    -- quotient division behaves like regular floating point division
    -- Arithmetic post-aggregators may also specify an ordering, which defines
    -- the order of resulting values when sorting results (this can be useful
    -- for 'TopN' queries for instance):
    --
    -- If no ordering (or 'PostAggregationOrderingNull') is specified, the
    -- default floating point ordering is used.
    -- 'PostAggregationOrderingNumericFirst' ordering always returns finite
    -- values first, followed by NaN, and infinite values last.
    = PostAggregationArithmetic
        { _postAggregationName               :: OutputName
        , _postAggregationArithmeticFunction :: ArithmeticFunction
        , _postAggregationFields             :: [PostAggregation]
        , _postAggregationOrdering           :: Maybe PostAggregationOrdering
        }
    -- | This returns the value produced by the specified aggregator.
    --
    -- fieldName refers to the output name of the aggregator given in the
    -- aggregations portion of the query.
    | PostAggregationFieldAccess
        { _postAggregationFieldName :: OutputName }
    -- | The constant post-aggregator always returns the specified value.
    | PostAggregationConstant
        { _postAggregationName  :: OutputName
        , _postAggregationValue :: NumericalValue }
    -- | Applies the provided JavaScript function to the given fields. Fields
    -- are passed as arguments to the JavaScript function in the given order.
    | PostAggregationJS
        { _postAggregationName       :: OutputName
        , _postAggregationFieldNames :: [OutputName]
        , _postAggregationFunction   :: JS
        }
    -- | The 'PostAggregationHyperUniqueCardinality' post aggregator is used to
    -- wrap a hyperUnique object such that it can be used in post aggregations.
    | PostAggregationHyperUniqueCardinality
        { _postAggregationFieldName :: OutputName }
  deriving (Eq, Show)
-- | A numeric constant as used by 'PostAggregationConstant'.
newtype NumericalValue = NumericalValue { unNumericalValue :: Scientific }
  deriving (Num, Eq, Show)
-- | An arithmetic function as supported by 'PostAggregation'
data ArithmeticFunction
    -- | Addition
    = APlus
    -- | Subtraction
    | AMinus
    -- | Multiplication
    | AMult
    -- | Division
    | ADiv
    -- | Quotient
    | AQuot
  deriving (Eq, Show)
-- | If PostAggregationOrderingNull is specified, the default floating point
-- ordering is used. 'PostAggregationOrderingNumericFirst' ordering always
-- returns finite values first, followed by NaN, and infinite values last.
data PostAggregationOrdering
    = PostAggregationOrderingNull | PostAggregationOrderingNumericFirst
  deriving (Eq, Show)
-- | Half-open time range a query covers, serialised as @start/end@ in
-- ISO-8601 form.
data Interval = Interval
    { _intervalStart :: UTCTime
    , _intervalEnd   :: UTCTime
    }
  deriving (Eq, Show)
-- * Instances
-- | Each query serialises to an object with a @queryType@ discriminator;
-- optional parts ('Maybe' fields) are folded in via 'maybeToList' so that
-- absent values produce no key at all rather than @null@.
instance ToJSON Query where
    toJSON QueryTimeSeries{..} = object $
        [ "queryType" .= String "timeseries"
        , "granularity" .= toJSON _queryGranularity
        , "dataSource" .= toJSON _queryDataSourceName
        , "aggregations" .= toJSON _queryAggregations
        , "intervals" .= toJSON _queryIntervals
        ]
        <> fmap ("postAggregations" .= ) (maybeToList _queryPostAggregations)
        <> fmap ("filter" .= ) (maybeToList _queryFilter)
    toJSON QueryTopN{..} = object $
        [ "queryType" .= String "topN"
        , "dimension" .= toJSON _queryDimensionName
        , "threshold" .= toJSON _queryThreshold
        , "granularity" .= toJSON _queryGranularity
        , "metric" .= toJSON _queryMetric
        , "dataSource" .= toJSON _queryDataSourceName
        , "aggregations" .= toJSON _queryAggregations
        , "intervals" .= toJSON _queryIntervals
        ]
        <> fmap ("postAggregations" .= ) (maybeToList _queryPostAggregations)
        <> fmap ("filter" .= ) (maybeToList _queryFilter)
    toJSON QueryGroupBy{..} = object $
        [ "queryType" .= String "groupBy"
        , "dimensions" .= toJSON _queryDimensionNames
        , "granularity" .= toJSON _queryGranularity
        , "dataSource" .= toJSON _queryDataSourceName
        , "aggregations" .= toJSON _queryAggregations
        , "intervals" .= toJSON _queryIntervals
        ]
        <> fmap ("postAggregations" .= ) (maybeToList _queryPostAggregations)
        <> fmap ("filter" .= ) (maybeToList _queryFilter)
        <> fmap ("limitSpec" .= ) (maybeToList _queryLimitSpec)
        <> fmap ("having" .= ) (maybeToList _queryHaving)
    toJSON QueryTimeBoundary{..} = object $
        [ "queryType" .= String "timeBoundary"
        , "dataSource" .= toJSON _queryDataSourceName
        ]
        <> fmap ("bound" .=) (maybeToList _queryBound)
instance ToJSON Bound where
    toJSON MaxTime = "maxTime"
    toJSON MinTime = "minTime"
instance ToJSON Having where
    toJSON HavingEqualTo{..} = object
        [ "type" .= String "equalTo"
        , "aggregation" .= _havingAggregation
        , "value" .= _havingValue
        ]
    toJSON HavingGreaterThan{..} = object
        [ "type" .= String "greaterThan"
        , "aggregation" .= _havingAggregation
        , "value" .= _havingValue
        ]
    toJSON HavingLessThan{..} = object
        [ "type" .= String "lessThan"
        , "aggregation" .= _havingAggregation
        , "value" .= _havingValue
        ]
    toJSON HavingOr{..} = object
        [ "type" .= String "or"
        , "havingSpecs" .= _havingSpecs
        ]
    toJSON HavingAnd{..} = object
        [ "type" .= String "and"
        , "havingSpecs" .= _havingSpecs
        ]
    toJSON HavingNot{..} = object
        [ "type" .= String "not"
        , "havingSpec" .= _havingSpec
        ]
instance ToJSON LimitSpec where
    toJSON LimitSpecDefault{..} = object
        [ "type" .= String "default"
        , "limit" .= _limitSpecLimit
        , "columns" .= _limitSpecColumns
        ]
instance ToJSON OrderByColumnSpec where
    -- The simple form is just the bare dimension name string.
    toJSON OrderByColumnSpecSimple{..} = toJSON _orderByColumnSpecDimensionName
    toJSON OrderByColumnSpecDirected{..} = object
        [ "dimension" .= _orderByColumnSpecDimensionName
        , "direction" .= case _orderByColumnSpecDirection of
            Ascending -> String "ascending"
            Descending -> String "descending"
        ]
-- | Intervals serialise to the ISO-8601 @start/end@ string Druid expects.
instance ToJSON Interval where
    toJSON Interval{..} =
        let (l,r) = (fmt _intervalStart, fmt _intervalEnd)
        in String $ l <> "/" <> r
      where
        fmt = T.pack . formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%S%Q"
-- | Serialise aggregators to the JSON objects Druid expects: a @type@
-- discriminator plus the aggregator-specific fields.
instance ToJSON Aggregation where
    toJSON AggregationCount{..} = object
        [ "type" .= String "count"
        , "name" .= _aggregationName
        ]
    toJSON AggregationLongSum{..} = object
        [ "type" .= String "longSum"
        , "name" .= _aggregationName
        , "fieldName" .= _aggregationFieldName
        ]
    toJSON AggregationDoubleSum{..} = object
        [ "type" .= String "doubleSum"
        , "name" .= _aggregationName
        , "fieldName" .= _aggregationFieldName
        ]
    -- BUG FIX: min/max previously omitted "fieldName" even though the
    -- constructors carry it; Druid's min/max aggregators require the
    -- source metric, so the emitted queries were invalid.
    toJSON AggregationMin{..} = object
        [ "type" .= String "min"
        , "name" .= _aggregationName
        , "fieldName" .= _aggregationFieldName
        ]
    toJSON AggregationMax{..} = object
        [ "type" .= String "max"
        , "name" .= _aggregationName
        , "fieldName" .= _aggregationFieldName
        ]
    toJSON AggregationJS{..} = object
        [ "type" .= String "javascript"
        , "name" .= _aggregationName
        , "fieldNames" .= _aggregationFieldNames
        , "fnAggregate" .= _aggregationFunctionAggregate
        , "fnCombine" .= _aggregationFunctionCombine
        , "fnReset" .= _aggregationFunctionReset
        ]
    toJSON AggregationCardinality{..} = object $
        [ "type" .= String "cardinality"
        , "name" .= _aggregationName
        , "fieldNames" .= _aggregationFieldNames
        ]
        -- byRow is optional; omit the key entirely when unset.
        <> fmap ("byRow" .=) (maybeToList _aggregationByRow)
    toJSON AggregationHyperUnique{..} = object
        [ "type" .= String "hyperUnique"
        , "name" .= _aggregationName
        , "fieldName" .= _aggregationFieldName
        ]
    toJSON AggregationFiltered{..} = object
        [ "type" .= String "filtered"
        , "filter" .= _aggregationFilter
        , "aggregator" .= _aggregationAggregator
        ]
-- | Post-aggregators follow the same @type@-discriminated object shape;
-- the optional ordering key is omitted when absent.
instance ToJSON PostAggregation where
    toJSON PostAggregationArithmetic{..} = object $
        [ "type" .= String "arithmetic"
        , "name" .= _postAggregationName
        , "fn" .= _postAggregationArithmeticFunction
        , "fields" .= _postAggregationFields
        ]
        <> fmap ("ordering" .=) (maybeToList _postAggregationOrdering)
    toJSON PostAggregationFieldAccess{..} = object
        [ "type" .= String "fieldAccess"
        , "fieldName" .= _postAggregationFieldName
        ]
    toJSON PostAggregationConstant{..} = object
        [ "type" .= String "constant"
        , "name" .= _postAggregationName
        , "value" .= Number (unNumericalValue _postAggregationValue)
        ]
    toJSON PostAggregationJS{..} = object
        [ "type" .= String "javascript"
        , "name" .= _postAggregationName
        , "fieldNames" .= _postAggregationFieldNames
        , "function" .= _postAggregationFunction
        ]
    toJSON PostAggregationHyperUniqueCardinality{..} = object
        [ "type" .= String "hyperUniqueCardinality"
        , "fieldName" .= _postAggregationFieldName
        ]
instance ToJSON PostAggregationOrdering where
    toJSON PostAggregationOrderingNull = Null
    toJSON PostAggregationOrderingNumericFirst = "numericFirst"
instance ToJSON ArithmeticFunction where
    toJSON APlus = "+"
    toJSON AMinus = "-"
    toJSON AMult = "*"
    toJSON ADiv = "/"
    toJSON AQuot = "quotient"
instance ToJSON DataSourceName where
    toJSON (DataSourceName str) = String str
-- | Simple granularities serialise to their lower-case string names.
instance ToJSON Granularity where
    toJSON GranularityAll = "all"
    toJSON GranularityNone = "none"
    toJSON GranularityMinute = "minute"
    toJSON GranularityFifteenMinute = "fifteen_minute"
    toJSON GranularityThirtyMinute = "thirty_minute"
    toJSON GranularityHour = "hour"
    toJSON GranularityDay = "day"
instance ToJSON Filter where
    toJSON FilterSelector{..} = object
        [ "type" .= String "selector"
        , "dimension" .= _selectorDimensionName
        , "value" .= _selectorValue
        ]
    toJSON FilterRegularExpression{..} = object
        [ "type" .= String "regex"
        , "dimension" .= _selectorDimensionName
        , "pattern" .= _selectorPattern
        ]
    toJSON FilterJS{..} = object
        [ "type" .= String "javascript"
        , "dimension" .= _selectorDimensionName
        , "function" .= _selectorFunction
        ]
    toJSON FilterAnd{..} = object
        [ "type" .= String "and"
        , "fields" .= _selectorFields
        ]
    toJSON FilterOr{..} = object
        [ "type" .= String "or"
        , "fields" .= _selectorFields
        ]
    toJSON FilterNot{..} = object
        [ "type" .= String "not"
        , "field" .= _selectorField
        ]
|
anchor/druid-query
|
lib/Network/Druid/Query/AST.hs
|
bsd-3-clause
| 22,288
| 0
| 13
| 6,056
| 3,548
| 2,035
| 1,513
| 378
| 0
|
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-missing-signatures #-}
{-# OPTIONS_GHC -Wno-missing-pattern-synonym-signatures #-}
module Nix.Value where
import Control.Comonad
import Control.Exception
import Control.Monad
import Control.Monad.Free
import Control.Monad.Trans.Class
import qualified Data.Aeson as A
import Data.Functor.Classes
import Data.HashMap.Lazy ( HashMap )
import Data.Text ( Text )
import Data.Typeable ( Typeable )
import GHC.Generics
import Lens.Family2
import Lens.Family2.Stock
import Lens.Family2.TH
import Nix.Atoms
import Nix.Expr.Types
import Nix.Expr.Types.Annotated
import Nix.String
import Nix.Thunk
import Nix.Utils
-- | An 'NValue' is the most reduced form of an 'NExpr' after evaluation is
-- completed. 's' is related to the type of errors that might occur during
-- construction or use of a value.
data NValueF p m r
  = NVConstantF NAtom
  -- ^ A primitive constant; see 'NAtom'.
  -- | A string has a value and a context, which can be used to record what a
  -- string has been build from
  | NVStrF NixString
  | NVPathF FilePath
  -- ^ A filesystem path value.
  | NVListF [r]
  -- ^ A list of (possibly unevaluated) elements.
  | NVSetF (AttrSet r) (AttrSet SourcePos)
  -- ^ An attribute set, paired with the source positions of its keys.
  | NVClosureF (Params ()) (p -> m r)
    -- ^ A function is a closed set of parameters representing the "call
    --   signature", used at application time to check the type of arguments
    --   passed to the function. Since it supports default values which may
    --   depend on other values within the final argument set, this
    --   dependency is represented as a set of pending evaluations. The
    --   arguments are finally normalized into a set which is passed to the
    --   function.
    --
    --   Note that 'm r' is being used here because effectively a function
    --   and its set of default arguments is "never fully evaluated". This
    --   enforces in the type that it must be re-evaluated for each call.
  | NVBuiltinF String (p -> m r)
    -- ^ A builtin function is itself already in normal form. Also, it may
    --   or may not choose to evaluate its argument in the production of a
    --   result.
  deriving (Generic, Typeable, Functor)
-- | This 'Foldable' instance only folds what the value actually is known to
-- contain at time of fold: only lists and sets hold already-present @r@
-- values; closures and builtins are functions, so there is nothing to fold.
instance Foldable (NValueF p m) where
  foldMap f = \case
    NVConstantF _  -> mempty
    NVStrF      _  -> mempty
    NVPathF     _  -> mempty
    NVListF     l  -> foldMap f l
    NVSetF     s _ -> foldMap f s
    NVClosureF _ _ -> mempty
    NVBuiltinF _ _ -> mempty
instance Show r => Show (NValueF p m r) where
  showsPrec = flip go   where
    go (NVConstantF atom  ) = showsCon1 "NVConstant" atom
    go (NVStrF      ns    ) = showsCon1 "NVStr" (hackyStringIgnoreContext ns)
    go (NVListF     lst   ) = showsCon1 "NVList" lst
    go (NVSetF      attrs _) = showsCon1 "NVSet" attrs
    go (NVClosureF  p     _) = showsCon1 "NVClosure" p
    go (NVPathF     p     ) = showsCon1 "NVPath" p
    go (NVBuiltinF  name  _) = showsCon1 "NVBuiltin" name
-- | Render a unary constructor application, parenthesised when the
-- surrounding precedence requires it.
showsCon1 :: Show a => String -> a -> Int -> String -> String
showsCon1 con a d =
  showParen (d > 10) $ showString (con ++ " ") . showsPrec 11 a
-- | Map contravariantly over the parameter position @p@ (the argument type
-- of closures and builtins); all other constructors are untouched.
lmapNValueF :: Functor m => (b -> a) -> NValueF a m r -> NValueF b m r
lmapNValueF f = \case
  NVConstantF a  -> NVConstantF a
  NVStrF      s  -> NVStrF s
  NVPathF     p  -> NVPathF p
  NVListF     l  -> NVListF l
  NVSetF     s p -> NVSetF s p
  NVClosureF p g -> NVClosureF p (g . f)
  NVBuiltinF s g -> NVBuiltinF s (g . f)
-- | Change the underlying monad with a natural transformation; only the
-- function-carrying constructors actually mention @m@.
hoistNValueF
  :: (forall x . m x -> n x)
  -> NValueF p m a
  -> NValueF p n a
hoistNValueF lft = \case
  NVConstantF a  -> NVConstantF a
  NVStrF      s  -> NVStrF s
  NVPathF     p  -> NVPathF p
  NVListF     l  -> NVListF l
  NVSetF     s p -> NVSetF s p
  NVClosureF p g -> NVClosureF p (lft . g)
  NVBuiltinF s g -> NVBuiltinF s (lft . g)
-- | Pull an @n@-effect out of the value's immediate children, given a way to
-- embed @n@ back into @m@ for the deferred (closure/builtin) positions.
sequenceNValueF
  :: (Functor n, Monad m, Applicative n)
  => (forall x . n x -> m x)
  -> NValueF p m (n a)
  -> n (NValueF p m a)
sequenceNValueF transform = \case
  NVConstantF a  -> pure $ NVConstantF a
  NVStrF      s  -> pure $ NVStrF s
  NVPathF     p  -> pure $ NVPathF p
  NVListF     l  -> NVListF <$> sequenceA l
  NVSetF     s p -> NVSetF <$> sequenceA s <*> pure p
  NVClosureF p g -> pure $ NVClosureF p (transform <=< g)
  NVBuiltinF s g -> pure $ NVBuiltinF s (transform <=< g)
-- | Monadic bind over the immediate children: like 'traverse' but the effect
-- lives in @n@ while deferred positions stay in @m@ via @transform@.
bindNValueF
  :: (Monad m, Monad n)
  => (forall x . n x -> m x)
  -> (a -> n b)
  -> NValueF p m a
  -> n (NValueF p m b)
bindNValueF transform f = \case
  NVConstantF a  -> pure $ NVConstantF a
  NVStrF      s  -> pure $ NVStrF s
  NVPathF     p  -> pure $ NVPathF p
  NVListF     l  -> NVListF <$> traverse f l
  NVSetF     s p -> NVSetF <$> traverse f s <*> pure p
  NVClosureF p g -> pure $ NVClosureF p (transform . f <=< g)
  NVBuiltinF s g -> pure $ NVBuiltinF s (transform . f <=< g)
-- | Lift the underlying monad through a monad transformer.
liftNValueF
  :: (MonadTrans u, Monad m)
  => NValueF p m a
  -> NValueF p (u m) a
liftNValueF = hoistNValueF lift
-- | Inverse of 'liftNValueF', given a way to run the transformer.
unliftNValueF
  :: (MonadTrans u, Monad m)
  => (forall x . u m x -> m x)
  -> NValueF p (u m) a
  -> NValueF p m a
unliftNValueF = hoistNValueF
-- | The constraints required of the annotation functor @f@ and monad @m@
-- by the value-manipulation combinators below.
type MonadDataContext f (m :: * -> *)
  = (Comonad f, Applicative f, Traversable f, Monad m)
-- | At the time of constructor, the expected arguments to closures are values
-- that may contain thunks. The type of such thunks are fixed at that time.
newtype NValue' t f m a = NValue { _nValue :: f (NValueF (NValue t f m) m a) }
  deriving (Generic, Typeable, Functor, Foldable)
-- | Show by extracting the underlying 'NValueF' out of the annotation
-- functor @f@ (hence the 'Comonad' constraint).
instance (Comonad f, Show a) => Show (NValue' t f m a) where
  show (NValue (extract -> v)) = show v
instance Comonad f => Show1 (NValue' t f m) where
  liftShowsPrec sp sl p = \case
    NVConstant' atom  -> showsUnaryWith showsPrec "NVConstantF" p atom
    NVStr' ns ->
      showsUnaryWith showsPrec "NVStrF" p (hackyStringIgnoreContext ns)
    NVList' lst       -> showsUnaryWith (liftShowsPrec sp sl) "NVListF" p lst
    NVSet' attrs _    -> showsUnaryWith (liftShowsPrec sp sl) "NVSetF" p attrs
    NVPath' path      -> showsUnaryWith showsPrec "NVPathF" p path
    NVClosure' c _    -> showsUnaryWith showsPrec "NVClosureF" p c
    NVBuiltin' name _ -> showsUnaryWith showsPrec "NVBuiltinF" p name
    -- GHC cannot see that the pattern synonyms above are exhaustive.
    _                 -> error "Pattern synonyms mask coverage"
-- | 'sequenceNValueF' lifted through the annotation functor @f@.
sequenceNValue'
  :: (Functor n, Traversable f, Monad m, Applicative n)
  => (forall x . n x -> m x)
  -> NValue' t f m (n a)
  -> n (NValue' t f m a)
sequenceNValue' transform (NValue v) =
  NValue <$> traverse (sequenceNValueF transform) v
-- | 'bindNValueF' lifted through the annotation functor @f@.
bindNValue'
  :: (Traversable f, Monad m, Monad n)
  => (forall x . n x -> m x)
  -> (a -> n b)
  -> NValue' t f m a
  -> n (NValue' t f m b)
bindNValue' transform f (NValue v) =
  NValue <$> traverse (bindNValueF transform f) v
-- | Change the underlying monad in both directions: @lft@ hoists this layer,
-- @run@ maps back the recursive 'NValue' occurrences in parameter position.
hoistNValue'
  :: (Functor m, Functor n, Functor f)
  => (forall x . n x -> m x)
  -> (forall x . m x -> n x)
  -> NValue' t f m a
  -> NValue' t f n a
hoistNValue' run lft (NValue v) =
    NValue (fmap (lmapNValueF (hoistNValue lft run) . hoistNValueF lft) v)
-- | Lift the underlying monad through a transformer; @run@ handles the
-- contravariant parameter positions.
liftNValue'
  :: (MonadTrans u, Monad m, Functor (u m), Functor f)
  => (forall x . u m x -> m x)
  -> NValue' t f m a
  -> NValue' t f (u m) a
liftNValue' run = hoistNValue' run lift
-- | Inverse of 'liftNValue''.
unliftNValue'
  :: (MonadTrans u, Monad m, Functor (u m), Functor f)
  => (forall x . u m x -> m x)
  -> NValue' t f (u m) a
  -> NValue' t f m a
unliftNValue' run = hoistNValue' lift run
-- | Catamorphism over a single value layer: @k@ handles payloads (giving
-- them a recursion continuation), @f@ folds the resulting layer.
iterNValue'
  :: forall t f m a r
   . MonadDataContext f m
  => (a -> (NValue' t f m a -> r) -> r)
  -> (NValue' t f m r -> r)
  -> NValue' t f m a
  -> r
iterNValue' k f = f . fmap (\a -> k a (iterNValue' k f))
-- | An 'NValueNF' is a fully evaluated value in normal form. An 'NValue f t m' is
-- a value in head normal form, where only the "top layer" has been
-- evaluated. An action of type 'm (NValue f t m)' is a pending evaluation that
-- has yet to be performed. An 't' is either a pending evaluation, or
-- a value in head normal form. A 'NThunkSet' is a set of mappings from keys
-- to thunks.
--
-- The 'Free' structure is used here to represent the possibility that
-- cycles may appear during normalization.
type NValue t f m = Free (NValue' t f m) t
-- | Hoist the monad of a whole 'NValue' tree; the layer-wise version
-- is 'hoistNValue''.
hoistNValue
  :: (Functor m, Functor n, Functor f)
  => (forall x . n x -> m x)
  -> (forall x . m x -> n x)
  -> NValue t f m
  -> NValue t f n
hoistNValue run lft = hoistFree (hoistNValue' run lft)
-- | Lift a whole 'NValue' tree into the transformer stack @u m@.
liftNValue
  :: (MonadTrans u, Monad m, Functor (u m), Functor f)
  => (forall x . u m x -> m x)
  -> NValue t f m
  -> NValue t f (u m)
liftNValue run = hoistNValue run lift
-- | Inverse of 'liftNValue': strip the transformer layer off again.
unliftNValue
  :: (MonadTrans u, Monad m, Functor (u m), Functor f)
  => (forall x . u m x -> m x)
  -> NValue t f (u m)
  -> NValue t f m
unliftNValue run = hoistNValue lift run
-- | Fold an 'NValue': @k@ consumes thunk leaves ('Pure') together with
-- a continuation for recursing into any value they may contain, while
-- @f@ folds one evaluated layer.
iterNValue
  :: forall t f m r
   . MonadDataContext f m
  => (t -> (NValue t f m -> r) -> r)
  -> (NValue' t f m r -> r)
  -> NValue t f m
  -> r
iterNValue k f = iter f . fmap (\t -> k t (iterNValue k f))
-- | Monadic variant of 'iterNValue': @go@ first sequences the effects
-- of @n@ through the 'Free' structure (using 'bindNValue''), after
-- which 'iterM' performs the actual fold.
iterNValueM
  :: (MonadDataContext f m, Monad n)
  => (forall x . n x -> m x)
  -> (t -> (NValue t f m -> n r) -> n r)
  -> (NValue' t f m (n r) -> n r)
  -> NValue t f m
  -> n r
iterNValueM transform k f =
  iterM f <=< go . fmap (\t -> k t (iterNValueM transform k f))
 where
  go (Pure x) = Pure <$> x
  go (Free fa) = Free <$> bindNValue' transform go fa
-- Match-only pattern synonyms plus smart constructors for every value
-- shape. Primed names operate on a single 'NValue'' layer; unprimed
-- names on a whole 'Free'-wrapped 'NValue'.
pattern NVThunk t <- Pure t
-- | Wrap a pending thunk as a value.
nvThunk :: Applicative f => t -> NValue t f m
nvThunk = Pure
pattern NVConstant' x <- NValue (extract -> NVConstantF x)
pattern NVConstant x <- Free (NVConstant' x)
-- | Construct an atomic constant (int, float, bool, null).
nvConstant' :: Applicative f => NAtom -> NValue' t f m r
nvConstant' x = NValue (pure (NVConstantF x))
nvConstant :: Applicative f => NAtom -> NValue t f m
nvConstant x = Free (NValue (pure (NVConstantF x)))
pattern NVStr' ns <- NValue (extract -> NVStrF ns)
pattern NVStr ns <- Free (NVStr' ns)
-- | Construct a string value (a 'NixString' may carry context).
nvStr' :: Applicative f => NixString -> NValue' t f m r
nvStr' ns = NValue (pure (NVStrF ns))
nvStr :: Applicative f => NixString -> NValue t f m
nvStr ns = Free (NValue (pure (NVStrF ns)))
pattern NVPath' x <- NValue (extract -> NVPathF x)
pattern NVPath x <- Free (NVPath' x)
-- | Construct a filesystem-path value.
nvPath' :: Applicative f => FilePath -> NValue' t f m r
nvPath' x = NValue (pure (NVPathF x))
nvPath :: Applicative f => FilePath -> NValue t f m
nvPath x = Free (NValue (pure (NVPathF x)))
pattern NVList' l <- NValue (extract -> NVListF l)
pattern NVList l <- Free (NVList' l)
-- | Construct a list value.
nvList' :: Applicative f => [r] -> NValue' t f m r
nvList' l = NValue (pure (NVListF l))
nvList :: Applicative f => [NValue t f m] -> NValue t f m
nvList l = Free (NValue (pure (NVListF l)))
pattern NVSet' s x <- NValue (extract -> NVSetF s x)
pattern NVSet s x <- Free (NVSet' s x)
-- | Construct an attribute set from bindings plus source positions.
nvSet' :: Applicative f
       => HashMap Text r -> HashMap Text SourcePos -> NValue' t f m r
nvSet' s x = NValue (pure (NVSetF s x))
nvSet :: Applicative f
      => HashMap Text (NValue t f m) -> HashMap Text SourcePos -> NValue t f m
nvSet s x = Free (NValue (pure (NVSetF s x)))
pattern NVClosure' x f <- NValue (extract -> NVClosureF x f)
pattern NVClosure x f <- Free (NVClosure' x f)
-- | Construct a closure from parameters and a monadic body.
nvClosure' :: (Applicative f, Functor m)
           => Params () -> (NValue t f m -> m r) -> NValue' t f m r
nvClosure' x f = NValue (pure (NVClosureF x f))
nvClosure :: (Applicative f, Functor m)
          => Params () -> (NValue t f m -> m (NValue t f m)) -> NValue t f m
nvClosure x f = Free (NValue (pure (NVClosureF x f)))
pattern NVBuiltin' name f <- NValue (extract -> NVBuiltinF name f)
pattern NVBuiltin name f <- Free (NVBuiltin' name f)
-- | Construct a named builtin function value.
nvBuiltin' :: (Applicative f, Functor m)
           => String -> (NValue t f m -> m r) -> NValue' t f m r
nvBuiltin' name f = NValue (pure (NVBuiltinF name f))
nvBuiltin :: (Applicative f, Functor m)
          => String -> (NValue t f m -> m (NValue t f m)) -> NValue t f m
nvBuiltin name f =
  Free (NValue (pure (NVBuiltinF name f)))
-- | Wrap a one-argument function as a named builtin value in @m@.
-- (The original eta-expanded the function as @\\a -> f a@, which is
-- redundant; @f@ is passed through directly.)
builtin
  :: forall m f t
   . (MonadThunk t m (NValue t f m), MonadDataContext f m)
  => String
  -> (NValue t f m -> m (NValue t f m))
  -> m (NValue t f m)
builtin name f = pure $ nvBuiltin name f
-- | Curry a two-argument function into nested builtin values.
builtin2
  :: (MonadThunk t m (NValue t f m), MonadDataContext f m)
  => String
  -> (NValue t f m -> NValue t f m -> m (NValue t f m))
  -> m (NValue t f m)
builtin2 name f = builtin name (\x -> builtin name (\y -> f x y))
-- | Curry a three-argument function into nested builtin values.
builtin3
  :: (MonadThunk t m (NValue t f m), MonadDataContext f m)
  => String
  -> ( NValue t f m
    -> NValue t f m
    -> NValue t f m
    -> m (NValue t f m)
    )
  -> m (NValue t f m)
builtin3 name f =
  builtin name (\x -> builtin name (\y -> builtin name (\z -> f x y z)))
-- | Whether a string value carries Nix string context.
data TStringContext = NoContext | HasContext
  deriving Show
-- | Coarse classification of a value, used for error reporting.
data ValueType
  = TInt
  | TFloat
  | TBool
  | TNull
  | TString TStringContext
  | TList
  | TSet
  | TClosure
  | TPath
  | TBuiltin
  deriving Show
-- | Classify one layer of the value functor.
valueType :: NValueF a m r -> ValueType
valueType = \case
  NVConstantF a -> case a of
    NInt _ -> TInt
    NFloat _ -> TFloat
    NBool _ -> TBool
    NNull -> TNull
  NVStrF ns | stringHasContext ns -> TString HasContext
            | otherwise -> TString NoContext
  NVListF{} -> TList
  NVSetF{} -> TSet
  NVClosureF{} -> TClosure
  NVPathF{} -> TPath
  NVBuiltinF{} -> TBuiltin
-- | Human-readable description of a 'ValueType' for error messages.
describeValue :: ValueType -> String
describeValue t = case t of
  TInt               -> "an integer"
  TFloat             -> "a float"
  TBool              -> "a boolean"
  TNull              -> "a null"
  TString NoContext  -> "a string"
  TString HasContext -> "a string with context"
  TList              -> "a list"
  TSet               -> "an attr set"
  TClosure           -> "a function"
  TPath              -> "a path"
  TBuiltin           -> "a builtin function"
-- | Describe the coarse type of a value, forcing through any thunk
-- ('Pure') layers first. (Dropped a redundant @$@ before @v@.)
showValueType :: (MonadThunk t m (NValue t f m), Comonad f)
              => NValue t f m -> m String
showValueType (Pure t) = force t showValueType
showValueType (Free (NValue (extract -> v))) =
  pure $ describeValue $ valueType v
-- | Error-context frames pushed while operating on values, so that
-- failures can report what was being evaluated at the time.
data ValueFrame t f m
    = ForcingThunk t
    | ConcerningValue (NValue t f m)
    | Comparison (NValue t f m) (NValue t f m)
    | Addition (NValue t f m) (NValue t f m)
    | Multiplication (NValue t f m) (NValue t f m)
    | Division (NValue t f m) (NValue t f m)
    | Coercion ValueType ValueType
    | CoercionToJson (NValue t f m)
    | CoercionFromJson A.Value
    | Expectation ValueType (NValue t f m)
    deriving Typeable
deriving instance (Comonad f, Show t) => Show (ValueFrame t f m)
-- | Constraints required to throw a 'ValueFrame' as an exception.
type MonadDataErrorContext t f m
  = (Show t, Typeable t, Typeable m, Typeable f, MonadDataContext f m)
instance MonadDataErrorContext t f m => Exception (ValueFrame t f m)
-- Derive prisms for the value functor and lenses for the 'NValue'' wrapper.
$(makeTraversals ''NValueF)
$(makeLenses ''NValue')
-- | Lens onto the (optional) attribute @k@ of a set-shaped value layer.
key
  :: (Traversable f, Applicative g)
  => VarName
  -> LensLike' g (NValue' t f m a) (Maybe a)
key k = nValue . traverse . _NVSetF . _1 . hashAt k
|
jwiegley/hnix
|
src/Nix/Value.hs
|
bsd-3-clause
| 15,814
| 0
| 14
| 4,224
| 5,828
| 2,912
| 2,916
| -1
| -1
|
-- |
-- Module: Math.NumberTheory.GCD.LowLevel
-- Copyright: (c) 2011 Daniel Fischer
-- Licence: MIT
-- Maintainer: Daniel Fischer <daniel.is.fischer@googlemail.com>
-- Stability: Provisional
-- Portability: Non-portable (GHC extensions)
--
-- Low level gcd and coprimality functions using the binary gcd algorithm.
-- Normally, accessing these via the higher level interface of "Math.NumberTheory.GCD"
-- should be sufficient.
--
{-# LANGUAGE CPP, MagicHash, UnboxedTuples #-}
module Math.NumberTheory.GCD.LowLevel
( -- * Specialised GCDs
gcdInt
, gcdWord
-- ** GCDs for unboxed types
, gcdInt#
, gcdWord#
-- * Specialised tests for coprimality
, coprimeInt
, coprimeWord
-- ** Coprimality tests for unboxed types
, coprimeInt#
, coprimeWord#
) where
import GHC.Base
#if __GLASGOW_HASKELL__ < 705
import GHC.Word (Word(..)) -- Moved to GHC.Types
#endif
import Math.NumberTheory.Utils
-- | Greatest common divisor of two 'Int's, calculated with the binary gcd algorithm.
gcdInt :: Int -> Int -> Int
gcdInt (I# x#) (I# y#) = I# (gcdInt# x# y#)
-- | Test whether two 'Int's are coprime, using an abbreviated binary gcd algorithm.
coprimeInt :: Int -> Int -> Bool
coprimeInt (I# x#) (I# y#) = coprimeInt# x# y#
-- | Greatest common divisor of two 'Word's, calculated with the binary gcd algorithm.
gcdWord :: Word -> Word -> Word
gcdWord (W# x#) (W# y#) = W# (gcdWord# x# y#)
-- | Test whether two 'Word's are coprime, using an abbreviated binary gcd algorithm.
coprimeWord :: Word -> Word -> Bool
coprimeWord (W# x#) (W# y#) = coprimeWord# x# y#
-- | Greatest common divisor of two 'Int#'s, calculated with the binary gcd algorithm.
gcdInt# :: Int# -> Int# -> Int#
gcdInt# x# y# = word2Int# (gcdWord# wx# wy#)
  where
    wx# = int2Word# (absInt# x#)
    wy# = int2Word# (absInt# y#)
-- | Test whether two 'Int#'s are coprime.
coprimeInt# :: Int# -> Int# -> Bool
coprimeInt# x# y# = coprimeWord# (int2Word# (absInt# x#)) (int2Word# (absInt# y#))
-- | Greatest common divisor of two 'Word#'s, calculated with the binary gcd algorithm.
gcdWord# :: Word# -> Word# -> Word#
gcdWord# a# 0## = a#
gcdWord# 0## b# = b#
-- Strip the factors of two from both operands (shiftToOddCount# yields
-- the count of twos and the odd remainder); gcd the odd parts, then
-- shift back in the twos common to both, i.e. min(za#, zb#) of them.
gcdWord# a# b# =
  case shiftToOddCount# a# of
    (# za#, oa# #) ->
      case shiftToOddCount# b# of
        (# zb#, ob# #) -> gcdWordOdd# oa# ob# `uncheckedShiftL#` (if isTrue# (za# <# zb#) then za# else zb#)
-- | Test whether two 'Word#'s are coprime.
coprimeWord# :: Word# -> Word# -> Bool
coprimeWord# a# b# =
  (isTrue# (a# `eqWord#` 1##) || isTrue# (b# `eqWord#` 1##))
  || (isTrue# (((a# `or#` b#) `and#` 1##) `eqWord#` 1##) -- not both even
      && ((isTrue# (a# `neWord#` 0##) && isTrue# (b# `neWord#` 0##)) -- neither is zero
          && isTrue# (gcdWordOdd# (shiftToOdd# a#) (shiftToOdd# b#) `eqWord#` 1##)))
-- Various auxiliary functions
-- calculate the gcd of two odd numbers
{-# INLINE gcdWordOdd# #-}
gcdWordOdd# :: Word# -> Word# -> Word#
gcdWordOdd# a# b#
  | isTrue# (a# `eqWord#` 1##) || isTrue# (b# `eqWord#` 1##) = 1##
  | isTrue# (a# `eqWord#` b#) = a#
  -- 'oddGCD#' requires its first argument to be strictly larger.
  | isTrue# (a# `ltWord#` b#) = oddGCD# b# a#
  | otherwise = oddGCD# a# b#
-- calculate the gcd of two odd numbers using the binary gcd algorithm
-- Precondition: first argument strictly larger than second (which should be greater than 1)
-- The difference of two odd numbers is even; its factors of two cannot
-- be part of the gcd (both inputs are odd), so they are stripped with
-- shiftToOdd# before recursing on the smaller pair.
oddGCD# :: Word# -> Word# -> Word#
oddGCD# a# b# =
  case shiftToOdd# (a# `minusWord#` b#) of
    1## -> 1##
    c# | isTrue# (c# `ltWord#` b#) -> oddGCD# b# c#
       | isTrue# (c# `gtWord#` b#) -> oddGCD# c# b#
       | otherwise -> c#
-- | Absolute value of an 'Int#'.
absInt# :: Int# -> Int#
absInt# n# = if isTrue# (n# <# 0#) then negateInt# n# else n#
|
shlevy/arithmoi
|
Math/NumberTheory/GCD/LowLevel.hs
|
mit
| 3,771
| 0
| 16
| 898
| 921
| 501
| 420
| 58
| 2
|
{-|
Tracks terminal state so that new terminal connections can be brought
up to speed.
-}
module Urbit.Vere.Term.Logic
( SpinnerCause(..), St, Ev(..), Ef(..)
, init
, step
, drawState
, fromTermEv
, toTermEv
) where
import Urbit.Prelude hiding (init)
import Data.Sequence (Seq((:<|)))
import qualified Urbit.Arvo as Arvo
import qualified Urbit.Vere.Term.API as Term
--------------------------------------------------------------------------------
-- | Why the spinner is showing: started directly by the user, or by a
-- named event.
data SpinnerCause = User | Event Text
  deriving (Show)
-- | 'Nothing' means the spinner is hidden.
type SpinnerState = Maybe SpinnerCause
{-|
    %line -- Output a line above the edit line.
    %spin -- Set the spinner state.
    %bell -- Ring a bell (no change to the state).
    %draw -- Redraw the current line (no change to the state).
    %move -- Move the cursor position.
    %edit -- Set the edit line, moving the cursor to the end.
    %more -- Write the edit line to history, and clear it.
-}
data Ev
    = EvLine Text
    | EvSpin SpinnerState
    | EvMove Word
    | EvBell
    | EvDraw
    | EvEdit Text
    | EvMore
  deriving (Show)
-- | Terminal effects (not used by the state tracking in this module).
data Ef
    = EfClear
    | EfWrite Text
    | EfShift Int
    | EfRing
    | EfSpin SpinnerState
  deriving (Show)
-- | Tracked terminal state: recent output history, the current edit
-- line, the cursor column within it, and the spinner.
data St = St
    { sHistory :: Seq Text
    , sLine :: Text
    , sCurPos :: Word
    , sSpinner :: SpinnerState
    }
  deriving (Show)
--------------------------------------------------------------------------------
-- | Initial state: no history, empty edit line, cursor at column 0,
-- spinner hidden.
init :: St
init = St
  { sHistory = mempty
  , sLine    = ""
  , sCurPos  = 0
  , sSpinner = Nothing
  }
{-|
    When we process `EvMore`, we need to append a newline to the end of
    the current line. During normal play, the ENTER key inserts the
    newline for us, so we need to recreate that newline when we rebuild
    the state for a new terminal connection.
-}
step :: St -> Ev -> St
step st@St{..} = \case
    EvLine t -> st & record t
    EvSpin s -> st { sSpinner = s }
    -- Clamp the cursor to the current edit-line length.
    EvMove w -> st { sCurPos = min w (word $ length sLine) }
    EvEdit t -> st { sLine = t, sCurPos = word (length t) }
    -- @sLine@ here is the wildcard binding from the incoming state, so
    -- the finished line is recorded (with a newline) before the reset.
    EvMore -> st { sLine = "", sCurPos = 0 } & record (sLine <> "\n")
    EvBell -> st
    EvDraw -> st
  where
    word :: Integral i => i -> Word
    word = fromIntegral
    -- Append a line to history and keep it bounded.
    record :: Text -> St -> St
    record t st@St{..} = st { sHistory = trim (sHistory |> t) }
    -- Drop the oldest entry once the history reaches 20 lines.
    trim :: Seq a -> Seq a
    trim s | length s < 20 = s
    trim (_ :<| s) = s
    trim s = s
-- | Replay a recorded terminal state as the event sequence that
-- rebuilds it on a fresh connection: history lines, then the edit
-- line, then the cursor position, then the spinner.
drawState :: St -> [Ev]
drawState St{..} = hist <> out <> cur <> spin
  where
    hist = EvLine <$> toList sHistory
    out = if null sLine then [] else [EvEdit sLine]
    -- 'sCurPos' is already a 'Word'; the original applied a redundant
    -- identity 'fromIntegral' here.
    cur = if 0 == sCurPos then [] else [EvMove sCurPos]
    spin = maybe [] (singleton . EvSpin . Just) sSpinner
-- Conversion ------------------------------------------------------------------
-- | Translate an Arvo blit into a logic event; blits this module does
-- not track map to 'Nothing'.
fromBlit :: Arvo.Blit -> Maybe Ev
fromBlit blit = case blit of
  Arvo.Hop w  -> Just (EvMove (fromIntegral w))
  Arvo.Bel () -> Just EvBell
  Arvo.Clr () -> Just EvDraw
  Arvo.Lin s  -> Just (EvEdit (pack s))
  Arvo.Mor () -> Just EvMore
  _           -> Nothing
-- | A spinner with no cord was started by the user; otherwise the cord
-- names the causing event.
toCause :: Maybe Cord -> SpinnerCause
toCause mCord = case mCord of
  Nothing       -> User
  Just (Cord c) -> Event c

-- | Inverse of 'toCause'.
fromCause :: SpinnerCause -> Maybe Cord
fromCause cause = case cause of
  User    -> Nothing
  Event t -> Just (Cord t)
-- | Decompose a terminal API event into the logic events it implies.
fromTermEv :: Term.Ev -> [Ev]
fromTermEv ev = case ev of
  Term.Blits bs -> catMaybes (map fromBlit bs)
  Term.Trace t  -> [EvLine (unCord t)]
  Term.Blank    -> [EvLine ""]
  Term.Spinr s  -> [EvSpin (fmap toCause s)]

-- | Re-encode a logic event as a terminal API event.
toTermEv :: Ev -> Term.Ev
toTermEv ev = case ev of
  EvLine "" -> Term.Blank
  EvLine t  -> Term.Trace (Cord t)
  EvSpin s  -> Term.Spinr (fmap fromCause s)
  EvMove w  -> Term.Blits [Arvo.Hop (fromIntegral w)]
  EvBell    -> Term.Blits [Arvo.Bel ()]
  EvDraw    -> Term.Blits [Arvo.Clr ()]
  EvEdit t  -> Term.Blits [Arvo.Lin (unpack t)]
  EvMore    -> Term.Blits [Arvo.Mor ()]
|
jfranklin9000/urbit
|
pkg/hs/urbit-king/lib/Urbit/Vere/Term/Logic.hs
|
mit
| 3,848
| 0
| 13
| 1,038
| 1,191
| 631
| 560
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.StorageGateway.CancelRetrieval
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Cancels retrieval of a virtual tape from the virtual tape shelf (VTS) to a
-- gateway after the retrieval process is initiated. The virtual tape is
-- returned to the VTS.
--
-- <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CancelRetrieval.html>
module Network.AWS.StorageGateway.CancelRetrieval
(
-- * Request
CancelRetrieval
-- ** Request constructor
, cancelRetrieval
-- ** Request lenses
, crGatewayARN
, crTapeARN
-- * Response
, CancelRetrievalResponse
-- ** Response constructor
, cancelRetrievalResponse
-- ** Response lenses
, crrTapeARN
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.StorageGateway.Types
import qualified GHC.Exts
-- | Request payload: names the gateway and the virtual tape whose
-- retrieval should be cancelled.
data CancelRetrieval = CancelRetrieval
    { _crGatewayARN :: Text
    , _crTapeARN :: Text
    } deriving (Eq, Ord, Read, Show)
-- | 'CancelRetrieval' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'crGatewayARN' @::@ 'Text'
--
-- * 'crTapeARN' @::@ 'Text'
--
cancelRetrieval :: Text -- ^ 'crGatewayARN'
                -> Text -- ^ 'crTapeARN'
                -> CancelRetrieval
cancelRetrieval p1 p2 = CancelRetrieval
    { _crGatewayARN = p1
    , _crTapeARN = p2
    }
-- | The Amazon Resource Name (ARN) of the gateway (serialised as the
-- @GatewayARN@ field of the request body).
crGatewayARN :: Lens' CancelRetrieval Text
crGatewayARN = lens _crGatewayARN (\s a -> s { _crGatewayARN = a })
-- | The Amazon Resource Name (ARN) of the virtual tape you want to cancel
-- retrieval for.
crTapeARN :: Lens' CancelRetrieval Text
crTapeARN = lens _crTapeARN (\s a -> s { _crTapeARN = a })
-- | Response payload; the tape ARN is absent when the service omits it.
newtype CancelRetrievalResponse = CancelRetrievalResponse
    { _crrTapeARN :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)
-- | 'CancelRetrievalResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'crrTapeARN' @::@ 'Maybe' 'Text'
--
cancelRetrievalResponse :: CancelRetrievalResponse
cancelRetrievalResponse = CancelRetrievalResponse
    { _crrTapeARN = Nothing
    }
-- | The Amazon Resource Name (ARN) of the virtual tape for which retrieval was
-- canceled.
crrTapeARN :: Lens' CancelRetrievalResponse (Maybe Text)
crrTapeARN = lens _crrTapeARN (\s a -> s { _crrTapeARN = a })
-- Requests are POSTed to the service root with no query parameters.
instance ToPath CancelRetrieval where
    toPath = const "/"
instance ToQuery CancelRetrieval where
    toQuery = const mempty
instance ToHeaders CancelRetrieval
-- Serialise the request body as JSON per the service protocol.
instance ToJSON CancelRetrieval where
    toJSON CancelRetrieval{..} = object
        [ "GatewayARN" .= _crGatewayARN
        , "TapeARN" .= _crTapeARN
        ]
instance AWSRequest CancelRetrieval where
    type Sv CancelRetrieval = StorageGateway
    type Rs CancelRetrieval = CancelRetrievalResponse
    request = post "CancelRetrieval"
    response = jsonResponse
-- The response carries an optional "TapeARN" field.
instance FromJSON CancelRetrievalResponse where
    parseJSON = withObject "CancelRetrievalResponse" $ \o -> CancelRetrievalResponse
        <$> o .:? "TapeARN"
|
romanb/amazonka
|
amazonka-storagegateway/gen/Network/AWS/StorageGateway/CancelRetrieval.hs
|
mpl-2.0
| 4,006
| 0
| 9
| 880
| 524
| 318
| 206
| 63
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (fr-FR locale) for the Code Dx ZAP
     extension: declares the map file and the TOC, index, search and
     favorites views. -->
<helpset version="2.0" xml:lang="fr-FR">
  <title>Code Dx | ZAP Extension</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
    JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/codedx/src/main/javahelp/org/zaproxy/zap/extension/codedx/resources/help_fr_FR/helpset_fr_FR.hs
|
apache-2.0
| 969
| 80
| 66
| 160
| 415
| 210
| 205
| -1
| -1
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Data.Map.Syntax.Util where
------------------------------------------------------------------------------
import qualified Data.Map as M
import qualified Data.Set as Set
import Test.QuickCheck (Arbitrary (arbitrary))
import Test.QuickCheck.Gen (listOf, elements)
------------------------------------------------------------------------------
import Data.Map.Syntax
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- |All elements that appear more than once in a list (once each).
-- Uses 'M.keysSet' to build the result set directly instead of the
-- roundabout @Set.fromList . map fst . M.toList@.
dups :: (Eq a,Ord a) => [a] -> Set.Set a
dups xs = M.keysSet (M.filter (> 1) countMap)
  where
    -- Occurrence count per element, built in one pass.
    countMap = M.fromListWith (+) [ (x, 1 :: Int) | x <- xs ]
-- | Test-only wrapper so 'Arbitrary' and 'Show' instances can be
-- attached to 'MapSyntax' without orphans.
newtype ArbMapSyntax a b = ArbMapSyntax { unArbSyntax :: MapSyntax a b }
------------------------------------------------------------------------------
-- | Build a random insertion sequence pairing arbitrary keys/values
-- with random duplicate-handling strategies. 'zip3' truncates to the
-- shortest list, so the three lists need not have equal lengths.
instance (Arbitrary a, Arbitrary b) => Arbitrary (ArbMapSyntax a b) where
  arbitrary = do
    ks <- arbitrary
    vs <- arbitrary
    strats <- listOf $ elements [Replace,Ignore,Error]
    return . ArbMapSyntax $
      mapM_ (\(s, k, v) -> addStrat s k v) (zip3 strats ks vs)
------------------------------------------------------------------------------
-- |An (invalid) show instance - to have something for QuickCheck to print
instance (Show a, Ord a, Show b) => Show (ArbMapSyntax a b) where
  show m = "<MapSyntax> state " ++ show (runMap . unArbSyntax $ m)
------------------------------------------------------------------------------
-- | Some sample MapSyntax's with various degrees of overlap.
-- 'mkMapABC' and 'mkMapDEF' are disjoint; 'mkMapAEF' overlaps both.
mkMapABC :: (Char -> Int -> MapSyntax Char Int) -> MapSyntax Char Int
mkMapABC strat = do
  strat 'A' 1
  strat 'B' 2
  strat 'C' 3

mkMapDEF :: (Char -> Int -> MapSyntax Char Int) -> MapSyntax Char Int
mkMapDEF strat = do
  strat 'D' 10
  strat 'E' 20
  strat 'F' 30

mkMapAEF :: (Char -> Int -> MapSyntax Char Int) -> MapSyntax Char Int
mkMapAEF strat = do
  strat 'A' 100
  strat 'E' 200
  strat 'F' 300
|
mightybyte/map-syntax
|
test/Data/Map/Syntax/Util.hs
|
bsd-3-clause
| 2,290
| 0
| 13
| 447
| 581
| 319
| 262
| 37
| 1
|
{-# OPTIONS_GHC -Wall #-}
-- | This module re-exports both 'MonadTardis' and 'TardisT'
-- (Wherever there is overlap, the 'MonadTardis' version is preferred.)
--
-- The recommended usage of a Tardis is to import this module.
module Control.Monad.Tardis
( -- * Re-exports
module Control.Monad.Trans.Tardis
, module Control.Monad.Tardis.Class
-- * What is a Tardis?
-- $whatis
-- * How do you use a Tardis?
-- $howuse
) where
import Control.Monad.Tardis.Class
import Control.Monad.Trans.Tardis
( TardisT
, runTardisT
, evalTardisT
, execTardisT
, Tardis
, runTardis
, evalTardis
, execTardis
, noState
)
{- $whatis
A Tardis is the combination of the State monad transformer
and the Reverse State monad transformer.
The State monad transformer features a forwards-traveling state.
You can retrieve the current value of the state,
and you can set its value, affecting any future attempts
to retrieve it.
The Reverse State monad transformer is just the opposite:
it features a backwards-traveling state.
You can retrieve the current value of the state,
and you can set its value, affecting any /past/ attempts
to retrieve it. This is a bit weirder than its
forwards-traveling counterpart, so its Monad instance
additionally requires that the underlying Monad it transforms
must be an instance of MonadFix.
A Tardis is nothing more than mashing these two things together.
A Tardis gives you /two/ states: one which travels /backwards/
(or /upwards/) through your code (referred to as @bw@),
and one which travels /forwards/ (or /downwards/) through your code
(referred to as @fw@). You can retrieve the current
value of either state, and you can set the value of either state.
Setting the forwards-traveling state will affect the /future/,
while setting the backwards-traveling state will affect the /past/.
Take a look at how Monadic bind is implemented for 'TardisT':
> m >>= f = TardisT $ \ ~(bw, fw) -> do
> rec (x, ~(bw'', fw' )) <- runTardisT m (bw', fw)
> (x', ~(bw' , fw'')) <- runTardisT (f x) (bw, fw')
> return (x', (bw'', fw''))
Like the Reverse State monad transformer, TardisT's Monad instance
requires that the monad it transforms is an instance of MonadFix,
as is evidenced by the use of @rec@.
Notice how the forwards-traveling state travels /normally/:
first it is fed to @m@, producing @fw'@, and then it is fed to @f x@,
producing @fw''@. The backwards-traveling state travels in the opposite
direction: first it is fed to @f x@, producing @bw'@, and then
it is fed to @m@, producing @bw''@.
-}
{- $howuse
A Tardis provides four primitive operations,
corresponding to the /get/ and /put/ for each of its two states.
The most concise way to explain it is this:
'getPast' retrieves the value from the latest 'sendFuture',
while 'getFuture' retrieves the value from the next 'sendPast'.
Beware the pitfall of performing send and get in the wrong order.
Let's consider forwards-traveling state:
> do sendFuture "foo"
> x <- getPast
In this code snippet, @x@ will be @\"foo\"@, because 'getPast'
grabs the value from the latest 'sendFuture'. If you wanted
to observe that state /before/ overwriting it with @\"foo\"@,
then re-arrange the code so that 'getPast' happens earlier
than 'sendFuture'. Now let's consider backwards-traveling state:
> do x <- getFuture
> sendPast "bar"
In this code snippet, @x@ will be @\"bar\"@, because 'getFuture'
grabs the value from the next 'sendPast'. If you wanted
to observe that state /before/ overwriting it with @\"bar\"@,
then re-arrange the code so that 'getFuture' happens later
than 'sendPast'.
TardisT is an instance of MonadFix. This is especially important
when attempting to write backwards-traveling code, because
the name binding occurs later than its usage.
The result of the following code will be @(11, \"Dan Burton\")@.
> flip execTardis (10, "Dan") $ do
> name <- getPast
> sendFuture (name ++ " Burton")
> rec
> sendPast (score + 1)
> score <- getFuture
> return ()
To avoid using @rec@, you may find 'modifyBackwards' to be useful.
This code is equivalent to the previous example:
> flip execTardis (10, "Dan") $ do
> modifyForwards (++ " Burton")
> modifyBackwards (+ 1)
-}
|
DanBurton/tardis
|
src/Control/Monad/Tardis.hs
|
bsd-3-clause
| 4,477
| 0
| 5
| 1,013
| 80
| 59
| 21
| 16
| 0
|
module Formalize.Formalizer
( pdfFromParams
, emptyFormData
) where
import Data.Text (Text)
import Formalize.Internal.Util
import Formalize.Internal.Validate
import Formalize.Internal.Mailer
import Formalize.Types
-- Try to create PDF file from params. Invalid input yields 'Left' with
-- form data carrying the error flash; valid input yields 'Right' with
-- the generated (saved and emailed) PDF.
-- (Resolves the old TODO: the do/return plumbing is replaced by '<$>'.)
pdfFromParams
    :: [(Text,Text)]
    -> FilePath
    -> SMTPInfo
    -> IO (Either FormData PDF)
pdfFromParams ps path smtp =
    case formFromParams ps of
        Left err -> Left <$> invalidInput err
        Right fi -> Right <$> validInput path fi smtp
-- Empty data is used when rendering the form first time.
emptyFormData :: IO FormData
emptyFormData = createEmptyFormData
-- Create form data containing error message (shown as a flash message
-- when the form is re-rendered).
invalidInput :: (FormInput,Text) -> IO FormData
invalidInput (fi,msg) = createFormData fi $ FlashMessage msg
-- Build the form data (no flash), render and save it as a PDF under
-- the given path, then email the result to the submitter.
validInput :: FilePath -> FormInput -> SMTPInfo -> IO PDF
validInput path input smtp = do
    formData <- createFormData input emptyFlash
    pdf <- saveAsPdf formData path
    emailPDF smtp (fiEmail input) pdf
|
Lepovirta/Crystallize
|
src/Formalize/Formalizer.hs
|
bsd-3-clause
| 1,248
| 0
| 11
| 354
| 298
| 152
| 146
| 28
| 2
|
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : Language.Ava.Base.Reader
--
-- Copyright : (c) 2016 Owain Lewis
--
-- License : BSD-style
-- Maintainer : Owain Lewis <owain@owainlewis.com>
-- Stability : experimental
-- Portability : GHC
--
-- Reads input strings and converts them into a concrete AST
--
module Language.Ava.Base.Reader
( readText
, readString
, readFile
) where
import Prelude hiding (readFile)
import Language.Ava.Base.AST (Value)
import Language.Ava.Base.Parser as P
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
-- | Result of parsing: either a parse error or the parsed AST values.
type ParseOutcome = Either AvaParseError [Value]

-- | Parse a chunk of text into AST values.
readText :: T.Text -> ParseOutcome
readText = P.parseMany

-- | Parse a 'String' by packing it into 'T.Text' first.
readString :: String -> ParseOutcome
readString s = readText (T.pack s)

-- | Read a file and parse its contents.
readFile :: FilePath -> IO ParseOutcome
readFile path = fmap readText (TIO.readFile path)
|
owainlewis/seven
|
src/Language/Ava/Base/Reader.hs
|
bsd-3-clause
| 852
| 0
| 7
| 148
| 160
| 103
| 57
| 17
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE PolyKinds #-}
------------------------------------------------------------------------
-- |
-- Module : Hanoi
-- Copyright : (C) 2019, Adam Boniecki
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Adam Boniecki <adambonie@gmail.com>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
-- Solution to the famous Tower of Hanoi puzzle using tools for state
-- machine property-based testing.
--
-- The puzzle is to move N discs of different sizes from one peg to
-- another, with one auxiliary peg and a restriction that no disc may ever
-- be placed on top of a smaller disc. Only one disc can be moved at a time.
------------------------------------------------------------------------
module Hanoi
( prop_hanoi
) where
import Data.Array
import Data.Kind
(Type)
import Data.Maybe
import Data.TreeDiff.Expr
()
import GHC.Generics
(Generic, Generic1)
import Prelude
import Test.QuickCheck
(Arbitrary(arbitrary), Gen, Property, choose,
suchThat, (===))
import Test.QuickCheck.Monadic
(monadicIO)
import Test.StateMachine
import qualified Test.StateMachine.Types.Rank2 as Rank2
------------------------------------------------------------------------
-- The model keeps track of which disc is on which peg.
-- Each peg holds its discs top-first; 'initModel' stacks [1..discs]
-- ascending, so smaller numbers are smaller discs.
newtype Model (r :: Type -> Type) = Model (Array Int [Int])
  deriving stock (Show, Eq, Generic)
-- There are 3 pegs, so the bounds are (0, 2)
pegsBounds :: (Int,Int)
pegsBounds = (0, 2)
-- | Diff-friendly rendering of the model for counterexample output.
instance ToExpr (Model r) where
  toExpr (Model a) = toExpr $ elems a
-- | Starting position: every disc stacked on the first peg.
initModel :: Int -> Model r
initModel discs = Model $ listArray pegsBounds [[1..discs], [], []]
-- Allowed action is to move one disc from the top of one peg to the top of another
data Command (r :: Type -> Type) = Move (Int,Int)
  deriving stock (Eq, Show, Generic1)
  deriving anyclass (Rank2.Functor, Rank2.Foldable, Rank2.Traversable, CommandNames)
-- | Generate a random (source, target) pair of distinct pegs; whether
-- the move is actually legal is decided by 'preconditions'.
instance Arbitrary (Command r) where
  arbitrary = do
    x <- choose pegsBounds
    y <- choose pegsBounds `suchThat` (/= x)
    return $ Move (x,y)
-- | Every move simply completes; there is no interesting response.
data Response (r :: Type -> Type) = Done
  deriving stock (Show, Generic1)
  deriving anyclass (Rank2.Foldable)
------------------------------------------------------------------------
-- | Move the top disc from peg @from_@ onto peg @to_@. Total only
-- under 'preconditions', which guarantees the source peg is non-empty.
-- (Also fixes the typo "preconditon" in the error message.)
transitions :: Model r -> Command r -> Response r -> Model r
transitions (Model pegs) (Move (from_, to_)) _ = case pegs ! from_ of
  (x : xs) -> Model $ pegs // [(from_, xs), (to_, x : pegs ! to_)]
  _ -> error "transition: impossible, due to precondition"
-- | A move is legal iff the source peg is non-empty and its top disc
-- is no larger than the target's top disc. The comparison runs on
-- 'Maybe Int': @y@ is always 'Just' thanks to the @maxBound@ padding.
preconditions :: Model Symbolic -> Command Symbolic -> Logic
preconditions (Model pegs) (Move (from_, to_)) = Boolean (isJust x) .&& x .<= y
  where
    -- Top disc of the source peg ('Nothing' when the peg is empty).
    x = listToMaybe (pegs ! from_)
    -- Any disc can be placed on empty peg, so no disc counts as largest disc.
    y = listToMaybe (pegs ! to_ ++ [maxBound])
-- Check if all discs are at the last peg. The invariant states that this is not
-- the case, so when it is not satisfied, we have a counter example that is a
-- solution to our puzzle.
postconditions :: Model Concrete -> Command Concrete -> Response Concrete -> Logic
postconditions m c r = length lst ./= sum (fmap length pegs)
  where
    -- Discs on the last peg after applying the move to the model.
    lst = pegs ! (snd $ bounds pegs)
    Model pegs = transitions m c r
------------------------------------------------------------------------
-- | Any random move; legality is enforced separately by
-- 'preconditions'. (Dropped the redundant @$@ in @Just $ arbitrary@.)
generator :: Model Symbolic -> Maybe (Gen (Command Symbolic))
generator _ = Just arbitrary

-- | Moves are atomic, so there is nothing to shrink.
shrinker :: Model r -> Command r -> [Command r]
shrinker _ _ = []
------------------------------------------------------------------------
-- | Executing a move has no real-world effect; always report 'Done'.
semantics :: Command Concrete -> IO (Response Concrete)
semantics _ = pure Done

-- | Symbolic execution mirrors 'semantics': every move yields 'Done'.
mock :: Model Symbolic -> Command Symbolic -> GenSym (Response Symbolic)
mock _ _ = pure Done
------------------------------------------------------------------------
-- | Assemble the state machine for a puzzle with the given disc count.
sm :: Int -> StateMachine Model Command IO Response
sm discs = StateMachine (initModel discs) transitions preconditions postconditions
  Nothing generator shrinker semantics mock noCleanup
-- A sequential property for Tower of Hanoi with n discs.
-- Note that optimal solution requires 2^n-1 moves and this is not guaranteed
-- to find an optimal one (or any at all).
prop_hanoi :: Int -> Property
prop_hanoi n = forAllCommands (sm n) Nothing $ \cmds -> monadicIO $ do
  (hist, _model, res) <- runCommands (sm n) cmds
  prettyCommands (sm n) hist (checkCommandNames cmds (res === Ok))
|
advancedtelematic/quickcheck-state-machine-model
|
test/Hanoi.hs
|
bsd-3-clause
| 4,781
| 0
| 14
| 1,026
| 1,097
| 598
| 499
| 70
| 2
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.