code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
module Data.Graph where
import Prelude hiding (lookup)
import Data.FiniteMap
import Control.Exception
-- A graph is a pair of finite sets, the first of objects, the second of relations
-- between objects (edges). Nodes and edges need to be uniquely identified, so the
-- choice of a finite map to an ordered value seems obvious.
class Graph g where
    nodes :: (Ord k, FiniteMap m k, Node n k) => g (m (n p)) (m e) -> m (n p)
    edges :: (Ord k, FiniteMap m k, Edge e k) => g (m n) (m (e p)) -> m (e p)
    -- | Find a node by its key, if present.
    nodeById :: (Ord k, FiniteMap m k, Node n k) => g (m (n p)) (m e) -> k -> Maybe (n p)
    nodeById g i = lookup i $ nodes g
    -- | Find an edge by its key, if present.
    edgeById :: (Ord k, FiniteMap m k, Edge e k) => g (m n) (m (e p)) -> k -> Maybe (e p)
    edgeById g i = lookup i $ edges g
-- A node
class Node n k | n -> k where
    -- | Unique identifier of the node.
    nodeKey :: (Ord k) => n p -> k
    -- | Keys of the edges incident to this node.
    connectedEdges :: (Ord k) => n p -> [k]
    -- | User data stored at the node.
    nodePayload :: n p -> p
-- | An edge, uniquely identified by a key.
class Edge e k | e -> k where
    edgeKey :: (Ord k) => e p -> k
-- | Objects carrying a human-readable label.
class Labeled o where
    label :: o -> String
-- | Objects carrying a numeric weight.
-- NOTE(review): "Weigthed"/"weigth" are misspellings of Weighted/weight;
-- renaming them would break callers, so they are left as-is.
class Weigthed o n | o -> n where
    weigth :: (Num n) => o -> n
| trbecker/haskell-musings | Graph/src/Data/Graph.hs | unlicense | 1,122 | 0 | 13 | 270 | 516 | 266 | 250 | 22 | 0 |
module Haskoin.Protocol.GetBlocks
( GetBlocks(..)
, BlockLocator
) where
import Control.Monad (replicateM, forM_)
import Control.Applicative ((<$>),(<*>))
import Data.Word (Word32)
import Data.Binary (Binary, get, put)
import Data.Binary.Get (getWord32le)
import Data.Binary.Put (putWord32le)
import Haskoin.Protocol.VarInt
import Haskoin.Crypto (Hash256)
-- | A list of block hashes identifying blocks this node already knows,
-- sent so the peer can work out what to reply with (per the @getblocks@
-- protocol message).
type BlockLocator = [Hash256]
-- | Payload of a @getblocks@ network request.
data GetBlocks = GetBlocks {
    getBlocksVersion :: !Word32,       -- ^ protocol version
    getBlocksLocator :: !BlockLocator, -- ^ known block hashes
    getBlocksHashStop :: !Hash256      -- ^ hash at which the peer should stop replying
} deriving (Eq, Show)
-- | Wire (de)serialization: a little-endian version word, a var-int-prefixed
-- list of locator hashes, then the stop hash.
instance Binary GetBlocks where
    get = do
        version <- getWord32le
        VarInt count <- get
        locator <- replicateM (fromIntegral count) get
        stop <- get
        return $ GetBlocks version locator stop
    put (GetBlocks version locator stop) = do
        putWord32le version
        put $ VarInt $ fromIntegral $ length locator
        mapM_ put locator
        put stop
| OttoAllmendinger/haskoin-protocol | src/Haskoin/Protocol/GetBlocks.hs | unlicense | 898 | 0 | 10 | 215 | 278 | 156 | 122 | 33 | 0 |
import Control.Monad
-- Echo characters from stdin: print each character and recurse,
-- stopping silently on the first space read.
main = do
  ch <- getChar
  unless (ch == ' ') $ do
    putChar ch
    main
| Oscarzhao/haskell | learnyouahaskell/when_test.hs | apache-2.0 | 101 | 0 | 10 | 39 | 44 | 20 | 24 | 6 | 1 |
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Main where
import Reexport2
main = print value
| google/cabal2bazel | bzl/tests/rules/ReexportTest2.hs | apache-2.0 | 645 | 0 | 5 | 112 | 28 | 22 | 6 | 3 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1.OAuthClient where
import GHC.Generics
import Data.Text
import Kubernetes.V1.ObjectMeta
import qualified Data.Aeson
-- | An OAuth client registration object, as represented in the OpenShift
-- v1 API.
data OAuthClient = OAuthClient
  { kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
  , apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
  , metadata :: Maybe ObjectMeta -- ^ standard Kubernetes object metadata
  , secret :: Maybe Text -- ^ unique secret associated with a client
  , respondWithChallenges :: Maybe Bool -- ^ indicates whether the client wants authentication needed responses made in the form of challenges instead of redirects
  , redirectURIs :: Maybe [Text] -- ^ valid redirection URIs associated with a client
  } deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON OAuthClient
instance Data.Aeson.ToJSON OAuthClient
| minhdoboi/deprecated-openshift-haskell-api | openshift/lib/Openshift/V1/OAuthClient.hs | apache-2.0 | 1,426 | 0 | 10 | 222 | 142 | 86 | 56 | 20 | 0 |
{-# LANGUAGE GADTs, TypeOperators, EmptyDataDecls, MultiParamTypeClasses, FlexibleInstances, OverlappingInstances, StandaloneDeriving, ConstraintKinds, TypeFamilies, ImplicitParams, RankNTypes #-}
{- Declares various types that are used everywhere -}
module Types where
import GHC.Prim
{- Type-level dimension information -}
-- Empty phantom types tagging the three spatial axes and time.
data X
data Y
data Z
data T
-- | Value-level singleton witnessing one dimension tag.
data Dim t where
  X :: Dim X
  Y :: Dim Y
  Z :: Dim Z
  T :: Dim T
{- Mapping from dimensions to their 'implicit parameter' requirements -}
-- Each dimension requires the matching step-size implicit parameter
-- (?dx, ?dy, ?dz or ?dt) to be in scope.
type family Delta t a :: Constraint
type instance Delta X a = ?dx :: a
type instance Delta Y a = ?dy :: a
type instance Delta Z a = ?dz :: a
type instance Delta T a = ?dt :: a
-- | Conjunction of 'Delta' constraints over a whole dimension list.
type family DeltaMap a ds :: Constraint
type instance DeltaMap a Nil = ()
type instance DeltaMap a (d :. ds) = (Delta d a, DeltaMap a ds)
deriving instance Show (Dim t)
-- Type-level list of dimensions: Nil-terminated, right-associated (:.).
data Nil
data t :. ts
infixr 3 :.
-- | Value-level list of dimension singletons mirroring the type-level list.
data Dimension t where
  Nil :: Dimension Nil
  (:.) :: Dim d -> Dimension t -> Dimension (d :. t)
-- | Render a dimension list in constructor syntax, e.g. @"X:.T:.Nil"@.
instance Show (Dimension t) where
  show Nil = "Nil"
  show (dim :. rest) = concat [show dim, ":.", show rest]
-- | Type-level list membership: holds when @t@ occurs in @ts@.  Relies on
-- OverlappingInstances to prefer the head-match instance.
class Member t ts
instance Member t (t :. ts)
instance Member t ts => Member t (t' :. ts)
{- Map type-level dimension lists to tuples -}
type family Indices ds t
type instance Indices Nil t = ()
type instance Indices (a :. Nil) t = (t)
type instance Indices (a :. b :. Nil) t = (t, t)
type instance Indices (a :. b :. c :. Nil) t = (t, t, t)
type instance Indices (a :. b :. c :. d :. Nil) t = (t, t, t, t)
{- Solver types -}
-- | Dimensions along which an Euler-style discrete step can be taken.
class (Member d ds) => Euler t d ds where
  euler :: (Delta d a, Fractional a) => Dim d -> Dimension ds -> (t -> a) -> (t -> a)
{- Scaffolding for building equations
   Defines an abstract syntax tree for PDEs -}
data Eqn ds a where
  -- | Finite-difference term of a field along one dimension.  The meaning
  -- of the Integer argument is fixed by the solver -- confirm there.
  Delta :: (Euler t d ds, Member d ds, Delta d a) => Integer -> (t -> a) -> (Dim d) -> Eqn ds (t -> a)
  Times :: Eqn ds a -> Eqn ds a -> Eqn ds a
  Add :: Eqn ds a -> Eqn ds a -> Eqn ds a
  Minus :: Eqn ds a -> Eqn ds a -> Eqn ds a
  Divide :: Eqn ds a -> Eqn ds a -> Eqn ds a
  Abs :: Eqn ds a -> Eqn ds a
  -- | A literal value; the optional String is a display name used by 'show'.
  Constant :: ((Show a) => a) -> Maybe String -> Eqn ds (t -> a)
-- | Prefix-style rendering of the equation AST.
instance (Show a) => Show (Eqn ds (t -> a)) where
  show expr = case expr of
      Delta n _ dim          -> "Delta " ++ show n ++ " " ++ show dim ++ " fun "
      Times e1 e2            -> binop "Times" e1 e2
      Add e1 e2              -> binop "Add" e1 e2
      Minus e1 e2            -> binop "Minus" e1 e2
      Divide e1 e2           -> binop "Divide" e1 e2
      Abs e                  -> "Abs (" ++ show e ++ ")"
      Constant c Nothing     -> "Constant " ++ show c
      Constant _ (Just name) -> "Constant " ++ name
    where
      -- All four binary constructors print as: Name (left) (right)
      binop name e1 e2 = name ++ " (" ++ show e1 ++ ") (" ++ show e2 ++ ")"
-- Specification type (currently just one equality)
-- TODO: could be more in the future
data Spec ds a = Equality (Eqn ds a) (Eqn ds a) (Dimension ds)
-- | Left- and right-hand sides of an equality specification.
lhs, rhs :: Spec ds a -> Eqn ds a
lhs (Equality l _ _) = l
rhs (Equality _ r _) = r
instance (Show a) => Show (Spec ds (t -> a)) where
  -- Fixed: the original omitted the closing parenthesis after the second
  -- equation, producing unbalanced output like "Equality (..) (.. dim".
  show (Equality eq1 eq2 d) =
    "Equality (" ++ show eq1 ++ ") (" ++ show eq2 ++ ") " ++ show d
{- Numerical classes -}
-- | Numeric syntax for equation ASTs: operators build AST nodes and
-- integer literals become anonymous 'Constant's.  'signum' has no AST
-- representation and raises an error.
instance Num a => Num (Eqn ds (t -> a)) where
  a * b = Times a b
  a + b = Add a b
  a - b = Minus a b
  abs a = Abs a
  signum a = error "signum not implemented"
  fromInteger a = Constant (fromInteger a) Nothing
-- | Rational literals become anonymous 'Constant's.  '(/)' now builds a
-- 'Divide' node, mirroring the 'Num' instance above; previously it was
-- left undefined and any use of '/' on equations crashed at runtime with
-- a missing-method error even though the AST has a 'Divide' constructor.
instance Fractional a => Fractional (Eqn ds (t -> a)) where
  a / b = Divide a b
  fromRational x = Constant (fromRational x) Nothing
| dorchard/pde-specs | Types.hs | bsd-2-clause | 3,530 | 0 | 12 | 943 | 1,503 | 786 | 717 | -1 | -1 |
-- 31626
import Euler(divisorPowerSum)
nn = 10000

-- Sum of proper divisors: divisorPowerSum 1 n counts every divisor
-- including n itself, so n is subtracted back off.
factorSum n = divisorPowerSum 1 n - n

-- A number is amicable when it pairs with a *different* number and each
-- equals the sum of the other's proper divisors.
isAmicable a = b /= a && factorSum b == a
  where b = factorSum a

-- Total of all amicable numbers in [2 .. n].
sumAmicable n = sum [a | a <- [2..n], isAmicable a]

main = print (sumAmicable nn)
| higgsd/euler | hs/21.hs | bsd-2-clause | 304 | 2 | 7 | 73 | 107 | 53 | 54 | 7 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Foundation where
import Control.Concurrent.STM (TChan)
import Data.Text (Text)
import qualified Database.Persist
import Database.Persist.Sql (SqlPersistT)
import Model
import Network.HTTP.Client.Conduit (HasHttpManager (getHttpManager),
Manager)
import Prelude
import Settings (Extra (..), widgetFile)
import qualified Settings
import Settings.Development (development)
import Settings.StaticFiles
import Text.Hamlet (hamletFile)
import Text.Jasmine (minifym)
import Yesod
import Yesod.Auth
import Yesod.Auth.BrowserId
import Yesod.Core.Types (Logger)
import Yesod.Default.Config
import Yesod.Default.Util (addStaticContentExternal)
import Yesod.Static
-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
    { settings :: AppConfig DefaultEnv Extra -- ^ Environment-specific app configuration.
    , getStatic :: Static -- ^ Settings for static file serving.
    , connPool :: Database.Persist.PersistConfigPool Settings.PersistConf -- ^ Database connection pool.
    , httpManager :: Manager -- ^ Shared HTTP client manager.
    , persistConfig :: Settings.PersistConf -- ^ Persistence backend configuration.
    , appLogger :: Logger -- ^ Application-wide logger.
    , socketEvents :: TChan Text -- ^ Channel of textual events (presumably fanned out to socket clients -- confirm against handler code).
    }
instance HasHttpManager App where
    getHttpManager = httpManager
-- Set up i18n messages. See the message folder.
mkMessage "App" "messages" "en"
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/routing-and-handlers
--
-- Note that this is really half the story; in Application.hs, mkYesodDispatch
-- generates the rest of the code. Please see the linked documentation for an
-- explanation for this split.
mkYesodData "App" $(parseRoutesFile "config/routes")
-- | A classic Yesod form over this site's handler monad.
type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
-- Core site configuration: approot, sessions, page layout, authorization,
-- static-content naming and logging policy.
instance Yesod App where
    approot = ApprootMaster $ appRoot . settings
    -- Store session data on the client in encrypted cookies,
    -- default session idle timeout is 120 minutes
    makeSessionBackend _ = fmap Just $ defaultClientSessionBackend
        120    -- timeout in minutes
        "config/client_session_key.aes"
    defaultLayout widget = do
        master <- getYesod
        mmsg <- getMessage
        -- We break up the default layout into two components:
        -- default-layout is the contents of the body tag, and
        -- default-layout-wrapper is the entire page. Since the final
        -- value passed to hamletToRepHtml cannot be a widget, this allows
        -- you to use normal widget features in default-layout.
        pc <- widgetToPageContent $ do
            $(combineStylesheets 'StaticR
                [ css_normalize_css
                , css_bootstrap_css
                ])
            $(widgetFile "default-layout")
        giveUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")
    -- This is done to provide an optimization for serving static files from
    -- a separate domain. Please see the staticRoot setting in Settings.hs
    urlRenderOverride y (StaticR s) =
        Just $ uncurry (joinPath y (Settings.staticRoot $ settings y)) $ renderRoute s
    urlRenderOverride _ _ = Nothing
    -- The page to be redirected to when authentication is required.
    authRoute _ = Just $ AuthR LoginR
    -- Routes not requiring authentication.
    isAuthorized (AuthR _) _ = return Authorized
    isAuthorized FaviconR _ = return Authorized
    isAuthorized RobotsR _ = return Authorized
    -- Default to Authorized for now.
    isAuthorized _ _ = return Authorized
    -- This function creates static content files in the static folder
    -- and names them based on a hash of their content. This allows
    -- expiration dates to be set far in the future without worry of
    -- users receiving stale content.
    addStaticContent =
        addStaticContentExternal minifym genFileName Settings.staticDir (StaticR . flip StaticRoute [])
      where
        -- Generate a unique filename based on the content itself
        genFileName lbs
            | development = "autogen-" ++ base64md5 lbs
            | otherwise = base64md5 lbs
    -- Place Javascript at bottom of the body tag so the rest of the page loads first
    jsLoader _ = BottomOfBody
    -- What messages should be logged. The following includes all messages when
    -- in development, and warnings and errors in production.
    shouldLog _ _source level =
        development || level == LevelWarn || level == LevelError
    makeLogger = return . appLogger
-- How to run database actions.
instance YesodPersist App where
    type YesodPersistBackend App = SqlPersistT
    -- Run a SQL action with the app's persist config and connection pool.
    runDB = defaultRunDB persistConfig connPool
instance YesodPersistRunner App where
    getDBRunner = defaultGetDBRunner connPool
instance YesodAuth App where
    type AuthId App = UserId
    -- Where to send a user after successful login
    loginDest _ = HomeR
    -- Where to send a user after logout
    logoutDest _ = HomeR
    -- Look the credential identity up; create a password-less user row on
    -- first login.
    getAuthId creds = runDB $ do
        x <- getBy $ UniqueUser $ credsIdent creds
        case x of
            Just (Entity uid _) -> return $ Just uid
            Nothing -> do
                fmap Just $ insert User
                    { userIdent = credsIdent creds
                    , userPassword = Nothing
                    }
    -- You can add other plugins like BrowserID, email or OAuth here
    authPlugins _ = [authBrowserId def]
    authHttpManager = httpManager
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
    renderMessage _ _ = defaultFormMessage
-- | Get the 'Extra' value, used to hold data from the settings.yml file.
getExtra :: Handler Extra
getExtra = do
    site <- getYesod
    return (appExtra (settings site))
-- Note: previous versions of the scaffolding included a deliver function to
-- send emails. Unfortunately, there are too many different options for us to
-- give a reasonable default. Instead, the information is available on the
-- wiki:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
| Codas/campaign-next | Foundation.hs | bsd-3-clause | 6,807 | 0 | 18 | 1,793 | 956 | 524 | 432 | -1 | -1 |
{-# LANGUAGE
QuasiQuotes
, OverloadedStrings
#-}
module Templates.MainScripts where
import Text.Julius
import Data.Aeson.Types (Value (String))
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
-- | Render the site-wide jQuery/Semantic-UI glue script.  The argument is
-- the glossary URL prefix used by the generated @deleteDef@ helper, which
-- DELETEs @<glossary>/<term>@ and removes the term's DOM node on success.
mainScripts :: T.Text -> LT.Text
mainScripts glossary = renderJavascript $ mainScripts' undefined
  where
    -- The julius template ignores its URL-renderer argument, hence the
    -- 'undefined' passed above.
    mainScripts' = [julius|
$(document).ready( function() {
  $("hr").replaceWith("<div class='ui divider'></div>");
  $("h1:not(.header), h2:not(.header), h3:not(.header), h4:not(.header)").addClass("ui header");
  $("table").addClass("ui table striped celled orange");
  $("#docs-markdown img").addClass("ui image centered");
  $("#navBar-menu-button").click(function(e) {
    $("#sideBar").sidebar('toggle');
  });
  $(".ui.accordion").accordion();
});
function deleteDef(t) {
  $.ajax({
    method: "DELETE",
    url: #{String glossary} + "/" + t,
    success: function(data) {
      console.log("DELETE RESPONSE: " + data);
      $("#" + t).remove();
    }
  });
}
|]
-- | Markup for the login link; swapped into the DOM on sign-out.
loginButton = String "<a onclick=\"$('#login').modal('show');\" href=\"#\" id=\"login-button\">Login</a>"
-- | Markup for the logout link; swapped into the DOM after sign-in.
logoutButton = String "<a onclick=\"signOut();\" href=\"#\" id=\"logout-button\">Logout</a>"
-- | Render the Google Sign-In callbacks.  @verify@ is the server endpoint
-- receiving the Google ID token; @logout@ is hit on sign-out.  On success
-- the login/logout links are swapped client-side.
googleSignIn :: T.Text -> T.Text -> LT.Text
googleSignIn verify logout = renderJavascript $ googleSignIn' undefined
  where
    -- As with 'mainScripts', the template's URL renderer is unused.
    googleSignIn' = [julius|
function onSignIn(googleUser) {
  // Useful data for your client-side scripts:
  var profile = googleUser.getBasicProfile();
  console.log("ID: " + profile.getId()); // Don't send this directly to your server!
  console.log("Name: " + profile.getName());
  console.log("Image URL: " + profile.getImageUrl());
  console.log("Email: " + profile.getEmail());
  // The ID token you need to pass to your backend:
  var id_token = googleUser.getAuthResponse().id_token;
  console.log("ID Token: " + id_token);
  $.ajax({
    url: #{String verify},
    data: {"id_token": id_token},
    success: function(data) {
      console.log("LOGIN RESPONSE: " + data);
      $('#login-button').replaceWith(#{logoutButton});
      $('#login').modal('hide');
    }
  });
}
function signOut() {
  var auth2 = gapi.auth2.getAuthInstance();
  auth2.signOut().then(function () {
    console.log('User signed out.');
  });
  $.ajax({
    url: #{String logout},
    success: function(data) {
      console.log("LOGOUT RESPONSE: " + data);
      $('#logout-button').replaceWith(#{loginButton});
    }
  })
}
|]
| athanclark/nested-routes-website | src/Templates/MainScripts.hs | bsd-3-clause | 2,462 | 0 | 7 | 426 | 146 | 87 | 59 | 16 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Dependency.TopDown.Types
-- Copyright : (c) Duncan Coutts 2008
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Types for the top-down dependency resolver.
-----------------------------------------------------------------------------
module Distribution.Client.Dependency.TopDown.Types where
import Distribution.Client.Types
( SourcePackage(..), InstalledPackage )
import Distribution.Package
( PackageIdentifier, Dependency
, Package(packageId), PackageFixedDeps(depends) )
import Distribution.PackageDescription
( FlagAssignment )
-- ------------------------------------------------------------
-- * The various kinds of packages
-- ------------------------------------------------------------
-- | Any package the resolver may pick: known to the installed index, to
-- the source index, or to both.
type SelectablePackage
   = InstalledOrSource InstalledPackageEx UnconfiguredPackage
-- | A package the resolver has picked.
type SelectedPackage
   = InstalledOrSource InstalledPackageEx SemiConfiguredPackage
data InstalledOrSource installed source
   = InstalledOnly installed
   | SourceOnly source
   | InstalledAndSource installed source
  deriving Eq
-- | Position of a package in a topological ordering of the dependency
-- graph.
type TopologicalSortNumber = Int
data InstalledPackageEx
   = InstalledPackageEx
       InstalledPackage
       !TopologicalSortNumber
       [PackageIdentifier]  -- transitive closure of installed deps
data UnconfiguredPackage
   = UnconfiguredPackage
       SourcePackage
       !TopologicalSortNumber
       FlagAssignment
data SemiConfiguredPackage
   = SemiConfiguredPackage
       SourcePackage  -- package info
       FlagAssignment -- total flag assignment for the package
       [Dependency]   -- dependencies we end up with when we apply
                      -- the flag assignment
instance Package InstalledPackageEx where
  packageId (InstalledPackageEx p _ _) = packageId p
instance PackageFixedDeps InstalledPackageEx where
  depends (InstalledPackageEx _ _ deps) = deps
instance Package UnconfiguredPackage where
  packageId (UnconfiguredPackage p _ _) = packageId p
instance Package SemiConfiguredPackage where
  packageId (SemiConfiguredPackage p _ _) = packageId p
instance (Package installed, Package source)
      => Package (InstalledOrSource installed source) where
  packageId (InstalledOnly p ) = packageId p
  packageId (SourceOnly p ) = packageId p
  packageId (InstalledAndSource p _) = packageId p
-- | We can have constraints on selecting just installed or just source
-- packages.
--
-- In particular, installed packages can only depend on other installed
-- packages while packages that are not yet installed but which we plan to
-- install can depend on installed or other not-yet-installed packages.
--
data InstalledConstraint = InstalledConstraint
                         | SourceConstraint
  deriving (Eq, Show)
| IreneKnapp/Faction | faction/Distribution/Client/Dependency/TopDown/Types.hs | bsd-3-clause | 2,978 | 0 | 8 | 583 | 410 | 235 | 175 | 53 | 0 |
import System.Environment (getArgs)
import Data.List.Split (splitOn)
findWine :: String -> String -> Bool
findWine _ "" = True
findWine xs (y:ys) | elem y xs = findWine (l ++ r) ys
| otherwise = False
where l = takeWhile (/= y) xs
r = tail (dropWhile (/= y) xs)
-- | Keep only the candidate wines that can be spelled from the letters.
findWines :: [String] -> String -> [String]
findWines candidates letters = filter (\w -> findWine w letters) candidates

-- | Input is a two-field record: the wine list and the letter pool.
-- Yields the matching wines, or "False" when none match.
findAllWines :: [String] -> String
findAllWines [wineField, letters]
    | null matches = "False"
    | otherwise    = unwords matches
  where
    matches = findWines (words wineField) letters

main :: IO ()
main = do
    [inputPath] <- getArgs
    contents <- readFile inputPath
    mapM_ (putStrLn . findAllWines . splitOn " | ") (lines contents)
| nikai3d/ce-challenges | easy/chardonnay.hs | bsd-3-clause | 886 | 0 | 12 | 326 | 345 | 172 | 173 | 21 | 1 |
{-# LANGUAGE KindSignatures, Rank2Types, TypeFamilies #-}
module Data.Hot.Base
( HotClass
, Hot
, unfold
, elementAt
, mapAt
) where
import GHC.TypeLits (Nat)
-- | Family of fixed-size ("hot") containers whose length @n@ is a
-- type-level 'Nat'.  Instances supply the representation via the
-- associated data family 'Hot'.
class (Foldable (Hot n)) => HotClass (n :: Nat) where
  data Hot n :: * -> *
  -- | Church-style builder: constructs a @Hot n a@ from a "step" and a
  -- "seed" (exact calling protocol is fixed by the instances -- confirm
  -- against them; not visible from this module).
  unfold :: (forall r. c (a -> r) -> c r) -> (forall r. r -> c r) -> c (Hot n a)
  -- | Positional access (index convention defined by instances).
  elementAt :: Hot n a -> Int -> a
  -- | Apply a function to the element at the given index, leaving the
  -- others intact.
  mapAt :: (a -> a) -> Hot n a -> Int -> Hot n a
| tserduke/hot | base/Data/Hot/Base.hs | bsd-3-clause | 425 | 0 | 13 | 119 | 192 | 105 | 87 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module SDL.Exception
( SDLException(..)
, fromC
, getError
, throwIf
, throwIf_
, throwIf0
, throwIfNeg
, throwIfNeg_
, throwIfNot0
, throwIfNot0_
, throwIfNull
) where
import Control.Applicative
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Data (Data)
import Data.Maybe (fromMaybe)
import Data.Text (Text)
import Data.Typeable (Typeable)
import Foreign (Ptr, nullPtr)
import GHC.Generics (Generic)
import qualified SDL.Raw as Raw
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as Text
-- | Errors raised by this binding.  Each variant carries enough context
-- (calling function, SDL function, error text or offending value) to build
-- a useful message.
data SDLException
  -- | An SDL call signalled failure; includes SDL's own error string.
  = SDLCallFailed {sdlExceptionCaller :: !Text
                  ,sdlFunction :: !Text
                  ,sdlExceptionError :: !Text}
  -- | An SDL call produced a value we don't know how to convert.
  | SDLUnexpectedArgument {sdlExceptionCaller :: !Text
                          ,sdlFunction :: !Text
                          ,sdlUnknownValue :: !String}
  -- | A hint was set to a value this binding doesn't recognise.
  | SDLUnknownHintValue {sdlHint :: !String
                        ,sdlUnknownValue :: !String}
  deriving (Data, Eq, Generic, Ord, Read, Show, Typeable)
instance Exception SDLException
-- | Fetch SDL's current error string and decode it as UTF-8.
getError :: MonadIO m => m Text
getError = liftIO $ do
  cstr <- Raw.getError
  Text.decodeUtf8 <$> BS.packCString cstr
-- | Run an action; when the predicate marks its result as a failure, throw
-- 'SDLCallFailed' tagged with the caller and SDL function names and
-- carrying SDL's error string.  Otherwise return the result.
throwIf :: MonadIO m => (a -> Bool) -> Text -> Text -> m a -> m a
throwIf isFailure caller funName action = do
  result <- action
  liftIO $ when (isFailure result) $ do
    errText <- getError
    throwIO (SDLCallFailed caller funName errText)
  return result

-- | Like 'throwIf', but discards the successful result.
throwIf_ :: MonadIO m => (a -> Bool) -> Text -> Text -> m a -> m ()
throwIf_ isFailure caller funName action = do
  _ <- throwIf isFailure caller funName action
  return ()
-- | Throw when the result is negative (a common SDL failure convention).
throwIfNeg :: (MonadIO m, Num a, Ord a) => Text -> Text -> m a -> m a
throwIfNeg = throwIf (< 0)
throwIfNeg_ :: (MonadIO m, Num a, Ord a) => Text -> Text -> m a -> m ()
throwIfNeg_ = throwIf_ (< 0)
-- | Throw when the call returns a null pointer.
throwIfNull :: (MonadIO m) => Text -> Text -> m (Ptr a) -> m (Ptr a)
throwIfNull = throwIf (== nullPtr)
-- | Throw when the result is zero.
throwIf0 :: (Eq a, MonadIO m, Num a) => Text -> Text -> m a -> m a
throwIf0 = throwIf (== 0)
-- | Throw when the result is non-zero.
throwIfNot0 :: (Eq a, MonadIO m, Num a) => Text -> Text -> m a -> m a
throwIfNot0 = throwIf (/= 0)
throwIfNot0_ :: (Eq a, MonadIO m, Num a) => Text -> Text -> m a -> m ()
throwIfNot0_ = throwIf_ (/= 0)
-- | Convert a raw C-level value with a partial conversion function,
-- throwing 'SDLUnexpectedArgument' (recording caller, function and the
-- offending value) when the conversion has no answer.
fromC :: Show a => Text -> Text -> (a -> Maybe b) -> a -> b
fromC caller funName convert raw =
  case convert raw of
    Just converted -> converted
    Nothing        -> throw (SDLUnexpectedArgument caller
                                                   funName
                                                   (show raw))
| svenkeidel/sdl2 | src/SDL/Exception.hs | bsd-3-clause | 2,491 | 0 | 12 | 653 | 927 | 494 | 433 | 84 | 1 |
{-# LANGUAGE BangPatterns, TypeSynonymInstances, CPP #-}
module Data.TrieMap.OrdMap.Splittable () where
import Data.TrieMap.OrdMap.Base
import Data.TrieMap.OrdMap.Zippable ()
-- | Splitting an ordered map at the focus of a zipper: 'before'/'after'
-- rebuild the part of the map to the left/right of the hole, and the
-- @...With@ variants additionally re-insert a value at the hole's key.
instance Splittable (SNode k) where
  before (Empty _ path) = beforePath tip path
  before (Full _ path l _) = beforePath l path
  beforeWith a (Empty k path) = beforePath (single k a) path
  beforeWith a (Full k path l _) = beforePath (insertMax k a l) path
  after (Empty _ path) = afterPath tip path
  after (Full _ path _ r) = afterPath r path
  afterWith a (Empty k path) = afterPath (single k a) path
  afterWith a (Full k path _ r) = afterPath (insertMin k a r) path
-- CPP shorthand: unwrap the 'Hole', run the 'SNode' operation, rewrap.
#define SPLIT(op) op (Hole hole) = OrdMap (op hole)
instance Splittable (OrdMap k) where
  SPLIT(before)
  SPLIT(beforeWith a)
  SPLIT(after)
  SPLIT(afterWith a)
-- | Walk back up the path accumulating everything left of the hole:
-- the left sibling stored on each 'RightBin' link is joined in.
beforePath :: Sized a => SNode k a -> Path k a -> SNode k a
beforePath !t (LeftBin _ _ path _) = beforePath t path
beforePath !t (RightBin k a l path) = beforePath (join k a l t) path
beforePath !t _ = t
-- | Mirror image of 'beforePath': joins in the right siblings stored on
-- 'LeftBin' links to build the part of the map right of the hole.
afterPath :: Sized a => SNode k a -> Path k a -> SNode k a
afterPath !t (LeftBin k a path r) = afterPath (join k a t r) path
afterPath !t (RightBin _ _ _ path) = afterPath t path
afterPath !t _ = t
| lowasser/TrieMap | Data/TrieMap/OrdMap/Splittable.hs | bsd-3-clause | 1,220 | 9 | 8 | 250 | 379 | 214 | 165 | -1 | -1 |
module Game.Dink.Files.SaveDat where
import Control.Monad
import qualified Data.ByteString.Lazy as BS
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import Data.Char
import qualified Data.Array.IArray as A
import qualified Data.Map as M
import CStructUtil
-- | In-memory image of a Dink save-game file.  Field names mirror the
-- on-disk struct; the arrays are 1-based (the 'Binary' instance below
-- reads and discards each array's element 0).
data DinkSave = Save { save_version :: Int
                     , save_info :: String
                     , save_minutes :: Int
                     , save_x :: Int
                     , save_y :: Int
                     , save_die :: Int
                     , save_size :: Int
                     , save_defense :: Int
                     , save_dir :: Int
                     , save_pframe :: Int
                     , save_pseq :: Int
                     , save_seq :: Int
                     , save_frame :: Int
                     , save_strength :: Int
                     , save_base_walk :: Int
                     , save_base_idle :: Int
                     , save_base_hit :: Int
                     , save_que :: Int
                     , save_magic :: A.Array Int Magic
                     , save_items :: A.Array Int Item
                     , save_spmap :: A.Array Int (A.Array Int EditorInfo, Int) -- [769], [100]
                     , save_buttons :: A.Array Int Int
                     , save_varman :: M.Map String VarInfo
                     , save_last_talk :: Int
                     , save_mouse :: Int
                     , save_last_map :: Int
                     , save_mapdat :: String
                     , save_dinkdat :: String
                     , save_palette :: String
                     , save_tiles :: A.Array Int String
                     , save_funcs :: M.Map String Func
                     }
  deriving (Show)
-- | Wire format of the save file.  NOTE(review): the leading element of
-- each on-disk array is read and discarded (presumably mirroring 1-based
-- indexing in the game's C structs); 'skip' calls step over bytes whose
-- meaning is unknown here.
instance Binary DinkSave where
  get = do
    version <- getInt
    info <- getString 196
    [min,x,y,die,size,def,dir,pfr,pseq,seq,fr,str,base_walk,base_idle,base_hit,que] <- replicateM 16 getInt
    -- 9 magic slots / 17 item slots on disk; slot 0 is discarded.
    (_:magic') <- replicateM 9 get
    (_:items') <- replicateM 17 get
    let magic = A.array (1,8) $ zip [1..] magic'
        items = A.array (1,16) $ zip [1..] items'
    skip (3*4)  -- 12 unread bytes
    spMap <- getSpMap
    button' <- replicateM 10 getInt
    let buttons = A.array (1,10) $ zip [1..] button'
    varman <- getVarMan
    skip (3*4)  -- 12 unread bytes
    [last_talk,mouse] <- replicateM 2 getInt
    skip 4      -- 4 unread bytes
    last_map <- getInt
    mapdat <- getString 50
    dinkdat <- getString 50
    palette <- getString 50
    tiles' <- replicateM 42 $ getString 50
    let tiles = A.array (1,42) $ zip [1..] tiles'
    funcs <- getFuncs
    skip 750    -- trailing unread space
    return $ Save version info min x y die size def dir pfr pseq seq fr str base_walk base_idle base_hit que magic items spMap buttons varman last_talk mouse last_map mapdat dinkdat palette tiles funcs
-- | Per-sprite editor record, stored on disk as parallel arrays
-- (see 'getEditorInfo').
data EditorInfo = EditorInfo { editor_type :: Int -- in-file as char
                             , editor_seq :: Int -- in-file as short
                             , editor_frame :: Int -- in-file as char
                             }
  deriving (Show)
-- | 769 editor records; record 0 is discarded, leaving a 1-based array
-- of 768 map screens.
getSpMap :: Get (A.Array Int (A.Array Int EditorInfo, Int))
getSpMap = fmap (A.array (1,768) . zip [1..] . tail) $ replicateM 769 getEditorInfo
-- | One screen's data: three parallel 100-entry arrays (type as byte,
-- seq as little-endian short, frame as byte) with entry 0 discarded,
-- followed by a trailing Int.
getEditorInfo :: Get (A.Array Int EditorInfo, Int)
getEditorInfo = do
  (_:types) <- fmap (map fromIntegral) $ replicateM 100 getWord8
  (_:seqs) <- fmap (map fromIntegral) $ replicateM 100 getWord16le
  (_:frames) <- fmap (map fromIntegral) $ replicateM 100 getWord8
  last <- getInt
  let arr = A.array (1,99) $ zip [1..] $ zipWith3 EditorInfo types seqs frames
  return (arr, last)
-- | A script variable: value, scope and an in-use flag.
data VarInfo = VarInfo { var_val :: Int
                       , var_scope :: Int
                       , var_active :: Bool
                       }
  deriving (Show)
-- | Read the 100 fixed-size variable records, keyed by their names.
getVarMan :: Get (M.Map String VarInfo)
getVarMan = do
  vars <- replicateM 100 $ do
    val <- getInt
    name <- getString 20
    scope <- getInt
    active <- getInt
    return $ (name,VarInfo val scope (toBool active))
  return $ M.fromList vars
-- | A callback binding: script file plus function name.
data Func = Func { func_file :: String
                 , func_func :: String
                 }
  deriving (Show)
-- | Read the 100 fixed-size function records, keyed by function name.
getFuncs :: Get (M.Map String Func)
getFuncs = do
  funcs <- replicateM 100 $ do
    file <- getString 10
    func <- getString 20
    return (func,Func file func)
  return $ M.fromList funcs
-- | Magic slots share the item layout.
type Magic = Item
-- | An inventory (or magic) slot.
data Item = Item { item_active :: Bool
                 , item_name :: String
                 , item_seq :: Int
                 , item_frame :: Int
                 }
  deriving (Show)
-- | On disk: a 32-bit active flag, a 10-byte name, then seq and frame.
-- NOTE(review): only 'get' is defined; serializing an 'Item' with 'put'
-- would fail at runtime.
instance Binary Item where
  get = do
    act <- getInt
    name <- getString 10
    seq <- getInt
    fr <- getInt
    return $ Item (toBool act) name seq fr
-- | C-style truthiness: zero is 'False', anything else is 'True'.
-- (Kept as a function binding so the monomorphism restriction does not
-- pin the argument type.)
toBool x = x /= 0
| tchakkazulu/dinkfiles | Game/Dink/Files/SaveDat.hs | bsd-3-clause | 4,718 | 0 | 15 | 1,731 | 1,473 | 779 | 694 | 120 | 1 |
--------------------------------------------------------------------------------
-- $Id: URITest.hs,v 1.8 2005/07/19 22:01:27 gklyne Exp $
--
-- Copyright (c) 2004, G. KLYNE. All rights reserved.
-- See end of this file for licence information.
--------------------------------------------------------------------------------
-- |
-- Module : URITest
-- Copyright : (c) 2004, Graham Klyne
-- License : BSD-style (see end of this file)
--
-- Maintainer : Graham Klyne
-- Stability : provisional
-- Portability : H98
--
-- This Module contains test cases for module URI.
--
-- Using GHC, I compile with this command line:
-- ghc --make -fglasgow-exts
-- -i..\;C:\Dev\Haskell\Lib\HUnit;C:\Dev\Haskell\Lib\Parsec
-- -o URITest.exe URITest -main-is URITest.main
-- The -i line may need changing for alternative installations.
--
--------------------------------------------------------------------------------
module URITest where
import Network.URI
( URI(..), URIAuth(..)
, nullURI
, parseURI, parseURIReference, parseRelativeReference, parseAbsoluteURI
, parseabsoluteURI
, isURI, isURIReference, isRelativeReference, isAbsoluteURI
, isIPv6address, isIPv4address
, relativeTo, nonStrictRelativeTo
, relativeFrom
, uriToString
, isUnescapedInURI, escapeURIString, unEscapeString
, normalizeCase, normalizeEscape, normalizePathSegments
)
import HUnit
import IO ( Handle, openFile, IOMode(WriteMode), hClose, hPutStr, hPutStrLn )
import Maybe ( fromJust )
-- Test supplied string for valid URI reference syntax
-- isValidURIRef :: String -> Bool
-- Test supplied string for valid absolute URI reference syntax
-- isAbsoluteURIRef :: String -> Bool
-- Test supplied string for valid absolute URI syntax
-- isAbsoluteURI :: String -> Bool
-- | Classification of a test URI string; drives which of the three
-- predicate functions from Network.URI should accept it.
data URIType = AbsId -- URI form (absolute, no fragment)
             | AbsRf -- Absolute URI reference
             | RelRf -- Relative URI reference
             | InvRf -- Invalid URI reference

-- | Every classification except 'InvRf' is a syntactically valid
-- URI reference.
isValidT :: URIType -> Bool
isValidT t = case t of
    InvRf -> False
    _     -> True

-- | Absolute URI references: either a bare URI or one with a fragment.
isAbsRfT :: URIType -> Bool
isAbsRfT t = case t of
    AbsId -> True
    AbsRf -> True
    _     -> False

-- | Only 'RelRf' counts as a relative reference.
isRelRfT :: URIType -> Bool
isRelRfT t = case t of
    RelRf -> True
    _     -> False

-- | Only 'AbsId' counts as an absolute URI (no fragment allowed).
isAbsIdT :: URIType -> Bool
isAbsIdT t = case t of
    AbsId -> True
    _     -> False
-- | A single HUnit case asserting that two values are equal, reported
-- under the given label.
testEq :: (Eq a, Show a) => String -> a -> a -> Test
testEq lab expected found = TestCase (assertEqual lab expected found)
-- | Check that a URI string is classified as expected by each of the
-- three recognizer predicates from Network.URI.
testURIRef :: URIType -> String -> Test
testURIRef expect s = TestList
    [ testEq ("test_isURIReference:"++s)      (isValidT expect) (isURIReference s)
    , testEq ("test_isRelativeReference:"++s) (isRelRfT expect) (isRelativeReference s)
    , testEq ("test_isAbsoluteURI:"++s)       (isAbsIdT expect) (isAbsoluteURI s)
    ]
-- | Check that parsing a URI reference yields exactly the expected
-- component decomposition ('Nothing' when the parse should fail).
-- The first argument is accepted for symmetry with other helpers but
-- is not used in the reported label.
testURIRefComponents :: String -> (Maybe URI) -> String -> Test
testURIRefComponents lab uv us =
    testEq ("testURIRefComponents:"++us) uv parsed
    where
        parsed = parseURIReference us
testURIRef001 = testURIRef AbsRf "http://example.org/aaa/bbb#ccc"
testURIRef002 = testURIRef AbsId "mailto:local@domain.org"
testURIRef003 = testURIRef AbsRf "mailto:local@domain.org#frag"
testURIRef004 = testURIRef AbsRf "HTTP://EXAMPLE.ORG/AAA/BBB#CCC"
testURIRef005 = testURIRef RelRf "//example.org/aaa/bbb#ccc"
testURIRef006 = testURIRef RelRf "/aaa/bbb#ccc"
testURIRef007 = testURIRef RelRf "bbb#ccc"
testURIRef008 = testURIRef RelRf "#ccc"
testURIRef009 = testURIRef RelRf "#"
testURIRef010 = testURIRef RelRf "/"
-- escapes
testURIRef011 = testURIRef AbsRf "http://example.org/aaa%2fbbb#ccc"
testURIRef012 = testURIRef AbsRf "http://example.org/aaa%2Fbbb#ccc"
testURIRef013 = testURIRef RelRf "%2F"
testURIRef014 = testURIRef RelRf "aaa%2Fbbb"
-- ports
testURIRef015 = testURIRef AbsRf "http://example.org:80/aaa/bbb#ccc"
testURIRef016 = testURIRef AbsRf "http://example.org:/aaa/bbb#ccc"
testURIRef017 = testURIRef AbsRf "http://example.org./aaa/bbb#ccc"
testURIRef018 = testURIRef AbsRf "http://example.123./aaa/bbb#ccc"
-- bare authority
testURIRef019 = testURIRef AbsId "http://example.org"
-- IPv6 literals (from RFC2732):
testURIRef021 = testURIRef AbsId "http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:80/index.html"
testURIRef022 = testURIRef AbsId "http://[1080:0:0:0:8:800:200C:417A]/index.html"
testURIRef023 = testURIRef AbsId "http://[3ffe:2a00:100:7031::1]"
testURIRef024 = testURIRef AbsId "http://[1080::8:800:200C:417A]/foo"
testURIRef025 = testURIRef AbsId "http://[::192.9.5.5]/ipng"
testURIRef026 = testURIRef AbsId "http://[::FFFF:129.144.52.38]:80/index.html"
testURIRef027 = testURIRef AbsId "http://[2010:836B:4179::836B:4179]"
testURIRef028 = testURIRef RelRf "//[2010:836B:4179::836B:4179]"
testURIRef029 = testURIRef InvRf "[2010:836B:4179::836B:4179]"
-- RFC2396 test cases
testURIRef031 = testURIRef RelRf "./aaa"
testURIRef032 = testURIRef RelRf "../aaa"
testURIRef033 = testURIRef AbsId "g:h"
testURIRef034 = testURIRef RelRf "g"
testURIRef035 = testURIRef RelRf "./g"
testURIRef036 = testURIRef RelRf "g/"
testURIRef037 = testURIRef RelRf "/g"
testURIRef038 = testURIRef RelRf "//g"
testURIRef039 = testURIRef RelRf "?y"
testURIRef040 = testURIRef RelRf "g?y"
testURIRef041 = testURIRef RelRf "#s"
testURIRef042 = testURIRef RelRf "g#s"
testURIRef043 = testURIRef RelRf "g?y#s"
testURIRef044 = testURIRef RelRf ";x"
testURIRef045 = testURIRef RelRf "g;x"
testURIRef046 = testURIRef RelRf "g;x?y#s"
testURIRef047 = testURIRef RelRf "."
testURIRef048 = testURIRef RelRf "./"
testURIRef049 = testURIRef RelRf ".."
testURIRef050 = testURIRef RelRf "../"
testURIRef051 = testURIRef RelRf "../g"
testURIRef052 = testURIRef RelRf "../.."
testURIRef053 = testURIRef RelRf "../../"
testURIRef054 = testURIRef RelRf "../../g"
testURIRef055 = testURIRef RelRf "../../../g"
testURIRef056 = testURIRef RelRf "../../../../g"
testURIRef057 = testURIRef RelRf "/./g"
testURIRef058 = testURIRef RelRf "/../g"
testURIRef059 = testURIRef RelRf "g."
testURIRef060 = testURIRef RelRf ".g"
testURIRef061 = testURIRef RelRf "g.."
testURIRef062 = testURIRef RelRf "..g"
testURIRef063 = testURIRef RelRf "./../g"
testURIRef064 = testURIRef RelRf "./g/."
testURIRef065 = testURIRef RelRf "g/./h"
testURIRef066 = testURIRef RelRf "g/../h"
testURIRef067 = testURIRef RelRf "g;x=1/./y"
testURIRef068 = testURIRef RelRf "g;x=1/../y"
testURIRef069 = testURIRef RelRf "g?y/./x"
testURIRef070 = testURIRef RelRf "g?y/../x"
testURIRef071 = testURIRef RelRf "g#s/./x"
testURIRef072 = testURIRef RelRf "g#s/../x"
testURIRef073 = testURIRef RelRf ""
testURIRef074 = testURIRef RelRf "A'C"
testURIRef075 = testURIRef RelRf "A$C"
testURIRef076 = testURIRef RelRf "A@C"
testURIRef077 = testURIRef RelRf "A,C"
-- Invalid
testURIRef080 = testURIRef InvRf "http://foo.org:80Path/More"
testURIRef081 = testURIRef InvRf "::"
testURIRef082 = testURIRef InvRf " "
testURIRef083 = testURIRef InvRf "%"
testURIRef084 = testURIRef InvRf "A%Z"
testURIRef085 = testURIRef InvRf "%ZZ"
testURIRef086 = testURIRef InvRf "%AZ"
testURIRef087 = testURIRef InvRf "A C"
-- testURIRef088 = -- (case removed)
-- testURIRef089 = -- (case removed)
testURIRef090 = testURIRef InvRf "A\"C"
testURIRef091 = testURIRef InvRf "A`C"
testURIRef092 = testURIRef InvRf "A<C"
testURIRef093 = testURIRef InvRf "A>C"
testURIRef094 = testURIRef InvRf "A^C"
testURIRef095 = testURIRef InvRf "A\\C"
testURIRef096 = testURIRef InvRf "A{C"
testURIRef097 = testURIRef InvRf "A|C"
testURIRef098 = testURIRef InvRf "A}C"
-- From RFC2396:
-- rel_segment = 1*( unreserved | escaped |
-- ";" | "@" | "&" | "=" | "+" | "$" | "," )
-- unreserved = alphanum | mark
-- mark = "-" | "_" | "." | "!" | "~" | "*" | "'" |
-- "(" | ")"
-- Note RFC 2732 allows '[', ']' ONLY for reserved purpose of IPv6 literals,
-- or does it?
testURIRef101 = testURIRef InvRf "A[C"
testURIRef102 = testURIRef InvRf "A]C"
testURIRef103 = testURIRef InvRf "A[**]C"
testURIRef104 = testURIRef InvRf "http://[xyz]/"
testURIRef105 = testURIRef InvRf "http://]/"
testURIRef106 = testURIRef InvRf "http://example.org/[2010:836B:4179::836B:4179]"
testURIRef107 = testURIRef InvRf "http://example.org/abc#[2010:836B:4179::836B:4179]"
testURIRef108 = testURIRef InvRf "http://example.org/xxx/[qwerty]#a[b]"
-- Random other things that crop up
testURIRef111 = testURIRef AbsRf "http://example/Andrȷ"
testURIRef112 = testURIRef AbsId "file:///C:/DEV/Haskell/lib/HXmlToolbox-3.01/examples/"
testURIRef113 = testURIRef AbsId "http://46229EFFE16A9BD60B9F1BE88B2DB047ADDED785/demo.mp3"
testURIRef114 = testURIRef InvRf "http://example.org/xxx/qwerty#a#b"
testURIRef115 = testURIRef InvRf "dcp.tcp.pft://192.168.0.1:1002:3002?fec=1&crc=0"
testURIRef116 = testURIRef AbsId "dcp.tcp.pft://192.168.0.1:1002?fec=1&crc=0"
testURIRef117 = testURIRef AbsId "foo://"
testURIRefSuite = TestLabel "Test URIrefs" testURIRefList
testURIRefList = TestList
[
testURIRef001, testURIRef002, testURIRef003, testURIRef004,
testURIRef005, testURIRef006, testURIRef007, testURIRef008,
testURIRef009, testURIRef010,
--
testURIRef011, testURIRef012, testURIRef013, testURIRef014,
testURIRef015, testURIRef016, testURIRef017, testURIRef018,
--
testURIRef019,
--
testURIRef021, testURIRef022, testURIRef023, testURIRef024,
testURIRef025, testURIRef026, testURIRef027, testURIRef028,
testURIRef029,
--
testURIRef031, testURIRef032, testURIRef033, testURIRef034,
testURIRef035, testURIRef036, testURIRef037, testURIRef038,
testURIRef039,
testURIRef040, testURIRef041, testURIRef042, testURIRef043,
testURIRef044, testURIRef045, testURIRef046, testURIRef047,
testURIRef048, testURIRef049,
testURIRef050, testURIRef051, testURIRef052, testURIRef053,
testURIRef054, testURIRef055, testURIRef056, testURIRef057,
testURIRef058, testURIRef059,
testURIRef060, testURIRef061, testURIRef062, testURIRef063,
testURIRef064, testURIRef065, testURIRef066, testURIRef067,
testURIRef068, testURIRef069,
testURIRef070, testURIRef071, testURIRef072, testURIRef073,
testURIRef074, testURIRef075, testURIRef076, testURIRef077,
--
testURIRef080,
testURIRef081, testURIRef082, testURIRef083, testURIRef084,
testURIRef085, testURIRef086, testURIRef087, -- testURIRef088,
-- testURIRef089,
testURIRef090, testURIRef091, testURIRef092, testURIRef093,
testURIRef094, testURIRef095, testURIRef096, testURIRef097,
testURIRef098, -- testURIRef099,
--
testURIRef101, testURIRef102, testURIRef103, testURIRef104,
testURIRef105, testURIRef106, testURIRef107, testURIRef108,
--
testURIRef111, testURIRef112, testURIRef113, testURIRef114,
testURIRef115, testURIRef116, testURIRef117
]
-- test decomposition of URI into components
testComponent01 = testURIRefComponents "testComponent01"
( Just $ URI
{ uriScheme = "http:"
, uriAuthority = Just (URIAuth "user:pass@" "example.org" ":99")
, uriPath = "/aaa/bbb"
, uriQuery = "?qqq"
, uriFragment = "#fff"
} )
"http://user:pass@example.org:99/aaa/bbb?qqq#fff"
-- "http://user:pass@example.org:99aaa/bbb" is not a valid URI
-- reference: the authority component must be followed by "/", "?",
-- "#" or the end of the string, so the parse is expected to fail.
testComponent02 = testURIRefComponents "testComponent02"
    Nothing
    "http://user:pass@example.org:99aaa/bbb"
testComponent03 = testURIRefComponents "testComponent03"
( Just $ URI
{ uriScheme = "http:"
, uriAuthority = Just (URIAuth "user:pass@" "example.org" ":99")
, uriPath = ""
, uriQuery = "?aaa/bbb"
, uriFragment = ""
} )
"http://user:pass@example.org:99?aaa/bbb"
-- A fragment may contain "/" characters; the whole "#aaa/bbb" must end
-- up in the fragment component.
-- (Label corrected: it previously duplicated "testComponent03".)
testComponent04 = testURIRefComponents "testComponent04"
    ( Just $ URI
        { uriScheme    = "http:"
        , uriAuthority = Just (URIAuth "user:pass@" "example.org" ":99")
        , uriPath      = ""
        , uriQuery     = ""
        , uriFragment  = "#aaa/bbb"
        } )
    "http://user:pass@example.org:99#aaa/bbb"
-- These test cases contributed by Robert Buck (mathworks.com)
-- A scheme with no authority, path, query or fragment at all.
-- (Label corrected: it previously read "testComponent03".)
testComponent11 = testURIRefComponents "testComponent11"
    ( Just $ URI
        { uriScheme    = "about:"
        , uriAuthority = Nothing
        , uriPath      = ""
        , uriQuery     = ""
        , uriFragment  = ""
        } )
    "about:"
-- A file: URI with a Windows-style host and a "$" in the path.
-- (Label corrected: it previously read "testComponent03".)
testComponent12 = testURIRefComponents "testComponent12"
    ( Just $ URI
        { uriScheme    = "file:"
        , uriAuthority = Just (URIAuth "" "windowsauth" "")
        , uriPath      = "/d$"
        , uriQuery     = ""
        , uriFragment  = ""
        } )
    "file://windowsauth/d$"
-- Suite of component-decomposition tests.
-- (Label changed from "Test URIrefs", which duplicated the label of
-- 'testURIRefSuite' and made failure reports ambiguous.)
testComponentSuite = TestLabel "Test URI component decomposition" $ TestList
    [ testComponent01
    , testComponent02
    , testComponent03
    , testComponent04
    , testComponent11
    , testComponent12
    ]
-- Get reference relative to given base
-- relativeRef :: String -> String -> String
--
-- Get absolute URI given base and relative reference
-- absoluteURI :: String -> String -> String
--
-- Test cases taken from: http://www.w3.org/2000/10/swap/uripath.py
-- (Thanks, Dan Connolly)
--
-- NOTE: absoluteURI base (relativeRef base u) is always equivalent to u.
-- cf. http://lists.w3.org/Archives/Public/uri/2003Jan/0008.html
-- | Check that @uabs `relativeFrom` base@ yields @urel@.  Both strings
-- are parsed as URI references first; a parse failure shows up as an
-- "Invalid URI" string in the comparison, naming the offending input.
testRelSplit :: String -> String -> String -> String -> Test
testRelSplit label base uabs urel =
    testEq label urel (mkrel puabs pubas)
    where
        mkrel (Just u1) (Just u2) = show (u1 `relativeFrom` u2)
        -- Report the string that actually failed to parse.  The first
        -- argument is the parsed absolute URI and the second is the
        -- parsed base (previously these messages named urel/uabs,
        -- i.e. the wrong strings in both branches).
        mkrel Nothing _ = "Invalid URI: "++uabs
        mkrel _ Nothing = "Invalid URI: "++base
        puabs = parseURIReference uabs
        pubas = parseURIReference base
-- | Check that @urel `relativeTo` base@ yields @uabs@.  Both strings
-- are parsed as URI references first; a parse failure or a failed
-- join shows up as a diagnostic string in the comparison.
testRelJoin :: String -> String -> String -> String -> Test
testRelJoin label base urel uabs =
    testEq label uabs (mkabs purel pubas)
    where
        mkabs (Just u1) (Just u2) = shabs (u1 `relativeTo` u2)
        mkabs Nothing _ = "Invalid URI: "++urel
        -- A Nothing in the second position means the *base* failed to
        -- parse (previously this branch wrongly reported uabs).
        mkabs _ Nothing = "Invalid URI: "++base
        shabs (Just u) = show u
        shabs Nothing  = "No result"
        purel = parseURIReference urel
        pubas = parseURIReference base
-- | Exercise one base/absolute/relative triple in both directions:
-- splitting the absolute URI into a relative reference against the
-- base, and joining the relative reference back onto the base.
testRelative :: String -> String -> String -> String -> Test
testRelative label base uabs urel = TestList
    [ testRelSplit (label++"(rel)") base uabs urel
    , testRelJoin  (label++"(abs)") base urel uabs
    ]
testRelative01 = testRelative "testRelative01"
"foo:xyz" "bar:abc" "bar:abc"
testRelative02 = testRelative "testRelative02"
"http://example/x/y/z" "http://example/x/abc" "../abc"
testRelative03 = testRelative "testRelative03"
"http://example2/x/y/z" "http://example/x/abc" "//example/x/abc"
-- "http://example2/x/y/z" "http://example/x/abc" "http://example/x/abc"
testRelative04 = testRelative "testRelative04"
"http://ex/x/y/z" "http://ex/x/r" "../r"
testRelative05 = testRelative "testRelative05"
"http://ex/x/y/z" "http://ex/r" "/r"
-- "http://ex/x/y/z" "http://ex/r" "../../r"
testRelative06 = testRelative "testRelative06"
"http://ex/x/y/z" "http://ex/x/y/q/r" "q/r"
testRelative07 = testRelative "testRelative07"
"http://ex/x/y" "http://ex/x/q/r#s" "q/r#s"
testRelative08 = testRelative "testRelative08"
"http://ex/x/y" "http://ex/x/q/r#s/t" "q/r#s/t"
testRelative09 = testRelative "testRelative09"
"http://ex/x/y" "ftp://ex/x/q/r" "ftp://ex/x/q/r"
testRelative10 = testRelative "testRelative10"
-- "http://ex/x/y" "http://ex/x/y" "y"
"http://ex/x/y" "http://ex/x/y" ""
testRelative11 = testRelative "testRelative11"
-- "http://ex/x/y/" "http://ex/x/y/" "./"
"http://ex/x/y/" "http://ex/x/y/" ""
testRelative12 = testRelative "testRelative12"
-- "http://ex/x/y/pdq" "http://ex/x/y/pdq" "pdq"
"http://ex/x/y/pdq" "http://ex/x/y/pdq" ""
testRelative13 = testRelative "testRelative13"
"http://ex/x/y/" "http://ex/x/y/z/" "z/"
testRelative14 = testRelative "testRelative14"
-- "file:/swap/test/animal.rdf" "file:/swap/test/animal.rdf#Animal" "animal.rdf#Animal"
"file:/swap/test/animal.rdf" "file:/swap/test/animal.rdf#Animal" "#Animal"
testRelative15 = testRelative "testRelative15"
"file:/e/x/y/z" "file:/e/x/abc" "../abc"
testRelative16 = testRelative "testRelative16"
"file:/example2/x/y/z" "file:/example/x/abc" "/example/x/abc"
testRelative17 = testRelative "testRelative17"
"file:/ex/x/y/z" "file:/ex/x/r" "../r"
testRelative18 = testRelative "testRelative18"
"file:/ex/x/y/z" "file:/r" "/r"
testRelative19 = testRelative "testRelative19"
"file:/ex/x/y" "file:/ex/x/q/r" "q/r"
testRelative20 = testRelative "testRelative20"
"file:/ex/x/y" "file:/ex/x/q/r#s" "q/r#s"
testRelative21 = testRelative "testRelative21"
"file:/ex/x/y" "file:/ex/x/q/r#" "q/r#"
testRelative22 = testRelative "testRelative22"
"file:/ex/x/y" "file:/ex/x/q/r#s/t" "q/r#s/t"
testRelative23 = testRelative "testRelative23"
"file:/ex/x/y" "ftp://ex/x/q/r" "ftp://ex/x/q/r"
testRelative24 = testRelative "testRelative24"
-- "file:/ex/x/y" "file:/ex/x/y" "y"
"file:/ex/x/y" "file:/ex/x/y" ""
testRelative25 = testRelative "testRelative25"
-- "file:/ex/x/y/" "file:/ex/x/y/" "./"
"file:/ex/x/y/" "file:/ex/x/y/" ""
testRelative26 = testRelative "testRelative26"
-- "file:/ex/x/y/pdq" "file:/ex/x/y/pdq" "pdq"
"file:/ex/x/y/pdq" "file:/ex/x/y/pdq" ""
testRelative27 = testRelative "testRelative27"
"file:/ex/x/y/" "file:/ex/x/y/z/" "z/"
testRelative28 = testRelative "testRelative28"
"file:/devel/WWW/2000/10/swap/test/reluri-1.n3"
"file://meetings.example.com/cal#m1" "//meetings.example.com/cal#m1"
-- "file:/devel/WWW/2000/10/swap/test/reluri-1.n3"
-- "file://meetings.example.com/cal#m1" "file://meetings.example.com/cal#m1"
testRelative29 = testRelative "testRelative29"
"file:/home/connolly/w3ccvs/WWW/2000/10/swap/test/reluri-1.n3"
"file://meetings.example.com/cal#m1" "//meetings.example.com/cal#m1"
-- "file:/home/connolly/w3ccvs/WWW/2000/10/swap/test/reluri-1.n3"
-- "file://meetings.example.com/cal#m1" "file://meetings.example.com/cal#m1"
testRelative30 = testRelative "testRelative30"
"file:/some/dir/foo" "file:/some/dir/#blort" "./#blort"
testRelative31 = testRelative "testRelative31"
"file:/some/dir/foo" "file:/some/dir/#" "./#"
testRelative32 = testRelative "testRelative32"
"http://ex/x/y" "http://ex/x/q:r" "./q:r"
-- see RFC2396bis, section 5 ^^
testRelative33 = testRelative "testRelative33"
"http://ex/x/y" "http://ex/x/p=q:r" "./p=q:r"
-- "http://ex/x/y" "http://ex/x/p=q:r" "p=q:r"
testRelative34 = testRelative "testRelative34"
"http://ex/x/y?pp/qq" "http://ex/x/y?pp/rr" "?pp/rr"
testRelative35 = testRelative "testRelative35"
"http://ex/x/y?pp/qq" "http://ex/x/y/z" "y/z"
testRelative36 = testRelative "testRelative36"
"mailto:local"
"mailto:local/qual@domain.org#frag"
"local/qual@domain.org#frag"
testRelative37 = testRelative "testRelative37"
"mailto:local/qual1@domain1.org"
"mailto:local/more/qual2@domain2.org#frag"
"more/qual2@domain2.org#frag"
testRelative38 = testRelative "testRelative38"
"http://ex/x/z?q" "http://ex/x/y?q" "y?q"
testRelative39 = testRelative "testRelative39"
"http://ex?p" "http://ex/x/y?q" "/x/y?q"
testRelative40 = testRelative "testRelative40"
"foo:a/b" "foo:a/c/d" "c/d"
testRelative41 = testRelative "testRelative41"
"foo:a/b" "foo:/c/d" "/c/d"
testRelative42 = testRelative "testRelative42"
"foo:a/b?c#d" "foo:a/b?c" ""
-- (Label corrected: it previously duplicated "testRelative42".)
testRelative43 = testRelative "testRelative43"
          "foo:a" "foo:b/c" "b/c"
testRelative44 = testRelative "testRelative44"
"foo:/a/y/z" "foo:/a/b/c" "../b/c"
testRelative45 = testRelJoin "testRelative45"
"foo:a" "./b/c" "foo:b/c"
testRelative46 = testRelJoin "testRelative46"
"foo:a" "/./b/c" "foo:/b/c"
testRelative47 = testRelJoin "testRelative47"
"foo://a//b/c" "../../d" "foo://a/d"
testRelative48 = testRelJoin "testRelative48"
"foo:a" "." "foo:"
testRelative49 = testRelJoin "testRelative49"
"foo:a" ".." "foo:"
-- add escape tests
testRelative50 = testRelative "testRelative50"
"http://example/x/y%2Fz" "http://example/x/abc" "abc"
testRelative51 = testRelative "testRelative51"
"http://example/a/x/y/z" "http://example/a/x%2Fabc" "../../x%2Fabc"
testRelative52 = testRelative "testRelative52"
"http://example/a/x/y%2Fz" "http://example/a/x%2Fabc" "../x%2Fabc"
testRelative53 = testRelative "testRelative53"
"http://example/x%2Fy/z" "http://example/x%2Fy/abc" "abc"
testRelative54 = testRelative "testRelative54"
"http://ex/x/y" "http://ex/x/q%3Ar" "q%3Ar"
testRelative55 = testRelative "testRelative55"
"http://example/x/y%2Fz" "http://example/x%2Fabc" "/x%2Fabc"
-- Apparently, TimBL prefers the following way to 41, 42 above
-- cf. http://lists.w3.org/Archives/Public/uri/2003Feb/0028.html
-- He also notes that there may be different relative functions
-- that satisfy the basic equivalence axiom:
-- cf. http://lists.w3.org/Archives/Public/uri/2003Jan/0008.html
testRelative56 = testRelative "testRelative56"
"http://example/x/y/z" "http://example/x%2Fabc" "/x%2Fabc"
testRelative57 = testRelative "testRelative57"
"http://example/x/y%2Fz" "http://example/x%2Fabc" "/x%2Fabc"
-- Other oddball tests
-- Check segment normalization code:
testRelative60 = testRelJoin "testRelative60"
"ftp://example/x/y" "http://example/a/b/../../c" "http://example/c"
testRelative61 = testRelJoin "testRelative61"
"ftp://example/x/y" "http://example/a/b/c/../../" "http://example/a/"
testRelative62 = testRelJoin "testRelative62"
"ftp://example/x/y" "http://example/a/b/c/./" "http://example/a/b/c/"
testRelative63 = testRelJoin "testRelative63"
"ftp://example/x/y" "http://example/a/b/c/.././" "http://example/a/b/"
testRelative64 = testRelJoin "testRelative64"
"ftp://example/x/y" "http://example/a/b/c/d/../../../../e" "http://example/e"
testRelative65 = testRelJoin "testRelative65"
"ftp://example/x/y" "http://example/a/b/c/d/../.././../../e" "http://example/e"
-- Check handling of queries and fragments with non-relative paths
testRelative70 = testRelative "testRelative70"
"mailto:local1@domain1?query1" "mailto:local2@domain2"
"local2@domain2"
testRelative71 = testRelative "testRelative71"
"mailto:local1@domain1" "mailto:local2@domain2?query2"
"local2@domain2?query2"
testRelative72 = testRelative "testRelative72"
"mailto:local1@domain1?query1" "mailto:local2@domain2?query2"
"local2@domain2?query2"
testRelative73 = testRelative "testRelative73"
"mailto:local@domain?query1" "mailto:local@domain?query2"
"?query2"
testRelative74 = testRelative "testRelative74"
"mailto:?query1" "mailto:local@domain?query2"
"local@domain?query2"
testRelative75 = testRelative "testRelative75"
"mailto:local@domain?query1" "mailto:local@domain?query2"
"?query2"
testRelative76 = testRelative "testRelative76"
"foo:bar" "http://example/a/b?c/../d" "http://example/a/b?c/../d"
testRelative77 = testRelative "testRelative77"
"foo:bar" "http://example/a/b#c/../d" "http://example/a/b#c/../d"
{- These (78-81) are some awkward test cases thrown up by a question on the URI list:
http://lists.w3.org/Archives/Public/uri/2005Jul/0013
   Note that RFC 3986 discards path segments after the final '/' only when merging two
   paths - otherwise the final segment in the base URI is maintained. This leads to
   difficulty in constructing a reversible relativeTo/relativeFrom pair of functions.
-}
testRelative78 = testRelative "testRelative78"
"http://www.example.com/data/limit/.." "http://www.example.com/data/limit/test.xml"
"test.xml"
testRelative79 = testRelative "testRelative79"
"file:/some/dir/foo" "file:/some/dir/#blort" "./#blort"
testRelative80 = testRelative "testRelative80"
"file:/some/dir/foo" "file:/some/dir/#" "./#"
testRelative81 = testRelative "testRelative81"
"file:/some/dir/.." "file:/some/dir/#blort" "./#blort"
-- testRelative base abs rel
-- testRelSplit base abs rel
-- testRelJoin base rel abs
testRelative91 = testRelSplit "testRelative91"
"http://example.org/base/uri" "http:this"
"this"
testRelative92 = testRelJoin "testRelative92"
"http://example.org/base/uri" "http:this"
"http:this"
testRelative93 = testRelJoin "testRelative93"
"http:base" "http:this"
"http:this"
testRelative94 = testRelJoin "testRelative94"
"f:/a" ".//g"
"f://g"
testRelative95 = testRelJoin "testRelative95"
"f://example.org/base/a" "b/c//d/e"
"f://example.org/base/b/c//d/e"
testRelative96 = testRelJoin "testRelative96"
"mid:m@example.ord/c@example.org" "m2@example.ord/c2@example.org"
"mid:m@example.ord/m2@example.ord/c2@example.org"
testRelative97 = testRelJoin "testRelative97"
"file:///C:/DEV/Haskell/lib/HXmlToolbox-3.01/examples/" "mini1.xml"
"file:///C:/DEV/Haskell/lib/HXmlToolbox-3.01/examples/mini1.xml"
testRelative98 = testRelative "testRelative98"
"foo:a/y/z" "foo:a/b/c" "../b/c"
testRelative99 = testRelJoin "testRelative99"
"f:/a/" "..//g"
"f://g"
testRelativeSuite = TestLabel "Test Relative URIs" testRelativeList
testRelativeList = TestList
[ testRelative01, testRelative02, testRelative03, testRelative04
, testRelative05, testRelative06, testRelative07, testRelative08
, testRelative09
, testRelative10, testRelative11, testRelative12, testRelative13
, testRelative14, testRelative15, testRelative16, testRelative17
, testRelative18, testRelative19
, testRelative20, testRelative21, testRelative22, testRelative23
, testRelative24, testRelative25, testRelative26, testRelative27
, testRelative28, testRelative29
, testRelative30, testRelative31, testRelative32, testRelative33
, testRelative34, testRelative35, testRelative36, testRelative37
, testRelative38, testRelative39
, testRelative40, testRelative41, testRelative42, testRelative43
, testRelative44, testRelative45, testRelative46, testRelative47
, testRelative48, testRelative49
--
, testRelative50, testRelative51, testRelative52, testRelative53
, testRelative54, testRelative55, testRelative56, testRelative57
--
, testRelative60, testRelative61, testRelative62, testRelative63
, testRelative64, testRelative65
--
, testRelative70, testRelative71, testRelative72, testRelative73
, testRelative74, testRelative75, testRelative76, testRelative77
-- Awkward cases:
, testRelative78, testRelative79, testRelative80, testRelative81
--
-- , testRelative90
, testRelative91, testRelative92, testRelative93
, testRelative94, testRelative95, testRelative96
, testRelative97, testRelative98, testRelative99
]
-- RFC2396 relative-to-absolute URI tests
rfcbase = "http://a/b/c/d;p?q"
-- normal cases, RFC2396bis 5.4.1
-- Labels corrected to match the binding names; several were off by one
-- after the cases were renumbered, so failures were reported under the
-- wrong case name.
testRFC01 = testRelJoin "testRFC01" rfcbase "g:h" "g:h"
testRFC02 = testRelJoin "testRFC02" rfcbase "g" "http://a/b/c/g"
testRFC03 = testRelJoin "testRFC03" rfcbase "./g" "http://a/b/c/g"
testRFC04 = testRelJoin "testRFC04" rfcbase "g/" "http://a/b/c/g/"
testRFC05 = testRelJoin "testRFC05" rfcbase "/g" "http://a/g"
testRFC06 = testRelJoin "testRFC06" rfcbase "//g" "http://g"
testRFC07 = testRelJoin "testRFC07" rfcbase "?y" "http://a/b/c/d;p?y"
testRFC08 = testRelJoin "testRFC08" rfcbase "g?y" "http://a/b/c/g?y"
testRFC09 = testRelJoin "testRFC09" rfcbase "?q#s" "http://a/b/c/d;p?q#s"
testRFC23 = testRelJoin "testRFC23" rfcbase "#s" "http://a/b/c/d;p?q#s"
testRFC10 = testRelJoin "testRFC10" rfcbase "g#s" "http://a/b/c/g#s"
testRFC11 = testRelJoin "testRFC11" rfcbase "g?y#s" "http://a/b/c/g?y#s"
testRFC12 = testRelJoin "testRFC12" rfcbase ";x" "http://a/b/c/;x"
testRFC13 = testRelJoin "testRFC13" rfcbase "g;x" "http://a/b/c/g;x"
testRFC14 = testRelJoin "testRFC14" rfcbase "g;x?y#s" "http://a/b/c/g;x?y#s"
testRFC24 = testRelJoin "testRFC24" rfcbase "" "http://a/b/c/d;p?q"
testRFC15 = testRelJoin "testRFC15" rfcbase "." "http://a/b/c/"
testRFC16 = testRelJoin "testRFC16" rfcbase "./" "http://a/b/c/"
testRFC17 = testRelJoin "testRFC17" rfcbase ".." "http://a/b/"
testRFC18 = testRelJoin "testRFC18" rfcbase "../" "http://a/b/"
testRFC19 = testRelJoin "testRFC19" rfcbase "../g" "http://a/b/g"
testRFC20 = testRelJoin "testRFC20" rfcbase "../.." "http://a/"
testRFC21 = testRelJoin "testRFC21" rfcbase "../../" "http://a/"
testRFC22 = testRelJoin "testRFC22" rfcbase "../../g" "http://a/g"
-- abnormal cases, RFC2396bis 5.4.2
testRFC31 = testRelJoin "testRFC31" rfcbase "?q" rfcbase
testRFC32 = testRelJoin "testRFC32" rfcbase "../../../g" "http://a/g"
testRFC33 = testRelJoin "testRFC33" rfcbase "../../../../g" "http://a/g"
testRFC34 = testRelJoin "testRFC34" rfcbase "/./g" "http://a/g"
testRFC35 = testRelJoin "testRFC35" rfcbase "/../g" "http://a/g"
testRFC36 = testRelJoin "testRFC36" rfcbase "g." "http://a/b/c/g."
testRFC37 = testRelJoin "testRFC37" rfcbase ".g" "http://a/b/c/.g"
testRFC38 = testRelJoin "testRFC38" rfcbase "g.." "http://a/b/c/g.."
testRFC39 = testRelJoin "testRFC39" rfcbase "..g" "http://a/b/c/..g"
testRFC40 = testRelJoin "testRFC40" rfcbase "./../g" "http://a/b/g"
testRFC41 = testRelJoin "testRFC41" rfcbase "./g/." "http://a/b/c/g/"
testRFC42 = testRelJoin "testRFC42" rfcbase "g/./h" "http://a/b/c/g/h"
testRFC43 = testRelJoin "testRFC43" rfcbase "g/../h" "http://a/b/c/h"
testRFC44 = testRelJoin "testRFC44" rfcbase "g;x=1/./y" "http://a/b/c/g;x=1/y"
testRFC45 = testRelJoin "testRFC45" rfcbase "g;x=1/../y" "http://a/b/c/y"
testRFC46 = testRelJoin "testRFC46" rfcbase "g?y/./x" "http://a/b/c/g?y/./x"
testRFC47 = testRelJoin "testRFC47" rfcbase "g?y/../x" "http://a/b/c/g?y/../x"
testRFC48 = testRelJoin "testRFC48" rfcbase "g#s/./x" "http://a/b/c/g#s/./x"
testRFC49 = testRelJoin "testRFC49" rfcbase "g#s/../x" "http://a/b/c/g#s/../x"
testRFC50 = testRelJoin "testRFC50" rfcbase "http:x" "http:x"
-- Null path tests
-- See RFC2396bis, section 5.2,
-- "If the base URI's path component is the empty string, then a single
-- slash character is copied to the buffer"
testRFC60 = testRelative "testRFC60" "http://ex" "http://ex/x/y?q" "/x/y?q"
testRFC61 = testRelJoin "testRFC61" "http://ex" "x/y?q" "http://ex/x/y?q"
testRFC62 = testRelative "testRFC62" "http://ex?p" "http://ex/x/y?q" "/x/y?q"
testRFC63 = testRelJoin "testRFC63" "http://ex?p" "x/y?q" "http://ex/x/y?q"
testRFC64 = testRelative "testRFC64" "http://ex#f" "http://ex/x/y?q" "/x/y?q"
testRFC65 = testRelJoin "testRFC65" "http://ex#f" "x/y?q" "http://ex/x/y?q"
testRFC66 = testRelative "testRFC66" "http://ex?p" "http://ex/x/y#g" "/x/y#g"
testRFC67 = testRelJoin "testRFC67" "http://ex?p" "x/y#g" "http://ex/x/y#g"
testRFC68 = testRelative "testRFC68" "http://ex" "http://ex/" "/"
testRFC69 = testRelJoin "testRFC69" "http://ex" "./" "http://ex/"
testRFC70 = testRelative "testRFC70" "http://ex" "http://ex/a/b" "/a/b"
testRFC71 = testRelative "testRFC71" "http://ex/a/b" "http://ex" "./"
testRFC2396Suite = TestLabel "Test RFC2396 examples" testRFC2396List
testRFC2396List = TestList
[
testRFC01, testRFC02, testRFC03, testRFC04,
testRFC05, testRFC06, testRFC07, testRFC08,
testRFC09,
testRFC10, testRFC11, testRFC12, testRFC13,
testRFC14, testRFC15, testRFC16, testRFC17,
testRFC18, testRFC19,
testRFC20, testRFC21, testRFC22, testRFC23,
testRFC24,
-- testRFC30,
testRFC31, testRFC32, testRFC33,
testRFC34, testRFC35, testRFC36, testRFC37,
testRFC38, testRFC39,
testRFC40, testRFC41, testRFC42, testRFC43,
testRFC44, testRFC45, testRFC46, testRFC47,
testRFC48, testRFC49,
testRFC50,
--
testRFC60, testRFC61, testRFC62, testRFC63,
testRFC64, testRFC65, testRFC66, testRFC67,
testRFC68, testRFC69,
testRFC70
]
-- And some other oddballs:
mailbase = "mailto:local/option@domain.org?notaquery#frag"
testMail01 = testRelJoin "testMail01"
mailbase "more@domain"
"mailto:local/more@domain"
testMail02 = testRelJoin "testMail02"
mailbase "#newfrag"
"mailto:local/option@domain.org?notaquery#newfrag"
testMail03 = testRelJoin "testMail03"
mailbase "l1/q1@domain"
"mailto:local/l1/q1@domain"
testMail11 = testRelJoin "testMail11"
"mailto:local1@domain1?query1" "mailto:local2@domain2"
"mailto:local2@domain2"
testMail12 = testRelJoin "testMail12"
"mailto:local1@domain1" "mailto:local2@domain2?query2"
"mailto:local2@domain2?query2"
testMail13 = testRelJoin "testMail13"
"mailto:local1@domain1?query1" "mailto:local2@domain2?query2"
"mailto:local2@domain2?query2"
testMail14 = testRelJoin "testMail14"
"mailto:local@domain?query1" "mailto:local@domain?query2"
"mailto:local@domain?query2"
testMail15 = testRelJoin "testMail15"
"mailto:?query1" "mailto:local@domain?query2"
"mailto:local@domain?query2"
testMail16 = testRelJoin "testMail16"
"mailto:local@domain?query1" "?query2"
"mailto:local@domain?query2"
testInfo17 = testRelJoin "testInfo17"
"info:name/1234/../567" "name/9876/../543"
"info:name/name/543"
testInfo18 = testRelJoin "testInfo18"
"info:/name/1234/../567" "name/9876/../543"
"info:/name/name/543"
-- | Suite of miscellaneous mailto:/info: resolution examples.
-- NOTE(review): 'testInfo18' is defined above but not included in this
-- list -- confirm whether its omission is deliberate (e.g. a known
-- failure) before adding it.
testOddballSuite = TestLabel "Test oddball examples" testOddballList
testOddballList = TestList
    [ testMail01, testMail02, testMail03
    , testMail11, testMail12, testMail13, testMail14, testMail15, testMail16
    , testInfo17
    ]
-- Normalization tests
-- Case normalization; cf. RFC2396bis section 6.2.2.1
-- NOTE: authority case normalization is not performed
testNormalize01 = testEq "testNormalize01"
"http://EXAMPLE.com/Root/%2A?%2B#%2C"
(normalizeCase "HTTP://EXAMPLE.com/Root/%2a?%2b#%2c")
-- Encoding normalization; cf. RFC2396bis section 6.2.2.2
testNormalize11 = testEq "testNormalize11"
"HTTP://EXAMPLE.com/Root/~Me/"
(normalizeEscape "HTTP://EXAMPLE.com/Root/%7eMe/")
testNormalize12 = testEq "testNormalize12"
"foo:%40AZ%5b%60az%7b%2f09%3a-._~"
(normalizeEscape "foo:%40%41%5a%5b%60%61%7a%7b%2f%30%39%3a%2d%2e%5f%7e")
testNormalize13 = testEq "testNormalize13"
"foo:%3a%2f%3f%23%5b%5d%40"
(normalizeEscape "foo:%3a%2f%3f%23%5b%5d%40")
-- Path segment normalization; cf. RFC2396bis section 6.2.2.4
testNormalize21 = testEq "testNormalize21"
"http://example/c"
(normalizePathSegments "http://example/a/b/../../c")
testNormalize22 = testEq "testNormalize22"
"http://example/a/"
(normalizePathSegments "http://example/a/b/c/../../")
testNormalize23 = testEq "testNormalize23"
"http://example/a/b/c/"
(normalizePathSegments "http://example/a/b/c/./")
testNormalize24 = testEq "testNormalize24"
"http://example/a/b/"
(normalizePathSegments "http://example/a/b/c/.././")
testNormalize25 = testEq "testNormalize25"
"http://example/e"
(normalizePathSegments "http://example/a/b/c/d/../../../../e")
testNormalize26 = testEq "testNormalize26"
"http://example/e"
(normalizePathSegments "http://example/a/b/c/d/../.././../../e")
testNormalize27 = testEq "testNormalize27"
"http://example/e"
(normalizePathSegments "http://example/a/b/../.././../../e")
testNormalize28 = testEq "testNormalize28"
"foo:e"
(normalizePathSegments "foo:a/b/../.././../../e")
-- Suite of case/escape/path-segment normalization tests.
-- Wrapped in a TestLabel for consistency with the other suites in this
-- file, so failures identify their suite in HUnit reports.
testNormalizeSuite = TestLabel "Test URI normalization" $ TestList
    [ testNormalize01
    , testNormalize11
    , testNormalize12
    , testNormalize13
    , testNormalize21, testNormalize22, testNormalize23, testNormalize24
    , testNormalize25, testNormalize26, testNormalize27, testNormalize28
    ]
-- URI formatting (show) tests

-- A URI value with every component populated, including userinfo with a
-- password, used to exercise the two rendering paths below.
ts02URI = URI { uriScheme    = "http:"
              , uriAuthority = Just (URIAuth "user:pass@" "example.org" ":99")
              , uriPath      = "/aaa/bbb"
              , uriQuery     = "?ccc"
              , uriFragment  = "#ddd/eee"
              }

-- Same as ts02URI but with password "anonymous"; since the revised URI spec
-- "anonymous" is no longer a special case and is obscured like any password.
ts04URI = URI { uriScheme    = "http:"
              , uriAuthority = Just (URIAuth "user:anonymous@" "example.org" ":99")
              , uriPath      = "/aaa/bbb"
              , uriQuery     = "?ccc"
              , uriFragment  = "#ddd/eee"
              }

-- Expected rendering via 'show': the password is obscured as "...".
ts02str = "http://user:...@example.org:99/aaa/bbb?ccc#ddd/eee"
-- Expected rendering via 'uriToString id': the password is kept verbatim.
ts03str = "http://user:pass@example.org:99/aaa/bbb?ccc#ddd/eee"
ts04str = "http://user:...@example.org:99/aaa/bbb?ccc#ddd/eee"

testShowURI01 = testEq "testShowURI01" ""      (show nullURI)
testShowURI02 = testEq "testShowURI02" ts02str (show ts02URI)
testShowURI03 = testEq "testShowURI03" ts03str ((uriToString id ts02URI) "")
testShowURI04 = testEq "testShowURI04" ts04str (show ts04URI)

testShowURI = TestList
    [ testShowURI01
    , testShowURI02
    , testShowURI03
    , testShowURI04
    ]
-- URI escaping tests

-- A URI built only from characters that need no escaping (unreserved and
-- reserved characters): must survive 'escapeURIString' unchanged.
te01str = "http://example.org/az/09-_/.~:/?#[]@!$&'()*+,;="
-- A URI containing characters that are not allowed raw ('<', '>', '%', ' ')
-- and its expected percent-escaped form.
te02str = "http://example.org/a</b>/c%/d /e"
te02esc = "http://example.org/a%3C/b%3E/c%25/d%20/e"

testEscapeURIString01 = testEq "testEscapeURIString01"
    te01str (escapeURIString isUnescapedInURI te01str)
testEscapeURIString02 = testEq "testEscapeURIString02"
    te02esc (escapeURIString isUnescapedInURI te02str)
-- 'unEscapeString' on a string with no escapes is the identity ...
testEscapeURIString03 = testEq "testEscapeURIString03"
    te01str (unEscapeString te01str)
-- ... and inverts the escaping performed above.
testEscapeURIString04 = testEq "testEscapeURIString04"
    te02str (unEscapeString te02esc)

testEscapeURIString = TestList
    [ testEscapeURIString01
    , testEscapeURIString02
    , testEscapeURIString03
    , testEscapeURIString04
    ]
-- URI string normalization tests

tn01str = "eXAMPLE://a/b/%7bfoo%7d"
tn01nrm = "example://a/b/%7Bfoo%7D"
tn02str = "example://a/b/%63/"
tn02nrm = "example://a/b/c/"
tn03str = "example://a/./b/../b/c/foo"
tn03nrm = "example://a/b/c/foo"
tn04str = "eXAMPLE://a/b/%7bfoo%7d"    -- From RFC2396bis, 6.2.2
tn04nrm = "example://a/b/%7Bfoo%7D"
-- Edge cases with empty path segments ("//") produced by "..":
tn06str = "file:/x/..//y"
tn06nrm = "file://y"
tn07str = "file:x/..//y/"
tn07nrm = "file:/y/"

testNormalizeURIString01 = testEq "testNormalizeURIString01"
    tn01nrm (normalizeCase tn01str)
testNormalizeURIString02 = testEq "testNormalizeURIString02"
    tn02nrm (normalizeEscape tn02str)
testNormalizeURIString03 = testEq "testNormalizeURIString03"
    tn03nrm (normalizePathSegments tn03str)
-- The three normalizations should commute: applying them in either order
-- yields the same fully-normalized result (tests 04 and 05).
testNormalizeURIString04 = testEq "testNormalizeURIString04"
    tn04nrm ((normalizeCase . normalizeEscape . normalizePathSegments) tn04str)
testNormalizeURIString05 = testEq "testNormalizeURIString05"
    tn04nrm ((normalizePathSegments . normalizeEscape . normalizeCase) tn04str)
testNormalizeURIString06 = testEq "testNormalizeURIString06"
    tn06nrm (normalizePathSegments tn06str)
testNormalizeURIString07 = testEq "testNormalizeURIString07"
    tn07nrm (normalizePathSegments tn07str)

testNormalizeURIString = TestList
    [ testNormalizeURIString01
    , testNormalizeURIString02
    , testNormalizeURIString03
    , testNormalizeURIString04
    , testNormalizeURIString05
    , testNormalizeURIString06
    , testNormalizeURIString07
    ]

-- Convenience: run just the two empty-segment edge-case tests (06/07)
-- interactively.
tnus67 = runTestTT $ TestList
    [ testNormalizeURIString06
    , testNormalizeURIString07
    ]
-- Test strict vs non-strict relativeTo logic

-- Base URI against which the relative references below are resolved.
-- fromJust is acceptable here: the literal is a known-valid reference,
-- and a parse failure should abort the test run anyway.
trbase = fromJust $ parseURIReference "http://bar.org/"

testRelativeTo01 = testEq "testRelativeTo01"
    "http://bar.org/foo"
    (show . fromJust $
      (fromJust $ parseURIReference "foo") `relativeTo` trbase)
-- Strict resolution: a reference that repeats the base's scheme is NOT
-- merged with the base (per RFC 3986 strict behaviour).
testRelativeTo02 = testEq "testRelativeTo02"
    "http:foo"
    (show . fromJust $
      (fromJust $ parseURIReference "http:foo") `relativeTo` trbase)
-- Non-strict resolution: the matching scheme is ignored and the reference
-- is resolved against the base as if it were scheme-less.
testRelativeTo03 = testEq "testRelativeTo03"
    "http://bar.org/foo"
    (show . fromJust $
      (fromJust $ parseURIReference "http:foo") `nonStrictRelativeTo` trbase)

testRelativeTo = TestList
    [ testRelativeTo01
    , testRelativeTo02
    , testRelativeTo03
    ]
-- Test alternative parsing functions
-- Each parse* function accepts a different subclass of references:
--   parseURI               - absolute URI, fragment allowed
--   parseURIReference      - any URI reference
--   parseRelativeReference - relative references only (no scheme)
--   parseAbsoluteURI       - absolute URI, no fragment
testAltFn01 = testEq "testAltFn01" "Just http://a.b/c#f"
    (show . parseURI $ "http://a.b/c#f")
testAltFn02 = testEq "testAltFn02" "Just http://a.b/c#f"
    (show . parseURIReference $ "http://a.b/c#f")
testAltFn03 = testEq "testAltFn03" "Just c/d#f"
    (show . parseRelativeReference $ "c/d#f")
testAltFn04 = testEq "testAltFn04" "Nothing"
    (show . parseRelativeReference $ "http://a.b/c#f")
testAltFn05 = testEq "testAltFn05" "Just http://a.b/c"
    (show . parseAbsoluteURI $ "http://a.b/c")
testAltFn06 = testEq "testAltFn06" "Nothing"
    (show . parseAbsoluteURI $ "http://a.b/c#f")
testAltFn07 = testEq "testAltFn07" "Nothing"
    (show . parseAbsoluteURI $ "c/d")
-- Deliberately exercises the deprecated lower-case 'parseabsoluteURI',
-- which must behave like 'parseAbsoluteURI'.
testAltFn08 = testEq "testAltFn08" "Just http://a.b/c"
    (show . parseabsoluteURI $ "http://a.b/c")

-- The is* predicates mirror the parse* functions above.
testAltFn11 = testEq "testAltFn11" True  (isURI "http://a.b/c#f")
testAltFn12 = testEq "testAltFn12" True  (isURIReference "http://a.b/c#f")
testAltFn13 = testEq "testAltFn13" True  (isRelativeReference "c/d#f")
testAltFn14 = testEq "testAltFn14" False (isRelativeReference "http://a.b/c#f")
testAltFn15 = testEq "testAltFn15" True  (isAbsoluteURI "http://a.b/c")
testAltFn16 = testEq "testAltFn16" False (isAbsoluteURI "http://a.b/c#f")
testAltFn17 = testEq "testAltFn17" False (isAbsoluteURI "c/d")

testAltFn = TestList
    [ testAltFn01
    , testAltFn02
    , testAltFn03
    , testAltFn04
    , testAltFn05
    , testAltFn06
    , testAltFn07
    , testAltFn08
    , testAltFn11
    , testAltFn12
    , testAltFn13
    , testAltFn14
    , testAltFn15
    , testAltFn16
    , testAltFn17
    ]
-- Full test suite
allTests = TestList
    [ testURIRefSuite
    , testComponentSuite
    , testRelativeSuite
    , testRFC2396Suite
    , testOddballSuite
    , testNormalizeSuite
    , testShowURI
    , testEscapeURIString
    , testNormalizeURIString
    , testRelativeTo
    , testAltFn
    ]

main = runTestTT allTests

-- Run a test suite with its report written to the scratch file "a.tmp".
-- NOTE(review): the handle is not closed if 'runTestText' throws; wrapping
-- this in 'bracket'/'withFile' would avoid leaking it -- confirm whether
-- exception safety matters for this debug helper.
runTestFile t = do
    h <- openFile "a.tmp" WriteMode
    runTestText (putTextToHandle h False) t
    hClose h

-- Short aliases for interactive use.
tf = runTestFile
tt = runTestTT
-- Miscellaneous values for hand-testing/debugging in Hugs:
uref = testURIRefSuite
tr01 = testRelative01
tr02 = testRelative02
tr03 = testRelative03
tr04 = testRelative04
rel  = testRelativeSuite
rfc  = testRFC2396Suite
oddb = testOddballSuite

-- Partial top-level pattern bindings: these fail at evaluation time if the
-- literal does not parse, which is acceptable for REPL-only helpers.
(Just bu02) = parseURIReference "http://example/x/y/z"
(Just ou02) = parseURIReference "../abc"
(Just ru02) = parseURIReference "http://example/x/abc"
-- fileuri = testURIReference "file:///C:/DEV/Haskell/lib/HXmlToolbox-3.01/examples/"
cu02 = ou02 `relativeTo` bu02
--------------------------------------------------------------------------------
--
-- Copyright (c) 2004, G. KLYNE. All rights reserved.
-- Distributed as free software under the following license.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- - Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
--
-- - Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- - Neither name of the copyright holders nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND THE CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- HOLDERS OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-- INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-- BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-- OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-- TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
--------------------------------------------------------------------------------
-- $Source: /srv/cvs/cvs.haskell.org/fptools/libraries/network/tests/URITest.hs,v $
-- $Author: gklyne $
-- $Revision: 1.8 $
-- $Log: URITest.hs,v $
-- Revision 1.8 2005/07/19 22:01:27 gklyne
-- Added some additional test cases raised by discussion on URI@w3.org mailing list about 2005-07-19. The test proposed by this discussion exposed a subtle bug in relativeFrom not being an exact inverse of relativeTo.
--
-- Revision 1.7 2005/06/06 16:31:44 gklyne
-- Added two new test cases.
--
-- Revision 1.6 2005/05/31 17:18:36 gklyne
-- Added some additional test cases triggered by URI-list discussions.
--
-- Revision 1.5 2005/04/07 11:09:37 gklyne
-- Added test cases for alternate parsing functions (including deprecated 'parseabsoluteURI')
--
-- Revision 1.4 2005/04/05 12:47:32 gklyne
-- Added test case.
-- Changed module name, now requires GHC -main-is to compile.
-- All tests run OK with GHC 6.4 on MS-Windows.
--
-- Revision 1.3 2004/11/05 17:29:09 gklyne
-- Changed password-obscuring logic to reflect late change in revised URI
-- specification (password "anonymous" is no longer a special case).
-- Updated URI test module to use function 'escapeURIString'.
-- (Should unEscapeString be similarly updated?)
--
-- Revision 1.2 2004/10/27 13:06:55 gklyne
-- Updated URI module function names per:
-- http://www.haskell.org//pipermail/cvs-libraries/2004-October/002916.html
-- Added test cases to give better coverage of module functions.
--
-- Revision 1.1 2004/10/14 16:11:30 gklyne
-- Add URI unit test to cvs.haskell.org repository
--
-- Revision 1.17 2004/10/14 11:51:09 graham
-- Confirm that URITest runs with GHC.
-- Fix up some comments and other minor details.
--
-- Revision 1.16 2004/10/14 11:45:30 graham
-- Use module name main for GHC 6.2
--
-- Revision 1.15 2004/08/11 11:07:39 graham
-- Add new test case.
--
-- Revision 1.14 2004/06/30 11:35:27 graham
-- Update URI code to use hierarchical libraries for Parsec and Network.
--
-- Revision 1.13 2004/06/22 16:19:16 graham
-- New URI test case added.
--
-- Revision 1.12 2004/04/21 15:13:29 graham
-- Add test case
--
-- Revision 1.11 2004/04/21 14:54:05 graham
-- Fix up some tests
--
-- Revision 1.10 2004/04/20 14:54:13 graham
-- Fix up test cases related to port number in authority,
-- and add some more URI decomposition tests.
--
-- Revision 1.9 2004/04/07 15:06:17 graham
-- Add extra test case
-- Revise syntax in line with changes to RFC2396bis
--
-- Revision 1.8 2004/03/17 14:34:58 graham
-- Add Network.HTTP files to CVS
--
-- Revision 1.7 2004/03/16 14:19:38 graham
-- Change licence to BSD style; add nullURI definition; new test cases.
--
-- Revision 1.6 2004/02/20 12:12:00 graham
-- Add URI normalization functions
--
-- Revision 1.5 2004/02/19 23:19:35 graham
-- Network.URI module passes all test cases
--
-- Revision 1.4 2004/02/17 20:06:02 graham
-- Revised URI parser to reflect latest RFC2396bis (-04)
--
-- Revision 1.3 2004/02/11 14:32:14 graham
-- Added work-in-progress notes.
--
-- Revision 1.2 2004/02/02 14:00:39 graham
-- Fix optional host name in URI. Add test cases.
--
-- Revision 1.1 2004/01/27 21:13:45 graham
-- New URI module and test suite added,
-- implementing the GHC Network.URI interface.
--
| FranklinChen/hugs98-plus-Sep2006 | packages/network/tests/URITest.hs | bsd-3-clause | 49,990 | 0 | 14 | 9,977 | 7,093 | 3,927 | 3,166 | 804 | 4 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
-- | Machine-generated ZM ADT module (note the content-hash suffix in the
-- module name); edit the source schema rather than this file.
module Test.ZM.ADT.Bits8.K9e3b8c835fe9 (Bits8(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
import qualified Test.ZM.ADT.Bit.K65149ce3b366

-- | A record of eight individual bits.  The significance/ordering of
-- bit0..bit7 is defined by the ZM model -- presumably bit0 is the first
-- transmitted bit; confirm against the schema if it matters.
data Bits8 = Bits8 {bit0 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                    bit1 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                    bit2 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                    bit3 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                    bit4 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                    bit5 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                    bit6 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit,
                    bit7 :: Test.ZM.ADT.Bit.K65149ce3b366.Bit}
  deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)

instance Data.Model.Model Bits8
| tittoassini/typed | test/Test/ZM/ADT/Bits8/K9e3b8c835fe9.hs | bsd-3-clause | 923 | 0 | 9 | 224 | 209 | 143 | 66 | 18 | 0 |
{-# LANGUAGE BangPatterns #-}
import Control.Concurrent
import Control.Concurrent.Async
import Chan.KickChan
import Control.Monad
import Control.Monad.Fix
import Data.Vector.Mutable
import Data.Chronograph
-- benchmark throughput...
-- Sets up one bounded KickChan (capacity 100) carrying Either Int Int,
-- two readers and two writers:
--   * rdr1 keeps Right values (Lefts mapped to Nothing),
--   * rdr2 keeps Left values (Rights mapped to Nothing),
--   * one writer produces Lefts (pausing every 8th write),
--   * one writer produces Rights (pausing every 2nd write).
-- The benchmark ends when both reader asyncs finish.
main = do
    kc <- newKickChan 100
          :: IO (KickChan IOVector (Either Int Int))
    rdr1 <- newReader kc
    rdr2 <- newReader kc
    c1 <- async $ mkReader rdr1 (either (const Nothing) Just)
    c2 <- async $ mkReader rdr2 (either Just (const Nothing))
    -- spawn writer
    _ <- forkIO $ mkWriter 8 kc Left
    _ <- forkIO $ mkWriter 2 kc Right
    -- waitBoth also re-raises any exception thrown inside a reader.
    _ <- waitBoth c1 c2
    return ()
-- Number of values each writer produces (and each reader expects).
maxSize = 100000

-- Write 0..maxSize into the channel via 'proj', timing each put with the
-- Chronograph helpers, and sleep 100us after every x-th write to throttle.
mkWriter x kc proj = forM_ [0::Int .. maxSize] $ \i -> do
    chronoTraceEventIO "putKickChan" <=< chronoIO $ putKickChan kc (proj i)
    when (rem i x == 0) $ threadDelay 100
-- Consume values until 'expected' exceeds maxSize, checking that the values
-- selected by 'dir' arrive strictly in order 0,1,2,...
-- Interpretation of readNext's result after mapping with 'dir':
--   Nothing        -> the read failed (presumably the reader was lapped by
--                     the bounded channel -- TODO confirm KickChan semantics),
--   Just Nothing   -> a value destined for the other reader; skip it,
--   Just (Just x)  -> our value; must equal the running counter.
-- Note the strict application ($!) keeps the counter from building thunks.
mkReader rdr dir = flip fix 0 $ \self expected -> if expected > maxSize then return () else do
    v <- (fmap . fmap) dir $ readNext rdr
    case v of
        Nothing -> error "reader got Nothing..."
        Just Nothing -> self expected
        Just (Just x) | x == expected -> self $! expected + 1
                      | otherwise -> error $ "expected " ++ show expected ++ " but got " ++ show x
| JohnLato/kickchan | bench/bench_t3.hs | bsd-3-clause | 1,203 | 0 | 17 | 291 | 462 | 222 | 240 | 30 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Ivory.Language.Syntax.Concrete.QQ.StructQQ
( fromStruct
) where
import Prelude ()
import Prelude.Compat
import qualified Ivory.Language.Area as A
import Ivory.Language.Proxy
import qualified Ivory.Language.String as S
import qualified Ivory.Language.Struct as S
import qualified Ivory.Language.Syntax.AST as AST
import Ivory.Language.Syntax.Concrete.ParseAST
import qualified Ivory.Language.Syntax.Type as AST
import Ivory.Language.Syntax.Concrete.QQ.Common
import Ivory.Language.Syntax.Concrete.QQ.TypeQQ
import Language.Haskell.TH hiding (Type)
import Language.Haskell.TH.Datatype (tySynInstDCompat)
import Language.Haskell.TH.Quote ()
--------------------------------------------------------------------------------
-- | Generate the Ivory declarations for a parsed struct definition:
-- an @IvoryStruct@ instance plus one field-label binding per field.
-- String definitions expand to a fixed-capacity string struct, and
-- abstract definitions get only the instance (no fields are known).
-- The CPP split exists because line pragmas ('lnPragma') are only
-- emitted on GHC >= 7.9.
fromStruct :: StructDef -> Q [Dec]
fromStruct def = case def of
#if __GLASGOW_HASKELL__ >= 709
  StructDef n fs srcloc -> do
    let sym = mkSym n
    defs <- sequence (mkIvoryStruct sym def ++ mkFields sym fs)
    ln <- lnPragma srcloc
    return (ln ++ defs)
  StringDef name len _srcloc -> mkStringDef name len
  AbstractDef n _hdr _srcloc -> sequence (mkIvoryStruct (mkSym n) def)
#else
  StructDef n fs _srcloc -> do
    let sym = mkSym n
    defs <- sequence (mkIvoryStruct sym def ++ mkFields sym fs)
    return defs
  StringDef name len _srcloc -> mkStringDef name len
  AbstractDef n _hdr _srcloc -> sequence (mkIvoryStruct (mkSym n) def)
#endif
  where
  -- Lift the struct name to a type-level string (symbol).
  mkSym = litT . strTyLit
-- IvoryStruct -----------------------------------------------------------------

-- | Generate an @IvoryStruct@ instance.
-- @sym@ is the type-level symbol naming the struct; the single method
-- of the instance is produced by 'mkStructDef'.
mkIvoryStruct :: TypeQ -> StructDef -> [DecQ]
mkIvoryStruct sym def =
  [ instanceD (cxt []) (appT (conT ''S.IvoryStruct) sym) [mkStructDef def]
  ]
-- | Build the 'S.structDef' method body, quoting the runtime AST value
-- ('AST.Struct' or 'AST.Abstract') that describes the struct.
-- String definitions never reach here: they are rewritten into ordinary
-- struct definitions by 'mkStringDef' first.
mkStructDef :: StructDef -> DecQ
mkStructDef def = funD 'S.structDef
  [ clause [] (normalB [| S.StructDef $astStruct |] ) []
  ]
  where
  astStruct = case def of
    StructDef n fs _    -> [| AST.Struct $(stringE n) $(listE (map mkField fs)) |]
    AbstractDef n hdr _ -> [| AST.Abstract $(stringE n) $(stringE hdr) |]
    StringDef _ _ _     -> error "unexpected string definition"

  -- One typed field entry of the runtime struct description.
  mkField f =
    [| AST.Typed
         $(mkTypeE (fieldType f))
         $(stringE (fieldName f))
    |]
-- Field Labels ----------------------------------------------------------------

-- | Generate a label binding for every field of a struct.
mkFields :: TypeQ -> [Field] -> [DecQ]
mkFields sym = concatMap (mkLabel sym)

-- | A field becomes a top-level @Label@ value named after the field,
-- typed against the struct symbol so it can only index that struct.
mkLabel :: TypeQ -> Field -> [DecQ]
mkLabel sym f =
  [ sigD field [t| S.Label $sym $(mkType (fieldType f)) |]
  , funD field [clause [] (normalB [| S.Label $(stringE (fieldName f)) |]) []]
  ]
  where
  field = mkName (fieldName f)
-- | Translate a parsed field type into the corresponding TH type,
-- lifting bare stored types where needed ('maybeLiftStored').
mkType :: Type -> TypeQ
mkType area = do
  ty <- runToQ $ fromType $ maybeLiftStored area
  return (fst ty)

-- | Turn a parsed type into its AST representation.
mkTypeE :: Type -> ExpQ
mkTypeE ty =
  appE (varE 'A.ivoryArea)
       (sigE (conE 'Proxy)
             (appT (conT ''Proxy) (mkType ty)))

-- Note: The above is equivalent to:
--
--   [| ivoryArea (Proxy :: Proxy $(mkType ty)) |]
--
-- except I can't get TH to type-check that (maybe this will
-- work in GHC 7.8?)
-- String Types ---------------------------------------------------------------

-- | Create an Ivory type for a string with a fixed capacity.
-- A string definition @string struct Foo 16@ is desugared into a struct
-- with two fields -- a byte array of the given capacity and an Int32
-- length -- plus an 'S.IvoryString' instance wiring up the capacity type
-- and the lenses onto those two fields.  The original name becomes a type
-- synonym for the generated struct type.
mkStringDef :: String -> Integer -> Q [Dec]
mkStringDef ty_s len = do
  let ty_n     = mkName ty_s
  let struct_s = ivoryStringStructName ty_s
  let struct_t = [t| 'A.Struct $(litT (strTyLit struct_s)) |]
  let data_s   = struct_s ++ "_data"
  let data_n   = mkName data_s
  let len_s    = struct_s ++ "_len"
  let len_n    = mkName len_s

  -- Synthesized fields: the payload array and the current length.
  let data_f = Field data_s (TyArray (TyStored (TyWord Word8)) (Right len)) mempty
  let len_f  = Field len_s (TyStored (TyInt Int32)) mempty
  let struct_def = StructDef struct_s [data_f, len_f] mempty

  d1 <- fromStruct struct_def
  d2 <- sequence
    [ tySynD ty_n [] struct_t
    , instanceD (cxt []) (appT (conT ''S.IvoryString) struct_t)
      [ tySynInstDCompat ''S.Capacity Nothing [struct_t] (pure $ szTy len)
      , valD (varP 'S.stringDataL) (normalB (varE data_n)) []
      , valD (varP 'S.stringLengthL) (normalB (varE len_n)) []
      ]
    ]
  return (d1 ++ d2)
-- | A small type-constructor syntax tree (applications, named constructors,
-- type-level naturals and symbols).
-- NOTE(review): not exported and apparently unreferenced within this
-- module -- confirm whether it is dead code before removing.
data TypeCon
  = TApp TypeCon TypeCon
  | TCon String
  | TNat Integer
  | TSym String
  deriving (Show)
| GaloisInc/ivory | ivory/src/Ivory/Language/Syntax/Concrete/QQ/StructQQ.hs | bsd-3-clause | 4,731 | 6 | 14 | 1,183 | 1,108 | 612 | 496 | 87 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : Generics.EMGM
-- Copyright : (c) 2008, 2009 Universiteit Utrecht
-- License : BSD3
--
-- Maintainer : generics@haskell.org
-- Stability : experimental
-- Portability : non-portable
--
-- EMGM is \"Extensible and Modular Generics for the Masses,\" a library for
-- datatype-generic programming in Haskell.
--
-- This module exports the most commonly used types, classes, and functions. The
-- documentation is organized by topic for convenient access.
-----------------------------------------------------------------------------
{-# OPTIONS_GHC -Wall #-}
module Generics.EMGM (
-- * Foundation
--
-- | This is the collection of types, classes, and functions used to define
-- generic functions and to build representations for datatypes.
-- ** Datatype Representation
--
-- | These are the types and functions required to represent a datatype for
-- use by generic functions.
-- *** Structure Representation Types
--
-- | The unit, sum, and product types form the sum-of-products view for a
-- Haskell datatype.
Unit(..),
(:+:)(..),
(:*:)(..),
-- *** Embedding-Projection Pair
--
-- | A pair of a function and its inverse form the isomorphism between a
-- datatype and its structure representation.
EP(..),
-- *** Constructor Description
--
-- | A description of the syntax of each constructor provides useful auxiliary
-- information for some generic functions.
ConDescr(..),
LblDescr(..),
Fixity(..),
Associativity(..),
Prec,
prec,
-- ** Generic Function Classes
--
-- | Generic functions are instances of these classes. The value-level
-- structure representation of datatypes is implemented using the members of
-- these classes. Thus, a generic function is simply a case statement on the
-- value-level structure.
--
-- Note that the numerical suffix represents the number of generic type
-- variables used in the generic function. No suffix represents 1 generic type
-- variable.
Generic(..),
Generic2(..),
Generic3(..),
-- ** Representation Dispatchers
--
-- | Type classes simplify the application of generic functions by providing
-- (a.k.a. \"dispatching\") the appropriate structure representation. These
-- classes are divided into the kinds they support (monomorphic, functor, and
-- bifunctor).
--
-- Note that the numerical suffix represents the number of generic type
-- variables used in the generic function. No suffix represents 1 generic type
-- variable.
-- *** Monomorphic
--
-- | All types of kind @*@ should have an instance here. This includes types
-- applied to type variables: @[a]@, @'Maybe' a@, @'Either' a b@, etc.
Rep(..),
-- *** Functor
--
-- | Types of kind @* -> *@ should have an instance here. This includes @[]@,
-- 'Maybe', etc.
FRep(..),
FRep2(..),
FRep3(..),
-- *** Bifunctor
--
-- | Types of kind @* -> * -> *@ should have an instance here. This includes
-- @(,)@, 'Either', etc.
BiFRep2(..),
-- * Generic Functions
--
-- | The following collection of functions use the common EMGM infrastructure
-- to work on all datatypes that have instances for a certain representation
-- dispatcher. These functions are categorized by the core generic
-- functionality. For example, 'flattenr' is a type of \"crush\" function,
-- because it is defined by the 'Generic' instance of the @newtype 'Crush'@.
--
-- More information for each of these is available in its respective module.
-- ** Collect
--
-- | Function that collects values of one type from values of a possibly
-- different type.
--
-- For more details, see "Generics.EMGM.Functions.Collect".
collect,
-- ** Compare
--
-- | Functions that compare two values to determine an ordering.
--
-- For more details, see "Generics.EMGM.Functions.Compare".
compare,
eq,
neq,
lt,
lteq,
gt,
gteq,
min,
max,
-- ** Crush
--
-- | Functions that crush a polymorphic functor container into an iteration
-- over its elements.
--
-- For more details, see "Generics.EMGM.Functions.Crush".
Assoc(..),
crush,
crushl,
crushr,
flatten,
flattenl,
flattenr,
first,
firstl,
firstr,
and,
or,
any,
all,
sum,
product,
minimum,
maximum,
elem,
notElem,
-- ** Enum
--
-- | Functions that enumerate the values of a datatype.
--
-- For more details, see "Generics.EMGM.Functions.Enum".
enum,
enumN,
empty,
-- ** Everywhere
--
-- | Functions that apply a transformation at every location of one type in a
-- value of a possibly different type.
--
-- For more details, see "Generics.EMGM.Functions.Everywhere".
everywhere,
everywhere',
-- ** Map
--
-- | Functions that translate values of one type to values of another. This
-- includes map-like functions that apply non-generic functions to every
-- element in a polymorphic (functor or bifunctor) container. It also includes
-- 'cast', a configurable, type-safe casting function.
--
-- For more details, see "Generics.EMGM.Functions.Map".
map,
replace,
bimap,
cast,
-- ** Meta
--
-- | Functions for extracting meta-information about the representation.
--
-- For more details, see "Generics.EMGM.Functions.Meta".
HasEP(..),
conDescr,
lblDescrs,
-- ** Read
--
-- | Functions similar to @deriving Prelude.Read@ that parse a string and return a
-- value of a datatype.
--
-- For more details, see "Generics.EMGM.Functions.Read".
readPrec,
readP,
readsPrec,
reads,
read,
-- ** Show
--
-- | Functions similar to @deriving Prelude.Show@ that return a string
-- representation of a value of a datatype.
--
-- For more details, see "Generics.EMGM.Functions.Show".
showsPrec,
shows,
show,
-- ** Transpose
--
-- | Functions that transpose polymorphic functor values.
--
-- For more details, see "Generics.EMGM.Functions.Transpose".
transpose,
transposeE,
-- ** UnzipWith
--
-- | Functions that split a polymorphic functor values into two structurally
-- equilvalent values.
--
-- For more details, see "Generics.EMGM.Functions.UnzipWith".
unzipWithM,
unzipWith,
unzip,
-- ** ZipWith
--
-- | Functions that combine two structurally equivalent, polymorphic functor
-- values into one.
--
-- For more details, see "Generics.EMGM.Functions.ZipWith".
zipWithM,
zipWith,
zip,
) where
import qualified Prelude ()
import Generics.EMGM.Base
import Generics.EMGM.Functions.Collect
import Generics.EMGM.Functions.Compare
import Generics.EMGM.Functions.Crush
import Generics.EMGM.Functions.Enum
import Generics.EMGM.Functions.Everywhere
import Generics.EMGM.Functions.Map
import Generics.EMGM.Functions.Meta
import Generics.EMGM.Functions.Read
import Generics.EMGM.Functions.Show
import Generics.EMGM.Functions.Transpose
import Generics.EMGM.Functions.UnzipWith
import Generics.EMGM.Functions.ZipWith
-- Export the instances from these
import Generics.EMGM.Data.Bool()
import Generics.EMGM.Data.Either()
import Generics.EMGM.Data.List()
import Generics.EMGM.Data.Maybe()
import Generics.EMGM.Data.Tuple()
| spl/emgm | src/Generics/EMGM.hs | bsd-3-clause | 7,271 | 0 | 5 | 1,445 | 609 | 469 | 140 | 97 | 0 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : dave.laing.80@gmail.com
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Fragment.Int.Rules.Type.Infer.Offline (
IOfflineInt
) where
import Control.Lens (review)
import Ast.Type
import Ast.Type.Var
import Rules.Type.Infer.Offline (IOffline)
import Control.Monad.State (MonadState)
import Fragment.Int.Rules.Type.Infer.Common
-- | Uninhabited tag selecting the offline (unification-based) type
-- inference strategy for the Int fragment.
data IOfflineInt

-- NOTE(review): the instance head mentions 'ITOffline', which must come
-- from the open import of ...Rules.Type.Infer.Common, while the explicitly
-- imported 'IOffline' does not appear to be used here -- confirm whether
-- that import is redundant.
instance IntInferTypeHelper ITOffline IOfflineInt where
  type IntInferTypeHelperConstraint e w s r m ki ty a ITOffline IOfflineInt =
    ( MonadState s m
    , HasTyVarSupply s
    , ToTyVar a
    )

  -- Ignores all three arguments and manufactures a fresh type variable,
  -- injected into the type syntax via the _TyVar prism.
  createInt _ _ _ =
    fmap (review _TyVar) freshTyVar
| dalaing/type-systems | src/Fragment/Int/Rules/Type/Infer/Offline.hs | bsd-3-clause | 834 | 0 | 8 | 143 | 156 | 93 | 63 | -1 | -1 |
{-# LANGUAGE Safe #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Version
-- Copyright : (c) The University of Glasgow 2004
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (local universal quantification in ReadP)
--
-- A general library for representation and manipulation of versions.
--
-- Versioning schemes are many and varied, so the version
-- representation provided by this library is intended to be a
-- compromise between complete generality, where almost no common
-- functionality could reasonably be provided, and fixing a particular
-- versioning scheme, which would probably be too restrictive.
--
-- So the approach taken here is to provide a representation which
-- subsumes many of the versioning schemes commonly in use, and we
-- provide implementations of 'Eq', 'Ord' and conversion to\/from 'String'
-- which will be appropriate for some applications, but not all.
--
-----------------------------------------------------------------------------
module Data.Version (
-- * The @Version@ type
Version(..),
-- * A concrete representation of @Version@
showVersion, parseVersion,
-- * Constructor function
makeVersion
) where
import Control.Monad ( Monad(..), liftM )
import Data.Bool ( (&&) )
import Data.Char ( isDigit, isAlphaNum )
import Data.Eq
import Data.Int ( Int )
import Data.List
import Data.Ord
import Data.String ( String )
import GHC.Read
import GHC.Show
import Text.ParserCombinators.ReadP
import Text.Read ( read )
{- |
A 'Version' represents the version of a software entity.
An instance of 'Eq' is provided, which implements exact equality
modulo reordering of the tags in the 'versionTags' field.
An instance of 'Ord' is also provided, which gives lexicographic
ordering on the 'versionBranch' fields (i.e. 2.1 > 2.0, 1.2.3 > 1.2.2,
etc.). This is expected to be sufficient for many uses, but note that
you may need to use a more specific ordering for your versioning
scheme. For example, some versioning schemes may include pre-releases
which have tags @\"pre1\"@, @\"pre2\"@, and so on, and these would need to
be taken into account when determining ordering. In some cases, date
ordering may be more appropriate, so the application would have to
look for @date@ tags in the 'versionTags' field and compare those.
The bottom line is, don't always assume that 'compare' and other 'Ord'
operations are the right thing for every 'Version'.
Similarly, concrete representations of versions may differ. One
possible concrete representation is provided (see 'showVersion' and
'parseVersion'), but depending on the application a different concrete
representation may be more appropriate.
-}
data Version =
  Version { versionBranch :: [Int],
                -- ^ The numeric branch for this version.  This reflects the
                -- fact that most software versions are tree-structured; there
                -- is a main trunk which is tagged with versions at various
                -- points (1,2,3...), and the first branch off the trunk after
                -- version 3 is 3.1, the second branch off the trunk after
                -- version 3 is 3.2, and so on.  The tree can be branched
                -- arbitrarily, just by adding more digits.
                --
                -- We represent the branch as a list of 'Int', so
                -- version 3.2.1 becomes [3,2,1].  Lexicographic ordering
                -- (i.e. the default instance of 'Ord' for @[Int]@) gives
                -- the natural ordering of branches.
            versionTags :: [String]  -- really a bag
                -- ^ A version can be tagged with an arbitrary list of strings.
                -- The interpretation of the list of tags is entirely dependent
                -- on the entity that this version applies to.
          }
  deriving (Read,Show)

{-# DEPRECATED versionTags "See GHC ticket #2496" #-}
-- TODO. Remove all references to versionTags in GHC 7.12 release.

-- | Equality is exact on 'versionBranch' but ignores the order of
-- 'versionTags' (the tags form a bag, not a sequence).
instance Eq Version where
  v1 == v2  =  versionBranch v1 == versionBranch v2
                && sort (versionTags v1) == sort (versionTags v2)
                -- tags may be in any order

-- | Lexicographic ordering on 'versionBranch' only; tags do not
-- participate in the ordering.
instance Ord Version where
  v1 `compare` v2 = versionBranch v1 `compare` versionBranch v2

-- -----------------------------------------------------------------------------
-- A concrete representation of 'Version'

-- | Provides one possible concrete representation for 'Version'.  For
-- a version with 'versionBranch' @= [1,2,3]@ and 'versionTags'
-- @= [\"tag1\",\"tag2\"]@, the output will be @1.2.3-tag1-tag2@.
--
showVersion :: Version -> String
showVersion (Version branch tags)
  =   -- intercalate (from the already-imported Data.List) is the idiomatic
      -- and equivalent form of @concat . intersperse@.
    intercalate "." (map show branch) ++
    concatMap ('-':) tags
-- | A parser for versions in the format produced by 'showVersion'.
-- Accepts one or more dot-separated decimal numbers followed by zero or
-- more @-tag@ suffixes of alphanumeric characters.
--
parseVersion :: ReadP Version
parseVersion = do branch <- sepBy1 (liftM read (munch1 isDigit)) (char '.')
                  -- 'read' is safe here: munch1 isDigit guarantees a
                  -- non-empty all-digit string.
                  tags   <- many (char '-' >> munch1 isAlphaNum)
                  return Version{versionBranch=branch, versionTags=tags}

-- | Construct tag-less 'Version'
--
-- @since 4.8.0.0
makeVersion :: [Int] -> Version
makeVersion b = Version b []
| gcampax/ghc | libraries/base/Data/Version.hs | bsd-3-clause | 5,418 | 0 | 12 | 1,224 | 485 | 291 | 194 | 38 | 1 |
module Permissions.OnpingSpec (main, spec) where
import Test.Hspec
main :: IO ()
main = hspec spec

spec :: Spec
spec = do
  describe "someFunction" $ do
    it "should work fine" $ do
      -- NOTE(review): this asserts True == False and therefore always
      -- fails; it reads like scaffolding for a not-yet-written test --
      -- confirm what expectation was actually intended here.
      True `shouldBe` False
| smurphy8/onping-permissions | test/Permissions/OnpingSpec.hs | bsd-3-clause | 215 | 0 | 13 | 49 | 76 | 40 | 36 | 9 | 1 |
module Gamgine.Math.Box where
#include "Gamgine/Utils.cpp"
import Data.Maybe
import qualified Data.List as L
import Gamgine.Math.Vect as V
import Gamgine.Math.Utils
import Gamgine.Utils
IMPORT_LENS_AS_LE
-- axis aligned bounding box
-- | Axis-aligned bounding box given by its minimum and maximum corner
-- points.  All operations below assume @minPt <= maxPt@ componentwise.
data Box = Box {
   minPt :: Vect,
   maxPt :: Vect
   } deriving (Show, Read)

-- CPP-generated lenses for the two corner fields.
LENS(minPt)
LENS(maxPt)
-- | Midpoint of the box: the minimum corner shifted by the half extents.
center :: Box -> Vect
center box@(Box lo _) = lo + halfs box
-- | Half extents of the box along each axis.
halfs :: Box -> Vect
halfs box = (maxPt box - minPt box) * 0.5
-- | Do the two boxes overlap (touching counts as overlapping)?
-- The boxes are disjoint exactly when one lies entirely beyond the
-- other along some axis.
intersects :: Box -> Box -> Bool
intersects b1 b2 = not (separatedAbove || separatedBelow)
   where
      separatedAbove = V.any (>) (minPt b2) (maxPt b1)
      separatedBelow = V.any (<) (maxPt b2) (minPt b1)
-- | Is the first box completely contained in the second?
inside :: Box -> Box -> Bool
inside inner outer =
   V.all (>=) (minPt inner) (minPt outer) && V.all (<=) (maxPt inner) (maxPt outer)
-- | Translate the whole box by the given offset vector.
moveBy :: Box -> Vect -> Box
moveBy (Box lo hi) offset = Box (lo + offset) (hi + offset)
-- | Smallest box enclosing both arguments (componentwise min/max of corners).
extendBy :: Box -> Box -> Box
extendBy b1 b2 = Box lo hi
   where
      lo = V.minVec (minPt b1) (minPt b2)
      hi = V.maxVec (maxPt b1) (maxPt b2)
-- | Is the point inside the box (boundary included)?
contains :: Box -> Vect -> Bool
contains box point =
   V.all (>=) point (minPt box) && V.all (<=) point (maxPt box)
-- | Bounding box of a list of boxes; the empty list yields a degenerate
-- box at the origin.
bound :: [Box] -> Box
bound boxes =
   case boxes of
      []     -> Box origin origin
      (b:bs) -> L.foldr extendBy b bs
   where
      origin = V.v3 0 0 0
-- overlapping if distance negative in all dimensions
distance :: Box -> Box -> Vect
distance b1 b2 = centerGap - combinedHalfs
   where
      centerGap     = abs $ center b2 - center b1
      combinedHalfs = halfs b1 + halfs b2
-- If the boxes are overlapping, then minOverlap returns the minimal
-- distance in each dimension by which b1 has to be moved to resolve
-- the overlapping with b2. Otherwise, if not overlapping, for each
-- dimension 0 is returned.
minOverlap :: Box -> Box -> Vect
minOverlap (Box min1 max1) (Box min2 max2) = V.fromList $ L.map overlap [0..2]
   where
      -- Overlap along a single dimension (0, 1 or 2).
      overlap dim =
         let v1@(minv1, maxv1) = (getElem dim min1, getElem dim max1)
             v2@(minv2, maxv2) = (getElem dim min2, getElem dim max2)
             in if maxv1 < minv2 || minv1 > maxv2
                   -- intervals disjoint in this dimension: no overlap
                   then 0
                   else let minv1Outside = minv1 < minv2
                            maxv1Outside = maxv1 > maxv2
                            v1Inside     = not minv1Outside && not maxv1Outside
                            o | v1Inside     = insideOverlap v1 v2
                              | minv1Outside = minOutsideOverlap v1 v2
                              | maxv1Outside = maxOutsideOverlap v1 v2
                            in o

      -- b1's interval sits wholly inside b2's: push it out through the
      -- nearer end (negative = move towards smaller coordinates).
      insideOverlap (minv1, maxv1) (minv2, maxv2) =
         let leftDist  = maxv1 - minv2
             rightDist = maxv2 - minv1
             o | leftDist < rightDist = -leftDist
               | otherwise            = rightDist
             in o

      -- b1 sticks out on the low side: move it down by the overlap amount.
      minOutsideOverlap (_, maxv1) (minv2, _) = -(maxv1 - minv2)
      -- b1 sticks out on the high side: move it up by the overlap amount.
      maxOutsideOverlap (minv1, _) (_, maxv2) = maxv2 - minv1
-- | A plain triple of doubles, e.g. for (de)serialisation.
type Tuple3d = (Double,Double,Double)

-- | Build a box from a (min corner, max corner) pair of tuples.
fromTuples :: (Tuple3d, Tuple3d) -> Box
fromTuples (lo, hi) = Box (V.fromTuple lo) (V.fromTuple hi)

-- | Decompose a box into its (min corner, max corner) tuples.
toTuples :: Box -> (Tuple3d, Tuple3d)
toTuples box = (V.toTuple (minPt box), V.toTuple (maxPt box))
| dan-t/Gamgine | Gamgine/Math/Box.hs | bsd-3-clause | 2,932 | 0 | 16 | 871 | 1,069 | 563 | 506 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE CPP #-}
--
-- DefBitRep.hs --- Template Haskell utilities.
--
-- Copyright (C) 2013, Galois, Inc.
-- All Rights Reserved.
--
module Ivory.Language.BitData.DefBitRep where
import Language.Haskell.TH
import Language.Haskell.TH.Datatype (tySynInstDCompat)
-- | For every @n@ in the given list, emit the declaration
--
-- > type instance <fname> n = <rname>
--
-- i.e. map each bit length onto the single representation type.
defBitRep :: Name -> Name -> [Integer] -> Q [Dec]
defBitRep fname rname = mapM instanceFor
  where
    -- Build one @type instance@ declaration for the type-level literal @n@.
    instanceFor n =
      tySynInstDCompat fname Nothing [pure (LitT (NumTyLit n))] (pure (ConT rname))
| GaloisInc/ivory | ivory/src/Ivory/Language/BitData/DefBitRep.hs | bsd-3-clause | 734 | 0 | 14 | 140 | 148 | 84 | 64 | 11 | 1 |
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Language.Logic.Examples.Infer where
import Language.Logic
import Language.Logic.LogicList
import Control.Applicative
import Control.Monad
import "mtl" Control.Monad.Reader
import "mtl" Control.Monad.State
-- | Variable names in the object language.
type Variable = String

-- | Object-language expressions. Every sub-term is wrapped in
-- 'LogicVal' so that any position may be an unbound logic variable
-- during type inference.
data Exp = VarE (LogicVal Variable)
         | IntE (LogicVal Int)
         | BoolE (LogicVal Bool)
         | IsZeroE (LogicVal Exp)
         | PredE (LogicVal Exp)
         | MultE (LogicVal Exp) (LogicVal Exp)
         | IfE (LogicVal Exp) (LogicVal Exp) (LogicVal Exp)
         | LamE (LogicVal Variable) (LogicVal Exp)
         | AppE (LogicVal Exp) (LogicVal Exp)
         deriving (Eq, Show)
-- | Structural unification of expressions: after walking both sides
-- through the current substitution, identical terms succeed, a logic
-- variable on either side gets bound to the other term, matching
-- constructors unify component-wise, and everything else fails.
instance Unifiable Exp where
    unify x y = do
      s <- getSubst
      case (walk x s, walk y s) of
        -- Identical after walking: nothing to do.
        (x', y') | x' == y' -> return ()
        -- An unbound variable on either side: extend the substitution.
        (Var id, y') -> modifySubst (extendS (Var id) y')
        (x', Var id) -> modifySubst (extendS (Var id) x')
        -- Same constructor: unify the corresponding components.
        (Val (VarE x), Val (VarE y)) -> unify x y
        (Val (IntE x), Val (IntE y)) -> unify x y
        (Val (BoolE x), Val (BoolE y)) -> unify x y
        (Val (IsZeroE x), Val (IsZeroE y)) -> unify x y
        (Val (PredE x), Val (PredE y)) -> unify x y
        (Val (MultE x1 x2), Val (MultE y1 y2)) ->
            unify x1 y1 >> unify x2 y2
        (Val (IfE xt xc xa), Val (IfE yt yc ya)) ->
            unify xt yt >> unify xc yc >> unify xa ya
        (Val (LamE x xbody), Val (LamE y ybody)) ->
            unify x y >> unify xbody ybody
        (Val (AppE xrator xrand), Val (AppE yrator yrand)) ->
            unify xrator yrator >> unify xrand yrand
        -- Mismatched constructors: unification fails.
        _ -> mzero
-- | Recursively resolve an expression against the current substitution;
-- any still-unbound variable is handled by 'reifyVar'.
instance Reifiable Exp where
    reify lv = do
      s <- ask
      case walk lv s of
        Val (VarE x) -> Val . VarE <$> reify x
        Val (IntE x) -> Val . IntE <$> reify x
        Val (BoolE x) -> Val . BoolE <$> reify x
        Val (IsZeroE x) -> Val . IsZeroE <$> reify x
        Val (PredE x) -> Val . PredE <$> reify x
        Val (MultE x1 x2) -> Val <$> (MultE <$> reify x1 <*> reify x2)
        Val (IfE t c a) -> Val <$> (IfE <$> reify t <*> reify c <*> reify a)
        Val (LamE x body) -> Val <$> (LamE <$> reify x <*> reify body)
        Val (AppE rator rand) -> Val <$> (AppE <$> reify rator <*> reify rand)
        Var id -> reifyVar (Var id)
-- | Object-language types: integers, booleans, and function types
-- whose domain/codomain may themselves be logic variables.
data Typ = IntT | BoolT | FunT (LogicVal Typ) (LogicVal Typ)
         deriving (Eq, Show)
-- | Unification for object-language types.
instance Unifiable Typ where
    unify x y = do
      s <- getSubst
      case (walk x s, walk y s) of
        (x', y') | x' == y' -> return ()
        (Var id, y') -> modifySubst (extendS (Var id) y')
        (x', Var id) -> modifySubst (extendS (Var id) x')
        -- BUG FIX: the original unified x1 with x2 and y1 with y2 --
        -- the components of the *same* arrow type -- which wrongly
        -- forced every function type to have equal domain and codomain.
        -- Unify domain with domain and codomain with codomain instead,
        -- mirroring the MultE case of the Exp instance.
        (Val (FunT x1 x2), Val (FunT y1 y2)) -> unify x1 y1 >> unify x2 y2
        _ -> mzero
-- | Resolve a type fully against the current substitution, rebuilding
-- arrow types from their reified components (matching the style of the
-- 'Exp' instance).
instance Reifiable Typ where
    reify lv = do
      s <- ask
      case walk lv s of
        Val (FunT t1 t2) -> Val <$> (FunT <$> reify t1 <*> reify t2)
        Val other -> return (Val other)
        Var id -> reifyVar (Var id)
-- | A type environment: an association list (as a logic list) from
-- variable names to their types.
type TypEnv = LogicList (LogicVal Variable, LogicVal Typ)

-- | Relational environment lookup: succeeds once for every binding of
-- @x@ in @gamma@, unifying its type with @t@. The first branch matches
-- the head binding; the second skips a (possibly different) head
-- binding and searches the tail.
lookupEnv :: LogicVal Variable
          -> LogicVal TypEnv
          -> LogicVal Typ
          -> LogicComp ()
lookupEnv x gamma t =
  msum [ do gamma' <- var
            gamma ==@ Cons (Val (x, t)) gamma'
       , do y <- var
            yt <- var
            gamma' <- var
            gamma ==@ Cons (Val (y, yt)) gamma'
            lookupEnv x gamma' t
       ]
-- | Relational type inference: relates an environment, an expression
-- and a type, with one 'msum' branch per expression form. Because it
-- is a relation, it can also run "backwards" (e.g. enumerate
-- expressions of a given type).
infer :: LogicVal TypEnv
      -> LogicVal Exp
      -> LogicVal Typ
      -> LogicComp ()
infer gamma exp t =
  -- Variables: look the type up in the environment.
  msum [ do x <- var
            exp ==@ VarE x
            lookupEnv x gamma t
       -- Integer literals have type IntT.
       , do n <- var
            exp ==@ IntE n
            t ==@ IntT
       -- Boolean literals have type BoolT.
       , do b <- var
            exp ==@ BoolE b
            t ==@ BoolT
       -- isZero : IntT -> BoolT
       , do e <- var
            exp ==@ IsZeroE e
            t ==@ BoolT
            infer gamma e (Val IntT)
       -- pred : IntT -> IntT
       , do e <- var
            exp ==@ PredE e
            t ==@ IntT
            infer gamma e (Val IntT)
       -- Multiplication: both operands and the result are IntT.
       , do e1 <- var
            e2 <- var
            exp ==@ MultE e1 e2
            t ==@ IntT
            infer gamma e1 (Val IntT)
            infer gamma e2 (Val IntT)
       -- Conditional: BoolT test, both branches share the result type.
       , do e1 <- var
            e2 <- var
            e3 <- var
            exp ==@ IfE e1 e2 e3
            infer gamma e1 (Val BoolT)
            infer gamma e2 t
            infer gamma e3 t
       -- Lambda: FunT t1 t2, typing the body under an extended env.
       , do x <- var
            body <- var
            t1 <- var
            t2 <- var
            t ==@ FunT t1 t2
            exp ==@ LamE x body
            infer (Val (Cons (Val (x, t1)) gamma)) body t2
       -- Application: the operator maps the operand type to t.
       , do rator <- var
            rand <- var
            t1 <- var
            exp ==@ AppE rator rand
            infer gamma rator (Val (FunT t1 t))
            infer gamma rand t1
       ]
-- | The empty type environment.
emptyG :: LogicVal TypEnv
emptyG = Val Nil

-- Smart constructors: wrap plain Haskell values as ground 'Exp' terms.
varE name = Val (VarE (Val name))
intE n = Val (IntE (Val n))
boolE b = Val (BoolE (Val b))
isZeroE e = Val (IsZeroE e)
predE e = Val (PredE e)
multE e1 e2 = Val (MultE e1 e2)
ifE cond conseq alt = Val (IfE cond conseq alt)
lamE name body = Val (LamE (Val name) body)
appE fun arg = Val (AppE fun arg)
-- Example queries; each returns the logic variable holding the
-- inferred type.
-- | 5 -- infers IntT.
testInt = do t <- var
             infer emptyG (intE 5) t
             return t
-- | if True then 2 else 3 -- both branches unify with t, so IntT.
testIf = do t <- var
            infer emptyG (ifE (boolE True) (intE 2) (intE 3)) t
            return t
-- | if True then 2 else False -- branch types IntT/BoolT clash,
-- so this query has no solutions.
testIf2 = do t <- var
             infer emptyG (ifE (boolE True) (intE 2) (boolE False)) t
             return t
-- | \x -> 2 -- a function with unconstrained domain and IntT codomain.
testLam = do t <- var
             infer emptyG (lamE "x" (intE 2)) t
             return t
-- | \x -> pred x -- infers FunT IntT IntT.
testLam2 = do t <- var
              infer emptyG (lamE "x" (predE (varE "x"))) t
              return t
return t | acfoltzer/Molog | examples/Infer.hs | bsd-3-clause | 5,513 | 0 | 16 | 2,067 | 2,507 | 1,199 | 1,308 | 163 | 1 |
{-# LANGUAGE FlexibleContexts, LambdaCase, RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
import Graphics.UI.GLFW.Pal
import Graphics.GL.Pal
import Graphics.VR.Pal
import Control.Monad
import Control.Monad.State
import Control.Monad.Reader
import Control.Lens.Extra
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Types
import CubeUniforms
import Physics.Bullet
-- | Demonstrates using setRigidBodyNoContactResponse
-- and getCollisions in lieu of GhostObjects
data World = World
  { _wldPlayer :: !(Pose GLfloat)            -- ^ camera/player pose (position + orientation)
  , _wldCubes :: !(Map ObjectID Cube)        -- ^ dynamic cubes, keyed by collision object id
  }
makeLenses ''World
-- | Initial world: player standing at (0, 20, 60) with no rotation,
-- and no cubes yet.
newWorld :: World
newWorld = World
  { _wldPlayer = Pose (V3 0 20 60) (axisAngle (V3 0 1 0) 0)
  , _wldCubes  = mempty
  }
-- | Model matrix for the ground plane: a pose rotated -90 degrees
-- about the x axis.
planeM44 :: M44 GLfloat
planeM44 =
  transformationFromPose
    (newPose & posOrientation .~ axisAngle (V3 1 0 0) (-pi/2))
-- | Sets up a window, shaders and a Bullet dynamics world containing a
-- kinematic, no-contact-response "ghost" box plus a shower of dynamic
-- cubes, then runs the render loop: cubes overlapping the ghost are
-- detected via getCollisions and drawn highlighted.
main :: IO ()
main = do
  let fov = 45
      -- ghostShapeSize = 10 :: V3 GLfloat
      ghostShapeSize = V3 0.1 1 1 :: V3 GLfloat
      -- ghostShapePose = newPose & posPosition .~ V3 0 5 0
      ghostShapePose = newPose & posPosition .~ V3 0 0 0
  VRPal{..} <- initVRPal "Bullet" []
  shader <- createShaderProgram "test/shared/cube.vert" "test/shared/cube.frag"
  cubeGeo <- cubeGeometry (1 :: V3 GLfloat) (V3 1 1 1)
  cubeShape <- makeShape cubeGeo shader :: IO (Shape Uniforms)
  ghostGeo <- cubeGeometry ghostShapeSize (V3 1 1 1)
  ghostShape <- makeShape ghostGeo shader :: IO (Shape Uniforms)
  planeGeo <- planeGeometry 1000 (V3 0 0 1) (V3 0 1 0) 1
  planeShape <- makeShape planeGeo shader :: IO (Shape Uniforms)
  dynamicsWorld <- createDynamicsWorld mempty {dwGravity = -10}
  _ <- addGroundPlane dynamicsWorld (CollisionObjectID 0) 0
  -- The "ghost": kinematic (moved by us, not by physics) and with
  -- contact response disabled, so it overlaps without pushing cubes.
  let ghostID = CollisionObjectID 1
  ghostBox <- createBoxShape ghostShapeSize
  ghostObject <- addRigidBody dynamicsWorld ghostID ghostBox
    mempty { rbPosition = ghostShapePose ^. posPosition, rbRotation = ghostShapePose ^. posOrientation }
  setRigidBodyKinematic ghostObject True
  setRigidBodyNoContactResponse ghostObject True
  glEnable GL_DEPTH_TEST
  glBlendFunc GL_SRC_ALPHA GL_ONE_MINUS_SRC_ALPHA
  glClearColor 0 0 0.1 1
  void . flip runStateT newWorld $ do
    -- boxShape <- createBoxShape (1 :: V3 GLfloat)
    boxShape <- createBoxShape (V3 0.1 0.2 0.3)
    -- Spawn the dynamic cubes (ids 10..100) above the ground.
    forM_ [10..100] $ \i -> do
      rigidBody <- addRigidBody dynamicsWorld (CollisionObjectID i) boxShape mempty
        { rbPosition = V3 0 20 0
        , rbRotation = Quaternion 0.5 (V3 0 1 1)
        }
      wldCubes . at (fromIntegral i) ?= Cube
        { _cubBody = rigidBody
        , _cubColor = V4 1 0 1 1
        }
    whileWindow gpWindow $ do
      projMat <- getWindowProjection gpWindow fov 0.1 1000
      viewMat <- viewMatrixFromPose <$> use wldPlayer
      (x,y,w,h) <- getWindowViewport gpWindow
      glViewport x y w h
      -- Sweep the ghost back and forth along x.
      ghostX <- (* 5) . sin <$> getNow
      let ghostShapePoseMoving = ghostShapePose
            & posPosition . _x .~ ghostX
            -- & posOrientation .~ axisAngle (V3 1 1 0) ghostX
      setRigidBodyWorldTransform ghostObject
        (ghostShapePoseMoving ^. posPosition)
        (ghostShapePoseMoving ^. posOrientation)
      processEvents gpEvents $ \e -> do
        closeOnEscape gpWindow e
      applyMouseLook gpWindow wldPlayer
      applyWASD gpWindow wldPlayer
      stepSimulation dynamicsWorld 60
      glClear (GL_COLOR_BUFFER_BIT .|. GL_DEPTH_BUFFER_BIT)
      -- Collect the ids of cubes currently touching the ghost
      -- (they are drawn white below).
      collisions <- getCollisions dynamicsWorld
      -- collisions <- contactTest dynamicsWorld ghostObject
      overlappingIDs <- Set.fromList . concat <$> forM collisions (\collision -> do
        let bodyAID = cbBodyAID collision
            bodyBID = cbBodyBID collision
            -- appliedImpulse = cbAppliedImpulse collision
        -- liftIO $ print [bodyAID, bodyBID]
        return (if bodyAID == ghostID || bodyBID == ghostID then [bodyAID, bodyBID] else [])
        )
      -- cubz <- use wldCubes
      -- forM_ cubz $ \cube ->
      --   setRigidBodyDisableDeactivation (cube ^. cubBody) False
      let viewProj = projMat !*! viewMat
      -- Begin cube batch
      withShape cubeShape $ do
        Uniforms{..} <- asks sUniforms
        uniformV3 uCamera =<< use (wldPlayer . posPosition)
        cubes <- Map.toList <$> use wldCubes
        forM_ cubes $ \(cubeID, cube) -> do
          (position, orientation) <- getBodyState (cube ^. cubBody)
          let model = mkTransformation orientation position
              cubeCollisionID = CollisionObjectID cubeID
              finalColor = if Set.member cubeCollisionID overlappingIDs
                             then V4 1 1 1 1
                             else cube ^. cubColor
          uniformM44 uModelViewProjection (viewProj !*! model)
          uniformM44 uInverseModel (inv44 model)
          uniformM44 uModel model
          uniformV4 uDiffuse finalColor
          drawShape
      -- Ground plane.
      withShape planeShape $ do
        Uniforms{..} <- asks sUniforms
        uniformV3 uCamera =<< use (wldPlayer . posPosition)
        let model = planeM44
        uniformM44 uModelViewProjection (viewProj !*! model)
        uniformM44 uModel model
        uniformV4 uDiffuse (V4 0.1 0.0 0.5 1)
        drawShape
      -- Ghost volume, drawn translucent.
      glEnable GL_BLEND
      withShape ghostShape $ do
        Uniforms{..} <- asks sUniforms
        uniformV3 uCamera =<< use (wldPlayer . posPosition)
        let model = transformationFromPose ghostShapePoseMoving
        uniformM44 uModelViewProjection (viewProj !*! model)
        uniformM44 uModel model
        uniformV4 uDiffuse (V4 0.5 0.0 0.5 0.5)
        drawShape
      glDisable GL_BLEND
      swapBuffers gpWindow
swapBuffers gpWindow
| lukexi/bullet-mini | test/GhostObjects2.hs | bsd-3-clause | 6,578 | 0 | 25 | 2,366 | 1,535 | 741 | 794 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances
, FlexibleContexts, UndecidableInstances #-}
-----------------------------------------------------------
-- |
-- Module : BoundedList
-- Copyright : HWT Group (c) 2003, haskelldb-users@lists.sourceforge.net
-- License : BSD-style
--
-- Maintainer : haskelldb-users@lists.sourceforge.net
-- Stability : experimental
-- Portability : non-portable
--
--
-- The main idea of bounded lists is to create lists with predetermined
-- maximum size.
--
-- BoundedList is a simple, fast and type safe approach to implementing
-- this idea.
-- The implementation is based on inductive instances, making it very easy to
-- expand with new bounds. A new bound only requires one instance of size and
-- two instances of Less.
--
-- BoundedList works as follows.
-- Every bound is built up by declaring a data-type representing the new bound.
-- The instance of size only returns the size as an Int.
-- The first instance of Less is for telling the typechecker that this bound
-- is greater than the largest smaller bound.
-- The second instance of Less is used by the typechecker to construct a chain
-- of instances if there is no hardcoded instance available.
-- This way the type checker can determine whether a bound is smaller\/greater
-- than any other bound.
--
-- This inductive approach gives the complexity O(n) on the number of instances
-- and very short type checking times compared to an O(n\^2) implementation.
--
-- BoundedList also comes with a few utility functions for manipulating and
-- constructing bounded lists.
--
-- To be noted:
-- Since each bound is a unique type:
-- Explicit shrink and\/or grow is needed before using (==).
-- BoundedList does not have an instance of Ordering. (This might change)
--
--
-----------------------------------------------------------
module Database.HaskellDB.BoundedList (shrink,
grow,
trunc,
listBound,
toBounded,
fromBounded,
Size,
BoundedList,
N0, N1, N2, N3, N4, N5, N6, N7, N8, N9,
N10, N11, N12, N13, N14, N15, N16, N17, N18, N19,
N20, N21, N22, N23, N24, N25, N26, N27, N28, N29,
N30, N31, N32, N33, N34, N35, N36, N37, N38, N39,
N40, N41, N42, N43, N44, N45, N46, N47, N48, N49,
N50, N51, N52, N53, N54, N55, N56, N57, N58, N59,
N60, N61, N62, N63, N64, N65, N66, N67, N68, N69,
N70, N71, N72, N73, N74, N75, N76, N77, N78, N79,
N80, N81, N82, N83, N84, N85, N86, N87, N88, N89,
N90, N91, N92, N93, N94, N95, N96, N97, N98, N99,
N100, N101, N102, N103, N104, N105, N106, N107, N108, N109,
N110, N111, N112, N113, N114, N115, N116, N117, N118, N119,
N120, N121, N122, N123, N124, N125, N126, N127, N128, N129,
N130, N131, N132, N133, N134, N135, N136, N137, N138, N139,
N140, N141, N142, N143, N144, N145, N146, N147, N148, N149,
N150, N151, N152, N153, N154, N155, N156, N157, N158, N159,
N160, N161, N162, N163, N164, N165, N166, N167, N168, N169,
N170, N171, N172, N173, N174, N175, N176, N177, N178, N179,
N180, N181, N182, N183, N184, N185, N186, N187, N188, N189,
N190, N191, N192, N193, N194, N195, N196, N197, N198, N199,
N200, N201, N202, N203, N204, N205, N206, N207, N208, N209,
N210, N211, N212, N213, N214, N215, N216, N217, N218, N219,
N220, N221, N222, N223, N224, N225, N226, N227, N228, N229,
N230, N231, N232, N233, N234, N235, N236, N237, N238, N239,
N240, N241, N242, N243, N244, N245, N246, N247, N248, N249,
N250, N251, N252, N253, N254, N255, N65535)
where
import Data.Typeable
-- | Types standing for a list-length bound; 'size' projects the
-- type-level bound to its run-time 'Int' value.
class Size n where
    size :: n -> Int
-- | @Less a b@ holds when bound @a@ is strictly smaller than bound @b@.
class (Size a, Size b) => Less a b
-- | @LessEq a b@ holds when bound @a@ is smaller than or equal to @b@.
class (Size a, Size b) => LessEq a b
-- Reflexive case: every bound is less-than-or-equal to itself.
instance (Size a) => LessEq a a
-- Otherwise fall back to the strict ordering.
instance (Size a, Size b, Less a b) => LessEq a b
-- The bound types themselves. Each bound N<k> gets a 'Size' instance,
-- a direct 'Less' instance against its predecessor, and an inductive
-- 'Less' instance that chains every smaller bound through the
-- predecessor -- giving O(n) instances overall, as described above.
data N0 = N0
instance Size N0 where size _ = 0
data N1 = N1
instance Size N1 where size _ = 1
instance Less N0 N1
data N2 = N2
instance Size N2 where size _ = 2
instance Less N1 N2
instance (Size a, Less a N1) => Less a N2
data N3 = N3
instance Size N3 where size _ = 3
instance Less N2 N3
instance (Size a, Less a N2) => Less a N3
data N4 = N4
instance Size N4 where size _ = 4
instance Less N3 N4
instance (Size a, Less a N3) => Less a N4
data N5 = N5
instance Size N5 where size _ = 5
instance Less N4 N5
instance (Size a, Less a N4) => Less a N5
data N6 = N6
instance Size N6 where size _ = 6
instance Less N5 N6
instance (Size a, Less a N5) => Less a N6
data N7 = N7
instance Size N7 where size _ = 7
instance Less N6 N7
instance (Size a, Less a N6) => Less a N7
data N8 = N8
instance Size N8 where size _ = 8
instance Less N7 N8
instance (Size a, Less a N7) => Less a N8
data N9 = N9
instance Size N9 where size _ = 9
instance Less N8 N9
instance (Size a, Less a N8) => Less a N9
data N10 = N10
instance Size N10 where size _ = 10
instance Less N9 N10
instance (Size a, Less a N9) => Less a N10
data N11 = N11
instance Size N11 where size _ = 11
instance Less N10 N11
instance (Size a, Less a N10) => Less a N11
data N12 = N12
instance Size N12 where size _ = 12
instance Less N11 N12
instance (Size a, Less a N11) => Less a N12
data N13 = N13
instance Size N13 where size _ = 13
instance Less N12 N13
instance (Size a, Less a N12) => Less a N13
data N14 = N14
instance Size N14 where size _ = 14
instance Less N13 N14
instance (Size a, Less a N13) => Less a N14
data N15 = N15
instance Size N15 where size _ = 15
instance Less N14 N15
instance (Size a, Less a N14) => Less a N15
data N16 = N16
instance Size N16 where size _ = 16
instance Less N15 N16
instance (Size a, Less a N15) => Less a N16
data N17 = N17
instance Size N17 where size _ = 17
instance Less N16 N17
instance (Size a, Less a N16) => Less a N17
data N18 = N18
instance Size N18 where size _ = 18
instance Less N17 N18
instance (Size a, Less a N17) => Less a N18
data N19 = N19
instance Size N19 where size _ = 19
instance Less N18 N19
instance (Size a, Less a N18) => Less a N19
data N20 = N20
instance Size N20 where size _ = 20
instance Less N19 N20
instance (Size a, Less a N19) => Less a N20
data N21 = N21
instance Size N21 where size _ = 21
instance Less N20 N21
instance (Size a, Less a N20) => Less a N21
data N22 = N22
instance Size N22 where size _ = 22
instance Less N21 N22
instance (Size a, Less a N21) => Less a N22
data N23 = N23
instance Size N23 where size _ = 23
instance Less N22 N23
instance (Size a, Less a N22) => Less a N23
data N24 = N24
instance Size N24 where size _ = 24
instance Less N23 N24
instance (Size a, Less a N23) => Less a N24
data N25 = N25
instance Size N25 where size _ = 25
instance Less N24 N25
instance (Size a, Less a N24) => Less a N25
data N26 = N26
instance Size N26 where size _ = 26
instance Less N25 N26
instance (Size a, Less a N25) => Less a N26
data N27 = N27
instance Size N27 where size _ = 27
instance Less N26 N27
instance (Size a, Less a N26) => Less a N27
data N28 = N28
instance Size N28 where size _ = 28
instance Less N27 N28
instance (Size a, Less a N27) => Less a N28
data N29 = N29
instance Size N29 where size _ = 29
instance Less N28 N29
instance (Size a, Less a N28) => Less a N29
data N30 = N30
instance Size N30 where size _ = 30
instance Less N29 N30
instance (Size a, Less a N29) => Less a N30
data N31 = N31
instance Size N31 where size _ = 31
instance Less N30 N31
instance (Size a, Less a N30) => Less a N31
data N32 = N32
instance Size N32 where size _ = 32
instance Less N31 N32
instance (Size a, Less a N31) => Less a N32
data N33 = N33
instance Size N33 where size _ = 33
instance Less N32 N33
instance (Size a, Less a N32) => Less a N33
data N34 = N34
instance Size N34 where size _ = 34
instance Less N33 N34
instance (Size a, Less a N33) => Less a N34
data N35 = N35
instance Size N35 where size _ = 35
instance Less N34 N35
instance (Size a, Less a N34) => Less a N35
data N36 = N36
instance Size N36 where size _ = 36
instance Less N35 N36
instance (Size a, Less a N35) => Less a N36
data N37 = N37
instance Size N37 where size _ = 37
instance Less N36 N37
instance (Size a, Less a N36) => Less a N37
data N38 = N38
instance Size N38 where size _ = 38
instance Less N37 N38
instance (Size a, Less a N37) => Less a N38
data N39 = N39
instance Size N39 where size _ = 39
instance Less N38 N39
instance (Size a, Less a N38) => Less a N39
data N40 = N40
instance Size N40 where size _ = 40
instance Less N39 N40
instance (Size a, Less a N39) => Less a N40
data N41 = N41
instance Size N41 where size _ = 41
instance Less N40 N41
instance (Size a, Less a N40) => Less a N41
data N42 = N42
instance Size N42 where size _ = 42
instance Less N41 N42
instance (Size a, Less a N41) => Less a N42
data N43 = N43
instance Size N43 where size _ = 43
instance Less N42 N43
instance (Size a, Less a N42) => Less a N43
data N44 = N44
instance Size N44 where size _ = 44
instance Less N43 N44
instance (Size a, Less a N43) => Less a N44
data N45 = N45
instance Size N45 where size _ = 45
instance Less N44 N45
instance (Size a, Less a N44) => Less a N45
data N46 = N46
instance Size N46 where size _ = 46
instance Less N45 N46
instance (Size a, Less a N45) => Less a N46
data N47 = N47
instance Size N47 where size _ = 47
instance Less N46 N47
instance (Size a, Less a N46) => Less a N47
data N48 = N48
instance Size N48 where size _ = 48
instance Less N47 N48
instance (Size a, Less a N47) => Less a N48
data N49 = N49
instance Size N49 where size _ = 49
instance Less N48 N49
instance (Size a, Less a N48) => Less a N49
data N50 = N50
instance Size N50 where size _ = 50
instance Less N49 N50
instance (Size a, Less a N49) => Less a N50
data N51 = N51
instance Size N51 where size _ = 51
instance Less N50 N51
instance (Size a, Less a N50) => Less a N51
data N52 = N52
instance Size N52 where size _ = 52
instance Less N51 N52
instance (Size a, Less a N51) => Less a N52
data N53 = N53
instance Size N53 where size _ = 53
instance Less N52 N53
instance (Size a, Less a N52) => Less a N53
data N54 = N54
instance Size N54 where size _ = 54
instance Less N53 N54
instance (Size a, Less a N53) => Less a N54
data N55 = N55
instance Size N55 where size _ = 55
instance Less N54 N55
instance (Size a, Less a N54) => Less a N55
data N56 = N56
instance Size N56 where size _ = 56
instance Less N55 N56
instance (Size a, Less a N55) => Less a N56
data N57 = N57
instance Size N57 where size _ = 57
instance Less N56 N57
instance (Size a, Less a N56) => Less a N57
data N58 = N58
instance Size N58 where size _ = 58
instance Less N57 N58
instance (Size a, Less a N57) => Less a N58
data N59 = N59
instance Size N59 where size _ = 59
instance Less N58 N59
instance (Size a, Less a N58) => Less a N59
data N60 = N60
instance Size N60 where size _ = 60
instance Less N59 N60
instance (Size a, Less a N59) => Less a N60
data N61 = N61
instance Size N61 where size _ = 61
instance Less N60 N61
instance (Size a, Less a N60) => Less a N61
data N62 = N62
instance Size N62 where size _ = 62
instance Less N61 N62
instance (Size a, Less a N61) => Less a N62
data N63 = N63
instance Size N63 where size _ = 63
instance Less N62 N63
instance (Size a, Less a N62) => Less a N63
data N64 = N64
instance Size N64 where size _ = 64
instance Less N63 N64
instance (Size a, Less a N63) => Less a N64
data N65 = N65
instance Size N65 where size _ = 65
instance Less N64 N65
instance (Size a, Less a N64) => Less a N65
data N66 = N66
instance Size N66 where size _ = 66
instance Less N65 N66
instance (Size a, Less a N65) => Less a N66
data N67 = N67
instance Size N67 where size _ = 67
instance Less N66 N67
instance (Size a, Less a N66) => Less a N67
data N68 = N68
instance Size N68 where size _ = 68
instance Less N67 N68
instance (Size a, Less a N67) => Less a N68
data N69 = N69
instance Size N69 where size _ = 69
instance Less N68 N69
instance (Size a, Less a N68) => Less a N69
data N70 = N70
instance Size N70 where size _ = 70
instance Less N69 N70
instance (Size a, Less a N69) => Less a N70
data N71 = N71
instance Size N71 where size _ = 71
instance Less N70 N71
instance (Size a, Less a N70) => Less a N71
data N72 = N72
instance Size N72 where size _ = 72
instance Less N71 N72
instance (Size a, Less a N71) => Less a N72
data N73 = N73
instance Size N73 where size _ = 73
instance Less N72 N73
instance (Size a, Less a N72) => Less a N73
data N74 = N74
instance Size N74 where size _ = 74
instance Less N73 N74
instance (Size a, Less a N73) => Less a N74
data N75 = N75
instance Size N75 where size _ = 75
instance Less N74 N75
instance (Size a, Less a N74) => Less a N75
data N76 = N76
instance Size N76 where size _ = 76
instance Less N75 N76
instance (Size a, Less a N75) => Less a N76
data N77 = N77
instance Size N77 where size _ = 77
instance Less N76 N77
instance (Size a, Less a N76) => Less a N77
data N78 = N78
instance Size N78 where size _ = 78
instance Less N77 N78
instance (Size a, Less a N77) => Less a N78
data N79 = N79
instance Size N79 where size _ = 79
instance Less N78 N79
instance (Size a, Less a N78) => Less a N79
data N80 = N80
instance Size N80 where size _ = 80
instance Less N79 N80
instance (Size a, Less a N79) => Less a N80
data N81 = N81
instance Size N81 where size _ = 81
instance Less N80 N81
instance (Size a, Less a N80) => Less a N81
data N82 = N82
instance Size N82 where size _ = 82
instance Less N81 N82
instance (Size a, Less a N81) => Less a N82
data N83 = N83
instance Size N83 where size _ = 83
instance Less N82 N83
instance (Size a, Less a N82) => Less a N83
data N84 = N84
instance Size N84 where size _ = 84
instance Less N83 N84
instance (Size a, Less a N83) => Less a N84
data N85 = N85
instance Size N85 where size _ = 85
instance Less N84 N85
instance (Size a, Less a N84) => Less a N85
data N86 = N86
instance Size N86 where size _ = 86
instance Less N85 N86
instance (Size a, Less a N85) => Less a N86
data N87 = N87
instance Size N87 where size _ = 87
instance Less N86 N87
instance (Size a, Less a N86) => Less a N87
data N88 = N88
instance Size N88 where size _ = 88
instance Less N87 N88
instance (Size a, Less a N87) => Less a N88
data N89 = N89
instance Size N89 where size _ = 89
instance Less N88 N89
instance (Size a, Less a N88) => Less a N89
data N90 = N90
instance Size N90 where size _ = 90
instance Less N89 N90
instance (Size a, Less a N89) => Less a N90
data N91 = N91
instance Size N91 where size _ = 91
instance Less N90 N91
instance (Size a, Less a N90) => Less a N91
data N92 = N92
instance Size N92 where size _ = 92
instance Less N91 N92
instance (Size a, Less a N91) => Less a N92
data N93 = N93
instance Size N93 where size _ = 93
instance Less N92 N93
instance (Size a, Less a N92) => Less a N93
data N94 = N94
instance Size N94 where size _ = 94
instance Less N93 N94
instance (Size a, Less a N93) => Less a N94
data N95 = N95
instance Size N95 where size _ = 95
instance Less N94 N95
instance (Size a, Less a N94) => Less a N95
data N96 = N96
instance Size N96 where size _ = 96
instance Less N95 N96
instance (Size a, Less a N95) => Less a N96
data N97 = N97
instance Size N97 where size _ = 97
instance Less N96 N97
instance (Size a, Less a N96) => Less a N97
data N98 = N98
instance Size N98 where size _ = 98
instance Less N97 N98
instance (Size a, Less a N97) => Less a N98
data N99 = N99
instance Size N99 where size _ = 99
instance Less N98 N99
instance (Size a, Less a N98) => Less a N99
data N100 = N100
instance Size N100 where size _ = 100
instance Less N99 N100
instance (Size a, Less a N99) => Less a N100
data N101 = N101
instance Size N101 where size _ = 101
instance Less N100 N101
instance (Size a, Less a N100) => Less a N101
data N102 = N102
instance Size N102 where size _ = 102
instance Less N101 N102
instance (Size a, Less a N101) => Less a N102
data N103 = N103
instance Size N103 where size _ = 103
instance Less N102 N103
instance (Size a, Less a N102) => Less a N103
data N104 = N104
instance Size N104 where size _ = 104
instance Less N103 N104
instance (Size a, Less a N103) => Less a N104
data N105 = N105
instance Size N105 where size _ = 105
instance Less N104 N105
instance (Size a, Less a N104) => Less a N105
data N106 = N106
instance Size N106 where size _ = 106
instance Less N105 N106
instance (Size a, Less a N105) => Less a N106
data N107 = N107
instance Size N107 where size _ = 107
instance Less N106 N107
instance (Size a, Less a N106) => Less a N107
data N108 = N108
instance Size N108 where size _ = 108
instance Less N107 N108
instance (Size a, Less a N107) => Less a N108
data N109 = N109
instance Size N109 where size _ = 109
instance Less N108 N109
instance (Size a, Less a N108) => Less a N109
data N110 = N110
instance Size N110 where size _ = 110
instance Less N109 N110
instance (Size a, Less a N109) => Less a N110
data N111 = N111
instance Size N111 where size _ = 111
instance Less N110 N111
instance (Size a, Less a N110) => Less a N111
data N112 = N112
instance Size N112 where size _ = 112
instance Less N111 N112
instance (Size a, Less a N111) => Less a N112
data N113 = N113
instance Size N113 where size _ = 113
instance Less N112 N113
instance (Size a, Less a N112) => Less a N113
data N114 = N114
instance Size N114 where size _ = 114
instance Less N113 N114
instance (Size a, Less a N113) => Less a N114
data N115 = N115
instance Size N115 where size _ = 115
instance Less N114 N115
instance (Size a, Less a N114) => Less a N115
data N116 = N116
instance Size N116 where size _ = 116
instance Less N115 N116
instance (Size a, Less a N115) => Less a N116
data N117 = N117
instance Size N117 where size _ = 117
instance Less N116 N117
instance (Size a, Less a N116) => Less a N117
data N118 = N118
instance Size N118 where size _ = 118
instance Less N117 N118
instance (Size a, Less a N117) => Less a N118
data N119 = N119
instance Size N119 where size _ = 119
instance Less N118 N119
instance (Size a, Less a N118) => Less a N119
data N120 = N120
instance Size N120 where size _ = 120
instance Less N119 N120
instance (Size a, Less a N119) => Less a N120
data N121 = N121
instance Size N121 where size _ = 121
instance Less N120 N121
instance (Size a, Less a N120) => Less a N121
data N122 = N122
instance Size N122 where size _ = 122
instance Less N121 N122
instance (Size a, Less a N121) => Less a N122
data N123 = N123
instance Size N123 where size _ = 123
instance Less N122 N123
instance (Size a, Less a N122) => Less a N123
data N124 = N124
instance Size N124 where size _ = 124
instance Less N123 N124
instance (Size a, Less a N123) => Less a N124
data N125 = N125
instance Size N125 where size _ = 125
instance Less N124 N125
instance (Size a, Less a N124) => Less a N125
data N126 = N126
instance Size N126 where size _ = 126
instance Less N125 N126
instance (Size a, Less a N125) => Less a N126
data N127 = N127
instance Size N127 where size _ = 127
instance Less N126 N127
instance (Size a, Less a N126) => Less a N127
data N128 = N128
instance Size N128 where size _ = 128
instance Less N127 N128
instance (Size a, Less a N127) => Less a N128
data N129 = N129
instance Size N129 where size _ = 129
instance Less N128 N129
instance (Size a, Less a N128) => Less a N129
data N130 = N130
instance Size N130 where size _ = 130
instance Less N129 N130
instance (Size a, Less a N129) => Less a N130
data N131 = N131
instance Size N131 where size _ = 131
instance Less N130 N131
instance (Size a, Less a N130) => Less a N131
data N132 = N132
instance Size N132 where size _ = 132
instance Less N131 N132
instance (Size a, Less a N131) => Less a N132
data N133 = N133
instance Size N133 where size _ = 133
instance Less N132 N133
instance (Size a, Less a N132) => Less a N133
data N134 = N134
instance Size N134 where size _ = 134
instance Less N133 N134
instance (Size a, Less a N133) => Less a N134
data N135 = N135
instance Size N135 where size _ = 135
instance Less N134 N135
instance (Size a, Less a N134) => Less a N135
data N136 = N136
instance Size N136 where size _ = 136
instance Less N135 N136
instance (Size a, Less a N135) => Less a N136
data N137 = N137
instance Size N137 where size _ = 137
instance Less N136 N137
instance (Size a, Less a N136) => Less a N137
data N138 = N138
instance Size N138 where size _ = 138
instance Less N137 N138
instance (Size a, Less a N137) => Less a N138
data N139 = N139
instance Size N139 where size _ = 139
instance Less N138 N139
instance (Size a, Less a N138) => Less a N139
data N140 = N140
instance Size N140 where size _ = 140
instance Less N139 N140
instance (Size a, Less a N139) => Less a N140
data N141 = N141
instance Size N141 where size _ = 141
instance Less N140 N141
instance (Size a, Less a N140) => Less a N141
data N142 = N142
instance Size N142 where size _ = 142
instance Less N141 N142
instance (Size a, Less a N141) => Less a N142
data N143 = N143
instance Size N143 where size _ = 143
instance Less N142 N143
instance (Size a, Less a N142) => Less a N143
data N144 = N144
instance Size N144 where size _ = 144
instance Less N143 N144
instance (Size a, Less a N143) => Less a N144
data N145 = N145
instance Size N145 where size _ = 145
instance Less N144 N145
instance (Size a, Less a N144) => Less a N145
data N146 = N146
instance Size N146 where size _ = 146
instance Less N145 N146
instance (Size a, Less a N145) => Less a N146
data N147 = N147
instance Size N147 where size _ = 147
instance Less N146 N147
instance (Size a, Less a N146) => Less a N147
data N148 = N148
instance Size N148 where size _ = 148
instance Less N147 N148
instance (Size a, Less a N147) => Less a N148
data N149 = N149
instance Size N149 where size _ = 149
instance Less N148 N149
instance (Size a, Less a N148) => Less a N149
data N150 = N150
instance Size N150 where size _ = 150
instance Less N149 N150
instance (Size a, Less a N149) => Less a N150
data N151 = N151
instance Size N151 where size _ = 151
instance Less N150 N151
instance (Size a, Less a N150) => Less a N151
data N152 = N152
instance Size N152 where size _ = 152
instance Less N151 N152
instance (Size a, Less a N151) => Less a N152
data N153 = N153
instance Size N153 where size _ = 153
instance Less N152 N153
instance (Size a, Less a N152) => Less a N153
data N154 = N154
instance Size N154 where size _ = 154
instance Less N153 N154
instance (Size a, Less a N153) => Less a N154
data N155 = N155
instance Size N155 where size _ = 155
instance Less N154 N155
instance (Size a, Less a N154) => Less a N155
data N156 = N156
instance Size N156 where size _ = 156
instance Less N155 N156
instance (Size a, Less a N155) => Less a N156
data N157 = N157
instance Size N157 where size _ = 157
instance Less N156 N157
instance (Size a, Less a N156) => Less a N157
data N158 = N158
instance Size N158 where size _ = 158
instance Less N157 N158
instance (Size a, Less a N157) => Less a N158
data N159 = N159
instance Size N159 where size _ = 159
instance Less N158 N159
instance (Size a, Less a N158) => Less a N159
data N160 = N160
instance Size N160 where size _ = 160
instance Less N159 N160
instance (Size a, Less a N159) => Less a N160
data N161 = N161
instance Size N161 where size _ = 161
instance Less N160 N161
instance (Size a, Less a N160) => Less a N161
data N162 = N162
instance Size N162 where size _ = 162
instance Less N161 N162
instance (Size a, Less a N161) => Less a N162
data N163 = N163
instance Size N163 where size _ = 163
instance Less N162 N163
instance (Size a, Less a N162) => Less a N163
data N164 = N164
instance Size N164 where size _ = 164
instance Less N163 N164
instance (Size a, Less a N163) => Less a N164
data N165 = N165
instance Size N165 where size _ = 165
instance Less N164 N165
instance (Size a, Less a N164) => Less a N165
data N166 = N166
instance Size N166 where size _ = 166
instance Less N165 N166
instance (Size a, Less a N165) => Less a N166
data N167 = N167
instance Size N167 where size _ = 167
instance Less N166 N167
instance (Size a, Less a N166) => Less a N167
data N168 = N168
instance Size N168 where size _ = 168
instance Less N167 N168
instance (Size a, Less a N167) => Less a N168
data N169 = N169
instance Size N169 where size _ = 169
instance Less N168 N169
instance (Size a, Less a N168) => Less a N169
data N170 = N170
instance Size N170 where size _ = 170
instance Less N169 N170
instance (Size a, Less a N169) => Less a N170
data N171 = N171
instance Size N171 where size _ = 171
instance Less N170 N171
instance (Size a, Less a N170) => Less a N171
data N172 = N172
instance Size N172 where size _ = 172
instance Less N171 N172
instance (Size a, Less a N171) => Less a N172
data N173 = N173
instance Size N173 where size _ = 173
instance Less N172 N173
instance (Size a, Less a N172) => Less a N173
data N174 = N174
instance Size N174 where size _ = 174
instance Less N173 N174
instance (Size a, Less a N173) => Less a N174
data N175 = N175
instance Size N175 where size _ = 175
instance Less N174 N175
instance (Size a, Less a N174) => Less a N175
data N176 = N176
instance Size N176 where size _ = 176
instance Less N175 N176
instance (Size a, Less a N175) => Less a N176
data N177 = N177
instance Size N177 where size _ = 177
instance Less N176 N177
instance (Size a, Less a N176) => Less a N177
data N178 = N178
instance Size N178 where size _ = 178
instance Less N177 N178
instance (Size a, Less a N177) => Less a N178
data N179 = N179
instance Size N179 where size _ = 179
instance Less N178 N179
instance (Size a, Less a N178) => Less a N179
data N180 = N180
instance Size N180 where size _ = 180
instance Less N179 N180
instance (Size a, Less a N179) => Less a N180
data N181 = N181
instance Size N181 where size _ = 181
instance Less N180 N181
instance (Size a, Less a N180) => Less a N181
data N182 = N182
instance Size N182 where size _ = 182
instance Less N181 N182
instance (Size a, Less a N181) => Less a N182
data N183 = N183
instance Size N183 where size _ = 183
instance Less N182 N183
instance (Size a, Less a N182) => Less a N183
data N184 = N184
instance Size N184 where size _ = 184
instance Less N183 N184
instance (Size a, Less a N183) => Less a N184
data N185 = N185
instance Size N185 where size _ = 185
instance Less N184 N185
instance (Size a, Less a N184) => Less a N185
data N186 = N186
instance Size N186 where size _ = 186
instance Less N185 N186
instance (Size a, Less a N185) => Less a N186
data N187 = N187
instance Size N187 where size _ = 187
instance Less N186 N187
instance (Size a, Less a N186) => Less a N187
data N188 = N188
instance Size N188 where size _ = 188
instance Less N187 N188
instance (Size a, Less a N187) => Less a N188
data N189 = N189
instance Size N189 where size _ = 189
instance Less N188 N189
instance (Size a, Less a N188) => Less a N189
data N190 = N190
instance Size N190 where size _ = 190
instance Less N189 N190
instance (Size a, Less a N189) => Less a N190
data N191 = N191
instance Size N191 where size _ = 191
instance Less N190 N191
instance (Size a, Less a N190) => Less a N191
data N192 = N192
instance Size N192 where size _ = 192
instance Less N191 N192
instance (Size a, Less a N191) => Less a N192
data N193 = N193
instance Size N193 where size _ = 193
instance Less N192 N193
instance (Size a, Less a N192) => Less a N193
data N194 = N194
instance Size N194 where size _ = 194
instance Less N193 N194
instance (Size a, Less a N193) => Less a N194
data N195 = N195
instance Size N195 where size _ = 195
instance Less N194 N195
instance (Size a, Less a N194) => Less a N195
data N196 = N196
instance Size N196 where size _ = 196
instance Less N195 N196
instance (Size a, Less a N195) => Less a N196
data N197 = N197
instance Size N197 where size _ = 197
instance Less N196 N197
instance (Size a, Less a N196) => Less a N197
data N198 = N198
instance Size N198 where size _ = 198
instance Less N197 N198
instance (Size a, Less a N197) => Less a N198
data N199 = N199
instance Size N199 where size _ = 199
instance Less N198 N199
instance (Size a, Less a N198) => Less a N199
data N200 = N200
instance Size N200 where size _ = 200
instance Less N199 N200
instance (Size a, Less a N199) => Less a N200
data N201 = N201
instance Size N201 where size _ = 201
instance Less N200 N201
instance (Size a, Less a N200) => Less a N201
data N202 = N202
instance Size N202 where size _ = 202
instance Less N201 N202
instance (Size a, Less a N201) => Less a N202
data N203 = N203
instance Size N203 where size _ = 203
instance Less N202 N203
instance (Size a, Less a N202) => Less a N203
data N204 = N204
instance Size N204 where size _ = 204
instance Less N203 N204
instance (Size a, Less a N203) => Less a N204
data N205 = N205
instance Size N205 where size _ = 205
instance Less N204 N205
instance (Size a, Less a N204) => Less a N205
data N206 = N206
instance Size N206 where size _ = 206
instance Less N205 N206
instance (Size a, Less a N205) => Less a N206
data N207 = N207
instance Size N207 where size _ = 207
instance Less N206 N207
instance (Size a, Less a N206) => Less a N207
data N208 = N208
instance Size N208 where size _ = 208
instance Less N207 N208
instance (Size a, Less a N207) => Less a N208
data N209 = N209
instance Size N209 where size _ = 209
instance Less N208 N209
instance (Size a, Less a N208) => Less a N209
data N210 = N210
instance Size N210 where size _ = 210
instance Less N209 N210
instance (Size a, Less a N209) => Less a N210
data N211 = N211
instance Size N211 where size _ = 211
instance Less N210 N211
instance (Size a, Less a N210) => Less a N211
data N212 = N212
instance Size N212 where size _ = 212
instance Less N211 N212
instance (Size a, Less a N211) => Less a N212
data N213 = N213
instance Size N213 where size _ = 213
instance Less N212 N213
instance (Size a, Less a N212) => Less a N213
data N214 = N214
instance Size N214 where size _ = 214
instance Less N213 N214
instance (Size a, Less a N213) => Less a N214
data N215 = N215
instance Size N215 where size _ = 215
instance Less N214 N215
instance (Size a, Less a N214) => Less a N215
data N216 = N216
instance Size N216 where size _ = 216
instance Less N215 N216
instance (Size a, Less a N215) => Less a N216
data N217 = N217
instance Size N217 where size _ = 217
instance Less N216 N217
instance (Size a, Less a N216) => Less a N217
data N218 = N218
instance Size N218 where size _ = 218
instance Less N217 N218
instance (Size a, Less a N217) => Less a N218
data N219 = N219
instance Size N219 where size _ = 219
instance Less N218 N219
instance (Size a, Less a N218) => Less a N219
data N220 = N220
instance Size N220 where size _ = 220
instance Less N219 N220
instance (Size a, Less a N219) => Less a N220
data N221 = N221
instance Size N221 where size _ = 221
instance Less N220 N221
instance (Size a, Less a N220) => Less a N221
data N222 = N222
instance Size N222 where size _ = 222
instance Less N221 N222
instance (Size a, Less a N221) => Less a N222
data N223 = N223
instance Size N223 where size _ = 223
instance Less N222 N223
instance (Size a, Less a N222) => Less a N223
data N224 = N224
instance Size N224 where size _ = 224
instance Less N223 N224
instance (Size a, Less a N223) => Less a N224
data N225 = N225
instance Size N225 where size _ = 225
instance Less N224 N225
instance (Size a, Less a N224) => Less a N225
data N226 = N226
instance Size N226 where size _ = 226
instance Less N225 N226
instance (Size a, Less a N225) => Less a N226
data N227 = N227
instance Size N227 where size _ = 227
instance Less N226 N227
instance (Size a, Less a N226) => Less a N227
data N228 = N228
instance Size N228 where size _ = 228
instance Less N227 N228
instance (Size a, Less a N227) => Less a N228
data N229 = N229
instance Size N229 where size _ = 229
instance Less N228 N229
instance (Size a, Less a N228) => Less a N229
data N230 = N230
instance Size N230 where size _ = 230
instance Less N229 N230
instance (Size a, Less a N229) => Less a N230
data N231 = N231
instance Size N231 where size _ = 231
instance Less N230 N231
instance (Size a, Less a N230) => Less a N231
data N232 = N232
instance Size N232 where size _ = 232
instance Less N231 N232
instance (Size a, Less a N231) => Less a N232
data N233 = N233
instance Size N233 where size _ = 233
instance Less N232 N233
instance (Size a, Less a N232) => Less a N233
data N234 = N234
instance Size N234 where size _ = 234
instance Less N233 N234
instance (Size a, Less a N233) => Less a N234
data N235 = N235
instance Size N235 where size _ = 235
instance Less N234 N235
instance (Size a, Less a N234) => Less a N235
data N236 = N236
instance Size N236 where size _ = 236
instance Less N235 N236
instance (Size a, Less a N235) => Less a N236
data N237 = N237
instance Size N237 where size _ = 237
instance Less N236 N237
instance (Size a, Less a N236) => Less a N237
data N238 = N238
instance Size N238 where size _ = 238
instance Less N237 N238
instance (Size a, Less a N237) => Less a N238
data N239 = N239
instance Size N239 where size _ = 239
instance Less N238 N239
instance (Size a, Less a N238) => Less a N239
data N240 = N240
instance Size N240 where size _ = 240
instance Less N239 N240
instance (Size a, Less a N239) => Less a N240
data N241 = N241
instance Size N241 where size _ = 241
instance Less N240 N241
instance (Size a, Less a N240) => Less a N241
data N242 = N242
instance Size N242 where size _ = 242
instance Less N241 N242
instance (Size a, Less a N241) => Less a N242
data N243 = N243
instance Size N243 where size _ = 243
instance Less N242 N243
instance (Size a, Less a N242) => Less a N243
data N244 = N244
instance Size N244 where size _ = 244
instance Less N243 N244
instance (Size a, Less a N243) => Less a N244
data N245 = N245
instance Size N245 where size _ = 245
instance Less N244 N245
instance (Size a, Less a N244) => Less a N245
data N246 = N246
instance Size N246 where size _ = 246
instance Less N245 N246
instance (Size a, Less a N245) => Less a N246
data N247 = N247
instance Size N247 where size _ = 247
instance Less N246 N247
instance (Size a, Less a N246) => Less a N247
data N248 = N248
instance Size N248 where size _ = 248
instance Less N247 N248
instance (Size a, Less a N247) => Less a N248
data N249 = N249
instance Size N249 where size _ = 249
instance Less N248 N249
instance (Size a, Less a N248) => Less a N249
data N250 = N250
instance Size N250 where size _ = 250
instance Less N249 N250
instance (Size a, Less a N249) => Less a N250
data N251 = N251
instance Size N251 where size _ = 251
instance Less N250 N251
instance (Size a, Less a N250) => Less a N251
data N252 = N252
instance Size N252 where size _ = 252
instance Less N251 N252
instance (Size a, Less a N251) => Less a N252
data N253 = N253
instance Size N253 where size _ = 253
instance Less N252 N253
instance (Size a, Less a N252) => Less a N253
data N254 = N254
instance Size N254 where size _ = 254
instance Less N253 N254
instance (Size a, Less a N253) => Less a N254
data N255 = N255
instance Size N255 where size _ = 255
instance Less N254 N255
instance (Size a, Less a N254) => Less a N255
data N65535 = N65535
instance Size N65535 where size _ = 65535
instance Less N255 N65535
instance (Size a, Less a N255) => Less a N65535
-- | A list carrying a phantom type-level bound @n@ on its maximum length.
-- The bound has no runtime representation; it is enforced only by the
-- smart constructors ('toBounded', 'trunc') and conversions ('shrink',
-- 'grow') of this module.
newtype BoundedList a n = L [a]
    deriving (Typeable)

-- | Shows just the underlying list (the bound is a phantom type).
instance (Show a, Size n) => Show (BoundedList a n) where
    -- Fixed: the original bound the whole value with an as-pattern
    -- (@l\@(L xs)@) but never used @l@.
    show (L xs) = show xs

-- | Two bounded lists are equal exactly when their element lists are equal.
instance (Size n, Eq a) => Eq (BoundedList a n) where
    L c == L d = c == d
-- | Re-bound a 'BoundedList' at a (possibly smaller) bound @m@.
-- Returns 'Nothing' when the contained list is longer than the new
-- bound, i.e. when the conversion would have to truncate.
shrink :: (Size n, Size m) => BoundedList a n -> Maybe (BoundedList a m)
shrink boundedList = toBounded (fromBounded boundedList)
-- | Widen the bound of a 'BoundedList'. Always safe: the 'LessEq'
-- constraint guarantees the new bound @m@ is at least @n@, so no
-- truncation can occur and the contents are reused unchanged.
grow :: LessEq n m => BoundedList a n -> BoundedList a m
grow (L contents) = L contents
-- | Extract the plain list stored inside a 'BoundedList'.
fromBounded :: Size n => BoundedList a n -> [a]
fromBounded (L contents) = contents
-- | The actual number of elements currently stored (not the bound).
listLength :: BoundedList a n -> Int
listLength (L contents) = length contents
-- | The maximum number of elements permitted by the phantom bound @n@.
listBound :: Size n => BoundedList a n -> Int
listBound boundedList = size (listBoundType boundedList)

-- | Phantom witness selecting the bound type @n@ for 'size'.
-- Never evaluated; only its type matters.
listBoundType :: BoundedList a n -> n
listBoundType _ = undefined
-- | Wrap a list into a 'BoundedList', or 'Nothing' when the list has
-- more elements than the bound @n@ allows.
toBounded :: Size n => [a] -> Maybe (BoundedList a n)
toBounded xs = check (L xs)
  where
    -- Local helper pins the phantom type so 'listBound' can consult it.
    check :: Size n => BoundedList a n -> Maybe (BoundedList a n)
    check candidate
        | listLength candidate > listBound candidate = Nothing
        | otherwise                                  = Just candidate
-- | Wrap a list into a 'BoundedList', silently dropping any elements
-- beyond the bound so the result always fits.
trunc :: Size n => [a] -> BoundedList a n
trunc xs = fit (L xs)
  where
    -- Local helper pins the phantom type so 'listBound' can consult it.
    fit :: Size n => BoundedList a n -> BoundedList a n
    fit whole@(L contents) = L (take (listBound whole) contents)
| m4dc4p/haskelldb | src/Database/HaskellDB/BoundedList.hs | bsd-3-clause | 36,891 | 67 | 13 | 8,126 | 16,452 | 8,385 | 8,067 | -1 | -1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.AmountOfMoney.Rules
( rules
) where
import Data.HashMap.Strict (HashMap)
import Data.Maybe
import Data.String
import Data.Text (Text)
import Prelude
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Text as Text
import Duckling.AmountOfMoney.Helpers
import Duckling.AmountOfMoney.Types (Currency(..), AmountOfMoneyData (..))
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (isPositive)
import Duckling.Numeral.Types (NumeralData (..))
import Duckling.Regex.Types
import Duckling.Types
import qualified Duckling.AmountOfMoney.Types as TAmountOfMoney
import qualified Duckling.Numeral.Types as TNumeral
-- | Lookup table from lower-cased currency spellings and symbols to
-- their 'Currency' values. Keys must stay in sync with the alternation
-- in 'ruleCurrencies': anything that regex matches is looked up here
-- (after 'Text.toLower'), and a miss makes the rule produce nothing.
currencies :: HashMap Text Currency
currencies = HashMap.fromList
  [ ("aed", AED)
  , ("aud", AUD)
  , ("bgn", BGN)
  , ("brl", BRL)
  , ("byn", BYN)
  , ("cad", CAD)
  , ("¢", Cent)
  , ("c", Cent)
  , ("chf", CHF)
  , ("cny", CNY)
  , ("czk", CZK)
  , ("rmb", CNY)
  , ("yuan", CNY)
  , ("$", Dollar)
  , ("dinar", Dinar)
  , ("dinars", Dinar)
  , ("dkk", DKK)
  , ("dollar", Dollar)
  , ("dollars", Dollar)
  , ("egp", EGP)
  , ("€", EUR)
  , ("eur", EUR)
  , ("euro", EUR)
  , ("euros", EUR)
  , ("eurs", EUR)
  , ("€ur", EUR)
  , ("€uro", EUR)
  , ("€uros", EUR)
  , ("€urs", EUR)
  , ("gbp", GBP)
  , ("gel", GEL)
  , ("hkd", HKD)
  , ("hrk", HRK)
  , ("idr", IDR)
  , ("ils", ILS)
  , ("₪", ILS)
  , ("nis", ILS)
  , ("inr", INR)
  , ("iqd", IQD)
  , ("rs", INR)
  , ("rs.", INR)
  , ("rupee", INR)
  , ("rupees", INR)
  , ("jmd", JMD)
  , ("jod", JOD)
  , ("¥", JPY)
  , ("jpy", JPY)
  , ("lari", GEL)
  , ("\x20BE", GEL)
  , ("yen", JPY)
  , ("krw", KRW)
  , ("kwd", KWD)
  , ("lbp", LBP)
  , ("mad", MAD)
  , ("mnt", MNT)
  , ("myr", MYR)
  , ("rm", MYR)
  , ("₮", MNT)
  , ("tugrik", MNT)
  , ("tugriks", MNT)
  , ("nok", NOK)
  , ("nzd", NZD)
  , ("pkr", PKR)
  , ("pln", PLN)
  , ("£", Pound)
  , ("pt", PTS)
  , ("pta", PTS)
  , ("ptas", PTS)
  , ("pts", PTS)
  , ("qar", QAR)
  , ("₽", RUB)
  , ("rial", Rial)
  , ("rials", Rial)
  , ("riyal", Riyal)
  , ("riyals", Riyal)
  , ("ron", RON)
  , ("rub", RUB)
  , ("sar", SAR)
  , ("sek", SEK)
  , ("sgd", SGD)
  , ("shekel", ILS)
  , ("shekels", ILS)
  , ("thb", THB)
  , ("ttd", TTD)
  , ("₴", UAH)
  , ("uah", UAH)
  , ("usd", USD)
  , ("us$", USD)
  , ("vnd", VND)
  , ("zar", ZAR)
  , ("tl", TRY)
  , ("lira", TRY)
  , ("₺", TRY)
  ]
-- | Matches a bare currency symbol or code (e.g. \"$\", \"eur\", \"rs.\")
-- and produces a currency-only amount with no numeric value attached.
--
-- Fixes relative to the original alternation:
--   * @sgb@ was a typo for @sgd@ — the 'currencies' map only has an
--     @\"sgd\"@ key, so SGD could never be produced (and a literal
--     \"sgb\" matched the regex but then failed the map lookup).
--   * @czk@ and @rmb@ exist in 'currencies' but were absent from the
--     regex, making those spellings unreachable. They are inserted
--     before the shorter alternatives @c@ and @rm@ so the longer code
--     wins at the same position.
ruleCurrencies :: Rule
ruleCurrencies = Rule
  { name = "currencies"
  , pattern =
    [ regex "(aed|aud|bgn|brl|byn|¢|cad|chf|cny|czk|c|\\$|dinars?|dkk|dollars?|egp|(e|€)uro?s?|€|gbp|gel|\x20BE|hkd|hrk|idr|ils|₪|inr|iqd|jmd|jod|¥|jpy|lari|krw|kwd|lbp|mad|₮|mnt|tugriks?|myr|rmb|rm|nis|nok|nzd|£|pkr|pln|pta?s?|qar|₽|rs\\.?|riy?als?|ron|rub|rupees?|sar|sek|sgd|shekels?|thb|ttd|₴|uah|us(d|\\$)|vnd|yen|yuan|zar|tl|lira|₺)"
    ]
  , prod = \tokens -> case tokens of
      (Token RegexMatch (GroupMatch (match:_)):_) -> do
        -- Normalise to lower case before consulting the lookup table.
        c <- HashMap.lookup (Text.toLower match) currencies
        Just . Token AmountOfMoney $ currencyOnly c
      _ -> Nothing
  }
-- | Combine a positive numeral with a currency-only token
-- (e.g. \"5 dollars\"): attaches the numeric value to the currency.
ruleAmountUnit :: Rule
ruleAmountUnit = Rule
  { name = "<amount> <unit>"
  , pattern =
    [ Predicate isPositive
    , Predicate isCurrencyOnly
    ]
  , prod = \tokens -> case tokens of
      -- First token: the number; second: the bare currency.
      (Token Numeral NumeralData{TNumeral.value = v}:
       Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.currency = c}:
       _) -> Just . Token AmountOfMoney . withValue v $ currencyOnly c
      _ -> Nothing
  }
-- | Treat a bare positive numeral as a /latent/ amount of money with
-- unknown currency; latent tokens only surface when the surrounding
-- context resolves them to this dimension.
ruleAmountLatent :: Rule
ruleAmountLatent = Rule
  { name = "<amount> (latent)"
  , pattern =
    [ Predicate isPositive
    ]
  , prod = \tokens -> case tokens of
      (Token Numeral NumeralData{TNumeral.value = v}:_) ->
        Just . Token AmountOfMoney . mkLatent $ valueOnly v
      _ -> Nothing
  }
-- | Every rule this dimension registers with the Duckling engine.
rules :: [Rule]
rules = [ruleAmountUnit, ruleAmountLatent, ruleCurrencies]
| facebookincubator/duckling | Duckling/AmountOfMoney/Rules.hs | bsd-3-clause | 4,127 | 0 | 17 | 885 | 1,384 | 875 | 509 | 150 | 2 |
{-|
Module : IRTS.Lang
Description : Internal representation of Idris' constructs.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE DeriveFunctor, DeriveGeneric, PatternGuards #-}
module IRTS.Lang where
import Idris.Core.CaseTree
import Idris.Core.TT
import Control.Applicative hiding (Const)
import Control.Monad.State hiding (lift)
import Data.List
import Debug.Trace
import GHC.Generics (Generic)
data Endianness = Native | BE | LE deriving (Show, Eq)
data LVar = Loc Int | Glob Name
deriving (Show, Eq)
-- ASSUMPTION: All variable bindings have unique names here
-- Constructors commented as lifted are not present in the LIR provided to the different backends.
data LExp = LV LVar
| LApp Bool LExp [LExp] -- True = tail call
| LLazyApp Name [LExp] -- True = tail call
| LLazyExp LExp -- lifted out before compiling
| LForce LExp -- make sure Exp is evaluted
| LLet Name LExp LExp -- name just for pretty printing
| LLam [Name] LExp -- lambda, lifted out before compiling
| LProj LExp Int -- projection
| LCon (Maybe LVar) -- Location to reallocate, if available
Int Name [LExp]
| LCase CaseType LExp [LAlt]
| LConst Const
| LForeign FDesc -- Function descriptor (usually name as string)
FDesc -- Return type descriptor
[(FDesc, LExp)] -- first LExp is the FFI type description
| LOp PrimFn [LExp]
| LNothing
| LError String
deriving Eq
data FDesc = FCon Name
| FStr String
| FUnknown
| FIO FDesc
| FApp Name [FDesc]
deriving (Show, Eq)
data Export = ExportData FDesc -- Exported data descriptor (usually string)
| ExportFun Name -- Idris name
FDesc -- Exported function descriptor
FDesc -- Return type descriptor
[FDesc] -- Argument types
deriving (Show, Eq)
data ExportIFace = Export Name -- FFI descriptor
String -- interface file
[Export]
deriving (Show, Eq)
-- Primitive operators. Backends are not *required* to implement all
-- of these, but should report an error if they are unable
data PrimFn = LPlus ArithTy | LMinus ArithTy | LTimes ArithTy
| LUDiv IntTy | LSDiv ArithTy | LURem IntTy | LSRem ArithTy
| LAnd IntTy | LOr IntTy | LXOr IntTy | LCompl IntTy
| LSHL IntTy | LLSHR IntTy | LASHR IntTy
| LEq ArithTy | LLt IntTy | LLe IntTy | LGt IntTy | LGe IntTy
| LSLt ArithTy | LSLe ArithTy | LSGt ArithTy | LSGe ArithTy
| LSExt IntTy IntTy | LZExt IntTy IntTy | LTrunc IntTy IntTy
| LStrConcat | LStrLt | LStrEq | LStrLen
| LIntFloat IntTy | LFloatInt IntTy | LIntStr IntTy | LStrInt IntTy
| LFloatStr | LStrFloat | LChInt IntTy | LIntCh IntTy
| LBitCast ArithTy ArithTy -- Only for values of equal width
| LFExp | LFLog | LFSin | LFCos | LFTan | LFASin | LFACos | LFATan
| LFSqrt | LFFloor | LFCeil | LFNegate
| LStrHead | LStrTail | LStrCons | LStrIndex | LStrRev | LStrSubstr
| LReadStr | LWriteStr
-- system info
| LSystemInfo
| LFork
| LPar -- evaluate argument anywhere, possibly on another
-- core or another machine. 'id' is a valid implementation
| LExternal Name
| LNoOp
deriving (Show, Eq, Generic)
-- Supported target languages for foreign calls
data FCallType = FStatic | FObject | FConstructor
deriving (Show, Eq)
data FType = FArith ArithTy
| FFunction
| FFunctionIO
| FString
| FUnit
| FPtr
| FManagedPtr
| FCData
| FAny
deriving (Show, Eq)
-- FIXME: Why not use this for all the IRs now?
data LAlt' e = LConCase Int Name [Name] e
| LConstCase Const e
| LDefaultCase e
deriving (Show, Eq, Functor)
type LAlt = LAlt' LExp
data LDecl = LFun [LOpt] Name [Name] LExp -- options, name, arg names, def
| LConstructor Name Int Int -- constructor name, tag, arity
deriving (Show, Eq)
type LDefs = Ctxt LDecl
data LOpt = Inline | NoInline
deriving (Show, Eq)
-- | Assign runtime tags to constructor declarations. Constructors whose
-- tag is still the placeholder (-1) receive consecutive tags starting at
-- @i@; constructors that already carry a tag, and non-constructor
-- declarations, pass through unchanged. Returns the next unused tag
-- together with the declarations in their original order (the
-- accumulator is reversed at the end).
addTags :: Int -> [(Name, LDecl)] -> (Int, [(Name, LDecl)])
addTags i ds = tag i ds []
  where tag i ((n, LConstructor n' (-1) a) : as) acc
            = tag (i + 1) as ((n, LConstructor n' i a) : acc)
        tag i ((n, LConstructor n' t a) : as) acc
            = tag i as ((n, LConstructor n' t a) : acc)
        tag i (x : as) acc = tag i as (x : acc)
        tag i [] acc = (i, reverse acc)
-- | State threaded through lambda lifting: the name of the function
-- currently being lifted, a counter for generating fresh names, and the
-- new top-level definitions produced so far (most recent first).
data LiftState = LS Name Int [(Name, LDecl)]

-- | Name for the @i@-th definition lifted out of the given function,
-- preserving any namespace wrapper.
-- NOTE(review): no explicit type signature in the original; left as-is.
lname (NS n x) i = NS (lname n i) x
lname (UN n) i = MN i n
lname x i = sMN i (showCG x ++ "_lam")
-- | Lambda-lift every definition in the program, concatenating each
-- definition's (possibly rewritten) entry plus its generated helpers.
--
-- Idiom fix: @\\(x, d) -> lambdaLift x d@ is just @uncurry lambdaLift@,
-- and the argument can be eta-reduced.
liftAll :: [(Name, LDecl)] -> [(Name, LDecl)]
liftAll = concatMap (uncurry lambdaLift)
-- | Lambda-lift a single definition: run 'lift' over a function body and
-- return the rewritten definition together with all helper definitions
-- accumulated in the 'LiftState'. Non-function declarations (e.g.
-- constructors) pass through untouched.
lambdaLift :: Name -> LDecl -> [(Name, LDecl)]
lambdaLift n (LFun opts _ args e)
    = let (e', (LS _ _ decls)) = runState (lift args e) (LS n 0 []) in
          (n, LFun opts n args e') : decls
lambdaLift n x = [(n, x)]
-- | Produce a fresh name derived (via 'lname') from the function being
-- lifted, bumping the counter held in the lifting state.
getNextName :: State LiftState Name
getNextName = do
    LS base counter decls <- get
    put (LS base (counter + 1) decls)
    return (lname base counter)
-- | Record a freshly generated top-level definition in the lifting
-- state, prepending it to the accumulated declaration list.
addFn :: Name -> LDecl -> State LiftState ()
addFn fn decl = do
    LS base counter decls <- get
    put (LS base counter ((fn, decl) : decls))
-- | Core of lambda lifting: walk an expression, replacing every lambda
-- and (non-trivial) lazy subexpression with a call to a freshly named
-- top-level function recorded via 'addFn'. @env@ is the list of names
-- in scope at this point; each lifted function takes the subset of
-- @env@ it actually uses ('usedIn') as extra leading arguments, and the
-- replacement call site passes those names explicitly.
lift :: [Name] -> LExp -> State LiftState LExp
lift env (LV v) = return (LV v) -- Lifting happens before these can exist...
-- Application of a known global: only the arguments need lifting.
lift env (LApp tc (LV (Glob n)) args) = do args' <- mapM (lift env) args
                                           return (LApp tc (LV (Glob n)) args')
-- Application of a computed function: lift the function part out into a
-- new top-level definition closed over the whole environment.
lift env (LApp tc f args) = do f' <- lift env f
                               fn <- getNextName
                               addFn fn (LFun [Inline] fn env f')
                               args' <- mapM (lift env) args
                               return (LApp tc (LV (Glob fn)) (map (LV . Glob) env ++ args'))
lift env (LLazyApp n args) = do args' <- mapM (lift env) args
                                return (LLazyApp n args')
-- A lazy constant needs no thunk at all.
lift env (LLazyExp (LConst c)) = return (LConst c)
-- lift env (LLazyExp (LApp tc (LV (Glob f)) args))
--      = lift env (LLazyApp f args)
-- General lazy expression: lift it into a NoInline function and replace
-- it with a lazy application of that function to its used variables.
lift env (LLazyExp e) = do e' <- lift env e
                           let usedArgs = nub $ usedIn env e'
                           fn <- getNextName
                           addFn fn (LFun [NoInline] fn usedArgs e')
                           return (LLazyApp fn (map (LV . Glob) usedArgs))
lift env (LForce e) = do e' <- lift env e
                         return (LForce e')
-- Let extends the environment for the body only.
lift env (LLet n v e) = do v' <- lift env v
                           e' <- lift (env ++ [n]) e
                           return (LLet n v' e')
-- Lambda: the lifted function takes the used environment variables
-- followed by the lambda's own arguments; the call site supplies only
-- the environment part, leaving a partial application.
lift env (LLam args e) = do e' <- lift (env ++ args) e
                            let usedArgs = nub $ usedIn env e'
                            fn <- getNextName
                            addFn fn (LFun [Inline] fn (usedArgs ++ args) e')
                            return (LApp False (LV (Glob fn)) (map (LV . Glob) usedArgs))
lift env (LProj t i) = do t' <- lift env t
                          return (LProj t' i)
lift env (LCon loc i n args) = do args' <- mapM (lift env) args
                                  return (LCon loc i n args')
-- Case alternatives extend the environment with their pattern bindings.
lift env (LCase up e alts) = do alts' <- mapM liftA alts
                                e' <- lift env e
                                return (LCase up e' alts')
  where
    liftA (LConCase i n args e) = do e' <- lift (env ++ args) e
                                     return (LConCase i n args e')
    liftA (LConstCase c e) = do e' <- lift env e
                                return (LConstCase c e')
    liftA (LDefaultCase e) = do e' <- lift env e
                                return (LDefaultCase e')
lift env (LConst c) = return (LConst c)
-- Foreign calls: lift each argument expression, keeping its FFI
-- type descriptor paired with it.
lift env (LForeign t s args) = do args' <- mapM (liftF env) args
                                  return (LForeign t s args')
  where
    liftF env (t, e) = do e' <- lift env e
                          return (t, e')
lift env (LOp f args) = do args' <- mapM (lift env) args
                           return (LOp f args')
lift env (LError str) = return $ LError str
lift env LNothing = return LNothing
allocUnique :: LDefs -> (Name, LDecl) -> (Name, LDecl)
allocUnique defs p@(n, LConstructor _ _ _) = p
allocUnique defs (n, LFun opts fn args e)
= let e' = evalState (findUp e) [] in
(n, LFun opts fn args e')
where
-- Keep track of 'updatable' names in the state, i.e. names whose heap
-- entry may be reused, along with the arity which was there
findUp :: LExp -> State [(Name, Int)] LExp
findUp (LApp t (LV (Glob n)) as)
| Just (LConstructor _ i ar) <- lookupCtxtExact n defs,
ar == length as
= findUp (LCon Nothing i n as)
findUp (LV (Glob n))
| Just (LConstructor _ i 0) <- lookupCtxtExact n defs
= return $ LCon Nothing i n [] -- nullary cons are global, no need to update
findUp (LApp t f as) = LApp t <$> findUp f <*> mapM findUp as
findUp (LLazyApp n as) = LLazyApp n <$> mapM findUp as
findUp (LLazyExp e) = LLazyExp <$> findUp e
findUp (LForce e) = LForce <$> findUp e
-- use assumption that names are unique!
findUp (LLet n val sc) = LLet n <$> findUp val <*> findUp sc
findUp (LLam ns sc) = LLam ns <$> findUp sc
findUp (LProj e i) = LProj <$> findUp e <*> return i
findUp (LCon (Just l) i n es) = LCon (Just l) i n <$> mapM findUp es
findUp (LCon Nothing i n es)
= do avail <- get
v <- findVar [] avail (length es)
LCon v i n <$> mapM findUp es
findUp (LForeign t s es)
= LForeign t s <$> mapM (\ (t, e) -> do e' <- findUp e
return (t, e')) es
findUp (LOp o es) = LOp o <$> mapM findUp es
findUp (LCase Updatable e@(LV (Glob n)) as)
= LCase Updatable e <$> mapM (doUpAlt n) as
findUp (LCase t e as)
= LCase t <$> findUp e <*> mapM findUpAlt as
findUp t = return t
findUpAlt (LConCase i t args rhs) = do avail <- get
rhs' <- findUp rhs
put avail
return $ LConCase i t args rhs'
findUpAlt (LConstCase i rhs) = LConstCase i <$> findUp rhs
findUpAlt (LDefaultCase rhs) = LDefaultCase <$> findUp rhs
doUpAlt n (LConCase i t args rhs)
= do avail <- get
put ((n, length args) : avail)
rhs' <- findUp rhs
put avail
return $ LConCase i t args rhs'
doUpAlt n (LConstCase i rhs) = LConstCase i <$> findUp rhs
doUpAlt n (LDefaultCase rhs) = LDefaultCase <$> findUp rhs
findVar _ [] i = return Nothing
findVar acc ((n, l) : ns) i | l == i = do put (reverse acc ++ ns)
return (Just (Glob n))
findVar acc (n : ns) i = findVar (n : acc) ns i
-- Return variables in list which are used in the expression
usedArg env n | n `elem` env = [n]
              | otherwise = []

-- | Occurrence check: which of the names in @env@ appear free in the
-- expression? Binders ('LLet', 'LLam') remove their bound names from
-- the search set within their scope. May return duplicates — callers
-- (e.g. 'lift') apply 'nub'.
usedIn :: [Name] -> LExp -> [Name]
usedIn env (LV (Glob n)) = usedArg env n
usedIn env (LApp _ e args) = usedIn env e ++ concatMap (usedIn env) args
usedIn env (LLazyApp n args) = concatMap (usedIn env) args ++ usedArg env n
usedIn env (LLazyExp e) = usedIn env e
usedIn env (LForce e) = usedIn env e
usedIn env (LLet n v e) = usedIn env v ++ usedIn (env \\ [n]) e
usedIn env (LLam ns e) = usedIn (env \\ ns) e
-- Constructors may carry a reallocation slot; count its variable too.
-- NOTE(review): a 'Just (Loc _)' slot would fall through this match and
-- crash — presumably 'Loc's cannot occur before lifting; confirm.
usedIn env (LCon v i n args) = let rest = concatMap (usedIn env) args in
                                   case v of
                                        Nothing -> rest
                                        Just (Glob n) -> usedArg env n ++ rest
usedIn env (LProj t i) = usedIn env t
usedIn env (LCase up e alts) = usedIn env e ++ concatMap (usedInA env) alts
  where usedInA env (LConCase i n ns e) = usedIn env e
        usedInA env (LConstCase c e) = usedIn env e
        usedInA env (LDefaultCase e) = usedIn env e
usedIn env (LForeign _ _ args) = concatMap (usedIn env) (map snd args)
usedIn env (LOp f args) = concatMap (usedIn env) args
usedIn env _ = []
-- | Substitute @new@ for every occurrence of the global variable @n@ in
-- an expression. Purely structural: binders are not checked for
-- capture, relying on the file-wide assumption that variable bindings
-- have unique names.
lsubst :: Name -> LExp -> LExp -> LExp
lsubst n new (LV (Glob x)) | n == x = new
lsubst n new (LApp t e args) = let e' = lsubst n new e
                                   args' = map (lsubst n new) args in
                                   LApp t e' args'
lsubst n new (LLazyApp fn args) = let args' = map (lsubst n new) args in
                                      LLazyApp fn args'
lsubst n new (LLazyExp e) = LLazyExp (lsubst n new e)
lsubst n new (LForce e) = LForce (lsubst n new e)
lsubst n new (LLet v val sc) = LLet v (lsubst n new val) (lsubst n new sc)
lsubst n new (LLam ns sc) = LLam ns (lsubst n new sc)
lsubst n new (LProj e i) = LProj (lsubst n new e) i
lsubst n new (LCon lv t cn args) = let args' = map (lsubst n new) args in
                                       LCon lv t cn args'
lsubst n new (LOp op args) = let args' = map (lsubst n new) args in
                                 LOp op args'
-- Foreign arguments keep their FFI descriptors; only the expression
-- halves are rewritten.
lsubst n new (LForeign fd rd args)
    = let args' = map (\(d, a) -> (d, lsubst n new a)) args in
          LForeign fd rd args'
-- Alternatives are rewritten via their Functor instance (LAlt' is
-- derived Functor over the carried expression).
lsubst n new (LCase t e alts) = let e' = lsubst n new e
                                    alts' = map (fmap (lsubst n new)) alts in
                                    LCase t e' alts'
-- Everything else (constants, errors, LNothing, non-matching vars) is
-- left untouched.
lsubst n new tm = tm
instance Show LExp where
show e = show' [] "" e where
show' env ind (LV (Loc i)) = env!!i
show' env ind (LV (Glob n)) = show n
show' env ind (LLazyApp e args)
= show e ++ "|(" ++ showSep ", " (map (show' env ind) args) ++")"
show' env ind (LApp _ e args)
= show' env ind e ++ "(" ++ showSep ", " (map (show' env ind) args) ++")"
show' env ind (LLazyExp e) = "lazy{ " ++ show' env ind e ++ " }"
show' env ind (LForce e) = "force{ " ++ show' env ind e ++ " }"
show' env ind (LLet n v e)
= "let " ++ show n ++ " = " ++ show' env ind v
++ " in " ++ show' (env ++ [show n]) ind e
show' env ind (LLam args e)
= "\\ " ++ showSep "," (map show args)
++ " => " ++ show' (env ++ (map show args)) ind e
show' env ind (LProj t i) = show t ++ "!" ++ show i
show' env ind (LCon loc i n args)
= atloc loc ++ show n ++ "(" ++ showSep ", " (map (show' env ind) args) ++ ")"
where atloc Nothing = ""
atloc (Just l) = "@" ++ show (LV l) ++ ":"
show' env ind (LCase up e alts)
= "case" ++ update ++ show' env ind e ++ " of \n" ++ fmt alts
where
update = case up of
Shared -> " "
Updatable -> "! "
fmt [] = ""
fmt [alt]
= "\t" ++ ind ++ "| " ++ showAlt env (ind ++ " ") alt
fmt (alt:as)
= "\t" ++ ind ++ "| " ++ showAlt env (ind ++ ". ") alt
++ "\n" ++ fmt as
show' env ind (LConst c) = show c
show' env ind (LForeign ty n args) = concat
[ "foreign{ "
, show n ++ "("
, showSep ", " (map (\(ty,x) -> show' env ind x ++ " : " ++ show ty) args)
, ") : "
, show ty
, " }"
]
show' env ind (LOp f args)
= show f ++ "(" ++ showSep ", " (map (show' env ind) args) ++ ")"
show' env ind (LError str) = "error " ++ show str
show' env ind LNothing = "____"
showAlt env ind (LConCase _ n args e)
= show n ++ "(" ++ showSep ", " (map show args) ++ ") => "
++ show' env ind e
showAlt env ind (LConstCase c e) = show c ++ " => " ++ show' env ind e
showAlt env ind (LDefaultCase e) = "_ => " ++ show' env ind e
| ben-schulz/Idris-dev | src/IRTS/Lang.hs | bsd-3-clause | 16,172 | 0 | 17 | 6,031 | 6,137 | 3,091 | 3,046 | 312 | 22 |
{-
- Builder.hs
- By Steven Smith
-}
module SpirV.Builder
( module SpirV.Builder
, R.buildModule
, R.nop
, R.source
, R.sourceExtension
, R.compileFlag
, R.extension
, R.memoryModel
, R.store
, R.copyMemory
, R.copyMemorySized
, R.functionEnd
, R.kill
, R.return_
, R.returnValue
, R.unreachable
, R.lifetimeStart
, R.lifetimeStop
, R.atomicInit
, R.atomicStore
, R.emitVertex
, R.endPrimitive
, R.emitStreamVertex
, R.endStreamPrimitive
, R.controlBarrier
, R.memoryBarrier
, R.retainEvent
, R.releaseEvent
, R.setUserEventStatus
, R.captureEventProfilingInfo
-- Re-exports from SpirV.Instructions
, Id(..)
, Signedness(..)
, SamplerContent(..)
, ArrayedContent(..)
, DepthComparison(..)
, MultiSampled(..)
, SamplerParam(..)
, ExecutionModel(..)
, AddressingModel(..)
, MemoryModel(..)
, ExecutionMode(..)
, StorageClass(..)
, Dim(..)
, SamplerAddressingMode(..)
, SamplerFilterMode(..)
, FPFastMathMode(..)
, FPRoundingMode(..)
, LinkageType(..)
, AccessQualifier(..)
, FunctionParameterAttribute(..)
, Decoration(..)
, BuiltIn(..)
, SelectionControl(..)
, LoopControl(..)
, FunctionControl(..)
, MemorySemantics(..)
, MemoryAccess(..)
, ExecutionScope(..)
, GroupOperation(..)
, KernelEnqueueFlags(..)
, KernelProfilingInfo(..)
-- Re-exports from SpirV.Types
, Builder(..)
)
where
import Data.Bits
import Data.Int
import Data.Text (Text)
import Data.Word
import GHC.Float (castDoubleToWord64, castFloatToWord32)
import Unsafe.Coerce (unsafeCoerce)

import SpirV.Builder.Types
import SpirV.Instructions
import qualified SpirV.Builder.Raw as R
-- | Declare an undefined value of the given result type.
undef :: TypeId -> Builder TypeId
undef ty = TypeId <$> R.undef (runTypeId ty)

-- | Import an extended instruction set by name, yielding a handle usable
-- with 'extInst'.
extInstImport :: Text -> Builder ExtSet
extInstImport setName = ExtSet <$> R.extInstImport setName

-- | Execute one instruction from an imported extended instruction set.
-- The 'Word32' is the instruction's enumerant within that set; the Ids
-- are its operands.
extInst :: TypeId -> ExtSet -> Word32 -> [Id] -> Builder Id
extInst resultTy (ExtSet set) enumerant operands =
    R.extInst (runTypeId resultTy) set enumerant operands
-- | Declare a string literal.  Used solely with 'line' to carry the name
-- of the file this module was generated from.
string :: Text -> Builder FileId
string src = FileId <$> R.string src

-- | Attach a debug name to the result of the wrapped builder action and
-- pass the result through.
name :: IsId id => Text -> Builder id -> Builder id
name debugName b =
    b >>= \i -> R.name (toId i) debugName >> return i

-- | Attach source line/column debug info (relative to the given file) to
-- the result of the wrapped builder action and pass the result through.
line :: IsId id => FileId -> Word32 -> Word32 -> Builder id -> Builder id
line (FileId file) lineNo colNo b =
    b >>= \i -> R.line (toId i) file lineNo colNo >> return i
-- | Declare a fresh decoration group, to be populated with 'decorate' and
-- applied with 'groupDecorate' / 'groupMemberDecorate'.
decorationGroup :: Builder DecorationGroup
decorationGroup = DecorationGroup <$> R.decorationGroup

-- | Apply a decoration to the result of the wrapped builder action (which
-- may be a decoration group) and pass the result through.
decorate :: IsId id => Decoration -> Builder id -> Builder id
decorate dec b =
    b >>= \i -> R.decorate (toId i) dec >> return i

-- | Decorate one member (by zero-based index) of a structure type; the
-- target may also be a decoration group.
memberDecorate :: IsId id => id -> Word32 -> Decoration -> Builder ()
memberDecorate target memberIdx dec =
    R.memberDecorate (toId target) memberIdx dec

-- | Apply every decoration in the group to each of the given targets.
groupDecorate :: DecorationGroup -> [Id] -> Builder ()
groupDecorate (DecorationGroup grp) targets = R.groupDecorate grp targets

-- | Apply every decoration in the group to each of the given structure
-- member targets.
groupMemberDecorate :: DecorationGroup -> [Id] -> Builder ()
groupMemberDecorate (DecorationGroup grp) targets =
    R.groupMemberDecorate grp targets
-- | Declare the void type.
typeVoid :: Builder TypeId
typeVoid = TypeId <$> R.typeVoid

-- | Declare the boolean type.
typeBool :: Builder TypeId
typeBool = TypeId <$> R.typeBool

-- | Declare an integer type of the given bit width and signedness
-- (e.g. @typeInt 32 Signed@ for a 32-bit signed integer).
typeInt :: Word32 -> Signedness -> Builder TypeId
typeInt width sign = TypeId <$> R.typeInt width sign

-- | Declare a floating point type of the given bit width.
typeFloat :: Word32 -> Builder TypeId
typeFloat width = TypeId <$> R.typeFloat width

-- | Declare a vector type with the given component type and component
-- count (count must be >= 2).
typeVector :: TypeId -> Word32 -> Builder TypeId
typeVector compTy count = TypeId <$> R.typeVector (runTypeId compTy) count

-- | Declare a matrix type with the given column type (must be a vector
-- type) and column count (count must be >= 2).
typeMatrix :: TypeId -> Word32 -> Builder TypeId
typeMatrix colTy count = TypeId <$> R.typeMatrix (runTypeId colTy) count

-- | Declare a sampler type.  The 'TypeId' is the component type produced
-- when sampling through this sampler.
typeSampler :: TypeId -> Dim -> SamplerContent -> ArrayedContent
            -> DepthComparison -> MultiSampled -> Maybe AccessQualifier
            -> Builder TypeId
typeSampler sampledTy dim content arrayed depthCmp multi access =
    TypeId <$> R.typeSampler (runTypeId sampledTy) dim content arrayed
                             depthCmp multi access
-- | Declare the filter type.
typeFilter :: Builder TypeId
typeFilter = TypeId <$> R.typeFilter

-- | Declare a fixed-length array type.  SPIR-V requires the length to be
-- the result of a constant instruction, so this also introduces an
-- unsigned 32-bit integer type and a constant holding the length before
-- declaring the array itself.
typeArray :: TypeId -> Word32 -> Builder TypeId
typeArray elemTy len = do
    uint32 <- R.typeInt 32 Unsigned
    lenId  <- R.constant uint32 [len]
    TypeId <$> R.typeArray (runTypeId elemTy) lenId

-- | Declare an array type whose length is only known at run time.
typeRuntimeArray :: TypeId -> Builder TypeId
typeRuntimeArray elemTy = TypeId <$> R.typeRuntimeArray (runTypeId elemTy)
-- | Declare a structure type from its member types, in order.
typeStruct :: [TypeId] -> Builder TypeId
typeStruct memberTys = TypeId <$> R.typeStruct (map toId memberTys)

-- | Declare a structure type and attach a debug name to each member.
-- Members are given as (type, name) pairs in declaration order; names are
-- bound to members by their zero-based index.
typeStructNamed :: [(TypeId, Text)] -> Builder TypeId
typeStructNamed members = do
    structId <- R.typeStruct [runTypeId ty | (ty, _) <- members]
    sequence_ [ R.memberName structId idx nm
              | ((_, nm), idx) <- zip members [0..] ]
    return (TypeId structId)

-- | Declare a named opaque type.
typeOpaque :: Text -> Builder TypeId
typeOpaque nm = TypeId <$> R.typeOpaque nm
-- | Declare a pointer type in the given storage class, pointing at the
-- given type.
typePointer :: StorageClass -> TypeId -> Builder TypeId
typePointer storage pointee =
    TypeId <$> R.typePointer storage (runTypeId pointee)

-- | Declare a function type from its return type and parameter types
-- (in order).
typeFunction :: TypeId -> [TypeId] -> Builder TypeId
typeFunction retTy paramTys =
    TypeId <$> R.typeFunction (runTypeId retTy) (map toId paramTys)

-- | Declare a pipe type carrying the given data type.
typePipe :: TypeId -> AccessQualifier -> Builder TypeId
typePipe dataTy access = TypeId <$> R.typePipe (runTypeId dataTy) access
-- | Declare the boolean constant @true@ (introducing the bool type).
constantTrue :: Builder Id
constantTrue = R.typeBool >>= R.constantTrue

-- | Declare the boolean constant @false@ (introducing the bool type).
constantFalse :: Builder Id
constantFalse = R.typeBool >>= R.constantFalse

-- | Declare a 32-bit signed integer constant (introducing the type).
constantInt32 :: Int32 -> Builder Id
constantInt32 v =
    R.typeInt 32 Signed >>= \int32 -> R.constant int32 [fromIntegral v]
-- | Declare a 64-bit signed integer constant (introducing the type).
-- SPIR-V encodes wide literals as multiple 32-bit words, low-order word
-- first, so the value is split into its low and high 32-bit halves.
constantInt64 :: Int64 -> Builder Id
constantInt64 i = do
    int64 <- R.typeInt 64 Signed
    R.constant int64 [lowOrder, highOrder]
  where
    i' = fromIntegral i :: Word64
    -- Low half must keep all 32 low-order bits; the old 0xFFFF mask
    -- silently discarded bits 16..31 of the value.
    lowOrder = fromIntegral (i' .&. 0xFFFFFFFF)
    highOrder = fromIntegral (shiftR i' 32)
-- | Declare a 32-bit unsigned integer constant (introducing the type).
constantWord32 :: Word32 -> Builder Id
constantWord32 v =
    R.typeInt 32 Unsigned >>= \uint32 -> R.constant uint32 [v]
-- | Declare a 64-bit unsigned integer constant (introducing the type).
-- SPIR-V encodes wide literals as multiple 32-bit words, low-order word
-- first, so the value is split into its low and high 32-bit halves.
constantWord64 :: Word64 -> Builder Id
constantWord64 w = do
    uint64 <- R.typeInt 64 Unsigned
    R.constant uint64 [lowOrder, highOrder]
  where
    -- Low half must keep all 32 low-order bits; the old 0xFFFF mask
    -- silently discarded bits 16..31 of the value.
    lowOrder = fromIntegral (w .&. 0xFFFFFFFF)
    highOrder = fromIntegral (shiftR w 32)
-- | Declare a 32-bit float constant (introducing the type).  The IEEE-754
-- bit pattern is obtained with 'castFloatToWord32'; 'unsafeCoerce'
-- between Float and Word32 is not a defined way to reinterpret bits
-- (the two live in different register classes), which is why base
-- provides the dedicated cast.
constantFloat :: Float -> Builder Id
constantFloat f = do
    float32 <- R.typeFloat 32
    R.constant float32 [castFloatToWord32 f]
-- | Declare a 64-bit float constant (introducing the type).  The IEEE-754
-- bit pattern is obtained with 'castDoubleToWord64' (a defined
-- bit-reinterpretation, unlike 'unsafeCoerce'), then split into two
-- 32-bit words, low-order word first, as SPIR-V requires.
constantDouble :: Double -> Builder Id
constantDouble d = do
    float64 <- R.typeFloat 64
    R.constant float64 [lowOrder, highOrder]
  where
    d' = castDoubleToWord64 d
    -- Low half must keep all 32 low-order bits; the old 0xFFFF mask
    -- silently discarded bits 16..31 of the bit pattern.
    lowOrder = fromIntegral (d' .&. 0xFFFFFFFF)
    highOrder = fromIntegral (shiftR d' 32)
-- | Declare a composite constant.  The result type must be a composite
-- type; the Ids are constants for its constituents, in order.
constantComposite :: TypeId -> [Id] -> Builder Id
constantComposite = R.constantComposite . runTypeId

-- | Declare a sampler constant.  The result type must be a sampler type.
constantSampler :: TypeId -> SamplerAddressingMode -> SamplerParam
                -> SamplerFilterMode -> Builder Id
constantSampler = R.constantSampler . runTypeId

-- | Declare a null-pointer constant.  The result type must be a pointer
-- type.
constantNullPointer :: TypeId -> Builder Id
constantNullPointer = R.constantNullPointer . runTypeId
-- | Declare the specialization constant @true@ (introducing bool).
specConstantTrue :: Builder Id
specConstantTrue = R.typeBool >>= R.specConstantTrue

-- | Declare the specialization constant @false@ (introducing bool).
specConstantFalse :: Builder Id
specConstantFalse = R.typeBool >>= R.specConstantFalse

-- | Declare a 32-bit signed integer specialization constant.
specConstantInt32 :: Int32 -> Builder Id
specConstantInt32 v =
    R.typeInt 32 Signed >>= \int32 -> R.specConstant int32 [fromIntegral v]
-- | Declare a 64-bit signed integer specialization constant.  SPIR-V
-- encodes wide literals as multiple 32-bit words, low-order word first.
specConstantInt64 :: Int64 -> Builder Id
specConstantInt64 i = do
    int64 <- R.typeInt 64 Signed
    R.specConstant int64 [lowOrder, highOrder]
  where
    i' = fromIntegral i :: Word64
    -- Low half must keep all 32 low-order bits; the old 0xFFFF mask
    -- silently discarded bits 16..31 of the value.
    lowOrder = fromIntegral (i' .&. 0xFFFFFFFF)
    highOrder = fromIntegral (shiftR i' 32)
-- | Declare a 32-bit unsigned integer specialization constant.
specConstantWord32 :: Word32 -> Builder Id
specConstantWord32 v =
    R.typeInt 32 Unsigned >>= \uint32 -> R.specConstant uint32 [v]
-- | Declare a 64-bit unsigned integer specialization constant.  SPIR-V
-- encodes wide literals as multiple 32-bit words, low-order word first.
specConstantWord64 :: Word64 -> Builder Id
specConstantWord64 w = do
    uint64 <- R.typeInt 64 Unsigned
    R.specConstant uint64 [lowOrder, highOrder]
  where
    -- Low half must keep all 32 low-order bits; the old 0xFFFF mask
    -- silently discarded bits 16..31 of the value.
    lowOrder = fromIntegral (w .&. 0xFFFFFFFF)
    highOrder = fromIntegral (shiftR w 32)
-- | Declare a 32-bit float specialization constant.  The IEEE-754 bit
-- pattern is obtained with 'castFloatToWord32'; 'unsafeCoerce' between
-- Float and Word32 is not a defined bit-reinterpretation.
specConstantFloat :: Float -> Builder Id
specConstantFloat f = do
    float32 <- R.typeFloat 32
    R.specConstant float32 [castFloatToWord32 f]
-- | Declare a 64-bit float specialization constant.  The IEEE-754 bit
-- pattern is obtained with 'castDoubleToWord64' (a defined
-- bit-reinterpretation, unlike 'unsafeCoerce'), then split into two
-- 32-bit words, low-order word first, as SPIR-V requires.
specConstantDouble :: Double -> Builder Id
specConstantDouble d = do
    float64 <- R.typeFloat 64
    R.specConstant float64 [lowOrder, highOrder]
  where
    d' = castDoubleToWord64 d
    -- Low half must keep all 32 low-order bits; the old 0xFFFF mask
    -- silently discarded bits 16..31 of the bit pattern.
    lowOrder = fromIntegral (d' .&. 0xFFFFFFFF)
    highOrder = fromIntegral (shiftR d' 32)
-- | Declare a composite specialization constant.  The result type must be
-- a composite type; the Ids are constants for its constituents, in order.
specConstantComposite :: TypeId -> [Id] -> Builder Id
specConstantComposite = R.specConstantComposite . runTypeId

-- | Declare a variable of the given type with an optional initializer.
-- Variables are always accessed through a pointer, so this first declares
-- a pointer type in the requested storage class and then declares the
-- variable with that pointer type; callers work in terms of the pointee
-- type only.
variable :: TypeId -> StorageClass -> Maybe Id -> Builder Id
variable ty storage initVal =
    R.typePointer storage (runTypeId ty) >>= \ptrTy ->
        R.variable ptrTy storage initVal

-- | Declare an array of variables.  The result type must already be a
-- pointer type; the final Id is the number of objects to allocate.
variableArray :: TypeId -> StorageClass -> Id -> Builder Id
variableArray = R.variableArray . runTypeId
-- | Load a value of the given result type through a pointer.
load :: TypeId -> Id -> MemoryAccess -> Builder Id
load = R.load . runTypeId

-- | Walk down into a composite object via the given indices, producing a
-- pointer to the reached element.  The storage class must match the one
-- the base object was declared with; a pointer type to the result type in
-- that storage class is declared behind the scenes.
accessChain :: TypeId -> StorageClass -> Id -> [Id] -> Builder Id
accessChain resultTy storage base idxs =
    R.typePointer storage (runTypeId resultTy) >>= \ptrTy ->
        R.accessChain ptrTy base idxs

-- | Like 'accessChain', but the resulting pointer is known to stay within
-- the bounds of the base object.  The storage class must match the one
-- the base object was declared with.
inBoundsAccessChain :: TypeId -> StorageClass -> Id -> [Id] -> Builder Id
inBoundsAccessChain resultTy storage base idxs =
    R.typePointer storage (runTypeId resultTy) >>= \ptrTy ->
        R.inBoundsAccessChain ptrTy base idxs

-- | Query the length of a run-time array that is a member (given by
-- index) of the given structure.
arrayLength :: TypeId -> Id -> Word32 -> Builder Id
arrayLength = R.arrayLength . runTypeId

-- | Form a pointer into a sampler's image from a texel coordinate and
-- sample number.
imagePointer :: TypeId -> Id -> Id -> Id -> Builder Id
imagePointer = R.imagePointer . runTypeId

-- | Query the memory semantics of a Generic pointer.  The SPIR-V result
-- type must be a 32-bit integer, so the type is declared here behind the
-- scenes rather than taken as a parameter.
genericPtrMemSemantics :: Id -> Builder Id
genericPtrMemSemantics ptr =
    R.typeInt 32 Unsigned >>= \uint32 ->
        R.genericPtrMemSemantics uint32 ptr
-- | Begin a function definition.  The first 'TypeId' is the return type;
-- the second is the function type (whose return type must agree).
function :: TypeId -> [FunctionControl] -> TypeId -> Builder Id
function retTy control fnTy =
    R.function (runTypeId retTy) control (runTypeId fnTy)

-- | Begin an entry-point function for the given execution model.  Entry
-- points must return void and take no parameters, so the void type, the
-- @void ()@ function type, the function itself, and the entry-point
-- declaration are all emitted here, removing that boilerplate.
entryPointFunction :: ExecutionModel -> Builder Id
entryPointFunction model = do
    voidTy <- R.typeVoid
    fnTy   <- R.typeFunction voidTy []
    fnId   <- R.function voidTy [] fnTy
    R.entryPoint model fnId
    return fnId

-- | Declare a parameter of the current function with the given type.
functionParameter :: TypeId -> Builder Id
functionParameter = R.functionParameter . runTypeId

-- | Call a function with the given arguments; the 'TypeId' is the
-- callee's return type.
functionCall :: TypeId -> Id -> [Id] -> Builder Id
functionCall = R.functionCall . runTypeId

-- | Combine a filterless sampler with a filter.  The result type must be
-- a sampler type carrying both a texture and a filter.
sampler :: TypeId -> Id -> Id -> Builder Id
sampler = R.sampler . runTypeId
-- | Sample a texture at a coordinate, with an optional implicit-LOD bias.
textureSample :: TypeId -> Id -> Id -> Maybe Id -> Builder Id
textureSample = R.textureSample . runTypeId

-- | Depth-comparison sample: sampler must be cube-arrayed
-- depth-comparison; coordinate is (u, v, w, array layer); the last Id is
-- the reference value.
textureSampleDref :: TypeId -> Id -> Id -> Id -> Builder Id
textureSampleDref = R.textureSampleDref . runTypeId

-- | Sample at an explicit level of detail.
textureSampleLod :: TypeId -> Id -> Id -> Id -> Builder Id
textureSampleLod = R.textureSampleLod . runTypeId

-- | Projective sample: coordinate is a 4-component float vector; optional
-- implicit-LOD bias.
textureSampleProj :: TypeId -> Id -> Id -> Maybe Id -> Builder Id
textureSampleProj = R.textureSampleProj . runTypeId

-- | Sample with explicit dx/dy derivatives for LOD calculation.
textureSampleGrad :: TypeId -> Id -> Id -> Id -> Id -> Builder Id
textureSampleGrad = R.textureSampleGrad . runTypeId

-- | Sample with an offset added before the texel lookup; optional
-- implicit-LOD bias.
textureSampleOffset :: TypeId -> Id -> Id -> Id -> Maybe Id -> Builder Id
textureSampleOffset = R.textureSampleOffset . runTypeId

-- | Projective sample at an explicit level of detail.
textureSampleProjLod :: TypeId -> Id -> Id -> Id -> Builder Id
textureSampleProjLod = R.textureSampleProjLod . runTypeId

-- | Projective sample with explicit dx/dy derivatives.
textureSampleProjGrad :: TypeId -> Id -> Id -> Id -> Id -> Builder Id
textureSampleProjGrad = R.textureSampleProjGrad . runTypeId

-- | Sample at an explicit LOD with an offset added before lookup.
textureSampleLodOffset :: TypeId -> Id -> Id -> Id -> Id -> Builder Id
textureSampleLodOffset = R.textureSampleLodOffset . runTypeId

-- | Projective sample with an offset added before lookup; optional
-- implicit-LOD bias.
textureSampleProjOffset :: TypeId -> Id -> Id -> Id -> Maybe Id -> Builder Id
textureSampleProjOffset = R.textureSampleProjOffset . runTypeId

-- | Sample with explicit derivatives and an offset added before lookup.
textureSampleGradOffset :: TypeId -> Id -> Id -> Id -> Id -> Id -> Builder Id
textureSampleGradOffset = R.textureSampleGradOffset . runTypeId

-- | Projective sample at an explicit LOD with an offset added before
-- lookup.
textureSampleProjLodOffset :: TypeId -> Id -> Id -> Id -> Id -> Builder Id
textureSampleProjLodOffset = R.textureSampleProjLodOffset . runTypeId

-- | Projective sample with explicit derivatives and an offset added
-- before lookup.
textureSampleProjGradOffset :: TypeId -> Id -> Id -> Id -> Id -> Id
                            -> Builder Id
textureSampleProjGradOffset = R.textureSampleProjGradOffset . runTypeId

-- | Fetch a single texel at an explicit LOD; sampler cannot have dim of
-- cube or buffer and must not be depth-comparison; coordinate is integer.
textureFetchTexelLod :: TypeId -> Id -> Id -> Id -> Builder Id
textureFetchTexelLod = R.textureFetchTexelLod . runTypeId

-- | Fetch a single texel with an offset added before lookup; same sampler
-- restrictions as 'textureFetchTexelLod'.
textureFetchTexelOffset :: TypeId -> Id -> Id -> Id -> Builder Id
textureFetchTexelOffset = R.textureFetchTexelOffset . runTypeId

-- | Fetch a given sample of a texel from a multi-sample texture.
textureFetchTexelSample :: TypeId -> Id -> Id -> Id -> Builder Id
textureFetchTexelSample = R.textureFetchTexelSample . runTypeId

-- | Fetch a texel from a buffer-dim sampler by scalar integer index.
textureFetchTexel :: TypeId -> Id -> Id -> Builder Id
textureFetchTexel = R.textureFetchTexel . runTypeId

-- | Gather one component (0-3) from the four sampled texels; sampler must
-- have dim 2D, rect, or cube.
textureGather :: TypeId -> Id -> Id -> Id -> Builder Id
textureGather = R.textureGather . runTypeId

-- | 'textureGather' with an offset added before the texel lookup.
textureGatherOffset :: TypeId -> Id -> Id -> Id -> Id -> Builder Id
textureGatherOffset = R.textureGatherOffset . runTypeId

-- | 'textureGather' with per-texel offsets: a constant array of size 4 of
-- 2-component integer vectors.
textureGatherOffsets :: TypeId -> Id -> Id -> Id -> Id -> Builder Id
textureGatherOffsets = R.textureGatherOffsets . runTypeId

-- | Query the dimensions of the mipmap level selected by the given LOD;
-- result must have integer base type; dim must be 1D, 2D, 3D, or cube.
textureQuerySizeLod :: TypeId -> Id -> Id -> Builder Id
textureQuerySizeLod = R.textureQuerySizeLod . runTypeId

-- | Query the dimensions of a rect, buffer, or multisampled-2D sampler;
-- result must have integer base type.
textureQuerySize :: TypeId -> Id -> Builder Id
textureQuerySize = R.textureQuerySize . runTypeId

-- | Query the LOD that would be used to sample at the given coordinate;
-- result must be a 2-component float vector; dim must be 1D, 2D, 3D, or
-- cube.
textureQueryLod :: TypeId -> Id -> Id -> Builder Id
textureQueryLod = R.textureQueryLod . runTypeId

-- | Query the number of mipmap levels; result must be a scalar integer;
-- dim must be 1D, 2D, 3D, or cube.
textureQueryLevels :: TypeId -> Id -> Builder Id
textureQueryLevels = R.textureQueryLevels . runTypeId

-- | Query the number of samples of a multisampled 2D sampler; result must
-- be a scalar integer.
textureQuerySamples :: TypeId -> Id -> Builder Id
textureQuerySamples = R.textureQuerySamples . runTypeId
-- | Convert a float value to an unsigned integer result type.
convertFToU :: TypeId -> Id -> Builder Id
convertFToU = R.convertFToU . runTypeId

-- | Convert a float value to a signed integer result type.
convertFToS :: TypeId -> Id -> Builder Id
convertFToS = R.convertFToS . runTypeId

-- | Convert a signed integer value to a float result type.
convertSToF :: TypeId -> Id -> Builder Id
convertSToF = R.convertSToF . runTypeId

-- | Convert an unsigned integer value to a float result type.
convertUToF :: TypeId -> Id -> Builder Id
convertUToF = R.convertUToF . runTypeId

-- | Change the bit width of an unsigned integer value.
uConvert :: TypeId -> Id -> Builder Id
uConvert = R.uConvert . runTypeId

-- | Change the bit width of a signed integer value.
sConvert :: TypeId -> Id -> Builder Id
sConvert = R.sConvert . runTypeId

-- | Change the bit width of a float value.
fConvert :: TypeId -> Id -> Builder Id
fConvert = R.fConvert . runTypeId

-- | Cast a pointer value to an unsigned integer result type.
convertPtrToU :: TypeId -> Id -> Builder Id
convertPtrToU = R.convertPtrToU . runTypeId

-- | Cast an unsigned integer value to a pointer result type.
convertUToPtr :: TypeId -> Id -> Builder Id
convertUToPtr = R.convertUToPtr . runTypeId

-- | Cast a pointer into the Generic storage class; result type must match
-- the source pointer's type.
ptrCastToGeneric :: TypeId -> Id -> Builder Id
ptrCastToGeneric = R.ptrCastToGeneric . runTypeId

-- | Cast a Generic pointer back to WorkgroupLocal, WorkgroupGlobal, or
-- Private; result type must match the source pointer's type.
genericCastToPtr :: TypeId -> Id -> Builder Id
genericCastToPtr = R.genericCastToPtr . runTypeId

-- | Reinterpret the bits of a numeric or pointer value as another type of
-- the same width.
bitcast :: TypeId -> Id -> Builder Id
bitcast = R.bitcast . runTypeId

-- | Cast a Generic pointer to an explicitly named storage class
-- (WorkgroupLocal, WorkgroupGlobal, or Private).
genericCastToPtrExplicit :: TypeId -> Id -> StorageClass -> Builder Id
genericCastToPtrExplicit = R.genericCastToPtrExplicit . runTypeId

-- | Saturating conversion of a signed value to unsigned.
satConvertSToU :: TypeId -> Id -> Builder Id
satConvertSToU = R.satConvertSToU . runTypeId

-- | Saturating conversion of an unsigned value to signed.
satConvertUToS :: TypeId -> Id -> Builder Id
satConvertUToS = R.satConvertUToS . runTypeId
-- | Read one component of a vector at a dynamically computed scalar
-- integer index.
vectorExtractDynamic :: TypeId -> Id -> Id -> Builder Id
vectorExtractDynamic = R.vectorExtractDynamic . runTypeId

-- | Write one component of a vector at a dynamically computed index,
-- yielding the updated vector.
vectorInsertDynamic :: TypeId -> Id -> Id -> Id -> Builder Id
vectorInsertDynamic = R.vectorInsertDynamic . runTypeId

-- | Build a new vector by picking components, by index, out of the two
-- given vectors treated as concatenated.
vectorShuffle :: TypeId -> Id -> Id -> [Word32] -> Builder Id
vectorShuffle = R.vectorShuffle . runTypeId

-- | Construct a composite value from its constituents, in order.
compositeConstruct :: TypeId -> [Id] -> Builder Id
compositeConstruct = R.compositeConstruct . runTypeId

-- | Extract a component of a composite, walking the given chain of
-- literal indexes; the result type is the extracted component's type.
compositeExtract :: TypeId -> Id -> [Word32] -> Builder Id
compositeExtract = R.compositeExtract . runTypeId

-- | Insert an object into a composite at the position reached by walking
-- the given chain of literal indexes, yielding the updated composite.
compositeInsert :: TypeId -> Id -> Id -> [Word32] -> Builder Id
compositeInsert = R.compositeInsert . runTypeId

-- | Make a copy of an object; the result type must equal the source's.
copyObject :: TypeId -> Id -> Builder Id
copyObject = R.copyObject . runTypeId

-- | Transpose a matrix; the result type is the transposed matrix type.
transpose :: TypeId -> Id -> Builder Id
transpose = R.transpose . runTypeId
-- | Negate a signed integer value.
sNegate :: TypeId -> Id -> Builder Id
sNegate = R.sNegate . runTypeId

-- | Negate a float value.
fNegate :: TypeId -> Id -> Builder Id
fNegate = R.fNegate . runTypeId

-- | Bitwise complement of an integer value.
bNot :: TypeId -> Id -> Builder Id
bNot = R.bNot . runTypeId

-- | Integer addition.
iAdd :: TypeId -> Id -> Id -> Builder Id
iAdd = R.iAdd . runTypeId

-- | Float addition.
fAdd :: TypeId -> Id -> Id -> Builder Id
fAdd = R.fAdd . runTypeId

-- | Integer subtraction.
iSub :: TypeId -> Id -> Id -> Builder Id
iSub = R.iSub . runTypeId

-- | Float subtraction.
fSub :: TypeId -> Id -> Id -> Builder Id
fSub = R.fSub . runTypeId

-- | Integer multiplication.
iMul :: TypeId -> Id -> Id -> Builder Id
iMul = R.iMul . runTypeId

-- | Float multiplication.
fMul :: TypeId -> Id -> Id -> Builder Id
fMul = R.fMul . runTypeId

-- | Unsigned integer division.
uDiv :: TypeId -> Id -> Id -> Builder Id
uDiv = R.uDiv . runTypeId

-- | Signed integer division.
sDiv :: TypeId -> Id -> Id -> Builder Id
sDiv = R.sDiv . runTypeId

-- | Float division.
fDiv :: TypeId -> Id -> Id -> Builder Id
fDiv = R.fDiv . runTypeId

-- | Unsigned integer modulus.
uMod :: TypeId -> Id -> Id -> Builder Id
uMod = R.uMod . runTypeId

-- | Signed integer remainder (sign follows the first operand).
sRem :: TypeId -> Id -> Id -> Builder Id
sRem = R.sRem . runTypeId

-- | Signed integer modulus (sign follows the second operand).
sMod :: TypeId -> Id -> Id -> Builder Id
sMod = R.sMod . runTypeId

-- | Float remainder (sign follows the first operand).
fRem :: TypeId -> Id -> Id -> Builder Id
fRem = R.fRem . runTypeId

-- | Float modulus (sign follows the second operand).
fMod :: TypeId -> Id -> Id -> Builder Id
fMod = R.fMod . runTypeId

-- | Scale a float vector by a float scalar; result type matches the
-- vector operand.
vectorTimesScalar :: TypeId -> Id -> Id -> Builder Id
vectorTimesScalar = R.vectorTimesScalar . runTypeId

-- | Scale a float matrix by a float scalar; result type matches the
-- matrix operand.
matrixTimesScalar :: TypeId -> Id -> Id -> Builder Id
matrixTimesScalar = R.matrixTimesScalar . runTypeId

-- | Row vector times matrix; result vector has the matrix's column count.
vectorTimesMatrix :: TypeId -> Id -> Id -> Builder Id
vectorTimesMatrix = R.vectorTimesMatrix . runTypeId

-- | Matrix times column vector; result vector has the matrix's row count.
matrixTimesVector :: TypeId -> Id -> Id -> Builder Id
matrixTimesVector = R.matrixTimesVector . runTypeId

-- | Matrix multiplication of two float matrices.
matrixTimesMatrix :: TypeId -> Id -> Id -> Builder Id
matrixTimesMatrix = R.matrixTimesMatrix . runTypeId

-- | Outer product of two float vectors, producing a matrix.
outerProduct :: TypeId -> Id -> Id -> Builder Id
outerProduct = R.outerProduct . runTypeId

-- | Dot product of two float vectors, producing a float scalar.
dot :: TypeId -> Id -> Id -> Builder Id
dot = R.dot . runTypeId

-- | Logical (zero-filling) right shift of an integer value.
shiftRightLogical :: TypeId -> Id -> Id -> Builder Id
shiftRightLogical = R.shiftRightLogical . runTypeId

-- | Arithmetic (sign-extending) right shift of an integer value.
shiftRightArithmetic :: TypeId -> Id -> Id -> Builder Id
shiftRightArithmetic = R.shiftRightArithmetic . runTypeId

-- | Left shift of an integer value.
shiftLeftLogical :: TypeId -> Id -> Id -> Builder Id
shiftLeftLogical = R.shiftLeftLogical . runTypeId

-- | Bitwise OR of two integer values.
bitwiseOr :: TypeId -> Id -> Id -> Builder Id
bitwiseOr = R.bitwiseOr . runTypeId

-- | Bitwise XOR of two integer values.
bitwiseXor :: TypeId -> Id -> Id -> Builder Id
bitwiseXor = R.bitwiseXor . runTypeId

-- | Bitwise AND of two integer values.
bitwiseAnd :: TypeId -> Id -> Id -> Builder Id
bitwiseAnd = R.bitwiseAnd . runTypeId

-- | True if any component of a boolean vector is true.  The scalar bool
-- result type is declared behind the scenes.
vAny :: Id -> Builder Id
vAny v = R.typeBool >>= \boolTy -> R.vAny boolTy v

-- | True if all components of a boolean vector are true.  The scalar bool
-- result type is declared behind the scenes.
vAll :: Id -> Builder Id
vAll v = R.typeBool >>= \boolTy -> R.vAll boolTy v
-- Id 1: Result type, must be boolean type
-- Id 2: Operand, must be floating point type
fIsNaN :: TypeId -> Id -> Builder Id
fIsNaN (TypeId i) = R.fIsNaN i
-- Id 1: Result type, must be boolean type
-- Id 2: Operand, must be floating point type
isInf :: TypeId -> Id -> Builder Id
isInf (TypeId i) = R.isInf i
-- Id 1: Result type, must be boolean type
-- Id 2: Operand, must be floating point type
isFinite :: TypeId -> Id -> Builder Id
isFinite (TypeId i) = R.isFinite i
-- Id 1: Result type, must be boolean type
-- Id 2: Operand, must be floating point type
isNormal :: TypeId -> Id -> Builder Id
isNormal (TypeId i) = R.isNormal i
-- Id 1: Result type, must be boolean type
-- Id 2: Operand, must be numeric type
signBitSet :: TypeId -> Id -> Builder Id
signBitSet (TypeId i) = R.signBitSet i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand, must be numeric type
-- Id 3: Second operand, must be numeric type
lessOrGreater :: TypeId -> Id -> Id -> Builder Id
lessOrGreater (TypeId i) = R.lessOrGreater i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand, must be numeric type
-- Id 3: Second operand, must be numeric type
ordered :: TypeId -> Id -> Id -> Builder Id
ordered (TypeId i) = R.ordered i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand, must be numeric type
-- Id 3: Second operand, must be numeric type
unordered :: TypeId -> Id -> Id -> Builder Id
unordered (TypeId i) = R.unordered i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand, must be numeric type
-- Id 3: Second operand, must be numeric type
logicalOr :: TypeId -> Id -> Id -> Builder Id
logicalOr (TypeId i) = R.logicalOr i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand, must be numeric type
-- Id 3: Second operand, must be numeric type
logicalXor :: TypeId -> Id -> Id -> Builder Id
logicalXor (TypeId i) = R.logicalXor i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand, must be numeric type
-- Id 3: Second operand, must be numeric type
logicalAnd :: TypeId -> Id -> Id -> Builder Id
logicalAnd (TypeId i) = R.logicalAnd i
-- Id 1: Result type, must be same type as both operands
-- Id 2: Condition to select upon, must be boolean type
-- Id 3: First operand
-- Id 4: Second operand
select :: TypeId -> Id -> Id -> Id -> Builder Id
select (TypeId i) = R.select i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
iEqual :: TypeId -> Id -> Id -> Builder Id
iEqual (TypeId i) = R.iEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fOrdEqual :: TypeId -> Id -> Id -> Builder Id
fOrdEqual (TypeId i) = R.fOrdEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fUnordEqual :: TypeId -> Id -> Id -> Builder Id
fUnordEqual (TypeId i) = R.fUnordEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
iNotEqual :: TypeId -> Id -> Id -> Builder Id
iNotEqual (TypeId i) = R.iNotEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fOrdNotEqual :: TypeId -> Id -> Id -> Builder Id
fOrdNotEqual (TypeId i) = R.fOrdNotEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fUnordNotEqual :: TypeId -> Id -> Id -> Builder Id
fUnordNotEqual (TypeId i) = R.fUnordNotEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
uLessThan :: TypeId -> Id -> Id -> Builder Id
uLessThan (TypeId i) = R.uLessThan i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
sLessThan :: TypeId -> Id -> Id -> Builder Id
sLessThan (TypeId i) = R.sLessThan i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fOrdLessThan :: TypeId -> Id -> Id -> Builder Id
fOrdLessThan (TypeId i) = R.fOrdLessThan i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fUnordLessThan :: TypeId -> Id -> Id -> Builder Id
fUnordLessThan (TypeId i) = R.fUnordLessThan i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
uGreaterThan :: TypeId -> Id -> Id -> Builder Id
uGreaterThan (TypeId i) = R.uGreaterThan i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
sGreaterThan :: TypeId -> Id -> Id -> Builder Id
sGreaterThan (TypeId i) = R.sGreaterThan i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fOrdGreaterThan :: TypeId -> Id -> Id -> Builder Id
fOrdGreaterThan (TypeId i) = R.fOrdGreaterThan i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fUnordGreaterThan :: TypeId -> Id -> Id -> Builder Id
fUnordGreaterThan (TypeId i) = R.fUnordGreaterThan i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
uLessThanEqual :: TypeId -> Id -> Id -> Builder Id
uLessThanEqual (TypeId i) = R.uLessThanEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
sLessThanEqual :: TypeId -> Id -> Id -> Builder Id
sLessThanEqual (TypeId i) = R.sLessThanEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fOrdLessThanEqual :: TypeId -> Id -> Id -> Builder Id
fOrdLessThanEqual (TypeId i) = R.fOrdLessThanEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fUnordLessThanEqual :: TypeId -> Id -> Id -> Builder Id
fUnordLessThanEqual (TypeId i) = R.fUnordLessThanEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
uGreaterThanEqual :: TypeId -> Id -> Id -> Builder Id
uGreaterThanEqual (TypeId i) = R.uGreaterThanEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
sGreaterThanEqual :: TypeId -> Id -> Id -> Builder Id
sGreaterThanEqual (TypeId i) = R.sGreaterThanEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fOrdGreaterThanEqual :: TypeId -> Id -> Id -> Builder Id
fOrdGreaterThanEqual (TypeId i) = R.fOrdGreaterThanEqual i
-- Id 1: Result type, must be boolean type
-- Id 2: First operand
-- Id 3: Second operand
fUnordGreaterThanEqual :: TypeId -> Id -> Id -> Builder Id
fUnordGreaterThanEqual (TypeId i) = R.fUnordGreaterThanEqual i
-- Id 1: Result type, must be float type
-- Id 2: Operand
dPdx :: TypeId -> Id -> Builder Id
dPdx (TypeId i) = R.dPdx i
-- Id 1: Result type, must be float type
-- Id 2: Operand
dPdy :: TypeId -> Id -> Builder Id
dPdy (TypeId i) = R.dPdy i
-- Id 1: Result type, must be float type
-- Id 2: Operand
fWidth :: TypeId -> Id -> Builder Id
fWidth (TypeId i) = R.fWidth i
-- Id 1: Result type, must be float type
-- Id 2: Operand
dPdxFine :: TypeId -> Id -> Builder Id
dPdxFine (TypeId i) = R.dPdxFine i
-- Id 1: Result type, must be float type
-- Id 2: Operand
dPdyFine :: TypeId -> Id -> Builder Id
dPdyFine (TypeId i) = R.dPdyFine i
-- Id 1: Result type, must be float type
-- Id 2: Operand
fWidthFine :: TypeId -> Id -> Builder Id
fWidthFine (TypeId i) = R.fWidthFine i
-- Id 1: Result type, must be float type
-- Id 2: Operand
dPdxCoarse :: TypeId -> Id -> Builder Id
dPdxCoarse (TypeId i) = R.dPdxCoarse i
-- Id 1: Result type, must be float type
-- Id 2: Operand
dPdyCoarse :: TypeId -> Id -> Builder Id
dPdyCoarse (TypeId i) = R.dPdyCoarse i
-- Id 1: Result type, must be float type
-- Id 2: Operand
fWidthCoarse :: TypeId -> Id -> Builder Id
fWidthCoarse (TypeId i) = R.fWidthCoarse i
-- OpPhi. First Id: result type. Each pair supplies (value of the result
-- type, label of the predecessor block that value flows in from).
phi :: TypeId -> [(Id, LabelId)] -> Builder Id
phi (TypeId ty) incoming = R.phi ty [ (var, toId lbl) | (var, lbl) <- incoming ]
-- OpLoopMerge. Id: label of the loop's merge block.
loopMerge :: LabelId -> [LoopControl] -> Builder ()
loopMerge (LabelId mergeBlock) = R.loopMerge mergeBlock
-- OpSelectionMerge. Id: label of the selection's merge block.
selectionMerge :: LabelId -> [SelectionControl] -> Builder ()
selectionMerge (LabelId mergeBlock) = R.selectionMerge mergeBlock
-- OpLabel: open a new block and return its label.
label :: Builder LabelId
label = LabelId <$> R.label
-- OpBranch. Id: target label to branch to unconditionally.
branch :: LabelId -> Builder ()
branch (LabelId target) = R.branch target
-- Id 1: Condition, must be a boolean type
-- Id 2: Label taken when the condition is true (same function)
-- Id 3: Label taken when the condition is false (same function)
-- Maybe (Word32, Word32): optional branch weights
branchConditional :: Id -> LabelId -> LabelId -> Maybe (Word32, Word32)
                  -> Builder ()
branchConditional cond (LabelId onTrue) (LabelId onFalse) =
  R.branchConditional cond onTrue onFalse
-- Id 1: Selector value, must be a scalar integer type
-- Id 2: Default label
-- Each case pairs a scalar integer literal with its target label.
switch :: Id -> LabelId -> [(Word32, LabelId)] -> Builder ()
switch selector (LabelId def) cases =
  R.switch selector def [ (lit, toId lbl) | (lit, lbl) <- cases ]
-- Id 1: Result type
-- Id 2: Pointer to load
atomicLoad :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Builder Id
atomicLoad (TypeId i) = R.atomicLoad i
-- Id 1: Result type
-- Id 2: Pointer
-- Id 3: Value to exchange
atomicExchange :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Id
-> Builder Id
atomicExchange (TypeId i) = R.atomicExchange i
-- Id 1: Result type
-- Id 2: Pointer
-- Id 3: Value
-- Id 4: Comparator
atomicCompareExchange :: TypeId -> Id -> ExecutionScope -> [MemorySemantics]
-> Id -> Id -> Builder Id
atomicCompareExchange (TypeId i) = R.atomicCompareExchange i
-- Id 1: Result type
-- Id 2: Pointer
-- Id 3: Value
-- Id 4: Comparator
atomicCompareExchangeWeak :: TypeId -> Id -> ExecutionScope -> [MemorySemantics]
-> Id -> Id -> Builder Id
atomicCompareExchangeWeak (TypeId i) = R.atomicCompareExchangeWeak i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
atomicIIncrement :: TypeId -> Id -> ExecutionScope -> [MemorySemantics]
-> Builder Id
atomicIIncrement (TypeId i) = R.atomicIIncrement i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
atomicIDecrement :: TypeId -> Id -> ExecutionScope -> [MemorySemantics]
-> Builder Id
atomicIDecrement (TypeId i) = R.atomicIDecrement i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
-- Id 3: Value
atomicIAdd :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Id
-> Builder Id
atomicIAdd (TypeId i) = R.atomicIAdd i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
-- Id 3: Value
atomicISub :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Id
-> Builder Id
atomicISub (TypeId i) = R.atomicISub i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
-- Id 3: Value
atomicUMin :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Id
-> Builder Id
atomicUMin (TypeId i) = R.atomicUMin i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
-- Id 3: Value
atomicUMax :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Id
-> Builder Id
atomicUMax (TypeId i) = R.atomicUMax i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
-- Id 3: Value
atomicAnd :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Id
-> Builder Id
atomicAnd (TypeId i) = R.atomicAnd i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
-- Id 3: Value
atomicOr :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Id
-> Builder Id
atomicOr (TypeId i) = R.atomicOr i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
-- Id 3: Value
atomicXor :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Id
-> Builder Id
atomicXor (TypeId i) = R.atomicXor i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
-- Id 3: Value
atomicIMin :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Id
-> Builder Id
atomicIMin (TypeId i) = R.atomicIMin i
-- Id 1: Result type
-- Id 2: Pointer, must point to scalar integer type
-- Id 3: Value
atomicIMax :: TypeId -> Id -> ExecutionScope -> [MemorySemantics] -> Id
-> Builder Id
atomicIMax (TypeId i) = R.atomicIMax i
-- Id 1: Result type
-- Id 2: Destination pointer
-- Id 3: Source pointer
-- Id 4: Number of elements, must be 32/64 bit integer depending on addressing
-- model
-- Id 5: Stride, must be 32/64 bit integer depending on addressing model
-- Id 6: Event
asyncGroupCopy :: TypeId -> ExecutionScope -> Id -> Id -> Id -> Id -> Id
-> Builder Id
asyncGroupCopy (TypeId i) = R.asyncGroupCopy i
-- Id 1: Result type
-- Id 2: Number of events, must be 32 bit integer type
-- Id 3: Events list, must be a pointer to an event type
waitGroupEvents :: TypeId -> ExecutionScope -> Id -> Id -> Builder Id
waitGroupEvents (TypeId i) = R.waitGroupEvents i
-- | OpGroupAll: true when the predicate (a scalar boolean) holds on every
-- invocation in the group.
groupAll :: ExecutionScope -> Id -> Builder Id
groupAll scope predicate = R.typeBool >>= \boolTy -> R.groupAll boolTy scope predicate
-- | OpGroupAny: true when the predicate (a scalar boolean) holds on at least
-- one invocation in the group.
groupAny :: ExecutionScope -> Id -> Builder Id
groupAny scope predicate = R.typeBool >>= \boolTy -> R.groupAny boolTy scope predicate
-- Id 1: Result type
-- Id 2: Value
-- Id 3: LocalId, must be integer type. Can be scalar, vec2, or vec3
-- Value and Result type must be one of:
-- 32/64 bit integer, 16/32/64 bit float
groupBroadcast :: TypeId -> ExecutionScope -> Id -> Id -> Builder Id
groupBroadcast (TypeId i) = R.groupBroadcast i
-- Id 1: Result type
-- Id 2: Value to add to group
groupIAdd :: TypeId -> ExecutionScope -> GroupOperation -> Id -> Builder Id
groupIAdd (TypeId i) = R.groupIAdd i
-- Id 1: Result type
-- Id 2: Value to add to group
groupFAdd :: TypeId -> ExecutionScope -> GroupOperation -> Id -> Builder Id
groupFAdd (TypeId i) = R.groupFAdd i
-- Id 1: Result type
-- Id 2: Value to min to group
groupFMin :: TypeId -> ExecutionScope -> GroupOperation -> Id -> Builder Id
groupFMin (TypeId i) = R.groupFMin i
-- Id 1: Result type
-- Id 2: Value to min to group
groupUMin :: TypeId -> ExecutionScope -> GroupOperation -> Id -> Builder Id
groupUMin (TypeId i) = R.groupUMin i
-- Id 1: Result type
-- Id 2: Value to min to group
groupSMin :: TypeId -> ExecutionScope -> GroupOperation -> Id -> Builder Id
groupSMin (TypeId i) = R.groupSMin i
-- Id 1: Result type
-- Id 2: Value to max to group
groupFMax :: TypeId -> ExecutionScope -> GroupOperation -> Id -> Builder Id
groupFMax (TypeId i) = R.groupFMax i
-- Id 1: Result type
-- Id 2: Value to max to group
groupUMax :: TypeId -> ExecutionScope -> GroupOperation -> Id -> Builder Id
groupUMax (TypeId i) = R.groupUMax i
-- Id 1: Result type
-- Id 2: Value to max to group
groupSMax :: TypeId -> ExecutionScope -> GroupOperation -> Id -> Builder Id
groupSMax (TypeId i) = R.groupSMax i
-- Id 1: Result type
-- Id 2: q (see docs)
-- Id 3: Number of events, must be 32 bit integer
-- Id 4: Wait events, must be pointer to device events
-- Id 5: Retained events, must be pointer to device events
enqueueMarker :: TypeId -> Id -> Id -> Id -> Id -> Builder Id
enqueueMarker (TypeId i) = R.enqueueMarker i
-- Id 1: Result type, must be 32 bit integer
-- Id 2: q (see docs)
-- Id 3: ND range, must be struct built with buildNDRange
-- Id 4: Number of events, must be signed 32 bit integer
-- Id 5: Wait events, must be pointer to device events
-- Id 6: Retained events, must be pointer to device events
-- Id 7: Invoke, must be function type (see docs)
-- Id 8: Param
-- Id 9: Param size
-- Id 10: Param align
-- Ids: Local size, optional list of 32 bit integers
enqueueKernel :: TypeId -> Id -> KernelEnqueueFlags -> NDRangeId -> Id -> Id
-> Id -> Id -> Id -> Id -> Id -> [Id] -> Builder Id
enqueueKernel (TypeId i) q kef (NDRangeId n) = R.enqueueKernel i q kef n
-- Id 1: Result type, must be 32 bit integer
-- Id 2: ND range, must be struct built with buildNDrange
-- Id 3: Invoke, must be function type (see docs)
getKernelNDrangeSubGroupCount :: TypeId -> NDRangeId -> Id -> Builder Id
getKernelNDrangeSubGroupCount (TypeId i) (NDRangeId n) =
R.getKernelNDrangeSubGroupCount i n
-- Id 1: Result type, must be 32 bit integer
-- Id 2: ND range, must be struct built with buildNDrange
-- Id 3: Invoke, must be function type (see docs)
getKernelNDrangeMaxSubGroupCount :: TypeId -> NDRangeId -> Id -> Builder Id
getKernelNDrangeMaxSubGroupCount (TypeId i) (NDRangeId n) =
R.getKernelNDrangeMaxSubGroupCount i n
-- Id 1: Result type, must be 32 bit integer
-- Id 2: Invoke, must be function type (see docs)
getKernelWorkGroupSize :: TypeId -> Id -> Builder Id
getKernelWorkGroupSize (TypeId i) = R.getKernelWorkGroupSize i
-- Id 1: Result type, must be 32 bit integer
-- Id 2: Invoke, must be function type (see docs)
getKernelPreferredWorkGroupSizeMultiple :: TypeId -> Id -> Builder Id
getKernelPreferredWorkGroupSizeMultiple (TypeId i) =
R.getKernelPreferredWorkGroupSizeMultiple i
-- | OpCreateUserEvent: result is a fresh device event.
createUserEvent :: Builder Id
createUserEvent = R.typeDeviceEvent >>= R.createUserEvent
-- | OpIsValidEvent. Id: event operand, must be of device-event type.
isValidEvent :: Id -> Builder Id
isValidEvent ev = R.typeBool >>= \boolTy -> R.isValidEvent boolTy ev
-- | OpGetDefaultQueue: result is the default device queue.
getDefaultQueue :: Builder Id
getDefaultQueue = R.typeQueue >>= R.getDefaultQueue
-- Id 1: Global work size
-- Id 2: Local work size
-- Id 3: Global work offset
-- All Ids must be 32-bit integer scalars or arrays with 2-3 items.
-- Requires the Physical32 addressing mode.
buildNDRange32 :: Id -> Id -> Id -> Builder NDRangeId
buildNDRange32 globalSize localSize globalOffset = do
  uint32  <- R.typeInt 32 Unsigned
  three   <- R.constant uint32 [3]
  sizeArr <- R.typeArray uint32 three
  ndrTy   <- R.typeStruct [uint32, sizeArr, sizeArr, sizeArr]
  NDRangeId <$> R.buildNDRange ndrTy globalSize localSize globalOffset
-- Id 1: Global work size
-- Id 2: Local work size
-- Id 3: Global work offset
-- All Ids must be 64-bit integer scalars or arrays with 2-3 items.
-- Requires the Physical64 addressing mode.
buildNDRange64 :: Id -> Id -> Id -> Builder NDRangeId
buildNDRange64 globalSize localSize globalOffset = do
  uint32  <- R.typeInt 32 Unsigned
  uint64  <- R.typeInt 64 Unsigned
  three   <- R.constant uint32 [3]
  sizeArr <- R.typeArray uint64 three
  ndrTy   <- R.typeStruct [uint32, sizeArr, sizeArr, sizeArr]
  NDRangeId <$> R.buildNDRange ndrTy globalSize localSize globalOffset
-- Id 1: Result type
-- Id 2: Pipe, must be pipe type with ReadOnly access qualifier
-- Id 3: Pointer, must have generic storage class
readPipe :: TypeId -> Id -> Id -> Builder Id
readPipe (TypeId i) = R.readPipe i
-- Id 1: Result type, must be 32 bit integer
-- Id 2: Pipe, must be pipe type with WriteOnly access qualifier
-- Id 3: Pointer, must have generic storage class
writePipe :: TypeId -> Id -> Id -> Builder Id
writePipe (TypeId i) = R.writePipe i
-- Id 1: Result type, must be 32 bit integer
-- Id 2: Pipe, must be pipe type with ReadOnly access qualifier
-- Id 3: Reserve ID, must have type reserve ID
-- Id 4: Index, must be 32 bit integer
-- Id 5: Pointer, must have generic storage class
reservedReadPipe :: TypeId -> Id -> ReserveId -> Id -> Id -> Builder Id
reservedReadPipe (TypeId i) p (ReserveId r) = R.reservedReadPipe i p r
-- Id 1: Result type, must be 32 bit integer
-- Id 2: Pipe, must be pipe type with WriteOnly accesss qualifier
-- Id 3: Reserve ID, must have type reserve ID
-- Id 4: Index, must be 32 bit integer
-- Id 5: Pointer, must have generic storage class
reservedWritePipe :: TypeId -> Id -> ReserveId -> Id -> Id -> Builder Id
reservedWritePipe (TypeId i) p (ReserveId r) = R.reservedWritePipe i p r
-- Id 1: Result type
-- Id 2: Pipe
-- Id 3: Number of packets
reserveReadPipePackets :: Id -> Id -> Builder ReserveId
reserveReadPipePackets p n = do
reserveId <- R.typeReserveId
r <- R.reserveReadPipePackets reserveId p n
return (ReserveId r)
-- Id 1: Result type
-- Id 2: Pipe
-- Id 3: Number of packets
reserveWritePipePackets :: Id -> Id -> Builder ReserveId
reserveWritePipePackets p n = do
reserveId <- R.typeReserveId
r <- R.reserveWritePipePackets reserveId p n
return (ReserveId r)
-- Id 1: Pipe with ReadOnly access qualifier
-- Id 2: Reserve ID
commitReadPipe :: Id -> ReserveId -> Builder ()
commitReadPipe i (ReserveId r) = R.commitReadPipe i r
-- Id 1: Pipe with WriteOnly access qualifier
-- Id 2: Reserve ID
commitWritePipe :: Id -> ReserveId -> Builder ()
commitWritePipe i (ReserveId r) = R.commitWritePipe i r
-- Id: Reserve ID
isValidReserveId :: ReserveId -> Builder Id
isValidReserveId (ReserveId i) = do
bool <- R.typeBool
R.isValidReserveId bool i
-- Id 1: Result type, must be 32 bit integer
-- Id 2: Pipe with either ReadOnly or WriteOnly access qualifier
getNumPipePackets :: TypeId -> Id -> Builder Id
getNumPipePackets (TypeId i) = R.getNumPipePackets i
-- Id 1: Result type, must be 32 bit integer
-- Id 2: Pipe with either ReadOnly or WriteOnly access qualifier
getMaxPipePackets :: TypeId -> Id -> Builder Id
getMaxPipePackets (TypeId i) = R.getMaxPipePackets i
-- Id 1: Pipe with ReadOnly access qualifier
-- Id 2: Number of packets, must be 32 bit integer
groupReserveReadPipePackets :: ExecutionScope -> Id -> Id -> Builder ReserveId
groupReserveReadPipePackets es p n = do
reserveId <- R.typeReserveId
r <- R.groupReserveReadPipePackets reserveId es p n
return (ReserveId r)
-- Id 1: Pipe with WriteOnly access qualifier
-- Id 2: Number of packets, must be 32 bit integer
--
-- Reserve packets for writing on behalf of the whole group.
-- BUGFIX: this previously delegated to R.groupReserveReadPipePackets (a
-- copy-paste of the read variant above); the write reservation must use
-- the write-side raw instruction, mirroring reserveWritePipePackets.
groupReserveWritePipePackets :: ExecutionScope -> Id -> Id -> Builder ReserveId
groupReserveWritePipePackets es p n = do
    reserveId <- R.typeReserveId
    r <- R.groupReserveWritePipePackets reserveId es p n
    return (ReserveId r)
-- Id 1: Pipe with ReadOnly access qualifier
-- Id 2: Reserve ID
groupCommitReadPipe :: ExecutionScope -> Id -> ReserveId -> Builder ()
groupCommitReadPipe es i (ReserveId r) = R.groupCommitReadPipe es i r
-- Id 1: Pipe with ReadOnly access qualifier
-- Id 2: Reserve ID
groupCommitWritePipe :: ExecutionScope -> Id -> ReserveId -> Builder ()
groupCommitWritePipe es i (ReserveId r) = R.groupCommitWritePipe es i r
| stevely/hspirv | src/SpirV/Builder.hs | bsd-3-clause | 54,339 | 0 | 17 | 10,619 | 12,172 | 6,323 | 5,849 | 710 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
module Language.Haskell.Liquid.Qualifier (
specificationQualifiers
) where
import Language.Haskell.Liquid.RefType
import Language.Haskell.Liquid.GhcMisc (getSourcePos)
import Language.Haskell.Liquid.PredType
import Language.Haskell.Liquid.Types
import Language.Fixpoint.Types
import Language.Fixpoint.Misc
import Control.Applicative ((<$>))
import Control.Arrow (second)
import Data.List (delete, nub)
import Data.Maybe (fromMaybe)
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
-----------------------------------------------------------------------------------
-- | Mine candidate abstract qualifiers from the refinement types the user
--   wrote in the specification: ordinary type signatures, assumed signatures
--   and data-constructor types. Only binders defined in the current module
--   are mined, and @k@ bounds the size of the result: qualifiers with more
--   than @k + 1@ parameters are discarded.
specificationQualifiers :: Int -> GhcInfo -> [Qualifier]
-----------------------------------------------------------------------------------
specificationQualifiers k info
  = [ q | (x, t) <- (M.toList $ tySigs $ spec info) ++ (M.toList $ asmSigs $ spec info) ++ (M.toList $ ctors $ spec info)
        -- FIXME: this mines extra, useful qualifiers but causes a significant increase in running time
        -- , ((isClassOp x || isDataCon x) && x `S.member` (S.fromList $ impVars info ++ defVars info)) || x `S.member` (S.fromList $ defVars info)
        , x `S.member` (S.fromList $ defVars info)
        , q <- refTypeQuals (getSourcePos x) (tcEmbeds $ spec info) (val t)
        , length (q_params q) <= k + 1
    ]
-- GRAVEYARD: scraping quals from imports kills the system with too much crap
-- specificationQualifiers info = {- filter okQual -} qs
-- where
-- qs = concatMap refTypeQualifiers ts
-- refTypeQualifiers = refTypeQuals $ tcEmbeds spc
-- ts = val <$> t1s ++ t2s
-- t1s = [t | (x, t) <- tySigs spc, x `S.member` definedVars]
-- t2s = [] -- [t | (_, t) <- ctor spc ]
-- definedVars = S.fromList $ defVars info
-- spc = spec info
--
-- okQual = not . any isPred . map snd . q_params
-- where
-- isPred (FApp tc _) = tc == stringFTycon "Pred"
-- isPred _ = False
-- | All qualifiers of a refinement type: the structural qualifiers of the
--   type itself plus qualifiers instantiating its abstract predicates.
refTypeQuals l tce t = quals ++ pAppQuals l tce preds quals
  where
    quals = refTypeQuals' l tce t
    preds = filter isPropPV $ ty_preds $ toRTypeRep t
-- | For every abstract-predicate argument, build a qualifier from each mined
--   qualifier whose equality binds a variable of the matching sort.
pAppQuals l tce ps qs = [ pAppQual l tce p xs (v, e) | p <- ps, (s, v, _) <- pargs p, (xs, e) <- mkE s ]
  where
    mkE s = concatMap (expressionsOfSort (rTypeSort tce s)) qs
    -- Only qualifiers of the shape (v == e) where v has the requested sort
    -- contribute; the matched variable is dropped from the parameter list.
    expressionsOfSort sort (Q _ pars (PAtom Eq (EVar v) e2) _)
      | (v, sort) `elem` pars
      = [(filter (/=(v, sort)) pars, e2)]
    expressionsOfSort _ _
      = []
-- | Build a single predicate-application qualifier: ~P applied to the value
--   variable and the predicate's arguments, with @v@ replaced by @expr@.
pAppQual l tce p args (v, expr) = Q "Auto" freeVars pred l
  where
    freeVars = (vv, tyvv) : (predv, typred) : args
    pred = pApp predv $ EVar vv:predArgs
    vv = "v"
    predv = "~P"
    tyvv = rTypeSort tce $ pvType p
    typred = rTypeSort tce (pvarRType p :: RSort)
    predArgs = mkexpr <$> (snd3 <$> pargs p)
    mkexpr x = if x == v then expr else EVar x
-- refTypeQuals :: SpecType -> [Qualifier]
-- | Structural traversal: walk the refinement type, extending the sort
--   environment γ at every binder, and collect top-level refinement
--   qualifiers at every base position.
refTypeQuals' l tce t0 = go emptySEnv t0
  where
    go γ t@(RVar _ _) = refTopQuals l tce t0 γ t
    go γ (RAllT _ t) = go γ t
    go γ (RAllP _ t) = go γ t
    go γ t@(RAppTy t1 t2 _) = go γ t1 ++ go γ t2 ++ refTopQuals l tce t0 γ t
    -- function binders bring the argument into scope for the result type
    go γ (RFun x t t' _) = (go γ t)
                           ++ (go (insertSEnv x (rTypeSort tce t) γ) t')
    -- type applications also bind the application's value variable
    go γ t@(RApp c ts rs _) = (refTopQuals l tce t0 γ t)
                              ++ concatMap (go (insertSEnv (rTypeValueVar t) (rTypeSort tce t) γ)) ts
                              ++ goRefs c (insertSEnv (rTypeValueVar t) (rTypeSort tce t) γ) rs
    go γ (RAllE x t t') = (go γ t)
                          ++ (go (insertSEnv x (rTypeSort tce t) γ) t')
    go γ (REx x t t') = (go γ t)
                        ++ (go (insertSEnv x (rTypeSort tce t) γ) t')
    go _ _ = []
    -- refinement arguments of a type constructor, paired with its PVars
    goRefs c g rs = concat $ zipWith (goRef g) rs (rTyConPVs c)
    goRef g (RProp s t) _ = go (insertsSEnv g s) t
    goRef _ (RPropP _ _) _ = []
    goRef _ (RHProp _ _) _ = errorstar "TODO: EFFECTS"
    insertsSEnv = foldr (\(x, t) γ -> insertSEnv x (rTypeSort tce t) γ)
-- | Qualifiers of the top-level refinement of @t@: one per conjunct of the
--   sorted reft (holes skipped), plus one equality qualifier per argument of
--   each applied abstract predicate.
refTopQuals l tce t0 γ t
  = [ mkQual l t0 γ v so pa | let (RR so (Reft (v, ra))) = rTypeSortedReft tce t
    , pa <- conjuncts $ raPred ra
    , not $ isHole pa
    ] ++
    [ mkPQual l tce t0 γ s e | let (U _ (Pr ps) _) = fromMaybe (msg t) $ stripRTypeBase t
    , p <- findPVar (ty_preds $ toRTypeRep t0) <$> ps
    , (s, _, e) <- pargs p
    ]
    where
      msg t = errorstar $ "Qualifier.refTopQuals: no typebase" ++ showpp t
-- | Qualifier for a single predicate argument: vv == e at the argument's sort.
mkPQual l tce t0 γ t e = mkQual l t0 γ' v so pa
  where
    v = "vv"
    so = rTypeSort tce t
    γ' = insertSEnv v so γ
    pa = PAtom Eq (EVar v) e
-- | Generalise a concrete predicate into a qualifier: every free variable
--   (other than the value variable) is renamed to a template parameter ~A_i.
mkQual l t0 γ v so p = Q "Auto" ((v, so) : yts) p' l
  where
    yts = [(y, lookupSort t0 x γ) | (x, y) <- xys ]
    p' = subst (mkSubst (second EVar <$> xys)) p
    xys = zipWith (\x i -> (x, symbol ("~A" ++ show i))) xs [0..]
    xs = delete v $ orderedFreeVars γ p
-- | Sort of a free variable; an unbound variable is a hard error because the
--   qualifier would otherwise be ill-sorted.
lookupSort t0 x γ = fromMaybe (errorstar msg) $ lookupSEnv x γ
  where
    msg = "Unknown freeVar " ++ show x ++ " in specification " ++ show t0
-- | Free symbols of @p@ that are bound in γ, in first-occurrence order.
orderedFreeVars γ = nub . filter (`memberSEnv` γ) . syms
-- atoms (PAnd ps) = concatMap atoms ps
-- atoms p = [p]
| spinda/liquidhaskell | src/Language/Haskell/Liquid/Qualifier.hs | bsd-3-clause | 5,998 | 0 | 15 | 2,199 | 1,893 | 991 | 902 | 83 | 11 |
module Main where
import Lib
-- | Demo driver: runs the gradient method on f(x) = x^2 from a = 1, then
-- prints a few sample derivatives. (The section banners and the derivative
-- examples below are printed in Japanese, as in the original program.)
main :: IO ()
main = do
  putStrLn "----------------Gradient method(勾配法)----------------\n"
  putStrLn "This program is sample of \"Gradient method\"."
  putStrLn "This sample is for function of one variable."
  -- the objective function: a simple convex parabola with minimum at 0
  let f x = x * x
  putStrLn "Function: f(x) = x^2"
  -- starting point of the descent
  let a = (1)
  putStrLn $ "Initial value: a = " ++ (show a)
  -- seed the search with the gradient at the starting point
  let minValue = find_min f a (differential f a)
  putStrLn $ "Local minimum value: min = " ++ (show minValue)
  putStrLn $ "f(x): f(min) = " ++ (show $ f minValue)
  -- examples of numerically computed gradients at a = 1, -1, 20
  putStrLn "\n\n----------------微分の例(勾配を求める例)----------------\n"
  putStrLn $ "f(x)の、a = 1 における勾配は、f'(a) = " ++ (show $ differential f 1)
  putStrLn $ "f(x)の、a = -1 における勾配は、f'(a) = " ++ (show $ differential f (-1))
  putStrLn $ "f(x)の、a = 20 における勾配は、f'(a) = " ++ (show $ differential f 20)
putStrLn $ "f(x)の、a = 20 における勾配は、f'(a) = " ++ (show $ differential f 20)
-- 微分係数を計算するときにxに与える微小変化
h :: Double
h = 0.00001
-- 勾配法で使う、正の定数
epsilon :: Double
epsilon = 0.00001
-- 微分を計算
differential :: (Double -> Double) -> Double -> Double
differential f a = (f (a + h) - f a) / h
-- 極小点探索(勾配法)
find_min :: (Double -> Double) -> Double -> Double -> Double
find_min f a g =
let
a' = a - epsilon * g
g' = differential f a'
in
if g * g' <= 0
then a
else find_min f a' g
| reouno/gradient-method | app/Main.hs | bsd-3-clause | 1,481 | 0 | 12 | 341 | 399 | 198 | 201 | 32 | 2 |
import Data.List (intercalate)
-- | Rosalind "Introduction to Random Strings": given a DNA string and a
-- GC-content value, return the common (base-10) logarithm of the probability
-- that a random string built with that GC content matches the DNA exactly.
--
-- With GC content y the per-base probabilities are
--   p(G) = p(C) = y / 2
--   p(A) = p(T) = (1 - y) / 2
-- and because log10(x*y) = log10 x + log10 y, the log of the product is the
-- sum of the per-base logs, accumulated left to right.
--
-- A non-positive GC content short-circuits to 0 (same guard as before).
prob :: (Floating b, Ord b, Foldable t) => t Char -> b -> b
prob bases gcContent
  | gcContent > 0 = foldl accumulate 0 bases
  | otherwise     = 0
  where
    gcProb = gcContent / 2
    atProb = (1 - gcContent) / 2
    accumulate acc base
      | base == 'G' || base == 'C' = acc + logBase 10 gcProb
      | otherwise                  = acc + logBase 10 atProb
-- Read a DNA string and a line of GC-content values from stdin, then print
-- the log10 match probabilities (show's commas replaced by spaces).
main = do
  dna <- getLine
  line <- getLine
  let contents = map (\w -> read w :: Float) (words line)
      results = map (prob dna) contents
      despace c = if c == ',' then ' ' else c
      render = map despace . show
  print (render results)
-- Shared helper (originally from perm.hs): render a list as a
-- space-separated string of its elements' show representations.
show' :: Show a => [a] -> String
show' = unwords . map show
| mitochon/hoosalind | src/problems/prob.hs | mit | 1,367 | 0 | 14 | 396 | 344 | 189 | 155 | 18 | 3 |
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable, GeneralizedNewtypeDeriving #-}
module MinHS.Env ( Env(..)
, empty
, lookup
, add
, addAll
) where
import qualified Data.Map as M
import Data.Foldable (Foldable)
import Data.Traversable (Traversable)
import Data.Monoid
import Prelude hiding (lookup)
-- | An environment: a finite map from identifiers to values of type @e@.
newtype Env e = Env (M.Map String e) deriving (Functor, Foldable, Traversable, Show, Eq, Monoid)

-- | The environment with no bindings.
empty :: Env e
empty = Env M.empty

-- | Look up an identifier, 'Nothing' when it is unbound.
lookup :: Env e -> String -> Maybe e
lookup (Env m) name = M.lookup name m

-- | Bind one identifier, shadowing any previous binding for it.
add :: Env e -> (String, e) -> Env e
add (Env m) (name, val) = Env (M.insert name val m)

-- | Bind several identifiers at once; on duplicate keys the earliest pair
-- in the list wins (same right fold as before).
addAll :: Env e -> [(String, e)] -> Env e
addAll env pairs = foldr (flip add) env pairs
| pierzchalski/cs3161a2 | MinHS/Env.hs | mit | 821 | 0 | 10 | 217 | 318 | 174 | 144 | 20 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Hate.Graphics.Shader where
-- data
-- | Identifier of a shader input/output/uniform.
type Name = String
-- | GLSL float precision qualifier emitted at the top of a shader.
data FloatPrecision = HighPrecision | MediumPrecision | LowPrecision
-- | Explicit layout(location = n) qualifier.
newtype Location = Location Int
-- | Tags for the GLSL types this generator knows how to spell.
data TypeTag = FloatTag | Vec2Tag | Vec3Tag | Vec4Tag | Mat2Tag | Mat3Tag | Mat4Tag | Sampler2DTag
-- | A shader input: type, optional location, and name.
data Input = Input TypeTag (Maybe Location) Name
-- | A shader output: type and name.
data Output = Output TypeTag Name
-- | Explicit layout(binding = n) qualifier.
newtype Binding = Binding Int
-- | A uniform: type, optional binding, and name.
data Uniform = Uniform TypeTag (Maybe Binding) Name
-- | A varying passed between stages (becomes an output in one stage and an
-- input in the next; see 'toInput' / 'toOutput').
data Varying = Varying TypeTag Name
-- | Everything needed to assemble one shader's source text.
data ShaderDesc = ShaderDesc {
    sdFloatPrecision :: FloatPrecision,
    sdInputs :: [Input],
    sdOutputs :: [Output],
    sdUniforms :: [Uniform],
    sdBody :: String
    }
-- show
-- | Render a 'Just' payload with 'show'; a 'Nothing' becomes the empty string.
showMaybe :: Show a => Maybe a -> String
showMaybe = maybe "" show
-- NOTE: these Show instances render GLSL source fragments, not Haskell
-- syntax; they are the code generator for the declaration section.
instance Show FloatPrecision where
    show HighPrecision = "precision highp float;"
    show MediumPrecision = "precision mediump float;"
    show LowPrecision = "precision lowp float;"
-- Rendered with a trailing space so it can directly prefix a declaration.
instance Show Location where
    show (Location loc) = "layout(location = " ++ show loc ++ ") "
instance Show TypeTag where
    show FloatTag = "float"
    show Vec2Tag = "vec2"
    show Vec3Tag = "vec3"
    show Vec4Tag = "vec4"
    show Mat2Tag = "mat2"
    show Mat3Tag = "mat3"
    show Mat4Tag = "mat4"
    show Sampler2DTag = "sampler2D"
instance Show Input where
    show (Input tag loc name) = showMaybe loc ++ "in " ++ show tag ++ " " ++ name ++ ";"
instance Show Output where
    show (Output tag name) = "out " ++ show tag ++ " " ++ name ++ ";"
-- Rendered with a trailing space so it can directly prefix a declaration.
instance Show Binding where
    show (Binding bnd) = "layout(binding = " ++ show bnd ++ ") "
instance Show Uniform where
    show (Uniform tag bnd name) = showMaybe bnd ++ "uniform " ++ show tag ++ " " ++ name ++ ";"
-- Utils
-- | View a varying as the receiving stage's input (no explicit location).
toInput :: Varying -> Input
toInput (Varying tag name) = Input tag Nothing name
-- | View a varying as the producing stage's output.
toOutput :: Varying -> Output
toOutput (Varying tag name) = Output tag name
| maque/Hate | src/Hate/Graphics/Shader.hs | mit | 2,014 | 0 | 11 | 512 | 605 | 319 | 286 | 47 | 1 |
{-# LANGUAGE OverloadedStrings, FlexibleContexts #-}
{- |
Module : Text.Pandoc.Writers.ICML
Copyright : Copyright (C) 2013 github.com/mb21
License : GNU GPL, version 2 or above
Stability : alpha
Conversion of 'Pandoc' documents to Adobe InCopy ICML, a stand-alone XML format
which is a subset of the zipped IDML format for which the documentation is
available here: http://wwwimages.adobe.com/www.adobe.com/content/dam/Adobe/en/devnet/indesign/sdk/cs6/idml/idml-specification.pdf
InCopy is the companion word-processor to Adobe InDesign and ICML documents can be integrated
into InDesign with File -> Place.
-}
module Text.Pandoc.Writers.ICML (writeICML) where
import Text.Pandoc.Definition
import Text.Pandoc.XML
import Text.Pandoc.Writers.Shared
import Text.Pandoc.Shared (splitBy)
import Text.Pandoc.Options
import Text.Pandoc.Templates (renderTemplate')
import Text.Pandoc.Pretty
import Data.List (isPrefixOf, isInfixOf, stripPrefix)
import Data.Text as Text (breakOnAll, pack)
import Control.Monad.State
import Network.URI (isURI)
import qualified Data.Set as Set
-- | A stack of style names; the innermost (most recently entered) style
--   is prepended, so the head is the most deeply nested element.
type Style = [String]
-- | Collected hyperlinks as (identifier, url) pairs, newest first.
type Hyperlink = [(Int, String)]
-- | Accumulates everything that must be emitted once per document.
data WriterState = WriterState{
    blockStyles  :: Set.Set String -- ^ paragraph-style combinations seen so far
  , inlineStyles :: Set.Set String -- ^ character-style combinations seen so far
  , links        :: Hyperlink      -- ^ hyperlinks encountered, newest first
  , listDepth    :: Int            -- ^ current list nesting depth (1-based)
  , maxListDepth :: Int            -- ^ deepest list nesting seen in the document
  }
type WS a = State WriterState a
defaultWriterState :: WriterState
defaultWriterState = WriterState{
    blockStyles  = Set.empty
  , inlineStyles = Set.empty
  , links        = []
  , listDepth    = 1
  , maxListDepth = 0
  }
-- inline names (appear in InDesign's character styles pane)
-- These strings are combined (space-separated) by 'styleToStrAttr' and
-- matched back with 'isInfixOf' in 'charStylesToDoc'.
emphName :: String
strongName :: String
strikeoutName :: String
superscriptName :: String
subscriptName :: String
smallCapsName :: String
codeName :: String
linkName :: String
emphName = "Italic"
strongName = "Bold"
strikeoutName = "Strikeout"
superscriptName = "Superscript"
subscriptName = "Subscript"
smallCapsName = "SmallCaps"
codeName = "Code"
linkName = "Link"
-- block element names (appear in InDesign's paragraph styles pane)
-- Combined into "outer > inner" chains by 'parStyle' and matched back
-- with 'isInfixOf' / substring counting in 'parStylesToDoc'.
paragraphName :: String
codeBlockName :: String
blockQuoteName :: String
orderedListName :: String
bulletListName :: String
defListTermName :: String
defListDefName :: String
headerName :: String
tableName :: String
tableHeaderName :: String
tableCaptionName :: String
alignLeftName :: String
alignRightName :: String
alignCenterName :: String
firstListItemName :: String
beginsWithName :: String
lowerRomanName :: String
upperRomanName :: String
lowerAlphaName :: String
upperAlphaName :: String
subListParName :: String
footnoteName :: String
paragraphName = "Paragraph"
codeBlockName = "CodeBlock"
blockQuoteName = "Blockquote"
orderedListName = "NumList"
bulletListName = "BulList"
defListTermName = "DefListTerm"
defListDefName = "DefListDef"
headerName = "Header"
tableName = "TablePar"
tableHeaderName = "TableHeader"
tableCaptionName = "TableCaption"
alignLeftName = "LeftAlign"
alignRightName = "RightAlign"
alignCenterName = "CenterAlign"
firstListItemName = "first"
beginsWithName = "beginsWith-"
lowerRomanName = "lowerRoman"
upperRomanName = "upperRoman"
lowerAlphaName = "lowerAlpha"
upperAlphaName = "upperAlpha"
subListParName = "subParagraph"
footnoteName = "Footnote"
-- | Convert Pandoc document to string in ICML format.
writeICML :: WriterOptions -> Pandoc -> String
writeICML opts (Pandoc meta blocks) =
  let colwidth = if writerWrapText opts
                    then Just $ writerColumns opts
                    else Nothing
      render' = render colwidth
      -- render metadata fields with a fresh writer state so they do not
      -- pollute the style/link sets collected for the document body
      renderMeta f s = Just $ render' $ fst $ runState (f opts [] s) defaultWriterState
      -- NOTE(review): partial pattern; this crashes if metaToJSON yields
      -- Nothing — TODO confirm it is total for these arguments
      Just metadata = metaToJSON opts
                 (renderMeta blocksToICML)
                 (renderMeta inlinesToICML)
                 meta
      -- the body render also accumulates the styles and hyperlinks that
      -- the template needs, so it must run before building the context
      (doc, st) = runState (blocksToICML opts [] blocks) defaultWriterState
      main = render' doc
      context = defField "body" main
              $ defField "charStyles" (render' $ charStylesToDoc st)
              $ defField "parStyles" (render' $ parStylesToDoc st)
              $ defField "hyperlinks" (render' $ hyperlinksToDoc $ links st)
              $ metadata
  in if writerStandalone opts
        then renderTemplate' (writerTemplate opts) context
        else main
-- | Auxiliary rule matcher for 'parStylesToDoc' and 'charStylesToDoc':
--   yields the rule's attribute when the rule's name occurs anywhere in
--   the style string, and nothing otherwise.
contains :: String -> (String, (String, String)) -> [(String, String)]
contains s (ruleName, attribute)
  | ruleName `isInfixOf` s = [attribute]
  | otherwise              = []
-- | The monospaced font to use as default (applied to code styles).
monospacedFont :: Doc
monospacedFont = inTags False "AppliedFont" [("type", "string")] $ text "Courier New"
-- | How much to indent blockquotes etc.
-- (presumably in points — TODO confirm against the IDML spec)
defaultIndent :: Int
defaultIndent = 20
-- | How much to indent numbered lists before the number.
defaultListIndent :: Int
defaultListIndent = 10
-- other constants
-- | A single U+2028 LINE SEPARATOR character (it is embedded literally in
-- the string below, which is why the literal spans two source lines);
-- used to render 'LineBreak' inlines.
lineSeparator :: String
lineSeparator = " "
-- | Convert a WriterState with its block styles to the ICML listing of Paragraph Styles.
parStylesToDoc :: WriterState -> Doc
parStylesToDoc st = vcat $ map makeStyle $ Set.toAscList $ blockStyles st
  where
    -- @s@ is one accumulated space-separated style combination produced by
    -- 'parStyle', e.g. "Paragraph BulList first"; every combination that
    -- occurred in the document gets its own <ParagraphStyle> element.
    makeStyle s =
      let countSubStrs sub str = length $ Text.breakOnAll (Text.pack sub) (Text.pack str)
          attrs = concat $ map (contains s) $ [
                      (defListTermName, ("BulletsAndNumberingListType", "BulletList"))
                    , (defListTermName, ("FontStyle", "Bold"))
                    , (tableHeaderName, ("FontStyle", "Bold"))
                    , (alignLeftName, ("Justification", "LeftAlign"))
                    , (alignRightName, ("Justification", "RightAlign"))
                    , (alignCenterName, ("Justification", "CenterAlign"))
                    , (headerName++"1", ("PointSize", "36"))
                    , (headerName++"2", ("PointSize", "30"))
                    , (headerName++"3", ("PointSize", "24"))
                    , (headerName++"4", ("PointSize", "18"))
                    , (headerName++"5", ("PointSize", "14"))
                    ]
          -- what is the most nested list type, if any?
          -- (the style string is innermost-last after rendering, hence the reverse)
          (isBulletList, isOrderedList) = findList $ reverse $ splitBy (==' ') s
            where
              findList [] = (False, False)
              findList (x:xs) | x == bulletListName = (True, False)
                              | x == orderedListName = (False, True)
                              | otherwise = findList xs
          -- nesting counts drive numbering level and left indent
          nBuls = countSubStrs bulletListName s
          nOrds = countSubStrs orderedListName s
          attrs' = numbering ++ listType ++ indent ++ attrs
            where
              numbering | isOrderedList = [("NumberingExpression", "^#.^t"), ("NumberingLevel", show nOrds)]
                        | otherwise = []
              -- continuation paragraphs inside an item must not restart numbering
              listType | isOrderedList && (not $ isInfixOf subListParName s)
                           = [("BulletsAndNumberingListType", "NumberedList")]
                       | isBulletList && (not $ isInfixOf subListParName s)
                           = [("BulletsAndNumberingListType", "BulletList")]
                       | otherwise = []
              indent = [("LeftIndent", show indt)]
                where
                  nBlockQuotes = countSubStrs blockQuoteName s
                  nDefLists = countSubStrs defListDefName s
                  indt = max 0 $ defaultListIndent*(nBuls + nOrds - 1) + defaultIndent*(nBlockQuotes + nDefLists)
          props = inTags True "Properties" [] $ (basedOn $$ tabList $$ numbForm)
            where
              font = if isInfixOf codeBlockName s
                        then monospacedFont
                        else empty
              basedOn = inTags False "BasedOn" [("type", "object")] (text "$ID/NormalParagraphStyle") $$ font
              tabList = if isBulletList
                           then inTags True "TabList" [("type","list")] $ inTags True "ListItem" [("type","record")]
                                  $ vcat [
                                      inTags False "Alignment" [("type","enumeration")] $ text "LeftAlign"
                                    , inTags False "AlignmentCharacter" [("type","string")] $ text "."
                                    , selfClosingTag "Leader" [("type","string")]
                                    , inTags False "Position" [("type","unit")] $ text
                                        $ show $ defaultListIndent * (nBuls + nOrds)
                                    ]
                           else empty
              makeNumb name = inTags False "NumberingFormat" [("type", "string")] (text name)
              numbForm | isInfixOf lowerRomanName s = makeNumb "i, ii, iii, iv..."
                       | isInfixOf upperRomanName s = makeNumb "I, II, III, IV..."
                       | isInfixOf lowerAlphaName s = makeNumb "a, b, c, d..."
                       | isInfixOf upperAlphaName s = makeNumb "A, B, C, D..."
                       | otherwise = empty
      in inTags True "ParagraphStyle" ([("Self", "ParagraphStyle/"++s), ("Name", s)] ++ attrs') props
-- | Convert a WriterState with its inline styles to the ICML listing of Character Styles.
charStylesToDoc :: WriterState -> Doc
charStylesToDoc st = vcat $ map makeStyle $ Set.toAscList $ inlineStyles st
  where
    -- one <CharacterStyle> element per accumulated style combination @s@
    makeStyle s =
      let attrs = concat $ map (contains s) [
                      (strikeoutName, ("StrikeThru", "true"))
                    , (superscriptName, ("Position", "Superscript"))
                    , (subscriptName, ("Position", "Subscript"))
                    , (smallCapsName, ("Capitalization", "SmallCaps"))
                    ]
          -- bold/italic collapse into a single FontStyle attribute
          attrs' | isInfixOf emphName s && isInfixOf strongName s = ("FontStyle", "Bold Italic") : attrs
                 | isInfixOf strongName s = ("FontStyle", "Bold") : attrs
                 | isInfixOf emphName s = ("FontStyle", "Italic") : attrs
                 | otherwise = attrs
          props = inTags True "Properties" [] $
                    inTags False "BasedOn" [("type", "object")] (text "$ID/NormalCharacterStyle") $$ font
            where
              font =
                if isInfixOf codeName s
                   then monospacedFont
                   else empty
      in inTags True "CharacterStyle" ([("Self", "CharacterStyle/"++s), ("Name", s)] ++ attrs') props
-- | Escape every colon character as \"%3a\" (hyperlink destination names
--   containing more than one colon crash InDesign CS6).
escapeColons :: String -> String
escapeColons = concatMap escape
  where
    escape ':' = "%3a"
    escape c   = [c]
-- | Convert a list of (identifier, url) pairs to the ICML listing of hyperlinks.
-- Each pair yields a destination element plus a hyperlink element that
-- references it via the "htss-<ident>" source id emitted by 'inlineToICML'.
hyperlinksToDoc :: Hyperlink -> Doc
hyperlinksToDoc [] = empty
hyperlinksToDoc (x:xs) = hyp x $$ hyperlinksToDoc xs
  where
    hyp (ident, url) = hdest $$ hlink
      where
        hdest = selfClosingTag "HyperlinkURLDestination"
                  [("Self", "HyperlinkURLDestination/"++(escapeColons url)), ("Name","link"), ("DestinationURL",url), ("DestinationUniqueKey","1")] -- HyperlinkURLDestination with more than one colon crashes CS6
        hlink = inTags True "Hyperlink" [("Self","uf-"++show ident), ("Name",url),
                    ("Source","htss-"++show ident), ("Visible","true"), ("DestinationUniqueKey","1")]
                  $ inTags True "Properties" []
                  $ inTags False "BorderColor" [("type","enumeration")] (text "Black")
                  $$ (inTags False "Destination" [("type","object")]
                      $ text $ "HyperlinkURLDestination/"++(escapeColons (escapeStringForXML url))) -- HyperlinkURLDestination with more than one colon crashes CS6
-- | Convert a list of Pandoc blocks to ICML, stacking the results vertically.
blocksToICML :: WriterOptions -> Style -> [Block] -> WS Doc
blocksToICML opts style lst = vcat `fmap` mapM (blockToICML opts style) lst
-- | Convert a Pandoc block element to ICML. Container blocks push their
-- style name onto the 'Style' stack before recursing, so nested styles
-- accumulate into combination names (see 'parStyle').
blockToICML :: WriterOptions -> Style -> Block -> WS Doc
blockToICML opts style (Plain lst) = parStyle opts style lst
blockToICML opts style (Para lst) = parStyle opts (paragraphName:style) lst
blockToICML opts style (CodeBlock _ str) = parStyle opts (codeBlockName:style) $ [Str str]
blockToICML _ _ (RawBlock f str)
  | f == Format "icml" = return $ text str
  | otherwise = return empty
blockToICML opts style (BlockQuote blocks) = blocksToICML opts (blockQuoteName:style) blocks
blockToICML opts style (OrderedList attribs lst) = listItemsToICML opts orderedListName style (Just attribs) lst
blockToICML opts style (BulletList lst) = listItemsToICML opts bulletListName style Nothing lst
blockToICML opts style (DefinitionList lst) = vcat `fmap` mapM (definitionListItemToICML opts style) lst
blockToICML opts style (Header lvl _ lst) =
  let stl = (headerName ++ show lvl):style
  in parStyle opts stl lst
blockToICML _ _ HorizontalRule = return empty -- we could insert a page break instead
blockToICML opts style (Table caption aligns widths headers rows) =
  let style' = tableName : style
      noHeader = all null headers
      nrHeaders = if noHeader
                     then "0"
                     else "1"
      nrRows = length rows
      nrCols = if null rows
                  then 0
                  else length $ head rows
      -- rows and cells are emitted recursively, threading the row/column
      -- numbers used for the Cell "Name" attributes
      rowsToICML [] _ = return empty
      rowsToICML (col:rest) rowNr =
        liftM2 ($$) (colsToICML col rowNr (0::Int)) $ rowsToICML rest (rowNr+1)
      colsToICML [] _ _ = return empty
      colsToICML (cell:rest) rowNr colNr = do
        let stl = if rowNr == 0 && not noHeader
                     then tableHeaderName:style'
                     else style'
            -- NOTE(review): assumes 'aligns' has one entry per column — the
            -- (!!) would crash on a ragged table; TODO confirm Pandoc
            -- guarantees this invariant
            alig = aligns !! colNr
            stl' | alig == AlignLeft = alignLeftName : stl
                 | alig == AlignRight = alignRightName : stl
                 | alig == AlignCenter = alignCenterName : stl
                 | otherwise = stl
        c <- blocksToICML opts stl' cell
        let cl = return $ inTags True "Cell"
                   [("Name", show colNr ++":"++ show rowNr), ("AppliedCellStyle","CellStyle/Cell")] c
        liftM2 ($$) cl $ colsToICML rest rowNr (colNr+1)
  in do
      let tabl = if noHeader
                    then rows
                    else headers:rows
      cells <- rowsToICML tabl (0::Int)
      let colWidths w = if w > 0
                           then [("SingleColumnWidth",show $ 500 * w)]
                           else []
      let tupToDoc tup = selfClosingTag "Column" $ [("Name",show $ fst tup)] ++ (colWidths $ snd tup)
      let colDescs = vcat $ map tupToDoc $ zip [0..nrCols-1] widths
      let tableDoc = return $ inTags True "Table" [
                         ("AppliedTableStyle","TableStyle/Table")
                       , ("HeaderRowCount", nrHeaders)
                       , ("BodyRowCount", show nrRows)
                       , ("ColumnCount", show nrCols)
                       ] (colDescs $$ cells)
      liftM2 ($$) tableDoc $ parStyle opts (tableCaptionName:style) caption
blockToICML opts style (Div _ lst) = blocksToICML opts style lst
blockToICML _ _ Null = return empty
-- | Convert a list of lists of blocks to ICML list items.
-- Bumps 'listDepth' for the duration of this list, records the deepest
-- nesting seen in 'maxListDepth', and resets the depth afterwards.
listItemsToICML :: WriterOptions -> String -> Style -> Maybe ListAttributes -> [[Block]] -> WS Doc
listItemsToICML _ _ _ _ [] = return empty
listItemsToICML opts listType style attribs (first:rest) = do
    st <- get
    put st{ listDepth = 1 + listDepth st}
    let stl = listType:style
    -- only the first item gets the "first" marker (numbering restart)
    let f = listItemToICML opts stl True attribs first
    let r = map (listItemToICML opts stl False attribs) rest
    docs <- sequence $ f:r
    s <- get
    let maxD = max (maxListDepth s) (listDepth s)
    put s{ listDepth = 1, maxListDepth = maxD }
    return $ vcat docs
-- | Convert a list of blocks to ICML list items.
-- The first item of a list carries extra markers: the "first" name plus
-- a numbering style / start value derived from the list attributes.
listItemToICML :: WriterOptions -> Style -> Bool-> Maybe ListAttributes -> [Block] -> WS Doc
listItemToICML opts style isFirst attribs item =
  let makeNumbStart (Just (beginsWith, numbStl, _)) =
        let doN DefaultStyle = []
            doN LowerRoman = [lowerRomanName]
            doN UpperRoman = [upperRomanName]
            doN LowerAlpha = [lowerAlphaName]
            doN UpperAlpha = [upperAlphaName]
            doN _ = []
            bw = if beginsWith > 1
                    then [beginsWithName ++ show beginsWith]
                    else []
        in doN numbStl ++ bw
      makeNumbStart Nothing = []
      stl = if isFirst
               then firstListItemName:style
               else style
      stl' = makeNumbStart attribs ++ stl
  in if length item > 1
        then do
          -- continuation paragraphs of a multi-block item are indented with
          -- a tab and tagged as sub-paragraphs so numbering doesn't restart
          let insertTab (Para lst) = blockToICML opts (subListParName:style) $ Para $ (Str "\t"):lst
              insertTab block = blockToICML opts style block
          f <- blockToICML opts stl' $ head item
          r <- fmap vcat $ mapM insertTab $ tail item
          return $ f $$ r
        else blocksToICML opts stl' item
-- | Convert one definition-list item (a term and its definitions) to ICML:
-- the term paragraph followed by the rendered definition blocks.
definitionListItemToICML :: WriterOptions -> Style -> ([Inline],[[Block]]) -> WS Doc
definitionListItemToICML opts style (term,defs) = do
    term' <- parStyle opts (defListTermName:style) term
    defs' <- vcat `fmap` mapM (blocksToICML opts (defListDefName:style)) defs
    return $ term' $$ defs'
-- | Convert a list of inline elements to ICML.
-- Spaces are first merged into adjacent Strs (see 'mergeSpaces') so that
-- runs of text end up in a single Content element.
inlinesToICML :: WriterOptions -> Style -> [Inline] -> WS Doc
inlinesToICML opts style lst = vcat `fmap` mapM (inlineToICML opts style) (mergeSpaces lst)
-- | Convert an inline element to ICML. Formatting inlines push their
-- character-style name onto the 'Style' stack and recurse.
inlineToICML :: WriterOptions -> Style -> Inline -> WS Doc
inlineToICML _ style (Str str) = charStyle style $ text $ escapeStringForXML str
inlineToICML opts style (Emph lst) = inlinesToICML opts (emphName:style) lst
inlineToICML opts style (Strong lst) = inlinesToICML opts (strongName:style) lst
inlineToICML opts style (Strikeout lst) = inlinesToICML opts (strikeoutName:style) lst
inlineToICML opts style (Superscript lst) = inlinesToICML opts (superscriptName:style) lst
inlineToICML opts style (Subscript lst) = inlinesToICML opts (subscriptName:style) lst
inlineToICML opts style (SmallCaps lst) = inlinesToICML opts (smallCapsName:style) lst
inlineToICML opts style (Quoted SingleQuote lst) = inlinesToICML opts style $ [Str "‘"] ++ lst ++ [Str "’"]
inlineToICML opts style (Quoted DoubleQuote lst) = inlinesToICML opts style $ [Str "“"] ++ lst ++ [Str "”"]
inlineToICML opts style (Cite _ lst) = inlinesToICML opts style lst
inlineToICML _ style (Code _ str) = charStyle (codeName:style) $ text $ escapeStringForXML str
inlineToICML _ style Space = charStyle style space
inlineToICML _ style LineBreak = charStyle style $ text lineSeparator
inlineToICML _ style (Math _ str) = charStyle style $ text $ escapeStringForXML str --InDesign doesn't really do math
inlineToICML _ _ (RawInline f str)
  | f == Format "icml" = return $ text str
  | otherwise = return empty
inlineToICML opts style (Link lst (url, title)) = do
  content <- inlinesToICML opts (linkName:style) lst
  state $ \st ->
    -- allocate the next link identifier; 'links' is kept newest-first,
    -- so the head carries the highest id handed out so far
    let ident = if null $ links st
                   then 1::Int
                   else 1 + (fst $ head $ links st)
        newst = st{ links = (ident, url):(links st) }
        cont = inTags True "HyperlinkTextSource"
                 [("Self","htss-"++show ident), ("Name",title), ("Hidden","false")] content
    in (cont, newst)
inlineToICML opts style (Image alt target) = imageICML opts style alt target
inlineToICML opts style (Note lst) = footnoteToICML opts style lst
inlineToICML opts style (Span _ lst) = inlinesToICML opts style lst
-- | Convert a list of block elements to an ICML footnote: a superscript
-- auto-number marker (the <?ACE 4?> processing instruction) followed by
-- the footnote's paragraphs, each prefixed with a tab.
footnoteToICML :: WriterOptions -> Style -> [Block] -> WS Doc
footnoteToICML opts style lst =
  let insertTab (Para ls) = blockToICML opts (footnoteName:style) $ Para $ (Str "\t"):ls
      insertTab block = blockToICML opts (footnoteName:style) block
  in do
    contents <- mapM insertTab lst
    let number = inTags True "ParagraphStyleRange" [] $
                   inTags True "CharacterStyleRange" [] $ inTagsSimple "Content" "<?ACE 4?>"
    return $ inTags True "CharacterStyleRange"
        [("AppliedCharacterStyle","$ID/NormalCharacterStyle"), ("Position","Superscript")]
      $ inTags True "Footnote" [] $ number $$ vcat contents
-- | Merge 'Space' elements into the adjacent 'Str's so that consecutive
--   words end up inside a single string (and hence a single Content tag).
--   The match order mirrors the original clauses: a space between two
--   strings joins them; otherwise a space is glued to its neighbour.
mergeSpaces :: [Inline] -> [Inline]
mergeSpaces inlines =
  case inlines of
    Str a : Space : Str b : rest -> mergeSpaces (Str (a ++ " " ++ b) : rest)
    Space : Str a : rest         -> mergeSpaces (Str (" " ++ a) : rest)
    Str a : Space : rest         -> mergeSpaces (Str (a ++ " ") : rest)
    x : rest                     -> x : mergeSpaces rest
    []                           -> []
-- | Wrap a list of inline elements in an ICML Paragraph Style range and
-- record the combined style name in the writer state so that
-- 'parStylesToDoc' can emit its definition later.
parStyle :: WriterOptions -> Style -> [Inline] -> WS Doc
parStyle opts style lst =
  let slipIn x y = if null y
                      then x
                      else x ++ " > " ++ y
      -- the style stack is innermost-first; the reverse + fold renders it
      -- outermost-first as "Outer > Inner"
      stlStr = foldr slipIn [] $ reverse style
      stl = if null stlStr
               then ""
               else "ParagraphStyle/" ++ stlStr
      attrs = ("AppliedParagraphStyle", stl)
      -- the first item of a list restarts numbering, possibly at an
      -- explicit value extracted from a "beginsWith-<n>" marker
      attrs' = if firstListItemName `elem` style
                  then let ats = attrs : [("NumberingContinue", "false")]
                           begins = filter (isPrefixOf beginsWithName) style
                       in if null begins
                             then ats
                             else let i = maybe "" id $ stripPrefix beginsWithName $ head begins
                                  in ("NumberingStartAt", i) : ats
                  else [attrs]
  in do
      content <- inlinesToICML opts [] lst
      let cont = inTags True "ParagraphStyleRange" attrs'
                   $ mappend content $ selfClosingTag "Br" []
      state $ \st -> (cont, st{ blockStyles = Set.insert stlStr $ blockStyles st })
-- | Wrap a Doc in an ICML Character Style range and record the style name
-- in the writer state (unless it is empty, i.e. the default style).
charStyle :: Style -> Doc -> WS Doc
charStyle style content =
  let (stlStr, attrs) = styleToStrAttr style
      doc = inTags True "CharacterStyleRange" attrs $ inTagsSimple "Content" $ flush content
  in do
      state $ \st ->
        let styles = if null stlStr
                        then st
                        else st{ inlineStyles = Set.insert stlStr $ inlineStyles st }
        in (doc, styles)
-- | Transform a Style into its canonical name (duplicates removed, names
--   sorted) paired with the AppliedCharacterStyle attribute referencing it;
--   an empty style maps to InDesign's normal character style.
styleToStrAttr :: Style -> (String, [(String, String)])
styleToStrAttr style = (stlStr, [("AppliedCharacterStyle", applied)])
  where
    stlStr = unwords $ Set.toAscList $ Set.fromList style
    applied
      | null style = "$ID/NormalCharacterStyle"
      | otherwise  = "CharacterStyle/" ++ stlStr
-- | Assemble an ICML Image: a rectangle frame whose path geometry is built
-- from quarter-width/height corner points, containing the linked image.
-- The image dimensions are currently hard-coded (see the TODOs below).
imageICML :: WriterOptions -> Style -> [Inline] -> Target -> WS Doc
imageICML _ style _ (linkURI, _) =
  let imgWidth = 300::Int --TODO: set width, height dynamically as in Docx.hs
      imgHeight = 200::Int
      scaleFact = show (1::Double) --TODO: set scaling factor so image is scaled exactly to imgWidth x imgHeight
      -- half and quarter extents, pre-rendered as strings for the transforms
      hw = show $ imgWidth `div` 2
      hh = show $ imgHeight `div` 2
      qw = show $ imgWidth `div` 4
      qh = show $ imgHeight `div` 4
      uriPrefix = if isURI linkURI then "" else "file:"
      (stlStr, attrs) = styleToStrAttr style
      props = inTags True "Properties" [] $ inTags True "PathGeometry" []
                $ inTags True "GeometryPathType" [("PathOpen","false")]
                $ inTags True "PathPointArray" []
                $ vcat [
                    selfClosingTag "PathPointType" [("Anchor", "-"++qw++" -"++qh),
                      ("LeftDirection", "-"++qw++" -"++qh), ("RightDirection", "-"++qw++" -"++qh)]
                  , selfClosingTag "PathPointType" [("Anchor", "-"++qw++" "++qh),
                      ("LeftDirection", "-"++qw++" "++qh), ("RightDirection", "-"++qw++" "++qh)]
                  , selfClosingTag "PathPointType" [("Anchor", qw++" "++qh),
                      ("LeftDirection", qw++" "++qh), ("RightDirection", qw++" "++qh)]
                  , selfClosingTag "PathPointType" [("Anchor", qw++" -"++qh),
                      ("LeftDirection", qw++" -"++qh), ("RightDirection", qw++" -"++qh)]
                  ]
      image = inTags True "Image"
                [("Self","ue6"), ("ItemTransform", scaleFact++" 0 0 "++scaleFact++" -"++qw++" -"++qh)]
                $ vcat [
                    inTags True "Properties" [] $ inTags True "Profile" [("type","string")] $ text "$ID/Embedded"
                      $$ selfClosingTag "GraphicBounds" [("Left","0"), ("Top","0"), ("Right", hw), ("Bottom", hh)]
                  , selfClosingTag "Link" [("Self", "ueb"), ("LinkResourceURI", uriPrefix++linkURI)]
                  ]
      doc = inTags True "CharacterStyleRange" attrs
              $ inTags True "Rectangle" [("Self","uec"), ("ItemTransform", "1 0 0 1 "++qw++" -"++qh)]
              $ (props $$ image)
  in do
      state $ \st -> (doc, st{ inlineStyles = Set.insert stlStr $ inlineStyles st } )
| alexvong1995/pandoc | src/Text/Pandoc/Writers/ICML.hs | gpl-2.0 | 25,478 | 0 | 23 | 7,553 | 7,199 | 3,810 | 3,389 | 448 | 11 |
{-# LANGUAGE ScopedTypeVariables#-}
module Main where
import OpenCog.AtomSpace
import OpenCog.Lojban
import Control.Exception
import Control.Monad
import Control.Parallel.Strategies
import System.Exit (exitFailure,exitSuccess)
-- | Parse every sentence in @data.txt@ and exit successfully only when
-- all of them parse.
main :: IO ()
main = do
    putStrLn "Starting Test"
    -- NOTE(review): 'printer' is bound but unused; only parsing is
    -- exercised here (the round-trip check lives in 'pptest').
    (parser,printer) <- initParserPrinter "cmavo.csv" "gismu.csv"
    sentences <- loadData
    -- NOTE(review): parMap only sparks the IO actions to WHNF before they
    -- are sequenced; confirm this actually parallelises the parsing work.
    let parsed = parMap rpar (ptest parser) sentences
    testRes <- sequence parsed
    let testResF = filter id testRes
    putStrLn $
        "Of " ++ show (length sentences) ++ " sentences " ++
        show (length testResF) ++ " have been parsed/printed successfully."
    if length testResF == length sentences
        then exitSuccess
        else exitFailure
-- | Try to parse a single sentence; on failure print the sentence and the
--   parser's error message. Returns whether parsing succeeded.
ptest :: (String -> Either String Atom) -> String -> IO Bool
ptest parser text =
    either reportFailure (const (return True)) (parser text)
  where
    reportFailure err = do
        print text
        putStrLn err
        return False
-- | Round-trip test: parse a sentence, print the parse back, and check the
--   printed text matches the original input. On any failure the offending
--   sentence and a diagnostic are reported.
pptest :: (String -> Either String Atom) -> (Atom -> Either String String) -> String -> IO Bool
pptest parser printer text =
    case parser text of
        -- report the actual parser error; previously the bound error was
        -- discarded and a bare "False" was printed instead
        Left e -> print text >> putStrLn e >> return False
        Right res ->
            case printer res of
                -- likewise, surface the printer's error message
                Left e -> print text >> putStrLn e >> return False
                Right ptext ->
                    if ptext == text
                        then return True
                        else print text >> print ptext >> return False
-- | Read the newline-separated test corpus from @data.txt@.
loadData :: IO [String]
loadData = fmap lines (readFile "data.txt")
-- | Whether an 'Either' holds a 'Right' value.
isRight :: Either a b -> Bool
isRight = either (const False) (const True)
| misgeatgit/opencog | opencog/nlp/lojban/HaskellLib/test/Main.hs | agpl-3.0 | 1,614 | 0 | 14 | 469 | 522 | 250 | 272 | 45 | 4 |
{-# LANGUAGE FlexibleInstances, PatternGuards, ScopedTypeVariables, TypeSynonymInstances #-}
-- | Route an incoming 'Request' to a handler. For more in-depth documentation see this section of the Happstack Crash Course: <http://happstack.com/docs/crashcourse/RouteFilters.html>
module Happstack.Server.Routing
( -- * Route by scheme
http
, https
-- * Route by request method
, methodM
, methodOnly
, methodSP
, method
, MatchMethod(..)
-- * Route by pathInfo
, dir
, dirs
, nullDir
, trailingSlash
, noTrailingSlash
, anyPath
, path
, uriRest
-- * Route by host
, host
, withHost
-- * Route by (Request -> Bool)
, guardRq
) where
import Control.Monad (MonadPlus(mzero), unless)
import Data.List (isSuffixOf)
import qualified Data.ByteString.Char8 as B
import Happstack.Server.Monads (ServerMonad(..))
import Happstack.Server.Types (Request(..), Method(..), FromReqURI(..), getHeader, rqURL)
import System.FilePath (makeRelative, splitDirectories)
-- | instances of this class provide a variety of ways to match on the 'Request' method.
--
-- Examples:
--
-- > method GET -- match GET or HEAD
-- > method [GET, POST] -- match GET, HEAD or POST
-- > method HEAD -- match HEAD /but not/ GET
-- > method (== GET) -- match GET or HEAD
-- > method (not . (==) DELETE) -- match any method except DELETE
-- > method () -- match any method
--
-- As you can see, GET implies that HEAD should match as well. This is to
-- make it harder to write an application that uses HTTP incorrectly.
-- Happstack handles HEAD requests automatically, but we still need to make
-- sure our handlers don't mismatch or a HEAD will result in a 404.
--
-- If you must, you can still do something like this
-- to match GET without HEAD:
--
-- > guardRq ((== GET) . rqMethod)
-- | A method matcher: a single 'Method', a list, a predicate, or @()@.
class MatchMethod m where
    matchMethod :: m -> Method -> Bool
-- a single Method delegates to the predicate instance, so GET implies
-- that HEAD matches too (see below)
instance MatchMethod Method where
    matchMethod m = matchMethod (== m)
-- a list matches if any of its elements does
instance MatchMethod [Method] where
    matchMethod ms m = any (`matchMethod` m) ms
-- a predicate; HEAD additionally matches whenever GET would, so GET
-- handlers answer HEAD requests as well
instance MatchMethod (Method -> Bool) where
    matchMethod f HEAD = f HEAD || f GET
    matchMethod f m = f m
-- the unit matcher accepts every method
instance MatchMethod () where
    matchMethod () _ = True
-------------------------------------
-- guards
-- | Guard using an arbitrary predicate on the incoming 'Request':
--   succeeds (with @()@) when the predicate holds, fails with 'mzero'
--   otherwise.
guardRq :: (ServerMonad m, MonadPlus m) => (Request -> Bool) -> m ()
guardRq p = askRq >>= \rq -> if p rq then return () else mzero
-- | guard which checks that an insecure connection was made via http:\/\/
--
-- Example:
--
-- > handler :: ServerPart Response
-- > handler =
-- > do http
-- > ...
http :: (ServerMonad m, MonadPlus m) => m ()
http = guardRq (not . rqSecure)  -- succeeds only for plain (non-TLS) requests
-- | guard which checks that a secure connection was made via https:\/\/
--
-- Example:
--
-- > handler :: ServerPart Response
-- > handler =
-- > do https
-- > ...
https :: (ServerMonad m, MonadPlus m) => m ()
https = guardRq rqSecure  -- succeeds only when the request arrived over TLS
-- | Guard against the request method only; unlike 'methodM' it does not
-- require the remaining path to be empty.
--
-- Example:
--
-- > handler :: ServerPart Response
-- > handler =
-- >     do method [GET, HEAD]
-- >        ...
method :: (ServerMonad m, MonadPlus m, MatchMethod method) => method -> m ()
method meth = guardRq $ \rq -> matchMethod meth (rqMethod rq)
-- | Guard against the method. This function also guards against
-- *any remaining path segments*. See 'method' for the version
-- that guards only by method.
--
-- Example:
--
-- > handler :: ServerPart Response
-- > handler =
-- > do methodM [GET, HEAD]
-- > ...
--
-- NOTE: This function is largely retained for backwards
-- compatibility. The fact that implicitly calls 'nullDir' is often
-- forgotten and leads to confusion. It is probably better to just use
-- 'method' and call 'nullDir' explicitly.
--
-- This function will likely be deprecated in the future.
methodM :: (ServerMonad m, MonadPlus m, MatchMethod method) => method -> m ()
methodM meth = methodOnly meth >> nullDir  -- match the method, then require the path to be fully consumed
-- | Guard against the method only (as opposed to 'methodM').
--
-- Example:
--
-- > handler :: ServerPart Response
-- > handler =
-- > do methodOnly [GET, HEAD]
-- > ...
methodOnly :: (ServerMonad m, MonadPlus m, MatchMethod method) => method -> m ()
methodOnly = method  -- retained for backwards compatibility only
{-# DEPRECATED methodOnly "this function is just an alias for method now" #-}
-- | Guard against the method. Note, this function also guards against
-- any remaining path segments. Similar to 'methodM' but with a different type signature.
--
-- Example:
--
-- > handler :: ServerPart Response
-- > handler = methodSP [GET, HEAD] $ subHandler
--
-- NOTE: This style of combinator is going to be deprecated in the
-- future. It is better to just use 'method'.
--
-- > handler :: ServerPart Response
-- > handler = method [GET, HEAD] >> nullDir >> subHandler
{-# DEPRECATED methodSP "use method instead." #-}
methodSP :: (ServerMonad m, MonadPlus m, MatchMethod method) => method -> m b-> m b
methodSP m handle = methodM m >> handle  -- i.e. also requires the remaining path to be empty
-- | guard which only succeeds if there are no remaining path segments
--
-- Often used if you want to explicitly assign a route for '/'
--
nullDir :: (ServerMonad m, MonadPlus m) => m ()
nullDir = guardRq (null . rqPaths)  -- succeed only when no path segments remain
-- | Pop a path element and run the supplied handler if it matches the
-- given string.
--
-- > handler :: ServerPart Response
-- > handler = dir "foo" $ dir "bar" $ subHandler
--
-- The path element can not contain \'/\'. See also 'dirs'.
dir :: (ServerMonad m, MonadPlus m) => String -> m a -> m a
dir staticPath handle =
    do
      rq <- askRq
      case rqPaths rq of
        -- consume the matched segment and run the handler on the shortened path
        (p:xs) | p == staticPath -> localRq (\newRq -> newRq{rqPaths = xs}) handle
        _ -> mzero
-- | Guard against a 'FilePath'. Unlike 'dir' the 'FilePath' may
-- contain \'/\'. If the guard succeeds, the matched elements will be
-- popped from the directory stack.
--
-- > dirs "foo/bar" $ ...
--
-- See also: 'dir'.
dirs :: (ServerMonad m, MonadPlus m) => FilePath -> m a -> m a
-- split the (relative) path into segments and nest one 'dir' guard per segment
dirs fp m = foldr dir m (splitDirectories (makeRelative "/" fp))
-- | Guard against the host.
--
-- This matches against the @host@ header specified in the incoming 'Request'.
--
-- Can be used to support virtual hosting, <http://en.wikipedia.org/wiki/Virtual_hosting>
--
-- Note that this matches against the value of the @Host@ header which may include the port number.
--
-- <http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.23>
--
-- see also: 'withHost'
host :: (ServerMonad m, MonadPlus m) => String -> m a -> m a
host desiredHost handle =
    do rq <- askRq
       case getHeader "host" rq of
         -- exact string comparison against the raw header value, which may
         -- therefore include a port number
         (Just hostBS) | desiredHost == B.unpack hostBS -> handle
         _ -> mzero
-- | Lookup the @host@ header in the incoming request and pass it to the handler.
--
-- see also: 'host'
-- | Look up the @host@ header of the incoming request and hand its value
--   to the handler; fails with 'mzero' when the header is absent.
withHost :: (ServerMonad m, MonadPlus m) => (String -> m a) -> m a
withHost handle = do
    rq <- askRq
    maybe mzero (handle . B.unpack) (getHeader "host" rq)
-- | Pop a path element and parse it using the 'fromReqURI' in the
-- 'FromReqURI' class.
path :: (FromReqURI a, MonadPlus m, ServerMonad m) => (a -> m b) -> m b
path handle = do
    rq <- askRq
    case rqPaths rq of
        -- pop the segment only when it parses; otherwise this handler fails
        (p:xs) | Just a <- fromReqURI p
                            -> localRq (\newRq -> newRq{rqPaths = xs}) (handle a)
        _ -> mzero
-- | Grab the rest of the URL (dirs + query) and passes it to your
-- handler.
uriRest :: (ServerMonad m) => (String -> m a) -> m a
uriRest handle = do
    rq <- askRq
    handle (rqURL rq)
-- | Pop any path element and run the handler.
--
-- Succeeds if a path component was popped. Fails is the remaining path was empty.
anyPath :: (ServerMonad m, MonadPlus m) => m r -> m r
anyPath x = path $ (\(_::String) -> x)  -- the String annotation pins the FromReqURI instance
-- | Guard which checks that the Request URI ends in @\'\/\'@. Useful
-- for distinguishing between @foo@ and @foo/@
--
-- Total: an empty 'rqUri' simply fails the guard (the previous
-- implementation used the partial 'last', which crashes on @\"\"@).
trailingSlash :: (ServerMonad m, MonadPlus m) => m ()
trailingSlash = guardRq $ \rq -> "/" `isSuffixOf` rqUri rq
-- | The opposite of 'trailingSlash': succeeds when the Request URI does
-- not end in @\'\/\'@. Total: an empty 'rqUri' counts as having no
-- trailing slash instead of crashing (the previous implementation used
-- the partial 'last').
noTrailingSlash :: (ServerMonad m, MonadPlus m) => m ()
noTrailingSlash = guardRq $ \rq -> not ("/" `isSuffixOf` rqUri rq)
| erantapaa/happstack-server | src/Happstack/Server/Routing.hs | bsd-3-clause | 8,381 | 0 | 15 | 1,978 | 1,553 | 873 | 680 | 95 | 2 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/ByteString/Builder/Prim.hs" #-}
{-# LANGUAGE CPP, BangPatterns, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# LANGUAGE Trustworthy #-}
{- | Copyright : (c) 2010-2011 Simon Meier
(c) 2010 Jasper van der Jeugt
License : BSD3-style (see LICENSE)
Maintainer : Simon Meier <iridcode@gmail.com>
Portability : GHC
This module provides 'Builder' /primitives/, which are lower level building
blocks for constructing 'Builder's. You don't need to go down to this level but
it can be slightly faster.
Morally, builder primitives are like functions @a -> Builder@, that is they
take a value and encode it as a sequence of bytes, represented as a 'Builder'.
Of course their implementation is a bit more specialised.
Builder primitives come in two forms: fixed-size and bounded-size.
* /Fixed(-size) primitives/ are builder primitives that always result in a
sequence of bytes of a fixed length. That is, the length is independent of
the value that is encoded. An example of a fixed size primitive is the
big-endian encoding of a 'Word64', which always results in exactly 8 bytes.
* /Bounded(-size) primitives/ are builder primitives that always result in a
sequence of bytes that is no larger than a predetermined bound. That is, the
bound is independent of the value that is encoded but the actual length will
depend on the value. An example for a bounded primitive is the UTF-8 encoding
of a 'Char', which can be 1,2,3 or 4 bytes long, so the bound is 4 bytes.
Note that fixed primitives can be considered as a special case of bounded
primitives, and we can lift from fixed to bounded.
Because bounded primitives are the more general case, in this documentation we
only refer to fixed size primitives where it matters that the resulting
sequence of bytes is of a fixed length. Otherwise, we just refer to bounded
size primitives.
The purpose of using builder primitives is to improve the performance of
'Builder's. These improvements stem from making the two most common steps
performed by a 'Builder' more efficient. We explain these two steps in turn.
The first most common step is the concatenation of two 'Builder's. Internally,
concatenation corresponds to function composition. (Note that 'Builder's can
be seen as difference-lists of buffer-filling functions; cf.
<http://hackage.haskell.org/cgi-bin/hackage-scripts/package/dlist>. )
Function composition is a fast /O(1)/ operation. However, we can use bounded
primitives to remove some of these function compositions altogether, which is
more efficient.
The second most common step performed by a 'Builder' is to fill a buffer using
a bounded primitives, which works as follows. The 'Builder' checks whether
there is enough space left to execute the bounded primitive. If there is, then
the 'Builder' executes the bounded primitive and calls the next 'Builder' with
the updated buffer. Otherwise, the 'Builder' signals its driver that it
requires a new buffer. This buffer must be at least as large as the bound of
the primitive. We can use bounded primitives to reduce the number of
buffer-free checks by fusing the buffer-free checks of consecutive 'Builder's.
We can also use bounded primitives to simplify the control flow for signalling
that a buffer is full by ensuring that we check first that there is enough
space left and only then decide on how to encode a given value.
Let us illustrate these improvements on the CSV-table rendering example from
"Data.ByteString.Builder". Its \"hot code\" is the rendering of a table's
cells, which we implement as follows using only the functions from the
'Builder' API.
@
import "Data.ByteString.Builder" as B
renderCell :: Cell -> Builder
renderCell (StringC cs) = renderString cs
renderCell (IntC i) = B.intDec i
renderString :: String -> Builder
renderString cs = B.charUtf8 \'\"\' \<\> foldMap escape cs \<\> B.charUtf8 \'\"\'
where
escape \'\\\\\' = B.charUtf8 \'\\\\\' \<\> B.charUtf8 \'\\\\\'
escape \'\\\"\' = B.charUtf8 \'\\\\\' \<\> B.charUtf8 \'\\\"\'
escape c = B.charUtf8 c
@
Efficient encoding of 'Int's as decimal numbers is performed by @intDec@.
Optimization potential exists for the escaping of 'String's. The above
implementation has two optimization opportunities. First, the buffer-free
checks of the 'Builder's for escaping double quotes and backslashes can be
fused. Second, the concatenations performed by 'foldMap' can be eliminated.
The following implementation exploits these optimizations.
@
import qualified Data.ByteString.Builder.Prim as P
import Data.ByteString.Builder.Prim
( 'condB', 'liftFixedToBounded', ('>*<'), ('>$<') )
renderString :: String -\> Builder
renderString cs =
B.charUtf8 \'\"\' \<\> E.'encodeListWithB' escape cs \<\> B.charUtf8 \'\"\'
where
escape :: E.'BoundedPrim' Char
escape =
'condB' (== \'\\\\\') (fixed2 (\'\\\\\', \'\\\\\')) $
'condB' (== \'\\\"\') (fixed2 (\'\\\\\', \'\\\"\')) $
E.'charUtf8'
 
{-\# INLINE fixed2 \#-}
fixed2 x = 'liftFixedToBounded' $ const x '>$<' E.'char7' '>*<' E.'char7'
@
The code should be mostly self-explanatory. The slightly awkward syntax is
because the combinators are written such that the size-bound of the resulting
'BoundedPrim' can be computed at compile time. We also explicitly inline the
'fixed2' primitive, which encodes a fixed tuple of characters, to ensure that
the bound computation happens at compile time. When encoding the following list
of 'String's, the optimized implementation of 'renderString' is two times
faster.
@
maxiStrings :: [String]
maxiStrings = take 1000 $ cycle [\"hello\", \"\\\"1\\\"\", \"λ-wörld\"]
@
Most of the performance gain stems from using 'primMapListBounded', which
encodes a list of values from left-to-right with a 'BoundedPrim'. It exploits
the 'Builder' internals to avoid unnecessary function compositions (i.e.,
concatenations). In the future, we might expect the compiler to perform the
optimizations implemented in 'primMapListBounded'. However, it seems that the
code is currently too complicated for the compiler to see through. Therefore, we
provide the 'BoundedPrim' escape hatch, which allows data structures to provide
very efficient encoding traversals, like 'primMapListBounded' for lists.
Note that 'BoundedPrim's are a bit verbose, but quite versatile. Here is an
example of a 'BoundedPrim' for combined HTML escaping and UTF-8 encoding. It
exploits that the escaped character with the maximal Unicode codepoint is \'>\'.
@
{-\# INLINE charUtf8HtmlEscaped \#-}
charUtf8HtmlEscaped :: E.BoundedPrim Char
charUtf8HtmlEscaped =
'condB' (> \'\>\' ) E.'charUtf8' $
'condB' (== \'\<\' ) (fixed4 (\'&\',(\'l\',(\'t\',\';\')))) $ -- <
'condB' (== \'\>\' ) (fixed4 (\'&\',(\'g\',(\'t\',\';\')))) $ -- >
'condB' (== \'&\' ) (fixed5 (\'&\',(\'a\',(\'m\',(\'p\',\';\'))))) $ -- &
'condB' (== \'\"\' ) (fixed5 (\'&\',(\'\#\',(\'3\',(\'4\',\';\'))))) $ -- &\#34;
'condB' (== \'\\\'\') (fixed5 (\'&\',(\'\#\',(\'3\',(\'9\',\';\'))))) $ -- &\#39;
('liftFixedToBounded' E.'char7') -- fallback for 'Char's smaller than \'\>\'
where
{-\# INLINE fixed4 \#-}
fixed4 x = 'liftFixedToBounded' $ const x '>$<'
E.char7 '>*<' E.char7 '>*<' E.char7 '>*<' E.char7
 
{-\# INLINE fixed5 \#-}
fixed5 x = 'liftFixedToBounded' $ const x '>$<'
E.char7 '>*<' E.char7 '>*<' E.char7 '>*<' E.char7 '>*<' E.char7
@
This module currently does not expose functions that require the special
properties of fixed-size primitives. They are useful for prefixing 'Builder's
with their size or for implementing chunked encodings. We will expose the
corresponding functions in future releases of this library.
-}
{-
--
--
-- A /bounded primitive/ is a builder primitive that never results in a sequence
-- longer than some fixed number of bytes. This number of bytes must be
-- independent of the value being encoded. Typical examples of bounded
-- primitives are the big-endian encoding of a 'Word64', which results always
-- in exactly 8 bytes, or the UTF-8 encoding of a 'Char', which results always
-- in less or equal to 4 bytes.
--
-- Typically, primitives are implemented efficiently by allocating a buffer (an
-- array of bytes) and repeatedly executing the following two steps: (1)
-- writing to the buffer until it is full and (2) handing over the filled part
-- to the consumer of the encoded value. Step (1) is where bounded primitives
-- are used. We must use a bounded primitive, as we must check that there is
-- enough free space /before/ actually writing to the buffer.
--
-- In term of expressiveness, it would be sufficient to construct all encodings
-- from the single bounded encoding that encodes a 'Word8' as-is. However,
-- this is not sufficient in terms of efficiency. It results in unnecessary
-- buffer-full checks and it complicates the program-flow for writing to the
-- buffer, as buffer-full checks are interleaved with analysing the value to be
-- encoded (e.g., think about the program-flow for UTF-8 encoding). This has a
-- significant effect on overall encoding performance, as encoding primitive
-- Haskell values such as 'Word8's or 'Char's lies at the heart of every
-- encoding implementation.
--
-- The bounded 'Encoding's provided by this module remove this performance
-- problem. Intuitively, they consist of a tuple of the bound on the maximal
-- number of bytes written and the actual implementation of the encoding as a
-- function that modifies a mutable buffer. Hence when executing a bounded
-- 'Encoding', the buffer-full check can be done once before the actual writing
-- to the buffer. The provided 'Encoding's also take care to implement the
-- actual writing to the buffer efficiently. Moreover, combinators are
-- provided to construct new bounded encodings from the provided ones.
--
-- A typical example for using the combinators is a bounded 'Encoding' that
-- combines escaping the ' and \\ characters with UTF-8 encoding. More
-- precisely, the escaping to be done is the one implemented by the following
-- @escape@ function.
--
-- > escape :: Char -> [Char]
-- > escape '\'' = "\\'"
-- > escape '\\' = "\\\\"
-- > escape c = [c]
--
-- The bounded 'Encoding' that combines this escaping with UTF-8 encoding is
-- the following.
--
-- > import Data.ByteString.Builder.Prim.Utf8 (char)
-- >
-- > {-# INLINE escapeChar #-}
-- > escapeUtf8 :: BoundedPrim Char
-- > escapeUtf8 =
-- > encodeIf ('\'' ==) (char <#> char #. const ('\\','\'')) $
-- > encodeIf ('\\' ==) (char <#> char #. const ('\\','\\')) $
-- > char
--
-- The definition of 'escapeUtf8' is more complicated than 'escape', because
-- the combinators ('encodeIf', 'encodePair', '#.', and 'char') used in
-- 'escapeChar' compute both the bound on the maximal number of bytes written
-- (8 for 'escapeUtf8') as well as the low-level buffer manipulation required
-- to implement the encoding. Bounded 'Encoding's should always be inlined.
-- Otherwise, the compiler cannot compute the bound on the maximal number of
-- bytes written at compile-time. Without inlining, it would also fail to
-- optimize the constant encoding of the escape characters in the above
-- example. Functions that execute bounded 'Encoding's also perform
-- suboptimally, if the definition of the bounded 'Encoding' is not inlined.
-- Therefore we add an 'INLINE' pragma to 'escapeUtf8'.
--
-- Currently, the only library that executes bounded 'Encoding's is the
-- 'bytestring' library (<http://hackage.haskell.org/package/bytestring>). It
-- uses bounded 'Encoding's to implement most of its lazy bytestring builders.
-- Executing a bounded encoding should be done using the corresponding
-- functions in the lazy bytestring builder 'Extras' module.
--
-- TODO: Merge with explanation/example below
--
-- Bounded 'E.Encoding's abstract encodings of Haskell values that can be implemented by
-- writing a bounded-size sequence of bytes directly to memory. They are
-- lifted to conversions from Haskell values to 'Builder's by wrapping them
-- with a bound-check. The compiler can implement this bound-check very
-- efficiently (i.e, a single comparison of the difference of two pointers to a
-- constant), because the bound of a 'E.Encoding' is always independent of the
-- value being encoded and, in most cases, a literal constant.
--
-- 'E.Encoding's are the primary means for defining conversion functions from
-- primitive Haskell values to 'Builder's. Most 'Builder' constructors
-- provided by this library are implemented that way.
-- 'E.Encoding's are also used to construct conversions that exploit the internal
-- representation of data-structures.
--
-- For example, 'encodeByteStringWith' works directly on the underlying byte
-- array and uses some tricks to reduce the number of variables in its inner
-- loop. Its efficiency is exploited for implementing the @filter@ and @map@
-- functions in "Data.ByteString.Lazy" as
--
-- > import qualified Codec.Bounded.Encoding as E
-- >
-- > filter :: (Word8 -> Bool) -> ByteString -> ByteString
-- > filter p = toLazyByteString . encodeLazyByteStringWithB write
-- > where
-- > write = E.encodeIf p E.word8 E.emptyEncoding
-- >
-- > map :: (Word8 -> Word8) -> ByteString -> ByteString
-- > map f = toLazyByteString . encodeLazyByteStringWithB (E.word8 E.#. f)
--
-- Compared to earlier versions of @filter@ and @map@ on lazy 'L.ByteString's,
-- these versions use a more efficient inner loop and have the additional
-- advantage that they always result in well-chunked 'L.ByteString's; i.e, they
-- also perform automatic defragmentation.
--
-- We can also use 'E.Encoding's to improve the efficiency of the following
-- 'renderString' function from our UTF-8 CSV table encoding example in
-- "Data.ByteString.Builder".
--
-- > renderString :: String -> Builder
-- > renderString cs = charUtf8 '"' <> foldMap escape cs <> charUtf8 '"'
-- > where
-- > escape '\\' = charUtf8 '\\' <> charUtf8 '\\'
-- > escape '\"' = charUtf8 '\\' <> charUtf8 '\"'
-- > escape c = charUtf8 c
--
-- The idea is to save on 'mappend's by implementing a 'E.Encoding' that escapes
-- characters and using 'encodeListWith', which implements writing a list of
-- values with a tighter inner loop and no 'mappend'.
--
-- > import Data.ByteString.Builder.Extra -- assume these
-- > import Data.ByteString.Builder.Prim -- imports are present
-- > ( BoundedPrim, encodeIf, (<#>), (#.) )
-- > import Data.ByteString.Builder.Prim.Utf8 (char)
-- >
-- > renderString :: String -> Builder
-- > renderString cs =
-- > charUtf8 '"' <> encodeListWithB escapedUtf8 cs <> charUtf8 '"'
-- > where
-- > escapedUtf8 :: BoundedPrim Char
-- > escapedUtf8 =
-- > encodeIf (== '\\') (char <#> char #. const ('\\', '\\')) $
-- > encodeIf (== '\"') (char <#> char #. const ('\\', '\"')) $
-- > char
--
-- This 'Builder' considers a buffer with less than 8 free bytes as full. As
-- all functions are inlined, the compiler is able to optimize the constant
-- 'E.Encoding's as two sequential 'poke's. Compared to the first implementation of
-- 'renderString' this implementation is 1.7x faster.
--
-}
{-
Internally, 'Builder's are buffer-fill operations that are
given a continuation buffer-fill operation and a buffer-range to be filled.
A 'Builder' first checks if the buffer-range is large enough. If that's
the case, the 'Builder' writes the sequences of bytes to the buffer and
calls its continuation. Otherwise, it returns a signal that it requires a
new buffer together with a continuation to be called on this new buffer.
Ignoring the rare case of a full buffer-range, the execution cost of a
'Builder' consists of three parts:
1. The time taken to read the parameters; i.e., the buffer-fill
operation to call after the 'Builder' is done and the buffer-range to
fill.
2. The time taken to check for the size of the buffer-range.
3. The time taken for the actual encoding.
We can reduce cost (1) by ensuring that fewer buffer-fill function calls are
required. We can reduce cost (2) by fusing buffer-size checks of sequential
writes. For example, when escaping a 'String' using 'renderString', it would
be sufficient to check before encoding a character that at least 8 bytes are
free. We can reduce cost (3) by implementing better primitive 'Builder's.
For example, 'renderCell' builds an intermediate list containing the decimal
representation of an 'Int'. Implementing a direct decimal encoding of 'Int's
to memory would be more efficient, as it requires fewer buffer-size checks
and less allocation. It is also a planned extension of this library.
The first two cost reductions are supported for user code through functions
in "Data.ByteString.Builder.Extra". There, we continue the above example
and drop the generation time to 0.8ms by implementing 'renderString' more
cleverly. The third reduction requires meddling with the internals of
'Builder's and is not recommended in code outside of this library. However,
patches to this library are very welcome.
-}
module Data.ByteString.Builder.Prim (
-- * Bounded-size primitives
BoundedPrim
-- ** Combinators
-- | The combinators for 'BoundedPrim's are implemented such that the
-- size of the resulting 'BoundedPrim' can be computed at compile time.
, emptyB
, (>*<)
, (>$<)
, eitherB
, condB
-- ** Builder construction
, primBounded
, primMapListBounded
, primUnfoldrBounded
, primMapByteStringBounded
, primMapLazyByteStringBounded
-- * Fixed-size primitives
, FixedPrim
-- ** Combinators
-- | The combinators for 'FixedPrim's are implemented such that the 'size'
-- of the resulting 'FixedPrim' is computed at compile time.
--
-- The '(>*<)' and '(>$<)' pairing and mapping operators can be used
-- with 'FixedPrim'.
, emptyF
, liftFixedToBounded
-- ** Builder construction
-- | In terms of expressivity, the function 'fixedPrim' would be sufficient
-- for constructing 'Builder's from 'FixedPrim's. The fused variants of
-- this function are provided because they allow for more efficient
-- implementations. Our compilers are just not smart enough yet; and for some
-- of the employed optimizations (see the code of 'encodeByteStringWithF')
-- they will very likely never be.
--
-- Note that functions marked with \"/Heavy inlining./\" are forced to be
-- inlined because they must be specialized for concrete encodings,
-- but are rather heavy in terms of code size. We recommend to define a
-- top-level function for every concrete instantiation of such a function in
-- order to share its code. A typical example is the function
-- 'byteStringHex' from "Data.ByteString.Builder.ASCII", which is
-- implemented as follows.
--
-- @
-- byteStringHex :: S.ByteString -> Builder
-- byteStringHex = 'encodeByteStringWithF' 'word8HexFixed'
-- @
--
, primFixed
, primMapListFixed
, primUnfoldrFixed
, primMapByteStringFixed
, primMapLazyByteStringFixed
-- * Standard encodings of Haskell values
, module Data.ByteString.Builder.Prim.Binary
-- ** Character encodings
, module Data.ByteString.Builder.Prim.ASCII
-- *** ISO/IEC 8859-1 (Char8)
-- | The ISO/IEC 8859-1 encoding is an 8-bit encoding often known as Latin-1.
-- The /Char8/ encoding implemented here works by truncating the Unicode
-- codepoint to 8-bits and encoding them as a single byte. For the codepoints
-- 0-255 this corresponds to the ISO/IEC 8859-1 encoding. Note that the
-- Char8 encoding is equivalent to the ASCII encoding on the Unicode
-- codepoints 0-127. Hence, functions such as 'intDec' can also be used for
-- encoding 'Int's as a decimal number with Char8 encoded characters.
, char8
-- *** UTF-8
-- | The UTF-8 encoding can encode all Unicode codepoints.
-- It is equivalent to the ASCII encoding on the Unicode codepoints 0-127.
-- Hence, functions such as 'intDec' can also be used for encoding 'Int's as
-- a decimal number with UTF-8 encoded characters.
, charUtf8
{-
-- * Testing support
-- | The following four functions are intended for testing use
-- only. They are /not/ efficient. Basic encodings are efficiently executed by
-- creating 'Builder's from them using the @encodeXXX@ functions explained at
-- the top of this module.
, evalF
, evalB
, showF
, showB
-}
) where
import Data.ByteString.Builder.Internal
import Data.ByteString.Builder.Prim.Internal.UncheckedShifts
import qualified Data.ByteString as S
import qualified Data.ByteString.Internal as S
import qualified Data.ByteString.Lazy.Internal as L
import Data.Monoid
import Data.List (unfoldr) -- HADDOCK ONLY
import Data.Char (chr, ord)
import Control.Monad ((<=<), unless)
import Data.ByteString.Builder.Prim.Internal hiding (size, sizeBound)
import qualified Data.ByteString.Builder.Prim.Internal as I (size, sizeBound)
import Data.ByteString.Builder.Prim.Binary
import Data.ByteString.Builder.Prim.ASCII
import Foreign
import Foreign.ForeignPtr.Unsafe (unsafeForeignPtrToPtr)
------------------------------------------------------------------------------
-- Creating Builders from bounded primitives
------------------------------------------------------------------------------
-- | Encode a single value with a 'FixedPrim', by viewing the fixed-size
-- primitive as a (trivially) bounded one.
{-# INLINE primFixed #-}
primFixed :: FixedPrim a -> (a -> Builder)
primFixed fe = primBounded (toB fe)
-- | Encode each element of a list, from left to right, with a 'FixedPrim'.
{-# INLINE primMapListFixed #-}
primMapListFixed :: FixedPrim a -> ([a] -> Builder)
primMapListFixed fe = primMapListBounded (toB fe)
-- | Encode the sequence produced by an 'unfoldr'-style generator with a
-- 'FixedPrim'.
{-# INLINE primUnfoldrFixed #-}
primUnfoldrFixed :: FixedPrim b -> (a -> Maybe (b, a)) -> a -> Builder
primUnfoldrFixed fe = primUnfoldrBounded (toB fe)
-- | /Heavy inlining./ Encode every byte of a strict 'S.ByteString', from
-- left to right, with a 'FixedPrim'.  This function is quite versatile; for
-- example, it yields a 'Builder' that maps every byte before copying it to
-- the buffer to be filled:
--
-- > mapToBuilder :: (Word8 -> Word8) -> S.ByteString -> Builder
-- > mapToBuilder f = primMapByteStringFixed (contramapF f word8)
--
-- It can also hex-encode a strict 'S.ByteString', as shown by the
-- 'byteStringHex' example above.
{-# INLINE primMapByteStringFixed #-}
primMapByteStringFixed :: FixedPrim Word8 -> (S.ByteString -> Builder)
primMapByteStringFixed fe = primMapByteStringBounded (toB fe)
-- | /Heavy inlining./ Encode every byte of a lazy 'L.ByteString', from
-- left to right, with a 'FixedPrim'.
{-# INLINE primMapLazyByteStringFixed #-}
primMapLazyByteStringFixed :: FixedPrim Word8 -> (L.ByteString -> Builder)
primMapLazyByteStringFixed fe = primMapLazyByteStringBounded (toB fe)
-- IMPLEMENTATION NOTE: Sadly, 'encodeListWith' cannot be used for foldr/build
-- fusion. Its performance relies on hoisting several variables out of the
-- inner loop. That's not possible when writing 'encodeListWith' as a 'foldr'.
-- If we had stream fusion for lists, then we could fuse 'encodeListWith', as
-- 'encodeWithStream' can keep control over the execution.
-- | Create a 'Builder' that encodes values with the given 'BoundedPrim'.
--
-- We rewrite consecutive uses of 'primBounded' such that the bound-checks are
-- fused. For example,
--
-- > primBounded (word32 c1) `mappend` primBounded (word32 c2)
--
-- is rewritten such that the resulting 'Builder' checks only once, if there are
-- at least 8 free bytes, instead of checking twice, if there are 4 free bytes.
-- This optimization is not observationally equivalent in a strict sense, as it
-- influences the boundaries of the generated chunks. However, for a user of
-- this library it is observationally equivalent, as chunk boundaries of a lazy
-- 'L.ByteString' can only be observed through the internal interface.
-- Moreover, we expect that all primitives write much fewer than 4kb (the
-- default short buffer size). Hence, it is safe to ignore the additional
-- memory spilled due to the more aggressive buffer wrapping introduced by this
-- optimization.
--
{-# INLINE[1] primBounded #-}
primBounded :: BoundedPrim a -> (a -> Builder)
primBounded w x =
    -- It is important to avoid recursive 'BuildStep's where possible, as
    -- their closure allocation is expensive. Using 'ensureFree' allows the
    -- 'step' to assume that at least 'sizeBound w' free space is available.
    ensureFree (I.sizeBound w) `mappend` builder step
  where
    -- Write the value at the current output pointer and continue with the
    -- advanced buffer range; the preceding 'ensureFree' guarantees room.
    step k (BufferRange op ope) = do
        op' <- runB w x op
        let !br' = BufferRange op' ope
        k br'
-- Rewrite rules that fuse the bound-checks of adjacent 'primBounded' uses
-- into a single check over the paired primitive.  The INLINE[1] pragma above
-- keeps 'primBounded' unexpanded long enough for these rules to fire.
{-# RULES
"append/primBounded" forall w1 w2 x1 x2.
       append (primBounded w1 x1) (primBounded w2 x2)
     = primBounded (pairB w1 w2) (x1, x2)
"append/primBounded/assoc_r" forall w1 w2 x1 x2 b.
       append (primBounded w1 x1) (append (primBounded w2 x2) b)
     = append (primBounded (pairB w1 w2) (x1, x2)) b
"append/primBounded/assoc_l" forall w1 w2 x1 x2 b.
       append (append b (primBounded w1 x1)) (primBounded w2 x2)
     = append b (primBounded (pairB w1 w2) (x1, x2))
  #-}
-- TODO: The same rules for 'putBuilder (..) >> putBuilder (..)'
-- | Create a 'Builder' that encodes a list of values consecutively using a
-- 'BoundedPrim' for each element. This function is more efficient than the
-- canonical
--
-- > mconcat . map (primBounded w)
--
-- or
--
-- > foldMap (primBounded w)
--
-- because it moves several variables out of the inner loop.
{-# INLINE primMapListBounded #-}
primMapListBounded :: BoundedPrim a -> [a] -> Builder
primMapListBounded w xs0 =
    builder $ step xs0
  where
    -- Encode as many elements as fit in the current buffer; when fewer than
    -- 'bound' bytes remain, hand back a 'bufferFull' request that resumes
    -- with the unconsumed suffix of the list.
    step xs1 k (BufferRange op0 ope0) =
        go xs1 op0
      where
        go []          !op = k (BufferRange op ope0)
        go xs@(x':xs') !op
          | op `plusPtr` bound <= ope0 = runB w x' op >>= go xs'
          | otherwise                  =
             return $ bufferFull bound op (step xs k)
    -- Worst-case number of bytes one encoded element may need; hoisted out
    -- of the inner loop.
    bound = I.sizeBound w
-- TODO: Add 'foldMap/encodeWith' its variants
-- TODO: Ensure rewriting 'primBounded w . f = primBounded (w #. f)'
-- | Create a 'Builder' that encodes a sequence generated from a seed value
-- using a 'BoundedPrim' for each sequence element.
{-# INLINE primUnfoldrBounded #-}
primUnfoldrBounded :: BoundedPrim b -> (a -> Maybe (b, a)) -> a -> Builder
primUnfoldrBounded w f x0 =
    builder $ fillWith x0
  where
    -- Unfold the seed, writing one element at a time while at least 'bound'
    -- bytes are free; otherwise request a fresh buffer, write the pending
    -- element into it, and continue unfolding from the next seed.
    fillWith x k !(BufferRange op0 ope0) =
        go (f x) op0
      where
        go !Nothing        !op = do let !br' = BufferRange op ope0
                                    k br'
        go !(Just (y, x')) !op
          | op `plusPtr` bound <= ope0 = runB w y op >>= go (f x')
          | otherwise                  = return $ bufferFull bound op $
              \(BufferRange opNew opeNew) -> do
                  !opNew' <- runB w y opNew
                  fillWith x' k (BufferRange opNew' opeNew)
    -- Worst-case size of one encoded element; hoisted out of the loop.
    bound = I.sizeBound w
-- | Create a 'Builder' that encodes each 'Word8' of a strict 'S.ByteString'
-- using a 'BoundedPrim'. For example, we can write a 'Builder' that filters
-- a strict 'S.ByteString' as follows.
--
-- > import Data.ByteString.Builder.Prim as P (word8, condB, emptyB)
--
-- > filterBS p = P.condB p P.word8 P.emptyB
--
{-# INLINE primMapByteStringBounded #-}
primMapByteStringBounded :: BoundedPrim Word8 -> S.ByteString -> Builder
primMapByteStringBounded w =
    \bs -> builder $ step bs
  where
    -- Worst-case number of output bytes per input byte; computed once per
    -- 'ByteString' instead of once per byte.
    bound = I.sizeBound w
    step (S.PS ifp ioff isize) !k =
        goBS (unsafeForeignPtrToPtr ifp `plusPtr` ioff)
      where
        -- One-past-the-end pointer of the input bytes.
        !ipe = unsafeForeignPtrToPtr ifp `plusPtr` (ioff + isize)
        -- Outer loop: re-entered whenever the output buffer runs low or a
        -- new buffer is supplied.
        goBS !ip0 !br@(BufferRange op0 ope)
          | ip0 >= ipe = do
              touchForeignPtr ifp -- input buffer consumed
              k br
          | op0 `plusPtr` bound < ope =
              -- Process only as many input bytes as are guaranteed to fit
              -- into the remaining output space (worst case 'bound' each),
              -- so the inner loop needs no per-byte bound check.
              goPartial (ip0 `plusPtr` min outRemaining inpRemaining)
          | otherwise = return $ bufferFull bound op0 (goBS ip0)
          where
            outRemaining = (ope `minusPtr` op0) `div` bound
            inpRemaining = ipe `minusPtr` ip0
            -- Inner loop: check-free up to 'ipeTmp'.
            goPartial !ipeTmp = go ip0 op0
              where
                go !ip !op
                  | ip < ipeTmp = do
                      x <- peek ip
                      op' <- runB w x op
                      go (ip `plusPtr` 1) op'
                  | otherwise =
                      goBS ip (BufferRange op ope)
-- | Chunk-wise application of 'primMapByteStringBounded' over the chunks of
-- a lazy 'L.ByteString'.
{-# INLINE primMapLazyByteStringBounded #-}
primMapLazyByteStringBounded :: BoundedPrim Word8 -> L.ByteString -> Builder
primMapLazyByteStringBounded w =
    L.foldrChunks (mappend . primMapByteStringBounded w) mempty
------------------------------------------------------------------------------
-- Char8 encoding
------------------------------------------------------------------------------
-- | Char8 encode a 'Char': the Unicode codepoint is truncated to its low
-- eight bits and written as a single byte.
{-# INLINE char8 #-}
char8 :: FixedPrim Char
char8 = (\c -> fromIntegral (ord c)) >$< word8
------------------------------------------------------------------------------
-- UTF-8 encoding
------------------------------------------------------------------------------
-- | UTF-8 encode a 'Char'.  The bound of 4 is the maximal number of bytes
-- a UTF-8 encoded codepoint can occupy.
{-# INLINE charUtf8 #-}
charUtf8 :: BoundedPrim Char
-- NOTE(review): 'boudedPrim' (sic) appears to be the actual, historically
-- misspelled name exported by "Data.ByteString.Builder.Prim.Internal" --
-- confirm before "correcting" the spelling here.
charUtf8 = boudedPrim 4 (encodeCharUtf8 f1 f2 f3 f4)
  where
    -- Run the poke action and return the output pointer advanced by @n@.
    pokeN n io op  = io op >> return (op `plusPtr` n)
    f1 x1          = pokeN 1 $ \op -> do pokeByteOff op 0 x1
    f2 x1 x2       = pokeN 2 $ \op -> do pokeByteOff op 0 x1
                                         pokeByteOff op 1 x2
    f3 x1 x2 x3    = pokeN 3 $ \op -> do pokeByteOff op 0 x1
                                         pokeByteOff op 1 x2
                                         pokeByteOff op 2 x3
    f4 x1 x2 x3 x4 = pokeN 4 $ \op -> do pokeByteOff op 0 x1
                                         pokeByteOff op 1 x2
                                         pokeByteOff op 2 x3
                                         pokeByteOff op 3 x4
-- | Encode a Unicode character to another datatype, using UTF-8. This function
-- acts as an abstract way of encoding characters, as it is unaware of what
-- needs to happen with the resulting bytes: you have to specify functions to
-- deal with those.
{-# INLINE encodeCharUtf8 #-}
encodeCharUtf8 :: (Word8 -> a)                             -- ^ 1-byte UTF-8
               -> (Word8 -> Word8 -> a)                    -- ^ 2-byte UTF-8
               -> (Word8 -> Word8 -> Word8 -> a)           -- ^ 3-byte UTF-8
               -> (Word8 -> Word8 -> Word8 -> Word8 -> a)  -- ^ 4-byte UTF-8
               -> Char                                     -- ^ Input 'Char'
               -> a                                        -- ^ Result
encodeCharUtf8 f1 f2 f3 f4 c
    | cp <= 0x7F   = f1 (fromIntegral cp)
    | cp <= 0x07FF = f2 (lead  6 0xC0) (cont 0)
    | cp <= 0xFFFF = f3 (lead 12 0xE0) (cont 6) (cont 0)
    | otherwise    = f4 (lead 18 0xF0) (cont 12) (cont 6) (cont 0)
  where
    cp = ord c
    -- Leading byte: the high codepoint bits plus the UTF-8 length marker.
    lead n marker = fromIntegral ((cp `shiftR` n) + marker)
    -- Continuation byte: six codepoint bits tagged with the 10xxxxxx prefix.
    cont n = fromIntegral (((cp `shiftR` n) .&. 0x3F) + 0x80)
| phischu/fragnix | tests/packages/scotty/Data.ByteString.Builder.Prim.hs | bsd-3-clause | 32,046 | 0 | 18 | 6,881 | 2,220 | 1,234 | 986 | 174 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Nav_msgs.GridCells where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.HeaderSupport
import qualified Data.Vector.Storable as V
import qualified Ros.Geometry_msgs.Point as Point
import qualified Ros.Std_msgs.Header as Header
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
-- | Haskell representation of the ROS @nav_msgs/GridCells@ message: a
-- collection of equally-sized cells given by their center points, together
-- with a standard message header.  (This module looks machine-generated
-- by roshask from the @.msg@ definition -- edit the generator, not this
-- file, if changes are needed.)
data GridCells = GridCells { _header :: Header.Header
                           , _cell_width :: P.Float
                           , _cell_height :: P.Float
                           , _cells :: V.Vector Point.Point
                           } deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)
-- Derive the lenses 'header', 'cell_width', 'cell_height' and 'cells'.
$(makeLenses ''GridCells)
-- Binary (de)serialization: fields are written and read in declaration
-- order (header, cell_width, cell_height, cells).  'putMsg' delegates to
-- 'putStampedMsg' because this message carries a header.
instance RosBinary GridCells where
  put obj' = put (_header obj') *> put (_cell_width obj') *> put (_cell_height obj') *> put (_cells obj')
  get = GridCells <$> get <*> get <*> get <*> get
  putMsg = putStampedMsg
-- Expose the embedded std_msgs/Header fields (sequence number, frame id and
-- timestamp) through the generated 'header' lens.
instance HasHeader GridCells where
  getSequence = view (header . Header.seq)
  getFrame = view (header . Header.frame_id)
  getStamp = view (header . Header.stamp)
  setSequence = set (header . Header.seq)
-- ROS message metadata: md5 of the message definition and its canonical
-- type name, as used during topic negotiation.
instance MsgInfo GridCells where
  sourceMD5 _ = "b9e4f5df6d28e272ebde00a3994830f5"
  msgTypeName _ = "nav_msgs/GridCells"
-- Generic-derived default value (every field at its default).
instance D.Default GridCells
| acowley/roshask | msgs/Nav_msgs/Ros/Nav_msgs/GridCells.hs | bsd-3-clause | 1,597 | 1 | 11 | 323 | 428 | 250 | 178 | 38 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.AutoScaling.DescribeAutoScalingGroups
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes one or more Auto Scaling groups. If a list of names is not
-- provided, the call describes all Auto Scaling groups.
--
-- You can specify a maximum number of items to be returned with a single call.
-- If there are more items to return, the call returns a token. To get the next
-- set of items, repeat the call with the returned token in the 'NextToken'
-- parameter.
--
-- <http://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_DescribeAutoScalingGroups.html>
module Network.AWS.AutoScaling.DescribeAutoScalingGroups
(
-- * Request
DescribeAutoScalingGroups
-- ** Request constructor
, describeAutoScalingGroups
-- ** Request lenses
, dasgAutoScalingGroupNames
, dasgMaxRecords
, dasgNextToken
-- * Response
, DescribeAutoScalingGroupsResponse
-- ** Response constructor
, describeAutoScalingGroupsResponse
-- ** Response lenses
, dasgrAutoScalingGroups
, dasgrNextToken
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.AutoScaling.Types
import qualified GHC.Exts
-- | Request parameters for the @DescribeAutoScalingGroups@ operation.
data DescribeAutoScalingGroups = DescribeAutoScalingGroups
    { _dasgAutoScalingGroupNames :: List "member" Text -- ^ name filter; empty = all groups
    , _dasgMaxRecords            :: Maybe Int          -- ^ maximum items per call
    , _dasgNextToken             :: Maybe Text         -- ^ pagination token from a previous call
    } deriving (Eq, Ord, Read, Show)
-- | 'DescribeAutoScalingGroups' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dasgAutoScalingGroupNames' @::@ ['Text']
--
-- * 'dasgMaxRecords' @::@ 'Maybe' 'Int'
--
-- * 'dasgNextToken' @::@ 'Maybe' 'Text'
--
describeAutoScalingGroups :: DescribeAutoScalingGroups
describeAutoScalingGroups = DescribeAutoScalingGroups
    { _dasgAutoScalingGroupNames = mempty  -- no name filter: describe all groups
    , _dasgMaxRecords            = Nothing -- let the service choose the page size
    , _dasgNextToken             = Nothing -- start with the first page
    }
-- | The group names.
dasgAutoScalingGroupNames :: Lens' DescribeAutoScalingGroups [Text]
dasgAutoScalingGroupNames = lens _dasgAutoScalingGroupNames setNames . _List
  where
    setNames s a = s { _dasgAutoScalingGroupNames = a }
-- | The maximum number of items to return with this call.
dasgMaxRecords :: Lens' DescribeAutoScalingGroups (Maybe Int)
dasgMaxRecords = lens _dasgMaxRecords setMax
  where
    setMax s a = s { _dasgMaxRecords = a }
-- | The token for the next set of items to return. (You received this token
-- from a previous call.)
dasgNextToken :: Lens' DescribeAutoScalingGroups (Maybe Text)
dasgNextToken = lens _dasgNextToken setToken
  where
    setToken s a = s { _dasgNextToken = a }
-- | Response payload for the @DescribeAutoScalingGroups@ operation.
data DescribeAutoScalingGroupsResponse = DescribeAutoScalingGroupsResponse
    { _dasgrAutoScalingGroups :: List "member" AutoScalingGroup -- ^ the described groups
    , _dasgrNextToken         :: Maybe Text                     -- ^ token for the next page, if any
    } deriving (Eq, Read, Show)
-- | 'DescribeAutoScalingGroupsResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dasgrAutoScalingGroups' @::@ ['AutoScalingGroup']
--
-- * 'dasgrNextToken' @::@ 'Maybe' 'Text'
--
describeAutoScalingGroupsResponse :: DescribeAutoScalingGroupsResponse
-- Positional application: the fields are, in declaration order, the group
-- list (empty) and the pagination token (absent).
describeAutoScalingGroupsResponse =
    DescribeAutoScalingGroupsResponse mempty Nothing
-- | The groups.
dasgrAutoScalingGroups :: Lens' DescribeAutoScalingGroupsResponse [AutoScalingGroup]
dasgrAutoScalingGroups = lens _dasgrAutoScalingGroups setGroups . _List
  where
    setGroups s a = s { _dasgrAutoScalingGroups = a }
-- | The token to use when requesting the next set of items. If there are no
-- additional items to return, the string is empty.
dasgrNextToken :: Lens' DescribeAutoScalingGroupsResponse (Maybe Text)
dasgrNextToken = lens _dasgrNextToken setToken
  where
    setToken s a = s { _dasgrNextToken = a }
-- All requests for this operation are posted to the service root.
instance ToPath DescribeAutoScalingGroups where
    toPath _ = "/"
-- Serialize the request fields as query-string parameters.
instance ToQuery DescribeAutoScalingGroups where
    toQuery DescribeAutoScalingGroups{..} = mconcat
        [ "AutoScalingGroupNames" =? _dasgAutoScalingGroupNames
        , "MaxRecords" =? _dasgMaxRecords
        , "NextToken" =? _dasgNextToken
        ]
-- No extra headers beyond the defaults.
instance ToHeaders DescribeAutoScalingGroups
-- Wire the request to the AutoScaling service: invoked via an HTTP POST of
-- the query parameters, with an XML-decoded response.
instance AWSRequest DescribeAutoScalingGroups where
    type Sv DescribeAutoScalingGroups = AutoScaling
    type Rs DescribeAutoScalingGroups = DescribeAutoScalingGroupsResponse
    request = post "DescribeAutoScalingGroups"
    response = xmlResponse
instance FromXML DescribeAutoScalingGroupsResponse where
parseXML = withElement "DescribeAutoScalingGroupsResult" $ \x -> DescribeAutoScalingGroupsResponse
<$> x .@? "AutoScalingGroups" .!@ mempty
<*> x .@? "NextToken"
instance AWSPager DescribeAutoScalingGroups where
page rq rs
| stop (rs ^. dasgrNextToken) = Nothing
| otherwise = (\x -> rq & dasgNextToken ?~ x)
<$> (rs ^. dasgrNextToken)
| kim/amazonka | amazonka-autoscaling/gen/Network/AWS/AutoScaling/DescribeAutoScalingGroups.hs | mpl-2.0 | 5,814 | 0 | 12 | 1,200 | 724 | 429 | 295 | 80 | 1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_HADDOCK hide #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.Exception
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- This is a purely internal module to compensate for differences between
-- Haskell implementations.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.Exception (
bracket, bracket_, unsafeBracket_, finallyRet
) where
import Data.IORef ( newIORef, readIORef, writeIORef )
#ifdef __NHC__
import qualified IO ( bracket, bracket_ )
{-# INLINE bracket #-}
bracket :: IO a -> (a -> IO b) -> (a -> IO c) -> IO c
bracket = IO.bracket
{-# INLINE bracket_ #-}
bracket_ :: IO a -> IO b -> IO c -> IO c
bracket_ before = IO.bracket_ before . const
finally :: IO a -> IO b -> IO a
finally = flip . bracket_ . return $ undefined
#else
import Control.Exception ( bracket, bracket_, finally )
#endif
{-# INLINE unsafeBracket_ #-}
-- | Run @setup@, then @action@, then @teardown@, returning the result of
-- @action@.  No exception masking is performed and @teardown@ is NOT run
-- when @action@ throws (plain sequencing, no 'finally') -- hence "unsafe".
unsafeBracket_ :: IO a -> IO b -> IO c -> IO c
unsafeBracket_ setup teardown action =
   setup >> action >>= \result -> teardown >> return result
{-# INLINE finallyRet #-}
-- | Run an action and a sequel, returning both results as a pair.  The
-- sequel runs even when the action throws (via 'finally'); its result is
-- smuggled out through an 'IORef' because 'finally' discards it.
finallyRet :: IO a -> IO b -> IO (a, b)
finallyRet action sequel = do
   sequelRef <- newIORef undefined
   mainRes <- action `finally` (sequel >>= writeIORef sequelRef)
   sequelRes <- readIORef sequelRef
   return (mainRes, sequelRes)
| hesiod/OpenGL | src/Graphics/Rendering/OpenGL/GL/Exception.hs | bsd-3-clause | 1,525 | 0 | 11 | 289 | 373 | 199 | 174 | 20 | 1 |
module GA3 where
-- | Pair every element of the input list with its 1-based position.
zipperM :: [a] -> IO [(a, Integer)]
zipperM xs = fmap (zip xs) getOtherList

-- | Infinite supply of positions starting at 1; 'zipperM' fixes the
-- element type to 'Integer'.
getOtherList :: IO [Integer]
getOtherList = return [1..]
| RefactoringTools/HaRe | test/testdata/GenApplicative/GA3.hs | bsd-3-clause | 147 | 0 | 8 | 32 | 66 | 35 | 31 | 6 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ur-PK">
<title>Active Scan Rules | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | mabdi/zap-extensions | src/org/zaproxy/zap/extension/ascanrules/resources/help_ur_PK/helpset_ur_PK.hs | apache-2.0 | 979 | 80 | 66 | 161 | 417 | 211 | 206 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.GHC.IPI642
-- Copyright : (c) The University of Glasgow 2004
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
module Distribution.Simple.GHC.IPI642 (
InstalledPackageInfo(..),
toCurrent,
-- Don't use these, they're only for conversion purposes
PackageIdentifier, convertPackageId,
License, convertLicense,
convertModuleName
) where
import qualified Distribution.InstalledPackageInfo as Current
import qualified Distribution.Package as Current hiding (installedPackageId)
import qualified Distribution.License as Current
import Distribution.Version (Version)
import Distribution.ModuleName (ModuleName)
import Distribution.Text (simpleParse,display)
import Data.Maybe
-- | This is the InstalledPackageInfo type used by ghc-6.4.2 and later.
--
-- It's here purely for the 'Read' instance so that we can read the package
-- database used by those ghc versions. It is a little hacky to read the
-- package db directly, but we do need the info and until ghc-6.9 there was
-- no better method.
--
-- In ghc-6.4.1 and before the format was slightly different.
-- See "Distribution.Simple.GHC.IPI642"
--
-- | Mirror of the on-disk record written by ghc-pkg for ghc-6.4.2 and
-- later.  Only the derived 'Read' instance is used: it lets us parse the
-- package database of those GHC versions directly.  Field order must
-- match the on-disk format exactly.
data InstalledPackageInfo = InstalledPackageInfo {
    package           :: PackageIdentifier,
    license           :: License,
    copyright         :: String,
    maintainer        :: String,
    author            :: String,
    stability         :: String,
    homepage          :: String,
    pkgUrl            :: String,
    description       :: String,
    category          :: String,
    exposed           :: Bool,
    exposedModules    :: [String],
    hiddenModules     :: [String],
    importDirs        :: [FilePath],
    libraryDirs       :: [FilePath],
    hsLibraries       :: [String],
    extraLibraries    :: [String],
    extraGHCiLibraries:: [String],
    includeDirs       :: [FilePath],
    includes          :: [String],
    depends           :: [PackageIdentifier],
    hugsOptions       :: [String],
    ccOptions         :: [String],
    ldOptions         :: [String],
    frameworkDirs     :: [FilePath],
    frameworks        :: [String],
    haddockInterfaces :: [FilePath],
    haddockHTMLs      :: [FilePath]
  }
  deriving Read
-- | Old-style package identifier: plain string name plus version.
-- Converted to the modern representation by 'convertPackageId'.
data PackageIdentifier = PackageIdentifier {
    pkgName    :: String,
    pkgVersion :: Version
  }
  deriving Read
-- | Old-style license enumeration; no version info on GPL/LGPL.
-- Converted by 'convertLicense'.
data License = GPL | LGPL | BSD3 | BSD4
             | PublicDomain | AllRightsReserved | OtherLicense
  deriving Read
-- | Translate the old pair-of-string-and-version identifier into the
-- modern 'Current.PackageIdentifier' (wrapping the name in 'Current.PackageName').
convertPackageId :: PackageIdentifier -> Current.PackageIdentifier
convertPackageId PackageIdentifier { pkgName = n, pkgVersion = v } =
  Current.PackageIdentifier (Current.PackageName n) v
-- | Derive an installed-package id from the displayed source package id.
mkInstalledPackageId :: Current.PackageIdentifier -> Current.InstalledPackageId
mkInstalledPackageId = Current.InstalledPackageId . display
-- | Parse a module name from its string form.
--
-- Input comes from a trusted package database, so a parse failure means
-- the database entry is corrupt; fail with a descriptive message instead
-- of the uninformative @fromJust: Nothing@ the old 'fromJust' produced.
convertModuleName :: String -> ModuleName
convertModuleName s =
  fromMaybe (error $ "convertModuleName: invalid module name " ++ show s)
            (simpleParse s)
-- | Translate the old license enumeration into the modern
-- 'Current.License'.  GPL and LGPL carry no version information in the
-- old format, hence 'Nothing'.
convertLicense :: License -> Current.License
convertLicense l = case l of
  GPL               -> Current.GPL Nothing
  LGPL              -> Current.LGPL Nothing
  BSD3              -> Current.BSD3
  BSD4              -> Current.BSD4
  PublicDomain      -> Current.PublicDomain
  AllRightsReserved -> Current.AllRightsReserved
  OtherLicense      -> Current.OtherLicense
-- | Lift an old-format record into the modern 'Current.InstalledPackageInfo'.
-- Fields the old format lacks (synopsis, dataDir, pkgRoot, ...) are filled
-- with neutral defaults; names, licenses and module names go through the
-- convert* helpers above.
toCurrent :: InstalledPackageInfo -> Current.InstalledPackageInfo
toCurrent ipi@InstalledPackageInfo{} =
  let pid = convertPackageId (package ipi)
      mkExposedModule m = Current.ExposedModule m Nothing Nothing
  in Current.InstalledPackageInfo {
    Current.installedPackageId = mkInstalledPackageId (convertPackageId (package ipi)),
    Current.sourcePackageId    = pid,
    Current.packageKey         = Current.OldPackageKey pid,
    Current.license            = convertLicense (license ipi),
    Current.copyright          = copyright ipi,
    Current.maintainer         = maintainer ipi,
    Current.author             = author ipi,
    Current.stability          = stability ipi,
    Current.homepage           = homepage ipi,
    Current.pkgUrl             = pkgUrl ipi,
    Current.synopsis           = "",
    Current.description        = description ipi,
    Current.category           = category ipi,
    Current.exposed            = exposed ipi,
    Current.exposedModules     = map (mkExposedModule . convertModuleName) (exposedModules ipi),
    Current.hiddenModules      = map convertModuleName (hiddenModules ipi),
    Current.instantiatedWith   = [],
    Current.trusted            = Current.trusted Current.emptyInstalledPackageInfo,
    Current.importDirs         = importDirs ipi,
    Current.libraryDirs        = libraryDirs ipi,
    Current.dataDir            = "",
    Current.hsLibraries        = hsLibraries ipi,
    Current.extraLibraries     = extraLibraries ipi,
    Current.extraGHCiLibraries = extraGHCiLibraries ipi,
    Current.includeDirs        = includeDirs ipi,
    Current.includes           = includes ipi,
    Current.depends            = map (mkInstalledPackageId.convertPackageId) (depends ipi),
    Current.ccOptions          = ccOptions ipi,
    Current.ldOptions          = ldOptions ipi,
    Current.frameworkDirs      = frameworkDirs ipi,
    Current.frameworks         = frameworks ipi,
    Current.haddockInterfaces  = haddockInterfaces ipi,
    Current.haddockHTMLs       = haddockHTMLs ipi,
    Current.pkgRoot            = Nothing
  }
| rimmington/cabal | Cabal/Distribution/Simple/GHC/IPI642.hs | bsd-3-clause | 5,477 | 0 | 13 | 1,324 | 1,078 | 633 | 445 | 104 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
-- This test is really meant for human looking; do a -ddump-simpl.
-- The definition that you want to look at is for foo.
-- It produces a nested unfold that should look something
-- like the code below. Note the 'lvl1_shW'. It is BAD
-- if this is a lambda instead; you get a lot more allocation
-- See Note [Escaping a value lambda] in SetLevels
{-
$wunfold_shU =
\ (ww_she :: [[a_abm]]) (ww1_shf :: Data.Maybe.Maybe (Stream.Stream a_abm)) ->
case ww1_shf of wild2_afo {
Data.Maybe.Nothing ->
case ww_she of wild_ad6 {
[] -> GHC.Base.[] @ a_abm;
: x_ado xs1_adp ->
$wunfold_shU
xs1_adp
(Data.Maybe.Just
@ (Stream.Stream a_abm) (Stream.Stream @ a_abm @ [a_abm]
*** lvl1_shW ***
x_ado))
};
Data.Maybe.Just ds3_afJ ->
case ds3_afJ of wild3_afL { Stream.Stream @ s1_afN stepb_afO sb_afP ->
case stepb_afO sb_afP of wild4_afR {
Stream.Done -> $wunfold_shU ww_she (Data.Maybe.Nothing @ (Stream.Stream a_abm));
Stream.Yield x_afV sb'_afW ->
GHC.Base.:
@ a_abm
x_afV
($wunfold_shU
ww_she
(Data.Maybe.Just
@ (Stream.Stream a_abm) (Stream.Stream @ a_abm @ s1_afN stepb_afO sb'_afW)));
Stream.Skip sb'_afZ ->
$wunfold_shU
ww_she
(Data.Maybe.Just
@ (Stream.Stream a_abm) (Stream.Stream @ a_abm @ s1_afN stepb_afO sb'_afZ))
}
}
-}
module Main( main, foo ) where
-- Must export foo to make the issue show up
import Prelude hiding ( concatMap, map)
-- Drives the test: increments every element of every inner list, then sums.
main = print (sum (foo [[1,2], [3,4,5]]))
-- Exported on purpose (see module header): the interesting artefact is the
-- simplifier output for the nested unfold this definition produces.
foo :: Num a => [[a]] -> [a]
foo xss = Main.concatMap (\xs -> Main.map (+1) xs) xss
-- Lists as a streamable sequence; stream/unstream round-trip is cancelled
-- by the "stream/unstream" RULE below.
instance StreamableSequence [] where
  stream = listToStream
  unstream = streamToList
  -- These inline pragmas are useless (see #5084)
  {-
  {-# INLINE stream #-}
  {-# INLINE unstream #-}
  -}
-- Convert a list into a Stream whose state is the remaining list.
-- Inlined only in phase 0 so the fusion RULE can fire first.
listToStream :: [a] -> Stream a
listToStream xs = Stream next xs
  where next [] = Done
        next (x:xs) = Yield x xs
{-# INLINE [0] listToStream #-}
-- Unfold a Stream back into a list; Skip steps produce no element.
streamToList :: Stream a -> [a]
streamToList (Stream next s) = unfold s
  where unfold s =
         case next s of
           Done -> []
           Skip s' -> unfold s'
           Yield x s' -> x : unfold s'
{-# INLINE [0] streamToList #-}
-- Fusion rule: a stream rebuilt from its own list is the stream itself.
{-# RULES
"stream/unstream"
  forall s. listToStream (streamToList s) = s
  #-}
-- map/concatMap are phrased as stream pipelines so consecutive uses fuse
-- via the rule above.
map :: (a -> b) -> [a] -> [b]
map f = unstream . mapS f . stream
{-# INLINE map #-}
concatMap :: (a -> [b]) -> [a] -> [b]
concatMap f = unstream . concatMapS (stream . f) . stream
{-# INLINE concatMap #-}
-- A co-structure: a step function over an existential state.  Each step
-- yields an element, skips, or finishes.
data Stream a = forall s. Stream (s -> Step a s) s
data Step a s = Done
              | Yield a s
              | Skip s
class StreamableSequence seq where
  stream :: seq a -> Stream a
  unstream :: Stream a -> seq a
-- axiom: stream . unstream = id
-- These inline pragmas are useless (see #5084)
{-
{-# INLINE stream #-}
{-# INLINE unstream #-}
-}
{-
--version that does not require the sequence type
--to be polymorphic in its elements:
class StreamableSequence seq a | seq -> a where
stream :: seq -> Stream a
unstream :: Stream a -> seq
-}
-- Map over a stream: same state machine, with f applied to each Yield.
mapS :: (a -> b) -> Stream a -> Stream b
mapS f (Stream next s0) = Stream next' s0
  where next' s = case next s of
          Done -> Done
          Skip s' -> Skip s'
          Yield x s' -> Yield (f x) s'
{-# INLINE [0] mapS #-}
-- concatMap over streams: state is the outer state paired with the inner
-- stream currently being drained (Nothing = fetch next outer element).
concatMapS :: (a -> Stream b) -> Stream a -> Stream b
concatMapS f (Stream step s) = Stream step' (s, Nothing)
  where step' (s, Nothing) =
          case step s of
            Yield x s' -> Skip (s', Just (f x))
            Skip s' -> Skip (s', Nothing)
            Done -> Done
        step' (s, Just (Stream stepb sb)) =
          case stepb sb of
            Yield x sb' -> Yield x (s, Just (Stream stepb sb'))
            Skip sb' -> Skip (s, Just (Stream stepb sb'))
            Done -> Skip (s, Nothing)
{-# INLINE [0] concatMapS #-}
| olsner/ghc | testsuite/tests/simplCore/should_run/simplrun009.hs | bsd-3-clause | 4,453 | 0 | 14 | 1,629 | 853 | 449 | 404 | 58 | 6 |
-----------------------------------------------------------------------------
--
-- Module : PhysicalQuantities.Measures
-- Copyright :
-- License : MIT
--
-- Maintainer : -
-- Stability :
-- Portability :
--
-- |
--
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE FunctionalDependencies #-}
module PhysicalQuantities.Measures(
Measured(measuredValue, measuredPrefix)
, measured, measuredUnit
, Measurable(..), measurable, measurable'
, MeasurableVector(..)
, SomePrefixFor(..)
, measure, (:$)(..), MakeMeasure( ($:) )
, MeasuresOps(..), MeasuresFracOps(..)
, MeasureCoerce(..)
) where
import PhysicalQuantities.Definitions
import TypeNum.TypeFunctions
import Data.Function (on)
import Data.Typeable
import Data.Coerce
import Control.Arrow ( (&&&) )
-----------------------------------------------------------------------------
infixl 4 $:
infixl 6 $+, $-
infixl 7 $*, $/
infixl 9 :$
-----------------------------------------------------------------------------
-- * Definitions
-- | Measured shouldn't carry unit value, but rather its type.
-- The unit lives only at the type level (phantom parameter @u@).
data Measured u v = Measured {
    measuredValue :: v -- ^ Raw value (without prefix), see 'measured'.
  , measuredPrefix :: Maybe (SomePrefixFor v)
  }
-- | Value with application of prefix, if any.
measured :: (Num v, Eq v) => Measured u v -> v
measured m = maybe (measuredValue m)
                   ((measuredValue m *) . prefixValue)
                   (measuredPrefix m)
-- | Materialized using 'unitInstance'.
measuredUnit :: (UnitDecomposition u) => Measured u v -> u
measuredUnit _ = unitInstance
-- Type-level (in)equality of measures is inherited from their units.
instance TypesEq (Measured u1 v) (Measured u2 v) where
    type Measured u1 v ~=~ Measured u2 v = EqU u1 u2
instance TypesOrd (Measured u1 v) (Measured u2 v) where
    type Cmp (Measured u1 v) (Measured u2 v) = CmpU u1 u2
-- Equal if raw value and prefix match, or if the prefix-applied values do.
instance (Num v, Eq v) => Eq (Measured u v) where
    x == y = f x == f y
          || measured x == measured y
        where f = measuredValue &&& measuredPrefix
instance (Num v, Ord v) => Ord (Measured u v) where
    compare = compare `on` measured
-- Rendered as "<value> [<prefix>-]<unit name>".
instance (Show v, UnitDecomposition u) => Show (Measured u v) where
    show m = show (measuredValue m) ++ " " ++ fullUnit
        where uname = unitName (measuredUnit m)
              fullUnit = maybe uname
                               (flip (++) $ "-" ++ uname)
                               (show <$> measuredPrefix m)
-- Maps the raw value; the prefix is converted to the new value type.
instance Functor (Measured u) where
    fmap f (Measured v pref) = Measured (f v) (convertPrefix <$> pref)
-----------------------------------------------------------------------------
-- | A quantity measurement abstracted over the unit system: supply a
-- 'UnitSystem' and get the value measured in that system's unit for @q@.
data Measurable q v = Measurable
    (forall sys u . (UnitSystem sys, u ~ UnitFor sys q) =>
        sys -> Measured u v
    )
-- | Materialized via 'quantityInstance'; the argument is only a type proxy.
measurableQuantity :: (PhysicalQuantity q) => Measurable q v -> q
measurableQuantity _ = quantityInstance
-- Type-level (in)equality is inherited from the quantities.
instance TypesEq (Measurable q1 v) (Measurable q2 v) where
    type Measurable q1 v ~=~ Measurable q2 v = EqQ q1 q2
instance TypesOrd (Measurable q1 v) (Measurable q2 v) where
    type Cmp (Measurable q1 v) (Measurable q2 v) = CmpQ q1 q2
-- Shows only the quantity name; the value is hidden inside the closure.
instance (PhysicalQuantity q) => Show (Measurable q v) where
    show m = "Measurable " ++ quantityName (measurableQuantity m)
instance Functor (Measurable q) where
    fmap f (Measurable mf) = Measurable (fmap f . mf)
-----------------------------------------------------------------------------
-- | 'UnitPrefix' container for value type `v` (existentially hides the
-- concrete prefix type).
data SomePrefixFor v = forall p . UnitPrefix p => SomePrefix (p v)
-- | Except 'prefixFromValue' method: the concrete prefix type is erased,
-- so a prefix cannot be reconstructed from a value; that method errors.
instance UnitPrefix SomePrefixFor where
    prefixGroup (SomePrefix p) = prefixGroup p
    prefixValue (SomePrefix p) = prefixValue p
    prefixName (SomePrefix p) = prefixName p
    prefixFromValue = error $ "`prefixFromValue` cannot be called for" ++
                              "PhysicalQuantities.Measures.SomePrefixFor`"
    convertPrefix (SomePrefix p) = SomePrefix $ convertPrefix p
-- Compared by numeric prefix value, shown by prefix name.
instance (Num v, Eq v) => Eq (SomePrefixFor v) where (==) = (==) `on` prefixValue
instance (Num v, Ord v) => Ord (SomePrefixFor v) where compare = compare `on` prefixValue
instance Show (SomePrefixFor v) where show (SomePrefix p) = prefixName p
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- * Creation
-- | Prefixed value constructor: a raw value paired with a unit prefix.
data (UnitPrefix p) => (:$) v p = (:$) v (p v)
-----------------------------------------------------------------------------
-- | Overloaded 'Measured' construction: @value $: unit@ works for both
-- bare values and prefixed values (@v :$ p@); dispatch happens through
-- the closed type family 'CanMakeMeasure' below.
class MakeMeasure from
    where type MeasureValue from :: *
          -- | Measured constructor.
          ($:) :: from -> u -> Measured u (MeasureValue from)
measure :: v -> Measured u v
measure x = Measured x Nothing
measurable :: v -> Measurable q v
measurable x = Measurable $ const (measure x)
-- | Like 'measurable', with an explicit quantity proxy argument.
measurable' :: q -> v -> Measurable q v
measurable' = const measurable
-- Internal, 'MMT'-indexed worker class behind 'MakeMeasure'.
class MakeMeasure' from (t :: MMT)
    where type MeasureValue' t from :: *
          mkMeasure' :: MMT' t -> from -> u -> Measured u (MeasureValue' t from)
-- | MakeMeasure Type
data MMT = MMPrefixed | MMUnprefixed
-- | 'MMT' proxy
data MMT' (t :: MMT) = MMT'
-- Closed family: anything of shape @v :$ p@ is prefixed, all else is not.
type family CanMakeMeasure (from :: *) :: MMT
    where CanMakeMeasure (v:$p) = MMPrefixed
          CanMakeMeasure v = MMUnprefixed
canMakeMeasure :: from -> MMT' (CanMakeMeasure from)
canMakeMeasure = const MMT'
-- | Creates measures using MakeMeasure' instances.
instance (MakeMeasure' from (CanMakeMeasure from)) => MakeMeasure from
    where type MeasureValue from = MeasureValue' (CanMakeMeasure from) from
          ($:) v = mkMeasure' (canMakeMeasure v) v
-- | Create unprefixed measure.
instance MakeMeasure' v MMUnprefixed where
    type MeasureValue' MMUnprefixed v = v
    mkMeasure' _ v _ = Measured v Nothing
-- | Create prefixed measure.
instance (UnitPrefix p) => MakeMeasure' (v:$p) MMPrefixed where
    type MeasureValue' MMPrefixed (v:$p) = v
    mkMeasure' _ (v:$p) _ = Measured v . Just $ SomePrefix p
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-- * Operations
-- | Arithmetic on measures.  Sum/sub require type-level-equal annotations
-- (@True ~ EqF m a1 a2@); multiplication combines them into @a1 :* a2@.
class (Num v, Ord v) =>
    MeasuresOps m a1 a2 v where
        type EqF m a1 a2 :: Bool
        measuresSum :: (True ~ EqF m a1 a2) => m a1 v -> m a2 v -> m a1 v
        measuresSub :: (True ~ EqF m a1 a2) => m a1 v -> m a2 v -> m a1 v
        measuresMult :: m a1 v -> m a2 v -> m (a1:*a2) v
        -- | Alias for 'measuresSum'.
        ($+) :: (True ~ EqF m a1 a2) => m a1 v -> m a2 v -> m a1 v
        ($+) = measuresSum
        -- | Alias for 'measuresSub'.
        ($-) :: (True ~ EqF m a1 a2) => m a1 v -> m a2 v -> m a1 v
        ($-) = measuresSub
        -- | Alias for 'measuresMult'.
        ($*) :: m a1 v -> m a2 v -> m (a1:*a2) v
        ($*) = measuresMult
-- | Division, available when the value type is 'Fractional'; the result
-- annotation is @a1 :/ a2@.
class (Fractional v, MeasuresOps m a1 a2 v) =>
    MeasuresFracOps m a1 a2 v where
        measuresDiv :: m a1 v -> m a2 v -> m (a1:/a2) v
        -- | Alias for 'measuresDiv'.
        ($/) :: m a1 v -> m a2 v -> m (a1:/a2) v
        ($/) = measuresDiv
-- | Re-annotate a measure with an equivalent type-level annotation.
class MeasureCoerce m a0 a v where measureCoerce :: m a v -> m a0 v
-- | Vector-valued measurables: absolute value (scalar), normalization,
-- and scaling by a scalar measurable.
class MeasurableVector vec v | vec -> v
    where
        measurableAbs :: Measurable q vec -> Measurable (Abs q) v
        measurableNorm :: Measurable q vec -> Measurable q vec
        measurableScalarMult :: Measurable q1 vec -> Measurable q2 v -> Measurable (q1:*q2) vec
-----------------------------------------------------------------------------
-- All arithmetic on 'Measured' is delegated to 'dumbMeasuredOp'.
instance (Num v, Ord v) => MeasuresOps Measured u1 u2 v where
    type EqF Measured u1 u2 = EqU u1 u2
    measuresSum = dumbMeasuredOp (+)
    measuresSub = dumbMeasuredOp (-)
    measuresMult = dumbMeasuredOp (*)
instance (Fractional v, Ord v) => MeasuresFracOps Measured u1 u2 v where
    measuresDiv = dumbMeasuredOp (/)
-- Default operation implementation: TODO: make it smarter.
-- If the operands carry the same prefix (incl. both none), the prefix is
-- kept; otherwise both values are scaled to prefixless form first.
dumbMeasuredOp :: (Num v, Ord v) =>
    (v -> v -> v) -> Measured u1 v -> Measured u2 v -> Measured u v
dumbMeasuredOp op (Measured v1 Nothing) (Measured v2 Nothing) = Measured (v1 `op` v2) Nothing
dumbMeasuredOp op (Measured v1 p1) (Measured v2 p2)
    | p1 == p2 = Measured (v1 `op` v2) p1
    | otherwise = let f v = (*) v . maybe 1 prefixValue
                  in Measured ( f v1 p1 `op` f v2 p2 ) Nothing
-- Zero-cost re-annotation: allowed only for type-level-equal units.
instance (EqU u0 u ~ True) =>
    MeasureCoerce Measured u0 u v where
        measureCoerce = coerce
-----------------------------------------------------------------------------
-- Arithmetic on 'Measurable' is done pointwise per unit system, via
-- 'dumbMeasurableOp'.
instance (Num v, Ord v) => MeasuresOps Measurable q1 q2 v where
    type EqF Measurable q1 q2 = EqQ q1 q2
    measuresSum = dumbMeasurableOp (+)
    measuresSub = dumbMeasurableOp (-)
    measuresMult = dumbMeasurableOp (*)
instance (Fractional v, Ord v) => MeasuresFracOps Measurable q1 q2 v where
    measuresDiv = dumbMeasurableOp (/)
-- Uses `dumbMeasuredOp`: apply both system->measure functions, then
-- combine the resulting 'Measured' values.
dumbMeasurableOp :: (Num v, Ord v) =>
    (v -> v -> v) -> Measurable q1 v -> Measurable q2 v -> Measurable q v
dumbMeasurableOp op (Measurable mf1) (Measurable mf2) =
    Measurable $ \s -> dumbMeasuredOp op (mf1 s) (mf2 s)
-- Re-annotation for type-level-equal quantities.
instance (EqQ q0 q ~ True) =>
    MeasureCoerce Measurable q0 q v where
        measureCoerce (Measurable mf) = Measurable (coerce . mf)
-----------------------------------------------------------------------------
| fehu/PhysicalQuantities | src/PhysicalQuantities/Measures.hs | mit | 9,341 | 69 | 13 | 1,962 | 2,827 | 1,529 | 1,298 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Database.Sql.Simple.Pool where
import Control.Applicative
import Control.Monad.Trans.Control
import Data.Typeable
import Data.Default.Class
import Database.Sql.Simple.Internal
import Data.Time.Clock
import qualified Data.Pool as Pool
-- | A striped pool of backend connections; 'Pool b' is itself a 'Backend',
-- so pooled and plain connections share one API.
-- NOTE(review): datatype contexts (@Backend b =>@) are deprecated and add
-- no safety here -- consider dropping the context.
data Backend b => Pool b = Pool (Pool.Pool b)
  deriving (Typeable)
instance Elem (Pool a) (a ': as)
-- | Settings forwarded verbatim to 'Pool.createPool'.
data PoolConfig = PoolConfig
    { numStripes :: Int             -- sub-pool (stripe) count
    , idleTime :: NominalDiffTime   -- how long an unused connection may idle
    , maxResources :: Int           -- max connections per stripe
    } deriving (Show, Typeable)
-- Defaults: 1 stripe, 20 (NominalDiffTime literal, i.e. seconds) idle
-- timeout, up to 100 connections per stripe.
instance Default PoolConfig where
    def = PoolConfig 1 20 100
-- Every operation borrows a connection from the pool ('Pool.withResource')
-- for just the duration of that call; 'close' tears the whole pool down.
instance Backend b => Backend (Pool b) where
    data ConnectInfo (Pool b) = ConnectionPool
        { poolConfig :: PoolConfig
        , connectInfo :: ConnectInfo b
        }
    type ToRow (Pool b) = ToRow b
    type FromRow (Pool b) = FromRow b
    connect (ConnectionPool PoolConfig{..} ci) =
        Pool <$> Pool.createPool (connect ci) close numStripes idleTime maxResources
    close (Pool p) = Pool.destroyAllResources p
    execute (Pool p) t q = Pool.withResource p $ \c -> execute c t q
    execute_ (Pool p) t = Pool.withResource p $ \c -> execute_ c t
    query (Pool p) t q = Pool.withResource p $ \c -> query c t q
    query_ (Pool p) t = Pool.withResource p $ \c -> query_ c t
    fold (Pool p) t q a f = Pool.withResource p $ \c -> fold c t q a f
    fold_ (Pool p) t a f = Pool.withResource p $ \c -> fold_ c t a f
-- | Borrow one connection from the pool for the whole callback.
withPool :: (Backend b, MonadBaseControl IO m) => Pool b -> (b -> m a) -> m a
withPool (Pool p) = Pool.withResource p
-- | Run a transaction on a single pooled connection: the same connection
-- is held for the entire 'withTransaction', not re-borrowed per statement.
transaction :: Transaction b => Pool b -> (b -> Sql c a) -> Sql c a
transaction p m = withPool p $ \c -> withTransaction c (m c)
| philopon/sql-simple | sql-simple-pool/Database/Sql/Simple/Pool.hs | mit | 1,945 | 0 | 10 | 451 | 705 | 366 | 339 | 45 | 1 |
module Paths_score3 (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
-- | Run an IO action, recovering from IO exceptions (only) with the
-- supplied handler.  Other exception types propagate unchanged.
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO action handler = action `Exception.catch` handler
-- Cabal-generated package metadata: the install paths below are baked in
-- at build time; the get*Dir accessors let a score3_* environment
-- variable override each one at run time.
version :: Version
version = Version [0,2,0,0] []
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir     = "/home/z/score3/.stack-work/install/x86_64-linux-tinfo6/59bc4ff2d1edca160d803e6480473bc8760e483efc205237fbe657e371413eeb/7.10.2/bin"
libdir     = "/home/z/score3/.stack-work/install/x86_64-linux-tinfo6/59bc4ff2d1edca160d803e6480473bc8760e483efc205237fbe657e371413eeb/7.10.2/lib/x86_64-linux-ghc-7.10.2/score3-0.2.0.0-4kUMGWC6ejH2eyLbcFFdTx"
datadir    = "/home/z/score3/.stack-work/install/x86_64-linux-tinfo6/59bc4ff2d1edca160d803e6480473bc8760e483efc205237fbe657e371413eeb/7.10.2/share/x86_64-linux-ghc-7.10.2/score3-0.2.0.0"
libexecdir = "/home/z/score3/.stack-work/install/x86_64-linux-tinfo6/59bc4ff2d1edca160d803e6480473bc8760e483efc205237fbe657e371413eeb/7.10.2/libexec"
sysconfdir = "/home/z/score3/.stack-work/install/x86_64-linux-tinfo6/59bc4ff2d1edca160d803e6480473bc8760e483efc205237fbe657e371413eeb/7.10.2/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
-- Environment variable wins; a missing variable (getEnv throws an
-- IOException) falls back to the compiled-in path via catchIO.
getBinDir = catchIO (getEnv "score3_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "score3_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "score3_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "score3_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "score3_sysconfdir") (\_ -> return sysconfdir)
-- | Absolute path of a data file shipped with the package: the data
-- directory (possibly overridden via the environment) plus the name.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = fmap (++ ("/" ++ name)) getDataDir
| dschalk/score3 | .stack-work/dist/x86_64-linux-tinfo6/Cabal-1.22.4.0/build/autogen/Paths_score3.hs | mit | 1,862 | 0 | 10 | 177 | 362 | 206 | 156 | 28 | 1 |
{-# language RecordWildCards #-}
-- TODO doc
-- Helper parser that works with ByteString,
-- not Decode
module Database.PostgreSQL.Protocol.Parsers
( parseServerVersion
, parseIntegerDatetimes
, parseErrorDesc
, parseNoticeDesc
, parseCommandResult
) where
import Data.Char (chr)
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Text.Read (readMaybe)
import Data.ByteString.Char8 as BS (readInteger, readInt, unpack, pack)
import qualified Data.ByteString as B
import qualified Data.HashMap.Strict as HM
import Database.PostgreSQL.Protocol.Types
-- | Parse the @server_version@ parameter value: the leading run of digits
-- and dots becomes major/minor/revision (missing components default to 0
-- via @repeat (Just 0)@); the remainder is kept as a description suffix.
-- Fails if any dotted component is not a number.
parseServerVersion :: B.ByteString -> Either B.ByteString ServerVersion
parseServerVersion bs =
    let (numbersStr, desc) = B.span isDigitDot bs
        numbers = readMaybe . BS.unpack <$> B.split 46 numbersStr
    in case numbers ++ repeat (Just 0) of
        (Just major : Just minor : Just rev : _) ->
            Right $ ServerVersion major minor rev desc
        _ -> Left $ "Unknown server version" <> bs
  where
    -- Byte 46 is '.', bytes 48..57 are '0'..'9'.
    isDigitDot c | c == 46 = True -- dot
                 | c >= 48 && c < 58 = True -- digits
                 | otherwise = False
-- | Interpret the @integer_datetimes@ server parameter as a flag.
-- Anything other than "on"/"yes"/"1" maps to False; this never fails.
-- NOTE(review): servers can also spell booleans as "true" -- confirm
-- whether that form needs to be accepted here.
parseIntegerDatetimes :: B.ByteString -> Either B.ByteString Bool
parseIntegerDatetimes bs
    | bs == "on" || bs == "yes" || bs == "1" = Right True
    | otherwise = Right False
-- | Parse a CommandComplete tag ("INSERT 0 5", "SELECT 3", ...) into a
-- 'CommandResult'.  Unrecognised command words map to 'CommandOk'.
parseCommandResult :: B.ByteString -> Either B.ByteString CommandResult
parseCommandResult s =
    let (command, rest) = B.break (== space) s
    in case command of
        -- format: `INSERT oid rows`
        "INSERT" ->
            maybe (Left "Invalid format in INSERT command result") Right $ do
                (oid, r) <- readInteger $ B.dropWhile (== space) rest
                (rows, _) <- readInteger $ B.dropWhile (== space) r
                Just $ InsertCompleted (Oid $ fromInteger oid)
                                       (RowsCount $ fromInteger rows)
        "DELETE" -> DeleteCompleted <$> readRows rest
        "UPDATE" -> UpdateCompleted <$> readRows rest
        "SELECT" -> SelectCompleted <$> readRows rest
        "MOVE" -> MoveCompleted <$> readRows rest
        "FETCH" -> FetchCompleted <$> readRows rest
        "COPY" -> CopyCompleted <$> readRows rest
        _ -> Right CommandOk
  where
    -- ASCII space.
    space = 32
    -- Read the (single) row count that follows the command word.
    readRows = maybe (Left "Invalid rows format in command result")
                     (pure . RowsCount . fromInteger . fst)
               . readInteger . B.dropWhile (== space)
-- | Split the NUL-separated field list of an ErrorResponse/NoticeResponse
-- into a map keyed by the one-byte field code (first byte of each field);
-- the rest of each field is its value.  Empty fields are dropped.
parseErrorNoticeFields
    :: B.ByteString -> Either B.ByteString (HM.HashMap Char B.ByteString)
parseErrorNoticeFields = Right . HM.fromList
    . fmap (\s -> (chr . fromIntegral $ B.head s, B.tail s))
    . filter (not . B.null) . B.split 0
-- | Map the severity field of an ErrorResponse; unrecognised text (e.g.
-- a localized severity) becomes 'UnknownErrorSeverity'.  Never fails.
parseErrorSeverity :: B.ByteString -> Either B.ByteString ErrorSeverity
parseErrorSeverity bs = Right $ case bs of
    "ERROR" -> SeverityError
    "FATAL" -> SeverityFatal
    "PANIC" -> SeverityPanic
    _ -> UnknownErrorSeverity
-- | Map the severity field of a NoticeResponse; unrecognised text becomes
-- 'UnknownNoticeSeverity'.  Never fails.
parseNoticeSeverity :: B.ByteString -> Either B.ByteString NoticeSeverity
parseNoticeSeverity bs = Right $ case bs of
    "WARNING" -> SeverityWarning
    "NOTICE" -> SeverityNotice
    "DEBUG" -> SeverityDebug
    "INFO" -> SeverityInfo
    "LOG" -> SeverityLog
    _ -> UnknownNoticeSeverity
-- | Build an 'ErrorDesc' from the payload of an ErrorResponse message.
--
-- The payload is a sequence of NUL-separated, byte-tagged fields (see
-- 'parseErrorNoticeFields').  Severity ('S'), code ('C') and message
-- ('M') are mandatory; all other fields are optional.
--
-- Fix: the missing-key diagnostic previously read
-- "Neccessary key 'X'is not presented ..." (typo plus missing space);
-- it now renders correctly.
parseErrorDesc :: B.ByteString -> Either B.ByteString ErrorDesc
parseErrorDesc s = do
    hm <- parseErrorNoticeFields s
    errorSeverityOld <- lookupKey 'S' hm
    errorCode <- lookupKey 'C' hm
    errorMessage <- lookupKey 'M' hm
    -- This is identical to the S field except that the contents are
    -- never localized. This is present only in messages generated by
    -- PostgreSQL versions 9.6 and later.
    let errorSeverityNew = HM.lookup 'V' hm
    errorSeverity <- parseErrorSeverity $
        fromMaybe errorSeverityOld errorSeverityNew
    let
        errorDetail = HM.lookup 'D' hm
        errorHint = HM.lookup 'H' hm
        errorPosition = HM.lookup 'P' hm >>= fmap fst . readInt
        errorInternalPosition = HM.lookup 'p' hm >>= fmap fst . readInt
        errorInternalQuery = HM.lookup 'q' hm
        errorContext = HM.lookup 'W' hm
        errorSchema = HM.lookup 's' hm
        errorTable = HM.lookup 't' hm
        errorColumn = HM.lookup 'c' hm
        errorDataType = HM.lookup 'd' hm
        errorConstraint = HM.lookup 'n' hm
        errorSourceFilename = HM.lookup 'F' hm
        errorSourceLine = HM.lookup 'L' hm >>= fmap fst . readInt
        errorSourceRoutine = HM.lookup 'R' hm
    Right ErrorDesc{..}
  where
    -- A missing mandatory field is a protocol violation.
    lookupKey c = maybe (Left $ "Necessary key " <> BS.pack (show c) <>
                                " is not present in ErrorResponse message")
                  Right . HM.lookup c
-- | Build a 'NoticeDesc' from the payload of a NoticeResponse message.
--
-- Field layout is identical to ErrorResponse: NUL-separated, byte-tagged
-- fields.  Severity ('S'), code ('C') and message ('M') are mandatory.
--
-- Fix: the missing-key diagnostic previously read
-- "Neccessary key 'X'is not presented ..." (typo plus missing space);
-- it now renders correctly.
parseNoticeDesc :: B.ByteString -> Either B.ByteString NoticeDesc
parseNoticeDesc s = do
    hm <- parseErrorNoticeFields s
    noticeSeverityOld <- lookupKey 'S' hm
    noticeCode <- lookupKey 'C' hm
    noticeMessage <- lookupKey 'M' hm
    -- This is identical to the S field except that the contents are
    -- never localized. This is present only in messages generated by
    -- PostgreSQL versions 9.6 and later.
    let noticeSeverityNew = HM.lookup 'V' hm
    noticeSeverity <- parseNoticeSeverity $
        fromMaybe noticeSeverityOld noticeSeverityNew
    let
        noticeDetail = HM.lookup 'D' hm
        noticeHint = HM.lookup 'H' hm
        noticePosition = HM.lookup 'P' hm >>= fmap fst . readInt
        noticeInternalPosition = HM.lookup 'p' hm >>= fmap fst . readInt
        noticeInternalQuery = HM.lookup 'q' hm
        noticeContext = HM.lookup 'W' hm
        noticeSchema = HM.lookup 's' hm
        noticeTable = HM.lookup 't' hm
        noticeColumn = HM.lookup 'c' hm
        noticeDataType = HM.lookup 'd' hm
        noticeConstraint = HM.lookup 'n' hm
        noticeSourceFilename = HM.lookup 'F' hm
        noticeSourceLine = HM.lookup 'L' hm >>= fmap fst . readInt
        noticeSourceRoutine = HM.lookup 'R' hm
    Right NoticeDesc{..}
  where
    -- A missing mandatory field is a protocol violation.
    lookupKey c = maybe (Left $ "Necessary key " <> BS.pack (show c) <>
                                " is not present in NoticeResponse message")
                  Right . HM.lookup c
| postgres-haskell/postgres-wire | src/Database/PostgreSQL/Protocol/Parsers.hs | mit | 6,632 | 0 | 17 | 2,113 | 1,675 | 837 | 838 | 126 | 8 |
{-# LANGUAGE DeriveFunctor, DeriveFoldable #-}
module Types
( module Types, Word8, Word16, Word32 )
where
import qualified Data.ByteString.Lazy as B
import Data.Word
import qualified Data.Map as M
-- Main data types
type Offset = Word32
-- | Offset, length, and descriptive labels of a file segment.
type Segment = (Offset, Word32, [String])
type Segments = [Segment]
-- | A register reference: by numeric position, or by name.
-- NOTE(review): presumably names only occur in YAML input (cf. the
-- 'NamedJump' comment below) -- confirm.
data Register = RegPos Word16 | RegName String
    deriving (Show, Eq, Ord)
type ResReg = Word16
-- | A term value: a register reference or a 16-bit constant.
data TVal r
    = Reg r
    | Const Word16
    deriving (Eq, Show, Functor, Foldable)
-- | A comparison between two term values.
data Conditional r = Cond (TVal r) CondOp (TVal r)
    deriving (Eq, Functor, Foldable)
-- | Comparison operators; 'Unknowncond' keeps the raw bytes of an
-- unrecognised operator.
data CondOp
    = Eq
    | Gt
    | Lt
    | GEq
    | LEq
    | NEq
    | Unknowncond B.ByteString
    deriving (Eq)
-- | Two-operand register operations ('Set' overwrites the register).
data ArithOp
    = Inc | Dec | Mult | Div | Mod | And | Or | XOr | Set
    deriving (Eq, Bounded, Enum)
-- | Script commands, parameterised over the register representation @r@.
-- 'Unknown' keeps the raw bytes of an unrecognised command.
data Command r
    = Play Word16
    | Random Word8 Word8
    | PlayAll Word8 Word8
    | PlayAllVariant (TVal r)
    | RandomVariant (TVal r)
    | Cancel
    | Game Word16
    | ArithOp ArithOp r (TVal r)
    | Neg r
    | Unknown B.ByteString r (TVal r)
    | Jump (TVal r)
    | NamedJump String -- Only in YAML files, never read from GMEs
    | Timer r (TVal r)
    deriving (Eq, Functor, Foldable)
type PlayList = [Word16]
-- | One script line: source offset, guarding conditionals, commands to
-- run when they hold, and a playlist of audio indices.
data Line r = Line Offset [Conditional r] [Command r] PlayList
    deriving (Functor, Foldable)
type ProductID = Word32
-- | Three-way comparison result used for duplicate audio detection.
data Similarity = Absent | Equal | Similar deriving (Show, Eq)
-- | In-memory representation of a whole GME file.
-- NOTE(review): 'ttChecksum' vs 'ttChecksumCalc' presumably hold the
-- stored and the recomputed checksum respectively -- confirm at the
-- parsing site.
data TipToiFile = TipToiFile
    { ttProductId :: ProductID
    , ttRawXor :: Word32
    , ttComment :: B.ByteString
    , ttDate :: B.ByteString
    , ttLang :: B.ByteString
    , ttInitialRegs :: [Word16]
    , ttWelcome :: [PlayList]
    , ttScripts :: [(Word16, Maybe [Line ResReg])]
    , ttGames :: [Game]
    , ttAudioFiles :: [B.ByteString]
    , ttAudioFilesDoubles :: Similarity
    , ttAudioXor :: Word8
    , ttMediaFlags :: Maybe [Word16]
    , ttBinaries1 :: [(B.ByteString, B.ByteString)]
    , ttBinaries2 :: [(B.ByteString, B.ByteString)]
    , ttBinaries3 :: [(B.ByteString, B.ByteString)]
    , ttBinaries4 :: [(B.ByteString, B.ByteString)]
    , ttBinaries5 :: [(B.ByteString, B.ByteString)]
    , ttBinaries6 :: [(B.ByteString, B.ByteString)]
    , ttSpecialOIDs :: Maybe (Word16, Word16)
    , ttChecksum :: Word32
    , ttChecksumCalc :: Word32
    }
type PlayListList = [PlayList]
type GameId = Word16
data Game =
CommonGame
{ gGameType :: Word16
, gRounds :: Word16
, gUnknownC :: Word16
, gEarlyRounds :: Word16
, gRepeatLastMedia :: Word16
, gUnknownX :: Word16
, gUnknownW :: Word16
, gUnknownV :: Word16
, gStartPlayList :: PlayListList
, gRoundEndPlayList :: PlayListList
, gFinishPlayList :: PlayListList
, gRoundStartPlayList :: PlayListList
, gLaterRoundStartPlayList :: PlayListList
, gSubgames :: [SubGame]
, gTargetScores :: [Word16]
, gFinishPlayLists :: [PlayListList]
}
| Game6
{ gRounds :: Word16
, gBonusSubgameCount :: Word16
, gBonusRounds :: Word16
, gBonusTarget :: Word16
, gUnknownI :: Word16
, gEarlyRounds :: Word16
, gUnknownQ :: Word16
, gRepeatLastMedia :: Word16
, gUnknownX :: Word16
, gUnknownW :: Word16
, gUnknownV :: Word16
, gStartPlayList :: PlayListList
, gRoundEndPlayList :: PlayListList
, gFinishPlayList :: PlayListList
, gRoundStartPlayList :: PlayListList
, gLaterRoundStartPlayList :: PlayListList
, gRoundStartPlayList2 :: PlayListList
, gLaterRoundStartPlayList2 :: PlayListList
, gSubgames :: [SubGame]
, gTargetScores :: [Word16]
, gBonusTargetScores :: [Word16]
, gFinishPlayLists :: [PlayListList]
, gBonusFinishPlayLists :: [PlayListList]
, gBonusSubgameIds :: [Word16]
}
| Game7
{ gRounds :: Word16
, gUnknownC :: Word16
, gEarlyRounds :: Word16
, gRepeatLastMedia :: Word16
, gUnknownX :: Word16
, gUnknownW :: Word16
, gUnknownV :: Word16
, gStartPlayList :: PlayListList
, gRoundEndPlayList :: PlayListList
, gFinishPlayList :: PlayListList
, gRoundStartPlayList :: PlayListList
, gLaterRoundStartPlayList :: PlayListList
, gSubgames :: [SubGame]
, gTargetScores :: [Word16]
, gFinishPlayLists :: [PlayListList]
, gSubgameGroups :: [[GameId]]
}
| Game8
{ gRounds :: Word16
, gUnknownC :: Word16
, gEarlyRounds :: Word16
, gRepeatLastMedia :: Word16
, gUnknownX :: Word16
, gUnknownW :: Word16
, gUnknownV :: Word16
, gStartPlayList :: PlayListList
, gRoundEndPlayList :: PlayListList
, gFinishPlayList :: PlayListList
, gRoundStartPlayList :: PlayListList
, gLaterRoundStartPlayList :: PlayListList
, gSubgames :: [SubGame]
, gTargetScores :: [Word16]
, gFinishPlayLists :: [PlayListList]
, gGameSelectOIDs :: [Word16]
, gGameSelect :: [Word16]
, gGameSelectErrors1 :: PlayListList
, gGameSelectErrors2 :: PlayListList
}
| Game9
{ gRounds :: Word16
, gUnknownC :: Word16
, gEarlyRounds :: Word16
, gRepeatLastMedia :: Word16
, gUnknownX :: Word16
, gUnknownW :: Word16
, gUnknownV :: Word16
, gStartPlayList :: PlayListList
, gRoundEndPlayList :: PlayListList
, gFinishPlayList :: PlayListList
, gRoundStartPlayList :: PlayListList
, gLaterRoundStartPlayList :: PlayListList
, gSubgames :: [SubGame]
, gTargetScores :: [Word16]
, gFinishPlayLists :: [PlayListList]
, gExtraPlayLists :: [PlayListList]
}
| Game10
{ gRounds :: Word16
, gUnknownC :: Word16
, gEarlyRounds :: Word16
, gRepeatLastMedia :: Word16
, gUnknownX :: Word16
, gUnknownW :: Word16
, gUnknownV :: Word16
, gStartPlayList :: PlayListList
, gRoundEndPlayList :: PlayListList
, gFinishPlayList :: PlayListList
, gRoundStartPlayList :: PlayListList
, gLaterRoundStartPlayList :: PlayListList
, gSubgames :: [SubGame]
, gTargetScores :: [Word16]
, gFinishPlayLists :: [PlayListList]
, gExtraPlayLists :: [PlayListList]
}
| Game16
{ gRounds :: Word16
, gUnknownC :: Word16
, gEarlyRounds :: Word16
, gRepeatLastMedia :: Word16
, gUnknownX :: Word16
, gUnknownW :: Word16
, gUnknownV :: Word16
, gStartPlayList :: PlayListList
, gRoundEndPlayList :: PlayListList
, gFinishPlayList :: PlayListList
, gRoundStartPlayList :: PlayListList
, gLaterRoundStartPlayList :: PlayListList
, gSubgames :: [SubGame]
, gTargetScores :: [Word16]
, gFinishPlayLists :: [PlayListList]
, gExtraOIDs :: [Word16]
, gExtraPlayLists :: [PlayListList]
}
| Game253
deriving Show
-- | The numeric tag identifying the kind of a 'Game', as it appears in
-- GME files. Only 'CommonGame' stores its tag in a field; every other
-- constructor has a fixed tag.
gameType :: Game -> Word16
gameType game = case game of
    CommonGame{} -> gGameType game
    Game6{}      -> 6
    Game7{}      -> 7
    Game8{}      -> 8
    Game9{}      -> 9
    Game10{}     -> 10
    Game16{}     -> 16
    Game253      -> 253
type OID = Word16
data SubGame = SubGame
{ sgUnknown :: B.ByteString
, sgOids1 :: [OID]
, sgOids2 :: [OID]
, sgOids3 :: [OID]
, sgPlaylist :: [PlayListList]
}
deriving Show
type Transscript = M.Map Word16 String
type CodeMap = M.Map String Word16
-- Command options
data ImageFormat = SVG { withPNG :: Bool } | PNG | PDF
    deriving Show
-- | File-name suffix for each supported image format.
suffixOf :: ImageFormat -> String
suffixOf fmt = case fmt of
    SVG _ -> "svg"
    PNG   -> "png"
    PDF   -> "pdf"
data Conf = Conf
{ cTransscriptFile :: Maybe FilePath
, cCodeDim :: (Int, Int)
, cDPI :: Int
, cPixelSize :: Int
, cImageFormat :: Maybe ImageFormat
}
deriving Show
| entropia/tip-toi-reveng | src/Types.hs | mit | 9,557 | 0 | 13 | 3,838 | 1,973 | 1,234 | 739 | 241 | 1 |
{-# LANGUAGE Safe #-}
{-# LANGUAGE NamedFieldPuns #-}
module Main where
import Control.DeepSeq(force)
import Control.Applicative((<$>))
import Control.Monad.Except()
import Data.Map((!))
import Text.PrettyPrint
import FP15.Parsing()
import FP15.Parsing.Types
import FP15.Parsing.Lexer(scanTokens)
import FP15.Parsing.Parser(parse)
import FP15.Value
import FP15.Types hiding (Const, Func)
import FP15.Evaluator()
import FP15.Evaluator.Types
import FP15.Evaluator.Contract()
import FP15.Compiler
import FP15.Compiler.Types
import FP15.Compiler.CompiledModuleSet
import FP15.Compiler.PrettyPrinting
import FP15.Standard(standardCMS)
import FP15.Evaluator.Standard(standardEnv)
import FP15.Evaluator.Translation(transMap)
import FP15.Evaluator.FP(execFP)
main :: IO ()
main = do
  -- Read an FP15 module from stdin, compile it, and run its "Main.main".
  src <- getContents
  -- Parse, then push the module through the staged compiler until it
  -- reports Finished; 'force' keeps each stage strict so errors surface here.
  let ast = unwrap $ parse $ ModuleSource Nothing src
  let m = unwrap $ stageModule standardCMS ast
  let m' = force $ until ((==) Finished . rmsTag) (unwrap . stepModule) m
  let c = force $ makeCompiledModule m'
  let cms' = force $ addModule (ssMN $ rmsSS m') c standardCMS
  -- 'cmis' is only consumed by the commented-out debug prints below.
  let (CompiledModuleSet cmis) = force cms'
  --print $ vcat $ prettyCMILines (M ["Main"]) (cmis ! (M ["Main"]))
  --print $ (cmis ! (M ["Main"]))
  -- Translate the compiled module set and evaluate Main.main against the
  -- real-world token, printing the resulting FP value.
  let fl = unwrap $ translateCMS cms'
  let s = force $ transMap standardEnv fl
  res <- fmap unwrap $ execFP $ (s ! "Main.main") (Extended $ RealWorld RW)
  putStrLn $ disp (res :: FPValue)
  -- Any Left at any stage aborts with its pretty-printed error.
  where unwrap (Left x) = error $ disp x
        unwrap (Right x) = force x
{-
case m of
Left e -> print e
Right r@(ReducingModuleState SS { ssIN = in_ }
_ (Reducing Module { fs })) ->
let fs' = M.map mm fs
mm (Unresolved n) = convExprAST in_ n
mm x = error (show x) in
print (fs, fs')
-}
-- main = getContents >>= printTokens
-- $testSplitTokens
-- >>> splitTokens "abc def"
-- Right ["abc","def"]
-- >>> splitTokens "abc.def"
-- Right ["abc",".def"]
-- >>> splitTokens "Abc.def"
-- Right ["Abc.def"]
-- >>> splitTokens "abc!!def"
-- Right ["abc!","!","def"]
-- >>> splitTokens "abc+.def"
-- Right ["abc","+",".def"]
-- >>> splitTokens "abc?def"
-- Right ["abc?","def"]
-- >>> splitTokens "abc??def"
-- Right ["abc?","?","def"]
-- >>> splitTokens "X.Y...z"
-- Right ["X.Y","...","z"]
-- >>> splitTokens "Abc.Def.Ghi.jkl.Mno.Pqr.Stu."
-- Right ["Abc.Def.Ghi.jkl",".","Mno.Pqr.Stu","."]
-- >>> splitTokens "Abc.Def.Ghi.jkl.Mno.Pqr.Stu.(++)"
-- Right ["Abc.Def.Ghi.jkl",".","Mno.Pqr.Stu.(++)"]
-- >>> splitTokens "Abc.Def.Ghi.jkl.Mno.Pqr.Stu.f"
-- Right ["Abc.Def.Ghi.jkl",".Mno.Pqr.Stu.f"]
-- >>> splitTokens "!<+!<<"
-- Right ["!<+","!<<"]
-- >>> splitTokens "@@@!"
-- Right ["@","@","@","!"]
-- $setup
-- >>> import Control.Applicative
-- >>> import Control.Monad
-- >>> import Test.QuickCheck
-- >>> newtype T = T String deriving Show
-- >>> let genChar = elements $ ['a'..'z']++['A'..'Z']++"~!@#$%^&*()_+`-={}|[]\\:;<>?,./"
-- >>> instance Arbitrary T where arbitrary = T <$> (listOf genChar)
-- $props
-- prop> \(T s) -> splitTokens (s :: String) == (splitTokens =<< (fmap (concatMap (++ " ")) $ splitTokens s))
-- | The 'splitTokens' function splits a string into tokens.
-- | Lex a string into its token texts, or return the lexer error.
splitTokens :: String -> Either String [String]
splitTokens input = fmap extract (scanTokens input)
  where
    -- Drop the final token (presumably an end-of-input marker — confirm
    -- against the lexer) and keep each remaining token's source text.
    extract = map (\(Token _ _ str) -> str) . init
| Ming-Tang/FP15 | src/Main.hs | mit | 3,323 | 0 | 14 | 587 | 593 | 338 | 255 | 42 | 2 |
import System.IO
import System.Directory
import System.Environment
import StyleSvg
-- | Entry point: the first command-line argument names a CSS file, every
-- following argument names an SVG file to restyle with that CSS.
main :: IO ()
main = do
  args <- getArgs
  case args of
    -- Previously an empty argument list crashed with an irrefutable-pattern
    -- failure; report usage instead.
    [] -> hPutStrLn stderr "usage: <css-file> [svg-file ...]"
    (svgCssFilePath : svgFiles) -> do
      -- read the stylesheet once; it is applied to every SVG file
      svgStyle <- readFile svgCssFilePath
      putStrLn $ "CSS File: " ++ svgCssFilePath
      putStr $ "SVG Files: "
      mapM_ (putStr . (++ " ")) svgFiles
      putStrLn ""
      -- update each SVG file in place with the shared style
      mapM_ (updateSVG svgStyle) svgFiles
-- takes a string with an css style and a path to a SVG file and updates the svg file
updateSVG :: String -> String -> IO ()
updateSVG svgStyle fileName =
do
-- file handle for svg file
handle <- openFile fileName ReadMode
-- get contents of svg file
svgContents <- hGetContents handle
-- open temporary file to store updated results
(tempName, tempHandle) <- openTempFile "." "temp"
-- put the updated SVG file into the temporary file
hPutStr tempHandle $ cssStyleSVG svgStyle svgContents
hClose handle
hClose tempHandle
-- remove the old svg file
removeFile fileName
-- rename the temporary svg file to have the same name as the initial svg file
renameFile tempName fileName
| dino-r/casttex | src/UpdateSvg.hs | mit | 1,291 | 0 | 11 | 342 | 231 | 111 | 120 | 24 | 1 |
-- | A minimal Reader monad: a computation that reads a shared environment @r@.
newtype Reader r a = Reader { runReader :: r -> a }

-- Functor and Applicative are required superclasses of Monad since
-- GHC 7.10 (the Applicative–Monad Proposal); without these instances
-- the Monad instance below no longer compiles.
instance Functor (Reader r) where
    fmap f m = Reader $ f . runReader m

instance Applicative (Reader r) where
    pure a    = Reader $ const a
    mf <*> ma = Reader $ \r -> runReader mf r (runReader ma r)

instance Monad (Reader r) where
    return = pure
    -- run @m@ under the environment, then run the continuation under
    -- the same environment
    m >>= k = Reader $ \r -> runReader (k (runReader m r)) r

-- | Retrieve the environment itself.
ask :: Reader a a
ask = Reader id

-- | Retrieve a projection of the environment.
asks :: (r -> a) -> Reader r a
asks = Reader

-- | Run a computation under a locally modified environment.
local :: (r -> b) -> Reader b a -> Reader r a
local f m = Reader $ runReader m . f
| riwsky/wiwinwlh | src/reader_impl.hs | mit | 344 | 0 | 12 | 95 | 191 | 98 | 93 | 10 | 1 |
module Language.Instances (Show(..), Eq(..)) where
import Language.Type
import Language.FA
import Language.RE
import Text.ParserCombinators.Parsec
import Prelude hiding (negate)
import qualified Data.List as List
import Debug.Trace
--------------------------------------------------------------
-- | Clean up 'show' output for display: remove double quotes, backslashes
-- and single quotes, and turn the literal digit run \"8709\" into the
-- empty-set glyph (U+2205, whose code point is 8709).
dropQuote :: String -> String
dropQuote = go
  where
    go [] = []
    go ('8':'7':'0':'9':rest) = '∅' : go rest
    go (c:rest)
        | c `elem` "\"\\'" = go rest
        | otherwise        = c : go rest
-- | Render an input symbol; the empty transition prints as the epsilon glyph.
instance Show Alphabet where
    show (Alphabet a) = show a
    show Epsilon = "ɛ"
-- | Render a transition table, one transition per indented line;
-- 'dropQuote' strips the quoting/escaping that 'show' adds around strings.
-- The four clauses previously each duplicated the same list-rendering
-- helpers; they are shared here.
instance Show Transitions where
    show transitions = dropQuote (render transitions)
        where
            render (TransitionsPDA mappings) = listWith showPDA mappings
            render (TransitionsDFA mappings) = listWith showArrow mappings
            render (TransitionsNFA mappings) = listWith showArrow mappings
            render (TransitionsRE  mappings) = listWith showArrow mappings
            -- one transition per line, indented under the table header
            listWith f = concatMap (("\n    " ++) . f)
            -- PDA transitions carry a stack symbol on each side
            showPDA (s, a, p, t, q) =
                show s ++
                " × " ++
                show a ++
                " × " ++
                show p ++
                " → " ++
                show t ++
                " × " ++
                show q
            -- DFA/NFA/RE transitions are plain (state × symbol → target)
            showArrow (s, a, t) =
                show s ++
                " × " ++
                show a ++
                " → " ++
                show t
-- | Pretty-print an automaton as its five components — states Q,
-- alphabet Σ, transitions δ, start state q, accepting states F —
-- one per line; 'dropQuote' strips the escaping 'show' adds to strings.
instance Show DFA where
    show (DFA states alphabets mappings state accepts) = dropQuote $
        "DFA" ++
        "\n  Q   " ++ (show states) ++
        "\n  Σ   " ++ (show alphabets) ++
        "\n  δ   " ++ (show mappings) ++
        "\n  q   " ++ (show state) ++
        "\n  F   " ++ (show accepts) ++
        "\n"

-- | Same layout as the DFA rendering, with an \"NFA\" header.
instance Show NFA where
    show (NFA states alphabets mappings state accepts) = dropQuote $
        "NFA" ++
        "\n  Q   " ++ (show states) ++
        "\n  Σ   " ++ (show alphabets) ++
        "\n  δ   " ++ (show mappings) ++
        "\n  q   " ++ (show state) ++
        "\n  F   " ++ (show accepts) ++
        "\n"

-- | Same layout as the DFA rendering, with a \"GNFA\" header.
instance Show GNFA where
    show (GNFA states alphabets mappings state accepts) = dropQuote $
        "GNFA" ++
        "\n  Q   " ++ (show states) ++
        "\n  Σ   " ++ (show alphabets) ++
        "\n  δ   " ++ (show mappings) ++
        "\n  q   " ++ (show state) ++
        "\n  F   " ++ (show accepts) ++
        "\n"
-- | Language equivalence of DFAs: two DFAs are equal when they have the
-- same (sorted) alphabet and the symmetric difference of their languages
-- is empty — i.e. the automaton (dfa0 ∩ ¬dfa1) ∪ (¬dfa0 ∩ dfa1) has no
-- accepting state left after unreachable states are trimmed.
instance Eq DFA where
    (==) dfa0 dfa1 = alphabetDFA0 == alphabetDFA1 && null accepts
        where (DFA _ _ _ _ accepts) = trimUnreachableStates wtf
              -- symmetric-difference automaton of the two languages
              wtf = (dfa0 `intersect` _dfa1) `union` (_dfa0 `intersect` dfa1)
              _dfa0 = negate dfa0
              _dfa1 = negate dfa1
              alphabet (DFA _ a _ _ _) = a
              -- alphabets are compared order-insensitively
              alphabetDFA0 = List.sort $ alphabet dfa0
              alphabetDFA1 = List.sort $ alphabet dfa1

-- | NFA equivalence by determinizing both sides and comparing the DFAs.
instance Eq NFA where
    (==) nfa0 nfa1 = nfa2dfa nfa0 == nfa2dfa nfa1
--------------------------------------------------------------
-- | Render a regular expression in concrete syntax: alternation is always
-- parenthesized, concatenation is rendered by juxtaposition.
-- NOTE(review): the ':+' and 'Star' cases add no parentheses around their
-- operands, so e.g. @Star (a :+ b)@ prints like @ab*@ — confirm this
-- round-trips with the parser as intended.
instance Show RE where
    show (A a) = [a]
    show N = "∅"
    show E = "ɛ"
    show (a :| b) = "(" ++ show a ++ "|" ++ show b ++ ")"
    show (a :+ b) = show a ++ show b
    show (Star a) = show a ++ "*"
-- | Parse a regular expression from its concrete syntax. A parse error
-- yields no parses ([]), the standard 'readsPrec' failure mode, instead
-- of the previous pattern-match crash on 'Left'. The whole input is
-- consumed, so the remainder returned is always \"\".
instance Read RE where
    readsPrec _ input = case parse reParser "Regular Expression" input of
        Right x -> [(x, "")]
        Left _  -> []
-- | Parse a single regular-expression unit, optionally followed by a
-- Kleene star: a parenthesized sub-expression, the empty-language and
-- epsilon glyphs, a space, a digit, or a letter. In each alternative the
-- trailing @<|>@ line closes the inner @do@ block (layout parse-error
-- rule), so it offers the un-starred reading when no '*' follows.
unitParser :: Parser RE
unitParser =
    -- parenthesized group, optionally starred
    do
        char '('
        inside <- reParser
        char ')'
        do
            char '*'
            return (Star inside)
            <|> return (inside)
    <|>
    -- empty language; ∅* denotes {ɛ}, hence E
    do
        char '∅'
        do
            char '*'
            return (E)
            <|> return (N)
    <|>
    -- epsilon literal, optionally starred
    do
        char 'ɛ'
        do
            char '*'
            return (Star (E))
            <|> return (E)
    <|>
    -- a space is an ordinary symbol here
    do
        char ' '
        do
            char '*'
            return (Star (A ' '))
            <|> return (A ' ')
    <|>
    -- single digit symbol, optionally starred
    do
        c <- digit
        do
            char '*'
            return (Star (A c))
            <|> return (A c)
    <|>
    -- single letter symbol, optionally starred
    do
        c <- letter
        do
            char '*'
            return (Star (A c))
            <|> return (A c)
-- | Parse one or more units and fold them into a concatenation.
concatParser :: Parser RE
concatParser = fmap collapse (many1 unitParser)
  where
    -- Concatenating with the empty language yields the empty language;
    -- otherwise chain the parts right-associatively with :+.
    collapse parts
        | N `elem` parts = N
        | otherwise      = foldr1 (:+) parts
-- | Top-level regular-expression parser: a concatenation, optionally
-- followed by @|@ and a right operand. The trailing @<|> return a@ closes
-- the inner @do@ block (layout parse-error rule), so when no '|' follows
-- the left operand is returned on its own.
reParser :: Parser RE
reParser =
    do
        a <- concatParser
        do
            char '|'
            b <- concatParser
            return (a :| b)
            <|> return a
| banacorn/formal-language | haskell-legacy/Language/instances.hs | mit | 5,403 | 0 | 17 | 2,330 | 1,729 | 862 | 867 | 170 | 2 |
module Rx.Observable.Throttle where
import Control.Concurrent.STM (atomically, newTVarIO, readTVar, writeTVar)
import Control.Monad (when)
import Data.Time (diffUTCTime, getCurrentTime)
import Tiempo (TimeInterval, toNominalDiffTime)
import Rx.Observable.Filter (filterM)
import Rx.Observable.Types
--------------------------------------------------------------------------------
-- | Rate-limit a stream: an item is emitted only if at least @delay@ has
-- elapsed since the previously *emitted* item; items arriving sooner are
-- dropped. The timestamp is only updated when an item passes the filter.
throttle :: TimeInterval
         -> Observable s a
         -> Observable s a
throttle delay source =
  Observable $ \observer -> do
    -- Nothing until the first item is emitted, then Just <time of last emit>.
    mlastOnNextVar <- newTVarIO Nothing
    let source' = filterM (throttleFilter mlastOnNextVar)
                          source
    subscribeObserver source' observer
  where
    -- Predicate passed to filterM; the item itself is ignored.
    -- NOTE(review): the read and the later write are separate 'atomically'
    -- blocks, so two concurrently arriving items could both pass — confirm
    -- the observer contract serializes onNext calls.
    throttleFilter mlastOnNextVar _ = do
      mlastOnNext <- atomically $ readTVar mlastOnNextVar
      case mlastOnNext of
        -- First item ever: always emit and start the clock.
        Nothing -> do
          now <- getCurrentTime
          atomically $ writeTVar mlastOnNextVar (Just now)
          return True
        -- Emit only if the delay has fully elapsed since the last emit.
        Just backThen -> do
          now <- getCurrentTime
          let diff = diffUTCTime now backThen
              passedDelay = diff > toNominalDiffTime delay
          when passedDelay
            $ atomically
            $ writeTVar mlastOnNextVar (Just now)
          return passedDelay
| roman/Haskell-Reactive-Extensions | rx-core/src/Rx/Observable/Throttle.hs | mit | 1,265 | 0 | 18 | 336 | 302 | 151 | 151 | 31 | 2 |
{-|
Module : Language.SAL.Syntax
Description : Data types for SAL Syntax
Copyright : (c) Galois Inc, 2015
License : MIT
Maintainer : Benjamin F Jones <bjones@galois.com>
Stability : experimental
Portability : Yes
Haskell encoding of the syntax presented in
http://sal.csl.sri.com/doc/language-report.pdf
Note that in the concrete syntax below (things in @typewriter font@) the use of
[]'s and {}'s sometimes mean literal brackets/braces and sometimes mean
_optional_ or _set of_. This is confusing, so we use [_ and {_ where appropriate to
denote a literal bracket or brace.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- Each data type in this module derives at least: Eq, Ord, Show, Typeable, Data
#define DERIVE deriving (Eq, Ord, Show, Typeable, Data)
module Language.SAL.Syntax (
-- * Types
Identifier(..)
, Numeral(..)
, TypeDef(..)
, Type(..)
, BasicType(..)
, VarDecl(..)
, VarDecls(..)
, Bound(..)
, Constructor(..)
, VarType(..)
, IndexType
, IndexVarDecl
, QualifiedName(..)
-- * Expressions
, Expr(..)
, Argument(..)
, Update(..)
, UpdatePos(..)
, Quantifier(..)
, LetDecl(..)
, RecordEntry(..)
, ThenRest(..)
, ElsIf(..)
-- * Transitions
, SimpleDefinition(..)
, Access(..)
, Lhs(..)
, RhsDefinition(..)
, Definition(..)
, Definitions(..)
, Guard
, GuardedCommand(..)
, ElseCommand(..)
, Assignments
-- * Modules
, ModuleDeclaration(..)
, Module(..)
, BaseDeclaration(..)
, DefinitionOrCommand(..)
, SomeCommand(..)
, Renames
, NewVarDecl
, ModulePred(..)
-- * Contexts
, Context(..)
, Parameters(..)
, ContextBody(..)
, Declaration(..)
, AssertionForm(..)
, AssertionExpr(..)
, PropOp(..)
, ActualParameters(..)
-- * Tokens
, keywordSet
, specialSet
, letterSet
, digitSet
, opCharSet
)
where
import Data.Char (chr)
import Data.List ((\\))
import Data.Data (Data)
import Data.String
import Data.Typeable (Typeable)
import Data.List.NonEmpty (NonEmpty)
------------------------------------------------------------------------
-- Types
------------------------------------------------------------------------
-- | Identifier for a variable, operator, type, module, context, ...
-- @Identifier := Letter {Letter | Digit | ? | _}∗ | {Opchar}+@
newtype Identifier = Identifier { identifier_str :: String }
DERIVE
instance IsString Identifier where
fromString = Identifier
-- | @Numeral := {Digit}+@
newtype Numeral = Numeral { numeral_val :: Integer }
deriving (Eq, Ord, Show, Data, Typeable, Num)
-- | SAL Type Definitions
data TypeDef
= TypeDef Type
-- | @{{Identifier}+, }@
| ScalarType (NonEmpty Identifier)
-- | @DATATYPE Constructors END@
| DataType (NonEmpty Constructor)
DERIVE
-- | SAL Types
data Type
-- | built-in type, e.g. @BOOLEAN@
= TyBasic BasicType
-- | named type, e.g. @mytype@
| TyName Name
-- | subrange type, e.g. @[1..n]@
| TySubRange Bound Bound
-- | subset type, e.g. @{ ident : type | expr }@
| TySubType Identifier Type Expr
-- | array type, e.g. @ARRAY idx OF type@
| TyArray IndexType Type
-- | function type, e.g. @[ var -> type ]@
| TyFunction VarType Type
-- | record type, e.g. @[# {Identifier : Type}+, #]@
| TyRecord VarDecls
-- | module state type, e.g. @MyModule . STATE@
| TyState Module
DERIVE
-- | Basic built-in mathematical types
data BasicType
= BOOLEAN
| REAL
| INTEGER
| NZINTEGER -- ^ non-zero integers
| NATURAL -- ^ positive integers
| NZREAL -- ^ non-zero real numbers
DERIVE
-- | A type name
type Name = Identifier
-- | Variable declaration of the form: @Identifier : Type@
data VarDecl = VarDecl Identifier Type
DERIVE
-- | Comma separated variable declarations
newtype VarDecls = VarDecls { var_decls :: NonEmpty VarDecl }
DERIVE
-- | A Bound in a sub-range expression
data Bound
= Unbounded -- ^ represents +/- inf depending on context, render as @_@
| Bound Expr -- ^ an expression representing a finite bound, render as @Expr@
DERIVE
-- | Data type constructors: @Identifier[(VarDecls)]@
data Constructor = Constructor Identifier (Maybe VarDecls) DERIVE
-- | Variable type declaration: @[identifier :] type@
data VarType = VarType (Maybe Identifier) Type DERIVE
-- | IndexType is really a subtype of Type:
--
-- > data IndexType = INTEGER | SubRange | ScalarTypeName
--
type IndexType = Type
type IndexVarDecl = VarDecl
-- | Name of the form: @Identifier[ {ActualParameters} ]!Identifier@
data QualifiedName = QualifiedName Identifier (Maybe ActualParameters) Identifier
DERIVE
------------------------------------------------------------------------
-- Expressions
------------------------------------------------------------------------
-- | SAL Expression type
data Expr
= NameExpr Name -- ^ named expresssion
| QualifiedNameExpr QualifiedName -- ^ qualified named expression
| NextVar Identifier -- ^ transition variable: @var'@
| NumLit Numeral -- ^ integer literal
| App Expr Argument -- ^ function application
| InfixApp Expr Identifier Expr -- ^ infix function application
| ArraySelec Expr Expr -- ^ array selection: @Expr[Expr]@
| RecordSelec Expr Identifier -- ^ record selection: @Expr.Identifier@
| TupleSelec Expr Numeral -- ^ tuple selection: @Expr.Numeral@
| UpdateExpr Expr Update -- ^ update expression: @Expr WITH Update@
| Lambda VarDecls Expr -- ^ lambda: @LAMBDA (VarDecls) : Expr@
| QuantifiedExpr Quantifier VarDecls Expr -- ^ @Quantifier (VarDecls) : Expr@
| LetExpr (NonEmpty LetDecl) Expr -- ^ let binding: @LET LetDeclarations IN Expr@
| SetExpr (Either SetPredExpr SetListExpr) -- ^ set comprehension: @{ id : ty | expr}@ or,
-- @{ expr1, expr2, ... }@
| ArrayLit IndexVarDecl Expr -- ^ array literal: @[[IndexVarDecl] Expr]@
| RecordLit (NonEmpty RecordEntry) -- ^ record literal: @(# {RecordEntry}+, #)@
| TupleLit Argument -- ^ tuple literal
| Conditional Expr ThenRest -- ^ conditional: @IF Expr ThenRest@
| GroupedExpr Expr -- ^ expression grouping: @( Expr )@
| StatePred Module ModulePred -- ^ module predicate: @Module . ( INIT | TRANS )@
DERIVE
instance IsString Expr where
fromString = NameExpr . fromString
-- | 'Argument' is a comma separated list of expressions:
-- @( {Expr}+, )@
newtype Argument = Argument (NonEmpty Expr)
DERIVE
-- | Update expression of the form: @UpdatePosition := Expr@
data Update = Update UpdatePos Expr
DERIVE
-- | Elements which may appear in sequence in the 'UpdatePosition' of an 'Update'
-- expression: @{'Argument' | ['Expr'] | .'Identifier' | .'Numeral'}+@
data UpdatePos
= ArgUpdate Argument -- @Expr1, Expr2, ...@
| ExprUpdate Expr -- @[Expr]@
| IdentUpdate Identifier -- @.Identifier@
| NumUpdate Numeral -- @.Numeral@
DERIVE
-- | Quantifier keyword
data Quantifier = FORALL | EXISTS
DERIVE
-- | Let declaration: @Identifier : Type = Expr@
data LetDecl = LetDecl Identifier Type Expr
DERIVE
type SetPredExpr = (Identifier, Type, Expr) -- ^ @{ Identifier : Type | Expr }@
type SetListExpr = NonEmpty Expr -- ^ @{ expr1, expr2, ... }@
-- | Record entry: @Identifier := Expr@
data RecordEntry = RecordEntry Identifier Expr
DERIVE
-- | Continued conditional: @THEN 'Expr' [ 'ElsIf' ] ELSE 'Expr' ENDIF@
data ThenRest = ThenRest Expr [ElsIf] Expr
DERIVE
-- | More continued conditional: @ELSIF Expr ThenRest@
data ElsIf = ElsIf Expr ThenRest
DERIVE
------------------------------------------------------------------------
-- Transitions
------------------------------------------------------------------------
-- | Left hand side of a definition
data Lhs = LhsCurrent Identifier [Access] -- ^ @Identifier@
| LhsNext Identifier [Access] -- ^ @Identifier'@
DERIVE
-- | Right hand side of a definition, either deterministic assignment or
-- non-deterministic.
data RhsDefinition = RhsExpr Expr -- @= Expr@
| RhsSelection Expr -- @IN Expr@
DERIVE
-- | Variable access
data Access = ArrayAccess Expr -- @[_ Expr _]@
| RecordAccess Identifier -- @.Identifier@
| TupleAccess Numeral -- @.Numeral@
DERIVE
-- | @Lhs RhsDefinition@
data SimpleDefinition = SimpleDefinition Lhs RhsDefinition
DERIVE
data Definition =
DefSimple SimpleDefinition -- @SimpleDefinition@
| DefForall VarDecls Definitions -- @(FORALL (VarDecls): Definitions)@
DERIVE
newtype Definitions = Definitions (NonEmpty Definition) -- @{Definition}+;@
DERIVE
data GuardedCommand = GuardedCommand Guard Assignments -- @Guard --> Assignments@
DERIVE
type Guard = Expr
type Assignments = [SimpleDefinition] -- @{SimpleDefinition}*;@ (optional ; at end)
------------------------------------------------------------------------
-- Modules
------------------------------------------------------------------------
-- | Top-level module declaration: @Identifier[VarDecls] : MODULE = Module@
data ModuleDeclaration =
ModuleDeclaration Identifier (Maybe VarDecls) Module
DERIVE
-- | SAL Module
data Module =
-- | @BEGIN BaseDeclarations END@
BaseModule [BaseDeclaration]
-- | @{Name|QualifiedName} Name[{Expr}+,]@
| ModuleInstance (Either Name QualifiedName) (NonEmpty Expr)
-- | @Module || Module@
| SynchronousComposition Module Module
-- | @Module [] Module@
| AsynchronousComposition Module Module
-- | @(|| (Identifier : IndexType): Module)@
| MultiSynchronous Identifier IndexType Module
-- | @([] (Identifier : IndexType): Module)@
| MultiAsynchronous Identifier IndexType Module
-- | @LOCAL {Identifier}+, IN Module@
| Hiding (NonEmpty Identifier) Module
-- | @OUTPUT {Identifier}+, IN Module@
| NewOutput (NonEmpty Identifier) Module
-- | @RENAME Renames IN Module@
| Renaming Renames Module
-- | @WITH NewVarDecls Module
| WithModule (NonEmpty NewVarDecl) Module
-- | @OBSERVE Module WITH Module@
| ObserveModule Module Module
-- | @( Module )@
| ParenModule Module
DERIVE
data BaseDeclaration =
InputDecl VarDecls -- @INPUT VarDecls@
| OutputDecl VarDecls -- @OUTPUT VarDecls@
| GlobalDecl VarDecls -- @GLOBAL VarDecls@
| LocalDecl VarDecls -- @LOCAL VarDecls@
| DefDecl Definitions -- @DEFINITION Definitions@
| InitDecl (NonEmpty DefinitionOrCommand) -- @{DOC}+;@ (optional ; at end)
| TransDecl (NonEmpty DefinitionOrCommand) -- @{DOC}+;@ (optional ; at end)
DERIVE
-- | NewVarDecl should be a subtype of BaseDeclaration:
-- data NewVarDecl =
-- InputDecl
-- | OutputDecl
-- | GlobalDecl
-- DERIVE
type NewVarDecl = BaseDeclaration
data DefinitionOrCommand =
DOCDef Definition
-- ^ @Definition@
| DOCCom (NonEmpty SomeCommand) (Maybe ElseCommand)
-- ^ @[_ {SomeCommand}+[__] [ [__] ElseCommand ] _]@
DERIVE
data SomeCommand =
-- | @[ Identifier : ] GuardedCommand@
NamedCommand (Maybe Identifier) GuardedCommand
-- | @([__] (VarDecls): SomeCommand)@
| MultiCommand VarDecls SomeCommand
DERIVE
-- | @[ Identifier : ] ELSE --> Assignments@
data ElseCommand = ElseCommand (Maybe Identifier) Assignments
DERIVE
-- | @{Lhs TO Lhs}+,@
type Renames = NonEmpty (Lhs, Lhs)
-- | Part of a 'StatePred' type Expr
data ModulePred = INIT | TRANS
DERIVE
------------------------------------------------------------------------
-- Context
------------------------------------------------------------------------
-- | @Identifier [ {Parameters} ] : CONTEXT = ContextBody@
data Context = Context Identifier (Maybe Parameters) ContextBody
DERIVE
-- | @[ {Identifier}+, : TYPE ] ; {VarDecls}*,
data Parameters = Parameters (NonEmpty Identifier) [VarDecls]
DERIVE
-- | @BEGIN { Declaration; }+ END@
data ContextBody = ContextBody (NonEmpty Declaration)
DERIVE
-- | Declaration in a context body
data Declaration =
ConstantDecl Identifier (Maybe VarDecls) Type (Maybe Expr)
-- ^ @Identifier[(VarDecls)] : Type [ = Expr ]@
| TypeDecl Identifier (Maybe TypeDef)
-- ^ @Identifier : TYPE [ = TypeDef ]@
| AssertionDecl Identifier AssertionForm AssertionExpr
-- ^ @Identifier : AssertionForm = AssertionExpr@
| ContextDecl Identifier Identifier ActualParameters
| ModuleDecl ModuleDeclaration
DERIVE
-- | Different classes of assertion
data AssertionForm =
OBLIGATION
| CLAIM
| LEMMA
| THEOREM
DERIVE
-- | Assertion Expressions allow properties to be stated.
data AssertionExpr =
ModuleModels Module Expr
-- ^ @Module |- Expr@
| ModuleImplements Module Module
-- ^ @Module IMPLEMENTS Module@
| PosProp PropOp AssertionExpr AssertionExpr
-- ^ @PropOp ( AssertionExpr, AsserstionExpr)@
| NegProp AssertionExpr
-- ^ @NOT AssertionExpr@
| QuantifiedAssertion Quantifier VarDecls AssertionExpr
-- ^ @Quantifier ( VarDecls ) : AssertionExpr@
| AssertExpr Expr
DERIVE
-- | Propositional operators allowed in 'AssertionExpr'
data PropOp =
AND -- ^ @AND@
| OR -- ^ @OR@
| IMPL -- ^ implication @=>@
| IFF -- ^ if and only if @<=>@
DERIVE
-- | @{Type}*, ; {Expr}*,@
data ActualParameters = ActualParameters [Type] [Expr]
DERIVE
------------------------------------------------------------------------
-- Tokens
------------------------------------------------------------------------
-- Define special tokens in the SAL Language
-- | Reserved words of the SAL language; identifiers must not collide
-- with any of these.
keywordSet :: [String]
keywordSet =
  [ "AND", "ARRAY", "BEGIN", "BOOLEAN", "CLAIM", "CONTEXT", "DATATYPE"
  , "DEFINITION", "ELSE" , "ELSIF", "END", "ENDIF", "EXISTS", "FALSE", "FORALL"
  , "GLOBAL", "IF", "IN", "INITIALIZATION" , "INPUT", "INTEGER", "LAMBDA"
  , "LEMMA", "LET", "LOCAL", "MODULE", "NATURAL", "NOT", "NZINTEGER", "NZREAL"
  , "OBLIGATION", "OF", "OR", "OUTPUT", "REAL", "RENAME", "THEN", "THEOREM"
  , "TO", "TRANSITION", "TRUE", "TYPE", "WITH", "XOR"
  ]
-- | Punctuation with special lexical meaning in SAL.
specialSet :: String
specialSet = "()[]{}%,.;:'!#?_"

-- | ASCII letters usable in identifiers.
letterSet :: String
letterSet = ['a'..'z'] ++ ['A'..'Z']

-- | Decimal digits.
digitSet :: String
digitSet = ['0'..'9']

-- | Characters allowed in operator names: every printable ASCII
-- character (codes 33–126) that is not special, a letter, or a digit.
opCharSet :: String
opCharSet = [ c | c <- map chr [33 .. 126], c `notElem` excluded ]
  where
    excluded = specialSet ++ letterSet ++ digitSet
| GaloisInc/language-sal | src/Language/SAL/Syntax.hs | mit | 14,509 | 9 | 8 | 3,028 | 2,080 | 1,311 | 769 | -1 | -1 |
module Y2017.M03.D13.Exercise where
import Data.Char (isAlpha, toLower)
import Data.List (sort)
import Network.HTTP.Conduit
{--
Oh, yeah. Let's do this one!
https://twitter.com/HaggardHawks/status/840321681726013440
Nic Wilkinson Retweeted
Haggard Hawks @HaggardHawks Mar 10
All fourteen lines in David Shulman’s sonnet ‘Washington Crossing The Delaware’
(1936) are anagrams of the title.
prove it!
--}
-- | Are two lines anagrams of one another? Letters are compared
-- case-insensitively and every non-alphabetic character (spaces,
-- punctuation) is ignored, so each sonnet line can be checked against
-- the title directly. Previously an 'undefined' stub.
anagram :: String -> String -> Bool
anagram line other = canonical line == canonical other
  where
    -- canonical form: keep letters only, lowercase them, sort them
    canonical = sort . map toLower . filter isAlpha
-- snaps for defining anagram elegantly
sonnet :: FilePath
sonnet = "https://raw.githubusercontent.com/geophf/1HaskellADay/master/exercises/HAD/Y2017/M03/D13/sonnet.txt"
{--
So, read in the first line. Use it as the basis of the anagram.
For each line thereafter:
Read in each line. Remove non-alpha characters. Determine if that line is an
anagram of the first line.
What are your results?
--}
| geophf/1HaskellADay | exercises/HAD/Y2017/M03/D13/Exercise.hs | mit | 821 | 0 | 6 | 122 | 48 | 30 | 18 | 6 | 1 |
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.ClientNamenodeProtocolProtos.GetServerDefaultsRequestProto (GetServerDefaultsRequestProto(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data GetServerDefaultsRequestProto = GetServerDefaultsRequestProto{}
deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
instance P'.Mergeable GetServerDefaultsRequestProto where
mergeAppend GetServerDefaultsRequestProto GetServerDefaultsRequestProto = GetServerDefaultsRequestProto
instance P'.Default GetServerDefaultsRequestProto where
defaultValue = GetServerDefaultsRequestProto
instance P'.Wire GetServerDefaultsRequestProto where
wireSize ft' self'@(GetServerDefaultsRequestProto)
= case ft' of
10 -> calc'Size
11 -> P'.prependMessageSize calc'Size
_ -> P'.wireSizeErr ft' self'
where
calc'Size = 0
wirePut ft' self'@(GetServerDefaultsRequestProto)
= case ft' of
10 -> put'Fields
11 -> do
P'.putSize (P'.wireSize 10 self')
put'Fields
_ -> P'.wirePutErr ft' self'
where
put'Fields
= do
Prelude'.return ()
wireGet ft'
= case ft' of
10 -> P'.getBareMessageWith update'Self
11 -> P'.getMessageWith update'Self
_ -> P'.wireGetErr ft'
where
update'Self wire'Tag old'Self
= case wire'Tag of
_ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> GetServerDefaultsRequestProto) GetServerDefaultsRequestProto where
getVal m' f' = f' m'
instance P'.GPB GetServerDefaultsRequestProto
instance P'.ReflectDescriptor GetServerDefaultsRequestProto where
getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList [])
reflectDescriptorInfo _
= Prelude'.read
"DescriptorInfo {descName = ProtoName {protobufName = FIName \".hadoop.hdfs.GetServerDefaultsRequestProto\", haskellPrefix = [MName \"Hadoop\",MName \"Protos\"], parentModule = [MName \"ClientNamenodeProtocolProtos\"], baseName = MName \"GetServerDefaultsRequestProto\"}, descFilePath = [\"Hadoop\",\"Protos\",\"ClientNamenodeProtocolProtos\",\"GetServerDefaultsRequestProto.hs\"], isGroup = False, fields = fromList [], descOneofs = fromList [], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False, makeLenses = False}"
instance P'.TextType GetServerDefaultsRequestProto where
tellT = P'.tellSubMessage
getT = P'.getSubMessage
instance P'.TextMsg GetServerDefaultsRequestProto where
textPut msg = Prelude'.return ()
textGet = Prelude'.return P'.defaultValue | alexbiehl/hoop | hadoop-protos/src/Hadoop/Protos/ClientNamenodeProtocolProtos/GetServerDefaultsRequestProto.hs | mit | 3,045 | 1 | 16 | 539 | 554 | 291 | 263 | 53 | 0 |
module Type.Meta
( zonk
, metaVars
, getMetaVars
) where
import qualified Data.Set as S
import Type.Types
-- | Replace already-solved meta type variables by the types they are
-- bound to, rebuilding the type structure recursively.
--
-- For a bound 'TyMeta' the substituted type is zonked itself and then
-- written back with 'writeMeta', so later lookups resolve in a single
-- step (path compression).
zonk :: Type -> TI Type
zonk (TyForall tvs rho) = do
  rho' <- zonk rho
  return (mkForall tvs rho')
zonk (TyArr sigma1 sigma2) = do
  sigma1' <- zonk sigma1
  sigma2' <- zonk sigma2
  return (TyArr sigma1' sigma2')
-- leaves that cannot contain a meta variable are returned unchanged
zonk tau@(TyCon _) = return tau
zonk tau@(TyVar _) = return tau
zonk (TyAp tau1 tau2) = do
  tau1' <- zonk tau1
  tau2' <- zonk tau2
  return (TyAp tau1' tau2')
zonk tau@(TyMeta tv) = do
  mbTau1 <- readMeta tv
  case mbTau1 of
    -- unbound meta variable: keep it as-is
    Nothing -> return tau
    Just tau1 -> do
      tau2 <- zonk tau1
      -- shorten the reference chain for subsequent zonks
      writeMeta tv tau2
      return tau2
-- | Collect the set of meta type variables occurring in a type.
--
-- NOTE(review): like the original, the inner traversal only handles
-- 'TyCon', 'TyVar', 'TyAp' and 'TyMeta'; a 'TyForall' or 'TyArr'
-- nested under a 'TyAp' is not expected at that level.
metaVars :: Type -> S.Set TyMeta
metaVars (TyForall _ rho) = metaVars rho
metaVars (TyArr sigma1 sigma2) = metaVars sigma1 `S.union` metaVars sigma2
metaVars tau = collect tau
  where collect (TyCon _)        = S.empty
        collect (TyVar _)        = S.empty
        collect (TyAp tau1 tau2) = collect tau1 `S.union` collect tau2
        collect (TyMeta tv)      = S.singleton tv
-- | Zonk a list of types and return the union of all their meta
-- type variables.
getMetaVars :: [Type] -> TI (S.Set TyMeta)
getMetaVars tys = do
  tys' <- mapM zonk tys
  return (S.unions (map metaVars tys'))
| meimisaki/Rin | src/Type/Meta.hs | mit | 1,120 | 0 | 13 | 280 | 512 | 245 | 267 | 40 | 4 |
-- HLint configuration: pulls in the default hint sets and adds
-- project-specific rewrite rules.
import "hint" HLint.Default
import "hint" HLint.HLint
-- Fixity declarations so HLint can parse the rule patterns below.
infixl 4 <$>
infixr 9 .
infixl 1 <&>
infixl 1 &
infixl 3 <|>
infixl 4 *>
infixl 4 <*
infixl 4 <*>
infixr 0 $
infixr 6 <>
infixr 5 ++
-- warn "my-a" = a (b $ c d) ==> a . b $ c d
-- warn "my-b" = a (b *> c) ==> a $ b *> c
-- warn "my-c" = a (b (c d)) ==> a (b $ c d)
-- warn "my-d" = [a (b c)] ==> [a $ b c]
-- Custom rules: the left-hand side is the pattern to match, the
-- right-hand side the suggested replacement.
warn "Use liftA2" = a <$> b <*> c ==> liftA2 a b c
warn "my-e" = (a $ b c (d e), f) ==> (a . b c $ d e, f)
warn "my-f" = [a b (c d), e] ==> [a b $ c d, e]
warn "my-g" = (if a then (b $ c) else (b $ d)) ==> (b $ if a then c else d)
warn "my-h" = (do x <- a ; return $ b x) ==> b <$> a
warn "my-ha" = (do x <- a ; b x) ==> b <*> a
warn "my-i" = (\x -> a <$> b x) ==> fmap a . b
warn "my-j" = either (f . a) (f . b) ==> f . either a b
warn "my-ja" = either (f . a) (f . b) c ==> f . either a b c
warn "my-k" = (\x -> f x >>= y) ==> f >=> y
warn "my-l" = (a . b) . c ==> a . b . c
| ptol/oczor | HLint.hs | mit | 940 | 0 | 11 | 272 | 499 | 263 | 236 | -1 | -1 |
{-# LANGUAGE ParallelListComp, TemplateHaskell #-}
{-| TemplateHaskell helper for Ganeti Haskell code.
As TemplateHaskell require that splices be defined in a separate
module, we combine all the TemplateHaskell functionality that HTools
needs in this module (except the one for unittests).
-}
{-
Copyright (C) 2011, 2012, 2013, 2014 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.THH ( declareSADT
, declareLADT
, declareILADT
, declareIADT
, makeJSONInstance
, deCamelCase
, genOpID
, genAllConstr
, genAllOpIDs
, PyValue(..)
, PyValueEx(..)
, OpCodeField(..)
, OpCodeDescriptor(..)
, genOpCode
, genStrOfOp
, genStrOfKey
, genLuxiOp
, Field (..)
, simpleField
, andRestArguments
, specialNumericalField
, timeAsDoubleField
, withDoc
, defaultField
, optionalField
, optionalNullSerField
, renameField
, customField
, timeStampFields
, uuidFields
, serialFields
, tagsFields
, TagSet
, buildObject
, buildObjectSerialisation
, buildParam
, DictObject(..)
, genException
, excErrMsg
) where
import Control.Arrow ((&&&))
import Control.Applicative
import Control.Monad
import Data.Attoparsec () -- Needed to prevent spurious GHC 7.4 linking errors.
-- See issue #683 and https://ghc.haskell.org/trac/ghc/ticket/4899
import Data.Char
import Data.List
import Data.Maybe
import qualified Data.Map as M
import qualified Data.Set as Set
import Language.Haskell.TH
import System.Time (ClockTime(..))
import qualified Text.JSON as JSON
import Text.JSON.Pretty (pp_value)
import Ganeti.JSON
import Ganeti.PyValue
import Ganeti.THH.PyType
-- * Exported types
-- | Class of objects that can be converted to 'JSObject'
-- lists-format.
class DictObject a where
  -- | Serialise the object into a list of JSON key\/value pairs.
  toDict :: a -> [(String, JSON.JSValue)]

-- | Optional field information.
data OptionalType
  = NotOptional           -- ^ Field is not optional
  | OptionalOmitNull      -- ^ Field is optional, null is not serialised
  | OptionalSerializeNull -- ^ Field is optional, null is serialised
  | AndRestArguments      -- ^ Special field capturing all the remaining fields
                          -- as plain JSON values
  deriving (Show, Eq)
-- | Serialised field data type describing how to generate code for the field.
-- Each field has a type, which isn't captured in the type of the data type,
-- but is saved in the 'Q' monad in 'fieldType'.
--
-- Let @t@ be a type we want to parametrize the field with. There are the
-- following possible types of fields:
--
-- [Mandatory with no default.] Then @fieldType@ holds @t@,
-- @fieldDefault = Nothing@ and @fieldIsOptional = NotOptional@.
--
-- [Field with a default value.] Then @fieldType@ holds @t@ and
-- @fieldDefault = Just exp@ where @exp@ is an expression of type @t@ and
-- @fieldIsOptional = NotOptional@.
--
-- [Optional, no default value.] Then @fieldType@ holds @Maybe t@,
-- @fieldDefault = Nothing@ and @fieldIsOptional@ is either
-- 'OptionalOmitNull' or 'OptionalSerializeNull'.
--
-- Optional fields with a default value are prohibited, as their main
-- intention is to represent the information that a request didn't contain
-- the field data.
--
-- /Custom (de)serialization:/
-- Field can have custom (de)serialization functions that are stored in
-- 'fieldRead' and 'fieldShow'. If they aren't provided, the default is to use
-- 'readJSON' and 'showJSON' for the field's type @t@. If they are provided,
-- the type of the contained deserializing expression must be
--
-- @
-- [(String, JSON.JSValue)] -> JSON.JSValue -> JSON.Result t
-- @
--
-- where the first argument carries the whole record in the case the
-- deserializing function needs to process additional information.
--
-- The type of the contained serializing experssion must be
--
-- @
-- t -> (JSON.JSValue, [(String, JSON.JSValue)])
-- @
--
-- where the result can provide extra JSON fields to include in the output
-- record (or just return @[]@ if they're not needed).
--
-- Note that for optional fields the type appearing in the custom functions
-- is still @t@. Therefore making a field optional doesn't change the
-- functions.
--
-- There is also a special type of optional field 'AndRestArguments' which
-- allows to parse any additional arguments not covered by other fields. There
-- can be at most one such special field and it's type must be
-- @Map String JSON.JSValue@. See also 'andRestArguments'.
data Field = Field { fieldName :: String
                     -- ^ name of the field, as used in the JSON
                     -- serialisation
                   , fieldType :: Q Type
                     -- ^ the type of the field, @t@ for non-optional fields,
                     -- @Maybe t@ for optional ones.
                   , fieldRead :: Maybe (Q Exp)
                     -- ^ an optional custom deserialization function of type
                     -- @[(String, JSON.JSValue)] -> JSON.JSValue ->
                     -- JSON.Result t@
                   , fieldShow :: Maybe (Q Exp)
                     -- ^ an optional custom serialization function of type
                     -- @t -> (JSON.JSValue, [(String, JSON.JSValue)])@
                   , fieldExtraKeys :: [String]
                     -- ^ extra JSON keys emitted\/consumed by the custom
                     -- (de)serialization functions (see 'customField')
                   , fieldDefault :: Maybe (Q Exp)
                     -- ^ an optional default value of type @t@
                   , fieldConstr :: Maybe String
                     -- ^ optional override for the Haskell record-field
                     -- name (see 'renameField')
                   , fieldIsOptional :: OptionalType
                     -- ^ determines if a field is optional, and if yes,
                     -- how
                   , fieldDoc :: String
                     -- ^ documentation string, passed on to the Python
                     -- code generator (see 'pyField')
                   }
-- | Generates a plain, mandatory field with no custom behaviour.
simpleField :: String -> Q Type -> Field
simpleField fname ftype = Field
  { fieldName       = fname
  , fieldType       = ftype
  , fieldRead       = Nothing
  , fieldShow       = Nothing
  , fieldExtraKeys  = []
  , fieldDefault    = Nothing
  , fieldConstr     = Nothing
  , fieldIsOptional = NotOptional
  , fieldDoc        = ""
  }

-- | Generate an AndRestArguments catch-all field: a map collecting
-- every JSON key not claimed by the other fields. Built on top of
-- 'simpleField', overriding only the optionality marker.
andRestArguments :: String -> Field
andRestArguments fname =
  (simpleField fname [t| M.Map String JSON.JSValue |])
    { fieldIsOptional = AndRestArguments }
-- | Attaches a documentation string to a field, overriding any
-- previous value.
withDoc :: String -> Field -> Field
withDoc doc field =
  field { fieldDoc = doc }
-- | Sets the renamed constructor field.
renameField :: String -> Field -> Field
renameField constrName field = field { fieldConstr = Just constrName }
-- | Sets the default value on a field (makes it optional with a
-- default value).
defaultField :: Q Exp -> Field -> Field
defaultField defval field = field { fieldDefault = Just defval }
-- | Marks a field optional (turning its base type into a Maybe).
optionalField :: Field -> Field
optionalField field = field { fieldIsOptional = OptionalOmitNull }
-- | Marks a field optional (turning its base type into a Maybe), but
-- with 'Nothing' serialised explicitly as /null/.
optionalNullSerField :: Field -> Field
optionalNullSerField field = field { fieldIsOptional = OptionalSerializeNull }
-- | Wrapper around a special parse function, suitable as field-parsing
-- function.
numericalReadFn :: JSON.JSON a => (String -> JSON.Result a)
                   -> [(String, JSON.JSValue)] -> JSON.JSValue -> JSON.Result a
-- plain JSON numbers go through the standard instance
numericalReadFn _ _ v@(JSON.JSRational _ _) = JSON.readJSON v
-- strings are handed to the custom parser
numericalReadFn f _ (JSON.JSString x) = f $ JSON.fromJSString x
numericalReadFn _ _ _ = JSON.Error "A numerical field has to be a number or\
                                   \ a string."

-- | Sets the read function to also accept string parsable by the given
-- function.
specialNumericalField :: Name -> Field -> Field
specialNumericalField f field =
     field { fieldRead = Just (appE (varE 'numericalReadFn) (varE f)) }
-- | Creates a new mandatory field that reads time as the (floating point)
-- number of seconds since the standard UNIX epoch, and represents it in
-- Haskell as 'ClockTime'.
timeAsDoubleField :: String -> Field
timeAsDoubleField fname =
  (simpleField fname [t| ClockTime |])
  -- the custom reader ignores its first (whole-object) argument
  { fieldRead = Just $ [| \_ -> liftM unTimeAsDoubleJSON . JSON.readJSON |]
  , fieldShow = Just $ [| \c -> (JSON.showJSON $ TimeAsDoubleJSON c, []) |]
  }

-- | Sets custom functions on a field.
customField :: Name      -- ^ The name of the read function
            -> Name      -- ^ The name of the show function
            -> [String]  -- ^ The name of extra field keys
            -> Field     -- ^ The original field
            -> Field     -- ^ Updated field
customField readfn showfn extra field =
  field { fieldRead = Just (varE readfn), fieldShow = Just (varE showfn)
        , fieldExtraKeys = extra }
-- | Computes the Haskell record-field name for a field: the explicit
-- constructor alias when one was set, otherwise the camel-cased JSON
-- name.
fieldRecordName :: Field -> String
fieldRecordName fld =
  case fieldConstr fld of
    Just alias -> alias
    Nothing    -> camelCase (fieldName fld)

-- | Computes the preferred variable name to use for the value of this
-- field: a first-letter-lowercased version of the constructor alias
-- when one exists, otherwise the JSON name with dashes mapped to
-- underscores. See also 'fieldRecordName'.
fieldVariable :: Field -> String
fieldVariable fld =
  case fieldConstr fld of
    Just name -> ensureLower name
    Nothing   -> [ if c == '-' then '_' else c | c <- fieldName fld ]
-- | Compute the actual field type (taking into account possible
-- optional status).
--
-- 'AndRestArguments' fields keep their declared type; only the truly
-- optional variants are wrapped in 'Maybe'.
actualFieldType :: Field -> Q Type
actualFieldType f | fieldIsOptional f `elem` [NotOptional, AndRestArguments] = t
                  | otherwise =  [t| Maybe $t |]
  where t = fieldType f
-- | Rejects fields that are either optional or carry a default value,
-- since neither is allowed in a parameter declaration; plain fields
-- pass through.
checkNonOptDef :: (Monad m) => Field -> m ()
checkNonOptDef field
  | isOpt =
      fail $ "Optional field " ++ fieldName field
             ++ " used in parameter declaration"
  | isJust (fieldDefault field) =
      fail $ "Default field " ++ fieldName field
             ++ " used in parameter declaration"
  | otherwise = return ()
  where isOpt = fieldIsOptional field `elem`
                [OptionalOmitNull, OptionalSerializeNull]
-- | Construct a function that parses a field value. If the field has
-- a custom 'fieldRead', it's applied to @o@ and used. Otherwise
-- @JSON.readJSON@ is used.
parseFn :: Field   -- ^ The field definition
        -> Q Exp   -- ^ The entire object in JSON object format
        -> Q Exp   -- ^ The resulting function that parses a JSON message
parseFn field o
  = maybe [| readJSONWithDesc $(stringE $ fieldName field) False |]
          (`appE` o) (fieldRead field)

-- | Produces the expression that will de-serialise a given
-- field. Since some custom parsing functions might need to use the
-- entire object, we do take and pass the object to any custom read
-- functions.
loadFn :: Field   -- ^ The field definition
       -> Q Exp   -- ^ The value of the field as existing in the JSON message
       -> Q Exp   -- ^ The entire object in JSON object format
       -> Q Exp   -- ^ Resulting expression
loadFn field expr o = [| $expr >>= $(parseFn field o) |]

-- | Just as 'loadFn', but for optional fields.
loadFnOpt :: Field   -- ^ The field definition
          -> Q Exp   -- ^ The value of the field as existing in the JSON message
                     -- as Maybe
          -> Q Exp   -- ^ The entire object in JSON object format
          -> Q Exp   -- ^ Resulting expression
loadFnOpt field@(Field { fieldDefault = Just def }) expr o
  = case fieldIsOptional field of
      -- an absent value falls back to the declared default
      NotOptional -> [| $expr >>= maybe (return $def) $(parseFn field o) |]
      -- optional + default is contradictory, reject at splice time
      _ -> fail $ "Field " ++ fieldName field ++ ":\
                  \ A field can't be optional and\
                  \ have a default value at the same time."
loadFnOpt field expr o
  = [| $expr >>= maybe (return Nothing) (liftM Just . $(parseFn field o)) |]
-- * Common field declarations

-- | Timestamp fields description (creation and modification time,
-- both defaulting to epoch).
timeStampFields :: [Field]
timeStampFields = map (defaultField [| TOD 0 0 |] . timeAsDoubleField)
                      ["ctime", "mtime"]

-- | Serial number fields description.
serialFields :: [Field]
serialFields =
  [ renameField  "Serial" $ simpleField "serial_no" [t| Int |] ]

-- | UUID fields description.
uuidFields :: [Field]
uuidFields = [ simpleField "uuid" [t| String |] ]

-- | Tag set type alias.
type TagSet = Set.Set String

-- | Tag field description (defaults to the empty set).
tagsFields :: [Field]
tagsFields = [ defaultField [| Set.empty |] $
               simpleField "tags" [t| TagSet |] ]
-- * Internal types

-- | A simple field, in contrast to the customisable 'Field' type.
type SimpleField = (String, Q Type)

-- | A definition for a single constructor for a simple object.
type SimpleConstructor = (String, [SimpleField])

-- | A definition for ADTs with simple fields.
type SimpleObject = [SimpleConstructor]

-- | A type alias for an opcode constructor of a regular object:
-- (name, type, documentation, fields, description field name) —
-- see how 'pyClass' destructures it.
type OpCodeConstructor = (String, Q Type, String, [Field], String)

-- | A type alias for a Luxi constructor of a regular object:
-- (name, fields).
type LuxiConstructor = (String, [Field])
-- * Helper functions
-- | Lowercase the first character of a string; the empty string is
-- returned unchanged.
--
-- Used to convert type name to function prefix, e.g. in @data Aa ->
-- aaToRaw@.
ensureLower :: String -> String
ensureLower s = case s of
                  []     -> []
                  (c:cs) -> toLower c : cs

-- | Uppercase the first character of a string; the empty string is
-- returned unchanged.
--
-- Used to convert constructor name to component.
ensureUpper :: String -> String
ensureUpper s = case s of
                  []     -> []
                  (c:cs) -> toUpper c : cs
-- | Helper for quoted expressions: builds a variable reference from a
-- plain string.
varNameE :: String -> Q Exp
varNameE = varE . mkName
-- | showJSON as an expression, for reuse.
showJSONE :: Q Exp
showJSONE = varE 'JSON.showJSON
-- | makeObj as an expression, for reuse.
makeObjE :: Q Exp
makeObjE = varE 'JSON.makeObj
-- | fromObj (Ganeti specific) as an expression, for reuse.
fromObjE :: Q Exp
fromObjE = varE 'fromObj
-- | ToRaw function name: @fooToRaw@ for type @Foo@.
toRawName :: String -> Name
toRawName = mkName . (++ "ToRaw") . ensureLower
-- | FromRaw function name: @fooFromRaw@ for type @Foo@.
fromRawName :: String -> Name
fromRawName = mkName . (++ "FromRaw") . ensureLower
-- | Converts a name to its varE\/litE representations.
reprE :: Either String Name -> Q Exp
reprE = either stringE varE
-- | Smarter function application: @appFn f x@ is @AppE f x@, except
-- that applying 'id' is skipped entirely, which produces more
-- readable code when using -ddump-splices.
appFn :: Exp -> Exp -> Exp
appFn fun arg = if fun == VarE 'id then arg else AppE fun arg

-- | Wraps a field type into a (non-strict) constructor field.
buildConsField :: Q Type -> StrictTypeQ
buildConsField ftype = fmap ((,) NotStrict) ftype
-- | Builds a constructor based on a simple definition (not field-based).
-- The resulting data type derives 'Show' and 'Eq'.
buildSimpleCons :: Name -> SimpleObject -> Q Dec
buildSimpleCons tname cons = do
  decl_d <- mapM (\(cname, fields) -> do
                    fields' <- mapM (buildConsField . snd) fields
                    return $ NormalC (mkName cname) fields') cons
  return $ DataD [] tname [] decl_d [''Show, ''Eq]

-- | Generate the save function for a given type: both the signature
-- (@tname -> JSValue@) and the per-constructor clauses.
genSaveSimpleObj :: Name                            -- ^ Object type
                 -> String                          -- ^ Function name
                 -> SimpleObject                    -- ^ Object definition
                 -> (SimpleConstructor -> Q Clause) -- ^ Constructor save fn
                 -> Q (Dec, Dec)
genSaveSimpleObj tname sname opdefs fn = do
  let sigt = AppT (AppT ArrowT (ConT tname)) (ConT ''JSON.JSValue)
      fname = mkName sname
  cclauses <- mapM fn opdefs
  return $ (SigD fname sigt, FunD fname cclauses)
-- * Template code for simple raw type-equivalent ADTs

-- | Generates a data type declaration whose constructors are all
-- nullary (carry no arguments).
--
-- The type will have a fixed list of instances.
strADTDecl :: Name -> [String] -> Dec
strADTDecl name constructors =
  DataD [] name []
          (map (flip NormalC [] . mkName) constructors)
          [''Show, ''Eq, ''Enum, ''Bounded, ''Ord]
-- | Generates a toRaw function.
--
-- This generates a simple function of the form:
--
-- @
-- nameToRaw :: Name -> /traw/
-- nameToRaw Cons1 = var1
-- nameToRaw Cons2 = \"value2\"
-- @
genToRaw :: Name -> Name -> Name -> [(String, Either String Name)] -> Q [Dec]
genToRaw traw fname tname constructors = do
  let sigt = AppT (AppT ArrowT (ConT tname)) (ConT traw)
  -- the body clauses, matching on the constructor and returning the
  -- raw value
  clauses <- mapM  (\(c, v) -> clause [recP (mkName c) []]
                             (normalB (reprE v)) []) constructors
  return [SigD fname sigt, FunD fname clauses]
-- | Generates a fromRaw function.
--
-- The function generated is monadic and can fail parsing the
-- raw value. It is of the form:
--
-- @
-- nameFromRaw :: (Monad m) => /traw/ -> m Name
-- nameFromRaw s | s == var1 = Cons1
--               | s == \"value2\" = Cons2
--               | otherwise = fail /.../
-- @
genFromRaw :: Name -> Name -> Name -> [(String, Either String Name)] -> Q [Dec]
genFromRaw traw fname tname constructors = do
  -- signature of form (Monad m) => String -> m $name
  sigt <- [t| (Monad m) => $(conT traw) -> m $(conT tname) |]
  -- clauses for a guarded pattern
  let varp = mkName "s"
      varpe = varE varp
  clauses <- mapM (\(c, v) -> do
                     -- the clause match condition
                     g <- normalG [| $varpe == $(reprE v) |]
                     -- the clause result
                     r <- [| return $(conE (mkName c)) |]
                     return (g, r)) constructors
  -- the otherwise clause (fallback)
  oth_clause <- do
    g <- normalG [| otherwise |]
    r <- [|fail ("Invalid string value for type " ++
                 $(litE (stringL (nameBase tname))) ++ ": " ++ show $varpe) |]
    return (g, r)
  let fun = FunD fname [Clause [VarP varp]
                        (GuardedB (clauses++[oth_clause])) []]
  return [SigD fname sigt, fun]
-- | Generates a data type from a given raw format.
--
-- The format is expected to multiline. The first line contains the
-- type name, and the rest of the lines must contain two words: the
-- constructor name and then the string representation of the
-- respective constructor.
--
-- The function will generate the data type declaration, and then two
-- functions:
--
-- * /name/ToRaw, which converts the type to a raw type
--
-- * /name/FromRaw, which (monadically) converts from a raw type to the type
--
-- Note that this is basically just a custom show\/read instance,
-- nothing else.
declareADT
  :: (a -> Either String Name) -> Name -> String -> [(String, a)] -> Q [Dec]
declareADT fn traw sname cons = do
  let name = mkName sname
      ddecl = strADTDecl name (map fst cons)
      -- process cons in the format expected by genToRaw
      cons' = map (\(a, b) -> (a, fn b)) cons
  toraw <- genToRaw traw (toRawName sname) name cons'
  fromraw <- genFromRaw traw (fromRawName sname) name cons'
  return $ ddecl:toraw ++ fromraw

-- | Declares an ADT with string literal raw values.
declareLADT :: Name -> String -> [(String, String)] -> Q [Dec]
declareLADT = declareADT Left

-- | Declares an ADT whose raw values are 'Int's, additionally
-- generating one named top-level constant per constructor (using
-- parallel list comprehensions to pair them up).
declareILADT :: String -> [(String, Int)] -> Q [Dec]
declareILADT sname cons = do
  consNames <- sequence [ newName ('_':n) | (n, _) <- cons ]
  consFns <- concat <$> sequence
             [ do sig <- sigD n [t| Int |]
                  let expr = litE (IntegerL (toInteger i))
                  fn <- funD n [clause [] (normalB expr) []]
                  return [sig, fn]
             | n <- consNames
             | (_, i) <- cons ]
  let cons' = [ (n, n') | (n, _) <- cons | n' <- consNames ]
  (consFns ++) <$> declareADT Right ''Int sname cons'

-- | Declares an ADT with 'Int'-valued raw representations.
declareIADT :: String -> [(String, Name)] -> Q [Dec]
declareIADT = declareADT Right ''Int

-- | Declares an ADT with 'String'-valued raw representations.
declareSADT :: String -> [(String, Name)] -> Q [Dec]
declareSADT = declareADT Right ''String
-- | Creates the showJSON member of a JSON instance declaration.
--
-- This will create what is the equivalent of:
--
-- @
-- showJSON = showJSON . /name/ToRaw
-- @
--
-- in an instance JSON /name/ declaration
genShowJSON :: String -> Q Dec
genShowJSON name = do
  body <- [| JSON.showJSON . $(varE (toRawName name)) |]
  return $ FunD 'JSON.showJSON [Clause [] (NormalB body) []]

-- | Creates the readJSON member of a JSON instance declaration.
--
-- This will create what is the equivalent of:
--
-- @
-- readJSON s = case readJSON s of
--                Ok s' -> /name/FromRaw s'
--                Error e -> Error /description/
-- @
--
-- in an instance JSON /name/ declaration
genReadJSON :: String -> Q Dec
genReadJSON name = do
  let s = mkName "s"
  body <- [| $(varE (fromRawName name)) =<<
             readJSONWithDesc $(stringE name) True $(varE s) |]
  return $ FunD 'JSON.readJSON [Clause [VarP s] (NormalB body) []]

-- | Generates a JSON instance for a given type.
--
-- This assumes that the /name/ToRaw and /name/FromRaw functions
-- have been defined as by the 'declareSADT' function.
makeJSONInstance :: Name -> Q [Dec]
makeJSONInstance name = do
  let base = nameBase name
  showJ <- genShowJSON base
  readJ <- genReadJSON base
  return [InstanceD [] (AppT (ConT ''JSON.JSON) (ConT name)) [readJ,showJ]]
-- * Template code for opcodes
-- | Transforms a CamelCase string into an_underscore_based_one, e.g.
-- @OpTestDelay -> OP_TEST_DELAY@.
deCamelCase :: String -> String
deCamelCase str =
  intercalate "_" [ map toUpper word | word <- splitAtUppers str ]
  -- a group ends just before the next uppercase letter
  where splitAtUppers = groupBy (\_ b -> not (isUpper b))

-- | Transform an underscore_name (or dash-name) into a CamelCase one,
-- e.g. @op_test_delay -> OpTestDelay@.
camelCase :: String -> String
camelCase str = concatMap (capitalise . drop 1) chunks
  where
    -- prepending '_' makes the first chunk uniform with the rest:
    -- every chunk starts with a separator that 'drop 1' removes
    chunks = groupBy (\_ b -> b /= '_' && b /= '-') ('_' : str)
    capitalise []     = []
    capitalise (c:cs) = toUpper c : cs
-- | Computes the name of a given constructor; only plain and record
-- constructors are supported.
constructorName :: Con -> Q Name
constructorName (NormalC name _) = return name
constructorName (RecC name _) = return name
constructorName x = fail $ "Unhandled constructor " ++ show x

-- | Extract all constructor names from a given type, via 'reify'.
reifyConsNames :: Name -> Q [String]
reifyConsNames name = do
  reify_result <- reify name
  case reify_result of
    TyConI (DataD _ _ _ cons _) -> mapM (liftM nameBase . constructorName) cons
    o -> fail $ "Unhandled name passed to reifyConsNames, expected\
                \ type constructor but got '" ++ show o ++ "'"
-- | Builds the generic constructor-to-string function.
--
-- This generates a simple function of the following form:
--
-- @
-- fname (ConStructorOne {}) = trans_fun("ConStructorOne")
-- fname (ConStructorTwo {}) = trans_fun("ConStructorTwo")
-- @
--
-- This builds a custom list of name\/string pairs and then uses
-- 'genToRaw' to actually generate the function.
genConstrToStr :: (String -> String) -> Name -> String -> Q [Dec]
genConstrToStr trans_fun name fname = do
  cnames <- reifyConsNames name
  let svalues = map (Left . trans_fun) cnames
  genToRaw ''String (mkName fname) name $ zip cnames svalues

-- | Constructor-to-string for OpCode (names are underscore-separated
-- and uppercased, see 'deCamelCase').
genOpID :: Name -> String -> Q [Dec]
genOpID = genConstrToStr deCamelCase
-- | Builds a list with all defined constructor names for a type.
--
-- @
-- vstr :: String
-- vstr = [...]
-- @
--
-- Where the actual values of the string are the constructor names
-- mapped via @trans_fun@ (and sorted).
genAllConstr :: (String -> String) -> Name -> String -> Q [Dec]
genAllConstr trans_fun name vstr = do
  cnames <- reifyConsNames name
  let svalues = sort $ map trans_fun cnames
      vname = mkName vstr
      sig = SigD vname (AppT ListT (ConT ''String))
      body = NormalB (ListE (map (LitE . StringL) svalues))
  return $ [sig, ValD (VarP vname) body []]

-- | Generates a list of all defined opcode IDs.
genAllOpIDs :: Name -> String -> Q [Dec]
genAllOpIDs = genAllConstr deCamelCase
-- | OpCode parameter (field) type.
type OpParam = (String, Q Type, Q Exp)

-- * Python code generation

-- | A single opcode field, as handed to the Python code generator
-- (built by 'pyField').
data OpCodeField = OpCodeField { ocfName :: String          -- ^ field name
                               , ocfType :: PyType          -- ^ Python type
                               , ocfDefl :: Maybe PyValueEx -- ^ default
                                                            -- value, if any
                               , ocfDoc :: String           -- ^ documentation
                               }

-- | Transfers opcode data between the opcode description (through
-- @genOpCode@) and the Python code generation functions.
data OpCodeDescriptor = OpCodeDescriptor { ocdName :: String
                                           -- ^ opcode (class) name
                                         , ocdType :: PyType
                                           -- ^ Python type of the opcode
                                         , ocdDoc :: String
                                           -- ^ opcode documentation
                                         , ocdFields :: [OpCodeField]
                                           -- ^ the opcode's parameters
                                         , ocdDescr :: String
                                           -- ^ the description field, as
                                           -- given in 'OpCodeConstructor'
                                         }
-- | Optionally encapsulates default values in @PyValueEx@.
--
-- @maybeApp exp typ@ returns a quoted expression that encapsulates
-- the default value @exp@ of an opcode parameter cast to @typ@ in a
-- @PyValueEx@, if @exp@ is @Just@. Otherwise, it returns a quoted
-- expression with @Nothing@.
maybeApp :: Maybe (Q Exp) -> Q Type -> Q Exp
maybeApp Nothing _ =
  [| Nothing |]
maybeApp (Just expr) typ =
  [| Just ($(conE (mkName "PyValueEx")) ($expr :: $typ)) |]

-- | Generates a Python type according to whether the field is
-- optional.
--
-- The type of created expression is PyType.
genPyType' :: OptionalType -> Q Type -> Q PyType
genPyType' opt typ = typ >>= pyOptionalType (opt /= NotOptional)

-- | Generates Python types from opcode parameters.
genPyType :: Field -> Q PyType
genPyType f = genPyType' (fieldIsOptional f) (fieldType f)

-- | Generates Python default values from opcode parameters.
genPyDefault :: Field -> Q Exp
genPyDefault f = maybeApp (fieldDefault f) (fieldType f)

-- | Builds the 'OpCodeField' expression describing one field for the
-- Python code generator.
pyField :: Field -> Q Exp
pyField f = genPyType f >>= \t ->
            [| OpCodeField $(stringE (fieldName f))
                           t
                           $(genPyDefault f)
                           $(stringE (fieldDoc f)) |]
-- | Builds the 'OpCodeDescriptor' expression for one opcode
-- constructor, carrying all the information needed to later render
-- the Python class.
--
-- Note that @consType'@ and @consDscField@ are referenced unspliced
-- inside the quotation, relying on cross-stage persistence (their
-- types are liftable).
pyClass :: OpCodeConstructor -> Q Exp
pyClass (consName, consType, consDoc, consFields, consDscField) =
  do consType' <- genPyType' NotOptional consType
     let consName' = stringE consName
         consDoc' = stringE consDoc
     [| OpCodeDescriptor $consName'
                         consType'
                         $consDoc'
                         $(listE $ map pyField consFields)
                         consDscField |]
-- | Generates a function called "pyClasses" that holds the list of
-- all the opcode descriptors necessary for generating the Python
-- opcodes.
pyClasses :: [OpCodeConstructor] -> Q [Dec]
pyClasses cons =
  do let name = mkName "pyClasses"
         sig = SigD name (AppT ListT (ConT ''OpCodeDescriptor))
     -- @FunD name <$> (:[])@ maps in the function functor, building
     -- @\c -> FunD name [c]@, which is then fmapped over the 'Q'
     -- result of 'declClause' ('<$>' is infixl 4)
     fn <- FunD name <$> (:[]) <$> declClause cons
     return [sig, fn]
  where declClause c =
          clause [] (normalB (ListE <$> mapM pyClass c)) []
-- | Projects an opcode constructor down to a Luxi constructor,
-- keeping only the name and the field list.
opcodeConsToLuxiCons :: (a, b, c, d, e) -> (a, d)
opcodeConsToLuxiCons (cname, _, _, cfields, _) = (cname, cfields)
-- | Generates the OpCode data type.
--
-- This takes an opcode logical definition, and builds both the
-- datatype and the JSON serialisation out of it. We can't use a
-- generic serialisation since we need to be compatible with Ganeti's
-- own, so we have a few quirks to work around.
--
-- Besides the type and (de)serialisation, this also emits the
-- @allOpFields@ helper and the Python descriptors ('pyClasses').
genOpCode :: String                -- ^ Type name to use
          -> [OpCodeConstructor]   -- ^ Constructor name and parameters
          -> Q [Dec]
genOpCode name cons = do
  let tname = mkName name
  decl_d <- mapM (\(cname, _, _, fields, _) -> do
                    -- we only need the type of the field, without Q
                    fields' <- mapM (fieldTypeInfo "op") fields
                    return $ RecC (mkName cname) fields')
            cons
  let declD = DataD [] tname [] decl_d [''Show, ''Eq]
  let (allfsig, allffn) = genAllOpFields "allOpFields" cons
  save_decs <- genSaveOpCode tname "saveOpCode" "toDictOpCode"
               (map opcodeConsToLuxiCons cons) saveConstructor True
  (loadsig, loadfn) <- genLoadOpCode cons
  pyDecls <- pyClasses cons
  return $ [declD, allfsig, allffn, loadsig, loadfn] ++ save_decs ++ pyDecls
-- | Generates the function pattern returning the list of fields for a
-- given constructor (keyed by its deCamelCased opcode ID); extra keys
-- of custom fields are included, duplicates removed and sorted.
genOpConsFields :: OpCodeConstructor -> Clause
genOpConsFields (cname, _, _, fields, _) =
  let op_id = deCamelCase cname
      fvals = map (LitE . StringL) . sort . nub $
              concatMap (\f -> fieldName f:fieldExtraKeys f) fields
  in Clause [LitP (StringL op_id)] (NormalB $ ListE fvals) []

-- | Generates a list of all fields of an opcode constructor; unknown
-- opcode IDs map to the empty list via the final wildcard clause.
genAllOpFields  :: String              -- ^ Function name
                -> [OpCodeConstructor] -- ^ Object definition
                -> (Dec, Dec)
genAllOpFields sname opdefs =
  let cclauses = map genOpConsFields opdefs
      other = Clause [WildP] (NormalB (ListE [])) []
      fname = mkName sname
      sigt = AppT (AppT ArrowT (ConT ''String)) (AppT ListT (ConT ''String))
  in (SigD fname sigt, FunD fname (cclauses++[other]))
-- | Generates the \"save\" clause for an entire opcode constructor.
--
-- This matches the opcode with variables named the same as the
-- constructor fields (just so that the spliced in code looks nicer),
-- and passes those names plus the parameter definition to
-- 'saveObjectField'.
saveConstructor :: LuxiConstructor -- ^ The constructor
                -> Q Clause       -- ^ Resulting clause
saveConstructor (sname, fields) = do
  let cname = mkName sname
  fnames <- mapM (newName . fieldVariable) fields
  let pat = conP cname (map varP fnames)
  -- pair each fresh variable with its field definition
  let felems = zipWith saveObjectField fnames fields
      -- now build the OP_ID serialisation
      opid = [| [( $(stringE "OP_ID"),
                   JSON.showJSON $(stringE . deCamelCase $ sname) )] |]
      flist = listE (opid:felems)
      -- and finally convert all this to a json object
      flist' = [| concat $flist |]
  clause [pat] (normalB flist') []
-- | Generates the main save opcode function.
--
-- This builds a per-constructor match clause that contains the
-- respective constructor-serialisation code.
genSaveOpCode :: Name                          -- ^ Object type
              -> String                        -- ^ To 'JSValue' function name
              -> String                        -- ^ To 'JSObject' function name
              -> [LuxiConstructor]             -- ^ Object definition
              -> (LuxiConstructor -> Q Clause) -- ^ Constructor save fn
              -> Bool                          -- ^ Whether to generate
                                               -- obj or just a
                                               -- list\/tuple of values
              -> Q [Dec]
genSaveOpCode tname jvalstr tdstr opdefs fn gen_object = do
  tdclauses <- mapM fn opdefs
  let typecon = ConT tname
      jvalname = mkName jvalstr
      jvalsig = AppT  (AppT ArrowT typecon) (ConT ''JSON.JSValue)
      tdname = mkName tdstr
  tdsig <- [t| $(return typecon) -> [(String, JSON.JSValue)] |]
  -- the JSValue variant either wraps the dict into an object or
  -- discards the keys and returns a plain list of values
  jvalclause <- if gen_object
                  then [| $makeObjE . $(varE tdname) |]
                  else [| JSON.showJSON . map snd . $(varE tdname) |]
  return [ SigD tdname tdsig
         , FunD tdname tdclauses
         , SigD jvalname jvalsig
         , ValD (VarP jvalname) (NormalB jvalclause) []]
-- | Generates load code for a single constructor of the opcode data type.
--
-- The result is a do-expression binding every field, then applying
-- the constructor to the bound values and returning it.
loadConstructor :: OpCodeConstructor -> Q Exp
loadConstructor (sname, _, _, fields, _) = do
  let name = mkName sname
  fbinds <- mapM (loadObjectField fields) fields
  let (fnames, fstmts) = unzip fbinds
  -- fold the field variables into a saturated constructor application
  let cval = foldl (\accu fn -> AppE accu (VarE fn)) (ConE name) fnames
      fstmts' = fstmts ++ [NoBindS (AppE (VarE 'return) cval)]
  return $ DoE fstmts'
-- | Generates the loadOpCode function, which dispatches on the
-- message's @OP_ID@ key to the per-constructor load code.
genLoadOpCode :: [OpCodeConstructor] -> Q (Dec, Dec)
genLoadOpCode opdefs = do
  let fname = mkName "loadOpCode"
      arg1 = mkName "v"
      objname = mkName "o"
      opid = mkName "op_id"
  -- o <- fromJSObject <$> readJSON v
  st1 <- bindS (varP objname) [| liftM JSON.fromJSObject
                                 (JSON.readJSON $(varE arg1)) |]
  -- op_id <- fromObj o "OP_ID"
  st2 <- bindS (varP opid) [| $fromObjE $(varE objname) $(stringE "OP_ID") |]
  -- the match results (per-constructor blocks)
  mexps <- mapM loadConstructor opdefs
  fails <- [| fail $ "Unknown opcode " ++ $(varE opid) |]
  let mpats = map (\(me, (consName, _, _, _, _)) ->
                    let mp = LitP . StringL . deCamelCase $ consName
                    in Match mp (NormalB me) []
                  ) $ zip mexps opdefs
      defmatch = Match WildP (NormalB fails) []
      cst = NoBindS $ CaseE (VarE opid) $ mpats++[defmatch]
      body = DoE [st1, st2, cst]
  sigt <- [t| JSON.JSValue -> JSON.Result $(conT (mkName "OpCode")) |]
  return $ (SigD fname sigt, FunD fname [Clause [VarP arg1] (NormalB body) []])
-- * Template code for luxi
-- | Constructor-to-string for LuxiOp.
genStrOfOp :: Name -> String -> Q [Dec]
genStrOfOp = genConstrToStr id
-- | Constructor-to-string for MsgKeys.
genStrOfKey :: Name -> String -> Q [Dec]
genStrOfKey = genConstrToStr ensureLower
-- | Generates the LuxiOp data type.
--
-- This takes a Luxi operation definition and builds both the
-- datatype and the function transforming the arguments to JSON.
-- We can't use anything less generic, because the way different
-- operations are serialized differs on both parameter- and top-level.
--
-- There are two things to be defined for each parameter:
--
-- * name
--
-- * type
--
genLuxiOp :: String -> [LuxiConstructor] -> Q [Dec]
genLuxiOp name cons = do
let tname = mkName name
decl_d <- mapM (\(cname, fields) -> do
-- we only need the type of the field, without Q
fields' <- mapM actualFieldType fields
let fields'' = zip (repeat NotStrict) fields'
return $ NormalC (mkName cname) fields'')
cons
let declD = DataD [] (mkName name) [] decl_d [''Show, ''Eq]
save_decs <- genSaveOpCode tname "opToArgs" "opToDict"
cons saveLuxiConstructor False
req_defs <- declareSADT "LuxiReq" .
map (\(str, _) -> ("Req" ++ str, mkName ("luxiReq" ++ str))) $
cons
return $ declD:save_decs ++ req_defs
-- | Generates the \"save\" clause for entire LuxiOp constructor.
saveLuxiConstructor :: LuxiConstructor -> Q Clause
saveLuxiConstructor (sname, fields) = do
let cname = mkName sname
fnames <- mapM (newName . fieldVariable) fields
let pat = conP cname (map varP fnames)
let felems = map (uncurry saveObjectField) (zip fnames fields)
flist = [| concat $(listE felems) |]
clause [pat] (normalB flist) []
-- * "Objects" functionality
-- | Extract the field's declaration from a Field structure.
fieldTypeInfo :: String -> Field -> Q (Name, Strict, Type)
fieldTypeInfo field_pfx fd = do
t <- actualFieldType fd
let n = mkName . (field_pfx ++) . fieldRecordName $ fd
return (n, NotStrict, t)
-- | Build an object declaration.
buildObject :: String -> String -> [Field] -> Q [Dec]
buildObject sname field_pfx fields = do
when (any ((==) AndRestArguments . fieldIsOptional)
. drop 1 $ reverse fields)
$ fail "Objects may have only one AndRestArguments field,\
\ and it must be the last one."
let name = mkName sname
fields_d <- mapM (fieldTypeInfo field_pfx) fields
let decl_d = RecC name fields_d
let declD = DataD [] name [] [decl_d] [''Show, ''Eq]
ser_decls <- buildObjectSerialisation sname fields
return $ declD:ser_decls
-- | Generates an object definition: data type and its JSON instance.
buildObjectSerialisation :: String -> [Field] -> Q [Dec]
buildObjectSerialisation sname fields = do
let name = mkName sname
savedecls <- genSaveObject saveObjectField sname fields
(loadsig, loadfn) <- genLoadObject (loadObjectField fields) sname fields
shjson <- objectShowJSON sname
rdjson <- objectReadJSON sname
let instdecl = InstanceD [] (AppT (ConT ''JSON.JSON) (ConT name))
[rdjson, shjson]
return $ savedecls ++ [loadsig, loadfn, instdecl]
-- | Computes the name of the @toDict@ helper function for the given
-- type name (e.g. @\"Node\"@ becomes @toDictNode@).
toDictName :: String -> Name
toDictName = mkName . ("toDict" ++)
-- | Generates the save object functionality.
--
-- Builds two definitions for the given type: a @toDictNAME@ function
-- converting a value to an association list, and a @saveNAME@ value
-- wrapping that list into a JSON object.
genSaveObject :: (Name -> Field -> Q Exp) -- ^ Per-field save function
              -> String                   -- ^ Type name
              -> [Field]                  -- ^ Field definitions
              -> Q [Dec]
genSaveObject save_fn sname fields = do
  let name = mkName sname
  fnames <- mapM (newName . fieldVariable) fields
  let pat = conP name (map varP fnames)
  let tdname = toDictName sname
  tdsigt <- [t| $(conT name) -> [(String, JSON.JSValue)] |]
  -- serialise each field paired with its bound variable
  let felems = zipWith save_fn fnames fields
      flist = listE felems
      -- and finally convert all this to a json object
      tdlist = [| concat $flist |]
  tclause <- clause [pat] (normalB tdlist) []
  cclause <- [| $makeObjE . $(varE tdname) |]
  let fname = mkName ("save" ++ sname)
  sigt <- [t| $(conT name) -> JSON.JSValue |]
  return [SigD tdname tdsigt, FunD tdname [tclause],
          SigD fname sigt, ValD (VarP fname) (NormalB cclause) []]
-- | Generates the code for saving an object's field, handling the
-- various types of fields that we have.
saveObjectField :: Name -> Field -> Q Exp
saveObjectField fvar field =
let formatFn = fromMaybe [| JSON.showJSON &&& (const []) |] $
fieldShow field
formatCode v = [| let (actual, extra) = $formatFn $(v)
in ($nameE, actual) : extra |]
in case fieldIsOptional field of
OptionalOmitNull -> [| case $(fvarE) of
Nothing -> []
Just v -> $(formatCode [| v |])
|]
OptionalSerializeNull -> [| case $(fvarE) of
Nothing -> [( $nameE, JSON.JSNull )]
Just v -> $(formatCode [| v |])
|]
NotOptional -> formatCode fvarE
AndRestArguments -> [| M.toList $(varE fvar) |]
where nameE = stringE (fieldName field)
fvarE = varE fvar
-- | Generates the showJSON clause for a given object name.
--
-- The generated clause simply delegates to the @saveNAME@ value
-- produced by 'genSaveObject'.
objectShowJSON :: String -> Q Dec
objectShowJSON name = do
  body <- [| JSON.showJSON . $(varE . mkName $ "save" ++ name) |]
  return $ FunD 'JSON.showJSON [Clause [] (NormalB body) []]
-- | Generates the load object functionality.
--
-- Produces the signature and the definition of a @loadNAME@ function
-- that parses a 'JSON.JSValue' into the object, field by field, using
-- the supplied per-field load function.
genLoadObject :: (Field -> Q (Name, Stmt)) -- ^ Per-field load function
              -> String                    -- ^ Type name
              -> [Field]                   -- ^ Field definitions
              -> Q (Dec, Dec)
genLoadObject load_fn sname fields = do
  let name = mkName sname
      funname = mkName $ "load" ++ sname
      -- for a field-less type the JSON argument is unused
      arg1 = mkName $ if null fields then "_" else "v"
      objname = mkName "o"
  st1 <- bindS (varP objname) [| liftM JSON.fromJSObject
                                 (JSON.readJSON $(varE arg1)) |]
  fbinds <- mapM load_fn fields
  let (fnames, fstmts) = unzip fbinds
  let cval = foldl (\accu fn -> AppE accu (VarE fn)) (ConE name) fnames
      retstmt = [NoBindS (AppE (VarE 'return) cval)]
      -- FIXME: should we require an empty dict for an empty type?
      -- this allows any JSValue right now
      fstmts' = if null fields
                  then retstmt
                  else st1:fstmts ++ retstmt
  sigt <- [t| JSON.JSValue -> JSON.Result $(conT name) |]
  return (SigD funname sigt,
          FunD funname [Clause [VarP arg1] (NormalB (DoE fstmts')) []])
-- | Generates code for loading an object's field.
loadObjectField :: [Field] -> Field -> Q (Name, Stmt)
loadObjectField allFields field = do
let name = fieldVariable field
names = map fieldVariable allFields
otherNames = listE . map stringE $ names \\ [name]
fvar <- newName name
-- these are used in all patterns below
let objvar = varNameE "o"
objfield = stringE (fieldName field)
bexp <- case (fieldDefault field, fieldIsOptional field) of
-- Only non-optional fields without defaults must have a value;
-- we treat both optional types the same, since
-- 'maybeFromObj' can deal with both missing and null values
-- appropriately (the same)
(Nothing, NotOptional) ->
loadFn field [| fromObj $objvar $objfield |] objvar
-- AndRestArguments need not to be parsed at all,
-- they're just extracted from the list of other fields.
(Nothing, AndRestArguments) ->
[| return . M.fromList
$ filter (not . (`elem` $otherNames) . fst) $objvar |]
_ -> loadFnOpt field [| maybeFromObj $objvar $objfield |] objvar
return (fvar, BindS (VarP fvar) bexp)
-- | Builds the readJSON instance for a given object name.
objectReadJSON :: String -> Q Dec
objectReadJSON name = do
let s = mkName "s"
body <- [| $(varE . mkName $ "load" ++ name) =<<
readJSONWithDesc $(stringE name) False $(varE s) |]
return $ FunD 'JSON.readJSON [Clause [VarP s] (NormalB body) []]
-- * Inheritable parameter tables implementation
-- | Compute parameter type names.
paramTypeNames :: String -> (String, String)
paramTypeNames root = ("Filled" ++ root ++ "Params",
"Partial" ++ root ++ "Params")
-- | Compute information about the type of a parameter field.
paramFieldTypeInfo :: String -> Field -> Q (Name, Strict, Type)
paramFieldTypeInfo field_pfx fd = do
t <- actualFieldType fd
let n = mkName . (++ "P") . (field_pfx ++) .
fieldRecordName $ fd
return (n, NotStrict, AppT (ConT ''Maybe) t)
-- | Build a parameter declaration.
--
-- This function builds two different data structures: a /filled/ one,
-- in which all fields are required, and a /partial/ one, in which all
-- fields are optional. Due to the current record syntax issues, the
-- fields need to be named differrently for the two structures, so the
-- partial ones get a /P/ suffix.
buildParam :: String -> String -> [Field] -> Q [Dec]
buildParam sname field_pfx fields = do
let (sname_f, sname_p) = paramTypeNames sname
name_f = mkName sname_f
name_p = mkName sname_p
fields_f <- mapM (fieldTypeInfo field_pfx) fields
fields_p <- mapM (paramFieldTypeInfo field_pfx) fields
let decl_f = RecC name_f fields_f
decl_p = RecC name_p fields_p
let declF = DataD [] name_f [] [decl_f] [''Show, ''Eq]
declP = DataD [] name_p [] [decl_p] [''Show, ''Eq]
ser_decls_f <- buildObjectSerialisation sname_f fields
ser_decls_p <- buildPParamSerialisation sname_p fields
fill_decls <- fillParam sname field_pfx fields
return $ [declF, declP] ++ ser_decls_f ++ ser_decls_p ++ fill_decls ++
buildParamAllFields sname fields ++
buildDictObjectInst name_f sname_f
-- | Builds a list of all fields of a parameter.
buildParamAllFields :: String -> [Field] -> [Dec]
buildParamAllFields sname fields =
let vname = mkName ("all" ++ sname ++ "ParamFields")
sig = SigD vname (AppT ListT (ConT ''String))
val = ListE $ map (LitE . StringL . fieldName) fields
in [sig, ValD (VarP vname) (NormalB val) []]
-- | Builds the 'DictObject' instance for a filled parameter.
buildDictObjectInst :: Name -> String -> [Dec]
buildDictObjectInst name sname =
[InstanceD [] (AppT (ConT ''DictObject) (ConT name))
[ValD (VarP 'toDict) (NormalB (VarE (toDictName sname))) []]]
-- | Generates the serialisation for a partial parameter.
buildPParamSerialisation :: String -> [Field] -> Q [Dec]
buildPParamSerialisation sname fields = do
let name = mkName sname
savedecls <- genSaveObject savePParamField sname fields
(loadsig, loadfn) <- genLoadObject loadPParamField sname fields
shjson <- objectShowJSON sname
rdjson <- objectReadJSON sname
let instdecl = InstanceD [] (AppT (ConT ''JSON.JSON) (ConT name))
[rdjson, shjson]
return $ savedecls ++ [loadsig, loadfn, instdecl]
-- | Generates code to save an optional parameter field.
savePParamField :: Name -> Field -> Q Exp
savePParamField fvar field = do
checkNonOptDef field
let actualVal = mkName "v"
normalexpr <- saveObjectField actualVal field
-- we have to construct the block here manually, because we can't
-- splice-in-splice
return $ CaseE (VarE fvar) [ Match (ConP 'Nothing [])
(NormalB (ConE '[])) []
, Match (ConP 'Just [VarP actualVal])
(NormalB normalexpr) []
]
-- | Generates code to load an optional parameter field.
loadPParamField :: Field -> Q (Name, Stmt)
loadPParamField field = do
checkNonOptDef field
let name = fieldName field
fvar <- newName name
-- these are used in all patterns below
let objvar = varNameE "o"
objfield = stringE name
loadexp = [| $(varE 'maybeFromObj) $objvar $objfield |]
bexp <- loadFnOpt field loadexp objvar
return (fvar, BindS (VarP fvar) bexp)
-- | Builds a simple declaration of type @n_x = fromMaybe f_x p_x@.
--
-- Used by 'fillParam' to merge one filled (@f_x@) and one partial
-- (@p_x@) field into the new (@n_x@) value, preferring the partial
-- value when it is present.
buildFromMaybe :: String -> Q Dec
buildFromMaybe fname =
  valD (varP (mkName $ "n_" ++ fname))
         (normalB [| $(varE 'fromMaybe)
                       $(varNameE $ "f_" ++ fname)
                       $(varNameE $ "p_" ++ fname) |]) []
-- | Builds a function that executes the filling of partial parameter
-- from a full copy (similar to Python's fillDict).
fillParam :: String -> String -> [Field] -> Q [Dec]
fillParam sname field_pfx fields = do
let fnames = map (\fd -> field_pfx ++ fieldRecordName fd) fields
(sname_f, sname_p) = paramTypeNames sname
oname_f = "fobj"
oname_p = "pobj"
name_f = mkName sname_f
name_p = mkName sname_p
fun_name = mkName $ "fill" ++ sname ++ "Params"
le_full = ValD (ConP name_f (map (VarP . mkName . ("f_" ++)) fnames))
(NormalB . VarE . mkName $ oname_f) []
le_part = ValD (ConP name_p (map (VarP . mkName . ("p_" ++)) fnames))
(NormalB . VarE . mkName $ oname_p) []
obj_new = foldl (\accu vname -> AppE accu (VarE vname)) (ConE name_f)
$ map (mkName . ("n_" ++)) fnames
le_new <- mapM buildFromMaybe fnames
funt <- [t| $(conT name_f) -> $(conT name_p) -> $(conT name_f) |]
let sig = SigD fun_name funt
fclause = Clause [VarP (mkName oname_f), VarP (mkName oname_p)]
(NormalB $ LetE (le_full:le_part:le_new) obj_new) []
fun = FunD fun_name [fclause]
return [sig, fun]
-- * Template code for exceptions
-- | Exception simple error message field.
excErrMsg :: (String, Q Type)
excErrMsg = ("errMsg", [t| String |])
-- | Builds an exception type definition.
genException :: String -- ^ Name of new type
-> SimpleObject -- ^ Constructor name and parameters
-> Q [Dec]
genException name cons = do
let tname = mkName name
declD <- buildSimpleCons tname cons
(savesig, savefn) <- genSaveSimpleObj tname ("save" ++ name) cons $
uncurry saveExcCons
(loadsig, loadfn) <- genLoadExc tname ("load" ++ name) cons
return [declD, loadsig, loadfn, savesig, savefn]
-- | Generates the \"save\" clause for an entire exception constructor.
--
-- This matches the exception with variables named the same as the
-- constructor fields (just so that the spliced in code looks nicer),
-- and calls showJSON on it.
saveExcCons :: String -- ^ The constructor name
-> [SimpleField] -- ^ The parameter definitions for this
-- constructor
-> Q Clause -- ^ Resulting clause
saveExcCons sname fields = do
let cname = mkName sname
fnames <- mapM (newName . fst) fields
let pat = conP cname (map varP fnames)
felems = if null fnames
then conE '() -- otherwise, empty list has no type
else listE $ map (\f -> [| JSON.showJSON $(varE f) |]) fnames
let tup = tupE [ litE (stringL sname), felems ]
clause [pat] (normalB [| JSON.showJSON $tup |]) []
-- | Generates load code for a single constructor of an exception.
--
-- Generates the code (if there's only one argument, we will use a
-- list, not a tuple:
--
-- @
-- do
-- (x1, x2, ...) <- readJSON args
-- return $ Cons x1 x2 ...
-- @
loadExcConstructor :: Name -> String -> [SimpleField] -> Q Exp
loadExcConstructor inname sname fields = do
let name = mkName sname
f_names <- mapM (newName . fst) fields
let read_args = AppE (VarE 'JSON.readJSON) (VarE inname)
let binds = case f_names of
[x] -> BindS (ListP [VarP x])
_ -> BindS (TupP (map VarP f_names))
cval = foldl (\accu fn -> AppE accu (VarE fn)) (ConE name) f_names
return $ DoE [binds read_args, NoBindS (AppE (VarE 'return) cval)]
{-| Generates the loadException function.
This generates a quite complicated function, along the lines of:
@
loadFn (JSArray [JSString name, args]) = case name of
"A1" -> do
(x1, x2, ...) <- readJSON args
return $ A1 x1 x2 ...
"a2" -> ...
s -> fail $ "Unknown exception" ++ s
loadFn v = fail $ "Expected array but got " ++ show v
@
-}
genLoadExc :: Name -> String -> SimpleObject -> Q (Dec, Dec)
genLoadExc tname sname opdefs = do
let fname = mkName sname
exc_name <- newName "name"
exc_args <- newName "args"
exc_else <- newName "s"
arg_else <- newName "v"
fails <- [| fail $ "Unknown exception '" ++ $(varE exc_else) ++ "'" |]
-- default match for unknown exception name
let defmatch = Match (VarP exc_else) (NormalB fails) []
-- the match results (per-constructor blocks)
str_matches <-
mapM (\(s, params) -> do
body_exp <- loadExcConstructor exc_args s params
return $ Match (LitP (StringL s)) (NormalB body_exp) [])
opdefs
-- the first function clause; we can't use [| |] due to TH
-- limitations, so we have to build the AST by hand
let clause1 = Clause [ConP 'JSON.JSArray
[ListP [ConP 'JSON.JSString [VarP exc_name],
VarP exc_args]]]
(NormalB (CaseE (AppE (VarE 'JSON.fromJSString)
(VarE exc_name))
(str_matches ++ [defmatch]))) []
-- the fail expression for the second function clause
fail_type <- [| fail $ "Invalid exception: expected '(string, [args])' " ++
" but got " ++ show (pp_value $(varE arg_else)) ++ "'"
|]
-- the second function clause
let clause2 = Clause [VarP arg_else] (NormalB fail_type) []
sigt <- [t| JSON.JSValue -> JSON.Result $(conT tname) |]
return $ (SigD fname sigt, FunD fname [clause1, clause2])
| badp/ganeti | src/Ganeti/THH.hs | gpl-2.0 | 52,329 | 12 | 21 | 14,147 | 11,349 | 6,088 | 5,261 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude, RankNTypes #-}
module Lamdu.Sugar.RedundantTypes
( redundantTypes
) where
import Prelude.Compat
import Control.Lens (Traversal')
import qualified Control.Lens as Lens
import Control.Lens.Operators
import Lamdu.Sugar.Types
-- | Worker for 'redundantTypes'.  The 'Bool' controls whether the
-- root expression's own payload is visited by the traversal (see
-- 'mk' below); recursive calls set it per body constructor.
redundantTypesDefaultTop :: Bool -> Traversal' (Expression name m a) (Payload m a)
redundantTypesDefaultTop topRedundant f (Expression body pl) =
    case body of
    -- the cases below feed the node's own payload to 'f' (via
    -- 'redundant'), in addition to recursing into the children
    BodyGetVar (GetVarNamed NamedVar { _nvVarType = GetFieldParameter }) -> redundant
    BodyGetVar (GetVarNamed NamedVar { _nvVarType = GetParameter }) -> redundant
    BodyLiteralInteger {} -> redundant
    BodyRecord{} -> redundant
    BodyList{} -> redundantChildren
    BodyToNom nom ->
        nom & Lens.traversed . redundantTypesDefaultTop True %%~ f
        <&> BodyToNom & mk
    BodyApply (Apply func specialArgs annotatedArgs) ->
        Apply
        <$> ( func & redundantTypesDefaultTop True %%~ f )
        <*> ( specialArgs & Lens.traversed recurse )
        <*> ( annotatedArgs & Lens.traversed . Lens.traversed %%~ recurse )
        <&> BodyApply & mk
    BodyCase (Case kind alts caseTail mAddAlt entityId) ->
        Case
        <$> (kind & Lens.traversed %%~ recurse)
        <*> ( alts
              & Lens.traversed . Lens.traversed
              . rBody . _BodyLam . Lens.traversed
              . redundantTypesDefaultTop True %%~ f)
        <*> (caseTail & Lens.traversed %%~ recurse)
        <*> pure mAddAlt
        <*> pure entityId
        <&> BodyCase & mk
    -- default: just recurse into the children
    _ -> mk recBody
    where
        recurse = redundantTypes f
        -- rebuild the node; visit the top payload only if requested
        mk newBody =
            Expression <$> newBody <*> (if topRedundant then f else pure) pl
        recBody = body & Lens.traversed recurse
        redundant = Expression <$> recBody <*> f pl
        redundantChildren =
            body & Lens.traversed . redundantTypesDefaultTop True %%~ f & mk
-- | Traversal over the payloads selected by
-- 'redundantTypesDefaultTop', with the top-level expression's own
-- payload excluded ('False').
redundantTypes :: Traversal' (Expression name m a) (Payload m a)
redundantTypes = redundantTypesDefaultTop False
| rvion/lamdu | Lamdu/Sugar/RedundantTypes.hs | gpl-3.0 | 2,041 | 0 | 22 | 555 | 564 | 289 | 275 | -1 | -1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.LevelPuzzleMode.LevelPuzzleData.Fancy.ShadeWall
(
ShadeWall (..),
loadShadeWall,
unloadShadeWall,
) where
import MyPrelude
import File
import Game.Shade
import OpenGL
import OpenGL.Helpers
-- | GL resources for drawing level walls: the compiled shader
-- program, the locations of its uniforms, and the wall texture.
data ShadeWall =
    ShadeWall
    {
        shadeWallPrg :: !GLuint,                 -- shader program object
        shadeWallUniAlpha :: !GLint,             -- location of u_alpha
        shadeWallUniProjModvMatrix :: !GLint,    -- location of u_projmodv_matrix
        shadeWallUniNormalMatrix :: !GLint,      -- location of u_normal_matrix
        shadeWallUniRefDir :: !GLint,            -- location of u_ref_dir
        shadeWallTex :: !GLuint                  -- wall texture object
    }
-- | Compiles the wall shader program, looks up its uniform
-- locations, loads the wall texture and collects everything into a
-- 'ShadeWall'.
loadShadeWall :: IO ShadeWall
loadShadeWall = do
    vsh <- fileStaticData "shaders/LevelWall.vsh"
    fsh <- fileStaticData "shaders/LevelWall.fsh"
    prg <- createPrg vsh fsh [ (attPos, "a_pos"),
                               (attNormal, "a_normal"),
                               (attTexCoord, "a_texcoord") ] [
                               (tex0, "u_tex") ]
    uProjModvMatrix <- getUniformLocation prg "u_projmodv_matrix"
    uNormalMatrix <- getUniformLocation prg "u_normal_matrix"
    uAlpha <- getUniformLocation prg "u_alpha"
    uRefDir <- getUniformLocation prg "u_ref_dir"
    -- tex
    tex <- makeTex "LevelPuzzle/Output/wall_tex.png"
    -- tmp, set RefDir
    glProgramUniform3fEXT prg uRefDir 1.0 0.0 0.0
    return ShadeWall
           {
               shadeWallPrg = prg,
               shadeWallUniAlpha = uAlpha,
               shadeWallUniProjModvMatrix = uProjModvMatrix,
               shadeWallUniNormalMatrix = uNormalMatrix,
               shadeWallUniRefDir = uRefDir,
               shadeWallTex = tex
           }
    where
      -- load a static-data image into a new 2D texture with
      -- linear/trilinear filtering, edge clamping and mipmaps
      makeTex path = do
        tex <- bindNewTex gl_TEXTURE_2D
        glTexParameteri gl_TEXTURE_2D gl_TEXTURE_MAG_FILTER $
            fI gl_LINEAR
        glTexParameteri gl_TEXTURE_2D gl_TEXTURE_MIN_FILTER $
            fI gl_LINEAR_MIPMAP_LINEAR
        glTexParameteri gl_TEXTURE_2D gl_TEXTURE_WRAP_S $
            fI gl_CLAMP_TO_EDGE
        glTexParameteri gl_TEXTURE_2D gl_TEXTURE_WRAP_T $
            fI gl_CLAMP_TO_EDGE
        path <- fileStaticData path
        loadTexture gl_TEXTURE_2D tex path
        glGenerateMipmap gl_TEXTURE_2D
        return tex
-- | Releases the resources held by a 'ShadeWall'.  Currently a
-- no-op: neither the program nor the texture is deleted (hence the
-- fixme below).
unloadShadeWall :: ShadeWall -> IO ()
unloadShadeWall sh = do
    return ()
    -- fixme
| karamellpelle/grid | designer/source/Game/LevelPuzzleMode/LevelPuzzleData/Fancy/ShadeWall.hs | gpl-3.0 | 3,082 | 0 | 10 | 919 | 459 | 242 | 217 | 68 | 1 |
module UCeuler5
( solve
) where
-- | The smallest positive number evenly divisible by every number
-- from 1 to 20, i.e. the least common multiple of that whole range
-- (Project Euler problem 5).
smallest_even_divisable :: Int
smallest_even_divisable = foldl1 lcm [1 .. 20]
solve :: Int -> [Char]
solve ucid = "Solved UC "++show(ucid)++": Result is: "++show(smallest_even_divisable) | tedhag/teuler | haskell/rest-euler/src/UCeuler5.hs | gpl-3.0 | 218 | 0 | 9 | 29 | 73 | 40 | 33 | 6 | 1 |
module Math.LambdaUtil.Generator
(generateExpr
, defaultParams) where
import Control.Monad.Random
import Control.Applicative
import Data.Functor
import Math.LambdaUtil.LambdaExpr
data GenParams = GenParams { depth :: Int -- ^ Max depth of expression
, vars :: Int -- ^ Max number of variables in expression
}
-- | Default generation limits: expressions at most 3 levels deep,
-- with at most 3 bound variables.
defaultParams :: GenParams
defaultParams = GenParams 3 3
generateExpr :: RandomGen g =>
GenParams
-> Rand g Expr
generateExpr (GenParams d v) = generate [] d v
-- | Generate a random lambda expression within the given limits.
generate :: RandomGen g =>
            [Name] -- ^ Defined variables
         -> Int -- ^ Rest depth
         -> Int -- ^ Rest vars
         -> Rand g Expr -- ^ Random lambda expression
generate names depth vars
  -- no variable in scope yet, so a bare 'Var' is impossible: only an
  -- application or a fresh lambda can be produced here
  | null names = do
      f <- randomChoice [generateApp, generateLambda]
      f names depth vars
  -- depth budget exhausted: close the expression with a variable
  | depth <= 0 = generateVar names
  -- no fresh variables left: lambdas are no longer allowed
  | vars == 0 = generateApp names depth vars
  | otherwise = do
      f <- randomChoice [\n _ _ -> generateVar n, generateApp, generateLambda]
      f names depth vars
generateVar :: RandomGen g =>
[Name]
-> Rand g Expr
generateVar names = Var <$> (randomChoice names)
generateApp :: RandomGen g =>
[Name]
-> Int
-> Int
-> Rand g Expr
generateApp n d v = (App <$> (generate n (d - 1) v)) <*> (generate n (d - 1) v)
generateLambda :: RandomGen g =>
[Name]
-> Int
-> Int
-> Rand g Expr
generateLambda n d v = Lambda newVar <$> generate (newVar:n) (d - 1) (v - 1)
where
newVar = notIn n
-- | Pick a uniformly random element of a non-empty list.  Calling
-- this with an empty list is a programming error.
randomChoice :: RandomGen g =>
                [a]
             -> Rand g a
randomChoice [] = error "List is empty"
randomChoice xs = do
  ix <- getRandomR (0, length xs - 1)
  return (xs !! ix)
-- | Produce a variable name that does not occur in the given list:
-- the first unused name from a fixed alphabet, falling back to
-- primed versions (\"x'\", \"x''\", ...) of the first letter.
notIn :: [Name] -> Name
notIn names = head $ filter (`notElem` names) vars'
  where
    vars = ["x", "y", "z", "a", "b", "c"]
    -- vars' is infinite, so 'head' on the filtered list is total
    vars' = vars ++ (iterate (++"'") (head vars))
| zakharvoit/lambda-util | src/Math/LambdaUtil/Generator.hs | gpl-3.0 | 2,073 | 0 | 12 | 724 | 676 | 354 | 322 | 55 | 1 |
{-# LANGUAGE ScopedTypeVariables, NoMonomorphismRestriction, RecordWildCards #-}
module Main where
import Control.Applicative
import Control.Monad
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.State
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Traversable as T
import qualified Data.HashMap.Lazy as HM
import Data.Maybe
import System.Directory
import System.Environment
import System.FilePath ((</>))
import System.IO
import System.Log.Logger
--
import HEP.Parser.LHE.Type
import HEP.Automation.MadGraph.Model.ADMXQLD111
import HEP.Automation.MadGraph.Run
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Type
--
import HEP.Automation.EventChain.Driver
import HEP.Automation.EventChain.File
import HEP.Automation.EventChain.LHEConn
import HEP.Automation.EventChain.Type.Skeleton
import HEP.Automation.EventChain.Type.Spec
import HEP.Automation.EventChain.Type.Process
import HEP.Automation.EventChain.SpecDSL
import HEP.Automation.EventChain.Simulator
import HEP.Automation.EventChain.Process
import HEP.Automation.EventChain.Process.Generator
import HEP.Automation.EventGeneration.Config
import HEP.Automation.EventGeneration.Type
import HEP.Automation.EventGeneration.Work
import HEP.Storage.WebDAV
--
import qualified Paths_madgraph_auto as PMadGraph
import qualified Paths_madgraph_auto_model as PModel
jets = [1,2,3,4,-1,-2,-3,-4,21]
leptons = [11,13,-11,-13]
lepplusneut = [11,12,13,14,-11,-12,-13,-14]
adms = [9000201,-9000201,9000202,-9000202]
sup = [1000002,-1000002]
sdownR = [2000001,-2000001]
p_sdownR :: DDecay
p_sdownR = d (sdownR, [t lepplusneut, t jets, t adms])
p_2sd_2l2j2x :: DCross
p_2sd_2l2j2x = x (t proton, t proton, [p_sdownR, p_sdownR])
idx_2sd_2l2j2x :: CrossID ProcSmplIdx
idx_2sd_2l2j2x = mkCrossIDIdx (mkDICross p_2sd_2l2j2x)
map_2sd_2l2j2x :: ProcSpecMap
map_2sd_2l2j2x =
HM.fromList [(Nothing , MGProc [] [ "p p > dr dr~ QED=0"
, "p p > dr dr QED=0"
, "p p > dr~ dr~ QED=0"])
,(Just (3,-2000001,[]), MGProc [] [ "dr~ > u~ e+ sxxp~"
, "dr~ > d~ ve~ sxxp~" ])
,(Just (3,2000001,[]) , MGProc [] [ "dr > u e- sxxp"
, "dr > d ve sxxp" ])
,(Just (4,-2000001,[]), MGProc [] [ "dr~ > u~ e+ sxxp~ "
, "dr~ > d~ ve~ sxxp~ " ])
,(Just (4,2000001,[]) , MGProc [] [ "dr > u e- sxxp "
, "dr > d ve sxxp " ])
]
modelparam mgl msq msl mneut = ADMXQLD111Param mgl msq msl mneut
-- | Run setup for @n@ events at LHC7\/ATLAS: PYTHIA and PGS are run,
-- and the LHE sanitizer replaces PDG codes (+\/-)9000201 by 1000022
-- in the event files.
mgrunsetup :: Int -> RunSetup
mgrunsetup n =
  RS { numevent = n
     , machine = LHC7 ATLAS
     , rgrun = Auto
     , rgscale = 200.0
     , match = NoMatch
     , cut = NoCut
     , pythia = RunPYTHIA
     , lhesanitizer = -- NoLHESanitize
                      LHESanitize (Replace [(9000201,1000022),(-9000201,1000022)])
     , pgs = RunPGS (AntiKTJet 0.4,NoTau)
     , uploadhep = NoUploadHEP
     , setnum = 1
     }
worksets = [ (mgl,msq,50000,50000, 10000) | mgl <- [50000], msq <- [700,800..2000] ]
-- | mgl <- [200,300..2000], msq <- [100,200..mgl-100] ]
-- | Entry point: takes the configuration file path as the first
-- command-line argument and runs 'scanwork' for every parameter
-- tuple in 'worksets'.
main :: IO ()
main = do
  -- first CLI argument: path to the event-generation config file
  fp <- (!! 0) <$> getArgs
  updateGlobalLogger "MadGraphAuto" (setLevel DEBUG)
  mapM_ (scanwork fp) worksets
-- | Assemble a 'ScriptSetup' from the given directories; the model
-- and run template directories are looked up from the installed
-- data files of the madgraph-auto packages.
getScriptSetup :: FilePath -- ^ sandbox directory
               -> FilePath -- ^ mg5base
               -> FilePath -- ^ main montecarlo run
               -> IO ScriptSetup
getScriptSetup dir_sb dir_mg5 dir_mc = do
  dir_mdl <- (</> "template") <$> PModel.getDataDir
  dir_tmpl <- (</> "template") <$> PMadGraph.getDataDir
  return $
    SS { modeltmpldir = dir_mdl
       , runtmpldir = dir_tmpl
       , sandboxdir = dir_sb
       , mg5base = dir_mg5
       , mcrundir = dir_mc
       }
scanwork :: FilePath -> (Double,Double,Double,Double,Int) -> IO ()
scanwork fp (mgl,msq,msl,mneut,n) = do
homedir <- getHomeDirectory
getConfig fp >>=
maybe (return ()) (\ec -> do
let ssetup = evgen_scriptsetup ec
whost = evgen_webdavroot ec
pkey = evgen_privatekeyfile ec
pswd = evgen_passwordstore ec
Just cr <- getCredential pkey pswd
let wdavcfg = WebDAVConfig { webdav_credential = cr
, webdav_baseurl = whost }
param = modelparam mgl msq msl mneut
mgrs = mgrunsetup n
evchainGen ADMXQLD111
ssetup
("Work20130410_2sd","2sd_2l2j2x")
param
map_2sd_2l2j2x p_2sd_2l2j2x
mgrs
let wsetup' = getWorkSetupCombined ADMXQLD111 ssetup param ("Work20130410_2sd","2sd_2l2j2x") mgrs
wsetup = wsetup' { ws_storage = WebDAVRemoteDir "montecarlo/admproject/XQLD/scan" }
putStrLn "phase2work start"
phase2work wsetup
putStrLn "phase3work start"
phase3work wdavcfg wsetup
)
phase2work :: WorkSetup ADMXQLD111 -> IO ()
phase2work wsetup = do
r <- flip runReaderT wsetup . runErrorT $ do
ws <- ask
let (ssetup,psetup,param,rsetup) =
((,,,) <$> ws_ssetup <*> ws_psetup <*> ws_param <*> ws_rsetup) ws
cardPrepare
case (lhesanitizer rsetup,pythia rsetup) of
(NoLHESanitize,_) -> return ()
(LHESanitize pid, RunPYTHIA) -> do
sanitizeLHE
runPYTHIA
-- runHEP2LHE
runPGS
runClean
-- updateBanner
(LHESanitize pid, NoPYTHIA) -> do
sanitizeLHE
-- updateBanner
cleanHepFiles
print r
return ()
-- | Third phase of the job: hand the produced files to
-- 'uploadEventFull' using the given WebDAV configuration.
phase3work :: WebDAVConfig -> WorkSetup ADMXQLD111 -> IO ()
phase3work wdav wsetup = do
    uploadEventFull NoUploadHEP wdav wsetup
    return ()
{-
p_sqsg_2l3j2x :: DCross
p_sqsg_2l3j2x = x (t proton, t proton, [p_sup,p_gluino])
p_2sg_2l4j2x :: DCross
p_2sg_2l4j2x = x (t proton, t proton, [p_gluino,p_gluino])
p_gluino :: DDecay
p_gluino = d ([1000021], [p_sup,t jets])
-}
{-
p_sup :: DDecay
p_sup = d (sup, [t leptons, t jets, t adms])
-}
{-
idx_2sg_2l4j2x :: CrossID ProcSmplIdx
idx_2sg_2l4j2x = mkCrossIDIdx (mkDICross p_2sg_2l4j2x)
idx_sqsg_2l3j2x :: CrossID ProcSmplIdx
idx_sqsg_2l3j2x = mkCrossIDIdx (mkDICross p_sqsg_2l3j2x)
-}
{-
map_2sg_2l4j2x :: ProcSpecMap
map_2sg_2l4j2x =
HM.fromList [(Nothing , MGProc [] ["p p > go go QED=0"])
,(Just (3,1000021,[]) , MGProc [] ["go > ul u~"
,"go > ul~ u "])
,(Just (4,1000021,[]) , MGProc [] ["go > ul u~ "
,"go > ul~ u "])
,(Just (1,1000002,[3]), MGProc [] ["ul > d e+ sxxp~ "])
,(Just (1,-1000002,[3]),MGProc [] ["ul~ > d~ e- sxxp "])
,(Just (1,1000002,[4]), MGProc [] ["ul > d e+ sxxp~ "])
,(Just (1,-1000002,[4]),MGProc [] ["ul~ > d~ e- sxxp "])
]
-}
{-
map_sqsg_2l3j2x :: ProcSpecMap
map_sqsg_2l3j2x =
HM.fromList [(Nothing , "\n\
\generate p p > ul go QED=0\n\
\add process p p > ul~ go QED=0 \n")
,(Just (3,1000002,[]) , "\ngenerate ul > d e+ sxxp~ \n")
,(Just (3,-1000002,[]), "\ngenerate ul~ > d~ e- sxxp \n")
,(Just (4,1000021,[]) , "\n\
\generate go > ul u~ \n\
\add process go > ul~ u \n" )
,(Just (1,1000002,[4]), "\ngenerate ul > d e+ sxxp~ \n")
,(Just (1,-1000002,[4]),"\ngenerate ul~ > d~ e- sxxp \n")
]
-}
| wavewave/lhc-analysis-collection | exe/evchainRunXQLD.hs | gpl-3.0 | 8,161 | 0 | 20 | 2,694 | 1,596 | 912 | 684 | 146 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.TPU.Projects.Locations.TensorflowVersions.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List TensorFlow versions supported by this API.
--
-- /See:/ <https://cloud.google.com/tpu/ Cloud TPU API Reference> for @tpu.projects.locations.tensorflowVersions.list@.
module Network.Google.Resource.TPU.Projects.Locations.TensorflowVersions.List
(
-- * REST Resource
ProjectsLocationsTensorflowVersionsListResource
-- * Creating a Request
, projectsLocationsTensorflowVersionsList
, ProjectsLocationsTensorflowVersionsList
-- * Request Lenses
, pltvlParent
, pltvlXgafv
, pltvlUploadProtocol
, pltvlOrderBy
, pltvlAccessToken
, pltvlUploadType
, pltvlFilter
, pltvlPageToken
, pltvlPageSize
, pltvlCallback
) where
import Network.Google.Prelude
import Network.Google.TPU.Types
-- | A resource alias for @tpu.projects.locations.tensorflowVersions.list@ method which the
-- 'ProjectsLocationsTensorflowVersionsList' request conforms to.
--
-- The request is a GET on @\/v1\/{parent}\/tensorflowVersions@ with
-- the query parameters listed below.
type ProjectsLocationsTensorflowVersionsListResource
     =
     "v1" :>
       Capture "parent" Text :>
         "tensorflowVersions" :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "orderBy" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "filter" Text :>
                       QueryParam "pageToken" Text :>
                         QueryParam "pageSize" (Textual Int32) :>
                           QueryParam "callback" Text :>
                             QueryParam "alt" AltJSON :>
                               Get '[JSON] ListTensorFlowVersionsResponse
-- | List TensorFlow versions supported by this API.
--
-- /See:/ 'projectsLocationsTensorflowVersionsList' smart constructor.
data ProjectsLocationsTensorflowVersionsList =
  ProjectsLocationsTensorflowVersionsList'
    { _pltvlParent :: !Text -- ^ required path parameter: parent resource name
    , _pltvlXgafv :: !(Maybe Xgafv) -- ^ optional query parameters follow
    , _pltvlUploadProtocol :: !(Maybe Text)
    , _pltvlOrderBy :: !(Maybe Text)
    , _pltvlAccessToken :: !(Maybe Text)
    , _pltvlUploadType :: !(Maybe Text)
    , _pltvlFilter :: !(Maybe Text)
    , _pltvlPageToken :: !(Maybe Text)
    , _pltvlPageSize :: !(Maybe (Textual Int32)) -- ^ wire representation; exposed as plain 'Int32' by 'pltvlPageSize'
    , _pltvlCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsTensorflowVersionsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pltvlParent'
--
-- * 'pltvlXgafv'
--
-- * 'pltvlUploadProtocol'
--
-- * 'pltvlOrderBy'
--
-- * 'pltvlAccessToken'
--
-- * 'pltvlUploadType'
--
-- * 'pltvlFilter'
--
-- * 'pltvlPageToken'
--
-- * 'pltvlPageSize'
--
-- * 'pltvlCallback'
projectsLocationsTensorflowVersionsList
    :: Text -- ^ 'pltvlParent'
    -> ProjectsLocationsTensorflowVersionsList
projectsLocationsTensorflowVersionsList pPltvlParent_ =
  ProjectsLocationsTensorflowVersionsList'
    { _pltvlParent = pPltvlParent_
    , _pltvlXgafv = Nothing -- every optional query parameter starts out unset
    , _pltvlUploadProtocol = Nothing
    , _pltvlOrderBy = Nothing
    , _pltvlAccessToken = Nothing
    , _pltvlUploadType = Nothing
    , _pltvlFilter = Nothing
    , _pltvlPageToken = Nothing
    , _pltvlPageSize = Nothing
    , _pltvlCallback = Nothing
    }
-- | Required. The parent resource name.
pltvlParent :: Lens' ProjectsLocationsTensorflowVersionsList Text
pltvlParent
  = lens _pltvlParent (\ s a -> s{_pltvlParent = a})
-- The remaining lenses cover the optional query parameters one-to-one.
-- | V1 error format.
pltvlXgafv :: Lens' ProjectsLocationsTensorflowVersionsList (Maybe Xgafv)
pltvlXgafv
  = lens _pltvlXgafv (\ s a -> s{_pltvlXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pltvlUploadProtocol :: Lens' ProjectsLocationsTensorflowVersionsList (Maybe Text)
pltvlUploadProtocol
  = lens _pltvlUploadProtocol
      (\ s a -> s{_pltvlUploadProtocol = a})
-- | Sort results.
pltvlOrderBy :: Lens' ProjectsLocationsTensorflowVersionsList (Maybe Text)
pltvlOrderBy
  = lens _pltvlOrderBy (\ s a -> s{_pltvlOrderBy = a})
-- | OAuth access token.
pltvlAccessToken :: Lens' ProjectsLocationsTensorflowVersionsList (Maybe Text)
pltvlAccessToken
  = lens _pltvlAccessToken
      (\ s a -> s{_pltvlAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pltvlUploadType :: Lens' ProjectsLocationsTensorflowVersionsList (Maybe Text)
pltvlUploadType
  = lens _pltvlUploadType
      (\ s a -> s{_pltvlUploadType = a})
-- | List filter.
pltvlFilter :: Lens' ProjectsLocationsTensorflowVersionsList (Maybe Text)
pltvlFilter
  = lens _pltvlFilter (\ s a -> s{_pltvlFilter = a})
-- | The next_page_token value returned from a previous List request, if any.
pltvlPageToken :: Lens' ProjectsLocationsTensorflowVersionsList (Maybe Text)
pltvlPageToken
  = lens _pltvlPageToken
      (\ s a -> s{_pltvlPageToken = a})
-- | The maximum number of items to return.
pltvlPageSize :: Lens' ProjectsLocationsTensorflowVersionsList (Maybe Int32)
pltvlPageSize
  = lens _pltvlPageSize
      (\ s a -> s{_pltvlPageSize = a})
      . mapping _Coerce
-- ('mapping _Coerce' strips the 'Textual' wire wrapper, exposing 'Int32')
-- | JSONP
pltvlCallback :: Lens' ProjectsLocationsTensorflowVersionsList (Maybe Text)
pltvlCallback
  = lens _pltvlCallback
      (\ s a -> s{_pltvlCallback = a})
-- Wires the request record to the servant-style route above: the
-- response type, the required OAuth scope, and how the record's fields
-- are threaded into the generated client.
instance GoogleRequest
           ProjectsLocationsTensorflowVersionsList
         where
        type Rs ProjectsLocationsTensorflowVersionsList =
             ListTensorFlowVersionsResponse
        type Scopes ProjectsLocationsTensorflowVersionsList =
             '["https://www.googleapis.com/auth/cloud-platform"]
        -- argument order must match the parameter order of
        -- 'ProjectsLocationsTensorflowVersionsListResource'
        requestClient
          ProjectsLocationsTensorflowVersionsList'{..}
          = go _pltvlParent _pltvlXgafv _pltvlUploadProtocol
              _pltvlOrderBy
              _pltvlAccessToken
              _pltvlUploadType
              _pltvlFilter
              _pltvlPageToken
              _pltvlPageSize
              _pltvlCallback
              (Just AltJSON)
              tPUService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           ProjectsLocationsTensorflowVersionsListResource)
                      mempty
| brendanhay/gogol | gogol-tpu/gen/Network/Google/Resource/TPU/Projects/Locations/TensorflowVersions/List.hs | mpl-2.0 | 6,960 | 0 | 20 | 1,610 | 1,039 | 598 | 441 | 153 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Gmail.Users.Threads.Untrash
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Removes the specified thread from the trash.
--
-- /See:/ <https://developers.google.com/gmail/api/ Gmail API Reference> for @gmail.users.threads.untrash@.
module Network.Google.Resource.Gmail.Users.Threads.Untrash
(
-- * REST Resource
UsersThreadsUntrashResource
-- * Creating a Request
, usersThreadsUntrash
, UsersThreadsUntrash
-- * Request Lenses
, utuXgafv
, utuUploadProtocol
, utuAccessToken
, utuUploadType
, utuUserId
, utuId
, utuCallback
) where
import Network.Google.Gmail.Types
import Network.Google.Prelude
-- | A resource alias for @gmail.users.threads.untrash@ method which the
-- 'UsersThreadsUntrash' request conforms to.
type UsersThreadsUntrashResource =
"gmail" :>
"v1" :>
"users" :>
Capture "userId" Text :>
"threads" :>
Capture "id" Text :>
"untrash" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Post '[JSON] Thread
-- | Removes the specified thread from the trash.
--
-- /See:/ 'usersThreadsUntrash' smart constructor.
data UsersThreadsUntrash =
  UsersThreadsUntrash'
    { _utuXgafv :: !(Maybe Xgafv) -- ^ optional query parameters
    , _utuUploadProtocol :: !(Maybe Text)
    , _utuAccessToken :: !(Maybe Text)
    , _utuUploadType :: !(Maybe Text)
    , _utuUserId :: !Text -- ^ path parameter; the smart constructor defaults this to \"me\"
    , _utuId :: !Text -- ^ required path parameter: the thread id
    , _utuCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'UsersThreadsUntrash' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'utuXgafv'
--
-- * 'utuUploadProtocol'
--
-- * 'utuAccessToken'
--
-- * 'utuUploadType'
--
-- * 'utuUserId'
--
-- * 'utuId'
--
-- * 'utuCallback'
usersThreadsUntrash
    :: Text -- ^ 'utuId'
    -> UsersThreadsUntrash
usersThreadsUntrash pUtuId_ =
  UsersThreadsUntrash'
    { _utuXgafv = Nothing
    , _utuUploadProtocol = Nothing
    , _utuAccessToken = Nothing
    , _utuUploadType = Nothing
    , _utuUserId = "me" -- \"me\" addresses the authenticated user
    , _utuId = pUtuId_
    , _utuCallback = Nothing
    }
-- | V1 error format.
utuXgafv :: Lens' UsersThreadsUntrash (Maybe Xgafv)
utuXgafv = lens _utuXgafv (\ s a -> s{_utuXgafv = a})
-- The lenses below mirror the query/path parameters one-to-one.
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
utuUploadProtocol :: Lens' UsersThreadsUntrash (Maybe Text)
utuUploadProtocol
  = lens _utuUploadProtocol
      (\ s a -> s{_utuUploadProtocol = a})
-- | OAuth access token.
utuAccessToken :: Lens' UsersThreadsUntrash (Maybe Text)
utuAccessToken
  = lens _utuAccessToken
      (\ s a -> s{_utuAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
utuUploadType :: Lens' UsersThreadsUntrash (Maybe Text)
utuUploadType
  = lens _utuUploadType
      (\ s a -> s{_utuUploadType = a})
-- | The user\'s email address. The special value \`me\` can be used to
-- indicate the authenticated user.
utuUserId :: Lens' UsersThreadsUntrash Text
utuUserId
  = lens _utuUserId (\ s a -> s{_utuUserId = a})
-- | The ID of the thread to remove from Trash.
utuId :: Lens' UsersThreadsUntrash Text
utuId = lens _utuId (\ s a -> s{_utuId = a})
-- | JSONP
utuCallback :: Lens' UsersThreadsUntrash (Maybe Text)
utuCallback
  = lens _utuCallback (\ s a -> s{_utuCallback = a})
-- Binds the request record to the route: response type, OAuth scopes,
-- and the threading of record fields into the generated client.
instance GoogleRequest UsersThreadsUntrash where
        type Rs UsersThreadsUntrash = Thread
        type Scopes UsersThreadsUntrash =
             '["https://mail.google.com/",
               "https://www.googleapis.com/auth/gmail.modify"]
        -- argument order must match 'UsersThreadsUntrashResource'
        requestClient UsersThreadsUntrash'{..}
          = go _utuUserId _utuId _utuXgafv _utuUploadProtocol
              _utuAccessToken
              _utuUploadType
              _utuCallback
              (Just AltJSON)
              gmailService
          where go
                  = buildClient
                      (Proxy :: Proxy UsersThreadsUntrashResource)
                      mempty
| brendanhay/gogol | gogol-gmail/gen/Network/Google/Resource/Gmail/Users/Threads/Untrash.hs | mpl-2.0 | 4,983 | 0 | 20 | 1,246 | 783 | 456 | 327 | 114 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DLP.Projects.Locations.DeidentifyTemplates.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a DeidentifyTemplate for re-using frequently used configuration
-- for de-identifying content, images, and storage. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-templates-deid to learn
-- more.
--
-- /See:/ <https://cloud.google.com/dlp/docs/ Cloud Data Loss Prevention (DLP) API Reference> for @dlp.projects.locations.deidentifyTemplates.create@.
module Network.Google.Resource.DLP.Projects.Locations.DeidentifyTemplates.Create
(
-- * REST Resource
ProjectsLocationsDeidentifyTemplatesCreateResource
-- * Creating a Request
, projectsLocationsDeidentifyTemplatesCreate
, ProjectsLocationsDeidentifyTemplatesCreate
-- * Request Lenses
, pldtcParent
, pldtcXgafv
, pldtcUploadProtocol
, pldtcAccessToken
, pldtcUploadType
, pldtcPayload
, pldtcCallback
) where
import Network.Google.DLP.Types
import Network.Google.Prelude
-- | A resource alias for @dlp.projects.locations.deidentifyTemplates.create@ method which the
-- 'ProjectsLocationsDeidentifyTemplatesCreate' request conforms to.
type ProjectsLocationsDeidentifyTemplatesCreateResource
=
"v2" :>
Capture "parent" Text :>
"deidentifyTemplates" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GooglePrivacyDlpV2CreateDeidentifyTemplateRequest
:> Post '[JSON] GooglePrivacyDlpV2DeidentifyTemplate
-- | Creates a DeidentifyTemplate for re-using frequently used configuration
-- for de-identifying content, images, and storage. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-templates-deid to learn
-- more.
--
-- /See:/ 'projectsLocationsDeidentifyTemplatesCreate' smart constructor.
data ProjectsLocationsDeidentifyTemplatesCreate =
  ProjectsLocationsDeidentifyTemplatesCreate'
    { _pldtcParent :: !Text -- ^ required path parameter: parent resource name
    , _pldtcXgafv :: !(Maybe Xgafv) -- ^ optional query parameters
    , _pldtcUploadProtocol :: !(Maybe Text)
    , _pldtcAccessToken :: !(Maybe Text)
    , _pldtcUploadType :: !(Maybe Text)
    , _pldtcPayload :: !GooglePrivacyDlpV2CreateDeidentifyTemplateRequest -- ^ required JSON request body
    , _pldtcCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsDeidentifyTemplatesCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pldtcParent'
--
-- * 'pldtcXgafv'
--
-- * 'pldtcUploadProtocol'
--
-- * 'pldtcAccessToken'
--
-- * 'pldtcUploadType'
--
-- * 'pldtcPayload'
--
-- * 'pldtcCallback'
projectsLocationsDeidentifyTemplatesCreate
    :: Text -- ^ 'pldtcParent'
    -> GooglePrivacyDlpV2CreateDeidentifyTemplateRequest -- ^ 'pldtcPayload'
    -> ProjectsLocationsDeidentifyTemplatesCreate
projectsLocationsDeidentifyTemplatesCreate pPldtcParent_ pPldtcPayload_ =
  ProjectsLocationsDeidentifyTemplatesCreate'
    { _pldtcParent = pPldtcParent_
    , _pldtcXgafv = Nothing -- optional query parameters start out unset
    , _pldtcUploadProtocol = Nothing
    , _pldtcAccessToken = Nothing
    , _pldtcUploadType = Nothing
    , _pldtcPayload = pPldtcPayload_
    , _pldtcCallback = Nothing
    }
-- | Required. Parent resource name. The format of this value varies
-- depending on the scope of the request (project or organization) and
-- whether you have [specified a processing
-- location](https:\/\/cloud.google.com\/dlp\/docs\/specifying-location): +
-- Projects scope, location specified:
-- \`projects\/\`PROJECT_ID\`\/locations\/\`LOCATION_ID + Projects scope,
-- no location specified (defaults to global): \`projects\/\`PROJECT_ID +
-- Organizations scope, location specified:
-- \`organizations\/\`ORG_ID\`\/locations\/\`LOCATION_ID + Organizations
-- scope, no location specified (defaults to global):
-- \`organizations\/\`ORG_ID The following example \`parent\` string
-- specifies a parent project with the identifier \`example-project\`, and
-- specifies the \`europe-west3\` location for processing data:
-- parent=projects\/example-project\/locations\/europe-west3
pldtcParent :: Lens' ProjectsLocationsDeidentifyTemplatesCreate Text
pldtcParent
  = lens _pldtcParent (\ s a -> s{_pldtcParent = a})
-- The lenses below mirror the query parameters and body one-to-one.
-- | V1 error format.
pldtcXgafv :: Lens' ProjectsLocationsDeidentifyTemplatesCreate (Maybe Xgafv)
pldtcXgafv
  = lens _pldtcXgafv (\ s a -> s{_pldtcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldtcUploadProtocol :: Lens' ProjectsLocationsDeidentifyTemplatesCreate (Maybe Text)
pldtcUploadProtocol
  = lens _pldtcUploadProtocol
      (\ s a -> s{_pldtcUploadProtocol = a})
-- | OAuth access token.
pldtcAccessToken :: Lens' ProjectsLocationsDeidentifyTemplatesCreate (Maybe Text)
pldtcAccessToken
  = lens _pldtcAccessToken
      (\ s a -> s{_pldtcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldtcUploadType :: Lens' ProjectsLocationsDeidentifyTemplatesCreate (Maybe Text)
pldtcUploadType
  = lens _pldtcUploadType
      (\ s a -> s{_pldtcUploadType = a})
-- | Multipart request metadata.
pldtcPayload :: Lens' ProjectsLocationsDeidentifyTemplatesCreate GooglePrivacyDlpV2CreateDeidentifyTemplateRequest
pldtcPayload
  = lens _pldtcPayload (\ s a -> s{_pldtcPayload = a})
-- | JSONP
pldtcCallback :: Lens' ProjectsLocationsDeidentifyTemplatesCreate (Maybe Text)
pldtcCallback
  = lens _pldtcCallback
      (\ s a -> s{_pldtcCallback = a})
-- Binds the request record to the route: response type, OAuth scope,
-- and how the record's fields are threaded into the generated client.
instance GoogleRequest
           ProjectsLocationsDeidentifyTemplatesCreate
         where
        type Rs ProjectsLocationsDeidentifyTemplatesCreate =
             GooglePrivacyDlpV2DeidentifyTemplate
        type Scopes
               ProjectsLocationsDeidentifyTemplatesCreate
             = '["https://www.googleapis.com/auth/cloud-platform"]
        -- note: the request body ('_pldtcPayload') is passed after the
        -- query parameters, matching the trailing 'ReqBody' in the route
        requestClient
          ProjectsLocationsDeidentifyTemplatesCreate'{..}
          = go _pldtcParent _pldtcXgafv _pldtcUploadProtocol
              _pldtcAccessToken
              _pldtcUploadType
              _pldtcCallback
              (Just AltJSON)
              _pldtcPayload
              dLPService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           ProjectsLocationsDeidentifyTemplatesCreateResource)
                      mempty
| brendanhay/gogol | gogol-dlp/gen/Network/Google/Resource/DLP/Projects/Locations/DeidentifyTemplates/Create.hs | mpl-2.0 | 7,294 | 0 | 17 | 1,448 | 799 | 474 | 325 | 123 | 1 |
-- NOTE(review): ':+:' has no definition or import in this file --
-- presumably a formatter test fixture for infix-constructor patterns;
-- confirm against the test harness before expecting this to compile.
func (x :+: xr) = x
| lspitzner/brittany | data/Test85.hs | agpl-3.0 | 20 | 0 | 7 | 6 | 16 | 8 | 8 | 1 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
--------------------------------------------------------------------------------
{-|
Module : TopLevelWindow
Copyright : (c) Jeremy O'Donoghue, 2007
License : wxWindows
Maintainer : wxhaskell-devel@lists.sourceforge.net
Stability : provisional
Portability : portable
wxTopLevelwindow (wxWidgets >= 2.8.0) defines an (abstract) common base class
for wxFrame and wxDialog.
In the wxHaskell implementation, TopLevel has been added to encapsulate
some of the common functionality between the 'Dialog' and 'Frame' modules.
* Instances: 'HasDefault', 'Pictured', 'Framed', 'Form', 'Closeable'
* Instances inherited from 'Window': 'Textual', 'Literate', 'Dimensions',
'Colored', 'Visible', 'Child', 'Able', 'Tipped', 'Identity',
'Styled', 'Reactive', 'Paint'.
-}
--------------------------------------------------------------------------------
module Graphics.UI.WX.TopLevelWindow
( initialResizeable
, initialMaximizeable
, initialMinimizeable
, initialCloseable
) where
import Graphics.UI.WXCore
import Graphics.UI.WX.Types
import Graphics.UI.WX.Attributes
import Graphics.UI.WX.Classes
import Graphics.UI.WX.Layout
import Graphics.UI.WX.Window
import Graphics.UI.WX.Events
-- The default control activated by return key
instance HasDefault (TopLevelWindow a) where
  -- untyped accessor: any 'Window' may be installed as the default item
  unsafeDefaultItem = newAttr "unsafeDefaultItem" getter setter
    where
      getter :: (TopLevelWindow a) -> IO (Window ())
      getter tlw = topLevelWindowGetDefaultItem tlw
      setter tlw win = topLevelWindowSetDefaultItem tlw win
  -- typed variant restricted to 'Button'
  defaultButton = newAttr "defaultButton" getter setter
    where
      getter :: (TopLevelWindow a) -> IO (Button ())
      getter tlw = topLevelWindowGetDefaultButton tlw
      setter tlw win = topLevelWindowSetDefaultButton tlw win
-- The icon of a frame.
instance Pictured (TopLevelWindow a) where
  -- write-only attribute: the icon is loaded from the given file path
  picture = writeAttr "picture" topLevelWindowSetIconFromFile
-- Defaults for framed TopLevel windows
instance Framed (TopLevelWindow a) where
  resizeable = windowResizeable
  maximizeable = windowMaximizeable
  minimizeable = windowMinimizeable
  closeable = windowCloseable
-- Default layout implementation
instance Form (Frame a) where
  layout = writeAttr "layout" windowSetLayout
-- Default window close
instance Closeable (Frame a) where
  -- NOTE(review): 'True' forces the close -- confirm the intended veto
  -- behaviour (the original author's "{- force? -}" marks the same doubt)
  close f
    = unitIO (windowClose f True {- force? -})
{--------------------------------------------------------------------------
Framed instances
--------------------------------------------------------------------------}
-- | Display a resize border on a 'Frame' or 'Dialog' window; this also
-- enables or disables the maximize box.
-- This attribute must be set at creation time.
windowResizeable :: CreateAttr (Window a) Bool
windowResizeable = reflectiveAttr "resizeable" readFlag writeFlag
  where
    -- report whether the resize-border bit is present in the style
    readFlag w
      = do stl <- get w style
           return (bitsSet wxRESIZE_BORDER stl)
    -- toggling the resize border also toggles the maximize box
    writeFlag w resize
      = set w [style :~ adjust]
      where
        adjust stl
          | resize    = stl .+. wxRESIZE_BORDER .+. wxMAXIMIZE_BOX
          | otherwise = stl .-. wxRESIZE_BORDER .-. wxMAXIMIZE_BOX
-- | Helper function that transforms the style according
-- to the 'windowResizeable' flag in the properties.
initialResizeable :: ([Prop (Window w)] -> Style -> a) -> [Prop (Window w)] -> Style -> a
initialResizeable
  = withStyleProperty windowResizeable (wxRESIZE_BORDER .+. wxMAXIMIZE_BOX)
-- | Display a maximize box on a 'Frame' or 'Dialog' window.
-- This attribute must be set at creation time.
windowMaximizeable :: CreateAttr (Window a) Bool
windowMaximizeable = reflectiveAttr "maximizeable" readFlag writeFlag
  where
    readFlag w
      = do stl <- get w style
           return (bitsSet wxMAXIMIZE_BOX stl)
    -- 'enable' avoids shadowing Prelude's 'max' (the original shadowed it)
    writeFlag w enable
      = set w [style :~ \stl -> if enable
                                  then stl .+. wxMAXIMIZE_BOX
                                  else stl .-. wxMAXIMIZE_BOX]
-- | Helper function that transforms the style according
-- to the 'windowMaximizeable' flag in the properties.
initialMaximizeable :: ([Prop (Window w)] -> Style -> a) -> [Prop (Window w)] -> Style -> a
initialMaximizeable
  = withStyleProperty windowMaximizeable wxMAXIMIZE_BOX
-- | Display a minimize box on a 'Frame' or 'Dialog' window.
-- This attribute must be set at creation time.
windowMinimizeable :: CreateAttr (Window a) Bool
windowMinimizeable
  = reflectiveAttr "minimizeable" getFlag setFlag
  where
    getFlag w
      = do s <- get w style
           return (bitsSet wxMINIMIZE_BOX s)
    -- 'min' here shadows Prelude.min (harmless, but easy to misread)
    setFlag w min
      = set w [style :~ \stl -> if min then stl .+. wxMINIMIZE_BOX else stl .-. wxMINIMIZE_BOX]
-- | Helper function that transforms the style according
-- to the 'windowMinimizeable' flag in the properties.
initialMinimizeable :: ([Prop (Window w)] -> Style -> a) -> [Prop (Window w)] -> Style -> a
initialMinimizeable
  = withStyleProperty windowMinimizeable wxMINIMIZE_BOX
-- | Display a close box on a 'Frame' or 'Dialog' window.
-- This attribute must be set at creation time.
windowCloseable :: CreateAttr (Window a) Bool
windowCloseable
  = reflectiveAttr "closeable" getFlag setFlag
  where
    getFlag w
      = do s <- get w style
           return (bitsSet wxCLOSE_BOX s)
    -- 'min' is a copy-paste name from the minimize variant (shadows Prelude.min)
    setFlag w min
      = set w [style :~ \stl -> if min then stl .+. wxCLOSE_BOX else stl .-. wxCLOSE_BOX]
-- | Helper function that transforms the style according
-- to the 'windowCloseable' flag in the properties.
-- (The original comment referred to 'windowMinimizable'; copy-paste slip.)
initialCloseable :: ([Prop (Window w)] -> Style -> a) -> [Prop (Window w)] -> Style -> a
initialCloseable
  = withStyleProperty windowCloseable wxCLOSE_BOX
| sherwoodwang/wxHaskell | wx/src/Graphics/UI/WX/TopLevelWindow.hs | lgpl-2.1 | 5,825 | 0 | 13 | 1,265 | 1,050 | 555 | 495 | 80 | 2 |
-- Basic types for the railway timetable.
type Station = String
-- | Travel time in minutes (cumulative from the start of a line).
type Tijd = Int
-- | A line: stations paired with their cumulative travel time.
type Connectie = [(Station, Tijd)]

-- | All stations in the network.
stations :: [Station]
stations = ["Alkmaar","Almelo","Almere Centrum","Amersfoort","Amsterdam Amstel","Amsterdam Centraal","Amsterdam Sloterdijk","Amsterdam Zuid","Apeldoorn","Arnhem","Assen","Beverwijk","Breda","Castricum","Delft","Den Helder","Deventer","Dieren","Dordrecht","Ede-Wageningen","Eindhoven","Enschede","Etten-Leur","Gouda","Groningen","Den Haag Centraal","Den Haag HS","Den Haag Laan van NOI [1]","Haarlem","Heemstede-Aerdenhout","Heerenveen","Heerlen","Helmond","Hengelo","'s-Hertogenbosch","Hilversum","Hoorn","Leeuwarden","Leiden Centraal","Lelystad Centrum","Maastricht","Nijmegen","Oss","Roermond","Roosendaal","Rotterdam Alexander","Rotterdam Blaak[1]","Rotterdam Centraal","Schiphol","Schiedam Centrum","Sittard","Steenwijk","Tilburg","Utrecht Centraal","Venlo","Vlissingen","Weert","Zaandam","Zutphen","Zwolle"]

-- | Intercity line Enschede -> Den Haag (partial).
--
-- Fixed from the original: the final pair was missing its closing
-- parenthesis (the file did not parse), and \"Amesfoort\" was a
-- misspelling of \"Amersfoort\" as listed in 'stations'.
ic_enschede_denhaag :: Connectie
ic_enschede_denhaag = [("Enschede", 0), ("Hengelo", 9), ("Almelo", 21), ("Deventer", 47), ("Apeldoorn", 57), ("Amersfoort", 86), ("Utrecht Centraal", 104)]
| CanvasHS/Canvas.hs | demo/nederland/Trains.hs | lgpl-2.1 | 1,047 | 0 | 6 | 62 | 281 | 186 | 95 | -1 | -1 |
{-# language DeriveFunctor, DeriveFoldable #-}
module Data.StrictList where
import Data.Monoid
import Data.Foldable as F
import Control.Applicative
import Control.Monad (ap)
-- | A list that is strict in both its elements and its spine: each
-- ':!' cell forces its head and its tail (note the bang patterns).
data StrictList a
  = !a :! !(StrictList a)
  | Empty
  deriving (Show, Functor, Foldable)
-- cons binds like ':' on ordinary lists
infixr 5 :!
-- NOTE(review): on GHC >= 8.4 'Monoid' has a 'Semigroup' superclass and
-- this module defines no 'Semigroup' instance -- confirm the target GHC.
instance Monoid (StrictList a) where
  mempty = Empty
  -- append: rebuilds the first list's spine in front of the second
  mappend (a :! r) bs = a :! mappend r bs
  mappend Empty bs = bs
-- Effects combine like the list monad: bind maps and concatenates.
instance Applicative StrictList where
    (<*>) = ap
    pure = return
instance Monad StrictList where
    m >>= k = F.foldr (mappend . k) Empty m
    m >> k = F.foldr (mappend . (\ _ -> k)) Empty m
    return x = singleton x
    -- NOTE(review): 'fail' stopped being a 'Monad' method in GHC 8.8
    -- (MonadFail proposal) -- confirm which GHC versions this targets
    fail _ = Empty
-- | Shorthand for 'StrictList'.
type SL a = StrictList a
-- | Build a strict list from an ordinary (lazy) list.
fromList :: [a] -> StrictList a
fromList = F.foldr (:!) Empty
-- | A one-element strict list.
singleton :: a -> StrictList a
singleton x = x :! Empty
-- | Zero-based indexing, partial like '(!!)': calls 'error' when the
-- index is negative or not smaller than the length of the list.
--
-- Improvement over the original: a negative index is rejected
-- immediately instead of walking the entire spine before erroring.
(!) :: StrictList a -> Int -> a
l ! n
    | n < 0     = error "(!): index out of bounds"
    | otherwise = go l n
  where
    go (a :! _) 0 = a
    go (_ :! r) i = go r (pred i)
    go Empty    _ = error "(!): index out of bounds"
-- | Keep exactly the elements satisfying the predicate
-- (strict-list analogue of 'filter').
slFilter :: (a -> Bool) -> StrictList a -> StrictList a
slFilter _ Empty = Empty
slFilter p (a :! r)
    | p a       = a :! rest
    | otherwise = rest
  where
    rest = slFilter p r
-- | Monadic filter (strict-list analogue of 'Control.Monad.filterM').
--
-- Bug fix: the previous version bound @filterM pred r@ once and then
-- re-ran it inside both branches, discarding the first result -- so the
-- monadic effects for the tail were performed twice at every cons cell.
-- The tail is now filtered exactly once.
filterM :: Monad m => (a -> m Bool) -> StrictList a -> m (StrictList a)
filterM _ Empty = return Empty
filterM pred (a :! r) = do
  c <- pred a
  r' <- filterM pred r
  return (if c then a :! r' else r')
-- | PRE: SL a is sorted.
-- Inserts an element to a list. The returned list will be sorted.
-- If the input list contained only unique elements, the output list will
-- do also. (This means, the element will not be inserted,
-- if it's already in the list.)
-- | Insert into a sorted list, keeping it sorted; an element that is
-- already present is not inserted again (so uniqueness is preserved).
insertUnique :: Ord a => a -> SL a -> SL a
insertUnique a Empty = singleton a
insertUnique a l@(b :! r)
    | a < b     = a :! l
    | a > b     = b :! insertUnique a r
    | otherwise = l -- already present: unchanged
-- | Remove the first occurrence of the given element, if any.
delete :: Eq a => a -> SL a -> SL a
delete _ Empty = Empty
delete x (a :! r)
    | x == a    = r
    | otherwise = a :! delete x r
| changlinli/nikki | src/Data/StrictList.hs | lgpl-3.0 | 2,069 | 0 | 12 | 581 | 831 | 429 | 402 | 66 | 3 |
import Test.Framework (defaultMain, testGroup)
import qualified Test.Vision.Image as I
import qualified Test.Vision.Histogram as H
-- | Runs the complete test suite: image tests and histogram tests.
main :: IO ()
main = defaultMain suites
  where
    suites =
        [ testGroup "Images"     I.tests
        , testGroup "Histograms" H.tests
        ]
| TomMD/friday | test/Test.hs | lgpl-3.0 | 248 | 0 | 8 | 50 | 70 | 41 | 29 | 7 | 1 |
-----------------------------------------------------------------------------
-- Copyright 2019, Ideas project team. This file is distributed under the
-- terms of the Apache License 2.0. For more information, see the files
-- "LICENSE.txt" and "NOTICE.txt", which are included in the distribution.
-----------------------------------------------------------------------------
-- |
-- Maintainer : bastiaan.heeren@ou.nl
-- Stability : provisional
-- Portability : portable (depends on ghc)
--
-----------------------------------------------------------------------------
module Ideas.Common.Rule.Recognizer
( -- * data type and type class
Recognizable(..), Recognizer
-- * Constructor functions
, makeRecognizer, makeRecognizerTrans
) where
import Data.Maybe
import Data.Semigroup as Sem
import Ideas.Common.Environment
import Ideas.Common.Rule.Transformation
import Ideas.Common.View
-----------------------------------------------------------
--- Data type and type class
-- | Things from which a 'Recognizer' can be obtained.
-- Minimal complete definition: 'recognizer' (it has no default).
class Recognizable f where
   recognizer     :: f a -> Recognizer a
   recognizeAll   :: f a -> a -> a -> [Environment]
   recognize      :: f a -> a -> a -> Maybe Environment
   recognizeTrans :: f a -> Trans (a, a) ()
   -- default definitions
   -- every way the pair is recognized, with the environment produced
   recognizeAll r a b = map snd $ transApply (recognizeTrans r) (a, b)
   -- first recognition only
   recognize r a b = listToMaybe $ recognizeAll r a b
   recognizeTrans = unR . recognizer
-- | A recognizer is a transformation over pairs of terms; each
-- successful application yields an 'Environment'.
newtype Recognizer a = R { unR :: Trans (a, a) () }
-- lifting through a view: match both components before recognizing
instance LiftView Recognizer where
   liftViewIn v r =
      let f = fmap fst . match v
      in R $ makeTrans f *** makeTrans f >>> unR r
-- composition delegates to the underlying 'Trans' monoid
instance Sem.Semigroup (Recognizer a) where
  f <> g = R $ unR f `mappend` unR g
instance Monoid (Recognizer a) where
  mempty = R mempty
  mappend = (<>)
instance Recognizable Recognizer where
  recognizer = id
instance HasRefs (Recognizer a) where
  allRefs = allRefs . unR
-----------------------------------------------------------
--- Constructor functions
-- | Build a recognizer from an equivalence test; it yields an empty
-- environment exactly when the two terms are related.
makeRecognizer :: (a -> a -> Bool) -> Recognizer a
makeRecognizer eq = makeRecognizerTrans (transMaybe check)
  where
    check (x, y)
      | eq x y    = Just ()
      | otherwise = Nothing
-- | Wrap a transformation over pairs of terms as a 'Recognizer'.
makeRecognizerTrans :: Trans (a, a) () -> Recognizer a
makeRecognizerTrans = R
import Text.Printf

-- | Euclidean distance between two points in the plane.
ans :: (Double, Double) -> (Double, Double) -> Double
ans (x1, y1) (x2, y2) = sqrt (dx * dx + dy * dy)
  where
    dx = x1 - x2
    dy = y1 - y2

-- Reads four whitespace-separated coordinates (x1 y1 x2 y2) from stdin
-- and prints the distance with eight digits after the decimal point.
main :: IO ()
main = do
  line <- getLine
  let (x1 : y1 : x2 : y2 : _) = map read (words line) :: [Double]
  printf "%.8f\n" (ans (x1, y1) (x2, y2))
| a143753/AOJ | ITP1_10_A.hs | apache-2.0 | 256 | 0 | 14 | 57 | 172 | 92 | 80 | 8 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1.Group where
import GHC.Generics
import Data.Text
import Kubernetes.V1.ObjectMeta
import qualified Data.Aeson
-- |
data Group = Group
  { kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
  , apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
  , metadata :: Maybe ObjectMeta -- ^ object metadata (see 'ObjectMeta')
  , users :: [Text] -- ^ list of users in this group
  } deriving (Show, Eq, Generic)
-- JSON codecs derived via 'Generic'; the record field names above
-- therefore double as the JSON object keys.
instance Data.Aeson.FromJSON Group
instance Data.Aeson.ToJSON Group
| minhdoboi/deprecated-openshift-haskell-api | openshift/lib/Openshift/V1/Group.hs | apache-2.0 | 1,124 | 0 | 9 | 173 | 119 | 73 | 46 | 18 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PackageImports #-}
{-
Copyright 2016 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Internal.Exports (
-- Miscellaneous types and functions
Number,
Text,
-- * Colors
Color(..),
Colour,
black,
white,
red,
green,
blue,
cyan,
magenta,
yellow,
aquamarine,
orange,
azure,
violet,
chartreuse,
rose,
brown,
pink,
purple,
gray,
grey,
mixed,
lighter,
light,
darker,
dark,
brighter,
bright,
duller,
dull,
translucent,
hue,
saturation,
luminosity,
fromHSL,
-- * Pictures
Point,
Vector,
vectorSum,
vectorDifference,
scaledVector,
rotatedVector,
dotProduct,
Picture,
Font(..),
TextStyle(..),
blank,
path,
thickPath,
polygon,
thickPolygon,
solidPolygon,
curve,
thickCurve,
loop,
thickLoop,
solidLoop,
rectangle,
solidRectangle,
thickRectangle,
circle,
solidCircle,
thickCircle,
arc,
sector,
thickArc,
text,
styledText,
colored,
coloured,
translated,
scaled,
dilated,
rotated,
pictures,
(&),
coordinatePlane,
codeWorldLogo,
-- * Events
Event(..),
MouseButton(..),
-- * Debugging
traced,
-- * Entry points
Program,
drawingOf,
animationOf,
simulationOf,
interactionOf,
collaborationOf
) where
import "base" Prelude (IO)
import Internal.Num
import Internal.Prelude
import Internal.Text
import Internal.Color
import Internal.Picture
import Internal.Event
import Internal.CodeWorld
| nomeata/codeworld | codeworld-base/src/Internal/Exports.hs | apache-2.0 | 2,255 | 0 | 5 | 648 | 347 | 234 | 113 | 99 | 0 |
module Move (Move(..), Move, Pos, Row, Column) where
import Player (Player)
import Stone (Stone)
-- | Board coordinates as (row, column), both zero-relative 'Int's
-- (exact origin/orientation is decided by the board code).
type Pos = (Row, Column)
type Row = Int
type Column = Int
-- | A single move: where it was played, by whom, and with which stone.
data Move = Move
  { position :: Pos -- ^ board position of the move
  , player :: Player -- ^ who made the move
  , stone :: Stone -- ^ the stone that was placed
  } deriving (Eq, Show)
| fendor/connect-four | src/Move.hs | bsd-2-clause | 286 | 0 | 8 | 85 | 103 | 66 | 37 | 11 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | The types of the package. This module is considered "internal", and the
-- types are re-exported from Graphics.UI.GLFW as necessary.
module Graphics.UI.GLFW.Types where
--------------------------------------------------------------------------------
import Data.Data (Data)
import Data.IORef (IORef)
import Data.Typeable (Typeable)
import Foreign.Ptr (Ptr)
import Foreign.C.Types (CUChar(..))
import GHC.Generics
import Bindings.GLFW
--------------------------------------------------------------------------------
-- Error handling

-- | An enum for one of the <http://www.glfw.org/docs/3.1/group__errors.html#ga196e125ef261d94184e2b55c05762f14 GLFW error codes>.
-- Each constructor mirrors one of the C-level @GLFW_*@ error constants.
data Error =
    Error'NotInitialized     -- ^ <http://www.glfw.org/docs/3.1/group__errors.html#ga2374ee02c177f12e1fa76ff3ed15e14a doc>
  | Error'NoCurrentContext   -- ^ <http://www.glfw.org/docs/3.1/group__errors.html#gaa8290386e9528ccb9e42a3a4e16fc0d0 doc>
  | Error'InvalidEnum        -- ^ <http://www.glfw.org/docs/3.1/group__errors.html#ga76f6bb9c4eea73db675f096b404593ce doc>
  | Error'InvalidValue       -- ^ <http://www.glfw.org/docs/3.1/group__errors.html#gaaf2ef9aa8202c2b82ac2d921e554c687 doc>
  | Error'OutOfMemory        -- ^ <http://www.glfw.org/docs/3.1/group__errors.html#ga9023953a2bcb98c2906afd071d21ee7f doc>
  | Error'ApiUnavailable     -- ^ <http://www.glfw.org/docs/3.1/group__errors.html#ga56882b290db23261cc6c053c40c2d08e doc>
  | Error'VersionUnavailable -- ^ <http://www.glfw.org/docs/3.1/group__errors.html#gad16c5565b4a69f9c2a9ac2c0dbc89462 doc>
  | Error'PlatformError      -- ^ <http://www.glfw.org/docs/3.1/group__errors.html#gad44162d78100ea5e87cdd38426b8c7a1 doc>
  | Error'FormatUnavailable  -- ^ <http://www.glfw.org/docs/3.1/group__errors.html#ga196e125ef261d94184e2b55c05762f14 doc>
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

--------------------------------------------------------------------------------

-- Initialization and version information

-- | The library version of the GLFW implementation in use.
-- See <http://www.glfw.org/docs/3.1/intro.html#intro_version Version Management>
data Version = Version
  { versionMajor    :: Int -- ^ major version number
  , versionMinor    :: Int -- ^ minor version number
  , versionRevision :: Int -- ^ revision number
  } deriving (Data, Eq, Ord, Read, Show, Typeable, Generic)
--------------------------------------------------------------------------------

-- Monitor handling

-- | Represents a physical monitor that's currently connected.
-- See the <http://www.glfw.org/docs/3.1/monitor.html Monitor Guide>
-- Wraps the raw C monitor pointer; treated as an opaque handle.
newtype Monitor = Monitor
  { unMonitor :: Ptr C'GLFWmonitor
  } deriving (Data, Eq, Ord, Show, Typeable, Generic)

-- | Part of the 'MonitorCallback', for when a monitor gets connected or disconnected.
data MonitorState =
    MonitorState'Connected
  | MonitorState'Disconnected
  deriving (Data, Eq, Ord, Read, Show, Typeable, Generic)

-- | See <http://www.glfw.org/docs/3.1/monitor.html#monitor_modes Video Modes>
-- Width/height are in screen coordinates, the bit depths are per channel,
-- and the refresh rate is in Hz (per the linked GLFW documentation).
data VideoMode = VideoMode
  { videoModeWidth       :: Int
  , videoModeHeight      :: Int
  , videoModeRedBits     :: Int
  , videoModeGreenBits   :: Int
  , videoModeBlueBits    :: Int
  , videoModeRefreshRate :: Int
  } deriving (Data, Eq, Ord, Read, Show, Typeable, Generic)
-- | Lets you adjust the gamma of a monitor. To ensure that only valid values are created, use 'makeGammaRamp'.
-- See <http://www.glfw.org/docs/3.1/monitor.html#monitor_gamma Gamma Ramp>.
-- Invariant: the three channel lists have equal length (enforced by
-- 'makeGammaRamp'; the raw constructor is deliberately not exported).
data GammaRamp = GammaRamp
  -- NOTE: It would be bad to give clients a way to construct invalid gamma ramps
  -- with lists of unequal length, so this constructor should not be exported.
  { gammaRampRed   :: [Int]
  , gammaRampGreen :: [Int]
  , gammaRampBlue  :: [Int]
  } deriving (Data, Eq, Ord, Read, Show, Typeable, Generic)
-- | Smart constructor for a 'GammaRamp'.  Yields 'Nothing' unless the three
-- channel lists have equal length, so only well-formed ramps can be built.
makeGammaRamp :: [Int] -> [Int] -> [Int] -> Maybe GammaRamp
makeGammaRamp reds greens blues =
    if sameLength
      then Just (GammaRamp reds greens blues)
      else Nothing
  where
    -- equal pairwise lengths across all three channels
    sameLength = length greens == length reds && length blues == length greens
--------------------------------------------------------------------------------

-- Window handling

-- | Collects all the callbacks that can be associated with a Window into a single place.
-- The 'IORef's hold the currently-installed C callback wrappers so they can
-- be replaced (and the old wrappers released) when a new callback is set.
data WindowCallbacks = WindowCallbacks
  { storedCharFun            :: IORef C'GLFWcharfun
  , storedCursorEnterFun     :: IORef C'GLFWcursorenterfun
  , storedCursorPosFun       :: IORef C'GLFWcursorposfun
  , storedFramebufferSizeFun :: IORef C'GLFWframebuffersizefun
  , storedKeyFun             :: IORef C'GLFWkeyfun
  , storedMouseButtonFun     :: IORef C'GLFWmousebuttonfun
  , storedScrollFun          :: IORef C'GLFWscrollfun
  , storedWindowCloseFun     :: IORef C'GLFWwindowclosefun
  , storedWindowFocusFun     :: IORef C'GLFWwindowfocusfun
  , storedWindowIconifyFun   :: IORef C'GLFWwindowiconifyfun
  , storedWindowPosFun       :: IORef C'GLFWwindowposfun
  , storedWindowRefreshFun   :: IORef C'GLFWwindowrefreshfun
  , storedWindowSizeFun      :: IORef C'GLFWwindowsizefun
  , storedDropFun            :: IORef C'GLFWdropfun
  }

-- | Represents a GLFW window value.
-- See the <http://www.glfw.org/docs/3.1/window.html Window Guide>
-- Wraps the raw C window pointer; treated as an opaque handle.
newtype Window = Window
  { unWindow :: Ptr C'GLFWwindow
  } deriving (Data, Eq, Ord, Show, Typeable, Generic)
-- | Lets you set various window hints before creating a 'Window'.
-- See <http://www.glfw.org/docs/3.1/window.html#window_hints Window Hints>,
-- particularly <http://www.glfw.org/docs/3.1/window.html#window_hints_values Supported and Default Values>.
data WindowHint =
    WindowHint'Resizable           Bool
  | WindowHint'Visible             Bool
  | WindowHint'Decorated           Bool
  | WindowHint'RedBits             Int
  | WindowHint'GreenBits           Int
  | WindowHint'BlueBits            Int
  | WindowHint'AlphaBits           Int
  | WindowHint'DepthBits           Int
  | WindowHint'StencilBits         Int
  | WindowHint'AccumRedBits        Int
  | WindowHint'AccumGreenBits      Int
  | WindowHint'AccumBlueBits       Int
  | WindowHint'AccumAlphaBits      Int
  | WindowHint'AuxBuffers          Int
  | WindowHint'Samples             Int
  | WindowHint'RefreshRate         Int
  | WindowHint'Stereo              Bool
  | WindowHint'sRGBCapable         Bool
  | WindowHint'ClientAPI           ClientAPI
  | WindowHint'ContextVersionMajor Int
  | WindowHint'ContextVersionMinor Int
  | WindowHint'ContextRobustness   ContextRobustness
  | WindowHint'OpenGLForwardCompat Bool
  | WindowHint'OpenGLDebugContext  Bool
  | WindowHint'OpenGLProfile       OpenGLProfile
  deriving (Data, Eq, Ord, Read, Show, Typeable, Generic)

-- | For use with the focus callback.
data FocusState =
    FocusState'Focused
  | FocusState'Defocused
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | For use with the iconify callback. (note: iconified means minimized)
data IconifyState =
    IconifyState'Iconified
  | IconifyState'NotIconified
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | The OpenGL robustness strategy.
data ContextRobustness =
    ContextRobustness'NoRobustness
  | ContextRobustness'NoResetNotification
  | ContextRobustness'LoseContextOnReset
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | The OpenGL profile.
data OpenGLProfile =
    OpenGLProfile'Any
  | OpenGLProfile'Compat
  | OpenGLProfile'Core
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | The type of OpenGL to create a context for.
data ClientAPI =
    ClientAPI'OpenGL
  | ClientAPI'OpenGLES
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)
--------------------------------------------------------------------------------

-- Input handling

-- | Part of the <http://www.glfw.org/docs/3.1/input.html#input_keyboard Keyboard Input> system.
-- NOTE(review): constructor order presumably mirrors the GLFW @GLFW_KEY_*@
-- C constants used elsewhere for conversion — confirm before reordering.
data Key =
    Key'Unknown
  | Key'Space
  | Key'Apostrophe
  | Key'Comma
  | Key'Minus
  | Key'Period
  | Key'Slash
  | Key'0
  | Key'1
  | Key'2
  | Key'3
  | Key'4
  | Key'5
  | Key'6
  | Key'7
  | Key'8
  | Key'9
  | Key'Semicolon
  | Key'Equal
  | Key'A
  | Key'B
  | Key'C
  | Key'D
  | Key'E
  | Key'F
  | Key'G
  | Key'H
  | Key'I
  | Key'J
  | Key'K
  | Key'L
  | Key'M
  | Key'N
  | Key'O
  | Key'P
  | Key'Q
  | Key'R
  | Key'S
  | Key'T
  | Key'U
  | Key'V
  | Key'W
  | Key'X
  | Key'Y
  | Key'Z
  | Key'LeftBracket
  | Key'Backslash
  | Key'RightBracket
  | Key'GraveAccent
  | Key'World1
  | Key'World2
  | Key'Escape
  | Key'Enter
  | Key'Tab
  | Key'Backspace
  | Key'Insert
  | Key'Delete
  | Key'Right
  | Key'Left
  | Key'Down
  | Key'Up
  | Key'PageUp
  | Key'PageDown
  | Key'Home
  | Key'End
  | Key'CapsLock
  | Key'ScrollLock
  | Key'NumLock
  | Key'PrintScreen
  | Key'Pause
  | Key'F1
  | Key'F2
  | Key'F3
  | Key'F4
  | Key'F5
  | Key'F6
  | Key'F7
  | Key'F8
  | Key'F9
  | Key'F10
  | Key'F11
  | Key'F12
  | Key'F13
  | Key'F14
  | Key'F15
  | Key'F16
  | Key'F17
  | Key'F18
  | Key'F19
  | Key'F20
  | Key'F21
  | Key'F22
  | Key'F23
  | Key'F24
  | Key'F25
  | Key'Pad0
  | Key'Pad1
  | Key'Pad2
  | Key'Pad3
  | Key'Pad4
  | Key'Pad5
  | Key'Pad6
  | Key'Pad7
  | Key'Pad8
  | Key'Pad9
  | Key'PadDecimal
  | Key'PadDivide
  | Key'PadMultiply
  | Key'PadSubtract
  | Key'PadAdd
  | Key'PadEnter
  | Key'PadEqual
  | Key'LeftShift
  | Key'LeftControl
  | Key'LeftAlt
  | Key'LeftSuper
  | Key'RightShift
  | Key'RightControl
  | Key'RightAlt
  | Key'RightSuper
  | Key'Menu
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | The state of an individual key when 'getKey' is called.
data KeyState =
    KeyState'Pressed
  | KeyState'Released
  | KeyState'Repeating
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | For use with the <http://www.glfw.org/docs/3.1/input.html#joystick Joystick Input> system.
data Joystick =
    Joystick'1
  | Joystick'2
  | Joystick'3
  | Joystick'4
  | Joystick'5
  | Joystick'6
  | Joystick'7
  | Joystick'8
  | Joystick'9
  | Joystick'10
  | Joystick'11
  | Joystick'12
  | Joystick'13
  | Joystick'14
  | Joystick'15
  | Joystick'16
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | If a given joystick button is pressed or not when 'getJoystickButtons' is called.
data JoystickButtonState =
    JoystickButtonState'Pressed
  | JoystickButtonState'Released
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | Part of the <http://www.glfw.org/docs/3.1/input.html#input_mouse Mouse Input> system.
data MouseButton =
    MouseButton'1
  | MouseButton'2
  | MouseButton'3
  | MouseButton'4
  | MouseButton'5
  | MouseButton'6
  | MouseButton'7
  | MouseButton'8
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | If the mouse button is pressed or not when 'getMouseButton' is called.
data MouseButtonState =
    MouseButtonState'Pressed
  | MouseButtonState'Released
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | If the mouse's cursor is in the window or not.
data CursorState =
    CursorState'InWindow
  | CursorState'NotInWindow
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | Allows for special forms of mouse input.
-- See <http://www.glfw.org/docs/3.1/input.html#cursor_mode Cursor Modes>
data CursorInputMode =
    CursorInputMode'Normal
  | CursorInputMode'Hidden
  | CursorInputMode'Disabled
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | When sticky keys is enabled, once a key is pressed it will remain pressed
-- at least until the state is polled with 'getKey'. After that, if the key has
-- been released it will switch back to released. This helps prevent problems
-- with low-resolution polling missing key presses. Note that use of the
-- callbacks to avoid this problem is the recommended route, and this is just
-- for a fallback.
data StickyKeysInputMode =
    StickyKeysInputMode'Enabled
  | StickyKeysInputMode'Disabled
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | This is the mouse version of "StickyKeysInputMode".
data StickyMouseButtonsInputMode =
    StickyMouseButtonsInputMode'Enabled
  | StickyMouseButtonsInputMode'Disabled
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

-- | Modifier keys that were pressed as part of another keypress event.
data ModifierKeys = ModifierKeys
  { modifierKeysShift   :: Bool
  , modifierKeysControl :: Bool
  , modifierKeysAlt     :: Bool
  , modifierKeysSuper   :: Bool
  } deriving (Data, Eq, Ord, Read, Show, Typeable, Generic)
--------------------------------------------------------------------------------
-- 3.1 Additions
--------------------------------------------------------------------------------

-- Orphan-ish standalone deriving so 'Image' (whose pixels are CUChar) can
-- itself derive 'Data'.
deriving instance Data CUChar

-- | GLFW image data, for setting up custom mouse cursor appearances.
data Image = Image
  { imageWidth  :: Int
  , imageHeight :: Int
  , imagePixels :: [CUChar]
  } deriving (Data, Eq, Ord, Read, Show, Typeable, Generic)

-- | Represents a GLFW cursor.
-- Wraps the raw C cursor pointer; treated as an opaque handle.
newtype Cursor = Cursor
  { unCursor :: Ptr C'GLFWcursor
  } deriving (Data, Eq, Ord, Show, Typeable, Generic)

-- | Lets you use one of the standard cursor appearances that the local
-- system theme provides for.
-- See <http://www.glfw.org/docs/3.1/input.html#cursor_standard Standard Cursor Creation>.
data StandardCursorShape =
    StandardCursorShape'Arrow
  | StandardCursorShape'IBeam
  | StandardCursorShape'Crosshair
  | StandardCursorShape'Hand
  | StandardCursorShape'HResize
  | StandardCursorShape'VResize
  deriving (Data, Enum, Eq, Ord, Read, Show, Typeable, Generic)

--------------------------------------------------------------------------------

{-# ANN module "HLint: ignore Use camelCase" #-}
| Peaker/GLFW-b | Graphics/UI/GLFW/Types.hs | bsd-2-clause | 13,856 | 0 | 11 | 2,602 | 2,291 | 1,350 | 941 | 329 | 1 |
--
-- Module : ContentType
-- Copyright : (c) Conrad Parker 2006
-- License : BSD-style
-- Maintainer : conradp@cse.unsw.edu.au
-- Stability : experimental
-- Portability : portable
module Codec.Container.Ogg.ContentType (
ContentType (..),
-- | A list of content type labels (eg. "Vorbis", "Theora") known by HOgg
knownContentTypes,
identify,
granulerate,
granuleshift,
parseType,
-- ContentTyped typeclass
ContentTyped,
contentTypeIs,
contentTypeOf,
contentTypeEq,
demuxByContentType,
-- Some guaranteed-known content-types
skeleton,
cmml,
flac,
speex,
celt,
theora,
vorbis
) where
import Data.Bits
import qualified Data.ByteString.Lazy as L
import Data.Char
import Data.List (find, sort)
import Data.Map (fromList)
import Data.Maybe
import Data.Ratio
import Text.Printf

import Codec.Container.Ogg.ByteFields
import Codec.Container.Ogg.Granulerate
import Codec.Container.Ogg.List
import Codec.Container.Ogg.MessageHeaders
import Codec.Container.Ogg.Timestamp
import Codec.Container.Ogg.TimeScheme
------------------------------------------------------------
-- | Typeclass: ContentTyped
--
class ContentTyped a where
  -- | Is this item of the given content type?
  contentTypeIs :: ContentType -> a -> Bool
  -- | The content type of this item, if it can be determined.
  contentTypeOf :: a -> Maybe ContentType

  -- | Two items compare equal iff both have a determinable content type
  -- and those types are equal; an undetermined type never matches.
  contentTypeEq :: (ContentTyped a, ContentTyped b) => a -> b -> Bool
  contentTypeEq a b = case (contentTypeOf a, contentTypeOf b) of
    (Just ca, Just cb) -> ca == cb
    _                  -> False
-- | Group a list of ContentTyped items by their Content-Type
demuxByContentType :: (ContentTyped a) => [a] -> [[a]]
demuxByContentType items = classify contentTypeEq items
------------------------------------------------------------
-- | Data: ContentType
--
-- A codec description: how to recognise its BOS page, how many header
-- packets it has, and how to interpret its granulepos values.
data ContentType =
  ContentType {
    label :: String,                  -- human-readable name, e.g. "Vorbis"
    mime :: [String],                 -- associated MIME types
    identifyP :: L.ByteString -> Bool, -- predicate, used by identify
    headers :: L.ByteString -> Int,   -- number of header packets, from BOS body
    preroll :: Int,                   -- packets of preroll needed for seeking
    granulerateF :: Maybe (L.ByteString -> Granulerate), -- used by granulerate
    granuleshiftF :: Maybe (L.ByteString -> Int), -- used by granuleshift
    metadata :: L.ByteString -> MessageHeaders -- headers extracted from BOS body
  }
-- | The labels of every content type this library knows about, sorted.
knownContentTypes :: [String]
knownContentTypes = sort (map label known)
-- | All content types this library can identify.
known :: [ContentType]
known = [skeleton, cmml, vorbis, theora, speex, celt, flac, oggpcm2]
-- | Identify the content type of a stream from its BOS page body, by
-- running each known type's identification predicate over the data.
-- Uses 'find' rather than @listToMaybe . filter@ so the scan stops at the
-- first match.
identify :: L.ByteString -> Maybe ContentType
identify d = find (\x -> identifyP x d) known
-- | Compute the granulerate for a stream, given its BOS page body;
-- 'Nothing' when the content type defines no granulerate (e.g. Skeleton).
granulerate :: ContentType -> L.ByteString -> Maybe Granulerate
granulerate c d = fmap ($ d) (granulerateF c)

-- | Compute the granuleshift for a stream, given its BOS page body;
-- 'Nothing' when the content type defines no granuleshift.
granuleshift :: ContentType -> L.ByteString -> Maybe Int
granuleshift c d = fmap ($ d) (granuleshiftF c)
-- | Look up a known content type by its label, case-insensitively.
parseType :: String -> Maybe ContentType
parseType name = listToMaybe (filter matchesName known)
  where
    matchesName c = lower (label c) == lower name
    lower         = map toLower
------------------------------------------------------------
-- Eq
--
-- Content types are compared by label only.
instance Eq ContentType where
  (==) a b = label a == label b

------------------------------------------------------------
-- Read
--
instance Read ContentType where
  readsPrec _ = readsContentType

-- | Lex one token from the input and yield every known content type whose
-- label matches it case-insensitively, paired with the remaining input.
readsContentType :: ReadS ContentType
readsContentType str = [(c, rest) | (tok, rest) <- lex str, c <- matches tok]
  where matches = \m -> filter (sameLabel m) known
        sameLabel m = \x -> l (label x) == l m
        l = map toLower
------------------------------------------------------------
-- Show
--
-- | A content type shows as its label (e.g. "Vorbis").
instance Show ContentType where
  show = label
------------------------------------------------------------
-- Skeleton
--
skeleton :: ContentType
skeleton = ContentType
             "Skeleton"                     -- label
             ["application/x-ogg-skeleton"] -- mime
             (L.isPrefixOf skeletonIdent)   -- identify
             (const 0)                      -- headers
             0                              -- preroll
             Nothing                        -- granulerate
             Nothing                        -- granuleshift
             skeletonMetadata

-- skeletonIdent = 'fishead\0'
skeletonIdent :: L.ByteString
skeletonIdent = L.pack [0x66, 0x69, 0x73, 0x68, 0x65, 0x61, 0x64, 0x00]

-- Extract the Presentation time, Basetime from Fishead (Skeleton BOS)
skeletonMetadata :: L.ByteString -> MessageHeaders
skeletonMetadata d = MessageHeaders (fromList headerVals)
  where headerVals = [prestime, basetime]
        prestime = ("Presentation-Time", [show (t 12 20)])
        basetime = ("Basetime", [show (t 28 36)])
        -- | Read a timestamp encoded as a (le64,le64) rational, where a
        -- denominator of 0 is interpreted as the result being 0.
        t o1 o2 = case od of
            0 -> Timestamp (0, 1)
            _ -> Timestamp (on, od)
          where
            on = le64At o1 d
            od = le64At o2 d
------------------------------------------------------------
-- CMML
--
cmml :: ContentType
cmml = ContentType
         "CMML"                             -- label
         ["text/x-cmml"]                    -- mime
         (L.isPrefixOf cmmlIdent)           -- identify
         (const 3)                          -- headers
         0                                  -- preroll
         (Just (\d -> fracRate (le64At 12 d) (le64At 20 d))) -- granulerate
         (Just (\d -> u8At 28 d))           -- granuleshift
         (const mhEmpty)

-- cmmlIdent = 'CMML\0\0\0\0\'
cmmlIdent :: L.ByteString
cmmlIdent = L.pack [0x43, 0x4d, 0x4d, 0x4c, 0x00, 0x00, 0x00, 0x00]
------------------------------------------------------------
-- Vorbis
--
vorbis :: ContentType
vorbis = ContentType
           "Vorbis"                         -- label
           ["audio/x-vorbis"]               -- mime
           (L.isPrefixOf vorbisIdent)       -- identify
           (const 3)                        -- headers
           2                                -- preroll
           (Just (\d -> intRate (le32At 12 d))) -- granulerate
           Nothing                          -- granuleshift
           vorbisMetadata

-- vorbisIdent = '\x01vorbis'
vorbisIdent :: L.ByteString
vorbisIdent = L.pack [0x01, 0x76, 0x6f, 0x72, 0x62, 0x69, 0x73]

-- Extract sample rate from Vorbis BOS header
-- (sample rate is a le32 at offset 12, channel count a u8 at offset 11)
vorbisMetadata :: L.ByteString -> MessageHeaders
vorbisMetadata d = MessageHeaders (fromList headerVals)
  where headerVals = [samplerate, channels]
        samplerate = ("Audio-Samplerate", [printf "%d Hz" srate])
        channels = ("Audio-Channels", [show c])
        srate = (le32At 12 d) :: Int
        c = (u8At 11 d) :: Int
------------------------------------------------------------
-- Theora
--
theora :: ContentType
theora = ContentType
           "Theora"                         -- label
           ["video/x-theora"]               -- mime
           (L.isPrefixOf theoraIdent)       -- identify
           (const 3)                        -- headers
           0                                -- preroll
           (Just (\d -> fracRate (be32At 22 d) (be32At 26 d))) -- granulerate
           (Just theoraGranuleshift)        -- granuleshift
           theoraMetadata                   -- metadata

-- theoraIdent = '\x80theora'
theoraIdent :: L.ByteString
theoraIdent = L.pack [0x80, 0x74, 0x68, 0x65, 0x6f, 0x72, 0x61]

-- Theora's granuleshift is an 8 bit field split over two bytes
-- (low 2 bits of byte 40 and high 3 bits of byte 41 of the BOS body)
theoraGranuleshift :: L.ByteString -> Int
theoraGranuleshift d = (h40 .|. h41)
  where h40 = (u8At 40 d .&. 0x03) `shiftL` 3
        h41 = (u8At 41 d .&. 0xe0) `shiftR` 5

-- Extract video dimensions etc. from the Theora BOS header
theoraMetadata :: L.ByteString -> MessageHeaders
theoraMetadata d = MessageHeaders (fromList headerVals)
  where headerVals = [framerate, width, height]
        framerate = ("Video-Framerate", [printf "%.3f fps%s" fps tsName])
        width = ("Video-Width", [show w])
        height = ("Video-Height", [show h])
        toDouble :: Integer -> Double -- monomorphic cast to double
        toDouble x = (fromIntegral x) :: Double
        fps = toDouble fpsN / toDouble fpsD
        mTS = guessTimeScheme (fpsN % fpsD)
        tsName = maybe "" (\x -> " (" ++ show x ++ ")") mTS
        fpsN = be32At 22 d
        fpsD = be32At 26 d
        -- frame dimensions are stored as multiples of 16 (macroblocks)
        w = ((be16At 10 d) * 16) :: Int
        h = ((be16At 12 d) * 16) :: Int
------------------------------------------------------------
-- Speex
--
speex :: ContentType
speex = ContentType
          "Speex"                           -- label
          ["audio/x-speex"]                 -- mime
          (L.isPrefixOf speexIdent)         -- identify
          (\d -> (le32At 68 d) + 2)         -- headers
          3                                 -- preroll
          (Just (\d -> intRate (le32At 36 d))) -- granulerate
          Nothing                           -- granuleshift
          speexMetadata

-- speexIdent = 'Speex   '
speexIdent :: L.ByteString
speexIdent = L.pack [0x53, 0x70, 0x65, 0x65, 0x78, 0x20, 0x20, 0x20]

-- Extract sample rate from Speex BOS header
speexMetadata :: L.ByteString -> MessageHeaders
speexMetadata d = MessageHeaders (fromList headerVals)
  where headerVals = [samplerate, channels]
        samplerate = ("Audio-Samplerate", [printf "%d Hz" srate])
        channels = ("Audio-Channels", [show c])
        srate = (le32At 36 d) :: Int
        c = (le32At 48 d) :: Int
------------------------------------------------------------
-- CELT
--
celt :: ContentType
celt = ContentType
         "CELT"                             -- label
         ["audio/x-celt"]                   -- mime
         (L.isPrefixOf celtIdent)           -- identify
         (\d -> (le32At 52 d) + 2)          -- headers
         3                                  -- preroll
         (Just (\d -> intRate (le32At 40 d))) -- granulerate
         Nothing                            -- granuleshift
         celtMetadata

-- celtIdent = 'CELT    '
celtIdent :: L.ByteString
celtIdent = L.pack [0x43, 0x45, 0x4c, 0x54, 0x20, 0x20, 0x20, 0x20]

-- Extract sample rate from CELT BOS header
celtMetadata :: L.ByteString -> MessageHeaders
celtMetadata d = MessageHeaders (fromList headerVals)
  where headerVals = [samplerate, channels]
        samplerate = ("Audio-Samplerate", [printf "%d Hz" srate])
        channels = ("Audio-Channels", [show c])
        srate = (le32At 40 d) :: Int
        c = (le32At 44 d) :: Int
------------------------------------------------------------
-- FLAC
--
flac :: ContentType
flac = ContentType
         "FLAC"                             -- label
         ["audio/x-flac"]                   -- mime
         (L.isPrefixOf flacIdent)           -- identify
         (\d -> be16At 7 d)                 -- headers
         0                                  -- preroll
         (Just flacGranulerate)             -- granulerate
         Nothing                            -- granuleshift
         flacMetadata

-- flacIdent = 0x7F:"FLAC"
flacIdent :: L.ByteString
flacIdent = L.pack [0x7f, 0x46, 0x4c, 0x41, 0x43, 0x01]

-- Extract sample rate from FLAC BOS header
flacMetadata :: L.ByteString -> MessageHeaders
flacMetadata d = MessageHeaders (fromList headerVals)
  where headerVals = [samplerate, channels, version]
        samplerate = ("Audio-Samplerate", [(show srate) ++ " Hz"])
        channels = ("Audio-Channels", [show c])
        version = ("FLAC-Ogg-Mapping-Version", [show vMaj ++ "." ++ show vMin])
        srate = flacGranulerate d
        c = 1 + (u8At 29 d `shiftR` 1) .&. 0x7 :: Int
        vMaj = u8At 5 d :: Integer
        vMin = u8At 6 d :: Integer

-- The sample rate is a 20-bit field spanning bytes 27..29 of the BOS body.
flacGranulerate :: L.ByteString -> Granulerate
flacGranulerate d = intRate $ h27 .|. h28 .|. h29
  where
    h27 = (u8At 27 d) `shiftL` 12
    h28 = (u8At 28 d) `shiftL` 4
    h29 = (u8At 29 d .&. 0xf0) `shiftR` 4
------------------------------------------------------------
-- OggPCM2: http://wiki.xiph.org/index.php/OggPCM2
--
oggpcm2 :: ContentType
oggpcm2 = ContentType
            "PCM"                           -- label
            ["audio/x-ogg-pcm"]             -- mime
            (L.isPrefixOf oggpcm2Ident)     -- identify
            (const 3)                       -- headers
            0                               -- preroll
            (Just (\d -> intRate (be32At 16 d))) -- granulerate
            Nothing                         -- granuleshift
            oggpcm2Metadata

-- oggpcm2Ident = 'PCM     '
oggpcm2Ident :: L.ByteString
oggpcm2Ident = L.pack [0x50, 0x43, 0x4D, 0x20, 0x20, 0x20, 0x20, 0x20]

-- Extract sample rate from OggPCM2 BOS header
oggpcm2Metadata :: L.ByteString -> MessageHeaders
oggpcm2Metadata d = MessageHeaders (fromList headerVals)
  where headerVals = [samplerate, channels]
        samplerate = ("Audio-Samplerate", [printf "%d Hz" srate])
        channels = ("Audio-Channels", [show c])
        srate = (be32At 16 d) :: Int
        c = (u8At 21 d) :: Int
| kfish/hogg | Codec/Container/Ogg/ContentType.hs | bsd-3-clause | 12,488 | 0 | 12 | 3,535 | 3,106 | 1,774 | 1,332 | 244 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ForeignFunctionInterface #-}
-- |
-- Module : Data.Array.Accelerate.C
-- Copyright : [2013] Manuel M T Chakravarty
-- License : BSD3
--
-- Maintainer : Manuel M T Chakravarty <chak@cse.unsw.edu.au>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- This module implements the sequential C backend for the embedded array language /Accelerate/.
--
module Data.Array.Accelerate.C (
runExpIO, runIO
) where
-- standard libraries
import Control.Applicative
import Control.Monad
import Foreign
import System.Directory
import System.Exit
import System.FilePath
import System.IO
import System.Process (system)
-- libraries
import qualified
Text.PrettyPrint.Mainland as C
import Language.C.Quote.C as C
import System.Posix.Temp
-- accelerate
import Data.Array.Accelerate.Analysis.Type as Sugar
import Data.Array.Accelerate.Array.Sugar as Sugar
import Data.Array.Accelerate.AST (Val(..))
import Data.Array.Accelerate.Smart (Exp, Acc)
import Data.Array.Accelerate.Trafo.Sharing (convertExp, convertAcc)
import Data.Array.Accelerate.Type
-- friends
import Data.Array.Accelerate.C.Acc
import Data.Array.Accelerate.C.Base
import Data.Array.Accelerate.C.Execute
import Data.Array.Accelerate.C.Exp
import Data.Array.Accelerate.C.Load
import Data.Array.Accelerate.C.Type
-- Execute a scalar Accelerate computation
-- ---------------------------------------

-- | Compile a scalar Accelerate computation to C code, run it, and return
-- the result.
--
-- Pipeline: generate a C function returning a heap-allocated scalar, write
-- it to a temp directory, compile it with the system C compiler, dynamically
-- load the object file, call it through a FFI dynamic wrapper, and marshal
-- the scalar back into Haskell.
--
runExpIO :: forall t. Elt t => Exp t -> IO t
runExpIO e
  = do
    { let e'    = convertExp True e
          ces   = expToC e'
          ctys  = tupleTypeToC $ expType e'
          resty = head ctys       -- we check for 'length ces == 1' further down
          cUnit = [cunit|
                    $edecls:cshapeDefs
                    $ty:resty * $id:cFunName ()
                    {
                      $ty:resty *result = malloc(sizeof($ty:resty));
                      *result = $exp:(head ces);
                      return result;
                    }
                  |]
      -- only single-component results are supported; guard the 'head' above
    ; unless (length ces == 1) $
        error "Data.Array.Accelerate.C.runExpIO: result type may neither be unit nor a tuple"
    ; tmpPath <- addTrailingPathSeparator <$> getTemporaryDirectory >>= mkdtemp
    --; tmpPath <- getTemporaryDirectory >>= mkdtemp
    ; logMsgLn $ "Data.Array.Accelerate.C: temporary directory: " ++ tmpPath
    ; let cFilePath = tmpPath </> cFile
          oFilePath = tmpPath </> oFile
    ; writeFile cFilePath $
        "#include <stdlib.h>\n" ++
        "#include <math.h>\n" ++
        "#include \"HsFFI.h\"\n" ++
        (show . C.ppr $ cUnit)
    ; logMsg "Data.Array.Accelerate.C: runExpIO: compiling..."
    ; ec <- system $ unwords $ [cCompiler, "-c", cOpts, "-I" ++ ffiLibDir, "-o", oFilePath, cFilePath]
    ; case ec of
        ExitFailure c -> error $ "Data.Array.Accelerate.C: C compiler failed with exit code " ++ show c
        ExitSuccess ->
          do
          { logMsg "loading..."
          ; mFunPtr <- loadAndLookup oFilePath cFunName
          ; case mFunPtr of
              Nothing -> error $ "Data.Array.Accelerate.C: unable to dynamically load generated code"
              Just funPtr ->
                do
                { logMsg "running..."
                ; resultPtr <- mkExpFun funPtr
                ; logMsg "peeking..."
                ; result <- toElt <$> peekSingleScalar (eltType (undefined::t)) resultPtr
                ; logMsg "unloading..."
                ; free resultPtr
                ; unload oFilePath
                ; logMsgLn "done"
                ; return result
                }
          } }
  where
    -- Marshal exactly one scalar out of the C result pointer.  The surface
    -- 'Elt' representation of a single scalar is ((), a), hence the pattern.
    peekSingleScalar :: TupleType a -> Ptr a -> IO a
    peekSingleScalar (PairTuple UnitTuple (SingleTuple t)) ptr = ((), ) <$> peekScalar t (castPtr ptr)
    peekSingleScalar _ _ptr = error "peekElt: impossible"

    peekScalar :: ScalarType a -> Ptr a -> IO a
    peekScalar (NumScalarType t) ptr = peekNumScalar t ptr
    peekScalar (NonNumScalarType t) ptr = peekNonNumScalar t ptr

    peekNumScalar :: NumType a -> Ptr a -> IO a
    peekNumScalar (IntegralNumType t) ptr = peekIntegral t ptr
    peekNumScalar (FloatingNumType t) ptr = peekFloating t ptr

    -- The per-type dispatch below exists to bring the right 'Storable'
    -- dictionary into scope for each concrete representation type.
    peekIntegral :: IntegralType a -> Ptr a -> IO a
    peekIntegral TypeInt{} ptr = peek ptr
    peekIntegral TypeInt8{} ptr = peek ptr
    peekIntegral TypeInt16{} ptr = peek ptr
    peekIntegral TypeInt32{} ptr = peek ptr
    peekIntegral TypeInt64{} ptr = peek ptr
    peekIntegral TypeWord{} ptr = peek ptr
    peekIntegral TypeWord8{} ptr = peek ptr
    peekIntegral TypeWord16{} ptr = peek ptr
    peekIntegral TypeWord32{} ptr = peek ptr
    peekIntegral TypeWord64{} ptr = peek ptr
    peekIntegral TypeCShort{} ptr = peek ptr
    peekIntegral TypeCUShort{} ptr = peek ptr
    peekIntegral TypeCInt{} ptr = peek ptr
    peekIntegral TypeCUInt{} ptr = peek ptr
    peekIntegral TypeCLong{} ptr = peek ptr
    peekIntegral TypeCULong{} ptr = peek ptr
    peekIntegral TypeCLLong{} ptr = peek ptr
    peekIntegral TypeCULLong{} ptr = peek ptr

    peekFloating :: FloatingType a -> Ptr a -> IO a
    peekFloating TypeFloat{} ptr = peek ptr
    peekFloating TypeDouble{} ptr = peek ptr
    peekFloating TypeCFloat{} ptr = peek ptr
    peekFloating TypeCDouble{} ptr = peek ptr

    peekNonNumScalar :: NonNumType a -> Ptr a -> IO a
    peekNonNumScalar TypeBool{} ptr = peek ptr
    peekNonNumScalar TypeChar{} ptr = peek ptr
    peekNonNumScalar TypeCChar{} ptr = peek ptr
    peekNonNumScalar TypeCSChar{} ptr = peek ptr
    peekNonNumScalar TypeCUChar{} ptr = peek ptr
-- Execute an Accelerate array computation
-- ---------------------------------------

-- | Compile an Accelerate array computation to C code, run it, and return
-- the result.
--
-- Mirrors 'runExpIO': generate C, write it to a fresh temp directory,
-- compile with the system C compiler, dynamically load the object file,
-- execute, then unload.
--
runIO :: (Shape sh, Elt e) => Acc (Array sh e) -> IO (Array sh e)
runIO acc
  = do
    { let acc'  = convertAcc True True True acc
          cacc  = accToC EmptyEnv acc'
          cUnit = [cunit|
                    $edecls:cshapeDefs
                    $edecl:cacc
                  |]
    ; tmpPath <- addTrailingPathSeparator <$> getTemporaryDirectory >>= mkdtemp
    ; logMsgLn $ "Data.Array.Accelerate.C: temporary directory: " ++ tmpPath
    ; let cFilePath = tmpPath </> cFile
          oFilePath = tmpPath </> oFile
    ; writeFile cFilePath $
        "#include <stdlib.h>\n" ++
        "#include <math.h>\n" ++
        "#include \"HsFFI.h\"\n" ++
        (show . C.ppr $ cUnit)
      -- fixed: the log message previously said "runExpIO" here
    ; logMsg "Data.Array.Accelerate.C: runIO: compiling..."
    ; ec <- system $ unwords $ [cCompiler, "-c", cOpts, "-I" ++ ffiLibDir, "-o", oFilePath, cFilePath]
    ; case ec of
        ExitFailure c -> error $ "Data.Array.Accelerate.C: C compiler failed with exit code " ++ show c
        ExitSuccess ->
          do
          { logMsg "loading..."
          ; ok <- load oFilePath
          ; unless ok $
              error $ "Data.Array.Accelerate.C: unable to dynamically load generated code"
          ; logMsg "running..."
          ; result <- accExec Empty acc'
          ; logMsg "unloading..."
          ; unload oFilePath
          ; logMsgLn "done"
          ; return result
          } }
-- Constants
-- ---------

-- | Name of the generated C source file inside the temp directory.
cFile :: FilePath
cFile = "accelerate.c"

-- | Name of the compiled object file inside the temp directory.
oFile :: FilePath
oFile = "accelerate.o"

-- | C compiler executable, resolved via PATH.
cCompiler :: FilePath
cCompiler = "cc"

-- | Flags passed to the C compiler.
cOpts :: String
-- cOpts = "-O2 -w"
cOpts = "-O2"

-- IMPORTANT: check this path!
--
-- The default value is for the Haskell Platform with GHC 7.6.3 on OS X.
--
-- Hard-coded include path for GHC's "HsFFI.h"; must match the local GHC
-- installation or compilation of the generated C will fail.
ffiLibDir :: FilePath
ffiLibDir = "/usr/local/lib/ghc-7.6.3/include"
-- Tracing
-- -------

-- | Write a progress message to stderr and flush immediately, so partial
-- lines appear while long-running steps (compiling, loading) are under way.
logMsg :: String -> IO ()
logMsg msg = do
  hPutStr stderr msg
  hFlush stderr

-- | Like 'logMsg', but terminates the line.
logMsgLn :: String -> IO ()
logMsgLn msg = logMsg (msg ++ "\n")
-- Foreign imports
-- ---------------

-- Dynamic FFI wrapper: turns a function pointer obtained from the loaded
-- object file into a callable Haskell action returning the result pointer.
foreign import ccall "dynamic"
  mkExpFun :: FunPtr (IO (Ptr a)) -> IO (Ptr a)
| AndrewWUw/cs9181 | Data/Array/Accelerate/C.hs | bsd-3-clause | 8,187 | 118 | 17 | 2,305 | 1,751 | 991 | 760 | 157 | 30 |
------------------------------------------------------------------------------
-- | Parsers and renderers for XML and HTML 5. Although the formats are
-- treated differently, the data types used by each are the same, which
-- makes it easy to write code that works with the element structure of
-- either XML or HTML 5 documents.
--
-- Limitations:
--
-- * The XML parser does not parse internal DOCTYPE subsets. They are just
-- stored as blocks of text, with minimal scanning done to match quotes
-- and brackets to determine the end.
--
-- * Since DTDs are not parsed, the XML parser fails on entity references,
-- except for those defined internally. You cannot use this library for
-- parsing XML documents with entity references outside the predefined
-- set.
--
-- * The HTML 5 parser is not a compliant HTML parser. Instead, it is a
-- parser for valid HTML 5 content. It should only be used on content
-- that you have reason to believe is probably correct, since the
-- compatibility features of HTML 5 are missing. This is the wrong
-- library on which to build a web spider.
--
-- * Both parsers accept fragments of documents, by which is meant that
-- they do not enforce the top-level structure of the document. Files
-- may contain more than one root element, for example.
module Text.XmlHtml (
-- * Types
Document(..),
Node(..),
DocType(..),
ExternalID(..),
InternalSubset(..),
Encoding(..),
-- * Manipulating documents
isTextNode,
isComment,
isElement,
tagName,
getAttribute,
hasAttribute,
setAttribute,
nodeText,
childNodes,
childElements,
childElementsTag,
childElementTag,
descendantNodes,
descendantElements,
descendantElementsTag,
descendantElementTag,
-- * Parsing
parseXML,
parseHTML,
-- * Rendering
render,
XMLP.docTypeDecl,
XML.renderXmlFragment,
HTML.renderHtmlFragment
) where
------------------------------------------------------------------------------
import Blaze.ByteString.Builder (Builder)
import Data.ByteString (ByteString)
import Text.XmlHtml.Common
import Text.XmlHtml.TextParser
import qualified Text.XmlHtml.XML.Parse as XMLP
import qualified Text.XmlHtml.XML.Parse as XML
import qualified Text.XmlHtml.XML.Render as XML
import qualified Text.XmlHtml.HTML.Parse as HTML
import qualified Text.XmlHtml.HTML.Render as HTML
------------------------------------------------------------------------------
-- | Parse the given XML fragment.  The first argument names the input
-- source (for example, a file name) and is used only in error messages;
-- the second argument is the raw document bytes.  Yields either an
-- error message or the parsed 'Document'.
parseXML :: String -> ByteString -> Either String Document
parseXML sourceName contents = parse XML.docFragment sourceName contents
------------------------------------------------------------------------------
-- | Parse the given HTML fragment.  This enables HTML quirks mode,
-- which changes the parsing algorithm so that valid HTML 5 documents
-- parse correctly.  The first argument names the input source (for
-- error messages only); the second is the document bytes.
parseHTML :: String -> ByteString -> Either String Document
parseHTML sourceName contents = parse HTML.docFragment sourceName contents
------------------------------------------------------------------------------
-- | Render a 'Document' to a 'Builder', dispatching on whether the
-- document was built as XML or as HTML.
render :: Document -> Builder
render doc = case doc of
    XmlDocument  e dt ns -> XML.render  e dt ns
    HtmlDocument e dt ns -> HTML.render e dt ns
| silkapp/xmlhtml | src/Text/XmlHtml.hs | bsd-3-clause | 3,645 | 0 | 7 | 801 | 368 | 246 | 122 | 49 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
module TimeSeriesData.Retrieve where
import Servant
import Servant.Client
import System.IO.Unsafe (unsafeInterleaveIO)
import Control.Monad.Trans.Either
import Data.Maybe (catMaybes)
import Data.List (nub)
import Data.Text as T (Text, lines)
import TimeSeriesData.Types
import Time
-- | Fetch time-series data for the given tags over the interval
-- @[start, end]@ from the server at @url:port@.  Duplicate tag names
-- are requested only once, and tags that yield no points are dropped
-- from the result.  Times in the result are stored as offsets from
-- 'refTime'.
getTSData :: String -> Int -> [String] -> UTCTime -> UTCTime -> IO TSData
getTSData url port tagNames start end = do
    let tagNames' = nub tagNames
    pointss <- mapM (getTSPoints url port start end) tagNames'
    -- Keep only the tags that actually produced data points.
    let tagged = filter (not . null . snd) (zip tagNames' pointss)
    return $ TSData (diffUTCTime start refTime) (diffUTCTime end refTime) tagged
-- | Fetch the raw points for a single tag.  The request is performed
-- lazily ('unsafeInterleaveIO'): the HTTP call is only issued when the
-- result list is actually demanded.  Any transport error yields an
-- empty list; response lines that fail to decode are skipped.
getTSPoints :: String -> Int -> UTCTime -> UTCTime -> String -> IO [TSPoint]
getTSPoints url port start end tagName = unsafeInterleaveIO $
    fmap (either (const []) (catMaybes . map decode . T.lines))
         (runEitherT (getPoints' url port (Just tagName) (Just start) (Just end)))
-- | Servant description of the data endpoint:
-- @GET /?q=\<tag\>&s=\<start\>&e=\<end\>@ returning the points as plain
-- text, one per line.
type DataAPI = QueryParam "q" String
            :> QueryParam "s" UTCTime
            :> QueryParam "e" UTCTime
            :> Get '[PlainText] Text
-- | Client function generated from 'DataAPI': performs the HTTP GET
-- against the given host and port over plain (non-TLS) HTTP.
getPoints' :: String -> Int -> Maybe String -> Maybe UTCTime -> Maybe UTCTime
           -> EitherT ServantError IO Text
getPoints' url port = client (Proxy :: Proxy DataAPI) $ BaseUrl Http url port
| hectorhon/autotrace2 | src/TimeSeriesData/Retrieve.hs | bsd-3-clause | 1,343 | 0 | 13 | 268 | 452 | 234 | 218 | 29 | 1 |
module Main
( main -- :: IO ()
) where
import Text.Search.Whistlepig
-- | Example entry point; currently a no-op placeholder.
main :: IO ()
main = return ()
| thoughtpolice/hs-whistlepig | examples/ex1.hs | bsd-3-clause | 116 | 0 | 6 | 36 | 36 | 21 | 15 | 5 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.APPLE.VertexArrayObject
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/APPLE/vertex_array_object.txt APPLE_vertex_array_object> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.APPLE.VertexArrayObject (
-- * Enums
gl_VERTEX_ARRAY_BINDING_APPLE,
-- * Functions
glBindVertexArrayAPPLE,
glDeleteVertexArraysAPPLE,
glGenVertexArraysAPPLE,
glIsVertexArrayAPPLE
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/APPLE/VertexArrayObject.hs | bsd-3-clause | 857 | 0 | 4 | 97 | 58 | 46 | 12 | 8 | 0 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Pepino.Renderers.Ansi () where
import Pepino
import Text.PrettyPrint.ANSI.Leijen
-- NOTE: the '<$>' used below is Text.PrettyPrint.ANSI.Leijen's
-- "concatenate with a line break" combinator, not Functor's fmap.

-- | A feature renders as its section header, then the (possibly empty)
-- background, then all scenarios.
instance Pretty Feature where
  pretty (Feature title description background scenarios) =
    section "Feature:" title description
    <> pretty background
    <$> asDocs scenarios

-- | A background with neither title nor description renders as nothing.
instance Pretty Background where
  pretty (Background "" "") = empty
  pretty (Background title description) = section "Background:" title description

-- | Plain scenarios render their steps; outlines also render their
-- example tables.
instance Pretty Scenario where
  pretty (Scenario title description steps) =
    section "Scenario:" title description
    <$> asDocs steps
  pretty (Outline title description steps examples) =
    section "Scenario Outline:" title description
    <$> asDocs steps
    <$> asDocs examples

instance Pretty Examples where
  pretty (Examples title description) = section "Examples:" title description

-- | Step keywords carry leading padding inside the literal, presumably
-- to align them under each other in the output.
instance Pretty Step where
  pretty (Given sentence) = step "Given" sentence
  pretty (When sentence) = step " When" sentence
  pretty (Then sentence) = step " Then" sentence
  pretty (And sentence) = step " And" sentence
  pretty (But sentence) = step " But" sentence
-- | Build a section header: the keyword in bold, underlined yellow,
-- followed by the rendered title and description.
section :: String -> Title -> Description -> Doc
section name title description = header <> render title description
  where
    header = (bold . underline . yellow . text) name
-- | Render a title/description pair.  Pattern order matters: both
-- empty -> just a line break; title only -> the title on one line;
-- otherwise the title and the description on separate lines.
render :: Title -> Description -> Doc
render "" "" = linebreak
render title "" = space <> white (text title) <> linebreak
render title description = space <> white (text title <$> text description)
-- | Lay out a list of pretty-printable items vertically, terminated by
-- a line break.
asDocs :: (Pretty a) => [a] -> Doc
asDocs items = vsep rendered <> linebreak
  where rendered = map pretty items
-- | Render one step: the keyword in bold yellow, the sentence in green,
-- the whole line indented four spaces.
step :: String -> Sentence -> Doc
step keyword sentence = indent 4 (keywordDoc <+> sentenceDoc)
  where
    keywordDoc  = (bold . yellow . text) keyword
    sentenceDoc = (green . text) sentence
| franckrasolo/pepino | core/src/Pepino/Renderers/Ansi.hs | bsd-3-clause | 1,734 | 0 | 10 | 352 | 570 | 282 | 288 | 35 | 1 |
module Math.Topology.SSet where
import Control.Monad (ap)
import qualified Control.Category.Constrained as Constrained
import Data.Maybe (isJust)
import Prelude hiding (Bounded)
-- NOTE: This should be made much more efficient. First, it could be
-- flattened so that in a degenerate simplex you have immediate access
-- to the underlying non-degenerate simplex. Also, the list of ints is
-- always strictly decreasing, and so could be stored as a bit mask as
-- is done in Kenzo. Can use pattern synonyms to make this
-- indistinguishable from what is used currently, but a lot of the
-- algorithms could then be done using bit operations.
-- NOTE: Another idea for efficiency: the degeneracy operator should
-- be strict, but there should be a shortcut to determine whether a
-- simplex is degenerate or not without calculating the entire degeneracy
-- operator.
-- | A formal degeneracy: either a non-degenerate element, or a chain of
-- degeneracy operators @s_i@ applied to one.  Invariant (maintained by
-- 'degen', see also the note above): the stored indices are strictly
-- decreasing from the outside in.
data FormalDegen a
  = NonDegen a
  | Degen Int (FormalDegen a)
  deriving (Eq, Functor)
  deriving (Constrained.Functor (->) (->)) via (Constrained.Wrapped FormalDegen)
-- | Degeneracies display as @s_i@ prefixes, e.g. @s_1 s_0 x@.
instance Show a => Show (FormalDegen a) where
  show (NonDegen a) = show a
  show (Degen i a) = "s_" ++ show i ++ " " ++ show a
instance Applicative FormalDegen where
  pure = NonDegen
  (<*>) = ap

-- | Substitution: replace the underlying element and re-apply the
-- degeneracy operators via 'degen', which restores the index invariant.
instance Monad FormalDegen where
  (NonDegen s) >>= f = f s
  (Degen i s) >>= f = degen (s >>= f) i
-- | Is this simplex formally degenerate?
isDegen :: FormalDegen a -> Bool
isDegen x = case x of
  NonDegen _ -> False
  Degen _ _  -> True
-- | Strip all degeneracy operators, returning the underlying
-- non-degenerate element.
underlyingGeom :: FormalDegen a -> a
underlyingGeom = go
  where
    go (Degen _ inner) = go inner
    go (NonDegen s)    = s
-- | Apply the degeneracy operator @s_i@, rewriting with the simplicial
-- relations (pushing the smaller index inward and bumping the larger)
-- so that the stored indices stay strictly decreasing outside-in.
degen :: FormalDegen a -> Int -> FormalDegen a
degen (Degen j s) i | i <= j = Degen (j + 1) (degen s i)
degen s i = Degen i s
-- | The degeneracy indices, outermost first (strictly decreasing).
degenList :: FormalDegen a -> [Int]
degenList (Degen i rest) = i : degenList rest
degenList (NonDegen _)   = []
-- | How many degeneracy operators are applied.
-- (The index is irrelevant here, so it is not bound.)
degenCount :: FormalDegen a -> Int
degenCount (NonDegen _) = 0
degenCount (Degen _ s) = 1 + degenCount s
-- In this representation, we just need to check that the index is
-- somewhere in the list. (Not necessarily the first thing)
-- | Is this simplex in the image of the degeneracy operator @s_i@?
-- Relies on the strictly-decreasing index invariant: once an index
-- smaller than @i@ is seen, @i@ cannot occur further in.
isImageOfDegen :: FormalDegen a -> Int -> Bool
isImageOfDegen (NonDegen _) _ = False
isImageOfDegen (Degen j s) i
  | i == j = True
  | i > j = False -- We missed it, it can't be further down.
  | otherwise = isImageOfDegen s i
-- | The constant (maximally degenerate) simplex on @a@ in dimension
-- @n@: @s_(n-1) ... s_1 s_0@ applied to @a@.  Guarding with @n <= 0@
-- (rather than matching 0 exactly) prevents non-termination on a
-- negative argument.
constantAt :: a -> Int -> FormalDegen a
constantAt a n
  | n <= 0    = NonDegen a
  | otherwise = Degen (n - 1) (constantAt a (n - 1))
-- The following are dangerous and only make sense in certain situations.
-- | Shift every degeneracy index up by @n@.  Dangerous in general (see
-- the note above): only meaningful when the caller knows the result is
-- still well-formed.
downshiftN :: Int -> FormalDegen a -> FormalDegen a
downshiftN _ (NonDegen s) = NonDegen s
downshiftN n (Degen i s) = Degen (i + n) (downshiftN n s)
-- | Shift every degeneracy index up by one.
downshift :: FormalDegen a -> FormalDegen a
downshift s = downshiftN 1 s
-- | Remove the degeneracy operators whose indices appear in the given
-- list (outermost first), renumbering the surviving indices.  The list
-- is expected to be a sub-sequence of 'degenList' of the argument;
-- otherwise this raises an error instead of the original silent
-- 'undefined'.
unDegen :: FormalDegen a -> [Int] -> FormalDegen a
unDegen s [] = s
unDegen (NonDegen _) _ =
  -- Impossible for well-formed input: no operators left to remove.
  error "Math.Topology.SSet.unDegen: index list does not match degeneracies"
unDegen (Degen i s) js'@(j : js)
  | i == j = unDegen s js
  | otherwise = Degen (i - length js') (unDegen s js')
-- | A (possibly degenerate) simplex of the simplicial set @a@.
type Simplex a = FormalDegen (GeomSimplex a)

-- | A simplicial set, described by its non-degenerate ("geometric")
-- simplices and their faces.  Faces of a non-degenerate simplex may
-- themselves be formal degeneracies.
class Eq (GeomSimplex a) => SSet a where
  -- NOTE: Maybe this shouldn't be an associated type, instead just
  -- another parameter to the typeclass

  -- NOTE: Or we could even reverse things, so that GeomSimplex is the
  -- class and SSet is the associated type.

  type GeomSimplex a = s | s -> a

  -- In a language with dependent types, this could be folded into the
  -- GeomSimplex type.
  isGeomSimplex :: a -> GeomSimplex a -> Bool
  isGeomSimplex _ _ = True

  geomSimplexDim :: a -> GeomSimplex a -> Int
  -- geomSimplexDim a s = length (geomFaces a s)

  -- | The @i@-th face of a non-degenerate simplex.
  geomFace :: a -> GeomSimplex a -> Int -> Simplex a

  -- | All faces, in order; empty in dimension 0.
  geomFaces :: a -> GeomSimplex a -> [Simplex a]
  geomFaces a s =
    let d = geomSimplexDim a s
    in if d == 0 then [] else fmap (geomFace a s) [0 .. d]
-- TODO: for efficiency?
-- nonDegenFaces :: a -> GeomSimplex a -> [(Int, Simplex a)]
-- | Validate a formal simplex, returning its dimension when it is a
-- genuine simplex of @a@ (each degeneracy index must be at most the
-- dimension of what it is applied to).
isSimplex' :: SSet a => a -> Simplex a -> Maybe Int
isSimplex' a (NonDegen s)
  | isGeomSimplex a s = Just (geomSimplexDim a s)
  | otherwise         = Nothing
isSimplex' a (Degen i s) = do
  innerDim <- isSimplex' a s
  if i <= innerDim
    then Just (innerDim + 1)
    else Nothing
-- | Is this formal simplex a valid simplex of @a@?
isSimplex :: SSet a => a -> Simplex a -> Bool
isSimplex a = isJust . isSimplex' a
-- | Dimension of a (possibly degenerate) simplex: each degeneracy
-- raises the underlying geometric dimension by one.
-- (The degeneracy index is irrelevant, so it is not bound.)
simplexDim :: SSet a => a -> Simplex a -> Int
simplexDim a (NonDegen s) = geomSimplexDim a s
simplexDim a (Degen _ s) = 1 + simplexDim a s
-- | The @i@-th face of a simplex, pushing the face operator past the
-- degeneracy operators via the simplicial identities; when the face
-- meets a matching degeneracy (@i == j@ or @i == j + 1@) the two
-- cancel and the inner simplex is returned unchanged.
face :: SSet a => a -> Simplex a -> Int -> Simplex a
face a (NonDegen s) i = geomFace a s i
face a (Degen j s) i
  | i < j = degen (face a s i) (j - 1)
  | i > j + 1 = degen (face a s (i - 1)) j
  | otherwise = s
-- | Does @t@ have @s@ (non-degenerately) among its faces?
hasFace :: SSet a => a -> GeomSimplex a -> GeomSimplex a -> Bool
hasFace a t s = any (== NonDegen s) (geomFaces a t)
-- | The 0th face.
frontFace :: SSet a => a -> Simplex a -> Simplex a
frontFace a simplex = face a simplex 0
-- | The top face, indexed by the simplex's own dimension.
backFace :: SSet a => a -> Simplex a -> Simplex a
backFace a simplex = face a simplex (simplexDim a simplex)
-- | Simplicial sets with finitely many non-degenerate simplices in
-- each dimension.
class SSet a => FiniteType a where
  -- | The non-degenerate simplices of dimension @n@.  Expected to
  -- satisfy @all isSimplex (geomBasis n)@ (up to 'NonDegen' wrapping).
  geomBasis :: a -> Int -> [GeomSimplex a]
-- | All simplices (degenerate or not) of dimension @n@: the
-- non-degenerate basis plus every admissible degeneracy of an
-- @(n-1)@-simplex.  New outer indices are generated strictly above the
-- current outermost one, so each degenerate simplex is produced
-- exactly once in canonical form.
-- (Unused pattern bindings are replaced by wildcards.)
allSimplices :: (FiniteType a) => a -> Int -> [Simplex a]
allSimplices a n
  | n < 0 = []
  | otherwise =
      fmap NonDegen (geomBasis a n) ++ (degensOf =<< allSimplices a (n - 1))
  where
    degensOf s@(NonDegen _) = fmap (\i -> Degen i s) [0 .. simplexDim a s]
    degensOf s@(Degen j _) = fmap (\i -> Degen i s) [(j + 1) .. simplexDim a s]
-- | Simplicial sets whose non-degenerate simplices are confined to the
-- listed dimensions.
class SSet a => Bounded a where
  amplitude :: a -> [Int]

-- | Simplicial sets with a chosen basepoint.
class SSet a => Pointed a where
  basepoint :: a -> GeomSimplex a

-- TODO: move Pointed to its own file to import Morphism
-- basepointMor :: a -> Morphism () a

-- | SSet has unique 0-simplex.
class Pointed a => ZeroReduced a

-- | SSet has no non-degenerate 1-simplices.
class ZeroReduced a => OneReduced a

-- | Simplicial morphisms, given by their action on underlying
-- elements; the image may be formally degenerate.
newtype UMorphism a b = Morphism {onGeomSimplex :: a -> FormalDegen b}

-- | Morphisms between the geometric simplices of two simplicial sets.
type Morphism a b = UMorphism (GeomSimplex a) (GeomSimplex b)
-- | Extend a morphism's action from underlying elements to formal
-- degeneracies, re-applying the degeneracy operators on the image
-- (via 'degen', which renormalises the indices).
onSimplex :: UMorphism a b -> FormalDegen a -> FormalDegen b
onSimplex (Morphism f) (NonDegen s) = f s
onSimplex m (Degen i s) = degen (onSimplex m s) i
-- | Composition of morphisms: apply the first, then map the second
-- over the resulting formal degeneracy.
instance Constrained.Semigroupoid UMorphism where
  f2 . (Morphism f1) = Morphism $ \s -> f2 `onSimplex` f1 s

-- | The identity morphism embeds each element as non-degenerate.
instance Constrained.Category UMorphism where
  id = Morphism $ \s -> NonDegen s

instance Constrained.Functor UMorphism (->) FormalDegen where
  fmap = onSimplex
-- Reid Barton:
-- https://categorytheory.zulipchat.com/#narrow/stream/241590-theory.3A-
-- algebraic.20topology.20.26.20homological.20algebra/topic/describing.
-- 20simplicial.20sets/near/260675092
--
-- There's a lot more interesting stuff to say about this situation.
--
-- If we want to understand the category of semisimplicial sets
-- relative to the category of simplicial sets via the left adjoint
-- you mentioned, we should answer three questions: 1) Which
-- simplicial sets lie in the image of this functor? 2) Which
-- morphisms lie in the image of this functor? 3) When do two
-- parallel morphisms of semisimplicial sets become equal when we
-- apply this functor?
--
-- I think, though I haven't carefully checked, that the answers are:
--
-- 1) The simplicial sets in which every face of a nondegenerate simplex is
-- nondegenerate.
-- 2) The morphisms which send nondegenerate simplices to nondegenerate
-- simplices.
-- 3) Only if the maps were already equal, i.e., the functor is faithful.
--
-- There's also a more efficient way to describe what this left
-- adjoint produces, related to the kerodon proposition that Daniel
-- linked to, and using the notion of a "degeneracy operation". A
-- degeneracy operation is an operation taking n-simplices to
-- m-simplices for some fixed n and m, for which the corresponding
-- map [m]→[n] of Δ is surjective. (So in particular, n ≤ m.) The
-- operations s_i are the generating degeneracy operations, and the
-- degeneracy operations are all compositions of the s_i's, quotiented
-- by the simplicial relations involving the s_i.
--
-- The linked proposition says that every simplex of a simplicial set
-- can be expressed as a degeneracy operation applied to a
-- nondegenerate simplex in a unique way.
--
-- Now if we start with a semisimplicial set X, we can describe the
-- "free" simplicial set Y it generates as follows:
--
-- - The simplices of Y are formal applications of a degeneracy
-- operation to a simplex of X.
-- - The structure maps of X are computed as follows. Suppose we want
--   to compute the action of a simplicial operator f on a formal
--   degeneracy sx. The combined operation f∘s corresponds to some
-- map of Δ which we can refactor as a surjection followed by an
-- injection. Then f(sx) is given by formally applying the
-- degeneracy operator corresponding to the surjection to the value
-- of the face operator corresponding to the injection on x
-- (computed in the semisimplicial set X).
--
-- A more syntactic way to describe the action in terms of the
-- generating face and degenerating operators is:
--
-- - If we want to apply s_i to a formal degeneracy sx, we just form (s_i s) x
-- - If we want to apply d_i to a formal degeneracy sx, then we use
-- the simplicial identities to rewrite d_i s as a composition s' d'
-- s "moving ds to the left". Since we started with a single d_i ,
-- what will happen is that either d_i will pass through all the ss
-- (possibly changing indices in the process) so that d = d_j or the
-- d_i will cancel with some s, so that d = id. Then we compute x'
-- = d' x in X and form s' x'.
--
-- There is also a way to specify an arbitrary simplicial set
-- in terms of only its nondegenerate simplices and its face maps, but
-- with the caveat that the face of a nondegenerate simplex can be a
-- formal degeneracy of another nondegenerate simplex. The full
-- simplicial structure is recovered by the same process as above
-- except that when we take the face of a nondegenerate simplex (in
-- what would have been X above), it may come as a formal degeneracy
-- to which we have to apply another degeneracy operator to--which is
-- no problem.
--
-- The other caveat is that because of the original question 2, in
-- order to recover the correct maps of simplicial sets, we also need
-- to allow a map to send a nondegenerate simplex to a formal
-- degeneracy in the target simplicial set.
--
-- The program Kenzo uses this representation of simplicial sets.
| mvr/at | src/Math/Topology/SSet.hs | bsd-3-clause | 10,274 | 13 | 12 | 2,090 | 2,312 | 1,203 | 1,109 | -1 | -1 |
module Main where
import Control.Monad (filterM)
import Control.Applicative ((<$>))
import System.Environment (getArgs)
import System.FilePath ((</>))
import System.Directory (getDirectoryContents, doesFileExist)
import Data.ByteString (readFile)
import Codec.Archive.Zip (ZipArchive, mkEntrySelector, addEntry, createArchive, CompressionMethod(..))
import Path (parseRelFile)
import Prelude hiding (readFile)
-- | Build the archive action for a single file: read its contents and
-- add them (Deflate-compressed) under an entry named after its path.
-- (Redundant parentheses around plain arguments removed.)
prepareFiles :: FilePath -> IO (ZipArchive ())
prepareFiles file = do
    entryName <- parseRelFile file >>= mkEntrySelector
    contents  <- readFile file
    return $ addEntry Deflate contents entryName
-- | Zip every regular file in the directory named by the first command
-- line argument into @archive.zip@.
main :: IO ()
main = do
  (dir:_) <- getArgs
  files <- map (dir </>) <$> getDirectoryContents dir
  filesOnly <- filterM doesFileExist files
  zipPath <- parseRelFile "archive.zip"
  -- 'mapM' replaces the 'sequence . map' anti-pattern.
  prepared <- mapM prepareFiles filesOnly
  createArchive zipPath $ do
    -- 'sequence_' tolerates an empty directory; the original
    -- 'foldr1 (>>)' crashed when there were no files to add.
    sequence_ prepared
putStrLn "Done." | mezzomondo/sghh-zip | src/Main.hs | bsd-3-clause | 1,047 | 0 | 10 | 284 | 301 | 160 | 141 | 25 | 1 |
module Data.Search.Results
( T
, size
, take
, toList
, filterM
, union
, intersection
, empty
, fromPSQ
, fromList
, changeWeight
) where
import qualified Control.Monad as Monad
import qualified Data.List as List
import Data.Maybe (mapMaybe)
import Data.Monoid ((<>))
import qualified Data.OrdPSQ as PSQ
import Prelude hiding (take)
data T id w v
= PSQ (PSQ.OrdPSQ id w v)
| List [(id, w, v)]
-- | The empty result set.  (Type signature added; it was the only
-- exported binding in this module without one.)
empty :: T id w v
empty = fromList []
-- | Wrap a priority search queue as a result set (gives weight-ordered
-- access).
fromPSQ :: PSQ.OrdPSQ id w v -> T id w v
fromPSQ = PSQ

-- | Wrap a plain association list as a result set.
fromList :: [(id, w, v)] -> T id w v
fromList = List
-- | Convert to the PSQ representation when necessary, so that
-- weight-ordered operations become available.
ensurePSQ :: (Ord id, Ord w) => T id w v -> T id w v
ensurePSQ r@(PSQ _) = r
ensurePSQ (List xs) = fromPSQ $ PSQ.fromList xs
-- | Human-readable dump: a header with the size, then one result per
-- line.
instance (Show id, Show w, Show v) => Show (T id w v) where
  show r = "Results set of " ++ show (size r) ++ " results:\n" ++
    List.intercalate "\n" (map show $ toList r)
-- | Number of results in the set.
size :: T id w v -> Int
size results = case results of
  PSQ p   -> PSQ.size p
  List xs -> List.length xs
-- | The first @n@ results: for the PSQ representation the @n@ of
-- minimal weight, for the list representation simply the first @n@
-- elements in list order.
take :: (Ord id, Ord w, Monoid w) => Int -> T id w v -> [(id, w, v)]
take n (PSQ p) = PSQ.takeMin n p
take n (List xs) = List.take n xs
-- | All results as (id, weight, value) triples.
toList :: T id w v -> [(id, w, v)]
toList (List xs) = xs
toList (PSQ p)   = PSQ.toList p
-- | Keep only the results whose (id, value) pair satisfies the monadic
-- predicate; the result is always in the list representation.
filterM :: (Monad m) => ((id, v) -> m Bool) -> T id w v -> m (T id w v)
filterM p results = fmap List (Monad.filterM keep (toList results))
  where keep (i, _, v) = p (i, v)
-- | Union of several result sets, merged smallest-first into an
-- accumulating PSQ.  When a key occurs in more than one set, the
-- minimal weight wins (see 'combine'); the value kept is the one
-- already in the accumulator.
union :: (Ord id, Ord w, Monoid w) => [T id w v] -> T id w v
union rs = List.foldl' u empty sorted
  where
    sorted = List.sortOn size rs
    u a b@(List _) = u a (ensurePSQ b)
    -- NOTE: the lambda parameter deliberately shadows the outer 'psq'.
    u a (PSQ psq) = fromPSQ $ List.foldl'
      (\ psq (k, w, v) -> PSQ.insertWith combine k w v psq)
      psq
      (toList a)
    -- Used when the same source is contained in both results.
    combine _ (w, v) (w', _v') = (w `min` w', v)
-- | Intersection of several result sets; the weights of matching keys
-- are combined with '<>'.  Sets are processed smallest-first.
--
-- NOTE(review): 'List.foldl1'' makes this partial — @intersection []@
-- raises an error.  Callers must supply a non-empty list.
intersection :: (Ord id, Ord w, Monoid w) => [T id w v] -> T id w v
intersection rs = List.foldl1' i sorted
  where
    sorted = List.sortOn size $ rs
    i a b@(List _) = i a (ensurePSQ b)
    i a (PSQ psq) = fromList . mapMaybe f . toList $ a where
      f (k, w, v) = case PSQ.lookup k psq of
        Nothing -> Nothing
        Just (w', _v') -> Just (k, w <> w', v)
-- | Apply a function to every weight, preserving ids and values; the
-- result is in the list representation.
changeWeight :: (w -> w') -> T id w v -> T id w' v
changeWeight cw results = fromList (map adjust (toList results))
  where adjust (k, w, v) = (k, cw w, v)
| ariep/text-index | src/Data/Search/Results.hs | bsd-3-clause | 2,261 | 0 | 14 | 627 | 1,201 | 639 | 562 | 63 | 3 |
module Data
( LispVal(..)
, LispError(..)
, ThrowsError
, LispFunction
, IOLispFunction
, Env
, emptyEnv
, IOThrowsError
, liftThrows
)
where
import Text.Parsec(ParseError)
import Control.Monad.Except
import Data.Functor.Identity(runIdentity)
import Data.IORef
import System.IO(Handle)
-- | A Scheme value.
data LispVal = Atom String                  -- ^ identifier / symbol name
             | List [LispVal]               -- ^ proper list
             | DottedList [LispVal] LispVal -- ^ improper (dotted) list
             | Number Integer
             | String String
             | Bool Bool
             | PrimitiveFunc LispFunction   -- ^ pure built-in function
             | Func { _params :: [String]      -- ^ formal parameter names
                    , _vararg :: Maybe String  -- ^ optional rest parameter
                    , _body :: [LispVal]       -- ^ body forms
                    , _closure :: Env          -- ^ captured environment
                    }
             | IOFunc IOLispFunction        -- ^ built-in performing IO
             | Port Handle                  -- ^ open file handle
-- | Structural equality.  Functions, primitives and ports fall through
-- to the final catch-all and therefore never compare equal.
instance Eq LispVal where
  (Atom a) == (Atom b) = a == b
  (Number a) == (Number b) = a == b
  (String a) == (String b) = a == b
  (Bool a) == (Bool b) = a == b
  (List a) == (List b) = a == b
  (DottedList a a1) == (DottedList b b1) = a == b && a1 == b1
  _ == _ = False
-- | Render values in Scheme surface syntax (strings are re-escaped so
-- the output is itself readable).
instance Show LispVal where
  show (String contents) = "\"" ++ escapeString contents ++ "\""
  show (Atom name) = name
  show (Number contents) = show contents
  show (Bool True) = "#t"
  show (Bool False) = "#f"
  show (List contents) = "(" ++ unwordsList contents ++ ")"
  show (DottedList head_ tail_) = "(" ++ unwordsList head_ ++ " . " ++ show tail_ ++ ")"
  show (PrimitiveFunc _) = "<primitive>"
  show (Func args varargs _ _) = "(lambda (" ++ unwords (map show args) ++
                                   maybe "" (" . "++) varargs
                                   ++ ") ...)"
  show (Port _) = "<IO port>"
  show (IOFunc _) = "<IO primitive>"
-- | Render a list of values separated by single spaces.
unwordsList :: [LispVal] -> String
unwordsList vals = unwords (map show vals)
-- | Escape newline, tab, backslash and double-quote characters so the
-- string can be embedded in a double-quoted literal; all other
-- characters pass through unchanged.
escapeString :: String -> String
escapeString = concatMap esc
  where
    esc '\n' = "\\n"
    esc '\t' = "\\t"
    esc '\\' = "\\\\"
    esc '\"' = "\\\""
    esc c    = [c]
-- | A pure primitive function; may fail with a 'LispError'.
type LispFunction = [LispVal] -> ThrowsError LispVal

-- | Everything that can go wrong while parsing or evaluating.
data LispError = NumArgs Integer [LispVal]   -- ^ expected arity, actual args
               | TypeMismatch String LispVal -- ^ expected type, offending value
               | Parser ParseError
               | BadSpecialForm String LispVal
               | NotFunction String String
               | UnboundVar String String
               | Default String
               deriving(Eq)
-- | User-facing error messages.
instance Show LispError where
  show (UnboundVar message varname) = message ++ ": " ++ varname
  show (BadSpecialForm message form) = message ++ ": " ++ show form
  show (NotFunction message func) = message ++ ": " ++ show func
  show (NumArgs expected found) = "Expected " ++ show expected
                                  ++ " args; found values " ++ unwordsList found
  show (TypeMismatch expected found) = "Invalid type: expected " ++ expected
                                       ++ ", found " ++ show found
  show (Parser parseErr) = "Parse error at " ++ show parseErr
  show (Default message) = "Default error: " ++ message
type ThrowsError = Except LispError
type Env = IORef [(String, IORef LispVal)]
-- | Create a fresh environment with no bindings.
emptyEnv :: IO Env
emptyEnv = newIORef []
type IOThrowsError = ExceptT LispError IO
type IOLispFunction = [LispVal] -> IOThrowsError LispVal
-- | Lift a pure 'ThrowsError' computation into 'IOThrowsError',
-- re-raising any error in the larger monad.
liftThrows :: ThrowsError a -> IOThrowsError a
liftThrows = either throwError return . runExcept
| davideGiovannini/scheme-repl | src/Data.hs | bsd-3-clause | 3,614 | 0 | 11 | 1,356 | 1,074 | 563 | 511 | 86 | 5 |
-- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- Modify and collect code generation for final STG program
{-
This is now a sort-of-normal STG-to-STG pass (WDP 94/06), run by stg2stg.
- Traverses the STG program collecting the cost centres. These are required
to declare the cost centres at the start of code generation.
Note: because of cross-module unfolding, some of these cost centres may be
from other modules.
- Puts on CAF cost-centres if the user has asked for individual CAF
cost-centres.
-}
module SCCfinal ( stgMassageForProfiling ) where
#include "HsVersions.h"
import GhcPrelude
import StgSyn
import CostCentre -- lots of things
import Id
import Name
import Module
import UniqSupply ( UniqSupply )
import ListSetOps ( removeDups )
import Outputable
import DynFlags
import CoreSyn ( Tickish(..) )
import FastString
import SrcLoc
import Util
import Control.Monad (liftM, ap)
-- | Walk the STG program, collecting every cost centre mentioned in it
-- (needed so code generation can declare them up front), and attach a
-- CAF cost centre to each top-level CAF — either one per CAF
-- (-fprof-cafs) or a single shared \"all CAFs\" cost centre.
stgMassageForProfiling
  :: DynFlags
  -> Module                 -- module name
  -> UniqSupply             -- unique supply
  -> [StgTopBinding]        -- input
  -> (CollectedCCs, [StgTopBinding])

stgMassageForProfiling dflags mod_name _us stg_binds
  = let
        ((local_ccs, extern_ccs, cc_stacks),
         stg_binds2)
          = initMM mod_name (do_top_bindings stg_binds)

        (fixed_ccs, fixed_cc_stacks)
          = if gopt Opt_AutoSccsOnIndividualCafs dflags
            then ([],[])  -- don't need "all CAFs" CC
            else ([all_cafs_cc], [all_cafs_ccs])

        local_ccs_no_dups  = fst (removeDups cmpCostCentre local_ccs)
        extern_ccs_no_dups = fst (removeDups cmpCostCentre extern_ccs)
    in
    ((fixed_ccs ++ local_ccs_no_dups,
      extern_ccs_no_dups,
      fixed_cc_stacks ++ cc_stacks), stg_binds2)
  where
    span = mkGeneralSrcSpan (mkFastString "<entire-module>") -- XXX do better
    all_cafs_cc  = mkAllCafsCC mod_name span
    all_cafs_ccs = mkSingletonCCS all_cafs_cc

    ----------
    -- Traverse the top-level bindings, massaging each right-hand side.
    do_top_bindings :: [StgTopBinding] -> MassageM [StgTopBinding]

    do_top_bindings [] = return []

    do_top_bindings (StgTopLifted (StgNonRec b rhs) : bs) = do
        rhs' <- do_top_rhs b rhs
        bs'  <- do_top_bindings bs
        return (StgTopLifted (StgNonRec b rhs') : bs')

    do_top_bindings (StgTopLifted (StgRec pairs) : bs) = do
        pairs2 <- mapM do_pair pairs
        bs'    <- do_top_bindings bs
        return (StgTopLifted (StgRec pairs2) : bs')
      where
        do_pair (b, rhs) = do
            rhs2 <- do_top_rhs b rhs
            return (b, rhs2)

    do_top_bindings (b@StgTopStringLit{} : bs) = do
        bs' <- do_top_bindings bs
        return (b : bs')

    ----------
    do_top_rhs :: Id -> StgRhs -> MassageM StgRhs

    do_top_rhs _ (StgRhsClosure _ _ _ _ []
                      (StgTick (ProfNote _cc False{-not tick-} _push)
                        (StgConApp con args _)))
      | not (isDllConApp dflags mod_name con args)
        -- Trivial _scc_ around nothing but static data
        -- Eliminate _scc_ ... and turn into StgRhsCon
        -- isDllConApp checks for LitLit args too
      = return (StgRhsCon dontCareCCS con args)

    do_top_rhs binder (StgRhsClosure _ bi fv u [] body)
      = do
          -- Top level CAF without a cost centre attached
          -- Attach CAF cc (collect if individual CAF ccs)
          caf_ccs <- if gopt Opt_AutoSccsOnIndividualCafs dflags
                     then let cc = mkAutoCC binder modl CafCC
                              ccs = mkSingletonCCS cc
                                   -- careful: the binder might be :Main.main,
                                   -- which doesn't belong to module mod_name.
                                   -- bug #249, tests prof001, prof002
                              modl | Just m <- nameModule_maybe (idName binder) = m
                                   | otherwise = mod_name
                          in do
                          collectNewCC cc
                          collectCCS ccs
                          return ccs
                     else
                          return all_cafs_ccs
          body' <- do_expr body
          return (StgRhsClosure caf_ccs bi fv u [] body')

    do_top_rhs _ (StgRhsClosure _no_ccs bi fv u args body)
      = do body' <- do_expr body
           return (StgRhsClosure dontCareCCS bi fv u args body')

    do_top_rhs _ (StgRhsCon _ con args)
        -- Top-level (static) data is not counted in heap
        -- profiles; nor do we set CCCS from it; so we
        -- just slam in dontCareCostCentre
      = return (StgRhsCon dontCareCCS con args)

    ------
    -- Traverse expressions, collecting every cost centre found in a
    -- profiling tick.
    do_expr :: StgExpr -> MassageM StgExpr

    do_expr (StgLit l) = return (StgLit l)

    do_expr (StgApp fn args)
      = return (StgApp fn args)

    do_expr (StgConApp con args ty_args)
      = return (StgConApp con args ty_args)

    do_expr (StgOpApp con args res_ty)
      = return (StgOpApp con args res_ty)

    do_expr (StgTick note@(ProfNote cc _ _) expr) = do
        -- Ha, we found a cost centre!
        collectCC cc
        expr' <- do_expr expr
        return (StgTick note expr')

    do_expr (StgTick ti expr) = do
        expr' <- do_expr expr
        return (StgTick ti expr')

    do_expr (StgCase expr bndr alt_type alts) = do
        expr' <- do_expr expr
        alts' <- mapM do_alt alts
        return (StgCase expr' bndr alt_type alts')
      where
        do_alt (id, bs, e) = do
            e' <- do_expr e
            return (id, bs, e')

    do_expr (StgLet b e) = do
        (b,e) <- do_let b e
        return (StgLet b e)

    do_expr (StgLetNoEscape b e) = do
        (b,e) <- do_let b e
        return (StgLetNoEscape b e)

    do_expr other = pprPanic "SCCfinal.do_expr" (ppr other)

    ----------------------------------
    do_let (StgNonRec b rhs) e = do
        rhs' <- do_rhs rhs
        e'   <- do_expr e
        return (StgNonRec b rhs',e')

    do_let (StgRec pairs) e = do
        pairs' <- mapM do_pair pairs
        e'     <- do_expr e
        return (StgRec pairs', e')
      where
        do_pair (b, rhs) = do
            rhs2 <- do_rhs rhs
            return (b, rhs2)

    ----------------------------------
    do_rhs :: StgRhs -> MassageM StgRhs
        -- We play much the same game as we did in do_top_rhs above;
        -- but we don't have to worry about cafs etc.

        -- throw away the SCC if we don't have to count entries. This
        -- is a little bit wrong, because we're attributing the
        -- allocation of the constructor to the wrong place (XXX)
        -- We should really attach (PushCC cc CurrentCCS) to the rhs,
        -- but need to reinstate PushCC for that.
    do_rhs (StgRhsClosure _closure_cc _bi _fv _u []
                (StgTick (ProfNote cc False{-not tick-} _push)
                  (StgConApp con args _)))
      = do collectCC cc
           return (StgRhsCon currentCCS con args)

    do_rhs (StgRhsClosure _ bi fv u args expr) = do
        expr' <- do_expr expr
        return (StgRhsClosure currentCCS bi fv u args expr')

    do_rhs (StgRhsCon _ con args)
      = return (StgRhsCon currentCCS con args)
-- -----------------------------------------------------------------------------
-- Boring monad stuff for this
-- | The massage monad: reads the module name and threads the
-- 'CollectedCCs' accumulator through the traversal.
newtype MassageM result
  = MassageM {
        unMassageM :: Module          -- module name
                   -> CollectedCCs
                   -> (CollectedCCs, result)
    }
instance Functor MassageM where
    fmap = liftM

-- | 'pure' leaves the accumulator untouched; sequencing threads it
-- left to right (see 'thenMM' / 'thenMM_').
instance Applicative MassageM where
    pure x = MassageM (\_ ccs -> (ccs, x))
    (<*>) = ap
    (*>) = thenMM_

instance Monad MassageM where
    (>>=) = thenMM
    (>>) = (*>)
-- the initMM function also returns the final CollectedCCs
initMM :: Module -- module name, which we may consult
-> MassageM a
-> (CollectedCCs, a)
initMM mod_name (MassageM m) = m mod_name ([],[],[])
thenMM  :: MassageM a -> (a -> MassageM b) -> MassageM b
thenMM_ :: MassageM a -> (MassageM b) -> MassageM b

-- | Sequencing: the module name is read-only, the collected cost
-- centres are threaded through both computations in order.
thenMM expr cont = MassageM $ \mod ccs ->
    case unMassageM expr mod ccs of { (ccs2, result) ->
    unMassageM (cont result) mod ccs2 }

-- | As 'thenMM', discarding the first result.
thenMM_ expr cont = MassageM $ \mod ccs ->
    case unMassageM expr mod ccs of { (ccs2, _) ->
    unMassageM cont mod ccs2 }
-- | Record a cost centre, classifying it as local to the current
-- module or as external (requiring an "extern" declaration).
collectCC :: CostCentre -> MassageM ()
collectCC cc
  = MassageM $ \mod_name (local_ccs, extern_ccs, ccss)
      -> if (cc `ccFromThisModule` mod_name) then
             ((cc : local_ccs, extern_ccs, ccss), ())
         else -- must declare it "extern"
             ((local_ccs, cc : extern_ccs, ccss), ())
-- Version of collectCC used when we definitely want to declare this
-- CC as local, even if its module name is not the same as the current
-- module name (eg. the special :Main module) see bug #249, #1472,
-- test prof001,prof002.
-- | Record a cost centre as local unconditionally, regardless of the
-- module it appears to come from (see the comment above).
collectNewCC :: CostCentre -> MassageM ()
collectNewCC cc
  = MassageM $ \_mod_name (local_ccs, extern_ccs, ccss)
      -> ((cc : local_ccs, extern_ccs, ccss), ())
-- | Record a cost-centre stack; it must have a cost centre attached
-- (checked in debug builds via ASSERT).
collectCCS :: CostCentreStack -> MassageM ()
collectCCS ccs
  = MassageM $ \_mod_name (local_ccs, extern_ccs, ccss)
      -> ASSERT(not (noCCSAttached ccs))
         ((local_ccs, extern_ccs, ccs : ccss), ())
| ezyang/ghc | compiler/profiling/SCCfinal.hs | bsd-3-clause | 9,341 | 0 | 21 | 2,889 | 2,317 | 1,201 | 1,116 | 173 | 21 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RecordWildCards #-}
module Generator where
import Prelude hiding (const, mod)
import Data.Char (isLower, toUpper)
import Data.List (find, intercalate)
import Data.Maybe (isJust, maybeToList)
import Language.Haskell.Exts.SrcLoc (noLoc)
import qualified Language.Haskell.Exts.Syntax as S
import Types
-- | Build a type-synonym declaration @type \<ident\> = \<rhs\>@ with no
-- type variables.
typeDecl :: Identifier -> DefinitionType -> S.Decl
typeDecl name definition =
  S.TypeDecl noLoc (S.Ident name) [] (toType definition)
-- | Build a @data@ declaration (no context, no type variables) from
-- the given constructor declarations and deriving list.
dataOrRec :: Identifier -> [Identifier] -> [S.ConDecl] -> S.Decl
dataOrRec ident derivs cds = S.DataDecl noLoc
                                        S.DataType
                                        context
                                        (S.Ident ident)
                                        tyVarBind
                                        (map qualConDecl cds)
                                        (map mkDeriving derivs)
  where
    tyVarBind = []
    context = []
    -- Wrap each constructor with an empty quantifier/context.
    qualConDecl = S.QualConDecl noLoc tyVarBind' context'
      where tyVarBind' = []
            context' = []
    -- Each derived class is referenced unqualified, with no arguments.
    mkDeriving d = (S.UnQual . S.Ident $ d, types)
      where types = []
-- | Build a single-constructor record declaration; each field name is
-- prefixed with an underscore (lens convention) and left unbanged.
recDecl :: Identifier -> [Identifier] -> [Field] -> S.Decl
recDecl ident derivs fields = dataOrRec ident derivs [conDecl]
  where
    conDecl = S.RecDecl (S.Ident ident) (map mkField fields)
    mkField Field{..} = ( [S.Ident ('_':_fieldName)]
                        , S.UnBangedTy (toType _fieldType)
                        )
-- | Build a single-constructor positional data declaration.
dataDecl :: Identifier -> [Identifier] -> [FieldType] -> S.Decl
dataDecl ident derivs ftypes = dataOrRec ident derivs [conDecl]
  where
    conDecl = S.ConDecl (S.Ident ident) types
    types = map (S.UnBangedTy . toType) ftypes
-- | Type of a map container: the container type applied to the key and
-- value types (left-associated application).
mapType :: String -> FieldType -> FieldType -> S.Type
mapType m k v = toType m `S.TyApp` toType k `S.TyApp` toType v
-- | Type of a set container: the container type applied to the element
-- type.
setType :: String -> FieldType -> S.Type
setType s a = toType s `S.TyApp` toType a
-- | Type of a list container: the plain Haskell list type.
listType :: FieldType -> S.Type
listType t = S.TyList (toType t)
-- | Things that can be rendered as a haskell-src-exts 'S.Type'.
class Typeable a where
  toType :: a -> S.Type
-- 'Nothing' renders as the unit type.
instance Typeable a => Typeable (Maybe a) where
  toType Nothing = toType "()"
  toType (Just x) = toType x
instance Typeable FieldType where
  toType (ContainerType x) = toType x
  toType (BaseType x) = toType x
  toType (Identifier x) = toType x
-- Thrift containers map onto unordered-containers / list types.
instance Typeable ContainerType where
  toType (MapType _ (k, v)) = mapType "HashMap" k v
  toType (SetType _ t) = setType "HashSet" t
  toType (ListType t _) = listType t
instance Typeable DefinitionType where
  toType (Left a) = toType a
  toType (Right b) = toType b
-- Base-type names are translated to their Haskell equivalents; anything
-- starting with a lower-case letter is treated as a type variable,
-- otherwise as a type constructor.  (Prelude's @mod@ is hidden at the
-- top of this module, freeing the name for the local mapping.)
-- NOTE(review): @head@ crashes on an empty identifier -- input is
-- trusted to be non-empty.  Also, Thrift @byte@ mapping to @Char@ looks
-- odd; confirm @Int8@/@Word8@ wasn't intended.
instance Typeable String where
  toType t = if isLower . head . mod $ t
             then S.TyVar . S.Ident . mod $ t
             else S.TyCon . S.UnQual . S.Ident . mod $ t
    where mod "binary" = "ByteString"
          mod "bool" = "Bool"
          mod "byte" = "Char"
          mod "double" = "Double"
          mod "i16" = "Int16"
          mod "i32" = "Int32"
          mod "i64" = "Int64"
          mod "string" = "String"
          mod t' = t'
-- | Translate a Thrift constant value into a Haskell expression.
-- Constant lists become list literals; constant maps become lists of
-- boxed two-tuples (association lists).
toExp :: ConstValue -> S.Exp
toExp (ConstLiteral s) = S.Lit . S.String $ s
toExp (ConstIdentifier s) = S.Var . S.UnQual . S.Ident $ s
toExp (ConstNumber (Left i)) = S.Lit . S.Int $ i
toExp (ConstNumber (Right d)) = S.Lit . S.Frac . toRational $ d
toExp (ConstList cs) = S.List . map toExp $ cs
toExp (ConstMap ps) = S.List . map tuplize $ ps
  where tuplize (a, b) = S.Tuple S.Boxed (map toExp [a,b])
-- | Simple top-level pattern binding @name = rhs@: no guards and no
-- local declarations.
patBind :: Identifier -> S.Exp -> S.Decl
patBind name rhs =
    S.PatBind noLoc (S.PVar (S.Ident name)) Nothing (S.UnGuardedRhs rhs) (S.BDecls [])
-- | Type-signature declaration @name :: ty@ for a single binder.
typeSig :: Identifier -> S.Type -> S.Decl
typeSig name ty = S.TypeSig noLoc [S.Ident name] ty
-- | Render a Thrift service as a type class over a handler type @a@.
-- Each service function becomes a class method whose type is the
-- function's field types chained with @->@ ending in its return type.
-- An optional parent service becomes a superclass constraint on @a@.
classDecl :: Identifier -> Maybe Parent -> [Function] -> S.Decl
classDecl ident p fs = S.ClassDecl noLoc
                                   context
                                   (S.Ident ident)
                                   [S.UnkindedVar (S.Ident "a")]
                                   funDep
                                   (map (S.ClsDecl . sig) fs)
  where
    -- Superclass assertion @Parent a@ for the inherited service.
    classA = (`S.ClassA` [S.TyVar . S.Ident $ "a"]) . S.UnQual . S.Ident
    context = map classA (maybeToList p)
    funDep = []
    sig Function{..} = let types = (map getType _fnFields) ++ [(toType _fnType)]
                       in typeSig _fnName (foldr1 S.TyFun types)
      where getType Field{..} = toType _fieldType
-- | Things that expand to a batch of top-level declarations.
class Generator a where
  gen :: a -> [S.Decl]
-- Structs/exceptions become records, enums become nullary-constructor
-- data types; each gets an (empty-bodied) Binary instance.  Service
-- name and parent are ignored here -- only the functions are expanded.
instance Generator Definition where
  gen (Typedef t ident) = [typeDecl ident t]
  gen (Const t ident val) = [typeSig ident (toType t), patBind ident (toExp val)]
  gen (Struct ident fields) =
    [ recDecl ident ["Show"] fields
    , instDecl "Binary" ident []
    ]
  gen (Enum ident maps) =
    [ dataDecl ident ["Enum", "Show"] $ map (Identifier . fst) maps
    , instDecl "Binary" ident []
    ]
  gen (Exception ident fields) =
    [ recDecl ident ["Show"] fields
    , instDecl "Binary" ident []
    ]
  gen (Service _ _ funcs) = concatMap funcDecl funcs
  gen _ = []
-- | Expand one service function into: a data type named after the
-- (capitalized) function, a @Binary@ instance with hand-built @get@ /
-- @put@, and a @HasName@ instance returning the wire name.
--
-- NOTE(review): the generated @get@ / @put@ bodies are hard-coded to an
-- \"Echo\" shape (constructor @Echo@, single field id 1 named
-- \"message\") rather than derived from @_fnFields@ -- this looks like
-- scaffolding for one example service; confirm before reuse.
funcDecl :: Function -> [S.Decl]
funcDecl Function{..} =
  [ dataDecl ident ["Show"] (types _fnFields)
  , instDecl "Binary" ident [ S.InsDecl $ patBind "get" get
                            , S.InsDecl $ funBind "put" [putLhs] put
                            ]
  , instDecl "HasName" ident [S.InsDecl $ funBind "nameOf" [S.PWildCard] nameOf]
  ]
  where
    -- Shorthand builders for unqualified constructors/operators/vars.
    con = S.Con . S.UnQual . S.Ident
    op = S.QVarOp . S.UnQual . S.Symbol
    var = S.Var . S.UnQual . S.Ident
    -- get = Echo . third <$> getField
    get = S.InfixApp (S.InfixApp (con "Echo") (op ".") (var "third"))
                     (op "<$>") (var "getField")
    -- put = struct (nameOf x) $ putField (1, "message", message)
    put = S.InfixApp (S.App (S.Var (qName "struct"))
                            (S.Paren (S.App (S.Var . qName $ "nameOf") (S.Var . qName $ "x"))))
                     (op "$")
                     (S.App (S.Var . qName $ "putField")
                            (S.Tuple S.Boxed [ S.Lit (S.Int 1)
                                             , S.Lit (S.String "message")
                                             , S.Var (qName "message")
                                             ]))
    -- put's LHS pattern: x@FnName{..}
    putLhs = S.PAsPat (S.Ident "x") (S.PRec (qName _fnName) [S.PFieldWildcard])
    ident = capitalize _fnName
    types = map (\Field{..} -> _fieldType)
    nameOf = S.Lit . S.String $ _fnName
-- | Single-equation function binding @ident lhs... = exp@, with no
-- guards and no local declarations.
--
-- Fix: this top-level binding had no type signature; the types are
-- pinned by its uses in 'funcDecl' (@funBind "put" [putLhs] put@).
funBind :: String -> [S.Pat] -> S.Exp -> S.Decl
funBind ident lhs exp =
    S.FunBind [S.Match noLoc (S.Ident ident) lhs Nothing (S.UnGuardedRhs exp) (S.BDecls [])]
-- | Unqualified name reference for an identifier.
qName :: Identifier -> S.QName
qName s = S.UnQual (S.Ident s)
-- | Instance declaration @instance <cls> <ty> where ...@ with an empty
-- context.
instDecl :: Identifier -> Identifier -> [S.InstDecl] -> S.Decl
instDecl cls ty = S.InstDecl noLoc [] (qName cls) [toType ty]
-- | Upper-case the first character of a string; the empty string is
-- passed through unchanged.
capitalize :: String -> String
capitalize s = case s of
    []       -> []
    (c : cs) -> toUpper c : cs
-- | Build an import declaration.
--
-- * @alias@ -- when present the import is @qualified ... as alias@.
-- * @spec@ -- @(hiding?, names)@; an empty name list means \"no import
--   list at all\" (import everything), not an empty explicit list.
importDecl :: String -> Maybe String -> (Bool,[S.ImportSpec]) -> S.ImportDecl
importDecl name alias spec =
    S.ImportDecl noLoc
                 (S.ModuleName name)
                 (isJust alias)
                 False
                 Nothing
                 (fmap S.ModuleName alias)
                 (mkSpec spec)
  where
    mkSpec (_,[]) = Nothing
    mkSpec s = Just s
-- | Explicit (non-hiding) import list naming abstract entities.
specifying :: [String] -> (Bool, [S.ImportSpec])
specifying names = (False, [S.IAbs (S.Ident n) | n <- names])
-- | Assemble the generated module for a parsed Thrift 'Document'.  The
-- module is named @<hs namespace>.GenTypes@ -- or plain @GenTypes@ when
-- no @namespace hs ...@ header is present -- with a fixed set of
-- pragmas and imports, followed by the expansion of every definition.
generate :: Document -> S.Module
generate (Document heads defs) =
  S.Module noLoc
           (S.ModuleName ns)
           [ S.LanguagePragma noLoc [S.Ident "OverloadedStrings"]
           , S.LanguagePragma noLoc [S.Ident "RecordWildCards"]
           , S.LanguagePragma noLoc [S.Ident "TypeSynonymInstances"]
           ]
           Nothing
           Nothing
           [ importDecl "Data.Binary" Nothing (specifying ["Binary"])
           , importDecl "Thrift" Nothing (specifying [])
           , importDecl "Vintage.Protocol.Binary" Nothing (specifying [])
           ]
           (concatMap gen defs)
  where
    ns = intercalate "." (maybeToList nsDef ++ ["GenTypes"])
    -- First "hs" namespace header wins, if any.
    nsDef = fmap (\(Namespace _ n) -> n) (find isNs heads)
      where isNs (Namespace "hs" _) = True
            isNs _ = False
| luciferous/vintage | generator/Generator.hs | bsd-3-clause | 8,536 | 0 | 17 | 3,059 | 3,009 | 1,548 | 1,461 | 182 | 2 |
-- Taken from http://hackage.haskell.org/package/regex-genex
-- temporarily to build with GHC 7.10.
{-# LANGUAGE ImplicitParams, NamedFieldPuns, PatternGuards #-}
module Regex.Genex.Normalize (normalize) where
import Data.Set (toList, Set)
import Text.Regex.TDFA.Pattern
import Text.Regex.TDFA.ReadRegex (parseRegex)
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
import qualified Data.Set as Set
-- | Indices of capture groups that are the target of a back-reference;
-- 'simplify' must keep these groups even though it strips all others.
type BackReferences = IntSet
-- | Normalize a regex into @strong star normal form@, as defined in the paper
-- @Simplifying Regular Expressions: A Quantitative Perspective@.
--
-- The back-reference set is threaded into 'simplify' via the implicit
-- parameter @?refs@; 'black' then performs the star-normal-form pass.
normalize :: BackReferences -> Pattern -> Pattern
normalize refs p = black $ let ?refs = refs in simplify p
-- | Can the pattern match the empty string?  @{0,n}@ bounds are
-- nullable; any other lower bound is not.  The catch-all covers plain
-- characters, character classes, anchors, etc.
nullable :: Pattern -> Bool
nullable pat = case pat of
  PGroup _ p -> nullable p
  PQuest{} -> True
  POr ps -> any nullable ps
  PConcat ps -> all nullable ps
  PBound 0 _ _ -> True
  PBound _ _ _ -> False
  PStar{} -> True
  PEmpty -> True
  _ -> False
-- | The \"white\" operator of the star-normal-form construction:
-- rewrite a pattern that is about to be placed under a star, stripping
-- repetition markers that are redundant in that position.
white :: Pattern -> Pattern
white pat = case pat of
  -- p? and p* under an outer star contribute the same strings as p.
  PQuest p -> white p
  PStar _ p -> white p
  PGroup x p -> PGroup x $ white p
  POr ps -> POr (map white ps)
  -- A nullable concatenation under a star matches the same strings as
  -- the alternation of its whitened pieces.
  PConcat ps -> if nullable pat
    then POr (map white ps)
    else pat
  PPlus p -> if nullable pat
    then PConcat [p, white p]
    else pat
  _ -> pat
-- | The \"black\" operator: push the rewrite through the pattern and
-- apply 'white' to every starred body, expanding @p+@ to @p p*@,
-- @{0,}@ to a star, and @{n,}@ to @{n,n}@ followed by a star.
black :: Pattern -> Pattern
black pat = case pat of
  POr ps -> POr (map black ps)
  PConcat ps -> PConcat (map black ps)
  PGroup x p -> PGroup x $ black p
  PStar x p -> PStar x $ white (black p)
  PPlus p -> PConcat [p, PStar (nullable p) (white $ black p)]
  PBound 0 Nothing p -> PStar (nullable p) (white $ black p)
  PBound x Nothing p -> PConcat [PBound x (Just x) p, PStar (nullable p) (white $ black p)]
  PBound x y p -> PBound x y $ black p
  -- p? collapses when p is already nullable.
  PQuest p -> if nullable p
    then black p
    else PQuest $ black p
  _ -> pat
-- | Debug helper: parse a regex string, calling 'error' with the parse
-- failure when the input is invalid.
_parse :: String -> Pattern
_parse regex = either (error . show) fst (parseRegex regex)
-- | Accumulator used by 'simplify' on alternations: collect single
-- characters and positive character classes into one combined set,
-- passing every other branch through.  Note the pass-through branches
-- come back in reverse order (consed onto @rest@).
foldChars :: (Set Char, [Pattern]) -> Pattern -> (Set Char, [Pattern])
foldChars (cset, rest) pat = case pat of
  PChar { getPatternChar = ch } -> (Set.insert ch cset, rest)
  PAny {getPatternSet = PatternSet (Just cset') _ _ _} -> (Set.union cset cset', rest)
  _ -> (cset, pat:rest)
-- | Bottom-up simplification pass: drop capture groups that are never
-- back-referenced, collapse trivial alternations and concatenations,
-- fuse the character branches of an alternation into one character
-- class, and unroll exact repetitions @{n,n}@ into concatenations.
simplify :: (?refs :: BackReferences) => Pattern -> Pattern
simplify pat = case pat of
  -- Groups are kept only when some back-reference needs them.
  PGroup (Just idx) p -> if idx `IntSet.member` ?refs then PGroup (Just idx) (simplify p) else simplify p
  PGroup _ p -> simplify p
  PQuest p -> case simplify p of
    PEmpty -> PEmpty
    p' -> PQuest p'
  -- A one-element positive character class is just that character.
  PAny {getPatternSet = pset, getDoPa} -> case pset of
    PatternSet (Just cset) _ _ _ -> case toList cset of
      [ch] -> PChar { getPatternChar = ch, getDoPa }
      _ -> pat
    _ -> pat
  POr [] -> PEmpty
  POr [p] -> simplify p
  -- Merge all character-like branches into a single set; keep the
  -- remaining branches as-is.
  POr ps -> let ps' = map simplify ps in
    case foldl foldChars (Set.empty, []) ps' of
      (cset, rest)
        | null rest -> anySet
        | Set.null cset -> POr rest
        | [r] <- rest -> POr [anySet, r]
        | otherwise -> POr [anySet, POr rest]
       where
        anySet = case Set.size cset of
          1 -> PChar { getPatternChar = Set.findMin cset, getDoPa = toEnum 0 }
          _ -> PAny { getPatternSet = PatternSet (Just cset) Nothing Nothing Nothing, getDoPa = toEnum 0 }
  PConcat [] -> PEmpty
  PConcat [p] -> simplify p
  -- Flatten nested concatenations and drop empty pieces.
  PConcat ps -> case concatMap (fromConcat . simplify) ps of
    [] -> PEmpty
    ps' -> PConcat ps'
    where
      fromConcat (PConcat ps') = ps'
      fromConcat PEmpty = []
      fromConcat p = [p]
  -- Exact repetition {n,n} is unrolled; 'simplify' runs again so the
  -- resulting concatenation is itself flattened.
  PBound low (Just high) p
    | high == low -> simplify $ PConcat (replicate low (simplify p))
  PBound low high p -> PBound low high (simplify p)
  PPlus p -> PPlus (simplify p)
  PStar x p -> PStar x (simplify p)
  _ -> pat
| plow-technologies/cobalt-kiosk-data-template | test/Regex/Genex/Normalize.hs | bsd-3-clause | 4,014 | 0 | 22 | 1,206 | 1,573 | 783 | 790 | 96 | 23 |
----------------------------------------------------------------------------
-- |
-- Module : ImportsBViaReexports
-- Copyright : (c) Sergey Vinokurov 2018
-- License : BSD3-style (see LICENSE)
-- Maintainer : serg.foo@gmail.com
----------------------------------------------------------------------------
module ImportsBViaReexports (quuxB, module R) where
import Reexports as R
(FooB(..), BarB(..), BazBP, quuxB, FrobBP, QuuxB(..), QuuxBP, derivedB)
| sergv/tags-server | test-data/0012resolve_reexport_import_cycles/import1NoListImport2WithListChildrenWildcardsReexportModule/ImportsBViaReexports.hs | bsd-3-clause | 472 | 0 | 6 | 63 | 63 | 46 | 17 | 3 | 0 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
-- |
-- Module : Data.Vector.Based
-- Copyright : (c) 2014 Joseph T. Abrahamson
-- License : BSD3
-- Maintainer : Joseph T. Abrahamson <me@jspha.com>
-- Stability : experimental
-- Portability : GADTs
--
-- A vector space is represented by a set of linearly independent
-- basis vectors. Normally, if you treat a list or array as a vector
-- then this basis is the index into the array.
--
-- Vector types where the basis elements are polymorphic form a
-- near-monad as explored by Dan Piponi. This types have an
-- interesting structure, but are rarely used since they are either
-- inconvenient or inefficient to work with.
--
-- * http://blog.sigfpe.com/2007/02/monads-for-vector-spaces-probability.html
-- * http://blog.sigfpe.com/2007/03/monads-vector-spaces-and-quantum.html
-- * http://vimeo.com/6590617
--
-- The standard mechanism to make convenient, efficient arbitrary
-- basis vectors would be to transform them using the Codensity monad.
-- This allows vectors to be manipulated by standard monadic and
-- applicative combinators. Unfortunately, this becomes expensive when
-- intermediate values of vectors during computations are queried.
--
-- The mechanism of van der Ploeg and Kiselyov (2014) allows us to
-- have an arbitrary based vector type which is fast to bind and fast
-- to inspect.
--
-- * http://homepages.cwi.nl/~ploeg/papers/zseq.pdf
module Data.Vector.Based where
import Control.Applicative
import Control.Monad
import Data.Map (Map)
import qualified Data.Map as Map
-- | A free vector over scalars @s@ with basis @b@: a finite map from
-- basis elements to coefficients.  (Appears unused by 'Vect' below,
-- which works with 'Map' directly.)
newtype Vect_ s b = Vect_ { unVect_ :: Map b s }
-- | Embed a basis element as the unit vector along it (coefficient 1).
returnV :: Num s => b -> Map b s
returnV b = Map.insert b 1 Map.empty
-- | Monadic join for the concrete representation: a vector whose basis
-- elements are themselves vectors collapses by scaling each inner map
-- by its outer coefficient and summing coefficients pointwise.
joinV :: (Num s, Ord b) => Map (Map b s) s -> Map b s
joinV = Map.unionsWith (+)
      . map (\(m, v) -> fmap (v *) m)
      . Map.toList
-- | Kleisli-style composition for the concrete vector representation:
-- run @f@, then bind the resulting vector through @g@.
(>>>)
  :: (Ord b, Ord s, Num s) =>
     (t -> Map k1 s) -> (k1 -> Map b s) -> t -> Map b s
f >>> g = \a -> f a >>- g
-- | Bind for the concrete representation: send every basis element of
-- @m@ through @f@, then collapse with 'joinV'.
--
-- Fix: 'Map.mapKeysWith' (+) replaces 'Map.mapKeys'.  When @f@ sends
-- two distinct basis elements to the same result vector, 'Map.mapKeys'
-- keeps only one of the two outer coefficients, silently losing weight;
-- linearity requires the coefficients to be summed before 'joinV'
-- scales the shared inner vector.
(>>-)
  :: (Ord s, Ord b, Num s) => Map k1 s -> (k1 -> Map b s) -> Map b s
m >>- f = joinV (Map.mapKeysWith (+) f m)
-- | A Kleisli arrow of the vector \"monad\" -- a function from a basis
-- element to a vector -- wrapped so it can live in a type-aligned list.
data K s a b = K { appK :: a -> Map b s }
-- | A vector with a queue of pending binds (van der Ploeg/Kiselyov
-- style): a concrete coefficient map over some intermediate basis @x@
-- plus a type-aligned list of arrows taking @x@ to the advertised
-- basis @b@.
data Vect s b where
  Vect :: Map x s -> TList (K s) x b -> Vect s b
  -- This basically ruins it
  -- ('Vect1' hides the intermediate basis existentially with no Ord
  -- dictionary, so 'prj' cannot flatten it; see the note on 'prj'.)
  Vect1 :: Vect s b -> (b -> Vect s c) -> Vect s c
-- | Compose an entire type-aligned sequence of Kleisli arrows into a
-- single arrow, associating to the right; the empty sequence is the
-- unit arrow 'returnV'.
concatTs :: (TSequence seq, Num s, Ord s, Ord b)
         => seq (K s) a b -> K s a b
concatTs x = case tviewl x of
  TEmptyL -> K returnV
  k :< ks -> K (appK k >>> appK (concatTs ks))
-- | Lift a plain coefficient map into the queued-bind representation
-- (with an empty queue).
inj :: Map b s -> Vect s b
inj coeffs = Vect coeffs Nil
-- | Flatten a 'Vect' back to a concrete coefficient map by running the
-- queued binds.
--
-- NOTE(review): this match is partial -- there is no 'Vect1' case, so
-- projecting any vector built with '>>=' fails at runtime.  A case
-- cannot be added as-is because 'Vect1' existentially hides its
-- intermediate basis without an 'Ord' dictionary; confirm whether
-- 'Vect1' should carry an 'Ord' constraint instead.
prj :: (Num s, Ord s, Ord b) => Vect s b -> Map b s
prj (Vect m0 x) = m0 >>- appK (concatTs x)
-- Functor and Applicative are derived from the Monad structure.
instance Num s => Functor (Vect s) where
  fmap = liftM
instance Num s => Applicative (Vect s) where
  pure = return
  (<*>) = ap
instance Num s => Monad (Vect s) where
  return = inj . returnV
  -- Bind just queues the continuation (constant time) -- but note that
  -- 'prj' has no case for the 'Vect1' this produces.
  v >>= f = Vect1 v f
-- Map x s -> TList (K s) x a -> (a -> Vect s b) -> Vect s b
--------------------------------------------------------------------------------
-- | Left view of a type-aligned sequence: either empty (endpoints
-- coincide) or a head arrow followed by the remaining sequence.
data TViewl s c x y where
  TEmptyL :: TViewl s c x x
  (:<) :: c x y -> s c y z -> TViewl s c x z
-- | Type-aligned sequences: containers of composable arrows @c x y@
-- supporting empty, singleton, append, and left view.
class TSequence s where
  tempty :: s c x x
  tsingleton :: c x y -> s c x y
  (><) :: s c x y -> s c y z -> s c x z
  tviewl :: s c x y -> TViewl s c x y
-- | Not the most efficient, but it'll do for initial implementation.
-- Later this can be swapped out.
infixr 5 :-
-- | Type-aligned cons list; '><' is O(n) in its left argument.
data TList c x y where
  Nil :: TList c x x
  (:-) :: c x y -> TList c y z -> TList c x z
instance TSequence TList where
  tempty = Nil
  tsingleton cxy = cxy :- Nil
  Nil >< t = t
  (t :- c) >< t' = t :- (c >< t')
  tviewl Nil = TEmptyL
  tviewl (t :- c) = t :< c
| tel/based-vectors | src/Data/Vector/Based.hs | bsd-3-clause | 3,764 | 0 | 13 | 966 | 1,131 | 604 | 527 | 63 | 2 |
module CalculatorKata.Day1 (calculate) where
import Data.List (splitAt)
-- | Evaluate a tiny arithmetic expression of the form
-- @<number><op><number>@ -- or a bare number -- where @<op>@ is one of
-- @+ - * /@.  Both operands are parsed with 'read'.
calculate :: String -> Double
calculate source =
    case break (`elem` "+-*/") source of
        (lhs, op : rhs) -> apply op (read lhs) (read rhs)
        (lhs, [])       -> read lhs
  where
    apply '+' = (+)
    apply '-' = (-)
    apply '*' = (*)
    apply '/' = (/)
| Alex-Diez/haskell-tdd-kata | old-katas/src/CalculatorKata/Day1.hs | bsd-3-clause | 810 | 0 | 19 | 333 | 253 | 125 | 128 | 16 | 5 |
module Main where
import Control.Monad
import System.Environment
import System.Exit
import System.IO
type Triplet = (Int, Int, Int)
-- | Read the input file named as the sole command-line argument: the
-- first line is skipped (case count), and each remaining line is one
-- test case of whitespace-separated integers.  Prints one
-- @Case #n: result@ line per case.
main :: IO ()
main = do
    args <- getArgs
    when (length args /= 1) $ die "Usage: Solve2 <file>"
    contents <- lines `fmap` readFile (args !! 0)
    let scores = map (\l -> map readi $ words l) $ tail contents
        solns = map solveG scores
    mapM_ printSoln $ zip [1..] solns
  where
    -- Pins 'read' to Int so the parsed lists are unambiguous.
    readi :: String -> Int
    readi = read
    printSoln :: (Int, Int) -> IO ()
    printSoln (n, soln) = putStrLn $ "Case #" ++ show n ++ ": " ++ show soln
-- | Print a message to stderr and abort the program with a failure
-- exit code.
die :: String -> IO ()
die msg = hPutStrLn stderr msg >> exitFailure
-- | Count qualifying cases on one input line.  Layout: the first value
-- is unused here (presumably a count -- TODO confirm against the
-- problem statement), the second is the budget of \"surprising\"
-- triplets allowed, the third is the threshold @p@, and the rest are
-- total scores.
--
-- NOTE(review): the pattern is partial -- lines with fewer than three
-- values crash.  Input is trusted to be well-formed.
solveG :: [Int] -> Int
solveG (g : s : p : scores') = solve 0 s scores'
  where
    -- Walk the scores spending the surprise budget greedily: prefer a
    -- non-surprising qualifying triplet; otherwise pay one surprise.
    solve soln _ [] = soln
    solve soln sup (score : scores) =
      let (s1, s2) = genPossibleTriplets score
      in case (good s1, good s2) of
        (True, _) | not (surprising s1) -> solve (soln + 1) sup scores
        (_, True) | not (surprising s2) -> solve (soln + 1) sup scores
        (True, _) | sup > 0 -> solve (soln + 1) (sup - 1) scores
        (_, True) | sup > 0 -> solve (soln + 1) (sup - 1) scores
        _ -> solve soln sup scores
    -- A candidate counts when it is in range and its best score meets
    -- the threshold.
    good s = tripletValid s && best s >= p
-- | The two candidate score triplets summing to @n@: the most even
-- split (non-surprising shape) and the most spread split (surprising
-- shape).  Integer 'div' rounds toward negative infinity, matching the
-- original @floor (toRational n / 3)@.
genPossibleTriplets :: Int -> (Triplet, Triplet)
genPossibleTriplets n = case n `mod` 3 of
    0 -> let x = n `div` 3
         in ((x, x, x), (x - 1, x, x + 1))
    1 -> let x = (n - 1) `div` 3
         in ((x, x, x + 1), (x - 1, x + 1, x + 1))
    _ -> let x = (n - 2) `div` 3
         in ((x, x, x + 2), (x, x + 1, x + 1))
-- | A triplet is a valid set of judge scores when every component lies
-- in the inclusive range [0, 10].
--
-- Fix: replaced guards returning literal 'False'/'True' (boolean
-- blindness, flagged by hlint) with a direct predicate over the three
-- components; behavior is unchanged.
tripletValid :: Triplet -> Bool
tripletValid (x, y, z) = all inRange [x, y, z]
  where inRange v = 0 <= v && v <= 10
-- | Largest component of the triplet.
--
-- Fix: the hand-rolled three-way guard chain was syntactically
-- non-exhaustive (GHC warns even though some guard always holds);
-- 'maximum' on the three components is equivalent and total.
best :: Triplet -> Int
best (x, y, z) = maximum [x, y, z]
-- | A triplet is \"surprising\" when some pair of its scores differs by
-- 2 or more.
--
-- Fix: replaced guards returning literal 'True'/'False' with a single
-- 'any' over the three pairwise differences; behavior is unchanged.
surprising :: Triplet -> Bool
surprising (x, y, z) = any ((>= 2) . abs) [x - y, x - z, y - z]
| dterei/Scraps | codeJam2012/q2/Solve2.hs | bsd-3-clause | 2,253 | 0 | 18 | 828 | 1,145 | 588 | 557 | 58 | 6 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.